Merge pull request #417 from lsst/u/yoachim/fo_plot_fix
U/yoachim/fo plot fix
yoachim authored Sep 12, 2024
2 parents 9f3e0c5 + f99938d commit 355c171
Showing 18 changed files with 91 additions and 805 deletions.
4 changes: 1 addition & 3 deletions rubin_sim/maf/batches/col_map_dict.py
@@ -26,8 +26,6 @@ def col_map_dict(dict_name=None):
col_map["seeingGeom"] = "seeingFwhmGeom"
col_map["skyBrightness"] = "skyBrightness"
col_map["moonDistance"] = "moonDistance"
col_map["fieldId"] = "fieldId"
col_map["proposalId"] = "proposalId"
col_map["slewactivities"] = {}
col_map["metadataList"] = [
"airmass",
@@ -41,7 +39,7 @@ def col_map_dict(dict_name=None):
"saturation_mag",
]
col_map["metadataAngleList"] = ["rotSkyPos"]
col_map["note"] = "note"
col_map["scheduler_note"] = "scheduler_note"

elif dict_name == "opsimv4":
col_map = {}
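The visit-note entry of the column map is now keyed `scheduler_note`. A minimal sketch of reading it downstream, assuming the default `dict_name` resolves to the FBS column map edited above; the fallback to the old `"note"` key is illustrative and not part of this commit:

```python
from rubin_sim.maf import batches

# Assumed: calling with no argument returns the current (FBS) column map.
colmap = batches.col_map_dict()

# New key after this change; fall back to "note" only for column maps built
# against pre-rename opsim databases.
note_col = colmap.get("scheduler_note", colmap.get("note"))
print(note_col)  # expected: "scheduler_note"
```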
18 changes: 15 additions & 3 deletions rubin_sim/maf/batches/ddf_batch.py
@@ -2,7 +2,13 @@

import healpy as hp
import numpy as np
from rubin_scheduler.utils import angular_separation, ddf_locations, hpid2_ra_dec, sample_patch_on_sphere
from rubin_scheduler.utils import (
angular_separation,
ddf_locations,
ddf_locations_pre3_5,
hpid2_ra_dec,
sample_patch_on_sphere,
)

import rubin_sim.maf as maf

@@ -16,6 +22,7 @@ def ddfBatch(
nside_sne=128,
extra_sql=None,
extra_info_label=None,
old_coords=False,
):
"""
A set of metrics to evaluate DDF fields.
@@ -42,6 +49,8 @@ def ddfBatch(
necessary sql constraints for each metric.
extra_info_label : `str`, optional
Additional description information to add (alongside the extra_sql)
old_coords : `bool`
Use the default locations for the DDFs from pre-July 2024. Default False.
Returns
-------
@@ -53,7 +62,10 @@ def ddfBatch(
# Define the slicer to use for each DDF
# Get standard DDF locations and reformat information as a dictionary
ddfs = {}
ddfs_rough = ddf_locations()
if old_coords:
    ddfs_rough = ddf_locations_pre3_5()
else:
    ddfs_rough = ddf_locations()
for ddf in ddfs_rough:
ddfs[ddf] = {"ra": ddfs_rough[ddf][0], "dec": ddfs_rough[ddf][1]}
# Combine the Euclid double-field into one - but with two ra/dec values
@@ -474,7 +486,7 @@ def ddfBatch(
for f in filterlist:
fieldsqls[f] = sqls[f]
else:
fieldsql = f"note like '%{fieldname}%'"
fieldsql = f"scheduler_note like '%{fieldname}%'"
for f in filterlist:
if len(sqls[f]) > 0:
fieldsqls[f] = fieldsql + " and " + sqls[f]
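The new `old_coords` switch selects which set of DDF centres the slicers are built around. A hedged usage sketch (run name and nside are placeholders; the call signature matches the `ddf_dir` driver further below):

```python
from rubin_sim.maf import batches

# Default: evaluate the DDFs at their current (post-July 2024) locations.
bdict = batches.ddfBatch(run_name="my_run", nside=512)

# For simulations scheduled with the older field centres, use the pre-3.5 set.
bdict_old = batches.ddfBatch(run_name="my_run", nside=512, old_coords=True)
```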
26 changes: 14 additions & 12 deletions rubin_sim/maf/batches/glance_batch.py
@@ -144,7 +144,7 @@ def glanceBatch(
bundle_list.append(bundle)

# alt az of long gaps
sql = "note = 'long'"
sql = "scheduler_note = 'long'"
metric = metrics.CountMetric(colmap["mjd"], metric_name="Nvisits long")
bundle = metric_bundles.MetricBundle(
metric,
@@ -156,7 +156,7 @@
)
bundle_list.append(bundle)

sql = "note like 'blob_long%'"
sql = "scheduler_note like 'blob_long%'"
metric = metrics.CountMetric(colmap["mjd"], metric_name="Nvisits blob long")
bundle = metric_bundles.MetricBundle(
metric,
@@ -168,7 +168,7 @@
)
bundle_list.append(bundle)

sql = "note like '%neo%' or note like '%near_sun%'"
sql = "scheduler_note like '%neo%' or scheduler_note like '%near_sun%'"
metric = metrics.CountMetric(colmap["mjd"], metric_name="Nvisits twilight near sun")
bundle = metric_bundles.MetricBundle(
metric,
@@ -182,7 +182,7 @@

# alt,az pf ToO

sql = "note like 'ToO%'"
sql = "scheduler_note like 'ToO%'"
metric = metrics.CountMetric(colmap["mjd"], metric_name="Nvisits long")
bundle = metric_bundles.MetricBundle(
metric,
@@ -277,7 +277,7 @@ def glanceBatch(
bundle_list.append(bundle)

# Make a cumulative plot of a WFD spot
sql = "note not like '%NEO%' and note not like '%near_sun%'"
sql = "scheduler_note not like '%NEO%' and scheduler_note not like '%near_sun%'"
uslicer = slicers.UserPointsSlicer(ra=0, dec=-20)
metric = metrics.CumulativeMetric()
metricb = metric_bundles.MetricBundle(
@@ -362,16 +362,16 @@ def glanceBatch(
)
bundle_list.append(bundle)

# stats from the note column
if "note" in colmap.keys():
# stats from the scheduler_note column
if "scheduler_note" in colmap.keys():
displayDict = {"group": "Basic Stats", "subgroup": "Percent stats"}
metric = metrics.StringCountMetric(col=colmap["note"], percent=True, metric_name="Percents")
metric = metrics.StringCountMetric(col=colmap["scheduler_note"], percent=True, metric_name="Percents")
sql = ""
slicer = slicers.UniSlicer()
bundle = metric_bundles.MetricBundle(metric, slicer, sql, display_dict=displayDict)
bundle_list.append(bundle)
displayDict["subgroup"] = "Count Stats"
metric = metrics.StringCountMetric(col=colmap["note"], metric_name="Counts")
metric = metrics.StringCountMetric(col=colmap["scheduler_note"], metric_name="Counts")
bundle = metric_bundles.MetricBundle(metric, slicer, sql, display_dict=displayDict)
bundle_list.append(bundle)

@@ -381,7 +381,7 @@
displayDict["subgroup"] = ""
for ddf in ddf_surveys:
label = ddf.replace("DD:", "")
sql = 'note like "%s%%"' % ("DD:" + label)
sql = 'scheduler_note like "%s%%"' % ("DD:" + label)
slicer = slicers.UniSlicer()
metric = metrics.CumulativeMetric()
metricb = metric_bundles.MetricBundle(
@@ -515,7 +515,7 @@ def glanceBatch(
lat_lon_deg=colmap["raDecDeg"],
)
for filtername in filternames:
sql = "filter='%s' and note like 'ToO%%'" % filtername
sql = "filter='%s' and scheduler_note like 'ToO%%'" % filtername
metric = metrics.CountMetric(col=colmap["mjd"], metric_name="N ToO")
bundle = metric_bundles.MetricBundle(
metric,
Expand All @@ -528,7 +528,9 @@ def glanceBatch(
)
bundle_list.append(bundle)

too_sqls = ["note like 'ToO, %" + "t%i'" % hour for hour in [0, 1, 2, 4, 24, 48]] + ["note like 'ToO, %'"]
too_sqls = ["scheduler_note like 'ToO, %" + "t%i'" % hour for hour in [0, 1, 2, 4, 24, 48]] + [
"scheduler_note like 'ToO, %'"
]
slicer = slicers.UniSlicer()
for sql in too_sqls:
metric = metrics.CountMetric(col="night")
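Every hand-written SQL constraint in this batch now filters on `scheduler_note` rather than `note`. A minimal sketch of the same pattern outside the batch, assuming the top-level `maf` namespace re-exports the metric, slicer, and bundle classes (as it does for `maf.DepthLimitedNumGalMetric` elsewhere in this commit):

```python
import rubin_sim.maf as maf

# Fraction of visits per scheduler_note value, over the whole survey.
percents = maf.MetricBundle(
    maf.StringCountMetric(col="scheduler_note", percent=True, metric_name="Percents"),
    maf.UniSlicer(),
    "",
)

# Constraints that used to read "note like 'ToO%'" now reference scheduler_note.
too_count = maf.MetricBundle(
    maf.CountMetric(col="night", metric_name="N ToO"),
    maf.UniSlicer(),
    "scheduler_note like 'ToO%'",
)
```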
4 changes: 2 additions & 2 deletions rubin_sim/maf/batches/info_batch.py
@@ -61,7 +61,7 @@ def info_bundle_dicts(allsky_slicer, wfd_slicer, opsim="opsim", colmap=batches.c
colmap,
opsim,
slicer=allsky_slicer,
extraSql='night > 365*3.5 and night < 365*4.5 and note not like "%DD%"',
extraSql='night > 365*3.5 and night < 365*4.5 and scheduler_note not like "%DD%"',
extraInfoLabel="Yr 3-4",
runLength=1,
)
@@ -87,7 +87,7 @@ def info_bundle_dicts(allsky_slicer, wfd_slicer, opsim="opsim", colmap=batches.c
extraInfoLabel="WFD",
)
)
dd_constraint = "note like '%DD%'"
dd_constraint = "scheduler_note like '%DD%'"
bdict.update(
batches.nvisitsPerSubset(
colmap,
14 changes: 7 additions & 7 deletions rubin_sim/maf/batches/radar_limited.py
@@ -529,7 +529,7 @@ def radar_limited(
for yr_cut in yrs:
ptsrc_lim_mag_i_band = mag_cuts[yr_cut]
sqlconstraint = "night <= %s" % (yr_cut * 365.25 + 0.5)
sqlconstraint += ' and note not like "DD%"'
sqlconstraint += ' and scheduler_note not like "DD%"'
info_label = f"{bandpass} band non-DD year {yr_cut}"
ThreebyTwoSummary_simple = metrics.StaticProbesFoMEmulatorMetricSimple(
nside=nside, year=yr_cut, metric_name="3x2ptFoM_simple"
@@ -581,7 +581,7 @@ def radar_limited(
subgroupCount += 1
displayDict["subgroup"] = f"{subgroupCount}: WL"
displayDict["order"] = 0
sqlconstraint = 'note not like "DD%" and (filter="g" or filter="r" or filter="i")'
sqlconstraint = 'scheduler_note not like "DD%" and (filter="g" or filter="r" or filter="i")'
info_label = "gri band non-DD"
minExpTime = 15
m = metrics.WeakLensingNvisits(
@@ -612,7 +612,7 @@ def radar_limited(
# Do the weak lensing per year
for year in [10]:
sqlconstraint = (
'note not like "DD%"'
'scheduler_note not like "DD%"'
+ ' and (filter="g" or filter="r" or filter="i") and night < %i' % (year * 365.25)
)
m = metrics.WeakLensingNvisits(
@@ -648,7 +648,7 @@ def radar_limited(
bundleList.append(bundle)

sqlconstraint = (
'note not like "DD%"'
'scheduler_note not like "DD%"'
+ ' and (filter="r" or filter="i" or filter="z") and night < %i' % (year * 365.25)
)
m = metrics.WeakLensingNvisits(
@@ -741,7 +741,7 @@ def radar_limited(
bundle = mb.MetricBundle(
metric,
snslicer,
"note not like '%DD%'",
"scheduler_note not like '%DD%'",
plot_dict=plotDict,
display_dict=displayDict,
info_label="DDF excluded",
@@ -925,7 +925,7 @@ def radar_limited(
bundle = mb.MetricBundle(
metric,
kneslicer,
"note not like 'DD%'",
"scheduler_note not like 'DD%'",
run_name=runName,
info_label="single model",
summary_metrics=lightcurve_summary(),
@@ -947,7 +947,7 @@ def radar_limited(
bundle = mb.MetricBundle(
metric_allkne,
kneslicer_allkne,
"note not like 'DD%'",
"scheduler_note not like 'DD%'",
run_name=runName,
info_label="all models",
summary_metrics=lightcurve_summary(),
20 changes: 10 additions & 10 deletions rubin_sim/maf/batches/science_radar_batch.py
@@ -568,7 +568,7 @@ def science_radar_batch(
for yr_cut in yrs:
ptsrc_lim_mag_i_band = mag_cuts[yr_cut]
sqlconstraint = "night <= %s" % (yr_cut * 365.25 + 0.5)
sqlconstraint += ' and note not like "DD%"'
sqlconstraint += ' and scheduler_note not like "DD%"'
info_label = f"{bandpass} band non-DD year {yr_cut}"
ThreebyTwoSummary_simple = metrics.StaticProbesFoMEmulatorMetricSimple(
nside=nside, year=yr_cut, metric_name="3x2ptFoM_simple"
@@ -614,7 +614,7 @@ def science_radar_batch(
plotDict = {"n_ticks": 5}
# Have to include all filters in query to check for filter coverage.
# Galaxy numbers calculated using 'bandpass' images only though.
sqlconstraint = 'note not like "DD%"'
sqlconstraint = 'scheduler_note not like "DD%"'
info_label = f"{bandpass} band galaxies non-DD"
metric = maf.DepthLimitedNumGalMetric(
nside=nside,
@@ -654,7 +654,7 @@ def science_radar_batch(
subgroupCount += 1
displayDict["subgroup"] = f"{subgroupCount}: WL"
displayDict["order"] = 0
sqlconstraint = 'note not like "DD%" and (filter="g" or filter="r" or filter="i")'
sqlconstraint = 'scheduler_note not like "DD%" and (filter="g" or filter="r" or filter="i")'
info_label = "gri band non-DD"
minExpTime = 15
m = metrics.WeakLensingNvisits(
@@ -686,7 +686,7 @@ def science_radar_batch(
for year in np.arange(1, 10):
displayDict["order"] = year
sqlconstraint = (
'note not like "DD%"'
'scheduler_note not like "DD%"'
+ ' and (filter="g" or filter="r" or filter="i") and night < %i' % (year * 365.25)
)
m = metrics.WeakLensingNvisits(
@@ -722,7 +722,7 @@ def science_radar_batch(
bundleList.append(bundle)

sqlconstraint = (
'note not like "DD%"'
'scheduler_note not like "DD%"'
+ ' and (filter="r" or filter="i" or filter="z") and night < %i' % (year * 365.25)
)
m = metrics.WeakLensingNvisits(
@@ -781,7 +781,7 @@ def science_radar_batch(
# Kuiper per year in gri and riz
for year in np.arange(1, 10):
sqlconstraint = (
'note not like "DD%"'
'scheduler_note not like "DD%"'
+ ' and (filter="g" or filter="r" or filter="i") and night < %i' % (year * 365.25)
)
metric1 = metrics.KuiperMetric("rotSkyPos", metric_name="Kuiper_rotSkyPos_gri_year%i" % year)
@@ -835,7 +835,7 @@ def science_radar_batch(
bundle = mb.MetricBundle(
metric,
snslicer,
"note not like '%DD%'",
"scheduler_note not like '%DD%'",
plot_dict=plotDict,
display_dict=displayDict,
info_label="DDF excluded",
@@ -858,7 +858,7 @@ def science_radar_batch(

# Calculate the number of expected QSOs, in each band
for f in filterlist:
sql = filtersqls[f] + ' and note not like "%DD%"'
sql = filtersqls[f] + ' and scheduler_note not like "%DD%"'
md = filterinfo_label[f] + " and non-DD"
summaryMetrics = [metrics.SumMetric(metric_name="Total QSO")]
zmin = 0.3
@@ -1359,7 +1359,7 @@ def science_radar_batch(
bundle = mb.MetricBundle(
metric,
kneslicer,
"note not like 'DD%'",
"scheduler_note not like 'DD%'",
run_name=runName,
info_label="single model",
summary_metrics=lightcurve_summary(),
@@ -1381,7 +1381,7 @@ def science_radar_batch(
bundle = mb.MetricBundle(
metric_allkne,
kneslicer_allkne,
"note not like 'DD%'",
"scheduler_note not like 'DD%'",
run_name=runName,
info_label="all models",
summary_metrics=lightcurve_summary(),
5 changes: 4 additions & 1 deletion rubin_sim/maf/ddf_dir.py
@@ -21,6 +21,9 @@ def ddf_dir():
parser = argparse.ArgumentParser()
parser.add_argument("--db", type=str, default=None)
parser.add_argument("--nside", type=int, default=512)
parser.add_argument("--old_coords", dest="old_coords", action="store_true")
parser.set_defaults(verbose=False)

args = parser.parse_args()

if args.db is None:
@@ -35,7 +38,7 @@ def ddf_dir():
shutil.rmtree(name + "_ddf")

bdict = {}
bdict.update(batches.ddfBatch(run_name=name, nside=args.nside))
bdict.update(batches.ddfBatch(run_name=name, nside=args.nside, old_coords=args.old_coords))
results_db = db.ResultsDb(out_dir=name + "_ddf")
group = mb.MetricBundleGroup(
bdict,
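Assuming the console-script entry point keeps the same name, the new flag would be passed as, for example, `ddf_dir --db my_run.db --nside 512 --old_coords`, which forwards `old_coords=True` to `batches.ddfBatch` as in the sketch above.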
2 changes: 1 addition & 1 deletion rubin_sim/maf/metrics/sn_n_sn_metric.py
@@ -86,7 +86,7 @@ def __init__(
nexp_col="numExposures",
vistime_col="visitTime",
seeing_col="seeingFwhmEff",
note_col="note",
note_col="scheduler_note",
season=[-1],
coadd_night=True,
zmin=0.1,
2 changes: 1 addition & 1 deletion rubin_sim/maf/metrics/use_metrics.py
@@ -8,7 +8,7 @@
class UseMetric(BaseMetric): # pylint: disable=too-few-public-methods
"""Metric to classify visits by type of visits"""

def __init__(self, note_col="note", **kwargs):
def __init__(self, note_col="scheduler_note", **kwargs):
self.note_col = note_col
super().__init__(col=[note_col], metric_dtype="object", **kwargs)

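Both metrics touched here (`sn_n_sn_metric.py` and `use_metrics.py`) now default `note_col` to `scheduler_note`. A hedged sketch of overriding that default for opsim databases written before the rename, assuming `UseMetric` is re-exported from `rubin_sim.maf.metrics` like the other metrics:

```python
from rubin_sim.maf.metrics import UseMetric

# Default after this change: classify visits using the scheduler_note column.
use_metric = UseMetric()

# Older opsim outputs that still carry a "note" column can be pointed back at it.
use_metric_old = UseMetric(note_col="note")
```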