From c0674dc7f2c6da91d35561dddedcaf2b6dcf8aa2 Mon Sep 17 00:00:00 2001 From: Saianeesh Keshav Haridas Date: Thu, 26 Dec 2024 17:15:35 +0000 Subject: [PATCH 01/69] fix: changes to context loading --- sotodlib/site_pipeline/finalize_focal_plane.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/sotodlib/site_pipeline/finalize_focal_plane.py b/sotodlib/site_pipeline/finalize_focal_plane.py index 1e80bc662..40ceeeda9 100644 --- a/sotodlib/site_pipeline/finalize_focal_plane.py +++ b/sotodlib/site_pipeline/finalize_focal_plane.py @@ -582,6 +582,8 @@ def main(): ) # ~1 sigma cut weights[weights < 0.61] = np.nan + if np.sum(np.isfinite(weights)) < min_points/2: + logger.error("\t\tToo few points! Skipping...") # Store weighted values focal_plane.add_fp(i, fp, weights, template_msk) From 5ef5dca672db8f777280ac7e2f04420203434a95 Mon Sep 17 00:00:00 2001 From: Saianeesh Keshav Haridas Date: Sun, 19 Jan 2025 02:52:11 +0000 Subject: [PATCH 02/69] feat: include r2 from tod fits in the output files --- sotodlib/coords/fp_containers.py | 20 +++++++++++++------ .../site_pipeline/finalize_focal_plane.py | 15 +++++++------- 2 files changed, 22 insertions(+), 13 deletions(-) diff --git a/sotodlib/coords/fp_containers.py b/sotodlib/coords/fp_containers.py index 622167b54..d9e9bb023 100644 --- a/sotodlib/coords/fp_containers.py +++ b/sotodlib/coords/fp_containers.py @@ -178,9 +178,9 @@ def empty(cls, template, stream_id, wafer_slot, n_aman): if template is None: raise TypeError("template must be an instance of Template, not None") full_fp = np.full(template.fp.shape + (n_aman,), np.nan) - tot_weight = np.zeros(len(template.det_ids)) + tot_weight = np.zeros((len(template.det_ids), 2)) avg_fp = np.full_like(template.fp, np.nan) - weight = np.zeros(len(template.det_ids)) + weight = np.zeros((len(template.det_ids), 2)) transformed = template.fp.copy() center = template.center.copy() center_transformed = template.center.copy() @@ -219,6 +219,7 @@ def map_by_det_id(self, aman): srt = 
np.argsort(aman.det_info.det_id[msk]) xi = aman.pointing.xi[msk][srt][mapping] eta = aman.pointing.eta[msk][srt][mapping] + r2 = aman.pointing.R2[msk][srt][mapping] if "polarization" in aman: # name of field just a placeholder for now gamma = aman.polarization.polang[msk][srt][mapping] @@ -227,12 +228,13 @@ def map_by_det_id(self, aman): else: gamma = np.full(len(xi), np.nan) fp = np.column_stack((xi, eta, gamma)) - return fp, template_msk + return fp, r2, template_msk def add_fp(self, i, fp, weights, template_msk): if self.full_fp is None or self.tot_weight is None: raise ValueError("full_fp or tot_weight not initialized") - self.full_fp[template_msk, :, i] = fp * weights[..., None] + self.full_fp[template_msk, :, i] = fp * weights[:, 0][..., None] + weights = np.nan_to_num(weights) self.tot_weight[template_msk] += weights def save(self, f, db_info, group): @@ -269,6 +271,7 @@ def save(self, f, db_info, group): ("eta_m", np.float32), ("gamma_m", np.float32), ("weights", np.float32), + ("r2", np.float32), ("n_point", np.int8), ("n_gamma", np.int8), ] @@ -277,7 +280,7 @@ def save(self, f, db_info, group): self.det_ids, *(self.transformed.T), *(self.avg_fp.T), - self.weights, + *(self.weights.T), self.n_point, self.n_gamma, ), @@ -305,6 +308,8 @@ def save(self, f, db_info, group): def load(cls, group): stream_id = group.name.split("/")[-1] fp_full = read_dataset(group.file, f"{group.name}/focal_plane_full") + if fp_full.keys is None: + raise ValueError("fp_full somehow has no keys") det_ids = fp_full["dets:det_id"] avg_fp = np.column_stack( ( @@ -313,7 +318,10 @@ def load(cls, group): np.array(fp_full["gamma_m"]), ) ) - weights = fp_full["weights"] + # For backwards compatibility + weights = np.array(fp_full["weights"]) + if "r2" in fp_full.keys: + weights = np.column_stack((weights, np.array(fp_full["r2"]))) transformed = np.column_stack( ( np.array(fp_full["xi_t"]), diff --git a/sotodlib/site_pipeline/finalize_focal_plane.py 
b/sotodlib/site_pipeline/finalize_focal_plane.py index 40ceeeda9..ffea95e84 100644 --- a/sotodlib/site_pipeline/finalize_focal_plane.py +++ b/sotodlib/site_pipeline/finalize_focal_plane.py @@ -60,9 +60,9 @@ def _avg_focalplane(full_fp, tot_weight): msk = np.isfinite(full_fp) n_obs = np.sum(np.any(msk, axis=1), axis=-1) n_point, _, n_gamma = tuple(np.sum(msk, axis=-1).T) - tot_weight[tot_weight == 0] = np.nan - avg_fp = np.nansum(full_fp, axis=-1) / tot_weight[..., None] - avg_weight = tot_weight / n_obs + tot_weight[tot_weight[:, 0] == 0] = np.nan + avg_fp = np.nansum(full_fp, axis=-1) / tot_weight[:, 0][..., None] + avg_weight = tot_weight / n_obs[..., None] # nansum all all nans is 0, addressing that case here all_nan = ~np.any( @@ -334,7 +334,7 @@ def _mk_pointing_config(telescope_flavor, tube_slot, wafer_slot, config): def _restrict_inliers(aman, focal_plane): # TODO: Use gamma as well # Map to template - fp, template_msk = focal_plane.map_by_det_id(aman) + fp, _, template_msk = focal_plane.map_by_det_id(aman) fp = fp[:, :2] inliers = np.ones(len(fp), dtype=bool) @@ -553,7 +553,7 @@ def main(): _restrict_inliers(aman, focal_plane) # Mapping to template - fp, template_msk = focal_plane.map_by_det_id(aman) + fp, r2, template_msk = focal_plane.map_by_det_id(aman) focal_plane.template.add_wafer_info(aman, template_msk) # Try an initial alignment and get weights @@ -582,10 +582,11 @@ def main(): ) # ~1 sigma cut weights[weights < 0.61] = np.nan - if np.sum(np.isfinite(weights)) < min_points/2: + if np.sum(np.isfinite(weights)) < min_points / 2: logger.error("\t\tToo few points! 
Skipping...") # Store weighted values + weights = np.column_stack((weights, r2)) focal_plane.add_fp(i, fp, weights, template_msk) # Compute the average focal plane with weights @@ -605,7 +606,7 @@ def main(): affine, shift = mt.get_affine_two_stage( focal_plane.template.fp[:, :2], focal_plane.avg_fp[:, :2], - focal_plane.weights, + focal_plane.weights[:, 0], ) except ValueError as e: logger.error("\t%s", e) From cfbdc46c7d00cbb5c7af5ddfc8ebdcbb139362ca Mon Sep 17 00:00:00 2001 From: Saianeesh Keshav Haridas Date: Sun, 6 Apr 2025 13:48:11 +0000 Subject: [PATCH 03/69] feat: add option to remove roll --- .../site_pipeline/finalize_focal_plane.py | 35 +++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/sotodlib/site_pipeline/finalize_focal_plane.py b/sotodlib/site_pipeline/finalize_focal_plane.py index ffea95e84..ffb1ec7e8 100644 --- a/sotodlib/site_pipeline/finalize_focal_plane.py +++ b/sotodlib/site_pipeline/finalize_focal_plane.py @@ -382,6 +382,38 @@ def _restrict_inliers(aman, focal_plane): ) +def _reverse_roll(fp, aff, sft, aman): + if "obs_info" not in aman: + raise ValueError("Can't reverse roll without obs information") + if "roll_center" not in aman.obs_info: + raise ValueError("Can't reverse roll without roll information") + roll = -1*np.deg2rad(aman.obs_info.roll_center) + + # We want to shift so we rotating about the origin + # To get to nominal we do fp@aff + sft + # So if we just want to recenter we do fp + sft@aff^-1 + inv_aff, _ = mt.invert_transform(aff, np.zeros_like(sft)) + sft_adj = sft@inv_aff + fp_sft = fp[:, :2] + sft_adj + + # Now lets reverse the roll + # The transpose is the inverse + rot = np.array([[np.cos(roll), -1*np.sin(roll)], [np.sin(roll), np.cos(roll)]]) + fp_rot = fp_sft@rot + + # And undo the shift, keeping track of rotations + fp_rot -= sft_adj@rot + + # Make sure its set + fp[:, :2] = fp_rot + + # For gamma lets just shift by the roll + fp[:, 2] -= roll + + return fp + + + def main(): # Read in input pars parser = 
ap.ArgumentParser() @@ -565,6 +597,9 @@ def main(): logger.error("\t\t%s", e) continue aligned = mt.apply_transform(fp[:, :2], aff, sft) + + if config.get('reverse_roll', False): + fp = _reverse_roll(fp, aff, sft, aman) if np.any(np.isfinite(fp[:, 2])): gscale, gsft = gamma_fit( fp[:, 2], focal_plane.template.fp[template_msk, 2] From 7699a8deb315653b7a4d5b58bdb1aa0bd52bd8a2 Mon Sep 17 00:00:00 2001 From: Saianeesh Keshav Haridas Date: Sun, 27 Apr 2025 16:07:32 +0000 Subject: [PATCH 04/69] fixes for handling LAT data --- sotodlib/coords/fp_containers.py | 10 ++++--- .../site_pipeline/finalize_focal_plane.py | 29 ++++++++++++------- 2 files changed, 25 insertions(+), 14 deletions(-) diff --git a/sotodlib/coords/fp_containers.py b/sotodlib/coords/fp_containers.py index d9e9bb023..6a8d93f6c 100644 --- a/sotodlib/coords/fp_containers.py +++ b/sotodlib/coords/fp_containers.py @@ -219,7 +219,9 @@ def map_by_det_id(self, aman): srt = np.argsort(aman.det_info.det_id[msk]) xi = aman.pointing.xi[msk][srt][mapping] eta = aman.pointing.eta[msk][srt][mapping] - r2 = aman.pointing.R2[msk][srt][mapping] + r2 = np.nan + np.zeros_like(eta) + if "r2" in aman.pointing: + r2 = aman.pointing.R2[msk][srt][mapping] if "polarization" in aman: # name of field just a placeholder for now gamma = aman.polarization.polang[msk][srt][mapping] @@ -641,21 +643,21 @@ def plot_receiver(receiver, plot_dir): xlims, ylims = receiver.lims fig, axs_all = plt.subplots( - 4, 3, sharex="col", sharey="row", constrained_layout=True + len(valid_ids), 3, sharex="col", sharey="row", constrained_layout=True ) axs = axs_all.flat axs[0].set_title("Xi") axs[1].set_title("Eta") axs[2].set_title("Gamma") cf = None - for i in range(4): + for i in range(len(valid_ids)): for j in range(3): axs[3 * i + j].set_aspect("equal") axs[3 * i + j].set_xlim(xlims) axs[3 * i + j].set_ylim(ylims) for fp in receiver.focal_planes: msk = (fp.template.id_strs == valid_ids[i]) * fp.isfinite - if np.sum(msk) == 0: + if np.sum(msk) < 3: 
continue diff = fp.diff * 180 * 60 * 60 / np.pi cf = axs[3 * i + 0].tricontourf( diff --git a/sotodlib/site_pipeline/finalize_focal_plane.py b/sotodlib/site_pipeline/finalize_focal_plane.py index ffb1ec7e8..f9250210b 100644 --- a/sotodlib/site_pipeline/finalize_focal_plane.py +++ b/sotodlib/site_pipeline/finalize_focal_plane.py @@ -139,13 +139,15 @@ def _load_template(template_path, ufm, pointing_cfg): def _get_obs_ids(ctx, metalist, start_time, stop_time, query=None, obs_ids=[], tags=[]): - query_all = query + all_obs = obs_ids query_obs = [] - if query is None: - query_all = f"type=='obs' and start_time>{start_time} and stop_time<{stop_time}" - if ctx.obsdb is None: - raise ValueError("No obsdb!") - all_obs = ctx.obsdb.query(query_all, tags=tags)["obs_id"] + if len(obs_ids) == 0: + query_all = query + if query is None: + query_all = f"type=='obs' and start_time>{start_time} and stop_time<{stop_time}" + if ctx.obsdb is None: + raise ValueError("No obsdb!") + all_obs = ctx.obsdb.query(query_all, tags=tags)["obs_id"] dbs = [ metadata.ManifestDb(md["db"]) for md in ctx["metadata"] @@ -194,7 +196,11 @@ def _load_ctx(config): if roll < roll_range[0] or roll > roll_range[1]: logger.info("%s has a roll that is out of range", obs_id) continue - aman = ctx.get_meta(obs_id, dets=dets) + try: + aman = ctx.get_meta(obs_id, dets=dets) + except metadata.loader.LoaderError: + logger.error("Failed to load %s, skipping", obs_id) + continue if aman.obs_info.tube_slot == "stp1": aman.obs_info.tube_slot = "st1" if "det_info" not in aman: @@ -568,17 +574,20 @@ def main(): for i, (aman, obs_id) in enumerate(zip(amans_restrict, obs_ids)): logger.info("\tWorking on %s", obs_id) - if aman.dets.count == 0: - logger.info("\t\tNo dets found, skipping") + if aman.dets.count < min_points: + logger.info("\t\tToo few dets found, skipping") continue + if config.get("faked_gamma", False): + aman.pointing.gamma[:] = np.nan + # Restrict to optical dets optical = np.isin( aman.det_info.det_id, 
focal_plane.template.det_ids[template.optical] ) aman.restrict("dets", aman.dets.vals[optical]) if aman.dets.count == 0: - logger.info("\t\tNo optical dets, skipping", stream_id) + logger.info("\t\tNo optical dets, skipping...") continue # Do some outlier cuts From e9496ea67143e58a223a747a74227953f3e3893f Mon Sep 17 00:00:00 2001 From: Saianeesh Keshav Haridas Date: Mon, 12 May 2025 17:55:12 +0000 Subject: [PATCH 05/69] feat: add lat pointing model and pointing model support in focal plane creation --- sotodlib/coords/pointing_model.py | 204 +++++++++++++----- .../site_pipeline/finalize_focal_plane.py | 94 +++++++- 2 files changed, 231 insertions(+), 67 deletions(-) diff --git a/sotodlib/coords/pointing_model.py b/sotodlib/coords/pointing_model.py index 4b2be6f7e..8c70a634b 100644 --- a/sotodlib/coords/pointing_model.py +++ b/sotodlib/coords/pointing_model.py @@ -1,12 +1,14 @@ # Copyright (c) 2024 Simons Observatory. # Full license can be found in the top level "LICENSE" file. +import logging + import numpy as np +from so3g.proj import quat + from .. import core from .helpers import _valid_arg -from so3g.proj import quat -import logging logger = logging.getLogger(__name__) DEG = np.pi / 180 @@ -42,16 +44,15 @@ def apply_basic_pointing_model(tod): _boresight.wrap("el", _ancil.el_enc * DEG, [(0, "samps")]) _boresight.wrap("roll", -1 * _ancil.boresight_enc * DEG, [(0, "samps")]) - if 'boresight' in tod: - del tod['boresight'] + if "boresight" in tod: + del tod["boresight"] - tod.wrap('boresight', _boresight) + tod.wrap("boresight", _boresight) return _boresight -def apply_pointing_model(tod, pointing_model=None, ancil=None, - wrap=None): +def apply_pointing_model(tod, pointing_model=None, ancil=None, wrap=None): """Applies a static pointing model to compute corrected boresight position and orientation in horizon coordinates. 
The encoder values in tod.ancil are consumed as raw data, and the computed @@ -72,28 +73,33 @@ def apply_pointing_model(tod, pointing_model=None, ancil=None, AxisManager: the corrected boresight. """ - if pointing_model is None and 'pointing_model' not in tod: - logger.warning('No pointing_model found -- applying basic model.') - assert wrap in (None, 'boresight'), \ - 'When using naive pointing model, wrap=... not supported' + if pointing_model is None and "pointing_model" not in tod: + logger.warning("No pointing_model found -- applying basic model.") + assert wrap in ( + None, + "boresight", + ), "When using naive pointing model, wrap=... not supported" return apply_basic_pointing_model(tod) - pointing_model = _valid_arg(pointing_model, 'pointing_model', - src=tod) - ancil = _valid_arg(ancil, 'ancil', src=tod) + pointing_model = _valid_arg(pointing_model, "pointing_model", src=tod) + ancil = _valid_arg(ancil, "ancil", src=tod) # Encoder values, to radians. - vers = pointing_model['version'] - tel_type = vers.split('_')[0] - if tel_type == 'sat': + if pointing_model is None: + raise ValueError("No pointing_model specified") + if "version" not in pointing_model: + raise ValueError("Pointing model version not specified") + vers = pointing_model["version"] + tel_type = vers.split("_")[0] + if tel_type == "sat": boresight = apply_pointing_model_sat(vers, pointing_model, tod, ancil) - elif tel_type == 'lat': - boresight = apply_pointing_model_lat(vers, pointing_model, tod, ancil) + elif tel_type == "lat": + boresight = apply_pointing_model_lat(vers, pointing_model, ancil) else: raise ValueError(f'Unimplemented pointing model "{vers}"') if wrap is None: - wrap = 'boresight' + wrap = "boresight" if wrap is not False: if wrap in tod._fields: del tod[wrap] @@ -104,10 +110,10 @@ def apply_pointing_model(tod, pointing_model=None, ancil=None, def apply_pointing_model_sat(vers, params, tod, ancil): az, el, roll = _get_sat_enc_radians(ancil) - if vers == 'sat_naive': + if vers 
== "sat_naive": return _new_boresight(ancil.samps, az=az, el=el, roll=roll) - elif vers == 'sat_v1': + elif vers == "sat_v1": az1, el1, roll1 = model_sat_v1(params, az, el, roll) return _new_boresight(ancil.samps, az=az1, el=el1, roll=roll1) @@ -115,8 +121,75 @@ def apply_pointing_model_sat(vers, params, tod, ancil): raise ValueError(f'Unimplemented pointing model "{vers}"') -def apply_pointing_model_lat(vers, tod, pointing_model, ancil): - raise ValueError(f'Unimplemented pointing model "{vers}"') +def apply_pointing_model_lat(vers, params, ancil): + az, el, roll = _get_lat_enc_radians(ancil) + if vers == "lat_v1": + az1, el1, roll1 = model_lat_v1(params, az, el, roll) + return _new_boresight(ancil.samps, az=az1, el=el1, roll=roll1) + + else: + raise ValueError(f'Unimplemented pointing model "{vers}"') + + +# +# LAT model(s) +# +def model_lat_v1(params, az, el, roll): + """Applies pointing model to (az, el, roll). + + Args: + params: AxisManager (or dict) of pointing parameters. + az, el, roll: naive horizon coordinates, in radians, of the + boresight. + + The implemented model parameters are: + + - rx_{xi, eta}_offset: The offset between the LATR center and the axis + of the corotator. + - cr_offset: The corotator encoder offset. + - el_{xi, eta}_offset: The offset between the elevation axis and the + corotator axis. + - mir_{xi, eta}_offset: The offset between the mirror's axis and the elevation + axis (ie: a tilt in the mirrors). + - az_offset: The azimuth encoder offset. + - el_offset: The elevation encoder offset. 
+ """ + _params = { + "az_offset": -6.431949463522369e-07, + "el_offset": 0.10784213916209494, + "cr_offset": 0.1068009204473848, + "el_xi_offset": 0.07524197484976551, + "el_eta_offset": 0.13704409641376075, + "rx_xi_offset": -0.03523515856319279, + "rx_eta_offset": -0.005858421274865713, + "mir_xi_offset": -0.17792288910750848, + "mir_eta_offset": 0.21440894087003562, + } + _params = _params.copy() + _params.update(params) + params = _params + cr = el - roll - np.deg2rad(60) + for key, val in params.items(): + if not isinstance(val, (float, int)): + continue + params[key] = np.deg2rad(val) + q_enc = quat.rotation_lonlat( + -1 * (az.copy() + params["az_offset"]), el.copy() + params["el_offset"] + ) + q_mir = quat.rotation_xieta(params["mir_xi_offset"], params["mir_eta_offset"]) + q_el_roll = quat.euler(2, el.copy() + params["el_offset"] - np.deg2rad(60)) + q_tel = quat.rotation_xieta(params["el_xi_offset"], params["el_eta_offset"]) + q_cr_roll = quat.euler(2, -1 * cr - params["cr_offset"]) + q_rx = quat.rotation_xieta(params["rx_xi_offset"], params["rx_eta_offset"]) + new_az, el, roll = ( + quat.decompose_lonlat(q_enc * q_mir * q_el_roll * q_tel * q_cr_roll * q_rx) + * np.array([-1, 1, 1])[..., None] + ) + + change = ((new_az - az) + np.pi) % (2 * np.pi) - np.pi + az = az.copy() + change + + return az, el, roll # @@ -127,18 +200,19 @@ def apply_pointing_model_lat(vers, tod, pointing_model, ancil): # if their value is zero (and that should be the registered default). 
defaults_sat_v1 = { - 'enc_offset_az': 0., - 'enc_offset_el': 0., - 'enc_offset_boresight': 0., - 'fp_offset_xi0': 0., - 'fp_offset_eta0': 0., - 'fp_rot_xi0': 0., - 'fp_rot_eta0': 0., - 'az_rot': 0., - 'base_tilt_cos': 0., - 'base_tilt_sin': 0., + "enc_offset_az": 0.0, + "enc_offset_el": 0.0, + "enc_offset_boresight": 0.0, + "fp_offset_xi0": 0.0, + "fp_offset_eta0": 0.0, + "fp_rot_xi0": 0.0, + "fp_rot_eta0": 0.0, + "az_rot": 0.0, + "base_tilt_cos": 0.0, + "base_tilt_sin": 0.0, } + def model_sat_v1(params, az, el, roll): """Applies pointing model to (az, el, roll). @@ -152,12 +226,12 @@ def model_sat_v1(params, az, el, roll): - fp_rot_{xi,eta}0: within the focal plane (i.e. relative to the corrected boresight), the center of rotation of the boresight. Radians. - - fp_offset_{xi,eta}0: Offset of focal plane's rotational center + - fp_offset_{xi,eta}0: Offset of focal plane's rotational center relative to position in focal plane that lies along the optical axis. Corrects for "collimation error". - - enc_offset_{az,el,boresight}: Encoder offsets, in Radians. + - enc_offset_{az,el,boresight}: Encoder offsets, in Radians. Sign convention: True = Encoder + Offset - - base_tilt_{cos,sin}: Base tilt coefficients, in radians. + - base_tilt_{cos,sin}: Base tilt coefficients, in radians. - az_rot: Dimensionless parameter describing a linear dependence of Az on El. """ @@ -169,31 +243,37 @@ def model_sat_v1(params, az, el, roll): params, _p = _p, None for k, v in params.items(): - if k == 'version': + if k == "version": continue - if k not in defaults_sat_v1 and v != 0.: + if k not in defaults_sat_v1 and v != 0.0: raise ValueError(f'Handling of model param "{k}" is not implemented.') # Construct offsetted encoders. 
az_orig = az.copy() - az_twist = params['az_rot'] * (el + params['enc_offset_el']) - az = az + params['enc_offset_az'] + az_twist - el = el + params['enc_offset_el'] - roll = roll - params['enc_offset_boresight'] + az_twist = params["az_rot"] * (el + params["enc_offset_el"]) + az = az + params["enc_offset_az"] + az_twist + el = el + params["enc_offset_el"] + roll = roll - params["enc_offset_boresight"] # Rotation that tilts the base (referred to vals after enc correction). - base_tilt = get_base_tilt_q(params['base_tilt_cos'], params['base_tilt_sin']) + base_tilt = get_base_tilt_q(params["base_tilt_cos"], params["base_tilt_sin"]) # Rotation that takes a vector in array-centered focal plane coords # to a vector in boresight-rotation-centered focal plane coords. - q_fp_rot = ~quat.rotation_xieta(params['fp_rot_xi0'], params['fp_rot_eta0']) - - # Rotation that moves the center of the focal plane to fp_offset_(xi, eta)0. - q_fp_offset = quat.rotation_xieta(params['fp_offset_xi0'], params['fp_offset_eta0']) + q_fp_rot = ~quat.rotation_xieta(params["fp_rot_xi0"], params["fp_rot_eta0"]) + + # Rotation that moves the center of the focal plane to fp_offset_(xi, eta)0. + q_fp_offset = quat.rotation_xieta(params["fp_offset_xi0"], params["fp_offset_eta0"]) # Horizon coordinates. 
- q_hs = (base_tilt * quat.rotation_lonlat(-az, el) - * q_fp_offset * ~q_fp_rot * quat.euler(2, roll) * q_fp_rot) + q_hs = ( + base_tilt + * quat.rotation_lonlat(-az, el) + * q_fp_offset + * ~q_fp_rot + * quat.euler(2, roll) + * q_fp_rot + ) neg_az, el, roll = quat.decompose_lonlat(q_hs) @@ -206,18 +286,29 @@ def model_sat_v1(params, az, el, roll): # Support functions + def _new_boresight(samps, az=None, el=None, roll=None): boresight = core.AxisManager(samps) - for k, v in zip(['az', 'el', 'roll'], [az, el, roll]): - boresight.wrap_new(k, shape=('samps',), dtype='float64') + for k, v in zip(["az", "el", "roll"], [az, el, roll]): + boresight.wrap_new(k, shape=("samps",), dtype="float64") if v is not None: boresight[k][:] = v return boresight + def _get_sat_enc_radians(ancil): - return (ancil.az_enc * DEG, - ancil.el_enc * DEG, - -ancil.boresight_enc * DEG) + return (ancil.az_enc * DEG, ancil.el_enc * DEG, -ancil.boresight_enc * DEG) + + +def _get_lat_enc_radians(ancil): + az = ancil.az_enc * DEG + el = ancil.el_enc * DEG + if "roll_enc" in ancil: + roll = ancil.roll_enc * DEG + else: + roll = (ancil.el_enc - ancil.corotator_enc - 60) * DEG + return (az, el, roll) + def get_base_tilt_q(c, s): """Returns the quaternion rotation that applies base tilt, taking @@ -233,6 +324,5 @@ def get_base_tilt_q(c, s): phi = np.arctan2(s, c) # And that base tilt causes the true el to lie below the expected # (encoder) el, at that position. 
- amp = (c**2 + s**2)**.5 + amp = (c**2 + s**2) ** 0.5 return quat.euler(2, phi) * quat.euler(1, amp) * quat.euler(2, -phi) - diff --git a/sotodlib/site_pipeline/finalize_focal_plane.py b/sotodlib/site_pipeline/finalize_focal_plane.py index f9250210b..da1abb8a5 100644 --- a/sotodlib/site_pipeline/finalize_focal_plane.py +++ b/sotodlib/site_pipeline/finalize_focal_plane.py @@ -3,6 +3,7 @@ import logging import os from copy import deepcopy +from importlib import import_module from typing import List, Optional import h5py @@ -12,6 +13,7 @@ import yaml from scipy.cluster import vq from scipy.optimize import minimize +from so3g.proj import quat from sotodlib.coords import optics as op from sotodlib.coords.fp_containers import ( FocalPlane, @@ -24,7 +26,8 @@ plot_receiver, plot_ufm, ) -from sotodlib.core import AxisManager, Context, metadata +from sotodlib.coords.pointing_model import apply_pointing_model +from sotodlib.core import AxisManager, Context, IndexAxis, metadata from sotodlib.io.metadata import read_dataset from sotodlib.site_pipeline import util @@ -144,7 +147,9 @@ def _get_obs_ids(ctx, metalist, start_time, stop_time, query=None, obs_ids=[], t if len(obs_ids) == 0: query_all = query if query is None: - query_all = f"type=='obs' and start_time>{start_time} and stop_time<{stop_time}" + query_all = ( + f"type=='obs' and start_time>{start_time} and stop_time<{stop_time}" + ) if ctx.obsdb is None: raise ValueError("No obsdb!") all_obs = ctx.obsdb.query(query_all, tags=tags)["obs_id"] @@ -388,29 +393,86 @@ def _restrict_inliers(aman, focal_plane): ) +def _apply_pointing_model(config, aman): + if "pointing_model" not in config: + logger.info("\t\tNo pointing model specified!") + return aman + if not config["pointing_model"].get("apply", False): + logger.info("\t\tNot applying pointing model") + return aman + if "function" not in config["pointing_model"]: + logger.info("\t\tUsing default pointing model function") + func = apply_pointing_model + else: + func = 
getattr( + import_module(config["pointing_model"]["function"][0]), + config["pointing_model"]["function"][1], + ) + if "az" not in aman.pointing: + raise ValueError("Need to have az in pointing fits to apply pointing model") + if "el" not in aman.pointing: + raise ValueError("Need to have el in pointing fits to apply pointing model") + if "roll" not in aman.pointing: + raise ValueError("Need to have roll in pointing fits to apply pointing model") + + params = config["pointing_model"].get("params", {}) + ancil = AxisManager(IndexAxis("samps", aman.dets.count)) + ancil.wrap("az_enc", np.rad2deg(aman.pointing.az)) + ancil.wrap("el_enc", np.rad2deg(aman.pointing.el)) + ancil.wrap("roll_enc", np.rad2deg(aman.pointing.roll)) + bs = func(aman, params, ancil, False) + q_fp = quat.rotation_xieta(aman.pointing.xi, aman.pointing.eta) + have_gamma = False + if "gamma" in aman.pointing: + if np.any(np.isnan(aman.pointing.gamma)): + logger.warning( + "\t\tnans in gamma, not including in pointing model correction" + ) + else: + q_fp = quat.rotation_xieta( + aman.pointing.xi, aman.pointing.eta, aman.pointing.gamma + ) + have_gamma = True + + xi, eta, gamma = quat.decompose_xieta( + ~quat.euler(2, bs.roll) + * ~quat.rotation_lonlat(-bs.az, bs.el) + * quat.rotation_lonlat(-1 * aman.pointing.az, aman.pointing.el) + * quat.euler(2, aman.pointing.roll) + * q_fp + ) + + aman.pointing.xi[:] = xi + aman.pointing.eta[:] = eta + if have_gamma: + aman.pointing.gamma[:] = gamma + + return aman + + def _reverse_roll(fp, aff, sft, aman): if "obs_info" not in aman: raise ValueError("Can't reverse roll without obs information") if "roll_center" not in aman.obs_info: raise ValueError("Can't reverse roll without roll information") - roll = -1*np.deg2rad(aman.obs_info.roll_center) + roll = -1 * np.deg2rad(aman.obs_info.roll_center) # We want to shift so we rotating about the origin # To get to nominal we do fp@aff + sft # So if we just want to recenter we do fp + sft@aff^-1 inv_aff, _ = 
mt.invert_transform(aff, np.zeros_like(sft)) - sft_adj = sft@inv_aff + sft_adj = sft @ inv_aff fp_sft = fp[:, :2] + sft_adj # Now lets reverse the roll # The transpose is the inverse - rot = np.array([[np.cos(roll), -1*np.sin(roll)], [np.sin(roll), np.cos(roll)]]) - fp_rot = fp_sft@rot + rot = np.array([[np.cos(roll), -1 * np.sin(roll)], [np.sin(roll), np.cos(roll)]]) + fp_rot = fp_sft @ rot # And undo the shift, keeping track of rotations - fp_rot -= sft_adj@rot + fp_rot -= sft_adj @ rot - # Make sure its set + # Make sure its set fp[:, :2] = fp_rot # For gamma lets just shift by the roll @@ -419,7 +481,6 @@ def _reverse_roll(fp, aff, sft, aman): return fp - def main(): # Read in input pars parser = ap.ArgumentParser() @@ -590,7 +651,20 @@ def main(): logger.info("\t\tNo optical dets, skipping...") continue + # Apply pointing model if we want to + aman = _apply_pointing_model(config, aman) + # Do some outlier cuts + if "hits" in aman.pointing: + aman.restrict( + "dets", aman.pointing.hits > config.get("min_hits", 5) + ) + if aman.dets.count == 0: + logger.info("\t\tNo high hits dets, skipping...") + continue + if aman.dets.count < min_points: + logger.info("\t\tToo few dets found, skipping") + continue _restrict_inliers(aman, focal_plane) # Mapping to template @@ -607,7 +681,7 @@ def main(): continue aligned = mt.apply_transform(fp[:, :2], aff, sft) - if config.get('reverse_roll', False): + if config.get("reverse_roll", False): fp = _reverse_roll(fp, aff, sft, aman) if np.any(np.isfinite(fp[:, 2])): gscale, gsft = gamma_fit( From b857da65c35a4693ab98c1ee6516750642135adf Mon Sep 17 00:00:00 2001 From: Saianeesh Keshav Haridas Date: Mon, 12 May 2025 18:49:17 +0000 Subject: [PATCH 06/69] feat: add ability to pass missing arrays with template --- sotodlib/coords/fp_containers.py | 20 +++++++++--- .../site_pipeline/finalize_focal_plane.py | 32 ++++++++++++++----- 2 files changed, 40 insertions(+), 12 deletions(-) diff --git a/sotodlib/coords/fp_containers.py 
b/sotodlib/coords/fp_containers.py index 6a8d93f6c..ff599f3f3 100644 --- a/sotodlib/coords/fp_containers.py +++ b/sotodlib/coords/fp_containers.py @@ -173,6 +173,10 @@ def dist(self): return np.linalg.norm(self.diff, axis=1) return np.linalg.norm(self.diff[:, :2], axis=1) + @property + def padded(self): + return self.tot_weight is None + @classmethod def empty(cls, template, stream_id, wafer_slot, n_aman): if template is None: @@ -246,9 +250,10 @@ def save(self, f, db_info, group): ("xi", np.float32), ("eta", np.float32), ("gamma", np.float32), + ("measured", np.bool_), ] fpout = np.fromiter( - zip(self.det_ids, *(self.transformed.T)), dtype=outdt, count=ndets + zip(self.det_ids, *(self.transformed.T), np.ones(len(self.det_ids), dtype=bool)*(self.padded)), dtype=outdt, count=ndets ) write_dataset( metadata.ResultSet.from_friend(fpout), @@ -377,6 +382,11 @@ def __post_init__(self): self.center, self.transform_fullcm.affine, self.transform_fullcm.shift ) + @property + def num_fps(self): + fps = [fp for fp in self.focal_planes if fp.tot_weight is not None] + return len(fps) + @classmethod def from_pointing_cfg(cls, pointing_cfg): name = pointing_cfg["tube_slot"] @@ -554,9 +564,9 @@ def plot_ufm(focal_plane, plot_dir): def plot_ot(ot, plot_dir): fig, (ax1, ax2) = plt.subplots(1, 2, constrained_layout=True) - dists = [fp.dist[fp.isfinite] * 180 * 60 * 60 / np.pi for fp in ot.focal_planes] - xis = [fp.transformed[fp.isfinite, 0] for fp in ot.focal_planes] - etas = [fp.transformed[fp.isfinite, 1] for fp in ot.focal_planes] + dists = [fp.dist[fp.isfinite] * 180 * 60 * 60 / np.pi for fp in ot.focal_planes if fp.tot_weight is not None] + xis = [fp.transformed[fp.isfinite, 0] for fp in ot.focal_planes if fp.tot_weight is not None] + etas = [fp.transformed[fp.isfinite, 1] for fp in ot.focal_planes if fp.tot_weight is not None] # Plot the radial dist r = np.sqrt(np.hstack(xis) ** 2 + np.hstack(etas) ** 2) @@ -656,6 +666,8 @@ def plot_receiver(receiver, plot_dir): axs[3 * i + 
j].set_xlim(xlims) axs[3 * i + j].set_ylim(ylims) for fp in receiver.focal_planes: + if fp.tot_weight is None: + continue msk = (fp.template.id_strs == valid_ids[i]) * fp.isfinite if np.sum(msk) < 3: continue diff --git a/sotodlib/site_pipeline/finalize_focal_plane.py b/sotodlib/site_pipeline/finalize_focal_plane.py index da1abb8a5..a82f5df3e 100644 --- a/sotodlib/site_pipeline/finalize_focal_plane.py +++ b/sotodlib/site_pipeline/finalize_focal_plane.py @@ -553,6 +553,14 @@ def main(): logger.error("Provided template doesn't exist, trying to generate one") gen_template = True + # Need to move installed OT and WS of array to template for this + # if config.get("pad", False): + # logger.info("Padding missing arrays with template, getting complete list of arrays from template") + # if not have_template: + # logger.warning("\tNo template provided, arrays not found in any observations will be missing") + # with h5py.File(template_path) as f: + # stream_ids = list(f.keys()) + # Split up into batches # Right now either per_obs or all at once # Maybe allow for batch my encoder angle later? @@ -718,6 +726,11 @@ def main(): logger.info("\t%d points in fit", tot_points) if tot_points < min_points: logger.error("\tToo few points! Skipping...") + if config.get("pad", False): + logger.info("\tPadding output with template") + focal_plane.transformed = focal_plane.template.fp + focal_plane.tot_weight = None + ots[ot].focal_planes.append(focal_plane) continue try: @@ -790,15 +803,16 @@ def main(): todel = [] for name, ot in ots.items(): logger.info("Fitting common mode for %s", ot.name) - if len(ot.focal_planes) == 0: + centers = np.vstack([fp.template.center for fp in ot.focal_planes if fp.tot_weight is not None]) + centers_transformed = np.vstack( + [fp.center_transformed for fp in ot.focal_planes if fp.tot_weight is not None] + ) + if len(centers) == 0: logger.error("\tNo focal planes found! 
Skipping...") - todel.append(name) + if not config.get("pad", False): + todel.append(name) continue plot_ot(ot, plot_dir) - centers = np.vstack([fp.template.center for fp in ot.focal_planes]) - centers_transformed = np.vstack( - [fp.center_transformed for fp in ot.focal_planes] - ) if centers.shape[0] < 3: logger.warning( "\tToo few wafers fit to compute common mode, transform will be approximated" @@ -842,9 +856,9 @@ def main(): ) recv_transform = deepcopy(tuple(ots.values())[0].transform_fullcm) else: - centers = np.vstack([ot.center for ot in ots.values()]) + centers = np.vstack([ot.center for ot in ots.values() if ot.num_fps > 0]) centers_transformed = np.vstack( - [ot.center_transformed for ot in ots.values()] + [ot.center_transformed for ot in ots.values() if ot.num_fps > 0] ) if len(ots) < 3: logger.info( @@ -921,6 +935,8 @@ def main(): start_time=config["start_time"], stop_time=config["stop_time"], ) + if config.get("pad", False): + logger.info("Padding missing arrays with values from template") with h5py.File(outpath, "a") as f: if group in f: del f[group] From 69529f436f137fc41e49a0603a7b62628454754c Mon Sep 17 00:00:00 2001 From: Saianeesh Keshav Haridas Date: Mon, 12 May 2025 18:53:46 +0000 Subject: [PATCH 07/69] fix: zero out defaults for LAT pointing model and revert formatting --- sotodlib/coords/pointing_model.py | 111 ++++++++++++++---------------- 1 file changed, 51 insertions(+), 60 deletions(-) diff --git a/sotodlib/coords/pointing_model.py b/sotodlib/coords/pointing_model.py index 8c70a634b..762c825b6 100644 --- a/sotodlib/coords/pointing_model.py +++ b/sotodlib/coords/pointing_model.py @@ -1,14 +1,12 @@ # Copyright (c) 2024 Simons Observatory. # Full license can be found in the top level "LICENSE" file. -import logging - import numpy as np -from so3g.proj import quat - from .. 
import core from .helpers import _valid_arg +from so3g.proj import quat +import logging logger = logging.getLogger(__name__) DEG = np.pi / 180 @@ -44,15 +42,16 @@ def apply_basic_pointing_model(tod): _boresight.wrap("el", _ancil.el_enc * DEG, [(0, "samps")]) _boresight.wrap("roll", -1 * _ancil.boresight_enc * DEG, [(0, "samps")]) - if "boresight" in tod: - del tod["boresight"] + if 'boresight' in tod: + del tod['boresight'] - tod.wrap("boresight", _boresight) + tod.wrap('boresight', _boresight) return _boresight -def apply_pointing_model(tod, pointing_model=None, ancil=None, wrap=None): +def apply_pointing_model(tod, pointing_model=None, ancil=None, + wrap=None): """Applies a static pointing model to compute corrected boresight position and orientation in horizon coordinates. The encoder values in tod.ancil are consumed as raw data, and the computed @@ -110,10 +109,10 @@ def apply_pointing_model(tod, pointing_model=None, ancil=None, wrap=None): def apply_pointing_model_sat(vers, params, tod, ancil): az, el, roll = _get_sat_enc_radians(ancil) - if vers == "sat_naive": + if vers == 'sat_naive': return _new_boresight(ancil.samps, az=az, el=el, roll=roll) - elif vers == "sat_v1": + elif vers == 'sat_v1': az1, el1, roll1 = model_sat_v1(params, az, el, roll) return _new_boresight(ancil.samps, az=az1, el=el1, roll=roll1) @@ -155,15 +154,15 @@ def model_lat_v1(params, az, el, roll): - el_offset: The elevation encoder offset. 
""" _params = { - "az_offset": -6.431949463522369e-07, - "el_offset": 0.10784213916209494, - "cr_offset": 0.1068009204473848, - "el_xi_offset": 0.07524197484976551, - "el_eta_offset": 0.13704409641376075, - "rx_xi_offset": -0.03523515856319279, - "rx_eta_offset": -0.005858421274865713, - "mir_xi_offset": -0.17792288910750848, - "mir_eta_offset": 0.21440894087003562, + "az_offset": 0, + "el_offset": 0, + "cr_offset": 0, + "el_xi_offset": 0, + "el_eta_offset": 0, + "rx_xi_offset": 0, + "rx_eta_offset": 0, + "mir_xi_offset": 0, + "mir_eta_offset": 0, } _params = _params.copy() _params.update(params) @@ -200,19 +199,18 @@ def model_lat_v1(params, az, el, roll): # if their value is zero (and that should be the registered default). defaults_sat_v1 = { - "enc_offset_az": 0.0, - "enc_offset_el": 0.0, - "enc_offset_boresight": 0.0, - "fp_offset_xi0": 0.0, - "fp_offset_eta0": 0.0, - "fp_rot_xi0": 0.0, - "fp_rot_eta0": 0.0, - "az_rot": 0.0, - "base_tilt_cos": 0.0, - "base_tilt_sin": 0.0, + 'enc_offset_az': 0., + 'enc_offset_el': 0., + 'enc_offset_boresight': 0., + 'fp_offset_xi0': 0., + 'fp_offset_eta0': 0., + 'fp_rot_xi0': 0., + 'fp_rot_eta0': 0., + 'az_rot': 0., + 'base_tilt_cos': 0., + 'base_tilt_sin': 0., } - def model_sat_v1(params, az, el, roll): """Applies pointing model to (az, el, roll). @@ -226,12 +224,12 @@ def model_sat_v1(params, az, el, roll): - fp_rot_{xi,eta}0: within the focal plane (i.e. relative to the corrected boresight), the center of rotation of the boresight. Radians. - - fp_offset_{xi,eta}0: Offset of focal plane's rotational center + - fp_offset_{xi,eta}0: Offset of focal plane's rotational center relative to position in focal plane that lies along the optical axis. Corrects for "collimation error". - - enc_offset_{az,el,boresight}: Encoder offsets, in Radians. + - enc_offset_{az,el,boresight}: Encoder offsets, in Radians. Sign convention: True = Encoder + Offset - - base_tilt_{cos,sin}: Base tilt coefficients, in radians. 
+ - base_tilt_{cos,sin}: Base tilt coefficients, in radians. - az_rot: Dimensionless parameter describing a linear dependence of Az on El. """ @@ -243,37 +241,31 @@ def model_sat_v1(params, az, el, roll): params, _p = _p, None for k, v in params.items(): - if k == "version": + if k == 'version': continue - if k not in defaults_sat_v1 and v != 0.0: + if k not in defaults_sat_v1 and v != 0.: raise ValueError(f'Handling of model param "{k}" is not implemented.') # Construct offsetted encoders. az_orig = az.copy() - az_twist = params["az_rot"] * (el + params["enc_offset_el"]) - az = az + params["enc_offset_az"] + az_twist - el = el + params["enc_offset_el"] - roll = roll - params["enc_offset_boresight"] + az_twist = params['az_rot'] * (el + params['enc_offset_el']) + az = az + params['enc_offset_az'] + az_twist + el = el + params['enc_offset_el'] + roll = roll - params['enc_offset_boresight'] # Rotation that tilts the base (referred to vals after enc correction). - base_tilt = get_base_tilt_q(params["base_tilt_cos"], params["base_tilt_sin"]) + base_tilt = get_base_tilt_q(params['base_tilt_cos'], params['base_tilt_sin']) # Rotation that takes a vector in array-centered focal plane coords # to a vector in boresight-rotation-centered focal plane coords. - q_fp_rot = ~quat.rotation_xieta(params["fp_rot_xi0"], params["fp_rot_eta0"]) - - # Rotation that moves the center of the focal plane to fp_offset_(xi, eta)0. - q_fp_offset = quat.rotation_xieta(params["fp_offset_xi0"], params["fp_offset_eta0"]) + q_fp_rot = ~quat.rotation_xieta(params['fp_rot_xi0'], params['fp_rot_eta0']) + + # Rotation that moves the center of the focal plane to fp_offset_(xi, eta)0. + q_fp_offset = quat.rotation_xieta(params['fp_offset_xi0'], params['fp_offset_eta0']) # Horizon coordinates. 
- q_hs = ( - base_tilt - * quat.rotation_lonlat(-az, el) - * q_fp_offset - * ~q_fp_rot - * quat.euler(2, roll) - * q_fp_rot - ) + q_hs = (base_tilt * quat.rotation_lonlat(-az, el) + * q_fp_offset * ~q_fp_rot * quat.euler(2, roll) * q_fp_rot) neg_az, el, roll = quat.decompose_lonlat(q_hs) @@ -286,19 +278,18 @@ def model_sat_v1(params, az, el, roll): # Support functions - def _new_boresight(samps, az=None, el=None, roll=None): boresight = core.AxisManager(samps) - for k, v in zip(["az", "el", "roll"], [az, el, roll]): - boresight.wrap_new(k, shape=("samps",), dtype="float64") + for k, v in zip(['az', 'el', 'roll'], [az, el, roll]): + boresight.wrap_new(k, shape=('samps',), dtype='float64') if v is not None: boresight[k][:] = v return boresight - def _get_sat_enc_radians(ancil): - return (ancil.az_enc * DEG, ancil.el_enc * DEG, -ancil.boresight_enc * DEG) - + return (ancil.az_enc * DEG, + ancil.el_enc * DEG, + -ancil.boresight_enc * DEG) def _get_lat_enc_radians(ancil): az = ancil.az_enc * DEG @@ -309,7 +300,6 @@ def _get_lat_enc_radians(ancil): roll = (ancil.el_enc - ancil.corotator_enc - 60) * DEG return (az, el, roll) - def get_base_tilt_q(c, s): """Returns the quaternion rotation that applies base tilt, taking vectors in the platforms horizon coordinates to vectors in the @@ -324,5 +314,6 @@ def get_base_tilt_q(c, s): phi = np.arctan2(s, c) # And that base tilt causes the true el to lie below the expected # (encoder) el, at that position. 
- amp = (c**2 + s**2) ** 0.5 + amp = (c**2 + s**2)**.5 return quat.euler(2, phi) * quat.euler(1, amp) * quat.euler(2, -phi) + From fc95de2afae4ac3f907681687375dc27ec59bd18 Mon Sep 17 00:00:00 2001 From: Saianeesh Keshav Haridas Date: Mon, 12 May 2025 20:31:41 +0000 Subject: [PATCH 08/69] fix: override params from context loaded pointing model properly --- sotodlib/site_pipeline/finalize_focal_plane.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/sotodlib/site_pipeline/finalize_focal_plane.py b/sotodlib/site_pipeline/finalize_focal_plane.py index a82f5df3e..f30423573 100644 --- a/sotodlib/site_pipeline/finalize_focal_plane.py +++ b/sotodlib/site_pipeline/finalize_focal_plane.py @@ -416,6 +416,13 @@ def _apply_pointing_model(config, aman): raise ValueError("Need to have roll in pointing fits to apply pointing model") params = config["pointing_model"].get("params", {}) + if "pointing_model" in aman: + for key, val in params.items(): + if key in aman.pointing_model: + aman.pointing_model[key] = val + else: + aman.pointing_model.wrap(key, val) + params = aman.pointing_model ancil = AxisManager(IndexAxis("samps", aman.dets.count)) ancil.wrap("az_enc", np.rad2deg(aman.pointing.az)) ancil.wrap("el_enc", np.rad2deg(aman.pointing.el)) From 7e5fff078f87bd3f0ee07e39acd6a3f422e5bd86 Mon Sep 17 00:00:00 2001 From: Michael McCrackan Date: Tue, 27 May 2025 12:32:09 -0700 Subject: [PATCH 09/69] also trim on proc_aman in fft_trim --- sotodlib/preprocess/processes.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sotodlib/preprocess/processes.py b/sotodlib/preprocess/processes.py index 093a146f3..36e012144 100644 --- a/sotodlib/preprocess/processes.py +++ b/sotodlib/preprocess/processes.py @@ -28,7 +28,8 @@ class FFTTrim(_Preprocess): """ name = "fft_trim" def process(self, aman, proc_aman, sim=False): - tod_ops.fft_trim(aman, **self.process_cfgs) + start_stop = tod_ops.fft_trim(aman, **self.process_cfgs) + 
proc_aman.restrict(self.process_cfgs.get('axis', 'samps'), (start_stop)) class Detrend(_Preprocess): """Detrend the signal. All processing configs go to `detrend_tod` From 60ca4323e3aa28ceb6e03ff5024e2e5d0781a813 Mon Sep 17 00:00:00 2001 From: chervias Date: Tue, 27 May 2025 12:35:31 -0700 Subject: [PATCH 10/69] starting work --- sotodlib/site_pipeline/make_ml_map.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index d77e3af15..140bb4ada 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -6,6 +6,7 @@ def get_parser(parser=None): parser.add_argument("query") parser.add_argument("area") parser.add_argument("odir") + parser.add_argument("preprocess_config", help="Preprocess configuration file") parser.add_argument("prefix", nargs="?") parser.add_argument( "--comps", type=str, default="TQU",help="List of components to solve for. T, QU or TQU, but only TQU is consistent with the actual data") parser.add_argument("-W", "--wafers", type=str, default=None, help="Detector wafer subsets to map with. 
,-sep") @@ -39,6 +40,7 @@ def main(**args): from sotodlib import tod_ops, mapmaking, core from sotodlib.tod_ops import filters from sotodlib.mapmaking import log + from sotodlib.preprocess import preprocess_util as pp_util from pixell import enmap, utils, fft, bunch, wcsutils, mpi, bench import yaml @@ -100,6 +102,14 @@ class DataMissing(Exception): pass if args.srcsamp: srcsamp_mask = enmap.read_map(args.srcsamp) + # set up the preprocessing + try: + preproc = yaml.safe_load(open(args.preprocess_config, 'r')) + except: + if comm.rank==0: + L.info(f"{args.preprocess_config} is not a valid config") + sys.exit(1) + passes = mapmaking.setup_passes(downsample=args.downsample, maxiter=args.maxiter, interpol=args.interpol) for ipass, passinfo in enumerate(passes): L.info("Starting pass %d/%d maxit %d down %d interp %s" % (ipass+1, len(passes), passinfo.maxiter, passinfo.downsample, passinfo.interpol)) @@ -175,7 +185,9 @@ class DataMissing(Exception): pass if len(my_dets) == 0: raise DataMissing("no dets left") # Actually read the data with bench.mark("read_obs %s" % sub_id): - obs = context.get_obs(sub_id, meta=meta) + #obs = context.get_obs(sub_id, meta=meta) + #print(obs_id) + obs = pp_util.load_and_preprocess(obs_id, preproc, context=context, meta=meta, logger=L) # Fix boresight mapmaking.fix_boresight_glitches(obs) From f68260f591d671211249c10c5516a9dd226f2119 Mon Sep 17 00:00:00 2001 From: chervias Date: Tue, 27 May 2025 12:55:01 -0700 Subject: [PATCH 11/69] more work --- sotodlib/site_pipeline/make_ml_map.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index 140bb4ada..c7cf58afd 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -199,7 +199,7 @@ class DataMissing(Exception): pass obs.wrap("site", np.full(1, SITE)) # Prepare our data. 
FFT-truncate for faster fft ops - obs.restrict("samps", [0, fft.fft_len(obs.samps.count)]) + #obs.restrict("samps", [0, fft.fft_len(obs.samps.count)]) # Desolope to make it periodic. This should be done *before* # dropping to single precision, to avoid unnecessary loss of precision due @@ -225,20 +225,21 @@ class DataMissing(Exception): pass L.debug("Skipped %s (all dets cut)" % (sub_id)) continue # Gapfill glitches. This function name isn't the clearest - tod_ops.get_gap_fill(obs, flags=obs.flags.glitch_flags, swap=True) + #tod_ops.get_gap_fill(obs, flags=obs.flags.glitch_flags, swap=True) # Gain calibration gain = 1 - for gtype in ["relcal","abscal"]: - gain *= obs[gtype][:,None] + #for gtype in ["relcal","abscal"]: + # gain *= obs[gtype][:,None] obs.signal *= gain # Fourier-space calibration fsig = fft.rfft(obs.signal) freq = fft.rfftfreq(obs.samps.count, 1/srate) + # iir and timeconstant will be applied in the preprocessing eventually # iir filter iir_filter = filters.iir_filter()(freq, obs) fsig /= iir_filter gain /= iir_filter[0].real # keep track of total gain for our record - fsig /= filters.timeconst_filter(None)(freq, obs) + fsig /= filters.timeconst_filter(timeconst = obs.det_cal.tau_eff)(freq, obs) fft.irfft(fsig, obs.signal, normalize=True) del fsig @@ -246,9 +247,9 @@ class DataMissing(Exception): pass #obs.focal_plane.xi += obs.boresight_offset.xi #obs.focal_plane.eta += obs.boresight_offset.eta #obs.focal_plane.gamma += obs.boresight_offset.gamma - obs.focal_plane.xi += obs.boresight_offset.dx - obs.focal_plane.eta += obs.boresight_offset.dy - obs.focal_plane.gamma += obs.boresight_offset.gamma + #obs.focal_plane.xi += obs.boresight_offset.dx + #obs.focal_plane.eta += obs.boresight_offset.dy + #obs.focal_plane.gamma += obs.boresight_offset.gamma # Injecting at this point makes us insensitive to any bias introduced # in the earlier steps (mainly from gapfilling). 
The alternative is From c07083ed2301c3721d89e67932170c5fb0cf9c68 Mon Sep 17 00:00:00 2001 From: Elle Shaw Date: Wed, 28 May 2025 09:53:46 -0700 Subject: [PATCH 12/69] Made LAT and SAT model default parameter imports similar. Removed deg2radian hardcoded in lat model. --- sotodlib/coords/pointing_model.py | 79 ++++++++++++++++++------------- 1 file changed, 45 insertions(+), 34 deletions(-) diff --git a/sotodlib/coords/pointing_model.py b/sotodlib/coords/pointing_model.py index 762c825b6..ae273862c 100644 --- a/sotodlib/coords/pointing_model.py +++ b/sotodlib/coords/pointing_model.py @@ -122,6 +122,10 @@ def apply_pointing_model_sat(vers, params, tod, ancil): def apply_pointing_model_lat(vers, params, ancil): az, el, roll = _get_lat_enc_radians(ancil) + + if vers == 'lat_naive': + return _new_boresight(ancil.samps, az=az, el=el, roll=roll) + if vers == "lat_v1": az1, el1, roll1 = model_lat_v1(params, az, el, roll) return _new_boresight(ancil.samps, az=az1, el=el1, roll=roll1) @@ -132,7 +136,7 @@ def apply_pointing_model_lat(vers, params, ancil): # # LAT model(s) -# +# def model_lat_v1(params, az, el, roll): """Applies pointing model to (az, el, roll). @@ -153,25 +157,20 @@ def model_lat_v1(params, az, el, roll): - az_offset: The azimuth encoder offset. - el_offset: The elevation encoder offset. 
""" - _params = { - "az_offset": 0, - "el_offset": 0, - "cr_offset": 0, - "el_xi_offset": 0, - "el_eta_offset": 0, - "rx_xi_offset": 0, - "rx_eta_offset": 0, - "mir_xi_offset": 0, - "mir_eta_offset": 0, - } - _params = _params.copy() - _params.update(params) - params = _params - cr = el - roll - np.deg2rad(60) - for key, val in params.items(): - if not isinstance(val, (float, int)): + _p = dict(param_defaults['lat_v1']) + if isinstance(params, dict): + _p.update(params) + else: + _p.update({k: params[k] for k in params._fields.keys()}) + params, _p = _p, None + + for k, v in params.items(): + if k == 'version': continue - params[key] = np.deg2rad(val) + if k not in param_defaults['lat_v1'] and v != 0.: + raise ValueError(f'Handling of model param "{k}" is not implemented.') + + cr = el - roll - np.deg2rad(60) q_enc = quat.rotation_lonlat( -1 * (az.copy() + params["az_offset"]), el.copy() + params["el_offset"] ) @@ -198,19 +197,6 @@ def model_lat_v1(params, az, el, roll): # sat_v1: you can expand v1, as long as new params don't do anything # if their value is zero (and that should be the registered default). -defaults_sat_v1 = { - 'enc_offset_az': 0., - 'enc_offset_el': 0., - 'enc_offset_boresight': 0., - 'fp_offset_xi0': 0., - 'fp_offset_eta0': 0., - 'fp_rot_xi0': 0., - 'fp_rot_eta0': 0., - 'az_rot': 0., - 'base_tilt_cos': 0., - 'base_tilt_sin': 0., -} - def model_sat_v1(params, az, el, roll): """Applies pointing model to (az, el, roll). @@ -233,7 +219,7 @@ def model_sat_v1(params, az, el, roll): - az_rot: Dimensionless parameter describing a linear dependence of Az on El. 
""" - _p = dict(defaults_sat_v1) + _p = dict(param_defaults['sat_v1']) if isinstance(params, dict): _p.update(params) else: @@ -243,7 +229,7 @@ def model_sat_v1(params, az, el, roll): for k, v in params.items(): if k == 'version': continue - if k not in defaults_sat_v1 and v != 0.: + if k not in param_defaults['sat_v1'] and v != 0.: raise ValueError(f'Handling of model param "{k}" is not implemented.') # Construct offsetted encoders. @@ -277,6 +263,31 @@ def model_sat_v1(params, az, el, roll): # Support functions +param_defaults={ + 'lat_v1' : { + 'az_offset': 0, + 'el_offset': 0, + 'cr_offset': 0, + 'el_xi_offset': 0, + 'el_eta_offset': 0, + 'rx_xi_offset': 0, + 'rx_eta_offset': 0, + 'mir_xi_offset': 0, + 'mir_eta_offset': 0, + }, + 'sat_v1' : { + 'enc_offset_az': 0., + 'enc_offset_el': 0., + 'enc_offset_boresight': 0., + 'fp_offset_xi0': 0., + 'fp_offset_eta0': 0., + 'fp_rot_xi0': 0., + 'fp_rot_eta0': 0., + 'az_rot': 0., + 'base_tilt_cos': 0., + 'base_tilt_sin': 0. + } +} def _new_boresight(samps, az=None, el=None, roll=None): boresight = core.AxisManager(samps) From 37818b7f9d45e4331c0ac8e92ad404b406de2962 Mon Sep 17 00:00:00 2001 From: chervias Date: Thu, 29 May 2025 13:21:05 -0700 Subject: [PATCH 13/69] non-opt detectors, pointing model, calibrate to pW --- sotodlib/site_pipeline/make_ml_map.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index c7cf58afd..a7c3ce010 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -39,6 +39,7 @@ def main(**args): from sotodlib.io import metadata # PerDetectorHdf5 work-around from sotodlib import tod_ops, mapmaking, core from sotodlib.tod_ops import filters + from sotodlib.coords import pointing_model from sotodlib.mapmaking import log from sotodlib.preprocess import preprocess_util as pp_util from pixell import enmap, utils, fft, bunch, wcsutils, mpi, bench @@ -189,10 +190,16 @@ class 
DataMissing(Exception): pass #print(obs_id) obs = pp_util.load_and_preprocess(obs_id, preproc, context=context, meta=meta, logger=L) + # Cut non-optical dets + obs.restrict('dets', obs.dets.vals[obs.det_info.wafer.type == 'OPTC']) # Fix boresight mapmaking.fix_boresight_glitches(obs) # Get our sample rate. Would have been nice to have this available in the axisman srate = (obs.samps.count-1)/(obs.timestamps[-1]-obs.timestamps[0]) + # Apply pointing model + pointing_model.apply_pointing_model(obs) + # Calibrate to pW + obs.signal = np.multiply(obs.signal.T, obs.det_cal.phase_to_pW).T # Add site and weather, since they're not in obs yet obs.wrap("weather", np.full(1, "typical")) From 2a87548d6d76698be48a7761bc73ec0c9ade6549 Mon Sep 17 00:00:00 2001 From: chervias Date: Wed, 11 Jun 2025 13:18:05 -0700 Subject: [PATCH 14/69] adding abscal and other things --- sotodlib/site_pipeline/make_ml_map.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index a7c3ce010..3be47f23f 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -189,6 +189,19 @@ class DataMissing(Exception): pass #obs = context.get_obs(sub_id, meta=meta) #print(obs_id) obs = pp_util.load_and_preprocess(obs_id, preproc, context=context, meta=meta, logger=L) + #if obs.dets.count < 100: + # L.debug("Skipped %s (Not enough detectors)" % (sub_id)) + # continue + # Check nans + mask = np.logical_not(np.isfinite(obs.signal)) + if mask.sum() > 0: + L.debug("Skipped %s (a nan in signal)" % (sub_id)) + continue + zero_dets = np.sum(obs.signal, axis=1) + mask = zero_dets == 0.0 + if mask.any(): + L.debug("%s has all 0s in at least 1 detector" % (sub_id)) + obs.restrict('dets', obs.dets.vals[np.logical_not(mask)]) # Cut non-optical dets obs.restrict('dets', obs.dets.vals[obs.det_info.wafer.type == 'OPTC']) @@ -200,6 +213,11 @@ class DataMissing(Exception): pass 
pointing_model.apply_pointing_model(obs) # Calibrate to pW obs.signal = np.multiply(obs.signal.T, obs.det_cal.phase_to_pW).T + # Calibrate to K_cmb + obs.signal = np.multiply(obs.signal.T, obs.abscal.abscal_cmb).T + if obs.dets.count < 10: + L.debug("Skipped %s (less than 10 detectors)" % (sub_id)) + continue # Add site and weather, since they're not in obs yet obs.wrap("weather", np.full(1, "typical")) From 3b4e7dcde23be6b8035e384f5b0208ce22f24893 Mon Sep 17 00:00:00 2001 From: chervias Date: Mon, 16 Jun 2025 14:04:41 -0700 Subject: [PATCH 15/69] we increase the maxcut on find_usable_detectors since some scan types have a huge fraction of turnarounds --- sotodlib/site_pipeline/make_ml_map.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index 3be47f23f..769b05356 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -244,7 +244,7 @@ class DataMissing(Exception): pass # Optionally skip all the calibration. Useful for sims. 
if not args.nocal: # Disqualify overly cut detectors - good_dets = mapmaking.find_usable_detectors(obs) + good_dets = mapmaking.find_usable_detectors(obs,maxcut=0.5) obs.restrict("dets", good_dets) if obs.dets.count == 0: L.debug("Skipped %s (all dets cut)" % (sub_id)) From 39d4eb22d3914b26576e91ec350d91ac9df0771f Mon Sep 17 00:00:00 2001 From: chervias Date: Mon, 23 Jun 2025 14:49:01 -0700 Subject: [PATCH 16/69] Moving some preprocessing under nocal steps --- sotodlib/site_pipeline/make_ml_map.py | 23 ++++++++++------------- 1 file changed, 10 insertions(+), 13 deletions(-) diff --git a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index 769b05356..7afa3c91f 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -187,16 +187,16 @@ class DataMissing(Exception): pass # Actually read the data with bench.mark("read_obs %s" % sub_id): #obs = context.get_obs(sub_id, meta=meta) - #print(obs_id) obs = pp_util.load_and_preprocess(obs_id, preproc, context=context, meta=meta, logger=L) - #if obs.dets.count < 100: - # L.debug("Skipped %s (Not enough detectors)" % (sub_id)) - # continue + if obs.dets.count < 10: + L.debug("Skipped %s (Not enough detectors)" % (sub_id)) + continue # Check nans mask = np.logical_not(np.isfinite(obs.signal)) if mask.sum() > 0: L.debug("Skipped %s (a nan in signal)" % (sub_id)) continue + # Check all 0s zero_dets = np.sum(obs.signal, axis=1) mask = zero_dets == 0.0 if mask.any(): @@ -209,15 +209,6 @@ class DataMissing(Exception): pass mapmaking.fix_boresight_glitches(obs) # Get our sample rate. 
Would have been nice to have this available in the axisman srate = (obs.samps.count-1)/(obs.timestamps[-1]-obs.timestamps[0]) - # Apply pointing model - pointing_model.apply_pointing_model(obs) - # Calibrate to pW - obs.signal = np.multiply(obs.signal.T, obs.det_cal.phase_to_pW).T - # Calibrate to K_cmb - obs.signal = np.multiply(obs.signal.T, obs.abscal.abscal_cmb).T - if obs.dets.count < 10: - L.debug("Skipped %s (less than 10 detectors)" % (sub_id)) - continue # Add site and weather, since they're not in obs yet obs.wrap("weather", np.full(1, "typical")) @@ -243,6 +234,12 @@ class DataMissing(Exception): pass # Optionally skip all the calibration. Useful for sims. if not args.nocal: + # Apply pointing model + pointing_model.apply_pointing_model(obs) + # Calibrate to pW + obs.signal = np.multiply(obs.signal.T, obs.det_cal.phase_to_pW).T + # Calibrate to K_cmb + obs.signal = np.multiply(obs.signal.T, obs.abscal.abscal_cmb).T # Disqualify overly cut detectors good_dets = mapmaking.find_usable_detectors(obs,maxcut=0.5) obs.restrict("dets", good_dets) From a92d55e3d4c4003c8f0d0b9adc6539f47a7c0a63 Mon Sep 17 00:00:00 2001 From: chervias Date: Tue, 24 Jun 2025 08:41:08 -0700 Subject: [PATCH 17/69] Implementing catch of LoaderError for obs that dont exist in the preprocess db --- sotodlib/site_pipeline/make_ml_map.py | 31 ++++++++++++++------------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index 7afa3c91f..e0d5f3e16 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -36,15 +36,17 @@ def get_parser(parser=None): def main(**args): import numpy as np, sys, time, warnings, os, so3g from sotodlib.core import Context, AxisManager, IndexAxis - from sotodlib.io import metadata # PerDetectorHdf5 work-around from sotodlib import tod_ops, mapmaking, core from sotodlib.tod_ops import filters from sotodlib.coords import pointing_model 
from sotodlib.mapmaking import log from sotodlib.preprocess import preprocess_util as pp_util + from sotodlib.core import metadata from pixell import enmap, utils, fft, bunch, wcsutils, mpi, bench import yaml + LoaderError = metadata.loader.LoaderError + #try: import moby2.analysis.socompat #except ImportError: warnings.warn("Can't import moby2.analysis.socompat. ACT data input probably won't work") class DataMissing(Exception): pass @@ -202,7 +204,6 @@ class DataMissing(Exception): pass if mask.any(): L.debug("%s has all 0s in at least 1 detector" % (sub_id)) obs.restrict('dets', obs.dets.vals[np.logical_not(mask)]) - # Cut non-optical dets obs.restrict('dets', obs.dets.vals[obs.det_info.wafer.type == 'OPTC']) # Fix boresight @@ -237,9 +238,9 @@ class DataMissing(Exception): pass # Apply pointing model pointing_model.apply_pointing_model(obs) # Calibrate to pW - obs.signal = np.multiply(obs.signal.T, obs.det_cal.phase_to_pW).T + #obs.signal = np.multiply(obs.signal.T, obs.det_cal.phase_to_pW).T # Calibrate to K_cmb - obs.signal = np.multiply(obs.signal.T, obs.abscal.abscal_cmb).T + #obs.signal = np.multiply(obs.signal.T, obs.abscal.abscal_cmb).T # Disqualify overly cut detectors good_dets = mapmaking.find_usable_detectors(obs,maxcut=0.5) obs.restrict("dets", good_dets) @@ -249,21 +250,21 @@ class DataMissing(Exception): pass # Gapfill glitches. 
This function name isn't the clearest #tod_ops.get_gap_fill(obs, flags=obs.flags.glitch_flags, swap=True) # Gain calibration - gain = 1 + #gain = 1 #for gtype in ["relcal","abscal"]: # gain *= obs[gtype][:,None] - obs.signal *= gain + #obs.signal *= gain # Fourier-space calibration - fsig = fft.rfft(obs.signal) - freq = fft.rfftfreq(obs.samps.count, 1/srate) + #fsig = fft.rfft(obs.signal) + #freq = fft.rfftfreq(obs.samps.count, 1/srate) # iir and timeconstant will be applied in the preprocessing eventually # iir filter - iir_filter = filters.iir_filter()(freq, obs) - fsig /= iir_filter - gain /= iir_filter[0].real # keep track of total gain for our record - fsig /= filters.timeconst_filter(timeconst = obs.det_cal.tau_eff)(freq, obs) - fft.irfft(fsig, obs.signal, normalize=True) - del fsig + #iir_filter = filters.iir_filter()(freq, obs) + #fsig /= iir_filter + #gain /= iir_filter[0].real # keep track of total gain for our record + #fsig /= filters.timeconst_filter(timeconst = obs.det_cal.tau_eff)(freq, obs) + #fft.irfft(fsig, obs.signal, normalize=True) + #del fsig # Apply pointing correction. 
#obs.focal_plane.xi += obs.boresight_offset.xi @@ -313,7 +314,7 @@ class DataMissing(Exception): pass print("Writing noise model %s" % nmat_file) utils.mkdir(nmat_dir) mapmaking.write_nmat(nmat_file, mapmaker.data[-1].nmat) - except (DataMissing,IndexError,ValueError) as e: + except (DataMissing,IndexError,ValueError,LoaderError) as e: L.debug("Skipped %s (%s)" % (sub_id, str(e))) continue From 84542bc948c6f2c12b4dd26b590c3cfbc5f8e0fd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sigurd=20Kirkevold=20N=C3=A6ss?= Date: Tue, 24 Jun 2025 11:42:14 -0700 Subject: [PATCH 18/69] Fix broken pior support --- sotodlib/mapmaking/ml_mapmaker.py | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/sotodlib/mapmaking/ml_mapmaker.py b/sotodlib/mapmaking/ml_mapmaker.py index b0d382b6b..0118516ff 100644 --- a/sotodlib/mapmaking/ml_mapmaker.py +++ b/sotodlib/mapmaking/ml_mapmaker.py @@ -116,7 +116,9 @@ def A(self, x_zip): tod = evaluator.evaluate(data) data.nmat.apply(tod) accumulator.accumulate(data, tod) - return accumulator.finish() + accumulator.finish() + self.prior(evaluator.x, accumulator.x) + return self.dof.zip(*accumulator.x) def M(self, x_zip): iwork = self.dof.unzip(x_zip) @@ -167,6 +169,10 @@ def solve( # x_zip is the raw solution, as a 1d vector. yield bunch.Bunch(i=solver.i, err=solver.err, x=self.dof.unzip(solver.x), x_zip=solver.x) + def prior(self, xins, xouts): + for signal, xin, xout in zip(self.signals, xins, xouts): + signal.prior(xin, xout) + def translate(self, other, x_zip): """Translate degrees of freedom x from some other mapamaker to the current one. 
The other mapmaker must have the same list of signals, except that they can have @@ -183,10 +189,10 @@ class MLEvaluator: """Helper for MLMapmaker that represents the action of P in the model d = Px+n.""" def __init__(self, x_zip, signals, dof, dtype=np.float32): self.signals = signals - self.x_zip = x_zip + self.x = dof.unzip(x_zip) self.dof = dof self.dtype = dtype - self.iwork = [signal.to_work(m) for signal, m in zip(self.signals, self.dof.unzip(x_zip))] + self.iwork = [signal.to_work(m) for signal, m in zip(self.signals, self.x)] def evaluate(self, data, tod=None): """Evaluate Px for one tod""" if tod is None: tod = np.zeros([data.ndet, data.nsamp], self.dtype) @@ -206,9 +212,8 @@ def accumulate(self, data, tod): signal.backward(data.id, tod, self.owork[si]) def finish(self): """Return the full P'd based on the previous accumulation""" - return self.dof.zip( - *[signal.from_work(w) for signal, w in zip(self.signals, self.owork)] - ) + self.x = [signal.from_work(w) for signal, w in zip(self.signals, self.owork)] + return self.dof.zip(*self.x) class Signal: """This class represents a thing we want to solve for, e.g. 
the sky, ground, cut samples, etc.""" From f8bde19eb89cdf7e938f0843cd01e52d996c9975 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sigurd=20Kirkevold=20N=C3=A6ss?= Date: Tue, 24 Jun 2025 16:44:37 -0700 Subject: [PATCH 19/69] restore x_zip member for evaluator --- sotodlib/mapmaking/ml_mapmaker.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/sotodlib/mapmaking/ml_mapmaker.py b/sotodlib/mapmaking/ml_mapmaker.py index 0118516ff..ac674f7c4 100644 --- a/sotodlib/mapmaking/ml_mapmaker.py +++ b/sotodlib/mapmaking/ml_mapmaker.py @@ -189,6 +189,7 @@ class MLEvaluator: """Helper for MLMapmaker that represents the action of P in the model d = Px+n.""" def __init__(self, x_zip, signals, dof, dtype=np.float32): self.signals = signals + self.x_zip = x_zip self.x = dof.unzip(x_zip) self.dof = dof self.dtype = dtype @@ -213,7 +214,8 @@ def accumulate(self, data, tod): def finish(self): """Return the full P'd based on the previous accumulation""" self.x = [signal.from_work(w) for signal, w in zip(self.signals, self.owork)] - return self.dof.zip(*self.x) + self.x_zip = self.dof.zip(*self.x) + return self.x_zip class Signal: """This class represents a thing we want to solve for, e.g. 
the sky, ground, cut samples, etc.""" From 1d9aa55ca102b95de5c81a05d0634cd42a8e2f82 Mon Sep 17 00:00:00 2001 From: Saianeesh Keshav Haridas Date: Wed, 2 Jul 2025 11:56:43 -0700 Subject: [PATCH 20/69] fix: look for r2 properly --- sotodlib/coords/fp_containers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sotodlib/coords/fp_containers.py b/sotodlib/coords/fp_containers.py index ff599f3f3..c7130d12d 100644 --- a/sotodlib/coords/fp_containers.py +++ b/sotodlib/coords/fp_containers.py @@ -224,7 +224,7 @@ def map_by_det_id(self, aman): xi = aman.pointing.xi[msk][srt][mapping] eta = aman.pointing.eta[msk][srt][mapping] r2 = np.nan + np.zeros_like(eta) - if "r2" in aman.pointing: + if "R2" in aman.pointing: r2 = aman.pointing.R2[msk][srt][mapping] if "polarization" in aman: # name of field just a placeholder for now From 5567fe53870d448714cb3d9d4f19b87e7a0ee986 Mon Sep 17 00:00:00 2001 From: chervias Date: Fri, 4 Jul 2025 12:28:54 -0700 Subject: [PATCH 21/69] removing the logger from load_an_preprocess since I dont want so much verbosity --- sotodlib/site_pipeline/make_ml_map.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index e0d5f3e16..d0d0a6513 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -189,7 +189,7 @@ class DataMissing(Exception): pass # Actually read the data with bench.mark("read_obs %s" % sub_id): #obs = context.get_obs(sub_id, meta=meta) - obs = pp_util.load_and_preprocess(obs_id, preproc, context=context, meta=meta, logger=L) + obs = pp_util.load_and_preprocess(obs_id, preproc, context=context, meta=meta) if obs.dets.count < 10: L.debug("Skipped %s (Not enough detectors)" % (sub_id)) continue From c205068dc65592e13fe3b62e167d78ec67d5b82c Mon Sep 17 00:00:00 2001 From: Elle Shaw Date: Mon, 7 Jul 2025 13:55:44 -0700 Subject: [PATCH 22/69] Rename LAT parameters. 
Reconfigure definitions for clarity. Added descriptions to quaternion rotations. --- sotodlib/coords/pointing_model.py | 99 +++++++++++++++++++++---------- 1 file changed, 67 insertions(+), 32 deletions(-) diff --git a/sotodlib/coords/pointing_model.py b/sotodlib/coords/pointing_model.py index ae273862c..ffb510340 100644 --- a/sotodlib/coords/pointing_model.py +++ b/sotodlib/coords/pointing_model.py @@ -145,17 +145,17 @@ def model_lat_v1(params, az, el, roll): az, el, roll: naive horizon coordinates, in radians, of the boresight. - The implemented model parameters are: + The implemented model parameters are all in Radians: + - enc_offset_{az, el, cr}: Encoder offsets in Radians. + Sign convention: True = Encoder + Offset + - cr_center_{xi,eta}0: The (xi,eta) coordinate in the LATR-centered + focal plane that remains fixed under corotation. + - el_axis_center_{xi,eta}0: The (xi,eta) coordinate in the CR-centered + focal plane that appears fixed when the elevation structure is rotated + about its axis. + - mir_center_{xi,eta}0: The (xi,eta) coordinate in the El-structure-centered + focal plane about which any mirror misalignment rotates. - - rx_{xi, eta}_offset: The offset between the LATR center and the axis - of the corotator. - - cr_offset: The corotator encoder offset. - - el_{xi, eta}_offset: The offset between the elevation exis and the - corotator axis. - - mir_{xi, eta}_offset: The offset between the mirror's axis and the elevation - axis (ie: a tilt in the mirrors). - - az_offset: The azimuth encoder offset. - - el_offset: The elevation encoder offset. """ _p = dict(param_defaults['lat_v1']) if isinstance(params, dict): @@ -169,23 +169,58 @@ def model_lat_v1(params, az, el, roll): continue if k not in param_defaults['lat_v1'] and v != 0.: raise ValueError(f'Handling of model param "{k}" is not implemented.') - + + # Here we reconstruct the naive corotator value before applying + # the elevation encoder offset. 
cr = el - roll - np.deg2rad(60) - q_enc = quat.rotation_lonlat( - -1 * (az.copy() + params["az_offset"]), el.copy() + params["el_offset"] + + az_orig = az.copy() + az = az + params['enc_offset_az'] + el = el + params['enc_offset_el'] + cr = cr + params['enc_offset_cr'] + + # Lonlat rotation with az and el encoder offsets included. + q_lonlat = quat.rotation_lonlat(-1 * az, el) + + # Rotation that takes a vector in elevation-hub centered + # coordinates to final boresight centered coordinates. + # Accounts for any mirror-related offsets. + q_mir_center = ~quat.rotation_xieta( + params['mir_center_xi0'], + params['mir_center_eta0'] ) - q_mir = quat.rotation_xieta(params["mir_xi_offset"], params["mir_eta_offset"]) - q_el_roll = quat.euler(2, el.copy() + params["el_offset"] - np.deg2rad(60)) - q_tel = quat.rotation_xieta(params["el_xi_offset"], params["el_eta_offset"]) - q_cr_roll = quat.euler(2, -1 * cr - params["cr_offset"]) - q_rx = quat.rotation_xieta(params["rx_xi_offset"], params["rx_eta_offset"]) - new_az, el, roll = ( - quat.decompose_lonlat(q_enc * q_mir * q_el_roll * q_tel * q_cr_roll * q_rx) - * np.array([-1, 1, 1])[..., None] + + # Elevation component of roll motion + q_el_roll = quat.euler(2, el - np.deg2rad(60)) + + # Rotation that takes a vector in telescope's elevation-hub centered + # coordinates to mirror-centered coordinates + q_el_axis_center = ~quat.rotation_xieta( + params['el_axis_center_xi0'], + params['el_axis_center_eta0'] ) - change = ((new_az - az) + np.pi) % (2 * np.pi) - np.pi - az = az.copy() + change + # Corotator component of roll motion + q_cr_roll = quat.euler(2, -1 * cr) + + # Rotation that takes a vector in LATR/focal plane-centered coordinates to + # corotator-centered coordinates. 
+ q_cr_center = ~quat.rotation_xieta( + params['cr_center_xi0'], + params['cr_center_eta0'] + ) + + # Horizon Coordiantes + q_hs = ( + q_lonlat * q_mir_center + * q_el_roll * q_el_axis_center + * q_cr_roll * q_cr_center + ) + new_az, el, roll = quat.decompose_lonlat(q_hs)* np.array([-1, 1, 1])[..., None] + + # Make corrected az as close as possible to the input az. + change = ((new_az - az_orig) + np.pi) % (2 * np.pi) - np.pi + az = az_orig + change return az, el, roll @@ -265,15 +300,15 @@ def model_sat_v1(params, az, el, roll): # Support functions param_defaults={ 'lat_v1' : { - 'az_offset': 0, - 'el_offset': 0, - 'cr_offset': 0, - 'el_xi_offset': 0, - 'el_eta_offset': 0, - 'rx_xi_offset': 0, - 'rx_eta_offset': 0, - 'mir_xi_offset': 0, - 'mir_eta_offset': 0, + 'enc_offset_az': 0, + 'enc_offset_el': 0, + 'enc_offset_cr': 0, + 'el_axis_center_xi0': 0, + 'el_axis_center_eta0': 0, + 'cr_center_xi0': 0, + 'cr_center_eta0': 0, + 'mir_center_xi0': 0, + 'mir_center_eta0': 0, }, 'sat_v1' : { 'enc_offset_az': 0., From 125d7a7dfb408a8daad5848653bc3944bd684442 Mon Sep 17 00:00:00 2001 From: Saianeesh Keshav Haridas Date: Fri, 16 May 2025 16:35:37 +0000 Subject: [PATCH 23/69] fix: fixes to padding --- sotodlib/coords/fp_containers.py | 2 +- sotodlib/site_pipeline/finalize_focal_plane.py | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/sotodlib/coords/fp_containers.py b/sotodlib/coords/fp_containers.py index c7130d12d..13ebe88ad 100644 --- a/sotodlib/coords/fp_containers.py +++ b/sotodlib/coords/fp_containers.py @@ -250,7 +250,7 @@ def save(self, f, db_info, group): ("xi", np.float32), ("eta", np.float32), ("gamma", np.float32), - ("measured", np.bool_), + ("padded", np.bool_), ] fpout = np.fromiter( zip(self.det_ids, *(self.transformed.T), np.ones(len(self.det_ids), dtype=bool)*(self.padded)), dtype=outdt, count=ndets diff --git a/sotodlib/site_pipeline/finalize_focal_plane.py b/sotodlib/site_pipeline/finalize_focal_plane.py index 
f30423573..b9267a09c 100644 --- a/sotodlib/site_pipeline/finalize_focal_plane.py +++ b/sotodlib/site_pipeline/finalize_focal_plane.py @@ -810,15 +810,15 @@ def main(): todel = [] for name, ot in ots.items(): logger.info("Fitting common mode for %s", ot.name) - centers = np.vstack([fp.template.center for fp in ot.focal_planes if fp.tot_weight is not None]) - centers_transformed = np.vstack( - [fp.center_transformed for fp in ot.focal_planes if fp.tot_weight is not None] - ) - if len(centers) == 0: + if ot.num_fps == 0: logger.error("\tNo focal planes found! Skipping...") if not config.get("pad", False): todel.append(name) continue + centers = np.vstack([fp.template.center for fp in ot.focal_planes if fp.tot_weight is not None]) + centers_transformed = np.vstack( + [fp.center_transformed for fp in ot.focal_planes if fp.tot_weight is not None] + ) plot_ot(ot, plot_dir) if centers.shape[0] < 3: logger.warning( From 022fec04c6454df2917249e2553f334249d83ea5 Mon Sep 17 00:00:00 2001 From: Saianeesh Keshav Haridas Date: Tue, 8 Jul 2025 17:41:35 +0000 Subject: [PATCH 24/69] fix: skip obs with no roll in obsdb --- sotodlib/site_pipeline/finalize_focal_plane.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/sotodlib/site_pipeline/finalize_focal_plane.py b/sotodlib/site_pipeline/finalize_focal_plane.py index b9267a09c..d3f8f89ba 100644 --- a/sotodlib/site_pipeline/finalize_focal_plane.py +++ b/sotodlib/site_pipeline/finalize_focal_plane.py @@ -198,6 +198,8 @@ def _load_ctx(config): dets = config["context"].get("dets", {}) for obs_id in obs_ids: roll = ctx.obsdb.get(obs_id)["roll_center"] + if roll is None: + continue if roll < roll_range[0] or roll > roll_range[1]: logger.info("%s has a roll that is out of range", obs_id) continue From 6c36a1493b364bc6d0cb3efd67d83bcef9a4708f Mon Sep 17 00:00:00 2001 From: Saianeesh Keshav Haridas Date: Thu, 10 Jul 2025 07:54:24 -0700 Subject: [PATCH 25/69] switch to close --- sotodlib/coords/fp_containers.py | 8 ++++---- 1 file changed, 
4 insertions(+), 4 deletions(-) diff --git a/sotodlib/coords/fp_containers.py b/sotodlib/coords/fp_containers.py index 13ebe88ad..88d442eab 100644 --- a/sotodlib/coords/fp_containers.py +++ b/sotodlib/coords/fp_containers.py @@ -559,7 +559,7 @@ def plot_ufm(focal_plane, plot_dir): plt.savefig( os.path.join(plot_dir, f"{focal_plane.stream_id}.png"), bbox_inches="tight" ) - plt.clf() + plt.close() def plot_ot(ot, plot_dir): @@ -600,7 +600,7 @@ def plot_ot(ot, plot_dir): else: os.makedirs(plot_dir, exist_ok=True) plt.savefig(os.path.join(plot_dir, f"{ot.name}.png"), bbox_inches="tight") - plt.clf() + plt.close() def plot_by_gamma(focal_plane, plot_dir): @@ -637,7 +637,7 @@ def plot_by_gamma(focal_plane, plot_dir): os.path.join(plot_dir, f"{focal_plane.stream_id}_by_gamma.png"), bbox_inches="tight", ) - plt.clf() + plt.close() def plot_receiver(receiver, plot_dir): @@ -713,4 +713,4 @@ def plot_receiver(receiver, plot_dir): else: os.makedirs(plot_dir, exist_ok=True) plt.savefig(os.path.join(plot_dir, f"receiver.png"), bbox_inches="tight") - plt.clf() + plt.close() From 96538da5784ea3d59f7016f4753e8e295d68e678 Mon Sep 17 00:00:00 2001 From: Saianeesh Keshav Haridas Date: Thu, 10 Jul 2025 07:55:19 -0700 Subject: [PATCH 26/69] fix: dont compute common modes when we have no data --- .../site_pipeline/finalize_focal_plane.py | 42 ++++++++++++++----- 1 file changed, 31 insertions(+), 11 deletions(-) diff --git a/sotodlib/site_pipeline/finalize_focal_plane.py b/sotodlib/site_pipeline/finalize_focal_plane.py index d3f8f89ba..843b4fc7c 100644 --- a/sotodlib/site_pipeline/finalize_focal_plane.py +++ b/sotodlib/site_pipeline/finalize_focal_plane.py @@ -568,7 +568,7 @@ def main(): # if not have_template: # logger.warning("\tNo template provided, arrays not found in any observations will be missing") # with h5py.File(template_path) as f: - # stream_ids = list(f.keys()) + # stream_ids = list(f.keys()) # Split up into batches # Right now either per_obs or all at once @@ -812,16 
+812,32 @@ def main(): todel = [] for name, ot in ots.items(): logger.info("Fitting common mode for %s", ot.name) - if ot.num_fps == 0: + centers = np.atleast_2d( + np.array( + [ + fp.template.center + for fp in ot.focal_planes + if fp.tot_weight is not None + ] + ) + ) + centers_transformed = np.atleast_2d( + np.array( + [ + fp.center_transformed + for fp in ot.focal_planes + if fp.tot_weight is not None + ] + ) + ) + if ot.num_fps == 0 or centers.size == 0 or centers_transformed.size == 0: logger.error("\tNo focal planes found! Skipping...") if not config.get("pad", False): todel.append(name) continue - centers = np.vstack([fp.template.center for fp in ot.focal_planes if fp.tot_weight is not None]) - centers_transformed = np.vstack( - [fp.center_transformed for fp in ot.focal_planes if fp.tot_weight is not None] - ) plot_ot(ot, plot_dir) + centers = centers.reshape((-1, 3)) + centers_transformed = centers_transformed.reshape((-1, 3)) if centers.shape[0] < 3: logger.warning( "\tToo few wafers fit to compute common mode, transform will be approximated" @@ -856,7 +872,13 @@ def main(): # Full receiver common mode logger.info("Fitting receiver common mode") - if len(ots) == 0: + centers = np.atleast_2d( + np.array([ot.center for ot in ots.values() if ot.num_fps > 0]) + ) + centers_transformed = np.atleast_2d( + np.array([ot.center_transformed for ot in ots.values() if ot.num_fps > 0]) + ) + if len(ots) == 0 or centers.size == 0 or centers_transformed.size == 0: logger.error("\tNo optics tubes found! 
Skipping...") continue elif len(ots) == 1: @@ -865,10 +887,8 @@ def main(): ) recv_transform = deepcopy(tuple(ots.values())[0].transform_fullcm) else: - centers = np.vstack([ot.center for ot in ots.values() if ot.num_fps > 0]) - centers_transformed = np.vstack( - [ot.center_transformed for ot in ots.values() if ot.num_fps > 0] - ) + centers = centers.reshape((-1, 3)) + centers_transformed = centers_transformed.reshape((-1, 3)) if len(ots) < 3: logger.info( "\tNot enough OTs to fit receiver common mode, transform will be approximated" From 1e17f5439d18054ab488f571daf642c46634d997 Mon Sep 17 00:00:00 2001 From: Saianeesh Keshav Haridas Date: Thu, 10 Jul 2025 10:18:40 -0700 Subject: [PATCH 27/69] update comments to address matthews review --- sotodlib/coords/pointing_model.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/sotodlib/coords/pointing_model.py b/sotodlib/coords/pointing_model.py index ffb510340..de4c2e9eb 100644 --- a/sotodlib/coords/pointing_model.py +++ b/sotodlib/coords/pointing_model.py @@ -136,7 +136,7 @@ def apply_pointing_model_lat(vers, params, ancil): # # LAT model(s) -# +# def model_lat_v1(params, az, el, roll): """Applies pointing model to (az, el, roll). @@ -145,8 +145,8 @@ def model_lat_v1(params, az, el, roll): az, el, roll: naive horizon coordinates, in radians, of the boresight. - The implemented model parameters are all in Radians: - - enc_offset_{az, el, cr}: Encoder offsets in Radians. + The implemented model parameters are all in radians: + - enc_offset_{az, el, cr}: Encoder offsets in radians. Sign convention: True = Encoder + Offset - cr_center_{xi,eta}0: The (xi,eta) coordinate in the LATR-centered focal plane that remains fixed under corotation. @@ -154,7 +154,7 @@ def model_lat_v1(params, az, el, roll): focal plane that appears fixed when the elevation structure is rotated about its axis. 
- mir_center_{xi,eta}0: The (xi,eta) coordinate in the El-structure-centered - focal plane about which any mirror misalignment rotates. + focal plane relative to which any tilts due to mirror misalignment occur. """ _p = dict(param_defaults['lat_v1']) @@ -193,8 +193,8 @@ def model_lat_v1(params, az, el, roll): # Elevation component of roll motion q_el_roll = quat.euler(2, el - np.deg2rad(60)) - # Rotation that takes a vector in telescope's elevation-hub centered - # coordinates to mirror-centered coordinates + # Rotation that takes a vector in telescope's corotator-centered + # coordinates to el-hub-centered coordinates q_el_axis_center = ~quat.rotation_xieta( params['el_axis_center_xi0'], params['el_axis_center_eta0'] @@ -210,7 +210,7 @@ def model_lat_v1(params, az, el, roll): params['cr_center_eta0'] ) - # Horizon Coordiantes + # Horizon Coordinates q_hs = ( q_lonlat * q_mir_center * q_el_roll * q_el_axis_center From d3133149f9ebff27287cfe75997f1358f3ad0ac4 Mon Sep 17 00:00:00 2001 From: Michael McCrackan Date: Wed, 16 Jul 2025 16:38:25 -0700 Subject: [PATCH 28/69] check if freq cutoffs in full aman --- sotodlib/preprocess/pcore.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sotodlib/preprocess/pcore.py b/sotodlib/preprocess/pcore.py index 974e6a53b..f00338c00 100644 --- a/sotodlib/preprocess/pcore.py +++ b/sotodlib/preprocess/pcore.py @@ -514,7 +514,8 @@ def run(self, aman, proc_aman=None, select=True, sim=False, update_plot=False): break # copy updated frequency cutoffs to full - full.move("frequency_cutoffs", None) + if "frequency_cutoffs" in full: + full.move("frequency_cutoffs", None) full.wrap("frequency_cutoffs", proc_aman["frequency_cutoffs"]) return full, success From b6b575335cfa58634f019a7f781357de8c6e3b97 Mon Sep 17 00:00:00 2001 From: Saianeesh Keshav Haridas Date: Sat, 26 Jul 2025 14:33:46 +0000 Subject: [PATCH 29/69] fix: missing quaternion in lat gamma calculation --- sotodlib/coords/optics.py | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/sotodlib/coords/optics.py b/sotodlib/coords/optics.py index bc816e58b..161e70919 100644 --- a/sotodlib/coords/optics.py +++ b/sotodlib/coords/optics.py @@ -439,7 +439,7 @@ def LAT_focal_plane(aman, zemax_path, x=None, y=None, pol=None, roll=0, tube_slo pol_el, pol_xel = LAT_pix2sky( pol_x, pol_y, sec2el, sec2xel, array2secx, array2secy, roll ) - pol_xi, pol_eta, _ = quat.decompose_xieta(quat.rotation_lonlat(-pol_xel, pol_el)) + pol_xi, pol_eta, _ = quat.decompose_xieta(quat.euler(1, np.deg2rad(90)) * quat.rotation_lonlat(-pol_xel, pol_el)) gamma = get_gamma(pol_xi, pol_eta) if np.isscalar(xi): gamma = gamma[0] From 13a46feb56a7b13b6f1e667f0a4e81c3f3cb72a5 Mon Sep 17 00:00:00 2001 From: Michael McCrackan Date: Tue, 9 Sep 2025 08:43:28 -0700 Subject: [PATCH 30/69] use old load_and_preprocess --- sotodlib/preprocess/preprocess_util.py | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/sotodlib/preprocess/preprocess_util.py b/sotodlib/preprocess/preprocess_util.py index 0d5f316ab..7426604a4 100644 --- a/sotodlib/preprocess/preprocess_util.py +++ b/sotodlib/preprocess/preprocess_util.py @@ -319,13 +319,8 @@ def load_preprocess_det_select(obs_id, configs, context=None, pipe = Pipeline(configs["process_pipe"], logger=logger) meta = context.get_meta(obs_id, dets=dets, meta=meta) - logger.info("Restricting detectors on all processes") - keep_all = np.ones(meta.dets.count,dtype=bool) - for process in pipe[:]: - keep = process.select(meta, in_place=False) - if isinstance(keep, np.ndarray): - keep_all &= keep - meta.restrict("dets", meta.dets.vals[keep_all]) + logger.info(f"Cutting on the last process: {pipe[-1].name}") + pipe[-1].select(meta) return meta @@ -372,7 +367,7 @@ def load_and_preprocess(obs_id, configs, context=None, dets=None, meta=None, else: pipe = Pipeline(configs["process_pipe"], logger=logger) aman = context.get_obs(meta, no_signal=no_signal) - pipe.run(aman, aman.preprocess, select=False) + 
pipe.run(aman, aman.preprocess) return aman From a4faf06315de3331aaa3ee411c69b27fb82bdd3b Mon Sep 17 00:00:00 2001 From: chervias Date: Tue, 23 Sep 2025 07:53:04 -0700 Subject: [PATCH 31/69] adding the sensitivity cut from Sigurd --- sotodlib/site_pipeline/make_ml_map.py | 49 +++++++++++++++++++++++++-- 1 file changed, 46 insertions(+), 3 deletions(-) diff --git a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index d0d0a6513..8d79a3a17 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -33,6 +33,21 @@ def get_parser(parser=None): parser.add_argument( "--unit", type=str, default="uK", help="Unit of the maps") return parser +sens_limits = {"f030":120, "f040":80, "f090":100, "f150":140, "f220":300, "f280":750} + +def sensitivity_cut(rms_uKrts, sens_lim, med_tol=0.2, max_lim=100): + import numpy as np + # First reject detectors with unreasonably low noise + good = rms_uKrts >= sens_lim + # Also reject far too noisy detectors + good &= rms_uKrts < sens_lim*max_lim + # Then reject outliers + if np.sum(good) == 0: return good + ref = np.median(rms_uKrts[good]) + good &= rms_uKrts > ref*med_tol + good &= rms_uKrts < ref/med_tol + return good + def main(**args): import numpy as np, sys, time, warnings, os, so3g from sotodlib.core import Context, AxisManager, IndexAxis @@ -190,7 +205,7 @@ class DataMissing(Exception): pass with bench.mark("read_obs %s" % sub_id): #obs = context.get_obs(sub_id, meta=meta) obs = pp_util.load_and_preprocess(obs_id, preproc, context=context, meta=meta) - if obs.dets.count < 10: + if obs.dets.count < 50: L.debug("Skipped %s (Not enough detectors)" % (sub_id)) continue # Check nans @@ -235,14 +250,42 @@ class DataMissing(Exception): pass # Optionally skip all the calibration. Useful for sims. 
if not args.nocal: + # measure rms + rms = np.std(obs.signal,-1) + rms *= (1/srate)**0.5 + if args.unit=='K': + good = sensitivity_cut(rms*1e6, sens_limits[band]) + elif args.unit == 'uK': + good = sensitivity_cut(rms, sens_limits[band]) + #nrms = np.sum(good) + # Cut detectors with too big a fraction of samples cut, + # or cuts occuring too often. + #cuts = obs.flags.glitch_flags.mask() + #cutfrac = cuts.sum()/np.prod(cuts.shape) + #cutdens = (cuts.bins[:,1]-cuts.bins[:,0])/cuts.nsamp + #good &= np.array((cutfrac < 0.1)) + #ndens = dev.np.sum(good) + # Cut all detectors if too large a fraction is cut + #good &= dev.np.sum(good)/meta.ndet_full > 0.25 + #nfinal = dev.np.sum(good) + #signal = dev.np.ascontiguousarray(signal[good]) # 600 ms! + #good = dev.get(good) # cuts, dets, fplane etc. need this on the cpu + #cuts = cuts [good] + if np.logical_not(good).sum() / obs.dets.count > 0.5: + L.debug("Skipped %s (more than 40pc of detectors cut by sens)" % (sub_id)) + continue + else: + obs.restrict("dets", good) + #L.debug(f"{sub_id} has {good.sum()}/{obs.dets.count} detectors good") + #obs.restrict("dets", good) # Apply pointing model - pointing_model.apply_pointing_model(obs) + #pointing_model.apply_pointing_model(obs) # Calibrate to pW #obs.signal = np.multiply(obs.signal.T, obs.det_cal.phase_to_pW).T # Calibrate to K_cmb #obs.signal = np.multiply(obs.signal.T, obs.abscal.abscal_cmb).T # Disqualify overly cut detectors - good_dets = mapmaking.find_usable_detectors(obs,maxcut=0.5) + good_dets = mapmaking.find_usable_detectors(obs) obs.restrict("dets", good_dets) if obs.dets.count == 0: L.debug("Skipped %s (all dets cut)" % (sub_id)) From 321d936ab037d66dd1ecff5778df04fd707289ef Mon Sep 17 00:00:00 2001 From: chervias Date: Tue, 30 Sep 2025 06:12:10 -0700 Subject: [PATCH 32/69] Updating estimate_rms the way Sigurd does it --- sotodlib/site_pipeline/make_ml_map.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git 
a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index 8d79a3a17..28716a778 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -1,3 +1,5 @@ +import numpy as np + def get_parser(parser=None): # a config file to pass all parameters is pending import argparse @@ -35,8 +37,17 @@ def get_parser(parser=None): sens_limits = {"f030":120, "f040":80, "f090":100, "f150":140, "f220":300, "f280":750} +def measure_rms(tod, dt=1, bsize=32, nblock=10): + tod = tod[:,:tod.shape[1]//bsize*bsize] + tod = tod.reshape(tod.shape[0],-1,bsize) + bstep = max(1,tod.shape[1]//nblock) + tod = tod[:,::bstep,:][:,:nblock,:] + rms = np.median(np.std(tod,-1),-1) + # to µK√s units + rms *= dt**0.5 + return rms + def sensitivity_cut(rms_uKrts, sens_lim, med_tol=0.2, max_lim=100): - import numpy as np # First reject detectors with unreasonably low noise good = rms_uKrts >= sens_lim # Also reject far too noisy detectors @@ -251,8 +262,7 @@ class DataMissing(Exception): pass # Optionally skip all the calibration. Useful for sims. 
if not args.nocal: # measure rms - rms = np.std(obs.signal,-1) - rms *= (1/srate)**0.5 + rms = measure_rms(obs.signal, dt=1/srate) if args.unit=='K': good = sensitivity_cut(rms*1e6, sens_limits[band]) elif args.unit == 'uK': From c83a13e470d3eab6bdce0a4d6e52735c12678729 Mon Sep 17 00:00:00 2001 From: Saianeesh Keshav Haridas Date: Wed, 1 Oct 2025 17:22:49 -0400 Subject: [PATCH 33/69] feat: add cli arg to downgrade input geometry (#1399) --- sotodlib/site_pipeline/make_ml_map.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index 28716a778..6adb0a51c 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -28,6 +28,7 @@ def get_parser(parser=None): parser.add_argument( "--nmat-dir", type=str, default="{odir}/nmats") parser.add_argument( "--nmat-mode", type=str, default="build", help="How to build the noise matrix. 'build': Always build from tod. 'cache': Use if available in nmat-dir, otherwise build and save. 'load': Load from nmat-dir, error if missing. 'save': Build from tod and save.") parser.add_argument("-d", "--downsample", type=str, default="1", help="Downsample TOD by this factor. ,-sep") + parser.add_argument("-D", "--downgrade", type=int, default=1, help="Downgrade the input area by this factor") parser.add_argument( "--maxiter", type=str, default="500",help="Max number of CG steps per pass. ,-sep") parser.add_argument( "--interpol", type=str, default="nearest", help="Pmat interpol per pass. ,-sep") parser.add_argument("-T", "--tiled" , type=int, default=1, help="0: untiled maps. 
Nonzero: tiled maps") @@ -84,6 +85,9 @@ class DataMissing(Exception): pass comm = mpi.COMM_WORLD shape, wcs = enmap.read_map_geometry(args.area) + if args.downgrade > 1: + shape, wcs = enmap.downgrade_geometry(shape, wcs, args.downgrade) + # Reconstruct that wcs in case default fields have changed; otherwise # we risk adding information in MPI due to reconstruction, and that # can cause is_compatible failures. From f6b315d7c18b71f8104d64eeddec85f4a3889fe5 Mon Sep 17 00:00:00 2001 From: Michael McCrackan Date: Thu, 2 Oct 2025 11:20:35 -0700 Subject: [PATCH 34/69] valid data backwards compatibility --- sotodlib/preprocess/preprocess_util.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/sotodlib/preprocess/preprocess_util.py b/sotodlib/preprocess/preprocess_util.py index c7e15e3e1..14a84c3a1 100644 --- a/sotodlib/preprocess/preprocess_util.py +++ b/sotodlib/preprocess/preprocess_util.py @@ -14,6 +14,7 @@ from sotodlib.coords import demod as demod_mm from sotodlib.tod_ops import t2pleakage from sotodlib.core.flagman import has_any_cuts +from so3g.proj import RangesMatrix from .. 
import core @@ -368,7 +369,11 @@ def load_and_preprocess(obs_id, configs, context=None, dets=None, meta=None, configs, context = get_preprocess_context(configs, context) meta = context.get_meta(obs_id, dets=dets, meta=meta) if 'valid_data' in meta.preprocess: - keep = has_any_cuts(meta.preprocess.valid_data.valid_data) + if isinstance(meta.preprocess.valid_data, core.AxisManager): + field = meta.preprocess.valid_data.valid_data + else: + field = meta.preprocess.valid_data + keep = has_any_cuts(field) meta.restrict("dets", keep) else: det_vals = load_preprocess_det_select(obs_id, configs=configs, context=context, @@ -454,7 +459,11 @@ def multilayer_load_and_preprocess(obs_id, configs_init, configs_proc, logger.info("Restricting detectors on all proc pipeline processes") if 'valid_data' in meta_proc.preprocess: - keep_all = has_any_cuts(meta_proc.preprocess.valid_data.valid_data) + if isinstance(meta_proc.preprocess.valid_data, core.AxisManager): + field = meta_proc.preprocess.valid_data.valid_data + else: + field = meta_proc.preprocess.valid_data + keep = has_any_cuts(field) else: keep_all = np.ones(meta_proc.dets.count, dtype=bool) for process in pipe_proc[:]: From d354b04ab780617312e8a1b64b48adaeeb8fb6bd Mon Sep 17 00:00:00 2001 From: Michael McCrackan Date: Thu, 2 Oct 2025 11:30:25 -0700 Subject: [PATCH 35/69] use default loader if aman --- sotodlib/preprocess/preprocess_util.py | 22 ++++++++++------------ 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/sotodlib/preprocess/preprocess_util.py b/sotodlib/preprocess/preprocess_util.py index 14a84c3a1..7008c22e2 100644 --- a/sotodlib/preprocess/preprocess_util.py +++ b/sotodlib/preprocess/preprocess_util.py @@ -368,12 +368,11 @@ def load_and_preprocess(obs_id, configs, context=None, dets=None, meta=None, configs, context = get_preprocess_context(configs, context) meta = context.get_meta(obs_id, dets=dets, meta=meta) - if 'valid_data' in meta.preprocess: - if isinstance(meta.preprocess.valid_data, 
core.AxisManager): - field = meta.preprocess.valid_data.valid_data - else: - field = meta.preprocess.valid_data - keep = has_any_cuts(field) + if ( + 'valid_data' in meta.preprocess and + isinstance(meta.preprocess.valid_data, core.AxisManager) + ): + keep = has_any_cuts(meta.preprocess.valid_data.valid_data) meta.restrict("dets", keep) else: det_vals = load_preprocess_det_select(obs_id, configs=configs, context=context, @@ -458,12 +457,11 @@ def multilayer_load_and_preprocess(obs_id, configs_init, configs_proc, pipe_proc = Pipeline(configs_proc["process_pipe"], logger=logger) logger.info("Restricting detectors on all proc pipeline processes") - if 'valid_data' in meta_proc.preprocess: - if isinstance(meta_proc.preprocess.valid_data, core.AxisManager): - field = meta_proc.preprocess.valid_data.valid_data - else: - field = meta_proc.preprocess.valid_data - keep = has_any_cuts(field) + if ( + 'valid_data' in meta_proc.preprocess and + isinstance(meta_proc.preprocess.valid_data, core.AxisManager) + ): + keep_all = has_any_cuts(meta_proc.preprocess.valid_data.valid_data) else: keep_all = np.ones(meta_proc.dets.count, dtype=bool) for process in pipe_proc[:]: From 35052c0f967eedb4b009f19d840956626d9580f8 Mon Sep 17 00:00:00 2001 From: Michael McCrackan Date: Thu, 2 Oct 2025 11:31:20 -0700 Subject: [PATCH 36/69] remove added import --- sotodlib/preprocess/preprocess_util.py | 1 - 1 file changed, 1 deletion(-) diff --git a/sotodlib/preprocess/preprocess_util.py b/sotodlib/preprocess/preprocess_util.py index 7008c22e2..11d83539c 100644 --- a/sotodlib/preprocess/preprocess_util.py +++ b/sotodlib/preprocess/preprocess_util.py @@ -14,7 +14,6 @@ from sotodlib.coords import demod as demod_mm from sotodlib.tod_ops import t2pleakage from sotodlib.core.flagman import has_any_cuts -from so3g.proj import RangesMatrix from .. 
import core From 4cb71b2da531aa91d21ae5b34371970144d22895 Mon Sep 17 00:00:00 2001 From: chervias Date: Tue, 14 Oct 2025 06:44:02 -0700 Subject: [PATCH 37/69] Datacount output on ml mapmaker --- sotodlib/site_pipeline/make_ml_map.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index 6adb0a51c..4937b6e3f 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -222,11 +222,13 @@ class DataMissing(Exception): pass obs = pp_util.load_and_preprocess(obs_id, preproc, context=context, meta=meta) if obs.dets.count < 50: L.debug("Skipped %s (Not enough detectors)" % (sub_id)) + L.debug("Datacount: %s full" % (sub_id)) continue # Check nans mask = np.logical_not(np.isfinite(obs.signal)) if mask.sum() > 0: L.debug("Skipped %s (a nan in signal)" % (sub_id)) + L.debug("Datacount: %s full" % (sub_id)) continue # Check all 0s zero_dets = np.sum(obs.signal, axis=1) @@ -286,12 +288,11 @@ class DataMissing(Exception): pass #good = dev.get(good) # cuts, dets, fplane etc. 
need this on the cpu #cuts = cuts [good] if np.logical_not(good).sum() / obs.dets.count > 0.5: - L.debug("Skipped %s (more than 40pc of detectors cut by sens)" % (sub_id)) + L.debug("Skipped %s (more than 50pc of detectors cut by sens)" % (sub_id)) + L.debug("Datacount: %s full" % (sub_id)) continue else: obs.restrict("dets", good) - #L.debug(f"{sub_id} has {good.sum()}/{obs.dets.count} detectors good") - #obs.restrict("dets", good) # Apply pointing model #pointing_model.apply_pointing_model(obs) # Calibrate to pW @@ -299,10 +300,11 @@ class DataMissing(Exception): pass # Calibrate to K_cmb #obs.signal = np.multiply(obs.signal.T, obs.abscal.abscal_cmb).T # Disqualify overly cut detectors - good_dets = mapmaking.find_usable_detectors(obs) + good_dets = mapmaking.find_usable_detectors(obs, maxcut=0.3) obs.restrict("dets", good_dets) if obs.dets.count == 0: L.debug("Skipped %s (all dets cut)" % (sub_id)) + L.debug("Datacount: %s full" % (sub_id)) continue # Gapfill glitches. This function name isn't the clearest #tod_ops.get_gap_fill(obs, flags=obs.flags.glitch_flags, swap=True) @@ -342,6 +344,8 @@ class DataMissing(Exception): pass if passinfo.downsample != 1: obs = mapmaking.downsample_obs(obs, passinfo.downsample) + mmask = obs.flags.glitch_flags.mask() + L.debug(f"Datacount: {sub_id} added {obs.dets.count} {np.logical_not(mmask).sum()} ") # Maybe load precomputed noise model. # FIXME: How to handle multipass here? 
@@ -373,6 +377,7 @@ class DataMissing(Exception): pass mapmaking.write_nmat(nmat_file, mapmaker.data[-1].nmat) except (DataMissing,IndexError,ValueError,LoaderError) as e: L.debug("Skipped %s (%s)" % (sub_id, str(e))) + L.debug("Datacount: %s full" % (sub_id)) continue nkept = comm.allreduce(nkept) From d7831570cf7691424bf6eadfa8bf5f7beb42c14c Mon Sep 17 00:00:00 2001 From: chervias Date: Tue, 21 Oct 2025 10:23:40 -0700 Subject: [PATCH 38/69] Fixing bug that would kill ml mapmaker when there was a mpi rank left without obs --- sotodlib/mapmaking/ml_mapmaker.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/sotodlib/mapmaking/ml_mapmaker.py b/sotodlib/mapmaking/ml_mapmaker.py index ac674f7c4..6e559231b 100644 --- a/sotodlib/mapmaking/ml_mapmaker.py +++ b/sotodlib/mapmaking/ml_mapmaker.py @@ -539,8 +539,8 @@ def prepare(self): """Process the added observations, determining our degrees of freedom etc. Should be done before calling forward and backward.""" if self.ready: return - self.rhs = np.concatenate(self.rhs) - self.div = np.concatenate(self.div) + self.rhs = np.concatenate(self.rhs) if len(self.rhs) > 0 else np.zeros(0, self.dtype) + self.div = np.concatenate(self.div) if len(self.div) > 0 else np.zeros(0, self.dtype) self.dof = smutils.ArrayZipper(self.rhs.shape, dtype=self.dtype, comm=self.comm) self.ready = True @@ -682,7 +682,7 @@ def prepare(self): Should be done before calling forward and backward.""" if self.ready: return SignalCut.prepare(self) - self.distsamps= np.concatenate(self.distsamps) + self.distsamps = np.concatenate(self.distsamps) if len(self.distsamps) > 0 else np.zeros(0, self.dtype) x = np.minimum(self.distsamps/self.redge, 1) self.epsilon = np.exp(np.log(self.eps_edge) * (1-x) + np.log(self.eps_core) * x) self.epsilon *= self.div From 5383c9c6485fa229391ed1e42723b61a9c804eb3 Mon Sep 17 00:00:00 2001 From: Carlos Hervias Date: Thu, 13 Nov 2025 12:31:32 -0300 Subject: [PATCH 39/69] Dropping downweights in the ACT
noise model --- sotodlib/site_pipeline/make_ml_map.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index 4937b6e3f..ce25baf6e 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -183,7 +183,7 @@ class DataMissing(Exception): pass # split into two parts signal.translate_single and signal.forward_single). But I don't think those # building blocks would be very reusable, and the full thing is more general. if args.nmat == "uncorr": noise_model = mapmaking.NmatUncorr() - elif args.nmat == "corr": noise_model = mapmaking.NmatDetvecs(verbose=verbose>1, downweight=[1e-4, 0.25, 0.50], window=args.window) + elif args.nmat == "corr": noise_model = mapmaking.NmatDetvecs(verbose=verbose>1, window=args.window) else: raise ValueError("Unrecognized noise model '%s'" % args.nmat) signal_cut = mapmaking.SignalCut(comm, dtype=dtype_tod) From 902817abf3033449e59c64d9c0e046d1119748d0 Mon Sep 17 00:00:00 2001 From: chervias Date: Tue, 18 Nov 2025 03:57:08 -0800 Subject: [PATCH 40/69] Adding sidelobe cutting functions --- sotodlib/coords/__init__.py | 1 + sotodlib/coords/sidelobes.py | 67 ++++++++++++++++++++++++++++++++++++ 2 files changed, 68 insertions(+) create mode 100644 sotodlib/coords/sidelobes.py diff --git a/sotodlib/coords/__init__.py b/sotodlib/coords/__init__.py index d29e0373c..5dd371831 100644 --- a/sotodlib/coords/__init__.py +++ b/sotodlib/coords/__init__.py @@ -9,3 +9,4 @@ from . import demod from . import pointing_model from . import healpix_utils +from . 
import sidelobes diff --git a/sotodlib/coords/sidelobes.py b/sotodlib/coords/sidelobes.py new file mode 100644 index 000000000..ac9a8c78b --- /dev/null +++ b/sotodlib/coords/sidelobes.py @@ -0,0 +1,67 @@ +import numpy as np +from pixell import enmap, utils, coordsys +from sotodlib import coords, mapmaking +import so3g + +def Simplecut(ndets, nsamps): + return so3g.proj.RangesMatrix.zeros((ndets,nsamps)) + +def sidelobe_cut(obs, args, sidelobe_cutters, object_list=None): + if object_list is None: object_list = ["sun", "moon"] + cutss = [] + for name in object_list: + if name not in sidelobe_cutters: + fname = args[name + "_mask"] + if not fname: raise ValueError("config setting %s_mask missing for sidelobe cut" % name) + sidelobe_cutters[name] = SidelobeCutter(fname, objname=name, dtype=obs.signal.dtype) + cutter = sidelobe_cutters[name] + cuts = cutter.make_cuts(obs) + cutss.append(cuts) + return cutss + +class SidelobeCutter: + def __init__(self, mask, objname, dtype=np.float32, rise_tol=1*utils.degree): + self.mask = mask + self.lmap = enmap.read_map(mask).astype(np.double) + self.objname= objname + self.sys_sotodlib = "%s,up=hor" % np.char.capitalize([objname])[0] + self.sys_pixell = "hor,on=%s" % objname + self.rise_tol = rise_tol + self.dtype = dtype + + def make_cuts(self, obs): + # First check if the object is above the horizon. 
We just check the endpoints + # to keep things simple + ts = obs.timestamps[[0,-1]] + hor = coordsys.transform(self.sys_pixell, "hor", coordsys.Coords(ra=0, dec=0), ctime=ts, site="so", weather=mapmaking.unarr(obs.weather)) + risen = np.any(hor.el > self.rise_tol) + if not risen: return Simplecut(obs.signal.shape[0], obs.signal.shape[1]) + # Ok, it's above the horizon, check which samples are affected + try: + recenter = mapmaking.parse_recentering(self.sys_sotodlib) + rot = mapmaking.recentering_to_quat_lonlat(*mapmaking.evaluate_recentering( + recenter, ctime=obs.timestamps[len(obs.timestamps) // 2], + geom=(self.lmap.shape, self.lmap.wcs), + site=mapmaking.unarr(obs.site), + )) + pmat = coords.pmat.P.for_tod(obs, comps='T', geom=(self.lmap.shape, self.lmap.wcs), + rot=rot, threads="domdir", weather=mapmaking.unarr(obs.weather), + site=mapmaking.unarr(obs.site) + ) + tod = np.zeros(obs.signal.shape[:2], dtype=self.dtype) + forward(tod, self.lmap, pmat) + except RuntimeError: + # This happens when the interpolated pointing ends up outside the -npix:+2npix range + # that gpu_mm allows. This can happen when a detector moves too close to the north pole + # in the object-centered coordinates, which in CAR makes it seem to teleport by 180 + # degrees. When combined with unwinding, some jumps being sligthly less than 180 and + # some slightly more than 180 can lead to the numbers drifting over a large range. + # It might be better to catch this by just looking for values too close to the poles + # explicitly instead of relying on gpu_mm to catch things itself + #L.print("Error cutting %s sidelobes for %s: Pointing overflow. 
Skipping cut" % (self.objname, id), level=2, color=colors.red) + return Simplecut(obs.signal.shape[0], obs.signal.shape[1]) + cuts = so3g.proj.RangesMatrix.from_mask(tod.astype(int)) + return cuts + +def forward(tod, map, pmat): + pmat.from_map(dest=tod, signal_map=map, comps="T",) From 5fb42ea00efb71e5667a11fd2efb7ae65ccd8f98 Mon Sep 17 00:00:00 2001 From: chervias Date: Tue, 18 Nov 2025 04:14:39 -0800 Subject: [PATCH 41/69] Adding docstring for sidelobes --- sotodlib/coords/sidelobes.py | 41 ++++++++++++++++++++++++++++++------ 1 file changed, 35 insertions(+), 6 deletions(-) diff --git a/sotodlib/coords/sidelobes.py b/sotodlib/coords/sidelobes.py index ac9a8c78b..32033f69d 100644 --- a/sotodlib/coords/sidelobes.py +++ b/sotodlib/coords/sidelobes.py @@ -3,10 +3,36 @@ from sotodlib import coords, mapmaking import so3g -def Simplecut(ndets, nsamps): - return so3g.proj.RangesMatrix.zeros((ndets,nsamps)) - def sidelobe_cut(obs, args, sidelobe_cutters, object_list=None): + """ + Function to calculate cuts (RangesMatrix) for Sun and Moon + from a binary sidelobe mask with an arbitrary shape. + The masks MUST BE full sky and relatively low resolution since + the map is loaded as a non-tiled enmap. + + Parameters + ---------- + obs : sotodlib.core.AxisManager + An observation axis manager. + args : dict + An argument dictionary, which must contains the path to the + masks in mask_sun and mask_moon. This is only used for these + paths. + sidelobe_cutters : dict + A dict holding SidelobeCutter for each sun and moon. In the + first run this should be empty and will be filled. Subsequent + runs will use these precalculated objects. + object_list : list, optional + A list of the objects we want to mask. If None, sun and moon + will be run. + + Returns + ------- + cutss : list + A list where the elements are the cuts for the objects requested. + Each will be a RangesMatrix with shape (ndets,nsamps). 
+ """ + if object_list is None: object_list = ["sun", "moon"] cutss = [] for name in object_list: @@ -19,12 +45,15 @@ def sidelobe_cut(obs, args, sidelobe_cutters, object_list=None): cutss.append(cuts) return cutss +def Simplecut(ndets, nsamps): + return so3g.proj.RangesMatrix.zeros((ndets,nsamps)) + class SidelobeCutter: def __init__(self, mask, objname, dtype=np.float32, rise_tol=1*utils.degree): self.mask = mask - self.lmap = enmap.read_map(mask).astype(np.double) + self.lmap = enmap.read_map(mask).astype(np.double) #from_map will complain if the map is not double self.objname= objname - self.sys_sotodlib = "%s,up=hor" % np.char.capitalize([objname])[0] + self.sys_sotodlib = "%s,up=hor" % np.char.capitalize([objname])[0] # this is to capitalize the first letter self.sys_pixell = "hor,on=%s" % objname self.rise_tol = rise_tol self.dtype = dtype @@ -60,7 +89,7 @@ def make_cuts(self, obs): # explicitly instead of relying on gpu_mm to catch things itself #L.print("Error cutting %s sidelobes for %s: Pointing overflow. Skipping cut" % (self.objname, id), level=2, color=colors.red) return Simplecut(obs.signal.shape[0], obs.signal.shape[1]) - cuts = so3g.proj.RangesMatrix.from_mask(tod.astype(int)) + cuts = so3g.proj.RangesMatrix.from_mask(tod.astype(int)) #here the mask is expected to be int return cuts def forward(tod, map, pmat): From 3a7572b73ad7e436c8e546f43b2bdd2b2ac3a086 Mon Sep 17 00:00:00 2001 From: chervias Date: Tue, 18 Nov 2025 04:22:15 -0800 Subject: [PATCH 42/69] fixing indentation --- sotodlib/coords/sidelobes.py | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/sotodlib/coords/sidelobes.py b/sotodlib/coords/sidelobes.py index 32033f69d..79a82b58a 100644 --- a/sotodlib/coords/sidelobes.py +++ b/sotodlib/coords/sidelobes.py @@ -9,7 +9,7 @@ def sidelobe_cut(obs, args, sidelobe_cutters, object_list=None): from a binary sidelobe mask with an arbitrary shape. 
The masks MUST BE full sky and relatively low resolution since the map is loaded as a non-tiled enmap. - + Parameters ---------- obs : sotodlib.core.AxisManager @@ -25,25 +25,25 @@ def sidelobe_cut(obs, args, sidelobe_cutters, object_list=None): object_list : list, optional A list of the objects we want to mask. If None, sun and moon will be run. - + Returns ------- cutss : list A list where the elements are the cuts for the objects requested. Each will be a RangesMatrix with shape (ndets,nsamps). """ - - if object_list is None: object_list = ["sun", "moon"] - cutss = [] - for name in object_list: - if name not in sidelobe_cutters: - fname = args[name + "_mask"] - if not fname: raise ValueError("config setting %s_mask missing for sidelobe cut" % name) - sidelobe_cutters[name] = SidelobeCutter(fname, objname=name, dtype=obs.signal.dtype) - cutter = sidelobe_cutters[name] - cuts = cutter.make_cuts(obs) - cutss.append(cuts) - return cutss + + if object_list is None: object_list = ["sun", "moon"] + cutss = [] + for name in object_list: + if name not in sidelobe_cutters: + fname = args[name + "_mask"] + if not fname: raise ValueError("config setting %s_mask missing for sidelobe cut" % name) + sidelobe_cutters[name] = SidelobeCutter(fname, objname=name, dtype=obs.signal.dtype) + cutter = sidelobe_cutters[name] + cuts = cutter.make_cuts(obs) + cutss.append(cuts) + return cutss def Simplecut(ndets, nsamps): return so3g.proj.RangesMatrix.zeros((ndets,nsamps)) @@ -57,7 +57,7 @@ def __init__(self, mask, objname, dtype=np.float32, rise_tol=1*utils.degree): self.sys_pixell = "hor,on=%s" % objname self.rise_tol = rise_tol self.dtype = dtype - + def make_cuts(self, obs): # First check if the object is above the horizon. 
We just check the endpoints # to keep things simple From bbcf0b2370f858ad395b0c4231ef7963b18a774c Mon Sep 17 00:00:00 2001 From: chervias Date: Tue, 18 Nov 2025 04:26:18 -0800 Subject: [PATCH 43/69] adding the numpy-quaternion dependence --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 87535c3fe..a47e318d8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,6 +29,7 @@ dependencies = [ "psycopg2-binary", "lmfit", "flacarray>=0.3.4", + "numpy-quaternion", ] dynamic=["version"] classifiers = [ From 802503046c23ee7d8b6b71f133cb8fc5b0ba44e3 Mon Sep 17 00:00:00 2001 From: chervias Date: Wed, 14 Jan 2026 09:42:08 -0800 Subject: [PATCH 44/69] Changes to make_ml_map.py to use the sidelobe cutter --- sotodlib/site_pipeline/make_ml_map.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index c655584ff..0cb983414 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -34,6 +34,8 @@ def get_parser(parser=None): parser.add_argument("-T", "--tiled" , type=int, default=1, help="0: untiled maps. Nonzero: tiled maps") parser.add_argument( "--srcsamp", type=str, default=None, help="path to mask file where True regions indicate where bright object mitigation should be applied. Mask is in equatorial coordinates. 
Not tiled, so should be low-res to not waste memory.") parser.add_argument( "--unit", type=str, default="uK", help="Unit of the maps") + parser.add_argument( "--sun-mask", type=str, default="/global/cfs/cdirs/sobs/users/sigurdkn/masks/sidelobe/sun.fits", help="Location of Sun sidelobe mask") + parser.add_argument( "--moon-mask", type=str, default="/global/cfs/cdirs/sobs/users/sigurdkn/masks/sidelobe/moon.fits", help="Location of Moon sidelobe mask") return parser sens_limits = {"f030":120, "f040":80, "f090":100, "f150":140, "f220":300, "f280":750} @@ -65,7 +67,7 @@ def main(**args): from sotodlib.core import Context, AxisManager, IndexAxis from sotodlib import tod_ops, mapmaking, core from sotodlib.tod_ops import filters - from sotodlib.coords import pointing_model + from sotodlib.coords import pointing_model, sidelobes from sotodlib.mapmaking import log from sotodlib.preprocess import preprocess_util as pp_util from sotodlib.core import metadata @@ -197,6 +199,7 @@ class DataMissing(Exception): pass # to factorize out that zeroing into its own thing that's easier to control. signals.append(signal_srcsamp) mapmaker = mapmaking.MLMapmaker(signals, noise_model=noise_model, dtype=dtype_tod, verbose=verbose>0) + sidelobe_cutters = {} nkept = 0 # TODO: Fix the task distribution. The current one doesn't care which mpi @@ -351,6 +354,11 @@ class DataMissing(Exception): pass mmask = obs.flags.glitch_flags.mask() L.debug(f"Datacount: {sub_id} added {obs.dets.count} {np.logical_not(mmask).sum()} ") + # sidelobes cuts + cutss = sidelobes.sidelobe_cut(obs, args, sidelobe_cutters) + for cut in cutss: + obs.flags.glitch_flags += cut + # Maybe load precomputed noise model. # FIXME: How to handle multipass here? 
nmat_file = nmat_dir + "/nmat_%s.hdf" % name From 95dcfc57a587c0e0e3f381716dc65e8b0ba610af Mon Sep 17 00:00:00 2001 From: chervias Date: Wed, 14 Jan 2026 10:02:30 -0800 Subject: [PATCH 45/69] Adding the hits option to make_ml_map.fits --- sotodlib/site_pipeline/make_ml_map.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index 0cb983414..9ea66df17 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -36,6 +36,7 @@ def get_parser(parser=None): parser.add_argument( "--unit", type=str, default="uK", help="Unit of the maps") parser.add_argument( "--sun-mask", type=str, default="/global/cfs/cdirs/sobs/users/sigurdkn/masks/sidelobe/sun.fits", help="Location of Sun sidelobe mask") parser.add_argument( "--moon-mask", type=str, default="/global/cfs/cdirs/sobs/users/sigurdkn/masks/sidelobe/moon.fits", help="Location of Moon sidelobe mask") + parser.add_argument("-h", "--hits", action="store_true", help="Write hits maps") return parser sens_limits = {"f030":120, "f040":80, "f090":100, "f150":140, "f220":300, "f280":750} @@ -408,6 +409,9 @@ class DataMissing(Exception): pass signal_map.write(prefix, "div", signal_map.div, unit=args.unit+'^-2') signal_map.write(prefix, "bin", enmap.map_mul(signal_map.idiv, signal_map.rhs), unit=args.unit) L.info("Wrote rhs, div, bin") + if args.hits: + signal_map.write(prefix, "hits", signal_map.hits, unit='hits') + L.info("Wrote hits") # Set up initial condition x0 = None if ipass == 0 else mapmaker.translate(mapmaker_prev, eval_prev.x_zip) From 19bc5b642963efd2f0d7fea4a8fb87c81bcecddc Mon Sep 17 00:00:00 2001 From: chervias Date: Wed, 14 Jan 2026 10:41:21 -0800 Subject: [PATCH 46/69] Fixing an error on the --hits option --- sotodlib/site_pipeline/make_ml_map.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index 
9ea66df17..e7a988f37 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -36,7 +36,7 @@ def get_parser(parser=None): parser.add_argument( "--unit", type=str, default="uK", help="Unit of the maps") parser.add_argument( "--sun-mask", type=str, default="/global/cfs/cdirs/sobs/users/sigurdkn/masks/sidelobe/sun.fits", help="Location of Sun sidelobe mask") parser.add_argument( "--moon-mask", type=str, default="/global/cfs/cdirs/sobs/users/sigurdkn/masks/sidelobe/moon.fits", help="Location of Moon sidelobe mask") - parser.add_argument("-h", "--hits", action="store_true", help="Write hits maps") + parser.add_argument("--hits", action="store_true", help="Write hits maps") return parser sens_limits = {"f030":120, "f040":80, "f090":100, "f150":140, "f220":300, "f280":750} From 172f63cbc868b925691867b19cc38bf2a880a689 Mon Sep 17 00:00:00 2001 From: Saianeesh Keshav Haridas Date: Thu, 22 Jan 2026 21:18:49 -0500 Subject: [PATCH 47/69] ML Mapmaker fixes (#1404) * fix: pass recenter to srcsamp, adjust quaternion for hor recenter, default to planets module for recenter if possible * feat: dont reprocess observations that were cut in previous passes * fix: dont error out if some procs have no surviving tods * fix: dont call finalize explicitly * feat: add OT filter to ml mapmaker * feat: add option to null out some flags when finding usable detectors and take in usable detector thresh as a cli arg * fix: dont error if we are missing expected flags and some minor changes requested by carlos * fix: move MPI pruning to function and fix typos * fix: pass mapmaker to pruning func * fix: move mapmaker comm replacements back to script * fix: revert horizon centering --- sotodlib/mapmaking/ml_mapmaker.py | 1 + sotodlib/mapmaking/utils.py | 53 +++++++++++++++++++++++---- sotodlib/site_pipeline/make_ml_map.py | 30 +++++++++++++-- 3 files changed, 73 insertions(+), 11 deletions(-) diff --git a/sotodlib/mapmaking/ml_mapmaker.py 
b/sotodlib/mapmaking/ml_mapmaker.py index 6e559231b..3e078a094 100644 --- a/sotodlib/mapmaking/ml_mapmaker.py +++ b/sotodlib/mapmaking/ml_mapmaker.py @@ -644,6 +644,7 @@ def add_obs(self, id, obs, nmat, Nd): """ # First scan our mask to find which samples need this # treatment + ctime = obs.timestamps if self.recenter: rec = smutils.evaluate_recentering(self.recenter, ctime=ctime[len(ctime) // 2], geom=(self.mask.shape, self.mask.wcs), site=smutils.unarr(obs.site)) diff --git a/sotodlib/mapmaking/utils.py b/sotodlib/mapmaking/utils.py index bf931d8c3..399c93e93 100644 --- a/sotodlib/mapmaking/utils.py +++ b/sotodlib/mapmaking/utils.py @@ -1,3 +1,4 @@ +import sys import logging import os from typing import Optional, Any, Union @@ -380,7 +381,7 @@ def expand_ids(obs_ids, context=None, bands=None): sub_ids.append("%s:ws%d:%s" % (obs_id, si, band)) return sub_ids -def filter_subids(subids, wafers=None, bands=None): +def filter_subids(subids, wafers=None, bands=None, ots=None): subids = np.asarray(subids) if wafers is not None: wafs = astr_tok(subids,":",1) @@ -388,6 +389,11 @@ def filter_subids(subids, wafers=None, bands=None): if bands is not None: bpass = astr_tok(subids,":",2) subids = subids[np.isin(bpass, bands)] + if ots is not None: + # Somewhat hacky implementation + obs_ids = astr_tok(subids, ":",0) + has_ot = np.prod([np.char.find(obs_ids, ot) for ot in ots], 0) + subids = subids[has_ot >= 0] return subids def astr_cat(*arrs): @@ -484,10 +490,16 @@ def get_pos(name, ctime, sys=None): elif name == "auto": return np.array([0,0]) # would use geom here else: - obj = getattr(ephem, name)() - djd = ctime/86400 + 40587.0 + 2400000.5 - 2415020 - obj.compute(djd) - return np.array([obj.a_ra, obj.a_dec]) + try: + planet = coords.planets.SlowSource.for_named_source( + name, ctime) + ra0, dec0 = planet.pos(tod.timestamps.mean()) + except: + obj = getattr(ephem, name)() + djd = ctime/86400 + 40587.0 + 2400000.5 - 2415020 + obj.compute(djd) + ra0, dec0 = obj.a_ra, 
obj.a_dec + return np.array([ra0, dec0]) else: return to_cel(name, sys, ctime, site, weather) p1 = get_pos(info["from"], ctime, info["from_sys"]) @@ -569,8 +581,11 @@ def rangemat_sum(rangemat): res[i] = np.sum(ra[:,1]-ra[:,0]) return res -def find_usable_detectors(obs, maxcut=0.1, glitch_flags: str = "flags.glitch_flags"): - ncut = rangemat_sum(obs[glitch_flags]) +def find_usable_detectors(obs, maxcut=0.1, glitch_flags: str = "flags.glitch_flags", to_null : str = "flags.expected_flags"): + flag = obs[glitch_flags] + if to_null != "" and to_null in obs._fields: + flag = flag*~obs[to_null] + ncut = rangemat_sum(flag) good = ncut < obs.samps.count * maxcut return obs.dets.vals[good] @@ -830,3 +845,27 @@ def atomic_db_aux(atomic_db, info: list[AtomicInfo]): session.commit() except exc.IntegrityError: session.rollback() + + +def prune_mpi(comm, ranks_to_keep): + """ + Prune unneeded MPI procs. + + Arguments: + + comm: The MPI communicator currently in use. + + ranks_to_keep: List of current ranks to keep in the new communicator. + + Returns: + + comm: Modified communicator with only the processes we want to keep. + """ + group = comm.Get_group() + new_group = group.Incl(ranks_to_keep) + new_comm = comm.Create(new_group) + if comm.rank not in ranks_to_keep: + sys.exit(0) + comm = new_comm + + return comm diff --git a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index e7a988f37..e0744591e 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -12,6 +12,7 @@ def get_parser(parser=None): parser.add_argument("prefix", nargs="?") parser.add_argument( "--comps", type=str, default="TQU",help="List of components to solve for. T, QU or TQU, but only TQU is consistent with the actual data") parser.add_argument("-W", "--wafers", type=str, default=None, help="Detector wafer subsets to map with. ,-sep") + parser.add_argument("-O", "--ots", type=str, default=None, help="Optics tubes to map with. 
,-sep") parser.add_argument("-B", "--bands", type=str, default=None, help="Bandpasses to map. ,-sep") parser.add_argument("-C", "--context", type=str, default="/mnt/so1/shared/todsims/pipe-s0001/v4/context.yaml") parser.add_argument( "--tods", type=str, default=None, help="Arbitrary slice to apply to the list of tods to analyse") @@ -34,6 +35,7 @@ def get_parser(parser=None): parser.add_argument("-T", "--tiled" , type=int, default=1, help="0: untiled maps. Nonzero: tiled maps") parser.add_argument( "--srcsamp", type=str, default=None, help="path to mask file where True regions indicate where bright object mitigation should be applied. Mask is in equatorial coordinates. Not tiled, so should be low-res to not waste memory.") parser.add_argument( "--unit", type=str, default="uK", help="Unit of the maps") + parser.add_argument( "--maxcut", type=float, default=.3, help="Maximum fraction of cut samples in a detector.") parser.add_argument( "--sun-mask", type=str, default="/global/cfs/cdirs/sobs/users/sigurdkn/masks/sidelobe/sun.fits", help="Location of Sun sidelobe mask") parser.add_argument( "--moon-mask", type=str, default="/global/cfs/cdirs/sobs/users/sigurdkn/masks/sidelobe/moon.fits", help="Location of Moon sidelobe mask") parser.add_argument("--hits", action="store_true", help="Write hits maps") @@ -115,10 +117,11 @@ class DataMissing(Exception): pass with bench.mark('context'): context = Context(args.context) + ots = args.ots.split(",") if args.ots else None wafers = args.wafers.split(",") if args.wafers else None bands = args.bands .split(",") if args.bands else None sub_ids = mapmaking.get_subids(args.query, context=context) - sub_ids = mapmaking.filter_subids(sub_ids, wafers=wafers, bands=bands) + sub_ids = mapmaking.filter_subids(sub_ids, wafers=wafers, bands=bands, ots=ots) # restrict tod selection further. E.g. --tods [0], --tods[:1], --tods[::100], --tods[[0,1,5,10]], etc. 
if args.tods: @@ -147,6 +150,7 @@ class DataMissing(Exception): pass sys.exit(1) passes = mapmaking.setup_passes(downsample=args.downsample, maxiter=args.maxiter, interpol=args.interpol) + to_skip = [] for ipass, passinfo in enumerate(passes): L.info("Starting pass %d/%d maxit %d down %d interp %s" % (ipass+1, len(passes), passinfo.maxiter, passinfo.downsample, passinfo.interpol)) pass_prefix = prefix + "pass%d_" % (ipass+1) @@ -194,6 +198,7 @@ class DataMissing(Exception): pass signal_map = mapmaking.SignalMap(shape, wcs, comm, comps=comps, dtype=dtype_map, recenter=recenter, tiled=args.tiled>0, interpol=args.interpol) signals = [signal_cut, signal_map] if args.srcsamp: + signal_srcsamp = mapmaking.SignalSrcsamp(comm, srcsamp_mask, recenter=recenter, dtype=dtype_tod) signal_srcsamp = mapmaking.SignalSrcsamp(comm, srcsamp_mask, dtype=dtype_tod) # This *must* come after all the other signals due to how it zeros out the # affected samples in the tod (inherited from SignalCut). Might have been better @@ -203,6 +208,7 @@ class DataMissing(Exception): pass sidelobe_cutters = {} nkept = 0 + to_skip_all = comm.allreduce(to_skip) # TODO: Fix the task distribution. The current one doesn't care which mpi # task gets which tods, which sabotages the pixel-saving effects of tiled maps! # To be able to distribute the tods sensibly, we need a rough estimate of where @@ -214,6 +220,10 @@ class DataMissing(Exception): pass obs_id, wafer, band = sub_id.split(":") name = sub_id.replace(":", "_") L.debug("Processing %s" % sub_id) + if sub_id in to_skip_all: + L.debug("Skipped %s (Cut in previous pass)" % (sub_id)) + continue + try: meta = context.get_meta(sub_id) # Optionally restrict to maximum number of detectors. 
This is mainly @@ -231,12 +241,14 @@ class DataMissing(Exception): pass if obs.dets.count < 50: L.debug("Skipped %s (Not enough detectors)" % (sub_id)) L.debug("Datacount: %s full" % (sub_id)) + to_skip += [sub_id] continue # Check nans mask = np.logical_not(np.isfinite(obs.signal)) if mask.sum() > 0: L.debug("Skipped %s (a nan in signal)" % (sub_id)) L.debug("Datacount: %s full" % (sub_id)) + to_skip += [sub_id] continue # Check all 0s zero_dets = np.sum(obs.signal, axis=1) @@ -298,6 +310,7 @@ class DataMissing(Exception): pass if np.logical_not(good).sum() / obs.dets.count > 0.5: L.debug("Skipped %s (more than 50pc of detectors cut by sens)" % (sub_id)) L.debug("Datacount: %s full" % (sub_id)) + to_skip += [sub_id] continue else: obs.restrict("dets", good) @@ -308,9 +321,10 @@ class DataMissing(Exception): pass # Calibrate to K_cmb #obs.signal = np.multiply(obs.signal.T, obs.abscal.abscal_cmb).T # Disqualify overly cut detectors - good_dets = mapmaking.find_usable_detectors(obs, maxcut=0.3) + good_dets = mapmaking.find_usable_detectors(obs, args.maxcut) obs.restrict("dets", good_dets) if obs.dets.count == 0: + to_skip += [sub_id] L.debug("Skipped %s (all dets cut)" % (sub_id)) L.debug("Datacount: %s full" % (sub_id)) continue @@ -393,11 +407,19 @@ class DataMissing(Exception): pass L.debug("Datacount: %s full" % (sub_id)) continue - nkept = comm.allreduce(nkept) - if nkept == 0: + nkept_all = np.array(comm.allgather(nkept)) + if np.sum(nkept_all) == 0: if comm.rank == 0: L.info("All tods failed. Giving up") sys.exit(1) + + if np.any(nkept_all == 0): + if nkept == 0: + L.info("No tods assigned to this process. 
Pruning") + comm = mapmaking.prune_mpi(comm, np.where(nkept_all > 0)[0]) + for signal in mapmaker.signals: + if hasattr(signal, "comm"): + signal.comm = comm L.info("Done building") From 7aa02b70df488e02288dfeff5b0edb6258166b73 Mon Sep 17 00:00:00 2001 From: chervias Date: Mon, 2 Feb 2026 10:08:04 -0800 Subject: [PATCH 48/69] Changes due to Matthews comments --- pyproject.toml | 2 +- sotodlib/coords/sidelobes.py | 67 +++++++++++++++--------------------- 2 files changed, 28 insertions(+), 41 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index dde55f2a9..4ff754f58 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,7 +29,7 @@ dependencies = [ "psycopg2-binary", "lmfit", "flacarray>=0.3.4", - "numpy-quaternion", + "numpy-quaternion", # Required quietly by pixell.coordsys "h5py", ] dynamic=["version"] diff --git a/sotodlib/coords/sidelobes.py b/sotodlib/coords/sidelobes.py index 79a82b58a..f54c77303 100644 --- a/sotodlib/coords/sidelobes.py +++ b/sotodlib/coords/sidelobes.py @@ -3,7 +3,7 @@ from sotodlib import coords, mapmaking import so3g -def sidelobe_cut(obs, args, sidelobe_cutters, object_list=None): +def get_cuts(aman, args, sidelobe_cutters=None, object_list=None): """ Function to calculate cuts (RangesMatrix) for Sun and Moon from a binary sidelobe mask with an arbitrary shape. @@ -12,20 +12,20 @@ def sidelobe_cut(obs, args, sidelobe_cutters, object_list=None): Parameters ---------- - obs : sotodlib.core.AxisManager + aman : sotodlib.core.AxisManager An observation axis manager. args : dict An argument dictionary, which must contains the path to the - masks in mask_sun and mask_moon. This is only used for these + masks in sun_mask and moon_mask. This is only used for these paths. - sidelobe_cutters : dict + sidelobe_cutters : dict, optional A dict holding SidelobeCutter for each sun and moon. In the first run this should be empty and will be filled. Subsequent runs will use these precalculated objects. 
object_list : list, optional - A list of the objects we want to mask. If None, sun and moon - will be run. - + A list of strings indicating the objects to mask. + If None, sun and moon will be run. + Returns ------- cutss : list @@ -34,18 +34,19 @@ def sidelobe_cut(obs, args, sidelobe_cutters, object_list=None): """ if object_list is None: object_list = ["sun", "moon"] + if sidelobe_cutters is None: sidelobe_cutters = {} cutss = [] for name in object_list: if name not in sidelobe_cutters: fname = args[name + "_mask"] if not fname: raise ValueError("config setting %s_mask missing for sidelobe cut" % name) - sidelobe_cutters[name] = SidelobeCutter(fname, objname=name, dtype=obs.signal.dtype) + sidelobe_cutters[name] = SidelobeCutter(fname, objname=name, dtype=aman.signal.dtype) cutter = sidelobe_cutters[name] - cuts = cutter.make_cuts(obs) + cuts = cutter.make_cuts(aman) cutss.append(cuts) return cutss -def Simplecut(ndets, nsamps): +def _simple_cut(ndets, nsamps): return so3g.proj.RangesMatrix.zeros((ndets,nsamps)) class SidelobeCutter: @@ -58,39 +59,25 @@ def __init__(self, mask, objname, dtype=np.float32, rise_tol=1*utils.degree): self.rise_tol = rise_tol self.dtype = dtype - def make_cuts(self, obs): + def make_cuts(self, aman): # First check if the object is above the horizon. 
We just check the endpoints # to keep things simple - ts = obs.timestamps[[0,-1]] - hor = coordsys.transform(self.sys_pixell, "hor", coordsys.Coords(ra=0, dec=0), ctime=ts, site="so", weather=mapmaking.unarr(obs.weather)) + ts = aman.timestamps[[0,-1]] + hor = coordsys.transform(self.sys_pixell, "hor", coordsys.Coords(ra=0, dec=0), ctime=ts, site="so", weather=mapmaking.unarr(aman.weather)) risen = np.any(hor.el > self.rise_tol) - if not risen: return Simplecut(obs.signal.shape[0], obs.signal.shape[1]) + if not risen: return _simple_cut(aman.signal.shape[0], aman.signal.shape[1]) # Ok, it's above the horizon, check which samples are affected - try: - recenter = mapmaking.parse_recentering(self.sys_sotodlib) - rot = mapmaking.recentering_to_quat_lonlat(*mapmaking.evaluate_recentering( - recenter, ctime=obs.timestamps[len(obs.timestamps) // 2], - geom=(self.lmap.shape, self.lmap.wcs), - site=mapmaking.unarr(obs.site), - )) - pmat = coords.pmat.P.for_tod(obs, comps='T', geom=(self.lmap.shape, self.lmap.wcs), - rot=rot, threads="domdir", weather=mapmaking.unarr(obs.weather), - site=mapmaking.unarr(obs.site) - ) - tod = np.zeros(obs.signal.shape[:2], dtype=self.dtype) - forward(tod, self.lmap, pmat) - except RuntimeError: - # This happens when the interpolated pointing ends up outside the -npix:+2npix range - # that gpu_mm allows. This can happen when a detector moves too close to the north pole - # in the object-centered coordinates, which in CAR makes it seem to teleport by 180 - # degrees. When combined with unwinding, some jumps being sligthly less than 180 and - # some slightly more than 180 can lead to the numbers drifting over a large range. - # It might be better to catch this by just looking for values too close to the poles - # explicitly instead of relying on gpu_mm to catch things itself - #L.print("Error cutting %s sidelobes for %s: Pointing overflow. 
Skipping cut" % (self.objname, id), level=2, color=colors.red) - return Simplecut(obs.signal.shape[0], obs.signal.shape[1]) + recenter = mapmaking.parse_recentering(self.sys_sotodlib) + rot = mapmaking.recentering_to_quat_lonlat(*mapmaking.evaluate_recentering( + recenter, ctime=aman.timestamps[len(aman.timestamps) // 2], + geom=(self.lmap.shape, self.lmap.wcs), + site=mapmaking.unarr(aman.site), + )) + pmat = coords.pmat.P.for_tod(aman, comps='T', geom=(self.lmap.shape, self.lmap.wcs), + rot=rot, threads="domdir", weather=mapmaking.unarr(aman.weather), + site=mapmaking.unarr(aman.site) + ) + tod = np.zeros(aman.signal.shape[:2], dtype=self.dtype) + pmat.from_map(dest=tod, signal_map=self.lmap, comps="T",) cuts = so3g.proj.RangesMatrix.from_mask(tod.astype(int)) #here the mask is expected to be int return cuts - -def forward(tod, map, pmat): - pmat.from_map(dest=tod, signal_map=map, comps="T",) From 6f160089c7e6367e9f324990043dca1148ff6f59 Mon Sep 17 00:00:00 2001 From: chervias Date: Mon, 2 Feb 2026 10:12:05 -0800 Subject: [PATCH 49/69] sidelobe function changed names in the ml mapmaker script --- sotodlib/site_pipeline/make_ml_map.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index e0744591e..49a56a9e5 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -370,7 +370,7 @@ class DataMissing(Exception): pass L.debug(f"Datacount: {sub_id} added {obs.dets.count} {np.logical_not(mmask).sum()} ") # sidelobes cuts - cutss = sidelobes.sidelobe_cut(obs, args, sidelobe_cutters) + cutss = sidelobes.get_cuts(obs, args, sidelobe_cutters) for cut in cutss: obs.flags.glitch_flags += cut From f7b8b3064043308320cc3cc01764877197644b94 Mon Sep 17 00:00:00 2001 From: chervias Date: Thu, 12 Feb 2026 06:15:36 -0800 Subject: [PATCH 50/69] Adding option to skip sidelobe masks --- sotodlib/site_pipeline/make_ml_map.py | 8 +++++--- 1 file 
changed, 5 insertions(+), 3 deletions(-) diff --git a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index 49a56a9e5..65de9854d 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -36,6 +36,7 @@ def get_parser(parser=None): parser.add_argument( "--srcsamp", type=str, default=None, help="path to mask file where True regions indicate where bright object mitigation should be applied. Mask is in equatorial coordinates. Not tiled, so should be low-res to not waste memory.") parser.add_argument( "--unit", type=str, default="uK", help="Unit of the maps") parser.add_argument( "--maxcut", type=float, default=.3, help="Maximum fraction of cut samples in a detector.") + parser.add_argument( "--no-sidelobe", action="store_true", help="Do not mask Moon/Sun sidelobes") parser.add_argument( "--sun-mask", type=str, default="/global/cfs/cdirs/sobs/users/sigurdkn/masks/sidelobe/sun.fits", help="Location of Sun sidelobe mask") parser.add_argument( "--moon-mask", type=str, default="/global/cfs/cdirs/sobs/users/sigurdkn/masks/sidelobe/moon.fits", help="Location of Moon sidelobe mask") parser.add_argument("--hits", action="store_true", help="Write hits maps") @@ -370,9 +371,10 @@ class DataMissing(Exception): pass L.debug(f"Datacount: {sub_id} added {obs.dets.count} {np.logical_not(mmask).sum()} ") # sidelobes cuts - cutss = sidelobes.get_cuts(obs, args, sidelobe_cutters) - for cut in cutss: - obs.flags.glitch_flags += cut + if not args.no_sidelobe: + cutss = sidelobes.get_cuts(obs, args, sidelobe_cutters) + for cut in cutss: + obs.flags.glitch_flags += cut # Maybe load precomputed noise model. # FIXME: How to handle multipass here? 
From f8cfaffe17dac56f3869775aefb6fbff33ffafca Mon Sep 17 00:00:00 2001 From: Bai-Chiang Date: Sat, 7 Mar 2026 17:48:35 -0600 Subject: [PATCH 51/69] mlmapmaker: Fix missing tiles in multi pass Co-authored-by: amaurea --- sotodlib/mapmaking/ml_mapmaker.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/sotodlib/mapmaking/ml_mapmaker.py b/sotodlib/mapmaking/ml_mapmaker.py index 6e559231b..5c4cc557d 100644 --- a/sotodlib/mapmaking/ml_mapmaker.py +++ b/sotodlib/mapmaking/ml_mapmaker.py @@ -486,10 +486,16 @@ def _checkcompat(self, other): def translate(self, other, map): """Translate map from another SignalMap representation to the current, returning a new map. The new map may be a reference to the original.""" - # Currently we don't support any actual translation, but could handle - # resolution changes in the future (probably not useful though) + # Check that the geometry doesn't change, since we don't support that self._checkcompat(other) - return map + # It's possible for the tile ownership to change between mapmaking + # passes, since the active tiles are determined based on which tiles were + # actually hit. + if self.tiled: + omap = tilemap.redistribute(map, self.comm, active=self.rhs.active) + return omap + else: + return map class SignalCut(Signal): def __init__( From ca0f8a880a941db0b09b103ef117b232824274b8 Mon Sep 17 00:00:00 2001 From: Bai-Chiang Date: Mon, 9 Mar 2026 19:44:17 -0500 Subject: [PATCH 52/69] mlmapmaker TOAST: Fix wrong timing log The timing of wrapping observation was including the entire time span of previous pass mapmaker CG time.
--- sotodlib/toast/ops/mlmapmaker.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/sotodlib/toast/ops/mlmapmaker.py b/sotodlib/toast/ops/mlmapmaker.py index da90dd198..037cdcaec 100644 --- a/sotodlib/toast/ops/mlmapmaker.py +++ b/sotodlib/toast/ops/mlmapmaker.py @@ -392,8 +392,11 @@ def _save_noise_model(self, mapmaker, nmat, nmat_file, gcomm): return @function_timer - def _wrap_obs(self, ob, dets, passinfo): + def _wrap_obs(self, ob, dets, passinfo, comm): """ Prepare data for the mapmaker """ + log = Logger.get() + timer = Timer() + timer.start() # Get the focalplane for this observation fp = ob.telescope.focalplane @@ -486,6 +489,11 @@ def _wrap_obs(self, ob, dets, passinfo): # >>> tod.boresight # AxisManager(az[samps], el[samps], roll[samps], samps:OffsetAxis(372680)) + log.info_rank( + f"MLMapmaker wrapped observations in", + comm=comm, + timer=timer, + ) return axobs @function_timer @@ -753,7 +761,7 @@ def _exec(self, data, detectors=None, **kwargs): nmat, nmat_file = self._load_noise_model(ob, npass, ipass, gcomm) # wrap_obs finishes in line 250 of make_ml_map.py, at the downsampling - axobs = self._wrap_obs(ob, dets, passinfo) + axobs = self._wrap_obs(ob, dets, passinfo, comm) if ipass > 0: # Evaluate the final model of the previous pass' mapmaker @@ -779,11 +787,6 @@ def _exec(self, data, detectors=None, **kwargs): if comm is not None: comm.barrier() - log.info_rank( - f"MLMapmaker wrapped observations in", - comm=comm, - timer=timer, - ) # _init_mapmaker covers lines 293-303 of make_ml_map.py x0 = self._init_mapmaker( From d774668ef6a8e7ee1e53a182ae3fc27f45854afc Mon Sep 17 00:00:00 2001 From: chervias Date: Tue, 10 Mar 2026 07:41:41 -0700 Subject: [PATCH 53/69] moving the sidelobe cuts before downsampling --- sotodlib/site_pipeline/make_ml_map.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index 
65de9854d..91cce42ab 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -365,17 +365,17 @@ class DataMissing(Exception): pass mapmaking.inject_map(obs, map_to_inject, recenter=recenter, interpol=args.interpol) utils.deslope(obs.signal, w=5, inplace=True) - if passinfo.downsample != 1: - obs = mapmaking.downsample_obs(obs, passinfo.downsample) - mmask = obs.flags.glitch_flags.mask() - L.debug(f"Datacount: {sub_id} added {obs.dets.count} {np.logical_not(mmask).sum()} ") - # sidelobes cuts if not args.no_sidelobe: cutss = sidelobes.get_cuts(obs, args, sidelobe_cutters) for cut in cutss: obs.flags.glitch_flags += cut + if passinfo.downsample != 1: + obs = mapmaking.downsample_obs(obs, passinfo.downsample) + mmask = obs.flags.glitch_flags.mask() + L.debug(f"Datacount: {sub_id} added {obs.dets.count} {np.logical_not(mmask).sum()} ") + # Maybe load precomputed noise model. # FIXME: How to handle multipass here? nmat_file = nmat_dir + "/nmat_%s.hdf" % name @@ -404,7 +404,7 @@ class DataMissing(Exception): pass print("Writing noise model %s" % nmat_file) utils.mkdir(nmat_dir) mapmaking.write_nmat(nmat_file, mapmaker.data[-1].nmat) - except (DataMissing,IndexError,ValueError,LoaderError) as e: + except (DataMissing,IndexError,ValueError,LoaderError,TypeError) as e: L.debug("Skipped %s (%s)" % (sub_id, str(e))) L.debug("Datacount: %s full" % (sub_id)) continue From da6c2586a2974b35bb88845b9c5d7d7ef905e3ab Mon Sep 17 00:00:00 2001 From: chervias Date: Tue, 10 Mar 2026 19:37:45 -0700 Subject: [PATCH 54/69] Fixing a bug removing the mpi pruning for now --- sotodlib/site_pipeline/make_ml_map.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index 91cce42ab..f99b9615e 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -89,6 +89,7 @@ class DataMissing(Exception): pass SITE = 
args.site.lower() verbose = args.verbose - args.quiet comm = mpi.COMM_WORLD + comm_size = comm.size shape, wcs = enmap.read_map_geometry(args.area) if args.downgrade > 1: @@ -215,7 +216,7 @@ class DataMissing(Exception): pass # To be able to distribute the tods sensibly, we need a rough estimate of where # on the sky each tod is. We should be able to get this using the central # ctime, az and el for each tod. - for ind in range(comm.rank, len(sub_ids), comm.size): + for ind in range(comm.rank, len(sub_ids), comm_size): # Detsets correspond to separate files, so treat them as separate TODs. sub_id = sub_ids[ind] obs_id, wafer, band = sub_id.split(":") @@ -415,13 +416,13 @@ class DataMissing(Exception): pass L.info("All tods failed. Giving up") sys.exit(1) - if np.any(nkept_all == 0): - if nkept == 0: - L.info("No tods assigned to this process. Pruning") - comm = mapmaking.prune_mpi(comm, np.where(nkept_all > 0)[0]) - for signal in mapmaker.signals: - if hasattr(signal, "comm"): - signal.comm = comm + #if np.any(nkept_all == 0): + # if nkept == 0: + # L.info("No tods assigned to this process. Pruning") + # comm = mapmaking.prune_mpi(comm, np.where(nkept_all > 0)[0]) + # for signal in mapmaker.signals: + # if hasattr(signal, "comm"): + # signal.comm = comm L.info("Done building") From cf2d411bf9f5e1e77dedb2c130527a880e2d6b36 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sigurd=20Kirkevold=20N=C3=A6ss?= Date: Mon, 16 Mar 2026 03:40:02 -0700 Subject: [PATCH 55/69] Fix TOD distribution Finally fix the missing TOD distribution. This ensures that each mpi task owns tods that are reasonably local on the sky. Without this, there's no point in doing tiled mapmaking, since every task would end up with every TOD would end up with every tile in its workspace. 
--- sotodlib/mapmaking/utils.py | 40 +++++++++++++++++++++++++++ sotodlib/site_pipeline/make_ml_map.py | 8 +++++- 2 files changed, 47 insertions(+), 1 deletion(-) diff --git a/sotodlib/mapmaking/utils.py b/sotodlib/mapmaking/utils.py index 399c93e93..45fe868cb 100644 --- a/sotodlib/mapmaking/utils.py +++ b/sotodlib/mapmaking/utils.py @@ -323,6 +323,18 @@ def get_subids_file(fname, context=None): sub_ids = expand_ids(sub_ids, context=context) return sub_ids +def get_obsinfo_subids(subids, context): + """Given a list of subids, return a ResultSet with one entry + from the obsdb for each subid. Ideally one would have this + from the original query that defined the subids, but since + subids are often read in directly from a file, we can't rely + on that. This function is a bit inefficient, since it starts + from the full obsdb and then looks up the subids from it""" + obsinfo = context.obsdb.query("1") + obsids = split_subids(subids)[0] + inds = putils.find(obsinfo["obs_id"], obsids) + return obsinfo.subset(rows=inds) + def expand_ids(obs_ids, context=None, bands=None): """Given a list of ids that are either obs_ids or sub_ids, expand any obs_ids into sub_ids and return the resulting list. @@ -764,6 +776,34 @@ def setup_passes(downsample="1", maxiter="500", interpol="nearest", npass=None): passes.append(entry) return passes +def distribute_tods_ra(obsinfo, nsplit, site="so", weather="typical"): + """For each row in obsinfo, assign it to one of nsplit categories + such that each category is as compact in RA as possible, and each + category has as similar size as possible. 
Assumes that all obs + are at the same site.""" + if len(obsinfo) <= nsplit: + return np.arange(nsplit)[:len(obsinfo)] + # Get a representative RA per entry + az = obsinfo["az_center"] * putils.degree + el = obsinfo["el_center"] * putils.degree + ctime = (obsinfo["start_time"] + obsinfo["stop_time"])/2 + ra = so3g.proj.CelestialSightLine.az_el(ctime, az, el, site=site, weather=weather).coords()[:,0] + # Put everything on the same copy of the sky + ra = putils.rewind_compact(ra) + # Partition sorted ras equally by weight. Would ideally include + # ndet too, but the effective ndet depends on the cuts etc. + order = np.argsort(ra) + weight= obsinfo["n_samples"][order] + cum = putils.cumsum(weight) + wtot = cum[-1]+weight[-1] # = np.sum(weight) + owner = np.zeros(len(obsinfo),int) + owner[order] = putils.floor(cum*nsplit/wtot) + # Make sure none ended up empty. If they did, fall back to unweighted, + # which will always succeed + if np.any(np.bincount(owner, minlength=nsplit) == 0): + owner[order] = np.arange(len(obsinfo))*nsplit/len(obsinfo) + return owner + Base = declarative_base() class AtomicInfo(Base): diff --git a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index f99b9615e..ec9bd9fd7 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -137,6 +137,11 @@ class DataMissing(Exception): pass sys.exit(1) L.info("Found %d tods" % (len(sub_ids))) + # Define our task distribution + obsinfo = mapmaking.get_obsinfo_subids(sub_ids, context) + owner = mapmaking.distribute_tods_ra(obsinfo, comm.size) # assumes site=so + myinds = np.where(owner==comm.rank)[0] + if args.inject: map_to_inject = enmap.read_map(args.inject).astype(dtype_map) @@ -211,12 +216,13 @@ class DataMissing(Exception): pass nkept = 0 to_skip_all = comm.allreduce(to_skip) + # TODO: Fix the task distribution. The current one doesn't care which mpi # task gets which tods, which sabotages the pixel-saving effects of tiled maps!
# To be able to distribute the tods sensibly, we need a rough estimate of where # on the sky each tod is. We should be able to get this using the central # ctime, az and el for each tod. - for ind in range(comm.rank, len(sub_ids), comm_size): + for ind in myinds: # Detsets correspond to separate files, so treat them as separate TODs. sub_id = sub_ids[ind] obs_id, wafer, band = sub_id.split(":") From 0807626a8956b5f95bc88af12f0ee09fac0f81b3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sigurd=20Kirkevold=20N=C3=A6ss?= Date: Mon, 16 Mar 2026 08:20:15 -0700 Subject: [PATCH 56/69] Configurable cut type in ML mapmaker Adds --cut-type argument to make_ml_map.py. Defaults to "full", which is the old behavior. The other valid argument is "poly", which uses ACT-style polynomial cuts. These use O(10x) less memory, but the SignalCut preconditioner may need to be tuned to ensure they converge properly. This commit aims to make it easy to test. Once testing and any tuning is done, then "poly" can be made the default. 
--- sotodlib/site_pipeline/make_ml_map.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index f99b9615e..715fc445c 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -40,6 +40,7 @@ def get_parser(parser=None): parser.add_argument( "--sun-mask", type=str, default="/global/cfs/cdirs/sobs/users/sigurdkn/masks/sidelobe/sun.fits", help="Location of Sun sidelobe mask") parser.add_argument( "--moon-mask", type=str, default="/global/cfs/cdirs/sobs/users/sigurdkn/masks/sidelobe/moon.fits", help="Location of Moon sidelobe mask") parser.add_argument("--hits", action="store_true", help="Write hits maps") + parser.add_argument("--cut-type", type=str, default="full") return parser sens_limits = {"f030":120, "f040":80, "f090":100, "f150":140, "f220":300, "f280":750} @@ -196,7 +197,7 @@ class DataMissing(Exception): pass elif args.nmat == "corr_dct": noise_model = mapmaking.NmatDetvecsDCT(verbose=verbose>1) else: raise ValueError("Unrecognized noise model '%s'" % args.nmat) - signal_cut = mapmaking.SignalCut(comm, dtype=dtype_tod) + signal_cut = mapmaking.SignalCut(comm, dtype=dtype_tod, cut_type=args.cut_type) signal_map = mapmaking.SignalMap(shape, wcs, comm, comps=comps, dtype=dtype_map, recenter=recenter, tiled=args.tiled>0, interpol=args.interpol) signals = [signal_cut, signal_map] if args.srcsamp: From 8652e2abec77ed7fea35e680c71712f0c6807a2b Mon Sep 17 00:00:00 2001 From: amaurea Date: Wed, 25 Mar 2026 06:36:12 +0100 Subject: [PATCH 57/69] Fix TOD distribution (#1579) Finally fix the missing TOD distribution. This ensures that each mpi task owns tods that are reasonably local on the sky. Without this, there's no point in doing tiled mapmaking, since every task would end up with every TOD would end up with every tile in its workspace. 
--- sotodlib/mapmaking/utils.py | 40 +++++++++++++++++++++++++++ sotodlib/site_pipeline/make_ml_map.py | 8 +++++- 2 files changed, 47 insertions(+), 1 deletion(-) diff --git a/sotodlib/mapmaking/utils.py b/sotodlib/mapmaking/utils.py index 399c93e93..45fe868cb 100644 --- a/sotodlib/mapmaking/utils.py +++ b/sotodlib/mapmaking/utils.py @@ -323,6 +323,18 @@ def get_subids_file(fname, context=None): sub_ids = expand_ids(sub_ids, context=context) return sub_ids +def get_obsinfo_subids(subids, context): + """Given a list of subids, return a ResultSet with one entry + from the obsdb for each subid. Ideally one would have this + from the original query that defined the subids, but since + subids are often read in directly from a file, we can't rely + on that. This function is a bit inefficient, since it starts + from the full obsdb and then looks up the subids from it""" + obsinfo = context.obsdb.query("1") + obsids = split_subids(subids)[0] + inds = putils.find(obsinfo["obs_id"], obsids) + return obsinfo.subset(rows=inds) + def expand_ids(obs_ids, context=None, bands=None): """Given a list of ids that are either obs_ids or sub_ids, expand any obs_ids into sub_ids and return the resulting list. @@ -764,6 +776,34 @@ def setup_passes(downsample="1", maxiter="500", interpol="nearest", npass=None): passes.append(entry) return passes +def distribute_tods_ra(obsinfo, nsplit, site="so", weather="typical"): + """For each row in obsinfo, assign it to one of nsplit categories + such that each category is as compact in RA as possible, and each + category has as similar size as possible. 
Assumes that all obs + are at the same site.""" + if len(obsinfo) <= nsplit: + return np.arange(nsplit)[:len(obsinfo)] + # Get a representative RA per entry + az = obsinfo["az_center"] * putils.degree + el = obsinfo["el_center"] * putils.degree + ctime = (obsinfo["start_time"] + obsinfo["stop_time"])/2 + ra = so3g.proj.CelestialSightLine.az_el(ctime, az, el, site=site, weather=weather).coords()[:,0] + # Put everything on the same copy of the sky + ra = putils.rewind_compact(ra) + # Partition sorted ras equally by weight. Would ideally include + # ndet too, but the effective ndet depends on the cuts etc. + order = np.argsort(ra) + weight= obsinfo["n_samples"][order] + cum = putils.cumsum(weight) + wtot = cum[-1]+weight[-1] # = np.sum(weight) + owner = np.zeros(len(obsinfo),int) + owner[order] = putils.floor(cum*nsplit/wtot) + # Make sure none ended up empty. If they did, fall back to unweighted, + # which will always succeed + if np.any(np.bincount(owner, minlength=nsplit) == 0): + owner[order] = np.arange(len(obsinfo))*nsplit/len(obsinfo) + return owner + Base = declarative_base() class AtomicInfo(Base): diff --git a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index f99b9615e..ec9bd9fd7 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -137,6 +137,11 @@ class DataMissing(Exception): pass sys.exit(1) L.info("Found %d tods" % (len(sub_ids))) + # Define our task distribution + obsinfo = mapmaking.get_obsinfo_subids(sub_ids, context) + owner = mapmaking.distribute_tods_ra(obsinfo, comm.size) # assumes site=so + myinds = np.where(owner==comm.rank)[0] + if args.inject: map_to_inject = enmap.read_map(args.inject).astype(dtype_map) @@ -211,12 +216,13 @@ class DataMissing(Exception): pass nkept = 0 to_skip_all = comm.allreduce(to_skip) + # TODO: Fix the task distribution. The current one doesn't care which mpi # task gets which tods, which sabotages the pixel-saving effects of tiled maps!
# To be able to distribute the tods sensibly, we need a rough estimate of where # on the sky each tod is. We should be able to get this using the central # ctime, az and el for each tod. - for ind in range(comm.rank, len(sub_ids), comm_size): + for ind in myinds: # Detsets correspond to separate files, so treat them as separate TODs. sub_id = sub_ids[ind] obs_id, wafer, band = sub_id.split(":") From 744f6e0937fe53d2b0fe1e69096312d4e2481b1d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sigurd=20Kirkevold=20N=C3=A6ss?= Date: Tue, 7 Apr 2026 06:23:22 -0700 Subject: [PATCH 58/69] Fix bug where mapmaker preconditioner would break if any observations are completely outside the geometry --- sotodlib/coords/pmat.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sotodlib/coords/pmat.py b/sotodlib/coords/pmat.py index b6ae2cb4f..f302b7265 100644 --- a/sotodlib/coords/pmat.py +++ b/sotodlib/coords/pmat.py @@ -235,7 +235,8 @@ def zeros(self, super_shape=None, comps=None): return proj.zeros(super_shape) elif self.pix_scheme == 'rectpix': if self.tiled: - return tilemap.from_tiles(proj.zeros(super_shape), self.geom) + # second super_shape here only needed when all tiles are empty + return tilemap.from_tiles(proj.zeros(super_shape), self.geom.copy(pre=super_shape)) else: return enmap.ndmap(proj.zeros(super_shape), wcs=self.geom.wcs) From 49808287dae4c391e92f8e2de4bb35f7d7cdc81c Mon Sep 17 00:00:00 2001 From: chervias Date: Tue, 7 Apr 2026 12:34:37 -0700 Subject: [PATCH 59/69] Some refactoring --- sotodlib/site_pipeline/make_ml_map.py | 97 +++++---------------------- 1 file changed, 15 insertions(+), 82 deletions(-) diff --git a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index aed7992ba..37be5b382 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -43,47 +43,20 @@ def get_parser(parser=None): parser.add_argument("--cut-type", type=str, default="full") return parser -sens_limits = 
{"f030":120, "f040":80, "f090":100, "f150":140, "f220":300, "f280":750} - -def measure_rms(tod, dt=1, bsize=32, nblock=10): - tod = tod[:,:tod.shape[1]//bsize*bsize] - tod = tod.reshape(tod.shape[0],-1,bsize) - bstep = max(1,tod.shape[1]//nblock) - tod = tod[:,::bstep,:][:,:nblock,:] - rms = np.median(np.std(tod,-1),-1) - # to µK√s units - rms *= dt**0.5 - return rms - -def sensitivity_cut(rms_uKrts, sens_lim, med_tol=0.2, max_lim=100): - # First reject detectors with unreasonably low noise - good = rms_uKrts >= sens_lim - # Also reject far too noisy detectors - good &= rms_uKrts < sens_lim*max_lim - # Then reject outliers - if np.sum(good) == 0: return good - ref = np.median(rms_uKrts[good]) - good &= rms_uKrts > ref*med_tol - good &= rms_uKrts < ref/med_tol - return good - def main(**args): - import numpy as np, sys, time, warnings, os, so3g - from sotodlib.core import Context, AxisManager, IndexAxis - from sotodlib import tod_ops, mapmaking, core - from sotodlib.tod_ops import filters - from sotodlib.coords import pointing_model, sidelobes + import sys, time, warnings, os, so3g, yaml + + from sotodlib import mapmaking, core + from sotodlib.coords import sidelobes from sotodlib.mapmaking import log from sotodlib.preprocess import preprocess_util as pp_util - from sotodlib.core import metadata - from pixell import enmap, utils, fft, bunch, wcsutils, mpi, bench - import yaml + from sotodlib.site_pipeline.utils import depth1_utils as d1u + from sotodlib.site_pipeline.utils.config import _get_config - LoaderError = metadata.loader.LoaderError + from pixell import enmap, utils, fft, bunch, wcsutils, mpi, bench - #try: import moby2.analysis.socompat - #except ImportError: warnings.warn("Can't import moby2.analysis.socompat. 
ACT data input probably won't work") - class DataMissing(Exception): pass + LoaderError = d1u.LoaderError + DataMissing = d1u.DataMissing warnings.simplefilter('ignore') args = bunch.Bunch(**args) @@ -118,7 +91,7 @@ class DataMissing(Exception): pass recenter = mapmaking.parse_recentering(args.center_at) with bench.mark('context'): - context = Context(args.context) + context = core.Context(args.context) ots = args.ots.split(",") if args.ots else None wafers = args.wafers.split(",") if args.wafers else None @@ -151,7 +124,7 @@ class DataMissing(Exception): pass # set up the preprocessing try: - preproc = yaml.safe_load(open(args.preprocess_config, 'r')) + preproc = _get_config(args.preprocess_config) except: if comm.rank==0: L.info(f"{args.preprocess_config} is not a valid config") @@ -297,11 +270,11 @@ class DataMissing(Exception): pass # Optionally skip all the calibration. Useful for sims. if not args.nocal: # measure rms - rms = measure_rms(obs.signal, dt=1/srate) + rms = d1u.measure_rms(obs.signal, dt=1/srate) if args.unit=='K': - good = sensitivity_cut(rms*1e6, sens_limits[band]) + good = d1u.sensitivity_cut(rms*1e6, d1u.SENS_LIMITS[band]) elif args.unit == 'uK': - good = sensitivity_cut(rms, sens_limits[band]) + good = d1u.sensitivity_cut(rms, d1u.SENS_LIMITS[band]) #nrms = np.sum(good) # Cut detectors with too big a fraction of samples cut, # or cuts occuring too often. @@ -317,18 +290,12 @@ class DataMissing(Exception): pass #good = dev.get(good) # cuts, dets, fplane etc. 
need this on the cpu #cuts = cuts [good] if np.logical_not(good).sum() / obs.dets.count > 0.5: - L.debug("Skipped %s (more than 50pc of detectors cut by sens)" % (sub_id)) + L.debug("Skipped %s (more than 50 percent of detectors cut by sens)" % (sub_id)) L.debug("Datacount: %s full" % (sub_id)) to_skip += [sub_id] continue else: obs.restrict("dets", good) - # Apply pointing model - #pointing_model.apply_pointing_model(obs) - # Calibrate to pW - #obs.signal = np.multiply(obs.signal.T, obs.det_cal.phase_to_pW).T - # Calibrate to K_cmb - #obs.signal = np.multiply(obs.signal.T, obs.abscal.abscal_cmb).T # Disqualify overly cut detectors good_dets = mapmaking.find_usable_detectors(obs, args.maxcut) obs.restrict("dets", good_dets) @@ -337,32 +304,6 @@ class DataMissing(Exception): pass L.debug("Skipped %s (all dets cut)" % (sub_id)) L.debug("Datacount: %s full" % (sub_id)) continue - # Gapfill glitches. This function name isn't the clearest - #tod_ops.get_gap_fill(obs, flags=obs.flags.glitch_flags, swap=True) - # Gain calibration - #gain = 1 - #for gtype in ["relcal","abscal"]: - # gain *= obs[gtype][:,None] - #obs.signal *= gain - # Fourier-space calibration - #fsig = fft.rfft(obs.signal) - #freq = fft.rfftfreq(obs.samps.count, 1/srate) - # iir and timeconstant will be applied in the preprocessing eventually - # iir filter - #iir_filter = filters.iir_filter()(freq, obs) - #fsig /= iir_filter - #gain /= iir_filter[0].real # keep track of total gain for our record - #fsig /= filters.timeconst_filter(timeconst = obs.det_cal.tau_eff)(freq, obs) - #fft.irfft(fsig, obs.signal, normalize=True) - #del fsig - - # Apply pointing correction. 
- #obs.focal_plane.xi += obs.boresight_offset.xi - #obs.focal_plane.eta += obs.boresight_offset.eta - #obs.focal_plane.gamma += obs.boresight_offset.gamma - #obs.focal_plane.xi += obs.boresight_offset.dx - #obs.focal_plane.eta += obs.boresight_offset.dy - #obs.focal_plane.gamma += obs.boresight_offset.gamma # Injecting at this point makes us insensitive to any bias introduced # in the earlier steps (mainly from gapfilling). The alternative is @@ -422,14 +363,6 @@ class DataMissing(Exception): pass if comm.rank == 0: L.info("All tods failed. Giving up") sys.exit(1) - - #if np.any(nkept_all == 0): - # if nkept == 0: - # L.info("No tods assigned to this process. Pruning") - # comm = mapmaking.prune_mpi(comm, np.where(nkept_all > 0)[0]) - # for signal in mapmaker.signals: - # if hasattr(signal, "comm"): - # signal.comm = comm L.info("Done building") From 5ec48b8d13a1865bd7f36ce66c1fb185a86b50b5 Mon Sep 17 00:00:00 2001 From: chervias Date: Thu, 9 Apr 2026 12:15:49 -0700 Subject: [PATCH 60/69] addressing some of Giannis comments --- sotodlib/mapmaking/utils.py | 2 +- sotodlib/site_pipeline/make_ml_map.py | 17 ++++++----------- 2 files changed, 7 insertions(+), 12 deletions(-) diff --git a/sotodlib/mapmaking/utils.py b/sotodlib/mapmaking/utils.py index 329fc9b48..a2667018a 100644 --- a/sotodlib/mapmaking/utils.py +++ b/sotodlib/mapmaking/utils.py @@ -505,7 +505,7 @@ def get_pos(name, ctime, sys=None): try: planet = coords.planets.SlowSource.for_named_source( name, ctime) - ra0, dec0 = planet.pos(tod.timestamps.mean()) + ra0, dec0 = planet.pos(np.mean(ctime)) except: obj = getattr(ephem, name)() djd = ctime/86400 + 40587.0 + 2400000.5 - 2415020 diff --git a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index 37be5b382..317f6378b 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -44,16 +44,15 @@ def get_parser(parser=None): return parser def main(**args): - import sys, time, warnings, os, so3g, 
yaml + import sys, time, warnings, os, so3g from sotodlib import mapmaking, core from sotodlib.coords import sidelobes - from sotodlib.mapmaking import log from sotodlib.preprocess import preprocess_util as pp_util from sotodlib.site_pipeline.utils import depth1_utils as d1u from sotodlib.site_pipeline.utils.config import _get_config - from pixell import enmap, utils, fft, bunch, wcsutils, mpi, bench + from pixell import enmap, utils, bunch, wcsutils, mpi, bench LoaderError = d1u.LoaderError DataMissing = d1u.DataMissing @@ -113,7 +112,7 @@ def main(**args): # Define our task distribution obsinfo = mapmaking.get_obsinfo_subids(sub_ids, context) - owner = mapmaking.distribute_tods_ra(obsinfo, comm.size) # assumes site=so + owner = mapmaking.distribute_tods_ra(obsinfo, comm.size, site=SITE) myinds = np.where(owner==comm.rank)[0] if args.inject: @@ -191,11 +190,6 @@ def main(**args): nkept = 0 to_skip_all = comm.allreduce(to_skip) - # TODO: Fix the task distribution. The current one doesn't care which mpi - # task gets which tods, which sabotages the pixel-saving effects of tiled maps! - # To be able to distribute the tods sensibly, we need a rough estimate of where - # on the sky each tod is. We should be able to get this using the central - # ctime, az and el for each tod. for ind in myinds: # Detsets correspond to separate files, so treat them as separate TODs. sub_id = sub_ids[ind] @@ -358,8 +352,9 @@ def main(**args): L.debug("Datacount: %s full" % (sub_id)) continue - nkept_all = np.array(comm.allgather(nkept)) - if np.sum(nkept_all) == 0: + nkept_all = np.array([0],dtype='i') + comm.Allreduce(np.array([nkept],dtype='i'), nkept_all, op=mpi.SUM) + if nkept_all[0] == 0: if comm.rank == 0: L.info("All tods failed. 
Giving up") sys.exit(1) From d00592a43c374eb4eab16c303e44506d496d2efa Mon Sep 17 00:00:00 2001 From: chervias Date: Thu, 16 Apr 2026 09:13:27 -0700 Subject: [PATCH 61/69] Addressing more comments by Giannis --- sotodlib/mapmaking/utils.py | 24 ------------------- sotodlib/site_pipeline/make_ml_map.py | 34 +++++---------------------- 2 files changed, 6 insertions(+), 52 deletions(-) diff --git a/sotodlib/mapmaking/utils.py b/sotodlib/mapmaking/utils.py index a2667018a..ff27a026c 100644 --- a/sotodlib/mapmaking/utils.py +++ b/sotodlib/mapmaking/utils.py @@ -886,27 +886,3 @@ def atomic_db_aux(atomic_db, info: list[AtomicInfo]): session.commit() except exc.IntegrityError: session.rollback() - - -def prune_mpi(comm, ranks_to_keep): - """ - Prune unneeded MPI procs. - - Arguments: - - comm: The MPI communicator currently in use. - - ranks_to_keep: List of current ranks to keep in the new communicator. - - Returns: - - comm: Modified communicator with only the processes we want to keep. - """ - group = comm.Get_group() - new_group = group.Incl(ranks_to_keep) - new_comm = comm.Create(new_group) - if comm.rank not in ranks_to_keep: - sys.exit(0) - comm = new_comm - - return comm diff --git a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index 317f6378b..9ae994b89 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -62,7 +62,6 @@ def main(**args): SITE = args.site.lower() verbose = args.verbose - args.quiet comm = mpi.COMM_WORLD - comm_size = comm.size shape, wcs = enmap.read_map_geometry(args.area) if args.downgrade > 1: @@ -72,8 +71,6 @@ def main(**args): # we risk adding information in MPI due to reconstruction, and that # can cause is_compatible failures. wcs = wcsutils.WCS(wcs.to_header()) - # Set shape to None to allow map to fit these TODs exactly. 
- #shape = None comps = args.comps ncomp = len(comps) @@ -220,19 +217,17 @@ def main(**args): to_skip += [sub_id] continue # Check nans - mask = np.logical_not(np.isfinite(obs.signal)) - if mask.sum() > 0: - L.debug("Skipped %s (a nan in signal)" % (sub_id)) + if not np.all(np.isfinite(obs.signal)): + L.debug("Skipped %s (there is a nan in signal)" % (sub_id)) L.debug("Datacount: %s full" % (sub_id)) to_skip += [sub_id] continue # Check all 0s - zero_dets = np.sum(obs.signal, axis=1) - mask = zero_dets == 0.0 - if mask.any(): + zero_dets = np.sum(obs.signal, axis=1) # sum the signal across the samples + if np.any(zero_dets == 0.0): L.debug("%s has all 0s in at least 1 detector" % (sub_id)) - obs.restrict('dets', obs.dets.vals[np.logical_not(mask)]) - # Cut non-optical dets + obs.restrict('dets', obs.dets.vals[np.logical_not(zero_dets == 0.0)]) + # Cut non-optical dets, this will be redundant if the preprocessing already cut them obs.restrict('dets', obs.dets.vals[obs.det_info.wafer.type == 'OPTC']) # Fix boresight mapmaking.fix_boresight_glitches(obs) @@ -243,9 +238,6 @@ def main(**args): obs.wrap("weather", np.full(1, "typical")) obs.wrap("site", np.full(1, SITE)) - # Prepare our data. FFT-truncate for faster fft ops - #obs.restrict("samps", [0, fft.fft_len(obs.samps.count)]) - # Desolope to make it periodic. This should be done *before* # dropping to single precision, to avoid unnecessary loss of precision due # to potential high offses in the raw tod. @@ -269,20 +261,6 @@ def main(**args): good = d1u.sensitivity_cut(rms*1e6, d1u.SENS_LIMITS[band]) elif args.unit == 'uK': good = d1u.sensitivity_cut(rms, d1u.SENS_LIMITS[band]) - #nrms = np.sum(good) - # Cut detectors with too big a fraction of samples cut, - # or cuts occuring too often. 
- #cuts = obs.flags.glitch_flags.mask() - #cutfrac = cuts.sum()/np.prod(cuts.shape) - #cutdens = (cuts.bins[:,1]-cuts.bins[:,0])/cuts.nsamp - #good &= np.array((cutfrac < 0.1)) - #ndens = dev.np.sum(good) - # Cut all detectors if too large a fraction is cut - #good &= dev.np.sum(good)/meta.ndet_full > 0.25 - #nfinal = dev.np.sum(good) - #signal = dev.np.ascontiguousarray(signal[good]) # 600 ms! - #good = dev.get(good) # cuts, dets, fplane etc. need this on the cpu - #cuts = cuts [good] if np.logical_not(good).sum() / obs.dets.count > 0.5: L.debug("Skipped %s (more than 50 percent of detectors cut by sens)" % (sub_id)) L.debug("Datacount: %s full" % (sub_id)) From 0f7ea3456c03f8723d56a0b3b70114e80c0e6374 Mon Sep 17 00:00:00 2001 From: Saianeesh Keshav Haridas Date: Thu, 16 Apr 2026 15:00:05 -0400 Subject: [PATCH 62/69] fix: small fixes from giannis's review --- sotodlib/mapmaking/utils.py | 2 +- sotodlib/site_pipeline/make_ml_map.py | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/sotodlib/mapmaking/utils.py b/sotodlib/mapmaking/utils.py index ff27a026c..a81359f04 100644 --- a/sotodlib/mapmaking/utils.py +++ b/sotodlib/mapmaking/utils.py @@ -595,7 +595,7 @@ def rangemat_sum(rangemat): def find_usable_detectors(obs, maxcut=0.1, glitch_flags: str = "flags.glitch_flags", to_null : str = "flags.expected_flags"): flag = obs[glitch_flags] - if to_null != "" and to_null in obs._fields: + if to_null and to_null in obs: flag = flag*~obs[to_null] ncut = rangemat_sum(flag) good = ncut < obs.samps.count * maxcut diff --git a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index a2076b498..89055dd7b 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -185,7 +185,6 @@ def main(**args): sidelobe_cutters = {} nkept = 0 - to_skip_all = comm.allreduce(to_skip) for ind in myinds: # Detsets correspond to separate files, so treat them as separate TODs. 
@@ -193,7 +192,7 @@ def main(**args): obs_id, wafer, band = sub_id.split(":") name = sub_id.replace(":", "_") L.debug("Processing %s" % sub_id) - if sub_id in to_skip_all: + if sub_id in to_skip: L.debug("Skipped %s (Cut in previous pass)" % (sub_id)) continue From 9097e2dcdf9c9590320b36a2ce743876189669b8 Mon Sep 17 00:00:00 2001 From: chervias Date: Thu, 16 Apr 2026 13:18:52 -0700 Subject: [PATCH 63/69] more of Giannis comments --- sotodlib/mapmaking/utils.py | 6 +++--- sotodlib/site_pipeline/make_ml_map.py | 15 +++++++++++---- 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/sotodlib/mapmaking/utils.py b/sotodlib/mapmaking/utils.py index a81359f04..9d0b5c35f 100644 --- a/sotodlib/mapmaking/utils.py +++ b/sotodlib/mapmaking/utils.py @@ -330,10 +330,10 @@ def get_obsinfo_subids(subids, context): subids are often read in directly from a file, we can't rely on that. This function is a bit inefficient, since it starts from the full obsdb and then looks up the subids from it""" - obsinfo = context.obsdb.query("1") obsids = split_subids(subids)[0] - inds = putils.find(obsinfo["obs_id"], obsids) - return obsinfo.subset(rows=inds) + ids_str = ",".join(f"'{subid}'" for subid in obsids) + rs = context.obsdb.query("obs_id IN (%s)" % ids_str) + return rs def expand_ids(obs_ids, context=None, bands=None): """Given a list of ids that are either obs_ids or sub_ids, expand any obs_ids diff --git a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index 89055dd7b..59b217c3c 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -1,4 +1,5 @@ import numpy as np +import sys def get_parser(parser=None): # a config file to pass all parameters is pending @@ -43,8 +44,14 @@ def get_parser(parser=None): parser.add_argument("--cut-type", type=str, default="full") return parser +def exit(comm, code): + if comm is not None: # This check might need an expansion + comm.Abort(code) + else: + sys.exit(code) + 
def main(**args): - import sys, time, warnings, os, so3g + import time, warnings, os, so3g from sotodlib import mapmaking, core from sotodlib.coords import sidelobes @@ -104,7 +111,7 @@ def main(**args): if len(sub_ids) == 0: if comm.rank == 0: print("No tods found!") - sys.exit(1) + exit(comm, 1) L.info("Found %d tods" % (len(sub_ids))) # Define our task distribution @@ -124,7 +131,7 @@ def main(**args): except: if comm.rank==0: L.info(f"{args.preprocess_config} is not a valid config") - sys.exit(1) + exit(comm, 1) passes = mapmaking.setup_passes(downsample=args.downsample, maxiter=args.maxiter, interpol=args.interpol) to_skip = [] @@ -334,7 +341,7 @@ def main(**args): if nkept_all[0] == 0: if comm.rank == 0: L.info("All tods failed. Giving up") - sys.exit(1) + exit(1) L.info("Done building") From 855b937ea68a94969f1a481a80f6bba336f79a9e Mon Sep 17 00:00:00 2001 From: chervias Date: Thu, 16 Apr 2026 13:48:44 -0700 Subject: [PATCH 64/69] even more of Giannis comments --- sotodlib/mapmaking/utils.py | 43 ++++++++++++++++++------------------- 1 file changed, 21 insertions(+), 22 deletions(-) diff --git a/sotodlib/mapmaking/utils.py b/sotodlib/mapmaking/utils.py index 9d0b5c35f..9acd09c34 100644 --- a/sotodlib/mapmaking/utils.py +++ b/sotodlib/mapmaking/utils.py @@ -782,28 +782,27 @@ def distribute_tods_ra(obsinfo, nsplit, site="so", weather="typical"): such that each category is as compact in RA as possible, and each category has as similar size as possible. 
Assumes that all obs are at the same site.""" - if len(obsinfo) <= nsplit: - return np.arange(nsplit)[:len(obsinfo)] - # Get a reprsentative RA per entry - az = obsinfo["az_center"] * putils.degree - el = obsinfo["el_center"] * putils.degree - ctime = (obsinfo["start_time"] + obsinfo["stop_time"])/2 - ra = so3g.proj.CelestialSightLine.az_el(ctime, az, el, site=site, weather=weather).coords()[:,0] - # Put everything on the same copy of the sky - ra = putils.rewind_compact(ra) - # Partition sorted ras equally by weight. Would ideally include - # ndet too, but the effective ndet depends on the cuts etc. - order = np.argsort(ra) - weight= obsinfo["n_samples"][order] - cum = putils.cumsum(weight) - wtot = cum[-1]+weight[-1] # = np.sum(weight) - owner = np.zeros(len(obsinfo),int) - owner[order] = putils.floor(cum*nsplit/wtot) - # Make sure none ended up empty. If they did, fall back to unweighted, - # which will always succeed - if np.any(np.bincount(owner, minlength=nsplit) == 0): - owner[order] = np.arange(len(obsinfo))*nsplit/len(obsinfo) - return owner + owners = np.arange(nsplit,dtype=int)[:len(obsinfo)] + if len(obsinfo) > nsplit: + # Get a reprsentative RA per entry + az = obsinfo["az_center"] * putils.degree + el = obsinfo["el_center"] * putils.degree + ctime = (obsinfo["start_time"] + obsinfo["stop_time"])/2 + ra = so3g.proj.CelestialSightLine.az_el(ctime, az, el, site=site, weather=weather).coords()[:,0] + # Put everything on the same copy of the sky + ra = putils.rewind_compact(ra) + # Partition sorted ras equally by weight. Would ideally include + # ndet too, but the effective ndet depends on the cuts etc. + order = np.argsort(ra) + weight= obsinfo["n_samples"][order] + cum = putils.cumsum(weight) + wtot = cum[-1]+weight[-1] # = np.sum(weight) + owners[order] = putils.floor(cum*nsplit/wtot) + # Make sure none ended up empty. 
If they did, fall back to unweighted, + # which will always succeed + if np.any(np.bincount(owners, minlength=nsplit) == 0): + owners[order] = np.arange(len(obsinfo))*nsplit/len(obsinfo) + return owners Base = declarative_base() From 3fad9c3bec7156f5bee300f1c621404b2a1c93e2 Mon Sep 17 00:00:00 2001 From: Carlos Hervias Date: Tue, 21 Apr 2026 21:30:04 -0400 Subject: [PATCH 65/69] Changing default window to 2 s from 0 --- sotodlib/site_pipeline/make_ml_map.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index 59b217c3c..e007fdae1 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -24,7 +24,7 @@ def get_parser(parser=None): parser.add_argument("-v", "--verbose", action="count", default=0) parser.add_argument("-q", "--quiet", action="count", default=0) parser.add_argument("-@", "--center-at", type=str, default=None) - parser.add_argument("-w", "--window", type=float, default=0.0) + parser.add_argument("-w", "--window", type=float, default=2.0) parser.add_argument("-i", "--inject", type=str, default=None, help="Path to map to inject. Equatorial coordinates") parser.add_argument( "--nocal", action="store_true", help="Disable calibration. Useful for sims") parser.add_argument( "--nmat-dir", type=str, default="{odir}/nmats") From 61b734085255aef352893e65c7b031c5b792ca09 Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Wed, 22 Apr 2026 10:03:07 -0400 Subject: [PATCH 66/69] feat: adding some unittests (#1617) --- tests/test_mapmaking_utils.py | 103 ++++++++++++++++++++++++++++++++++ 1 file changed, 103 insertions(+) create mode 100644 tests/test_mapmaking_utils.py diff --git a/tests/test_mapmaking_utils.py b/tests/test_mapmaking_utils.py new file mode 100644 index 000000000..5074d7202 --- /dev/null +++ b/tests/test_mapmaking_utils.py @@ -0,0 +1,103 @@ +# Copyright (c) 2023-2025 Simons Observatory. 
+# Full license can be found in the top level "LICENSE" file. + +import numpy as np +import so3g + +from sotodlib import core +from sotodlib.mapmaking.utils import filter_subids, find_usable_detectors + +from ._helpers import quick_tod + +# --------------------------------------------------------------------------- +# filter_subids +# --------------------------------------------------------------------------- + +SUBIDS = np.array([ + "obs_1750770479_satp3_1111111:ws3:f090", + "obs_1750774090_satp3_1111111:ws0:f150", + "obs_1750777688_satp3_1111011:ws0:f090", + "obs_1750781294_satp3_1110111:ws0:f150", + "obs_1750785970_satp3_1000000:ws0:f090" +]) + + +def test_filter_subids_no_filter(): + result = filter_subids(SUBIDS) + np.testing.assert_array_equal(result, SUBIDS) + + +def test_filter_subids_by_wafer(): + result = filter_subids(SUBIDS, wafers=["ws0"]) + assert all(":ws0:" in s for s in result) + assert len(result) == 4 + + +def test_filter_subids_by_band(): + result = filter_subids(SUBIDS, bands=["f090"]) + assert all(s.endswith("f090") for s in result) + assert len(result) == 3 + + +def test_filter_subids_by_ot(): + result = filter_subids(SUBIDS, ots=["satp3"]) + assert all("satp3" in s for s in result) + assert len(result) == 5 + + +def test_filter_subids_no_match(): + result = filter_subids(SUBIDS, wafers=["ws99"]) + assert len(result) == 0 + + +# --------------------------------------------------------------------------- +# find_usable_detectors +# --------------------------------------------------------------------------- + +def _add_glitch_flags(tod, cut_ranges_per_det): + """Add flags.glitch_flags to a quick_tod AxisManager. + + cut_ranges_per_det: list of lists of [start, end] pairs, one per detector. 
+ """ + n_dets = tod.dets.count + n_samps = tod.samps.count + ranges = [] + for i in range(n_dets): + r = cut_ranges_per_det[i] + if r: + ri = so3g.RangesInt32.from_array( + np.array(r, dtype=np.int32).reshape(-1, 2), n_samps + ) + else: + ri = so3g.RangesInt32(n_samps) + ranges.append(ri) + glitch_flags = so3g.proj.ranges.RangesMatrix(ranges) + flags_aman = core.AxisManager(tod.dets, tod.samps) + flags_aman.wrap("glitch_flags", glitch_flags, [(0, "dets"), (1, "samps")]) + tod.wrap("flags", flags_aman) + return tod + + +def test_find_usable_detectors_all_good(): + tod = quick_tod(3, 1000) + tod = _add_glitch_flags(tod, [[], [], []]) + result = find_usable_detectors(tod) + np.testing.assert_array_equal(result, np.array(tod.dets.vals)) + + +def test_find_usable_detectors_one_cut(): + # det0001 has 20 % of samples flagged → removed with default maxcut=0.1 + tod = quick_tod(3, 1000) + tod = _add_glitch_flags(tod, [[], [[0, 200]], []]) + result = find_usable_detectors(tod) + assert tod.dets.vals[1] not in result + assert tod.dets.vals[0] in result + assert tod.dets.vals[2] in result + + +def test_find_usable_detectors_maxcut_threshold(): + # det0: 5 % flagged — kept at maxcut=0.1, removed at maxcut=0.04 + tod = quick_tod(2, 1000) + tod = _add_glitch_flags(tod, [[[0, 50]], []]) + assert tod.dets.vals[0] in find_usable_detectors(tod, maxcut=0.1) + assert tod.dets.vals[0] not in find_usable_detectors(tod, maxcut=0.04) From 1705c271e83a6952be6fb2c1ea281de249e3a201 Mon Sep 17 00:00:00 2001 From: chervias Date: Wed, 22 Apr 2026 12:55:17 -0700 Subject: [PATCH 67/69] reverting back the changes on distribute_tods_ra, there was a bug. 
Thanks to Bai-Chiang for finding this --- sotodlib/mapmaking/utils.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/sotodlib/mapmaking/utils.py b/sotodlib/mapmaking/utils.py index 9acd09c34..76cdcd73d 100644 --- a/sotodlib/mapmaking/utils.py +++ b/sotodlib/mapmaking/utils.py @@ -782,8 +782,9 @@ def distribute_tods_ra(obsinfo, nsplit, site="so", weather="typical"): such that each category is as compact in RA as possible, and each category has as similar size as possible. Assumes that all obs are at the same site.""" - owners = np.arange(nsplit,dtype=int)[:len(obsinfo)] - if len(obsinfo) > nsplit: + if len(obsinfo) <= nsplit: + return np.arange(nsplit)[:len(obsinfo)] + else: # Get a reprsentative RA per entry az = obsinfo["az_center"] * putils.degree el = obsinfo["el_center"] * putils.degree @@ -797,12 +798,13 @@ def distribute_tods_ra(obsinfo, nsplit, site="so", weather="typical"): weight= obsinfo["n_samples"][order] cum = putils.cumsum(weight) wtot = cum[-1]+weight[-1] # = np.sum(weight) + owners = np.zeros(len(obsinfo),int) owners[order] = putils.floor(cum*nsplit/wtot) # Make sure none ended up empty. If they did, fall back to unweighted, # which will always succeed if np.any(np.bincount(owners, minlength=nsplit) == 0): owners[order] = np.arange(len(obsinfo))*nsplit/len(obsinfo) - return owners + return owners Base = declarative_base() From c8afb141cf59cca6bbe0741e0ab0fe99b79156d8 Mon Sep 17 00:00:00 2001 From: amaurea Date: Thu, 23 Apr 2026 17:40:44 +0200 Subject: [PATCH 68/69] Fix nmat (#1615) * Better control over map vs. tod units * Fix implementation of Jon's noise model. The step where previous eigenvectors were projected out from the covmat used the wrong formula. Also simplified the internal normalization, though this doesn't change anything. Added NmatDebug too. Sotodlib and sogma still don't match when both use Jon's noise model. 
Currently sotodlib looks quite a bit better --- sotodlib/mapmaking/noise_model.py | 75 ++++++++++++++++++++------- sotodlib/mapmaking/utils.py | 9 ++-- sotodlib/site_pipeline/make_ml_map.py | 17 +++--- 3 files changed, 74 insertions(+), 27 deletions(-) diff --git a/sotodlib/mapmaking/noise_model.py b/sotodlib/mapmaking/noise_model.py index 73165602d..b1334954b 100644 --- a/sotodlib/mapmaking/noise_model.py +++ b/sotodlib/mapmaking/noise_model.py @@ -111,6 +111,45 @@ def write(self, fname): def from_bunch(data): return NmatUncorr(spacing=data.spacing, nbin=data.nbin, nmin=data.nmin, bins=data.bins, ips_binned=data.ips_binned, ivar=data.ivar, window=data.window, nwin=data.nwin) +class NmatDebug(Nmat): + def __init__(self, ivar=None, alpha=-3, fknee=2.5, profile=None, bsize=256, downweight=1): + self.bsize = bsize + self.ivar = ivar + self.alpha = alpha + self.fknee = fknee + self.profile = profile + self.downweight = downweight + self.nwin = 0 + self.ready = ivar is not None + def build(self, tod, srate, **kwargs): + nsamp = tod.shape[1] + nblock = nsamp//self.bsize + var = np.median(np.var(tod[:,:nblock*self.bsize].reshape(-1,nblock,self.bsize),-1),-1) + with utils.nowarn(): + ivar = utils.without_nan(1/var)*self.downweight + f = np.fft.rfftfreq(nsamp, 1/srate) + with utils.nowarn(): + profile = 1/(1+(f/self.fknee)**self.alpha) + profile /= nsamp # fft normalization + return NmatDebug(ivar=ivar, alpha=self.alpha, fknee=self.fknee, bsize=self.bsize, profile=profile, downweight=self.downweight) + def apply(self, tod, inplace=True): + tod = self.white(tod, inplace=inplace) + ft = np.empty((tod.shape[0],tod.shape[1]//2+1),utils.complex_dtype(tod.dtype)) + fft.rfft(tod, ft) + ft *= self.profile + fft.irfft(ft, tod) + return tod + def white(self, tod, inplace=True, nwin=None): + if not inplace: tod = tod.copy() + tod *= self.ivar[:,None] + return tod + def write(self, fname): + self.check_ready() + data = bunch.Bunch(type="NmatDebug") + for field in ["ivar", "alpha", 
"fknee", "profile", "bsize", "downweight"]: + data[field] = getattr(self, field) + bunch.write(fname, data) + class NmatDetvecs(Nmat): def __init__(self, bin_edges=None, eig_lim=16, single_lim=0.55, mode_bins=[0.25,4.0,20], downweight=[], window=2, nwin=None, verbose=False, bins=None, @@ -174,7 +213,7 @@ def build_fourier(self, ftod, nsamp, srate, nwin=0, **kwargs): b = np.maximum(1,b) E[bi], D[bi], Nd[bi] = measure_detvecs(ftod[:,b[0]:b[1]], vecs) # Optionally downweight the lowest frequency bins - if self.downweight != None and len(self.downweight) > 0: + if self.downweight is not None and len(self.downweight) > 0: D[:len(self.downweight)] /= np.array(self.downweight)[:,None] # Instead of VEV' we can have just VV' if we bake sqrt(E) into V V = vecs[None]*E[:,None]**0.5 @@ -187,11 +226,10 @@ def build_fourier(self, ftod, nsamp, srate, nwin=0, **kwargs): # Also compute a representative white noise level bsize = bins[:,1]-bins[:,0] ivar = np.sum(iD*bsize[:,None],0)/np.sum(bsize) - # What about units? I haven't applied any fourier unit factors so far, - # so we're in plain power units. From the uncorrelated model I found - # that factor of tod.shape[1] is needed - iD *= nsamp - iV *= nsamp**0.5 + # I originally normalized iD and iV here, but then undid that normalization + # when actually applying the model. 
Let's just skip that + #iD *= nsamp + #iV *= nsamp**0.5 ivar *= nsamp # Fix dtype bins = np.ascontiguousarray(bins.astype(np.int32)) @@ -221,11 +259,12 @@ def apply_fourier(self, ftod, nsamp, slow=False): for bi, b in enumerate(self.bins): # Want to multiply by iD + siViV' ft = ftod[:,b[0]:b[1]] - iD = self.iD[bi]/nsamp - iV = self.iV[bi]/nsamp**0.5 + iD = self.iD[bi] #/nsamp + iV = self.iV[bi] #/nsamp**0.5 ft[:] = iD[:,None]*ft + self.s*iV.dot(iV.T.dot(ft)) else: - so3g.nmat_detvecs_apply(ftod.view(dtype), self.bins, self.iD, self.iV, float(self.s), float(nsamp)) + # using normalization 1 instead of nsamp, since I didn't multiply by nsamp earlier + so3g.nmat_detvecs_apply(ftod.view(dtype), self.bins, self.iD, self.iV, float(self.s), float(1)) # I divided by the normalization above instead of passing normalize=True # here to reduce the number of operations needed @@ -441,7 +480,7 @@ def build_fourier(self, ftod, nsamp, srate, **kwargs): b = np.maximum(1,b) E[bi], D[bi], Nd[bi] = measure_detvecs(ftod[:,b[0]:b[1]], vecs) # Optionally downweight the lowest frequency bins - if self.downweight != None and len(self.downweight) > 0: + if self.downweight is not None and len(self.downweight) > 0: D[:len(self.downweight)] /= np.array(self.downweight)[:,None] # Instead of VEV' we can have just VV' if we bake sqrt(E) into V V = vecs[None]*E[:,None]**0.5 @@ -454,11 +493,10 @@ def build_fourier(self, ftod, nsamp, srate, **kwargs): # Also compute a representative white noise level bsize = bins[:,1]-bins[:,0] ivar = np.sum(iD*bsize[:,None],0)/np.sum(bsize) - # What about units? I haven't applied any fourier unit factors so far, - # so we're in plain power units. From the uncorrelated model I found - # that factor of tod.shape[1] is needed - iD *= nsamp - iV *= nsamp**0.5 + # I originally normalized iD and iV here, but then undid that normalization + # when actually applying the model. 
Let's just skip that + #iD *= nsamp + #iV *= nsamp**0.5 ivar *= nsamp # Fix dtype bins = np.ascontiguousarray(bins.astype(np.int32)) @@ -486,11 +524,12 @@ def apply_fourier(self, ftod, nsamp, slow=False): for bi, b in enumerate(self.bins): # Want to multiply by iD + siViV' ft = ftod[:,b[0]:b[1]] - iD = self.iD[bi]/nsamp - iV = self.iV[bi]/nsamp**0.5 + iD = self.iD[bi] #/nsamp + iV = self.iV[bi] #/nsamp**0.5 ft[:] = iD[:,None]*ft + self.s*iV.dot(iV.T.dot(ft)) else: - so3g.nmat_detvecs_apply(ftod.view(dtype), self.bins, self.iD, self.iV, float(self.s), float(nsamp), dct_binning=True) + # using normalization 1 instead of nsamp, since I didn't multiply by nsamp earlier + so3g.nmat_detvecs_apply(ftod.view(dtype), self.bins, self.iD, self.iV, float(self.s), float(1), dct_binning=True) # I divided by the normalization above instead of passing normalize=True # here to reduce the number of operations needed diff --git a/sotodlib/mapmaking/utils.py b/sotodlib/mapmaking/utils.py index 76cdcd73d..36e44b353 100644 --- a/sotodlib/mapmaking/utils.py +++ b/sotodlib/mapmaking/utils.py @@ -156,10 +156,13 @@ def measure_cov(d, nmax=10000): def project_out(d, modes): return d-modes.T.dot(modes.dot(d)) def project_out_from_matrix(A, V): - # Used Woodbury to project out the given vectors from the covmat A + """This is the equivalent of deprojecting V in a timestream, + and then measuring the covariance of the resulting timestream. 
+ It will therefore always be positive definite.""" if V.size == 0: return A - Q = A.dot(V) - return A - Q.dot(np.linalg.solve(np.conj(V.T).dot(Q), np.conj(Q.T))) + AV = A.dot(V) + # q = a-VV'a, Q = = A + VV'AV'V - AVV' - VV'A + return A + V.dot(V.T.dot(AV)).dot(V.T) - AV.dot(V.T) - V.dot(AV.T) def measure_power(d): return np.real(np.mean(d*np.conj(d),-1)) diff --git a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index e007fdae1..c10fd019d 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -36,6 +36,7 @@ def get_parser(parser=None): parser.add_argument("-T", "--tiled" , type=int, default=1, help="0: untiled maps. Nonzero: tiled maps") parser.add_argument( "--srcsamp", type=str, default=None, help="path to mask file where True regions indicate where bright object mitigation should be applied. Mask is in equatorial coordinates. Not tiled, so should be low-res to not waste memory.") parser.add_argument( "--unit", type=str, default="uK", help="Unit of the maps") + parser.add_argument( "--iunit", type=str, default="uK", help="Unit of the raw tod") parser.add_argument( "--maxcut", type=float, default=.3, help="Maximum fraction of cut samples in a detector.") parser.add_argument( "--no-sidelobe", action="store_true", help="Do not mask Moon/Sun sidelobes") parser.add_argument( "--sun-mask", type=str, default="/global/cfs/cdirs/sobs/users/sigurdkn/masks/sidelobe/sun.fits", help="Location of Sun sidelobe mask") @@ -44,6 +45,8 @@ def get_parser(parser=None): parser.add_argument("--cut-type", type=str, default="full") return parser +unit_defs = {"nK":1e-9, "uK":1e-6, "mK":1e-3, "K":1.0, "kK":1e3, "MK": 1e6, "GK":1e9} + def exit(comm, code): if comm is not None: # This check might need an expansion comm.Abort(code) @@ -176,6 +179,7 @@ def main(**args): if args.nmat == "uncorr": noise_model = mapmaking.NmatUncorr() elif args.nmat == "corr": noise_model = mapmaking.NmatDetvecs(verbose=verbose>1, 
window=args.window) elif args.nmat == "corr_dct": noise_model = mapmaking.NmatDetvecsDCT(verbose=verbose>1) + elif args.nmat == "debug": noise_model = mapmaking.NmatDebug() else: raise ValueError("Unrecognized noise model '%s'" % args.nmat) signal_cut = mapmaking.SignalCut(comm, dtype=dtype_tod, cut_type=args.cut_type) @@ -250,6 +254,9 @@ def main(**args): utils.deslope(obs.signal, w=5, inplace=True) obs.signal = obs.signal.astype(dtype_tod) + # Convert to our target unit + obs.signal *= unit_defs[args.iunit]/unit_defs[args.unit] + if "flags" not in obs: obs.wrap("flags", core.AxisManager(obs.dets, obs.samps)) @@ -261,12 +268,10 @@ def main(**args): # Optionally skip all the calibration. Useful for sims. if not args.nocal: - # measure rms - rms = d1u.measure_rms(obs.signal, dt=1/srate) - if args.unit=='K': - good = d1u.sensitivity_cut(rms*1e6, d1u.SENS_LIMITS[band]) - elif args.unit == 'uK': - good = d1u.sensitivity_cut(rms, d1u.SENS_LIMITS[band]) + # measure rms, and convert to µKrts for comparison with sens_limits + rms = d1u.measure_rms(obs.signal, dt=1/srate) + rms *= unit_defs[args.unit]/unit_defs["uK"] + good = d1u.sensitivity_cut(rms, d1u.SENS_LIMITS[band]) if np.logical_not(good).sum() / obs.dets.count > 0.5: L.debug("Skipped %s (more than 50 percent of detectors cut by sens)" % (sub_id)) L.debug("Datacount: %s full" % (sub_id)) From 466a10d7236ca7fef3f7ad347c27448f076c3ed1 Mon Sep 17 00:00:00 2001 From: chervias Date: Thu, 23 Apr 2026 08:54:12 -0700 Subject: [PATCH 69/69] making iunit K by default --- sotodlib/site_pipeline/make_ml_map.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sotodlib/site_pipeline/make_ml_map.py b/sotodlib/site_pipeline/make_ml_map.py index c10fd019d..92f94e05c 100644 --- a/sotodlib/site_pipeline/make_ml_map.py +++ b/sotodlib/site_pipeline/make_ml_map.py @@ -36,7 +36,7 @@ def get_parser(parser=None): parser.add_argument("-T", "--tiled" , type=int, default=1, help="0: untiled maps. 
Nonzero: tiled maps") parser.add_argument( "--srcsamp", type=str, default=None, help="path to mask file where True regions indicate where bright object mitigation should be applied. Mask is in equatorial coordinates. Not tiled, so should be low-res to not waste memory.") parser.add_argument( "--unit", type=str, default="uK", help="Unit of the maps") - parser.add_argument( "--iunit", type=str, default="uK", help="Unit of the raw tod") + parser.add_argument( "--iunit", type=str, default="K", help="Unit of the raw tod") parser.add_argument( "--maxcut", type=float, default=.3, help="Maximum fraction of cut samples in a detector.") parser.add_argument( "--no-sidelobe", action="store_true", help="Do not mask Moon/Sun sidelobes") parser.add_argument( "--sun-mask", type=str, default="/global/cfs/cdirs/sobs/users/sigurdkn/masks/sidelobe/sun.fits", help="Location of Sun sidelobe mask")