From 6a10d5bae861ff12a7671c9acbb9adae1f44a81a Mon Sep 17 00:00:00 2001 From: Tomoki Terasaki Date: Tue, 19 Mar 2024 02:05:11 +0000 Subject: [PATCH 01/48] initial commit --- sotodlib/coords/mapbased_pointing.py | 545 ++++++++++++++++++ .../site_pipeline/combine_focal_planes.py | 1 + .../site_pipeline/make_mapbased_pointing.py | 102 ++++ sotodlib/site_pipeline/update_pointing.py | 196 +++++++ 4 files changed, 844 insertions(+) create mode 100644 sotodlib/coords/mapbased_pointing.py create mode 100644 sotodlib/site_pipeline/combine_focal_planes.py create mode 100644 sotodlib/site_pipeline/make_mapbased_pointing.py create mode 100644 sotodlib/site_pipeline/update_pointing.py diff --git a/sotodlib/coords/mapbased_pointing.py b/sotodlib/coords/mapbased_pointing.py new file mode 100644 index 000000000..276c4dd7a --- /dev/null +++ b/sotodlib/coords/mapbased_pointing.py @@ -0,0 +1,545 @@ +import os +import re +from tqdm import tqdm +import numpy as np +from scipy import interpolate +from scipy.optimize import curve_fit + +from sotodlib import core +from sotodlib import coords +from sotodlib.coords import optics +from sotodlib.core import metadata +from sotodlib.io.metadata import write_dataset, read_dataset + +from so3g.proj import quat +from pixell import enmap +import h5py +from scipy.ndimage import maximum_filter + +def get_planet_trajectry(tod, planet, _split=20, return_model=False): + """ + Generate the trajectory of a given planet over a specified time range. + + Parameters: + tod : An axis manager + planet (str): The name of the planet for which to generate the trajectory. + _split (int, optional): Number of points to interpolate the trajectory. Defaults to 20. + return_model (bool, optional): If True, returns interpolation functions of az and el. Defaults to False. + + Returns: + If return_model is True: + tuple: Tuple containing interpolation functions for azimuth and elevation. 
+ If return_model is False: + array: Array of quaternions representing trajectry of the planet at each timestamp. + """ + timestamps_sparse = np.linspace(tod.timestamps[0], tod.timestamps[-1], _split) + + planet_az_sparse = np.zeros_like(timestamps_sparse) + planet_el_sparse = np.zeros_like(timestamps_sparse) + for i, timestamp in enumerate(timestamps_sparse): + az, el, _ = coords.planets.get_source_azel(planet, timestamp) + planet_az_sparse[i] = az + planet_el_sparse[i] = el + planet_az_func = interpolate.interp1d(timestamps_sparse, planet_az_sparse, kind="quadratic", fill_value='extrapolate') + planet_el_func = interpolate.interp1d(timestamps_sparse, planet_el_sparse, kind="quadratic", fill_value='extrapolate') + if return_model: + return planet_az_func, planet_el_func + else: + planet_az = planet_az_func(tod.timestamps) + planet_el = planet_el_func(tod.timestamps) + q_planet = quat.rotation_lonlat(planet_az, planet_el) + return q_planet + +def get_wafer_centered_sight(tod, planet, q_planet=None, q_bs=None, q_wafer=None): + """ + Calculate the sightline vector from the focal plane, centered on the wafer, to a planet. + + Parameters: + tod : An axis manager + planet (str): The name of the planet to calculate the sightline vector. + q_planet (optional): Quaternion representing the trajectry of the planet. + If None, it will be computed using get_planet_trajectory. Defaults to None. + q_bs (optional): Quaternion representing the trajectry of the boresight. + If None, it will be computed using the current boresight angles from tod. Defaults to None. + q_wafer (optional): Quaternion representing the center of wafer to the center of boresight. + If None, it will be computed using the median of the focal plane xi and eta from tod.focal_plane. + Defaults to None. + + Returns: + Sightline vector for the planet trajectry centered on the center of the wafer. 
+ """ + if q_planet is None: + q_planet = get_planet_trajectry(tod, planet) + if q_bs is None: + q_bs = quat.rotation_lonlat(tod.boresight.az, tod.boresight.el) + if q_wafer is None: + q_wafer = quat.rotation_xieta(np.nanmedian(tod.focal_plane.xi), + np.nanmedian(tod.focal_plane.eta)) + + xi_wafer, eta_wafer, _ = quat.decompose_xieta(q_wafer) + q_wafer_f = quat.rotation_xieta(-xi_wafer, eta_wafer) + z_to_x = quat.rotation_lonlat(0, 0) + sight = z_to_x * ~(q_bs * q_wafer_f) * q_planet + return sight + +def get_wafer_xieta(wafer_slot, optics_config_fn, xieta_bs_offset=(0., 0.), + roll_bs_offset=0., tod=None, wrap_to_tod=True,): + """ + Calculate the xi and eta coordinates for a given wafer slot on the focal plane. + + Parameters: + wafer_slot (str): The slot identifier of the wafer. + optics_config_fn (str): File name containing the optics configuration. + xieta_bs_offset (tuple): Offset in xieta coordinates for the focal plane, default is (0., 0.). + roll_bs_offset (float): Boresight roll offset. Default is 0 + tod (TimeOrderedData): TOD object to which focal plane infomation that all detectors have uniform pointing at center of the wafer is wrapped. + wrap_to_tod (bool): If True, wrap the calculated xi and eta coordinates to the Time-Ordered Data (TOD), default is True. + + Returns: + tuple: A tuple containing the calculated xi and eta coordinates for the specified wafer slot. 
+ """ + + optics_config = optics.load_ufm_to_fp_config(optics_config_fn)['SAT'] + wafer_x, wafer_y = optics_config[wafer_slot]['dx'], optics_config[wafer_slot]['dy'] + wafer_r = np.sqrt(wafer_x**2 + wafer_y**2) + wafer_theta = np.arctan2(wafer_y, wafer_x) + + fp_to_sky = optics.sat_to_sky(optics.SAT_X, optics.SAT_LON) + lon = fp_to_sky(wafer_r) + + q1 = quat.rotation_iso(lon, 0) + + q2 = quat.rotation_iso(0, 0, np.pi/2 - wafer_theta - roll_bs_offset) + q3 = quat.rotation_xieta(xieta_bs_offset[0], xieta_bs_offset[1]) + q = q3 * q2 * q1 + + xi_wafer, eta_wafer, _ = quat.decompose_xieta(q) + if wrap_to_tod: + if tod is None: + raise ValueError('tod is not provided.') + if 'focal_plane' in tod._fields.keys(): + tod.move('focal_plane', None) + focal_plane = core.AxisManager(tod.dets) + focal_plane.wrap('xi', np.ones(tod.dets.count, dtype='float32') * xi_wafer, [(0, 'dets')]) + focal_plane.wrap('eta', np.ones(tod.dets.count, dtype='float32') * eta_wafer, [(0, 'dets')]) + focal_plane.wrap('gamma', np.zeros(tod.dets.count, dtype='float32'), [(0, 'dets')]) + tod.wrap('focal_plane', focal_plane) + tod.boresight.roll *= 0. + return xi_wafer, eta_wafer + + +def make_wafer_centered_maps(tod, planet, optics_config_fn, map_hdf, + xieta_bs_offset=(0., 0.), roll_bs_offset=None, + signal='signal', wcs_kernel=None, res=0.3*coords.DEG, cuts=None,): + """ + Generate boresight-centered maps from Time-Ordered Data (TOD) for each individual detector. + + Parameters: + tod : an axismanager object + planet (str): Name of the planet for which the trajectory is calculated. + optics_config_fn (str): File name containing the optics configuration. + map_hdf (str): Path to the HDF5 file where the maps will be saved. + xieta_bs_offset (tuple): Offset in xieta coordinates for the boresight, default is (0., 0.). + roll_bs_offset (float): Offset in roll angle for the boresight, default is None. + signal (str): Name of the signal to be used, default is 'signal'. 
+ wcs_kernel (ndarray): WCS kernel for mapping, default is None. + res (float): Resolution of the map in degrees, default is 0.3 degrees. + cuts (tuple): Cuts to be applied to the map, default is None. + + Returns: + None + """ + + if wcs_kernel is None: + wcs_kernel = coords.get_wcs_kernel('car', 0, 0, res) + + q_planet = get_planet_trajectry(tod, planet) + q_bs = quat.rotation_lonlat(tod.boresight.az, tod.boresight.el) + + if roll_bs_offset is None: + roll_bs_offset = np.mean(tod.boresight.roll) + + # wafer + if np.unique(tod.det_info.wafer_slot).shape[0] > 1: + raise ValueError('tod include detectors from more than one wafer') + wafer_slot = tod.det_info.wafer_slot[0] + xi_wafer, eta_wafer = get_wafer_xieta(wafer_slot=wafer_slot, + xieta_bs_offset=xieta_bs_offset, + roll_bs_offset=roll_bs_offset, + tod=tod, + optics_config_fn=optics_config_fn, + wrap_to_tod=True) + + coords.planets.compute_source_flags(tod, center_on=planet, max_pix=100000000, + wrap=planet, mask={'shape':'circle', 'xyr':[0,0,8]}) + + q_wafer = quat.rotation_xieta(xi_wafer, eta_wafer) + sight = get_wafer_centered_sight(tod, planet, q_planet, q_bs, q_wafer) + + + + xi0 = tod.focal_plane.xi[0] + eta0 = tod.focal_plane.eta[0] + xi_bs_offset, eta_bs_offset = xieta_bs_offset + + tod.focal_plane.xi *= 0. + tod.focal_plane.eta *= 0. + tod.boresight.roll *= 0. + + if cuts is None: + cuts = ~tod.flags[planet] + P = coords.P.for_tod(tod=tod, wcs_kernel=wcs_kernel, comps='T', cuts=cuts, sight=sight, threads=False) + for di, det in enumerate(tqdm(tod.dets.vals)): + det_weights = np.zeros(tod.dets.count, dtype='float32') + det_weights[di] = 1. 
+ mT_weighted = P.to_map(tod=tod, signal=signal, comps='T', det_weights=det_weights) + wT = P.to_weights(tod, signal=signal, comps='T', det_weights=det_weights) + mT = P.remove_weights(signal_map=mT_weighted, weights_map=wT, comps='T')[0] + + enmap.write_hdf(map_hdf, mT, address=det, + extra={'xi0': xi0, 'eta0': eta0, + 'xi_bs_offset': xi_bs_offset, 'eta_bs_offset': eta_bs_offset, 'roll_bs_offset': roll_bs_offset}) + return + +def detect_peak_xieta(mT, filter_size=None): + """ + Detects the peak in a given pixcell map and converts it to ξ and η coordinates. + + Parameters: + - mT (enmap.ndmap): a map object + - filter_size (int, optional): Size of the filter window for peak detection. + If not provided, it's calculated as a fraction + of the minimum dimension of mT. + + Returns: + - xi_peak (float): xi coordinate of the peak. + - eta_peak (float): eta coordinate of the peak. + - ra_peak (float): ra coordinate of the peak. + - dec_peak (float): dec coordinate of the peak. + - peak_i (int): Row index of the peak. + - peak_j (int): Column index of the peak. + """ + if filter_size is None: + filter_size = int(np.min(mT.shape)//10) + local_max = maximum_filter(mT, footprint=np.ones((filter_size, filter_size)), + mode='constant', cval=np.nan) + peak_i, peak_j = np.where(mT == np.nanmax(local_max)) + peak_i = int(np.median(peak_i)) + peak_j = int(np.median(peak_j)) + dec_grid, ra_grid = mT.posmap() + + ra_peak = ra_grid[peak_i][peak_j] + dec_peak = dec_grid[peak_i][peak_j] + xi_peak, eta_peak = _radec2xieta(ra_peak, dec_peak) + return xi_peak, eta_peak, ra_peak, dec_peak, peak_i, peak_j + +def get_center_of_mass(x, y, z, + circle_mask={'x0':0, 'y0':0, 'r_circle':3.0*coords.DEG}, + percentile_mask = {'q': 50}): + """ + Calculates the center of mass of a dataset within specified masks. + + Parameters: + - x (ndarray): Array of x-coordinates. + - y (ndarray): Array of y-coordinates. + - z (ndarray): Array of data values corresponding to the coordinates. 
+ - circle_mask (dict, optional): Parameters defining circular mask. + Should contain keys 'x0', 'y0', and 'r_circle'. + Defaults to a circle centered at (0, 0) with radius 3.0 degrees. + - percentile_mask (dict, optional): Parameters defining percentile mask. + Should contain key 'q' representing the percentile threshold. + Defaults to the 50th percentile. + + Returns: + - x_center (float): x-coordinate of the center of mass. + - y_center (float): y-coordinate of the center of mass. + """ + mask = ~np.isnan(z) + if circle_mask is not None: + x0, y0 = circle_mask['x0'], circle_mask['y0'] + r_circle = circle_mask['r_circle'] + r = np.sqrt((x-x0)**2 + (y-y0)**2) + mask = np.logical_and(mask, rnp.nanpercentile(z[mask], q)) + + _x = x[mask] + _y = y[mask] + _z = z[mask] + + total_mass = np.nansum(_z) + x_center = np.nansum(_x * _z) / total_mass + y_center = np.nansum(_y * _z) / total_mass + return x_center, y_center + +def get_edgemap(mT, edge_avoidance=1*coords.DEG, edge_check='nan'): + """ + Generates an edge map for a given map, marking regions near the edges where data is potentially unreliable. + + Parameters: + - mT (enmap.ndmap): a map object + - edge_avoidance (float, optional): Size of the edge avoidance region, defaults to 1 degree. + - edge_check (str, optional): Method for checking edges. Should be one of {'nan', 'zero'}. + 'nan': Checks for NaN values at edges. + 'zero': Checks for zero values at edges. + Defaults to 'nan'. + + Returns: + - edge_map (enmap.ndmap): 2D boolean array representing the edge map, where True indicates regions near the edges. 
+ """ + if edge_check not in ('nan', 'zero'): + raise ValueError('only `nan` or `zero` is supported') + + edge_map = enmap.zeros(mT.shape, mT.wcs) + edge_margin_size = int(edge_avoidance/np.mean(mT.pixshape())) + + for i, row in enumerate(mT): + if edge_check == 'nan': + nonzero_idxes = np.where(~np.isnan(row))[0] + elif edge_check == 'zero': + nonzero_idxes = np.where(row != 0)[0] + if len(nonzero_idxes>0): + edge_map[i, :nonzero_idxes[0] + edge_margin_size] = True + edge_map[i, nonzero_idxes[-1] - edge_margin_size:] = True + else: + edge_map[i, :] = True + + for j, col in enumerate(mT.T): + if edge_check == 'nan': + nonzero_idxes = np.where(~np.isnan(col))[0] + elif edge_check == 'zero': + nonzero_idxes = np.where(col != 0)[0] + if len(nonzero_idxes>0): + edge_map[:nonzero_idxes[0] + edge_margin_size, j] = True + edge_map[nonzero_idxes[-1] - edge_margin_size:, j] = True + else: + edge_map[:, j] = True + return edge_map + + + +def map_to_xieta(mT, edge_avoidance=1.0*coords.DEG, edge_check='nan', + r_tune_circle=1.0*coords.DEG, q_tune=50, + r_fit_circle=3.0*coords.DEG, beam_sigma_init=0.5*coords.DEG, ): + """ + Derive (xi,eta) coordinate of a peak from a given map and calculates the coefficient of determination (R^2) + as a measure of how well the data fits a Gaussian model around the peak. + + Parameters: + - mT (enmap.ndmap): a map object. + - edge_avoidance (float, optional): Size of the edge avoidance region, defaults to 1 degree. + - edge_check (str, optional): Method for checking edges. Should be one of {'nan', 'zero'}. Defaults to 'nan'. + - r_tune_circle (float, optional): Radius of the circle used for tuning the peak position, specified in radians. Defaults to 1 degree. + - q_tune (int, optional): Percentile threshold used for tuning the peak position. Defaults to 50. + - r_fit_circle (float, optional): Radius of the circle used for fitting the Gaussian model, specified in radians. Defaults to 3 degrees. 
+ - beam_sigma_init (float, optional): Initial guess for the sigma parameter of the Gaussian beam, specified in radians. Defaults to 0.5 degree. + + Returns: + - xi_det (float): ξ coordinate of the detected peak. + - eta_det (float): η coordinate of the detected peak. + - R2_det (float): Coefficient of determination (R^2) indicating the goodness of fit of the data around the peak. + If no valid peak is detected or if fitting fails, returns NaN. + """ + if np.all(np.isnan(mT)): + xi_det, eta_det, R2_det = np.nan, np.nan, np.nan + + else: + xi_peak, eta_peak, ra_peak, dec_peak, peak_i, peak_j = detect_peak_xieta(mT) + if edge_avoidance > 0.: + edge_map = get_edgemap(mT, edge_avoidance=edge_avoidance, edge_check=edge_check) + edge_valid = not edge_map[peak_i, peak_j] + else: + edge_valid = True + + if edge_valid: + dec_flat, ra_flat = mT.posmap() + dec_flat, ra_flat = dec_flat.flatten(), ra_flat.flatten() + xi_flat, eta_flat = _radec2xieta(ra_flat, dec_flat) + + circle_mask = {'x0':xi_peak, 'y0':eta_peak, 'r_circle':r_tune_circle} + percentile_mask = {'q': q_tune} + xi_peak, eta_peak = get_center_of_mass(xi_flat, eta_flat, mT.flatten(), + circle_mask=circle_mask, percentile_mask=percentile_mask) + + # check R2(=coefficient of determination) + r = np.sqrt((xi_flat - xi_peak)**2 + (eta_flat - eta_peak)**2) + z = mT.flatten() + mask_fit = np.logical_and(~np.isnan(z), r R2_threshold + if np.all(~mask): + xi, eta, gamma, R2 = np.nan, np.nan, np.nan, np.nan + else: + if method == 'highest_R2': + idx = np.argmax(val['R2'][mask]) + xi, eta, gamma, R2 = val['xi'][mask][idx], val['eta'][mask][idx], val['gamma'][mask][idx], val['R2'][mask][idx] + elif method == 'mean': + xi, eta, gamma = np.mean(val['xi'][mask]), np.mean(val['eta'][mask]), np.mean(val['gamma'][mask]) + R2 = np.nan + elif method == 'median': + xi, eta, gamma = np.median(val['xi'][mask]), np.median(val['eta'][mask]), np.median(val['gamma'][mask]) + R2 = np.nan + else: + raise ValueError('Not supported method. 
Supported methods are `highest_R2`, `mean` or `median`') + focal_plane.rows.append((det, band, channel, R2, xi, eta, gamma)) + if save: + if output_dir is None: + output_dir = os.path.join(os.getcwd(), 'combined_pointing_results') + if not os.path.exists(output_dir): + os.makedirs(output_dir) + if save_name is None: + ctimes = np.atleast_1d([]) + wafer_slots = np.atleast_1d([]) + + for file in pointing_result_files: + filename = os.path.basename(file) + match = re.search('\d{10}', filename) + ctime = int(match.group(0) if match else None) + match = re.search('ws\d{1}', filename) + ws = match.group(0) + ctimes = np.append(ctimes, ctime) + wafer_slots = np.append(wafer_slots, ws) + ctimes = ctimes.astype('int') + wafer_slots = np.sort(np.unique(wafer_slots.astype('U3'))) + save_name = f'focal_plane_{ctimes.min()}_{ctimes.max()}_' + ''.join(wafer_slots) + '.hdf' + + write_dataset(focal_plane, os.path.join(output_dir, save_name), 'focal_plane', overwrite=True) + return focal_plane \ No newline at end of file diff --git a/sotodlib/site_pipeline/combine_focal_planes.py b/sotodlib/site_pipeline/combine_focal_planes.py new file mode 100644 index 000000000..b190074fa --- /dev/null +++ b/sotodlib/site_pipeline/combine_focal_planes.py @@ -0,0 +1 @@ +# combine multiple results \ No newline at end of file diff --git a/sotodlib/site_pipeline/make_mapbased_pointing.py b/sotodlib/site_pipeline/make_mapbased_pointing.py new file mode 100644 index 000000000..82feaf79e --- /dev/null +++ b/sotodlib/site_pipeline/make_mapbased_pointing.py @@ -0,0 +1,102 @@ +import os +import numpy as np +import yaml +import argparse + +from sotodlib import core +from sotodlib import coords +from sotodlib import tod_ops +from sotodlib.tod_ops.filters import high_pass_sine2, low_pass_sine2, fourier_filter +from sotodlib.coords import mapbased_pointing as mbp +from sotodlib.site_pipeline import update_pointing as up +from sotodlib.io.metadata import write_dataset + +from sotodlib.site_pipeline import util 
+logger = util.init_logger(__name__, 'make_mapbased_pointing: ') + +def filter_tod(tod, cutoff_high=0.01, cutoff_low=1.8): + if cutoff_low is not None: + tod.signal = fourier_filter(tod, filt_function=low_pass_sine2(cutoff=cutoff_low),) + if cutoff_high is not None: + tod.signal = fourier_filter(tod, filt_function=high_pass_sine2(cutoff=cutoff_high),) + return + +def tod_process(tod): + tod_ops.detrend_tod(tod) + tod_ops.apodize_cosine(tod, apodize_samps=2000) + filter_tod(tod) + tod.restrict('samps', (tod.samps.offset+2000, tod.samps.offset+tod.samps.count-2000)) + return + +def main(ctx_file, obs_id, wafer_slot, + sso_name, optics_config_fn, + map_dir, mapbased_result_dir, todbased_result_dir, + tune_by_tod=True, R2_threshold=0.3, restrict_dets=False): + + ctx = core.Context(ctx_file) + meta = ctx.get_meta(obs_id) + meta.restrict('dets', meta.dets.vals[meta.det_info.wafer_slot == wafer_slot]) + if restrict_dets: + meta.restrict('dets', meta.dets.vals[:100]) + logger.info('loading data') + tod = ctx.get_obs(meta) + logger.info('tod processing') + tod_process(tod) + + if not os.path.exists(map_dir): + logger.info(f'Make a directory: f{map_dir}') + os.makedirs(map_dir) + + logger.info(f'Making single detector maps') + map_hdf = os.path.join(map_dir, f'{obs_id}_{wafer_slot}.hdf') + mbp.make_wafer_centered_maps(tod, sso_name, optics_config_fn, map_hdf=map_hdf,) + + logger.info(f'Making map-based pointing results') + result_filename = f'focal_plane_{obs_id}_{wafer_slot}.hdf' + focal_plane_rset_mapbased = mbp.get_xieta_from_maps(map_hdf, + save=True, + output_dir=mapbased_result_dir, + filename=result_filename, + force_zero_roll=False, + edge_avoidance=1.0*coords.DEG) + + if tune_by_tod: + focal_plane = core.AxisManager(tod.dets) + focal_plane.wrap('xi', focal_plane_rset_mapbased['xi'], [(0, 'dets')]) + focal_plane.wrap('eta', focal_plane_rset_mapbased['eta'], [(0, 'dets')]) + focal_plane.wrap('gamma', focal_plane_rset_mapbased['gamma'], [(0, 'dets')]) + is_low_R2 = 
focal_plane_rset_mapbased['R2'] < R2_threshold + focal_plane.xi[is_low_R2] = np.nan + focal_plane.eta[is_low_R2] = np.nan + + tod.focal_plane = focal_plane + tod.flags.move(sso_name, None) + logger.info(f'Making tod-based pointing results') + focal_plane_rset_todbased = up.update_xieta(tod, sso_name, ds_factor=10, + save=True, + result_dir=todbased_result_dir, + filename=result_filename) + return + + + +def get_parser(): + parser = argparse.ArgumentParser(description="Process TOD data and update pointing") + parser.add_argument("ctx_file", type=str, help="Path to the context file") + parser.add_argument("obs_id", type=str, help="Observation ID") + parser.add_argument("wafer_slot", type=int, help="Wafer slot number") + parser.add_argument("sso_name", type=str, help="Name of Solar System Object (SSO)") + parser.add_argument("optics_config_fn", type=str, help="Path to optics configuration file") + parser.add_argument("map_dir", type=str, help="Directory to save map data") + parser.add_argument("mapbased_result_dir", type=str, help="Directory to save map-based result") + parser.add_argument("todbased_result_dir", type=str, help="Directory to save TOD-based result") + parser.add_argument("--tune_by_tod", action="store_true", help="Whether to tune by TOD data") + parser.add_argument("--R2_threshold", type=float, default=0.3, + help="Threshold for R2 value. 
If R2 of map-domain result is lower than the threshold,\ + the tod-fitting for that detector is skipped.") + parser.add_argument("--restrict_dets", action="store_true", + help="If specified, number of detectors are restricted to 100") + return parser + +if __name__ == '__main__': + util.main_launcher(main, get_parser) diff --git a/sotodlib/site_pipeline/update_pointing.py b/sotodlib/site_pipeline/update_pointing.py new file mode 100644 index 000000000..2fe72094b --- /dev/null +++ b/sotodlib/site_pipeline/update_pointing.py @@ -0,0 +1,196 @@ +import os +import numpy as np +import yaml +import h5py +import matplotlib.pyplot as plt +from tqdm import tqdm +import scipy +from scipy.optimize import minimize +from sotodlib.core import metadata +from sotodlib.io.metadata import write_dataset + +from sotodlib import core +from sotodlib import coords +from sotodlib import tod_ops +import so3g +from so3g.proj import quat +import sotodlib.coords.planets as planets + +from sotodlib.tod_ops import pca +from so3g.proj import Ranges, RangesMatrix +from pixell import enmap, enplot +from sotodlib.tod_ops.filters import high_pass_sine2, low_pass_sine2, fourier_filter + +from sotodlib.site_pipeline import util +logger = util.init_logger(__name__, 'update_pointing: ') + +def _gaussian2d(xi, eta, a, xi0, eta0, fwhm_xi, fwhm_eta, phi): + """Simulate a time stream with an Gaussian beam model + Args + ------ + xi, eta: cordinates in the detector's system + a: float + amplitude of the Gaussian beam model + xi0, eta0: float, float + center position of the Gaussian beam model + fwhm_xi, fwhm_eta, phi: float, float, float + fwhm along the xi, eta axis (rotated) + and the rotation angle (in radians) + + Ouput: + ------ + sim_data: 1d array of float + Time stream at sampling points given by xieta + """ + xi_rot = xi * np.cos(phi) - eta * np.sin(phi) + eta_rot = xi * np.sin(phi) + eta * np.cos(phi) + factor = 2 * np.sqrt(2 * np.log(2)) + xi_coef = -0.5 * (xi_rot - xi0) ** 2 / (fwhm_xi / factor) 
** 2 + eta_coef = -0.5 * (eta_rot - eta0) ** 2 / (fwhm_eta / factor) ** 2 + sim_data = a * np.exp(xi_coef + eta_coef) + return sim_data + +def filter_tod(tod, cutoff_high=0.01, cutoff_low=1.8): + if cutoff_low is not None: + tod.signal = fourier_filter(tod, filt_function=low_pass_sine2(cutoff=cutoff_low),) + if cutoff_high is not None: + tod.signal = fourier_filter(tod, filt_function=high_pass_sine2(cutoff=cutoff_high),) + return + +def tod_process(tod): + tod_ops.detrend_tod(tod) + tod_ops.apodize_cosine(tod, apodize_samps=2000) + filter_tod(tod) + tod.restrict('samps', (tod.samps.offset+2000, tod.samps.offset+tod.samps.count-2000)) + return + +def update_xieta(tod, sso_name='moon', ds_factor=10, fwhm = 1.*coords.DEG, + save=False, result_dir=None, filename=None): + """ + Update xieta parameters for each detector by tod fitting of a point source observation + + Parameters: + - tod : an Axismanager object + - sso_name (str): Name of the Solar System Object (SSO). + - ds_factor (int): Downsampling factor for processing TOD. + - fwhm (float): Full width at half maximum of the Gaussian model. + - save (bool): Flag indicating whether to save the updated focal plane data. + - result_dir (str): Directory where the updated data will be saved. + - filename (str): Name of the file to save the updated data. + + Returns: + - focal_plane (ResultSet): ResultSet containing updated xieta parameters for each detector. + """ + mask_ds = slice(None, None, ds_factor) + + fp_isnan = np.isnan(tod.focal_plane.xi) + if np.any(fp_isnan): + tod.focal_plane.xi[fp_isnan] = 0. + tod.focal_plane.eta[fp_isnan] = 0. + tod.focal_plane.gamma[fp_isnan] = 0. 
+ + coords.planets.compute_source_flags(tod, center_on=sso_name, max_pix=100000000, + wrap=sso_name, mask={'shape':'circle', 'xyr':[0,0,3]}) + + summed_flag = np.sum(tod.flags[sso_name].mask()[~fp_isnan], axis=0).astype('bool') + idx_hit = np.where(summed_flag)[0] + idx_first, idx_last = idx_hit[0], idx_hit[-1] + tod.restrict('samps', (tod.samps.offset+idx_first, tod.samps.offset+idx_last)) + csl = so3g.proj.CelestialSightLine.az_el(tod.timestamps[mask_ds], tod.boresight.az[mask_ds], + tod.boresight.el[mask_ds], weather="typical") + q_bore = csl.Q + + ts_ds = tod.timestamps[mask_ds] + sig_ds = tod.signal[:, mask_ds] + source_flags_ds = tod.flags[sso_name].mask()[:, mask_ds] + xieta_dict = {} + for di, det in enumerate(tqdm(tod.dets.vals)): + if fp_isnan[di]: + xieta_dict[det] = {'xi':np.nan, 'eta':np.nan, 'R2':np.nan} + else: + mask_di = source_flags_ds[di] + ts = ts_ds[mask_di] + + xieta_det = np.array([tod.focal_plane.xi[di], tod.focal_plane.eta[di]]) + q_det = so3g.proj.quat.rotation_xieta(xieta_det[0], xieta_det[1]) + d1_unix = np.median(ts) + planet = planets.SlowSource.for_named_source(sso_name, d1_unix * 1.) 
+ ra0, dec0 = planet.pos(d1_unix) + q_obj = so3g.proj.quat.rotation_lonlat(ra0, dec0) + q_total = ~q_det * ~q_bore * q_obj + + xi_src, eta_src, _ = quat.decompose_xieta(q_total) + xieta_src = np.array([xi_src, eta_src]) + xieta_src = xieta_src[:, mask_di] + + + + sig = sig_ds[di][mask_di] + amp = np.ptp(sig) + def fit_func(xi0, eta0): + model_tod = _gaussian2d(xieta_src[0], xieta_src[1], amp, xi0, eta0, fwhm, fwhm, 0) + residual = sig - model_tod + return np.sum(residual ** 2) + + res = minimize(lambda x: fit_func(*x), [0, 0]) + R2 = 1 - res.fun/np.sum((sig - np.mean(sig))**2) + + if np.rad2deg(np.sqrt(np.sum(res.x**2))) > 1.0: + xieta_dict[det] = {'xi':np.nan, 'eta':np.nan, 'R2':np.nan} + else: + xieta_det += res.x + xieta_dict[det] = {'xi':xieta_det[0], 'eta':xieta_det[1], 'R2':R2} + + focal_plane = metadata.ResultSet(keys=['dets:readout_id', 'band', 'channel', 'R2', 'xi', 'eta', 'gamma']) + for det in tod.dets.vals: + band = int(det.split('_')[-2]) + channel = int(det.split('_')[-1]) + focal_plane.rows.append((det, band, channel, xieta_dict[det]['R2'], + xieta_dict[det]['xi'], xieta_dict[det]['eta'], 0.)) + if save: + assert result_dir is not None + assert filename is not None + if not os.path.exists(result_dir): + os.makedirs(result_dir) + write_dataset(focal_plane, + filename=os.path.join(result_dir, filename), + address='focal_plane', + overwrite=True) + return focal_plane + +def main(ctx_file, obs_id, wafer_slot, sso_name, result_dir, + ds_factor=10, fwhm = 1.*coords.DEG, restrict_dets=False): + ctx = core.Context(ctx_file) + meta = ctx.get_meta(obs_id) + meta.restrict('dets', meta.dets.vals[meta.det_info.wafer_slot == wafer_slot]) + if restrict_dets: + meta.restrict('dets', meta.dets.vals[:100]) + + logger.info('loading data') + tod = ctx.get_obs(meta) + logger.info('tod processing') + tod_process(tod) + + if not os.path.exists(result_dir): + logger.info(f'Make a directory: f{result_dir}') + os.makedirs(result_dir) + + result_filename = 
f'focal_plane_{obs_id}_{wafer_slot}.hdf' + focal_plane_rset = update_xieta(tod=tod, sso_name=sso_name, ds_factor=ds_factor, fwhm=fwhm, + save=True, result_dir=result_dir, filename=result_filename) + return + +def get_parser(): + parser = argparse.ArgumentParser(description="Description of the script.") + parser.add_argument("ctx_file", type=str, help="Path to the context file.") + parser.add_argument("obs_id", type=str, help="Observation ID.") + parser.add_argument("wafer_slot", type=int, help="Wafer slot number.") + parser.add_argument("sso_name", type=str, help="Name of the Solar System Object (SSO).") + parser.add_argument("result_dir", type=str, help="Directory to save the result.") + parser.add_argument("--ds_factor", type=int, default=10, help="Downsampling factor for TOD processing.") + parser.add_argument("--fwhm", type=float, default=1.0, help="Full width at half maximum of the Gaussian model.") + parser.add_argument("--restrict_dets", action="store_true", help="Flag to restrict the number of detectors.") + return parser + +if __name__ == '__main__': + util.main_launcher(main, get_parser) From 0af181a458224aca53ee095a7466101da75bcc9b Mon Sep 17 00:00:00 2001 From: Tomoki Terasaki Date: Tue, 19 Mar 2024 05:36:51 +0000 Subject: [PATCH 02/48] add combine_focal_planes.py --- sotodlib/coords/mapbased_pointing.py | 65 --------- .../site_pipeline/combine_focal_planes.py | 131 +++++++++++++++++- sotodlib/site_pipeline/update_pointing.py | 2 +- 3 files changed, 131 insertions(+), 67 deletions(-) diff --git a/sotodlib/coords/mapbased_pointing.py b/sotodlib/coords/mapbased_pointing.py index 276c4dd7a..a374a621e 100644 --- a/sotodlib/coords/mapbased_pointing.py +++ b/sotodlib/coords/mapbased_pointing.py @@ -478,68 +478,3 @@ def _add_xieta(xieta1, xieta2): xi_add, eta_add, _ = quat.decompose_xieta(q_add) return xi_add, eta_add -def combine_pointings(pointing_result_files, method='mean', R2_threshold=0.3, - save=False, output_dir=None, save_name=None): - combined_dict = 
{} - for file in pointing_result_files: - rset = read_dataset(file, 'focal_plane') - for row in rset[:]: - if row['dets:readout_id'] not in combined_dict.keys(): - combined_dict[row['dets:readout_id']] = {} - combined_dict[row['dets:readout_id']]['band'] = row['band'] - combined_dict[row['dets:readout_id']]['channel'] = row['channel'] - - combined_dict[row['dets:readout_id']]['R2'] = np.atleast_1d([]) - combined_dict[row['dets:readout_id']]['xi'] = np.atleast_1d([]) - combined_dict[row['dets:readout_id']]['eta'] = np.atleast_1d([]) - combined_dict[row['dets:readout_id']]['gamma'] = np.atleast_1d([]) - - combined_dict[row['dets:readout_id']]['R2'] = np.append(combined_dict[row['dets:readout_id']]['R2'], row['R2']) - combined_dict[row['dets:readout_id']]['xi'] = np.append(combined_dict[row['dets:readout_id']]['xi'], row['xi']) - combined_dict[row['dets:readout_id']]['eta'] = np.append(combined_dict[row['dets:readout_id']]['eta'], row['eta']) - combined_dict[row['dets:readout_id']]['gamma'] = np.append(combined_dict[row['dets:readout_id']]['gamma'], row['gamma']) - - focal_plane = metadata.ResultSet(keys=['dets:readout_id', 'band', 'channel', 'R2', 'xi', 'eta', 'gamma']) - for det, val in combined_dict.items(): - band = int(val['band']) - channel = int(val['channel']) - - mask = val['R2'] > R2_threshold - if np.all(~mask): - xi, eta, gamma, R2 = np.nan, np.nan, np.nan, np.nan - else: - if method == 'highest_R2': - idx = np.argmax(val['R2'][mask]) - xi, eta, gamma, R2 = val['xi'][mask][idx], val['eta'][mask][idx], val['gamma'][mask][idx], val['R2'][mask][idx] - elif method == 'mean': - xi, eta, gamma = np.mean(val['xi'][mask]), np.mean(val['eta'][mask]), np.mean(val['gamma'][mask]) - R2 = np.nan - elif method == 'median': - xi, eta, gamma = np.median(val['xi'][mask]), np.median(val['eta'][mask]), np.median(val['gamma'][mask]) - R2 = np.nan - else: - raise ValueError('Not supported method. 
Supported methods are `highest_R2`, `mean` or `median`') - focal_plane.rows.append((det, band, channel, R2, xi, eta, gamma)) - if save: - if output_dir is None: - output_dir = os.path.join(os.getcwd(), 'combined_pointing_results') - if not os.path.exists(output_dir): - os.makedirs(output_dir) - if save_name is None: - ctimes = np.atleast_1d([]) - wafer_slots = np.atleast_1d([]) - - for file in pointing_result_files: - filename = os.path.basename(file) - match = re.search('\d{10}', filename) - ctime = int(match.group(0) if match else None) - match = re.search('ws\d{1}', filename) - ws = match.group(0) - ctimes = np.append(ctimes, ctime) - wafer_slots = np.append(wafer_slots, ws) - ctimes = ctimes.astype('int') - wafer_slots = np.sort(np.unique(wafer_slots.astype('U3'))) - save_name = f'focal_plane_{ctimes.min()}_{ctimes.max()}_' + ''.join(wafer_slots) + '.hdf' - - write_dataset(focal_plane, os.path.join(output_dir, save_name), 'focal_plane', overwrite=True) - return focal_plane \ No newline at end of file diff --git a/sotodlib/site_pipeline/combine_focal_planes.py b/sotodlib/site_pipeline/combine_focal_planes.py index b190074fa..b8a1184a4 100644 --- a/sotodlib/site_pipeline/combine_focal_planes.py +++ b/sotodlib/site_pipeline/combine_focal_planes.py @@ -1 +1,130 @@ -# combine multiple results \ No newline at end of file +import os +import re +import glob +import numpy as np + +from sotodlib.core import metadata +from sotodlib.io.metadata import write_dataset, read_dataset + +from sotodlib.site_pipeline import util +logger = util.init_logger(__name__, 'combine_focal_planes: ') + +def combine_pointings(pointing_result_files, method='highest_R2', R2_threshold=0.3, + save=False, output_dir=None, save_name=None): + combined_dict = {} + for file in pointing_result_files: + rset = read_dataset(file, 'focal_plane') + for row in rset[:]: + if row['dets:readout_id'] not in combined_dict.keys(): + combined_dict[row['dets:readout_id']] = {} + 
combined_dict[row['dets:readout_id']]['band'] = row['band'] + combined_dict[row['dets:readout_id']]['channel'] = row['channel'] + + combined_dict[row['dets:readout_id']]['R2'] = np.atleast_1d([]) + combined_dict[row['dets:readout_id']]['xi'] = np.atleast_1d([]) + combined_dict[row['dets:readout_id']]['eta'] = np.atleast_1d([]) + combined_dict[row['dets:readout_id']]['gamma'] = np.atleast_1d([]) + + combined_dict[row['dets:readout_id']]['R2'] = np.append(combined_dict[row['dets:readout_id']]['R2'], row['R2']) + combined_dict[row['dets:readout_id']]['xi'] = np.append(combined_dict[row['dets:readout_id']]['xi'], row['xi']) + combined_dict[row['dets:readout_id']]['eta'] = np.append(combined_dict[row['dets:readout_id']]['eta'], row['eta']) + combined_dict[row['dets:readout_id']]['gamma'] = np.append(combined_dict[row['dets:readout_id']]['gamma'], row['gamma']) + + focal_plane = metadata.ResultSet(keys=['dets:readout_id', 'band', 'channel', 'R2', 'xi', 'eta', 'gamma']) + for det, val in combined_dict.items(): + band = int(val['band']) + channel = int(val['channel']) + + mask = val['R2'] > R2_threshold + if np.all(~mask): + xi, eta, gamma, R2 = np.nan, np.nan, np.nan, np.nan + else: + if method == 'highest_R2': + idx = np.argmax(val['R2'][mask]) + xi, eta, gamma, R2 = val['xi'][mask][idx], val['eta'][mask][idx], val['gamma'][mask][idx], val['R2'][mask][idx] + elif method == 'mean': + xi, eta, gamma = np.mean(val['xi'][mask]), np.mean(val['eta'][mask]), np.mean(val['gamma'][mask]) + R2 = np.nan + elif method == 'median': + xi, eta, gamma = np.median(val['xi'][mask]), np.median(val['eta'][mask]), np.median(val['gamma'][mask]) + R2 = np.nan + else: + raise ValueError('Not supported method. 
Supported methods are `highest_R2`, `mean` or `median`') + focal_plane.rows.append((det, band, channel, R2, xi, eta, gamma)) + if save: + if output_dir is None: + output_dir = os.path.join(os.getcwd(), 'combined_pointing_results') + if not os.path.exists(output_dir): + os.makedirs(output_dir) + if save_name is None: + ctimes = np.atleast_1d([]) + wafer_slots = np.atleast_1d([]) + for file in pointing_result_files: + filename = os.path.basename(file) + match = re.search('\d{10}', filename) + ctime = int(match.group(0) if match else None) + match = re.search('ws\d{1}', filename) + ws = match.group(0) + ctimes = np.append(ctimes, ctime) + wafer_slots = np.append(wafer_slots, ws) + ctimes = ctimes.astype('int') + wafer_slots = np.sort(np.unique(wafer_slots.astype('U3'))) + save_name = f'focal_plane_{ctimes.min()}_{ctimes.max()}_' + ''.join(wafer_slots) + '.hdf' + + write_dataset(focal_plane, os.path.join(output_dir, save_name), 'focal_plane', overwrite=True) + return focal_plane + +def combine_onewafer_results(pointing_dir, ws, output_dir, filename=None, + method='highest_R2', R2_threshold=0.3,): + pointing_result_files = glob.glob(os.path.join(pointing_dir, f'focal_plane*{ws}.hdf')) + if filename is None: + filename = f'focal_plane_{ws}_combined.hdf' + _ = combine_pointings(pointing_result_files, save=True, output_dir=output_dir, save_name=filename) + return + +def combine_allwafer_results(pointing_dir, output_dir, filename=None, + method='highest_R2', R2_threshold=0.3,): + pointing_result_files = glob.glob(os.path.join(pointing_dir, 'focal_plane*.hdf')) + if filename is None: + filename = f'focal_plane_combined.hdf' + _ = combine_pointings(pointing_result_files, save=True, output_dir=output_dir, save_name=filename) + return + +def make_detabase(focal_plane_file, db_file,): + scheme = metadata.ManifestScheme().add_data_field('dataset') + db = metadata.ManifestDb(scheme=scheme) + db.add_entry({'dataset': 'focal_plane'}, filename=focal_plane_file) + db.to_file(db_file) 
+ return + +def main(pointing_dir, output_dir=None, method='highest_R2', R2_threshold=0.3,): + if output_dir is None: + output_dir = os.path.join(os.getcwd(), 'combined_results') + + logger.info('Combining each wafer resluts') + wafer_slots = [f'ws{i}' for i in range(7)] + for ws in wafer_slots: + combine_onewafer_results(pointing_dir=pointing_dir, ws=ws, + output_dir=output_dir, filename=None, + method=method, R2_threshold=R2_threshold) + + logger.info('Combining all wafer resluts') + combine_allwafer_results(pointing_dir=pointing_dir, output_dir=output_dir, filename='focal_plane_combined.hdf', + method=method, R2_threshold=R2_threshold) + + logger.info('Making a database') + focal_plane_file = os.path.join(output_dir, 'focal_plane_combined.hdf') + db_file = os.path.join(output_dir, 'focal_plane_combined.sqlite') + make_detabase(focal_plane_file, db_file,) + return + +def get_parser(): + parser = argparse.ArgumentParser(description="Combine multiple result of pointing.") + parser.add_argument('--pointing_dir', type=str, required=True, help='Directory containing pointing result files.') + parser.add_argument('--output_dir', type=str, default=None, help='Directory to save combined results. Default is "combined_results".') + parser.add_argument('--method', type=str, default='highest_R2', choices=['highest_R2', 'mean', 'median'], help='Combination method. Default is "highest_R2".') + parser.add_argument('--R2_threshold', type=float, default=0.3, help='Threshold for R2 value. 
Default is 0.3.') + return parser + +if __name__ == '__main__': + util.main_launcher(main, get_parser) diff --git a/sotodlib/site_pipeline/update_pointing.py b/sotodlib/site_pipeline/update_pointing.py index 2fe72094b..d3015d688 100644 --- a/sotodlib/site_pipeline/update_pointing.py +++ b/sotodlib/site_pipeline/update_pointing.py @@ -181,7 +181,7 @@ def main(ctx_file, obs_id, wafer_slot, sso_name, result_dir, return def get_parser(): - parser = argparse.ArgumentParser(description="Description of the script.") + parser = argparse.ArgumentParser(description="Get updated result of pointings with tod-based results") parser.add_argument("ctx_file", type=str, help="Path to the context file.") parser.add_argument("obs_id", type=str, help="Observation ID.") parser.add_argument("wafer_slot", type=int, help="Wafer slot number.") From 32e24af07371958171d1134f3bfb4170ef0d6edd Mon Sep 17 00:00:00 2001 From: Tomoki Terasaki Date: Tue, 19 Mar 2024 05:47:57 +0000 Subject: [PATCH 03/48] resolve wrong git operations --- sotodlib/coords/demod.py | 42 +---------------------------------- tests/test_demod_map.py | 48 ---------------------------------------- 2 files changed, 1 insertion(+), 89 deletions(-) diff --git a/sotodlib/coords/demod.py b/sotodlib/coords/demod.py index 709c34462..c6b9dcd66 100644 --- a/sotodlib/coords/demod.py +++ b/sotodlib/coords/demod.py @@ -140,44 +140,4 @@ def make_map(tod, output = {'map': mTQU, 'weighted_map': mTQU_weighted, 'weight': wTQU} - return output - -def from_map(tod, signal_map, cuts=None, flip_gamma=True, wrap=False, modulated=False): - """ - Generate simulated TOD with HWP from a given signal map. - - Args: - tod : an axisManager object - signal_map: pixell.enmap.ndmap containing (Tmap, Qmap, Umap) representing the signal. - cuts (RangesMatrix, optional): Cuts to apply to the data. Default is None. - flip_gamma (bool, optional): Whether to flip detector coordinate. If you use the HWP, keep it `True`. Default is True. 
- wrap (bool, optional): Whether to wrap the simulated data. Default is False. - modulated (bool, optional): If True, return modulated signal. If False, return the demodulated signal - (`dsT`, `demodQ`, and `demodU`). Default is False. - - Returns: - `modulate==False`: A tuple containing the TOD (np.array) of dsT, demodQ and demodU. - `modulate==True` : The modulated TOD (np.array) - - """ - Tmap, Qmap, Umap = signal_map - - P = coords.P.for_tod(tod=tod, geom=signal_map.geometry, cuts=cuts, - comps='QU', hwp=flip_gamma) - dsT_sim = P.from_map(Tmap, comps='T') - demodQ_sim = P.from_map(enmap.enmap([Qmap, Umap]), comps='QU') - demodU_sim = P.from_map(enmap.enmap([Umap, -Qmap]), comps='QU') - - if modulated is False: - if wrap: - tod.wrap('dsT', dsT_sim, [(0, 'dets'), (1, 'samps')]) - tod.wrap('demodQ', demodQ_sim, [(0, 'dets'), (1, 'samps')]) - tod.wrap('demodU', demodU_sim, [(0, 'dets'), (1, 'samps')]) - return dsT_sim, demodQ_sim, demodU_sim - else: - assert 'hwp_angle' in tod._fields - signal_sim = dsT_sim + demodQ_sim*np.cos(4*tod.hwp_angle) + demodU_sim*np.sin(4*tod.hwp_angle) - if wrap: - tod.wrap('signal', signal_sim, [(0, 'dets'), (1, 'samps')]) - return signal_sim - \ No newline at end of file + return output \ No newline at end of file diff --git a/tests/test_demod_map.py b/tests/test_demod_map.py index 525f35467..36d1b7853 100644 --- a/tests/test_demod_map.py +++ b/tests/test_demod_map.py @@ -130,52 +130,4 @@ def test_10_mod_demod(self): means = [m[s].mean() for m in m0] print(means) assert(abs(means[1] - Q_stream) < TOL) - assert(abs(means[2] - U_stream) < TOL) - - def test_from_map_demodulated(self): - """Test the coords.demod.from_map function of demodulated signal. 
- - """ - tod = quick_tod(10, 10000) - TOL = 0.0001 - - shape, wcs = enmap.fullsky_geometry(res=0.5*coords.DEG) - signal_map = enmap.zeros((3, *shape), wcs) - T_stream, Q_stream, U_stream = 1., 0.25, 0.01 - signal_map[0] += T_stream - signal_map[1] += Q_stream - signal_map[2] += U_stream - _ = coords.demod.from_map(tod, signal_map, modulated=False, wrap=True) - - results = coords.demod.make_map(tod) - m0 = results['map'] - s = m0[1] != 0 - means = [m[s].mean() for m in m0] - assert(abs(means[1] - Q_stream) < TOL) - assert(abs(means[2] - U_stream) < TOL) - - def test_from_map_modulated(self): - """Test the coords.demod.from_map function of modulated signal. - - """ - tod = quick_tod(10, 10000) - tod.move('signal', None) - TOL = .01 - - shape, wcs = enmap.fullsky_geometry(res=0.5*coords.DEG) - signal_map = enmap.zeros((3, *shape), wcs) - - T_stream, Q_stream, U_stream = 1., 0.25, 0.01 - signal_map[0] += T_stream - signal_map[1] += Q_stream - signal_map[2] += U_stream - - _ = coords.demod.from_map(tod, signal_map, modulated=True, wrap=True) - hwp.demod_tod(tod) - results = coords.demod.make_map(tod) - - m0 = results['map'] - s = m0[1] != 0. 
- means = [m[s].mean() for m in m0] - assert(abs(means[1] - Q_stream) < TOL) assert(abs(means[2] - U_stream) < TOL) \ No newline at end of file From 76db4c563d54c1ab9a7029bd6498f8ff9396bda1 Mon Sep 17 00:00:00 2001 From: Tomoki Terasaki Date: Tue, 30 Apr 2024 09:02:35 +0000 Subject: [PATCH 04/48] using a new preprocess style in map_based pointing reconstruction --- ...ased_pointing.py => map_based_pointing.py} | 0 .../site_pipeline/make_mapbased_pointing.py | 128 ++++++++++-------- 2 files changed, 68 insertions(+), 60 deletions(-) rename sotodlib/coords/{mapbased_pointing.py => map_based_pointing.py} (100%) diff --git a/sotodlib/coords/mapbased_pointing.py b/sotodlib/coords/map_based_pointing.py similarity index 100% rename from sotodlib/coords/mapbased_pointing.py rename to sotodlib/coords/map_based_pointing.py diff --git a/sotodlib/site_pipeline/make_mapbased_pointing.py b/sotodlib/site_pipeline/make_mapbased_pointing.py index 82feaf79e..c898e015f 100644 --- a/sotodlib/site_pipeline/make_mapbased_pointing.py +++ b/sotodlib/site_pipeline/make_mapbased_pointing.py @@ -7,95 +7,103 @@ from sotodlib import coords from sotodlib import tod_ops from sotodlib.tod_ops.filters import high_pass_sine2, low_pass_sine2, fourier_filter -from sotodlib.coords import mapbased_pointing as mbp +from sotodlib.coords import map_based_pointing as mbp from sotodlib.site_pipeline import update_pointing as up from sotodlib.io.metadata import write_dataset from sotodlib.site_pipeline import util -logger = util.init_logger(__name__, 'make_mapbased_pointing: ') - -def filter_tod(tod, cutoff_high=0.01, cutoff_low=1.8): - if cutoff_low is not None: - tod.signal = fourier_filter(tod, filt_function=low_pass_sine2(cutoff=cutoff_low),) - if cutoff_high is not None: - tod.signal = fourier_filter(tod, filt_function=high_pass_sine2(cutoff=cutoff_high),) - return - -def tod_process(tod): - tod_ops.detrend_tod(tod) - tod_ops.apodize_cosine(tod, apodize_samps=2000) - filter_tod(tod) - 
tod.restrict('samps', (tod.samps.offset+2000, tod.samps.offset+tod.samps.count-2000)) - return +from sotodlib.preprocess import Pipeline +logger = util.init_logger(__name__, 'make_map_based_pointing: ') + +def main(configs, obs_id, wafer_slot, + sso_name=None, optics_config_fn=None, + single_det_maps_dir=None, map_based_result_dir=None, tod_based_result_dir=None, + tune_by_tod=None, restrict_dets_for_debug=False): + + if type(configs) == str: + configs = yaml.safe_load(open(configs, "r")) + + # Derive parameters from config file + if optics_config_fn is None: + optics_config_fn = configs.get('optics_config_fn') + if single_det_maps_dir is None: + single_det_maps_dir = configs.get('single_det_maps_dir') + if map_based_result_dir is None: + map_based_result_dir = configs.get('map_based_result_dir') + if tod_based_result_dir is None: + tod_based_result_dir = configs.get('tod_based_result_dir') -def main(ctx_file, obs_id, wafer_slot, - sso_name, optics_config_fn, - map_dir, mapbased_result_dir, todbased_result_dir, - tune_by_tod=True, R2_threshold=0.3, restrict_dets=False): + res_deg = configs.get('res_deg') + edge_avoidance_deg = configs.get('edge_avoidance_deg') + tune_by_tod = configs.get('tune_by_tod') + R2_threshold = configs.get('R2_threshold') + ds_factor = configs.get('ds_factor') - ctx = core.Context(ctx_file) - meta = ctx.get_meta(obs_id) - meta.restrict('dets', meta.dets.vals[meta.det_info.wafer_slot == wafer_slot]) - if restrict_dets: - meta.restrict('dets', meta.dets.vals[:100]) + + ctx = core.Context(configs.get('context_file')) + # If sso_name is not specified, get sso name from observation tags + obs_tags = ctx.obsdb.get(obs_id, tags=True)['tags'] + if sso_name is None: + if 'moon' in obs_tags: + sso_name = 'moon' + elif 'jupiter' in obs_tags: + sso_name = 'jupiter' + else: + raise ValueError('sso_name is not specified') + + # Load data logger.info('loading data') + meta = ctx.get_meta(obs_id, dets={'wafer_slot': wafer_slot}) + if 
restrict_dets_for_debug is not False: + meta.restrict('dets', meta.dets.vals[:restrict_dets_for_debug]) tod = ctx.get_obs(meta) - logger.info('tod processing') - tod_process(tod) - if not os.path.exists(map_dir): - logger.info(f'Make a directory: f{map_dir}') - os.makedirs(map_dir) + # tod processing + logger.info('tod processing') + pipe = Pipeline(configs["process_pipe"], logger=logger) + proc_aman, success = pipe.run(tod) - logger.info(f'Making single detector maps') - map_hdf = os.path.join(map_dir, f'{obs_id}_{wafer_slot}.hdf') - mbp.make_wafer_centered_maps(tod, sso_name, optics_config_fn, map_hdf=map_hdf,) + # make single detecctor maps + logger.info(f'Making single detector maps') + os.makedirs(single_det_maps_dir, exist_ok=True) + map_hdf = os.path.join(single_det_maps_dir, f'{obs_id}_{wafer_slot}.hdf') + mbp.make_wafer_centered_maps(tod, sso_name, optics_config_fn, map_hdf=map_hdf, res=res_deg*coords.DEG) + # reconstruct pointing from single detector maps logger.info(f'Making map-based pointing results') result_filename = f'focal_plane_{obs_id}_{wafer_slot}.hdf' - focal_plane_rset_mapbased = mbp.get_xieta_from_maps(map_hdf, - save=True, - output_dir=mapbased_result_dir, + focal_plane_rset_map_based = mbp.get_xieta_from_maps(map_hdf, save=True, + output_dir=map_based_result_dir, filename=result_filename, force_zero_roll=False, - edge_avoidance=1.0*coords.DEG) + edge_avoidance = edge_avoidance_deg*coords.DEG) if tune_by_tod: focal_plane = core.AxisManager(tod.dets) - focal_plane.wrap('xi', focal_plane_rset_mapbased['xi'], [(0, 'dets')]) - focal_plane.wrap('eta', focal_plane_rset_mapbased['eta'], [(0, 'dets')]) - focal_plane.wrap('gamma', focal_plane_rset_mapbased['gamma'], [(0, 'dets')]) - is_low_R2 = focal_plane_rset_mapbased['R2'] < R2_threshold + focal_plane.wrap('xi', focal_plane_rset_map_based['xi'], [(0, 'dets')]) + focal_plane.wrap('eta', focal_plane_rset_map_based['eta'], [(0, 'dets')]) + focal_plane.wrap('gamma', 
focal_plane_rset_map_based['gamma'], [(0, 'dets')]) + is_low_R2 = focal_plane_rset_map_based['R2'] < R2_threshold focal_plane.xi[is_low_R2] = np.nan focal_plane.eta[is_low_R2] = np.nan tod.focal_plane = focal_plane tod.flags.move(sso_name, None) logger.info(f'Making tod-based pointing results') - focal_plane_rset_todbased = up.update_xieta(tod, sso_name, ds_factor=10, - save=True, - result_dir=todbased_result_dir, - filename=result_filename) + focal_plane_rset_tod_based = up.update_xieta(tod, sso_name, ds_factor=ds_factor, save=True, + result_dir=tod_based_result_dir, filename=result_filename) return - - def get_parser(): parser = argparse.ArgumentParser(description="Process TOD data and update pointing") - parser.add_argument("ctx_file", type=str, help="Path to the context file") - parser.add_argument("obs_id", type=str, help="Observation ID") + parser.add_argument("configs", type=str, help="Path to the configuration file") + parser.add_argument("obs_id", type=int, help="Observation ID") parser.add_argument("wafer_slot", type=int, help="Wafer slot number") - parser.add_argument("sso_name", type=str, help="Name of Solar System Object (SSO)") - parser.add_argument("optics_config_fn", type=str, help="Path to optics configuration file") - parser.add_argument("map_dir", type=str, help="Directory to save map data") - parser.add_argument("mapbased_result_dir", type=str, help="Directory to save map-based result") - parser.add_argument("todbased_result_dir", type=str, help="Directory to save TOD-based result") - parser.add_argument("--tune_by_tod", action="store_true", help="Whether to tune by TOD data") - parser.add_argument("--R2_threshold", type=float, default=0.3, - help="Threshold for R2 value. 
If R2 of map-domain result is lower than the threshold,\ - the tod-fitting for that detector is skipped.") - parser.add_argument("--restrict_dets", action="store_true", - help="If specified, number of detectors are restricted to 100") + parser.add_argument("--sso_name", type=str, default=None, help="Name of solar system object (e.g., 'moon', 'jupiter')") + parser.add_argument("--optics_config_fn", type=str, default=None, help="Path to optics configuration file") + parser.add_argument("--single_det_maps_dir", type=str, default=None, help="Directory to save single detector maps") + parser.add_argument("--map_based_result_dir", type=str, default=None, help="Directory to save map-based pointing results") + parser.add_argument("--tod_based_result_dir", type=str, default=None, help="Directory to save TOD-based pointing results") return parser if __name__ == '__main__': From 20b522cf00b1eec129600d17c604acdeff78c713 Mon Sep 17 00:00:00 2001 From: Tomoki Terasaki Date: Tue, 30 Apr 2024 12:23:26 +0000 Subject: [PATCH 05/48] added except method in flags.reduce --- sotodlib/core/flagman.py | 4 +++- sotodlib/preprocess/processes.py | 7 ++++++- sotodlib/site_pipeline/update_pointing.py | 2 -- 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/sotodlib/core/flagman.py b/sotodlib/core/flagman.py index 4a513278b..3a64faf50 100644 --- a/sotodlib/core/flagman.py +++ b/sotodlib/core/flagman.py @@ -172,7 +172,7 @@ def reduce(self, flags=None, method='union', wrap=False, new_flag=None, flags: List of flags to collapse together. Uses their names. If flags is None then all flags are reduced method: How to collapse the data. Accepts 'union','intersect', - or function. + 'except', or function. 
wrap: if True, add reduced flag to self new_flag: name of new flag, required if wrap is True remove_reduced: if True, remove all reduced flags from self @@ -198,6 +198,8 @@ def reduce(self, flags=None, method='union', wrap=False, new_flag=None, op = lambda x, y: x+y elif method == 'intersect': op = lambda x, y: x*y + elif method == 'except': + op = lambda x, y: x*~y else: op = method out = reduce(op, to_reduce) diff --git a/sotodlib/preprocess/processes.py b/sotodlib/preprocess/processes.py index 1e1293344..baae3cae6 100644 --- a/sotodlib/preprocess/processes.py +++ b/sotodlib/preprocess/processes.py @@ -782,7 +782,11 @@ def plot(self, aman, proc_aman, filename): for sso in proc_aman.sso_footprint._assignments.keys(): planet_aman = proc_aman.sso_footprint[sso] plot_sso_footprint(aman, planet_aman, sso, filename=filename.replace('{name}', f'{sso}_sso_footprint'), **self.plot_cfgs) - + +class ComputeSourceFlags(_Preprocess): + name = 'compute_source_flags' + def process(self, aman, proc_aman): + planets.compute_source_flags(aman, **self.process_cfgs) class FourierFilter(_Preprocess): """ @@ -882,3 +886,4 @@ def save(self, proc_aman, rc_aman): _Preprocess.register(SubPolyf) _Preprocess.register(DetBiasFlags) _Preprocess.register(SSOFootprint) +_Preprocess.register(ComputeSourceFlags) diff --git a/sotodlib/site_pipeline/update_pointing.py b/sotodlib/site_pipeline/update_pointing.py index d3015d688..a1cfade81 100644 --- a/sotodlib/site_pipeline/update_pointing.py +++ b/sotodlib/site_pipeline/update_pointing.py @@ -122,8 +122,6 @@ def update_xieta(tod, sso_name='moon', ds_factor=10, fwhm = 1.*coords.DEG, xi_src, eta_src, _ = quat.decompose_xieta(q_total) xieta_src = np.array([xi_src, eta_src]) xieta_src = xieta_src[:, mask_di] - - sig = sig_ds[di][mask_di] amp = np.ptp(sig) From 740f1e30d804936731b1bb32af151fa7b8f9d17b Mon Sep 17 00:00:00 2001 From: Tomoki Terasaki Date: Tue, 30 Apr 2024 12:50:03 +0000 Subject: [PATCH 06/48] added ReduceFlags function in preprocessing 
--- sotodlib/preprocess/processes.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/sotodlib/preprocess/processes.py b/sotodlib/preprocess/processes.py index baae3cae6..14adb046f 100644 --- a/sotodlib/preprocess/processes.py +++ b/sotodlib/preprocess/processes.py @@ -783,6 +783,11 @@ def plot(self, aman, proc_aman, filename): planet_aman = proc_aman.sso_footprint[sso] plot_sso_footprint(aman, planet_aman, sso, filename=filename.replace('{name}', f'{sso}_sso_footprint'), **self.plot_cfgs) +class ReduceFlags(_Preprocess): + name = 'reduce_flags' + def process(self, aman, proc_aman): + aman.flags.reduce(**self.process_cfgs) + class ComputeSourceFlags(_Preprocess): name = 'compute_source_flags' def process(self, aman, proc_aman): @@ -887,3 +892,4 @@ def save(self, proc_aman, rc_aman): _Preprocess.register(DetBiasFlags) _Preprocess.register(SSOFootprint) _Preprocess.register(ComputeSourceFlags) +_Preprocess.register(ReduceFlags) From 7b403a78b50a681241dd9a1d2cbacfbc328a40c5 Mon Sep 17 00:00:00 2001 From: Tomoki Terasaki Date: Thu, 2 May 2024 12:44:39 +0000 Subject: [PATCH 07/48] new fitting code --- sotodlib/coords/map_based_pointing.py | 47 ++-- .../site_pipeline/make_mapbased_pointing.py | 37 ++- sotodlib/site_pipeline/update_pointing.py | 224 +++++++++++++----- 3 files changed, 220 insertions(+), 88 deletions(-) diff --git a/sotodlib/coords/map_based_pointing.py b/sotodlib/coords/map_based_pointing.py index a374a621e..294a350a2 100644 --- a/sotodlib/coords/map_based_pointing.py +++ b/sotodlib/coords/map_based_pointing.py @@ -108,8 +108,7 @@ def get_wafer_xieta(wafer_slot, optics_config_fn, xieta_bs_offset=(0., 0.), lon = fp_to_sky(wafer_r) q1 = quat.rotation_iso(lon, 0) - - q2 = quat.rotation_iso(0, 0, np.pi/2 - wafer_theta - roll_bs_offset) + q2 = quat.rotation_iso(0, 0, np.pi/2 - wafer_theta + roll_bs_offset) q3 = quat.rotation_xieta(xieta_bs_offset[0], xieta_bs_offset[1]) q = q3 * q2 * q1 @@ -124,36 +123,39 @@ def get_wafer_xieta(wafer_slot, 
optics_config_fn, xieta_bs_offset=(0., 0.), focal_plane.wrap('eta', np.ones(tod.dets.count, dtype='float32') * eta_wafer, [(0, 'dets')]) focal_plane.wrap('gamma', np.zeros(tod.dets.count, dtype='float32'), [(0, 'dets')]) tod.wrap('focal_plane', focal_plane) + + # set boresight roll to zero + tod.boresight.wrap('roll_original', tod.boresight.roll, [(0, 'samps')]) tod.boresight.roll *= 0. + return xi_wafer, eta_wafer -def make_wafer_centered_maps(tod, planet, optics_config_fn, map_hdf, +def make_wafer_centered_maps(tod, sso_name, optics_config_fn, map_hdf, xieta_bs_offset=(0., 0.), roll_bs_offset=None, - signal='signal', wcs_kernel=None, res=0.3*coords.DEG, cuts=None,): + signal='signal', wafer_mask_deg=8., res_deg=0.3, cuts=None,): """ Generate boresight-centered maps from Time-Ordered Data (TOD) for each individual detector. Parameters: tod : an axismanager object - planet (str): Name of the planet for which the trajectory is calculated. + sso_name (str): Name of the planet for which the trajectory is calculated. optics_config_fn (str): File name containing the optics configuration. map_hdf (str): Path to the HDF5 file where the maps will be saved. xieta_bs_offset (tuple): Offset in xieta coordinates for the boresight, default is (0., 0.). roll_bs_offset (float): Offset in roll angle for the boresight, default is None. signal (str): Name of the signal to be used, default is 'signal'. wcs_kernel (ndarray): WCS kernel for mapping, default is None. - res (float): Resolution of the map in degrees, default is 0.3 degrees. + res_deg (float): Resolution of the map in degrees, default is 0.3 degrees. cuts (tuple): Cuts to be applied to the map, default is None. 
Returns: None """ - if wcs_kernel is None: - wcs_kernel = coords.get_wcs_kernel('car', 0, 0, res) + - q_planet = get_planet_trajectry(tod, planet) + q_planet = get_planet_trajectry(tod, sso_name) q_bs = quat.rotation_lonlat(tod.boresight.az, tod.boresight.el) if roll_bs_offset is None: @@ -170,25 +172,26 @@ def make_wafer_centered_maps(tod, planet, optics_config_fn, map_hdf, optics_config_fn=optics_config_fn, wrap_to_tod=True) - coords.planets.compute_source_flags(tod, center_on=planet, max_pix=100000000, - wrap=planet, mask={'shape':'circle', 'xyr':[0,0,8]}) + coords.planets.compute_source_flags(tod, center_on=sso_name, max_pix=100000000, + wrap='source', mask={'shape':'circle', 'xyr':[0., 0., wafer_mask_deg]}) - q_wafer = quat.rotation_xieta(xi_wafer, eta_wafer) - sight = get_wafer_centered_sight(tod, planet, q_planet, q_bs, q_wafer) - - + + q_wafer = quat.rotation_xieta(xi_wafer, eta_wafer) + sight = get_wafer_centered_sight(tod, sso_name, q_planet, q_bs, q_wafer) xi0 = tod.focal_plane.xi[0] eta0 = tod.focal_plane.eta[0] - xi_bs_offset, eta_bs_offset = xieta_bs_offset - + xi_bs_offset, eta_bs_offset = xieta_bs_offset tod.focal_plane.xi *= 0. tod.focal_plane.eta *= 0. tod.boresight.roll *= 0. + + box = np.deg2rad([[-wafer_mask_deg, -wafer_mask_deg], [wafer_mask_deg, wafer_mask_deg]]) + geom = enmap.geometry(pos=box, res=res_deg*coords.DEG) if cuts is None: - cuts = ~tod.flags[planet] - P = coords.P.for_tod(tod=tod, wcs_kernel=wcs_kernel, comps='T', cuts=cuts, sight=sight, threads=False) + cuts = ~tod.flags['source'] + P = coords.P.for_tod(tod=tod, geom=geom, comps='T', cuts=cuts, sight=sight, threads=False) for di, det in enumerate(tqdm(tod.dets.vals)): det_weights = np.zeros(tod.dets.count, dtype='float32') det_weights[di] = 1. @@ -337,8 +340,8 @@ def map_to_xieta(mT, edge_avoidance=1.0*coords.DEG, edge_check='nan', - beam_sigma_init (float, optional): Initial guess for the sigma parameter of the Gaussian beam, specified in radians. Defaults to 0.5 degree. 
Returns: - - xi_det (float): ξ coordinate of the detected peak. - - eta_det (float): η coordinate of the detected peak. + - xi_det (float): xi coordinate of the detected peak. + - eta_det (float): eta coordinate of the detected peak. - R2_det (float): Coefficient of determination (R^2) indicating the goodness of fit of the data around the peak. If no valid peak is detected or if fitting fails, returns NaN. """ @@ -432,7 +435,7 @@ def get_xieta_from_maps(map_hdf_file, q1 = quat.rotation_xieta(xi, eta) q2 = quat.rotation_xieta(xi_bs_offset, eta_bs_offset) - q3 = quat.rotation_iso(0, 0, roll_bs_offset) + q3 = quat.rotation_iso(0, 0, -roll_bs_offset) # q = q3 * ~q2 * q1 xieta = quat.decompose_xieta(q) xi, eta = xieta[0], xieta[1] diff --git a/sotodlib/site_pipeline/make_mapbased_pointing.py b/sotodlib/site_pipeline/make_mapbased_pointing.py index c898e015f..e566dbf21 100644 --- a/sotodlib/site_pipeline/make_mapbased_pointing.py +++ b/sotodlib/site_pipeline/make_mapbased_pointing.py @@ -33,8 +33,12 @@ def main(configs, obs_id, wafer_slot, if tod_based_result_dir is None: tod_based_result_dir = configs.get('tod_based_result_dir') - res_deg = configs.get('res_deg') - edge_avoidance_deg = configs.get('edge_avoidance_deg') + xieta_bs_offset = configs.get('xieta_bs_offset', [0., 0.]) + wafer_mask_deg = configs.get('wafer_mask_deg', 8.) 
+ res_deg = configs.get('res_deg', 0.3) + edge_avoidance_deg = configs.get('edge_avoidance_deg', 0.3) + save_force_zero_roll = configs.get('save_force_zero_roll', True) + tune_by_tod = configs.get('tune_by_tod') R2_threshold = configs.get('R2_threshold') ds_factor = configs.get('ds_factor') @@ -67,12 +71,14 @@ def main(configs, obs_id, wafer_slot, logger.info(f'Making single detector maps') os.makedirs(single_det_maps_dir, exist_ok=True) map_hdf = os.path.join(single_det_maps_dir, f'{obs_id}_{wafer_slot}.hdf') - mbp.make_wafer_centered_maps(tod, sso_name, optics_config_fn, map_hdf=map_hdf, res=res_deg*coords.DEG) + mbp.make_wafer_centered_maps(tod, sso_name, optics_config_fn, map_hdf=map_hdf, + xieta_bs_offset=xieta_bs_offset, + wafer_mask_deg=wafer_mask_deg, res_deg=res_deg) # reconstruct pointing from single detector maps - logger.info(f'Making map-based pointing results') + logger.info(f'Saving map-based pointing results') result_filename = f'focal_plane_{obs_id}_{wafer_slot}.hdf' - focal_plane_rset_map_based = mbp.get_xieta_from_maps(map_hdf, save=True, + fp_rset_map_based = mbp.get_xieta_from_maps(map_hdf, save=True, output_dir=map_based_result_dir, filename=result_filename, force_zero_roll=False, @@ -80,18 +86,27 @@ def main(configs, obs_id, wafer_slot, if tune_by_tod: focal_plane = core.AxisManager(tod.dets) - focal_plane.wrap('xi', focal_plane_rset_map_based['xi'], [(0, 'dets')]) - focal_plane.wrap('eta', focal_plane_rset_map_based['eta'], [(0, 'dets')]) - focal_plane.wrap('gamma', focal_plane_rset_map_based['gamma'], [(0, 'dets')]) - is_low_R2 = focal_plane_rset_map_based['R2'] < R2_threshold + focal_plane.wrap('xi', fp_rset_map_based['xi'], [(0, 'dets')]) + focal_plane.wrap('eta', fp_rset_map_based['eta'], [(0, 'dets')]) + focal_plane.wrap('gamma', fp_rset_map_based['gamma'], [(0, 'dets')]) + is_low_R2 = fp_rset_map_based['R2'] < R2_threshold focal_plane.xi[is_low_R2] = np.nan focal_plane.eta[is_low_R2] = np.nan tod.focal_plane = focal_plane 
tod.flags.move(sso_name, None) logger.info(f'Making tod-based pointing results') - focal_plane_rset_tod_based = up.update_xieta(tod, sso_name, ds_factor=ds_factor, save=True, - result_dir=tod_based_result_dir, filename=result_filename) + fp_rset_tod_based = up.update_xieta(tod, sso_name, ds_factor=ds_factor, save=True, + result_dir=tod_based_result_dir, filename=result_filename) + + if save_force_zero_roll: + logger.info(f'Saving map-based pointing results (force-zero-roll)') + output_dir = map_based_result_dir + '_force_zero_roll' + fp_rset_map_based_force_zero_roll = mbp.get_xieta_from_maps(map_hdf, save=True, + output_dir=output_dir, + filename=result_filename, + force_zero_roll=True, + edge_avoidance = edge_avoidance_deg*coords.DEG) return def get_parser(): diff --git a/sotodlib/site_pipeline/update_pointing.py b/sotodlib/site_pipeline/update_pointing.py index a1cfade81..65472819c 100644 --- a/sotodlib/site_pipeline/update_pointing.py +++ b/sotodlib/site_pipeline/update_pointing.py @@ -5,9 +5,9 @@ import matplotlib.pyplot as plt from tqdm import tqdm import scipy -from scipy.optimize import minimize +from scipy.optimize import curve_fit from sotodlib.core import metadata -from sotodlib.io.metadata import write_dataset +from sotodlib.io.metadata import read_dataset, write_dataset from sotodlib import core from sotodlib import coords @@ -24,48 +24,110 @@ from sotodlib.site_pipeline import util logger = util.init_logger(__name__, 'update_pointing: ') -def _gaussian2d(xi, eta, a, xi0, eta0, fwhm_xi, fwhm_eta, phi): +def _gaussian2d(xieta, xi0, eta0, fwhm_xi, fwhm_eta, phi, a): """Simulate a time stream with an Gaussian beam model Args ------ xi, eta: cordinates in the detector's system + xi0, eta0: float, float + center position of the Gaussian beam model + fwhm_xi, fwhm_eta, phi: float, float, float + fwhm along the xi, eta axis (rotated) + and the rotation angle (in radians) a: float amplitude of the Gaussian beam model + + Ouput: + ------ + sim_data: 1d array of 
float + Time stream at sampling points given by xieta + """ + xi, eta = xieta + xi_rot = xi * np.cos(phi) - eta * np.sin(phi) + eta_rot = xi * np.sin(phi) + eta * np.cos(phi) + factor = 2 * np.sqrt(2 * np.log(2)) + xi_coef = -0.5 * (xi_rot - xi0) ** 2 / (fwhm_xi / factor) ** 2 + eta_coef = -0.5 * (eta_rot - eta0) ** 2 / (fwhm_eta / factor) ** 2 + sim_data = a * np.exp(xi_coef + eta_coef) + return sim_data + +def _gaussian2d_nonlin(xieta, xi0, eta0, fwhm_xi, fwhm_eta, phi, a, b2,): + """Simulate a time stream with an Gaussian beam model with non-linear response + Args + ------ + xi, eta: cordinates in the detector's system xi0, eta0: float, float center position of the Gaussian beam model fwhm_xi, fwhm_eta, phi: float, float, float fwhm along the xi, eta axis (rotated) and the rotation angle (in radians) + a: float + amplitude of the Gaussian beam model + b2: float + coefficient of 2nd-order term Ouput: ------ sim_data: 1d array of float Time stream at sampling points given by xieta """ + xi, eta = xieta xi_rot = xi * np.cos(phi) - eta * np.sin(phi) eta_rot = xi * np.sin(phi) + eta * np.cos(phi) factor = 2 * np.sqrt(2 * np.log(2)) xi_coef = -0.5 * (xi_rot - xi0) ** 2 / (fwhm_xi / factor) ** 2 eta_coef = -0.5 * (eta_rot - eta0) ** 2 / (fwhm_eta / factor) ** 2 - sim_data = a * np.exp(xi_coef + eta_coef) + _y = np.exp(xi_coef + eta_coef) + sim_data = a * (_y + b2*_y**2) return sim_data -def filter_tod(tod, cutoff_high=0.01, cutoff_low=1.8): - if cutoff_low is not None: - tod.signal = fourier_filter(tod, filt_function=low_pass_sine2(cutoff=cutoff_low),) - if cutoff_high is not None: - tod.signal = fourier_filter(tod, filt_function=high_pass_sine2(cutoff=cutoff_high),) - return +# def filter_tod(tod, cutoff_high=0.01, cutoff_low=1.8): +# if cutoff_low is not None: +# tod.signal = fourier_filter(tod, filt_function=low_pass_sine2(cutoff=cutoff_low),) +# if cutoff_high is not None: +# tod.signal = fourier_filter(tod, filt_function=high_pass_sine2(cutoff=cutoff_high),) +# 
return -def tod_process(tod): - tod_ops.detrend_tod(tod) - tod_ops.apodize_cosine(tod, apodize_samps=2000) - filter_tod(tod) - tod.restrict('samps', (tod.samps.offset+2000, tod.samps.offset+tod.samps.count-2000)) - return +# def tod_process(tod): +# tod_ops.detrend_tod(tod) +# tod_ops.apodize_cosine(tod, apodize_samps=2000) +# filter_tod(tod) +# tod.restrict('samps', (tod.samps.offset+2000, tod.samps.offset+tod.samps.count-2000)) +# return + +def wrap_fp_from_hdf(tod, fp_hdf_file, data_set='focal_plane'): + fp_rset = read_dataset(fp_hdf_file, data_set) + tod.restrict('dets', tod.dets.vals[np.in1d(tod.dets.vals, fp_rset['dets:readout_id'])]) + focal_plane = core.AxisManager(tod.dets) + focal_plane.wrap_new('xi', shape=('dets', )) + focal_plane.wrap_new('eta', shape=('dets', )) + focal_plane.wrap_new('gamma', shape=('dets', )) -def update_xieta(tod, sso_name='moon', ds_factor=10, fwhm = 1.*coords.DEG, - save=False, result_dir=None, filename=None): + for di, det in enumerate(tod.dets.vals): + di_rset = np.where(fp_rset['dets:readout_id'] == det)[0][0] + focal_plane.xi[di] = fp_rset['xi'][di_rset] + focal_plane.eta[di] = fp_rset['eta'][di_rset] + focal_plane.gamma[di] = fp_rset['gamma'][di_rset] + + if 'focal_plane' in tod._fields.keys(): + tod.move('focal_plane', None) + tod.wrap('focal_plane', focal_plane) + return + + +def update_xieta(tod, + sso_name='moon', + fp_hdf_file=None, + input_force_zero_roll=False, + pipe=None, + ds_factor=10, + mask_deg=3, + fit_func_name = '_gaussian2d_nonlin', + fwhm_init_deg = 0.5, + error_estimation_method='force_one_redchi2', # rms_from_data + flag_name_rms_calc = 'source', + flag_rms_calc_exclusive = True, + save=False, result_dir=None, filename=None): """ Update xieta parameters for each detector by tod fitting of a point source observation @@ -81,70 +143,122 @@ def update_xieta(tod, sso_name='moon', ds_factor=10, fwhm = 1.*coords.DEG, Returns: - focal_plane (ResultSet): ResultSet containing updated xieta parameters for each 
detector. """ - mask_ds = slice(None, None, ds_factor) + # if focal_plane result is specified, use the information as a prior + if fp_hdf_file is not None: + wrap_fp_from_hdf(tod, fp_hdf_file) + + # set dets without focal_plane info to have (xi, eta, gamma) = (0, 0, 0), just to avoid error + xieta_isnan = (np.isnan(tod.focal_plane.xi)) | (np.isnan(tod.focal_plane.eta)) + gamma_isnan = np.isnan(tod.focal_plane.gamma) + tod.focal_plane.xi[xieta_isnan] = 0. + tod.focal_plane.eta[xieta_isnan] = 0. + tod.focal_plane.gamma[gamma_isnan] = 0. - fp_isnan = np.isnan(tod.focal_plane.xi) - if np.any(fp_isnan): - tod.focal_plane.xi[fp_isnan] = 0. - tod.focal_plane.eta[fp_isnan] = 0. - tod.focal_plane.gamma[fp_isnan] = 0. + # If input focal_plane is a result with `force_zero_roll`, set the roll to be zero + # Original value is stored to `roll_original` + if input_force_zero_roll: + if 'roll_original' in tod.boresight._fields.keys(): + pass + else: + tod.boresight.wrap('roll_original', tod.boresight.roll, [(0, 'samps')]) + tod.boresight.roll *= 0. 
- coords.planets.compute_source_flags(tod, center_on=sso_name, max_pix=100000000, - wrap=sso_name, mask={'shape':'circle', 'xyr':[0,0,3]}) + # compute source flags + if 'source' in tod.flags._fields.keys(): + tod.flags.move('source', None) + coords.planets.compute_source_flags(tod, + center_on=sso_name, + max_pix=1e10, + wrap='source', + mask={'shape':'circle', 'xyr':[0.,0.,mask_deg]}) - summed_flag = np.sum(tod.flags[sso_name].mask()[~fp_isnan], axis=0).astype('bool') + # restrict data to duration when at least one detector hit the source + summed_flag = np.sum(tod.flags['source'].mask()[~xieta_isnan], axis=0).astype('bool') idx_hit = np.where(summed_flag)[0] idx_first, idx_last = idx_hit[0], idx_hit[-1] tod.restrict('samps', (tod.samps.offset+idx_first, tod.samps.offset+idx_last)) - csl = so3g.proj.CelestialSightLine.az_el(tod.timestamps[mask_ds], tod.boresight.az[mask_ds], - tod.boresight.el[mask_ds], weather="typical") - q_bore = csl.Q + # run preprocess pipeline if provided + if pipe is not None: + proc_aman, success = pipe.run(tod) + + # get rms of flagged region for later error estimation + if flag_rms_calc_exclusive: + mask_for_rms_calc = tod.flags[flag_name_rms_calc].mask() + else: + mask_for_rms_calc = ~tod.flags[flag_name_rms_calc].mask() + rms = np.ma.std(np.ma.masked_array(tod.signal, mask_for_rms_calc), axis=1).data + tod.wrap('rms', rms, [(0, 'dets')]) + + # use downsampled data for faster fitting + mask_ds = slice(None, None, ds_factor) ts_ds = tod.timestamps[mask_ds] + q_bore = so3g.proj.CelestialSightLine.az_el(ts_ds, tod.boresight.az[mask_ds], + tod.boresight.el[mask_ds], weather="typical").Q + q_bore_roll = quat.rotation_iso(0, 0, np.median(tod.boresight.roll)) sig_ds = tod.signal[:, mask_ds] - source_flags_ds = tod.flags[sso_name].mask()[:, mask_ds] + source_flags_ds = tod.flags['source'].mask()[:, mask_ds] + + # fit each detector data xieta_dict = {} for di, det in enumerate(tqdm(tod.dets.vals)): - if fp_isnan[di]: - xieta_dict[det] = 
{'xi':np.nan, 'eta':np.nan, 'R2':np.nan} + mask_di = source_flags_ds[di] + if np.any([xieta_isnan[di], np.all(mask_di==False), tod.rms[di]==0.]): + xieta_dict[det] = {'xi': np.nan, 'eta': np.nan, 'xi_err': np.nan, 'eta_err': np.nan, + 'R2': np.nan, 'redchi2': np.nan} else: - mask_di = source_flags_ds[di] ts = ts_ds[mask_di] - + d1_unix = np.median(ts) xieta_det = np.array([tod.focal_plane.xi[di], tod.focal_plane.eta[di]]) + q_det = so3g.proj.quat.rotation_xieta(xieta_det[0], xieta_det[1]) - d1_unix = np.median(ts) planet = planets.SlowSource.for_named_source(sso_name, d1_unix * 1.) ra0, dec0 = planet.pos(d1_unix) q_obj = so3g.proj.quat.rotation_lonlat(ra0, dec0) - q_total = ~q_det * ~q_bore * q_obj + q_total = ~q_det * ~q_bore_roll * ~q_bore * q_obj xi_src, eta_src, _ = quat.decompose_xieta(q_total) xieta_src = np.array([xi_src, eta_src]) xieta_src = xieta_src[:, mask_di] - sig = sig_ds[di][mask_di] - amp = np.ptp(sig) - def fit_func(xi0, eta0): - model_tod = _gaussian2d(xieta_src[0], xieta_src[1], amp, xi0, eta0, fwhm, fwhm, 0) - residual = sig - model_tod - return np.sum(residual ** 2) - res = minimize(lambda x: fit_func(*x), [0, 0]) - R2 = 1 - res.fun/np.sum((sig - np.mean(sig))**2) + ptp_val = np.ptp(np.percentile(sig, [0.1, 99.9])) + + if fit_func_name == '_gaussian2d': + p0 = (0., 0., fwhm_init_deg*coords.DEG, fwhm_init_deg*coords.DEG, 0., ptp_val) + fit_func = _gaussian2d + elif fit_func_name == '_gaussian2d_nonlin': + p0 = (0., 0., fwhm_init_deg*coords.DEG, fwhm_init_deg*coords.DEG, 0., ptp_val, -0.1,) + fit_func = _gaussian2d_nonlin + + popt, pcov = curve_fit(fit_func, xdata=xieta_src, ydata=sig, p0=p0, sigma=tod.rms[di]*np.ones_like(sig), + absolute_sigma=True, maxfev=int(1e5)) - if np.rad2deg(np.sqrt(np.sum(res.x**2))) > 1.0: - xieta_dict[det] = {'xi':np.nan, 'eta':np.nan, 'R2':np.nan} + chi2 = np.sum(((fit_func(xieta_src, *popt) - sig)/tod.rms[di])**2) + redchi2 = chi2 / (np.prod(xieta_src.shape) - popt.shape[0]) + R2 = 1. 
- np.sum((fit_func(xieta_src, *popt) - sig)**2) / np.sum((sig - sig.mean())**2) + xi_opt, eta_opt = popt[0], popt[1] + + if error_estimation_method == 'rms_from_data': + xi_err, eta_err = np.sqrt(pcov[0,0]), np.sqrt(pcov[1,1]) + elif error_estimation_method == 'force_one_redchi2': + # The error of (xi, eta) is equivalent the case if the error bar of each data point is set + # as the reduced chi-square is equal to unity. + xi_err, eta_err = np.sqrt(pcov[0,0] * redchi2), np.sqrt(pcov[1,1] * redchi2) + redchi2 = 1. else: - xieta_det += res.x - xieta_dict[det] = {'xi':xieta_det[0], 'eta':xieta_det[1], 'R2':R2} - - focal_plane = metadata.ResultSet(keys=['dets:readout_id', 'band', 'channel', 'R2', 'xi', 'eta', 'gamma']) + raise NameError("Unsupported name for 'error_estimation_method'") + + xieta_det += np.array([xi_opt, eta_opt]) + xieta_dict[det] = {'xi': xieta_det[0], 'eta': xieta_det[1], 'xi_err': xi_err, 'eta_err': eta_err, + 'R2': R2, 'redchi2': redchi2} + + focal_plane = metadata.ResultSet(keys=['dets:readout_id', 'xi', 'eta', 'gamma', 'xi_err', 'eta_err', 'R2', 'redchi2']) for det in tod.dets.vals: - band = int(det.split('_')[-2]) - channel = int(det.split('_')[-1]) - focal_plane.rows.append((det, band, channel, xieta_dict[det]['R2'], - xieta_dict[det]['xi'], xieta_dict[det]['eta'], 0.)) + focal_plane.rows.append((det, xieta_dict[det]['xi'], xieta_dict[det]['eta'], 0., + xieta_dict[det]['xi_err'], xieta_dict[det]['eta_err'], + xieta_dict[det]['R2'], xieta_dict[det]['redchi2'], + )) if save: assert result_dir is not None assert filename is not None From 5101736a1950e3df2a0ccdfaa731271519f9e889 Mon Sep 17 00:00:00 2001 From: Tomoki Terasaki Date: Tue, 7 May 2024 02:27:21 +0000 Subject: [PATCH 08/48] added argument save_normal_roll --- .../site_pipeline/make_mapbased_pointing.py | 121 ++++--- sotodlib/site_pipeline/update_pointing.py | 296 +++++++++++------- 2 files changed, 256 insertions(+), 161 deletions(-) diff --git 
a/sotodlib/site_pipeline/make_mapbased_pointing.py b/sotodlib/site_pipeline/make_mapbased_pointing.py index e566dbf21..23f15416d 100644 --- a/sotodlib/site_pipeline/make_mapbased_pointing.py +++ b/sotodlib/site_pipeline/make_mapbased_pointing.py @@ -16,7 +16,7 @@ logger = util.init_logger(__name__, 'make_map_based_pointing: ') def main(configs, obs_id, wafer_slot, - sso_name=None, optics_config_fn=None, + sso_name=None, single_det_maps_dir=None, map_based_result_dir=None, tod_based_result_dir=None, tune_by_tod=None, restrict_dets_for_debug=False): @@ -24,35 +24,42 @@ def main(configs, obs_id, wafer_slot, configs = yaml.safe_load(open(configs, "r")) # Derive parameters from config file - if optics_config_fn is None: - optics_config_fn = configs.get('optics_config_fn') + ctx = core.Context(configs.get('context_file')) if single_det_maps_dir is None: single_det_maps_dir = configs.get('single_det_maps_dir') if map_based_result_dir is None: map_based_result_dir = configs.get('map_based_result_dir') if tod_based_result_dir is None: tod_based_result_dir = configs.get('tod_based_result_dir') - + optics_config_fn = configs.get('optics_config_fn') xieta_bs_offset = configs.get('xieta_bs_offset', [0., 0.]) wafer_mask_deg = configs.get('wafer_mask_deg', 8.) 
res_deg = configs.get('res_deg', 0.3)
     edge_avoidance_deg = configs.get('edge_avoidance_deg', 0.3)
+    save_normal_roll = configs.get('save_normal_roll', True)
     save_force_zero_roll = configs.get('save_force_zero_roll', True)
+    # parameters for tod tuning
     tune_by_tod = configs.get('tune_by_tod')
-    R2_threshold = configs.get('R2_threshold')
-    ds_factor = configs.get('ds_factor')
+    if tune_by_tod:
+        tod_ds_factor = configs.get('tod_ds_factor')
+        tod_mask_deg = configs.get('tod_mask_deg')
+        tod_fit_func_name = configs.get('tod_fit_func_name')
+        tod_max_non_linear_order = configs.get('tod_max_non_linear_order')
+        tod_fwhm_init_deg = configs.get('tod_fwhm_init_deg')
+        tod_error_estimation_method = configs.get('tod_error_estimation_method')
+        tod_flag_name_rms_calc = configs.get('tod_flag_name_rms_calc')
+        tod_flag_rms_calc_exclusive = configs.get('tod_flag_rms_calc_exclusive')
 
-    ctx = core.Context(configs.get('context_file'))
     # If sso_name is not specified, get sso name from observation tags
     obs_tags = ctx.obsdb.get(obs_id, tags=True)['tags']
     if sso_name is None:
-        if 'moon' in obs_tags:
-            sso_name = 'moon'
-        elif 'jupiter' in obs_tags:
-            sso_name = 'jupiter'
-        else:
+        known_source_names = ['moon', 'jupiter']
+        for _source_name in known_source_names:
+            if _source_name in obs_tags:
+                sso_name = _source_name
+        if sso_name is None:
             raise ValueError('sso_name is not specified')
 
     # Load data
@@ -76,49 +83,83 @@ def main(configs, obs_id, wafer_slot,
                                             wafer_mask_deg=wafer_mask_deg, res_deg=res_deg)
     
     # reconstruct pointing from single detector maps
-    logger.info(f'Saving map-based pointing results')
-    result_filename = f'focal_plane_{obs_id}_{wafer_slot}.hdf'
-    fp_rset_map_based = mbp.get_xieta_from_maps(map_hdf, save=True,
-                                                output_dir=map_based_result_dir,
-                                                filename=result_filename,
-                                                force_zero_roll=False,
-                                                edge_avoidance = edge_avoidance_deg*coords.DEG)
-    
-    if tune_by_tod:
-        focal_plane = core.AxisManager(tod.dets)
-        focal_plane.wrap('xi', fp_rset_map_based['xi'], [(0, 
'dets')]) - focal_plane.wrap('eta', fp_rset_map_based['eta'], [(0, 'dets')]) - focal_plane.wrap('gamma', fp_rset_map_based['gamma'], [(0, 'dets')]) - is_low_R2 = fp_rset_map_based['R2'] < R2_threshold - focal_plane.xi[is_low_R2] = np.nan - focal_plane.eta[is_low_R2] = np.nan - - tod.focal_plane = focal_plane - tod.flags.move(sso_name, None) - logger.info(f'Making tod-based pointing results') - fp_rset_tod_based = up.update_xieta(tod, sso_name, ds_factor=ds_factor, save=True, - result_dir=tod_based_result_dir, filename=result_filename) + if save_normal_roll: + logger.info(f'Saving map-based pointing results') + result_filename = f'focal_plane_{obs_id}_{wafer_slot}.hdf' + fp_rset_map_based = mbp.get_xieta_from_maps(map_hdf, save=True, + output_dir=map_based_result_dir, + filename=result_filename, + force_zero_roll=False, + edge_avoidance = edge_avoidance_deg*coords.DEG) + + if tune_by_tod: + logger.info(f'Making tod-based pointing results') + up.wrap_fp_rset(tod, fp_rset_map_based) + fp_rset_tod_based = up.update_xieta( tod, + sso_name=sso_name, + fp_hdf_file=None, + force_zero_roll=False, + pipe=None, + ds_factor=tod_ds_factor, + mask_deg=tod_mask_deg, + fit_func_name = tod_fit_func_name, + max_non_linear_order = tod_max_non_linear_order, + fwhm_init_deg = tod_fwhm_init_deg, + error_estimation_method=tod_error_estimation_method, + flag_name_rms_calc = tod_flag_name_rms_calc, + flag_rms_calc_exclusive = tod_flag_rms_calc_exclusive, + ) + os.makedirs(tod_based_result_dir, exist_ok=True) + write_dataset(fp_rset_tod_based, + filename=os.path.join(tod_based_result_dir, f'focal_plane_{obs_id}_{wafer_slot}.hdf'), + address='focal_plane', + overwrite=True) if save_force_zero_roll: logger.info(f'Saving map-based pointing results (force-zero-roll)') - output_dir = map_based_result_dir + '_force_zero_roll' + map_based_result_dir_force_zero_roll = map_based_result_dir + '_force_zero_roll' fp_rset_map_based_force_zero_roll = mbp.get_xieta_from_maps(map_hdf, save=True, - 
output_dir=output_dir, + output_dir=map_based_result_dir_force_zero_roll, filename=result_filename, force_zero_roll=True, - edge_avoidance = edge_avoidance_deg*coords.DEG) + edge_avoidance = edge_avoidance_deg*coords.DEG) + if tune_by_tod: + logger.info(f'Making tod-based pointing results (force-zero-roll)') + up.wrap_fp_rset(tod, fp_rset_map_based_force_zero_roll) + tod_based_result_dir_force_zero_roll = tod_based_result_dir + '_force_zero_roll' + fp_rset_tod_based_force_zero_roll = up.update_xieta( tod, + sso_name=sso_name, + fp_hdf_file=None, + force_zero_roll=False, + pipe=None, + ds_factor=tod_ds_factor, + mask_deg=tod_mask_deg, + fit_func_name = tod_fit_func_name, + max_non_linear_order = tod_max_non_linear_order, + fwhm_init_deg = tod_fwhm_init_deg, + error_estimation_method=tod_error_estimation_method, + flag_name_rms_calc = tod_flag_name_rms_calc, + flag_rms_calc_exclusive = tod_flag_rms_calc_exclusive, + ) + os.makedirs(tod_based_result_dir_force_zero_roll, exist_ok=True) + write_dataset(fp_rset_tod_based_force_zero_roll, + filename=os.path.join(tod_based_result_dir_force_zero_roll, f'focal_plane_{obs_id}_{wafer_slot}.hdf'), + address='focal_plane', + overwrite=True) + + return def get_parser(): parser = argparse.ArgumentParser(description="Process TOD data and update pointing") parser.add_argument("configs", type=str, help="Path to the configuration file") - parser.add_argument("obs_id", type=int, help="Observation ID") - parser.add_argument("wafer_slot", type=int, help="Wafer slot number") + parser.add_argument("obs_id", type=str, help="Observation id") + parser.add_argument("wafer_slot", type=str, help="Wafer slot") parser.add_argument("--sso_name", type=str, default=None, help="Name of solar system object (e.g., 'moon', 'jupiter')") - parser.add_argument("--optics_config_fn", type=str, default=None, help="Path to optics configuration file") parser.add_argument("--single_det_maps_dir", type=str, default=None, help="Directory to save single detector 
maps") parser.add_argument("--map_based_result_dir", type=str, default=None, help="Directory to save map-based pointing results") parser.add_argument("--tod_based_result_dir", type=str, default=None, help="Directory to save TOD-based pointing results") + parser.add_argument("--restrict_dets_for_debug", type=int, default=False) return parser if __name__ == '__main__': diff --git a/sotodlib/site_pipeline/update_pointing.py b/sotodlib/site_pipeline/update_pointing.py index 65472819c..1d107c0e7 100644 --- a/sotodlib/site_pipeline/update_pointing.py +++ b/sotodlib/site_pipeline/update_pointing.py @@ -24,8 +24,8 @@ from sotodlib.site_pipeline import util logger = util.init_logger(__name__, 'update_pointing: ') -def _gaussian2d(xieta, xi0, eta0, fwhm_xi, fwhm_eta, phi, a): - """Simulate a time stream with an Gaussian beam model +def gaussian2d_nonlin(xieta, xi0, eta0, fwhm_xi, fwhm_eta, phi, a, nonlin_coeffs): + """ An Gaussian beam model with non-linear response Args ------ xi, eta: cordinates in the detector's system @@ -36,11 +36,12 @@ def _gaussian2d(xieta, xi0, eta0, fwhm_xi, fwhm_eta, phi, a): and the rotation angle (in radians) a: float amplitude of the Gaussian beam model - + nonlin_coeffs: float + Coefficient of non-linear term normalized by linear term (from 2nd term). + The order is ascending. 
Ouput: ------ - sim_data: 1d array of float - Time stream at sampling points given by xieta + Model at xieta """ xi, eta = xieta xi_rot = xi * np.cos(phi) - eta * np.sin(phi) @@ -48,55 +49,18 @@ def _gaussian2d(xieta, xi0, eta0, fwhm_xi, fwhm_eta, phi, a): factor = 2 * np.sqrt(2 * np.log(2)) xi_coef = -0.5 * (xi_rot - xi0) ** 2 / (fwhm_xi / factor) ** 2 eta_coef = -0.5 * (eta_rot - eta0) ** 2 / (fwhm_eta / factor) ** 2 - sim_data = a * np.exp(xi_coef + eta_coef) - return sim_data - -def _gaussian2d_nonlin(xieta, xi0, eta0, fwhm_xi, fwhm_eta, phi, a, b2,): - """Simulate a time stream with an Gaussian beam model with non-linear response - Args - ------ - xi, eta: cordinates in the detector's system - xi0, eta0: float, float - center position of the Gaussian beam model - fwhm_xi, fwhm_eta, phi: float, float, float - fwhm along the xi, eta axis (rotated) - and the rotation angle (in radians) - a: float - amplitude of the Gaussian beam model - b2: float - coefficient of 2nd-order term + lin_gauss = np.exp(xi_coef + eta_coef) + polycoeffs_discending = np.hstack([nonlin_coeffs[::-1], [1, 0]]) + return a * np.poly1d(polycoeffs_discending)(lin_gauss) - Ouput: - ------ - sim_data: 1d array of float - Time stream at sampling points given by xieta +def wrapper_gaussian2d_nonlin(xieta, xi0, eta0, fwhm_xi, fwhm_eta, phi, a, *args): """ - xi, eta = xieta - xi_rot = xi * np.cos(phi) - eta * np.sin(phi) - eta_rot = xi * np.sin(phi) + eta * np.cos(phi) - factor = 2 * np.sqrt(2 * np.log(2)) - xi_coef = -0.5 * (xi_rot - xi0) ** 2 / (fwhm_xi / factor) ** 2 - eta_coef = -0.5 * (eta_rot - eta0) ** 2 / (fwhm_eta / factor) ** 2 - _y = np.exp(xi_coef + eta_coef) - sim_data = a * (_y + b2*_y**2) - return sim_data - -# def filter_tod(tod, cutoff_high=0.01, cutoff_low=1.8): -# if cutoff_low is not None: -# tod.signal = fourier_filter(tod, filt_function=low_pass_sine2(cutoff=cutoff_low),) -# if cutoff_high is not None: -# tod.signal = fourier_filter(tod, 
filt_function=high_pass_sine2(cutoff=cutoff_high),) -# return - -# def tod_process(tod): -# tod_ops.detrend_tod(tod) -# tod_ops.apodize_cosine(tod, apodize_samps=2000) -# filter_tod(tod) -# tod.restrict('samps', (tod.samps.offset+2000, tod.samps.offset+tod.samps.count-2000)) -# return + A wrapper for `gaussian2d_nonlin` + """ + nonlin_coeffs = np.array(args) + return gaussian2d_nonlin(xieta, xi0, eta0, fwhm_xi, fwhm_eta, phi, a, nonlin_coeffs) -def wrap_fp_from_hdf(tod, fp_hdf_file, data_set='focal_plane'): - fp_rset = read_dataset(fp_hdf_file, data_set) +def wrap_fp_rset(tod, fp_rset): tod.restrict('dets', tod.dets.vals[np.in1d(tod.dets.vals, fp_rset['dets:readout_id'])]) focal_plane = core.AxisManager(tod.dets) focal_plane.wrap_new('xi', shape=('dets', )) @@ -114,31 +78,68 @@ def wrap_fp_from_hdf(tod, fp_hdf_file, data_set='focal_plane'): tod.wrap('focal_plane', focal_plane) return +def wrap_fp_from_hdf(tod, fp_hdf_file, data_set='focal_plane'): + fp_rset = read_dataset(fp_hdf_file, data_set) + wrap_fp_rset(tod, fp_rset) + return + def update_xieta(tod, sso_name='moon', fp_hdf_file=None, - input_force_zero_roll=False, + save_dir=None, pipe=None, + force_zero_roll=False, ds_factor=10, mask_deg=3, - fit_func_name = '_gaussian2d_nonlin', + fit_func_name = 'gaussian2d_nonlin', + max_non_linear_order = 1, fwhm_init_deg = 0.5, error_estimation_method='force_one_redchi2', # rms_from_data flag_name_rms_calc = 'source', flag_rms_calc_exclusive = True, - save=False, result_dir=None, filename=None): + save=True, ): """ - Update xieta parameters for each detector by tod fitting of a point source observation + Update xieta parameters for each detector by TOD fitting of a point source observation. Parameters: - - tod : an Axismanager object - - sso_name (str): Name of the Solar System Object (SSO). - - ds_factor (int): Downsampling factor for processing TOD. - - fwhm (float): Full width at half maximum of the Gaussian model. 
- - save (bool): Flag indicating whether to save the updated focal plane data. - - result_dir (str): Directory where the updated data will be saved. - - filename (str): Name of the file to save the updated data. + - tod : + an Axismanager object + - sso_name (str): + Name of the Solar System Object (SSO). + - fp_hdf_file (str or None): + Path to the HDF file containing focal plane information. Default is None. + If None, tod.focal_plane is used for focal plane information. + - save_dir (str or None): + Directory where the updated data will be saved. Required if save is True. + - force_zero_roll (bool): + Flag indicating whether to force the roll to be zero. Default is False. + If True, input and output focal plane information assumes force_zero_roll condition. + - pipe (Pipeline or None): + Preprocessing pipeline to be applied to the TOD. Default is None, which + do not apply any processing. + - ds_factor (int): + Downsampling factor for fitting. Default is 10. + - mask_deg (float): + Mask radius in degrees for source flagging. Default is 3. + - fit_func_name (str): + Name of the fitting function. Default is 'gaussian2d_nonlin'. 'gaussian2d_nonlin' is only supported. + - max_non_linear_order (int): + Maximum non-linear order for fitting function. Default is 1. If you want to use simple gaussian set it to be 1. + Higher order is for the case that detector response is distorted by non-point-like source or too-strogng source, such as the Moon. + - fwhm_init_deg (float): + Initial guess for full width at half maximum in degrees. Default is 0.5. + - error_estimation_method (str): + Method for error estimation. Default is 'rms_from_data'. 'rms_from_data' and 'force_one_redchi2' are supported. + If 'rms_from_data', errorbar of each data point is set by root-mean-square of the data points flaged by 'flag_name_rms_calc', + and errorbar of xi,eta is set from the fit covariance matrix. 
If 'force_one_redchi2', the errorbar of (xi,eta) is equivalent the case + if the error bar of each data point is set as the reduced chi-square is equal to unity. + - flag_name_rms_calc (str): + Name of the flag used for RMS calculation. Default is 'source'. + - flag_rms_calc_exclusive (bool): + Flag indicating whether the RMS calculation is exclusive to the flag. Default is True. + - save (bool): + Flag indicating whether to save the updated focal plane data. Default is True. Returns: - focal_plane (ResultSet): ResultSet containing updated xieta parameters for each detector. @@ -156,7 +157,7 @@ def update_xieta(tod, # If input focal_plane is a result with `force_zero_roll`, set the roll to be zero # Original value is stored to `roll_original` - if input_force_zero_roll: + if force_zero_roll: if 'roll_original' in tod.boresight._fields.keys(): pass else: @@ -188,6 +189,8 @@ def update_xieta(tod, else: mask_for_rms_calc = ~tod.flags[flag_name_rms_calc].mask() rms = np.ma.std(np.ma.masked_array(tod.signal, mask_for_rms_calc), axis=1).data + if 'rms' in tod._fields.keys(): + tod.move('rms', None) tod.wrap('rms', rms, [(0, 'dets')]) # use downsampled data for faster fitting @@ -220,38 +223,50 @@ def update_xieta(tod, xi_src, eta_src, _ = quat.decompose_xieta(q_total) xieta_src = np.array([xi_src, eta_src]) xieta_src = xieta_src[:, mask_di] - sig = sig_ds[di][mask_di] - + sig = sig_ds[di][mask_di] ptp_val = np.ptp(np.percentile(sig, [0.1, 99.9])) - if fit_func_name == '_gaussian2d': - p0 = (0., 0., fwhm_init_deg*coords.DEG, fwhm_init_deg*coords.DEG, 0., ptp_val) - fit_func = _gaussian2d - elif fit_func_name == '_gaussian2d_nonlin': - p0 = (0., 0., fwhm_init_deg*coords.DEG, fwhm_init_deg*coords.DEG, 0., ptp_val, -0.1,) - fit_func = _gaussian2d_nonlin - - popt, pcov = curve_fit(fit_func, xdata=xieta_src, ydata=sig, p0=p0, sigma=tod.rms[di]*np.ones_like(sig), - absolute_sigma=True, maxfev=int(1e5)) - - chi2 = np.sum(((fit_func(xieta_src, *popt) - sig)/tod.rms[di])**2) - 
redchi2 = chi2 / (np.prod(xieta_src.shape) - popt.shape[0]) - R2 = 1. - np.sum((fit_func(xieta_src, *popt) - sig)**2) / np.sum((sig - sig.mean())**2) - xi_opt, eta_opt = popt[0], popt[1] - - if error_estimation_method == 'rms_from_data': - xi_err, eta_err = np.sqrt(pcov[0,0]), np.sqrt(pcov[1,1]) - elif error_estimation_method == 'force_one_redchi2': - # The error of (xi, eta) is equivalent the case if the error bar of each data point is set - # as the reduced chi-square is equal to unity. - xi_err, eta_err = np.sqrt(pcov[0,0] * redchi2), np.sqrt(pcov[1,1] * redchi2) - redchi2 = 1. + if fit_func_name == 'gaussian2d_nonlin': + p0 = np.array([0., 0., fwhm_init_deg*coords.DEG, fwhm_init_deg*coords.DEG, 0., ptp_val]) + bounds = np.array( + [[-np.inf, -np.inf, 0.1*fwhm_init_deg*coords.DEG, 0.1*fwhm_init_deg*coords.DEG, -np.pi, 0.1*ptp_val], + [np.inf, np.inf, 10*fwhm_init_deg*coords.DEG, 10*fwhm_init_deg*coords.DEG, np.pi, 10*ptp_val]] + ) + if max_non_linear_order >= 2: + p0 = np.append(p0, np.zeros(max_non_linear_order-1)) + bounds = np.hstack([bounds, + np.vstack([[-np.inf * np.ones(max_non_linear_order-1), + np.inf * np.ones(max_non_linear_order-1)]]) + ]) + fit_func = wrapper_gaussian2d_nonlin else: - raise NameError("Unsupported name for 'error_estimation_method'") + raise NameError("Unsupported name for 'fit_func_name'") - xieta_det += np.array([xi_opt, eta_opt]) - xieta_dict[det] = {'xi': xieta_det[0], 'eta': xieta_det[1], 'xi_err': xi_err, 'eta_err': eta_err, - 'R2': R2, 'redchi2': redchi2} + try: + popt, pcov = curve_fit(fit_func, xdata=xieta_src, ydata=sig, sigma=tod.rms[di]*np.ones_like(sig), + p0=p0, bounds=bounds, absolute_sigma=True) + + chi2 = np.sum(((fit_func(xieta_src, *popt) - sig)/tod.rms[di])**2) + redchi2 = chi2 / (np.prod(xieta_src.shape) - popt.shape[0]) + R2 = 1. 
- np.sum((fit_func(xieta_src, *popt) - sig)**2) / np.sum((sig - sig.mean())**2) + xi_opt, eta_opt = popt[0], popt[1] + + if error_estimation_method == 'rms_from_data': + xi_err, eta_err = np.sqrt(pcov[0,0]), np.sqrt(pcov[1,1]) + elif error_estimation_method == 'force_one_redchi2': + # The error of (xi, eta) is equivalent the case if the error bar of each data point is set + # as the reduced chi-square is equal to unity. + xi_err, eta_err = np.sqrt(pcov[0,0] * redchi2), np.sqrt(pcov[1,1] * redchi2) + redchi2 = 1. + else: + raise NameError("Unsupported name for 'error_estimation_method'") + + xieta_det += np.array([xi_opt, eta_opt]) + xieta_dict[det] = {'xi': xieta_det[0], 'eta': xieta_det[1], 'xi_err': xi_err, 'eta_err': eta_err, + 'R2': R2, 'redchi2': redchi2} + except RuntimeError: + xieta_dict[det] = {'xi': np.nan, 'eta': np.nan, 'xi_err': np.nan, 'eta_err': np.nan, + 'R2': np.nan, 'redchi2': np.nan} focal_plane = metadata.ResultSet(keys=['dets:readout_id', 'xi', 'eta', 'gamma', 'xi_err', 'eta_err', 'R2', 'redchi2']) for det in tod.dets.vals: @@ -259,37 +274,78 @@ def update_xieta(tod, xieta_dict[det]['xi_err'], xieta_dict[det]['eta_err'], xieta_dict[det]['R2'], xieta_dict[det]['redchi2'], )) - if save: - assert result_dir is not None - assert filename is not None - if not os.path.exists(result_dir): - os.makedirs(result_dir) - write_dataset(focal_plane, - filename=os.path.join(result_dir, filename), - address='focal_plane', - overwrite=True) + return focal_plane -def main(ctx_file, obs_id, wafer_slot, sso_name, result_dir, - ds_factor=10, fwhm = 1.*coords.DEG, restrict_dets=False): - ctx = core.Context(ctx_file) - meta = ctx.get_meta(obs_id) - meta.restrict('dets', meta.dets.vals[meta.det_info.wafer_slot == wafer_slot]) - if restrict_dets: - meta.restrict('dets', meta.dets.vals[:100]) - +def main(configs, obs_id, wafer_slot, sso_name=None, + fp_hdf_file=None, save_dir=None, restrict_dets_for_debug=False): + if type(configs) == str: + configs = 
yaml.safe_load(open(configs, "r"))
+
+    # Derive parameters from config file
+    ctx = core.Context(configs.get('context_file'))
+    if fp_hdf_file is None:
+        fp_hdf_file = configs.get('fp_hdf_file', None)
+    if save_dir is None:
+        save_dir = configs.get('save_dir', None)
+
+    # get sso_name if it is not specified
+    obs_tags = ctx.obsdb.get(obs_id, tags=True)['tags']
+    if sso_name is None:
+        known_source_names = ['moon', 'jupiter']
+        for _source_name in known_source_names:
+            if _source_name in obs_tags:
+                sso_name = _source_name
+        if sso_name is None:
+            raise ValueError('sso_name is not specified')
+
+    # construct pipeline from configs
+    pipe = Pipeline(configs["process_pipe"])
+    for pipe_component in pipe:
+        if pipe_component.name == 'compute_source_flags':
+            pipe_component.process_cfgs['center_on'] = sso_name
+
+    # Other parameters
+    force_zero_roll = configs.get('force_zero_roll')
+    ds_factor = configs.get('ds_factor')
+    mask_deg = configs.get('mask_deg')
+    fit_func_name = configs.get('fit_func_name')
+    max_non_linear_order = configs.get('max_non_linear_order')
+    fwhm_init_deg = configs.get('fwhm_init_deg')
+    error_estimation_method = configs.get('error_estimation_method')
+    flag_name_rms_calc = configs.get('flag_name_rms_calc')
+    flag_rms_calc_exclusive = configs.get('flag_rms_calc_exclusive')
+
+
+    # Load data
     logger.info('loading data')
+    meta = ctx.get_meta(obs_id, dets={'wafer_slot': wafer_slot})
+    if restrict_dets_for_debug is not False:
+        meta.restrict('dets', meta.dets.vals[:restrict_dets_for_debug])
     tod = ctx.get_obs(meta)
-    logger.info('tod processing')
-    tod_process(tod)
     
-    if not os.path.exists(result_dir):
-        logger.info(f'Make a directory: f{result_dir}')
-        os.makedirs(result_dir)
+    # get pointing
+    focal_plane_rset = update_xieta( tod, 
+                                     sso_name=sso_name,
+                                     fp_hdf_file=fp_hdf_file,
+                                     force_zero_roll=force_zero_roll,
+                                     pipe=pipe,
+                                     ds_factor=ds_factor,
+                                     mask_deg=mask_deg,
+                                     fit_func_name = fit_func_name,
+                                     max_non_linear_order = 
max_non_linear_order, + fwhm_init_deg = fwhm_init_deg, + error_estimation_method=error_estimation_method, + flag_name_rms_calc = flag_name_rms_calc, + flag_rms_calc_exclusive = flag_rms_calc_exclusive, + ) - result_filename = f'focal_plane_{obs_id}_{wafer_slot}.hdf' - focal_plane_rset = update_xieta(tod=tod, sso_name=sso_name, ds_factor=ds_factor, fwhm=fwhm, - save=True, result_dir=result_dir, filename=result_filename) + os.makedirs(save_dir, exist_ok=True) + write_dataset(focal_plane_rset, + filename=os.path.join(save_dir, f'focal_plane_{obs_id}_{wafer_slot}.hdf'), + address='focal_plane', + overwrite=True) + return def get_parser(): @@ -297,11 +353,9 @@ def get_parser(): parser.add_argument("ctx_file", type=str, help="Path to the context file.") parser.add_argument("obs_id", type=str, help="Observation ID.") parser.add_argument("wafer_slot", type=int, help="Wafer slot number.") - parser.add_argument("sso_name", type=str, help="Name of the Solar System Object (SSO).") - parser.add_argument("result_dir", type=str, help="Directory to save the result.") - parser.add_argument("--ds_factor", type=int, default=10, help="Downsampling factor for TOD processing.") - parser.add_argument("--fwhm", type=float, default=1.0, help="Full width at half maximum of the Gaussian model.") - parser.add_argument("--restrict_dets", action="store_true", help="Flag to restrict the number of detectors.") + parser.add_argument("sso_name", type=str, default=None, help="Name of the Solar System Object (SSO).") + parser.add_argument("save_dir", type=str, help="Directory to save the result.") + parser.add_argument("restrict_dets_for_debug", action="store_true", help="Flag to restrict the number of detectors.") return parser if __name__ == '__main__': From d708edb7f980f51b2e5c0aab0400e8e14b37e031 Mon Sep 17 00:00:00 2001 From: Tomoki Terasaki Date: Tue, 7 May 2024 09:49:37 +0000 Subject: [PATCH 09/48] tod based fitting is automated --- sotodlib/coords/map_based_pointing.py | 35 ++++++- 
.../site_pipeline/make_mapbased_pointing.py | 60 ++++++++++-- sotodlib/site_pipeline/update_pointing.py | 97 ++++++++++++++----- 3 files changed, 159 insertions(+), 33 deletions(-) diff --git a/sotodlib/coords/map_based_pointing.py b/sotodlib/coords/map_based_pointing.py index 294a350a2..9f87ae406 100644 --- a/sotodlib/coords/map_based_pointing.py +++ b/sotodlib/coords/map_based_pointing.py @@ -50,7 +50,7 @@ def get_planet_trajectry(tod, planet, _split=20, return_model=False): q_planet = quat.rotation_lonlat(planet_az, planet_el) return q_planet -def get_wafer_centered_sight(tod, planet, q_planet=None, q_bs=None, q_wafer=None): +def get_wafer_centered_sight(tod=None, planet=None, q_planet=None, q_bs=None, q_wafer=None): """ Calculate the sightline vector from the focal plane, centered on the wafer, to a planet. @@ -131,6 +131,34 @@ def get_wafer_xieta(wafer_slot, optics_config_fn, xieta_bs_offset=(0., 0.), return xi_wafer, eta_wafer +def get_rough_hit_time(tod, wafer_slot, sso_name, circle_r_deg=7.,optics_config_fn=None): + """ + Estimate the rough hit time for a axismanager, wafer_slot, and sso_name. + + Parameters: + tod : An AxisManager object + wafer_slot (str): Identifier for the wafer slot. + sso_name (str): Name of the Solar System Object (e.g., 'moon', 'jupiter'). + circle_r_deg (float, optional): Radius in degrees defining the circular region around the wafer center. + Defaults to 7 degrees. + + Returns: + float: Estimated rough hit time within the circular region around the wafer center. 
+ """ + q_bs = quat.rotation_lonlat(tod.boresight.az, tod.boresight.el) + q_planet = get_planet_trajectry(tod, sso_name) + xi_wafer, eta_wafer = get_wafer_xieta(wafer_slot, optics_config_fn=optics_config_fn, + roll_bs_offset=np.median(tod.boresight.roll), wrap_to_tod=False) + q_wafer = quat.rotation_xieta(xi_wafer, eta_wafer) + + q_wafer_centered = get_wafer_centered_sight(q_planet=q_planet, q_bs=q_bs, q_wafer=q_wafer) + x_to_z = ~quat.rotation_lonlat(0, 0) + xi_wafer_centered, eta_wafer_centered, _ = quat.decompose_xieta(x_to_z * q_wafer_centered) + r_wafer_centered = np.sqrt(xi_wafer_centered**2 + eta_wafer_centered**2) + hit_time = (tod.timestamps[-1] - tod.timestamps[0]) * np.mean(np.rad2deg(r_wafer_centered) < circle_r_deg) + return hit_time + + def make_wafer_centered_maps(tod, sso_name, optics_config_fn, map_hdf, xieta_bs_offset=(0., 0.), roll_bs_offset=None, signal='signal', wafer_mask_deg=8., res_deg=0.3, cuts=None,): @@ -151,10 +179,7 @@ def make_wafer_centered_maps(tod, sso_name, optics_config_fn, map_hdf, Returns: None - """ - - - + """ q_planet = get_planet_trajectry(tod, sso_name) q_bs = quat.rotation_lonlat(tod.boresight.az, tod.boresight.el) diff --git a/sotodlib/site_pipeline/make_mapbased_pointing.py b/sotodlib/site_pipeline/make_mapbased_pointing.py index 23f15416d..3adc0a401 100644 --- a/sotodlib/site_pipeline/make_mapbased_pointing.py +++ b/sotodlib/site_pipeline/make_mapbased_pointing.py @@ -14,8 +14,8 @@ from sotodlib.site_pipeline import util from sotodlib.preprocess import Pipeline logger = util.init_logger(__name__, 'make_map_based_pointing: ') - -def main(configs, obs_id, wafer_slot, + +def main_one_wafer(configs, obs_id, wafer_slot, sso_name=None, single_det_maps_dir=None, map_based_result_dir=None, tod_based_result_dir=None, tune_by_tod=None, restrict_dets_for_debug=False): @@ -82,10 +82,11 @@ def main(configs, obs_id, wafer_slot, xieta_bs_offset=xieta_bs_offset, wafer_mask_deg=wafer_mask_deg, res_deg=res_deg) + result_filename = 
f'focal_plane_{obs_id}_{wafer_slot}.hdf' # reconstruct pointing from single detector maps if save_normal_roll: logger.info(f'Saving map-based pointing results') - result_filename = f'focal_plane_{obs_id}_{wafer_slot}.hdf' + fp_rset_map_based = mbp.get_xieta_from_maps(map_hdf, save=True, output_dir=map_based_result_dir, filename=result_filename, @@ -146,19 +147,66 @@ def main(configs, obs_id, wafer_slot, filename=os.path.join(tod_based_result_dir_force_zero_roll, f'focal_plane_{obs_id}_{wafer_slot}.hdf'), address='focal_plane', overwrite=True) - - return +def main(configs, obs_id, wafer_slots, + sso_name=None, + single_det_maps_dir=None, map_based_result_dir=None, tod_based_result_dir=None, + tune_by_tod=None, hit_time_threshold=1200, hit_circle_r_deg=7.0, + restrict_dets_for_debug=False): + + logger.info('get wafer_slots which hit the source because wafer_slots are not specified') + if wafer_slots is None: + if type(configs) == str: + configs = yaml.safe_load(open(configs, "r")) + + ctx = core.Context(configs.get('context_file')) + optics_config_fn = configs.get('optics_config_fn') + + obs_tags = ctx.obsdb.get(obs_id, tags=True)['tags'] + if sso_name is None: + known_source_names = ['moon', 'jupiter'] + for _source_name in known_source_names: + if _source_name in obs_tags: + sso_name = _source_name + if _source_name is None: + raise ValueError('sso_name is not specified') + + wafer_slots = [] + tod = ctx.get_obs(obs_id, dets=[]) + for ws in [f'ws{i}' for i in range(7)]: + hit_time = mbp.get_rough_hit_time(tod, wafer_slot=ws, sso_name=sso_name, circle_r_deg=hit_circle_r_deg, + optics_config_fn=optics_config_fn) + logger.info(f'hit_time for {ws} is {hit_time:.1f} [sec]') + if hit_time > hit_time_threshold: + wafer_slots.append(ws) + assert np.all(np.array(wafer_slots, dtype='U2') == 'ws') + + logger.info(f'wafer_slots which pointing calculated: {wafer_slots}') + for wafer_slot in wafer_slots: + main_one_wafer(configs=configs, + obs_id=obs_id, + wafer_slot=wafer_slot, 
+ sso_name=sso_name, + single_det_maps_dir=single_det_maps_dir, + map_based_result_dir=map_based_result_dir, + tod_based_result_dir=tod_based_result_dir, + tune_by_tod=tune_by_tod, + restrict_dets_for_debug=restrict_dets_for_debug) + def get_parser(): parser = argparse.ArgumentParser(description="Process TOD data and update pointing") parser.add_argument("configs", type=str, help="Path to the configuration file") parser.add_argument("obs_id", type=str, help="Observation id") - parser.add_argument("wafer_slot", type=str, help="Wafer slot") + parser.add_argument("--wafer_slots", nargs='*', default=None, help="Wafer slots to be processed") parser.add_argument("--sso_name", type=str, default=None, help="Name of solar system object (e.g., 'moon', 'jupiter')") parser.add_argument("--single_det_maps_dir", type=str, default=None, help="Directory to save single detector maps") parser.add_argument("--map_based_result_dir", type=str, default=None, help="Directory to save map-based pointing results") parser.add_argument("--tod_based_result_dir", type=str, default=None, help="Directory to save TOD-based pointing results") + parser.add_argument("--hit_time_threshold", type=float, default=1200, + help="Minimum hit time. 
If calculated wafer hit time is smaller than that, pointing calculation for that wafer is skipped") + parser.add_argument("--hit_circle_r_deg", type=float, default=7., + help="circle radius for wafer hit time calculation") parser.add_argument("--restrict_dets_for_debug", type=int, default=False) return parser diff --git a/sotodlib/site_pipeline/update_pointing.py b/sotodlib/site_pipeline/update_pointing.py index 1d107c0e7..9c98c2f69 100644 --- a/sotodlib/site_pipeline/update_pointing.py +++ b/sotodlib/site_pipeline/update_pointing.py @@ -2,26 +2,21 @@ import numpy as np import yaml import h5py -import matplotlib.pyplot as plt +import argparse from tqdm import tqdm import scipy from scipy.optimize import curve_fit from sotodlib.core import metadata from sotodlib.io.metadata import read_dataset, write_dataset - +from sotodlib.coords import map_based_pointing as mbp from sotodlib import core from sotodlib import coords from sotodlib import tod_ops import so3g from so3g.proj import quat import sotodlib.coords.planets as planets - -from sotodlib.tod_ops import pca -from so3g.proj import Ranges, RangesMatrix -from pixell import enmap, enplot -from sotodlib.tod_ops.filters import high_pass_sine2, low_pass_sine2, fourier_filter - from sotodlib.site_pipeline import util +from sotodlib.preprocess import Pipeline logger = util.init_logger(__name__, 'update_pointing: ') def gaussian2d_nonlin(xieta, xi0, eta0, fwhm_xi, fwhm_eta, phi, a, nonlin_coeffs): @@ -109,9 +104,7 @@ def update_xieta(tod, Name of the Solar System Object (SSO). - fp_hdf_file (str or None): Path to the HDF file containing focal plane information. Default is None. - If None, tod.focal_plane is used for focal plane information. - - save_dir (str or None): - Directory where the updated data will be saved. Required if save is True. + If None, tod.focal_plane is used for focal plane information. - force_zero_roll (bool): Flag indicating whether to force the roll to be zero. Default is False. 
If True, input and output focal plane information assumes force_zero_roll condition. @@ -229,8 +222,8 @@ def update_xieta(tod, if fit_func_name == 'gaussian2d_nonlin': p0 = np.array([0., 0., fwhm_init_deg*coords.DEG, fwhm_init_deg*coords.DEG, 0., ptp_val]) bounds = np.array( - [[-np.inf, -np.inf, 0.1*fwhm_init_deg*coords.DEG, 0.1*fwhm_init_deg*coords.DEG, -np.pi, 0.1*ptp_val], - [np.inf, np.inf, 10*fwhm_init_deg*coords.DEG, 10*fwhm_init_deg*coords.DEG, np.pi, 10*ptp_val]] + [[-np.inf, -np.inf, fwhm_init_deg*coords.DEG/5., fwhm_init_deg*coords.DEG/5., -np.pi, 0.1*ptp_val], + [np.inf, np.inf, fwhm_init_deg*coords.DEG*5, fwhm_init_deg*coords.DEG*5, np.pi, 10*ptp_val]] ) if max_non_linear_order >= 2: p0 = np.append(p0, np.zeros(max_non_linear_order-1)) @@ -277,15 +270,28 @@ def update_xieta(tod, return focal_plane -def main(configs, obs_id, wafer_slot, sso_name=None, - fp_hdf_file=None, save_dir=None, restrict_dets_for_debug=False): +def main_one_wafer(configs, obs_id, wafer_slot, + sso_name=None, fp_hdf_file=None, fp_hdf_dir=None, + save_dir=None, restrict_dets_for_debug=False): if type(configs) == str: configs = yaml.safe_load(open(configs, "r")) # Derive parameters from config file ctx = core.Context(configs.get('context_file')) + + # get prior if fp_hdf_file is None: fp_hdf_file = configs.get('fp_hdf_file', None) + if fp_hdf_dir is None: + fp_hdf_dir = configs.get('fp_hdf_dir', None) + if fp_hdf_file is None: + if fp_hdf_dir is None: + pass + else: + fp_hdf_file = os.path.join(fp_hdf_dir, f'focal_plane_{obs_id}_{wafer_slot}.hdf') + if not os.path.exists(fp_hdf_file): + fp_hdf_file = None + if save_dir is None: save_dir = configs.get('save_dir', None) @@ -313,8 +319,8 @@ def main(configs, obs_id, wafer_slot, sso_name=None, max_non_linear_order = configs.get('max_non_linear_order') fwhm_init_deg = configs.get('fwhm_init_deg') error_estimation_method = configs.get('error_estimation_method') - flag_name_rms_calc = configls.get('flag_name_rms_calc') - 
flag_rms_calc_exclusive = configls.get('flag_rms_calc_exclusive') + flag_name_rms_calc = configs.get('flag_name_rms_calc') + flag_rms_calc_exclusive = configs.get('flag_rms_calc_exclusive') # Load data @@ -348,14 +354,61 @@ def main(configs, obs_id, wafer_slot, sso_name=None, return +def main(configs, obs_id, wafer_slots=None, + sso_name=None, fp_hdf_file=None, fp_hdf_dir=None, save_dir=None, + hit_time_threshold=1200, hit_circle_r_deg=7.0, + restrict_dets_for_debug=False): + logger.info('get wafer_slots which hit the source because wafer_slots are not specified') + if wafer_slots is None: + if type(configs) == str: + configs = yaml.safe_load(open(configs, "r")) + + ctx = core.Context(configs.get('context_file')) + optics_config_fn = configs.get('optics_config_fn') + + obs_tags = ctx.obsdb.get(obs_id, tags=True)['tags'] + if sso_name is None: + known_source_names = ['moon', 'jupiter'] + for _source_name in known_source_names: + if _source_name in obs_tags: + sso_name = _source_name + if _source_name is None: + raise ValueError('sso_name is not specified') + + wafer_slots = [] + tod = ctx.get_obs(obs_id, dets=[]) + for ws in [f'ws{i}' for i in range(7)]: + hit_time = mbp.get_rough_hit_time(tod, wafer_slot=ws, sso_name=sso_name, circle_r_deg=hit_circle_r_deg, + optics_config_fn=optics_config_fn) + logger.info(f'hit_time for {ws} is {hit_time:.1f} [sec]') + if hit_time > hit_time_threshold: + wafer_slots.append(ws) + assert np.all(np.array(wafer_slots, dtype='U2') == 'ws') + + logger.info(f'wafer_slots which pointing calculated: {wafer_slots}') + for wafer_slot in wafer_slots: + main_one_wafer(configs, + obs_id, + wafer_slot, + sso_name=sso_name, + fp_hdf_file=fp_hdf_file, + fp_hdf_dir=fp_hdf_dir, + save_dir=save_dir, + restrict_dets_for_debug=restrict_dets_for_debug) + return + + def get_parser(): parser = argparse.ArgumentParser(description="Get updated result of pointings with tod-based results") - parser.add_argument("ctx_file", type=str, help="Path to the context 
file.") + parser.add_argument("configs", type=str, help="Path to the configuration file") parser.add_argument("obs_id", type=str, help="Observation ID.") - parser.add_argument("wafer_slot", type=int, help="Wafer slot number.") - parser.add_argument("sso_name", type=str, default=None, help="Name of the Solar System Object (SSO).") - parser.add_argument("save_dir", type=str, help="Directory to save the result.") - parser.add_argument("restrict_dets_for_debug", action="store_true", help="Flag to restrict the number of detectors.") + parser.add_argument("--wafer_slots", nargs='*', default=None, help="Wafer slots to be processed") + parser.add_argument("--sso_name", type=str, default=None, help="Name of the Solar System Object (SSO).") + parser.add_argument("--fp_hdf_file", type=str, default=None, help="File path to the focal_plane hdf file used as a prior") + parser.add_argument("--fp_hdf_dir", type=str, default=None, + help="Directory path where focal_plane hdf file of each observation are stored. 
Used only fp_hdf_file is not specified.") + parser.add_argument("--save_dir", type=str, help="Directory to save the result.") + parser.add_argument("--restrict_dets_for_debug", type=int, help="Flag to restrict the number of detectors.") return parser if __name__ == '__main__': From 34481e7108541aeef800e1a48b8c4c5f444ef3b7 Mon Sep 17 00:00:00 2001 From: tterasaki Date: Thu, 9 May 2024 07:45:20 +0000 Subject: [PATCH 10/48] main function is done --- sotodlib/coords/map_based_pointing.py | 8 +- .../site_pipeline/make_mapbased_pointing.py | 343 +++++++++++------- sotodlib/site_pipeline/update_pointing.py | 288 ++++++++++----- 3 files changed, 414 insertions(+), 225 deletions(-) diff --git a/sotodlib/coords/map_based_pointing.py b/sotodlib/coords/map_based_pointing.py index 9f87ae406..23e814713 100644 --- a/sotodlib/coords/map_based_pointing.py +++ b/sotodlib/coords/map_based_pointing.py @@ -477,12 +477,10 @@ def get_xieta_from_maps(map_hdf_file, filename = 'focal_plane_' + os.path.splitext(os.path.basename(map_hdf_file))[0] + '.hdf' output_file = os.path.join(output_dir, filename) - focal_plane = metadata.ResultSet(keys=['dets:readout_id', 'band', 'channel', 'R2', 'xi', 'eta', 'gamma']) + focal_plane = metadata.ResultSet(keys=['dets:readout_id', 'xi', 'eta', 'gamma', 'R2']) for det in dets: - band = int(det.split('_')[-2]) - channel = int(det.split('_')[-1]) - focal_plane.rows.append((det, band, channel, xieta_dict[det]['R2'], - xieta_dict[det]['xi'], xieta_dict[det]['eta'], 0.)) + focal_plane.rows.append((det, xieta_dict[det]['xi'], xieta_dict[det]['eta'], 0., + xieta_dict[det]['R2'])) write_dataset(focal_plane, output_file, 'focal_plane', overwrite=True) return focal_plane diff --git a/sotodlib/site_pipeline/make_mapbased_pointing.py b/sotodlib/site_pipeline/make_mapbased_pointing.py index 3adc0a401..faed09a43 100644 --- a/sotodlib/site_pipeline/make_mapbased_pointing.py +++ b/sotodlib/site_pipeline/make_mapbased_pointing.py @@ -2,66 +2,61 @@ import numpy as np import 
yaml import argparse +import time +import glob from sotodlib import core from sotodlib import coords from sotodlib import tod_ops -from sotodlib.tod_ops.filters import high_pass_sine2, low_pass_sine2, fourier_filter from sotodlib.coords import map_based_pointing as mbp from sotodlib.site_pipeline import update_pointing as up -from sotodlib.io.metadata import write_dataset +from sotodlib.io import metadata +from sotodlib.io.metadata import read_dataset, write_dataset from sotodlib.site_pipeline import util from sotodlib.preprocess import Pipeline logger = util.init_logger(__name__, 'make_map_based_pointing: ') -def main_one_wafer(configs, obs_id, wafer_slot, - sso_name=None, - single_det_maps_dir=None, map_based_result_dir=None, tod_based_result_dir=None, - tune_by_tod=None, restrict_dets_for_debug=False): +def _get_sso_names_from_tags(ctx, obs_id, candidate_names=['moon', 'jupiter']): + obs_tags = ctx.obsdb.get(obs_id, tags=True)['tags'] + sso_names = [] + for _name in candidate_names: + if _name in obs_tags: + sso_names.append(_name) + if len(sso_names) == 0: + raise NameError('Could not find sso_name from observation tags') + else: + return sso_names +def main_one_wafer(configs, obs_id, wafer_slot, sso_name=None, + restrict_dets_for_debug=False): if type(configs) == str: configs = yaml.safe_load(open(configs, "r")) # Derive parameters from config file + # required parameters ctx = core.Context(configs.get('context_file')) - if single_det_maps_dir is None: - single_det_maps_dir = configs.get('single_det_maps_dir') - if map_based_result_dir is None: - map_based_result_dir = configs.get('map_based_result_dir') - if tod_based_result_dir is None: - tod_based_result_dir = configs.get('tod_based_result_dir') + single_det_maps_dir = configs.get('single_det_maps_dir') + result_dir = configs.get('result_dir') optics_config_fn = configs.get('optics_config_fn') + save_normal_roll = configs.get('save_normal_roll') + save_force_zero_roll = configs.get('save_force_zero_roll') + 
+ # optional parameters xieta_bs_offset = configs.get('xieta_bs_offset', [0., 0.]) wafer_mask_deg = configs.get('wafer_mask_deg', 8.) res_deg = configs.get('res_deg', 0.3) edge_avoidance_deg = configs.get('edge_avoidance_deg', 0.3) - save_normal_roll = configs.get('save_normal_roll', True) - save_force_zero_roll = configs.get('save_force_zero_roll', True) - # parameters for tod tuning - tune_by_tod = configs.get('tune_by_tod') - if tune_by_tod: - tod_ds_factor = configs.get('tod_ds_factor') - tod_mask_deg = configs.get('tod_mask_deg') - tod_fit_func_name = configs.get('tod_fit_func_name') - tod_max_non_linear_order = configs.get('tod_max_non_linear_order') - tod_fwhm_init_deg = configs.get('tod_fwhm_init_deg') - tod_error_estimation_method = configs.get('tod_error_estimation_method') - tod_flag_name_rms_calc = configs.get('tod_flag_name_rms_calc') - tod_flag_rms_calc_exclusive = configs.get('tod_flag_rms_calc_exclusive') - - - # If sso_name is not specified, get sso name from observation tags - obs_tags = ctx.obsdb.get(obs_id, tags=True)['tags'] if sso_name is None: - known_source_names = ['moon', 'jupiter'] - for _source_name in known_source_names: - if _source_name in obs_tags: - sso_name = _source_name - if _source_name is None: - raise ValueError('sso_name is not specified') - + logger.info('deriving sso_name from observation tag') + obs_tags = ctx.obsdb.get(obs_id, tags=True)['tags'] + sso_names = _get_sso_names_from_tags(ctx, obs_id) + sso_name = sso_names[0] + if len(sso_names) >= 2: + logger.info(f'sso_names of {sso_names} are found from observation tags.' 
+ + f'Processing only {sso_name}') + # Load data logger.info('loading data') meta = ctx.get_meta(obs_id, dets={'wafer_slot': wafer_slot}) @@ -88,125 +83,203 @@ def main_one_wafer(configs, obs_id, wafer_slot, logger.info(f'Saving map-based pointing results') fp_rset_map_based = mbp.get_xieta_from_maps(map_hdf, save=True, - output_dir=map_based_result_dir, - filename=result_filename, - force_zero_roll=False, - edge_avoidance = edge_avoidance_deg*coords.DEG) - - if tune_by_tod: - logger.info(f'Making tod-based pointing results') - up.wrap_fp_rset(tod, fp_rset_map_based) - fp_rset_tod_based = up.update_xieta( tod, - sso_name=sso_name, - fp_hdf_file=None, - force_zero_roll=False, - pipe=None, - ds_factor=tod_ds_factor, - mask_deg=tod_mask_deg, - fit_func_name = tod_fit_func_name, - max_non_linear_order = tod_max_non_linear_order, - fwhm_init_deg = tod_fwhm_init_deg, - error_estimation_method=tod_error_estimation_method, - flag_name_rms_calc = tod_flag_name_rms_calc, - flag_rms_calc_exclusive = tod_flag_rms_calc_exclusive, - ) - os.makedirs(tod_based_result_dir, exist_ok=True) - write_dataset(fp_rset_tod_based, - filename=os.path.join(tod_based_result_dir, f'focal_plane_{obs_id}_{wafer_slot}.hdf'), - address='focal_plane', - overwrite=True) + output_dir=result_dir, + filename=result_filename, + force_zero_roll=False, + edge_avoidance = edge_avoidance_deg*coords.DEG) if save_force_zero_roll: logger.info(f'Saving map-based pointing results (force-zero-roll)') - map_based_result_dir_force_zero_roll = map_based_result_dir + '_force_zero_roll' + result_dir_force_zero_roll = result_dir + '_force_zero_roll' fp_rset_map_based_force_zero_roll = mbp.get_xieta_from_maps(map_hdf, save=True, - output_dir=map_based_result_dir_force_zero_roll, + output_dir=result_dir_force_zero_roll, filename=result_filename, force_zero_roll=True, edge_avoidance = edge_avoidance_deg*coords.DEG) - if tune_by_tod: - logger.info(f'Making tod-based pointing results (force-zero-roll)') - 
up.wrap_fp_rset(tod, fp_rset_map_based_force_zero_roll) - tod_based_result_dir_force_zero_roll = tod_based_result_dir + '_force_zero_roll' - fp_rset_tod_based_force_zero_roll = up.update_xieta( tod, - sso_name=sso_name, - fp_hdf_file=None, - force_zero_roll=False, - pipe=None, - ds_factor=tod_ds_factor, - mask_deg=tod_mask_deg, - fit_func_name = tod_fit_func_name, - max_non_linear_order = tod_max_non_linear_order, - fwhm_init_deg = tod_fwhm_init_deg, - error_estimation_method=tod_error_estimation_method, - flag_name_rms_calc = tod_flag_name_rms_calc, - flag_rms_calc_exclusive = tod_flag_rms_calc_exclusive, - ) - os.makedirs(tod_based_result_dir_force_zero_roll, exist_ok=True) - write_dataset(fp_rset_tod_based_force_zero_roll, - filename=os.path.join(tod_based_result_dir_force_zero_roll, f'focal_plane_{obs_id}_{wafer_slot}.hdf'), - address='focal_plane', - overwrite=True) return -def main(configs, obs_id, wafer_slots, - sso_name=None, - single_det_maps_dir=None, map_based_result_dir=None, tod_based_result_dir=None, - tune_by_tod=None, hit_time_threshold=1200, hit_circle_r_deg=7.0, - restrict_dets_for_debug=False): - - logger.info('get wafer_slots which hit the source because wafer_slots are not specified') - if wafer_slots is None: - if type(configs) == str: - configs = yaml.safe_load(open(configs, "r")) - - ctx = core.Context(configs.get('context_file')) - optics_config_fn = configs.get('optics_config_fn') +def main_one_wafer_dummy(configs, obs_id, wafer_slot, restrict_dets_for_debug=False): + if type(configs) == str: + configs = yaml.safe_load(open(configs, "r")) + ctx = core.Context(configs.get('context_file')) + single_det_maps_dir = configs.get('single_det_maps_dir') + result_dir = configs.get('result_dir') + save_normal_roll = configs.get('save_normal_roll', True) + save_force_zero_roll = configs.get('save_force_zero_roll', True) + + meta = ctx.get_meta(obs_id, dets={'wafer_slot': wafer_slot}) + if restrict_dets_for_debug is not False: + meta.restrict('dets', 
meta.dets.vals[:restrict_dets_for_debug]) + result_filename = f'focal_plane_{obs_id}_{wafer_slot}.hdf' + + fp_rset_dummy_map_based = metadata.ResultSet(keys=['dets:readout_id', 'xi', 'eta', 'gamma', 'R2']) + for det in meta.dets.vals: + fp_rset_dummy_map_based.rows.append((det, np.nan, np.nan, np.nan, np.nan)) + if save_normal_roll: + os.makedirs(result_dir, exist_ok=True) + write_dataset(fp_rset_dummy_map_based, + filename=os.path.join(result_dir, result_filename), + address='focal_plane', + overwrite=True) + + if save_force_zero_roll: + result_dir_force_zero_roll = result_dir + '_force_zero_roll' + os.makedirs(result_dir_force_zero_roll, exist_ok=True) + write_dataset(fp_rset_dummy_map_based, + filename=os.path.join(result_dir_force_zero_roll, result_filename), + address='focal_plane', + overwrite=True) + return + +def combine_pointings(pointing_result_files): + combined_dict = {} + for file in pointing_result_files: + rset = read_dataset(file, 'focal_plane') + for row in rset[:]: + if row['dets:readout_id'] not in combined_dict.keys(): + combined_dict[row['dets:readout_id']] = {} + combined_dict[row['dets:readout_id']]['xi'] = row['xi'] + combined_dict[row['dets:readout_id']]['eta'] = row['eta'] + combined_dict[row['dets:readout_id']]['gamma'] = row['gamma'] + combined_dict[row['dets:readout_id']]['R2'] = row['R2'] + + focal_plane = metadata.ResultSet(keys=['dets:readout_id', 'xi', 'eta', 'gamma', 'R2']) + + for det, val in combined_dict.items(): + focal_plane.rows.append((det, val['xi'], val['eta'], val['gamma'], val['R2'])) + return focal_plane + +def main_one_obs(configs, obs_id, sso_name=None, + restrict_dets_for_debug=False): + if type(configs) == str: + configs = yaml.safe_load(open(configs, "r")) + ctx = core.Context(configs.get('context_file')) + optics_config_fn = configs.get('optics_config_fn') + + result_dir = configs.get('result_dir') + save_normal_roll = configs.get('save_normal_roll') + save_force_zero_roll = configs.get('save_force_zero_roll') + + 
hit_time_threshold = configs.get('hit_time_threshold', 600) + hit_circle_r_deg = configs.get('hit_circle_r_deg', 7.0) + + if sso_name is None: + logger.info('deriving sso_name from observation tag') obs_tags = ctx.obsdb.get(obs_id, tags=True)['tags'] - if sso_name is None: - known_source_names = ['moon', 'jupiter'] - for _source_name in known_source_names: - if _source_name in obs_tags: - sso_name = _source_name - if _source_name is None: - raise ValueError('sso_name is not specified') - - wafer_slots = [] - tod = ctx.get_obs(obs_id, dets=[]) - for ws in [f'ws{i}' for i in range(7)]: - hit_time = mbp.get_rough_hit_time(tod, wafer_slot=ws, sso_name=sso_name, circle_r_deg=hit_circle_r_deg, - optics_config_fn=optics_config_fn) - logger.info(f'hit_time for {ws} is {hit_time:.1f} [sec]') - if hit_time > hit_time_threshold: - wafer_slots.append(ws) - assert np.all(np.array(wafer_slots, dtype='U2') == 'ws') - - logger.info(f'wafer_slots which pointing calculated: {wafer_slots}') - for wafer_slot in wafer_slots: + sso_names = _get_sso_names_from_tags(ctx, obs_id) + sso_name = sso_names[0] + if len(sso_names) >= 2: + logger.info(f'sso_names of {sso_names} are found from observation tags.' 
+ + f'Processing only {sso_name}') + + tod = ctx.get_obs(obs_id, dets=[]) + streamed_wafer_slots = ['ws{}'.format(index) for index, bit in enumerate(obs_id.split('_')[-1]) if bit == '1'] + processed_wafer_slots = [] + skipped_wafer_slots = [] + + for ws in streamed_wafer_slots: + hit_time = mbp.get_rough_hit_time(tod, wafer_slot=ws, sso_name=sso_name, circle_r_deg=hit_circle_r_deg, + optics_config_fn=optics_config_fn) + logger.info(f'hit_time for {ws} is {hit_time:.1f} [sec]') + if hit_time > hit_time_threshold: + processed_wafer_slots.append(ws) + else: + skipped_wafer_slots.append(ws) + + logger.info(f'wafer_slots which pointing calculated: {processed_wafer_slots}') + for wafer_slot in processed_wafer_slots: + logger.info(f'Processing {obs_id}, {wafer_slot}') main_one_wafer(configs=configs, obs_id=obs_id, wafer_slot=wafer_slot, sso_name=sso_name, - single_det_maps_dir=single_det_maps_dir, - map_based_result_dir=map_based_result_dir, - tod_based_result_dir=tod_based_result_dir, - tune_by_tod=tune_by_tod, restrict_dets_for_debug=restrict_dets_for_debug) + + logger.info(f'create dummy hdf for non-hitting wafer: {skipped_wafer_slots}') + for wafer_slot in skipped_wafer_slots: + main_one_wafer_dummy(configs=configs, + obs_id=obs_id, + wafer_slot=wafer_slot, + restrict_dets_for_debug=restrict_dets_for_debug) + + logger.info('making combined result') + if save_normal_roll: + pointing_result_files = glob.glob(os.path.join(result_dir, f'focal_plane_{obs_id}_ws[0-6].hdf')) + fp_rset_full = combine_pointings(pointing_result_files) + fp_rset_full_file = os.path.join(os.path.join(result_dir, f'focal_plane_{obs_id}_all.hdf')) + write_dataset(fp_rset_full, filename=fp_rset_full_file, + address='focal_plane', overwrite=True) + + + if save_force_zero_roll: + result_dir_force_zero_roll = result_dir + '_force_zero_roll' + pointing_result_files = glob.glob(os.path.join(result_dir_force_zero_roll, f'focal_plane_{obs_id}_ws[0-6].hdf')) + fp_rset_full = 
combine_pointings(pointing_result_files) + fp_rset_full_file = os.path.join(os.path.join(result_dir_force_zero_roll, f'focal_plane_{obs_id}_all.hdf')) + write_dataset(fp_rset_full, filename=fp_rset_full_file, + address='focal_plane', overwrite=True) + + + return + +def main(configs, min_ctime=None, max_ctime=None, update_delay=None, + obs_id=None, wafer_slot=None, sso_name=None, restrict_dets_for_debug=False): + if (min_ctime is None) and (update_delay is not None): + # If min_ctime is provided it will use that.. + # Otherwise it will use update_delay to set min_ctime. + min_ctime = int(time.time()) - update_delay*86400 + + if type(configs) == str: + configs = yaml.safe_load(open(configs, "r")) + ctx = core.Context(configs.get('context_file')) + + if obs_id is None: + query_text = configs.get('query_text', None) + query_tags = configs.get('query_tags', None) + tot_query = "and " + if query_text is not None: + tot_query += f"{query_text} and " + if min_ctime is not None: + tot_query += f"timestamp>={min_ctime} and " + if max_ctime is not None: + tot_query += f"timestamp<={max_ctime} and " + tot_query = tot_query[4:-4] + if tot_query == "": + tot_query = "1" + + logger.info(f'tot_query: {tot_query}') + obs_list= ctx.obsdb.query(tot_query, query_tags) + + for obs in obs_list: + obs_id = obs['obs_id'] + logger.info(f'Processing {obs_id}') + main_one_obs(configs=configs, obs_id=obs_id, + restrict_dets_for_debug=restrict_dets_for_debug) + + elif obs_id is not None: + if wafer_slot is None: + main_one_obs(configs=configs, obs_id=obs_id, sso_name=sso_name, + restrict_dets_for_debug=restrict_dets_for_debug) + else: + main_one_wafer(configs=configs, obs_id=obs_id, wafer_slot=wafer_slot, sso_name=sso_name, + restrict_dets_for_debug=restrict_dets_for_debug) def get_parser(): parser = argparse.ArgumentParser(description="Process TOD data and update pointing") parser.add_argument("configs", type=str, help="Path to the configuration file") - parser.add_argument("obs_id", 
type=str, help="Observation id") - parser.add_argument("--wafer_slots", nargs='*', default=None, help="Wafer slots to be processed") - parser.add_argument("--sso_name", type=str, default=None, help="Name of solar system object (e.g., 'moon', 'jupiter')") - parser.add_argument("--single_det_maps_dir", type=str, default=None, help="Directory to save single detector maps") - parser.add_argument("--map_based_result_dir", type=str, default=None, help="Directory to save map-based pointing results") - parser.add_argument("--tod_based_result_dir", type=str, default=None, help="Directory to save TOD-based pointing results") - parser.add_argument("--hit_time_threshold", type=float, default=1200, - help="Minimum hit time. If calculated wafer hit time is smaller than that, pointing calculation for that wafer is skipped") - parser.add_argument("--hit_circle_r_deg", type=float, default=7., - help="circle radius for wafer hit time calculation") + parser.add_argument('--min_ctime', type=int, help="Minimum timestamp for the beginning of an observation list") + parser.add_argument('--max_ctime', type=int, help="Maximum timestamp for the beginning of an observation list") + parser.add_argument('--update-delay', type=int, help="Number of days (unit is days) in the past to start observation list.") + parser.add_argument("--obs_id", type=str, + help="Specific observation obs_id to process. If provided, overrides other filtering parameters.") + + parser.add_argument("--wafer_slot", type=str, default=None, + help="Wafer slot to be processed (e.g., 'ws0', 'ws3'). Valid only when obs_id is specified.") + + parser.add_argument("--sso_name", type=str, default=None, + help="Name of solar system object (e.g., 'moon', 'jupiter'). If not specified, get sso_name from observation tags. 
"\ + + "Valid only when obs_id is specified") parser.add_argument("--restrict_dets_for_debug", type=int, default=False) return parser diff --git a/sotodlib/site_pipeline/update_pointing.py b/sotodlib/site_pipeline/update_pointing.py index 9c98c2f69..7f1c73b31 100644 --- a/sotodlib/site_pipeline/update_pointing.py +++ b/sotodlib/site_pipeline/update_pointing.py @@ -3,8 +3,10 @@ import yaml import h5py import argparse +import time +import glob from tqdm import tqdm -import scipy + from scipy.optimize import curve_fit from sotodlib.core import metadata from sotodlib.io.metadata import read_dataset, write_dataset @@ -19,6 +21,17 @@ from sotodlib.preprocess import Pipeline logger = util.init_logger(__name__, 'update_pointing: ') +def _get_sso_names_from_tags(ctx, obs_id, candidate_names=['moon', 'jupiter']): + obs_tags = ctx.obsdb.get(obs_id, tags=True)['tags'] + sso_names = [] + for _name in candidate_names: + if _name in obs_tags: + sso_names.append(_name) + if len(sso_names) == 0: + raise NameError('Could not find sso_name from observation tags') + else: + return sso_names + def gaussian2d_nonlin(xieta, xi0, eta0, fwhm_xi, fwhm_eta, phi, a, nonlin_coeffs): """ An Gaussian beam model with non-linear response Args @@ -80,20 +93,19 @@ def wrap_fp_from_hdf(tod, fp_hdf_file, data_set='focal_plane'): def update_xieta(tod, - sso_name='moon', + sso_name=None, fp_hdf_file=None, - save_dir=None, - pipe=None, force_zero_roll=False, + pipe=None, ds_factor=10, mask_deg=3, fit_func_name = 'gaussian2d_nonlin', max_non_linear_order = 1, fwhm_init_deg = 0.5, - error_estimation_method='force_one_redchi2', # rms_from_data + error_estimation_method='force_one_redchi2', flag_name_rms_calc = 'source', flag_rms_calc_exclusive = True, - save=True, ): + ): """ Update xieta parameters for each detector by TOD fitting of a point source observation. @@ -123,7 +135,7 @@ def update_xieta(tod, - fwhm_init_deg (float): Initial guess for full width at half maximum in degrees. Default is 0.5. 
- error_estimation_method (str): - Method for error estimation. Default is 'rms_from_data'. 'rms_from_data' and 'force_one_redchi2' are supported. + Method for error estimation. Default is 'force_one_redchi2'. 'force_one_redchi2' and 'rms_from_data' are supported. If 'rms_from_data', errorbar of each data point is set by root-mean-square of the data points flaged by 'flag_name_rms_calc', and errorbar of xi,eta is set from the fit covariance matrix. If 'force_one_redchi2', the errorbar of (xi,eta) is equivalent the case if the error bar of each data point is set as the reduced chi-square is equal to unity. @@ -131,8 +143,6 @@ def update_xieta(tod, Name of the flag used for RMS calculation. Default is 'source'. - flag_rms_calc_exclusive (bool): Flag indicating whether the RMS calculation is exclusive to the flag. Default is True. - - save (bool): - Flag indicating whether to save the updated focal plane data. Default is True. Returns: - focal_plane (ResultSet): ResultSet containing updated xieta parameters for each detector. 
@@ -270,9 +280,8 @@ def update_xieta(tod, return focal_plane -def main_one_wafer(configs, obs_id, wafer_slot, - sso_name=None, fp_hdf_file=None, fp_hdf_dir=None, - save_dir=None, restrict_dets_for_debug=False): +def main_one_wafer(configs, obs_id, wafer_slot, sso_name=None, + restrict_dets_for_debug=False): if type(configs) == str: configs = yaml.safe_load(open(configs, "r")) @@ -280,47 +289,45 @@ def main_one_wafer(configs, obs_id, wafer_slot, ctx = core.Context(configs.get('context_file')) # get prior + fp_hdf_file = configs.get('fp_hdf_file', None) + fp_hdf_dir = configs.get('fp_hdf_dir', None) if fp_hdf_file is None: - fp_hdf_file = configs.get('fp_hdf_file', None) - if fp_hdf_dir is None: - fp_hdf_dir = configs.get('fp_hdf_dir', None) - if fp_hdf_file is None: - if fp_hdf_dir is None: - pass - else: + if fp_hdf_dir is not None: fp_hdf_file = os.path.join(fp_hdf_dir, f'focal_plane_{obs_id}_{wafer_slot}.hdf') if not os.path.exists(fp_hdf_file): fp_hdf_file = None - - if save_dir is None: - save_dir = configs.get('save_dir', None) + + result_dir = configs.get('result_dir') + force_zero_roll = configs.get('force_zero_roll', True) + if force_zero_roll: + result_dir = result_dir + '_force_zero_roll' # get sso_name if it is not specified - obs_tags = ctx.obsdb.get(obs_id, tags=True)['tags'] if sso_name is None: - known_source_names = ['moon', 'jupiter'] - for _source_name in known_source_names: - if _source_name in obs_tags: - sso_name = _source_name - if _source_name is None: - raise ValueError('sso_name is not specified') + logger.info('deriving sso_name from observation tag') + obs_tags = ctx.obsdb.get(obs_id, tags=True)['tags'] + sso_names = _get_sso_names_from_tags(ctx, obs_id) + sso_name = sso_names[0] + if len(sso_names) >= 2: + logger.info(f'sso_names of {sso_names} are found from observation tags.' 
+ + f'Processing only {sso_name}') # construct pipeline from configs - pipe = Pipeline(configs["process_pipe"]) + pipe = Pipeline(configs["process_pipe"], logger=logger) for pipe_component in pipe: if pipe_component.name == 'compute_source_flags': pipe_component.process_cfgs['center_on'] = sso_name # Other parameters force_zero_roll = configs.get('force_zero_roll') - ds_factor = configs.get('ds_factor') - mask_deg = configs.get('mask_deg') - fit_func_name = configs.get('fit_func_name') - max_non_linear_order = configs.get('max_non_linear_order') - fwhm_init_deg = configs.get('fwhm_init_deg') - error_estimation_method = configs.get('error_estimation_method') - flag_name_rms_calc = configs.get('flag_name_rms_calc') - flag_rms_calc_exclusive = configs.get('flag_rms_calc_exclusive') + ds_factor = configs.get('ds_factor', 20) + mask_deg = configs.get('mask_deg', 3.0) + fit_func_name = configs.get('fit_func_name', 'gaussian2d_nonlin') + max_non_linear_order = configs.get('max_non_linear_order', 2) + fwhm_init_deg = configs.get('fwhm_init_deg', 0.5) + error_estimation_method = configs.get('error_estimation_method', 'force_one_redchi2') + flag_name_rms_calc = configs.get('flag_name_rms_calc', 'source') + flag_rms_calc_exclusive = configs.get('flag_rms_calc_exclusive', True) # Load data @@ -346,69 +353,180 @@ def main_one_wafer(configs, obs_id, wafer_slot, flag_rms_calc_exclusive = flag_rms_calc_exclusive, ) - os.makedirs(save_dir, exist_ok=True) + os.makedirs(result_dir, exist_ok=True) write_dataset(focal_plane_rset, - filename=os.path.join(save_dir, f'focal_plane_{obs_id}_{wafer_slot}.hdf'), + filename=os.path.join(result_dir, f'focal_plane_{obs_id}_{wafer_slot}.hdf'), address='focal_plane', overwrite=True) return -def main(configs, obs_id, wafer_slots=None, - sso_name=None, fp_hdf_file=None, fp_hdf_dir=None, save_dir=None, - hit_time_threshold=1200, hit_circle_r_deg=7.0, - restrict_dets_for_debug=False): - logger.info('get wafer_slots which hit the source because 
wafer_slots are not specified') - if wafer_slots is None: - if type(configs) == str: - configs = yaml.safe_load(open(configs, "r")) - - ctx = core.Context(configs.get('context_file')) - optics_config_fn = configs.get('optics_config_fn') +def main_one_wafer_dummy(configs, obs_id, wafer_slot, restrict_dets_for_debug=False): + if type(configs) == str: + configs = yaml.safe_load(open(configs, "r")) + ctx = core.Context(configs.get('context_file')) + result_dir = configs.get('result_dir') + force_zero_roll = configs.get('force_zero_roll', True) + if force_zero_roll: + result_dir = result_dir + '_force_zero_roll' + + meta = ctx.get_meta(obs_id, dets={'wafer_slot': wafer_slot}) + if restrict_dets_for_debug is not False: + meta.restrict('dets', meta.dets.vals[:restrict_dets_for_debug]) + result_filename = f'focal_plane_{obs_id}_{wafer_slot}.hdf' + + fp_rset_dummy = metadata.ResultSet(keys=['dets:readout_id', 'xi', 'eta', 'gamma', + 'xi_err', 'eta_err', 'R2', 'redchi2']) + for det in meta.dets.vals: + fp_rset_dummy.rows.append((det, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan)) + os.makedirs(result_dir, exist_ok=True) + write_dataset(fp_rset_dummy, + filename=os.path.join(result_dir, result_filename), + address='focal_plane', + overwrite=True) + return + +def combine_pointings(pointing_result_files): + combined_dict = {} + for file in pointing_result_files: + rset = read_dataset(file, 'focal_plane') + for row in rset[:]: + if row['dets:readout_id'] not in combined_dict.keys(): + combined_dict[row['dets:readout_id']] = {} + combined_dict[row['dets:readout_id']]['xi'] = row['xi'] + combined_dict[row['dets:readout_id']]['eta'] = row['eta'] + combined_dict[row['dets:readout_id']]['gamma'] = row['gamma'] + combined_dict[row['dets:readout_id']]['xi_err'] = row['xi_err'] + combined_dict[row['dets:readout_id']]['eta_err'] = row['eta_err'] + combined_dict[row['dets:readout_id']]['R2'] = row['R2'] + combined_dict[row['dets:readout_id']]['redchi2'] = row['redchi2'] + + 
focal_plane = metadata.ResultSet(keys=['dets:readout_id', 'xi', 'eta', 'gamma', 'xi_err', 'eta_err', 'R2', 'redchi2']) + + for det, val in combined_dict.items(): + focal_plane.rows.append((det, val['xi'], val['eta'], val['gamma'], val['xi_err'], val['eta_err'], val['R2'], val['redchi2'])) + return focal_plane + +def main_one_obs(configs, obs_id, sso_name=None, + restrict_dets_for_debug=False): + if type(configs) == str: + configs = yaml.safe_load(open(configs, "r")) + ctx = core.Context(configs.get('context_file')) + result_dir = configs.get('result_dir') + force_zero_roll = configs.get('force_zero_roll', True) + if force_zero_roll: + result_dir = result_dir + '_force_zero_roll' + optics_config_fn = configs.get('optics_config_fn') + + hit_time_threshold = configs.get('hit_time_threshold', 600) + hit_circle_r_deg = configs.get('hit_circle_r_deg', 7.0) + + if sso_name is None: + logger.info('deriving sso_name from observation tag') obs_tags = ctx.obsdb.get(obs_id, tags=True)['tags'] - if sso_name is None: - known_source_names = ['moon', 'jupiter'] - for _source_name in known_source_names: - if _source_name in obs_tags: - sso_name = _source_name - if _source_name is None: - raise ValueError('sso_name is not specified') - - wafer_slots = [] - tod = ctx.get_obs(obs_id, dets=[]) - for ws in [f'ws{i}' for i in range(7)]: - hit_time = mbp.get_rough_hit_time(tod, wafer_slot=ws, sso_name=sso_name, circle_r_deg=hit_circle_r_deg, - optics_config_fn=optics_config_fn) - logger.info(f'hit_time for {ws} is {hit_time:.1f} [sec]') - if hit_time > hit_time_threshold: - wafer_slots.append(ws) - assert np.all(np.array(wafer_slots, dtype='U2') == 'ws') - - logger.info(f'wafer_slots which pointing calculated: {wafer_slots}') - for wafer_slot in wafer_slots: - main_one_wafer(configs, - obs_id, - wafer_slot, + sso_names = _get_sso_names_from_tags(ctx, obs_id) + sso_name = sso_names[0] + if len(sso_names) >= 2: + logger.info(f'sso_names of {sso_names} are found from observation tags.' 
+ + f'Processing only {sso_name}') + + tod = ctx.get_obs(obs_id, dets=[]) + streamed_wafer_slots = ['ws{}'.format(index) for index, bit in enumerate(obs_id.split('_')[-1]) if bit == '1'] + processed_wafer_slots = [] + skipped_wafer_slots = [] + for ws in streamed_wafer_slots: + hit_time = mbp.get_rough_hit_time(tod, wafer_slot=ws, sso_name=sso_name, circle_r_deg=hit_circle_r_deg, + optics_config_fn=optics_config_fn) + logger.info(f'hit_time for {ws} is {hit_time:.1f} [sec]') + if hit_time > hit_time_threshold: + processed_wafer_slots.append(ws) + else: + skipped_wafer_slots.append(ws) + + logger.info(f'wafer_slots which pointing calculated: {processed_wafer_slots}') + for wafer_slot in processed_wafer_slots: + logger.info(f'Processing {obs_id}, {wafer_slot}') + main_one_wafer(configs=configs, + obs_id=obs_id, + wafer_slot=wafer_slot, sso_name=sso_name, - fp_hdf_file=fp_hdf_file, - fp_hdf_dir=fp_hdf_dir, - save_dir=save_dir, restrict_dets_for_debug=restrict_dets_for_debug) + + logger.info(f'create dummy hdf for non-hitting wafer: {skipped_wafer_slots}') + for wafer_slot in skipped_wafer_slots: + main_one_wafer_dummy(configs=configs, + obs_id=obs_id, + wafer_slot=wafer_slot, + restrict_dets_for_debug=restrict_dets_for_debug) + + logger.info('making combined result') + pointing_result_files = glob.glob(os.path.join(result_dir, f'focal_plane_{obs_id}_ws[0-6].hdf')) + fp_rset_full = combine_pointings(pointing_result_files) + fp_rset_full_file = os.path.join(os.path.join(result_dir, f'focal_plane_{obs_id}_all.hdf')) + write_dataset(fp_rset_full, filename=fp_rset_full_file, + address='focal_plane', overwrite=True) + +def main(configs, min_ctime=None, max_ctime=None, update_delay=None, + obs_id=None, wafer_slot=None, sso_name=None, restrict_dets_for_debug=False): + if (min_ctime is None) and (update_delay is not None): + # If min_ctime is provided it will use that.. + # Otherwise it will use update_delay to set min_ctime. 
+ min_ctime = int(time.time()) - update_delay*86400 + + if type(configs) == str: + configs = yaml.safe_load(open(configs, "r")) + ctx = core.Context(configs.get('context_file')) + + if obs_id is None: + query_text = configs.get('query_text', None) + query_tags = configs.get('query_tags', None) + tot_query = "and " + if query_text is not None: + tot_query += f"{query_text} and " + if min_ctime is not None: + tot_query += f"timestamp>={min_ctime} and " + if max_ctime is not None: + tot_query += f"timestamp<={max_ctime} and " + tot_query = tot_query[4:-4] + if tot_query == "": + tot_query = "1" + + logger.info(f'tot_query: {tot_query}') + obs_list= ctx.obsdb.query(tot_query, query_tags) + + for obs in obs_list: + obs_id = obs['obs_id'] + logger.info(f'Processing {obs_id}') + main_one_obs(configs=configs, obs_id=obs_id, + restrict_dets_for_debug=restrict_dets_for_debug) + + elif obs_id is not None: + if wafer_slot is None: + main_one_obs(configs=configs, obs_id=obs_id, sso_name=sso_name, + restrict_dets_for_debug=restrict_dets_for_debug) + else: + main_one_wafer(configs=configs, obs_id=obs_id, wafer_slot=wafer_slot, sso_name=sso_name, + restrict_dets_for_debug=restrict_dets_for_debug) return def get_parser(): parser = argparse.ArgumentParser(description="Get updated result of pointings with tod-based results") parser.add_argument("configs", type=str, help="Path to the configuration file") - parser.add_argument("obs_id", type=str, help="Observation ID.") - parser.add_argument("--wafer_slots", nargs='*', default=None, help="Wafer slots to be processed") - parser.add_argument("--sso_name", type=str, default=None, help="Name of the Solar System Object (SSO).") - parser.add_argument("--fp_hdf_file", type=str, default=None, help="File path to the focal_plane hdf file used as a prior") - parser.add_argument("--fp_hdf_dir", type=str, default=None, - help="Directory path where focal_plane hdf file of each observation are stored. 
Used only fp_hdf_file is not specified.") - parser.add_argument("--save_dir", type=str, help="Directory to save the result.") - parser.add_argument("--restrict_dets_for_debug", type=int, help="Flag to restrict the number of detectors.") + parser.add_argument('--min_ctime', type=int, help="Minimum timestamp for the beginning of an observation list") + parser.add_argument('--max_ctime', type=int, help="Maximum timestamp for the beginning of an observation list") + parser.add_argument('--update-delay', type=int, help="Number of days (unit is days) in the past to start observation list.") + parser.add_argument("--obs_id", type=str, + help="Specific observation obs_id to process. If provided, overrides other filtering parameters.") + + parser.add_argument("--wafer_slot", type=str, default=None, + help="Wafer slot to be processed (e.g., 'ws0', 'ws3'). Valid only when obs_id is specified.") + + parser.add_argument("--sso_name", type=str, default=None, + help="Name of solar system object (e.g., 'moon', 'jupiter'). If not specified, get sso_name from observation tags. 
"\ + + "Valid only when obs_id is specified") + parser.add_argument("--restrict_dets_for_debug", type=int, default=False) return parser if __name__ == '__main__': From 2da796509a015182df4d0f33f46f9992c1bd859f Mon Sep 17 00:00:00 2001 From: tterasaki Date: Fri, 10 May 2024 05:11:48 +0000 Subject: [PATCH 11/48] removed combine_focal_plane.py which is no longer used --- .../site_pipeline/combine_focal_planes.py | 130 ------------------ 1 file changed, 130 deletions(-) delete mode 100644 sotodlib/site_pipeline/combine_focal_planes.py diff --git a/sotodlib/site_pipeline/combine_focal_planes.py b/sotodlib/site_pipeline/combine_focal_planes.py deleted file mode 100644 index b8a1184a4..000000000 --- a/sotodlib/site_pipeline/combine_focal_planes.py +++ /dev/null @@ -1,130 +0,0 @@ -import os -import re -import glob -import numpy as np - -from sotodlib.core import metadata -from sotodlib.io.metadata import write_dataset, read_dataset - -from sotodlib.site_pipeline import util -logger = util.init_logger(__name__, 'combine_focal_planes: ') - -def combine_pointings(pointing_result_files, method='highest_R2', R2_threshold=0.3, - save=False, output_dir=None, save_name=None): - combined_dict = {} - for file in pointing_result_files: - rset = read_dataset(file, 'focal_plane') - for row in rset[:]: - if row['dets:readout_id'] not in combined_dict.keys(): - combined_dict[row['dets:readout_id']] = {} - combined_dict[row['dets:readout_id']]['band'] = row['band'] - combined_dict[row['dets:readout_id']]['channel'] = row['channel'] - - combined_dict[row['dets:readout_id']]['R2'] = np.atleast_1d([]) - combined_dict[row['dets:readout_id']]['xi'] = np.atleast_1d([]) - combined_dict[row['dets:readout_id']]['eta'] = np.atleast_1d([]) - combined_dict[row['dets:readout_id']]['gamma'] = np.atleast_1d([]) - - combined_dict[row['dets:readout_id']]['R2'] = np.append(combined_dict[row['dets:readout_id']]['R2'], row['R2']) - combined_dict[row['dets:readout_id']]['xi'] = 
np.append(combined_dict[row['dets:readout_id']]['xi'], row['xi']) - combined_dict[row['dets:readout_id']]['eta'] = np.append(combined_dict[row['dets:readout_id']]['eta'], row['eta']) - combined_dict[row['dets:readout_id']]['gamma'] = np.append(combined_dict[row['dets:readout_id']]['gamma'], row['gamma']) - - focal_plane = metadata.ResultSet(keys=['dets:readout_id', 'band', 'channel', 'R2', 'xi', 'eta', 'gamma']) - for det, val in combined_dict.items(): - band = int(val['band']) - channel = int(val['channel']) - - mask = val['R2'] > R2_threshold - if np.all(~mask): - xi, eta, gamma, R2 = np.nan, np.nan, np.nan, np.nan - else: - if method == 'highest_R2': - idx = np.argmax(val['R2'][mask]) - xi, eta, gamma, R2 = val['xi'][mask][idx], val['eta'][mask][idx], val['gamma'][mask][idx], val['R2'][mask][idx] - elif method == 'mean': - xi, eta, gamma = np.mean(val['xi'][mask]), np.mean(val['eta'][mask]), np.mean(val['gamma'][mask]) - R2 = np.nan - elif method == 'median': - xi, eta, gamma = np.median(val['xi'][mask]), np.median(val['eta'][mask]), np.median(val['gamma'][mask]) - R2 = np.nan - else: - raise ValueError('Not supported method. 
Supported methods are `highest_R2`, `mean` or `median`') - focal_plane.rows.append((det, band, channel, R2, xi, eta, gamma)) - if save: - if output_dir is None: - output_dir = os.path.join(os.getcwd(), 'combined_pointing_results') - if not os.path.exists(output_dir): - os.makedirs(output_dir) - if save_name is None: - ctimes = np.atleast_1d([]) - wafer_slots = np.atleast_1d([]) - for file in pointing_result_files: - filename = os.path.basename(file) - match = re.search('\d{10}', filename) - ctime = int(match.group(0) if match else None) - match = re.search('ws\d{1}', filename) - ws = match.group(0) - ctimes = np.append(ctimes, ctime) - wafer_slots = np.append(wafer_slots, ws) - ctimes = ctimes.astype('int') - wafer_slots = np.sort(np.unique(wafer_slots.astype('U3'))) - save_name = f'focal_plane_{ctimes.min()}_{ctimes.max()}_' + ''.join(wafer_slots) + '.hdf' - - write_dataset(focal_plane, os.path.join(output_dir, save_name), 'focal_plane', overwrite=True) - return focal_plane - -def combine_onewafer_results(pointing_dir, ws, output_dir, filename=None, - method='highest_R2', R2_threshold=0.3,): - pointing_result_files = glob.glob(os.path.join(pointing_dir, f'focal_plane*{ws}.hdf')) - if filename is None: - filename = f'focal_plane_{ws}_combined.hdf' - _ = combine_pointings(pointing_result_files, save=True, output_dir=output_dir, save_name=filename) - return - -def combine_allwafer_results(pointing_dir, output_dir, filename=None, - method='highest_R2', R2_threshold=0.3,): - pointing_result_files = glob.glob(os.path.join(pointing_dir, 'focal_plane*.hdf')) - if filename is None: - filename = f'focal_plane_combined.hdf' - _ = combine_pointings(pointing_result_files, save=True, output_dir=output_dir, save_name=filename) - return - -def make_detabase(focal_plane_file, db_file,): - scheme = metadata.ManifestScheme().add_data_field('dataset') - db = metadata.ManifestDb(scheme=scheme) - db.add_entry({'dataset': 'focal_plane'}, filename=focal_plane_file) - db.to_file(db_file) 
- return - -def main(pointing_dir, output_dir=None, method='highest_R2', R2_threshold=0.3,): - if output_dir is None: - output_dir = os.path.join(os.getcwd(), 'combined_results') - - logger.info('Combining each wafer resluts') - wafer_slots = [f'ws{i}' for i in range(7)] - for ws in wafer_slots: - combine_onewafer_results(pointing_dir=pointing_dir, ws=ws, - output_dir=output_dir, filename=None, - method=method, R2_threshold=R2_threshold) - - logger.info('Combining all wafer resluts') - combine_allwafer_results(pointing_dir=pointing_dir, output_dir=output_dir, filename='focal_plane_combined.hdf', - method=method, R2_threshold=R2_threshold) - - logger.info('Making a database') - focal_plane_file = os.path.join(output_dir, 'focal_plane_combined.hdf') - db_file = os.path.join(output_dir, 'focal_plane_combined.sqlite') - make_detabase(focal_plane_file, db_file,) - return - -def get_parser(): - parser = argparse.ArgumentParser(description="Combine multiple result of pointing.") - parser.add_argument('--pointing_dir', type=str, required=True, help='Directory containing pointing result files.') - parser.add_argument('--output_dir', type=str, default=None, help='Directory to save combined results. Default is "combined_results".') - parser.add_argument('--method', type=str, default='highest_R2', choices=['highest_R2', 'mean', 'median'], help='Combination method. Default is "highest_R2".') - parser.add_argument('--R2_threshold', type=float, default=0.3, help='Threshold for R2 value. 
Default is 0.3.') - return parser - -if __name__ == '__main__': - util.main_launcher(main, get_parser) From 8d8fae46b9c7b03368bd0bbdbb288b5ed026ad2c Mon Sep 17 00:00:00 2001 From: tterasaki Date: Thu, 30 May 2024 09:04:47 +0000 Subject: [PATCH 12/48] removed planets.close() --- sotodlib/coords/map_based_pointing.py | 1 + sotodlib/coords/planets.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/sotodlib/coords/map_based_pointing.py b/sotodlib/coords/map_based_pointing.py index 23e814713..43305b37c 100644 --- a/sotodlib/coords/map_based_pointing.py +++ b/sotodlib/coords/map_based_pointing.py @@ -32,6 +32,7 @@ def get_planet_trajectry(tod, planet, _split=20, return_model=False): If return_model is False: array: Array of quaternions representing trajectry of the planet at each timestamp. """ + print(planet) timestamps_sparse = np.linspace(tod.timestamps[0], tod.timestamps[-1], _split) planet_az_sparse = np.zeros_like(timestamps_sparse) diff --git a/sotodlib/coords/planets.py b/sotodlib/coords/planets.py index 322cd7e66..0c96c40d2 100644 --- a/sotodlib/coords/planets.py +++ b/sotodlib/coords/planets.py @@ -303,7 +303,7 @@ def _get_astrometric(source_name, timestamp, site='_default'): sf_timestamp = timescale.from_datetime( datetime.datetime.fromtimestamp(timestamp, tz=skyfield_api.utc)) astrometric = observatory.at(sf_timestamp).observe(target) - planets.close() + #planets.close() return astrometric From 59f55ce515db4315b9b852b5a7beb6bbe4ac206e Mon Sep 17 00:00:00 2001 From: tterasaki Date: Fri, 31 May 2024 04:57:22 +0000 Subject: [PATCH 13/48] tuning for master branch --- sotodlib/preprocess/processes.py | 3 +++ sotodlib/site_pipeline/update_pointing.py | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/sotodlib/preprocess/processes.py b/sotodlib/preprocess/processes.py index f10cd4d49..9d3d72ceb 100644 --- a/sotodlib/preprocess/processes.py +++ b/sotodlib/preprocess/processes.py @@ -845,6 +845,9 @@ class SourceFlags(_Preprocess): 
""" name = "source_flags" + def process(self, aman, proc_aman): + tod_ops.flags.get_source_flags(aman, **self.process_cfgs) + def calc_and_save(self, aman, proc_aman): center_on = self.calc_cfgs.get('center_on', 'planet') # Get source from tags diff --git a/sotodlib/site_pipeline/update_pointing.py b/sotodlib/site_pipeline/update_pointing.py index 7f1c73b31..9af4fc96f 100644 --- a/sotodlib/site_pipeline/update_pointing.py +++ b/sotodlib/site_pipeline/update_pointing.py @@ -315,7 +315,7 @@ def main_one_wafer(configs, obs_id, wafer_slot, sso_name=None, # construct pipeline from configs pipe = Pipeline(configs["process_pipe"], logger=logger) for pipe_component in pipe: - if pipe_component.name == 'compute_source_flags': + if pipe_component.name == 'source_flags': pipe_component.process_cfgs['center_on'] = sso_name # Other parameters From 5b80e503cec9c8fa68d6cddf50d7d8076885fe13 Mon Sep 17 00:00:00 2001 From: tterasaki Date: Fri, 31 May 2024 06:21:39 +0000 Subject: [PATCH 14/48] fix preprocessing stuff --- sotodlib/preprocess/processes.py | 3 --- sotodlib/site_pipeline/update_pointing.py | 2 +- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/sotodlib/preprocess/processes.py b/sotodlib/preprocess/processes.py index 7998ac517..0b236c782 100644 --- a/sotodlib/preprocess/processes.py +++ b/sotodlib/preprocess/processes.py @@ -844,9 +844,6 @@ class SourceFlags(_Preprocess): .. 
autofunction:: sotodlib.tod_ops.flags.get_source_flags """ name = "source_flags" - - def process(self, aman, proc_aman): - tod_ops.flags.get_source_flags(aman, **self.process_cfgs) def calc_and_save(self, aman, proc_aman): center_on = self.calc_cfgs.get('center_on', 'planet') diff --git a/sotodlib/site_pipeline/update_pointing.py b/sotodlib/site_pipeline/update_pointing.py index 9af4fc96f..2098ebbd3 100644 --- a/sotodlib/site_pipeline/update_pointing.py +++ b/sotodlib/site_pipeline/update_pointing.py @@ -316,7 +316,7 @@ def main_one_wafer(configs, obs_id, wafer_slot, sso_name=None, pipe = Pipeline(configs["process_pipe"], logger=logger) for pipe_component in pipe: if pipe_component.name == 'source_flags': - pipe_component.process_cfgs['center_on'] = sso_name + pipe_component.calc_cfgs['center_on'] = sso_name # Other parameters force_zero_roll = configs.get('force_zero_roll') From 6e91879c17bf3f98fcf90367d0f1252a3d9538bd Mon Sep 17 00:00:00 2001 From: Elle Claire Shaw Date: Fri, 11 Oct 2024 11:17:08 -0700 Subject: [PATCH 15/48] Added script that solves satv1 pointing model parameters. 
--- .../site_pipeline/solve_pointing_model.py | 278 ++++++++++++++++++ 1 file changed, 278 insertions(+) create mode 100644 sotodlib/site_pipeline/solve_pointing_model.py diff --git a/sotodlib/site_pipeline/solve_pointing_model.py b/sotodlib/site_pipeline/solve_pointing_model.py new file mode 100644 index 000000000..218d9b36d --- /dev/null +++ b/sotodlib/site_pipeline/solve_pointing_model.py @@ -0,0 +1,278 @@ +import os, sys, pickle, math, h5py +import numpy as np +import argparse as ap +import so3g.proj.quat as quat +import lmfit +from lmfit import minimize, Parameters +import yaml +import logging + +from sotodlib.site_pipeline import util +from sotodlib import core +from sotodlib.coords import pointing_model as pm +from sotodlib.coords import fp_containers as fpc + +DEG = np.pi / 180.0 +ARCMIN = DEG / 60 + + +def _load_nom_centers(config): + # Load Nominal UFM Center Locations from centered focal_plane + ffp_path = config.get("ffp_path") + ufms = config.get("ufms") + nom_ufm_centers = np.zeros([1, 7, 3]) * np.nan + rx = fpc.Receiver.load_file(ffp_path) + OT = rx["0"].optics_tubes[0] + for ufm in range(len(OT.focal_planes)): + index = ufms.index(OT.focal_planes[ufm].stream_id) + nom_ufm_centers[0, index, :3] = OT.focal_planes[ufm].center + nom_ufm_centers = nom_ufm_centers[0] + + return nom_ufm_centers + + +# obsdb_entries = [ctx.obsdb.get(obsid) for obsid in filelist] +def _load_per_obs_data(config): + # Load per-observation UFM center data points and weights + # The per obs .h5 file a dict with obs_id for keys + per_obs_fps = config.get("per_obs_fps") + ufms = config.get("ufms") + rxs = fpc.Receiver.load_file(per_obs_fps) + if config.get("platform") == "satp1": + filelist = list(rxs.keys()) + # the following are known to be bad fits: + filelist = [ + item + for item in filelist + if "_1713" not in item and "1716423951" not in item + ] + else: + filelist = list(rxs.keys()) + obsidnum = np.array( + [filelist[id].split("_")[1] for id, _ in enumerate(filelist)], 
dtype=int + ) + obs_ufm_centers = np.zeros([len(filelist), 7, 3]) * np.nan + weights_ufm = np.zeros([len(filelist), 7]) + + for i, ffp in enumerate(filelist): + this_OT = rxs[ffp].optics_tubes[0] + for u in range(len(this_OT.focal_planes)): + index = ufms.index(this_OT.focal_planes[u].stream_id) + obs_ufm_centers[i, index, :3] = this_OT.focal_planes[u].center_transformed + weights_ufm[i, index] = np.nansum(this_OT.focal_planes[u].weights) + weights_ufm = weights_ufm / 1720.0 + weights_ufm[weights_ufm < config.get("weight_cutoff")] = 0.0 + + return filelist, obs_ufm_centers, weights_ufm + + +def _load_obs_boresight(config, filelist): + # Load boresight elevation information from each observation + # Put into an axis manager + ctx = core.Context(config["context"]["path"]) + az_c = [ctx.obsdb.get(obsid)["az_center"] for obsid in filelist] + el_c = [ctx.obsdb.get(obsid)["el_center"] for obsid in filelist] + roll_c = [ctx.obsdb.get(obsid)["roll_center"] for obsid in filelist] + az_c = np.round(np.array(az_c), 4) + el_c = np.round(np.array(el_c), 4) + roll_c = np.round(np.array(roll_c), 4) + roll_c[np.where(roll_c == 0)[0]] = 0 # rounding gives negative 0 sometimes. 
+ + ancil = core.AxisManager(core.IndexAxis("samps")) + ancil.wrap("az_enc", np.repeat(az_c, 7), [(0, "samps")]) + ancil.wrap("boresight_enc", np.repeat(-1 * roll_c, 7), [(0, "samps")]) + ancil.wrap("el_enc", np.repeat(el_c, 7), [(0, "samps")]) + + return ancil, roll_c + + +def _init_fit_params(config): + default_params = pm.defaults_sat_v1 + fixed_params = config.get("fixed_params") + # Initialize lmfit Parameter object + fit_params = Parameters() + for p in list(default_params.keys()): + fit_params.add(p, value=0.0, vary=True) + # Turn off various parameters depending on platform + for fix in fixed_params: + fit_params[fix].set(vary=False) + + return fit_params + + +def chi_sq(weights, dist): + N = np.identity(len(dist)) * weights + chi2 = dist.T * N * dist + return chi2 + + +def objective_model_func_lmfit(params, solver_aman, return_fit=False, weights=True): + if type(params) == lmfit.parameter.Parameters: + params = params.valuesdict() + xi_nom, eta_nom, gam_nom = solver_aman.nom_ufm_centers + az, el, roll = pm._get_sat_enc_radians(solver_aman.ancil) + az1, el1, roll1 = pm.model_sat_v1(params, az, el, roll) + ## Quat math is based on this equation: q_nomodel * q_det_data == q_model * q_det_true + q_nomodel = quat.rotation_lonlat(-az, el, 0) + q_model = quat.rotation_lonlat(-az1, el1, roll1) + q_det_true = quat.rotation_xieta(xi_nom, eta_nom, 0) + xi_mod, eta_mod, gamma_mod = quat.decompose_xieta(~q_nomodel * q_model * q_det_true) + xi_ffp, eta_ffp, gamma_ffp = solver_aman.ffp_ufm_center_fits + if return_fit: + return xi_mod, eta_mod, gamma_mod + else: + dist = [] + for i in range(len(xi_mod)): + dist.append(math.dist([xi_ffp[i], eta_ffp[i]], [xi_mod[i], eta_mod[i]])) + if weights: + return chi_sq(solver_aman.weights, np.array(dist)) + else: + return chi_sq(np.ones(len(dist)), np.array(dist)) + + +def get_RMS(model_xieta, data_xieta, weights): + diff = (model_xieta[0] / ARCMIN - data_xieta[0] / DEG * 60) ** 2 + ( + model_xieta[1] / ARCMIN - data_xieta[1] / DEG * 
60 + ) ** 2 + return (np.nansum(diff * weights) / np.nansum(weights)) ** 0.5 + + +def _round_params(param_dict, decimal): + P = {} + for k in list(param_dict.keys()): + P[k] = np.round(param_dict[k], decimal) + return P + + +def _create_db(filename, save_dir): + db_filename = os.path.join(save_dir, filename) + # Get Database ready + if os.path.exists(db_filename): + return core.metadata.ManifestDb(db_filename) + else: + os.makedirs(save_dir, exist_ok=True) + scheme = core.metadata.ManifestScheme() + scheme.add_data_field("dataset") + return core.metadata.ManifestDb(db_filename, scheme=scheme) + + +def main(): + # Read input parameters + parser = ap.ArgumentParser() + parser.add_argument("config_path", help="Location of the config file") + args = parser.parse_args() + + # Read relevant config file info + with open(args.config_path, "r", encoding="utf-8") as file: + config = yaml.safe_load(file) + platform = config.get("platform") # e.g. satp1 + pm_version = config.get("pm_version") # e.g. sat_v1 + solution_version_tag = config.get("solution_version_tag") # e.g. 
YYMMDDr# + save_dir = os.path.join( + config.get("outdir"), f"{platform}_pointing_model_{solution_version_tag}" + ) + if not os.path.exists(save_dir): + os.makedirs(save_dir) + # savemeta_dir = os.path.join(config.get("savemeta_dir"), solution_version_tag) + # if not os.path.exists(savemeta_dir): + # os.makedirs(savemeta_dir, exists_ok=False) + + # Initialize Logger + logger = util.init_logger(__name__, "Solve pointing_model") + logpath = os.path.join(save_dir, "pointing_model.log") + logfile = logging.FileHandler(logpath) + logger.addHandler(logfile) + + # Load in focal_plane and boresigt data + nom_ufm_centers = _load_nom_centers(config) + logger.info("Loaded nominal UFM centers from %s: ", config.get("ffp_path")) + filelist, obs_ufm_centers, weights_ufm = _load_per_obs_data(config) + logger.info("Loaded per-obs FFP data from %s: ", config.get("per_obs_fps")) + logger.info("Including data from these obs:") + logger.info(filelist) + ancil, roll_c = _load_obs_boresight(config, filelist) + logger.info("Loaded boresight data from obs ids.") + + # Build Axis Managers + obs_info = core.AxisManager() + obs_info.wrap("obs_ids", np.array(filelist)) + + solver_aman = core.AxisManager(core.IndexAxis("samps")) + solver_aman.wrap("ancil", ancil) + solver_aman.wrap("obs_info", obs_info) + solver_aman.wrap("roll_c", np.repeat(roll_c, 7), [(0, "samps")]) + solver_aman.wrap( + "nom_ufm_centers", + np.repeat([nom_ufm_centers], len(filelist), axis=0) + .reshape(len(filelist) * 7, 3) + .T, + [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], + [(1, "samps")], + ) + solver_aman.wrap( + "ffp_ufm_center_fits", + obs_ufm_centers.reshape(len(filelist) * 7, 3).T, + [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], + [(1, "samps")], + ) + solver_aman.wrap("weights", weights_ufm.reshape(-1), [(0, "samps")]) + + weights_mask = np.where(solver_aman["weights"] == 0)[0] + solver_aman["ffp_ufm_center_fits"][:, weights_mask] = np.nan + + logger.info("Built axis manager") + + 
# Initialize Parameters to Fit with Model + fit_params = _init_fit_params(config) + + # Solve for Model Paramters + model_solved_params = lmfit.minimize( + objective_model_func_lmfit, + fit_params, + method="nelder", + nan_policy="omit", + args=(solver_aman, False, True), + ) + model_fits = objective_model_func_lmfit( + model_solved_params.params, solver_aman, return_fit=True + ) + + test_params = _round_params(model_solved_params.params.valuesdict(), 8) + test_params["version"] = pm_version + + logger.info("Found best-fit pointing model parameters") + logger.info(test_params) + logger.info( + "RMS on fit: %f", + get_RMS(model_fits, solver_aman.ffp_ufm_center_fits, solver_aman.weights), + ) + + # Save fit results to the axis manager + modelfit_aman = core.AxisManager() + modelfit_aman.wrap("xi", model_fits[0]) + modelfit_aman.wrap("eta", model_fits[1]) + # modelfit_aman.wrap("gamma", model_fits[2]) + solver_aman.wrap("model_fits", modelfit_aman) + + param_aman = core.AxisManager() + for k in list(test_params.keys()): + param_aman.wrap(k, test_params[k]) + solver_aman.wrap("pointing_model", param_aman) + solver_aman.wrap( + "fit_rms", + get_RMS(model_fits, solver_aman.ffp_ufm_center_fits, solver_aman.weights), + ) + + # Save .h5 and ManifestDb + h5_rel = "pointing_model_data.h5" + h5_filename = os.path.join(save_dir, h5_rel) + solver_aman.save(h5_filename, overwrite=True) + dbfile = "db.sqlite" + db = _create_db(dbfile, save_dir) + db.add_entry({"dataset": "pointing_model"}, filename=h5_rel, replace=True) + db.to_file(os.path.join(save_dir, dbfile)) + +############ + +if __name__ == "__main__": + main() From 51093ed8b1d4a6c3f3ce5ef6d4fd123ecce8e22c Mon Sep 17 00:00:00 2001 From: Elle Claire Shaw Date: Mon, 21 Oct 2024 12:33:56 -0700 Subject: [PATCH 16/48] Make solve_pointing_model match site_pipeline standards. Added plotting functionality, added fit iteration, added documentation. 
--- docs/site_pipeline.rst | 126 +++ sotodlib/site_pipeline/cli.py | 4 +- .../site_pipeline/solve_pointing_model.py | 730 +++++++++++++++--- 3 files changed, 767 insertions(+), 93 deletions(-) diff --git a/docs/site_pipeline.rst b/docs/site_pipeline.rst index baee5ead1..709fb47a9 100644 --- a/docs/site_pipeline.rst +++ b/docs/site_pipeline.rst @@ -560,6 +560,132 @@ the ``focal_plane`` dataset. This can be done like so: This will give you a dict of ``Receiver`` dataclasses with all the focal plane data. The keys of this dict are the start times for combined focal planes and the ``obs_id`` for per-obs. +solve_pointing_model +-------------------- +This script solves for the pointing model parameters using moon observations. +The inputs are the the el_center, roll_center, and the UFM +center locations as fit by finalize_focal_plane, +per each moon observation. +The fitter uses lmfit with a Nelder-Mead minimization routine to minimize +the distance between modeled data points and the reference data points. +By default, the measured UFM centers are the ``reference`` points, +and the quaternion rotations of the pointing model are applied to the nominal, template, UFM center locations. +However, the model can be applied in reverse, with the template positions as reference -- some diagnostic plots are in this space, as it makes it easier to view residuals from multiple boresight orientations at once. +See the ``xieta_model`` parameter comments for more details. + +``solve_pointing_model`` can be iterated once after the first parameter fit. +Specifying ``iterate_cutoff`` in arcmin will exlude outliers from next round of fitting. +Config file format +`````````````````` +Here is an annotated basic configuration file. +The first block are mandatory entries. The second block are optional. + +.. code-block:: yaml + # Mandatory to include in config file + + # Specify platform for the code to run on. 
(satp1, satp3 are supported) + platform: satp1 + # pm_version tells coords.pointing_model which pointing model to use. + # It determines the quaternion model. Don't change this. + pm_version: sat_v1 + # Tag for metadata versioning, will appended to output directory + solution_version_tag: YYMMDDr + # Output directory to save results in. + # A sub directory in {outdir} will be created as + #{platform}_pointing_model_{solution_version_tag}_{xieta_model}_{add_tag} + outdir: /your/save/directory + # Load data and reference focal plane templates + # ffp_path must be common mode subtracted version of focal_plane + ffp_path: "/path/to/centered/focal_plane/metadata/focal_plane_cmsub.h5" + # per_obs_fps are multiple per-observation focal_plane + # fits saved into one h5 file. These are fitted to moon measurements. + # only data from UFMs that had good detector fits are included. + per_obs_fps: /path/to/per-obs/finalize/focal/plane/results/per_obs/focal_plane.h5 + # The provided context file is used to load boresight/elevation data from + # obs_ids included in per_obs_fps + context: + path: "/so/metadata/satp1/contexts/nominal/focal_plane.h5" + # List of ufms in order of wafer slot, not currently future + # proof when new ufms are swapped in. + # This assists unpacking per_obs_fps with its sparse UFM info. + ufms: ['ufm_mv19', 'ufm_mv18', 'ufm_mv22', + 'ufm_mv29', 'ufm_mv7', 'ufm_mv9', 'ufm_mv15'] #satp1 + # ufms: ['ufm_mv5', 'ufm_mv27', 'ufm_mv35', + 'ufm_mv12', 'ufm_mv23', 'ufm_mv33', 'ufm_mv17'] #satp3 + + # Optional configuration parameters + + # parameters included here will be fixed in the minimization. + # See comments for which to fix for different platforms. + fixed_params: + - az_rot #all + - base_tilt_cos #all + - base_tilt_sin #all + - fp_rot_eta0 #all + - fp_offset_eta0 #all + - fp_rot_xi0 #satp3 only + # "xieta_model" decides which parameter space the fitting occurs in. 
+ # "measured": The fitter applies pointing model to template UFM xi-eta + # locations based on El and Roll of the obs, to get the modeled + # xi-etas to match the measured data points. + # Modeled data points get spread out based on boresight. + # "template": The fitter applies pointing model "backwards" on measured + # xieta data points to match them to template locations. + # The modeled data points cluster around + # the nominal template locations. + # Each method gives slightly different results. "measured" is default. + xieta_model: measured + # Define a weight cutoff for fitting routine. + weight_cutoff: 0.2 + # Cut out any known bad observations. + skip_tags: + - "_1713" #bad timing satp1 + - "1716423951" #Just a very bad fit satp1 + # Option to iterate parameter fitting. If not None, data points with + # fit residuals higher than cutoff will be excluded from second round of fits. + iterate_cutoff: None # or arcmin + # Make diagnostic plots. + make_plots: True + # any additional tag to append on results directory + append: "" + save_output: True + +Output file format +`````````````````` +The inputs and outputs ``solve_pointing_model`` are stored as an AxisManager, before saving to an .h5 file. +Only the pointing model parameters + version are saved to the ManifestdB ``db.sqlite``. +.. code-block:: text + + pointing_model_data.h5 + - ancil (aman) + - az_enc (num_obs * 7) + - boresight_enc (num_obs * 7) + - el_enc (num_obs * 7) + - ffp_ufm_center_fits ((xi, eta, gamma), num_obs * 7) #This where the info from per_obs_fps gets stored + - nom_ufm_centers ((xi, eta, gamma), num_obs * 7) #This is where info from cmsub focal_plane goes. + - obs_info (aman) + - obs_ids (num_obs) + - roll_c (num_obs * 7) + - weights (num_obs * 7) + - model_fits (aman) + - eta (num_obs * 7) + - xi (num_obs * 7) + - pointing_model (aman) + - pm_version + - parameters by name + #├── parameter_fit_stats #Not yet implemented in output, but visible in log file. 
+ #│ ├── name + #│ ├── value + #│ ├── vary + #│ ├── min + #│ ├── max + #│ ├── stderr + #│ └── correl #correlation with other fit params + #└── excluded + + + + preprocess-tod -------------- This script is set up to run a preprocessing pipeline using the preprocess diff --git a/sotodlib/site_pipeline/cli.py b/sotodlib/site_pipeline/cli.py index a11ca65f1..6b6ec05cc 100644 --- a/sotodlib/site_pipeline/cli.py +++ b/sotodlib/site_pipeline/cli.py @@ -49,7 +49,8 @@ def main(obs_id=None, config_file=None, logger=None): preprocess_tod, update_g3tsmurf_db, update_obsdb, - make_cosamp_hk + make_cosamp_hk, + solve_pointing_model ) # Dictionary matching element name to a submodule (which must have @@ -68,6 +69,7 @@ def main(obs_id=None, config_file=None, logger=None): 'update-g3tsmurf-db': update_g3tsmurf_db, 'update-obsdb': update_obsdb, 'make-cosamp-hk': make_cosamp_hk, + 'solve-pointing-model': solve_pointing_model } CLI_NAME = 'so-site-pipeline' diff --git a/sotodlib/site_pipeline/solve_pointing_model.py b/sotodlib/site_pipeline/solve_pointing_model.py index 218d9b36d..06eff0bd8 100644 --- a/sotodlib/site_pipeline/solve_pointing_model.py +++ b/sotodlib/site_pipeline/solve_pointing_model.py @@ -1,13 +1,17 @@ import os, sys, pickle, math, h5py import numpy as np -import argparse as ap +import argparse import so3g.proj.quat as quat import lmfit from lmfit import minimize, Parameters import yaml import logging +import matplotlib -from sotodlib.site_pipeline import util +matplotlib.use("agg") +import matplotlib.pyplot as plt + +from sotodlib.site_pipeline import util as sp_util from sotodlib import core from sotodlib.coords import pointing_model as pm from sotodlib.coords import fp_containers as fpc @@ -15,42 +19,34 @@ DEG = np.pi / 180.0 ARCMIN = DEG / 60 +plt.rcParams["axes.grid"] = True +plt.rcParams["grid.alpha"] = 0.5 + -def _load_nom_centers(config): +def load_nom_centers(config): # Load Nominal UFM Center Locations from centered focal_plane ffp_path = 
config.get("ffp_path") ufms = config.get("ufms") - nom_ufm_centers = np.zeros([1, 7, 3]) * np.nan + nom_ufm_centers = np.zeros([7, 3]) * np.nan rx = fpc.Receiver.load_file(ffp_path) OT = rx["0"].optics_tubes[0] for ufm in range(len(OT.focal_planes)): index = ufms.index(OT.focal_planes[ufm].stream_id) - nom_ufm_centers[0, index, :3] = OT.focal_planes[ufm].center - nom_ufm_centers = nom_ufm_centers[0] + nom_ufm_centers[index, :3] = OT.focal_planes[ufm].center return nom_ufm_centers -# obsdb_entries = [ctx.obsdb.get(obsid) for obsid in filelist] def _load_per_obs_data(config): # Load per-observation UFM center data points and weights # The per obs .h5 file a dict with obs_id for keys per_obs_fps = config.get("per_obs_fps") ufms = config.get("ufms") + skip_tags = config.get("skip_tags", []) rxs = fpc.Receiver.load_file(per_obs_fps) - if config.get("platform") == "satp1": - filelist = list(rxs.keys()) - # the following are known to be bad fits: - filelist = [ - item - for item in filelist - if "_1713" not in item and "1716423951" not in item - ] - else: - filelist = list(rxs.keys()) - obsidnum = np.array( - [filelist[id].split("_")[1] for id, _ in enumerate(filelist)], dtype=int - ) + filelist = list(rxs.keys()) + for skip in skip_tags: + filelist = [obs for obs in filelist if skip not in obs] obs_ufm_centers = np.zeros([len(filelist), 7, 3]) * np.nan weights_ufm = np.zeros([len(filelist), 7]) @@ -70,9 +66,10 @@ def _load_obs_boresight(config, filelist): # Load boresight elevation information from each observation # Put into an axis manager ctx = core.Context(config["context"]["path"]) - az_c = [ctx.obsdb.get(obsid)["az_center"] for obsid in filelist] - el_c = [ctx.obsdb.get(obsid)["el_center"] for obsid in filelist] - roll_c = [ctx.obsdb.get(obsid)["roll_center"] for obsid in filelist] + obs_info = [ctx.obsdb.get(obsid) for obsid in filelist] + az_c = [obs["az_center"] for obs in obs_info] + el_c = [obs["el_center"] for obs in obs_info] + roll_c = [obs["roll_center"] 
for obs in obs_info] az_c = np.round(np.array(az_c), 4) el_c = np.round(np.array(el_c), 4) roll_c = np.round(np.array(roll_c), 4) @@ -106,33 +103,80 @@ def chi_sq(weights, dist): return chi2 -def objective_model_func_lmfit(params, solver_aman, return_fit=False, weights=True): +def model_template_xieta(params, pm_version, solver_aman): + """ + Transform a measured (xi,eta) back into template position + Data to Template -- modeling data as true template + Quat math is based on this equation: + q_nomodel * q_det_meas == q_model * q_det_true + """ + xi_meas = solver_aman.ffp_ufm_center_fits[0] + eta_meas = solver_aman.ffp_ufm_center_fits[1] + if type(params) == lmfit.parameter.Parameters: + params = params.valuesdict() + params["version"] = pm_version + if "sat" in pm_version: + az, el, roll = pm._get_sat_enc_radians(solver_aman.ancil) + boresight = pm.apply_pointing_model(solver_aman, pointing_model=params, wrap=False) + az1, el1, roll1 = boresight.az, boresight.el, boresight.roll + q_nomodel = quat.rotation_lonlat(-az, el, 0) + q_model = quat.rotation_lonlat(-az1, el1, roll1) + q_det_meas = quat.rotation_xieta(xi_meas, eta_meas, 0) + xi_mod_true, eta_mod_true, _ = quat.decompose_xieta( + ~q_model * q_nomodel * q_det_meas + ) + + return xi_mod_true, eta_mod_true + + +def model_measured_xieta(params, pm_version, solver_aman): + """ + Transform template (xi,eta) to match measured (xi,eta). 
+ Template to Data -- modeling the template as measured data + Quat math is based on this equation: + q_nomodel * q_det_meas == q_model * q_det_true + """ if type(params) == lmfit.parameter.Parameters: params = params.valuesdict() - xi_nom, eta_nom, gam_nom = solver_aman.nom_ufm_centers - az, el, roll = pm._get_sat_enc_radians(solver_aman.ancil) - az1, el1, roll1 = pm.model_sat_v1(params, az, el, roll) - ## Quat math is based on this equation: q_nomodel * q_det_data == q_model * q_det_true + params["version"] = pm_version + xi_true, eta_true, gam_true = solver_aman.nom_ufm_centers + if "sat" in pm_version: + az, el, roll = pm._get_sat_enc_radians(solver_aman.ancil) + boresight = pm.apply_pointing_model(solver_aman, pointing_model=params, wrap=False) + az1, el1, roll1 = boresight.az, boresight.el, boresight.roll + q_nomodel = quat.rotation_lonlat(-az, el, 0) q_model = quat.rotation_lonlat(-az1, el1, roll1) - q_det_true = quat.rotation_xieta(xi_nom, eta_nom, 0) - xi_mod, eta_mod, gamma_mod = quat.decompose_xieta(~q_nomodel * q_model * q_det_true) - xi_ffp, eta_ffp, gamma_ffp = solver_aman.ffp_ufm_center_fits - if return_fit: - return xi_mod, eta_mod, gamma_mod + q_det_true = quat.rotation_xieta(xi_true, eta_true, 0) + xi_mod_meas, eta_mod_meas, _ = quat.decompose_xieta( + ~q_nomodel * q_model * q_det_true + ) + + return xi_mod_meas, eta_mod_meas + + +def objective_model_func_lmfit( + params, pm_version, solver_aman, xieta_model, weights=True +): + if xieta_model == "measured": + xi_mod, eta_mod = model_measured_xieta(params, pm_version, solver_aman) + xi_ref, eta_ref, _ = solver_aman.ffp_ufm_center_fits + elif xieta_model == "template": + xi_mod, eta_mod = model_template_xieta(params, pm_version, solver_aman) + xi_ref, eta_ref, _ = solver_aman.nom_ufm_centers + + dist = [] + for i in range(len(xi_mod)): + dist.append(math.dist([xi_ref[i], eta_ref[i]], [xi_mod[i], eta_mod[i]])) + if weights: + return chi_sq(solver_aman.weights, np.array(dist)) else: - dist = [] - for 
i in range(len(xi_mod)): - dist.append(math.dist([xi_ffp[i], eta_ffp[i]], [xi_mod[i], eta_mod[i]])) - if weights: - return chi_sq(solver_aman.weights, np.array(dist)) - else: - return chi_sq(np.ones(len(dist)), np.array(dist)) - - -def get_RMS(model_xieta, data_xieta, weights): - diff = (model_xieta[0] / ARCMIN - data_xieta[0] / DEG * 60) ** 2 + ( - model_xieta[1] / ARCMIN - data_xieta[1] / DEG * 60 + return chi_sq(np.ones(len(dist)), np.array(dist)) + + +def get_RMS(model_xieta, ref_xieta, weights): + diff = (model_xieta[0] / ARCMIN - ref_xieta[0] / ARCMIN) ** 2 + ( + model_xieta[1] / ARCMIN - ref_xieta[1] / ARCMIN ) ** 2 return (np.nansum(diff * weights) / np.nansum(weights)) ** 0.5 @@ -156,40 +200,83 @@ def _create_db(filename, save_dir): return core.metadata.ManifestDb(db_filename, scheme=scheme) -def main(): - # Read input parameters - parser = ap.ArgumentParser() - parser.add_argument("config_path", help="Location of the config file") - args = parser.parse_args() +def get_parser(parser=None): + if parser is None: + parser = argparse.ArgumentParser() + parser.add_argument("config_path", help="Path to Configuration File") + return parser + + +def build_param_fit_stat_aman(output): + # takes output of lmfit.minimize() + parameter_fit_stats = core.AxisManager(core.IndexAxis("parameters")) + parameter_fit_stats.wrap( + "name", np.array([output.params[p].name for p in output.params]) + ) + parameter_fit_stats.wrap( + "value", np.array([output.params[p].value for p in output.params]) + ) + parameter_fit_stats.wrap( + "vary", np.array([output.params[p].vary for p in output.params]) + ) + parameter_fit_stats.wrap( + "min", np.array([output.params[p].min for p in output.params]) + ) + parameter_fit_stats.wrap( + "max", np.array([output.params[p].max for p in output.params]) + ) + parameter_fit_stats.wrap( + "stderr", np.array([output.params[p].stderr for p in output.params]) + ) + parameter_fit_stats.wrap( + "correl", np.array([output.params[p].correl for p in 
output.params]) + ) + return parameter_fit_stats + +def main(config_path: str): # Read relevant config file info - with open(args.config_path, "r", encoding="utf-8") as file: + with open(config_path, "r", encoding="utf-8") as file: config = yaml.safe_load(file) platform = config.get("platform") # e.g. satp1 pm_version = config.get("pm_version") # e.g. sat_v1 - solution_version_tag = config.get("solution_version_tag") # e.g. YYMMDDr# + sv_tag = config.get("solution_version_tag") # e.g. YYMMDDr# + xieta_model = config.get("xieta_model", "measured") + xe_tag = f"{xieta_model}_xieta" + iterate_cutoff = config.get("iterate_cutoff", None) + append = config.get("append", "") + append_tag = f"{bool(append)*'_'}{append}" save_dir = os.path.join( - config.get("outdir"), f"{platform}_pointing_model_{solution_version_tag}" + config.get("outdir"), f"{platform}_pointing_model_{sv_tag}", f"{xe_tag}{append_tag}" ) - if not os.path.exists(save_dir): - os.makedirs(save_dir) - # savemeta_dir = os.path.join(config.get("savemeta_dir"), solution_version_tag) - # if not os.path.exists(savemeta_dir): - # os.makedirs(savemeta_dir, exists_ok=False) + os.makedirs(save_dir, exist_ok=True) # Initialize Logger - logger = util.init_logger(__name__, "Solve pointing_model") + logger = sp_util.init_logger(__name__, "Solve pointing_model") logpath = os.path.join(save_dir, "pointing_model.log") logfile = logging.FileHandler(logpath) logger.addHandler(logfile) + if xieta_model != "measured" and xieta_model != "template": + logger.error( + 'Not recognized xieta_model. 
\ + Only "measured" or "template" accepted' + ) + exit + logger.info( + "Pointing model will try to replicate (model) the %s data.", xieta_model + ) + # Load in focal_plane and boresigt data - nom_ufm_centers = _load_nom_centers(config) + nom_ufm_centers = load_nom_centers(config) logger.info("Loaded nominal UFM centers from %s: ", config.get("ffp_path")) + logger.info(nom_ufm_centers) + filelist, obs_ufm_centers, weights_ufm = _load_per_obs_data(config) logger.info("Loaded per-obs FFP data from %s: ", config.get("per_obs_fps")) logger.info("Including data from these obs:") logger.info(filelist) + ancil, roll_c = _load_obs_boresight(config, filelist) logger.info("Loaded boresight data from obs ids.") @@ -217,62 +304,521 @@ def main(): ) solver_aman.wrap("weights", weights_ufm.reshape(-1), [(0, "samps")]) - weights_mask = np.where(solver_aman["weights"] == 0)[0] - solver_aman["ffp_ufm_center_fits"][:, weights_mask] = np.nan - + # Make weights/data cuts + # solver_aman.weights[solver_aman.ancil.az_enc > 360] = 0.0 logger.info("Built axis manager") # Initialize Parameters to Fit with Model fit_params = _init_fit_params(config) - - # Solve for Model Paramters + if xieta_model == "measured": + model_reference = solver_aman.ffp_ufm_center_fits + elif xieta_model == "template": + model_reference = solver_aman.nom_ufm_centers + + # Solve for Model Parameters + # use chosen xieta_model to solve for parameters + use_weights = True model_solved_params = lmfit.minimize( objective_model_func_lmfit, fit_params, method="nelder", nan_policy="omit", - args=(solver_aman, False, True), - ) - model_fits = objective_model_func_lmfit( - model_solved_params.params, solver_aman, return_fit=True + args=(pm_version, solver_aman, xieta_model, use_weights), ) test_params = _round_params(model_solved_params.params.valuesdict(), 8) test_params["version"] = pm_version - logger.info("Found best-fit pointing model parameters") - logger.info(test_params) - logger.info( - "RMS on fit: %f", - 
get_RMS(model_fits, solver_aman.ffp_ufm_center_fits, solver_aman.weights), - ) - - # Save fit results to the axis manager - modelfit_aman = core.AxisManager() - modelfit_aman.wrap("xi", model_fits[0]) - modelfit_aman.wrap("eta", model_fits[1]) - # modelfit_aman.wrap("gamma", model_fits[2]) - solver_aman.wrap("model_fits", modelfit_aman) + logger.info(model_solved_params.params.pretty_print(precision=5, colwidth=11)) + # save pointing model parameters to axis manager param_aman = core.AxisManager() for k in list(test_params.keys()): param_aman.wrap(k, test_params[k]) solver_aman.wrap("pointing_model", param_aman) + + # parameter_fit_stats = build_param_fit_stat_aman(model_solved_params) + # solver_aman.wrap("parameter_fit_stats", parameter_fit_stats) + + # Model template and measured points using parameters found above + if xieta_model == "measured": + model_fits = model_measured_xieta( + solver_aman.pointing_model, pm_version, solver_aman + ) + elif xieta_model == "template": + model_fits = model_template_xieta( + solver_aman.pointing_model, pm_version, solver_aman + ) + + logger.info( + "RMS on fit: %f", get_RMS(model_fits, model_reference, solver_aman.weights) + ) + + fit_residual_i1 = np.array( + [ + math.dist( + [model_reference[0][i], model_reference[1][i]], + [model_fits[0][i], model_fits[1][i]], + ) + for i in range(len(model_fits[0])) + ] + ) + + # Save fit results to the axis manager + modelfit_aman = core.AxisManager() + modelfit_aman.wrap("xi", model_fits[0], overwrite=True) + modelfit_aman.wrap("eta", model_fits[1], overwrite=True) + solver_aman.wrap("model_fits", modelfit_aman, overwrite=True) solver_aman.wrap( "fit_rms", - get_RMS(model_fits, solver_aman.ffp_ufm_center_fits, solver_aman.weights), + get_RMS(model_fits, model_reference, solver_aman.weights), + overwrite=True, ) - # Save .h5 and ManifestDb - h5_rel = "pointing_model_data.h5" - h5_filename = os.path.join(save_dir, h5_rel) - solver_aman.save(h5_filename, overwrite=True) - dbfile = 
"db.sqlite" - db = _create_db(dbfile, save_dir) - db.add_entry({"dataset": "pointing_model"}, filename=h5_rel, replace=True) - db.to_file(os.path.join(save_dir, dbfile)) + if config.get("make_plots"): + tag = "_i1" + plot_ws0_model_fits(solver_aman, config, save_dir, tag) + plot_template_space_fits_per_wafer(solver_aman, config, save_dir, tag) + plot_residuals_vs_ancil(solver_aman, config, save_dir, tag) + plot_xieta_cross_residuals(solver_aman, config, save_dir, tag) + plot_xieta_residuals(solver_aman, config, save_dir, tag) + + + if iterate_cutoff is not None: + logger.info("Iterating parameter solution") + + cutoff = np.nanstd(fit_residual_i1) + np.nanmedian(fit_residual_i1) + logger.info(f"1 std away from residual Median: {cutoff / ARCMIN} arcmin") + logger.info(f"Using {iterate_cutoff} as cutoff") + bad_fit_mask = np.where((fit_residual_i1 / ARCMIN) > iterate_cutoff)[0] + logger.info("Bad fit indices:") + logger.info(bad_fit_mask) + logger.info( + "%f data points are higher than %s arcmin", + len(bad_fit_mask), + iterate_cutoff, + ) + bad_filename = bad_fit_mask // 7 + bad_wafer = bad_fit_mask % 7 + for mask_ind, full_ind in enumerate(bad_fit_mask): + logger.info( + f"{filelist[bad_filename[mask_ind]]} ws{bad_wafer[mask_ind]} is bad. 
Roll {solver_aman.roll_c[full_ind]}, El {solver_aman.ancil.el_enc[full_ind]}" + ) + + solver_aman.weights[bad_fit_mask] = 0.0 + use_weights = True + model_solved_params = lmfit.minimize( + objective_model_func_lmfit, + fit_params, + method="nelder", + nan_policy="omit", + args=(pm_version, solver_aman, xieta_model, use_weights), + ) + + test_params = _round_params(model_solved_params.params.valuesdict(), 8) + test_params["version"] = pm_version + logger.info("Found best-fit pointing model parameters, second iteration") + logger.info(model_solved_params.params.pretty_print(precision=5, colwidth=11)) + + # save pointing model parameters to axis manager + param_aman = core.AxisManager() + for k in list(test_params.keys()): + param_aman.wrap(k, test_params[k]) + solver_aman.wrap("pointing_model", param_aman, overwrite=True) + + # parameter_fit_stats = build_param_fit_stat_aman(model_solved_params) + # solver_aman.wrap("parameter_fit_stats", parameter_fit_stats, overwrite=True) + + # Recalculate best fit modeled points + if xieta_model == "measured": + model_fits = model_measured_xieta( + solver_aman.pointing_model, pm_version, solver_aman + ) + elif xieta_model == "template": + model_fits = model_template_xieta( + solver_aman.pointing_model, pm_version, solver_aman + ) + logger.info( + "RMS on fit: %f", get_RMS(model_fits, model_reference, solver_aman.weights) + ) + fit_residual_i2 = np.array( + [ + math.dist( + [model_reference[0][i], model_reference[1][i]], + [model_fits[0][i], model_fits[1][i]], + ) + for i in range(len(model_fits[0])) + ] + ) + # Save fit results to the axis manager + modelfit_aman = core.AxisManager() + modelfit_aman.wrap("xi", model_fits[0], overwrite=True) + modelfit_aman.wrap("eta", model_fits[1], overwrite=True) + solver_aman.wrap("model_fits", modelfit_aman, overwrite=True) + solver_aman.wrap( + "fit_rms", + get_RMS(model_fits, model_reference, solver_aman.weights), + overwrite=True, + ) + if config.get("make_plots"): + tag = "_i2" + 
plot_ws0_model_fits(solver_aman, config, save_dir, tag) + plot_template_space_fits_per_wafer(solver_aman, config, save_dir, tag) + plot_residuals_vs_ancil(solver_aman, config, save_dir, tag) + plot_xieta_cross_residuals(solver_aman, config, save_dir, tag) + plot_xieta_residuals(solver_aman, config, save_dir, tag) + plot_total_residuals(solver_aman, config, save_dir, tag, fit_residual_i1, fit_residual_i2, bad_fit_mask) + else: + if config.get("make_plots"): + plot_total_residuals(solver_aman, config, save_dir, tag='', fit_residual_i1=fit_residual_i1) + + + if config.get("save_output"): + # Save .h5 and ManifestDb + h5_rel = "pointing_model_data.h5" + h5_filename = os.path.join(save_dir, h5_rel) + solver_aman.save(h5_filename, overwrite=True) + dbfile = "db.sqlite" + db = _create_db(dbfile, save_dir) + db.add_entry({"dataset": "pointing_model"}, filename=h5_rel, replace=True) + db.to_file(os.path.join(save_dir, dbfile)) + + +#################### +# Plotting Functions +#################### + +def plot_ws0_model_fits(solver_aman, config, save_dir, tag=""): + plot_dir = os.path.join(save_dir, "plots") + os.makedirs(plot_dir, exist_ok=True) + platform = config.get("platform") + plotmask = np.where(solver_aman.weights) + rms = np.round(solver_aman.fit_rms, 4) + xi_model_fit = solver_aman.model_fits.xi + eta_model_fit = solver_aman.model_fits.eta + if config.get("xieta_model") == "measured": + xi_ref, eta_ref, _ = solver_aman.ffp_ufm_center_fits + elif config.get("xieta_model") == "template": + xi_ref, eta_ref, _ = solver_aman.nom_ufm_centers + markercolor = solver_aman.ancil.el_enc + coloredby = "El" + scale_weights = solver_aman.weights / np.nanmax(solver_aman.weights) + + #### + fig = plt.figure(figsize=(6, 6)) + gs = fig.add_gridspec(2, 2) + ax = fig.add_subplot(gs[:, :]) + ax.plot( + solver_aman["nom_ufm_centers"][0, : 7 + 1] / DEG, + solver_aman["nom_ufm_centers"][1, : 7 + 1] / DEG, + "rx", + label="Nominal Center", + ) + ax.scatter( + xi_ref[plotmask] / DEG, + 
eta_ref[plotmask] / DEG, + c=markercolor[plotmask], + alpha=0.5, + label="Data", + edgecolors="k", + linewidths=0.4, + s=130 * scale_weights[plotmask], + cmap="jet", + vmax=65, + ) + im = ax.scatter( + xi_model_fit / DEG, + eta_model_fit / DEG, + marker="*", + c=markercolor, + cmap="jet", + edgecolor="gray", + lw=0.3, + s=130, + label=f"Model, RMS = {rms}", + vmax=65, + ) + ax.legend(loc=1, fontsize="small") + ax.set_xlabel("Xi (deg)") + ax.set_ylabel("Eta (deg)") + plt.colorbar(im, location="top", fraction=0.046, pad=0.04) + ax.set_title(f"Fits, Colored by {coloredby} (deg)\n\n\n") + + # Plot lines connecting data to modeled data point + xitoxi = np.empty((len(xi_model_fit), 2)) + xitoxi[:, 0] = xi_ref / DEG + xitoxi[:, 1] = xi_model_fit / DEG + etatoeta = np.empty((len(eta_model_fit), 2)) + etatoeta[:, 0] = eta_ref / DEG + etatoeta[:, 1] = eta_model_fit / DEG + ax.plot(xitoxi.T, etatoeta.T, "k", lw=0.4) + ax.set_xlim(-1, 1); ax.set_ylim(-1, 1) + plt.subplots_adjust(left=0.1, right=0.90, bottom=0.05, hspace=0.3) + plt.savefig(f"{plot_dir}/{platform}_ws0_model_fits{tag}.png", dpi=350) + plt.close() + + +def plot_template_space_fits_per_wafer(solver_aman, config, save_dir, tag=""): + plot_dir = os.path.join(save_dir, "plots") + os.makedirs(plot_dir, exist_ok=True) + pm_version = config.get("pm_version") + platform = config.get("platform") + scale_weights = solver_aman.weights / np.nanmax(solver_aman.weights) + xi_unmod, eta_unmod = model_template_xieta( + solver_aman.pointing_model, pm_version, solver_aman + ) + xi0, eta0 = model_template_xieta(pm.defaults_sat_v1, pm_version, solver_aman) + + fig, ax = plt.subplots(2, 4, figsize=(9, 6)) + for i in range(7): + ax[i // 4, i % 4].plot(0, 0, "kx", label="Nominal Center") + im = ax[i // 4, i % 4].scatter( + xi_unmod[i::7] / ARCMIN - solver_aman.nom_ufm_centers[0, i] / ARCMIN, + eta_unmod[i::7] / ARCMIN - solver_aman.nom_ufm_centers[1, i] / ARCMIN, + c=solver_aman.ancil.el_enc[i::7], + s=scale_weights[i::7] * 80, + 
marker="o", + lw=0, + alpha=0.5, + cmap="jet", + ) + plt.colorbar(im, ax[1, 3], label="Elevation (deg)", fraction=0.046, pad=0.04) + plt.tight_layout() + plt.savefig(f"{plot_dir}/{platform}_unmodeled_fits_WS_elevation{tag}.png", dpi=350) + plt.close() + + fig, ax = plt.subplots(2, 4, figsize=(9, 6)) + for i in range(7): + ax[i // 4, i % 4].plot(0, 0, "kx", label="Nominal Center") + im = ax[i // 4, i % 4].scatter( + xi_unmod[i::7] / ARCMIN - solver_aman.nom_ufm_centers[0, i] / ARCMIN, + eta_unmod[i::7] / ARCMIN - solver_aman.nom_ufm_centers[1, i] / ARCMIN, + c=solver_aman.ancil.boresight_enc[i::7], + s=scale_weights[i::7] * 80, + marker="o", + lw=0, + alpha=0.5, + cmap="jet", + ) + ax[i // 4, i % 4].set_xlim(-15, 15) + ax[i // 4, i % 4].set_ylim(-15, 15) + ax[i // 4, i % 4].set_title(f"ws{i}") + plt.colorbar(im, ax[1, 3], label="Boresight (deg)", fraction=0.046, pad=0.04) + plt.tight_layout() + plt.savefig(f"{plot_dir}/{platform}_unmodeled_fits_WS_boresight{tag}.png", dpi=350) + plt.close() + + +def plot_residuals_vs_ancil(solver_aman, config, save_dir, tag): + plot_dir = os.path.join(save_dir, "plots") + os.makedirs(plot_dir, exist_ok=True) + platform = config.get("platform") + scale_weights = solver_aman.weights / np.nanmax(solver_aman.weights) + plotmask = np.where(solver_aman.weights) + xi_model_fit = solver_aman.model_fits.xi + eta_model_fit = solver_aman.model_fits.eta + if config.get("xieta_model") == "measured": + xi_ref, eta_ref, _ = solver_aman.ffp_ufm_center_fits + elif config.get("xieta_model") == "template": + xi_ref, eta_ref, _ = solver_aman.nom_ufm_centers + + fig, ax = plt.subplots(2, 3, figsize=(8, 6), sharex="col", sharey="row") + plt.setp(ax[0, 1].get_yticklabels(), visible=False) + plt.suptitle(r"$\delta \xi$, $\delta \eta$ vs Az, El, Boresight") + for k in range(6): + i = k // 3 + j = k % 3 + if i == 0: + model = xi_model_fit + ref = xi_ref + elif i == 1: + model = eta_model_fit + ref = eta_ref + if j == 0: + x = solver_aman.ancil.az_enc % 360 + 
elif j == 1: + x = solver_aman.ancil.el_enc + elif j == 2: + x = solver_aman.ancil.boresight_enc + + ax[i, j].scatter( + x[plotmask], + (model - ref)[plotmask] / ARCMIN, + color="k", + marker=".", + alpha=0.3, + lw=0, + s=scale_weights[plotmask] * 80, + ) + ax[i, j].axhline(0, xmin=0, xmax=1, color="k", lw=2, alpha=0.5) + mxb = np.polyfit( + x[plotmask], + (model[plotmask] - ref[plotmask]) / ARCMIN, + 1, + w=scale_weights[plotmask], + ) + xrange = np.arange(np.nanmin(x), np.nanmax(x)) + ax[i, j].plot(xrange, mxb[0] * xrange + mxb[1], "r", lw=1) + ax[0, 0].set_ylabel("dXi [arcmin]") + ax[1, 0].set_ylabel("dEta [arcmin]") + ax[1, 0].set_xlabel("Azimuth [deg]") + ax[1, 1].set_xlabel("Elevation [deg]") + ax[1, 2].set_xlabel("Boresight [deg]") + plt.tight_layout() + plt.savefig(f"{plot_dir}/{platform}_residuals_vs_ancillary{tag}.png", dpi=350) + plt.close() + +def plot_total_residuals(solver_aman, config, save_dir, tag, fit_residual_i1, fit_residual_i2=None, bad_fit_mask=None): + plot_dir = os.path.join(save_dir, "plots") + os.makedirs(plot_dir, exist_ok=True) + iterate_cutoff = config.get("iterate_cutoff", None) + platform = config.get("platform") + + if fit_residual_i2 is not None: + fig = plt.figure(figsize=(6,4)) + gs = fig.add_gridspec(7,1) + ax1 = fig.add_subplot(gs[0:-2,:]) + ax2 = fig.add_subplot(gs[-2:,:]) + #Plot first fit iteration residuals + ax1.plot(np.arange(len(fit_residual_i1)), fit_residual_i1 / ARCMIN, + 'r.', mew=0, alpha=0.6, lw=0, label = '1st Fit') + ax1.set_ylabel(f'Fit Residual $\left|\Delta$(xi, eta)$\right|$ [arcmin]') + ax1.set_xlabel('Data point') + ax1.axhline(iterate_cutoff, xmin=0, xmax=1, color="k", linestyle = ':', + lw=0.8, label = 'Cutoff') + #Plot second fit iteration residuals + ax1.plot(np.arange(len(fit_residual_i2)), + fit_residual_i2 / ARCMIN, 'b*', + alpha=0.5, lw=0, mew=0, label = '2nd fit') + ax1.axhline(0, xmin=0, xmax=1, color="k", alpha=0.5, lw=0.8) + ax1.set_ylabel(r'Fit Residual $\left|\Delta\text{(xi, eta)}\right|$ 
[arcmin]') + ax1.legend(loc=2) + + ax2.plot(np.arange(len(fit_residual_i1)), (fit_residual_i2 - fit_residual_i1) / ARCMIN, + 'k.', mew=0, alpha = 0.6, label = "Res i2 - Res i1") + ax2.plot(np.arange(len(fit_residual_i1))[bad_fit_mask], (fit_residual_i2 - fit_residual_i1)[bad_fit_mask] / ARCMIN, + 'kx', lw=0.2, alpha = 0.6, label = "Excl. from i2 fit") + ax2.axhline(0, xmin=0, xmax=1, color="k", alpha=0.5, lw=0.8) + ax2.legend(fontsize='x-small') + ax2.set_xlabel('Data points') + ax2.set_ylabel(r'$\Delta$ Residuals') + plt.savefig(f"{plot_dir}/{platform}_total_residuals{tag}.png", dpi=350) + + else: + fig, ax = plt.subplots() + #Plot first fit iteration residuals + ax.plot(np.arange(len(fit_residual_i1)), fit_residual_i1 / ARCMIN, + 'r.', mew=0, alpha=0.6, lw=0, label = '1st Fit') + ax.set_ylabel(r'Fit Residual $\left|\Delta\text{(xi, eta)}\right|$ [arcmin]') + ax.set_xlabel('Data points') + plt.legend(loc=2) + plt.savefig(f"{plot_dir}/{platform}_total_residuals{tag}.png", dpi=350) + plt.close() + +def plot_xieta_residuals(solver_aman, config, save_dir, tag): + plot_dir = os.path.join(save_dir, "plots") + os.makedirs(plot_dir, exist_ok=True) + scale_weights = solver_aman.weights / np.nanmax(solver_aman.weights) + plotmask = np.where(solver_aman.weights) + platform = config.get("platform") + xi_model_fit = solver_aman.model_fits.xi + eta_model_fit = solver_aman.model_fits.eta + if config.get("xieta_model") == "measured": + xi_ref, eta_ref, _ = solver_aman.ffp_ufm_center_fits + elif config.get("xieta_model") == "template": + xi_ref, eta_ref, _ = solver_aman.nom_ufm_centers + + fig, ax = plt.subplots(2, 1) + for i, xe in enumerate(['Xi','Eta']): + if xe == 'Xi': + xaxis_ref = xi_ref + xlabel = 'Xi' + yref = xi_ref + ymodel = xi_model_fit + ylabel = 'dXi' + elif xe == 'Eta': + xaxis_ref = eta_ref + xlabel = 'Eta' + yref = eta_ref + ymodel = eta_model_fit + ylabel = 'dEta' + #xi residuals vs xi + im = ax[i].scatter( + xaxis_ref[plotmask] / DEG, + (ymodel - 
yref)[plotmask] / ARCMIN, + marker="*", + c=xaxis_ref[plotmask], + cmap="jet", + s=100 * scale_weights[plotmask], + alpha=scale_weights[plotmask], + edgecolors="k", + linewidths=0.4, + ) + cb = plt.colorbar(im, fraction=0.046, pad=0.04) + cb.ax.set_title(xlabel) + ax[i].axhline(0, xmin=0, xmax=1, color="k", lw=0.8, alpha=0.6) + ax[i].axvline(0, ymin=0, ymax=1, color="k", lw=0.8, alpha=0.5) + ax[i].set_ylim(-10, 20) + ax[i].set_xlabel(f"{xlabel} (deg)", fontsize="small") + ax[i].set_ylabel(f"{ylabel} [arcmin]") + plt.tight_layout() + plt.savefig(f"{plot_dir}/{platform}_xieta_residuals{tag}.png", dpi=350) + plt.close() + + +def plot_xieta_cross_residuals(solver_aman, config, save_dir, tag): + plot_dir = os.path.join(save_dir, "plots") + os.makedirs(plot_dir, exist_ok=True) + scale_weights = solver_aman.weights / np.nanmax(solver_aman.weights) + plotmask = np.where(solver_aman.weights) + platform = config.get("platform") + xi_model_fit = solver_aman.model_fits.xi + eta_model_fit = solver_aman.model_fits.eta + if config.get("xieta_model") == "measured": + xi_ref, eta_ref, _ = solver_aman.ffp_ufm_center_fits + elif config.get("xieta_model") == "template": + xi_ref, eta_ref, _ = solver_aman.nom_ufm_centers + + fig, ax = plt.subplots(2, 1) + for i, xe in enumerate(['Xi','Eta']): + if xe == 'Xi': + xaxis_ref = eta_ref + xlabel = 'Eta' + yref = xi_ref + ymodel = xi_model_fit + ylabel = 'dXi' + elif xe == 'Eta': + xaxis_ref = xi_ref + xlabel = 'Xi' + yref = eta_ref + ymodel = eta_model_fit + ylabel = 'dEta' + #xi residuals vs xi + im = ax[i].scatter( + xaxis_ref[plotmask] / DEG, + (ymodel - yref)[plotmask] / ARCMIN, + marker="*", + c=xaxis_ref[plotmask], + cmap="jet", + s=100 * scale_weights[plotmask], + alpha=scale_weights[plotmask], + edgecolors="k", + linewidths=0.4, + ) + cb = plt.colorbar(im, fraction=0.046, pad=0.04) + cb.ax.set_title(xlabel) + ax[i].axhline(0, xmin=0, xmax=1, color="k", lw=0.8, alpha=0.6) + ax[i].axvline(0, ymin=0, ymax=1, color="k", lw=0.8, 
alpha=0.5) + ax[i].set_ylim(-10, 20) + ax[i].set_xlabel(f"{xlabel} (deg)", fontsize="small") + ax[i].set_ylabel(f"{ylabel} [arcmin]") + plt.tight_layout() + plt.savefig(f"{plot_dir}/{platform}_xieta_cross_residuals{tag}.png", dpi=350) + plt.close() + + ############ if __name__ == "__main__": - main() + sp_util.main_launcher(main, get_parser) From 0e5ef43de4d6c767a65483d7b56da337a11c8d58 Mon Sep 17 00:00:00 2001 From: Elle Claire Shaw Date: Mon, 21 Oct 2024 12:46:04 -0700 Subject: [PATCH 17/48] Edits to documentation --- docs/site_pipeline.rst | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/docs/site_pipeline.rst b/docs/site_pipeline.rst index 709fb47a9..0eb686775 100644 --- a/docs/site_pipeline.rst +++ b/docs/site_pipeline.rst @@ -575,12 +575,14 @@ See the ``xieta_model`` parameter comments for more details. ``solve_pointing_model`` can be iterated once after the first parameter fit. Specifying ``iterate_cutoff`` in arcmin will exlude outliers from next round of fitting. + Config file format `````````````````` Here is an annotated basic configuration file. The first block are mandatory entries. The second block are optional. .. code-block:: yaml + # Mandatory to include in config file # Specify platform for the code to run on. (satp1, satp3 are supported) @@ -654,6 +656,7 @@ Output file format `````````````````` The inputs and outputs ``solve_pointing_model`` are stored as an AxisManager, before saving to an .h5 file. Only the pointing model parameters + version are saved to the ManifestdB ``db.sqlite``. + .. code-block:: text pointing_model_data.h5 @@ -673,15 +676,15 @@ Only the pointing model parameters + version are saved to the ManifestdB ``db.sq - pointing_model (aman) - pm_version - parameters by name - #├── parameter_fit_stats #Not yet implemented in output, but visible in log file. 
- #│ ├── name - #│ ├── value - #│ ├── vary - #│ ├── min - #│ ├── max - #│ ├── stderr - #│ └── correl #correlation with other fit params - #└── excluded + #- parameter_fit_stats #Not yet implemented in output, but visible in log file. + # - name + # - value + # - vary + # - min + # - max + # - stderr + # - correl #correlation with other fit params + #- excluded (Data points) From 63308fd1a0293b018943cdc3385cb592793262c2 Mon Sep 17 00:00:00 2001 From: Elle Claire Shaw Date: Mon, 21 Oct 2024 13:21:13 -0700 Subject: [PATCH 18/48] Trying again with docs syntax. --- docs/site_pipeline.rst | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/docs/site_pipeline.rst b/docs/site_pipeline.rst index 0eb686775..8e2ab08a2 100644 --- a/docs/site_pipeline.rst +++ b/docs/site_pipeline.rst @@ -562,13 +562,14 @@ The keys of this dict are the start times for combined focal planes and the ``ob solve_pointing_model -------------------- + This script solves for the pointing model parameters using moon observations. The inputs are the the el_center, roll_center, and the UFM center locations as fit by finalize_focal_plane, per each moon observation. The fitter uses lmfit with a Nelder-Mead minimization routine to minimize the distance between modeled data points and the reference data points. -By default, the measured UFM centers are the ``reference`` points, +By default, the measured UFM centers are the reference points, and the quaternion rotations of the pointing model are applied to the nominal, template, UFM center locations. However, the model can be applied in reverse, with the template positions as reference -- some diagnostic plots are in this space, as it makes it easier to view residuals from multiple boresight orientations at once. See the ``xieta_model`` parameter comments for more details. @@ -576,8 +577,13 @@ See the ``xieta_model`` parameter comments for more details. 
``solve_pointing_model`` can be iterated once after the first parameter fit. Specifying ``iterate_cutoff`` in arcmin will exlude outliers from next round of fitting. +.. automodule:: sotodlib.site_pipeline.solve_pointing_model + :members: + :undoc-members: + Config file format `````````````````` + Here is an annotated basic configuration file. The first block are mandatory entries. The second block are optional. @@ -652,14 +658,15 @@ The first block are mandatory entries. The second block are optional. append: "" save_output: True + Output file format `````````````````` + The inputs and outputs ``solve_pointing_model`` are stored as an AxisManager, before saving to an .h5 file. Only the pointing model parameters + version are saved to the ManifestdB ``db.sqlite``. -.. code-block:: text - - pointing_model_data.h5 +.. code_block:: text + - ancil (aman) - az_enc (num_obs * 7) - boresight_enc (num_obs * 7) @@ -687,8 +694,6 @@ Only the pointing model parameters + version are saved to the ManifestdB ``db.sq #- excluded (Data points) - - preprocess-tod -------------- This script is set up to run a preprocessing pipeline using the preprocess From fd14407527574c8f787bbf43860ab59eacb4a8b2 Mon Sep 17 00:00:00 2001 From: Elle Claire Shaw Date: Mon, 21 Oct 2024 13:38:15 -0700 Subject: [PATCH 19/48] Edit docs syntax. Surely it is getting close by now. --- docs/site_pipeline.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/site_pipeline.rst b/docs/site_pipeline.rst index 8e2ab08a2..e810f2bf0 100644 --- a/docs/site_pipeline.rst +++ b/docs/site_pipeline.rst @@ -616,10 +616,10 @@ The first block are mandatory entries. The second block are optional. # List of ufms in order of wafer slot, not currently future # proof when new ufms are swapped in. # This assists unpacking per_obs_fps with its sparse UFM info. 
- ufms: ['ufm_mv19', 'ufm_mv18', 'ufm_mv22', - 'ufm_mv29', 'ufm_mv7', 'ufm_mv9', 'ufm_mv15'] #satp1 - # ufms: ['ufm_mv5', 'ufm_mv27', 'ufm_mv35', - 'ufm_mv12', 'ufm_mv23', 'ufm_mv33', 'ufm_mv17'] #satp3 + ufms: ['ufm_mv19', 'ufm_mv18', 'ufm_mv22', 'ufm_mv29', + 'ufm_mv7', 'ufm_mv9', 'ufm_mv15'] #satp1 + # ufms: ['ufm_mv5', 'ufm_mv27', 'ufm_mv35', 'ufm_mv12', + # 'ufm_mv23', 'ufm_mv33', 'ufm_mv17'] #satp3 # Optional configuration parameters From b8c6ed9c46d08efcc40f15bf044ceced2e7eb449 Mon Sep 17 00:00:00 2001 From: Elle Claire Shaw Date: Mon, 21 Oct 2024 13:41:42 -0700 Subject: [PATCH 20/48] Edit documentation. Fingers crossed. --- docs/site_pipeline.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/site_pipeline.rst b/docs/site_pipeline.rst index e810f2bf0..73393844a 100644 --- a/docs/site_pipeline.rst +++ b/docs/site_pipeline.rst @@ -665,7 +665,7 @@ Output file format The inputs and outputs ``solve_pointing_model`` are stored as an AxisManager, before saving to an .h5 file. Only the pointing model parameters + version are saved to the ManifestdB ``db.sqlite``. -.. code_block:: text +.. code-block:: text - ancil (aman) - az_enc (num_obs * 7) From 2071000e8f5d89ab59170c3595d8ebea0c50b9aa Mon Sep 17 00:00:00 2001 From: Elle Claire Shaw Date: Mon, 21 Oct 2024 15:32:12 -0700 Subject: [PATCH 21/48] Added lmfit to list of modules to mock import. 
--- docs/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/conf.py b/docs/conf.py index 610559890..631caf779 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -103,7 +103,7 @@ 'skyfield', 'h5py', 'pyfftw', 'scipy', 'toast', 'pixell', 'scikit', 'skimage', 'numdifftools', 'traitlets', 'ephem', 'influxdb', 'megham', 'detmap', - 'sodetlib'): + 'sodetlib', 'lmfit'): try: foo = import_module(missing) except ImportError: From 22470038829bcc86cabb7c38f0cfd09672577c23 Mon Sep 17 00:00:00 2001 From: Elle Claire Shaw Date: Mon, 21 Oct 2024 16:42:53 -0700 Subject: [PATCH 22/48] Small change to logger output for outliers after iteration. Includes more information on weights and residuals of outlying data points. --- .../site_pipeline/solve_pointing_model.py | 49 ++++++++++++------- 1 file changed, 31 insertions(+), 18 deletions(-) diff --git a/sotodlib/site_pipeline/solve_pointing_model.py b/sotodlib/site_pipeline/solve_pointing_model.py index 06eff0bd8..93031c6e1 100644 --- a/sotodlib/site_pipeline/solve_pointing_model.py +++ b/sotodlib/site_pipeline/solve_pointing_model.py @@ -174,10 +174,14 @@ def objective_model_func_lmfit( return chi_sq(np.ones(len(dist)), np.array(dist)) -def get_RMS(model_xieta, ref_xieta, weights): - diff = (model_xieta[0] / ARCMIN - ref_xieta[0] / ARCMIN) ** 2 + ( - model_xieta[1] / ARCMIN - ref_xieta[1] / ARCMIN - ) ** 2 +def get_RMS(model_xieta, ref_xieta, weights, use_inds=None): + if use_inds is not None: + diff = ((model_xieta[0] - ref_xieta[0])[use_inds] / ARCMIN) ** 2 + \ + ((model_xieta[1] - ref_xieta[1])[use_inds] / ARCMIN) ** 2 + weights = weights[use_inds] + else: + diff = ((model_xieta[0] - ref_xieta[0]) / ARCMIN) ** 2 + \ + ((model_xieta[1] - ref_xieta[1]) / ARCMIN) ** 2 return (np.nansum(diff * weights) / np.nansum(weights)) ** 0.5 @@ -390,22 +394,31 @@ def main(config_path: str): cutoff = np.nanstd(fit_residual_i1) + np.nanmedian(fit_residual_i1) logger.info(f"1 std away from residual Median: {cutoff / ARCMIN} 
arcmin") logger.info(f"Using {iterate_cutoff} as cutoff") - bad_fit_mask = np.where((fit_residual_i1 / ARCMIN) > iterate_cutoff)[0] + bad_fit_inds = np.where((fit_residual_i1 / ARCMIN) > iterate_cutoff)[0] logger.info("Bad fit indices:") - logger.info(bad_fit_mask) + logger.info(bad_fit_inds) logger.info( "%f data points are higher than %s arcmin", - len(bad_fit_mask), + len(bad_fit_inds), iterate_cutoff, ) - bad_filename = bad_fit_mask // 7 - bad_wafer = bad_fit_mask % 7 - for mask_ind, full_ind in enumerate(bad_fit_mask): - logger.info( - f"{filelist[bad_filename[mask_ind]]} ws{bad_wafer[mask_ind]} is bad. Roll {solver_aman.roll_c[full_ind]}, El {solver_aman.ancil.el_enc[full_ind]}" - ) + bad_filename = bad_fit_inds // 7 + bad_wafer = bad_fit_inds % 7 + logger.info("Outliers:") + for i, full_i in enumerate(bad_fit_inds): + logger.info(f"{filelist[bad_filename[i]]}; ws{bad_wafer[i]}; Resid. {np.round(fit_residual_i1[full_i] / ARCMIN, 4)}") + logger.info(f"--- Roll {solver_aman.roll_c[full_i]}; El {solver_aman.ancil.el_enc[full_i]}; weight {np.round(solver_aman.weights[full_i],4)}") + + num_bad_non_zero = sum([solver_aman.weights[ind] > 0 for ind in bad_fit_inds]) + logger.info(f"Only {num_bad_non_zero} outliers with non-zero weight.") + + #Print RMS of initial fits without outlying data points before + #zero-ing the weights. 
+ good_fit_inds = np.where((fit_residual_i1 / ARCMIN) < iterate_cutoff)[0] + masked_rms = get_RMS(model_fits, model_reference, solver_aman.weights, use_inds = good_fit_inds) + logger.info("RMS on initial fit without outliers: %f", masked_rms) - solver_aman.weights[bad_fit_mask] = 0.0 + solver_aman.weights[bad_fit_inds] = 0.0 use_weights = True model_solved_params = lmfit.minimize( objective_model_func_lmfit, @@ -439,7 +452,7 @@ def main(config_path: str): solver_aman.pointing_model, pm_version, solver_aman ) logger.info( - "RMS on fit: %f", get_RMS(model_fits, model_reference, solver_aman.weights) + "RMS on secondary fit: %f", get_RMS(model_fits, model_reference, solver_aman.weights) ) fit_residual_i2 = np.array( [ @@ -467,7 +480,7 @@ def main(config_path: str): plot_residuals_vs_ancil(solver_aman, config, save_dir, tag) plot_xieta_cross_residuals(solver_aman, config, save_dir, tag) plot_xieta_residuals(solver_aman, config, save_dir, tag) - plot_total_residuals(solver_aman, config, save_dir, tag, fit_residual_i1, fit_residual_i2, bad_fit_mask) + plot_total_residuals(solver_aman, config, save_dir, tag, fit_residual_i1, fit_residual_i2, bad_fit_inds) else: if config.get("make_plots"): plot_total_residuals(solver_aman, config, save_dir, tag='', fit_residual_i1=fit_residual_i1) @@ -668,7 +681,7 @@ def plot_residuals_vs_ancil(solver_aman, config, save_dir, tag): plt.savefig(f"{plot_dir}/{platform}_residuals_vs_ancillary{tag}.png", dpi=350) plt.close() -def plot_total_residuals(solver_aman, config, save_dir, tag, fit_residual_i1, fit_residual_i2=None, bad_fit_mask=None): +def plot_total_residuals(solver_aman, config, save_dir, tag, fit_residual_i1, fit_residual_i2=None, bad_fit_inds=None): plot_dir = os.path.join(save_dir, "plots") os.makedirs(plot_dir, exist_ok=True) iterate_cutoff = config.get("iterate_cutoff", None) @@ -696,7 +709,7 @@ def plot_total_residuals(solver_aman, config, save_dir, tag, fit_residual_i1, f ax2.plot(np.arange(len(fit_residual_i1)), 
(fit_residual_i2 - fit_residual_i1) / ARCMIN, 'k.', mew=0, alpha = 0.6, label = "Res i2 - Res i1") - ax2.plot(np.arange(len(fit_residual_i1))[bad_fit_mask], (fit_residual_i2 - fit_residual_i1)[bad_fit_mask] / ARCMIN, + ax2.plot(np.arange(len(fit_residual_i1))[bad_fit_inds], (fit_residual_i2 - fit_residual_i1)[bad_fit_inds] / ARCMIN, 'kx', lw=0.2, alpha = 0.6, label = "Excl. from i2 fit") ax2.axhline(0, xmin=0, xmax=1, color="k", alpha=0.5, lw=0.8) ax2.legend(fontsize='x-small') From 64a96965bc1b55a93239ad1ce6e06939a9aa7d7a Mon Sep 17 00:00:00 2001 From: Elle Claire Shaw Date: Wed, 23 Oct 2024 13:25:24 -0700 Subject: [PATCH 23/48] Made changes to log file and diagnostic plots. --- .../site_pipeline/solve_pointing_model.py | 96 ++++++++++++++----- 1 file changed, 70 insertions(+), 26 deletions(-) diff --git a/sotodlib/site_pipeline/solve_pointing_model.py b/sotodlib/site_pipeline/solve_pointing_model.py index 93031c6e1..8adead517 100644 --- a/sotodlib/site_pipeline/solve_pointing_model.py +++ b/sotodlib/site_pipeline/solve_pointing_model.py @@ -58,6 +58,8 @@ def _load_per_obs_data(config): weights_ufm[i, index] = np.nansum(this_OT.focal_planes[u].weights) weights_ufm = weights_ufm / 1720.0 weights_ufm[weights_ufm < config.get("weight_cutoff")] = 0.0 + initial_weights_mask = np.where(weights_ufm == 0) + obs_ufm_centers[initial_weights_mask] = np.nan return filelist, obs_ufm_centers, weights_ufm @@ -333,7 +335,10 @@ def main(config_path: str): test_params = _round_params(model_solved_params.params.valuesdict(), 8) test_params["version"] = pm_version logger.info("Found best-fit pointing model parameters") + logger.info(test_params) logger.info(model_solved_params.params.pretty_print(precision=5, colwidth=11)) + logger.info("Fit Report:") + logger.info(lmfit.fit_report(model_solved_params)) # save pointing model parameters to axis manager param_aman = core.AxisManager() @@ -409,9 +414,6 @@ def main(config_path: str): logger.info(f"{filelist[bad_filename[i]]}; 
ws{bad_wafer[i]}; Resid. {np.round(fit_residual_i1[full_i] / ARCMIN, 4)}") logger.info(f"--- Roll {solver_aman.roll_c[full_i]}; El {solver_aman.ancil.el_enc[full_i]}; weight {np.round(solver_aman.weights[full_i],4)}") - num_bad_non_zero = sum([solver_aman.weights[ind] > 0 for ind in bad_fit_inds]) - logger.info(f"Only {num_bad_non_zero} outliers with non-zero weight.") - #Print RMS of initial fits without outlying data points before #zero-ing the weights. good_fit_inds = np.where((fit_residual_i1 / ARCMIN) < iterate_cutoff)[0] @@ -431,7 +433,11 @@ def main(config_path: str): test_params = _round_params(model_solved_params.params.valuesdict(), 8) test_params["version"] = pm_version logger.info("Found best-fit pointing model parameters, second iteration") + logger.info(test_params) logger.info(model_solved_params.params.pretty_print(precision=5, colwidth=11)) + logger.info("Fit Report:") + logger.info(lmfit.fit_report(model_solved_params)) + # save pointing model parameters to axis manager param_aman = core.AxisManager() @@ -480,10 +486,21 @@ def main(config_path: str): plot_residuals_vs_ancil(solver_aman, config, save_dir, tag) plot_xieta_cross_residuals(solver_aman, config, save_dir, tag) plot_xieta_residuals(solver_aman, config, save_dir, tag) - plot_total_residuals(solver_aman, config, save_dir, tag, fit_residual_i1, fit_residual_i2, bad_fit_inds) + plot_total_residuals(solver_aman=solver_aman, + config=config, + save_dir=save_dir, + tag=tag, + fit_residual_i1=fit_residual_i1, + fit_residual_i2=fit_residual_i2, + bad_fit_inds=bad_fit_inds, + ) else: if config.get("make_plots"): - plot_total_residuals(solver_aman, config, save_dir, tag='', fit_residual_i1=fit_residual_i1) + plot_total_residuals(solver_aman, + config, + save_dir, + tag='', + fit_residual_i1=fit_residual_i1) if config.get("save_output"): @@ -495,6 +512,7 @@ def main(config_path: str): db = _create_db(dbfile, save_dir) db.add_entry({"dataset": "pointing_model"}, filename=h5_rel, replace=True) 
db.to_file(os.path.join(save_dir, dbfile)) + logger.info("Done") #################### @@ -531,7 +549,7 @@ def plot_ws0_model_fits(solver_aman, config, save_dir, tag=""): xi_ref[plotmask] / DEG, eta_ref[plotmask] / DEG, c=markercolor[plotmask], - alpha=0.5, + alpha=0.4, label="Data", edgecolors="k", linewidths=0.4, @@ -554,8 +572,9 @@ def plot_ws0_model_fits(solver_aman, config, save_dir, tag=""): ax.legend(loc=1, fontsize="small") ax.set_xlabel("Xi (deg)") ax.set_ylabel("Eta (deg)") - plt.colorbar(im, location="top", fraction=0.046, pad=0.04) - ax.set_title(f"Fits, Colored by {coloredby} (deg)\n\n\n") + cb = plt.colorbar(im, fraction=0.046, pad=0.04) + cb.ax.set_title(coloredby) + ax.set_title(f"Fits, Colored by {coloredby} (deg)") # Plot lines connecting data to modeled data point xitoxi = np.empty((len(xi_model_fit), 2)) @@ -565,8 +584,9 @@ def plot_ws0_model_fits(solver_aman, config, save_dir, tag=""): etatoeta[:, 0] = eta_ref / DEG etatoeta[:, 1] = eta_model_fit / DEG ax.plot(xitoxi.T, etatoeta.T, "k", lw=0.4) - ax.set_xlim(-1, 1); ax.set_ylim(-1, 1) - plt.subplots_adjust(left=0.1, right=0.90, bottom=0.05, hspace=0.3) + ax.set_xlim(-1, .25); ax.set_ylim(-.5, .5) + #plt.subplots_adjust(left=0.1, right=0.90, bottom=0.05, hspace=0.3) + plt.tight_layout() plt.savefig(f"{plot_dir}/{platform}_ws0_model_fits{tag}.png", dpi=350) plt.close() @@ -590,11 +610,15 @@ def plot_template_space_fits_per_wafer(solver_aman, config, save_dir, tag=""): eta_unmod[i::7] / ARCMIN - solver_aman.nom_ufm_centers[1, i] / ARCMIN, c=solver_aman.ancil.el_enc[i::7], s=scale_weights[i::7] * 80, + edgecolor="gray", + lw=0.3, marker="o", - lw=0, alpha=0.5, cmap="jet", ) + ax[i // 4, i % 4].set_xlim(-12, 12) + ax[i // 4, i % 4].set_ylim(-12, 12) + ax[i // 4, i % 4].set_title(f"ws{i}") plt.colorbar(im, ax[1, 3], label="Elevation (deg)", fraction=0.046, pad=0.04) plt.tight_layout() plt.savefig(f"{plot_dir}/{platform}_unmodeled_fits_WS_elevation{tag}.png", dpi=350) @@ -608,13 +632,14 @@ def 
plot_template_space_fits_per_wafer(solver_aman, config, save_dir, tag=""): eta_unmod[i::7] / ARCMIN - solver_aman.nom_ufm_centers[1, i] / ARCMIN, c=solver_aman.ancil.boresight_enc[i::7], s=scale_weights[i::7] * 80, + edgecolor="gray", + lw=0.3, marker="o", - lw=0, alpha=0.5, cmap="jet", ) - ax[i // 4, i % 4].set_xlim(-15, 15) - ax[i // 4, i % 4].set_ylim(-15, 15) + ax[i // 4, i % 4].set_xlim(-12, 12) + ax[i // 4, i % 4].set_ylim(-12, 12) ax[i // 4, i % 4].set_title(f"ws{i}") plt.colorbar(im, ax[1, 3], label="Boresight (deg)", fraction=0.046, pad=0.04) plt.tight_layout() @@ -671,7 +696,10 @@ def plot_residuals_vs_ancil(solver_aman, config, save_dir, tag): w=scale_weights[plotmask], ) xrange = np.arange(np.nanmin(x), np.nanmax(x)) - ax[i, j].plot(xrange, mxb[0] * xrange + mxb[1], "r", lw=1) + ax[i, j].plot(xrange, mxb[0] * xrange + mxb[1], "r", lw=1, + label=f'Slope {np.round(mxb[0],4)}\n [arcmin/deg]') + ax[i, j].legend(fontsize='small') + ax[0, 0].set_ylabel("dXi [arcmin]") ax[1, 0].set_ylabel("dEta [arcmin]") ax[1, 0].set_xlabel("Azimuth [deg]") @@ -681,12 +709,19 @@ def plot_residuals_vs_ancil(solver_aman, config, save_dir, tag): plt.savefig(f"{plot_dir}/{platform}_residuals_vs_ancillary{tag}.png", dpi=350) plt.close() -def plot_total_residuals(solver_aman, config, save_dir, tag, fit_residual_i1, fit_residual_i2=None, bad_fit_inds=None): +def plot_total_residuals(solver_aman, + config, + save_dir, + tag, + fit_residual_i1, + fit_residual_i2=None, + bad_fit_inds=None, + ): plot_dir = os.path.join(save_dir, "plots") os.makedirs(plot_dir, exist_ok=True) iterate_cutoff = config.get("iterate_cutoff", None) platform = config.get("platform") - + scale_weights = solver_aman.weights / np.nanmax(solver_aman.weights) if fit_residual_i2 is not None: fig = plt.figure(figsize=(6,4)) gs = fig.add_gridspec(7,1) @@ -703,14 +738,23 @@ def plot_total_residuals(solver_aman, config, save_dir, tag, fit_residual_i1, f ax1.plot(np.arange(len(fit_residual_i2)), fit_residual_i2 / ARCMIN, 
'b*', alpha=0.5, lw=0, mew=0, label = '2nd fit') + xtox = np.empty((len(fit_residual_i2), 2)) + xtox[:, 0] = np.arange(len(fit_residual_i1)) + xtox[:, 1] = np.arange(len(fit_residual_i2)) + ytoy = np.empty((len(fit_residual_i1), 2)) + ytoy[:, 0] = fit_residual_i1 / ARCMIN + ytoy[:, 1] = fit_residual_i2 / ARCMIN + ax1.plot(xtox.T, ytoy.T, "k", lw=0.4) + ax1.axhline(0, xmin=0, xmax=1, color="k", alpha=0.5, lw=0.8) ax1.set_ylabel(r'Fit Residual $\left|\Delta\text{(xi, eta)}\right|$ [arcmin]') - ax1.legend(loc=2) + ax1.legend(loc=2,fontsize='small') - ax2.plot(np.arange(len(fit_residual_i1)), (fit_residual_i2 - fit_residual_i1) / ARCMIN, - 'k.', mew=0, alpha = 0.6, label = "Res i2 - Res i1") + ax2.scatter(np.arange(len(fit_residual_i1)), (fit_residual_i2 - fit_residual_i1) / ARCMIN, c='k', + marker = 'o', s = scale_weights*50, + lw=0, alpha = 0.6, label = "Res i2 - Res i1") ax2.plot(np.arange(len(fit_residual_i1))[bad_fit_inds], (fit_residual_i2 - fit_residual_i1)[bad_fit_inds] / ARCMIN, - 'kx', lw=0.2, alpha = 0.6, label = "Excl. from i2 fit") + 'kx', ms=7, lw=0.2, alpha = 0.6, label = "Excl. 
from i2 fit") ax2.axhline(0, xmin=0, xmax=1, color="k", alpha=0.5, lw=0.8) ax2.legend(fontsize='x-small') ax2.set_xlabel('Data points') @@ -718,12 +762,12 @@ def plot_total_residuals(solver_aman, config, save_dir, tag, fit_residual_i1, f plt.savefig(f"{plot_dir}/{platform}_total_residuals{tag}.png", dpi=350) else: - fig, ax = plt.subplots() + fig, ax1 = plt.subplots() #Plot first fit iteration residuals - ax.plot(np.arange(len(fit_residual_i1)), fit_residual_i1 / ARCMIN, - 'r.', mew=0, alpha=0.6, lw=0, label = '1st Fit') - ax.set_ylabel(r'Fit Residual $\left|\Delta\text{(xi, eta)}\right|$ [arcmin]') - ax.set_xlabel('Data points') + ax1.scatter(np.arange(len(fit_residual_i1)), fit_residual_i1 / ARCMIN, + s = scale_weights*50, color='r', lw=0, alpha=0.6, label = '1st Fit') + ax1.set_ylabel(r'Fit Residual $\left|\Delta\text{(xi, eta)}\right|$ [arcmin]') + ax1.set_xlabel('Data points') plt.legend(loc=2) plt.savefig(f"{plot_dir}/{platform}_total_residuals{tag}.png", dpi=350) plt.close() From 89ec639c88d5e511501a91b6fc50da83a6ecb0f8 Mon Sep 17 00:00:00 2001 From: Elle Shaw Date: Fri, 23 May 2025 13:00:48 -0700 Subject: [PATCH 24/48] Renamed map_based_pointing and update pointing to get_brightsrc_pointing_step1 and step2. Added functionality to run as parallel processing job on a cluster. 
Included some documentation in site_pipeline.rst --- docs/site_pipeline.rst | 184 ++++++++++++++++++ ...ased_pointing.py => brightsrc_pointing.py} | 33 ++-- ...ing.py => get_brightsrc_pointing_step1.py} | 116 ++++++++--- ...ing.py => get_brightsrc_pointing_step2.py} | 87 +++++++-- 4 files changed, 361 insertions(+), 59 deletions(-) rename sotodlib/coords/{map_based_pointing.py => brightsrc_pointing.py} (96%) rename sotodlib/site_pipeline/{make_mapbased_pointing.py => get_brightsrc_pointing_step1.py} (75%) rename sotodlib/site_pipeline/{update_pointing.py => get_brightsrc_pointing_step2.py} (88%) diff --git a/docs/site_pipeline.rst b/docs/site_pipeline.rst index d228a5580..4f8d7481b 100644 --- a/docs/site_pipeline.rst +++ b/docs/site_pipeline.rst @@ -157,6 +157,190 @@ work, here's a more basic example that will work:: stream_id: ufm_mv9 + +get_brightsrc_pointing_part1 and get_brightsrc_pointing_part2 +---------------------------------------------------------------- + +The two-part ``get_brightsrc_pointing`` script set will will run solve for the xieta +coordinates of detectors that observe a bright source during an observation. + +It is a two part process that requires a map step and then a tod step. +To run, the scripts require config files described below. + +The code will process all wafers unless otherwise specified. +It is recommended to run with ``parallel_job: True`` in the config files if analyzing +multiple wafers at once. Otherwise, specify a wafer slot or restrict detectors in CL args. + +Recommended SLURM settings + - ``--nodes=1`` + - ``--ntasks=1`` + - ``--time=00:45:00`` + - ``--cpus-per-task=14`` + - ``--mem=150G`` + - export OMP_NUM_THREADS=1 + + +Recommended Command Line arguments: + - ``configs`` + - ``--obs_id`` + - ``--sso_name`` + +Optional Command Line arguments: + - ``--wafer_slot`` e.g. ws0 + - ``--restrict_dets_for_debug`` integer, or comma separated list of det readout_ids. 
+ +Options to include min_ctime and max_ctime arguments, which will proces all obs +in the time frame. + +.. argparse:: + :module: sotodlib.site_pipeline.get_brightsrc_pointing_part1 or _part2 + :func: get_parser + + + +Generated results +``````````````````` +Saves results as ResultSet .hdf file in the results_dir. +ResultSet<[dets:readout_id, xi, eta, gamma, xi_err, eta_err, R2, redchi2], N rows> + +Load data with sotodlib.io.metadata.read_dataset( results.hdf, 'focal_plane') + +Configuration +````````````````` +These scripts take in a config yaml file + +Part 1 is the map-based step. Its config file should look like the following: +The parameters in these examples are used for SAT mid-freq moon observations. + +.. code-block:: yaml + + context_file: /path/to/context.yaml + query_tags: ['moon', 'jupiter', 'mars'] (alternatively specify --sso_name in kwargs + + optics_config_fn: /path/to/ufm_to_fp.yaml + single_det_maps_dir: /path/to/results/single_det_maps + results_dir: /path/to/results/map_based_results + + parallel_job: True + wafer_mask_det: 8. + res_deg: 0.3 + xieta_bs_offset: [0., 0.] + save_normal_roll: False #false for SAT, true for LAT + save_force_zero_roll: True #true for SAT, false for LAT + + hit_time_threshold: 600 #seconds + hit_circle_r_deg: 7. + + process_pipe: + - name: 'detrend' + process: + count: 2000 + method: 'linear' + - name: 'apodize' + process: + apodize_samps: 2000 + - name: 'fourier_filter' + process: + signal_name: "signal" + wrap_name: null + filt_function: "low_pass_sine2" + trim_samps: null + filter_params: + cutoff: 1.9 + width: 0.2 + - name: 'fourier_filter' + process: + signal_name: "signal" + wrap_name: null + filt_function: "high_pass_sine2" + trim_samps: 2000 + filter_params: + cutoff: 0.05 + width: 0.1 + +Part 2 is the TOD-based step. Its config file should look like the following. +The parameters in these examples are used for SAT mid-freq moon observations. + +.. 
code-block:: yaml + + context_file: /path/to/context.yaml + query_tags: ['moon', 'jupiter', 'mars'] (alternatively specify --sso_name in kwargs + + optics_config_fn: /path/to/ufm_to_fp.yaml + + fp_hdf_dir: /path/to/results/map_based_results from step 1 config file. + # If force_zero_roll is was True, then append _force_zero_roll to the end + result_dir: /path/to/resuls/tod_based_results + + parallel_job: True + force_zero_roll: True + + ds_factor: 40 + mask_deg: 2.5 + fit_func_name: 'gaussian2d_nonlin' + max_non_linear_order: 3 + fwhm_init_deg: 0.5 + error_estimation_method: 'force_one_redchi2' + flag_name_rms_calc: 'around_source' + flag_rms_calc_exclusive: False + + process_pipe: + - name: 'detrend' + process: + count: 2000 + method: 'linear' + - name: 'fourier_filter' + process: + signal_name: 'signal' + filt_function: 'iir_filter' + trim_samps: null + filter_params: + invert: True + - name: 'apodize' + process: + apodize_samps: 2000 + - name: 'fourier_filter' + process: + signal_name: "signal" + wrap_name: null + filt_function: "low_pass_sine2" + trim_samps: null + filter_params: + cutoff: 1.9 + width: 0.2 + - name: 'source_flags' + calc: + merge: True + max_pix: 10000000000 + source_flags_name: 'source_wide' + mask: + shape: circle + xyr: [0., 0., 5.0] + - name: 'source_flags' + calc: + merge: True + max_pix: 10000000000 + source_flags_name: 'source_narrow' + mask: + shape: circle + xyr: [0., 0., 3.0] + - name: 'reduce_flags' + process: + flags: ['source_wide', 'source_narrow'] + method: 'except' + wrap: True + new_flag: 'around_source' + - name: 'flag_turnarounds' + process: + truncate: True + - name: 'sub_polyf' + process: + method: 'legendre' + degree: 2 + mask: 'around_source' + exclusive: False + + make_read_det_match ``````````````````` This script generates the readout ID to detector ID mapping required to diff --git a/sotodlib/coords/map_based_pointing.py b/sotodlib/coords/brightsrc_pointing.py similarity index 96% rename from 
sotodlib/coords/map_based_pointing.py rename to sotodlib/coords/brightsrc_pointing.py index 43305b37c..5985da6fe 100644 --- a/sotodlib/coords/map_based_pointing.py +++ b/sotodlib/coords/brightsrc_pointing.py @@ -4,6 +4,7 @@ import numpy as np from scipy import interpolate from scipy.optimize import curve_fit +from joblib import Parallel, delayed from sotodlib import core from sotodlib import coords @@ -16,7 +17,8 @@ import h5py from scipy.ndimage import maximum_filter -def get_planet_trajectry(tod, planet, _split=20, return_model=False): + +def get_planet_trajectory(tod, planet, _split=20, return_model=False): """ Generate the trajectory of a given planet over a specified time range. @@ -30,7 +32,7 @@ def get_planet_trajectry(tod, planet, _split=20, return_model=False): If return_model is True: tuple: Tuple containing interpolation functions for azimuth and elevation. If return_model is False: - array: Array of quaternions representing trajectry of the planet at each timestamp. + array: Array of quaternions representing trajectory of the planet at each timestamp. """ print(planet) timestamps_sparse = np.linspace(tod.timestamps[0], tod.timestamps[-1], _split) @@ -58,19 +60,19 @@ def get_wafer_centered_sight(tod=None, planet=None, q_planet=None, q_bs=None, q_ Parameters: tod : An axis manager planet (str): The name of the planet to calculate the sightline vector. - q_planet (optional): Quaternion representing the trajectry of the planet. + q_planet (optional): Quaternion representing the trajectory of the planet. If None, it will be computed using get_planet_trajectory. Defaults to None. - q_bs (optional): Quaternion representing the trajectry of the boresight. + q_bs (optional): Quaternion representing the trajectory of the boresight. If None, it will be computed using the current boresight angles from tod. Defaults to None. q_wafer (optional): Quaternion representing the center of wafer to the center of boresight. 
If None, it will be computed using the median of the focal plane xi and eta from tod.focal_plane. Defaults to None. Returns: - Sightline vector for the planet trajectry centered on the center of the wafer. + Sightline vector for the planet trajectory centered on the center of the wafer. """ if q_planet is None: - q_planet = get_planet_trajectry(tod, planet) + q_planet = get_planet_trajectory(tod, planet) if q_bs is None: q_bs = quat.rotation_lonlat(tod.boresight.az, tod.boresight.el) if q_wafer is None: @@ -147,7 +149,7 @@ def get_rough_hit_time(tod, wafer_slot, sso_name, circle_r_deg=7.,optics_config_ float: Estimated rough hit time within the circular region around the wafer center. """ q_bs = quat.rotation_lonlat(tod.boresight.az, tod.boresight.el) - q_planet = get_planet_trajectry(tod, sso_name) + q_planet = get_planet_trajectory(tod, sso_name) xi_wafer, eta_wafer = get_wafer_xieta(wafer_slot, optics_config_fn=optics_config_fn, roll_bs_offset=np.median(tod.boresight.roll), wrap_to_tod=False) q_wafer = quat.rotation_xieta(xi_wafer, eta_wafer) @@ -159,13 +161,11 @@ def get_rough_hit_time(tod, wafer_slot, sso_name, circle_r_deg=7.,optics_config_ hit_time = (tod.timestamps[-1] - tod.timestamps[0]) * np.mean(np.rad2deg(r_wafer_centered) < circle_r_deg) return hit_time - def make_wafer_centered_maps(tod, sso_name, optics_config_fn, map_hdf, xieta_bs_offset=(0., 0.), roll_bs_offset=None, signal='signal', wafer_mask_deg=8., res_deg=0.3, cuts=None,): """ Generate boresight-centered maps from Time-Ordered Data (TOD) for each individual detector. - Parameters: tod : an axismanager object sso_name (str): Name of the planet for which the trajectory is calculated. 
@@ -181,9 +181,8 @@ def make_wafer_centered_maps(tod, sso_name, optics_config_fn, map_hdf, Returns: None """ - q_planet = get_planet_trajectry(tod, sso_name) + q_planet = get_planet_trajectory(tod, sso_name) q_bs = quat.rotation_lonlat(tod.boresight.az, tod.boresight.el) - if roll_bs_offset is None: roll_bs_offset = np.mean(tod.boresight.roll) @@ -218,16 +217,22 @@ def make_wafer_centered_maps(tod, sso_name, optics_config_fn, map_hdf, if cuts is None: cuts = ~tod.flags['source'] P = coords.P.for_tod(tod=tod, geom=geom, comps='T', cuts=cuts, sight=sight, threads=False) + + wT = None for di, det in enumerate(tqdm(tod.dets.vals)): det_weights = np.zeros(tod.dets.count, dtype='float32') det_weights[di] = 1. mT_weighted = P.to_map(tod=tod, signal=signal, comps='T', det_weights=det_weights) - wT = P.to_weights(tod, signal=signal, comps='T', det_weights=det_weights) + if wT is None: + wT = P.to_weights(tod, signal=signal, comps='T', det_weights=det_weights) mT = P.remove_weights(signal_map=mT_weighted, weights_map=wT, comps='T')[0] enmap.write_hdf(map_hdf, mT, address=det, - extra={'xi0': xi0, 'eta0': eta0, - 'xi_bs_offset': xi_bs_offset, 'eta_bs_offset': eta_bs_offset, 'roll_bs_offset': roll_bs_offset}) + extra={'xi0': xi0, + 'eta0': eta0, + 'xi_bs_offset': xi_bs_offset, + 'eta_bs_offset': eta_bs_offset, + 'roll_bs_offset': roll_bs_offset}) return def detect_peak_xieta(mT, filter_size=None): diff --git a/sotodlib/site_pipeline/make_mapbased_pointing.py b/sotodlib/site_pipeline/get_brightsrc_pointing_step1.py similarity index 75% rename from sotodlib/site_pipeline/make_mapbased_pointing.py rename to sotodlib/site_pipeline/get_brightsrc_pointing_step1.py index faed09a43..2b7aea511 100644 --- a/sotodlib/site_pipeline/make_mapbased_pointing.py +++ b/sotodlib/site_pipeline/get_brightsrc_pointing_step1.py @@ -4,12 +4,12 @@ import argparse import time import glob +from joblib import Parallel, delayed from sotodlib import core from sotodlib import coords from sotodlib import 
tod_ops -from sotodlib.coords import map_based_pointing as mbp -from sotodlib.site_pipeline import update_pointing as up +from sotodlib.coords import brightsrc_pointing as bsp from sotodlib.io import metadata from sotodlib.io.metadata import read_dataset, write_dataset @@ -17,7 +17,7 @@ from sotodlib.preprocess import Pipeline logger = util.init_logger(__name__, 'make_map_based_pointing: ') -def _get_sso_names_from_tags(ctx, obs_id, candidate_names=['moon', 'jupiter']): +def _get_sso_names_from_tags(ctx, obs_id, candidate_names=['moon', 'jupiter', 'mars']): obs_tags = ctx.obsdb.get(obs_id, tags=True)['tags'] sso_names = [] for _name in candidate_names: @@ -58,31 +58,44 @@ def main_one_wafer(configs, obs_id, wafer_slot, sso_name=None, f'Processing only {sso_name}') # Load data - logger.info('loading data') + logger.info(f'loading meta data: {wafer_slot}') meta = ctx.get_meta(obs_id, dets={'wafer_slot': wafer_slot}) + logger.info(f'finished loading meta data: {wafer_slot}') + try: + meta.restrict('dets', meta.detcal.bg > -1) + except: + pass if restrict_dets_for_debug is not False: - meta.restrict('dets', meta.dets.vals[:restrict_dets_for_debug]) + try: + restrict_dets_for_debug = int(restrict_dets_for_debug) + meta.restrict('dets', meta.dets.vals[:restrict_dets_for_debug]) + except ValueError: + _testdets = restrict_dets_for_debug.split(',') + restrict_list = [det.split('\'')[1].strip() for det in _testdets] + meta.restrict('dets', restrict_list) + logger.info(f'loading tod data: {wafer_slot}') tod = ctx.get_obs(meta) - + logger.info(f'finished loading tod data: {wafer_slot}') # tod processing - logger.info('tod processing') + logger.info(f'tod processing {wafer_slot}') pipe = Pipeline(configs["process_pipe"], logger=logger) proc_aman, success = pipe.run(tod) - + logger.info(f'done with tod processing {wafer_slot}') # make single detecctor maps logger.info(f'Making single detector maps') os.makedirs(single_det_maps_dir, exist_ok=True) map_hdf = 
os.path.join(single_det_maps_dir, f'{obs_id}_{wafer_slot}.hdf') - mbp.make_wafer_centered_maps(tod, sso_name, optics_config_fn, map_hdf=map_hdf, + bsp.make_wafer_centered_maps(tod, sso_name, optics_config_fn, map_hdf=map_hdf, xieta_bs_offset=xieta_bs_offset, wafer_mask_deg=wafer_mask_deg, res_deg=res_deg) - + + #next step result_filename = f'focal_plane_{obs_id}_{wafer_slot}.hdf' # reconstruct pointing from single detector maps if save_normal_roll: logger.info(f'Saving map-based pointing results') - fp_rset_map_based = mbp.get_xieta_from_maps(map_hdf, save=True, + fp_rset_map_based = bsp.get_xieta_from_maps(map_hdf, save=True, output_dir=result_dir, filename=result_filename, force_zero_roll=False, @@ -91,7 +104,7 @@ def main_one_wafer(configs, obs_id, wafer_slot, sso_name=None, if save_force_zero_roll: logger.info(f'Saving map-based pointing results (force-zero-roll)') result_dir_force_zero_roll = result_dir + '_force_zero_roll' - fp_rset_map_based_force_zero_roll = mbp.get_xieta_from_maps(map_hdf, save=True, + fp_rset_map_based_force_zero_roll = bsp.get_xieta_from_maps(map_hdf, save=True, output_dir=result_dir_force_zero_roll, filename=result_filename, force_zero_roll=True, @@ -109,7 +122,13 @@ def main_one_wafer_dummy(configs, obs_id, wafer_slot, restrict_dets_for_debug=Fa meta = ctx.get_meta(obs_id, dets={'wafer_slot': wafer_slot}) if restrict_dets_for_debug is not False: - meta.restrict('dets', meta.dets.vals[:restrict_dets_for_debug]) + try: + restrict_dets_for_debug = int(restrict_dets_for_debug) + meta.restrict('dets', meta.dets.vals[:restrict_dets_for_debug]) + except ValueError: + _testdets = restrict_dets_for_debug.split(',') + restrict_list = [det.split('\'')[1].strip() for det in _testdets] + meta.restrict('dets', restrict_list) result_filename = f'focal_plane_{obs_id}_{wafer_slot}.hdf' fp_rset_dummy_map_based = metadata.ResultSet(keys=['dets:readout_id', 'xi', 'eta', 'gamma', 'R2']) @@ -150,6 +169,15 @@ def combine_pointings(pointing_result_files): 
focal_plane.rows.append((det, val['xi'], val['eta'], val['gamma'], val['R2'])) return focal_plane +def parallel_process_wafer_slot(configs, obs_id, wafer_slot, sso_name, restrict_dets_for_debug): + logger.info(f'Processing {obs_id}, {wafer_slot}') + main_one_wafer(configs=configs, + obs_id=obs_id, + wafer_slot=wafer_slot, + sso_name=sso_name, + restrict_dets_for_debug=restrict_dets_for_debug) + + def main_one_obs(configs, obs_id, sso_name=None, restrict_dets_for_debug=False): if type(configs) == str: @@ -176,26 +204,57 @@ def main_one_obs(configs, obs_id, sso_name=None, tod = ctx.get_obs(obs_id, dets=[]) streamed_wafer_slots = ['ws{}'.format(index) for index, bit in enumerate(obs_id.split('_')[-1]) if bit == '1'] processed_wafer_slots = [] + finished_wafer_slots = [] skipped_wafer_slots = [] + check_dir = result_dir + '_force_zero_roll' if save_force_zero_roll else result_dir for ws in streamed_wafer_slots: - hit_time = mbp.get_rough_hit_time(tod, wafer_slot=ws, sso_name=sso_name, circle_r_deg=hit_circle_r_deg, - optics_config_fn=optics_config_fn) + hit_time = bsp.get_rough_hit_time(tod, + wafer_slot=ws, + sso_name=sso_name, + circle_r_deg=hit_circle_r_deg, + optics_config_fn=optics_config_fn) logger.info(f'hit_time for {ws} is {hit_time:.1f} [sec]') - if hit_time > hit_time_threshold: - processed_wafer_slots.append(ws) + if hit_time >= hit_time_threshold: + if os.path.exists(os.path.join(check_dir, f'focal_plane_{obs_id}_{ws}.hdf')): + finished_wafer_slots.append(ws) + else: + processed_wafer_slots.append(ws) else: skipped_wafer_slots.append(ws) - logger.info(f'wafer_slots which pointing calculated: {processed_wafer_slots}') - for wafer_slot in processed_wafer_slots: - logger.info(f'Processing {obs_id}, {wafer_slot}') - main_one_wafer(configs=configs, - obs_id=obs_id, - wafer_slot=wafer_slot, - sso_name=sso_name, - restrict_dets_for_debug=restrict_dets_for_debug) - + logger.info(f'Found saved data for these wafer_slots: {finished_wafer_slots}') + 
logger.info(f'Will continue for these wafer_slots: {processed_wafer_slots}') + + if configs.get('parallel_job'): + logger.info('Continuing with parallel job') + try: + n_jobs = int(os.environ.get('SLURM_CPUS_PER_TASK', 1)) + except: + n_jobs = -1 + + logger.info('Entering wafer pool') + Parallel(n_jobs=n_jobs)( + delayed(parallel_process_wafer_slot)( + configs, + obs_id, + wafer_slot, + sso_name, + restrict_dets_for_debug, + ) + for wafer_slot in processed_wafer_slots + ) + logger.info('Exiting wafer pool') + else: + logger.info('Continuing with serial processing of wafers.') + for wafer_slot in processed_wafer_slots: + logger.info(f'Processing {obs_id}, {wafer_slot}') + main_one_wafer(configs=configs, + obs_id=obs_id, + wafer_slot=wafer_slot, + sso_name=sso_name, + restrict_dets_for_debug=restrict_dets_for_debug) + logger.info(f'create dummy hdf for non-hitting wafer: {skipped_wafer_slots}') for wafer_slot in skipped_wafer_slots: main_one_wafer_dummy(configs=configs, @@ -220,7 +279,7 @@ def main_one_obs(configs, obs_id, sso_name=None, write_dataset(fp_rset_full, filename=fp_rset_full_file, address='focal_plane', overwrite=True) - + logger.info(f'ta da! Finished with {obs_id}') return def main(configs, min_ctime=None, max_ctime=None, update_delay=None, @@ -258,6 +317,7 @@ def main(configs, min_ctime=None, max_ctime=None, update_delay=None, restrict_dets_for_debug=restrict_dets_for_debug) elif obs_id is not None: + logger.info(f'Processing {obs_id}') if wafer_slot is None: main_one_obs(configs=configs, obs_id=obs_id, sso_name=sso_name, restrict_dets_for_debug=restrict_dets_for_debug) @@ -280,7 +340,7 @@ def get_parser(): parser.add_argument("--sso_name", type=str, default=None, help="Name of solar system object (e.g., 'moon', 'jupiter'). If not specified, get sso_name from observation tags. 
"\ + "Valid only when obs_id is specified") - parser.add_argument("--restrict_dets_for_debug", type=int, default=False) + parser.add_argument("--restrict_dets_for_debug", type=str, default=False) return parser if __name__ == '__main__': diff --git a/sotodlib/site_pipeline/update_pointing.py b/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py similarity index 88% rename from sotodlib/site_pipeline/update_pointing.py rename to sotodlib/site_pipeline/get_brightsrc_pointing_step2.py index 2098ebbd3..6081d1800 100644 --- a/sotodlib/site_pipeline/update_pointing.py +++ b/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py @@ -6,11 +6,12 @@ import time import glob from tqdm import tqdm +from joblib import Parallel, delayed from scipy.optimize import curve_fit from sotodlib.core import metadata from sotodlib.io.metadata import read_dataset, write_dataset -from sotodlib.coords import map_based_pointing as mbp +from sotodlib.coords import brightsrc_pointing as bsp from sotodlib import core from sotodlib import coords from sotodlib import tod_ops @@ -334,7 +335,14 @@ def main_one_wafer(configs, obs_id, wafer_slot, sso_name=None, logger.info('loading data') meta = ctx.get_meta(obs_id, dets={'wafer_slot': wafer_slot}) if restrict_dets_for_debug is not False: - meta.restrict('dets', meta.dets.vals[:restrict_dets_for_debug]) + try: + restrict_dets_for_debug = int(restrict_dets_for_debug) + meta.restrict('dets', meta.dets.vals[:restrict_dets_for_debug]) + except ValueError: + _testdets = restrict_dets_for_debug.split(',') + restrict_list = [det.split('\'')[1].strip() for det in _testdets] + meta.restrict('dets', restrict_list) + tod = ctx.get_obs(meta) # get pointing @@ -372,7 +380,13 @@ def main_one_wafer_dummy(configs, obs_id, wafer_slot, restrict_dets_for_debug=Fa meta = ctx.get_meta(obs_id, dets={'wafer_slot': wafer_slot}) if restrict_dets_for_debug is not False: - meta.restrict('dets', meta.dets.vals[:restrict_dets_for_debug]) + try: + restrict_dets_for_debug = 
int(restrict_dets_for_debug) + meta.restrict('dets', meta.dets.vals[:restrict_dets_for_debug]) + except ValueError: + _testdets = restrict_dets_for_debug.split(',') + restrict_list = [det.split('\'')[1].strip() for det in _testdets] + meta.restrict('dets', restrict_list) result_filename = f'focal_plane_{obs_id}_{wafer_slot}.hdf' fp_rset_dummy = metadata.ResultSet(keys=['dets:readout_id', 'xi', 'eta', 'gamma', @@ -408,6 +422,14 @@ def combine_pointings(pointing_result_files): focal_plane.rows.append((det, val['xi'], val['eta'], val['gamma'], val['xi_err'], val['eta_err'], val['R2'], val['redchi2'])) return focal_plane +def parallel_process_wafer_slot(configs, obs_id, wafer_slot, sso_name, restrict_dets_for_debug): + logger.info(f'Processing {obs_id}, {wafer_slot}') + main_one_wafer(configs=configs, + obs_id=obs_id, + wafer_slot=wafer_slot, + sso_name=sso_name, + restrict_dets_for_debug=restrict_dets_for_debug) + def main_one_obs(configs, obs_id, sso_name=None, restrict_dets_for_debug=False): if type(configs) == str: @@ -434,25 +456,54 @@ def main_one_obs(configs, obs_id, sso_name=None, tod = ctx.get_obs(obs_id, dets=[]) streamed_wafer_slots = ['ws{}'.format(index) for index, bit in enumerate(obs_id.split('_')[-1]) if bit == '1'] processed_wafer_slots = [] + finished_wafer_slots = [] skipped_wafer_slots = [] + check_dir = result_dir + '_force_zero_roll' if force_zero_roll else result_dir + for ws in streamed_wafer_slots: - hit_time = mbp.get_rough_hit_time(tod, wafer_slot=ws, sso_name=sso_name, circle_r_deg=hit_circle_r_deg, - optics_config_fn=optics_config_fn) + hit_time = bsp.get_rough_hit_time(tod, + wafer_slot=ws, + sso_name=sso_name, + circle_r_deg=hit_circle_r_deg, + optics_config_fn=optics_config_fn) logger.info(f'hit_time for {ws} is {hit_time:.1f} [sec]') - if hit_time > hit_time_threshold: - processed_wafer_slots.append(ws) + if hit_time >= hit_time_threshold: + if os.path.exists(os.path.join(check_dir, f'focal_plane_{obs_id}_{ws}.hdf')): + 
finished_wafer_slots.append(ws) + else: + processed_wafer_slots.append(ws) else: skipped_wafer_slots.append(ws) - - logger.info(f'wafer_slots which pointing calculated: {processed_wafer_slots}') - for wafer_slot in processed_wafer_slots: - logger.info(f'Processing {obs_id}, {wafer_slot}') - main_one_wafer(configs=configs, - obs_id=obs_id, - wafer_slot=wafer_slot, - sso_name=sso_name, - restrict_dets_for_debug=restrict_dets_for_debug) - + + logger.info(f'Found saved data for these wafer_slots: {finished_wafer_slots}') + logger.info(f'Will continue for these wafer_slots: {processed_wafer_slots}') + + if configs.get('parallel_job'): + logger.info('Continuing with parallel job') + try: + n_jobs = int(os.environ.get('SLURM_CPUS_PER_TASK', 1)) + except: + n_jobs = -1 + Parallel(n_jobs=n_jobs)( + delayed(parallel_process_wafer_slot)( + configs, + obs_id, + wafer_slot, + sso_name, + restrict_dets_for_debug + ) + for wafer_slot in processed_wafer_slots + ) + else: + logger.info('Continuing with serial processing of wafers.') + for wafer_slot in processed_wafer_slots: + logger.info(f'Processing {obs_id}, {wafer_slot}') + main_one_wafer(configs=configs, + obs_id=obs_id, + wafer_slot=wafer_slot, + sso_name=sso_name, + restrict_dets_for_debug=restrict_dets_for_debug) + logger.info(f'create dummy hdf for non-hitting wafer: {skipped_wafer_slots}') for wafer_slot in skipped_wafer_slots: main_one_wafer_dummy(configs=configs, @@ -466,6 +517,7 @@ def main_one_obs(configs, obs_id, sso_name=None, fp_rset_full_file = os.path.join(os.path.join(result_dir, f'focal_plane_{obs_id}_all.hdf')) write_dataset(fp_rset_full, filename=fp_rset_full_file, address='focal_plane', overwrite=True) + logger.info(f'ta da! 
Finsihed with {obs_id}') def main(configs, min_ctime=None, max_ctime=None, update_delay=None, obs_id=None, wafer_slot=None, sso_name=None, restrict_dets_for_debug=False): @@ -502,6 +554,7 @@ def main(configs, min_ctime=None, max_ctime=None, update_delay=None, restrict_dets_for_debug=restrict_dets_for_debug) elif obs_id is not None: + logger.info(f'Processing {obs_id}') if wafer_slot is None: main_one_obs(configs=configs, obs_id=obs_id, sso_name=sso_name, restrict_dets_for_debug=restrict_dets_for_debug) From 4ac7e450ccffeb11b5fe5ee5798dd3c2071ca6ed Mon Sep 17 00:00:00 2001 From: Elle Shaw Date: Tue, 15 Jul 2025 16:57:06 -0700 Subject: [PATCH 25/48] Major update to solve_pointing_model.py to include per-detector fitting option and new plotting class. --- .../site_pipeline/solve_pointing_model.py | 2047 ++++++++++++----- 1 file changed, 1431 insertions(+), 616 deletions(-) diff --git a/sotodlib/site_pipeline/solve_pointing_model.py b/sotodlib/site_pipeline/solve_pointing_model.py index 8adead517..b1b21fb57 100644 --- a/sotodlib/site_pipeline/solve_pointing_model.py +++ b/sotodlib/site_pipeline/solve_pointing_model.py @@ -1,16 +1,25 @@ -import os, sys, pickle, math, h5py -import numpy as np +import os +import pickle +import math +import h5py import argparse -import so3g.proj.quat as quat -import lmfit -from lmfit import minimize, Parameters import yaml import logging +import numpy as np +import so3g.proj.quat as quat +import pdb +# import lmfit +import lmfit +from lmfit import minimize, Parameters, fit_report +import time +import shutil + import matplotlib matplotlib.use("agg") import matplotlib.pyplot as plt +from sotodlib.coords.helpers import _valid_arg from sotodlib.site_pipeline import util as sp_util from sotodlib import core from sotodlib.coords import pointing_model as pm @@ -23,7 +32,7 @@ plt.rcParams["grid.alpha"] = 0.5 -def load_nom_centers(config): +def load_nom_ufm_centers(config): # Load Nominal UFM Center Locations from centered focal_plane ffp_path = 
config.get("ffp_path") ufms = config.get("ufms") @@ -31,24 +40,33 @@ def load_nom_centers(config): rx = fpc.Receiver.load_file(ffp_path) OT = rx["0"].optics_tubes[0] for ufm in range(len(OT.focal_planes)): - index = ufms.index(OT.focal_planes[ufm].stream_id) + try: + index = ufms.index(OT.focal_planes[ufm].stream_id) + except: + temp_ufms = config.get("temp_ufms") + index = temp_ufms.index(OT.focal_planes[ufm].stream_id) nom_ufm_centers[index, :3] = OT.focal_planes[ufm].center return nom_ufm_centers - -def _load_per_obs_data(config): +def load_per_obs_data(config): # Load per-observation UFM center data points and weights # The per obs .h5 file a dict with obs_id for keys per_obs_fps = config.get("per_obs_fps") ufms = config.get("ufms") skip_tags = config.get("skip_tags", []) + t0 = config.get("begin_timerange", 0) + tf = config.get("end_timerange", 3000000000) rxs = fpc.Receiver.load_file(per_obs_fps) - filelist = list(rxs.keys()) - for skip in skip_tags: - filelist = [obs for obs in filelist if skip not in obs] + + filelist = [obs for obs in rxs.keys() if all(skip not in obs for skip in skip_tags)] + filelist = [obs for obs in filelist if int(obs.split("_")[1]) > t0 and int(obs.split("_")[1]) < tf] + if config.get("use_these_files") is not None: + filelist = [filelist[i] for i in config.get("use_these_files")] + obs_ufm_centers = np.zeros([len(filelist), 7, 3]) * np.nan weights_ufm = np.zeros([len(filelist), 7]) + obs_index = [] for i, ffp in enumerate(filelist): this_OT = rxs[ffp].optics_tubes[0] @@ -56,70 +74,260 @@ def _load_per_obs_data(config): index = ufms.index(this_OT.focal_planes[u].stream_id) obs_ufm_centers[i, index, :3] = this_OT.focal_planes[u].center_transformed weights_ufm[i, index] = np.nansum(this_OT.focal_planes[u].weights) + obs_index.append(np.repeat(i, 7)) + weights_ufm = weights_ufm / 1720.0 weights_ufm[weights_ufm < config.get("weight_cutoff")] = 0.0 initial_weights_mask = np.where(weights_ufm == 0) obs_ufm_centers[initial_weights_mask] = 
np.nan + obs_index = np.concatenate(obs_index) + #obs_index[initial_weights_mask] = np.nan + + return filelist, obs_ufm_centers, weights_ufm, obs_index + +def load_nom_focal_plane_full(config, ufm): + which_template = config.get("use_as_template", "ffp") + if which_template == "nominal": + ffp_path = config.get("nominal") + with h5py.File(ffp_path, "r") as template_fp: + det_ids = template_fp[ufm]["dets:det_id"][:] + xi = template_fp[ufm]["xi"][:] + eta = template_fp[ufm]["eta"][:] + gamma = template_fp[ufm]["gamma"][:] + nom_det_array = np.stack((xi, eta, gamma), axis=1) + + elif which_template == "ffp": + ffp_path = config.get("ffp_path") + with h5py.File(ffp_path, "r") as template_fp: + OT = template_fp["0/st1"] + fpf = np.array(OT[ufm]["focal_plane_full"][:]) + # Extracting specific columns using structured arrays + det_ids = fpf[ + "dets:det_id" + ] # Assuming 'f0' corresponds to the detector IDs + xi = fpf["xi_t"] # Assuming 'f1' corresponds to xi + eta = fpf["eta_t"] # Assuming 'f2' corresponds to eta + gamma = fpf["gamma_t"] # Assuming 'f3' corresponds to gamma + nom_det_array = np.stack((xi, eta, gamma), axis=1) + + return det_ids, nom_det_array + +def create_size_mask(obs_index): + #create comparably sized datasets of all obs. 
+ unique, counts = np.unique(obs_index, return_counts=True) + min_count = min(counts) + limiting_mask = np.zeros_like(obs_index, dtype=bool) + for dataset in unique: + indices = np.where(obs_index == dataset)[0] + selected_indices = np.random.choice(indices, min_count, replace=False) + limiting_mask[selected_indices] = True + return limiting_mask + +def create_culling_mask(obs_index, cull_dets): + # Remove a random fraction 1/cull_dets of each dataset + unique, counts = np.unique(obs_index, return_counts=True) + #min_count = min(counts) + culling_mask = np.zeros_like(obs_index, dtype=bool) + for dataset, count in zip(unique, counts): + indices = np.where(obs_index == dataset)[0] + selected_indices = np.random.choice(indices, (cull_dets - 1)* count // cull_dets, replace=False) + culling_mask[selected_indices] = True + return culling_mask + + +def load_per_detector_data(config, return_all_dets=False): + per_obs_fps = config.get("per_obs_fps") + skip_tags = config.get("skip_tags", []) + t0 = config.get("begin_timerange", 0) + tf = config.get("end_timerange", int(time.time())) + rxs = fpc.Receiver.load_file(per_obs_fps) + band = config.get("band") + if band is not None: + band = band.encode("utf-8") + + cull_dets = config.get("cull_dets", None) + cull_twice = config.get("cull_twice", False) + even_obs_size = config.get("even_obs_size", False) + + filelist = [obs for obs in rxs.keys() if all(skip not in obs for skip in skip_tags)] + filelist = [obs for obs in filelist if int(obs.split("_")[1]) > t0 and int(obs.split("_")[1]) < tf] + if config.get("use_these_files") is not None: + filelist = [filelist[i] for i in config.get("use_these_files")] + + weights_dets, obs_dets_fits, stream_id_list, obs_index = [], [], [], [] + which_ufm = config.get("which_ufm", None) + which_data = config.get("use_as_data") + which_weights = config.get("use_as_weights", None) + + for i, ffp in enumerate(filelist): + this_OT = rxs[ffp].optics_tubes[0] + for ufm in this_OT.focal_planes: + if 
which_ufm is not None and ufm.stream_id not in which_ufm: + continue + stream_id_list.append(ufm.stream_id) + weights = ufm.weights[:, 1] if which_weights == "r2" else ufm.weights[:, 0] + data = ufm.avg_fp if which_data == "raw" else ufm.transformed + weights_dets.append(weights) + obs_dets_fits.append(data) + obs_index.append(np.repeat(i, len(ufm.weights))) + + nom_data = [load_nom_focal_plane_full(config, s) for s in stream_id_list] + all_det_ids, all_nom_det_array = map(np.concatenate, zip(*nom_data)) + weights_dets = np.concatenate(weights_dets) + obs_dets_fits = np.concatenate(obs_dets_fits, axis=0) + obs_index = np.concatenate(obs_index) + + weights_dets[weights_dets < config.get("weight_cutoff")] = 0.0 + obs_dets_fits[np.where(weights_dets == 0)] = np.nan + mask = ~np.isnan(weights_dets) + + if return_all_dets: + #plotting use-case to compare subset fits with the entire dataset. + return ( + filelist, + obs_dets_fits[mask], + weights_dets[mask], + all_nom_det_array[mask], + all_det_ids[mask], + obs_index[mask], + ) - return filelist, obs_ufm_centers, weights_ufm - + else: + # Reduce detector counts for computation + if band is not None: + mask &= np.array([band in det for det in all_det_ids]) + #apply weights and band mask + obs_dets_fits = obs_dets_fits[mask] + weights_dets = weights_dets[mask] + all_nom_det_array = all_nom_det_array[mask] + all_det_ids = all_det_ids[mask] + obs_index = obs_index[mask] + if even_obs_size: + mask = create_size_mask(obs_index) + obs_dets_fits = obs_dets_fits[mask] + weights_dets = weights_dets[mask] + all_nom_det_array = all_nom_det_array[mask] + all_det_ids = all_det_ids[mask] + obs_index = obs_index[mask] + if cull_dets is not None: + for _ in range(2 if cull_twice else 1): + mask = create_culling_mask(obs_index, cull_dets) + obs_dets_fits = obs_dets_fits[mask] + weights_dets = weights_dets[mask] + all_nom_det_array = all_nom_det_array[mask] + all_det_ids = all_det_ids[mask] + obs_index = obs_index[mask] + return ( + 
filelist, + obs_dets_fits, + weights_dets, + all_nom_det_array, + all_det_ids, + obs_index + ) -def _load_obs_boresight(config, filelist): +def load_obs_boresight(config, filelist): # Load boresight elevation information from each observation # Put into an axis manager + pm_version = config.get("pm_version") ctx = core.Context(config["context"]["path"]) obs_info = [ctx.obsdb.get(obsid) for obsid in filelist] - az_c = [obs["az_center"] for obs in obs_info] - el_c = [obs["el_center"] for obs in obs_info] - roll_c = [obs["roll_center"] for obs in obs_info] - az_c = np.round(np.array(az_c), 4) - el_c = np.round(np.array(el_c), 4) - roll_c = np.round(np.array(roll_c), 4) - roll_c[np.where(roll_c == 0)[0]] = 0 # rounding gives negative 0 sometimes. + az_c = np.array([obs["az_center"] for obs in obs_info]) + el_c = np.array([obs["el_center"] for obs in obs_info]) + roll_c = np.array([obs["roll_center"] for obs in obs_info]) + #az_c = np.round(np.array(az_c), 4) + #el_c = np.round(np.array(el_c), 4) + #roll_c = np.round(np.array(roll_c), 4) + #roll_c[np.where(roll_c == 0)[0]] = 0 # rounding gives negative 0 sometimes. ancil = core.AxisManager(core.IndexAxis("samps")) ancil.wrap("az_enc", np.repeat(az_c, 7), [(0, "samps")]) - ancil.wrap("boresight_enc", np.repeat(-1 * roll_c, 7), [(0, "samps")]) ancil.wrap("el_enc", np.repeat(el_c, 7), [(0, "samps")]) + if "lat" in pm_version: + ancil.wrap("corotator_enc", np.repeat((el_c - 60. 
- roll_c), 7), [(0, "samps")]) + if "sat" in pm_version: + ancil.wrap("boresight_enc", np.repeat(-1 * roll_c, 7), [(0, "samps")]) + return ancil, roll_c + + +def load_obs_boresight_per_detector(config, filelist, obs_ind): + # Load boresight elevation information from each observation + # Put into an axis manager + platform = config.get("platform") + ctx = core.Context(config["context"]["path"]) + obs_info = [ctx.obsdb.get(obsid) for obsid in filelist] + az_c = np.array([obs["az_center"] for obs in obs_info]) + el_c = np.array([obs["el_center"] for obs in obs_info]) + roll_c = np.array([obs["roll_center"] for obs in obs_info]) + + ancil = core.AxisManager(core.IndexAxis("samps")) + if platform == 'lat': + roll_c = np.array([roll_c[i] for i in obs_ind]) + ancil.wrap("az_enc", np.array([az_c[i] for i in obs_ind]), [(0, "samps")]) + ancil.wrap("el_enc", np.array([el_c[i] for i in obs_ind]), [(0, "samps")]) + ancil.wrap("corotator_enc", ancil.el_enc - 60. - roll_c, [(0, "samps")]) + else: + roll_c = np.array([roll_c[i] for i in obs_ind]) + ancil.wrap("az_enc", np.array([az_c[i] for i in obs_ind]), [(0, "samps")]) + ancil.wrap("el_enc", np.array([el_c[i] for i in obs_ind]), [(0, "samps")]) + ancil.wrap("boresight_enc", -1 * roll_c, [(0, "samps")]) return ancil, roll_c def _init_fit_params(config): - default_params = pm.defaults_sat_v1 - fixed_params = config.get("fixed_params") + pm_version = config.get("pm_version") + init_params = config.get("initial_params", pm.param_defaults[pm_version]) + fixed_params = config.get("fixed_params",None) # Initialize lmfit Parameter object fit_params = Parameters() - for p in list(default_params.keys()): - fit_params.add(p, value=0.0, vary=True) + for p in init_params.keys(): + fit_params.add(p, value=init_params[p], vary=True) # Turn off various parameters depending on platform - for fix in fixed_params: - fit_params[fix].set(vary=False) + if fixed_params is not None: + for fix in fixed_params: + fit_params[fix].set(vary=False) return 
fit_params +def objective_model_func_lmfit( + params, pm_version, solver_aman, xieta_model, weights=True +): + if xieta_model == "measured": + xi_mod, eta_mod = model_measured_xieta(params, pm_version, solver_aman) + xi_ref, eta_ref, _ = solver_aman.measured_xieta_data + elif xieta_model == "template": + xi_mod, eta_mod = model_template_xieta(params, pm_version, solver_aman) + xi_ref, eta_ref, _ = solver_aman.nominal_xieta_locs + dist = np.sqrt((xi_ref - xi_mod) ** 2 + (eta_ref - eta_mod) ** 2) + #print(np.nansum(dist)) + weights_array = solver_aman.weights if weights else np.ones(len(dist)) + return chi_sq(weights_array, dist) def chi_sq(weights, dist): N = np.identity(len(dist)) * weights chi2 = dist.T * N * dist + #chi2 = dist.T @ N @ dist return chi2 - -def model_template_xieta(params, pm_version, solver_aman): +def model_template_xieta(params, pm_version, aman): """ Transform a measured (xi,eta) back into template position Data to Template -- modeling data as true template Quat math is based on this equation: q_nomodel * q_det_meas == q_model * q_det_true """ - xi_meas = solver_aman.ffp_ufm_center_fits[0] - eta_meas = solver_aman.ffp_ufm_center_fits[1] - if type(params) == lmfit.parameter.Parameters: - params = params.valuesdict() + xi_meas = aman.measured_xieta_data[0] + eta_meas = aman.measured_xieta_data[1] + params = params.valuesdict() if isinstance(params, Parameters) else params + # if type(params) == lmfit.parameter.Parameters: + # params = params.valuesdict() params["version"] = pm_version if "sat" in pm_version: - az, el, roll = pm._get_sat_enc_radians(solver_aman.ancil) - boresight = pm.apply_pointing_model(solver_aman, pointing_model=params, wrap=False) + az, el, roll = pm._get_sat_enc_radians(aman.ancil) + if "lat" in pm_version: + az, el, roll = pm._get_lat_enc_radians(aman.ancil) + boresight = pm.apply_pointing_model(aman, pointing_model=params, wrap=False) az1, el1, roll1 = boresight.az, boresight.el, boresight.roll q_nomodel = 
quat.rotation_lonlat(-az, el, 0) q_model = quat.rotation_lonlat(-az1, el1, roll1) @@ -127,24 +335,25 @@ def model_template_xieta(params, pm_version, solver_aman): xi_mod_true, eta_mod_true, _ = quat.decompose_xieta( ~q_model * q_nomodel * q_det_meas ) - return xi_mod_true, eta_mod_true - - -def model_measured_xieta(params, pm_version, solver_aman): + +def model_measured_xieta(params, pm_version, aman): """ Transform template (xi,eta) to match measured (xi,eta). Template to Data -- modeling the template as measured data Quat math is based on this equation: q_nomodel * q_det_meas == q_model * q_det_true """ - if type(params) == lmfit.parameter.Parameters: - params = params.valuesdict() + params = params.valuesdict() if isinstance(params, Parameters) else params + # if type(params) == lmfit.parameter.Parameters: + # params = params.valuesdict() params["version"] = pm_version - xi_true, eta_true, gam_true = solver_aman.nom_ufm_centers + xi_true, eta_true, gam_true = aman.nominal_xieta_locs if "sat" in pm_version: - az, el, roll = pm._get_sat_enc_radians(solver_aman.ancil) - boresight = pm.apply_pointing_model(solver_aman, pointing_model=params, wrap=False) + az, el, roll = pm._get_sat_enc_radians(aman.ancil) + if "lat" in pm_version: + az, el, roll = pm._get_lat_enc_radians(aman.ancil) + boresight = pm.apply_pointing_model(aman, pointing_model=params, wrap=False) az1, el1, roll1 = boresight.az, boresight.el, boresight.roll q_nomodel = quat.rotation_lonlat(-az, el, 0) @@ -157,34 +366,30 @@ def model_measured_xieta(params, pm_version, solver_aman): return xi_mod_meas, eta_mod_meas -def objective_model_func_lmfit( - params, pm_version, solver_aman, xieta_model, weights=True -): - if xieta_model == "measured": - xi_mod, eta_mod = model_measured_xieta(params, pm_version, solver_aman) - xi_ref, eta_ref, _ = solver_aman.ffp_ufm_center_fits - elif xieta_model == "template": - xi_mod, eta_mod = model_template_xieta(params, pm_version, solver_aman) - xi_ref, eta_ref, _ = 
solver_aman.nom_ufm_centers - - dist = [] - for i in range(len(xi_mod)): - dist.append(math.dist([xi_ref[i], eta_ref[i]], [xi_mod[i], eta_mod[i]])) - if weights: - return chi_sq(solver_aman.weights, np.array(dist)) - else: - return chi_sq(np.ones(len(dist)), np.array(dist)) - - -def get_RMS(model_xieta, ref_xieta, weights, use_inds=None): +def calc_RMS_and_residuals(modeled_fits, model_reference, weights, use_inds=None): + diff = ((modeled_fits[0] - model_reference[0]) / ARCMIN) ** 2 +\ + ((modeled_fits[1] - model_reference[1]) / ARCMIN) ** 2 if use_inds is not None: - diff = ((model_xieta[0] - ref_xieta[0])[use_inds] / ARCMIN) ** 2 + \ - ((model_xieta[1] - ref_xieta[1])[use_inds] / ARCMIN) ** 2 + diff = diff[use_inds] weights = weights[use_inds] - else: - diff = ((model_xieta[0] - ref_xieta[0]) / ARCMIN) ** 2 + \ - ((model_xieta[1] - ref_xieta[1]) / ARCMIN) ** 2 - return (np.nansum(diff * weights) / np.nansum(weights)) ** 0.5 + return np.sqrt(np.nansum(diff * weights) / np.nansum(weights)), diff**0.5 + + +def apply_model_params(xieta_model, pointing_model, pm_version, aman, use_inds=None): + #fetch_RMS_and_residuals + #Apply PM parameters to either template or data points, calc RMS, return residuals. 
+ if xieta_model == "measured": + model_reference = aman.measured_xieta_data + modeled_fits = model_measured_xieta( + pointing_model, pm_version, aman + ) + elif xieta_model == "template": + model_reference = aman.nominal_xieta_locs + modeled_fits = model_template_xieta( + pointing_model, pm_version, aman + ) + rms, fit_residuals = calc_RMS_and_residuals(modeled_fits, model_reference, aman.weights, use_inds=use_inds) + return modeled_fits, fit_residuals, rms, model_reference def _round_params(param_dict, decimal): @@ -202,6 +407,7 @@ def _create_db(filename, save_dir): else: os.makedirs(save_dir, exist_ok=True) scheme = core.metadata.ManifestScheme() + scheme.add_range_match("obs:obs_timestamp") scheme.add_data_field("dataset") return core.metadata.ManifestDb(db_filename, scheme=scheme) @@ -213,33 +419,6 @@ def get_parser(parser=None): return parser -def build_param_fit_stat_aman(output): - # takes output of lmfit.minimize() - parameter_fit_stats = core.AxisManager(core.IndexAxis("parameters")) - parameter_fit_stats.wrap( - "name", np.array([output.params[p].name for p in output.params]) - ) - parameter_fit_stats.wrap( - "value", np.array([output.params[p].value for p in output.params]) - ) - parameter_fit_stats.wrap( - "vary", np.array([output.params[p].vary for p in output.params]) - ) - parameter_fit_stats.wrap( - "min", np.array([output.params[p].min for p in output.params]) - ) - parameter_fit_stats.wrap( - "max", np.array([output.params[p].max for p in output.params]) - ) - parameter_fit_stats.wrap( - "stderr", np.array([output.params[p].stderr for p in output.params]) - ) - parameter_fit_stats.wrap( - "correl", np.array([output.params[p].correl for p in output.params]) - ) - return parameter_fit_stats - - def main(config_path: str): # Read relevant config file info with open(config_path, "r", encoding="utf-8") as file: @@ -250,12 +429,32 @@ def main(config_path: str): xieta_model = config.get("xieta_model", "measured") xe_tag = f"{xieta_model}_xieta" 
iterate_cutoff = config.get("iterate_cutoff", None) - append = config.get("append", "") - append_tag = f"{bool(append)*'_'}{append}" + plotlims = config.get("plotlims", 20) + append_string = config.get("append", "") + if append_string is not None: + append_tag = f"{bool(append_string)*'_'}{append_string}" + else: + append_tag = "" + which_ufm = config.get("which_ufm", "") + if which_ufm is not None: + which_ufm = which_ufm if isinstance(which_ufm, list) else [which_ufm] + suffixes = [ufm.split("_")[-1] for ufm in which_ufm] + ufm_tag = "_" + "_".join(suffixes) + else: + ufm_tag = "" + band = config.get("band", None) + if config.get("band") is not None: + band_tag = f"_{band}" + else: + band_tag = "" save_dir = os.path.join( - config.get("outdir"), f"{platform}_pointing_model_{sv_tag}", f"{xe_tag}{append_tag}" + config.get("outdir"), + f"{platform}_pointing_model_{sv_tag}", + f"{xe_tag}{append_tag}{ufm_tag}{band_tag}", ) os.makedirs(save_dir, exist_ok=True) + shutil.copy(config_path, os.path.join(save_dir, "config.yaml")) + # Initialize Logger logger = sp_util.init_logger(__name__, "Solve pointing_model") @@ -273,64 +472,158 @@ def main(config_path: str): "Pointing model will try to replicate (model) the %s data.", xieta_model ) - # Load in focal_plane and boresigt data - nom_ufm_centers = load_nom_centers(config) - logger.info("Loaded nominal UFM centers from %s: ", config.get("ffp_path")) - logger.info(nom_ufm_centers) - - filelist, obs_ufm_centers, weights_ufm = _load_per_obs_data(config) - logger.info("Loaded per-obs FFP data from %s: ", config.get("per_obs_fps")) - logger.info("Including data from these obs:") - logger.info(filelist) - - ancil, roll_c = _load_obs_boresight(config, filelist) - logger.info("Loaded boresight data from obs ids.") - - # Build Axis Managers - obs_info = core.AxisManager() - obs_info.wrap("obs_ids", np.array(filelist)) - - solver_aman = core.AxisManager(core.IndexAxis("samps")) - solver_aman.wrap("ancil", ancil) - 
solver_aman.wrap("obs_info", obs_info) - solver_aman.wrap("roll_c", np.repeat(roll_c, 7), [(0, "samps")]) - solver_aman.wrap( - "nom_ufm_centers", - np.repeat([nom_ufm_centers], len(filelist), axis=0) - .reshape(len(filelist) * 7, 3) - .T, - [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], - [(1, "samps")], - ) - solver_aman.wrap( - "ffp_ufm_center_fits", - obs_ufm_centers.reshape(len(filelist) * 7, 3).T, - [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], - [(1, "samps")], - ) - solver_aman.wrap("weights", weights_ufm.reshape(-1), [(0, "samps")]) + ########################################################## + ### Begin split for per-detector or per-UFM center fitting + ########################################################## + + fit_type = config.get("fit_type", "detector") + if fit_type == "detector": + + which_ufm = config.get("which_ufm", None) + if which_ufm is not None: + logger.info( + "Performing per-detector fits for subset of UFMs: %s. ", which_ufm + ) + else: + logger.info("Performing per-detector fits for all UFM data.") + + which_data = config.get("use_as_data") + # use_weights = False if which_data == 'raw' else config.get("use_weights", True) + use_weights = config.get("use_weights", True) + + #Make axis manager with full detector set. + # Keep wafer/band/obs cuts but do not further downsample. 
+ fitcheck_aman = core.AxisManager(core.IndexAxis("samps")) + ( + filelist, + obs_dets_fits, + weights_dets, + all_nom_det_array, + all_det_ids, + obs_index, + ) = load_per_detector_data(config, return_all_dets=True) + ancil, roll_c = load_obs_boresight_per_detector(config, filelist, obs_index) + + fitcheck_aman.wrap("ancil", ancil) + fitcheck_aman.wrap( + "nominal_xieta_locs", all_nom_det_array.T, + [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], + [(1, "samps")], + ) + fitcheck_aman.wrap( + "measured_xieta_data", obs_dets_fits.T, + [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], + [(1, "samps")], + ) + fitcheck_aman.wrap("weights", weights_dets, [(0, "samps")]) + fitcheck_aman.wrap("obs_index", obs_index, [(0, "samps")]) + + #Now make axis manager that has down sampled data for computation + ( + filelist, + obs_dets_fits, + weights_dets, + all_nom_det_array, + all_det_ids, + obs_index, + ) = load_per_detector_data(config) + + logger.info("Loaded %s data points", len(weights_dets)) + ancil, roll_c = load_obs_boresight_per_detector(config, filelist, obs_index) + + # Build Axis Managers + obs_info = core.AxisManager() + obs_info.wrap("obs_ids", np.array(filelist)) + + + solver_aman = core.AxisManager(core.IndexAxis("samps")) + solver_aman.wrap("ancil", ancil) + solver_aman.wrap("obs_info", obs_info) + solver_aman.wrap("roll_c", roll_c, [(0, "samps")]) + solver_aman.wrap( + "nominal_xieta_locs", + all_nom_det_array.T, + [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], + [(1, "samps")], + ) + solver_aman.wrap( + "measured_xieta_data", + obs_dets_fits.T, + [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], + [(1, "samps")], + ) + solver_aman.wrap("weights", weights_dets, [(0, "samps")]) + solver_aman.wrap("obs_index", obs_index) + # Make weights/data cuts + logger.info("Built axis manager") + + ########################################################################### + elif fit_type == "ufm_center": + use_weights = 
config.get("use_weights", True) + # Load in focal_plane and boresight data + nom_ufm_centers = load_nom_ufm_centers(config) + logger.info("Loaded nominal UFM centers from %s: ", config.get("ffp_path")) + logger.info(nom_ufm_centers) + + filelist, obs_ufm_centers, weights_ufm, obs_index = load_per_obs_data(config) + logger.info("Loaded per-obs FFP data from %s: ", config.get("per_obs_fps")) + logger.info("Including data from these obs:") + logger.info(filelist) + + ancil, roll_c = load_obs_boresight(config, filelist) + logger.info("Loaded boresight data from obs ids.") + + # Build Axis Managers + obs_info = core.AxisManager() + obs_info.wrap("obs_ids", np.array(filelist)) + + solver_aman = core.AxisManager(core.IndexAxis("samps")) + solver_aman.wrap("ancil", ancil) + solver_aman.wrap("obs_info", obs_info) + solver_aman.wrap("roll_c", np.repeat(roll_c, 7), [(0, "samps")]) + solver_aman.wrap( + "nominal_xieta_locs", + np.repeat([nom_ufm_centers], len(filelist), axis=0) + .reshape(len(filelist) * 7, 3) + .T, + [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], + [(1, "samps")], + ) + solver_aman.wrap( + "measured_xieta_data", + obs_ufm_centers.reshape(len(filelist) * 7, 3).T, + [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], + [(1, "samps")], + ) + solver_aman.wrap("weights", weights_ufm.reshape(-1), [(0, "samps")]) + solver_aman.wrap("obs_index", obs_index) + # Make weights/data cuts + logger.info("Built axis manager") - # Make weights/data cuts - # solver_aman.weights[solver_aman.ancil.az_enc > 360] = 0.0 - logger.info("Built axis manager") + ############################ + # END SPLIT + ############################ # Initialize Parameters to Fit with Model fit_params = _init_fit_params(config) + logger.info("Initialized fit parameters") + """ if xieta_model == "measured": - model_reference = solver_aman.ffp_ufm_center_fits + model_reference = solver_aman.measured_xieta_data elif xieta_model == "template": - model_reference = 
solver_aman.nom_ufm_centers - + model_reference = solver_aman.nominal_xieta_locs + """ # Solve for Model Parameters # use chosen xieta_model to solve for parameters - use_weights = True - model_solved_params = lmfit.minimize( + # use_weights = config.get("use_weights",True) + model_solved_params = minimize( objective_model_func_lmfit, fit_params, method="nelder", nan_policy="omit", args=(pm_version, solver_aman, xieta_model, use_weights), ) + logger.info("Ran 1st Minimization") test_params = _round_params(model_solved_params.params.valuesdict(), 8) test_params["version"] = pm_version @@ -338,7 +631,7 @@ def main(config_path: str): logger.info(test_params) logger.info(model_solved_params.params.pretty_print(precision=5, colwidth=11)) logger.info("Fit Report:") - logger.info(lmfit.fit_report(model_solved_params)) + logger.info(fit_report(model_solved_params)) # save pointing model parameters to axis manager param_aman = core.AxisManager() @@ -346,60 +639,67 @@ def main(config_path: str): param_aman.wrap(k, test_params[k]) solver_aman.wrap("pointing_model", param_aman) + # save errors to axis manager + error_aman = core.AxisManager() + for k in list(model_solved_params.params.values()): + error_aman.wrap(k.name, k.stderr) + solver_aman.wrap("pointing_model_errors", error_aman) + # parameter_fit_stats = build_param_fit_stat_aman(model_solved_params) # solver_aman.wrap("parameter_fit_stats", parameter_fit_stats) # Model template and measured points using parameters found above - if xieta_model == "measured": - model_fits = model_measured_xieta( - solver_aman.pointing_model, pm_version, solver_aman - ) - elif xieta_model == "template": - model_fits = model_template_xieta( - solver_aman.pointing_model, pm_version, solver_aman - ) - + modeled_fits, fit_residuals_i1, rms_i1, model_reference = apply_model_params(xieta_model, solver_aman.pointing_model, pm_version, solver_aman) logger.info( - "RMS on fit: %f", get_RMS(model_fits, model_reference, solver_aman.weights) - ) - - 
fit_residual_i1 = np.array( - [ - math.dist( - [model_reference[0][i], model_reference[1][i]], - [model_fits[0][i], model_fits[1][i]], - ) - for i in range(len(model_fits[0])) - ] - ) + "RMS on fit: %f arcmin", rms_i1) # Save fit results to the axis manager modelfit_aman = core.AxisManager() - modelfit_aman.wrap("xi", model_fits[0], overwrite=True) - modelfit_aman.wrap("eta", model_fits[1], overwrite=True) - solver_aman.wrap("model_fits", modelfit_aman, overwrite=True) - solver_aman.wrap( - "fit_rms", - get_RMS(model_fits, model_reference, solver_aman.weights), - overwrite=True, - ) + modelfit_aman.wrap("xi", modeled_fits[0], overwrite=True) + modelfit_aman.wrap("eta", modeled_fits[1], overwrite=True) + solver_aman.wrap("modeled_fits", modelfit_aman, overwrite=True) + solver_aman.wrap("fit_residuals", fit_residuals_i1, overwrite=True) + solver_aman.wrap("fit_rms", rms_i1, overwrite=True) + + + if fit_type == "detector": + _, fit_residuals_full, rms_full, _ = apply_model_params(xieta_model, solver_aman.pointing_model, pm_version, fitcheck_aman) + logger.info("RMS on FULL detector set: %f arcmin", rms_full) + solver_aman.wrap("fit_residuals_full", fit_residuals_full, overwrite=True) + solver_aman.wrap("fit_rms_full", rms_full, overwrite=True) + solver_aman.wrap("obs_index_full", fitcheck_aman.obs_index) + + cutoff = np.nanstd(fit_residuals_i1)*2 + np.nanmedian(fit_residuals_i1) + logger.info(f"2 stdev away from residual Median: {cutoff:.2f} arcmin") if config.get("make_plots"): tag = "_i1" - plot_ws0_model_fits(solver_aman, config, save_dir, tag) - plot_template_space_fits_per_wafer(solver_aman, config, save_dir, tag) - plot_residuals_vs_ancil(solver_aman, config, save_dir, tag) - plot_xieta_cross_residuals(solver_aman, config, save_dir, tag) - plot_xieta_residuals(solver_aman, config, save_dir, tag) - + plotter = ModelFitsPlotter(solver_aman=solver_aman, + config=config, + save_dir=save_dir, + iteration_tag=tag, + save_figure=True, + plotlims=plotlims) + if fit_type 
== "ufm_center": + plotter.plot_ws0_modeled_fits() + plotter.plot_template_space_fits_per_wafer() + plotter.plot_residuals_vs_ancil() + plotter.plot_xieta_cross_residuals() + plotter.plot_xieta_residuals() + else: + plotter.plot_modeled_fits() + plotter.plot_template_space_fits_per_detector() + plotter.plot_residuals_vs_ancil() + plotter.plot_residuals_histograms() + plotter.plot_dets_in_these_obs() if iterate_cutoff is not None: logger.info("Iterating parameter solution") - - cutoff = np.nanstd(fit_residual_i1) + np.nanmedian(fit_residual_i1) - logger.info(f"1 std away from residual Median: {cutoff / ARCMIN} arcmin") logger.info(f"Using {iterate_cutoff} as cutoff") - bad_fit_inds = np.where((fit_residual_i1 / ARCMIN) > iterate_cutoff)[0] + if iterate_cutoff == "auto": + iterate_cutoff = np.nanstd(fit_residuals_i1)*2 + np.nanmedian(fit_residuals_i1) + logger.info(f"Using {iterate_cutoff} as cutoff") + bad_fit_inds = np.where(fit_residuals_i1 > iterate_cutoff)[0] logger.info("Bad fit indices:") logger.info(bad_fit_inds) logger.info( @@ -407,101 +707,123 @@ def main(config_path: str): len(bad_fit_inds), iterate_cutoff, ) - bad_filename = bad_fit_inds // 7 - bad_wafer = bad_fit_inds % 7 - logger.info("Outliers:") - for i, full_i in enumerate(bad_fit_inds): - logger.info(f"{filelist[bad_filename[i]]}; ws{bad_wafer[i]}; Resid. {np.round(fit_residual_i1[full_i] / ARCMIN, 4)}") - logger.info(f"--- Roll {solver_aman.roll_c[full_i]}; El {solver_aman.ancil.el_enc[full_i]}; weight {np.round(solver_aman.weights[full_i],4)}") - - #Print RMS of initial fits without outlying data points before - #zero-ing the weights. 
- good_fit_inds = np.where((fit_residual_i1 / ARCMIN) < iterate_cutoff)[0] - masked_rms = get_RMS(model_fits, model_reference, solver_aman.weights, use_inds = good_fit_inds) - logger.info("RMS on initial fit without outliers: %f", masked_rms) - - solver_aman.weights[bad_fit_inds] = 0.0 - use_weights = True - model_solved_params = lmfit.minimize( - objective_model_func_lmfit, - fit_params, - method="nelder", - nan_policy="omit", - args=(pm_version, solver_aman, xieta_model, use_weights), - ) - - test_params = _round_params(model_solved_params.params.valuesdict(), 8) - test_params["version"] = pm_version - logger.info("Found best-fit pointing model parameters, second iteration") - logger.info(test_params) - logger.info(model_solved_params.params.pretty_print(precision=5, colwidth=11)) - logger.info("Fit Report:") - logger.info(lmfit.fit_report(model_solved_params)) - - - # save pointing model parameters to axis manager - param_aman = core.AxisManager() - for k in list(test_params.keys()): - param_aman.wrap(k, test_params[k]) - solver_aman.wrap("pointing_model", param_aman, overwrite=True) - - # parameter_fit_stats = build_param_fit_stat_aman(model_solved_params) - # solver_aman.wrap("parameter_fit_stats", parameter_fit_stats, overwrite=True) - # Recalculate best fit modeled points - if xieta_model == "measured": - model_fits = model_measured_xieta( - solver_aman.pointing_model, pm_version, solver_aman + if len(bad_fit_inds) != 0: + if fit_type == "ufm_center": + bad_filename = bad_fit_inds // 7 + bad_wafer = bad_fit_inds % 7 + logger.info("Outliers:") + for i, full_i in enumerate(bad_fit_inds): + logger.info( + f"{filelist[bad_filename[i]]}; ws{bad_wafer[i]}; Resid. {np.round(fit_residuals_i1[full_i], 4)}" + ) + logger.info( + f"--- Roll {solver_aman.roll_c[full_i]}; El {solver_aman.ancil.el_enc[full_i]}; weight {np.round(solver_aman.weights[full_i],4)}" + ) + + # Print RMS of initial fits without outlying data points before + # zero-ing the weights. 
+ good_fit_inds = np.where(fit_residuals_i1 < iterate_cutoff)[0] + _, _, masked_rms, _ = apply_model_params(xieta_model, + solver_aman.pointing_model, + pm_version, + solver_aman, + use_inds=good_fit_inds) + + logger.info("RMS on initial fit without outliers: %f arcmin", masked_rms) + solver_aman.wrap('bad_fit_inds', bad_fit_inds) + solver_aman.weights[bad_fit_inds] = 0.0 + + model_solved_params = minimize( + objective_model_func_lmfit, + fit_params, + method="nelder", + nan_policy="omit", + args=(pm_version, solver_aman, xieta_model, use_weights), ) - elif xieta_model == "template": - model_fits = model_template_xieta( - solver_aman.pointing_model, pm_version, solver_aman + + test_params = _round_params(model_solved_params.params.valuesdict(), 8) + test_params["version"] = pm_version + logger.info("Found best-fit pointing model parameters, second iteration") + logger.info(test_params) + logger.info( + model_solved_params.params.pretty_print(precision=5, colwidth=11) ) - logger.info( - "RMS on secondary fit: %f", get_RMS(model_fits, model_reference, solver_aman.weights) - ) - fit_residual_i2 = np.array( - [ - math.dist( - [model_reference[0][i], model_reference[1][i]], - [model_fits[0][i], model_fits[1][i]], - ) - for i in range(len(model_fits[0])) - ] - ) - # Save fit results to the axis manager - modelfit_aman = core.AxisManager() - modelfit_aman.wrap("xi", model_fits[0], overwrite=True) - modelfit_aman.wrap("eta", model_fits[1], overwrite=True) - solver_aman.wrap("model_fits", modelfit_aman, overwrite=True) - solver_aman.wrap( - "fit_rms", - get_RMS(model_fits, model_reference, solver_aman.weights), - overwrite=True, - ) - if config.get("make_plots"): - tag = "_i2" - plot_ws0_model_fits(solver_aman, config, save_dir, tag) - plot_template_space_fits_per_wafer(solver_aman, config, save_dir, tag) - plot_residuals_vs_ancil(solver_aman, config, save_dir, tag) - plot_xieta_cross_residuals(solver_aman, config, save_dir, tag) - plot_xieta_residuals(solver_aman, config, 
save_dir, tag) - plot_total_residuals(solver_aman=solver_aman, - config=config, - save_dir=save_dir, - tag=tag, - fit_residual_i1=fit_residual_i1, - fit_residual_i2=fit_residual_i2, - bad_fit_inds=bad_fit_inds, - ) + logger.info("Fit Report:") + logger.info(lmfit.fit_report(model_solved_params)) + + # save pointing model parameters to axis manager + solver_aman.move("pointing_model", "pointing_model_i1") + param_aman = core.AxisManager() + for k in list(test_params.keys()): + param_aman.wrap(k, test_params[k]) + solver_aman.wrap("pointing_model", param_aman, overwrite=True) + + # save errors to axis manager + solver_aman.move("pointing_model_errors", "pointing_model_errors_i1") + error_aman = core.AxisManager() + for k in list(model_solved_params.params.values()): + error_aman.wrap(k.name, k.stderr) + solver_aman.wrap("pointing_model_errors", error_aman, overwrite=True) + + # parameter_fit_stats = build_param_fit_stat_aman(model_solved_params) + # solver_aman.wrap("parameter_fit_stats", parameter_fit_stats, overwrite=True) + + # Recalculate best-fit modeled points + modeled_fits, fit_residuals_i2, rms_i2, model_reference = apply_model_params(xieta_model, + solver_aman.pointing_model, + pm_version, + solver_aman) + + logger.info("RMS on secondary fit: %f arcmin", rms_i2) + + # Save fit results to the axis manager + modelfit_aman = core.AxisManager() + modelfit_aman.wrap("xi", modeled_fits[0], overwrite=True) + modelfit_aman.wrap("eta", modeled_fits[1], overwrite=True) + solver_aman.wrap("modeled_fits", modelfit_aman, overwrite=True) + solver_aman.move("fit_residuals", "fit_residuals_i1") + solver_aman.wrap("fit_residuals", fit_residuals_i2, overwrite=True) + solver_aman.move("fit_rms", "fit_rms_i1") + solver_aman.wrap("fit_rms", rms_i2, overwrite=True) + + if fit_type == "detector": + _, fit_residuals_full, rms_full, _ = apply_model_params(xieta_model, solver_aman.pointing_model, pm_version, fitcheck_aman) + logger.info("RMS on FULL detector set: %f arcmin", 
rms_full) + solver_aman.move("fit_residuals_full", "fit_residuals_full_i1") + solver_aman.move("fit_rms_full", "fit_rms_full_i1") + solver_aman.wrap("fit_residuals_full", fit_residuals_full, overwrite=True) + solver_aman.wrap("fit_rms_full", rms_full, overwrite=True) + + if config.get("make_plots"): + tag = "_i2" + plotter = ModelFitsPlotter(solver_aman=solver_aman, + config=config, + save_dir=save_dir, + iteration_tag=tag, + save_figure=True, + plotlims=plotlims) + plotter.plot_total_residuals() + plotter.plot_residuals_vs_ancil() + if fit_type == "ufm_center": + plotter.plot_ws0_modeled_fits() + plotter.plot_template_space_fits_per_wafer() + plotter.plot_xieta_cross_residuals() + plotter.plot_xieta_residuals() + else: + plotter.plot_modeled_fits() + plotter.plot_template_space_fits_per_detector() + plotter.plot_residuals_histograms() + plotter.plot_dets_in_these_obs() else: if config.get("make_plots"): - plot_total_residuals(solver_aman, - config, - save_dir, - tag='', - fit_residual_i1=fit_residual_i1) - + plotter = ModelFitsPlotter(solver_aman=solver_aman, + config=config, + save_dir=save_dir, + iteration_tag="", + save_figure=True, + plotlims=plotlims) + plotter.plot_total_residuals() if config.get("save_output"): # Save .h5 and ManifestDb @@ -509,372 +831,865 @@ def main(config_path: str): h5_filename = os.path.join(save_dir, h5_rel) solver_aman.save(h5_filename, overwrite=True) dbfile = "db.sqlite" + t0 = config.get("begin_timerange", 0) + t1 = config.get("end_timerange", int(time.time())) + Epoch_Name = config.get("epoch_name") db = _create_db(dbfile, save_dir) - db.add_entry({"dataset": "pointing_model"}, filename=h5_rel, replace=True) + db.add_entry( + {"obs:obs_timestamp": (t0, t1), "dataset": f"{Epoch_Name}_parameters"}, + filename=h5_rel, + replace=True, + ) db.to_file(os.path.join(save_dir, dbfile)) logger.info("Done") - + #################### # Plotting Functions #################### -def plot_ws0_model_fits(solver_aman, config, save_dir, tag=""): - 
plot_dir = os.path.join(save_dir, "plots") - os.makedirs(plot_dir, exist_ok=True) - platform = config.get("platform") - plotmask = np.where(solver_aman.weights) - rms = np.round(solver_aman.fit_rms, 4) - xi_model_fit = solver_aman.model_fits.xi - eta_model_fit = solver_aman.model_fits.eta - if config.get("xieta_model") == "measured": - xi_ref, eta_ref, _ = solver_aman.ffp_ufm_center_fits - elif config.get("xieta_model") == "template": - xi_ref, eta_ref, _ = solver_aman.nom_ufm_centers - markercolor = solver_aman.ancil.el_enc - coloredby = "El" - scale_weights = solver_aman.weights / np.nanmax(solver_aman.weights) - - #### - fig = plt.figure(figsize=(6, 6)) - gs = fig.add_gridspec(2, 2) - ax = fig.add_subplot(gs[:, :]) - ax.plot( - solver_aman["nom_ufm_centers"][0, : 7 + 1] / DEG, - solver_aman["nom_ufm_centers"][1, : 7 + 1] / DEG, - "rx", - label="Nominal Center", - ) - ax.scatter( - xi_ref[plotmask] / DEG, - eta_ref[plotmask] / DEG, - c=markercolor[plotmask], - alpha=0.4, - label="Data", - edgecolors="k", - linewidths=0.4, - s=130 * scale_weights[plotmask], - cmap="jet", - vmax=65, - ) - im = ax.scatter( - xi_model_fit / DEG, - eta_model_fit / DEG, - marker="*", - c=markercolor, - cmap="jet", - edgecolor="gray", - lw=0.3, - s=130, - label=f"Model, RMS = {rms}", - vmax=65, - ) - ax.legend(loc=1, fontsize="small") - ax.set_xlabel("Xi (deg)") - ax.set_ylabel("Eta (deg)") - cb = plt.colorbar(im, fraction=0.046, pad=0.04) - cb.ax.set_title(coloredby) - ax.set_title(f"Fits, Colored by {coloredby} (deg)") - - # Plot lines connecting data to modeled data point - xitoxi = np.empty((len(xi_model_fit), 2)) - xitoxi[:, 0] = xi_ref / DEG - xitoxi[:, 1] = xi_model_fit / DEG - etatoeta = np.empty((len(eta_model_fit), 2)) - etatoeta[:, 0] = eta_ref / DEG - etatoeta[:, 1] = eta_model_fit / DEG - ax.plot(xitoxi.T, etatoeta.T, "k", lw=0.4) - ax.set_xlim(-1, .25); ax.set_ylim(-.5, .5) - #plt.subplots_adjust(left=0.1, right=0.90, bottom=0.05, hspace=0.3) - plt.tight_layout() - 
class ModelFitsPlotter:
    def __init__(self, solver_aman, config, save_dir, iteration_tag="", save_figure=True, plotlims=None):
        """Collect solver results and plotting configuration for the fit plots.

        Parameters
        ----------
        solver_aman : AxisManager-like object holding solver outputs
            (weights, modeled_fits, ancil, ...), read lazily by the plot methods.
        config : dict-like configuration, read via ``.get()``.
        save_dir : str
            Directory under which a ``plots`` subdirectory is created.
        iteration_tag : str
            Suffix appended to output filenames (e.g. ``"_i1"``).
        save_figure : bool
            When True, figures are written to disk before being closed.
        plotlims : float or None
            Axis limit for template-space plots; falls back to
            ``config["plotlims"]`` (default 20) when None.
        """
        self.solver_aman = solver_aman
        self.config = config
        self.tag = iteration_tag
        self.save_figure = save_figure
        # An explicit argument wins over the configured value.
        self.plotlims = plotlims if plotlims is not None else config.get("plotlims", 20)

        # All figures land in <save_dir>/plots.
        self.plot_dir = os.path.join(save_dir, "plots")
        os.makedirs(self.plot_dir, exist_ok=True)

        self.platform = config.get("platform")
        self.pm_version = config.get("pm_version")
        self.ufms = config.get("ufms")
        self.which_ufm = config.get("which_ufm", None)
        self.xieta_model = config.get("xieta_model", "measured")
        self.append_string = config.get("append", "")
        self.iterate_cutoff = config.get("iterate_cutoff", None)
self.solver_aman.weights + roll_c = self.solver_aman.roll_c + elev = self.solver_aman.ancil.el_enc + azim = self.solver_aman.ancil.az_enc + + plt.figure() + fig, ax = plt.subplots(2,2,figsize=(11,10)) + a0 = ax[0,0].scatter(measured_xieta_data[0], measured_xieta_data[1], + c=roll_c, alpha=0.5, s=10, cmap='jet',vmin=-45,vmax=45) + a1= ax[1,0].scatter(measured_xieta_data[0], measured_xieta_data[1], + c=elev, alpha=0.3, s=10, cmap='jet',vmin=45,vmax=65) + a2= ax[0,1].scatter(measured_xieta_data[0], measured_xieta_data[1], + c=azim, alpha=0.3, s=10, cmap='jet', vmin=0,vmax=420) + a3= ax[1,1].scatter(measured_xieta_data[0], measured_xieta_data[1], + c=weights, alpha=0.3, s=10, cmap='jet',vmin=0.8, vmax=1) + c0 = plt.colorbar(a0) + c1 = plt.colorbar(a1) + c2 = plt.colorbar(a2) + c3 = plt.colorbar(a3) + ax[0,0].set_title('color by boresight.roll', fontsize='medium') + ax[1,0].set_title('color by fit Elevation', fontsize='medium') + ax[0,1].set_title('color by fit Azimuth', fontsize='medium') + ax[1,1].set_title('color by fit weights', fontsize='medium') + plt.suptitle('Detectors hit in these observations') + if self.save_figure: + plt.savefig(f"{plot_dir}/{platform}_dets_in_these_obs.png", dpi=350) + plt.close() + + def plot_modeled_fits(self): + platform = self.platform + plot_dir = self.plot_dir + tag = self.tag + ancil = self.solver_aman.ancil + nominal_xieta_locs = self.solver_aman.nominal_xieta_locs + measured_xieta_data = self.solver_aman.measured_xieta_data + weights = self.solver_aman.weights + modeled_fits = self.solver_aman.modeled_fits + fit_rms = self.solver_aman.fit_rms + if self.which_ufm is not None: + if isinstance(self.which_ufm, list): + ufm_list = self.which_ufm + else: + ufm_list = [self.which_ufm] + else: + ufm_list = self.ufms + plotmask = np.where(weights) + nom_array = np.concatenate( + [load_nom_focal_plane_full(self.config, ufm)[1] for ufm in ufm_list], + axis=0, + ) + rms = np.round(fit_rms, 4) + xi_model_fit = modeled_fits.xi + eta_model_fit = 
modeled_fits.eta + if self.xieta_model == "measured": + xi_ref, eta_ref, _ = measured_xieta_data + elif self.xieta_model == "template": + xi_ref, eta_ref, _ = nominal_xieta_locs + if 'sat' in platform: + markercolor = ancil.boresight_enc.copy() + coloredby = "Boresight" + elif 'lat' in platform: + markercolor = ancil.corotator_enc.copy() + coloredby = "Corotator" + scale_weights = weights / np.nanmax(weights) + #### + fig = plt.figure(figsize=(6, 6)) + gs = fig.add_gridspec(2, 2) + ax = fig.add_subplot(gs[:, :]) + ax.scatter( + xi_ref[plotmask] / DEG, + eta_ref[plotmask] / DEG, + c=markercolor[plotmask], + alpha=0.4, + label="Data", + edgecolors="k", + linewidths=0.4, + s=130 * scale_weights[plotmask], cmap="jet", + vmax=65, ) - ax[i // 4, i % 4].set_xlim(-12, 12) - ax[i // 4, i % 4].set_ylim(-12, 12) - ax[i // 4, i % 4].set_title(f"ws{i}") - plt.colorbar(im, ax[1, 3], label="Elevation (deg)", fraction=0.046, pad=0.04) - plt.tight_layout() - plt.savefig(f"{plot_dir}/{platform}_unmodeled_fits_WS_elevation{tag}.png", dpi=350) - plt.close() - - fig, ax = plt.subplots(2, 4, figsize=(9, 6)) - for i in range(7): - ax[i // 4, i % 4].plot(0, 0, "kx", label="Nominal Center") - im = ax[i // 4, i % 4].scatter( - xi_unmod[i::7] / ARCMIN - solver_aman.nom_ufm_centers[0, i] / ARCMIN, - eta_unmod[i::7] / ARCMIN - solver_aman.nom_ufm_centers[1, i] / ARCMIN, - c=solver_aman.ancil.boresight_enc[i::7], - s=scale_weights[i::7] * 80, - edgecolor="gray", - lw=0.3, - marker="o", - alpha=0.5, + sc3 = ax.scatter( + xi_model_fit / DEG, + eta_model_fit / DEG, + marker="*", + c=markercolor, cmap="jet", + alpha=0.4, + edgecolor="k", + lw=0.4, + s=65, + label=f"Model, RMS = {rms}", + vmax=65, ) - ax[i // 4, i % 4].set_xlim(-12, 12) - ax[i // 4, i % 4].set_ylim(-12, 12) - ax[i // 4, i % 4].set_title(f"ws{i}") - plt.colorbar(im, ax[1, 3], label="Boresight (deg)", fraction=0.046, pad=0.04) - plt.tight_layout() - plt.savefig(f"{plot_dir}/{platform}_unmodeled_fits_WS_boresight{tag}.png", dpi=350) - 
plt.close() - - -def plot_residuals_vs_ancil(solver_aman, config, save_dir, tag): - plot_dir = os.path.join(save_dir, "plots") - os.makedirs(plot_dir, exist_ok=True) - platform = config.get("platform") - scale_weights = solver_aman.weights / np.nanmax(solver_aman.weights) - plotmask = np.where(solver_aman.weights) - xi_model_fit = solver_aman.model_fits.xi - eta_model_fit = solver_aman.model_fits.eta - if config.get("xieta_model") == "measured": - xi_ref, eta_ref, _ = solver_aman.ffp_ufm_center_fits - elif config.get("xieta_model") == "template": - xi_ref, eta_ref, _ = solver_aman.nom_ufm_centers - - fig, ax = plt.subplots(2, 3, figsize=(8, 6), sharex="col", sharey="row") - plt.setp(ax[0, 1].get_yticklabels(), visible=False) - plt.suptitle(r"$\delta \xi$, $\delta \eta$ vs Az, El, Boresight") - for k in range(6): - i = k // 3 - j = k % 3 - if i == 0: - model = xi_model_fit - ref = xi_ref - elif i == 1: - model = eta_model_fit - ref = eta_ref - if j == 0: - x = solver_aman.ancil.az_enc % 360 - elif j == 1: - x = solver_aman.ancil.el_enc - elif j == 2: - x = solver_aman.ancil.boresight_enc - - ax[i, j].scatter( - x[plotmask], - (model - ref)[plotmask] / ARCMIN, - color="k", + ax.scatter( + nom_array[:, 0] / DEG, + nom_array[:, 1] / DEG, marker=".", - alpha=0.3, - lw=0, - s=scale_weights[plotmask] * 80, - ) - ax[i, j].axhline(0, xmin=0, xmax=1, color="k", lw=2, alpha=0.5) - mxb = np.polyfit( - x[plotmask], - (model[plotmask] - ref[plotmask]) / ARCMIN, - 1, - w=scale_weights[plotmask], + color="r", + alpha=0.2, + label="Nominal Center", ) - xrange = np.arange(np.nanmin(x), np.nanmax(x)) - ax[i, j].plot(xrange, mxb[0] * xrange + mxb[1], "r", lw=1, - label=f'Slope {np.round(mxb[0],4)}\n [arcmin/deg]') - ax[i, j].legend(fontsize='small') - - ax[0, 0].set_ylabel("dXi [arcmin]") - ax[1, 0].set_ylabel("dEta [arcmin]") - ax[1, 0].set_xlabel("Azimuth [deg]") - ax[1, 1].set_xlabel("Elevation [deg]") - ax[1, 2].set_xlabel("Boresight [deg]") - plt.tight_layout() - 
plt.savefig(f"{plot_dir}/{platform}_residuals_vs_ancillary{tag}.png", dpi=350) - plt.close() + offsets = sc3.get_offsets() + #xmin, ymin = offsets.min(axis=0) + #xmax, ymax = offsets.max(axis=0) + ax.legend(loc=1, fontsize="small") + ax.set_xlabel("Xi (deg)") + ax.set_ylabel("Eta (deg)") + cb = plt.colorbar(sc3, fraction=0.046, pad=0.04) + cb.ax.set_title(coloredby) + ax.set_title(f"Fits, Colored by {coloredby} (deg)") -def plot_total_residuals(solver_aman, - config, - save_dir, - tag, - fit_residual_i1, - fit_residual_i2=None, - bad_fit_inds=None, - ): - plot_dir = os.path.join(save_dir, "plots") - os.makedirs(plot_dir, exist_ok=True) - iterate_cutoff = config.get("iterate_cutoff", None) - platform = config.get("platform") - scale_weights = solver_aman.weights / np.nanmax(solver_aman.weights) - if fit_residual_i2 is not None: - fig = plt.figure(figsize=(6,4)) - gs = fig.add_gridspec(7,1) - ax1 = fig.add_subplot(gs[0:-2,:]) - ax2 = fig.add_subplot(gs[-2:,:]) - #Plot first fit iteration residuals - ax1.plot(np.arange(len(fit_residual_i1)), fit_residual_i1 / ARCMIN, - 'r.', mew=0, alpha=0.6, lw=0, label = '1st Fit') - ax1.set_ylabel(f'Fit Residual $\left|\Delta$(xi, eta)$\right|$ [arcmin]') - ax1.set_xlabel('Data point') - ax1.axhline(iterate_cutoff, xmin=0, xmax=1, color="k", linestyle = ':', - lw=0.8, label = 'Cutoff') - #Plot second fit iteration residuals - ax1.plot(np.arange(len(fit_residual_i2)), - fit_residual_i2 / ARCMIN, 'b*', - alpha=0.5, lw=0, mew=0, label = '2nd fit') - xtox = np.empty((len(fit_residual_i2), 2)) - xtox[:, 0] = np.arange(len(fit_residual_i1)) - xtox[:, 1] = np.arange(len(fit_residual_i2)) - ytoy = np.empty((len(fit_residual_i1), 2)) - ytoy[:, 0] = fit_residual_i1 / ARCMIN - ytoy[:, 1] = fit_residual_i2 / ARCMIN - ax1.plot(xtox.T, ytoy.T, "k", lw=0.4) + # Plot lines connecting data to modeled data point + xitoxi = np.empty((len(xi_model_fit), 2)) + xitoxi[:, 0] = xi_ref / DEG + xitoxi[:, 1] = xi_model_fit / DEG + etatoeta = 
    def plot_ws0_modeled_fits(self):
        """Plot UFM-center (ws0-style) model fits in three side-by-side panels,
        colored by elevation, boresight rotation and azimuth respectively.

        Black segments connect each reference point to its modeled position;
        axis limits are fixed to the UFM-center region.
        """
        platform = self.platform
        plot_dir = self.plot_dir
        xieta_model = self.xieta_model
        tag = self.tag
        ancil = self.solver_aman.ancil
        nominal_xieta_locs = self.solver_aman.nominal_xieta_locs
        measured_xieta_data = self.solver_aman.measured_xieta_data
        weights = self.solver_aman.weights
        modeled_fits = self.solver_aman.modeled_fits
        fit_rms = self.solver_aman.fit_rms

        xi_model_fit = modeled_fits.xi
        eta_model_fit = modeled_fits.eta
        # Reference positions the model is compared against.
        if xieta_model == "measured":
            xi_ref, eta_ref, _ = measured_xieta_data
        elif xieta_model == "template":
            xi_ref, eta_ref, _ = nominal_xieta_locs
        # One color variable per panel, with fixed color ranges.
        # NOTE(review): ancil.boresight_enc looks SAT-specific — confirm this
        # method is only reached for SAT platforms.
        markercolor = [ancil.el_enc.copy(), ancil.boresight_enc.copy(), ancil.az_enc.copy()]
        coloredby = ["El", "Boresight", "Az"]
        vmins = [40, -50, 0]
        vmaxs = [65, 50, 360]
        scale_weights = weights / np.nanmax(weights)
        plotmask = np.where(weights)[0]
        rms = np.round(fit_rms, 4)

        # Segments connecting data points to their modeled positions.
        xitoxi = np.empty((len(xi_model_fit), 2))
        xitoxi[:, 0] = xi_ref / DEG
        xitoxi[:, 1] = xi_model_fit / DEG
        etatoeta = np.empty((len(eta_model_fit), 2))
        etatoeta[:, 0] = eta_ref / DEG
        etatoeta[:, 1] = eta_model_fit / DEG

        fig, ax = plt.subplots(1, 3, figsize=(9, 4), sharey=True)
        for x in range(3):
            # Nominal centers (first 8 entries — presumably the 7 wafer slots
            # plus boresight; TODO confirm).
            ax[x].plot(
                nominal_xieta_locs[0, : 7 + 1] / DEG,
                nominal_xieta_locs[1, : 7 + 1] / DEG,
                "rx",
                label="Nominal Center",
            )
            ax[x].scatter(
                xi_ref[plotmask] / DEG,
                eta_ref[plotmask] / DEG,
                c=markercolor[x][plotmask],
                alpha=0.4,
                label="Data",
                edgecolors="k",
                linewidths=0.4,
                s=130 * scale_weights[plotmask],
                cmap="jet",
                vmin=vmins[x],
                vmax=vmaxs[x],
            )
            im = ax[x].scatter(
                xi_model_fit[plotmask] / DEG,
                eta_model_fit[plotmask] / DEG,
                marker="*",
                c=markercolor[x][plotmask],
                cmap="jet",
                edgecolor="gray",
                lw=0.3,
                s=130,
                label=f"Model, RMS = {rms}",
                vmin=vmins[x],
                vmax=vmaxs[x],
            )
            ax[x].legend(loc=1, fontsize="small")
            ax[x].set_xlabel("Xi (deg)")
            ax[x].set_ylabel("Eta (deg)")
            cb = plt.colorbar(im, fraction=0.046, pad=0.04)
            cb.ax.set_title(coloredby[x])
            ax[x].set_title(f"Fits, Colored by {coloredby[x]} (deg)")
            ax[x].plot(xitoxi.T, etatoeta.T, "k", lw=0.4, alpha=0.5)
            ax[x].set_xlim(-1.05, 0.25)
            ax[x].set_ylim(-0.2, 0.2)
        plt.tight_layout()
        if self.save_figure:
            plt.savefig(f"{plot_dir}/{platform}_ws0_model_fits{tag}.png", dpi=350)
        plt.close()
alpha=0.4, + label="Data", + edgecolors="k", + linewidths=0.4, + s=130 * scale_weights[plotmask], + cmap="jet", + vmin=vmins[x], + vmax=vmaxs[x], + ) + im = ax[x].scatter( + xi_model_fit[plotmask] / DEG, + eta_model_fit[plotmask] / DEG, + marker="*", + c=markercolor[x][plotmask], + cmap="jet", + edgecolor="gray", + lw=0.3, + s=130, + label=f"Model, RMS = {rms}", + vmin=vmins[x], + vmax=vmaxs[x], + ) + ax[x].legend(loc=1, fontsize="small") + ax[x].set_xlabel("Xi (deg)") + ax[x].set_ylabel("Eta (deg)") + cb = plt.colorbar(im, fraction=0.046, pad=0.04) + cb.ax.set_title(coloredby[x]) + ax[x].set_title(f"Fits, Colored by {coloredby[x]} (deg)") + ax[x].plot(xitoxi.T, etatoeta.T, "k", lw=0.4,alpha=0.5) + ax[x].set_xlim(-1.05, 0.25) + ax[x].set_ylim(-0.2, 0.2) + # plt.subplots_adjust(left=0.1, right=0.90, bottom=0.05, hspace=0.3) + plt.tight_layout() + if self.save_figure: + plt.savefig(f"{plot_dir}/{platform}_ws0_model_fits{tag}.png", dpi=350) + plt.close() - fig, ax = plt.subplots(2, 1) - for i, xe in enumerate(['Xi','Eta']): - if xe == 'Xi': - xaxis_ref = xi_ref - xlabel = 'Xi' - yref = xi_ref - ymodel = xi_model_fit - ylabel = 'dXi' - elif xe == 'Eta': - xaxis_ref = eta_ref - xlabel = 'Eta' - yref = eta_ref - ymodel = eta_model_fit - ylabel = 'dEta' - #xi residuals vs xi - im = ax[i].scatter( - xaxis_ref[plotmask] / DEG, - (ymodel - yref)[plotmask] / ARCMIN, - marker="*", - c=xaxis_ref[plotmask], + + def plot_template_space_fits_per_wafer(self): + platform = self.platform + plot_dir = self.plot_dir + plotlims = self.plotlims + pm_version = self.pm_version + tag = self.tag + ancil = self.solver_aman.ancil + roll_c = self.solver_aman.roll_c + nominal_xieta_locs = self.solver_aman.nominal_xieta_locs + measured_xieta_data = self.solver_aman.measured_xieta_data + weights = self.solver_aman.weights + pointing_model = self.solver_aman.pointing_model + modeled_fits = self.solver_aman.modeled_fits + fit_rms = self.solver_aman.fit_rms + + scale_weights = weights / 
np.nanmax(weights) + xi_unmod, eta_unmod = model_template_xieta( + pointing_model, + pm_version, + self.solver_aman + ) + xi0, eta0 = model_template_xieta( + pm.param_defaults[pm_version], + pm_version, + self.solver_aman + ) + #Plot with Elevation as colorbar + fig, ax = plt.subplots(2, 4, figsize=(9, 6)) + for i in range(7): + ax[i // 4, i % 4].plot(0, 0, "kx", label="Nominal Center") + im = ax[i // 4, i % 4].scatter( + xi_unmod[i::7] / ARCMIN - nominal_xieta_locs[0, i] / ARCMIN, + eta_unmod[i::7] / ARCMIN - nominal_xieta_locs[1, i] / ARCMIN, + c=ancil.el_enc[i::7], + s=scale_weights[i::7] * 80, + edgecolor="gray", + lw=0.3, + marker="o", + alpha=0.5, + cmap="jet", + ) + ax[i // 4, i % 4].set_xlim(-1 * plotlims, plotlims) + ax[i // 4, i % 4].set_ylim(-1 * plotlims, plotlims) + ax[i // 4, i % 4].set_title(f"ws{i}") + ax[i // 4, i % 4].set_aspect('equal', adjustable='box') + plt.colorbar(im, ax[1, 3], label="Elevation (deg)", fraction=0.046, pad=0.04) + plt.tight_layout() + if self.save_figure: + plt.savefig(f"{plot_dir}/{platform}_unmodeled_fits_WS_elevation{tag}.png", dpi=350) + plt.close() + #Plot with Roll as colorbar + fig, ax = plt.subplots(2, 4, figsize=(9, 6)) + if "sat" in pm_version: + markercolor = -1*ancil.boresight_enc[i::7].copy() + if "lat" in pm_version: + markercolor = roll_c[i::7].copy() + coloredby = "roll" + for i in range(7): + ax[i // 4, i % 4].plot(0, 0, "kx", label="Nominal Center") + im = ax[i // 4, i % 4].scatter( + xi_unmod[i::7] / ARCMIN - nominal_xieta_locs[0, i] / ARCMIN, + eta_unmod[i::7] / ARCMIN - nominal_xieta_locs[1, i] / ARCMIN, + c=markercolor, + s=scale_weights[i::7] * 80, + edgecolor="gray", + lw=0.3, + marker="o", + alpha=0.5, + cmap="jet", + ) + ax[i // 4, i % 4].set_xlim(-1 * plotlims, plotlims) + ax[i // 4, i % 4].set_ylim(-1 * plotlims, plotlims) + ax[i // 4, i % 4].set_title(f"ws{i}") + ax[i // 4, i % 4].set_aspect('equal', adjustable='box') + plt.colorbar(im, ax[1, 3], label=f"{coloredby} (deg)", fraction=0.046, 
pad=0.04) + if self.save_figure: + plt.tight_layout() + plt.savefig(f"{plot_dir}/{platform}_unmodeled_fits_WS_roll{tag}.png", dpi=350) + + if "lat" in pm_version: + markercolor = ancil.corotator_enc[i::7].copy() + coloredby = "corotator" + fig, ax = plt.subplots(2, 4, figsize=(9, 6)) + for i in range(7): + ax[i // 4, i % 4].plot(0, 0, "kx", label="Nominal Center") + im = ax[i // 4, i % 4].scatter( + xi_unmod[i::7] / ARCMIN - nominal_xieta_locs[0, i] / ARCMIN, + eta_unmod[i::7] / ARCMIN - nominal_xieta_locs[1, i] / ARCMIN, + c=ancil.boresight_enc[i::7], + s=scale_weights[i::7] * 80, + edgecolor="gray", + lw=0.3, + marker="o", + alpha=0.5, + cmap="jet", + ) + ax[i // 4, i % 4].set_xlim(-1 * plotlims, plotlims) + ax[i // 4, i % 4].set_ylim(-1 * plotlims, plotlims) + ax[i // 4, i % 4].set_title(f"ws{i}") + ax[i // 4, i % 4].set_aspect('equal', adjustable='box') + plt.colorbar(im, ax[1, 3], label=f"{coloredby} (deg)", fraction=0.046, pad=0.04) + plt.tight_layout() + if self.save_figure: + plt.savefig(f"{plot_dir}/{platform}_unmodeled_fits_WS_corotator{tag}.png", dpi=350) + plt.close() + + + def plot_template_space_fits_per_detector(self): + platform = self.platform + plot_dir = self.plot_dir + tag = self.tag + plotlims = self.plotlims + pm_version = self.pm_version + ancil = self.solver_aman.ancil + roll_c = self.solver_aman.roll_c + nominal_xieta_locs = self.solver_aman.nominal_xieta_locs + measured_xieta_data = self.solver_aman.measured_xieta_data + weights = self.solver_aman.weights + pointing_model = self.solver_aman.pointing_model + modeled_fits = self.solver_aman.modeled_fits + fit_rms = self.solver_aman.fit_rms + + scale_weights = weights / np.nanmax(weights) + xi_unmod, eta_unmod = model_template_xieta( + pointing_model, + pm_version, + self.solver_aman + ) + xi0, eta0 = model_template_xieta( + pm.param_defaults[pm_version], + pm_version, + self.solver_aman + ) + #plot with weights as colorbar + fig, ax = plt.subplots(figsize=(9, 6)) + ax.plot(0, 0, "kx", 
label="Nominal Center") + im = ax.scatter( + xi_unmod / ARCMIN - nominal_xieta_locs[0] / ARCMIN, + eta_unmod / ARCMIN - nominal_xieta_locs[1] / ARCMIN, + c=weights, + s=scale_weights * 5, + edgecolor="gray", + lw=0.3, + marker="o", + alpha=0.4, + cmap="viridis", + vmin=self.config.get("weight_cutoff"), + vmax=1 + ) + ax.set_xlim(-1 * plotlims, plotlims) + ax.set_ylim(-1 * plotlims, plotlims) + ax.set_aspect('equal', adjustable='box') + ax.set_title(f"Unmodeled fits, by fit weight") + cb = plt.colorbar(im, fraction=0.046, pad=0.04) + plt.tight_layout() + if self.save_figure: + plt.savefig(f"{plot_dir}/{platform}_unmodeled_fits_weights{tag}.png", dpi=350) + plt.close() + fig, ax = plt.subplots(figsize=(9, 6)) + ax.plot(0, 0, "kx", label="Nominal Center") + im = ax.scatter( + xi_unmod / ARCMIN - nominal_xieta_locs[0] / ARCMIN, + eta_unmod / ARCMIN - nominal_xieta_locs[1] / ARCMIN, + c=ancil.el_enc, + s=scale_weights * 5, + edgecolor="gray", + lw=0.3, + marker="o", + alpha=0.4, cmap="jet", - s=100 * scale_weights[plotmask], - alpha=scale_weights[plotmask], - edgecolors="k", - linewidths=0.4, ) + ax.set_xlim(-1 * plotlims, plotlims) + ax.set_ylim(-1 * plotlims, plotlims) + ax.set_aspect('equal', adjustable='box') + ax.set_title(f"Unmodeled fits, by elevation color") cb = plt.colorbar(im, fraction=0.046, pad=0.04) - cb.ax.set_title(xlabel) - ax[i].axhline(0, xmin=0, xmax=1, color="k", lw=0.8, alpha=0.6) - ax[i].axvline(0, ymin=0, ymax=1, color="k", lw=0.8, alpha=0.5) - ax[i].set_ylim(-10, 20) - ax[i].set_xlabel(f"{xlabel} (deg)", fontsize="small") - ax[i].set_ylabel(f"{ylabel} [arcmin]") - plt.tight_layout() - plt.savefig(f"{plot_dir}/{platform}_xieta_residuals{tag}.png", dpi=350) - plt.close() - - -def plot_xieta_cross_residuals(solver_aman, config, save_dir, tag): - plot_dir = os.path.join(save_dir, "plots") - os.makedirs(plot_dir, exist_ok=True) - scale_weights = solver_aman.weights / np.nanmax(solver_aman.weights) - plotmask = np.where(solver_aman.weights) - platform 
= config.get("platform") - xi_model_fit = solver_aman.model_fits.xi - eta_model_fit = solver_aman.model_fits.eta - if config.get("xieta_model") == "measured": - xi_ref, eta_ref, _ = solver_aman.ffp_ufm_center_fits - elif config.get("xieta_model") == "template": - xi_ref, eta_ref, _ = solver_aman.nom_ufm_centers - - fig, ax = plt.subplots(2, 1) - for i, xe in enumerate(['Xi','Eta']): - if xe == 'Xi': - xaxis_ref = eta_ref - xlabel = 'Eta' - yref = xi_ref - ymodel = xi_model_fit - ylabel = 'dXi' - elif xe == 'Eta': - xaxis_ref = xi_ref - xlabel = 'Xi' - yref = eta_ref - ymodel = eta_model_fit - ylabel = 'dEta' - #xi residuals vs xi - im = ax[i].scatter( - xaxis_ref[plotmask] / DEG, - (ymodel - yref)[plotmask] / ARCMIN, - marker="*", - c=xaxis_ref[plotmask], + plt.tight_layout() + if self.save_figure: + plt.savefig(f"{plot_dir}/{platform}_unmodeled_fits_WS_elevation{tag}.png", dpi=350) + plt.close() + + fig, ax = plt.subplots(figsize=(9, 6)) + if "sat" in pm_version: + markercolor = -1*ancil.boresight_enc + if "lat" in pm_version: + markercolor = roll_c + coloredby = "roll" + ax.plot(0, 0, "kx", label="Nominal Center") + im = ax.scatter( + xi_unmod / ARCMIN - nominal_xieta_locs[0] / ARCMIN, + eta_unmod / ARCMIN - nominal_xieta_locs[1] / ARCMIN, + c=markercolor, + s=scale_weights * 5, + edgecolor="gray", + lw=0.3, + marker="o", + alpha=0.4, cmap="jet", - s=100 * scale_weights[plotmask], - alpha=scale_weights[plotmask], - edgecolors="k", - linewidths=0.4, ) + ax.set_xlim(-1 * plotlims, plotlims) + ax.set_ylim(-1 * plotlims, plotlims) + ax.set_aspect('equal', adjustable='box') + ax.set_title(f"Unmodeled fits, colored by {coloredby} angle") cb = plt.colorbar(im, fraction=0.046, pad=0.04) - cb.ax.set_title(xlabel) - ax[i].axhline(0, xmin=0, xmax=1, color="k", lw=0.8, alpha=0.6) - ax[i].axvline(0, ymin=0, ymax=1, color="k", lw=0.8, alpha=0.5) - ax[i].set_ylim(-10, 20) - ax[i].set_xlabel(f"{xlabel} (deg)", fontsize="small") + plt.tight_layout() + if self.save_figure: + 
    def plot_residuals_vs_ancil(self):
        """Plot fit residuals (dXi, dEta) against the ancillary encoder axes.

        2x3 grid: rows are dXi / dEta, columns are azimuth, elevation and the
        third axis (boresight rotation on SATs, corotator on the LAT). A
        weighted linear fit is overlaid on each panel with its slope in the
        legend (arcmin per degree).
        """
        platform = self.platform
        plot_dir = self.plot_dir
        tag = self.tag
        pm_version = self.pm_version
        xieta_model = self.xieta_model
        plotlims = self.plotlims
        ancil = self.solver_aman.ancil
        roll_c = self.solver_aman.roll_c
        nominal_xieta_locs = self.solver_aman.nominal_xieta_locs
        measured_xieta_data = self.solver_aman.measured_xieta_data
        weights = self.solver_aman.weights
        pointing_model = self.solver_aman.pointing_model
        modeled_fits = self.solver_aman.modeled_fits
        fit_rms = self.solver_aman.fit_rms
        # NOTE(review): pm_version, plotlims, roll_c, pointing_model and
        # fit_rms are unpacked but unused in this method.

        scale_weights = weights / np.nanmax(weights)
        plotmask = np.where(weights)
        xi_model_fit = modeled_fits.xi
        eta_model_fit = modeled_fits.eta
        # Reference positions the model is compared against.
        if xieta_model == "measured":
            xi_ref, eta_ref, _ = measured_xieta_data
        elif xieta_model == "template":
            xi_ref, eta_ref, _ = nominal_xieta_locs

        # Third encoder axis depends on platform.
        if "sat" in platform:
            third_enc = ancil.boresight_enc.copy()
            third_enc_name = "Boresight"
        elif "lat" in platform:
            third_enc = ancil.corotator_enc.copy()
            third_enc_name = "Corotator"
        fig, ax = plt.subplots(2, 3, figsize=(8, 6), sharex="col", sharey="row")
        plt.setp(ax[0, 1].get_yticklabels(), visible=False)
        plt.suptitle(r"$\delta \xi$, $\delta \eta$" + f" vs Az, El, {third_enc_name}")
        # k enumerates the 6 panels: row i (xi/eta) x column j (az/el/third).
        for k in range(6):
            i = k // 3
            j = k % 3
            if i == 0:
                model = xi_model_fit
                ref = xi_ref
            elif i == 1:
                model = eta_model_fit
                ref = eta_ref
            if j == 0:
                # Wrap azimuth into [0, 360).
                x = ancil.az_enc % 360
            elif j == 1:
                x = ancil.el_enc
            elif j == 2:
                x = third_enc
            ax[i, j].scatter(
                x[plotmask],
                (model - ref)[plotmask] / ARCMIN,
                color="k",
                marker=".",
                alpha=0.1,
                lw=0,
                s=scale_weights[plotmask] * 80,
            )
            ax[i, j].axhline(0, xmin=0, xmax=1, color="k", lw=2, alpha=0.5)
            # Weighted linear trend of residual vs encoder angle.
            mxb = np.polyfit(
                x[plotmask],
                (model[plotmask] - ref[plotmask]) / ARCMIN,
                1,
                w=scale_weights[plotmask],
            )
            xrange = np.arange(np.nanmin(x), np.nanmax(x))
            ax[i, j].plot(
                xrange,
                mxb[0] * xrange + mxb[1],
                "r",
                lw=1,
                label=f"Slope {np.round(mxb[0],4)}\n [arcmin/deg]",
            )
            ax[i, j].legend(fontsize="small")
        ax[0, 0].set_ylabel("dXi [arcmin]")
        ax[1, 0].set_ylabel("dEta [arcmin]")
        ax[1, 0].set_xlabel("Azimuth [deg]")
        ax[1, 1].set_xlabel("Elevation [deg]")
        ax[1, 2].set_xlabel(f"{third_enc_name} [deg]")
        plt.tight_layout()
        if self.save_figure:
            plt.savefig(f"{plot_dir}/{platform}_residuals_vs_ancillary{tag}.png", dpi=350)
        plt.close()
np.nanstd(self.solver_aman.fit_residuals_i1)*2 + np.nanmedian(self.solver_aman.fit_residuals_i1) + bad_fit_inds = self.solver_aman.bad_fit_inds + fit_residuals_i1 = self.solver_aman.fit_residuals_i1 + fit_residuals_i2 = self.solver_aman.fit_residuals + fig = plt.figure(figsize=(6, 4)) + gs = fig.add_gridspec(7, 1) + ax1 = fig.add_subplot(gs[0:-2, :]) + ax2 = fig.add_subplot(gs[-2:, :]) + # Plot first fit iteration residuals + ax1.plot( + np.arange(len(fit_residuals_i1)), + fit_residuals_i1, + "r.", + mew=0, + alpha=0.6, + lw=0, + label="1st Fit", + ) + ax1.set_ylabel(r"Fit Residual $\left|\Delta$(xi, eta)$\right|$ [arcmin]") + ax1.set_xlabel("Data point") + ax1.axhline( + iterate_cutoff, + #xmin=0, + #xmax=1, + color="k", + linestyle=":", + lw=0.8, + label="Cutoff", + ) + # Plot second fit iteration residuals + ax1.plot( + np.arange(len(fit_residuals_i2)), + fit_residuals_i2, + "b*", + alpha=0.5, + lw=0, + mew=0, + label="2nd fit", + ) + xtox = np.empty((len(fit_residuals_i2), 2)) + xtox[:, 0] = np.arange(len(fit_residuals_i1)) + xtox[:, 1] = np.arange(len(fit_residuals_i2)) + ytoy = np.empty((len(fit_residuals_i1), 2)) + ytoy[:, 0] = fit_residuals_i1 + ytoy[:, 1] = fit_residuals_i2 + ax1.plot(xtox.T, ytoy.T, "k", lw=0.4) + + ax1.axhline(0, xmin=0, xmax=1, color="k", alpha=0.5, lw=0.8) + ax1.set_ylabel(r"Fit Residual $\left|\Delta\text{(xi, eta)}\right|$ [arcmin]") + ax1.legend(loc=2, fontsize="small") + + ax2.scatter( + np.arange(len(fit_residuals_i1)), + (fit_residuals_i2 - fit_residuals_i1), + c="k", + marker="o", + s=scale_weights * 50, + lw=0, + alpha=0.6, + label="Res i2 - Res i1", + ) + ax2.plot( + np.arange(len(fit_residuals_i1))[bad_fit_inds], + (fit_residuals_i2 - fit_residuals_i1)[bad_fit_inds], + "kx", + ms=7, + lw=0.2, + alpha=0.6, + label="Excl. 
from i2 fit", + ) + ax2.axhline(0, xmin=0, xmax=1, color="k", alpha=0.5, lw=0.8) + for e in effobs: + ax1.axvline(e,linestyle=':',color='k') + ax2.axvline(e,linestyle=':',color='k') + ax2.legend(fontsize="x-small") + ax2.set_xlabel("Data points") + ax2.set_ylabel(r"$\Delta$ Residuals") + if self.save_figure: + plt.savefig(f"{plot_dir}/{platform}_total_residuals{tag}.png", dpi=350) + plt.close() + else: + # Plot first fit iteration residuals only + fit_residuals_i1 = self.solver_aman.fit_residuals + fig, ax1 = plt.subplots() + im = ax1.scatter( + np.arange(len(fit_residuals_i1)), + fit_residuals_i1, + s=scale_weights * 50, + #c=obs_index, + c=weights, + lw=0, + alpha=0.4, + label="1st Fit", + vmax=1, + ) + for e in effobs: + ax1.axvline(e,linestyle=':',color='k') + plt.colorbar(im, label='fit weights', fraction=0.046, pad=0.04) + ax1.set_ylabel(r"Fit Residual $\left|\Delta\text{(xi, eta)}\right|$ [arcmin]") + ax1.set_xlabel("Data points") + plt.title("Fit Residuals") + plt.legend(loc=2) + if self.save_figure: + plt.savefig(f"{plot_dir}/{platform}_total_residuals{tag}.png", dpi=350) + plt.close() + + + def plot_residuals_histograms(self): + platform = self.platform + plot_dir = self.plot_dir + tag = self.tag + append = self.append_string + ancil = self.solver_aman.ancil + weights = self.solver_aman.weights + fit_rms = self.solver_aman.fit_rms + fit_residuals = self.solver_aman.fit_residuals + fit_rms_full = self.solver_aman.fit_rms_full + fit_residuals_full = self.solver_aman.fit_residuals_full + + xmax = np.nanmax(fit_residuals_full) * 1.1 + title = f"{append} {tag}" + plt.figure() + plt.hist(fit_residuals_full, + bins = 25, + range = (0, xmax), + alpha=0.7, + label='All dets above weight threshold' + ) + plt.hist(fit_residuals, + bins = 25, + range = (0, xmax), + alpha=0.7, + label='Subset in Fit' + ) + plt.axvline(fit_rms_full,color='C0', + label=f'Weighted RMSE (all): {fit_rms_full:.3f} arcmin') + plt.axvline(fit_rms, color='C1', + label=f'Weighted RMSE (set): 
{fit_rms:.3f} arcmin') + plt.xlabel('Fit Residuals (arcmin)') + plt.ylabel('# Detectors') + plt.title(append) + plt.legend() + if self.save_figure: + plt.savefig(f"{plot_dir}/{platform}_residuals_hists{tag}.png", dpi=350) + plt.close() + + def plot_xieta_residuals(self): + platform = self.platform + plot_dir = self.plot_dir + tag = self.tag + xieta_model = self.xieta_model + weights = self.solver_aman.weights + modeled_fits = self.solver_aman.modeled_fits + nominal_xieta_locs = self.solver_aman.nominal_xieta_locs + measured_xieta_data = self.solver_aman.measured_xieta_data + + scale_weights = weights / np.nanmax(weights) + plotmask = np.where(weights) + xi_model_fit = modeled_fits.xi + eta_model_fit = modeled_fits.eta + if xieta_model == "measured": + xi_ref, eta_ref, _ = measured_xieta_data + elif xieta_model == "template": + xi_ref, eta_ref, _ = nominal_xieta_locs + + fig, ax = plt.subplots(2, 1) + for i, xe in enumerate(["Xi", "Eta"]): + if xe == "Xi": + xaxis_ref = xi_ref + xlabel = "Xi" + yref = xi_ref + ymodel = xi_model_fit + ylabel = "dXi" + elif xe == "Eta": + xaxis_ref = eta_ref + xlabel = "Eta" + yref = eta_ref + ymodel = eta_model_fit + ylabel = "dEta" + # xi residuals vs xi + im = ax[i].scatter( + xaxis_ref[plotmask] / DEG, + (ymodel - yref)[plotmask] / ARCMIN, + marker=".", + c=xaxis_ref[plotmask], + cmap="jet", + s=100 * scale_weights[plotmask], + alpha=scale_weights[plotmask], + edgecolors="k", + linewidths=0.4, + ) + cb = plt.colorbar(im, fraction=0.046, pad=0.04) + cb.ax.set_title(xlabel) + ax[i].axhline(0, xmin=0, xmax=1, color="k", lw=0.8, alpha=0.6) + ax[i].axvline(0, ymin=0, ymax=1, color="k", lw=0.8, alpha=0.5) + ax[i].set_ylim(-self.plotlims, self.plotlims) + ax[i].set_xlabel(f"{xlabel} (deg)", fontsize="small") + ax[i].set_ylabel(f"{ylabel} [arcmin]") + plt.tight_layout() + if self.save_figure: + plt.savefig(f"{plot_dir}/{platform}_xieta_residuals{tag}.png", dpi=350) + plt.close() + + + def plot_xieta_cross_residuals(self): + platform = 
self.platform + plot_dir = self.plot_dir + tag = self.tag + xieta_model = self.xieta_model + weights = self.solver_aman.weights + modeled_fits = self.solver_aman.modeled_fits + nominal_xieta_locs = self.solver_aman.nominal_xieta_locs + measured_xieta_data = self.solver_aman.measured_xieta_data + + scale_weights = weights / np.nanmax(weights) + plotmask = np.where(weights) + xi_model_fit = modeled_fits.xi + eta_model_fit = modeled_fits.eta + if xieta_model == "measured": + xi_ref, eta_ref, _ = measured_xieta_data + elif xieta_model == "template": + xi_ref, eta_ref, _ = nominal_xieta_locs + + fig, ax = plt.subplots(2, 1) + for i, xe in enumerate(["Xi", "Eta"]): + if xe == "Xi": + xaxis_ref = eta_ref + xlabel = "Eta" + yref = xi_ref + ymodel = xi_model_fit + ylabel = "dXi" + elif xe == "Eta": + xaxis_ref = xi_ref + xlabel = "Xi" + yref = eta_ref + ymodel = eta_model_fit + ylabel = "dEta" + # xi residuals vs xi + im = ax[i].scatter( + xaxis_ref[plotmask] / DEG, + (ymodel - yref)[plotmask] / ARCMIN, + marker=".", + c=xaxis_ref[plotmask], + cmap="jet", + s=100 * scale_weights[plotmask], + alpha=scale_weights[plotmask], + edgecolors="k", + linewidths=0.4, + ) + cb = plt.colorbar(im, fraction=0.046, pad=0.04) + cb.ax.set_title(xlabel) + ax[i].axhline(0, xmin=0, xmax=1, color="k", lw=0.8, alpha=0.6) + ax[i].axvline(0, ymin=0, ymax=1, color="k", lw=0.8, alpha=0.5) + ax[i].set_ylim(-self.plotlims, self.plotlims) + ax[i].set_xlabel(f"{xlabel} (deg)", fontsize="small") ax[i].set_ylabel(f"{ylabel} [arcmin]") - plt.tight_layout() - plt.savefig(f"{plot_dir}/{platform}_xieta_cross_residuals{tag}.png", dpi=350) - plt.close() - - - + plt.tight_layout() + if self.save_figure: + plt.savefig(f"{plot_dir}/{platform}_xieta_cross_residuals{tag}.png", dpi=350) + plt.close() + ############ if __name__ == "__main__": From 8042065813a670c8e6b883bcd960a1ca7cc033fe Mon Sep 17 00:00:00 2001 From: Elle Shaw Date: Mon, 4 Aug 2025 09:44:36 -0700 Subject: [PATCH 26/48] Fixed issue so now works for 
fitting LAT parameters. Minor changes to formatting. --- .../site_pipeline/solve_pointing_model.py | 68 ++++++++----------- 1 file changed, 29 insertions(+), 39 deletions(-) diff --git a/sotodlib/site_pipeline/solve_pointing_model.py b/sotodlib/site_pipeline/solve_pointing_model.py index b1b21fb57..ff8120e07 100644 --- a/sotodlib/site_pipeline/solve_pointing_model.py +++ b/sotodlib/site_pipeline/solve_pointing_model.py @@ -305,9 +305,9 @@ def objective_model_func_lmfit( return chi_sq(weights_array, dist) def chi_sq(weights, dist): - N = np.identity(len(dist)) * weights - chi2 = dist.T * N * dist + #N = np.identity(len(dist)) * weights #chi2 = dist.T @ N @ dist + chi2 = np.nansum(dist ** 2 * weights) return chi2 def model_template_xieta(params, pm_version, aman): @@ -320,16 +320,15 @@ def model_template_xieta(params, pm_version, aman): xi_meas = aman.measured_xieta_data[0] eta_meas = aman.measured_xieta_data[1] params = params.valuesdict() if isinstance(params, Parameters) else params - # if type(params) == lmfit.parameter.Parameters: - # params = params.valuesdict() params["version"] = pm_version if "sat" in pm_version: az, el, roll = pm._get_sat_enc_radians(aman.ancil) + q_nomodel = quat.rotation_lonlat(-az, el, 0) if "lat" in pm_version: az, el, roll = pm._get_lat_enc_radians(aman.ancil) + q_nomodel = quat.rotation_lonlat(-az, el, roll) boresight = pm.apply_pointing_model(aman, pointing_model=params, wrap=False) az1, el1, roll1 = boresight.az, boresight.el, boresight.roll - q_nomodel = quat.rotation_lonlat(-az, el, 0) q_model = quat.rotation_lonlat(-az1, el1, roll1) q_det_meas = quat.rotation_xieta(xi_meas, eta_meas, 0) xi_mod_true, eta_mod_true, _ = quat.decompose_xieta( @@ -345,18 +344,17 @@ def model_measured_xieta(params, pm_version, aman): q_nomodel * q_det_meas == q_model * q_det_true """ params = params.valuesdict() if isinstance(params, Parameters) else params - # if type(params) == lmfit.parameter.Parameters: - # params = params.valuesdict() 
params["version"] = pm_version xi_true, eta_true, gam_true = aman.nominal_xieta_locs if "sat" in pm_version: az, el, roll = pm._get_sat_enc_radians(aman.ancil) + q_nomodel = quat.rotation_lonlat(-az, el, 0) if "lat" in pm_version: az, el, roll = pm._get_lat_enc_radians(aman.ancil) + q_nomodel = quat.rotation_lonlat(-az, el, roll) + boresight = pm.apply_pointing_model(aman, pointing_model=params, wrap=False) az1, el1, roll1 = boresight.az, boresight.el, boresight.roll - - q_nomodel = quat.rotation_lonlat(-az, el, 0) q_model = quat.rotation_lonlat(-az1, el1, roll1) q_det_true = quat.rotation_xieta(xi_true, eta_true, 0) xi_mod_meas, eta_mod_meas, _ = quat.decompose_xieta( @@ -455,7 +453,6 @@ def main(config_path: str): os.makedirs(save_dir, exist_ok=True) shutil.copy(config_path, os.path.join(save_dir, "config.yaml")) - # Initialize Logger logger = sp_util.init_logger(__name__, "Solve pointing_model") logpath = os.path.join(save_dir, "pointing_model.log") @@ -488,7 +485,6 @@ def main(config_path: str): logger.info("Performing per-detector fits for all UFM data.") which_data = config.get("use_as_data") - # use_weights = False if which_data == 'raw' else config.get("use_weights", True) use_weights = config.get("use_weights", True) #Make axis manager with full detector set. 
@@ -519,6 +515,7 @@ def main(config_path: str): fitcheck_aman.wrap("obs_index", obs_index, [(0, "samps")]) #Now make axis manager that has down sampled data for computation + solver_aman = core.AxisManager(core.IndexAxis("samps")) ( filelist, obs_dets_fits, @@ -527,17 +524,13 @@ def main(config_path: str): all_det_ids, obs_index, ) = load_per_detector_data(config) - logger.info("Loaded %s data points", len(weights_dets)) ancil, roll_c = load_obs_boresight_per_detector(config, filelist, obs_index) - # Build Axis Managers + # Build Axis Managers + solver_aman.wrap("ancil", ancil) obs_info = core.AxisManager() obs_info.wrap("obs_ids", np.array(filelist)) - - - solver_aman = core.AxisManager(core.IndexAxis("samps")) - solver_aman.wrap("ancil", ancil) solver_aman.wrap("obs_info", obs_info) solver_aman.wrap("roll_c", roll_c, [(0, "samps")]) solver_aman.wrap( @@ -554,7 +547,6 @@ def main(config_path: str): ) solver_aman.wrap("weights", weights_dets, [(0, "samps")]) solver_aman.wrap("obs_index", obs_index) - # Make weights/data cuts logger.info("Built axis manager") ########################################################################### @@ -600,22 +592,16 @@ def main(config_path: str): # Make weights/data cuts logger.info("Built axis manager") - ############################ - # END SPLIT - ############################ + ################################ + # END of SPLIT: Now fit the parameters + ################################ # Initialize Parameters to Fit with Model fit_params = _init_fit_params(config) logger.info("Initialized fit parameters") - """ - if xieta_model == "measured": - model_reference = solver_aman.measured_xieta_data - elif xieta_model == "template": - model_reference = solver_aman.nominal_xieta_locs - """ + # Solve for Model Parameters # use chosen xieta_model to solve for parameters - # use_weights = config.get("use_weights",True) model_solved_params = minimize( objective_model_func_lmfit, fit_params, @@ -650,8 +636,7 @@ def main(config_path: str): 
# Model template and measured points using parameters found above modeled_fits, fit_residuals_i1, rms_i1, model_reference = apply_model_params(xieta_model, solver_aman.pointing_model, pm_version, solver_aman) - logger.info( - "RMS on fit: %f arcmin", rms_i1) + logger.info("RMS on fit: %f arcmin", rms_i1) # Save fit results to the axis manager modelfit_aman = core.AxisManager() @@ -661,7 +646,6 @@ def main(config_path: str): solver_aman.wrap("fit_residuals", fit_residuals_i1, overwrite=True) solver_aman.wrap("fit_rms", rms_i1, overwrite=True) - if fit_type == "detector": _, fit_residuals_full, rms_full, _ = apply_model_params(xieta_model, solver_aman.pointing_model, pm_version, fitcheck_aman) logger.info("RMS on FULL detector set: %f arcmin", rms_full) @@ -880,25 +864,31 @@ def plot_dets_in_these_obs(self): roll_c = self.solver_aman.roll_c elev = self.solver_aman.ancil.el_enc azim = self.solver_aman.ancil.az_enc + if platform == 'lat': + elmin=10; elmax=90 + else: + elmin=45; elmax=65 plt.figure() fig, ax = plt.subplots(2,2,figsize=(11,10)) + ax[0,0].set_title('color by boresight.roll', fontsize='medium') a0 = ax[0,0].scatter(measured_xieta_data[0], measured_xieta_data[1], - c=roll_c, alpha=0.5, s=10, cmap='jet',vmin=-45,vmax=45) + c=roll_c, alpha=0.5, s=10, + cmap='jet',vmin=-45,vmax=45) + ax[1,0].set_title('color by fit Elevation', fontsize='medium') a1= ax[1,0].scatter(measured_xieta_data[0], measured_xieta_data[1], - c=elev, alpha=0.3, s=10, cmap='jet',vmin=45,vmax=65) + c=elev, alpha=0.3, s=10, cmap='jet', vmin=elmin, vmax=elmax) + ax[0,1].set_title('color by fit Azimuth', fontsize='medium') a2= ax[0,1].scatter(measured_xieta_data[0], measured_xieta_data[1], - c=azim, alpha=0.3, s=10, cmap='jet', vmin=0,vmax=420) + c=azim, alpha=0.3, s=10, cmap='jet', vmin=0, vmax=420) + ax[1,1].set_title('color by fit weights', fontsize='medium') a3= ax[1,1].scatter(measured_xieta_data[0], measured_xieta_data[1], - c=weights, alpha=0.3, s=10, cmap='jet',vmin=0.8, vmax=1) + 
c=weights, alpha=0.3, s=10, cmap='jet', vmin=0.8, vmax=1) c0 = plt.colorbar(a0) c1 = plt.colorbar(a1) c2 = plt.colorbar(a2) c3 = plt.colorbar(a3) - ax[0,0].set_title('color by boresight.roll', fontsize='medium') - ax[1,0].set_title('color by fit Elevation', fontsize='medium') - ax[0,1].set_title('color by fit Azimuth', fontsize='medium') - ax[1,1].set_title('color by fit weights', fontsize='medium') + plt.suptitle('Detectors hit in these observations') if self.save_figure: plt.savefig(f"{plot_dir}/{platform}_dets_in_these_obs.png", dpi=350) From dde423673f1e5e64099849a7252bb68b3d74d29b Mon Sep 17 00:00:00 2001 From: Elle Shaw Date: Mon, 4 Aug 2025 16:42:17 -0700 Subject: [PATCH 27/48] Added plotting capabilities to show effect of solved pointing parameters on full set of detector data. --- .../site_pipeline/solve_pointing_model.py | 547 +++++++++++++++--- 1 file changed, 454 insertions(+), 93 deletions(-) diff --git a/sotodlib/site_pipeline/solve_pointing_model.py b/sotodlib/site_pipeline/solve_pointing_model.py index ff8120e07..8ca5d41e5 100644 --- a/sotodlib/site_pipeline/solve_pointing_model.py +++ b/sotodlib/site_pipeline/solve_pointing_model.py @@ -18,6 +18,7 @@ matplotlib.use("agg") import matplotlib.pyplot as plt +import matplotlib.cm as cm from sotodlib.coords.helpers import _valid_arg from sotodlib.site_pipeline import util as sp_util @@ -135,27 +136,37 @@ def create_culling_mask(obs_index, cull_dets): return culling_mask -def load_per_detector_data(config, return_all_dets=False): +def load_per_detector_data(config, no_downsample_set=False, return_all_dets=False): per_obs_fps = config.get("per_obs_fps") skip_tags = config.get("skip_tags", []) t0 = config.get("begin_timerange", 0) tf = config.get("end_timerange", int(time.time())) rxs = fpc.Receiver.load_file(per_obs_fps) - band = config.get("band") - if band is not None: - band = band.encode("utf-8") - - cull_dets = config.get("cull_dets", None) - cull_twice = config.get("cull_twice", False) - 
even_obs_size = config.get("even_obs_size", False) - filelist = [obs for obs in rxs.keys() if all(skip not in obs for skip in skip_tags)] - filelist = [obs for obs in filelist if int(obs.split("_")[1]) > t0 and int(obs.split("_")[1]) < tf] - if config.get("use_these_files") is not None: - filelist = [filelist[i] for i in config.get("use_these_files")] + if return_all_dets: + band = None + cull_dets = config.get("cull_dets", None) + cull_twice = config.get("cull_twice", False) + even_obs_size = False + which_ufm = None + filelist = [obs for obs in rxs.keys() if all(skip not in obs for skip in skip_tags)] + filelist = [obs for obs in filelist if int(obs.split("_")[1]) > t0 and int(obs.split("_")[1]) < tf] + + else: + band = config.get("band") + if band is not None: + band = band.encode("utf-8") + cull_dets = config.get("cull_dets", None) + cull_twice = config.get("cull_twice", False) + even_obs_size = config.get("even_obs_size", False) + which_ufm = config.get("which_ufm", None) + filelist = [obs for obs in rxs.keys() if all(skip not in obs for skip in skip_tags)] + filelist = [obs for obs in filelist if int(obs.split("_")[1]) > t0 and int(obs.split("_")[1]) < tf] + if config.get("use_these_files") is not None: + filelist = [filelist[i] for i in config.get("use_these_files")] weights_dets, obs_dets_fits, stream_id_list, obs_index = [], [], [], [] - which_ufm = config.get("which_ufm", None) + #which_ufm = config.get("which_ufm", None) which_data = config.get("use_as_data") which_weights = config.get("use_as_weights", None) @@ -181,7 +192,7 @@ def load_per_detector_data(config, return_all_dets=False): obs_dets_fits[np.where(weights_dets == 0)] = np.nan mask = ~np.isnan(weights_dets) - if return_all_dets: + if no_downsample_set: #plotting use-case to compare subset fits with the entire dataset. 
return ( filelist, @@ -497,7 +508,7 @@ def main(config_path: str): all_nom_det_array, all_det_ids, obs_index, - ) = load_per_detector_data(config, return_all_dets=True) + ) = load_per_detector_data(config, no_downsample_set=True) ancil, roll_c = load_obs_boresight_per_detector(config, filelist, obs_index) fitcheck_aman.wrap("ancil", ancil) @@ -513,6 +524,7 @@ def main(config_path: str): ) fitcheck_aman.wrap("weights", weights_dets, [(0, "samps")]) fitcheck_aman.wrap("obs_index", obs_index, [(0, "samps")]) + logger.info("Loaded %s fit check data points", len(weights_dets)) #Now make axis manager that has down sampled data for computation solver_aman = core.AxisManager(core.IndexAxis("samps")) @@ -772,7 +784,7 @@ def main(config_path: str): solver_aman.wrap("fit_rms", rms_i2, overwrite=True) if fit_type == "detector": - _, fit_residuals_full, rms_full, _ = apply_model_params(xieta_model, solver_aman.pointing_model, pm_version, fitcheck_aman) + _, fit_residuals_full, rms_full, _ = pm.apply_model_params(xieta_model, solver_aman.pointing_model, pm_version, fitcheck_aman) logger.info("RMS on FULL detector set: %f arcmin", rms_full) solver_aman.move("fit_residuals_full", "fit_residuals_full_i1") solver_aman.move("fit_rms_full", "fit_rms_full_i1") @@ -825,6 +837,183 @@ def main(config_path: str): replace=True, ) db.to_file(os.path.join(save_dir, dbfile)) + + #Optional extra plotting + if config.get("make_full_analysis_plots", True): + #Fill up axis manager with ALL the data (only cuts from culling and time stamps remain) + + ( + filelist, + obs_dets_fits, + weights_dets, + all_nom_det_array, + all_det_ids, + obs_index, + ) = load_per_detector_data(config, return_all_dets=True) + ancil, roll_c = load_obs_boresight_per_detector(config, filelist, obs_index) + ufm_list = [ufm.split("_")[1] for ufm in config.get('ufms')] + + obs_info = core.AxisManager() + obs_info.wrap("obs_ids", np.array(filelist)) + + full_aman = core.AxisManager(core.IndexAxis("samps")) + 
full_aman.wrap("obs_info", obs_info) + full_aman.wrap("ancil", ancil) + full_aman.wrap( + "nominal_xieta_locs", all_nom_det_array.T, + [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], + [(1, "samps")], + ) + full_aman.wrap( + "measured_xieta_data", obs_dets_fits.T, + [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], + [(1, "samps")], + ) + full_aman.wrap("weights", weights_dets, [(0, "samps")]) + full_aman.wrap("obs_index", obs_index) + full_aman.wrap("roll_c", roll_c, [(0, "samps")]) + full_aman.wrap("det_ids", all_det_ids, [(0, "samps")]) + full_aman.wrap("radial", + np.sqrt(full_aman.nominal_xieta_locs[0]**2 + full_aman.nominal_xieta_locs[1]**2)/DEG, + [(0, "samps")]) + full_aman.wrap("det_ufm", + np.array([detid.decode('utf-8').split('_')[0].lower() for detid in full_aman.det_ids]) + , [(0, "samps")]) + full_aman.wrap("det_wafer", np.array([ufm_list.index(d) for d in full_aman.det_ufm]), [(0, "samps")]) + + try: + full_modeled, full_residuals, rms, _ = apply_model_params("template", + solver_aman.pointing_model_i1, + config.get("pm_version"), + full_aman) + except: + full_modeled, fit_residuals, rms, _ = apply_model_params("template", + solver_aman.pointing_model, + config.get("pm_version"), + full_aman) + + full_aman.wrap("full_modeled", np.array(full_modeled), + [(0, core.LabelAxis("xieta", ["xi", "eta"]))], + [(1, "samps")]) + full_aman.wrap("fit_residuals", fit_residuals, [(0, "samps")]) + del(full_modeled) + del(fit_residuals) + + (obs_az, obs_el, obs_roll, + obs_resid, obs_dxi, obs_deta, + obs_std_xi, obs_std_eta + ) = [], [], [], [], [], [], [], [] + (all_ufm_az, all_ufm_el, all_ufm_roll, + all_ufm_resid, all_ufm_dxi, all_ufm_deta, + all_ufm_std_xi, all_ufm_std_eta, all_ufm_wafer_num + ) = [], [], [], [], [], [], [], [], [] + for ob in np.unique(full_aman.obs_index): + inds = np.where(full_aman.obs_index == ob)[0] + obs_az.append(np.nanmedian(full_aman.ancil.az_enc[inds])) + obs_el.append(np.nanmedian(full_aman.ancil.el_enc[inds])) 
+ obs_roll.append(np.nanmedian(full_aman.roll_c[inds])) + obs_resid.append(np.nanmean(full_aman.fit_residuals[inds])) + obs_dxi.append(np.nanmean((full_aman.full_modeled[0] - + full_aman.nominal_xieta_locs[0])[inds]/DEG*60)) + obs_deta.append(np.nanmean((full_aman.full_modeled[1] - + full_aman.nominal_xieta_locs[1])[inds]/DEG*60)) + obs_std_xi.append(np.nanstd((full_aman.full_modeled[0] - + full_aman.nominal_xieta_locs[0])[inds]/DEG*60)) + obs_std_eta.append(np.nanstd((full_aman.full_modeled[1] - + full_aman.nominal_xieta_locs[1])[inds]/DEG*60)) + (ufm_az, ufm_el, ufm_roll, ufm_resid, + ufm_dxi, ufm_deta, ufm_std_xi, + ufm_std_eta, ufm_wafer_num + )= [], [], [], [], [], [], [], [], [] + for ufm in ufm_list: + ufm_inds = np.where(full_aman.det_ufm[inds] == ufm)[0] + ufm_az.append(np.nanmedian(full_aman.ancil.az_enc[inds][ufm_inds])) + ufm_el.append(np.nanmedian(full_aman.ancil.el_enc[inds][ufm_inds])) + ufm_roll.append(np.nanmedian(full_aman.roll_c[inds][ufm_inds])) + ufm_resid.append(np.nanmean(full_aman.fit_residuals[inds][ufm_inds])) + ufm_dxi.append(np.nanmean((full_aman.full_modeled[0] - + full_aman.nominal_xieta_locs[0])[inds][ufm_inds]/DEG*60)) + ufm_deta.append(np.nanmean((full_aman.full_modeled[1] - + full_aman.nominal_xieta_locs[1])[inds][ufm_inds]/DEG*60)) + ufm_std_xi.append(np.nanstd((full_aman.full_modeled[0] - + full_aman.nominal_xieta_locs[0])[inds][ufm_inds]/DEG*60)) + ufm_std_eta.append(np.nanstd((full_aman.full_modeled[1] - + full_aman.nominal_xieta_locs[1])[inds][ufm_inds]/DEG*60)) + ufm_wafer_num.append(np.nanmedian(full_aman.det_wafer[inds][ufm_inds])) + all_ufm_az.append(ufm_az) + all_ufm_el.append(ufm_el) + all_ufm_roll.append(ufm_roll) + all_ufm_resid.append(ufm_resid) + all_ufm_deta.append(ufm_deta) + all_ufm_dxi.append(ufm_dxi) + all_ufm_std_xi.append(ufm_std_xi) + all_ufm_std_eta.append(ufm_std_eta) + all_ufm_wafer_num.append(ufm_wafer_num) + + per_ufm_stats = core.AxisManager() + per_obs_stats = core.AxisManager() + + 
per_obs_stats.wrap("el", np.array(obs_el)) + per_obs_stats.wrap("roll", np.array(obs_roll)) + per_obs_stats.wrap("az", np.array(obs_az)) + per_obs_stats.wrap("resid", np.array(obs_resid)) + per_obs_stats.wrap("dxi", np.array(obs_dxi)) + per_obs_stats.wrap("deta", np.array(obs_deta)) + per_obs_stats.wrap("std_xi", np.array(obs_std_xi)) + per_obs_stats.wrap("std_eta", np.array(obs_std_eta)) + + per_ufm_stats.wrap("az", np.array(all_ufm_az)) + per_ufm_stats.wrap("el", np.array(all_ufm_el)) + per_ufm_stats.wrap("roll", np.array(all_ufm_roll)) + per_ufm_stats.wrap("resid", np.array(all_ufm_resid)) + per_ufm_stats.wrap("dxi", np.array(all_ufm_dxi)) + per_ufm_stats.wrap("deta", np.array(all_ufm_deta)) + per_ufm_stats.wrap("std_xi", np.array(all_ufm_std_xi)) + per_ufm_stats.wrap("std_eta", np.array(all_ufm_std_eta)) + per_ufm_stats.wrap("wafer_num", np.array(all_ufm_wafer_num)) + + if platform == "lat": + obs_cr = [] + all_ufm_cr = [] + for ob in np.unique(full_aman.obs_index): + inds = np.where(full_aman.obs_index == ob)[0] + obs_cr.append(np.nanmedian(full_aman.ancil.corotator_enc[inds])) + ufm_cr = [] + for ufm in ufm_list: + ufm_cr.append(np.nanmedian(full_aman.ancil.corotator_enc[inds][ufm_inds])) + all_ufm_cr.append(ufm_cr) + per_obs_stats.wrap("cr", np.array(obs_cr)) + per_ufm_stats.wrap("cr", np.array(all_ufm_cr)) + + full_aman.wrap("dxi", (full_aman.full_modeled[0] - + full_aman.nominal_xieta_locs[0])/DEG*60, [(0, "samps")]) + full_aman.wrap("deta", (full_aman.full_modeled[1] - + full_aman.nominal_xieta_locs[1])/DEG*60, [(0, "samps")]) + + #full_dxi_av = np.nanmean(full_dxi) + #full_deta_av = np.nanmean(full_deta) + obsids=np.array([int(D.split('_')[1]) for D in full_aman.obs_info.obs_ids]) + per_obs_stats.wrap("obsids", obsids) + per_ufm_stats.wrap("obsids", np.repeat(obsids, np.shape(per_ufm_stats["dxi"])[1])) + full_aman.wrap("obsids", obsids[full_aman.obs_index]) + + #Calculate RMSs + per_obs_stats.wrap("rms", np.sqrt(np.nanmean(per_obs_stats["dxi"]**2 + 
per_obs_stats["deta"]**2))) + per_ufm_stats.wrap("rms", np.sqrt(np.nanmean(per_ufm_stats["dxi"]**2 + per_ufm_stats["deta"]**2))) + full_aman.wrap("rms", np.sqrt(np.nanmean(full_aman["dxi"]**2 + full_aman["deta"]**2))) + full_aman.wrap("per_ufm_stats", per_ufm_stats) + full_aman.wrap("per_obs_stats", per_obs_stats) + + plotter = ModelFitsPlotter(solver_aman=full_aman, + config=config, + save_dir=save_dir, + iteration_tag="", + save_figure=True, + plotlims=plotlims) + plotter.plot_full_residuals_across_focalplane() + plotter.plot_full_histogram() + plotter.plot_full_unmodeled_residuals() + logger.info("Done") @@ -835,7 +1024,7 @@ def main(config_path: str): class ModelFitsPlotter: def __init__(self, solver_aman, config, save_dir, iteration_tag="", save_figure=True, plotlims=None): - self.solver_aman = solver_aman + self.aman = solver_aman self.config = config self.tag = iteration_tag self.save_figure = save_figure @@ -854,16 +1043,187 @@ def __init__(self, solver_aman, config, save_dir, iteration_tag="", save_figure= self.xieta_model = config.get("xieta_model", "measured") self.append_string = config.get("append","") self.iterate_cutoff = config.get("iterate_cutoff", None) + + def plot_full_unmodeled_residuals(self): + platform = self.platform + plot_dir = self.plot_dir + tag = self.tag + append = self.append_string + + ancil = self.aman.ancil + modeled = self.aman.full_modeled + nominal_xieta_locs = self.aman.nominal_xieta_locs + per_ufm_stats = self.aman.per_ufm_stats + per_obs_stats = self.aman.per_obs_stats + roll_c = self.aman.roll_c + full_dxi_av = np.nanmean(self.aman.dxi) + full_deta_av = np.nanmean(self.aman.deta) + if "sat" in platform: + elmin, elmax = 45, 65 + rollmin, rollmax = -45, 45 + azmin, azmax = 0, 360 + if platform=="satp2": + plotlim = self.plotlims*0.7 + else: + plotlim = 8 + else: + plotlim = self.plotlims + elmin, elmax = None, None + rollmin, rollmax = -70, 40 + azmin, azmax = 0, 360 + + plt.figure(figsize=(7,5)) + plt.scatter((modeled[0] - 
nominal_xieta_locs[0])/DEG*60, + (modeled[1] - nominal_xieta_locs[1])/DEG*60, + c=roll_c, s=2.7, + alpha=0.15, marker='.', cmap='jet', + vmin=rollmin, vmax=rollmax) + plt.scatter(per_ufm_stats.dxi, per_ufm_stats.deta, + c=per_ufm_stats.roll, + alpha=0.6, marker='o', cmap='jet', + s=25, edgecolor='k', linewidth=0.7, + vmin=rollmin, vmax=rollmax, label='UFM Avg') + plt.scatter(per_obs_stats.dxi, per_obs_stats.deta, + c = per_obs_stats.roll, + alpha=0.99, marker='X', cmap='jet', + s=45, edgecolor='k', linewidth=0.7, + vmin=rollmin, vmax=rollmax, label='Obs Avg.') + plt.colorbar(label='Roll') + plt.xlabel('Xi Error (arcmin)') + plt.ylabel('Eta Error (arcmin)') + plt.axvline(0,0,1, color='k', alpha=0.4) + plt.axhline(0,0,1, color='k', alpha=0.4) + plt.axhline(0,0,1, color='k', alpha=0.4) + plt.xlim(-plotlim,plotlim); plt.ylim(-plotlim,plotlim) + plt.scatter(full_dxi_av, full_deta_av, color='r', marker='o', edgecolor='k', label='All Data Avg. Offset') + plt.legend(fontsize='small') + if self.save_figure: + plt.savefig(f"{plot_dir}/{platform}_full_2D_Residuals_Roll{tag}.png", dpi=350) + plt.close() + + + plt.figure(figsize=(7,5)) + plt.scatter((modeled[0] - nominal_xieta_locs[0])/DEG*60, + (modeled[1] - nominal_xieta_locs[1])/DEG*60, + c=ancil.el_enc, s=2.7, + alpha=0.15, marker='.', cmap='jet', + vmin=elmin, vmax=elmax) + plt.scatter(per_ufm_stats.dxi, per_ufm_stats.deta, + c=per_ufm_stats.el, + alpha=0.6, marker='o', cmap='jet', + s=25, edgecolor='k', linewidth=0.7, + vmin=elmin, vmax=elmax, label='UFM Avg') + plt.scatter(per_obs_stats.dxi, per_obs_stats.deta, + c = per_obs_stats.el, + alpha=0.99, marker='X', cmap='jet', + s=45, edgecolor='k', linewidth=0.7, + vmin=elmin, vmax=elmax, label='Obs Avg.') + plt.colorbar(label='Elevation') + plt.xlabel('Xi Error (arcmin)') + plt.ylabel('Eta Error (arcmin)') + plt.axvline(0,0,1, color='k', alpha=0.4) + plt.axhline(0,0,1, color='k', alpha=0.4) + plt.axhline(0,0,1, color='k', alpha=0.4) + plt.xlim(-plotlim,plotlim); 
plt.ylim(-plotlim,plotlim) + plt.scatter(full_dxi_av, full_deta_av, color='r', marker='o', edgecolor='k', label='All Data Avg. Offset') + plt.legend(fontsize='small') + if self.save_figure: + plt.savefig(f"{plot_dir}/{platform}_full_2D_Residuals_El{tag}.png", dpi=350) + plt.close() + + plt.figure(figsize=(7,5)) + plt.scatter((modeled[0] - nominal_xieta_locs[0])/DEG*60, + (modeled[1] - nominal_xieta_locs[1])/DEG*60, + c=ancil.az_enc%360, s=2.7, + alpha=0.15, marker='.', cmap='jet', + vmin=azmin, vmax=azmax) + plt.scatter(per_ufm_stats.dxi, per_ufm_stats.deta, + c=per_ufm_stats.az%360, + alpha=0.6, marker='o', cmap='jet', + s=25, edgecolor='k', linewidth=0.7, + vmin=azmin, vmax=azmax, label='UFM Avg') + plt.scatter(per_obs_stats.dxi, per_obs_stats.deta, + c = per_obs_stats.az%360, + alpha=0.99, marker='X', cmap='jet', + s=45, edgecolor='k', linewidth=0.7, + vmin=azmin, vmax=azmax, label='Obs Avg.') + plt.colorbar(label='Azimuth') + plt.xlabel('Xi Error (arcmin)') + plt.ylabel('Eta Error (arcmin)') + plt.axvline(0,0,1, color='k', alpha=0.4) + plt.axhline(0,0,1, color='k', alpha=0.4) + plt.axhline(0,0,1, color='k', alpha=0.4) + plt.xlim(-plotlim,plotlim); plt.ylim(-plotlim,plotlim) + plt.scatter(full_dxi_av, full_deta_av, color='r', marker='o', edgecolor='k', label='All Data Avg. 
Offset') + plt.legend(fontsize='small') + if self.save_figure: + plt.savefig(f"{plot_dir}/{platform}_full_2D_Residuals_Az{tag}.png", dpi=350) + plt.close() + + + def plot_full_residuals_across_focalplane(self): + platform = self.platform + plot_dir = self.plot_dir + tag = self.tag + append = self.append_string + ancil = self.aman.ancil - + weights = self.aman.weights + fit_residuals = self.aman.fit_residuals + nominal_xieta_locs = self.aman.nominal_xieta_locs + + fig, ax = plt.subplots() + im = ax.scatter(nominal_xieta_locs[0], nominal_xieta_locs[1], + c=fit_residuals, alpha=0.11, cmap='jet', + linewidth=0, s=15, vmax=self.plotlims) + sm = cm.ScalarMappable(cmap=im.cmap, norm=im.norm) + sm.set_array([]) + plt.colorbar(sm, ax=ax, label='Fit Residual (arcmin)') + ax.set_xlabel('Xi (rad)') + ax.set_ylabel('Eta (rad)') + plt.title('Fit Residuals across Focal Plane\n(Not averaged per det)') + if platform == 'lat': + plt.xlim(-.042, .042);plt.ylim(-.042, .042) + else: + plt.xlim(-.31, .31);plt.ylim(-.31, .31) + if self.save_figure: + plt.savefig(f"{plot_dir}/{platform}_full_FocalPlane_colored_FitResiduals{tag}.png", dpi=350) + + def plot_full_histogram(self): + platform = self.platform + plotlims = self.plotlims + plot_dir = self.plot_dir + tag = self.tag + append = self.append_string + + det_rms = self.aman.rms + ufm_rms = self.aman.per_ufm_stats.rms + obs_rms = self.aman.per_obs_stats.rms + fit_residuals = self.aman.fit_residuals + + plt.figure() + plt.hist(fit_residuals, bins=50, range=(0, plotlims)) + plt.axvline(det_rms , 0, 1, color='k', + label=f'Full RMS {det_rms:.2f} arcmin') + plt.axvline(obs_rms, 0, 1, color='c', + label = f'Obs RMS {obs_rms:.2f} arcmin') + plt.axvline(ufm_rms, 0, 1, color='m', + label=f'UFM RMS {ufm_rms:.2f} arcmin') + plt.legend(fontsize='medium') + plt.title(platform + ' 1D residuals') + plt.xlabel('arcmin') + + if self.save_figure: + plt.savefig(f"{plot_dir}/{platform}_full_Hist_Residuals{tag}.png", dpi=350) + def 
plot_dets_in_these_obs(self): platform = self.platform plot_dir = self.plot_dir - measured_xieta_data = self.solver_aman.measured_xieta_data - weights = self.solver_aman.weights - roll_c = self.solver_aman.roll_c - elev = self.solver_aman.ancil.el_enc - azim = self.solver_aman.ancil.az_enc + measured_xieta_data = self.aman.measured_xieta_data + weights = self.aman.weights + roll_c = self.aman.roll_c + elev = self.aman.ancil.el_enc + azim = self.aman.ancil.az_enc if platform == 'lat': elmin=10; elmax=90 else: @@ -898,12 +1258,12 @@ def plot_modeled_fits(self): platform = self.platform plot_dir = self.plot_dir tag = self.tag - ancil = self.solver_aman.ancil - nominal_xieta_locs = self.solver_aman.nominal_xieta_locs - measured_xieta_data = self.solver_aman.measured_xieta_data - weights = self.solver_aman.weights - modeled_fits = self.solver_aman.modeled_fits - fit_rms = self.solver_aman.fit_rms + ancil = self.aman.ancil + nominal_xieta_locs = self.aman.nominal_xieta_locs + measured_xieta_data = self.aman.measured_xieta_data + weights = self.aman.weights + modeled_fits = self.aman.modeled_fits + fit_rms = self.aman.fit_rms if self.which_ufm is not None: if isinstance(self.which_ufm, list): ufm_list = self.which_ufm @@ -996,12 +1356,12 @@ def plot_ws0_modeled_fits(self): plot_dir = self.plot_dir xieta_model = self.xieta_model tag = self.tag - ancil = self.solver_aman.ancil - nominal_xieta_locs = self.solver_aman.nominal_xieta_locs - measured_xieta_data = self.solver_aman.measured_xieta_data - weights = self.solver_aman.weights - modeled_fits = self.solver_aman.modeled_fits - fit_rms = self.solver_aman.fit_rms + ancil = self.aman.ancil + nominal_xieta_locs = self.aman.nominal_xieta_locs + measured_xieta_data = self.aman.measured_xieta_data + weights = self.aman.weights + modeled_fits = self.aman.modeled_fits + fit_rms = self.aman.fit_rms xi_model_fit = modeled_fits.xi eta_model_fit = modeled_fits.eta @@ -1085,25 +1445,25 @@ def plot_template_space_fits_per_wafer(self): 
plotlims = self.plotlims pm_version = self.pm_version tag = self.tag - ancil = self.solver_aman.ancil - roll_c = self.solver_aman.roll_c - nominal_xieta_locs = self.solver_aman.nominal_xieta_locs - measured_xieta_data = self.solver_aman.measured_xieta_data - weights = self.solver_aman.weights - pointing_model = self.solver_aman.pointing_model - modeled_fits = self.solver_aman.modeled_fits - fit_rms = self.solver_aman.fit_rms + ancil = self.aman.ancil + roll_c = self.aman.roll_c + nominal_xieta_locs = self.aman.nominal_xieta_locs + measured_xieta_data = self.aman.measured_xieta_data + weights = self.aman.weights + pointing_model = self.aman.pointing_model + modeled_fits = self.aman.modeled_fits + fit_rms = self.aman.fit_rms scale_weights = weights / np.nanmax(weights) xi_unmod, eta_unmod = model_template_xieta( pointing_model, pm_version, - self.solver_aman + self.aman ) xi0, eta0 = model_template_xieta( pm.param_defaults[pm_version], pm_version, - self.solver_aman + self.aman ) #Plot with Elevation as colorbar fig, ax = plt.subplots(2, 4, figsize=(9, 6)) @@ -1192,25 +1552,25 @@ def plot_template_space_fits_per_detector(self): tag = self.tag plotlims = self.plotlims pm_version = self.pm_version - ancil = self.solver_aman.ancil - roll_c = self.solver_aman.roll_c - nominal_xieta_locs = self.solver_aman.nominal_xieta_locs - measured_xieta_data = self.solver_aman.measured_xieta_data - weights = self.solver_aman.weights - pointing_model = self.solver_aman.pointing_model - modeled_fits = self.solver_aman.modeled_fits - fit_rms = self.solver_aman.fit_rms + ancil = self.aman.ancil + roll_c = self.aman.roll_c + nominal_xieta_locs = self.aman.nominal_xieta_locs + measured_xieta_data = self.aman.measured_xieta_data + weights = self.aman.weights + pointing_model = self.aman.pointing_model + modeled_fits = self.aman.modeled_fits + fit_rms = self.aman.fit_rms scale_weights = weights / np.nanmax(weights) xi_unmod, eta_unmod = model_template_xieta( pointing_model, pm_version, - 
self.solver_aman + self.aman ) xi0, eta0 = model_template_xieta( pm.param_defaults[pm_version], pm_version, - self.solver_aman + self.aman ) #plot with weights as colorbar fig, ax = plt.subplots(figsize=(9, 6)) @@ -1223,7 +1583,7 @@ def plot_template_space_fits_per_detector(self): edgecolor="gray", lw=0.3, marker="o", - alpha=0.4, + alpha=0.2, cmap="viridis", vmin=self.config.get("weight_cutoff"), vmax=1 @@ -1247,7 +1607,7 @@ def plot_template_space_fits_per_detector(self): edgecolor="gray", lw=0.3, marker="o", - alpha=0.4, + alpha=0.2, cmap="jet", ) ax.set_xlim(-1 * plotlims, plotlims) @@ -1275,7 +1635,7 @@ def plot_template_space_fits_per_detector(self): edgecolor="gray", lw=0.3, marker="o", - alpha=0.4, + alpha=0.2, cmap="jet", ) ax.set_xlim(-1 * plotlims, plotlims) @@ -1301,7 +1661,7 @@ def plot_template_space_fits_per_detector(self): edgecolor="gray", lw=0.3, marker="o", - alpha=0.4, + alpha=0.2, cmap="jet", ) ax.set_xlim(-1 * plotlims, plotlims) @@ -1321,14 +1681,14 @@ def plot_residuals_vs_ancil(self): pm_version = self.pm_version xieta_model = self.xieta_model plotlims = self.plotlims - ancil = self.solver_aman.ancil - roll_c = self.solver_aman.roll_c - nominal_xieta_locs = self.solver_aman.nominal_xieta_locs - measured_xieta_data = self.solver_aman.measured_xieta_data - weights = self.solver_aman.weights - pointing_model = self.solver_aman.pointing_model - modeled_fits = self.solver_aman.modeled_fits - fit_rms = self.solver_aman.fit_rms + ancil = self.aman.ancil + roll_c = self.aman.roll_c + nominal_xieta_locs = self.aman.nominal_xieta_locs + measured_xieta_data = self.aman.measured_xieta_data + weights = self.aman.weights + pointing_model = self.aman.pointing_model + modeled_fits = self.aman.modeled_fits + fit_rms = self.aman.fit_rms scale_weights = weights / np.nanmax(weights) plotmask = np.where(weights) @@ -1404,25 +1764,25 @@ def plot_total_residuals(self): tag = self.tag plotlims = self.plotlims pm_version = self.pm_version - ancil = 
self.solver_aman.ancil - roll_c = self.solver_aman.roll_c - weights = self.solver_aman.weights - fit_rms = self.solver_aman.fit_rms - obs_index = self.solver_aman.obs_index + ancil = self.aman.ancil + roll_c = self.aman.roll_c + weights = self.aman.weights + fit_rms = self.aman.fit_rms + obs_index = self.aman.obs_index scale_weights = weights / np.nanmax(weights) effobs =np.where(np.diff(np.append(obs_index, obs_index[-1]+1))>0)[0] try: - two_fits = np.any(_valid_arg("fit_residuals_i1", 'signal', src=self.solver_aman)) + two_fits = np.any(_valid_arg("fit_residuals_i1", 'signal', src=self.aman)) except: two_fits = False if two_fits: iterate_cutoff = self.config.get("iterate_cutoff") if iterate_cutoff == "auto": - iterate_cutoff = np.nanstd(self.solver_aman.fit_residuals_i1)*2 + np.nanmedian(self.solver_aman.fit_residuals_i1) - bad_fit_inds = self.solver_aman.bad_fit_inds - fit_residuals_i1 = self.solver_aman.fit_residuals_i1 - fit_residuals_i2 = self.solver_aman.fit_residuals + iterate_cutoff = np.nanstd(self.aman.fit_residuals_i1)*2 + np.nanmedian(self.aman.fit_residuals_i1) + bad_fit_inds = self.aman.bad_fit_inds + fit_residuals_i1 = self.aman.fit_residuals_i1 + fit_residuals_i2 = self.aman.fit_residuals fig = plt.figure(figsize=(6, 4)) gs = fig.add_gridspec(7, 1) ax1 = fig.add_subplot(gs[0:-2, :]) @@ -1501,7 +1861,7 @@ def plot_total_residuals(self): plt.close() else: # Plot first fit iteration residuals only - fit_residuals_i1 = self.solver_aman.fit_residuals + fit_residuals_i1 = self.aman.fit_residuals fig, ax1 = plt.subplots() im = ax1.scatter( np.arange(len(fit_residuals_i1)), @@ -1531,12 +1891,12 @@ def plot_residuals_histograms(self): plot_dir = self.plot_dir tag = self.tag append = self.append_string - ancil = self.solver_aman.ancil - weights = self.solver_aman.weights - fit_rms = self.solver_aman.fit_rms - fit_residuals = self.solver_aman.fit_residuals - fit_rms_full = self.solver_aman.fit_rms_full - fit_residuals_full = 
self.solver_aman.fit_residuals_full + ancil = self.aman.ancil + weights = self.aman.weights + fit_rms = self.aman.fit_rms + fit_residuals = self.aman.fit_residuals + fit_rms_full = self.aman.fit_rms_full + fit_residuals_full = self.aman.fit_residuals_full xmax = np.nanmax(fit_residuals_full) * 1.1 title = f"{append} {tag}" @@ -1570,10 +1930,10 @@ def plot_xieta_residuals(self): plot_dir = self.plot_dir tag = self.tag xieta_model = self.xieta_model - weights = self.solver_aman.weights - modeled_fits = self.solver_aman.modeled_fits - nominal_xieta_locs = self.solver_aman.nominal_xieta_locs - measured_xieta_data = self.solver_aman.measured_xieta_data + weights = self.aman.weights + modeled_fits = self.aman.modeled_fits + nominal_xieta_locs = self.aman.nominal_xieta_locs + measured_xieta_data = self.aman.measured_xieta_data scale_weights = weights / np.nanmax(weights) plotmask = np.where(weights) @@ -1628,10 +1988,10 @@ def plot_xieta_cross_residuals(self): plot_dir = self.plot_dir tag = self.tag xieta_model = self.xieta_model - weights = self.solver_aman.weights - modeled_fits = self.solver_aman.modeled_fits - nominal_xieta_locs = self.solver_aman.nominal_xieta_locs - measured_xieta_data = self.solver_aman.measured_xieta_data + weights = self.aman.weights + modeled_fits = self.aman.modeled_fits + nominal_xieta_locs = self.aman.nominal_xieta_locs + measured_xieta_data = self.aman.measured_xieta_data scale_weights = weights / np.nanmax(weights) plotmask = np.where(weights) @@ -1679,7 +2039,8 @@ def plot_xieta_cross_residuals(self): if self.save_figure: plt.savefig(f"{plot_dir}/{platform}_xieta_cross_residuals{tag}.png", dpi=350) plt.close() - + + ############ if __name__ == "__main__": From 41aa7f1daa9bbc610a45c3fbeb901482ee2c9892 Mon Sep 17 00:00:00 2001 From: Saianeesh Keshav Haridas Date: Wed, 6 Aug 2025 11:07:01 -0700 Subject: [PATCH 28/48] feat: add joint fitting and some db fixes --- .../site_pipeline/solve_pointing_model.py | 915 ++++++++++-------- 1 file 
changed, 487 insertions(+), 428 deletions(-) diff --git a/sotodlib/site_pipeline/solve_pointing_model.py b/sotodlib/site_pipeline/solve_pointing_model.py index 8ca5d41e5..2a7c4a520 100644 --- a/sotodlib/site_pipeline/solve_pointing_model.py +++ b/sotodlib/site_pipeline/solve_pointing_model.py @@ -50,14 +50,12 @@ def load_nom_ufm_centers(config): return nom_ufm_centers -def load_per_obs_data(config): +def load_per_obs_data(config, t0, tf): # Load per-observation UFM center data points and weights # The per obs .h5 file a dict with obs_id for keys per_obs_fps = config.get("per_obs_fps") ufms = config.get("ufms") skip_tags = config.get("skip_tags", []) - t0 = config.get("begin_timerange", 0) - tf = config.get("end_timerange", 3000000000) rxs = fpc.Receiver.load_file(per_obs_fps) filelist = [obs for obs in rxs.keys() if all(skip not in obs for skip in skip_tags)] @@ -136,11 +134,9 @@ def create_culling_mask(obs_index, cull_dets): return culling_mask -def load_per_detector_data(config, no_downsample_set=False, return_all_dets=False): +def load_per_detector_data(config, t0, tf, no_downsample_set=False, return_all_dets=False): per_obs_fps = config.get("per_obs_fps") skip_tags = config.get("skip_tags", []) - t0 = config.get("begin_timerange", 0) - tf = config.get("end_timerange", int(time.time())) rxs = fpc.Receiver.load_file(per_obs_fps) if return_all_dets: @@ -286,20 +282,45 @@ def load_obs_boresight_per_detector(config, filelist, obs_ind): return ancil, roll_c -def _init_fit_params(config): +def _init_fit_params(config, epochs): pm_version = config.get("pm_version") init_params = config.get("initial_params", pm.param_defaults[pm_version]) - fixed_params = config.get("fixed_params",None) + fixed_params = config.get("fixed_params",[]) + + # Add independant params + orig_pars = np.array(list(init_params.keys())) + par_list = orig_pars.copy() + for epoch in epochs: + indep_list = epoch["indep_list"] + if np.sum(np.isin(indep_list, par_list)) != len(indep_list): + raise 
ValueError(f"Invalid independant parameters in time range starting with {t0}") + indep_list = [f"{n}_{epoch['name']}" for n in indep_list] + par_list = np.hstack((par_list, indep_list)) + for ipar, par in zip(indep_list, epoch["indep_list"]): + init_params[ipar] = init_params[par] + par_count = np.zeros(len(par_list)) + for epoch in epochs: + indep_list = epoch["indep_list"] + pmsk = np.zeros(len(par_list), bool) + pmsk[:len(orig_pars)] = True + pmsk[np.isin(par_list, indep_list)] = False + pmsk += np.array([epoch["name"] in par for par in par_list]) + if np.sum(pmsk) != len(orig_pars): + raise ValueError(f"Epoch {epoch['name']} somehow has the wrong number of parameters!") + par_count[pmsk] += 1 + epoch["params"] = par_list[pmsk] + fixed_params += par_list[par_count == 0].tolist() + # Initialize lmfit Parameter object fit_params = Parameters() for p in init_params.keys(): fit_params.add(p, value=init_params[p], vary=True) # Turn off various parameters depending on platform - if fixed_params is not None: - for fix in fixed_params: + for fix in fixed_params: + if fix in fit_params: fit_params[fix].set(vary=False) - return fit_params + return fit_params, epochs def objective_model_func_lmfit( params, pm_version, solver_aman, xieta_model, weights=True @@ -315,6 +336,16 @@ def objective_model_func_lmfit( weights_array = solver_aman.weights if weights else np.ones(len(dist)) return chi_sq(weights_array, dist) +def objective_model_func_lmfit_joint( + params, pm_version, epochs, xieta_model, weights=True +): + params = params.valuesdict() + chisq = 0 + for epoch in epochs: + chisq += objective_model_func_lmfit({par.split(f"_{epoch['name']}")[0]:params[par] for par in epoch["params"]}, pm_version, epoch["solver_aman"], xieta_model, weights) + return chisq + + def chi_sq(weights, dist): #N = np.identity(len(dist)) * weights #chi2 = dist.T @ N @ dist @@ -416,9 +447,10 @@ def _create_db(filename, save_dir): else: os.makedirs(save_dir, exist_ok=True) scheme = 
core.metadata.ManifestScheme() - scheme.add_range_match("obs:obs_timestamp") + scheme.add_range_match("obs:timestamp") scheme.add_data_field("dataset") - return core.metadata.ManifestDb(db_filename, scheme=scheme) + core.metadata.ManifestDb(scheme=scheme).to_file(db_filename) + return core.metadata.ManifestDb(db_filename) def get_parser(parser=None): @@ -483,7 +515,14 @@ def main(config_path: str): ########################################################## ### Begin split for per-detector or per-UFM center fitting ########################################################## - + epochs = config.get("epochs") + for epoch in epochs: + if "name" not in epoch: + epoch["name"] = f"t{epoch['begin_timerange']}" + if "indep_list" not in epoch: + epoch["indep_list"] = [] + if epochs is None: + raise ValueError("No epochs provided") fit_type = config.get("fit_type", "detector") if fit_type == "detector": @@ -500,66 +539,70 @@ def main(config_path: str): #Make axis manager with full detector set. # Keep wafer/band/obs cuts but do not further downsample. 
- fitcheck_aman = core.AxisManager(core.IndexAxis("samps")) - ( - filelist, - obs_dets_fits, - weights_dets, - all_nom_det_array, - all_det_ids, - obs_index, - ) = load_per_detector_data(config, no_downsample_set=True) - ancil, roll_c = load_obs_boresight_per_detector(config, filelist, obs_index) - - fitcheck_aman.wrap("ancil", ancil) - fitcheck_aman.wrap( - "nominal_xieta_locs", all_nom_det_array.T, - [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], - [(1, "samps")], - ) - fitcheck_aman.wrap( - "measured_xieta_data", obs_dets_fits.T, - [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], - [(1, "samps")], - ) - fitcheck_aman.wrap("weights", weights_dets, [(0, "samps")]) - fitcheck_aman.wrap("obs_index", obs_index, [(0, "samps")]) - logger.info("Loaded %s fit check data points", len(weights_dets)) - - #Now make axis manager that has down sampled data for computation - solver_aman = core.AxisManager(core.IndexAxis("samps")) - ( - filelist, - obs_dets_fits, - weights_dets, - all_nom_det_array, - all_det_ids, - obs_index, - ) = load_per_detector_data(config) - logger.info("Loaded %s data points", len(weights_dets)) - ancil, roll_c = load_obs_boresight_per_detector(config, filelist, obs_index) - - # Build Axis Managers - solver_aman.wrap("ancil", ancil) - obs_info = core.AxisManager() - obs_info.wrap("obs_ids", np.array(filelist)) - solver_aman.wrap("obs_info", obs_info) - solver_aman.wrap("roll_c", roll_c, [(0, "samps")]) - solver_aman.wrap( - "nominal_xieta_locs", - all_nom_det_array.T, - [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], - [(1, "samps")], - ) - solver_aman.wrap( - "measured_xieta_data", - obs_dets_fits.T, - [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], - [(1, "samps")], - ) - solver_aman.wrap("weights", weights_dets, [(0, "samps")]) - solver_aman.wrap("obs_index", obs_index) - logger.info("Built axis manager") + for epoch in epochs: + t0, tf = epoch["begin_timerange"], epoch["end_timerange"] + 
fitcheck_aman = core.AxisManager(core.IndexAxis("samps")) + ( + filelist, + obs_dets_fits, + weights_dets, + all_nom_det_array, + all_det_ids, + obs_index, + ) = load_per_detector_data(config, t0, tf, no_downsample_set=True) + ancil, roll_c = load_obs_boresight_per_detector(config, filelist, obs_index) + + fitcheck_aman.wrap("ancil", ancil) + fitcheck_aman.wrap( + "nominal_xieta_locs", all_nom_det_array.T, + [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], + [(1, "samps")], + ) + fitcheck_aman.wrap( + "measured_xieta_data", obs_dets_fits.T, + [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], + [(1, "samps")], + ) + fitcheck_aman.wrap("weights", weights_dets, [(0, "samps")]) + fitcheck_aman.wrap("obs_index", obs_index, [(0, "samps")]) + logger.info("Loaded %s fit check data points", len(weights_dets)) + + #Now make axis manager that has down sampled data for computation + solver_aman = core.AxisManager(core.IndexAxis("samps")) + ( + filelist, + obs_dets_fits, + weights_dets, + all_nom_det_array, + all_det_ids, + obs_index, + ) = load_per_detector_data(config, t0, tf) + logger.info("Loaded %s data points", len(weights_dets)) + ancil, roll_c = load_obs_boresight_per_detector(config, filelist, obs_index) + + # Build Axis Managers + solver_aman.wrap("ancil", ancil) + obs_info = core.AxisManager() + obs_info.wrap("obs_ids", np.array(filelist)) + solver_aman.wrap("obs_info", obs_info) + solver_aman.wrap("roll_c", roll_c, [(0, "samps")]) + solver_aman.wrap( + "nominal_xieta_locs", + all_nom_det_array.T, + [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], + [(1, "samps")], + ) + solver_aman.wrap( + "measured_xieta_data", + obs_dets_fits.T, + [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], + [(1, "samps")], + ) + solver_aman.wrap("weights", weights_dets, [(0, "samps")]) + solver_aman.wrap("obs_index", obs_index) + epoch["solver_aman"] = solver_aman + epoch["fitcheck_aman"] = fitcheck_aman + logger.info("Built axis manager for 
epoch %s", epoch["name"]) ########################################################################### elif fit_type == "ufm_center": @@ -569,57 +612,60 @@ def main(config_path: str): logger.info("Loaded nominal UFM centers from %s: ", config.get("ffp_path")) logger.info(nom_ufm_centers) - filelist, obs_ufm_centers, weights_ufm, obs_index = load_per_obs_data(config) - logger.info("Loaded per-obs FFP data from %s: ", config.get("per_obs_fps")) - logger.info("Including data from these obs:") - logger.info(filelist) - - ancil, roll_c = load_obs_boresight(config, filelist) - logger.info("Loaded boresight data from obs ids.") - - # Build Axis Managers - obs_info = core.AxisManager() - obs_info.wrap("obs_ids", np.array(filelist)) - - solver_aman = core.AxisManager(core.IndexAxis("samps")) - solver_aman.wrap("ancil", ancil) - solver_aman.wrap("obs_info", obs_info) - solver_aman.wrap("roll_c", np.repeat(roll_c, 7), [(0, "samps")]) - solver_aman.wrap( - "nominal_xieta_locs", - np.repeat([nom_ufm_centers], len(filelist), axis=0) - .reshape(len(filelist) * 7, 3) - .T, - [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], - [(1, "samps")], - ) - solver_aman.wrap( - "measured_xieta_data", - obs_ufm_centers.reshape(len(filelist) * 7, 3).T, - [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], - [(1, "samps")], - ) - solver_aman.wrap("weights", weights_ufm.reshape(-1), [(0, "samps")]) - solver_aman.wrap("obs_index", obs_index) - # Make weights/data cuts - logger.info("Built axis manager") + for epoch in epochs: + t0, tf = epoch["begin_timerange"], epoch["end_timerange"] + filelist, obs_ufm_centers, weights_ufm, obs_index = load_per_obs_data(config, t0, tf) + logger.info("Loaded per-obs FFP data from %s: ", config.get("per_obs_fps")) + logger.info("Including data from these obs:") + logger.info(filelist) + + ancil, roll_c = load_obs_boresight(config, filelist) + logger.info("Loaded boresight data from obs ids.") + + # Build Axis Managers + obs_info = 
core.AxisManager() + obs_info.wrap("obs_ids", np.array(filelist)) + + solver_aman = core.AxisManager(core.IndexAxis("samps")) + solver_aman.wrap("ancil", ancil) + solver_aman.wrap("obs_info", obs_info) + solver_aman.wrap("roll_c", np.repeat(roll_c, 7), [(0, "samps")]) + solver_aman.wrap( + "nominal_xieta_locs", + np.repeat([nom_ufm_centers], len(filelist), axis=0) + .reshape(len(filelist) * 7, 3) + .T, + [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], + [(1, "samps")], + ) + solver_aman.wrap( + "measured_xieta_data", + obs_ufm_centers.reshape(len(filelist) * 7, 3).T, + [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], + [(1, "samps")], + ) + solver_aman.wrap("weights", weights_ufm.reshape(-1), [(0, "samps")]) + solver_aman.wrap("obs_index", obs_index) + # Make weights/data cuts + epoch["solver_aman"] = solver_aman + logger.info("Built axis manager for epoch %s", epoch["name"]) ################################ # END of SPLIT: Now fit the parameters ################################ # Initialize Parameters to Fit with Model - fit_params = _init_fit_params(config) + fit_params, epochs = _init_fit_params(config, epochs) logger.info("Initialized fit parameters") # Solve for Model Parameters # use chosen xieta_model to solve for parameters model_solved_params = minimize( - objective_model_func_lmfit, + objective_model_func_lmfit_joint, fit_params, method="nelder", nan_policy="omit", - args=(pm_version, solver_aman, xieta_model, use_weights), + args=(pm_version, epochs, xieta_model, use_weights), ) logger.info("Ran 1st Minimization") @@ -632,144 +678,158 @@ def main(config_path: str): logger.info(fit_report(model_solved_params)) # save pointing model parameters to axis manager - param_aman = core.AxisManager() - for k in list(test_params.keys()): - param_aman.wrap(k, test_params[k]) - solver_aman.wrap("pointing_model", param_aman) - - # save errors to axis manager - error_aman = core.AxisManager() - for k in 
list(model_solved_params.params.values()): - error_aman.wrap(k.name, k.stderr) - solver_aman.wrap("pointing_model_errors", error_aman) - - # parameter_fit_stats = build_param_fit_stat_aman(model_solved_params) - # solver_aman.wrap("parameter_fit_stats", parameter_fit_stats) - - # Model template and measured points using parameters found above - modeled_fits, fit_residuals_i1, rms_i1, model_reference = apply_model_params(xieta_model, solver_aman.pointing_model, pm_version, solver_aman) - logger.info("RMS on fit: %f arcmin", rms_i1) - - # Save fit results to the axis manager - modelfit_aman = core.AxisManager() - modelfit_aman.wrap("xi", modeled_fits[0], overwrite=True) - modelfit_aman.wrap("eta", modeled_fits[1], overwrite=True) - solver_aman.wrap("modeled_fits", modelfit_aman, overwrite=True) - solver_aman.wrap("fit_residuals", fit_residuals_i1, overwrite=True) - solver_aman.wrap("fit_rms", rms_i1, overwrite=True) - - if fit_type == "detector": - _, fit_residuals_full, rms_full, _ = apply_model_params(xieta_model, solver_aman.pointing_model, pm_version, fitcheck_aman) - logger.info("RMS on FULL detector set: %f arcmin", rms_full) - solver_aman.wrap("fit_residuals_full", fit_residuals_full, overwrite=True) - solver_aman.wrap("fit_rms_full", rms_full, overwrite=True) - solver_aman.wrap("obs_index_full", fitcheck_aman.obs_index) + for epoch in epochs: + logger.info("Calculating RMS and cutoff for %s", epoch["name"]) + par_mapping = {par:par.split(f"_{epoch['name']}")[0] for par in epoch["params"]} + param_aman = core.AxisManager() + for k in list(par_mapping.keys()): + param_aman.wrap(par_mapping[k], test_params[k]) + param_aman.wrap("version", pm_version) + epoch["solver_aman"].wrap("pointing_model", param_aman) + + # save errors to axis manager + error_aman = core.AxisManager() + for k in list(model_solved_params.params.values()): + if k.name in epoch["params"]: + error_aman.wrap(par_mapping[k.name], k.stderr) + epoch["solver_aman"].wrap("pointing_model_errors", 
error_aman) + + # parameter_fit_stats = build_param_fit_stat_aman(model_solved_params) + # solver_aman.wrap("parameter_fit_stats", parameter_fit_stats) + + # Model template and measured points using parameters found above + modeled_fits, fit_residuals_i1, rms_i1, model_reference = apply_model_params(xieta_model, epoch["solver_aman"].pointing_model, pm_version, epoch["solver_aman"]) + logger.info("RMS on fit: %f arcmin", rms_i1) + + # Save fit results to the axis manager + modelfit_aman = core.AxisManager() + modelfit_aman.wrap("xi", modeled_fits[0], overwrite=True) + modelfit_aman.wrap("eta", modeled_fits[1], overwrite=True) + epoch["solver_aman"].wrap("modeled_fits", modelfit_aman, overwrite=True) + epoch["solver_aman"].wrap("fit_residuals", fit_residuals_i1, overwrite=True) + epoch["solver_aman"].wrap("fit_rms", rms_i1, overwrite=True) - cutoff = np.nanstd(fit_residuals_i1)*2 + np.nanmedian(fit_residuals_i1) - logger.info(f"2 stdev away from residual Median: {cutoff:.2f} arcmin") - - if config.get("make_plots"): - tag = "_i1" - plotter = ModelFitsPlotter(solver_aman=solver_aman, - config=config, - save_dir=save_dir, - iteration_tag=tag, - save_figure=True, - plotlims=plotlims) - if fit_type == "ufm_center": - plotter.plot_ws0_modeled_fits() - plotter.plot_template_space_fits_per_wafer() - plotter.plot_residuals_vs_ancil() - plotter.plot_xieta_cross_residuals() - plotter.plot_xieta_residuals() - else: - plotter.plot_modeled_fits() - plotter.plot_template_space_fits_per_detector() - plotter.plot_residuals_vs_ancil() - plotter.plot_residuals_histograms() - plotter.plot_dets_in_these_obs() + if fit_type == "detector": + _, fit_residuals_full, rms_full, _ = apply_model_params(xieta_model, epoch["solver_aman"].pointing_model, pm_version, epoch["fitcheck_aman"]) + logger.info("RMS on FULL detector set: %f arcmin", rms_full) + epoch["solver_aman"].wrap("fit_residuals_full", fit_residuals_full, overwrite=True) + epoch["solver_aman"].wrap("fit_rms_full", rms_full, 
overwrite=True) + epoch["solver_aman"].wrap("obs_index_full", epoch["fitcheck_aman"].obs_index) + + cutoff = np.nanstd(fit_residuals_i1)*2 + np.nanmedian(fit_residuals_i1) + logger.info(f"2 stdev away from residual Median: {cutoff:.2f} arcmin") + + if config.get("make_plots"): + tag = f"{epoch['name']}_i1" + plotter = ModelFitsPlotter(solver_aman=epoch["solver_aman"], + config=config, + save_dir=save_dir, + iteration_tag=tag, + save_figure=True, + plotlims=plotlims) + if fit_type == "ufm_center": + plotter.plot_ws0_modeled_fits() + plotter.plot_template_space_fits_per_wafer() + plotter.plot_residuals_vs_ancil() + plotter.plot_xieta_cross_residuals() + plotter.plot_xieta_residuals() + else: + plotter.plot_modeled_fits() + plotter.plot_template_space_fits_per_detector() + plotter.plot_residuals_vs_ancil() + plotter.plot_residuals_histograms() + plotter.plot_dets_in_these_obs() if iterate_cutoff is not None: logger.info("Iterating parameter solution") logger.info(f"Using {iterate_cutoff} as cutoff") - if iterate_cutoff == "auto": - iterate_cutoff = np.nanstd(fit_residuals_i1)*2 + np.nanmedian(fit_residuals_i1) - logger.info(f"Using {iterate_cutoff} as cutoff") - bad_fit_inds = np.where(fit_residuals_i1 > iterate_cutoff)[0] - logger.info("Bad fit indices:") - logger.info(bad_fit_inds) - logger.info( - "%f data points are higher than %s arcmin", - len(bad_fit_inds), - iterate_cutoff, - ) - - if len(bad_fit_inds) != 0: - if fit_type == "ufm_center": - bad_filename = bad_fit_inds // 7 - bad_wafer = bad_fit_inds % 7 - logger.info("Outliers:") - for i, full_i in enumerate(bad_fit_inds): - logger.info( - f"{filelist[bad_filename[i]]}; ws{bad_wafer[i]}; Resid. {np.round(fit_residuals_i1[full_i], 4)}" - ) - logger.info( - f"--- Roll {solver_aman.roll_c[full_i]}; El {solver_aman.ancil.el_enc[full_i]}; weight {np.round(solver_aman.weights[full_i],4)}" - ) - - # Print RMS of initial fits without outlying data points before - # zero-ing the weights. 
- good_fit_inds = np.where(fit_residuals_i1 < iterate_cutoff)[0] - _, _, masked_rms, _ = apply_model_params(xieta_model, - solver_aman.pointing_model, - pm_version, - solver_aman, - use_inds=good_fit_inds) - - logger.info("RMS on initial fit without outliers: %f arcmin", masked_rms) - solver_aman.wrap('bad_fit_inds', bad_fit_inds) - solver_aman.weights[bad_fit_inds] = 0.0 - - model_solved_params = minimize( - objective_model_func_lmfit, - fit_params, - method="nelder", - nan_policy="omit", - args=(pm_version, solver_aman, xieta_model, use_weights), - ) - - test_params = _round_params(model_solved_params.params.valuesdict(), 8) - test_params["version"] = pm_version - logger.info("Found best-fit pointing model parameters, second iteration") - logger.info(test_params) + for epoch in epochs: + logger.info("Appylying cutoff to %s", epoch["name"]) + fit_residuals_i1 = epoch["solver_aman"].fit_residuals + cutoff = iterate_cutoff + if iterate_cutoff == "auto": + cutoff = np.nanstd(fit_residuals_i1)*2 + np.nanmedian(fit_residuals_i1) + logger.info(f"Using {cutoff} as cutoff") + bad_fit_inds = np.where(fit_residuals_i1 > cutoff)[0] + logger.info("Bad fit indices:") + logger.info(bad_fit_inds) logger.info( - model_solved_params.params.pretty_print(precision=5, colwidth=11) + "%f data points are higher than %s arcmin", + len(bad_fit_inds), + cutoff, ) - logger.info("Fit Report:") - logger.info(lmfit.fit_report(model_solved_params)) - # save pointing model parameters to axis manager - solver_aman.move("pointing_model", "pointing_model_i1") + if len(bad_fit_inds) != 0: + if fit_type == "ufm_center": + bad_filename = bad_fit_inds // 7 + bad_wafer = bad_fit_inds % 7 + logger.info("Outliers:") + for i, full_i in enumerate(bad_fit_inds): + logger.info( + f"{filelist[bad_filename[i]]}; ws{bad_wafer[i]}; Resid. 
{np.round(fit_residuals_i1[full_i], 4)}" + ) + logger.info( + f"--- Roll {solver_aman.roll_c[full_i]}; El {solver_aman.ancil.el_enc[full_i]}; weight {np.round(solver_aman.weights[full_i],4)}" + ) + + # Print RMS of initial fits without outlying data points before + # zero-ing the weights. + good_fit_inds = np.where(fit_residuals_i1 < cutoff)[0] + _, _, masked_rms, _ = apply_model_params(xieta_model, + epoch["solver_aman"].pointing_model, + pm_version, + epoch["solver_aman"], + use_inds=good_fit_inds) + + logger.info("RMS on initial fit without outliers: %f arcmin", masked_rms) + epoch["solver_aman"].wrap('bad_fit_inds', bad_fit_inds) + epoch["solver_aman"].weights[bad_fit_inds] = 0.0 + + model_solved_params = minimize( + objective_model_func_lmfit_joint, + fit_params, + method="nelder", + nan_policy="omit", + args=(pm_version, epochs, xieta_model, use_weights), + ) + + test_params = _round_params(model_solved_params.params.valuesdict(), 8) + test_params["version"] = pm_version + logger.info("Found best-fit pointing model parameters, second iteration") + logger.info(test_params) + logger.info( + model_solved_params.params.pretty_print(precision=5, colwidth=11) + ) + logger.info("Fit Report:") + logger.info(lmfit.fit_report(model_solved_params)) + + # save pointing model parameters to axis manager + for epoch in epochs: + logger.info("Calculating RMS for %s", epoch["name"]) + par_mapping = {par:par.split(f"_{epoch['name']}")[0] for par in epoch["params"]} + epoch["solver_aman"].move("pointing_model", "pointing_model_i1") param_aman = core.AxisManager() - for k in list(test_params.keys()): - param_aman.wrap(k, test_params[k]) - solver_aman.wrap("pointing_model", param_aman, overwrite=True) + for k in list(par_mapping.keys()): + param_aman.wrap(par_mapping[k], test_params[k]) + param_aman.wrap("version", pm_version) + epoch["solver_aman"].wrap("pointing_model", param_aman) # save errors to axis manager - solver_aman.move("pointing_model_errors", 
"pointing_model_errors_i1") + epoch["solver_aman"].move("pointing_model_errors", "pointing_model_errors_i1") error_aman = core.AxisManager() for k in list(model_solved_params.params.values()): - error_aman.wrap(k.name, k.stderr) - solver_aman.wrap("pointing_model_errors", error_aman, overwrite=True) + if k.name in epoch["params"]: + error_aman.wrap(par_mapping[k.name], k.stderr) + epoch["solver_aman"].wrap("pointing_model_errors", error_aman) # parameter_fit_stats = build_param_fit_stat_aman(model_solved_params) - # solver_aman.wrap("parameter_fit_stats", parameter_fit_stats, overwrite=True) + # epoch["solver_aman"].wrap("parameter_fit_stats", parameter_fit_stats, overwrite=True) # Recalculate best-fit modeled points modeled_fits, fit_residuals_i2, rms_i2, model_reference = apply_model_params(xieta_model, - solver_aman.pointing_model, + epoch["solver_aman"].pointing_model, pm_version, - solver_aman) + epoch["solver_aman"]) logger.info("RMS on secondary fit: %f arcmin", rms_i2) @@ -777,23 +837,23 @@ def main(config_path: str): modelfit_aman = core.AxisManager() modelfit_aman.wrap("xi", modeled_fits[0], overwrite=True) modelfit_aman.wrap("eta", modeled_fits[1], overwrite=True) - solver_aman.wrap("modeled_fits", modelfit_aman, overwrite=True) - solver_aman.move("fit_residuals", "fit_residuals_i1") - solver_aman.wrap("fit_residuals", fit_residuals_i2, overwrite=True) - solver_aman.move("fit_rms", "fit_rms_i1") - solver_aman.wrap("fit_rms", rms_i2, overwrite=True) + epoch["solver_aman"].wrap("modeled_fits", modelfit_aman, overwrite=True) + epoch["solver_aman"].move("fit_residuals", "fit_residuals_i1") + epoch["solver_aman"].wrap("fit_residuals", fit_residuals_i2, overwrite=True) + epoch["solver_aman"].move("fit_rms", "fit_rms_i1") + epoch["solver_aman"].wrap("fit_rms", rms_i2, overwrite=True) if fit_type == "detector": - _, fit_residuals_full, rms_full, _ = pm.apply_model_params(xieta_model, solver_aman.pointing_model, pm_version, fitcheck_aman) + _, fit_residuals_full, 
rms_full, _ = apply_model_params(xieta_model, epoch["solver_aman"].pointing_model, pm_version, epoch["fitcheck_aman"]) logger.info("RMS on FULL detector set: %f arcmin", rms_full) - solver_aman.move("fit_residuals_full", "fit_residuals_full_i1") - solver_aman.move("fit_rms_full", "fit_rms_full_i1") - solver_aman.wrap("fit_residuals_full", fit_residuals_full, overwrite=True) - solver_aman.wrap("fit_rms_full", rms_full, overwrite=True) + epoch["solver_aman"].move("fit_residuals_full", "fit_residuals_full_i1") + epoch["solver_aman"].move("fit_rms_full", "fit_rms_full_i1") + epoch["solver_aman"].wrap("fit_residuals_full", fit_residuals_full, overwrite=True) + epoch["solver_aman"].wrap("fit_rms_full", rms_full, overwrite=True) if config.get("make_plots"): - tag = "_i2" - plotter = ModelFitsPlotter(solver_aman=solver_aman, + tag = f"{epoch['name']}_i2" + plotter = ModelFitsPlotter(solver_aman=epoch["solver_aman"], config=config, save_dir=save_dir, iteration_tag=tag, @@ -813,206 +873,205 @@ def main(config_path: str): plotter.plot_dets_in_these_obs() else: if config.get("make_plots"): - plotter = ModelFitsPlotter(solver_aman=solver_aman, - config=config, - save_dir=save_dir, - iteration_tag="", - save_figure=True, - plotlims=plotlims) - plotter.plot_total_residuals() + for epoch in epochs: + plotter = ModelFitsPlotter(solver_aman=epoch["solver_aman"], + config=config, + save_dir=save_dir, + iteration_tag=epoch["name"], + save_figure=True, + plotlims=plotlims) + plotter.plot_total_residuals() if config.get("save_output"): # Save .h5 and ManifestDb h5_rel = "pointing_model_data.h5" h5_filename = os.path.join(save_dir, h5_rel) - solver_aman.save(h5_filename, overwrite=True) dbfile = "db.sqlite" - t0 = config.get("begin_timerange", 0) - t1 = config.get("end_timerange", int(time.time())) - Epoch_Name = config.get("epoch_name") db = _create_db(dbfile, save_dir) - db.add_entry( - {"obs:obs_timestamp": (t0, t1), "dataset": f"{Epoch_Name}_parameters"}, - filename=h5_rel, - 
replace=True, - ) - db.to_file(os.path.join(save_dir, dbfile)) + for epoch in epochs: + epoch["solver_aman"].save(h5_filename, group=epoch["name"], overwrite=True) + db.add_entry( + {"obs:timestamp": (epoch["begin_timerange"], epoch["end_timerange"]), "dataset": f"{epoch['name']}/pointing_model"}, + filename=h5_rel, + replace=True, + ) #Optional extra plotting if config.get("make_full_analysis_plots", True): - #Fill up axis manager with ALL the data (only cuts from culling and time stamps remain) - - ( - filelist, - obs_dets_fits, - weights_dets, - all_nom_det_array, - all_det_ids, - obs_index, - ) = load_per_detector_data(config, return_all_dets=True) - ancil, roll_c = load_obs_boresight_per_detector(config, filelist, obs_index) - ufm_list = [ufm.split("_")[1] for ufm in config.get('ufms')] - - obs_info = core.AxisManager() - obs_info.wrap("obs_ids", np.array(filelist)) - - full_aman = core.AxisManager(core.IndexAxis("samps")) - full_aman.wrap("obs_info", obs_info) - full_aman.wrap("ancil", ancil) - full_aman.wrap( - "nominal_xieta_locs", all_nom_det_array.T, - [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], - [(1, "samps")], - ) - full_aman.wrap( - "measured_xieta_data", obs_dets_fits.T, - [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], - [(1, "samps")], - ) - full_aman.wrap("weights", weights_dets, [(0, "samps")]) - full_aman.wrap("obs_index", obs_index) - full_aman.wrap("roll_c", roll_c, [(0, "samps")]) - full_aman.wrap("det_ids", all_det_ids, [(0, "samps")]) - full_aman.wrap("radial", - np.sqrt(full_aman.nominal_xieta_locs[0]**2 + full_aman.nominal_xieta_locs[1]**2)/DEG, - [(0, "samps")]) - full_aman.wrap("det_ufm", - np.array([detid.decode('utf-8').split('_')[0].lower() for detid in full_aman.det_ids]) - , [(0, "samps")]) - full_aman.wrap("det_wafer", np.array([ufm_list.index(d) for d in full_aman.det_ufm]), [(0, "samps")]) - - try: - full_modeled, full_residuals, rms, _ = apply_model_params("template", - 
solver_aman.pointing_model_i1, - config.get("pm_version"), - full_aman) - except: + for epoch in epochs: + #Fill up axis manager with ALL the data (only cuts from culling and time stamps remain) + t0, tf = epoch["begin_timerange"], epoch["end_timerange"] + ( + filelist, + obs_dets_fits, + weights_dets, + all_nom_det_array, + all_det_ids, + obs_index, + ) = load_per_detector_data(config, t0, tf, return_all_dets=True) + ancil, roll_c = load_obs_boresight_per_detector(config, filelist, obs_index) + ufm_list = [ufm.split("_")[1] for ufm in config.get('ufms')] + + obs_info = core.AxisManager() + obs_info.wrap("obs_ids", np.array(filelist)) + + full_aman = core.AxisManager(core.IndexAxis("samps")) + full_aman.wrap("obs_info", obs_info) + full_aman.wrap("ancil", ancil) + full_aman.wrap( + "nominal_xieta_locs", all_nom_det_array.T, + [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], + [(1, "samps")], + ) + full_aman.wrap( + "measured_xieta_data", obs_dets_fits.T, + [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], + [(1, "samps")], + ) + full_aman.wrap("weights", weights_dets, [(0, "samps")]) + full_aman.wrap("obs_index", obs_index) + full_aman.wrap("roll_c", roll_c, [(0, "samps")]) + full_aman.wrap("det_ids", all_det_ids, [(0, "samps")]) + full_aman.wrap("radial", + np.sqrt(full_aman.nominal_xieta_locs[0]**2 + full_aman.nominal_xieta_locs[1]**2)/DEG, + [(0, "samps")]) + full_aman.wrap("det_ufm", + np.array([detid.decode('utf-8').split('_')[0].lower() for detid in full_aman.det_ids]) + , [(0, "samps")]) + full_aman.wrap("det_wafer", np.array([ufm_list.index(d) for d in full_aman.det_ufm]), [(0, "samps")]) + + # try: + # full_modeled, full_residuals, rms, _ = apply_model_params("template", + # epoch["solver_aman"].pointing_model_i1, + # config.get("pm_version"), + # full_aman) + # except: full_modeled, fit_residuals, rms, _ = apply_model_params("template", - solver_aman.pointing_model, - config.get("pm_version"), - full_aman) - - 
full_aman.wrap("full_modeled", np.array(full_modeled), - [(0, core.LabelAxis("xieta", ["xi", "eta"]))], - [(1, "samps")]) - full_aman.wrap("fit_residuals", fit_residuals, [(0, "samps")]) - del(full_modeled) - del(fit_residuals) - - (obs_az, obs_el, obs_roll, - obs_resid, obs_dxi, obs_deta, - obs_std_xi, obs_std_eta - ) = [], [], [], [], [], [], [], [] - (all_ufm_az, all_ufm_el, all_ufm_roll, - all_ufm_resid, all_ufm_dxi, all_ufm_deta, - all_ufm_std_xi, all_ufm_std_eta, all_ufm_wafer_num - ) = [], [], [], [], [], [], [], [], [] - for ob in np.unique(full_aman.obs_index): - inds = np.where(full_aman.obs_index == ob)[0] - obs_az.append(np.nanmedian(full_aman.ancil.az_enc[inds])) - obs_el.append(np.nanmedian(full_aman.ancil.el_enc[inds])) - obs_roll.append(np.nanmedian(full_aman.roll_c[inds])) - obs_resid.append(np.nanmean(full_aman.fit_residuals[inds])) - obs_dxi.append(np.nanmean((full_aman.full_modeled[0] - - full_aman.nominal_xieta_locs[0])[inds]/DEG*60)) - obs_deta.append(np.nanmean((full_aman.full_modeled[1] - - full_aman.nominal_xieta_locs[1])[inds]/DEG*60)) - obs_std_xi.append(np.nanstd((full_aman.full_modeled[0] - - full_aman.nominal_xieta_locs[0])[inds]/DEG*60)) - obs_std_eta.append(np.nanstd((full_aman.full_modeled[1] - - full_aman.nominal_xieta_locs[1])[inds]/DEG*60)) - (ufm_az, ufm_el, ufm_roll, ufm_resid, - ufm_dxi, ufm_deta, ufm_std_xi, - ufm_std_eta, ufm_wafer_num - )= [], [], [], [], [], [], [], [], [] - for ufm in ufm_list: - ufm_inds = np.where(full_aman.det_ufm[inds] == ufm)[0] - ufm_az.append(np.nanmedian(full_aman.ancil.az_enc[inds][ufm_inds])) - ufm_el.append(np.nanmedian(full_aman.ancil.el_enc[inds][ufm_inds])) - ufm_roll.append(np.nanmedian(full_aman.roll_c[inds][ufm_inds])) - ufm_resid.append(np.nanmean(full_aman.fit_residuals[inds][ufm_inds])) - ufm_dxi.append(np.nanmean((full_aman.full_modeled[0] - - full_aman.nominal_xieta_locs[0])[inds][ufm_inds]/DEG*60)) - ufm_deta.append(np.nanmean((full_aman.full_modeled[1] - - 
full_aman.nominal_xieta_locs[1])[inds][ufm_inds]/DEG*60)) - ufm_std_xi.append(np.nanstd((full_aman.full_modeled[0] - - full_aman.nominal_xieta_locs[0])[inds][ufm_inds]/DEG*60)) - ufm_std_eta.append(np.nanstd((full_aman.full_modeled[1] - - full_aman.nominal_xieta_locs[1])[inds][ufm_inds]/DEG*60)) - ufm_wafer_num.append(np.nanmedian(full_aman.det_wafer[inds][ufm_inds])) - all_ufm_az.append(ufm_az) - all_ufm_el.append(ufm_el) - all_ufm_roll.append(ufm_roll) - all_ufm_resid.append(ufm_resid) - all_ufm_deta.append(ufm_deta) - all_ufm_dxi.append(ufm_dxi) - all_ufm_std_xi.append(ufm_std_xi) - all_ufm_std_eta.append(ufm_std_eta) - all_ufm_wafer_num.append(ufm_wafer_num) + epoch["solver_aman"].pointing_model, + config.get("pm_version"), + full_aman) - per_ufm_stats = core.AxisManager() - per_obs_stats = core.AxisManager() - - per_obs_stats.wrap("el", np.array(obs_el)) - per_obs_stats.wrap("roll", np.array(obs_roll)) - per_obs_stats.wrap("az", np.array(obs_az)) - per_obs_stats.wrap("resid", np.array(obs_resid)) - per_obs_stats.wrap("dxi", np.array(obs_dxi)) - per_obs_stats.wrap("deta", np.array(obs_deta)) - per_obs_stats.wrap("std_xi", np.array(obs_std_xi)) - per_obs_stats.wrap("std_eta", np.array(obs_std_eta)) - - per_ufm_stats.wrap("az", np.array(all_ufm_az)) - per_ufm_stats.wrap("el", np.array(all_ufm_el)) - per_ufm_stats.wrap("roll", np.array(all_ufm_roll)) - per_ufm_stats.wrap("resid", np.array(all_ufm_resid)) - per_ufm_stats.wrap("dxi", np.array(all_ufm_dxi)) - per_ufm_stats.wrap("deta", np.array(all_ufm_deta)) - per_ufm_stats.wrap("std_xi", np.array(all_ufm_std_xi)) - per_ufm_stats.wrap("std_eta", np.array(all_ufm_std_eta)) - per_ufm_stats.wrap("wafer_num", np.array(all_ufm_wafer_num)) - - if platform == "lat": - obs_cr = [] - all_ufm_cr = [] + full_aman.wrap("full_modeled", np.array(full_modeled), + [(0, core.LabelAxis("xieta", ["xi", "eta"]))], + [(1, "samps")]) + full_aman.wrap("fit_residuals", fit_residuals, [(0, "samps")]) + del(full_modeled) + del(fit_residuals) 
+ + (obs_az, obs_el, obs_roll, + obs_resid, obs_dxi, obs_deta, + obs_std_xi, obs_std_eta + ) = [], [], [], [], [], [], [], [] + (all_ufm_az, all_ufm_el, all_ufm_roll, + all_ufm_resid, all_ufm_dxi, all_ufm_deta, + all_ufm_std_xi, all_ufm_std_eta, all_ufm_wafer_num + ) = [], [], [], [], [], [], [], [], [] for ob in np.unique(full_aman.obs_index): - inds = np.where(full_aman.obs_index == ob)[0] - obs_cr.append(np.nanmedian(full_aman.ancil.corotator_enc[inds])) - ufm_cr = [] + inds = np.where(full_aman.obs_index == ob)[0] + obs_az.append(np.nanmedian(full_aman.ancil.az_enc[inds])) + obs_el.append(np.nanmedian(full_aman.ancil.el_enc[inds])) + obs_roll.append(np.nanmedian(full_aman.roll_c[inds])) + obs_resid.append(np.nanmean(full_aman.fit_residuals[inds])) + obs_dxi.append(np.nanmean((full_aman.full_modeled[0] - + full_aman.nominal_xieta_locs[0])[inds]/DEG*60)) + obs_deta.append(np.nanmean((full_aman.full_modeled[1] - + full_aman.nominal_xieta_locs[1])[inds]/DEG*60)) + obs_std_xi.append(np.nanstd((full_aman.full_modeled[0] - + full_aman.nominal_xieta_locs[0])[inds]/DEG*60)) + obs_std_eta.append(np.nanstd((full_aman.full_modeled[1] - + full_aman.nominal_xieta_locs[1])[inds]/DEG*60)) + (ufm_az, ufm_el, ufm_roll, ufm_resid, + ufm_dxi, ufm_deta, ufm_std_xi, + ufm_std_eta, ufm_wafer_num + )= [], [], [], [], [], [], [], [], [] for ufm in ufm_list: - ufm_cr.append(np.nanmedian(full_aman.ancil.corotator_enc[inds][ufm_inds])) - all_ufm_cr.append(ufm_cr) - per_obs_stats.wrap("cr", np.array(obs_cr)) - per_ufm_stats.wrap("cr", np.array(all_ufm_cr)) - - full_aman.wrap("dxi", (full_aman.full_modeled[0] - - full_aman.nominal_xieta_locs[0])/DEG*60, [(0, "samps")]) - full_aman.wrap("deta", (full_aman.full_modeled[1] - - full_aman.nominal_xieta_locs[1])/DEG*60, [(0, "samps")]) - - #full_dxi_av = np.nanmean(full_dxi) - #full_deta_av = np.nanmean(full_deta) - obsids=np.array([int(D.split('_')[1]) for D in full_aman.obs_info.obs_ids]) - per_obs_stats.wrap("obsids", obsids) - 
per_ufm_stats.wrap("obsids", np.repeat(obsids, np.shape(per_ufm_stats["dxi"])[1])) - full_aman.wrap("obsids", obsids[full_aman.obs_index]) - - #Calculate RMSs - per_obs_stats.wrap("rms", np.sqrt(np.nanmean(per_obs_stats["dxi"]**2 + per_obs_stats["deta"]**2))) - per_ufm_stats.wrap("rms", np.sqrt(np.nanmean(per_ufm_stats["dxi"]**2 + per_ufm_stats["deta"]**2))) - full_aman.wrap("rms", np.sqrt(np.nanmean(full_aman["dxi"]**2 + full_aman["deta"]**2))) - full_aman.wrap("per_ufm_stats", per_ufm_stats) - full_aman.wrap("per_obs_stats", per_obs_stats) - - plotter = ModelFitsPlotter(solver_aman=full_aman, - config=config, - save_dir=save_dir, - iteration_tag="", - save_figure=True, - plotlims=plotlims) - plotter.plot_full_residuals_across_focalplane() - plotter.plot_full_histogram() - plotter.plot_full_unmodeled_residuals() + ufm_inds = np.where(full_aman.det_ufm[inds] == ufm)[0] + ufm_az.append(np.nanmedian(full_aman.ancil.az_enc[inds][ufm_inds])) + ufm_el.append(np.nanmedian(full_aman.ancil.el_enc[inds][ufm_inds])) + ufm_roll.append(np.nanmedian(full_aman.roll_c[inds][ufm_inds])) + ufm_resid.append(np.nanmean(full_aman.fit_residuals[inds][ufm_inds])) + ufm_dxi.append(np.nanmean((full_aman.full_modeled[0] - + full_aman.nominal_xieta_locs[0])[inds][ufm_inds]/DEG*60)) + ufm_deta.append(np.nanmean((full_aman.full_modeled[1] - + full_aman.nominal_xieta_locs[1])[inds][ufm_inds]/DEG*60)) + ufm_std_xi.append(np.nanstd((full_aman.full_modeled[0] - + full_aman.nominal_xieta_locs[0])[inds][ufm_inds]/DEG*60)) + ufm_std_eta.append(np.nanstd((full_aman.full_modeled[1] - + full_aman.nominal_xieta_locs[1])[inds][ufm_inds]/DEG*60)) + ufm_wafer_num.append(np.nanmedian(full_aman.det_wafer[inds][ufm_inds])) + all_ufm_az.append(ufm_az) + all_ufm_el.append(ufm_el) + all_ufm_roll.append(ufm_roll) + all_ufm_resid.append(ufm_resid) + all_ufm_deta.append(ufm_deta) + all_ufm_dxi.append(ufm_dxi) + all_ufm_std_xi.append(ufm_std_xi) + all_ufm_std_eta.append(ufm_std_eta) + 
all_ufm_wafer_num.append(ufm_wafer_num) + + per_ufm_stats = core.AxisManager() + per_obs_stats = core.AxisManager() + + per_obs_stats.wrap("el", np.array(obs_el)) + per_obs_stats.wrap("roll", np.array(obs_roll)) + per_obs_stats.wrap("az", np.array(obs_az)) + per_obs_stats.wrap("resid", np.array(obs_resid)) + per_obs_stats.wrap("dxi", np.array(obs_dxi)) + per_obs_stats.wrap("deta", np.array(obs_deta)) + per_obs_stats.wrap("std_xi", np.array(obs_std_xi)) + per_obs_stats.wrap("std_eta", np.array(obs_std_eta)) + + per_ufm_stats.wrap("az", np.array(all_ufm_az)) + per_ufm_stats.wrap("el", np.array(all_ufm_el)) + per_ufm_stats.wrap("roll", np.array(all_ufm_roll)) + per_ufm_stats.wrap("resid", np.array(all_ufm_resid)) + per_ufm_stats.wrap("dxi", np.array(all_ufm_dxi)) + per_ufm_stats.wrap("deta", np.array(all_ufm_deta)) + per_ufm_stats.wrap("std_xi", np.array(all_ufm_std_xi)) + per_ufm_stats.wrap("std_eta", np.array(all_ufm_std_eta)) + per_ufm_stats.wrap("wafer_num", np.array(all_ufm_wafer_num)) + + if platform == "lat": + obs_cr = [] + all_ufm_cr = [] + for ob in np.unique(full_aman.obs_index): + inds = np.where(full_aman.obs_index == ob)[0] + obs_cr.append(np.nanmedian(full_aman.ancil.corotator_enc[inds])) + ufm_cr = [] + for ufm in ufm_list: + ufm_cr.append(np.nanmedian(full_aman.ancil.corotator_enc[inds][ufm_inds])) + all_ufm_cr.append(ufm_cr) + per_obs_stats.wrap("cr", np.array(obs_cr)) + per_ufm_stats.wrap("cr", np.array(all_ufm_cr)) + + full_aman.wrap("dxi", (full_aman.full_modeled[0] - + full_aman.nominal_xieta_locs[0])/DEG*60, [(0, "samps")]) + full_aman.wrap("deta", (full_aman.full_modeled[1] - + full_aman.nominal_xieta_locs[1])/DEG*60, [(0, "samps")]) + + #full_dxi_av = np.nanmean(full_dxi) + #full_deta_av = np.nanmean(full_deta) + obsids=np.array([int(D.split('_')[1]) for D in full_aman.obs_info.obs_ids]) + per_obs_stats.wrap("obsids", obsids) + per_ufm_stats.wrap("obsids", np.repeat(obsids, np.shape(per_ufm_stats["dxi"])[1])) + full_aman.wrap("obsids", 
obsids[full_aman.obs_index]) + + #Calculate RMSs + per_obs_stats.wrap("rms", np.sqrt(np.nanmean(per_obs_stats["dxi"]**2 + per_obs_stats["deta"]**2))) + per_ufm_stats.wrap("rms", np.sqrt(np.nanmean(per_ufm_stats["dxi"]**2 + per_ufm_stats["deta"]**2))) + full_aman.wrap("rms", np.sqrt(np.nanmean(full_aman["dxi"]**2 + full_aman["deta"]**2))) + full_aman.wrap("per_ufm_stats", per_ufm_stats) + full_aman.wrap("per_obs_stats", per_obs_stats) + + plotter = ModelFitsPlotter(solver_aman=full_aman, + config=config, + save_dir=save_dir, + iteration_tag=epoch["name"], + save_figure=True, + plotlims=plotlims) + plotter.plot_full_residuals_across_focalplane() + plotter.plot_full_histogram() + plotter.plot_full_unmodeled_residuals() logger.info("Done") From f60187289b716c378df32f2627ddf6fce594e418 Mon Sep 17 00:00:00 2001 From: Elle Shaw Date: Wed, 6 Aug 2025 14:26:16 -0700 Subject: [PATCH 29/48] Added just_test_params functionality to only test initial params against full dataset. --- .../site_pipeline/solve_pointing_model.py | 402 ++++++++++-------- 1 file changed, 225 insertions(+), 177 deletions(-) diff --git a/sotodlib/site_pipeline/solve_pointing_model.py b/sotodlib/site_pipeline/solve_pointing_model.py index 2a7c4a520..d371ee90c 100644 --- a/sotodlib/site_pipeline/solve_pointing_model.py +++ b/sotodlib/site_pipeline/solve_pointing_model.py @@ -439,6 +439,178 @@ def _round_params(param_dict, decimal): return P +def analyze_PM_with_all_dets(config, t0, tf, params): + platform = config.get("platform") + + (filelist, obs_dets_fits, + weights_dets, all_nom_det_array, + all_det_ids, obs_index, + ) = load_per_detector_data(config, t0, tf, return_all_dets=True) + + ancil, roll_c = load_obs_boresight_per_detector(config, filelist, obs_index) + ufm_list = [ufm.split("_")[1] for ufm in config.get('ufms')] + + obs_info = core.AxisManager() + obs_info.wrap("obs_ids", np.array(filelist)) + full_aman = core.AxisManager(core.IndexAxis("samps")) + full_aman.wrap("obs_info", obs_info) + 
full_aman.wrap("ancil", ancil) + full_aman.wrap( + "nominal_xieta_locs", all_nom_det_array.T, + [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], + [(1, "samps")], + ) + full_aman.wrap( + "measured_xieta_data", obs_dets_fits.T, + [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], + [(1, "samps")], + ) + full_aman.wrap("weights", weights_dets, [(0, "samps")]) + full_aman.wrap("obs_index", obs_index) + full_aman.wrap("roll_c", roll_c, [(0, "samps")]) + full_aman.wrap("det_ids", all_det_ids, [(0, "samps")]) + full_aman.wrap("radial", + np.sqrt(full_aman.nominal_xieta_locs[0]**2 + + full_aman.nominal_xieta_locs[1]**2)/DEG, + [(0, "samps")]) + full_aman.wrap("det_ufm", + np.array([detid.decode('utf-8').split('_')[0].lower() for detid in full_aman.det_ids]) + , [(0, "samps")]) + full_aman.wrap("det_wafer", + np.array([ufm_list.index(d) for d in full_aman.det_ufm]), + [(0, "samps")]) + # Apply model to data. + (full_modeled, full_residuals, rms, _ + ) = apply_model_params("template", + params, + config.get("pm_version"), + full_aman) + full_aman.wrap("full_modeled", np.array(full_modeled), + [(0, core.LabelAxis("xieta", ["xi", "eta"]))], + [(1, "samps")]) + full_aman.wrap("fit_residuals", full_residuals, [(0, "samps")]) + del(full_modeled) + del(full_residuals) + + (obs_az, obs_el, obs_roll, obs_resid, + obs_dxi, obs_deta, obs_std_xi, obs_std_eta + ) = [], [], [], [], [], [], [], [] + (all_ufm_az, all_ufm_el, all_ufm_roll, all_ufm_resid, + all_ufm_dxi, all_ufm_deta, all_ufm_std_xi, + all_ufm_std_eta, all_ufm_wafer_num + ) = [], [], [], [], [], [], [], [], [] + for ob in np.unique(full_aman.obs_index): + inds = np.where(full_aman.obs_index == ob)[0] + obs_az.append(np.nanmedian(full_aman.ancil.az_enc[inds])) + obs_el.append(np.nanmedian(full_aman.ancil.el_enc[inds])) + obs_roll.append(np.nanmedian(full_aman.roll_c[inds])) + obs_resid.append(np.nanmean(full_aman.fit_residuals[inds])) + obs_dxi.append(np.nanmean((full_aman.full_modeled[0] - + 
full_aman.nominal_xieta_locs[0])[inds]/DEG*60)) + obs_deta.append(np.nanmean((full_aman.full_modeled[1] - + full_aman.nominal_xieta_locs[1])[inds]/DEG*60)) + obs_std_xi.append(np.nanstd((full_aman.full_modeled[0] - + full_aman.nominal_xieta_locs[0])[inds]/DEG*60)) + obs_std_eta.append(np.nanstd((full_aman.full_modeled[1] - + full_aman.nominal_xieta_locs[1])[inds]/DEG*60)) + (ufm_az, ufm_el, ufm_roll, ufm_resid, + ufm_dxi, ufm_deta, ufm_std_xi, + ufm_std_eta, ufm_wafer_num + )= [], [], [], [], [], [], [], [], [] + for ufm in ufm_list: + ufm_inds = np.where(full_aman.det_ufm[inds] == ufm)[0] + ufm_az.append(np.nanmedian(full_aman.ancil.az_enc[inds][ufm_inds])) + ufm_el.append(np.nanmedian(full_aman.ancil.el_enc[inds][ufm_inds])) + ufm_roll.append(np.nanmedian(full_aman.roll_c[inds][ufm_inds])) + ufm_resid.append(np.nanmean(full_aman.fit_residuals[inds][ufm_inds])) + ufm_dxi.append(np.nanmean((full_aman.full_modeled[0] - + full_aman.nominal_xieta_locs[0])[inds][ufm_inds]/DEG*60)) + ufm_deta.append(np.nanmean( + (full_aman.full_modeled[1] - + full_aman.nominal_xieta_locs[1])[inds][ufm_inds]/DEG*60)) + ufm_std_xi.append(np.nanstd( + (full_aman.full_modeled[0] - + full_aman.nominal_xieta_locs[0])[inds][ufm_inds]/DEG*60)) + ufm_std_eta.append(np.nanstd( + (full_aman.full_modeled[1] - + full_aman.nominal_xieta_locs[1])[inds][ufm_inds]/DEG*60)) + ufm_wafer_num.append(np.nanmedian(full_aman.det_wafer[inds][ufm_inds])) + all_ufm_az.append(ufm_az) + all_ufm_el.append(ufm_el) + all_ufm_roll.append(ufm_roll) + all_ufm_resid.append(ufm_resid) + all_ufm_deta.append(ufm_deta) + all_ufm_dxi.append(ufm_dxi) + all_ufm_std_xi.append(ufm_std_xi) + all_ufm_std_eta.append(ufm_std_eta) + all_ufm_wafer_num.append(ufm_wafer_num) + + per_ufm_stats = core.AxisManager() + per_obs_stats = core.AxisManager() + per_obs_stats.wrap("el", np.array(obs_el)) + per_obs_stats.wrap("roll", np.array(obs_roll)) + per_obs_stats.wrap("az", np.array(obs_az)) + per_obs_stats.wrap("resid", np.array(obs_resid)) + 
per_obs_stats.wrap("dxi", np.array(obs_dxi)) + per_obs_stats.wrap("deta", np.array(obs_deta)) + per_obs_stats.wrap("std_xi", np.array(obs_std_xi)) + per_obs_stats.wrap("std_eta", np.array(obs_std_eta)) + per_ufm_stats.wrap("az", np.array(all_ufm_az)) + per_ufm_stats.wrap("el", np.array(all_ufm_el)) + per_ufm_stats.wrap("roll", np.array(all_ufm_roll)) + per_ufm_stats.wrap("resid", np.array(all_ufm_resid)) + per_ufm_stats.wrap("dxi", np.array(all_ufm_dxi)) + per_ufm_stats.wrap("deta", np.array(all_ufm_deta)) + per_ufm_stats.wrap("std_xi", np.array(all_ufm_std_xi)) + per_ufm_stats.wrap("std_eta", np.array(all_ufm_std_eta)) + per_ufm_stats.wrap("wafer_num", np.array(all_ufm_wafer_num)) + + if platform == "lat": + obs_cr = [] + all_ufm_cr = [] + for ob in np.unique(full_aman.obs_index): + inds = np.where(full_aman.obs_index == ob)[0] + obs_cr.append(np.nanmedian(full_aman.ancil.corotator_enc[inds])) + ufm_cr = [] + for ufm in ufm_list: + ufm_cr.append(np.nanmedian(full_aman.ancil.corotator_enc[inds][ufm_inds])) + all_ufm_cr.append(ufm_cr) + per_obs_stats.wrap("cr", np.array(obs_cr)) + per_ufm_stats.wrap("cr", np.array(all_ufm_cr)) + + full_aman.wrap("dxi", + (full_aman.full_modeled[0] - + full_aman.nominal_xieta_locs[0])/DEG*60, + [(0, "samps")]) + full_aman.wrap("deta", + (full_aman.full_modeled[1] - + full_aman.nominal_xieta_locs[1])/DEG*60, + [(0, "samps")]) + obsids=np.array([int(D.split('_')[1]) for D in full_aman.obs_info.obs_ids]) + per_obs_stats.wrap("obsids", obsids) + per_ufm_stats.wrap("obsids", + np.repeat(obsids, + np.shape(per_ufm_stats["dxi"])[1]) + ) + full_aman.wrap("obsids", obsids[full_aman.obs_index]) + + #Calculate RMSs + per_obs_stats.wrap("rms", + np.sqrt(np.nanmean(per_obs_stats["dxi"]**2 + per_obs_stats["deta"]**2)) + ) + per_ufm_stats.wrap("rms", + np.sqrt(np.nanmean(per_ufm_stats["dxi"]**2 + + per_ufm_stats["deta"]**2)) + ) + full_aman.wrap("rms", + np.sqrt(np.nanmean(full_aman["dxi"]**2 + + full_aman["deta"]**2)) + ) + 
full_aman.wrap("per_ufm_stats", per_ufm_stats) + full_aman.wrap("per_obs_stats", per_obs_stats) + + return full_aman + def _create_db(filename, save_dir): db_filename = os.path.join(save_dir, filename) # Get Database ready @@ -508,13 +680,7 @@ def main(config_path: str): Only "measured" or "template" accepted' ) exit - logger.info( - "Pointing model will try to replicate (model) the %s data.", xieta_model - ) - - ########################################################## - ### Begin split for per-detector or per-UFM center fitting - ########################################################## + epochs = config.get("epochs") for epoch in epochs: if "name" not in epoch: @@ -523,7 +689,40 @@ def main(config_path: str): epoch["indep_list"] = [] if epochs is None: raise ValueError("No epochs provided") + + if config.get("just_test_params"): + logger.info("Will test initial_parameters against pointing data.") + for epoch in epochs: + t0, tf = epoch["begin_timerange"], epoch["end_timerange"] + + test_params, epochs = _init_fit_params(config, epochs) + logger.info("Using these paramters to test pointing: ") + for p in test_params: + logger.info(f"{p}: {test_params[p].value}") + #logger.info(test_params) + full_aman = analyze_PM_with_all_dets(config, t0, tf, test_params) + logger.info(f"for epoch {epoch["name"]}") + logger.info(f"full rms: {full_aman.rms:.3f} (arcmin) ") + logger.info(f"obs_rms: {full_aman.per_obs_stats.rms:.3f} (arcmin) ") + logger.info(f"ufm_rms: {full_aman.per_ufm_stats.rms:.3f} (arcmin) ") + plotter = ModelFitsPlotter(solver_aman=full_aman, + config=config, + save_dir=save_dir, + iteration_tag=f"_{epoch["name"]}", + save_figure=True, + plotlims=plotlims) + plotter.plot_full_residuals_across_focalplane() + plotter.plot_full_histogram() + plotter.plot_full_unmodeled_residuals() + logger.info("done") + + logger.info( + "Pointing model will try to replicate (model) the %s data.", xieta_model + ) fit_type = config.get("fit_type", "detector") + 
########################################################## + ### Begin split for per-detector or per-UFM center fitting + ########################################################## if fit_type == "detector": which_ufm = config.get("which_ufm", None) @@ -719,12 +918,11 @@ def main(config_path: str): cutoff = np.nanstd(fit_residuals_i1)*2 + np.nanmedian(fit_residuals_i1) logger.info(f"2 stdev away from residual Median: {cutoff:.2f} arcmin") - if config.get("make_plots"): - tag = f"{epoch['name']}_i1" + if config.get("make_plots"): plotter = ModelFitsPlotter(solver_aman=epoch["solver_aman"], config=config, save_dir=save_dir, - iteration_tag=tag, + iteration_tag=f"_{epoch['name']}_i1", save_figure=True, plotlims=plotlims) if fit_type == "ufm_center": @@ -852,7 +1050,7 @@ def main(config_path: str): epoch["solver_aman"].wrap("fit_rms_full", rms_full, overwrite=True) if config.get("make_plots"): - tag = f"{epoch['name']}_i2" + tag = f"_{epoch['name']}_i2" plotter = ModelFitsPlotter(solver_aman=epoch["solver_aman"], config=config, save_dir=save_dir, @@ -877,7 +1075,7 @@ def main(config_path: str): plotter = ModelFitsPlotter(solver_aman=epoch["solver_aman"], config=config, save_dir=save_dir, - iteration_tag=epoch["name"], + iteration_tag=f"_{epoch["name"]}", save_figure=True, plotlims=plotlims) plotter.plot_total_residuals() @@ -901,177 +1099,27 @@ def main(config_path: str): for epoch in epochs: #Fill up axis manager with ALL the data (only cuts from culling and time stamps remain) t0, tf = epoch["begin_timerange"], epoch["end_timerange"] - ( - filelist, - obs_dets_fits, - weights_dets, - all_nom_det_array, - all_det_ids, - obs_index, - ) = load_per_detector_data(config, t0, tf, return_all_dets=True) - ancil, roll_c = load_obs_boresight_per_detector(config, filelist, obs_index) - ufm_list = [ufm.split("_")[1] for ufm in config.get('ufms')] - - obs_info = core.AxisManager() - obs_info.wrap("obs_ids", np.array(filelist)) - full_aman = 
core.AxisManager(core.IndexAxis("samps")) - full_aman.wrap("obs_info", obs_info) - full_aman.wrap("ancil", ancil) - full_aman.wrap( - "nominal_xieta_locs", all_nom_det_array.T, - [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], - [(1, "samps")], - ) - full_aman.wrap( - "measured_xieta_data", obs_dets_fits.T, - [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], - [(1, "samps")], - ) - full_aman.wrap("weights", weights_dets, [(0, "samps")]) - full_aman.wrap("obs_index", obs_index) - full_aman.wrap("roll_c", roll_c, [(0, "samps")]) - full_aman.wrap("det_ids", all_det_ids, [(0, "samps")]) - full_aman.wrap("radial", - np.sqrt(full_aman.nominal_xieta_locs[0]**2 + full_aman.nominal_xieta_locs[1]**2)/DEG, - [(0, "samps")]) - full_aman.wrap("det_ufm", - np.array([detid.decode('utf-8').split('_')[0].lower() for detid in full_aman.det_ids]) - , [(0, "samps")]) - full_aman.wrap("det_wafer", np.array([ufm_list.index(d) for d in full_aman.det_ufm]), [(0, "samps")]) - - # try: - # full_modeled, full_residuals, rms, _ = apply_model_params("template", - # epoch["solver_aman"].pointing_model_i1, - # config.get("pm_version"), - # full_aman) - # except: - full_modeled, fit_residuals, rms, _ = apply_model_params("template", - epoch["solver_aman"].pointing_model, - config.get("pm_version"), - full_aman) - - full_aman.wrap("full_modeled", np.array(full_modeled), - [(0, core.LabelAxis("xieta", ["xi", "eta"]))], - [(1, "samps")]) - full_aman.wrap("fit_residuals", fit_residuals, [(0, "samps")]) - del(full_modeled) - del(fit_residuals) - - (obs_az, obs_el, obs_roll, - obs_resid, obs_dxi, obs_deta, - obs_std_xi, obs_std_eta - ) = [], [], [], [], [], [], [], [] - (all_ufm_az, all_ufm_el, all_ufm_roll, - all_ufm_resid, all_ufm_dxi, all_ufm_deta, - all_ufm_std_xi, all_ufm_std_eta, all_ufm_wafer_num - ) = [], [], [], [], [], [], [], [], [] - for ob in np.unique(full_aman.obs_index): - inds = np.where(full_aman.obs_index == ob)[0] - 
obs_az.append(np.nanmedian(full_aman.ancil.az_enc[inds])) - obs_el.append(np.nanmedian(full_aman.ancil.el_enc[inds])) - obs_roll.append(np.nanmedian(full_aman.roll_c[inds])) - obs_resid.append(np.nanmean(full_aman.fit_residuals[inds])) - obs_dxi.append(np.nanmean((full_aman.full_modeled[0] - - full_aman.nominal_xieta_locs[0])[inds]/DEG*60)) - obs_deta.append(np.nanmean((full_aman.full_modeled[1] - - full_aman.nominal_xieta_locs[1])[inds]/DEG*60)) - obs_std_xi.append(np.nanstd((full_aman.full_modeled[0] - - full_aman.nominal_xieta_locs[0])[inds]/DEG*60)) - obs_std_eta.append(np.nanstd((full_aman.full_modeled[1] - - full_aman.nominal_xieta_locs[1])[inds]/DEG*60)) - (ufm_az, ufm_el, ufm_roll, ufm_resid, - ufm_dxi, ufm_deta, ufm_std_xi, - ufm_std_eta, ufm_wafer_num - )= [], [], [], [], [], [], [], [], [] - for ufm in ufm_list: - ufm_inds = np.where(full_aman.det_ufm[inds] == ufm)[0] - ufm_az.append(np.nanmedian(full_aman.ancil.az_enc[inds][ufm_inds])) - ufm_el.append(np.nanmedian(full_aman.ancil.el_enc[inds][ufm_inds])) - ufm_roll.append(np.nanmedian(full_aman.roll_c[inds][ufm_inds])) - ufm_resid.append(np.nanmean(full_aman.fit_residuals[inds][ufm_inds])) - ufm_dxi.append(np.nanmean((full_aman.full_modeled[0] - - full_aman.nominal_xieta_locs[0])[inds][ufm_inds]/DEG*60)) - ufm_deta.append(np.nanmean((full_aman.full_modeled[1] - - full_aman.nominal_xieta_locs[1])[inds][ufm_inds]/DEG*60)) - ufm_std_xi.append(np.nanstd((full_aman.full_modeled[0] - - full_aman.nominal_xieta_locs[0])[inds][ufm_inds]/DEG*60)) - ufm_std_eta.append(np.nanstd((full_aman.full_modeled[1] - - full_aman.nominal_xieta_locs[1])[inds][ufm_inds]/DEG*60)) - ufm_wafer_num.append(np.nanmedian(full_aman.det_wafer[inds][ufm_inds])) - all_ufm_az.append(ufm_az) - all_ufm_el.append(ufm_el) - all_ufm_roll.append(ufm_roll) - all_ufm_resid.append(ufm_resid) - all_ufm_deta.append(ufm_deta) - all_ufm_dxi.append(ufm_dxi) - all_ufm_std_xi.append(ufm_std_xi) - all_ufm_std_eta.append(ufm_std_eta) - 
all_ufm_wafer_num.append(ufm_wafer_num) - - per_ufm_stats = core.AxisManager() - per_obs_stats = core.AxisManager() - - per_obs_stats.wrap("el", np.array(obs_el)) - per_obs_stats.wrap("roll", np.array(obs_roll)) - per_obs_stats.wrap("az", np.array(obs_az)) - per_obs_stats.wrap("resid", np.array(obs_resid)) - per_obs_stats.wrap("dxi", np.array(obs_dxi)) - per_obs_stats.wrap("deta", np.array(obs_deta)) - per_obs_stats.wrap("std_xi", np.array(obs_std_xi)) - per_obs_stats.wrap("std_eta", np.array(obs_std_eta)) - - per_ufm_stats.wrap("az", np.array(all_ufm_az)) - per_ufm_stats.wrap("el", np.array(all_ufm_el)) - per_ufm_stats.wrap("roll", np.array(all_ufm_roll)) - per_ufm_stats.wrap("resid", np.array(all_ufm_resid)) - per_ufm_stats.wrap("dxi", np.array(all_ufm_dxi)) - per_ufm_stats.wrap("deta", np.array(all_ufm_deta)) - per_ufm_stats.wrap("std_xi", np.array(all_ufm_std_xi)) - per_ufm_stats.wrap("std_eta", np.array(all_ufm_std_eta)) - per_ufm_stats.wrap("wafer_num", np.array(all_ufm_wafer_num)) - - if platform == "lat": - obs_cr = [] - all_ufm_cr = [] - for ob in np.unique(full_aman.obs_index): - inds = np.where(full_aman.obs_index == ob)[0] - obs_cr.append(np.nanmedian(full_aman.ancil.corotator_enc[inds])) - ufm_cr = [] - for ufm in ufm_list: - ufm_cr.append(np.nanmedian(full_aman.ancil.corotator_enc[inds][ufm_inds])) - all_ufm_cr.append(ufm_cr) - per_obs_stats.wrap("cr", np.array(obs_cr)) - per_ufm_stats.wrap("cr", np.array(all_ufm_cr)) - - full_aman.wrap("dxi", (full_aman.full_modeled[0] - - full_aman.nominal_xieta_locs[0])/DEG*60, [(0, "samps")]) - full_aman.wrap("deta", (full_aman.full_modeled[1] - - full_aman.nominal_xieta_locs[1])/DEG*60, [(0, "samps")]) - - #full_dxi_av = np.nanmean(full_dxi) - #full_deta_av = np.nanmean(full_deta) - obsids=np.array([int(D.split('_')[1]) for D in full_aman.obs_info.obs_ids]) - per_obs_stats.wrap("obsids", obsids) - per_ufm_stats.wrap("obsids", np.repeat(obsids, np.shape(per_ufm_stats["dxi"])[1])) - full_aman.wrap("obsids", 
obsids[full_aman.obs_index]) - - #Calculate RMSs - per_obs_stats.wrap("rms", np.sqrt(np.nanmean(per_obs_stats["dxi"]**2 + per_obs_stats["deta"]**2))) - per_ufm_stats.wrap("rms", np.sqrt(np.nanmean(per_ufm_stats["dxi"]**2 + per_ufm_stats["deta"]**2))) - full_aman.wrap("rms", np.sqrt(np.nanmean(full_aman["dxi"]**2 + full_aman["deta"]**2))) - full_aman.wrap("per_ufm_stats", per_ufm_stats) - full_aman.wrap("per_obs_stats", per_obs_stats) + if "pointing_model_i1" in solver_aman: + test_params = epoch["solver_aman"].pointing_model_i1 + else: + test_params = epoch["solver_aman"].pointing_model + full_aman = analyze_PM_with_all_dets(config, t0, tf, test_params) + logger.info(f"for this epoch: {epoch["name"]}") + logger.info(f"full rms: {full_aman.rms:.3f} (arcmin) ") + logger.info(f"obs_rms: {full_aman.per_obs_stats.rms:.3f} (arcmin) ") + logger.info(f"ufm_rms: {full_aman.per_ufm_stats.rms:.3f} (arcmin) ") plotter = ModelFitsPlotter(solver_aman=full_aman, - config=config, - save_dir=save_dir, - iteration_tag=epoch["name"], - save_figure=True, - plotlims=plotlims) + config=config, + save_dir=save_dir, + iteration_tag=f"_{epoch["name"]}", + save_figure=True, + plotlims=plotlims) plotter.plot_full_residuals_across_focalplane() plotter.plot_full_histogram() plotter.plot_full_unmodeled_residuals() + logger.info("Done") From 954bdd5502183cb20faaa5a620ee1abebe16b8e1 Mon Sep 17 00:00:00 2001 From: Elle Shaw Date: Mon, 25 Aug 2025 14:12:29 -0700 Subject: [PATCH 30/48] Made small changes to make code be compatible with latest master branch. 
--- sotodlib/coords/brightsrc_pointing.py | 2 +- sotodlib/preprocess/processes.py | 3 ++- sotodlib/site_pipeline/get_brightsrc_pointing_step2.py | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/sotodlib/coords/brightsrc_pointing.py b/sotodlib/coords/brightsrc_pointing.py index 5985da6fe..8330fce1f 100644 --- a/sotodlib/coords/brightsrc_pointing.py +++ b/sotodlib/coords/brightsrc_pointing.py @@ -107,7 +107,7 @@ def get_wafer_xieta(wafer_slot, optics_config_fn, xieta_bs_offset=(0., 0.), wafer_r = np.sqrt(wafer_x**2 + wafer_y**2) wafer_theta = np.arctan2(wafer_y, wafer_x) - fp_to_sky = optics.sat_to_sky(optics.SAT_X, optics.SAT_LON) + fp_to_sky = optics.sat_to_sky(optics.SAT_R_FP, optics.SAT_R_SKY) lon = fp_to_sky(wafer_r) q1 = quat.rotation_iso(lon, 0) diff --git a/sotodlib/preprocess/processes.py b/sotodlib/preprocess/processes.py index ad274bf9d..cbfd0f7a0 100644 --- a/sotodlib/preprocess/processes.py +++ b/sotodlib/preprocess/processes.py @@ -1705,7 +1705,7 @@ def process(self, aman, proc_aman, sim=False): class ReduceFlags(_Preprocess): name = 'reduce_flags' - def process(self, aman, proc_aman): + def process(self, aman, proc_aman, sim=False): aman.flags.reduce(**self.process_cfgs) class DetcalNanCuts(_Preprocess): @@ -2506,6 +2506,7 @@ def save(self, proc_aman, flag_aman): if self.save_cfgs: proc_aman.wrap("smurfgaps", flag_aman) +_Preprocess.register(ReduceFlags) _Preprocess.register(SplitFlags) _Preprocess.register(SubtractT2P) _Preprocess.register(EstimateT2P) diff --git a/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py b/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py index 6081d1800..89354f9d3 100644 --- a/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py +++ b/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py @@ -70,7 +70,7 @@ def wrapper_gaussian2d_nonlin(xieta, xi0, eta0, fwhm_xi, fwhm_eta, phi, a, *args return gaussian2d_nonlin(xieta, xi0, eta0, fwhm_xi, fwhm_eta, phi, a, nonlin_coeffs) def wrap_fp_rset(tod, 
fp_rset): - tod.restrict('dets', tod.dets.vals[np.in1d(tod.dets.vals, fp_rset['dets:readout_id'])]) + tod.restrict('dets', tod.dets.vals[np.isin(tod.dets.vals, fp_rset['dets:readout_id'])]) focal_plane = core.AxisManager(tod.dets) focal_plane.wrap_new('xi', shape=('dets', )) focal_plane.wrap_new('eta', shape=('dets', )) From 092b2ab8b8fc7b413b0751bdff06ce0c29de3f18 Mon Sep 17 00:00:00 2001 From: Elle Shaw Date: Fri, 29 Aug 2025 12:28:38 -0700 Subject: [PATCH 31/48] Removed offensive square brackets in a get_obs call --- sotodlib/site_pipeline/get_brightsrc_pointing_step1.py | 2 +- sotodlib/site_pipeline/get_brightsrc_pointing_step2.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/sotodlib/site_pipeline/get_brightsrc_pointing_step1.py b/sotodlib/site_pipeline/get_brightsrc_pointing_step1.py index 2b7aea511..331165ce6 100644 --- a/sotodlib/site_pipeline/get_brightsrc_pointing_step1.py +++ b/sotodlib/site_pipeline/get_brightsrc_pointing_step1.py @@ -201,7 +201,7 @@ def main_one_obs(configs, obs_id, sso_name=None, logger.info(f'sso_names of {sso_names} are found from observation tags.' + f'Processing only {sso_name}') - tod = ctx.get_obs(obs_id, dets=[]) + tod = ctx.get_obs(obs_id, no_signal=True) streamed_wafer_slots = ['ws{}'.format(index) for index, bit in enumerate(obs_id.split('_')[-1]) if bit == '1'] processed_wafer_slots = [] finished_wafer_slots = [] diff --git a/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py b/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py index 89354f9d3..6842cbce6 100644 --- a/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py +++ b/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py @@ -453,7 +453,7 @@ def main_one_obs(configs, obs_id, sso_name=None, logger.info(f'sso_names of {sso_names} are found from observation tags.' 
+ f'Processing only {sso_name}') - tod = ctx.get_obs(obs_id, dets=[]) + tod = ctx.get_obs(obs_id, no_signal=True) streamed_wafer_slots = ['ws{}'.format(index) for index, bit in enumerate(obs_id.split('_')[-1]) if bit == '1'] processed_wafer_slots = [] finished_wafer_slots = [] From 951de7a33dee74fcef02442599447bce891e04e2 Mon Sep 17 00:00:00 2001 From: Elle Shaw Date: Wed, 3 Sep 2025 12:05:52 -0700 Subject: [PATCH 32/48] add saturn to possible source objects --- sotodlib/site_pipeline/get_brightsrc_pointing_step1.py | 4 ++-- sotodlib/site_pipeline/get_brightsrc_pointing_step2.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/sotodlib/site_pipeline/get_brightsrc_pointing_step1.py b/sotodlib/site_pipeline/get_brightsrc_pointing_step1.py index 331165ce6..17d99f039 100644 --- a/sotodlib/site_pipeline/get_brightsrc_pointing_step1.py +++ b/sotodlib/site_pipeline/get_brightsrc_pointing_step1.py @@ -17,7 +17,7 @@ from sotodlib.preprocess import Pipeline logger = util.init_logger(__name__, 'make_map_based_pointing: ') -def _get_sso_names_from_tags(ctx, obs_id, candidate_names=['moon', 'jupiter', 'mars']): +def _get_sso_names_from_tags(ctx, obs_id, candidate_names=['moon', 'jupiter', 'mars', 'saturn']): obs_tags = ctx.obsdb.get(obs_id, tags=True)['tags'] sso_names = [] for _name in candidate_names: @@ -225,7 +225,7 @@ def main_one_obs(configs, obs_id, sso_name=None, logger.info(f'Found saved data for these wafer_slots: {finished_wafer_slots}') logger.info(f'Will continue for these wafer_slots: {processed_wafer_slots}') - + logger.info("using filelock") if configs.get('parallel_job'): logger.info('Continuing with parallel job') try: diff --git a/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py b/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py index 6842cbce6..20ba5b754 100644 --- a/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py +++ b/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py @@ -22,7 +22,7 @@ from sotodlib.preprocess 
import Pipeline logger = util.init_logger(__name__, 'update_pointing: ') -def _get_sso_names_from_tags(ctx, obs_id, candidate_names=['moon', 'jupiter']): +def _get_sso_names_from_tags(ctx, obs_id, candidate_names=['moon', 'jupiter', 'mars', 'saturn']): obs_tags = ctx.obsdb.get(obs_id, tags=True)['tags'] sso_names = [] for _name in candidate_names: From 2a38cb3ecc46eb25785100d85dbfeafbbf7a156c Mon Sep 17 00:00:00 2001 From: Elle Shaw Date: Thu, 11 Sep 2025 07:41:41 -0700 Subject: [PATCH 33/48] removed obsolete logger entry --- sotodlib/site_pipeline/get_brightsrc_pointing_step1.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sotodlib/site_pipeline/get_brightsrc_pointing_step1.py b/sotodlib/site_pipeline/get_brightsrc_pointing_step1.py index 17d99f039..e29c3bc76 100644 --- a/sotodlib/site_pipeline/get_brightsrc_pointing_step1.py +++ b/sotodlib/site_pipeline/get_brightsrc_pointing_step1.py @@ -225,7 +225,7 @@ def main_one_obs(configs, obs_id, sso_name=None, logger.info(f'Found saved data for these wafer_slots: {finished_wafer_slots}') logger.info(f'Will continue for these wafer_slots: {processed_wafer_slots}') - logger.info("using filelock") + if configs.get('parallel_job'): logger.info('Continuing with parallel job') try: From 512d4eb085bf124080157473cd669120f5c92405 Mon Sep 17 00:00:00 2001 From: Elle Shaw Date: Wed, 19 Nov 2025 14:31:42 -0800 Subject: [PATCH 34/48] fixed bug on including a list of detectors to debug with --- sotodlib/site_pipeline/get_brightsrc_pointing_step2.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py b/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py index 20ba5b754..b648010a1 100644 --- a/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py +++ b/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py @@ -579,7 +579,7 @@ def get_parser(): parser.add_argument("--sso_name", type=str, default=None, help="Name of solar system object (e.g., 
'moon', 'jupiter'). If not specified, get sso_name from observation tags. "\ + "Valid only when obs_id is specified") - parser.add_argument("--restrict_dets_for_debug", type=int, default=False) + parser.add_argument("--restrict_dets_for_debug", type=str, default=False) return parser if __name__ == '__main__': From a7fef706a50a49eacdb20a4ea2578958a2eb0070 Mon Sep 17 00:00:00 2001 From: Saianeesh Keshav Haridas Date: Sun, 26 Oct 2025 22:25:25 -0700 Subject: [PATCH 35/48] fix: fix supplying only some init pars and plotting when culled --- sotodlib/site_pipeline/solve_pointing_model.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/sotodlib/site_pipeline/solve_pointing_model.py b/sotodlib/site_pipeline/solve_pointing_model.py index d371ee90c..946ceb5ab 100644 --- a/sotodlib/site_pipeline/solve_pointing_model.py +++ b/sotodlib/site_pipeline/solve_pointing_model.py @@ -284,8 +284,10 @@ def load_obs_boresight_per_detector(config, filelist, obs_ind): def _init_fit_params(config, epochs): pm_version = config.get("pm_version") - init_params = config.get("initial_params", pm.param_defaults[pm_version]) + initial_params = config.get("initial_params", {}) fixed_params = config.get("fixed_params",[]) + init_params = pm.param_defaults[pm_version] + init_params.update(initial_params) # Add independant params orig_pars = np.array(list(init_params.keys())) @@ -293,7 +295,7 @@ def _init_fit_params(config, epochs): for epoch in epochs: indep_list = epoch["indep_list"] if np.sum(np.isin(indep_list, par_list)) != len(indep_list): - raise ValueError(f"Invalid independant parameters in time range starting with {t0}") + raise ValueError(f"Invalid independant parameters in epoch {epoch['name']}") indep_list = [f"{n}_{epoch['name']}" for n in indep_list] par_list = np.hstack((par_list, indep_list)) for ipar, par in zip(indep_list, epoch["indep_list"]): @@ -573,6 +575,7 @@ def analyze_PM_with_all_dets(config, t0, tf, params): 
obs_cr.append(np.nanmedian(full_aman.ancil.corotator_enc[inds])) ufm_cr = [] for ufm in ufm_list: + ufm_inds = np.where(full_aman.det_ufm[inds] == ufm)[0] ufm_cr.append(np.nanmedian(full_aman.ancil.corotator_enc[inds][ufm_inds])) all_ufm_cr.append(ufm_cr) per_obs_stats.wrap("cr", np.array(obs_cr)) @@ -596,7 +599,8 @@ def analyze_PM_with_all_dets(config, t0, tf, params): #Calculate RMSs per_obs_stats.wrap("rms", - np.sqrt(np.nanmean(per_obs_stats["dxi"]**2 + per_obs_stats["deta"]**2)) + np.sqrt(np.nanmean(per_obs_stats["dxi"]**2 + + per_obs_stats["deta"]**2)) ) per_ufm_stats.wrap("rms", np.sqrt(np.nanmean(per_ufm_stats["dxi"]**2 + @@ -980,8 +984,8 @@ def main(config_path: str): use_inds=good_fit_inds) logger.info("RMS on initial fit without outliers: %f arcmin", masked_rms) - epoch["solver_aman"].wrap('bad_fit_inds', bad_fit_inds) epoch["solver_aman"].weights[bad_fit_inds] = 0.0 + epoch["solver_aman"].wrap('bad_fit_inds', bad_fit_inds) model_solved_params = minimize( objective_model_func_lmfit_joint, From e7f007fd1a7e215ef55ddbde792d0c6283ec9b25 Mon Sep 17 00:00:00 2001 From: Elle Shaw Date: Tue, 20 Jan 2026 15:19:16 -0800 Subject: [PATCH 36/48] Updated rst documentation with example NERSC submission script --- docs/site_pipeline.rst | 142 ++++++++++++++++++++++++++++------------- 1 file changed, 96 insertions(+), 46 deletions(-) diff --git a/docs/site_pipeline.rst b/docs/site_pipeline.rst index e87110890..6a103d981 100644 --- a/docs/site_pipeline.rst +++ b/docs/site_pipeline.rst @@ -197,28 +197,26 @@ if it does not exist. get_brightsrc_pointing_part1 and get_brightsrc_pointing_part2 ----------------------------------------------------------------- +--------------------------------------------------------------- The two-part ``get_brightsrc_pointing`` script set will will run solve for the xieta coordinates of detectors that observe a bright source during an observation. -It is a two part process that requires a map step and then a tod step. 
-To run, the scripts require config files described below. +It is a two part process that requires a map step and then a TOD step. +The scripts require the settings and preprocessing config files described below. + +For job submission and parallelization, see example NERSC slurm submission config at the end of this section. The code will process all wafers unless otherwise specified. It is recommended to run with ``parallel_job: True`` in the config files if analyzing -multiple wafers at once. Otherwise, specify a wafer slot or restrict detectors in CL args. - -Recommended SLURM settings - - ``--nodes=1`` - - ``--ntasks=1`` - - ``--time=00:45:00`` - - ``--cpus-per-task=14`` - - ``--mem=150G`` - - export OMP_NUM_THREADS=1 +multiple wafers at once. +Otherwise, specify a wafer slot or restrict detectors in command line args to debug. +.. argparse:: + :module: sotodlib.site_pipeline.get_brightsrc_pointing_part1 or _part2 + :func: get_parser -Recommended Command Line arguments: +Command Line arguments: - ``configs`` - ``--obs_id`` - ``--sso_name`` @@ -227,47 +225,55 @@ Optional Command Line arguments: - ``--wafer_slot`` e.g. ws0 - ``--restrict_dets_for_debug`` integer, or comma separated list of det readout_ids. -Options to include min_ctime and max_ctime arguments, which will proces all obs -in the time frame. - -.. argparse:: - :module: sotodlib.site_pipeline.get_brightsrc_pointing_part1 or _part2 - :func: get_parser - +There are options to include min_ctime and max_ctime arguments, which will process all observations +in the time frame. (not recommended) Generated results ``````````````````` -Saves results as ResultSet .hdf file in the results_dir. -ResultSet<[dets:readout_id, xi, eta, gamma, xi_err, eta_err, R2, redchi2], N rows> +The Step 1 map-based analysis scripts will generate the following outputs in the specified directory: -Load data with sotodlib.io.metadata.read_dataset( results.hdf, 'focal_plane') + 1. 
Single detector maps in ``/results/single_det_maps/_.hdf``. -Configuration -````````````````` -These scripts take in a config yaml file + * All single maps are packaged in a single hdf file, with detector readout_id as the keys in the h5py file. -Part 1 is the map-based step. Its config file should look like the following: -The parameters in these examples are used for SAT mid-freq moon observations. + 2. Fitted xi-eta focal plane position results saved as ResultSet in ``/path/to/results/map_based_results`` as specified in the Step 1 config file. Script will append 'force_zero_roll' onto the specified results_dir if True in config file. Load ResultSet with keyword 'focal_plane' + + * Contents: ``ResultSet<[dets:readout_id, xi, eta, gamma, R2], N rows>`` + + +The Step 2 TOD-based analysis scripts will use the map-based results as a starting point and then generate the finalized outputs in the specified directory: + + 1. Fitted xi-eta focal plane position results saved as ResultSet in ``/path/to/results/tod_based_results`` as specified in config file for Step-2. Script will append 'force_zero_roll' onto the specified results_dir if True in config file. Load ResultSet with keyword 'focal_plane' + + * Contents: ``ResultSet<[dets:readout_id, xi, eta, gamma, xi_err, eta_err, R2, redchi2], N rows>`` + +Configuration Files +``````````````````` +The configuration files to be input as ``configs`` in the command line arguments should have the following arguments as well as any preprocessing steps wished to be taken. Only processing steps that are agnostic of det-match can be used to do initial analyses without formalized metadata. + +The parameters in these examples could be used for SAT mid-freq moon observations. + +Step 1 Config: .. 
code-block:: yaml context_file: /path/to/context.yaml - query_tags: ['moon', 'jupiter', 'mars'] (alternatively specify --sso_name in kwargs + query_tags: ['moon'=1] #(alternatively specify --sso_name in kwargs) - optics_config_fn: /path/to/ufm_to_fp.yaml + optics_config_fn: '/global/cfs/cdirs/sobs/users/elleshaw/process_brightsrc/ufm_to_fp.yaml' single_det_maps_dir: /path/to/results/single_det_maps results_dir: /path/to/results/map_based_results - parallel_job: True - wafer_mask_det: 8. + parallel_job: True #For job submission. Parallel across wafers. + wafer_mask_det: 8. #mask around detector to cut TOD when source too far away. res_deg: 0.3 - xieta_bs_offset: [0., 0.] + xieta_bs_offset: [0., 0.] #Good to input xieta offset in radians. (!!! for satp2) save_normal_roll: False #false for SAT, true for LAT save_force_zero_roll: True #true for SAT, false for LAT - hit_time_threshold: 600 #seconds - hit_circle_r_deg: 7. + hit_circle_r_deg: 7. # radial mask to decide which UFMs are hit by source and should be analyzed. + hit_time_threshold: 600 #seconds, if hit_time not met then UFM does not get analyzed. process_pipe: - name: 'detrend' @@ -302,22 +308,20 @@ The parameters in these examples are used for SAT mid-freq moon observations. .. code-block:: yaml context_file: /path/to/context.yaml - query_tags: ['moon', 'jupiter', 'mars'] (alternatively specify --sso_name in kwargs - - optics_config_fn: /path/to/ufm_to_fp.yaml + query_tags: ['moon'=1] #(alternatively specify --sso_name in kwargs) + optics_config_fn: '/global/cfs/cdirs/sobs/users/elleshaw/process_brightsrc/ufm_to_fp.yaml' fp_hdf_dir: /path/to/results/map_based_results from step 1 config file. - # If force_zero_roll is was True, then append _force_zero_roll to the end - result_dir: /path/to/resuls/tod_based_results + # If force_zero_roll is was True, then append _force_zero_roll to the end. Just make sure it matches where the results from Step 1 are. 
+ result_dir: /path/to/resuls/tod_based_results #Where you want the final Step2 results to show up. parallel_job: True - force_zero_roll: True - + force_zero_roll: True #Results will show up roatated in the xi-eta results as they are on the sky. ds_factor: 40 - mask_deg: 2.5 + mask_deg: 2.5 # size for circular mask around SSO (helps exclude focal plane reflections too) fit_func_name: 'gaussian2d_nonlin' - max_non_linear_order: 3 - fwhm_init_deg: 0.5 + max_non_linear_order: 3 #Suggested to use 1 for jupiter or sso's that do not saturate. + fwhm_init_deg: 0.5 # Lower for SATp2 error_estimation_method: 'force_one_redchi2' flag_name_rms_calc: 'around_source' flag_rms_calc_exclusive: False @@ -378,9 +382,55 @@ The parameters in these examples are used for SAT mid-freq moon observations. mask: 'around_source' exclusive: False +Example NERSC slurm job submission config file +`````````````````````````````````````````````` + +.. code-block:: yaml + #!/bin/bash -l + + #SBATCH --qos=shared + #SBATCH --constraint=cpu + #SBATCH --nodes=1 + #SBATCH --ntasks=1 + + #SBATCH --cpus-per-task=14 + #SBATCH --time=00:30:00 + #SBATCH --mem=220G`` #(may require regular queue and up to 400 Gb for extra long observations) + + export OMP_NUM_THREADS=1 + set -e + + tele=$1 + obs=$2 + map=$3 + basis=$4 + source="moon_from_moon" + + ymldir="/path/to/processing_settings_config_folder" + yfile="${ymldir}/preprocess_config_moon_${basis}_based_${tele}.yaml" + + if (($map)); then + echo submitted map job; + srun -n 1 -N 1 -c 14 python3 /path/to/sotodlib/site_pipeline/get_brightsrc_pointing_step1.py $yfile --obs_id=${2} --sso_name="moon"; + else + echo submitted tod job; + srun -n 1 -N 1 -c 14 python3 /path/to/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py $yfile --obs_id=${2} --sso_name="moon"; + fi + + +Submit the job submission file with the following commands: + +1. For Step 1 map-based + + * ``sbatch submit_moon_job_script.sh 1 map`` + +2. 
For Step 2 TOD based + + * ``sbatch submit_moon_job_script.sh 0 tod`` + make_read_det_match -``````````````````` +------------------- This script generates the readout ID to detector ID mapping required to translate between the detector hardware information (ex: pixel position) and the readout IDs of the resonators used to index the SMuRF data. The script uses the From 2de0b169fb2964146746e061602a83eed10f15d9 Mon Sep 17 00:00:00 2001 From: Elle Shaw Date: Mon, 16 Feb 2026 12:38:11 -0800 Subject: [PATCH 37/48] Changes according to requested comments. Condensed ReduceFlags into CombineFlags, implemented MPI parallel processing, and other syntax updates. --- docs/site_pipeline.rst | 96 +++++++++++-------- sotodlib/coords/brightsrc_pointing.py | 17 ++-- sotodlib/preprocess/processes.py | 32 +++---- .../get_brightsrc_pointing_step1.py | 61 ++++++------ .../get_brightsrc_pointing_step2.py | 69 +++++++------ sotodlib/tod_ops/sub_polyf.py | 7 +- 6 files changed, 152 insertions(+), 130 deletions(-) diff --git a/docs/site_pipeline.rst b/docs/site_pipeline.rst index 6a103d981..ea9ea5537 100644 --- a/docs/site_pipeline.rst +++ b/docs/site_pipeline.rst @@ -196,10 +196,10 @@ The output database ``wafer_info.sqlite`` and HDF5 file if it does not exist. -get_brightsrc_pointing_part1 and get_brightsrc_pointing_part2 ---------------------------------------------------------------- +get_brightsrc_pointing_part1 +---------------------------- -The two-part ``get_brightsrc_pointing`` script set will will run solve for the xieta +The two-part ``get_brightsrc_pointing_part{}`` script set will solve for the xieta coordinates of detectors that observe a bright source during an observation. It is a two part process that requires a map step and then a TOD step. @@ -212,21 +212,14 @@ It is recommended to run with ``parallel_job: True`` in the config files if anal multiple wafers at once. Otherwise, specify a wafer slot or restrict detectors in command line args to debug. 
+Command Line arguments: .. argparse:: - :module: sotodlib.site_pipeline.get_brightsrc_pointing_part1 or _part2 + :module: sotodlib.site_pipeline.get_brightsrc_pointing_part1 :func: get_parser -Command Line arguments: - ``configs`` - ``--obs_id`` - ``--sso_name`` -Optional Command Line arguments: - ``--wafer_slot`` e.g. ws0 - ``--restrict_dets_for_debug`` integer, or comma separated list of det readout_ids. -There are options to include min_ctime and max_ctime arguments, which will process all observations -in the time frame. (not recommended) +There are options to include min_ctime and max_ctime arguments, which will process all observations +in the time frame; this is not recommended unless severely restricting the detectors for debugging. Generated results ``````````````````` The Step 1 map-based analysis scripts will generate the following outputs in the specified directory: * All single maps are packaged in a single hdf file, with detector readout_id as the keys in the h5py file. - 2. Fitted xi-eta focal plane position results saved as ResultSet in ``/path/to/results/map_based_results`` as specified in the Step 1 config file. Script will append 'force_zero_roll' onto the specified results_dir if True in config file. Load ResultSet with keyword 'focal_plane' + 2. Fitted xi-eta focal plane position results saved as ResultSet in ``/path/to/results/map_based_results`` + as specified in the Step 1 config file. Script will append 'force_zero_roll' onto the specified results_dir + if True in config file. Load ResultSet with keyword 'focal_plane' * Contents: ``ResultSet<[dets:readout_id, xi, eta, gamma, R2], N rows>`` -The Step 2 TOD-based analysis scripts will use the map-based results as a starting point and then generate the finalized outputs in the specified directory: +The Step 2 TOD-based analysis scripts will use the map-based results as a starting point + and then generate the finalized outputs in the specified directory: - 1. 
Fitted xi-eta focal plane position results saved as ResultSet in ``/path/to/results/tod_based_results`` as specified in config file for Step-2. Script will append 'force_zero_roll' onto the specified results_dir if True in config file. Load ResultSet with keyword 'focal_plane' + 1. Fitted xi-eta focal plane position results saved as ResultSet in ``/path/to/results/tod_based_results`` + as specified in config file for Step-2. Script will append 'force_zero_roll' onto the specified results_dir + if True in config file. Load ResultSet with keyword 'focal_plane' * Contents: ``ResultSet<[dets:readout_id, xi, eta, gamma, xi_err, eta_err, R2, redchi2], N rows>`` Configuration Files ``````````````````` -The configuration files to be input as ``configs`` in the command line arguments should have the following arguments as well as any preprocessing steps wished to be taken. Only processing steps that are agnostic of det-match can be used to do initial analyses without formalized metadata. +The configuration files to be input as ``configs`` in the command line should +have the following arguments as well as any preprocessing steps wished to be taken. +Only processing steps that are agnostic of det-match can be used to do +initial analyses without formalized metadata. The parameters in these examples could be used for SAT mid-freq moon observations. @@ -259,14 +260,14 @@ Step 1 Config: .. code-block:: yaml context_file: /path/to/context.yaml - query_tags: ['moon'=1] #(alternatively specify --sso_name in kwargs) + query_tags: ['moon=1'] #(alternatively specify --sso_name in kwargs) optics_config_fn: '/global/cfs/cdirs/sobs/users/elleshaw/process_brightsrc/ufm_to_fp.yaml' single_det_maps_dir: /path/to/results/single_det_maps results_dir: /path/to/results/map_based_results parallel_job: True #For job submission. Parallel across wafers. - wafer_mask_det: 8. #mask around detector to cut TOD when source too far away. + wafer_mask_det: 8. 
# (degrees) mask around detector to cut TOD when source too far away. res_deg: 0.3 xieta_bs_offset: [0., 0.] #Good to input xieta offset in radians. (!!! for satp2) save_normal_roll: False #false for SAT, true for LAT @@ -308,20 +309,22 @@ The parameters in these examples are used for SAT mid-freq moon observations. .. code-block:: yaml context_file: /path/to/context.yaml - query_tags: ['moon'=1] #(alternatively specify --sso_name in kwargs) + query_tags: ['moon=1'] #(alternatively specify --sso_name in kwargs) optics_config_fn: '/global/cfs/cdirs/sobs/users/elleshaw/process_brightsrc/ufm_to_fp.yaml' fp_hdf_dir: /path/to/results/map_based_results from step 1 config file. - # If force_zero_roll is was True, then append _force_zero_roll to the end. Just make sure it matches where the results from Step 1 are. + # If force_zero_roll is was True, then append _force_zero_roll to the end. + # Just make sure it matches where the results from Step 1 are. result_dir: /path/to/resuls/tod_based_results #Where you want the final Step2 results to show up. parallel_job: True force_zero_roll: True #Results will show up roatated in the xi-eta results as they are on the sky. ds_factor: 40 - mask_deg: 2.5 # size for circular mask around SSO (helps exclude focal plane reflections too) + mask_deg: 2.5 # (degrees) size for circular mask around SSO (helps exclude focal plane reflections too) fit_func_name: 'gaussian2d_nonlin' - max_non_linear_order: 3 #Suggested to use 1 for jupiter or sso's that do not saturate. - fwhm_init_deg: 0.5 # Lower for SATp2 + max_non_linear_order: 3 #Suggested to use 1 for jupiter or sso's + #that do not saturate. + fwhm_init_deg: 0.5 # (degrees) Lower for SATp2 error_estimation_method: 'force_one_redchi2' flag_name_rms_calc: 'around_source' flag_rms_calc_exclusive: False @@ -351,27 +354,28 @@ The parameters in these examples are used for SAT mid-freq moon observations. 
cutoff: 1.9 width: 0.2 - name: 'source_flags' + source_flags_name: 'source_wide' + save: True calc: - merge: True - max_pix: 10000000000 - source_flags_name: 'source_wide' mask: shape: circle xyr: [0., 0., 5.0] - - name: 'source_flags' - calc: merge: True max_pix: 10000000000 - source_flags_name: 'source_narrow' + - name: 'source_flags' + source_flags_name: 'source_narrow' + save: True + calc: mask: shape: circle xyr: [0., 0., 3.0] - - name: 'reduce_flags' + merge: True + max_pix: 10000000000 + - name: 'combine_flags' process: - flags: ['source_wide', 'source_narrow'] - method: 'except' - wrap: True - new_flag: 'around_source' + flag_labels: ['source_wide.moon', 'source_narrow.moon'] + method: 'except' + total_flags_label: 'around_source' - name: 'flag_turnarounds' process: truncate: True @@ -395,7 +399,7 @@ Example NERSC slurm job submission config file #SBATCH --cpus-per-task=14 #SBATCH --time=00:30:00 - #SBATCH --mem=220G`` #(may require regular queue and up to 400 Gb for extra long observations) + #SBATCH --mem=220G`` #(may need regular queue & up to 400 Gb for long obs) export OMP_NUM_THREADS=1 set -e @@ -411,10 +415,14 @@ Example NERSC slurm job submission config file if (($map)); then echo submitted map job; - srun -n 1 -N 1 -c 14 python3 /path/to/sotodlib/site_pipeline/get_brightsrc_pointing_step1.py $yfile --obs_id=${2} --sso_name="moon"; + srun -n 1 -N 1 -c 14 python3 + /path/to/sotodlib/site_pipeline/get_brightsrc_pointing_step1.py $yfile + --obs_id=${2} --sso_name="moon"; else echo submitted tod job; - srun -n 1 -N 1 -c 14 python3 /path/to/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py $yfile --obs_id=${2} --sso_name="moon"; + srun -n 1 -N 1 -c 14 python3 + /path/to/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py $yfile + --obs_id=${2} --sso_name="moon"; fi @@ -499,6 +507,16 @@ entries mater. det_info: true multi: true + +get_brightsrc_pointing_part2 +---------------------------- +See Part 1 for description + +.. 
argparse:: + :module: sotodlib.site_pipeline.get_brightsrc_pointing_part2 + :func: get_parser + + update_det_match ------------------ diff --git a/sotodlib/coords/brightsrc_pointing.py b/sotodlib/coords/brightsrc_pointing.py index 8330fce1f..2cb8605d6 100644 --- a/sotodlib/coords/brightsrc_pointing.py +++ b/sotodlib/coords/brightsrc_pointing.py @@ -1,10 +1,12 @@ +# These functions are used for fitting detector positions from bright point sources +# called by site_pipeline.get_brightsrc_pointing_step1 and site_pipeline.get_brightsrc_pointing_step2 import os import re from tqdm import tqdm import numpy as np from scipy import interpolate from scipy.optimize import curve_fit -from joblib import Parallel, delayed +#from joblib import Parallel, delayed from sotodlib import core from sotodlib import coords @@ -20,11 +22,12 @@ def get_planet_trajectory(tod, planet, _split=20, return_model=False): """ - Generate the trajectory of a given planet over a specified time range. + Generate the trajectory in horizon coordinates of a given planet over a specified time range. Parameters: - tod : An axis manager - planet (str): The name of the planet for which to generate the trajectory. + tod : An axis manager containing a timestamps field, which is used to + determine the time range and generate the trajectory. + planet (str): The name of the object for which to generate the trajectory. e.g. "moon" or "saturn" _split (int, optional): Number of points to interpolate the trajectory. Defaults to 20. return_model (bool, optional): If True, returns interpolation functions of az and el. Defaults to False. @@ -34,7 +37,6 @@ def get_planet_trajectory(tod, planet, _split=20, return_model=False): If return_model is False: array: Array of quaternions representing trajectory of the planet at each timestamp. 
""" - print(planet) timestamps_sparse = np.linspace(tod.timestamps[0], tod.timestamps[-1], _split) planet_az_sparse = np.zeros_like(timestamps_sparse) @@ -136,7 +138,8 @@ def get_wafer_xieta(wafer_slot, optics_config_fn, xieta_bs_offset=(0., 0.), def get_rough_hit_time(tod, wafer_slot, sso_name, circle_r_deg=7.,optics_config_fn=None): """ - Estimate the rough hit time for a axismanager, wafer_slot, and sso_name. + Estimate the rough hit time, which is the amount of time for which the source + is within some distance from the center of a wafer slot. Parameters: tod : An AxisManager object @@ -166,6 +169,8 @@ def make_wafer_centered_maps(tod, sso_name, optics_config_fn, map_hdf, signal='signal', wafer_mask_deg=8., res_deg=0.3, cuts=None,): """ Generate boresight-centered maps from Time-Ordered Data (TOD) for each individual detector. + This script modifies tod.focal_plane and tod.boresight + Parameters: tod : an axismanager object sso_name (str): Name of the planet for which the trajectory is calculated. 
diff --git a/sotodlib/preprocess/processes.py b/sotodlib/preprocess/processes.py index f6ee863ee..1b77db1da 100644 --- a/sotodlib/preprocess/processes.py +++ b/sotodlib/preprocess/processes.py @@ -1,3 +1,4 @@ +import pdb import numpy as np from operator import attrgetter import copy @@ -1784,7 +1785,7 @@ def calc_and_save(self, aman, proc_aman): source_aman.wrap(source + '_inv', RangesMatrix.ones([aman.dets.count, aman.samps.count]), [(0, 'dets'), (1, 'samps')]) - + self.save(proc_aman, source_aman) return aman, proc_aman @@ -1991,12 +1992,7 @@ def process(self, aman, proc_aman, sim=False): aman.samps.offset + aman.samps.count - trim)) proc_aman.restrict('samps', (proc_aman.samps.offset + trim, proc_aman.samps.offset + proc_aman.samps.count - trim)) - return aman, proc_aman - -class ReduceFlags(_Preprocess): - name = 'reduce_flags' - def process(self, aman, proc_aman, sim=False): - aman.flags.reduce(**self.process_cfgs) + return aman, proc_aman class DetcalNanCuts(_Preprocess): """ @@ -2524,9 +2520,9 @@ def process(self, aman, proc_aman, sim=False): aman['flags'].wrap(self.process_cfgs['total_flags_label'], total_flags) return aman, proc_aman - + class CombineFlags(_Preprocess): - """Do the conbine of relevant flags for mapping + """Do the combination of relevant flags for mapping Saves results for aman under the "flags.[total_flags_label]" field. @@ -2537,8 +2533,11 @@ class CombineFlags(_Preprocess): process: flag_labels: ['glitches.glitch_flags', 'source_flags.jupiter_inv'] total_flags_label: 'glitch_flags' - method: 'union' # You can select a method from ['union', '+', 'intersect', '*']. - #method: ['+', '*'] # Or you can pass individual method for each flags as a list. Lentgh must match the length of flag_labels. + method: 'union' # You can select a method from ['union', '+', 'intersect', '*', 'except', '-']. + #method: ['+', '*'] # Or you can pass individual method for each flags as a list. + # Length of list must match the length of flag_labels. 
+ # If a list, the first method must be '+', as if adding the first flag set to an empty flag set. + # Operations are performed strictly from Left to Right, '*' are not performed first. """ name = "combine_flags" @@ -2548,13 +2547,12 @@ def process(self, aman, proc_aman, sim=False): if isinstance(self.process_cfgs['method'], list): if len(self.process_cfgs['flag_labels']) != len(self.process_cfgs['method']): raise ValueError("The length of method does not match to the length of flag_labels") - elif any(method not in ['+', 'union', '*', 'intersect'] for method in self.process_cfgs['method']): - raise ValueError("The method provided does not match one of '+', '*', 'union', or 'intersect'") - elif self.process_cfgs['method'] in ['+', 'union', '*', 'intersect']: + elif any(method not in ['+', 'union', '*', 'intersect', '-', 'except'] for method in self.process_cfgs['method']): + raise ValueError("One or more methods in list are not valid") + elif self.process_cfgs['method'] in ['+', 'union', '*', 'intersect', '-', 'except']: self.process_cfgs['method'] = ['+'] + (len(self.process_cfgs['flag_labels']) - 1)*[self.process_cfgs['method']] else: - raise ValueError("The method matches neither list nor the one of the ['+', 'union', '*', 'intersect']") - + raise ValueError("The method matches neither list nor the one of the valid operations") total_flags = RangesMatrix.zeros([proc_aman.dets.count, proc_aman.samps.count]) # get an empty flags with shape (Ndets,Nsamps) for i, (method, label) in enumerate(zip(self.process_cfgs['method'], self.process_cfgs['flag_labels'])): _label = attrgetter(label) @@ -2567,6 +2565,8 @@ def process(self, aman, proc_aman, sim=False): total_flags += _label(proc_aman) # The + operator is the union operator in this case elif method in ['*', 'intersect']: total_flags *= _label(proc_aman) # The * operator is the intersect operator in this case + elif method in ['-', 'except']: + total_flags *= ~ _label(proc_aman) # The - operator is the except 
operator in this case if 'flags' not in aman._fields: from sotodlib.core import FlagManager diff --git a/sotodlib/site_pipeline/get_brightsrc_pointing_step1.py b/sotodlib/site_pipeline/get_brightsrc_pointing_step1.py index e29c3bc76..c77a229d6 100644 --- a/sotodlib/site_pipeline/get_brightsrc_pointing_step1.py +++ b/sotodlib/site_pipeline/get_brightsrc_pointing_step1.py @@ -4,7 +4,7 @@ import argparse import time import glob -from joblib import Parallel, delayed +import logging from sotodlib import core from sotodlib import coords @@ -12,10 +12,14 @@ from sotodlib.coords import brightsrc_pointing as bsp from sotodlib.io import metadata from sotodlib.io.metadata import read_dataset, write_dataset - -from sotodlib.site_pipeline import util +from sotodlib.site_pipeline.utils.pipeline import main_launcher from sotodlib.preprocess import Pipeline -logger = util.init_logger(__name__, 'make_map_based_pointing: ') +from sotodlib.utils.procs_pool import get_exec_env +from sotodlib.site_pipeline.utils.logging import init_logger as sp_init_logger + +logger = logging.getLogger("get_brightsrc_pointing_step1") +if not logger.hasHandlers(): + sp_init_logger("get_brightsrc_pointing_step1") def _get_sso_names_from_tags(ctx, obs_id, candidate_names=['moon', 'jupiter', 'mars', 'saturn']): obs_tags = ctx.obsdb.get(obs_id, tags=True)['tags'] @@ -109,7 +113,7 @@ def main_one_wafer(configs, obs_id, wafer_slot, sso_name=None, filename=result_filename, force_zero_roll=True, edge_avoidance = edge_avoidance_deg*coords.DEG) - return + return f"Finished processing {obs_id}, {wafer_slot}" def main_one_wafer_dummy(configs, obs_id, wafer_slot, restrict_dets_for_debug=False): if type(configs) == str: @@ -169,15 +173,6 @@ def combine_pointings(pointing_result_files): focal_plane.rows.append((det, val['xi'], val['eta'], val['gamma'], val['R2'])) return focal_plane -def parallel_process_wafer_slot(configs, obs_id, wafer_slot, sso_name, restrict_dets_for_debug): - logger.info(f'Processing {obs_id}, 
{wafer_slot}') - main_one_wafer(configs=configs, - obs_id=obs_id, - wafer_slot=wafer_slot, - sso_name=sso_name, - restrict_dets_for_debug=restrict_dets_for_debug) - - def main_one_obs(configs, obs_id, sso_name=None, restrict_dets_for_debug=False): if type(configs) == str: @@ -234,17 +229,19 @@ def main_one_obs(configs, obs_id, sso_name=None, n_jobs = -1 logger.info('Entering wafer pool') - Parallel(n_jobs=n_jobs)( - delayed(parallel_process_wafer_slot)( - configs, - obs_id, - wafer_slot, - sso_name, - restrict_dets_for_debug, - ) - for wafer_slot in processed_wafer_slots - ) - logger.info('Exiting wafer pool') + rank, executor, as_completed_callable = get_exec_env(nprocs=n_jobs) + futures = [executor.submit( + main_one_wafer, + configs=configs, + obs_id=obs_id, + wafer_slot=wafer_slot, + sso_name=sso_name, + restrict_dets_for_debug=restrict_dets_for_debug, + ) + for wafer_slot in processed_wafer_slots] + for future in as_completed_callable(futures): + logger.info(future.result()) + else: logger.info('Continuing with serial processing of wafers.') for wafer_slot in processed_wafer_slots: @@ -328,20 +325,20 @@ def main(configs, min_ctime=None, max_ctime=None, update_delay=None, def get_parser(): parser = argparse.ArgumentParser(description="Process TOD data and update pointing") parser.add_argument("configs", type=str, help="Path to the configuration file") - parser.add_argument('--min_ctime', type=int, help="Minimum timestamp for the beginning of an observation list") - parser.add_argument('--max_ctime', type=int, help="Maximum timestamp for the beginning of an observation list") + parser.add_argument('--min-ctime', type=int, help="Minimum timestamp for the beginning of an observation list") + parser.add_argument('--max-ctime', type=int, help="Maximum timestamp for the beginning of an observation list") parser.add_argument('--update-delay', type=int, help="Number of days (unit is days) in the past to start observation list.") - parser.add_argument("--obs_id", 
type=str, + parser.add_argument("--obs-id", type=str, help="Specific observation obs_id to process. If provided, overrides other filtering parameters.") - parser.add_argument("--wafer_slot", type=str, default=None, + parser.add_argument("--wafer-slot", type=str, default=None, help="Wafer slot to be processed (e.g., 'ws0', 'ws3'). Valid only when obs_id is specified.") - parser.add_argument("--sso_name", type=str, default=None, + parser.add_argument("--sso-name", type=str, default=None, help="Name of solar system object (e.g., 'moon', 'jupiter'). If not specified, get sso_name from observation tags. "\ + "Valid only when obs_id is specified") - parser.add_argument("--restrict_dets_for_debug", type=str, default=False) + parser.add_argument("--restrict-dets-for-debug", type=str, default=False) return parser if __name__ == '__main__': - util.main_launcher(main, get_parser) + main_launcher(main, get_parser) diff --git a/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py b/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py index b648010a1..17cb2a7ff 100644 --- a/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py +++ b/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py @@ -6,7 +6,7 @@ import time import glob from tqdm import tqdm -from joblib import Parallel, delayed +import logging from scipy.optimize import curve_fit from sotodlib.core import metadata @@ -18,9 +18,14 @@ import so3g from so3g.proj import quat import sotodlib.coords.planets as planets -from sotodlib.site_pipeline import util +from sotodlib.site_pipeline.utils.pipeline import main_launcher from sotodlib.preprocess import Pipeline -logger = util.init_logger(__name__, 'update_pointing: ') +from sotodlib.utils.procs_pool import get_exec_env +from sotodlib.site_pipeline.utils.logging import init_logger as sp_init_logger + +logger = logging.getLogger("get_brightsrc_pointing_step2") +if not logger.hasHandlers(): + sp_init_logger("get_brightsrc_pointing_step2") def _get_sso_names_from_tags(ctx, 
obs_id, candidate_names=['moon', 'jupiter', 'mars', 'saturn']): obs_tags = ctx.obsdb.get(obs_id, tags=True)['tags'] @@ -71,16 +76,14 @@ def wrapper_gaussian2d_nonlin(xieta, xi0, eta0, fwhm_xi, fwhm_eta, phi, a, *args def wrap_fp_rset(tod, fp_rset): tod.restrict('dets', tod.dets.vals[np.isin(tod.dets.vals, fp_rset['dets:readout_id'])]) + _, ind_tod, ind_rset = core.util.get_coindices(tod.dets.vals, fp_rset['dets:readout_id']) focal_plane = core.AxisManager(tod.dets) focal_plane.wrap_new('xi', shape=('dets', )) focal_plane.wrap_new('eta', shape=('dets', )) - focal_plane.wrap_new('gamma', shape=('dets', )) - - for di, det in enumerate(tod.dets.vals): - di_rset = np.where(fp_rset['dets:readout_id'] == det)[0][0] - focal_plane.xi[di] = fp_rset['xi'][di_rset] - focal_plane.eta[di] = fp_rset['eta'][di_rset] - focal_plane.gamma[di] = fp_rset['gamma'][di_rset] + focal_plane.wrap_new('gamma', shape=('dets', )) + focal_plane.xi[ind_tod] = fp_rset['xi'][ind_rset] + focal_plane.eta[ind_tod] = fp_rset['eta'][ind_rset] + focal_plane.gamma[ind_tod] = fp_rset['gamma'][ind_rset] if 'focal_plane' in tod._fields.keys(): tod.move('focal_plane', None) @@ -367,7 +370,7 @@ def main_one_wafer(configs, obs_id, wafer_slot, sso_name=None, address='focal_plane', overwrite=True) - return + return f"Finished processing {obs_id}, {wafer_slot}" def main_one_wafer_dummy(configs, obs_id, wafer_slot, restrict_dets_for_debug=False): if type(configs) == str: @@ -422,14 +425,6 @@ def combine_pointings(pointing_result_files): focal_plane.rows.append((det, val['xi'], val['eta'], val['gamma'], val['xi_err'], val['eta_err'], val['R2'], val['redchi2'])) return focal_plane -def parallel_process_wafer_slot(configs, obs_id, wafer_slot, sso_name, restrict_dets_for_debug): - logger.info(f'Processing {obs_id}, {wafer_slot}') - main_one_wafer(configs=configs, - obs_id=obs_id, - wafer_slot=wafer_slot, - sso_name=sso_name, - restrict_dets_for_debug=restrict_dets_for_debug) - def main_one_obs(configs, obs_id, 
sso_name=None, restrict_dets_for_debug=False): if type(configs) == str: @@ -484,16 +479,18 @@ def main_one_obs(configs, obs_id, sso_name=None, n_jobs = int(os.environ.get('SLURM_CPUS_PER_TASK', 1)) except: n_jobs = -1 - Parallel(n_jobs=n_jobs)( - delayed(parallel_process_wafer_slot)( - configs, - obs_id, - wafer_slot, - sso_name, - restrict_dets_for_debug - ) - for wafer_slot in processed_wafer_slots - ) + rank, executor, as_completed_callable = get_exec_env(nprocs=n_jobs) + futures = [executor.submit( + main_one_wafer, + configs=configs, + obs_id=obs_id, + wafer_slot=wafer_slot, + sso_name=sso_name, + restrict_dets_for_debug=restrict_dets_for_debug, + ) + for wafer_slot in processed_wafer_slots] + for future in as_completed_callable(futures): + logger.info(future.result()) else: logger.info('Continuing with serial processing of wafers.') for wafer_slot in processed_wafer_slots: @@ -567,20 +564,20 @@ def main(configs, min_ctime=None, max_ctime=None, update_delay=None, def get_parser(): parser = argparse.ArgumentParser(description="Get updated result of pointings with tod-based results") parser.add_argument("configs", type=str, help="Path to the configuration file") - parser.add_argument('--min_ctime', type=int, help="Minimum timestamp for the beginning of an observation list") - parser.add_argument('--max_ctime', type=int, help="Maximum timestamp for the beginning of an observation list") + parser.add_argument('--min-ctime', type=int, help="Minimum timestamp for the beginning of an observation list") + parser.add_argument('--max-ctime', type=int, help="Maximum timestamp for the beginning of an observation list") parser.add_argument('--update-delay', type=int, help="Number of days (unit is days) in the past to start observation list.") - parser.add_argument("--obs_id", type=str, + parser.add_argument("--obs-id", type=str, help="Specific observation obs_id to process. 
If provided, overrides other filtering parameters.") - parser.add_argument("--wafer_slot", type=str, default=None, + parser.add_argument("--wafer-slot", type=str, default=None, help="Wafer slot to be processed (e.g., 'ws0', 'ws3'). Valid only when obs_id is specified.") - parser.add_argument("--sso_name", type=str, default=None, + parser.add_argument("--sso-name", type=str, default=None, help="Name of solar system object (e.g., 'moon', 'jupiter'). If not specified, get sso_name from observation tags. "\ + "Valid only when obs_id is specified") - parser.add_argument("--restrict_dets_for_debug", type=str, default=False) + parser.add_argument("--restrict-dets-for-debug", type=str, default=False) return parser if __name__ == '__main__': - util.main_launcher(main, get_parser) + main_launcher(main, get_parser) diff --git a/sotodlib/tod_ops/sub_polyf.py b/sotodlib/tod_ops/sub_polyf.py index 57bf35a31..66aeec36c 100644 --- a/sotodlib/tod_ops/sub_polyf.py +++ b/sotodlib/tod_ops/sub_polyf.py @@ -44,6 +44,7 @@ def subscan_polyfilter(aman, degree, signal_name="signal", exclude_turnarounds=F """ if method not in ["polyfit", "legendre"] : raise ValueError("Only polyfit and legendre are acceptable.") + if exclude_turnarounds: if ("left_scan" not in aman.flags) or ("turnarounds" not in aman.flags): logger.warning('aman does not have left/right scan or turnarounds flag. 
`sotodlib.flags.get_turnaround_flags` will be ran with default parameters') @@ -113,6 +114,7 @@ def subscan_polyfilter(aman, degree, signal_name="signal", exclude_turnarounds=F elif method == "legendre": degree_corr = degree + 1 + time = np.copy(aman["timestamps"]) for start, end in subscan_indices: @@ -123,6 +125,7 @@ def subscan_polyfilter(aman, degree, signal_name="signal", exclude_turnarounds=F # Get each subscan to be filtered tod_mat = copy.deepcopy(signal[:, start:end]) + # Scale time range into [-1,1] x = np.linspace(-1, 1, tod_mat.shape[1]) dx = np.mean(np.diff(x)) @@ -163,13 +166,15 @@ def subscan_polyfilter(aman, degree, signal_name="signal", exclude_turnarounds=F tod_mat[idet,msk_indx] = interped else : pass - + + means = np.mean(tod_mat, axis=1)[:, np.newaxis] tod_mat -= means # Make model to be subtracted coeffs = np.dot(arr_legendre, tod_mat.T) model = np.dot((coeffs/norm_vector[:, np.newaxis]).T,arr_legendre)*dx + model += means signal[:,start:end] -= model From f3befaae72272685532f0bc0a1d48ef7bb8507f8 Mon Sep 17 00:00:00 2001 From: Elle Shaw Date: Tue, 24 Feb 2026 16:37:03 -0800 Subject: [PATCH 38/48] Fixed issues with lonlat quat rotations and hacky negative xi solutions --- sotodlib/coords/brightsrc_pointing.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/sotodlib/coords/brightsrc_pointing.py b/sotodlib/coords/brightsrc_pointing.py index 2cb8605d6..0ed141e21 100644 --- a/sotodlib/coords/brightsrc_pointing.py +++ b/sotodlib/coords/brightsrc_pointing.py @@ -52,7 +52,7 @@ def get_planet_trajectory(tod, planet, _split=20, return_model=False): else: planet_az = planet_az_func(tod.timestamps) planet_el = planet_el_func(tod.timestamps) - q_planet = quat.rotation_lonlat(planet_az, planet_el) + q_planet = quat.rotation_lonlat(-1 * planet_az, planet_el) return q_planet def get_wafer_centered_sight(tod=None, planet=None, q_planet=None, q_bs=None, q_wafer=None): @@ -73,18 +73,18 @@ def get_wafer_centered_sight(tod=None, 
planet=None, q_planet=None, q_bs=None, q_ Returns: Sightline vector for the planet trajectory centered on the center of the wafer. """ + #breakpoint() if q_planet is None: q_planet = get_planet_trajectory(tod, planet) if q_bs is None: - q_bs = quat.rotation_lonlat(tod.boresight.az, tod.boresight.el) + q_bs = quat.rotation_lonlat(-1 * tod.boresight.az, tod.boresight.el) if q_wafer is None: q_wafer = quat.rotation_xieta(np.nanmedian(tod.focal_plane.xi), np.nanmedian(tod.focal_plane.eta)) xi_wafer, eta_wafer, _ = quat.decompose_xieta(q_wafer) - q_wafer_f = quat.rotation_xieta(-xi_wafer, eta_wafer) z_to_x = quat.rotation_lonlat(0, 0) - sight = z_to_x * ~(q_bs * q_wafer_f) * q_planet + sight = z_to_x * ~(q_bs * q_wafer) * q_planet return sight def get_wafer_xieta(wafer_slot, optics_config_fn, xieta_bs_offset=(0., 0.), @@ -151,7 +151,7 @@ def get_rough_hit_time(tod, wafer_slot, sso_name, circle_r_deg=7.,optics_config_ Returns: float: Estimated rough hit time within the circular region around the wafer center. 
""" - q_bs = quat.rotation_lonlat(tod.boresight.az, tod.boresight.el) + q_bs = quat.rotation_lonlat(-1 * tod.boresight.az, tod.boresight.el) q_planet = get_planet_trajectory(tod, sso_name) xi_wafer, eta_wafer = get_wafer_xieta(wafer_slot, optics_config_fn=optics_config_fn, roll_bs_offset=np.median(tod.boresight.roll), wrap_to_tod=False) @@ -186,8 +186,9 @@ def make_wafer_centered_maps(tod, sso_name, optics_config_fn, map_hdf, Returns: None """ + #breakpoint() q_planet = get_planet_trajectory(tod, sso_name) - q_bs = quat.rotation_lonlat(tod.boresight.az, tod.boresight.el) + q_bs = quat.rotation_lonlat(-1 * tod.boresight.az, tod.boresight.el) if roll_bs_offset is None: roll_bs_offset = np.mean(tod.boresight.roll) @@ -419,7 +420,7 @@ def map_to_xieta(mT, edge_avoidance=1.0*coords.DEG, edge_check='nan', (np.inf, beam_sigma_init*5, np.inf),), max_nfev = 1000000) R2 = 1 - np.sum((_z - _gauss1d(_r, *popt))**2)/np.sum((_z - np.mean(_z))**2) - xi_det, eta_det, R2_det = -xi_peak, eta_peak, R2 + xi_det, eta_det, R2_det = xi_peak, eta_peak, R2 else: xi_det, eta_det, R2_det = np.nan, np.nan, np.nan return xi_det, eta_det, R2_det From 76ae8dfe5719ce2372e21ccc8d53a4c1e7d004a9 Mon Sep 17 00:00:00 2001 From: Elle Shaw Date: Wed, 4 Mar 2026 13:06:05 -0800 Subject: [PATCH 39/48] Find boresight azimuth and elevation during detector crossings. 
--- .../get_brightsrc_pointing_step2.py | 22 +++++++++++++------ 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py b/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py index 17cb2a7ff..34d82839c 100644 --- a/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py +++ b/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py @@ -8,6 +8,10 @@ from tqdm import tqdm import logging +import matplotlib +matplotlib.use('Agg') +import matplotlib.pyplot as plt + from scipy.optimize import curve_fit from sotodlib.core import metadata from sotodlib.io.metadata import read_dataset, write_dataset @@ -154,7 +158,7 @@ def update_xieta(tod, # if focal_plane result is specified, use the information as a prior if fp_hdf_file is not None: wrap_fp_from_hdf(tod, fp_hdf_file) - + # set dets without focal_plane info to have (xi, eta, gamma) = (0, 0, 0), just to avoid error xieta_isnan = (np.isnan(tod.focal_plane.xi)) | (np.isnan(tod.focal_plane.eta)) gamma_isnan = np.isnan(tod.focal_plane.gamma) @@ -213,14 +217,17 @@ def update_xieta(tod, xieta_dict = {} for di, det in enumerate(tqdm(tod.dets.vals)): mask_di = source_flags_ds[di] + bs_az = np.nanmedian(tod.boresight.az[mask_ds][mask_di]) + bs_el = np.nanmedian(tod.boresight.el[mask_ds][mask_di]) + if np.any([xieta_isnan[di], np.all(mask_di==False), tod.rms[di]==0.]): xieta_dict[det] = {'xi': np.nan, 'eta': np.nan, 'xi_err': np.nan, 'eta_err': np.nan, - 'R2': np.nan, 'redchi2': np.nan} + 'R2': np.nan, 'redchi2': np.nan, 'az': np.nan, 'el': np.nan} else: ts = ts_ds[mask_di] d1_unix = np.median(ts) - xieta_det = np.array([tod.focal_plane.xi[di], tod.focal_plane.eta[di]]) + xieta_det = np.array([tod.focal_plane.xi[di], tod.focal_plane.eta[di]]) q_det = so3g.proj.quat.rotation_xieta(xieta_det[0], xieta_det[1]) planet = planets.SlowSource.for_named_source(sso_name, d1_unix * 1.) 
ra0, dec0 = planet.pos(d1_unix) @@ -232,7 +239,7 @@ def update_xieta(tod, xieta_src = xieta_src[:, mask_di] sig = sig_ds[di][mask_di] ptp_val = np.ptp(np.percentile(sig, [0.1, 99.9])) - + if fit_func_name == 'gaussian2d_nonlin': p0 = np.array([0., 0., fwhm_init_deg*coords.DEG, fwhm_init_deg*coords.DEG, 0., ptp_val]) bounds = np.array( @@ -270,16 +277,17 @@ def update_xieta(tod, xieta_det += np.array([xi_opt, eta_opt]) xieta_dict[det] = {'xi': xieta_det[0], 'eta': xieta_det[1], 'xi_err': xi_err, 'eta_err': eta_err, - 'R2': R2, 'redchi2': redchi2} + 'R2': R2, 'redchi2': redchi2, 'az' : bs_az, 'el': bs_el} except RuntimeError: xieta_dict[det] = {'xi': np.nan, 'eta': np.nan, 'xi_err': np.nan, 'eta_err': np.nan, - 'R2': np.nan, 'redchi2': np.nan} + 'R2': np.nan, 'redchi2': np.nan, 'az': np.nan, 'el': np.nan} - focal_plane = metadata.ResultSet(keys=['dets:readout_id', 'xi', 'eta', 'gamma', 'xi_err', 'eta_err', 'R2', 'redchi2']) + focal_plane = metadata.ResultSet(keys=['dets:readout_id', 'xi', 'eta', 'gamma', 'xi_err', 'eta_err', 'R2', 'redchi2', 'az', 'el']) for det in tod.dets.vals: focal_plane.rows.append((det, xieta_dict[det]['xi'], xieta_dict[det]['eta'], 0., xieta_dict[det]['xi_err'], xieta_dict[det]['eta_err'], xieta_dict[det]['R2'], xieta_dict[det]['redchi2'], + xieta_dict[det]['az'], xieta_dict[det]['el'], )) return focal_plane From 81ac88696a15da661398b7b0b16a444e7081cc2a Mon Sep 17 00:00:00 2001 From: Elle Shaw Date: Tue, 17 Mar 2026 20:26:47 -0700 Subject: [PATCH 40/48] propagate addition of az and el and roll to result set output in dummy wafer and final full wafer products --- .../get_brightsrc_pointing_step2.py | 24 +++++++++++-------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py b/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py index 34d82839c..ef1a77853 100644 --- a/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py +++ 
b/sotodlib/site_pipeline/get_brightsrc_pointing_step2.py @@ -183,7 +183,7 @@ def update_xieta(tod, max_pix=1e10, wrap='source', mask={'shape':'circle', 'xyr':[0.,0.,mask_deg]}) - + # restrict data to duration when at least one detector hit the source summed_flag = np.sum(tod.flags['source'].mask()[~xieta_isnan], axis=0).astype('bool') idx_hit = np.where(summed_flag)[0] @@ -219,10 +219,11 @@ def update_xieta(tod, mask_di = source_flags_ds[di] bs_az = np.nanmedian(tod.boresight.az[mask_ds][mask_di]) bs_el = np.nanmedian(tod.boresight.el[mask_ds][mask_di]) + bs_roll = np.nanmedian(tod.boresight.roll[mask_ds][mask_di]) if np.any([xieta_isnan[di], np.all(mask_di==False), tod.rms[di]==0.]): xieta_dict[det] = {'xi': np.nan, 'eta': np.nan, 'xi_err': np.nan, 'eta_err': np.nan, - 'R2': np.nan, 'redchi2': np.nan, 'az': np.nan, 'el': np.nan} + 'R2': np.nan, 'redchi2': np.nan, 'az': np.nan, 'el': np.nan, 'roll': np.nan} else: ts = ts_ds[mask_di] d1_unix = np.median(ts) @@ -277,17 +278,17 @@ def update_xieta(tod, xieta_det += np.array([xi_opt, eta_opt]) xieta_dict[det] = {'xi': xieta_det[0], 'eta': xieta_det[1], 'xi_err': xi_err, 'eta_err': eta_err, - 'R2': R2, 'redchi2': redchi2, 'az' : bs_az, 'el': bs_el} + 'R2': R2, 'redchi2': redchi2, 'az' : bs_az, 'el': bs_el, 'roll': bs_roll} except RuntimeError: xieta_dict[det] = {'xi': np.nan, 'eta': np.nan, 'xi_err': np.nan, 'eta_err': np.nan, - 'R2': np.nan, 'redchi2': np.nan, 'az': np.nan, 'el': np.nan} + 'R2': np.nan, 'redchi2': np.nan, 'az': np.nan, 'el': np.nan, 'roll': np.nan} - focal_plane = metadata.ResultSet(keys=['dets:readout_id', 'xi', 'eta', 'gamma', 'xi_err', 'eta_err', 'R2', 'redchi2', 'az', 'el']) + focal_plane = metadata.ResultSet(keys=['dets:readout_id', 'xi', 'eta', 'gamma', 'xi_err', 'eta_err', 'R2', 'redchi2', 'az', 'el', 'roll']) for det in tod.dets.vals: focal_plane.rows.append((det, xieta_dict[det]['xi'], xieta_dict[det]['eta'], 0., xieta_dict[det]['xi_err'], xieta_dict[det]['eta_err'], xieta_dict[det]['R2'], 
xieta_dict[det]['redchi2'], - xieta_dict[det]['az'], xieta_dict[det]['el'], + xieta_dict[det]['az'], xieta_dict[det]['el'], xieta_dict[det]['roll'], )) return focal_plane @@ -401,9 +402,9 @@ def main_one_wafer_dummy(configs, obs_id, wafer_slot, restrict_dets_for_debug=Fa result_filename = f'focal_plane_{obs_id}_{wafer_slot}.hdf' fp_rset_dummy = metadata.ResultSet(keys=['dets:readout_id', 'xi', 'eta', 'gamma', - 'xi_err', 'eta_err', 'R2', 'redchi2']) + 'xi_err', 'eta_err', 'R2', 'redchi2', 'az', 'el', 'roll']) for det in meta.dets.vals: - fp_rset_dummy.rows.append((det, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan)) + fp_rset_dummy.rows.append((det, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan)) os.makedirs(result_dir, exist_ok=True) write_dataset(fp_rset_dummy, @@ -426,11 +427,14 @@ def combine_pointings(pointing_result_files): combined_dict[row['dets:readout_id']]['eta_err'] = row['eta_err'] combined_dict[row['dets:readout_id']]['R2'] = row['R2'] combined_dict[row['dets:readout_id']]['redchi2'] = row['redchi2'] + combined_dict[row['dets:readout_id']]['az'] = row['az'] + combined_dict[row['dets:readout_id']]['el'] = row['el'] + combined_dict[row['dets:readout_id']]['roll'] = row['roll'] - focal_plane = metadata.ResultSet(keys=['dets:readout_id', 'xi', 'eta', 'gamma', 'xi_err', 'eta_err', 'R2', 'redchi2']) + focal_plane = metadata.ResultSet(keys=['dets:readout_id', 'xi', 'eta', 'gamma', 'xi_err', 'eta_err', 'R2', 'redchi2', 'az', 'el', 'roll']) for det, val in combined_dict.items(): - focal_plane.rows.append((det, val['xi'], val['eta'], val['gamma'], val['xi_err'], val['eta_err'], val['R2'], val['redchi2'])) + focal_plane.rows.append((det, val['xi'], val['eta'], val['gamma'], val['xi_err'], val['eta_err'], val['R2'], val['redchi2'],val['az'], val['el'], val['roll'])) return focal_plane def main_one_obs(configs, obs_id, sso_name=None, From 5879bf3323232c6e5a3a656fb59f1749c386db7c Mon Sep 17 00:00:00 2001 From: Elle Shaw 
Date: Wed, 25 Mar 2026 08:46:49 -0700 Subject: [PATCH 41/48] Added ability to pass az el and roll values from get_brightsrc datasets through to the outputs of finalize focal plane. Useful for per-obs usecase, and when loaded with Receiver.load the values will be under det_boresight --- docs/site_pipeline.rst | 11 +++++-- sotodlib/coords/fp_containers.py | 33 +++++++++++++++++-- .../site_pipeline/finalize_focal_plane.py | 12 ++++--- 3 files changed, 46 insertions(+), 10 deletions(-) diff --git a/docs/site_pipeline.rst b/docs/site_pipeline.rst index ea9ea5537..89f728908 100644 --- a/docs/site_pipeline.rst +++ b/docs/site_pipeline.rst @@ -242,9 +242,10 @@ The Step 2 TOD-based analysis scripts will use the map-based results as a starti 1. Fitted xi-eta focal plane position results saved as ResultSet in ``/path/to/results/tod_based_results`` as specified in config file for Step-2. Script will append 'force_zero_roll' onto the specified results_dir - if True in config file. Load ResultSet with keyword 'focal_plane' + if True in config file. Load ResultSet with keyword 'focal_plane'. + The median boresight values from small time range the source was visible to each detector is included. - * Contents: ``ResultSet<[dets:readout_id, xi, eta, gamma, xi_err, eta_err, R2, redchi2], N rows>`` + * Contents: ``ResultSet<[dets:readout_id, xi, eta, gamma, xi_err, eta_err, R2, redchi2, az, el, roll], N rows>`` Configuration Files ``````````````````` @@ -762,6 +763,10 @@ The ``focal_plane_full`` dataset contains nine columns: - ``eta_m``: The measured eta in radians - ``gamma_m``: The measured gamma in radians. - ``weights``: The average weights of the measurements for this det. 
+- ``r2``: The fit weight passed in from the get_brightsrc_pointing dataset +- ``az``: The median Az value in radians from source-detector crossing +- ``el``: The median El value in radians from source-detector crossing +- ``roll``: The median Roll value in radians from source-detector crossing - ``n_point``: The number of pointing fits used for the det. - ``n_gamma``: The number of gamma fits used for this det. @@ -802,7 +807,7 @@ always be ``(1, 1, 1)`` and ``shear`` will be ``0``. ``finalize_focal_plane`` will also output a ``ManifestDb`` as a file called ``db.sqlite`` in the output directory. By default this will be indexed by ``stream_id`` and ``obs:timestamp`` and will point to the ``focal_plane`` dataset. -If you are running in ``per_obs`` mode then it wirbe indexed by ``obs_id`` and will point +If you are running in ``per_obs`` mode then it will be indexed by ``obs_id`` and will point to results associated with data observation. Be warned that in this case there will only be entries for observations with pointing fits, so design your context accordingly. 
diff --git a/sotodlib/coords/fp_containers.py b/sotodlib/coords/fp_containers.py index fb089c631..976e7e36d 100644 --- a/sotodlib/coords/fp_containers.py +++ b/sotodlib/coords/fp_containers.py @@ -141,6 +141,7 @@ class FocalPlane: id_strs: NDArray[np.str_] # (ndet,) avg_fp: NDArray[np.floating] # (ndim, ndet) weights: NDArray[np.floating] # (ndet,) + det_boresight: NDArray[np.floating] # (ndim, ndet) transformed: NDArray[np.floating] # (ndet, ndim) center: NDArray[np.floating] # (1, ndim) center_transformed: NDArray[np.floating] # (1, ndim) @@ -219,6 +220,7 @@ def empty(cls, template, stream_id, wafer_slot, n_aman, config=""): tot_weight = np.zeros((len(template.det_ids), 2)) avg_fp = np.full_like(template.fp, np.nan) weight = np.zeros((len(template.det_ids), 2)) + det_boresight = np.zeros((len(template.det_ids), 3)) + np.nan # az, el, roll transformed = template.fp.copy() center = template.center.copy() center_transformed = template.center.copy() @@ -232,6 +234,7 @@ def empty(cls, template, stream_id, wafer_slot, n_aman, config=""): template.id_strs, avg_fp, weight, + det_boresight, transformed, center, center_transformed, @@ -260,8 +263,17 @@ def map_by_det_id(self, aman): xi = aman.pointing.xi[msk][srt][mapping] eta = aman.pointing.eta[msk][srt][mapping] r2 = np.nan + np.zeros_like(eta) + az = np.nan + np.zeros_like(eta) + el = np.nan + np.zeros_like(eta) + roll = np.nan + np.zeros_like(eta) if "R2" in aman.pointing: r2 = aman.pointing.R2[msk][srt][mapping] + if "az" in aman.pointing: + az = aman.pointing.az[msk][srt][mapping] + if "el" in aman.pointing: + el = aman.pointing.el[msk][srt][mapping] + if "roll" in aman.pointing: + roll = aman.pointing.roll[msk][srt][mapping] if "polarization" in aman: # name of field just a placeholder for now gamma = aman.polarization.polang[msk][srt][mapping] @@ -270,14 +282,16 @@ def map_by_det_id(self, aman): else: gamma = np.full(len(xi), np.nan) fp = np.column_stack((xi, eta, gamma)) - return fp, r2, template_msk + 
det_boresight = np.column_stack((az, el, roll)) + return fp, r2, det_boresight, template_msk - def add_fp(self, i, fp, weights, template_msk): - if self.full_fp is None or self.tot_weight is None: + def add_fp(self, i, fp, weights, det_boresight, template_msk): + if self.full_fp is None or self.tot_weight is None or self.det_boresight is None: raise ValueError("full_fp or tot_weight not initialized") self.full_fp[template_msk, :, i] = fp * weights[:, 0][..., None] weights = np.nan_to_num(weights) self.tot_weight[template_msk] += weights + self.det_boresight[template_msk, :] = det_boresight def save(self, f, db_info, group): logger.info("Saving %s", self.stream_id) @@ -323,6 +337,9 @@ def save(self, f, db_info, group): ("gamma_m", np.float32), ("weights", np.float32), ("r2", np.float32), + ("az", np.float32), + ("el", np.float32), + ("roll", np.float32), ("n_point", np.int8), ("n_gamma", np.int8), ] @@ -333,6 +350,7 @@ def save(self, f, db_info, group): *(self.transformed.T), *(self.avg_fp.T), *(self.weights.T), + *(self.det_boresight.T), self.n_point, self.n_gamma, ), @@ -378,6 +396,14 @@ def load(cls, group, include_cm=None): np.array(fp_full["gamma_m"]), ) ) + + det_boresight = np.column_stack( + ( + np.array(fp_full["az"]), + np.array(fp_full["el"]), + np.array(fp_full["roll"]), + ) + ) # For backwards compatibility weights = np.array(fp_full["weights"]) if "r2" in fp_full.keys: @@ -424,6 +450,7 @@ def load(cls, group, include_cm=None): np.array(id_strs), avg_fp, np.array(weights), + det_boresight, transformed, center, center_transformed, diff --git a/sotodlib/site_pipeline/finalize_focal_plane.py b/sotodlib/site_pipeline/finalize_focal_plane.py index fe19a6e97..8df8a9c59 100644 --- a/sotodlib/site_pipeline/finalize_focal_plane.py +++ b/sotodlib/site_pipeline/finalize_focal_plane.py @@ -6,6 +6,10 @@ from importlib import import_module from typing import List, Optional +import matplotlib +matplotlib.use('Agg') +import matplotlib.pyplot as plt + import git import 
h5py import megham.transform as mt @@ -399,7 +403,7 @@ def _mk_pointing_config(telescope_flavor, tube_slot, wafer_slot, config): def _restrict_inliers(aman, focal_plane): # TODO: Use gamma as well # Map to template - fp, _, template_msk = focal_plane.map_by_det_id(aman) + fp, _, _, template_msk = focal_plane.map_by_det_id(aman) fp = fp[:, :2] inliers = np.ones(len(fp), dtype=bool) @@ -651,7 +655,7 @@ def main(): plot_dir = os.path.join(plot_dir_base, str(config["start_time"])) os.makedirs(plot_dir, exist_ok=True) logger.info("Working on batch containing: %s", str(obs_ids)) - + # Setup db and Receiver db, base, group = _create_db( dbpath, @@ -771,7 +775,7 @@ def main(): _restrict_inliers(aman, focal_plane) # Mapping to template - fp, r2, template_msk = focal_plane.map_by_det_id(aman) + fp, r2, det_boresight, template_msk = focal_plane.map_by_det_id(aman) focal_plane.template.add_wafer_info(aman, template_msk) # Try an initial alignment and get weights @@ -808,7 +812,7 @@ def main(): # Store weighted values weights = np.column_stack((weights, r2)) - focal_plane.add_fp(i, fp, weights, template_msk) + focal_plane.add_fp(i, fp, weights, det_boresight, template_msk) n_obs += 1 From 3dbb7edaf4cb6100898f230994ab95476d0e0096 Mon Sep 17 00:00:00 2001 From: Elle Shaw Date: Thu, 26 Mar 2026 15:46:02 -0700 Subject: [PATCH 42/48] Pre-fill detector pointing with central values from obs_info if per detector pointing not included in inputs --- sotodlib/coords/fp_containers.py | 6 +++--- sotodlib/site_pipeline/finalize_focal_plane.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/sotodlib/coords/fp_containers.py b/sotodlib/coords/fp_containers.py index 976e7e36d..3e8a4ab92 100644 --- a/sotodlib/coords/fp_containers.py +++ b/sotodlib/coords/fp_containers.py @@ -263,9 +263,9 @@ def map_by_det_id(self, aman): xi = aman.pointing.xi[msk][srt][mapping] eta = aman.pointing.eta[msk][srt][mapping] r2 = np.nan + np.zeros_like(eta) - az = np.nan + np.zeros_like(eta) - el 
= np.nan + np.zeros_like(eta) - roll = np.nan + np.zeros_like(eta) + az = np.deg2rad(aman.obs_info.az_center) * np.ones_like(eta) + el = np.deg2rad(aman.obs_info.el_center) * np.ones_like(eta) + roll = np.deg2rad(aman.obs_info.roll_center) * np.ones_like(eta) if "R2" in aman.pointing: r2 = aman.pointing.R2[msk][srt][mapping] if "az" in aman.pointing: diff --git a/sotodlib/site_pipeline/finalize_focal_plane.py b/sotodlib/site_pipeline/finalize_focal_plane.py index 8b8f0d826..0a0209cfb 100644 --- a/sotodlib/site_pipeline/finalize_focal_plane.py +++ b/sotodlib/site_pipeline/finalize_focal_plane.py @@ -663,7 +663,7 @@ def main(): plot_dir = os.path.join(plot_dir_base, str(config["start_time"])) os.makedirs(plot_dir, exist_ok=True) logger.info("Working on batch containing: %s", str(obs_ids)) - + # Setup db and Receiver db, base, group = _create_db( dbpath, From c6f3612c62e90eb4281888e7e3bb0fd604f96c67 Mon Sep 17 00:00:00 2001 From: Saianeesh Keshav Haridas Date: Tue, 14 Apr 2026 13:45:40 -0400 Subject: [PATCH 43/48] Lat pmchanges (#1605) * feat: lat v2 pointing model * Update sotodlib/coords/pointing_model.py Co-authored-by: Matthew Hasselfield * Update sotodlib/coords/pointing_model.py Co-authored-by: Matthew Hasselfield * feat: quiver plot * fix: use correct roll * fix: remove unused lines * feat: add option for optics tubes to float and moved some fitter options to config file * fix: dont run second iteration if there are no cut points and fix missing comma * fix: apply ot offsets when plotting * fix: use same xieta_mode when making final plots and save ot offsets seperately --------- Co-authored-by: Matthew Hasselfield --- .../site_pipeline/solve_pointing_model.py | 168 +++++++++++++++--- 1 file changed, 144 insertions(+), 24 deletions(-) diff --git a/sotodlib/site_pipeline/solve_pointing_model.py b/sotodlib/site_pipeline/solve_pointing_model.py index 946ceb5ab..378c64041 100644 --- a/sotodlib/site_pipeline/solve_pointing_model.py +++ 
b/sotodlib/site_pipeline/solve_pointing_model.py @@ -66,6 +66,7 @@ def load_per_obs_data(config, t0, tf): obs_ufm_centers = np.zeros([len(filelist), 7, 3]) * np.nan weights_ufm = np.zeros([len(filelist), 7]) obs_index = [] + ot_list = [] for i, ffp in enumerate(filelist): this_OT = rxs[ffp].optics_tubes[0] @@ -73,16 +74,18 @@ def load_per_obs_data(config, t0, tf): index = ufms.index(this_OT.focal_planes[u].stream_id) obs_ufm_centers[i, index, :3] = this_OT.focal_planes[u].center_transformed weights_ufm[i, index] = np.nansum(this_OT.focal_planes[u].weights) - obs_index.append(np.repeat(i, 7)) + ot_list.append(this_OT.name) + obs_index.append(i) weights_ufm = weights_ufm / 1720.0 weights_ufm[weights_ufm < config.get("weight_cutoff")] = 0.0 initial_weights_mask = np.where(weights_ufm == 0) obs_ufm_centers[initial_weights_mask] = np.nan - obs_index = np.concatenate(obs_index) + obs_index = np.array(obs_index) + ot_list = np.array(ot_list) #obs_index[initial_weights_mask] = np.nan - return filelist, obs_ufm_centers, weights_ufm, obs_index + return filelist, obs_ufm_centers, weights_ufm, obs_index, ot_list def load_nom_focal_plane_full(config, ufm): which_template = config.get("use_as_template", "ffp") @@ -161,7 +164,7 @@ def load_per_detector_data(config, t0, tf, no_downsample_set=False, return_all_d if config.get("use_these_files") is not None: filelist = [filelist[i] for i in config.get("use_these_files")] - weights_dets, obs_dets_fits, stream_id_list, obs_index = [], [], [], [] + weights_dets, obs_dets_fits, stream_id_list, ot_list, obs_index = [], [], [], [], [] #which_ufm = config.get("which_ufm", None) which_data = config.get("use_as_data") which_weights = config.get("use_as_weights", None) @@ -177,12 +180,14 @@ def load_per_detector_data(config, t0, tf, no_downsample_set=False, return_all_d weights_dets.append(weights) obs_dets_fits.append(data) obs_index.append(np.repeat(i, len(ufm.weights))) + ot_list.append([this_OT.name] * len(data)) nom_data = 
[load_nom_focal_plane_full(config, s) for s in stream_id_list] all_det_ids, all_nom_det_array = map(np.concatenate, zip(*nom_data)) weights_dets = np.concatenate(weights_dets) obs_dets_fits = np.concatenate(obs_dets_fits, axis=0) obs_index = np.concatenate(obs_index) + ot_list = np.concatenate(ot_list) weights_dets[weights_dets < config.get("weight_cutoff")] = 0.0 obs_dets_fits[np.where(weights_dets == 0)] = np.nan @@ -197,6 +202,7 @@ def load_per_detector_data(config, t0, tf, no_downsample_set=False, return_all_d all_nom_det_array[mask], all_det_ids[mask], obs_index[mask], + ot_list[mask], ) else: @@ -209,6 +215,7 @@ def load_per_detector_data(config, t0, tf, no_downsample_set=False, return_all_d all_nom_det_array = all_nom_det_array[mask] all_det_ids = all_det_ids[mask] obs_index = obs_index[mask] + ot_list = ot_list[mask] if even_obs_size: mask = create_size_mask(obs_index) obs_dets_fits = obs_dets_fits[mask] @@ -216,6 +223,7 @@ def load_per_detector_data(config, t0, tf, no_downsample_set=False, return_all_d all_nom_det_array = all_nom_det_array[mask] all_det_ids = all_det_ids[mask] obs_index = obs_index[mask] + ot_list = ot_list[mask] if cull_dets is not None: for _ in range(2 if cull_twice else 1): mask = create_culling_mask(obs_index, cull_dets) @@ -224,13 +232,15 @@ def load_per_detector_data(config, t0, tf, no_downsample_set=False, return_all_d all_nom_det_array = all_nom_det_array[mask] all_det_ids = all_det_ids[mask] obs_index = obs_index[mask] + ot_list = ot_list[mask] return ( filelist, obs_dets_fits, weights_dets, all_nom_det_array, all_det_ids, - obs_index + obs_index, + ot_list, ) def load_obs_boresight(config, filelist): @@ -286,14 +296,26 @@ def _init_fit_params(config, epochs): pm_version = config.get("pm_version") initial_params = config.get("initial_params", {}) fixed_params = config.get("fixed_params",[]) + float_ots = config.get("float_ots", False) init_params = pm.param_defaults[pm_version] init_params.update(initial_params) # Add independant 
params orig_pars = np.array(list(init_params.keys())) par_list = orig_pars.copy() + added_ots = [] for epoch in epochs: indep_list = epoch["indep_list"] + if float_ots: + for ot in np.unique(epoch["solver_aman"].ot_list): + ot_float_pars = [f"{n}_{ot}" for n in ["xioff", "etaoff", "rot", "xiscale", "etascale"]] + indep_list += ot_float_pars + if ot not in added_ots: + orig_pars = np.concatenate((orig_pars, ot_float_pars)) + par_list = np.concatenate((par_list, ot_float_pars)) + for p, d in zip(ot_float_pars, [0, 0, 0, 1, 1]): + init_params[p] = d + added_ots += [ot] if np.sum(np.isin(indep_list, par_list)) != len(indep_list): raise ValueError(f"Invalid independant parameters in epoch {epoch['name']}") indep_list = [f"{n}_{epoch['name']}" for n in indep_list] @@ -324,6 +346,32 @@ def _init_fit_params(config, epochs): return fit_params, epochs + +def _apply_ot_float(xi_mod, eta_mod, solver_aman, params): + # For simplicity we do the floating of OTs to just the model + # We are recomputing information that should just be cached here... 
+ # TODO: Fix that + ot_float_pars = ["xioff", "etaoff", "rot", "xiscale", "etascale"] + ot_float_defaults = np.array([0, 0, 0, 1, 1]) + for ot in np.unique(solver_aman.ot_list): + ot_pars = [params.get(f"{n}_{ot}", d) for n, d in zip(ot_float_pars, ot_float_defaults)] + if np.array_equal(ot_pars, ot_float_defaults): + continue + msk = (solver_aman.ot_list == ot) + xi_mod[msk] += ot_pars[0] + eta_mod[msk] += ot_pars[1] + theta = ot_pars[2] + R = np.array([[np.cos(theta), -np.sin(theta)], + [np.sin(theta), np.cos(theta)]]) + xi_cent = np.mean(xi_mod[msk]) + eta_cent = np.mean(eta_mod[msk]) + xi_mod[msk], eta_mod[msk] = R@np.vstack((xi_mod[msk] - xi_cent, eta_mod[msk] - eta_cent)) + xi_mod[msk] += xi_cent + eta_mod[msk] += eta_cent + xi_mod[msk] *= ot_pars[3] + eta_mod[msk] *= ot_pars[4] + return xi_mod, eta_mod + def objective_model_func_lmfit( params, pm_version, solver_aman, xieta_model, weights=True ): @@ -333,6 +381,9 @@ def objective_model_func_lmfit( elif xieta_model == "template": xi_mod, eta_mod = model_template_xieta(params, pm_version, solver_aman) xi_ref, eta_ref, _ = solver_aman.nominal_xieta_locs + + xi_mod, eta_mod = _apply_ot_float(xi_mod, eta_mod, solver_aman, params) + dist = np.sqrt((xi_ref - xi_mod) ** 2 + (eta_ref - eta_mod) ** 2) #print(np.nansum(dist)) weights_array = solver_aman.weights if weights else np.ones(len(dist)) @@ -343,6 +394,7 @@ def objective_model_func_lmfit_joint( ): params = params.valuesdict() chisq = 0 + t1 = time.time() for epoch in epochs: chisq += objective_model_func_lmfit({par.split(f"_{epoch['name']}")[0]:params[par] for par in epoch["params"]}, pm_version, epoch["solver_aman"], xieta_model, weights) return chisq @@ -429,7 +481,8 @@ def apply_model_params(xieta_model, pointing_model, pm_version, aman, use_inds=N model_reference = aman.nominal_xieta_locs modeled_fits = model_template_xieta( pointing_model, pm_version, aman - ) + ) + modeled_fits = _apply_ot_float(modeled_fits[0], modeled_fits[1], aman, pointing_model) 
rms, fit_residuals = calc_RMS_and_residuals(modeled_fits, model_reference, aman.weights, use_inds=use_inds) return modeled_fits, fit_residuals, rms, model_reference @@ -446,7 +499,7 @@ def analyze_PM_with_all_dets(config, t0, tf, params): (filelist, obs_dets_fits, weights_dets, all_nom_det_array, - all_det_ids, obs_index, + all_det_ids, obs_index, ot_list ) = load_per_detector_data(config, t0, tf, return_all_dets=True) ancil, roll_c = load_obs_boresight_per_detector(config, filelist, obs_index) @@ -469,6 +522,7 @@ def analyze_PM_with_all_dets(config, t0, tf, params): ) full_aman.wrap("weights", weights_dets, [(0, "samps")]) full_aman.wrap("obs_index", obs_index) + full_aman.wrap("ot_list", ot_list) full_aman.wrap("roll_c", roll_c, [(0, "samps")]) full_aman.wrap("det_ids", all_det_ids, [(0, "samps")]) full_aman.wrap("radial", @@ -482,15 +536,25 @@ def analyze_PM_with_all_dets(config, t0, tf, params): np.array([ufm_list.index(d) for d in full_aman.det_ufm]), [(0, "samps")]) # Apply model to data. 
+ xieta_model = config.get("xieta_model", "measured") (full_modeled, full_residuals, rms, _ - ) = apply_model_params("template", + ) = apply_model_params(xieta_model, params, config.get("pm_version"), full_aman) + to_comp = "nominal_xieta_locs" + if xieta_model == "measured": + to_comp = "measured_xieta_data" full_aman.wrap("full_modeled", np.array(full_modeled), [(0, core.LabelAxis("xieta", ["xi", "eta"]))], [(1, "samps")]) full_aman.wrap("fit_residuals", full_residuals, [(0, "samps")]) + + # Just for compatibility + modelfit_aman = core.AxisManager() + modelfit_aman.wrap("xi", full_modeled[0], overwrite=True) + modelfit_aman.wrap("eta", full_modeled[1], overwrite=True) + full_aman.wrap("modeled_fits", modelfit_aman, overwrite=True) del(full_modeled) del(full_residuals) @@ -508,13 +572,13 @@ def analyze_PM_with_all_dets(config, t0, tf, params): obs_roll.append(np.nanmedian(full_aman.roll_c[inds])) obs_resid.append(np.nanmean(full_aman.fit_residuals[inds])) obs_dxi.append(np.nanmean((full_aman.full_modeled[0] - - full_aman.nominal_xieta_locs[0])[inds]/DEG*60)) + full_aman[to_comp][0])[inds]/DEG*60)) obs_deta.append(np.nanmean((full_aman.full_modeled[1] - - full_aman.nominal_xieta_locs[1])[inds]/DEG*60)) + full_aman[to_comp][1])[inds]/DEG*60)) obs_std_xi.append(np.nanstd((full_aman.full_modeled[0] - - full_aman.nominal_xieta_locs[0])[inds]/DEG*60)) + full_aman[to_comp][0])[inds]/DEG*60)) obs_std_eta.append(np.nanstd((full_aman.full_modeled[1] - - full_aman.nominal_xieta_locs[1])[inds]/DEG*60)) + full_aman[to_comp][1])[inds]/DEG*60)) (ufm_az, ufm_el, ufm_roll, ufm_resid, ufm_dxi, ufm_deta, ufm_std_xi, ufm_std_eta, ufm_wafer_num @@ -526,16 +590,16 @@ def analyze_PM_with_all_dets(config, t0, tf, params): ufm_roll.append(np.nanmedian(full_aman.roll_c[inds][ufm_inds])) ufm_resid.append(np.nanmean(full_aman.fit_residuals[inds][ufm_inds])) ufm_dxi.append(np.nanmean((full_aman.full_modeled[0] - - full_aman.nominal_xieta_locs[0])[inds][ufm_inds]/DEG*60)) + 
full_aman[to_comp][0])[inds][ufm_inds]/DEG*60)) ufm_deta.append(np.nanmean( (full_aman.full_modeled[1] - - full_aman.nominal_xieta_locs[1])[inds][ufm_inds]/DEG*60)) + full_aman[to_comp][1])[inds][ufm_inds]/DEG*60)) ufm_std_xi.append(np.nanstd( (full_aman.full_modeled[0] - - full_aman.nominal_xieta_locs[0])[inds][ufm_inds]/DEG*60)) + full_aman[to_comp][0])[inds][ufm_inds]/DEG*60)) ufm_std_eta.append(np.nanstd( (full_aman.full_modeled[1] - - full_aman.nominal_xieta_locs[1])[inds][ufm_inds]/DEG*60)) + full_aman[to_comp][1])[inds][ufm_inds]/DEG*60)) ufm_wafer_num.append(np.nanmedian(full_aman.det_wafer[inds][ufm_inds])) all_ufm_az.append(ufm_az) all_ufm_el.append(ufm_el) @@ -583,11 +647,11 @@ def analyze_PM_with_all_dets(config, t0, tf, params): full_aman.wrap("dxi", (full_aman.full_modeled[0] - - full_aman.nominal_xieta_locs[0])/DEG*60, + full_aman[to_comp][0])/DEG*60, [(0, "samps")]) full_aman.wrap("deta", (full_aman.full_modeled[1] - - full_aman.nominal_xieta_locs[1])/DEG*60, + full_aman[to_comp][1])/DEG*60, [(0, "samps")]) obsids=np.array([int(D.split('_')[1]) for D in full_aman.obs_info.obs_ids]) per_obs_stats.wrap("obsids", obsids) @@ -716,6 +780,7 @@ def main(config_path: str): save_figure=True, plotlims=plotlims) plotter.plot_full_residuals_across_focalplane() + plotter.plot_full_deltas_across_focalplane() plotter.plot_full_histogram() plotter.plot_full_unmodeled_residuals() logger.info("done") @@ -752,6 +817,7 @@ def main(config_path: str): all_nom_det_array, all_det_ids, obs_index, + ot_list, ) = load_per_detector_data(config, t0, tf, no_downsample_set=True) ancil, roll_c = load_obs_boresight_per_detector(config, filelist, obs_index) @@ -768,6 +834,7 @@ def main(config_path: str): ) fitcheck_aman.wrap("weights", weights_dets, [(0, "samps")]) fitcheck_aman.wrap("obs_index", obs_index, [(0, "samps")]) + fitcheck_aman.wrap("ot_list", ot_list, [(0, "samps")]) logger.info("Loaded %s fit check data points", len(weights_dets)) #Now make axis manager that has down 
sampled data for computation @@ -779,6 +846,7 @@ def main(config_path: str): all_nom_det_array, all_det_ids, obs_index, + ot_list, ) = load_per_detector_data(config, t0, tf) logger.info("Loaded %s data points", len(weights_dets)) ancil, roll_c = load_obs_boresight_per_detector(config, filelist, obs_index) @@ -803,6 +871,7 @@ def main(config_path: str): ) solver_aman.wrap("weights", weights_dets, [(0, "samps")]) solver_aman.wrap("obs_index", obs_index) + solver_aman.wrap("ot_list", ot_list) epoch["solver_aman"] = solver_aman epoch["fitcheck_aman"] = fitcheck_aman logger.info("Built axis manager for epoch %s", epoch["name"]) @@ -817,7 +886,7 @@ def main(config_path: str): for epoch in epochs: t0, tf = epoch["begin_timerange"], epoch["end_timerange"] - filelist, obs_ufm_centers, weights_ufm, obs_index = load_per_obs_data(config, t0, tf) + filelist, obs_ufm_centers, weights_ufm, obs_index, ot_list = load_per_obs_data(config, t0, tf) logger.info("Loaded per-obs FFP data from %s: ", config.get("per_obs_fps")) logger.info("Including data from these obs:") logger.info(filelist) @@ -849,6 +918,7 @@ def main(config_path: str): ) solver_aman.wrap("weights", weights_ufm.reshape(-1), [(0, "samps")]) solver_aman.wrap("obs_index", obs_index) + solver_aman.wrap("ot_list", ot_list) # Make weights/data cuts epoch["solver_aman"] = solver_aman logger.info("Built axis manager for epoch %s", epoch["name"]) @@ -866,9 +936,10 @@ def main(config_path: str): model_solved_params = minimize( objective_model_func_lmfit_joint, fit_params, - method="nelder", + method=config.get("fit_method", "nelder"), nan_policy="omit", args=(pm_version, epochs, xieta_model, use_weights), + **config.get("fit_options", {}), ) logger.info("Ran 1st Minimization") @@ -942,6 +1013,7 @@ def main(config_path: str): plotter.plot_residuals_histograms() plotter.plot_dets_in_these_obs() + tot_bad = 0 if iterate_cutoff is not None: logger.info("Iterating parameter solution") logger.info(f"Using {iterate_cutoff} as cutoff") 
@@ -960,6 +1032,7 @@ def main(config_path: str): len(bad_fit_inds), cutoff, ) + tot_bad += len(bad_fit_inds) if len(bad_fit_inds) != 0: if fit_type == "ufm_center": @@ -977,7 +1050,7 @@ def main(config_path: str): # Print RMS of initial fits without outlying data points before # zero-ing the weights. good_fit_inds = np.where(fit_residuals_i1 < cutoff)[0] - _, _, masked_rms, _ = apply_model_params(xieta_model, + _, _, masked_rms, _ = apply_model_params(xieta_model, epoch["solver_aman"].pointing_model, pm_version, epoch["solver_aman"], @@ -987,12 +1060,16 @@ def main(config_path: str): epoch["solver_aman"].weights[bad_fit_inds] = 0.0 epoch["solver_aman"].wrap('bad_fit_inds', bad_fit_inds) + if tot_bad == 0: + logger.info("No bad points found so not running second fit!") + if tot_bad > 0: model_solved_params = minimize( objective_model_func_lmfit_joint, - fit_params, - method="nelder", + model_solved_params, + method=config.get("fit_method", "nelder"), nan_policy="omit", args=(pm_version, epochs, xieta_model, use_weights), + **config.get("fit_options", {}), ) test_params = _round_params(model_solved_params.params.valuesdict(), 8) @@ -1091,6 +1168,20 @@ def main(config_path: str): dbfile = "db.sqlite" db = _create_db(dbfile, save_dir) for epoch in epochs: + solver_aman = epoch["solver_aman"] + # Remove OT float parameters + if config["float_ots"]: + ot_float_aman = core.AxisManager() + for ot in np.unique(solver_aman.ot_list): + for par in [f"{n}_{ot}" for n in ["xioff", "etaoff", "rot", "xiscale", "etascale"]]: + if par not in solver_aman.pointing_model._assignments: + continue + ot_float_aman.wrap(par, solver_aman.pointing_model[par]) + solver_aman.pointing_model.move(par, None) + solver_aman.wrap("ot_float_aman", ot_float_aman) + epoch["solver_aman"] = solver_aman + + epoch["solver_aman"].save(h5_filename, group=epoch["name"], overwrite=True) db.add_entry( {"obs:timestamp": (epoch["begin_timerange"], epoch["end_timerange"]), "dataset": 
f"{epoch['name']}/pointing_model"}, @@ -1108,6 +1199,8 @@ def main(config_path: str): test_params = epoch["solver_aman"].pointing_model_i1 else: test_params = epoch["solver_aman"].pointing_model + if "ot_float_aman" in epoch["solver_aman"]._assignments: + test_params = test_params.merge(epoch["solver_aman"].ot_float_aman) full_aman = analyze_PM_with_all_dets(config, t0, tf, test_params) logger.info(f"for this epoch: {epoch["name"]}") @@ -1121,6 +1214,7 @@ def main(config_path: str): save_figure=True, plotlims=plotlims) plotter.plot_full_residuals_across_focalplane() + plotter.plot_full_deltas_across_focalplane() plotter.plot_full_histogram() plotter.plot_full_unmodeled_residuals() @@ -1271,7 +1365,6 @@ def plot_full_unmodeled_residuals(self): plt.savefig(f"{plot_dir}/{platform}_full_2D_Residuals_Az{tag}.png", dpi=350) plt.close() - def plot_full_residuals_across_focalplane(self): platform = self.platform plot_dir = self.plot_dir @@ -1300,6 +1393,33 @@ def plot_full_residuals_across_focalplane(self): if self.save_figure: plt.savefig(f"{plot_dir}/{platform}_full_FocalPlane_colored_FitResiduals{tag}.png", dpi=350) + def plot_full_deltas_across_focalplane(self): + platform = self.platform + plot_dir = self.plot_dir + tag = self.tag + append = self.append_string + ancil = self.aman.ancil + + xi, eta, _ = self.aman.nominal_xieta_locs + dxi = self.aman.modeled_fits.xi - self.aman.nominal_xieta_locs[0] + deta = self.aman.modeled_fits.eta - self.aman.nominal_xieta_locs[1] + roll_c = self.aman.roll_c + + fig, ax = plt.subplots() + im = ax.quiver(xi, eta, dxi, deta, roll_c, angles='xy', scale=np.deg2rad(24), scale_units='xy', alpha=.8) + sm = cm.ScalarMappable(cmap=im.cmap, norm=im.norm) + sm.set_array([]) + plt.colorbar(sm, ax=ax, label='Roll') + ax.set_xlabel('Xi (rad)') + ax.set_ylabel('Eta (rad)') + plt.title('Fit Deltas across Focal Plane\n(Not averaged per det)') + if platform == 'lat': + plt.xlim(-.042, .042);plt.ylim(-.042, .042) + else: + plt.xlim(-.31, 
.31);plt.ylim(-.31, .31) + if self.save_figure: + plt.savefig(f"{plot_dir}/{platform}_full_FocalPlane_colored_FitDeltas{tag}.png", dpi=350) + def plot_full_histogram(self): platform = self.platform plotlims = self.plotlims From 9838573382209bbbf14a3f7adc4160e9269ddb22 Mon Sep 17 00:00:00 2001 From: Elle Shaw Date: Mon, 20 Apr 2026 16:38:23 -0700 Subject: [PATCH 44/48] Refactoring of code to handle per-detector azimuth data. Implements saving of lmfit parameter errors and correlations matrix. Simplifies loading of data into axis managers. --- .../site_pipeline/solve_pointing_model.py | 759 ++++++++---------- 1 file changed, 357 insertions(+), 402 deletions(-) diff --git a/sotodlib/site_pipeline/solve_pointing_model.py b/sotodlib/site_pipeline/solve_pointing_model.py index 378c64041..9b486693b 100644 --- a/sotodlib/site_pipeline/solve_pointing_model.py +++ b/sotodlib/site_pipeline/solve_pointing_model.py @@ -7,8 +7,7 @@ import logging import numpy as np import so3g.proj.quat as quat -import pdb -# import lmfit + import lmfit from lmfit import minimize, Parameters, fit_report import time @@ -33,10 +32,15 @@ plt.rcParams["grid.alpha"] = 0.5 -def load_nom_ufm_centers(config): - # Load Nominal UFM Center Locations from centered focal_plane +def load_per_obs_data(config, t0, tf): + # Load per-observation UFM center data points and weights + platform = config.get("platform") ffp_path = config.get("ffp_path") ufms = config.get("ufms") + per_obs_fps = config.get("per_obs_fps") + skip_tags = config.get("skip_tags", []) + + # Load nominal template data for UFM Center locations. 
nom_ufm_centers = np.zeros([7, 3]) * np.nan rx = fpc.Receiver.load_file(ffp_path) OT = rx["0"].optics_tubes[0] @@ -45,24 +49,15 @@ def load_nom_ufm_centers(config): index = ufms.index(OT.focal_planes[ufm].stream_id) except: temp_ufms = config.get("temp_ufms") - index = temp_ufms.index(OT.focal_planes[ufm].stream_id) + index = temp_ufms.index(OT.focal_planes[ufm].stream_id) nom_ufm_centers[index, :3] = OT.focal_planes[ufm].center - return nom_ufm_centers - -def load_per_obs_data(config, t0, tf): - # Load per-observation UFM center data points and weights # The per obs .h5 file a dict with obs_id for keys - per_obs_fps = config.get("per_obs_fps") - ufms = config.get("ufms") - skip_tags = config.get("skip_tags", []) rxs = fpc.Receiver.load_file(per_obs_fps) - filelist = [obs for obs in rxs.keys() if all(skip not in obs for skip in skip_tags)] filelist = [obs for obs in filelist if int(obs.split("_")[1]) > t0 and int(obs.split("_")[1]) < tf] if config.get("use_these_files") is not None: filelist = [filelist[i] for i in config.get("use_these_files")] - obs_ufm_centers = np.zeros([len(filelist), 7, 3]) * np.nan weights_ufm = np.zeros([len(filelist), 7]) obs_index = [] @@ -76,19 +71,35 @@ def load_per_obs_data(config, t0, tf): weights_ufm[i, index] = np.nansum(this_OT.focal_planes[u].weights) ot_list.append(this_OT.name) obs_index.append(i) - + weights_ufm = weights_ufm / 1720.0 weights_ufm[weights_ufm < config.get("weight_cutoff")] = 0.0 initial_weights_mask = np.where(weights_ufm == 0) obs_ufm_centers[initial_weights_mask] = np.nan obs_index = np.array(obs_index) ot_list = np.array(ot_list) - #obs_index[initial_weights_mask] = np.nan - return filelist, obs_ufm_centers, weights_ufm, obs_index, ot_list + # Load Boresight information + ctx = core.Context(config["context"]["path"]) + obs_info = [ctx.obsdb.get(obsid) for obsid in filelist] + az_c = np.array([obs["az_center"] for obs in obs_info]) + el_c = np.array([obs["el_center"] for obs in obs_info]) + roll_c = 
np.array([obs["roll_center"] for obs in obs_info]) + + ancil = core.AxisManager(core.IndexAxis("samps")) + ancil.wrap("az_enc", np.repeat(az_c, 7), [(0, "samps")]) + ancil.wrap("el_enc", np.repeat(el_c, 7), [(0, "samps")]) + ancil.wrap("roll_enc", np.repeat(el_c, 7), [(0, "samps")]) + if "lat" in platform: + ancil.wrap("corotator_enc", np.repeat((el_c - 60. - roll_c), 7), [(0, "samps")]) + if "sat" in platform: + ancil.wrap("boresight_enc", np.repeat(-1 * roll_c, 7), [(0, "samps")]) + + return filelist, obs_ufm_centers, weights_ufm, nom_ufm_centers, obs_index, ot_list, ancil, roll_c -def load_nom_focal_plane_full(config, ufm): +def load_nom_focal_plane_full(config, t0, ufm): which_template = config.get("use_as_template", "ffp") + t0 = str(t0) if which_template == "nominal": ffp_path = config.get("nominal") with h5py.File(ffp_path, "r") as template_fp: @@ -101,7 +112,7 @@ def load_nom_focal_plane_full(config, ufm): elif which_template == "ffp": ffp_path = config.get("ffp_path") with h5py.File(ffp_path, "r") as template_fp: - OT = template_fp["0/st1"] + OT = template_fp[t0]["st1"] fpf = np.array(OT[ufm]["focal_plane_full"][:]) # Extracting specific columns using structured arrays det_ids = fpf[ @@ -113,8 +124,9 @@ def load_nom_focal_plane_full(config, ufm): nom_det_array = np.stack((xi, eta, gamma), axis=1) return det_ids, nom_det_array + -def create_size_mask(obs_index): +def _create_size_mask(obs_index): #create comparably sized datasets of all obs. 
unique, counts = np.unique(obs_index, return_counts=True) min_count = min(counts) @@ -124,8 +136,9 @@ def create_size_mask(obs_index): selected_indices = np.random.choice(indices, min_count, replace=False) limiting_mask[selected_indices] = True return limiting_mask + -def create_culling_mask(obs_index, cull_dets): +def _create_culling_mask(obs_index, cull_dets): # Remove a random fraction 1/cull_dets of each dataset unique, counts = np.unique(obs_index, return_counts=True) #min_count = min(counts) @@ -135,12 +148,15 @@ def create_culling_mask(obs_index, cull_dets): selected_indices = np.random.choice(indices, (cull_dets - 1)* count // cull_dets, replace=False) culling_mask[selected_indices] = True return culling_mask + - -def load_per_detector_data(config, t0, tf, no_downsample_set=False, return_all_dets=False): +def load_per_detector_data(config, t0, tf, no_downsample=False, return_all_dets=False): per_obs_fps = config.get("per_obs_fps") skip_tags = config.get("skip_tags", []) rxs = fpc.Receiver.load_file(per_obs_fps) + which_data = config.get("use_as_data") + which_weights = config.get("use_as_weights", None) + ctx = core.Context(config["context"]["path"]) if return_all_dets: band = None @@ -164,132 +180,145 @@ def load_per_detector_data(config, t0, tf, no_downsample_set=False, return_all_d if config.get("use_these_files") is not None: filelist = [filelist[i] for i in config.get("use_these_files")] - weights_dets, obs_dets_fits, stream_id_list, ot_list, obs_index = [], [], [], [], [] - #which_ufm = config.get("which_ufm", None) - which_data = config.get("use_as_data") - which_weights = config.get("use_as_weights", None) - + obs_dets_fits, det_boresight, weights_dets, stream_id_list, ot_list, obs_index = [], [], [], [], [], [] for i, ffp in enumerate(filelist): + obs_info = ctx.obsdb.get(ffp) this_OT = rxs[ffp].optics_tubes[0] for ufm in this_OT.focal_planes: if which_ufm is not None and ufm.stream_id not in which_ufm: continue stream_id_list.append(ufm.stream_id) 
weights = ufm.weights[:, 1] if which_weights == "r2" else ufm.weights[:, 0] - data = ufm.avg_fp if which_data == "raw" else ufm.transformed weights_dets.append(weights) + data = ufm.avg_fp if which_data == "raw" else ufm.transformed obs_dets_fits.append(data) obs_index.append(np.repeat(i, len(ufm.weights))) ot_list.append([this_OT.name] * len(data)) + try: + det_pointing = np.rad2deg(ufm.det_boresight) + #det_boresight.append(ufm.det_boresight) + except: + # ufm.det_boresight not found, filling with average boresight values. + az_c = np.ones(np.shape(data)[0]) * obs_info["az_center"] + el_c = np.ones(np.shape(data)[0]) * obs_info["el_center"] + roll_c = np.ones(np.shape(data)[0]) * obs_info["roll_center"] + det_pointing = np.column_stack([az_c, el_c, roll_c]) + det_pointing[:,2] = np.ones(np.shape(data)[0]) * obs_info["roll_center"] + det_boresight.append(det_pointing) - nom_data = [load_nom_focal_plane_full(config, s) for s in stream_id_list] + nom_data = [load_nom_focal_plane_full(config, t0, s) for s in stream_id_list] all_det_ids, all_nom_det_array = map(np.concatenate, zip(*nom_data)) weights_dets = np.concatenate(weights_dets) obs_dets_fits = np.concatenate(obs_dets_fits, axis=0) + det_boresight = np.concatenate(det_boresight, axis=0) obs_index = np.concatenate(obs_index) ot_list = np.concatenate(ot_list) + #Order: [data fits, detector boresight, weights, nominal, ids, index] + data_group = [obs_dets_fits, det_boresight, weights_dets, all_nom_det_array, all_det_ids, obs_index, ot_list] + weights_dets[weights_dets < config.get("weight_cutoff")] = 0.0 obs_dets_fits[np.where(weights_dets == 0)] = np.nan mask = ~np.isnan(weights_dets) - - if no_downsample_set: - #plotting use-case to compare subset fits with the entire dataset. 
- return ( - filelist, - obs_dets_fits[mask], - weights_dets[mask], - all_nom_det_array[mask], - all_det_ids[mask], - obs_index[mask], - ot_list[mask], - ) - - else: + if not no_downsample: # Reduce detector counts for computation if band is not None: mask &= np.array([band in det for det in all_det_ids]) - #apply weights and band mask - obs_dets_fits = obs_dets_fits[mask] - weights_dets = weights_dets[mask] - all_nom_det_array = all_nom_det_array[mask] - all_det_ids = all_det_ids[mask] - obs_index = obs_index[mask] - ot_list = ot_list[mask] + data_group = [arr[mask] for arr in data_group] + if even_obs_size: - mask = create_size_mask(obs_index) - obs_dets_fits = obs_dets_fits[mask] - weights_dets = weights_dets[mask] - all_nom_det_array = all_nom_det_array[mask] - all_det_ids = all_det_ids[mask] - obs_index = obs_index[mask] - ot_list = ot_list[mask] + m = _create_size_mask(data_group[-1]) + data_group = [arr[m] for arr in data_group] + if cull_dets is not None: for _ in range(2 if cull_twice else 1): - mask = create_culling_mask(obs_index, cull_dets) - obs_dets_fits = obs_dets_fits[mask] - weights_dets = weights_dets[mask] - all_nom_det_array = all_nom_det_array[mask] - all_det_ids = all_det_ids[mask] - obs_index = obs_index[mask] - ot_list = ot_list[mask] - return ( - filelist, + m = _create_culling_mask(data_group[-1], cull_dets) + data_group = [arr[m] for arr in data_group] + else: + # Only apply weights mask, (Used for plotting) + data_group = [arr[mask] for arr in data_group] + + return (filelist, *data_group) + + +def build_data_aman(config, t0, tf, per_det = True, no_downsample=False, return_all_dets=False): + aman = core.AxisManager(core.IndexAxis("samps")) + obs_info = core.AxisManager() + obs_info.wrap("platform", config.get("platform")) + + if not per_det: + ( + filelist, + obs_ufm_centers, + weights_ufm, + nom_fm_centers, + obs_index, + ot_list, + ancil, + roll_c, + ) = load_per_obs_data(config, t0, tf) + obs_info.wrap("obs_ids", np.array(filelist)) + 
aman.wrap("obs_info", obs_info) + aman.wrap("ancil", ancil) + aman.wrap("roll_c", np.repeat(roll_c, 7), [(0, "samps")]) + aman.wrap( + "nominal_xieta_locs", + np.repeat([nom_ufm_centers], len(filelist), axis=0) + .reshape(len(filelist) * 7, 3) + .T, + [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"])), + (1, "samps")], + ) + aman.wrap( + "measured_xieta_data", + obs_ufm_centers.reshape(len(filelist) * 7, 3).T, + [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"])), + (1, "samps")], + ) + aman.wrap("weights", weights_ufm.reshape(-1), [(0, "samps")]) + aman.wrap("obs_index", obs_index) + + return aman + + else: + ( + filelist, obs_dets_fits, + det_boresight, weights_dets, all_nom_det_array, all_det_ids, obs_index, ot_list, + ) = load_per_detector_data(config, t0, tf, no_downsample, return_all_dets) + ancil = core.AxisManager(core.IndexAxis("samps")) + ancil.wrap("az_enc", det_boresight[:,0], [(0, "samps")]) + ancil.wrap("el_enc", det_boresight[:,1], [(0, "samps")]) + ancil.wrap("roll_c", det_boresight[:,2], [(0, "samps")]) + if "lat" in obs_info.platform: + ancil.wrap("corotator_enc", ancil.el_enc - 60. 
- ancil.roll_c, [(0, "samps")]) + elif "sat" in obs_info.platform: + ancil.wrap("boresight_enc", -1 * det_boresight[:,2], [(0, "samps")]) + obs_info.wrap("obs_ids", np.array(filelist)) + aman.wrap("obs_info", obs_info) + aman.wrap("ancil", ancil) + aman.wrap("roll_c", det_boresight[:,2], [(0, "samps")]) + aman.wrap( + "nominal_xieta_locs", all_nom_det_array.T, + [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"])), + (1, "samps")], ) + aman.wrap( + "measured_xieta_data", obs_dets_fits.T, + [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"])), + (1, "samps")], + ) + aman.wrap("weights", weights_dets, [(0, "samps")]) + aman.wrap("det_ids", all_det_ids, [(0, "samps")]) + aman.wrap("obs_index", obs_index, [(0, "samps")]) + aman.wrap("ot_list", ot_list, [(0, "samps")]) -def load_obs_boresight(config, filelist): - # Load boresight elevation information from each observation - # Put into an axis manager - pm_version = config.get("pm_version") - ctx = core.Context(config["context"]["path"]) - obs_info = [ctx.obsdb.get(obsid) for obsid in filelist] - az_c = np.array([obs["az_center"] for obs in obs_info]) - el_c = np.array([obs["el_center"] for obs in obs_info]) - roll_c = np.array([obs["roll_center"] for obs in obs_info]) - #az_c = np.round(np.array(az_c), 4) - #el_c = np.round(np.array(el_c), 4) - #roll_c = np.round(np.array(roll_c), 4) - #roll_c[np.where(roll_c == 0)[0]] = 0 # rounding gives negative 0 sometimes. - - ancil = core.AxisManager(core.IndexAxis("samps")) - ancil.wrap("az_enc", np.repeat(az_c, 7), [(0, "samps")]) - ancil.wrap("el_enc", np.repeat(el_c, 7), [(0, "samps")]) - if "lat" in pm_version: - ancil.wrap("corotator_enc", np.repeat((el_c - 60. 
- roll_c), 7), [(0, "samps")]) - if "sat" in pm_version: - ancil.wrap("boresight_enc", np.repeat(-1 * roll_c, 7), [(0, "samps")]) - return ancil, roll_c - - -def load_obs_boresight_per_detector(config, filelist, obs_ind): - # Load boresight elevation information from each observation - # Put into an axis manager - platform = config.get("platform") - ctx = core.Context(config["context"]["path"]) - obs_info = [ctx.obsdb.get(obsid) for obsid in filelist] - az_c = np.array([obs["az_center"] for obs in obs_info]) - el_c = np.array([obs["el_center"] for obs in obs_info]) - roll_c = np.array([obs["roll_center"] for obs in obs_info]) - - ancil = core.AxisManager(core.IndexAxis("samps")) - if platform == 'lat': - roll_c = np.array([roll_c[i] for i in obs_ind]) - ancil.wrap("az_enc", np.array([az_c[i] for i in obs_ind]), [(0, "samps")]) - ancil.wrap("el_enc", np.array([el_c[i] for i in obs_ind]), [(0, "samps")]) - ancil.wrap("corotator_enc", ancil.el_enc - 60. - roll_c, [(0, "samps")]) - else: - roll_c = np.array([roll_c[i] for i in obs_ind]) - ancil.wrap("az_enc", np.array([az_c[i] for i in obs_ind]), [(0, "samps")]) - ancil.wrap("el_enc", np.array([el_c[i] for i in obs_ind]), [(0, "samps")]) - ancil.wrap("boresight_enc", -1 * roll_c, [(0, "samps")]) - - return ancil, roll_c + return aman def _init_fit_params(config, epochs): @@ -372,6 +401,39 @@ def _apply_ot_float(xi_mod, eta_mod, solver_aman, params): eta_mod[msk] *= ot_pars[4] return xi_mod, eta_mod +#test Elle Apr 10 +def objective_model_func_lmfit( + params, pm_version, solver_aman, xieta_model, weights=True +): + if xieta_model == "measured": + xi_mod, eta_mod = model_measured_xieta(params, pm_version, solver_aman) + xi_ref, eta_ref, _ = solver_aman.measured_xieta_data + elif xieta_model == "template": + xi_mod, eta_mod = model_template_xieta(params, pm_version, solver_aman) + xi_ref, eta_ref, _ = solver_aman.nominal_xieta_locs + + xi_mod, eta_mod = _apply_ot_float(xi_mod, eta_mod, solver_aman, params) + + dist = 
np.sqrt((xi_ref - xi_mod) ** 2 + (eta_ref - eta_mod) ** 2) + if weights: + return dist * np.sqrt(solver_aman.weights) + else: + return dist + #weights_array = solver_aman.weights if weights else np.ones(len(dist)) + #return chi_sq(weights_array, dist) + +def objective_model_func_lmfit_joint( + params, pm_version, epochs, xieta_model, weights=True +): + params = params.valuesdict() + all_residuals=[] + for epoch in epochs: + epoch_params = {par.split(f"_{epoch['name']}")[0]:params[par] for par in epoch["params"]} + res = objective_model_func_lmfit(epoch_params, pm_version, epoch["solver_aman"], xieta_model, weights) + all_residuals.append(res) + return np.concatenate(all_residuals) + +""" def objective_model_func_lmfit( params, pm_version, solver_aman, xieta_model, weights=True ): @@ -405,6 +467,7 @@ def chi_sq(weights, dist): #chi2 = dist.T @ N @ dist chi2 = np.nansum(dist ** 2 * weights) return chi2 +""" def model_template_xieta(params, pm_version, aman): """ @@ -419,6 +482,7 @@ def model_template_xieta(params, pm_version, aman): params["version"] = pm_version if "sat" in pm_version: az, el, roll = pm._get_sat_enc_radians(aman.ancil) + q_nomodel = quat.rotation_lonlat(-az, el, 0) if "lat" in pm_version: az, el, roll = pm._get_lat_enc_radians(aman.ancil) @@ -494,51 +558,35 @@ def _round_params(param_dict, decimal): return P -def analyze_PM_with_all_dets(config, t0, tf, params): - platform = config.get("platform") - - (filelist, obs_dets_fits, - weights_dets, all_nom_det_array, - all_det_ids, obs_index, ot_list - ) = load_per_detector_data(config, t0, tf, return_all_dets=True) +def get_full_correlation_matrix(lmfit_result, epoch_params): + """Returns a square matrix for ALL parameters, including fixed ones.""" + #param_names = list(lmfit_result.params.keys()) + param_names = epoch_params.tolist() + full_corr_mat = np.eye(len(param_names)) + for i, n1 in enumerate(param_names): + p1 = lmfit_result.params[n1] + if p1.vary and p1.correl: + for j, n2 in 
enumerate(param_names): + if i != j and n2 in p1.correl: + full_corr_mat[i, j] = p1.correl[n2] + return full_corr_mat, param_names + - ancil, roll_c = load_obs_boresight_per_detector(config, filelist, obs_index) +def analyze_PM_with_all_dets(config, t0, tf, params): + full_aman = build_data_aman(config, t0, tf, per_det = True, no_downsample=False, return_all_dets=True) ufm_list = [ufm.split("_")[1] for ufm in config.get('ufms')] - - obs_info = core.AxisManager() - obs_info.wrap("obs_ids", np.array(filelist)) - full_aman = core.AxisManager(core.IndexAxis("samps")) - full_aman.wrap("obs_info", obs_info) - full_aman.wrap("ancil", ancil) - full_aman.wrap( - "nominal_xieta_locs", all_nom_det_array.T, - [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], - [(1, "samps")], - ) - full_aman.wrap( - "measured_xieta_data", obs_dets_fits.T, - [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], - [(1, "samps")], - ) - full_aman.wrap("weights", weights_dets, [(0, "samps")]) - full_aman.wrap("obs_index", obs_index) - full_aman.wrap("ot_list", ot_list) - full_aman.wrap("roll_c", roll_c, [(0, "samps")]) - full_aman.wrap("det_ids", all_det_ids, [(0, "samps")]) - full_aman.wrap("radial", - np.sqrt(full_aman.nominal_xieta_locs[0]**2 + - full_aman.nominal_xieta_locs[1]**2)/DEG, - [(0, "samps")]) - full_aman.wrap("det_ufm", - np.array([detid.decode('utf-8').split('_')[0].lower() for detid in full_aman.det_ids]) - , [(0, "samps")]) + xi_nom, eta_nom = full_aman.nominal_xieta_locs[0], full_aman.nominal_xieta_locs[1] + full_aman.wrap("radial", np.sqrt(xi_nom ** 2 + eta_nom ** 2) / DEG, [(0, "samps")]) + #np.sqrt(full_aman.nominal_xieta_locs[0]**2 + full_aman.nominal_xieta_locs[1]**2)/DEG, + det_ufms = np.array([d.decode('utf-8').split('_')[0].lower() for d in full_aman.det_ids]) + full_aman.wrap("det_ufm", det_ufms, [(0, "samps")]) full_aman.wrap("det_wafer", - np.array([ufm_list.index(d) for d in full_aman.det_ufm]), + np.array([ufm_list.index(d) for d in det_ufms]), 
[(0, "samps")]) - # Apply model to data. + # Apply model to data. xieta_model = config.get("xieta_model", "measured") (full_modeled, full_residuals, rms, _ - ) = apply_model_params(xieta_model, + ) = apply_model_params(xieta_model, #"template", params, config.get("pm_version"), full_aman) @@ -546,137 +594,74 @@ def analyze_PM_with_all_dets(config, t0, tf, params): if xieta_model == "measured": to_comp = "measured_xieta_data" full_aman.wrap("full_modeled", np.array(full_modeled), - [(0, core.LabelAxis("xieta", ["xi", "eta"]))], - [(1, "samps")]) + [(0, core.LabelAxis("xieta", ["xi", "eta"])),(1, "samps")]) full_aman.wrap("fit_residuals", full_residuals, [(0, "samps")]) - + # Just for compatibility modelfit_aman = core.AxisManager() modelfit_aman.wrap("xi", full_modeled[0], overwrite=True) modelfit_aman.wrap("eta", full_modeled[1], overwrite=True) full_aman.wrap("modeled_fits", modelfit_aman, overwrite=True) - del(full_modeled) - del(full_residuals) - - (obs_az, obs_el, obs_roll, obs_resid, - obs_dxi, obs_deta, obs_std_xi, obs_std_eta - ) = [], [], [], [], [], [], [], [] - (all_ufm_az, all_ufm_el, all_ufm_roll, all_ufm_resid, - all_ufm_dxi, all_ufm_deta, all_ufm_std_xi, - all_ufm_std_eta, all_ufm_wafer_num - ) = [], [], [], [], [], [], [], [], [] - for ob in np.unique(full_aman.obs_index): - inds = np.where(full_aman.obs_index == ob)[0] - obs_az.append(np.nanmedian(full_aman.ancil.az_enc[inds])) - obs_el.append(np.nanmedian(full_aman.ancil.el_enc[inds])) - obs_roll.append(np.nanmedian(full_aman.roll_c[inds])) - obs_resid.append(np.nanmean(full_aman.fit_residuals[inds])) - obs_dxi.append(np.nanmean((full_aman.full_modeled[0] - - full_aman[to_comp][0])[inds]/DEG*60)) - obs_deta.append(np.nanmean((full_aman.full_modeled[1] - - full_aman[to_comp][1])[inds]/DEG*60)) - obs_std_xi.append(np.nanstd((full_aman.full_modeled[0] - - full_aman[to_comp][0])[inds]/DEG*60)) - obs_std_eta.append(np.nanstd((full_aman.full_modeled[1] - - full_aman[to_comp][1])[inds]/DEG*60)) - (ufm_az, 
ufm_el, ufm_roll, ufm_resid, - ufm_dxi, ufm_deta, ufm_std_xi, - ufm_std_eta, ufm_wafer_num - )= [], [], [], [], [], [], [], [], [] + + dxi_all = (full_aman.full_modeled[0] - xi_nom) / DEG * 60 #modeled - nominal + deta_all = (full_aman.full_modeled[1] - eta_nom) / DEG * 60 + full_aman.wrap("dxi", dxi_all, [(0, "samps")]) + full_aman.wrap("deta", deta_all, [(0, "samps")]) + + stats_map = { + 'az' : full_aman.ancil.az_enc, 'el' : full_aman.ancil.el_enc, + 'roll' : full_aman.roll_c, 'resid' : full_aman.fit_residuals, + 'dxi': full_aman.dxi, 'deta': full_aman.deta, 'cr': getattr(full_aman.ancil, 'corotator_enc', None) + } + per_obs = {k: [] for k in stats_map if stats_map[k] is not None} + per_obs.update({'std_xi': [], 'std_eta': []}) + per_ufm = {k: [] for k in stats_map if stats_map[k] is not None} + per_ufm.update({'std_xi': [], 'std_eta': [], 'wafer_num': []}) + unique_obs_ids = np.unique(full_aman.obs_index) + for ob in unique_obs_ids: + o_mask = (full_aman.obs_index == ob) + for k, vals in stats_map.items(): + if vals is not None: + per_obs[k].append(np.nanmedian(vals[o_mask]) if k in ['az', 'el', 'roll', 'cr'] else np.nanmean(vals[o_mask])) + per_obs['std_xi'].append(np.nanstd(dxi_all[o_mask])) + per_obs['std_eta'].append(np.nanstd(deta_all[o_mask])) + #Calculate per ufm stats for this obs + ufm_row = {k: [] for k in per_ufm} for ufm in ufm_list: - ufm_inds = np.where(full_aman.det_ufm[inds] == ufm)[0] - ufm_az.append(np.nanmedian(full_aman.ancil.az_enc[inds][ufm_inds])) - ufm_el.append(np.nanmedian(full_aman.ancil.el_enc[inds][ufm_inds])) - ufm_roll.append(np.nanmedian(full_aman.roll_c[inds][ufm_inds])) - ufm_resid.append(np.nanmean(full_aman.fit_residuals[inds][ufm_inds])) - ufm_dxi.append(np.nanmean((full_aman.full_modeled[0] - - full_aman[to_comp][0])[inds][ufm_inds]/DEG*60)) - ufm_deta.append(np.nanmean( - (full_aman.full_modeled[1] - - full_aman[to_comp][1])[inds][ufm_inds]/DEG*60)) - ufm_std_xi.append(np.nanstd( - (full_aman.full_modeled[0] - - 
full_aman[to_comp][0])[inds][ufm_inds]/DEG*60)) - ufm_std_eta.append(np.nanstd( - (full_aman.full_modeled[1] - - full_aman[to_comp][1])[inds][ufm_inds]/DEG*60)) - ufm_wafer_num.append(np.nanmedian(full_aman.det_wafer[inds][ufm_inds])) - all_ufm_az.append(ufm_az) - all_ufm_el.append(ufm_el) - all_ufm_roll.append(ufm_roll) - all_ufm_resid.append(ufm_resid) - all_ufm_deta.append(ufm_deta) - all_ufm_dxi.append(ufm_dxi) - all_ufm_std_xi.append(ufm_std_xi) - all_ufm_std_eta.append(ufm_std_eta) - all_ufm_wafer_num.append(ufm_wafer_num) - + u_mask = o_mask & (full_aman.det_ufm == ufm) + for k, vals in stats_map.items(): + if vals is not None: + ufm_row[k].append(np.nanmedian(vals[u_mask]) if k in ['az', 'el', 'roll', 'cr'] else np.nanmean(vals[u_mask])) + ufm_row['std_xi'].append(np.nanstd(dxi_all[u_mask])) + ufm_row['std_eta'].append(np.nanstd(deta_all[u_mask])) + ufm_row['wafer_num'].append(np.nanmedian(full_aman.det_wafer[u_mask])) + + for k in per_ufm: + per_ufm[k].append(ufm_row[k]) + per_ufm_stats = core.AxisManager() per_obs_stats = core.AxisManager() - per_obs_stats.wrap("el", np.array(obs_el)) - per_obs_stats.wrap("roll", np.array(obs_roll)) - per_obs_stats.wrap("az", np.array(obs_az)) - per_obs_stats.wrap("resid", np.array(obs_resid)) - per_obs_stats.wrap("dxi", np.array(obs_dxi)) - per_obs_stats.wrap("deta", np.array(obs_deta)) - per_obs_stats.wrap("std_xi", np.array(obs_std_xi)) - per_obs_stats.wrap("std_eta", np.array(obs_std_eta)) - per_ufm_stats.wrap("az", np.array(all_ufm_az)) - per_ufm_stats.wrap("el", np.array(all_ufm_el)) - per_ufm_stats.wrap("roll", np.array(all_ufm_roll)) - per_ufm_stats.wrap("resid", np.array(all_ufm_resid)) - per_ufm_stats.wrap("dxi", np.array(all_ufm_dxi)) - per_ufm_stats.wrap("deta", np.array(all_ufm_deta)) - per_ufm_stats.wrap("std_xi", np.array(all_ufm_std_xi)) - per_ufm_stats.wrap("std_eta", np.array(all_ufm_std_eta)) - per_ufm_stats.wrap("wafer_num", np.array(all_ufm_wafer_num)) - - if platform == "lat": - obs_cr = [] - 
all_ufm_cr = [] - for ob in np.unique(full_aman.obs_index): - inds = np.where(full_aman.obs_index == ob)[0] - obs_cr.append(np.nanmedian(full_aman.ancil.corotator_enc[inds])) - ufm_cr = [] - for ufm in ufm_list: - ufm_inds = np.where(full_aman.det_ufm[inds] == ufm)[0] - ufm_cr.append(np.nanmedian(full_aman.ancil.corotator_enc[inds][ufm_inds])) - all_ufm_cr.append(ufm_cr) - per_obs_stats.wrap("cr", np.array(obs_cr)) - per_ufm_stats.wrap("cr", np.array(all_ufm_cr)) - - full_aman.wrap("dxi", - (full_aman.full_modeled[0] - - full_aman[to_comp][0])/DEG*60, - [(0, "samps")]) - full_aman.wrap("deta", - (full_aman.full_modeled[1] - - full_aman[to_comp][1])/DEG*60, - [(0, "samps")]) + for k, vals in per_obs.items(): + per_obs_stats.wrap(k, np.array(vals)) + for k, vals in per_ufm.items(): + per_ufm_stats.wrap(k, np.array(vals)) + + #Add obsids and RMS stats obsids=np.array([int(D.split('_')[1]) for D in full_aman.obs_info.obs_ids]) per_obs_stats.wrap("obsids", obsids) per_ufm_stats.wrap("obsids", np.repeat(obsids, np.shape(per_ufm_stats["dxi"])[1]) ) - full_aman.wrap("obsids", obsids[full_aman.obs_index]) + full_aman.wrap("obsids", obsids[full_aman.obs_index], [(0, "samps")]) #Calculate RMSs - per_obs_stats.wrap("rms", - np.sqrt(np.nanmean(per_obs_stats["dxi"]**2 + - per_obs_stats["deta"]**2)) - ) - per_ufm_stats.wrap("rms", - np.sqrt(np.nanmean(per_ufm_stats["dxi"]**2 + - per_ufm_stats["deta"]**2)) - ) - full_aman.wrap("rms", - np.sqrt(np.nanmean(full_aman["dxi"]**2 + - full_aman["deta"]**2)) - ) + for aman in [per_obs_stats, per_ufm_stats, full_aman]: + aman.wrap("rms", np.sqrt(np.nanmean(aman['dxi'] ** 2 + aman['deta'] ** 2))) + full_aman.wrap("per_ufm_stats", per_ufm_stats) full_aman.wrap("per_obs_stats", per_obs_stats) - + return full_aman def _create_db(filename, save_dir): @@ -764,7 +749,7 @@ def main(config_path: str): t0, tf = epoch["begin_timerange"], epoch["end_timerange"] test_params, epochs = _init_fit_params(config, epochs) - logger.info("Using these paramters 
to test pointing: ") + logger.info("Using these parameters to test pointing: ") for p in test_params: logger.info(f"{p}: {test_params[p].value}") #logger.info(test_params) @@ -776,6 +761,8 @@ def main(config_path: str): plotter = ModelFitsPlotter(solver_aman=full_aman, config=config, save_dir=save_dir, + t0=t0, + tf=tf, iteration_tag=f"_{epoch["name"]}", save_figure=True, plotlims=plotlims) @@ -784,6 +771,7 @@ def main(config_path: str): plotter.plot_full_histogram() plotter.plot_full_unmodeled_residuals() logger.info("done") + return logger.info( "Pointing model will try to replicate (model) the %s data.", xieta_model @@ -805,73 +793,18 @@ def main(config_path: str): which_data = config.get("use_as_data") use_weights = config.get("use_weights", True) - #Make axis manager with full detector set. - # Keep wafer/band/obs cuts but do not further downsample. + for epoch in epochs: t0, tf = epoch["begin_timerange"], epoch["end_timerange"] - fitcheck_aman = core.AxisManager(core.IndexAxis("samps")) - ( - filelist, - obs_dets_fits, - weights_dets, - all_nom_det_array, - all_det_ids, - obs_index, - ot_list, - ) = load_per_detector_data(config, t0, tf, no_downsample_set=True) - ancil, roll_c = load_obs_boresight_per_detector(config, filelist, obs_index) - - fitcheck_aman.wrap("ancil", ancil) - fitcheck_aman.wrap( - "nominal_xieta_locs", all_nom_det_array.T, - [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], - [(1, "samps")], - ) - fitcheck_aman.wrap( - "measured_xieta_data", obs_dets_fits.T, - [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], - [(1, "samps")], - ) - fitcheck_aman.wrap("weights", weights_dets, [(0, "samps")]) - fitcheck_aman.wrap("obs_index", obs_index, [(0, "samps")]) - fitcheck_aman.wrap("ot_list", ot_list, [(0, "samps")]) - logger.info("Loaded %s fit check data points", len(weights_dets)) + # Make axis manager with full detector set. + # Keep wafer/band/obs cuts but do not further downsample. 
+ fitcheck_aman = build_data_aman(config, t0, tf, per_det = True, no_downsample=True) + logger.info("Loaded %s fit check data points", len(fitcheck_aman.weights)) #Now make axis manager that has down sampled data for computation - solver_aman = core.AxisManager(core.IndexAxis("samps")) - ( - filelist, - obs_dets_fits, - weights_dets, - all_nom_det_array, - all_det_ids, - obs_index, - ot_list, - ) = load_per_detector_data(config, t0, tf) - logger.info("Loaded %s data points", len(weights_dets)) - ancil, roll_c = load_obs_boresight_per_detector(config, filelist, obs_index) - - # Build Axis Managers - solver_aman.wrap("ancil", ancil) - obs_info = core.AxisManager() - obs_info.wrap("obs_ids", np.array(filelist)) - solver_aman.wrap("obs_info", obs_info) - solver_aman.wrap("roll_c", roll_c, [(0, "samps")]) - solver_aman.wrap( - "nominal_xieta_locs", - all_nom_det_array.T, - [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], - [(1, "samps")], - ) - solver_aman.wrap( - "measured_xieta_data", - obs_dets_fits.T, - [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], - [(1, "samps")], - ) - solver_aman.wrap("weights", weights_dets, [(0, "samps")]) - solver_aman.wrap("obs_index", obs_index) - solver_aman.wrap("ot_list", ot_list) + solver_aman = build_data_aman(config, t0, tf, per_det = True, no_downsample=False) + logger.info("Loaded %s data points", len(solver_aman.weights)) + epoch["solver_aman"] = solver_aman epoch["fitcheck_aman"] = fitcheck_aman logger.info("Built axis manager for epoch %s", epoch["name"]) @@ -880,46 +813,13 @@ def main(config_path: str): elif fit_type == "ufm_center": use_weights = config.get("use_weights", True) # Load in focal_plane and boresight data - nom_ufm_centers = load_nom_ufm_centers(config) - logger.info("Loaded nominal UFM centers from %s: ", config.get("ffp_path")) - logger.info(nom_ufm_centers) - for epoch in epochs: t0, tf = epoch["begin_timerange"], epoch["end_timerange"] - filelist, obs_ufm_centers, weights_ufm, 
obs_index, ot_list = load_per_obs_data(config, t0, tf) + solver_aman = build_data_aman(config, t0, tf, per_det=False) logger.info("Loaded per-obs FFP data from %s: ", config.get("per_obs_fps")) logger.info("Including data from these obs:") - logger.info(filelist) + logger.info(solver_aman.obs_info.obs_ids) - ancil, roll_c = load_obs_boresight(config, filelist) - logger.info("Loaded boresight data from obs ids.") - - # Build Axis Managers - obs_info = core.AxisManager() - obs_info.wrap("obs_ids", np.array(filelist)) - - solver_aman = core.AxisManager(core.IndexAxis("samps")) - solver_aman.wrap("ancil", ancil) - solver_aman.wrap("obs_info", obs_info) - solver_aman.wrap("roll_c", np.repeat(roll_c, 7), [(0, "samps")]) - solver_aman.wrap( - "nominal_xieta_locs", - np.repeat([nom_ufm_centers], len(filelist), axis=0) - .reshape(len(filelist) * 7, 3) - .T, - [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], - [(1, "samps")], - ) - solver_aman.wrap( - "measured_xieta_data", - obs_ufm_centers.reshape(len(filelist) * 7, 3).T, - [(0, core.LabelAxis("xietagamma", ["xi", "eta", "gamma"]))], - [(1, "samps")], - ) - solver_aman.wrap("weights", weights_ufm.reshape(-1), [(0, "samps")]) - solver_aman.wrap("obs_index", obs_index) - solver_aman.wrap("ot_list", ot_list) - # Make weights/data cuts epoch["solver_aman"] = solver_aman logger.info("Built axis manager for epoch %s", epoch["name"]) @@ -953,7 +853,6 @@ def main(config_path: str): # save pointing model parameters to axis manager for epoch in epochs: - logger.info("Calculating RMS and cutoff for %s", epoch["name"]) par_mapping = {par:par.split(f"_{epoch['name']}")[0] for par in epoch["params"]} param_aman = core.AxisManager() for k in list(par_mapping.keys()): @@ -961,17 +860,33 @@ def main(config_path: str): param_aman.wrap("version", pm_version) epoch["solver_aman"].wrap("pointing_model", param_aman) - # save errors to axis manager - error_aman = core.AxisManager() + #Save Fit report statistics + model_fit_stats_aman 
= core.AxisManager() + stderr_aman = core.AxisManager() for k in list(model_solved_params.params.values()): if k.name in epoch["params"]: - error_aman.wrap(par_mapping[k.name], k.stderr) - epoch["solver_aman"].wrap("pointing_model_errors", error_aman) - - # parameter_fit_stats = build_param_fit_stat_aman(model_solved_params) - # solver_aman.wrap("parameter_fit_stats", parameter_fit_stats) + stderr_aman.wrap(par_mapping[k.name], k.stderr) + model_fit_stats_aman.wrap('stderr', stderr_aman) + + fitreport_aman = core.AxisManager() + fitreport_aman.wrap('chisq', model_solved_params.chisqr) + fitreport_aman.wrap('redchi', model_solved_params.redchi) + fitreport_aman.wrap('success', int(model_solved_params.success)) # 1 for True, 0 for False + fitreport_aman.wrap('nfev', model_solved_params.nfev) # Num function evaluations + fitreport_aman.wrap('message', + str(model_solved_params.message)) + model_fit_stats_aman.wrap('fit_report', fitreport_aman) + + if config.get("fit_method", "nelder") == "leastsq": + mat, param_names = get_full_correlation_matrix(model_solved_params, epoch["params"]) + corrmtx_aman = core.AxisManager(core.LabelAxis('params', param_names)) + corrmtx_aman.wrap('matrix', mat, [(0, 'params'), (1, 'params')]) + corrmtx_aman.wrap('param_names', np.array(param_names), [(0, 'params')]) + model_fit_stats_aman.wrap('correlation_matrix', corrmtx_aman) + epoch["solver_aman"].wrap('model_fit_statistics', model_fit_stats_aman) # Model template and measured points using parameters found above + logger.info("Calculating RMS and cutoff for %s", epoch["name"]) modeled_fits, fit_residuals_i1, rms_i1, model_reference = apply_model_params(xieta_model, epoch["solver_aman"].pointing_model, pm_version, epoch["solver_aman"]) logger.info("RMS on fit: %f arcmin", rms_i1) @@ -997,6 +912,8 @@ def main(config_path: str): plotter = ModelFitsPlotter(solver_aman=epoch["solver_aman"], config=config, save_dir=save_dir, + t0=t0, + tf=tf, iteration_tag=f"_{epoch['name']}_i1", 
save_figure=True, plotlims=plotlims) @@ -1013,7 +930,7 @@ def main(config_path: str): plotter.plot_residuals_histograms() plotter.plot_dets_in_these_obs() - tot_bad = 0 + tot_bad=0 if iterate_cutoff is not None: logger.info("Iterating parameter solution") logger.info(f"Using {iterate_cutoff} as cutoff") @@ -1050,7 +967,7 @@ def main(config_path: str): # Print RMS of initial fits without outlying data points before # zero-ing the weights. good_fit_inds = np.where(fit_residuals_i1 < cutoff)[0] - _, _, masked_rms, _ = apply_model_params(xieta_model, + _, _, masked_rms, _ = apply_model_params(xieta_model, epoch["solver_aman"].pointing_model, pm_version, epoch["solver_aman"], @@ -1059,17 +976,18 @@ def main(config_path: str): logger.info("RMS on initial fit without outliers: %f arcmin", masked_rms) epoch["solver_aman"].weights[bad_fit_inds] = 0.0 epoch["solver_aman"].wrap('bad_fit_inds', bad_fit_inds) - + if tot_bad == 0: logger.info("No bad points found so not running second fit!") + if tot_bad > 0: model_solved_params = minimize( objective_model_func_lmfit_joint, - model_solved_params, - method=config.get("fit_method", "nelder"), + fit_params, + method=config.get("fit_method", "leastsq"), nan_policy="omit", args=(pm_version, epochs, xieta_model, use_weights), - **config.get("fit_options", {}), + **config.get("fit_options", {}) ) test_params = _round_params(model_solved_params.params.valuesdict(), 8) @@ -1084,7 +1002,6 @@ def main(config_path: str): # save pointing model parameters to axis manager for epoch in epochs: - logger.info("Calculating RMS for %s", epoch["name"]) par_mapping = {par:par.split(f"_{epoch['name']}")[0] for par in epoch["params"]} epoch["solver_aman"].move("pointing_model", "pointing_model_i1") param_aman = core.AxisManager() @@ -1094,17 +1011,33 @@ def main(config_path: str): epoch["solver_aman"].wrap("pointing_model", param_aman) # save errors to axis manager - epoch["solver_aman"].move("pointing_model_errors", "pointing_model_errors_i1") - 
error_aman = core.AxisManager() + epoch["solver_aman"].move("model_fit_statistics", "model_fit_statistics_i1") + model_fit_stats_aman = core.AxisManager() + stderr_aman = core.AxisManager() for k in list(model_solved_params.params.values()): if k.name in epoch["params"]: - error_aman.wrap(par_mapping[k.name], k.stderr) - epoch["solver_aman"].wrap("pointing_model_errors", error_aman) + stderr_aman.wrap(par_mapping[k.name], k.stderr) + model_fit_stats_aman.wrap('stderr', stderr_aman) + + fitreport_aman = core.AxisManager() + fitreport_aman.wrap('chisq', model_solved_params.chisqr) + fitreport_aman.wrap('redchi', model_solved_params.redchi) + fitreport_aman.wrap('success', int(model_solved_params.success)) # 1 for True, 0 for False + fitreport_aman.wrap('nfev', model_solved_params.nfev) # Num function evaluations + fitreport_aman.wrap('message', str(model_solved_params.message)) + model_fit_stats_aman.wrap('fit_report', fitreport_aman) + + if config.get("fit_method", "nelder") == "leastsq": + mat, param_names = get_full_correlation_matrix(model_solved_params, epoch["params"]) + corrmtx_aman = core.AxisManager(core.LabelAxis('params', param_names)) + corrmtx_aman.wrap('matrix', mat, [(0, 'params'), (1, 'params')]) + corrmtx_aman.wrap('param_names', np.array(param_names), [(0, 'params')]) + model_fit_stats_aman.wrap('correlation_matrix', corrmtx_aman) - # parameter_fit_stats = build_param_fit_stat_aman(model_solved_params) - # epoch["solver_aman"].wrap("parameter_fit_stats", parameter_fit_stats, overwrite=True) + epoch["solver_aman"].wrap('model_fit_statistics', model_fit_stats_aman) # Recalculate best-fit modeled points + logger.info("Calculating RMS for %s", epoch["name"]) modeled_fits, fit_residuals_i2, rms_i2, model_reference = apply_model_params(xieta_model, epoch["solver_aman"].pointing_model, pm_version, @@ -1135,6 +1068,8 @@ def main(config_path: str): plotter = ModelFitsPlotter(solver_aman=epoch["solver_aman"], config=config, save_dir=save_dir, + t0=t0, + 
tf=tf, iteration_tag=tag, save_figure=True, plotlims=plotlims) @@ -1156,6 +1091,8 @@ def main(config_path: str): plotter = ModelFitsPlotter(solver_aman=epoch["solver_aman"], config=config, save_dir=save_dir, + t0=t0, + tf=tf, iteration_tag=f"_{epoch["name"]}", save_figure=True, plotlims=plotlims) @@ -1180,8 +1117,6 @@ def main(config_path: str): solver_aman.pointing_model.move(par, None) solver_aman.wrap("ot_float_aman", ot_float_aman) epoch["solver_aman"] = solver_aman - - epoch["solver_aman"].save(h5_filename, group=epoch["name"], overwrite=True) db.add_entry( {"obs:timestamp": (epoch["begin_timerange"], epoch["end_timerange"]), "dataset": f"{epoch['name']}/pointing_model"}, @@ -1200,8 +1135,8 @@ def main(config_path: str): else: test_params = epoch["solver_aman"].pointing_model if "ot_float_aman" in epoch["solver_aman"]._assignments: - test_params = test_params.merge(epoch["solver_aman"].ot_float_aman) - + test_params = test_params.merge(epoch["solver_aman"].o +t_float_aman) full_aman = analyze_PM_with_all_dets(config, t0, tf, test_params) logger.info(f"for this epoch: {epoch["name"]}") logger.info(f"full rms: {full_aman.rms:.3f} (arcmin) ") @@ -1210,9 +1145,12 @@ def main(config_path: str): plotter = ModelFitsPlotter(solver_aman=full_aman, config=config, save_dir=save_dir, + t0 = t0, + tf = tf, iteration_tag=f"_{epoch["name"]}", save_figure=True, - plotlims=plotlims) + plotlims=plotlims, + ) plotter.plot_full_residuals_across_focalplane() plotter.plot_full_deltas_across_focalplane() plotter.plot_full_histogram() @@ -1227,7 +1165,7 @@ def main(config_path: str): #################### class ModelFitsPlotter: - def __init__(self, solver_aman, config, save_dir, iteration_tag="", save_figure=True, plotlims=None): + def __init__(self, solver_aman, config, save_dir, t0, tf, iteration_tag="", save_figure=True, plotlims=None): self.aman = solver_aman self.config = config @@ -1240,7 +1178,8 @@ def __init__(self, solver_aman, config, save_dir, iteration_tag="", 
save_figure= plot_dir = os.path.join(save_dir, "plots") os.makedirs(plot_dir, exist_ok=True) self.plot_dir = plot_dir - + self.t0 = t0 + self.tf = tf self.platform = config.get("platform") self.pm_version = config.get("pm_version") self.ufms = config.get("ufms") @@ -1264,7 +1203,7 @@ def plot_full_unmodeled_residuals(self): full_dxi_av = np.nanmean(self.aman.dxi) full_deta_av = np.nanmean(self.aman.deta) if "sat" in platform: - elmin, elmax = 45, 65 + elmin, elmax = 40, 65 rollmin, rollmax = -45, 45 azmin, azmax = 0, 360 if platform=="satp2": @@ -1365,6 +1304,7 @@ def plot_full_unmodeled_residuals(self): plt.savefig(f"{plot_dir}/{platform}_full_2D_Residuals_Az{tag}.png", dpi=350) plt.close() + def plot_full_residuals_across_focalplane(self): platform = self.platform plot_dir = self.plot_dir @@ -1419,7 +1359,7 @@ def plot_full_deltas_across_focalplane(self): plt.xlim(-.31, .31);plt.ylim(-.31, .31) if self.save_figure: plt.savefig(f"{plot_dir}/{platform}_full_FocalPlane_colored_FitDeltas{tag}.png", dpi=350) - + def plot_full_histogram(self): platform = self.platform plotlims = self.plotlims @@ -1431,6 +1371,7 @@ def plot_full_histogram(self): ufm_rms = self.aman.per_ufm_stats.rms obs_rms = self.aman.per_obs_stats.rms fit_residuals = self.aman.fit_residuals + #median = np.nanmedian(fit_residuals) plt.figure() plt.hist(fit_residuals, bins=50, range=(0, plotlims)) @@ -1440,12 +1381,16 @@ def plot_full_histogram(self): label = f'Obs RMS {obs_rms:.2f} arcmin') plt.axvline(ufm_rms, 0, 1, color='m', label=f'UFM RMS {ufm_rms:.2f} arcmin') + #plt.axvline(median, 0, 1, color='r', linestyle=':', + # label='median arcmin') + #plt.axvline(median+np.nanmedian(np.abs(fit_residuals- median))*1.4826, + # 0, 1, color = 'y', linestyle=':') plt.legend(fontsize='medium') plt.title(platform + ' 1D residuals') plt.xlabel('arcmin') if self.save_figure: - plt.savefig(f"{plot_dir}/{platform}_full_Hist_Residuals{tag}.png", dpi=350) + 
plt.savefig(f"{plot_dir}/{platform}_full_Hist_Residuals{tag}.png", dpi=350) def plot_dets_in_these_obs(self): platform = self.platform @@ -1458,7 +1403,7 @@ def plot_dets_in_these_obs(self): if platform == 'lat': elmin=10; elmax=90 else: - elmin=45; elmax=65 + elmin=40; elmax=65 plt.figure() fig, ax = plt.subplots(2,2,figsize=(11,10)) @@ -1504,7 +1449,7 @@ def plot_modeled_fits(self): ufm_list = self.ufms plotmask = np.where(weights) nom_array = np.concatenate( - [load_nom_focal_plane_full(self.config, ufm)[1] for ufm in ufm_list], + [load_nom_focal_plane_full(self.config, self.t0, ufm)[1] for ufm in ufm_list], axis=0, ) rms = np.round(fit_rms, 4) @@ -1803,30 +1748,40 @@ def plot_template_space_fits_per_detector(self): pm_version, self.aman ) + + fig, ax = plt.subplots(2, 1, figsize=(8,5)) + ax[0].hist(xi_unmod / ARCMIN - nominal_xieta_locs[0] / ARCMIN, + range=(-1 * plotlims, plotlims), bins = 30) + ax[1].hist(eta_unmod / ARCMIN - nominal_xieta_locs[1] / ARCMIN, + range=(-1 * plotlims, plotlims), bins = 30) + if self.save_figure: + plt.savefig(f"{plot_dir}/{platform}_xi_and_eta_residuals_hist.png", dpi=350) + plt.close() + #plot with weights as colorbar fig, ax = plt.subplots(figsize=(9, 6)) ax.plot(0, 0, "kx", label="Nominal Center") im = ax.scatter( xi_unmod / ARCMIN - nominal_xieta_locs[0] / ARCMIN, eta_unmod / ARCMIN - nominal_xieta_locs[1] / ARCMIN, - c=weights, + c=ancil.az_enc, s=scale_weights * 5, edgecolor="gray", lw=0.3, marker="o", alpha=0.2, cmap="viridis", - vmin=self.config.get("weight_cutoff"), - vmax=1 + vmin=0, + vmax=360 ) ax.set_xlim(-1 * plotlims, plotlims) ax.set_ylim(-1 * plotlims, plotlims) ax.set_aspect('equal', adjustable='box') - ax.set_title(f"Unmodeled fits, by fit weight") + ax.set_title(f"Unmodeled fits, by azimuth") cb = plt.colorbar(im, fraction=0.046, pad=0.04) plt.tight_layout() if self.save_figure: - plt.savefig(f"{plot_dir}/{platform}_unmodeled_fits_weights{tag}.png", dpi=350) + 
plt.savefig(f"{plot_dir}/{platform}_unmodeled_fits_WS_azimuth{tag}.png", dpi=350) plt.close() fig, ax = plt.subplots(figsize=(9, 6)) ax.plot(0, 0, "kx", label="Nominal Center") From 5b8780f39321fcc998d1971d90426a2357829e7d Mon Sep 17 00:00:00 2001 From: Elle Shaw Date: Wed, 22 Apr 2026 09:08:06 -0700 Subject: [PATCH 45/48] Fix bugs and typos. Handle measured vs template options in calculation of differential xi and eta in analyze_PM_with_all_dets function. Feature plot of correlation matrix. --- .../site_pipeline/solve_pointing_model.py | 172 +++++++++--------- 1 file changed, 86 insertions(+), 86 deletions(-) diff --git a/sotodlib/site_pipeline/solve_pointing_model.py b/sotodlib/site_pipeline/solve_pointing_model.py index 9b486693b..f18d8b1fa 100644 --- a/sotodlib/site_pipeline/solve_pointing_model.py +++ b/sotodlib/site_pipeline/solve_pointing_model.py @@ -401,41 +401,9 @@ def _apply_ot_float(xi_mod, eta_mod, solver_aman, params): eta_mod[msk] *= ot_pars[4] return xi_mod, eta_mod -#test Elle Apr 10 -def objective_model_func_lmfit( - params, pm_version, solver_aman, xieta_model, weights=True -): - if xieta_model == "measured": - xi_mod, eta_mod = model_measured_xieta(params, pm_version, solver_aman) - xi_ref, eta_ref, _ = solver_aman.measured_xieta_data - elif xieta_model == "template": - xi_mod, eta_mod = model_template_xieta(params, pm_version, solver_aman) - xi_ref, eta_ref, _ = solver_aman.nominal_xieta_locs - - xi_mod, eta_mod = _apply_ot_float(xi_mod, eta_mod, solver_aman, params) - - dist = np.sqrt((xi_ref - xi_mod) ** 2 + (eta_ref - eta_mod) ** 2) - if weights: - return dist * np.sqrt(solver_aman.weights) - else: - return dist - #weights_array = solver_aman.weights if weights else np.ones(len(dist)) - #return chi_sq(weights_array, dist) -def objective_model_func_lmfit_joint( - params, pm_version, epochs, xieta_model, weights=True -): - params = params.valuesdict() - all_residuals=[] - for epoch in epochs: - epoch_params = 
{par.split(f"_{epoch['name']}")[0]:params[par] for par in epoch["params"]} - res = objective_model_func_lmfit(epoch_params, pm_version, epoch["solver_aman"], xieta_model, weights) - all_residuals.append(res) - return np.concatenate(all_residuals) - -""" def objective_model_func_lmfit( - params, pm_version, solver_aman, xieta_model, weights=True + params, pm_version, solver_aman, xieta_model, fit_method, weights=True ): if xieta_model == "measured": xi_mod, eta_mod = model_measured_xieta(params, pm_version, solver_aman) @@ -447,27 +415,31 @@ def objective_model_func_lmfit( xi_mod, eta_mod = _apply_ot_float(xi_mod, eta_mod, solver_aman, params) dist = np.sqrt((xi_ref - xi_mod) ** 2 + (eta_ref - eta_mod) ** 2) - #print(np.nansum(dist)) weights_array = solver_aman.weights if weights else np.ones(len(dist)) - return chi_sq(weights_array, dist) -def objective_model_func_lmfit_joint( - params, pm_version, epochs, xieta_model, weights=True -): - params = params.valuesdict() - chisq = 0 - t1 = time.time() - for epoch in epochs: - chisq += objective_model_func_lmfit({par.split(f"_{epoch['name']}")[0]:params[par] for par in epoch["params"]}, pm_version, epoch["solver_aman"], xieta_model, weights) - return chisq + return (dist * np.sqrt(weights_array)) -def chi_sq(weights, dist): - #N = np.identity(len(dist)) * weights - #chi2 = dist.T @ N @ dist - chi2 = np.nansum(dist ** 2 * weights) - return chi2 -""" +def objective_model_func_lmfit_joint( + params, pm_version, epochs, xieta_model, fit_method, weights=True): + params = params.valuesdict() + if fit_method == "leastsq" or fit_method == "least_squares": + all_residuals=[] + for epoch in epochs: + epoch_params = {par.split(f"_{epoch['name']}")[0]:params[par] for par in epoch["params"]} + res = objective_model_func_lmfit(epoch_params, pm_version, epoch["solver_aman"], xieta_model, fit_method, weights) + all_residuals.append(res) + return np.concatenate(all_residuals) + + else: + chisq = 0 + for epoch in epochs: + epoch_params = 
{par.split(f"_{epoch['name']}")[0]:params[par] for par in epoch["params"]} + res = objective_model_func_lmfit(epoch_params, pm_version, epoch["solver_aman"], xieta_model, fit_method, weights) + chisq += np.nansum(res**2) + + return chisq + def model_template_xieta(params, pm_version, aman): """ @@ -586,11 +558,12 @@ def analyze_PM_with_all_dets(config, t0, tf, params): # Apply model to data. xieta_model = config.get("xieta_model", "measured") (full_modeled, full_residuals, rms, _ - ) = apply_model_params(xieta_model, #"template", + ) = apply_model_params(xieta_model, params, config.get("pm_version"), full_aman) - to_comp = "nominal_xieta_locs" + if xieta_model == "template": + to_comp = "nominal_xieta_locs" if xieta_model == "measured": to_comp = "measured_xieta_data" full_aman.wrap("full_modeled", np.array(full_modeled), @@ -603,8 +576,9 @@ def analyze_PM_with_all_dets(config, t0, tf, params): modelfit_aman.wrap("eta", full_modeled[1], overwrite=True) full_aman.wrap("modeled_fits", modelfit_aman, overwrite=True) - dxi_all = (full_aman.full_modeled[0] - xi_nom) / DEG * 60 #modeled - nominal - deta_all = (full_aman.full_modeled[1] - eta_nom) / DEG * 60 + dxi_all = (full_aman.full_modeled[0] - full_aman[to_comp][0]) / DEG * 60 + deta_all = (full_aman.full_modeled[1] - full_aman[to_comp][1]) / DEG * 60 + full_aman.wrap("dxi", dxi_all, [(0, "samps")]) full_aman.wrap("deta", deta_all, [(0, "samps")]) @@ -693,6 +667,7 @@ def main(config_path: str): pm_version = config.get("pm_version") # e.g. sat_v1 sv_tag = config.get("solution_version_tag") # e.g. 
YYMMDDr# xieta_model = config.get("xieta_model", "measured") + fit_method = config.get("fit_method", "leastsq") xe_tag = f"{xieta_model}_xieta" iterate_cutoff = config.get("iterate_cutoff", None) plotlims = config.get("plotlims", 20) @@ -833,12 +808,13 @@ def main(config_path: str): # Solve for Model Parameters # use chosen xieta_model to solve for parameters + breakpoint() model_solved_params = minimize( objective_model_func_lmfit_joint, fit_params, - method=config.get("fit_method", "nelder"), + method=fit_method, nan_policy="omit", - args=(pm_version, epochs, xieta_model, use_weights), + args=(pm_version, epochs, xieta_model, fit_method, use_weights), **config.get("fit_options", {}), ) logger.info("Ran 1st Minimization") @@ -877,12 +853,12 @@ def main(config_path: str): str(model_solved_params.message)) model_fit_stats_aman.wrap('fit_report', fitreport_aman) - if config.get("fit_method", "nelder") == "leastsq": - mat, param_names = get_full_correlation_matrix(model_solved_params, epoch["params"]) - corrmtx_aman = core.AxisManager(core.LabelAxis('params', param_names)) - corrmtx_aman.wrap('matrix', mat, [(0, 'params'), (1, 'params')]) - corrmtx_aman.wrap('param_names', np.array(param_names), [(0, 'params')]) - model_fit_stats_aman.wrap('correlation_matrix', corrmtx_aman) + #if config.get("fit_method", "leastsq") == "leastsq": + mat, param_names = get_full_correlation_matrix(model_solved_params, epoch["params"]) + corrmtx_aman = core.AxisManager(core.LabelAxis('params', param_names)) + corrmtx_aman.wrap('matrix', mat, [(0, 'params'), (1, 'params')]) + corrmtx_aman.wrap('param_names', np.array(param_names), [(0, 'params')]) + model_fit_stats_aman.wrap('correlation_matrix', corrmtx_aman) epoch["solver_aman"].wrap('model_fit_statistics', model_fit_stats_aman) # Model template and measured points using parameters found above @@ -923,12 +899,14 @@ def main(config_path: str): plotter.plot_residuals_vs_ancil() plotter.plot_xieta_cross_residuals() 
plotter.plot_xieta_residuals() + plotter.plot_fit_correlation_matrix() else: plotter.plot_modeled_fits() plotter.plot_template_space_fits_per_detector() plotter.plot_residuals_vs_ancil() plotter.plot_residuals_histograms() plotter.plot_dets_in_these_obs() + plotter.plot_fit_correlation_matrix() tot_bad=0 if iterate_cutoff is not None: @@ -968,7 +946,7 @@ def main(config_path: str): # zero-ing the weights. good_fit_inds = np.where(fit_residuals_i1 < cutoff)[0] _, _, masked_rms, _ = apply_model_params(xieta_model, - epoch["solver_aman"].pointing_model, + epoch["solver_aman"].pointing_model, pm_version, epoch["solver_aman"], use_inds=good_fit_inds) @@ -979,14 +957,13 @@ def main(config_path: str): if tot_bad == 0: logger.info("No bad points found so not running second fit!") - if tot_bad > 0: model_solved_params = minimize( objective_model_func_lmfit_joint, fit_params, - method=config.get("fit_method", "leastsq"), + method=fit_method, nan_policy="omit", - args=(pm_version, epochs, xieta_model, use_weights), + args=(pm_version, epochs, xieta_model, fit_method, use_weights), **config.get("fit_options", {}) ) @@ -1027,12 +1004,12 @@ def main(config_path: str): fitreport_aman.wrap('message', str(model_solved_params.message)) model_fit_stats_aman.wrap('fit_report', fitreport_aman) - if config.get("fit_method", "nelder") == "leastsq": - mat, param_names = get_full_correlation_matrix(model_solved_params, epoch["params"]) - corrmtx_aman = core.AxisManager(core.LabelAxis('params', param_names)) - corrmtx_aman.wrap('matrix', mat, [(0, 'params'), (1, 'params')]) - corrmtx_aman.wrap('param_names', np.array(param_names), [(0, 'params')]) - model_fit_stats_aman.wrap('correlation_matrix', corrmtx_aman) + #if config.get("fit_method", "leastsq") == "leastsq": + mat, param_names = get_full_correlation_matrix(model_solved_params, epoch["params"]) + corrmtx_aman = core.AxisManager(core.LabelAxis('params', param_names)) + corrmtx_aman.wrap('matrix', mat, [(0, 'params'), (1, 'params')]) + 
corrmtx_aman.wrap('param_names', np.array(param_names), [(0, 'params')]) + model_fit_stats_aman.wrap('correlation_matrix', corrmtx_aman) epoch["solver_aman"].wrap('model_fit_statistics', model_fit_stats_aman) @@ -1080,11 +1057,13 @@ def main(config_path: str): plotter.plot_template_space_fits_per_wafer() plotter.plot_xieta_cross_residuals() plotter.plot_xieta_residuals() + plotter.plot_fit_correlation_matrix() else: plotter.plot_modeled_fits() plotter.plot_template_space_fits_per_detector() plotter.plot_residuals_histograms() plotter.plot_dets_in_these_obs() + plotter.plot_fit_correlation_matrix() else: if config.get("make_plots"): for epoch in epochs: @@ -1107,7 +1086,7 @@ def main(config_path: str): for epoch in epochs: solver_aman = epoch["solver_aman"] # Remove OT float parameters - if config["float_ots"]: + if config.get("float_ots", False): ot_float_aman = core.AxisManager() for ot in np.unique(solver_aman.ot_list): for par in [f"{n}_{ot}" for n in ["xioff", "etaoff", "rot", "xiscale", "etascale"]]: @@ -1135,8 +1114,7 @@ def main(config_path: str): else: test_params = epoch["solver_aman"].pointing_model if "ot_float_aman" in epoch["solver_aman"]._assignments: - test_params = test_params.merge(epoch["solver_aman"].o -t_float_aman) + test_params = test_params.merge(epoch["solver_aman"].ot_float_aman) full_aman = analyze_PM_with_all_dets(config, t0, tf, test_params) logger.info(f"for this epoch: {epoch["name"]}") logger.info(f"full rms: {full_aman.rms:.3f} (arcmin) ") @@ -1749,15 +1727,6 @@ def plot_template_space_fits_per_detector(self): self.aman ) - fig, ax = plt.subplots(2, 1, figsize=(8,5)) - ax[0].hist(xi_unmod / ARCMIN - nominal_xieta_locs[0] / ARCMIN, - range=(-1 * plotlims, plotlims), bins = 30) - ax[1].hist(eta_unmod / ARCMIN - nominal_xieta_locs[1] / ARCMIN, - range=(-1 * plotlims, plotlims), bins = 30) - if self.save_figure: - plt.savefig(f"{plot_dir}/{platform}_xi_and_eta_residuals_hist.png", dpi=350) - plt.close() - #plot with weights as colorbar 
fig, ax = plt.subplots(figsize=(9, 6)) ax.plot(0, 0, "kx", label="Nominal Center") @@ -1859,7 +1828,6 @@ def plot_template_space_fits_per_detector(self): plt.savefig(f"{plot_dir}/{platform}_unmodeled_fits_WS_corotator{tag}.png", dpi=350) plt.close() - def plot_residuals_vs_ancil(self): platform = self.platform plot_dir = self.plot_dir @@ -2226,7 +2194,39 @@ def plot_xieta_cross_residuals(self): plt.savefig(f"{plot_dir}/{platform}_xieta_cross_residuals{tag}.png", dpi=350) plt.close() - + def plot_fit_correlation_matrix(self): + platform = self.platform + plot_dir = self.plot_dir + tag = self.tag + CM = self.aman.model_fit_statistics.correlation_matrix.matrix + names = self.aman.model_fit_statistics.correlation_matrix.param_names + n_param = len(names) + mask = np.triu_indices(n_param, k=1) + CM_plot = CM.copy() + CM_plot[mask] = np.nan + + fig, ax = plt.subplots(figsize=(9, 8)) + im = ax.imshow(CM_plot,cmap='RdBu_r', vmin=-1, vmax=1) + cbar = fig.colorbar(im, ax=ax, fraction=0.046, pad=0.04) + cbar.set_label('Correlation Coefficient', rotation=270, labelpad=15) + ax.set_xticks(np.arange(n_param)) + ax.set_yticks(np.arange(n_param)) + ax.set_xticklabels(names) + ax.set_yticklabels(names) + + plt.setp(ax.get_xticklabels(), rotation=45, ha="right", rotation_mode="anchor") + for i in range(n_param): + for j in range(n_param): + if i>=j: + val = CM_plot[i, j] + color = "white" if abs(val) > 0.5 else "black" + ax.text(j, i, f"{val:.2f}", ha="center", va="center", color=color) + ax.spines['top'].set_visible(False) + ax.spines['right'].set_visible(False) + plt.grid(False) # Grid usually looks messy on masked heatmaps + if self.save_figure: + plt.savefig(f"{plot_dir}/{platform}_correlation_matrix{tag}.png", dpi=350) + plt.close() ############ if __name__ == "__main__": From 0288873fe1aebafc709d2c5c9f5572d4370f76c0 Mon Sep 17 00:00:00 2001 From: Elle Shaw Date: Wed, 22 Apr 2026 09:10:49 -0700 Subject: [PATCH 46/48] Remove breakpoint(). 
Error remains on float_ots == True --- sotodlib/site_pipeline/solve_pointing_model.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sotodlib/site_pipeline/solve_pointing_model.py b/sotodlib/site_pipeline/solve_pointing_model.py index f18d8b1fa..9ef587cdf 100644 --- a/sotodlib/site_pipeline/solve_pointing_model.py +++ b/sotodlib/site_pipeline/solve_pointing_model.py @@ -421,7 +421,8 @@ def objective_model_func_lmfit( def objective_model_func_lmfit_joint( - params, pm_version, epochs, xieta_model, fit_method, weights=True): + params, pm_version, epochs, xieta_model, fit_method, weights=True +): params = params.valuesdict() if fit_method == "leastsq" or fit_method == "least_squares": all_residuals=[] @@ -808,7 +809,6 @@ def main(config_path: str): # Solve for Model Parameters # use chosen xieta_model to solve for parameters - breakpoint() model_solved_params = minimize( objective_model_func_lmfit_joint, fit_params, From 51b1ba7e14438529a0412ad32834aec1231b1594 Mon Sep 17 00:00:00 2001 From: Saianeesh Keshav Haridas Date: Wed, 22 Apr 2026 23:23:18 -0700 Subject: [PATCH 47/48] fix: handle nans in OT float and catch some edge cases --- .../site_pipeline/solve_pointing_model.py | 27 +++++++++++++------ 1 file changed, 19 insertions(+), 8 deletions(-) diff --git a/sotodlib/site_pipeline/solve_pointing_model.py b/sotodlib/site_pipeline/solve_pointing_model.py index 9ef587cdf..8d3a6e72a 100644 --- a/sotodlib/site_pipeline/solve_pointing_model.py +++ b/sotodlib/site_pipeline/solve_pointing_model.py @@ -139,6 +139,8 @@ def _create_size_mask(obs_index): def _create_culling_mask(obs_index, cull_dets): + if cull_dets <= 1: + return np.ones_like(obs_index, dtype=bool) # Remove a random fraction 1/cull_dets of each dataset unique, counts = np.unique(obs_index, return_counts=True) #min_count = min(counts) @@ -392,8 +394,8 @@ def _apply_ot_float(xi_mod, eta_mod, solver_aman, params): theta = ot_pars[2] R = np.array([[np.cos(theta), -np.sin(theta)], 
[np.sin(theta), np.cos(theta)]]) - xi_cent = np.mean(xi_mod[msk]) - eta_cent = np.mean(eta_mod[msk]) + xi_cent = np.nanmean(xi_mod[msk]) + eta_cent = np.nanmean(eta_mod[msk]) xi_mod[msk], eta_mod[msk] = R@np.vstack((xi_mod[msk] - xi_cent, eta_mod[msk] - eta_cent)) xi_mod[msk] += xi_cent eta_mod[msk] += eta_cent @@ -403,7 +405,7 @@ def _apply_ot_float(xi_mod, eta_mod, solver_aman, params): def objective_model_func_lmfit( - params, pm_version, solver_aman, xieta_model, fit_method, weights=True + params, pm_version, solver_aman, xieta_model, weights=True ): if xieta_model == "measured": xi_mod, eta_mod = model_measured_xieta(params, pm_version, solver_aman) @@ -411,6 +413,8 @@ def objective_model_func_lmfit( elif xieta_model == "template": xi_mod, eta_mod = model_template_xieta(params, pm_version, solver_aman) xi_ref, eta_ref, _ = solver_aman.nominal_xieta_locs + else: + raise ValueError("xieta_model must be measured or template") xi_mod, eta_mod = _apply_ot_float(xi_mod, eta_mod, solver_aman, params) @@ -428,7 +432,7 @@ def objective_model_func_lmfit_joint( all_residuals=[] for epoch in epochs: epoch_params = {par.split(f"_{epoch['name']}")[0]:params[par] for par in epoch["params"]} - res = objective_model_func_lmfit(epoch_params, pm_version, epoch["solver_aman"], xieta_model, fit_method, weights) + res = objective_model_func_lmfit(epoch_params, pm_version, epoch["solver_aman"], xieta_model, weights) all_residuals.append(res) return np.concatenate(all_residuals) @@ -436,7 +440,7 @@ def objective_model_func_lmfit_joint( chisq = 0 for epoch in epochs: epoch_params = {par.split(f"_{epoch['name']}")[0]:params[par] for par in epoch["params"]} - res = objective_model_func_lmfit(epoch_params, pm_version, epoch["solver_aman"], xieta_model, fit_method, weights) + res = objective_model_func_lmfit(epoch_params, pm_version, epoch["solver_aman"], xieta_model, weights) chisq += np.nansum(res**2) return chisq @@ -482,9 +486,11 @@ def model_measured_xieta(params, pm_version, aman): 
if "sat" in pm_version: az, el, roll = pm._get_sat_enc_radians(aman.ancil) q_nomodel = quat.rotation_lonlat(-az, el, 0) - if "lat" in pm_version: + elif "lat" in pm_version: az, el, roll = pm._get_lat_enc_radians(aman.ancil) q_nomodel = quat.rotation_lonlat(-az, el, roll) + else: + raise ValueError("Pointing model must be for sat or lat!") boresight = pm.apply_pointing_model(aman, pointing_model=params, wrap=False) az1, el1, roll1 = boresight.az, boresight.el, boresight.roll @@ -798,6 +804,8 @@ def main(config_path: str): epoch["solver_aman"] = solver_aman logger.info("Built axis manager for epoch %s", epoch["name"]) + else: + raise ValueError("fit_type must be detector or ufm_center") ################################ # END of SPLIT: Now fit the parameters @@ -885,6 +893,7 @@ def main(config_path: str): logger.info(f"2 stdev away from residual Median: {cutoff:.2f} arcmin") if config.get("make_plots"): + t0, tf = epoch["begin_timerange"], epoch["end_timerange"] plotter = ModelFitsPlotter(solver_aman=epoch["solver_aman"], config=config, save_dir=save_dir, @@ -1042,6 +1051,7 @@ def main(config_path: str): if config.get("make_plots"): tag = f"_{epoch['name']}_i2" + t0, tf = epoch["begin_timerange"], epoch["end_timerange"] plotter = ModelFitsPlotter(solver_aman=epoch["solver_aman"], config=config, save_dir=save_dir, @@ -1067,6 +1077,7 @@ def main(config_path: str): else: if config.get("make_plots"): for epoch in epochs: + t0, tf = epoch["begin_timerange"], epoch["end_timerange"] plotter = ModelFitsPlotter(solver_aman=epoch["solver_aman"], config=config, save_dir=save_dir, @@ -1109,7 +1120,7 @@ def main(config_path: str): #Fill up axis manager with ALL the data (only cuts from culling and time stamps remain) t0, tf = epoch["begin_timerange"], epoch["end_timerange"] - if "pointing_model_i1" in solver_aman: + if "pointing_model_i1" in epoch["solver_aman"]: test_params = epoch["solver_aman"].pointing_model_i1 else: test_params = epoch["solver_aman"].pointing_model @@ 
-1324,7 +1335,7 @@ def plot_full_deltas_across_focalplane(self): roll_c = self.aman.roll_c fig, ax = plt.subplots() - im = ax.quiver(xi, eta, dxi, deta, roll_c, angles='xy', scale=np.deg2rad(24), scale_units='xy', alpha=.8) + im = ax.quiver(xi, eta, dxi, deta, roll_c, angles='xy', scale=1/34.4, scale_units='xy', alpha=.2) sm = cm.ScalarMappable(cmap=im.cmap, norm=im.norm) sm.set_array([]) plt.colorbar(sm, ax=ax, label='Roll') From 4eb0d488e6d56053b311833c4044b83e88f595da Mon Sep 17 00:00:00 2001 From: Saianeesh Keshav Haridas Date: Thu, 23 Apr 2026 08:26:08 -0700 Subject: [PATCH 48/48] fix: minor plotting stuff --- sotodlib/site_pipeline/solve_pointing_model.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/sotodlib/site_pipeline/solve_pointing_model.py b/sotodlib/site_pipeline/solve_pointing_model.py index 8d3a6e72a..3bf8e55a7 100644 --- a/sotodlib/site_pipeline/solve_pointing_model.py +++ b/sotodlib/site_pipeline/solve_pointing_model.py @@ -1863,6 +1863,8 @@ def plot_residuals_vs_ancil(self): xi_ref, eta_ref, _ = measured_xieta_data elif xieta_model == "template": xi_ref, eta_ref, _ = nominal_xieta_locs + else: + raise ValueError("xieta_model must be measured or template") if "sat" in platform: third_enc = ancil.boresight_enc.copy() @@ -1870,9 +1872,11 @@ def plot_residuals_vs_ancil(self): elif "lat" in platform: third_enc = ancil.corotator_enc.copy() third_enc_name = "Corotator" + else: + raise ValueError("Platform must be sat or lat") fig, ax = plt.subplots(2, 3, figsize=(8, 6), sharex="col", sharey="row") plt.setp(ax[0, 1].get_yticklabels(), visible=False) - plt.suptitle(r"$\delta \xi$, $\delta \eta$" + f" vs Az, El, {third_enc_name}") + plt.suptitle(r"$\delta \xi$, $\delta \eta$" + f" vs Az, El, {third_enc_name} ({tag.split('_')[1]})") for k in range(6): i = k // 3 j = k % 3 @@ -1882,12 +1886,16 @@ def plot_residuals_vs_ancil(self): elif i == 1: model = eta_model_fit ref = eta_ref + else: + raise ValueError("Trying to plot 
an axis that doesn't exist?") if j == 0: x = ancil.az_enc % 360 elif j == 1: x = ancil.el_enc elif j == 2: x = third_enc + else: + raise ValueError("Trying to plot an encoder that doesn't exist?") ax[i, j].scatter( x[plotmask], (model - ref)[plotmask] / ARCMIN, @@ -1905,12 +1913,12 @@ def plot_residuals_vs_ancil(self): w=scale_weights[plotmask], ) xrange = np.arange(np.nanmin(x), np.nanmax(x)) - ax[i, j].plot( + ax[i, j].errorbar( xrange, mxb[0] * xrange + mxb[1], - "r", + color="r", lw=1, - label=f"Slope {np.round(mxb[0],4)}\n [arcmin/deg]", + label=f"Slope {np.round(mxb[0],4)} \n [arcmin/deg]", ) ax[i, j].legend(fontsize="small") ax[0, 0].set_ylabel("dXi [arcmin]")