diff --git a/rubin_sim/maf/batches/science_radar_batch.py b/rubin_sim/maf/batches/science_radar_batch.py
index c34f7e63..805bfbd1 100644
--- a/rubin_sim/maf/batches/science_radar_batch.py
+++ b/rubin_sim/maf/batches/science_radar_batch.py
@@ -570,9 +570,6 @@ def science_radar_batch(
         sqlconstraint = "night <= %s" % (yr_cut * 365.25 + 0.5)
         sqlconstraint += ' and scheduler_note not like "DD%"'
         info_label = f"{bandpass} band non-DD year {yr_cut}"
-        ThreebyTwoSummary_simple = metrics.StaticProbesFoMEmulatorMetricSimple(
-            nside=nside, year=yr_cut, metric_name="3x2ptFoM_simple"
-        )
         ThreebyTwoSummary = maf.StaticProbesFoMEmulatorMetric(nside=nside, metric_name="3x2ptFoM")

         m = metrics.ExgalM5WithCuts(
diff --git a/rubin_sim/maf/maf_contrib/star_counts/coords.py b/rubin_sim/maf/maf_contrib/star_counts/coords.py
index f20a3679..76f07153 100644
--- a/rubin_sim/maf/maf_contrib/star_counts/coords.py
+++ b/rubin_sim/maf/maf_contrib/star_counts/coords.py
@@ -8,9 +8,9 @@
 # Two different functions are present that do the conversion, and a third
 # that uses ephem package, for redundancy purposes.
 # For use with Field Star Count metric
-
-
+import astropy.units as u
 import numpy as np
+from astropy.coordinates import SkyCoord
 from scipy.optimize import fsolve

 rad1 = np.radians(282.25)
@@ -76,12 +76,12 @@ def eq_gal2(eq_ra, eq_dec):


 def eq_gal3(eq_ra, eq_dec):
-    coordset = ephem.Equatorial(np.radians(eq_ra), np.radians(eq_dec), epoch="2000")
-    g = ephem.Galactic(coordset)
-    templon, templat = float(g.lon), float(g.lat)
-    l_deg = np.degrees(templon)
-    b_deg = np.degrees(templat)
-    return b_deg, l_deg
+    # assume input ra, dec are in deg; SkyCoord handles the unit conversion
+    coordset = SkyCoord(ra=eq_ra * u.deg, dec=eq_dec * u.deg, frame="icrs")
+    # convert to galactic
+    galactic = coordset.galactic
+    # return b, l in deg
+    return galactic.b.value, galactic.l.value


 def gal_cyn(b_deg, l_deg, dist):
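Reviewer note on the coords.py hunk: as first submitted, the `eq_gal3` rewrite mixed unit conventions (`np.rad` does not exist, `u.radians` is not a unit attribute, and explicit units conflicted with `unit="deg"`) and called `.values` on a scalar `Angle`; the version above feeds degrees straight to `SkyCoord` and keeps the old `(b, l)` return order. The module comment still mentions the ephem package and could be updated in a follow-up. A quick sanity check a reviewer could run (illustrative snippet, not part of the patch):

```python
import astropy.units as u
from astropy.coordinates import SkyCoord

# The ICRS position of the north galactic pole should map to b ~ +90 deg
pole = SkyCoord(ra=192.85948 * u.deg, dec=27.12825 * u.deg, frame="icrs")
galactic = pole.galactic
print(galactic.b.value)  # ~90.0 (galactic latitude, degrees)
print(galactic.l.value)  # longitude is ill-defined this close to the pole
```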
diff --git a/rubin_sim/maf/maf_contrib/star_counts/starcount_bymass.py b/rubin_sim/maf/maf_contrib/star_counts/starcount_bymass.py
index 31875782..d8d2e176 100644
--- a/rubin_sim/maf/maf_contrib/star_counts/starcount_bymass.py
+++ b/rubin_sim/maf/maf_contrib/star_counts/starcount_bymass.py
@@ -58,12 +58,15 @@ def noise_calc(band):
     m5 = {"u": 23.9, "g": 25.0, "r": 24.7, "i": 24.0, "z": 23.3, "y": 22.1}
     sigma = 0.03
     sigma_sys = 0.005
-    fun = (
-        lambda x: sigma_sys**2
-        + (0.04 - gamma[band]) * 10 ** (0.4 * (x - m5[band]))
-        + gamma[band] * 10 ** (0.8 * (x - m5[band]))
-        - sigma**2
-    )
+
+    def fun(x):
+        return (
+            sigma_sys**2
+            + (0.04 - gamma[band]) * 10 ** (0.4 * (x - m5[band]))
+            + gamma[band] * 10 ** (0.8 * (x - m5[band]))
+            - sigma**2
+        )
+
     return newton(fun, 25)
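As originally submitted, the lambda-to-`def` conversion above dropped the `return` and split the expression into four no-op statements, so `fun` would return `None` and `newton` would fail; the fix restores the single expression. Conceptually, the root-find solves for the magnitude at which the modeled photometric variance reaches `sigma**2`. A self-contained sketch of the same computation (the `gamma` value here is a typical assumption for illustration; the real per-band dict is defined above the hunk):

```python
from scipy.optimize import newton

gamma = {"r": 0.039}  # assumed value; actual dict lives elsewhere in the module
m5 = {"r": 24.7}
sigma, sigma_sys = 0.03, 0.005

def fun(x):
    # total variance (systematic + random error model) minus the target sigma**2
    return (
        sigma_sys**2
        + (0.04 - gamma["r"]) * 10 ** (0.4 * (x - m5["r"]))
        + gamma["r"] * 10 ** (0.8 * (x - m5["r"]))
        - sigma**2
    )

print(newton(fun, 25))  # magnitude at which the error model hits sigma = 0.03
```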
diff --git a/rubin_sim/maf/metrics/star_density.py b/rubin_sim/maf/metrics/star_density.py
index ff9a9280..57bf5ac6 100644
--- a/rubin_sim/maf/metrics/star_density.py
+++ b/rubin_sim/maf/metrics/star_density.py
@@ -11,12 +11,13 @@ class StarDensityMetric(BaseMetric):
     """Interpolate the stellar luminosity function to return the number of
     stars per square arcsecond brighter than the mag_limit. Note that the map
     is built from CatSim stars in the range 20 < r < 28.
-    mag_limit values outside that the range of the map's starMapBins will return self.badval
+    mag_limit values outside the range of the map's starMapBins will
+    return self.badval

-    The stellar density maps are available in any bandpass, but bandpasses other
-    than r band must use a pre-configured StellarDensityMap (not just the default). 
-    In other words, when setting up the metric bundle for an i-band stellar density
-    using (as an example) a HealpixSlicer:
+    The stellar density maps are available in any bandpass, but bandpasses
+    other than r band must use a pre-configured StellarDensityMap (not just the
+    default). In other words, when setting up the metric bundle for an i-band
+    stellar density using (as an example) a HealpixSlicer:
     ```
     map = maf.StellarDensityMap(filtername='i')
     metric = maf.StarDensityMetric(filtername='i', mag_limit=25.0)
@@ -31,8 +32,9 @@ class StarDensityMetric(BaseMetric):
         Returns number of stars per square arcsecond brighter than this limit.
         Default 25.
     filtername : `str`, opt
-        Which filter to evaluate the luminosity function in; Note that using bands other than r
-        will require setting up a custom (rather than default) version of the stellar density map.
+        Which filter to evaluate the luminosity function in. Note that using
+        bands other than r will require setting up a custom (rather than
+        default) version of the stellar density map.
         Default r.
     units : `str`, opt
         Units for the output values. Default "stars/sq arcsec".
@@ -42,7 +44,8 @@ class StarDensityMetric(BaseMetric):
     Returns
     -------
     result : `float`
-        Number of stars brighter than mag_limit in filtername, based on the stellar density map.
+        Number of stars brighter than mag_limit in filtername, based on the
+        stellar density map.
     """

     def __init__(
@@ -68,6 +71,7 @@ def run(self, data_slice, slice_point=None):
         try:
             result = interp(self.mag_limit) / (3600.0**2)
         except ValueError:
-            # This probably means the interpolation went out of range (magLimit <15 or >28)
+            # This probably means the interpolation went out of range
+            # (magLimit <15 or >28)
             return self.badval
         return result
diff --git a/rubin_sim/maf/metrics/summary_metrics.py b/rubin_sim/maf/metrics/summary_metrics.py
index eac41369..37695431 100644
--- a/rubin_sim/maf/metrics/summary_metrics.py
+++ b/rubin_sim/maf/metrics/summary_metrics.py
@@ -173,7 +173,8 @@ def run(self, data_slice, slice_point=None):

 class NormalizeMetric(BaseMetric):
     """
-    Return a metric values divided by 'norm_val'. Useful for turning summary statistics into fractions.
+    Return metric values divided by 'norm_val'.
+    Useful for turning summary statistics into fractions.
     """

     def __init__(self, col="metricdata", norm_val=1, **kwargs):
@@ -190,7 +191,8 @@ def run(self, data_slice, slice_point=None):

 class ZeropointMetric(BaseMetric):
     """
-    Return a metric values with the addition of 'zp'. Useful for altering the zeropoint for summary statistics.
+    Return metric values with the addition of 'zp'.
+    Useful for altering the zeropoint for summary statistics.
     """

     def __init__(self, col="metricdata", zp=0, **kwargs):
@@ -271,7 +273,8 @@ def run(self, data_slice, slice_point=None):
     Returns:
         float: Interpolated static-probe statistical Figure-of-Merit.
     Raises:
-        ValueError: If year is not one of the 4 for which a FoM is calculated
+        ValueError: If year is not one of the 4 for which a FoM is
+        calculated
     """
     # Chop off any outliers
     good_pix = np.where(data_slice[self.col] > 0)[0]
diff --git a/rubin_sim/maf/metrics/surfb_metric.py b/rubin_sim/maf/metrics/surfb_metric.py
index d6c80f0d..60252147 100644
--- a/rubin_sim/maf/metrics/surfb_metric.py
+++ b/rubin_sim/maf/metrics/surfb_metric.py
@@ -21,7 +21,8 @@ def surface_brightness_limit_approx(
     tot_area=100.0,
     mag_diff_warn=0.1,
 ):
-    """Compute surface brightness limit in 3 limiting cases, return the brightest. 
+    """Compute surface brightness limit in 3 limiting cases, return the
+    brightest.

     Algerbra worked out in this technote:
     https://github.com/lsst-sims/smtn-016
@@ -52,8 +53,8 @@ def surface_brightness_limit_approx(

     Returns
     -------
-    surface brightness limit in mags/sq arcsec
-    aka the surface brightness that reaches SNR=nsigma when measured over tot_area.
+    surface brightness limit in mags/sq arcsec, aka the surface brightness that
+    reaches SNR=nsigma when measured over tot_area.
     """

     a_pix = pixscale**2
@@ -76,7 +77,8 @@ def surface_brightness_limit_approx(

     if np.min([d1, d2, d3]) < mag_diff_warn:
         warnings.warn(
-            "Limiting magnitudes in different cases are within %.3f mags, result may be too optimistic by up 0.38 mags/sq arcsec."
+            "Limiting magnitudes in different cases are within %.3f mags, "
+            "result may be too optimistic by up to 0.38 mags/sq arcsec."
             % mag_diff_warn
         )
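One detail in the surfb_metric hunk worth calling out: the first draft wrapped the warning with a backslash inside the string literal, which silently embeds the next line's indentation in the emitted message; adjacent string literals concatenate cleanly instead. Illustrative only:

```python
# Backslash continuation inside a literal keeps the following indentation:
broken = "result may be too optimistic \
    by up to 0.38 mags/sq arcsec."
# Implicit concatenation of adjacent literals does not:
fixed = (
    "result may be too optimistic "
    "by up to 0.38 mags/sq arcsec."
)
print(repr(broken))  # '...optimistic     by up to...' with stray spaces
print(repr(fixed))   # '...optimistic by up to...'
```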
diff --git a/rubin_sim/maf/metrics/transient_metrics.py b/rubin_sim/maf/metrics/transient_metrics.py
index 79a8ae14..b086092e 100644
--- a/rubin_sim/maf/metrics/transient_metrics.py
+++ b/rubin_sim/maf/metrics/transient_metrics.py
@@ -8,7 +8,8 @@

 class TransientMetric(BaseMetric):
     """
-    Calculate what fraction of the transients would be detected. Best paired with a spatial slicer.
+    Calculate what fraction of the transients would be detected. Best paired
+    with a spatial slicer.
     We are assuming simple light curves with no color evolution.

     Parameters
@@ -19,7 +20,8 @@ class TransientMetric(BaseMetric):
         How long it takes to reach the peak magnitude (days). Default 5.
     rise_slope : float, optional
         Slope of the light curve before peak time (mags/day).
-        This should be negative since mags are backwards (magnitudes decrease towards brighter fluxes).
+        This should be negative since mags are backwards (magnitudes decrease
+        towards brighter fluxes).
         Default 0.
     decline_slope : float, optional
         Slope of the light curve after peak time (mags/day).
@@ -43,25 +45,31 @@ class TransientMetric(BaseMetric):
         MJD for the survey start date.
         Default None (uses the time of the first observation).
     detect_m5_plus : float, optional
-        An observation will be used if the light curve magnitude is brighter than m5+detect_m5_plus.
+        An observation will be used if the light curve magnitude is brighter
+        than m5+detect_m5_plus.
         Default 0.
     n_pre_peak : int, optional
         Number of observations (in any filter(s)) to demand before peak_time,
         before saying a transient has been detected.
         Default 0.
     n_per_lc : int, optional
-        Number of sections of the light curve that must be sampled above the detect_m5_plus theshold
+        Number of sections of the light curve that must be sampled above the
+        detect_m5_plus threshold
         (in a single filter) for the light curve to be counted.
-        For example, setting n_per_lc = 2 means a light curve is only considered detected if there
-        is at least 1 observation in the first half of the LC, and at least one in the second half of the LC.
-        n_per_lc = 4 means each quarter of the light curve must be detected to count.
+        For example, setting n_per_lc = 2 means a light curve is only
+        considered detected if there is at least 1 observation in the first
+        half of the LC, and at least one in the second half of the LC.
+        n_per_lc = 4 means each quarter of the light curve must be detected to
+        count.
         Default 1.
     n_filters : int, optional
-        Number of filters that need to be observed for an object to be counted as detected.
+        Number of filters that need to be observed for an object to be counted
+        as detected.
        Default 1.
     n_phase_check : int, optional
         Sets the number of phases that should be checked.
-        One can imagine pathological cadences where many objects pass the detection criteria,
+        One can imagine pathological cadences where many objects pass the
+        detection criteria,
         but would not if the observations were offset by a phase-shift.
         Default 1.
     count_method : {'full' 'partialLC'}, defaults to 'full'
@@ -154,15 +162,18 @@ def light_curve(self, time, filters):
         return lc_mags

     def run(self, data_slice, slice_point=None):
-        """ "
-        Calculate the detectability of a transient with the specified lightcurve.
+        """
+        Calculate the detectability of a transient with the specified
+        lightcurve.

         Parameters
         ----------
         data_slice : numpy.array
-            Numpy structured array containing the data related to the visits provided by the slicer.
+            Numpy structured array containing the data related to the visits
+            provided by the slicer.
         slice_point : dict, optional
-            Dictionary containing information about the slice_point currently active in the slicer.
+            Dictionary containing information about the slice_point currently
+            active in the slicer.

         Returns
         -------
@@ -178,8 +189,8 @@ def run(self, data_slice, slice_point=None):
         n_detected = 0
         n_trans_max = 0
         for tshift in tshifts:
-            # Compute the total number of back-to-back transients are possible to detect
-            # given the survey duration and the transient duration.
+            # Compute the total number of back-to-back transients that could
+            # be detected given the survey duration and the transient duration.
             n_trans_max += _n_trans_max
             if tshift != 0:
                 n_trans_max -= 1
@@ -211,9 +222,10 @@ def run(self, data_slice, slice_point=None):
                 ulc_number = np.unique(lc_number)
                 left = np.searchsorted(lc_number, ulc_number)
                 right = np.searchsorted(lc_number, ulc_number, side="right")
-                # Note here I'm using np.searchsorted to basically do a 'group by'
-                # might be clearer to use scipy.ndimage.measurements.find_objects or pandas, but
-                # this numpy function is known for being efficient.
+                # Note here I'm using np.searchsorted to basically do a
+                # 'group by'; it might be clearer to use
+                # scipy.ndimage.measurements.find_objects or pandas, but this
+                # numpy function is known for being efficient.
                 for le, ri in zip(left, right):
                     # Number of points where there are a detection
                     good = np.where(time[le:ri] < self.peak_time)
@@ -221,7 +233,8 @@ def run(self, data_slice, slice_point=None):
                     if nd >= self.n_pre_peak:
                         detected[le:ri] += 1

-            # Check if we need multiple points per light curve or multiple filters
+            # Check if we need multiple points per light curve
+            # or multiple filters
             if (self.n_per_lc > 1) | (self.n_filters > 1):
                 # make sure things are sorted by time
                 ord = np.argsort(data_slice[self.mjd_col])
@@ -243,11 +256,13 @@ def run(self, data_slice, slice_point=None):
                     if np.size(np.unique(phase_sections[good])) >= self.n_per_lc:
                         detected[le:ri] += 1

-            # Find the unique number of light curves that passed the required number of conditions
+            # Find the unique number of light curves that passed the required
+            # number of conditions
             n_detected += np.size(np.unique(lc_number[np.where(detected >= detect_thresh)]))

-        # Rather than keeping a single "detected" variable, maybe make a mask for each criteria, then
-        # reduce functions like: reduce_singleDetect, reduce_NDetect, reduce_PerLC, reduce_perFilter.
+        # Rather than keeping a single "detected" variable, maybe make a mask
+        # for each criterion, then add reduce functions like:
+        # reduce_singleDetect, reduce_NDetect, reduce_PerLC, reduce_perFilter.
         # The way I'm running now it would speed things up.
         return float(n_detected) / n_trans_max
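The rewrapped comment above describes a real trick: with `lc_number` sorted, `np.searchsorted` against its unique values yields the left/right edges of each light curve's block of rows, i.e. a cheap vectorized 'group by'. A standalone illustration with toy data (not the metric's actual arrays):

```python
import numpy as np

lc_number = np.array([0, 0, 0, 1, 1, 2, 2, 2, 2])  # sorted group ids, one per visit
ulc_number = np.unique(lc_number)
left = np.searchsorted(lc_number, ulc_number)                # first index of each group
right = np.searchsorted(lc_number, ulc_number, side="right")  # one past the last index
for le, ri in zip(left, right):
    print(lc_number[le], ri - le)  # group id and its number of visits
# prints: 0 3 / 1 2 / 2 4
```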
diff --git a/rubin_sim/maf/metrics/visit_groups_metric.py b/rubin_sim/maf/metrics/visit_groups_metric.py
index 47c68558..00450f6f 100644
--- a/rubin_sim/maf/metrics/visit_groups_metric.py
+++ b/rubin_sim/maf/metrics/visit_groups_metric.py
@@ -6,7 +6,8 @@
 from .base_metric import BaseMetric

 # Example of more complex metric
-# Takes multiple columns of data (although 'night' could be calculable from 'expmjd')
+# Takes multiple columns of data (although 'night' could be calculable
+# from 'expmjd')
 # Returns variable length array of data
 # Uses multiple reduce functions


@@ -15,16 +16,18 @@ class PairFractionMetric(BaseMetric):
     """What fraction of observations are part of a pair.

     Note, an observation can be a member of more than one "pair". For example,
-    t=[0, 5, 30], all observations would be considered part of a pair because they
-    all have an observation within the given window to pair with (the observation at t=30
-    pairs twice).
+    t=[0, 5, 30], all observations would be considered part of a pair because
+    they all have an observation within the given window to pair with (the
+    observation at t=30 pairs twice).

     Parameters
     ----------
     min_gap : float, optional
-        Minimum time to consider something part of a pair (minutes). Default 15.
+        Minimum time to consider something part of a pair (minutes).
+        Default 15.
     max_gap : float, optional
-        Maximum time to consider something part of a pair (minutes). Default 90.
+        Maximum time to consider something part of a pair (minutes).
+        Default 90.
     """

     def __init__(
@@ -47,7 +50,8 @@ def run(self, data_slice, slice_point=None):
         t_minus = times + self.min_gap
         ind1 = np.searchsorted(times, t_plus)
         ind2 = np.searchsorted(times, t_minus)
-        # If ind1 and ind2 are the same, there is no pairable image for that exposure
+        # If ind1 and ind2 are the same, there is no pairable image for
+        # that exposure
         diff1 = ind1 - ind2

         # Check which have a back match
@@ -65,7 +69,34 @@ def run(self, data_slice, slice_point=None):


 class VisitGroupsMetric(BaseMetric):
-    """Count the number of visits per night within delta_t_min and delta_t_max."""
+    """Count the number of visits per night within delta_t_min and delta_t_max.
+
+    Parameters
+    ----------
+    time_col : str, optional
+        Column with the time of the visit.
+        Default: 'observationStartMJD'
+    nights_col : str, optional
+        Column with the night of the visit.
+        Default: 'night'
+    delta_t_min : float, optional
+        Minimum time of window: units are days.
+        Default: 15.0 / 60.0 / 24.0 (15 min in days)
+    delta_t_max : float, optional
+        Maximum time of window: units are days.
+        Default: 90.0 / 60.0 / 24.0 (90 min in days)
+    min_n_visits : int, optional
+        Minimum number of visits within a night (with spacing between
+        delta_t_min/max from any other visit) required.
+        Default: 2
+    window : int, optional
+        Number of nights to consider within a window (for reduce methods).
+        Default: 30
+    min_n_nights : int, optional
+        Minimum required number of nights within window to make a full 'group'.
+        Default: 3
+
+    """

     def __init__(
         self,
@@ -79,18 +110,6 @@ def __init__(
         min_n_nights=3,
         **kwargs,
     ):
-        """
-        Instantiate metric.
- - 'time_col' = column with the time of the visit (default expmjd), - 'nights_col' = column with the night of the visit (default night), - 'delta_t_min' = minimum time of window: units are days (default 15 min), - 'delta_t_max' = maximum time of window: units are days (default 90 min), - 'min_n_visits' = the minimum number of visits within a night (with spacing between delta_t_min/max - from any other visit) required, - 'window' = the number of nights to consider within a window (for reduce methods), - 'min_n_nights' = the minimum required number of nights within window to make a full 'group'. - """ self.times = time_col self.nights = nights_col eps = 1e-10 @@ -145,26 +164,31 @@ def __init__( def run(self, data_slice, slice_point=None): """ Return a dictionary of: - the number of visits within a night (within delta tmin/tmax of another visit), - and the nights with visits > minNVisits. - Count two visits which are within tmin of each other, but which have another visit - within tmin/tmax interval, as one and a half (instead of two). + the number of visits within a night (within delta tmin/tmax of another + visit), and the nights with visits > minNVisits. + Count two visits which are within tmin of each other, but which have + another visit within tmin/tmax interval, as one and a half (instead of + two). - So for example: 4 visits, where 1, 2, 3 were all within deltaTMax of each other, and 4 was later but - within deltaTmax of visit 3 -- would give you 4 visits. If visit 1 and 2 were closer together - than deltaTmin, the two would be counted as 1.5 visits together (if only 1 and 2 existed, - then there would be 0 visits as none would be within the qualifying time interval). + So for example: 4 visits, where 1, 2, 3 were all within deltaTMax of + each other, and 4 was later but within deltaTmax of visit 3 -- would + give you 4 visits. If visit 1 and 2 were closer together than + deltaTmin, the two would be counted as 1.5 visits together (if only 1 + and 2 existed, then there would be 0 visits as none would be within the + qualifying time interval). 
""" uniquenights = np.unique(data_slice[self.nights]) nights = [] visit_num = [] - # Find the nights with visits within deltaTmin/max of one another and count the number of visits + # Find the nights with visits within deltaTmin/max of one another and + # count the number of visits for n in uniquenights: condition = data_slice[self.nights] == n times = np.sort(data_slice[self.times][condition]) nvisits = 0 ntooclose = 0 - # Calculate difference between each visit and time of previous visit (tnext- tnow) + # Calculate difference between each visit and time of previous + # visit (tnext- tnow) timediff = np.diff(times) timegood = np.where( (timediff <= self.delta_tmax) & (timediff >= self.delta_tmin), @@ -213,7 +237,8 @@ def reduce_median(self, metricval): return np.median(metricval["visits"]) def reduce_n_nights_with_n_visits(self, metricval): - """Reduce to total number of nights with more than 'minNVisits' visits.""" + """Reduce to total number of nights with more than 'minNVisits' + visits.""" condition = metricval["visits"] >= self.min_n_visits return len(metricval["visits"][condition]) @@ -223,8 +248,8 @@ def _in_window(self, visits, nights, night, window, min_n_visits): return visits[condition][condition2], nights[condition][condition2] def reduce_n_visits_in_window(self, metricval): - """Reduce to max number of total visits on all nights with more than minNVisits, - within any 'window' (default=30 nights).""" + """Reduce to max number of total visits on all nights with more than + minNVisits, within any 'window' (default=30 nights).""" maxnvisits = 0 for n in metricval["nights"]: vw, nw = self._in_window( @@ -238,7 +263,8 @@ def reduce_n_visits_in_window(self, metricval): return maxnvisits def reduce_n_nights_in_window(self, metricval): - """Reduce to max number of nights with more than minNVisits, within 'window' over all windows.""" + """Reduce to max number of nights with more than minNVisits, within + 'window' over all windows.""" maxnights = 0 for n in metricval["nights"]: vw, nw = self._in_window( @@ -256,8 +282,9 @@ def _in_lunation(self, visits, nights, lunation_start, lunation_length): return visits[condition], nights[condition] def reduce_n_lunations(self, metricval): - """Reduce to number of lunations (unique 30 day windows) that contain at least one 'group': - a set of more than minNVisits per night, with more than minNNights of visits within 'window' time period. + """Reduce to number of lunations (unique 30 day windows) that contain + at least one 'group': a set of more than minNVisits per night, with + more than minNNights of visits within 'window' time period. """ lunation_length = 30 lunations = np.arange( @@ -266,9 +293,9 @@ def reduce_n_lunations(self, metricval): lunation_length, ) n_lunations = 0 - for l in lunations: + for lunation in lunations: # Find visits within lunation. - vl, nl = self._in_lunation(metricval["visits"], metricval["nights"], l, lunation_length) + vl, nl = self._in_lunation(metricval["visits"], metricval["nights"], lunation, lunation_length) for n in nl: # Find visits which are in groups within the lunation. vw, nw = self._in_window(vl, nl, n, self.window, self.min_n_visits) @@ -278,8 +305,9 @@ def reduce_n_lunations(self, metricval): return n_lunations def reduce_max_seq_lunations(self, metricval): - """Count the max number of sequential lunations (unique 30 day windows) that contain at least one 'group': - a set of more than minNVisits per night, with more than minNNights of visits within 'window' time period. 
+        """Count the max number of sequential lunations (unique 30 day windows)
+        that contain at least one 'group': a set of more than minNVisits per
+        night, with more than minNNights of visits within 'window' time period.
         """
         lunation_length = 30
         lunations = np.arange(
@@ -289,9 +317,9 @@ def reduce_max_seq_lunations(self, metricval):
         )
         max_sequence = 0
         cur_sequence = 0
-        for l in lunations:
+        for lunation in lunations:
             # Find visits within lunation.
-            vl, nl = self._in_lunation(metricval["visits"], metricval["nights"], l, lunation_length)
+            vl, nl = self._in_lunation(metricval["visits"], metricval["nights"], lunation, lunation_length)
             # If no visits this lunation:
             if len(vl) == 0:
                 max_sequence = max(max_sequence, cur_sequence)
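Beyond the line rewraps, the only functional-looking change in these two hunks is renaming the loop variable `l` to `lunation`, which clears the ambiguous-variable-name lint (E741) without changing behavior. The surrounding `max_sequence`/`cur_sequence` logic is a standard longest-run scan; in miniature (toy flags, and assuming the branch elided from the hunk resets `cur_sequence`):

```python
# One boolean per lunation: does it contain at least one qualifying 'group'?
has_group = [True, True, False, True, True, True]

max_sequence = cur_sequence = 0
for good in has_group:
    if good:
        cur_sequence += 1
    else:
        max_sequence = max(max_sequence, cur_sequence)
        cur_sequence = 0
max_sequence = max(max_sequence, cur_sequence)  # close out a trailing run
print(max_sequence)  # 3
```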
""" def __init__( @@ -38,7 +40,8 @@ def __init__( self.depth_cut = depth_cut self.n_filters = n_filters self.lsst_filter = lsst_filter - # I thought about inheriting from ExGalM5 instead, but the columns specification is more complicated + # I thought about inheriting from ExGalM5 instead, but the columns + # specification is more complicated self.exgal_m5 = ExgalM5(m5_col=m5_col, units=units) super().__init__( col=[self.m5_col, self.filter_col], @@ -53,12 +56,14 @@ def run(self, data_slice, slice_point): if slice_point["ebv"] > self.extinction_cut: return self.badval - # check to make sure there is at least some coverage in the required number of bands + # check to make sure there is at least some coverage in the required + # number of bands n_filters = len(set(data_slice[self.filter_col])) if n_filters < self.n_filters: return self.badval - # if coverage and dust criteria are valid, move forward with only lsstFilter-band visits + # if coverage and dust criteria are valid, move forward with only + # lsstFilter-band visits d_s = data_slice[data_slice[self.filter_col] == self.lsst_filter] # calculate the lsstFilter-band coadded depth dustdepth = self.exgal_m5.run(d_s, slice_point) @@ -71,12 +76,13 @@ def run(self, data_slice, slice_point): class WeakLensingNvisits(BaseMetric): - """A proxy metric for WL systematics. Higher values indicate better systematics mitigation. + """A proxy metric for WL systematics. Higher values indicate better + systematics mitigation. - Weak Lensing systematics metric : Computes the average number of visits per point on a HEALPix grid - after a maximum E(B-V) cut and a minimum co-added depth cut. - Intended to be used to count visits in gri, but can be any filter combination as long as it - includes `lsst_filter` band visits. + Weak Lensing systematics metric : Computes the average number of visits per + point on a HEALPix grid after a maximum E(B-V) cut and a minimum co-added + depth cut. Intended to be used to count visits in gri, but can be any + filter combination as long as it includes `lsst_filter` band visits. """