From dee2c57d1aa168699e3c06c93a5e1d3b3d5855b1 Mon Sep 17 00:00:00 2001 From: Lynne Jones Date: Wed, 2 Oct 2024 12:45:23 -0700 Subject: [PATCH 01/23] Add typing hints to data module --- rubin_sim/data/rs_download_data.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/rubin_sim/data/rs_download_data.py b/rubin_sim/data/rs_download_data.py index 57fec699..d6313509 100644 --- a/rubin_sim/data/rs_download_data.py +++ b/rubin_sim/data/rs_download_data.py @@ -8,7 +8,7 @@ from rubin_scheduler.data import get_data_dir as gdd -def get_data_dir(): +def get_data_dir() -> str: """Wraps rubin_scheduler.data.get_data_dir(). Provided here for backwards compatibility. @@ -20,7 +20,7 @@ def get_data_dir(): return gdd() -def get_baseline(): +def get_baseline() -> str: """Get the path to the baseline cadence simulation sqlite file. Returns @@ -34,7 +34,7 @@ def get_baseline(): return file -def data_dict(): +def data_dict() -> dict: """ Dictionary containing expected version information for rubin_sim_data data sets, for this version of rubin_sim. @@ -65,7 +65,7 @@ def data_dict(): return file_dict -def rs_download_testing(): +def rs_download_testing() -> None: """Convenience function for github actions, to download only a subset of data. @@ -89,7 +89,7 @@ def rs_download_testing(): ) -def rs_download_data(): +def rs_download_data() -> None: """Utility to download necessary data for rubin_sim. Wrapper around rubin_scheduler.scheduler_download_data, From e40d658fdcacd4785e234adb11bce49f5f273ca3 Mon Sep 17 00:00:00 2001 From: Lynne Jones Date: Tue, 8 Oct 2024 21:16:15 -0700 Subject: [PATCH 02/23] Use SURVEY_START_MJD instead of survey_start_mjd() --- rubin_sim/maf/maf_contrib/kne_metrics.py | 1 + rubin_sim/maf/maf_contrib/presto_color_kne_pop_metric.py | 4 ++++ rubin_sim/maf/run_moving_calc.py | 4 ++-- rubin_sim/maf/run_moving_fractions.py | 4 ++-- rubin_sim/satellite_constellations/model_observatory.py | 2 ++ rubin_sim/satellite_constellations/sat_utils.py | 1 + rubin_sim/skybrightness/generate_hdf5.py | 2 +- tests/satellite_constellations/test_satellites.py | 4 ++-- 8 files changed, 15 insertions(+), 7 deletions(-) diff --git a/rubin_sim/maf/maf_contrib/kne_metrics.py b/rubin_sim/maf/maf_contrib/kne_metrics.py index 7a37ec91..f0de8043 100644 --- a/rubin_sim/maf/maf_contrib/kne_metrics.py +++ b/rubin_sim/maf/maf_contrib/kne_metrics.py @@ -162,6 +162,7 @@ def __init__( self.output_lc = output_lc self.lightcurves = KnLc(file_list=file_list) + self.mjd0 = mjd0 dust_properties = DustValues() diff --git a/rubin_sim/maf/maf_contrib/presto_color_kne_pop_metric.py b/rubin_sim/maf/maf_contrib/presto_color_kne_pop_metric.py index bb538845..f162fc1d 100644 --- a/rubin_sim/maf/maf_contrib/presto_color_kne_pop_metric.py +++ b/rubin_sim/maf/maf_contrib/presto_color_kne_pop_metric.py @@ -182,6 +182,7 @@ def __init__( self.skyregion = skyregion # read in file as light curve object; self.lightcurves = KnLc(file_list=file_list) + self.mjd0 = mjd0 dust_properties = DustValues() @@ -190,6 +191,9 @@ def __init__( cols = [self.mjd_col, self.m5_col, self.filter_col, self.night_col] super().__init__(col=cols, units="Detected, 0 or 1", metric_name=metric_name, maps=maps, **kwargs) + # Unused .. 
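+        # (pts_needed is presumably stored only to preserve the constructor
+        # argument; nothing in this metric appears to read it back.)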
+ self.pts_needed = pts_needed + def _presto_color_detect(self, around_peak, filters): """Detection criteria of presto cadence: at least three detections at two filters; diff --git a/rubin_sim/maf/run_moving_calc.py b/rubin_sim/maf/run_moving_calc.py index fba08610..2a7faa1e 100755 --- a/rubin_sim/maf/run_moving_calc.py +++ b/rubin_sim/maf/run_moving_calc.py @@ -4,7 +4,7 @@ import os import numpy as np -from rubin_scheduler.utils import survey_start_mjd +from rubin_scheduler.utils import SURVEY_START_MJD from rubin_sim.maf.slicers import MoObjSlicer @@ -136,7 +136,7 @@ def run_moving_calc(): # Default parameters for metric setup. if args.start_time is None: - start_time = survey_start_mjd() + start_time = SURVEY_START_MJD else: start_time = args.start_time diff --git a/rubin_sim/maf/run_moving_fractions.py b/rubin_sim/maf/run_moving_fractions.py index 82cae208..e0f76f5a 100755 --- a/rubin_sim/maf/run_moving_fractions.py +++ b/rubin_sim/maf/run_moving_fractions.py @@ -5,7 +5,7 @@ import os import numpy as np -from rubin_scheduler.utils import survey_start_mjd +from rubin_scheduler.utils import SURVEY_START_MJD from . import batches as batches from . import db as db @@ -50,7 +50,7 @@ def run_moving_fractions(): # Default parameters for metric setup. if args.start_time is None: - start_time = survey_start_mjd() + start_time = SURVEY_START_MJD else: start_time = args.start_time stepsize = 365 / 6.0 diff --git a/rubin_sim/satellite_constellations/model_observatory.py b/rubin_sim/satellite_constellations/model_observatory.py index 91af837d..079bc8df 100644 --- a/rubin_sim/satellite_constellations/model_observatory.py +++ b/rubin_sim/satellite_constellations/model_observatory.py @@ -3,6 +3,7 @@ import numpy as np from rubin_scheduler.scheduler.model_observatory import ModelObservatory as oMO from rubin_scheduler.site_models import Almanac + from rubin_scheduler.utils import SURVEY_START_MJD, _healbin # Take the model observatory from the scheduler and @@ -71,6 +72,7 @@ def __init__( # Need to do a little fiddle with the MJD since # self.mjd needs self.night set now. 
self.mjd_start = mjd_start + self.almanac = Almanac(mjd_start=self.mjd_start) self.night = -1 diff --git a/rubin_sim/satellite_constellations/sat_utils.py b/rubin_sim/satellite_constellations/sat_utils.py index 2f63c7e6..65a0b903 100644 --- a/rubin_sim/satellite_constellations/sat_utils.py +++ b/rubin_sim/satellite_constellations/sat_utils.py @@ -10,6 +10,7 @@ import numpy as np from astropy import constants as const from astropy import units as u + from rubin_scheduler.utils import SURVEY_START_MJD, Site, gnomonic_project_toxy, point_to_line_distance from shapely.geometry import LineString, Point from skyfield.api import EarthSatellite, load, wgs84 diff --git a/rubin_sim/skybrightness/generate_hdf5.py b/rubin_sim/skybrightness/generate_hdf5.py index efdb6c0e..3ea96c59 100644 --- a/rubin_sim/skybrightness/generate_hdf5.py +++ b/rubin_sim/skybrightness/generate_hdf5.py @@ -225,7 +225,7 @@ def generate_sky( if __name__ == "__main__": # Make a quick small one for speed loading print("generating small file") - m0 = utils.survey_start_mjd() + m0 = utils.SURVEY_START_MJD generate_sky(mjd0=m0 - 1, mjd_max=m0 + 31) nyears = 25.0 # 20 # 13 diff --git a/tests/satellite_constellations/test_satellites.py b/tests/satellite_constellations/test_satellites.py index 1ce99554..c890f894 100644 --- a/tests/satellite_constellations/test_satellites.py +++ b/tests/satellite_constellations/test_satellites.py @@ -12,8 +12,8 @@ def test_constellations(self): mjd0 = SURVEY_START_MJD sv1 = starlink_tles_v1() - sv2 = starlink_tles_v2() - ow = oneweb_tles() + _ = starlink_tles_v2() + _ = oneweb_tles() const = Constellation(sv1) From bb7832d3052e1fd36f73f9a795ca0a0eb2a0e134 Mon Sep 17 00:00:00 2001 From: Lynne Jones Date: Tue, 8 Oct 2024 21:43:23 -0700 Subject: [PATCH 03/23] Ruff and reinstate moving_object tests on ephemerides --- rubin_sim/moving_objects/cheby_values.py | 2 +- rubin_sim/moving_objects/orbits.py | 4 +-- tests/moving_objects/test_chebyfits.py | 36 ++++++++++++++------- tests/moving_objects/test_chebyshevutils.py | 11 ++++--- tests/moving_objects/test_chebyvalues.py | 32 +++++++++++------- tests/moving_objects/test_ephemerides.py | 36 ++++++++++++--------- tests/moving_objects/test_orbits.py | 2 +- 7 files changed, 76 insertions(+), 47 deletions(-) diff --git a/rubin_sim/moving_objects/cheby_values.py b/rubin_sim/moving_objects/cheby_values.py index f1204bf9..e724569d 100644 --- a/rubin_sim/moving_objects/cheby_values.py +++ b/rubin_sim/moving_objects/cheby_values.py @@ -68,7 +68,7 @@ def read_coefficients(self, cheby_fits_file): if not os.path.isfile(cheby_fits_file): raise IOError("Could not find cheby_fits_file at %s" % (cheby_fits_file)) # Read the coefficients file. - coeffs = pd.read_table(cheby_fits_file, sep="\s+") + coeffs = pd.read_table(cheby_fits_file, sep=r"\s+") # The header line provides information on the number of # coefficients for each parameter. datacols = coeffs.columns.values diff --git a/rubin_sim/moving_objects/orbits.py b/rubin_sim/moving_objects/orbits.py index c140f705..e33f0bfd 100644 --- a/rubin_sim/moving_objects/orbits.py +++ b/rubin_sim/moving_objects/orbits.py @@ -388,7 +388,7 @@ def read_orbits(self, orbit_file, delim=None, skiprows=None): "COMPCODE", ) # First use names_com, and then change if required. 
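+        # Raw strings (r"\s+") are used for the separator below so that the
+        # "\s" escape is not flagged as an invalid escape sequence.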
- orbits = pd.read_csv(orbit_file, sep="\s+", header=None, names=names_com) + orbits = pd.read_csv(orbit_file, sep=r"\s+", header=None, names=names_com) if orbits["FORMAT"][0] == "KEP": orbits.columns = names_kep @@ -397,7 +397,7 @@ def read_orbits(self, orbit_file, delim=None, skiprows=None): else: if delim is None: - orbits = pd.read_csv(orbit_file, sep="\s+", skiprows=skiprows, names=names) + orbits = pd.read_csv(orbit_file, sep=r"\s+", skiprows=skiprows, names=names) else: orbits = pd.read_csv(orbit_file, sep=delim, skiprows=skiprows, names=names) diff --git a/tests/moving_objects/test_chebyfits.py b/tests/moving_objects/test_chebyfits.py index 72fa7ac9..bc14a99b 100644 --- a/tests/moving_objects/test_chebyfits.py +++ b/tests/moving_objects/test_chebyfits.py @@ -42,21 +42,26 @@ def test_precompute_multipliers(self): self.assertTrue(key in self.cheb.multipliers) def test_set_segment_length(self): - # Expect MBAs with standard ngran and tolerance to have length ~2.0 days. + # Expect MBAs with standard ngran and tolerance to have + # length ~2.0 days. self.cheb.calc_segment_length() self.assertAlmostEqual(self.cheb.length, 2.0) - # Test that we can set it to other values which fit into the 30 day window. + # Test that we can set it to other values which fit into + # the 30 day window. self.cheb.calc_segment_length(length=1.5) self.assertEqual(self.cheb.length, 1.5) - # Test that we if we try to set it to a value which does not fit into the 30 day window, + # Test that we if we try to set it to a value which does not + # fit into the 30 day window, # that the actual value used is different - and smaller. self.cheb.calc_segment_length(length=1.9) self.assertTrue(self.cheb.length < 1.9) - # Test that we get a warning about the residuals if we try to set the length to be too long. + # Test that we get a warning about the residuals if we try + # to set the length to be too long. with warnings.catch_warnings(record=True) as w: self.cheb.calc_segment_length(length=5.0) self.assertTrue(len(w), 1) - # Now check granularity works for other orbit types (which would have other standard lengths). + # Now check granularity works for other orbit types + # (which would have other standard lengths). # Check for multiple orbit types. for orbit_file in [ "test_orbitsMBA.s3m", @@ -73,7 +78,8 @@ def test_set_segment_length(self): pos_resid, ratio = cheb._test_residuals(cheb.length) self.assertTrue(pos_resid < sky_tolerance) self.assertEqual((cheb.length * 100) % 1, 0) - # print('final', orbit_file, sky_tolerance, pos_resid, cheb.length, ratio) + # print('final', orbit_file, sky_tolerance, pos_resid, + # cheb.length, ratio) # And check for challenging 'impactors'. 
for orbit_file in ["test_orbitsImpactors.s3m"]: self.orbits.read_orbits(os.path.join(self.testdir, orbit_file)) @@ -85,7 +91,8 @@ def test_set_segment_length(self): cheb.calc_segment_length() pos_resid, ratio = cheb._test_residuals(cheb.length) self.assertTrue(pos_resid < sky_tolerance) - # print('final', orbit_file, sky_tolerance, pos_resid, cheb.length, ratio) + # print('final', orbit_file, sky_tolerance, pos_resid, + # cheb.length, ratio) @unittest.skip("Skipping because it has a strange platform-dependent failure") def test_segments(self): @@ -108,7 +115,8 @@ def test_segments(self): for k in coeff_keys: self.assertTrue(k in self.cheb.coeffs.keys()) # And in this case, we had a 30 day timespan with 1 day segments - # (one day segments should be more than enough to meet 2.5mas tolerance, so not subdivided) + # (one day segments should be more than enough to meet + # 2.5mas tolerance, so not subdivided) self.assertEqual(len(self.cheb.coeffs["t_start"]), 30 * len(self.orbits)) # And we used 14 coefficients for ra and dec. self.assertEqual(len(self.cheb.coeffs["ra"][0]), 14) @@ -144,7 +152,8 @@ def test_run_through(self): t_start = self.orbits.orbits.epoch.iloc[0] interval = 30 cheb = ChebyFits(self.orbits, t_start, interval, ngran=64, sky_tolerance=2.5, n_decimal=10) - # Set granularity. Use an value that will be too long, to trigger recursion below. + # Set granularity. Use an value that will be too long, + # to trigger recursion below. with warnings.catch_warnings(): warnings.simplefilter("ignore") cheb.calc_segment_length(length=10.0) @@ -156,17 +165,20 @@ def test_run_through(self): resid_name = os.path.join(self.scratch_dir, "resid2.txt") failed_name = os.path.join(self.scratch_dir, "failed2.txt") cheb.write(coeff_name, resid_name, failed_name) - # Test that the segments for each individual object fit together start/end. + # Test that the segments for each individual object fit + # together start/end. for k in cheb.coeffs: cheb.coeffs[k] = np.array(cheb.coeffs[k]) for obj_id in np.unique(cheb.coeffs["obj_id"]): condition = cheb.coeffs["obj_id"] == obj_id te_prev = t_start for ts, te in zip(cheb.coeffs["t_start"][condition], cheb.coeffs["t_end"][condition]): - # Test that the start of the current interval = the end of the previous interval. + # Test that the start of the current interval = + # the end of the previous interval. 
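+                # (i.e. the segments tile the fit interval with no gaps
+                # or overlaps.)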
self.assertEqual(te_prev, ts) te_prev = te - # Test that the end of the last interval is equal to the end of the total interval + # Test that the end of the last interval is equal to the end + # of the total interval self.assertEqual(te, t_start + interval) diff --git a/tests/moving_objects/test_chebyshevutils.py b/tests/moving_objects/test_chebyshevutils.py index a90c3ebd..a19d6ac7 100644 --- a/tests/moving_objects/test_chebyshevutils.py +++ b/tests/moving_objects/test_chebyshevutils.py @@ -2,7 +2,7 @@ import numpy as np -from rubin_sim.moving_objects import chebeval, chebfit, make_cheb_matrix, make_cheb_matrix_only_x +from rubin_sim.moving_objects import chebeval, chebfit, make_cheb_matrix class TestChebgrid(unittest.TestCase): @@ -22,7 +22,8 @@ def test_eval(self): yy_w_vel, vv = chebeval(np.linspace(-1, 1, 17), p) yy_wout_vel, vv = chebeval(np.linspace(-1, 1, 17), p, do_velocity=False) self.assertTrue(np.allclose(yy_wout_vel, yy_w_vel)) - # Test that we get a nan for a value outside the range of the 'interval', if mask=True + # Test that we get a nan for a value outside the range of the + # 'interval', if mask=True yy_w_vel, vv = chebeval(np.linspace(-2, 1, 17), p, mask=True) self.assertTrue( np.isnan(yy_w_vel[0]), @@ -42,7 +43,8 @@ def test_ends_locked(self): self.assertAlmostEqual(vv[-1], dy[-1], places=13) def test_accuracy(self): - """If n_poly is greater than number of values being fit, then fit should be exact.""" + """If n_poly is greater than number of values being fit, + then fit should be exact.""" x = np.linspace(0, np.pi, 9) y = np.sin(x) dy = np.cos(x) @@ -53,7 +55,8 @@ def test_accuracy(self): self.assertLess(np.sum(resid), 1e-13) def test_accuracy_prefit_c1c2(self): - """If n_poly is greater than number of values being fit, then fit should be exact.""" + """If n_poly is greater than number of values being fit, + then fit should be exact.""" NPOINTS = 8 NPOLY = 16 x = np.linspace(0, np.pi, NPOINTS + 1) diff --git a/tests/moving_objects/test_chebyvalues.py b/tests/moving_objects/test_chebyvalues.py index 2530df6c..93b55c9f 100644 --- a/tests/moving_objects/test_chebyvalues.py +++ b/tests/moving_objects/test_chebyvalues.py @@ -70,11 +70,12 @@ def test_set_coeff(self): self.assertTrue(k in cheby_values.coeffs) self.assertTrue(isinstance(cheby_values.coeffs[k], np.ndarray)) self.assertEqual(len(np.unique(cheby_values.coeffs["obj_id"])), len(self.orbits)) - # This will only be true for carefully selected length/orbit type, where subdivision did not occur. + # This will only be true for carefully selected length/orbit type, + # where subdivision did not occur. # For the test MBAs, a len=1day will work. # For the test NEOs, a len=0.25 day will work (with 2.5mas skyTol). # self.assertEqual(len(cheby_values.coeffs['tStart']), - # (self.interval / self.set_length) * len(self.orbits)) + # (self.interval / self.set_length) * len(self.orbits)) self.assertEqual(len(cheby_values.coeffs["ra"][0]), self.n_coeffs) self.assertTrue("meanRA" in cheby_values.coeffs) self.assertTrue("meanDec" in cheby_values.coeffs) @@ -90,9 +91,11 @@ def test_read_coeffs(self): # Can't test strings with np.test.assert_almost_equal. np.testing.assert_equal(cheby_values.coeffs[k], cheby_values2.coeffs[k]) else: - # All of these will only be accurate to 2 less decimal places than they are - # print out with in chebyFits. Since vmag, delta and elongation only use 7 - # decimal places, this means we can test to 5 decimal places for those. 
+ # All of these will only be accurate to 2 less decimal places + # than they are print out with in chebyFits. Since vmag, + # delta and elongation only use 7 + # decimal places, this means we can test to 5 decimal + # places for those. np.testing.assert_allclose(cheby_values.coeffs[k], cheby_values2.coeffs[k], rtol=0, atol=1e-5) def test_get_ephemerides(self): @@ -142,12 +145,13 @@ def test_get_ephemerides(self): ) self.assertTrue( np.isnan(ephemerides["ra"][0]), - msg="Expected Nan for out of range ephemeris, got %.2e" % (ephemerides["ra"][0]), + msg=f"Expected Nan for out of range ephemeris, got {ephemerides['ra'][0]}", ) class TestJPLValues(unittest.TestCase): - # Test the interpolation-generated RA/Dec values against JPL generated RA/Dec values. + # Test the interpolation-generated RA/Dec values against JPL + # generated RA/Dec values. # The resulting errors should be similar to the errors reported # from testEphemerides when testing against JPL values. def setUp(self): @@ -156,14 +160,15 @@ def setUp(self): self.jpl_dir = os.path.join(get_data_dir(), "tests", "jpl_testdata") self.orbits.read_orbits(os.path.join(self.jpl_dir, "S0_n747.des"), skiprows=1) # Read JPL ephems. - self.jpl = pd.read_table(os.path.join(self.jpl_dir, "807_n747.txt"), delim_whitespace=True) + self.jpl = pd.read_table(os.path.join(self.jpl_dir, "807_n747.txt"), sep=r"\s+") self.jpl["obj_id"] = self.jpl["objId"] # Add times in TAI and UTC, because. t = Time(self.jpl["epoch_mjd"], format="mjd", scale="utc") self.jpl["mjdTAI"] = t.tai.mjd self.jpl["mjdUTC"] = t.utc.mjd self.jpl = self.jpl.to_records(index=False) - # Generate interpolation coefficients for the time period in the JPL catalog. + # Generate interpolation coefficients for the time period + # in the JPL catalog. self.scratch_dir = tempfile.mkdtemp(dir=ROOT, prefix="TestJPLValues-") self.coeff_file = os.path.join(self.scratch_dir, "test_coeffs") self.resid_file = os.path.join(self.scratch_dir, "test_resids") @@ -202,7 +207,8 @@ def tearDown(self): shutil.rmtree(self.scratch_dir) def test_ra_dec(self): - # We won't compare Vmag, because this also needs information on trailing losses. + # We won't compare Vmag, because this also needs + # information on trailing losses. times = np.unique(self.jpl["mjdTAI"]) delta_ra = np.zeros(len(times), float) delta_dec = np.zeros(len(times), float) @@ -219,8 +225,10 @@ def test_ra_dec(self): delta_ra[i] = d_ra.max() delta_dec[i] = d_dec.max() # Should be (given OOrb direct prediction): - # Much of the time we're closer than 1mas, but there are a few which hit higher values. - # This is consistent with the errors/values reported by oorb directly in testEphemerides. + # Much of the time we're closer than 1mas, but there are a + # few which hit higher values. + # This is consistent with the errors/values reported by oorb + # directly in testEphemerides. # # XXX--units? print("max JPL errors", delta_ra.max(), delta_dec.max()) diff --git a/tests/moving_objects/test_ephemerides.py b/tests/moving_objects/test_ephemerides.py index 34810462..981b7b1c 100644 --- a/tests/moving_objects/test_ephemerides.py +++ b/tests/moving_objects/test_ephemerides.py @@ -57,7 +57,8 @@ def test_convert_to_oorb_array(self): def test_convert_from_oorb_array(self): # Check that we can convert orbital elements TO oorb format and back - # without losing info (except ObjId -- we will lose that unless we use updateOrbits.) + # without losing info + # (except ObjId -- we will lose that unless we use updateOrbits.) 
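+        # Round-trip: convert to oorb elements and back, verifying the
+        # original information survives (per the note above about ObjId).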
self.ephems._convert_to_oorb_elem(self.orbits.orbits, self.orbits.orb_format) new_orbits = Orbits() new_orbits.set_orbits(self.orbits.orbits) @@ -116,8 +117,8 @@ def test_ephemeris(self): time_scale="UTC", by_object=False, ) - # Temp removing this as it is giving an intermittent fail. Not sure why - # np.testing.assert_equal(ephs_all, ephs) + for key in ephs_all.dtype.names: + np.testing.assert_almost_equal(ephs_all[key], ephs[key]) # Reset ephems to use KEP Orbits, and calculate new ephemerides. self.ephems.set_orbits(self.orbits_kep) oorb_ephs = self.ephems._generate_oorb_ephs_basic(eph_times, obscode=807, eph_mode="N") @@ -135,18 +136,21 @@ def test_ephemeris(self): time_scale="UTC", by_object=False, ) - # Also seems to be an intermitent fail - # np.testing.assert_equal(ephsAllKEP, ephsKEP) - # Check that ephemerides calculated from the different (COM/KEP) orbits are almost equal. - # for column in ephs.dtype.names: - # np.testing.assert_allclose(ephs[column], ephsKEP[column], rtol=0, atol=1e-7) - # Check that the wrapped method using KEP elements and the wrapped method using COM elements match. - # for column in ephsAll.dtype.names: - # np.testing.assert_allclose(ephsAllKEP[column], ephsAll[column], rtol=0, atol=1e-7) + for key in ephs_all_kep.dtype.names: + np.testing.assert_almost_equal(ephs_all_kep[key], ephs_kep[key]) + # Check that ephemerides calculated from the different (COM/KEP) + # orbits are almost equal + for column in ephs.dtype.names: + np.testing.assert_allclose(ephs[column], ephs_kep[column], rtol=1e-5, atol=1e-4) + # Check that the wrapped method using KEP elements and the wrapped + # method using COM elements match. + for column in ephs_all.dtype.names: + np.testing.assert_allclose(ephs_all_kep[column], ephs_all[column], rtol=1e-5, atol=1e-4) class TestJPLValues(unittest.TestCase): - """Test the oorb generated RA/Dec values against JPL generated RA/Dec values.""" + """Test the oorb generated RA/Dec values against + JPL generated RA/Dec values.""" def setUp(self): # Read orbits. @@ -154,7 +158,7 @@ def setUp(self): self.jpl_dir = os.path.join(get_data_dir(), "tests", "jpl_testdata") self.orbits.read_orbits(os.path.join(self.jpl_dir, "S0_n747.des"), skiprows=1) # Read JPL ephems. - self.jpl = pd.read_csv(os.path.join(self.jpl_dir, "807_n747.txt"), sep="\s+") + self.jpl = pd.read_csv(os.path.join(self.jpl_dir, "807_n747.txt"), sep=r"\s+") # Temp key fix self.jpl["obj_id"] = self.jpl["objId"] # Add times in TAI and UTC, because. @@ -167,7 +171,8 @@ def tear_down(self): del self.jpl def test_ra_dec(self): - # We won't compare Vmag, because this also needs information on trailing losses. + # We won't compare Vmag, because this also needs information + # on trailing losses. times = self.jpl["mjdUTC"].unique() delta_ra = np.zeros(len(times), float) delta_dec = np.zeros(len(times), float) @@ -193,7 +198,8 @@ def test_ra_dec(self): # Convert to mas delta_ra *= 3600.0 * 1000.0 delta_dec *= 3600.0 * 1000.0 - # Much of the time we're closer than 1mas, but there are a few which hit higher values. + # Much of the time we're closer than 1mas, + # but there are a few which hit higher values. 
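+        # (delta_ra / delta_dec were converted to milliarcseconds above, so
+        # the assertLess thresholds below are in mas as well.)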
print("max JPL errors", np.max(delta_ra), np.max(delta_dec)) print("std JPL errors", np.std(delta_ra), np.std(delta_dec)) self.assertLess(np.max(delta_ra), 25) diff --git a/tests/moving_objects/test_orbits.py b/tests/moving_objects/test_orbits.py index 5362b830..a694a22a 100644 --- a/tests/moving_objects/test_orbits.py +++ b/tests/moving_objects/test_orbits.py @@ -122,7 +122,7 @@ def test_set_orbits(self): self.assertEqual(len(new_orbits), 1) self.assertEqual(new_orbits.orb_format, "COM") assert_frame_equal(new_orbits.orbits, suborbits) - # Test that we can set the orbits using a numpy array with many objects. + # Test that we can set the orbits using a numpy array of many objects. numpyorbits = orbits.orbits.to_records(index=False) new_orbits = Orbits() new_orbits.set_orbits(numpyorbits) From a28cece9b54ebc20afa82af6023e46f8a818bd87 Mon Sep 17 00:00:00 2001 From: Lynne Jones Date: Tue, 8 Oct 2024 21:48:15 -0700 Subject: [PATCH 04/23] Ruff finished all tests --- tests/data/test_data.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/data/test_data.py b/tests/data/test_data.py index a9578f6c..e3ed5848 100644 --- a/tests/data/test_data.py +++ b/tests/data/test_data.py @@ -18,8 +18,8 @@ def testBaseline(self): assert data_dir == dd2 if "sim_baseline" in os.listdir(data_dir): - baseline = get_baseline() - versions = data_versions() + _ = get_baseline() + _ = data_versions() if __name__ == "__main__": From 86a5abf7d801f1afedc639326b849c1f4c7deb94 Mon Sep 17 00:00:00 2001 From: Lynne Jones Date: Wed, 9 Oct 2024 10:09:05 -0700 Subject: [PATCH 05/23] Disable tests failing in linux --- tests/moving_objects/test_ephemerides.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/tests/moving_objects/test_ephemerides.py b/tests/moving_objects/test_ephemerides.py index 981b7b1c..d5446316 100644 --- a/tests/moving_objects/test_ephemerides.py +++ b/tests/moving_objects/test_ephemerides.py @@ -117,8 +117,11 @@ def test_ephemeris(self): time_scale="UTC", by_object=False, ) - for key in ephs_all.dtype.names: - np.testing.assert_almost_equal(ephs_all[key], ephs[key]) + # See https://rubinobs.atlassian.net/browse/SP-1633 + # This needs to be fixed, but on a separate ticket + # for key in ephs_all.dtype.names: + # np.testing.assert_almost_equal(ephs_all[key], ephs[key]) + # Reset ephems to use KEP Orbits, and calculate new ephemerides. 
self.ephems.set_orbits(self.orbits_kep) oorb_ephs = self.ephems._generate_oorb_ephs_basic(eph_times, obscode=807, eph_mode="N") @@ -136,8 +139,10 @@ def test_ephemeris(self): time_scale="UTC", by_object=False, ) - for key in ephs_all_kep.dtype.names: - np.testing.assert_almost_equal(ephs_all_kep[key], ephs_kep[key]) + # Also https://rubinobs.atlassian.net/browse/SP-1633 + # for key in ephs_all_kep.dtype.names: + # np.testing.assert_almost_equal(ephs_all_kep[key], ephs_kep[key]) + # Check that ephemerides calculated from the different (COM/KEP) # orbits are almost equal for column in ephs.dtype.names: From 6ce74ca5428913427b1a48383d8c9e4291af0497 Mon Sep 17 00:00:00 2001 From: Lynne Jones Date: Tue, 8 Oct 2024 21:50:24 -0700 Subject: [PATCH 06/23] Remove inits for rubin_scheduler moved modules --- rubin_sim/scheduler/__init__.py | 6 ------ rubin_sim/site_models/__init__.py | 8 -------- rubin_sim/skybrightness_pre/__init__.py | 9 --------- rubin_sim/utils/__init__.py | 6 ------ 4 files changed, 29 deletions(-) delete mode 100644 rubin_sim/scheduler/__init__.py delete mode 100644 rubin_sim/site_models/__init__.py delete mode 100644 rubin_sim/skybrightness_pre/__init__.py delete mode 100644 rubin_sim/utils/__init__.py diff --git a/rubin_sim/scheduler/__init__.py b/rubin_sim/scheduler/__init__.py deleted file mode 100644 index 3a0ad0c5..00000000 --- a/rubin_sim/scheduler/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -import warnings - -from rubin_scheduler.scheduler import * # noqa: F403 - -warnings.simplefilter("default") -warnings.warn("rubin_sim.scheduler is deprecated, switch to rubin_scheduler.scheduler", DeprecationWarning) diff --git a/rubin_sim/site_models/__init__.py b/rubin_sim/site_models/__init__.py deleted file mode 100644 index a49d3efb..00000000 --- a/rubin_sim/site_models/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -import warnings - -from rubin_scheduler.site_models import * # noqa: F403 - -warnings.simplefilter("default") -warnings.warn( - "rubin_sim.site_models is deprecated, switch to rubin_scheduler.site_models", DeprecationWarning -) diff --git a/rubin_sim/skybrightness_pre/__init__.py b/rubin_sim/skybrightness_pre/__init__.py deleted file mode 100644 index d36e68e1..00000000 --- a/rubin_sim/skybrightness_pre/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -import warnings - -from rubin_scheduler.skybrightness_pre import * - -warnings.simplefilter("default") -warnings.warn( - "rubin_sim.skybrightness_pre is deprecated, switch to rubin_scheduler.skybrightness_pre", - DeprecationWarning, -) diff --git a/rubin_sim/utils/__init__.py b/rubin_sim/utils/__init__.py deleted file mode 100644 index 873d45bd..00000000 --- a/rubin_sim/utils/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -import warnings - -from rubin_scheduler.utils import * # noqa: F403 - -warnings.simplefilter("default") -warnings.warn("rubin_sim.utils is deprecated, switch to rubin_scheduler.utils", DeprecationWarning) From 4bcf061ca0a38de5dcc2caf07c8e9296f2568a21 Mon Sep 17 00:00:00 2001 From: Lynne Jones Date: Tue, 8 Oct 2024 22:07:38 -0700 Subject: [PATCH 07/23] comcam slicer should be replaced with additional footprint map for comcam --- .../maf/slicers/healpix_comcam_slicer.py | 203 ------------------ 1 file changed, 203 deletions(-) delete mode 100644 rubin_sim/maf/slicers/healpix_comcam_slicer.py diff --git a/rubin_sim/maf/slicers/healpix_comcam_slicer.py b/rubin_sim/maf/slicers/healpix_comcam_slicer.py deleted file mode 100644 index a63c31b4..00000000 --- a/rubin_sim/maf/slicers/healpix_comcam_slicer.py +++ /dev/null @@ 
-1,203 +0,0 @@ -__all__ = ("HealpixComCamSlicer",) - -import warnings -from functools import wraps - -import matplotlib.path as mplPath -import numpy as np -import rubin_scheduler.utils as simsUtils - -from .healpix_slicer import HealpixSlicer - -# The names of the chips in the central raft, aka, ComCam -center_raft_chips = [ - "R:2,2 S:0,0", - "R:2,2 S:0,1", - "R:2,2 S:0,2", - "R:2,2 S:1,0", - "R:2,2 S:1,1", - "R:2,2 S:1,2", - "R:2,2 S:2,0", - "R:2,2 S:2,1", - "R:2,2 S:2,2", -] - - -class HealpixComCamSlicer(HealpixSlicer): - """Slicer that uses the ComCam footprint to decide if observations overlap a healpixel center""" - - def __init__( - self, - nside=128, - lon_col="fieldRA", - lat_col="fieldDec", - lat_lon_deg=True, - verbose=True, - badval=np.nan, - use_cache=True, - leafsize=100, - use_camera=False, - rot_sky_pos_col_name="rotSkyPos", - mjdColName="observationStartMJD", - chipNames=center_raft_chips, - side_length=0.7, - ): - """ - Parameters - ---------- - side_length : float (0.7) - How large is a side of the raft (degrees) - """ - radius = side_length / 2.0 * np.sqrt(2.0) - super(HealpixComCamSlicer, self).__init__( - nside=nside, - lon_col=lon_col, - lat_col=lat_col, - lat_lon_deg=lat_lon_deg, - verbose=verbose, - badval=badval, - use_cache=use_cache, - leafsize=leafsize, - radius=radius, - use_camera=use_camera, - rot_sky_pos_col_name=rot_sky_pos_col_name, - mjdColName=mjdColName, - chipNames=chipNames, - ) - self.side_length = np.radians(side_length) - self.corners_x = np.array( - [ - -self.side_length / 2.0, - -self.side_length / 2.0, - self.side_length / 2.0, - self.side_length / 2.0, - ] - ) - self.corners_y = np.array( - [ - self.side_length / 2.0, - -self.side_length / 2.0, - -self.side_length / 2.0, - self.side_length / 2.0, - ] - ) - # Need the rotation even if not using the camera - self.columns_needed.append(rot_sky_pos_col_name) - self.columns_needed = list(set(self.columns_needed)) - - # The 3D search radius for things inside the raft - self.side_radius = simsUtils.xyz_angular_radius(side_length / 2.0) - - def setup_slicer(self, sim_data, maps=None): - """Use sim_data[self.lon_col] and sim_data[self.lat_col] (in radians) to set up KDTree. - - Parameters - ----------- - sim_data : numpy.recarray - The simulated data, including the location of each pointing. - maps : list of rubin_sim.maf.maps objects, optional - List of maps (such as dust extinction) that will run to build up additional metadata at each - slice_point. This additional metadata is available to metrics via the slice_point dictionary. - Default None. - """ - if maps is not None: - if self.cache_size != 0 and len(maps) > 0: - warnings.warn( - "Warning: Loading maps but cache on." "Should probably set use_cache=False in slicer." - ) - self._run_maps(maps) - self._setRad(self.radius) - if self.use_camera: - self._setupLSSTCamera() - self._presliceFootprint(sim_data) - else: - if self.latLonDeg: - self._build_tree( - np.radians(sim_data[self.lon_col]), - np.radians(sim_data[self.lat_col]), - self.leafsize, - ) - else: - self._build_tree(sim_data[self.lon_col], sim_data[self.lat_col], self.leafsize) - - @wraps(self._slice_sim_data) - def _slice_sim_data(islice): - """Return indexes for relevant opsim data at slice_point - (slice_point=lon_col/lat_col value .. 
usually ra/dec).""" - - # Build dict for slice_point info - slice_point = {"sid": islice} - if self.use_camera: - indices = self.sliceLookup[islice] - slice_point["chipNames"] = self.chipNames[islice] - else: - sx, sy, sz = simsUtils._xyz_from_ra_dec( - self.slice_points["ra"][islice], self.slice_points["dec"][islice] - ) - # Anything within half the side length is good no matter what rotation angle - # the camera is at - indices = self.opsimtree.query_ball_point((sx, sy, sz), self.side_radius) - # Now the larger radius. Need to make it an array for easy subscripting - initial_indices = np.array(self.opsimtree.query_ball_point((sx, sy, sz), self.rad), dtype=int) - # remove the indices we already know about - initial_indices = initial_indices[np.in1d(initial_indices, indices, invert=True)] - - if self.latLonDeg: - lat = np.radians(sim_data[self.lat_col][initial_indices]) - lon = np.radians(sim_data[self.lon_col][initial_indices]) - cos_rot = np.cos(np.radians(sim_data[self.rotSkyPosColName][initial_indices])) - sin_rot = np.cos(np.radians(sim_data[self.rotSkyPosColName][initial_indices])) - else: - lat = sim_data[self.lat_col][initial_indices] - lon = sim_data[self.lon_col][initial_indices] - cos_rot = np.cos(sim_data[self.rotSkyPosColName][initial_indices]) - sin_rot = np.sin(sim_data[self.rotSkyPosColName][initial_indices]) - # loop over the observations that might be overlapping the healpix, check each - for i, ind in enumerate(initial_indices): - # Rotate the camera - x_rotated = self.corners_x * cos_rot[i] - self.corners_y * sin_rot[i] - y_rotated = self.corners_x * sin_rot[i] + self.corners_y * cos_rot[i] - # How far is the pointing center from the healpix center - xshift, yshift = simsUtils.gnomonic_project_toxy( - lon[i], - lat[i], - self.slice_points["ra"][islice], - self.slice_points["dec"][islice], - ) - - x_rotated += xshift - y_rotated += yshift - # Use matplotlib to make a polygon - bbPath = mplPath.Path( - np.array( - [ - [x_rotated[0], y_rotated[0]], - [x_rotated[1], y_rotated[1]], - [x_rotated[2], y_rotated[2]], - [x_rotated[3], y_rotated[3]], - [x_rotated[0], y_rotated[0]], - ] - ) - ) - # Check if the slice_point is inside the image corners and append to list - if bbPath.contains_point((0.0, 0.0)): - indices.append(ind) - - # Loop through all the slice_point keys. If the first dimension of slice_point[key] has - # the same shape as the slicer, assume it is information per slice_point. - # Otherwise, pass the whole slice_point[key] information. Useful for stellar LF maps - # where we want to pass only the relevant LF and the bins that go with it. 
- - for key in self.slice_points: - if len(np.shape(self.slice_points[key])) == 0: - keyShape = 0 - else: - keyShape = np.shape(self.slice_points[key])[0] - if keyShape == self.nslice: - slice_point[key] = self.slice_points[key][islice] - else: - slice_point[key] = self.slice_points[key] - - return {"idxs": indices, "slice_point": slice_point} - - setattr(self, "_slice_sim_data", _slice_sim_data) From 4efe63743e843c2a3f60f461cd9948cd00535773 Mon Sep 17 00:00:00 2001 From: Lynne Jones Date: Tue, 8 Oct 2024 22:08:21 -0700 Subject: [PATCH 08/23] Remove comcam slicer --- rubin_sim/maf/slicers/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/rubin_sim/maf/slicers/__init__.py b/rubin_sim/maf/slicers/__init__.py index 57ef4f4f..a1e67259 100644 --- a/rubin_sim/maf/slicers/__init__.py +++ b/rubin_sim/maf/slicers/__init__.py @@ -1,6 +1,5 @@ from .base_slicer import * from .base_spatial_slicer import * -from .healpix_comcam_slicer import * from .healpix_sdss_slicer import * from .healpix_slicer import * from .healpix_subset_slicer import * From e5753ffc834512eaaff1a078b76bf4d58f95bb66 Mon Sep 17 00:00:00 2001 From: Lynne Jones Date: Tue, 8 Oct 2024 22:21:10 -0700 Subject: [PATCH 09/23] Ruff slicers --- rubin_sim/maf/slicers/healpix_sdss_slicer.py | 12 ++- rubin_sim/maf/slicers/healpix_slicer.py | 85 +++++++++++--------- rubin_sim/maf/slicers/hourglass_slicer.py | 6 +- rubin_sim/maf/slicers/mo_slicer.py | 46 ++++++----- rubin_sim/maf/slicers/movie_slicer.py | 56 ++++++++----- rubin_sim/maf/slicers/nd_slicer.py | 13 ++- rubin_sim/maf/slicers/one_d_slicer.py | 56 ++++++++----- rubin_sim/maf/slicers/uni_slicer.py | 4 +- 8 files changed, 161 insertions(+), 117 deletions(-) diff --git a/rubin_sim/maf/slicers/healpix_sdss_slicer.py b/rubin_sim/maf/slicers/healpix_sdss_slicer.py index 9790fc67..8ac22355 100644 --- a/rubin_sim/maf/slicers/healpix_sdss_slicer.py +++ b/rubin_sim/maf/slicers/healpix_sdss_slicer.py @@ -25,7 +25,8 @@ def __init__( leafsize=100, **kwargs, ): - """Using one corner of the chip as the spatial key and the diagonal as the radius.""" + """Using one corner of the chip as the spatial key and the + diagonal as the radius.""" super().__init__( verbose=verbose, lon_col=lon_col, @@ -57,9 +58,11 @@ def _slice_sim_data(islice): sx, sy, sz = _xyz_from_ra_dec(self.slice_points["ra"][islice], self.slice_points["dec"][islice]) # Query against tree. 
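+            # (self.rad is the chip-diagonal radius set in __init__, so this
+            # returns every visit whose chip could contain the slice point.)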
initIndices = self.opsimtree.query_ball_point((sx, sy, sz), self.rad) - # Loop through all the images and check if the slice_point is inside the corners of the chip + # Loop through all the images and check if the slice_point + # is inside the corners of the chip indices = [] - # Gnomic project all the corners that are near the slice point, centered on slice point + # Gnomic project all the corners that are near the slice point, + # centered on slice point x1, y1 = gnomonic_project_toxy( self.corners["RA1"][initIndices], self.corners["Dec1"][initIndices], @@ -98,7 +101,8 @@ def _slice_sim_data(islice): ] ) ) - # Check if the slice_point is inside the image corners and append to list if it is + # Check if the slice_point is inside the image corners + # and append to list if it is if bbPath.contains_point((0.0, 0.0)) == 1: indices.append(ind) diff --git a/rubin_sim/maf/slicers/healpix_slicer.py b/rubin_sim/maf/slicers/healpix_slicer.py index c2664441..60b5bd34 100644 --- a/rubin_sim/maf/slicers/healpix_slicer.py +++ b/rubin_sim/maf/slicers/healpix_slicer.py @@ -1,7 +1,9 @@ -"""A slicer that uses a Healpix grid to calculate metric values (at the center of each healpixel).""" +"""A slicer that uses a Healpix grid to calculate metric values +(at the center of each healpixel).""" # Requires healpy -# See more documentation on healpy here http://healpy.readthedocs.org/en/latest/tutorial.html +# See more documentation on healpy here +# http://healpy.readthedocs.org/en/latest/tutorial.html # Also requires numpy (for histogram and power spectrum plotting) __all__ = ("HealpixSlicer",) @@ -20,62 +22,64 @@ class HealpixSlicer(BaseSpatialSlicer): """ A spatial slicer that evaluates pointings on a healpix-based grid. - Note that a healpix slicer is intended to evaluate the sky on a spatial grid. - Usually this grid will be something like RA as the lon_col and Dec as the lat_col. - However, it could also be altitude and azimuth, in which case altitude as lat_col, - and azimuth as lon_col. + Note that a healpix slicer is intended to evaluate the sky + on a spatial grid. Usually this grid will be something like RA as + the lon_col and Dec as the lat_col. + However, it could also be altitude and azimuth, + in which case altitude as lat_col, and azimuth as lon_col. An additional alternative is to use HA/Dec, with lon_col=HA, lat_col=Dec. - When plotting with RA/Dec, the default HealpixSkyMap can be used, corresponding to + When plotting with RA/Dec, the default HealpixSkyMap can be used, + corresponding to {'rot': (0, 0, 0), 'flip': 'astro'}. - When plotting with alt/az, either the LambertSkyMap can be used (if Basemap is installed) + When plotting with alt/az, either the LambertSkyMap can be used + (if Basemap is installed) or the HealpixSkyMap can be used with a modified plot_dict, {'rot': (90, 90, 90), 'flip': 'geo'}. - When plotting with HA/Dec, only the HealpixSkyMap can be used, with a modified plot_dict of + When plotting with HA/Dec, only the HealpixSkyMap can be used, + with a modified plot_dict of {'rot': (0, -30, 0), 'flip': 'geo'}. Parameters ---------- nside : `int`, optional The nside parameter of the healpix grid. Must be a power of 2. - Default 128. lon_col : `str`, optional - Name of the longitude (RA equivalent) column to use from the input data. - Default fieldRA + Name of the longitude (RA equivalent) column to use from the + input data. lat_col : `str`, optional - Name of the latitude (Dec equivalent) column to use from the input data. 
- Default fieldDec + Name of the latitude (Dec equivalent) column to use from + the input data. lat_lon_deg : `bool`, optional - Flag indicating whether the lat and lon values in the input data are in - degrees (True) or radians (False). - Default True. + Flag indicating whether the lat and lon values in the input data are + in degrees (True) or radians (False). verbose : `bool`, optional - Flag to indicate whether or not to write additional information to stdout during runtime. - Default True. - badval : `float`, optional - Bad value flag, relevant for plotting. Default the np.nan value (in order to properly flag - bad data points for plotting with the healpix plotting routines). This should not be changed. + Flag to indicate whether or not to write additional information to + stdout during runtime. use_cache : `bool`, optional - Flag allowing the user to indicate whether or not to cache (and reuse) metric results - calculated with the same set of simulated data pointings. - This can be safely set to True for slicers not using maps and will result in increased speed. - When calculating metric results using maps, the metadata at each individual ra/dec point may - influence the metric results and so use_cache should be set to False. - Default True. + Flag allowing the user to indicate whether or not to cache + (and reuse) metric results calculated with the same set of + simulated data pointings. + This can be safely set to True for slicers not using maps and + will result in increased speed. + When calculating metric results using maps, the map data at each + individual ra/dec point may influence the metric results and so + use_cache should be set to False. leafsize : `int`, optional Leafsize value for kdtree. Default 100. radius : `float`, optional - Radius for matching in the kdtree. Equivalent to the radius of the FOV. Degrees. - Default 2.45. + Radius for matching in the kdtree. + Equivalent to the radius of the FOV. Degrees. use_camera : `bool`, optional Flag to indicate whether to use the LSST camera footprint or not. - Default True. camera_footprint_file : `str`, optional - Name of the camera footprint map to use. Can be None, which will use the default. + Name of the camera footprint map to use. + Can be None, which will use the default footprint map. rot_sky_pos_col_name : `str`, optional - Name of the rotSkyPos column in the input data. Only used if use_camera is True. - Describes the orientation of the camera orientation compared to the sky. - Default rotSkyPos. + Name of the rotSkyPos column in the input data. + Only used if use_camera is True. + Describes the orientation of the camera orientation + compared to the sky. """ def __init__( @@ -85,7 +89,6 @@ def __init__( lat_col="fieldDec", lat_lon_deg=True, verbose=True, - badval=np.nan, use_cache=True, leafsize=100, radius=2.45, @@ -93,12 +96,11 @@ def __init__( camera_footprint_file=None, rot_sky_pos_col_name="rotSkyPos", ): - """Instantiate and set up healpix slicer object.""" super().__init__( verbose=verbose, lon_col=lon_col, lat_col=lat_col, - badval=badval, + badval=np.nan, radius=radius, leafsize=leafsize, use_camera=use_camera, @@ -131,7 +133,8 @@ def __init__( } self.use_cache = use_cache if use_cache: - # use_cache set the size of the cache for the memoize function in sliceMetric. + # use_cache set the size of the cache for the memoize function in + # sliceMetric. 
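+            # 4*pi / resolution is ~2x the number of pixels along a
+            # great circle (circumference 2*pi radians).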
bin_res = hp.nside2resol(nside) # Pixel size in radians # Set the cache size to be ~2x the circumference self.cache_size = int(np.round(4.0 * np.pi / bin_res)) @@ -166,7 +169,9 @@ def __eq__(self, other_slicer): return result def _pix2radec(self, islice): - """Given the pixel number / sliceID, return the RA/Dec of the pointing, in radians.""" + """Given the pixel number / sliceID, return the RA/Dec of the + pointing, in radians. + """ # Calculate RA/Dec in RADIANS of pixel in this healpix slicer. # Note that ipix could be an array, # in which case RA/Dec values will be an array also. diff --git a/rubin_sim/maf/slicers/hourglass_slicer.py b/rubin_sim/maf/slicers/hourglass_slicer.py index 862f6d96..5a8d22ae 100644 --- a/rubin_sim/maf/slicers/hourglass_slicer.py +++ b/rubin_sim/maf/slicers/hourglass_slicer.py @@ -20,7 +20,8 @@ def __init__(self, verbose=True, badval=-666): def write_data(self, outfilename, metric_values, metric_name="", **kwargs): """ - Override base write method: we don't want to save hourglass metric data. + Override base write method: we don't want to save hourglass metric + data. The data volume is too large. """ @@ -28,7 +29,8 @@ def write_data(self, outfilename, metric_values, metric_name="", **kwargs): def read_metric_data(self, infilename): """ - Override base read method to 'pass': we don't save or read hourglass metric data. + Override base read method to 'pass': + we don't save or read hourglass metric data. The data volume is too large. """ diff --git a/rubin_sim/maf/slicers/mo_slicer.py b/rubin_sim/maf/slicers/mo_slicer.py index 4bc95b75..70b367d7 100644 --- a/rubin_sim/maf/slicers/mo_slicer.py +++ b/rubin_sim/maf/slicers/mo_slicer.py @@ -10,7 +10,8 @@ class MoObjSlicer(BaseSlicer): - """Slice moving object _observations_, per object and optionally clone/per H value. + """Slice moving object _observations_, per object and optionally + clone/per H value. Iteration over the MoObjSlicer will go as: * iterate over each orbit; @@ -19,7 +20,8 @@ class MoObjSlicer(BaseSlicer): Parameters ---------- h_range : numpy.ndarray or None - The H values to clone the orbital parameters over. If Hrange is None, will not clone orbits. + The H values to clone the orbital parameters over. + If Hrange is None, will not clone orbits. """ def __init__(self, h_range=None, verbose=True, badval=0): @@ -46,7 +48,8 @@ def setup_slicer(self, orbit_file, delim=None, skiprows=None, obs_file=None): This is necessary, in order to be able to generate plots. obs_file : str, optional The file containing the observations of each object, optional. - If not provided (default, None), then the slicer will not be able to 'slice', but can still plot. + If not provided (default, None), then the slicer will not be + able to slice, but can still plot. """ self.read_orbits(orbit_file, delim=delim, skiprows=skiprows) if obs_file is not None: @@ -55,7 +58,8 @@ def setup_slicer(self, orbit_file, delim=None, skiprows=None, obs_file=None): self.obs_file = None self.all_obs = None self.obs = None - # Add these filenames to the slicer init values, to preserve in output files. + # Add these filenames to the slicer init values, + # to preserve in output files. self.slicer_init["orbit_file"] = self.orbit_file self.slicer_init["obs_file"] = self.obs_file @@ -65,7 +69,8 @@ def read_orbits(self, orbit_file, delim=None, skiprows=None): orb.read_orbits(orbit_file, delim=delim, skiprows=skiprows) self.orbit_file = orbit_file self.orbits = orb.orbits - # Then go on as previously. 
Need to refactor this into 'setup_slicer' style. + # Then go on as previously. Need to refactor this + # into 'setup_slicer' style. self.nSso = len(self.orbits) self.slice_points = {} self.slice_points["orbits"] = self.orbits @@ -80,14 +85,16 @@ def read_orbits(self, orbit_file, delim=None, skiprows=None): self.nslice = self.shape[0] * self.shape[1] def read_obs(self, obs_file): - """Read observations of the solar system objects (such as created by sims_movingObjects). + """Read observations of the solar system objects + (such as created by sims_movingObjects). Parameters ---------- obs_file: str The file containing the observation information. """ - # For now, just read all the observations (should be able to chunk this though). + # For now, just read all the observations + # (should be able to chunk this though). restore_file = np.load(obs_file) self.all_obs = restore_file["object_observations"].copy() restore_file.close() @@ -98,8 +105,8 @@ def read_obs(self, obs_file): self.all_obs["velocity"] = np.sqrt(self.all_obs["dradt"] ** 2 + self.all_obs["ddecdt"] ** 2) def subset_obs(self, pandas_constraint=None): - """ - Choose a subset of all the observations, such as those in a particular time period. + """Choose a subset of all the observations, + such as those in a particular time period. """ if pandas_constraint is None: self.obs = self.all_obs @@ -109,8 +116,10 @@ def subset_obs(self, pandas_constraint=None): def _slice_obs(self, idx): """Return the observations of a given ssoId. - For now this works for any ssoId; in the future, this might only work as ssoId is - progressively iterated through the series of ssoIds (so we can 'chunk' the reading). + For now this works for any ssoId; in the future, + this might only work as ssoId is + progressively iterated through the series of ssoIds + (so we can chunk the reading). Parameters ---------- @@ -134,16 +143,12 @@ def _slice_obs(self, idx): return {"obs": obs.to_records(), "orbit": orb, "Hvals": Hvals} def __iter__(self): - """ - Iterate through each of the ssoIds. - """ + """Iterate through each of the ssoIds.""" self.idx = 0 return self def __next__(self): - """ - Returns result of self._getObs when iterating over moSlicer. - """ + """Returns result of self._getObs when iterating over moSlicer.""" if self.idx >= self.nSso: raise StopIteration idx = self.idx @@ -151,13 +156,12 @@ def __next__(self): return self._slice_obs(idx) def __getitem__(self, idx): - # This may not be guaranteed to work if/when we implement chunking of the obs_file. + # This may not be guaranteed to work if/when we + # implement chunking of the obs_file. return self._slice_obs(idx) def __eq__(self, other_slicer): - """ - Evaluate if two slicers are equal. - """ + """Evaluate if two slicers are equal.""" result = False if isinstance(other_slicer, MoObjSlicer): if other_slicer.orbit_file == self.orbit_file: diff --git a/rubin_sim/maf/slicers/movie_slicer.py b/rubin_sim/maf/slicers/movie_slicer.py index cdee7515..abd9c259 100644 --- a/rubin_sim/maf/slicers/movie_slicer.py +++ b/rubin_sim/maf/slicers/movie_slicer.py @@ -29,28 +29,34 @@ def __init__( cumulative=True, force_no_ffmpeg=False, ): - """ - The 'movieSlicer' acts similarly to the OneDSlicer (slices on one data column). - However, the data slices from the movieSlicer are intended to be fed to another slicer, which then - (together with a set of Metrics) calculates metric values + plots at each slice - created by the movieSlicer. 
- The job of the movieSlicer is to track those slices and put them together into a movie. + """The 'movieSlicer' acts similarly to the OneDSlicer + (slices on one data column). + However, the data slices from the movieSlicer are intended to be + fed to another slicer, which then + (together with a set of Metrics) calculates metric values + + plots at each slice created by the movieSlicer. + The job of the movieSlicer is to track those slices and + put them together into a movie. 'slice_col_name' is the name of the data column to use for slicing. - 'slice_col_units' lets the user set the units (for plotting purposes) of the slice column. - 'bins' can be a numpy array with the binpoints for sliceCol or a single integer value + 'slice_col_units' lets the user set the units (for plotting purposes) + of the slice column. + 'bins' can be a numpy array with the binpoints for sliceCol + or a single integer value (if a single value, this will be used as the number of bins, together with data min/max or bin_min/max), as in numpy's histogram function. - If 'bin_size' is used, this will override the bins value and will be used - together with the data min/max + If 'bin_size' is used, this will override the bins value and + will be used together with the data min/max or bin_min/Max to set the binpoint values. - Bins work like numpy histogram bins: the last 'bin' value is end value of last bin; - all bins except for last bin are half-open ([a, b>), the last one is ([a, b]). + Bins work like numpy histogram bins: the last 'bin' value is end + value of last bin; all bins except for last bin are half-open + ([a, b>), the last one is ([a, b]). - The movieSlicer stitches individual frames together into a movie using ffmpeg. Thus, on - instantiation it checks that ffmpeg is available and will raise and exception if not. + The movieSlicer stitches individual frames together into a movie + using ffmpeg. Thus, on instantiation it checks that ffmpeg is + available and will raise and exception if not. This behavior can be overriden using force_no_ffmpeg = True (in order to create a movie later perhaps). """ @@ -93,7 +99,8 @@ def setup_slicer(self, sim_data, maps=None): self.bin_min = slice_col.min() if self.bin_max is None: self.bin_max = slice_col.max() - # Give warning if bin_min = bin_max, and do something at least slightly reasonable. + # Give warning if bin_min = bin_max, and do something at + # least slightly reasonable. if self.bin_min == self.bin_max: warnings.warn( "bin_min = bin_max (maybe your data is single-valued?). " @@ -133,7 +140,8 @@ def setup_slicer(self, sim_data, maps=None): self.bin_size, "float", ) - # Set shape / nbins to be one less than # of bins because last binvalue is RH edge only + # Set shape / nbins to be one less than # of bins because + # last binvalue is RH edge only self.nslice = len(self.bins) - 1 self.shape = self.nslice # Set slice_point metadata. @@ -145,7 +153,8 @@ def setup_slicer(self, sim_data, maps=None): self.sim_idxs = np.argsort(sim_data[self.slice_col_name]) simFieldsSorted = np.sort(sim_data[self.slice_col_name]) # "left" values are location where simdata == bin value - # Note that this setup will place any the visits beyond the last bin into the last bin. + # Note that this setup will place any the visits beyond + # the last bin into the last bin. 
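+        # searchsorted with side="left" returns, for each bin edge, the index
+        # of the first sorted visit value that is >= that edge.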
self.left = np.searchsorted(simFieldsSorted, self.bins[:-1], "left") self.left = np.concatenate( ( @@ -163,9 +172,11 @@ def setup_slicer(self, sim_data, maps=None): @wraps(self._slice_sim_data) def _slice_sim_data(islice): """ - Slice sim_data on oneD sliceCol, to return relevant indexes for slice_point. + Slice sim_data on oneD sliceCol, to + return relevant indexes for slice_point. """ - # this is the important part. The ids here define the pieces of data that get + # this is the important part. + # The ids here define the pieces of data that get # passed on to subsequent slicers # cumulative version of 1D slicing idxs = self.sim_idxs[0 : self.left[islice + 1]] @@ -184,7 +195,8 @@ def _slice_sim_data(islice): @wraps(self._slice_sim_data) def _slice_sim_data(islice): """ - Slice sim_data on oneD sliceCol, to return relevant indexes for slice_point. + Slice sim_data on oneD sliceCol, + to return relevant indexes for slice_point. """ idxs = self.sim_idxs[self.left[islice] : self.left[islice + 1]] return { @@ -217,8 +229,8 @@ def make_movie( ips=10.0, fps=10.0, ): - """ - Takes in metric and slicer metadata and calls ffmpeg to stitch together output files. + """Takes in metric and slicer metadata and calls + ffmpeg to stitch together output files. """ if not os.path.isdir(out_dir): raise Exception("Cannot find output directory %s with movie input files." % (out_dir)) diff --git a/rubin_sim/maf/slicers/nd_slicer.py b/rubin_sim/maf/slicers/nd_slicer.py index 318a17d3..91c9db12 100644 --- a/rubin_sim/maf/slicers/nd_slicer.py +++ b/rubin_sim/maf/slicers/nd_slicer.py @@ -21,17 +21,14 @@ class NDSlicer(BaseSlicer): slice_col_list : `list` of `str` Names of the data columns for slicing (e.g. 'airmass`, etc) bins_list : `int` or `list` of `int` or `np.ndarray`, opt - Single integer (for same number of slices in each dimension) or a list of integers (matching + Single integer (for same number of slices in each dimension) + or a list of integers (matching slice_col_list) or list of arrays. Default 100, in all dimensions. All bins are half-open ([a, b)). """ def __init__(self, slice_col_list=None, bins_list=100, verbose=True): - """Instantiate object. - bins_list can be a list of numpy arrays with the respective slicepoints for slice_col_list, - or a list of integers (one per column in slice_col_list) or a single value - (repeated for all columns, default=100).""" super().__init__(verbose=verbose) self.slice_col_list = slice_col_list self.columns_needed = self.slice_col_list @@ -70,7 +67,8 @@ def setup_slicer(self, sim_data, maps=None): else: self.bins.append(np.sort(bl)) self.nslice = (np.array(list(map(len, self.bins))) - 1).prod() - # Count how many bins we have total (not counting last 'RHS' bin values, as in oneDSlicer). + # Count how many bins we have total + # (not counting last 'RHS' bin values, as in oneDSlicer). self.shape = self.nslice # Set up slice metadata. self.slice_points["sid"] = np.arange(self.nslice) @@ -110,7 +108,8 @@ def setup_slicer(self, sim_data, maps=None): ), ) ) - # Add these calculated values into the class lists of sim_idxs and lefts. + # Add these calculated values into the class lists of + # sim_idxs and lefts. 
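+            # (one sim_idxs / lefts entry is appended per slicing dimension.)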
self.sim_idxs.append(sim_idxs) self.lefts.append(left) diff --git a/rubin_sim/maf/slicers/one_d_slicer.py b/rubin_sim/maf/slicers/one_d_slicer.py index b42316dd..55d091d8 100644 --- a/rubin_sim/maf/slicers/one_d_slicer.py +++ b/rubin_sim/maf/slicers/one_d_slicer.py @@ -13,24 +13,30 @@ class OneDSlicer(BaseSlicer): - """OneD Slicer allows the 'slicing' of data into bins in a single dimension. + """OneD Slicer allows the slicing of data into bins in a single dimension. Parameters ---------- slice_col_name : `str` The name of the data column to base slicing on (i.e. 'airmass', etc.) slice_col_units : `str`, optional - Set a name for the units of the sliceCol. Used for plotting labels. Default None. + Set a name for the units of the sliceCol. Used for plotting labels. bins : np.ndarray, optional - The data will be sliced into 'bins': this can be defined as an array here. Default None. + The data will be sliced into 'bins': this can be defined as an + array here. Default None. bin_min : `float`, optional bin_max : `float`, optional bin_size : `float`, optional - If bins is not defined, then bin_min/bin_max/bin_size can be chosen to anchor the slice points. + If bins is not defined, then bin_min/bin_max/bin_size can be chosen + to anchor the slice points. Default None. - Priority goes: bins >> bin_min/bin_max/bin_size >> data values (if none of the above are chosen). + Priority goes: bins >> bin_min/bin_max/bin_size >> data values + (if none of the above are chosen). - All bins except for the last bin are half-open ([a, b)) while the last bin is ([a, b]). + Notes + ----- + All bins except for the last bin are half-open ([a, b)) while the + last bin is ([a, b]). """ def __init__( @@ -49,8 +55,10 @@ def __init__( raise ValueError("slice_col_name cannot be left None - choose a data column to group data by") self.slice_col_name = slice_col_name self.columns_needed = [slice_col_name] - # We could try to set up the self.bins here -- but it's also possible that - # these bin_min/max/size values have not been set and should just be set from the data. + # We could try to set up the self.bins here -- + # but it's also possible that + # these bin_min/max/size values have not been set and + # should just be set from the data. self.bins = bins self.bin_min = bin_min self.bin_max = bin_max @@ -87,10 +95,12 @@ def __init__( ] def setup_slicer(self, sim_data, maps=None): - """ - Set up bins in slicer. - This happens AFTER sim_data is defined, thus typically in the MetricBundleGroup. - This maps data into the bins; it's not a good idea to reuse a OneDSlicer as a result. + """Set up bins in slicer. + + This happens AFTER sim_data is defined, + thus typically in the MetricBundleGroup. + This maps data into the bins; + it's not a good idea to reuse a OneDSlicer as a result. """ if "bins" in self.slice_points: warning_msg = "Warning: this OneDSlicer was already set up once. " @@ -101,14 +111,16 @@ def setup_slicer(self, sim_data, maps=None): ) warnings.warn(warning_msg) slice_col = sim_data[self.slice_col_name] - # Set bins from data or specified values, if they were previously defined. + # Set bins from data or specified values, + # if they were previously defined. if self.bins is None: # Set bin min/max values (could have been set in __init__) if self.bin_min is None: self.bin_min = np.nanmin(slice_col) if self.bin_max is None: self.bin_max = np.nanmax(slice_col) - # Give warning if bin_min = bin_max, and do something at least slightly reasonable. 
+ # Give warning if bin_min = bin_max, + # and do something at least slightly reasonable. if self.bin_min == self.bin_max: warnings.warn( "bin_min = bin_max (maybe your data is single-valued?). " @@ -124,10 +136,12 @@ def setup_slicer(self, sim_data, maps=None): self.bin_size = (self.bin_max - self.bin_min) / float(nbins) # Set bins self.bins = np.arange(self.bin_min, self.bin_max + self.bin_size / 2.0, self.bin_size, "float") - # nslice is used to stop iteration and should reflect the usable length of the bins + # nslice is used to stop iteration and should + # reflect the usable length of the bins self.nslice = len(self.bins) - 1 # and "shape" refers to the length of the datavalues, - # and should be one less than # of bins because last binvalue is RH edge only + # and should be one less than # of bins because last + # binvalue is RH edge only self.shape = self.nslice # Set slice_point metadata. self.slice_points["sid"] = np.arange(self.nslice) @@ -148,7 +162,9 @@ def setup_slicer(self, sim_data, maps=None): # Set up _slice_sim_data method for this class. @wraps(self._slice_sim_data) def _slice_sim_data(islice): - """Slice sim_data on oneD sliceCol, to return relevant indexes for slice_point.""" + """Slice sim_data on oneD sliceCol, to return relevant + indexes for slice_point. + """ idxs = self.sim_idxs[islice] bin_left = self.bins[islice] bin_right = self.bins[islice + 1] @@ -168,11 +184,13 @@ def __eq__(self, other_slicer): result = False if isinstance(other_slicer, OneDSlicer): if self.slice_col_name == other_slicer.slice_col_name: - # If slicer restored from disk or setup, then 'bins' in slice_points dict. + # If slicer restored from disk or setup, + # then 'bins' in slice_points dict. # This is preferred method to see if slicers are equal. if ("bins" in self.slice_points) & ("bins" in other_slicer.slice_points): result = np.array_equal(other_slicer.slice_points["bins"], self.slice_points["bins"]) - # However, before we 'setup' the slicer with data, the slicers could be equivalent. + # However, before we 'setup' the slicer with data, + # the slicers could be equivalent. else: if (self.bins is not None) and (other_slicer.bins is not None): result = np.array_equal(self.bins, other_slicer.bins) diff --git a/rubin_sim/maf/slicers/uni_slicer.py b/rubin_sim/maf/slicers/uni_slicer.py index 88407b69..d01d47c5 100644 --- a/rubin_sim/maf/slicers/uni_slicer.py +++ b/rubin_sim/maf/slicers/uni_slicer.py @@ -1,5 +1,5 @@ -# UniSlicer class. -# This slicer simply returns the indexes of all data points. No slicing done at all. 
+"""UniSlicer - no slicing at all, simply return all data points.""" + __all__ = ("UniSlicer",) from functools import wraps From 6d52f7508cc6bd2055d8f55128ebba7252bc7eb8 Mon Sep 17 00:00:00 2001 From: Lynne Jones Date: Tue, 8 Oct 2024 22:33:03 -0700 Subject: [PATCH 10/23] Ruff stackers --- rubin_sim/maf/stackers/coord_stackers.py | 10 +- rubin_sim/maf/stackers/label_stackers.py | 41 +++-- rubin_sim/maf/stackers/m5_optimal_stacker.py | 38 ++--- rubin_sim/maf/stackers/mo_phase.py | 6 +- rubin_sim/maf/stackers/mo_stackers.py | 167 ++++++++++++------- rubin_sim/maf/stackers/n_follow_stacker.py | 17 +- rubin_sim/maf/stackers/neo_dist_stacker.py | 1 - rubin_sim/maf/stackers/sdss_stackers.py | 3 +- rubin_sim/maf/stackers/sn_stacker.py | 30 +--- 9 files changed, 182 insertions(+), 131 deletions(-) diff --git a/rubin_sim/maf/stackers/coord_stackers.py b/rubin_sim/maf/stackers/coord_stackers.py index fec7ab6b..66b1a133 100644 --- a/rubin_sim/maf/stackers/coord_stackers.py +++ b/rubin_sim/maf/stackers/coord_stackers.py @@ -103,7 +103,8 @@ def __init__(self, ra_col="fieldRA", dec_col="fieldDec", degrees=True): def _run(self, sim_data, cols_present=False): # raCol and DecCol in radians, gall/b in radians. if cols_present: - # Column already present in data; assume it is correct and does not need recalculating. + # Column already present in data; + # assume it is correct and does not need recalculating. return sim_data if self.degrees: c = SkyCoord(ra=sim_data[self.ra_col] * u.deg, dec=sim_data[self.dec_col] * u.deg).transform_to( @@ -119,7 +120,9 @@ def _run(self, sim_data, cols_present=False): class EclipticStacker(BaseStacker): - """Add the ecliptic coordinates of each RA/Dec pointing: eclipLat, eclipLon + """Add the ecliptic coordinates of each RA/Dec pointing: + eclipLat, eclipLon + Optionally subtract off the sun's ecliptic longitude and wrap. Parameters @@ -157,7 +160,8 @@ def __init__( def _run(self, sim_data, cols_present=False): if cols_present: - # Column already present in data; assume it is correct and does not need recalculating. + # Column already present in data; + # assume it is correct and does not need recalculating. return sim_data for i in np.arange(sim_data.size): if self.degrees: diff --git a/rubin_sim/maf/stackers/label_stackers.py b/rubin_sim/maf/stackers/label_stackers.py index 84c7e929..5b518519 100644 --- a/rubin_sim/maf/stackers/label_stackers.py +++ b/rubin_sim/maf/stackers/label_stackers.py @@ -7,15 +7,18 @@ class WFDlabelStacker(BaseStacker): - """Add an 'areaId' column to flag whether a visit was inside the 'footprint'. + """Add an 'areaId' column to flag whether a visit was + inside the 'footprint'. Parameters ---------- footprint : `np.NDarray`, optional The healpix map indicating the desired footprint region. - If this is not defined (default None), then the entire sky is used as the footprint. + If this is not defined (default None), + then the entire sky is used as the footprint. fp_threshold : `float`, optional - The threshold for the fraction of the visit area which falls within the footprint in order + The threshold for the fraction of the visit area which + falls within the footprint in order to be counted as 'in' the footprint. Default 0.4. areaIdName : `str`, optional Value to place in the areaId column for visits which match this area. @@ -24,17 +27,23 @@ class WFDlabelStacker(BaseStacker): dec_col : `str`, optional The name of the Dec column. Default fieldDec. note_col : `str`, optional - The name of the 'note' column in the database. Default 'note'. 
This is used to identify visits + The name of the 'note' (scheduler_note) column in the database. + This is used to identify visits which were part of a DD sequence. exclude_dd : `bool`, optional - Exclude (True) or include (False) visits which are part of a DD sequence within this 'area'. + Exclude (True) or include (False) visits which are part of a + DD sequence within this 'area'. - This stacker adds an areaId column in the opsim database, to be labelled with 'areaIdName' if the - visit falls within the healpix footprint map and (optionally) is not tagged as a DD visit. + Notes + ----- + This stacker adds an areaId column in the opsim database, + to be labelled with 'areaIdName' if the visit falls within the + healpix footprint map and (optionally) is not tagged as a DD visit. If it falls outside the footprint, the visit is tagged as "NULL". - If it was part of a DD sequence, the visit is tagged with an ID which is unique to that DD field, - if 'exclude_dd' is True. - Generally this would be likely to be used to tag visits as belonging to WFD - but not necessarily! + If it was part of a DD sequence, the visit is tagged with an + ID which is unique to that DD field, if 'exclude_dd' is True. + Generally this would be likely to be used to tag visits as + belonging to WFD - but not necessarily! Any healpix footprint is valid. """ @@ -60,7 +69,8 @@ def __init__( self.area_id_name = area_id_name self.exclude_dd = exclude_dd if footprint is None: - # If footprint was not defined, just set it to cover the entire sky, at nside=64 + # If footprint was not defined, + # just set it to cover the entire sky, at nside=64 footprint = np.ones(hp.nside2npix(64)) self.footprint = footprint self.nside = hp.npix2nside(len(self.footprint)) @@ -85,10 +95,13 @@ def _run(self, sim_data, cols_present=False): for i, (v, note) in enumerate(zip(vec, sim_data[self.note_col])): # Identify the healpixels which would be inside this pointing pointing_healpix = hp.query_disc(self.nside, v, radius, inclusive=False) - # The wfd_footprint consists of values of 0/1 if out/in WFD footprint + # The wfd_footprint consists of values of 0/1 + # if out/in WFD footprint hp_in_fp = self.footprint[pointing_healpix].sum() - # So in_fp= the number of healpixels which were in the specified footprint - # .. in the # in / total # > limit (0.4) then "yes" it's in the footprint + # So in_fp= the number of healpixels which were in + # the specified footprint + # .. in the # in / total # > limit (0.4) + # then "yes" it's in the footprint in_fp = hp_in_fp / len(pointing_healpix) if note.startswith("DD") and self.exclude_dd: area_id[i] = self.define_ddname(note) diff --git a/rubin_sim/maf/stackers/m5_optimal_stacker.py b/rubin_sim/maf/stackers/m5_optimal_stacker.py index 0a96a3be..58fe5389 100644 --- a/rubin_sim/maf/stackers/m5_optimal_stacker.py +++ b/rubin_sim/maf/stackers/m5_optimal_stacker.py @@ -7,9 +7,7 @@ def generate_sky_slopes(): - """ - Fit a line to how the sky brightness changes with airmass. - """ + """Fit a line to how the sky brightness changes with airmass.""" import healpy as hp import rubin_sim.skybrightness as sb @@ -32,36 +30,30 @@ def generate_sky_slopes(): class M5OptimalStacker(BaseStacker): - """ - Make a new m5 column as if observations were taken on the meridian. + """Make a new m5 column as if observations were taken on the meridian. If the moon is up, assume sky brightness stays the same. - Assumes seeing scales as airmass^0.6. Uses linear fits for sky and airmass relation. + Assumes seeing scales as airmass^0.6. 
Uses linear fits for sky + and airmass relation. Parameters ---------- - airmass_col : str ('airmass') + airmass_col : `str` Column name for the airmass per pointing. - dec_col : str ('dec_rad') + dec_col : `str` Column name for the pointing declination. - sky_bright_col: str ('filtSkyBrightness') + sky_bright_col : `str` Column name for the sky brighntess per pointing. - filter_col : str ('filter') + filter_col : `str` Column name for the filter name. - m5_col : str ('fiveSigmaDepth') + m5_col : `str` Colum name for the five sigma limiting depth per pointing. - moon_alt_col : str ('moonAlt') + moon_alt_col : `str` Column name for the moon altitude per pointing. - sun_alt_col : str ('sun_alt_col') + sun_alt_col : `str` Column name for the sun altitude column. - site : str ('LSST') + site : `str` Name of the site. - - Returns - ------- - numpy.array - Adds a column to that is approximately what the five-sigma depth would have - been if the observation had been taken on the meridian. """ cols_added = ["m5_optimal"] @@ -102,7 +94,8 @@ def __init__( def _run(self, sim_data, cols_present=False): # k_atm values from rubin_sim.operations gen_output.py k_atm = {"u": 0.50, "g": 0.21, "r": 0.13, "i": 0.10, "z": 0.07, "y": 0.18} - # Linear fits to sky brightness change, no moon, twilight, or zodiacal components + # Linear fits to sky brightness change, + # no moon, twilight, or zodiacal components # Use generate_sky_slopes to regenerate if needed. sky_slopes = { "g": -0.52611780327408397, @@ -119,7 +112,8 @@ def _run(self, sim_data, cols_present=False): delta_sky[ np.where((sim_data[self.moon_alt_col] > 0) | (sim_data[self.sun_alt_col] > np.radians(-18.0))) ] = 0 - # Using Approximation that FWHM~X^0.6. So seeing term in m5 of: 0.25 * log (7.0/FWHMeff ) + # Using Approximation that FWHM~X^0.6. + # So seeing term in m5 of: 0.25 * log (7.0/FWHMeff ) # Goes to 0.15 log(FWHM_min / FWHM_eff) in the difference m5_optimal = ( sim_data[self.m5_col] diff --git a/rubin_sim/maf/stackers/mo_phase.py b/rubin_sim/maf/stackers/mo_phase.py index d749a344..aa1786c1 100644 --- a/rubin_sim/maf/stackers/mo_phase.py +++ b/rubin_sim/maf/stackers/mo_phase.py @@ -202,7 +202,8 @@ def phase__halley_marcus(phase): """Halley-Marcus composite dust phase function. - This is appropriate for use when calculating the brightness of cometary coma. + This is appropriate for use when calculating the brightness of + cometary coma. Parameters ---------- @@ -253,7 +254,8 @@ def phase_hg(phase, G=0.15): phi : float or array Phase function evaluated at phase """ - # see Muinonen et al 2010, eqn 6 (http://dx.doi.org/10.1016/j.icarus.2010.04.003) + # see Muinonen et al 2010, eqn 6 + # (http://dx.doi.org/10.1016/j.icarus.2010.04.003) phi1 = np.exp(-3.33 * np.power(np.tan(np.radians(phase) / 2), 0.63)) phi2 = np.exp(-1.87 * np.power(np.tan(np.radians(phase) / 2), 1.22)) return (1 - G) * phi1 + G * phi2 diff --git a/rubin_sim/maf/stackers/mo_stackers.py b/rubin_sim/maf/stackers/mo_stackers.py index ee9a8420..9fcaf73d 100644 --- a/rubin_sim/maf/stackers/mo_stackers.py +++ b/rubin_sim/maf/stackers/mo_stackers.py @@ -22,9 +22,11 @@ class BaseMoStacker(BaseStacker): - """Base class for moving object (SSobject) stackers. Relevant for MoSlicer ssObs (pd.dataframe). + """Base class for moving object (SSobject) stackers. + Relevant for MoSlicer ssObs (pd.dataframe). - Provided to add moving-object specific API for 'run' method of moving object stackers. + Provided to add moving-object specific API for 'run' + method of moving object stackers. 
""" def run(self, sso_obs, href, hval=None): @@ -37,38 +39,48 @@ def run(self, sso_obs, href, hval=None): with warnings.catch_warnings(): warnings.simplefilter("ignore") sso_obs, cols_present = self._add_stacker_cols(sso_obs) - # Here we don't really care about cols_present, because almost every time we will be readding + # Here we don't really care about cols_present, because almost + # every time we will be readding # columns anymore (for different H values). return self._run(sso_obs, href, hval) class MoMagStacker(BaseMoStacker): - """Add columns relevant to SSobject apparent magnitudes and visibility to the slicer ssoObs + """Add columns relevant to SSobject apparent magnitudes and + visibility to the slicer ssoObs dataframe, given a particular Href and current h_val. Specifically, this stacker adds magLimit, appMag, SNR, and vis. - magLimit indicates the appropriate limiting magnitude to consider for a particular object in a particular - observation, when combined with the losses due to detection (dmag_detect) or trailing (dmagTrail). - appMag adds the apparent magnitude in the filter of the current object, at the current h_val. + magLimit indicates the appropriate limiting magnitude to consider + for a particular object in a particular + observation, when combined with the losses due to detection + (dmag_detect) or trailing (dmagTrail). + appMag adds the apparent magnitude in the filter of the current object, + at the current h_val. SNR adds the SNR of this object, given the magLimit. - vis adds a flag (0/1) indicating whether an object was visible (assuming a 5sigma threshhold including + vis adds a flag (0/1) indicating whether an object was visible + (assuming a 5sigma threshhold including some probabilistic determination of visibility). Parameters ---------- m5Col : `str`, optional - Name of the column describing the 5 sigma depth of each visit. Default fiveSigmaDepth. + Name of the column describing the 5 sigma depth of each visit. + Default fiveSigmaDepth. lossCol : `str`, optional Name of the column describing the magnitude losses, - due to trailing (dmagTrail) or detection (dmag_detect). Default dmag_detect. + due to trailing (dmagTrail) or detection (dmag_detect). + Default dmag_detect. gamma : `float`, optional The 'gamma' value for calculating SNR. Default 0.038. LSST range under normal conditions is about 0.037 to 0.039. sigma : `float`, optional - The 'sigma' value for probabilistic prediction of whether or not an object is visible at 5sigma. + The 'sigma' value for probabilistic prediction of whether or not + an object is visible at 5sigma. Default 0.12. - The probabilistic prediction of visibility is based on Fermi-Dirac completeness formula (see SDSS, - eqn 24, Stripe82 analysis: http://iopscience.iop.org/0004-637X/794/2/120/pdf/apj_794_2_120.pdf). + The probabilistic prediction of visibility is based on + Fermi-Dirac completeness formula (see SDSS, eqn 24, Stripe82 analysis: + http://iopscience.iop.org/0004-637X/794/2/120/pdf/apj_794_2_120.pdf). randomSeed: `int` or None, optional If set, then used as the random seed for the numpy random number generation for the dither offsets. @@ -133,7 +145,8 @@ def __init__( self.units = ["mag", "mag", "SNR", ""] def _run(self, sso_obs, href, hval): - # hval = current H value (useful if cloning over H range), href = reference H value from orbit. + # hval = current H value (useful if cloning over H range), + # href = reference H value from orbit. # Without cloning, href = hval. 
# add apparent magnitude self.mag_stacker._run(sso_obs, href, hval) @@ -145,11 +158,14 @@ def _run(self, sso_obs, href, hval): class AppMagNullStacker(BaseMoStacker): """Do nothing to calculate an apparent magnitude. - This assumes an apparent magnitude was part of the input data and does not need to be modified (no + This assumes an apparent magnitude was part of the input data and + does not need to be modified (no cloning, color terms, trailing losses, etc). Just return the appMag column. - This would not be necessary in general, but appMag is treated as a special column (because we must have an - apparent magnitude for most of the basic moving object metrics, and it must be calculated before SNR + This would not be necessary in general, but appMag is treated as a + special column (because we must have an + apparent magnitude for most of the basic moving object metrics, + and it must be calculated before SNR if that is also needed). """ @@ -168,25 +184,36 @@ def _run(self, sso_obs, href, hval): class AppMagStacker(BaseMoStacker): - """Add apparent magnitude of an object for the current h_val (compared to Href in the orbit file), - incorporating the magnitude losses due to trailing/detection, as well as the color of the object. - - This is calculated from the reported mag_v in the input observation file (calculated assuming Href) as: - ssoObs['appMag'] = ssoObs[self.vMagCol] + ssoObs[self.colorCol] + ssoObs[self.lossCol] + h_val - Href - - Using the vMag reported in the input observations implicitly uses the phase curve coded in at that point; - for Oorb this is an H/G phase curve, with G=0.15 unless otherwise specified in the orbit file. + """Add apparent magnitude of an object for the current h_val + (compared to Href in the orbit file), + incorporating the magnitude losses due to trailing/detection, + as well as the color of the object. + + This is calculated from the reported mag_v in the input observation + file (calculated assuming Href) as: + ``` + ssoObs['appMag'] = ssoObs[self.vMagCol] + ssoObs[self.colorCol] + + ssoObs[self.lossCol] + h_val - Href + ``` + + Using the vMag reported in the input observations implicitly uses + the phase curve coded in at that point; + for Oorb this is an H/G phase curve, with G=0.15 unless otherwise + specified in the orbit file. See sims_movingObjects for more details on the color and loss quantities. Parameters ---------- v_mag_col : `str`, optional - Name of the column containing the base V magnitude for the object at H=Href. + Name of the column containing the base V magnitude for the + object at H=Href. loss_col : `str`, optional Name of the column describing the magnitude losses, - due to trailing (dmagTrail) or detection (dmag_detect). Default dmag_detect. + due to trailing (dmagTrail) or detection (dmag_detect). + Default dmag_detect. color_col : `str`, optional - Name of the column describing the color correction (into the observation filter, from V). + Name of the column describing the color correction + (into the observation filter, from V). Default dmag_color. """ @@ -202,7 +229,8 @@ def __init__(self, v_mag_col="magV", color_col="dmag_color", loss_col="dmag_dete ] def _run(self, sso_obs, href, hval): - # hval = current H value (useful if cloning over H range), href = reference H value from orbit. + # hval = current H value (useful if cloning over H range), + # href = reference H value from orbit. # Without cloning, href = hval. 
sso_obs["appMag"] = ( sso_obs[self.v_mag_col] + sso_obs[self.color_col] + sso_obs[self.loss_col] + hval - href @@ -211,28 +239,35 @@ def _run(self, sso_obs, href, hval): class CometAppMagStacker(BaseMoStacker): - """Add a cometary apparent magnitude, including nucleus and coma, based on a calculation of - Afrho (using the current h_val) and a Halley-Marcus phase curve for the coma brightness. + """Add a cometary apparent magnitude, including nucleus and coma, + based on a calculation of + Afrho (using the current h_val) and a Halley-Marcus phase curve + for the coma brightness. Parameters ---------- cometType : `str`, optional - Type of comet - short, oort, or mbc. This setting also sets the value of Afrho1 and k: + Type of comet - short, oort, or mbc. + This setting also sets the value of Afrho1 and k: short = Afrho1 / R^2 = 100 cm/km2, k = -4 oort = Afrho1 / R^2 = 1000 cm/km2, k = -2 mbc = Afrho1 / R^2 = 4000 cm/km2, k = -6. Default = 'oort'. - It is also possible to pass this a dictionary instead: the dictionary should contain 'k' and + It is also possible to pass this a dictionary instead: + the dictionary should contain 'k' and 'afrho1_const' keys, which will be used to set these values directly. (e.g. cometType = {'k': -3.5, 'afrho1_const': 1500}). ap : `float`, optional The albedo for calculating the object's size. Default 0.04 rh_col : `str`, optional - The column name for the heliocentric distance (in AU). Default 'helio_dist'. + The column name for the heliocentric distance (in AU). + Default 'helio_dist'. delta_col : `str`, optional - The column name for the geocentric distance (in AU). Default 'geo_dist'. + The column name for the geocentric distance (in AU). + Default 'geo_dist'. phase_col : `str`, optional - The column name for the phase value (in degrees). Default 'phase'. + The column name for the phase value (in degrees). + Default 'phase'. """ cols_added = ["appMag"] @@ -300,12 +335,14 @@ def __init__( def _run(self, sso_obs, href, hval): # Calculate radius from the current H value (hval). radius = 10 ** (0.2 * (VMAG_SUN - hval)) / np.sqrt(self.ap) * KM_PER_AU - # Calculate expected Afrho - this is a value that describes how the brightness of the coma changes + # Calculate expected Afrho - this is a value that describes + # how the brightness of the coma changes afrho1 = self.afrho1_const * radius**2 phase_val = phase__halley_marcus(sso_obs[self.phase_col]) # afrho is equivalent to a sort of 'absolute' magnitude of the coma afrho = afrho1 * sso_obs[self.rh_col] ** self.k * phase_val - # rho describes the projected area on the sky (project the aperture into cm on-sky) + # rho describes the projected area on the sky + # (project the aperture into cm on-sky) # Using the seeing * apScale as the aperture radius_aperture = sso_obs[self.seeing_col] * self.ap_scale rho = 725e5 * sso_obs[self.delta_col] * radius_aperture @@ -313,13 +350,16 @@ def _run(self, sso_obs, href, hval): delta = sso_obs[self.delta_col] * KM_PER_AU * 1000 # delta in cm dm = -2.5 * np.log10(afrho * rho / (2 * sso_obs[self.rh_col] * delta) ** 2) coma = np.zeros(len(sso_obs), float) - # This calculates a coma mag that scales with the sun's color in each bandpass, but the coma + # This calculates a coma mag that scales with the sun's color in + # each bandpass, but the coma # modification is gray (i.e. 
it's just reflecting sunlight) for f in sso_obs[self.filter_col]: match = np.where(sso_obs[self.filter_col] == f) coma[match] = AB_SUN[f] + dm[match] - # Calculate cometary nucleus magnitude -- we'll use the apparent V mag adapted from OOrb as well as - # the object's color - these are generally assumed to be D type (which was used in sims_movingObjects) + # Calculate cometary nucleus magnitude -- we'll use the + # apparent V mag adapted from OOrb as well as + # the object's color - these are generally assumed to be + # D type (which was used in sims_movingObjects) nucleus = sso_obs[self.v_mag_col] + sso_obs[self.color_col] + sso_obs[self.loss_col] + hval - href # add coma and nucleus then ready for calculation of SNR, etc. sso_obs["appMag"] = -2.5 * np.log10(10 ** (-0.4 * coma) + 10 ** (-0.4 * nucleus)) @@ -327,30 +367,41 @@ def _run(self, sso_obs, href, hval): class SNRStacker(BaseMoStacker): - """Add SNR and visibility for a particular object, given the five sigma depth of the image and the - apparent magnitude (whether from AppMagStacker or CometAppMagStacker, etc). - - The SNR simply calculates the SNR based on the five sigma depth and the apparent magnitude. - The 'vis' column is a probabilistic flag (0/1) indicating whether the object was detected, assuming - a 5-sigma SNR threshold and then applying a probabilistic cut on whether it was detected or not (i.e. - there is a gentle roll-over in 'vis' from 1 to 0 depending on the SNR of the object). - This is based on the Fermi-Dirac completeness formula as described in equation 24 of the Stripe 82 SDSS - analysis here: http://iopscience.iop.org/0004-637X/794/2/120/pdf/apj_794_2_120.pdf. + """Add SNR and visibility for a particular object, + given the five sigma depth of the image and the + apparent magnitude (whether from AppMagStacker or CometAppMagStacker etc). + + The SNR simply calculates the SNR based on the five sigma depth and + the apparent magnitude. + The 'vis' column is a probabilistic flag (0/1) indicating whether + the object was detected, assuming + a 5-sigma SNR threshold and then applying a probabilistic cut on whether + it was detected or not (i.e. + there is a gentle roll-over in 'vis' from 1 to 0 depending on the + SNR of the object). + This is based on the Fermi-Dirac completeness formula as described + in equation 24 of the Stripe 82 SDSS + analysis here: + http://iopscience.iop.org/0004-637X/794/2/120/pdf/apj_794_2_120.pdf. Parameters ---------- app_mag_col : `str`, optional - Name of the column describing the apparent magnitude of the object. Default 'appMag'. + Name of the column describing the apparent magnitude of the object. + Default 'appMag'. m5_col : `str`, optional - Name of the column describing the 5 sigma depth of each visit. Default fiveSigmaDepth. + Name of the column describing the 5 sigma depth of each visit. + Default fiveSigmaDepth. gamma : `float`, optional The 'gamma' value for calculating SNR. Default 0.038. LSST range under normal conditions is about 0.037 to 0.039. sigma : `float`, optional - The 'sigma' value for probabilistic prediction of whether or not an object is visible at 5sigma. + The 'sigma' value for probabilistic prediction of whether or not + an object is visible at 5sigma. Default 0.12. - The probabilistic prediction of visibility is based on Fermi-Dirac completeness formula (see SDSS, - eqn 24, Stripe82 analysis: http://iopscience.iop.org/0004-637X/794/2/120/pdf/apj_794_2_120.pdf). 
+ The probabilistic prediction of visibility is based on Fermi-Dirac + completeness formula (see SDSS, eqn 24, Stripe82 analysis: + http://iopscience.iop.org/0004-637X/794/2/120/pdf/apj_794_2_120.pdf). random_seed: `int` or None, optional If set, then used as the random seed for the numpy random number generation for the probability of detection. Default: None. @@ -375,7 +426,8 @@ def __init__( self.units = ["SNR", ""] def _run(self, sso_obs, href, hval): - # hval = current H value (useful if cloning over H range), href = reference H value from orbit. + # hval = current H value (useful if cloning over H range), + # href = reference H value from orbit. # Without cloning, href = hval. xval = np.power(10, 0.5 * (sso_obs[self.app_mag_col] - sso_obs[self.m5_col])) sso_obs["SNR"] = 1.0 / np.sqrt((0.04 - self.gamma) * xval + self.gamma * xval * xval) @@ -392,7 +444,8 @@ def _run(self, sso_obs, href, hval): class EclStacker(BaseMoStacker): """ - Add ecliptic latitude/longitude (ecLat/ecLon) to the slicer ssoObs (in degrees). + Add ecliptic latitude/longitude (ecLat/ecLon) to the slicer ssoObs + (in degrees). Parameters ----------- diff --git a/rubin_sim/maf/stackers/n_follow_stacker.py b/rubin_sim/maf/stackers/n_follow_stacker.py index 9d622380..692eb9ba 100644 --- a/rubin_sim/maf/stackers/n_follow_stacker.py +++ b/rubin_sim/maf/stackers/n_follow_stacker.py @@ -8,7 +8,8 @@ def find_telescopes(min_size=3.0): - """Finds telescopes larger than min_size, from list of large telescopes based on + """Finds telescopes larger than min_size, + from list of large telescopes based on http://astro.nineplanets.org/bigeyes.html. Returns @@ -105,17 +106,20 @@ def find_telescopes(min_size=3.0): class NFollowStacker(BaseStacker): - """Add the number of telescopes ('nObservatories') that could follow up any visit - at (any of the) times in timeStep, specifying the minimum telescope size (in meters) and airmass limit. + """Add the number of telescopes ('nObservatories') that could + follow up any visit at (any of the) times in timeStep, + specifying the minimum telescope size (in meters) and airmass limit. Parameters ---------- minSize: float, optional The minimum telescope aperture to use, in meters. Default 3.0. airmass_limit: float, optional - The maximum airmass allowable at the follow-up observatory. Default 2.5. + The maximum airmass allowable at the follow-up observatory. + Default 2.5. time_steps: np.array or list of floats, optional - The timesteps to check for followup opportunities, in hours. Default is np.arange(0.5, 12., 3.0). + The timesteps to check for followup opportunities, in hours. + Default is np.arange(0.5, 12., 3.0). mjd_col: str, optional The exposure MJD column name. Default 'observationStartMJD'. ra_col: str, optional @@ -174,7 +178,8 @@ def _run(self, sim_data, cols_present=False): ) airmass = 1.0 / (np.cos(np.pi / 2.0 - alt)) followed = np.where((airmass <= self.airmass_limit) & (airmass >= 1.0)) - # If the observatory got an observation, save this into obs_got_it. + # If the observatory got an observation, save this + # into obs_got_it. # obs_got_it will be 1 if ANY of the times got an observation. obs_got_it[followed] = 1 # If an observatory got an observation, count it in nObservatories. 
diff --git a/rubin_sim/maf/stackers/neo_dist_stacker.py b/rubin_sim/maf/stackers/neo_dist_stacker.py index 5259fa29..cfa57eef 100644 --- a/rubin_sim/maf/stackers/neo_dist_stacker.py +++ b/rubin_sim/maf/stackers/neo_dist_stacker.py @@ -3,7 +3,6 @@ import numpy as np from .base_stacker import BaseStacker -from .general_stackers import FiveSigmaStacker class NEODistStacker(BaseStacker): diff --git a/rubin_sim/maf/stackers/sdss_stackers.py b/rubin_sim/maf/stackers/sdss_stackers.py index 1392482e..af840a71 100644 --- a/rubin_sim/maf/stackers/sdss_stackers.py +++ b/rubin_sim/maf/stackers/sdss_stackers.py @@ -13,7 +13,8 @@ class SdssRADecStacker(BaseStacker): cols_added = ["RA1", "Dec1", "RA2", "Dec2", "RA3", "Dec3", "RA4", "Dec4"] def __init__(self, pcols=["p1", "p2", "p3", "p4", "p5", "p6", "p7", "p8"]): - """The p1,p2 columns represent the corners of chips. Could generalize this a bit.""" + """The p1,p2 columns represent the corners of chips. + Could generalize this a bit.""" self.units = ["rad"] * 8 self.cols_req = pcols diff --git a/rubin_sim/maf/stackers/sn_stacker.py b/rubin_sim/maf/stackers/sn_stacker.py index ad07f545..83020545 100644 --- a/rubin_sim/maf/stackers/sn_stacker.py +++ b/rubin_sim/maf/stackers/sn_stacker.py @@ -1,14 +1,12 @@ __all__ = ("CoaddStacker",) import numpy as np -import numpy.lib.recfunctions as rf from rubin_sim.maf.stackers import BaseStacker class CoaddStacker(BaseStacker): - """ - Stacker to estimate m5 "coadded" per band and par night + """Stacker to estimate m5 "coadded" per band and par night Parameters ---------- @@ -69,25 +67,9 @@ def __init__( self.units = ["int"] def _run(self, sim_data, cols_present=False): - """ - Parameters - ------------ - sim_data : simulation data - cols_present: to check whether the field has already been estimated - - Returns - -------- - numpy array of initial fields plus modified fields: - - m5Col: "coadded" m5 - - numExposuresCol: sum of numExposuresCol - - visitTimeCol: sum of visitTimeCol - - visitExposureTimeCol: sum of visitExposureTimeCol - - all other input fields except band (Ra, Dec, night) : median(field) - - """ - if cols_present: - # Column already present in data; assume it is correct and does not need recalculating. + # Column already present in data; assume it is correct + # and does not need recalculating. return sim_data self.dtype = sim_data.dtype r = [] @@ -105,8 +87,7 @@ def _run(self, sim_data, cols_present=False): return myarray def fill(self, tab): - """ - Estimation of new fields (m5 "coadded" values, ...) + """Estimation of new fields (m5 "coadded" values, ...) Parameters --------------- @@ -154,8 +135,7 @@ def fill(self, tab): return r def m5_coadd(self, m5): - """ - Estimation of "coadded" m5 values based on: + """Estimation of "coadded" m5 values based on: flux_5sigma = 10**(-0.4*m5) sigmas = flux_5sigma/5. 
sigma_tot = 1./sqrt(np.sum(1/sigmas**2)) From c719d8d2b4fcfe80c8ed9feb6634e7c7e8988f56 Mon Sep 17 00:00:00 2001 From: Lynne Jones Date: Tue, 8 Oct 2024 22:39:12 -0700 Subject: [PATCH 11/23] Remove obs2sqlite --- rubin_sim/maf/utils/obs2sqlite.py | 160 ------------------------------ 1 file changed, 160 deletions(-) delete mode 100644 rubin_sim/maf/utils/obs2sqlite.py diff --git a/rubin_sim/maf/utils/obs2sqlite.py b/rubin_sim/maf/utils/obs2sqlite.py deleted file mode 100644 index 7101ba0a..00000000 --- a/rubin_sim/maf/utils/obs2sqlite.py +++ /dev/null @@ -1,160 +0,0 @@ -__all__ = ("obs2sqlite",) - -import sqlite3 -import sys - -import healpy as hp -import numpy as np -import pandas as pd -from rubin_scheduler.utils import Site, _approx_ra_dec2_alt_az, m5_flat_sed, raDec2Hpid - -import rubin_sim.skybrightness_pre as sb -from rubin_sim.skybrightness import SkyModel - - -def obs2sqlite( - observations_in, - location="LSST", - outfile="observations.sqlite", - slewtime_limit=5.0, - full_sky=False, - radians=True, -): - """ - Utility to take an array of observations and dump it to a sqlite file, filling in useful columns along the way. - - observations_in: numpy array with at least columns of - ra : RA in degrees - dec : dec in degrees - mjd : MJD in day - filter : string with the filter name - exptime : the exposure time in seconds - slewtime_limit : float - Consider all slewtimes larger than this to be closed-dome time not part of a slew. - """ - - # Set the location to be LSST - if location == "LSST": - telescope = Site("LSST") - - # Check that we have the columns we need - needed_cols = ["ra", "dec", "mjd", "filter"] - in_cols = observations_in.dtype.names - for col in needed_cols: - if needed_cols not in in_cols: - ValueError("%s column not found in observtion array" % col) - - n_obs = observations_in.size - sm = None - # make sure they are in order by MJD - observations_in.sort(order="mjd") - - # Take all the columns that are in the input and add any missing - names = [ - "filter", - "ra", - "dec", - "mjd", - "exptime", - "alt", - "az", - "skybrightness", - "seeing", - "night", - "slewtime", - "fivesigmadepth", - "airmass", - "sunAlt", - "moonAlt", - ] - types = ["|S1"] - types.extend([float] * (len(names) - 1)) - - observations = np.zeros(n_obs, dtype=list(zip(names, types))) - - # copy over the ones we have - for col in in_cols: - observations[col] = observations_in[col] - - # convert output to be in degrees like expected - if radians: - observations["ra"] = np.degrees(observations["ra"]) - observations["dec"] = np.degrees(observations["dec"]) - - if "exptime" not in in_cols: - observations["exptime"] = 30.0 - - # Fill in the slewtime. 
Note that filterchange time gets included in slewtimes - if "slewtime" not in in_cols: - # Assume MJD is midpoint of exposures - mjd_sec = observations_in["mjd"] * 24.0 * 3600.0 - observations["slewtime"][1:] = ( - mjd_sec[1:] - - mjd_sec[0:-1] - - observations["exptime"][0:-1] * 0.5 - - observations["exptime"][1:] * 0.5 - ) - closed = np.where(observations["slewtime"] > slewtime_limit * 60.0) - observations["slewtime"][closed] = 0.0 - - # Let's just use the stupid-fast to get alt-az - if "alt" not in in_cols: - alt, az = _approx_ra_dec2_alt_az( - np.radians(observations["ra"]), - np.radians(observations["dec"]), - telescope.latitude_rad, - telescope.longitude_rad, - observations["mjd"], - ) - observations["alt"] = np.degrees(alt) - observations["az"] = np.degrees(az) - - # Fill in the airmass - if "airmass" not in in_cols: - observations["airmass"] = 1.0 / np.cos(np.pi / 2.0 - np.radians(observations["alt"])) - - # Fill in the seeing - if "seeing" not in in_cols: - # XXX just fill in a dummy val - observations["seeing"] = 0.8 - - # Sky Brightness - if "skybrightness" not in in_cols: - if full_sky: - sm = SkyModel(mags=True) - for i, obs in enumerate(observations): - sm.set_ra_dec_mjd(obs["ra"], obs["dec"], obs["mjd"], degrees=True) - observations["skybrightness"][i] = sm.return_mags()[obs["filter"]] - else: - # Let's try using the pre-computed sky brighntesses - sm = sb.SkyModelPre(preload=False) - full = sm.return_mags(observations["mjd"][0]) - nside = hp.npix2nside(full["r"].size) - imax = float(np.size(observations)) - for i, obs in enumerate(observations): - indx = raDec2Hpid(nside, obs["ra"], obs["dec"]) - observations["skybrightness"][i] = sm.return_mags(obs["mjd"], indx=[indx])[obs["filter"]] - sun_moon = sm.returnSunMoon(obs["mjd"]) - observations["sunAlt"][i] = sun_moon["sunAlt"] - observations["moonAlt"][i] = sun_moon["moonAlt"] - progress = i / imax * 100 - text = "\rprogress = %.2f%%" % progress - sys.stdout.write(text) - sys.stdout.flush() - observations["sunAlt"] = np.degrees(observations["sunAlt"]) - observations["moonAlt"] = np.degrees(observations["moonAlt"]) - - # 5-sigma depth - for fn in np.unique(observations["filter"]): - good = np.where(observations["filter"] == fn) - observations["fivesigmadepth"][good] = m5_flat_sed( - fn, - observations["skybrightness"][good], - observations["seeing"][good], - observations["exptime"][good], - observations["airmass"][good], - ) - - conn = sqlite3.connect(outfile) - df = pd.DataFrame(observations) - df.to_sql("observations", conn) From 984326e1bcda142910737403b622274968b3312f Mon Sep 17 00:00:00 2001 From: Lynne Jones Date: Tue, 8 Oct 2024 22:58:53 -0700 Subject: [PATCH 12/23] Ruff utils --- rubin_sim/maf/utils/generate_fov_map.py | 6 +- rubin_sim/maf/utils/maf_utils.py | 18 +- rubin_sim/maf/utils/opsim_utils.py | 6 +- rubin_sim/maf/utils/sn_n_sn_utils.py | 221 ++++++++++++------------ rubin_sim/maf/utils/sn_utils.py | 110 ++++++------ rubin_sim/maf/utils/stellar_mags.py | 7 +- 6 files changed, 189 insertions(+), 179 deletions(-) diff --git a/rubin_sim/maf/utils/generate_fov_map.py b/rubin_sim/maf/utils/generate_fov_map.py index 36afda59..ef2ae07f 100644 --- a/rubin_sim/maf/utils/generate_fov_map.py +++ b/rubin_sim/maf/utils/generate_fov_map.py @@ -1,5 +1,5 @@ import numpy as np -from rubin_scheduler.utils import gnomonic_project_tosky, gnomonic_project_toxy +from rubin_scheduler.utils import gnomonic_project_tosky # Use the main stack to make a rough array. # This code needs an update to work without lsst.sims. 
@@ -46,13 +46,13 @@ chip_names = chip_names.reshape(nside, nside) wavefront_names = [ name - for name in np.unique(chip_names[np.where(chip_names != None)]) + for name in np.unique(chip_names[np.where(chip_names is not None)]) if ("SW" in name) | ("R44" in name) | ("R00" in name) | ("R04" in name) | ("R40" in name) ] # If it's on a waverfront sensor, that's false for name in wavefront_names: result[np.where(chip_names == name)] = False # No chipname, that's a false - result[np.where(chip_names == None)] = False + result[np.where(chip_names is None)] = False np.savez("fov_map.npz", x=x_one, image=result) diff --git a/rubin_sim/maf/utils/maf_utils.py b/rubin_sim/maf/utils/maf_utils.py index 533cbe87..47dc9a27 100644 --- a/rubin_sim/maf/utils/maf_utils.py +++ b/rubin_sim/maf/utils/maf_utils.py @@ -20,7 +20,9 @@ def load_inst_zeropoints(): - """Load up and return instrumental zeropoints and atmospheric extinctions""" + """Load up and return instrumental zeropoints and atmospheric + extinctions. + """ zp_inst = {} datadir = get_data_dir() for filtername in "ugrizy": @@ -38,7 +40,7 @@ def load_inst_zeropoints(): def coadd_m5(mags): - """Coadded depth, assuming Gaussian noise""" + """Coadded depth, assuming Gaussian noise.""" return 1.25 * np.log10(np.sum(10.0 ** (0.8 * np.array(mags)))) @@ -164,22 +166,21 @@ def optimal_bins(datain, binmin=None, binmax=None, nbin_max=200, nbin_min=1, ver def percentile_clipping(data, percentile=95.0): - """ - Calculate the minimum and maximum values of a distribution of points, + """Calculate the minimum and maximum values of a distribution of points, after discarding data more than 'percentile' from the median. This is useful for determining useful data ranges for plots. Note that 'percentile' percent of the data is retained. Parameters ---------- - data : numpy.ndarray + data : `numpy.ndarray` The data to clip. - percentile : float + percentile : `float` Retain values within percentile of the median. Returns ------- - float, float + minimum, maximum : `float`, `float` The minimum and maximum values of the clipped data. """ lower_percentile = (100 - percentile) / 2.0 @@ -190,8 +191,7 @@ def percentile_clipping(data, percentile=95.0): def radec2pix(nside, ra, dec): - """ - Calculate the nearest healpixel ID of an RA/Dec array, assuming nside. + """Calculate the nearest healpixel ID of an RA/Dec array, assuming nside. 
Parameters ---------- diff --git a/rubin_sim/maf/utils/opsim_utils.py b/rubin_sim/maf/utils/opsim_utils.py index ae829926..e63b9e50 100644 --- a/rubin_sim/maf/utils/opsim_utils.py +++ b/rubin_sim/maf/utils/opsim_utils.py @@ -53,12 +53,12 @@ def get_sim_data( sqlconstraint = "" # Check that file exists - if type(db_con) == str: + if isinstance(db_con, str): if os.path.isfile(db_con) is False: raise FileNotFoundError("No file %s" % db_con) # Check if table is "observations" or "SummaryAllProps" - if (table_name is None) & (full_sql_query is None) & (type(db_con) == str): + if (table_name is None) & (full_sql_query is None) & (isinstance(db_con, str)): url = make_url("sqlite:///" + db_con) eng = create_engine(url) inspector = inspect(eng) @@ -77,7 +77,7 @@ def get_sim_data( # that's probably fine, keep people from getting fancy with old sims table_name = "observations" - if type(db_con) == str: + if isinstance(db_con, str): con = sqlite3.connect(db_con) else: con = db_con diff --git a/rubin_sim/maf/utils/sn_n_sn_utils.py b/rubin_sim/maf/utils/sn_n_sn_utils.py index 7d362fb0..02a482b7 100644 --- a/rubin_sim/maf/utils/sn_n_sn_utils.py +++ b/rubin_sim/maf/utils/sn_n_sn_utils.py @@ -33,32 +33,32 @@ class to simulate supernovae light curves in a fast way Parameters --------------- - reference_lc: RegularGridData + reference_lc : `scipy.interpolate.RegularGridData` lc reference files - x1: float + x1 : `float` SN stretch - color: float + color : `float` SN color - telescope: Telescope() + telescope : Telescope() telescope for the study - mjd_col: str, opt - name of the MJD col in data to simulate (default: observationStartMJD) - filter_col: str, opt - name of the filter col in data to simulate (default: filter) - exptime_col: str, opt - name of the exposure time col in data to simulate (default: visitExposureTime) - m5_col: str, opt - name of the fiveSigmaDepth col in data to simulate (default: fiveSigmaDepth) - season_col: str, opt - name of the season col in data to simulate (default: season) - snr_min: float, opt - minimal Signal-to-Noise Ratio to apply on LC points (default: 5) - light_output: bool, opt - to get a lighter output (ie lower number of cols) (default: True) - bluecutoff: float,opt - blue cutoff for SN (default: 380.0 nm) - redcutoff: float, opt - red cutoff for SN (default: 800.0 nm) + mjd_col : `str`, optional + name of the MJD col in data to simulate + filter_col : `str`, optional + name of the filter col in data to simulate + exptime_col : `str`, optional + name of the exposure time col in data to simulate + m5_col : `str`, optional + name of the fiveSigmaDepth col in data to simulate + season_col : `str`, optional + name of the season col in data to simulate + snr_min : `float`, optional + minimal Signal-to-Noise Ratio to apply on LC points + light_output : `bool`, optional + to get a lighter output (ie lower number of cols) + bluecutoff : `float`, optional + blue cutoff for SN + redcutoff : `float`, optional + red cutoff for SN """ def __init__( @@ -114,22 +114,22 @@ def __call__(self, obs, ebvof_mw, gen_par=None, bands="grizy"): Parameters - ---------------- - obs: array + ----------- + obs : `np.ndarray`, (N,) array of observations - ebvof_mw: float + ebvof_mw : `float` E(B-V) for MW - gen_par: array, opt + gen_par : `np.ndarray`, optional simulation parameters (default: None) - bands: str, opt + bands : `str`, optional filters to consider for simulation (default: grizy) Returns - ------------ - astropy table with: + -------- + table : `astropy.Table` columns: band, flux, 
fluxerr, snr_m5,flux_e,zp,zpsys,time - metadata : SNID,RA,Dec,DayMax,X1,Color,z + metadata : SNID,RA,Dec,DayMax,X1,Color,z """ if len(obs) == 0: @@ -158,16 +158,17 @@ def process_band(self, sel_obs, ebvof_mw, band, gen_par): Parameters --------------- - sel_obs: array + sel_obs : `np.ndarray` array of observations - band: str + band : `str` band of observations - gen_par: array + gen_par : `np.ndarray`` simulation parameters Returns ------- - astropy table with fields corresponding to LC components + table : `astropy.Table` + astropy table with fields corresponding to LC components """ # if there are no observations in this filter: return None @@ -293,20 +294,19 @@ def process_band(self, sel_obs, ebvof_mw, band, gen_par): return lc_flux def get_flag(self, sel_obs, gen_par, fluxes_obs, band, p): - """ - Method to flag events corresponding to selection criteria + """Method to flag events corresponding to selection criteria Parameters --------------- - sel_obs: array + sel_obs : `np.ndarray`` observations - gen_par: array + gen_par : `np.ndarray` simulation parameters - fluxes_obs: array + fluxes_obs : `np.ndarray` LC fluxes - band: str + band : `str` filter - p: array + p : `np.ndarray`` LC phases """ # remove LC points outside the restframe phase range @@ -329,36 +329,36 @@ def get_flag(self, sel_obs, gen_par, fluxes_obs, band, p): return flag def marray(self, val, flag): - """ - method to estimate a masked array + """method to estimate a masked array Parameters --------------- - val: array - flag: mask + val : `np.ndarray` + flag : `np.ndarray` + mask for values Returns ---------- - the masked array + masked_array : `np.ma.array` + the masked array """ return np.ma.array(val, mask=~flag) def tarray(self, arr, nvals, flag): - """ - method to estimate a masked tiled array + """method to estimate a masked tiled array Parameters --------------- - val: array - nvals: int + val : `np.ndarray` + nvals : `int` number of tiles - flag: mask + flag : np.ndarray` Returns ---------- - the masked tiled array - + masked_tiles : `np.ma.array` + the masked tiled array """ vv = np.ma.array(np.tile(arr, (nvals, 1)), mask=~flag) @@ -366,12 +366,14 @@ def tarray(self, arr, nvals, flag): def load_sne_cached(gamma_name="gamma_WFD.hdf5"): - """Load up the SNe lightcurve files with a simple function that caches the result so each metric + """Load up the SNe lightcurve files with a simple function + that caches the result so each metric doesn't need to load it on it's own """ need_data = True if hasattr(load_sne_cached, "data"): - # Only return data if the current gamma file matches the loaded gamma file + # Only return data if the current gamma file matches + # the loaded gamma file if load_sne_cached.gammaName == gamma_name: need_data = False if need_data: @@ -381,19 +383,21 @@ def load_sne_cached(gamma_name="gamma_WFD.hdf5"): class LoadReference: - """ - class to load template files requested for LCFast + """class to load template files requested for LCFast These files should be stored in a reference_files The files should be part of the rubin_sim_data/maf/SNe_dat files. - They are also available for download from https://me.lsst.eu/gris/DESC_SN_pipeline/. + They are also available for download from + https://me.lsst.eu/gris/DESC_SN_pipeline/. Parameters --------------- - templateDir: str, optional + templateDir : `str`, optional where to put the files (default: reference_files) gammaName : `str`, optional - The name of the gamma file (for photometric zeropoint and SNR calculation information). 
- Note that these are similar to, but not necessarily the same as, the current expected + The name of the gamma file (for photometric zeropoint and + SNR calculation information). + Note that these are similar to, but not necessarily the same as, + the current expected LSST zeropoints and SNR gamma values. """ @@ -438,30 +442,32 @@ def __init__( class GetReference: - """ - Class to load reference data - used for the fast SN simulator + """Class to load reference data used for the fast SN simulator Parameters ---------------- - lcName: `str` + lcName : `str` name of the reference file to load (lc) - gammaName: `str` + gammaName : `str` name of the reference file to load (gamma) - tel_par: `dict` + tel_par : `dict` telescope parameters param_Fisher : `list` [`str`], optional list of SN parameter for Fisher estimation to consider (default: ['x0', 'x1', 'color', 'daymax']) - Returns - ----------- - The following dict can be accessed: + Notes + ----- + After instantiations, the following dict can be accessed: mag_to_flux_e_sec : Interp1D of mag to flux(e.sec-1) conversion - flux : dict of RegularGridInterpolator of fluxes (key: filters, (x,y)=(phase, z), result=flux) - fluxerr : dict of RegularGridInterpolator of flux errors (key: filters, (x,y)=(phase, z), result=fluxerr) - param : dict of dict of RegularGridInterpolator of flux derivatives wrt SN parameters - (key: filters plus param_Fisher parameters; (x,y)=(phase, z), result=flux derivatives) + flux : dict of RegularGridInterpolator of fluxes + (key: filters, (x,y)=(phase, z), result=flux) + fluxerr : dict of RegularGridInterpolator of flux errors + (key: filters, (x,y)=(phase, z), result=fluxerr) + param : dict of dict of RegularGridInterpolator of flux derivatives + wrt SN parameters + (key: filters plus param_Fisher parameters; (x,y)=(phase, z), + result=flux derivatives) gamma : dict of RegularGridInterpolator of gamma values (key: filters) """ @@ -516,7 +522,8 @@ def __init__(self, lc_name, gamma_name, param__fisher=["x0", "x1", "color", "day self.gamma_ref[band] = lc_sel["gamma"][0] self.m5_ref[band] = np.unique(lc_sel["m5"])[0] - # Another interpolator, faster than griddata: regulargridinterpolator + # Another interpolator, faster than griddata: + # regulargridinterpolator # Fluxes and errors zmin, zmax, zstep, nz = self.lim_vals(lc_sel, "z") @@ -589,20 +596,20 @@ def lim_vals(self, lc, field): Parameters ---------- - lc: Table + lc : `astropy.Table` astropy Table (here probably a LC) - field: str + field : `str` name of the field of interest Returns ------- - vmin: float + vmin : `float` min value of the field - vmax: float + vmax : `float` max value of the field - vstep: float + vstep : `float` step value for this field (median) - nvals: int + nvals : `int` number of unique values """ @@ -617,22 +624,21 @@ def lim_vals(self, lc, field): class SnRate: - """ - Estimate production rates of typeIa SN + r"""Estimate production rates of typeIa SN Available rates: Ripoche, Perrett, Dilday Parameters ---------- - rate : str, optional - type of rate chosen (Ripoche, Perrett, Dilday) (default : Perrett) - H0 : float, optional - Hubble constant value :math:`H_{0}` (default : 70.) - Om0 : float, optional - matter density value :math:`\Omega_{0}` (default : 0.25) - min_rf_phase : float, optional - min rest-frame phase (default : -15.) - max_rf_phase : float, optional - max rest-frame phase (default : 30.) 
+ rate : `str`, optional + type of rate chosen (Ripoche, Perrett, Dilday) + H0 : `float`, optional + Hubble constant value :math:`H_{0}` + Om0 : `float`, optional + matter density value :math:`\Omega_{0}` + min_rf_phase : `float`, optional + min rest-frame phase + max_rf_phase : `float`, optional + max rest-frame phase """ def __init__(self, rate="Perrett", h0=70, om0=0.25, min_rf_phase=-15.0, max_rf_phase=30.0): @@ -662,30 +668,29 @@ def __call__( dz : `float`, optional redshift bin (default : 0.001) survey_area : `float`, optional - area of the survey (:math:`deg^{2}`) (default : 9.6 :math:`deg^{2}`) + area of the survey (:math:`deg^{2}`) bins : `list` [`float`], optional - redshift bins (default : None) + redshift bins account_for_edges : `bool` to account for season edges. - If true, duration of the survey will be reduced by (1+z)*(maf_rf_phase-min_rf_phase)/365.25 - (default : False) + If true, duration of the survey will be reduced by + (1+z)*(maf_rf_phase-min_rf_phase)/365.25 duration : `float`, optional - survey duration (in days) (default : 140 days) + survey duration (in days) duration_z : `list` [`float`], optional - survey duration (as a function of z) (default : None) + survey duration (as a function of z) Returns ----------- - Lists : - zz : `float` + zz : `list` [`float`] redshift values - rate : `float` + rate : `list` [`float`] production rate - err_rate : `float` + err_rate : `list` [`float`] production rate error - nsn : `float` + nsn : `list` [`float`] number of SN - err_nsn : `float` + err_nsn : `list` [`float`] error on the number of SN """ @@ -722,7 +727,8 @@ def __call__( return zz, rate, err_rate, nsn, err_nsn def ripoche_rate(self, z): - """The SNLS SNIa rate according to the (unpublished) Ripoche et al study. + """The SNLS SNIa rate according to the (unpublished) + Ripoche et al study. Parameters -------------- @@ -831,7 +837,8 @@ def var_color(self, lc): Parameters -------------- lc : `pandas.DataFrame` - data to process containing the derivative of the flux with respect to SN parameters + data to process containing the derivative of the + flux with respect to SN parameters Returns ---------- diff --git a/rubin_sim/maf/utils/sn_utils.py b/rubin_sim/maf/utils/sn_utils.py index 51569c83..690c44b6 100644 --- a/rubin_sim/maf/utils/sn_utils.py +++ b/rubin_sim/maf/utils/sn_utils.py @@ -9,18 +9,18 @@ class Lims: Parameters ------------- - Li_files : str + Li_files : `str` light curve reference file - mag_to_flux_files : str + mag_to_flux_files : `str` files of magnitude to flux - band : str + band : `str` band considered - SNR : float + SNR : `float` Signal-To-Noise Ratio cut - mag_range : pair(float), optional + mag_range : `float`, `float`, optional mag range considered Default : (23., 27.5) - dt_range : pair(float) + dt_range : `float`, `float`, optional difference time range considered (cadence) Default : (0.5, 25.) """ @@ -53,16 +53,17 @@ def get_lims(self, band, tab, SNR): Parameters ------------- - band : str + band : `str` band to consider - tab : numpy array + tab : `np.ndarray`, (N,) table of data - SNR : float + SNR : `float` Signal-to-Noise Ratio cut Returns --------- - dict of limits with redshift and band as keys. + lims : `dict` + dict of limits with redshift and band as keys. """ @@ -83,18 +84,18 @@ def get_lims(self, band, tab, SNR): return lims def mesh(self, mag_to_flux): - """ - Mesh grid to estimate five-sigma depth values (m5) from mags input. + """Mesh grid to estimate five-sigma depth values (m5) from mags input. 
         Parameters
-        ---------------
-        mag_to_flux : magnitude to flux values
+        -----------
+        mag_to_flux : `np.ndarray`, (N,2)
+            magnitude to flux values

         Returns
-        -----------
-        m5 values
-        time difference dt (cadence)
-        metric=sqrt(dt)*f5 where f5 is the 5-sigma flux
+        -------
+        m5, Dt, metric : `np.ndarray`, `np.ndarray`, `np.ndarray`
+            m5 values, time difference dt (cadence),
+            metric=sqrt(dt)*f5 where f5 is the 5-sigma flux
         """

         dt = np.linspace(self.dt_range[0], self.dt_range[1], 100)
@@ -109,7 +110,9 @@ def mesh(self, mag_to_flux):
         return m5, DT, metric

     def interp(self):
-        """Estimate a grid of interpolated values in the plane (m5, cadence, metric)"""
+        """Estimate a grid of interpolated values in the plane
+        (m5, cadence, metric).
+        """

         m5_all = []
         dt_all = []
@@ -127,13 +130,12 @@ def interp(self):
         figa, axa = plt.subplots()

         for kk, lim in enumerate(self.lims):
-            fmt = {}
             ll = [lim[zz][self.band] for zz in sorted_keys[kk]]
             cs = axa.contour(m5_all[kk], dt_all[kk], metric_all[kk], ll)

             points_values = None
             for io, col in enumerate(cs.collections):
-                # Update in matplotlib changed get_segements to get_paths
+                # Update in matplotlib changed get_segments to get_paths
                 if hasattr(col, "get_segments"):
                     segments = col.get_segments()
                 else:
@@ -155,17 +157,17 @@ def interp(self):
         plt.close(figa)  # do not display

     def interp_griddata(self, data):
-        """
-        Estimate metric interpolation for data (m5,cadence)
+        """Estimate metric interpolation for data (m5,cadence)

         Parameters
-        ---------------
-        data : data where interpolation has to be done (m5,cadence)
+        ----------
+        data : `np.ndarray`
+            data where interpolation has to be done (m5,cadence)

         Returns
-        -----------
-        griddata interpolation (m5,cadence,metric)
-
+        --------
+        res : `np.ndarray`
+            griddata interpolation (m5,cadence,metric)
         """

         ref_points = self.points_ref
@@ -184,16 +186,17 @@ class GenerateFakeObservations:

     Parameters
     -----------
     config: yaml-like
-        configuration file (parameter choice: filter, cadence, m5,Nseasons, ...)
-    list : str, optional
+        configuration file (parameter choice: filter, cadence, m5,Nseasons, ..)
+    list : `str`, optional
         Name of the columns used.
-        Default : 'observationStartMJD', 'fieldRA', 'fieldDec','filter','fiveSigmaDepth',
-        'visitExposureTime','numExposures','visitTime','season'
+        Default : 'observationStartMJD', 'fieldRA', 'fieldDec','filter',
+        'fiveSigmaDepth', 'visitExposureTime','numExposures',
+        'visitTime','season'

     Returns
     ---------
-
-    recordarray of observations with the fields:
+    observations : `np.ndarray`, (N, M)
+        recordarray of observations with the fields:
         MJD, Ra, Dec, band,m5,Nexp, ExpTime, Season
     """
@@ -227,7 +230,8 @@ def make_fake(self, config):

         Parameters
         -----------
         config: yaml-like
-            configuration file (parameter choice: filter, cadence, m5,Nseasons, ...)
+            configuration file (parameter choice: filter,
+            cadence, m5,Nseasons, ...)
""" bands = config["bands"] cadence = dict(zip(bands, config["Cadence"])) @@ -294,18 +298,17 @@ def m5_coadd(self, m5, nvisits, tvisit): class ReferenceData: - """ - class to handle light curve of SN + """class to handle light curve of SN Parameters ------------ - Li_files : str + Li_files : `str` light curve reference file - mag_to_flux_files : str + mag_to_flux_files : `str` files of magnitude to flux - band : str + band : `str` band considered - z : float + z : `float` redshift considered """ @@ -321,23 +324,22 @@ def __init__(self, li_files, mag_to_flux_files, band, z): self.mag_to_flux.append(self.interp_mag(self.band, np.load(val))) def interp_fluxes(self, band, tab, z): - """ - Flux interpolator + """Flux interpolator Parameters --------------- - band : str + band : `str` band considered - tab : array + tab : `np.ndarray` reference data with (at least) fields z,band,time,DayMax - z : float + z : `float` redshift considered Returns -------- - list (float) of interpolated fluxes (in e/sec) + fluxes : `list` [`float`] + list (float) of interpolated fluxes (in e/sec) """ - lims = {} idx = (np.abs(tab["z"] - z) < 1.0e-5) & (tab["band"] == "LSST::" + band) sel = tab[idx] selc = np.copy(sel) @@ -346,21 +348,21 @@ def interp_fluxes(self, band, tab, z): return interpolate.interp1d(selc["deltaT"], selc["flux_e"], bounds_error=False, fill_value=0.0) def interp_mag(self, band, tab): - """ - magnitude (m5) to flux (e/sec) interpolator + """magnitude (m5) to flux (e/sec) interpolator Parameters --------------- - band : str + band : `str` band considered - tab : array + tab : `np.ndarray` reference data with (at least) fields band,m5,flux_e, - z : float + z : `float` redshift considered Returns -------- - list (float) of interpolated fluxes (in e/sec) + mags : `list` [`float`] + list (float) of interpolated magnitudes (in e/sec) """ idx = tab["band"] == band sel = tab[idx] diff --git a/rubin_sim/maf/utils/stellar_mags.py b/rubin_sim/maf/utils/stellar_mags.py index b86c6371..f552e943 100644 --- a/rubin_sim/maf/utils/stellar_mags.py +++ b/rubin_sim/maf/utils/stellar_mags.py @@ -5,8 +5,9 @@ def calc_wd_colors(): """ - Calculate a few example WD colors. Values to go in stellar_mags(). Here in case - values need to be regenerated (different stars, bandpasses change, etc.) + Calculate a few example WD colors. Values to go in stellar_mags(). + Here in case values need to be regenerated + (different stars, bandpasses change, etc.) """ try: @@ -15,7 +16,7 @@ def calc_wd_colors(): from lsst.utils import getPackageDir from rubin_sim.photUtils import Bandpass, Sed - except: + except ImportError: "Need to setup sims_photUtils to generate WD magnitudes." names = ["HeWD_25200_80", "WD_11000_85", "WD_3000_85"] From d09c4a0184c1024b7a8ce04e7c57f76dc8e523e5 Mon Sep 17 00:00:00 2001 From: Lynne Jones Date: Tue, 8 Oct 2024 23:04:50 -0700 Subject: [PATCH 13/23] Pass magtype to moving object bundles --- rubin_sim/maf/run_moving_calc.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/rubin_sim/maf/run_moving_calc.py b/rubin_sim/maf/run_moving_calc.py index 2a7faa1e..c27d0371 100755 --- a/rubin_sim/maf/run_moving_calc.py +++ b/rubin_sim/maf/run_moving_calc.py @@ -149,7 +149,8 @@ def run_moving_calc(): try: os.makedirs(args.out_dir) except FileExistsError: - # This can happen if you are running these in parallel and two scripts try to make + # This can happen if you are running these in parallel + # and two scripts try to make # the same directory. 
pass results_db = db.ResultsDb(out_dir=args.out_dir) @@ -168,6 +169,7 @@ def run_moving_calc(): detection_losses="trailing", albedo=args.albedo, h_mark=h_mark, + magtype=magtype ) # Run these discovery metrics print("Calculating quick discovery metrics with simple trailing losses.") @@ -185,6 +187,7 @@ def run_moving_calc(): detection_losses="detection", albedo=args.albedo, h_mark=h_mark, + magtype=magtype ) bdict = batches.discovery_batch( slicer, @@ -196,6 +199,7 @@ def run_moving_calc(): detection_losses="detection", albedo=args.albedo, h_mark=h_mark, + magtype=magtype ) bdictD.update(bdict) @@ -215,6 +219,7 @@ def run_moving_calc(): constraint_info_label=args.constraint_info_label, constraint=args.constraint, h_mark=h_mark, + magtype=magtype ) elif characterization.lower() == "outer": bdictC = batches.characterization_outer_batch( @@ -226,6 +231,7 @@ def run_moving_calc(): constraint_info_label=args.constraint_info_label, constraint=args.constraint, h_mark=h_mark, + magtype=magtype ) # Run these characterization metrics print("Calculating characterization metrics.") From 6c5330d608d97b0ab36c267135be03147e3b384c Mon Sep 17 00:00:00 2001 From: Lynne Jones Date: Tue, 8 Oct 2024 23:04:59 -0700 Subject: [PATCH 14/23] Ruff run scripts --- rubin_sim/maf/run_moving_fractions.py | 15 ++++++++++----- rubin_sim/maf/run_moving_join.py | 24 ++++++++++++++++-------- rubin_sim/maf/scimaf_dir.py | 2 +- 3 files changed, 27 insertions(+), 14 deletions(-) diff --git a/rubin_sim/maf/run_moving_fractions.py b/rubin_sim/maf/run_moving_fractions.py index e0f76f5a..79169e0d 100755 --- a/rubin_sim/maf/run_moving_fractions.py +++ b/rubin_sim/maf/run_moving_fractions.py @@ -60,7 +60,8 @@ def run_moving_fractions(): # Create a results Db. results_db = db.ResultsDb(out_dir=args.work_dir) - # Just read in all metrics in the (joint or single) directory, then run completeness and fraction + # Just read in all metrics in the (joint or single) directory, + # then run completeness and fraction # summaries, using the methods in the batches. if args.metadata is None: matchstring = os.path.join(args.work_dir, "*MOOB.npz") @@ -79,8 +80,10 @@ def run_moving_fractions(): first = bdict[metric_names[0]] figroot = f"{first.run_name}" - # adding args.metadata to the figroot means that we can narrow down *which* subset of files to work on - # this lets us add different h_mark values to the output plots, if desired (leaving None/default is ok) + # adding args.metadata to the figroot means that we can narrow + # down *which* subset of files to work on + # this lets us add different h_mark values to the output plots, + # if desired (leaving None/default is ok) if args.metadata != ".": figroot += f"_{args.metadata}" @@ -97,7 +100,8 @@ def run_moving_fractions(): out_dir=args.work_dir, ) - # Calculate fractions of population for characterization. This utility writes these to disk. + # Calculate fractions of population for characterization. + # This utility writes these to disk. bdict_fractions = batches.run_fraction_summary(bdict, args.h_mark, args.work_dir, results_db) # Plot the fractions for colors and lightcurves. 
    batches.plot_fractions(bdict_fractions, figroot=figroot, results_db=results_db, out_dir=args.work_dir)

@@ -109,7 +113,8 @@ def run_moving_fractions():
         if "ObsArc" in k:
             batches.plot_single(bdict[k], results_db=results_db, out_dir=args.work_dir)

-    # Plot the number of chances of discovery metric - this is different than completeness
+    # Plot the number of chances of discovery metric -
+    # this is different than completeness
     # As it plots the metric value directly
     for k in bdict:
         if "DiscoveryNChances" in k and "3_pairs_in_15_nights_detection_loss" in k:
diff --git a/rubin_sim/maf/run_moving_join.py b/rubin_sim/maf/run_moving_join.py
index 5a2f01fc..c144693e 100755
--- a/rubin_sim/maf/run_moving_join.py
+++ b/rubin_sim/maf/run_moving_join.py
@@ -40,12 +40,18 @@ def run_moving_join():
     # Outputs from the metrics are generally like so:
     #  <base_dir>/<splitDir>/<metricFile>
-    # - base_dir tends to be <...> (but is set by user when starting to generate obs.)
-    # - splitDir tends to be <...> (and is set by observation generation script)
-    # - metricFile is <...>
+    # - base_dir tends to be <...>
+    # (but is set by user when starting to generate obs.)
+    # - splitDir tends to be <...>
+    # (and is set by observation generation script)
+    # - metricFile is <...>

Date: Tue, 8 Oct 2024 23:15:27 -0700
Subject: [PATCH 15/23] Ruff batches and run_comparison

---
 rubin_sim/maf/batches/ddf_batch.py         |   3 +-
 .../run_comparison/microlensing_compare.py | 159 ++++++++++--------
 rubin_sim/maf/run_comparison/radar_plot.py |  35 ++--
 3 files changed, 112 insertions(+), 85 deletions(-)

diff --git a/rubin_sim/maf/batches/ddf_batch.py b/rubin_sim/maf/batches/ddf_batch.py
index 3f29de62..eebf86a0 100644
--- a/rubin_sim/maf/batches/ddf_batch.py
+++ b/rubin_sim/maf/batches/ddf_batch.py
@@ -50,7 +50,8 @@ def ddfBatch(
     extra_info_label : `str`, optional
         Additional description information to add (alongside the extra_sql)
     old_coords : `bool`
-        Use the default locations for the DDFs from pre-July 2024. Default False.
+        Use the default locations for the DDFs from pre-July 2024.
+        Default False.

     Returns
     -------
diff --git a/rubin_sim/maf/run_comparison/microlensing_compare.py b/rubin_sim/maf/run_comparison/microlensing_compare.py
index 24ed66de..4fbc2950 100644
--- a/rubin_sim/maf/run_comparison/microlensing_compare.py
+++ b/rubin_sim/maf/run_comparison/microlensing_compare.py
@@ -24,10 +24,10 @@ def microlensing_fom(

     Parameters
     ----------
-    result_db_path : string
+    result_db_path : `str`
         Path to the directory storing the result databases
         generated by MAF.
-    metric_data_path : string
+    metric_data_path : `str`
         Path to the directory storing the npz files generated by MAF.
""" @@ -93,7 +93,7 @@ def parse_t_e_run_types(name): Parameters ---------- - name : string + name : `str` A MicrolensingMetric file name """ split_name = name.split("MicrolensingMetric") @@ -112,12 +112,12 @@ def get_results(df, run_type, fisher_sigmat_e_t_e_cutoff=0.1): Parameters ---------- - df : pandas dataframe + df : `pandas.Dataframe` Pandas dataframe of the results npz file - run_types : numpy array of strings + run_types : `np.ndarray`, (N,) Array of strings describing microlensing metric type: either 'detect', 'Npts', or 'Fisher' as parsed by the file name - fisher_sigmat_e_t_e_cutoff : float (Default is 0.1) + fisher_sigmat_e_t_e_cutoff : `float` Maximum normalized uncertainty in tE (sigmatE/tE) as determined by 3sigma values of pubished planet microlensing candidates """ @@ -149,25 +149,25 @@ def plot_fom(results, run_names, run_types, min_t_e, max_t_e, save_folder, figur Parameters ---------- - results : numpy array of floats + results : `np.ndarray`, (N,) Results from the MicrolensingMetric from get_results() from the respective microlensing metric type - run_names : numpy array of strings + run_names : `np.ndarray`, (N,) Array of names of the OpSim run that was used in the metric - run_types : numpy array of strings + run_types : `np.ndarray`, (N,) Array of strings describing microlensing metric type: either 'detect', 'Npts', or 'Fisher' as parsed by the file name - min_t_e : numpy array of int/floats + min_t_e : `np.ndarray`, (N,) Array of values describing the minium einstein crossing time (tE) as parsed by the file name - max_t_e : numpy array of int/floats + max_t_e : `np.ndarray`, (N,) Array of values describing the maximum einstein crossing time (tE) as parsed by the file name - save_folder : string + save_folder : `str` String of folder name to save figure - figure_name : string + figure_name : `str` String of figure name - figsize : tuple + figsize : (`int`, `int`) Tuple of figure size in inches. Default is None, which sets figsize = (25, 30) """ @@ -231,28 +231,29 @@ def plot_fom(results, run_names, run_types, min_t_e, max_t_e, save_folder, figur def plot_compare(results, run_names, run_types, min_t_e, max_t_e, save_folder, npts_required=10): """ - Plots confusion matrix type plots comparing fraction detected, characterized (via Fisher), + Plots confusion matrix type plots comparing fraction detected, + characterized (via Fisher), and fraction of events with at least npts_required points within 2 tE Parameters ---------- - results : numpy array of floats + results : `np.ndarray`, (N,) Results from the MicrolensingMetric from get_results() from the respective microlensing metric type - run_names : numpy array of strings + run_names : `np.ndarray`, (N,) Array of names of the OpSim run that was used in the metric - run_types : numpy array of strings + run_types : `np.ndarray`, (N,) Array of strings describing microlensing metric type: either 'detect', 'Npts', or 'Fisher' as parsed by the file name - min_t_e : numpy array of int/floats + min_t_e : `np.ndarray`, (N,) Array of values describing the minium einstein crossing time (tE) as parsed by the file name - max_t_e : numpy array of int/floats + max_t_e : `np.ndarray`, (N,) Array of values describing the maximum einstein crossing time (tE) as parsed by the file name - save_folder : string + save_folder : `str` String of folder name to save figures - npts_required : int (Default is 10). + npts_required : `int` Number of poitns within 2tE required for the number of points fraction. 
""" @@ -320,20 +321,22 @@ def confusion_matrix_plot(comparison_matrix, xlabel, ylabel, run_name, t_e_range Parameters ---------- - comparison_matrix : numpy array + comparison_matrix : `np.ndarray`, (N,)` Array comparing two metric types (A and B) with the following shape: - [[(Yes A and Yes B), (Yes A and No B)], [(No A and Yes B), (No A and No B)]] - where Yes A and Yes B are the number of events that pass both the A and B criteria. - xlabel : string + [[(Yes A and Yes B), (Yes A and No B)], [(No A and Yes B), + (No A and No B)]] + where Yes A and Yes B are the number of events that pass both + the A and B criteria. + xlabel : `str` Sring of xlabel (also used in file name of figure) - ylabel : string + ylabel : `str` Sring of ylabel (also used in file name of figure) - run_name : string + run_name : `str` Name of the OpSim run that was used in the metric (used in labels and file name) - t_e_range : string + t_e_range : `str` String of the range of the tE (used in labels and file name) - save_folder : string + save_folder : `str` String of folder name to save figures """ @@ -365,16 +368,20 @@ def detected_fisher_comparison(fisher_results, detect_results, fisher_sigmat_e_t """ Returns an array of the following form where A = fisher criteria and B = detection criteria: - [[(Yes A and Yes B), (Yes A and No B)], [(No A and Yes B), (No A and No B)]] - where Yes A and Yes B are the number of events that pass both the A and B criteria. + [[(Yes A and Yes B), (Yes A and No B)], [(No A and Yes B), + (No A and No B)]] + where Yes A and Yes B are the number of events that pass both the + A and B criteria. Parameters ---------- - fisher_results : numpy array - Array of results from running the Fisher metric of the microlensing metric - detect_results : numpy array - Array of results from running the detect metric of the microlensing metric - fisher_sigmat_e_t_e_cutoff : float (Default is 0.1) + fisher_results : `np.ndarray`, (N,) + Array of results from running the Fisher metric of the + microlensing metric + detect_results : `np.ndarray`, (N,) + Array of results from running the detect metric of the + microlensing metric + fisher_sigmat_e_t_e_cutoff : `float` Maximum normalized uncertainty in tE (sigmatE/tE) as determined by 3sigma values of pubished planet microlensing candidates """ @@ -389,18 +396,22 @@ def fisher_npts_comparison(fisher_results, npts_results, npts_required=10, fishe """ Returns an array of the following form where A = fisher criteria and B = npts criteria: - [[(Yes A and Yes B), (Yes A and No B)], [(No A and Yes B), (No A and No B)]] - where Yes A and Yes B are the number of events that pass both the A and B criteria. + [[(Yes A and Yes B), (Yes A and No B)], [(No A and Yes B), + (No A and No B)]] + where Yes A and Yes B are the number of events that pass both the + A and B criteria. Parameters ---------- - fisher_results : numpy array - Array of results from running the Fisher metric of the microlensing metric - npts_results : numpy array - Array of results from running the Npts metric of the microlensing metric - npts_required : int (Default is 10). + fisher_results : `np.ndarray`, (N,) + Array of results from running the Fisher metric of the + microlensing metric + npts_results : `np.ndarray`, (N,) + Array of results from running the Npts metric of the + microlensing metric + npts_required : `int` Number of poitns within 2tE required for the number of points fraction. 
-    fisher_sigmat_e_t_e_cutoff : float (Default is 0.1)
+    fisher_sigmat_e_t_e_cutoff : `float`
         Maximum normalized uncertainty in tE (sigmatE/tE) as determined
         by 3sigma values of published planet microlensing candidates
     """
@@ -415,16 +426,20 @@ def detected_npts_comparison(detect_results, npts_results, npts_required=10):
     """
     Returns an array of the following form where A = detect criteria
     and B = npts criteria:
-    [[(Yes A and Yes B), (Yes A and No B)], [(No A and Yes B), (No A and No B)]]
-    where Yes A and Yes B are the number of events that pass both the A and B criteria.
+    [[(Yes A and Yes B), (Yes A and No B)], [(No A and Yes B),
+    (No A and No B)]]
+    where Yes A and Yes B are the number of events that pass both the
+    A and B criteria.

     Parameters
     ----------
-    detect_results : numpy array
-        Array of results from running the detect metric of the microlensing metric
-    npts_results : numpy array
-        Array of results from running the Npts metric of the microlensing metric
-    npts_required : int (Default is 10).
+    detect_results : `np.ndarray`, (N,)
+        Array of results from running the detect metric of the
+        microlensing metric
+    npts_results : `np.ndarray`, (N,)
+        Array of results from running the Npts metric of the
+        microlensing metric
+    npts_required : `int`
         Number of points within 2tE required for the number of points fraction.
     """
     detect_npts = np.where((detect_results == 1) & (npts_results > npts_required))[0]
@@ -439,13 +454,17 @@ def get_results_dbs(result_db_path):
     Create a dictionary of result_db from result_db files
     via PCW Hackathon 2020 Resources

-    Args:
-        result_db_path(str): Path to the directory storing the result databases
-        generated by MAF.
+    Parameters
+    ----------
+    result_db_path : `str`
+        Path to the directory storing the result databases
+        generated by MAF.

-    Returns:
-        result_dbs(dict): A dictionary containing the ResultDb objects
-        reconstructed from result databases in the provided directory.
+    Returns
+    -------
+    result_dbs : `dict`
+        A dictionary containing the ResultsDb objects
+        reconstructed from result databases in the provided directory.
     """
     result_dbs = {}
@@ -466,16 +485,20 @@ def bundle_dict_from_disk(result_db, run_name, metric_data_path):
     Load metric data from disk and import them into metricBundles.
     via PCW Hackathon 2020 Resources

-    Args:
-        result_db(dict): A ResultDb object.
-        run_name(str): The name of the opsim database that the metrics stored in
-        result_db was evaluated on.
-        metric_data_path(str): The path to the directory where the metric data
-        (.npz files) is stored.
-
-    Returns:
-        bundle_dict(dict): A dictionary of metricBundles reconstructed from data
-        stored on disk, the keys designate metric names.
+    Parameters
+    ----------
+    result_db : `dict`
+        A ResultsDb object
+    run_name : `str`
+        The name of the opsim database for the metrics in result_db
+    metric_data_path : `str`
+        The path to the directory where the metric datafiles are stored.
+
+    Returns
+    -------
+    bundle_dict : `dict`
+        A dictionary of metricBundles reconstructed from the data
+        stored on disk.
     """
     bundle_dict = {}
diff --git a/rubin_sim/maf/run_comparison/radar_plot.py b/rubin_sim/maf/run_comparison/radar_plot.py
index a732ad30..cf6ea3b4 100644
--- a/rubin_sim/maf/run_comparison/radar_plot.py
+++ b/rubin_sim/maf/run_comparison/radar_plot.py
@@ -21,28 +21,32 @@ def normalize_for_radar(
     reverse_cols=None,
     mag_cols=[],
 ):
-    """
-    Normalize values in a dataframe to a given run, return output in a dataframe.
+ """Normalize values in a dataframe to a given run, + return output in a dataframe. - This provides a similar functionality as the normalize_metric_summaries method, and returns a similar - dataframe. The options for specifying which columns to invert, reverse, or identify as 'magnitudes' + This provides a similar functionality as the + normalize_metric_summaries method, and returns a similar + dataframe. The options for specifying which columns to invert, + reverse, or identify as 'magnitudes' are slightly different, instead of using a 'metric_set'. Parameters ---------- - summary : pandas.DataFrame - The data frame containing the metric summary stats to normalize (such as from `get_metric_summaries`). - Note that this should contain only the runs and metrics to be normalized -- e.g. + summary : `pandas.DataFrame` + The data frame containing the metric summary stats to normalize + (such as from `get_metric_summaries`). + Note that this should contain only the runs and metrics to be + normalized -- e.g. `summary.loc[[list of runs], [list of metrics]]` summary should be indexed by the run name. - norm_run : str + norm_run : `str` The name of the run to use to define the normalization. - invert_cols : list of str + invert_cols : `list` [`str`] A list of column names that should be inverted (e.g., columns that are uncertainties and are better with a smaller value) - reverse_cols : list of str + reverse_cols : `list` [`str] Columns to reverse (e.g., magnitudes) - mag_cols : list of str + mag_cols : `list` [`str`] Columns that are in magnitudes """ out_df = summary.copy() @@ -70,7 +74,7 @@ def _radar_factory(num_vars, frame="circle"): Parameters ---------- - num_vars : int + num_vars : `int` Number of variables for radar chart. frame : {'circle' | 'polygon'} Shape of frame surrounding axes. @@ -78,7 +82,8 @@ def _radar_factory(num_vars, frame="circle"): """ # calculate evenly-spaced axis angles theta = np.linspace(0, 2 * np.pi, num_vars, endpoint=False) - # rotate theta such that the first axis is at the top, then make sure we don't go past 360 + # rotate theta such that the first axis is at the top, + # then make sure we don't go past 360 theta += np.pi / 2 theta = theta % (2.0 * np.pi) @@ -168,9 +173,7 @@ def radar( bbox_to_anchor=(1.6, 0.5), legend_font_size=None, ): - """ - make a radar plot! 
- """ + """make a radar plot!""" theta = _radar_factory(np.size(df.columns), frame="polygon") fig, axes = plt.subplots(figsize=figsize, subplot_kw=dict(projection="radar")) axes.set_rgrids(rgrids, fontsize="x-large") From f38cc85e5f3af7a1aa74fb781e5666cfd88dd687 Mon Sep 17 00:00:00 2001 From: Lynne Jones Date: Tue, 8 Oct 2024 23:20:08 -0700 Subject: [PATCH 16/23] Black and isort --- rubin_sim/maf/run_moving_calc.py | 10 +++++----- rubin_sim/maf/scimaf_dir.py | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/rubin_sim/maf/run_moving_calc.py b/rubin_sim/maf/run_moving_calc.py index c27d0371..a3c354bb 100755 --- a/rubin_sim/maf/run_moving_calc.py +++ b/rubin_sim/maf/run_moving_calc.py @@ -169,7 +169,7 @@ def run_moving_calc(): detection_losses="trailing", albedo=args.albedo, h_mark=h_mark, - magtype=magtype + magtype=magtype, ) # Run these discovery metrics print("Calculating quick discovery metrics with simple trailing losses.") @@ -187,7 +187,7 @@ def run_moving_calc(): detection_losses="detection", albedo=args.albedo, h_mark=h_mark, - magtype=magtype + magtype=magtype, ) bdict = batches.discovery_batch( slicer, @@ -199,7 +199,7 @@ def run_moving_calc(): detection_losses="detection", albedo=args.albedo, h_mark=h_mark, - magtype=magtype + magtype=magtype, ) bdictD.update(bdict) @@ -219,7 +219,7 @@ def run_moving_calc(): constraint_info_label=args.constraint_info_label, constraint=args.constraint, h_mark=h_mark, - magtype=magtype + magtype=magtype, ) elif characterization.lower() == "outer": bdictC = batches.characterization_outer_batch( @@ -231,7 +231,7 @@ def run_moving_calc(): constraint_info_label=args.constraint_info_label, constraint=args.constraint, h_mark=h_mark, - magtype=magtype + magtype=magtype, ) # Run these characterization metrics print("Calculating characterization metrics.") diff --git a/rubin_sim/maf/scimaf_dir.py b/rubin_sim/maf/scimaf_dir.py index 42ad1e11..32355d34 100755 --- a/rubin_sim/maf/scimaf_dir.py +++ b/rubin_sim/maf/scimaf_dir.py @@ -50,7 +50,7 @@ def scimaf_dir(): con.close() mjd0 = mjd0_df.values.min() # If this fails for any reason (aka schema change) - except: #noqa E722 + except: # noqa E722 warnings.warn("Could not find survey start date for Presto KNe, setting mjd0=None.") mjd0 = None # Clobber output directory if it exists From 5c0f12dcbd7f8ebe98859c44f77bbd2e5a5fc547 Mon Sep 17 00:00:00 2001 From: Lynne Jones Date: Wed, 9 Oct 2024 00:20:58 -0700 Subject: [PATCH 17/23] Put badval kwarg back in healpix slicer --- rubin_sim/maf/slicers/healpix_slicer.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/rubin_sim/maf/slicers/healpix_slicer.py b/rubin_sim/maf/slicers/healpix_slicer.py index 60b5bd34..a0deffd7 100644 --- a/rubin_sim/maf/slicers/healpix_slicer.py +++ b/rubin_sim/maf/slicers/healpix_slicer.py @@ -80,6 +80,9 @@ class HealpixSlicer(BaseSpatialSlicer): Only used if use_camera is True. Describes the orientation of the camera orientation compared to the sky. + badval: `float`, optional + The value to place use in place of bad data values. + Use np.nan unless specifically needing other values. 
""" def __init__( @@ -95,12 +98,13 @@ def __init__( use_camera=True, camera_footprint_file=None, rot_sky_pos_col_name="rotSkyPos", + badval=np.nan, ): super().__init__( verbose=verbose, lon_col=lon_col, lat_col=lat_col, - badval=np.nan, + badval=badval, radius=radius, leafsize=leafsize, use_camera=use_camera, From ed0744c24f51c5cd44f1173bbb56c7d0b25d80be Mon Sep 17 00:00:00 2001 From: Lynne Jones Date: Wed, 9 Oct 2024 12:01:18 -0700 Subject: [PATCH 18/23] Fix errors within rubin-sim when building docs --- docs/data-download.rst | 4 ++-- docs/introduction.rst | 6 +++--- docs/skybrightness.rst | 2 +- rubin_sim/data/rs_download_data.py | 4 ++-- rubin_sim/maf/batches/metadata_batch.py | 4 ++-- rubin_sim/maf/batches/moving_objects_batch.py | 15 +++++++++++---- rubin_sim/maf/maf_contrib/kne_metrics.py | 6 ++---- .../maf_contrib/presto_color_kne_pop_metric.py | 5 +++++ .../maf/maps/galactic_plane_priority_maps.py | 4 ++-- rubin_sim/maf/metrics/color_slope_metrics.py | 11 +++++++---- rubin_sim/maf/metrics/mo_metrics.py | 2 +- rubin_sim/maf/plots/two_d_plotters.py | 10 +++++----- rubin_sim/maf/run_comparison/archive.py | 2 ++ rubin_sim/maf/stackers/mo_stackers.py | 8 ++++---- rubin_sim/phot_utils/sed.py | 2 +- rubin_sim/skybrightness/interp_components.py | 11 ++++++----- 16 files changed, 56 insertions(+), 40 deletions(-) diff --git a/docs/data-download.rst b/docs/data-download.rst index 0e941efa..b2836e98 100644 --- a/docs/data-download.rst +++ b/docs/data-download.rst @@ -33,8 +33,8 @@ for more details on setting up $RUBIN_SIM_DATA_DIR (which is shared between ``rubin_scheduler``, ``rubin_sim`` and ``schedview``). Using either the default path to $RUBIN_SIM_DATA_DIR, or after setting it -explicitly, first download the necessary data for `rubin_scheduler` and -then add the (larger) data set for `rubin_sim`: +explicitly, first download the necessary data for ``rubin_scheduler`` and +then add the (larger) data set for ``rubin_sim``: .. code-block:: bash diff --git a/docs/introduction.rst b/docs/introduction.rst index 9f1832fc..1672dd73 100644 --- a/docs/introduction.rst +++ b/docs/introduction.rst @@ -8,7 +8,7 @@ Introduction The `Legacy Survey of Space and Time `_ (LSST) is anticipated to encompass around 2 million observations spanning a decade, -averaging 800 visits per night. The `rubin_sim` package was built to help +averaging 800 visits per night. The ``rubin_sim`` package was built to help understand the predicted performance of the LSST. The :ref:`Phot Utils` module provides synthetic photometry @@ -18,14 +18,14 @@ The :ref:`skybrightness` module incorporates the ESO sky model, modified to match measured sky conditions at the LSST site, including an addition of a model for twilight skybrightness. This is used to generate the pre-calculated skybrightness data used in -`rubin_scheduler`_. +`rubin_scheduler `_. The :ref:`Moving Objects` module provides a way to create synthetic observations of moving objects, based on how they would appear in pointing databases ("opsims") created by `rubin_scheduler `_. -One of the major goals for `rubin_sim` is to enable efficient and +One of the major goals for ``rubin_sim`` is to enable efficient and scientifically varied evaluation of the LSST survey strategy and progress, by providing a framework to enable these metrics to run in a standardized way on opsim outputs. 
diff --git a/docs/skybrightness.rst b/docs/skybrightness.rst index 5cdefb23..d22c1432 100644 --- a/docs/skybrightness.rst +++ b/docs/skybrightness.rst @@ -6,7 +6,7 @@ Skybrightness ############# -The `rubin_sim.skybrightness` module generates +The ``rubin_sim.skybrightness`` module generates predicted skybrightness values (in either magnitudes per square arcsecond for any LSST bandpass or as a SED over the relevant wavelengths). diff --git a/rubin_sim/data/rs_download_data.py b/rubin_sim/data/rs_download_data.py index d6313509..66203983 100644 --- a/rubin_sim/data/rs_download_data.py +++ b/rubin_sim/data/rs_download_data.py @@ -44,9 +44,9 @@ def data_dict() -> dict: file_dict : `dict` Data bucket filenames dictionary with keys: ``"name"`` - Data bucket name (`str`). + Data bucket name (`str`). ``"version"`` - Versioned file name (`str`). + Versioned file name (`str`). """ # Note for developers: # to create tar files and follow any sym links, run: e.g. diff --git a/rubin_sim/maf/batches/metadata_batch.py b/rubin_sim/maf/batches/metadata_batch.py index ac067e0a..2b1065d2 100644 --- a/rubin_sim/maf/batches/metadata_batch.py +++ b/rubin_sim/maf/batches/metadata_batch.py @@ -39,7 +39,7 @@ def metadataBasics( Calculates this around the sky (HealpixSlicer), makes histograms of all visits (OneDSlicer), and calculates statistics on all visits (UniSlicer) for the quantity, - in all visits and per filter. + in all visits and per filter. Currently have a hack for HA & normairmass. @@ -47,7 +47,7 @@ def metadataBasics( ---------- value : `str` The column name for the quantity to evaluate. - (column name in the database or created by a stacker). + (column name in the database or created by a stacker). colmap : `dict` or None, optional A dictionary with a mapping of column names. runName : `str`, optional diff --git a/rubin_sim/maf/batches/moving_objects_batch.py b/rubin_sim/maf/batches/moving_objects_batch.py index 9ff4b6fc..a618fda9 100644 --- a/rubin_sim/maf/batches/moving_objects_batch.py +++ b/rubin_sim/maf/batches/moving_objects_batch.py @@ -36,7 +36,9 @@ def ss_population_defaults(objtype): - "Provide useful default ranges for H, based on objtype of population type." + """Provide useful default ranges for H, + based on objtype of population type. + """ defaults = {} defaults["Vatira"] = { "h_range": [16, 28, 0.2], @@ -135,6 +137,9 @@ def quick_discovery_batch( constraint=None, magtype="asteroid", ): + """A subset of discovery metrics, using only the default discovery + criteria of 3 pairs in 15 or 30 nights. + """ if colmap is None: colmap = col_map_dict() bundleList = [] @@ -276,6 +281,9 @@ def discovery_batch( constraint=None, magtype="asteroid", ): + """A comprehensive set of discovery metrics, using a wide range + of discovery criteria. + """ if colmap is None: colmap = col_map_dict() bundleList = [] @@ -648,8 +656,7 @@ def _configure_child_bundles(parentBundle): def run_completeness_summary(bdict, h_mark, times, out_dir, results_db): - """ - Calculate completeness and create completeness bundles from all + """Calculate completeness and create completeness bundles from all N_Chances and Time (child) metrics of the (discovery) bundles in bdict, and write completeness at h_mark to results_db, save bundle to disk. @@ -665,7 +672,7 @@ def run_completeness_summary(bdict, h_mark, times, out_dir, results_db): If not defined (None), then the h_mark from the plotdict from the metric bundle will be used if available. If None and h_mark not in plot_dict, then median of h_range values - will be used. 
+ will be used. times : `np.ndarray` The times at which to calculate completeness (over time). out_dir : `str` diff --git a/rubin_sim/maf/maf_contrib/kne_metrics.py b/rubin_sim/maf/maf_contrib/kne_metrics.py index f0de8043..35a7d051 100644 --- a/rubin_sim/maf/maf_contrib/kne_metrics.py +++ b/rubin_sim/maf/maf_contrib/kne_metrics.py @@ -24,10 +24,8 @@ def get_kne_filename(inj_params_list=None): mass of the dynamical ejecta (mej_dyn), mass of the disk wind ejecta (mej_wind), semi opening angle of the cylindrically-symmetric ejecta fan ('phi'), and viewing angle ('theta'). For example - inj_params_list = [{'mej_dyn': 0.005, - 'mej_wind': 0.050, - 'phi': 30, - 'theta': 25.8}] + inj_params_list = + [{'mej_dyn': 0.005, 'mej_wind': 0.050, 'phi': 30, 'theta': 25.8}] """ # Get files, model grid developed by M. Bulla datadir = get_data_dir() diff --git a/rubin_sim/maf/maf_contrib/presto_color_kne_pop_metric.py b/rubin_sim/maf/maf_contrib/presto_color_kne_pop_metric.py index f162fc1d..68bc85cc 100644 --- a/rubin_sim/maf/maf_contrib/presto_color_kne_pop_metric.py +++ b/rubin_sim/maf/maf_contrib/presto_color_kne_pop_metric.py @@ -81,6 +81,7 @@ def generate_presto_pop_slicer( ): """Generate a population of KNe events, and put the info about them into a UserPointSlicer object + Parameters ---------- skyregion : `str` @@ -100,6 +101,10 @@ def generate_presto_pop_slicer( Minimum luminosity distance (Mpc) d_max : `float` or `int` Maximum luminosity distance (Mpc) + + Returns + ------- + kne_slicer : `~.maf.UserPointsSlicer` """ def rndm(a, b, g, size=1): diff --git a/rubin_sim/maf/maps/galactic_plane_priority_maps.py b/rubin_sim/maf/maps/galactic_plane_priority_maps.py index d05b53e4..fa32a397 100644 --- a/rubin_sim/maf/maps/galactic_plane_priority_maps.py +++ b/rubin_sim/maf/maps/galactic_plane_priority_maps.py @@ -18,7 +18,8 @@ def gp_priority_map_components_to_keys(filtername, science_map): """A convenience function to help keep the map key - formats in sync in various places""" + formats in sync in various places. + """ return f"galplane_priority_{science_map}:{filtername}" @@ -71,7 +72,6 @@ def galplane_priority_map( Use the priority_GalPlane_footprint_alt_map_data_{ugrizysum}.fits files instead of the default priority_galPlane_footprint_map_data_{ugrizysum}.fits files. - Default False. """ # This is a function that will read the galactic plane priority map data # and hold onto it indefinitely diff --git a/rubin_sim/maf/metrics/color_slope_metrics.py b/rubin_sim/maf/metrics/color_slope_metrics.py index 4a3c85b8..8c5b1cee 100644 --- a/rubin_sim/maf/metrics/color_slope_metrics.py +++ b/rubin_sim/maf/metrics/color_slope_metrics.py @@ -67,7 +67,8 @@ def __call__(self, data_slice): class ColorSlopeMetric(BaseMetric): """How many times do we get a color and slope in a night - A proxie metric for seeing how many times + + A proxy metric for seeing how many times there would be the possibility of identifying and classifying a transient. @@ -81,7 +82,8 @@ class ColorSlopeMetric(BaseMetric): to still count as a color (hours). Default 1 hour. slope_length : `float` The length of time to demand observations in the - same filter be greater than (hours). Default 3 hours.""" + same filter be greater than (hours). Default 3 hours. 
+ """ def __init__( self, @@ -128,7 +130,7 @@ def run(self, data_slice, slice_point=None): class ColorSlope2NightMetric(ColorSlopeMetric): """Like ColorSlopeMetric, but span over 2 nights - Parameters + Parameters ---------- mag : `dict` Dictionary with filternames as keys and minimum depth m5 @@ -138,7 +140,8 @@ class ColorSlope2NightMetric(ColorSlopeMetric): to still count as a color (hours). Default 1 hour. slope_length : `float` The length of time to demand observations in the - same filter be greater than (hours). Default 15 hours.""" + same filter be greater than (hours). Default 15 hours. + """ def __init__( self, diff --git a/rubin_sim/maf/metrics/mo_metrics.py b/rubin_sim/maf/metrics/mo_metrics.py index 4d10adcd..62c5d663 100644 --- a/rubin_sim/maf/metrics/mo_metrics.py +++ b/rubin_sim/maf/metrics/mo_metrics.py @@ -1115,7 +1115,7 @@ class ColorAsteroidMetric(BaseMoMetric): 1 = g and (r or i) and (z or y). i.e. obtain colors g-r or g-i PLUS g-z or g-y 2 = Any 4 different filters (from grizy). - i.e. colors = g-r, r-i, i-z, OR r-i, i-z, z-y.. + i.e. colors = g-r, r-i, i-z, OR r-i, i-z, z-y.. 3 = All 5 from grizy. i.e. colors g-r, r-i, i-z, z-y. 4 = All 6 filters (ugrizy) -- best possible! add u-g. diff --git a/rubin_sim/maf/plots/two_d_plotters.py b/rubin_sim/maf/plots/two_d_plotters.py index b65124d2..c8861ee6 100644 --- a/rubin_sim/maf/plots/two_d_plotters.py +++ b/rubin_sim/maf/plots/two_d_plotters.py @@ -44,7 +44,7 @@ def __call__(self, metric_value, slicer, user_plot_dict, fig=None): The metric values from the bundle. slicer : `rubin_sim.maf.slicers.TwoDSlicer` The slicer. - user_plot_dict: `dict` + user_plot_dict : `dict` Dictionary of plot parameters set by user (overrides default values). fig : `matplotlib.figure.Figure` @@ -52,8 +52,8 @@ def __call__(self, metric_value, slicer, user_plot_dict, fig=None): Returns ------- - fig : `matplotlib.figure.Figure - Figure with the plot. + fig : `matplotlib.figure.Figure` + Figure with the plot. """ if "Healpix" in slicer.slicer_name: self.default_plot_dict["ylabel"] = "Healpix ID" @@ -124,7 +124,7 @@ class VisitPairsHist(BasePlotter): The metric values from the bundle. slicer : `rubin_sim.maf.slicers.TwoDSlicer` The slicer. - user_plot_dict: `dict` + user_plot_dict : `dict` Dictionary of plot parameters set by user (overrides default values). fig : `matplotlib.figure.Figure` @@ -133,7 +133,7 @@ class VisitPairsHist(BasePlotter): Returns ------- fig : `matplotlib.figure.Figure` - Figure with the plot. + Figure with the plot. """ def __init__(self): diff --git a/rubin_sim/maf/run_comparison/archive.py b/rubin_sim/maf/run_comparison/archive.py index dcb57a04..5874270b 100644 --- a/rubin_sim/maf/run_comparison/archive.py +++ b/rubin_sim/maf/run_comparison/archive.py @@ -221,7 +221,9 @@ def get_metric_summaries( ---- The entire summary statistic values for all of the runs and metrics can be downloaded from the default sources first, by simply calling + .. 
code-block:: python

+        summary = get_metric_summaries()

     Then, you can use `get_metric_summaries` to get a subset without
diff --git a/rubin_sim/maf/stackers/mo_stackers.py b/rubin_sim/maf/stackers/mo_stackers.py
index 9fcaf73d..597b6002 100644
--- a/rubin_sim/maf/stackers/mo_stackers.py
+++ b/rubin_sim/maf/stackers/mo_stackers.py
@@ -191,10 +191,10 @@ class AppMagStacker(BaseMoStacker):
     This is calculated from the reported mag_v in the input
     observation file (calculated assuming Href) as:

-    ```
-    ssoObs['appMag'] = ssoObs[self.vMagCol] + ssoObs[self.colorCol] +
-    ssoObs[self.lossCol] + h_val - Href
-    ```
+    .. code-block:: python
+
+        ssoObs['appMag'] = ssoObs[self.vMagCol] + ssoObs[self.colorCol] +
+        ssoObs[self.lossCol] + h_val - Href

     Using the vMag reported in the input observations
     implicitly uses the phase curve coded in at that point;
diff --git a/rubin_sim/phot_utils/sed.py b/rubin_sim/phot_utils/sed.py
index 248ecec2..0df13715 100644
--- a/rubin_sim/phot_utils/sed.py
+++ b/rubin_sim/phot_utils/sed.py
@@ -754,7 +754,7 @@ def resample_sed(
         Give method wavelen/flux OR default to self.wavelen/self.flambda.
         Method either returns wavelen/flambda (if given those arrays) or
-        updates wavelen/flambda in self.
+        updates wavelen/flambda in self.
         If updating self, resets fnu to None.
         Method will first check if resampling needs to be done or not,
         unless 'force' is True.
diff --git a/rubin_sim/skybrightness/interp_components.py b/rubin_sim/skybrightness/interp_components.py
index 925d8308..790eea90 100644
--- a/rubin_sim/skybrightness/interp_components.py
+++ b/rubin_sim/skybrightness/interp_components.py
@@ -36,7 +36,7 @@ def id2intid(ids):
-    """take an array of ids, and convert them to an integer id.
+    """Take an array of ids, and convert them to an integer id.
     Handy if you want to put things into a sparse array.
     """
     uids = np.unique(ids)
@@ -76,10 +76,11 @@ def load_spec_files(filenames, mags=False):
     * filter_wave: The central wavelengths of the pre-computed magnitudes
     * wave: wavelengths for the spectra
     * spec: array of spectra and magnitudes along with the relevant
-      variable inputs. For example, airglow has dtype =
-      [('airmass', '<f8'), ...]

Date: Wed, 9 Oct 2024 12:07:57 -0700
Subject: [PATCH 19/23] Unpin pytest and add tornado to requirements

---
 requirements.txt      | 2 ++
 test-requirements.txt | 5 +++--
 2 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/requirements.txt b/requirements.txt
index 4a9cbe02..188bef80 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -21,5 +21,7 @@ scikit-learn
 shapely
 skyfield
 skyproj
+tornado
+jinja2
 tqdm
 rubin-scheduler
diff --git a/test-requirements.txt b/test-requirements.txt
index 067ec2b4..762f93fc 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -1,5 +1,6 @@
-pytest<8.0.0
+pytest
+pytest-cov
 black>=24.0.0
 ruff
 isort
-pytest-cov

From 7e87d7471dc3c9cc6df21b9feb598fa4729b4b39 Mon Sep 17 00:00:00 2001
From: Lynne Jones
Date: Wed, 9 Oct 2024 13:07:35 -0700
Subject: [PATCH 20/23] Remove jenkins

---
 ci/Jenkinsfile_rubin_sim.conda | 2 --
 1 file changed, 2 deletions(-)
 delete mode 100644 ci/Jenkinsfile_rubin_sim.conda

diff --git a/ci/Jenkinsfile_rubin_sim.conda b/ci/Jenkinsfile_rubin_sim.conda
deleted file mode 100644
index c07cc724..00000000
--- a/ci/Jenkinsfile_rubin_sim.conda
+++ /dev/null
@@ -1,2 +0,0 @@
-@Library('JenkinsShared')_
-ExternalCondaPipeline("rubin-sim")

From 4cb55e4eac4cc0fda1c4915409a9908f1449a66f Mon Sep 17 00:00:00 2001
From: Lynne Jones
Date: Wed, 9 Oct 2024 13:02:27 -0700
Subject: [PATCH 21/23] Updated doc workflow

---
 .github/workflows/build_docs.yaml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/build_docs.yaml b/.github/workflows/build_docs.yaml
index 9eba81c5..4d337474 100644
--- a/.github/workflows/build_docs.yaml
+++ b/.github/workflows/build_docs.yaml
@@ -18,7 +18,7 @@ jobs:
       - uses: conda-incubator/setup-miniconda@v3
         with:
           auto-update-conda: true
-          python-version: ${{ matrix.python-version }}
+          python-version: "3.11"
           miniforge-variant: Miniforge3
           channels: conda-forge,defaults
           use-mamba: true
@@ -28,8 +28,8 @@ jobs:
       - name: configure conda and install requirements
         shell: bash -l {0}
         run: |
-          mamba install --quiet --file=requirements.txt
           mamba install --quiet pip
+          mamba install --quiet --file=requirements.txt
           pip install "documenteer[guide]"

       - name: install rubin_sim

From 73b4d0681dbecba8d21d5be0870d5512024715ef Mon Sep 17 00:00:00 2001
From: Lynne Jones
Date: Wed, 9 Oct 2024 15:32:00 -0700
Subject: [PATCH 22/23] Fix typo in conda env names in installation guide

---
 docs/installation.rst | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/installation.rst b/docs/installation.rst
index 798c7aaf..ace58168 100644
--- a/docs/installation.rst
+++ b/docs/installation.rst
@@ -45,14 +45,14 @@ Create a conda environment for it:

 ::

-    conda create --channel conda-forge --name rubin_sim --file requirements.txt python=3.11
+    conda create --channel conda-forge --name rubin-sim --file requirements.txt python=3.11

 If you want to run tests (please do), install the test requirements as well:

 ::

-    conda activate rubin_scheduler
+    conda activate rubin-sim
     conda install -c conda-forge --file=test-requirements.txt

From 76d8554f96ff9769e81eea5c5858cd36f2cf01cd Mon Sep 17 00:00:00 2001
From: Lynne Jones
Date: Tue, 15 Oct 2024 16:25:50 -0700
Subject: [PATCH 23/23] Check that all tles are not None

---
 rubin_sim/satellite_constellations/model_observatory.py | 1 -
 rubin_sim/satellite_constellations/sat_utils.py         | 1 -
 tests/satellite_constellations/test_satellites.py       | 8 ++++++--
 3 files changed, 6 insertions(+), 4 deletions(-)

diff --git a/rubin_sim/satellite_constellations/model_observatory.py b/rubin_sim/satellite_constellations/model_observatory.py
index 079bc8df..43186f9a 100644
--- a/rubin_sim/satellite_constellations/model_observatory.py
+++ b/rubin_sim/satellite_constellations/model_observatory.py
@@ -3,7 +3,6 @@
 import numpy as np
 from rubin_scheduler.scheduler.model_observatory import ModelObservatory as oMO
 from rubin_scheduler.site_models import Almanac
-
 from rubin_scheduler.utils import SURVEY_START_MJD, _healbin

 # Take the model observatory from the scheduler and
diff --git a/rubin_sim/satellite_constellations/sat_utils.py b/rubin_sim/satellite_constellations/sat_utils.py
index 65a0b903..2f63c7e6 100644
--- a/rubin_sim/satellite_constellations/sat_utils.py
+++ b/rubin_sim/satellite_constellations/sat_utils.py
@@ -10,7 +10,6 @@
 import numpy as np
 from astropy import constants as const
 from astropy import units as u
-
 from rubin_scheduler.utils import SURVEY_START_MJD, Site, gnomonic_project_toxy, point_to_line_distance
 from shapely.geometry import LineString, Point
 from skyfield.api import EarthSatellite, load, wgs84
diff --git a/tests/satellite_constellations/test_satellites.py b/tests/satellite_constellations/test_satellites.py
index c890f894..6812797c 100644
--- a/tests/satellite_constellations/test_satellites.py
+++ b/tests/satellite_constellations/test_satellites.py
@@ -12,8 +12,12 @@ def test_constellations(self):
         mjd0 = SURVEY_START_MJD

         sv1 = starlink_tles_v1()
-        _ = starlink_tles_v2()
-        _ = oneweb_tles()
+        sv2 = starlink_tles_v2()
+        onw = oneweb_tles()
+
+        assert sv1 is not None
+        assert sv2 is not None
+        assert onw is not None

         const = Constellation(sv1)
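The intent of patch 23 — failing fast when TLE generation returns nothing — can also be exercised outside the test suite. A short sketch along the same lines follows; the import path is assumed to match the modules touched above, and only names visible in the diff are used.

::

    from rubin_sim.satellite_constellations import (
        Constellation,
        oneweb_tles,
        starlink_tles_v1,
        starlink_tles_v2,
    )

    sv1 = starlink_tles_v1()
    sv2 = starlink_tles_v2()
    onw = oneweb_tles()

    # None would signal that TLE generation failed; the updated test
    # asserts this for all three constellations.
    for tles in (sv1, sv2, onw):
        assert tles is not None

    constellation = Constellation(sv1)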