
Commit

Update the photutils import (#321)
haticekaratay authored Oct 28, 2024
1 parent 013fb9e commit df96883
Showing 2 changed files with 43 additions and 34 deletions.
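The substance of the change: EllipticalAperture, CircularAperture, and aperture_photometry are now imported from the photutils.aperture subpackage rather than the photutils top level (the namespaced path that newer photutils versions expect); the rest of the diff is line re-wrapping. Below is a minimal sketch of the updated usage, reusing the aperture geometry that appears in the first notebook; image_data is a placeholder array, not data from the notebooks.

```python
# New import path: aperture tools live in photutils.aperture.
import numpy as np
from photutils.aperture import EllipticalAperture, aperture_photometry

# Aperture geometry copied from the notebook diff; image_data is a stand-in
# array, not the SBC dark frame the notebook actually measures.
image_data = np.zeros((1024, 1024))
aper = EllipticalAperture([(735, 710), (200, 200)], a=70, b=40, theta=0.5 * np.pi)
phot_table = aperture_photometry(image_data, aper)
print(phot_table)
```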
30 changes: 17 additions & 13 deletions notebooks/ACS/acs_sbc_dark_analysis/acs_sbc_dark_analysis.ipynb
@@ -114,8 +114,7 @@
"from matplotlib.colors import LogNorm\n",
"from matplotlib.patches import Rectangle\n",
"\n",
"from photutils import EllipticalAperture\n",
"from photutils import aperture_photometry"
"from photutils.aperture import EllipticalAperture, aperture_photometry"
]
},
{
@@ -148,10 +147,12 @@
"metadata": {},
"outputs": [],
"source": [
"science_list = Observations.query_criteria(proposal_id='13655', obs_id='JCMC11*')\n",
"science_list = Observations.query_criteria(\n",
" proposal_id='13655', obs_id='JCMC11*')\n",
"\n",
"sci_dl_table = Observations.download_products(science_list['obsid'], \n",
" productSubGroupDescription=['ASN', 'FLT'],\n",
"sci_dl_table = Observations.download_products(science_list['obsid'],\n",
" productSubGroupDescription=[\n",
" 'ASN', 'FLT'],\n",
" mrp_only=False)"
]
},
@@ -173,7 +174,8 @@
"darks_list = Observations.query_criteria(proposal_id='13961', obstype='cal')\n",
"\n",
"drk_dl_table = Observations.download_products(darks_list['obsid'],\n",
" productSubGroupDescription=['RAW'],\n",
" productSubGroupDescription=[\n",
" 'RAW'],\n",
" mrp_only=False)"
]
},
@@ -191,7 +193,7 @@
"outputs": [],
"source": [
"for dl_table in [sci_dl_table, drk_dl_table]:\n",
" \n",
"\n",
" for row in dl_table:\n",
" oldfname = row['Local Path']\n",
" newfname = os.path.basename(oldfname)\n",
@@ -334,13 +336,13 @@
" filt = fits.getval(file, 'FILTER1', ext=0)\n",
" date = fits.getval(file, 'DATE-OBS', ext=0)\n",
" time = fits.getval(file, 'TIME-OBS', ext=0)\n",
" \n",
"\n",
" t1 = fits.getval(file, 'MDECODT1', ext=1)\n",
" t2 = fits.getval(file, 'MDECODT2', ext=1)\n",
"\n",
" starttime = date + 'T' + time\n",
" avgtemp = (t1+t2) / 2\n",
" \n",
"\n",
" flt_table.add_row((file, starttime, filt, t1, t2, avgtemp))\n",
"\n",
"print(flt_table)"
@@ -546,7 +548,7 @@
"exptime = fits.getval(drk_file, 'exptime', ext=0)\n",
"\n",
"with fits.open(new_file, mode='update') as hdu:\n",
" \n",
"\n",
" hdu[1].data[:, :] = darkdat\n",
" hdu[0].header['exptime'] = exptime"
]
@@ -566,7 +568,7 @@
"exptime = fits.getval(drk_file, 'exptime', ext=0)\n",
"\n",
"with fits.open(new_file, mode='update') as hdu:\n",
" \n",
"\n",
" hdu[1].data[:, :] = darkdat\n",
" hdu[0].header['exptime'] = exptime"
]
@@ -584,7 +586,8 @@
"metadata": {},
"outputs": [],
"source": [
"adriz_output = adriz(['dark1.fits', 'dark2.fits'], output='masterdark', **driz_kwargs)"
"adriz_output = adriz(['dark1.fits', 'dark2.fits'],\n",
" output='masterdark', **driz_kwargs)"
]
},
{
@@ -644,7 +647,8 @@
"metadata": {},
"outputs": [],
"source": [
"aper = EllipticalAperture([(735, 710), (200, 200)], a=70, b=40, theta=0.5*np.pi)"
"aper = EllipticalAperture([(735, 710), (200, 200)],\n",
" a=70, b=40, theta=0.5*np.pi)"
]
},
{
47 changes: 26 additions & 21 deletions in the second changed notebook
@@ -94,7 +94,7 @@
"from astroquery.mast import Observations\n",
"\n",
"from drizzlepac import tweakreg\n",
"from photutils import CircularAperture\n",
"from photutils.aperture import CircularAperture\n",
"from regions import Regions\n",
"\n",
"# set plotting details for notebooks\n",
@@ -136,7 +136,8 @@
"\n",
"# Find obsID for specific FLC images.\n",
"product_list = Observations.get_product_list(obs_table['obsid'])\n",
"mask = (product_list['productFilename'] == 'jcdua3f4q_flc.fits') | (product_list['productFilename'] == 'jcdua3f8q_flc.fits')\n",
"mask = (product_list['productFilename'] == 'jcdua3f4q_flc.fits') | (\n",
" product_list['productFilename'] == 'jcdua3f8q_flc.fits')\n",
"product_list['obsID', 'productFilename'][mask]"
]
},
@@ -159,10 +160,12 @@
"\n",
"products_to_download = product_list[mask]\n",
"\n",
"data_prod = ['FLC'] # ['FLC','FLT','DRC','DRZ'] \n",
"data_type = ['CALACS'] # ['CALACS','CALWF3','CALWP2','HAP-SVM'] \n",
"data_prod = ['FLC'] # ['FLC','FLT','DRC','DRZ']\n",
"# ['CALACS','CALWF3','CALWP2','HAP-SVM']\n",
"data_type = ['CALACS']\n",
"\n",
"Observations.download_products(products_to_download, productSubGroupDescription=data_prod, project=data_type, cache=True)"
"Observations.download_products(\n",
" products_to_download, productSubGroupDescription=data_prod, project=data_type, cache=True)"
]
},
{
@@ -183,7 +186,7 @@
"for flc in glob.glob('mastDownload/HST/*/jcdua3f[48]q_flc.fits'):\n",
" flc_name = os.path.split(flc)[-1]\n",
" os.rename(flc, flc_name)\n",
" \n",
"\n",
"# Delete the mastDownload directory and all subdirectories it contains.\n",
"shutil.rmtree('mastDownload')"
]
@@ -212,7 +215,7 @@
"metadata": {},
"outputs": [],
"source": [
"tweakreg.TweakReg('jcdua3f4q_flc.fits', \n",
"tweakreg.TweakReg('jcdua3f4q_flc.fits',\n",
" imagefindcfg=dict(threshold=50, conv_width=4.5),\n",
" updatehdr=False)"
]
@@ -242,7 +245,7 @@
"outputs": [],
"source": [
"# Read in the SCI1 catalog file\n",
"coords_tab = Table.read('jcdua3f4q_flc_sci1_xy_catalog.coo', \n",
"coords_tab = Table.read('jcdua3f4q_flc_sci1_xy_catalog.coo',\n",
" format='ascii.no_header', names=['X', 'Y', 'Flux', 'ID'])\n",
"\n",
"# Output the first five rows to display the table format\n",
@@ -280,9 +283,10 @@
"metadata": {},
"outputs": [],
"source": [
"# Make the apertures with photutils. \n",
"# One pixel offset corrects for differences between (0,0) and (1,1) origin systems. \n",
"positions = [(x-1., y-1.) for x, y in coords_tab.group_by(['X', 'Y']).groups.keys]\n",
"# Make the apertures with photutils.\n",
"# One pixel offset corrects for differences between (0,0) and (1,1) origin systems.\n",
"positions = [(x-1., y-1.)\n",
" for x, y in coords_tab.group_by(['X', 'Y']).groups.keys]\n",
"apertures = CircularAperture(positions, r=10.)\n",
"\n",
"# Plot a region of the image with pyplot\n",
@@ -366,14 +370,15 @@
" '''\n",
"\n",
" # Read in the SCI1 catalog file with the exclusions\n",
" coords_tab = Table.read('jcdua3f4q_flc_sci1_xy_catalog.coo', \n",
" format='ascii.no_header', \n",
" coords_tab = Table.read('jcdua3f4q_flc_sci1_xy_catalog.coo',\n",
" format='ascii.no_header',\n",
" names=['X', 'Y', 'Flux', 'ID'])\n",
" \n",
"\n",
" # Define apertures for TweakReg identified sources\n",
" positions = [(x-1., y-1.) for x, y in coords_tab.group_by(['X', 'Y']).groups.keys]\n",
" positions = [(x-1., y-1.)\n",
" for x, y in coords_tab.group_by(['X', 'Y']).groups.keys]\n",
" apertures = CircularAperture(positions, r=10.)\n",
" \n",
"\n",
" # Plot\n",
" fig, ax = plt.subplots()\n",
" ax.imshow(data, cmap='Greys', origin='lower', vmin=0, vmax=400)\n",
@@ -448,7 +453,7 @@
"outputs": [],
"source": [
"# tweakreg run with DS9 regions excluded from source detection\n",
"tweakreg.TweakReg('jcdua3f4q_flc.fits', \n",
"tweakreg.TweakReg('jcdua3f4q_flc.fits',\n",
" imagefindcfg=dict(threshold=50, conv_width=4.5),\n",
" exclusions='exclusions.txt',\n",
" updatehdr=False)"
@@ -519,8 +524,8 @@
"metadata": {},
"outputs": [],
"source": [
"# tweakreg run with source detection only inside the DS9 regions \n",
"tweakreg.TweakReg('jcdua3f4q_flc.fits', \n",
"# tweakreg run with source detection only inside the DS9 regions\n",
"tweakreg.TweakReg('jcdua3f4q_flc.fits',\n",
" imagefindcfg=dict(threshold=50, conv_width=4.5),\n",
" exclusions='inclusions.txt',\n",
" updatehdr=False)"
@@ -590,7 +595,7 @@
"outputs": [],
"source": [
"# tweakreg run with a mix of included/excluded DS9 regions\n",
"tweakreg.TweakReg('jcdua3f4q_flc.fits', \n",
"tweakreg.TweakReg('jcdua3f4q_flc.fits',\n",
" imagefindcfg=dict(threshold=50, conv_width=4.5),\n",
" exclusions='inclusions_no_box.txt',\n",
" updatehdr=False)"
@@ -639,7 +644,7 @@
"outputs": [],
"source": [
"# tweakreg run with excluded box first to show order of operations\n",
"tweakreg.TweakReg('jcdua3f4q_flc.fits', \n",
"tweakreg.TweakReg('jcdua3f4q_flc.fits',\n",
" imagefindcfg=dict(threshold=50, conv_width=4.5),\n",
" exclusions='inclusions_no_box_first.txt',\n",
" updatehdr=False)"
