diff --git a/notebooks/COS/Extract/Extract.ipynb b/notebooks/COS/Extract/Extract.ipynb index 9aa1c149c..bdc35c95b 100644 --- a/notebooks/COS/Extract/Extract.ipynb +++ b/notebooks/COS/Extract/Extract.ipynb @@ -51,7 +51,7 @@ "\n", "**This tutorial aims to prepare you to work with the COS data of your choice by walking you through altering the extraction box sizes in the `XTRACTAB`/`_1dx` file to make sure you are extracting the cleanest possible signal from your source and background.** We will demonstrate this in both the NUV and FUV. \n", "\n", - "*Note* that some COS modes which use the FUV detector can be better extracted using the [TWOZONE method](https://hst-docs.stsci.edu/cosdhb/chapter-3-cos-calibration/3-2-pipeline-processing-overview), which is not directly discussed in this Notebook. All COS/NUV modes use the [BOXCAR method](https://hst-docs.stsci.edu/cosdhb/chapter-3-cos-calibration/3-2-pipeline-processing-overview#id-3.2PipelineProcessingOverview-3.2.1OverviewofTWOZONEextraction) discussed in this Notebook.\n", + "*Note* that some COS modes which use the FUV detector can be better extracted using the [`TWOZONE` method](https://hst-docs.stsci.edu/cosdhb/chapter-3-cos-calibration/3-2-pipeline-processing-overview), which is not directly discussed in this Notebook. All COS/NUV modes use the [`BOXCAR` method](https://hst-docs.stsci.edu/cosdhb/chapter-3-cos-calibration/3-2-pipeline-processing-overview#id-3.2PipelineProcessingOverview-3.2.1OverviewofTWOZONEextraction) discussed in this Notebook.\n", "\n", "- For an in-depth manual to working with COS data and a discussion of caveats and user tips, see the [COS Data Handbook](https://hst-docs.stsci.edu/display/COSDHB/).\n", "- For a detailed overview of the COS instrument, see the [COS Instrument Handbook](https://hst-docs.stsci.edu/display/COSIHB/).\n" @@ -120,7 +120,8 @@ "import warnings\n", "np.seterr(divide='ignore') \n", "warnings.filterwarnings('ignore',\n", - " category=np.VisibleDeprecationWarning)" + " category=np.VisibleDeprecationWarning)\n", + "from IPython.display import clear_output" ] }, { @@ -139,13 +140,21 @@ "outputs": [], "source": [ "# These will be important directories for the notebook\n", - "datadir = Path('./data/')\n", - "outputdir = Path('./output/')\n", - "plotsdir = Path('./output/plots/')\n", + "datadir_nuv = Path('./data_nuv/')\n", + "outputdir_nuv = Path('./output_nuv/')\n", + "\n", + "datadir_fuv = Path('./data_fuv/')\n", + "outputdir_fuv = Path('./output_fuv/')\n", + "\n", + "plotsdir = Path('./plots/')\n", "\n", "# Make the directories if they don't exist\n", - "datadir.mkdir(exist_ok=True)\n", - "outputdir.mkdir(exist_ok=True)\n", + "datadir_nuv.mkdir(exist_ok=True)\n", + "outputdir_nuv.mkdir(exist_ok=True)\n", + "\n", + "datadir_fuv.mkdir(exist_ok=True)\n", + "outputdir_fuv.mkdir(exist_ok=True)\n", + "\n", "plotsdir.mkdir(exist_ok=True)" ] }, @@ -155,7 +164,7 @@ "metadata": {}, "source": [ "## And we will need to download the data we wish to work with:\n", - "We choose the exposures with the association obs_id: `LE4B04010` and download all the `_rawtag` data. This dataset happens to be COS/NUV data taken with the G185M grating, observing the star: [LS IV -13 30](https://simbad.u-strasbg.fr/simbad/sim-id?Ident=%402582869&Name=LS%20%20IV%20-13%20%20%2030&submit=submit).\n", + "We choose the exposures with the association obs_id: `LE4B04010` and download all the `_rawtag` data. 
This dataset happens to be COS/NUV data taken with the `G185M` grating, observing the star: [LS IV -13 30](https://simbad.u-strasbg.fr/simbad/sim-id?Ident=%402582869&Name=LS%20%20IV%20-13%20%20%2030&submit=submit).\n", "For more information on downloading COS data, see our [notebook tutorial on downloading COS data](https://github.com/spacetelescope/hst_notebooks/blob/main/notebooks/COS/DataDl/DataDl.ipynb).\n", "\n", "*Note*, we're working with the `_rawtags` because they are smaller files and quicker to download than the `_corrtag` files. However, this workflow translates very well to using `_corrtag` files, as you likely will want to do when working with your actual data. If you wish to use the default corrections converting from raw to corrected `TIME-TAG` data, you may instead download and work with `CORRTAG` files directly." @@ -169,14 +178,14 @@ "outputs": [], "source": [ "# Querying our exposures with the association observation ID LE4B04010\n", - "productlist = Observations.get_product_list(Observations.query_criteria(\n", + "productlist_nuv = Observations.get_product_list(Observations.query_criteria(\n", " obs_id='LE4B04010'))\n", "\n", "# Getting only the RAWTAG, ASN, and X1DSUM files\n", - "masked_productlist = productlist[np.isin(productlist['productSubGroupDescription'], ['RAWTAG', 'ASN', 'X1DSUM'])]\n", + "masked_productlist_nuv = productlist_nuv[np.isin(productlist_nuv['productSubGroupDescription'], ['RAWTAG', 'ASN', 'X1DSUM'])]\n", "\n", "# Now download the files\n", - "Observations.download_products(masked_productlist)" + "Observations.download_products(masked_productlist_nuv)" ] }, { @@ -195,14 +204,14 @@ "outputs": [], "source": [ "# Getting a list of the RAWTAG, ASN, and X1DSUM files\n", - "rawtags = glob.glob('./mastDownload/HST/**/*_rawtag.fits', \n", - " recursive=True)\n", + "rawtags_nuv = glob.glob('./mastDownload/HST/**/*_rawtag.fits', \n", + " recursive=True)\n", "\n", - "asnfile = glob.glob('./mastDownload/HST/**/*_asn.fits', \n", - " recursive=True)[0]\n", + "asnfile_nuv = glob.glob('./mastDownload/HST/**/*_asn.fits', \n", + " recursive=True)[0]\n", "\n", - "old_x1dsum = glob.glob('./mastDownload/HST/**/*_x1dsum.fits', \n", - " recursive=True)[0]" + "old_x1dsum_nuv = glob.glob('./mastDownload/HST/**/*_x1dsum.fits', \n", + " recursive=True)[0]" ] }, { @@ -241,14 +250,14 @@ "outputs": [], "source": [ "# Read the data from the first rawtag\n", - "rawtag = rawtags[0]\n", - "rt_data = Table.read(rawtag, 1)\n", + "rawtag_nuv = rawtags_nuv[0]\n", + "rt_data_nuv = Table.read(rawtag_nuv, 1)\n", "\n", "# Setting up the figure\n", "plt.figure(figsize=(10, 8))\n", "\n", "# Plot the raw counts\n", - "plt.scatter(rt_data['RAWX'], rt_data['RAWY'], \n", + "plt.scatter(rt_data_nuv['RAWX'], rt_data_nuv['RAWY'], \n", " # Size of data points\n", " s=0.1, \n", " # Transparency of data points\n", @@ -315,7 +324,7 @@ "metadata": {}, "outputs": [], "source": [ - "orig_xtractab = fits.getheader(rawtag)['XTRACTAB']" + "orig_xtractab_nuv = fits.getheader(rawtag_nuv)['XTRACTAB']" ] }, { @@ -332,7 +341,7 @@ "lref = \"YOUR_EXISTING_LREF_PATH\"\n", "%env lref YOUR_EXISTING_LREF_PATH\n", "\n", - "orig_xtractab = lref + orig_xtractab.split('$')[1]\n", + "orig_xtractab_nuv = lref + orig_xtractab_nuv.split('$')[1]\n", "```" ] }, @@ -393,7 +402,8 @@ "metadata": {}, "outputs": [], "source": [ - "!crds sync --contexts hst_cos_0352.imap --fetch-references" + "!crds sync --contexts hst_cos_0359.imap --fetch-references\n", + "clear_output()" ] }, { @@ -407,7 +417,7 @@ "lref = os.path.join(crds_path, 
\"references/hst/cos/\")\n", "os.environ[\"lref\"] = lref\n", "\n", - "orig_xtractab = lref + orig_xtractab.split('$')[1]" + "orig_xtractab_nuv = lref + orig_xtractab_nuv.split('$')[1]" ] }, { @@ -565,7 +575,7 @@ "outputs": [], "source": [ "# We'll note the returned values correspond to these extraction boxes\n", - "box_names = ['NUVA', 'NUVB', 'NUVC', 'BKG-1', 'BKG-2']\n", + "box_names_nuv = ['NUVA', 'NUVB', 'NUVC', 'BKG-1', 'BKG-2']\n", "box_names_fuv = ['FUVA', 'FUVB', 'BKG-1A', 'BKG-2A', 'BKG-1B', 'BKG-2B']" ] }, @@ -795,26 +805,26 @@ "outputs": [], "source": [ "# Bounds of all boxes for these conditions\n", - "read_bounds = readxtractab(orig_xtractab, \n", - " grat='G185M', \n", - " cw=1786, \n", - " aper='PSA') \n", + "read_bounds_nuv = readxtractab(orig_xtractab_nuv, \n", + " grat='G185M', \n", + " cw=1786, \n", + " aper='PSA') \n", "\n", "# Set up figure\n", "plt.figure(figsize=(10, 8)) \n", "\n", "# Image of the raw counts\n", - "plt.scatter(rt_data['RAWX'], rt_data['RAWY'], \n", + "plt.scatter(rt_data_nuv['RAWX'], rt_data_nuv['RAWY'], \n", " s=0.1, \n", " alpha=0.1, \n", " c='C0')\n", "\n", "# Add all boxes\n", "for i in range(3):\n", - " plt.axhspan(read_bounds[i][0], read_bounds[i][1],\n", + " plt.axhspan(read_bounds_nuv[i][0], read_bounds_nuv[i][1],\n", " color=\"myr\"[i],\n", " alpha=0.3,\n", - " label=box_names[i])\n", + " label=box_names_nuv[i])\n", "\n", "# Add a legend to the plot\n", "plt.legend(loc='upper right') \n", @@ -849,12 +859,12 @@ "outputs": [], "source": [ "# Run the function to plot the original boxes over the y-axis spectrum\n", - "flat_yspec = collapsey(tagfiles=rawtags, \n", - " xtractab=orig_xtractab, \n", - " raw=True,\n", - " save=True, \n", - " savename=\"orig_xtractab_col_y\", \n", - " fignum=\"1.3\")" + "flat_yspec_nuv = collapsey(tagfiles=rawtags_nuv, \n", + " xtractab=orig_xtractab_nuv, \n", + " raw=True,\n", + " save=True, \n", + " savename=\"orig_xtractab_col_y\", \n", + " fignum=\"1.3\")" ] }, { @@ -1035,24 +1045,24 @@ "source": [ "# The values we set the box params to:\n", "# Centers of the background extract regions\n", - "intercept_dict = {\"bbkg1\": 900., \"bbkg2\": 60., \n", - " # Centers of NUV stripe extract regions\n", - " 'bspa': 195., 'bspb': 285., 'bspc': 415.} \n", + "intercept_dict_nuv = {\"bbkg1\": 900., \"bbkg2\": 60., \n", + " # Centers of NUV stripe extract regions\n", + " 'bspa': 195., 'bspb': 285., 'bspc': 415.} \n", "\n", - "hgt_dict = {'h_a': 41, 'h_b': 51, 'h_c': 61}\n", + "hgt_dict_nuv = {'h_a': 41, 'h_b': 51, 'h_c': 61}\n", "\n", "# Now edit using the edit_xtractab() function;\n", "# We'll change G185M grating for cenwaves 1786, 1817:\n", - "edit_xtractab(xtractab=orig_xtractab, \n", + "edit_xtractab(xtractab=orig_xtractab_nuv, \n", " # We're changing the G185M grating\n", " gratlist=['G185M'], \n", " # We're changing the 1786 and 1817 cenwaves for this grating\n", " cwlist=[1786, 1817], \n", " # New (arbitrary) heights to set boxes to\n", - " h_dict=hgt_dict, \n", + " h_dict=hgt_dict_nuv, \n", " # New y-intercepts for boxes\n", - " b_dict=intercept_dict, \n", - " new_filename='./edit_1dx.fits') " + " b_dict=intercept_dict_nuv, \n", + " new_filename='./edit_1dx_nuv.fits') " ] }, { @@ -1085,44 +1095,44 @@ " sharey=True)\n", "\n", "# Add raw count images to both subplots\n", - "ax0.scatter(rt_data['RAWX'], rt_data['RAWY'], \n", + "ax0.scatter(rt_data_nuv['RAWX'], rt_data_nuv['RAWY'], \n", " s=0.1, \n", " alpha=0.1, \n", " c='C0')\n", "\n", - "ax1.scatter(rt_data['RAWX'], rt_data['RAWY'], \n", + 
"ax1.scatter(rt_data_nuv['RAWX'], rt_data_nuv['RAWY'], \n", " s=0.1, \n", " alpha=0.1, \n", " c='C0')\n", "\n", "# First plot the boxes from the original XTRACTAB \n", - "read_bounds_old = readxtractab(orig_xtractab, \n", - " grat='G185M', \n", - " cw=1786, \n", - " aper='PSA')\n", + "read_bounds_old_nuv = readxtractab(orig_xtractab_nuv, \n", + " grat='G185M', \n", + " cw=1786, \n", + " aper='PSA')\n", "\n", "for i in range(3):\n", - " ax0.axhspan(read_bounds_old[i][0], read_bounds_old[i][1],\n", + " ax0.axhspan(read_bounds_old_nuv[i][0], read_bounds_old_nuv[i][1],\n", " color=\"myr\"[i],\n", " alpha=0.3,\n", - " label=box_names[i]+\"_old\")\n", + " label=box_names_nuv[i]+\"_old\")\n", "\n", "# Hatches will help differentiate between the old and new boxes\n", "plt.rcParams['hatch.linewidth'] = 1\n", "\n", "# Now with the newly edited XTRACTAB\n", - "read_bounds_new = readxtractab('./edit_1dx.fits', \n", - " grat='G185M', \n", - " cw=1786, \n", - " aper='PSA')\n", + "read_bounds_new_nuv = readxtractab('./edit_1dx_nuv.fits', \n", + " grat='G185M', \n", + " cw=1786, \n", + " aper='PSA')\n", " \n", "for i in range(3):\n", - " ax1.axhspan(read_bounds_new[i][0], read_bounds_new[i][1],\n", + " ax1.axhspan(read_bounds_new_nuv[i][0], read_bounds_new_nuv[i][1],\n", " color=\"myr\"[i],\n", " alpha=0.3,\n", " # Specifying the hatch\n", " hatch=\"x*\",\n", - " label=box_names[i]+\"_new\")\n", + " label=box_names_nuv[i]+\"_new\")\n", "\n", "# Now some plot formatting\n", "ax0.legend(loc='upper right')\n", @@ -1164,20 +1174,20 @@ "outputs": [], "source": [ "# Run the function to plot the original boxes over the y-axis spectrum\n", - "flat_yspec = collapsey(tagfiles=rawtags, \n", - " xtractab='./edit_1dx.fits', \n", - " raw=True,\n", - " save=True, \n", - " display=False, \n", - " savename=\"edit_xtractab_col_y\", \n", - " fignum=\"2.2\")\n", + "flat_yspec_nuv = collapsey(tagfiles=rawtags_nuv, \n", + " xtractab='./edit_1dx_nuv.fits', \n", + " raw=True,\n", + " save=True, \n", + " display=False, \n", + " savename=\"edit_xtractab_col_y\", \n", + " fignum=\"2.2\")\n", "\n", "# Now plot both together\n", "fig, (ax0, ax1) = plt.subplots(1, 2,\n", " figsize=(25, 18))\n", "\n", - "ax0.imshow(mpimg.imread('./output/plots/orig_xtractab_col_y.png'))\n", - "ax1.imshow(mpimg.imread('./output/plots/edit_xtractab_col_y.png'))\n", + "ax0.imshow(mpimg.imread('./plots/orig_xtractab_col_y.png'))\n", + "ax1.imshow(mpimg.imread('./plots/edit_xtractab_col_y.png'))\n", "\n", "ax0.axis('off')\n", "ax1.axis('off')\n", @@ -1216,18 +1226,18 @@ "outputs": [], "source": [ "try: \n", - " for rawtag in rawtags:\n", - " os.rename(rawtag, datadir / os.path.basename(rawtag))\n", + " for rawtag in rawtags_nuv:\n", + " os.rename(rawtag, datadir_nuv / os.path.basename(rawtag))\n", "except FileNotFoundError:\n", " print('No files')\n", "\n", "try: \n", - " os.rename(asnfile, datadir / os.path.basename(asnfile))\n", + " os.rename(asnfile_nuv, datadir_nuv / os.path.basename(asnfile_nuv))\n", "except FileNotFoundError:\n", " print('No files')\n", " \n", - "rawtags = glob.glob(str(datadir / '*rawtag*'))\n", - "asnfile = glob.glob(str(datadir / '*asn*'))[0]" + "rawtags_nuv = glob.glob(str(datadir_nuv / '*rawtag*'))\n", + "asnfile_nuv = glob.glob(str(datadir_nuv / '*asn*'))[0]" ] }, { @@ -1237,13 +1247,13 @@ "metadata": {}, "outputs": [], "source": [ - "for rawtag in rawtags:\n", + "for rawtag in rawtags_nuv:\n", " print(\"changing XTRACTAB for \", os.path.basename(rawtag))\n", " print(\"\\tOriginally: \", fits.getheader(rawtag)['XTRACTAB'])\n", 
"\n", " fits.setval(filename=rawtag, \n", " keyword='XTRACTAB', \n", - " value='./edit_1dx.fits')\n", + " value='./edit_1dx_nuv.fits')\n", " \n", " print(\"\\tNow set to: \", fits.getheader(rawtag)['XTRACTAB'])" ] @@ -1273,13 +1283,13 @@ "# Above ^ capture the output and save it in the next cell\n", "\n", "# Checking if there's output already\n", - "if os.path.exists(outputdir / \"calcos_run1\"):\n", - " shutil.rmtree(outputdir / \"calcos_run1\")\n", + "if os.path.exists(outputdir_nuv / \"calcos_nuv_run1\"):\n", + " shutil.rmtree(outputdir_nuv / \"calcos_nuv_run1\")\n", "\n", "# This line actually runs the pipeline:\n", - "calcos.calcos(asnfile, \n", + "calcos.calcos(asnfile_nuv, \n", " verbosity=1, \n", - " outdir=str(outputdir / \"calcos_run1\"))" + " outdir=str(outputdir_nuv / \"calcos_nuv_run1\"))" ] }, { @@ -1297,7 +1307,7 @@ "metadata": {}, "outputs": [], "source": [ - "with open(str(outputdir/'output_calcos_1.txt'), 'w') as f: \n", + "with open(str(outputdir_nuv/'output_calcos_nuv_1.txt'), 'w') as f: \n", " f.write(cap.stdout)" ] }, @@ -1325,55 +1335,65 @@ "gs = fig.add_gridspec(nrows=5, \n", " ncols=3) \n", "\n", + "new_x1dsum_nuv = './output_nuv/calcos_nuv_run1/le4b04010_x1dsum.fits'\n", + "\n", "# Going through each NUV stripe\n", "for i in range(3):\n", " # Creating two subplots, one with the X1DSUM data and the other a residual plot\n", " ax0 = fig.add_subplot(gs[0:4, i])\n", " ax1 = fig.add_subplot(gs[4:5, i])\n", "\n", - " # Getting the newly created (edited) X1DSUM's data\n", - " new_wvln, new_flux = Table.read('./output/calcos_run1/le4b04010_x1dsum.fits')[i]['WAVELENGTH', 'FLUX']\n", - " # Getting the old X1DSUM's data\n", - " old_wvln, old_flux, seg = Table.read(old_x1dsum)[i]['WAVELENGTH', 'FLUX', 'SEGMENT']\n", - " \n", - " # Interpolate the new wvln onto the old wvln's sampling:\n", - " new_flux_interp = interp1d(x=new_wvln, y=new_flux, \n", - " fill_value=\"extrapolate\")(old_wvln)\n", - "\n", - " # Print max difference to user:\n", - " print(f\"Stripe {seg} differs by up to: \\\n", - " {100 * max(new_flux - old_flux)/np.mean(abs(old_flux)):.3f}%\")\n", - "\n", - " # Plotting upper panel\n", - " ax0.plot(new_wvln, new_flux, \n", - " linewidth=0.5, \n", - " label='$New$ extracted spectrum')\n", - " ax0.plot(old_wvln, old_flux, \n", - " linewidth=0.5, \n", - " label='$Original$ extracted spectrum')\n", - "\n", - " # Plotting lower panel\n", - " ax1.plot(new_wvln, old_flux - new_flux_interp, \n", - " linewidth=0.5, \n", - " label='Residual')\n", - "\n", - " # Formatting:\n", - " ax0.set_title(f\"Segment {seg}\", \n", - " fontsize=20)\n", - "\n", - " ax0.set_xticks([])\n", - "\n", - " # Adding a legend\n", - " ax0.legend(loc='lower center')\n", - " ax1.legend(loc='lower center')\n", - "\n", - " # Add axis labels to the plot\n", - " if i == 0: \n", - " ax0.set_ylabel(r\"Flux\\n[$erg\\ \\AA^{-1}\\ cm^{-2}\\ s^{-1}$]\", \n", + " with fits.open(new_x1dsum_nuv) as new_hdul_nuv, fits.open(old_x1dsum_nuv) as old_hdul_nuv:\n", + " # Getting the flux and wl for old and new x1dsums\n", + " new_nuv_data = new_hdul_nuv[1].data[i]\n", + " old_nuv_data = old_hdul_nuv[1].data[i]\n", + " \n", + " seg_nuv = old_nuv_data[\"SEGMENT\"]\n", + "\n", + " old_wvln_nuv = old_nuv_data[\"WAVELENGTH\"]\n", + " old_flux_nuv = old_nuv_data[\"FLUX\"]\n", + "\n", + " new_wvln_nuv = new_nuv_data[\"WAVELENGTH\"]\n", + " new_flux_nuv = new_nuv_data[\"FLUX\"]\n", + "\n", + " # Interpolate the new wvln onto the old wvln's sampling:\n", + " new_flux_interp_nuv = interp1d(x=new_wvln_nuv, y=new_flux_nuv, \n", + 
" fill_value=\"extrapolate\")(old_wvln_nuv)\n", + " \n", + " # Print max difference to user:\n", + " print(f\"Stripe {seg_nuv} differs by up to: \\\n", + " {100 * max(new_flux_nuv - old_flux_nuv)/np.mean(abs(old_flux_nuv)):.3f}%\")\n", + "\n", + " # Plotting upper panel\n", + " ax0.plot(new_wvln_nuv, new_flux_nuv, \n", + " linewidth=0.5, \n", + " label='$New$ extracted spectrum')\n", + " ax0.plot(old_wvln_nuv, old_flux_nuv, \n", + " linewidth=0.5, \n", + " label='$Original$ extracted spectrum')\n", + "\n", + " # Plotting lower panel\n", + " ax1.plot(new_wvln_nuv, old_flux_nuv - new_flux_interp_nuv, \n", + " linewidth=0.5, \n", + " label='Residual')\n", + "\n", + " # Formatting:\n", + " ax0.set_title(f\"Segment {seg_nuv}\", \n", + " fontsize=20)\n", + "\n", + " ax0.set_xticks([])\n", + "\n", + " # Adding a legend\n", + " ax0.legend(loc='lower center')\n", + " ax1.legend(loc='lower center')\n", + "\n", + " # Add axis labels to the plot\n", + " if i == 0: \n", + " ax0.set_ylabel(r\"Flux\\n[$erg\\ \\AA^{-1}\\ cm^{-2}\\ s^{-1}$]\", \n", + " fontsize=20)\n", + " if i == 1:\n", + " plt.xlabel(\"Wavelength\", \n", " fontsize=20)\n", - " if i == 1:\n", - " plt.xlabel(\"Wavelength\", \n", - " fontsize=20)\n", "\n", "plt.suptitle(\"Fig 3.1\\nComparing the old and new extracted spectra for each segment\", \n", " fontsize=25)\n", @@ -1413,16 +1433,16 @@ "outputs": [], "source": [ "# Querying the PID's data\n", - "pl = Observations.get_product_list(\n", + "pl_fuv = Observations.get_product_list(\n", " Observations.query_criteria(\n", " proposal_id=15869, \n", " obs_id='LE4B01040'))\n", "\n", "# Getting only the RAWTAG, ASN, and X1DSUM files\n", - "masked_pl = pl[np.isin(pl['productSubGroupDescription'], ['RAWTAG_A', 'RAWTAG_B', 'ASN', 'X1DSUM'])]\n", + "masked_pl_fuv = pl_fuv[np.isin(pl_fuv['productSubGroupDescription'], ['RAWTAG_A', 'RAWTAG_B', 'ASN', 'X1DSUM'])]\n", "\n", "# Now download the files\n", - "Observations.download_products(masked_pl)" + "Observations.download_products(masked_pl_fuv)" ] }, { @@ -1448,11 +1468,11 @@ "\n", "rawtags_ab = rawtags_a + rawtags_b\n", "\n", - "asnfile = glob.glob('./mastDownload/HST/le4b01040/**/*_asn.fits', \n", - " recursive=True)[0]\n", + "asnfile_fuv = glob.glob('./mastDownload/HST/le4b01040/**/*_asn.fits', \n", + " recursive=True)[0]\n", "\n", - "old_x1dsum = glob.glob('./mastDownload/HST/le4b01040/**/*_x1dsum.fits', \n", - " recursive=True)[0]" + "old_x1dsum_fuv = glob.glob('./mastDownload/HST/le4b01040/**/*_x1dsum.fits', \n", + " recursive=True)[0]" ] }, { @@ -1471,24 +1491,21 @@ "outputs": [], "source": [ "# Aggregate all this FUV data, except the calibrated x1dsum\n", - "fdatadir = Path('./fuv_data/') \n", - "fdatadir.mkdir(exist_ok=True)\n", - "\n", - "[os.rename(rta, fdatadir / os.path.basename(rta)) for rta in rawtags_a]\n", - "[os.rename(rtb, fdatadir / os.path.basename(rtb)) for rtb in rawtags_b]\n", + "[os.rename(rta, datadir_fuv / os.path.basename(rta)) for rta in rawtags_a]\n", + "[os.rename(rtb, datadir_fuv / os.path.basename(rtb)) for rtb in rawtags_b]\n", "\n", - "os.rename(asnfile, fdatadir / os.path.basename(asnfile))\n", + "os.rename(asnfile_fuv, datadir_fuv / os.path.basename(asnfile_fuv))\n", "\n", "# Re-find all the data now that it's moved\n", - "rawtags_a = glob.glob(str(fdatadir/'*_rawtag_a.fits'), \n", + "rawtags_a = glob.glob(str(datadir_fuv/'*_rawtag_a.fits'), \n", " recursive=True)\n", - "rawtags_b = glob.glob(str(fdatadir/'*_rawtag_b.fits'), \n", + "rawtags_b = glob.glob(str(datadir_fuv/'*_rawtag_b.fits'), \n", " 
recursive=True)\n", "\n", "rawtags_ab = rawtags_a + rawtags_b\n", "\n", - "asnfile = glob.glob(str(fdatadir/'*_asn.fits'), \n", - " recursive=True)[0]" + "asnfile_fuv = glob.glob(str(datadir_fuv/'*_asn.fits'), \n", + " recursive=True)[0]" ] }, { @@ -1599,10 +1616,10 @@ "# Getting our XTRACTAB reference file for the FUV\n", "fuv_xtractab = lref + correct_fuv_1dx\n", "\n", - "read_bounds = readxtractab(fuv_xtractab,\n", - " grat='G160M',\n", - " cw=1533,\n", - " aper='PSA')\n", + "read_bounds_fuv = readxtractab(fuv_xtractab,\n", + " grat='G160M',\n", + " cw=1533,\n", + " aper='PSA')\n", "\n", "# Setting up a figure with two subplots, top being FUVA and bottom being FUVB\n", "fig, (ax0, ax1) = plt.subplots(2, 1,\n", @@ -1625,7 +1642,7 @@ "for i in range(2):\n", " # If it's the FUVB box:\n", " if 'A' not in box_names_fuv[i]:\n", - " ax0.axhspan(read_bounds[i][0], read_bounds[i][1],\n", + " ax0.axhspan(read_bounds_fuv[i][0], read_bounds_fuv[i][1],\n", " color='cm'[i],\n", " hatch='/+x+x'[i],\n", " alpha=0.3,\n", @@ -1633,7 +1650,7 @@ "\n", " # If it's the FUVA box\n", " else:\n", - " ax1.axhspan(read_bounds[i][0], read_bounds[i][1],\n", + " ax1.axhspan(read_bounds_fuv[i][0], read_bounds_fuv[i][1],\n", " color='cm'[i],\n", " hatch='/+x+x'[i],\n", " alpha=0.3,\n", @@ -1715,10 +1732,10 @@ "outputs": [], "source": [ "# Original boxes\n", - "read_bounds = readxtractab(fuv_xtractab,\n", - " grat='G160M',\n", - " cw=1533,\n", - " aper='PSA')\n", + "read_bounds_fuv = readxtractab(fuv_xtractab,\n", + " grat='G160M',\n", + " cw=1533,\n", + " aper='PSA')\n", "\n", "# Edited boxes\n", "read_bounds_fuv_edit = readxtractab('./edit_fuv_1dx.fits',\n", @@ -1759,7 +1776,7 @@ " c='C0')\n", "\n", "# Add all the boxes onto each subplots' image\n", - "for i, (oldbox, newbox, bname) in enumerate(zip(read_bounds[:2], read_bounds_fuv_edit[:2], box_names_fuv[:2])):\n", + "for i, (oldbox, newbox, bname) in enumerate(zip(read_bounds_fuv[:2], read_bounds_fuv_edit[:2], box_names_fuv[:2])):\n", " # FUVA in ax0, ax1 (top left/right)\n", " if 'A' not in bname:\n", " ax0.axhspan(oldbox[0], oldbox[1],\n", @@ -1856,7 +1873,7 @@ " 'XTRACTAB': './edit_fuv_1dx.fits'}\n", "\n", "# Change this to True if you want to see the changes\n", - "verbose = False\n", + "verbose = True\n", "\n", "# Loop through the rawtag A/B files\n", "for rawtag in rawtags_ab:\n", @@ -1893,13 +1910,13 @@ "# Above ^ capture the output and save it in the next cell\n", "\n", "# Checking if there's output already\n", - "if os.path.exists(outputdir / \"calcos_fuv_run1\"):\n", - " shutil.rmtree(outputdir / \"calcos_fuv_run1\")\n", + "if os.path.exists(outputdir_fuv / \"calcos_fuv_run1\"):\n", + " shutil.rmtree(outputdir_fuv / \"calcos_fuv_run1\")\n", "\n", "# This line actually runs the pipeline:\n", - "calcos.calcos(asnfile,\n", + "calcos.calcos(asnfile_fuv,\n", " verbosity=1,\n", - " outdir=str(outputdir / \"calcos_fuv_run1\"))" + " outdir=str(outputdir_fuv / \"calcos_fuv_run1\"))" ] }, { @@ -1910,7 +1927,7 @@ "outputs": [], "source": [ "# This file now contains the output of the last cell\n", - "with open(str(outputdir/'output_calcos_fuv_1.txt'), 'w') as f:\n", + "with open(str(outputdir_fuv/'output_calcos_fuv_1.txt'), 'w') as f:\n", " f.write(cap.stdout)" ] }, @@ -1937,58 +1954,69 @@ "gs = fig.add_gridspec(nrows=5,\n", " ncols=2)\n", "\n", + "new_x1dsum_fuv = './output_fuv/calcos_fuv_run1/le4b01040_x1dsum.fits'\n", + "\n", "# Use i,j to plot FUVA on the right, not the left, by inverting subplot index\n", "for i, j in zip([0, 1], [1, 0]):\n", " ax0 = 
fig.add_subplot(gs[0:4, j])\n", " ax1 = fig.add_subplot(gs[4:5, j])\n", "\n", - " # Get the new and old flux/wavelength data to be plotted\n", - " new_wvln, new_flux = Table.read('./output/calcos_fuv_run1/le4b01040_x1dsum.fits')[i]['WAVELENGTH', 'FLUX']\n", - " old_wvln, old_flux, seg = Table.read(old_x1dsum)[i]['WAVELENGTH', 'FLUX', 'SEGMENT']\n", - "\n", - " # Interpolate the new wvln onto the old wvln's sampling:\n", - " new_flux_interp = interp1d(x=new_wvln, y=new_flux,\n", - " fill_value=\"extrapolate\")(old_wvln)\n", - "\n", - " # Print the max difference\n", - " print(f\"Stripe {seg} differs by up to: \\\n", - " {max(new_flux - old_flux)/np.mean(abs(old_flux)):.3f}%\")\n", - "\n", - " # Plotting the upper panel\n", - " ax0.plot(new_wvln, new_flux,\n", - " alpha=1,\n", - " linewidth=0.5,\n", - " c='C1',\n", - " label='$New$ extracted spectrum')\n", - "\n", - " ax0.plot(old_wvln, old_flux,\n", - " linewidth=1,\n", - " c='C0',\n", - " linestyle='dotted',\n", - " alpha=0.75,\n", - " label='$Original$ extracted spectrum')\n", - "\n", - " # Plotting the lower panel\n", - " ax1.plot(new_wvln, old_flux - new_flux_interp,\n", - " linewidth=0.5,\n", - " label='Residual')\n", - "\n", - " # Some formatting:\n", - " ax0.set_title(f\"Segment {seg}\",\n", - " fontsize=20)\n", - "\n", - " ax0.set_xticks([])\n", - "\n", - " ax0.legend(loc='lower center')\n", - " ax1.legend(loc='lower center')\n", - "\n", - " # Add axis labels to the plot\n", - " if i == 0:\n", - " ax0.set_ylabel(r\"Flux\\n[$erg\\ \\AA^{-1}\\ cm^{-2}\\ s^{-1}$]\",\n", + " with fits.open(new_x1dsum_fuv) as new_hdul_fuv, fits.open(old_x1dsum_fuv) as old_hdul_fuv:\n", + " # Getting the flux and wl for old and new x1dsums\n", + " new_fuv_data = new_hdul_fuv[1].data[i]\n", + " old_fuv_data = old_hdul_fuv[1].data[i]\n", + " \n", + " seg_fuv = old_fuv_data[\"SEGMENT\"]\n", + "\n", + " old_wvln_fuv = old_fuv_data[\"WAVELENGTH\"]\n", + " old_flux_fuv = old_fuv_data[\"FLUX\"]\n", + "\n", + " new_wvln_fuv = new_fuv_data[\"WAVELENGTH\"]\n", + " new_flux_fuv = new_fuv_data[\"FLUX\"]\n", + "\n", + " # Interpolate the new wvln onto the old wvln's sampling:\n", + " new_flux_interp_fuv = interp1d(x=new_wvln_fuv, y=new_flux_fuv,\n", + " fill_value=\"extrapolate\")(old_wvln_fuv)\n", + " \n", + " # Print the max difference\n", + " print(f\"Stripe {seg_fuv} differs by up to: \\\n", + " {max(new_flux_fuv - old_flux_fuv)/np.mean(abs(old_flux_fuv)):.3f}%\")\n", + "\n", + " # Plotting the upper panel\n", + " ax0.plot(new_wvln_fuv, new_flux_fuv,\n", + " alpha=1,\n", + " linewidth=0.5,\n", + " c='C1',\n", + " label='$New$ extracted spectrum')\n", + "\n", + " ax0.plot(old_wvln_fuv, old_flux_fuv,\n", + " linewidth=1,\n", + " c='C0',\n", + " linestyle='dotted',\n", + " alpha=0.75,\n", + " label='$Original$ extracted spectrum')\n", + "\n", + " # Plotting the lower panel\n", + " ax1.plot(new_wvln_fuv, old_flux_fuv - new_flux_interp_fuv,\n", + " linewidth=0.5,\n", + " label='Residual')\n", + "\n", + " # Some formatting:\n", + " ax0.set_title(f\"Segment {seg_fuv}\",\n", + " fontsize=20)\n", + "\n", + " ax0.set_xticks([])\n", + "\n", + " ax0.legend(loc='lower center')\n", + " ax1.legend(loc='lower center')\n", + "\n", + " # Add axis labels to the plot\n", + " if i == 0:\n", + " ax0.set_ylabel(r\"Flux\\n[$erg\\ \\AA^{-1}\\ cm^{-2}\\ s^{-1}$]\",\n", + " fontsize=20)\n", + " if i == 1:\n", + " plt.xlabel(\"Wavelength\",\n", " fontsize=20)\n", - " if i == 1:\n", - " plt.xlabel(\"Wavelength\",\n", - " fontsize=20)\n", "\n", "plt.suptitle(\"Fig 4.4\\nComparing the old and 
new extracted spectra for each segment in the FUV\",\n", " fontsize=25)\n", @@ -2073,7 +2101,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.0" + "version": "3.12.7" } }, "nbformat": 4, diff --git a/notebooks/COS/Extract/requirements.txt b/notebooks/COS/Extract/requirements.txt index 01aaafed6..10ab6a5de 100644 --- a/notebooks/COS/Extract/requirements.txt +++ b/notebooks/COS/Extract/requirements.txt @@ -1,7 +1,7 @@ -astropy==5.3.4 -astroquery==0.4.6 -calcos==3.4.7 -matplotlib==3.7.0 -numpy==1.25.2 -scipy==1.10.0 -crds==11.17.13 \ No newline at end of file +astropy>=5.3.4 +astroquery>=0.4.6 +calcos>=3.4.8 +matplotlib>=3.7.0 +numpy>=1.25.2 +scipy>=1.10.0 +crds>=11.17.13
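
A quick way to sanity-check the edited NUV extraction boxes that the patched notebook writes out, independently of its own `readxtractab()` helper, is to read the `_1dx` table directly with astropy. The sketch below is not part of the patch; the file name `./edit_1dx_nuv.fits` and the selection (G185M / cenwave 1786 / PSA) come from the notebook, while the BOXCAR `XTRACTAB` column names (`SEGMENT`, `OPT_ELEM`, `CENWAVE`, `APERTURE`, `B_SPEC`, `HEIGHT`, `B_BKG1`, `B_BKG2`) are assumed and should be checked against your reference file.

    # Minimal sketch: inspect the edited NUV extraction boxes written by the notebook.
    # Assumes ./edit_1dx_nuv.fits exists and uses the standard BOXCAR XTRACTAB columns.
    from astropy.table import Table

    xtractab = Table.read("./edit_1dx_nuv.fits", hdu=1)

    # Keep only the rows the notebook edits, e.g. G185M grating, cenwave 1786, PSA aperture
    mask = ((xtractab["OPT_ELEM"] == "G185M")
            & (xtractab["CENWAVE"] == 1786)
            & (xtractab["APERTURE"] == "PSA"))

    for row in xtractab[mask]:
        print(f'{row["SEGMENT"]}: center={row["B_SPEC"]:.1f}, '
              f'height={row["HEIGHT"]}, '
              f'bkg centers={row["B_BKG1"]:.1f}, {row["B_BKG2"]:.1f}')

If the edit worked as intended, the printed stripe centers and heights should correspond to the values the notebook sets in `intercept_dict_nuv` and `hgt_dict_nuv`.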