diff --git a/docs/source/common_monitors.rst b/docs/source/common_monitors.rst new file mode 100644 index 000000000..21d98a7d7 --- /dev/null +++ b/docs/source/common_monitors.rst @@ -0,0 +1,9 @@ +*************** +common_monitors +*************** + +dark_monitor.py +--------------- +.. automodule:: jwql.instrument_monitors.common_monitors.dark_monitor + :members: + :undoc-members: \ No newline at end of file diff --git a/docs/source/index.rst b/docs/source/index.rst index af34cf5d8..00e9c25c3 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -23,9 +23,11 @@ API documentation :maxdepth: 1 :caption: Contents: + common_monitors.rst database.rst edb.rst jwql_monitors.rst + instrument_monitors.rst tests.rst utils.rst website.rst diff --git a/docs/source/instrument_monitors.rst b/docs/source/instrument_monitors.rst new file mode 100644 index 000000000..284f2cd23 --- /dev/null +++ b/docs/source/instrument_monitors.rst @@ -0,0 +1,9 @@ +******************* +instrument_monitors +******************* + +pipeline_tools.py +----------------- +.. automodule:: jwql.instrument_monitors.pipeline_tools + :members: + :undoc-members: \ No newline at end of file diff --git a/docs/source/tests.rst b/docs/source/tests.rst index f94998d64..cfab32e86 100644 --- a/docs/source/tests.rst +++ b/docs/source/tests.rst @@ -8,12 +8,30 @@ test_api_views.py :members: :undoc-members: +test_calculations.py +-------------------- +.. automodule:: jwql.tests.test_calculations + :members: + :undoc-members: + +test_dark_monitor.py +-------------------- +.. automodule:: jwql.tests.test_dark_monitor + :members: + :undoc-members: + test_edb_interface.py --------------------- .. automodule:: jwql.tests.test_edb_interface :members: :undoc-members: +test_instrument_properties.py +----------------------------- +.. automodule:: jwql.tests.test_instrument_properties + :members: + :undoc-members: + test_loading_times.py -------------------- .. 
automodule:: jwql.tests.test_loading_times @@ -32,6 +50,12 @@ test_permissions.py :members: :undoc-members: +test_pipeline_tools.py +---------------------- +.. automodule:: jwql.tests.test_pipeline_tools + :members: + :undoc-members: + test_plotting.py ---------------- .. automodule:: jwql.tests.test_plotting diff --git a/docs/source/utils.rst b/docs/source/utils.rst index bdc718d7f..e219ef3bf 100644 --- a/docs/source/utils.rst +++ b/docs/source/utils.rst @@ -2,12 +2,24 @@ utils ***** +calculations.py +--------------- +.. automodule:: jwql.utils.calculations + :members: + :undoc-members: + constants.py ------------ .. automodule:: jwql.utils.constants :members: :undoc-members: +instrument_properties.py +------------------------ +.. automodule:: jwql.utils.instrument_properties + :members: + :undoc-members: + logging_functions.py -------------------- .. automodule:: jwql.utils.logging_functions diff --git a/environment.yml b/environment.yml index 6ab4a2973..3bd509e1a 100644 --- a/environment.yml +++ b/environment.yml @@ -3,14 +3,15 @@ channels: - http://ssb.stsci.edu/astroconda-dev - defaults dependencies: -- asdf>=2.3.0 -- astropy +- asdf=2.3.1 +- astropy>=3.1.2 - astroquery=0.3.9 - bokeh=1.1.0 +- crds>=7.2.7 - django=2.1.7 - ipython=7.3.0 - jinja2=2.10 -- jwst +- jwst=0.13.1 - matplotlib=3.0.2 - numpy=1.16.2 - numpydoc=0.8.0 @@ -29,4 +30,5 @@ dependencies: - pip: - authlib==0.10 - codecov==2.0.15 + - pysiaf==0.2.5 - sphinx-automodapi==0.10 diff --git a/jwql/database/database_interface.py b/jwql/database/database_interface.py index b360ad664..557223b6c 100644 --- a/jwql/database/database_interface.py +++ b/jwql/database/database_interface.py @@ -70,12 +70,10 @@ from sqlalchemy import Enum from sqlalchemy import Float from sqlalchemy import Integer -from sqlalchemy import Float from sqlalchemy import MetaData from sqlalchemy import String from sqlalchemy import Time from sqlalchemy import UniqueConstraint -from sqlalchemy.dialects import postgresql from 
sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker from sqlalchemy.orm.query import Query @@ -137,8 +135,8 @@ def load_connection(connection_string): return session, base, engine, meta -# Import a global session. If running from readthedocs, pass a dummy connection string -if 'build' and 'project' in socket.gethostname(): +# Import a global session. If running from readthedocs or Jenkins, pass a dummy connection string +if ('build' in socket.gethostname() and 'project' in socket.gethostname()) or os.path.expanduser('~') == '/home/jenkins': dummy_connection_string = 'postgresql+psycopg2://account:password@hostname:0000/db_name' session, base, engine, meta = load_connection(dummy_connection_string) else: @@ -256,7 +254,7 @@ class Monitor(base): start_time = Column(DateTime, nullable=False) end_time = Column(DateTime, nullable=True) status = Column(Enum('SUCESS', 'FAILURE', name='monitor_status'), nullable=True) - affected_tables = Column(postgresql.ARRAY(String, dimensions=1), nullable=True) + affected_tables = Column(ARRAY(String, dimensions=1), nullable=True) log_file = Column(String(), nullable=False) @@ -287,11 +285,14 @@ def get_monitor_columns(data_dict, table_name): 'date': Date(), 'time': Time(), 'datetime': DateTime, - 'bool': Boolean} + 'bool': Boolean + } # Get the data from the table definition file + instrument = table_name.split('_')[0] table_definition_file = os.path.join(os.path.split(__file__)[0], 'monitor_table_definitions', + instrument.lower(), '{}.txt'.format(table_name)) with open(table_definition_file, 'r') as f: data = f.readlines() @@ -302,9 +303,20 @@ def get_monitor_columns(data_dict, table_name): column_name = column_definition[0] data_type = column_definition[1] + if 'array' in data_type: + dtype, _a, dimension = data_type.split('_') + dimension = int(dimension[0]) + array = True + else: + dtype = data_type + array = False + # Create a new column - if data_type in list(data_type_dict.keys()): - data_dict[column_name.lower()]
= Column(data_type_dict[data_type]) + if dtype in list(data_type_dict.keys()): + if array: + data_dict[column_name.lower()] = Column(ARRAY(data_type_dict[dtype], dimensions=dimension)) + else: + data_dict[column_name.lower()] = Column(data_type_dict[dtype]) else: raise ValueError('Unrecognized column type: {}:{}'.format(column_name, data_type)) @@ -355,7 +367,7 @@ class : obj # Columns specific to all monitor ORMs data_dict['id'] = Column(Integer, primary_key=True, nullable=False) data_dict['entry_date'] = Column(DateTime, unique=True, nullable=False, default=datetime.now()) - data_dict['__table_args__'] = (UniqueConstraint('id', 'entry_date', name='monitor_uc'),) + data_dict['__table_args__'] = (UniqueConstraint('id', 'entry_date', name='{}_uc'.format(data_dict['__tablename__'])),) # Get monitor-specific columns data_dict = get_monitor_columns(data_dict, data_dict['__tablename__']) @@ -365,10 +377,23 @@ class : obj return type(class_name, (base,), data_dict) + # Create tables from ORM factory -# NIRCamDarkQueries = monitor_orm_factory('nircam_dark_queries') -# NIRCamDarkPixelStats = monitor_orm_factory('nircam_dark_pixel_stats') -# NIRCamDarkDarkCurrent = monitor_orm_factory('nircam_dark_dark_current') +NIRCamDarkQueryHistory = monitor_orm_factory('nircam_dark_query_history') +NIRCamDarkPixelStats = monitor_orm_factory('nircam_dark_pixel_stats') +NIRCamDarkDarkCurrent = monitor_orm_factory('nircam_dark_dark_current') +NIRISSDarkQueryHistory = monitor_orm_factory('niriss_dark_query_history') +NIRISSDarkPixelStats = monitor_orm_factory('niriss_dark_pixel_stats') +NIRISSDarkDarkCurrent = monitor_orm_factory('niriss_dark_dark_current') +NIRSpecDarkQueryHistory = monitor_orm_factory('nirspec_dark_query_history') +NIRSpecDarkPixelStats = monitor_orm_factory('nirspec_dark_pixel_stats') +NIRSpecDarkDarkCurrent = monitor_orm_factory('nirspec_dark_dark_current') +MIRIDarkQueryHistory = monitor_orm_factory('miri_dark_query_history') +MIRIDarkPixelStats = 
monitor_orm_factory('miri_dark_pixel_stats') +MIRIDarkDarkCurrent = monitor_orm_factory('miri_dark_dark_current') +FGSDarkQueryHistory = monitor_orm_factory('fgs_dark_query_history') +FGSDarkPixelStats = monitor_orm_factory('fgs_dark_pixel_stats') +FGSDarkDarkCurrent = monitor_orm_factory('fgs_dark_dark_current') if __name__ == '__main__': diff --git a/jwql/database/monitor_table_definitions/fgs/fgs_dark_dark_current.txt b/jwql/database/monitor_table_definitions/fgs/fgs_dark_dark_current.txt new file mode 100644 index 000000000..cdd2d681d --- /dev/null +++ b/jwql/database/monitor_table_definitions/fgs/fgs_dark_dark_current.txt @@ -0,0 +1,19 @@ +APERTURE, string +AMPLIFIER, string +MEAN, float +STDEV, float +SOURCE_FILES, string_array_1d +GAUSS_AMPLITUDE, float_array_1d +GUASS_PEAK, float_array_1d +GAUSS_WIDTH, float_array_1d +GAUSS_CHISQ, float +DOUBLE_GAUSS_AMPLITUDE1, float_array_1d +DOUBLE_GAUSS_PEAK1, float_array_1d +DOUBLE_GAUSS_WIDTH1, float_array_1d +DOUBLE_GAUSS_AMPLITUDE2, float_array_1d +DOUBLE_GAUSS_PEAK2, float_array_1d +DOUBLE_GAUSS_WIDTH2, float_array_1d +DOUBLE_GAUSS_CHISQ, float +MEAN_DARK_IMAGE_FILE, string +HIST_DARK_VALUES, float_array_1d +HIST_AMPLITUDES, float_array_1d diff --git a/jwql/database/monitor_table_definitions/fgs/fgs_dark_pixel_stats.txt b/jwql/database/monitor_table_definitions/fgs/fgs_dark_pixel_stats.txt new file mode 100644 index 000000000..bf2c06c48 --- /dev/null +++ b/jwql/database/monitor_table_definitions/fgs/fgs_dark_pixel_stats.txt @@ -0,0 +1,7 @@ +DETECTOR, string +X_COORD, integer_array_1d +Y_COORD, integer_array_1d +TYPE, string +SOURCE_FILES, string_array_1d +MEAN_DARK_IMAGE_FILE, string +BASELINE_FILE, string diff --git a/jwql/database/monitor_table_definitions/fgs/fgs_dark_query_history.txt b/jwql/database/monitor_table_definitions/fgs/fgs_dark_query_history.txt new file mode 100644 index 000000000..d0c493354 --- /dev/null +++ b/jwql/database/monitor_table_definitions/fgs/fgs_dark_query_history.txt @@ -0,0 +1,6 @@ 
+INSTRUMENT, string +APERTURE, string +START_TIME_MJD, float +END_TIME_MJD, float +FILES_FOUND, integer +RUN_MONITOR, bool diff --git a/jwql/database/monitor_table_definitions/miri/miri_dark_dark_current.txt b/jwql/database/monitor_table_definitions/miri/miri_dark_dark_current.txt new file mode 100644 index 000000000..cdd2d681d --- /dev/null +++ b/jwql/database/monitor_table_definitions/miri/miri_dark_dark_current.txt @@ -0,0 +1,19 @@ +APERTURE, string +AMPLIFIER, string +MEAN, float +STDEV, float +SOURCE_FILES, string_array_1d +GAUSS_AMPLITUDE, float_array_1d +GUASS_PEAK, float_array_1d +GAUSS_WIDTH, float_array_1d +GAUSS_CHISQ, float +DOUBLE_GAUSS_AMPLITUDE1, float_array_1d +DOUBLE_GAUSS_PEAK1, float_array_1d +DOUBLE_GAUSS_WIDTH1, float_array_1d +DOUBLE_GAUSS_AMPLITUDE2, float_array_1d +DOUBLE_GAUSS_PEAK2, float_array_1d +DOUBLE_GAUSS_WIDTH2, float_array_1d +DOUBLE_GAUSS_CHISQ, float +MEAN_DARK_IMAGE_FILE, string +HIST_DARK_VALUES, float_array_1d +HIST_AMPLITUDES, float_array_1d diff --git a/jwql/database/monitor_table_definitions/miri/miri_dark_pixel_stats.txt b/jwql/database/monitor_table_definitions/miri/miri_dark_pixel_stats.txt new file mode 100644 index 000000000..bf2c06c48 --- /dev/null +++ b/jwql/database/monitor_table_definitions/miri/miri_dark_pixel_stats.txt @@ -0,0 +1,7 @@ +DETECTOR, string +X_COORD, integer_array_1d +Y_COORD, integer_array_1d +TYPE, string +SOURCE_FILES, string_array_1d +MEAN_DARK_IMAGE_FILE, string +BASELINE_FILE, string diff --git a/jwql/database/monitor_table_definitions/miri/miri_dark_query_history.txt b/jwql/database/monitor_table_definitions/miri/miri_dark_query_history.txt new file mode 100644 index 000000000..d0c493354 --- /dev/null +++ b/jwql/database/monitor_table_definitions/miri/miri_dark_query_history.txt @@ -0,0 +1,6 @@ +INSTRUMENT, string +APERTURE, string +START_TIME_MJD, float +END_TIME_MJD, float +FILES_FOUND, integer +RUN_MONITOR, bool diff --git 
a/jwql/database/monitor_table_definitions/nircam/nircam_dark_dark_current.txt b/jwql/database/monitor_table_definitions/nircam/nircam_dark_dark_current.txt index e3866c8ff..cdd2d681d 100644 --- a/jwql/database/monitor_table_definitions/nircam/nircam_dark_dark_current.txt +++ b/jwql/database/monitor_table_definitions/nircam/nircam_dark_dark_current.txt @@ -1,2 +1,19 @@ +APERTURE, string +AMPLIFIER, string MEAN, float -STDEV, float \ No newline at end of file +STDEV, float +SOURCE_FILES, string_array_1d +GAUSS_AMPLITUDE, float_array_1d +GUASS_PEAK, float_array_1d +GAUSS_WIDTH, float_array_1d +GAUSS_CHISQ, float +DOUBLE_GAUSS_AMPLITUDE1, float_array_1d +DOUBLE_GAUSS_PEAK1, float_array_1d +DOUBLE_GAUSS_WIDTH1, float_array_1d +DOUBLE_GAUSS_AMPLITUDE2, float_array_1d +DOUBLE_GAUSS_PEAK2, float_array_1d +DOUBLE_GAUSS_WIDTH2, float_array_1d +DOUBLE_GAUSS_CHISQ, float +MEAN_DARK_IMAGE_FILE, string +HIST_DARK_VALUES, float_array_1d +HIST_AMPLITUDES, float_array_1d diff --git a/jwql/database/monitor_table_definitions/nircam/nircam_dark_pixel_stats.txt b/jwql/database/monitor_table_definitions/nircam/nircam_dark_pixel_stats.txt index c2eadbe6b..bf2c06c48 100644 --- a/jwql/database/monitor_table_definitions/nircam/nircam_dark_pixel_stats.txt +++ b/jwql/database/monitor_table_definitions/nircam/nircam_dark_pixel_stats.txt @@ -1,3 +1,7 @@ -X_COORD, integer -Y_COORD, integer -TYPE, string \ No newline at end of file +DETECTOR, string +X_COORD, integer_array_1d +Y_COORD, integer_array_1d +TYPE, string +SOURCE_FILES, string_array_1d +MEAN_DARK_IMAGE_FILE, string +BASELINE_FILE, string diff --git a/jwql/database/monitor_table_definitions/nircam/nircam_dark_queries.txt b/jwql/database/monitor_table_definitions/nircam/nircam_dark_queries.txt deleted file mode 100644 index 9f838c18f..000000000 --- a/jwql/database/monitor_table_definitions/nircam/nircam_dark_queries.txt +++ /dev/null @@ -1,3 +0,0 @@ -LAST_RUN, datetime -DETECTOR, string -APERTURE, string \ No newline at end of file diff 
--git a/jwql/database/monitor_table_definitions/nircam/nircam_dark_query_history.txt b/jwql/database/monitor_table_definitions/nircam/nircam_dark_query_history.txt index 209b84d88..d0c493354 100644 --- a/jwql/database/monitor_table_definitions/nircam/nircam_dark_query_history.txt +++ b/jwql/database/monitor_table_definitions/nircam/nircam_dark_query_history.txt @@ -1,3 +1,6 @@ -START_TIME, datetime -DETECTOR, string +INSTRUMENT, string APERTURE, string +START_TIME_MJD, float +END_TIME_MJD, float +FILES_FOUND, integer +RUN_MONITOR, bool diff --git a/jwql/database/monitor_table_definitions/niriss/niriss_dark_dark_current.txt b/jwql/database/monitor_table_definitions/niriss/niriss_dark_dark_current.txt new file mode 100644 index 000000000..cdd2d681d --- /dev/null +++ b/jwql/database/monitor_table_definitions/niriss/niriss_dark_dark_current.txt @@ -0,0 +1,19 @@ +APERTURE, string +AMPLIFIER, string +MEAN, float +STDEV, float +SOURCE_FILES, string_array_1d +GAUSS_AMPLITUDE, float_array_1d +GUASS_PEAK, float_array_1d +GAUSS_WIDTH, float_array_1d +GAUSS_CHISQ, float +DOUBLE_GAUSS_AMPLITUDE1, float_array_1d +DOUBLE_GAUSS_PEAK1, float_array_1d +DOUBLE_GAUSS_WIDTH1, float_array_1d +DOUBLE_GAUSS_AMPLITUDE2, float_array_1d +DOUBLE_GAUSS_PEAK2, float_array_1d +DOUBLE_GAUSS_WIDTH2, float_array_1d +DOUBLE_GAUSS_CHISQ, float +MEAN_DARK_IMAGE_FILE, string +HIST_DARK_VALUES, float_array_1d +HIST_AMPLITUDES, float_array_1d diff --git a/jwql/database/monitor_table_definitions/niriss/niriss_dark_pixel_stats.txt b/jwql/database/monitor_table_definitions/niriss/niriss_dark_pixel_stats.txt new file mode 100644 index 000000000..bf2c06c48 --- /dev/null +++ b/jwql/database/monitor_table_definitions/niriss/niriss_dark_pixel_stats.txt @@ -0,0 +1,7 @@ +DETECTOR, string +X_COORD, integer_array_1d +Y_COORD, integer_array_1d +TYPE, string +SOURCE_FILES, string_array_1d +MEAN_DARK_IMAGE_FILE, string +BASELINE_FILE, string diff --git 
a/jwql/database/monitor_table_definitions/niriss/niriss_dark_query_history.txt b/jwql/database/monitor_table_definitions/niriss/niriss_dark_query_history.txt new file mode 100644 index 000000000..d0c493354 --- /dev/null +++ b/jwql/database/monitor_table_definitions/niriss/niriss_dark_query_history.txt @@ -0,0 +1,6 @@ +INSTRUMENT, string +APERTURE, string +START_TIME_MJD, float +END_TIME_MJD, float +FILES_FOUND, integer +RUN_MONITOR, bool diff --git a/jwql/database/monitor_table_definitions/nirspec/nirspec_dark_dark_current.txt b/jwql/database/monitor_table_definitions/nirspec/nirspec_dark_dark_current.txt new file mode 100644 index 000000000..cdd2d681d --- /dev/null +++ b/jwql/database/monitor_table_definitions/nirspec/nirspec_dark_dark_current.txt @@ -0,0 +1,19 @@ +APERTURE, string +AMPLIFIER, string +MEAN, float +STDEV, float +SOURCE_FILES, string_array_1d +GAUSS_AMPLITUDE, float_array_1d +GUASS_PEAK, float_array_1d +GAUSS_WIDTH, float_array_1d +GAUSS_CHISQ, float +DOUBLE_GAUSS_AMPLITUDE1, float_array_1d +DOUBLE_GAUSS_PEAK1, float_array_1d +DOUBLE_GAUSS_WIDTH1, float_array_1d +DOUBLE_GAUSS_AMPLITUDE2, float_array_1d +DOUBLE_GAUSS_PEAK2, float_array_1d +DOUBLE_GAUSS_WIDTH2, float_array_1d +DOUBLE_GAUSS_CHISQ, float +MEAN_DARK_IMAGE_FILE, string +HIST_DARK_VALUES, float_array_1d +HIST_AMPLITUDES, float_array_1d diff --git a/jwql/database/monitor_table_definitions/nirspec/nirspec_dark_pixel_stats.txt b/jwql/database/monitor_table_definitions/nirspec/nirspec_dark_pixel_stats.txt new file mode 100644 index 000000000..bf2c06c48 --- /dev/null +++ b/jwql/database/monitor_table_definitions/nirspec/nirspec_dark_pixel_stats.txt @@ -0,0 +1,7 @@ +DETECTOR, string +X_COORD, integer_array_1d +Y_COORD, integer_array_1d +TYPE, string +SOURCE_FILES, string_array_1d +MEAN_DARK_IMAGE_FILE, string +BASELINE_FILE, string diff --git a/jwql/database/monitor_table_definitions/nirspec/nirspec_dark_query_history.txt 
b/jwql/database/monitor_table_definitions/nirspec/nirspec_dark_query_history.txt new file mode 100644 index 000000000..d0c493354 --- /dev/null +++ b/jwql/database/monitor_table_definitions/nirspec/nirspec_dark_query_history.txt @@ -0,0 +1,6 @@ +INSTRUMENT, string +APERTURE, string +START_TIME_MJD, float +END_TIME_MJD, float +FILES_FOUND, integer +RUN_MONITOR, bool diff --git a/jwql/instrument_monitors/common_monitors/dark_monitor.py b/jwql/instrument_monitors/common_monitors/dark_monitor.py new file mode 100755 index 000000000..097d86b99 --- /dev/null +++ b/jwql/instrument_monitors/common_monitors/dark_monitor.py @@ -0,0 +1,988 @@ +#! /usr/bin/env python + +"""This module contains code for the dark current monitor, which +performs some basic analysis to check whether the dark current behavior +for the most recent set of input files is consistent with that from +past files. + +If enough new files for a given instrument/aperture combination +(currently the files must be identified as dark current files in the +``exp_type`` header keyword) are present in the filesystem at the time +the ``dark_monitor`` is called, the files are first run through the +appropriate pipeline steps to produce slope images. + +A mean slope image as well as a standard deviation slope image is +created by sigma-clipping on a pixel by pixel basis. The mean and +standard deviation images are saved to a fits file, the name of which +is entered into the ``DarkCurrent`` database table. + +The mean slope image is then normalized by an existing baseline slope +image. New hot pixels are identified as those with normalized signal +rates above a ``hot_threshold`` value. Similarly, pixels with +normalized signal rates below a ``dead_threshold`` are flagged as new +dead pixels. + +The standard deviation slope image is normalized by a baseline +(historical) standard deviation image. Pixels with normalized values +above a noise threshold are flagged as newly noisy pixels.
+ +New hot, dead, and noisy pixels are saved to the ``DarkPixelStats`` +database table. + +Next, the dark current in the mean slope image is examined. A histogram +of the slope values is created for the pixels in each amplifier, as +well as for all pixels on the detector. In all cases, a Gaussian is fit +to the histogram. Currently for NIRCam and NIRISS, a double Gaussian is +also fit to the histogram from the entire detector. + +The histogram itself as well as the best-fit Gaussian and double +Gaussian parameters are saved to the DarkDarkCurrent database table. + + +Author +------ + + - Bryan Hilbert + +Use +--- + + This module can be used from the command line as such: + + :: + + python dark_monitor.py +""" + +from copy import copy, deepcopy +import datetime +import logging +import os + +from astropy.io import ascii, fits +from astropy.modeling import models +from astropy.time import Time +import numpy as np +from pysiaf import Siaf +from sqlalchemy import func +from sqlalchemy.sql.expression import and_ + +from jwql.database.database_interface import session +from jwql.database.database_interface import NIRCamDarkQueryHistory, NIRCamDarkPixelStats, NIRCamDarkDarkCurrent +from jwql.database.database_interface import NIRISSDarkQueryHistory, NIRISSDarkPixelStats, NIRISSDarkDarkCurrent +from jwql.database.database_interface import MIRIDarkQueryHistory, MIRIDarkPixelStats, MIRIDarkDarkCurrent +from jwql.database.database_interface import NIRSpecDarkQueryHistory, NIRSpecDarkPixelStats, NIRSpecDarkDarkCurrent +from jwql.database.database_interface import FGSDarkQueryHistory, FGSDarkPixelStats, FGSDarkDarkCurrent +from jwql.instrument_monitors import pipeline_tools +from jwql.jwql_monitors import monitor_mast +from jwql.utils import calculations, instrument_properties +from jwql.utils.constants import JWST_INSTRUMENT_NAMES_MIXEDCASE, JWST_DATAPRODUCTS +from jwql.utils.logging_functions import log_info, log_fail +from jwql.utils.permissions import set_permissions +from 
jwql.utils.utils import copy_files, ensure_dir_exists, get_config, filesystem_path, initialize_instrument_monitor, update_monitor_table + +THRESHOLDS_FILE = os.path.join(os.path.split(__file__)[0], 'dark_monitor_file_thresholds.txt') + + +def mast_query_darks(instrument, aperture, start_date, end_date): + """Use ``astroquery`` to search MAST for dark current data + + Parameters + ---------- + instrument : str + Instrument name (e.g. ``nircam``) + + aperture : str + Detector aperture to search for (e.g. ``NRCA1_FULL``) + + start_date : float + Starting date for the search in MJD + + end_date : float + Ending date for the search in MJD + + Returns + ------- + query_results : list + List of dictionaries containing the query results + """ + + # Make sure instrument is correct case + if instrument.lower() == 'nircam': + instrument = 'NIRCam' + dark_template = ['NRC_DARK'] + elif instrument.lower() == 'niriss': + instrument = 'NIRISS' + dark_template = ['NIS_DARK'] + elif instrument.lower() == 'nirspec': + instrument = 'NIRSpec' + dark_template = ['NRS_DARK'] + elif instrument.lower() == 'fgs': + instrument = 'FGS' + dark_template = ['FGS_DARK'] + elif instrument.lower() == 'miri': + instrument = 'MIRI' + dark_template = ['MIR_DARKALL', 'MIR_DARKIMG', 'MIR_DARKMRS'] + + # monitor_mast.instrument_inventory does not allow list inputs to + # the added_filters input (or at least if you do provide a list, then + # it becomes a nested list when it sends the query to MAST. The + # nested list is subsequently ignored by MAST.) + # So query once for each dark template, and combine outputs into a + # single list. 
+ query_results = [] + for template_name in dark_template: + + # Create dictionary of parameters to add + parameters = {"date_obs_mjd": {"min": start_date, "max": end_date}, + "apername": aperture, "exp_type": template_name} + + query = monitor_mast.instrument_inventory(instrument, dataproduct=JWST_DATAPRODUCTS, + add_filters=parameters, return_data=True, caom=False) + if len(query['data']) > 0: + query_results.extend(query['data']) + + return query_results + + +@log_fail +@log_info +class Dark(): + """Class for executing the dark current monitor. + + This class will search for new (since the previous instance of the + class) dark current files in the file system. It will loop over + instrument/aperture combinations and find the number of new dark + current files available. If there are enough, it will copy the files + over to a working directory and run the monitor. This will create a + mean dark current rate image, create a histogram of the dark current + values, and fit several functions to the histogram. It will also + compare the dark current image to a historical image in order to + search for new hot or dead pixels. Results are all saved to + database tables. + + Parameters + ---------- + testing : bool + For pytest. If ``True``, an instance of ``Dark`` is created, but + no other code is executed. + + Attributes + ---------- + output_dir : str + Path into which outputs will be placed + + data_dir : str + Path into which new dark files will be copied to be worked on + + query_start : float + MJD start date to use for querying MAST + + query_end : float + MJD end date to use for querying MAST + + instrument : str + Name of instrument used to collect the dark current data + + aperture : str + Name of the aperture used for the dark current (e.g. 
+ ``NRCA1_FULL``) + + query_table : sqlalchemy table + Table containing the history of dark current queries to MAST + for each instrument/aperture combination + + pixel_table : sqlalchemy table + Table containing lists of hot/dead/noisy pixels found for each + instrument/detector + + stats_table : sqlalchemy table + Table containing dark current analysis results. Mean/stdev + values, histogram information, Gaussian fitting results, etc. + + Raises + ------ + ValueError + If encountering an unrecognized bad pixel type + + ValueError + If the most recent query search returns more than one entry + """ + + def __init__(self, testing=False): + + logging.info('Begin logging for dark_monitor') + + apertures_to_skip = ['NRCALL_FULL', 'NRCAS_FULL', 'NRCBS_FULL'] + + if not testing: + + # Get the output directory + self.output_dir = os.path.join(get_config()['outputs'], 'dark_monitor') + + # Read in config file that defines the thresholds for the number + # of dark files that must be present in order for the monitor to run + limits = ascii.read(THRESHOLDS_FILE) + + # Use the current time as the end time for MAST query + self.query_end = Time.now().mjd + + # Loop over all instruments + for instrument in ['nircam']: + self.instrument = instrument + + # Identify which database tables to use + self.identify_tables() + + # Get a list of all possible apertures from pysiaf + possible_apertures = list(Siaf(instrument).apernames) + possible_apertures = [ap for ap in possible_apertures if ap not in apertures_to_skip] + + for aperture in possible_apertures: + + logging.info('') + logging.info('Working on aperture {} in {}'.format(aperture, instrument)) + + # Find the appropriate threshold for the number of new files needed + match = aperture == limits['Aperture'] + file_count_threshold = limits['Threshold'][match] + + # Locate the record of the most recent MAST search + self.aperture = aperture + self.query_start = self.most_recent_search() + logging.info('\tQuery times: {} 
{}'.format(self.query_start, self.query_end)) + + # Query MAST using the aperture and the time of the + # most recent previous search as the starting time + new_entries = mast_query_darks(instrument, aperture, self.query_start, self.query_end) + logging.info('\tAperture: {}, new entries: {}'.format(self.aperture, len(new_entries))) + + # Check to see if there are enough new files to meet the monitor's signal-to-noise requirements + if len(new_entries) >= file_count_threshold: + + logging.info('\tSufficient new dark files found for {}, {} to run the dark monitor.' + .format(self.instrument, self.aperture)) + + # Get full paths to the files + new_filenames = [filesystem_path(file_entry['filename']) for file_entry in new_entries] + + # Set up directories for the copied data + ensure_dir_exists(os.path.join(self.output_dir, 'data')) + self.data_dir = os.path.join(self.output_dir, + 'data/{}_{}'.format(self.instrument.lower(), + self.aperture.lower())) + ensure_dir_exists(self.data_dir) + + # Copy files from filesystem + dark_files, not_copied = copy_files(new_filenames, self.data_dir) + + # Run the dark monitor + self.run(dark_files) + monitor_run = True + + else: + logging.info(('\tDark monitor skipped. {} new dark files for {}, {}. 
{} new files are ' + 'required to run dark current monitor.').format( + len(new_entries), instrument, aperture, file_count_threshold[0])) + monitor_run = False + + # Update the query history + new_entry = {'instrument': instrument, + 'aperture': aperture, + 'start_time_mjd': self.query_start, + 'end_time_mjd': self.query_end, + 'files_found': len(new_entries), + 'run_monitor': monitor_run, + 'entry_date': datetime.datetime.now()} + self.query_table.__table__.insert().execute(new_entry) + logging.info('\tUpdated the query history table') + + logging.info('Dark Monitor completed successfully.') + + def add_bad_pix(self, coordinates, pixel_type, files, mean_filename, baseline_filename): + """Add a set of bad pixels to the bad pixel database table + + Parameters + ---------- + coordinates : tuple + Tuple of two lists, containing x,y coordinates of bad + pixels (Output of ``np.where`` call) + + pixel_type : str + Type of bad pixel. Options are ``dead``, ``hot``, and + ``noisy`` + + files : list + List of fits files which were used to identify the bad + pixels + + mean_filename : str + Name of fits file containing the mean dark rate image used + to find these bad pixels + + baseline_filename : str + Name of fits file containing the baseline dark rate image + used to find these bad pixels + """ + + logging.info('Adding {} {} pixels to database.'.format(len(coordinates[0]), pixel_type)) + + source_files = [os.path.basename(item) for item in files] + entry = {'detector': self.detector, + 'x_coord': coordinates[0], + 'y_coord': coordinates[1], + 'type': pixel_type, + 'source_files': source_files, + 'mean_dark_image_file': os.path.basename(mean_filename), + 'baseline_file': os.path.basename(baseline_filename), + 'entry_date': datetime.datetime.now()} + self.pixel_table.__table__.insert().execute(entry) + + def get_metadata(self, filename): + """Collect basic metadata from a fits file + + Parameters + ---------- + filename : str + Name of fits file to examine + """ + + header 
= fits.getheader(filename) + + try: + self.detector = header['DETECTOR'] + self.x0 = header['SUBSTRT1'] + self.y0 = header['SUBSTRT2'] + self.xsize = header['SUBSIZE1'] + self.ysize = header['SUBSIZE2'] + self.sample_time = header['TSAMPLE'] + self.frame_time = header['TFRAME'] + self.read_pattern = header['READPATT'] + + except KeyError as e: + logging.error(e) + + def exclude_existing_badpix(self, badpix, pixel_type): + """Given a set of coordinates of bad pixels, determine which of + these pixels have been previously identified and remove them + from the list + + Parameters + ---------- + badpix : tuple + Tuple of lists containing x and y pixel coordinates. (Output + of ``numpy.where`` call) + + pixel_type : str + Type of bad pixel being examined. Options are ``hot``, + ``dead``, and ``noisy`` + + Returns + ------- + new_pixels_x : list + List of x coordinates of new bad pixels + + new_pixels_y : list + List of y coordinates of new bad pixels + """ + + if pixel_type not in ['hot', 'dead', 'noisy']: + raise ValueError('Unrecognized bad pixel type: {}'.format(pixel_type)) + + db_entries = session.query(self.pixel_table) \ + .filter(self.pixel_table.type == pixel_type) \ + .filter(self.pixel_table.detector == self.detector) \ + .all() + + already_found = [] + if len(db_entries) != 0: + for _row in db_entries: + x_coords = _row.x_coord + y_coords = _row.y_coord + for x, y in zip(x_coords, y_coords): + already_found.append((x, y)) + + # Check to see if each pixel already appears in the database for + # the given bad pixel type + new_pixels_x = [] + new_pixels_y = [] + for x, y in zip(badpix[0], badpix[1]): + pixel = (x, y) + if pixel not in already_found: + new_pixels_x.append(x) + new_pixels_y.append(y) + + return (new_pixels_x, new_pixels_y) + + def find_hot_dead_pixels(self, mean_image, comparison_image, hot_threshold=2., dead_threshold=0.1): + """Create the ratio of the slope image to a baseline slope + image. 
Pixels in the ratio image with values above + ``hot_threshold`` will be marked as hot, and those with ratio + values less than ``dead_threshold`` will be marked as dead. + + Parameters + ---------- + mean_image : numpy.ndarray + 2D array containing the slope image from the new data + + comparison_image : numpy.ndarray + 2D array containing the baseline slope image to compare + against the new slope image. + + hot_threshold : float + ``(mean_image / comparison_image)`` ratio value above which + a pixel is considered hot. + + dead_threshold : float + ``(mean_image / comparison_image)`` ratio value below which + a pixel is considered dead. + + Returns + ------- + hotpix : tuple + Tuple (of lists) containing x,y coordinates of newly hot + pixels + + deadpix : tuple + Tuple (of lists) containing x,y coordinates of newly dead + pixels + """ + + # Avoid divide by zeros + zeros = comparison_image == 0. + comparison_image[zeros] = 1. + mean_image[zeros] += 1. + + ratio = mean_image / comparison_image + hotpix = np.where(ratio > hot_threshold) + deadpix = np.where(ratio < dead_threshold) + + return hotpix, deadpix + + def get_baseline_filename(self): + """Query the database and return the filename of the baseline + (comparison) mean dark slope image to use when searching for + new hot/dead/noisy pixels. For this we assume that the most + recent baseline file for the given detector is the one to use. 
+ + Returns + ------- + filename : str + Name of fits file containing the baseline image + """ + + subq = session.query(self.pixel_table.detector, + func.max(self.pixel_table.entry_date).label('maxdate') + ).group_by(self.pixel_table.detector).subquery('t2') + + query = session.query(self.pixel_table).join( + subq, + and_( + self.pixel_table.detector == self.detector, + self.pixel_table.entry_date == subq.c.maxdate + ) + ) + + count = query.count() + if not count: + filename = None + else: + filename = query.all()[0].baseline_file + logging.info('Baseline filename: {}'.format(filename)) + + return filename + + def identify_tables(self): + """Determine which database tables to use for a run of the dark + monitor + """ + + mixed_case_name = JWST_INSTRUMENT_NAMES_MIXEDCASE[self.instrument] + self.query_table = eval('{}DarkQueryHistory'.format(mixed_case_name)) + self.pixel_table = eval('{}DarkPixelStats'.format(mixed_case_name)) + self.stats_table = eval('{}DarkDarkCurrent'.format(mixed_case_name)) + + def most_recent_search(self): + """Query the query history database and return the information + on the most recent query for the given ``aperture_name`` where + the dark monitor was executed. + + Returns + ------- + query_result : float + Date (in MJD) of the ending range of the previous MAST query + where the dark monitor was run. + """ + + sub_query = session.query(self.query_table.aperture, + func.max(self.query_table.end_time_mjd).label('maxdate') + ).group_by(self.query_table.aperture).subquery('t2') + + # Note that "self.query_table.run_monitor == True" below is + # intentional. Switching = to "is" results in an error in the query. + query = session.query(self.query_table).join( + sub_query, + and_( + self.query_table.aperture == self.aperture, + self.query_table.end_time_mjd == sub_query.c.maxdate, + self.query_table.run_monitor == True + ) + ).all() + + query_count = len(query) + if query_count == 0: + query_result = 57357.0 # a.k.a. 
Dec 1, 2015 == CV3 + logging.info(('\tNo query history for {}. Beginning search date will be set to {}.' + .format(self.aperture, query_result))) + elif query_count > 1: + raise ValueError('More than one "most recent" query?') + else: + query_result = query[0].end_time_mjd + + return query_result + + def noise_check(self, new_noise_image, baseline_noise_image, threshold=1.5): + """Create the ratio of the stdev (noise) image to a baseline + noise image. Pixels in the ratio image with values above + ``threshold`` will be marked as newly noisy. + + Parameters + ---------- + new_noise_image : numpy.ndarray + 2D array containing the noise image from the new data + + baseline_noise_image : numpy.ndarray + 2D array containing the baseline noise image to compare + against the new noise image. + + threshold : float + ``(new_noise_image / baseline_noise_image)`` ratio value + above which a pixel is considered newly noisy. + + Returns + ------- + noisy : tuple + Tuple (of lists) of x,y coordinates of newly noisy pixels + """ + + # Avoid divide by zeros + zeros = baseline_noise_image == 0. + baseline_noise_image[zeros] = 1. + new_noise_image[zeros] += 1. + + ratio = new_noise_image / baseline_noise_image + noisy = np.where(ratio > threshold) + + return noisy + + def read_baseline_slope_image(self, filename): + """Read in a baseline mean slope image and associated standard + deviation image from the given fits file + + Parameters + ---------- + filename : str + Name of fits file to be read in + + Returns + ------- + mean_image : numpy.ndarray + 2D mean slope image + + stdev_image : numpy.ndarray + 2D stdev image + """ + + try: + with fits.open(filename) as hdu: + mean_image = hdu['MEAN'].data + stdev_image = hdu['STDEV'].data + return mean_image, stdev_image + except (FileNotFoundError, KeyError) as e: + logging.warning('Trying to read {}: {}'.format(filename, e)) + + def run(self, file_list): + """The main method. 
See module docstrings for further details + + Parameters + ---------- + file_list : list + List of filenames (including full paths) to the dark current + files + """ + + # Basic metadata that will be needed later + self.get_metadata(file_list[0]) + + # Determine which pipeline steps need to be executed + required_steps = pipeline_tools.get_pipeline_steps(self.instrument) + logging.info('\tRequired calwebb1_detector pipeline steps to have the data in the ' + 'correct format:') + for item in required_steps: + logging.info('\t\t{}: {}'.format(item, required_steps[item])) + + # Modify the list of pipeline steps to skip those not needed for the + # preparation of dark current data + required_steps['dark_current'] = False + required_steps['persistence'] = False + + # NIRSpec IR^2 readout pattern NRSIRS2 is the only one with + # nframes not a power of 2 + if self.read_pattern not in pipeline_tools.GROUPSCALE_READOUT_PATTERNS: + required_steps['group_scale'] = False + + # Run pipeline steps on files, generating slope files + slope_files = [] + for filename in file_list: + + completed_steps = pipeline_tools.completed_pipeline_steps(filename) + steps_to_run = pipeline_tools.steps_to_run(required_steps, completed_steps) + + logging.info('\tWorking on file: {}'.format(filename)) + logging.info('\tPipeline steps that remain to be run:') + for item in steps_to_run: + logging.info('\t\t{}: {}'.format(item, steps_to_run[item])) + + # Run any remaining required pipeline steps + if any(steps_to_run.values()) is False: + slope_files.append(filename) + else: + processed_file = filename.replace('.fits', '_{}.fits'.format('rate')) + + # If the slope file already exists, skip the pipeline call + if not os.path.isfile(processed_file): + logging.info('\tRunning pipeline on {}'.format(filename)) + processed_file = pipeline_tools.run_calwebb_detector1_steps(os.path.abspath(filename), steps_to_run) + logging.info('\tPipeline complete. 
Output: {}'.format(processed_file)) + + else: + logging.info('\tSlope file {} already exists. Skipping call to pipeline.' + .format(processed_file)) + pass + + slope_files.append(processed_file) + + # Delete the original dark ramp file to save disk space + os.remove(filename) + + logging.info('\tSlope images to use in the dark monitor for {}, {}:'.format(self.instrument, self.aperture)) + for item in slope_files: + logging.info('\t\t{}'.format(item)) + + # Read in all slope images and place into a list + slope_image_stack, slope_exptimes = pipeline_tools.image_stack(slope_files) + + # Calculate a mean slope image from the inputs + slope_image, stdev_image = calculations.mean_image(slope_image_stack, sigma_threshold=3) + mean_slope_file = self.save_mean_slope_image(slope_image, stdev_image, slope_files) + logging.info('\tSigma-clipped mean of the slope images saved to: {}'.format(mean_slope_file)) + + # ----- Search for new hot/dead/noisy pixels ----- + # Read in baseline mean slope image and stdev image + # The baseline image is used to look for hot/dead/noisy pixels, + # but not for comparing mean dark rates. Therefore, updates to + # the baseline can be minimal. + + # Limit checks for hot/dead/noisy pixels to full frame data since + # subarray data have much shorter exposure times and therefore lower + # signal-to-noise + aperture_type = Siaf(self.instrument)[self.aperture].AperType + if aperture_type == 'FULLSCA': + baseline_file = self.get_baseline_filename() + if baseline_file is None: + logging.warning(('\tNo baseline dark current countrate image for {} {}. 
Setting the ' + 'current mean slope image to be the new baseline.'.format(self.instrument, self.aperture))) + baseline_file = mean_slope_file + baseline_mean = deepcopy(slope_image) + baseline_stdev = deepcopy(stdev_image) + else: + logging.info('\tBaseline file is {}'.format(baseline_file)) + baseline_mean, baseline_stdev = self.read_baseline_slope_image(baseline_file) + + # Check the hot/dead pixel population for changes + new_hot_pix, new_dead_pix = self.find_hot_dead_pixels(slope_image, baseline_mean) + + # Shift the coordinates to be in full frame coordinate system + new_hot_pix = self.shift_to_full_frame(new_hot_pix) + new_dead_pix = self.shift_to_full_frame(new_dead_pix) + + # Exclude hot and dead pixels found previously + new_hot_pix = self.exclude_existing_badpix(new_hot_pix, 'hot') + new_dead_pix = self.exclude_existing_badpix(new_dead_pix, 'dead') + + # Add new hot and dead pixels to the database + logging.info('\tFound {} new hot pixels'.format(len(new_hot_pix))) + logging.info('\tFound {} new dead pixels'.format(len(new_dead_pix))) + self.add_bad_pix(new_hot_pix, 'hot', file_list, mean_slope_file, baseline_file) + self.add_bad_pix(new_dead_pix, 'dead', file_list, mean_slope_file, baseline_file) + + # Check for any pixels that are significantly more noisy than + # in the baseline stdev image + new_noisy_pixels = self.noise_check(stdev_image, baseline_stdev) + + # Shift coordinates to be in full_frame coordinate system + new_noisy_pixels = self.shift_to_full_frame(new_noisy_pixels) + + # Exclude previously found noisy pixels + new_noisy_pixels = self.exclude_existing_badpix(new_noisy_pixels, 'noisy') + + # Add new noisy pixels to the database + logging.info('\tFound {} new noisy pixels'.format(len(new_noisy_pixels))) + self.add_bad_pix(new_noisy_pixels, 'noisy', file_list, mean_slope_file, baseline_file) + + # ----- Calculate image statistics ----- + + # Find amplifier boundaries so per-amp statistics can be calculated + number_of_amps, amp_bounds = 
instrument_properties.amplifier_info(slope_files[0]) + logging.info('\tAmplifier boundaries: {}'.format(amp_bounds)) + + # Calculate mean and stdev values, and fit a Gaussian to the + # histogram of the pixels in each amp + (amp_mean, amp_stdev, gauss_param, gauss_chisquared, double_gauss_params, double_gauss_chisquared, + histogram, bins) = self.stats_by_amp(slope_image, amp_bounds) + + # Construct new entry for dark database table + source_files = [os.path.basename(item) for item in file_list] + for key in amp_mean.keys(): + dark_db_entry = {'aperture': self.aperture, 'amplifier': key, 'mean': amp_mean[key], + 'stdev': amp_stdev[key], + 'source_files': source_files, + 'gauss_amplitude': list(gauss_param[key][0]), + 'gauss_peak': list(gauss_param[key][1]), + 'gauss_width': list(gauss_param[key][2]), + 'gauss_chisq': gauss_chisquared[key], + 'double_gauss_amplitude1': double_gauss_params[key][0], + 'double_gauss_peak1': double_gauss_params[key][1], + 'double_gauss_width1': double_gauss_params[key][2], + 'double_gauss_amplitude2': double_gauss_params[key][3], + 'double_gauss_peak2': double_gauss_params[key][4], + 'double_gauss_width2': double_gauss_params[key][5], + 'double_gauss_chisq': double_gauss_chisquared[key], + 'mean_dark_image_file': os.path.basename(mean_slope_file), + 'hist_dark_values': bins, + 'hist_amplitudes': histogram, + 'entry_date': datetime.datetime.now() + } + self.stats_table.__table__.insert().execute(dark_db_entry) + + def save_mean_slope_image(self, slope_img, stdev_img, files): + """Save the mean slope image and associated stdev image to a + file + + Parameters + ---------- + slope_img : numpy.ndarray + 2D array containing the mean slope image + + stdev_img : numpy.ndarray + 2D array containing the stdev image associated with the mean + slope image. 
+ + files : list + List of input files used to construct the mean slope image + + Returns + ------- + output_filename : str + Name of fits file to save mean and stdev images within + """ + + output_filename = '{}_{}_{}_to_{}_mean_slope_image.fits'.format(self.instrument.lower(), + self.aperture.lower(), + self.query_start, self.query_end) + mean_slope_dir = os.path.join(get_config()['outputs'], 'dark_monitor', 'mean_slope_images') + ensure_dir_exists(mean_slope_dir) + output_filename = os.path.join(mean_slope_dir, output_filename) + + primary_hdu = fits.PrimaryHDU() + primary_hdu.header['INSTRUME'] = (self.instrument, 'JWST instrument') + primary_hdu.header['APERTURE'] = (self.aperture, 'Aperture name') + primary_hdu.header['QRY_STRT'] = (self.query_start, 'MAST Query start time (MJD)') + primary_hdu.header['QRY_END'] = (self.query_end, 'MAST Query end time (MJD)') + + files_string = 'FILES USED: ' + for filename in files: + files_string += '{}, '.format(filename) + + primary_hdu.header.add_history(files_string) + mean_img_hdu = fits.ImageHDU(slope_img, name='MEAN') + stdev_img_hdu = fits.ImageHDU(stdev_img, name='STDEV') + hdu_list = fits.HDUList([primary_hdu, mean_img_hdu, stdev_img_hdu]) + hdu_list.writeto(output_filename, overwrite=True) + set_permissions(output_filename) + + return output_filename + + def shift_to_full_frame(self, coords): + """Shift the input list of pixels from the subarray coordinate + system to the full frame coordinate system + + Parameters + ---------- + coords : tup + (x, y) pixel coordinates in subarray coordinate system + + Returns + ------- + coords : tup + (x, y) pixel coordinates in full frame coordinate system + """ + + x = coords[0] + x += self.x0 + y = coords[1] + y += self.y0 + + return (x, y) + + def stats_by_amp(self, image, amps): + """Calculate statistics in the input image for each amplifier as + well as the full image + + Parameters + ---------- + image : numpy.ndarray + 2D array on which to calculate statistics + + amps 
: dict + Dictionary containing amp boundary coordinates (output from + ``amplifier_info`` function) + ``amps[key] = [(xmin, ymin), (xmax, ymax)]`` + + Returns + ------- + amp_means : dict + Sigma-clipped mean value for each amp. Keys are amp numbers + as strings (e.g. ``'1'``) + + amp_stdevs : dict + Sigma-clipped standard deviation for each amp. Keys are amp + numbers as strings (e.g. ``'1'``) + + gaussian_params : dict + Best-fit Gaussian parameters to the dark current histogram. + Keys are amp numbers as strings. Values are three-element + lists ``[amplitude, peak, width]``. Each element in the list + is a tuple of the best-fit value and the associated + uncertainty. + + gaussian_chi_squared : dict + Reduced chi-squared for the best-fit parameters. Keys are + amp numbers as strings + + double_gaussian_params : dict + Best-fit double Gaussian parameters to the dark current + histogram. Keys are amp numbers as strings. Values are six- + element lists. (3-elements * 2 Gaussians). + ``[amplitude1, peak1, stdev1, amplitude2, peak2, stdev2]`` + Each element of the list is a tuple containing the best-fit + value and associated uncertainty. + + double_gaussian_chi_squared : dict + Reduced chi-squared for the best-fit parameters. Keys are + amp numbers as strings + + hist : numpy.ndarray + 1D array of histogram values + + bin_centers : numpy.ndarray + 1D array of bin centers that match the ``hist`` values. + """ + + amp_means = {} + amp_stdevs = {} + gaussian_params = {} + gaussian_chi_squared = {} + double_gaussian_params = {} + double_gaussian_chi_squared = {} + + # Add full image coords to the list of amp_boundaries, so that full + # frame stats are also calculated. + if 'FULL' in self.aperture: + maxx = 0 + maxy = 0 + for amp in amps: + mxx = amps[amp][1][0] + mxy = amps[amp][1][1] + if mxx > maxx: + maxx = copy(mxx) + if mxy > maxy: + maxy = copy(mxy) + amps['5'] = [(0, 0), (maxx, maxy)] + logging.info(('\tFull frame exposure detected. 
Adding the full frame to the list ' + 'of amplifiers upon which to calculate statistics.')) + + for key in amps: + x_start, y_start = amps[key][0] + x_end, y_end = amps[key][1] + + # Basic statistics, sigma clipped areal mean and stdev + amp_mean, amp_stdev = calculations.mean_stdev(image[y_start: y_end, x_start: x_end]) + amp_means[key] = amp_mean + amp_stdevs[key] = amp_stdev + + # Create a histogram + lower_bound = (amp_mean - 7 * amp_stdev) + upper_bound = (amp_mean + 7 * amp_stdev) + + hist, bin_edges = np.histogram(image[y_start: y_end, x_start: x_end], bins='auto', + range=(lower_bound, upper_bound)) + bin_centers = (bin_edges[1:] + bin_edges[0: -1]) / 2. + initial_params = [np.max(hist), amp_mean, amp_stdev] + + # Fit a Gaussian to the histogram. Save best-fit params and + # uncertainties, as well as reduced chi squared + amplitude, peak, width = calculations.gaussian1d_fit(bin_centers, hist, initial_params) + gaussian_params[key] = [amplitude, peak, width] + + gauss_fit_model = models.Gaussian1D(amplitude=amplitude[0], mean=peak[0], stddev=width[0]) + gauss_fit = gauss_fit_model(bin_centers) + + positive = hist > 0 + degrees_of_freedom = len(hist) - 3. + total_pix = np.sum(hist[positive]) + p_i = gauss_fit[positive] / total_pix + gaussian_chi_squared[key] = (np.sum((hist[positive] - (total_pix*p_i)**2) / (total_pix*p_i)) + / degrees_of_freedom) + + # Double Gaussian fit only for full frame data (and only for + # NIRISS, NIRCam at the moment.) 
+ if key == '5': + if self.instrument.upper() in ['NIRISS', 'NIRCAM']: + initial_params = (np.max(hist), amp_mean, amp_stdev * 0.8, + np.max(hist) / 7., amp_mean / 2., amp_stdev * 0.9) + double_gauss_params, double_gauss_sigma = calculations.double_gaussian_fit(bin_centers, hist, initial_params) + double_gaussian_params[key] = [[param, sig] for param, sig in zip(double_gauss_params, double_gauss_sigma)] + double_gauss_fit = calculations.double_gaussian(bin_centers, *double_gauss_params) + degrees_of_freedom = len(bin_centers) - 6. + dp_i = double_gauss_fit[positive] / total_pix + double_gaussian_chi_squared[key] = np.sum((hist[positive] - (total_pix*dp_i)**2) / (total_pix*dp_i)) / degrees_of_freedom + + else: + double_gaussian_params[key] = [[0., 0.] for i in range(6)] + double_gaussian_chi_squared[key] = 0. + else: + double_gaussian_params[key] = [[0., 0.] for i in range(6)] + double_gaussian_chi_squared[key] = 0. + + logging.info('\tMean dark rate by amplifier: {}'.format(amp_means)) + logging.info('\tStandard deviation of dark rate by amplifier: {}'.format(amp_means)) + logging.info('\tBest-fit Gaussian parameters [amplitude, peak, width]'.format(gaussian_params)) + logging.info('\tReduced chi-squared associated with Gaussian fit: {}'.format(gaussian_chi_squared)) + logging.info('\tBest-fit double Gaussian parameters [amplitude1, peak1, width1, amplitude2, peak2, ' + 'width2]'.format(double_gaussian_params)) + logging.info('\tReduced chi-squared associated with double Gaussian fit: {}' + .format(double_gaussian_chi_squared)) + + return (amp_means, amp_stdevs, gaussian_params, gaussian_chi_squared, double_gaussian_params, + double_gaussian_chi_squared, hist.astype(np.float), bin_centers) + + +if __name__ == '__main__': + + module = os.path.basename(__file__).strip('.py') + start_time, log_file = initialize_instrument_monitor(module) + + monitor = Dark() + + update_monitor_table(module, start_time, log_file) diff --git 
a/jwql/instrument_monitors/common_monitors/dark_monitor_file_thresholds.txt b/jwql/instrument_monitors/common_monitors/dark_monitor_file_thresholds.txt new file mode 100644 index 000000000..ddb83e174 --- /dev/null +++ b/jwql/instrument_monitors/common_monitors/dark_monitor_file_thresholds.txt @@ -0,0 +1,606 @@ +Instrument Aperture Threshold +nircam NRCA1_FULL_OSS 10 +nircam NRCA2_FULL_OSS 10 +nircam NRCA3_FULL_OSS 10 +nircam NRCA4_FULL_OSS 10 +nircam NRCA5_FULL_OSS 10 +nircam NRCB1_FULL_OSS 10 +nircam NRCB2_FULL_OSS 10 +nircam NRCB3_FULL_OSS 10 +nircam NRCB4_FULL_OSS 10 +nircam NRCB5_FULL_OSS 10 +nircam NRCALL_FULL 10 +nircam NRCAS_FULL 10 +nircam NRCA1_FULL 10 +nircam NRCA2_FULL 10 +nircam NRCA3_FULL 10 +nircam NRCA4_FULL 10 +nircam NRCA5_FULL 10 +nircam NRCBS_FULL 10 +nircam NRCB1_FULL 10 +nircam NRCB2_FULL 10 +nircam NRCB3_FULL 10 +nircam NRCB4_FULL 10 +nircam NRCB5_FULL 10 +nircam NRCB1_FULLP 10 +nircam NRCB5_FULLP 10 +nircam NRCA1_SUB160 30 +nircam NRCA2_SUB160 30 +nircam NRCA3_SUB160 30 +nircam NRCA4_SUB160 30 +nircam NRCA5_SUB160 30 +nircam NRCB1_SUB160 30 +nircam NRCB2_SUB160 30 +nircam NRCB3_SUB160 30 +nircam NRCB4_SUB160 30 +nircam NRCB5_SUB160 30 +nircam NRCA1_SUB320 30 +nircam NRCA2_SUB320 30 +nircam NRCA3_SUB320 30 +nircam NRCA4_SUB320 30 +nircam NRCA5_SUB320 30 +nircam NRCB1_SUB320 30 +nircam NRCB2_SUB320 30 +nircam NRCB3_SUB320 30 +nircam NRCB4_SUB320 30 +nircam NRCB5_SUB320 30 +nircam NRCA1_SUB640 30 +nircam NRCA2_SUB640 30 +nircam NRCA3_SUB640 30 +nircam NRCA4_SUB640 30 +nircam NRCA5_SUB640 30 +nircam NRCB1_SUB640 30 +nircam NRCB2_SUB640 30 +nircam NRCB3_SUB640 30 +nircam NRCB4_SUB640 30 +nircam NRCB5_SUB640 30 +nircam NRCA5_GRISM256_F322W2 30 +nircam NRCA5_GRISM128_F322W2 30 +nircam NRCA5_GRISM64_F322W2 30 +nircam NRCA5_GRISM256_F277W 30 +nircam NRCA5_GRISM128_F277W 30 +nircam NRCA5_GRISM64_F277W 30 +nircam NRCA5_GRISM256_F356W 30 +nircam NRCA5_GRISM128_F356W 30 +nircam NRCA5_GRISM64_F356W 30 +nircam NRCA5_GRISM256_F444W 30 +nircam 
NRCA5_GRISM128_F444W 30 +nircam NRCA5_GRISM64_F444W 30 +nircam NRCA5_GRISM_F322W2 30 +nircam NRCA5_GRISM_F277W 30 +nircam NRCA5_GRISM_F356W 30 +nircam NRCA5_GRISM_F444W 30 +nircam NRCA1_GRISMTS 30 +nircam NRCA1_GRISMTS256 30 +nircam NRCA1_GRISMTS128 30 +nircam NRCA1_GRISMTS64 30 +nircam NRCA3_GRISMTS 30 +nircam NRCA3_GRISMTS256 30 +nircam NRCA3_GRISMTS128 30 +nircam NRCA3_GRISMTS64 30 +nircam NRCA5_TAGRISMTS32 30 +nircam NRCA5_TAGRISMTS_SCI_F322W2 30 +nircam NRCA5_TAGRISMTS_SCI_F444W 30 +nircam NRCA3_DHSPIL 30 +nircam NRCA3_DHSPIL_SUB96 30 +nircam NRCA3_DHSPIL_WEDGES 30 +nircam NRCB4_DHSPIL 30 +nircam NRCB4_DHSPIL_SUB96 30 +nircam NRCB4_DHSPIL_WEDGES 30 +nircam NRCA3_FP1 30 +nircam NRCA3_FP1_SUB8 30 +nircam NRCA3_FP1_SUB64 30 +nircam NRCA3_FP2MIMF 30 +nircam NRCA1_FP3MIMF 30 +nircam NRCA2_FP4MIMF 30 +nircam NRCA4_FP5MIMF 30 +nircam NRCB4_FP1 30 +nircam NRCB4_FP1_SUB8 30 +nircam NRCB4_FP1_SUB64 30 +nircam NRCB4_FP2MIMF 30 +nircam NRCB2_FP3MIMF 30 +nircam NRCB1_FP4MIMF 30 +nircam NRCB3_FP5MIMF 30 +nircam NRCA3_SUB64P 30 +nircam NRCA3_SUB160P 30 +nircam NRCA3_SUB400P 30 +nircam NRCA5_SUB64P 30 +nircam NRCA5_SUB160P 30 +nircam NRCA5_SUB400P 30 +nircam NRCB1_SUB64P 30 +nircam NRCB1_SUB160P 30 +nircam NRCB1_SUB400P 30 +nircam NRCB5_SUB64P 30 +nircam NRCB5_SUB160P 30 +nircam NRCB5_SUB400P 30 +nircam NRCB5_TAPSIMG32 30 +nircam NRCA5_GRISMC_WFSS 30 +nircam NRCA5_GRISMR_WFSS 30 +nircam NRCALL_GRISMC_WFSS 30 +nircam NRCALL_GRISMR_WFSS 30 +nircam NRCB5_GRISMC_WFSS 30 +nircam NRCB5_GRISMR_WFSS 30 +nircam NRCA2_MASK210R 30 +nircam NRCA5_MASK335R 30 +nircam NRCA5_MASK430R 30 +nircam NRCA4_MASKSWB 30 +nircam NRCA5_MASKLWB 30 +nircam NRCA2_TAMASK210R 30 +nircam NRCA5_TAMASK335R 30 +nircam NRCA5_TAMASK430R 30 +nircam NRCA4_TAMASKSWB 30 +nircam NRCA5_TAMASKLWB 30 +nircam NRCA5_TAMASKLWBL 30 +nircam NRCA4_TAMASKSWBS 30 +nircam NRCB1_MASK210R 30 +nircam NRCB5_MASK335R 30 +nircam NRCB5_MASK430R 30 +nircam NRCB3_MASKSWB 30 +nircam NRCB5_MASKLWB 30 +nircam NRCB1_TAMASK210R 30 +nircam 
NRCB5_TAMASK335R 30 +nircam NRCB5_TAMASK430R 30 +nircam NRCB3_TAMASKSWB 30 +nircam NRCB5_TAMASKLWB 30 +nircam NRCB5_TAMASKLWBL 30 +nircam NRCB3_TAMASKSWBS 30 +nircam NRCA2_FSTAMASK210R 30 +nircam NRCA4_FSTAMASKSWB 30 +nircam NRCA5_FSTAMASKLWB 30 +nircam NRCA5_FSTAMASK335R 30 +nircam NRCA5_FSTAMASK430R 30 +nircam NRCA4_MASKSWB_F182M 30 +nircam NRCA4_MASKSWB_F187N 30 +nircam NRCA4_MASKSWB_F210M 30 +nircam NRCA4_MASKSWB_F212N 30 +nircam NRCA4_MASKSWB_F200W 30 +nircam NRCA4_MASKSWB_NARROW 30 +nircam NRCA5_MASKLWB_F250M 30 +nircam NRCA5_MASKLWB_F300M 30 +nircam NRCA5_MASKLWB_F277W 30 +nircam NRCA5_MASKLWB_F335M 30 +nircam NRCA5_MASKLWB_F360M 30 +nircam NRCA5_MASKLWB_F356W 30 +nircam NRCA5_MASKLWB_F410M 30 +nircam NRCA5_MASKLWB_F430M 30 +nircam NRCA5_MASKLWB_F460M 30 +nircam NRCA5_MASKLWB_F480M 30 +nircam NRCA5_MASKLWB_F444W 30 +nircam NRCA5_MASKLWB_NARROW 30 +nircam NRCA2_FULL_MASK210R 10 +nircam NRCA5_FULL_MASK335R 10 +nircam NRCA5_FULL_MASK430R 10 +nircam NRCA4_FULL_MASKSWB 10 +nircam NRCA4_FULL_MASKSWB_F182M 10 +nircam NRCA4_FULL_MASKSWB_F187N 10 +nircam NRCA4_FULL_MASKSWB_F210M 10 +nircam NRCA4_FULL_MASKSWB_F212N 10 +nircam NRCA4_FULL_MASKSWB_F200W 10 +nircam NRCA5_FULL_MASKLWB 10 +nircam NRCA5_FULL_MASKLWB_F250M 10 +nircam NRCA5_FULL_MASKLWB_F300M 10 +nircam NRCA5_FULL_MASKLWB_F277W 10 +nircam NRCA5_FULL_MASKLWB_F335M 10 +nircam NRCA5_FULL_MASKLWB_F360M 10 +nircam NRCA5_FULL_MASKLWB_F356W 10 +nircam NRCA5_FULL_MASKLWB_F410M 10 +nircam NRCA5_FULL_MASKLWB_F430M 10 +nircam NRCA5_FULL_MASKLWB_F460M 10 +nircam NRCA5_FULL_MASKLWB_F480M 10 +nircam NRCA5_FULL_MASKLWB_F444W 10 +niriss NIS_CEN_OSS 10 +niriss NIS_CEN 10 +niriss NIS_AMI1 30 +niriss NIS_AMI2 30 +niriss NIS_AMI3 30 +niriss NIS_AMI4 30 +niriss NIS_AMITA 30 +niriss NIS_SOSSTA 30 +niriss NIS_WFSS_OFFSET 30 +niriss NIS_WFSS64 30 +niriss NIS_WFSS64R 30 +niriss NIS_WFSS64R3 30 +niriss NIS_WFSS64C 30 +niriss NIS_WFSS64C3 30 +niriss NIS_WFSS128 30 +niriss NIS_WFSS128R 30 +niriss NIS_WFSS128R3 30 +niriss NIS_WFSS128C 30 
+niriss NIS_WFSS128C3 30 +niriss NIS_SUB64 30 +niriss NIS_SUB128 30 +niriss NIS_SUB256 30 +niriss NIS_SUBAMPCAL 30 +niriss NIS_SUBSTRIP96 30 +niriss NIS_SUBSTRIP256 30 +niriss NIS_FP1MIMF 30 +niriss NIS_FP2MIMF 30 +niriss NIS_FP3MIMF 30 +niriss NIS_FP4MIMF 30 +niriss NIS_FP5MIMF 30 +niriss NIS_AMIFULL 10 +niriss NIS_SOSSFULL 10 +miri MIRIM_FULL_OSS 10 +miri MIRIM_FULL 10 +miri MIRIM_ILLUM 30 +miri MIRIM_BRIGHTSKY 30 +miri MIRIM_SUB256 30 +miri MIRIM_SUB128 30 +miri MIRIM_SUB64 30 +miri MIRIM_SLITLESSPRISM 30 +miri MIRIM_SLITLESSUPPER 30 +miri MIRIM_SLITLESSLOWER 30 +miri MIRIM_MASK1065 30 +miri MIRIM_MASK1140 30 +miri MIRIM_MASK1550 30 +miri MIRIM_MASKLYOT 30 +miri MIRIM_TAMRS 30 +miri MIRIM_TALRS 30 +miri MIRIM_TABLOCK 30 +miri MIRIM_TALYOT_UL 30 +miri MIRIM_TALYOT_UR 30 +miri MIRIM_TALYOT_LL 30 +miri MIRIM_TALYOT_LR 30 +miri MIRIM_TALYOT_CUL 30 +miri MIRIM_TALYOT_CUR 30 +miri MIRIM_TALYOT_CLL 30 +miri MIRIM_TALYOT_CLR 30 +miri MIRIM_TA1550_UL 30 +miri MIRIM_TA1550_UR 30 +miri MIRIM_TA1550_LL 30 +miri MIRIM_TA1550_LR 30 +miri MIRIM_TA1550_CUL 30 +miri MIRIM_TA1550_CUR 30 +miri MIRIM_TA1550_CLL 30 +miri MIRIM_TA1550_CLR 30 +miri MIRIM_TA1140_UL 30 +miri MIRIM_TA1140_UR 30 +miri MIRIM_TA1140_LL 30 +miri MIRIM_TA1140_LR 30 +miri MIRIM_TA1140_CUL 30 +miri MIRIM_TA1140_CUR 30 +miri MIRIM_TA1140_CLL 30 +miri MIRIM_TA1140_CLR 30 +miri MIRIM_TA1065_UL 30 +miri MIRIM_TA1065_UR 30 +miri MIRIM_TA1065_LL 30 +miri MIRIM_TA1065_LR 30 +miri MIRIM_TA1065_CUL 30 +miri MIRIM_TA1065_CUR 30 +miri MIRIM_TA1065_CLL 30 +miri MIRIM_TA1065_CLR 30 +miri MIRIM_TAFULL 10 +miri MIRIM_TAILLUM 30 +miri MIRIM_TABRIGHTSKY 30 +miri MIRIM_TASUB256 30 +miri MIRIM_TASUB128 30 +miri MIRIM_TASUB64 30 +miri MIRIM_TASLITLESSPRISM 30 +miri MIRIM_CORON1065 30 +miri MIRIM_CORON1140 30 +miri MIRIM_CORON1550 30 +miri MIRIM_CORONLYOT 30 +miri MIRIM_KNIFE 30 +miri MIRIM_FP1MIMF 30 +miri MIRIM_FP2MIMF 30 +miri MIRIM_FP3MIMF 30 +miri MIRIM_FP4MIMF 30 +miri MIRIM_FP5MIMF 30 +miri MIRIM_SLIT 30 +miri 
MIRIFU_CHANNEL1A 30 +miri MIRIFU_1ASLICE01 30 +miri MIRIFU_1ASLICE02 30 +miri MIRIFU_1ASLICE03 30 +miri MIRIFU_1ASLICE04 30 +miri MIRIFU_1ASLICE05 30 +miri MIRIFU_1ASLICE06 30 +miri MIRIFU_1ASLICE07 30 +miri MIRIFU_1ASLICE08 30 +miri MIRIFU_1ASLICE09 30 +miri MIRIFU_1ASLICE10 30 +miri MIRIFU_1ASLICE11 30 +miri MIRIFU_1ASLICE12 30 +miri MIRIFU_1ASLICE13 30 +miri MIRIFU_1ASLICE14 30 +miri MIRIFU_1ASLICE15 30 +miri MIRIFU_1ASLICE16 30 +miri MIRIFU_1ASLICE17 30 +miri MIRIFU_1ASLICE18 30 +miri MIRIFU_1ASLICE19 30 +miri MIRIFU_1ASLICE20 30 +miri MIRIFU_1ASLICE21 30 +miri MIRIFU_CHANNEL1B 30 +miri MIRIFU_1BSLICE01 30 +miri MIRIFU_1BSLICE02 30 +miri MIRIFU_1BSLICE03 30 +miri MIRIFU_1BSLICE04 30 +miri MIRIFU_1BSLICE05 30 +miri MIRIFU_1BSLICE06 30 +miri MIRIFU_1BSLICE07 30 +miri MIRIFU_1BSLICE08 30 +miri MIRIFU_1BSLICE09 30 +miri MIRIFU_1BSLICE10 30 +miri MIRIFU_1BSLICE11 30 +miri MIRIFU_1BSLICE12 30 +miri MIRIFU_1BSLICE13 30 +miri MIRIFU_1BSLICE14 30 +miri MIRIFU_1BSLICE15 30 +miri MIRIFU_1BSLICE16 30 +miri MIRIFU_1BSLICE17 30 +miri MIRIFU_1BSLICE18 30 +miri MIRIFU_1BSLICE19 30 +miri MIRIFU_1BSLICE20 30 +miri MIRIFU_1BSLICE21 30 +miri MIRIFU_CHANNEL1C 30 +miri MIRIFU_1CSLICE01 30 +miri MIRIFU_1CSLICE02 30 +miri MIRIFU_1CSLICE03 30 +miri MIRIFU_1CSLICE04 30 +miri MIRIFU_1CSLICE05 30 +miri MIRIFU_1CSLICE06 30 +miri MIRIFU_1CSLICE07 30 +miri MIRIFU_1CSLICE08 30 +miri MIRIFU_1CSLICE09 30 +miri MIRIFU_1CSLICE10 30 +miri MIRIFU_1CSLICE11 30 +miri MIRIFU_1CSLICE12 30 +miri MIRIFU_1CSLICE13 30 +miri MIRIFU_1CSLICE14 30 +miri MIRIFU_1CSLICE15 30 +miri MIRIFU_1CSLICE16 30 +miri MIRIFU_1CSLICE17 30 +miri MIRIFU_1CSLICE18 30 +miri MIRIFU_1CSLICE19 30 +miri MIRIFU_1CSLICE20 30 +miri MIRIFU_1CSLICE21 30 +miri MIRIFU_CHANNEL2A 30 +miri MIRIFU_2ASLICE01 30 +miri MIRIFU_2ASLICE02 30 +miri MIRIFU_2ASLICE03 30 +miri MIRIFU_2ASLICE04 30 +miri MIRIFU_2ASLICE05 30 +miri MIRIFU_2ASLICE06 30 +miri MIRIFU_2ASLICE07 30 +miri MIRIFU_2ASLICE08 30 +miri MIRIFU_2ASLICE09 30 +miri MIRIFU_2ASLICE10 30 
+miri MIRIFU_2ASLICE11 30 +miri MIRIFU_2ASLICE12 30 +miri MIRIFU_2ASLICE13 30 +miri MIRIFU_2ASLICE14 30 +miri MIRIFU_2ASLICE15 30 +miri MIRIFU_2ASLICE16 30 +miri MIRIFU_2ASLICE17 30 +miri MIRIFU_CHANNEL2B 30 +miri MIRIFU_2BSLICE01 30 +miri MIRIFU_2BSLICE02 30 +miri MIRIFU_2BSLICE03 30 +miri MIRIFU_2BSLICE04 30 +miri MIRIFU_2BSLICE05 30 +miri MIRIFU_2BSLICE06 30 +miri MIRIFU_2BSLICE07 30 +miri MIRIFU_2BSLICE08 30 +miri MIRIFU_2BSLICE09 30 +miri MIRIFU_2BSLICE10 30 +miri MIRIFU_2BSLICE11 30 +miri MIRIFU_2BSLICE12 30 +miri MIRIFU_2BSLICE13 30 +miri MIRIFU_2BSLICE14 30 +miri MIRIFU_2BSLICE15 30 +miri MIRIFU_2BSLICE16 30 +miri MIRIFU_2BSLICE17 30 +miri MIRIFU_CHANNEL2C 30 +miri MIRIFU_2CSLICE01 30 +miri MIRIFU_2CSLICE02 30 +miri MIRIFU_2CSLICE03 30 +miri MIRIFU_2CSLICE04 30 +miri MIRIFU_2CSLICE05 30 +miri MIRIFU_2CSLICE06 30 +miri MIRIFU_2CSLICE07 30 +miri MIRIFU_2CSLICE08 30 +miri MIRIFU_2CSLICE09 30 +miri MIRIFU_2CSLICE10 30 +miri MIRIFU_2CSLICE11 30 +miri MIRIFU_2CSLICE12 30 +miri MIRIFU_2CSLICE13 30 +miri MIRIFU_2CSLICE14 30 +miri MIRIFU_2CSLICE15 30 +miri MIRIFU_2CSLICE16 30 +miri MIRIFU_2CSLICE17 30 +miri MIRIFU_CHANNEL3A 30 +miri MIRIFU_3ASLICE01 30 +miri MIRIFU_3ASLICE02 30 +miri MIRIFU_3ASLICE03 30 +miri MIRIFU_3ASLICE04 30 +miri MIRIFU_3ASLICE05 30 +miri MIRIFU_3ASLICE06 30 +miri MIRIFU_3ASLICE07 30 +miri MIRIFU_3ASLICE08 30 +miri MIRIFU_3ASLICE09 30 +miri MIRIFU_3ASLICE10 30 +miri MIRIFU_3ASLICE11 30 +miri MIRIFU_3ASLICE12 30 +miri MIRIFU_3ASLICE13 30 +miri MIRIFU_3ASLICE14 30 +miri MIRIFU_3ASLICE15 30 +miri MIRIFU_3ASLICE16 30 +miri MIRIFU_CHANNEL3B 30 +miri MIRIFU_3BSLICE01 30 +miri MIRIFU_3BSLICE02 30 +miri MIRIFU_3BSLICE03 30 +miri MIRIFU_3BSLICE04 30 +miri MIRIFU_3BSLICE05 30 +miri MIRIFU_3BSLICE06 30 +miri MIRIFU_3BSLICE07 30 +miri MIRIFU_3BSLICE08 30 +miri MIRIFU_3BSLICE09 30 +miri MIRIFU_3BSLICE10 30 +miri MIRIFU_3BSLICE11 30 +miri MIRIFU_3BSLICE12 30 +miri MIRIFU_3BSLICE13 30 +miri MIRIFU_3BSLICE14 30 +miri MIRIFU_3BSLICE15 30 +miri MIRIFU_3BSLICE16 
30 +miri MIRIFU_CHANNEL3C 30 +miri MIRIFU_3CSLICE01 30 +miri MIRIFU_3CSLICE02 30 +miri MIRIFU_3CSLICE03 30 +miri MIRIFU_3CSLICE04 30 +miri MIRIFU_3CSLICE05 30 +miri MIRIFU_3CSLICE06 30 +miri MIRIFU_3CSLICE07 30 +miri MIRIFU_3CSLICE08 30 +miri MIRIFU_3CSLICE09 30 +miri MIRIFU_3CSLICE10 30 +miri MIRIFU_3CSLICE11 30 +miri MIRIFU_3CSLICE12 30 +miri MIRIFU_3CSLICE13 30 +miri MIRIFU_3CSLICE14 30 +miri MIRIFU_3CSLICE15 30 +miri MIRIFU_3CSLICE16 30 +miri MIRIFU_CHANNEL4A 30 +miri MIRIFU_4ASLICE01 30 +miri MIRIFU_4ASLICE02 30 +miri MIRIFU_4ASLICE03 30 +miri MIRIFU_4ASLICE04 30 +miri MIRIFU_4ASLICE05 30 +miri MIRIFU_4ASLICE06 30 +miri MIRIFU_4ASLICE07 30 +miri MIRIFU_4ASLICE08 30 +miri MIRIFU_4ASLICE09 30 +miri MIRIFU_4ASLICE10 30 +miri MIRIFU_4ASLICE11 30 +miri MIRIFU_4ASLICE12 30 +miri MIRIFU_CHANNEL4B 30 +miri MIRIFU_4BSLICE01 30 +miri MIRIFU_4BSLICE02 30 +miri MIRIFU_4BSLICE03 30 +miri MIRIFU_4BSLICE04 30 +miri MIRIFU_4BSLICE05 30 +miri MIRIFU_4BSLICE06 30 +miri MIRIFU_4BSLICE07 30 +miri MIRIFU_4BSLICE08 30 +miri MIRIFU_4BSLICE09 30 +miri MIRIFU_4BSLICE10 30 +miri MIRIFU_4BSLICE11 30 +miri MIRIFU_4BSLICE12 30 +miri MIRIFU_CHANNEL4C 30 +miri MIRIFU_4CSLICE01 30 +miri MIRIFU_4CSLICE02 30 +miri MIRIFU_4CSLICE03 30 +miri MIRIFU_4CSLICE04 30 +miri MIRIFU_4CSLICE05 30 +miri MIRIFU_4CSLICE06 30 +miri MIRIFU_4CSLICE07 30 +miri MIRIFU_4CSLICE08 30 +miri MIRIFU_4CSLICE09 30 +miri MIRIFU_4CSLICE10 30 +miri MIRIFU_4CSLICE11 30 +miri MIRIFU_4CSLICE12 30 +nirspec NRS1_FULL_OSS 10 +nirspec NRS1_FULL 10 +nirspec NRS2_FULL_OSS 10 +nirspec NRS2_FULL 10 +nirspec NRS_S200A1_SLIT 30 +nirspec NRS_S200A2_SLIT 30 +nirspec NRS_S400A1_SLIT 30 +nirspec NRS_S1600A1_SLIT 30 +nirspec NRS_S200B1_SLIT 30 +nirspec NRS_FULL_IFU 10 +nirspec NRS_IFU_SLICE00 30 +nirspec NRS_IFU_SLICE01 30 +nirspec NRS_IFU_SLICE02 30 +nirspec NRS_IFU_SLICE03 30 +nirspec NRS_IFU_SLICE04 30 +nirspec NRS_IFU_SLICE05 30 +nirspec NRS_IFU_SLICE06 30 +nirspec NRS_IFU_SLICE07 30 +nirspec NRS_IFU_SLICE08 30 +nirspec NRS_IFU_SLICE09 
30 +nirspec NRS_IFU_SLICE10 30 +nirspec NRS_IFU_SLICE11 30 +nirspec NRS_IFU_SLICE12 30 +nirspec NRS_IFU_SLICE13 30 +nirspec NRS_IFU_SLICE14 30 +nirspec NRS_IFU_SLICE15 30 +nirspec NRS_IFU_SLICE16 30 +nirspec NRS_IFU_SLICE17 30 +nirspec NRS_IFU_SLICE18 30 +nirspec NRS_IFU_SLICE19 30 +nirspec NRS_IFU_SLICE20 30 +nirspec NRS_IFU_SLICE21 30 +nirspec NRS_IFU_SLICE22 30 +nirspec NRS_IFU_SLICE23 30 +nirspec NRS_IFU_SLICE24 30 +nirspec NRS_IFU_SLICE25 30 +nirspec NRS_IFU_SLICE26 30 +nirspec NRS_IFU_SLICE27 30 +nirspec NRS_IFU_SLICE28 30 +nirspec NRS_IFU_SLICE29 30 +nirspec NRS_FULL_MSA 10 +nirspec NRS_FULL_MSA1 10 +nirspec NRS_FULL_MSA2 10 +nirspec NRS_FULL_MSA3 10 +nirspec NRS_FULL_MSA4 10 +nirspec NRS_VIGNETTED_MSA 30 +nirspec NRS_VIGNETTED_MSA1 30 +nirspec NRS_VIGNETTED_MSA2 30 +nirspec NRS_VIGNETTED_MSA3 30 +nirspec NRS_VIGNETTED_MSA4 30 +nirspec NRS_FIELD1_MSA4 30 +nirspec NRS_FIELD2_MSA4 30 +nirspec NRS1_FP1MIMF 30 +nirspec NRS1_FP2MIMF 30 +nirspec NRS1_FP3MIMF 30 +nirspec NRS2_FP4MIMF 30 +nirspec NRS2_FP5MIMF 30 +nirspec CLEAR_GWA_OTE 30 +nirspec F110W_GWA_OTE 30 +nirspec F140X_GWA_OTE 30 +nirspec NRS_SKY_OTEIP 30 +nirspec NRS_CLEAR_OTEIP_MSA_L0 30 +nirspec NRS_CLEAR_OTEIP_MSA_L1 30 +nirspec NRS_F070LP_OTEIP_MSA_L0 30 +nirspec NRS_F070LP_OTEIP_MSA_L1 30 +nirspec NRS_F100LP_OTEIP_MSA_L0 30 +nirspec NRS_F100LP_OTEIP_MSA_L1 30 +nirspec NRS_F170LP_OTEIP_MSA_L0 30 +nirspec NRS_F170LP_OTEIP_MSA_L1 30 +nirspec NRS_F290LP_OTEIP_MSA_L0 30 +nirspec NRS_F290LP_OTEIP_MSA_L1 30 +nirspec NRS_F110W_OTEIP_MSA_L0 30 +nirspec NRS_F110W_OTEIP_MSA_L1 30 +nirspec NRS_F140X_OTEIP_MSA_L0 30 +nirspec NRS_F140X_OTEIP_MSA_L1 30 +fgs FGS1_FULL_OSS 10 +fgs FGS1_FULL 10 +fgs FGS2_FULL_OSS 10 +fgs FGS2_FULL 10 +fgs FGS1_SUB128LL 30 +fgs FGS1_SUB128DIAG 30 +fgs FGS1_SUB128CNTR 30 +fgs FGS1_SUB32LL 30 +fgs FGS1_SUB32DIAG 30 +fgs FGS1_SUB32CNTR 30 +fgs FGS1_SUB8LL 30 +fgs FGS1_SUB8DIAG 30 +fgs FGS1_SUB8CNTR 30 +fgs FGS2_SUB128LL 30 +fgs FGS2_SUB128DIAG 30 +fgs FGS2_SUB128CNTR 30 +fgs FGS2_SUB32LL 
30 +fgs FGS2_SUB32DIAG 30 +fgs FGS2_SUB32CNTR 30 +fgs FGS2_SUB8LL 30 +fgs FGS2_SUB8DIAG 30 +fgs FGS2_SUB8CNTR 30 +fgs FGS1_FP1MIMF 30 +fgs FGS1_FP2MIMF 30 +fgs FGS1_FP3MIMF 30 +fgs FGS1_FP4MIMF 30 +fgs FGS1_FP5MIMF 30 +fgs FGS2_FP1MIMF 30 +fgs FGS2_FP2MIMF 30 +fgs FGS2_FP3MIMF 30 +fgs FGS2_FP4MIMF 30 +fgs FGS2_FP5MIMF 30 diff --git a/jwql/instrument_monitors/pipeline_tools.py b/jwql/instrument_monitors/pipeline_tools.py new file mode 100644 index 000000000..e81e7cfce --- /dev/null +++ b/jwql/instrument_monitors/pipeline_tools.py @@ -0,0 +1,259 @@ +"""Various utility functions related to the JWST calibration pipeline + +Authors +------- + + - Bryan Hilbert + +Use +--- + + This module can be imported as such: + :: + + from jwql.instrument_monitors import pipeline_tools + pipeline_steps = pipeline_tools.completed_pipeline_steps(filename) + """ + +from collections import OrderedDict +import copy +import numpy as np + +from astropy.io import fits +from jwst.dq_init import DQInitStep +from jwst.dark_current import DarkCurrentStep +from jwst.firstframe import FirstFrameStep +from jwst.group_scale import GroupScaleStep +from jwst.ipc import IPCStep +from jwst.jump import JumpStep +from jwst.lastframe import LastFrameStep +from jwst.linearity import LinearityStep +from jwst.persistence import PersistenceStep +from jwst.ramp_fitting import RampFitStep +from jwst.refpix import RefPixStep +from jwst.rscd import RSCD_Step +from jwst.saturation import SaturationStep +from jwst.superbias import SuperBiasStep + +from jwql.utils.constants import JWST_INSTRUMENT_NAMES_UPPERCASE + +# Define the fits header keyword that accompanies each step +PIPE_KEYWORDS = {'S_GRPSCL': 'group_scale', 'S_DQINIT': 'dq_init', 'S_SATURA': 'saturation', + 'S_IPC': 'ipc', 'S_REFPIX': 'refpix', 'S_SUPERB': 'superbias', + 'S_PERSIS': 'persistence', 'S_DARK': 'dark_current', 'S_LINEAR': 'linearity', + 'S_FRSTFR': 'firstframe', 'S_LASTFR': 'lastframe', 'S_RSCD': 'rscd', + 'S_JUMP': 'jump', 'S_RAMP': 'rate'} + 
def completed_pipeline_steps(filename):
    """Return a dictionary of which pipeline steps have been run on a file.

    Parameters
    ----------
    filename : str
        Name of the fits file to examine

    Returns
    -------
    completed : collections.OrderedDict
        Dictionary with a boolean entry for each pipeline step,
        indicating which pipeline steps have been run on ``filename``.
        Entries appear in ``PIPE_KEYWORDS`` (i.e. execution) order.
    """

    # Initialize using PIPE_KEYWORDS so that entries are guaranteed to
    # be in the correct order
    completed = OrderedDict({})
    for step in PIPE_KEYWORDS.values():
        completed[step] = False

    header = fits.getheader(filename)
    for keyword, step in PIPE_KEYWORDS.items():
        # Header.get() returns a default for missing keywords rather
        # than raising KeyError, so the previous try/except here was
        # dead code — and its handler used '==' instead of '=', which
        # would have left 'value' unassigned had it ever run.
        value = header.get(keyword, 'NOT DONE')
        if value == 'COMPLETE':
            completed[step] = True

    return completed
def get_pipeline_steps(instrument):
    """Return the ordered ``calwebb_detector1`` steps for an instrument.

    Step names match the values in ``PIPE_KEYWORDS`` (and the
    ``PIPE_STEP`` definition in ``definitions.py``).

    Parameters
    ----------
    instrument : str
        Name of JWST instrument (case-insensitive)

    Returns
    -------
    required_steps : collections.OrderedDict
        Keys are pipeline step names in execution order; values are
        True for the steps the instrument requires, False otherwise

    Raises
    ------
    ValueError
        If ``instrument`` is not a recognized JWST instrument name
    """

    # Ensure instrument name is valid
    instrument = instrument.upper()
    if instrument not in JWST_INSTRUMENT_NAMES_UPPERCASE.values():
        raise ValueError("WARNING: {} is not a valid instrument name.".format(instrument))

    # Execution order matters in the step lists below!
    if instrument == 'MIRI':
        # MIRI: no persistence correction, and no group_scale because
        # MIRI is limited to one frame per group
        steps = ['dq_init', 'saturation', 'ipc', 'firstframe', 'lastframe',
                 'linearity', 'rscd', 'dark_current', 'refpix', 'jump', 'rate']
    elif instrument == 'NIRSPEC':
        # NIRSpec: no persistence correction; group_scale is kept
        steps = ['group_scale', 'dq_init', 'saturation', 'ipc', 'superbias',
                 'refpix', 'linearity', 'dark_current', 'jump', 'rate']
    else:
        # NIRCam, NIRISS, FGS: nframes is always a power of 2, so
        # group_scale is not needed
        steps = ['dq_init', 'saturation', 'ipc', 'superbias', 'refpix',
                 'linearity', 'persistence', 'dark_current', 'jump', 'rate']

    # IPC correction is currently not done for any instrument
    steps.remove('ipc')

    # Required steps first (in order), then every remaining known step
    # marked as not required, so keys cover all of PIPE_KEYWORDS
    required_steps = OrderedDict((name, True) for name in steps)
    for name in PIPE_KEYWORDS.values():
        if name not in required_steps:
            required_steps[name] = False

    return required_steps
def run_calwebb_detector1_steps(input_file, steps):
    """Run the steps of ``calwebb_detector1`` specified in the steps
    dictionary on the input file.

    Steps are run in the order in which they appear in ``steps``, which
    is expected to be the official ``calwebb_detector1`` order.

    Parameters
    ----------
    input_file : str
        File on which to run the pipeline steps

    steps : collections.OrderedDict
        Keys are the individual pipeline steps (as seen in the
        ``PIPE_KEYWORDS`` values above). Boolean values indicate whether
        a step should be run or not.

    Returns
    -------
    output_filename : str
        Name of the file written by the final step that was run. If no
        steps are flagged to run, ``input_file`` is returned unchanged.
    """

    # If no steps need to be run, the input file is already fully
    # processed. Initializing here also fixes an UnboundLocalError at
    # the return statement below: previously output_filename was only
    # assigned inside the loop.
    output_filename = input_file

    first_step_to_be_run = True
    for step_name in steps:
        if steps[step_name]:
            if first_step_to_be_run:
                # The first step reads directly from the input file
                model = PIPELINE_STEP_MAPPING[step_name].call(input_file)
                first_step_to_be_run = False
            else:
                # Subsequent steps operate on the in-memory model
                model = PIPELINE_STEP_MAPPING[step_name].call(model)
            suffix = step_name
            output_filename = input_file.replace('.fits', '_{}.fits'.format(suffix))
            if suffix != 'rate':
                model.save(output_filename)
            else:
                # The ramp-fitting step returns multiple products; save
                # the first (the slope image) — TODO confirm against the
                # jwst RampFitStep API
                model[0].save(output_filename)

    return output_filename
def steps_to_run(all_steps, finished_steps):
    """Determine which pipeline steps still need to be run.

    Given the steps that must ultimately be completed and the steps
    already completed, return the steps remaining to be done. If a step
    has been run that should *not* have been, a warning is printed and
    the step is marked as not-to-run (a file cannot be un-processed; a
    new input file is needed).

    Parameters
    ----------
    all_steps : collections.OrderedDict
        A dictionary of all steps that need to be completed

    finished_steps : collections.OrderedDict
        A dictionary with keys equal to the pipeline steps and boolean
        values indicating whether a particular step has been completed
        or not (i.e. output from ``completed_pipeline_steps``)

    Returns
    -------
    torun : collections.OrderedDict
        A dictionary with keys equal to the pipeline steps and boolean
        values indicating whether a particular step has yet to be run
    """

    torun = copy.deepcopy(finished_steps)

    for key in all_steps:
        if all_steps[key] == finished_steps[key]:
            # Either already done, or not required at all
            torun[key] = False
        elif all_steps[key] and not finished_steps[key]:
            # Required but not yet done
            torun[key] = True
        elif not all_steps[key] and finished_steps[key]:
            # Run but should not have been: warn, and do not flag the
            # step to be run. (Previously the True value copied from
            # finished_steps leaked through this branch, incorrectly
            # marking the step as still-to-run.)
            print(("WARNING: {} step has been run "
                   "but the requirements say that it should not "
                   "be. Need a new input file.".format(key)))
            torun[key] = False

    return torun
def test_double_gaussian_fit():
    """Test the double Gaussian fitting function"""

    # Truth parameters (amplitude, mean, stddev) of the two components
    gauss1 = (500, 0.5, 0.05)
    gauss2 = (300, 0.4, 0.03)

    bin_centers = np.arange(0., 1.1, 0.007)
    input_params = list(gauss1) + list(gauss2)
    input_values = calculations.double_gaussian(bin_centers, *input_params)

    # Start the fit from deliberately offset initial guesses; it should
    # still recover the truth values
    peak_guess = np.max(input_values)
    initial_params = [peak_guess, 0.55, 0.1, peak_guess, 0.5, 0.05]
    params, sigma = calculations.double_gaussian_fit(bin_centers, input_values, initial_params)

    # The fitted components come back with gauss2's parameters first
    assert np.allclose(np.array(params[0:3]), np.array(gauss2),
                       atol=0, rtol=0.000001)
    assert np.allclose(np.array(params[3:]), np.array(gauss1),
                       atol=0, rtol=0.000001)
def test_mean_image():
    """Test the sigma-clipped mean and stdev image calculator"""

    # Stack of 50 5x5 images with alternating frame values of 4 and 5,
    # giving a per-pixel mean of 4.5 and standard deviation of 0.5
    nstack = 50
    cube = np.zeros((nstack, 5, 5))
    cube[0::2, :, :] = 4.
    cube[1::2, :, :] = 5.

    # Plant outliers that sigma clipping must reject. Each affected
    # pixel loses one 4 and one 5, keeping its mean at 4.5 and its
    # deviation at 0.5
    outliers = [((0, 0, 0), 55.), ((1, 0, 0), -78.),
                ((3, 3, 3), 150.), ((2, 3, 3), 32.),
                ((1, 4, 4), -96.), ((4, 4, 4), -25.)]
    for index, value in outliers:
        cube[index] = value

    mean_img, dev_img = calculations.mean_image(cube, sigma_threshold=3)

    assert np.all(mean_img == 4.5)
    assert np.all(dev_img == 0.5)
def test_find_hot_dead_pixels():
    """Test hot and dead pixel searches"""

    monitor = dark_monitor.Dark(testing=True)

    # "Baseline" image of all ones against which to compare
    comparison_image = np.ones((10, 10))

    # Mean slope image containing candidate hot and dead pixels
    mean_image = np.ones((10, 10))
    mean_image[0, 0] = 1.7
    mean_image[1, 1] = 2.2
    mean_image[7, 7] = 4.5
    mean_image[5, 5] = 0.12
    mean_image[6, 6] = 0.06
    mean_image[7, 3] = 0.09

    hot, dead = monitor.find_hot_dead_pixels(mean_image, comparison_image,
                                             hot_threshold=2., dead_threshold=0.1)

    # Hot pixels found at (1, 1) and (7, 7); (0, 0) at 1.7 is below the
    # hot threshold of 2
    assert len(hot) == 2
    assert np.all(hot[0] == np.array([1, 7]))
    assert np.all(hot[1] == np.array([1, 7]))

    # Dead pixels found at (6, 6) and (7, 3); (5, 5) at 0.12 is above
    # the dead threshold of 0.1
    assert len(dead) == 2
    assert np.all(dead[0] == np.array([6, 7]))
    assert np.all(dead[1] == np.array([6, 3]))
def test_noise_check():
    """Test the search for noisier than average pixels"""

    # Baseline and current noise images, 0.5 everywhere
    baseline = np.full((10, 10), 0.5)
    noise_image = np.full((10, 10), 0.5)

    # Pixels noisier than their baseline by ratios 1.6, 1.2, and 2.0
    noise_image[3, 3] = 0.8
    noise_image[6, 6] = 0.6
    noise_image[9, 9] = 1.0

    # Pixel with an elevated baseline: 1.25 / 1.0 stays below threshold
    baseline[5, 5] = 1.0
    noise_image[5, 5] = 1.25

    monitor = dark_monitor.Dark(testing=True)
    noisy = monitor.noise_check(noise_image, baseline, threshold=1.5)

    # Only (3, 3) and (9, 9) exceed 1.5x their baseline noise
    assert len(noisy[0]) == 2
    assert np.all(noisy[0] == np.array([3, 9]))
    assert np.all(noisy[1] == np.array([3, 9]))
b/jwql/tests/test_instrument_properties.py new file mode 100644 index 000000000..5e9af4784 --- /dev/null +++ b/jwql/tests/test_instrument_properties.py @@ -0,0 +1,83 @@ +#! /usr/bin/env python + +"""Tests for the ``instrument_properties`` module. + +Authors +------- + + - Bryan Hilbert + +Use +--- + + These tests can be run via the command line (omit the ``-s`` to + suppress verbose output to stdout): + :: + + pytest -s test_instrument_properties.py +""" + +import os +import pytest + +import numpy as np + +from jwql.utils import instrument_properties +from jwql.utils.utils import get_config + + +@pytest.mark.skipif(os.path.expanduser('~') == '/home/jenkins', + reason='Requires access to central storage.') +def test_amplifier_info(): + """Test that the correct number of amplifiers are found for a given + file + """ + + data_dir = os.path.join(get_config()['test_dir'], 'dark_monitor') + + fullframe = instrument_properties.amplifier_info(os.path.join(data_dir, 'test_image_ff.fits')) + fullframe_truth = (4, {'1': [(4, 4), (512, 2044)], + '2': [(512, 4), (1024, 2044)], + '3': [(1024, 4), (1536, 2044)], + '4': [(1536, 4), (2044, 2044)]}) + assert fullframe == fullframe_truth + + fullframe = instrument_properties.amplifier_info(os.path.join(data_dir, 'test_image_ff.fits'), omit_reference_pixels=False) + fullframe_truth = (4, {'1': [(0, 0), (512, 2048)], + '2': [(512, 0), (1024, 2048)], + '3': [(1024, 0), (1536, 2048)], + '4': [(1536, 0), (2048, 2048)]}) + assert fullframe == fullframe_truth + + subarray = instrument_properties.amplifier_info(os.path.join(data_dir, 'test_image_1.fits')) + subarray_truth = (1, {'1': [(0, 0), (10, 10)]}) + assert subarray == subarray_truth + + subarray_one = instrument_properties.amplifier_info(os.path.join(data_dir, 'test_image_grismstripe_one_amp.fits')) + subarray_one_truth = (1, {'1': [(4, 4), (2044, 64)]}) + assert subarray_one == subarray_one_truth + + subarray_four = instrument_properties.amplifier_info(os.path.join(data_dir, 
def test_calc_frame_time():
    """Test calculation of frametime for a given instrument/aperture"""

    # Expected frame times, in seconds
    nearir_fullframe = 10.73677
    nircam_160 = 0.27864  # TODO: confirm NIRCam SUB160 frame time on JDox

    nrc_fullframe = instrument_properties.calc_frame_time('nircam', 'NRCA1_FULL', 2048, 2048, 4)
    nrc_160 = instrument_properties.calc_frame_time('nircam', 'NRCA1_SUB160', 160, 160, 1)
    nis_fullframe = instrument_properties.calc_frame_time('niriss', 'NIS_CEN', 2048, 2048, 4)

    # TODO: add frame time calculations (and test cases, including
    # subarrays) for MIRI and NIRSpec once calc_frame_time supports them

    assert np.isclose(nrc_fullframe, nearir_fullframe, atol=0.001, rtol=0)
    assert np.isclose(nrc_160, nircam_160, atol=0.001, rtol=0)
    assert np.isclose(nis_fullframe, nearir_fullframe, atol=0.001, rtol=0)
@pytest.mark.skipif(os.path.expanduser('~') == '/home/jenkins',
                    reason='Requires access to central storage.')
def test_completed_pipeline_steps():
    """Test that the list of completed pipeline steps for a file is
    correct
    """

    # Known file in the central-storage filesystem with a known
    # processing history
    filename = os.path.join(get_config()['filesystem'], 'jw00312',
                            'jw00312002001_02102_00001_nrcb4_rateints.fits')
    completed_steps = pipeline_tools.completed_pipeline_steps(filename)

    # Expected step history for this NIRCam rateints file
    true_completed = OrderedDict([('group_scale', False),
                                  ('dq_init', True),
                                  ('saturation', True),
                                  ('ipc', False),
                                  ('refpix', True),
                                  ('superbias', True),
                                  ('persistence', True),
                                  ('dark_current', True),
                                  ('linearity', True),
                                  ('firstframe', False),
                                  ('lastframe', False),
                                  ('rscd', False),
                                  ('jump', True),
                                  ('rate', True)])

    assert completed_steps == true_completed
nrs_req_steps = pipeline_tools.get_pipeline_steps('nirspec') + nrs_steps = ['group_scale', 'dq_init', 'saturation', 'superbias', 'refpix', 'linearity', + 'dark_current', 'jump', 'rate'] + not_required = ['ipc', 'persistence', 'firstframe', 'lastframe', 'rscd'] + nrs_dict = OrderedDict({}) + for step in nrs_steps: + nrs_dict[step] = True + for step in not_required: + nrs_dict[step] = False + assert nrs_req_steps == nrs_dict + + miri_req_steps = pipeline_tools.get_pipeline_steps('miri') + miri_steps = ['dq_init', 'saturation', 'firstframe', 'lastframe', + 'linearity', 'rscd', 'dark_current', 'refpix', 'jump', 'rate'] + not_required = ['group_scale', 'ipc', 'superbias', 'persistence'] + miri_dict = OrderedDict({}) + for step in miri_steps: + miri_dict[step] = True + for step in not_required: + miri_dict[step] = False + assert miri_req_steps == miri_dict + + +@pytest.mark.skipif(os.path.expanduser('~') == '/home/jenkins', + reason='Requires access to central storage.') +def test_image_stack(): + """Test stacking of slope images""" + + directory = os.path.join(get_config()['test_dir'], 'dark_monitor') + files = [os.path.join(directory, 'test_image_{}.fits'.format(str(i+1))) for i in range(3)] + + image_stack, exptimes = pipeline_tools.image_stack(files) + truth = np.zeros((3, 10, 10)) + truth[0, :, :] = 5. + truth[1, :, :] = 10. + truth[2, :, :] = 15. 
def test_steps_to_run():
    """Test that the dictionaries of steps required and steps completed
    are correctly combined into a dictionary of pipeline steps still to
    be done
    """

    # All steps that must ultimately be run on the file
    required = OrderedDict([('group_scale', True),
                            ('dq_init', False),
                            ('saturation', False),
                            ('ipc', False),
                            ('refpix', False),
                            ('superbias', False),
                            ('persistence', True),
                            ('dark_current', True),
                            ('linearity', False),
                            ('firstframe', False),
                            ('lastframe', False),
                            ('rscd', False),
                            ('jump', True),
                            ('rate', True)])

    # Steps already run; only jump and rate are still pending
    already_done = OrderedDict([('group_scale', True),
                                ('dq_init', False),
                                ('saturation', False),
                                ('ipc', False),
                                ('refpix', False),
                                ('superbias', False),
                                ('persistence', True),
                                ('dark_current', True),
                                ('linearity', False),
                                ('firstframe', False),
                                ('lastframe', False),
                                ('rscd', False),
                                ('jump', False),
                                ('rate', False)])

    steps_to_run = pipeline_tools.steps_to_run(required, already_done)

    # Only the steps that are required but not yet done remain
    true_steps_to_run = OrderedDict([('group_scale', False),
                                     ('dq_init', False),
                                     ('saturation', False),
                                     ('ipc', False),
                                     ('refpix', False),
                                     ('superbias', False),
                                     ('persistence', False),
                                     ('dark_current', False),
                                     ('linearity', False),
                                     ('firstframe', False),
                                     ('lastframe', False),
                                     ('rscd', False),
                                     ('jump', True),
                                     ('rate', True)])

    assert steps_to_run == true_steps_to_run
@pytest.mark.skipif(os.path.expanduser('~') == '/home/jenkins',
                    reason='Requires access to central storage.')
def test_copy_files():
    """Test that files are copied successfully"""

    # Create an example file to be copied
    data_dir = os.path.dirname(__file__)
    file_to_copy = 'file.txt'
    original_file = os.path.join(data_dir, file_to_copy)
    Path(original_file).touch()
    assert os.path.exists(original_file), 'Failed to create original test file.'

    # Make a copy one level up
    new_location = os.path.abspath(os.path.join(data_dir, '../'))
    copied_file = os.path.join(new_location, file_to_copy)

    try:
        # Copy the file
        success, failure = copy_files([original_file], new_location)
        assert success == [copied_file]
        assert os.path.isfile(copied_file)
    finally:
        # Clean up even if an assertion above fails, so repeated test
        # runs do not accumulate stray files (previously the removals
        # were skipped whenever the test failed)
        os.remove(original_file)
        if os.path.exists(copied_file):
            os.remove(copied_file)
def double_gaussian(x, amp1, peak1, sigma1, amp2, peak2, sigma2):
    """Evaluate the sum of two Gaussians at ``x``

    Parameters
    ----------
    x : numpy.ndarray
        1D array of x values at which to evaluate

    amp1, peak1, sigma1 : float
        Amplitude, mean, and standard deviation of the first Gaussian

    amp2, peak2, sigma2 : float
        Amplitude, mean, and standard deviation of the second Gaussian

    Returns
    -------
    y_values : numpy.ndarray
        Sum of the two Gaussians evaluated at ``x``

    Notes
    -----
    The previous docstring documented a nonexistent ``params : list``
    argument; the coefficients are individual parameters so this
    function can be passed directly to ``scipy.optimize.curve_fit``.
    """

    y_values = amp1 * np.exp(-(x - peak1)**2.0 / (2.0 * sigma1**2.0)) \
        + amp2 * np.exp(-(x - peak2)**2.0 / (2.0 * sigma2**2.0))

    return y_values


def double_gaussian_fit(x_values, y_values, input_params):
    """Fit the sum of two Gaussians to the given data

    Parameters
    ----------
    x_values : numpy.ndarray
        1D array of x values to be fit

    y_values : numpy.ndarray
        1D array of y values to be fit

    input_params : list
        Initial guesses for Gaussian coefficients
        ``[amplitude1, peak1, stdev1, amplitude2, peak2, stdev2]``

    Returns
    -------
    params : numpy.ndarray
        Fitted parameter values, in the same order as ``input_params``

    sigma : numpy.ndarray
        Uncertainties on the parameters, from the square root of the
        diagonal of the covariance matrix
    """

    params, cov = curve_fit(double_gaussian, x_values, y_values, input_params)
    sigma = np.sqrt(np.diag(cov))

    return params, sigma
+ + Parameters + ---------- + x_values : numpy.ndarray + 1D array of x values to be fit + + y_values : numpy.ndarray + 1D array of y values to be fit + + Returns + ------- + amplitude : tup + Tuple of the best fit Gaussian amplitude and uncertainty + + peak : tup + Tuple of the best fit Gaussian peak position and uncertainty + + width : tup + Tuple of the best fit Gaussian width and uncertainty + """ + + model_gauss = models.Gaussian1D(amplitude=params[0], mean=params[1], stddev=params[2]) + fitter_gauss = fitting.LevMarLSQFitter() + best_fit = fitter_gauss(model_gauss, x_values, y_values) + cov_diag = np.diag(fitter_gauss.fit_info['param_cov']) + + # Arrange each parameter into (best_fit_value, uncertainty) tuple + amplitude = (best_fit.amplitude.value, np.sqrt(cov_diag[0])) + peak = (best_fit.mean.value, np.sqrt(cov_diag[1])) + width = (best_fit.stddev.value, np.sqrt(cov_diag[2])) + + return amplitude, peak, width + + +def mean_image(cube, sigma_threshold=3): + """Combine a stack of 2D images into a mean slope image, using + sigma-clipping on a pixel-by-pixel basis + + Parameters + ---------- + cube : numpy.ndarray + 3D array containing a stack of 2D images + + sigma_threshold : int + Number of sigma to use when sigma-clipping values in each + pixel + + Returns + ------- + mean_image : numpy.ndarray + 2D sigma-clipped mean image + + stdev_image : numpy.ndarray + 2D sigma-clipped standard deviation image + """ + + clipped_cube = sigma_clip(cube, sigma=sigma_threshold, axis=0, masked=False) + mean_image = np.nanmean(clipped_cube, axis=0) + std_image = np.nanstd(clipped_cube, axis=0) + + return mean_image, std_image + + +def mean_stdev(image, sigma_threshold=3): + """Calculate the sigma-clipped mean and stdev of an input array + + Parameters + ---------- + image : numpy.ndarray + Array of which to calculate statistics + + sigma_threshold : float + Number of sigma to use when sigma-clipping + + Returns + ------- + mean_value : float + Sigma-clipped mean of image + + 
def amplifier_info(filename, omit_reference_pixels=True):
    """Calculate the number of amplifiers used to collect the data in a
    given file using the array size and exposure time of a single frame
    (This is needed because there is no header keyword specifying
    how many amps were used.)

    Parameters
    ----------
    filename : str
        Name of fits file to investigate

    omit_reference_pixels : bool
        If ``True``, return the amp boundary coordinates excluding
        reference pixels

    Returns
    -------
    num_amps : int
        Number of amplifiers used to read out the data

    amp_bounds : dict
        Dictionary of amplifier boundary coordinates. Keys are strings of
        the amp number (1-4). Each value is a list composed of two tuples.
        The first tuple gives the (x, y) starting location, and the second
        tuple gives the (x, y) ending location.

    Raises
    ------
    ValueError
        If the number of amps cannot be inferred from the frame time

    KeyError
        If ``omit_reference_pixels`` is ``True`` and the file has no
        ``DQ`` extension
    """

    # First get necessary metadata
    header = fits.getheader(filename)
    instrument = header['INSTRUME'].lower()
    detector = header['DETECTOR']
    x_dim = header['SUBSIZE1']
    y_dim = header['SUBSIZE2']
    sample_time = header['TSAMPLE'] * 1.e-6  # TSAMPLE is in microseconds
    frame_time = header['TFRAME']
    subarray_name = header['SUBARRAY']
    aperture = "{}_{}".format(detector, subarray_name)

    # Full frame data will be 2048x2048 for all instruments
    if ((x_dim == 2048) and (y_dim == 2048)) or subarray_name in FOUR_AMP_SUBARRAYS:
        num_amps = 4
        # deepcopy so the reference-pixel trimming below cannot mutate
        # the module-level AMPLIFIER_BOUNDARIES constant
        amp_bounds = deepcopy(AMPLIFIER_BOUNDARIES[instrument])
    else:
        if subarray_name not in SUBARRAYS_ONE_OR_FOUR_AMPS:
            num_amps = 1
            amp_bounds = {'1': [(0, 0), (x_dim, y_dim)]}
        else:
            # These are the tougher cases: subarrays that can be used
            # with multiple amp combinations. Compare the reported
            # frametime with the frametimes calculated using 4 amps or
            # 1 amp.
            #
            # Right now this is used only for the NIRCam grism stripe
            # subarrays, so we don't need this to be a general case that
            # can handle any subarray orientation relative to any amp
            # orientation
            amp4_time = calc_frame_time(instrument, aperture, x_dim, y_dim,
                                        4, sample_time=sample_time)
            amp1_time = calc_frame_time(instrument, aperture, x_dim, y_dim,
                                        1, sample_time=sample_time)

            if np.isclose(amp4_time, frame_time, atol=0.001, rtol=0):
                num_amps = 4
                # In this case, keep the full frame amp boundaries in
                # the x direction, and set the boundaries in the y
                # direction equal to the height of the subarray
                amp_bounds = deepcopy(AMPLIFIER_BOUNDARIES[instrument])
                for amp_num in ['1', '2', '3', '4']:
                    newdims = (amp_bounds[amp_num][1][0], y_dim)
                    amp_bounds[amp_num][1] = newdims
            elif np.isclose(amp1_time, frame_time, atol=0.001, rtol=0):
                num_amps = 1
                amp_bounds = {'1': [(0, 0), (x_dim, y_dim)]}
            else:
                # Fixed the implicit string concatenation, which was
                # producing the garbled message "4-amp frametimeis {}"
                raise ValueError(('Unable to determine number of amps used for exposure. 4-amp frametime '
                                  'is {}. 1-amp frametime is {}. Reported frametime is {}.')
                                 .format(amp4_time, amp1_time, frame_time))

    if omit_reference_pixels:
        # If requested, ignore reference pixels by adjusting the indexes
        # of the amp boundaries.
        with fits.open(filename) as hdu:
            try:
                data_quality = hdu['DQ'].data
            except KeyError:
                raise KeyError('DQ extension not found.')

        # Reference pixels should be flagged in the DQ array with the
        # REFERENCE_PIXEL flag. Find the science pixels by looking for
        # pixels that don't have that bit set.
        scipix = np.where(data_quality & dqflags.pixel['REFERENCE_PIXEL'] == 0)
        ymin = np.min(scipix[0])
        xmin = np.min(scipix[1])
        ymax = np.max(scipix[0]) + 1
        xmax = np.max(scipix[1]) + 1

        # Clamp each amp's boundaries to the science-pixel bounding box
        for key in amp_bounds:
            bounds = amp_bounds[key]
            prev_xmin, prev_ymin = bounds[0]
            prev_xmax, prev_ymax = bounds[1]
            new_xmin = max(prev_xmin, xmin)
            new_ymin = max(prev_ymin, ymin)
            new_xmax = min(prev_xmax, xmax)
            new_ymax = min(prev_ymax, ymax)
            amp_bounds[key] = [(new_xmin, new_ymin), (new_xmax, new_ymax)]

    return num_amps, amp_bounds
def calc_frame_time(instrument, aperture, xdim, ydim, amps, sample_time=1.e-5):
    """Calculate the readout time for a single frame of a given size and
    number of amplifiers. Note that for NIRISS and FGS, the fast readout
    direction is opposite to that in NIRCam, so we switch ``xdim`` and
    ``ydim`` so that we can keep a single equation.

    Parameters
    ----------
    instrument : str
        Name of the instrument being simulated. Must be one of
        ``nircam``, ``niriss``, or ``fgs`` (case-insensitive).

    aperture : str
        Name of aperture being simulated (e.g ``NRCA1_FULL``).
        Currently this is only used to check for the FGS ``ACQ1``
        aperture, which uses a unique value of ``colpad`` below.

    xdim : int
        Number of columns in the frame

    ydim : int
        Number of rows in the frame

    amps : int
        Number of amplifiers used to read out the frame

    sample_time : float
        Time to sample a pixel, in seconds. For NIRCam/NIRISS/FGS
        this is 10 microseconds = 1e-5 seconds

    Returns
    -------
    frametime : float
        Readout time in seconds for the frame

    Raises
    ------
    ValueError
        If ``instrument`` is not one of the supported instruments
    """

    instrument = instrument.lower()
    if instrument == "nircam":
        colpad = 12

        # Fullframe
        if amps == 4:
            rowpad = 1
            fullpad = 1
        else:
            # All subarrays
            rowpad = 2
            fullpad = 0
            if ((xdim <= 8) & (ydim <= 8)):
                # The smallest subarray
                rowpad = 3

    elif instrument == "niriss":
        colpad = 12

        # Fullframe
        if amps == 4:
            rowpad = 1
            fullpad = 1
        else:
            rowpad = 2
            fullpad = 0

    elif instrument == 'fgs':
        colpad = 6
        if 'acq1' in aperture.lower():
            colpad = 12
        rowpad = 1
        if amps == 4:
            fullpad = 1
        else:
            fullpad = 0

    else:
        # Previously an unsupported instrument fell through with colpad,
        # rowpad, and fullpad undefined, producing an opaque NameError
        # at the return statement. Fail loudly and clearly instead.
        raise ValueError('Unsupported instrument: {}'.format(instrument))

    return ((1.0 * xdim / amps + colpad) * (ydim + rowpad) + fullpad) * sample_time
def copy_files(files, out_dir):
    """Copy given files to a given directory. A file is only copied
    if it is not already present in the output directory.

    Parameters
    ----------
    files : list
        List of paths of files to be copied

    out_dir : str
        Destination directory

    Returns
    -------
    success : list
        Files successfully copied (or that already existed in out_dir)

    failed : list
        Files that were not copied
    """

    # Copy files if they do not already exist
    success = []
    failed = []
    for input_file in files:
        input_new_path = os.path.join(out_dir, os.path.basename(input_file))
        if os.path.isfile(input_new_path):
            # Already present at the destination; count as a success
            success.append(input_new_path)
        else:
            try:
                shutil.copy2(input_file, out_dir)
                # Set permissions BEFORE recording success. Previously
                # success.append ran first, so a set_permissions failure
                # landed the same file in BOTH success and failed.
                permissions.set_permissions(input_new_path)
                success.append(input_new_path)
            except Exception:
                # Narrowed from a bare ``except:``, which also swallowed
                # KeyboardInterrupt/SystemExit
                failed.append(input_file)
    return success, failed
def download_mast_data(query_results, output_dir):
    """Example function for downloading MAST query results. From MAST
    website (``https://mast.stsci.edu/api/v0/pyex.html``)

    Parameters
    ----------
    query_results : list
        List of dictionaries returned by a MAST query. Each entry is
        assumed to carry ``filename`` and ``dataURI`` keys, per the
        MAST example API -- TODO confirm against the caller.

    output_dir : str
        Directory into which the files will be downloaded
    """

    # Local import: the original referenced an undefined ``httplib``
    # (the Python 2 module name); in Python 3 it is ``http.client``
    from http.client import HTTPSConnection

    # Set up the https connection
    server = 'mast.stsci.edu'
    conn = HTTPSConnection(server)

    try:
        # Download the products
        print('Number of query results: {}'.format(len(query_results)))

        for result in query_results:

            # Make full output file path
            output_file = os.path.join(output_dir, result['filename'])
            print('Output file is {}'.format(output_file))

            # Download the data
            uri = result['dataURI']
            print('uri is {}'.format(uri))
            conn.request("GET", "/api/v0/download/file?uri=" + uri)
            resp = conn.getresponse()
            file_content = resp.read()

            # Save to file
            with open(output_file, 'wb') as file_obj:
                file_obj.write(file_content)

            # Check for file
            if not os.path.isfile(output_file):
                print("ERROR: {} failed to download.".format(output_file))
            else:
                statinfo = os.stat(output_file)
                if statinfo.st_size > 0:
                    print("DOWNLOAD COMPLETE: ", output_file)
                else:
                    print("ERROR: {} file is empty.".format(output_file))
    finally:
        # Close the connection even if a download raises (previously
        # the connection leaked on any error)
        conn.close()


def ensure_dir_exists(fullpath):
    """Creates dirs from ``fullpath`` if they do not already exist."""
    if not os.path.exists(fullpath):
        os.makedirs(fullpath)
        permissions.set_permissions(fullpath)
def filesystem_path(filename):
    """Return the full path to a given file in the filesystem

    Parameters
    ----------
    filename : str
        File to locate (e.g. ``jw86600006001_02101_00008_guider1_cal.fits``)

    Returns
    -------
    full_path : str
        Full path to the given file, including filename
    """

    # The subdirectory housing the file is named for the proposal ID
    proposal_dir = 'jw{}'.format(filename_parser(filename)['program_id'])
    full_path = os.path.join(get_config()["filesystem"], proposal_dir, filename)

    # Guard clause: fail if the file is not where we predict
    if not os.path.isfile(full_path):
        raise FileNotFoundError(('{} is not in the predicted location: {}'.format(filename, full_path)))
    return full_path


def initialize_instrument_monitor(module):
    """Configures a log file for the instrument monitor run and
    captures the start time of the monitor

    Parameters
    ----------
    module : str
        The module name (e.g. ``dark_monitor``)

    Returns
    -------
    start_time : datetime object
        The start time of the monitor
    log_file : str
        The path to where the log file is stored
    """

    # Imported locally, presumably to avoid a circular import with
    # logging_functions -- TODO confirm
    from jwql.utils.logging_functions import configure_logging

    start_time = datetime.datetime.now()
    log_file = configure_logging(module)
    return start_time, log_file
def update_monitor_table(module, start_time, log_file):
    """Update the ``monitor`` database table with information about
    the instrument monitor run

    Parameters
    ----------
    module : str
        The module name (e.g. ``dark_monitor``)
    start_time : datetime object
        The start time of the monitor
    log_file : str
        The path to where the log file is stored
    """

    from jwql.database.database_interface import Monitor

    # Build the row as a single literal rather than key-by-key
    new_entry = {
        'monitor_name': module,
        'start_time': start_time,
        'end_time': datetime.datetime.now(),
        'log_file': os.path.basename(log_file),
    }

    Monitor.__table__.insert().execute(new_entry)