Skip to content

Commit

Permalink
Add timezone support. Fix int index. Cleanup
Browse files — browse the repository at this point in the history
  • Loading branch information
bhilbert4 committed Mar 5, 2024
1 parent 69664ff commit d389329
Show file tree
Hide file tree
Showing 3 changed files with 15 additions and 27 deletions.
38 changes: 13 additions & 25 deletions jwql/instrument_monitors/common_monitors/dark_monitor.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@
import os

from astropy.io import ascii, fits
from astropy.modeling import models
from astropy.modeling.models import Gaussian1D
from astropy.stats import sigma_clipped_stats
from astropy.time import Time
from bokeh.models import ColorBar, ColumnDataSource, HoverTool, Legend
Expand Down Expand Up @@ -234,7 +234,7 @@ def add_bad_pix(self, coordinates, pixel_type, files, mean_filename, baseline_fi
'obs_end_time': observation_end_time,
'mean_dark_image_file': os.path.basename(mean_filename),
'baseline_file': os.path.basename(baseline_filename),
'entry_date': datetime.datetime.now()}
'entry_date': datetime.datetime.now(datetime.timezone.utc)}
entry = self.pixel_table(**entry)
entry.save()

Expand Down Expand Up @@ -712,7 +712,7 @@ def process(self, file_list):
step_args = {'dark_current': {'skip': True}}

# Call the pipeline
#outputs = run_parallel_pipeline(pipeline_files, "dark", [output_suffix], self.instrument, step_args=step_args)
outputs = run_parallel_pipeline(pipeline_files, "dark", [output_suffix], self.instrument, step_args=step_args)

for filename in file_list:
processed_file = filename.replace("_dark", f"_{output_suffix}")
Expand All @@ -725,7 +725,7 @@ def process(self, file_list):
for item in slope_files:
logging.info('\t\t{}'.format(item))
# Get the observation time for each file
obstime = instrument_properties.get_obstime(item)
obstime = instrument_properties.get_obstime(item) # .replace(tzinfo=datetime.timezone.utc) - if get_obstime can't be changed
obs_times.append(obstime)

# Find the earliest and latest observation time, and calculate
Expand Down Expand Up @@ -864,7 +864,7 @@ def process(self, file_list):
'readpattern': self.readpatt,
'mean': amp_mean[key],
'stdev': amp_stdev[key],
'source_files': json.dumps(source_files),
'source_files': source_files,
'obs_start_time': min_time,
'obs_mid_time': mid_time,
'obs_end_time': max_time,
Expand All @@ -880,9 +880,9 @@ def process(self, file_list):
'double_gauss_width2': double_gauss_params[key][5],
'double_gauss_chisq': double_gauss_chisquared[key],
'mean_dark_image_file': os.path.basename(mean_slope_file),
'hist_dark_values': bins[key],
'hist_amplitudes': histogram[key],
'entry_date': datetime.datetime.now()
'hist_dark_values': list(bins[key]),
'hist_amplitudes': list(histogram[key]),
'entry_date': datetime.datetime.now(datetime.timezone.utc)
}
entry = self.stats_table(**dark_db_entry)
entry.save()
Expand Down Expand Up @@ -936,7 +936,7 @@ def run(self):
self.query_end = Time.now().mjd

# Loop over all instruments
for instrument in ['nircam']: # JWST_INSTRUMENT_NAMES:
for instrument in JWST_INSTRUMENT_NAMES:
self.instrument = instrument
logging.info(f'\n\nWorking on {instrument}')

Expand All @@ -950,7 +950,7 @@ def run(self):
# Get a list of all possible readout patterns associated with the aperture
possible_readpatts = RAPID_READPATTERNS[instrument]

for aperture in ['NRCB1_FULL']: #possible_apertures:
for aperture in possible_apertures:
logging.info('')
logging.info(f'Working on aperture {aperture} in {instrument}')

Expand All @@ -967,18 +967,6 @@ def run(self):
# Locate the record of the most recent MAST search
self.query_start = self.most_recent_search()




self.query_start = 59697. # apr 28, 2022
self.query_end = 59697.26







logging.info(f'\tQuery times: {self.query_start} {self.query_end}')

# Query MAST using the aperture and the time of the
Expand Down Expand Up @@ -1122,7 +1110,7 @@ def run(self):
'end_time_mjd': batch_end_time,
'files_found': len(dark_files),
'run_monitor': monitor_run,
'entry_date': datetime.datetime.now()}
'entry_date': datetime.datetime.now(datetime.timezone.utc)}

entry = self.query_table(**new_entry)
entry.save()
Expand All @@ -1143,7 +1131,7 @@ def run(self):
'end_time_mjd': self.query_end,
'files_found': len(new_entries),
'run_monitor': monitor_run,
'entry_date': datetime.datetime.now()}
'entry_date': datetime.datetime.now(datetime.timezone.utc)}

entry = self.query_table(**new_entry)
entry.save()
Expand Down Expand Up @@ -1542,7 +1530,7 @@ def stats_by_amp(self, image, amps):
amplitude, peak, width = calculations.gaussian1d_fit(bin_centers, hist, initial_params)
gaussian_params[key] = [amplitude, peak, width]

gauss_fit_model = models.Gaussian1D(amplitude=amplitude[0], mean=peak[0], stddev=width[0])
gauss_fit_model = Gaussian1D(amplitude=amplitude[0], mean=peak[0], stddev=width[0])
gauss_fit = gauss_fit_model(bin_centers)

positive = hist > 0
Expand Down
2 changes: 1 addition & 1 deletion jwql/utils/instrument_properties.py
Original file line number Diff line number Diff line change
Expand Up @@ -268,7 +268,7 @@ def get_obstime(filename):
time = h[0].header['TIME-OBS']
year, month, day = [int(element) for element in date.split('-')]
hour, minute, second = [float(element) for element in time.split(':')]
return datetime.datetime(year, month, day, int(hour), int(minute), int(second))
return datetime.datetime(year, month, day, int(hour), int(minute), int(second), tzinfo=datetime.timezone.utc)


def mean_time(times):
Expand Down
2 changes: 1 addition & 1 deletion jwql/website/apps/jwql/monitor_pages/monitor_dark_bokeh.py
Original file line number Diff line number Diff line change
Expand Up @@ -558,7 +558,7 @@ def get_latest_histogram_data(self):
self.db.stats_data[idx_int].hist_amplitudes)

# Keep track of the observation date of the most recent entry
self.hist_date = self.db.stats_data[most_recent_idx[0]].obs_mid_time
self.hist_date = self.db.stats_data[int(most_recent_idx[0])].obs_mid_time

def get_trending_data(self):
"""Organize data for the trending plot. Here we need all the data for
Expand Down

0 comments on commit d389329

Please sign in to comment.