Implement a new mapmaking template for periodic signals #690

Merged: 3 commits, Aug 16, 2023
6 changes: 6 additions & 0 deletions setup.py
@@ -305,6 +305,12 @@ def readme():
conf["extras_require"] = {
"mpi": ["mpi4py>=3.0"],
"totalconvolve": ["ducc0"],
"interactive": [
"wurlitzer",
"ipywidgets",
"plotly",
"plotly-resampler",
]
}
conf["packages"] = find_packages(
"src",
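Note: the new "interactive" entry in conf["extras_require"] defines an optional pip extra rather than hard dependencies, so code using these packages should degrade gracefully when they are absent. A minimal sketch (not part of this PR) of guarding interactive features on their availability; the helper name have_interactive is hypothetical:

import importlib.util

# Optional dependencies declared under the "interactive" extra in setup.py.
# The plotly-resampler package installs the module plotly_resampler.
_INTERACTIVE_DEPS = ("wurlitzer", "ipywidgets", "plotly", "plotly_resampler")


def have_interactive():
    """Return True if every optional interactive dependency is importable."""
    return all(
        importlib.util.find_spec(mod) is not None for mod in _INTERACTIVE_DEPS
    )


if not have_interactive():
    print("Install the optional interactive extras to enable plotting widgets")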
2 changes: 2 additions & 0 deletions src/toast/coordinates.py
@@ -95,9 +95,11 @@ def _ephem_transform(site, t):
quat = qa.norm(np.array([b, c, d, a]))
return quat


def _qpoint_transform(site, t, quats_azel):
"""Get the Az/El -> Ra/Dec conversion quaternion for boresight."""
import qpoint

qp = qpoint.QPoint(
accuracy="high",
fast_math=True,
23 changes: 16 additions & 7 deletions src/toast/ops/noise_model.py
@@ -602,9 +602,9 @@ def _exec(self, data, detectors=None, **kwargs):
local_net = list()
local_fknee = list()

# If we have an analytic noise model from a simulation or fit, then we can access
# the properties directly. If not, we will use the detector weight as a proxy
# for the NET and make a crude estimate of the knee frequency.
# If we have an analytic noise model from a simulation or fit, then we can
# access the properties directly. If not, we will use the detector weight
# as a proxy for the NET and make a crude estimate of the knee frequency.

for det in dets:
try:
@@ -623,28 +623,31 @@

# Gather data across the process column

local_net = np.array(local_net, dtype=np.float64)
local_fknee = np.array(local_fknee, dtype=np.float64)
net_mean = None
net_std = None
fknee_mean = None
fknee_std = None
good_fit = local_net > 0.0
if obs.comm_col is None:
all_net = np.array(local_net)
all_net = np.array(local_net[good_fit])
net_mean = np.mean(all_net)
net_std = np.std(all_net)
if self.sigma_fknee is not None:
all_fknee = np.array(local_fknee)
all_fknee = np.array(local_fknee[good_fit])
fknee_mean = np.mean(all_fknee)
fknee_std = np.std(all_fknee)
else:
all_net = obs.comm_col.gather(local_net, root=0)
all_net = obs.comm_col.gather(local_net[good_fit], root=0)
if obs.comm_col_rank == 0:
all_net = np.array([val for plist in all_net for val in plist])
net_mean = np.mean(all_net)
net_std = np.std(all_net)
net_mean = obs.comm_col.bcast(net_mean, root=0)
net_std = obs.comm_col.bcast(net_std, root=0)
if self.sigma_fknee is not None:
all_fknee = obs.comm_col.gather(local_fknee, root=0)
all_fknee = obs.comm_col.gather(local_fknee[good_fit], root=0)
if obs.comm_col_rank == 0:
all_fknee = np.array(
[val for plist in all_fknee for val in plist]
@@ -660,6 +663,12 @@
local_cut_fknee = list()
new_flags = dict()
for idet, det in enumerate(dets):
if not good_fit[idet]:
msg = f"obs {obs.name}, det {det} has NET=0 (bad model fit)"
log.info(msg)
obs.detdata[self.det_flags][det, :] |= self.det_flag_mask
new_flags[det] = self.det_flag_mask
continue
if np.absolute(local_net[idet] - net_mean) > net_std * self.sigma_NET:
msg = f"obs {obs.name}, det {det} has NET {local_net[idet]} "
msg += f" that is > {self.sigma_NET} x {net_std} from {net_mean}"
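Note: the substance of this change is easier to see outside of the MPI plumbing. Detectors whose fit produced NET = 0 are excluded from the ensemble statistics and flagged immediately, and the remaining detectors are cut when their NET deviates from the mean by more than sigma_NET standard deviations. A standalone sketch of that logic (the function below is illustrative, not code from the operator; sigma_net and flag_mask mirror the sigma_NET and det_flag_mask traits):

import numpy as np


def cut_outlier_nets(local_net, sigma_net=5.0, flag_mask=1):
    """Flag detectors with failed fits (NET == 0) or outlier NET values."""
    local_net = np.asarray(local_net, dtype=np.float64)
    flags = np.zeros(len(local_net), dtype=np.uint8)

    # Failed fits are flagged and excluded from the mean / std estimate.
    good_fit = local_net > 0.0
    flags[~good_fit] = flag_mask
    if not np.any(good_fit):
        return flags

    net_mean = np.mean(local_net[good_fit])
    net_std = np.std(local_net[good_fit])

    # Cut detectors whose NET is more than sigma_net standard deviations away.
    outlier = good_fit & (np.absolute(local_net - net_mean) > sigma_net * net_std)
    flags[outlier] = flag_mask
    return flags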
2 changes: 2 additions & 0 deletions src/toast/ops/polyfilter/polyfilter.py
@@ -783,6 +783,8 @@ def _exec(self, data, detectors=None, use_accel=None, **kwargs):
for value in values:
local_dets = []
for idet, det in enumerate(temp_ob.local_detectors):
if temp_ob.local_detector_flags[det] & self.det_flag_mask:
continue
if pat.match(det) is None:
continue
if (
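Note: the two added lines skip any detector whose per-detector flag word shares a bit with the operator's det_flag_mask, so detectors flagged by the noise-model cuts above never enter the polynomial filter. A small illustration of that bitmask test (detector names and flag values are made up):

# Per-detector flag words, as stored in Observation.local_detector_flags
local_detector_flags = {"d00": 0, "d01": 2, "d02": 3}
det_flag_mask = 2  # bit marking detectors that should be excluded

# Keep only detectors with none of the masked bits set
local_dets = [
    det for det, flags in local_detector_flags.items()
    if not (flags & det_flag_mask)
]
print(local_dets)  # -> ['d00']; 'd01' and 'd02' have the masked bit set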
15 changes: 9 additions & 6 deletions src/toast/ops/save_hdf5.py
@@ -176,7 +176,7 @@ class SaveHDF5(Operator):
detdata_in_place = Bool(
False,
help="If True, all compressed detector data will be decompressed and written "
"over the input data."
"over the input data.",
)

compress_detdata = Bool(False, help="If True, use FLAC to compress detector signal")
@@ -244,11 +244,14 @@ def _exec(self, data, detectors=None, **kwargs):
detdata_fields[ifield] = (field, {"type": "gzip"})
else:
# Everything else is FLAC-compressed
detdata_fields[ifield] = (field, {
"type": "flac",
"level": 5,
"precision": self.compress_precision,
})
detdata_fields[ifield] = (
field,
{
"type": "flac",
"level": 5,
"precision": self.compress_precision,
},
)

outpath = save_hdf5(
ob,
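Note: after this reformatting, each entry of detdata_fields pairs a field name with a compression spec consumed by save_hdf5. A hedged sketch of what the list looks like when detector-data compression is enabled (the field names are examples; the spec keys are the ones used above):

# Illustration of the per-field compression specs assembled in _exec above.
compress_precision = 10  # stands in for self.compress_precision

detdata_fields = [
    # Integer / flag-like fields stay losslessly gzip-compressed
    ("flags", {"type": "gzip"}),
    # Floating-point signal fields are FLAC-compressed with a precision cutoff
    (
        "signal",
        {
            "type": "flac",
            "level": 5,
            "precision": compress_precision,
        },
    ),
]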
1 change: 1 addition & 0 deletions src/toast/templates/CMakeLists.txt
@@ -6,6 +6,7 @@ install(FILES
template.py
amplitudes.py
fourier2d.py
periodic.py
subharmonic.py
gaintemplate.py
DESTINATION ${PYTHON_SITE}/toast/templates
1 change: 1 addition & 0 deletions src/toast/templates/__init__.py
@@ -8,5 +8,6 @@
from .fourier2d import Fourier2D
from .gaintemplate import GainTemplate
from .offset import Offset
from .periodic import Periodic
from .subharmonic import SubHarmonic
from .template import Template
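Note: with this export in place, the new template is importable alongside the existing ones. A one-line check (nothing beyond the import is implied here):

# The new template is now part of the public templates namespace.
from toast.templates import Offset, Periodic, SubHarmonic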
2 changes: 1 addition & 1 deletion src/toast/templates/offset/__init__.py
@@ -4,4 +4,4 @@

# Namespace imports

from .offset import Offset
from .offset import Offset, plot
205 changes: 205 additions & 0 deletions src/toast/templates/offset/offset.py
@@ -2,8 +2,10 @@
# All rights reserved. Use of this source code is governed by
# a BSD-style license that can be found in the LICENSE file.

import json
from collections import OrderedDict

import h5py
import numpy as np
import scipy
import scipy.signal
@@ -16,6 +18,7 @@
from ...timing import function_timer
from ...traits import Bool, Float, Int, Quantity, Unicode, UseEnum, trait_docs
from ...utils import AlignedF64, Logger, rate_from_times
from ...vis import set_matplotlib_backend
from ..amplitudes import Amplitudes
from ..template import Template
from .kernels import (
@@ -735,3 +738,205 @@ def _implementations(self):

def _supports_accel(self):
return True

@function_timer
def write(self, amplitudes, out):
"""Write out amplitude values.

This stores the amplitudes to files for debugging / plotting. Since the
Offset amplitudes are unique on each process, we open one file per process
group and each process in the group communicates their amplitudes to one
writer.

Since this function is used mainly for debugging, we are a bit wasteful
and duplicate the amplitudes in order to make things easier.

Args:
amplitudes (Amplitudes): The amplitude data.
out (str): The output file root.

Returns:
None

"""

# Copy of the amplitudes, organized by observation and detector
obs_det_amps = dict()

for det in self._all_dets:
amp_offset = self._det_start[det]
for iob, ob in enumerate(self.data.obs):
if det not in ob.local_detectors:
continue
if ob.comm_row_size != 1:
raise NotImplementedError(
"Only observations distributed by detector are supported"
)
# The step length for this observation
step_length = self._step_length(
self.step_time.to_value(u.second), self._obs_rate[iob]
)

if ob.name not in obs_det_amps:
# First time with this observation, store info about
# the offset spans
obs_det_amps[ob.name] = dict()
props = dict()
props["step_length"] = step_length
amp_first = list()
amp_last = list()
amp_start = list()
amp_stop = list()
for ivw, vw in enumerate(ob.intervals[self.view]):
n_amp_view = self._obs_views[iob][ivw]
for istep in range(n_amp_view):
amp_first.append(vw.first + istep * step_length)
if istep == n_amp_view - 1:
amp_last.append(vw.last)
else:
amp_last.append(vw.first + (istep + 1) * step_length)
amp_start.append(ob.shared[self.times].data[amp_first[-1]])
amp_stop.append(ob.shared[self.times].data[amp_last[-1]])
props["amp_first"] = np.array(amp_first, dtype=np.int64)
props["amp_last"] = np.array(amp_last, dtype=np.int64)
props["amp_start"] = np.array(amp_start, dtype=np.float64)
props["amp_stop"] = np.array(amp_stop, dtype=np.float64)
obs_det_amps[ob.name]["bounds"] = props

# Loop over views and extract per-detector amplitudes and flags
det_amps = list()
det_flags = list()
views = ob.view[self.view]
for ivw, vw in enumerate(views):
n_amp_view = self._obs_views[iob][ivw]
amp_slice = slice(amp_offset, amp_offset + n_amp_view, 1)
det_amps.append(amplitudes.local[amp_slice])
det_flags.append(amplitudes.local_flags[amp_slice])
amp_offset += n_amp_view
det_amps = np.concatenate(det_amps, dtype=np.float64)
det_flags = np.concatenate(det_flags, dtype=np.uint8)
obs_det_amps[ob.name][det] = {
"amps": det_amps,
"flags": det_flags,
}

# Each group writes out its amplitudes.

# NOTE: If/when we want to support arbitrary data distributions when
# writing, we would need to take the data from each process and align
# them in time rather than just extracting detector data and writing
# to the datasets.

for iob, ob in enumerate(self.data.obs):
obs_local_amps = obs_det_amps[ob.name]
if self.data.comm.group_size == 1:
all_obs_amps = [obs_local_amps]
else:
all_obs_amps = self.data.comm.comm_group.gather(obs_local_amps, root=0)

if self.data.comm.group_rank == 0:
out_file = f"{out}_{ob.name}.h5"
det_names = set()
for pdata in all_obs_amps:
for k in pdata.keys():
if k != "bounds":
det_names.add(k)
det_names = list(sorted(det_names))
n_det = len(det_names)
amp_first = all_obs_amps[0]["bounds"]["amp_first"]
amp_last = all_obs_amps[0]["bounds"]["amp_last"]
amp_start = all_obs_amps[0]["bounds"]["amp_start"]
amp_stop = all_obs_amps[0]["bounds"]["amp_stop"]
n_amp = len(amp_first)
det_to_row = {y: x for x, y in enumerate(det_names)}
with h5py.File(out_file, "w") as hf:
hf.attrs["step_length"] = all_obs_amps[0]["bounds"]["step_length"]
hf.attrs["detectors"] = json.dumps(det_names)
hamp_first = hf.create_dataset("amp_first", data=amp_first)
hamp_last = hf.create_dataset("amp_last", data=amp_last)
hamp_start = hf.create_dataset("amp_start", data=amp_start)
hamp_stop = hf.create_dataset("amp_stop", data=amp_stop)
hamps = hf.create_dataset(
"amplitudes",
(n_det, n_amp),
dtype=np.float64,
)
hflags = hf.create_dataset(
"flags",
(n_det, n_amp),
dtype=np.uint8,
)
for pdata in all_obs_amps:
for k, v in pdata.items():
if k == "bounds":
continue
row = det_to_row[k]
hslice = (slice(row, row + 1, 1), slice(0, n_amp, 1))
dslice = (slice(0, n_amp, 1),)
hamps.write_direct(v["amps"], dslice, hslice)
hflags.write_direct(v["flags"], dslice, hslice)


def plot(amp_file, compare=dict(), out=None):
"""Plot an amplitude dump file.

This loads an amplitude file and makes a set of plots.

Args:
amp_file (str): The path to the input file of observation amplitudes.
compare (dict): If specified, dictionary of per-detector timestreams
to plot for comparison.
out (str): The output file.

Returns:
None

"""

if out is not None:
set_matplotlib_backend(backend="pdf")

import matplotlib.pyplot as plt

figdpi = 100

with h5py.File(amp_file, "r") as hf:
step_length = hf.attrs["step_length"]
det_list = json.loads(hf.attrs["detectors"])
n_det = len(det_list)
amp_first = hf["amp_first"]
amp_last = hf["amp_last"]
amp_start = hf["amp_start"]
amp_stop = hf["amp_stop"]
hamps = hf["amplitudes"]
hflags = hf["flags"]
n_amp = len(amp_first)

fig_width = 8
fig_height = 4
fig_dpi = 100

x_samples = np.arange(amp_first[0], amp_last[-1] + 1, 1)

for idet, det in enumerate(det_list):
outfile = f"{out}_{det}.pdf"
fig = plt.figure(dpi=fig_dpi, figsize=(fig_width, fig_height))
ax = fig.add_subplot(1, 1, 1)
if det in compare:
ax.plot(x_samples, compare[det], color="black", label=f"{det} Data")
ax.step(
amp_first[:],
hamps[idet],
where="post",
color="red",
label=f"{det} Offset Amplitudes",
)
ax.set_xlabel("Sample Index")
ax.set_ylabel("Amplitude")
ax.legend(loc="best")
if out is None:
# Interactive
plt.show()
else:
plt.savefig(outfile, dpi=figdpi, bbox_inches="tight", format="pdf")
plt.close()
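Note: taken together, Offset.write() dumps one HDF5 file per observation from each process group, and the module-level plot() turns such a dump into one PDF per detector. A hedged usage sketch (the template and amplitude objects are assumed to come from an existing mapmaking run; observation, detector, and file names are illustrative):

from toast.templates.offset import plot

# `offset_template` is an initialized Offset template and `amps` its solved
# Amplitudes, both assumed to exist from a prior mapmaking run.
offset_template.write(amps, "offset_amps")  # -> offset_amps_<observation>.h5

# Optionally overlay the original timestream of one detector for comparison.
compare = {"D0A": detector_timestream}  # hypothetical detector name and array
plot(
    "offset_amps_OBS-001.h5",
    compare=compare,
    out="offset_amps_plots",  # -> offset_amps_plots_D0A.pdf, one per detector
)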