Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix code that raises new numpy deprecation warnings #1308

Merged
merged 5 commits into from
Jun 26, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .circleci/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -140,7 +140,7 @@ workflows:
- pyuvdata:
name: pyuvdata_3.11
python_version: "3.11"
env_name: "pyuvdata_tests"
env_name: "pyuvdata_tests_311"
- pyuvdata:
name: pyuvdata_min_deps
python_version: "3.9"
Expand Down
2 changes: 2 additions & 0 deletions .github/workflows/macosx_windows_ci.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,8 @@ jobs:
run: |
if [[ "${{ runner.os }}" = "Windows" ]]; then
echo "::set-output name=ENV_NAME::pyuvdata_tests_windows"
elif [[ "${{ matrix.python-version }}" == "3.11" ]]; then
echo "::set-output name=ENV_NAME::pyuvdata_tests_311"
else
echo "::set-output name=ENV_NAME::pyuvdata_tests"
fi
Expand Down
30 changes: 30 additions & 0 deletions ci/pyuvdata_tests_311.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
# Need a special environment file for python 3.11 because h5py <3.9 throws lots of
# deprecation warnings with numpy >= 1.25. But h5py 3.9 cannot be resolved with other
# dependencies on python 3.11. So we're restricting numpy to <1.25 on python 3.11 for now.
name: pyuvdata_tests_311
channels:
- conda-forge
dependencies:
- astropy>=5.0.4
- astropy-healpix>=0.6
- astroquery>=0.4.4
- docstring_parser>=0.15
- h5py>=3.1
- hdf5plugin>=3.1.0
- numpy>=1.20,<1.25
- pyerfa>=2.0
- python-casacore>=3.3.1
- pyyaml>=5.1
- scipy>=1.5
- coverage
- pytest>=6.2.0
- pytest-cases>=3.6.9
- pytest-cov
- pytest-xdist
- cython
- setuptools_scm<7.0|>=7.0.3
- pip
- pip:
- lunarsky>=0.2.1
- novas
- novas_de405
21 changes: 17 additions & 4 deletions pyuvdata/tests/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -150,14 +150,27 @@ def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
# only check if we're not currently handling an exception
if exc_type is None and exc_val is None and exc_tb is None:
if self.expected_warning is None:
assert len(self) == 0
expected_length = 0
else:
assert len(self) == len(self.expected_warning), (
f"{len(self.expected_warning)} warnings expected, "
expected_length = len(self.expected_warning)

if len(self) != expected_length:
warn_file_line = []
msg_list = []
for each in self:
warn_file_line.append(f"{each.filename}: {each.lineno}")
msg_list.append([each.message for each in self])
if self.expected_warning is None:
err_msg = "No warnings expected, "
else:
err_msg = f"{len(self.expected_warning)} warnings expected, "
err_msg += (
f"{len(self)} warnings issued. The list of emitted warnings is: "
f"{[each.message for each in self]}."
f"{msg_list}. The filenames and line numbers are: {warn_file_line}"
)
raise AssertionError(err_msg)

if expected_length > 0:
for warn_i, exp_warn in enumerate(self.expected_warning):
if not any(issubclass(r.category, exp_warn) for r in self):
__tracebackhide__ = True
Expand Down
17 changes: 12 additions & 5 deletions pyuvdata/tests/test_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -1851,7 +1851,8 @@ def test_calc_app_fk4_roundtrip(astrometry_args, telescope_frame):
@pytest.mark.filterwarnings('ignore:ERFA function "pmsafe" yielded 4 of')
@pytest.mark.filterwarnings('ignore:ERFA function "utcut1" yielded 2 of')
@pytest.mark.filterwarnings('ignore:ERFA function "d2dtf" yielded 1 of')
def test_astrometry_icrs_to_app(astrometry_args):
@pytest.mark.parametrize("use_extra", [True, False])
def test_astrometry_icrs_to_app(astrometry_args, use_extra):
"""
Check for consistency between astrometry libraries when converting ICRS -> TOPP

Expand Down Expand Up @@ -1881,18 +1882,24 @@ def test_astrometry_icrs_to_app(astrometry_args):

coord_results[3] = (precalc_ra, precalc_dec)

kwargs = {}
extra_args = ["pm_ra", "pm_dec", "vrad", "dist"]
if use_extra:
for key in extra_args:
kwargs[key] = astrometry_args[key]
else:
# don't compare to precalc if not using extra arguments
coord_results = coord_results[:-1]

for idx, name in enumerate(astrometry_list):
coord_results[idx] = uvutils.transform_icrs_to_app(
astrometry_args["time_array"],
astrometry_args["icrs_ra"],
astrometry_args["icrs_dec"],
astrometry_args["telescope_loc"],
epoch=astrometry_args["epoch"],
pm_ra=astrometry_args["pm_ra"],
pm_dec=astrometry_args["pm_dec"],
vrad=astrometry_args["vrad"],
dist=astrometry_args["dist"],
astrometry_library=name,
**kwargs,
)

for idx in range(len(coord_results) - 1):
Expand Down
66 changes: 51 additions & 15 deletions pyuvdata/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -2768,23 +2768,38 @@ def transform_icrs_to_app(
for idx in range(len(app_ra)):
if multi_coord or (idx == 0):
# Create a catalog entry for the source in question
if pm_ra is None:
pm_ra_use = 0.0
else:
pm_ra_use = pm_ra_coord.to_value("mas/yr") * np.cos(
dec_coord[idx].to_value("rad")
)

if pm_dec is None:
pm_dec_use = 0.0
else:
pm_dec_use = pm_dec_coord.to_value("mas/yr")

if dist is None or np.any(dist == 0.0):
parallax = 0.0
else:
parallax = d_coord[idx].kiloparsec ** -1.0

if vrad is None:
vrad_use = 0.0
else:
vrad_use = v_coord[idx].to_value("km/s")

cat_entry = novas.make_cat_entry(
"dummy_name", # Dummy source name
"GKK", # Catalog ID, fixed for now
156, # Star ID number, fixed for now
ra_coord[idx].to_value("hourangle"),
dec_coord[idx].to_value("deg"),
0.0
if pm_ra is None
else (
pm_ra_coord.to_value("mas/yr")
* np.cos(dec_coord[idx].to_value("rad"))
),
0.0 if pm_dec is None else pm_dec_coord.to_value("mas/yr"),
0.0
if (dist is None or np.any(dist == 0.0))
else (d_coord.kiloparsec**-1.0),
0.0 if (vrad is None) else v_coord.to_value("km/s"),
pm_ra_use,
pm_dec_use,
parallax,
vrad_use,
)

# Update polar wobble parameters for a given timestamp
Expand All @@ -2810,13 +2825,34 @@ def transform_icrs_to_app(
# liberfa wants things in radians
pm_x_array *= np.pi / (3600.0 * 180.0)
pm_y_array *= np.pi / (3600.0 * 180.0)

if pm_ra is None:
pm_ra_use = 0.0
else:
pm_ra_use = pm_ra_coord.to_value("rad/yr")

if pm_dec is None:
pm_dec_use = 0.0
else:
pm_dec_use = pm_dec_coord.to_value("rad/yr")

if dist is None or np.any(dist == 0.0):
parallax = 0.0
else:
parallax = d_coord.pc**-1.0

if vrad is None:
vrad_use = 0
else:
vrad_use = v_coord.to_value("km/s")

[_, _, _, app_dec, app_ra, eqn_org] = erfa.atco13(
ra_coord.to_value("rad"),
dec_coord.to_value("rad"),
0.0 if (pm_ra is None) else pm_ra_coord.to_value("rad/yr"),
0.0 if (pm_dec is None) else pm_dec_coord.to_value("rad/yr"),
0.0 if (dist is None or np.any(dist == 0.0)) else (d_coord.pc**-1.0),
0.0 if (vrad is None) else v_coord.to_value("km/s"),
pm_ra_use,
pm_dec_use,
parallax,
vrad_use,
time_obj_array.utc.jd,
0.0,
time_obj_array.delta_ut1_utc,
Expand Down
4 changes: 2 additions & 2 deletions pyuvdata/uvbeam/mwa_beam.py
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,7 @@ def P1sin(nmax, theta):
# Pn(cos x)/sin x = -dPn(cos_th)/dcos_th
Pm_cos_delta_cos = lpmv(orders, n, cos_th - delta_cos)
# backward difference
Pm_sin[1, 0] = -(P[0] - Pm_cos_delta_cos[0]) / delta_cos
Pm_sin[1, 0] = -(P[0, 0] - Pm_cos_delta_cos[0, 0]) / delta_cos

elif cos_th == -1:
# The first approach, to just use the analytical derivative
Expand All @@ -103,7 +103,7 @@ def P1sin(nmax, theta):
# Pn(cos x)/sin x = -dPn(cos_th)/dcos_th
Pm_cos_delta_cos = lpmv(orders, n, cos_th - delta_cos)
# forward difference
Pm_sin[1, 0] = -(Pm_cos_delta_cos[0] - P[0]) / delta_cos
Pm_sin[1, 0] = -(Pm_cos_delta_cos[0, 0] - P[0, 0]) / delta_cos
else:
Pm_sin = P / sin_th

Expand Down
25 changes: 17 additions & 8 deletions pyuvdata/uvbeam/uvbeam.py
Original file line number Diff line number Diff line change
Expand Up @@ -950,7 +950,7 @@ def _fix_auto_power(self):
self.data_array[:, :, pol_screen]
)

def _check_auto_power(self, fix_auto_power=False):
def _check_auto_power(self, fix_auto_power=False, warn_tols=(0, 0)):
"""
Check for complex auto polarization power beams.

Expand All @@ -959,6 +959,11 @@ def _check_auto_power(self, fix_auto_power=False):
fix_auto_power : bool
If auto polarization power beams with imaginary values are found,
fix those values so that they are real-only in data_array.
warn_tols : tuple of float
Tolerances (relative, absolute) to use in comparing max imaginary part of
auto polarization power beams to zero (passed to numpy.isclose). If the max
imaginary part is close to zero within the tolerances and fix_auto_power is
True, silently fix them to be zero and do not warn.

"""
if self.beam_type != "power" or self.polarization_array is None:
Expand Down Expand Up @@ -988,11 +993,12 @@ def _check_auto_power(self, fix_auto_power=False):
np.abs(np.imag(np.rollaxis(self.data_array, pol_axis)[pol_screen]))
)
if fix_auto_power:
warnings.warn(
"Fixing auto polarization power beams to be be real-only, "
"after some imaginary values were detected in data_array. "
f"Largest imaginary component was {max_imag}."
)
if not np.isclose(max_imag, 0, rtol=warn_tols[0], atol=warn_tols[1]):
warnings.warn(
"Fixing auto polarization power beams to be be real-only, "
"after some imaginary values were detected in data_array. "
f"Largest imaginary component was {max_imag}."
)
self._fix_auto_power()
else:
raise ValueError(
Expand Down Expand Up @@ -1277,8 +1283,11 @@ def efield_to_power(

if calc_cross_pols:
# Sometimes the auto pol beams can have a small complex part due to
# numerical precision errors. Fix that (with warnings).
beam_object._check_auto_power(fix_auto_power=True)
# numerical precision errors. Fix that (with warnings if the complex part
# is larger than the tolerances).
beam_object._check_auto_power(
fix_auto_power=True, warn_tols=beam_object._data_array.tols
)

history_update_string = " Converted from efield to power using pyuvdata."

Expand Down
2 changes: 1 addition & 1 deletion pyuvdata/uvcal/fhd_cal.py
Original file line number Diff line number Diff line change
Expand Up @@ -243,7 +243,7 @@ def read_fhd_cal(
self.history += "\n" + self.pyuvdata_version_str

if not read_data:
n_pols = int(obs_data["N_POL"])
n_pols = int(obs_data["N_POL"][0])
# FHD only has the diagonal elements (jxx, jyy), so limit to 2
self.Njones = int(np.min([n_pols, 2]))

Expand Down
6 changes: 3 additions & 3 deletions pyuvdata/uvdata/mir.py
Original file line number Diff line number Diff line change
Expand Up @@ -452,9 +452,9 @@ def _init_from_mir_parser(
assert len(spw_nchan) == 1

# Get the data in the right units and dtype
spw_fsky = float(spw_fsky * 1e9) # GHz -> Hz
spw_fres = float(spw_fres * 1e6) # MHz -> Hz
spw_nchan = int(spw_nchan)
spw_fsky = float(spw_fsky[0] * 1e9) # GHz -> Hz
spw_fres = float(spw_fres[0] * 1e6) # MHz -> Hz
spw_nchan = int(spw_nchan[0])

# We need to do a some extra handling here, because a single correlator
# can produce multiple spectral windows (e.g., LSB/USB). The scheme below
Expand Down
4 changes: 3 additions & 1 deletion pyuvdata/uvdata/mir_meta_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -3101,7 +3101,9 @@ def read(self, filepath, nchunks=8):
if (file_size % (rec_size + hdr_dtype.itemsize)) != 0:
# If the file size doesn't go in evenly, then read in just the first
# record and try to figure it out.
nchunks = int(np.fromfile(old_ac_file, dtype=hdr_dtype, count=1)["nChunks"])
nchunks = int(
np.fromfile(old_ac_file, dtype=hdr_dtype, count=1)["nChunks"][0]
)
rec_size = 4 * 16384 * nchunks * 2
assert (
file_size % (rec_size + hdr_dtype.itemsize)
Expand Down
2 changes: 1 addition & 1 deletion pyuvdata/uvdata/tests/test_uvdata.py
Original file line number Diff line number Diff line change
Expand Up @@ -12426,7 +12426,7 @@ def test_flex_pol_uvh5(future_shapes, multispw, sorting, uv_phase_comp, tmp_path
spw_final_order = [1, 4, 5, 0, 3, 2, 6, 7, 8, 9, 10, 11]
spw_order = np.zeros_like(uvd.spw_array)
for idx, spw in enumerate(spw_final_order):
spw_order[idx] = np.nonzero(uvd.spw_array == spw)[0]
spw_order[idx] = np.nonzero(uvd.spw_array == spw)[0][0]
uvd.reorder_freqs(spw_order=spw_order)

uvd.check(check_autos=True)
Expand Down
12 changes: 8 additions & 4 deletions pyuvdata/uvdata/tests/test_uvfits.py
Original file line number Diff line number Diff line change
Expand Up @@ -1675,10 +1675,14 @@ def test_no_spoof(sma_mir, tmp_path, spoof):
sma_mir._set_app_coords_helper()
filename = os.path.join(tmp_path, "spoof.uvfits" if spoof else "no_spoof.uvfits")

with uvtest.check_warnings(
DeprecationWarning if spoof else None,
"UVFITS-required metadata are now set automatically to " if spoof else None,
):
if spoof:
warn_type = DeprecationWarning
warn_msg = "UVFITS-required metadata are now set automatically to "
else:
warn_type = None
warn_msg = ""

with uvtest.check_warnings(warn_type, match=warn_msg):
sma_mir.write_uvfits(filename, spoof_nonessential=spoof)

sma_uvfits = UVData.from_file(filename, use_future_array_shapes=True)
Expand Down
4 changes: 2 additions & 2 deletions pyuvdata/uvdata/uvdata.py
Original file line number Diff line number Diff line change
Expand Up @@ -2777,7 +2777,7 @@ def remove_flex_pol(self, combine_spws=True):
this_spw = np.array(spw_set["spws"])[spw_set["pols"] == pol]
spw_order[this_ind] = np.nonzero(
self.spw_array == this_spw
)[0]
)[0][0]
else:
spw_order = None

Expand Down Expand Up @@ -5220,7 +5220,7 @@ def reorder_freqs(
if self.flex_spw_polarization_array is not None:
spw_sort_inds = np.zeros_like(self.spw_array)
for idx, spw in enumerate(new_spw_array):
spw_sort_inds[idx] = np.nonzero(self.spw_array == spw)[0]
spw_sort_inds[idx] = np.nonzero(self.spw_array == spw)[0][0]
self.flex_spw_polarization_array = self.flex_spw_polarization_array[
spw_sort_inds
]
Expand Down
Loading