Fix bandit issues
djhoese committed Nov 17, 2023
1 parent d2c252e commit f07a571
Showing 6 changed files with 18 additions and 18 deletions.
2 changes: 1 addition & 1 deletion .bandit
@@ -1,3 +1,3 @@
 [bandit]
 skips: B506
-exclude: pyresample/test,pyresample/version.py
+exclude: pyresample/test,pyresample/version.py,versioneer.py
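For context, this INI-style config tells Bandit to skip check B506 (its yaml.load test) everywhere and to leave the listed paths out of the scan; the commit adds the generated versioneer.py to that exclude list. Assuming a stock Bandit install (how this repository's CI actually invokes it is not shown here), a roughly equivalent one-off run would be:

    bandit -r pyresample -s B506 -x pyresample/test,pyresample/version.py,versioneer.py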
4 changes: 2 additions & 2 deletions pyresample/ewa/dask_ewa.py
@@ -186,8 +186,8 @@ class DaskEWAResampler(BaseResampler):
     def __init__(self, source_geo_def, target_geo_def):
         """Initialize in-memory cache."""
         super(DaskEWAResampler, self).__init__(source_geo_def, target_geo_def)
-        assert isinstance(source_geo_def, SwathDefinition), \
-            "EWA resampling can only operate on SwathDefinitions"
+        if not isinstance(source_geo_def, SwathDefinition):
+            raise ValueError("EWA resampling can only operate on SwathDefinitions")
         self.cache = {}

     def _new_chunks(self, in_arr, rows_per_scan):
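The change above, repeated in the files below, is what most of this commit is about: Bandit's B101 (assert_used) check flags `assert` for runtime validation because asserts are stripped when Python runs with the -O flag, so the checks are rewritten as explicit exceptions. A minimal standalone sketch of the pattern, using a stand-in class rather than pyresample's real SwathDefinition:

    class SwathDefinition:
        """Stand-in for pyresample.geometry.SwathDefinition (illustration only)."""


    def validate_source(source_geo_def):
        # Before: assert isinstance(source_geo_def, SwathDefinition), "EWA resampling ..."
        # An assert vanishes under `python -O`; this explicit raise does not.
        if not isinstance(source_geo_def, SwathDefinition):
            raise ValueError("EWA resampling can only operate on SwathDefinitions")
        return source_geo_def

The same assert-to-raise rewrite appears below in nearest.py and kd_tree.py for the target-dimension, mask-shape, and data-dimension checks.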
8 changes: 4 additions & 4 deletions pyresample/future/resamplers/nearest.py
@@ -127,8 +127,8 @@ def __init__(self,
             raise ImportError("Missing 'xarray' and 'dask' dependencies")
         super().__init__(source_geo_def, target_geo_def, cache=cache)
         self._internal_cache = {}
-        assert (self.target_geo_def.ndim == 2), \
-            "Target area definition must be 2 dimensions"
+        if self.target_geo_def.ndim != 2:
+            raise ValueError("Target area definition must be 2 dimensions")

     @property
     def version(self) -> str:
@@ -218,8 +218,8 @@ def _get_neighbor_info(self, mask, neighbors, radius_of_influence, epsilon):
         valid_output_idx = ((target_lons >= -180) & (target_lons <= 180) & (target_lats <= 90) & (target_lats >= -90))

         if mask is not None:
-            assert (mask.shape == self.source_geo_def.shape), \
-                "'mask' must be the same shape as the source geo definition"
+            if mask.shape != self.source_geo_def.shape:
+                raise ValueError("'mask' must be the same shape as the source geo definition")
             mask = mask.data
         index_arr = self._query_resample_kdtree(
             resample_kdtree, target_lons, target_lats, valid_input_idx,
2 changes: 1 addition & 1 deletion pyresample/future/resamplers/resampler.py
@@ -38,7 +38,7 @@
 def hash_dict(the_dict: dict, existing_hash: Optional[HashType] = None) -> HashType:
     """Calculate a hash for a dictionary and optionally update an existing hash."""
     if existing_hash is None:
-        existing_hash = hashlib.sha1()
+        existing_hash = hashlib.sha1()  # nosec: B324
     existing_hash.update(json.dumps(the_dict, sort_keys=True).encode('utf-8'))
     return existing_hash

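The `# nosec: B324` markers in this file and in geometry.py below tell Bandit to skip its weak-hash check on those lines: SHA-1 is used here only to fingerprint resampling parameters and geometries for caching, not for anything security-sensitive. A self-contained version of the hash_dict function shown above (type annotations dropped so it runs on its own):

    import hashlib
    import json


    def hash_dict(the_dict, existing_hash=None):
        """Calculate a cache-key hash for a dictionary, optionally updating an existing hash."""
        if existing_hash is None:
            existing_hash = hashlib.sha1()  # nosec: B324 - not used for security
        existing_hash.update(json.dumps(the_dict, sort_keys=True).encode('utf-8'))
        return existing_hash


    print(hash_dict({"radius_of_influence": 50000, "neighbours": 1}).hexdigest())

On Python 3.9+, hashlib.sha1(usedforsecurity=False) expresses the same intent directly, though whether that silences B324 depends on the Bandit version in use.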
4 changes: 2 additions & 2 deletions pyresample/geometry.py
@@ -123,7 +123,7 @@ def __hash__(self):
     def update_hash(self, existing_hash: Optional[HashType] = None) -> HashType:
         """Update the hash."""
         if existing_hash is None:
-            existing_hash = hashlib.sha1()
+            existing_hash = hashlib.sha1()  # nosec: B324
         existing_hash.update(get_array_hashable(self.lons))
         existing_hash.update(get_array_hashable(self.lats))
         try:
@@ -2058,7 +2058,7 @@ def __ne__(self, other):
     def update_hash(self, existing_hash: Optional[HashType] = None) -> HashType:
         """Update a hash, or return a new one if needed."""
         if existing_hash is None:
-            existing_hash = hashlib.sha1()
+            existing_hash = hashlib.sha1()  # nosec: B324
         existing_hash.update(self.crs_wkt.encode('utf-8'))
         existing_hash.update(np.array(self.shape))
         existing_hash.update(np.array(self.area_extent))
16 changes: 8 additions & 8 deletions pyresample/kd_tree.py
@@ -905,8 +905,8 @@ def __init__(self,
         if radius_of_influence is None:
             radius_of_influence = self._compute_radius_of_influence()
         self.radius_of_influence = radius_of_influence
-        assert (self.target_geo_def.ndim == 2), \
-            "Target area definition must be 2 dimensions"
+        if self.target_geo_def.ndim != 2:
+            raise ValueError("Target area definition must be 2 dimensions")

     def _compute_radius_of_influence(self):
         """Estimate a good default radius_of_influence."""
@@ -989,8 +989,8 @@ def get_neighbour_info(self, mask=None):
         valid_output_idx = ((target_lons >= -180) & (target_lons <= 180) & (target_lats <= 90) & (target_lats >= -90))

         if mask is not None:
-            assert (mask.shape == self.source_geo_def.shape), \
-                "'mask' must be the same shape as the source geo definition"
+            if mask.shape != self.source_geo_def.shape:
+                raise ValueError("'mask' must be the same shape as the source geo definition")
             mask = mask.data
         index_arr, distance_arr = self.query_resample_kdtree(
             resample_kdtree, target_lons, target_lats, valid_output_idx, mask)
@@ -1056,13 +1056,13 @@ def get_sample_from_neighbour_info(self, data, fill_value=np.nan):
         dst_geo_dims = ('y', 'x')
         # verify that source dims are the same between geo and data
         data_geo_dims = tuple(d for d in data.dims if d in src_geo_dims)
-        assert (data_geo_dims == src_geo_dims), \
-            "Data dimensions do not match source area dimensions"
+        if data_geo_dims != src_geo_dims:
+            raise ValueError("Data dimensions do not match source area dimensions")
         # verify that the dims are next to each other
         first_dim_idx = data.dims.index(src_geo_dims[0])
         num_dims = len(src_geo_dims)
-        assert (data.dims[first_dim_idx:first_dim_idx + num_dims] == data_geo_dims), \
-            "Data's geolocation dimensions are not consecutive."
+        if data.dims[first_dim_idx:first_dim_idx + num_dims] != data_geo_dims:
+            raise ValueError("Data's geolocation dimensions are not consecutive.")

         # FIXME: Can't include coordinates whose dimensions depend on the geo
         # dims either
