Merge branch 'dev' into fix_string_datetime_validator
rly authored Jan 12, 2024
2 parents abf78b8 + 4bdf39d commit c5a8914
Showing 10 changed files with 25 additions and 21 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -6,6 +6,7 @@
- Add Data.set_data_io(), which allows for setting a `DataIO` to a data object after-the-fact. @bendichter and @CodyCBakerPhD [#1013](https://github.com/hdmf-dev/hdmf/pull/1013)
- Added `add_ref_termset`, updated helper methods for `HERD`, revised `add_ref` to support validations prior to populating the tables
and added `add_ref_container`. @mavaylon1 [#968](https://github.com/hdmf-dev/hdmf/pull/968)
- Use `stacklevel` in most warnings. @rly [#1027](https://github.com/hdmf-dev/hdmf/pull/1027)

### Minor Improvements
- Updated `__gather_columns` to ignore the order of bases when generating columns from the super class. @mavaylon1 [#991](https://github.com/hdmf-dev/hdmf/pull/991)
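The heart of this merge is the `stacklevel` change noted in the changelog entry above: each warning is now attributed to the code that called into HDMF rather than to the `warnings.warn()` line inside the library. A minimal sketch of that behavior follows; it is illustrative only, and the function names are hypothetical rather than taken from this diff.

import warnings

def library_function(value):
    if value is None:
        # With the default stacklevel=1 the warning would point at this line
        # inside the library; stacklevel=2 attributes it to the caller of
        # library_function() instead, which is what a user needs to see.
        warnings.warn("value is None and will be ignored", stacklevel=2)

def user_code():
    library_function(None)  # the warning is reported against this line

user_code()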
9 changes: 5 additions & 4 deletions src/hdmf/backends/hdf5/h5_utils.py
@@ -499,7 +499,7 @@ def __init__(self, **kwargs):
# Check for possible collision with other parameters
if not isinstance(getargs('data', kwargs), Dataset) and self.__link_data:
self.__link_data = False
- warnings.warn('link_data parameter in H5DataIO will be ignored')
+ warnings.warn('link_data parameter in H5DataIO will be ignored', stacklevel=2)
# Call the super constructor and consume the data parameter
super().__init__(**kwargs)
# Construct the dict with the io args, ignoring all options that were set to None
@@ -523,7 +523,7 @@ def __init__(self, **kwargs):
self.__iosettings.pop('compression', None)
if 'compression_opts' in self.__iosettings:
warnings.warn('Compression disabled by compression=False setting. ' +
- 'compression_opts parameter will, therefore, be ignored.')
+ 'compression_opts parameter will, therefore, be ignored.', stacklevel=2)
self.__iosettings.pop('compression_opts', None)
# Validate the compression options used
self._check_compression_options()
@@ -537,7 +537,8 @@ def __init__(self, **kwargs):
# Check possible parameter collisions
if isinstance(self.data, Dataset):
for k in self.__iosettings.keys():
warnings.warn("%s in H5DataIO will be ignored with H5DataIO.data being an HDF5 dataset" % k)
warnings.warn("%s in H5DataIO will be ignored with H5DataIO.data being an HDF5 dataset" % k,
stacklevel=2)

self.__dataset = None

@@ -594,7 +595,7 @@ def _check_compression_options(self):
if self.__iosettings['compression'] not in ['gzip', h5py_filters.h5z.FILTER_DEFLATE]:
warnings.warn(str(self.__iosettings['compression']) + " compression may not be available "
"on all installations of HDF5. Use of gzip is recommended to ensure portability of "
"the generated HDF5 files.")
"the generated HDF5 files.", stacklevel=3)

@staticmethod
def filter_available(filter, allow_plugin_filters):
4 changes: 3 additions & 1 deletion src/hdmf/backends/hdf5/h5tools.py
@@ -324,7 +324,9 @@ def copy_file(self, **kwargs):
"""

warnings.warn("The copy_file class method is no longer supported and may be removed in a future version of "
"HDMF. Please use the export method or h5py.File.copy method instead.", DeprecationWarning)
"HDMF. Please use the export method or h5py.File.copy method instead.",
category=DeprecationWarning,
stacklevel=2)

source_filename, dest_filename, expand_external, expand_refs, expand_soft = getargs('source_filename',
'dest_filename',
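The `copy_file` hunk above also makes the warning category explicit. As a hedged illustration (the wrapper below is hypothetical, not HDMF's API), a deprecation warning emitted this way is attributed to the deprecated call site; note that Python ignores DeprecationWarning by default unless it is attributed to __main__ or enabled by a test runner.

import warnings

def copy_file_compat(source_filename, dest_filename):
    # Hypothetical deprecated wrapper, analogous in shape to the call above.
    warnings.warn(
        "copy_file_compat is deprecated; use shutil.copy or the export method instead.",
        category=DeprecationWarning,
        stacklevel=2,
    )
    # ... the real copy would happen here ...

# Ensure the warning is shown even if this module is imported rather than run directly.
warnings.simplefilter("always", DeprecationWarning)
copy_file_compat("in.h5", "out.h5")  # the warning is reported against this line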
2 changes: 1 addition & 1 deletion src/hdmf/build/map.py
@@ -4,4 +4,4 @@

import warnings
warnings.warn('Classes in map.py should be imported from hdmf.build. Importing from hdmf.build.map will be removed '
- 'in HDMF 3.0.', DeprecationWarning)
+ 'in HDMF 3.0.', DeprecationWarning, stacklevel=2)
2 changes: 1 addition & 1 deletion src/hdmf/common/resources.py
@@ -628,7 +628,7 @@ def add_ref(self, **kwargs):
if entity_uri is not None:
entity_uri = entity.entity_uri
msg = 'This entity already exists. Ignoring new entity uri'
- warn(msg)
+ warn(msg, stacklevel=2)

#################
# Validate Object
12 changes: 6 additions & 6 deletions src/hdmf/common/table.py
@@ -503,7 +503,7 @@ def __set_table_attr(self, col):
msg = ("An attribute '%s' already exists on %s '%s' so this column cannot be accessed as an attribute, "
"e.g., table.%s; it can only be accessed using other methods, e.g., table['%s']."
% (col.name, self.__class__.__name__, self.name, col.name, col.name))
- warn(msg)
+ warn(msg, stacklevel=2)
else:
setattr(self, col.name, col)

@@ -764,7 +764,7 @@ def add_column(self, **kwargs): # noqa: C901

if isinstance(index, VectorIndex):
warn("Passing a VectorIndex in for index may lead to unexpected behavior. This functionality will be "
"deprecated in a future version of HDMF.", FutureWarning)
"deprecated in a future version of HDMF.", category=FutureWarning, stacklevel=2)

if name in self.__colids: # column has already been added
msg = "column '%s' already exists in %s '%s'" % (name, self.__class__.__name__, self.name)
@@ -781,7 +781,7 @@ def add_column(self, **kwargs): # noqa: C901
"Please ensure the new column complies with the spec. "
"This will raise an error in a future version of HDMF."
% (name, self.__class__.__name__, spec_table))
- warn(msg)
+ warn(msg, stacklevel=2)

index_bool = index or not isinstance(index, bool)
spec_index = self.__uninit_cols[name].get('index', False)
@@ -791,7 +791,7 @@ def add_column(self, **kwargs): # noqa: C901
"Please ensure the new column complies with the spec. "
"This will raise an error in a future version of HDMF."
% (name, self.__class__.__name__, spec_index))
- warn(msg)
+ warn(msg, stacklevel=2)

spec_col_cls = self.__uninit_cols[name].get('class', VectorData)
if col_cls != spec_col_cls:
@@ -800,7 +800,7 @@ def add_column(self, **kwargs): # noqa: C901
"Please ensure the new column complies with the spec. "
"This will raise an error in a future version of HDMF."
% (name, self.__class__.__name__, spec_col_cls))
- warn(msg)
+ warn(msg, stacklevel=2)

ckwargs = dict(kwargs)

@@ -1517,7 +1517,7 @@ def _validate_on_set_parent(self):
if set(table_ancestor_ids).isdisjoint(self_ancestor_ids):
msg = (f"The linked table for DynamicTableRegion '{self.name}' does not share an ancestor with the "
"DynamicTableRegion.")
- warn(msg)
+ warn(msg, stacklevel=2)
return super()._validate_on_set_parent()


6 changes: 3 additions & 3 deletions src/hdmf/spec/namespace.py
@@ -50,13 +50,13 @@ def __init__(self, **kwargs):
self['full_name'] = full_name
if version == str(SpecNamespace.UNVERSIONED):
# the unversioned version may be written to file as a string and read from file as a string
warn("Loaded namespace '%s' is unversioned. Please notify the extension author." % name)
warn("Loaded namespace '%s' is unversioned. Please notify the extension author." % name, stacklevel=2)
version = SpecNamespace.UNVERSIONED
if version is None:
# version is required on write -- see YAMLSpecWriter.write_namespace -- but can be None on read in order to
# be able to read older files with extensions that are missing the version key.
warn(("Loaded namespace '%s' is missing the required key 'version'. Version will be set to '%s'. "
"Please notify the extension author.") % (name, SpecNamespace.UNVERSIONED))
"Please notify the extension author.") % (name, SpecNamespace.UNVERSIONED), stacklevel=2)
version = SpecNamespace.UNVERSIONED
self['version'] = version
if date is not None:
@@ -529,7 +529,7 @@ def load_namespaces(self, **kwargs):
if ns['version'] != self.__namespaces.get(ns['name'])['version']:
# warn if the cached namespace differs from the already loaded namespace
warn("Ignoring cached namespace '%s' version %s because version %s is already loaded."
- % (ns['name'], ns['version'], self.__namespaces.get(ns['name'])['version']))
+ % (ns['name'], ns['version'], self.__namespaces.get(ns['name'])['version']), stacklevel=2)
else:
to_load.append(ns)
# now load specs into namespace
2 changes: 1 addition & 1 deletion src/hdmf/spec/spec.py
@@ -318,7 +318,7 @@ def __init__(self, **kwargs):
default_name = getargs('default_name', kwargs)
if default_name:
if name is not None:
warn("found 'default_name' with 'name' - ignoring 'default_name'")
warn("found 'default_name' with 'name' - ignoring 'default_name'", stacklevel=2)
else:
self['default_name'] = default_name
self.__attributes = dict()
2 changes: 1 addition & 1 deletion src/hdmf/spec/write.py
@@ -247,7 +247,7 @@ def export_spec(ns_builder, new_data_types, output_dir):
"""

if len(new_data_types) == 0:
- warnings.warn('No data types specified. Exiting.')
+ warnings.warn('No data types specified. Exiting.', stacklevel=2)
return

ns_path = ns_builder.name + '.namespace.yaml'
6 changes: 3 additions & 3 deletions src/hdmf/utils.py
@@ -434,7 +434,7 @@ def fmt_docval_args(func, kwargs):
"removes all arguments not accepted by the function's docval, so if you are passing kwargs that "
"includes extra arguments and the function's docval does not allow extra arguments (allow_extra=True "
"is set), then you will need to pop the extra arguments out of kwargs before calling the function.",
- PendingDeprecationWarning)
+ PendingDeprecationWarning, stacklevel=2)
func_docval = getattr(func, docval_attr_name, None)
ret_args = list()
ret_kwargs = dict()
@@ -488,7 +488,7 @@ def call_docval_func(func, kwargs):
"removes all arguments not accepted by the function's docval, so if you are passing kwargs that "
"includes extra arguments and the function's docval does not allow extra arguments (allow_extra=True "
"is set), then you will need to pop the extra arguments out of kwargs before calling the function.",
- PendingDeprecationWarning)
+ PendingDeprecationWarning, stacklevel=2)
with warnings.catch_warnings(record=True):
# catch and ignore only PendingDeprecationWarnings from fmt_docval_args so that two
# PendingDeprecationWarnings saying the same thing are not raised
@@ -645,7 +645,7 @@ def _check_args(args, kwargs):
parse_warnings = parsed.get('future_warnings')
if parse_warnings:
msg = '%s: %s' % (func.__qualname__, ', '.join(parse_warnings))
- warnings.warn(msg, FutureWarning)
+ warnings.warn(msg, category=FutureWarning, stacklevel=3)

for error_type, ExceptionType in (('type_errors', TypeError),
('value_errors', ValueError),
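The utils.py hunks use stacklevel=3 rather than 2 because the warn() call sits one frame deeper: it runs inside a helper invoked by the docval wrapper, so one extra level is needed before the warning lands on the user's call site. A rough, self-contained sketch of that frame arithmetic (hypothetical names, not HDMF's internals):

import warnings

def _check_args(value):
    # Frame 1 is this helper, frame 2 is public_api(), frame 3 is the
    # user's call site, so stacklevel=3 attributes the warning to the user.
    if value < 0:
        warnings.warn("negative value will be clipped to 0", stacklevel=3)

def public_api(value):
    # Wrapper analogous to the docval-generated function that calls _check_args.
    _check_args(value)
    return max(value, 0)

public_api(-1)  # the warning is reported against this line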
