Chore: numpy 2.0 compatibility
Signed-off-by: Matthias Kümmerer <[email protected]>
matthias-k committed Jun 24, 2024
commit e8dee3f (1 parent: 6d629d0)
Showing 3 changed files with 22 additions and 22 deletions.
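
Background on the change: NumPy 2.0 removed the np.string_ alias as part of its namespace cleanup; np.bytes_ names the same fixed-width byte-string scalar type and also exists in NumPy 1.x, so renaming the calls keeps the HDF5 writers working on both major versions. A minimal sketch of the pattern, illustrative rather than taken from the commit:

import json
import numpy as np

# np.string_ raises AttributeError under NumPy 2.0; np.bytes_ is the
# same scalar type and is available in NumPy 1.x as well.
type_attr = np.bytes_('Fixations')                    # b'Fixations'
attrs_json = np.bytes_(json.dumps(['x', 'y', 't']))   # b'["x", "y", "t"]'

print(type(type_attr).__name__)   # bytes_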
24 changes: 12 additions & 12 deletions pysaliency/datasets/fixations.py
@@ -309,8 +309,8 @@ def to_hdf5(self, target):
         """ Write fixations to hdf5 file or hdf5 group
         """

-        target.attrs['type'] = np.string_('Fixations')
-        target.attrs['version'] = np.string_('1.2')
+        target.attrs['type'] = np.bytes_('Fixations')
+        target.attrs['version'] = np.bytes_('1.2')

         variable_length_arrays = []

@@ -324,8 +324,8 @@ def to_hdf5(self, target):
                 data = data._data
             target.create_dataset(attribute, data=data)

-        target.attrs['__attributes__'] = np.string_(json.dumps(self.__attributes__))
-        target.attrs['__variable_length_arrays__'] = np.string_(json.dumps(sorted(variable_length_arrays)))
+        target.attrs['__attributes__'] = np.bytes_(json.dumps(self.__attributes__))
+        target.attrs['__variable_length_arrays__'] = np.bytes_(json.dumps(sorted(variable_length_arrays)))

     @classmethod
     @hdf5_wrapper(mode='r')
@@ -503,8 +503,8 @@ def to_hdf5(self, target):
         """ Write ScanpathFixations to hdf5 file or hdf5 group
         """

-        target.attrs['type'] = np.string_('ScanpathFixations')
-        target.attrs['version'] = np.string_('1.0')
+        target.attrs['type'] = np.bytes_('ScanpathFixations')
+        target.attrs['version'] = np.bytes_('1.0')

         self.scanpaths.to_hdf5(target.create_group('scanpaths'))

@@ -1000,8 +1000,8 @@ def to_hdf5(self, target):
         """ Write fixationtrains to hdf5 file or hdf5 group
         """

-        target.attrs['type'] = np.string_('FixationTrains')
-        target.attrs['version'] = np.string_('1.3')
+        target.attrs['type'] = np.bytes_('FixationTrains')
+        target.attrs['version'] = np.bytes_('1.3')

         variable_length_arrays = []

@@ -1016,19 +1016,19 @@ def to_hdf5(self, target):
             target.create_dataset(attribute, data=data)

         saved_attributes = [attribute_name for attribute_name in self.__attributes__ if attribute_name not in self.auto_attributes]
-        target.attrs['__attributes__'] = np.string_(json.dumps(saved_attributes))
+        target.attrs['__attributes__'] = np.bytes_(json.dumps(saved_attributes))

-        target.attrs['scanpath_attribute_mapping'] = np.string_(json.dumps(self.scanpath_attribute_mapping))
+        target.attrs['scanpath_attribute_mapping'] = np.bytes_(json.dumps(self.scanpath_attribute_mapping))

         scanpath_attributes_group = target.create_group('scanpath_attributes')
         for attribute_name, attribute_value in self.scanpath_attributes.items():
             scanpath_attributes_group.create_dataset(attribute_name, data=attribute_value)
-        scanpath_attributes_group.attrs['__attributes__'] = np.string_(json.dumps(sorted(self.scanpath_attributes.keys())))
+        scanpath_attributes_group.attrs['__attributes__'] = np.bytes_(json.dumps(sorted(self.scanpath_attributes.keys())))

         scanpath_fixation_attributes_group = target.create_group('scanpath_fixation_attributes')
         for attribute_name, attribute_value in self.scanpath_fixation_attributes.items():
             scanpath_fixation_attributes_group.create_dataset(attribute_name, data=attribute_value._data)
-        scanpath_fixation_attributes_group.attrs['__attributes__'] = np.string_(json.dumps(sorted(self.scanpath_fixation_attributes.keys())))
+        scanpath_fixation_attributes_group.attrs['__attributes__'] = np.bytes_(json.dumps(sorted(self.scanpath_fixation_attributes.keys())))


     @classmethod
10 changes: 5 additions & 5 deletions pysaliency/datasets/scanpaths.py
@@ -118,8 +118,8 @@ def subject(self) -> VariableLengthArray:
     def to_hdf5(self, target):
         """ Write scanpaths to hdf5 file or hdf5 group
         """
-        target.attrs['type'] = np.string_('Scanpaths')
-        target.attrs['version'] = np.string_('1.0')
+        target.attrs['type'] = np.bytes_('Scanpaths')
+        target.attrs['version'] = np.bytes_('1.0')

         target.create_dataset('xs', data=self.xs._data)
         target.create_dataset('ys', data=self.ys._data)
@@ -129,14 +129,14 @@ def to_hdf5(self, target):
         scanpath_attributes_group = target.create_group('scanpath_attributes')
         for attribute_name, attribute_value in self.scanpath_attributes.items():
             create_hdf5_dataset(scanpath_attributes_group, attribute_name, attribute_value)
-        scanpath_attributes_group.attrs['__attributes__'] = np.string_(json.dumps(sorted(self.scanpath_attributes.keys())))
+        scanpath_attributes_group.attrs['__attributes__'] = np.bytes_(json.dumps(sorted(self.scanpath_attributes.keys())))

         fixation_attributes_group = target.create_group('fixation_attributes')
         for attribute_name, attribute_value in self.fixation_attributes.items():
             fixation_attributes_group.create_dataset(attribute_name, data=attribute_value._data)
-        fixation_attributes_group.attrs['__attributes__'] = np.string_(json.dumps(sorted(self.fixation_attributes.keys())))
+        fixation_attributes_group.attrs['__attributes__'] = np.bytes_(json.dumps(sorted(self.fixation_attributes.keys())))

-        target.attrs['attribute_mapping'] = np.string_(json.dumps(self.attribute_mapping))
+        target.attrs['attribute_mapping'] = np.bytes_(json.dumps(self.attribute_mapping))


     @classmethod
10 changes: 5 additions & 5 deletions pysaliency/datasets/stimuli.py
@@ -170,8 +170,8 @@ def to_hdf5(self, target, verbose=False, compression='gzip', compression_opts=9)
         """ Write stimuli to hdf5 file or hdf5 group
         """

-        target.attrs['type'] = np.string_('Stimuli')
-        target.attrs['version'] = np.string_('1.1')
+        target.attrs['type'] = np.bytes_('Stimuli')
+        target.attrs['version'] = np.bytes_('1.1')

         for n, stimulus in enumerate(tqdm(self.stimuli, disable=not verbose)):
             target.create_dataset(str(n), data=stimulus, compression=compression, compression_opts=compression_opts)
@@ -209,7 +209,7 @@ def read_hdf5(cls, source):
     def _attributes_to_hdf5(self, target):
         for attribute_name, attribute_value in self.attributes.items():
             create_hdf5_dataset(target, attribute_name, attribute_value)
-        target.attrs['__attributes__'] = np.string_(json.dumps(self.__attributes__))
+        target.attrs['__attributes__'] = np.bytes_(json.dumps(self.__attributes__))

     @classmethod
     def _get_attributes_from_hdf5(cls, source, data_version, data_version_for_attribute_list):
@@ -352,8 +352,8 @@ def to_hdf5(self, target):
         """ Write FileStimuli to hdf5 file or hdf5 group
         """

-        target.attrs['type'] = np.string_('FileStimuli')
-        target.attrs['version'] = np.string_('2.1')
+        target.attrs['type'] = np.bytes_('FileStimuli')
+        target.attrs['version'] = np.bytes_('2.1')

         import h5py
         # make sure everything is unicode
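
As a sanity check, attributes written with np.bytes_ round-trip through h5py unchanged. A minimal sketch, assuming h5py is installed; the file name round_trip.h5 and the 'category' attribute are invented for this example:

import json
import h5py
import numpy as np

# Write attrs the way the patched to_hdf5 methods do, then read them back.
with h5py.File('round_trip.h5', 'w') as f:
    f.attrs['type'] = np.bytes_('Stimuli')
    f.attrs['__attributes__'] = np.bytes_(json.dumps(['category']))

with h5py.File('round_trip.h5', 'r') as f:
    # h5py hands byte-string attributes back as bytes; decode before json.loads.
    assert f.attrs['type'] == b'Stimuli'
    assert json.loads(f.attrs['__attributes__'].decode('utf8')) == ['category']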
