Skip to content

Commit

Permalink
Use data from variables if available for reading
Browse files Browse the repository at this point in the history
  • Loading branch information
ka-sarthak committed Dec 17, 2024
1 parent 67ee382 commit 645348a
Show file tree
Hide file tree
Showing 2 changed files with 27 additions and 19 deletions.
25 changes: 16 additions & 9 deletions src/nomad_measurements/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -226,8 +226,8 @@ def add_dataset(
):
"""
Add a dataset to the HDF5 file. The dataset is written lazily (default) when
either `read_dataset` or `write_file` method is called. The `path` is validated
against the `valid_dataset_paths` if provided before adding the data.
`write_file` method is called. The `path` is validated against
`valid_dataset_paths` if provided before adding the data.
`params` should be a dictionary containing `data`. Optionally,
it can also contain `archive_path` and `internal_reference`:
Expand Down Expand Up @@ -278,8 +278,7 @@ def add_attribute(
):
"""
Add an attribute to the dataset or group at the given path. The attribute is
written lazily (default) when either `read_dataset` or `write_file` method is
called.
written lazily (default) when the `write_file` method is called.
Args:
path (str): The dataset or group path in the HDF5 file.
Expand All @@ -297,19 +296,27 @@ def add_attribute(
def read_dataset(self, path: str):
"""
Returns the dataset at the given path. If the quantity has `units` as an
attribute, tries to returns a `pint.Quantity`. Before returning the dataset, the
method also writes the file with any pending datasets.
attribute, tries to return a `pint.Quantity`.
If the dataset is available in `self._hdf5_datasets`, it is returned directly.
Args:
path (str): The dataset path in the HDF5 file.
"""
if self._hdf5_datasets or self._hdf5_attributes:
self.write_file()
if path is None:
return
file_path, dataset_path = path.split('#')
file_name = file_path.rsplit('/raw/', 1)[1]

# find path in the instance variables
value = None
if dataset_path in self._hdf5_datasets:
value = self._hdf5_datasets[dataset_path].data
if dataset_path in self._hdf5_attributes:
units = self._hdf5_attributes[dataset_path].get('units', None)
if units:
value *= ureg(units)
return value

file_name = file_path.rsplit('/raw/', 1)[1]
with h5py.File(self.archive.m_context.raw_file(file_name, 'rb')) as h5:
if dataset_path not in h5:
self.logger.warning(f'Dataset "{dataset_path}" not found.')
Expand Down
21 changes: 11 additions & 10 deletions src/nomad_measurements/xrd/schema.py
Original file line number Diff line number Diff line change
Expand Up @@ -318,7 +318,14 @@ def normalize(self, archive, logger):
),
validate_path=False,
)

hdf5_handler.add_attribute(
path=f'{prefix}/plot_intensity',
params=dict(
axes='two_theta',
signal='intensity',
NX_class='NXdata',
),
)
for var_axis in ['omega', 'phi', 'chi']:
if self.get(var_axis) is not None:
hdf5_handler.add_dataset(
Expand All @@ -338,16 +345,9 @@ def normalize(self, archive, logger):
NX_class='NXdata',
),
)
return
break

hdf5_handler.add_attribute(
path=f'{prefix}/plot_intensity',
params=dict(
axes='two_theta',
signal='intensity',
NX_class='NXdata',
),
)
hdf5_handler.write_file()


class XRDResultPlotIntensityScatteringVector(ArchiveSection):
Expand Down Expand Up @@ -465,6 +465,7 @@ def normalize(self, archive, logger):
NX_class='NXdata',
),
)
hdf5_handler.write_file()


class XRDResult(MeasurementResult):
Expand Down

0 comments on commit 645348a

Please sign in to comment.