Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions src/hdfmap/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,8 +66,8 @@
'set_all_logging_level', 'version_info', 'module_info'
]

__version__ = "1.1.0"
__date__ = "2025/10/27"
__version__ = "1.1.1"
__date__ = "2025/11/17"
__author__ = "Dan Porter"


Expand Down
12 changes: 5 additions & 7 deletions src/hdfmap/eval_functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -254,6 +254,7 @@ def select_ids(startswith=''):
if symbol.startswith(startswith) and hdf_namespace.get(name := symbol[len(startswith):], '') in hdf_file
)

extra_data = extra_hdf_data(hdf_file)
namespace = {symbol: dataset2data(hdf_file[hdf_namespace[name]]) for symbol, name in select_ids()}
strings = {symbol: dataset2str(hdf_file[hdf_namespace[name]], units=True) for symbol, name in select_ids('s_')}
datasets = {symbol: hdf_file[hdf_namespace[name]] for symbol, name in select_ids('d_')}
Expand All @@ -266,8 +267,8 @@ def select_ids(startswith=''):
if name not in data_namespace and name not in GLOBALS_NAMELIST
and hdf_namespace.get(name, '') not in hdf_file
}
# combine with precendence
data = {**defaults, **hdf_paths, **hdf_names, **datasets, **strings, **namespace}
# combine with precedence
data = {**defaults, **hdf_paths, **hdf_names, **datasets, **strings, **namespace, **extra_data}
# add or overwrite data in data_namespace
data_namespace.update(data)
return data
Expand Down Expand Up @@ -304,16 +305,13 @@ def prepare_expression(hdf_file: h5py.File, expression: str, hdf_namespace: dict
"""
if data_namespace is None:
data_namespace = {}
# get extra data
extra_data = extra_hdf_data(hdf_file)
# find name@attribute in expression
attributes = {
f"attr__{name}_{attr}": dataset_attribute(hdf_file[path], attr)
for name, attr in re_dataset_attributes.findall(expression)
for name, attr in re_dataset_attributes.findall(expression) # name@attr
if (path := hdf_namespace.get(name, '')) in hdf_file
}
extra_data.update(attributes)
data_namespace.update(extra_data) # update in the parent function
data_namespace.update(attributes) # adds data in the parent function
# replace name@attribute in expression
expression = re_dataset_attributes.sub(r'attr__\g<1>_\g<2>', expression)
# find values with defaults '..?(..)'
Expand Down
2 changes: 1 addition & 1 deletion src/hdfmap/hdfmap_class.py
Original file line number Diff line number Diff line change
Expand Up @@ -356,7 +356,7 @@ def add_roi(self, name: str, cen_i: int | str, cen_j: int | str,
*name*_mean -> returns the mean of each image in the ROI array
*name*_bkg -> returns the background ROI array (area around ROI)
*name*_rmbkg -> returns the total with background subtracted
*name*_box -> returns the pixel positions of the ROI
*name*_box -> returns the pixel positions of the ROI corners
*name*_bkg_box -> returns the pixel positions of the background ROI

:param name: string name of the ROI
Expand Down
16 changes: 14 additions & 2 deletions src/hdfmap/nexus.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
NX_DETECTOR = 'NXdetector'
NX_DETECTOR_DATA = 'data'
NX_IMAGE_DATA = 'image_data'
NX_IMAGE_NUMBER = 'path'
NX_UNITS = 'units'
logger = create_logger(__name__)

Expand Down Expand Up @@ -189,6 +190,7 @@ class NexusMap(HdfMap):
nxmap.populate(nxs, default_entry_only=True) # populates only from the default entry

# Special behaviour
nxmap.image_data is preferentially populated by NXdetector groups
nxmap['axes'] -> return path of default axes dataset
nxmap['signal'] -> return path of default signal dataset
nxmap['image_data'] -> return path of first area detector data object
Expand Down Expand Up @@ -340,6 +342,7 @@ def generate_image_data_from_nxdetector(self):
# detector data is stored in NXdata in dataset 'data'
data_path = build_hdf_path(group_path, NX_DETECTOR_DATA)
image_data_path = build_hdf_path(group_path, NX_IMAGE_DATA)
image_data_numbers = build_hdf_path(group_path, NX_IMAGE_NUMBER)
logger.debug(f"Looking for image_data at: '{data_path}' or '{image_data_path}'")
if data_path in self.datasets and is_image(self.datasets[data_path].shape, image_ndim):
logger.info(f"Adding image_data ['{detector_name}'] = '{data_path}'")
Expand All @@ -348,13 +351,22 @@ def generate_image_data_from_nxdetector(self):
# also save image_data if available
if image_data_path in self.datasets:
detector_name = f"{detector_name}_image_list"
logger.info(f"Adding image_data ['{detector_name}'] = '{image_data_path}'")
logger.info(f"Adding image_data str ['{detector_name}'] = '{image_data_path}'")
self.image_data[detector_name] = image_data_path
self.arrays[detector_name] = image_data_path
elif image_data_numbers in self.datasets:
detector_name = f"{detector_name}_image_list"
logger.info(f"Adding image_data 1D ['{detector_name}'] = '{image_data_numbers}'")
self.image_data[detector_name] = image_data_numbers
self.arrays[detector_name] = image_data_numbers
elif image_data_path in self.datasets:
logger.info(f"Adding image_data ['{detector_name}'] = '{image_data_path}'")
logger.info(f"Adding image_data str ['{detector_name}'] = '{image_data_path}'")
self.image_data[detector_name] = image_data_path
self.arrays[detector_name] = image_data_path
elif image_data_numbers in self.datasets:
logger.info(f"Adding image_data 1D ['{detector_name}'] = '{image_data_numbers}'")
self.image_data[detector_name] = image_data_numbers
self.arrays[detector_name] = image_data_numbers
else:
# Use first dataset with > 2 dimensions
image_dataset = next((
Expand Down
1 change: 1 addition & 0 deletions tests/create_test_files.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
(DIR + 'i16/982681.nxs', 'i16 pil2m single point scan'),
(DIR + 'i16/928878.nxs', 'i16 merlin 2d delta gam calibration'),
(DIR + 'i16/1109527.nxs', 'i16 pilatus eta scan, new nexus format'),
(DIR + 'i16/1113658.nxs', 'i16 bpm scan, new nexus format'),
(DIR + 'i16/processed/1090391_msmapper.nxs', 'msmapper volume'),
(DIR + 'i10/i10-608314.nxs', 'i10 pimte scan'),
(DIR + 'i10/i10-618365.nxs', 'i10 scan'),
Expand Down
2 changes: 1 addition & 1 deletion tests/data/test_files.json

Large diffs are not rendered by default.

29 changes: 29 additions & 0 deletions tests/test_edge_cases.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ def test_edge_cases():
assert path.isfile(chk['filename']) is True, f"{chk['filename']} doesn't exist"
mymap = hdfmap.create_nexus_map(chk['filename'])
assert isinstance(mymap, hdfmap.NexusMap), f"{chk['filename']} is not NexusMap"
assert mymap('filepath') == chk['filename']
assert len(mymap.combined) == chk['len_combined'], "{chk['filename']} has wrong size of combined"
assert len(mymap.scannables) == chk['len_scannables'], f"{chk['filename']} has wrong size of scannables"
assert mymap.scannables_length() == chk['scannables_length'], f"{chk['filename']} has wrong scannables_length"
Expand Down Expand Up @@ -53,6 +54,34 @@ def test_new_i16_file():
assert fname == '1040323.nxs'


@only_dls_file_system
def test_newer_i16_file():
    """
    Check NexusMap evaluation against a new-format i16 NeXus scan file.

    Runs only on the Diamond (DLS) filesystem; evaluates the default axes/signal,
    the detector image and the file name/path expressions against known values.
    """
    filename = '/dls/science/groups/das/ExampleData/hdfmap_tests/i16/1109527.nxs'
    assert path.isfile(filename) is True, f"{filename} doesn't exist"
    mymap = hdfmap.create_nexus_map(filename)
    with hdfmap.hdf_loader.load_hdf(filename) as hdf:
        axes, signal, IMAGE, fname, fpath = mymap.eval(hdf, 'axes, signal, _IMAGE, filename, filepath')
    # 61-point scan: axes and signal must both be 1D of that length
    assert axes.shape == (61, ), 'expression "axes" has wrong shape'
    # fixed: message previously said "axes" for the signal check (copy-paste error)
    assert signal.shape == (61,), 'expression "signal" has wrong shape'
    assert signal.max() == 692919
    assert isinstance(fname, str), "expression 'filename' has wrong type"
    # 'filename' evaluates to the basename, so the full path ends with it
    assert filename.endswith(fname)
    assert isinstance(fpath, str), "expression 'filepath' has wrong type"
    assert fpath == filename


@only_dls_file_system
def test_i16_bpm_file():
    """
    Check image-path resolution for an i16 bpm scan (new NeXus format).

    Runs only on the Diamond (DLS) filesystem; the bpm detector stores a 1D
    'path' dataset, so get_image returns a scalar (0-dim) value per point.
    """
    filename = '/dls/science/groups/das/ExampleData/hdfmap_tests/i16/1113658.nxs'
    assert path.isfile(filename) is True, f"{filename} doesn't exist"
    mymap = hdfmap.create_nexus_map(filename)
    # the 1D NX_IMAGE_NUMBER ('path') dataset should be picked as image_data
    assert mymap.get_image_path() == '/entry/instrument/bpm/path'
    with hdfmap.hdf_loader.load_hdf(filename) as hdf:
        image = mymap.get_image(hdf)
    assert image.ndim == 0, 'bpm image has wrong shape'
    assert int(image) == 11, 'bpm image has wrong value'


@only_dls_file_system
def test_msmapper_file():
filename = '/dls/science/groups/das/ExampleData/hdfmap_tests/i16/processed/1098101_msmapper.nxs'
Expand Down