Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
53 changes: 48 additions & 5 deletions openmc/deplete/abc.py
Original file line number Diff line number Diff line change
Expand Up @@ -583,6 +583,15 @@ class Integrator(ABC):
`source_rates` should be the same as the initial run.

.. versionadded:: 0.15.1
hdf5_dtype : str, optional
Data type for the number and reaction rate datasets; either 'float32' or 'float64'. Defaults to 'float64'.

.. versionadded:: 0.15.4
hdf5_compression : str, optional
Compression filter for the number and reaction rate datasets.
Accepted values are 'gzip' and 'lzf'. Defaults to None (no compression). Ignored with parallel HDF5.

.. versionadded:: 0.15.4

Attributes
----------
Expand Down Expand Up @@ -632,7 +641,18 @@ def __init__(
timestep_units: str = 's',
solver: str = "cram48",
continue_timesteps: bool = False,
hdf5_dtype: str = 'float64',
hdf5_compression: str = None,
):
if hdf5_dtype not in ('float32', 'float64'):
raise ValueError(
f"hdf5_dtype must be 'float32' or 'float64', got '{hdf5_dtype}'")
if hdf5_compression is not None and hdf5_compression not in ('gzip', 'lzf'):
raise ValueError(
f"hdf5_compression must be None, 'gzip', or 'lzf', "
f"got '{hdf5_compression}'")
self.hdf5_dtype = hdf5_dtype
self.hdf5_compression = hdf5_compression
if continue_timesteps and operator.prev_res is None:
raise ValueError("Continuation run requires passing prev_results.")
self.operator = operator
Expand Down Expand Up @@ -895,7 +915,9 @@ def integrate(
self._i_res + i,
proc_time,
write_rates=write_rates,
path=path
path=path,
hdf5_dtype=self.hdf5_dtype,
hdf5_compression=self.hdf5_compression,
)

# Update for next step
Expand All @@ -918,7 +940,9 @@ def integrate(
self._i_res + len(self),
proc_time,
write_rates=write_rates,
path=path
path=path,
hdf5_dtype=self.hdf5_dtype,
hdf5_compression=self.hdf5_compression,
)
self.operator.write_bos_data(len(self) + self._i_res)

Expand Down Expand Up @@ -1116,6 +1140,16 @@ class SIIntegrator(Integrator):
`source_rates` should be the same as the initial run.

.. versionadded:: 0.15.1
hdf5_dtype : str, optional
Data type for the number and reaction rate datasets; either 'float32' or 'float64'. Defaults to 'float64'.

.. versionadded:: 0.15.4
hdf5_compression : str, optional
Compression filter for number and reaction rate datasets in
depletion_results.h5. Accepted values are 'gzip' and 'lzf'.
Default is None (no compression). Ignored with parallel HDF5.

.. versionadded:: 0.15.4

Attributes
----------
Expand Down Expand Up @@ -1159,12 +1193,17 @@ def __init__(
n_steps: int = 10,
solver: str = "cram48",
continue_timesteps: bool = False,
hdf5_dtype: str = 'float64',
hdf5_compression: str = None,
):
check_type("n_steps", n_steps, Integral)
check_greater_than("n_steps", n_steps, 0)
super().__init__(
operator, timesteps, power, power_density, source_rates,
timestep_units=timestep_units, solver=solver, continue_timesteps=continue_timesteps)
timestep_units=timestep_units, solver=solver,
continue_timesteps=continue_timesteps,
hdf5_dtype=hdf5_dtype,
hdf5_compression=hdf5_compression)
self.n_steps = n_steps

def _get_bos_data_from_operator(self, step_index, step_power, n_bos):
Expand Down Expand Up @@ -1255,7 +1294,9 @@ def integrate(
self._i_res + i,
proc_time,
write_rates=write_rates,
path=path
path=path,
hdf5_dtype=self.hdf5_dtype,
hdf5_compression=self.hdf5_compression,
)

# Update for next step
Expand All @@ -1273,7 +1314,9 @@ def integrate(
self._i_res + len(self),
proc_time,
write_rates=write_rates,
path=path
path=path,
hdf5_dtype=self.hdf5_dtype,
hdf5_compression=self.hdf5_compression,
)
self.operator.write_bos_data(self._i_res + len(self))

Expand Down
2 changes: 1 addition & 1 deletion openmc/deplete/cram.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ def __call__(self, A, n0, dt):

"""
A = dt * csc_array(A, dtype=np.float64)
y = n0.copy()
y = np.array(n0, dtype=np.float64)
ident = eye_array(A.shape[0], format='csc')
for alpha, theta in zip(self.alpha, self.theta):
y += 2*np.real(alpha*sla.spsolve(A - theta*ident, y))
Expand Down
37 changes: 32 additions & 5 deletions openmc/deplete/stepresult.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,8 @@ def __init__(self):
self.name_list = None

self.data = None
self.hdf5_dtype = 'float64'
self.hdf5_compression = None

def __repr__(self):
t = self.time[0]
Expand Down Expand Up @@ -189,7 +191,8 @@ def distribute(self, local_materials, ranges):

# Direct transfer
direct_attrs = ("time", "k", "source_rate", "index_nuc",
"mat_to_hdf5_ind", "mat_to_name", "proc_time")
"mat_to_hdf5_ind", "mat_to_name", "proc_time",
"hdf5_dtype", "hdf5_compression")
for attr in direct_attrs:
setattr(new, attr, getattr(self, attr))
# Get applicable slice of data
Expand Down Expand Up @@ -259,12 +262,19 @@ def export_to_hdf5(self, filename, step, write_rates: bool = False):
kwargs = {'mode': "w" if step == 0 else "a"}

if h5py.get_config().mpi and comm.size > 1:
# Write results in parallel
# Write results in parallel — compression not supported
saved_compression = self.hdf5_compression
if self.hdf5_compression is not None:
if comm.rank == 0 and step == 0:
warnings.warn("HDF5 compression is not supported with "
"parallel I/O; writing without compression")
self.hdf5_compression = None
kwargs['driver'] = 'mpio'
kwargs['comm'] = comm
with h5py.File(filename, **kwargs) as handle:
self._to_hdf5(handle, step, parallel=True,
write_rates=write_rates)
self.hdf5_compression = saved_compression
else:
# Gather results at root process
all_results = comm.gather(self)
Expand Down Expand Up @@ -344,17 +354,21 @@ def _write_hdf5_metadata(self, handle, write_rates):
self.rates.index_rx[rxn])

# Construct array storage
_dtype = self.hdf5_dtype
_compression = self.hdf5_compression

handle.create_dataset("number", (1, n_mats, n_nuc_number),
maxshape=(None, n_mats, n_nuc_number),
chunks=True,
dtype='float64')
dtype=_dtype,
compression=_compression)

if include_rates and n_nuc_rxn > 0 and n_rxn > 0:
handle.create_dataset(
"reaction rates", (1, n_mats, n_nuc_rxn, n_rxn),
maxshape=(None, n_mats, n_nuc_rxn, n_rxn),
chunks=True, dtype='float64')
chunks=True, dtype=_dtype,
compression=_compression)

handle.create_dataset("eigenvalues", (1, 2),
maxshape=(None, 2), dtype='float64')
Expand Down Expand Up @@ -554,7 +568,9 @@ def save(
step_ind,
proc_time=None,
write_rates: bool = False,
path: PathLike = "depletion_results.h5"
path: PathLike = "depletion_results.h5",
hdf5_dtype: str = 'float64',
hdf5_compression: str = None,
):
"""Creates and writes depletion results to disk

Expand Down Expand Up @@ -582,12 +598,23 @@ def save(
Path to file to write. Defaults to 'depletion_results.h5'.

.. versionadded:: 0.14.0
hdf5_dtype : str, optional
Data type for the number and reaction rate datasets; either 'float32' or 'float64'. Defaults to 'float64'.

.. versionadded:: 0.15.4
hdf5_compression : str, optional
Compression filter for the number and reaction rate datasets.
Accepted values are 'gzip' and 'lzf'. Defaults to None (no compression). Ignored with parallel HDF5.

.. versionadded:: 0.15.4
"""
# Get indexing terms
vol_dict, nuc_list, burn_list, full_burn_list, name_list = op.get_results_info()

# Create results
results = StepResult()
results.hdf5_dtype = hdf5_dtype
results.hdf5_compression = hdf5_compression
results.allocate(vol_dict, nuc_list, burn_list, full_burn_list, name_list)

n_mat = len(burn_list)
Expand Down
Loading