Skip to content

Commit

Permalink
Merge branch 'next' into ewm8190_setup_github_ci
Browse files Browse the repository at this point in the history
  • Loading branch information
ktactac-ornl authored Jan 8, 2025
2 parents abdac59 + 22f7504 commit 3619726
Show file tree
Hide file tree
Showing 11 changed files with 130 additions and 24 deletions.
2 changes: 2 additions & 0 deletions docs/release_notes.rst
Original file line number Diff line number Diff line change
Expand Up @@ -24,9 +24,11 @@ Release Notes
**Of interest to the User**:

- PR #325: Migrates repository from GitLab to GitHub
- MR #1185: Rename output directory for coherent and incoherent-inelastic corrections as "info"
- MR #1184: Document wedge binning
- MR #1183: Added a script generate_report to generate a summary report from an hdf5 log file
- MR #1180: User documentation on time and log slicing
- MR #1179: Add validation for timeslice parameters, period must be integer multiple of interval
- MR #1177: Correction output now goes to `outputDir/debug/elastic_norm` and `outputDir/debug/incoherent_inelastic`
- MR #1176: The directory /SNS/EQSANS/IPTS-XXXX/nexus/ has a priority in file search algorithm
- MR #1175: Input parameters JSON field `sample::loadOptions::additionalProperties` now accepts a boolean.
Expand Down
3 changes: 2 additions & 1 deletion src/drtsans/configuration/schema/common.json
Original file line number Diff line number Diff line change
Expand Up @@ -358,7 +358,8 @@
},
"timeSlicePeriod": {
"$ref": "common.json#/definitions/safeStringPositiveFloat",
"description": "Period for time slicing, in seconds and a multiple of timeSliceInterval. Default is None, meaning no periodicity"
"description": "Period for time slicing, in seconds and a multiple of timeSliceInterval. Default is None, meaning no periodicity",
"isIntegerMultiple": "#configuration/timeSliceInterval"
},
"useSliceIDxAsSuffix": {
"type": "boolean",
Expand Down
2 changes: 2 additions & 0 deletions src/drtsans/load.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,10 +95,12 @@ def _insert_periodic_timeslice_log(
time_offset
"""
sample_logs = SampleLogs(input_workspace)

try:
run_start = sample_logs.run_start.value
except AttributeError:
run_start = sample_logs.start_time.value

log = periodic_index_log(
period=time_period,
interval=time_interval,
Expand Down
4 changes: 2 additions & 2 deletions src/drtsans/mono/biosans/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -1549,8 +1549,8 @@ def _prepare_sample_transmission_ws(_sample_transmission):
wedges = getWedgeSelection(iq2d_main_in, **autoWedgeOpts)
logger.notice("found wedge angles:")
peak_wedge, back_wedge = wedges
logger.notice(" peak: ", peak_wedge)
logger.notice(" background:", back_wedge)
logger.notice(f" peak: {peak_wedge}")
logger.notice(f" background: {back_wedge}")
del peak_wedge, back_wedge
logger.notice(f"wedges: {wedges}")

Expand Down
19 changes: 15 additions & 4 deletions src/drtsans/redparams.py
Original file line number Diff line number Diff line change
Expand Up @@ -584,16 +584,17 @@ class ReductionParameters:
_validators = {
"dataSource": "_validate_data_source",
"evaluateCondition": "_validate_evaluate_condition",
"lessThan": "_validate_less_than",
"exclusiveOr": "_validate_exclusive_or",
"fluxFileTOF": "_validate_flux_file_tof",
"pairedTo": "_validate_is_paired_to",
"isIntegerMultiple": "_validate_is_integer_multiple",
"lessThan": "_validate_less_than",
"onlyOneTrue": "_validate_only_one_true",
"pairedTo": "_validate_is_paired_to",
"pairwiseLessThan": "_validate_pairwise_less_than",
"sameLen": "_validate_equal_len",
"scalableComponents": "_validate_components_to_scale",
"useEntry": "_validate_use_entry",
"wedgeSources": "_validate_wedge_sources",
"pairwiseLessThan": "_validate_pairwise_less_than",
"scalableComponents": "_validate_components_to_scale",
}

# 2. public class methods and static functions
Expand Down Expand Up @@ -886,6 +887,16 @@ def _validate_evaluate_condition(self, validator, value, instance, schema):
if eval(condition) is False:
yield jsonschema.ValidationError(f"{value} condition has evaluated to False")

def _validate_is_integer_multiple(self, validator, value, instance, schema):
    r"""
    Check this parameter's value is an integer multiple of another parameter's value.

    Backs schema entries like ``"isIntegerMultiple": "#configuration/timeSliceInterval"``.

    Parameters
    ----------
    validator: ~jsonschema.IValidator
        Unused, required by the jsonschema validator-callback signature.
    value: str
        JSON path of the other parameter (e.g. ``#configuration/timeSliceInterval``).
    instance: float, None
        Value of this parameter (e.g. ``timeSlicePeriod``). ``None`` disables the check.
    schema: dict
        Unused, required by the jsonschema validator-callback signature.

    Yields
    ------
    ~jsonschema.ValidationError
        If the other parameter is unset (or zero), or this value is not an integer
        multiple of the other value within a 1.e-9 tolerance.
    """
    this_value = instance  # period
    if this_value is None:
        return  # this parameter is unset, nothing to validate
    other_value = self.get_parameter_value(value)  # interval
    if other_value is None or other_value == 0:
        yield jsonschema.ValidationError(f"{value.split('/')[-1]} must be assigned, and a multiple of {instance}")
        # bug fix: must stop here; the modulo below would raise TypeError on None
        # (or ZeroDivisionError on zero) instead of reporting a validation error
        return
    # tolerate float rounding on either side of an exact multiple: the remainder may be
    # slightly above zero or slightly below ``other_value``
    remainder = this_value % other_value
    if min(remainder, abs(other_value) - remainder) > 1e-9:
        yield jsonschema.ValidationError(f"{this_value} is not a multiple of {other_value}")

def _validate_less_than(self, validator, value, instance, schema):
r"""
Check the parameter value is smaller than the value of other parameters
Expand Down
25 changes: 19 additions & 6 deletions src/drtsans/samplelogs.py
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,8 @@ def periodic_index_log(
name: str = "periodic_index",
) -> FloatTimeSeriesProperty:
r"""
Generate a periodic log whose values are integers ranging from 0 to ``period / interval``.
Generate a periodic log whose values are integers ranging from 0 to ``period / interval``
using timeSliceXXX values from the reduction configuration.
The first log entry is at ``run_start + offset`` with value 0. The next entry at
``run_start + offset + interval`` with value 1, and so on. The log wraps around
Expand Down Expand Up @@ -127,13 +128,25 @@ def periodic_index_log(
If ``period`` is not a multiple of ``interval``.
"""

if SECONDS_TO_NANOSECONDS * period % interval > 1: # allow for rounding errors of 1 nanosecond
if (SECONDS_TO_NANOSECONDS * period) % interval > 1: # allow for rounding errors of 1 nanosecond
raise ValueError(f"period {period} must be a multiple of interval {interval}")

times = np.arange(offset, duration, interval) # times at which we insert a new log entry
values_in_period = step * np.arange(0, int(period / interval)) # 0, 1,.., period/interval
period_count = 1 + int((duration - offset) / period) # additional period if "/" truncates times
entries = np.tile(values_in_period, period_count)[: len(times)].tolist() # cast to python's int type
# times at which we insert a new log entry
times = np.arange(offset, duration, interval)

# number of periods in the duration (plus one in case division truncates)
period_count = 1 + int((duration - offset) / period)

# array of values in each period, scaled by the step
values_in_period = step * np.arange(0, int(period / interval))

# repeat the values in a period up to the number of periods,
# then truncate to the length of times, then cast to list
entries = np.tile(values_in_period, period_count)[: len(times)].tolist()

assert len(times) == len(
entries
), f"times and entries must have the same length: len(times) {len(times)} != len(entries) {len(entries)}"

return time_series(name, times, entries, run_start, unit="")

Expand Down
4 changes: 2 additions & 2 deletions src/drtsans/tof/eqsans/reduction_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -340,7 +340,7 @@ def bin_i_with_correction(
# Elastic correction
if correction_setup.do_elastic_correction:
elastic_output_dir = os.path.join(
output_dir, "debug", "elastic_norm", output_filename, slice_name, f"frame_{frameskip_frame}"
output_dir, "info", "elastic_norm", output_filename, slice_name, f"frame_{frameskip_frame}"
)
os.makedirs(elastic_output_dir, exist_ok=True)

Expand Down Expand Up @@ -392,7 +392,7 @@ def bin_i_with_correction(
# Inelastic incoherence correction
if correction_setup.do_inelastic_correction[frameskip_frame]:
inelastic_output_dir = os.path.join(
output_dir, "debug", "inelastic_incoh", output_filename, slice_name, f"frame_{frameskip_frame}"
output_dir, "info", "inelastic_incoh", output_filename, slice_name, f"frame_{frameskip_frame}"
)
os.makedirs(inelastic_output_dir, exist_ok=True)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -171,7 +171,9 @@ def verify_histogram(source_nexus, test_nexus):
)

# write the error message to disk
os.makedirs("test_output", exist_ok=True)
report_file_name = os.path.basename(source_nexus).split(".")[0] + "_error_log.txt"
os.makedirs("test_output", exist_ok=True)
with open(f"test_output/{report_file_name}", "w") as report_file:
report_file.write(error_message)
report_file.write(f"source: {source_nexus}\n")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -256,10 +256,8 @@ def test_incoherence_correction_elastic_normalization(

### Check output results

elastic_output_dir = os.path.join(test_dir, "debug", "elastic_norm", f"{outputFileName}", "slice_0", "frame_0")
inelastic_output_dir = os.path.join(
test_dir, "debug", "inelastic_incoh", f"{outputFileName}", "slice_0", "frame_0"
)
elastic_output_dir = os.path.join(test_dir, "info", "elastic_norm", f"{outputFileName}", "slice_0", "frame_0")
inelastic_output_dir = os.path.join(test_dir, "info", "inelastic_incoh", f"{outputFileName}", "slice_0", "frame_0")

# Check empty subdirectories not created for no correction case
if correction_case == "no_correction":
Expand Down Expand Up @@ -512,10 +510,10 @@ def test_incoherence_correction_elastic_normalization_slices_frames(
else:
num_wavelengths_k = num_wavelengths_b = 29 - iframe
elastic_output_dir = os.path.join(
test_dir, "debug", "elastic_norm", f"{base_name}", f"slice_{islice}", f"frame_{iframe}"
test_dir, "info", "elastic_norm", f"{base_name}", f"slice_{islice}", f"frame_{iframe}"
)
inelastic_output_dir = os.path.join(
test_dir, "debug", "inelastic_incoh", f"{base_name}", f"slice_{islice}", f"frame_{iframe}"
test_dir, "info", "inelastic_incoh", f"{base_name}", f"slice_{islice}", f"frame_{iframe}"
)
# before k correction
assert (
Expand Down
33 changes: 33 additions & 0 deletions tests/unit/drtsans/test_redparams.py
Original file line number Diff line number Diff line change
Expand Up @@ -890,6 +890,7 @@ def test_incohfit_parameters(self, datarepo_dir):
parameters = deepcopy(self.parameters_all)
with amend_config(data_dir=datarepo_dir.eqsans):
validate_reduction_parameters(parameters)

# assert incohfit_qmin/qmax/factor are null by default
assert parameters["configuration"]["incohfit_qmin"] is None
assert parameters["configuration"]["incohfit_qmax"] is None
Expand Down Expand Up @@ -976,6 +977,38 @@ def test_scale_components(self, datarepo_dir):
with amend_config(data_dir=datarepo_dir.eqsans):
validate_reduction_parameters(parameters)

@pytest.mark.parametrize(
    "timeSliceInterval, timeSlicePeriod, throws_error",
    [
        (10, 100, False),  # period is an exact integer multiple of the interval
        (0.001, 1.00000000001, False),  # ratio is an integer up to float rounding
        (-45, -100, True),  # negative values are rejected
        (1, 100.1, True),  # ratio is not an integer
    ],
    ids=["exact integer multiple", "nearly exact integer ratio", "negative values", "not an integer multiple"],
)
def test_timeslice_parameters(self, datarepo_dir, timeSliceInterval, timeSlicePeriod, throws_error):
    r"""Validation of timeSlicePeriod against timeSliceInterval (integer-multiple constraint)."""
    parameters = deepcopy(self.parameters_all)

    # sanity-check the defaults before enabling time slicing
    with amend_config(data_dir=datarepo_dir.eqsans):
        validate_reduction_parameters(parameters)
    configuration = parameters["configuration"]
    assert configuration["useTimeSlice"] is False
    assert configuration["timeSliceInterval"] == 300
    assert configuration["timeSlicePeriod"] is None

    # switch on time slicing with the parametrized interval/period pair
    configuration.update(
        useTimeSlice=True,
        timeSliceInterval=timeSliceInterval,
        timeSlicePeriod=timeSlicePeriod,
    )

    with amend_config(data_dir=datarepo_dir.eqsans):
        if not throws_error:
            validate_reduction_parameters(parameters)
        else:
            with pytest.raises(ReductionParameterError):
                validate_reduction_parameters(parameters)

@pytest.mark.datarepo
def test_permissible(self, datarepo_dir):
parameters_new = update_reduction_parameters(
Expand Down
50 changes: 47 additions & 3 deletions tests/unit/drtsans/test_samplelogs.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
import pytest
from numpy.testing import assert_almost_equal
from os.path import join as pjn
from mantid.simpleapi import LoadNexusProcessed

import pytest
from mantid.api import Run
from mantid.simpleapi import LoadNexusProcessed
from numpy.testing import assert_almost_equal

from drtsans.samplelogs import SampleLogs, periodic_index_log

Expand All @@ -23,6 +24,49 @@ def test_periodic_index_log():
assert log.lastValue() == 14


@pytest.mark.parametrize(
    "period, interval, duration, offset, step, expected",
    [
        (0.9901076314875992, 0.01, 8371.0458984375, 0.6694618359281799, 1.0, 837038),
        (0.990110528971648, 0.01, 8353.4296875, 0.4783230672977612, 1.0, "fail"),
        (0.990107380210766, 0.1, 7111.37744140625, 0.8365827919520669, 1.0, "fail"),
        (0.9901, 0.1, 763.654052734375, 0.578, 1.0, "fail"),
        (9.0914642068604, 1.0, 382.9853210449219, 1.1055757465580345, 1.0, 382),
        (9.091311484270927, 1.0, 380.5020751953125, 4.384090426034748, 1.0, 377),
        (90.87363154577008, 1.0, 227.791259765625, 63.92350945782564, 1.0, 164),
        (0.09990091554886044, 0.01, 228.7245635986328, 0.04903141722988207, 1.0, "fail"),
        (0.09990091554886044, 0.01, 1903.1103515625, 0.04903141722988207, 1.0, "fail"),
        (0.09990100006257276, 0.01, 7.999692916870117, 0.06871597503197445, 1.0, "fail"),
        (0.09990099085909061, 0.009, 8.049692153930664, 0.011064958593924914, 1.0, "fail"),
        (0.09990099496711198, 0.009, 38.19853591918945, 0.06973558286231382, 1.0, "fail"),
        (2, 1, 10, 0, 1, 10),
    ],
    ids=[
        "reduce_slice_157044",
        "reduce_slice_157045",
        "reduce_slice_157046",
        "reduce_slice_157053",
        "reduce_slice_157054",
        "reduce_slice_157065",
        "reduce_slice_157076",
        "reduce_slice_157106",
        "reduce_slice_157107",
        "reduce_slice_157110",
        "reduce_slice_157111",
        "reduce_slice_157112",
        "period_count-exact-integer",
    ],
)
def test_periodic_index_log_cases(period, interval, duration, offset, step, expected):
    r"""
    Exercise periodic_index_log with parameter sets taken from real reduction runs.

    ``expected`` is either the anticipated log size or the sentinel ``"fail"``, meaning
    the internal times/entries length consistency assertion is expected to trip.
    """
    run_start = "2000-01-01T00:00:00"
    if expected == "fail":
        # bug fix: the previous pattern r"(AssertionError: ...)*" matched the empty
        # string, so the message check was vacuous. pytest.raises applies re.search to
        # str(exception), which carries the assert message without the exception-class
        # prefix, so match the actual message text.
        with pytest.raises(AssertionError, match="times and entries must have the same length"):
            periodic_index_log(period, interval, duration, run_start, offset, step)
    else:
        result = periodic_index_log(period, interval, duration, run_start, offset, step)
        assert result.size() == expected, f"Expected TimeSeriesProperty with size {expected}, got {result.size()}"


class TestSampleLogs:
@pytest.mark.datarepo
def test_init(self, datarepo_dir, clean_workspace):
Expand Down

0 comments on commit 3619726

Please sign in to comment.