diff --git a/pyproject.toml b/pyproject.toml index 9a74e48..28ef862 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,7 +5,9 @@ version = "0.0.1" requires-python = ">= 3.11" dependencies = [ "pika==1.3.2", - "h5py==3.11.0" + "h5py==3.11.0", + "xmltodict==0.13.0", + "requests==2.32.3" ] @@ -19,7 +21,9 @@ run-detection = "rundetection.run_detection:main" formatting = [ "ruff==0.4.8", "mypy==1.10.0", - "run-detection[test]" + "run-detection[test]", + "types-requests==2.32.0.20240914", + "types-xmltodict==0.14.0.20241009" ] test = [ diff --git a/rundetection/ingestion/extracts.py b/rundetection/ingestion/extracts.py index 25f056c..fdb434d 100644 --- a/rundetection/ingestion/extracts.py +++ b/rundetection/ingestion/extracts.py @@ -34,6 +34,22 @@ def skip_extract(job_request: JobRequest, _: Any) -> JobRequest: return job_request +def loq_extract(job_request: JobRequest, dataset: Any) -> JobRequest: + """ + Get the sample details and the cycle strings + :param job_request: The job request + :param dataset: The nexus file dataset + :return: The updated job request + """ + job_request.additional_values["cycle_string"] = get_cycle_string_from_path(job_request.filepath) + job_request.additional_values["sample_thickness"] = dataset.get("sample").get("thickness") + job_request.additional_values["sample_geometry"] = dataset.get("sample").get("shape") + job_request.additional_values["sample_height"] = dataset.get("sample").get("height") + job_request.additional_values["sample_width"] = dataset.get("sample").get("width") + + return job_request + + def tosca_extract(job_request: JobRequest, _: Any) -> JobRequest: """ Add the cycle_string to the job request @@ -137,6 +153,8 @@ def get_extraction_function(instrument: str) -> Callable[[JobRequest, Any], JobR return tosca_extract case "osiris": return osiris_extract + case "loq": + return loq_extract case _: return skip_extract diff --git a/rundetection/ingestion/ingest.py b/rundetection/ingestion/ingest.py index 231d470..5ed481b 
100644 --- a/rundetection/ingestion/ingest.py +++ b/rundetection/ingestion/ingest.py @@ -102,4 +102,6 @@ def get_run_title(nexus_path: Path) -> str: :param nexus_path: Path - the nexus file path :return: str - The title of the files run """ + # NOTE: this reuses ingest() and therefore parses the whole file just to read the title; + # if that ever becomes a bottleneck, load only the title dataset here instead. return ingest(nexus_path).experiment_title diff --git a/rundetection/rules/common_rules.py b/rundetection/rules/common_rules.py index 437efcd..9a26425 100644 --- a/rundetection/rules/common_rules.py +++ b/rundetection/rules/common_rules.py @@ -2,9 +2,13 @@ Module containing rule implementations for instrument shared rules """ +import logging + from rundetection.job_requests import JobRequest from rundetection.rules.rule import Rule +logger = logging.getLogger(__name__) + class EnabledRule(Rule[bool]): """ @@ -14,3 +18,26 @@ class EnabledRule(Rule[bool]): def verify(self, job_request: JobRequest) -> None: job_request.will_reduce = self._value + + +class NotAScatterFileError(Exception): + pass + + +class CheckIfScatterSANS(Rule[bool]): + def verify(self, job_request: JobRequest) -> None: + if "_SANS/TRANS" not in job_request.experiment_title: + job_request.will_reduce = False + logger.error("Not a scatter run. Does not have _SANS/TRANS in the experiment title.") + # If it has empty or direct in the title assume it is a direct run file instead of a normal scatter. + if ( + "empty" in job_request.experiment_title + or "EMPTY" in job_request.experiment_title + or "direct" in job_request.experiment_title + or "DIRECT" in job_request.experiment_title + ): + job_request.will_reduce = False + logger.error( + "Experiment title contains 'empty' or 'direct'; assuming this is an empty-can or direct " "beam run rather than a sample scatter." 
+ ) diff --git a/rundetection/rules/factory.py b/rundetection/rules/factory.py index 4c1a23d..e4a2bab 100644 --- a/rundetection/rules/factory.py +++ b/rundetection/rules/factory.py @@ -4,8 +4,9 @@ from typing import Any -from rundetection.rules.common_rules import EnabledRule +from rundetection.rules.common_rules import CheckIfScatterSANS, EnabledRule from rundetection.rules.inter_rules import InterStitchRule +from rundetection.rules.loq_rules import LoqFindFiles, LoqUserFile from rundetection.rules.mari_rules import MariMaskFileRule, MariStitchRule, MariWBVANRule from rundetection.rules.osiris_rules import ( OsirisDefaultGraniteAnalyser, @@ -59,6 +60,15 @@ def rule_factory(key_: str, value: T) -> Rule[Any]: # noqa: C901, PLR0911, PLR0 case "osirisreductionmode": if isinstance(value, bool): return OsirisReductionModeRule(value) + case "checkifscattersans": + if isinstance(value, bool): + return CheckIfScatterSANS(value) + case "loqfindfiles": + if isinstance(value, bool): + return LoqFindFiles(value) + case "loquserfile": + if isinstance(value, str): + return LoqUserFile(value) case _: raise MissingRuleError(f"Implementation of Rule: {key_} does not exist.") diff --git a/rundetection/rules/loq_rules.py b/rundetection/rules/loq_rules.py new file mode 100644 index 0000000..aa916e7 --- /dev/null +++ b/rundetection/rules/loq_rules.py @@ -0,0 +1,165 @@ +""" +Rules for LOQ +""" + +from __future__ import annotations + +import logging +import typing +from dataclasses import dataclass +from pathlib import Path + +import requests +import xmltodict + +from rundetection.rules.rule import Rule + +if typing.TYPE_CHECKING: + from rundetection.job_requests import JobRequest + +logger = logging.getLogger(__name__) + + +@dataclass +class SansFileData: + title: str + type: str + run_number: str + + +def _extract_run_number_from_filename(filename: str) -> str: + # Assume filename looks like so: LOQ00100002.nxs, then strip. 
+ return filename.split(".")[0].lstrip("LOQ").lstrip("0") + + +def _is_sample_transmission_file(sans_file: SansFileData, sample_title: str) -> bool: + return sample_title in sans_file.title and sans_file.type == "TRANS" + + +def _is_sample_direct_file(sans_file: SansFileData) -> bool: + return ("direct" in sans_file.title.lower() or "empty" in sans_file.title.lower()) and sans_file.type == "TRANS" + + +def _is_can_scatter_file(sans_file: SansFileData, can_title: str) -> bool: + return can_title == sans_file.title.split("_")[0] and sans_file.type == "SANS/TRANS" + + +def _is_can_transmission_file(sans_file: SansFileData, can_title: str) -> bool: + return can_title in sans_file.title and sans_file.type == "TRANS" + + +def _find_trans_file(sans_files: list[SansFileData], sample_title: str) -> SansFileData | None: + for sans_file in sans_files: + if _is_sample_transmission_file(sans_file=sans_file, sample_title=sample_title): + return sans_file + return None + + +def _find_direct_file(sans_files: list[SansFileData]) -> SansFileData | None: + reversed_files = reversed(sans_files) + for sans_file in reversed_files: + if _is_sample_direct_file(sans_file=sans_file): + return sans_file + return None + + +def _find_can_scatter_file(sans_files: list[SansFileData], can_title: str) -> SansFileData | None: + for sans_file in sans_files: + if _is_can_scatter_file(sans_file=sans_file, can_title=can_title): + return sans_file + return None + + +def _find_can_trans_file(sans_files: list[SansFileData], can_title: str) -> SansFileData | None: + for sans_file in sans_files: + if _is_can_transmission_file(sans_file=sans_file, can_title=can_title): + return sans_file + return None + + +def find_path_for_run_number(cycle_path: str, run_number: int) -> Path | None: + # 10 is just a magic number, but we needed an unrealistic value for the maximum + for padding in range(11): + potential_path = Path(f"{cycle_path}/LOQ{str(run_number).zfill(padding)}.nxs") + if potential_path.exists(): + 
return potential_path + return None + + +def grab_cycle_instrument_index(cycle: str) -> str: + _, cycle_year, cycle_num = cycle.split("_") + url = f"http://data.isis.rl.ac.uk/journals/ndxloq/journal_{cycle_year}_{cycle_num}.xml" + return requests.get(url, timeout=5).text + + +def create_list_of_files(job_request: JobRequest) -> list[SansFileData]: + cycle = job_request.additional_values["cycle_string"] + xml = grab_cycle_instrument_index(cycle=cycle) + cycle_run_info = xmltodict.parse(xml) + list_of_files = [] + for run_info in cycle_run_info["NXroot"]["NXentry"]: + title_contents = run_info["title"]["#text"].split("_") + run_number = run_info["run_number"]["#text"] + if len(title_contents) in {2, 3}: + file_type = title_contents[-1] + else: + job_request.will_reduce = False + logger.error(f"Run {run_info} either doesn't contain a _ or is not an expected experiment title format.") + return [] + list_of_files.append(SansFileData(title=run_info["title"]["#text"], type=file_type, run_number=run_number)) + return list_of_files + + +def strip_excess_files(sans_files: list[SansFileData], scatter_run_number: int) -> list[SansFileData]: + new_list_of_files: list[SansFileData] = [] + for sans_file in sans_files: + if int(sans_file.run_number) >= scatter_run_number: + return new_list_of_files + new_list_of_files.append(sans_file) + return new_list_of_files + + +class LoqFindFiles(Rule[bool]): + def verify(self, job_request: JobRequest) -> None: + # Expecting 3 values + title_parts = job_request.experiment_title.split("_") + if len(title_parts) != 3: # noqa: PLR2004 + job_request.will_reduce = False + logger.error( + f"Less or more than 3 sections to the experiment_title, probably missing Can Scatter title: " + f"{job_request.experiment_title}" + ) + return + sample_title, can_title, ___ = title_parts + sans_files = create_list_of_files(job_request) + if sans_files == []: + job_request.will_reduce = False + logger.error("No files found for this cycle excluding this run.") + 
return + sans_files = strip_excess_files(sans_files, scatter_run_number=job_request.run_number) + + job_request.additional_values["run_number"] = job_request.run_number + + trans_file = _find_trans_file(sans_files=sans_files, sample_title=sample_title) + if trans_file is not None: + job_request.additional_values["scatter_transmission"] = trans_file.run_number + + can_scatter = _find_can_scatter_file(sans_files=sans_files, can_title=can_title) + if can_scatter is not None: + job_request.additional_values["can_scatter"] = can_scatter.run_number + + can_trans = _find_can_trans_file(sans_files=sans_files, can_title=can_title) + if can_trans is not None and can_scatter is not None: + job_request.additional_values["can_transmission"] = can_trans.run_number + + direct_file = _find_direct_file(sans_files=sans_files) + if direct_file is not None: + if trans_file is not None: + job_request.additional_values["scatter_direct"] = direct_file.run_number + if can_scatter is not None and can_trans is not None: + job_request.additional_values["can_direct"] = direct_file.run_number + + +class LoqUserFile(Rule[str]): + def verify(self, job_request: JobRequest) -> None: + job_request.additional_values["user_file"] = f"/extras/loq/{self._value}" diff --git a/rundetection/run_detection.py b/rundetection/run_detection.py index 0a028ad..2d1eafe 100644 --- a/rundetection/run_detection.py +++ b/rundetection/run_detection.py @@ -130,14 +130,12 @@ def process_notifications(notification_queue: SimpleQueue[JobRequest]) -> None: :param notification_queue: The notification queue :return: None """ - logger.info("Checking notification queue...") while not notification_queue.empty(): detected_run = notification_queue.get() logger.info("Sending notification for run: %s", detected_run.run_number) with producer() as channel: channel.basic_publish(EGRESS_QUEUE_NAME, "", detected_run.to_json_string().encode()) - logger.info("Notification queue empty. 
Continuing...") def write_readiness_probe_file() -> None: diff --git a/rundetection/specifications/loq_specification.json b/rundetection/specifications/loq_specification.json index c994fef..94c2f65 100644 --- a/rundetection/specifications/loq_specification.json +++ b/rundetection/specifications/loq_specification.json @@ -1,3 +1,6 @@ { - "enabled": false + "enabled": true, + "checkifscattersans": true, + "loqfindfiles": true, + "loquserfile": "USER_LOQ_243B_M3_Changer_Xpress_Okesola__MERGED_log.toml" } \ No newline at end of file diff --git a/rundetection/specifications/osiris_specification.json b/rundetection/specifications/osiris_specification.json index ec22d5f..b2b3c4b 100644 --- a/rundetection/specifications/osiris_specification.json +++ b/rundetection/specifications/osiris_specification.json @@ -1,6 +1,6 @@ { "enabled": true, - "osiriscalibfilesandreflection": {"002": "00148587", "004": "00148587"}, + "osiriscalibfilesandreflection": {"002": "00149059", "004": "00149060"}, "osirisreductionmode": false, "osirisdefaultspectroscopy": true, "osirisdefaultgraniteanalyser": true, diff --git a/test/ingestion/test_extracts.py b/test/ingestion/test_extracts.py index 2831c72..f32425d 100644 --- a/test/ingestion/test_extracts.py +++ b/test/ingestion/test_extracts.py @@ -11,6 +11,7 @@ from rundetection.ingestion.extracts import ( get_cycle_string_from_path, get_extraction_function, + loq_extract, mari_extract, osiris_extract, skip_extract, @@ -59,6 +60,7 @@ def test_skip_extract(caplog: LogCaptureFixture): ("mari", "mari_extract"), ("tosca", "tosca_extract"), ("osiris", "osiris_extract"), + ("loq", "loq_extract"), ], ) def test_get_extraction_function(input_value, expected_function_name): @@ -236,6 +238,25 @@ def test_osiris_extract_raises_on_bad_frequencies(job_request): osiris_extract(job_request, dataset) +def test_loq_extract(job_request): + dataset = { + "sample": { + "thickness": 1.0, + "shape": "Disc", + "height": 8.0, + "width": 8.0, + } + } + with 
patch("rundetection.ingestion.extracts.get_cycle_string_from_path", return_value="some string"): + loq_extract(job_request, dataset) + + assert job_request.additional_values["cycle_string"] == "some string" + assert job_request.additional_values["sample_thickness"] == 1.0 + assert job_request.additional_values["sample_geometry"] == "Disc" + assert job_request.additional_values["sample_height"] == 8.0 # noqa: PLR2004 + assert job_request.additional_values["sample_width"] == 8.0 # noqa: PLR2004 + + def test_get_cycle_string_from_path_valid(): """ Test get cycle string returns correct string diff --git a/test/rules/test_common_rules.py b/test/rules/test_common_rules.py index 46c92df..83228b3 100644 --- a/test/rules/test_common_rules.py +++ b/test/rules/test_common_rules.py @@ -4,11 +4,12 @@ import unittest from pathlib import Path +from unittest import mock import pytest from rundetection.ingestion.ingest import JobRequest -from rundetection.rules.common_rules import EnabledRule +from rundetection.rules.common_rules import CheckIfScatterSANS, EnabledRule @pytest.fixture() @@ -42,5 +43,23 @@ def test_enabled_rule_when_not_enabled(job_request) -> None: assert job_request.will_reduce is False +@pytest.mark.parametrize("end_of_title", ["_TRANS", "_SANS", "COOL", "_sans/trans"]) +def test_checkifscattersans_verify_raises_for_no_sans_trans(end_of_title) -> None: + job_request = mock.MagicMock() + job_request.experiment_title = "{fancy chemical}" + end_of_title + CheckIfScatterSANS(True).verify(job_request) + + assert job_request.will_reduce is False + + +@pytest.mark.parametrize("to_raise", ["direct", "DIRECT", "empty", "EMPTY"]) +def test_checkifscattersans_verify_raises_for_direct_or_empty_in_title(to_raise) -> None: + job_request = mock.MagicMock() + job_request.experiment_title = "{fancy chemical " + to_raise + "}_SANS/TRANS" + CheckIfScatterSANS(True).verify(job_request) + + assert job_request.will_reduce is False + + if __name__ == "__main__": unittest.main() diff 
--git a/test/rules/test_factory.py b/test/rules/test_factory.py index ed6f44a..bc99a44 100644 --- a/test/rules/test_factory.py +++ b/test/rules/test_factory.py @@ -8,9 +8,10 @@ import pytest -from rundetection.rules.common_rules import EnabledRule +from rundetection.rules.common_rules import CheckIfScatterSANS, EnabledRule from rundetection.rules.factory import rule_factory from rundetection.rules.inter_rules import InterStitchRule +from rundetection.rules.loq_rules import LoqFindFiles, LoqUserFile from rundetection.rules.mari_rules import MariMaskFileRule, MariStitchRule, MariWBVANRule from rundetection.rules.osiris_rules import ( OsirisDefaultGraniteAnalyser, @@ -50,6 +51,9 @@ def assert_correct_rule(name: str, value: Any, rule_type: type[Rule]): ("osirisdefaultspectroscopy", True, OsirisDefaultSpectroscopy), ("osirisdefaultgraniteanalyser", True, OsirisDefaultGraniteAnalyser), ("osirisreductionmode", True, OsirisReductionModeRule), + ("checkifscattersans", True, CheckIfScatterSANS), + ("loquserfile", "loquserfile.toml", LoqUserFile), + ("loqfindfiles", True, LoqFindFiles), ], ) def test_rule_factory_returns_correct_rule(rule_key, rule_value, expected_rule): diff --git a/test/rules/test_loq_rules.py b/test/rules/test_loq_rules.py new file mode 100644 index 0000000..0e803ab --- /dev/null +++ b/test/rules/test_loq_rules.py @@ -0,0 +1,411 @@ +import tempfile +from pathlib import Path +from unittest import mock + +import pytest + +from rundetection.job_requests import JobRequest +from rundetection.rules.loq_rules import ( + LoqFindFiles, + LoqUserFile, + SansFileData, + _extract_run_number_from_filename, + _find_can_scatter_file, + _find_can_trans_file, + _find_direct_file, + _find_trans_file, + _is_can_scatter_file, + _is_can_transmission_file, + _is_sample_direct_file, + _is_sample_transmission_file, + find_path_for_run_number, + grab_cycle_instrument_index, + strip_excess_files, +) + +SANS_FILES = [ + SansFileData(title="{direct/empty beam}", type="TRANS", 
run_number="-1"), + SansFileData(title="{Banana}", type="SANS/TRANS", run_number="0"), + SansFileData(title="{Banana}", type="TRANS", run_number="1"), + SansFileData(title="{Apple}", type="SANS/TRANS", run_number="2"), + SansFileData(title="{Apple}", type="TRANS", run_number="3"), + SansFileData(title="{direct beam}", type="TRANS", run_number="4"), +] + + +@pytest.mark.parametrize( + ("filename", "result"), + [("LOQ00100002.nxs", "100002"), ("LOQ123456789.nxs", "123456789"), ("LOQ.nxs", ""), ("LOQ00000.nxs", "")], +) +def test_extract_run_number_from_filename(filename, result): + assert _extract_run_number_from_filename(filename) == result + + +@pytest.mark.parametrize( + ("sans_file", "sample_title", "result"), + [ + (SansFileData(title="{Banana}", type="SANS/TRANS", run_number="0"), "Banana", False), + (SansFileData(title="{Banana}", type="TRANS", run_number="0"), "Banana", True), + (SansFileData(title="{Banana}", type="SANS", run_number="0"), "Banana", False), + (SansFileData(title="{Banana}", type="TRANS", run_number="0"), "Apple", False), + ], +) +def test_is_sample_transmission_file(sans_file, sample_title, result): + assert _is_sample_transmission_file(sans_file, sample_title) == result + + +@pytest.mark.parametrize( + ("sans_file", "result"), + [ + (SansFileData(title="{Banana}", type="TRANS", run_number="0"), False), + (SansFileData(title="{Banana direct}", type="SANS/TRANS", run_number="0"), False), + (SansFileData(title="{Banana direct}", type="TRANS", run_number="0"), True), + (SansFileData(title="{Banana empty}", type="TRANS", run_number="0"), True), + (SansFileData(title="{Banana direct}", type="SANS", run_number="0"), False), + ], +) +def test_is_sample_direct_file(sans_file, result): + assert _is_sample_direct_file(sans_file) == result + + +@pytest.mark.parametrize( + ("sans_file", "can_title", "result"), + [ + (SansFileData(title="{Banana}", type="SANS/TRANS", run_number="0"), "{Banana}", True), + (SansFileData(title="{Banana}", type="SANS/TRANS", 
run_number="0"), "{Apple}", False), + (SansFileData(title="{Banana}", type="TRANS", run_number="0"), "{Banana}", False), + (SansFileData(title="{Banana}_{}", type="TRANS", run_number="0"), "{Banana}", False), + ], +) +def test_is_can_scatter_file(sans_file, can_title, result): + assert _is_can_scatter_file(sans_file, can_title) == result + + +@pytest.mark.parametrize( + ("sans_file", "can_title", "result"), + [ + (SansFileData(title="{Banana}", type="SANS/TRANS", run_number="0"), "{Banana}", False), + (SansFileData(title="{Banana}", type="TRANS", run_number="0"), "{Apple}", False), + (SansFileData(title="{Banana}", type="TRANS", run_number="0"), "{Banana}", True), + ], +) +def test_is_can_transmission_file(sans_file, can_title, result): + assert _is_can_transmission_file(sans_file, can_title) == result + + +@pytest.mark.parametrize( + ("sans_files", "sample_title", "expected"), + [(SANS_FILES, "{Apple}", SANS_FILES[4]), (SANS_FILES, "{Banana}", SANS_FILES[2])], +) +def test_find_trans_file_success(sans_files, sample_title, expected): + assert _find_trans_file(sans_files, sample_title) == expected + + +def test_find_trans_file_fail(): + assert _find_trans_file(SANS_FILES, "{Lemmon}") is None + + +def test_find_direct_file(): + assert _find_direct_file(SANS_FILES) == SANS_FILES[-1] + + +def test_find_can_scatter_file(): + assert _find_can_scatter_file(SANS_FILES, "{Apple}") == SANS_FILES[3] + + +def test_can_trans_files(): + assert _find_can_trans_file(SANS_FILES, "{Apple}") == SANS_FILES[4] + + +def test_path_for_run_number_with_some_zeros(): + tempdir = tempfile.mkdtemp() + path = f"{tempdir}/LOQ0012345.nxs" + with Path(path).open("a"): + assert find_path_for_run_number(tempdir, 12345) == Path(path) + + +def test_path_for_run_number_with_no_zeros(): + tempdir = tempfile.mkdtemp() + path = f"{tempdir}/LOQ12345.nxs" + with Path(path).open("a"): + assert find_path_for_run_number(tempdir, 12345) == Path(path) + + +def test_path_for_run_number_too_many_zeros(): + 
tempdir = tempfile.mkdtemp() + with Path(f"{tempdir}/LOQ00000000000012345.nxs").open("a"): + assert find_path_for_run_number(tempdir, 12345) is None + + +def test_path_for_run_number_doesnt_exist(): + tempdir = tempfile.mkdtemp() + assert find_path_for_run_number(tempdir, 12345) is None + + +def test_grab_cycle_instrument_index(): + with mock.patch("rundetection.rules.loq_rules.requests") as requests: + cycle_index_text = grab_cycle_instrument_index("cycle_24_2") + assert cycle_index_text == requests.get.return_value.text + requests.get.assert_called_once_with("http://data.isis.rl.ac.uk/journals/ndxloq/journal_24_2.xml", timeout=5) + + +def test_strip_excess_files(): + files = [ + SansFileData(title="", type="", run_number="0"), + SansFileData(title="", type="", run_number="1"), + SansFileData(title="", type="", run_number="2"), + ] + new_list = strip_excess_files(files, 1) + assert new_list == [SansFileData(title="", type="", run_number="0")] + + +def test_loq_find_files_verify_title_too_long(): + job_request = JobRequest( + run_number=0, + instrument="", + experiment_title="too_long_problems_here", + experiment_number="", + filepath=Path(), + run_start="", + run_end="", + raw_frames=0, + good_frames=0, + users="", + will_reduce=True, + additional_values={}, + additional_requests=[], + ) + LoqFindFiles(value=True).verify(job_request) + assert job_request.will_reduce is False + + +def test_loq_find_files_verify_title_too_short(): + job_request = JobRequest( + run_number=0, + instrument="", + experiment_title="tooshortproblemshere", + experiment_number="", + filepath=Path(), + run_start="", + run_end="", + raw_frames=0, + good_frames=0, + users="", + will_reduce=True, + additional_values={}, + additional_requests=[], + ) + LoqFindFiles(value=True).verify(job_request) + assert job_request.will_reduce is False + + +def test_loq_find_files_verify_no_files_left(): + job_request = JobRequest( + run_number=0, + instrument="", + experiment_title="{}_{}_sans/trans", + 
experiment_number="", + filepath=Path(), + run_start="", + run_end="", + raw_frames=0, + good_frames=0, + users="", + will_reduce=True, + additional_values={}, + additional_requests=[], + ) + with mock.patch("rundetection.rules.loq_rules.create_list_of_files", return_value=[]): + loq_find_files = LoqFindFiles(value=True) + loq_find_files.verify(job_request) + assert job_request.will_reduce is False + + +def test_loq_find_files_verify_some_files_found_but_none_valid(): + job_request = JobRequest( + run_number=0, + instrument="", + experiment_title="{}_{}_sans/trans", + experiment_number="", + filepath=Path("/path/cycle_24_2/LOQ.nxs"), + run_start="", + run_end="", + raw_frames=0, + good_frames=0, + users="", + will_reduce=True, + additional_values={}, + additional_requests=[], + ) + with ( + mock.patch("rundetection.rules.loq_rules.create_list_of_files", return_value=[SansFileData("", "", "")]), + mock.patch( + "rundetection.rules.loq_rules.strip_excess_files", + return_value=[SansFileData("", "", ""), SansFileData("", "", ""), SansFileData("", "", "")], + ), + ): + loq_find_files = LoqFindFiles(value=True) + loq_find_files.verify(job_request) + assert job_request.will_reduce is True + assert job_request.additional_values["run_number"] == 0 + + +def test_loq_find_files_trans_file_found(): + job_request = JobRequest( + run_number=5, + instrument="", + experiment_title="{scatter}_{background}_sans/trans", + experiment_number="", + filepath=Path("/path/cycle_24_2/LOQ.nxs"), + run_start="", + run_end="", + raw_frames=0, + good_frames=0, + users="", + will_reduce=True, + additional_values={}, + additional_requests=[], + ) + with ( + mock.patch("rundetection.rules.loq_rules.create_list_of_files", return_value=[SansFileData("", "", "")]), + mock.patch( + "rundetection.rules.loq_rules.strip_excess_files", + return_value=[ + SansFileData(title="{scatter}", type="TRANS", run_number="1"), + SansFileData(title="{background}", type="TRANS", run_number="2"), + 
SansFileData(title="{direct}", type="SANS/TRANS", run_number="3"), + ], + ), + ): + loq_find_files = LoqFindFiles(value=True) + loq_find_files.verify(job_request) + assert job_request.will_reduce is True + assert job_request.additional_values["run_number"] == 5 # noqa: PLR2004 + assert job_request.additional_values["scatter_transmission"] == "1" + + +def test_loq_find_files_can_transmission_file_found(): + job_request = JobRequest( + run_number=5, + instrument="", + experiment_title="{scatter}_{background}_sans/trans", + experiment_number="", + filepath=Path("/path/cycle_24_2/LOQ.nxs"), + run_start="", + run_end="", + raw_frames=0, + good_frames=0, + users="", + will_reduce=True, + additional_values={}, + additional_requests=[], + ) + with ( + mock.patch("rundetection.rules.loq_rules.create_list_of_files", return_value=[SansFileData("", "", "")]), + mock.patch( + "rundetection.rules.loq_rules.strip_excess_files", + return_value=[ + SansFileData(title="{scatter}", type="TRANS", run_number="1"), + SansFileData(title="{background}", type="SANS/TRANS", run_number="2"), + SansFileData(title="{background}", type="TRANS", run_number="3"), + SansFileData(title="{direct}", type="SANS/TRANS", run_number="4"), + ], + ), + ): + loq_find_files = LoqFindFiles(value=True) + loq_find_files.verify(job_request) + assert job_request.will_reduce is True + assert job_request.additional_values["run_number"] == 5 # noqa: PLR2004 + assert job_request.additional_values["can_transmission"] == "3" + + +def test_loq_find_files_direct_file_found(): + job_request = JobRequest( + run_number=5, + instrument="", + experiment_title="{scatter}_{background}_sans/trans", + experiment_number="", + filepath=Path("/path/cycle_24_2/LOQ.nxs"), + run_start="", + run_end="", + raw_frames=0, + good_frames=0, + users="", + will_reduce=True, + additional_values={}, + additional_requests=[], + ) + with ( + mock.patch("rundetection.rules.loq_rules.create_list_of_files", return_value=[SansFileData("", "", "")]), + 
mock.patch( + "rundetection.rules.loq_rules.strip_excess_files", + return_value=[ + SansFileData(title="{scatter}", type="TRANS", run_number="1"), + SansFileData(title="{background}", type="SANS/TRANS", run_number="2"), + SansFileData(title="{background}", type="TRANS", run_number="3"), + SansFileData(title="{direct}", type="TRANS", run_number="4"), + ], + ), + ): + loq_find_files = LoqFindFiles(value=True) + loq_find_files.verify(job_request) + assert job_request.will_reduce is True + assert job_request.additional_values["run_number"] == 5 # noqa: PLR2004 + assert job_request.additional_values["scatter_direct"] == "4" + assert job_request.additional_values["can_direct"] == "4" + + +def test_loq_find_files_can_scatter_file_found(): + job_request = JobRequest( + run_number=5, + instrument="", + experiment_title="{scatter}_{background}_sans/trans", + experiment_number="", + filepath=Path("/path/cycle_24_2/LOQ.nxs"), + run_start="", + run_end="", + raw_frames=0, + good_frames=0, + users="", + will_reduce=True, + additional_values={}, + additional_requests=[], + ) + with ( + mock.patch("rundetection.rules.loq_rules.create_list_of_files", return_value=[SansFileData("", "", "")]), + mock.patch( + "rundetection.rules.loq_rules.strip_excess_files", + return_value=[ + SansFileData(title="{scatter}", type="TRANS", run_number="1"), + SansFileData(title="{background}", type="SANS/TRANS", run_number="2"), + SansFileData(title="{background}", type="TRANS", run_number="3"), + SansFileData(title="{direct}", type="SANS/TRANS", run_number="4"), + ], + ), + ): + loq_find_files = LoqFindFiles(value=True) + loq_find_files.verify(job_request) + assert job_request.will_reduce is True + assert job_request.additional_values["run_number"] == 5 # noqa: PLR2004 + assert job_request.additional_values["can_scatter"] == "2" + + +def test_loq_user_file_(): + job_request = JobRequest( + run_number=0, + instrument="", + experiment_title="", + experiment_number="", + filepath=Path(), + run_start="", 
+ run_end="", + raw_frames=0, + good_frames=0, + users="", + will_reduce=True, + additional_values={}, + additional_requests=[], + ) + LoqUserFile(value="loq_user_file").verify(job_request) + assert job_request.additional_values["user_file"] == "/extras/loq/loq_user_file" + assert len(job_request.additional_values) == 1 diff --git a/test/test_e2e.py b/test/test_e2e.py index 63023a5..c824522 100644 --- a/test/test_e2e.py +++ b/test/test_e2e.py @@ -94,6 +94,7 @@ def assert_run_in_recieved(run: Any, recieved: list[Any]): EXPECTED_MARI_WBVAN = get_specification_value("mari", "mariwbvan") EXPECTED_MARI_MASK = get_specification_value("mari", "marimaskfile") +EXPECTED_OSIRIS_MASK = get_specification_value("osiris", "osiriscalibfilesandreflection") @pytest.mark.parametrize( @@ -303,7 +304,7 @@ def assert_run_in_recieved(run: Any, recieved: list[Any]): "tcb_monitor_min": 40700.0, "tcb_monitor_max": 60700.0, "reflection": "002", - "calibration_run_number": "00148587", + "calibration_run_number": EXPECTED_OSIRIS_MASK["002"], "spectroscopy_reduction": "true", "diffraction_reduction": "false", "analyser": "graphite", @@ -332,7 +333,7 @@ def assert_run_in_recieved(run: Any, recieved: list[Any]): "tcb_monitor_min": 40700.0, "tcb_monitor_max": 60700.0, "reflection": "002", - "calibration_run_number": "00148587", + "calibration_run_number": EXPECTED_OSIRIS_MASK["002"], "spectroscopy_reduction": "true", "diffraction_reduction": "false", "analyser": "graphite", @@ -361,7 +362,7 @@ def assert_run_in_recieved(run: Any, recieved: list[Any]): "tcb_monitor_min": 40700.0, "tcb_monitor_max": 60700.0, "reflection": "002", - "calibration_run_number": "00148587", + "calibration_run_number": EXPECTED_OSIRIS_MASK["002"], "spectroscopy_reduction": "true", "diffraction_reduction": "false", "analyser": "graphite", @@ -390,7 +391,7 @@ def assert_run_in_recieved(run: Any, recieved: list[Any]): "tcb_monitor_min": 11700.0, "tcb_monitor_max": 51700.0, "reflection": "002", - "calibration_run_number": 
"00148587", + "calibration_run_number": EXPECTED_OSIRIS_MASK["002"], "spectroscopy_reduction": "true", "diffraction_reduction": "false", "analyser": "graphite",