From c86c37375b561426739c23185ad4241236312767 Mon Sep 17 00:00:00 2001
From: tjgalvin
Date: Fri, 25 Oct 2024 10:52:05 +0800
Subject: [PATCH] Create a BaseOptions class using pydantic (#184)

* added initial BaseOptions pydantic
* added support for list/tuple/set
* fixed tests added _asdict
* attempt to handle optional[list]
* fixed field processing
* added docstrings
* fixed options regression
* added some tests
* using FieldOptions with BaseOptions
* added optional container path
* rejig cli around beam masks
* rejig of beam masks
* corrected argument name
* added missing argument
* tweaks to archive cli
* added a tarball verification
* updated changelog / added error to raise
* added logs to verify tarballs
* added a few tests
* added more tests/fixed tests
* removed type ignore
* fixed use_beam_mask flag default
* added test for use-beam-mask / type ignore
* Added expect type hint
* added test for bandpass flow cli
* removed old test

---------

Co-authored-by: tgalvin
---
 CHANGELOG.md                              |   6 +
 flint/archive.py                          |  96 ++++++---
 flint/configuration.py                    |   3 +-
 flint/exceptions.py                       |   4 +
 flint/imager/wsclean.py                   |  36 ++--
 flint/masking.py                          |  96 ++-------
 flint/options.py                          | 174 +++++++++++++---
 flint/prefect/flows/bandpass_pipeline.py  |  87 +-------
 flint/prefect/flows/continuum_pipeline.py | 230 ++--------------------
 tests/test_archive.py                     |  32 ++-
 tests/test_bandpass_flow.py               |  27 ---
 tests/test_baseoptions.py                 |  98 +++++++++
 tests/test_configuration.py               |   5 +-
 tests/test_masking.py                     |   6 +-
 tests/test_options.py                     |  84 +++++---
 tests/test_prefect_bandpass_flow.py       |  45 +++++
 16 files changed, 521 insertions(+), 508 deletions(-)
 delete mode 100644 tests/test_bandpass_flow.py
 create mode 100644 tests/test_baseoptions.py
 create mode 100644 tests/test_prefect_bandpass_flow.py

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 88b460a6..e5e5e1fc 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,12 @@
 # dev
 
 - added `wrapper_options_from_strategy` decorator helper function
+- Created `BaseOptions` from a `pydantic.BaseModel` class.
+- Added functions to
+  - create `ArgumentParser` options from a `BaseOptions` class
+  - load arguments into a `BaseOptions` class from an `ArgumentParser`
+  - starting to move some classes over to this approach (including some CLIs)
+- Added a `verify_tarball` function to verify archives
 
 # 0.2.7
 
diff --git a/flint/archive.py b/flint/archive.py
index 51a7ccae..db077c7e 100644
--- a/flint/archive.py
+++ b/flint/archive.py
@@ -2,17 +2,19 @@
 
 import re
 import shutil
+import subprocess
+import shlex
 import tarfile
 from argparse import ArgumentParser
 from pathlib import Path
 from typing import Any, Collection, Dict, List, Tuple
 
 from flint.configuration import get_options_from_strategy
+from flint.exceptions import TarArchiveError
 from flint.logging import logger
 from flint.options import (
-    DEFAULT_COPY_RE_PATTERNS,
-    DEFAULT_TAR_RE_PATTERNS,
     ArchiveOptions,
+    add_options_to_parser,
 )
 
 
@@ -89,13 +91,46 @@ def copy_files_into(copy_out_path: Path, files_to_copy: Collection[Path]) -> Pat
     return copy_out_path
 
+
+def verify_tarball(
+    tarball: Path,
+) -> bool:
+    """Verify that a tarball was created properly by examining its
+    table of contents. Internally this calls ``tar`` through a subprocess call.
+    Hence, ``tar`` needs to be available on the system PATH.
+
+    Args:
+        tarball (Path): The tarball to examine
+
+    Returns:
+        bool: True if the exit code of ``tar`` is 0, False otherwise
+    """
+    tarball = Path(tarball)  # trust nothing
+    assert (
+        tarball.exists() and tarball.is_file()
+    ), f"{tarball} is not a file or does not exist"
+    assert tarball.suffix == ".tar", f"{tarball=} appears to not have a .tar extension"
+
+    cmd = f"tar -tvf {str(tarball)}"
+    logger.info(f"Verifying {tarball=}")
+    popen = subprocess.Popen(shlex.split(cmd), stderr=subprocess.PIPE)
+    with popen.stderr:  # type: ignore
+        for line in iter(popen.stderr.readline, b""):  # type: ignore
+            logger.error(line.decode().strip())
+    exitcode = popen.wait()
+
+    return exitcode == 0
+
+
 # TODO: Add a clobber option
-def tar_files_into(tar_out_path: Path, files_to_tar: Collection[Path]) -> Path:
+def tar_files_into(
+    tar_out_path: Path, files_to_tar: Collection[Path], verify: bool = True
+) -> Path:
     """Create a tar file given a desired output path and list of files
     to tar.
 
     Args:
         tar_out_path (Path): The output path of the tarball. The parent directory will be created if necessary.
         files_to_tar (Collection[Path]): All the files to tarball up
+        verify (bool, optional): Verify that the tarball was correctly formed. Defaults to True.
 
     Raises:
         FileExistsError: The path of the tarball created
@@ -121,6 +156,14 @@ def tar_files_into(tar_out_path: Path, files_to_tar: Collection[Path]) -> Path:
             tar.add(file, arcname=file.name)
 
     logger.info(f"Created {tar_out_path}")
+
+    if verify:
+        tar_success = verify_tarball(tarball=tar_out_path)
+        if not tar_success:
+            raise TarArchiveError(f"Failed to verify {tar_out_path=}")
+
+        logger.info(f"{tar_out_path=} appears to be correctly formed")
+
     return tar_out_path
 
 
@@ -200,28 +243,30 @@ def get_parser() -> ArgumentParser:
         dest="mode", help="Operation mode of flint_archive"
     )
 
-    list_parser = subparser.add_parser("list")
+    list_parser = subparser.add_parser(
+        "list", help="List the files that would be copied"
+    )
     list_parser.add_argument(
         "--base-path",
         type=Path,
         default=Path("."),
         help="Base directory to perform glob expressions",
     )
-
-    list_parser.add_argument(
-        "--file-patterns",
-        nargs="+",
-        default=DEFAULT_TAR_RE_PATTERNS,
-        type=str,
-        help="The regular expression patterns to evaluate",
-    )
     list_parser.add_argument(
         "--strategy-yaml-path",
         type=Path,
         default=None,
        help="Path to a strategy file with a archive section. Overrides any --file-patterns. 
", ) - + list_parser.add_argument( + "--mode", + choices=("create", "copy"), + default="copy", + help="Which set of RE patterns to present, those for the tarball (create) or those for copy", + ) + list_parser = add_options_to_parser( + parser=list_parser, options_class=ArchiveOptions + ) create_parser = subparser.add_parser("create", help="Create a tarfile archive") create_parser.add_argument( "tar_out_path", type=Path, help="Path of the output tar file to be created" @@ -233,12 +278,8 @@ def get_parser() -> ArgumentParser: help="Base directory to perform glob expressions", ) - create_parser.add_argument( - "--tar-file-patterns", - nargs="+", - default=DEFAULT_TAR_RE_PATTERNS, - type=str, - help="The regular expression patterns to evaluate inside the base path directory", + create_parser = add_options_to_parser( + parser=create_parser, options_class=ArchiveOptions ) create_parser.add_argument( "--strategy-yaml-path", @@ -262,12 +303,8 @@ def get_parser() -> ArgumentParser: help="Base directory to perform glob expressions", ) - copy_parser.add_argument( - "--copy-file-patterns", - nargs="+", - default=DEFAULT_COPY_RE_PATTERNS, - type=str, - help="The regular expression patterns to evaluate inside the base path directory", + copy_parser = add_options_to_parser( + parser=copy_parser, options_class=ArchiveOptions ) copy_parser.add_argument( "--strategy-yaml-path", @@ -294,11 +331,16 @@ def cli() -> None: files = resolve_glob_expressions( base_path=args.base_path, - file_re_patterns=archive_options.tar_file_re_patterns, + file_re_patterns=( + archive_options.tar_file_re_patterns + if args.mode == "create" + else archive_options.copy_file_re_patterns + ), ) - for count, file in enumerate(sorted(files)): logger.info(f"{count} of {len(files)}, {file}") + logger.info(f"{len(files)} for mode={args.mode}") + elif args.mode == "create": update_options_create: Dict[str, Any] = ( get_archive_options_from_yaml(strategy_yaml_path=args.strategy_yaml_path) diff --git a/flint/configuration.py b/flint/configuration.py index 78e7ed37..40d7fc5d 100644 --- a/flint/configuration.py +++ b/flint/configuration.py @@ -11,6 +11,7 @@ from typing import Any, Callable, Dict, ParamSpec, Optional, TypeVar, Union from click import MissingParameter +from pydantic import ValidationError import yaml from flint.imager.wsclean import WSCleanOptions @@ -423,7 +424,7 @@ def verify_configuration(input_strategy: Strategy, raise_on_error: bool = True) ) try: _ = MODE_OPTIONS_MAPPING[key](**options) - except TypeError as typeerror: + except (ValidationError, TypeError) as typeerror: errors.append( f"{key} mode in initial round incorrectly formed. 
{typeerror} " ) diff --git a/flint/exceptions.py b/flint/exceptions.py index 92575e24..219c217b 100644 --- a/flint/exceptions.py +++ b/flint/exceptions.py @@ -20,3 +20,7 @@ class CleanDivergenceError(Exception): """Raised if it is detected that cleaning has diverged.""" pass + + +class TarArchiveError(Exception): + """Raised it the flint tarball is not created successfullty""" diff --git a/flint/imager/wsclean.py b/flint/imager/wsclean.py index 5b0d073c..03ea285f 100644 --- a/flint/imager/wsclean.py +++ b/flint/imager/wsclean.py @@ -30,7 +30,12 @@ from flint.logging import logger from flint.ms import MS from flint.naming import create_image_cube_name, create_imaging_name_prefix -from flint.options import options_to_dict +from flint.options import ( + options_to_dict, + BaseOptions, + add_options_to_parser, + create_options_from_parser, +) from flint.sclient import run_singularity_command from flint.utils import ( get_environment_variable, @@ -57,7 +62,7 @@ class ImageSet(NamedTuple): """Residual images.""" -class WSCleanOptions(NamedTuple): +class WSCleanOptions(BaseOptions): """A basic container to handle WSClean options. These attributes should conform to the same option name in the calling signature of wsclean @@ -98,7 +103,7 @@ class WSCleanOptions(NamedTuple): """Enable multiscale deconvolution""" multiscale_scale_bias: float = 0.75 """Multiscale bias term""" - multiscale_scales: Optional[Collection[int]] = ( + multiscale_scales: Tuple[int, ...] = ( 0, 15, 25, @@ -150,13 +155,6 @@ class WSCleanOptions(NamedTuple): pol: str = "i" """The polarisation to be imaged""" - def with_options(self, **kwargs) -> WSCleanOptions: - """Return a new instance of WSCleanOptions with updated components""" - _dict = self._asdict() - _dict.update(**kwargs) - - return WSCleanOptions(**_dict) - class WSCleanCommand(NamedTuple): """Simple container for a wsclean command.""" @@ -834,11 +832,8 @@ def get_parser() -> ArgumentParser: default=None, help="Path to a singularity container with wsclean installed. ", ) - wsclean_parser.add_argument( - "--data-column", - type=str, - default="CORRECTED_DATA", - help="The column name to image. 
", + wsclean_parser = add_options_to_parser( + parser=wsclean_parser, options_class=WSCleanOptions ) return parser @@ -856,8 +851,15 @@ def cli() -> None: logger.setLevel(logging.DEBUG) ms = MS(path=args.ms, column=args.data_column) - - wsclean_imager(ms=ms, wsclean_container=args.wsclean_container) + wsclean_options: WSCleanOptions = create_options_from_parser( + parser_namespace=args, + options_class=WSCleanOptions, # type: ignore + ) + wsclean_imager( + ms=ms, + wsclean_container=args.wsclean_container, + update_wsclean_options=wsclean_options._asdict(), + ) if __name__ == "__main__": diff --git a/flint/masking.py b/flint/masking.py index ffc726a7..388f0b0f 100644 --- a/flint/masking.py +++ b/flint/masking.py @@ -4,9 +4,9 @@ from __future__ import annotations -from argparse import ArgumentParser, Namespace +from argparse import ArgumentParser from pathlib import Path -from typing import Collection, NamedTuple, Optional, Union +from typing import Collection, Optional, Union import astropy.units as u import numpy as np @@ -24,12 +24,13 @@ from flint.logging import logger from flint.naming import FITSMaskNames, create_fits_mask_names +from flint.options import BaseOptions, add_options_to_parser, create_options_from_parser from flint.utils import get_pixels_per_beam # TODO: Need to remove a fair amount of old approaches, and deprecate some of the toy functions -class MaskingOptions(NamedTuple): +class MaskingOptions(BaseOptions): """Contains options for the creation of clean masks from some subject image. Clipping levels specified are in units of RMS (or sigma). They are NOT in absolute units. @@ -37,7 +38,7 @@ class MaskingOptions(NamedTuple): base_snr_clip: float = 4 """A base clipping level to be used should other options not be activated""" - flood_fill: bool = True + flood_fill: bool = False """Whether to attempt to flood fill when constructing a mask. This should be `True` for ``grow_low_snr_islands`` and ``suppress_artefacts`` to have an effect. """ flood_fill_positive_seed_clip: float = 4.5 """The clipping level to seed islands that will be grown to lower signal metric""" @@ -70,14 +71,7 @@ class MaskingOptions(NamedTuple): beam_shape_erode: bool = False """Erode the mask using the shape of the restoring beam""" beam_shape_erode_minimum_response: float = 0.6 - """The minimum response of the beam that is used to form t he erode structure shape""" - - def with_options(self, **kwargs) -> MaskingOptions: - """Return a new instance of the MaskingOptions""" - _dict = self._asdict() - _dict.update(**kwargs) - - return MaskingOptions(**_dict) + """The minimum response of the beam that is used to form the erode structure shape""" def consider_beam_mask_round( @@ -784,66 +778,15 @@ def get_parser() -> ArgumentParser: help="Create a mask for an image, potentially using its RMS and BKG images (e.g. outputs from BANE). Output FITS image will default to the image with a mask suffix.", ) fits_parser.add_argument("image", type=Path, help="Path to the input image. ") - fits_parser.add_argument( - "--rms-fits", type=Path, help="Path to the RMS of the input image. " - ) - fits_parser.add_argument( - "--bkg-fits", type=Path, help="Path to the BKG of the input image. " + fits_parser = add_options_to_parser( + parser=fits_parser, options_class=MaskingOptions ) fits_parser.add_argument( - "-s", - "--save-signal", - action="store_true", - help="Save the signal map. Defaults to the same as image with a signal suffix. 
", - ) - fits_parser.add_argument( - "--base-snr-clip", - type=float, - default=4, - help="A base clipping level to be used should other options not be activated", - ) - fits_parser.add_argument( - "--flood-fill", - action="store_true", - default=False, - help="Whether to attempt to flood fill when constructing a mask. This should be `True` for `grow_low_snr_islands` and `suppress_artefacts to have an effect. ", - ) - fits_parser.add_argument( - "--flood-fill-positive-seed-clip", - type=float, - default=4.5, - help="The clipping level to seed islands that will be grown to lower signal metric", - ) - fits_parser.add_argument( - "--flood-fill-positive-flood-clip", - type=float, - default=1.5, - help="Clipping level used to grow seeded islands down to", - ) - fits_parser.add_argument( - "--flood-fill-use-mbc", - action="store_true", - default=False, - help="If True, the clipping levels are used as the `increase_factor` when using a minimum absolute clip. ", - ) - fits_parser.add_argument( - "--flood-fill-use-mbc-box-size", - type=int, - default=75, - help="The size of the mbc box size should mbc be used", - ) - fits_parser.add_argument( - "--beam-shape-erode", - action="store_true", - default=False, - help="Erode the mask using the shape of the restoring beam", + "--rms-fits", type=Path, help="Path to the RMS of the input image. " ) fits_parser.add_argument( - "--beam-shape-erode-minimum-response", - type=float, - default=0.6, - help="The minimum response of the beam that is used to form t he erode structure shape. Smaller numbers correspond to a larger shape which means islands are more aggressively removed", + "--bkg-fits", type=Path, help="Path to the BKG of the input image. " ) extract_parser = subparser.add_parser( @@ -866,28 +809,15 @@ def get_parser() -> ArgumentParser: return parser -def _args_to_mask_options(args: Namespace) -> MaskingOptions: - """Convert the args namespace to a MaskingOptions""" - masking_options = MaskingOptions( - base_snr_clip=args.base_snr_clip, - flood_fill=args.flood_fill, - flood_fill_positive_seed_clip=args.flood_fill_positive_seed_clip, - flood_fill_positive_flood_clip=args.flood_fill_positive_flood_clip, - flood_fill_use_mbc=args.flood_fill_use_mbc, - flood_fill_use_mbc_box_size=args.flood_fill_use_mbc_box_size, - beam_shape_erode=args.beam_shape_erode, - beam_shape_erode_minimum_response=args.beam_shape_erode_minimum_response, - ) - return masking_options - - def cli(): parser = get_parser() args = parser.parse_args() if args.mode == "mask": - masking_options = _args_to_mask_options(args=args) + masking_options = create_options_from_parser( + parser_namespace=args, options_class=MaskingOptions + ) create_snr_mask_from_fits( fits_image_path=args.image, fits_rms_path=args.rms_fits, diff --git a/flint/options.py b/flint/options.py index d6534eb5..44a2d3db 100644 --- a/flint/options.py +++ b/flint/options.py @@ -11,8 +11,22 @@ annotations, ) +from argparse import ArgumentParser, Namespace from pathlib import Path -from typing import Any, Collection, Dict, List, NamedTuple, Optional, Union +from pydantic import BaseModel, ConfigDict +from pydantic.fields import FieldInfo +from typing import ( + Any, + Dict, + List, + NamedTuple, + Optional, + Union, + Tuple, + TypeVar, + get_args, + get_origin, +) import yaml @@ -40,13 +54,133 @@ def options_to_dict(input_options: Any) -> Dict: if "_asdict" in dir(input_options): return input_options._asdict() + try: + if issubclass(input_options, BaseModel): + return dict(**input_options.__dict__) + except TypeError: + 
logger.debug(f"can not use issubclass on {input_options}") + try: return dict(**input_options) except TypeError: raise TypeError(f"Input options is not known: {type(input_options)}") -class BandpassOptions(NamedTuple): +T = TypeVar("T", bound=BaseModel) + + +class BaseOptions(BaseModel): + """A base class that Options style flint classes can + inherit from. This is derived from ``pydantic.BaseModel``, + and can be used for validation of supplied values. + + Class derived from ``BaseOptions`` are immutable by + default, and have the docstrings of attributes + extracted. + """ + + model_config = ConfigDict( + frozen=True, from_attributes=True, use_attribute_docstrings=True, extra="forbid" + ) + + def with_options(self: T, /, **kwargs) -> T: + new_args = self.__dict__.copy() + new_args.update(**kwargs) + + return self.__class__(**new_args) + + def _asdict(self) -> Dict[str, Any]: + return self.__dict__ + + +def _create_argparse_options(name: str, field: FieldInfo) -> Tuple[str, Dict[str, Any]]: + """Convert a pydantic Field into ``dict`` to splate into ArgumentParser.add_argument()""" + + field_name = name if field.is_required() else "--" + name.replace("_", "-") + + field_type = get_origin(field.annotation) + field_args = get_args(field.annotation) + iterable_types = (list, tuple, set) + + options = dict(action="store", help=field.description, default=field.default) + + if field.annotation is bool: + options["action"] = "store_false" if field.default else "store_true" + elif field_type in iterable_types or ( + field_type is Union and any(get_origin(p) in iterable_types for p in field_args) + ): + options["nargs"] = "+" + + return field_name, options + + +def add_options_to_parser( + parser: ArgumentParser, options_class: type[BaseOptions] +) -> ArgumentParser: + """Given an established argument parser and a class derived + from a ``pydantic.BaseModel``, populate the argument parser + with the model properties. + + Args: + parser (ArgumentParser): Parser that arguments will be added to + options_class (type[BaseModel]): A ``Options`` style class derived from ``BaseOptions`` + + Returns: + ArgumentParser: Updated argument parser + """ + + assert issubclass( + options_class, BaseModel + ), f"{options_class=} is not a pydantic BaseModel" + + group = parser.add_argument_group( + title=f"Inputs for {options_class.__name__}", + description="Options for the masking options example", + ) + + for name, field in options_class.model_fields.items(): + field_name, options = _create_argparse_options(name=name, field=field) + group.add_argument(field_name, **options) # type: ignore + + return parser + + +U = TypeVar("U", bound=BaseOptions) + + +def create_options_from_parser( + parser_namespace: Namespace, options_class: type[U] +) -> U: + """Given a ``BaseOptions`` derieved class, extract the corresponding + arguments from an ``argparse.nNamespace``. These options correspond to + ones generated by ``add_options_to_parser``. 
+ + Args: + parser_namespace (Namespace): The argument parser corresponding to those in the ``BaseOptions`` class + options_class (U): A ``BaseOptions`` derived class + + Returns: + U: An populated options class with arguments drawn from CLI argument parser + """ + assert issubclass( + options_class, # type: ignore + BaseModel, + ), f"{options_class=} is not a pydantic BaseModel" + + args = ( + vars(parser_namespace) + if not isinstance(parser_namespace, dict) + else parser_namespace + ) + + opts_dict = {} + for name, field in options_class.model_fields.items(): + opts_dict[name] = args[name] + + return options_class(**opts_dict) + + +class BandpassOptions(BaseOptions): """Container that reoresents the flint related options that might be used throughout the processing of bandpass calibration data. @@ -59,9 +193,9 @@ class BandpassOptions(NamedTuple): a single bandpass pipeline run """ - flagger_container: Path + flagger_container: Optional[Path] = None """Path to the singularity aoflagger container""" - calibrate_container: Path + calibrate_container: Optional[Path] = None """Path to the singularity calibrate container""" expected_ms: int = 36 """The expected number of measurement set files to find""" @@ -83,7 +217,7 @@ class BandpassOptions(NamedTuple): """Flag Jones matrix if any amplitudes with a Jones are above this value""" -class FieldOptions(NamedTuple): +class FieldOptions(BaseOptions): """Container that represents the flint related options that might be used throughout components related to the actual pipeline. @@ -96,9 +230,9 @@ class FieldOptions(NamedTuple): rounds of self-calibration. """ - flagger_container: Path + flagger_container: Optional[Path] = None """Path to the singularity aoflagger container""" - calibrate_container: Path + calibrate_container: Optional[Path] = None """Path to the singularity calibrate container""" casa_container: Optional[Path] = None """Path to the singularity CASA container""" @@ -134,13 +268,15 @@ class FieldOptions(NamedTuple): """Specify the final beamsize of linmos field images in (arcsec, arcsec, deg)""" pb_cutoff: float = 0.1 """Primary beam attenuation cutoff to use during linmos""" - use_preflagger: bool = True + use_preflagger: bool = False """Whether to apply (or search for solutions with) bandpass solutions that have gone through the preflagging operations""" use_smoothed: bool = False """Whether to apply (or search for solutions with) a bandpass smoothing operation applied""" - use_beam_masks: bool = True + use_beam_masks: bool = False """Construct beam masks from MFS images to use for the next round of imaging. """ - use_beam_mask_rounds: Union[str, List[int], int] = 1 + use_beam_masks_from: int = 1 + """If `use_beam_masks` is True, this sets the round where beam masks will be generated from""" + use_beam_masks_rounds: Optional[List[int]] = None """If `use_beam_masks` is True, this sets which rounds should have a mask applied""" imaging_strategy: Optional[Path] = None """Path to a FLINT imaging yaml file that contains settings to use throughout imaging""" @@ -155,12 +291,6 @@ class FieldOptions(NamedTuple): coadd_cubes: bool = False """Co-add cubes formed throughout imaging together. Cubes will be smoothed channel-wise to a common resolution. 
Only performed on final set of images""" - def with_options(self, **kwargs) -> FieldOptions: - _dict = self._asdict() - _dict.update(**kwargs) - - return FieldOptions(**_dict) - def dump_field_options_to_yaml( output_path: Path, field_options: FieldOptions, overwrite: bool = False @@ -215,20 +345,14 @@ def dump_field_options_to_yaml( DEFAULT_COPY_RE_PATTERNS = (r".*linmos.*fits", r".*weight\.fits", r".*png", r".*csv") -class ArchiveOptions(NamedTuple): +class ArchiveOptions(BaseOptions): """Container for options related to archiving products from flint workflows""" - tar_file_re_patterns: Collection[str] = DEFAULT_TAR_RE_PATTERNS + tar_file_re_patterns: Tuple[str, ...] = DEFAULT_TAR_RE_PATTERNS """Regular-expressions to use to collect files that should be tarballed""" - copy_file_re_patterns: Collection[str] = DEFAULT_COPY_RE_PATTERNS + copy_file_re_patterns: Tuple[str, ...] = DEFAULT_COPY_RE_PATTERNS """Regular-expressions used to identify files to copy into a final location (not tarred)""" - def with_options(self, **kwargs) -> ArchiveOptions: - opts = self._asdict() - opts.update(**kwargs) - - return ArchiveOptions(**opts) - class MS(NamedTuple): """Helper to keep track of measurement set information diff --git a/flint/prefect/flows/bandpass_pipeline.py b/flint/prefect/flows/bandpass_pipeline.py index 06cf03a5..6eee2db8 100644 --- a/flint/prefect/flows/bandpass_pipeline.py +++ b/flint/prefect/flows/bandpass_pipeline.py @@ -27,7 +27,11 @@ from flint.logging import logger from flint.ms import MS, preprocess_askap_ms, split_by_field from flint.naming import get_sbid_from_path -from flint.options import BandpassOptions +from flint.options import ( + BandpassOptions, + add_options_to_parser, + create_options_from_parser, +) from flint.prefect.clusters import get_dask_runner from flint.prefect.common.utils import upload_image_as_artifact from flint.sky_model import get_1934_model @@ -336,78 +340,14 @@ def get_parser() -> ArgumentParser: help="Location to write the field-split MSs. Will attempt to create a directory using the SBID of the bandpass observation. ", ) - parser.add_argument( - "--expected-ms", - type=int, - default=36, - help="The expected number of measurement sets to find. ", - ) - parser.add_argument( - "--calibrate-container", - type=Path, - default="aocalibrate.sif", - help="Path to container that holds AO calibrate and applysolutions. ", - ) - parser.add_argument( - "--flagger-container", - type=Path, - default="flagger.sif", - help="Path to container with aoflagger software. ", - ) parser.add_argument( "--cluster-config", type=str, default="petrichor", help="Path to a cluster configuration file, or a known cluster name. ", ) - parser.add_argument( - "--smooth-solutions", - default=False, - action="store_true", - help="Smooth the bandpass solutions", - ) - parser.add_argument( - "--smooth-window-size", - default=16, - type=int, - help="Size of the smoothing Savgol window when smoothing bandpass solutions", - ) - parser.add_argument( - "--smooth-polynomial-order", - default=4, - type=int, - help="Order of the polynomial when smoothing the bandpass solutions with the Savgol filter", - ) - parser.add_argument( - "--flag-calibrate-rounds", - type=int, - default=3, - help="The number of times a bandpass solution will be derived, applied and flagged. 
", - ) - parser.add_argument( - "--minuv", - type=float, - default=None, - help="The minimum baseline length, in meters, for data to be included in bandpass calibration stage", - ) - parser.add_argument( - "--preflagger-ant-mean-tolerance", - type=float, - default=0.2, - help="Tolerance of the mean x/y antenna gain ratio test before antenna is flagged", - ) - parser.add_argument( - "--preflagger-mesh-ant-flags", - default=False, - action="store_true", - help="Share channel flags from bandpass solutions between all antennas", - ) - parser.add_argument( - "--preflagger-jones-max-amplitude", - default=None, - type=float, - help="Flag Jones matrix if any amplitudes with a Jones are above this value", - ) + + parser = add_options_to_parser(parser=parser, options_class=BandpassOptions) return parser @@ -421,17 +361,8 @@ def cli() -> None: args = parser.parse_args() - bandpass_options = BandpassOptions( - flagger_container=args.flagger_container, - calibrate_container=args.calibrate_container, - expected_ms=args.expected_ms, - smooth_solutions=args.smooth_solutions, - smooth_window_size=args.smooth_window_size, - smooth_polynomial_order=args.smooth_polynomial_order, - flag_calibrate_rounds=args.flag_calibrate_rounds, - minuv=args.minuv, - preflagger_ant_mean_tolerance=args.preflagger_ant_mean_tolerance, - preflagger_mesh_ant_flags=args.preflagger_mesh_ant_flags, + bandpass_options = create_options_from_parser( + parser_namespace=args, options_class=BandpassOptions ) setup_run_bandpass_flow( diff --git a/flint/prefect/flows/continuum_pipeline.py b/flint/prefect/flows/continuum_pipeline.py index 7c24c83d..c7d3cd37 100644 --- a/flint/prefect/flows/continuum_pipeline.py +++ b/flint/prefect/flows/continuum_pipeline.py @@ -29,7 +29,12 @@ extract_components_from_name, get_sbid_from_path, ) -from flint.options import FieldOptions, dump_field_options_to_yaml +from flint.options import ( + FieldOptions, + dump_field_options_to_yaml, + add_options_to_parser, + create_options_from_parser, +) from flint.prefect.clusters import get_dask_runner from flint.prefect.common.imaging import ( _create_convol_linmos_images, @@ -379,7 +384,11 @@ def process_science_fields( fits_beam_masks = None if consider_beam_mask_round( current_round=current_round, - mask_rounds=field_options.use_beam_mask_rounds, + mask_rounds=( + field_options.use_beam_masks_rounds + if field_options.use_beam_masks_rounds + else field_options.use_beam_masks_from + ), allow_beam_masks=field_options.use_beam_masks, ): # Early versions of the masking procedure required aegean outputs @@ -552,18 +561,6 @@ def get_parser() -> ArgumentParser: type=Path, help="Path to directories containing the beam-wise science measurementsets that will have solutions copied over and applied.", ) - parser.add_argument( - "--calibrated-bandpass-path", - type=Path, - default=None, - help="Path to directory containing the uncalibrated beam-wise measurement sets that contain the bandpass calibration source. If None then the '--sky-model-directory' should be provided. ", - ) - parser.add_argument( - "--imaging-strategy", - type=Path, - default=None, - help="Path to a FLINT yaml file that specifies options to use throughout iamging. ", - ) parser.add_argument( "--split-path", type=Path, @@ -571,53 +568,10 @@ def get_parser() -> ArgumentParser: help="Location to write field-split MSs to. Will attempt to use the parent name of a directory when writing out a new MS. 
", ) parser.add_argument( - "--holofile", - type=Path, - default=None, - help="Path to the holography FITS cube used for primary beam corrections", - ) - - parser.add_argument( - "--expected-ms", - type=int, - default=36, - help="The expected number of measurement sets to find. ", - ) - parser.add_argument( - "--calibrate-container", - type=Path, - default="aocalibrate.sif", - help="Path to container that holds AO calibrate and applysolutions. ", - ) - parser.add_argument( - "--flagger-container", - type=Path, - default="flagger.sif", - help="Path to container with aoflagger software. ", - ) - parser.add_argument( - "--wsclean-container", - type=Path, - default=None, - help="Path to the wsclean singularity container", - ) - parser.add_argument( - "--yandasoft-container", - type=Path, - default=None, - help="Path to the singularity container with yandasoft", - ) - parser.add_argument( - "--potato-container", - type=Path, - default=None, - help="Path to the potato peel singularity container", - ) - parser.add_argument( - "--casa-container", + "--calibrated-bandpass-path", type=Path, default=None, - help="Path to the CASA6 singularity container", + help="Path to directory containing the uncalibrated beam-wise measurement sets that contain the bandpass calibration source. If None then the '--sky-model-directory' should be provided. ", ) parser.add_argument( "--cluster-config", @@ -625,132 +579,14 @@ def get_parser() -> ArgumentParser: default="petrichor", help="Path to a cluster configuration file, or a known cluster name. ", ) - parser.add_argument( - "--selfcal-rounds", - type=int, - default=2, - help="The number of selfcalibration rounds to perform. ", - ) - parser.add_argument( - "--skip-selfcal-on-rounds", - type=int, - nargs="+", - default=None, - help="Do not perform the derive and apply self-calibration solutions on these rounds", - ) - parser.add_argument( - "--zip-ms", - action="store_true", - help="Zip up measurement sets as imaging and self-calibration is carried out.", - ) - parser.add_argument( - "--run-aegean", - action="store_true", - help="Run the aegean source finder on images. ", - ) - parser.add_argument( - "--aegean-container", - type=Path, - default=None, - help="Path to the singularity container with aegean", - ) - parser.add_argument( - "--no-imaging", - action="store_true", - help="Do not perform any imaging, only derive bandpass solutions and apply to sources. ", - ) - parser.add_argument( - "--reference-catalogue-directory", - type=Path, - default=None, - help="Path to the directory containing the ICFS, NVSS and SUMSS reference catalogues. These are required for validation plots. 
", - ) - parser.add_argument( - "--linmos-residuals", - action="store_true", - help="Co-add the per-beam cleaning residuals into a field image", - ) - parser.add_argument( - "--beam-cutoff", - type=float, - default=150, - help="Cutoff in arcseconds that is used to flagged synthesised beams were deriving a common resolution to smooth to when forming the linmos images", - ) - parser.add_argument( - "--fixed-beam-shape", - nargs=3, - type=float, - default=None, - help="Specify the final beamsize of linmos field images in (arcsec, arcsec, deg)", - ) - parser.add_argument( - "--pb-cutoff", - type=float, - default=0.1, - help="Primary beam attenuation cutoff to use during linmos", - ) - parser.add_argument( - "--use-preflagger", - action="store_true", - default=False, - help="Whether to use (or search for solutions with) the preflagger operations applied to the bandpass gain solutions", - ) - parser.add_argument( - "--use-beam-masks", - default=False, - action="store_true", - help="Construct a clean mask from an MFS image for the next round of imaging. May adjust some of the imaging options per found if activated. ", - ) - beam_mask_options = parser.add_mutually_exclusive_group() - beam_mask_options.add_argument( - "--use-beam-mask-rounds", - default=None, - type=int, - nargs="+", - help="If --use-beam-masks is provided, this option specifies from which round of self-calibration the masking operation will be used onwards from. Specific rounds can be set here. ", - ) - beam_mask_options.add_argument( - "--use-beam-masks-from", - default=1, - type=int, - help="If --use-beam-masks is provided, this option specifies from which round of self-calibration the masking operation will be used onwards from. ", - ) - parser.add_argument( - "--sbid-archive-path", - type=Path, - default=None, - help="Path that SBID archive tarballs will be created under. If None no archive tarballs are created. See ArchiveOptions. ", - ) - parser.add_argument( - "--sbid-copy-path", - type=Path, - default=None, - help="Path that final processed products will be copied into. If None no copying of file products is performed. See ArchiveOptions. ", - ) parser.add_argument( "--skip-bandpass-check", default=False, action="store_true", help="Skip checking whether the path containing bandpass solutions exists (e.g. if solutions have already been applied)", ) - parser.add_argument( - "--rename-ms", - action="store_true", - default=False, - help="Rename MSs throughout rounds of imaging and self-cal instead of creating copies. This will delete data-columns throughout. ", - ) - parser.add_argument( - "--stokes-v-imaging", - help="Enables stokes-v imaging after the final round of imaging (whether", - action="store_true", - default=False, - ) - parser.add_argument( - "--coadd-cubes", - default=False, - action="store_true", - help="Co-add cubes formed throughout imaging together. Cubes will be smoothed channel-wise to a common resolution. 
Only performed on final set of images", - ) + + parser = add_options_to_parser(parser=parser, options_class=FieldOptions) return parser @@ -765,39 +601,9 @@ def cli() -> None: args = parser.parse_args() - field_options = FieldOptions( - flagger_container=args.flagger_container, - calibrate_container=args.calibrate_container, - casa_container=args.casa_container, - holofile=args.holofile, - expected_ms=args.expected_ms, - wsclean_container=args.wsclean_container, - yandasoft_container=args.yandasoft_container, - potato_container=args.potato_container, - rounds=args.selfcal_rounds, - skip_selfcal_on_rounds=args.skip_selfcal_on_rounds, - zip_ms=args.zip_ms, - run_aegean=args.run_aegean, - aegean_container=args.aegean_container, - no_imaging=args.no_imaging, - reference_catalogue_directory=args.reference_catalogue_directory, - linmos_residuals=args.linmos_residuals, - beam_cutoff=args.beam_cutoff, - fixed_beam_shape=args.fixed_beam_shape, - pb_cutoff=args.pb_cutoff, - use_preflagger=args.use_preflagger, - use_beam_masks=args.use_beam_masks, - use_beam_mask_rounds=( - args.use_beam_mask_rounds - if args.use_beam_mask_rounds - else args.use_beam_masks_from - ), # defaults value of args.use_beam_masks_from is 1 - imaging_strategy=args.imaging_strategy, - sbid_archive_path=args.sbid_archive_path, - sbid_copy_path=args.sbid_copy_path, - rename_ms=args.rename_ms, - stokes_v_imaging=args.stokes_v_imaging, - coadd_cubes=args.coadd_cubes, + field_options: FieldOptions = create_options_from_parser( + parser_namespace=args, + options_class=FieldOptions, ) setup_run_process_science_field( diff --git a/tests/test_archive.py b/tests/test_archive.py index 69c09777..a3c1e934 100644 --- a/tests/test_archive.py +++ b/tests/test_archive.py @@ -125,27 +125,29 @@ def test_archive_parser(glob_files): args = parser.parse_args("list".split()) assert isinstance(args.base_path, Path) - assert args.file_patterns == DEFAULT_TAR_RE_PATTERNS + assert args.tar_file_re_patterns == DEFAULT_TAR_RE_PATTERNS example_path = Path("this/no/exist") args = parser.parse_args(f"list --base-path {str(example_path)}".split()) assert isinstance(args.base_path, Path) assert args.base_path == example_path - args = parser.parse_args(r"list --file-patterns '.*linmos.*' '.*MFS.*'".split()) - assert len(args.file_patterns) == 2 + args = parser.parse_args( + r"list --copy-file-re-patterns '.*linmos.*' '.*MFS.*'".split() + ) + assert len(args.copy_file_re_patterns) == 2 example_path = Path(base_dir) args = parser.parse_args( - f"list --base-path {str(example_path)} --file-patterns *pdf".split() + f"list --base-path {str(example_path)} --copy-file-re-patterns *pdf".split() ) assert isinstance(args.base_path, Path) assert args.base_path == example_path - assert args.file_patterns == ["*pdf"] + assert args.copy_file_re_patterns == ["*pdf"] - cmd = r"create --tar-file-patterns '.*linmos.*' '.*MFS.*' '.*beam[0-9]+\.round4-????-image\.fits' --base-path 39420 test_archive_tarball/39420.tar" + cmd = r"create --tar-file-re-patterns '.*linmos.*' '.*MFS.*' '.*beam[0-9]+\.round4-????-image\.fits' --base-path 39420 test_archive_tarball/39420.tar" args = parser.parse_args(cmd.split()) - assert len(args.tar_file_patterns) == 3 + assert len(args.tar_file_re_patterns) == 3 def test_tar_ball_files(temp_files): @@ -187,3 +189,19 @@ def test_archive_new_tar_patterns(): tar_file_re_patterns=new_patterns ) assert len(new_archive_options.tar_file_re_patterns) == before_count + 1 + + +def test_archiveoptions_with_options(): + """Ensure that the with_options interface 
for ArchiveOptions works""" + + archive_options = ArchiveOptions() + default_copy = archive_options.copy_file_re_patterns + update_options = ("Jack", "was", "here") + new_options = archive_options.with_options(copy_file_re_patterns=update_options) + + assert new_options.copy_file_re_patterns != default_copy + assert new_options.copy_file_re_patterns == update_options + assert archive_options is not new_options + + new_dict = new_options._asdict() + assert new_dict["copy_file_re_patterns"] == update_options diff --git a/tests/test_bandpass_flow.py b/tests/test_bandpass_flow.py deleted file mode 100644 index cfb7e7b3..00000000 --- a/tests/test_bandpass_flow.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Basic tests for the prefect bandpass flow""" - -# this pirate made a promise that if there is a typo and an error -# then a test will be made to not let it happen. Penaalty is walking -# the plank - -from flint.prefect.flows.bandpass_pipeline import get_parser - - -def test_parser(): - parser = get_parser() - - args = parser.parse_args( - """/51998 - --calibrate-container /scratch3/gal16b/containers/calibrate.sif - --flagger-container /scratch3/gal16b/containers/aoflagger.sif - --cluster-config /scratch3/gal16b/bp_test/petrichor.yaml - --split-path $(pwd) - --smooth-window-size 8 - --smooth-polynomial-order 3 - """.split() - ) - - assert args.smooth_polynomial_order == 3 - assert args.smooth_window_size == 8 - assert isinstance(args.smooth_window_size, int) - assert isinstance(args.smooth_polynomial_order, int) diff --git a/tests/test_baseoptions.py b/tests/test_baseoptions.py new file mode 100644 index 00000000..79a6995b --- /dev/null +++ b/tests/test_baseoptions.py @@ -0,0 +1,98 @@ +"""Some specific tests around the pydantic base options model +that we are using to construct a BaseOptions class""" + +import pytest +from argparse import ArgumentParser + +from pydantic import ValidationError + +from flint.options import BaseOptions, add_options_to_parser, create_options_from_parser + + +def test_ensure_options_frozen(): + """BaseOption classes should be immutable, so an error + should be raised""" + + class NewOptions(BaseOptions): + a: int + """An example""" + b: float + """Another example""" + + new_options = NewOptions(a=1, b=1.23) + with pytest.raises(ValidationError): + # can't update the immutable class + new_options.a = 33 + # raise error on argument not existing + _ = NewOptions(a=1, b=1, jack="sparrow") # type: ignore + + +def test_baseoptions_argparse(): + """Create an argument parser from a BaseOptions""" + + class NewOptions(BaseOptions): + a: int + """An example""" + b: float + """Another example""" + c: bool = False + """A flag""" + + parser = ArgumentParser(description="Jack Sparrow") + + parser = add_options_to_parser(parser=parser, options_class=NewOptions) + args = parser.parse_args("1 1.23 --c".split()) + assert args.a == "1" + assert isinstance(args.a, str) + assert args.b == "1.23" + assert isinstance(args.b, str) + assert args.c + assert isinstance(args.c, bool) + + new_options = create_options_from_parser( + parser_namespace=args, options_class=NewOptions + ) + assert isinstance(new_options, NewOptions) + assert new_options.a == 1 + assert isinstance(new_options.a, int) + assert new_options.b == 1.23 + assert isinstance(new_options.b, float) + + +def test_create_new_options(): + """Create a new subclass of BaseOptions""" + + class NewOptions(BaseOptions): + a: int + """An example""" + b: float + """Another example""" + + new_options = NewOptions(a=1, b=1.23) + assert 
new_options.a == 1 + assert new_options.b == 1.23 + + update_options = new_options.with_options(b=234.3) + assert update_options.b == 234.3 + + assert new_options is not update_options + + # Make sure the types are properly cast + new_options = NewOptions(a=1, b=1) + assert isinstance(new_options.b, float) + + +def test_create_new_options_asdict(): + """Create a new subclass of BaseOptions""" + + class NewOptions(BaseOptions): + a: int + """An example""" + b: float + """Another example""" + + new_options = NewOptions(a=1, b=1.23) + _dict = new_options._asdict() + assert isinstance(_dict, dict) + assert _dict["a"] == 1 + assert isinstance(_dict["a"], int) diff --git a/tests/test_configuration.py b/tests/test_configuration.py index 9873be80..196b08a7 100644 --- a/tests/test_configuration.py +++ b/tests/test_configuration.py @@ -114,7 +114,7 @@ def test_verify_options_with_class_missing_initial(package_strategy): def test_verify_options_with_class(package_strategy): - # ebsure that the errors raised from options passed through + # ensure that the errors raised from options passed through # to the input structures correctly raise errors should they # be misconfigured (e.g. option supplied does not exist, missing # mandatory argument) @@ -129,8 +129,9 @@ def test_verify_options_with_class(package_strategy): verify_configuration(input_strategy=strategy) strategy["selfcal"][1]["masking"]["ThisDoesNotExist"] = "ThisDoesNotExist" + with pytest.raises(ValueError): - verify_configuration(input_strategy=strategy) + verify_configuration(input_strategy=strategy, raise_on_error=True) def test_create_yaml_file(tmpdir): diff --git a/tests/test_masking.py b/tests/test_masking.py index 6e46ed96..d4270bf3 100644 --- a/tests/test_masking.py +++ b/tests/test_masking.py @@ -7,12 +7,12 @@ from flint.masking import ( MaskingOptions, _create_signal_from_rmsbkg, - _args_to_mask_options, _need_to_make_signal, _verify_set_positive_seed_clip, beam_shape_erode, consider_beam_mask_round, create_beam_mask_kernel, + create_options_from_parser, create_snr_mask_from_fits, get_parser, minimum_boxcar_artefact_mask, @@ -71,7 +71,9 @@ def test_arg_parser_cli_and_masking_options(): args = parser.parse_args( args="mask img --rms-fits rms --bkg-fits bkg --flood-fill --flood-fill-positive-seed-clip 10 --flood-fill-positive-flood-clip 1. --flood-fill-use-mbc --flood-fill-use-mbc-box-size 100".split() ) - masking_options = _args_to_mask_options(args=args) + masking_options = create_options_from_parser( + parser_namespace=args, options_class=MaskingOptions + ) assert isinstance(masking_options, MaskingOptions) assert masking_options.flood_fill assert masking_options.flood_fill_use_mbc diff --git a/tests/test_options.py b/tests/test_options.py index 31239736..6fac1ee2 100644 --- a/tests/test_options.py +++ b/tests/test_options.py @@ -4,13 +4,55 @@ """ from pathlib import Path +from typing import List import pytest - -from flint.options import FieldOptions, dump_field_options_to_yaml, options_to_dict +from pydantic.fields import FieldInfo + +from flint.options import ( + FieldOptions, + dump_field_options_to_yaml, + options_to_dict, + _create_argparse_options, + create_options_from_parser, +) from flint.prefect.flows.continuum_pipeline import get_parser +def test_fieldinfo_to_argparse_options(): + """The pydantic ``FieldInfo`` object is used to generate the options that would be + splat into an ArgumentParser.add_argument method. 
Ensure the expected mappings from + types to argument options make sense""" + field = FieldInfo(default=1, annotation=int, description="An example description") + field_name, field_options = _create_argparse_options( + name="jack_sparrow", field=field + ) + assert field_name == "--jack-sparrow" + assert field_options["action"] == "store" + assert field_options["default"] == 1 + assert field_options["help"] == "An example description" + + field = FieldInfo(annotation=int, description="An example description") + field_name, field_options = _create_argparse_options( + name="jack_sparrow", field=field + ) + assert field_name == "jack_sparrow" + assert field_options["action"] == "store" + assert field_options["help"] == "An example description" + + field = FieldInfo( + default=[1, 2, 3, 4], annotation=List[int], description="An example description" + ) + field_name, field_options = _create_argparse_options( + name="jack_sparrow", field=field + ) + assert field_name == "--jack-sparrow" + assert field_options["action"] == "store" + assert field_options["default"] == [1, 2, 3, 4] + assert field_options["help"] == "An example description" + assert field_options["nargs"] == "+" + + def test_options_to_dict(): """See ifthe utility around converting Option/Results to dictionary works""" flagger_container = Path("a") @@ -63,10 +105,12 @@ def test_config_field_options(tmpdir): --wsclean-container /scratch3/projects/spiceracs/singularity_images/wsclean_force_mask.sif --yandasoft-container /scratch3/gal16b/containers/yandasoft.sif --cluster-config /scratch3/gal16b/split/petrichor.yaml - --selfcal-rounds 2 + --rounds 2 --split-path $(pwd) --zip-ms --run-aegean + --use-beam-masks + --use-preflagger --aegean-container '/scratch3/gal16b/containers/aegean.sif' --reference-catalogue-directory '/scratch3/gal16b/reference_catalogues/' --linmos-residuals @@ -83,31 +127,17 @@ def test_config_field_options(tmpdir): --cli-config {str(output_file)}""".split() ) - field_options = FieldOptions( - flagger_container=args.flagger_container, - calibrate_container=args.calibrate_container, - holofile=args.holofile, - expected_ms=args.expected_ms, - wsclean_container=args.wsclean_container, - yandasoft_container=args.yandasoft_container, - rounds=args.selfcal_rounds, - zip_ms=args.zip_ms, - run_aegean=args.run_aegean, - aegean_container=args.aegean_container, - no_imaging=args.no_imaging, - reference_catalogue_directory=args.reference_catalogue_directory, - linmos_residuals=args.linmos_residuals, - beam_cutoff=args.beam_cutoff, - pb_cutoff=args.pb_cutoff, - use_preflagger=args.use_preflagger, + field_options = create_options_from_parser( + parser_namespace=args, options_class=FieldOptions ) assert isinstance(field_options, FieldOptions) - assert field_options.use_preflagger is False assert field_options.zip_ms is True assert field_options.linmos_residuals is True assert field_options.rounds == 2 assert isinstance(field_options.wsclean_container, Path) + assert field_options.use_beam_masks + assert field_options.use_preflagger def test_create_field_options(): @@ -121,7 +151,7 @@ def test_create_field_options(): --wsclean-container /scratch3/projects/spiceracs/singularity_images/wsclean_force_mask.sif --yandasoft-container /scratch3/gal16b/containers/yandasoft.sif --cluster-config /scratch3/gal16b/split/petrichor.yaml - --selfcal-rounds 2 + --rounds 2 --split-path $(pwd) --zip-ms --run-aegean @@ -138,7 +168,7 @@ def test_create_field_options(): expected_ms=args.expected_ms, wsclean_container=args.wsclean_container, 
yandasoft_container=args.yandasoft_container, - rounds=args.selfcal_rounds, + rounds=args.rounds, zip_ms=args.zip_ms, run_aegean=args.run_aegean, aegean_container=args.aegean_container, @@ -169,7 +199,7 @@ def test_create_field_options2(): --wsclean-container /scratch3/projects/spiceracs/singularity_images/wsclean_force_mask.sif --yandasoft-container /scratch3/gal16b/containers/yandasoft.sif --cluster-config /scratch3/gal16b/split/petrichor.yaml - --selfcal-rounds 2 + --rounds 2 --split-path $(pwd) --run-aegean --aegean-container '/scratch3/gal16b/containers/aegean.sif' @@ -185,7 +215,7 @@ def test_create_field_options2(): expected_ms=args.expected_ms, wsclean_container=args.wsclean_container, yandasoft_container=args.yandasoft_container, - rounds=args.selfcal_rounds, + rounds=args.rounds, zip_ms=args.zip_ms, run_aegean=args.run_aegean, aegean_container=args.aegean_container, @@ -216,7 +246,7 @@ def test_create_field_options3(): --wsclean-container /scratch3/projects/spiceracs/singularity_images/wsclean_force_mask.sif --yandasoft-container /scratch3/gal16b/containers/yandasoft.sif --cluster-config /scratch3/gal16b/split/petrichor.yaml - --selfcal-rounds 2 + --rounds 2 --split-path $(pwd) --run-aegean --aegean-container '/scratch3/gal16b/containers/aegean.sif' @@ -232,7 +262,7 @@ def test_create_field_options3(): expected_ms=args.expected_ms, wsclean_container=args.wsclean_container, yandasoft_container=args.yandasoft_container, - rounds=args.selfcal_rounds, + rounds=args.rounds, zip_ms=args.zip_ms, run_aegean=args.run_aegean, aegean_container=args.aegean_container, diff --git a/tests/test_prefect_bandpass_flow.py b/tests/test_prefect_bandpass_flow.py new file mode 100644 index 00000000..1a29c6f5 --- /dev/null +++ b/tests/test_prefect_bandpass_flow.py @@ -0,0 +1,45 @@ +"""Tests that are specific to the bandpass calibration +flow""" + +from pathlib import Path + +from flint.prefect.flows import bandpass_pipeline +from flint.options import BandpassOptions, create_options_from_parser + + +def test_bandpass_cli(): + """Ensure that the bandpass calibration using the BaseOptions + class can integrate with the current preferred workflow + used by racs-low3""" + parser = bandpass_pipeline.get_parser() + + example_cli = """ + /some/test/argument + --flagger-container /jack/sparrow/containers/aoflagger.sif + --calibrate-container /jack/sparrow/containers/calibrate.sif + --cluster-config ./petrichor.yaml + --split-path /another/made/up/path + --flag-calibrate-rounds 4 + --minuv 600 + --preflagger-jones-max-amplitude 0.6 + --preflagger-ant-mean-tolerance 0.18 + """ + args = parser.parse_args(example_cli.split()) + + bandpass_options = create_options_from_parser( + parser_namespace=args, options_class=BandpassOptions + ) + assert args.bandpass_path == Path("/some/test/argument") + assert args.split_path == Path("/another/made/up/path") + assert args.cluster_config == "./petrichor.yaml" + assert bandpass_options.flagger_container == Path( + "/jack/sparrow/containers/aoflagger.sif" + ) + assert bandpass_options.calibrate_container == Path( + "/jack/sparrow/containers/calibrate.sif" + ) + assert bandpass_options.minuv == 600.0 + assert isinstance(bandpass_options.minuv, float) + assert not bandpass_options.preflagger_mesh_ant_flags + assert bandpass_options.flag_calibrate_rounds == 4 + assert isinstance(bandpass_options.flag_calibrate_rounds, int)
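
A minimal usage sketch of the `BaseOptions` CLI helpers added in `flint/options.py` above. The `DemoOptions` class and the argument values are hypothetical and purely illustrative; the helper functions and their keyword arguments are those introduced in this patch:

    from argparse import ArgumentParser
    from pathlib import Path

    from flint.options import (
        BaseOptions,
        add_options_to_parser,
        create_options_from_parser,
    )


    class DemoOptions(BaseOptions):
        ms_path: Path
        """A required field, which becomes a positional argument"""
        rounds: int = 2
        """A field with a default, which becomes --rounds"""
        zip_ms: bool = False
        """A boolean field, which becomes the --zip-ms store_true flag"""


    parser = ArgumentParser(description="Demonstrate the BaseOptions CLI helpers")
    parser = add_options_to_parser(parser=parser, options_class=DemoOptions)

    # argparse hands back strings; pydantic casts them when the options class is built
    args = parser.parse_args("39400.ms --rounds 3 --zip-ms".split())
    demo_options = create_options_from_parser(
        parser_namespace=args, options_class=DemoOptions
    )
    assert demo_options.rounds == 3 and demo_options.zip_ms

    # the model is frozen, so derive an updated copy rather than mutating in place
    updated_options = demo_options.with_options(rounds=4)
    assert updated_options.rounds == 4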