From 2018996dd777f925814cf77b1ffb7c0ef7df46dd Mon Sep 17 00:00:00 2001 From: "Alec Thomson (S&A, Kensington WA)" Date: Wed, 8 Jan 2025 12:32:15 +0800 Subject: [PATCH 01/10] Update rules --- pyproject.toml | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 836973f0..d760e2f2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -133,6 +133,32 @@ target-version = "py38" # Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default. select = ["E4", "E7", "E9", "F"] ignore = [] +# Full list of rules from scientific-python-cookiecutter. +# More can be enabled as needed / wanted. +extend-select = [ + # "B", # flake8-bugbear + "I", # isort + # "ARG", # flake8-unused-arguments + # "C4", # flake8-comprehensions + # "EM", # flake8-errmsg + # "ICN", # flake8-import-conventions + # "G", # flake8-logging-format + # "PGH", # pygrep-hooks + # "PIE", # flake8-pie + # "PL", # pylint + # "PT", # flake8-pytest-style + # "PTH", # flake8-use-pathlib + # "RET", # flake8-return + # "RUF", # Ruff-specific + # "SIM", # flake8-simplify + # "T20", # flake8-print + "UP", # pyupgrade + # "YTT", # flake8-2020 + # "EXE", # flake8-executable + "NPY", # NumPy specific rules + "PD", # pandas-vet +] +isort.required-imports = ["from __future__ import annotations"] # Allow fix for all enabled rules (when `--fix`) is provided. 
fixable = ["ALL"] From 4a304e005bc60f9ba5fdd9721bcf4836fc0ceef2 Mon Sep 17 00:00:00 2001 From: "Alec Thomson (S&A, Kensington WA)" Date: Wed, 8 Jan 2025 13:03:20 +0800 Subject: [PATCH 02/10] Apply ruff --- docs/conf.py | 1 + flint/archive.py | 20 ++-- flint/bandpass.py | 27 ++--- flint/bptools/preflagger.py | 26 +++-- flint/bptools/smoother.py | 4 +- flint/calibrate/aocalibrate.py | 83 +++++++------ flint/catalogue.py | 18 +-- flint/coadd/linmos.py | 60 +++++----- flint/configuration.py | 42 +++---- flint/convol.py | 18 +-- flint/exceptions.py | 3 + flint/flagging.py | 24 ++-- flint/imager/wsclean.py | 110 +++++++++--------- flint/leakage.py | 21 ++-- flint/logging.py | 2 + flint/masking.py | 49 ++++---- flint/ms.py | 100 ++++++++-------- flint/naming.py | 84 ++++++------- flint/options.py | 82 +++++++------ flint/peel/potato.py | 50 ++++---- flint/prefect/clusters.py | 14 ++- flint/prefect/common/imaging.py | 94 +++++++-------- flint/prefect/common/ms.py | 7 +- flint/prefect/common/utils.py | 25 ++-- flint/prefect/flows/bandpass_pipeline.py | 18 +-- .../prefect/flows/continuum_mask_pipeline.py | 12 +- flint/prefect/flows/continuum_pipeline.py | 24 ++-- flint/prefect/flows/subtract_cube_pipeline.py | 38 +++--- flint/sclient.py | 14 ++- flint/selfcal/casa.py | 18 +-- flint/selfcal/utils.py | 4 +- flint/sky_model.py | 89 +++++++------- flint/source_finding/aegean.py | 18 +-- flint/summary.py | 58 ++++----- flint/utils.py | 54 ++++----- flint/validation.py | 67 +++++------ tests/test_aegean.py | 5 +- tests/test_aocalibrate.py | 18 +-- tests/test_archive.py | 8 +- tests/test_baseoptions.py | 4 +- tests/test_bptools.py | 2 + tests/test_casa.py | 2 + tests/test_catalogue.py | 4 +- tests/test_configuration.py | 4 +- tests/test_convol.py | 6 +- tests/test_flagging.py | 2 + tests/test_leakage.py | 6 +- tests/test_linmos_coadd.py | 10 +- tests/test_masking.py | 4 +- tests/test_ms.py | 14 ++- tests/test_naming.py | 7 +- tests/test_options.py | 8 +- tests/test_potato.py | 2 + 
tests/test_prefect_bandpass_flow.py | 4 +- tests/test_prefect_subtractcube_flow.py | 2 + tests/test_selfcal_utils.py | 2 + tests/test_summary.py | 2 + tests/test_utils.py | 6 +- tests/test_validation.py | 2 + tests/test_wsclean.py | 10 +- 60 files changed, 784 insertions(+), 728 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index c581dddb..7d724b3d 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -5,6 +5,7 @@ # -- Project information ----------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information +from __future__ import annotations project = "flint" copyright = "2023, Tim Galvin" diff --git a/flint/archive.py b/flint/archive.py index 193f2d64..e2572dd6 100644 --- a/flint/archive.py +++ b/flint/archive.py @@ -1,13 +1,15 @@ """Operations around preserving files and products from an flint run""" +from __future__ import annotations + import re +import shlex import shutil import subprocess -import shlex import tarfile from argparse import ArgumentParser from pathlib import Path -from typing import Any, Collection, Dict, List, Tuple +from typing import Any, Collection from flint.configuration import get_options_from_strategy from flint.exceptions import TarArchiveError @@ -21,7 +23,7 @@ def resolve_glob_expressions( base_path: Path, file_re_patterns: Collection[str] -) -> Tuple[Path, ...]: +) -> tuple[Path, ...]: """Collect a set of files given a base directory and a set of glob expressions. Unique paths are returned. 
@@ -34,7 +36,7 @@ def resolve_glob_expressions( """ base_path = Path(base_path) - resolved_files: List[Path] = [] + resolved_files: list[Path] = [] logger.info(f"Searching {base_path=}") @@ -70,7 +72,7 @@ def copy_files_into(copy_out_path: Path, files_to_copy: Collection[Path]) -> Pat copy_out_path.mkdir(parents=True, exist_ok=True) total = len(files_to_copy) - not_copied: List[Path] = [] + not_copied: list[Path] = [] logger.info(f"Copying {total} files into {copy_out_path}") for count, file in enumerate(files_to_copy): @@ -111,7 +113,7 @@ def verify_tarball( ), f"{tarball} is not a file or does not exist" assert tarball.suffix == ".tar", f"{tarball=} appears to not have a .tar extension" - cmd = f"tar -tvf {str(tarball)}" + cmd = f"tar -tvf {tarball!s}" logger.info(f"Verifying {tarball=}") popen = subprocess.Popen(shlex.split(cmd), stderr=subprocess.PIPE) with popen.stderr: # type: ignore @@ -153,7 +155,7 @@ def tar_files_into( logger.info(f"Opening {tar_out_path}") with tarfile.open(tar_out_path, "w") as tar: for count, file in enumerate(files_to_tar): - logger.info(f"{count+1} of {total}, adding {str(file)}") + logger.info(f"{count+1} of {total}, adding {file!s}") tar.add(file, arcname=file.name) logger.info(f"Created {tar_out_path}") @@ -217,7 +219,7 @@ def copy_sbid_files_archive( return copy_out_path -def get_archive_options_from_yaml(strategy_yaml_path: Path) -> Dict[str, Any]: +def get_archive_options_from_yaml(strategy_yaml_path: Path) -> dict[str, Any]: """Load the archive options from a specified strategy file Args: @@ -360,7 +362,7 @@ def cli() -> None: ) ) - update_options_create: Dict[str, Any] = ( + update_options_create: dict[str, Any] = ( get_archive_options_from_yaml(strategy_yaml_path=args.strategy_yaml_path) if args.strategy_yaml_path else dict(tar_file_re_patterhs=args.file_patterns) diff --git a/flint/bandpass.py b/flint/bandpass.py index d0e3b5b5..4e238e30 100644 --- a/flint/bandpass.py +++ b/flint/bandpass.py @@ -1,8 +1,9 @@ """Procedure to 
calibrate bandpass observation""" +from __future__ import annotations + from argparse import ArgumentParser from pathlib import Path -from typing import Optional, Union import numpy as np from casacore.tables import table, taql @@ -10,12 +11,12 @@ from flint.calibrate.aocalibrate import AOSolutions, calibrate_apply_ms from flint.flagging import flag_ms_aoflagger from flint.logging import logger -from flint.ms import MS, describe_ms, preprocess_askap_ms, get_field_id_for_field +from flint.ms import MS, describe_ms, get_field_id_for_field, preprocess_askap_ms from flint.naming import create_ms_name from flint.sky_model import KNOWN_1934_FILES, get_1934_model -def plot_solutions(solutions_path: Path, ref_ant: Optional[int] = 0) -> None: +def plot_solutions(solutions_path: Path, ref_ant: int | None = 0) -> None: """Plot solutions for AO-style solutions Args: @@ -28,7 +29,7 @@ def plot_solutions(solutions_path: Path, ref_ant: Optional[int] = 0) -> None: _ = ao_sols.plot_solutions(ref_ant=ref_ant) -def flag_bandpass_offset_pointings(ms: Union[MS, Path]) -> MS: +def flag_bandpass_offset_pointings(ms: MS | Path) -> MS: """The typical bandpass style observation in ASKAP will shift each beam so that it is centred on the bandpass-calibration object (here B1934-638). During each offset position all beams are recording data still. The trick @@ -61,7 +62,7 @@ def flag_bandpass_offset_pointings(ms: Union[MS, Path]) -> MS: logger.info(f"The B1934-638 field name is {good_field_name}. ") logger.info("Will attempt to flag other fields. ") - with table(f"{str(ms.path)}/FIELD", readonly=True, ack=False) as tab: + with table(f"{ms.path!s}/FIELD", readonly=True, ack=False) as tab: # The ID is _position_ of the matching row in the table. 
field_names = tab.getcol("NAME") field_idx = np.argwhere([fn == good_field_name for fn in field_names])[0] @@ -73,7 +74,7 @@ def flag_bandpass_offset_pointings(ms: Union[MS, Path]) -> MS: field_idx = field_idx[0] logger.info(f"{good_field_name} FIELD_ID is {field_idx}") - with table(f"{str(ms.path)}", readonly=False, ack=False) as tab: + with table(f"{ms.path!s}", readonly=False, ack=False) as tab: field_idxs = tab.getcol("FIELD_ID") field_mask = field_idxs != field_idx logger.info( @@ -92,9 +93,9 @@ def flag_bandpass_offset_pointings(ms: Union[MS, Path]) -> MS: def extract_correct_bandpass_pointing( - ms: Union[MS, Path], + ms: MS | Path, source_name_prefix: str = "B1934-638", - ms_out_dir: Optional[Path] = None, + ms_out_dir: Path | None = None, ) -> MS: """The typical bandpass style observation in ASKAP will shift each beam so that it is centred on the bandpass-calibration object (here B1934-638). @@ -134,7 +135,7 @@ def extract_correct_bandpass_pointing( ms = MS.cast(ms) ms_summary = describe_ms(ms, verbose=False) - logger.info(f"Checking for unique fields in {str(ms.path)} data table.") + logger.info(f"Checking for unique fields in {ms.path!s} data table.") with table(str(ms.path)) as tab: fields = np.unique(tab.getcol("FIELD_ID")) if len(fields) == 1: @@ -164,7 +165,7 @@ def extract_correct_bandpass_pointing( out_path = ms_out_dir / Path(out_name).name logger.info(f"Will create a MS, writing to {out_path}") - with table(f"{str(ms.path)}") as tab: + with table(f"{ms.path!s}") as tab: field_ms = taql(f"select * from $tab where FIELD_ID=={field_id}") field_ms.copy(str(out_path), deep=True) @@ -177,8 +178,8 @@ def calibrate_bandpass( mode: str, calibrate_container: Path, plot: bool = True, - aoflagger_container: Optional[Path] = None, - ms_out_dir: Optional[Path] = None, + aoflagger_container: Path | None = None, + ms_out_dir: Path | None = None, ) -> MS: """Entry point to extract the appropriate field from a bandpass observation, run AO-style calibrate, and plot 
results. In its current form a new measurement @@ -196,7 +197,7 @@ def calibrate_bandpass( Returns: MS: The calibrated measurement set with nominated column """ - logger.info(f"Will calibrate {str(ms_path)}, column {data_column}") + logger.info(f"Will calibrate {ms_path!s}, column {data_column}") # TODO: Check to make sure only 1934-638 model_path: Path = get_1934_model(mode=mode) diff --git a/flint/bptools/preflagger.py b/flint/bptools/preflagger.py index f8dc3749..98750485 100644 --- a/flint/bptools/preflagger.py +++ b/flint/bptools/preflagger.py @@ -6,8 +6,10 @@ components of the bandpass. """ +from __future__ import annotations + from pathlib import Path -from typing import List, NamedTuple, Optional, Tuple +from typing import NamedTuple import matplotlib.pyplot as plt import numpy as np @@ -32,9 +34,9 @@ class PhaseOutlierResults(NamedTuple): """The initial model of the complex_gains""" fit_model_gains: np.ndarray """The complex gain model fit made against the unwrapped gains (i.e. complex_gains / init_model_gains)""" - init_model_params: Tuple[float, float] + init_model_params: tuple[float, float] """The initial guess (gradient, offset) model parameters to represent the phase component of the complex_gains""" - fit_model_params: Tuple[float, float] + fit_model_params: tuple[float, float] """The fitted model parameters constrained against the unwrapped gains""" outlier_mask: np.ndarray """Boolean mask of equal length to complex_gain, where True represents outliers that should be flagged""" @@ -50,7 +52,7 @@ class PhaseOutlierResults(NamedTuple): def plot_phase_outlier( phase_outlier_results: PhaseOutlierResults, output_path: Path, - title: Optional[str] = None, + title: str | None = None, ) -> Path: """Create a simple diagnostic plot highlighting how the outlier channels and their phases were selected. 
@@ -63,7 +65,7 @@ def plot_phase_outlier( Returns: Path: Path of the output image file """ - logger.debug(f"Creating phase outlier plot, writing {str(output_path)}.") + logger.debug(f"Creating phase outlier plot, writing {output_path!s}.") complex_gains = phase_outlier_results.complex_gains init_model_gains = phase_outlier_results.init_model_gains @@ -165,8 +167,8 @@ def flag_outlier_phase( complex_gains: np.ndarray, flag_cut: float, use_mad: bool = False, - plot_title: Optional[str] = None, - plot_path: Optional[Path] = None, + plot_title: str | None = None, + plot_path: Path | None = None, ) -> PhaseOutlierResults: """This procedure attempts to identify channels in the bandpass solutions to flag but searching for gains with outlier phases. Typically, ASKAP solutions @@ -295,7 +297,7 @@ def flag_outlier_phase( def flags_over_threshold( - flags: np.ndarray, thresh: float = 0.8, ant_idx: Optional[int] = None + flags: np.ndarray, thresh: float = 0.8, ant_idx: int | None = None ) -> bool: """Given a set of flags for an antenna across frequency, consider how much is flagged, indicated by a value of True, and return whether it was over a threshold. The intent is to return whether @@ -333,7 +335,7 @@ def plot_mean_amplitudes( mean: float, std: float, output_path: Path, - plot_title: Optional[str] = None, + plot_title: str | None = None, ) -> Path: """A simply plot to examine the polynomial fit to the residual amplitude data. @@ -378,8 +380,8 @@ def flag_mean_residual_amplitude( complex_gains: np.ndarray, use_robust: bool = True, polynomial_order: int = 5, - plot_path: Optional[Path] = None, - plot_title: Optional[str] = None, + plot_path: Path | None = None, + plot_title: str | None = None, ) -> bool: """Calculate the median or mean of the residual amplitudes of the complex gains after fitting a polynomial of order polynomial_order. 
@@ -520,7 +522,7 @@ def construct_mesh_ant_flags(mask: np.ndarray) -> np.ndarray: nant = mask.shape[0] logger.info(f"Accumulating flagged channels over {nant=} antenna") - empty_ants: List[int] = [] + empty_ants: list[int] = [] # TODO: This can be replaced with numpy broadcasting diff --git a/flint/bptools/smoother.py b/flint/bptools/smoother.py index 43cada80..af105a6e 100644 --- a/flint/bptools/smoother.py +++ b/flint/bptools/smoother.py @@ -1,4 +1,4 @@ -from typing import Tuple +from __future__ import annotations import numpy as np from scipy.ndimage import median_filter @@ -207,7 +207,7 @@ def smooth_bandpass_complex_gains( window_size: int = 16, polynomial_order: int = 4, apply_median_filter: bool = True, - smooth_jones_elements: Tuple[int, ...] = (0, 1, 2, 3), + smooth_jones_elements: tuple[int, ...] = (0, 1, 2, 3), ) -> np.ndarray: """Smooth bandpass solutions by applying a savgol filter to the real and imaginary components of each of the antenna based polarisation solutions across channels. 
diff --git a/flint/calibrate/aocalibrate.py b/flint/calibrate/aocalibrate.py index 608d4adc..1ea3e9bf 100644 --- a/flint/calibrate/aocalibrate.py +++ b/flint/calibrate/aocalibrate.py @@ -8,14 +8,9 @@ from typing import ( Any, Collection, - Dict, Iterable, - List, Literal, NamedTuple, - Optional, - Tuple, - Union, ) import matplotlib.pyplot as plt @@ -67,13 +62,13 @@ class CalibrateOptions(BaseOptions): """The name of the datacolumn that will be calibrates""" m: Path """The path to the model file used to calibtate""" - minuv: Optional[float] = None + minuv: float | None = None """The minimum distance in meters that is""" - maxuv: Optional[float] = None + maxuv: float | None = None """The maximum distance in meters that is""" - i: Optional[int] = 100 + i: int | None = 100 """The number of iterations that may be performed""" - p: Optional[Tuple[Path, Path]] = None + p: tuple[Path, Path] | None = None """Plot output names for the amplitude gain and phases""" @@ -142,7 +137,7 @@ def save(self, output_path: Path) -> Path: """ return save_aosolutions_file(aosolutions=self, output_path=output_path) - def plot_solutions(self, ref_ant: Optional[int] = 0) -> Iterable[Path]: + def plot_solutions(self, ref_ant: int | None = 0) -> Iterable[Path]: """Plot the solutions of all antenna for the first time-inteval in the aosolutions file. The XX and the YY will be plotted. @@ -159,7 +154,7 @@ def plot_solutions(self, ref_ant: Optional[int] = 0) -> Iterable[Path]: def fill_between_flags( ax: plt.Axes, flags: np.ndarray, - values: Optional[np.ndarray] = None, + values: np.ndarray | None = None, direction: str = "x", ) -> None: """Plot vertical or horizontal lines where data are flagged. 
@@ -183,7 +178,7 @@ def fill_between_flags( def plot_solutions( - solutions: Union[Path, AOSolutions], ref_ant: Optional[int] = 0 + solutions: Path | AOSolutions, ref_ant: int | None = 0 ) -> Collection[Path]: """Plot solutions for AO-style solutions @@ -333,15 +328,15 @@ def plot_solutions( fig_ratio.tight_layout() fig_phase.tight_layout() - out_amp = f"{str(solutions_path.with_suffix('.amplitude.png'))}" + out_amp = f"{solutions_path.with_suffix('.amplitude.png')!s}" logger.info(f"Saving {out_amp}.") fig_amp.savefig(out_amp) - out_phase = f"{str(solutions_path.with_suffix('.phase.png'))}" + out_phase = f"{solutions_path.with_suffix('.phase.png')!s}" logger.info(f"Saving {out_phase}.") fig_phase.savefig(out_phase) - out_ratio = f"{str(solutions_path.with_suffix('.ratio.png'))}" + out_ratio = f"{solutions_path.with_suffix('.ratio.png')!s}" logger.info(f"Saving {out_ratio}.") fig_ratio.savefig(out_ratio) @@ -367,7 +362,7 @@ def save_aosolutions_file(aosolutions: AOSolutions, output_path: Path) -> Path: logger.info(f"Creating {output_dir}.") output_dir.mkdir(parents=True) - logger.info(f"Writing aosolutions to {str(output_path)}.") + logger.info(f"Writing aosolutions to {output_path!s}.") with open(str(output_path), "wb") as out_file: out_file.write( struct.pack( @@ -400,10 +395,10 @@ def load_aosolutions_file(solutions_path: Path) -> AOSolutions: assert ( solutions_path.exists() and solutions_path.is_file() - ), f"{str(solutions_path)} either does not exist or is not a file. " + ), f"{solutions_path!s} either does not exist or is not a file. " logger.info(f"Loading {solutions_path}") - with open(solutions_path, "r") as in_file: + with open(solutions_path) as in_file: _junk = np.fromfile(in_file, dtype=" List[CalibrateCommand]: +) -> list[CalibrateCommand]: """Given a directory that contains a collection of bandpass measurement sets, attempt to identify a corresponding set of calibrate binary solution file. 
@@ -496,7 +491,7 @@ def find_existing_solutions( def select_aosolution_for_ms( - calibrate_cmds: List[CalibrateCommand], ms: Union[MS, Path] + calibrate_cmds: list[CalibrateCommand], ms: MS | Path ) -> Path: """Attempt to select an AO-style solution file for a measurement set. This can be expanded to include a number of criteria, but @@ -516,20 +511,20 @@ def select_aosolution_for_ms( ms = MS.cast(ms) ms_beam = ms.beam if ms.beam is not None else get_beam_from_ms(ms=ms) - logger.info(f"Will select a solution for {str(ms.path)}, {ms_beam=}.") + logger.info(f"Will select a solution for {ms.path!s}, {ms_beam=}.") logger.info(f"{len(calibrate_cmds)} potential solutions to consider. ") for calibrate_cmd in calibrate_cmds: - logger.info(f"Considering {str(calibrate_cmd.solution_path)}.") + logger.info(f"Considering {calibrate_cmd.solution_path!s}.") if consistent_ms(ms1=ms, ms2=calibrate_cmd.ms): sol_file = calibrate_cmd.solution_path break else: raise ValueError( - f"No solution file found for {str(ms.path)} from {[c.ms.path for c in calibrate_cmds]} found. " + f"No solution file found for {ms.path!s} from {[c.ms.path for c in calibrate_cmds]} found. " ) - logger.info(f"Have selected {str(sol_file)} for {str(ms.path)}. ") + logger.info(f"Have selected {sol_file!s} for {ms.path!s}. 
") return sol_file @@ -548,13 +543,13 @@ def calibrate_options_to_command( """ cmd = "calibrate " - unknowns: List[Tuple[Any, Any]] = [] + unknowns: list[tuple[Any, Any]] = [] for key, value in calibrate_options._asdict().items(): if value is None: continue elif isinstance(value, (str, Path, int, float)): - cmd += f"-{key} {str(value)} " + cmd += f"-{key} {value!s} " elif isinstance(value, (tuple, list)): values = " ".join([str(v) for v in value]) cmd += f"-{key} {values} " @@ -565,18 +560,18 @@ def calibrate_options_to_command( len(unknowns) == 0 ), f"Unknown types when generating calibrate command: {unknowns}" - cmd += f"{str(ms_path)} {str(solutions_path)}" + cmd += f"{ms_path!s} {solutions_path!s}" return cmd def create_calibrate_cmd( - ms: Union[Path, MS], + ms: Path | MS, calibrate_model: Path, - solution_path: Optional[Path] = None, - container: Optional[Path] = None, - update_calibrate_options: Optional[Dict[str, Any]] = None, - calibrate_data_column: Optional[str] = None, + solution_path: Path | None = None, + container: Path | None = None, + update_calibrate_options: dict[str, Any] | None = None, + calibrate_data_column: str | None = None, ) -> CalibrateCommand: """Generate a typical ao calibrate command. Any extra keyword arguments are passed through as additional options to the `calibrate` program. @@ -648,8 +643,8 @@ def create_calibrate_cmd( def create_apply_solutions_cmd( ms: MS, solutions_file: Path, - output_column: Optional[str] = None, - container: Optional[Path] = None, + output_column: str | None = None, + container: Path | None = None, ) -> ApplySolutions: """Construct the command to apply calibration solutions to a MS using an AO calibrate style solutions file. 
@@ -691,8 +686,8 @@ def create_apply_solutions_cmd( f"applysolutions " f"-datacolumn {input_column} " f"{copy_mode} " - f"{str(ms.path)} " - f"{str(solutions_file)} " + f"{ms.path!s} " + f"{solutions_file!s} " ) logger.info(f"Constructed {cmd=}") @@ -800,13 +795,13 @@ def calibrate_apply_ms( def apply_solutions_to_ms( - ms: Union[Path, MS], + ms: Path | MS, solutions_path: Path, container: Path, data_column: str = "DATA", ) -> ApplySolutions: ms = ms if isinstance(ms, MS) else MS(path=ms, column=data_column) - logger.info(f"Will attempt to apply {str(solutions_path)} to {str(ms.path)}.") + logger.info(f"Will attempt to apply {solutions_path!s} to {ms.path!s}.") apply_solutions_cmd = create_apply_solutions_cmd( ms=ms, solutions_file=solutions_path @@ -867,15 +862,15 @@ def flag_aosolutions( solutions_path: Path, ref_ant: int = -1, flag_cut: float = 3, - plot_dir: Optional[Path] = None, - out_solutions_path: Optional[Path] = None, + plot_dir: Path | None = None, + out_solutions_path: Path | None = None, smooth_solutions: bool = False, plot_solutions_throughout: bool = True, smooth_window_size: int = 16, smooth_polynomial_order: int = 4, mean_ant_tolerance: float = 0.2, mesh_ant_flags: bool = False, - max_gain_amplitude: Optional[float] = None, + max_gain_amplitude: float | None = None, ) -> FlaggedAOSolution: """Will open a previously solved ao-calibrate solutions file and flag additional channels and antennae. 
@@ -944,7 +939,7 @@ def flag_aosolutions( ref_ant = select_refant(bandpass=solutions.bandpass) logger.info(f"Overwriting reference antenna selection, using {ref_ant=}") - plots: List[Path] = [] + plots: list[Path] = [] if plot_solutions_throughout: output_plots = plot_solutions(solutions=solutions_path, ref_ant=ref_ant) @@ -1075,7 +1070,7 @@ def flag_aosolutions( total_flagged = np.sum(~np.isfinite(bandpass)) / np.prod(bandpass.shape) if total_flagged > 0.8: msg = ( - f"{total_flagged*100.:.2f}% of {str((solutions_path))} is flagged after running the preflagger. " + f"{total_flagged*100.:.2f}% of {(solutions_path)!s} is flagged after running the preflagger. " "That is over 90%. " f"This surely can not be correct. Likely something has gone very wrong. " ) @@ -1100,7 +1095,7 @@ def add_model_options_to_command(add_model_options: AddModelOptions) -> str: """ logger.info("Generating addmodel command") command = f"addmodel -datacolumn {add_model_options.datacolumn} -m {add_model_options.mode} " - command += f"{str(add_model_options.model_path)} {str(add_model_options.ms_path)}" + command += f"{add_model_options.model_path!s} {add_model_options.ms_path!s}" return command diff --git a/flint/catalogue.py b/flint/catalogue.py index 92b25e8f..550564df 100644 --- a/flint/catalogue.py +++ b/flint/catalogue.py @@ -7,9 +7,11 @@ and retained on disk. 
""" +from __future__ import annotations + from argparse import ArgumentParser from pathlib import Path -from typing import NamedTuple, Optional, Tuple, Union +from typing import NamedTuple import astropy.units as u from astropy.table import Table @@ -41,11 +43,11 @@ class Catalogue(NamedTuple): """Column name containing the min-axis of the source gaussian component""" pa_col: str """Column name containing the pa of the source gaussian component""" - alpha_col: Optional[str] = None # Used to scale the SED + alpha_col: str | None = None # Used to scale the SED """Column name containing the spectral index, used to calculate the source SED. If None a default is used. """ - q_col: Optional[str] = None # Used to scale the SED + q_col: str | None = None # Used to scale the SED """Column name containing the curvature of the spectral index, used to calculate the source SED. If None a default is used. """ - vizier_id: Optional[str] = ( + vizier_id: str | None = ( None # Required for known reference catalogues, not for other specified catalogues ) """The ID of the catalogue on Vizier that is used to download the catalogue""" @@ -124,7 +126,7 @@ class Catalogue(NamedTuple): def guess_column_in_table( - table: Table, column: str, guess_column: Optional[str] = None + table: Table, column: str, guess_column: str | None = None ) -> str: """Attempt to deduce the appropriate column name from a set of column names in a table. 
A lookup of known column names for different @@ -170,7 +172,7 @@ def guess_column_in_table( def _guess_catalogue_type( - table: Union[Table, Path], + table: Table | Path, survey: str = "askap", file_name: str = "askap.fit", freq: float = -1, @@ -207,7 +209,7 @@ def _guess_catalogue_type( def get_reference_catalogue( reference_directory: Path, survey: str, verify: bool = True -) -> Tuple[Table, Catalogue]: +) -> tuple[Table, Catalogue]: """Load in a known reference catalogue Args: @@ -301,7 +303,7 @@ def download_vizier_catalogue( def download_referencce_catalogues( reference_directory: Path, dry_run: bool = False -) -> Tuple[Path, ...]: +) -> tuple[Path, ...]: """Download all of the expected reference catalogue data that flint relies on Args: diff --git a/flint/coadd/linmos.py b/flint/coadd/linmos.py index 31f4ec33..651dfee5 100644 --- a/flint/coadd/linmos.py +++ b/flint/coadd/linmos.py @@ -1,8 +1,10 @@ """This is an interface into the yandasoft linmos task.""" +from __future__ import annotations + from argparse import ArgumentParser from pathlib import Path -from typing import Collection, List, NamedTuple, Optional, Tuple, Literal +from typing import Collection, Literal, NamedTuple import numpy as np from astropy.io import fits @@ -38,7 +40,7 @@ class BoundingBox(NamedTuple): """Minimum y pixel""" ymax: int """Maximum y pixel""" - original_shape: Tuple[int, int] + original_shape: tuple[int, int] """The original shape of the image. If constructed against a cube this is the shape of a single plane.""" @@ -47,15 +49,15 @@ class LinmosParsetSummary(NamedTuple): parset_path: Path """Path to the parset text file created""" - image_paths: Tuple[Path, ...] + image_paths: tuple[Path, ...] """The set of paths to the fits images that were coadded together""" - weight_text_paths: Optional[Tuple[Path, ...]] = None + weight_text_paths: tuple[Path, ...] 
| None = None """The set of Paths to the text files with per channel weights used by linmos""" def _create_bound_box_plane( image_data: np.ndarray, is_masked: bool = False -) -> Optional[BoundingBox]: +) -> BoundingBox | None: """Create a bounding box around pixels in a 2D image. If all pixels are not valid, then ``None`` is returned. @@ -156,7 +158,7 @@ class TrimImageResult(NamedTuple): def trim_fits_image( - image_path: Path, bounding_box: Optional[BoundingBox] = None + image_path: Path, bounding_box: BoundingBox | None = None ) -> TrimImageResult: """Trim the FITS image produces by linmos to remove as many empty pixels around the border of the image as possible. This is an inplace operation. @@ -251,7 +253,7 @@ def _get_image_weight_plane( def get_image_weight( image_path: Path, mode: str = "mad", stride: int = 1, image_slice: int = 0 -) -> List[float]: +) -> list[float]: """Compute an image weight supplied to linmos, which is used for optimally weighting overlapping images. Supported modes are 'mad' and 'mtd', which simply resolve to their numpy equivalents. @@ -282,7 +284,7 @@ def get_image_weight( f"Computing linmos weight using {mode=}, {image_slice=} for {image_path}. " ) - weights: List[float] = [] + weights: list[float] = [] with fits.open(image_path, memmap=True) as in_fits: image_data = in_fits[image_slice].data # type: ignore @@ -312,7 +314,7 @@ def get_image_weight( def generate_weights_list_and_files( image_paths: Collection[Path], mode: str = "mad", stride: int = 1 -) -> Tuple[Path, ...]: +) -> tuple[Path, ...]: """Generate the expected linmos weight files, and construct an appropriate string that can be embedded into a linmos partset. 
These weights files will appear as: @@ -367,7 +369,7 @@ def generate_weights_list_and_files( return tuple(weight_file_list) -def _get_alpha_linmos_option(pol_axis: Optional[float] = None) -> str: +def _get_alpha_linmos_option(pol_axis: float | None = None) -> str: """Compute the appropriate alpha term for linmos that is used to describe the differential rotation of the ASKAP third-axis and the footprint layout. The typical holography rotation is -45 degs. Internally @@ -404,8 +406,8 @@ def _get_alpha_linmos_option(pol_axis: Optional[float] = None) -> str: def _get_holography_linmos_options( - holofile: Optional[Path] = None, - pol_axis: Optional[float] = None, + holofile: Path | None = None, + pol_axis: float | None = None, remove_leakage: bool = False, ) -> str: """Construct the appropriate set of linmos options that @@ -433,7 +435,7 @@ def _get_holography_linmos_options( parset = ( f"linmos.primarybeam = ASKAP_PB\n" - f"linmos.primarybeam.ASKAP_PB.image = {str(holofile.absolute())}\n" + f"linmos.primarybeam.ASKAP_PB.image = {holofile.absolute()!s}\n" f"linmos.removeleakage = {'true' if remove_leakage else 'false'}\n" ) parset += _get_alpha_linmos_option(pol_axis=pol_axis) @@ -445,10 +447,10 @@ def generate_linmos_parameter_set( images: Collection[Path], parset_output_path: Path, linmos_names: LinmosNames, - weight_list: Optional[str] = None, - holofile: Optional[Path] = None, + weight_list: str | None = None, + holofile: Path | None = None, cutoff: float = 0.001, - pol_axis: Optional[float] = None, + pol_axis: float | None = None, overwrite: bool = True, ) -> LinmosParsetSummary: """Generate a parset file that will be used with the @@ -467,7 +469,7 @@ def generate_linmos_parameter_set( Returns: LinmosParsetSummary: Important components around the generated parset file. 
""" - img_str: List[str] = list( + img_str: list[str] = list( [str(p).replace(".fits", "") for p in images if p.exists()] ) logger.info(f"{len(img_str)} unique images from {len(images)} input collection. ") @@ -482,7 +484,7 @@ def generate_linmos_parameter_set( # quality. In reality, this should be updated to provide a RMS noise # estimate per-pixel of each image. weight_str = weight_list - weight_files: Optional[Tuple[Path, ...]] = None + weight_files: tuple[Path, ...] | None = None if weight_str is None: weight_files = generate_weights_list_and_files( image_paths=images, mode="mad", stride=8 @@ -514,8 +516,8 @@ def generate_linmos_parameter_set( f"linmos.beams = {beam_order_list}\n" # f"linmos.beamangle = {beam_angle_list}\n" f"linmos.imagetype = fits\n" - f"linmos.outname = {str(parent_dir / linmos_names.image_fits.stem)}\n" - f"linmos.outweight = {str(parent_dir / linmos_names.weight_fits.stem)}\n" + f"linmos.outname = {parent_dir / linmos_names.image_fits.stem!s}\n" + f"linmos.outweight = {parent_dir / linmos_names.weight_fits.stem!s}\n" f"# For ASKAPsoft>1.3.0\n" f"linmos.useweightslog = true\n" f"linmos.weighttype = Combined\n" @@ -528,11 +530,11 @@ def generate_linmos_parameter_set( parset += _get_holography_linmos_options( holofile=holofile, pol_axis=pol_axis, - remove_leakage=".i." not in str(list(images)[0]), + remove_leakage=".i." not in str(next(iter(images))), ) # Now write the file, me hearty - logger.info(f"Writing parset to {str(parset_output_path)}.") + logger.info(f"Writing parset to {parset_output_path!s}.") logger.info(f"{parset}") if not overwrite: assert not Path( @@ -552,7 +554,7 @@ def generate_linmos_parameter_set( return linmos_parset_summary -def _linmos_cleanup(linmos_parset_summary: LinmosParsetSummary) -> Tuple[Path, ...]: +def _linmos_cleanup(linmos_parset_summary: LinmosParsetSummary) -> tuple[Path, ...]: """Clean up linmos files if requested. 
Args: @@ -577,11 +579,11 @@ def linmos_images( images: Collection[Path], parset_output_path: Path, image_output_name: str = "linmos_field", - weight_list: Optional[str] = None, - holofile: Optional[Path] = None, + weight_list: str | None = None, + holofile: Path | None = None, container: Path = Path("yandasoft.sif"), cutoff: float = 0.001, - pol_axis: Optional[float] = None, + pol_axis: float | None = None, trim_linmos_fits: bool = True, cleanup: bool = False, ) -> LinmosCommand: @@ -603,9 +605,7 @@ def linmos_images( LinmosCommand: The linmos command executed and the associated parset file """ - assert ( - container.exists() - ), f"The yandasoft container {str(container)} was not found. " + assert container.exists(), f"The yandasoft container {container!s} was not found. " linmos_names: LinmosNames = create_linmos_names(name_prefix=image_output_name) @@ -619,7 +619,7 @@ def linmos_images( pol_axis=pol_axis, ) - linmos_cmd_str = f"linmos -c {str(linmos_parset_summary.parset_path)}" + linmos_cmd_str = f"linmos -c {linmos_parset_summary.parset_path!s}" bind_dirs = [image.absolute().parent for image in images] + [ linmos_parset_summary.parset_path.absolute().parent ] diff --git a/flint/configuration.py b/flint/configuration.py index eea5a8ea..b64927f9 100644 --- a/flint/configuration.py +++ b/flint/configuration.py @@ -4,15 +4,17 @@ throughout the pipeline. 
""" +from __future__ import annotations + import inspect import shutil from argparse import ArgumentParser from pathlib import Path -from typing import Any, Callable, Dict, ParamSpec, Optional, TypeVar, Union +from typing import Any, Callable, ParamSpec, TypeVar +import yaml from click import MissingParameter from pydantic import ValidationError -import yaml from flint.imager.wsclean import WSCleanOptions from flint.logging import logger @@ -42,7 +44,7 @@ } -def _create_mode_mapping_defaults() -> Dict[str, Any]: +def _create_mode_mapping_defaults() -> dict[str, Any]: """Create the default key-values for each of the registered Option classes Returns: @@ -82,8 +84,8 @@ def copy_and_timestamp_strategy_file(output_dir: Path, input_yaml: Path) -> Path def _load_and_copy_strategy( - output_split_science_path: Path, imaging_strategy: Optional[Path] = None -) -> Union[Strategy, None]: + output_split_science_path: Path, imaging_strategy: Path | None = None +) -> Strategy | None: """Load a strategy file and copy a timestamped version into the output directory that would contain the science processing. @@ -107,7 +109,7 @@ def _load_and_copy_strategy( ) -def get_selfcal_options_from_yaml(input_yaml: Optional[Path] = None) -> Dict: +def get_selfcal_options_from_yaml(input_yaml: Path | None = None) -> dict: """Stub to represent interaction with a configurationf ile If a path is supplied, an error is raised. @@ -132,8 +134,8 @@ def get_selfcal_options_from_yaml(input_yaml: Optional[Path] = None) -> Dict: def get_image_options_from_yaml( - input_yaml: Optional[Path] = None, self_cal_rounds: bool = False -) -> Dict: + input_yaml: Path | None = None, self_cal_rounds: bool = False +) -> dict: """Stub to interact with configuration file. 
If a `input_yaml` file is provided an error is raised @@ -247,12 +249,12 @@ def get_image_options_from_yaml( def get_options_from_strategy( - strategy: Union[Strategy, None, Path], + strategy: Strategy | None | Path, mode: str = "wsclean", - round_info: Union[str, int] = "initial", + round_info: str | int = "initial", max_round_override: bool = True, - operation: Optional[str] = None, -) -> Dict[Any, Any]: + operation: str | None = None, +) -> dict[Any, Any]: f"""Extract a set of options from a strategy file to use in a pipeline run. If the mode exists in the default section, these are used as a base. @@ -379,11 +381,11 @@ def _wrapper(fn: Callable[P, T]) -> Callable[P, T]: # prefect confuxed, wherein it throws an error saying the strategy, mode, round options # are not part of the wrappede fn's function signature. def wrapper( - strategy: Union[Strategy, None, Path] = None, + strategy: Strategy | None | Path = None, mode: str = "wsclean", - round_info: Union[str, int] = "initial", + round_info: str | int = "initial", max_round_override: bool = True, - operation: Optional[str] = None, + operation: str | None = None, *args: P.args, **kwargs: P.kwargs, ) -> T: @@ -516,7 +518,7 @@ def load_strategy_yaml(input_yaml: Path, verify: bool = True) -> Strategy: logger.info(f"Loading {input_yaml} file. ") - with open(input_yaml, "r") as in_file: + with open(input_yaml) as in_file: input_strategy = Strategy(yaml.load(in_file, Loader=yaml.Loader)) if verify: @@ -545,9 +547,7 @@ def write_strategy_to_yaml(strategy: Strategy, output_path: Path) -> Path: # TODO: Create the file only for a subset of known defaults -def create_default_yaml( - output_yaml: Path, selfcal_rounds: Optional[int] = None -) -> Path: +def create_default_yaml(output_yaml: Path, selfcal_rounds: int | None = None) -> Path: """Create an example strategy yaml file that outlines the options to use at varies stages of some assumed processing pipeline. 
@@ -561,7 +561,7 @@ def create_default_yaml( Path: Path to the written yaml output file. """ logger.info("Generating a default strategy. ") - strategy: Dict[Any, Any] = {} + strategy: dict[Any, Any] = {} strategy["version"] = FORMAT_VERSION @@ -571,7 +571,7 @@ def create_default_yaml( if selfcal_rounds: logger.info(f"Creating {selfcal_rounds} self-calibration rounds. ") - selfcal: Dict[int, Any] = {} + selfcal: dict[int, Any] = {} for selfcal_round in range(1, selfcal_rounds + 1): selfcal[selfcal_round] = { "wsclean": {}, diff --git a/flint/convol.py b/flint/convol.py index b7cced8a..f8f543a6 100644 --- a/flint/convol.py +++ b/flint/convol.py @@ -8,7 +8,7 @@ from argparse import ArgumentParser from pathlib import Path from shutil import copyfile -from typing import Collection, List, Literal, NamedTuple, Optional +from typing import Collection, Literal, NamedTuple import astropy.units as u import numpy as np @@ -80,8 +80,8 @@ def check_if_cube_fits(fits_file: Path) -> bool: def get_cube_common_beam( - cube_paths: Collection[Path], cutoff: Optional[float] = None -) -> List[BeamShape]: + cube_paths: Collection[Path], cutoff: float | None = None +) -> list[BeamShape]: """Given a set of input cube FITS files, compute a common beam for each channel. @@ -129,8 +129,8 @@ def get_cube_common_beam( def convolve_cubes( cube_paths: Collection[Path], - beam_shapes: List[BeamShape], - cutoff: Optional[float] = None, + beam_shapes: list[BeamShape], + cutoff: float | None = None, convol_suffix: str = "conv", executor_type: Literal["thread", "process", "mpi"] = "thread", ) -> Collection[Path]: @@ -180,7 +180,7 @@ def convolve_cubes( def get_common_beam( - image_paths: Collection[Path], cutoff: Optional[float] = None + image_paths: Collection[Path], cutoff: float | None = None ) -> BeamShape: """Return the minimum beam size required to encompass the beams described in the FITS header (e.g. BMAJ,BMIN,BPA) of the input images. 
This is used @@ -214,7 +214,7 @@ def get_common_beam( def convolve_images( image_paths: Collection[Path], beam_shape: BeamShape, - cutoff: Optional[float] = None, + cutoff: float | None = None, convol_suffix: str = "conv", ) -> Collection[Path]: """Convolve a set of input images to a common resolution as specified @@ -257,7 +257,7 @@ def convolve_images( pa=beam_shape.bpa_deg * u.deg, ) - return_conv_image_paths: List[Path] = [] + return_conv_image_paths: list[Path] = [] for image_path in image_paths: convol_output_path = Path( @@ -268,7 +268,7 @@ def convolve_images( logger.info(f"Copying {image_path} to {convol_output_path=} for empty beam") copyfile(image_path, convol_output_path) else: - logger.info(f"Convolving {str(image_path.name)}") + logger.info(f"Convolving {image_path.name!s}") beamcon_2D.beamcon_2d_on_fits( file=image_path, outdir=None, diff --git a/flint/exceptions.py b/flint/exceptions.py index 630b0d52..39b35f8a 100644 --- a/flint/exceptions.py +++ b/flint/exceptions.py @@ -1,3 +1,6 @@ +from __future__ import annotations + + class FlintException(Exception): """Base exception for Flint""" diff --git a/flint/flagging.py b/flint/flagging.py index 704a2e27..791c001c 100644 --- a/flint/flagging.py +++ b/flint/flagging.py @@ -1,8 +1,10 @@ """Utility functions to carry out flagging against ASKAP measurement sets""" +from __future__ import annotations + from argparse import ArgumentParser from pathlib import Path -from typing import Collection, NamedTuple, Optional, Union +from typing import Collection, NamedTuple import numpy as np from casacore.tables import table @@ -23,7 +25,7 @@ class AOFlaggerCommand(NamedTuple): """The path to the MS that will be flagged. 
""" ms: MS """The MS object that was flagged""" - strategy_file: Optional[Path] = None + strategy_file: Path | None = None """The path to the aoflagging strategy file to use""" @@ -74,8 +76,8 @@ def flag_ms_zero_uvws(ms: MS, chunk_size: int = 10000) -> MS: def nan_zero_extreme_flag_ms( - ms: Union[Path, MS], - data_column: Optional[str] = None, + ms: Path | MS, + data_column: str | None = None, flag_extreme_dxy: bool = True, dxy_thresh: float = 4.0, nan_data_on_flag: bool = False, @@ -105,7 +107,7 @@ def nan_zero_extreme_flag_ms( logger.warning("No valid data column selected, using default of DATA") data_column = "DATA" elif data_column is None and ms.column is not None: - logger.info(f"Using nominated {ms.column} column for {str(ms.path)}") + logger.info(f"Using nominated {ms.column} column for {ms.path!s}") data_column = ms.column logger.info(f"Flagging NaNs and zeros in {data_column}.") @@ -185,7 +187,7 @@ def create_aoflagger_cmd(ms: MS) -> AOFlaggerCommand: ) logger.info(f"Flagging using the strategy file {flagging_strategy}") - cmd = f"aoflagger -column {ms.column} -strategy {flagging_strategy} -v {str(ms.path.absolute())}" + cmd = f"aoflagger -column {ms.column} -strategy {flagging_strategy} -v {ms.path.absolute()!s}" return AOFlaggerCommand( cmd=cmd, ms_path=ms.path, strategy_file=Path(flagging_strategy), ms=ms @@ -225,7 +227,7 @@ def flag_ms_aoflagger(ms: MS, container: Path) -> MS: MS: Measurement set flagged with the appropriate column """ ms = MS.cast(ms) - logger.info(f"Will flag column {ms.column} in {str(ms.path)}.") + logger.info(f"Will flag column {ms.column} in {ms.path!s}.") aoflagger_cmd = create_aoflagger_cmd(ms=ms) logger.info("Flagging command constructed. 
") @@ -238,9 +240,7 @@ def flag_ms_aoflagger(ms: MS, container: Path) -> MS: return ms -def flag_ms_by_antenna_ids( - ms: Union[Path, MS], ant_ids: Union[int, Collection[int]] -) -> MS: +def flag_ms_by_antenna_ids(ms: Path | MS, ant_ids: int | Collection[int]) -> MS: """Set the FLAG to True for a collection of rows where ANTENNA1 or ANTENNA2 is in a set of antenna IDs to flag. The flagging is performed via the antenna ID as it is in the measurement set - it is not by the antenna name. @@ -260,12 +260,12 @@ def flag_ms_by_antenna_ids( logger.info("Antenna list to flag is empty. Exiting. ") return ms - logger.info(f"Will flag {str(ms.path)}.") + logger.info(f"Will flag {ms.path!s}.") logger.info(f"Antennas to flag: {ant_ids}") # TODO: Potentially this should be batched into chunks to operate over with table(str(ms.path), readonly=False, ack=False) as tab: - logger.info(f"Opened {str(ms.path)}, loading metadata.") + logger.info(f"Opened {ms.path!s}, loading metadata.") ant1 = tab.getcol("ANTENNA1") ant2 = tab.getcol("ANTENNA2") flags = tab.getcol("FLAG") diff --git a/flint/imager/wsclean.py b/flint/imager/wsclean.py index 20ceaa7e..e12a1804 100644 --- a/flint/imager/wsclean.py +++ b/flint/imager/wsclean.py @@ -21,7 +21,7 @@ from glob import glob from numbers import Number from pathlib import Path -from typing import Any, Collection, Dict, List, NamedTuple, Optional, Tuple, Union +from typing import Any, Collection, NamedTuple import numpy as np from fitscube.combine_fits import combine_fits @@ -31,10 +31,10 @@ from flint.ms import MS from flint.naming import create_image_cube_name, create_imaging_name_prefix from flint.options import ( - options_to_dict, BaseOptions, add_options_to_parser, create_options_from_parser, + options_to_dict, ) from flint.sclient import run_singularity_command from flint.utils import ( @@ -50,17 +50,17 @@ class ImageSet(BaseOptions): prefix: str """Prefix of the images and other output products. 
This should correspond to the -name argument from wsclean""" - image: List[Path] + image: list[Path] """Images produced. """ - psf: Optional[List[Path]] = None + psf: list[Path] | None = None """References to the PSFs produced by wsclean. """ - dirty: Optional[List[Path]] = None + dirty: list[Path] | None = None """Dirty images. """ - model: Optional[List[Path]] = None + model: list[Path] | None = None """Model images. """ - residual: Optional[List[Path]] = None + residual: list[Path] | None = None """Residual images.""" - source_list: Optional[Path] = None + source_list: Path | None = None """Path to a source list that accompanies the image data""" @@ -91,7 +91,7 @@ class WSCleanOptions(BaseOptions): """How deep the construct clean mask is during each cycle""" auto_threshold: float = 0.5 """How deep to clean once initial clean threshold reached""" - threshold: Optional[float] = None + threshold: float | None = None """Threshold in Jy to stop cleaning""" channels_out: int = 4 """Number of output channels""" @@ -105,9 +105,9 @@ class WSCleanOptions(BaseOptions): """Enable multiscale deconvolution""" multiscale_scale_bias: float = 0.75 """Multiscale bias term""" - multiscale_gain: Optional[float] = None + multiscale_gain: float | None = None """Size of step made in the subminor loop of multi-scale. Default currently 0.2, but shows sign of instability. A value of 0.1 might be more stable.""" - multiscale_scales: Tuple[int, ...] = ( + multiscale_scales: tuple[int, ...] 
= ( 0, 15, 25, @@ -118,7 +118,7 @@ class WSCleanOptions(BaseOptions): 400, ) """Scales used for multi-scale deconvolution""" - fit_spectral_pol: Optional[int] = None + fit_spectral_pol: int | None = None """Number of spectral terms to include during sub-band subtraction""" weight: str = "briggs -0.5" """Robustness of the weighting used""" @@ -126,41 +126,41 @@ class WSCleanOptions(BaseOptions): """Which column in the MS to image""" scale: str = "2.5asec" """Pixel scale size""" - gridder: Optional[str] = "wgridder" + gridder: str | None = "wgridder" """Use the wgridder kernel in wsclean (instead of the default w-stacking method)""" - nwlayers: Optional[int] = None + nwlayers: int | None = None """Number of w-layers to use if the gridder mode is w-stacking""" wgridder_accuracy: float = 1e-4 """The accuracy requested of the wgridder (should it be used), compared as the RMS error when compred to a DFT""" join_channels: bool = True """Collapse the sub-band images down to an MFS image when peak-finding""" - minuv_l: Optional[float] = None + minuv_l: float | None = None """The minimum lambda length that the visibility data needs to meet for it to be selected for imaging""" - minuvw_m: Optional[float] = None + minuvw_m: float | None = None """A (u,v) selection command, where any baselines shorter than this will be ignored during imaging""" - maxw: Optional[float] = None + maxw: float | None = None """A percentage specifying the maximum w-term to be gridded, relative to the max w-term being considered""" no_update_model_required: bool = False """Will instruct wsclean not to create the MODEL_DATA column""" no_small_inversion: bool = False """Disables an optimisation of wsclean's w-gridder mode. This might improve accuracy of the w-gridder. 
""" - beam_fitting_size: Optional[float] = 1.25 + beam_fitting_size: float | None = 1.25 """Use a fitting box the size of times the theoretical beam size for fitting a Gaussian to the PSF.""" - fits_mask: Optional[Path] = None + fits_mask: Path | None = None """Path to a FITS file that encodes a cleaning mask""" - deconvolution_channels: Optional[int] = None + deconvolution_channels: int | None = None """The channels out will be averaged down to this many sub-band images during deconvolution""" - parallel_deconvolution: Optional[int] = None + parallel_deconvolution: int | None = None """If not none, then this is the number of sub-regions wsclean will attempt to divide and clean""" - parallel_gridding: Optional[int] = None + parallel_gridding: int | None = None """If not none, then this is the number of channel images that will be gridded in parallel""" - temp_dir: Optional[Union[str, Path]] = None + temp_dir: str | Path | None = None """The path to a temporary directory where files will be wrritten. """ pol: str = "i" """The polarisation to be imaged""" save_source_list: bool = False """Saves the found clean components as a BBS/DP3 text sky model""" - channel_range: Optional[Tuple[int, int]] = None + channel_range: tuple[int, int] | None = None """Image a channel range between a lower (inclusive) and upper (exclusive) bound""" no_reorder: bool = False """If True turn off the reordering of the MS at the beginning of wsclean""" @@ -177,14 +177,14 @@ class WSCleanCommand(BaseOptions): """The set of wslean options used for imaging""" ms: MS """The measurement sets that have been included in the wsclean command. 
""" - imageset: Optional[ImageSet] = None + imageset: ImageSet | None = None """Collection of images produced by wsclean""" cleanup: bool = True """Will clean up the dirty images/psfs/residuals/models when the imaging has completed""" def get_wsclean_output_source_list_path( - name_path: Union[str, Path], pol: Optional[str] = None + name_path: str | Path, pol: str | None = None ) -> Path: """WSClean can produce a text file that describes the components that it cleaned, their type, scale and brightness. These are @@ -305,11 +305,11 @@ def _wsclean_output_callback(line: str) -> None: # TODO: Update this function to also add int the source list def get_wsclean_output_names( # prefix: str, - subbands: Optional[int] = None, - pols: Optional[Union[str, Tuple[str]]] = None, + subbands: int | None = None, + pols: str | tuple[str] | None = None, verify_exists: bool = False, include_mfs: bool = True, - output_types: Union[str, Collection[str]] = ( + output_types: str | Collection[str] = ( "image", "dirty", "residual", @@ -344,7 +344,7 @@ def get_wsclean_output_names( # """ logger.info(f"Finding wsclean outputs, {prefix=}") # TODO: Use a regular expression for this - subband_strs: List[Optional[str]] = [ + subband_strs: list[str | None] = [ None, ] if subbands and subbands > 1: @@ -352,7 +352,7 @@ def get_wsclean_output_names( # if include_mfs: subband_strs.append("MFS") - in_pols: Tuple[Union[None, str]] + in_pols: tuple[None | str] if pols is None: in_pols = (None,) elif isinstance(pols, str): @@ -363,12 +363,12 @@ def get_wsclean_output_names( # if isinstance(output_types, str): output_types = (output_types,) - images: Dict[str, List[Path]] = {} + images: dict[str, list[Path]] = {} for image_type in ("image", "dirty", "model", "residual"): if image_type not in output_types: continue - paths: List[Path] = [] + paths: list[Path] = [] for pol in in_pols: for subband_str in subband_strs: components = [prefix] @@ -404,7 +404,7 @@ def get_wsclean_output_names( # images["psf"] = 
psf_images if verify_exists: - paths_no_exists: List[Path] = [] + paths_no_exists: list[Path] = [] for _, check_paths in images.items(): paths_no_exists += [path for path in check_paths if not path.exists()] if len(paths_no_exists) > 0: @@ -420,7 +420,7 @@ def delete_wsclean_outputs( output_type: str = "image", ignore_mfs: bool = True, no_subbands: bool = False, -) -> List[Path]: +) -> list[Path]: """Attempt to remove elected wsclean output files If ``no_subbands`` is True (as in ``channels_out`` is 1) then nothing is deleted. @@ -437,7 +437,7 @@ def delete_wsclean_outputs( # TODO: This glob needs to be replaced with something more explicit paths = [Path(p) for p in glob(f"{prefix}-*{output_type}.fits")] logger.info(f"Found {len(paths)} matching {prefix=} and {output_type=}.") - rm_paths: List[Path] = [] + rm_paths: list[Path] = [] for path in paths: if no_subbands: @@ -460,10 +460,10 @@ def delete_wsclean_outputs( # TODO: Need to create a regex based mode, and better handlijng of -MFS-, # which is only created when -join-channels is used def wsclean_cleanup_files( - prefix: Union[str, Path], - output_types: Optional[Tuple[str, ...]] = ("dirty", "psf", "model", "residual"), + prefix: str | Path, + output_types: tuple[str, ...] | None = ("dirty", "psf", "model", "residual"), single_channel: bool = False, -) -> Tuple[Path, ...]: +) -> tuple[Path, ...]: """Clean up (i.e. delete) files from wsclean. 
Args: @@ -512,7 +512,7 @@ def create_wsclean_name_argument(wsclean_options: WSCleanOptions, ms: MS) -> Pat ) # Now resolve the directory part - name_dir: Union[Path, str, None] = ms.path.parent + name_dir: Path | str | None = ms.path.parent temp_dir = wsclean_options_dict.get("temp_dir", None) if temp_dir: # Resolve if environment variable @@ -533,11 +533,11 @@ def create_wsclean_name_argument(wsclean_options: WSCleanOptions, ms: MS) -> Pat class ResolvedCLIResult(NamedTuple): """Mapping results to provide to wsclean""" - cmd: Optional[str] = None + cmd: str | None = None """The argument value pair to place on the CLI. """ - unknown: Optional[Any] = None + unknown: Any | None = None """Unknown options that could not be converted""" - bindpath: Optional[Path] = None + bindpath: Path | None = None """A path to bind to when called within a container""" ignore: bool = False """Ignore this CLIResult if True""" @@ -609,7 +609,7 @@ def _resolve_wsclean_key_value_to_cli_str(key: str, value: Any) -> ResolvedCLIRe def create_wsclean_cmd( ms: MS, wsclean_options: WSCleanOptions, - container: Optional[Path] = None, + container: Path | None = None, ) -> WSCleanCommand: """Create a wsclean command from a WSCleanOptions container @@ -645,11 +645,11 @@ def create_wsclean_cmd( wsclean_options=wsclean_options, ms=ms ) move_directory = ms.path.parent - hold_directory: Optional[Path] = Path(name_argument_path).parent + hold_directory: Path | None = Path(name_argument_path).parent wsclean_options_dict = wsclean_options._asdict() - unknowns: List[Tuple[Any, Any]] = [] + unknowns: list[tuple[Any, Any]] = [] logger.info("Creating wsclean command.") cli_results = list( @@ -673,8 +673,8 @@ def create_wsclean_cmd( msg = ", ".join([f"{t[0]} {t[1]}" for t in unknowns]) raise ValueError(f"Unknown wsclean option types: {msg}") - cmds += [f"-name {str(name_argument_path)}"] - cmds += [f"{str(ms.path)} "] + cmds += [f"-name {name_argument_path!s}"] + cmds += [f"{ms.path!s} "] 
bind_dir_paths.append(ms.path.parent) @@ -777,7 +777,7 @@ def rename_wsclean_prefix_in_imageset(input_imageset: ImageSet) -> ImageSet: check_keys = ("prefix", "image", "residual", "model", "dirty") - output_args: Dict[str, Any] = {} + output_args: dict[str, Any] = {} for key, value in input_args.items(): if key == "prefix": @@ -798,10 +798,10 @@ def rename_wsclean_prefix_in_imageset(input_imageset: ImageSet) -> ImageSet: def run_wsclean_imager( wsclean_cmd: WSCleanCommand, container: Path, - bind_dirs: Optional[Tuple[Path, ...]] = None, - move_hold_directories: Optional[Tuple[Path, Optional[Path]]] = None, + bind_dirs: tuple[Path, ...] | None = None, + move_hold_directories: tuple[Path, Path | None] | None = None, make_cube_from_subbands: bool = True, - image_prefix_str: Optional[str] = None, + image_prefix_str: str | None = None, ) -> WSCleanCommand: """Run a provided wsclean command. Optionally will clean up files, including the dirty beams, psfs and other assorted things. @@ -859,7 +859,7 @@ def run_wsclean_imager( wsclean_cleanup = False # Update the prefix based on where the files will be moved to prefix = ( - f"{str(move_hold_directories[0] / Path(prefix).name)}" + f"{move_hold_directories[0] / Path(prefix).name!s}" if image_prefix_str else None ) @@ -908,9 +908,9 @@ def run_wsclean_imager( def wsclean_imager( - ms: Union[Path, MS], + ms: Path | MS, wsclean_container: Path, - update_wsclean_options: Optional[Dict[str, Any]] = None, + update_wsclean_options: dict[str, Any] | None = None, ) -> WSCleanCommand: """Create and run a wsclean imager command against a measurement set. 
diff --git a/flint/leakage.py b/flint/leakage.py index 050f00cf..60f0cf4d 100644 --- a/flint/leakage.py +++ b/flint/leakage.py @@ -1,8 +1,10 @@ """Construct a leakge map between two polarisations, typically V/I""" +from __future__ import annotations + from argparse import ArgumentParser from pathlib import Path -from typing import Dict, NamedTuple, Union, Optional +from typing import NamedTuple, Union import astropy.units as u import numpy as np @@ -15,7 +17,6 @@ from flint.catalogue import guess_column_in_table from flint.logging import logger - TableOrPath = Union[Table, Path] @@ -44,7 +45,7 @@ class FITSImage(NamedTuple): data: np.ndarray """The data of the fits image""" - header: Dict + header: dict """Header of the fits image""" wcs: WCS """Celestial WCS of the fits image""" @@ -100,8 +101,8 @@ def filter_components( int_col: str, int_err_col: str, leakage_filters: LeakageFilters, - ra_col: Optional[str] = None, - dec_col: Optional[str] = None, + ra_col: str | None = None, + dec_col: str | None = None, ) -> Table: """Apply the pre-processing operations to catalogue components to select an optimal sample of sources for leakage characterisation. Sources will be selected @@ -170,8 +171,8 @@ def filter_components( def get_xy_pixel_coords( table: Table, wcs: WCS, - ra_col: Optional[str] = None, - dec_col: Optional[str] = None, + ra_col: str | None = None, + dec_col: str | None = None, ) -> PixelCoords: """Convert (RA, Dec) positions in a catalogue into (x, y)-pixels given an WCS @@ -326,7 +327,7 @@ def extract_pol_stats_in_box( def _get_output_catalogue_path( - input_path: Path, pol: str, output_path: Optional[Path] = None + input_path: Path, pol: str, output_path: Path | None = None ) -> Path: """Create the output leakage catalogue name""" # NOTE: This is a separate function to test against after a silly. 
Might move with the other named Pirates @@ -347,9 +348,9 @@ def _get_output_catalogue_path( def create_leakge_component_table( pol_image: Path, - catalogue: Union[Table, Path], + catalogue: Table | Path, pol: str = "v", - output_path: Optional[Path] = None, + output_path: Path | None = None, ) -> Path: """Create a component catalogue that includes enough information to describe the polarisation fraction of sources across a field. This is intended to be used diff --git a/flint/logging.py b/flint/logging.py index 89bb3092..2230ca48 100644 --- a/flint/logging.py +++ b/flint/logging.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import logging # Create logger diff --git a/flint/masking.py b/flint/masking.py index 8865844c..fda510ef 100644 --- a/flint/masking.py +++ b/flint/masking.py @@ -6,12 +6,13 @@ from argparse import ArgumentParser from pathlib import Path -from typing import Collection, Optional, Union, NamedTuple +from typing import Collection, NamedTuple import astropy.units as u import numpy as np from astropy.io import fits from astropy.wcs import WCS +from radio_beam import Beam from reproject import reproject_interp from scipy.ndimage import ( binary_dilation as scipy_binary_dilation, # Rename to distinguish from skimage @@ -21,7 +22,6 @@ ) from scipy.ndimage import label, minimum_filter from scipy.signal import fftconvolve -from radio_beam import Beam from flint.logging import logger from flint.naming import FITSMaskNames, create_fits_mask_names @@ -53,7 +53,7 @@ class MaskingOptions(BaseOptions): """Stepping size used to increase box by should adaptive detect poor boxcar statistics""" flood_fill_use_mbc_adaptive_skew_delta: float = 0.2 """A box is consider too small for a pixel if the fractional proportion of positive pixels is larger than the deviation away of (0.5 + frac). 
This threshold is therefore 0 to 0.5""" - flood_fill_use_mbc_adaptive_max_depth: Optional[int] = None + flood_fill_use_mbc_adaptive_max_depth: int | None = None """Determines the number of adaptive boxcar scales to use when constructing seed mask. If None no adaptive boxcar sizes""" grow_low_snr_island: bool = False """Whether to attempt to grow a mask to capture islands of low SNR (e.g. diffuse emission)""" @@ -69,7 +69,7 @@ class MaskingOptions(BaseOptions): def consider_beam_mask_round( current_round: int, - mask_rounds: Union[str, Collection[int], int], + mask_rounds: str | Collection[int] | int, allow_beam_masks: bool = True, ) -> bool: """Evaluate whether a self-calibration round should have a beam clean mask @@ -97,8 +97,7 @@ def consider_beam_mask_round( return mask_rounds is not None and ( (isinstance(mask_rounds, str) and mask_rounds.lower() == "all") or (isinstance(mask_rounds, int) and current_round >= mask_rounds) - or (isinstance(mask_rounds, (list, tuple))) - and current_round in mask_rounds + or ((isinstance(mask_rounds, (list, tuple))) and current_round in mask_rounds) ) # type: ignore @@ -240,10 +239,10 @@ def extract_beam_mask_from_mosaic( def _get_signal_image( - image: Optional[np.ndarray] = None, - rms: Optional[np.ndarray] = None, - background: Optional[np.ndarray] = None, - signal: Optional[np.ndarray] = None, + image: np.ndarray | None = None, + rms: np.ndarray | None = None, + background: np.ndarray | None = None, + signal: np.ndarray | None = None, ) -> np.ndarray: if all([item is None for item in (image, background, rms, signal)]): raise ValueError("No input maps have been provided. 
") @@ -261,13 +260,13 @@ def _get_signal_image( def grow_low_snr_mask( - image: Optional[np.ndarray] = None, - rms: Optional[np.ndarray] = None, - background: Optional[np.ndarray] = None, - signal: Optional[np.ndarray] = None, + image: np.ndarray | None = None, + rms: np.ndarray | None = None, + background: np.ndarray | None = None, + signal: np.ndarray | None = None, grow_low_snr: float = 2.0, grow_low_island_size: int = 512, - region_mask: Optional[np.ndarray] = None, + region_mask: np.ndarray | None = None, ) -> np.ndarray: """There may be cases where normal thresholding operations based on simple pixel-wise SNR cuts fail to pick up diffuse, low surface brightness regions of emission. When some type @@ -434,7 +433,7 @@ def minimum_absolute_clip( image: np.ndarray, increase_factor: float = 2.0, box_size: int = 100, - adaptive_max_depth: Optional[int] = None, + adaptive_max_depth: int | None = None, adaptive_box_step: float = 2.0, adaptive_skew_delta: float = 0.2, ) -> np.ndarray: @@ -494,10 +493,8 @@ def _verify_set_positive_seed_clip( max_signal = np.max(signal) if max_signal < positive_seed_clip: logger.critical( - ( - f"The maximum signal {max_signal:.4f} is below the provided {positive_seed_clip=}. " - "Setting clip to 90 percent of maximum. " - ) + f"The maximum signal {max_signal:.4f} is below the provided {positive_seed_clip=}. " + "Setting clip to 90 percent of maximum. 
" ) positive_seed_clip = max_signal * 0.9 @@ -507,7 +504,7 @@ def _verify_set_positive_seed_clip( def reverse_negative_flood_fill( base_image: np.ndarray, masking_options: MaskingOptions, - pixels_per_beam: Optional[float] = None, + pixels_per_beam: float | None = None, ) -> np.ndarray: """Attempt to: @@ -597,9 +594,9 @@ def reverse_negative_flood_fill( def _create_signal_from_rmsbkg( - image: Union[Path, np.ndarray], - rms: Union[Path, np.ndarray], - bkg: Union[Path, np.ndarray], + image: Path | np.ndarray, + rms: Path | np.ndarray, + bkg: Path | np.ndarray, ) -> np.ndarray: logger.info("Creating signal image") @@ -639,8 +636,8 @@ def _need_to_make_signal(masking_options: MaskingOptions) -> bool: def create_snr_mask_from_fits( fits_image_path: Path, masking_options: MaskingOptions, - fits_rms_path: Optional[Path], - fits_bkg_path: Optional[Path], + fits_rms_path: Path | None, + fits_bkg_path: Path | None, create_signal_fits: bool = False, overwrite: bool = True, ) -> FITSMaskNames: diff --git a/flint/ms.py b/flint/ms.py index 473608f6..bc235d8e 100644 --- a/flint/ms.py +++ b/flint/ms.py @@ -17,7 +17,7 @@ from curses.ascii import controlnames from os import PathLike from pathlib import Path -from typing import List, NamedTuple, Optional, Tuple, Union +from typing import NamedTuple import astropy.units as u import numpy as np @@ -42,9 +42,9 @@ class MSSummary(NamedTuple): """Number of flagged records""" flag_spectrum: np.ndarray """Flagged spectral channels""" - fields: List[str] + fields: list[str] """Collection of unique field names from the FIELDS table""" - ants: List[int] + ants: list[int] """Collection of unique antennas""" beam: int """The ASKAP beam number of the measurement set""" @@ -52,7 +52,7 @@ class MSSummary(NamedTuple): """Path to the measurement set that is being represented""" phase_dir: SkyCoord """The phase direction of the measurement set, which will be where the image will be centred""" - spw: Optional[int] = None + spw: int | None = None 
"""Intended to be used with ASKAP high-frequency resolution modes, where the MS is divided into SPWs""" @@ -117,7 +117,7 @@ def critical_ms_interaction( output_ms.rename(target=input_ms) -def get_field_id_for_field(ms: Union[MS, Path], field_name: str) -> Union[int, None]: +def get_field_id_for_field(ms: MS | Path, field_name: str) -> int | None: """Return the FIELD_ID for an elected field in a measurement set Args: @@ -132,7 +132,7 @@ def get_field_id_for_field(ms: Union[MS, Path], field_name: str) -> Union[int, N """ ms_path = ms if isinstance(ms, Path) else ms.path - with table(f"{str(ms_path)}/FIELD", readonly=True, ack=False) as tab: + with table(f"{ms_path!s}/FIELD", readonly=True, ack=False) as tab: # The ID is _position_ of the matching row in the table. field_names = tab.getcol("NAME") field_idx = np.argwhere([fn == field_name for fn in field_names])[0] @@ -150,7 +150,7 @@ def get_field_id_for_field(ms: Union[MS, Path], field_name: str) -> Union[int, N return field_idx -def get_beam_from_ms(ms: Union[MS, Path]) -> int: +def get_beam_from_ms(ms: MS | Path) -> int: """Lookup the ASKAP beam number from a measurement set. Args: @@ -166,12 +166,12 @@ def get_beam_from_ms(ms: Union[MS, Path]) -> int: assert ( len(uniq_beams) == 1 - ), f"Expected {str(ms_path)} to contain a single beam, found {len(uniq_beams)}: {uniq_beams=}" + ), f"Expected {ms_path!s} to contain a single beam, found {len(uniq_beams)}: {uniq_beams=}" return uniq_beams[0] -def get_freqs_from_ms(ms: Union[MS, Path]) -> np.ndarray: +def get_freqs_from_ms(ms: MS | Path) -> np.ndarray: """Return the frequencies observed from an ASKAP Measurement set. Some basic checks are performed to ensure they conform to some expectations. 
@@ -184,7 +184,7 @@ def get_freqs_from_ms(ms: Union[MS, Path]) -> np.ndarray: """ ms = MS.cast(ms) - with table(f"{str(ms.path)}/SPECTRAL_WINDOW", readonly=True, ack=False) as tab: + with table(f"{ms.path!s}/SPECTRAL_WINDOW", readonly=True, ack=False) as tab: freqs = tab.getcol("CHAN_FREQ") freqs = np.squeeze(freqs) @@ -195,7 +195,7 @@ def get_freqs_from_ms(ms: Union[MS, Path]) -> np.ndarray: return freqs -def get_phase_dir_from_ms(ms: Union[MS, Path]) -> SkyCoord: +def get_phase_dir_from_ms(ms: MS | Path) -> SkyCoord: """Extract the phase direction from a measurement set. If more than one phase direction is found an AssertError will @@ -209,7 +209,7 @@ def get_phase_dir_from_ms(ms: Union[MS, Path]) -> SkyCoord: """ ms = MS.cast(ms) - with table(f"{str(ms.path)}/FIELD", readonly=True, ack=False) as tab: + with table(f"{ms.path!s}/FIELD", readonly=True, ack=False) as tab: phase_dir = tab.getcol("PHASE_DIR")[0] assert phase_dir.shape[0] == 1, "More than one phase direction found. " @@ -219,7 +219,7 @@ def get_phase_dir_from_ms(ms: Union[MS, Path]) -> SkyCoord: return phase_sky -def get_times_from_ms(ms: Union[MS, Path]) -> Time: +def get_times_from_ms(ms: MS | Path) -> Time: """Return the observation times from an ASKAP Measurement set. Args: @@ -236,7 +236,7 @@ def get_times_from_ms(ms: Union[MS, Path]) -> Time: return times -def get_telescope_location_from_ms(ms: Union[MS, Path]) -> EarthLocation: +def get_telescope_location_from_ms(ms: MS | Path) -> EarthLocation: """Return the telescope location from an ASKAP Measurement set. Args: @@ -255,7 +255,7 @@ def get_telescope_location_from_ms(ms: Union[MS, Path]) -> EarthLocation: def get_pol_axis_from_ms( - ms: Union[MS, Path], feed_idx: Optional[int] = None, col: str = "RECEPTOR_ANGLE" + ms: MS | Path, feed_idx: int | None = None, col: str = "RECEPTOR_ANGLE" ) -> u.Quantity: """Get the polarization axis from the ASKAP MS. 
Checks are performed to ensure this polarisation axis angle is constant throughout the observation. @@ -304,7 +304,7 @@ def get_pol_axis_from_ms( # TODO: Inline with other changing conventions this should be # changed to `create_ms_summary` -def describe_ms(ms: Union[MS, Path], verbose: bool = False) -> MSSummary: +def describe_ms(ms: MS | Path, verbose: bool = False) -> MSSummary: """Print some basic information from the inpute measurement set. Args: @@ -356,11 +356,11 @@ def describe_ms(ms: Union[MS, Path], verbose: bool = False) -> MSSummary: def split_by_field( - ms: Union[MS, Path], - field: Optional[str] = None, - out_dir: Optional[Path] = None, - column: Optional[str] = None, -) -> List[MS]: + ms: MS | Path, + field: str | None = None, + out_dir: Path | None = None, + column: str | None = None, +) -> list[MS]: """Attempt to split an input measurement set up by the unique FIELDs recorded Args: @@ -382,7 +382,7 @@ def split_by_field( fields = [field] if field else ms_summary.fields field_idxs = [get_field_id_for_field(ms=ms, field_name=field) for field in fields] - out_mss: List[MS] = [] + out_mss: list[MS] = [] ms_out_dir: Path = Path(out_dir) if out_dir is not None else ms.path.parent logger.info(f"Will write output MSs to {ms_out_dir}.") @@ -404,7 +404,7 @@ def split_by_field( out_ms_str = create_ms_name(ms_path=ms.path, field=split_name) out_path = ms_out_dir / Path(out_ms_str).name - logger.info(f"Writing {str(out_path)} for {split_name}") + logger.info(f"Writing {out_path!s} for {split_name}") sub_ms.copy(str(out_path), deep=True) out_mss.append( @@ -415,9 +415,9 @@ def split_by_field( def check_column_in_ms( - ms: Union[MS, str, PathLike], - column: Optional[str] = None, - sub_table: Optional[str] = None, + ms: MS | str | PathLike, + column: str | None = None, + sub_table: str | None = None, ) -> bool: """Checks to see whether a column exists in an MS. If `column` is provided this is checked. It `column` is None, then the MS.column is specified. 
If both are @@ -443,7 +443,7 @@ def check_column_in_ms( raise ValueError(f"No column to check specified: {ms} {column=}.") ms_path = ms.path if isinstance(ms, MS) else Path(ms) - check_table = str(ms_path) if sub_table is None else f"{str(ms_path)}/{sub_table}" + check_table = str(ms_path) if sub_table is None else f"{ms_path!s}/{sub_table}" logger.debug(f"Checking for {check_col} in {check_table}") with table(check_table, readonly=True) as tab: @@ -467,7 +467,7 @@ def consistent_ms(ms1: MS, ms2: MS) -> bool: bool: Whether MS1 is consistent with MS2 """ - logger.info(f"Comparing ms1={str(ms1.path)} to ms2={(ms2.path)}") + logger.info(f"Comparing ms1={ms1.path!s} to ms2={(ms2.path)}") beam1 = get_beam_from_ms(ms=ms1) beam2 = get_beam_from_ms(ms=ms2) @@ -499,13 +499,13 @@ def consistent_ms(ms1: MS, ms2: MS) -> bool: result = False if not result: - logger.info(f"{str(ms1.path)} not compatibale with {str(ms2.path)}, {reasons=}") + logger.info(f"{ms1.path!s} not compatible with {ms2.path!s}, {reasons=}") return result def consistent_channelwise_frequencies( - freqs: Union[List[np.ndarray], np.ndarray], + freqs: list[np.ndarray] | np.ndarray, ) -> np.ndarray: """Given a collection of frequencies in the form of (N, frequencies), inspect the frequencies channelwise @@ -531,7 +531,7 @@ def consistent_channelwise_frequencies( return freqs_are_same -def consistent_ms_frequencies(mss: Tuple[MS, ...]) -> bool: +def consistent_ms_frequencies(mss: tuple[MS, ...]) -> bool: """Given a set of measurement sets, inspect the frequencies to ensure they are all the same @@ -593,8 +593,8 @@ def rename_column_in_ms( def remove_columns_from_ms( - ms: Union[MS, Path], columns_to_remove: Union[str, List[str]] -) -> List[str]: + ms: MS | Path, columns_to_remove: str | list[str] +) -> list[str]: """Attempt to remove a collection of columns from a measurement set. If any of the provided columns do not exist they are ignored.
@@ -625,8 +625,8 @@ def remove_columns_from_ms( def subtract_model_from_data_column( ms: MS, model_column: str = "MODEL_DATA", - data_column: Optional[str] = None, - output_column: Optional[str] = None, + data_column: str | None = None, + output_column: str | None = None, update_tracked_column: bool = False, ) -> MS: """Execute a ``taql`` query to subtract the MODEL_DATA from a nominated data column. @@ -680,7 +680,7 @@ def subtract_model_from_data_column( # as it is currently being used in unclear ways. Specifically there is a renaming # of the data_column to instrument_column before the rotation of things def preprocess_askap_ms( - ms: Union[MS, Path], + ms: MS | Path, data_column: str = "DATA", instrument_column: str = "INSTRUMENT_DATA", overwrite: bool = True, @@ -717,19 +717,19 @@ def preprocess_askap_ms( data_column != instrument_column ), f"Received matching column names: {data_column=} {instrument_column=}" - logger.info( - f"Will be running ASKAP MS conversion operations against {str(ms.path)}." - ) + logger.info(f"Will be running ASKAP MS conversion operations against {ms.path!s}.") logger.info("Correcting directions. ") with table(str(ms.path), ack=False, readonly=False) as tab: colnames = tab.colnames() if data_column not in colnames: raise ValueError( - f"Column {data_column} not found in {str(ms.path)}. Columns found: {colnames}" + f"Column {data_column} not found in {ms.path!s}. Columns found: {colnames}" ) if all([col in colnames for col in (data_column, instrument_column)]): - msg = f"Column {instrument_column} already in {str(ms.path)}. Already corrected?" + msg = ( + f"Column {instrument_column} already in {ms.path!s}. Already corrected?" 
+ ) if not overwrite: raise ValueError(msg) @@ -768,7 +768,7 @@ def preprocess_askap_ms( def copy_and_preprocess_casda_askap_ms( - casda_ms: Union[MS, Path], + casda_ms: MS | Path, data_column: str = "DATA", instrument_column: str = "INSTRUMENT_DATA", fix_stokes_factor: bool = True, @@ -806,7 +806,7 @@ def copy_and_preprocess_casda_askap_ms( ms = ms.with_options(path=out_ms_path) logger.info( - f"Will be running CASDA ASKAP MS conversion operations against {str(ms.path)}." + f"Will be running CASDA ASKAP MS conversion operations against {ms.path!s}." ) with table(str(ms.path), ack=False, readonly=False) as tab: @@ -836,7 +836,7 @@ def copy_and_preprocess_casda_askap_ms( def rename_ms_and_columns_for_selfcal( ms: MS, - target: Union[str, Path], + target: str | Path, corrected_data: str = "CORRECTED_DATA", data: str = "DATA", ) -> MS: @@ -922,10 +922,10 @@ def rename_ms_and_columns_for_selfcal( def find_mss( mss_parent_path: Path, - expected_ms_count: Optional[int] = 36, - data_column: Optional[str] = None, - model_column: Optional[str] = None, -) -> Tuple[MS, ...]: + expected_ms_count: int | None = 36, + data_column: str | None = None, + model_column: str | None = None, +) -> tuple[MS, ...]: """Search a directory to find measurement sets via a simple `*.ms` glob expression. An expected number of MSs can be enforced via the `expected_ms_count` option. @@ -941,7 +941,7 @@ def find_mss( """ assert ( mss_parent_path.exists() and mss_parent_path.is_dir() - ), f"{str(mss_parent_path)} does not exist or is not a folder. " + ), f"{mss_parent_path!s} does not exist or is not a folder. " found_mss = tuple( [MS.cast(ms_path) for ms_path in sorted(mss_parent_path.glob("*.ms"))] @@ -950,7 +950,7 @@ def find_mss( if expected_ms_count: assert ( len(found_mss) == expected_ms_count - ), f"Expected to find {expected_ms_count} in {str(mss_parent_path)}, found {len(found_mss)}." + ), f"Expected to find {expected_ms_count} in {mss_parent_path!s}, found {len(found_mss)}." 
if data_column or model_column: logger.info(f"Updating column attribute to {data_column=}") diff --git a/flint/naming.py b/flint/naming.py index 5e38dbf6..ec5ab452 100644 --- a/flint/naming.py +++ b/flint/naming.py @@ -2,16 +2,18 @@ products. """ +from __future__ import annotations + import re from datetime import datetime from pathlib import Path -from typing import Any, Dict, List, NamedTuple, Optional, Tuple, Union +from typing import Any, NamedTuple from flint.logging import logger from flint.options import MS -def get_fits_cube_from_paths(paths: List[Path]) -> List[Path]: +def get_fits_cube_from_paths(paths: list[Path]) -> list[Path]: """Given a list of files, find the ones that appear to be FITS files and contain the ``.cube.`` field indicator. A regular expression searching for both the ``.cube.`` and ``.fits`` file type is used. @@ -42,7 +44,7 @@ def _long_field_name_to_shorthand(long_name: str) -> str: def create_name_from_common_fields( - in_paths: Tuple[Path, ...], additional_suffixes: Optional[str] = None + in_paths: tuple[Path, ...], additional_suffixes: str | None = None ) -> Path: """Attempt to craft a base name using the field elements that are in common. The expectation that these are paths that can be processed by the ``processed_name_format`` @@ -106,8 +108,8 @@ def create_name_from_common_fields( # TODO: Need to assess the mode argument, and define literals that are accepted def create_image_cube_name( image_prefix: Path, - mode: Optional[Union[str, List[str]]] = None, - suffix: Optional[Union[str, List[str]]] = None, + mode: str | list[str] | None = None, + suffix: str | list[str] | None = None, ) -> Path: """Create a consistent naming scheme when combining images into cube images. Intended to be used when combining many subband images together into a single cube. @@ -130,7 +132,7 @@ def create_image_cube_name( """ # NOTE: This is likely a function to grow in time as more imaging and pipeline modes added. 
Putting # it here for future proofing - output_cube_name = f"{str(Path(image_prefix))}.{mode}.{suffix}" + output_cube_name = f"{Path(image_prefix)!s}.{mode}.{suffix}" output_components = [str(Path(image_prefix))] if mode: @@ -157,9 +159,9 @@ def create_image_cube_name( def create_imaging_name_prefix( - ms: Union[MS, Path], - pol: Optional[str] = None, - channel_range: Optional[Tuple[int, int]] = None, + ms: MS | Path, + pol: str | None = None, + channel_range: tuple[int, int] | None = None, ) -> str: """Given a measurement set and a polarisation, create the naming prefix to be used by some imager @@ -184,7 +186,7 @@ def create_imaging_name_prefix( return ".".join(names) -def get_beam_resolution_str(mode: str, marker: Optional[str] = None) -> str: +def get_beam_resolution_str(mode: str, marker: str | None = None) -> str: """Map a beam resolution mode to an appropriate suffix. This is located her in anticipation of other imaging modes. @@ -203,7 +205,7 @@ def get_beam_resolution_str(mode: str, marker: Optional[str] = None) -> str: # NOTE: Arguably this is a trash and needless function. Adding it # in case other modes are ever needed or referenced. No idea whether # it will ever been needed and could be removed in future. 
- supported_modes: Dict[str, str] = dict(optimal="optimal", fixed="fixed", raw="raw") + supported_modes: dict[str, str] = dict(optimal="optimal", fixed="fixed", raw="raw") if mode.lower() not in supported_modes.keys(): raise ValueError( f"Received {mode=}, supported modes are {supported_modes.keys()}" @@ -234,7 +236,7 @@ def get_selfcal_ms_name(in_ms_path: Path, round: int = 1) -> Path: name_str = str(in_ms_path.name) name = f"{name_str[:span[0]]}.round{round}.ms" else: - name = f"{str(in_ms_path.stem)}.round{round}.ms" + name = f"{in_ms_path.stem!s}.round{round}.ms" out_ms_path = in_ms_path.parent / name assert ( @@ -245,7 +247,7 @@ def get_selfcal_ms_name(in_ms_path: Path, round: int = 1) -> Path: def add_timestamp_to_path( - input_path: Union[Path, str], timestamp: Optional[datetime] = None + input_path: Path | str, timestamp: datetime | None = None ) -> Path: """Add a timestamp to a input path, where the timestamp is the current data and time. The time will be added to the name component @@ -278,15 +280,15 @@ class CASDANameComponents(NamedTuple): """The name of the field extracted""" beam: str """Beam number of the data""" - spw: Optional[str] = None + spw: str | None = None """If multiple MS were written as the data were in a high-frequency resolution mode, which segment""" - alias: Optional[str] = None + alias: str | None = None """Older ASKAP MSs could be packed with multiple fields. The ASKAP pipeline holds this field as an alias. They are now the same in almost all cases as the field. """ format: str = "science" """What the format / type of the data the MS is. """ -def casda_ms_format(in_name: Union[str, Path]) -> Union[CASDANameComponents, None]: +def casda_ms_format(in_name: str | Path) -> CASDANameComponents | None: """Break up a CASDA sty;e MS name (really the askap pipeline format) into its recognised parts. if a match fails a `None` is returned. 
@@ -332,11 +334,11 @@ class RawNameComponents(NamedTuple): """Time that the data were written""" beam: str """Beam number of the data""" - spw: Optional[str] = None + spw: str | None = None """If multiple MS were written as the data were in a high-frequency resolution mode, which segment""" -def raw_ms_format(in_name: str) -> Union[None, RawNameComponents]: +def raw_ms_format(in_name: str) -> None | RawNameComponents: """The typical ASKAP measurement written to the ingest disks has the form: @@ -383,21 +385,21 @@ class ProcessedNameComponents(NamedTuple): """The sbid of the observation""" field: str """The name of the field extracted""" - beam: Optional[str] = None + beam: str | None = None """The beam of the observation processed""" - spw: Optional[str] = None + spw: str | None = None """The SPW of the observation. If there is only one spw this is None.""" - round: Optional[str] = None + round: str | None = None """The self-calibration round detected. This might be represented as 'noselfcal' in some image products, e.g. linmos. """ - pol: Optional[str] = None + pol: str | None = None """The polarisation component, if it exists, in a filename. Examples are 'i','q','u','v'. Could be combinations in some cases depending on how it was created (e.g. based on wsclean pol option). """ - channel_range: Optional[Tuple[int, int]] = None + channel_range: tuple[int, int] | None = None """The channel range encoded in an file name. Generally are zero-padded, and are two fields of the form ch1234-1235, where the upper bound is exclusive. Defaults to none.""" def processed_ms_format( - in_name: Union[str, Path], -) -> Union[ProcessedNameComponents, None]: + in_name: str | Path, +) -> ProcessedNameComponents | None: """Will take a formatted name (i.e. one derived from the flint.naming.create_ms_name) and attempt to extract its main components. This includes the SBID, field, beam and spw. 
@@ -415,15 +417,13 @@ def processed_ms_format( # TODOL At very least I think the beam should become options # A raw string is used to avoid bad unicode escaping regex = re.compile( - ( - r"^SB(?P[0-9]+)" - r"\.(?P[^.]+)" - r"((\.beam(?P[0-9]+))?)" - r"((\.spw(?P[0-9]+))?)" - r"((\.round(?P[0-9]+))?)" - r"((\.(?P(i|q|u|v|xx|yy|xy|yx)+))?)" - r"((\.ch(?P([0-9]+))-(?P([0-9]+)))?)" - ) + r"^SB(?P[0-9]+)" + r"\.(?P[^.]+)" + r"((\.beam(?P[0-9]+))?)" + r"((\.spw(?P[0-9]+))?)" + r"((\.round(?P[0-9]+))?)" + r"((\.(?P(i|q|u|v|xx|yy|xy|yx)+))?)" + r"((\.ch(?P([0-9]+))-(?P([0-9]+)))?)" ) results = regex.match(in_name) @@ -449,8 +449,8 @@ def processed_ms_format( def extract_components_from_name( - name: Union[str, Path], -) -> Union[RawNameComponents, ProcessedNameComponents, CASDANameComponents]: + name: str | Path, +) -> RawNameComponents | ProcessedNameComponents | CASDANameComponents: """Attempts to break down a file name of a recognised format into its principal compobnents. Presumably this is a measurement set or something derived from it (i.e. images). @@ -495,7 +495,7 @@ def extract_components_from_name( return results -def extract_beam_from_name(name: Union[str, Path]) -> int: +def extract_beam_from_name(name: str | Path) -> int: """Attempts to extract the beam number from some input name should it follow a known naming convention. @@ -519,7 +519,7 @@ def extract_beam_from_name(name: Union[str, Path]) -> int: def create_ms_name( - ms_path: Path, sbid: Optional[int] = None, field: Optional[str] = None + ms_path: Path, sbid: int | None = None, field: str | None = None ) -> str: """Create a consistent naming scheme for measurement sets. 
At present it is intended to be used for splitting fields from raw measurement @@ -535,7 +535,7 @@ def create_ms_name( """ ms_path = Path(ms_path).absolute() - ms_name_list: List[Any] = [] + ms_name_list: list[Any] = [] format_components = extract_components_from_name(name=ms_path) @@ -676,7 +676,7 @@ def get_sbid_from_path(path: Path) -> int: if not sbid.isdigit(): raise ValueError( - f"Extracted {sbid=} from {str(path)} failed appears to be non-conforming - it is not a number! " + f"Extracted {sbid=} from {path!s} failed appears to be non-conforming - it is not a number! " ) return int(sbid) @@ -749,12 +749,12 @@ class FITSMaskNames(NamedTuple): mask_fits: Path """Name of the mask FITS file""" - signal_fits: Optional[Path] = None + signal_fits: Path | None = None """Name of the signal FITS file""" def create_fits_mask_names( - fits_image: Union[str, Path], include_signal_path: bool = False + fits_image: str | Path, include_signal_path: bool = False ) -> FITSMaskNames: """Create the names that will be used when generate FITS mask products diff --git a/flint/options.py b/flint/options.py index 03da6357..37ddde23 100644 --- a/flint/options.py +++ b/flint/options.py @@ -13,28 +13,24 @@ from argparse import ArgumentParser, Namespace from pathlib import Path -from pydantic import BaseModel, ConfigDict -from pydantic.fields import FieldInfo from typing import ( Any, - Dict, - List, NamedTuple, - Optional, - Union, - Tuple, TypeVar, + Union, get_args, get_origin, ) import yaml +from pydantic import BaseModel, ConfigDict +from pydantic.fields import FieldInfo from flint.exceptions import MSError from flint.logging import logger -def options_to_dict(input_options: Any) -> Dict: +def options_to_dict(input_options: Any) -> dict: """Helper function to convert an `Options` type class to a dictionary. 
Most of `flint` `Option` and `Result` classes used `typing.NamedTuples`, which carry with @@ -89,11 +85,11 @@ def with_options(self: T, /, **kwargs) -> T: return self.__class__(**new_args) - def _asdict(self) -> Dict[str, Any]: + def _asdict(self) -> dict[str, Any]: return self.__dict__ -def _create_argparse_options(name: str, field: FieldInfo) -> Tuple[str, Dict[str, Any]]: +def _create_argparse_options(name: str, field: FieldInfo) -> tuple[str, dict[str, Any]]: """Convert a pydantic Field into ``dict`` to splate into ArgumentParser.add_argument()""" field_name = name if field.is_required() else "--" + name.replace("_", "-") @@ -193,9 +189,9 @@ class BandpassOptions(BaseOptions): a single bandpass pipeline run """ - flagger_container: Optional[Path] = None + flagger_container: Path | None = None """Path to the singularity aoflagger container""" - calibrate_container: Optional[Path] = None + calibrate_container: Path | None = None """Path to the singularity calibrate container""" expected_ms: int = 36 """The expected number of measurement set files to find""" @@ -207,13 +203,13 @@ class BandpassOptions(BaseOptions): """The polynomial order used by the Savgol filter when smoothing the bandpass solutions""" flag_calibrate_rounds: int = 3 """The number of times the bandpass will be calibrated, flagged, then recalibrated""" - minuv: Optional[float] = None + minuv: float | None = None """The minimum baseline length, in meters, for data to be included in bandpass calibration stage""" preflagger_ant_mean_tolerance: float = 0.2 """Tolerance that the mean x/y antenna gain ratio test before the antenna is flagged""" preflagger_mesh_ant_flags: bool = False """Share channel flags from bandpass solutions between all antenna""" - preflagger_jones_max_amplitude: Optional[float] = None + preflagger_jones_max_amplitude: float | None = None """Flag Jones matrix if any amplitudes with a Jones are above this value""" @@ -225,11 +221,11 @@ class 
AddModelSubtractFieldOptions(BaseOptions): attempt_addmodel: bool = False """Invoke the ``addmodel`` visibility prediction, including the search for the ``wsclean`` source list""" - wsclean_pol_mode: List[str] = ["i"] + wsclean_pol_mode: list[str] = ["i"] """The polarisation of the wsclean model that was generated""" - calibrate_container: Optional[Path] = None + calibrate_container: Path | None = None """Path to the container with the calibrate software (including addmodel)""" - addmodel_cluster_config: Optional[Path] = None + addmodel_cluster_config: Path | None = None """Specify a new cluster configuration file different to the preferred on. If None, drawn from preferred cluster config""" @@ -247,9 +243,9 @@ class SubtractFieldOptions(BaseOptions): """Describe the column that should be imaed and, if requested, have model subtracted from""" expected_ms: int = 36 """The number of measurement sets that should exist""" - imaging_strategy: Optional[Path] = None + imaging_strategy: Path | None = None """Path to a FLINT imaging yaml file that contains settings to use throughout imaging""" - holofile: Optional[Path] = None + holofile: Path | None = None """Path to the holography FITS cube that will be used when co-adding beams""" linmos_residuals: bool = False """Linmos the cleaning residuals together into a field image""" @@ -257,7 +253,7 @@ class SubtractFieldOptions(BaseOptions): """Cutoff in arcseconds to use when calculating the common beam to convol to""" pb_cutoff: float = 0.1 """Primary beam attenuation cutoff to use during linmos""" - stagger_delay_seconds: Optional[float] = None + stagger_delay_seconds: float | None = None """The delay, in seconds, that should be used when submitting items in batches (e.g. looping over channels)""" attempt_subtract: bool = False """Attempt to subtract the model column from the nominated data column""" @@ -280,41 +276,41 @@ class FieldOptions(BaseOptions): rounds of self-calibration. 
""" - flagger_container: Optional[Path] = None + flagger_container: Path | None = None """Path to the singularity aoflagger container""" - calibrate_container: Optional[Path] = None + calibrate_container: Path | None = None """Path to the singularity calibrate container""" - casa_container: Optional[Path] = None + casa_container: Path | None = None """Path to the singularity CASA container""" expected_ms: int = 36 """The expected number of measurement set files to find""" - wsclean_container: Optional[Path] = None + wsclean_container: Path | None = None """Path to the singularity wsclean container""" - yandasoft_container: Optional[Path] = None + yandasoft_container: Path | None = None """Path to the singularity yandasoft container""" - potato_container: Optional[Path] = None + potato_container: Path | None = None """Path to the singularity potato peel container""" - holofile: Optional[Path] = None + holofile: Path | None = None """Path to the holography FITS cube that will be used when co-adding beams""" rounds: int = 2 """Number of required rouds of self-calibration and imaging to perform""" - skip_selfcal_on_rounds: Optional[List[int]] = None + skip_selfcal_on_rounds: list[int] | None = None """Do not perform the derive and apply self-calibration solutions on these rounds""" zip_ms: bool = False """Whether to zip measurement sets once they are no longer required""" run_aegean: bool = False """Whether to run the aegean source finding tool""" - aegean_container: Optional[Path] = None + aegean_container: Path | None = None """Path to the singularity aegean container""" no_imaging: bool = False """Whether to skip the imaging process (including self-calibration)""" - reference_catalogue_directory: Optional[Path] = None + reference_catalogue_directory: Path | None = None """Path to the directory container the reference catalogues, used to generate validation plots""" linmos_residuals: bool = False """Linmos the cleaning residuals together into a field image""" 
beam_cutoff: float = 150 """Cutoff in arcseconds to use when calculating the common beam to convol to""" - fixed_beam_shape: Optional[List[float]] = None + fixed_beam_shape: list[float] | None = None """Specify the final beamsize of linmos field images in (arcsec, arcsec, deg)""" pb_cutoff: float = 0.1 """Primary beam attenuation cutoff to use during linmos""" @@ -326,13 +322,13 @@ class FieldOptions(BaseOptions): """Construct beam masks from MFS images to use for the next round of imaging. """ use_beam_masks_from: int = 1 """If `use_beam_masks` is True, this sets the round where beam masks will be generated from""" - use_beam_masks_rounds: Optional[List[int]] = None + use_beam_masks_rounds: list[int] | None = None """If `use_beam_masks` is True, this sets which rounds should have a mask applied""" - imaging_strategy: Optional[Path] = None + imaging_strategy: Path | None = None """Path to a FLINT imaging yaml file that contains settings to use throughout imaging""" - sbid_archive_path: Optional[Path] = None + sbid_archive_path: Path | None = None """Path that SBID archive tarballs will be created under. If None no archive tarballs are created. See ArchiveOptions. """ - sbid_copy_path: Optional[Path] = None + sbid_copy_path: Path | None = None """Path that final processed products will be copied into. If None no copying of file products is performed. See ArchiveOptions. """ rename_ms: bool = False """Rename MSs throughout rounds of imaging and self-cal instead of creating copies. This will delete data-columns throughout. """ @@ -400,9 +396,9 @@ def dump_field_options_to_yaml( class ArchiveOptions(BaseOptions): """Container for options related to archiving products from flint workflows""" - tar_file_re_patterns: Tuple[str, ...] = DEFAULT_TAR_RE_PATTERNS + tar_file_re_patterns: tuple[str, ...] = DEFAULT_TAR_RE_PATTERNS """Regular-expressions to use to collect files that should be tarballed""" - copy_file_re_patterns: Tuple[str, ...] 
= DEFAULT_COPY_RE_PATTERNS + copy_file_re_patterns: tuple[str, ...] = DEFAULT_COPY_RE_PATTERNS """Regular-expressions used to identify files to copy into a final location (not tarred)""" @@ -415,15 +411,15 @@ class MS(NamedTuple): path: Path """Path to the measurement set that is being represented""" - column: Optional[str] = None + column: str | None = None """Column that should be operated against""" - beam: Optional[int] = None + beam: int | None = None """The beam ID of the MS within an ASKAP field""" - spw: Optional[int] = None + spw: int | None = None """Intended to be used with ASKAP high-frequency resolution modes, where the MS is divided into SPWs""" - field: Optional[str] = None + field: str | None = None """The field name of the data""" - model_column: Optional[str] = None + model_column: str | None = None """The column name of the most recently MODEL data""" @property @@ -431,7 +427,7 @@ def ms(self) -> MS: return self @classmethod - def cast(cls, ms: Union[MS, Path]) -> MS: + def cast(cls, ms: MS | Path) -> MS: """Create/return a MS instance given either a Path or MS. 
If the input is neither a MS instance or Path, the object will diff --git a/flint/peel/potato.py b/flint/peel/potato.py index 2e346718..b519e7ec 100644 --- a/flint/peel/potato.py +++ b/flint/peel/potato.py @@ -19,7 +19,7 @@ from argparse import ArgumentParser from pathlib import Path -from typing import Any, Collection, Dict, NamedTuple, Optional, Tuple, Union +from typing import Any, Collection, NamedTuple import astropy.units as u import numpy as np @@ -97,7 +97,7 @@ class PotatoPeelOptions(NamedTuple): https://gitlab.com/Sunmish/potato """ - c: Optional[Path] = None + c: Path | None = None """Path to the potatopeel configuration file""" solint: float = 30 """Solution interval to use when applying gaincal""" @@ -111,11 +111,11 @@ class PotatoPeelOptions(NamedTuple): """Whether a direct model subtraction (without self-cal) should be used ift he source is faint""" intermediate_peels: bool = True """Creates an image after each calibration and subtraction loop to show iterative improvements of the subject peel source""" - T: Union[str, Path] = "peel" + T: str | Path = "peel" """Where the temporary wsclean files will be written to""" - minuvimage: Optional[float] = None + minuvimage: float | None = None """The minimum uv distance in wavelengths to use for imaging""" - minuvpeel: Optional[float] = None + minuvpeel: float | None = None """The minimum uv distance in wavelengths to use when attempting to self-calibrate""" def with_options(self, **kwargs) -> PotatoPeelOptions: @@ -145,7 +145,7 @@ def source_within_image_fov( source_coord: SkyCoord, beam_coord: SkyCoord, image_size: int, - pixel_scale: Union[u.Quantity, str], + pixel_scale: u.Quantity | str, ) -> bool: """Evaluate whether a source will be within the field of view of an image. 
@@ -182,8 +182,8 @@ def find_sources_to_peel( field_idx: int = 0, maximum_offset: float = 30, minimum_apparent_brightness: float = 0.5, - override_beam_position_with: Optional[SkyCoord] = None, -) -> Union[Table, None]: + override_beam_position_with: SkyCoord | None = None, +) -> Table | None: """Obtain a set of sources to peel from a reference candidate set. This will evaluate whether a source should be peels based on two criteria: @@ -294,10 +294,8 @@ def prepare_ms_for_potato(ms: MS) -> MS: logger.info(f"The nominated column is: {data_column=}") logger.warning( - ( - "Deleting and renaming columns so final column is DATA. " - "PotatoPeel only operates on the DATA column. " - ) + "Deleting and renaming columns so final column is DATA. " + "PotatoPeel only operates on the DATA column. " ) # If the data column already exists and is the nominated column, then we should @@ -316,7 +314,7 @@ def prepare_ms_for_potato(ms: MS) -> MS: colnames = tab.colnames() if data_column not in colnames: raise ValueError( - f"Column {data_column} not found in {str(ms.path)}. Columns found: {colnames}" + f"Column {data_column} not found in {ms.path!s}. 
Columns found: {colnames}" ) # In order to rename the data_column to DATA, we need to make sure that @@ -341,8 +339,8 @@ def prepare_ms_for_potato(ms: MS) -> MS: def _potato_options_to_command( - potato_options: Union[PotatoPeelArguments, PotatoConfigOptions, PotatoPeelOptions], - skip_keys: Optional[Collection[str]] = None, + potato_options: PotatoPeelArguments | PotatoConfigOptions | PotatoPeelOptions, + skip_keys: Collection[str] | None = None, check_double_keys: bool = False, ) -> str: """Construct the CLI options that would be provided to @@ -385,7 +383,7 @@ def _potato_options_to_command( sub_options += f"{flag}{key} {value} " elif isinstance(value, Path): logger.debug("Path") - sub_options += f"{flag}{key} {str(value)} " + sub_options += f"{flag}{key} {value!s} " elif value is None: continue else: @@ -418,7 +416,7 @@ def _potato_config_command( PotatoconfigCommand: The CLI command that will be executed to create a potato configuration file """ - command = "peel_configuration.py " f"{str(config_path)} " + command = "peel_configuration.py " f"{config_path!s} " sub_options = _potato_options_to_command(potato_options=potato_config_options) command = command + sub_options @@ -429,7 +427,7 @@ def _potato_config_command( def create_run_potato_config( potato_container: Path, - ms_path: Union[Path, MS], + ms_path: Path | MS, potato_config_options: PotatoConfigOptions, ) -> PotatoConfigCommand: """Construct and run a CLI command into the `peel_configuration.py` @@ -488,7 +486,7 @@ def _potato_peel_command( command = ( "hot_potato " - f"{str(ms.path.absolute())} " + f"{ms.path.absolute()!s} " f"{potato_peel_arguments.image_fov:.4f} " ) @@ -554,13 +552,13 @@ class NormalisedSources(NamedTuple): would be provided to potato """ - source_ras: Tuple[float] + source_ras: tuple[float] """The RAs in degrees""" - source_decs: Tuple[float] + source_decs: tuple[float] """The Decs in degrees""" - source_fovs: Tuple[float] + source_fovs: tuple[float] """The size of each source to 
image in degrees""" - source_names: Tuple[str] + source_names: tuple[str] """The name of each source""" @@ -605,9 +603,9 @@ def _print_ms_colnames(ms: MS) -> MS: def potato_peel( ms: MS, potato_container: Path, - update_potato_config_options: Optional[Dict[str, Any]] = None, - update_potato_peel_options: Optional[Dict[str, Any]] = None, - image_options: Optional[WSCleanOptions] = None, + update_potato_config_options: dict[str, Any] | None = None, + update_potato_peel_options: dict[str, Any] | None = None, + image_options: WSCleanOptions | None = None, ) -> MS: """Peel out sources from a measurement set using PotatoPeel. Candidate sources from a known list of sources (see Table 3 or RACS-Mid paper) are considered. diff --git a/flint/prefect/clusters.py b/flint/prefect/clusters.py index 42c3a88b..3c883e4b 100644 --- a/flint/prefect/clusters.py +++ b/flint/prefect/clusters.py @@ -4,9 +4,11 @@ operations. """ +from __future__ import annotations + from glob import glob from pathlib import Path -from typing import Any, Dict, List, Optional, Union +from typing import Any import yaml from prefect_dask import DaskTaskRunner @@ -14,7 +16,7 @@ from flint.utils import get_packaged_resource_path -def list_packaged_clusters() -> List[str]: +def list_packaged_clusters() -> list[str]: """Return a list of cluster names that are available in the packaged set of dask_jobqueue specification YAML files. @@ -33,7 +35,7 @@ def list_packaged_clusters() -> List[str]: return clusters -def get_cluster_spec(cluster: Union[str, Path]) -> Dict[Any, Any]: +def get_cluster_spec(cluster: str | Path) -> dict[Any, Any]: """ Given a cluster name, obtain the appropriate SLURM configuration file appropriate for use with SLURMCluster. @@ -67,15 +69,15 @@ def get_cluster_spec(cluster: Union[str, Path]) -> Dict[Any, Any]: f"{cluster=} is not known, or its YAML file could not be loaded. 
Known clusters are {KNOWN_CLUSTERS}" ) - with open(yaml_file, "r") as in_file: + with open(yaml_file) as in_file: spec = yaml.load(in_file, Loader=yaml.Loader) return spec def get_dask_runner( - cluster: Union[str, Path] = "galaxy_small", - extra_cluster_kwargs: Optional[Dict[str, Any]] = None, + cluster: str | Path = "galaxy_small", + extra_cluster_kwargs: dict[str, Any] | None = None, ) -> DaskTaskRunner: """Creates and returns a DaskTaskRunner configured to established a SLURMCluster instance to manage a set of dask-workers. The SLURMCluster is currently configured only for Galaxy. diff --git a/flint/prefect/common/imaging.py b/flint/prefect/common/imaging.py index 91941dd0..8143c75f 100644 --- a/flint/prefect/common/imaging.py +++ b/flint/prefect/common/imaging.py @@ -4,11 +4,13 @@ imaging flows. """ +from __future__ import annotations + from pathlib import Path -from typing import Any, Collection, Dict, List, Literal, Optional, TypeVar, Union, Tuple +from typing import Any, Collection, Literal, TypeVar -import pandas as pd import numpy as np +import pandas as pd from prefect import task, unmapped from prefect.artifacts import create_table_artifact @@ -22,10 +24,10 @@ from flint.configuration import wrapper_options_from_strategy from flint.convol import ( BeamShape, + convolve_cubes, convolve_images, get_common_beam, get_cube_common_beam, - convolve_cubes, ) from flint.flagging import flag_ms_aoflagger from flint.imager.wsclean import ( @@ -84,9 +86,9 @@ def task_potato_peel( ms: MS, potato_container: Path, - update_potato_config_options: Optional[Dict[str, Any]] = None, - update_potato_peel_options: Optional[Dict[str, Any]] = None, - update_wsclean_options: Optional[Dict[str, Any]] = None, + update_potato_config_options: dict[str, Any] | None = None, + update_potato_peel_options: dict[str, Any] | None = None, + update_wsclean_options: dict[str, Any] | None = None, ) -> MS: logger.info(f"Attempting to peel {ms.path}") @@ -165,9 +167,9 @@ def 
task_extract_solution_path(calibrate_cmd: CalibrateCommand) -> Path: # BANE sometimes gets cauht in some stalled staTE @task(retries=3) def task_run_bane_and_aegean( - image: Union[WSCleanCommand, LinmosCommand], + image: WSCleanCommand | LinmosCommand, aegean_container: Path, - timelimit_seconds: Union[int, float] = 60 * 45, + timelimit_seconds: int | float = 60 * 45, ) -> AegeanOutputs: """Run BANE and Aegean against a FITS image. @@ -237,10 +239,10 @@ def task_zip_ms(in_item: WSCleanCommand) -> Path: @task @wrapper_options_from_strategy(update_options_keyword="update_gain_cal_options") def task_gaincal_applycal_ms( - ms: Union[MS, WSCleanCommand], + ms: MS | WSCleanCommand, selfcal_round: int, casa_container: Path, - update_gain_cal_options: Optional[Dict[str, Any]] = None, + update_gain_cal_options: dict[str, Any] | None = None, archive_input_ms: bool = False, skip_selfcal: bool = False, rename_ms: bool = False, @@ -288,11 +290,11 @@ def task_gaincal_applycal_ms( @task @wrapper_options_from_strategy(update_options_keyword="update_wsclean_options") def task_wsclean_imager( - in_ms: Union[ApplySolutions, MS], + in_ms: ApplySolutions | MS, wsclean_container: Path, - update_wsclean_options: Optional[Dict[str, Any]] = None, - fits_mask: Optional[FITSMaskNames] = None, - channel_range: Optional[Tuple[int, int]] = None, + update_wsclean_options: dict[str, Any] | None = None, + fits_mask: FITSMaskNames | None = None, + channel_range: tuple[int, int] | None = None, ) -> WSCleanCommand: """Run the wsclean imager against an input measurement set @@ -361,8 +363,8 @@ def task_wsclean_imager( def task_get_common_beam( wsclean_cmds: Collection[WSCleanCommand], cutoff: float = 25, - filter: Optional[str] = None, - fixed_beam_shape: Optional[List[float]] = None, + filter: str | None = None, + fixed_beam_shape: list[float] | None = None, ) -> BeamShape: """Compute a common beam size that all input images will be convoled to. 
@@ -387,7 +389,7 @@ def task_get_common_beam( logger.info(f"Using fixed {beam_shape=}") return beam_shape - images_to_consider: List[Path] = [] + images_to_consider: list[Path] = [] # TODO: This should support other image types for wsclean_cmd in wsclean_cmds: @@ -421,7 +423,7 @@ def task_get_common_beam( def task_get_cube_common_beam( wsclean_cmds: Collection[WSCleanCommand], cutoff: float = 25, -) -> List[BeamShape]: +) -> list[BeamShape]: """Compute a common beam size for input cubes. Args: @@ -432,7 +434,7 @@ def task_get_cube_common_beam( List[BeamShape]: The final convolving beam size to be used per channel in cubes """ - images_to_consider: List[Path] = [] + images_to_consider: list[Path] = [] # TODO: This should support other image types for wsclean_cmd in wsclean_cmds: @@ -457,7 +459,7 @@ def task_get_cube_common_beam( @task def task_convolve_cube( wsclean_cmd: WSCleanCommand, - beam_shapes: List[BeamShape], + beam_shapes: list[BeamShape], cutoff: float = 60, mode: Literal["image"] = "image", convol_suffix_str: str = "conv", @@ -512,7 +514,7 @@ def task_convolve_image( beam_shape: BeamShape, cutoff: float = 60, mode: str = "image", - filter: Optional[str] = None, + filter: str | None = None, convol_suffix_str: str = "conv", remove_original_images: bool = False, ) -> Collection[Path]: @@ -575,7 +577,7 @@ def task_convolve_image( for image_path in image_paths: image_beam = Beam.from_fits_header(fits.getheader(str(image_path))) logger.info( - f"{str(image_path.name)}: {image_beam.major.to(u.arcsecond)} {image_beam.minor.to(u.arcsecond)} {image_beam.pa}" + f"{image_path.name!s}: {image_beam.major.to(u.arcsecond)} {image_beam.minor.to(u.arcsecond)} {image_beam.pa}" ) convolved_images = convolve_images( @@ -596,14 +598,14 @@ def task_convolve_image( def task_linmos_images( images: Collection[Collection[Path]], container: Path, - filter: Optional[str] = ".MFS.", - field_name: Optional[str] = None, + filter: str | None = ".MFS.", + field_name: str | None = None, 
suffix_str: str = "noselfcal", - holofile: Optional[Path] = None, - sbid: Optional[Union[int, str]] = None, - parset_output_path: Optional[str] = None, + holofile: Path | None = None, + sbid: int | str | None = None, + parset_output_path: str | None = None, cutoff: float = 0.05, - field_summary: Optional[FieldSummary] = None, + field_summary: FieldSummary | None = None, trim_linmos_fits: bool = True, remove_original_images: bool = False, cleanup: bool = False, @@ -683,9 +685,9 @@ def task_linmos_images( def _convolve_linmos( wsclean_cmds: Collection[WSCleanCommand], beam_shape: BeamShape, - field_options: Union[FieldOptions, SubtractFieldOptions], + field_options: FieldOptions | SubtractFieldOptions, linmos_suffix_str: str, - field_summary: Optional[FieldSummary] = None, + field_summary: FieldSummary | None = None, convol_mode: str = "image", convol_filter: str = ".MFS.", convol_suffix_str: str = "conv", @@ -742,10 +744,10 @@ def _convolve_linmos( def _create_convol_linmos_images( wsclean_cmds: Collection[WSCleanCommand], field_options: FieldOptions, - field_summary: Optional[FieldSummary] = None, - current_round: Optional[int] = None, - additional_linmos_suffix_str: Optional[str] = None, -) -> List[LinmosCommand]: + field_summary: FieldSummary | None = None, + current_round: int | None = None, + additional_linmos_suffix_str: str | None = None, +) -> list[LinmosCommand]: """Derive the appropriate set of beam shapes and then produce corresponding convolved and co-added images @@ -759,7 +761,7 @@ def _create_convol_linmos_images( Returns: List[LinmosCommand]: The collection of linmos commands executed. 
""" - parsets: List[LinmosCommand] = [] + parsets: list[LinmosCommand] = [] # Come up with the linmos suffix to add to output file suffixes = [f"round{current_round}" if current_round else "noselfcal"] @@ -768,7 +770,7 @@ def _create_convol_linmos_images( main_linmos_suffix_str = ".".join(suffixes) - todo: List[Tuple[Any, str]] = [(None, get_beam_resolution_str(mode="optimal"))] + todo: list[tuple[Any, str]] = [(None, get_beam_resolution_str(mode="optimal"))] if field_options.fixed_beam_shape: logger.info( f"Creating second round of linmos images with {field_options.fixed_beam_shape}" @@ -822,8 +824,8 @@ def _create_convol_linmos_images( def _create_convolve_linmos_cubes( wsclean_cmds: Collection[WSCleanCommand], field_options: FieldOptions, - current_round: Optional[int] = None, - additional_linmos_suffix_str: Optional[str] = "cube", + current_round: int | None = None, + additional_linmos_suffix_str: str | None = "cube", ): suffixes = [f"round{current_round}" if current_round else "noselfcal"] if additional_linmos_suffix_str: @@ -855,9 +857,9 @@ def _create_convolve_linmos_cubes( @task @wrapper_options_from_strategy(update_options_keyword="update_masking_options") def task_create_image_mask_model( - image: Union[LinmosCommand, ImageSet, WSCleanCommand], + image: LinmosCommand | ImageSet | WSCleanCommand, image_products: AegeanOutputs, - update_masking_options: Optional[Dict[str, Any]] = None, + update_masking_options: dict[str, Any] | None = None, ) -> FITSMaskNames: """Create a mask from a linmos image, with the intention of providing it as a clean mask to an appropriate imager. This is derived using a simple signal to noise cut. 
@@ -931,7 +933,7 @@ def task_extract_beam_mask_image( assert ( wsclean_cmd.imageset is not None ), f"{wsclean_cmd.imageset=}, which should not happen" - beam_image = list(wsclean_cmd.imageset.image)[0] + beam_image = next(iter(wsclean_cmd.imageset.image)) beam_mask_names = extract_beam_mask_from_mosaic( fits_beam_image_path=beam_image, fits_mosaic_mask_names=linmos_mask_names ) @@ -970,7 +972,7 @@ def task_create_validation_plot( if upload_artifact: upload_image_as_artifact( - image_path=plot_path, description=f"Validation plot {str(plot_path)}" + image_path=plot_path, description=f"Validation plot {plot_path!s}" ) return plot_path @@ -1010,8 +1012,8 @@ def task_create_validation_tables( if upload_artifacts: for table in validation_tables: if isinstance(table, Path): - df = pd.read_csv(table) - df_dict = df.to_dict("records") + validation_df = pd.read_csv(table) + df_dict = validation_df.to_dict("records") create_table_artifact( table=df_dict, description=f"{table.stem}", @@ -1022,8 +1024,8 @@ def task_create_validation_tables( continue if not isinstance(subtable, Path): continue - df = pd.read_csv(subtable) - df_dict = df.to_dict("records") + sub_df = pd.read_csv(subtable) + df_dict = sub_df.to_dict("records") create_table_artifact( table=df_dict, description=f"{subtable.stem}", diff --git a/flint/prefect/common/ms.py b/flint/prefect/common/ms.py index 240aeb8a..ff5877b1 100644 --- a/flint/prefect/common/ms.py +++ b/flint/prefect/common/ms.py @@ -1,19 +1,20 @@ """Common prefect tasks around interacting with measurement sets""" +from __future__ import annotations + from pathlib import Path -from typing import Optional from prefect import task from flint.calibrate.aocalibrate import AddModelOptions, add_model -from flint.logging import logger from flint.imager.wsclean import WSCleanCommand +from flint.logging import logger # TODO: This can be a dispatcher type function should # other modes be added def add_model_source_list_to_ms( - wsclean_command: WSCleanCommand, 
calibrate_container: Optional[Path] = None + wsclean_command: WSCleanCommand, calibrate_container: Path | None = None ) -> WSCleanCommand: logger.info("Updating MODEL_DATA with source list") ms = wsclean_command.ms diff --git a/flint/prefect/common/utils.py b/flint/prefect/common/utils.py index c713001f..2ba0139a 100644 --- a/flint/prefect/common/utils.py +++ b/flint/prefect/common/utils.py @@ -1,8 +1,10 @@ """Common prefect related utilities that can be used between flows.""" +from __future__ import annotations + import base64 from pathlib import Path -from typing import Any, Dict, List, Optional, TypeVar +from typing import Any, TypeVar from uuid import UUID from prefect import task @@ -23,9 +25,7 @@ SUPPORTED_IMAGE_TYPES = ("png",) -def upload_image_as_artifact( - image_path: Path, description: Optional[str] = None -) -> UUID: +def upload_image_as_artifact(image_path: Path, description: str | None = None) -> UUID: """Create and submit a markdown artifact tracked by prefect for an input image. Currently supporting png formatted images. @@ -67,10 +67,10 @@ def upload_image_as_artifact( @task def task_archive_sbid( science_folder_path: Path, - archive_path: Optional[Path] = None, - copy_path: Optional[Path] = None, - max_round: Optional[int] = None, - update_archive_options: Optional[Dict[str, Any]] = None, + archive_path: Path | None = None, + copy_path: Path | None = None, + max_round: int | None = None, + update_archive_options: dict[str, Any] | None = None, ) -> Path: """Create a tarball of files, or copy files, from a processing folder. @@ -96,9 +96,10 @@ def task_archive_sbid( # TODO: What should this be? Just general new regexs passed through, # or is this fine? 
if max_round: - updated_file_patterns = tuple(archive_options.tar_file_re_patterns) + ( - rf".*beam[0-9]+\.round{max_round}-.*-image\.fits", - rf".*beam[0-9]+\.round{max_round}\.ms\.(zip|tar)", + updated_file_patterns = ( + *tuple(archive_options.tar_file_re_patterns), + f".*beam[0-9]+\\.round{max_round}-.*-image\\.fits", + f".*beam[0-9]+\\.round{max_round}\\.ms\\.(zip|tar)", ) archive_options = archive_options.with_options( tar_file_re_patterns=updated_file_patterns @@ -162,7 +163,7 @@ def task_get_attributes(item: Any, attribute: str) -> Any: @task -def task_flatten(to_flatten: List[List[T]]) -> List[T]: +def task_flatten(to_flatten: list[list[T]]) -> list[T]: """Will flatten a list of lists into a single list. This is useful for when a task-descorated function returns a list. diff --git a/flint/prefect/flows/bandpass_pipeline.py b/flint/prefect/flows/bandpass_pipeline.py index 6eee2db8..e780e2a2 100644 --- a/flint/prefect/flows/bandpass_pipeline.py +++ b/flint/prefect/flows/bandpass_pipeline.py @@ -8,9 +8,11 @@ to split the correct field out before actually calibration. """ +from __future__ import annotations + from argparse import ArgumentParser from pathlib import Path -from typing import Collection, List, Optional +from typing import Collection from prefect import flow, task, unmapped @@ -56,7 +58,7 @@ def task_bandpass_create_apply_solutions_cmd( ms: MS, calibrate_cmd: CalibrateCommand, container: Path, - output_column: Optional[str] = None, + output_column: str | None = None, ) -> ApplySolutions: """Apply an ao-calibrate style solutions file to an input measurement set. @@ -129,7 +131,7 @@ def run_bandpass_stage( model_path: Path, source_name_prefix: str = "B1934-638", skip_rotation: bool = False, -) -> List[CalibrateCommand]: +) -> list[CalibrateCommand]: """Executes the bandpass calibration (using ``calibrate``) against a set of input measurement sets. 
@@ -149,10 +151,10 @@ def run_bandpass_stage( ), f"Currently {bandpass_options.flag_calibrate_rounds=}, needs to be 0 or higher" if not output_split_bandpass_path.exists(): - logger.info(f"Creating {str(output_split_bandpass_path)}") + logger.info(f"Creating {output_split_bandpass_path!s}") output_split_bandpass_path.mkdir(parents=True) - calibrate_cmds: List[CalibrateCommand] = [] + calibrate_cmds: list[CalibrateCommand] = [] extract_bandpass_mss = task_extract_correct_bandpass_pointing.map( ms=bandpass_mss, @@ -235,12 +237,12 @@ def calibrate_bandpass_flow( """ assert ( bandpass_path.exists() and bandpass_path.is_dir() - ), f"{str(bandpass_path)} does not exist or is not a folder. " + ), f"{bandpass_path!s} does not exist or is not a folder. " bandpass_mss = list([MS.cast(ms_path) for ms_path in bandpass_path.glob("*.ms")]) assert ( len(bandpass_mss) == bandpass_options.expected_ms - ), f"Expected to find {bandpass_options.expected_ms} in {str(bandpass_path)}, found {len(bandpass_mss)}." + ), f"Expected to find {bandpass_options.expected_ms} in {bandpass_path!s}, found {len(bandpass_mss)}." logger.info( f"Found the following bandpass measurement set: {[bp.path for bp in bandpass_mss]}." @@ -251,7 +253,7 @@ def calibrate_bandpass_flow( Path(split_path / bandpass_folder_name).absolute().resolve() ) logger.info( - f"Will write extracted bandpass MSs to: {str(output_split_bandpass_path)}." + f"Will write extracted bandpass MSs to: {output_split_bandpass_path!s}." ) # This is the model that we will calibrate the bandpass against. diff --git a/flint/prefect/flows/continuum_mask_pipeline.py b/flint/prefect/flows/continuum_mask_pipeline.py index d30dd0c9..8a8a4178 100644 --- a/flint/prefect/flows/continuum_mask_pipeline.py +++ b/flint/prefect/flows/continuum_mask_pipeline.py @@ -13,9 +13,11 @@ imaging round. 
""" +from __future__ import annotations + from argparse import ArgumentParser from pathlib import Path -from typing import Any, Union +from typing import Any from prefect import flow, unmapped @@ -84,13 +86,13 @@ def process_science_fields( assert ( science_path.exists() and science_path.is_dir() - ), f"{str(science_path)} does not exist or is not a folder. " + ), f"{science_path!s} does not exist or is not a folder. " science_mss = list( [MS.cast(ms_path) for ms_path in sorted(science_path.glob("*.ms"))] ) assert ( len(science_mss) == field_options.expected_ms - ), f"Expected to find {field_options.expected_ms} in {str(science_path)}, found {len(science_mss)}." + ), f"Expected to find {field_options.expected_ms} in {science_path!s}, found {len(science_mss)}." science_folder_name = science_path.name @@ -99,7 +101,7 @@ def process_science_fields( ) if not output_split_science_path.exists(): - logger.info(f"Creating {str(output_split_science_path)}") + logger.info(f"Creating {output_split_science_path!s}") output_split_science_path.mkdir(parents=True) logger.info(f"Found the following raw measurement sets: {science_mss}") @@ -323,7 +325,7 @@ def process_science_fields( def setup_run_process_science_field( - cluster_config: Union[str, Path], + cluster_config: str | Path, science_path: Path, bandpass_path: Path, split_path: Path, diff --git a/flint/prefect/flows/continuum_pipeline.py b/flint/prefect/flows/continuum_pipeline.py index 686ec220..9f01cb98 100644 --- a/flint/prefect/flows/continuum_pipeline.py +++ b/flint/prefect/flows/continuum_pipeline.py @@ -5,8 +5,10 @@ - run aegean source finding """ +from __future__ import annotations + from pathlib import Path -from typing import Any, List, Optional, Union +from typing import Any from configargparse import ArgumentParser from prefect import flow, tags, unmapped @@ -15,8 +17,8 @@ from flint.catalogue import verify_reference_catalogues from flint.coadd.linmos import LinmosCommand from flint.configuration import ( - 
_load_and_copy_strategy, Strategy, + _load_and_copy_strategy, get_options_from_strategy, ) from flint.logging import logger @@ -30,9 +32,9 @@ ) from flint.options import ( FieldOptions, - dump_field_options_to_yaml, add_options_to_parser, create_options_from_parser, + dump_field_options_to_yaml, ) from flint.prefect.clusters import get_dask_runner from flint.prefect.common.imaging import ( @@ -133,7 +135,7 @@ def _check_create_output_split_science_path( ) raise ValueError("Output science directory already exists. ") - logger.info(f"Creating {str(output_split_science_path)}") + logger.info(f"Creating {output_split_science_path!s}") output_split_science_path.mkdir(parents=True) return output_split_science_path @@ -144,7 +146,7 @@ def process_science_fields( science_path: Path, split_path: Path, field_options: FieldOptions, - bandpass_path: Optional[Path] = None, + bandpass_path: Path | None = None, ) -> None: # Verify no nasty incompatible options _check_field_options(field_options=field_options) @@ -170,9 +172,9 @@ def process_science_fields( field_options=field_options, ) - archive_wait_for: List[Any] = [] + archive_wait_for: list[Any] = [] - strategy: Optional[Strategy] = _load_and_copy_strategy( + strategy: Strategy | None = _load_and_copy_strategy( output_split_science_path=output_split_science_path, imaging_strategy=field_options.imaging_strategy, ) @@ -419,9 +421,7 @@ def process_science_fields( aegean_container=unmapped(field_options.aegean_container), ) - parsets_self: Union[None, List[LinmosCommand]] = ( - None # Without could be unbound - ) + parsets_self: None | list[LinmosCommand] = None # Without could be unbound if field_options.yandasoft_container: parsets_self = _create_convol_linmos_images( wsclean_cmds=wsclean_cmds, @@ -504,11 +504,11 @@ def process_science_fields( def setup_run_process_science_field( - cluster_config: Union[str, Path], + cluster_config: str | Path, science_path: Path, split_path: Path, field_options: FieldOptions, - bandpass_path: 
Optional[Path] = None, + bandpass_path: Path | None = None, skip_bandpass_check: bool = False, ) -> None: if not skip_bandpass_check and bandpass_path: diff --git a/flint/prefect/flows/subtract_cube_pipeline.py b/flint/prefect/flows/subtract_cube_pipeline.py index 4142c73c..a946cc50 100644 --- a/flint/prefect/flows/subtract_cube_pipeline.py +++ b/flint/prefect/flows/subtract_cube_pipeline.py @@ -7,27 +7,28 @@ already been preprocessed and fixed. """ +from __future__ import annotations + from pathlib import Path from time import sleep -from typing import Tuple, Optional, Union, List import numpy as np from configargparse import ArgumentParser from fitscube.combine_fits import combine_fits -from prefect import flow, unmapped, task +from prefect import flow, task, unmapped from flint.coadd.linmos import LinmosCommand from flint.configuration import _load_and_copy_strategy from flint.exceptions import FrequencyMismatchError -from flint.prefect.clusters import get_dask_runner from flint.logging import logger from flint.ms import ( MS, - find_mss, consistent_ms_frequencies, + find_mss, get_freqs_from_ms, subtract_model_from_data_column, ) +from flint.naming import get_sbid_from_path from flint.options import ( AddModelSubtractFieldOptions, BaseOptions, @@ -35,12 +36,12 @@ add_options_to_parser, create_options_from_parser, ) +from flint.prefect.clusters import get_dask_runner from flint.prefect.common.imaging import ( - task_wsclean_imager, - task_get_common_beam, _convolve_linmos, + task_get_common_beam, + task_wsclean_imager, ) -from flint.naming import get_sbid_from_path class CrystalBallOptions(BaseOptions): @@ -48,12 +49,12 @@ class CrystalBallOptions(BaseOptions): attempt_crystalball: bool = False """Attempt to predict the model visibilities using ``crystalball``""" - wsclean_pol_mode: List[str] = ["i"] + wsclean_pol_mode: list[str] = ["i"] """The polarisation of the wsclean model that was generated""" def _check_and_verify_options( - options: 
Union[AddModelSubtractFieldOptions, SubtractFieldOptions], + options: AddModelSubtractFieldOptions | SubtractFieldOptions, ) -> None: """Verrify that the options supplied to run the subtract field options make sense""" if isinstance(options, SubtractFieldOptions): @@ -77,10 +78,10 @@ def _check_and_verify_options( def find_mss_to_image( mss_parent_path: Path, - expected_ms_count: Optional[int] = None, + expected_ms_count: int | None = None, data_column: str = "CORRECTED_DATA", model_column: str = "MODEL_DATA", -) -> Tuple[MS, ...]: +) -> tuple[MS, ...]: """Search for MSs to image. See ``flint.ms.find_mss`` for further details. Args: @@ -103,10 +104,10 @@ def find_mss_to_image( def find_and_setup_mss( - science_path_or_mss: Union[Path, Tuple[MS, ...]], + science_path_or_mss: Path | tuple[MS, ...], expected_ms_count: int, data_column: str, -) -> Tuple[MS, ...]: +) -> tuple[MS, ...]: """Search for MSs in a directory and, if necessary, perform checks around their consistency. If the input data appear to be collection of MSs already assume they have already been set and checked for consistency. 
@@ -152,8 +153,8 @@ def task_addmodel_to_ms( ms: MS, addmodel_subtract_options: AddModelSubtractFieldOptions, ) -> MS: - from flint.imager.wsclean import get_wsclean_output_source_list_path from flint.calibrate.aocalibrate import AddModelOptions, add_model + from flint.imager.wsclean import get_wsclean_output_source_list_path logger.info(f"Searching for wsclean source list for {ms.path}") for idx, pol in enumerate(addmodel_subtract_options.wsclean_pol_mode): @@ -186,6 +187,7 @@ def task_addmodel_to_ms( def task_crystalball_to_ms(ms: MS, crystalball_options: CrystalBallOptions) -> MS: from prefect_dask import get_dask_client + from flint.imager.wsclean import get_wsclean_output_source_list_path logger.info(f"Searching for wsclean source list for {ms.path}") @@ -205,7 +207,7 @@ def task_crystalball_to_ms(ms: MS, crystalball_options: CrystalBallOptions) -> M @task def task_combine_all_linmos_images( - linmos_commands: List[LinmosCommand], + linmos_commands: list[LinmosCommand], remove_original_images: bool = False, combine_weights: bool = False, ) -> Path: @@ -226,7 +228,7 @@ def task_combine_all_linmos_images( logger.info(f"Combining {len(images_to_combine)} FITS files together") - from flint.naming import create_name_from_common_fields, create_image_cube_name + from flint.naming import create_image_cube_name, create_name_from_common_fields assert len(images_to_combine) > 0, "No images to combine" @@ -254,11 +256,11 @@ def task_combine_all_linmos_images( @flow def flow_addmodel_to_mss( - science_path_or_mss: Union[Path, Tuple[MS, ...]], + science_path_or_mss: Path | tuple[MS, ...], addmodel_subtract_field_options: AddModelSubtractFieldOptions, expected_ms: int, data_column: str, -) -> Tuple[MS, ...]: +) -> tuple[MS, ...]: """Separate flow to perform the potentially expensive model prediction into MSs""" _check_and_verify_options(options=addmodel_subtract_field_options) diff --git a/flint/sclient.py b/flint/sclient.py index 5e1ce524..05aff358 100644 --- 
a/flint/sclient.py +++ b/flint/sclient.py @@ -1,9 +1,11 @@ """Utilities related to running commands in a singularity container""" +from __future__ import annotations + from pathlib import Path from subprocess import CalledProcessError from time import sleep -from typing import Callable, Collection, Optional, Union, List +from typing import Callable, Collection from spython.main import Client as sclient @@ -15,8 +17,8 @@ def run_singularity_command( image: Path, command: str, - bind_dirs: Optional[Union[Path, Collection[Path]]] = None, - stream_callback_func: Optional[Callable] = None, + bind_dirs: Path | Collection[Path] | None = None, + stream_callback_func: Callable | None = None, ignore_logging_output: bool = False, max_retries: int = 2, ) -> None: @@ -44,7 +46,7 @@ def run_singularity_command( logger.info(f"Running {command} in {image}") job_info = log_job_environment() - bind: Union[None, List[str]] = None + bind: None | list[str] = None if bind_dirs: logger.info("Preparing bind directories") if isinstance(bind_dirs, Path): @@ -121,8 +123,8 @@ def singularity_wrapper( def wrapper( container: Path, - bind_dirs: Optional[Union[Path, Collection[Path]]] = None, - stream_callback_func: Optional[Callable] = None, + bind_dirs: Path | Collection[Path] | None = None, + stream_callback_func: Callable | None = None, ignore_logging_output: bool = False, **kwargs, ) -> str: diff --git a/flint/selfcal/casa.py b/flint/selfcal/casa.py index 9ef8f072..e512cad4 100644 --- a/flint/selfcal/casa.py +++ b/flint/selfcal/casa.py @@ -8,7 +8,7 @@ from argparse import ArgumentParser from pathlib import Path from shutil import copytree -from typing import Any, Dict, NamedTuple, Optional +from typing import Any, NamedTuple from casacore.tables import table @@ -65,7 +65,7 @@ def args_to_casa_task_string(task: str, **kwargs) -> str: command = [] for k, v in kwargs.items(): if isinstance(v, (str, Path)): - arg = rf"{k}='{str(v)}'" + arg = rf"{k}='{v!s}'" else: arg = rf"{k}={v}" 
command.append(arg) @@ -161,7 +161,7 @@ def copy_and_clean_ms_casagain( mode_text = "Renaming" if rename_ms else "Copying" - logger.info(f"Output MS name will be {str(out_ms_path)}.") + logger.info(f"Output MS name will be {out_ms_path!s}.") logger.info(f"{mode_text} {ms.path} to {out_ms_path}.") if out_ms_path.exists(): @@ -207,7 +207,7 @@ def copy_and_clean_ms_casagain( ] for col in to_delete: if col in colnames: - logger.info(f"Removing {col=} from {str(out_ms_path)}.") + logger.info(f"Removing {col=} from {out_ms_path!s}.") try: tab.removecols(col) tab.flush(recursive=True) @@ -216,7 +216,7 @@ def copy_and_clean_ms_casagain( f"Failed to remove {col=}! \nCaptured error: {e}" ) else: - logger.warning(f"Column {col} not found in {str(out_ms_path)}.") + logger.warning(f"Column {col} not found in {out_ms_path!s}.") logger.info("Renaming CORRECTED_DATA to DATA. ") tab.renamecol("CORRECTED_DATA", "DATA") @@ -247,7 +247,7 @@ def create_spws_in_ms(casa_container: Path, ms_path: Path, nspw: int) -> Path: Path: The path to the measurement set that was updated """ - logger.info(f"Transforming {str(ms_path)} to have {nspw} SPWs") + logger.info(f"Transforming {ms_path!s} to have {nspw} SPWs") transform_ms = ms_path.with_suffix(".ms_transform") mstransform( @@ -318,8 +318,8 @@ def gaincal_applycal_ms( ms: MS, casa_container: Path, round: int = 1, - gain_cal_options: Optional[GainCalOptions] = None, - update_gain_cal_options: Optional[Dict[str, Any]] = None, + gain_cal_options: GainCalOptions | None = None, + update_gain_cal_options: dict[str, Any] | None = None, archive_input_ms: bool = False, raise_error_on_fail: bool = True, skip_selfcal: bool = False, @@ -374,7 +374,7 @@ def gaincal_applycal_ms( logger.info(f"Will create calibration table {cal_table}.") if cal_table.exists(): - logger.warning(f"Removing {str(cal_table)}") + logger.warning(f"Removing {cal_table!s}") remove_files_folders(cal_table) # This is used for when a frequency dependent self-calibration solution is 
requested. diff --git a/flint/selfcal/utils.py b/flint/selfcal/utils.py index 7e8c88ea..f16addc9 100644 --- a/flint/selfcal/utils.py +++ b/flint/selfcal/utils.py @@ -2,11 +2,11 @@ across different packages. """ -from typing import List, Union +from __future__ import annotations def consider_skip_selfcal_on_round( - current_round: int, skip_selfcal_on_rounds: Union[int, List[int], None] + current_round: int, skip_selfcal_on_rounds: int | list[int] | None ) -> bool: """Consider whether the self-calibration process (derive and applying solutions) should be skipped on a particular imaging round. diff --git a/flint/sky_model.py b/flint/sky_model.py index a360124f..873e07d4 100644 --- a/flint/sky_model.py +++ b/flint/sky_model.py @@ -1,9 +1,10 @@ #!/usr/bin/env python +from __future__ import annotations from argparse import ArgumentParser from functools import partial from pathlib import Path -from typing import Dict, List, NamedTuple, Optional, Tuple, Union +from typing import NamedTuple import numpy as np import yaml @@ -89,11 +90,11 @@ class SkyModel(NamedTuple): """Number of source that are included in the sky-model""" apparent: bool = True """Whether the sources and model are absolute of apparent fluxes""" - hyperdrive_model: Optional[Path] = None + hyperdrive_model: Path | None = None """Path to the sky-model file created to use with hyperdrive""" - calibrate_model: Optional[Path] = None + calibrate_model: Path | None = None """Path to the sky-model file created to use with calibrate""" - ds9_region: Optional[Path] = None + ds9_region: Path | None = None """Path to the DS9 region file representing the sky-model""" @@ -101,7 +102,7 @@ class SkyModel(NamedTuple): NORM_COLS = {"flux": "Jy", "maj": "arcsecond", "min": "arcsecond", "pa": "deg"} """Normalised column names and their corresponding astropy units. 
""" -KNOWN_CATAS: Dict[str, Catalogue] = KNOWN_REFERENCE_CATALOGUES +KNOWN_CATAS: dict[str, Catalogue] = KNOWN_REFERENCE_CATALOGUES """Known sky-model catalogues that have had some pre-processing operations applied. Discuss with maintainers for access, """ # TODO: Make this a yaml file packaged in data/models @@ -137,7 +138,7 @@ def get_1934_model(mode: str = "calibrate") -> Path: assert ( model_path.exists() ), f"Constructed {model_path} apparently does not exist. Check packaged models. " - logger.info(f"Calibrate 1934-638 model path: {str(model_path)}.") + logger.info(f"Calibrate 1934-638 model path: {model_path!s}.") return model_path @@ -249,7 +250,7 @@ def generate_airy_pb( def generate_pb( pb_type: str, freqs: u.Quantity, aperture: u.Quantity, offset: u.Quantity -) -> Union[GaussianResponse, SincSquaredResponse, AiryResponse]: +) -> GaussianResponse | SincSquaredResponse | AiryResponse: """Generate the primary beam response using a set of physical quantities. Each is assumed to be rotationally invariant, so a 1-D slice can be evaluated. @@ -271,7 +272,7 @@ def generate_pb( Returns: Union[GaussianResponse, SincSquaredResponse, AiryResponse]: Constructed primary beam responses """ - response: Union[GaussianResponse, SincSquaredResponse, AiryResponse, None] = None + response: GaussianResponse | SincSquaredResponse | AiryResponse | None = None if pb_type.lower() == "gaussian": response = generate_gaussian_pb(freqs=freqs, aperture=aperture, offset=offset) elif pb_type.lower() == "sincsquared": @@ -379,7 +380,7 @@ def dir_from_ms(ms_path: Path) -> SkyCoord: Returns: SkyCoord: Pointing direction on the sky of the measurement set """ - tp = table(f"{str(ms_path)}/FIELD", readonly=True, ack=False) + tp = table(f"{ms_path!s}/FIELD", readonly=True, ack=False) p_phase = tp.getcol("PHASE_DIR") tp.close() @@ -400,7 +401,7 @@ def freqs_from_ms(ms_path: Path) -> np.ndarray: Returns: np.ndarray: Collection of channel frequencies. 
""" - tf = table(f"{str(ms_path)}/SPECTRAL_WINDOW", ack=False) + tf = table(f"{ms_path!s}/SPECTRAL_WINDOW", ack=False) freqs = tf[0]["CHAN_FREQ"] tf.close() return np.sort(freqs) @@ -429,11 +430,11 @@ def get_known_catalogue(cata: str) -> Catalogue: def load_catalogue( catalogue_dir: Path, - catalogue: Optional[str] = None, - ms_pointing: Optional[SkyCoord] = None, + catalogue: str | None = None, + ms_pointing: SkyCoord | None = None, assumed_alpha: float = -0.83, assumed_q: float = 0.0, -) -> Tuple[Catalogue, Table]: +) -> tuple[Catalogue, Table]: """Load in a catalogue table given a name or measurement set declinattion. Args: @@ -554,7 +555,7 @@ def preprocess_catalogue( return QTable(new_cata_tab) -def make_ds9_region(out_path: Path, sources: List[Row]) -> Path: +def make_ds9_region(out_path: Path, sources: list[Row]) -> Path: """Create a DS9 region file of the sky-model derived Args: @@ -565,22 +566,22 @@ def make_ds9_region(out_path: Path, sources: List[Row]) -> Path: Path: Path to the region file created """ logger.info( - f"Creating DS9 region file, writing {len(sources)} regions to {str(out_path)}." + f"Creating DS9 region file, writing {len(sources)} regions to {out_path!s}." 
) - with open(out_path, "wt") as out_file: + with open(out_path, "w") as out_file: out_file.write("# DS9 region file\n") out_file.write("fk5\n") for source in sources: if source["maj"] < 1.0 * u.arcsecond and source["min"] < 1.0 * u.arcsecond: out_file.write( - "point(%f,%f) # point=circle color=red dash=1\n" - % (source["RA"].value, source["DEC"].value) + "point({:f},{:f}) # point=circle color=red dash=1\n".format( + source["RA"].value, source["DEC"].value + ) ) else: out_file.write( - "ellipse(%f,%f,%f,%f,%f) # color=red dash=1\n" - % ( + "ellipse({:f},{:f},{:f},{:f},{:f}) # color=red dash=1\n".format( source["RA"].value, source["DEC"].value, source["maj"].value, @@ -592,7 +593,7 @@ def make_ds9_region(out_path: Path, sources: List[Row]) -> Path: return out_path -def make_hyperdrive_model(out_path: Path, sources: List[Tuple[Row, CurvedPL]]) -> Path: +def make_hyperdrive_model(out_path: Path, sources: list[tuple[Row, CurvedPL]]) -> Path: """Writes a Hyperdrive sky-model to a yaml file. 
Args: @@ -647,7 +648,7 @@ def make_hyperdrive_model(out_path: Path, sources: List[Tuple[Row, CurvedPL]]) - return out_path -def make_calibrate_model(out_path: Path, sources: List[Tuple[Row, CurvedPL]]) -> Path: +def make_calibrate_model(out_path: Path, sources: list[tuple[Row, CurvedPL]]) -> Path: """Create a sky-model file that is compatible with the AO Calibrate software Args: @@ -679,30 +680,26 @@ def make_calibrate_model(out_path: Path, sources: List[Tuple[Row, CurvedPL]]) -> and src_row["min"] < 1.0 * u.arcsecond ): out_file.write( - ( - f"{src_row['name']}," - f"POINT," - f"{ra_str}," - f"{dec_str}," - f"{src_cpl.norm}," - f"[{src_cpl.alpha},{src_cpl.q}]," - f"true,{ref_nu},,,\n" - ) + f"{src_row['name']}," + f"POINT," + f"{ra_str}," + f"{dec_str}," + f"{src_cpl.norm}," + f"[{src_cpl.alpha},{src_cpl.q}]," + f"true,{ref_nu},,,\n" ) else: out_file.write( - ( - f"{src_row['name']}," - f"GAUSSIAN," - f"{ra_str}," - f"{dec_str}," - f"{src_cpl.norm}," - f"[{src_cpl.alpha},{src_cpl.q}]," - f"true,{ref_nu}," - f"{src_row['maj'].to(u.arcsecond).value}," - f"{src_row['maj'].to(u.arcsecond).value}," - f"{src_row['pa'].to(u.deg).value},\n" - ) + f"{src_row['name']}," + f"GAUSSIAN," + f"{ra_str}," + f"{dec_str}," + f"{src_cpl.norm}," + f"[{src_cpl.alpha},{src_cpl.q}]," + f"true,{ref_nu}," + f"{src_row['maj'].to(u.arcsecond).value}," + f"{src_row['maj'].to(u.arcsecond).value}," + f"{src_row['pa'].to(u.deg).value},\n" ) return out_path @@ -711,7 +708,7 @@ def make_calibrate_model(out_path: Path, sources: List[Tuple[Row, CurvedPL]]) -> def create_sky_model( ms_path: Path, cata_dir: Path = Path("."), - cata_name: Optional[str] = None, + cata_name: str | None = None, assumed_alpha: float = -0.83, assumed_q: float = 0.0, flux_cutoff: float = 0.02, @@ -756,7 +753,7 @@ def create_sky_model( radial_cutoff = ( fwhm_scale_cutoff * pb.fwhms[0] ).decompose() # The lowest frequency FWHM is largest - logger.info("Radial cutoff = %.3f degrees" % (radial_cutoff.to(u.deg).value)) + 
logger.info(f"Radial cutoff = {radial_cutoff.to(u.deg).value:.3f} degrees") cata_info, cata_tab = load_catalogue( catalogue_dir=cata_dir, @@ -774,7 +771,7 @@ def create_sky_model( ) total_flux: u.Jy = 0.0 * u.Jy - accepted_rows: List[Tuple[Row, CurvedPL]] = [] + accepted_rows: list[tuple[Row, CurvedPL]] = [] for i, row in enumerate(cata_tab): src_pos = SkyCoord(row["RA"], row["DEC"]) diff --git a/flint/source_finding/aegean.py b/flint/source_finding/aegean.py index ebf76328..0a5105d0 100644 --- a/flint/source_finding/aegean.py +++ b/flint/source_finding/aegean.py @@ -1,8 +1,10 @@ """A basic interface into aegean source finding routines.""" +from __future__ import annotations + from argparse import ArgumentParser from pathlib import Path -from typing import NamedTuple, Optional, Tuple +from typing import NamedTuple from astropy.io import fits @@ -15,9 +17,9 @@ class BANEOptions(NamedTuple): """Container for basic BANE related options. Only a subclass of BANE options are supported.""" - grid_size: Optional[Tuple[int, int]] = (16, 16) + grid_size: tuple[int, int] | None = (16, 16) """The step interval of each box, in pixels""" - box_size: Optional[Tuple[int, int]] = (196, 196) + box_size: tuple[int, int] | None = (196, 196) """The size of the box in pixels""" @@ -46,7 +48,7 @@ class AegeanOutputs(NamedTuple): """RMS map created by BANE""" comp: Path """Source component catalogue created by Aegean""" - beam_shape: Tuple[float, float, float] + beam_shape: tuple[float, float, float] """The `BMAJ`, `BMIN` and `BPA` that were stored in the image header that Aegen searched""" image: Path """The input image that was used to source find against""" @@ -55,7 +57,7 @@ class AegeanOutputs(NamedTuple): def _get_bane_command(image: Path, cores: int, bane_options: BANEOptions) -> str: """Create the BANE command to run""" # The stripes is purposely set lower than the cores due to an outstanding bane bug that can cause a deadlock. 
- bane_command_str = f"BANE {str(image)} --cores {cores} --stripes {cores-1} " + bane_command_str = f"BANE {image!s} --cores {cores} --stripes {cores-1} " if bane_options.grid_size: bane_command_str += ( f"--grid {bane_options.grid_size[0]} {bane_options.grid_size[1]} " @@ -88,7 +90,7 @@ def _get_aegean_command( image: Path, base_output: str, aegean_options: AegeanOptions ) -> str: """Create the aegean command to run""" - aegean_command = f"aegean {str(image)} " + aegean_command = f"aegean {image!s} " if aegean_options.autoload: aegean_command += "--autoload " if aegean_options.nocov: @@ -113,8 +115,8 @@ def run_bane_and_aegean( image: Path, aegean_container: Path, cores: int = 8, - bane_options: Optional[BANEOptions] = None, - aegean_options: Optional[AegeanOptions] = None, + bane_options: BANEOptions | None = None, + aegean_options: AegeanOptions | None = None, ) -> AegeanOutputs: """Run BANE, the background and noise estimator, and aegean, the source finder, against an input image. This function attempts to hook into the AegeanTools diff --git a/flint/summary.py b/flint/summary.py index 0c4ec1c6..0a09ee34 100644 --- a/flint/summary.py +++ b/flint/summary.py @@ -5,7 +5,7 @@ ) from pathlib import Path -from typing import NamedTuple, Optional, Tuple, Union, List +from typing import NamedTuple import astropy.units as u from astropy.coordinates import ( @@ -61,33 +61,33 @@ class FieldSummary(NamedTuple): """SBID of the bandpass calibrator""" field_name: str """The name of the field""" - ms_summaries: Optional[Tuple[MSSummary, ...]] = None + ms_summaries: tuple[MSSummary, ...] 
| None = None """Summaries of measurement sets used in the processing of the filed""" - centre: Optional[SkyCoord] = None + centre: SkyCoord | None = None """Centre of the field, which is calculated as the mean position of all phase directions of the `mss` measurement sets""" - integration_time: Optional[int] = None + integration_time: int | None = None """The integration time of the observation (seconds)""" - no_components: Optional[int] = None + no_components: int | None = None """Number of components found from the source finder""" - holography_path: Optional[Path] = None + holography_path: Path | None = None """Path to the file used for holography""" - round: Optional[int] = None + round: int | None = None """The self-cal round""" - location: Optional[EarthLocation] = None + location: EarthLocation | None = None """The location of the telescope stored as (X,Y,Z) in meters""" - ms_times: Optional[Time] = None + ms_times: Time | None = None """The unique scan times of integrations stored in the measurement set""" - hour_angles: Optional[Longitude] = None + hour_angles: Longitude | None = None """Computed hour-angles of the field""" - elevations: Optional[Latitude] = None + elevations: Latitude | None = None """Computed elevations of the field""" - median_rms: Optional[float] = None + median_rms: float | None = None """The meanian RMS computed from an RMS image""" - beam_summaries: Optional[List[BeamSummary]] = None + beam_summaries: list[BeamSummary] | None = None """Summary information from each beam. Contains MSSummary, ImageSet and other information.""" - linmos_image: Optional[Path] = None + linmos_image: Path | None = None """The path to the linmos image of all beams""" - pol_axis: Optional[float] = None + pol_axis: float | None = None """The orientation of the ASKAP third-axis in radians. 
""" def with_options(self, **kwargs) -> FieldSummary: @@ -97,7 +97,7 @@ def with_options(self, **kwargs) -> FieldSummary: return FieldSummary(**prop) -def _get_pol_axis_as_rad(ms: Union[MS, Path]) -> float: +def _get_pol_axis_as_rad(ms: MS | Path) -> float: """Helper to get the appropriate pol_axis out of a MS. Prioritises the instrumental third-axis imprinted from fixms""" ms = MS.cast(ms=ms) @@ -118,7 +118,7 @@ def _get_pol_axis_as_rad(ms: Union[MS, Path]) -> float: # TODO: Need to establise a MSLike type -def add_ms_summaries(field_summary: FieldSummary, mss: List[MS]) -> FieldSummary: +def add_ms_summaries(field_summary: FieldSummary, mss: list[MS]) -> FieldSummary: """Obtain a MSSummary instance to add to a FieldSummary Quantities derived from the field centre (hour angles, elevations) are @@ -224,9 +224,9 @@ def add_linmos_fits_image( def update_field_summary( field_summary: FieldSummary, - aegean_outputs: Optional[AegeanOutputs] = None, - mss: Optional[List[MS]] = None, - linmos_command: Optional[LinmosCommand] = None, + aegean_outputs: AegeanOutputs | None = None, + mss: list[MS] | None = None, + linmos_command: LinmosCommand | None = None, **kwargs, ) -> FieldSummary: """Update an existing `FieldSummary` instance with additional information. @@ -264,10 +264,10 @@ def update_field_summary( def create_field_summary( - mss: List[Union[MS, Path]], - cal_sbid_path: Optional[Path] = None, - holography_path: Optional[Path] = None, - aegean_outputs: Optional[AegeanOutputs] = None, + mss: list[MS | Path], + cal_sbid_path: Path | None = None, + holography_path: Path | None = None, + aegean_outputs: AegeanOutputs | None = None, **kwargs, ) -> FieldSummary: """Create a field summary object using a measurement set. 
@@ -345,9 +345,9 @@ class BeamSummary(NamedTuple): ms_summary: MSSummary """A summary object of a measurement set""" - imageset: Optional[ImageSet] = None + imageset: ImageSet | None = None """A set of images that have been created from the measurement set represented by `summary`""" - components: Optional[AegeanOutputs] = None + components: AegeanOutputs | None = None """The source finding components from the aegean source finder""" def with_options(self, **kwargs) -> BeamSummary: @@ -358,9 +358,9 @@ def with_options(self, **kwargs) -> BeamSummary: def create_beam_summary( - ms: Union[MS, Path], - imageset: Optional[Union[ImageSet, WSCleanCommand]] = None, - components: Optional[AegeanOutputs] = None, + ms: MS | Path, + imageset: ImageSet | WSCleanCommand | None = None, + components: AegeanOutputs | None = None, ) -> BeamSummary: """Create a summary of a beam diff --git a/flint/utils.py b/flint/utils.py index e346b20c..0ee1cb8d 100644 --- a/flint/utils.py +++ b/flint/utils.py @@ -2,6 +2,8 @@ for general usage. """ +from __future__ import annotations + import datetime import os import shutil @@ -10,7 +12,7 @@ from contextlib import contextmanager from pathlib import Path from socket import gethostname -from typing import List, NamedTuple, Optional, Tuple, Union, Generator +from typing import Generator, NamedTuple import astropy.units as u import numpy as np @@ -35,7 +37,7 @@ def _signal_timelimit_handler(*args): @contextmanager def timelimit_on_context( - timelimit_seconds: Union[int, float], + timelimit_seconds: int | float, ) -> Generator[None, None, None]: """Creates a context manager that will raise ``flint.exceptions.TimelimitException`` should the control not leave the ``with`` context within an specified amount of time. 
@@ -70,7 +72,7 @@ def timelimit_on_context( @contextmanager def hold_then_move_into( move_directory: Path, - hold_directory: Optional[Path], + hold_directory: Path | None, delete_hold_on_exist: bool = True, ) -> Generator[Path, None, None]: """Create a temporary directory such that anything within it on the @@ -120,7 +122,7 @@ def hold_then_move_into( @contextmanager def temporarily_move_into( - subject: Path, temporary_directory: Optional[Path] = None + subject: Path, temporary_directory: Path | None = None ) -> Generator[Path, None, None]: """Given a file or folder, temporarily copy it into the path specified by `temporary_directory` for the duration of the context manager. Upon @@ -178,8 +180,8 @@ def temporarily_move_into( def get_environment_variable( - variable: Union[str, None], default: Optional[str] = None -) -> Union[str, None]: + variable: str | None, default: str | None = None +) -> str | None: """Get the value of an environment variable if it exists. If it does not a None is returned. 
@@ -204,11 +206,11 @@ def get_environment_variable( class SlurmInfo(NamedTuple): hostname: str """The hostname of the slurm job""" - job_id: Optional[str] = None + job_id: str | None = None """The job ID of the slurm job""" - task_id: Optional[str] = None + task_id: str | None = None """The task ID of the slurm job""" - time: Optional[str] = None + time: str | None = None """The time time the job information was gathered""" @@ -227,7 +229,7 @@ def get_slurm_info() -> SlurmInfo: return SlurmInfo(hostname=hostname, job_id=job_id, task_id=task_id, time=time) -def get_job_info(mode: str = "slurm") -> Union[SlurmInfo]: +def get_job_info(mode: str = "slurm") -> SlurmInfo: """Get the job information for the supplied mode Args: @@ -266,7 +268,7 @@ def log_job_environment() -> SlurmInfo: return slurm_info -def get_beam_shape(fits_path: Path) -> Optional[BeamShape]: +def get_beam_shape(fits_path: Path) -> BeamShape | None: """Construct and return a beam shape from the fields in a FITS image Args: @@ -290,7 +292,7 @@ def get_beam_shape(fits_path: Path) -> Optional[BeamShape]: return beam_shape -def get_pixels_per_beam(fits_path: Path) -> Optional[float]: +def get_pixels_per_beam(fits_path: Path) -> float | None: """Given a image with beam information, return the number of pixels per beam. The beam is taken from the FITS header. This is evaluated for pixels at the reference pixel position. @@ -349,8 +351,8 @@ def get_packaged_resource_path(package: str, filename: str) -> Path: def generate_strict_stub_wcs_header( position_at_image_center: SkyCoord, - image_shape: Tuple[int, int], - pixel_scale: Union[u.Quantity, str], + image_shape: tuple[int, int], + pixel_scale: u.Quantity | str, image_shape_is_center: bool = False, ) -> WCS: """Create a WCS object using some strict quantities. 
There @@ -409,12 +411,12 @@ def generate_strict_stub_wcs_header( def generate_stub_wcs_header( - ra: Optional[Union[float, u.Quantity]] = None, - dec: Optional[Union[float, u.Quantity]] = None, - image_shape: Optional[Tuple[int, int]] = None, - pixel_scale: Optional[Union[u.Quantity, str, float]] = None, + ra: float | u.Quantity | None = None, + dec: float | u.Quantity | None = None, + image_shape: tuple[int, int] | None = None, + pixel_scale: u.Quantity | str | float | None = None, projection: str = "SIN", - base_wcs: Optional[Union[Path, WCS]] = None, + base_wcs: Path | WCS | None = None, ) -> WCS: """Create a basic WSC header object that can be used to calculate sky positions for an example image. @@ -537,7 +539,7 @@ def estimate_image_centre(image_path: Path) -> SkyCoord: def zip_folder( - in_path: Path, out_zip: Optional[Path] = None, archive_format: str = "tar" + in_path: Path, out_zip: Path | None = None, archive_format: str = "tar" ) -> Path: """Zip a directory and remove the original. @@ -574,9 +576,7 @@ def rsync_copy_directory(target_path: Path, out_path: Path) -> Path: Path: The output path of the new directory. """ - rsync_cmd = ( - f"rsync -avh --progress --stats " f"{str(target_path)}/ " f"{str(out_path)}/ " - ) + rsync_cmd = f"rsync -avh --progress --stats " f"{target_path!s}/ " f"{out_path!s}/ " logger.info(f"Rsync copying {target_path} to {out_path}.") logger.debug(f"Will run {rsync_cmd}") rsync_run = subprocess.Popen(rsync_cmd.split(), stdout=subprocess.PIPE) @@ -628,7 +628,7 @@ def copy_directory( return output_directory -def remove_files_folders(*paths_to_remove: Path) -> List[Path]: +def remove_files_folders(*paths_to_remove: Path) -> list[Path]: """Will remove a set of paths from the file system. If a Path points to a folder, it will be recursively removed. Otherwise it is simply unlinked. 
@@ -650,7 +650,7 @@ def remove_files_folders(*paths_to_remove: Path) -> List[Path]: continue if file.is_dir(): - logger.info(f"Removing folder {str(file)}") + logger.info(f"Removing folder {file!s}") shutil.rmtree(file) else: logger.info(f"Removing file {file}.") @@ -677,10 +677,10 @@ def create_directory(directory: Path, parents: bool = True) -> Path: directory = Path(directory) - logger.info(f"Creating {str(directory)}") + logger.info(f"Creating {directory!s}") try: directory.mkdir(parents=parents, exist_ok=True) except Exception as e: - logger.error(f"Failed to create {str(directory)} {e}.") + logger.error(f"Failed to create {directory!s} {e}.") return directory diff --git a/flint/validation.py b/flint/validation.py index 6aaa9870..048f9f78 100644 --- a/flint/validation.py +++ b/flint/validation.py @@ -2,9 +2,11 @@ for continuum imaging of RACS data """ +from __future__ import annotations + from argparse import ArgumentParser from pathlib import Path -from typing import Dict, NamedTuple, Optional, Tuple, Union +from typing import NamedTuple import astropy.units as u import matplotlib.pyplot as plt @@ -43,13 +45,13 @@ class ValidationCatalogues(NamedTuple): """ICRF catalogue""" askap: Catalogue """ASKAP catalogue""" - racs_high: Optional[Catalogue] = None + racs_high: Catalogue | None = None """RACS high catalogue""" - racs_mid: Optional[Catalogue] = None + racs_mid: Catalogue | None = None """RACS mid catalogue""" - tgss: Optional[Catalogue] = None + tgss: Catalogue | None = None """TGSS catalogue""" - vlass: Optional[Catalogue] = None + vlass: Catalogue | None = None """VLASS catalogue""" @@ -64,13 +66,13 @@ class Tables(NamedTuple): """ICRF catalogue""" askap: Table """ASKAP catalogue""" - racs_high: Optional[Table] = None + racs_high: Table | None = None """RACS high catalogue""" - racs_mid: Optional[Table] = None + racs_mid: Table | None = None """RACS mid catalogue""" - tgss: Optional[Table] = None + tgss: Table | None = None """TGSS catalogue""" - vlass: 
Optional[Table] = None + vlass: Table | None = None """VLASS catalogue""" @@ -83,13 +85,13 @@ class XMatchTables(NamedTuple): """SUMSS catalogue""" icrf: Path """ICRF catalogue""" - racs_high: Optional[Path] = None + racs_high: Path | None = None """RACS high catalogue""" - racs_mid: Optional[Path] = None + racs_mid: Path | None = None """RACS mid catalogue""" - tgss: Optional[Path] = None + tgss: Path | None = None """TGSS catalogue""" - vlass: Optional[Path] = None + vlass: Path | None = None """VLASS catalogue""" @@ -136,7 +138,7 @@ class RMSImageInfo(NamedTuple): """Path to the RMS fits image""" header: fits.Header """Header from the FITS image""" - shape: Tuple[int, int] + shape: tuple[int, int] """Dimension of the image""" no_valid_pixels: int """Number of valid pixels in the image""" @@ -175,7 +177,7 @@ class SourceCounts(NamedTuple): """Rough estimate of error on the euclidean normalised source counts""" area: float """The area in square degrees that the sources cover, i.e. image footprint sky-area""" - area_fraction: Optional[np.ndarray] = None + area_fraction: np.ndarray | None = None """The fraction of the image that was above a sigma level per flux bin. This may be used as a rough term to scale the Euclidean Normalised source counts. 
This is not intended to be a robust way of correcting the source counts - just quick""" @@ -198,9 +200,9 @@ class MatchResult(NamedTuple): """The indices of the matched sources from the original input table 1""" idx2: np.ndarray """The indices of the matched sources from the original input table 2""" - flux1: Optional[np.ndarray] = None + flux1: np.ndarray | None = None """Brightness in Jy of source in the first survey""" - flux2: Optional[np.ndarray] = None + flux2: np.ndarray | None = None """Brightness in Jy of source in the second survey""" @@ -307,7 +309,7 @@ def get_known_catalogue_info(name: str) -> Catalogue: def load_known_catalogue( name: str, reference_catalogue_directory: Path -) -> Tuple[Table, Catalogue]: +) -> tuple[Table, Catalogue]: """Load in a known catalogue table Args: @@ -499,7 +501,7 @@ def plot_flag_summary( def plot_rms_map( - fig: Figure, ax: Axes, rms_path: Path, source_positions: Optional[SkyCoord] = None + fig: Figure, ax: Axes, rms_path: Path, source_positions: SkyCoord | None = None ) -> Axes: """Add the RMS image to the figure @@ -630,7 +632,7 @@ def get_source_counts( minlogf: float = -4, maxlogf: float = 2, Nbins: int = 81, - rms_image_path: Optional[Path] = None, + rms_image_path: Path | None = None, ) -> SourceCounts: """Derive source counts for a set of fluxes and known area @@ -690,9 +692,9 @@ def plot_source_counts( catalogue: Table, rms_info: RMSImageInfo, ax: Axes, - freq: Optional[float] = None, - dezotti: Optional[Table] = None, - skads: Optional[Table] = None, + freq: float | None = None, + dezotti: Table | None = None, + skads: Table | None = None, ) -> Axes: """Create a figure of source counts from a astropy Table. 
If `freq` and either `dezotti` / `skads` are supplied then these @@ -899,12 +901,7 @@ def plot_astrometry_comparison( ax.set( xlim=(-8, 8), ylim=(-8, 8), xlabel="Offset (arcsec)", ylabel="Offset (arcsec)" ) - le_a1 = r" $\epsilon_{SU} : ({%.1f}\pm{%.1f},{%.1f}\pm{%.1f})$" % ( - mean_x, - std_x, - mean_y, - std_y, - ) + le_a1 = rf" $\epsilon_{{SU}} : ({{{mean_x:.1f}}}\pm{{{std_x:.1f}}},{{{mean_y:.1f}}}\pm{{{std_y:.1f}}})$" ax.text( 7.6, 7.6, @@ -956,11 +953,11 @@ def plot_flux_comparison(fig: Figure, ax: Axes, match_result: MatchResult) -> Ax def scale_flux_alpha( - flux: Union[float, np.ndarray], - freq: Union[float, np.ndarray], + flux: float | np.ndarray, + freq: float | np.ndarray, ref_freq: float, alpha: float = -0.8, -) -> Union[float, np.ndarray]: +) -> float | np.ndarray: """Scale a flux density to a reference frequency using a spectral index Args: @@ -982,7 +979,7 @@ def make_xmatch_table( catalogue2: Catalogue, match_result: MatchResult, output_path: Path, -) -> Tuple[Table, Path]: +) -> tuple[Table, Path]: """Create a simple cross match table between two catalogues Args: @@ -1126,7 +1123,7 @@ def load_catalogues( reference_catalogue_directory: Path, askap_survey_name: str, rms_info: RMSImageInfo, -) -> Tuple[ValidationCatalogues, Tables]: +) -> tuple[ValidationCatalogues, Tables]: """Load in all the catalogues that are required for the validation. Args: @@ -1354,7 +1351,7 @@ def create_validation_tables( ) # Loop over all the catalogues and cross match them to the ASKAP catalogue - _tables: Dict[str, Table] = {} + _tables: dict[str, Table] = {} for survey in catalogues._fields: if survey == "askap": continue diff --git a/tests/test_aegean.py b/tests/test_aegean.py index 0c02d3d3..1871f7d6 100644 --- a/tests/test_aegean.py +++ b/tests/test_aegean.py @@ -3,9 +3,12 @@ feed the clean mask creation. 
""" -import pytest +from __future__ import annotations + from pathlib import Path +import pytest + from flint.exceptions import AttemptRerunException from flint.source_finding.aegean import ( AegeanOptions, diff --git a/tests/test_aocalibrate.py b/tests/test_aocalibrate.py index bbfc6208..3923f15a 100644 --- a/tests/test_aocalibrate.py +++ b/tests/test_aocalibrate.py @@ -1,5 +1,7 @@ """Some tests related to using aoccalibrate related things""" +from __future__ import annotations + import shutil from pathlib import Path @@ -17,9 +19,9 @@ AOSolutions, CalibrateOptions, FlaggedAOSolution, + add_model_options_to_command, calibrate_options_to_command, flag_aosolutions, - add_model_options_to_command, plot_solutions, select_refant, ) @@ -290,14 +292,12 @@ def test_aosols_bandpass_ref_nu_preserve_phase(ao_sols): assert np.allclose(y_angle[np.isfinite(y_angle)], 0) expected = np.array( - ( - [ - -0.10846614 - 0.01465966j, - -0.10776107 - 0.01495074j, - -0.10728749 - 0.01611982j, - -0.10742277 - 0.01654671j, - ] - ) + [ + -0.10846614 - 0.01465966j, + -0.10776107 - 0.01495074j, + -0.10728749 - 0.01611982j, + -0.10742277 - 0.01654671j, + ] ) print(expected) diff --git a/tests/test_archive.py b/tests/test_archive.py index a3c1e934..dda18cd3 100644 --- a/tests/test_archive.py +++ b/tests/test_archive.py @@ -1,5 +1,7 @@ """Tests around archives""" +from __future__ import annotations + import tarfile from pathlib import Path @@ -25,7 +27,7 @@ def glob_files(tmpdir): """Create an example set of temporary files in a known directory""" for f in FILES: - touch_file = f"{str(tmpdir / f)}" + touch_file = f"{tmpdir / f!s}" with open(touch_file, "w") as out_file: out_file.write("example\n") @@ -128,7 +130,7 @@ def test_archive_parser(glob_files): assert args.tar_file_re_patterns == DEFAULT_TAR_RE_PATTERNS example_path = Path("this/no/exist") - args = parser.parse_args(f"list --base-path {str(example_path)}".split()) + args = parser.parse_args(f"list --base-path {example_path!s}".split()) 
assert isinstance(args.base_path, Path) assert args.base_path == example_path @@ -139,7 +141,7 @@ def test_archive_parser(glob_files): example_path = Path(base_dir) args = parser.parse_args( - f"list --base-path {str(example_path)} --copy-file-re-patterns *pdf".split() + f"list --base-path {example_path!s} --copy-file-re-patterns *pdf".split() ) assert isinstance(args.base_path, Path) assert args.base_path == example_path diff --git a/tests/test_baseoptions.py b/tests/test_baseoptions.py index 79a6995b..b6fa88a6 100644 --- a/tests/test_baseoptions.py +++ b/tests/test_baseoptions.py @@ -1,9 +1,11 @@ """Some specific tests around the pydantic base options model that we are using to construct a BaseOptions class""" -import pytest +from __future__ import annotations + from argparse import ArgumentParser +import pytest from pydantic import ValidationError from flint.options import BaseOptions, add_options_to_parser, create_options_from_parser diff --git a/tests/test_bptools.py b/tests/test_bptools.py index 824a487b..ddf1c145 100644 --- a/tests/test_bptools.py +++ b/tests/test_bptools.py @@ -1,5 +1,7 @@ """Itemss around testing components of bptools""" +from __future__ import annotations + import numpy as np import pytest diff --git a/tests/test_casa.py b/tests/test_casa.py index 480b9fed..c19e15e3 100644 --- a/tests/test_casa.py +++ b/tests/test_casa.py @@ -1,5 +1,7 @@ """Tests around the casa self-calibration tooling""" +from __future__ import annotations + from flint.selfcal.casa import args_to_casa_task_string diff --git a/tests/test_catalogue.py b/tests/test_catalogue.py index e37ba0f0..9f91f8aa 100644 --- a/tests/test_catalogue.py +++ b/tests/test_catalogue.py @@ -1,5 +1,7 @@ """Tests that work around the catalogue functionality""" +from __future__ import annotations + from pathlib import Path import pytest @@ -8,11 +10,11 @@ from flint.catalogue import ( KNOWN_REFERENCE_CATALOGUES, Catalogue, + _guess_catalogue_type, download_referencce_catalogues, 
download_vizier_catalogue, get_reference_catalogue, guess_column_in_table, - _guess_catalogue_type, ) from flint.utils import get_packaged_resource_path diff --git a/tests/test_configuration.py b/tests/test_configuration.py index 196b08a7..b9851370 100644 --- a/tests/test_configuration.py +++ b/tests/test_configuration.py @@ -1,8 +1,10 @@ +from __future__ import annotations + import filecmp from pathlib import Path -from click import MissingParameter import pytest +from click import MissingParameter from flint.configuration import ( Strategy, diff --git a/tests/test_convol.py b/tests/test_convol.py index 7bbfa1e9..358c25d7 100644 --- a/tests/test_convol.py +++ b/tests/test_convol.py @@ -1,16 +1,18 @@ """Bits around testing the convolution utilities""" -import pytest +from __future__ import annotations + import shutil from pathlib import Path import numpy as np +import pytest from astropy.io import fits from flint.convol import ( + BeamShape, check_if_cube_fits, get_cube_common_beam, - BeamShape, ) from flint.utils import get_packaged_resource_path diff --git a/tests/test_flagging.py b/tests/test_flagging.py index 66890a9d..84a39f17 100644 --- a/tests/test_flagging.py +++ b/tests/test_flagging.py @@ -1,5 +1,7 @@ """Test utilities related to flagging measurement set operations""" +from __future__ import annotations + import shutil from pathlib import Path diff --git a/tests/test_leakage.py b/tests/test_leakage.py index 06602756..bf0c5b98 100644 --- a/tests/test_leakage.py +++ b/tests/test_leakage.py @@ -1,9 +1,11 @@ """Testing components in the leakage creation steps""" -import pytest +from __future__ import annotations + from pathlib import Path import numpy as np +import pytest from astropy.table import Table from astropy.wcs import WCS @@ -12,8 +14,8 @@ LeakageFilters, PixelCoords, _get_output_catalogue_path, - _load_fits_image, _load_component_table, + _load_fits_image, filter_components, get_xy_pixel_coords, ) diff --git a/tests/test_linmos_coadd.py 
b/tests/test_linmos_coadd.py index b37265a1..3b7480b1 100644 --- a/tests/test_linmos_coadd.py +++ b/tests/test_linmos_coadd.py @@ -3,6 +3,8 @@ some of the helper functions around it. """ +from __future__ import annotations + from pathlib import Path import numpy as np @@ -12,11 +14,11 @@ from flint.coadd.linmos import ( BoundingBox, LinmosParsetSummary, - _linmos_cleanup, _create_bound_box_plane, _get_alpha_linmos_option, _get_holography_linmos_options, _get_image_weight_plane, + _linmos_cleanup, create_bound_box, generate_weights_list_and_files, trim_fits_image, @@ -149,13 +151,13 @@ def test_linmos_holo_options(tmpdir): parset = _get_holography_linmos_options(holofile=holofile, pol_axis=None) assert "linmos.primarybeam = ASKAP_PB\n" in parset assert "linmos.removeleakage = false\n" in parset - assert f"linmos.primarybeam.ASKAP_PB.image = {str(holofile.absolute())}\n" in parset + assert f"linmos.primarybeam.ASKAP_PB.image = {holofile.absolute()!s}\n" in parset assert "linmos.primarybeam.ASKAP_PB.alpha" not in parset parset = _get_holography_linmos_options(holofile=holofile, pol_axis=np.deg2rad(-45)) assert "linmos.primarybeam = ASKAP_PB\n" in parset assert "linmos.removeleakage = false\n" in parset - assert f"linmos.primarybeam.ASKAP_PB.image = {str(holofile.absolute())}\n" in parset + assert f"linmos.primarybeam.ASKAP_PB.image = {holofile.absolute()!s}\n" in parset assert "linmos.primarybeam.ASKAP_PB.alpha" in parset parset = _get_holography_linmos_options( @@ -163,7 +165,7 @@ def test_linmos_holo_options(tmpdir): ) assert "linmos.primarybeam = ASKAP_PB\n" in parset assert "linmos.removeleakage = true\n" in parset - assert f"linmos.primarybeam.ASKAP_PB.image = {str(holofile.absolute())}\n" in parset + assert f"linmos.primarybeam.ASKAP_PB.image = {holofile.absolute()!s}\n" in parset assert "linmos.primarybeam.ASKAP_PB.alpha" in parset diff --git a/tests/test_masking.py b/tests/test_masking.py index 13f841e7..48e6cce9 100644 --- a/tests/test_masking.py +++ 
b/tests/test_masking.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from pathlib import Path import numpy as np @@ -7,6 +9,7 @@ from flint.masking import ( MaskingOptions, _create_signal_from_rmsbkg, + _minimum_absolute_clip, _need_to_make_signal, _verify_set_positive_seed_clip, beam_shape_erode, @@ -15,7 +18,6 @@ create_options_from_parser, create_snr_mask_from_fits, get_parser, - _minimum_absolute_clip, minimum_absolute_clip, ) from flint.naming import FITSMaskNames diff --git a/tests/test_ms.py b/tests/test_ms.py index 0231a840..51ec0c5f 100644 --- a/tests/test_ms.py +++ b/tests/test_ms.py @@ -2,20 +2,22 @@ and the MS class """ +from __future__ import annotations + import shutil from pathlib import Path import numpy as np -from pydantic import ValidationError import pytest from casacore.tables import table +from pydantic import ValidationError from flint.calibrate.aocalibrate import ApplySolutions from flint.ms import ( MS, check_column_in_ms, - copy_and_preprocess_casda_askap_ms, consistent_channelwise_frequencies, + copy_and_preprocess_casda_askap_ms, find_mss, get_phase_dir_from_ms, remove_columns_from_ms, @@ -304,7 +306,7 @@ def test_remove_columns_from_ms(ms_remove_example): updated_columns = _get_column_names(ms_path=ms_remove_example) diff = set(original_columns) - set(updated_columns) assert len(diff) == 1 - assert list(diff)[0] == "DATA" + assert next(iter(diff)) == "DATA" assert removed_columns[0] == "DATA" assert len(removed_columns) == 1 @@ -340,7 +342,7 @@ def test_subtract_model_from_data_column(casda_taql_example): assert ms.exists() ms = MS(path=ms) - from casacore.tables import maketabdesc, makearrcoldesc + from casacore.tables import makearrcoldesc, maketabdesc with table(str(ms.path), readonly=False) as tab: data = tab.getcol("DATA") @@ -387,7 +389,7 @@ def test_subtract_model_from_data_column_ms_column(tmpdir): assert ms.exists() ms = MS(path=ms, column="DATA") - from casacore.tables import maketabdesc, makearrcoldesc + from 
casacore.tables import makearrcoldesc, maketabdesc with table(str(ms.path), readonly=False) as tab: data = tab.getcol("DATA") @@ -436,7 +438,7 @@ def test_subtract_model_from_data_column_ms_column_new_column(tmpdir): assert ms.exists() ms = MS(path=ms, column="DATA") - from casacore.tables import maketabdesc, makearrcoldesc + from casacore.tables import makearrcoldesc, maketabdesc with table(str(ms.path), readonly=False) as tab: data = tab.getcol("DATA") diff --git a/tests/test_naming.py b/tests/test_naming.py index 856524cd..5ddb013a 100644 --- a/tests/test_naming.py +++ b/tests/test_naming.py @@ -1,8 +1,9 @@ """Some tests related to components around measurement sets.""" +from __future__ import annotations + from datetime import datetime from pathlib import Path -from typing import List import pytest @@ -649,7 +650,7 @@ def test_get_beam_from_name(): ) -def get_lots_of_names() -> List[Path]: +def get_lots_of_names() -> list[Path]: examples = [ "59058/SB59058.RACS_1626-84.ch0285-0286.linmos.fits", "59058/SB59058.RACS_1626-84.ch0285-0286.linmos.fits", @@ -689,7 +690,7 @@ def test_create_name_from_common_fields(): create_name_from_common_fields(in_paths=examples) -def get_lots_of_names_2() -> List[Path]: +def get_lots_of_names_2() -> list[Path]: examples = [ "59058/SB59058.RACS_1626-84.round4.i.ch0285-0286.linmos.fits", "59058/SB59058.RACS_1626-84.round4.i.ch0285-0286.linmos.fits", diff --git a/tests/test_options.py b/tests/test_options.py index 6fac1ee2..735077ea 100644 --- a/tests/test_options.py +++ b/tests/test_options.py @@ -3,6 +3,8 @@ to create it """ +from __future__ import annotations + from pathlib import Path from typing import List @@ -11,10 +13,10 @@ from flint.options import ( FieldOptions, - dump_field_options_to_yaml, - options_to_dict, _create_argparse_options, create_options_from_parser, + dump_field_options_to_yaml, + options_to_dict, ) from flint.prefect.flows.continuum_pipeline import get_parser @@ -124,7 +126,7 @@ def 
test_config_field_options(tmpdir): args = parser.parse_args( f"""/scratch3/gal16b/askap_sbids/112334/ --calibrated-bandpass-path /scratch3/gal16b/askap_sbids/111/ - --cli-config {str(output_file)}""".split() + --cli-config {output_file!s}""".split() ) field_options = create_options_from_parser( diff --git a/tests/test_potato.py b/tests/test_potato.py index 765dffde..3a18770b 100644 --- a/tests/test_potato.py +++ b/tests/test_potato.py @@ -1,5 +1,7 @@ """Some basic checks around the potato peel functionality""" +from __future__ import annotations + import shutil from pathlib import Path diff --git a/tests/test_prefect_bandpass_flow.py b/tests/test_prefect_bandpass_flow.py index 1a29c6f5..8702d31e 100644 --- a/tests/test_prefect_bandpass_flow.py +++ b/tests/test_prefect_bandpass_flow.py @@ -1,10 +1,12 @@ """Tests that are specific to the bandpass calibration flow""" +from __future__ import annotations + from pathlib import Path -from flint.prefect.flows import bandpass_pipeline from flint.options import BandpassOptions, create_options_from_parser +from flint.prefect.flows import bandpass_pipeline def test_bandpass_cli(): diff --git a/tests/test_prefect_subtractcube_flow.py b/tests/test_prefect_subtractcube_flow.py index 0e666e7d..25e722d6 100644 --- a/tests/test_prefect_subtractcube_flow.py +++ b/tests/test_prefect_subtractcube_flow.py @@ -1,5 +1,7 @@ """Tests around the subtract cube imaging flow""" +from __future__ import annotations + from flint.prefect.flows.subtract_cube_pipeline import get_parser diff --git a/tests/test_selfcal_utils.py b/tests/test_selfcal_utils.py index 50ff54f2..ae3c627a 100644 --- a/tests/test_selfcal_utils.py +++ b/tests/test_selfcal_utils.py @@ -1,5 +1,7 @@ """Tests around utility helper functions for self-calibration""" +from __future__ import annotations + from flint.selfcal.utils import consider_skip_selfcal_on_round diff --git a/tests/test_summary.py b/tests/test_summary.py index 4b49883f..5858ca46 100644 --- a/tests/test_summary.py 
+++ b/tests/test_summary.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import shutil from pathlib import Path diff --git a/tests/test_utils.py b/tests/test_utils.py index 378b143d..da1a1793 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,5 +1,7 @@ """Basic tests for utility functions""" +from __future__ import annotations + import math import os import shutil @@ -521,7 +523,7 @@ def test_package_resource_path_askap_lua(): assert isinstance(askap_lua, Path) assert askap_lua.exists() - with open(askap_lua, "r") as open_lua: + with open(askap_lua) as open_lua: line = open_lua.readline() assert line == "--[[\n" @@ -535,7 +537,7 @@ def test_package_resource_path_skymodel(): assert isinstance(askap_model, Path) assert askap_model.exists() - with open(askap_model, "r") as open_model: + with open(askap_model) as open_model: line = open_model.readline() assert ( line diff --git a/tests/test_validation.py b/tests/test_validation.py index eb151053..27a9cacb 100644 --- a/tests/test_validation.py +++ b/tests/test_validation.py @@ -1,5 +1,7 @@ """Items related to test functions in the validation stage of flint""" +from __future__ import annotations + from pathlib import Path from typing import NamedTuple diff --git a/tests/test_wsclean.py b/tests/test_wsclean.py index adcf2e4d..f03fd9f3 100644 --- a/tests/test_wsclean.py +++ b/tests/test_wsclean.py @@ -1,9 +1,11 @@ """Testing some wsclean functionality.""" +from __future__ import annotations + import os import shutil from pathlib import Path -from typing import Any, Dict +from typing import Any import pytest @@ -12,8 +14,8 @@ ImageSet, WSCleanCommand, WSCleanOptions, - _rename_wsclean_title, _rename_wsclean_file, + _rename_wsclean_title, _resolve_wsclean_key_value_to_cli_str, _wsclean_output_callback, combine_subbands_to_cube, @@ -113,8 +115,8 @@ def test_rename_wsclean_imageset(tmpdir: Any): test_dir.mkdir(parents=True, exist_ok=True) # create some test files and ensure they all exist - keys: 
Dict[Any, Any] = {} - prefix = f"{str(test_dir)}/SB39400.RACS_0635-31.beam33.i" + keys: dict[Any, Any] = {} + prefix = f"{test_dir!s}/SB39400.RACS_0635-31.beam33.i" keys["prefix"] = prefix for mode in ("image", "residual"): items = [ From 0f5fa4ec252c0e69107ac7204bc6aa7d45ecb9b2 Mon Sep 17 00:00:00 2001 From: "Alec Thomson (S&A, Kensington WA)" Date: Wed, 8 Jan 2025 14:31:11 +0800 Subject: [PATCH 03/10] Typos --- .pre-commit-config.yaml | 5 +++++ pyproject.toml | 3 +++ 2 files changed, 8 insertions(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b9279404..86a5a52e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -56,6 +56,11 @@ repos: - pytest - types-PyYAML + - repo: https://github.com/crate-ci/typos + rev: v1.29.4 + hooks: + - id: typos + - repo: https://github.com/codespell-project/codespell rev: "v2.3.0" hooks: diff --git a/pyproject.toml b/pyproject.toml index 7af81f00..65309abb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -179,3 +179,6 @@ skip-magic-trailing-comma = false # Like Black, automatically detect the appropriate line ending. 
line-ending = "auto" + +[tool.typos.files] +extend-exclude = ["*.fits", "flint/data/**/*"] From 47d805f1e3fed403494b48bde4c9180b8308e0b0 Mon Sep 17 00:00:00 2001 From: "Alec Thomson (S&A, Kensington WA)" Date: Wed, 8 Jan 2025 14:43:00 +0800 Subject: [PATCH 04/10] Add ignored --- pyproject.toml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 65309abb..a8687ea9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -181,4 +181,10 @@ skip-magic-trailing-comma = false line-ending = "auto" [tool.typos.files] -extend-exclude = ["*.fits", "flint/data/**/*"] +extend-exclude = ["*.fits", "flint/data/**/*", "ignore_words.txt"] + +[tool.typos.default.extend-identifiers] +# Don't flag these domain-specific identifiers as typos +FIELDs = "FIELDs" +FoV = "FoV" +outweight = "outweight" \ No newline at end of file From e9e3eb14aea8d25c7c234c429decfb193bbf0189 Mon Sep 17 00:00:00 2001 From: "Alec Thomson (S&A, Kensington WA)" Date: Wed, 8 Jan 2025 14:45:47 +0800 Subject: [PATCH 05/10] Typos --- CHANGELOG.md | 2 +- README.md | 4 ++-- docs/index.rst | 2 +- flint/calibrate/aocalibrate.py | 10 +++++----- flint/leakage.py | 4 ++-- flint/naming.py | 6 +++--- flint/options.py | 4 ++-- flint/prefect/common/imaging.py | 6 +++--- flint/prefect/flows/bandpass_pipeline.py | 2 +- flint/prefect/flows/continuum_pipeline.py | 2 +- flint/sky_model.py | 4 ++-- flint/source_finding/aegean.py | 2 +- flint/validation.py | 2 +- tests/test_ms.py | 6 +++--- 14 files changed, 28 insertions(+), 28 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ab76bfc7..7aa43c40 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -22,7 +22,7 @@ - Added a `timelimit_on_context` helper to raise an error after some specified length of time. Looking at you, BANE and issue #186. Arrrr. - Added a `BANE` callback handler to attempt to help #186.
This includes a - `AttemptRerunException` and corresponding code in `run_singularity_comand` to + `AttemptRerunException` and corresponding code in `run_singularity_command` to retry the failing command. # 0.2.8 diff --git a/README.md b/README.md index 5b15a7a6..6f4ed946 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ A pirate themed toy ASKAP-RACS pipeline. -Yarrrr-Harrrr fiddley-dee! +Yarrrr-Harrrr fiddly-dee! Capn' Flint - Credit: DALLE 3 @@ -148,7 +148,7 @@ the time of writing there are six containers for: - calibration: this should contain `calibrate` and `applysolutions`. These are tools written by Andre Offringa. - flagging: this should contain `aoflagger`, which is installable via a - `apt install aoflagger` within ubunutu. + `apt install aoflagger` within ubuntu. - imaging: this should contain `wsclean`. This should be at least version 3. At the moment a modified version is being used (which implements a `-force-mask-round` option). diff --git a/docs/index.rst b/docs/index.rst index 57961981..705f3c98 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -8,7 +8,7 @@ Welcome to flint's documentation! A pirate themed toy ASKAP-RACS pipeline. -Yarrrr-Harrrr fiddley-dee! +Yarrrr-Harrrr fiddly-dee! .. image:: logo.jpeg :width: 400 diff --git a/flint/calibrate/aocalibrate.py b/flint/calibrate/aocalibrate.py index 1ea3e9bf..912b7279 100644 --- a/flint/calibrate/aocalibrate.py +++ b/flint/calibrate/aocalibrate.py @@ -115,7 +115,7 @@ class AOSolutions(NamedTuple): npol: int """Number of polarisations in the file""" bandpass: np.ndarray - """Complex data representing the antennea Jones. Shape is (nsol, nant, nchan, npol)""" + """Complex data representing the antenna Jones. 
Shape is (nsol, nant, nchan, npol)""" # TODO: Need tocorporate the start and end times into this header @@ -138,7 +138,7 @@ def save(self, output_path: Path) -> Path: return save_aosolutions_file(aosolutions=self, output_path=output_path) def plot_solutions(self, ref_ant: int | None = 0) -> Iterable[Path]: - """Plot the solutions of all antenna for the first time-inteval + """Plot the solutions of all antenna for the first time-interval in the aosolutions file. The XX and the YY will be plotted. Args: @@ -974,7 +974,7 @@ def flag_aosolutions( ant_gains = ref_bandpass[ant, :, pol] plot_title = f"{title} - ant{ant:02d} - {pols[pol]}" - ouput_path = ( + output_path = ( plot_dir / f"{title}.ant{ant:02d}.{pols[pol]}.png" if plot_dir is not None else None @@ -989,7 +989,7 @@ def flag_aosolutions( complex_gains=ant_gains, flag_cut=flag_cut, plot_title=plot_title, - plot_path=ouput_path, + plot_path=output_path, ) bandpass[time, ant, phase_outlier_result.outlier_mask, :] = np.nan except PhaseOutlierFitError: @@ -1197,7 +1197,7 @@ def get_parser() -> ArgumentParser: "--flag-cut", type=float, default=3.0, - help="The significance level thaat an outlier phase has to be before being flagged", + help="The significance level that an outlier phase has to be before being flagged", ) flag_sols_parser.add_argument( "--plot-dir", diff --git a/flint/leakage.py b/flint/leakage.py index 60f0cf4d..0d2c6d12 100644 --- a/flint/leakage.py +++ b/flint/leakage.py @@ -177,7 +177,7 @@ def get_xy_pixel_coords( """Convert (RA, Dec) positions in a catalogue into (x, y)-pixels given an WCS Args: - table (Table): The table containing sources to collect (x, y)-coodinates + table (Table): The table containing sources to collect (x, y)-coordinates wcs (WCS): The WCS description to use to resolve (RA, Dec) to (x, y) ra_col (Optional[str], optional): The RA column name. If None, it will be guessed. Defaults to None. dec_col (Optional[str], optional): The Dec column name. If None, it will be guessed. 
Defaults to None. @@ -410,7 +410,7 @@ def create_leakge_component_table( def get_parser() -> ArgumentParser: - parser = ArgumentParser(description="Create a leakage cataloge and map") + parser = ArgumentParser(description="Create a leakage catalogue and map") parser.add_argument("pol_image", type=Path, help="Path to the polarisation image") parser.add_argument( "component_catalogue", type=Path, help="Path to the component catalogue" diff --git a/flint/naming.py b/flint/naming.py index ec5ab452..7b884bd1 100644 --- a/flint/naming.py +++ b/flint/naming.py @@ -82,7 +82,7 @@ def create_name_from_common_fields( keys_to_test = processed_components_dict[0].keys() logger.info(f"{keys_to_test=}") # One of the worst crimes on the seven seas I have ever done - # If a field is None, it was not detected. If a field is not constanst + # If a field is None, it was not detected. If a field is not constants # across all input paths, it is ignored. Should a field be considered # common across all input paths, look up its short hand that # would otherwise be usede and use it. @@ -219,7 +219,7 @@ def get_beam_resolution_str(mode: str, marker: str | None = None) -> str: def get_selfcal_ms_name(in_ms_path: Path, round: int = 1) -> Path: """Create the new output MS path that will be used for self-calibration. The output measurement set path will include a roundN.ms suffix, where N is the - round. If such a suffic already exists from an earlier self-calibration round, + round. If such a suffix already exists from an earlier self-calibration round, it will be removed and replaced. Args: @@ -407,7 +407,7 @@ def processed_ms_format( in_name (Union[str, Path]): The name that needs to be broken down into components Returns: - Union[FormatedNameComponents,None': A structure container the sbid, field, beam and spw. None is returned if can not be parsed. + Union[FormattedNameComponents,None': A structure container the sbid, field, beam and spw. None is returned if can not be parsed. 
""" in_name = in_name.name if isinstance(in_name, Path) else in_name diff --git a/flint/options.py b/flint/options.py index 37ddde23..f9bc1ba0 100644 --- a/flint/options.py +++ b/flint/options.py @@ -147,7 +147,7 @@ def add_options_to_parser( def create_options_from_parser( parser_namespace: Namespace, options_class: type[U] ) -> U: - """Given a ``BaseOptions`` derieved class, extract the corresponding + """Given a ``BaseOptions`` derived class, extract the corresponding arguments from an ``argparse.nNamespace``. These options correspond to ones generated by ``add_options_to_parser``. @@ -216,7 +216,7 @@ class BandpassOptions(BaseOptions): class AddModelSubtractFieldOptions(BaseOptions): """Options related to predicting a continuum model during the SubtractFieldOptions workflow. Specifically these options deal with identifying the wsclean produced source list model, which - may be used by ``admodel`` to predict model visibilities. See utilities aroun the ``aocalibrate`` + may be used by ``admodel`` to predict model visibilities. See utilities around the ``aocalibrate`` functions and routines.""" attempt_addmodel: bool = False diff --git a/flint/prefect/common/imaging.py b/flint/prefect/common/imaging.py index 8143c75f..136a3c35 100644 --- a/flint/prefect/common/imaging.py +++ b/flint/prefect/common/imaging.py @@ -615,7 +615,7 @@ def task_linmos_images( Args: images (Collection[Collection[Path]]): Images that will be co-added together container (Path): Path to singularity container that contains yandasoft - filter (Optional[str], optional): Filter to extract the images that will be extracted from the set of input images. These will be co-added. If None all images are co-aded. Defaults to ".MFS.". + filter (Optional[str], optional): Filter to extract the images that will be extracted from the set of input images. These will be co-added. If None all images are co-added. Defaults to ".MFS.". 
suffix_str (str, optional): Additional string added to the prefix of the output linmos image products. Defaults to "noselfcal". holofile (Optional[Path], optional): The FITS cube with the beam corrections derived from ASKAP holography. Defaults to None. parset_output_path (Optional[str], optional): Location to write the linmos parset file to. Defaults to None. @@ -623,7 +623,7 @@ def task_linmos_images( field_summary (Optional[FieldSummary], optional): The summary of the field, including (importantly) to orientation of the third-axis. Defaults to None. trim_linmos_fits (bool, optional): Attempt to trim the output linmos files of as much empty space as possible. Defaults to True. remove_original_images (bool, optional): If True remove the original image after they have been convolved. Defaults to False. - cleanup (bool, optional): Clean up items created throughout linmos, including the per-channl weight text files for each input image. Defaults to False. + cleanup (bool, optional): Clean up items created throughout linmos, including the per-channel weight text files for each input image. Defaults to False. Returns: LinmosCommand: The linmos command and associated meta-data @@ -709,7 +709,7 @@ def _convolve_linmos( convol_suffix_str (str, optional): The suffix added to the convolved images. Defaults to 'conv'. trim_linmos_fits (bool, optional): Attempt to trim the output linmos files of as much empty space as possible. Defaults to True. remove_original_images (bool, optional): If True remove the original image after they have been convolved. Defaults to False. - cleanup_linmos (bool, optional): Clean up items created throughout linmos, including the per-channl weight text files for each input image. Defaults to False. + cleanup_linmos (bool, optional): Clean up items created throughout linmos, including the per-channel weight text files for each input image. Defaults to False. 
Returns: LinmosCommand: Resulting linmos command parset diff --git a/flint/prefect/flows/bandpass_pipeline.py b/flint/prefect/flows/bandpass_pipeline.py index e780e2a2..8e18749b 100644 --- a/flint/prefect/flows/bandpass_pipeline.py +++ b/flint/prefect/flows/bandpass_pipeline.py @@ -321,7 +321,7 @@ def setup_run_bandpass_flow( def get_parser() -> ArgumentParser: - """Create an argument paraser for the bandpass prefect workflow + """Create an argument parser for the bandpass prefect workflow Returns: ArgumentParser: CLI argument parser diff --git a/flint/prefect/flows/continuum_pipeline.py b/flint/prefect/flows/continuum_pipeline.py index 9f01cb98..370e80ec 100644 --- a/flint/prefect/flows/continuum_pipeline.py +++ b/flint/prefect/flows/continuum_pipeline.py @@ -92,7 +92,7 @@ def _check_field_options(field_options: FieldOptions) -> None: if field_options.rounds is not None: if field_options.rounds >= 1 and field_options.casa_container is None: raise ValueError( - "CASA Container needs to be set if self-calibraiton is to be performed" + "CASA Container needs to be set if self-calibration is to be performed" ) if field_options.coadd_cubes: if ( diff --git a/flint/sky_model.py b/flint/sky_model.py index 873e07d4..82dde753 100644 --- a/flint/sky_model.py +++ b/flint/sky_model.py @@ -599,7 +599,7 @@ def make_hyperdrive_model(out_path: Path, sources: list[tuple[Row, CurvedPL]]) - Args: out_path (Path): The output path that the sky-model would be written to sources (List[Tuple[Row,CurvedPL]]): Collection of sources to write, including the - normalied row and the results of fitting to the estimated apparent SED + normalized row and the results of fitting to the estimated apparent SED Returns: Path: The path of the file created @@ -777,7 +777,7 @@ def create_sky_model( src_pos = SkyCoord(row["RA"], row["DEC"]) src_sep = src_pos.separation(direction) - # Get the primary beam reasponse + # Get the primary beam response gauss_taper = generate_gaussian_pb( freqs=freqs, 
aperture=12.0 * u.m, offset=src_sep ) diff --git a/flint/source_finding/aegean.py b/flint/source_finding/aegean.py index 0a5105d0..3ffeb6d7 100644 --- a/flint/source_finding/aegean.py +++ b/flint/source_finding/aegean.py @@ -27,7 +27,7 @@ class AegeanOptions(NamedTuple): """Container for basic aegean options. Only a subclass of aegean options are supported. Of note is the lack of a tables option (corresponding to --tables). This is dependent on knowing the base output name - and relying on aegean to also append a suffic of sorts to the outputs. For that reason + and relying on aegean to also append a suffix of sorts to the outputs. For that reason the aegean command generated will always create the table option. """ diff --git a/flint/validation.py b/flint/validation.py index 048f9f78..94184810 100644 --- a/flint/validation.py +++ b/flint/validation.py @@ -103,7 +103,7 @@ class ValidationTables(NamedTuple): stats_table_path: Path """Path to the statistics table""" xmatch_tables: XMatchTables - """Cross-mathed tables""" + """Cross-matched tables""" class ValidatorLayout(NamedTuple): diff --git a/tests/test_ms.py b/tests/test_ms.py index 51ec0c5f..4e895469 100644 --- a/tests/test_ms.py +++ b/tests/test_ms.py @@ -140,7 +140,7 @@ def test_copy_preprocess_ms(casda_example, tmpdir): ) _test_the_data(ms=new_ms.path) - # wjem file format not recgonised + # When file format not recognised with pytest.raises(ValueError): copy_and_preprocess_casda_askap_ms( casda_ms=Path(casda_example) / "Thisdoesnotexist", @@ -191,7 +191,7 @@ def _get_columns(ms_path): def test_rename_ms_and_columns_for_selfcal_correct2data(ms_example, tmpdir): - """Sanity around renaming a MS and handlign the columns that should be renamed""" + """Sanity around renaming a MS and handling the columns that should be renamed""" ms = MS.cast(Path(ms_example)) with table(str(ms.path), readonly=False, ack=False) as tab: tab.renamecol("DATA", "CORRECTED_DATA") @@ -214,7 +214,7 @@ def 
test_rename_ms_and_columns_for_selfcal_correct2data(ms_example, tmpdir): def test_rename_ms_and_columns_for_selfcal(ms_example, tmpdir): - """Sanity around renaming a MS and handlign the columns that should be renamed""" + """Sanity around renaming a MS and handling the columns that should be renamed""" ms = MS.cast(Path(ms_example)) colnames = _get_columns(ms_path=ms.path) From e8e2188c4fece636663b13c909c69ba955e5daff Mon Sep 17 00:00:00 2001 From: "Alec Thomson (S&A, Kensington WA)" Date: Wed, 8 Jan 2025 14:49:06 +0800 Subject: [PATCH 06/10] typos --- tests/test_wsclean.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_wsclean.py b/tests/test_wsclean.py index f03fd9f3..ed92829c 100644 --- a/tests/test_wsclean.py +++ b/tests/test_wsclean.py @@ -374,7 +374,7 @@ def test_wsclean_divergence(): "Iteration 59228, scale 0 px : -862.94 µJy at 3729,3746", "Opening reordered part 0 spw 0 for /scratch3/gal16b/flint_peel/40470/SB40470.RACS_1237+00.beam4.round1.ms", "Opening reordered part 0 spw 0 for /scratch3/gal16b/flint_peel/40470/SB40470.RACS_1237+00.beam4.round1.ms", - "Although KJy there is no iterat ion, not the lack of a capital-I and the space, clever pirate", + "Although KJy there is no iterate ion, not the lack of a capital-I and the space, clever pirate", ) for g in good: _wsclean_output_callback(line=g) From e1855e3f689916ac4dd0c07aa18eb7d8e828280e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 8 Jan 2025 06:49:36 +0000 Subject: [PATCH 07/10] style: pre-commit fixes --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index a8687ea9..f7c67969 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -187,4 +187,4 @@ extend-exclude = ["*.fits", "flint/data/**/*", "ignore_words.txt"] # Don't correct the surname "Teh" FIELDs = "FIELDs" FoV = "FoV" -outweight= "outweight" \ No newline at end of file 
+outweight= "outweight" From e117fce995ac2698524e556c032a3303c8688622 Mon Sep 17 00:00:00 2001 From: "Alec Thomson (S&A, Kensington WA)" Date: Wed, 8 Jan 2025 14:51:29 +0800 Subject: [PATCH 08/10] Ignore --- pyproject.toml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index a8687ea9..50a9d8d7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -184,7 +184,6 @@ line-ending = "auto" extend-exclude = ["*.fits", "flint/data/**/*", "ignore_words.txt"] [tool.typos.default.extend-identifiers] -# Don't correct the surname "Teh" FIELDs = "FIELDs" FoV = "FoV" -outweight= "outweight" \ No newline at end of file +outweight= "outweight" From 48117a4771c08ffd54abf47b243ee4bf943f01ec Mon Sep 17 00:00:00 2001 From: "Alec Thomson (S&A, Kensington WA)" Date: Wed, 8 Jan 2025 15:46:46 +0800 Subject: [PATCH 09/10] Fix tests --- tests/test_wsclean.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/tests/test_wsclean.py b/tests/test_wsclean.py index ed92829c..d417c0f0 100644 --- a/tests/test_wsclean.py +++ b/tests/test_wsclean.py @@ -398,17 +398,11 @@ def test_attemptrerun_wsclean_output_callback(): "Opening reordered part 0 spw 0 for /scratch3/gal16b/flint_peel/40470/SB40470.RACS_1237+00.beam4.round1.ms", "Although Input/output is here, it is not next to error", "Similar with temporary data file error opening error", - "Input/output error", ) for g in good: _wsclean_output_callback(line=g) - bad = ( - "Error opening temporary data file", - # "Input/output error", - "Some other words Error opening temporary data file", - # "Input/output error and more errors to be here", - ) + bad = ("Input/output error",) for b in bad: with pytest.raises(AttemptRerunException): _wsclean_output_callback(line=b) From 7ee354f097df1b0f4fa2016f78f6bad148b35272 Mon Sep 17 00:00:00 2001 From: "Alec Thomson (S&A, Kensington WA)" Date: Thu, 9 Jan 2025 10:45:25 +0800 Subject: [PATCH 10/10] Update test --- tests/test_wsclean.py | 6 
+++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/test_wsclean.py b/tests/test_wsclean.py index d417c0f0..1e7043ca 100644 --- a/tests/test_wsclean.py +++ b/tests/test_wsclean.py @@ -402,7 +402,11 @@ def test_attemptrerun_wsclean_output_callback(): for g in good: _wsclean_output_callback(line=g) - bad = ("Input/output error",) + bad = ( + "Input/output error", + "But why is the rum gone... Input/output error", + "Input/output error should cause a remake of Pirates of the Caribbean", + ) for b in bad: with pytest.raises(AttemptRerunException): _wsclean_output_callback(line=b)