From 4e6cf8d7375d185a77ea33315fbed16e4705c6b2 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 13 Jan 2025 16:45:12 +0000 Subject: [PATCH 1/2] chore: update pre-commit hooks MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.8.2 → v0.9.1](https://github.com/astral-sh/ruff-pre-commit/compare/v0.8.2...v0.9.1) - [github.com/pre-commit/mirrors-mypy: v1.13.0 → v1.14.1](https://github.com/pre-commit/mirrors-mypy/compare/v1.13.0...v1.14.1) - [github.com/crate-ci/typos: v1.29.4 → dictgen-v0.3.1](https://github.com/crate-ci/typos/compare/v1.29.4...dictgen-v0.3.1) - [github.com/python-jsonschema/check-jsonschema: 0.30.0 → 0.31.0](https://github.com/python-jsonschema/check-jsonschema/compare/0.30.0...0.31.0) --- .pre-commit-config.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8ec8d6a9..1584ff4c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -40,14 +40,14 @@ repos: args: [--prose-wrap=always] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.8.2" + rev: "v0.9.1" hooks: - id: ruff args: ["--fix", "--show-fixes"] - id: ruff-format - repo: https://github.com/pre-commit/mirrors-mypy - rev: "v1.13.0" + rev: "v1.14.1" hooks: - id: mypy files: flint|tests @@ -57,7 +57,7 @@ repos: - types-PyYAML - repo: https://github.com/crate-ci/typos - rev: v1.29.4 + rev: dictgen-v0.3.1 hooks: - id: typos @@ -87,7 +87,7 @@ repos: additional_dependencies: ["validate-pyproject-schema-store[all]"] - repo: https://github.com/python-jsonschema/check-jsonschema - rev: "0.30.0" + rev: "0.31.0" hooks: - id: check-dependabot - id: check-github-workflows From 478485b9194313ceb1c15fa46cbdcd6dcc150f27 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 13 Jan 2025 
16:45:31 +0000 Subject: [PATCH 2/2] style: pre-commit fixes --- flint/archive.py | 12 +-- flint/bandpass.py | 6 +- flint/bptools/preflagger.py | 24 +++--- flint/bptools/smoother.py | 18 ++--- flint/calibrate/aocalibrate.py | 52 ++++++------ flint/catalogue.py | 6 +- flint/coadd/linmos.py | 48 +++++------ flint/configuration.py | 24 +++--- flint/convol.py | 24 +++--- flint/flagging.py | 14 ++-- flint/leakage.py | 16 ++-- flint/masking.py | 30 +++---- flint/ms.py | 80 +++++++++---------- flint/naming.py | 8 +- flint/options.py | 6 +- flint/peel/potato.py | 6 +- flint/prefect/common/imaging.py | 42 +++++----- flint/prefect/common/ms.py | 6 +- flint/prefect/common/utils.py | 12 +-- flint/prefect/flows/bandpass_pipeline.py | 18 ++--- .../prefect/flows/continuum_mask_pipeline.py | 18 ++--- flint/prefect/flows/continuum_pipeline.py | 16 ++-- flint/prefect/flows/subtract_cube_pipeline.py | 36 ++++----- flint/sky_model.py | 20 ++--- flint/source_finding/aegean.py | 2 +- flint/summary.py | 6 +- flint/utils.py | 32 ++++---- flint/validation.py | 14 ++-- tests/test_naming.py | 2 +- 29 files changed, 300 insertions(+), 298 deletions(-) diff --git a/flint/archive.py b/flint/archive.py index e2572dd6..84ac0160 100644 --- a/flint/archive.py +++ b/flint/archive.py @@ -77,11 +77,11 @@ def copy_files_into(copy_out_path: Path, files_to_copy: Collection[Path]) -> Pat logger.info(f"Copying {total} files into {copy_out_path}") for count, file in enumerate(files_to_copy): if file.is_file(): - logger.info(f"{count+1} of {total}, copying file {file}") + logger.info(f"{count + 1} of {total}, copying file {file}") shutil.copy(file, copy_out_path) elif file.is_dir(): # TODO: Add an option to tar folders into the final location - logger.info(f"{count+1} of {total}, copying folder {file}") + logger.info(f"{count + 1} of {total}, copying folder {file}") shutil.copytree(file, copy_out_path / file.name) else: not_copied.append(file) @@ -108,9 +108,9 @@ def verify_tarball( bool: True if the ``tar``s 
exit code is 0, False otherwise """ tarball = Path(tarball) # trust nothing - assert ( - tarball.exists() and tarball.is_file() - ), f"{tarball} is not a file or does not exist" + assert tarball.exists() and tarball.is_file(), ( + f"{tarball} is not a file or does not exist" + ) assert tarball.suffix == ".tar", f"{tarball=} appears to not have a .tar extension" cmd = f"tar -tvf {tarball!s}" @@ -155,7 +155,7 @@ def tar_files_into( logger.info(f"Opening {tar_out_path}") with tarfile.open(tar_out_path, "w") as tar: for count, file in enumerate(files_to_tar): - logger.info(f"{count+1} of {total}, adding {file!s}") + logger.info(f"{count + 1} of {total}, adding {file!s}") tar.add(file, arcname=file.name) logger.info(f"Created {tar_out_path}") diff --git a/flint/bandpass.py b/flint/bandpass.py index 4e238e30..3b1b0faa 100644 --- a/flint/bandpass.py +++ b/flint/bandpass.py @@ -67,9 +67,9 @@ def flag_bandpass_offset_pointings(ms: MS | Path) -> MS: field_names = tab.getcol("NAME") field_idx = np.argwhere([fn == good_field_name for fn in field_names])[0] - assert ( - len(field_idx) == 1 - ), f"More than one matching field name found. This should not happen. {good_field_name=} {field_names=}" + assert len(field_idx) == 1, ( + f"More than one matching field name found. This should not happen. {good_field_name=} {field_names=}" + ) field_idx = field_idx[0] logger.info(f"{good_field_name} FIELD_ID is {field_idx}") diff --git a/flint/bptools/preflagger.py b/flint/bptools/preflagger.py index 98750485..c79f94fc 100644 --- a/flint/bptools/preflagger.py +++ b/flint/bptools/preflagger.py @@ -312,9 +312,9 @@ def flags_over_threshold( bool: Whether the number of flags has reached a threshold """ - assert ( - 0.0 <= thresh <= 1.0 - ), f"The provided {thresh=} should be a fraction between 0 to 1. " + assert 0.0 <= thresh <= 1.0, ( + f"The provided {thresh=} should be a fraction between 0 to 1. 
" + ) number_flagged = np.sum(flags) # Use the shape in case multi-dimensional array passed in @@ -468,9 +468,9 @@ def flag_mean_xxyy_amplitude_ratio( bool: Whether data should be flagged (True) or not (False) """ - assert ( - xx_complex_gains.shape == yy_complex_gains.shape - ), f"Input xx and yy shapes do not match. {xx_complex_gains.shape=} {yy_complex_gains.shape=}" + assert xx_complex_gains.shape == yy_complex_gains.shape, ( + f"Input xx and yy shapes do not match. {xx_complex_gains.shape=} {yy_complex_gains.shape=}" + ) logger.info("Calculating mean ratios. ") xx_amplitudes = np.abs(xx_complex_gains) @@ -514,9 +514,9 @@ def construct_mesh_ant_flags(mask: np.ndarray) -> np.ndarray: np.ndarray: Output array where antennas have common sets of flags """ - assert ( - len(mask.shape) == 3 - ), f"Expect array of shape (ant, channel, pol), received {mask.shape=}" + assert len(mask.shape) == 3, ( + f"Expect array of shape (ant, channel, pol), received {mask.shape=}" + ) accumulate_mask = np.zeros_like(mask[0], dtype=bool) nant = mask.shape[0] @@ -562,9 +562,9 @@ def construct_jones_over_max_amp_flags( np.ndarray: Boolean array of equal shape to `complex_gains`, with `True` indicating a flag """ - assert ( - complex_gains.shape[-1] == 4 - ), f"Expected last dimension to be length 4, received {complex_gains.shape=}" + assert complex_gains.shape[-1] == 4, ( + f"Expected last dimension to be length 4, received {complex_gains.shape=}" + ) logger.info(f"Creating mask for Jones with amplitudes of {max_amplitude=}") complex_gains = complex_gains.copy() diff --git a/flint/bptools/smoother.py b/flint/bptools/smoother.py index af105a6e..4ff499c3 100644 --- a/flint/bptools/smoother.py +++ b/flint/bptools/smoother.py @@ -43,9 +43,9 @@ def divide_bandpass_by_ref_ant_preserve_phase( Returns: np.ndarray: The normalised bandpass solutions """ - assert ( - len(complex_gains.shape) == 3 - ), f"The shape of the input complex gains should be of rank 3 in form (ant, chan, pol). 
Received {complex_gains.shape}" + assert len(complex_gains.shape) == 3, ( + f"The shape of the input complex gains should be of rank 3 in form (ant, chan, pol). Received {complex_gains.shape}" + ) logger.info( f"Dividing bandpass gain solutions using reference antenna={ref_ant}, using correct phasor" @@ -113,9 +113,9 @@ def divide_bandpass_by_ref_ant(complex_gains: np.ndarray, ref_ant: int) -> np.nd Returns: np.ndarray: The normalised bandpass solutions """ - assert ( - len(complex_gains.shape) == 3 - ), f"The shape of the input complex gains should be of rank 3 in form (ant, chan, pol). Received {complex_gains.shape}" + assert len(complex_gains.shape) == 3, ( + f"The shape of the input complex gains should be of rank 3 in form (ant, chan, pol). Received {complex_gains.shape}" + ) logger.info( f"Dividing bandpass gain solutions using reference antenna={ref_ant} with shifted phasor" @@ -234,9 +234,9 @@ def smooth_bandpass_complex_gains( np.ndarray: Smoothed complex gains """ - assert ( - len(complex_gains.shape) == 3 - ), f"The shape of the input complex gains should be of rank 3 in form (ant, chan, pol). Received {complex_gains.shape}" + assert len(complex_gains.shape) == 3, ( + f"The shape of the input complex gains should be of rank 3 in form (ant, chan, pol). Received {complex_gains.shape}" + ) # Duplicate the original, ya filthy pirate smoothed_complex_gains = complex_gains.copy() diff --git a/flint/calibrate/aocalibrate.py b/flint/calibrate/aocalibrate.py index 912b7279..da53a391 100644 --- a/flint/calibrate/aocalibrate.py +++ b/flint/calibrate/aocalibrate.py @@ -393,9 +393,9 @@ def load_aosolutions_file(solutions_path: Path) -> AOSolutions: AOSolutions: Structure container the deserialized solutions file """ - assert ( - solutions_path.exists() and solutions_path.is_file() - ), f"{solutions_path!s} either does not exist or is not a file. 
" + assert solutions_path.exists() and solutions_path.is_file(), ( + f"{solutions_path!s} either does not exist or is not a file. " + ) logger.info(f"Loading {solutions_path}") with open(solutions_path) as in_file: @@ -470,9 +470,9 @@ def find_existing_solutions( # If not all the treasure could be found. At the moment this function will only # work if the bandpass solutions were made using the default values. - assert all( - [solution_path.exists() for solution_path in solution_paths] - ), f"Missing solution file constructed from scanning {bandpass_directory}. Check the directory. " + assert all([solution_path.exists() for solution_path in solution_paths]), ( + f"Missing solution file constructed from scanning {bandpass_directory}. Check the directory. " + ) calibrate_cmds = [ CalibrateCommand( @@ -556,9 +556,9 @@ def calibrate_options_to_command( else: unknowns.append((key, value)) - assert ( - len(unknowns) == 0 - ), f"Unknown types when generating calibrate command: {unknowns}" + assert len(unknowns) == 0, ( + f"Unknown types when generating calibrate command: {unknowns}" + ) cmd += f"{ms_path!s} {solutions_path!s}" @@ -670,9 +670,9 @@ def create_apply_solutions_cmd( assert ms.path.exists(), f"The measurement set {ms} was not found. " assert ms.column is not None, f"{ms} does not have a nominated data_column. " - assert ( - solutions_file.exists() - ), f"The solutions file {solutions_file} does not exists. " + assert solutions_file.exists(), ( + f"The solutions file {solutions_file} does not exists. " + ) input_column = ms.column copy_mode = "-nocopy" if input_column == output_column else "-copy" @@ -714,9 +714,9 @@ def run_calibrate(calibrate_cmd: CalibrateCommand, container: Path) -> None: """ assert container.exists(), f"The calibrate container {container} does not exist. " - assert ( - calibrate_cmd.ms is not None - ), "When calibrating the 'ms' field attribute must be defined. 
" + assert calibrate_cmd.ms is not None, ( + "When calibrating the 'ms' field attribute must be defined. " + ) run_singularity_command( image=container, @@ -738,12 +738,12 @@ def run_apply_solutions(apply_solutions_cmd: ApplySolutions, container: Path) -> container (Path): Location of the existing solutions file """ - assert ( - container.exists() - ), f"The applysolutions container {container} does not exist. " - assert ( - apply_solutions_cmd.ms.path.exists() - ), f"The measurement set {apply_solutions_cmd.ms} was not found. " + assert container.exists(), ( + f"The applysolutions container {container} does not exist. " + ) + assert apply_solutions_cmd.ms.path.exists(), ( + f"The measurement set {apply_solutions_cmd.ms} was not found. " + ) run_singularity_command( image=container, @@ -828,9 +828,9 @@ def select_refant(bandpass: np.ndarray) -> int: int: The index of the reference antenna that should be used. """ - assert ( - len(bandpass.shape) == 4 - ), f"Expected a bandpass of shape (times, ant, channels, pol), received {bandpass.shape=}" + assert len(bandpass.shape) == 4, ( + f"Expected a bandpass of shape (times, ant, channels, pol), received {bandpass.shape=}" + ) # create the mask of valid solutions mask = np.isfinite(bandpass) @@ -1019,7 +1019,7 @@ def flag_aosolutions( flagged = ~np.isfinite(bandpass[time, ant, :, pol]) logger.info( - f"{ant=:02d}, pol={pols[pol]}, flagged {np.sum(flagged) / ant_gains.shape[0] * 100.:.2f}%" + f"{ant=:02d}, pol={pols[pol]}, flagged {np.sum(flagged) / ant_gains.shape[0] * 100.0:.2f}%" ) for time in range(solutions.nsol): @@ -1070,7 +1070,7 @@ def flag_aosolutions( total_flagged = np.sum(~np.isfinite(bandpass)) / np.prod(bandpass.shape) if total_flagged > 0.8: msg = ( - f"{total_flagged*100.:.2f}% of {(solutions_path)!s} is flagged after running the preflagger. " + f"{total_flagged * 100.0:.2f}% of {(solutions_path)!s} is flagged after running the preflagger. " "That is over 90%. " f"This surely can not be correct. 
Likely something has gone very wrong. " ) diff --git a/flint/catalogue.py b/flint/catalogue.py index 550564df..bbee4b49 100644 --- a/flint/catalogue.py +++ b/flint/catalogue.py @@ -288,9 +288,9 @@ def download_vizier_catalogue( logger.info(f"catalogue downloaded, contains {len(tablelist[0])} rows") logger.info(f"Writing {vizier_id=} to {output_path=}") - assert ( - len(tablelist) == 1 - ), f"Table list for {vizier_id=} has unexpected length of {len(tablelist)}" + assert len(tablelist) == 1, ( + f"Table list for {vizier_id=} has unexpected length of {len(tablelist)}" + ) # Note all pirates respect the FITS standard@ if description := tablelist[0].meta.get("description", None): diff --git a/flint/coadd/linmos.py b/flint/coadd/linmos.py index 651dfee5..f9889e8b 100644 --- a/flint/coadd/linmos.py +++ b/flint/coadd/linmos.py @@ -68,9 +68,9 @@ def _create_bound_box_plane( Returns: Optional[BoundingBox]: None if no valid pixels, a bounding box with the (xmin,xmax,ymin,ymax) of valid pixels """ - assert ( - len(image_data.shape) == 2 - ), f"Only two-dimensional arrays supported, received {image_data.shape}" + assert len(image_data.shape) == 2, ( + f"Only two-dimensional arrays supported, received {image_data.shape}" + ) # First convert to a boolean array image_valid = image_data if is_masked else np.isfinite(image_data) @@ -227,9 +227,9 @@ def _get_image_weight_plane( """ weight_modes = ("mad", "std") - assert ( - mode in weight_modes - ), f"Invalid {mode=} specified. Available modes: {weight_modes}" + assert mode in weight_modes, ( + f"Invalid {mode=} specified. Available modes: {weight_modes}" + ) # remove non-finite numbers that would ruin the statistic image_data = image_data[np.isfinite(image_data)][::stride] @@ -288,9 +288,9 @@ def get_image_weight( with fits.open(image_path, memmap=True) as in_fits: image_data = in_fits[image_slice].data # type: ignore - assert ( - len(image_data.shape) >= 2 - ), f"{len(image_data.shape)=} is less than two. Is this really an image?" 
+ assert len(image_data.shape) >= 2, ( + f"{len(image_data.shape)=} is less than two. Is this really an image?" + ) image_shape = image_data.shape[-2:] image_data = ( @@ -299,9 +299,9 @@ def get_image_weight( else image_data ) - assert ( - len(image_data.shape) == 3 - ), f"Expected to have shape (chan, dec, ra), got {image_data.shape}" + assert len(image_data.shape) == 3, ( + f"Expected to have shape (chan, dec, ra), got {image_data.shape}" + ) for idx, chan_image_data in enumerate(image_data): weight = _get_image_weight_plane(image_data=chan_image_data, stride=stride) @@ -391,9 +391,9 @@ def _get_alpha_linmos_option(pol_axis: float | None = None) -> str: if pol_axis is None: return "" - assert ( - np.abs(pol_axis) <= 2.0 * np.pi - ), f"{pol_axis=}, which is outside +/- 2pi radians and seems unreasonable" + assert np.abs(pol_axis) <= 2.0 * np.pi, ( + f"{pol_axis=}, which is outside +/- 2pi radians and seems unreasonable" + ) logger.info( f"The constant assumed holography rotation is: {EXPECTED_HOLOGRAPHY_ROTATION_CONSTANT_RADIANS:.4f} radians" @@ -475,9 +475,9 @@ def generate_linmos_parameter_set( logger.info(f"{len(img_str)} unique images from {len(images)} input collection. ") img_list: str = "[" + ",".join(img_str) + "]" - assert ( - len(set(img_str)) == len(images) - ), "Some images were dropped from the linmos image string. Something is bad, walk the plank. " + assert len(set(img_str)) == len(images), ( + "Some images were dropped from the linmos image string. Something is bad, walk the plank. 
" + ) # If no weights_list has been provided (and therefore no optimal # beam-wise weighting) assume that all beams are of about the same @@ -489,9 +489,9 @@ def generate_linmos_parameter_set( weight_files = generate_weights_list_and_files( image_paths=images, mode="mad", stride=8 ) - assert ( - weight_files is not None - ), f"{weight_files=}, which should not happen after creating weight files" + assert weight_files is not None, ( + f"{weight_files=}, which should not happen after creating weight files" + ) _weight_str = [ str(weight_file) for weight_file in weight_files @@ -537,9 +537,9 @@ def generate_linmos_parameter_set( logger.info(f"Writing parset to {parset_output_path!s}.") logger.info(f"{parset}") if not overwrite: - assert not Path( - parset_output_path - ).exists(), f"The parset {parset_output_path} already exists!" + assert not Path(parset_output_path).exists(), ( + f"The parset {parset_output_path} already exists!" + ) with open(parset_output_path, "w") as parset_file: parset_file.write(parset) diff --git a/flint/configuration.py b/flint/configuration.py index b64927f9..7c4bc837 100644 --- a/flint/configuration.py +++ b/flint/configuration.py @@ -121,9 +121,9 @@ def get_selfcal_options_from_yaml(input_yaml: Path | None = None) -> dict: Dict: Mapping where the key is the self-calibration round, and values are key-value of updated gaincal options """ - assert ( - input_yaml is None - ), "Configuring via a yaml configuration file is not yet support. " + assert input_yaml is None, ( + "Configuring via a yaml configuration file is not yet support. " + ) return { 1: {"solint": "60s", "uvrange": ">235m", "nspw": 1}, @@ -148,9 +148,9 @@ def get_image_options_from_yaml( Dict: _description_ """ - assert ( - input_yaml is None - ), "Configuring via a yaml configuration file is not yet support. " + assert input_yaml is None, ( + "Configuring via a yaml configuration file is not yet support. 
" + ) MULTISCALE_SCALES = (0, 15, 30, 40, 50, 60, 70, 120) IMAGE_SIZE = 7144 @@ -288,12 +288,12 @@ def get_options_from_strategy( strategy = load_strategy_yaml(input_yaml=strategy) # Some sanity checks - assert isinstance( - strategy, (Strategy, dict) - ), f"Unknown input strategy type {type(strategy)}" - assert round_info == "initial" or isinstance( - round_info, int - ), f"{round_info=} not a known value or type. " + assert isinstance(strategy, (Strategy, dict)), ( + f"Unknown input strategy type {type(strategy)}" + ) + assert round_info == "initial" or isinstance(round_info, int), ( + f"{round_info=} not a known value or type. " + ) # Override the round if requested if ( diff --git a/flint/convol.py b/flint/convol.py index f8f543a6..107000a2 100644 --- a/flint/convol.py +++ b/flint/convol.py @@ -116,9 +116,9 @@ def get_cube_common_beam( ) first_cube_fits_beam = common_beam_data_list[0][0] - assert isinstance( - first_cube_fits_beam, Beams - ), f"Unexpected type for common beams. Expected Beams, got {type(first_cube_fits_beam)}" + assert isinstance(first_cube_fits_beam, Beams), ( + f"Unexpected type for common beams. 
Expected Beams, got {type(first_cube_fits_beam)}" + ) beam_shape_list = [ BeamShape.from_radio_beam(radio_beam=beam) # type: ignore @@ -173,9 +173,9 @@ def convolve_cubes( logger.info(f"{input_cube=} convolved to {output_cube}") # Trust no one - assert all( - [p.exists() for p in convol_cubes_path] - ), "A convolved cube does not exist" + assert all([p.exists() for p in convol_cubes_path]), ( + "A convolved cube does not exist" + ) return convol_cubes_path @@ -362,9 +362,9 @@ def cli() -> None: get_common_beam(image_paths=args.images, cutoff=args.cutoff) if args.mode == "convol": if args.cubes: - assert all( - [check_if_cube_fits(fits_file=f) for f in args.images] - ), "Not all input files are FITS cubes" + assert all([check_if_cube_fits(fits_file=f) for f in args.images]), ( + "Not all input files are FITS cubes" + ) common_beams = get_cube_common_beam( cube_paths=args.images, cutoff=args.cutoff ) @@ -378,9 +378,9 @@ def cli() -> None: ) else: - assert not all( - [check_if_cube_fits(fits_file=f) for f in args.images] - ), "Not all input files are FITS images (not cubes)" + assert not all([check_if_cube_fits(fits_file=f) for f in args.images]), ( + "Not all input files are FITS images (not cubes)" + ) common_beam = get_common_beam(image_paths=args.images, cutoff=args.cutoff) _ = convolve_images( image_paths=args.images, diff --git a/flint/flagging.py b/flint/flagging.py index 791c001c..fa1e0dd7 100644 --- a/flint/flagging.py +++ b/flint/flagging.py @@ -175,9 +175,9 @@ def create_aoflagger_cmd(ms: MS) -> AOFlaggerCommand: logger.info("Creating an AOFlagger command. ") ms = MS.cast(ms) - assert ( - ms.column is not None - ), f"MS column must be set in order to flag, currently {ms.column=}. Full {ms=}" + assert ms.column is not None, ( + f"MS column must be set in order to flag, currently {ms.column=}. 
Full {ms=}" + ) if not check_column_in_ms(ms): raise MSError(f"Column {ms.column} not found in {ms.path}.") @@ -201,9 +201,9 @@ def run_aoflagger_cmd(aoflagger_cmd: AOFlaggerCommand, container: Path) -> None: aoflagger_cmd (AOFlaggerCommand): The command that will be executed container (Path): Path to the container that contains aoflagger """ - assert ( - container.exists() - ), f"The applysolutions container {container} does not exist. " + assert container.exists(), ( + f"The applysolutions container {container} does not exist. " + ) bind_dirs = [aoflagger_cmd.ms_path.parent.absolute()] logger.debug(f"Bind directory for aoflagger: {bind_dirs}") @@ -287,7 +287,7 @@ def flag_ms_by_antenna_ids(ms: Path | MS, ant_ids: int | Collection[int]) -> MS: diff_flags = end_flags - init_flags logger.info( - f"Loaded flags: {init_flags}, Final flags: {end_flags}, Difference: {diff_flags} ({diff_flags/np.prod(flags.shape)*100.:.2f}%)" + f"Loaded flags: {init_flags}, Final flags: {end_flags}, Difference: {diff_flags} ({diff_flags / np.prod(flags.shape) * 100.0:.2f}%)" ) return ms diff --git a/flint/leakage.py b/flint/leakage.py index 0d2c6d12..f0157386 100644 --- a/flint/leakage.py +++ b/flint/leakage.py @@ -73,9 +73,9 @@ def _load_fits_image(fits_path: Path) -> FITSImage: FITSImage: Loaded FITS properties """ - assert ( - fits_path.suffix == ".fits" - ), f"Unexpected file type for {fits_path=}, expected fits" + assert fits_path.suffix == ".fits", ( + f"Unexpected file type for {fits_path=}, expected fits" + ) logger.info(f"Opening {fits_path=}") with fits.open(fits_path) as in_fits: image_data = in_fits[0].data # type: ignore @@ -131,7 +131,9 @@ def filter_components( col in table.colnames for col in (ra_col, dec_col, peak_col, int_col, int_err_col) ] - ), f"Supplied column names {ra_col=} {dec_col=} {peak_col=} {int_col=} partly missing from {table.colnames=}" + ), ( + f"Supplied column names {ra_col=} {dec_col=} {peak_col=} {int_col=} partly missing from {table.colnames=}" + ) 
total_comps = len(table) sky_coords = SkyCoord(table[ra_col], table[dec_col], unit=(u.deg, u.deg)) @@ -339,9 +341,9 @@ def _get_output_catalogue_path( if output_path is None else output_path ) - assert ( - output_path is not None - ), f"{output_path=} is empty, and no catalogue path provided" + assert output_path is not None, ( + f"{output_path=} is empty, and no catalogue path provided" + ) return Path(output_path) diff --git a/flint/masking.py b/flint/masking.py index fda510ef..c9d26f5a 100644 --- a/flint/masking.py +++ b/flint/masking.py @@ -120,9 +120,9 @@ def create_beam_mask_kernel( Returns: np.ndarray: Boolean mask of the kernel shape """ - assert ( - 0.0 < minimum_response < 1.0 - ), f"{minimum_response=}, should be between 0 to 1 (exclusive)" + assert 0.0 < minimum_response < 1.0, ( + f"{minimum_response=}, should be between 0 to 1 (exclusive)" + ) POSITION_KEYS = ("CDELT1", "CDELT2") if not all([key in fits_header for key in POSITION_KEYS]): @@ -132,9 +132,9 @@ def create_beam_mask_kernel( assert isinstance(beam, Beam) cdelt1, cdelt2 = np.abs(fits_header["CDELT1"]), np.abs(fits_header["CDELT2"]) # type: ignore - assert np.isclose( - cdelt1, cdelt2 - ), f"Pixel scales {cdelt1=} {cdelt2=}, but must be equal" + assert np.isclose(cdelt1, cdelt2), ( + f"Pixel scales {cdelt1=} {cdelt2=}, but must be equal" + ) k = beam.as_kernel( pixscale=cdelt1 * u.Unit("deg"), x_size=kernel_size, y_size=kernel_size @@ -340,9 +340,9 @@ def create_boxcar_skew_mask( image: np.ndarray, skew_delta: float, box_size: int ) -> np.ndarray: assert 0.0 < skew_delta < 0.5, f"{skew_delta=}, but should be 0.0 to 0.5" - assert ( - len(image.shape) == 2 - ), f"Expected two dimensions, got image shape of {image.shape}" + assert len(image.shape) == 2, ( + f"Expected two dimensions, got image shape of {image.shape}" + ) logger.info(f"Computing boxcar skew with {box_size=} and {skew_delta=}") positive_pixels = (image > 0.0).astype(np.float32) @@ -605,9 +605,9 @@ def _create_signal_from_rmsbkg( 
logger.info(f"Loading {image}") image = in_fits[0].data # type: ignore - assert isinstance( - image, np.ndarray - ), f"Expected the image to be a numpy array by now, instead have {type(image)}" + assert isinstance(image, np.ndarray), ( + f"Expected the image to be a numpy array by now, instead have {type(image)}" + ) if isinstance(bkg, Path): with fits.open(bkg) as in_fits: @@ -677,9 +677,9 @@ def create_snr_mask_from_fits( signal_data = fits_image[0].data # type: ignore if _need_to_make_signal(masking_options=masking_options): - assert isinstance(fits_rms_path, Path) and isinstance( - fits_bkg_path, Path - ), "Expected paths for input RMS and bkg FITS files" + assert isinstance(fits_rms_path, Path) and isinstance(fits_bkg_path, Path), ( + "Expected paths for input RMS and bkg FITS files" + ) signal_data = _create_signal_from_rmsbkg( image=signal_data, rms=fits_rms_path, bkg=fits_bkg_path ) diff --git a/flint/ms.py b/flint/ms.py index bc235d8e..68a9fb8e 100644 --- a/flint/ms.py +++ b/flint/ms.py @@ -91,9 +91,9 @@ def critical_ms_interaction( # a target output measurement set already on disk. This second check would be # useful in copy==True mode, ya seadog assert input_ms.exists(), f"The input measurement set {input_ms} does not exist. " - assert ( - not output_ms.exists() - ), f"The output measurement set {output_ms} already exists. " + assert not output_ms.exists(), ( + f"The output measurement set {output_ms} already exists. 
" + ) logger.info(f"Critical section for {input_ms=}") if copy: rsync_copy_directory(target_path=input_ms, out_path=output_ms) @@ -164,9 +164,9 @@ def get_beam_from_ms(ms: MS | Path) -> int: with table(str(ms_path), readonly=True, ack=False) as tab: uniq_beams = sorted(list(set(tab.getcol("FEED1")))) - assert ( - len(uniq_beams) == 1 - ), f"Expected {ms_path!s} to contain a single beam, found {len(uniq_beams)}: {uniq_beams=}" + assert len(uniq_beams) == 1, ( + f"Expected {ms_path!s} to contain a single beam, found {len(uniq_beams)}: {uniq_beams=}" + ) return uniq_beams[0] @@ -188,9 +188,9 @@ def get_freqs_from_ms(ms: MS | Path) -> np.ndarray: freqs = tab.getcol("CHAN_FREQ") freqs = np.squeeze(freqs) - assert ( - len(freqs.shape) == 1 - ), f"Frequency axis has dimensionality greater than one. Not expecting that. {len(freqs.shape)}" + assert len(freqs.shape) == 1, ( + f"Frequency axis has dimensionality greater than one. Not expecting that. {len(freqs.shape)}" + ) return freqs @@ -338,7 +338,7 @@ def describe_ms(ms: MS | Path, verbose: bool = False) -> MSSummary: logger.info(f"Inspecting {ms.path}.") logger.info(f"Contains: {colnames}") - logger.info(f"{flagged} of {total} flagged ({flagged/total*100.:.4f}%). ") + logger.info(f"{flagged} of {total} flagged ({flagged / total * 100.0:.4f}%). ") logger.info(f"{len(uniq_ants)} unique antenna: {uniq_ants}") logger.info(f"Unique fields: {uniq_fields}") logger.info(f"Phase direction: {phase_dir}") @@ -520,14 +520,14 @@ def consistent_channelwise_frequencies( np.ndarray: Same length as the frequencies. True if for a single channel all frequencies are the same. False otherwise. 
""" freqs = np.array(freqs) - assert ( - len(freqs.shape) == 2 - ), f"{freqs.shape=}, but was expecting something of rank 2" + assert len(freqs.shape) == 2, ( + f"{freqs.shape=}, but was expecting something of rank 2" + ) freqs_are_same = np.all(freqs - freqs[0, None] == 0, axis=1) - assert ( - len(freqs_are_same.shape) == 1 - ), f"Channelwise check should be length 1, but have {freqs_are_same.shaope=}" + assert len(freqs_are_same.shape) == 1, ( + f"Channelwise check should be length 1, but have {freqs_are_same.shaope=}" + ) return freqs_are_same @@ -575,12 +575,12 @@ def rename_column_in_ms( with table(tablename=str(ms.path), readonly=False, ack=False) as tab: colnames = tab.colnames() - assert ( - original_column_name in colnames - ), f"{original_column_name=} missing from {ms}" - assert ( - new_column_name not in colnames - ), f"{new_column_name=} already exists in {ms}" + assert original_column_name in colnames, ( + f"{original_column_name=} missing from {ms}" + ) + assert new_column_name not in colnames, ( + f"{new_column_name=} already exists in {ms}" + ) logger.info(f"Renaming {original_column_name} to {new_column_name}") tab.renamecol(oldname=original_column_name, newname=new_column_name) @@ -654,9 +654,9 @@ def subtract_model_from_data_column( with table(str(critical_ms), readonly=False) as tab: logger.info("Extracting columns") colnames = tab.colnames() - assert all( - [d in colnames for d in (model_column, data_column)] - ), f"{model_column=} or {data_column=} missing from {colnames=}" + assert all([d in colnames for d in (model_column, data_column)]), ( + f"{model_column=} or {data_column=} missing from {colnames=}" + ) if output_column not in colnames: from casacore.tables import makecoldesc @@ -713,9 +713,9 @@ def preprocess_askap_ms( """ ms = MS.cast(ms) - assert ( - data_column != instrument_column - ), f"Received matching column names: {data_column=} {instrument_column=}" + assert data_column != instrument_column, ( + f"Received matching column 
names: {data_column=} {instrument_column=}" + ) logger.info(f"Will be running ASKAP MS conversion operations against {ms.path!s}.") logger.info("Correcting directions. ") @@ -811,9 +811,9 @@ def copy_and_preprocess_casda_askap_ms( with table(str(ms.path), ack=False, readonly=False) as tab: column_names = tab.colnames() - assert ( - data_column in column_names and instrument_column not in column_names - ), f"{ms.path} column names failed. {data_column=} {instrument_column=} {column_names=}" + assert data_column in column_names and instrument_column not in column_names, ( + f"{ms.path} column names failed. {data_column=} {instrument_column=} {column_names=}" + ) tab.renamecol(data_column, instrument_column) tab.flush() @@ -913,9 +913,9 @@ def rename_ms_and_columns_for_selfcal( # This is a safe guard against my bad handling of the above / mutineers # There could be interplay with these columns when potato peel is used # as some MSs will have CORRECYED_DATA and others may not. - assert ( - data == "DATA" - ), f"Somehow data column is not DATA, instead {data=}. Likely a problem for casa." + assert data == "DATA", ( + f"Somehow data column is not DATA, instead {data=}. Likely a problem for casa." + ) return ms.with_options(path=target, column=data) @@ -939,18 +939,18 @@ def find_mss( Returns: Tuple[MS, ...]: Collection of found MSs """ - assert ( - mss_parent_path.exists() and mss_parent_path.is_dir() - ), f"{mss_parent_path!s} does not exist or is not a folder. " + assert mss_parent_path.exists() and mss_parent_path.is_dir(), ( + f"{mss_parent_path!s} does not exist or is not a folder. " + ) found_mss = tuple( [MS.cast(ms_path) for ms_path in sorted(mss_parent_path.glob("*.ms"))] ) if expected_ms_count: - assert ( - len(found_mss) == expected_ms_count - ), f"Expected to find {expected_ms_count} in {mss_parent_path!s}, found {len(found_mss)}." 
+ assert len(found_mss) == expected_ms_count, ( + f"Expected to find {expected_ms_count} in {mss_parent_path!s}, found {len(found_mss)}." + ) if data_column or model_column: logger.info(f"Updating column attribute to {data_column=}") diff --git a/flint/naming.py b/flint/naming.py index 7b884bd1..234b310d 100644 --- a/flint/naming.py +++ b/flint/naming.py @@ -234,14 +234,14 @@ def get_selfcal_ms_name(in_ms_path: Path, round: int = 1) -> Path: logger.info("Detected a previous round of self-calibration. ") span = res.span() name_str = str(in_ms_path.name) - name = f"{name_str[:span[0]]}.round{round}.ms" + name = f"{name_str[: span[0]]}.round{round}.ms" else: name = f"{in_ms_path.stem!s}.round{round}.ms" out_ms_path = in_ms_path.parent / name - assert ( - in_ms_path != out_ms_path - ), f"{in_ms_path=} and {out_ms_path=} match. Something went wrong when creating new self-cal name. " + assert in_ms_path != out_ms_path, ( + f"{in_ms_path=} and {out_ms_path=} match. Something went wrong when creating new self-cal name. 
" + ) return out_ms_path diff --git a/flint/options.py b/flint/options.py index f9bc1ba0..c6a82af1 100644 --- a/flint/options.py +++ b/flint/options.py @@ -125,9 +125,9 @@ def add_options_to_parser( ArgumentParser: Updated argument parser """ - assert issubclass( - options_class, BaseModel - ), f"{options_class=} is not a pydantic BaseModel" + assert issubclass(options_class, BaseModel), ( + f"{options_class=} is not a pydantic BaseModel" + ) group = parser.add_argument_group( title=f"Inputs for {options_class.__name__}", diff --git a/flint/peel/potato.py b/flint/peel/potato.py index b519e7ec..15031fd3 100644 --- a/flint/peel/potato.py +++ b/flint/peel/potato.py @@ -416,7 +416,7 @@ def _potato_config_command( PotatoconfigCommand: The CLI command that will be executed to create a potato configuration file """ - command = "peel_configuration.py " f"{config_path!s} " + command = f"peel_configuration.py {config_path!s} " sub_options = _potato_options_to_command(potato_options=potato_config_options) command = command + sub_options @@ -485,9 +485,7 @@ def _potato_peel_command( """ command = ( - "hot_potato " - f"{ms.path.absolute()!s} " - f"{potato_peel_arguments.image_fov:.4f} " + f"hot_potato {ms.path.absolute()!s} {potato_peel_arguments.image_fov:.4f} " ) for item in [potato_peel_arguments, potato_peel_options]: diff --git a/flint/prefect/common/imaging.py b/flint/prefect/common/imaging.py index 136a3c35..2c99a957 100644 --- a/flint/prefect/common/imaging.py +++ b/flint/prefect/common/imaging.py @@ -198,9 +198,9 @@ def task_run_bane_and_aegean( # For the moment, will only source find on an MFS image image_paths = [image for image in image_paths if ".MFS." in str(image)] - assert ( - len(image_paths) == 1 - ), "More than one image found after filter for MFS only images. " + assert len(image_paths) == 1, ( + "More than one image found after filter for MFS only images. " + ) # Get out the only path in the list. 
image_path = image_paths[0] elif isinstance(image, LinmosCommand): @@ -475,9 +475,9 @@ def task_convolve_cube( Returns: Collection[Path]: Path to the output images that have been convolved. """ - assert ( - wsclean_cmd.imageset is not None - ), f"{wsclean_cmd.ms} has no attached imageset." + assert wsclean_cmd.imageset is not None, ( + f"{wsclean_cmd.ms} has no attached imageset." + ) supported_modes = ("image",) logger.info(f"Extracting {mode}") @@ -494,9 +494,9 @@ def task_convolve_cube( # handling this. The pirate in me feels like less is more, so an error will be enough. Keeping # things simple and avoiding the problem is probably the better way of dealing with this # situation. In time this would mean that we inspect and handle conflicting pipeline options. - assert ( - image_paths is not None - ), f"{image_paths=} for {mode=} and {wsclean_cmd.imageset=}" + assert image_paths is not None, ( + f"{image_paths=} for {mode=} and {wsclean_cmd.imageset=}" + ) logger.info(f"Will convolve {image_paths}") @@ -531,18 +531,18 @@ def task_convolve_image( Returns: Collection[Path]: Path to the output images that have been convolved. """ - assert ( - wsclean_cmd.imageset is not None - ), f"{wsclean_cmd.ms} has no attached imageset." + assert wsclean_cmd.imageset is not None, ( + f"{wsclean_cmd.ms} has no attached imageset." + ) supported_modes = ("image", "residual") logger.info(f"Extracting {mode}") if mode == "image": image_paths = list(wsclean_cmd.imageset.image) elif mode == "residual": - assert ( - wsclean_cmd.imageset.residual is not None - ), f"{wsclean_cmd.imageset.residual=}, which should not happen" + assert wsclean_cmd.imageset.residual is not None, ( + f"{wsclean_cmd.imageset.residual=}, which should not happen" + ) image_paths = list(wsclean_cmd.imageset.residual) else: raise ValueError(f"{mode=} is not supported. Known modes are {supported_modes}") @@ -558,9 +558,9 @@ def task_convolve_image( # handling this. 
The pirate in me feels like less is more, so an error will be enough. Keeping # things simple and avoiding the problem is probably the better way of dealing with this # situation. In time this would mean that we inspect and handle conflicting pipeline options. - assert ( - image_paths is not None - ), f"{image_paths=} for {mode=} and {wsclean_cmd.imageset=}" + assert image_paths is not None, ( + f"{image_paths=} for {mode=} and {wsclean_cmd.imageset=}" + ) logger.info(f"Will convolve {image_paths}") @@ -930,9 +930,9 @@ def task_extract_beam_mask_image( FITSMaskNames: Clean mask for a image """ # All images made by wsclean will have the same WCS - assert ( - wsclean_cmd.imageset is not None - ), f"{wsclean_cmd.imageset=}, which should not happen" + assert wsclean_cmd.imageset is not None, ( + f"{wsclean_cmd.imageset=}, which should not happen" + ) beam_image = next(iter(wsclean_cmd.imageset.image)) beam_mask_names = extract_beam_mask_from_mosaic( fits_beam_image_path=beam_image, fits_mosaic_mask_names=linmos_mask_names diff --git a/flint/prefect/common/ms.py b/flint/prefect/common/ms.py index ff5877b1..b13539a3 100644 --- a/flint/prefect/common/ms.py +++ b/flint/prefect/common/ms.py @@ -19,9 +19,9 @@ def add_model_source_list_to_ms( logger.info("Updating MODEL_DATA with source list") ms = wsclean_command.ms - assert ( - wsclean_command.imageset is not None - ), f"{wsclean_command.imageset=}, which is not allowed" + assert wsclean_command.imageset is not None, ( + f"{wsclean_command.imageset=}, which is not allowed" + ) source_list_path = wsclean_command.imageset.source_list if source_list_path is None: diff --git a/flint/prefect/common/utils.py b/flint/prefect/common/utils.py index 2ba0139a..6d3885b2 100644 --- a/flint/prefect/common/utils.py +++ b/flint/prefect/common/utils.py @@ -42,9 +42,9 @@ def upload_image_as_artifact(image_path: Path, description: str | None = None) - """ image_type = image_path.suffix.replace(".", "") assert image_path.exists(), 
f"{image_path} does not exist" - assert ( - image_type in SUPPORTED_IMAGE_TYPES - ), f"{image_path} has type {image_type}, and is not supported. Supported types are {SUPPORTED_IMAGE_TYPES}" + assert image_type in SUPPORTED_IMAGE_TYPES, ( + f"{image_path} has type {image_type}, and is not supported. Supported types are {SUPPORTED_IMAGE_TYPES}" + ) with open(image_path, "rb") as open_image: logger.info(f"Encoding {image_path} in base64") @@ -136,9 +136,9 @@ def task_update_with_options(input_object: T, **kwargs) -> T: Returns: T: The updated object """ - assert "with_options" in dir( - input_object - ), f"{type(input_object)=} does not have a with_options method" + assert "with_options" in dir(input_object), ( + f"{type(input_object)=} does not have a with_options method" + ) updated_object = input_object.with_options(**kwargs) # type: ignore return updated_object diff --git a/flint/prefect/flows/bandpass_pipeline.py b/flint/prefect/flows/bandpass_pipeline.py index 8e18749b..99910a85 100644 --- a/flint/prefect/flows/bandpass_pipeline.py +++ b/flint/prefect/flows/bandpass_pipeline.py @@ -146,9 +146,9 @@ def run_bandpass_stage( Returns: List[CalibrateCommand]: Set of calibration commands used """ - assert ( - bandpass_options.flag_calibrate_rounds >= 0 - ), f"Currently {bandpass_options.flag_calibrate_rounds=}, needs to be 0 or higher" + assert bandpass_options.flag_calibrate_rounds >= 0, ( + f"Currently {bandpass_options.flag_calibrate_rounds=}, needs to be 0 or higher" + ) if not output_split_bandpass_path.exists(): logger.info(f"Creating {output_split_bandpass_path!s}") @@ -235,14 +235,14 @@ def calibrate_bandpass_flow( Returns: Path: Directory that contains the extracted measurement sets and the ao-style gain solutions files. """ - assert ( - bandpass_path.exists() and bandpass_path.is_dir() - ), f"{bandpass_path!s} does not exist or is not a folder. 
" + assert bandpass_path.exists() and bandpass_path.is_dir(), ( + f"{bandpass_path!s} does not exist or is not a folder. " + ) bandpass_mss = list([MS.cast(ms_path) for ms_path in bandpass_path.glob("*.ms")]) - assert ( - len(bandpass_mss) == bandpass_options.expected_ms - ), f"Expected to find {bandpass_options.expected_ms} in {bandpass_path!s}, found {len(bandpass_mss)}." + assert len(bandpass_mss) == bandpass_options.expected_ms, ( + f"Expected to find {bandpass_options.expected_ms} in {bandpass_path!s}, found {len(bandpass_mss)}." + ) logger.info( f"Found the following bandpass measurement set: {[bp.path for bp in bandpass_mss]}." diff --git a/flint/prefect/flows/continuum_mask_pipeline.py b/flint/prefect/flows/continuum_mask_pipeline.py index fc1decb3..7f99b98b 100644 --- a/flint/prefect/flows/continuum_mask_pipeline.py +++ b/flint/prefect/flows/continuum_mask_pipeline.py @@ -82,15 +82,15 @@ def process_science_fields( ) run_validation = field_options.reference_catalogue_directory is not None - assert ( - science_path.exists() and science_path.is_dir() - ), f"{science_path!s} does not exist or is not a folder. " + assert science_path.exists() and science_path.is_dir(), ( + f"{science_path!s} does not exist or is not a folder. " + ) science_mss = list( [MS.cast(ms_path) for ms_path in sorted(science_path.glob("*.ms"))] ) - assert ( - len(science_mss) == field_options.expected_ms - ), f"Expected to find {field_options.expected_ms} in {science_path!s}, found {len(science_mss)}." + assert len(science_mss) == field_options.expected_ms, ( + f"Expected to find {field_options.expected_ms} in {science_path!s}, found {len(science_mss)}." + ) science_folder_name = science_path.name @@ -331,9 +331,9 @@ def setup_run_process_science_field( split_path: Path, field_options: FieldOptions, ) -> None: - assert ( - bandpass_path.exists() and bandpass_path.is_dir() - ), f"{bandpass_path=} needs to exist and be a directory! 
" + assert bandpass_path.exists() and bandpass_path.is_dir(), ( + f"{bandpass_path=} needs to exist and be a directory! " + ) science_sbid = get_sbid_from_path(path=science_path) diff --git a/flint/prefect/flows/continuum_pipeline.py b/flint/prefect/flows/continuum_pipeline.py index 370e80ec..ffeb4c3d 100644 --- a/flint/prefect/flows/continuum_pipeline.py +++ b/flint/prefect/flows/continuum_pipeline.py @@ -122,9 +122,9 @@ def _check_create_output_split_science_path( """ science_folder_name = science_path.name - assert str( - science_folder_name - ).isdigit(), f"We require the parent directory to be the SBID (all digits), got {science_folder_name=}" + assert str(science_folder_name).isdigit(), ( + f"We require the parent directory to be the SBID (all digits), got {science_folder_name=}" + ) output_split_science_path = ( Path(split_path / science_folder_name).absolute().resolve() ) @@ -442,7 +442,9 @@ def process_science_fields( round=current_round, ) # type: ignore if run_validation: - assert field_options.reference_catalogue_directory, f"Reference catalogue directory should be set when {run_validation=}" + assert field_options.reference_catalogue_directory, ( + f"Reference catalogue directory should be set when {run_validation=}" + ) val_results = _validation_items( field_summary=field_summary, aegean_outputs=aegean_outputs, @@ -512,9 +514,9 @@ def setup_run_process_science_field( skip_bandpass_check: bool = False, ) -> None: if not skip_bandpass_check and bandpass_path: - assert ( - bandpass_path.exists() and bandpass_path.is_dir() - ), f"{bandpass_path=} needs to exist and be a directory! " + assert bandpass_path.exists() and bandpass_path.is_dir(), ( + f"{bandpass_path=} needs to exist and be a directory! 
" + ) science_sbid = get_sbid_from_path(path=science_path) diff --git a/flint/prefect/flows/subtract_cube_pipeline.py b/flint/prefect/flows/subtract_cube_pipeline.py index a946cc50..1ba9f8cf 100644 --- a/flint/prefect/flows/subtract_cube_pipeline.py +++ b/flint/prefect/flows/subtract_cube_pipeline.py @@ -67,9 +67,9 @@ def _check_and_verify_options( ), f"{options.yandasoft_container=} does not exist or is not a file" if isinstance(options, AddModelSubtractFieldOptions): if options.attempt_addmodel: - assert ( - options.calibrate_container is not None - ), "Calibrate container path is needede for addmodel" + assert options.calibrate_container is not None, ( + "Calibrate container path is needede for addmodel" + ) assert ( options.calibrate_container.exists() and options.calibrate_container.is_file() @@ -161,9 +161,9 @@ def task_addmodel_to_ms( wsclean_source_list_path = get_wsclean_output_source_list_path( name_path=ms.path, pol=pol ) - assert ( - wsclean_source_list_path.exists() - ), f"{wsclean_source_list_path=} was requested, but does not exist" + assert wsclean_source_list_path.exists(), ( + f"{wsclean_source_list_path=} was requested, but does not exist" + ) # This should attempt to add model of different polarisations together. # But to this point it is a future proof and is not tested. 
@@ -173,9 +173,9 @@ def task_addmodel_to_ms( mode="c" if idx == 0 else "a", datacolumn="MODEL_DATA", ) - assert ( - addmodel_subtract_options.calibrate_container is not None - ), f"{addmodel_subtract_options.calibrate_container=}, which should not happen" + assert addmodel_subtract_options.calibrate_container is not None, ( + f"{addmodel_subtract_options.calibrate_container=}, which should not happen" + ) add_model( add_model_options=addmodel_options, container=addmodel_subtract_options.calibrate_container, @@ -195,9 +195,9 @@ def task_crystalball_to_ms(ms: MS, crystalball_options: CrystalBallOptions) -> M wsclean_source_list_path = get_wsclean_output_source_list_path( name_path=ms.path, pol=pol ) - assert ( - wsclean_source_list_path.exists() - ), f"{wsclean_source_list_path=} was requested, but does not exist" + assert wsclean_source_list_path.exists(), ( + f"{wsclean_source_list_path=} was requested, but does not exist" + ) with get_dask_client(): logger.info("Running crystalball in prefect dask client") @@ -247,9 +247,9 @@ def task_combine_all_linmos_images( logger.info(f"Removing original {len(images_to_combine)} images") for image in images_to_combine: logger.info(f"Removing {image=}") - assert ( - isinstance(image, Path) and image.exists() - ), f"{image=} does not exist, but it should" + assert isinstance(image, Path) and image.exists(), ( + f"{image=} does not exist, but it should" + ) image.unlink() return Path(output_cube_path) @@ -316,9 +316,9 @@ def flow_subtract_cube( # ms=science_mss, # addmodel_subtract_options=unmapped(addmodel_subtract_field_options), # ) - assert ( - addmodel_subtract_field_options.addmodel_cluster_config is not None - ), f"{addmodel_subtract_field_options.addmodel_cluster_config=}, which should not happen" + assert addmodel_subtract_field_options.addmodel_cluster_config is not None, ( + f"{addmodel_subtract_field_options.addmodel_cluster_config=}, which should not happen" + ) addmodel_dask_runner = get_dask_runner( 
cluster=addmodel_subtract_field_options.addmodel_cluster_config ) diff --git a/flint/sky_model.py b/flint/sky_model.py index 82dde753..d8f4d0f9 100644 --- a/flint/sky_model.py +++ b/flint/sky_model.py @@ -135,9 +135,9 @@ def get_1934_model(mode: str = "calibrate") -> Path: package="flint.data.models", filename=model_fn ) - assert ( - model_path.exists() - ), f"Constructed {model_path} apparently does not exist. Check packaged models. " + assert model_path.exists(), ( + f"Constructed {model_path} apparently does not exist. Check packaged models. " + ) logger.info(f"Calibrate 1934-638 model path: {model_path!s}.") return model_path @@ -418,9 +418,9 @@ def get_known_catalogue(cata: str) -> Catalogue: Returns: Catalogue: properties of known catalogue """ - assert ( - cata.upper() in KNOWN_CATAS.keys() - ), f"'{cata}' not a known catalogue. Acceptable keys are: {KNOWN_CATAS.keys()}." + assert cata.upper() in KNOWN_CATAS.keys(), ( + f"'{cata}' not a known catalogue. Acceptable keys are: {KNOWN_CATAS.keys()}." + ) cata_info = KNOWN_CATAS[cata.upper()] logger.info(f"Loading {cata}={cata_info.file_name}") @@ -450,9 +450,9 @@ def load_catalogue( Returns: Tuple[Catalogue,Table]: The `Catalogue` information and `Table` of components loaded """ - assert ( - catalogue is not None or ms_pointing is not None - ), "Either catalogue or dec_point have to be provided. " + assert catalogue is not None or ms_pointing is not None, ( + "Either catalogue or dec_point have to be provided. 
" + ) if catalogue: logger.info(f"Loading provided catalogue {catalogue=}") @@ -744,7 +744,7 @@ def create_sky_model( freqs = freqs_from_ms(ms_path) * u.Hz logger.info( - f"Frequency range: {freqs[0]/1000.:.3f} MHz - {freqs[-1]/1000.:.3f} MHz (centre = {np.mean(freqs/1000.):.3f} MHz)" + f"Frequency range: {freqs[0] / 1000.0:.3f} MHz - {freqs[-1] / 1000.0:.3f} MHz (centre = {np.mean(freqs / 1000.0):.3f} MHz)" ) # This is used to estimate a frequency-dependent search radius diff --git a/flint/source_finding/aegean.py b/flint/source_finding/aegean.py index 3ffeb6d7..0fc8f4a8 100644 --- a/flint/source_finding/aegean.py +++ b/flint/source_finding/aegean.py @@ -57,7 +57,7 @@ class AegeanOutputs(NamedTuple): def _get_bane_command(image: Path, cores: int, bane_options: BANEOptions) -> str: """Create the BANE command to run""" # The stripes is purposely set lower than the cores due to an outstanding bane bug that can cause a deadlock. - bane_command_str = f"BANE {image!s} --cores {cores} --stripes {cores-1} " + bane_command_str = f"BANE {image!s} --cores {cores} --stripes {cores - 1} " if bane_options.grid_size: bane_command_str += ( f"--grid {bane_options.grid_size[0]} {bane_options.grid_size[1]} " diff --git a/flint/summary.py b/flint/summary.py index 0a09ee34..37176806 100644 --- a/flint/summary.py +++ b/flint/summary.py @@ -212,9 +212,9 @@ def add_linmos_fits_image( Returns: FieldSummary: The updated field summary object with the linmos fits image added """ - assert isinstance( - linmos_command, LinmosCommand - ), f"{linmos_command=} is type {type(linmos_command)}, expected LinmosCommand" + assert isinstance(linmos_command, LinmosCommand), ( + f"{linmos_command=} is type {type(linmos_command)}, expected LinmosCommand" + ) image_fits = linmos_command.image_fits field_summary = field_summary.with_options(linmos_image=image_fits) diff --git a/flint/utils.py b/flint/utils.py index 0ee1cb8d..75a63122 100644 --- a/flint/utils.py +++ b/flint/utils.py @@ -152,9 +152,9 @@ def 
temporarily_move_into( yield subject else: temporary_directory.mkdir(parents=True, exist_ok=True) - assert ( - temporary_directory.is_dir() - ), f"{temporary_directory=} exists and is not a folder" + assert temporary_directory.is_dir(), ( + f"{temporary_directory=} exists and is not a folder" + ) output_item = temporary_directory / subject.name assert not output_item.exists(), f"{output_item=} already exists! " @@ -436,9 +436,9 @@ def generate_stub_wcs_header( WCS: The representative WCS objects """ # Trust nothing - assert ( - len(projection) == 3 - ), f"Projection should be three characters, received {projection}" + assert len(projection) == 3, ( + f"Projection should be three characters, received {projection}" + ) # Handle all the pixels you rotten seadog if pixel_scale is not None: @@ -448,9 +448,9 @@ def generate_stub_wcs_header( pixel_scale = pixel_scale * u.arcsec # Trust nothing even more - assert isinstance( - pixel_scale, u.Quantity - ), f"pixel_scale is not an quantity, instead {type(pixel_scale)}" + assert isinstance(pixel_scale, u.Quantity), ( + f"pixel_scale is not a quantity, instead {type(pixel_scale)}" + ) pixel_scale = np.abs(pixel_scale.to(u.rad).value) pixel_scale = np.array([-pixel_scale, pixel_scale]) @@ -466,9 +466,9 @@ def generate_stub_wcs_header( if isinstance(base_wcs, Path): base_wcs = WCS(fits.getheader(base_wcs)).celestial - assert isinstance( - base_wcs, WCS - ), f"Expecting base_wcs to be a WCS object by now, instead is {type(base_wcs)}" + assert isinstance(base_wcs, WCS), ( + f"Expecting base_wcs to be a WCS object by now, instead is {type(base_wcs)}" + ) if image_shape is None: image_shape = base_wcs._naxis @@ -576,7 +576,7 @@ def rsync_copy_directory(target_path: Path, out_path: Path) -> Path: Path: The output path of the new directory. 
""" - rsync_cmd = f"rsync -avh --progress --stats " f"{target_path!s}/ " f"{out_path!s}/ " + rsync_cmd = f"rsync -avh --progress --stats {target_path!s}/ {out_path!s}/ " logger.info(f"Rsync copying {target_path} to {out_path}.") logger.debug(f"Will run {rsync_cmd}") rsync_run = subprocess.Popen(rsync_cmd.split(), stdout=subprocess.PIPE) @@ -609,9 +609,9 @@ def copy_directory( input_directory = Path(input_directory) output_directory = Path(output_directory) - assert ( - input_directory.exists() and input_directory.is_dir() - ), f"Currently only supports copying directories, {input_directory=} is a file or does not exist. " + assert input_directory.exists() and input_directory.is_dir(), ( + f"Currently only supports copying directories, {input_directory=} is a file or does not exist. " + ) logger.info(f"Copying {input_directory} to {output_directory}.") diff --git a/flint/validation.py b/flint/validation.py index 94184810..97ac39f6 100644 --- a/flint/validation.py +++ b/flint/validation.py @@ -1098,7 +1098,7 @@ def plot_field_info( f"- Integration time : {field_summary.integration_time * u.second:latex_inline}", # type: ignore f"- Hour angle range : {hour_angles.min().to_string(precision=2, format='latex_inline')} - {hour_angles.max().to_string(precision=2, format='latex_inline')}", # type: ignore f"- Elevation range : {elevations.min().to_string(precision=2, format='latex_inline')} - {elevations.max().to_string(precision=2, format='latex_inline')}", # type: ignore - f"- Median rms uJy : {rms_info.median*1e6:.1f}", + f"- Median rms uJy : {rms_info.median * 1e6:.1f}", f"- Components : {len(askap_table)}", f"- Processing date : {Time.now().fits}", f"- Pol. 
axis : {pol_axis_str}", @@ -1252,9 +1252,9 @@ def _make_beam_psf_row(beam_summary: BeamSummary) -> PSFTableRow: vis_total = beam_summary.ms_summary.flagged + beam_summary.ms_summary.unflagged vis_flagged = beam_summary.ms_summary.flagged - assert ( - beam_summary.imageset is not None - ), f"{beam_summary.imageset=}, which should not happen" + assert beam_summary.imageset is not None, ( + f"{beam_summary.imageset=}, which should not happen" + ) image_file = list(beam_summary.imageset.image)[-1] with fits.open(image_file) as image: bmaj = image[0].header["BMAJ"] # type: ignore @@ -1285,9 +1285,9 @@ def make_psf_table(field_summary: FieldSummary, output_path: Path) -> Path: # Columns are: # BEAM_NUM,BEAM_TIME,RA_DEG,DEC_DEG,GAL_LONG,GAL_LAT,PSF_MAJOR,PSF_MINOR,PSF_ANGLE,VIS_TOTAL,VIS_FLAGGED - assert ( - field_summary.beam_summaries is not None - ), f"{field_summary.beam_summaries=}, which should not happen" + assert field_summary.beam_summaries is not None, ( + f"{field_summary.beam_summaries=}, which should not happen" + ) psf_table_rows = [ _make_beam_psf_row(beam_summary=beam_summary) diff --git a/tests/test_naming.py b/tests/test_naming.py index 5ddb013a..ed7456d5 100644 --- a/tests/test_naming.py +++ b/tests/test_naming.py @@ -205,7 +205,7 @@ def test_self_cal_name_wbeams(): for round in range(1, 5): for beam in range(45): ms = Path(f"SB12349.RACS_1234+45.beam{beam:02d}.round{round}.ms") - e_ms = Path(f"SB12349.RACS_1234+45.beam{beam:02d}.round{round+1}.ms") + e_ms = Path(f"SB12349.RACS_1234+45.beam{beam:02d}.round{round + 1}.ms") out_ms = get_selfcal_ms_name(in_ms_path=ms, round=round + 1) assert out_ms == e_ms