diff --git a/arrakis/cleanup.py b/arrakis/cleanup.py
index a8041695..801851b6 100644
--- a/arrakis/cleanup.py
+++ b/arrakis/cleanup.py
@@ -10,7 +10,7 @@
from prefect import flow, get_run_logger, task, unmapped
from tqdm.auto import tqdm
-from arrakis.logger import TqdmToLogger, logger
+from arrakis.logger import TqdmToLogger, UltimateHelpFormatter, logger
from arrakis.utils.pipeline import logo_str
logger.setLevel(logging.INFO)
@@ -61,10 +61,11 @@ def main(
datadir: Path,
overwrite: bool = False,
) -> None:
- """Clean up beam images
+ """Clean up beam images flow
Args:
datadir (Path): Directory with sub dir 'cutouts'
+ overwrite (bool): Overwrite existing tarball
"""
cutdir = datadir / "cutouts"
@@ -117,7 +118,7 @@ def cli():
# Parse the command line options
parser = argparse.ArgumentParser(
- description=descStr, formatter_class=argparse.ArgumentDefaultsHelpFormatter
+ description=descStr, formatter_class=UltimateHelpFormatter
)
parser.add_argument(
"outdir",
diff --git a/arrakis/cutout.py b/arrakis/cutout.py
index fa9a9452..8de423c1 100644
--- a/arrakis/cutout.py
+++ b/arrakis/cutout.py
@@ -26,7 +26,7 @@
from spectral_cube.utils import SpectralCubeWarning
from tqdm.auto import tqdm
-from arrakis.logger import TqdmToLogger, logger
+from arrakis.logger import TqdmToLogger, UltimateHelpFormatter, logger
from arrakis.utils.database import get_db, test_db
from arrakis.utils.fitsutils import fix_header
from arrakis.utils.io import try_mkdir
@@ -397,7 +397,7 @@ def cutout_islands(
dryrun: bool = True,
limit: Optional[int] = None,
) -> None:
- """Perform cutouts of RACS islands in parallel.
+ """Flow to cutout islands in parallel.
Args:
field (str): RACS field name.
@@ -531,7 +531,7 @@ def cutout_parser(parent_parser: bool = False) -> argparse.ArgumentParser:
cut_parser = argparse.ArgumentParser(
add_help=not parent_parser,
description=descStr,
- formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+ formatter_class=UltimateHelpFormatter,
)
parser = cut_parser.add_argument_group("cutout arguments")
diff --git a/arrakis/frion.py b/arrakis/frion.py
index 318f638d..5fd38397 100644
--- a/arrakis/frion.py
+++ b/arrakis/frion.py
@@ -17,7 +17,7 @@
from FRion import correct, predict
from prefect import flow, task, unmapped
-from arrakis.logger import logger
+from arrakis.logger import UltimateHelpFormatter, logger
from arrakis.utils.database import get_db, get_field_db, test_db
from arrakis.utils.fitsutils import getfreq
from arrakis.utils.io import try_mkdir
@@ -202,19 +202,21 @@ def main(
ionex_predownload: bool = False,
limit: Optional[int] = None,
):
- """Main script
+ """FRion flow
Args:
field (str): RACS field name
- outdir (str): Output directory
+ outdir (Path): Output directory
host (str): MongoDB host IP address
+ epoch (int): Epoch of observation
username (str, optional): Mongo username. Defaults to None.
password (str, optional): Mongo passwrod. Defaults to None.
database (bool, optional): Update database. Defaults to False.
- verbose (bool, optional): Verbose output. Defaults to True.
ionex_server (str, optional): IONEX server. Defaults to "ftp://ftp.aiub.unibe.ch/CODE/".
ionex_proxy_server (str, optional): Proxy server. Defaults to None.
ionex_formatter (Union[str, Callable], optional): IONEX formatter. Defaults to "ftp.aiub.unibe.ch".
+ ionex_predownload (bool, optional): Pre-download IONEX files. Defaults to False.
+ limit (int, optional): Limit to number of islands. Defaults to None.
"""
# Query database for data
outdir = os.path.abspath(outdir)
@@ -352,7 +354,7 @@ def cli():
# Parse the command line options
parser = argparse.ArgumentParser(
- description=descStr, formatter_class=argparse.ArgumentDefaultsHelpFormatter
+ description=descStr, formatter_class=UltimateHelpFormatter
)
parser.add_argument(
"field", metavar="field", type=str, help="RACS field to mosaic - e.g. 2132-50A."
diff --git a/arrakis/imager.py b/arrakis/imager.py
index 17137327..f3830fb9 100644
--- a/arrakis/imager.py
+++ b/arrakis/imager.py
@@ -27,7 +27,7 @@
from spython.main import Client as sclient
from tqdm.auto import tqdm
-from arrakis.logger import TqdmToLogger, logger
+from arrakis.logger import TqdmToLogger, UltimateHelpFormatter, logger
from arrakis.utils.io import parse_env_path
from arrakis.utils.msutils import (
beam_from_ms,
@@ -637,6 +637,43 @@ def main(
skip_fix_ms: bool = False,
no_mf_weighting: bool = False,
):
+ """Arrakis imager flow
+
+ Args:
+ msdir (Path): Path to the directory containing the MS files.
+ out_dir (Path): Path to the directory where the images will be written.
+ temp_dir (Optional[Path], optional): Path for temporary files to be written. Defaults to None.
+ cutoff (Optional[float], optional): WSClean cutoff. Defaults to None.
+ robust (float, optional): WSClean Briggs robust parameter. Defaults to -0.5.
+ pols (str, optional): WSClean polarisations. Defaults to "IQU".
+ nchan (int, optional): WSClean number of output channels. Defaults to 36.
+ size (int, optional): WSClean image size. Defaults to 6074.
+ scale (float, optional): WSClean pixel size (arcseconds). Defaults to 2.5.
+ mgain (float, optional): WSClean mgain. Defaults to 0.8.
+ niter (int, optional): WSClean niter. Defaults to 100_000.
+ auto_mask (float, optional): WSClean automatic masking (in SNR). Defaults to 3.
+ force_mask_rounds (Union[int, None], optional): WSClean force mask rounds (requires modified WSClean). Defaults to None.
+ auto_threshold (float, optional): WSClean auto threshold (in SNR). Defaults to 1.
+ taper (Union[float, None], optional): WSClean taper (in arcsec). Defaults to None.
+        purge (bool, optional): Purge auxiliary files after imaging. Defaults to False.
+ minuv (float, optional): WSClean minuv-l. Defaults to 0.0.
+ parallel_deconvolution (Optional[int], optional): WSClean parallel deconvolution. Defaults to None.
+ gridder (Optional[str], optional): WSClean gridder. Defaults to None.
+ nmiter (Optional[int], optional): WSClean nmiter. Defaults to None.
+ local_rms (bool, optional): WSClean local_rms. Defaults to False.
+ local_rms_window (Optional[float], optional): WSClean local_rms_window. Defaults to None.
+ wsclean_path (Path | str, optional): Path or URL for WSClean container. Defaults to "docker://alecthomson/wsclean:latest".
+ multiscale (Optional[bool], optional): WSClean multiscale. Defaults to None.
+ multiscale_scale_bias (Optional[float], optional): WSClean multiscale bias. Defaults to None.
+ multiscale_scales (Optional[str], optional): WSClean scales. Defaults to "0,2,4,8,16,32,64,128".
+ absmem (Optional[float], optional): WSClean absmem usage. Defaults to None.
+        make_residual_cubes (Optional[bool], optional): Make residual image cubes. Defaults to False.
+        ms_glob_pattern (str, optional): Glob pattern for MS files. Defaults to "scienceData*_averaged_cal.leakage.ms".
+ data_column (str, optional): Data column to image. Defaults to "CORRECTED_DATA".
+        skip_fix_ms (bool, optional): Skip applying the ASKAP MS corrections from FixMS. Defaults to False.
+ no_mf_weighting (bool, optional): WSClean no_mf_weighting. Defaults to False.
+ """
+
simage = get_wsclean(wsclean=wsclean_path)
logger.info(f"Searching {msdir} for MS matching {ms_glob_pattern}.")
@@ -785,7 +822,7 @@ def imager_parser(parent_parser: bool = False) -> argparse.ArgumentParser:
img_parser = argparse.ArgumentParser(
add_help=not parent_parser,
description=descStr,
- formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+ formatter_class=UltimateHelpFormatter,
)
parser = img_parser.add_argument_group("imaging arguments")
diff --git a/arrakis/init_database.py b/arrakis/init_database.py
index 6c3f1f29..419ee92e 100644
--- a/arrakis/init_database.py
+++ b/arrakis/init_database.py
@@ -13,7 +13,7 @@
from pymongo.results import InsertManyResult
from tqdm import tqdm
-from arrakis.logger import TqdmToLogger, logger
+from arrakis.logger import TqdmToLogger, UltimateHelpFormatter, logger
from arrakis.utils.database import get_beam_inf_db, get_db, get_field_db, test_db
from arrakis.utils.json import MyEncoder
from arrakis.utils.meta import yes_or_no
@@ -553,7 +553,7 @@ def cli():
# Parse the command line options
parser = argparse.ArgumentParser(
- description=descStr, formatter_class=argparse.ArgumentDefaultsHelpFormatter
+ description=descStr, formatter_class=UltimateHelpFormatter
)
parser.add_argument(
diff --git a/arrakis/linmos.py b/arrakis/linmos.py
index a8d20cd2..afff931c 100644
--- a/arrakis/linmos.py
+++ b/arrakis/linmos.py
@@ -18,7 +18,7 @@
from spectral_cube.utils import SpectralCubeWarning
from spython.main import Client as sclient
-from arrakis.logger import logger
+from arrakis.logger import UltimateHelpFormatter, logger
from arrakis.utils.database import get_db, test_db
warnings.filterwarnings(action="ignore", category=SpectralCubeWarning, append=True)
@@ -284,7 +284,7 @@ def main(
stokeslist: Optional[List[str]] = None,
limit: Optional[int] = None,
) -> None:
- """Main script
+ """LINMOS flow
Args:
field (str): RACS field name.
@@ -296,7 +296,7 @@ def main(
yanda (str, optional): Yandasoft version. Defaults to "1.3.0".
yanda_img (Path, optional): Path to a yandasoft singularirt image. If `None`, the container version `yanda` will be downloaded. Defaults to None.
stokeslist (List[str], optional): Stokes parameters to process. Defaults to None.
- verbose (bool, optional): Verbose output. Defaults to True.
+ limit (int, optional): Limit the number of islands to process. Defaults to None.
"""
# Setup singularity image
image = get_yanda(version=yanda) if yanda_img is None else yanda_img
@@ -383,7 +383,7 @@ def cli():
# Parse the command line options
parser = argparse.ArgumentParser(
- description=descStr, formatter_class=argparse.ArgumentDefaultsHelpFormatter
+ description=descStr, formatter_class=UltimateHelpFormatter
)
parser.add_argument(
diff --git a/arrakis/logger.py b/arrakis/logger.py
index a38a4c44..0e141095 100644
--- a/arrakis/logger.py
+++ b/arrakis/logger.py
@@ -2,12 +2,19 @@
# -*- coding: utf-8 -*-
"""Logging module for arrakis"""
+import argparse
import io
import logging
from tqdm import tqdm
+# https://stackoverflow.com/questions/61324536/python-argparse-with-argumentdefaultshelpformatter-and-rawtexthelpformatter
+class UltimateHelpFormatter(
+    argparse.RawTextHelpFormatter, argparse.ArgumentDefaultsHelpFormatter
+):
+    """Argparse help formatter that keeps raw text layout and shows argument defaults."""
+
+
class TqdmToLogger(io.StringIO):
"""
Output stream for TQDM which will output to logger module instead of
diff --git a/arrakis/makecat.py b/arrakis/makecat.py
index 5294aefc..334713c3 100644
--- a/arrakis/makecat.py
+++ b/arrakis/makecat.py
@@ -25,7 +25,7 @@
from vorbin.voronoi_2d_binning import voronoi_2d_binning
from arrakis import columns_possum
-from arrakis.logger import TqdmToLogger, logger
+from arrakis.logger import TqdmToLogger, UltimateHelpFormatter, logger
from arrakis.utils.database import get_db, get_field_db, test_db
from arrakis.utils.pipeline import logo_str
from arrakis.utils.plotting import latexify
@@ -765,7 +765,7 @@ def main(
verbose: bool = True,
outfile: Union[str, None] = None,
) -> None:
- """Main
+ """Make a catalogue from the Arrakis database flow
Args:
field (str): RACS field name
@@ -1008,7 +1008,7 @@ def cli():
# Parse the command line options
parser = argparse.ArgumentParser(
- description=descStr, formatter_class=argparse.ArgumentDefaultsHelpFormatter
+ description=descStr, formatter_class=UltimateHelpFormatter
)
parser.add_argument(
"field", metavar="field", type=str, help="RACS field to mosaic - e.g. 2132-50A."
diff --git a/arrakis/merge_fields.py b/arrakis/merge_fields.py
index 4c9e2abd..b57d724c 100644
--- a/arrakis/merge_fields.py
+++ b/arrakis/merge_fields.py
@@ -9,7 +9,7 @@
from prefect import flow, task, unmapped
from arrakis.linmos import get_yanda, linmos
-from arrakis.logger import logger
+from arrakis.logger import UltimateHelpFormatter, logger
from arrakis.utils.database import get_db, test_db
from arrakis.utils.io import try_mkdir
@@ -345,7 +345,7 @@ def cli():
# Parse the command line options
parser = argparse.ArgumentParser(
- description=descStr, formatter_class=argparse.ArgumentDefaultsHelpFormatter
+ description=descStr, formatter_class=UltimateHelpFormatter
)
parser.add_argument(
diff --git a/arrakis/rmclean_oncuts.py b/arrakis/rmclean_oncuts.py
index a11de1bb..9e045f26 100644
--- a/arrakis/rmclean_oncuts.py
+++ b/arrakis/rmclean_oncuts.py
@@ -16,7 +16,7 @@
from RMtools_1D import do_RMclean_1D
from RMtools_3D import do_RMclean_3D
-from arrakis.logger import logger
+from arrakis.logger import UltimateHelpFormatter, logger
from arrakis.utils.database import get_db, test_db
from arrakis.utils.pipeline import logo_str
@@ -220,11 +220,11 @@ def main(
showPlots=False,
rm_verbose=False,
):
- """Main script
+ """Run RM-CLEAN on cutouts flow
Args:
field (str): RACS field name.
- outdir (str): Output directory.
+ outdir (Path): Output directory.
host (str): MongoDB host IP.
username (str, optional): Mongo username. Defaults to None.
password (str, optional): Mongo password. Defaults to None.
@@ -355,7 +355,7 @@ def cli():
# Parse the command line options
parser = argparse.ArgumentParser(
- description=descStr, formatter_class=argparse.ArgumentDefaultsHelpFormatter
+ description=descStr, formatter_class=UltimateHelpFormatter
)
parser.add_argument(
"field", metavar="field", type=str, help="RACS field to mosaic - e.g. 2132-50A."
diff --git a/arrakis/rmsynth_oncuts.py b/arrakis/rmsynth_oncuts.py
index 3b12797a..3a37a2be 100644
--- a/arrakis/rmsynth_oncuts.py
+++ b/arrakis/rmsynth_oncuts.py
@@ -30,7 +30,7 @@
from RMutils.util_misc import create_frac_spectra
from scipy.stats import norm
-from arrakis.logger import logger
+from arrakis.logger import UltimateHelpFormatter, logger
from arrakis.utils.database import get_db, test_db
from arrakis.utils.fitsutils import getfreq
from arrakis.utils.fitting import fit_pl, fitted_mean, fitted_std
@@ -887,6 +887,39 @@ def main(
ion: bool = False,
do_own_fit: bool = False,
) -> None:
+ """Run RMsynth on cutouts flow
+
+ Args:
+ field (str): RACS field
+ outdir (Path): Output directory
+ host (str): MongoDB host
+ epoch (int): Epoch
+ username (Union[str, None], optional): MongoDB username. Defaults to None.
+ password (Union[str, None], optional): MongoDB password. Defaults to None.
+ dimension (str, optional): RMsynth dimension. Defaults to "1d".
+ verbose (bool, optional): Verbose output. Defaults to True.
+ database (bool, optional): Update MongoDB. Defaults to False.
+ do_validate (bool, optional): Validate RMsynth. Defaults to False.
+ limit (Union[int, None], optional): Limit number of components. Defaults to None.
+ savePlots (bool, optional): Save plots. Defaults to False.
+ weightType (str, optional): Weight type. Defaults to "variance".
+ fitRMSF (bool, optional): Fit RMSF. Defaults to True.
+ phiMax_radm2 (Union[float, None], optional): Max FD. Defaults to None.
+ dPhi_radm2 (Union[float, None], optional): Delta FD. Defaults to None.
+ nSamples (int, optional): Samples across RMSF. Defaults to 5.
+ polyOrd (int, optional): Order of fit to I. Defaults to 3.
+ noStokesI (bool, optional): Ignore Stokes I. Defaults to False.
+ showPlots (bool, optional): Show plots. Defaults to False.
+ not_RMSF (bool, optional): Not RMSF. Defaults to False.
+ rm_verbose (bool, optional): Verbose RMsynth. Defaults to False.
+ debug (bool, optional): Debug plots. Defaults to False.
+ fit_function (str, optional): Fit function. Defaults to "log".
+ tt0 (Union[str, None], optional): Total intensity T0 image. Defaults to None.
+ tt1 (Union[str, None], optional): Total intensity T1 image. Defaults to None.
+        ion (bool, optional): Use FRion-corrected data. Defaults to False.
+        do_own_fit (bool, optional): Do own Stokes I fit. Defaults to False.
+ """
+
outdir = os.path.abspath(outdir)
outdir = os.path.join(outdir, "cutouts")
@@ -1054,7 +1087,7 @@ def cli():
# Parse the command line options
parser = argparse.ArgumentParser(
- description=descStr, formatter_class=argparse.ArgumentDefaultsHelpFormatter
+ description=descStr, formatter_class=UltimateHelpFormatter
)
parser.add_argument(
"field", metavar="field", type=str, help="RACS field to mosaic - e.g. 2132-50A."
diff --git a/docs/source/imaging.rst b/docs/source/imaging.rst
new file mode 100644
index 00000000..63ba3ce2
--- /dev/null
+++ b/docs/source/imaging.rst
@@ -0,0 +1,161 @@
+Imaging
+-------
+
+.. attention::
+
+ MeasurementSets produced by the ASKAPsoft pipeline need modification before using tools like WSClean. This can be done using `FixMS `, which is called internally by *Arrakis*.
+
+*Arrakis* provides an interface to the `WSClean `_ imaging software, with convenience functions for imaging multiple ASKAP beams simultaneously. There are two main interfaces for running the imaging pipeline:
+
+The `spice_image` CLI and API
+===================================
+
+.. attention::
+
+    This will only run using a sequential Prefect task runner, i.e. only one beam will be imaged at a time.
+    See either the Python API below or the `spice_process` usage further down for parallel imaging.
+
+
+This can be run using:
+
+.. code-block::
+
+ $ spice_image -h
+ usage: spice_image [-h] [--temp_dir TEMP_DIR] [--psf_cutoff PSF_CUTOFF] [--robust ROBUST] [--nchan NCHAN] [--pols POLS] [--size SIZE]
+ [--scale SCALE] [--mgain MGAIN] [--niter NITER] [--nmiter NMITER] [--auto_mask AUTO_MASK]
+ [--auto_threshold AUTO_THRESHOLD] [--local_rms] [--local_rms_window LOCAL_RMS_WINDOW]
+ [--force_mask_rounds FORCE_MASK_ROUNDS] [--gridder {direct-ft,idg,wgridder,tuned-wgridder,wstacking}] [--taper TAPER]
+ [--minuv MINUV] [--parallel PARALLEL] [--purge] [--mpi] [--multiscale] [--multiscale_scale_bias MULTISCALE_SCALE_BIAS]
+ [--multiscale_scales MULTISCALE_SCALES] [--absmem ABSMEM] [--make_residual_cubes] [--ms_glob_pattern MS_GLOB_PATTERN]
+ [--data_column DATA_COLUMN] [--no_mf_weighting] [--skip_fix_ms]
+ [--hosted-wsclean HOSTED_WSCLEAN | --local_wsclean LOCAL_WSCLEAN]
+ msdir outdir
+
+
+ mmm mmm mmm mmm mmm
+ )-( )-( )-( )-( )-(
+ ( S ) ( P ) ( I ) ( C ) ( E )
+ | | | | | | | | | |
+ |___| |___| |___| |___| |___|
+ mmm mmm mmm mmm
+ )-( )-( )-( )-(
+ ( R ) ( A ) ( C ) ( S )
+ | | | | | | | |
+ |___| |___| |___| |___|
+
+ Arrkis imager
+
+
+ options:
+ -h, --help show this help message and exit
+
+ imaging arguments:
+ msdir Directory containing MS files
+ outdir Directory to output images
+ --temp_dir TEMP_DIR Temporary directory to store intermediate files (default: None)
+ --psf_cutoff PSF_CUTOFF
+ Cutoff for smoothing in units of arcseconds. (default: None)
+ --robust ROBUST
+ --nchan NCHAN
+ --pols POLS
+ --size SIZE
+ --scale SCALE
+ --mgain MGAIN
+ --niter NITER
+ --nmiter NMITER
+ --auto_mask AUTO_MASK
+ --auto_threshold AUTO_THRESHOLD
+ --local_rms
+ --local_rms_window LOCAL_RMS_WINDOW
+ --force_mask_rounds FORCE_MASK_ROUNDS
+ --gridder {direct-ft,idg,wgridder,tuned-wgridder,wstacking}
+ --taper TAPER
+ --minuv MINUV
+ --parallel PARALLEL
+ --purge Purge intermediate files (default: False)
+ --mpi Use MPI (default: False)
+ --multiscale Use multiscale clean (default: False)
+ --multiscale_scale_bias MULTISCALE_SCALE_BIAS
+ The multiscale scale bias term provided to wsclean. (default: None)
+ --multiscale_scales MULTISCALE_SCALES
+ The scales used in the multiscale clean. (default: 0,2,4,8,16,32,64,128)
+ --absmem ABSMEM Absolute memory limit in GB (default: None)
+ --make_residual_cubes
+ Create residual cubes as well as cubes from restored images. (default: False)
+ --ms_glob_pattern MS_GLOB_PATTERN
+ The pattern used to search for measurement sets. (default: scienceData*_averaged_cal.leakage.ms)
+ --data_column DATA_COLUMN
+ Which column in the measurement set to image. (default: CORRECTED_DATA)
+ --no_mf_weighting Do not use multi-frequency weighting. (default: False)
+ --skip_fix_ms Do not apply the ASKAP MS corrections from the package fixms. (default: False)
+ --hosted-wsclean HOSTED_WSCLEAN
+ Docker or Singularity image for wsclean [docker://alecthomson/wsclean:latest] (default: docker://alecthomson/wsclean:latest)
+ --local_wsclean LOCAL_WSCLEAN
+ Path to local wsclean Singularity image (default: None)
+
+
+You may instead prefer to use the Python API, which is more flexible and allows for parallel imaging. You will need to set up your own Prefect task-runner for this. Here is a (very) minimal example:
+
+.. code-block:: python
+
+    from prefect.task_runners import SequentialTaskRunner
+    from arrakis.imager import main as imager_flow
+
+
+    def main():
+        task_runner = SequentialTaskRunner()
+        imager_flow.with_options(
+            task_runner=task_runner
+        )(
+            ...  # Add your arguments here
+        )
+
+
+You can find the full list of arguments in the API docs here: :py:mod:`arrakis.imager.main`.
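+
+If you want beams imaged in parallel, the sequential runner can be swapped for a Dask-backed task runner. The sketch below is illustrative only: it assumes the optional `prefect-dask` package is installed, and the small local cluster (`n_workers=4`) is a placeholder you would tune (or replace with e.g. a `dask_jobqueue.SLURMCluster`) for your own system.
+
+.. code-block:: python
+
+    from prefect_dask import DaskTaskRunner
+
+    from arrakis.imager import main as imager_flow
+
+    # Illustrative settings: a small local Dask cluster with 4 single-threaded workers.
+    task_runner = DaskTaskRunner(
+        cluster_kwargs={"n_workers": 4, "threads_per_worker": 1},
+    )
+
+    # Beams are then submitted as Prefect tasks to the Dask cluster.
+    imager_flow.with_options(task_runner=task_runner)(
+        ...  # Add your arguments here, as in the example above
+    )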
+
+
+The `spice_process` CLI
+=====================================
+
+It is also possible to run just the imaging part of the pipeline using the `spice_process` command line tool, as described in :ref:`Running the pipeline`. You will need to invoke the argument `--imager_only`, along with the other imaging arguments. This will run the imaging pipeline in parallel, using the Dask task runner defined in your config file of choice. Here is an example pipeline config for imaging only:
+
+.. code-block:: cfg
+
+ # SB8593.cfg
+ imager_only = True
+ ms_glob_pattern = 'scienceData_SB8593_RACS_1347-37A.beam*_averaged_cal.leakage.split.ms'
+ imager_dask_config = petrichor.yaml
+ mgain = 0.7
+ force_mask_rounds = 8
+ nmiter = 15
+ niter = 500000
+ local_rms = True
+ auto_mask = 4
+ local_rms_window = 60
+ auto_threshold = 1
+ size = 6144
+ scale = 2.5
+ robust = -0.5
+ pols = IQU
+ gridder = wgridder
+ minuv = 200
+ local_wsclean = wsclean_force_mask.sif
+ multiscale = True
+ multiscale_scale_bias = 0.7
+ multiscale_scales = "0,2,4,8,16,32,64,128"
+ purge = False
+ absmem = 100
+ nchan = 36
+ psf_cutoff = 30
+ skip_fix_ms = False
+ data_column = CORRECTED_DATA
+
+You would then run the pipeline using:
+
+.. code-block:: bash
+
+ spice_process \
+ --config SB8593.cfg \
+ /path/to/ms/files/ \
+ /path/to/work/dir/ \
+ RACS_1347-37A
diff --git a/docs/source/index.rst b/docs/source/index.rst
index 91b1963d..a8fb0c17 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -19,6 +19,7 @@ Scripts for processing polarized RACS data products.
parallel
start
pipeline
+ imaging
acknowledge
changelog
autoapi/index
diff --git a/docs/source/start.rst b/docs/source/start.rst
index 3ae618f2..927987b6 100644
--- a/docs/source/start.rst
+++ b/docs/source/start.rst
@@ -226,7 +226,7 @@ To set up a Prefect Server, fist install Prefect with `pip`. You will also need
Tips on adaptive scaling:
-========================
+=========================
There can be strange failure modes when a prefect based workflow is being executed on a Dask task runner on a `dask_jobqueue.SLURMCluster` object with adaptive scaling enabled. Commonly, this presents as a previously completed taskrun restarting. Depending on the actual workflow, this may outright fail (e.g. if a data product that is expected has been removed), or may run perfectly fine (e.g. wsclean clobbering existing files and reimaging). Naturally, this is not behaviour that should be encouraged.
@@ -235,6 +235,7 @@ It appears as those the issue is related job stealing among a dask workers estab
The dask environment variables below are intended to try to limit these failure modes. These should be exported in the `sbatch` launch script before the python prefect / dask entry point.
.. code-block:: bash
+
# See https://docs.dask.org/en/latest/configuration.html#distributed-scheduler
# For more information on these variables
export DASK_DISTRIBUTED__SCHEDULER__WORKER_SATURATION=0.01