merge in master
landmanbester committed Jan 30, 2024
2 parents 13d26cc + 57e1413 commit a790f7a
Showing 44 changed files with 3,224 additions and 148 deletions.
38 changes: 36 additions & 2 deletions docs/source/installation.rst
@@ -3,8 +3,11 @@ Installation

This page details QuartiCal's recommended installation procedure.

Ubuntu 18.04+
~~~~~~~~~~~~~
Ubuntu 18.04+ via pip
~~~~~~~~~~~~~~~~~~~~~

This is the preferred method of installation. It is simple but may be
vulnerable to upstream changes.

If you wish to install QuartiCal in a virtual environment (recommended), see
`Using a virtual environment`_.
@@ -25,6 +28,37 @@ QuartiCal can be installed by running the following:
pip3 install -e path/to/repo/
Ubuntu 18.04+ via poetry
~~~~~~~~~~~~~~~~~~~~~~~~

Installing via poetry is more involved but should always work.

First, install `poetry <https://python-poetry.org/docs/>`_.

Assuming you have cloned the repository from git and checked out the relevant
tag, run the following from inside the QuartiCal folder:

.. code:: bash

   poetry install

.. note::

   This will automatically install QuartiCal into a new virtual environment
   matching your system Python. The Python version can be changed prior to
   installation using:

   .. code:: bash

      poetry env use python3.10

Users can enter the QuartiCal virtual environment using:

.. code:: bash

   poetry -C path/to/repo shell

Using a virtual environment
~~~~~~~~~~~~~~~~~~~~~~~~~~~

2,887 changes: 2,887 additions & 0 deletions poetry.lock

Large diffs are not rendered by default.

37 changes: 17 additions & 20 deletions pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "quartical"
version = "0.2.0"
version = "0.2.1"
description = "Fast and flexible calibration suite for radio interferometer data."
repository = "https://github.com/ratt-ru/QuartiCal"
documentation = "https://quartical.readthedocs.io"
@@ -13,7 +13,6 @@ classifiers = [
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Topic :: Scientific/Engineering :: Astronomy"
@@ -25,28 +24,26 @@ include = [
]

[tool.poetry.dependencies]
python = "^3.9, < 3.13"
tbump = "^6.10.0"
columnar = "^1.4.1"
"ruamel.yaml" = "^0.17.26"
dask = {extras = ["diagnostics"], version = "^2023.1.0"}
distributed = "^2023.1.0"
dask-ms = {git = "https://github.com/ratt-ru/dask-ms.git", extras = ["s3", "xarray", "zarr"], branch="master"}
codex-africanus = {extras = ["dask", "scipy", "astropy", "python-casacore"], version = "^0.3.4"}
astro-tigger-lsm = "^1.7.2"
loguru = "^0.7.0"
requests = "^2.31.0"
pytest = "^7.3.1"
omegaconf = "^2.3.0"
colorama = "^0.4.6"
stimela = {git = "https://github.com/caracal-pipeline/stimela", branch = "FIASCO3"}
python = "^3.9"
astro-tigger-lsm = ">=1.7.2, <=1.7.3"
codex-africanus = {extras = ["dask", "scipy", "astropy", "python-casacore"], version = ">=0.3.4, <=0.3.4"}
colorama = ">=0.4.6, <=0.4.6"
columnar = ">=1.4.1, <=1.4.1"
dask = {extras = ["diagnostics"], version = ">=2023.5.0, <=2023.12.1"}
dask-ms = {extras = ["s3", "xarray", "zarr"], version = ">=0.2.16, <=0.2.18"}
distributed = ">=2023.5.0, <=2023.12.1"
loguru = ">=0.7.0, <=0.7.2"
matplotlib = ">=3.5.1, <=3.8.2"
omegaconf = ">=2.3.0, <=2.3.0"
pytest = ">=7.3.1, <=7.4.4"
requests = ">=2.31.0, <=2.31.0"
"ruamel.yaml" = ">=0.17.26, <=0.17.40"
stimela = "2.0rc8"
tbump = ">=6.10.0, <=6.11.0"
ducc0 = "^0.31.0"
sympy = "^1.12"
matplotlib = "^3.5.1"
nifty8 = {git = "https://gitlab.mpcdf.mpg.de/ift/nifty.git", branch = "NIFTy_8"}

[tool.poetry.extras]
degrid = ["ducc0", "sympy"]

[tool.poetry.scripts]
goquartical = 'quartical.executor:execute'
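The dependency block above replaces caret ranges and git branches with explicit lower and upper bounds. As a rough illustration of what one of those bounds admits, here is a minimal sketch using the packaging library (an assumption for illustration only; it is not one of the dependencies listed above):

.. code:: python

   from packaging.specifiers import SpecifierSet
   from packaging.version import Version

   # Mirrors the dask pin above: ">=2023.5.0, <=2023.12.1".
   bounds = SpecifierSet(">=2023.5.0,<=2023.12.1")

   for candidate in ["2023.1.0", "2023.5.0", "2023.12.1", "2024.1.0"]:
       print(candidate, Version(candidate) in bounds)
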
16 changes: 16 additions & 0 deletions quartical/apps/plotter.py
@@ -216,10 +216,16 @@ def _plot(group, xds, args):

# NOTE: This mutates the data variables in place.
data = xds[args.plot_var].values
<<<<<<< HEAD
# flags = xds[args.flag_var].values
# data[np.where(flags)] = np.nan # Set flagged values to nan (not plotted).
# xds['flagged_data'] = np.where(flags, data, np.nan)
# xds = xds.drop_vars(args.flag_var) # No more use for flags.
=======
flags = xds[args.flag_var].values
data[np.where(flags)] = np.nan # Set flagged values to nan (not plotted).
xds = xds.drop_vars(args.flag_var) # No more use for flags.
>>>>>>> main

# Construct list of lists containing axes over which we iterate i.e.
# produce a plot per combination of these values.
@@ -245,7 +251,10 @@ def _plot(group, xds, args):

fig, ax = plt.subplots(figsize=args.fig_size)

<<<<<<< HEAD
# import ipdb; ipdb.set_trace()
=======
>>>>>>> main
for ia in product(*iter_axes_itr):

sel = {ax: val for ax, val in zip(args.iter_axes, ia)}
@@ -302,7 +311,10 @@

fig.savefig(
f"{args.output_path.full_path}/{subdir_path}/{fig_name}.png",
<<<<<<< HEAD
dpi=250,
=======
>>>>>>> main
bbox_inches="tight" # SLOW, but slightly unavoidable.
)

@@ -323,8 +335,12 @@ def plot():
xdsl = xds_from_zarr(gain_path)

# Select only the necessary fields for plotting on each dataset.
<<<<<<< HEAD
# xdsl = [xds[[args.plot_var, args.flag_var]] for xds in xdsl]
xdsl = [xds[[args.plot_var]] for xds in xdsl]
=======
xdsl = [xds[[args.plot_var, args.flag_var]] for xds in xdsl]
>>>>>>> main

# Partitioned dictionary of xarray.Datasets.
xdsd = to_plot_dict(xdsl, args.iter_attrs)
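On the main side of the conflict block above, flagged gain values are set to NaN so that matplotlib simply skips them. A minimal standalone sketch of that masking step, using plain numpy arrays rather than the actual gain datasets:

.. code:: python

   import numpy as np

   # Toy amplitudes and a boolean flag array of the same shape.
   data = np.array([1.0, 2.0, 3.0, 4.0])
   flags = np.array([False, True, False, True])

   # Flagged points become NaN and are therefore not plotted.
   data[np.where(flags)] = np.nan
   print(data)  # [ 1. nan  3. nan]
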
12 changes: 8 additions & 4 deletions quartical/apps/summary.py
@@ -119,7 +119,11 @@ def field_info(path):

field_xds = xds_from_table_fragment(path + "::FIELD")[0]

<<<<<<< HEAD
field_ids = list(range(field_xds.dims['row']))
=======
field_ids = list(range(field_xds.sizes['row']))
>>>>>>> main
source_ids = [i for i in field_xds.SOURCE_ID.values]
names = [n for n in field_xds.NAME.values]
phase_dirs = [pd for pd in field_xds.PHASE_DIR.values]
@@ -199,7 +203,7 @@ def spw_info(path):
chunks={"row": 1, "chan": -1}
)

n_chan_per_spw = [xds.dims["chan"] for xds in spw_xds_list]
n_chan_per_spw = [xds.sizes["chan"] for xds in spw_xds_list]

bw_per_spw = [xds.TOTAL_BANDWIDTH.values.item() for xds in spw_xds_list]

@@ -244,9 +248,9 @@ def pointing_info(path):

def dimension_summary(xds_list):

rows_per_xds = [xds.dims["row"] for xds in xds_list]
chan_per_xds = [xds.dims["chan"] for xds in xds_list]
corr_per_xds = [xds.dims["corr"] for xds in xds_list]
rows_per_xds = [xds.sizes["row"] for xds in xds_list]
chan_per_xds = [xds.sizes["chan"] for xds in xds_list]
corr_per_xds = [xds.sizes["corr"] for xds in xds_list]

utime_per_xds = [np.unique(xds.TIME.values).size for xds in xds_list]

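The changes above swap xds.dims[...] for xds.sizes[...], which appears to track xarray's recommendation to use .sizes whenever a name-to-length mapping is needed. A minimal sketch on a toy dataset (not a Measurement Set):

.. code:: python

   import numpy as np
   import xarray as xr

   ds = xr.Dataset({"DATA": (("row", "chan"), np.zeros((6, 4)))})

   # .sizes maps dimension name to length on both Datasets and DataArrays,
   # which is what the summary code needs.
   print(ds.sizes["row"], ds.sizes["chan"])  # 6 4
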
6 changes: 3 additions & 3 deletions quartical/calibration/calibrate.py
@@ -135,7 +135,7 @@ def add_calibration_graph(
"""

# TODO: Does this check belong here or elsewhere?
have_dd_model = any(xds.dims['dir'] > 1 for xds in data_xds_list)
have_dd_model = any(xds.sizes['dir'] > 1 for xds in data_xds_list)
have_dd_chain = any(term.direction_dependent for term in chain)

if have_dd_model and not have_dd_chain:
@@ -237,7 +237,7 @@ def make_visibility_output(
itr = enumerate(zip(data_xds_list, mapping_xds_list))

if output_opts.subtract_directions:
n_dir = data_xds_list[0].dims['dir'] # Should be the same on all xdss.
n_dir = data_xds_list[0].sizes['dir'] # Should be the same over xdss.
requested = set(output_opts.subtract_directions)
valid = set(range(n_dir))
invalid = requested - valid
@@ -266,7 +266,7 @@
[mapping_xds.get(f"{k}_dir_map").data for k in gain_terms.keys()]
)

corr_mode = data_xds.dims["corr"]
corr_mode = data_xds.sizes["corr"]

is_bda = hasattr(data_xds, "ROW_MAP") # We are dealing with BDA.
row_map = data_xds.ROW_MAP.data if is_bda else None
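The subtract_directions hunk checks the requested direction indices against those actually present on the datasets. A toy sketch of that set arithmetic, with made-up values standing in for the real options and dataset sizes:

.. code:: python

   n_dir = 3                        # stand-in for data_xds_list[0].sizes["dir"]
   subtract_directions = (0, 2, 5)  # hypothetical user request

   requested = set(subtract_directions)
   valid = set(range(n_dir))        # {0, 1, 2}
   invalid = requested - valid      # {5}

   if invalid:
       raise ValueError(f"Invalid direction indices requested: {sorted(invalid)}")
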
8 changes: 4 additions & 4 deletions quartical/calibration/constructor.py
@@ -51,7 +51,7 @@ def construct_solver(
weight_col = data_xds.WEIGHT.data
flag_col = data_xds.FLAG.data
gain_terms = gain_xds_lod[xds_ind]
corr_mode = data_xds.dims["corr"]
corr_mode = data_xds.sizes["corr"]

block_id_arr = get_block_id_arr(data_col)
aux_block_info = {
@@ -77,7 +77,7 @@
blocker.add_input(
v.name,
v.data,
("row",) if v.dims == ("time",) else v.dims
("row",) if set(v.dims) == {"time"} else v.dims
)

blocker.add_input(
@@ -272,8 +272,8 @@ def expand_specs(gain_terms):
# represents frequency chunks and the inner-most list contains the
# specs per term. Might be possible to do this with arrays instead.

n_t_chunks = set(xds.dims["time_chunk"] for xds in gain_terms.values())
n_f_chunks = set(xds.dims["freq_chunk"] for xds in gain_terms.values())
n_t_chunks = set(xds.sizes["time_chunk"] for xds in gain_terms.values())
n_f_chunks = set(xds.sizes["freq_chunk"] for xds in gain_terms.values())

assert len(n_t_chunks) == 1, "Chunking in time is inconsistent."
assert len(n_f_chunks) == 1, "Chunking in freq is inconsistent."
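The blocker input hunk now compares dimension names as a set, so the check no longer depends on v.dims being exactly the tuple ("time",). A toy illustration with plain sequences rather than xarray variables:

.. code:: python

   # Any container holding only "time" maps to ("row",); everything else passes through.
   for dims in [("time",), ["time"], ("time", "chan")]:
       mapped = ("row",) if set(dims) == {"time"} else tuple(dims)
       print(dims, "->", mapped)
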
2 changes: 1 addition & 1 deletion quartical/calibration/mapping.py
@@ -63,7 +63,7 @@ def make_mapping_datasets(data_xds_list, chain):
freq_interval
)

n_dir = data_xds.dims["dir"]
n_dir = data_xds.sizes["dir"]

dir_map = gain_obj.make_dir_map(
n_dir,
9 changes: 6 additions & 3 deletions quartical/config/external.py
@@ -1,5 +1,5 @@
import re
from dataclasses import make_dataclass
from dataclasses import make_dataclass, field
from omegaconf import OmegaConf as oc
from typing import Dict, Any
from scabha.cargo import Parameter
@@ -37,8 +37,11 @@ def finalize_structure(additional_config):
FinalConfig = make_dataclass(
"FinalConfig",
[
*[(m, ModelComponent, ModelComponent()) for m in models],
*[(t, Gain, Gain()) for t in terms]
*[
(m, ModelComponent, field(default_factory=ModelComponent))
for m in models
],
*[(t, Gain, field(default_factory=Gain)) for t in terms]
],
bases=(BaseConfig,)
)
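Wrapping the defaults in field(default_factory=...) avoids handing make_dataclass a shared, mutable default instance; on Python 3.11+ an unhashable default (such as an instance of a dataclass with eq=True) is rejected outright. A minimal sketch with a stand-in class, not QuartiCal's real Gain or ModelComponent:

.. code:: python

   from dataclasses import dataclass, field, make_dataclass

   @dataclass
   class Gain:                # stand-in for a QuartiCal config class
       type: str = "complex"

   # make_dataclass("Bad", [("t1", Gain, Gain())]) raises ValueError on
   # Python 3.11+ because Gain() is unhashable (eq=True sets __hash__ to None).

   Good = make_dataclass("Good", [("t1", Gain, field(default_factory=Gain))])
   print(Good().t1)           # Gain(type='complex')
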
3 changes: 2 additions & 1 deletion quartical/config/helper.py
@@ -102,7 +102,8 @@ def help():
]
)
]
HelpConfig = finalize_structure(additional_config)
help_class = finalize_structure(additional_config)
HelpConfig = help_class()

if len(sys.argv) == 1 or help_arg == "help":
print_help(HelpConfig)
2 changes: 1 addition & 1 deletion quartical/data_handling/angles.py
@@ -75,7 +75,7 @@ def make_parangle_xds_list(ms_path, data_xds_list):
coords={
"utime": np.arange(sum(xds.UTIME_CHUNKS)),
"ant": xds.ant,
"receptor": np.arange(feedtab.dims['receptors'])
"receptor": np.arange(feedtab.sizes['receptors'])
},
attrs={
"FEED_TYPE": feed_type,
14 changes: 7 additions & 7 deletions quartical/data_handling/bda.py
@@ -64,15 +64,15 @@ def process_bda_input(data_xds_list, spw_xds_list, weight_column):
"WEIGHT_SPECTRUM for BDA data.")

# Figure out the highest frequency resolution and its DDID.
spw_dims = {i: xds.dims["chan"] for i, xds in enumerate(spw_xds_list)}
spw_dims = {i: xds.sizes["chan"] for i, xds in enumerate(spw_xds_list)}
max_nchan_ddid = max(spw_dims, key=spw_dims.get)
max_nchan = spw_dims[max_nchan_ddid]

bda_xds_list = []

for xds in data_xds_list:

upsample_factor = max_nchan//xds.dims["chan"]
upsample_factor = max_nchan//xds.sizes["chan"]

weight_col = xds.WEIGHT_SPECTRUM.data

@@ -83,7 +83,7 @@ def process_bda_input(data_xds_list, spw_xds_list, weight_column):
weight_col/upsample_factor)})

# Create a selection which will upsample the frequency axis.
selection = np.repeat(np.arange(xds.dims["chan"]), upsample_factor)
selection = np.repeat(np.arange(xds.sizes["chan"]), upsample_factor)

bda_xds = bda_xds.sel({"chan": selection})

@@ -101,7 +101,7 @@ def process_bda_input(data_xds_list, spw_xds_list, weight_column):
bda_xds_list = [xarray.concat(xdss, dim="row")
for xdss in xds_merge_list]

bda_xds_list = [xds.chunk({"row": xds.dims["row"]})
bda_xds_list = [xds.chunk({"row": xds.sizes["row"]})
for xds in bda_xds_list]

# This should guarantee monotonicity in time (not baseline).
@@ -223,10 +223,10 @@ def process_bda_output(xds_list, ref_xds_list, output_cols):

chan_ind = dims.index('chan')

nchan = xds.dims['chan']
ref_nchan = ref_xds.dims['chan']
nchan = xds.sizes['chan']
ref_nchan = ref_xds.sizes['chan']

shape[chan_ind: chan_ind + 1] = [ref_xds.dims['chan'], -1]
shape[chan_ind: chan_ind + 1] = [ref_xds.sizes['chan'], -1]

data = data.reshape(shape)

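The BDA hunks build an upsampling selection with np.repeat and divide the weights by the upsample factor, presumably so that the duplicated channels do not inflate the total weight. A standalone numpy sketch of that step, with toy sizes in place of the real spectral window dimensions:

.. code:: python

   import numpy as np

   nchan, upsample_factor = 4, 2

   # Selection that repeats each channel index, upsampling the frequency axis.
   selection = np.repeat(np.arange(nchan), upsample_factor)
   print(selection)                          # [0 0 1 1 2 2 3 3]

   # Weights are scaled down by the upsample factor, as in the hunk above.
   weights = np.full(nchan, 8.0)
   upsampled = weights[selection] / upsample_factor
   print(upsampled)                          # eight values of 4.0
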
8 changes: 4 additions & 4 deletions quartical/data_handling/model_handler.py
@@ -92,7 +92,7 @@ def add_model_graph(
# which require them. P Jones is applied to predicted components
# internally, so we only need to consider model columns for now.

n_corr = {xds.dims["corr"] for xds in data_xds_list}.pop()
n_corr = {xds.sizes["corr"] for xds in data_xds_list}.pop()

if model_opts.apply_p_jones:
# NOTE: Applying parallactic angle when there are fewer than four
Expand Down Expand Up @@ -219,9 +219,9 @@ def assign_identity_model(data_xds_list):

model_dims = [
(
xds.dims['row'],
xds.dims['chan'],
xds.dims['corr']
xds.sizes['row'],
xds.sizes['chan'],
xds.sizes['corr']
)
for xds in data_xds_list
]
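In add_model_graph above, n_corr is read with a set comprehension and pop(), which implicitly assumes every dataset carries the same number of correlations. A toy illustration of that pattern with stand-in values, not the real datasets:

.. code:: python

   corr_counts = [4, 4, 4]              # stand-in for xds.sizes["corr"] per dataset

   # When all counts agree the set collapses to a single element, which pop() returns.
   n_corr = {n for n in corr_counts}.pop()
   print(n_corr)                        # 4
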