style: update codebase with fixes for new ruff rules
ljgray committed Nov 2, 2023
1 parent 57ab458 commit 922b3c8
Showing 27 changed files with 200 additions and 272 deletions.
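
Most of the diff below applies a handful of recurring ruff autofixes: imports merged and sorted into a single block (isort-style I rules), %-formatting replaced by f-strings (likely UP031), zero-argument super() calls (likely UP008), dict literals instead of dict() calls (likely C408), tuple unpacking instead of concatenation (likely RUF005), and returning an expression directly rather than through a throwaway variable or a trailing else (RET-family rules). The following is a minimal sketch of those patterns using a made-up class; the rule codes are a best guess at which checks are involved and are not taken from the commit itself.

"""Illustrative sketch of the autofix patterns in this commit (hypothetical classes)."""

import logging


class Example:
    def __init__(self, latitude: float):
        self.latitude = latitude
        self.log = logging.getLogger(__name__)

    def describe(self) -> None:
        # f-string instead of %-formatting:
        # was: self.log.info("Latitude %0.4f deg" % self.latitude)
        self.log.info(f"Latitude {self.latitude:.4f} deg")


class Derived(Example):
    def __init__(self, latitude: float):
        # zero-argument super():
        # was: super(Derived, self).__init__(latitude)
        super().__init__(latitude)

    def slice_for(self, ind: tuple) -> tuple:
        # unpacking instead of tuple concatenation:
        # was: return ind + (slice(None), slice(None))
        return (*ind, slice(None), slice(None))

    def options(self) -> dict:
        # dict literal instead of dict(), and return the expression directly
        # instead of assigning it to a throwaway variable first:
        # was: opts = dict(maxiter=20, xtol=1e-2); return opts
        return {"maxiter": 20, "xtol": 1e-2}

Each of these patterns appears repeatedly in the files shown below, for example in beam.py, beamform.py and delay.py.
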
9 changes: 3 additions & 6 deletions draco/analysis/beam.py
@@ -16,12 +16,9 @@

import numpy as np
import scipy.constants

from caput import interferometry

-from ..core import task
-from ..core import io
-from ..core import containers
+from ..core import containers, io, task
from ..util import tools


@@ -41,8 +38,8 @@ def setup(self, telescope: io.TelescopeConvertible):
self.telescope = io.get_telescope(telescope)

self.log.info(
"Using telescope at latitude %0.4f deg with rotation angle %0.4f deg."
% (self.telescope.latitude, self.telescope.rotation_angle)
f"Using telescope at latitude {self.telescope.latitude:.4f} "
f"deg with rotation angle {self.telescope.rotation_angle:.4f} deg."
)

def process(self, data, beam):
23 changes: 13 additions & 10 deletions draco/analysis/beamform.py
@@ -1,20 +1,23 @@
"""Beamform visibilities to the location of known sources."""

from typing import Tuple

import healpy
import numpy as np
import scipy.interpolate
-from skyfield.api import Star, Angle

from caput import config
from caput import time as ctime

from cora.util import units
+from skyfield.api import Angle, Star

-from ..core import task, containers, io
+from ..core import containers, io, task
from ..util._fast_tools import beamform
-from ..util.tools import baseline_vector, polarization_map, invert_no_zero
-from ..util.tools import calculate_redundancy
+from ..util.tools import (
+    baseline_vector,
+    calculate_redundancy,
+    invert_no_zero,
+    polarization_map,
+)

# Constants
NU21 = units.nu21
@@ -676,7 +679,7 @@ def setup(self, manager, source_cat):
Catalog of points to beamform at.
"""
-super(BeamForm, self).setup(manager)
+super().setup(manager)
self.catalog = source_cat

def process(self, data):
@@ -700,7 +703,7 @@ def process(self, data):
return None

# Call generic process method.
-return super(BeamForm, self).process()
+return super().process()


class BeamFormCat(BeamFormBase):
@@ -718,7 +721,7 @@ def setup(self, manager, data):
Data to beamform on.
"""
-super(BeamFormCat, self).setup(manager)
+super().setup(manager)

# Process and make available various data
self._process_data(data)
@@ -742,7 +745,7 @@ def process(self, source_cat):
return None

# Call generic process method.
-return super(BeamFormCat, self).process()
+return super().process()


class BeamFormExternalBase(BeamFormBase):
20 changes: 8 additions & 12 deletions draco/analysis/dayenu.py
@@ -7,11 +7,10 @@

import numpy as np
import scipy.interpolate

from caput import config
from cora.util import units

-from ..core import task, io, containers
+from ..core import containers, io, task
from ..util import tools


@@ -302,7 +301,7 @@ def process(self, ringmap):
for ee, el in enumerate(els):
t0 = time.time()

-slc = ind + (slice(None), slice(None), ee)
+slc = (*ind, slice(None), slice(None), ee)
wslc = slc[1:]

# Flag frequencies and times with zero weight
@@ -379,13 +378,12 @@ def _get_cut(self, el, pol=None, **kwargs):
if self._cut_interpolator is None:
return self.tauw

-elif pol in self._cut_interpolator:
+if pol in self._cut_interpolator:
return self._cut_interpolator[pol](el)

-else:
-# The file does not contain this polarisation (likely XY or YX).
-# Use the maximum value over the polarisations that we do have.
-return np.max([func(el) for func in self._cut_interpolator.values()])
+# The file does not contain this polarisation (likely XY or YX).
+# Use the maximum value over the polarisations that we do have.
+return np.max([func(el) for func in self._cut_interpolator.values()])


class DayenuMFilter(task.SingleTask):
@@ -524,19 +522,17 @@ def process(self, stream):
* mixer.conj()
)

self.log.debug("Took %0.2f seconds." % (time.time() - t0,))
self.log.debug(f"Took {time.time() - t0:0.2f} seconds.")

return stream

def _get_cut(self, freq, xsep):
lmbda = units.c / (freq * 1e6)
u = xsep / lmbda
-m = instantaneous_m(
+return instantaneous_m(
0.0, np.radians(self.telescope.latitude), np.radians(self.dec), u, 0.0
)

-return m


def highpass_delay_filter(freq, tau_cut, flag, epsilon=1e-12):
"""Construct a high-pass delay filter.
20 changes: 7 additions & 13 deletions draco/analysis/delay.py
@@ -3,13 +3,12 @@
import typing

import numpy as np
-from numpy.lib.recfunctions import structured_to_unstructured
import scipy.linalg as la

-from caput import mpiarray, config
+from caput import config, mpiarray
from cora.util import units
+from numpy.lib.recfunctions import structured_to_unstructured

-from ..core import containers, task, io
+from ..core import containers, io, task
from ..util import random, tools


@@ -430,9 +429,7 @@ def process(self, ss):

# Evaluate frequency->delay transform. (self._evaluate take the empty output
# container, fills it, and returns it)
-out_cont = self._evaluate(data_view, weight_view, out_cont)
-
-return out_cont
+return self._evaluate(data_view, weight_view, out_cont)

def _process_data(self, ss):
"""Get relevant views of data and weights, and create output container.
@@ -1268,8 +1265,7 @@ def _draw_signal_sample_t(S):
Ci = np.identity(2 * Ni.shape[0]) + np.dot(R, Rt)
x = la.solve(Ci, y, sym_pos=True)

-s = Sh[:, np.newaxis] * (np.dot(Rt, x) + w1)
-return s
+return Sh[:, np.newaxis] * (np.dot(Rt, x) + w1)

def _draw_ps_sample(d):
# Draw a random delay power spectrum sample assuming the signal is Gaussian and
@@ -1285,9 +1281,7 @@ def _draw_ps_sample(d):
df = d.shape[1]
chi2 = rng.chisquare(df, size=d.shape[0])

-S_samp = S_hat * df / chi2
-
-return S_samp
+return S_hat * df / chi2

# Select the method to use for the signal sample based on how many frequencies
# versus delays there are
@@ -1483,4 +1477,4 @@ def _take_view(arr: np.ndarray, ind: int, axis: int) -> np.ndarray:
# Like np.take but returns a view (instead of a copy), but only supports a scalar
# index
sl = (slice(None),) * axis
-return arr[sl + (ind,)]
+return arr[(*sl, ind)]
14 changes: 8 additions & 6 deletions draco/analysis/fgfilter.py
@@ -2,9 +2,9 @@


import numpy as np

from caput import config
-from ..core import task, containers, io

+from ..core import containers, io, task


class _ProjectFilterBase(task.SingleTask):
@@ -41,6 +41,8 @@ def process(self, inp):
if self.mode == "filter":
return self._backward(self._forward(inp))

+return None

def _forward(self, inp):
pass

@@ -170,8 +172,8 @@ def _forward(self, svdmodes):
# Check and set the KL basis we are using
if self.klname not in self.product_manager.kltransforms:
raise RuntimeError(
"Requested KL basis %s not available (options are %s)"
% (self.klname, repr(list(self.product_manager.kltransforms.items())))
f"Requested KL basis {self.kname} not available (options "
f"are {list(self.product_manager.kltransforms.items())!r})"
)
kl = self.product_manager.kltransforms[self.klname]

@@ -208,8 +210,8 @@ def _backward(self, klmodes):
# Check and set the KL basis we are using
if self.klname not in self.product_manager.kltransforms:
raise RuntimeError(
"Requested KL basis %s not available (options are %s)"
% (self.klname, repr(list(self.product_manager.kltransforms.items())))
f"Requested KL basis {self.klname} not available (options "
f"are {list(self.product_manager.kltransforms.items())!r})"
)
kl = self.product_manager.kltransforms[self.klname]

17 changes: 7 additions & 10 deletions draco/analysis/flagging.py
@@ -7,16 +7,15 @@
be excluded and `False` for clean samples.
"""

-from typing import Union, overload
import warnings
+from typing import Union, overload

import numpy as np
import scipy.signal
+from caput import config, mpiarray, weighted_median

-from caput import config, weighted_median, mpiarray
-
-from ..core import task, containers, io
-from ..util import tools
-from ..util import rfi
+from ..core import containers, io, task
+from ..util import rfi, tools


class DayMask(task.SingleTask):
@@ -1223,9 +1222,7 @@ def _apply_sir(self, mask, baseflag, eta=0.2):
nobaseflagsir = rfi.sir(nobaseflag[:, np.newaxis, :], eta=eta)[:, 0, :]

# Make sure the original mask (including baseflag) is still masked
-flagsir = nobaseflagsir | mask
-
-return flagsir
+return nobaseflagsir | mask

def _mad_tv_mask(self, data, start_flag, freq):
"""Use the specific scattered TV channel flagging."""
@@ -1686,7 +1683,7 @@ def fmask(f):

# Solve to find a value of f that minimises the amount of data masked
res = minimize_scalar(
fmask, method="golden", options=dict(maxiter=20, xtol=1e-2)
fmask, method="golden", options={"maxiter": 20, "xtol": 1e-2}
)

if not res.success:
16 changes: 5 additions & 11 deletions draco/analysis/mapmaker.py
@@ -1,9 +1,9 @@
"""Map making from driftscan data using the m-mode formalism."""

import numpy as np
-from caput import mpiarray, config
+from caput import config, mpiarray

-from ..core import containers, task, io
+from ..core import containers, io, task
from ..util import tools


@@ -164,9 +164,7 @@ def _solve_m(self, m, f, v, Ni):
a = np.dot(bm.T.conj(), Ni * v)

# Reshape to the correct output
-a = a.reshape(bt.telescope.num_pol_sky, bt.telescope.lmax + 1)
-
-return a
+return a.reshape(bt.telescope.num_pol_sky, bt.telescope.lmax + 1)


class MaximumLikelihoodMapMaker(BaseMapMaker):
@@ -199,9 +197,7 @@ def _solve_m(self, m, f, v, Ni):
a = np.dot(ib, Nh * v)

# Reshape to the correct output
-a = a.reshape(bt.telescope.num_pol_sky, bt.telescope.lmax + 1)
-
-return a
+return a.reshape(bt.telescope.num_pol_sky, bt.telescope.lmax + 1)


class WienerMapMaker(BaseMapMaker):
@@ -300,6 +296,4 @@ def pinv_svd(M, acond=1e-4, rcond=1e-3):

psigma_diag = 1.0 / sig[:rank]

-B = np.transpose(np.conjugate(np.dot(u[:, :rank] * psigma_diag, vh[:rank])))
-
-return B
+return np.transpose(np.conjugate(np.dot(u[:, :rank] * psigma_diag, vh[:rank])))
4 changes: 2 additions & 2 deletions draco/analysis/powerspectrum.py
@@ -2,9 +2,9 @@


import numpy as np

from caput import config
-from ..core import task, containers

+from ..core import containers, task


class QuadraticPSEstimation(task.SingleTask):
13 changes: 4 additions & 9 deletions draco/analysis/ringmapmaker.py
@@ -21,16 +21,13 @@
RADependentWeights
"""
import numpy as np
-from numpy.lib.recfunctions import structured_to_unstructured
import scipy.constants
-from mpi4py import MPI

from caput import config
+from mpi4py import MPI
+from numpy.lib.recfunctions import structured_to_unstructured

-from ..core import task
-from ..core import io
+from ..core import containers, io, task
from ..util import tools
-from ..core import containers
from . import transform


@@ -946,12 +943,10 @@ def _get_weight(self, inv_var):
if self.exclude_intracyl:
weight_ew[..., 0, :] = 0.0

-weight_ew = weight_ew * tools.invert_no_zero(
+return weight_ew * tools.invert_no_zero(
np.sum(weight_ew, axis=-2, keepdims=True)
)

-return weight_ew

def _get_regularisation(self, *args):
return self.inv_SN

3 changes: 1 addition & 2 deletions draco/analysis/sensitivity.py
@@ -1,10 +1,9 @@
"""Sensitivity Analysis Tasks."""

import numpy as np

from caput import config

-from ..core import task, io, containers
+from ..core import containers, io, task
from ..util import tools

