From cf8a9058822473f853457998765bd930eed01a6c Mon Sep 17 00:00:00 2001
From: ssiegelx
Date: Fri, 6 Dec 2024 16:50:48 -0500
Subject: [PATCH] style: use f-strings to accommodate updated ruff rules (#299)

---
 draco/analysis/beamform.py    | 4 ++--
 draco/analysis/dayenu.py      | 7 +++----
 draco/analysis/sensitivity.py | 4 ++--
 draco/analysis/sidereal.py    | 4 ++--
 draco/analysis/sourcestack.py | 6 +++---
 draco/analysis/transform.py   | 4 ++--
 draco/core/containers.py      | 2 +-
 draco/core/io.py              | 2 +-
 draco/core/task.py            | 2 +-
 draco/synthesis/gain.py       | 2 +-
 draco/util/tools.py           | 5 ++---
 11 files changed, 20 insertions(+), 22 deletions(-)

diff --git a/draco/analysis/beamform.py b/draco/analysis/beamform.py
index 1bfc7ba23..718df6dda 100644
--- a/draco/analysis/beamform.py
+++ b/draco/analysis/beamform.py
@@ -119,7 +119,7 @@ def setup(self, manager):
             if self.collapse_ha:
                 self.log.info(
                     "Tracking source for declination dependent amount of time "
-                    "[%d seconds at equator]" % self.timetrack
+                    f"[{self.timetrack:0.0f} seconds at equator]"
                 )
             else:
                 raise NotImplementedError(
@@ -130,7 +130,7 @@ def setup(self, manager):
 
         else:
             self.log.info(
-                "Tracking source for fixed amount of time [%d seconds]" % self.timetrack
+                f"Tracking source for fixed amount of time [{self.timetrack:0.0f} seconds]"
             )
 
     def process(self):
diff --git a/draco/analysis/dayenu.py b/draco/analysis/dayenu.py
index 67009eda2..fcc2b49c0 100644
--- a/draco/analysis/dayenu.py
+++ b/draco/analysis/dayenu.py
@@ -154,7 +154,7 @@ def process(self, stream):
                     weight[:, bb] *= flag_low[:, np.newaxis].astype(np.float32)
 
             else:
-                self.log.debug("There are %d unique masks/filters." % len(index))
+                self.log.debug(f"There are {len(index):d} unique masks/filters.")
                 for ii, ind in enumerate(index):
                     vis[:, bb, ind] = np.matmul(NF[ii], bvis[:, ind])
                     weight[:, bb, ind] = tools.invert_no_zero(
@@ -663,8 +663,7 @@ def process(self, ringmap):
             ecut = self._get_cut(el, **kwargs)
 
             self.log.debug(
-                "Filtering el %0.3f, %d of %d. [%0.3f micro-sec]"
-                % (el, ee, nel, ecut)
+                f"Filtering el {el:0.3f}, {ee:d} of {nel:d}. [{ecut:0.3f} micro-sec]"
             )
 
             erm = np.ascontiguousarray(rm[slc])
@@ -837,7 +836,7 @@ def process(self, stream):
             if not np.any(flag):
                 continue
 
-            self.log.debug("Filtering freq %d of %d." % (ff, nfreq))
+            self.log.debug(f"Filtering freq {ff:d} of {nfreq:d}.")
 
             # Construct the filters
             m_cut = np.abs(self._get_cut(nu, db))
diff --git a/draco/analysis/sensitivity.py b/draco/analysis/sensitivity.py
index 1bb199106..4d1dea686 100644
--- a/draco/analysis/sensitivity.py
+++ b/draco/analysis/sensitivity.py
@@ -85,8 +85,8 @@ def process(self, data):
 
         if not np.all(stack_flag):
            self.log.warning(
-                "There are %d stacked baselines that are masked "
-                "in the telescope instance." % np.sum(~stack_flag)
+                f"There are {np.sum(~stack_flag):0.0f} stacked baselines that are "
+                "masked in the telescope instance."
             )
 
         ps = data.prod[stack_new["prod"]]
diff --git a/draco/analysis/sidereal.py b/draco/analysis/sidereal.py
index b32ac5cdc..3755e8979 100644
--- a/draco/analysis/sidereal.py
+++ b/draco/analysis/sidereal.py
@@ -145,7 +145,7 @@ def _process_current_lsd(self):
         ts = tod.concatenate(self._timestream_list)
 
         # Add attributes for the LSD and a tag for labelling saved files
-        ts.attrs["tag"] = "lsd_%i" % lsd
+        ts.attrs["tag"] = f"lsd_{lsd:d}"
         ts.attrs["lsd"] = lsd
 
         # Clear the timestream list since these days have already been processed
@@ -248,7 +248,7 @@ def process(self, data):
         sdata.vis[:] = sts
         sdata.weight[:] = ni
         sdata.attrs["lsd"] = self.start
-        sdata.attrs["tag"] = "lsd_%i" % self.start
+        sdata.attrs["tag"] = f"lsd_{self.start:d}"
 
         return sdata
 
diff --git a/draco/analysis/sourcestack.py b/draco/analysis/sourcestack.py
index bebc784fd..ee5b63db7 100644
--- a/draco/analysis/sourcestack.py
+++ b/draco/analysis/sourcestack.py
@@ -362,7 +362,7 @@ def process(self, stack):
             self.nmock += 1
 
         self.log.info(
-            "Collected frequency stack. Current size is %d." % len(self.stack)
+            f"Collected frequency stack. Current size is {len(self.stack):d}."
         )
 
         if (len(self.stack) % self.ngroup) == 0:
@@ -389,8 +389,8 @@ def _reset(self):
         Then, empty the list, reset the stack counter, and increment the group counter.
         """
         self.log.info(
-            "We have accumulated %d mock realizations. Saving to file. [group %03d]"
-            % (self.nmock, self.counter)
+            f"We have accumulated {self.nmock:d} mock realizations. "
+            f"Saving to file. [group {self.counter:03d}]"
         )
 
         mock = np.arange(self.nmock, dtype=np.int64)
diff --git a/draco/analysis/transform.py b/draco/analysis/transform.py
index 41a861c17..3ced83dca 100644
--- a/draco/analysis/transform.py
+++ b/draco/analysis/transform.py
@@ -202,8 +202,8 @@ def process(self, ss):
 
         if not np.all(stack_flag):
             self.log.warning(
-                "There are %d stacked baselines that are masked "
-                "in the telescope instance." % np.sum(~stack_flag)
+                f"There are {np.sum(~stack_flag):0.0f} stacked baselines "
+                "that are masked in the telescope instance."
             )
 
         ss_prod = ss.prod[stack_new["prod"]]
diff --git a/draco/core/containers.py b/draco/core/containers.py
index 7aecc706b..4cb22a136 100644
--- a/draco/core/containers.py
+++ b/draco/core/containers.py
@@ -600,7 +600,7 @@ def _create_dtype(self, columns):
         dt = []
         for ci, (name, dtype) in enumerate(columns):
             if not isinstance(name, str):
-                raise ValueError("Column %i is invalid" % ci)
+                raise ValueError(f"Column {ci:d} is invalid")
             dt.append((name, dtype))
 
         return dt
diff --git a/draco/core/io.py b/draco/core/io.py
index e3ca0b89c..b7c2655ae 100644
--- a/draco/core/io.py
+++ b/draco/core/io.py
@@ -162,7 +162,7 @@ def _list_of_filegroups(groups: Union[list[dict], dict]) -> list[dict]:
             raise ConfigError(f"Expected type dict in file groups (got {type(group)}).")
 
         if "tag" not in group:
-            group["tag"] = "group_%i" % gi
+            group["tag"] = f"group_{gi:d}"
 
         flist = []
 
diff --git a/draco/core/task.py b/draco/core/task.py
index 6196b10b8..c2ed3312e 100644
--- a/draco/core/task.py
+++ b/draco/core/task.py
@@ -107,7 +107,7 @@ def __init__(self):
 
         rank_length = int(math.log10(MPI.COMM_WORLD.size)) + 1
 
-        mpi_fmt = "[MPI %%(mpi_rank)%id/%%(mpi_size)%id]" % (rank_length, rank_length)
+        mpi_fmt = f"[MPI %(mpi_rank){rank_length:d}d/%(mpi_size){rank_length:d}d]"
         filt = MPILogFilter(level_all=self.level_all, level_rank0=self.level_rank0)
 
         # This uses the fact that caput.pipeline.Manager has already
diff --git a/draco/synthesis/gain.py b/draco/synthesis/gain.py
index 84e90db3c..549fa6483 100644
--- a/draco/synthesis/gain.py
+++ b/draco/synthesis/gain.py
@@ -207,7 +207,7 @@ def process(self):
         gain = mpiarray.MPIArray.wrap(gain_comb, axis=1, comm=data.comm)
         gain_data.gain[:] = gain
         gain_data.attrs["lsd"] = self._current_lsd
-        gain_data.attrs["tag"] = "lsd_%i" % self._current_lsd
+        gain_data.attrs["tag"] = f"lsd_{self._current_lsd:d}"
 
         # Increment current lsd
         self._current_lsd += 1
diff --git a/draco/util/tools.py b/draco/util/tools.py
index ea399888a..c0a933278 100644
--- a/draco/util/tools.py
+++ b/draco/util/tools.py
@@ -256,9 +256,8 @@ def extract_diagonal(utmat, axis=1):
     # Check that this nside is correct
     if utmat.shape[axis] != (nside * (nside + 1) // 2):
         msg = (
-            "Array length (%i) of axis %i does not correspond upper triangle\
-                of square matrix"
-            % (utmat.shape[axis], axis)
+            f"Array length ({utmat.shape[axis]:d}) of axis {axis:d} does not "
+            "correspond upper triangle of square matrix"
         )
         raise RuntimeError(msg)
 
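
Reviewer note, not part of the commit: the change is mechanical, replacing printf-style "%" interpolation with f-strings, the pattern flagged by ruff's pyupgrade rules (e.g. UP031 for "%" formatting). A minimal, self-contained sketch of the conversion for reference; the logger setup and variable names below are invented for illustration and are not taken from the diff:

    import logging

    logging.basicConfig(level=logging.DEBUG)
    log = logging.getLogger(__name__)

    ff, nfreq = 7, 1024  # hypothetical loop index and total count

    # Old style, reported by ruff UP031:
    log.debug("Filtering freq %d of %d." % (ff, nfreq))

    # f-string equivalent, keeping explicit format specs (:d, :0.3f, :03d)
    # in the same way this patch does throughout:
    log.debug(f"Filtering freq {ff:d} of {nfreq:d}.")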