Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add support for Python3 #270

Merged
merged 25 commits into from
May 31, 2019
Merged
Show file tree
Hide file tree
Changes from 7 commits
Commits
Show all changes
25 commits
Select commit Hold shift + click to select a range
64e4eb5
run 2to3
gijzelaerr May 15, 2019
6b0e1a5
ignore virtualenv stuff
gijzelaerr May 15, 2019
4a94c61
configparser is stricter
gijzelaerr May 15, 2019
b377357
fix print syntax
gijzelaerr May 15, 2019
9e4ba91
porting
gijzelaerr May 15, 2019
8a421e4
improve compat with older python-casacore
gijzelaerr May 15, 2019
747e1c6
more old python-casacore compat code
gijzelaerr May 15, 2019
84f7710
finishing touch
gijzelaerr May 16, 2019
1a99555
add ben feedback
gijzelaerr May 16, 2019
4da0293
this should not be here
gijzelaerr May 20, 2019
e4c6fdb
fix py2 compat
gijzelaerr May 21, 2019
483662a
Update with Jenkins.sh file
ratt-priv-ci May 22, 2019
7c7bc55
Add py2 and py3 tests for 16.04 and 18.04
ratt-priv-ci May 22, 2019
ebd45e0
fix residual python3 build issues
ratt-priv-ci May 23, 2019
5afad43
Fix residual issues with montblanc and py3 cython kernels
ratt-priv-ci May 24, 2019
b55a6a4
Ensure CC 3.0 is installed
ratt-priv-ci May 24, 2019
9afe9b0
Py2 and Py3 compatible changes
ratt-priv-ci May 28, 2019
91dc275
Make test relative
ratt-priv-ci May 28, 2019
15ccd33
Add asserts to verify types in ms_tile
ratt-priv-ci May 29, 2019
2723e27
fix pip install from a non-cythonized source directory
ratt-priv-ci May 29, 2019
1ba4fef
Fix dtype error on phase centre array.
JSKenyon May 30, 2019
b213dcb
Fix for python2 bombing when inserting BITFLAG column.
JSKenyon May 31, 2019
285c9ac
Depend on tagged release of montblanc
ratt-priv-ci May 31, 2019
036d385
typo
ratt-priv-ci May 31, 2019
d0909b9
Done and dusted py3 tested
ratt-priv-ci May 31, 2019
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 5 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -88,4 +88,8 @@ docs/_templates/
*.orig

# databases
*_db
*_db

# virtualenvs
.venv*/
.virtualenv*/
4 changes: 1 addition & 3 deletions cubical/DefaultParset.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -193,7 +193,7 @@ flag-ant-thr = 5 # Threshold (in sigmas) used to flag bad antenna
[sol]
_Help = Solution options which apply at the solver level
jones = G # Comma-separated list of Jones terms to enable, e.g. "G,B,dE"
(default: %default)
(default: default)
precision = 32 # Solve in single or double precision #options:32|64
delta-g = 1e-6 # Threshold for gain accuracy - gains which improve by less than this value
are considered converged. DEPRECATED FOR PER-JONES epsilon OPTION.
Expand Down Expand Up @@ -350,8 +350,6 @@ prop-flags = default # Flag propagation policy. Determines how flags
#options:never|always|default
estimate-pzd = 0 # Estimate phase-zero difference and initialize the gains with it.
Use for polarization calibration. #type:bool
estimate-pzd = 0 # Estimate phase-zero difference and initialize the gains with it.
Use for polarization calibration. #type:bool
diag-only = 0 # Use only diagonal (parallel-hand) data and model terms for the solution. Note that gains
are still applied to the full 2x2 data (unless --sel-diag is also set).
#type:bool
Expand Down
2 changes: 1 addition & 1 deletion cubical/data_handler/MBTiggerSim.py
Original file line number Diff line number Diff line change
Expand Up @@ -234,7 +234,7 @@ def model_vis(self, context):
else:
sel = slice(None)

for ddid_ind in xrange(self._nddid):
for ddid_ind in range(self._nddid):
gijzelaerr marked this conversation as resolved.
Show resolved Hide resolved
offset = ddid_ind*rows_per_ddid
lr = lower + offset
ur = upper + offset
Expand Down
6 changes: 3 additions & 3 deletions cubical/data_handler/TiggerSourceProvider.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
"""
Source provider for reading source information from a Tigger lsm.
"""

from six import string_types
import logging
import numpy as np

Expand Down Expand Up @@ -44,7 +44,7 @@ def __init__(self, lsm, phase_center, dde_tag='dE'):
self._freqs = None

self._clusters = cluster_sources(self._sm, dde_tag)
self._cluster_keys = self._clusters.keys()
self._cluster_keys = list(self._clusters.keys())
self._nclus = len(self._cluster_keys)

self._target_key = 0
Expand Down Expand Up @@ -241,7 +241,7 @@ def cluster_sources(sm, dde_tag):
if dde_tag:
tagvalue = src.getTag(dde_tag)
if tagvalue:
if type(tagvalue) is str:
if isinstance(tagvalue, string_types):
dde_cluster = tagvalue
else:
dde_cluster = src.getTag('cluster')
Expand Down
6 changes: 3 additions & 3 deletions cubical/data_handler/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ def uniquify(values):
uniq = np.array(sorted(set(values)))
rmap = {x: i for i, x in enumerate(uniq)}
# apply this map to the time column to construct a timestamp column
indices = np.fromiter(map(rmap.__getitem__, values), int)
indices = np.fromiter(list(map(rmap.__getitem__, values)), int)
return indices, uniq, rmap

# Try to import montblanc: if not successful, remember error for later.
Expand All @@ -30,8 +30,8 @@ def import_montblanc():
import montblanc
# all of these potentially fall over if Montblanc is the wrong version or something, so moving them here
# for now
from MBTiggerSim import simulate, MSSourceProvider, ColumnSinkProvider
from TiggerSourceProvider import TiggerSourceProvider
from .MBTiggerSim import simulate, MSSourceProvider, ColumnSinkProvider
from .TiggerSourceProvider import TiggerSourceProvider
from montblanc.impl.rime.tensorflow.sources import CachedSourceProvider, FitsBeamSourceProvider
return montblanc, None
except:
Expand Down
288 changes: 144 additions & 144 deletions cubical/data_handler/ms_data_handler.py

Large diffs are not rendered by default.

145 changes: 73 additions & 72 deletions cubical/data_handler/ms_tile.py

Large diffs are not rendered by default.

12 changes: 6 additions & 6 deletions cubical/database/casa_db_adaptor.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ def init_empty(cls, db, filename, solfreqs, solants, field_ndir=1, is_complex=Tr
os.rename(os.path.join(basedir, BLANK_TABLE_NAME), filename)

antorder = [db.antnames.index(an) for an in solants]
with tbl("%s::ANTENNA" % filename, ack=False, readonly=False) as t:
with tbl("%s::ANTENNA" % str(filename), ack=False, readonly=False) as t:
t.addrows(nrows=len(db.anttype))
t.putcol("OFFSET", db.antoffset[antorder])
t.putcol("POSITION", db.antpos[antorder])
Expand All @@ -67,7 +67,7 @@ def init_empty(cls, db, filename, solfreqs, solants, field_ndir=1, is_complex=Tr
assert "field" in db.metadata, "Solver field not passed in metadata. This is a bug"
assert type(db.metadata["field"]) is int, "Currently only supports single field"
selfield = np.arange(len(db.fieldname)) == db.metadata["field"]
with tbl("%s::FIELD" % filename, ack=False, readonly=False) as t:
with tbl("%s::FIELD" % str(filename), ack=False, readonly=False) as t:
t.addrows(nrows=field_ndir)
t.putcol("DELAY_DIR", np.tile(db.fielddelaydirs[selfield], (field_ndir, 1)))
t.putcol("PHASE_DIR", np.tile(db.fieldphasedirs[selfield], (field_ndir, 1)))
Expand All @@ -78,7 +78,7 @@ def init_empty(cls, db, filename, solfreqs, solants, field_ndir=1, is_complex=Tr
t.putcol("SOURCE_ID", np.tile(db.fieldsrcid[selfield], (field_ndir, 1)) + np.arange(field_ndir).T)
t.putcol("TIME", np.tile(db.fieldtime[selfield], (field_ndir, 1)))

with tbl("%s::OBSERVATION" % filename, ack=False, readonly=False) as t:
with tbl("%s::OBSERVATION" % str(filename), ack=False, readonly=False) as t:
t.addrows(nrows=len(db.obsobserver))
(len(db.obstimerange) != 0) and t.putcol("TIME_RANGE", db.obstimerange)
(len(db.obslog) != 0) and t.putcol("LOG", db.obslog)
Expand All @@ -89,7 +89,7 @@ def init_empty(cls, db, filename, solfreqs, solants, field_ndir=1, is_complex=Tr
(len(db.obsreleasedate) != 0) and t.putcol("RELEASE_DATE", db.obsreleasedate)
(len(db.obstelescopename) != 0) and t.putcol("TELESCOPE_NAME", db.obstelescopename)

with tbl("%s::SPECTRAL_WINDOW" % filename, ack=False, readonly=False) as t:
with tbl("%s::SPECTRAL_WINDOW" % str(filename), ack=False, readonly=False) as t:
t.addrows(nrows=len(db.sel_ddids))
# Per DDID determine solution spacing in frequency
for iddid, ddid in enumerate(db.sel_ddids):
Expand Down Expand Up @@ -124,7 +124,7 @@ def init_empty(cls, db, filename, solfreqs, solants, field_ndir=1, is_complex=Tr
t.putcell("NUM_CHAN", iddid, ddsolfreqs.size)
t.putcell("TOTAL_BANDWIDTH", iddid, maxfreq - minfreq)

with tbl(filename, ack=False, readonly=False) as t:
with tbl(str(filename), ack=False, readonly=False) as t:
t.putkeyword("ParType", "Complex" if is_complex else "Float")
t.putkeyword("VisCal", viscal_label)

Expand Down Expand Up @@ -241,7 +241,7 @@ def create_B_table(cls, db, gname, outname = "B", diag=True):
field_ndir=ndir,
viscal_label="B Jones" if diag else "D Jones")

with tbl(db.filename + ".%s.casa" % outname, ack=False, readonly=False) as t:
with tbl(str(db.filename) + ".%s.casa" % outname, ack=False, readonly=False) as t:
t.addrows(nrows=nrow)

for iddid, ddid in enumerate(db.sel_ddids):
Expand Down
6 changes: 4 additions & 2 deletions cubical/database/iface_database.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,12 @@
"""
Defines database interface
"""
from six import add_metaclass
import abc

class iface_database(object):
__metaclass__ = abc.ABCMeta

@add_metaclass(abc.ABCMeta)
class iface_database:
@abc.abstractmethod
def __init__(self):
raise NotImplementedError("To be defined")
Expand Down
60 changes: 30 additions & 30 deletions cubical/database/parameter.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
"""
Handles parameter databases which can contain solutions and other relevant values.
"""

from __future__ import print_function
import numpy as np
from numpy.ma import masked_array
from cubical.tools import logger
Expand All @@ -22,7 +22,7 @@ class _Record(object):
"""

def __init__(self, **kw):
for key, value in kw.iteritems():
for key, value in kw.items():
setattr(self, key, value)


Expand Down Expand Up @@ -67,7 +67,7 @@ def __init__(self, name, dtype, axes, interpolation_axes=[], empty=0, metadata=N
"""
interpolation_axes = interpolation_axes or []
assert (len(interpolation_axes) in [0, 1, 2])
print>> log(1), "defining parameter '{}' over {}".format(name, ",".join(axes))
print("defining parameter '{}' over {}".format(name, ",".join(axes)), file=log(1))

self.name, self.dtype, self.axis_labels = name, dtype, axes
self.empty, self.metadata = empty, metadata
Expand Down Expand Up @@ -169,8 +169,8 @@ def _update_shape(self, shape, grid):
elif not self.shape[i]:
self.shape[i] = shape[i]
elif self.shape[i] != shape[i]:
raise ValueError, "axis {} of length {} does not match previously defined length {}".format(
axis, shape[i], self.shape[i])
raise ValueError("axis {} of length {} does not match previously defined length {}".format(
axis, shape[i], self.shape[i]))

def _finalize_shape(self):
"""
Expand Down Expand Up @@ -208,7 +208,7 @@ def _finalize_shape(self):
gmax = float(g1.max()) or 1
self._norm_grid[iaxis] = g1 = g1 / gmax
self._gminmax[iaxis] = gmin, gmax
print>> log(0), "dimensions of {} are {}".format(self.name, ','.join(map(str, self.shape)))
print("dimensions of {} are {}".format(self.name, ','.join(map(str, self.shape))), file=log(0))
return True

def _to_norm(self, iaxis, g):
Expand Down Expand Up @@ -255,7 +255,7 @@ def _init_arrays(self):
np.ones(self.shape, bool),
fill_value=self.empty)
self._array_slices = {}
print>> log(0), " loading {}, shape {}".format(self.name, 'x'.join(map(str, self.shape)))
print(" loading {}, shape {}".format(self.name, 'x'.join(map(str, self.shape))), file=log(0))

def _paste_slice(self, item):
"""
Expand Down Expand Up @@ -293,12 +293,12 @@ def _finalize_arrays(self):
slicers.append((None,))
else:
slicer_axes.append(i)
slicers.append(xrange(shape))
slicers.append(range(shape))

self._interpolators = {}

# get grid over interpolatable axes
print>> log(2), "decomposing {} into slices".format(self.name)
print("decomposing {} into slices".format(self.name), file=log(2))
# loop over all not-interpolatable slices (e.g. direction, antenna, correlation)
for slicer in itertools.product(*slicers):
array_slicer = tuple([slice(None) if sl is None else sl for sl in slicer])
Expand All @@ -311,23 +311,23 @@ def _finalize_arrays(self):
subset = [slice(None) for _ in interpol_axes]
if flags is not np.ma.nomask:
# now, for every axis in the slice, cut out fully flagged points
allaxis = set(xrange(array.ndim))
for iaxis in xrange(array.ndim):
allaxis = set(range(array.ndim))
for iaxis in range(array.ndim):
# find points on this axis which are fully flagged along other axes
if array.ndim == 1:
allflag = flags
else:
allflag = flags.all(axis=tuple(allaxis - {iaxis}))
# all flagged? Indicate this by array=None
if allflag.all():
print>> log(2), " slice {} fully flagged".format(slicer)
print(" slice {} fully flagged".format(slicer), file=log(2))
array = None
break
# if such points exist, extract subset of array and grid
elif allflag.any():
print>> log(2), " slice {} flagged at {} {} points".format(slicer, allflag.sum(),
print(" slice {} flagged at {} {} points".format(slicer, allflag.sum(),
self.axis_labels[
interpol_axes[iaxis]])
interpol_axes[iaxis]]), file=log(2))
# make corresponding slice
array_slice = [slice(None)] * array.ndim
# also set subset to the mask of the valid points
Expand Down Expand Up @@ -491,15 +491,15 @@ def reinterpolate(self, **grid):
# create output array of corresponding shape
output_array = np.full(output_shape, self.empty, self.dtype)

print>> log(1), "will interpolate {} solutions onto {} grid".format(self.name,
"x".join(map(str, output_shape)))
print("will interpolate {} solutions onto {} grid".format(self.name,
"x".join(map(str, output_shape))), file=log(1))

# now loop over all slices
for slicer, out_slicer in zip(itertools.product(*input_slicers), itertools.product(*output_slicers)):
# arse is the current array slice we work with
arse = self._array_slices[slicer]
if arse.array is None:
print>> log(2), " slice {} fully flagged".format(slicer)
print(" slice {} fully flagged".format(slicer), file=log(2))
else:
# Check which subset of the slice needs to be interpolated
# We build up the following lists describing the interpolation process
Expand Down Expand Up @@ -550,8 +550,8 @@ def reinterpolate(self, **grid):
if not interpolator or len(input_grid_segment0) != len(input_grid_segment) or \
not all([ia == ja and i0 <= j0 and i1 >= j1
for (ia, i0, i1), (ja, j0, j1) in zip(input_grid_segment0, input_grid_segment)]):
print>> log(2), " slice {} preparing {}D interpolator for {}".format(slicer,
len(segment_grid), ",".join(["{}:{}".format(*seg[1:]) for seg in input_grid_segment]))
print(" slice {} preparing {}D interpolator for {}".format(slicer,
len(segment_grid), ",".join(["{}:{}".format(*seg[1:]) for seg in input_grid_segment])), file=log(2))
# arav: linear array of all values, adata: all unflagged values
arav = arse.array[tuple(array_segment_slice)].ravel()
adata = arav.data[~arav.mask] if arav.mask is not np.ma.nomask else arav.data
Expand Down Expand Up @@ -597,15 +597,15 @@ def reinterpolate(self, **grid):
coords = np.array([x.ravel() for x in np.meshgrid(*output_coord, indexing='ij')])
# call interpolator. Reshape into output slice shape
result = interpolator(coords.T).reshape(interp_shape)
print>> log(2), " interpolated onto {} grid".format("x".join(map(str, interp_shape)))
print(" interpolated onto {} grid".format("x".join(map(str, interp_shape))), file=log(2))
output_array[out_slicer] = result[tuple(interp_broadcast)]
# return array, throwing out unneeded axes
output_array = output_array[tuple(output_reduction)]
# also, mask missing values from the interpolator with the fill value
missing = np.isnan(output_array)
output_array[missing] = self.empty
print>> log(1), "{} solutions: interpolation results in {}/{} missing values".format(self.name,
missing.sum(), missing.size)
print("{} solutions: interpolation results in {}/{} missing values".format(self.name,
missing.sum(), missing.size), file=log(1))
return masked_array(output_array, missing, fill_value=self.empty)


Expand Down Expand Up @@ -635,16 +635,16 @@ def lookup(self, **grid):
output_array = np.full(output_shape, self.empty, self.dtype)
output_mask = np.ones(output_shape, bool)

print>> log(1), "will lookup {} solutions on {} grid".format(self.name,
"x".join(map(str, output_shape)))
print("will lookup {} solutions on {} grid".format(self.name,
"x".join(map(str, output_shape))), file=log(1))


# now loop over all slices
for slicer, out_slicer in zip(itertools.product(*input_slicers), itertools.product(*output_slicers)):
# arse is the current array slice we work with
arse = self._array_slices[slicer]
if arse.array is None:
print>> log(2), " slice {} fully flagged".format(slicer)
print(" slice {} fully flagged".format(slicer), file=log(2))
else:
# segment_grid: float array of normalized coordinates corresponding
# to segment being interpolated over
Expand All @@ -660,8 +660,8 @@ def lookup(self, **grid):
ij = [ (i, gmap.get(x)) for i,x in enumerate(outgr) ]
input_indices.append([ j for i,j in ij if j is not None])
output_indices.append([ i for i,j in ij if j is not None])
print>> log(2), " slice {}: looking up {} valid points".format(slicer,
"x".join([str(len(idx)) for idx in input_indices]))
print(" slice {}: looking up {} valid points".format(slicer,
"x".join([str(len(idx)) for idx in input_indices])), file=log(2))

out = output_array[out_slicer]
outmask = output_mask[out_slicer]
Expand All @@ -677,8 +677,8 @@ def lookup(self, **grid):
output_mask = output_mask[tuple(output_reduction)]
output_array[output_mask] = self.empty

print>> log(1), "{} solutions: interpolation results in {}/{} missing values".format(self.name,
output_mask.sum(), output_mask.size)
print("{} solutions: interpolation results in {}/{} missing values".format(self.name,
output_mask.sum(), output_mask.size), file=log(1))

return masked_array(output_array, output_mask, fill_value=self.empty)

Expand All @@ -696,7 +696,7 @@ def match_grids(self, **grid):
True if all coordinate values match the parameter grid.
False if at least one doesn't.
"""
for axis, gridvalues in grid.iteritems():
for axis, gridvalues in grid.items():
iaxis = self.axis_index[axis]
if not set(gridvalues).issubset(self._grid_set[iaxis]):
return False
Expand Down
Loading