diff --git a/doc/_autosummary/ch_util.andata.rst b/doc/_autosummary/ch_util.andata.rst new file mode 100644 index 00000000..e86c351a --- /dev/null +++ b/doc/_autosummary/ch_util.andata.rst @@ -0,0 +1,48 @@ +ch\_util.andata +=============== + +.. automodule:: ch_util.andata + + + .. rubric:: Functions + + .. autosummary:: + + andata_from_acq1 + andata_from_archive2 + subclass_from_obj + versiontuple + + .. rubric:: Classes + + .. autosummary:: + + AnData + BaseData + BaseReader + CalibrationGainData + CalibrationGainReader + CorrData + CorrReader + DigitalGainData + DigitalGainReader + FlagInputData + FlagInputReader + GainData + GainFlagData + HKData + HKPData + HKPReader + HKReader + RawADCData + RawADCReader + Reader + WeatherData + WeatherReader + + .. rubric:: Exceptions + + .. autosummary:: + + AnDataError + \ No newline at end of file diff --git a/doc/_autosummary/ch_util.cal_utils.rst b/doc/_autosummary/ch_util.cal_utils.rst new file mode 100644 index 00000000..a4bf2a12 --- /dev/null +++ b/doc/_autosummary/ch_util.cal_utils.rst @@ -0,0 +1,37 @@ +ch\_util.cal\_utils +=================== + +.. automodule:: ch_util.cal_utils + + + .. rubric:: Functions + + .. autosummary:: + + estimate_directional_scale + fit_histogram + fit_point_source_map + flag_outliers + func_2d_gauss + func_2d_sinc_gauss + func_dirty_gauss + func_real_dirty_gauss + get_reference_times_dataset_id + get_reference_times_file + guess_fwhm + interpolate_gain + interpolate_gain_quiet + thermal_amplitude + + .. rubric:: Classes + + .. autosummary:: + + FitAmpPhase + FitGaussAmpPolyPhase + FitPoly + FitPolyLogAmpPolyPhase + FitPolyRealPolyImag + FitRealImag + FitTransit + \ No newline at end of file diff --git a/doc/_autosummary/ch_util.chan_monitor.rst b/doc/_autosummary/ch_util.chan_monitor.rst new file mode 100644 index 00000000..620f2fe6 --- /dev/null +++ b/doc/_autosummary/ch_util.chan_monitor.rst @@ -0,0 +1,13 @@ +ch\_util.chan\_monitor +====================== + +.. 
automodule:: ch_util.chan_monitor + + + .. rubric:: Classes + + .. autosummary:: + + ChanMonitor + FeedLocator + \ No newline at end of file diff --git a/doc/_autosummary/ch_util.data_quality.rst b/doc/_autosummary/ch_util.data_quality.rst new file mode 100644 index 00000000..15e5e503 --- /dev/null +++ b/doc/_autosummary/ch_util.data_quality.rst @@ -0,0 +1,12 @@ +ch\_util.data\_quality +====================== + +.. automodule:: ch_util.data_quality + + + .. rubric:: Functions + + .. autosummary:: + + good_channels + \ No newline at end of file diff --git a/doc/_autosummary/ch_util.ephemeris.rst b/doc/_autosummary/ch_util.ephemeris.rst new file mode 100644 index 00000000..8c3db049 --- /dev/null +++ b/doc/_autosummary/ch_util.ephemeris.rst @@ -0,0 +1,41 @@ +ch\_util.ephemeris +================== + +.. automodule:: ch_util.ephemeris + + + .. rubric:: Module Attributes + + .. autosummary:: + + CasA + CygA + TauA + VirA + + .. rubric:: Functions + + .. autosummary:: + + Star_cirs + bmxy_to_hadec + chime_local_datetime + cirs_radec + galt_pointing_model_dec + galt_pointing_model_ha + get_doppler_shifted_freq + get_range_rate + get_source_dictionary + hadec_to_bmxy + lunar_rising + lunar_setting + lunar_transit + object_coords + parse_date + peak_RA + solar_rising + solar_setting + solar_transit + transit_RA + utc_lst_to_mjd + \ No newline at end of file diff --git a/doc/_autosummary/ch_util.finder.rst b/doc/_autosummary/ch_util.finder.rst new file mode 100644 index 00000000..8fb16631 --- /dev/null +++ b/doc/_autosummary/ch_util.finder.rst @@ -0,0 +1,33 @@ +ch\_util.finder +=============== + +.. automodule:: ch_util.finder + + + .. rubric:: Functions + + .. autosummary:: + + files_in_range + + .. rubric:: Classes + + .. autosummary:: + + BaseDataInterval + CalibrationGainDataInterval + CorrDataInterval + DataInterval + DataIntervalList + DigitalGainDataInterval + Finder + FlagInputDataInterval + HKDataInterval + WeatherDataInterval + + .. rubric:: Exceptions + + .. 
autosummary:: + + DataFlagged + \ No newline at end of file diff --git a/doc/_autosummary/ch_util.fluxcat.rst b/doc/_autosummary/ch_util.fluxcat.rst new file mode 100644 index 00000000..d858fe4f --- /dev/null +++ b/doc/_autosummary/ch_util.fluxcat.rst @@ -0,0 +1,25 @@ +ch\_util.fluxcat +================ + +.. automodule:: ch_util.fluxcat + + + .. rubric:: Functions + + .. autosummary:: + + format_source_name + get_epoch + json_numpy_obj_hook + varname + + .. rubric:: Classes + + .. autosummary:: + + CurvedPowerLaw + FitSpectrum + FluxCatalog + MetaFluxCatalog + NumpyEncoder + \ No newline at end of file diff --git a/doc/_autosummary/ch_util.hfbcat.rst b/doc/_autosummary/ch_util.hfbcat.rst new file mode 100644 index 00000000..2f68c99f --- /dev/null +++ b/doc/_autosummary/ch_util.hfbcat.rst @@ -0,0 +1,18 @@ +ch\_util.hfbcat +=============== + +.. automodule:: ch_util.hfbcat + + + .. rubric:: Functions + + .. autosummary:: + + get_doppler_shifted_freq + + .. rubric:: Classes + + .. autosummary:: + + HFBCatalog + \ No newline at end of file diff --git a/doc/_autosummary/ch_util.holography.rst b/doc/_autosummary/ch_util.holography.rst new file mode 100644 index 00000000..42c61b0a --- /dev/null +++ b/doc/_autosummary/ch_util.holography.rst @@ -0,0 +1,13 @@ +ch\_util.holography +=================== + +.. automodule:: ch_util.holography + + + .. rubric:: Classes + + .. autosummary:: + + HolographyObservation + HolographySource + \ No newline at end of file diff --git a/doc/_autosummary/ch_util.layout.rst b/doc/_autosummary/ch_util.layout.rst new file mode 100644 index 00000000..f9826091 --- /dev/null +++ b/doc/_autosummary/ch_util.layout.rst @@ -0,0 +1,21 @@ +ch\_util.layout +=============== + +.. automodule:: ch_util.layout + + + .. rubric:: Functions + + .. autosummary:: + + enter_ltf + get_global_flag_times + global_flags_between + + .. rubric:: Classes + + .. 
autosummary:: + + graph + subgraph_spec + \ No newline at end of file diff --git a/doc/_autosummary/ch_util.ni_utils.rst b/doc/_autosummary/ch_util.ni_utils.rst new file mode 100644 index 00000000..de9b0fbd --- /dev/null +++ b/doc/_autosummary/ch_util.ni_utils.rst @@ -0,0 +1,30 @@ +ch\_util.ni\_utils +================== + +.. automodule:: ch_util.ni_utils + + + .. rubric:: Functions + + .. autosummary:: + + gains2utvec + gains2utvec_tf + gen_prod_sel + ktrprod + mat2utvec + ni_als + ni_gains_evalues + ni_gains_evalues_tf + process_gated_data + process_synced_data + sort_evalues_mag + subtract_sky_noise + utvec2mat + + .. rubric:: Classes + + .. autosummary:: + + ni_data + \ No newline at end of file diff --git a/doc/_autosummary/ch_util.plot.rst b/doc/_autosummary/ch_util.plot.rst new file mode 100644 index 00000000..36683a24 --- /dev/null +++ b/doc/_autosummary/ch_util.plot.rst @@ -0,0 +1,14 @@ +ch\_util.plot +============= + +.. automodule:: ch_util.plot + + + .. rubric:: Functions + + .. autosummary:: + + spectra + time_ordered + waterfall + \ No newline at end of file diff --git a/doc/_autosummary/ch_util.rfi.rst b/doc/_autosummary/ch_util.rfi.rst new file mode 100644 index 00000000..7c079274 --- /dev/null +++ b/doc/_autosummary/ch_util.rfi.rst @@ -0,0 +1,24 @@ +ch\_util.rfi +============ + +.. automodule:: ch_util.rfi + + + .. rubric:: Functions + + .. autosummary:: + + flag_dataset + frequency_mask + get_autocorrelations + highpass_delay_filter + iterative_hpf_masking + mad_cut_1d + mad_cut_2d + mad_cut_rolling + nanmedian + number_deviations + sir + sir1d + spectral_cut + \ No newline at end of file diff --git a/doc/_autosummary/ch_util.timing.rst b/doc/_autosummary/ch_util.timing.rst new file mode 100644 index 00000000..94e2112d --- /dev/null +++ b/doc/_autosummary/ch_util.timing.rst @@ -0,0 +1,25 @@ +ch\_util.timing +=============== + +.. automodule:: ch_util.timing + + + .. rubric:: Functions + + .. 
autosummary:: + + construct_delay_template + eigen_decomposition + fit_poly_to_phase + load_timing_correction + map_input_to_noise_source + model_poly_phase + + .. rubric:: Classes + + .. autosummary:: + + TimingCorrection + TimingData + TimingInterpolator + \ No newline at end of file diff --git a/doc/_autosummary/ch_util.tools.rst b/doc/_autosummary/ch_util.tools.rst new file mode 100644 index 00000000..f8ae2e45 --- /dev/null +++ b/doc/_autosummary/ch_util.tools.rst @@ -0,0 +1,79 @@ +ch\_util.tools +============== + +.. automodule:: ch_util.tools + + + .. rubric:: Functions + + .. autosummary:: + + antenna_to_lna + apply_gain + beam_index2number + calibrate_temperature + change_chime_location + change_pathfinder_location + cmap + decorrelation + delay + eigh_no_diagonal + ensure_list + fake_tone_database + fast_pack_product_array + fringestop_time + get_correlator_inputs + get_default_frequency_map_stream + get_feed_polarisations + get_feed_positions + get_holographic_index + get_noise_channel + get_noise_source_index + hk_to_sensor + icmap + invert_no_zero + is_array + is_array_on + is_array_x + is_array_y + is_chime + is_chime_on + is_holographic + is_noise_source + is_pathfinder + lna_to_antenna + normalise_correlations + order_frequency_map_stream + pack_product_array + parse_chime_serial + parse_old_serial + parse_pathfinder_serial + rankN_approx + redefine_stack_index_map + reorder_correlator_inputs + sensor_to_hk + serial_to_id + serial_to_location + subtract_rank1_signal + unpack_product_array + + .. rubric:: Classes + + .. 
autosummary:: + + Antenna + ArrayAntenna + Blank + CHIMEAntenna + CorrInput + GBOAntenna + HCOAntenna + HKInput + HolographyAntenna + KKOAntenna + NoiseSource + PCOAntenna + PathfinderAntenna + RFIAntenna + TONEAntenna + \ No newline at end of file diff --git a/docs/.buildinfo b/docs/.buildinfo new file mode 100644 index 00000000..f52a79ca --- /dev/null +++ b/docs/.buildinfo @@ -0,0 +1,4 @@ +# Sphinx build info version 1 +# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. +config: 9ffa1224b9591c68718b286d1d51bb82 +tags: 645f666f9bcd5a90fca523b33c5a78b7 diff --git a/docs/.doctrees/_autosummary/ch_util.andata.doctree b/docs/.doctrees/_autosummary/ch_util.andata.doctree new file mode 100644 index 00000000..af00e0b1 Binary files /dev/null and b/docs/.doctrees/_autosummary/ch_util.andata.doctree differ diff --git a/docs/.doctrees/_autosummary/ch_util.cal_utils.doctree b/docs/.doctrees/_autosummary/ch_util.cal_utils.doctree new file mode 100644 index 00000000..9f5afcfd Binary files /dev/null and b/docs/.doctrees/_autosummary/ch_util.cal_utils.doctree differ diff --git a/docs/.doctrees/_autosummary/ch_util.chan_monitor.doctree b/docs/.doctrees/_autosummary/ch_util.chan_monitor.doctree new file mode 100644 index 00000000..f8a8e664 Binary files /dev/null and b/docs/.doctrees/_autosummary/ch_util.chan_monitor.doctree differ diff --git a/docs/.doctrees/_autosummary/ch_util.data_quality.doctree b/docs/.doctrees/_autosummary/ch_util.data_quality.doctree new file mode 100644 index 00000000..69c23833 Binary files /dev/null and b/docs/.doctrees/_autosummary/ch_util.data_quality.doctree differ diff --git a/docs/.doctrees/_autosummary/ch_util.ephemeris.doctree b/docs/.doctrees/_autosummary/ch_util.ephemeris.doctree new file mode 100644 index 00000000..5a6176a1 Binary files /dev/null and b/docs/.doctrees/_autosummary/ch_util.ephemeris.doctree differ diff --git a/docs/.doctrees/_autosummary/ch_util.finder.doctree 
b/docs/.doctrees/_autosummary/ch_util.finder.doctree new file mode 100644 index 00000000..25ccac8a Binary files /dev/null and b/docs/.doctrees/_autosummary/ch_util.finder.doctree differ diff --git a/docs/.doctrees/_autosummary/ch_util.fluxcat.doctree b/docs/.doctrees/_autosummary/ch_util.fluxcat.doctree new file mode 100644 index 00000000..3c2d240c Binary files /dev/null and b/docs/.doctrees/_autosummary/ch_util.fluxcat.doctree differ diff --git a/docs/.doctrees/_autosummary/ch_util.hfbcat.doctree b/docs/.doctrees/_autosummary/ch_util.hfbcat.doctree new file mode 100644 index 00000000..2695c34c Binary files /dev/null and b/docs/.doctrees/_autosummary/ch_util.hfbcat.doctree differ diff --git a/docs/.doctrees/_autosummary/ch_util.holography.doctree b/docs/.doctrees/_autosummary/ch_util.holography.doctree new file mode 100644 index 00000000..58569cfd Binary files /dev/null and b/docs/.doctrees/_autosummary/ch_util.holography.doctree differ diff --git a/docs/.doctrees/_autosummary/ch_util.layout.doctree b/docs/.doctrees/_autosummary/ch_util.layout.doctree new file mode 100644 index 00000000..e3f54165 Binary files /dev/null and b/docs/.doctrees/_autosummary/ch_util.layout.doctree differ diff --git a/docs/.doctrees/_autosummary/ch_util.ni_utils.doctree b/docs/.doctrees/_autosummary/ch_util.ni_utils.doctree new file mode 100644 index 00000000..90d7d6c1 Binary files /dev/null and b/docs/.doctrees/_autosummary/ch_util.ni_utils.doctree differ diff --git a/docs/.doctrees/_autosummary/ch_util.plot.doctree b/docs/.doctrees/_autosummary/ch_util.plot.doctree new file mode 100644 index 00000000..12199ef2 Binary files /dev/null and b/docs/.doctrees/_autosummary/ch_util.plot.doctree differ diff --git a/docs/.doctrees/_autosummary/ch_util.rfi.doctree b/docs/.doctrees/_autosummary/ch_util.rfi.doctree new file mode 100644 index 00000000..b04585ab Binary files /dev/null and b/docs/.doctrees/_autosummary/ch_util.rfi.doctree differ diff --git 
a/docs/.doctrees/_autosummary/ch_util.timing.doctree b/docs/.doctrees/_autosummary/ch_util.timing.doctree new file mode 100644 index 00000000..556012b2 Binary files /dev/null and b/docs/.doctrees/_autosummary/ch_util.timing.doctree differ diff --git a/docs/.doctrees/_autosummary/ch_util.tools.doctree b/docs/.doctrees/_autosummary/ch_util.tools.doctree new file mode 100644 index 00000000..c0a41b55 Binary files /dev/null and b/docs/.doctrees/_autosummary/ch_util.tools.doctree differ diff --git a/docs/.doctrees/environment.pickle b/docs/.doctrees/environment.pickle new file mode 100644 index 00000000..f4b416a2 Binary files /dev/null and b/docs/.doctrees/environment.pickle differ diff --git a/docs/.doctrees/index.doctree b/docs/.doctrees/index.doctree new file mode 100644 index 00000000..2aee575b Binary files /dev/null and b/docs/.doctrees/index.doctree differ diff --git a/docs/.doctrees/reference.doctree b/docs/.doctrees/reference.doctree new file mode 100644 index 00000000..73630ca2 Binary files /dev/null and b/docs/.doctrees/reference.doctree differ diff --git a/docs/.nojekyll b/docs/.nojekyll new file mode 100644 index 00000000..e69de29b diff --git a/docs/_autosummary/ch_util.andata.html b/docs/_autosummary/ch_util.andata.html new file mode 100644 index 00000000..acc48cdb --- /dev/null +++ b/docs/_autosummary/ch_util.andata.html @@ -0,0 +1,1565 @@ + + + + + + + ch_util.andata — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

ch_util.andata

+

Analysis data format

+

Functions

+ + + + + + + + + + + + + + + +

andata_from_acq1(acq_files, start, stop, ...)

Create a CorrData object from a 1.0.0 archive version acq.

andata_from_archive2(cls, acq_files, start, ...)

Create an Andata object from a version 2.0.0 archive format acq.

subclass_from_obj(cls, obj)

Pick a subclass of BaseData based on an input object.

versiontuple(v)

Create a version tuple from a version string.

+

Classes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

AnData

alias of CorrData

BaseData([h5_data])

CHIME data in analysis format.

BaseReader(files)

Provides high level reading of CHIME data.

CalibrationGainData([h5_data])

Subclass of GainData for gain acquisitions.

CalibrationGainReader(files)

Subclass of BaseReader for calibration gain data.

CorrData([h5_data])

Subclass of BaseData for correlation data.

CorrReader(files)

Subclass of BaseReader for correlator data.

DigitalGainData([h5_data])

Subclass of GainData for digitalgain acquisitions.

DigitalGainReader(files)

Subclass of BaseReader for digital gain data.

FlagInputData([h5_data])

Subclass of GainFlagData for flaginput acquisitions.

FlagInputReader(files)

Subclass of BaseReader for input flag data.

GainData([h5_data])

Subclass of GainFlagData for gain and digitalgain acquisitions.

GainFlagData([h5_data])

Subclass of BaseData for gain, digitalgain, and flag input acquisitions.

HKData([h5_data])

Subclass of BaseData for housekeeping data.

HKPData([data_group, distributed, comm, ...])

Subclass of BaseData for housekeeping data.

HKPReader(files)

Subclass of BaseReader for HKP data.

HKReader(files)

Subclass of BaseReader for HK data.

RawADCData([h5_data])

Subclass of BaseData for raw ADC data.

RawADCReader(files)

Subclass of BaseReader for raw ADC data.

Reader

alias of CorrReader

WeatherData([h5_data])

Subclass of BaseData for weather data.

WeatherReader(files)

Subclass of BaseReader for weather data.

+

Exceptions

+ + + + + + +

AnDataError

Exception raised when something unexpected happens with the data.

+
+
+ch_util.andata.AnData
+

alias of CorrData

+
+ +
+
+exception ch_util.andata.AnDataError[source]
+

Bases: Exception

+

Exception raised when something unexpected happens with the data.

+
+ +
+
+class ch_util.andata.BaseData(h5_data=None, **kwargs)[source]
+

Bases: TOData

+

CHIME data in analysis format.

+

Inherits from caput.memh5.BasicCont.

+

This is intended to be the main data class for the post +acquisition/real-time analysis parts of the pipeline. This class is laid +out very similarly to how the data is stored in analysis format hdf5 files +and the data in this class can be optionally stored in such an hdf5 file +instead of in memory.

+
+
Parameters:
+

h5_data (h5py.Group, memh5.MemGroup or hdf5 filename, optional) – Underlying h5py like data container where data will be stored. If not +provided a new caput.memh5.MemGroup instance will be created.

+
+
+

Used to pick which subclass to instantiate based on attributes in +data.

+
+
+property cal
+

Stores calibration schemes for the datasets.

+

Each entry is a calibration scheme which itself is a dict storing +meta-data about calibration.

+

Do not try to add a new entry by assigning to an element of this +property. Use create_cal() instead.

+
+
Returns:
+

cal – Calibration schemes.

+
+
Return type:
+

read only dictionary

+
+
+
+ +
+
+static convert_time(time)[source]
+

Overload to provide support for multiple time formats.

+

Method accepts scalar times in supported formats and converts them +to the same format as self.time.

+
+ +
+
+create_cal(name, cal=None)[source]
+

Create a new cal entry.

+
+ +
+
+create_flag(name, *args, **kwargs)[source]
+

Create a new flags dataset.

+
+ +
+
+create_reverse_map(axis_name, reverse_map)[source]
+

Create a new reverse map.

+
+ +
+
+dataset_name_allowed(name)[source]
+

Permits datasets in the root and ‘flags’ groups.

+
+ +
+
+property datasets
+

Stores hdf5 datasets holding all data.

+

Each dataset can reference a calibration scheme in +datasets[name].attrs['cal'] which refers to an entry in +cal.

+

Do not try to add a new dataset by assigning to an item of this +property. Use create_dataset instead.

+
+
Returns:
+

datasets – Entries are h5py or caput.memh5 datasets.

+
+
Return type:
+

read only dictionary

+
+
+
+ +
+
+del_reverse_map(axis_name)[source]
+

Delete a reverse map.

+
+ +
+
+property flags
+

Datasets representing flags and data weights.

+
+
Returns:
+

flags – Entries are h5py or caput.memh5 datasets.

+
+
Return type:
+

read only dictionary

+
+
+
+ +
+
+classmethod from_acq_h5(acq_files, start=None, stop=None, datasets=None, out_group=None, **kwargs)[source]
+

Convert acquisition format hdf5 data to analysis data object.

+

Reads hdf5 data produced by the acquisition system and converts it to +analysis format in memory.

+
+
Parameters:
+
    +
  • acq_files (filename, h5py.File or list there-of or filename pattern) – Files to convert from acquisition format to analysis format. +Filename patterns with wild cards (e.g. “foo*.h5”) are supported.

  • +
  • start (integer, optional) – What frame to start at in the full set of files.

  • +
  • stop (integer, optional) – What frame to stop at in the full set of files.

  • +
  • datasets (list of strings) – Names of datasets to include from acquisition files. Default is to +include all datasets found in the acquisition files.

  • +
  • out_group (h5py.Group, hdf5 filename or memh5.Group) – Underlying hdf5 like container that will store the data for the +BaseData instance.

  • +
+
+
+

Examples

+

Examples are analogous to those of CorrData.from_acq_h5().

+
+ +
+
+group_name_allowed(name)[source]
+

Permits only the “flags” group.

+
+ +
+
+property ntime
+

Length of the time axis of the visibilities.

+
+ +
+
+property time
+

The ‘time’ axis centres as Unix/POSIX time.

+
+ +
+
+property timestamp
+

Deprecated name for time.

+
+ +
+ +
+
+class ch_util.andata.BaseReader(files)[source]
+

Bases: Reader

+

Provides high level reading of CHIME data.

+

You do not want to use this class, but rather one of its inherited classes +(CorrReader, HKReader, WeatherReader).

+

Parses and stores meta-data from file headers allowing for the +interpretation and selection of the data without reading it all from disk.

+
+
Parameters:
+

files (filename, h5py.File or list there-of or filename pattern) – Files containing data. Filename patterns with wild cards (e.g. +“foo*.h5”) are supported.

+
+
+
+
+data_class
+

alias of BaseData

+
+ +
+
+read(out_group=None)[source]
+

Read the selected data.

+
+
Parameters:
+

out_group (h5py.Group, hdf5 filename or memh5.Group) – Underlying hdf5 like container that will store the data for the +BaseData instance.

+
+
Returns:
+

data – Data read from files based on the selections given +in time_sel, prod_sel, and +freq_sel.

+
+
Return type:
+

BaseData

+
+
+
+ +
+
+select_time_range(start_time=None, stop_time=None)[source]
+

Sets time_sel to include a time range.

+

The times from the samples selected will have bin centre timestamps +that are bracketed by the given start_time and stop_time.

+
+
Parameters:
+
    +
  • start_time (float or datetime.datetime) – If a float, this is a Unix/POSIX time. Affects the first element of +time_sel. Default leaves it unchanged.

  • +
  • stop_time (float or datetime.datetime) – If a float, this is a Unix/POSIX time. Affects the second element +of time_sel. Default leaves it unchanged.

  • +
+
+
+
+ +
+ +
+
+class ch_util.andata.CalibrationGainData(h5_data=None, **kwargs)[source]
+

Bases: GainData

+

Subclass of GainData for gain acquisitions.

+

Used to pick which subclass to instantiate based on attributes in +data.

+
+
+property gain
+

Aliases the gain dataset.

+
+ +
+
+get_source_index(source_name)[source]
+

Index into the source axis for a given source name.

+
+ +
+
+property nsource
+

Number of sources of gains.

+
+ +
+
+property source
+

Names of the sources of gains.

+
+ +
+
+property source_gains
+

Dictionary that allows look up of source gains based on source name.

+
+ +
+
+property source_weights
+

Dictionary that allows look up of source weights based on source name.

+
+ +
+
+property weight
+

Aliases the weight dataset.

+
+ +
+ +
+
+class ch_util.andata.CalibrationGainReader(files)[source]
+

Bases: BaseReader

+

Subclass of BaseReader for calibration gain data.

+
+
+data_class
+

alias of CalibrationGainData

+
+ +
+ +
+
+class ch_util.andata.CorrData(h5_data=None, **kwargs)[source]
+

Bases: BaseData

+

Subclass of BaseData for correlation data.

+

Used to pick which subclass to instantiate based on attributes in +data.

+
+
+property dataset_id
+

Access dataset id dataset in unicode format.

+
+ +
+
+property freq
+

The spectral frequency axis as bin centres in MHz.

+
+ +
+
+classmethod from_acq_h5(acq_files, start=None, stop=None, **kwargs)[source]
+

Convert acquisition format hdf5 data to analysis data object.

+

This method overloads the one in BaseData.

+

Changed Jan. 22, 2016: input arguments are now (acq_files, start, +stop, **kwargs) instead of (acq_files, start, stop, prod_sel, +freq_sel, datasets, out_group).

+

Reads hdf5 data produced by the acquisition system and converts it to +analysis format in memory.

+
+
Parameters:
+
    +
  • acq_files (filename, h5py.File or list there-of or filename pattern) – Files to convert from acquisition format to analysis format. +Filename patterns with wild cards (e.g. “foo*.h5”) are supported.

  • +
  • start (integer, optional) – What frame to start at in the full set of files.

  • +
  • stop (integer, optional) – What frame to stop at in the full set of files.

  • +
  • stack_sel (valid numpy index) – Used to select a subset of the stacked correlation products. +Only one of stack_sel, prod_sel, and input_sel may be +specified, with prod_sel preferred over input_sel and +stack_sel proferred over both. +h5py fancy indexing supported but to be used with caution +due to poor reading performance.

  • +
  • prod_sel (valid numpy index) – Used to select a subset of correlation products. +Only one of stack_sel, prod_sel, and input_sel may be +specified, with prod_sel preferred over input_sel and +stack_sel proferred over both. +h5py fancy indexing supported but to be used with caution +due to poor reading performance.

  • +
  • input_sel (valid numpy index) – Used to select a subset of correlator inputs. +Only one of stack_sel, prod_sel, and input_sel may be +specified, with prod_sel preferred over input_sel and +stack_sel proferred over both. +h5py fancy indexing supported but to be used with caution +due to poor reading performance.

  • +
  • freq_sel (valid numpy index) – Used to select a subset of frequencies. +h5py fancy indexing supported but to be used with caution +due to poor reading performance.

  • +
  • datasets (list of strings) – Names of datasets to include from acquisition files. Default is to +include all datasets found in the acquisition files.

  • +
  • out_group (h5py.Group, hdf5 filename or memh5.Group) – Underlying hdf5 like container that will store the data for the +BaseData instance.

  • +
  • apply_gain (boolean, optional) – Whether to apply the inverse gains to the visibility datasets.

  • +
  • renormalize (boolean, optional) – Whether to renormalize for dropped packets.

  • +
  • distributed (boolean, optional) – Load data into a distributed dataset.

  • +
  • comm (MPI.Comm) – Communicator to distributed over. Use MPI.COMM_WORLD if not set.

  • +
+
+
Returns:
+

data – Loaded data object.

+
+
Return type:
+

CorrData

+
+
+

Examples

+

Suppose we have two acquisition format files (this test data is +included in the ch_util repository):

+
>>> import os
+>>> import glob
+>>> from . import test_andata
+>>> os.chdir(test_andata.data_path)
+>>> print(glob.glob('test_acq.h5*'))
+['test_acq.h5.0001', 'test_acq.h5.0002']
+
+
+

These can be converted into one big analysis format data object:

+
>>> data = CorrData.from_acq_h5('test_acq.h5*')
+>>> print(data.vis.shape)
+(1024, 36, 31)
+
+
+

If we only want a subset of the total frames (time bins) in these files +we can supply start and stop indices.

+
>>> data = CorrData.from_acq_h5('test_acq.h5*', start=5, stop=-3)
+>>> print(data.vis.shape)
+(1024, 36, 23)
+
+
+

If we want a subset of the correlation products or spectral +frequencies, specify the prod_sel or freq_sel respectively:

+
>>> data = CorrData.from_acq_h5(
+...     'test_acq.h5*',
+...     prod_sel=[0, 8, 15, 21],
+...     freq_sel=slice(5, 15),
+...     )
+>>> print(data.vis.shape)
+(10, 4, 31)
+>>> data = CorrData.from_acq_h5('test_acq.h5*', prod_sel=1,
+...                           freq_sel=slice(None, None, 10))
+>>> print(data.vis.shape)
+(103, 1, 31)
+
+
+

The underlying hdf5-like container that holds the analysis format +data can also be specified.

+
>>> group = memh5.MemGroup()
+>>> data = CorrData.from_acq_h5('test_acq.h5*', out_group=group)
+>>> print(group['vis'].shape)
+(1024, 36, 31)
+>>> group['vis'] is data.vis
+True
+
+
+
+ +
+
+classmethod from_acq_h5_fast(fname, comm=None, freq_sel=None, start=None, stop=None)[source]
+

Efficiently read a CorrData file in a distributed fashion.

+

This reads a single file from disk into a distributed container. In +contrast to to CorrData.from_acq_h5 it is more restrictive, +allowing only contiguous slices of the frequency and time axes, +and no down selection of the input/product/stack axis.

+
+
Parameters:
+
    +
  • fname (str) – File name to read. Only supports one file at a time.

  • +
  • comm (MPI.Comm, optional) – MPI communicator to distribute over. By default this will +use MPI.COMM_WORLD.

  • +
  • freq_sel (slice, optional) – A selection over the frequency axis. Only slice objects +are supported. If not set, read all frequencies.

  • +
  • start (int, optional) – Start and stop indexes of the time selection.

  • +
  • stop (int, optional) – Start and stop indexes of the time selection.

  • +
+
+
Returns:
+

data – The CorrData container.

+
+
Return type:
+

andata.CorrData

+
+
+
+ +
+
+property gain
+

Convenience access to the gain dataset.

+

Equivalent to self.datasets[‘gain’].

+
+ +
+
+property input_flags
+

Convenience access to the input flags dataset.

+

Equivalent to self.flags[‘inputs’].

+
+ +
+
+property nfreq
+

Length of the freq axis.

+
+ +
+
+property nprod
+

Length of the prod axis.

+
+ +
+
+property prod
+

The correlation product axis as channel pairs.

+
+ +
+
+property prodstack
+

A pair of input indices representative of those in the stack.

+

Note, these are correctly conjugated on return, and so calculations +of the baseline and polarisation can be done without additionally +looking up the stack conjugation.

+
+ +
+
+property stack
+

The correlation product axis as channel pairs.

+
+ +
+
+property vis
+

Convenience access to the visibilities array.

+

Equivalent to self.datasets[‘vis’].

+
+ +
+
+property weight
+

Convenience access to the visibility weight array.

+

Equivalent to self.flags[‘vis_weight’].

+
+ +
+ +
+
+class ch_util.andata.CorrReader(files)[source]
+

Bases: BaseReader

+

Subclass of BaseReader for correlator data.

+
+
+data_class
+

alias of CorrData

+
+ +
+
+property freq
+

Spectral frequency bin centres in data files.

+
+ +
+
+property freq_sel
+

Which frequencies to read.

+
+
Returns:
+

freq_sel – Valid numpy index for a 1D array, specifying what data to read +along the frequency axis.

+
+
Return type:
+

1D data selection

+
+
+
+ +
+
+property input
+

Correlator inputs in data files.

+
+ +
+
+property input_sel
+

Which correlator intputs to read.

+
+
Returns:
+

input_sel – Valid numpy index for a 1D array, specifying what data to read +along the correlation product axis.

+
+
Return type:
+

1D data selection

+
+
+
+ +
+
+property prod
+

Correlation products in data files.

+
+ +
+
+property prod_sel
+

Which correlation products to read.

+
+
Returns:
+

prod_sel – Valid numpy index for a 1D array, specifying what data to read +along the correlation product axis.

+
+
Return type:
+

1D data selection

+
+
+
+ +
+
+read(out_group=None)[source]
+

Read the selected data.

+
+
Parameters:
+

out_group (h5py.Group, hdf5 filename or memh5.Group) – Underlying hdf5 like container that will store the data for the +BaseData instance.

+
+
Returns:
+

data – Data read from files based on the selections given +in time_sel, prod_sel, and +freq_sel.

+
+
Return type:
+

BaseData

+
+
+
+ +
+
+select_freq_physical(frequencies)[source]
+

Sets freq_sel to include given physical frequencies.

+
+
Parameters:
+

frequencies (list of floats) – Frequencies to select. Physical frequencies are matched to indices +on a best match basis.

+
+
+
+ +
+
+select_freq_range(freq_low=None, freq_high=None, freq_step=None)[source]
+

Sets freq_sel to given physical frequency range.

+

Frequencies selected will have bin centres bracked by provided range.

+
+
Parameters:
+
    +
  • freq_low (float) – Lower end of the frequency range in MHz. Default is the lower edge +of the band.

  • +
  • freq_high (float) – Upper end of the frequency range in MHz. Default is the upper edge +of the band.

  • +
  • freq_step (float) – How much bandwidth to skip over between samples in MHz. This value +is approximate. Default is to include all samples in given range.

  • +
+
+
+
+ +
+
+select_prod_autos()[source]
+

Sets prod_sel to only auto-correlations.

+
+ +
+
+select_prod_by_input(input)[source]
+

Sets prod_sel to only products with given input.

+
+
Parameters:
+

input (integer) – Correlator input number. All correlation products with +this input as one of the pairs are selected.

+
+
+
+ +
+
+select_prod_pairs(pairs)[source]
+

Sets prod_sel to include given product pairs.

+
+
Parameters:
+

pairs (list of integer pairs) – Input pairs to be included.

+
+
+
+ +
+ +
+
+class ch_util.andata.DigitalGainData(h5_data=None, **kwargs)[source]
+

Bases: GainData

+

Subclass of GainData for digitalgain acquisitions.

+

Used to pick which subclass to instantiate based on attributes in +data.

+
+
+property compute_time
+

Unix timestamp indicating when the digital gain was computed.

+
+ +
+
+property gain
+

The digital gain applied to the channelized data.

+
+ +
+
+property gain_coeff
+

The coefficient of the digital gain applied to the channelized data.

+
+ +
+
+property gain_exp
+

The exponent of the digital gain applied to the channelized data.

+
+ +
+ +
+
+class ch_util.andata.DigitalGainReader(files)[source]
+

Bases: BaseReader

+

Subclass of BaseReader for digital gain data.

+
+
+data_class
+

alias of DigitalGainData

+
+ +
+ +
+
+class ch_util.andata.FlagInputData(h5_data=None, **kwargs)[source]
+

Bases: GainFlagData

+

Subclass of GainFlagData for flaginput acquisitions.

+

Used to pick which subclass to instantiate based on attributes in +data.

+
+
+property flag
+

Aliases the flag dataset.

+
+ +
+
+get_source_index(source_name)[source]
+

Index into the source axis for a given source name.

+
+ +
+
+property source_flags
+

Dictionary that allows lookup of source flags based on source name.

+
+ +
+ +
+
+class ch_util.andata.FlagInputReader(files)[source]
+

Bases: BaseReader

+

Subclass of BaseReader for input flag data.

+
+
+data_class
+

alias of FlagInputData

+
+ +
+ +
+
+class ch_util.andata.GainData(h5_data=None, **kwargs)[source]
+

Bases: GainFlagData

+

Subclass of GainFlagData for gain and digitalgain acquisitions.

+

Used to pick which subclass to instantiate based on attributes in +data.

+
+
+property freq
+

The spectral frequency axis as bin centres in MHz.

+
+ +
+
+property nfreq
+

Number of frequency bins.

+
+ +
+ +
+
+class ch_util.andata.GainFlagData(h5_data=None, **kwargs)[source]
+

Bases: BaseData

+

Subclass of BaseData for gain, digitalgain, and flag input acquisitions.

+

These acquisitions consist of a collection of updates to the real-time pipeline ordered +chronologically. In most cases the updates do not occur at a regular cadence. +The time that each update occurred can be accessed via self.index_map[‘update_time’]. +In addition, each update is given a unique update ID that can be accessed via +self.datasets[‘update_id’] and can be searched using the self.search_update_id method.

+

Used to pick which subclass to instantiate based on attributes in +data.

+
+
+property input
+

Correlator inputs.

+
+ +
+
+property ninput
+

Number of correlator inputs.

+
+ +
+
+property ntime
+

Number of updates.

+
+ +
+
+resample(dataset, timestamp, transpose=False)[source]
+

Return a dataset resampled at specific times.

+
+
Parameters:
+
    +
  • dataset (string) – Name of the dataset to resample.

  • +
  • timestamp (np.ndarray) – Unix timestamps.

  • +
  • transpose (bool) – Transpose the data such that time is the fastest varying axis. +By default time will be the slowest varying axis.

  • +
+
+
Returns:
+

data – The dataset resampled at the desired times and transposed if requested.

+
+
Return type:
+

np.ndarray

+
+
+
+ +
+
+search_update_id(pattern, is_regex=False)[source]
+

Find the index into the update_time axis corresponding to a particular update_id.

+
+
Parameters:
+
    +
  • pattern (str) – The desired update_id or a glob pattern to search.

  • +
  • is_regex (bool) – Set to True if pattern is a regular expression.

  • +
+
+
Returns:
+

index – Index into the update_time axis that will yield all +updates whose update_id matches the requested pattern.

+
+
Return type:
+

np.ndarray of dtype = int

+
+
+
+ +
+
+search_update_time(timestamp)[source]
+

Find the index into the update_time axis that is valid for specific times.

+

For each time, returns the most recent update that occurred before that time.

+
+
Parameters:
+

timestamp (np.ndarray of unix timestamp) – Unix timestamps.

+
+
Returns:
+

index – Index into the update_time axis that will yield values +that are valid for the requested timestamps.

+
+
Return type:
+

np.ndarray of dtype = int

+
+
+
+ +
+
+property time
+

Aliases index_map[‘update_time’] to time for caput.tod functionality.

+
+ +
+
+property update_id
+

Aliases the update_id dataset.

+
+ +
+ +
+
+class ch_util.andata.HKData(h5_data=None, **kwargs)[source]
+

Bases: BaseData

+

Subclass of BaseData for housekeeping data.

+

Used to pick which subclass to instantiate based on attributes in +data.

+
+
+property atmel
+

Get the ATMEL board that took these data.

+
+
Returns:
+

comp – The ATMEL component that took these data.

+
+
Return type:
+

layout.component

+
+
+
+ +
+
+chan(mux=-1)[source]
+

Convenience access to the list of channels in a given mux.

+
+
Parameters:
+

mux (int) – A mux number. For housekeeping files with no multiplexing (e.g., +FLA’s), leave this as -1.

+
+
Returns:
+

n – The channels numbers.

+
+
Return type:
+

list

+
+
Raises:
+

ValueError – Raised if mux does not exist.

+
+
+
+ +
+
+classmethod from_acq_h5(acq_files, start=None, stop=None, datasets=None, out_group=None)[source]
+

Convert acquisition format hdf5 data to analysis data object.

+

This method overloads the one in BaseData.

+

Reads hdf5 data produced by the acquisition system and converts it to +analysis format in memory.

+
+
Parameters:
+
    +
  • acq_files (filename, h5py.File or list thereof or filename pattern) – Files to convert from acquisition format to analysis format. +Filename patterns with wild cards (e.g. “foo*.h5”) are supported.

  • +
  • start (integer, optional) – What frame to start at in the full set of files.

  • +
  • stop (integer, optional) – What frame to stop at in the full set of files.

  • +
  • datasets (list of strings) – Names of datasets to include from acquisition files. Default is to +include all datasets found in the acquisition files.

  • +
  • out_group (h5py.Group, hdf5 filename or memh5.Group) – Underlying hdf5 like container that will store the data for the +BaseData instance.

  • +
+
+
+

Examples

+

Examples are analogous to those of CorrData.from_acq_h5().

+
+ +
+
+property mux
+

Get the list of muxes in the data.

+
+ +
+
+nchan(mux=-1)[source]
+

Convenience access to the number of channels in a given mux.

+
+
Parameters:
+

mux (int) – A mux number. For housekeeping files with no multiplexing (e.g., +FLA’s), leave this as -1.

+
+
Returns:
+

n – The number of channels

+
+
Return type:
+

int

+
+
Raises:
+

ValueError – Raised if mux does not exist.

+
+
+
+ +
+
+property nmux
+

Get the number of muxes in the data.

+
+ +
+
+tod(chan, mux=-1)[source]
+

Convenience access to a single time-ordered datastream (TOD).

+
+
Parameters:
+
    +
  • chan (int) – A channel number. (Generally, they should be in the range 0–7 for +non-multiplexed data and 0–15 for multiplexed data.)

  • +
  • mux (int) – A mux number. For housekeeping files with no multiplexing (e.g., +FLA’s), leave this as -1.

  • +
+
+
Returns:
+

tod – A 1D array of values for the requested channel/mux combination. Note +that a reference to the data in the dataset is returned; this method +does not make a copy.

+
+
Return type:
+

numpy.array

+
+
Raises:
+

ValueError – Raised if one of chan or mux is not present in any dataset.

+
+
+
+ +
+ +
+
+class ch_util.andata.HKPData(data_group=None, distributed=False, comm=None, file_format=None)[source]
+

Bases: MemDiskGroup

+

Subclass of BaseData for housekeeping data.

+
+
+classmethod from_acq_h5(acq_files, start=None, stop=None, metrics=None, datasets=None, **kwargs)[source]
+

Load in the housekeeping files.

+
+
Parameters:
+
    +
  • acq_files (list) – List of files to load.

  • +
  • start (datetime or float, optional) – Start and stop times for the range of data to load. Default is all.

  • +
  • stop (datetime or float, optional) – Start and stop times for the range of data to load. Default is all.

  • +
  • metrics (list) – Names of metrics to load. Default is all.

  • +
  • datasets (list) – Synonym for metrics (the value of metrics will take precedence).

  • +
+
+
Returns:
+

data

+
+
Return type:
+

HKPData

+
+
+
+ +
+
+static metrics(acq_files)[source]
+

Get the names of the metrics contained within the files.

+
+
Parameters:
+

acq_files (list) – List of acquisition filenames.

+
+
Returns:
+

metrics

+
+
Return type:
+

list

+
+
+
+ +
+
+resample(metric_name, rule, how='mean', unstack=False, **kwargs)[source]
+

Resample the metric onto a regular grid of time.

+

This internally uses the Pandas resampling functionality so that +documentation is a useful reference. This will return the metric with +the labels as a series of multi-level columns.

+
+
Parameters:
+
    +
  • metric_name (str) – Name of metric to resample.

  • +
  • rule (str) – The set of times to resample onto (example ‘30S’, ‘1Min’, ‘2D’). See +the pandas docs for a full description.

  • +
  • how (str or callable, optional) – How should we combine samples to regrid the data? This takes any +valid argument for the the pandas apply method. Useful options are +‘mean’, ‘sum’, ‘min’, ‘max’ and ‘std’.

  • +
  • unstack (bool, optional) – Unstack the data, i.e. return with the labels as hierarchical columns.

  • +
  • kwargs – Any remaining kwargs are passed to the pandas.DataFrame.resample +method to give fine grained control of the resampling.

  • +
+
+
Returns:
+

df – A dataframe resampled onto a regular grid. Labels now appear as part +of multi-level columns.

+
+
Return type:
+

pandas.DataFrame

+
+
+
+ +
+
+select(metric_name)[source]
+

Return the metric as a pandas time-series DataFrame.

+

Requires Pandas to be installed.

+
+
Parameters:
+

metric_name (string) – Name of metric to generate DataFrame for.

+
+
Returns:
+

df

+
+
Return type:
+

pandas.DataFrame

+
+
+
+ +
+ +
+
+class ch_util.andata.HKPReader(files)[source]
+

Bases: BaseReader

+

Subclass of BaseReader for HKP data.

+
+
+data_class
+

alias of HKPData

+
+ +
+ +
+
+class ch_util.andata.HKReader(files)[source]
+

Bases: BaseReader

+

Subclass of BaseReader for HK data.

+
+
+data_class
+

alias of HKData

+
+ +
+ +
+
+class ch_util.andata.RawADCData(h5_data=None, **kwargs)[source]
+

Bases: BaseData

+

Subclass of BaseData for raw ADC data.

+

Used to pick which subclass to instantiate based on attributes in +data.

+
+ +
+
+class ch_util.andata.RawADCReader(files)[source]
+

Bases: BaseReader

+

Subclass of BaseReader for raw ADC data.

+
+
+data_class
+

alias of RawADCData

+
+ +
+ +
+
+ch_util.andata.Reader
+

alias of CorrReader

+
+ +
+
+class ch_util.andata.WeatherData(h5_data=None, **kwargs)[source]
+

Bases: BaseData

+

Subclass of BaseData for weather data.

+

Used to pick which subclass to instantiate based on attributes in +data.

+
+
+dataset_name_allowed(name)[source]
+

Permits datasets in the root and ‘blockhouse’ groups.

+
+ +
+
+group_name_allowed(name)[source]
+

Permits only the “blockhouse” group.

+
+ +
+
+property temperature
+

For easy access to outside weather station temperature. +Needs to be able to extract temperatures from both mingun_weather files +and chime_weather files.

+
+ +
+
+property time
+

Needs to be able to extract times from both mingun_weather files +and chime_weather files.

+
+ +
+ +
+
+class ch_util.andata.WeatherReader(files)[source]
+

Bases: BaseReader

+

Subclass of BaseReader for weather data.

+
+
+data_class
+

alias of WeatherData

+
+ +
+ +
+
+ch_util.andata.andata_from_acq1(acq_files, start, stop, prod_sel, freq_sel, datasets, out_group)[source]
+

Create a CorrData object from a 1.0.0 archive version acq.

+
+
Parameters:
+
    +
  • acq_files (filename, h5py.File or list thereof or filename pattern) – Files to convert from acquisition format to analysis format. +Filename patterns with wild cards (e.g. “foo*.h5”) are supported.

  • +
  • start (int) – What frame to start at in the full set of files.

  • +
  • stop (int) – What frame to stop at in the full set of files.

  • +
  • prod_sel (1D data selection) – Valid numpy index for a 1D array, specifying what data to read +along the correlation product axis.

  • +
  • freq_sel (1D data selection) – Valid numpy index for a 1D array, specifying what data to read +along the frequency axis.

  • +
  • datasets (list of strings) – Names of datasets to include from acquisition files. Default is to +include all datasets found in the acquisition files.

  • +
  • out_group (h5py.Group, hdf5 filename or memh5.Group) – Underlying hdf5 like container that will store the data for the +BaseData instance.

  • +
+
+
Returns:
+

A CorrData object with the requested data.

+
+
Return type:
+

corrdata

+
+
+
+ +
+
+ch_util.andata.andata_from_archive2(cls, acq_files, start, stop, stack_sel, prod_sel, input_sel, freq_sel, datasets, out_group)[source]
+

Create an Andata object from a version 2.0.0 archive format acq.

+
+
Parameters:
+
    +
  • cls – class of object to create

  • +
  • acq_files (filename, h5py.File or list thereof or filename pattern) – Files to convert from acquisition format to analysis format. +Filename patterns with wild cards (e.g. “foo*.h5”) are supported.

  • +
  • start (int) – What frame to start at in the full set of files.

  • +
  • stop (int) – What frame to stop at in the full set of files.

  • +
  • prod_sel (1D data selection) – Valid numpy index for a 1D array, specifying what data to read +along the correlation product axis.

  • +
  • freq_sel (1D data selection) – Valid numpy index for a 1D array, specifying what data to read +along the frequency axis.

  • +
  • datasets (list of strings) – Names of datasets to include from acquisition files. Default is to +include all datasets found in the acquisition files.

  • +
  • out_group (h5py.Group, hdf5 filename or memh5.Group) – Underlying hdf5 like container that will store the data for the +BaseData instance.

  • +
  • Returns

  • +
  • -------

  • +
  • andata (cls instance) – The andata object for the requested data

  • +
+
+
+
+ +
+
+ch_util.andata.subclass_from_obj(cls, obj)[source]
+

Pick a subclass of BaseData based on an input object.

+
+
Parameters:
+
    +
  • cls (subclass of BaseData (class, not an instance)) – Default class to return.

  • +
  • obj (h5py.Group, filename, memh5.Group or) – BaseData object from which to determine the appropriate +subclass of AnData.

  • +
+
+
+
+ +
+
+ch_util.andata.versiontuple(v)[source]
+

Create a version tuple from a version string.

+
+
Parameters:
+

v (str) – A version string

+
+
Returns:
+

versiontuple – A tuple of int values created by splitting the string on dots.

+
+
Return type:
+

tuple

+
+
+
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/_autosummary/ch_util.cal_utils.html b/docs/_autosummary/ch_util.cal_utils.html new file mode 100644 index 00000000..44bc2b34 --- /dev/null +++ b/docs/_autosummary/ch_util.cal_utils.html @@ -0,0 +1,1299 @@ + + + + + + + ch_util.cal_utils — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

ch_util.cal_utils

+

Tools for point source calibration

+

This module contains tools for performing point-source calibration.

+

Functions

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

estimate_directional_scale(z[, c])

Calculate robust, direction dependent estimate of scale.

fit_histogram(arr[, bins, rng, no_weight, ...])

Fit a gaussian to a histogram of the data.

fit_point_source_map(ra, dec, submap[, rms, ...])

Fits a map of a point source to a model.

flag_outliers(raw, flag[, window, nsigma])

Flag outliers with respect to rolling median.

func_2d_gauss(coord, peak_amplitude, ...)

Returns a parametric model for the map of a point source, consisting of a 2-dimensional gaussian.

func_2d_sinc_gauss(coord, peak_amplitude, ...)

Returns a parametric model for the map of a point source,

func_dirty_gauss(dirty_beam)

Returns a parametric model for the map of a point source, consisting of the interpolated dirty beam along the y-axis and a gaussian along the x-axis.

func_real_dirty_gauss(dirty_beam)

Returns a parametric model for the map of a point source, consisting of the interpolated dirty beam along the y-axis and a sinusoid with gaussian envelope along the x-axis.

get_reference_times_dataset_id(times, ...[, ...])

Calculate the relevant calibration reference times from the dataset IDs.

get_reference_times_file(times, cal_file[, ...])

For a given set of times determine when and how they were calibrated.

guess_fwhm(freq[, pol, dec, sigma, voltage, ...])

Provide rough estimate of the FWHM of the CHIME primary beam pattern.

interpolate_gain(freq, gain, weight[, flag, ...])

Replace gain at flagged frequencies with interpolated values.

interpolate_gain_quiet(*args, **kwargs)

Call interpolate_gain with ConvergenceWarnings silenced.

thermal_amplitude(delta_T, freq)

Computes the amplitude gain correction given a (set of) temperature difference and a (set of) frequency based on the thermal model.

+

Classes

+ + + + + + + + + + + + + + + + + + + + + + + + +

FitAmpPhase(*args, **kwargs)

Base class for fitting models to the amplitude and phase.

FitGaussAmpPolyPhase([poly_deg_phi])

Class that enables fits of a gaussian to amplitude and a polynomial to phase.

FitPoly([poly_type])

Base class for fitting polynomials to point source transits.

FitPolyLogAmpPolyPhase([poly_deg_amp, ...])

Class that enables separate fits of a polynomial to log amplitude and phase.

FitPolyRealPolyImag([poly_deg, even, odd])

Class that enables separate fits of a polynomial to real and imag components.

FitRealImag(*args, **kwargs)

Base class for fitting models to the real and imag component.

FitTransit(*args, **kwargs)

Base class for fitting models to point source transits.

+
+
+class ch_util.cal_utils.FitAmpPhase(*args, **kwargs)[source]
+

Bases: FitTransit

+

Base class for fitting models to the amplitude and phase.

+

Assumes an independent fit to amplitude and phase, and provides +methods for predicting the uncertainty on each.

+

Instantiates a FitTransit object.

+
+
Parameters:
+
    +
  • param (np.ndarray[..., nparam]) – Best-fit parameters.

  • +
  • param_cov (np.ndarray[..., nparam, nparam]) – Covariance of the fit parameters.

  • +
  • chisq (np.ndarray[..., ncomponent]) – Chi-squared.

  • +
  • ndof (np.ndarray[..., ncomponent]) – Number of degrees of freedom.

  • +
+
+
+
+
+property nparam
+

Number of parameters.

+
+
Returns:
+

nparam – Number of fit parameters.

+
+
Return type:
+

int

+
+
+
+ +
+
+uncertainty(ha, alpha=0.32, elementwise=False)[source]
+

Predicts the uncertainty on the response at given hour angle(s).

+

Returns the quadrature sum of the amplitude and phase uncertainty.

+
+
Parameters:
+
    +
  • ha (np.ndarray[nha,] or float) – Hour angle in degrees.

  • +
  • alpha (float) – Confidence level given by 1 - alpha.

  • +
+
+
Returns:
+

err – Uncertainty on the response.

+
+
Return type:
+

np.ndarray[…, nha] or float

+
+
+
+ +
+
+uncertainty_amp(ha, alpha=0.32, elementwise=False)[source]
+

Predicts the uncertainty on amplitude at given hour angle(s).

+
+
Parameters:
+
    +
  • ha (np.ndarray[nha,] or float) – Hour angle in degrees.

  • +
  • alpha (float) – Confidence level given by 1 - alpha.

  • +
+
+
Returns:
+

err – Uncertainty on the amplitude in fractional units.

+
+
Return type:
+

np.ndarray[…, nha] or float

+
+
+
+ +
+
+uncertainty_phi(ha, alpha=0.32, elementwise=False)[source]
+

Predicts the uncertainty on phase at given hour angle(s).

+
+
Parameters:
+
    +
  • ha (np.ndarray[nha,] or float) – Hour angle in degrees.

  • +
  • alpha (float) – Confidence level given by 1 - alpha.

  • +
+
+
Returns:
+

err – Uncertainty on the phase in radians.

+
+
Return type:
+

np.ndarray[…, nha] or float

+
+
+
+ +
+ +
+
+class ch_util.cal_utils.FitGaussAmpPolyPhase(poly_deg_phi=5, *args, **kwargs)[source]
+

Bases: FitPoly, FitAmpPhase

+

Class that enables fits of a gaussian to amplitude and a polynomial to phase.

+

Instantiates a FitGaussAmpPolyPhase object.

+
+
Parameters:
+

poly_deg_phi (int) – Degree of the polynomial to fit to phase.

+
+
+
+
+property ndofa
+

Number of degrees of freedom for the amplitude fit.

+
+
Returns:
+

ndofa – Number of degrees of freedom of the amplitude fit.

+
+
Return type:
+

np.ndarray[…]

+
+
+
+ +
+
+property ndofp
+

Number of degrees of freedom for the phase fit.

+
+
Returns:
+

ndofp – Number of degrees of freedom of the phase fit.

+
+
Return type:
+

np.ndarray[…]

+
+
+
+ +
+
+property parameter_names
+

Array of strings containing the name of the fit parameters.

+
+ +
+
+peak()[source]
+

Return the peak of the transit.

+
+
Returns:
+

peak – Centroid of the gaussian fit to amplitude.

+
+
Return type:
+

float

+
+
+
+ +
+ +
+
+class ch_util.cal_utils.FitPoly(poly_type='standard', *args, **kwargs)[source]
+

Bases: FitTransit

+

Base class for fitting polynomials to point source transits.

+

Maps methods of np.polynomial to methods of the class for the +requested polynomial type.

+

Instantiates a FitPoly object.

+
+
Parameters:
+

poly_type (str) – Type of polynomial. Can be ‘standard’, ‘hermite’, or ‘chebyshev’.

+
+
+
+ +
+
+class ch_util.cal_utils.FitPolyLogAmpPolyPhase(poly_deg_amp=5, poly_deg_phi=5, *args, **kwargs)[source]
+

Bases: FitPoly, FitAmpPhase

+

Class that enables separate fits of a polynomial to log amplitude and phase.

+

Instantiates a FitPolyLogAmpPolyPhase object.

+
+
Parameters:
+
    +
  • poly_deg_amp (int) – Degree of the polynomial to fit to log amplitude.

  • +
  • poly_deg_phi (int) – Degree of the polynomial to fit to phase.

  • +
+
+
+
+
+property ndofa
+

Number of degrees of freedom for the amplitude fit.

+
+
Returns:
+

ndofa – Number of degrees of freedom of the amplitude fit.

+
+
Return type:
+

np.ndarray[…]

+
+
+
+ +
+
+property ndofp
+

Number of degrees of freedom for the phase fit.

+
+
Returns:
+

ndofp – Number of degrees of freedom of the phase fit.

+
+
Return type:
+

np.ndarray[…]

+
+
+
+ +
+
+property parameter_names
+

Array of strings containing the name of the fit parameters.

+
+ +
+
+peak(param=None)[source]
+

Find the peak of the transit.

+
+
Parameters:
+

param (np.ndarray[..., nparam]) – Coefficients of the polynomial model for log amplitude. +Defaults to self.param.

+
+
Returns:
+

peak – Location of the maximum amplitude in degrees hour angle. +If the polynomial does not have a maximum, then NaN is returned.

+
+
Return type:
+

np.ndarray[…]

+
+
+
+ +
+ +
+
+class ch_util.cal_utils.FitPolyRealPolyImag(poly_deg=5, even=False, odd=False, *args, **kwargs)[source]
+

Bases: FitPoly, FitRealImag

+

Class that enables separate fits of a polynomial to real and imag components.

+

Used to fit cross-polar response that is not well-described by the +FitPolyLogAmpPolyPhase used for co-polar response.

+

Instantiates a FitPolyRealPolyImag object.

+
+
Parameters:
+

poly_deg (int) – Degree of the polynomial to fit to real and imaginary component.

+
+
+
+
+deriv(ha, param=None)[source]
+

Calculate the derivative of the transit.

+
+ +
+
+property ndofi
+

Number of degrees of freedom for the imag fit.

+
+ +
+
+property ndofr
+

Number of degrees of freedom for the real fit.

+
+ +
+
+property parameter_names
+

Array of strings containing the name of the fit parameters.

+
+ +
+
+peak()[source]
+

Calculate the peak of the transit.

+
+ +
+
+vander(ha, *args)[source]
+

Create the Vandermonde matrix.

+
+ +
+ +
+
+class ch_util.cal_utils.FitRealImag(*args, **kwargs)[source]
+

Bases: FitTransit

+

Base class for fitting models to the real and imag component.

+

Assumes an independent fit to real and imaginary, and provides +methods for predicting the uncertainty on each.

+

Instantiates a FitTransit object.

+
+
Parameters:
+
    +
  • param (np.ndarray[..., nparam]) – Best-fit parameters.

  • +
  • param_cov (np.ndarray[..., nparam, nparam]) – Covariance of the fit parameters.

  • +
  • chisq (np.ndarray[..., ncomponent]) – Chi-squared.

  • +
  • ndof (np.ndarray[..., ncomponent]) – Number of degrees of freedom.

  • +
+
+
+
+
+property nparam
+

Number of parameters.

+
+
Returns:
+

nparam – Number of fit parameters.

+
+
Return type:
+

int

+
+
+
+ +
+
+uncertainty(ha, alpha=0.32, elementwise=False)[source]
+

Predicts the uncertainty on the response at given hour angle(s).

+

Returns the quadrature sum of the real and imag uncertainty.

+
+
Parameters:
+
    +
  • ha (np.ndarray[nha,] or float) – Hour angle in degrees.

  • +
  • alpha (float) – Confidence level given by 1 - alpha.

  • +
+
+
Returns:
+

err – Uncertainty on the response.

+
+
Return type:
+

np.ndarray[…, nha] or float

+
+
+
+ +
+
+uncertainty_imag(ha, alpha=0.32, elementwise=False)[source]
+

Predicts the uncertainty on imag component at given hour angle(s).

+
+
Parameters:
+
    +
  • ha (np.ndarray[nha,] or float) – Hour angle in degrees.

  • +
  • alpha (float) – Confidence level given by 1 - alpha.

  • +
+
+
Returns:
+

err – Uncertainty on the imag component.

+
+
Return type:
+

np.ndarray[…, nha] or float

+
+
+
+ +
+
+uncertainty_real(ha, alpha=0.32, elementwise=False)[source]
+

Predicts the uncertainty on real component at given hour angle(s).

+
+
Parameters:
+
    +
  • ha (np.ndarray[nha,] or float) – Hour angle in degrees.

  • +
  • alpha (float) – Confidence level given by 1 - alpha.

  • +
+
+
Returns:
+

err – Uncertainty on the real component.

+
+
Return type:
+

np.ndarray[…, nha] or float

+
+
+
+ +
+ +
+
+class ch_util.cal_utils.FitTransit(*args, **kwargs)[source]
+

Bases: object

+

Base class for fitting models to point source transits.

+

The fit method should be used to populate the param, param_cov, chisq, +and ndof attributes. The predict and uncertainty methods can then be used +to obtain the model prediction for the response and uncertainty on this quantity +at a given hour angle.

+
+
+param
+

Best-fit parameters.

+
+
Type:
+

np.ndarray[…, nparam]

+
+
+
+ +
+
+param_cov
+

Covariance of the fit parameters.

+
+
Type:
+

np.ndarray[…, nparam, nparam]

+
+
+
+ +
+
+chisq
+

Chi-squared of the fit.

+
+
Type:
+

np.ndarray[…]

+
+
+
+ +
+
+ndof
+

Number of degrees of freedom.

+
+
Type:
+

np.ndarray[…]

+
+
+
+ +
+
+Abstract Methods
+
+ +
+
+----------------
+
+ +
+
+Any subclass of FitTransit must define these methods
+

peak +_fit +_model +_jacobian

+
+ +

Instantiates a FitTransit object.

+
+
Parameters:
+
    +
  • param (np.ndarray[..., nparam]) – Best-fit parameters.

  • +
  • param_cov (np.ndarray[..., nparam, nparam]) – Covariance of the fit parameters.

  • +
  • chisq (np.ndarray[..., ncomponent]) – Chi-squared.

  • +
  • ndof (np.ndarray[..., ncomponent]) – Number of degrees of freedom.

  • +
+
+
+
+
+property N
+

Number of independent transit fits contained in this object.

+
+
Returns:
+

N – Numpy-style shape indicating the number of +fits that the object contains. Is None +if the object contains a single fit.

+
+
Return type:
+

tuple

+
+
+
+ +
+
+fit(ha, resp, resp_err, width=5, absolute_sigma=False, **kwargs)[source]
+

Apply subclass defined _fit method to multiple transits.

+

This function can be used to fit the transit for multiple inputs +and frequencies. Populates the param, param_cov, chisq, and ndof +attributes.

+
+
Parameters:
+
    +
  • ha (np.ndarray[nha,]) – Hour angle in degrees.

  • +
  • resp (np.ndarray[..., nha]) – Measured response to the point source. Complex valued.

  • +
  • resp_err (np.ndarray[..., nha]) – Error on the measured response.

  • +
  • width (np.ndarray[...]) – Initial guess at the width (sigma) of the transit in degrees.

  • +
  • absolute_sigma (bool) – Set to True if the errors provided are absolute. Set to False if +the errors provided are relative, in which case the parameter covariance +will be scaled by the chi-squared per degree-of-freedom.

  • +
+
+
+
+ +
+
+property ncomponent
+

Number of components.

+
+
Returns:
+

ncomponent – Number of components (i.e, real and imag, amp and phase, complex) that have been fit.

+
+
Return type:
+

int

+
+
+
+ +
+
+property nparam
+

Number of parameters.

+
+
Returns:
+

nparam – Number of fit parameters.

+
+
Return type:
+

int

+
+
+
+ +
+
+property param_corr
+

Parameter correlation matrix.

+
+
Returns:
+

param_corr – Correlation of the fit parameters.

+
+
Return type:
+

np.ndarray[…, nparam, nparam]

+
+
+
+ +
+
+property parameter_names
+

Array of strings containing the name of the fit parameters.

+
+
Returns:
+

parameter_names – Names of the parameters.

+
+
Return type:
+

np.ndarray[nparam,]

+
+
+
+ +
+
+abstract peak()[source]
+

Calculate the peak of the transit.

+

Any subclass of FitTransit must define this method.

+
+ +
+
+predict(ha, elementwise=False)[source]
+

Predict the point source response.

+
+
Parameters:
+
    +
  • ha (np.ndarray[nha,] or float) – The hour angle in degrees.

  • +
  • elementwise (bool) – If False, then the model will be evaluated at the +requested hour angles for every set of parameters. +If True, then the model will be evaluated at a +separate hour angle for each set of parameters +(requires ha.shape == self.N).

  • +
+
+
Returns:
+

model – Model for the point source response at the requested +hour angles. Complex valued.

+
+
Return type:
+

np.ndarray[…, nha] or float

+
+
+
+ +
+
+classmethod tval(alpha, ndof)[source]
+

Quantile of a standardized Student’s t random variable.

+

This quantity is slow to compute. Past values will be cached +in a dictionary shared by all instances of the class.

+
+
Parameters:
+
    +
  • alpha (float) – Calculate the quantile corresponding to the lower tail probability +1 - alpha / 2.

  • +
  • ndof (np.ndarray or int) – Number of degrees of freedom of the Student’s t variable.

  • +
+
+
Returns:
+

tval – Quantile of a standardized Student’s t random variable.

+
+
Return type:
+

np.ndarray or float

+
+
+
+ +
+
+uncertainty(ha, alpha=0.32, elementwise=False)[source]
+

Predict the uncertainty on the point source response.

+
+
Parameters:
+
    +
  • ha (np.ndarray[nha,] or float) – The hour angle in degrees.

  • +
  • alpha (float) – Confidence level given by 1 - alpha.

  • +
  • elementwise (bool) – If False, then the uncertainty will be evaluated at +the requested hour angles for every set of parameters. +If True, then the uncertainty will be evaluated at a +separate hour angle for each set of parameters +(requires ha.shape == self.N).

  • +
+
+
Returns:
+

err – Uncertainty on the point source response at the +requested hour angles.

+
+
Return type:
+

np.ndarray[…, nha]

+
+
+
+ +
+ +
+
+ch_util.cal_utils.estimate_directional_scale(z, c=2.1)[source]
+

Calculate robust, direction dependent estimate of scale.

+
+
Parameters:
+
    +
  • z (np.ndarray) – 1D array containing the data.

  • +
  • c (float) – Cutoff in number of MAD. Data points whose absolute value is +larger than c * MAD from the median are saturated at the +maximum value in the estimator.

  • +
+
+
Returns:
+

    +
  • zmed (float) – The median value of z.

  • +
  • sa (float) – Estimate of scale for z <= zmed.

  • +
  • sb (float) – Estimate of scale for z > zmed.

  • +
+

+
+
+
+ +
+
+ch_util.cal_utils.fit_histogram(arr, bins='auto', rng=None, no_weight=False, test_normal=False, return_histogram=False)[source]
+

Fit a gaussian to a histogram of the data.

+
+
Parameters:
+
    +
  • arr (np.ndarray) – 1D array containing the data. Arrays with more than one dimension are flattened.

  • +
  • bins (int or sequence of scalars or str) –

      +
    • If bins is an int, it defines the number of equal-width bins in rng.

    • +
    • If bins is a sequence, it defines a monotonically increasing array of bin edges, +including the rightmost edge, allowing for non-uniform bin widths.

    • +
    • If bins is a string, it defines a method for computing the bins.

    • +
    +

  • +
  • rng ((float, float)) – The lower and upper range of the bins. If not provided, then the range spans +the minimum to maximum value of arr.

  • +
  • no_weight (bool) – Give equal weighting to each histogram bin. Otherwise use proper weights based +on number of counts observed in each bin.

  • +
  • test_normal (bool) – Apply the Shapiro-Wilk and Anderson-Darling tests for normality to the data.

  • +
  • return_histogram (bool) – Return the histogram. Otherwise return only the best fit parameters and test statistics.

  • +
+
+
Returns:
+

    +
  • results (dict) – Dictionary containing the following fields:

  • +
  • indmin (int) – Only bins whose index is greater than indmin were included in the fit.

  • +
  • indmax (int) – Only bins whose index is less than indmax were included in the fit.

  • +
  • xmin (float) – The data value corresponding to the centre of the indmin bin.

  • +
  • xmax (float) – The data value corresponding to the centre of the indmax bin.

  • +
  • par ([float, float, float]) – The parameters of the fit, ordered as [peak, mu, sigma].

  • +
  • chisq (float) – The chi-squared of the fit.

  • +
  • ndof (int) – The number of degrees of freedom of the fit.

  • +
  • pte (float) – The probability to observe the chi-squared of the fit.

  • +
  • If return_histogram is True, then results will also contain the following fields –

    +
    +
    bin_centrenp.ndarray

    The bin centre of the histogram.

    +
    +
    bin_countnp.ndarray

    The bin counts of the histogram.

    +
    +
    +
  • +
  • If test_normal is True, then results will also contain the following fields –

    +
    +
    shapirodict
    +
    statfloat

    The Shapiro-Wilk test statistic.

    +
    +
    ptefloat

    The probability to observe stat if the data were drawn from a gaussian.

    +
    +
    +
    +
    andersondict
    +
    statfloat

    The Anderson-Darling test statistic.

    +
    +
    criticallist of float

    The critical values of the test statistic.

    +
    +
    alphalist of float

    The significance levels corresponding to each critical value.

    +
    +
    pastlist of bool

    Boolean indicating if the data passes the test for each critical value.

    +
    +
    +
    +
    +
  • +
+

+
+
+
+ +
+
+ch_util.cal_utils.fit_point_source_map(ra, dec, submap, rms=None, dirty_beam=None, real_map=False, freq=600.0, ra0=None, dec0=None)[source]
+

Fits a map of a point source to a model.

+
+
Parameters:
+
    +
  • ra (np.ndarray[nra, ]) – Transit right ascension.

  • +
  • dec (np.ndarray[ndec, ]) – Transit declination.

  • +
  • submap (np.ndarray[..., nra, ndec]) – Region of the ringmap around the point source.

  • +
  • rms (np.ndarray[..., nra]) – RMS error on the map.

  • +
  • flag (np.ndarray[..., nra, ndec]) – Boolean array that indicates which pixels to fit.

  • +
  • dirty_beam (np.ndarray[..., nra, ndec] or [ra, dec, dirty_beam]) – Fourier transform of the weighting function used to create +the map. If input, then the interpolated dirty beam will be used +as the model for the point source response in the declination direction. +Can either be an array that is the same size as submap, or a list/tuple +of length 3 that contains [ra, dec, dirty_beam] since the shape of the +dirty beam is likely to be larger than the shape of the subregion of the +map, at least in the declination direction.

  • +
+
+
Returns:
+

    +
  • param_name (np.ndarray[nparam, ]) – Names of the parameters.

  • +
  • param (np.ndarray[…, nparam]) – Best-fit parameters for each item.

  • +
  • param_cov (np.ndarray[…, nparam, nparam]) – Parameter covariance for each item.

  • +
+

+
+
+
+ +
+
+ch_util.cal_utils.flag_outliers(raw, flag, window=25, nsigma=5.0)[source]
+

Flag outliers with respect to rolling median.

+
+
Parameters:
+
    +
  • raw (np.ndarray[nsample,]) – Raw data sampled at fixed rate. Use the flag parameter to indicate missing +or invalid data.

  • +
  • flag (np.ndarray[nsample,]) – Boolean array where True indicates valid data and False indicates invalid data.

  • +
  • window (int) – Window size (in number of samples) used to determine local median.

  • +
  • nsigma (float) – Data is considered an outlier if it is greater than this number of median absolute +deviations away from the local median.

  • +
+
+
Returns:
+

not_outlier – Boolean array where True indicates valid data and False indicates data that is +either an outlier or had flag = True.

+
+
Return type:
+

np.ndarray[nsample,]

+
+
+
+ +
+
+ch_util.cal_utils.func_2d_gauss(coord, peak_amplitude, centroid_x, centroid_y, fwhm_x, fwhm_y, offset)[source]
+

Returns a parametric model for the map of a point source, +consisting of a 2-dimensional gaussian.

+
+
Parameters:
+
    +
  • coord ((ra, dec)) – Tuple containing the right ascension and declination. These should be +coordinate vectors of length nra and ndec, respectively.

  • +
  • peak_amplitude (float) – Model parameter. Normalization of the gaussian.

  • +
  • centroid_x (float) – Model parameter. Centroid of the gaussian in degrees in the +right ascension direction.

  • +
  • centroid_y (float) – Model parameter. Centroid of the gaussian in degrees in the +declination direction.

  • +
  • fwhm_x (float) – Model parameter. Full width at half maximum of the gaussian +in degrees in the right ascension direction.

  • +
  • fwhm_y (float) – Model parameter. Full width at half maximum of the gaussian +in degrees in the declination direction.

  • +
  • offset (float) – Model parameter. Constant background value of the map.

  • +
+
+
Returns:
+

model – Model prediction for the map of the point source.

+
+
Return type:
+

np.ndarray[nra*ndec]

+
+
+
+ +
+
+ch_util.cal_utils.func_2d_sinc_gauss(coord, peak_amplitude, centroid_x, centroid_y, fwhm_x, fwhm_y, offset)[source]
+
+
Returns a parametric model for the map of a point source,

consisting of a sinc function along the declination direction +and gaussian along the right ascension direction.

+
+
+
+
Parameters:
+
    +
  • coord ((ra, dec)) – Tuple containing the right ascension and declination. These should be +coordinate vectors of length nra and ndec, respectively.

  • +
  • peak_amplitude (float) – Model parameter. Normalization of the gaussian.

  • +
  • centroid_x (float) – Model parameter. Centroid of the gaussian in degrees in the +right ascension direction.

  • +
  • centroid_y (float) – Model parameter. Centroid of the sinc function in degrees in the +declination direction.

  • +
  • fwhm_x (float) – Model parameter. Full width at half maximum of the gaussian +in degrees in the right ascension direction.

  • +
  • fwhm_y (float) – Model parameter. Full width at half maximum of the sinc function +in degrees in the declination direction.

  • +
  • offset (float) – Model parameter. Constant background value of the map.

  • +
+
+
Returns:
+

model – Model prediction for the map of the point source.

+
+
Return type:
+

np.ndarray[nra*ndec]

+
+
+
+ +
+
+ch_util.cal_utils.func_dirty_gauss(dirty_beam)[source]
+

Returns a parametric model for the map of a point source, +consisting of the interpolated dirty beam along the y-axis +and a gaussian along the x-axis.

+

This function is a wrapper that defines the interpolated +dirty beam.

+
+
Parameters:
+

dirty_beam (scipy.interpolate.interp1d) – Interpolation function that takes as an argument el = sin(za) +and outputs an np.ndarray[nel, nra] that represents the dirty +beam evaluated at the same right ascension as the map.

+
+
Returns:
+

dirty_gauss – Model prediction for the map of the point source.

+
+
Return type:
+

np.ndarray[nra*ndec]

+
+
+
+ +
+
+ch_util.cal_utils.func_real_dirty_gauss(dirty_beam)[source]
+

Returns a parametric model for the map of a point source, +consisting of the interpolated dirty beam along the y-axis +and a sinusoid with gaussian envelope along the x-axis.

+

This function is a wrapper that defines the interpolated +dirty beam.

+
+
Parameters:
+

dirty_beam (scipy.interpolate.interp1d) – Interpolation function that takes as an argument el = sin(za) +and outputs an np.ndarray[nel, nra] that represents the dirty +beam evaluated at the same right ascension as the map.

+
+
Returns:
+

real_dirty_gauss – Model prediction for the map of the point source.

+
+
Return type:
+

np.ndarray[nra*ndec]

+
+
+
+ +
+
+ch_util.cal_utils.get_reference_times_dataset_id(times: ndarray, dataset_ids: ndarray, logger: Logger | None = None) Dict[str, ndarray | Dict][source]
+

Calculate the relevant calibration reference times from the dataset IDs.

+
+

Warning

+

Dataset IDs before 2020/10/10 are corrupt so this routine won’t work.

+
+
+
Parameters:
+
    +
  • times – Unix times of data points to be calibrated as floats.

  • +
  • dataset_ids – The dataset IDs as an array of strings.

  • +
  • logger – A logging object to use for messages. If not provided, use a module level +logger.

  • +
+
+
Returns:
+

A dictionary containing the results. See get_reference_times_file for a +description of the contents.

+
+
Return type:
+

reftime_result

+
+
+
+ +
+
+ch_util.cal_utils.get_reference_times_file(times: ndarray, cal_file: MemGroup, logger: Logger | None = None) Dict[str, ndarray][source]
+

For a given set of times determine when and how they were calibrated.

+

This uses the pre-calculated calibration time reference files.

+
+
Parameters:
+
    +
  • times – Unix times of data points to be calibrated as floats.

  • +
  • cal_file – memh5 container which contains the reference times for calibration source +transits.

  • +
  • logger – A logging object to use for messages. If not provided, use a module level +logger.

  • +
+
+
Returns:
+

reftime_result – A dictionary containing four entries:

+
    +
  • reftime: Unix time of same length as times. Reference times of transit of the +source used to calibrate the data at each time in times. Returns NaN for +times without a reference.

  • +
  • reftime_prev: The Unix time of the previous gain update. Only set for time +samples that need to be interpolated, otherwise NaN.

  • +
  • interp_start: The Unix time of the start of the interpolation period. Only +set for time samples that need to be interpolated, otherwise NaN.

  • +
  • interp_stop: The Unix time of the end of the interpolation period. Only +set for time samples that need to be interpolated, otherwise NaN.

  • +
+

+
+
Return type:
+

dict

+
+
+
+ +
+
+ch_util.cal_utils.guess_fwhm(freq, pol='X', dec=None, sigma=False, voltage=False, seconds=False)[source]
+

Provide rough estimate of the FWHM of the CHIME primary beam pattern.

+

It uses a linear fit to the median FWHM(nu) over all feeds of a given +polarization for CygA transits. CasA and TauA transits also showed +good agreement with this relationship.

+
+
Parameters:
+
    +
  • freq (float or np.ndarray) – Frequency in MHz.

  • +
  • pol (string or bool) – Polarization, can be ‘X’/’E’ or ‘Y’/’S’

  • +
  • dec (float) – Declination of the source in radians. If this quantity +is input, then the FWHM is divided by cos(dec) to account +for the increased rate at which a source rotates across +the sky. Default is do not correct for this effect.

  • +
  • sigma (bool) – Return the standard deviation instead of the FWHM. +Default is to return the FWHM.

  • +
  • voltage (bool) – Return the value for a voltage beam, otherwise returns +value for a power beam.

  • +
  • seconds (bool) – Convert to elapsed time in units of seconds. +Otherwise returns in units of degrees on the sky.

  • +
+
+
Returns:
+

fwhm – Rough estimate of the FWHM (or standard deviation if sigma=True).

+
+
Return type:
+

float or np.ndarray

+
+
+
+ +
+
+ch_util.cal_utils.interpolate_gain(freq, gain, weight, flag=None, length_scale=30.0)[source]
+

Replace gain at flagged frequencies with interpolated values.

+

Uses a gaussian process regression to perform the interpolation +with a Matern function describing the covariance between frequencies.

+
+
Parameters:
+
    +
  • freq (np.ndarray[nfreq,]) – Frequencies in MHz.

  • +
  • gain (np.ndarray[nfreq, ninput]) – Complex gain for each input and frequency.

  • +
  • weight (np.ndarray[nfreq, ninput]) – Uncertainty on the complex gain, expressed as inverse variance.

  • +
  • flag (np.ndarray[nfreq, ninput]) – Boolean array indicating the good (True) and bad (False) gains. +If not provided, then it will be determined by evaluating weight > 0.0.

  • +
  • length_scale (float) – Correlation length in frequency in MHz.

  • +
+
+
Returns:
+

    +
  • interp_gain (np.ndarray[nfreq, ninput]) – For frequencies with flag = True, this will be equal to gain. For frequencies with +flag = False, this will be an interpolation of the gains with flag = True.

  • +
  • interp_weight (np.ndarray[nfreq, ninput]) – For frequencies with flag = True, this will be equal to weight. For frequencies with +flag = False, this will be the expected uncertainty on the interpolation.

  • +
+

+
+
+
+ +
+
+ch_util.cal_utils.interpolate_gain_quiet(*args, **kwargs)[source]
+

Call interpolate_gain with ConvergenceWarnings silenced.

+

Accepts and passes all arguments and keyword arguments for interpolate_gain.

+
+ +
+
+ch_util.cal_utils.thermal_amplitude(delta_T, freq)[source]
+

Computes the amplitude gain correction given a (set of) temperature +difference and a (set of) frequency based on the thermal model.

+
+
Parameters:
+
    +
  • delta_T (float or array of floats) – Temperature difference (T - T_0) for which to find a gain correction.

  • +
  • freq (float or array of floats) – Frequencies in MHz

  • +
+
+
Returns:
+

g – Gain amplitude corrections. Multiply by data +to correct it.

+
+
Return type:
+

float or array of floats

+
+
+
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/_autosummary/ch_util.chan_monitor.html b/docs/_autosummary/ch_util.chan_monitor.html new file mode 100644 index 00000000..17bbd9af --- /dev/null +++ b/docs/_autosummary/ch_util.chan_monitor.html @@ -0,0 +1,344 @@ + + + + + + + ch_util.chan_monitor — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

ch_util.chan_monitor

+

Channel quality monitor routines

+

Classes

+ + + + + + + + + +

ChanMonitor(t1[, t2, freq_sel, prod_sel, ...])

This class provides the user interface to FeedLocator.

FeedLocator(vis1, vis2, tm1, tm2, src1, ...)

This class contains functions that do all the computations to determine feed positions from data.

+
+
+class ch_util.chan_monitor.ChanMonitor(t1, t2=None, freq_sel=None, prod_sel=None, bswp1=26, bswp2=90, bsep1=154, bsep2=218)[source]
+

Bases: object

+

This class provides the user interface to FeedLocator.

+

It initializes instances of FeedLocator (normally one per polarization) +and returns combined lists of results (good channels and positions, +agreement/disagreement with the layout database, etc.)

+

Feed locator should not +have to separate the visibilities in data to run the test on and data not to run the +test on. ChanMonitor should make the separation and provide FeedLocator with the right +data cube to test.

+
+
Parameters:
+
    +
  • [t2] (t1) – set to 1 sidereal day after t1

  • +
  • freq_sel

  • +
  • prod_sel

  • +
+
+
+

Here t1 and t2 have to be unix time (floats)

+
+
+classmethod fromdata(data, freq_sel=None, prod_sel=None)[source]
+

Initialize class from andata object

+
+ +
+
+classmethod fromdate(date, freq_sel=None, prod_sel=None, bswp1=26, bswp2=90, bsep1=154, bsep2=218)[source]
+

Initialize class from date

+
+ +
+
+get_results(src, tdelt=2800)[source]
+

If self.finder exists, then it takes a deep copy of this object, +further restricts the time range to include only src transits, +and then queries the database to obtain a list of the acquisitions. +If self.finder does not exist, then it creates a finder object, +restricts the time range to include only src transits between +self.t1 and self.t2, and then queries the database to obtain a list +of the acquisitions.

+
+ +
+
+get_sunfree_srcs(srcs=None)[source]
+

This method uses the attributes ‘night_acq_list’ and +‘acq_list’ to determine the srcs that transit +in the available data. If these attributes do not +exist, then the method ‘set_acq_list’ is called. +If srcs is not specified, then it defaults to the +brightest four radio point sources in the sky: +CygA, CasA, TauA, and VirA.

+
+ +
+
+set_acq_list()[source]
+

This method sets four attributes. The first two attributes +are ‘night_finder’ and ‘night_acq_list’, which are the +finder object and list of acquisitions that +contain all night time data between self.t1 and self.t2. +The second two attributes are ‘finder’ and ‘acq_list’, +which are the finder object and list of acquisitions +that contain all data between self.t1 and self.t2 with the +sunrise, sun transit, and sunset removed.

+
+ +
+
+set_metadata(tms, input_map)[source]
+

Sets self.corr_inputs, self.pwds, self.pstns, self.p1_idx, self.p2_idx

+
+ +
+
+single_source_check()[source]
+

Assumes self.source1 is NOT None

+
+ +
+ +
+
+class ch_util.chan_monitor.FeedLocator(vis1, vis2, tm1, tm2, src1, src2, freqs, prods, inputs, pstns0, bsipts=None)[source]
+

Bases: object

+

This class contains functions that do all the computations to +determine feed positions from data. It also determines the quality +of data and returns a list of good inputs and frequencies.

+

Upon initialization, it receives visibility data around one or two +bright source transits as well as corresponding meta-data.

+
+
Parameters:
+
    +
  • vis1 (Visibility data around bright source transit)

  • +
  • [vis2] (Visibility data around bright source transit)

  • +
  • tm1 (Timestamp corresponding to vis1 [vis2])

  • +
  • [tm2] (Timestamp corresponding to vis1 [vis2])

  • +
  • src1 (Ephemeris astronomical object corresponding to the) – transit in vis1 [vis2]

  • +
  • [src2] (Ephemeris astronomical object corresponding to the) – transit in vis1 [vis2]

  • +
  • freqs (frequency axis of vis1 [and vis2])

  • +
  • prods (Product axis of vis1 [and vis2])

  • +
  • inputs (inputs loaded in vis1 [and vis2])

  • +
  • pstns0 (positions of inputs as obtained from the layout database)

  • +
  • bsipts (base inputs used to determine cross correlations loaded) – (might become unnecessary in the future)

  • +
+
+
+

Basic initialization method

+
+
+continuity_test(tol=0.2, knl=5)[source]
+

Call only if freqs are adjacent. +Uses xdists (Earth coords) instead of c_xdists (cylinder coords) +to allow for calling before ydists are computed. Doesn’t make any +difference for this test. Results are used in computing y_dists.

+
+ +
+
+get_c_ydist(ph1=None, ph2=None, good_freqs=None, tol=1.5, Nmax=20)[source]
+

N-S. Absolutely assumes contiguous frequencies!!!

+
+ +
+
+get_c_ydist_perfreq(ph1=None, ph2=None)[source]
+

Old N-S dists function. To be used only in case a continuum of +frequencies is not available

+
+ +
+
+get_xdist(ft_prms, dec)[source]
+

E-W

+
+ +
+
+good_prod_freq(pass_rst, tol_ch1=0.3, tol_ch2=0.7, tol_fr1=0.6, tol_fr2=0.7)[source]
+

Tries to determine overall bad products and overall bad frequencies +from a test_pass result.

+
+ +
+
+params_ft(tm, vis, dec, x0_shift=5.0)[source]
+
+
Extract relevant parameters from source transit
+
visibility in two steps:
    +
  1. FFT visibility

  2. +
  3. Fit a gaussian to the transform

  4. +
+
+
+
+
+
+
Parameters:
+
    +
  • tm (array-like) – Independent variable (time)

  • +
  • trace (array-like) – Dependent variable (visibility)

  • +
  • freq (float) – Frequency of the visibility trace, in MHz.

  • +
  • dec (float) – Declination of source. Used for initial guess of +gaussian width. Defaults to CygA declination: 0.71

  • +
+
+
Returns:
+

    +
  • popt (array of float) – List with optimal parameters: [A,mu,sig2]

  • +
  • pcov (array of float) – Covariance matrix for optimal parameters. +For details see documentation on scipy.optimize.curve_fit

  • +
+

+
+
+
+ +
+
+set_good_ipts(base_ipts)[source]
+

Good_prods to good_ipts

+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/_autosummary/ch_util.data_quality.html b/docs/_autosummary/ch_util.data_quality.html new file mode 100644 index 00000000..7347c1c2 --- /dev/null +++ b/docs/_autosummary/ch_util.data_quality.html @@ -0,0 +1,241 @@ + + + + + + + ch_util.data_quality — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

ch_util.data_quality

+

Data quality routines

+
+

Data quality functions

+ +
+
+

Issues

+

Auxiliary functions are still lacking documentation.

+
+

Functions

+ + + + + + +

good_channels(data[, gain_tol, noise_tol, ...])

Test data for misbehaving channels.

+
+
+ch_util.data_quality.good_channels(data, gain_tol=10.0, noise_tol=2.0, fit_tol=0.02, test_freq=0, noise_synced=None, inputs=None, res_plot=False, verbose=True)[source]
+

Test data for misbehaving channels.

+

Three tests are performed:

+
    +
  1. Excessively high digital gains,

  2. +
  3. Compliance of noise to the radiometer equation and

  4. +
  5. Goodness of fit to a template Tsky.

  6. +
+

See Doclib:235 +file ‘data_quality.pdf’ for details on how the filters and tolerances work.

+
+
Parameters:
+
    +
  • data (ch_util.andata.CorrData object) – Data to run test on. +If andata object contains cross-correlations, +test is performed on auto-correlations only.

  • +
  • gain_tol (float) – Tolerance for digital gains filter. Flag channels whose +digital gain fractional absolute deviation +is above ‘gain_tol’ (default is 10.)

  • +
  • noise_tol (float) – Tolerance for radiometer noise filter. Flag channels whose +noise rms is higher than ‘noise_tol’ times the expected +from the radiometer equation. (default = 2.)

  • +
  • fit_tol (float) – Tolerance for the fit-to-Tsky filter. Flag channels whose +fractional rms for the ‘gain’ fit parameter is above +‘fit_tol’ (default = 0.02)

  • +
  • test_freq (integer) – Index of frequency to test. Default is 0.

  • +
  • noise_synced (boolean) – Use this to force the code to call (or not call) +ni_utils.process_synced_data(). If not given, +the code will determine if synchronized noise injection was on. +For acquisitions newer than 20150626T200540Z_pathfinder_corr, +noise injection info is written in the attributes. For older +acquisitions the function _check_ni() is called to determine +if noise injection is On.

  • +
  • inputs (list of CorrInputs, optional) – List of CorrInput objects describing the channels in this +dataset. This is optional, if not set (default), then it will +look the data up in the database. This option just allows +control of the database accesses.

  • +
  • res_plot (boolean, optional) – If True, a plot with all the tested channels and the +Tsky fits is generated. File naming is +plot_fit_{timestamp}.pdf

  • +
  • verbose (boolean, optional) – Print out useful output as the tests are run.

  • +
+
+
Returns:
+

    +
  • good_gains (list of int) –

    +
      +
    1. for channels that pass the gains filter, 0. otherwise.

    2. +
    +
  • +
  • good_noise (list of int) –

    +
      +
    1. for channels that pass the noise filter, 0. otherwise.

    2. +
    +
  • +
  • good_fit (list of int) –

    +

    1. for channels that pass the fit-to-Tsky filter, +0. otherwise.

    +
  • +
  • test_chans (list of int) – A list of the channels tested in the same order as they +appear in all the other lists returned

  • +
+

+
+
+

Examples

+

Run test on frequency index 3. data is an andata object:

+
>>> good_gains, good_noise, good_fit, test_chans = good_channels(data,test_freq=3)
+
+
+

And to create a plot of the results:

+
>>> good_gains, good_noise, good_fit, test_chans = good_channels(data,test_freq=3,res_plot=True)
+
+
+
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/_autosummary/ch_util.ephemeris.html b/docs/_autosummary/ch_util.ephemeris.html new file mode 100644 index 00000000..f89e637a --- /dev/null +++ b/docs/_autosummary/ch_util.ephemeris.html @@ -0,0 +1,822 @@ + + + + + + + ch_util.ephemeris — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

ch_util.ephemeris

+

Ephemeris routines

+

The precession of the Earth’s axis gives noticeable shifts in object +positions over the life time of CHIME. To minimise the effects of this we +need to be careful and consistent with our ephemeris calculations. +Historically Right Ascension has been given with respect to the Vernal +Equinox which has a significant (and unnecessary) precession in the origin of +the RA axis. To avoid this we use the new Celestial Intermediate Reference +System which does not suffer from this issue.

+

Practically this means that when calculating RA, DEC coordinates for a source +position at a given time you must be careful to obtain CIRS coordinates +(and not equinox based ones). Internally using ephemeris.object_coords does +exactly that for you, so for any lookup of coordinates you should use that on +your requested body.

+

Note that the actual coordinate positions of sources must be specified using +RA, DEC coordinates in ICRS (which is roughly equivalent to J2000). The +purpose of object_coords is to transform into new RA, DEC coordinates taking +into account the precession and nutation of the Earth’s polar axis since +then.

+

These kind of coordinate issues are tricky, confusing and hard to debug years +later, so if you’re unsure you are recommended to seek some advice.

+

Constants

+
+
CHIMELATITUDE

CHIME’s latitude [degrees].

+
+
CHIMELONGITUDE

CHIME’s longitude [degrees].

+
+
CHIMEALTITUDE

CHIME’s altitude [metres].

+
+
SIDEREAL_S

Number of SI seconds in a sidereal second [s/sidereal s]. You probably want +STELLAR_S instead.

+
+
STELLAR_S

Number of SI seconds in a stellar second [s/stellar s].

+
+
CasA

skyfield.starlib.Star representing Cassiopeia A.

+
+
CygA

skyfield.starlib.Star representing Cygnus A.

+
+
TauA

skyfield.starlib.Star representing Taurus A.

+
+
VirA

skyfield.starlib.Star representing Virgo A.

+
+
+
+

Telescope Instances

+
    +
  • chime

  • +
+
+
+

Ephemeris Functions

+ +
+
+

Time Utilities

+
    +
  • ensure_unix()

  • +
  • chime_local_datetime()

  • +
  • unix_to_datetime()

  • +
  • datetime_to_unix()

  • +
  • datetime_to_timestr()

  • +
  • timestr_to_datetime()

  • +
  • unix_to_skyfield_time()

  • +
  • skyfield_time_to_unix()

  • +
  • time_of_day()

  • +
  • csd()

  • +
  • csd_to_unix()

  • +
  • unix_to_csd()

  • +
  • parse_date()

  • +
+
+
+

Miscellaneous Utilities

+ +
+

Module Attributes

+ + + + + + + + + + + + + + + +

CasA

skyfield.starlib.Star representing Cassiopeia A.

CygA

skyfield.starlib.Star representing Cygnus A.

TauA

skyfield.starlib.Star representing Taurus A.

VirA

skyfield.starlib.Star representing Virgo A.

+

Functions

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Star_cirs(ra, dec, epoch)

Wrapper for skyfield.api.star that creates a position given CIRS coordinates observed from CHIME

bmxy_to_hadec(bmx, bmy)

Convert CHIME/FRB beam-model XY coordinates to CIRS hour angle and declination.

chime_local_datetime(*args)

Create a datetime.datetime object in Canada/Pacific timezone.

cirs_radec(body[, date, deg, obs])

Converts a Skyfield body in CIRS coordinates at a given epoch to ICRS coordinates observed from CHIME

galt_pointing_model_dec(ha_in, dec_in[, b])

Calculate pointing correction in declination for the Galt Telescope See description of the pointing model by Lewis Knee CHIME document library 754 https://bao.chimenet.ca/doc/documents/754

galt_pointing_model_ha(ha_in, dec_in[, a])

Calculate pointing correction in hour angle for the Galt Telescope See description of the pointing model by Lewis Knee CHIME document library 754 https://bao.chimenet.ca/doc/documents/754

get_doppler_shifted_freq(*args, **kwargs)

Deprecated.

get_range_rate(source, date[, obs])

Calculate rate at which distance between observer and source changes.

get_source_dictionary(*args)

Returns a dictionary containing skyfield.starlib.Star objects for common radio point sources.

hadec_to_bmxy(ha_cirs, dec_cirs)

Convert CIRS hour angle and declination to CHIME/FRB beam-model XY coordinates.

lunar_rising(start_time[, end_time, obs])

Find the Lunar risings between two times for CHIME.

lunar_setting(start_time[, end_time, obs])

Find the Lunar settings between two times for CHIME.

lunar_transit(start_time[, end_time, obs])

Find the Lunar transits between two times for CHIME.

object_coords(body[, date, deg, obs])

Calculates the RA and DEC of the source.

parse_date(datestring)

Convert date string to a datetime object.

peak_RA(body[, date, deg])

Calculates the RA where a source is expected to peak in the beam.

solar_rising(start_time[, end_time, obs])

Find the Solar risings between two times for CHIME.

solar_setting(start_time[, end_time, obs])

Find the Solar settings between two times for CHIME.

solar_transit(start_time[, end_time, obs])

Find the Solar transits between two times for CHIME.

transit_RA(time)

No longer supported.

utc_lst_to_mjd(datestring, lst[, obs])

Convert datetime string and LST to corresponding modified Julian Day

+
+
+ch_util.ephemeris.CasA = Star(ra=350.86642, dec=58.81178, names=('CAS_A',), epoch=2451545.0)
+

skyfield.starlib.Star representing Cassiopeia A.

+
+ +
+
+ch_util.ephemeris.CygA = Star(ra=299.86815, dec=40.73392, names=('CYG_A',), epoch=2451545.0)
+

skyfield.starlib.Star representing Cygnus A.

+
+ +
+
+ch_util.ephemeris.Star_cirs(ra, dec, epoch)[source]
+

Wrapper for skyfield.api.star that creates a position given CIRS +coordinates observed from CHIME

+
+
Parameters:
+
    +
  • ra (skyfield.api.Angle) – RA and dec of the source in CIRS coordinates

  • +
  • dec (skyfield.api.Angle) – RA and dec of the source in CIRS coordinates

  • +
  • epoch (skyfield.api.Time) – Time of the observation

  • +
+
+
Returns:
+

body – Star object in ICRS coordinates

+
+
Return type:
+

skyfield.api.Star

+
+
+
+ +
+
+ch_util.ephemeris.TauA = Star(ra=83.63321, dec=22.01446, names=('TAU_A',), epoch=2451545.0)
+

skyfield.starlib.Star representing Taurus A.

+
+ +
+
+ch_util.ephemeris.VirA = Star(ra=187.70593, dec=12.39112, names=('VIR_A',), epoch=2451545.0)
+

skyfield.starlib.Star representing Virgo A.

+
+ +
+
+ch_util.ephemeris.bmxy_to_hadec(bmx, bmy)[source]
+

Convert CHIME/FRB beam-model XY coordinates to CIRS hour angle and declination.

+
+
Parameters:
+
+
+
Returns:
+

    +
  • ha_cirs (array_like) – The CIRS Hour Angle in degrees.

  • +
  • dec_cirs (array_like) – The CIRS Declination in degrees.

  • +
+

+
+
+
+ +
+
+ch_util.ephemeris.chime_local_datetime(*args)[source]
+

Create a datetime.datetime object in Canada/Pacific timezone.

+
+
Parameters:
+

*args – Any valid arguments to the constructor of datetime.datetime +except tzinfo. Local date and time at CHIME.

+
+
Returns:
+

dt – Timezone naive date and time but converted to UTC.

+
+
Return type:
+

datetime.datetime

+
+
+
+ +
+
+ch_util.ephemeris.cirs_radec(body, date=None, deg=False, obs=<caput.time.Observer object>)[source]
+

Converts a Skyfield body in CIRS coordinates at a given epoch to +ICRS coordinates observed from CHIME

+
+
Parameters:
+

body (skyfield.api.Star) – Skyfield Star object with positions in CIRS coordinates.

+
+
Returns:
+

new_body – Skyfield Star object with positions in ICRS coordinates

+
+
Return type:
+

skyfield.api.Star

+
+
+
+ +
+
+ch_util.ephemeris.galt_pointing_model_dec(ha_in, dec_in, b=[1.081, 0.707, -0.076, 0.0, 0.0, 0.0, 0.0])[source]
+

Calculate pointing correction in declination for the Galt Telescope +See description of the pointing model by Lewis Knee CHIME document library +754 https://bao.chimenet.ca/doc/documents/754

+
+
Parameters:
+
    +
  • ha (Skyfield Angle objects) – Target hour angle and declination

  • +
  • dec (Skyfield Angle objects) – Target hour angle and declination

  • +
  • b (list of floats) – List of coefficients (in arcmin) for the pointing model +(NOTE: it is very unlikely that a user will want to change these +from the defaults, which are taken from the pointing model as of +2019-2-15)

  • +
+
+
Returns:
+

Angular offset in declination

+
+
Return type:
+

Skyfield Angle object

+
+
+
+ +
+
+ch_util.ephemeris.galt_pointing_model_ha(ha_in, dec_in, a=[-5.872, -0.5292, 5.458, -0.076, -0.707, 0.0, 0.0])[source]
+

Calculate pointing correction in hour angle for the Galt Telescope +See description of the pointing model by Lewis Knee CHIME document library +754 https://bao.chimenet.ca/doc/documents/754

+
+
Parameters:
+
    +
  • ha (Skyfield Angle objects) – Target hour angle and declination

  • +
  • dec (Skyfield Angle objects) – Target hour angle and declination

  • +
  • a (list of floats) – List of coefficients (in arcmin) for the pointing model +(NOTE: it is very unlikely that a user will want to change these +from the defaults, which are taken from the pointing model as of +2019-2-15)

  • +
+
+
Returns:
+

Angular offset in hour angle

+
+
Return type:
+

Skyfield Angle object

+
+
+
+ +
+
+ch_util.ephemeris.get_doppler_shifted_freq(*args, **kwargs)[source]
+

Deprecated. Use ch_util.hfbcat.get_doppler_shifted_freq.

+
+ +
+
+ch_util.ephemeris.get_range_rate(source: ~skyfield.starlib.Star, date: float | list, obs: ~caput.time.Observer = <caput.time.Observer object>) float | array[source]
+

Calculate rate at which distance between observer and source changes.

+
+
Parameters:
+
    +
  • source – Position(s) on the sky.

  • +
  • date – Unix time(s) for which to calculate range rate.

  • +
  • obs – An Observer instance to use. If not supplied use chime. For many +calculations changing from this default will make little difference.

  • +
+
+
Returns:
+

Rate (in m/s) at which the distance between the observer and source +changes (i.e., the velocity of observer in direction of source, but +positive for observer and source moving apart). If either source +or date contains multiple entries, range_rate will be an array. +Otherwise, range_rate will be a float.

+
+
Return type:
+

range_rate

+
+
+

Notes

+

Only one of source and date can contain multiple entries.

+

This routine uses a skyfield.positionlib.Apparent object +(rather than a skyfield.positionlib.Astrometric object) to find +the velocity of the observatory and the position of the source. This +accounts for the gravitational deflection and the aberration of light. +It is unclear if the latter should be taken into account for this Doppler +shift calculation, but its effects are negligible.

+
+ +
+
+ch_util.ephemeris.get_source_dictionary(*args)[source]
+

Returns a dictionary containing skyfield.starlib.Star +objects for common radio point sources. This is useful for +obtaining the skyfield representation of a source from a string +containing its name.

+
+
Parameters:
+

catalog_name (str) – Name of the catalog. This must be the basename of the json file +in the ch_util/catalogs directory. Can take multiple catalogs, +with the first catalog favoured for any overlapping sources.

+
+
Returns:
+

src_dict – Format is {‘SOURCE_NAME’: skyfield.starlib.Star, …}

+
+
Return type:
+

dictionary

+
+
+
+ +
+
+ch_util.ephemeris.hadec_to_bmxy(ha_cirs, dec_cirs)[source]
+

Convert CIRS hour angle and declination to CHIME/FRB beam-model XY coordinates.

+
+
Parameters:
+
    +
  • ha_cirs (array_like) – The CIRS Hour Angle in degrees.

  • +
  • dec_cirs (array_like) – The CIRS Declination in degrees.

  • +
+
+
Returns:
+

bmx, bmy – The CHIME/FRB beam model X and Y coordinates in degrees as defined in +the beam-model coordinate conventions: +https://chime-frb-open-data.github.io/beam-model/#coordinate-conventions

+
+
Return type:
+

array_like

+
+
+
+ +
+
+ch_util.ephemeris.lunar_rising(start_time, end_time=None, obs=<caput.time.Observer object>)[source]
+

Find the Lunar risings between two times for CHIME.

+
+
Parameters:
+
    +
  • start_time (float (UNIX time) or datetime) – Start time to find risings.

  • +
  • end_time (float (UNIX time) or datetime, optional) – End time for finding risings. If None default, search for 24 hours after +start time.

  • +
+
+
Returns:
+

rising_times – Array of rising times (in UNIX time).

+
+
Return type:
+

array_like

+
+
+
+ +
+
+ch_util.ephemeris.lunar_setting(start_time, end_time=None, obs=<caput.time.Observer object>)[source]
+

Find the Lunar settings between two times for CHIME.

+
+
Parameters:
+
    +
  • start_time (float (UNIX time) or datetime) – Start time to find settings.

  • +
  • end_time (float (UNIX time) or datetime, optional) – End time for finding settings. If None default, search for 24 hours +after start time.

  • +
+
+
Returns:
+

setting_times – Array of setting times (in UNIX time).

+
+
Return type:
+

array_like

+
+
+
+ +
+
+ch_util.ephemeris.lunar_transit(start_time, end_time=None, obs=<caput.time.Observer object>)[source]
+

Find the Lunar transits between two times for CHIME.

+
+
Parameters:
+
    +
  • start_time (float (UNIX time) or datetime) – Start time to find transits.

  • +
  • end_time (float (UNIX time) or datetime, optional) – End time for finding transits. If None default, search for 24 hours +after start time.

  • +
+
+
Returns:
+

transit_times – Array of transit times (in UNIX time).

+
+
Return type:
+

array_like

+
+
+
+ +
+
+ch_util.ephemeris.object_coords(body, date=None, deg=False, obs=<caput.time.Observer object>)[source]
+

Calculates the RA and DEC of the source.

+

Gives the ICRS coordinates if no date is given (=J2000), or if a date is +specified gives the CIRS coordinates at that epoch.

+

This also returns the apparent position, including aberration and +deflection by gravitational lensing. This shifts the positions by up to +20 arcseconds.

+
+
Parameters:
+
    +
  • body (skyfield source) – skyfield.starlib.Star or skyfield.vectorlib.VectorSum or +skyfield.jpllib.ChebyshevPosition body representing the source.

  • +
  • date (float) – Unix time at which to determine ra of source If None, use Jan 01 +2000.

  • +
  • deg (bool) – Return right ascension in degrees if True, radians if False (default).

  • +
  • obs (caput.time.Observer) – An observer instance to use. If not supplied use chime. For many +calculations changing from this default will make little difference.

  • +
+
+
Returns:
+

ra, dec – Position of the source.

+
+
Return type:
+

float

+
+
+
+ +
+
+ch_util.ephemeris.parse_date(datestring)[source]
+

Convert date string to a datetime object.

+
+
Parameters:
+

datestring (string) – Date as YYYYMMDD-AAA, where AAA is one of [UTC, PST, PDT]

+
+
Returns:
+

date – A python datetime object in UTC.

+
+
Return type:
+

datetime

+
+
+
+ +
+
+ch_util.ephemeris.peak_RA(body, date=None, deg=False)[source]
+

Calculates the RA where a source is expected to peak in the beam. +Note that this is not the same as the RA where the source is at +transit, since the pathfinder is rotated with respect to north.

+
+
Parameters:
+
    +
  • body (ephem.FixedBody) – skyfield.starlib.Star or skyfield.vectorlib.VectorSum or +skyfield.jpllib.ChebyshevPosition or Ephemeris body +representing the source.

  • +
  • date (float) – Unix time at which to determine ra of source +If None, use Jan 01 2000. +Ignored if body is not a skyfield object

  • +
  • deg (bool) – Return right ascension in degrees if True, +radians if False (default).

  • +
+
+
Returns:
+

peak_ra – RA when the transiting source peaks.

+
+
Return type:
+

float

+
+
+
+ +
+
+ch_util.ephemeris.solar_rising(start_time, end_time=None, obs=<caput.time.Observer object>)[source]
+

Find the Solar risings between two times for CHIME.

+
+
Parameters:
+
    +
  • start_time (float (UNIX time) or datetime) – Start time to find risings.

  • +
  • end_time (float (UNIX time) or datetime, optional) – End time for finding risings. If None default, search for 24 hours +after start time.

  • +
+
+
Returns:
+

rising_times – Array of rising times (in UNIX time).

+
+
Return type:
+

array_like

+
+
+
+ +
+
+ch_util.ephemeris.solar_setting(start_time, end_time=None, obs=<caput.time.Observer object>)[source]
+

Find the Solar settings between two times for CHIME.

+
+
Parameters:
+
    +
  • start_time (float (UNIX time) or datetime) – Start time to find settings.

  • +
  • end_time (float (UNIX time) or datetime, optional) – End time for finding settings. If None default, search for 24 hours +after start time.

  • +
+
+
Returns:
+

setting_times – Array of setting times (in UNIX time).

+
+
Return type:
+

array_like

+
+
+
+ +
+
+ch_util.ephemeris.solar_transit(start_time, end_time=None, obs=<caput.time.Observer object>)[source]
+

Find the Solar transits between two times for CHIME.

+
+
Parameters:
+
    +
  • start_time (float (UNIX time) or datetime) – Start time to find transits.

  • +
  • end_time (float (UNIX time) or datetime, optional) – End time for finding transits. If None default, search for 24 hours +after start time.

  • +
+
+
Returns:
+

transit_times – Array of transit times (in UNIX time).

+
+
Return type:
+

array_like

+
+
+
+ +
+
+ch_util.ephemeris.transit_RA(time)[source]
+

No longer supported. Use lsa instead.

+
+ +
+
+ch_util.ephemeris.utc_lst_to_mjd(datestring, lst, obs=<caput.time.Observer object>)[source]
+

Convert datetime string and LST to corresponding modified Julian Day

+
+
Parameters:
+
    +
  • datestring (string) – Date as YYYYMMDD-AAA, where AAA is one of [UTC, PST, PDT]

  • +
  • lst (float) – Local sidereal time at DRAO (CHIME) in decimal hours

  • +
  • obs (caput.Observer object)

  • +
+
+
Returns:
+

mjd – Modified Julian Date corresponding to the given time.

+
+
Return type:
+

float

+
+
+
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/_autosummary/ch_util.finder.html b/docs/_autosummary/ch_util.finder.html new file mode 100644 index 00000000..8737844b --- /dev/null +++ b/docs/_autosummary/ch_util.finder.html @@ -0,0 +1,1282 @@ + + + + + + + ch_util.finder — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

ch_util.finder

+

Data Index Searcher for CHIME

+

Search routines for locating data within the CHIME data index.

+
+

Data tables

+
    +
  • DataFlag

  • +
  • DataFlagType

  • +
+

Exceptions

+ +
+
+

High Level Index Searcher

+ +
+
+

Routines

+ +
+

Functions

+ + + + + + +

files_in_range(acq, start_time, end_time, ...)

Get files for a given acquisition within a time range.

+

Classes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BaseDataInterval([iterable])

A single data index search result.

CalibrationGainDataInterval([iterable])

Derived class from BaseDataInterval for calibration gain data.

CorrDataInterval([iterable])

Derived class from BaseDataInterval for correlator data.

DataInterval

alias of CorrDataInterval

DataIntervalList([iterable])

A list of data index search results.

DigitalGainDataInterval([iterable])

Derived class from BaseDataInterval for digital gain data.

Finder([acqs, node_spoof])

High level searching of the CHIME data index.

FlagInputDataInterval([iterable])

Derived class from BaseDataInterval for flag input data.

HKDataInterval([iterable])

Derived class from BaseDataInterval for housekeeping data.

WeatherDataInterval([iterable])

Derived class from BaseDataInterval for weather data.

+

Exceptions

+ + + + + + +

DataFlagged

Raised when data is affected by a global flag.

+
+
+class ch_util.finder.BaseDataInterval(iterable=(), /)[source]
+

Bases: tuple

+

A single data index search result.

+

Just a normal python tuple with some helper methods. Instances are created +by calls to Finder.get_results().

+

A data interval as two elements: a list of filenames and a time range within +those files.

+

You should generally only use the classes derived from this one (i.e., +CorrDataInterval, etc.)

+
+
+as_loaded_data(**kwargs)[source]
+

Load data interval to memory as an andata.AnData instance.

+
+
Parameters:
+

datasets (list of strings) – Passed on to andata.AnData.from_acq_h5()

+
+
Returns:
+

data – Data interval loaded into memory.

+
+
Return type:
+

andata.AnData

+
+
+
+ +
+
+as_reader()[source]
+

Get data interval as an andata.Reader instance.

+

The andata.Reader is initialized with the filename list part +of the data interval then the time range part of the data interval is +used as an arguments to andata.Reader.select_time_range().

+
+
Returns:
+

reader

+
+
Return type:
+

andata.Reader

+
+
+
+ +
+ +
+
+class ch_util.finder.CalibrationGainDataInterval(iterable=(), /)[source]
+

Bases: BaseDataInterval

+

Derived class from BaseDataInterval for calibration gain data.

+
+ +
+
+class ch_util.finder.CorrDataInterval(iterable=(), /)[source]
+

Bases: BaseDataInterval

+

Derived class from BaseDataInterval for correlator data.

+
+
+as_loaded_data(prod_sel=None, freq_sel=None, datasets=None)[source]
+

Load data interval to memory as an andata.CorrData instance

+
+
Parameters:
+
    +
  • prod_sel (valid numpy index) – Passed on to andata.CorrData.from_acq_h5()

  • +
  • freq_sel (valid numpy index) – Passed on to andata.CorrData.from_acq_h5()

  • +
  • datasets (list of strings) – Passed on to andata.CorrData.from_acq_h5()

  • +
+
+
Returns:
+

data – Data interval loaded into memory.

+
+
Return type:
+

andata.CorrData

+
+
+
+ +
+ +
+
+exception ch_util.finder.DataFlagged[source]
+

Bases: CHIMEdbError

+

Raised when data is affected by a global flag.

+
+ +
+
+ch_util.finder.DataInterval
+

alias of CorrDataInterval

+
+ +
+
+class ch_util.finder.DataIntervalList(iterable=(), /)[source]
+

Bases: list

+

A list of data index search results.

+

Just a normal python list of DataInterval-derived objects with +some helper methods. Instances are created by calls to +Finder.get_results().

+
+
+iter_loaded_data(**kwargs)[source]
+

Iterate over data intervals loading as andata.AnData.

+
+
Parameters:
+

**kwargs (argument list) – Pass any parameters accepted by the +BaseDataInverval-derived class that you are using.

+
+
Returns:
+

Iterator over data intervals loaded into memory as +andata.BaseData-derived instances.

+
+
Return type:
+

loaded_data_iterator

+
+
+

Examples

+

Use this method to loop over data loaded into memory.

+
>>> for data in interval_list.iter_loaded_data():
+...     pass
+
+
+

Data is loaded into memory on each iteration. To immediately load all +data into memory, initialize a list using the iterator:

+
>>> loaded_data_list = list(interval_list.iter_loaded_data())
+
+
+
+ +
+
+iter_reader()[source]
+

Iterate over data intervals converting to andata.Reader.

+
+
Returns:
+

Iterator over data intervals as andata.Reader instances.

+
+
Return type:
+

reader_iterator

+
+
+
+ +
+ +
+
+class ch_util.finder.DigitalGainDataInterval(iterable=(), /)[source]
+

Bases: BaseDataInterval

+

Derived class from BaseDataInterval for digital gain data.

+
+ +
+
+class ch_util.finder.Finder(acqs=(), node_spoof=None)[source]
+

Bases: object

+

High level searching of the CHIME data index.

+

This class gives a convenient way to search and filter data acquisitions +as well as time ranges of data within acquisitions. Search results +constitute a list of files within an acquisition as well as a time range for +the data within these files. Convenient methods are provided for loading +the precise time range of constituting a search result.

+

This is intended to make the most common types of searches of CHIME data as +convenient as possible. However for very complex searches, it may be +necessary to resort to the lower level interface.

+

Searching the index

+

There are four ways that a search can be modified which may be combined in +any way.

+
    +
  1. You can restrict the types of acquisition that are under +consideration, using methods whose names begin with only_. +In this way, one can consider only, say, housekeeping acquisitions.

  2. +
  3. The second is to adjust the total time range under consideration. +This is achieved by assigning to time_range or calling +methods beginning with set_time_range_. The total time range affects +acquisitions under consideration as well as the data time ranges within +the acquisitions. Subsequent changes to the total time range under +consideration may only become more restrictive.

  4. +
  5. The data index may also be filtered by acquisition using methods whose +names begin with filter_acqs. Again subsequent filtering are always +combined to become more restrictive. The attribute acqs +lists the acquisitions currently included in the search for convenience +when searching interactively.

  6. +
  7. Time intervals within acquisitions are added using methods with names +beginning with include_. Time intervals are defined in the +time_intervals attribute, and are inclusive (you can +add as many as you want).

  8. +
  9. Finally, upon calling :meth:get_results or :meth:get_results_acq, +one can pass an arbitrary condition on individual files, thereby +returning only a subset of files from each acquisition.

  10. +
+

Getting results

+

Results of the search can be retrieved using methods whose names begin with +get_results An individual search result is constituted of a list of file +names and a time interval within these files. These can easily loaded into +memory using helper functions (see BaseDataInterval and +DataIntervalList).

+
+
Parameters:
+
    +
  • acqs (list of chimedb.data_index.ArchiveAcq objects) – Acquisitions to initially include in data search. Default is to search +all acquisitions.

  • +
  • node_spoof (dictionary) – Normally, the DB will be queried to find which nodes are mounted on your +host. If you are on a machine that is cross-mounted, though, you can +enter a dictionary of “node_name”: “mnt_root” pairs, specifying the +nodes to search and where they are mounted on your host.

  • +
+
+
+

Examples

+

To find all the correlator data between two times.

+
>>> from ch_util import finder
+>>> from datetime import datetime
+>>> f = finder.Finder()
+>>> f.only_corr()
+>>> f.set_time_range(datetime(2014,02,24), datetime(2014,02,25))
+>>> f.print_results_summary()
+interval | acquisition | offset from start (s) | length (s) | N files
+   1  |  20140219T145849Z_abbot_corr  |   378053.1  |    86400.0  |  25
+   2  |  20140224T051212Z_stone_corr  |        0.0  |    67653.9  |  19
+Total 154053.858720 seconds of data.
+
+
+

Search for transits of a given source.

+
>>> from ch_util import ephemeris
+>>> f.include_transits(ephemeris.CasA, time_delta=3600)
+>>> f.print_results_summary()
+interval | acquisition | offset from start (s) | length (s) | N files
+   1  |  20140219T145849Z_abbot_corr  |   452087.2  |     3600.0  |  2
+   2  |  20140224T051212Z_stone_corr  |    55288.0  |     3600.0  |  2
+Total 7200.000000 seconds of data.
+
+
+

To read the data,

+
>>> from ch_util import andata
+>>> results_list = f.get_results()
+>>> # Pick result number 1
+>>> result = results_list[0]
+>>> # Pick product number 0 (autocorrelation)
+>>> data = result.as_loaded_data(prod_sel=0)
+>>> print data.vis.shape
+(1024, 1, 360)
+
+
+

More intricate filters on the acquisitions are possible.

+
>>> import chimedb.data_index as di
+>>> f = finder.Finder()
+>>> # Find ALL 10ms cadence data correlated by 'stone' with 8 channels.
+>>> f.filter_acqs((di.CorrAcqInfo.integration < 0.011)
+...               & (di.CorrAcqInfo.integration > 0.009)
+...               & (di.CorrAcqInfo.nfreq == 1024)
+...               & (di.CorrAcqInfo.nprod == 36)
+...               & (di.ArchiveInst.name == 'stone'))
+>>> f.print_results_summary()
+interval | acquisition | offset from start (s) | length (s) | N files
+   1  |  20140211T020307Z_stone_corr  |        0.0  |      391.8  |  108
+   2  |  20140128T135105Z_stone_corr  |        0.0  |     4165.2  |  104
+   3  |  20131208T070336Z_stone_corr  |        0.0  |     1429.8  |  377
+   4  |  20140212T014603Z_stone_corr  |        0.0  |     2424.4  |  660
+   5  |  20131210T060233Z_stone_corr  |        0.0  |     1875.3  |  511
+   6  |  20140210T021023Z_stone_corr  |        0.0  |      874.1  |  240
+Total 11160.663510 seconds of data.
+
+
+

Here is an example that uses node spoofing and also filters files within +acquisitions to include only LNA housekeeping files:

+
>>> f = finder.Finder(node_spoof = {"gong" : "/mnt/gong/archive",
+                                        "suzu" : "/mnt/suzu/hk_data"})
+>>> f.only_hk()
+>>> f.set_time_range(datetime(2014, 9, 1), datetime(2014, 10, 10))
+>>> f.print_results_summary()
+# | acquisition                          |start (s)| len (s) |files |     MB
+0 | 20140830T005410Z_ben_hk              |  169549 |  419873 |   47 |   2093
+1 | 20140905T203905Z_ben_hk              |       0 |   16969 |    2 |      0
+2 | 20140908T153116Z_ben_hk              |       0 | 1116260 |   56 |      4
+3 | 20141009T222415Z_ben_hk              |       0 |    5745 |    2 |      0
+>>> res = f.get_results(file_condition = (di.HKFileInfo.atmel_name == "LNA"))
+>>> for r in res:
+...   print "No. files: %d" % (len(r[0]))
+No. files: 8
+No. files: 1
+No. files: 19
+No. files: 1
+>>> data = res[0].as_loaded_data()
+>>> for m in data.mux:
+...   print "Mux %d: %s", (m, data.chan(m))
+Mux 0: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
+Mux 1: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
+Mux 2: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
+>>> print "Here are the raw data for Mux 1, Channel 14:", data.tod(14, 1)
+Here are the raw data for Mux 1, Channel 14: [ 1744.19091797  1766.34472656  1771.03356934 ...,  1928.61279297 1938.90075684  1945.53491211]
+
+
+

In the above example, the restriction to LNA housekeeping could also have +been accomplished with the convenience method Finder.set_hk_input():

+
>>> f.set_hk_input("LNA")
+>>> res = f.get_results()
+
+
+
+
+accept_all_global_flags()[source]
+

Set global flag behaviour to accept all data.

+
+ +
+
+property acqs
+

Acquisitions remaining in this search.

+
+
Returns:
+

acqs

+
+
Return type:
+

list of chimedb.data_index.ArchiveAcq objects

+
+
+
+ +
+
+property data_flag_types
+

Types of DataFlag to exclude from results.

+
+ +
+
+exclude_RA_interval(start_RA, end_RA)[source]
+

Add time intervals to exclude passings of given right RA +intervals

+
+
Parameters:
+
    +
  • start_RA (float) – Starting right ascension in degrees.

  • +
  • end_RA (float) – Ending right ascension in degrees.

  • +
+
+
+

Examples

+

Look under include_RA_interval for very similar example.

+
+ +
+
+exclude_data_flag_type(flag_type)[source]
+

Exclude times that overlap with DataFlags of this type.

+
+
Parameters:
+

flag_type (string or list of string) – Name of DataFlagType(s) to exclude from results, e.g. “rain”.

+
+
+
+ +
+
+exclude_daytime()[source]
+

Add time intervals to exclude all day time data.

+
+ +
+
+exclude_global_flag(flag)[source]
+

Update time_intervals to exclude a global flag.

+
+
Parameters:
+

flag (integer or string) – Global flag ID or name, e.g. “run_pass1_a”, or 65.

+
+
+
+

See also

+

Look

+
+

Notes

+

Global flag ID numbers, names, and descriptions are listed at +http://bao.phas.ubc.ca/layout/event.php?filt_event_type_id=7

+
+ +
+
+exclude_nighttime()[source]
+

Add time intervals to exclude all night time data.

+
+ +
+
+exclude_sun(time_delta=4000.0, time_delta_rise_set=4000.0)[source]
+

Add time intervals to exclude sunrise, sunset, and sun transit.

+
+
Parameters:
+
    +
  • time_delta (float) – Total amount of time to exclude surrounding the sun transit in +seconds. Default is to use 4000.0 seconds.

  • +
  • time_delta_rise_set (float) – Total amount of time to exclude after sunrise and before sunset +in seconds. Default is to use 4000.0 seconds.

  • +
+
+
+
+ +
+
+exclude_time_interval(start_time, end_time)[source]
+

Exclude a time interval.

+

Examples

+
>>> from ch_util import finder
+>>> from datetime import datetime
+>>> f = finder.Finder()
+>>> f.set_time_range(datetime(2014,04,04), datetime(2014,04,14))
+>>> # f.print_results_summary() will show all the files in this time range
+>>> # Now want to exclude all data from 04, 10 to 04, 11
+>>> f.exclude_time_interval(datetime(2014,04,10),datetime(2014,04,11))
+>>> f.print_results_summary()
+interval | acquisition | offset from start (s) | length (s) | N files
+   1  |  20140330T102505Z_abbot_corr  |   394484.2  |   231900.8  |  65
+   2  |  20140403T152314Z_blanchard_corr  |    30988.4  |   309649.3  |  86
+   3  |  20140408T222844Z_abbot_corr  |        0.0  |    75589.3  |  21
+   4  |  20140409T184530Z_blanchard_corr  |        0.0  |     3795.0  |  2
+   5  |  20140409T165603Z_blanchard_corr  |        0.0  |     4952.7  |  2
+   6  |  20140411T003404Z_blanchard_corr  |        0.0  |   161606.5  |  45
+   7  |  20140411T000920Z_blanchard_corr  |        0.0  |     1080.4  |  36
+   8  |  20140413T002319Z_blanchard_corr  |        0.0  |    84981.7  |  24
+Total 873555.739000 seconds of data.
+
+
+
+ +
+
+exclude_transits(body, time_delta)[source]
+

Add time intervals to exclude transits for given celestial body.

+
+
Parameters:
+
    +
  • body (ephem.Body or float) – Transiting celestial body. If a float, interpret as a right +ascension in degrees.

  • +
  • time_delta (float) – Total amount of time to include surrounding the transit in +seconds. Default is to use twice the value of +min_interval.

  • +
+
+
+

Examples

+
>>> from ch_util import finder
+>>> from datetime import datetime
+>>> f = finder.Finder()
+>>> f.set_time_range(datetime(2014,02,20), datetime(2014,02,22))
+>>> import ephem
+>>> f.exclude_transits(ephem.Sun(), time_delta=43200)
+>>> f.print_results_summary()
+interval | acquisition | offset from start (s) | length (s) | N files
+   1  |  20140219T145849Z_abbot_corr  |    32453.1  |    51128.4  |  15
+   2  |  20140219T145849Z_abbot_corr  |   126781.5  |    43193.0  |  13
+   3  |  20140219T145523Z_stone_corr  |    32662.5  |    18126.9  |  6
+   4  |  20140220T213252Z_stone_corr  |    16740.8  |    43193.0  |  13
+Total 155641.231275 seconds of data.
+
+
+
+ +
+
+filter_acqs(condition)[source]
+

Filter the acquisitions included in this search.

+
+
Parameters:
+

condition (peewee comparison) – Condition on any on chimedb.data_index.ArchiveAcq or any +class joined to chimedb.data_index.ArchiveAcq: using the +syntax from the peewee module [1].

+
+
+

Examples

+
>>> from ch_util import finder
+>>> import chimedb.data_index as di
+>>> f = finder.Finder()
+>>> f.filter_acqs(di.ArchiveInst.name == 'stone')
+>>> f.filter_acqs((di.AcqType == 'corr') & (di.CorrAcqInfo.nprod == 36))
+
+
+ +

References

+ +
+ +
+
+filter_acqs_by_files(condition)[source]
+

Filter the acquisitions by the properties of its files.

+

Because each acquisition has many files, this filter should be +significantly slower than Finder.filter_acqs().

+
+
Parameters:
+

condition (peewee comparison) – Condition on any on chimedb.data_index.ArchiveAcq, +chimedb.data_index.ArchiveFile or any class joined to +chimedb.data_index.ArchiveFile using the syntax from the +peewee module [2].

+
+
+
+

See also

+

Finder.filter_acqs()

+
+

Examples

+

References

+ +
+ +
+
+get_results(file_condition=None)[source]
+

Get all search results.

+
+
Parameters:
+

file_condition (peewee comparison) – Any additional condition for filtering the files within the +acquisition. In general, this should be a filter on one of the file +information tables, e.g., chimedb.data_index.CorrFileInfo.

+
+
Returns:
+

    +
  • interval_list (DataIntervalList) – Search results.

  • +
  • cond (peewee comparison) – Any extra filters, particularly filters on individual files.

  • +
+

+
+
+
+ +
+
+get_results_acq(acq_ind, file_condition=None)[source]
+

Get search results restricted to a given acquisition.

+
+
Parameters:
+
    +
  • acq_ind (int) – Index of Finder.acqs for the desired acquisition.

  • +
  • file_condition (peewee comparison) – Any additional condition for filtering the files within the +acquisition. In general, this should be a filter on one of the file +information tables, e.g., CorrFileInfo.

  • +
+
+
Returns:
+

interval_list – Search results.

+
+
Return type:
+

DataIntervalList

+
+
+
+ +
+
+property global_flag_mode
+

Global flag behaviour mode.

+

Defines how global flags are treated when finding data. There are three +severities of global flag: comment, warning, and severe. There are +four possible behaviours when a search result overlaps a global flag, +represented by module constants:

+
+
GF_REJECT:
+

Reject any data overlapping flag silently.

+
+
GF_RAISE:
+

Raise an exception when retrieving data intervals.

+
+
GF_WARN:
+

Send a warning when retrieving data intervals but proceed.

+
+
GF_ACCEPT:
+

Accept the data silently, ignoring the flag.

+
+
+

The behaviour for all three severities is represented by a dictionary. +If no mode is set, then the default behaviour is +{‘comment’ : GF_ACCEPT, ‘warning’ : GF_WARN, ‘severe’ : GF_REJECT}.

+

This is modified using Finder.update_global_flag_mode().

+
+
Returns:
+

global_flag_mode – Specifies finder behaviour.

+
+
Return type:
+

dictionary with keys ‘comment’, ‘warning’, ‘severe’.

+
+
+
+ +
+
+include_26m_obs(source, require_quality=True)[source]
+

Add time intervals to include 26m observations of a source.

+
+
Parameters:
+
    +
  • source (string) – Source observed. Has to match name on database exactly.

  • +
  • require_quality (bool (default: True)) – Require the quality flag to be zero (ie that the 26 m +pointing is trustworthy) or None

  • +
+
+
+

Examples

+
>>> from ch_util import finder
+>>> from datetime import datetime
+>>> f = finder.Finder()
+>>> f.only_corr()
+>>> f.set_time_range(datetime(2017,8,1,10), datetime(2017,8,2))
+>>> f.filter_acqs((di.ArchiveInst.name == 'pathfinder'))
+>>> f.include_26m_obs('CasA')
+>>> f.print_results_summary()
+   # | acquisition                          |start (s)| len (s) |files |     MB
+   0 | 20170801T063349Z_pathfinder_corr     |   12337 |   11350 |    2 | 153499
+   1 | 20170801T131035Z_pathfinder_corr     |       0 |    6922 |    1 |  75911
+Total  18271 seconds, 229410 MB of data.
+
+
+
+ +
+
+include_RA_interval(start_RA, end_RA)[source]
+

Add time intervals to include passings of given right RA intervals

+
+
Parameters:
+
    +
  • start_RA (float) – Starting right ascension in degrees.

  • +
  • end_RA (float) – Ending right ascension in degrees.

  • +
+
+
+

Examples

+
>>> from ch_util import finder
+>>> from datetime import datetime
+>>> f = finder.Finder()
+>>> f.set_time_range(datetime(2014,04,04), datetime(2014,04,14))
+>>> f.include_RA_interval(90., 180.)
+>>> f.print_results_summary()
+interval | acquisition | offset from start (s) | length (s) | N files
+   1  |  20140330T102505Z_abbot_corr  |   398689.9  |    21541.0  |  7
+   2  |  20140330T102505Z_abbot_corr  |   484854.0  |    21541.0  |  7
+   3  |  20140330T102505Z_abbot_corr  |   571018.1  |    21541.0  |  7
+   4  |  20140403T152314Z_blanchard_corr  |    35194.1  |    21541.0  |  7
+   5  |  20140403T152314Z_blanchard_corr  |   121358.2  |    21541.0  |  7
+   6  |  20140403T152314Z_blanchard_corr  |   207522.3  |    21541.0  |  7
+   7  |  20140403T152314Z_blanchard_corr  |   293686.4  |    21541.0  |  6
+   8  |  20140408T222844Z_abbot_corr  |     8491.2  |    21541.0  |  7
+   9  |  20140410T003326Z_blanchard_corr  |      754.5  |     1419.2  |  48
+  10  |  20140410T031023Z_blanchard_corr  |        0.0  |     1376.5  |  46
+  11  |  20140410T014136Z_blanchard_corr  |        0.0  |     2347.4  |  78
+  12  |  20140411T003404Z_blanchard_corr  |      397.4  |    21541.0  |  7
+  13  |  20140411T003404Z_blanchard_corr  |    86561.5  |    21541.0  |  7
+  14  |  20140413T002319Z_blanchard_corr  |      664.1  |    21541.0  |  7
+Total 242094.394565 seconds of data.
+
+
+
+ +
+
+include_global_flag(flag)[source]
+

Update time_intervals to include a global flag.

+
+
Parameters:
+

flag (integer or string) – Global flag ID or name, e.g. “run_pass1_a”, or 11292.

+
+
+

Notes

+

Global flag ID numbers, names, and descriptions are listed at +http://bao.phas.ubc.ca/layout/event.php?filt_event_type_id=7

+
+ +
+
+include_time_interval(start_time, end_time)[source]
+

Include a time interval.

+

Examples

+

First a certain layout is chosen

+
>>> from ch_util import finder
+>>> f = finder.Finder()
+>>> f.set_time_range_layout(26)
+>>> f.print_results_summary()
+interval | acquisition | offset from start (s) | length (s) | N files
+   1  |  20140311T192616Z_abbot_corr  |    16412.8  |      667.1  |  1
+   2  |  20140312T001123Z_abbot_corr  |        0.0  |     1150.5  |  314
+   3  |  20140312T003054Z_abbot_corr  |        0.0  |    79889.4  |  23
+   4  |  20140312T224940Z_abbot_corr  |        0.0  |      591.0  |  4
+   5  |  20140312T230108Z_abbot_corr  |        0.0  |   171909.0  |  48
+   6  |  20140315T014330Z_abbot_corr  |        0.0  |    35119.7  |  10
+   7  |  20140318T154959Z_abbot_corr  |        0.0  |    51739.6  |  15
+   8  |  20140320T120437Z_abbot_corr  |        0.0  |   186688.6  |  52
+   9  |  20140325T174231Z_abbot_corr  |        0.0  |    86019.3  |  24
+  10  |  20140326T175440Z_abbot_corr  |        0.0  |   286487.7  |  80
+  11  |  20140330T064125Z_abbot_corr  |        0.0  |     2998.6  |  1590
+  12  |  20140330T102505Z_abbot_corr  |        0.0  |   626385.0  |  174
+  13  |  20140403T000057Z_blanchard_corr  |        0.0  |    54912.3  |  16
+  14  |  20140403T152314Z_blanchard_corr  |        0.0  |   340637.8  |  94
+  15  |  20140408T222844Z_abbot_corr  |        0.0  |    75589.3  |  21
+  16  |  20140409T184530Z_blanchard_corr  |        0.0  |     3795.0  |  2
+  17  |  20140410T003326Z_blanchard_corr  |        0.0  |     2173.7  |  72
+  18  |  20140409T165603Z_blanchard_corr  |        0.0  |     4952.7  |  2
+Total 2011706.304970 seconds of data.
+
+
+

To find a specific day in that layout choose the functionality +include_time_interval

+
>>> from datetime import datetime
+>>> f.include_time_interval(datetime(2014,04,8), datetime(2014,04,9))
+>>> f.print_results_summary()
+interval | acquisition | offset from start (s) | length (s) | N files
+   1  |  20140408T222844Z_abbot_corr  |        0.0  |     5465.1  |  2
+Total 5465.059670 seconds of data.
+
+
+
+ +
+
+include_transits(body, time_delta=None)[source]
+

Add time intervals to include transits for given celestial body.

+
+
Parameters:
+
    +
  • body (ephem.Body or float) – Transiting celestial body. If a float, interpret as a right +ascension in degrees.

  • +
  • time_delta (float) – Total amount of time to include surrounding the transit in +seconds. Default is to use twice the value of +min_interval.

  • +
+
+
+

Examples

+
>>> from ch_util import (finder, ephemeris)
+>>> from datetime import datetime
+>>> f = finder.Finder()
+>>> f.set_time_range(datetime(2014,02,20), datetime(2014,02,22))
+>>> f.include_transits(ephemeris.CasA, time_delta=3600)
+>>> f.print_results_summary()
+interval | acquisition | offset from start (s) | length (s) | N files
+   1  |  20140219T145849Z_abbot_corr  |   107430.9  |     3600.0  |  2
+   2  |  20140219T145849Z_abbot_corr  |   193595.0  |     3600.0  |  2
+   3  |  20140220T213252Z_stone_corr  |        0.0  |      990.2  |  1
+   4  |  20140220T213252Z_stone_corr  |    83554.3  |     3600.0  |  2
+Total 11790.181012 seconds of data.
+
+
+
+ +
+
+property min_interval
+

Minimum length of a block of data to be considered.

+

This can be set to any number. The default is 240 seconds.

+
+
Returns:
+

min_interval – Length of time in seconds.

+
+
Return type:
+

float

+
+
+
+ +
+
+classmethod offline(acqs=())[source]
+

Initialize Finder when not working on a storage node.

+

Normally only data that is available on the present host is searched, +and as such Finder can’t be used to browse the index when you +don’t have access to the actual data. Initializing using this method +spoofs the ‘gong’ and ‘niedermayer’ storage nodes (which should have a +full copy of the archive) such that the full archive can be searched +via the data index.

+
+ +
+
+only_chime_weather()[source]
+

Only include chime weather acquisitions in this search. +This excludes the old format mingun-weather.

+
+ +
+
+only_corr()[source]
+

Only include correlator acquisitions in this search.

+
+ +
+
+only_digitalgain()[source]
+

Only include digital gain data in this search

+
+ +
+
+only_flaginput()[source]
+

Only include input flag data in this search

+
+ +
+
+only_gain()[source]
+

Only include calibration gain data in this search

+
+ +
+
+only_hfb()[source]
+

Only include HFB acquisitions in this search.

+
+ +
+
+only_hk()[source]
+

Only include housekeeping acquisitions in this search.

+
+ +
+
+only_hkp()[source]
+

Only include Prometheus housekeeping data in this search

+
+ +
+
+only_rawadc()[source]
+

Only include raw ADC acquisitions in this search.

+
+ +
+
+only_weather()[source]
+

Only include weather acquisitions in this search.

+
+ +
+
+print_acq_info()[source]
+

Print the acquisitions included in this search and their properties.

+

This method is convenient when searching the data index interactively +and you want to see what acquisitions remain after applying filters or +restricting the time range.

+ +
+ +
+
+print_results_summary()[source]
+

Print a summary of the search results.

+
+ +
+
+set_hk_input(name)[source]
+

Restrict files to only one HK input type.

+

This is a shortcut for specifying +file_condition = (chimedb.data_index.HKFileInfo.atmel_name == name) +in get_results_acq(). Instead, one can simply call this function +with name as, e.g., “LNA”, “FLA”, and calls to +get_results_acq() will be appropriately restricted.

+
+
Parameters:
+

name (str) – The name of the housekeeping input.

+
+
+
+ +
+
+set_time_range(start_time=None, end_time=None)[source]
+

Restrict the time range of the search.

+

This method updates the time_range property and also +excludes any acquisitions that do not overlap with the new range. This +method always narrows the time range under consideration, never expands +it.

+
+
Parameters:
+
    +
  • start_time (float or datetime.datetime) – Unix/POSIX time or UTC start of desired time range. Optional.

  • +
  • end_time (float or datetime.datetime) – Unix/POSIX time or UTC end of desired time range. Optional.

  • +
+
+
+
+ +
+
+set_time_range_global_flag(flag)[source]
+

Set time range to correspond to a global flag.

+
+
Parameters:
+

flag (integer or string) – Global flag ID or name, e.g. “run_pass1_a”, or 11292.

+
+
+

Notes

+

Global flag ID numbers, names, and descriptions are listed at +http://bao.phas.ubc.ca/layout/event.php?filt_event_type_id=7

+
+ +
+
+set_time_range_season(year=None, season=None)[source]
+

Set the time range as a specific part of a given year.

+

NOT YET IMPLEMENTED

+
+
Parameters:
+
    +
  • year (integer) – Calendar year

  • +
  • season (string) – Month name (3 letter abbreviations are acceptable) or one of +‘winter’, ‘spring’, ‘summer’, or ‘fall’.

  • +
+
+
+
+ +
+
+property time_exclusions
+

Periods in time to be excluded.

+
+
Returns:
+

time_exclusions – Each entry is the Unix/POSIX beginning and end of the time interval +to be excluded.

+
+
Return type:
+

list of pairs of floats

+
+
+
+ +
+
+property time_intervals
+

Periods in time to be included.

+

Periods are combined with OR unless list is empty, in which case no +filtering is performed.

+
+
Returns:
+

time_intervals – Each entry is the Unix/POSIX beginning and end of the time interval +to be included.

+
+
Return type:
+

list of pairs of floats

+
+
+
+ +
+
+property time_range
+

Time range to be included in search.

+

Data files and acquisitions that do not overlap with this range are +excluded. Assigning to this is equivalent to calling +set_time_range().

+
+
Returns:
+

time_range – Unix/POSIX beginning and end of the time range.

+
+
Return type:
+

tuple of 2 floats

+
+
+
+ +
+
+update_global_flag_mode(comment=None, warning=None, severe=None)[source]
+

Update Finder.global_flag_mode, the global flag mode.

+
+
Parameters:
+
    +
  • comment (One of GF_REJECT, GF_RAISE, GF_WARN, or GF_ACCEPT.)

  • +
  • warning (One of GF_REJECT, GF_RAISE, GF_WARN, or GF_ACCEPT.)

  • +
  • severe (One of GF_REJECT, GF_RAISE, GF_WARN, or GF_ACCEPT.)

  • +
+
+
+
+ +
+ +
+
+class ch_util.finder.FlagInputDataInterval(iterable=(), /)[source]
+

Bases: BaseDataInterval

+

Derived class from BaseDataInterval for flag input data.

+
+ +
+
+class ch_util.finder.HKDataInterval(iterable=(), /)[source]
+

Bases: BaseDataInterval

+

Derived class from BaseDataInterval for housekeeping data.

+
+ +
+
+class ch_util.finder.WeatherDataInterval(iterable=(), /)[source]
+

Bases: BaseDataInterval

+

Derived class from BaseDataInterval for weather data.

+
+ +
+
+ch_util.finder.files_in_range(acq, start_time, end_time, node_list, extra_cond=None, node_spoof=None)[source]
+

Get files for a given acquisition within a time range.

+
+
Parameters:
+
    +
  • acq (string or int) – Which acquisition, by its name or id key.

  • +
  • start_time (float) – POSIX/Unix time for the start of time range.

  • +
  • end_time (float) – POSIX/Unix time for the end of time range.

  • +
  • node_list (list of chimedb.data_index.StorageNode objects) – Only return files residing on the given nodes.

  • +
  • extra_cond (peewee comparison) – Any additional expression for filtering files.

  • +
+
+
Returns:
+

file_names – List of filenames, including the full path.

+
+
Return type:
+

list of strings

+
+
+
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/_autosummary/ch_util.fluxcat.html b/docs/_autosummary/ch_util.fluxcat.html new file mode 100644 index 00000000..ecaea9a3 --- /dev/null +++ b/docs/_autosummary/ch_util.fluxcat.html @@ -0,0 +1,885 @@ + + + + + + + ch_util.fluxcat — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

ch_util.fluxcat

+

Catalog the measured flux densities of astronomical sources

+

This module contains tools for cataloging astronomical sources +and predicting their flux density at radio frequencies based on +previous measurements.

+

Functions

+ + + + + + + + + + + + + + + +

format_source_name(input_name)

Standardise the name of a source.

get_epoch(date)

Return the epoch for a date.

json_numpy_obj_hook(dct)

Decodes a previously encoded numpy ndarray with proper shape and dtype.

varname(name)

Create a python variable name from name.

+

Classes

+ + + + + + + + + + + + + + + + + + +

CurvedPowerLaw([freq_pivot, nparam])

Class to fit a spectrum to a polynomial in log-log space, given by

FitSpectrum([param, param_cov, stats])

A base class for modeling and fitting spectra.

FluxCatalog(name[, ra, dec, ...])

Class for cataloging astronomical sources and predicting their flux density at radio frequencies based on spectral fits to previous measurements.

MetaFluxCatalog

Metaclass for FluxCatalog.

NumpyEncoder(*[, skipkeys, ensure_ascii, ...])

Constructor for JSONEncoder, with sensible defaults.

+
+
+class ch_util.fluxcat.CurvedPowerLaw(freq_pivot=600.0, nparam=2, *args, **kwargs)[source]
+

Bases: FitSpectrum

+

Class to fit a spectrum to a polynomial in log-log space, given by

+
+\[\ln{S} = a_{0} + a_{1} \ln{\nu'} + a_{2} \ln{\nu'}^2 + a_{3} \ln{\nu'}^3 + \dots + +\]
+

where \(S\) is the flux density, \(\nu'\) is the (normalized) frequency, +and \(a_{i}\) are the fit parameters.

+
+
Parameters:
+
    +
  • nparam (int) – Number of parameters. This sets the order of the polynomial. +Default is 2 (powerlaw).

  • +
  • freq_pivot (float) – The pivot frequency \(\nu' = \nu / freq_pivot\). +Default is FREQ_NOMINAL.

  • +
+
+
+

Instantiates a CurvedPowerLaw object

+
+ +
+
+class ch_util.fluxcat.FitSpectrum(param=None, param_cov=None, stats=None)[source]
+

Bases: object

+

A base class for modeling and fitting spectra. Any spectral model +used by FluxCatalog should be derived from this class.

+

The fit method should be used to populate the param, param_cov, and stats +attributes. The predict and uncertainty methods can then be used to obtain +the flux density and uncertainty at arbitrary frequencies.

+
+
+param
+

Best-fit parameters.

+
+
Type:
+

np.ndarray[nparam, ]

+
+
+
+ +
+
+param_cov
+

Covariance of the fit parameters.

+
+
Type:
+

np.ndarray[nparam, nparam]

+
+
+
+ +
+
+stats
+

Dictionary that contains statistics related to the fit. +Must include ‘chisq’ and ‘ndof’.

+
+
Type:
+

dict

+
+
+
+ +
+
+Abstract Methods
+
+ +
+
+----------------
+
+ +
+
+Any subclass of FitSpectrum must define these methods
+

fit +_get_x +_fit_func +_deriv_fit_func

+
+ +

Instantiates a FitSpectrum object.

+
+
+predict(freq)[source]
+

Predicts the flux density at a particular frequency.

+
+ +
+
+uncertainty(freq, alpha=0.32)[source]
+

Predicts the uncertainty on the flux density at a +particular frequency.

+
+ +
+ +
+
+class ch_util.fluxcat.FluxCatalog(name, ra=None, dec=None, alternate_names=[], model='CurvedPowerLaw', model_kwargs=None, stats=None, param=None, param_cov=None, measurements=None, overwrite=0)[source]
+

Bases: object

+

Class for cataloging astronomical sources and predicting +their flux density at radio frequencies based on spectral fits +to previous measurements.

+

Class methods act upon and provide access to the catalog of +all sources. Instance methods act upon and provide access +to individual sources. All instances are stored in an +internal class dictionary.

+
+
+fields
+

List of attributes that are read-from and written-to the +JSON catalog files.

+
+
Type:
+

list

+
+
+
+ +
+
+model_lookup
+

Dictionary that provides access to the various models that +can be fit to the spectrum. These models should be +subclasses of FitSpectrum.

+
+
Type:
+

dict

+
+
+
+ +

Instantiates a FluxCatalog object for an astronomical source.

+
+
Parameters:
+
    +
  • name (string) – Name of the source. The convention for the source name is to +use the MAIN_ID in the SIMBAD database in all uppercase letters +with spaces replaced by underscores.

  • +
  • ra (float) – Right Ascension in degrees.

  • +
  • dec (float) – Declination in degrees.

  • +
  • alternate_names (list of strings) – Alternate names for the source. Ideally should include all alternate names +present in the SIMBAD database using the naming convention specified above.

  • +
  • model (string) – Name of FitSpectrum subclass.

  • +
  • model_kwargs (dict) – Dictionary containing keywords required by the model.

  • +
  • stats (dict) – Dictionary containing statistics from model fit.

  • +
  • param (list, length nparam) – Best-fit parameters.

  • +
  • param_cov (2D-list, size nparam x nparam) – Estimate of covariance of fit parameters.

  • +
  • measurements (2D-list, size nmeas x 7) – List of measurements of the form: +[freq, flux, eflux, flag, catalog, epoch, citation]. +Should use the add_measurement method to populate this list.

  • +
  • overwrite (int between 0 and 2) – Action to take in the event that this source is already in the catalog: +- 0 - Return the existing entry. +- 1 - Add the measurements to the existing entry. +- 2 - Overwrite the existing entry. +Default is 0.

  • +
+
+
+
+
+add_measurement(freq, flux, eflux, flag=True, catalog=None, epoch=None, citation=None)[source]
+

Add entries to the list of measurements. Each argument/keyword +can be a list of items with length equal to ‘len(flux)’, or +alternatively a single item in which case the same value is used +for all measurements.

+
+
Parameters:
+
    +
  • freq (float, list of floats) – Frequency in MHz.

  • +
  • flux (float, list of floats) – Flux density in Jansky.

  • +
  • eflux (float, list of floats) – Uncertainty on flux density in Jansky.

  • +
  • flag (bool, list of bool) – If True, use this measurement in model fit. +Default is True.

  • +
  • catalog (string or None, list of strings or Nones) – Name of the catalog from which this measurement originates. +Default is None.

  • +
  • epoch (float or None, list of floats or Nones) – Year when this measurement was taken. +Default is None.

  • +
  • citation (string or None, list of strings or Nones) – Citation where this measurement can be found +(e.g., ‘Baars et al. (1977)’). +Default is None.

  • +
+
+
+
+ +
+
+classmethod available_collections()[source]
+

Search the local directory for potential collections that +can be loaded.

+
+
Returns:
+

collections – List containing a tuple for each collection. The tuple contains +the filename of the collection (str) and the sources it contains +(list of str).

+
+
Return type:
+

list of (str, [str, …])

+
+
+
+ +
+
+property catalog
+

Catalog from which each measurement originates.

+
+ +
+
+property citation
+

Citation where more information on each measurement +can be found.

+
+ +
+
+classmethod delete(source_name)[source]
+

Deletes a source from the catalog.

+
+
Parameters:
+

source_name (str) – Name of the astronomical source.

+
+
+
+ +
+
+classmethod dump(filename)[source]
+

Dumps the contents of the catalog to a file.

+
+
Parameters:
+

filename (str) – Valid path name. Should have .json or .pickle extension.

+
+
+
+ +
+
+property eflux
+

Error on the flux measurements in Jansky.

+
+ +
+
+property epoch
+

Year that each measurement occurred.

+
+ +
+
+fit_model()[source]
+

Fit the measurements stored in the ‘measurements’ attribute with the +spectral model specified in the ‘model’ attribute. This populates the +‘param’, ‘param_cov’, and ‘stats’ attributes.

+
+ +
+
+property flag
+

Boolean flag indicating what measurements are used +in the spectral fit.

+
+ +
+
+property flux
+

Flux measurements in Jansky.

+
+ +
+
+property freq
+

Frequency of measurements in MHz.

+
+ +
+
+classmethod from_dict(name, flux_body_dict)[source]
+

Instantiates a FluxCatalog object for an astronomical source +from a dictionary of kwargs. Used when loading sources from a +JSON catalog file.

+
+
Parameters:
+
    +
  • name (str) – Name of the astronomical source.

  • +
  • flux_body_dict (dict) – Dictionary containing some or all of the keyword arguments +listed in the __init__ function for this class.

  • +
+
+
Returns:
+

obj – Object that can be used to predict the flux of this source, +plot flux measurements, etc.

+
+
Return type:
+

FluxCatalog instance

+
+
+
+ +
+
+classmethod get(key)[source]
+

Searches the catalog for a source. First checks against the +‘name’ of each entry, then checks against the ‘alternate_names’ +of each entry.

+
+
Parameters:
+

key (str) – Name of the astronomical source.

+
+
Returns:
+

obj – Object that can be used to predict the flux of this source, +plot flux measurements, etc.

+
+
Return type:
+

FluxCatalog instance

+
+
+
+ +
+
+classmethod iter()[source]
+

Iterates through the sources in the catalog.

+
+
Returns:
+

it – Provides the name of each source in the catalog +in the order specified by the ‘sort’ class method.

+
+
Return type:
+

iterator

+
+
+
+ +
+
+classmethod iteritems()[source]
+

Iterates through the sources in the catalog.

+
+
Returns:
+

it – Provides (name, object) for each source in the catalog +in the order specified by the ‘sort’ class method.

+
+
Return type:
+

iterator

+
+
+
+ +
+
+classmethod keys()[source]
+

Alias for sort.

+
+
Returns:
+

names – List of source names in correct order.

+
+
Return type:
+

list of str

+
+
+
+ +
+
+classmethod len()[source]
+

Number of sources in the catalog.

+
+
Returns:
+

N

+
+
Return type:
+

int

+
+
+
+ +
+
+classmethod load(filename, overwrite=0, set_globals=False, verbose=False)[source]
+

Load the contents of a file into the catalog.

+
+
Parameters:
+
    +
  • filename (str) – Valid path name. Should have .json or .pickle extension.

  • +
  • overwrite (int between 0 and 2) – Action to take in the event that this source is already in the catalog: +- 0 - Return the existing entry. +- 1 - Add any measurements to the existing entry. +- 2 - Overwrite the existing entry. +Default is 0.

  • +
  • set_globals (bool) – If True, this creates a variable in the global space +for each source in the file. Default is False.

  • +
  • verbose (bool) – If True, print some basic info about the contents of +the file as it is loaded. Default is False.

  • +
+
+
+
+ +
+
+classmethod loaded_collections()[source]
+

Return the collections that have been loaded.

+
+
Returns:
+

collections – List containing a tuple for each collection. The tuple contains +the filename of the collection (str) and the sources it contains +(list of str).

+
+
Return type:
+

list of (str, [str, …])

+
+
+
+ +
+
+plot(legend=True, catalog=True, residuals=False)[source]
+

Plot the measurements, best-fit model, and confidence interval.

+
+
Parameters:
+
    +
  • legend (bool) – Show legend. Default is True.

  • +
  • catalog (bool) – If True, then label and color code the measurements according to +their catalog. If False, then label and color code the measurements +according to their citation. Default is True.

  • +
  • residuals (bool) – Plot the residuals instead of the measurements and best-fit model. +Default is False.

  • +
+
+
+
+ +
+
+predict_flux(freq, epoch=None)[source]
+

Predict the flux density of the source at a particular +frequency and epoch.

+
+
Parameters:
+
    +
  • freq (float, np.array of floats) – Frequency in MHz.

  • +
  • epoch (float, np.array of floats) – Year. Defaults to current year.

  • +
+
+
Returns:
+

flux – Flux density in Jansky.

+
+
Return type:
+

float, np.array of floats

+
+
+
+ +
+
+predict_uncertainty(freq, epoch=None)[source]
+

Calculate the uncertainty in the estimate of the flux density +of the source at a particular frequency and epoch.

+
+
Parameters:
+
    +
  • freq (float, np.array of floats) – Frequency in MHz.

  • +
  • epoch (float, np.array of floats) – Year. Defaults to current year.

  • +
+
+
Returns:
+

flux_uncertainty – Uncertainty on the flux density in Jansky.

+
+
Return type:
+

float, np.array of floats

+
+
+
+ +
+
+classmethod print_available_collections(verbose=False)[source]
+

Print information about the available collections.

+
+
Parameters:
+

verbose (bool) – If True, then print all source names in addition to the names +of the files and number of sources. Default is False.

+
+
+
+ +
+
+classmethod print_loaded_collections(verbose=False)[source]
+

Print information about the collections that have been loaded.

+
+
Parameters:
+

verbose (bool) – If True, then print all source names in addition to the names +of the files and number of sources. Default is False.

+
+
+
+ +
+
+print_measurements()[source]
+

Print all measurements.

+
+ +
+
+classmethod reversed()[source]
+

Iterates through the sources in the catalog +in reverse order.

+
+
Returns:
+

it – Provides the name of each source in the catalog +in the reverse order as that specified by the +‘sort’ class method.

+
+
Return type:
+

iterator

+
+
+
+ +
+
+property skyfield
+

Skyfield star representation skyfield.starlib.Star +for the source.

+
+ +
+
+classmethod sort()[source]
+

Sorts the entries in the catalog by their flux density +at FREQ_NOMINAL in descending order.

+
+
Returns:
+

names – List of source names in correct order.

+
+
Return type:
+

list of str

+
+
+
+ +
+
+classmethod string()[source]
+

Print basic information about the sources in the catalog.

+
+ +
+
+to_dict()[source]
+

Returns an ordered dictionary containing attributes +for this instance object. Used to dump the information +stored in the instance object to a file.

+
+
Returns:
+

flux_body_dict – Dictionary containing all attributes listed in +the ‘fields’ class attribute.

+
+
Return type:
+

dict

+
+
+
+ +
+ +
+
+class ch_util.fluxcat.MetaFluxCatalog[source]
+

Bases: type

+

Metaclass for FluxCatalog. Defines magic methods +for the class that can act on and provide access to the +catalog of all astronomical sources.

+
+ +
+
+class ch_util.fluxcat.NumpyEncoder(*, skipkeys=False, ensure_ascii=True, check_circular=True, allow_nan=True, sort_keys=False, indent=None, separators=None, default=None)[source]
+

Bases: JSONEncoder

+

Constructor for JSONEncoder, with sensible defaults.

+

If skipkeys is false, then it is a TypeError to attempt +encoding of keys that are not str, int, float or None. If +skipkeys is True, such items are simply skipped.

+

If ensure_ascii is true, the output is guaranteed to be str +objects with all incoming non-ASCII characters escaped. If +ensure_ascii is false, the output can contain non-ASCII characters.

+

If check_circular is true, then lists, dicts, and custom encoded +objects will be checked for circular references during encoding to +prevent an infinite recursion (which would cause an RecursionError). +Otherwise, no such check takes place.

+

If allow_nan is true, then NaN, Infinity, and -Infinity will be +encoded as such. This behavior is not JSON specification compliant, +but is consistent with most JavaScript based encoders and decoders. +Otherwise, it will be a ValueError to encode such floats.

+

If sort_keys is true, then the output of dictionaries will be +sorted by key; this is useful for regression tests to ensure +that JSON serializations can be compared on a day-to-day basis.

+

If indent is a non-negative integer, then JSON array +elements and object members will be pretty-printed with that +indent level. An indent level of 0 will only insert newlines. +None is the most compact representation.

+

If specified, separators should be an (item_separator, key_separator) +tuple. The default is (’, ‘, ‘: ‘) if indent is None and +(‘,’, ‘: ‘) otherwise. To get the most compact JSON representation, +you should specify (‘,’, ‘:’) to eliminate whitespace.

+

If specified, default is a function that gets called for objects +that can’t otherwise be serialized. It should return a JSON encodable +version of the object or raise a TypeError.

+
+
+default(obj)[source]
+

If input object is an ndarray it will be converted into a dict +holding dtype, shape and the data, base64 encoded.

+
+ +
+ +
+
+ch_util.fluxcat.format_source_name(input_name)[source]
+

Standardise the name of a source.

+
+
Parameters:
+
    +
  • input_name (str) – The name to format

  • +
  • Returns

  • +
  • formatted_name (str) – The name after formatting.

  • +
+
+
+
+ +
+
+ch_util.fluxcat.get_epoch(date)[source]
+

Return the epoch for a date.

+
+
Parameters:
+

date (datetime.datetime) – Date to calculate epoch

+
+
Returns:
+

epoch – The fractional-year epoch

+
+
Return type:
+

float

+
+
+
+ +
+
+ch_util.fluxcat.json_numpy_obj_hook(dct)[source]
+

Decodes a previously encoded numpy ndarray with proper shape and dtype.

+
+
Parameters:
+

dct – (dict) json encoded ndarray

+
+
Returns:
+

(ndarray) if input was an encoded ndarray

+
+
+
+ +
+
+ch_util.fluxcat.varname(name)[source]
+

Create a python variable name from name.

+

The variable name replaces spaces in name with +underscores and adds a leading underscore if name +starts with a digit.

+
+
Parameters:
+

name (str) – The name to create a variable name for

+
+
Returns:
+

varname – The python variable name.

+
+
Return type:
+

str

+
+
+
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/_autosummary/ch_util.hfbcat.html b/docs/_autosummary/ch_util.hfbcat.html new file mode 100644 index 00000000..671b025f --- /dev/null +++ b/docs/_autosummary/ch_util.hfbcat.html @@ -0,0 +1,239 @@ + + + + + + + ch_util.hfbcat — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

ch_util.hfbcat

+

Catalog of HFB test targets

+

Functions

+ + + + + + +

get_doppler_shifted_freq(source, date[, ...])

Calculate Doppler shifted frequency of spectral feature with rest frequency freq_rest, seen towards source source at time date, due to Earth's motion and rotation, following the relativistic Doppler effect.

+

Classes

+ + + + + + +

HFBCatalog(name[, ra, dec, alternate_names, ...])

Class for cataloguing HFB targets.

+
+
+class ch_util.hfbcat.HFBCatalog(name, ra=None, dec=None, alternate_names=[], freq_abs=[], overwrite=0)[source]
+

Bases: <class ‘ch_util.fluxcat.FluxCatalog’>

+

Class for cataloguing HFB targets.

+
+
+fields
+

List of attributes that are read-from and written-to the +JSON catalog files.

+
+
Type:
+

list

+
+
+
+ +

Instantiate an HFBCatalog object for an HFB target.

+
+
Parameters:
+
    +
  • name (string) – Name of the source.

  • +
  • ra (float) – Right Ascension in degrees.

  • +
  • dec (float) – Declination in degrees.

  • +
  • alternate_names (list of strings) – Alternate names for the source.

  • +
  • freq_abs (list of floats) – Frequencies at which (the peaks of) absorption features are found.

  • +
  • overwrite (int between 0 and 2) – Action to take in the event that this source is already in the catalog: +- 0 - Return the existing entry. +- 1 - Add the measurements to the existing entry. +- 2 - Overwrite the existing entry. +Default is 0. +BUG: Currently, freq_abs is always overwritten.

  • +
+
+
+
+ +
+
+ch_util.hfbcat.get_doppler_shifted_freq(source: Union[skyfield.starlib.Star, str], date: Union[float, list], freq_rest: Union[float, list] = None, obs: ephemeris.Observer = <caput.time.Observer object>) np.array[source]
+

Calculate Doppler shifted frequency of spectral feature with rest +frequency freq_rest, seen towards source source at time date, due to +Earth’s motion and rotation, following the relativistic Doppler effect.

+
+
Parameters:
+
    +
  • source – Position(s) on the sky. If the input is a str, attempt to resolve this +from ch_util.hfbcat.HFBCatalog.

  • +
  • date – Unix time(s) for which to calculate Doppler shift.

  • +
  • freq_rest – Rest frequency(ies) in MHz. If None, attempt to obtain rest frequency +of absorption feature from ch_util.hfbcat.HFBCatalog.freq_abs.

  • +
  • obs – An Observer instance to use. If not supplied use chime. For many +calculations changing from this default will make little difference.

  • +
+
+
Returns:
+

Doppler shifted frequencies in MHz. Array where rows correspond to the +different input rest frequencies and columns correspond either to input +times or to input sky positions (whichever contains multiple entries).

+
+
Return type:
+

freq_obs

+
+
+

Notes

+

Only one of source and date can contain multiple entries.

+

Example

+

To get the Doppler shifted frequencies of a feature with a rest frequency +of 600 MHz for two positions on the sky at a single point in time (Unix +time 1717809534 = 2024-06-07T21:18:54+00:00), run:

+
>>> from skyfield.starlib import Star
+>>> from skyfield.units import Angle
+>>> from ch_util.hfbcat import get_doppler_shifted_freq
+>>> coord = Star(ra=Angle(degrees=[100, 110]), dec=Angle(degrees=[45, 50]))
+>>> get_doppler_shifted_freq(coord, 1717809534, 600)
+
+
+
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/_autosummary/ch_util.holography.html b/docs/_autosummary/ch_util.holography.html new file mode 100644 index 00000000..0f79045d --- /dev/null +++ b/docs/_autosummary/ch_util.holography.html @@ -0,0 +1,419 @@ + + + + + + + ch_util.holography — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

ch_util.holography

+

Holography observation tables.

+

This module defines the tables:

+ +

and the constants:

+
    +
  • QUALITY_GOOD

  • +
  • QUALITY_OFFSOURCE

  • +
  • ONSOURCE_DIST_TO_FLAG

  • +
+

Classes

+ + + + + + + + + +

HolographyObservation(*args, **kwargs)

A peewee model for the holographic observations.

HolographySource(*args, **kwargs)

A peewee model for the Holography sources.

+
+
+class ch_util.holography.HolographyObservation(*args, **kwargs)[source]
+

Bases: base_model

+

A peewee model for the holographic observations.

+
+
+source
+

The source that we were observing.

+
+
Type:
+

foreign key

+
+
+
+ +
+
+start_time, finish_time
+

Start and end times of the source observation (as UNIX times).

+
+
Type:
+

float

+
+
+
+ +
+
+notes
+

Any free form notes about the observation.

+
+
Type:
+

str

+
+
+
+ +
+
+DoesNotExist
+

alias of HolographyObservationDoesNotExist

+
+ +
+
+classmethod create_from_ant_logs(logs, verbose=False, onsource_dist=0.1, notes=None, quality_flag=0, **kwargs)[source]
+

Read John Galt Telescope log files and create an entry in the +holography database corresponding to the exact times on source

+
+
Parameters:
+
    +
  • logs (list of strings) – log file archives (.zip files) to pass to parse_ant_logs()

  • +
  • onsource_dist (float (default: 0.1)) – maximum angular distance at which to consider the Galt Telescope +on source (in degrees)

  • +
+
+
Return type:
+

none

+
+
+
+ +
+
+classmethod create_from_dict(dict, notes=None, start_tol=60.0, dryrun=True, replace_dup=False, verbose=False)[source]
+

Create a holography database entry from a dictionary

+

This routine checks for duplicates and overwrites duplicates if and +only if replace_dup = True

+
+
Parameters:
+

dict (dict) –

+
srcHolographySource

A HolographySource object for the source

+
+
start_time

Start time as a Skyfield Time object

+
+
finish_time

Finish time as a Skyfield Time object

+
+
+

+
+
+
+ +
+
+classmethod create_from_post_reports(logs, start_tol=60.0, dryrun=True, replace_dup=False, verbose=True, notes=None)[source]
+

Create holography database entry from .POST_REPORT log files +generated by the nsched controller for the Galt Telescope.

+
+
Parameters:
+
    +
  • logs (string) – list of paths to archives. Filenames should be, eg, +01DEC17_1814.zip. Must be only one period in the filename, +separating the extension.

  • +
  • start_tol (float (optional; default: 60.)) – Tolerance (in seconds) around which to search for duplicate +operations.

  • +
  • dryrun (boolean (optional; default: True)) – Dry run only; do not add entries to database

  • +
  • replace_dup (boolean (optional; default: False)) – Delete existing duplicate entries and replace. Only has effect if +dry_run == False

  • +
  • notes (string or list of strings (optional; default: None)) – notes to be added. If a string, the same note will be added to all +observations. If a list of strings (must be same length as logs), +each element of the list will be added to the corresponding +database entry. +Nota bene: the text “Added by create_from_post_reports” with the +current date and time will also be included in the notes database +entry.

  • +
+
+
+

Example

+

from ch_util import holography as hl +import glob

+

obs = hl.HolographyObservation +logs = glob.glob(‘/path/to/logs/JUN18.zip’) +obs_list, dup_obs_list, missing = obs.create_from_post_reports(logs, dryrun=False)

+
+ +
+
+classmethod from_lst(source, start_day, start_lst, duration_lst, quality_flag=0, notes=None)[source]
+

Method to initialize a HolographyObservation from a start day, +start LST, and a stop day, stop LST.

+
+
Parameters:
+
    +
  • source (HolographySource) – An instance of HolographySource.

  • +
  • start_day (string) – Of format YYYYMMDD-ABT, ABT can be one of (UTC, PST, PDT)

  • +
  • start_lst (float) – Hours and fraction of hours on a scale from 0-24.

  • +
  • duration_lst (float) – Hours and fraction of hours on a scale from 0-24.

  • +
  • quality_flag (int, default : 0) – Flag for poor quality data. Good data is zero. +Sets a bitmask in the HolographyObservation instance.

  • +
  • notes (string, optional) – Any notes on this observation.

  • +
+
+
+
+ +
+
+classmethod parse_ant_logs(logs, return_post_report_params=False)[source]
+

Unzip and parse .ANT log file output by nsched for John Galt Telescope +observations

+
+
Parameters:
+

logs (list of strings) –

.ZIP filenames. Each .ZIP archive should include a .ANT file and +a .POST_REPORT file. This method unzips the archive, uses +parse_post_report to read the .POST_REPORT file and extract +the CHIME sidereal day corresponding to the DRAO sidereal day, +and then reads the lines in the .ANT file to obtain the pointing +history of the Galt Telescope during this observation.

+

(The DRAO sidereal day is days since the clock in Ev Sheehan’s +office at DRAO was reset. This clock is typically only reset every +few years, but it does not correspond to any defined date, so the +date must be figured out from the .POST_REPORT file, which reports +both the DRAO sidereal day and the UTC date and time.

+

Known reset dates: 2017-11-21, 2019-3-10)

+

+
+
Returns:
+

    +
  • if output_params == False

    +
    +
    ant_data: A dictionary consisting of lists containing the LST,

    hour angle, RA, and dec (all as Skyfield Angle objects), +CHIME sidereal day, and DRAO sidereal day.

    +
    +
    +
  • +
  • if output_params == True – output_params: dictionary returned by parse_post_report +and +ant_data: described above

  • +
  • Files

  • +
  • —–

  • +
  • the .ANT and .POST_REPORT files in the input .zip archive are

  • +
  • extracted into /tmp/26mlog/<loginname>/

  • +
+

+
+
+
+ +
+
+classmethod parse_post_report(post_report_file)[source]
+

read a .POST_REPORT file from the nsched program which controls the +John Galt Telescope and extract the source name, estimated start time, +DRAO sidereal day, commanded duration, and estimated finish time

+
+
Parameters:
+

post_report_file (str) – path to the .POST_REPORT file to read

+
+
Returns:
+

output_params

+
+
output_params[‘src’]HolographySource object or string

If the source is a known source in the holography database, +return the HolographySource object. If not, return the name +of the source as a string

+
+
output_params[‘SID’]int

DRAO sidereal day at the beginning of the observation

+
+
output_params[‘start_time’]skyfield time object

UTC time at the beginning of the observation

+
+
output_params[‘DURATION’]float

Commanded duration of the observation in sidereal hours

+
+
output_params[‘finish_time’]skyfield time object

Calculated UTC time at the end of the observation +Calculated as start_time + duration * caput.time.SIDEREAL_S

+
+
+

+
+
Return type:
+

dictionary

+
+
+
+ +
+ +
+
+class ch_util.holography.HolographySource(*args, **kwargs)[source]
+

Bases: base_model

+

A peewee model for the Holography sources.

+
+
+name
+

Unique name for the source. Be careful to avoid duplicates.

+
+
Type:
+

str

+
+
+
+ +
+
+ra, dec
+

ICRS co-ordinates of the source.

+
+
Type:
+

float

+
+
+
+ +
+
+DoesNotExist
+

alias of HolographySourceDoesNotExist

+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/_autosummary/ch_util.layout.html b/docs/_autosummary/ch_util.layout.html new file mode 100644 index 00000000..95a84223 --- /dev/null +++ b/docs/_autosummary/ch_util.layout.html @@ -0,0 +1,985 @@ + + + + + + + ch_util.layout — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

ch_util.layout

+

Interface to the CHIME components and graphs

+

This module interfaces to the layout tables in the CHIME database.

+

The peewee module is used for the ORM to the MySQL database. Because the +layouts are event-driven, you should never attempt to enter events by raw +inserts to the event or timestamp tables, as you could create +inconsistencies. Rather, use the methods which are described in this document to +do such alterations robustly.

+

For most uses, you probably want to import the following:

+
>>> from datetime import datetime
+>>> import logging
+>>> logging.basicConfig(level = logging.INFO)
+>>> import peewee
+>>> import layout
+>>> layout.connect_database()
+
+
+
+

Note

+

The database must now be explicitly connected. This should not be done within +an import statement.

+
+
+

Note

+

The logging module can be set to the level of your preference, or not +imported altogether if you don’t want log messages from the layout +module. Note that the peewee module sends a lot of messages to the +DEBUG stream.

+
+

If you will be altering the layouts, you will need to register as a user:

+
>>> layout.set_user("Ahincks")
+
+
+

Use your CHIME wiki username here. Make sure it starts with a capital letter. +Note that different users have different permissions, stored in the +user_permission table. If you are simply reading from the layout, +there is no need to register as a user.

+
+

Choose Your Own Adventure

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

If you want to …

… then see

retrieve and examine layout graphs

graph

add components

component.add, +add_component

remove components

component.remove, +remove_component

make connexions

make_connexion

sever connexions

sever_connexion

set component properties

component.set_property +set_property

get component properties

component.get_property

perform bulk updates

enter_ltf()

add component history notes

component.add_history

add link to component documentation

component.add_doc

create a global flag

global_flag.start

+

Functions

+ +

Classes

+ +
+
+

Database Models

+
    +
  • component

  • +
  • component_history

  • +
  • component_type

  • +
  • component_type_rev

  • +
  • component_doc

  • +
  • connexion

  • +
  • external_repo

  • +
  • event

  • +
  • event_type

  • +
  • graph_obj

  • +
  • global_flag

  • +
  • predef_subgraph_spec

  • +
  • predef_subgraph_spec_param

  • +
  • property

  • +
  • property_component

  • +
  • property_type

  • +
  • timestamp

  • +
  • user_permission

  • +
  • user_permission_type

  • +
+

Exceptions

+
    +
  • NoSubgraph

  • +
  • BadSubgraph

  • +
  • DoesNotExist

  • +
  • UnknownUser

  • +
  • NoPermission

  • +
  • LayoutIntegrity

  • +
  • PropertyType

  • +
  • PropertyUnchanged

  • +
  • ClosestDraw

  • +
  • NotFound

  • +
+

Constants

+
    +
  • EVENT_AT

  • +
  • EVENT_BEFORE

  • +
  • EVENT_AFTER

  • +
  • EVENT_ALL

  • +
  • ORDER_ASC

  • +
  • ORDER_DESC

  • +
+
+

Functions

+ + + + + + + + + + + + +

enter_ltf(ltf[, time, notes, force])

Enter an LTF into the database.

get_global_flag_times(flag)

Convenience function to get global flag times by id or name.

global_flags_between(start_time, end_time[, ...])

Find global flags that overlap a time interval.

+

Classes

+ + + + + + + + + +

graph(*args, **kwargs)

A graph of connexions.

subgraph_spec(start, terminate, oneway, hide)

Specifications for extracting a subgraph from a full graph.

+
+
+ch_util.layout.enter_ltf(ltf, time=datetime.datetime(2024, 7, 31, 22, 1, 54, 120876), notes=None, force=False)[source]
+

Enter an LTF into the database.

+

This is a special mark-up language for quickly entering events. See the “help” +box on the LTF page of the web interface for instructions.

+
+
Parameters:
+
    +
  • ltf (string) – Pass either the path to a file containing the LTF, or a string containing +the LTF.

  • +
  • time (datetime.datetime) – The time at which to apply the LTF.

  • +
  • notes (string) – Notes for the timestamp.

  • +
  • force (bool) – If True, then do nothing when events that would damage database +integrity are encountered; skip over them. If False, then a bad +proposed event will raise the appropriate exception.

  • +
+
+
+
+ +
+
+ch_util.layout.get_global_flag_times(flag)[source]
+

Convenience function to get global flag times by id or name.

+
+
Parameters:
+

flag (integer or string) – If an integer, this is a global flag id, e.g. 64. If a string this is the +global flag’s name e.g. ‘run_pass0_e’.

+
+
Returns:
+

    +
  • start (datetime.datetime) – Global flag start time (UTC).

  • +
  • end (datetime.datetime or None) – Global flag end time (UTC) or None if the flag hasn’t ended.

  • +
+

+
+
+
+ +
+
+ch_util.layout.global_flags_between(start_time, end_time, severity=None)[source]
+

Find global flags that overlap a time interval.

+
+
Parameters:
+
    +
  • start_time

  • +
  • end_time

  • +
  • severity (str) – One of ‘comment’, ‘warning’, ‘severe’, or None.

  • +
+
+
Returns:
+

flags – List of global_flag objects matching criteria.

+
+
Return type:
+

list

+
+
+
+ +
+
+class ch_util.layout.graph(*args: Any, **kwargs: Any)[source]
+

Bases: Graph

+

A graph of connexions.

+

This class inherits the +networkx.Graph +class and adds CHIME-specific functionality.

+

Use the from_db() class method to construct a graph from the database.

+
+
Parameters:
+

time (datetime.datetime) – The time at which the graph is valid. Default is now().

+
+
+

Examples

+

To load a graph from the database, use the from_db() class method:

+
>>> from ch_util import graph
+>>> from datetime import datetime
+>>> g = layout.graph.from_db(datetime(2014, 10, 5, 12, 0))
+
+
+

You can now use any of the +networkx.Graph +methods:

+
>>> print g.order(), g.size()
+2483 2660
+
+
+

There are some convenience methods for our implementation. For example, you +can easily find components by component type:

+
>>> print g.component(type = "reflector")
+[<layout.component object at 0x7fd1b2cda710>, <layout.component object at 0x7fd1b2cda810>, <layout.component object at 0x7fd1b2cfb7d0>]
+
+
+

Note that the graph nodes are component objects. You can also use the +component() method to search for components by serial number:

+
>>> ant = g.component(comp = "ANT0044B")
+
+
+

Node properties are stored as per usual for networkx.Graph objects:

+
>>> print g.nodes[ant]
+{'_rev_id': 11L, '_type_id': 2L, u'pol1_orient': <layout.property object at 0x7f31ed323fd0>, '_type_name': u'antenna', '_id': 32L, u'pol2_orient': <layout.property object at 0x7f31ed2c8790>, '_rev_name': u'B'}
+
+
+

Note, however, that there are some internally-used properties (starting with +an underscore). The node_property() method returns a dictionary of properties +without these private members:

+
>>> for p in g.node_property(ant).values():
+...   print "%s = %s %s" % (p.type.name, p.value, p.type.units if p.type.units else "")
+pol1_orient = S
+pol2_orient = E
+
+
+

To search the graph for the closest component of a given type to a single +component, using closest_of_type():

+
>>> slt_type = layout.component_type.get(name = "cassette slot")
+>>> print g.closest_of_type(ant, slt_type).sn
+CSS004C0
+
+
+

Use of closest_of_type() can be subtle for components separated by long +paths. See its documentation for more examples.

+

Subgraphs can be created using a subgraph specification, encoded in a +subgraph_spec object. See the documentation for that class for +details, but briefly, this allows one to create a smaller, more manageable +graph containing only components and connexions you are interested in. Given a +subgraph, the ltf() method can be useful.

+
+
+closest_of_type(comp, type, type_exclude=None, ignore_draws=True)[source]
+

Searches for the closest connected component of a given type.

+

Sometimes the closest component is through a long, convoluted path that you +do not wish to explore. You can cut out these cases by including a list of +component types that will block the search along a path.

+

The component may be passed by object or by serial number; similarly for +component types.

+
+
Parameters:
+
    +
  • comp (component or string or list of such) – The component to search from.

  • +
  • type (component_type or string) – The component type to find.

  • +
  • type_exclude (list of component_type or strings) – Any components of this type will prematurely cut off a line of +investigation.

  • +
  • ignore_draws (boolean) – It is possible that there be more than one component of a given type the +same distance from the starting component. If this parameter is set to +True, then just return the first one that is found. If set to +False, then raise an exception.

  • +
+
+
Returns:
+

comp – The closest component of the given type to start. If no component of +type is found None is returned.

+
+
Return type:
+

component or list of such

+
+
Raises:
+

ClosestDraw – Raised if there is no unique closest component and ignore_draws is set + to False.

+
+
+

Examples

+

Find the cassette slot an antenna is plugged into:

+
>>> import layout
+>>> from datetime import datetime
+>>> g = layout.graph.from_db(datetime(2014, 11, 5, 12, 0))
+>>> print g.closest_of_type("ANT0044B", "cassette slot").sn
+CSS004C0
+
+
+

The example above is simple as the two components are adjacent:

+
>>> print [c.sn for c in g.shortest_path_to_type("ANT0044B", "cassette slot")]
+[u'ANT0044B', u'CSS004C0']
+
+
+

In general, though, you need to take care when +using this method and make judicious use of the type_exclude parameter. +For example, consider the following example:

+
>>> print g.closest_of_type("K7BP16-00040112", "RFT thru").sn
+RFTB15B
+
+
+

It seems OK on the surface, but the path it has used is probably not what +you want:

+
>>> print [c.sn for c in g.shortest_path_to_type("K7BP16-00040112", "RFT thru")]
+[u'K7BP16-00040112', u'K7BP16-000401', u'K7BP16-00040101', u'FLA0280B', u'RFTB15B']
+
+
+

We need to block the searcher from going into the correlator card slot and +then back out another input, which we can do like so:

+
>>> print g.closest_of_type("K7BP16-00040112", "RFT thru", type_exclude = "correlator card slot").sn
+RFTQ15B
+
+
+

The reason the first search went through the correlator card slot is because +there are delay cables and splitters involved.

+
>>> print [c.sn for c in g.shortest_path_to_type("K7BP16-00040112", "RFT thru", type_exclude = "correlator card slot")]
+[u'K7BP16-00040112', u'CXS0279', u'CXA0018A', u'CXA0139B', u'SPL001AP2', u'SPL001A', u'SPL001AP3', u'CXS0281', u'RFTQ15B']
+
+
+

The shortest path really was through the correlator card slot, until we +explicitly rejected such paths.

+
+ +
+
+component(comp=None, type=None, sort_sn=False)[source]
+

Return a component or list of components from the graph.

+

The components exist as graph nodes. This method provides searchable access +to them.

+
+
Parameters:
+
    +
  • comp (string or component) – If not None, then return the component with this serial number, or +None if it does not exist in the graph. If this parameter is set, +then type is ignored. You can also pass a component object; the +instance of that component with the same serial number will be returned if +it exists in this graph.

  • +
  • type (string or component_type) – If not None, then only return components of this type. You may pass +either the name of the component type or an object.

  • +
+
+
Returns:
+

If the sn parameter is passed, a single component object is +returned. If the type parameter is passed, a list of +component objects is returned.

+
+
Return type:
+

component or list of such

+
+
Raises:
+

NotFound – Raised if no component is found.

+
+
+

Examples

+
>>> from ch_util import graph
+>>> from datetime import datetime
+>>> g = layout.graph.from_db(datetime(2014, 10, 5, 12, 0))
+>>> print g.component("CXA0005A").type_rev.name
+B
+>>> for r in g.component(type = "reflector"):
+...   print r.sn
+E_cylinder
+W_cylinder
+26m_dish
+
+
+
+ +
+
+classmethod from_db(time=datetime.datetime(2024, 7, 31, 22, 1, 54, 120853), sg_spec=None, sg_start_sn=None)[source]
+

Create a new graph by reading the database.

+

This method is designed to be efficient. It has customised SQL calls so that +only a couple of queries are required. Doing this with the standard peewee +functionality requires many more calls.

+

This method will establish a connection to the database if it doesn’t +already exist.

+
+
Parameters:
+
    +
  • time (datetime.datetime) – The time at which the graph is valid. Default is now().

  • +
  • sg_spec (subgraph_spec) – The subgraph specification to use; can be set to None.

  • +
  • sg_start_sn (string) – If a serial number is specified, then only the subgraph starting with that +component will be returned. This parameter is ignored if sg_spec is +None.

  • +
+
+
Returns:
+

If sg_spec is not None, and sg_start_sn is not specified, then +a list of graph objects is returned instead.

+
+
Return type:
+

graph

+
+
+

:raises If no graph is found, NotFound is raised.:

+
+ +
+
+classmethod from_graph(g, sg_spec=None, sg_start_sn=None)[source]
+

Find subgraphs within this graph.

+
+
Parameters:
+
    +
  • g (graph) – The graph from which to get the new graph.

  • +
  • sg_spec (subgraph_spec) – The subgraph specification to use; can be set to None.

  • +
+
+
Returns:
+

    +
  • A list of graph objects, one for each subgraph found. If, however,

  • +
  • *g* is set to None, a reference to the input graph is returned.

  • +
+

+
+
+
+ +
+
+ltf()[source]
+

Get an LTF representation of the graph. The graph must be a subgraph, +i.e., generated with a predef_subgraph_spec.

+
+
Returns:
+

ltf – The LTF representation of the graph.

+
+
Return type:
+

string

+
+
Raises:
+
    +
  • NoSubgraph

  • +
  • Raised if no subgraph specification is associate with this layout.

  • +
+
+
+

Examples

+

Get the LTF for a subgraph of antenna to HK.

+
>>> import layout
+>>> from datetime import datetime
+>>> start = layout.component_type.get(name = "antenna").id
+>>> terminate = [layout.component_type.get(name = "reflector").id,
+                 layout.component_type.get(name = "cassette slot").id,
+                 layout.component_type.get(name = "correlator input").id,
+                 layout.component_type.get(name = "HK preamp").id,
+                 layout.component_type.get(name = "HK hydra").id]
+>>> hide = [layout.component_type.get(name = "reflector").id,
+            layout.component_type.get(name = "cassette slot").id,
+            layout.component_type.get(name = "HK preamp").id,
+            layout.component_type.get(name = "HK hydra").id]
+>>> sg_spec = layout.subgraph_spec(start, terminate, [], hide)
+>>> sg = layout.graph.from_db(datetime(2014, 11, 20, 12, 0), sg_spec, "ANT0108B")
+>>> print sg.ltf()
+# Antenna to correlator input.
+ANT0108B pol1_orient=S pol2_orient=E
+PL0108B1
+LNA0249B
+CXA0239C
+CANBJ6B
+CXS0042
+RFTG00B attenuation=10
+FLA0196B
+CXS0058
+K7BP16-00041606
+
+# Antenna to correlator input.
+ANT0108B pol1_orient=S pol2_orient=E
+PL0108B2
+LNA0296B
+CXA0067B
+CANBG6B
+CXS0090
+RFTG01B attenuation=10
+FLA0269B
+CXS0266
+K7BP16-00041506
+
+
+
+ +
+
+neighbour_of_type(n, type)[source]
+

Get a list of neighbours of a given type.

+

This is like the networkx.Graph.neighbors() method, but selects only +the neighbours of the specified type.

+
+
Parameters:
+
    +
  • comp (component) – A node in the graph.

  • +
  • type (component_type or string) – The component type to find.

  • +
+
+
Returns:
+

nlist

+
+
Return type:
+

A list of nodes of type type adjacent to n.

+
+
Raises:
+

networkx.NetworkXError – Raised if n is not in the graph.

+
+
+
+ +
+
+node_property(n)[source]
+

Return the properties of a node excluding internally used properties.

+

If you iterate over a nodes properties, you will also get the +internally-used properties (starting with an underscore). This method gets +the dictionary of properties without these “private” properties.

+
+
Parameters:
+

node (node object) – The node for which to get the properties.

+
+
Return type:
+

A dictionary of properties.

+
+
+

Examples

+
>>> from ch_util import graph
+>>> from datetime import datetime
+>>> g = layout.graph.from_db(datetime(2014, 10, 5, 12, 0))
+>>> rft = g.component(comp = "RFTK07B")
+>>> for p in g.node_property(rft).values():
+...   print "%s = %s %s" % (p.type.name, p.value, p.type.units if p.type.units else "")
+attenuation = 10 dB
+therm_avail = ch1
+
+
+
+ +
+
+property sg_spec
+

The subgraph_spec (subgraph specification) used to get this graph.

+
+
Return type:
+

The subgraph_spec used to get this graph, if any.

+
+
+
+ +
+
+property sg_spec_start
+

The subgraph starting component.

+
+
Return type:
+

The component that was used to begin the subgraph, if any.

+
+
+
+ +
+
+shortest_path_to_type(comp, type, type_exclude=None, ignore_draws=True)[source]
+

Searches for the shortest path to a component of a given type.

+

Sometimes the closest component is through a long, convoluted path that you +do not wish to explore. You can cut out these cases by including a list of +component types that will block the search along a path.

+

The component may be passed by object or by serial number; similarly for +component types.

+
+
Parameters:
+
    +
  • comp (component or string or list of one of these) – The component(s) to search from.

  • +
  • type (component_type or string) – The component type to find.

  • +
  • type_exclude (list of component_type or strings) – Any components of this type will prematurely cut off a line of +investigation.

  • +
  • ignore_draws (boolean) – It is possible that there be more than one component of a given type the +same distance from the starting component. If this parameter is set to +True, then just return the first one that is found. If set to +False, then raise an exception.

  • +
+
+
Returns:
+

comp – The closest component of the given type to start. If no path to a +component of the specified type exists, return None.

+
+
Return type:
+

component or list of such

+
+
Raises:
+

ClosestDraw – Raised if there is no unique closest component and ignore_draws is set + to False.

+
+
+

Examples

+

See the examples for closest_of_type().

+
+ +
+
+property time
+

The time of the graph.

+
+
Returns:
+

time – The time at which this graph existed.

+
+
Return type:
+

datetime.datetime

+
+
+
+ +
+ +
+
+class ch_util.layout.subgraph_spec(start, terminate, oneway, hide)[source]
+

Bases: object

+

Specifications for extracting a subgraph from a full graph.

+

The subgraph specification can be created from scratch by passing the +appropriate parameters. They can also be pulled from the database using the +class method from_predef().

+

The parameters can be passed as ID’s, names of component types or +component_type instances.

+
+
Parameters:
+
    +
  • start (integer, component_type or string) – The component type for the start of the subgraph.

  • +
  • terminate (list of integers, of component_type or of strings) – Component type id’s for terminating the subgraph.

  • +
  • oneway (list of list of integer pairs, of component_type or of strings) – Pairs of component types for defining connexions that should only be +traced one way when moving from the starting to terminating components.

  • +
  • hide (list of integers, of component_type or of strings) – Component types for components that should be hidden and skipped over in +the subgraph.

  • +
+
+
+

Examples

+

To look at subgraphs of components between the outer bulkhead and the +correlator inputs, one could create the following specification:

+
>>> import layout
+>>> from datetime import datetime
+>>> sg_spec = layout.subgraph_spec(start = "c-can thru",
+                                   terminate = ["correlator input", "60m coax"],
+                                   oneway = [],
+                                   hide = ["60m coax", "SMA coax"])
+
+
+

What did we do? We specified that the subgraph starts at the C-Can bulkhead. +It terminates at the correlator input; in the other direction, it must also +terminate at a 60 m coaxial cable plugged into the bulkhead. We hide the 60 m +coaxial cable so that it doesn’t show up in the subgraph. We also hide the SMA +cables so that they will be skipped over.

+

We can load all such subgraphs from the database now and see how many nodes +they contain:

+
>>> sg = layout.graph.from_db(datetime(2014, 10, 5, 12, 0), sg_spec)
+print [s.order() for s in sg]
+[903, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 903,
+3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+3, 3, 3, 3, 3, 3, 3, 903, 3, 3, 3, 903, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 903, 3, 1, 3, 3,
+3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+903, 3, 903, 3, 3, 3, 3, 3, 3, 3, 903, 903, 5, 5]
+
+
+

Most of them are as short as we would expect, but there are some +complications. Let’s look at that first one by printing out its LTF:

+
>>> print sg[0].ltf
+# C-can thru to RFT thru.
+CANAD0B
+RFTA15B attenuation=10 therm_avail=ch7
+
+# RFT thru to HK preamp.
+RFTA15B attenuation=10 therm_avail=ch7
+CHB036C7
+HPA0002A
+
+# HK preamp to HK readout.
+HPA0002A
+ATMEGA49704949575721220150
+HKR00
+
+# HK readout to HK ATMega.
+HKR00
+ATMEGA50874956504915100100
+etc...
+etc...
+# RFT thru to FLA.
+RFTA15B attenuation=10 therm_avail=ch7
+FLA0159B
+
+
+

Some FLA’s are connected to HK hydra cables and we need to terminate on these +as well. It turns out that some outer bulkheads are connected to 200 m +coaxial cables, and some FLA’s are connected to 50 m delay cables, adding to +the list of terminations. Let’s exclude these as well:

+
>>> sg_spec.terminate += ["200m coax", "HK hydra", "50m coax"]
+>>> sg_spec.hide += ["200m coax", "HK hydra", "50m coax"]
+>>> sg = layout.graph.from_db(datetime(2014, 10, 5, 12, 0), sg_spec)
+>>> print [s.order() for s in sg]
+[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 3, 3, 3, 3, 3, 3,
+3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+3, 3, 3, 3, 3, 10, 10, 5, 5]
+
+
+

The remaining subgraphs with more than three components actually turn out to +be errors in the layout! Let’s investigate the last one by removing any hidden +components and printing its LTF.

+
>>> sn = sg[-1].component(type = "C-can thru")[0].sn
+CANBL1B
+>>> sg_spec.hide = []
+>>> bad_sg = layout.graph.from_db(datetime(2014, 10, 5, 12, 0), sg_spec, sn)
+>>> print bad_sg.ltf()
+# C-can thru to c-can thru.
+CANBL1B
+CXS0017
+RFTQ00B
+CXS0016
+FLA0073B
+RFTQ01B attenuation=9
+CXS0015
+CANBL0B
+
+
+

It appears that CXS0016 mistakenly connects RFTQ00B to +FLA0073B. This is an error that should be investigated and fixed. But +by way of illustration, let’s cut this subgraph short by specifying a one-way +connection, and not allowing the subgrapher to trace backwards from the inner +bulkhead to an SMA cable:

+
>>> sg_spec.oneway = [["SMA coax", "RFT thru"]]
+>>> bad_sg = layout.graph.from_db(datetime(2014, 10, 5, 12, 0), sg_spec, sn)
+>>> print bad_sg.ltf()
+# C-can thru to RFT thru.
+CANBL1B
+CXS0017
+RFTQ00B
+
+
+
+
+classmethod from_predef(predef)[source]
+

Create a subgraph specification from a predefined version in the DB.

+
+
Parameters:
+

predef (predef_subgraph_spec) – A predefined subgraph specification in the DB.

+
+
+
+ +
+
+property hide
+

The component type ID(s) that are skipped over in the subgraph.

+
+ +
+
+property oneway
+

Pairs of component type ID(s) for one-way tracing of the subgraph.

+
+ +
+
+property start
+

The component type ID starting the subgraph.

+
+ +
+
+property terminate
+

The component type ID(s) terminating the subgraph.

+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/_autosummary/ch_util.ni_utils.html b/docs/_autosummary/ch_util.ni_utils.html new file mode 100644 index 00000000..f86d8832 --- /dev/null +++ b/docs/_autosummary/ch_util.ni_utils.html @@ -0,0 +1,708 @@ + + + + + + + ch_util.ni_utils — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

ch_util.ni_utils

+

Tools for noise injection data

+

Functions

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

gains2utvec(g)

Converts a vector into an outer product matrix and vectorizes its upper triangle to obtain a vector in same format as the CHIME visibility matrix.

gains2utvec_tf(gains)

Converts gain array to CHIME visibility format for all frequencies and time frames.

gen_prod_sel(channels_to_select, ...)

Generates correlation product indices for selected channels.

ktrprod(A, B)

Khatri-Rao or column-wise Kronecker product of two matrices.

mat2utvec(A)

Vectorizes its upper triangle of the (hermitian) matrix A.

ni_als(R, g0, Gamma, Upsilon, maxsteps, ...)

Implementation of the Alternating Least Squares algorithm for noise injection.

ni_gains_evalues(C[, normalize_vis])

Basic algorithm to compute gains and evalues from noise injection data.

ni_gains_evalues_tf(vis_gated, Nchannels[, ...])

Computes gains and evalues from noise injection visibility data.

process_gated_data(data[, only_off])

Processes fast gating data and turns it into gated form.

process_synced_data(data[, ni_params, only_off])

Turn a synced noise source observation into gated form.

sort_evalues_mag(evalues)

Sorts eigenvalue array by magnitude for all frequencies and time frames

subtract_sky_noise(vis, Nchannels, ...)

Removes sky and system noise contributions from noise injection visibility data.

utvec2mat(n, utvec)

Recovers a hermitian matrix a from its upper triangle vectorized version.

+

Classes

+ + + + + + +

ni_data(Reader_read_obj, Nadc_channels[, ...])

Provides analysis utilities for CHIME noise injection data.

+
+
+ch_util.ni_utils.gains2utvec(g)[source]
+

Converts a vector into an outer product matrix and vectorizes its upper +triangle to obtain a vector in same format as the CHIME visibility matrix.

+
+
Parameters:
+

g (1d array) – gain vector

+
+
Return type:
+

1d array with vectorized form of upper triangle for the outer product of g

+
+
+
+ +
+
+ch_util.ni_utils.gains2utvec_tf(gains)[source]
+

Converts gain array to CHIME visibility format for all frequencies and +time frames.

+

For every frequency and time frame, converts a gain vector into an outer +product matrix and then vectorizes its upper triangle to obtain a vector in +the same format as the CHIME visibility matrix.

+

Converting the gain arrays to CHIME visibility format makes easier to +apply the gain corrections to the visibility data. See example below.

+
+
Parameters:
+

gains (3d array) – Input array with the gains for all frequencies, channels and time frames +in the format of ni_gains_evalues_tf. g has dimensions +[frequency, channels, time].

+
+
Returns:
+

G_ut – Output array with dimensions [frequency, corr. number, time]. For +every frequency and time frame, contains the vectorized form of upper +triangle for the outer product of the respective gain vector.

+
+
Return type:
+

3d array

+
+
+

Example

+

To compute the gains from a set of noise injection pass0 data and apply the +gains to the visibilities run:

+
>>> from ch_util import andata
+>>> from ch_util import ni_utils as ni
+>>> data = andata.Reader('/scratch/k/krs/jrs65/chime_archive/20140916T173334Z_blanchard_corr/000[0-3]*.h5')
+>>> readdata = data.read()
+>>> nidata = ni.ni_data(readdata, 16)
+>>> nidata.get_ni_gains()
+>>> G_ut = ni.gains2utvec(nidata.ni_gains)
+>>> corrected_vis = nidata.vis_off_dec/G_ut
+
+
+ +
+ +
+
+ch_util.ni_utils.gen_prod_sel(channels_to_select, total_N_channels)[source]
+

Generates correlation product indices for selected channels.

+

For a correlation matrix with total_N_channels total number of channels, +generates indices for correlation products corresponding to channels in +the list channels_to_select.

+
+
Parameters:
+
    +
  • channels_to_select (list of integers) – Indices of channels to select

  • +
  • total_N_channels (int) – Total number of channels

  • +
+
+
Returns:
+

prod_sel – indices of correlation products for channels in channels_to_select

+
+
Return type:
+

array

+
+
+
+ +
+
+ch_util.ni_utils.ktrprod(A, B)[source]
+

Khatri-Rao or column-wise Kronecker product of two matrices.

+

A and B have the same number of columns

+
+
Parameters:
+
    +
  • A (2d array)

  • +
  • B (2d array)

  • +
+
+
Returns:
+

C – Khatri-Rao product of A and B

+
+
Return type:
+

2d array

+
+
+
+ +
+
+ch_util.ni_utils.mat2utvec(A)[source]
+

Vectorizes its upper triangle of the (hermitian) matrix A.

+
+
Parameters:
+

A (2d array) – Hermitian matrix

+
+
Return type:
+

1d array with vectorized form of upper triangle of A

+
+
+

Example

+

if A is a 3x3 matrix then the output vector is +outvector = [A00, A01, A02, A11, A12, A22]

+
+

See also

+

utvec2mat

+
+
+ +
+
+ch_util.ni_utils.ni_als(R, g0, Gamma, Upsilon, maxsteps, abs_tol, rel_tol, weighted_als=True)[source]
+

Implementation of the Alternating Least Squares algorithm for noise +injection.

+

Implements the Alternating Least Squares algorithm to recover the system +gains, sky covariance matrix and system output noise covariance matrix +from the data covariance matrix R. All the variables and definitions are as +in http://bao.phas.ubc.ca/doc/library/doc_0103/rev_01/chime_calibration.pdf

+
+
Parameters:
+
    +
  • R (2d array) – Data covariance matrix

  • +
  • g0 (1d array) – First estimate of system gains

  • +
  • Gamma (2d array) – Matrix that characterizes parametrization of sky covariance matrix

  • +
  • Upsilon (2d array) – Matrix characterizing parametrization of system noise covariance matrix

  • +
  • maxsteps (int) – Maximum number of iterations

  • +
  • abs_tol (float) – Absolute tolerance on error function

  • +
  • rel_tol (float) – Relative tolerance on error function

  • +
  • weighted_als (bool) – If True, perform weighted ALS

  • +
+
+
Returns:
+

    +
  • g (1d array) – System gains

  • +
  • C (2d array) – Sky covariance matrix

  • +
  • N (2d array) – System output noise covariance matrix

  • +
  • err (1d array) – Error function for every step

  • +
+

+
+
+
+

See also

+
+
http://bao.phas.ubc.ca/doc/library/doc_0103/rev_01/chime_calibration.pdf

+
+
+
+
+ +
+
+class ch_util.ni_utils.ni_data(Reader_read_obj, Nadc_channels, adc_ch_ref=None, fbin_ref=None)[source]
+

Bases: object

+

Provides analysis utilities for CHIME noise injection data.

+

This is just a wrapper for all the utilities created in this module.

+
+
Parameters:
+
    +
  • Reader_read_obj (andata.Reader.read() like object) – Contains noise injection data. Must have ‘vis’ and ‘timestamp’ property. +Assumed to contain all the Nadc_channels*(Nadc_channels+1)/2 correlation +products, in chime’s canonical vector, for an +Nadc_channels x Nadc_channels correlation matrix

  • +
  • Nadc_channels (int) – Number of channels read in Reader_read_obj

  • +
  • adc_ch_ref (int in the range 0 <= adc_ch_ref <= Nadc_channels-1) – Reference channel (used to find on/off points).

  • +
  • fbin_ref (int in the range) – 0 <= fbin_ref <= np.size(Reader_read_obj.vis, 0)-1 +Reference frequency bin (used to find on/off points).

  • +
+
+
+
+
+subtract_sky_noise : Removes sky and system noise contributions from noise
+

injection visibility data.

+
+ +
+
+get_ni_gains : Solve for gains from decimated sky-and-noise-subtracted
+

visibilities

+
+ +
+
+get_als_gains : Compute gains, sky and system noise covariance matrices from
+

a combination of noise injection gains and point source gains

+
+ +

Processes raw noise injection data so it is ready to compute gains.

+
+
+get_als_gains()[source]
+

Compute gains, sky and system noise covariance matrices from a +combination of noise injection gains and point source gains

+
+ +
+
+get_ni_gains(normalize_vis=False, masked_channels=None)[source]
+

Computes gains and evalues from noise injection visibility data.

+
+

See also

+

ni_gains_evalues_tf

+
+
masked_channels

list of integer channel indices which are not considered in the calculation of the gains.

+
+
+
+
+ +
+
+save()[source]
+

Save gain solutions

+
+ +
+
+subtract_sky_noise()[source]
+

Removes sky and system noise contributions from noise injection +visibility data.

+
+

See also

+

subtract_sky_noise

+
+
+ +
+ +
+
+ch_util.ni_utils.ni_gains_evalues(C, normalize_vis=False)[source]
+

Basic algorithm to compute gains and evalues from noise injection data.

+

C is a correlation matrix from which the gains are calculated. +If normalize_vis = True, the visibility matrix is weighted by the diagonal +matrix that turns it into a crosscorrelation coefficient matrix before the +gain calculation. The eigenvalues are not sorted. The returned gain solution +vector is normalized (LA.norm(g) = 1.)

+
+
Parameters:
+
    +
  • C (2d array) – Data covariance matrix from which the gains are calculated. It is +assumed that both the sky and system noise contributions have already +been subtracted using noise injection

  • +
  • normalize_vis (bool) – If True, the visibility matrix is weighted by the diagonal matrix that +turns it into a crosscorrelation coefficient matrix before the +gain calculation.

  • +
+
+
Returns:
+

    +
  • g (1d array) – Noise injection gains

  • +
  • ev (1d array) – Noise injection eigenvalues

  • +
+

+
+
+ +
+ +
+
+ch_util.ni_utils.ni_gains_evalues_tf(vis_gated, Nchannels, normalize_vis=False, vis_on=None, vis_off=None, niter=0)[source]
+

Computes gains and evalues from noise injection visibility data.

+

Gains and eigenvalues are calculated for all frames and +frequencies in vis_gated. The returned gain solution +vector is normalized (LA.norm(gains[f, :, t]) = 1.)

+
+
Parameters:
+
    +
  • vis_gated (3d array) – Visibility array in chime’s canonical format. vis_gated has dimensions +[frequency, corr. number, time]. It is assumed that both the sky and +system noise contributions have already been subtracted using noise +injection.

  • +
  • Nchannels (int) – Order of the visibility matrix (number of channels)

  • +
  • normalize_vis (bool) – If True, then the visibility matrix is weighted by the diagonal matrix that +turns it into a crosscorrelation coefficient matrix before the +gain calculation.

  • +
  • vis_on (3d array) – If input and normalize_vis is True, then vis_gated is weighted +by the diagonal elements of the matrix vis_on. +vis_on must be the same shape as vis_gated.

  • +
  • vis_off (3d array) – If input and normalize_vis is True, then vis_gated is weighted +by the diagonal elements of the matrix: vis_on = vis_gated + vis_off. +vis_off must be the same shape as vis_gated. Keyword vis_on +supersedes keyword vis_off.

  • +
  • niter (int) – Number of iterations to perform. At each iteration, the diagonal +elements of vis_gated are replaced with their rank 1 approximation. +If niter == 0 (default), then no iterations are performed and the +autocorrelations are used instead.

  • +
+
+
Returns:
+

    +
  • gains (3d array) – Noise injection gains

  • +
  • evals (3d array) – Noise injection eigenvalues

  • +
  • Dependencies

  • +
  • ————

  • +
  • tools.normalise_correlations, tools.eigh_no_diagonal

  • +
+

+
+
+ +
+ +
+
+ch_util.ni_utils.process_gated_data(data, only_off=False)[source]
+

Processes fast gating data and turns it into gated form.

+
+
Parameters:
+
    +
  • data (andata.CorrData) – Correlator data with noise source switched synchronously with the +integration.

  • +
  • only_off (boolean) – Only return the off dataset. Do not return gated datasets.

  • +
+
+
Returns:
+

    +
  • newdata (andata.CorrData) – Correlator data folded on the noise source.

  • +
  • Comments

  • +
  • ——–

  • +
  • For now the correlator only supports fast gating with one gate

  • +
  • (gated_vis1) and 50% duty cycle. The vis dataset contains on+off

  • +
  • and the gated_vis1 contains on-off. This function returns a new

  • +
  • andata object with vis containing the off data only and gated_vis1

  • +
  • as in the original andata object. The attribute

  • +
  • ’gpu.gpu_intergration_period’ is divided by 2 since during an

  • +
  • integration half of the frames have on data.

  • +
+

+
+
+
+ +
+
+ch_util.ni_utils.process_synced_data(data, ni_params=None, only_off=False)[source]
+

Turn a synced noise source observation into gated form.

+

This will decimate the visibility to only the noise source off bins, and +will add 1 or more gated on-off dataset according to the specification in +doclib:5.

+
+
Parameters:
+
    +
  • data (andata.CorrData) – Correlator data with noise source switched synchronously with the +integration.

  • +
  • ni_params (dict) – Dictionary with the noise injection parameters. Optional +for data after ctime=1435349183. ni_params has the following keys +- ni_period: Noise injection period in GPU integrations. +It is assumed to be the same for all the enabled noise sources +- ni_on_bins: A list of lists, one per enabled noise source, +with the corresponding ON gates (within a period). For each +noise source, the list contains the indices of the time frames +for which the source is ON. +Example: For 3 GPU integration period (3 gates: 0, 1, 2), two enabled +noise sources, one ON during gate 0, the other ON during gate 1, +and both OFF during gate 2, then +` +ni_params = {'ni_period':3, 'ni_on_bins':[[0], [1]]} +`

  • +
  • only_off (boolean) – Only return the off dataset. Do not return gated datasets.

  • +
+
+
Returns:
+

    +
  • newdata (andata.CorrData) – Correlator data folded on the noise source.

  • +
  • Comments

  • +
  • ——–

  • +
  • - The function assumes that the fpga frame counter, which is used to

  • +
  • determine the noise injection gating parameters, is unwrapped.

  • +
  • - For noise injection data before ctime=1435349183 (i.e. for noise

  • +
  • injection data before 20150626T200540Z_pathfinder_corr) the noise

  • +
  • injection information is not in the headers so this function cannot be

  • +
  • used to determine the noise injection parameters. A different method is

  • +
  • required. Although it is recommended to check the data directly in this

  • +
  • case, the previous version of this function assumed that

  • +
  • ni_params = {'ni_period': 2, 'ni_on_bins': [[0],]}

  • +
  • for noise injection data before ctime=1435349183. Although this is not

  • +
  • always true, it is true for big old datasets like pass1g.

  • +
  • Use the value of ni_params recommended above to reproduce the

  • +
  • results of the old function with the main old datasets.

  • +
  • - Data (visibility, gain and weight datasets) are averaged for all the

  • +
  • off gates within the noise source period, and also for all the on

  • +
  • gates of each noise source.

  • +
  • - For the time index map, only one timestamp per noise period is kept

  • +
  • (no averaging)

  • +
+

+
+
+
+ +
+
+ch_util.ni_utils.sort_evalues_mag(evalues)[source]
+

Sorts eigenvalue array by magnitude for all frequencies and time frames

+
+
Parameters:
+

evalues (3d array) – Array of evalues. Its shape is [Nfreqs, Nevalues, Ntimeframes]

+
+
Returns:
+

ev – Array of same shape as evalues

+
+
Return type:
+

3d array

+
+
+
+ +
+
+ch_util.ni_utils.subtract_sky_noise(vis, Nchannels, timestamp, adc_ch_ref, fbin_ref)[source]
+

Removes sky and system noise contributions from noise injection visibility +data.

+

By looking at the autocorrelation of the reference channel adc_ch_ref +for frequency bin fbin_ref, finds timestamp indices for which the signal is +on and off. For every noise signal period, the subcycles with the noise +signal on and off are averaged separately and then subtracted.

+

It is assumed that there are at least 5 noise signal cycles in the data. +The first and last noise on subcycles are discarded since those cycles may +be truncated.

+
+
Parameters:
+
    +
  • vis (3d array) – Noise injection visibility array in chime’s canonical format. vis has +dimensions [frequency, corr. number, time].

  • +
  • Nchannels (int) – Order of the visibility matrix (number of channels)

  • +
  • timestamp (1d array) – Timestamps for the visibility array vis

  • +
  • adc_ch_ref (int in the range 0 <= adc_ch_ref <= N_channels-1) – Reference channel (typically, but not necessarily the channel +corresponding to the directly injected noise signal) used to find +timestamp indices for which the signal is on and off.

  • +
  • fbin_ref (int in the range 0 <= fbin_ref <= np.size(vis, 0)-1) – frequency bin used to find timestamps indices for which the signal is +on and off

  • +
+
+
Returns:
+

    +
  • A dictionary with keys

  • +
  • time_index_on (1d array) – timestamp indices for noise signal on.

  • +
  • time_index_off (1d array) – timestamp indices for noise signal off.

  • +
  • timestamp_on_dec (1d array) – timestamps for noise signal on after averaging.

  • +
  • timestamp_off_dec (1d array) – timestamps for noise signal off after averaging.

  • +
  • timestamp_dec (1d array) – timestamps for visibility data after averaging and subtracting on and +off subcycles. These timestamps represent the time for every noise cycle +and thus, these are the timestamps for the gain solutions.

  • +
  • vis_on_dec (3d array) – visibilities for noise signal on after averaging.

  • +
  • vis_off_dec (3d array) – visibilities for noise signal off after averaging.

  • +
  • vis_dec_sub (3d array) – visibilities data after averaging and subtracting on and +off subcycles.

  • +
  • cor_prod_ref (int) – correlation index corresponding to the autocorrelation of the reference +channel

  • +
+

+
+
+
+ +
+
+ch_util.ni_utils.utvec2mat(n, utvec)[source]
+

Recovers a hermitian matrix a from its upper triangle vectorized version.

+
+
Parameters:
+
    +
  • n (int) – order of the output hermitian matrix

  • +
  • utvec (1d array) – vectorized form of upper triangle of output matrix

  • +
+
+
Returns:
+

A – hermitian matrix

+
+
Return type:
+

2d array

+
+
+
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/_autosummary/ch_util.plot.html b/docs/_autosummary/ch_util.plot.html new file mode 100644 index 00000000..354d1898 --- /dev/null +++ b/docs/_autosummary/ch_util.plot.html @@ -0,0 +1,220 @@ + + + + + + + ch_util.plot — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

ch_util.plot

+

Plotting routines for CHIME data

+

Functions

+ + + + + + + + + + + + +

spectra(data[, freq_sel, prod_sel, ...])

Plots spectra at different times and for different correlation products.

time_ordered(data[, freq_sel, prod_sel, ...])

Plots data vs time for different frequencies and corr-prods.

waterfall(data[, freq_sel, prod_sel, ...])

Two dimensional plot of a visibility vs time and frequency.

+
+
+ch_util.plot.spectra(data, freq_sel=None, prod_sel=None, time_sel=None, part_sel=None, **kwargs)[source]
+

Plots spectra at different times and for different correlation products.

+
+ +
+
+ch_util.plot.time_ordered(data, freq_sel=None, prod_sel=None, time_sel=None, part_sel=None, **kwargs)[source]
+

Plots data vs time for different frequencies and corr-prods.

+
+ +
+
+ch_util.plot.waterfall(data, freq_sel=None, prod_sel=None, time_sel=None, part_sel=None, **kwargs)[source]
+

Two dimensional plot of a visibility vs time and frequency.

+
+
Parameters:
+
    +
  • data (numpy array or AnData object) – Data to plot. If a numpy array, must be 2D or 3D.

  • +
  • freq_sel (valid numpy index) – Selects data to include along the frequency axis.

  • +
  • prod_sel (valid numpy index) – Selects data to include along the correlation product axis. If data +is a 2D array, this argument is ignored.

  • +
  • time_sel (valid numpy index) – Selects data to include along the time axis.

  • +
  • part_sel (string, one of: 'real', 'imag', 'mag', 'phase' or 'complex') – Selects what part of data to plot. If ‘None’, plot real part.

  • +
+
+
+

Examples

+
>>> data = np.ones((100, 100))
+>>> waterfall(data)
+
+
+
>>> data = andata.AnData.from_acq("...")
+>>> waterfall(data, prod_sel=5, out_file='filename.png')
+
+
+

To make a plot normalized by a baseline of the median-filtered +power spectrum averaged over 200 time bins starting at bin 0 with +a median filter window of 40 bins: +>>> data = andata.AnData.from_acq(”…”) +>>> med_filt_arg = [‘new’,200,0,40] +>>> waterfall(data, prod_sel=21, med_filt=med_filt_arg)

+

You can also make it save the calculated baseline to a file, +by providing the filename: +>>> data = andata.AnData.from_acq(”…”) +>>> med_filt_arg = [‘new’,200,0,40,’base_filename.dat’] +>>> waterfall(data, prod_sel=21, med_filt=med_filt_arg)

+

…or to use a previously obtained baseline to normalize data: +(where bsln is either a numpy array or a list with length equal +to the frequency axis of the data) +>>> data = andata.AnData.from_acq(”…”) +>>> med_filt_arg = [‘old’,bsln] +>>> waterfall(data, prod_sel=21, med_filt=med_filt_arg)

+

To make a full day plot of 01/14/2014, +rebinned to 4000 time bins: +>>> data = andata.AnData.from_acq(”…”) +>>> full_day_arg = [[2014,01,14],4000,’time’] +>>> waterfall(data, prod_sel=21, full_day=full_day_arg)

+
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/_autosummary/ch_util.rfi.html b/docs/_autosummary/ch_util.rfi.html new file mode 100644 index 00000000..014e2332 --- /dev/null +++ b/docs/_autosummary/ch_util.rfi.html @@ -0,0 +1,585 @@ + + + + + + + ch_util.rfi — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

ch_util.rfi

+

Tools for RFI flagging

+

This module contains tools for finding and removing Radio Frequency Interference +(RFI).

+

Note that this generates masks where the elements containing RFI are marked as +True, and the remaining elements are marked False. This is in +contrast to the routines in ch_pipeline.rfi which generate an inverse +noise weighting, where RFI containing elements are effectively False, and +the remainder are True.

+

There are general purpose routines for flagging RFI in andata like datasets:

+ +

For more control there are specific routines that can be called:

+ +

Functions

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

flag_dataset(data[, freq_width, time_width, ...])

RFI flag the dataset.

frequency_mask(freq_centre[, freq_width, ...])

Flag known bad frequencies.

get_autocorrelations(data[, stack, normalize])

Extract autocorrelations from a data stack.

highpass_delay_filter(freq, tau_cut, flag[, ...])

Construct a high-pass delay filter.

iterative_hpf_masking(freq, y[, flag, ...])

Mask features in a spectrum that have significant power at high delays.

mad_cut_1d(data[, twidth, threshold, mask])

Mask out RFI using a median absolute deviation cut in the time direction.

mad_cut_2d(data[, fwidth, twidth, ...])

Mask out RFI using a median absolute deviation cut in time-frequency blocks.

mad_cut_rolling(data[, fwidth, twidth, ...])

Mask out RFI by placing a cut on the absolute deviation.

nanmedian(*args, **kwargs)

number_deviations(data[, freq_width, ...])

Calculate the number of median absolute deviations (MAD) of the autocorrelations from the local median.

sir(basemask[, eta, only_freq, only_time])

Apply the SIR operator over the frequency and time axes for each product.

sir1d(basemask[, eta])

Numpy implementation of the scale-invariant rank (SIR) operator.

spectral_cut(data[, fil_window, only_autos])

Flag out the TV bands, or other constant spectral RFI.

+
+
+ch_util.rfi.flag_dataset(data, freq_width=10.0, time_width=420.0, threshold=5.0, flag1d=False, rolling=False)[source]
+

RFI flag the dataset. This function wraps number_deviations, +and remains largely for backwards compatibility. The pipeline code +now calls number_deviations directly.

+
+
Parameters:
+
    +
  • data (andata.CorrData) – Must contain vis and weight attribute that are both +np.ndarray[nfreq, nprod, ntime]. Note that this +function does not work with CorrData that has +been stacked over redundant baselines.

  • +
  • freq_width (float) – Frequency interval in MHz to compare across.

  • +
  • time_width (float) – Time interval in seconds to compare.

  • +
  • threshold (float) – Threshold in MAD over which to cut out RFI.

  • +
  • rolling (bool) – Use a rolling window instead of distinct blocks.

  • +
  • flag1d (bool, optional) – Only apply the MAD cut in the time direction. This is useful if the +frequency coverage is sparse.

  • +
+
+
Returns:
+

mask – RFI mask, output shape is the same as input visibilities.

+
+
Return type:
+

np.ndarray

+
+
+
+ +
+
+ch_util.rfi.frequency_mask(freq_centre: ndarray, freq_width: ndarray | float | None = None, timestamp: ndarray | float | None = None, instrument: str | None = 'chime') ndarray[source]
+

Flag known bad frequencies.

+

Time dependent static RFI flags that affect the recent observations are added.

+
+
Parameters:
+
    +
  • freq_centre – Centre of each frequency channel

  • +
  • freq_width – Width of each frequency channel. If None (default), calculate the width from +the frequency centre separation. If supplied as an array it must be +broadcastable +against freq_centre.

  • +
  • timestamp – UNIX observing time. If None (default) mask all specified bands regardless of +their start/end times, otherwise mask only timestamps within the band start and +end times. If supplied as an array it must be broadcastable against +freq_centre.

  • +
  • instrument – Telescope name. [kko, gbo, hco, chime (default)]

  • +
+
+
Returns:
+

An array marking the bad frequency channels. The final shape is the result of +broadcasting freq_centre and timestamp together.

+
+
Return type:
+

mask

+
+
+
+ +
+
+ch_util.rfi.get_autocorrelations(data, stack: bool = False, normalize: bool = False) Tuple[ndarray, ndarray, ndarray][source]
+

Extract autocorrelations from a data stack.

+
+
Parameters:
+
    +
  • data (andata.CorrData) – Must contain vis and weight attributes that are both +np.ndarray[nfreq, nprod, ntime].

  • +
  • stack (bool, optional) – Average over all autocorrelations.

  • +
  • normalize (bool, optional) – Normalize by the median value over time prior to averaging over +autocorrelations. Only relevant if stack is True.

  • +
+
+
Returns:
+

    +
  • auto_ii (np.ndarray[ninput,]) – Index of the inputs that have been processed. +If stack is True, then [0] will be returned.

  • +
  • auto_vis (np.ndarray[nfreq, ninput, ntime]) – The autocorrelations that were used to calculate +the number of deviations.

  • +
  • auto_flag (np.ndarray[nfreq, ninput, ntime]) – Indices where data weights are positive

  • +
+

+
+
+
+ +
+
+ch_util.rfi.highpass_delay_filter(freq, tau_cut, flag, epsilon=1e-10)[source]
+

Construct a high-pass delay filter.

+

The stop band will range from [-tau_cut, tau_cut]. +DAYENU is used to construct the filter in the presence +of masked frequencies. See Ewall-Wice et al. 2021 +(arXiv:2004.11397) for a description.

+
+
Parameters:
+
    +
  • freq (np.ndarray[nfreq,]) – Frequency in MHz.

  • +
  • tau_cut (float) – The half width of the stop band in micro-seconds.

  • +
  • flag (np.ndarray[nfreq,]) – Boolean flag that indicates what frequencies are valid.

  • +
  • epsilon (float) – The stop-band rejection of the filter.

  • +
+
+
Returns:
+

pinv – High pass delay filter.

+
+
Return type:
+

np.ndarray[nfreq, nfreq]

+
+
+
+ +
+
+ch_util.rfi.iterative_hpf_masking(freq, y, flag=None, tau_cut=0.6, epsilon=1e-10, window=65, threshold=6.0, nperiter=1, niter=40, timestamp=None)[source]
+

Mask features in a spectrum that have significant power at high delays.

+

Uses the following iterative procedure to generate the mask:

+
+
    +
  • Apply a high-pass filter to the spectrum.

  • +
  • For each frequency channel, calculate the median absolute +deviation of nearby frequency channels to get an estimate +of the noise. Divide the high-pass filtered spectrum by +the noise estimate.

  • +
  • Mask excursions with the largest signal to noise.

  • +
  • Regenerate the high-pass filter using the new mask.

  • +
  • Repeat.

  • +
+
+

The procedure stops when the maximum number of iterations is reached +or there are no excursions beyond some threshold.

+
+
Parameters:
+
    +
  • freq (np.ndarray[nfreq,]) – Frequency in MHz.

  • +
  • y (np.ndarray[nfreq,]) – Spectrum to search for narrowband features.

  • +
  • flag (np.ndarray[nfreq,]) – Boolean flag where True indicates valid data.

  • +
  • tau_cut (float) – Cutoff of the high-pass filter in microseconds.

  • +
  • epsilon (float) – Stop-band rejection of the filter.

  • +
  • threshold (float) – Number of median absolute deviations beyond which +a frequency channel is considered an outlier.

  • +
  • window (int) – Width of the window used to estimate the noise +(by calculating a local median absolute deviation).

  • +
  • nperiter (int) – Maximum number of frequency channels to flag +on any iteration.

  • +
  • niter (int) – Maximum number of iterations.

  • +
  • timestamp (float) – Start observing time (in unix time)

  • +
+
+
Returns:
+

    +
  • yhpf (np.ndarray[nfreq,]) – The high-pass filtered spectrum generated using +the mask from the last iteration.

  • +
  • flag (np.ndarray[nfreq,]) – Boolean flag where True indicates valid data. +This is the logical complement to the mask +from the last iteration.

  • +
  • rsigma (np.ndarray[nfreq,]) – The local median absolute deviation from the last +iteration.

  • +
+

+
+
+
+ +
+
+ch_util.rfi.mad_cut_1d(data, twidth=42, threshold=5.0, mask=True)[source]
+

Mask out RFI using a median absolute deviation cut in the time direction.

+

This is useful for datasets with sparse frequency coverage. Functionally +this routine is equivalent to mad_cut_2d() with fwidth = 1, but will +be much faster.

+
+
Parameters:
+
    +
  • data (np.ndarray[freq, time]) – Array of data to mask.

  • +
  • twidth (integer, optional) – Number of time samples to average median over.

  • +
  • threshold (scalar, optional) – Number of median deviations above which we cut the data.

  • +
  • mask (boolean, optional) – If True return the mask, if False return the number of +median absolute deviations.

  • +
+
+
Returns:
+

mask – Mask or number of median absolute deviations for each sample.

+
+
Return type:
+

np.ndarray[freq, time]

+
+
+
+ +
+
+ch_util.rfi.mad_cut_2d(data, fwidth=64, twidth=42, threshold=5.0, freq_flat=True, mask=True)[source]
+

Mask out RFI using a median absolute deviation cut in time-frequency blocks.

+
+
Parameters:
+
    +
  • data (np.ndarray[freq, time]) – Array of data to mask.

  • +
  • fwidth (integer, optional) – Number of frequency samples to average median over.

  • +
  • twidth (integer, optional) – Number of time samples to average median over.

  • +
  • threshold (scalar, optional) – Number of median deviations above which we cut the data.

  • +
  • freq_flat (boolean, optional) – Flatten in the frequency direction by dividing through by the median.

  • +
  • mask (boolean, optional) – If True return the mask, if False return the number of +median absolute deviations.

  • +
+
+
Returns:
+

mask – Mask or number of median absolute deviations for each sample.

+
+
Return type:
+

np.ndarray[freq, time]

+
+
+
+ +
+
+ch_util.rfi.mad_cut_rolling(data, fwidth=64, twidth=42, threshold=5.0, freq_flat=True, mask=True, limit_range: slice = slice(None, None, None))[source]
+

Mask out RFI by placing a cut on the absolute deviation. +Compared to mad_cut_2d, this function calculates +the median and median absolute deviation using a rolling +2D median filter, i.e., for every (freq, time) sample a +separate estimate of these statistics is obtained for a +window that is centered on that sample.

+

For sparsely sampled frequency axis, set fwidth = 1.

+
+
Parameters:
+
    +
  • data (np.ndarray[freq, time]) – Array of data to mask.

  • +
  • fwidth (integer, optional) – Number of frequency samples to calculate median over.

  • +
  • twidth (integer, optional) – Number of time samples to calculate median over.

  • +
  • threshold (scalar, optional) – Number of median absolute deviations above which we cut the data.

  • +
  • freq_flat (boolean, optional) – Flatten in the frequency direction by dividing each frequency +by the median over time.

  • +
  • mask (boolean, optional) – If True return the mask, if False return the number of +median absolute deviations.

  • +
  • limit_range (slice, optional) – Data is limited to this range in the frequency axis. Defaults to slice(None).

  • +
+
+
Returns:
+

mask – Mask or number of median absolute deviations for each sample.

+
+
Return type:
+

np.ndarray[freq, time]

+
+
+
+ +
+
+ch_util.rfi.number_deviations(data, freq_width=10.0, time_width=420.0, flag1d=False, apply_static_mask=False, rolling=False, stack=False, normalize=False, fill_value=None)[source]
+

Calculate the number of median absolute deviations (MAD) +of the autocorrelations from the local median.

+
+
Parameters:
+
    +
  • data (andata.CorrData) – Must contain vis and weight attributes that are both +np.ndarray[nfreq, nprod, ntime].

  • +
  • freq_width (float) – Frequency interval in MHz to compare across.

  • +
  • time_width (float) – Time interval in seconds to compare across.

  • +
  • flag1d (bool) – Only apply the MAD cut in the time direction. This is useful if the +frequency coverage is sparse.

  • +
  • apply_static_mask (bool) – Apply static mask obtained from frequency_mask before computing +the median absolute deviation.

  • +
  • rolling (bool) – Use a rolling window instead of distinct blocks.

  • +
  • stack (bool) – Average over all autocorrelations.

  • +
  • normalize (bool) – Normalize by the median value over time prior to averaging over +autocorrelations. Only relevant if stack is True.

  • +
  • fill_value (float) – Data that was already flagged as bad will be set to this value in +the output array. Should be a large positive value that is greater +than the threshold that will be placed. Default is float(‘Inf’).

  • +
+
+
Returns:
+

    +
  • auto_ii (np.ndarray[ninput,]) – Index of the inputs that have been processed. +If stack is True, then [0] will be returned.

  • +
  • auto_vis (np.ndarray[nfreq, ninput, ntime]) – The autocorrelations that were used to calculate +the number of deviations.

  • +
  • ndev (np.ndarray[nfreq, ninput, ntime]) – Number of median absolute deviations of the autocorrelations +from the local median.

  • +
+

+
+
+
+ +
+
+ch_util.rfi.sir(basemask, eta=0.2, only_freq=False, only_time=False)[source]
+

Apply the SIR operator over the frequency and time axes for each product.

+

This is a wrapper for sir1d. It loops over times, applying sir1d +across the frequency axis. It then loops over frequencies, applying sir1d +across the time axis. It returns the logical OR of these two masks.

+
+
Parameters:
+
    +
  • basemask (np.ndarray[nfreq, nprod, ntime] of boolean type) – The previously generated threshold mask. +1 (True) for masked points, 0 (False) otherwise.

  • +
  • eta (float) – Aggressiveness of the method: with eta=0, no additional samples are +flagged and the function returns basemask. With eta=1, all samples +will be flagged.

  • +
  • only_freq (bool) – Only apply the SIR operator across the frequency axis.

  • +
  • only_time (bool) – Only apply the SIR operator across the time axis.

  • +
+
+
Returns:
+

mask – The mask after the application of the SIR operator.

+
+
Return type:
+

np.ndarray[nfreq, nprod, ntime] of boolean type

+
+
+
+ +
+
+ch_util.rfi.sir1d(basemask, eta=0.2)[source]
+

Numpy implementation of the scale-invariant rank (SIR) operator.

+

For more information, see arXiv:1201.3364v2.

+
+
Parameters:
+
    +
  • basemask (numpy 1D array of boolean type) – Array with the threshold mask previously generated. +1 (True) for flagged points, 0 (False) otherwise.

  • +
  • eta (float) – Aggressiveness of the method: with eta=0, no additional samples are +flagged and the function returns basemask. With eta=1, all samples +will be flagged. The authors in arXiv:1201.3364v2 seem to be convinced +that 0.2 is a mostly universally optimal value, but no optimization +has been done on CHIME data.

  • +
+
+
Returns:
+

mask – The mask after the application of the (SIR) operator. Same shape and +type as basemask.

+
+
Return type:
+

numpy 1D array of boolean type

+
+
+
+ +
+
+ch_util.rfi.spectral_cut(data, fil_window=15, only_autos=False)[source]
+

Flag out the TV bands, or other constant spectral RFI.

+
+
Parameters:
+
    +
  • data (andata.obj) – If only_autos shape is (freq, n_feeds, time), else (freq, n_prod, +time).

  • +
  • fil_window (integer) – Window of median filter for baseline of chime spectrum. Default is 15.

  • +
  • only_autos (boolean) – Whether data contains only autos or not.

  • +
+
+
Returns:
+

mask – RFI mask (no product axis).

+
+
Return type:
+

np.ndarray[freq,time]

+
+
+
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/_autosummary/ch_util.timing.html b/docs/_autosummary/ch_util.timing.html new file mode 100644 index 00000000..9fc62d67 --- /dev/null +++ b/docs/_autosummary/ch_util.timing.html @@ -0,0 +1,1080 @@ + + + + + + + ch_util.timing — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

ch_util.timing

+

Tools for timing jitter and delay corrections.

+

This module contains tools for using noise sources to correct +timing jitter and timing delay.

+

Example

+

The function construct_delay_template() generates a delay template from +measurements of the visibility between noise source inputs, which can +be used to remove the timing jitter in other data.

+

The user seldom needs to work with construct_delay_template() +directly and can instead use several high-level functions and containers +that load the timing data, derive the timing correction using +construct_delay_template(), and then enable easy application of +the timing correction to other data.

+

For example, to load the timing data and derive the timing correction from +a list of timing acquisition files (i.e., YYYYMMDDTHHMMSSZ_chimetiming_corr), +use the following:

+
+

`tdata = TimingData.from_acq_h5(timing_acq_filenames)`

+
+

This results in an andata.CorrData object that has additional +methods available for applying the timing correction to other data. +For example, to obtain the complex gain for some freq, input, and time +that upon multiplication will remove the timing jitter, use the following:

+
+

`tgain, tweight = tdata.get_gain(freq, input, time)`

+
+

To apply the timing correction to the visibilities in an andata.CorrData +object called data, use the following:

+
+

`tdata.apply_timing_correction(data)`

+
+

The timing acquisitions must cover the span of time that you wish to correct. +If you have a list of data acquisition files and would like to obtain +the appropriate timing correction by searching the archive for the +corresponding timing acquisition files, then use:

+
+

`tdata = load_timing_correction(data_acq_filenames_full_path)`

+
+

To print a summary of the timing correction, use:

+
+

`print(tdata)`

+
+

Functions

+ + + + + + + + + + + + + + + + + + + + + +

construct_delay_template(data[, ...])

Construct a relative time delay template.

eigen_decomposition(vis, flag)

Eigenvalue decomposition of the visibility matrix.

fit_poly_to_phase(freq, resp, resp_error[, ...])

Fit complex data versus frequency to a model consisting of a polynomial in phase.

load_timing_correction(files[, start, stop, ...])

Find and load the appropriate timing correction for a list of corr acquisition files.

map_input_to_noise_source(inputs, noise_sources)

Find the appropriate noise source to use to correct the phase of each input.

model_poly_phase(freq, *param)

Evaluate a polynomial model for the phase.

+

Classes

+ + + + + + + + + + + + +

TimingCorrection([h5_data])

Container that holds a timing correction.

TimingData([h5_data])

Subclass of andata.CorrData for timing data.

TimingInterpolator(x, y[, weight, flag, ...])

Interpolation that is aware of flagged data and weights.

+
+
+class ch_util.timing.TimingCorrection(h5_data=None, **kwargs)[source]
+

Bases: BaseData

+

Container that holds a timing correction.

+

Provides methods for applying that correction to other datasets.

+

Used to pick which subclass to instantiate based on attributes in +data.

+
+
+property alpha
+

Provide convenience access to the alpha array.

+
+ +
+
+property amp_to_delay
+

Return conversion from noise source amplitude variations to delay variations.

+
+ +
+
+apply_timing_correction(timestream, copy=False, **kwargs)[source]
+

Apply the timing correction to another visibility dataset.

+

This method uses the get_gain or get_stacked_tau method, depending +on whether or not the visibilities have been stacked. It accepts +and passes along keyword arguments for those methods.

+
+
Parameters:
+
    +
  • timestream (andata.CorrData / equivalent or np.ndarray[nfreq, nprod, ntime]) – If timestream is an np.ndarray containing the visibilities, then you +must also pass the corresponding freq, prod, input, and time axis as kwargs. +Otherwise these quantities are obtained from the attributes of CorrData. +If the visibilities have been stacked, then you must additionally pass the +stack and reverse_stack axis as kwargs, and (optionally) the input flags.

  • +
  • copy (bool) – Create a copy of the input visibilities. Apply the timing correction to +the copy and return it, leaving the original untouched. Default is False.

  • +
  • freq (np.ndarray[nfreq, ]) – Frequency in MHz. +Must be passed as keyword argument if timestream is an np.ndarray.

  • +
  • prod (np.ndarray[nprod, ]) – Product map. +Must be passed as keyword argument if timestream is an np.ndarray.

  • +
  • time (np.ndarray[ntime, ]) – Unix time. +Must be passed as keyword argument if timestream is an np.ndarray.

  • +
  • input (np.ndarray[ninput, ] of dtype=('chan_id', 'correlator_input')) – Input axis. +Must be passed as keyword argument if timestream is an np.ndarray.

  • +
  • stack (np.ndarray[nstack, ]) – Stack axis. +Must be passed as keyword argument if timestream is an np.ndarray +and the visibilities have been stacked.

  • +
  • reverse_stack (np.ndarray[nprod, ] of dtype=('stack', 'conjugate')) – The index of the stack axis that each product went into. +Typically found in reverse_map[‘stack’] attribute. +Must be passed as keyword argument if timestream is an np.ndarray +and the visibilities have been stacked.

  • +
  • input_flags (np.ndarray [ninput, ntime]) – Array indicating which inputs were good at each time. Non-zero value +indicates that an input was good. Optional. Only used for stacked visibilities.

  • +
+
+
Returns:
+

    +
  • If copy == True

    +
    +
    visnp.ndarray[nfreq, nprod(nstack), ntime]

    New set of visibilities with timing correction applied.

    +
    +
    +
  • +
  • else

    +
    +
    None

    Correction is applied to the input visibility data. Also, +if timestream is an andata.CorrData instance and the gain dataset exists, +then it will be updated with the complex gains that have been applied.

    +
    +
    +
  • +
+

+
+
+
+ +
+
+property coeff_alpha
+

Provide convenience access to the coeff_alpha array.

+
+ +
+
+property coeff_tau
+

Provide convenience access to the coeff_tau array.

+
+ +
+
+delete_coeff()[source]
+

Stop using coefficients to construct timing correction.

+

Calling this method will delete the coeff_tau, coeff_alpha, +and reference_noise_source datasets if they exist.

+
+ +
+
+property freq
+

Provide convenience access to the frequency bin centres.

+
+ +
+
+classmethod from_dict(**kwargs)[source]
+

Instantiate a TimingCorrection object.

+
+
Parameters:
+
    +
  • freq (np.ndarray[nfreq, ] of dtype=('centre', 'width')) – Frequencies in MHz that were used to construct the timing correction.

  • +
  • noise_source (np.ndarray[nsource,] of dtype=('chan_id', 'correlator_input')) – Correlator inputs that were used to construct the timing correction.

  • +
  • input (np.ndarray[ninput, ] of dtype=('chan_id', 'correlator_input')) – Correlator inputs to which the timing correction will be applied.

  • +
  • time (np.ndarray[ntime, ]) – Unix time.

  • +
  • param (np.ndarray[nparam, ]) – Parameters of the model fit to the static phase versus frequency.

  • +
  • tau (np.ndarray[nsource, ntime]) – The actual timing correction, which is the relative delay of each of the +noise source inputs with respect to a reference input versus time.

  • +
  • weight_tau (np.ndarray[nsource, ntime]) – Estimate of the uncertainty (inverse variance) on the timing correction.

  • +
  • static_phi (np.ndarray[nfreq, nsource]) – The phase that was subtracted from each frequency and input prior to +fitting for the timing correction. This is necessary to remove the +approximately static ripple pattern caused by reflections.

  • +
  • weight_static_phi (np.ndarray[nfreq, nsource]) – Inverse variance on static_phi.

  • +
  • static_phi_fit (np.ndarray[nparam, nsource]) – Best-fit parameters of a fit to the static phase versus frequency +for each of the noise source inputs.

  • +
  • alpha (np.ndarray[nsource, ntime]) – The coefficient of the spectral model of the amplitude variations of +each of the noise source inputs versus time.

  • +
  • weight_alpha (np.ndarray[nsource, ntime]) – Estimate of the uncertainty (inverse variance) on the amplitude coefficients.

  • +
  • static_amp (np.ndarray[nfreq, nsource]) – The amplitude that was subtracted from each frequency and input prior to +fitting for the amplitude variations. This is necessary to remove the +approximately static ripple pattern caused by reflections.

  • +
  • weight_static_amp (np.ndarray[nfreq, nsource]) – Inverse variance on static_amp.

  • +
  • num_freq (np.ndarray[nsource, ntime]) – The number of frequencies used to determine the delay and alpha quantities. +If num_freq is 0, then that time is ignored when deriving the timing correction.

  • +
  • coeff_tau (np.ndarray[ninput, nsource]) – If coeff is provided, then the timing correction applied to a particular +input will be the linear combination of the tau correction from the +noise source inputs, with the coefficients set by this array.

  • +
  • coeff_alpha (np.ndarray[ninput, nsource]) – If coeff is provided, then the timing correction applied to a particular +input will be adjusted by the linear combination of the alpha correction +from the noise source inputs, with the coefficients set by this array.

  • +
  • reference_noise_source (np.ndarray[ninput]) – The noise source input that was used as reference when fitting coeff_tau.

  • +
+
+
+
+ +
+
+get_alpha(timestamp, interp='linear', extrap_limit=None)[source]
+

Return the amplitude variation for each noise source at the requested times.

+

Uses the TimingInterpolator to interpolate to the requested times.

+
+
Parameters:
+
    +
  • timestamp (np.ndarray[ntime,]) – Unix timestamp.

  • +
  • interp (string) – Method to interpolate over time. Options include ‘linear’, ‘nearest’, +‘zero’, ‘slinear’, ‘quadratic’, ‘cubic’, ‘previous’, and ‘next’.

  • +
  • extrap_limit (float) – Do not extrapolate the underlying data beyond its boundaries by this +amount in seconds. Default is 2 integrations.

  • +
+
+
Returns:
+

    +
  • alpha (np.ndarray[nsource, ntime]) – Amplitude coefficient as a function of time for each of the noise sources.

  • +
  • weight (np.ndarray[nsource, ntime]) – The uncertainty on the amplitude coefficient, expressed as an inverse variance.

  • +
+

+
+
+
+ +
+
+get_gain(freq, inputs, timestamp, **kwargs)[source]
+

Return the complex gain for the requested frequencies, inputs, and times.

+

Multiplying the visibilities by the outer product of these gains will remove +the fluctuations in phase due to timing jitter. This method uses the +get_tau method. It accepts and passes along keyword arguments for that method.

+
+
Parameters:
+
    +
  • freq (np.ndarray[nfreq, ]) – Frequency in MHz.

  • +
  • inputs (np.ndarray[ninput, ]) – Must contain ‘correlator_input’ field.

  • +
  • timestamp (np.ndarray[ntime, ]) – Unix timestamps.

  • +
+
+
Returns:
+

    +
  • gain (np.ndarray[nfreq, ninput, ntime]) – Complex gain. Multiplying the visibilities by the +outer product of this vector at a given time and +frequency will correct for the timing jitter.

  • +
  • weight (np.ndarray[nfreq, ninput, ntime]) – Uncertainty on the gain expressed as an inverse variance.

  • +
+

+
+
+
+ +
+
+get_stacked_alpha(timestamp, inputs, prod, reverse_stack, input_flags=None, **kwargs)[source]
+

Return the equivalent of get_stacked_tau for the noise source amplitude variations.

+

Averages the alphas from the noise source inputs that map to the set of redundant +baseline included in each stacked visibility. If input_flags is provided, then the +bad inputs that were excluded from the stack are also excluded from the alpha +template averaging. This method can be used to generate a stacked alpha template +that can be used to correct a stacked tau template for variations in the noise source +distribution system. However, it is recommended that the tau template be corrected +before stacking. This is accomplished by setting the amp_to_delay property +prior to calling get_stacked_tau.

+
+
Parameters:
+
    +
  • timestamp (np.ndarray[ntime,]) – Unix timestamp.

  • +
  • inputs (np.ndarray[ninput,]) – Must contain ‘correlator_input’ field.

  • +
  • prod (np.ndarray[nprod,]) – The products that were included in the stack. +Typically found in the index_map[‘prod’] attribute of the +andata.CorrData object.

  • +
  • reverse_stack (np.ndarray[nprod,] of dtype=('stack', 'conjugate')) – The index of the stack axis that each product went into. +Typically found in reverse_map[‘stack’] attribute +of the andata.CorrData.

  • +
  • input_flags (np.ndarray [ninput, ntime]) – Array indicating which inputs were good at each time. +Non-zero value indicates that an input was good.

  • +
+
+
Returns:
+

alpha – Noise source amplitude variation as a function of time for each stacked visibility.

+
+
Return type:
+

np.ndarray[nstack, ntime]

+
+
+
+ +
+
+get_stacked_tau(timestamp, inputs, prod, reverse_stack, input_flags=None, **kwargs)[source]
+

Return the appropriate delay for each stacked visibility at the requested time.

+

Averages the delays from the noise source inputs that map to the set of redundant +baseline included in each stacked visibility. This yields the appropriate +common-mode delay correction. If input_flags is provided, then the bad inputs +that were excluded from the stack are also excluded from the delay template averaging.

+
+
Parameters:
+
    +
  • timestamp (np.ndarray[ntime,]) – Unix timestamp.

  • +
  • inputs (np.ndarray[ninput,]) – Must contain ‘correlator_input’ field.

  • +
  • prod (np.ndarray[nprod,]) – The products that were included in the stack. +Typically found in the index_map[‘prod’] attribute of the +andata.CorrData object.

  • +
  • reverse_stack (np.ndarray[nprod,] of dtype=('stack', 'conjugate')) – The index of the stack axis that each product went into. +Typically found in reverse_map[‘stack’] attribute +of the andata.CorrData.

  • +
  • input_flags (np.ndarray [ninput, ntime]) – Array indicating which inputs were good at each time. +Non-zero value indicates that an input was good.

  • +
+
+
Returns:
+

tau – Delay as a function of time for each stacked visibility.

+
+
Return type:
+

np.ndarray[nstack, ntime]

+
+
+
+ +
+
+get_tau(timestamp, ignore_amp=False, interp='linear', extrap_limit=None)[source]
+

Return the delay for each noise source at the requested times.

+

Uses the TimingInterpolator to interpolate to the requested times.

+
+
Parameters:
+
    +
  • timestamp (np.ndarray[ntime,]) – Unix timestamp.

  • +
  • ignore_amp (bool) – Do not apply a noise source based amplitude correction, even if one exists.

  • +
  • interp (string) – Method to interpolate over time. Options include ‘linear’, ‘nearest’, +‘zero’, ‘slinear’, ‘quadratic’, ‘cubic’, ‘previous’, and ‘next’.

  • +
  • extrap_limit (float) – Do not extrapolate the underlying data beyond its boundaries by this +amount in seconds. Default is 2 integrations.

  • +
+
+
Returns:
+

    +
  • tau (np.ndarray[nsource, ntime]) – Delay as a function of time for each of the noise sources.

  • +
  • weight (np.ndarray[nsource, ntime]) – The uncertainty on the delay, expressed as an inverse variance.

  • +
+

+
+
+
+ +
+
+get_timing_correction(freq, timestamp, **kwargs)[source]
+

Return the phase correction from each noise source at the requested frequency and time.

+

Assumes the phase correction scales with frequency nu as phi = 2 pi nu tau and uses the +get_tau method to interpolate over time. It accepts and passes along keyword arguments +for that method.

+
+
Parameters:
+
    +
  • freq (np.ndarray[nfreq, ]) – Frequency in MHz.

  • +
  • timestamp (np.ndarray[ntime, ]) – Unix timestamp.

  • +
+
+
Returns:
+

    +
  • gain (np.ndarray[nfreq, nsource, ntime]) – Complex gain containing a pure phase correction for each of the noise sources.

  • +
  • weight (np.ndarray[nfreq, nsource, ntime]) – Uncertainty on the gain for each of the noise sources, expressed as an inverse variance.

  • +
+

+
+
+
+ +
+
+property has_amplitude
+

Determine if this timing correction contains amplitude data.

+
+ +
+
+property has_coeff_alpha
+

Indicates if there are valid coeff that map noise source alpha to inputs.

+
+ +
+
+property has_coeff_tau
+

Indicates if there are valid coeff that map noise source tau to inputs.

+
+ +
+
+property has_num_freq
+

Indicates if there is a num_freq flag that identifies missing data.

+
+ +
+
+property input
+

Provide convenience access to the correlator inputs.

+
+ +
+
+property noise_source
+

Provide convenience access to the noise source inputs.

+

Note that in older versions of the timing correction, the +noise_source axis does not exist. Instead, the equivalent +quantity is labeled as input. Since the addition of the +coeff dataset it has become necessary to distinguish between the +noise source inputs from which the timing correction is derived +and the correlator inputs to which the timing correction is applied.

+
+ +
+
+property nsource
+

Provide convenience access to the number of noise source inputs.

+
+ +
+
+property num_freq
+

Provide convenience access to the num_freq array.

+
+ +
+
+property reference_noise_source
+

Return the index of the reference noise source.

+
+ +
+
+search_input(inputs)[source]
+

Find inputs in the input axis.

+
+
Parameters:
+

inputs (np.ndarray[ninput,] of dtype=('chan_id', 'correlator_input'))

+
+
Returns:
+

index – Indices of the input axis that yield the requested inputs.

+
+
Return type:
+

np.ndarray[ninput,] of .int

+
+
+
+ +
+
+set_coeff(coeff_tau, inputs, noise_source, coeff_alpha=None, reference_noise_source=None)[source]
+

Use coefficients to construct timing correction.

+

Setting the coefficients changes how the timing correction for a particular +correlator input is derived. Without coefficients, each input is matched +to the timing correction from a single noise source input through the +map_input_to_noise_source method. With coefficients, each input is a +linear combination of the timing correction from all noise source inputs.

+
+
Parameters:
+
    +
  • coeff_tau (np.ndarray[ninput, nsource]) – The timing correction applied to a particular input will be the +linear combination of the tau correction from the noise source inputs, +with the coefficients set by this array.

  • +
  • inputs (np.ndarray[ninput, ] of dtype=('chan_id', 'correlator_input')) – Correlator inputs to which the timing correction will be applied.

  • +
  • noise_source (np.ndarray[nsource,] of dtype=('chan_id', 'correlator_input')) – Correlator inputs that were used to construct the timing correction.

  • +
  • coeff_alpha (np.ndarray[ninput, nsource]) – The timing correction applied to a particular input will be adjusted by +the linear combination of the alpha correction from the noise source inputs, +with the coefficients set by this array.

  • +
  • reference_noise_source (np.ndarray[ninput,]) – For each input, the index into noise_source that was used as +reference in the fit for coeff_tau.

  • +
+
+
+
+ +
+
+set_global_reference_time(tref, window=0.0, interpolate=False, **kwargs)[source]
+

Normalize the delay and alpha template to the value at a single time.

+

Useful for referencing the template to the value at the time that +you plan to calibrate.

+
+
Parameters:
+
    +
  • tref (unix time) – Reference the templates to the values at this time.

  • +
  • window (float) – Reference the templates to the median value over a window (in seconds) +around tref. If nonzero, this will override the interpolate keyword.

  • +
  • interpolate (bool) – Interpolate the delay template to time tref. Otherwise take the measured time +nearest to tref. The get_tau method is used to perform the interpolation, and +kwargs for that method will be passed along.

  • +
+
+
+
+ +
+
+set_reference_time(tref, tstart, tend=None, tinit=None, tau_init=None, alpha_init=None, interpolate=False, **kwargs)[source]
+

Normalize the delay and alpha template to specific times.

+

Required if applying the timing correction to data that has +already been calibrated.

+
+
Parameters:
+
    +
  • tref (np.ndarray[nref]) – Reference the delays to the values at this unix time.

  • +
  • tstart (np.ndarray[nref]) – Begin transition to the reference delay at this unix time.

  • +
  • tend (np.ndarray[nref]) – Complete transition to the reference delay at this unix time.

  • +
  • tinit (float) – Use the delay at this time for the period before the first tstart. +Takes precedence over tau_init.

  • +
  • tau_init (np.ndarray[nsource]) – Use this delay for times before the first tstart. Must provide a value +for each noise source input. If None, then will reference with respect +to the average delay over the full time series.

  • +
  • alpha_init (np.ndarray[nsource]) – Use this alpha for times before the first tstart. Must provide a value +for each noise source input. If None, then will reference with respect +to the average alpha over the full time series.

  • +
  • interpolate (bool) – Interpolate the delay template to times tref. Otherwise take the measured +times nearest to tref. The get_tau method is used to perform the +interpolation, and kwargs for that method will be passed along.

  • +
+
+
+
+ +
+
+property static_amp
+

Provide convenience access to the static_amp array.

+
+ +
+
+property static_phi
+

Provide convenience access to the static_phi array.

+
+ +
+
+property static_phi_fit
+

Provide convenience access to the static_phi_fit array.

+
+ +
+
+summary()[source]
+

Provide a summary of the timing correction.

+
+
Returns:
+

summary – Contains useful information about the timing correction. +Specifically contains for each noise source input the +time averaged phase offset and delay. Also contains +estimates of the variance in the timing for both the +shortest and longest timescale probed by the underlying +dataset. Meant to be joined with new lines and printed.

+
+
Return type:
+

list of strings

+
+
+
+ +
+
+property tau
+

Provide convenience access to the tau array.

+
+ +
+
+property weight_alpha
+

Provide convenience access to the weight_alpha array.

+
+ +
+
+property weight_static_amp
+

Provide convenience access to the weight_static_amp array.

+
+ +
+
+property weight_static_phi
+

Provide convenience access to the weight_static_phi array.

+
+ +
+
+property weight_tau
+

Provide convenience access to the weight_tau array.

+
+ +
+
+property zero_delay_noise_source
+

Return the index of the noise source with zero delay.

+
+ +
+ +
+
+class ch_util.timing.TimingData(h5_data=None, **kwargs)[source]
+

Bases: CorrData, TimingCorrection

+

Subclass of andata.CorrData for timing data.

+

Automatically computes the timing correction when data is loaded and +inherits the methods of TimingCorrection that enable the application +of that correction to other datasets.

+

Used to pick which subclass to instantiate based on attributes in +data.

+
+
+classmethod from_acq_h5(acq_files, only_correction=False, **kwargs)[source]
+

Load a list of acquisition files and compute the timing correction.

+

Accepts and passes on all keyword arguments for andata.CorrData.from_acq_h5 +and the construct_delay_template function.

+
+
Parameters:
+
    +
  • acq_files (str or list of str) – Path to file(s) containing the timing data.

  • +
  • only_correction (bool) – Only return the timing correction. Do not return the underlying +data from which that correction was derived.

  • +
+
+
Returns:
+

data

+
+
Return type:
+

TimingData or TimingCorrection

+
+
+
+ +
+
+summary()[source]
+

Provide a summary of the timing data and correction.

+
+
Returns:
+

summary – Contains useful information about the timing correction +and data. Includes the reduction in the standard deviation +of the phase after applying the timing correction. This is +presented as quantiles over frequency for each of the +noise source products.

+
+
Return type:
+

list of strings

+
+
+
+ +
+ +
+
+class ch_util.timing.TimingInterpolator(x, y, weight=None, flag=None, kind='linear', extrap_limit=None)[source]
+

Bases: object

+

Interpolation that is aware of flagged data and weights.

+

Flagged data is ignored during the interpolation. The weights from +the data are propagated to obtain weights for the interpolated points.

+

Instantiate a callable TimingInterpolator object.

+
+
Parameters:
+
    +
  • x (np.ndarray[nsample,]) – The points where the data was sampled. +Must be monotonically increasing.

  • +
  • y (np.ndarray[..., nsample]) – The data to interpolate.

  • +
  • weight (np.ndarray[..., nsample]) – The uncertainty on the data, expressed as an +inverse variance.

  • +
  • flag (np.ndarray[..., nsample]) – Boolean indicating if the data is to be +included in the interpolation.

  • +
  • kind (str) – String that specifies the kind of interpolation. +The value nearest, previous, next, and linear will use +custom methods that propagate uncertainty to obtain the interpolated +weights. The value zero, slinear, quadratic, and cubic +will use spline interpolation from scipy.interpolation.interp1d +and use the weight from the nearest point.

  • +
+
+
Returns:
+

interpolator – Callable that will interpolate the data that was provided +to a new set of x values.

+
+
Return type:
+

TimingInterpolator

+
+
+
+ +
+
+ch_util.timing.construct_delay_template(data, min_frac_kept=0.0, threshold=0.5, min_freq=420.0, max_freq=780.0, mask_rfi=False, max_iter_weight=None, check_amp=False, nsigma_amp=None, check_phi=True, nsigma_phi=None, nparam=2, static_phi=None, weight_static_phi=None, static_phi_fit=None, static_amp=None, weight_static_amp=None)[source]
+

Construct a relative time delay template.

+

Fits the phase of the cross-correlation between noise source inputs +to a model that increases linearly with frequency.

+
+
Parameters:
+
    +
  • data (andata.CorrData) –

    +
    Correlation data. Must contain the following attributes:
    +
    freq: np.ndarray[nfreq, ]

    Frequency in MHz.

    +
    +
    vis: np.ndarray[nfreq, nprod, ntime]

    Upper-triangle, product packed visibility matrix +containing ONLY the noise source inputs.

    +
    +
    weight: np.ndarray[nfreq, nprod, ntime]

    Flag indicating the data points to fit.

    +
    +
    flags/frac_lost: np.ndarray[nfreq, ntime]

    Flag indicating the fraction of data lost. +If provided, then data will be weighted by the +fraction of data that remains when solving +for the delay template.

    +
    +
    +
    +
    +

  • +
  • min_frac_kept (float) – Do not include frequencies and times where the fraction +of data that remains is less than this threshold. +Default is 0.0.

  • +
  • threshold (float) – A (frequency, input) must pass the checks specified above +more than this fraction of the time, otherwise it will be +flagged as bad for all times. Default is 0.50.

  • +
  • min_freq (float) – Minimum frequency in MHz to include in the fit. +Default is 420.

  • +
  • max_freq (float) – Maximum frequency in MHz to include in the fit. +Default is 780.

  • +
  • mask_rfi (bool) – Mask frequencies that occur within known RFI bands. Note that the +noise source data does not contain RFI, however the real-time pipeline +does not distinguish between noise source inputs and sky inputs, and as +a result will discard large amounts of data in these bands.

  • +
  • max_iter_weight (int) – The weight for each frequency is estimated from the variance of the +residuals of the template fit from the previous iteration. Outliers +are also flagged at each iteration with an increasingly aggressive threshold. +This is the total number of times to iterate. Setting to 1 corresponds +to linear least squares. Default is 1, unless check_amp or check_phi is True, +in which case this defaults to the maximum number of thresholds provided.

  • +
  • check_amp (bool) – Do not fit frequencies and times where the residual amplitude is an outlier. +Default is False.

  • +
  • nsigma_amp (list of float) – If check_amp is True, then residuals greater than this number of sigma +will be considered an outlier. Provide a list containing the value to be used +at each iteration. If the length of the list is less than max_iter_weight, +then the last value in the list will be repeated for the remaining iterations. +Default is [1000, 500, 200, 100, 50, 20, 10, 5].

  • +
  • check_phi (bool) – Do not fit frequencies and times where the residual phase is an outlier. +Default is True.

  • +
  • nsigma_phi (list of float) – If check_phi is True, then residuals greater than this number of sigma +will be considered an outlier. Provide a list containing the value to be used +at each iteration. If the length of the list is less than max_iter_weight, +then the last value in the list will be repeated for the remaining iterations. +Default is [1000, 500, 200, 100, 50, 20, 10, 5].

  • +
  • nparam (int) – Number of parameters for polynomial fit to the +time averaged phase versus frequency. Default is 2.

  • +
  • static_phi (np.ndarray[nfreq, nsource]) – Subtract this quantity from the noise source phase prior to fitting +for the timing correction. If None, then this will be estimated from the median +of the noise source phase over time.

  • +
  • weight_static_phi (np.ndarray[nfreq, nsource]) – Inverse variance of the time averaged phase. Set to zero for frequencies and inputs +that are missing or should be ignored. If None, then this will be estimated from the +residuals of the fit.

  • +
  • static_phi_fit (np.ndarray[nparam, nsource]) – Polynomial fit to static_phi versus frequency.

  • +
  • static_amp (np.ndarray[nfreq, nsource]) – Subtract this quantity from the noise source amplitude prior to fitting +for the amplitude variations. If None, then this will be estimated from the median +of the noise source amplitude over time.

  • +
  • weight_static_amp (np.ndarray[nfreq, nsource]) – Inverse variance of the time averaged amplitude. Set to zero for frequencies and inputs +that are missing or should be ignored. If None, then this will be estimated from the +residuals of the fit.

  • +
+
+
Returns:
+

    +
  • phi (np.ndarray[nfreq, nsource, ntime]) – Phase of the signal from the noise source.

  • +
  • weight_phi (np.ndarray[nfreq, nsource, ntime]) – Inverse variance of the phase of the signal from the noise source.

  • +
  • tau (np.ndarray[nsource, ntime]) – Delay template for each noise source input.

  • +
  • weight_tau (np.ndarray[nfreq, nsource]) – Estimate of the uncertainty on the delay template (inverse variance).

  • +
  • static_phi (np.ndarray[nfreq, nsource]) – Time averaged phase versus frequency.

  • +
  • weight_static_phi (np.ndarray[nfreq, nsource]) – Inverse variance of the time averaged phase.

  • +
  • static_phi_fit (np.ndarray[nparam, nsource]) – Best-fit parameters of the polynomial fit to the +time averaged phase versus frequency.

  • +
  • amp (np.ndarray[nfreq, nsource, ntime]) – Amplitude of the signal from the noise source.

  • +
  • weight_amp (np.ndarray[nfreq, nsource, ntime]) – Inverse variance of the amplitude of the signal from the noise source.

  • +
  • alpha (np.ndarray[nsource, ntime]) – Amplitude coefficient for each noise source input.

  • +
  • weight_alpha (np.ndarray[nfreq, nsource]) – Estimate of the uncertainty on the amplitude coefficient (inverse variance).

  • +
  • static_amp (np.ndarray[nfreq, nsource]) – Time averaged amplitude versus frequency.

  • +
  • weight_static_amp (np.ndarray[nfreq, nsource]) – Inverse variance of the time averaged amplitude.

  • +
  • num_freq (np.ndarray[nsource, ntime]) – Number of frequencies used to construct the delay and amplitude templates.

  • +
+

+
+
+
+ +
+
+ch_util.timing.eigen_decomposition(vis, flag)[source]
+

Eigenvalue decomposition of the visibility matrix.

+
+
Parameters:
+
    +
  • vis (np.ndarray[nfreq, nprod, ntime]) – Upper-triangle, product packed visibility matrix.

  • +
  • flag (np.ndarray[nfreq, nsource, ntime] (optional)) – Array of 1 or 0 indicating the inputs that should be included +in the eigenvalue decomposition for each frequency and time.

  • +
+
+
Returns:
+

resp – Eigenvector corresponding to the largest eigenvalue for +each frequency and time.

+
+
Return type:
+

np.ndarray[nfreq, nsource, ntime]

+
+
+
+ +
+
+ch_util.timing.fit_poly_to_phase(freq, resp, resp_error, nparam=2)[source]
+

Fit complex data versus frequency to a model consisting of a polynomial in phase.

+

Nonlinear least squares algorithm is applied to the complex data to avoid problems +caused by phase wrapping.

+
+
Parameters:
+
    +
  • freq (np.ndarray[nfreq, ]) – Frequency in MHz.

  • +
  • resp (np.ndarray[nfreq, ]) – Complex data with magnitude equal to 1.0.

  • +
  • resp_error (np.ndarray[nfreq, ]) – Uncertainty on the complex data.

  • +
  • nparam (int) – Number of parameters in the polynomial. +Default is 2 (i.e., linear).

  • +
+
+
Returns:
+

    +
  • popt (np.ndarray[nparam, ]) – Best-fit parameters.

  • +
  • pcov (np.ndarray[nparam, nparam]) – Covariance of the best-fit parameters. +Assumes that it obtained a good fit +and returns the errors +necessary to achieve that.

  • +
+

+
+
+
+ +
+
+ch_util.timing.load_timing_correction(files, start=None, stop=None, window=43200.0, instrument='chime', **kwargs)[source]
+

Find and load the appropriate timing correction for a list of corr acquisition files.

+

For example, if the instrument keyword is set to ‘chime’, +then this function will accept all types of chime corr acquisition files, +such as ‘chimetiming’, ‘chimepb’, ‘chimeN2’, ‘chimecal’, and then find +the relevant set of ‘chimetiming’ files to load.

+

Accepts and passes on all keyword arguments for the functions +andata.CorrData.from_acq_h5 and construct_delay_template.

+

Should consider modifying this method to use Finder at some point in future.

+
+
Parameters:
+
    +
  • files (string or list of strings) – Absolute path to corr acquisition file(s).

  • +
  • start (integer, optional) – What frame to start at in the full set of files.

  • +
  • stop (integer, optional) – What frame to stop at in the full set of files.

  • +
  • window (float) – Use the timing data -window from start and +window from stop. +Default is 12 hours.

  • +
  • instrument (string) – Name of the instrument. Default is ‘chime’.

  • +
+
+
Returns:
+

data

+
+
Return type:
+

TimingData

+
+
+
+ +
+
+ch_util.timing.map_input_to_noise_source(inputs, noise_sources)[source]
+

Find the appropriate noise source to use to correct the phase of each input.

+

Searches for a noise source connected to the same slot, +then crate, then hut, then correlator.

+
+
Parameters:
+
    +
  • inputs (np.ndarray[ninput, ] of dtype=('chan_id', 'correlator_input')) – The input axis from a data acquisition file.

  • +
  • noise_sources (np.ndarray[nsource, ] of dtype=('chan_id', 'correlator_input')) – The noise sources.

  • +
+
+
+
+ +
+
+ch_util.timing.model_poly_phase(freq, *param)[source]
+

Evaluate a polynomial model for the phase.

+

To be used with the parameters output from fit_poly_to_phase.

+
+
Parameters:
+
    +
  • freq (np.ndarray[nfreq, ]) – Frequency in MHz.

  • +
  • *param (float) – Coefficients of the polynomial.

  • +
+
+
Returns:
+

phi – Phase in radians between -pi and +pi.

+
+
Return type:
+

np.ndarray[nfreq, ]

+
+
+
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/_autosummary/ch_util.tools.html b/docs/_autosummary/ch_util.tools.html new file mode 100644 index 00000000..84f4985a --- /dev/null +++ b/docs/_autosummary/ch_util.tools.html @@ -0,0 +1,1834 @@ + + + + + + + ch_util.tools — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

ch_util.tools

+

Tools for CHIME analysis

+

A collection of miscellaneous utility routines.

+
+

Correlator Inputs

+

Query the layout database to find out what is ultimately connected at the end +of correlator inputs. This is done by calling the routine +get_correlator_inputs(), which returns a list of the inputs. Routines +such as get_feed_positions() operate on this list.

+ +

This can determine if we are connected to any of the following:

+ +

Example

+

Fetch the inputs for blanchard during layout 38:

+
>>> from datetime import datetime
+>>> inputs = get_correlator_inputs(datetime(2016, 5, 23, 0), correlator='pathfinder')
+>>> inputs[1]
+CHIMEAntenna(id=1, reflector=u'W_cylinder', antenna=u'ANT0123B', powered=True, pos=9.071800000000001, input_sn=u'K7BP16-00040401', pol=u'S', corr=u'K7BP16-0004', cyl=0)
+>>> print("NS position:", inputs[1].pos)
+NS position: 9.0718
+>>> print("Polarisation:", inputs[1].pol)
+Polarisation: S
+>>> inputs[3]
+CHIMEAntenna(id=3, reflector=u'W_cylinder', antenna=u'ANT0128B', powered=True, pos=9.681400000000002, input_sn=u'K7BP16-00040403', pol=u'S', corr=u'K7BP16-0004', cyl=0)
+
+
+
+
+

Housekeeping Inputs

+

Functions

+ +

Classes

+ +
+
+

Product Array Mapping

+

Tools for mapping between products stored in upper triangular format, and the +underlying pairs of inputs.

+ +
+
+

Matrix Factorisation

+

A few useful routines for factorising matrices, usually for calibration.

+ +
+
+

Fringestopping

+

Routines for undoing the phase rotation of a fixed celestial source. The +routine fringestop() is an easy to use routine for fringestopping data +given a list of the feeds in the data. For more advanced usage +fringestop_phase() can be used.

+
    +
  • fringestop_phase()

  • +
  • fringestop()

  • +
+
+
+

Miscellaneous

+ +
+

Functions

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

antenna_to_lna(graph, ant, pol)

Find an LNA connected to an antenna.

apply_gain(vis, gain[, axis, out, prod_map])

Apply per input gains to a set of visibilities packed in upper triangular format.

beam_index2number(beam_index)

Convert beam "index" (0-1023) to beam "number" (0-255, 1000-1255, etc.)

calibrate_temperature(raw)

Calibrate housekeeping temperatures.

change_chime_location([rotation, location, ...])

Change the orientation or location of CHIME.

change_pathfinder_location([rotation, ...])

Change the orientation or location of Pathfinder.

cmap(i, j, n)

Given a pair of feed indices, return the pair index.

decorrelation(timestream, times, feeds, src)

Apply the decorrelation corrections to a timestream from observing a source.

delay(times, feeds, src[, wterm, bterm, ...])

Calculate the delay in a visibilities observing a given source.

eigh_no_diagonal(A[, niter, eigvals])

Eigenvalue decomposition ignoring the diagonal elements.

ensure_list(obj[, num])

Ensure obj is list-like, optionally with the length num.

fake_tone_database()

A fake correlator input database for GBO/TONE.

fast_pack_product_array(arr)

Equivalent to ch_util.tools.pack_product_array(arr, axis=0), but 10^5 times faster for full CHIME!

fringestop_time(timestream, times, freq, ...)

Fringestop timestream data to a fixed source.

get_correlator_inputs(lay_time[, ...])

Get the information for all channels in a layout.

get_default_frequency_map_stream()

Get the default CHIME frequency map stream.

get_feed_polarisations(feeds)

Get an array of the feed polarisations.

get_feed_positions(feeds[, get_zpos])

Get the positions of the CHIME antennas.

get_holographic_index(inputs)

Find the indices of the holography antennas.

get_noise_channel(inputs)

Returns the index of the noise source with the lowest chan id (for backwards compatibility).

get_noise_source_index(inputs)

Find the indices of the noise sources.

hk_to_sensor(graph, inp)

Find what component a housekeeping channel is connected to.

icmap(ix, n)

Inverse feed map.

invert_no_zero(*args, **kwargs)

Deprecated - use 'caput.tools.invert_no_zero'

is_array(feed)

Is this feed part of an array?

is_array_on(inputs, *args)

Check if inputs are attached to an array antenna AND powered on AND flagged as good.

is_array_x(feed)

Is this an X-polarisation antenna in an array?

is_array_y(feed)

Is this a Y-polarisation antenna in an array?

is_chime(feed)

Is this feed a CHIME antenna?

is_chime_on(inputs, *args)

Check if inputs are attached to an array antenna AND powered on AND flagged as good.

is_holographic(feed)

Is this feed a holographic antenna?

is_noise_source(inp)

Is this correlator input connected to a noise source?

is_pathfinder(feed)

Is this feed a Pathfinder antenna?

lna_to_antenna(graph, lna)

Find an antenna connected to an LNA.

normalise_correlations(A[, norm])

Normalise to make a correlation matrix from a covariance matrix.

order_frequency_map_stream(fmap, stream_id)

Order stream_id components based on a frequency map.

pack_product_array(exp_arr[, axis])

Pack full correlation matrices into upper triangular form.

parse_chime_serial(sn)

Parse a serial number into crate, slot, and SMA number.

parse_old_serial(sn)

Parse an old 8/16-channel serial number into slot, and SMA number.

parse_pathfinder_serial(sn)

Parse a pathfinder serial number into crate, slot, and SMA number.

rankN_approx(A[, rank])

Create the rank-N approximation to the matrix A.

redefine_stack_index_map(input_map, prod, ...)

Ensure that only baselines between array antennas are used to represent the stack.

reorder_correlator_inputs(input_map, corr_inputs)

Sort a list of correlator inputs into the order given in input map.

sensor_to_hk(graph, comp)

Find what housekeeping channel a component is connected to.

serial_to_id(serial)

Get the channel ID corresponding to a correlator input serial number.

serial_to_location(serial)

Get the internal correlator ordering and the crate, slot, and sma number from a correlator input serial number.

subtract_rank1_signal(vis, signal[, axis, ...])

Subtract a rank 1 signal from a set of visibilities packed in upper triangular format.

unpack_product_array(prod_arr[, axis, feeds])

Expand packed products to correlation matrices.

+

Classes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Antenna(**input_dict)

An antenna input.

ArrayAntenna(**input_dict)

Antenna that is part of a cylindrical interferometric array.

Blank(**input_dict)

Unconnected input.

CHIMEAntenna(**input_dict)

Antenna that is part of CHIME.

CorrInput(**input_dict)

Base class for describing a correlator input.

GBOAntenna(**input_dict)

GBO outrigger antenna for the CHIME/FRB project.

HCOAntenna(**input_dict)

HCRO outrigger antenna for the CHIME/FRB project.

HKInput([atmel, chan, mux])

A housekeeping input.

HolographyAntenna(**input_dict)

Antenna used for holography.

KKOAntenna(**input_dict)

KKO outrigger antenna for the CHIME/FRB project.

NoiseSource(**input_dict)

Broad band noise calibration source.

PCOAntenna

alias of KKOAntenna

PathfinderAntenna(**input_dict)

Antenna that is part of the Pathfinder.

RFIAntenna(**input_dict)

RFI monitoring antenna

TONEAntenna(**input_dict)

Antenna that is part of GBO/TONE Outrigger.

+
+
+class ch_util.tools.Antenna(**input_dict)[source]
+

Bases: CorrInput

+

An antenna input.

+
+
+reflector
+

The name of the reflector the antenna is on.

+
+
Type:
+

str

+
+
+
+ +
+
+antenna
+

Serial number of the antenna.

+
+
Type:
+

str

+
+
+
+ +
+
+rf_thru
+

Serial number of the RF room thru that +the connection passes.

+
+
Type:
+

str

+
+
+
+ +
+ +
+
+class ch_util.tools.ArrayAntenna(**input_dict)[source]
+

Bases: Antenna

+

Antenna that is part of a cylindrical interferometric array.

+
+
+cyl
+

Index of the cylinder.

+
+
Type:
+

int

+
+
+
+ +
+
+pos
+

Position of the antenna in meters in right-handed coordinates +where x is eastward, y is northward, and z is upward.

+
+
Type:
+

[x, y, z]

+
+
+
+ +
+
+pol
+

Orientation of the polarisation.

+
+
Type:
+

str

+
+
+
+ +
+
+flag
+

Flag indicating whether or not the antenna is good.

+
+
Type:
+

bool

+
+
+
+ +
+ +
+
+class ch_util.tools.Blank(**input_dict)[source]
+

Bases: CorrInput

+

Unconnected input.

+
+ +
+
+class ch_util.tools.CHIMEAntenna(**input_dict)[source]
+

Bases: ArrayAntenna

+

Antenna that is part of CHIME.

+
+ +
+
+class ch_util.tools.CorrInput(**input_dict)[source]
+

Bases: object

+

Base class for describing a correlator input.

+

Meant to be subclassed by actual types of inputs.

+
+
+input_sn
+

Unique serial number of input.

+
+
Type:
+

str

+
+
+
+ +
+
+corr
+

Unique serial number of correlator. +Set to None if no correlator is connected.

+
+
Type:
+

str

+
+
+
+ +
+
+corr_order
+

Order of input for correlator internal datastream.

+
+
Type:
+

int

+
+
+
+ +
+
+crate
+

Crate number within the correlator. +Set to None if correlator consists of single crate.

+
+
Type:
+

int

+
+
+
+ +
+
+slot
+

Slot number of the fpga motherboard within the crate. +Ranges from 0 to 15, left to right. +Set to None if correlator consists of single slot.

+
+
Type:
+

int

+
+
+
+ +
+
+sma
+

SMA number on the fpga motherboard within the slot. +Ranges from 0 to 15, bottom to top.

+
+
Type:
+

int

+
+
+
+ +
+
+property delay
+

The delay along the signal chain in seconds.

+

Positive delay values mean signals arriving later than the nominal value.

+

Note that these are always relative. Here CHIME inputs are chosen as +the delay=0 reference.

+
+ +
+
+property id
+

Channel ID. Automatically calculated from the serial number +if id is not explicitly set.

+
+
Returns:
+

id – Channel id. Calculated from the serial.

+
+
Return type:
+

int

+
+
+
+ +
+ +
+
+class ch_util.tools.GBOAntenna(**input_dict)[source]
+

Bases: ArrayAntenna

+

GBO outrigger antenna for the CHIME/FRB project.

+
+ +
+
+class ch_util.tools.HCOAntenna(**input_dict)[source]
+

Bases: ArrayAntenna

+

HCRO outrigger antenna for the CHIME/FRB project.

+
+ +
+
+class ch_util.tools.HKInput(atmel=None, chan=None, mux=None)[source]
+

Bases: object

+

A housekeeping input.

+
+
Parameters:
+
    +
  • atmel (layout.component) – The ATMEL board.

  • +
  • chan (int) – The channel number.

  • +
  • mux (int) – The mux number; if this HK stream has no multiplexer, this will simply +remain as Null

  • +
+
+
+
+
+atmel
+

The ATMEL board.

+
+
Type:
+

layout.component

+
+
+
+ +
+
+chan
+

The channel number.

+
+
Type:
+

int

+
+
+
+ +
+
+mux
+

The mux number; if this HK stream has no multiplexer, this will simply +remain as Null

+
+
Type:
+

int

+
+
+
+ +
+ +
+
+class ch_util.tools.HolographyAntenna(**input_dict)[source]
+

Bases: Antenna

+

Antenna used for holography.

+
+
+pos
+

Position of the antenna in meters in right-handed coordinates +where x is eastward, y is northward, and z is upward.

+
+
Type:
+

[x, y, z]

+
+
+
+ +
+
+pol
+

Orientation of the polarisation.

+
+
Type:
+

str

+
+
+
+ +
+ +
+
+class ch_util.tools.KKOAntenna(**input_dict)[source]
+

Bases: ArrayAntenna

+

KKO outrigger antenna for the CHIME/FRB project.

+
+ +
+
+class ch_util.tools.NoiseSource(**input_dict)[source]
+

Bases: CorrInput

+

Broad band noise calibration source.

+
+ +
+
+ch_util.tools.PCOAntenna
+

alias of KKOAntenna

+
+ +
+
+class ch_util.tools.PathfinderAntenna(**input_dict)[source]
+

Bases: ArrayAntenna

+

Antenna that is part of the Pathfinder.

+
+
+powered
+

Flag indicating that the antenna is powered.

+
+
Type:
+

bool

+
+
+
+ +
+ +
+
+class ch_util.tools.RFIAntenna(**input_dict)[source]
+

Bases: Antenna

+

RFI monitoring antenna

+
+ +
+
+class ch_util.tools.TONEAntenna(**input_dict)[source]
+

Bases: ArrayAntenna

+

Antenna that is part of GBO/TONE Outrigger. +Let’s allow for a global rotation and offset.

+
+ +
+
+ch_util.tools.antenna_to_lna(graph, ant, pol)[source]
+

Find an LNA connected to an antenna.

+
+
Parameters:
+
    +
  • graph (obj:layout.graph or datetime.datetime) – The graph in which to do the search. If you pass a time, then the graph +will be constructed internally. (Note that the latter option will be +quite slow if you do repeated calls!)

  • +
  • ant (layout.component) – The antenna.

  • +
  • pol (integer) – There can be up to two LNA’s connected to the two polarisation outputs +of an antenna. Select which by passing 1 or 2. (Note that +conversion to old-style naming ‘A’ and ‘B’ is done automatically.)

  • +
+
+
Returns:
+

lna – The LNA.

+
+
Return type:
+

layout.component or string

+
+
Raises:
+

layout.NotFound – Raised if the polarisation connector could not be found in the graph.

+
+
+
+ +
+
+ch_util.tools.apply_gain(vis, gain, axis=1, out=None, prod_map=None)[source]
+

Apply per input gains to a set of visibilities packed in upper +triangular format.

+

This allows us to apply the gains while minimising the intermediate +products created.

+
+
Parameters:
+
    +
  • vis (np.ndarray[..., nprod, ...]) – Array of visibility products.

  • +
  • gain (np.ndarray[..., ninput, ...]) – Array of gains. One gain per input.

  • +
  • axis (integer, optional) – The axis along which the inputs (or visibilities) are +contained. Currently only supports axis=1.

  • +
  • out (np.ndarray) – Array to place output in. If None create a new +array. This routine can safely use out = vis.

  • +
  • prod_map (ndarray of integer pairs) – Gives the mapping from product axis to input pairs. If not supplied, +icmap() is used.

  • +
+
+
Returns:
+

out – Visibility array with gains applied. Same shape as vis.

+
+
Return type:
+

np.ndarray

+
+
+
+ +
+
+ch_util.tools.beam_index2number(beam_index)[source]
+

Convert beam “index” (0-1023) to beam “number” (0-255, 1000-1255, etc.)

+

The beam “number”, with 1000s indicating the beam’s East-West index and the +remainder going from 0 through 255 indicating the beam’s North-South index, +is used in the CHIME/FRB beam_model package.

+
+
Parameters:
+

beam_index (int or np.ndarray of int) – The beam index or indices to be converted.

+
+
Returns:
+

beam_number – The corresponding beam number or numbers.

+
+
Return type:
+

same as beam_index

+
+
+
+ +
+
+ch_util.tools.calibrate_temperature(raw)[source]
+

Calibrate housekeeping temperatures.

+

The offset used here is rough; the results are therefore not absolutely +precise.

+
+
Parameters:
+

raw (numpy array) – The raw values.

+
+
Returns:
+

t – The temperature in degrees Kelvin.

+
+
Return type:
+

numpy array

+
+
+
+ +
+
+ch_util.tools.change_chime_location(rotation=None, location=None, default=False)[source]
+

Change the orientation or location of CHIME.

+
+
Parameters:
+
    +
  • rotation (float) – Rotation of the telescope from true north in degrees.

  • +
  • location (list) – [x, y, z] of the telescope in meters, +where x is eastward, y is northward, and z is upward.

  • +
  • default (bool) – Set parameters back to default value. Overrides other keywords.

  • +
+
+
+
+ +
+
+ch_util.tools.change_pathfinder_location(rotation=None, location=None, default=False)[source]
+

Change the orientation or location of Pathfinder.

+
+
Parameters:
+
    +
  • rotation (float) – Rotation of the telescope from true north in degrees.

  • +
  • location (list) – [x, y, z] of the telescope in meters, +where x is eastward, y is northward, and z is upward.

  • +
  • default (bool) – Set parameters back to default value. Overrides other keywords.

  • +
+
+
+
+ +
+
+ch_util.tools.cmap(i, j, n)[source]
+

Given a pair of feed indices, return the pair index.

+
+
Parameters:
+
    +
  • i (integer) – Feed index.

  • +
  • j (integer) – Feed index.

  • +
  • n (integer) – Total number of feeds.

  • +
+
+
Returns:
+

pi – Pair index.

+
+
Return type:
+

integer

+
+
+
+ +
+
+ch_util.tools.decorrelation(timestream, times, feeds, src, wterm=True, bterm=True, prod_map=None, csd=False, inplace=False, static_delays=True)[source]
+

Apply the decorrelation corrections to a timestream from observing a source.

+
+
Parameters:
+
    +
  • timestream (np.ndarray[nfreq, nprod, times]) – Array containing the timestream.

  • +
  • times (np.ndarray[times]) – The UNIX time of each sample, or (if csd=True), the CSD of each sample.

  • +
  • feeds (list of CorrInputs) – The feeds in the timestream.

  • +
  • src (skyfield source) – skyfield.starlib.Star or skyfield.vectorlib.VectorSum or +skyfield.jpllib.ChebyshevPosition body representing the source.

  • +
  • wterm (bool, optional) – Include elevation information in the calculation.

  • +
  • bterm (bool, optional) – Include a correction for baselines including the 26m Galt telescope.

  • +
  • prod_map (np.ndarray[nprod]) – The products in the timestream array.

  • +
  • csd (bool, optional) – Interpret the times parameter as CSDs.

  • +
  • inplace (bool, optional) – Fringestop the visibilities in place. If not set, leave the originals intact.

  • +
  • static_delays (bool, optional) – Correct for static cable delays in the system.

  • +
+
+
Returns:
+

corrected_timestream

+
+
Return type:
+

np.ndarray[nfreq, nprod, times]

+
+
+
+ +
+
+ch_util.tools.delay(times, feeds, src, wterm=True, bterm=True, prod_map=None, csd=False, static_delays=True, obs=<caput.time.Observer object>)[source]
+

Calculate the delay in a visibilities observing a given source.

+

This includes both the geometric delay and static (cable) delays.

+
+
Parameters:
+
    +
  • times (np.ndarray[times]) – The UNIX time of each sample, or (if csd=True), the CSD of each sample.

  • +
  • feeds (list of CorrInputs) – The feeds in the timestream.

  • +
  • src (skyfield source) – skyfield.starlib.Star or skyfield.vectorlib.VectorSum or +skyfield.jpllib.ChebyshevPosition body representing the source.

  • +
  • wterm (bool, optional) – Include elevation information in the calculation.

  • +
  • bterm (bool, optional) – Include a correction for baselines which include the 26m Galt telescope.

  • +
  • prod_map (np.ndarray[nprod]) – The products in the timestream array.

  • +
  • csd (bool, optional) – Interpret the times parameter as CSDs.

  • +
  • static_delays (bool, optional) – If set the returned value includes both geometric and static delays. +If False only geometric delays are included.

  • +
+
+
Returns:
+

delay

+
+
Return type:
+

np.ndarray[nprod, nra]

+
+
+
+ +
+
+ch_util.tools.eigh_no_diagonal(A, niter=5, eigvals=None)[source]
+

Eigenvalue decomposition ignoring the diagonal elements.

+

The diagonal elements are iteratively replaced with those from a rank=1 approximation.

+
+
Parameters:
+
    +
  • A (np.ndarray[:, :]) – Matrix to decompose.

  • +
  • niter (int, optional) – Number of iterations to perform.

  • +
  • eigvals ((lo, hi), optional) – Indices of eigenvalues to select (inclusive).

  • +
+
+
Returns:
+

    +
  • evals (np.ndarray[:])

  • +
  • evecs (np.ndarray[:, :])

  • +
+

+
+
+
+ +
+
+ch_util.tools.ensure_list(obj, num=None)[source]
+

Ensure obj is list-like, optionally with the length num.

+

If obj not a string but is iterable, it is returned as-is, +although a length different than num, if given, will result in a +ValueError.

+

If obj is a string or non-iterable, a new list is created with +num copies of obj as elements. In this case, if num is not +given, it is taken to be 1.

+
+
Parameters:
+
    +
  • obj – The object to check.

  • +
  • num (int, optional) – If given, also ensure that the list has num elements.

  • +
+
+
Returns:
+

The input object, or the newly created list

+
+
Return type:
+

obj

+
+
Raises:
+

ValueError – obj was iterable but did not have a length of num

+
+
+
+ +
+
+ch_util.tools.fake_tone_database()[source]
+

A fake correlator input database for GBO/TONE.

+
+ +
+
+ch_util.tools.fast_pack_product_array(arr)[source]
+

Equivalent to ch_util.tools.pack_product_array(arr, axis=0), +but 10^5 times faster for full CHIME!

+

Currently assumes that arr is a 2D array of shape (nfeeds, nfeeds), +and returns a 1D array of length (nfeed*(nfeed+1))/2. This case +is all we need for phase calibration, but pack_product_array() is +more general.

+
+ +
+
+ch_util.tools.fringestop_time(timestream, times, freq, feeds, src, wterm=False, bterm=True, prod_map=None, csd=False, inplace=False, static_delays=True, obs=<caput.time.Observer object>)[source]
+

Fringestop timestream data to a fixed source.

+
+
Parameters:
+
    +
  • timestream (np.ndarray[nfreq, nprod, times]) – Array containing the visibility timestream.

  • +
  • times (np.ndarray[times]) – The UNIX time of each sample, or (if csd=True), the CSD of each sample.

  • +
  • freq (np.ndarray[nfreq]) – The frequencies in the array (in MHz).

  • +
  • feeds (list of CorrInputs) – The feeds in the timestream.

  • +
  • src (skyfield source) – skyfield.starlib.Star or skyfield.vectorlib.VectorSum or +skyfield.jpllib.ChebyshevPosition body representing the source.

  • +
  • wterm (bool, optional) – Include elevation information in the calculation.

  • +
  • bterm (bool, optional) – Include a correction for baselines including the 26m Galt telescope.

  • +
  • prod_map (np.ndarray[nprod]) – The products in the timestream array.

  • +
  • csd (bool, optional) – Interpret the times parameter as CSDs.

  • +
  • inplace (bool, optional) – Fringestop the visibilities in place. If not set, leave the originals intact.

  • +
  • static_delays (bool, optional) – Correct for static cable delays in the system.

  • +
+
+
Returns:
+

fringestopped_timestream

+
+
Return type:
+

np.ndarray[nfreq, nprod, times]

+
+
+
+ +
+
+ch_util.tools.get_correlator_inputs(lay_time, correlator=None, connect=True)[source]
+

Get the information for all channels in a layout.

+
+
Parameters:
+
    +
  • lay_time (layout.graph or datetime) – layout.graph object, layout tag id, or datetime.

  • +
  • correlator (str, optional) – Fetch only for specified correlator. Use the serial number in database, +or pathfinder or chime, which will substitute the correct serial. +If None return for all correlators. +Option tone added for GBO 12 dish outrigger prototype array.

  • +
  • connect (bool, optional) – Connect to database and set the user to Jrs65 prior to query. +Default is True.

  • +
+
+
Returns:
+

channels – List of CorrInput instances. Returns None for MPI ranks +other than zero.

+
+
Return type:
+

list

+
+
+
+ +
+
+ch_util.tools.get_default_frequency_map_stream() → Tuple[ndarray][source]
+

Get the default CHIME frequency map stream.

+

Level order is [shuffle, crate, slot, link].

+
+
Returns:
+

    +
  • stream – [shuffle, crate, slot, link] for each frequency bin

  • +
  • stream_id – stream_id for each map combination +shuffle*2**12 + crate*2**8 + slot*2**4 + link

  • +
+

+
+
+
+ +
+
+ch_util.tools.get_feed_polarisations(feeds)[source]
+

Get an array of the feed polarisations.

+
+
Parameters:
+

feeds (list of CorrInput) – List of feeds to compute positions of.

+
+
Returns:
+

pol – Array of characters giving polarisation. If not an array feed returns ‘0’.

+
+
Return type:
+

np.ndarray

+
+
+
+ +
+
+ch_util.tools.get_feed_positions(feeds, get_zpos=False)[source]
+

Get the positions of the CHIME antennas.

+
+
Parameters:
+
    +
  • feeds (list of CorrInput) – List of feeds to compute positions of.

  • +
  • get_zpos (bool) – Return a third column with elevation information.

  • +
+
+
Returns:
+

positions – Array of feed positions. The first column is the E-W position +(increasing to the E), and the second is the N-S position (increasing +to the N). Non CHIME feeds get set to NaN.

+
+
Return type:
+

np.ndarray[nfeed, 2]

+
+
+
+ +
+
+ch_util.tools.get_holographic_index(inputs)[source]
+

Find the indices of the holography antennas.

+
+
Parameters:
+

inputs (list of CorrInput)

+
+
Returns:
+

ixholo – Returns None if holographic antenna not found.

+
+
Return type:
+

list of int

+
+
+
+ +
+
+ch_util.tools.get_noise_channel(inputs)[source]
+

Returns the index of the noise source with +the lowest chan id (for backwards compatibility).

+
+ +
+
+ch_util.tools.get_noise_source_index(inputs)[source]
+

Find the indices of the noise sources.

+
+
Parameters:
+

inputs (list of CorrInput)

+
+
Returns:
+

ixns – Returns None if noise source not found.

+
+
Return type:
+

list of int

+
+
+
+ +
+
+ch_util.tools.hk_to_sensor(graph, inp)[source]
+

Find what component a housekeeping channel is connected to.

+

This method is for finding either LNA or FLA’s that your housekeeping +channel is connected to. (It currently cannot find accelerometers, other +novel housekeeping instruments that may later exist; nor will it work if the +FLA/LNA is connected via a very non-standard chain of components.)

+
+
Parameters:
+
    +
  • graph (obj:layout.graph or datetime.datetime) – The graph in which to do the search. If you pass a time, then the graph +will be constructed internally. (Note that the latter option will be +quite slow if you do repeated calls!)

  • +
  • inp (HKInput) – The housekeeping input to search.

  • +
+
+
Returns:
+

comp – The LNA/FLA connected to the specified channel; None is returned +if none is found.

+
+
Return type:
+

layout.component

+
+
Raises:
+

ValueError – Raised if one of the channels or muxes passed in hk_chan is out of + range.

+
+
+
+ +
+
+ch_util.tools.icmap(ix, n)[source]
+

Inverse feed map.

+
+
Parameters:
+
    +
  • ix (integer) – Pair index.

  • +
  • n (integer) – Total number of feeds.

  • +
+
+
Returns:
+

fi, fj – Feed indices.

+
+
Return type:
+

integer

+
+
+
+ +
+
+ch_util.tools.invert_no_zero(*args, **kwargs)[source]
+

Deprecated - use ‘caput.tools.invert_no_zero’

+
+ +
+
+ch_util.tools.is_array(feed)[source]
+

Is this feed part of an array?

+
+
Parameters:
+

feed (CorrInput)

+
+
Returns:
+

isarr

+
+
Return type:
+

bool

+
+
+
+ +
+
+ch_util.tools.is_array_on(inputs, *args)[source]
+

Check if inputs are attached to an array antenna AND powered on AND flagged as good.

+
+
Parameters:
+

inputs (CorrInput or list of CorrInput objects)

+
+
Returns:
+

pwds – If list, it is the same length as inputs. Value is True if input is +attached to an ArrayAntenna and powered-on and False otherwise

+
+
Return type:
+

boolean or list of bools.

+
+
+
+ +
+
+ch_util.tools.is_array_x(feed)[source]
+

Is this an X-polarisation antenna in an array?

+
+ +
+
+ch_util.tools.is_array_y(feed)[source]
+

Is this a Y-polarisation antenna in an array?

+
+ +
+
+ch_util.tools.is_chime(feed)[source]
+

Is this feed a CHIME antenna?

+
+
Parameters:
+

feed (CorrInput)

+
+
Returns:
+

ischime

+
+
Return type:
+

bool

+
+
+
+ +
+
+ch_util.tools.is_chime_on(inputs, *args)
+

Check if inputs are attached to an array antenna AND powered on AND flagged as good.

+
+
Parameters:
+

inputs (CorrInput or list of CorrInput objects)

+
+
Returns:
+

pwds – If list, it is the same length as inputs. Value is True if input is +attached to an ArrayAntenna and powered-on and False otherwise

+
+
Return type:
+

boolean or list of bools.

+
+
+
+ +
+
+ch_util.tools.is_holographic(feed)[source]
+

Is this feed a holographic antenna?

+
+
Parameters:
+

feed (CorrInput)

+
+
Returns:
+

isholo

+
+
Return type:
+

bool

+
+
+
+ +
+
+ch_util.tools.is_noise_source(inp)[source]
+

Is this correlator input connected to a noise source?

+
+
Parameters:
+

inp (CorrInput)

+
+
Returns:
+

isnoise

+
+
Return type:
+

bool

+
+
+
+ +
+
+ch_util.tools.is_pathfinder(feed)[source]
+

Is this feed a Pathfinder antenna?

+
+
Parameters:
+

feed (CorrInput)

+
+
Returns:
+

ispathfinder

+
+
Return type:
+

bool

+
+
+
+ +
+
+ch_util.tools.lna_to_antenna(graph, lna)[source]
+

Find an antenna connected to an LNA.

+
+
Parameters:
+
    +
  • graph (obj:layout.graph or datetime.datetime) – The graph in which to do the search. If you pass a time, then the graph +will be constructed internally. (Note that the latter option will be +quite slow if you do repeated calls!)

  • +
  • lna (layout.component or string) – The LNA.

  • +
+
+
Returns:
+

antenna – The antenna.

+
+
Return type:
+

layout.component

+
+
+
+ +
+
+ch_util.tools.normalise_correlations(A, norm=None)[source]
+

Normalise to make a correlation matrix from a covariance matrix.

+
+
Parameters:
+
    +
  • A (np.ndarray[:, :]) – Matrix to normalise.

  • +
  • norm (np.ndarray[:,:]) – Normalize by diagonals of norm. +If None, then normalize by diagonals of A.

  • +
+
+
Returns:
+

    +
  • X (np.ndarray[:, :]) – Normalised correlation matrix.

  • +
  • ach (np.ndarray[:]) – Array of the square root diagonal elements that normalise the matrix.

  • +
+

+
+
+
+ +
+
+ch_util.tools.order_frequency_map_stream(fmap: ndarray, stream_id: ndarray) → ndarray[source]
+

Order stream_id components based on a frequency map.

+

Level order is [shuffle, crate, slot, link]

+
+
Parameters:
+
    +
  • fmap – frequency map

  • +
  • stream_id – 1-D array of stream_ids associated with each row in fmap

  • +
+
+
Returns:
+

shuffle, crate, slot, link for each frequency

+
+
Return type:
+

stream

+
+
+
+ +
+
+ch_util.tools.pack_product_array(exp_arr, axis=1)[source]
+

Pack full correlation matrices into upper triangular form.

+

It replaces the two feed axes of the matrix, with a single upper triangle product axis.

+
+
Parameters:
+
    +
  • exp_arr (np.ndarray[..., nfeed, nfeed, ...]) – Array of full correlation matrices.

  • +
  • axis (int, optional) – Index of the first feed axis. The second feed axis must be the next one.

  • +
+
+
Returns:
+

prod_arr – Array containing products packed in upper triangle format.

+
+
Return type:
+

np.ndarray[…, nprod, …]

+
+
+
+ +
+
+ch_util.tools.parse_chime_serial(sn)[source]
+

Parse a serial number into crate, slot, and SMA number.

+
+
Parameters:
+

sn (str) – Serial number to parse

+
+
Returns:
+

    +
  • crate (int) – Crate number

  • +
  • slot (int) – Slot number

  • +
  • sma (int) – SMA number

  • +
+

+
+
Raises:
+

RuntimeError – sn did not have the correct format.

+
+
+
+ +
+
+ch_util.tools.parse_old_serial(sn)[source]
+

Parse an old 8/16-channel serial number into slot, and SMA number.

+
+
Parameters:
+

sn (str) – Serial number to parse

+
+
Returns:
+

    +
  • slot (int) – Slot number

  • +
  • sma (int) – SMA number

  • +
+

+
+
Raises:
+

RuntimeError – sn did not have the correct format.

+
+
+
+ +
+
+ch_util.tools.parse_pathfinder_serial(sn)[source]
+

Parse a pathfinder serial number into crate, slot, and SMA number.

+
+
Parameters:
+

sn (str) – Serial number to parse

+
+
Returns:
+

    +
  • crate (int) – Crate number

  • +
  • slot (int) – Slot number

  • +
  • sma (int) – SMA number

  • +
+

+
+
Raises:
+

RuntimeError – sn did not have the correct format.

+
+
+
+ +
+
+ch_util.tools.rankN_approx(A, rank=1)[source]
+

Create the rank-N approximation to the matrix A.

+
+
Parameters:
+
    +
  • A (np.ndarray) – Matrix to approximate

  • +
  • rank (int, optional)

  • +
+
+
Returns:
+

B – Low rank approximation.

+
+
Return type:
+

np.ndarray

+
+
+
+ +
+
+ch_util.tools.redefine_stack_index_map(input_map, prod, stack, reverse_stack)[source]
+

Ensure that only baselines between array antennas are used to represent the stack.

+

The correlator will have inputs that are not connected to array antennas. These inputs +are flagged as bad and are not included in the stack, however, products that contain +their chan_id can still be used to represent a characteristic baseline in the stack +index map. This method creates a new stack index map that, if possible, only contains +products between two array antennas. This new stack index map should be used when +calculating baseline distances to fringestop stacked data.

+
+
Parameters:
+
    +
  • input_map (list of CorrInput) – List describing the inputs as they are in the file, output from +tools.get_correlator_inputs

  • +
  • prod (np.ndarray[nprod,] of dtype=('input_a', 'input_b')) – The correlation products as pairs of inputs.

  • +
  • stack (np.ndarray[nstack,] of dtype=('prod', 'conjugate')) – The index into the prod axis of a characteristic baseline included in the stack.

  • +
  • reverse_stack (np.ndarray[nprod,] of dtype=('stack', 'conjugate')) – The index into the stack axis that each prod belongs.

  • +
+
+
Returns:
+

    +
  • stack_new (np.ndarray[nstack,] of dtype=(‘prod’, ‘conjugate’)) – The updated stack index map, where each element is an index to a product +consisting of a pair of array antennas.

  • +
  • stack_flag (np.ndarray[nstack,] of dtype=bool) – Boolean flag that is True if this element of the stack index map is now valid, +and False if none of the baselines that were stacked contained array antennas.

  • +
+

+
+
+
+ +
+
+ch_util.tools.reorder_correlator_inputs(input_map, corr_inputs)[source]
+

Sort a list of correlator inputs into the order given in input map.

+
+
Parameters:
+
    +
  • input_map (np.ndarray) – Index map of correlator inputs.

  • +
  • corr_inputs (list) – List of CorrInput objects, e.g. the output from +get_correlator_inputs().

  • +
+
+
Returns:
+

corr_input_list – List of CorrInput instances in the new order. Returns None +where the serial number had no matching entry in parameter corr_inputs.

+
+
Return type:
+

list

+
+
+
+ +
+
+ch_util.tools.sensor_to_hk(graph, comp)[source]
+

Find what housekeeping channel a component is connected to.

+
+
Parameters:
+
    +
  • graph (obj:layout.graph or datetime.datetime) – The graph in which to do the search. If you pass a time, then the graph +will be constructed internally. (Note that the latter option will be +quite slow if you do repeated calls!)

  • +
  • comp (layout.component or string) – The component to search for (you can pass by serial number if you wish). +Currently, only components of type LNA, FLA and RFT thru are accepted.

  • +
+
+
Returns:
+

inp – The housekeeping input channel the sensor is connected to.

+
+
Return type:
+

HKInput

+
+
+
+ +
+
+ch_util.tools.serial_to_id(serial)[source]
+

Get the channel ID corresponding to a correlator input serial number.

+
+
Parameters:
+

serial (string) – Correlator input serial number.

+
+
Returns:
+

id

+
+
Return type:
+

int

+
+
+
+ +
+
+ch_util.tools.serial_to_location(serial)[source]
+

Get the internal correlator ordering and the +crate, slot, and sma number from a correlator input serial number.

+
+
Parameters:
+

serial (string) – Correlator input serial number.

+
+
Returns:
+

location – (corr_order, crate, slot, sma)

+
+
Return type:
+

4-tuple

+
+
+
+ +
+
+ch_util.tools.subtract_rank1_signal(vis, signal, axis=1, out=None, prod_map=None)[source]
+

Subtract a rank 1 signal from a set of visibilities packed in upper +triangular format.

+

This allows us to subtract the noise injection solutions +while minimising the intermediate products created.

+
+
Parameters:
+
    +
  • vis (np.ndarray[..., nprod, ...]) – Array of visibility products.

  • +
  • signal (np.ndarray[..., ninput, ...]) – Array of underlying signals. One signal per input.

  • +
  • axis (integer, optional) – The axis along which the inputs (or visibilities) are +contained. Currently only supports axis=1.

  • +
  • out (np.ndarray) – Array to place output in. If None create a new +array. This routine can safely use out = vis.

  • +
  • prod_map (ndarray of integer pairs) – Gives the mapping from product axis to input pairs. If not supplied, +icmap() is used.

  • +
+
+
Returns:
+

out – Visibility array with signal subtracted. Same shape as vis.

+
+
Return type:
+

np.ndarray

+
+
+
+ +
+
+ch_util.tools.unpack_product_array(prod_arr, axis=1, feeds=None)[source]
+

Expand packed products to correlation matrices.

+

This turns an axis of the packed upper triangle set of products into the +full correlation matrices. It replaces the specified product axis with two +axes, one for each feed. By setting feeds this routine can also +pull out a subset of feeds.

+
+
Parameters:
+
    +
  • prod_arr (np.ndarray[..., nprod, :]) – Array containing products packed in upper triangle format.

  • +
  • axis (int, optional) – Axis the products are contained on.

  • +
  • feeds (list of int, optional) – Indices of feeds to include. If None (default) use all feeds.

  • +
+
+
Returns:
+

corr_arr – Expanded array.

+
+
Return type:
+

np.ndarray[…, nfeed, nfeed, …]

+
+
+
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/_modules/ch_util/andata.html b/docs/_modules/ch_util/andata.html new file mode 100644 index 00000000..52ac0029 --- /dev/null +++ b/docs/_modules/ch_util/andata.html @@ -0,0 +1,3884 @@ + + + + + + ch_util.andata — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for ch_util.andata

+"""Analysis data format"""
+
+import warnings
+import glob
+from os import path
+import posixpath
+import re
+
+import numpy as np
+import h5py
+from bitshuffle import h5
+
+from caput import memh5, tod
+import caput.time as ctime
+
+tmp = h5  # To appease linters who complain about unused imports.
+
+
+ni_msg = "Ask Kiyo to implement this."
+
+
+# Datasets in the Acq files whose shape is the same as the visibilities.
+# Variable only used for legacy archive version 1.
+ACQ_VIS_SHAPE_DATASETS = ("vis", "vis_flag", "vis_weight")
+
+# Datasets in the Acq files that are visibilities or gated visibilities
+ACQ_VIS_DATASETS = "^vis$|^gated_vis[0-9]$"
+
+# Datasets in the HK files that are data.
+HK_DATASET_NAMES = ("data", "^mux[0-9]{2}$")
+
+# List of axes over which we can concatenate datasets.  To be concatenated, all
+# datasets must have one and only one of these in their 'axes' attribute.
+CONCATENATION_AXES = (
+    "time",
+    "gated_time0",
+    "gated_time1",
+    "gated_time2",
+    "gated_time3",
+    "gated_time4",
+    "snapshot",
+    "update_time",
+    "station_time_blockhouse",
+)
+
+ANDATA_VERSION = "3.1.0"
+
+
+# Main Class Definition
+# ---------------------
+
+
+
+[docs] +class BaseData(tod.TOData): + """CHIME data in analysis format. + + Inherits from :class:`caput.memh5.BasicCont`. + + This is intended to be the main data class for the post + acquisition/real-time analysis parts of the pipeline. This class is laid + out very similarly to how the data is stored in analysis format hdf5 files + and the data in this class can be optionally stored in such an hdf5 file + instead of in memory. + + Parameters + ---------- + h5_data : h5py.Group, memh5.MemGroup or hdf5 filename, optional + Underlying h5py like data container where data will be stored. If not + provided a new :class:`caput.memh5.MemGroup` instance will be created. + """ + + time_axes = CONCATENATION_AXES + + # Convert strings to/from unicode on load and save + convert_attribute_strings = True + convert_dataset_strings = True + + def __new__(cls, h5_data=None, **kwargs): + """Used to pick which subclass to instantiate based on attributes in + data.""" + + new_cls = subclass_from_obj(cls, h5_data) + + self = super(BaseData, new_cls).__new__(new_cls) + return self + + def __init__(self, h5_data=None, **kwargs): + super(BaseData, self).__init__(h5_data, **kwargs) + if self._data.file.mode == "r+": + self._data.require_group("cal") + self._data.require_group("flags") + self._data.require_group("reverse_map") + self.attrs["andata_version"] = ANDATA_VERSION + + # - The main interface - # + + @property + def datasets(self): + """Stores hdf5 datasets holding all data. + + Each dataset can reference a calibration scheme in + ``datasets[name].attrs['cal']`` which refers to an entry in + :attr:`~BaseData.cal`. + + Do not try to add a new dataset by assigning to an item of this + property. Use `create_dataset` instead. + + Returns + ------- + datasets : read only dictionary + Entries are :mod:`h5py` or :mod:`caput.memh5` datasets. 
+ + """ + + out = {} + for name, value in self._data.items(): + if not memh5.is_group(value): + out[name] = value + return memh5.ro_dict(out) + + @property + def flags(self): + """Datasets representing flags and data weights. + + Returns + ------- + flags : read only dictionary + Entries are :mod:`h5py` or :mod:`caput.memh5` datasets. + + """ + + try: + g = self._data["flags"] + except KeyError: + return memh5.ro_dict({}) + + out = {} + for name, value in g.items(): + if not memh5.is_group(value): + out[name] = value + return memh5.ro_dict(out) + + @property + def cal(self): + """Stores calibration schemes for the datasets. + + Each entry is a calibration scheme which itself is a dict storing + meta-data about calibration. + + Do not try to add a new entry by assigning to an element of this + property. Use :meth:`~BaseData.create_cal` instead. + + Returns + ------- + cal : read only dictionary + Calibration schemes. + + """ + + out = {} + for name, value in self._data["cal"].items(): + out[name] = value.attrs + return memh5.ro_dict(out) + + # - Methods used by base class to control container structure. - # + +
+[docs] + def dataset_name_allowed(self, name): + """Permits datasets in the root and 'flags' groups.""" + + parent_name, name = posixpath.split(name) + return True if parent_name == "/" or parent_name == "/flags" else False
+ + +
+[docs] + def group_name_allowed(self, name): + """Permits only the "flags" group.""" + + return True if name == "/flags" else False
+ + + # - Methods for manipulating and building the class. - # + +
+[docs] + def create_cal(self, name, cal=None): + """Create a new cal entry.""" + + if cal is None: + cal = {} + self._data["cal"].create_group(name) + for key, value in cal.items(): + self._data["cal"][name].attrs[key] = value
+ + +
+[docs] + def create_flag(self, name, *args, **kwargs): + """Create a new flags dataset.""" + return self.create_dataset("flags/" + name, *args, **kwargs)
+ + +
+[docs] + def create_reverse_map(self, axis_name, reverse_map): + """Create a new reverse map.""" + return self._data["reverse_map"].create_dataset(axis_name, data=reverse_map)
+ + +
+[docs] + def del_reverse_map(self, axis_name): + """Delete a reverse map.""" + del self._data["reverse_map"][axis_name]
+ + + # - These describe the various data axes. - # + + @property + def ntime(self): + """Length of the time axis of the visibilities.""" + + return len(self.index_map["time"]) + + @property + def time(self): + """The 'time' axis centres as Unix/POSIX time.""" + + if ( + self.index_map["time"].dtype == np.float32 + or self.index_map["time"].dtype == np.float64 + ): + # Already a calculated timestamp. + return self.index_map["time"][:] + + else: + time = _timestamp_from_fpga_cpu( + self.index_map["time"]["ctime"], 0, self.index_map["time"]["fpga_count"] + ) + + alignment = self.index_attrs["time"].get("alignment", 0) + + if alignment != 0: + time = time + alignment * abs(np.median(np.diff(time)) / 2) + + return time + + @classmethod + def _interpret_and_read(cls, acq_files, start, stop, datasets, out_group): + # Save a reference to the first file to get index map information for + # later. + f_first = acq_files[0] + + andata_objs = [cls(d) for d in acq_files] + data = concatenate( + andata_objs, + out_group=out_group, + start=start, + stop=stop, + datasets=datasets, + convert_attribute_strings=cls.convert_attribute_strings, + convert_dataset_strings=cls.convert_dataset_strings, + ) + for k, v in f_first["index_map"].attrs.items(): + data.create_index_map(k, v) + return data + +
+[docs] + @classmethod + def from_acq_h5( + cls, acq_files, start=None, stop=None, datasets=None, out_group=None, **kwargs + ): + """Convert acquisition format hdf5 data to analysis data object. + + Reads hdf5 data produced by the acquisition system and converts it to + analysis format in memory. + + Parameters + ---------- + acq_files : filename, `h5py.File` or list there-of or filename pattern + Files to convert from acquisition format to analysis format. + Filename patterns with wild cards (e.g. "foo*.h5") are supported. + start : integer, optional + What frame to start at in the full set of files. + stop : integer, optional + What frame to stop at in the full set of files. + datasets : list of strings + Names of datasets to include from acquisition files. Default is to + include all datasets found in the acquisition files. + out_group : `h5py.Group`, hdf5 filename or `memh5.Group` + Underlying hdf5 like container that will store the data for the + BaseData instance. + + Examples + -------- + Examples are analogous to those of :meth:`CorrData.from_acq_h5`. + + """ + + # Make sure the input is a sequence and that we have at least one file. + acq_files = tod.ensure_file_list(acq_files) + if not acq_files: + raise ValueError("Acquisition file list is empty.") + + to_close = [False] * len(acq_files) + try: + # Open the files while keeping track of this so that we can close + # them later. + _open_files(acq_files, to_close) + + # Now read them in: the functionality here is provided by the + # overloaded method in the inherited class. If this method is + # called on this base class, an exception will be raised. + data = cls._interpret_and_read( + acq_files=acq_files, + start=start, + stop=stop, + datasets=datasets, + out_group=out_group, + **kwargs + ) + + # Set an attribute on the time axis specifying alignment + if "time" in data.index_map: + data.index_attrs["time"]["alignment"] = 1 + + finally: + # Close any files opened in this function. 
+ for ii in range(len(acq_files)): + if len(to_close) > ii and to_close[ii]: + acq_files[ii].close() + + return data
+ + + @property + def timestamp(self): + """Deprecated name for :attr:`~BaseData.time`.""" + + return self.time + +
+[docs] + @staticmethod + def convert_time(time): + return ctime.ensure_unix(time)
+
+ + + +
+[docs] +class CorrData(BaseData): + """Subclass of :class:`BaseData` for correlation data.""" + + @property + def vis(self): + """Convenience access to the visibilities array. + + Equivalent to `self.datasets['vis']`. + """ + return self.datasets["vis"] + + @property + def gain(self): + """Convenience access to the gain dataset. + + Equivalent to `self.datasets['gain']`. + """ + return self.datasets["gain"] + + @property + def weight(self): + """Convenience access to the visibility weight array. + + Equivalent to `self.flags['vis_weight']`. + """ + return self.flags["vis_weight"] + + @property + def input_flags(self): + """Convenience access to the input flags dataset. + + Equivalent to `self.flags['inputs']`. + """ + return self.flags["inputs"] + + @property + def dataset_id(self): + """Access dataset id dataset in unicode format.""" + dsid = memh5.ensure_unicode(self.flags["dataset_id"][:]) + dsid.flags.writeable = False + + return dsid + + @property + def nprod(self): + """Length of the prod axis.""" + return len(self.index_map["prod"]) + + @property + def prod(self): + """The correlation product axis as channel pairs.""" + return self.index_map["prod"] + + @property + def nfreq(self): + """Length of the freq axis.""" + return len(self.index_map["freq"]) + + @property + def freq(self): + """The spectral frequency axis as bin centres in MHz.""" + return self.index_map["freq"]["centre"] + + @property + def ninput(self): + return len(self.index_map["input"]) + + @property + def input(self): + return self.index_map["input"] + + @property + def nstack(self): + return len(self.index_map["stack"]) + + @property + def stack(self): + """The correlation product axis as channel pairs.""" + return self.index_map["stack"] + + @property + def prodstack(self): + """A pair of input indices representative of those in the stack. 
+ + Note, these are correctly conjugated on return, and so calculations + of the baseline and polarisation can be done without additionally + looking up the stack conjugation. + """ + if not self.is_stacked: + return self.prod + + t = self.index_map["prod"][:][self.index_map["stack"]["prod"]] + + prodmap = t.copy() + conj = self.stack["conjugate"] + prodmap["input_a"] = np.where(conj, t["input_b"], t["input_a"]) + prodmap["input_b"] = np.where(conj, t["input_a"], t["input_b"]) + + return prodmap + + @property + def is_stacked(self): + return "stack" in self.index_map and len(self.stack) != len(self.prod) + + @classmethod + def _interpret_and_read( + cls, + acq_files, + start, + stop, + datasets, + out_group, + stack_sel, + prod_sel, + input_sel, + freq_sel, + apply_gain, + renormalize, + ): + # Selection defaults. + freq_sel = _ensure_1D_selection(freq_sel) + # If calculating the 'gain' dataset, ensure prerequisite datasets + # are loaded. + if datasets is not None and ( + ("vis" in datasets and apply_gain) or ("gain" in datasets) + ): + datasets = tuple(datasets) + ("gain", "gain_exp", "gain_coeff") + # Always load packet loss dataset if available, so we can normalized + # for it. + if datasets is not None: + norm_dsets = [d for d in datasets if re.match(ACQ_VIS_DATASETS, d)] + if "vis_weight" in datasets: + norm_dsets += ["vis_weight"] + if len(norm_dsets): + datasets = tuple(datasets) + ("flags/lost_packet_count",) + + # Inspect the header of the first file for version information. + f = acq_files[0] + try: + archive_version = memh5.bytes_to_unicode(f.attrs["archive_version"]) + except KeyError: + archive_version = "1.0.0" + + # Transform the dataset according to the version. + if versiontuple(archive_version) < versiontuple("2.0.0"): + # Nothing to do for input_sel as there is not input axis. + if input_sel is not None: + msg = ( + "*input_sel* specified for archive version" + " 1.0 data which has no input axis." 
+ ) + raise ValueError(msg) + prod_sel = _ensure_1D_selection(prod_sel) + data = andata_from_acq1( + acq_files, start, stop, prod_sel, freq_sel, datasets, out_group + ) + input_sel = _ensure_1D_selection(input_sel) + elif versiontuple(archive_version) >= versiontuple("2.0.0"): + data, input_sel = andata_from_archive2( + cls, + acq_files, + start, + stop, + stack_sel, + prod_sel, + input_sel, + freq_sel, + datasets, + out_group, + ) + + # Generate the correct index_map/input for older files + if versiontuple(archive_version) < versiontuple("2.1.0"): + _remap_inputs(data) + + # Insert the gain dataset if requested, or datasets is not specified + # For version 3.0.0 we don't need to do any of this + if versiontuple(archive_version) < versiontuple("3.0.0") and ( + datasets is None or "gain" in datasets + ): + _insert_gains(data, input_sel) + + # Remove the FPGA applied gains (need to invert them first). + if apply_gain and any( + [re.match(ACQ_VIS_DATASETS, key) for key in data.datasets] + ): + from ch_util import tools + + gain = data.gain[:] + + # Create an array of safe-inverse gains. + gain_inv = tools.invert_no_zero(gain) + + # Loop over datasets and apply inverse gains where appropriate + for key, dset in data.datasets.items(): + if ( + re.match(ACQ_VIS_DATASETS, key) + and dset.attrs["axis"][1] == "prod" + ): + tools.apply_gain( + dset[:], + gain_inv, + out=dset[:], + prod_map=data.index_map["prod"], + ) + + # Fix up wrapping of FPGA counts + if versiontuple(archive_version) < versiontuple("2.4.0"): + _unwrap_fpga_counts(data) + + # Renormalize for dropped packets + # Not needed for > 3.0 + if ( + versiontuple(archive_version) < versiontuple("3.0.0") + and renormalize + and "lost_packet_count" in data.flags + ): + _renormalize(data) + + return data + +
+[docs] + @classmethod + def from_acq_h5(cls, acq_files, start=None, stop=None, **kwargs): + """Convert acquisition format hdf5 data to analysis data object. + + This method overloads the one in BaseData. + + Changed Jan. 22, 2016: input arguments are now ``(acq_files, start, + stop, **kwargs)`` instead of ``(acq_files, start, stop, prod_sel, + freq_sel, datasets, out_group)``. + + Reads hdf5 data produced by the acquisition system and converts it to + analysis format in memory. + + Parameters + ---------- + acq_files : filename, `h5py.File` or list there-of or filename pattern + Files to convert from acquisition format to analysis format. + Filename patterns with wild cards (e.g. "foo*.h5") are supported. + start : integer, optional + What frame to start at in the full set of files. + stop : integer, optional + What frame to stop at in the full set of files. + stack_sel : valid numpy index + Used to select a subset of the stacked correlation products. + Only one of *stack_sel*, *prod_sel*, and *input_sel* may be + specified, with *prod_sel* preferred over *input_sel* and + *stack_sel* proferred over both. + :mod:`h5py` fancy indexing supported but to be used with caution + due to poor reading performance. + prod_sel : valid numpy index + Used to select a subset of correlation products. + Only one of *stack_sel*, *prod_sel*, and *input_sel* may be + specified, with *prod_sel* preferred over *input_sel* and + *stack_sel* proferred over both. + :mod:`h5py` fancy indexing supported but to be used with caution + due to poor reading performance. + input_sel : valid numpy index + Used to select a subset of correlator inputs. + Only one of *stack_sel*, *prod_sel*, and *input_sel* may be + specified, with *prod_sel* preferred over *input_sel* and + *stack_sel* proferred over both. + :mod:`h5py` fancy indexing supported but to be used with caution + due to poor reading performance. + freq_sel : valid numpy index + Used to select a subset of frequencies. 
+ :mod:`h5py` fancy indexing supported but to be used with caution + due to poor reading performance. + datasets : list of strings + Names of datasets to include from acquisition files. Default is to + include all datasets found in the acquisition files. + out_group : `h5py.Group`, hdf5 filename or `memh5.Group` + Underlying hdf5 like container that will store the data for the + BaseData instance. + apply_gain : boolean, optional + Whether to apply the inverse gains to the visibility datasets. + renormalize : boolean, optional + Whether to renormalize for dropped packets. + distributed : boolean, optional + Load data into a distributed dataset. + comm : MPI.Comm + Communicator to distributed over. Use MPI.COMM_WORLD if not set. + + Returns + ------- + data : CorrData + Loaded data object. + + Examples + -------- + + Suppose we have two acquisition format files (this test data is + included in the ch_util repository): + + >>> import os + >>> import glob + >>> from . import test_andata + >>> os.chdir(test_andata.data_path) + >>> print(glob.glob('test_acq.h5*')) + ['test_acq.h5.0001', 'test_acq.h5.0002'] + + These can be converted into one big analysis format data object: + + >>> data = CorrData.from_acq_h5('test_acq.h5*') + >>> print(data.vis.shape) + (1024, 36, 31) + + If we only want a subset of the total frames (time bins) in these files + we can supply start and stop indices. + + >>> data = CorrData.from_acq_h5('test_acq.h5*', start=5, stop=-3) + >>> print(data.vis.shape) + (1024, 36, 23) + + If we want a subset of the correlation products or spectral + frequencies, specify the *prod_sel* or *freq_sel* respectively: + + >>> data = CorrData.from_acq_h5( + ... 'test_acq.h5*', + ... prod_sel=[0, 8, 15, 21], + ... freq_sel=slice(5, 15), + ... ) + >>> print(data.vis.shape) + (10, 4, 31) + >>> data = CorrData.from_acq_h5('test_acq.h5*', prod_sel=1, + ... 
freq_sel=slice(None, None, 10)) + >>> print(data.vis.shape) + (103, 1, 31) + + The underlying hdf5-like container that holds the *analysis format* + data can also be specified. + + >>> group = memh5.MemGroup() + >>> data = CorrData.from_acq_h5('test_acq.h5*', out_group=group) + >>> print(group['vis'].shape) + (1024, 36, 31) + >>> group['vis'] is data.vis + True + + """ + + stack_sel = kwargs.pop("stack_sel", None) + prod_sel = kwargs.pop("prod_sel", None) + input_sel = kwargs.pop("input_sel", None) + freq_sel = kwargs.pop("freq_sel", None) + datasets = kwargs.pop("datasets", None) + out_group = kwargs.pop("out_group", None) + apply_gain = kwargs.pop("apply_gain", True) + renormalize = kwargs.pop("renormalize", True) + distributed = kwargs.pop("distributed", False) + comm = kwargs.pop("comm", None) + + if kwargs: + msg = "Received unknown keyword arguments {}." + raise ValueError(msg.format(kwargs.keys())) + + # If want a distributed file, just pass straight off to a private method + if distributed: + return cls._from_acq_h5_distributed( + acq_files=acq_files, + start=start, + stop=stop, + datasets=datasets, + stack_sel=stack_sel, + prod_sel=prod_sel, + input_sel=input_sel, + freq_sel=freq_sel, + apply_gain=apply_gain, + renormalize=renormalize, + comm=comm, + ) + + return super(CorrData, cls).from_acq_h5( + acq_files=acq_files, + start=start, + stop=stop, + datasets=datasets, + out_group=out_group, + stack_sel=stack_sel, + prod_sel=prod_sel, + input_sel=input_sel, + freq_sel=freq_sel, + apply_gain=apply_gain, + renormalize=renormalize, + )
    @classmethod
    def _from_acq_h5_distributed(
        cls,
        acq_files,
        start,
        stop,
        stack_sel,
        prod_sel,
        input_sel,
        freq_sel,
        datasets,
        apply_gain,
        renormalize,
        comm,
    ):
        """Load acquisition files into a frequency-distributed CorrData.

        Each MPI rank reads a contiguous share of the (globally selected)
        frequency axis via the serial ``from_acq_h5`` path, then the local
        pieces are wrapped into distributed datasets on a new container.
        Called from :meth:`from_acq_h5` when ``distributed=True``; all
        selection arguments have the same meaning as there.
        """
        from mpi4py import MPI
        from caput import mpiutil, mpiarray, memh5

        # Turn into actual list of files
        files = tod.ensure_file_list(acq_files)

        # Construct communicator to use.
        if comm is None:
            comm = MPI.COMM_WORLD

        # Determine the total number of frequencies.
        # Only rank 0 touches the file; the result is broadcast so every
        # rank agrees on the global frequency count.
        nfreq = None
        if comm.rank == 0:
            with h5py.File(files[0], "r") as f:
                nfreq = len(f["index_map/freq"][:])
        nfreq = comm.bcast(nfreq, root=0)

        # Calculate the global frequency selection.
        # A slice is expanded to an explicit index list so it can be
        # partitioned across ranks below.
        freq_sel = _ensure_1D_selection(freq_sel)
        if isinstance(freq_sel, slice):
            freq_sel = list(range(*freq_sel.indices(nfreq)))
        nfreq = len(freq_sel)

        # Calculate the local frequency selection (this rank's share).
        n_local, f_start, f_end = mpiutil.split_local(nfreq)
        local_freq_sel = _ensure_1D_selection(
            _convert_to_slice(freq_sel[f_start:f_end])
        )

        # Load just the local part of the data.
        local_data = super(CorrData, cls).from_acq_h5(
            acq_files=acq_files,
            start=start,
            stop=stop,
            datasets=datasets,
            out_group=None,
            stack_sel=stack_sel,
            prod_sel=prod_sel,
            input_sel=input_sel,
            freq_sel=local_freq_sel,
            apply_gain=apply_gain,
            renormalize=renormalize,
        )

        # Datasets that we should convert into distribute ones
        # (presumably everything with a leading frequency axis — TODO confirm)
        _DIST_DSETS = [
            "vis",
            "vis_flag",
            "vis_weight",
            "gain",
            "gain_coeff",
            "frac_lost",
            "dataset_id",
            "eval",
            "evec",
            "erms",
        ]

        # Initialise distributed container
        data = CorrData(distributed=True, comm=comm)

        # Copy over the attributes
        memh5.copyattrs(
            local_data.attrs, data.attrs, convert_strings=cls.convert_attribute_strings
        )

        # Iterate over the datasets and copy them over
        for name, old_dset in local_data.datasets.items():
            # If this should be distributed, extract the sections and turn them into an MPIArray
            if name in _DIST_DSETS:
                array = mpiarray.MPIArray.wrap(old_dset._data, axis=0, comm=comm)
            else:
                # Otherwise just copy out the old dataset
                array = old_dset[:]

            # Create the new dataset and copy over attributes
            new_dset = data.create_dataset(name, data=array)
            memh5.copyattrs(
                old_dset.attrs,
                new_dset.attrs,
                convert_strings=cls.convert_attribute_strings,
            )

        # Iterate over the flags and copy them over
        for name, old_dset in local_data.flags.items():
            # If this should be distributed, extract the sections and turn them into an MPIArray
            if name in _DIST_DSETS:
                array = mpiarray.MPIArray.wrap(old_dset._data, axis=0, comm=comm)
            else:
                # Otherwise just copy out the old dataset
                array = old_dset[:]

            # Create the new dataset and copy over attributes
            new_dset = data.create_flag(name, data=array)
            memh5.copyattrs(
                old_dset.attrs,
                new_dset.attrs,
                convert_strings=cls.convert_attribute_strings,
            )

        # Copy over index maps
        for name, index_map in local_data.index_map.items():
            # Get reference to actual array
            index_map = index_map[:]

            # We need to explicitly stitch the frequency map back together
            # because each rank only loaded its local slice of frequencies.
            if name == "freq":
                # Gather all frequencies onto all nodes and stich together
                freq_gather = comm.allgather(index_map)
                index_map = np.concatenate(freq_gather)

            # Create index map
            data.create_index_map(name, index_map)
            memh5.copyattrs(local_data.index_attrs[name], data.index_attrs[name])

        # Copy over reverse maps
        for name, reverse_map in local_data.reverse_map.items():
            # Get reference to actual array
            reverse_map = reverse_map[:]

            # Create index map
            data.create_reverse_map(name, reverse_map)

        return data
    @classmethod
    def from_acq_h5_fast(cls, fname, comm=None, freq_sel=None, start=None, stop=None):
        """Efficiently read a CorrData file in a distributed fashion.

        This reads a single file from disk into a distributed container. In
        contrast to `CorrData.from_acq_h5` it is more restrictive,
        allowing only contiguous slices of the frequency and time axes,
        and no down selection of the input/product/stack axis.

        Parameters
        ----------
        fname : str
            File name to read. Only supports one file at a time.
        comm : MPI.Comm, optional
            MPI communicator to distribute over. By default this will
            use `MPI.COMM_WORLD`.
        freq_sel : slice, optional
            A selection over the frequency axis. Only `slice` objects
            are supported. If not set, read all frequencies.
        start, stop : int, optional
            Start and stop indexes of the time selection.

        Returns
        -------
        data : andata.CorrData
            The CorrData container.

        Raises
        ------
        ValueError
            If `freq_sel` is not a slice, or the file's archive version
            is older than 3.0.0.
        """
        from mpi4py import MPI
        from caput import misc, mpiarray, memh5

        ## Datasets to read, if it's not listed here, it's not read at all
        # Datasets read by andata (should be small)
        DSET_CORE = ["flags/inputs", "flags/frac_lost", "flags/dataset_id"]
        # Datasets read directly and then inserted after the fact
        # (should have an input/product/stack axis, as axis=1)
        DSETS_DIRECT = ["vis", "gain", "flags/vis_weight"]

        if comm is None:
            comm = MPI.COMM_WORLD

        # Check the frequency selection
        if freq_sel is None:
            freq_sel = slice(None)
        if not isinstance(freq_sel, slice):
            raise ValueError("freq_sel must be a slice object, not %s" % repr(freq_sel))

        # Create the time selection
        time_sel = slice(start, stop)

        # Read the core datasets through the normal (distributed) path; this
        # also populates attributes and index maps on the container.
        ad = cls.from_acq_h5(
            fname,
            datasets=DSET_CORE,
            distributed=True,
            comm=comm,
            freq_sel=freq_sel,
            start=start,
            stop=stop,
        )

        # Pre-3.0 archives need gain/renormalization fix-ups that this fast
        # path deliberately skips, so refuse them outright.
        archive_version = memh5.bytes_to_unicode(ad.attrs["archive_version"])
        if versiontuple(archive_version) < versiontuple("3.0.0"):
            raise ValueError("Fast read not supported for files with version < 3.0.0")

        # Specify the selection to read from the file
        # (axes are assumed to be freq, input/prod/stack, time — per the
        # axis=1 read below)
        sel = (freq_sel, slice(None), time_sel)

        with misc.open_h5py_mpi(fname, "r", comm=comm) as fh:
            for ds_name in DSETS_DIRECT:
                if ds_name not in fh:
                    continue

                # Read dataset directly (distributed over input/product/stack axis) and
                # add to container
                arr = mpiarray.MPIArray.from_hdf5(
                    fh, ds_name, comm=comm, axis=1, sel=sel
                )
                # Redistribute over frequency to match the core container.
                arr = arr.redistribute(axis=0)
                dset = ad.create_dataset(ds_name, data=arr, distributed=True)

                # Copy over the attributes
                memh5.copyattrs(
                    fh[ds_name].attrs,
                    dset.attrs,
                    convert_strings=cls.convert_attribute_strings,
                )

        return ad
+
+ + + +# For backwards compatibility. +AnData = CorrData + + +
class HKData(BaseData):
    """Subclass of :class:`BaseData` for housekeeping data."""

    @property
    def atmel(self):
        """Get the ATMEL board that took these data.

        Returns
        -------
        comp : :obj:`layout.component`
            The ATMEL component that took these data.
        """
        # BUGFIX: the previous try/except imported `layout` identically in
        # both branches; a single import is equivalent and clearer.
        from . import layout

        sn = "ATMEGA" + "".join([str(i) for i in self.attrs["atmel_id"]])
        return layout.component.get(sn=sn)

    @property
    def mux(self):
        """Get the list of muxes in the data (cached on first access)."""
        try:
            return self._mux
        except AttributeError:
            # Collect the mux address of every dataset, then sort.
            addresses = [
                d.attrs["mux_address"][0] for d in self.datasets.values()
            ]
            self._mux = np.sort(addresses)
            return self._mux

    @property
    def nmux(self):
        """Get the number of muxes in the data."""
        return len(self.mux)

    def _find_mux(self, mux):
        # Return the dataset whose mux address matches, or raise.
        for d in self.datasets.values():
            if d.attrs["mux_address"] == mux:
                return d
        raise ValueError("No dataset with mux = %d is present." % (mux))

    def chan(self, mux=-1):
        """Convenience access to the list of channels in a given mux.

        Parameters
        ----------
        mux : int
            A mux number. For housekeeping files with no multiplexing (e.g.,
            FLA's), leave this as ``-1``.

        Returns
        -------
        n : list
            The channels numbers.

        Raises
        ------
        :exc:`ValueError`
            Raised if **mux** does not exist.
        """
        # Lazily create the per-mux cache.
        try:
            self._chan
        except AttributeError:
            self._chan = dict()
        try:
            return self._chan[mux]
        except KeyError:
            ds = self._find_mux(mux)
            # The first axis name of the dataset is the channel index map.
            self._chan[mux] = list(self.index_map[ds.attrs["axis"][0]])
            return self._chan[mux]

    def nchan(self, mux=-1):
        """Convenience access to the number of channels in a given mux.

        Parameters
        ----------
        mux : int
            A mux number. For housekeeping files with no multiplexing (e.g.,
            FLA's), leave this as ``-1``.

        Returns
        -------
        n : int
            The number of channels

        Raises
        ------
        :exc:`ValueError`
            Raised if **mux** does not exist.
        """
        return len(self.chan(mux))

    def tod(self, chan, mux=-1):
        """Convenience access to a single time-ordered datastream (TOD).

        Parameters
        ----------
        chan : int
            A channel number. (Generally, they should be in the range 0--7 for
            non-multiplexed data and 0--15 for multiplexed data.)
        mux : int
            A mux number. For housekeeping files with no multiplexing (e.g.,
            FLA's), leave this as ``-1``.

        Returns
        -------
        tod : :obj:`numpy.array`
            A 1D array of values for the requested channel/mux combination. Note
            that a reference to the data in the dataset is returned; this method
            does not make a copy.

        Raises
        ------
        :exc:`ValueError`
            Raised if one of **chan** or **mux** is not present in any dataset.
        """
        ds = self._find_mux(mux)
        chan_map = ds.attrs["axis"][0]
        try:
            idx = list(self.index_map[chan_map]).index(chan)
        except (KeyError, ValueError):
            # BUGFIX: `list.index` raises ValueError (not KeyError) when the
            # channel is absent, so the original `except KeyError` let an
            # uninformative ValueError escape. Catch both: KeyError for a
            # missing index map, ValueError for a missing channel.
            raise ValueError("No channel %d exists for mux %d." % (chan, mux))

        # Return the data.
        return ds[idx, :]

    @classmethod
    def _interpret_and_read(cls, acq_files, start, stop, datasets, out_group):
        # Save a reference to the first file to get index map information for
        # later.
        f_first = acq_files[0]

        # Define dataset filter to do the transpose.
        def dset_filter(dataset):
            name = path.split(dataset.name)[1]
            # Only transpose datasets whose name matches a known HK pattern.
            if any(re.match(regex, name) for regex in HK_DATASET_NAMES):
                # Do the transpose (time becomes the fast axis).
                data = np.empty((len(dataset[0]), len(dataset)), dtype=dataset[0].dtype)
                data = memh5.MemDatasetCommon.from_numpy_array(data)
                # Element-wise copy: rows of the source may be h5py
                # structures, so avoid relying on a 2D array transpose.
                for i in range(len(dataset)):
                    for j in range(len(dataset[i])):
                        data[j, i] = dataset[i][j]
                memh5.copyattrs(
                    dataset.attrs,
                    data.attrs,
                    convert_strings=cls.convert_attribute_strings,
                )
                data.attrs["axis"] = (dataset.attrs["axis"][1], "time")
                return data
            return dataset

        andata_objs = [HKData(d) for d in acq_files]
        data = concatenate(
            andata_objs,
            out_group=out_group,
            start=start,
            stop=stop,
            datasets=datasets,
            dataset_filter=dset_filter,
            convert_attribute_strings=cls.convert_attribute_strings,
            convert_dataset_strings=cls.convert_dataset_strings,
        )

        # Some index maps saved as attributes, so convert to datasets.
        for k, v in f_first["index_map"].attrs.items():
            data.create_index_map(k, v)
        return data

    @classmethod
    def from_acq_h5(
        cls, acq_files, start=None, stop=None, datasets=None, out_group=None
    ):
        """Convert acquisition format hdf5 data to analysis data object.

        This method overloads the one in BaseData.

        Reads hdf5 data produced by the acquisition system and converts it to
        analysis format in memory.

        Parameters
        ----------
        acq_files : filename, `h5py.File` or list there-of or filename pattern
            Files to convert from acquisition format to analysis format.
            Filename patterns with wild cards (e.g. "foo*.h5") are supported.
        start : integer, optional
            What frame to start at in the full set of files.
        stop : integer, optional
            What frame to stop at in the full set of files.
        datasets : list of strings
            Names of datasets to include from acquisition files. Default is to
            include all datasets found in the acquisition files.
        out_group : `h5py.Group`, hdf5 filename or `memh5.Group`
            Underlying hdf5 like container that will store the data for the
            BaseData instance.

        Examples
        --------
        Examples are analogous to those of :meth:`CorrData.from_acq_h5`.
        """
        return super(HKData, cls).from_acq_h5(
            acq_files=acq_files,
            start=start,
            stop=stop,
            datasets=datasets,
            out_group=out_group,
        )
+
+ + + +
class HKPData(memh5.MemDiskGroup):
    """Subclass of :class:`BaseData` for housekeeping data."""

    # Convert strings to/from unicode on load and save
    convert_attribute_strings = True
    convert_dataset_strings = True

    @staticmethod
    def metrics(acq_files):
        """Get the names of the metrics contained within the files.

        Parameters
        ----------
        acq_files: list
            List of acquisition filenames.

        Returns
        -------
        metrics : set of str
            Union of the top-level dataset (metric) names in the files.
        """

        import h5py

        metric_names = set()

        if isinstance(acq_files, str):
            acq_files = [acq_files]

        for fname in acq_files:
            with h5py.File(fname, "r") as fh:
                metric_names |= set(fh.keys())

        return metric_names

    @classmethod
    def from_acq_h5(
        cls, acq_files, start=None, stop=None, metrics=None, datasets=None, **kwargs
    ):
        """Load in the housekeeping files.

        Parameters
        ----------
        acq_files : list
            List of files to load.
        start, stop : datetime or float, optional
            Start and stop times for the range of data to load. Default is all.
        metrics : list
            Names of metrics to load. Default is all.
        datasets : list
            Synonym for metrics (the value of metrics will take precedence).

        Returns
        -------
        data : HKPData
        """

        metrics = metrics if metrics is not None else datasets

        # Default to a read-only, on-disk view of each file.
        if "mode" not in kwargs:
            kwargs["mode"] = "r"
        if "ondisk" not in kwargs:
            kwargs["ondisk"] = True

        acq_files = [acq_files] if isinstance(acq_files, str) else acq_files
        files = [
            cls.from_file(
                f,
                convert_attribute_strings=cls.convert_attribute_strings,
                convert_dataset_strings=cls.convert_dataset_strings,
                **kwargs
            )
            for f in acq_files
        ]

        def filter_time_range(dset):
            """Trim dataset to the specified time range."""
            data = dset[:]
            time = data["time"]

            mask = np.ones(time.shape, dtype=bool)

            if start is not None:
                tstart = ctime.ensure_unix(start)
                mask[:] *= time >= tstart

            if stop is not None:
                tstop = ctime.ensure_unix(stop)
                mask[:] *= time <= tstop

            return data[mask]

        def filter_file(f):
            """Filter a file's data down to the requested metrics
            and time range.
            """
            metrics_to_copy = set(f.keys())

            if metrics is not None:
                metrics_to_copy = metrics_to_copy & set(metrics)

            filtered_data = {}
            for dset_name in metrics_to_copy:
                filtered_data[dset_name] = filter_time_range(f[dset_name])
            return filtered_data

        def get_full_dtype(dset_name, filtered_data):
            """Returns a numpy.dtype object with the union of all columns
            from all files. Also returns the total length of the data set
            (metric) including all files.
            """

            length = 0
            all_columns = []
            all_types = []
            # review number of times and columns:
            for ii in range(len(filtered_data)):
                # If this file has this data set:
                if dset_name not in filtered_data[ii]:
                    continue
                # Increase the length of the data:
                length += len(filtered_data[ii][dset_name])
                # Add 'time' and 'value' columns first:
                if "time" not in all_columns:
                    all_columns.append("time")
                    all_types.append(filtered_data[ii][dset_name].dtype["time"])
                if "value" not in all_columns:
                    all_columns.append("value")
                    all_types.append(filtered_data[ii][dset_name].dtype["value"])
                # Add new column if any:
                for col in filtered_data[ii][dset_name].dtype.names:
                    if col not in all_columns:
                        all_columns.append(col)
                        all_types.append(filtered_data[ii][dset_name].dtype[col])

            data_dtype = np.dtype(
                [(all_columns[ii], all_types[ii]) for ii in range(len(all_columns))]
            )

            return data_dtype, length

        def get_full_attrs(dset_name, files):
            """Creates a 'full_attrs' dictionary of all attributes and all
            possible values they can take, from all the files, for a
            particular data set (metric). Also returns an 'index_remap'
            list of dictionaries (one per file, aligned with `files`) to
            remap indices of values in different files.
            """

            full_attrs = {}  # Dictionary of attributes
            index_remap = []  # List of dictionaries (one per file)
            for ii, fl in enumerate(files):
                # BUGFIX: always append an entry so `index_remap` stays
                # aligned with the file index `ii`. Previously the append
                # happened only for files containing the dataset, so
                # `index_remap[ii]` was misaligned (or raised IndexError)
                # whenever an earlier file lacked the metric.
                index_remap.append({})
                if dset_name not in fl:
                    continue
                for att, values in fl[dset_name].attrs.items():
                    # Reserve zeroeth entry for N/A
                    index_remap[ii][att] = np.zeros(len(values) + 1, dtype=int)
                    if att not in full_attrs:
                        full_attrs[att] = []
                    for idx, val in enumerate(values):
                        if val not in full_attrs[att]:
                            full_attrs[att] = np.append(full_attrs[att], val)
                        # Index of idx'th val in full_attrs[att]:
                        new_idx = np.where(full_attrs[att] == val)[0][0]
                        # zero is for N/A:
                        index_remap[ii][att][idx + 1] = new_idx + 1

            return full_attrs, index_remap

        def get_full_data(length, data_dtype, index_remap, filtered_data, dset_name):
            """Returns the full data matrix as a structured array. Values are
            modified when necessary acording to 'index_remap' to correspond
            to the final positions in the 'full_attrs'.
            """

            full_data = np.zeros(length, data_dtype)

            curr_ent = 0  # Current entry we are in the full data file
            for ii in range(len(filtered_data)):
                # BUGFIX: test for the dataset *before* touching it.
                # Previously `len(filtered_data[ii][dset_name])` was
                # evaluated first, raising KeyError for any file that did
                # not contain this metric.
                if dset_name not in filtered_data[ii]:
                    continue
                # Length of this file:
                len_fl = len(filtered_data[ii][dset_name])
                curr_slice = np.s_[curr_ent : curr_ent + len_fl]
                for att in data_dtype.names:
                    if att in ["time", "value"]:
                        # No need to remap values:
                        full_data[att][curr_slice] = filtered_data[ii][dset_name][att]
                    elif att in index_remap[ii]:
                        # Needs remapping values
                        # (need to remove 1 beause indices are 1-based):
                        full_data[att][curr_slice] = index_remap[ii][att][
                            filtered_data[ii][dset_name][att]
                        ]
                    else:
                        # Column not in file. Fill with zeros:
                        full_data[att][curr_slice] = np.zeros(len_fl)
                # Update current entry value:
                curr_ent = curr_ent + len_fl

            return full_data

        hkp_data = cls()

        filtered_data = [filter_file(fl) for fl in files]

        # BUGFIX: honour the documented "Default is all" behaviour. When
        # neither `metrics` nor `datasets` was given, the loop below used to
        # iterate over None and raise TypeError.
        if metrics is None:
            metrics = set()
            for fl in files:
                metrics |= set(fl.keys())

        for dset_name in metrics:
            data_dtype, length = get_full_dtype(dset_name, filtered_data)

            # Metric present in none of the files (e.g. a bad name was
            # requested): skip rather than create a zero-column dataset.
            if not data_dtype.names:
                continue

            # Create the full dictionary of all attributes:
            full_attrs, index_remap = get_full_attrs(dset_name, files)

            # Populate the data here.( Need full attrs)
            full_data = get_full_data(
                length, data_dtype, index_remap, filtered_data, dset_name
            )
            new_dset = hkp_data.create_dataset(dset_name, data=full_data)

            # Populate attrs
            for att, values in full_attrs.items():
                new_dset.attrs[att] = memh5.bytes_to_unicode(values)

        return hkp_data

    def select(self, metric_name):
        """Return the metric as a pandas time-series DataFrame.

        Requires Pandas to be installed.

        Parameters
        ----------
        metric_name : string
            Name of metric to generate DataFrame for.

        Returns
        -------
        df : pandas.DataFrame
        """

        import pandas as pd

        dset = self[metric_name]

        fields = set(dset.dtype.fields.keys())
        time = pd.DatetimeIndex((dset["time"] * 1e9).astype("datetime64[ns]"))
        value = dset["value"]
        labels = fields - {"time", "value"}

        cols = {}
        cols["value"] = value
        cols["time"] = time

        for label_name in labels:
            # Stored label indices are 1-based; 0 means "not applicable".
            label_ind = dset[label_name].astype(np.int16) - 1
            label_val = np.where(
                label_ind == -1, "-", dset.attrs[label_name][label_ind]
            )
            cols[label_name] = pd.Categorical(label_val)

        df = pd.DataFrame(data=cols)
        df.set_index("time", inplace=True)
        df.sort_index(inplace=True)

        return df

    def resample(self, metric_name, rule, how="mean", unstack=False, **kwargs):
        """Resample the metric onto a regular grid of time.

        This internally uses the Pandas resampling functionality so that
        documentation is a useful reference. This will return the metric with
        the labels as a series of multi-level columns.

        Parameters
        ----------
        metric_name : str
            Name of metric to resample.
        rule : str
            The set of times to resample onto (example '30S', '1Min', '2D'). See
            the pandas docs for a full description.
        how : str or callable, optional
            How should we combine samples to regrid the data? This takes any
            valid argument for the the pandas apply method. Useful options are
            `'mean'`, `'sum'`, `'min'`, `'max'` and `'std'`.
        unstack : bool, optional
            Unstack the data, i.e. return with the labels as hierarchial columns.
        kwargs
            Any remaining kwargs are passed to the `pandas.DataFrame.resample`
            method to give fine grained control of the resampling.

        Returns
        -------
        df : pandas.DataFrame
            A dataframe resampled onto a regular grid. Labels now appear as part
            of multi-level columns.
        """

        df = self.select(metric_name)

        group_columns = list(set(df.columns) - {"value"})

        resampled_df = df.groupby(group_columns).resample(rule).apply(how)

        if unstack:
            return resampled_df.unstack(group_columns)
        else:
            return resampled_df.reset_index(group_columns)
+
+ + + +
class WeatherData(BaseData):
    """Subclass of :class:`BaseData` for weather data."""

    @property
    def time(self):
        """Time axis of the data.

        Handles both mingun_weather files (which use `time`) and
        chime_weather files (which use `station_time_blockhouse`).
        """
        if "time" in self.index_map:
            return self.index_map["time"]
        return self.index_map["station_time_blockhouse"]

    @property
    def temperature(self):
        """Outside weather-station temperature.

        Extracts the temperature from either mingun_weather files or
        chime_weather files, whichever layout is present.
        """
        if "blockhouse" in self.keys():
            return self["blockhouse"]["outTemp"]
        return self["outTemp"]

    def dataset_name_allowed(self, name):
        """Permits datasets in the root and 'blockhouse' groups."""

        parent, _ = posixpath.split(name)
        return parent in ("/", "/blockhouse")

    def group_name_allowed(self, name):
        """Permits only the "blockhouse" group."""

        return name == "/blockhouse"
+
+ + + +
+[docs] +class RawADCData(BaseData): + """Subclass of :class:`BaseData` for raw ADC data.""" + + @classmethod + def _interpret_and_read(cls, acq_files, start, stop, datasets, out_group): + # Define dataset filter to do the transpose. + def dset_filter(dataset): + if len(dataset.shape) == 2 and dataset.shape[1] == 1: + data = dataset[:] + data.shape = (dataset.shape[0],) + data = memh5.MemDatasetCommon.from_numpy_array(data) + memh5.copyattrs( + dataset.attrs, + data.attrs, + convert_strings=cls.convert_attribute_strings, + ) + elif len(dataset.shape) == 2: + data = dataset + else: + raise RuntimeError( + "Dataset (%s) has unexpected shape [%s]." + % (dataset.name, repr(dataset.shape)) + ) + return data + + andata_objs = [RawADCData(d) for d in acq_files] + data = concatenate( + andata_objs, + out_group=out_group, + start=start, + stop=stop, + datasets=datasets, + dataset_filter=dset_filter, + convert_attribute_strings=cls.convert_attribute_strings, + convert_dataset_strings=cls.convert_dataset_strings, + ) + return data
+ + + +
class GainFlagData(BaseData):
    """Subclass of :class:`BaseData` for gain, digitalgain, and flag input acquisitions.

    These acquisitions consist of a collection of updates to the real-time pipeline
    ordered chronologically. In most cases the updates do not occur at a regular
    cadence. The time that each update occured can be accessed via
    `self.index_map['update_time']`. In addition, each update is given a unique
    update ID that can be accessed via `self.datasets['update_id']` and can be
    searched using the `self.search_update_id` method.
    """

    def resample(self, dataset, timestamp, transpose=False):
        """Return a dataset resampled at specific times.

        Parameters
        ----------
        dataset : string
            Name of the dataset to resample.
        timestamp : `np.ndarray`
            Unix timestamps.
        transpose : bool
            Tranpose the data such that time is the fastest varying axis.
            By default time will be the slowest varying axis.

        Returns
        -------
        data : np.ndarray
            The dataset resampled at the desired times and transposed if requested.
        """
        index = self.search_update_time(timestamp)
        resampled = self.datasets[dataset][index]
        return np.moveaxis(resampled, 0, -1) if transpose else resampled

    def search_update_time(self, timestamp):
        """Find the index into the `update_time` axis that is valid for specific times.

        For each requested time, returns the most recent update that occurred
        before that time.

        Parameters
        ----------
        timestamp : `np.ndarray` of unix timestamp
            Unix timestamps.

        Returns
        -------
        index : `np.ndarray` of `dtype = int`
            Index into the `update_time` axis that will yield values
            that are valid for the requested timestamps.
        """
        timestamp = np.atleast_1d(timestamp)

        # No update existed before the earliest requested time: hard error.
        if np.min(timestamp) < np.min(self.time):
            raise ValueError(
                "Cannot request timestamps before the earliest update_time."
            )

        # Times past the newest update are allowed, but warn about how stale
        # the returned update may be.
        dmax = np.max(timestamp) - np.max(self.time)
        if dmax > 0.0:
            msg = (
                "Requested timestamps are after the latest update_time "
                "by as much as %0.2f hours." % (dmax / 3600.0,)
            )
            warnings.warn(msg)

        # digitize gives the bin to the right; subtract one for "most recent
        # update at or before the timestamp".
        return np.digitize(timestamp, self.time, right=False) - 1

    def search_update_id(self, pattern, is_regex=False):
        """Find the index into the `update_time` axis corresponding to a particular `update_id`.

        Parameters
        ----------
        pattern : str
            The desired `update_id` or a glob pattern to search.
        is_regex : bool
            Set to True if `pattern` is a regular expression.

        Returns
        -------
        index : `np.ndarray` of `dtype = int`
            Index into the `update_time` axis that will yield all
            updates whose `update_id` matches the requested pattern.
        """
        import fnmatch

        # Glob patterns are translated to an equivalent regex first.
        expr = pattern if is_regex else fnmatch.translate(pattern)
        matches = re.compile(expr).match
        hits = [ii for ii, uid in enumerate(self.update_id[:]) if matches(uid)]
        return np.array(hits)

    @property
    def time(self):
        """Aliases `index_map['update_time']` to `time` for `caput.tod` functionality."""
        return self.index_map["update_time"]

    @property
    def ntime(self):
        """Number of updates."""
        return len(self.index_map["update_time"])

    @property
    def input(self):
        """Correlator inputs."""
        return self.index_map["input"]

    @property
    def ninput(self):
        """Number of correlator inputs."""
        return len(self.index_map["input"])

    @property
    def update_id(self):
        """Aliases the `update_id` dataset."""
        return self.datasets["update_id"]
+ + + +
class FlagInputData(GainFlagData):
    """Subclass of :class:`GainFlagData` for flaginput acquisitions."""

    @property
    def flag(self):
        """Aliases the `flag` dataset."""
        return self.datasets["flag"]

    @property
    def source_flags(self):
        """Dictionary that allow look up of source flags based on source name."""
        if not hasattr(self, "_source_flags"):
            flags = self.datasets["source_flags"]
            lookup = {
                name: flags[:, idx, :]
                for idx, name in enumerate(self.index_map["source"])
            }
            self._source_flags = memh5.ro_dict(lookup)

        return self._source_flags

    def get_source_index(self, source_name):
        """Index into the `source` axis for a given source name."""
        return list(self.index_map["source"]).index(source_name)
+
+ + + +
+[docs] +class GainData(GainFlagData): + """Subclass of :class:`GainFlagData` for gain and digitalgain acquisitions.""" + + @property + def freq(self): + """The spectral frequency axis as bin centres in MHz.""" + return self.index_map["freq"]["centre"] + + @property + def nfreq(self): + """Number of frequency bins.""" + return len(self.index_map["freq"])
+ + + +
class CalibrationGainData(GainData):
    """Subclass of :class:`GainData` for gain acquisitions."""

    @property
    def source(self):
        """Names of the sources of gains."""
        return self.index_map["source"]

    @property
    def nsource(self):
        """Number of sources of gains."""
        return len(self.index_map["source"])

    @property
    def gain(self):
        """Aliases the `gain` dataset."""
        return self.datasets["gain"]

    @property
    def weight(self):
        """Aliases the `weight` dataset."""
        return self.datasets["weight"]

    @property
    def source_gains(self):
        """Dictionary that allows look up of source gains based on source name."""
        if not hasattr(self, "_source_gains"):
            gains = self.datasets["source_gains"]
            lookup = {
                name: gains[:, idx, :]
                for idx, name in enumerate(self.index_map["source"])
            }
            self._source_gains = memh5.ro_dict(lookup)

        return self._source_gains

    @property
    def source_weights(self):
        """Dictionary that allows look up of source weights based on source name."""
        if not hasattr(self, "_source_weights"):
            weights = self.datasets["source_weights"]
            lookup = {
                name: weights[:, idx, :]
                for idx, name in enumerate(self.index_map["source"])
            }
            self._source_weights = memh5.ro_dict(lookup)

        return self._source_weights

    def get_source_index(self, source_name):
        """Index into the `source` axis for a given source name."""
        return list(self.index_map["source"]).index(source_name)
+
+ + + +
class DigitalGainData(GainData):
    """Subclass of :class:`GainData` for digitalgain acquisitions."""

    @property
    def gain_coeff(self):
        """The coefficient of the digital gain applied to the channelized data."""
        return self.datasets["gain_coeff"]

    @property
    def gain_exp(self):
        """The exponent of the digital gain applied to the channelized data."""
        return self.datasets["gain_exp"]

    @property
    def compute_time(self):
        """Unix timestamp indicating when the digital gain was computed."""
        return self.datasets["compute_time"]

    @property
    def gain(self):
        """The digital gain applied to the channelized data.

        Combines coefficient and exponent as ``coeff * 2**exp``, broadcasting
        the exponent over the (middle) frequency-like axis of the coefficient.
        """
        coeff = self.datasets["gain_coeff"][:]
        exp = self.datasets["gain_exp"][:, np.newaxis, :]
        return coeff * 2.0**exp
class BaseReader(tod.Reader):
    """Provides high level reading of CHIME data.

    You do not want to use this class, but rather one of its inherited classes
    (:class:`CorrReader`, :class:`HKReader`, :class:`WeatherReader`).

    Parses and stores meta-data from file headers allowing for the
    interpretation and selection of the data without reading it all from disk.

    Parameters
    ----------
    files : filename, `h5py.File` or list there-of or filename pattern
        Files containing data. Filename patterns with wild cards (e.g.
        "foo*.h5") are supported.
    """

    data_class = BaseData

    def __init__(self, files):
        # A string is treated as a glob pattern and expanded to a sorted list.
        if isinstance(files, str):
            files = sorted(glob.glob(files))

        # Open the files with no datasets selected: metadata only.
        self._data_empty = self.data_class.from_acq_h5(files, datasets=())

        # Gather metadata: the time axis and the dataset names in the first file.
        timestamps = self._data_empty.time
        dset_names = _get_dataset_names(files[0])

        # Store the metadata attributes.
        self._files = tuple(files)
        self._time = timestamps
        self._datasets = dset_names
        # Default selections: everything.
        self.time_sel = (0, len(self.time))
        self.dataset_sel = dset_names

    def select_time_range(self, start_time=None, stop_time=None):
        """Sets :attr:`~Reader.time_sel` to include a time range.

        The times from the samples selected will have bin centre timestamps
        that are bracketed by the given *start_time* and *stop_time*.

        Parameters
        ----------
        start_time : float or :class:`datetime.datetime`
            If a float, this is a Unix/POSIX time. Affects the first element of
            :attr:`~Reader.time_sel`. Default leaves it unchanged.
        stop_time : float or :class:`datetime.datetime`
            If a float, this is a Unix/POSIX time. Affects the second element
            of :attr:`~Reader.time_sel`. Default leaves it unchanged.
        """
        # Delegate entirely to the caput tod.Reader implementation.
        super(BaseReader, self).select_time_range(
            start_time=start_time, stop_time=stop_time
        )

    def read(self, out_group=None):
        """Read the selected data.

        Parameters
        ----------
        out_group : `h5py.Group`, hdf5 filename or `memh5.Group`
            Underlying hdf5 like container that will store the data for the
            BaseData instance.

        Returns
        -------
        data : :class:`BaseData`
            Data read from :attr:`~Reader.files` based on the selections given
            in :attr:`~Reader.time_sel` and :attr:`~Reader.dataset_sel`.
        """
        start, stop = self.time_sel
        return self.data_class.from_acq_h5(
            self.files,
            start=start,
            stop=stop,
            datasets=self.dataset_sel,
            out_group=out_group,
        )
class CorrReader(BaseReader):
    """Subclass of :class:`BaseReader` for correlator data.

    Adds product (`prod`), input and frequency axes on top of the time axis
    handled by :class:`BaseReader`, and supports selections along each.
    Only one of *prod_sel* and *input_sel* may be set at a time.
    """

    data_class = CorrData

    def __init__(self, files):
        super(CorrReader, self).__init__(files)
        data_empty = self._data_empty
        prod = data_empty.prod
        freq = data_empty.index_map["freq"]
        input = data_empty.index_map["input"]
        self._input = input
        self._prod = prod
        self._freq = freq
        # Default selections: read everything.
        self.prod_sel = None
        self.input_sel = None
        self.freq_sel = None
        # Create apply_gain and renormalize attributes,
        # which are passed to CorrData.from_acq_h5() when
        # the read() method is called. This gives the
        # user the ability to turn off apply_gain and
        # renormalize when using Reader.
        self.apply_gain = True
        self.renormalize = True
        self.distributed = False
        # Insert virtual 'gain' dataset if required parent datasets are present.
        # We could be more careful about this, but I think this will always
        # work.
        datasets = self._datasets
        # if ('gain_coeff' in datasets and 'gain_exp' in datasets):
        datasets += ("gain",)
        self._datasets = datasets
        self.dataset_sel = datasets

    # Properties
    # ----------

    @property
    def prod(self):
        """Correlation products in data files."""
        # Copy so callers cannot mutate the cached index map.
        return self._prod[:].copy()

    @property
    def input(self):
        """Correlator inputs in data files."""
        return self._input[:].copy()

    @property
    def freq(self):
        """Spectral frequency bin centres in data files."""
        return self._freq[:].copy()

    @property
    def prod_sel(self):
        """Which correlation products to read.

        Returns
        -------
        prod_sel : 1D data selection
            Valid numpy index for a 1D array, specifying what data to read
            along the correlation product axis.

        """
        return self._prod_sel

    @prod_sel.setter
    def prod_sel(self, value):
        if value is not None:
            # Check to make sure this is a valid index for the product axis.
            # The indexing itself raises if *value* is out of range.
            self.prod["input_a"][value]
            # prod_sel and input_sel are mutually exclusive.
            if self.input_sel is not None:
                msg = (
                    "*input_sel* is set and cannot specify both *prod_sel*"
                    " and *input_sel*."
                )
                raise ValueError(msg)
        self._prod_sel = value

    @property
    def input_sel(self):
        """Which correlator intputs to read.

        Returns
        -------
        input_sel : 1D data selection
            Valid numpy index for a 1D array, specifying what data to read
            along the correlation product axis.

        """
        return self._input_sel

    @input_sel.setter
    def input_sel(self, value):
        if value is not None:
            # Check to make sure this is a valid index for the product axis.
            self.input["chan_id"][value]
            # prod_sel and input_sel are mutually exclusive.
            if self.prod_sel is not None:
                msg = (
                    "*prod_sel* is set and cannot specify both *prod_sel*"
                    " and *input_sel*."
                )
                raise ValueError(msg)
        self._input_sel = value

    @property
    def freq_sel(self):
        """Which frequencies to read.

        Returns
        -------
        freq_sel : 1D data selection
            Valid numpy index for a 1D array, specifying what data to read
            along the frequency axis.

        """
        return self._freq_sel

    @freq_sel.setter
    def freq_sel(self, value):
        if value is not None:
            # Check to make sure this is a valid index for the frequency axis.
            self.freq["centre"][value]
        self._freq_sel = value

    # Data Selection Methods
    # ----------------------

    def select_prod_pairs(self, pairs):
        """Sets :attr:`~Reader.prod_sel` to include given product pairs.

        Parameters
        ----------
        pairs : list of integer pairs
            Input pairs to be included.

        """
        sel = []
        for input_a, input_b in pairs:
            # Match either ordering of the pair (products are unordered).
            for ii in range(len(self.prod)):
                p_input_a, p_input_b = self.prod[ii]
                if (input_a == p_input_a and input_b == p_input_b) or (
                    input_a == p_input_b and input_b == p_input_a
                ):
                    sel.append(ii)
        self.prod_sel = sel

    def select_prod_autos(self):
        """Sets :attr:`~Reader.prod_sel` to only auto-correlations."""

        sel = []
        for ii, prod in enumerate(self.prod):
            # Autos are products whose two inputs are the same.
            if prod[0] == prod[1]:
                sel.append(ii)
        self.prod_sel = sel

    def select_prod_by_input(self, input):
        """Sets :attr:`~Reader.prod_sel` to only products with given input.

        Parameters
        ----------
        input : integer
            Correlator input number. All correlation products with
            this input as one of the pairs are selected.

        """
        sel = []
        for ii, prod in enumerate(self.prod):
            if prod[0] == input or prod[1] == input:
                sel.append(ii)
        self.prod_sel = sel

    def select_freq_range(self, freq_low=None, freq_high=None, freq_step=None):
        """Sets :attr:`~Reader.freq_sel` to given physical frequency range.

        Frequencies selected will have bin centres bracked by provided range.

        Parameters
        ----------
        freq_low : float
            Lower end of the frequency range in MHz. Default is the lower edge
            of the band.
        freq_high : float
            Upper end of the frequency range in MHz. Default is the upper edge
            of the band.
        freq_step : float
            How much bandwidth to skip over between samples in MHz. This value
            is approximate. Default is to include all samples in given range.

        """
        freq = self.freq["centre"]
        nfreq = len(freq)
        if freq_step is None:
            step = 1
        else:
            # Convert the requested MHz step to an (approximate) index stride.
            # NOTE(review): if freq_step < bin width this yields step == 0 and
            # the slice below would be invalid — confirm callers pass >= df.
            df = abs(np.mean(np.diff(freq)))
            step = int(freq_step // df)
        # Noting that frequencies are reverse ordered in datasets.
        if freq_low is None:
            stop = nfreq
        else:
            stop = np.where(freq < freq_low)[0][0]
        if freq_high is None:
            start = 0
        else:
            start = np.where(freq < freq_high)[0][0]
        # Slight tweak to behaviour if step is not unity, lining up edge on
        # freq_low instead of freq_high.
        start += (stop - start - 1) % step
        self.freq_sel = np.s_[start:stop:step]

    def select_freq_physical(self, frequencies):
        """Sets :attr:`~Reader.freq_sel` to include given physical frequencies.

        Parameters
        ----------
        frequencies : list of floats
            Frequencies to select. Physical frequencies are matched to indices
            on a best match basis.

        """
        freq_centre = self.freq["centre"]
        freq_width = self.freq["width"]
        frequencies = np.array(frequencies)
        n_sel = len(frequencies)
        # A requested frequency matches a bin if it falls within half the
        # bin width of the bin centre.
        diff_freq = abs(freq_centre - frequencies[:, None])
        match_mask = diff_freq < freq_width / 2
        freq_inds = []
        for ii in range(n_sel):
            matches = np.where(match_mask[ii, :])
            try:
                first_match = matches[0][0]
            except IndexError:
                msg = "No match for frequency %f MHz." % frequencies[ii]
                raise ValueError(msg)
            freq_inds.append(first_match)
        self.freq_sel = freq_inds

    # Data Reading
    # ------------

    def read(self, out_group=None):
        """Read the selected data.

        Parameters
        ----------
        out_group : `h5py.Group`, hdf5 filename or `memh5.Group`
            Underlying hdf5 like container that will store the data for the
            BaseData instance.

        Returns
        -------
        data : :class:`BaseData`
            Data read from :attr:`~Reader.files` based on the selections given
            in :attr:`~Reader.time_sel`, :attr:`~Reader.prod_sel`, and
            :attr:`~Reader.freq_sel`.

        """
        dsets = tuple(self.dataset_sel)

        # Add in virtual gain dataset
        # This is done in earlier now, in self.datasets.
        # if ('gain_coeff' in dsets and 'gain_exp' in dsets):
        #     dsets += ('gain',)

        return CorrData.from_acq_h5(
            self.files,
            start=self.time_sel[0],
            stop=self.time_sel[1],
            prod_sel=self.prod_sel,
            freq_sel=self.freq_sel,
            input_sel=self.input_sel,
            apply_gain=self.apply_gain,
            renormalize=self.renormalize,
            distributed=self.distributed,
            datasets=dsets,
            out_group=out_group,
        )
# For backwards compatibility.
Reader = CorrReader


class HKReader(BaseReader):
    """:class:`BaseReader` specialized for housekeeping (HK) data."""

    data_class = HKData


class HKPReader(BaseReader):
    """:class:`BaseReader` specialized for HKP data."""

    data_class = HKPData


class WeatherReader(BaseReader):
    """:class:`BaseReader` specialized for weather data."""

    data_class = WeatherData


class FlagInputReader(BaseReader):
    """:class:`BaseReader` specialized for input flag data."""

    data_class = FlagInputData


class CalibrationGainReader(BaseReader):
    """:class:`BaseReader` specialized for calibration gain data."""

    data_class = CalibrationGainData


class DigitalGainReader(BaseReader):
    """:class:`BaseReader` specialized for digital gain data."""

    data_class = DigitalGainData


class RawADCReader(BaseReader):
    """:class:`BaseReader` specialized for raw ADC data."""

    data_class = RawADCData


class AnDataError(Exception):
    """Exception raised when something unexpected happens with the data."""


# Functions
# ---------

# In caput now.
concatenate = tod.concatenate
+[docs] +def subclass_from_obj(cls, obj): + """Pick a subclass of :class:`BaseData` based on an input object. + + Parameters + ---------- + cls : subclass of :class:`BaseData` (class, not an instance) + Default class to return. + obj : :class:`h5py.Group`, filename, :class:`memh5.Group` or + :class:`BaseData` object from which to determine the appropriate + subclass of :class:`AnData`. + + """ + # If obj is a filename, open it and recurse. + if isinstance(obj, str): + with h5py.File(obj, "r") as f: + cls = subclass_from_obj(cls, f) + return cls + + new_cls = cls + acquisition_type = None + try: + acquisition_type = obj.attrs["acquisition_type"] + except (AttributeError, KeyError): + pass + if acquisition_type == "corr": + new_cls = CorrData + elif acquisition_type == "hk": + new_cls = HKData + elif acquisition_type is None: + if isinstance(obj, BaseData): + new_cls = obj.__class__ + return new_cls
+ + + +# Private Functions +# ----------------- + +# Utilities + + +def _open_files(files, opened): + """Ensure that files are open, keeping a record of what was done. + + The arguments are modified in-place instead of returned, so that partial + work is recorded in the event of an error. + + """ + + for ii, this_file in enumerate(list(files)): + # Sort out how to get an open hdf5 file. + open_file, was_opened = memh5.get_h5py_File(this_file, mode="r") + opened[ii] = was_opened + files[ii] = open_file + + +def _ensure_1D_selection(selection): + if isinstance(selection, tuple): + if len(selection) != 1: + msg = "Wrong number of indices." + raise ValueError(msg) + selection = selection[0] + if selection is None: + selection = np.s_[:] + elif hasattr(selection, "__iter__"): + selection = np.array(selection) + elif isinstance(selection, slice): + pass + elif np.issubdtype(type(selection), np.integer): + selection = np.s_[selection : selection + 1] + else: + raise ValueError("Cannont be converted to a 1D selection.") + + if isinstance(selection, np.ndarray): + if selection.ndim != 1: + msg = "Data selections may only be one dimensional." + raise ValueError(msg) + # The following is more efficient and solves h5py issue #425. Converts + # to integer selection. 
+ if len(selection) == 1: + return _ensure_1D_selection(selection[0]) + if np.issubdtype(selection.dtype, np.integer): + if np.any(np.diff(selection) <= 0): + raise ValueError("h5py requires sorted non-duplicate selections.") + elif not np.issubdtype(selection.dtype, bool): + raise ValueError("Array selections must be integer or boolean type.") + elif np.issubdtype(selection.dtype, bool): + # This is a workaround for h5py/h5py#1750 + selection = selection.nonzero()[0] + + return selection + + +def _convert_to_slice(selection): + if hasattr(selection, "__iter__") and len(selection) > 1: + uniq_step = np.unique(np.diff(selection)) + + if (len(uniq_step) == 1) and uniq_step[0]: + a = selection[0] + b = selection[-1] + b = b + (1 - (b < a) * 2) + + selection = slice(a, b, uniq_step[0]) + + return selection + + +def _get_dataset_names(f): + f, toclose = memh5.get_h5py_File(f, mode="r") + try: + dataset_names = () + for name in f.keys(): + if not memh5.is_group(f[name]): + dataset_names += (name,) + if "blockhouse" in f and memh5.is_group(f["blockhouse"]): + # chime_weather datasets are inside group "blockhouse" + for name in f["blockhouse"].keys(): + if not memh5.is_group(f["blockhouse"][name]): + dataset_names += ("blockhouse/" + name,) + if "flags" in f and memh5.is_group(f["flags"]): + for name in f["flags"].keys(): + if not memh5.is_group(f["flags"][name]): + dataset_names += ("flags/" + name,) + finally: + if toclose: + f.close() + return dataset_names + + +def _resolve_stack_prod_input_sel( + stack_sel, stack_map, stack_rmap, prod_sel, prod_map, input_sel, input_map +): + nsels = (stack_sel is not None) + (prod_sel is not None) + (input_sel is not None) + if nsels > 1: + raise ValueError( + "Only one of *stack_sel*, *input_sel*, and *prod_sel* may be specified." 
+ ) + + if nsels == 0: + stack_sel = _ensure_1D_selection(stack_sel) + prod_sel = _ensure_1D_selection(prod_sel) + input_sel = _ensure_1D_selection(input_sel) + else: + if prod_sel is not None: + prod_sel = _ensure_1D_selection(prod_sel) + # Choose inputs involved in selected products. + input_sel = _input_sel_from_prod_sel(prod_sel, prod_map) + stack_sel = _stack_sel_from_prod_sel(prod_sel, stack_rmap) + elif input_sel is not None: + input_sel = _ensure_1D_selection(input_sel) + prod_sel = _prod_sel_from_input_sel(input_sel, input_map, prod_map) + stack_sel = _stack_sel_from_prod_sel(prod_sel, stack_rmap) + else: # stack_sel + stack_sel = _ensure_1D_selection(stack_sel) + prod_sel = _prod_sel_from_stack_sel(stack_sel, stack_map, stack_rmap) + input_sel = _input_sel_from_prod_sel(prod_sel, prod_map) + + # Now we need to rejig the index maps for the subsets of the inputs, + # prods. + stack_inds = np.arange(len(stack_map), dtype=int)[stack_sel] + # prod_inds = np.arange(len(prod_map), dtype=int)[prod_sel] # never used + input_inds = np.arange(len(input_map), dtype=int)[input_sel] + + stack_rmap = stack_rmap[prod_sel] + stack_rmap["stack"] = _search_array(stack_inds, stack_rmap["stack"]) + + # Remake stack map from scratch, since prod referenced in current stack + # map may have dissapeared. + stack_map = np.empty(len(stack_inds), dtype=stack_map.dtype) + stack_map["prod"] = _search_array( + stack_rmap["stack"], np.arange(len(stack_inds)) + ) + stack_map["conjugate"] = stack_rmap["conjugate"][stack_map["prod"]] + + prod_map = prod_map[prod_sel] + pa = _search_array(input_inds, prod_map["input_a"]) + pb = _search_array(input_inds, prod_map["input_b"]) + prod_map["input_a"] = pa + prod_map["input_b"] = pb + input_map = input_map[input_sel] + return stack_sel, stack_map, stack_rmap, prod_sel, prod_map, input_sel, input_map + + +def _npissorted(arr): + return np.all(np.diff >= 0) + + +def _search_array(a, v): + """Find the indeces in array `a` of values in array 'v'. 
+ + Use algorithm that presorts `a`, efficient if `v` is long. + + """ + a_sort_inds = np.argsort(a, kind="mergesort") + a_sorted = a[a_sort_inds] + indeces_in_sorted = np.searchsorted(a_sorted, v) + # Make sure values actually present. + if not np.all(v == a_sorted[indeces_in_sorted]): + raise ValueError("Element in 'v' not in 'a'.") + return a_sort_inds[indeces_in_sorted] + + +def _input_sel_from_prod_sel(prod_sel, prod_map): + prod_map = prod_map[prod_sel] + input_sel = [] + for p0, p1 in prod_map: + input_sel.append(p0) + input_sel.append(p1) + # ensure_1D here deals with h5py issue #425. + input_sel = _ensure_1D_selection(sorted(list(set(input_sel)))) + return input_sel + + +def _prod_sel_from_input_sel(input_sel, input_map, prod_map): + inputs = list(np.arange(len(input_map), dtype=int)[input_sel]) + prod_sel = [] + for ii, p in enumerate(prod_map): + if p[0] in inputs and p[1] in inputs: + prod_sel.append(ii) + # ensure_1D here deals with h5py issue #425. + prod_sel = _ensure_1D_selection(prod_sel) + return prod_sel + + +def _stack_sel_from_prod_sel(prod_sel, stack_rmap): + stack_sel = stack_rmap["stack"][prod_sel] + stack_sel = _ensure_1D_selection(sorted(list(set(stack_sel)))) + return stack_sel + + +def _prod_sel_from_stack_sel(stack_sel, stack_map, stack_rmap): + stack_inds = np.arange(len(stack_map))[stack_sel] + stack_rmap_sort_inds = np.argsort(stack_rmap["stack"], kind="mergesort") + stack_rmap_sorted = stack_rmap["stack"][stack_rmap_sort_inds] + left_indeces = np.searchsorted(stack_rmap_sorted, stack_inds, side="left") + right_indeces = np.searchsorted(stack_rmap_sorted, stack_inds, side="right") + prod_sel = [] + for ii in range(len(stack_inds)): + prod_sel.append(stack_rmap_sort_inds[left_indeces[ii] : right_indeces[ii]]) + prod_sel = np.concatenate(prod_sel) + prod_sel = _ensure_1D_selection(sorted(list(set(prod_sel)))) + return prod_sel + + +
+[docs] +def versiontuple(v): + """Create a version tuple from a version string. + + Parameters + ---------- + v: str + A version string + + Returns + ------- + versiontuple: tuple + A tuple of `int` values created by splitting the string on dots. + """ + return tuple(map(int, (v.split("."))))
+ + + +# Calculations from data. + + +def _renormalize(data): + """Correct vis and vis_weight for lost packets.""" + from ch_util import tools + + # Determine the datasets that need to be renormalized + datasets_to_renormalize = [ + key for key in data.datasets if re.match(ACQ_VIS_DATASETS, key) + ] + + if not datasets_to_renormalize: + return + + # Determine if we will correct vis_weight in addition to vis. + adjust_weight = "vis_weight" in data.flags + + # Extract number of packets expected + n_packets_expected = data.attrs["gpu.gpu_intergration_period"][0] + + # Loop over frequencies to limit memory usage + for ff in range(data.nfreq): + # Calculate the fraction of packets received + weight_factor = 1.0 - data.flags["lost_packet_count"][ff] / float( + n_packets_expected + ) + + # Multiply vis_weight by fraction of packets received + if adjust_weight: + data.flags["vis_weight"][ff] = np.round( + data.flags["vis_weight"][ff] * weight_factor[None, :] + ) + + # Divide vis by fraction of packets received + weight_factor = tools.invert_no_zero(weight_factor) + + for key in datasets_to_renormalize: + data.datasets[key][ff] *= weight_factor[None, :] + + +def _unwrap_fpga_counts(data): + """Unwrap 32-bit FPGA counts in a CorrData object.""" + + import datetime + + time_map = data.index_map["time"][:] + + # If FPGA counts are already 64-bit then we don't need to unwrap + if time_map["fpga_count"].dtype == np.uint64: + return + + # Try and fetch out required attributes, if they are not there (which + # happens in older files), fill in the usual values + try: + nfreq = data.attrs["n_freq"][0] + samp_freq_MHz = data.attrs["fpga.samp_freq"][0] + except KeyError: + nfreq = 1024 + samp_freq_MHz = 800.0 + + # Calculate the length of an FPGA count and the time it takes to wrap + seconds_per_count = 2.0 * nfreq / (samp_freq_MHz * 1e6) + wrap_time = 2**32.0 * seconds_per_count + + # Estimate the FPGA initial zero time from the timestamp in the acquisition + # name, if the acq name 
is not there, or of the correct format just silently return + try: + acq_name = data.attrs["acquisition_name"] + acq_dt = datetime.datetime.strptime(acq_name[:16], "%Y%m%dT%H%M%SZ") + except (KeyError, ValueError): + return + acq_start = CorrData.convert_time(acq_dt) + + # Calculate the time that the count last wrapped + last_wrap = time_map["ctime"] - time_map["fpga_count"] * seconds_per_count + + # Use this and the FPGA zero time to calculate the total number of wraps + num_wraps = np.round((last_wrap - acq_start) / wrap_time).astype(np.uint64) + + # Correct the FPGA counts by adding on the counts lost by wrapping + fpga_corrected = time_map["fpga_count"] + num_wraps * 2**32 + + # Create an array to represent the new time dataset, and fill in the corrected values + _time_dtype = [("fpga_count", np.uint64), ("ctime", np.float64)] + new_time_map = np.zeros(time_map.shape, dtype=_time_dtype) + new_time_map["fpga_count"] = fpga_corrected + new_time_map["ctime"] = time_map["ctime"] + + # Replace the time input map + data.del_index_map("time") + data.create_index_map("time", new_time_map) + + +def _timestamp_from_fpga_cpu(cpu_s, cpu_us, fpga_counts): + ntime = len(cpu_s) + timestamp = np.empty(ntime, dtype=np.float64) + timestamp[:] = cpu_s + if cpu_us is not None: + timestamp += cpu_us / 1.0e6 + # If we have the more precise fpga clock, use it. Use the above to + # calibrate. + if fpga_counts is not None: + timestamp_cpu = timestamp.copy() + # Find discontinuities in the fpga_counts from wrapping. + d_fpga_counts = np.diff(fpga_counts.astype(np.int64)) + (edge_inds,) = np.where(d_fpga_counts != np.median(d_fpga_counts)) + edge_inds = np.concatenate(([0], edge_inds + 1, [ntime])) + # Calculate a global slope. 
+ slope_num = 0 + slope_den = 0 + for ii in range(len(edge_inds) - 1): + sl = np.s_[edge_inds[ii] : edge_inds[ii + 1]] + mean_cpu = np.mean(timestamp_cpu[sl]) + mean_fpga = np.mean(fpga_counts[sl]) + diff_cpu = timestamp_cpu[sl] - mean_cpu + diff_fpga = fpga_counts[sl] - mean_fpga + slope_num += np.sum(diff_cpu * diff_fpga) + slope_den += np.sum(diff_fpga**2) + slope = slope_num / slope_den + # Calculate offset in each section. + for ii in range(len(edge_inds) - 1): + sl = np.s_[edge_inds[ii] : edge_inds[ii + 1]] + mean_cpu = np.mean(timestamp_cpu[sl]) + mean_fpga = np.mean(fpga_counts[sl]) + offset = mean_cpu - slope * mean_fpga + # Apply fit. + timestamp[sl] = slope * fpga_counts[sl] + offset + # XXX + # The above provides integration ends, not centres. Fix: + # delta = np.median(np.diff(timestamp)) + # timestamp -= abs(delta) / 2. + return timestamp + + +# IO for acquisition format 1.0 + + +def _copy_dataset_acq1( + dataset_name, acq_files, start, stop, out_data, prod_sel=None, freq_sel=None +): + s_ind = 0 + ntime = stop - start + for ii, acq in enumerate(acq_files): + acq_dataset = acq[dataset_name] + this_ntime = len(acq_dataset) + if s_ind + this_ntime < start or s_ind >= stop: + # No data from this file is included. + s_ind += this_ntime + continue + # What data (time frames) are included in this file. + # out_slice = np.s_[max(0, s_ind - start):s_ind - start + this_ntime] + # acq_slice = np.s_[max(0, start - s_ind):min(this_ntime, stop - s_ind)] + acq_slice, out_slice = tod._get_in_out_slice(start, stop, s_ind, this_ntime) + # Split the fields of the dataset into separate datasets and reformat. + split_dsets, split_dsets_cal = _format_split_acq_dataset_acq1( + acq_dataset, acq_slice + ) + if dataset_name == "vis": + # Convert to 64 but complex. + if set(split_dsets.keys()) != {"imag", "real"}: + msg = ( + "Visibilities should have fields 'real' and 'imag'" + " and instead have %s." 
% str(list(split_dsets.keys())) + ) + raise ValueError(msg) + vis_data = np.empty(split_dsets["real"].shape, dtype=np.complex64) + vis_data.real[:] = split_dsets["real"] + vis_data.imag[:] = split_dsets["imag"] + + split_dsets = {"": vis_data} + split_dsets_cal = {} + + for split_dset_name, split_dset in split_dsets.items(): + if prod_sel is not None: # prod_sel could be 0. + # Do this in two steps to get around shape matching. + split_dset = split_dset[freq_sel, :, :] + split_dset = split_dset[:, prod_sel, :] + if split_dset_name: + full_name = dataset_name + "_" + split_dset_name + else: + full_name = dataset_name + if start >= s_ind: + # First file, initialize output dataset. + shape = split_dset.shape[:-1] + (ntime,) + if split_dset_name in split_dsets_cal: + attrs = {"cal": split_dsets_cal[split_dset_name]} + else: + attrs = {} + # Try to figure out the axis names. + if prod_sel is not None: + # The shape of the visibilities. + attrs["axis"] = ("freq", "prod", "time") + else: + ndim = len(shape) + attrs["axis"] = ("UNKNOWN",) * (ndim - 1) + ("time",) + ds = out_data.create_dataset( + full_name, dtype=split_dset.dtype, shape=shape + ) + + # Copy over attributes + for k, v in attrs.items(): + ds.attrs[k] = v + # Finally copy the data over. + out_data.datasets[full_name][..., out_slice] = split_dset[:] + s_ind += this_ntime + + +def _check_files_acq1(files): + """Gets a list of open hdf5 file objects and checks their consistency. + + Checks that they all have the same datasets and that all datasets have + consistent data types. + + Essential arguments are modified in-place instead of using return values. + This keeps the lists as up to date as possible in the event that an + exception is raised within this function. + + Non-essential information is returned such as the dtypes for all the + datasets. + + """ + + first_file = True + for ii, open_file in enumerate(list(files)): + # Sort out how to get an open hdf5 file. 
+ # Check that all files have the same datasets with the same dtypes + # and consistent shape. + # All datasets in the same file must be the same shape. + # Between files, all datasets with the same name must have the same + # dtype. + this_dtypes = {} + first_dset = True + for key in open_file.keys(): + if not memh5.is_group(open_file[key]): + this_dtypes[key] = open_file[key].dtype + if first_dset: + this_dset_shape = open_file[key].shape + first_dset = False + else: + if open_file[key].shape != this_dset_shape: + msg = "Datasets in a file do not all have same shape." + raise ValueError(msg) + if first_file: + dtypes = this_dtypes + first_file = False + else: + if this_dtypes != dtypes: + msg = "Files do not have compatible datasets." + raise ValueError(msg) + return dtypes + + +def _get_header_info_acq1(h5_file): + # Right now only have to deal with one format. In the future will need to + # deal with all different kinds of data. + header_info = _data_attrs_from_acq_attrs_acq1(h5_file.attrs) + # Now need to calculate the time stamps. 
+ timestamp_data = h5_file["timestamp"] + if not len(timestamp_data): + msg = "Acquisition file contains zero frames" + raise AnDataError(msg) + time = np.empty( + len(timestamp_data), dtype=[("fpga_count", "<u4"), ("ctime", "<f8")] + ) + time_upper_edges = _timestamp_from_fpga_cpu( + timestamp_data["cpu_s"], timestamp_data["cpu_us"], timestamp_data["fpga_count"] + ) + time_lower_edges = time_upper_edges - np.median(np.diff(time_upper_edges)) + time["ctime"] = time_lower_edges + time["fpga_count"] = timestamp_data["fpga_count"] + header_info["time"] = time + datasets = [key for key in h5_file.keys() if not memh5.is_group(h5_file[key])] + header_info["datasets"] = tuple(datasets) + return header_info + + +def _resolve_header_info_acq1(header_info): + first_info = header_info[0] + freq = first_info["freq"] + prod = first_info["prod"] + datasets = first_info["datasets"] + time_list = [first_info["time"]] + for info in header_info[1:]: + if not np.allclose(info["freq"]["width"], freq["width"]): + msg = "Files do not have consistent frequency bin widths." + raise ValueError(msg) + if not np.allclose(info["freq"]["centre"], freq["centre"]): + msg = "Files do not have consistent frequency bin centres." + raise ValueError(msg) + if not np.all(info["prod"] == prod): + msg = "Files do not have consistent correlation products." + raise ValueError(msg) + if not np.all(info["datasets"] == datasets): + msg = "Files do not have consistent data sets." + raise ValueError(msg) + time_list.append(info["time"]) + time = np.concatenate(time_list) + return time, prod, freq, datasets + + +def _get_files_frames_acq1(files, start, stop): + """Counts the number of frames in each file and sorts out which frames to + read.""" + + dataset_name = "vis" # For now just base everything off of 'vis'. + n_times = [] + for this_file in files: + # Make sure the dataset is 1D. 
def _format_split_acq_dataset_acq1(dataset, time_slice):
    """Format a dataset from an acq1 h5 file into a more easily handled array.

    Reads the per-frame records of `dataset` (time along the first axis) for
    the frames selected by `time_slice` and repacks them into plain numpy
    arrays with all record axes reversed and the time axis moved last.

    Parameters
    ----------
    dataset : h5py.Dataset
        Acquisition-format dataset; element 0 is used as the template record.
    time_slice : slice
        Which frames (along the first axis) to read.

    Returns
    -------
    out : dict of np.ndarray
        One entry per field of the record dtype keyed by field name, or a
        single entry keyed by "" when the dtype has no fields.
    out_cal : dict
        Per-field values of the dataset's 'cal' attribute, when present.

    Raises
    ------
    AttributeError
        If the 'cal' attribute does not line up with the dataset dtype.
    """

    # Get shape information.
    ntime = len(dataset)
    ntime_out = len(np.arange(ntime)[time_slice])
    # If each record is an array, then get that shape.
    back_shape = dataset[0].shape
    # The shape of the output array: record axes reversed, time appended last.
    reversed_back_shape = list(back_shape)
    reversed_back_shape.reverse()
    out_shape = tuple(reversed_back_shape) + (ntime_out,)
    # Check if there are multiple data fields in this dataset. If so they will
    # each end up in their own separate arrays.
    if dataset[0].dtype.fields is None:
        dtype = dataset[0].dtype
        out = np.empty(out_shape, dtype=dtype)
        for jj, ii in enumerate(np.arange(ntime)[time_slice]):
            # 1D case is trivial.
            if not back_shape:
                out[jj] = dataset[ii]
            elif len(back_shape) == 1:
                out[:, jj] = dataset[ii]
            else:
                # Multidimensional records are not supported in this branch.
                raise NotImplementedError("Not done yet.")
                # NOTE(review): everything below the raise is unreachable dead
                # code, kept verbatim -- presumably the start of the intended
                # multidimensional implementation; confirm before removing.
                # Otherwise, loop over all dimensions except the last one.
                it = np.nditer(dataset[ii][..., 0], flags=["multi_index"], order="C")
                while not it.finished:
                    it.iternext()
        if "cal" in dataset.attrs:
            if len(dataset.attrs["cal"]) != 1:
                msg = "Mismatch between dataset and it's cal attribute."
                raise AttributeError(msg)
            out_cal = {"": dataset.attrs["cal"][0]}
        else:
            out_cal = {}
        return {"": out}, out_cal
    else:
        fields = list(dataset[0].dtype.fields.keys())
        # If there is a 'cal' attribute, make sure it's the right shape.
        if "cal" in dataset.attrs:
            if dataset.attrs["cal"].shape != (1,):
                msg = "'cal' attribute has more than one element."
                raise AttributeError(msg)
            if len(list(dataset.attrs["cal"].dtype.fields.keys())) != len(fields):
                msg = "'cal' attribute not compatible with dataset dtype."
                raise AttributeError(msg)
        out = {}
        out_cal = {}
        # Figure out what fields there are and allocate memory.
        for field in fields:
            dtype = dataset[0][field].dtype
            out_arr = np.empty(out_shape, dtype=dtype)
            out[field] = out_arr
            if "cal" in dataset.attrs:
                out_cal[field] = memh5.bytes_to_unicode(dataset.attrs["cal"][0][field])
        for jj, ii in enumerate(np.arange(ntime)[time_slice]):
            # Copy the full record to memory once so each field read is cheap.
            record = dataset[ii]  # Copies to memory.
            for field in fields:
                if not back_shape:
                    out[field][jj] = record[field]
                elif len(back_shape) == 1:
                    out[field][:, jj] = record[field][:]
                else:
                    # Multidimensional, try to be more efficient.
                    it = np.nditer(record[..., 0], flags=["multi_index"], order="C")
                    while not it.finished:
                        # Reverse the multiindex for the out array.
                        ind = it.multi_index + (slice(None),)
                        ind_rev = list(ind)
                        ind_rev.reverse()
                        ind_rev = tuple(ind_rev) + (jj,)
                        out[field][ind_rev] = record[field][ind]
                        it.iternext()
        return out, out_cal
def _get_index_map_from_acq1(acq_files, time_sel, prod_sel, freq_sel):
    """Assemble the down-selected index_map entries for a set of acq1 files.

    Parameters
    ----------
    acq_files : list of h5py.File
        Open acquisition files, in time order.
    time_sel : (start, stop) pair of int
        Frame range to keep along the concatenated time axis.
    prod_sel, freq_sel : 1D data selection
        Numpy indices applied to the prod and freq axes respectively.

    Returns
    -------
    dict
        Index-map arrays keyed by axis name ("time", "prod", "freq").
    """
    headers = [_get_header_info_acq1(acq_file) for acq_file in acq_files]
    time, prod, freq, _datasets = _resolve_header_info_acq1(headers)
    return {
        "time": time[time_sel[0] : time_sel[1]],
        "prod": prod[prod_sel],
        "freq": freq[freq_sel],
    }
def andata_from_acq1(acq_files, start, stop, prod_sel, freq_sel, datasets, out_group):
    """Create a `CorrData` object from a 1.0.0 archive version acq.

    Parameters
    ----------
    acq_files : filename, `h5py.File` or list there-of or filename pattern
        Files to convert from acquisition format to analysis format.
        Filename patterns with wild cards (e.g. "foo*.h5") are supported.
    start : int
        What frame to start at in the full set of files.
    stop : int
        What frame to stop at in the full set of files.
    prod_sel : 1D data selection
        Valid numpy index for a 1D array, specifying what data to read
        along the correlation product axis.
    freq_sel : 1D data selection
        Valid numpy index for a 1D array, specifying what data to read
        along the frequency axis.
    datasets : list of strings
        Names of datasets to include from acquisition files. Default is to
        include all datasets found in the acquisition files.
    out_group : `h5py.Group`, hdf5 filename or `memh5.Group`
        Underlying hdf5 like container that will store the data for the
        BaseData instance.

    Returns
    -------
    corrdata:
        A `CorrData` object with the requested data.
    """

    # First open all the files and collect necessary data for all of them.
    dtypes = _check_files_acq1(acq_files)
    # Figure how much of the total data to read.
    start, stop = _get_files_frames_acq1(acq_files, start, stop)
    # Initialize the output.
    data = CorrData(out_group)
    # Assume all meta-data are the same as in the first file and copy it
    # over.
    acq = acq_files[0]
    data.add_history("acq", memh5.attrs2dict(acq.attrs))
    data.history["acq"]["archive_version"] = "1.0.0"
    # Copy data attribute axis info (time/prod/freq index maps, resolved
    # consistently across all files).
    index_map = _get_index_map_from_acq1(acq_files, (start, stop), prod_sel, freq_sel)
    for axis_name, axis_values in index_map.items():
        data.create_index_map(axis_name, axis_values)
    # Set file format attributes.
    data.attrs["instrument_name"] = (
        "UNKNOWN"
        if "instrument_name" not in acq.attrs
        else acq.attrs["instrument_name"]
    )
    data.attrs["acquisition_name"] = "UNKNOWN"
    data.attrs["acquisition_type"] = "corr"
    # Copy over the cal information if there is any.
    if "cal" in acq:
        memh5.deep_group_copy(
            acq["cal"],
            data._data["cal"],
            convert_attribute_strings=CorrData.convert_attribute_strings,
            convert_dataset_strings=CorrData.convert_dataset_strings,
        )
    # Now copy the datasets.
    if datasets is None:
        datasets = list(dtypes.keys())
    # Start with the visibilities.
    vis_shape = ()
    for dataset_name in dtypes.keys():
        if dataset_name not in datasets:
            continue
        # msg = "No dataset named %s in Acq files." % dataset_name
        # raise ValueError(msg)

        if dataset_name in ACQ_VIS_SHAPE_DATASETS:
            # These datasets must all be the same shape.
            if not vis_shape:
                vis_shape = dtypes[dataset_name].shape
            elif dtypes[dataset_name].shape != vis_shape or len(vis_shape) != 2:
                # NOTE(review): the message says "3D" while this check
                # enforces a 2D record shape (the time frames supply the
                # third axis) -- confirm the intended wording.
                msg = (
                    "Expected the following datasets to be"
                    " identically shaped and 3D in Acq files: %s."
                    % str(ACQ_VIS_SHAPE_DATASETS)
                )
                raise ValueError(msg)
            # Vis-like datasets carry prod/freq axes, so honour the selections.
            _copy_dataset_acq1(
                dataset_name, acq_files, start, stop, data, prod_sel, freq_sel
            )
        else:
            _copy_dataset_acq1(dataset_name, acq_files, start, stop, data)
    return data
+ + + +# IO for archive format 2.0 + + +
def andata_from_archive2(
    cls,
    acq_files,
    start,
    stop,
    stack_sel,
    prod_sel,
    input_sel,
    freq_sel,
    datasets,
    out_group,
):
    """Create an Andata object from a version 2.0.0 archive format acq.

    Parameters
    ----------
    cls:
        class of object to create
    acq_files : filename, `h5py.File` or list there-of or filename pattern
        Files to convert from acquisition format to analysis format.
        Filename patterns with wild cards (e.g. "foo*.h5") are supported.
    start : int
        What frame to start at in the full set of files.
    stop : int
        What frame to stop at in the full set of files.
    stack_sel : 1D data selection
        Valid numpy index for a 1D array, specifying what data to read
        along the stack axis. Cannot be combined with `prod_sel` or
        `input_sel`.
    prod_sel : 1D data selection
        Valid numpy index for a 1D array, specifying what data to read
        along the correlation product axis.
    input_sel : 1D data selection
        Valid numpy index for a 1D array, specifying what data to read
        along the input axis.
    freq_sel : 1D data selection
        Valid numpy index for a 1D array, specifying what data to read
        along the frequency axis.
    datasets : list of strings
        Names of datasets to include from acquisition files. Default is to
        include all datasets found in the acquisition files.
    out_group : `h5py.Group`, hdf5 filename or `memh5.Group`
        Underlying hdf5 like container that will store the data for the
        BaseData instance.

    Returns
    -------
    andata : `cls` instance
        The andata object for the requested data
    """

    # XXX For short term force to CorrData class. Will be fixed once archive
    # files carry 'acquisition_type' attribute.
    # andata_objs = [ cls(d) for d in acq_files ]
    andata_objs = [CorrData(d) for d in acq_files]

    # Resolve input and prod maps
    first_imap = andata_objs[0].index_map
    first_rmap = andata_objs[0].reverse_map

    # Cannot use input/prod sel for stacked data
    if "stack" in first_imap:
        if input_sel:
            raise ValueError("Cannot give input_sel for a stacked dataset.")
        if prod_sel:
            raise ValueError("Cannot give prod_sel for a stacked dataset.")

    prod_map = first_imap["prod"][:].view(np.ndarray).copy()
    input_map = first_imap["input"][:].view(np.ndarray).copy()
    input_map = memh5.ensure_unicode(input_map)  # Convert string entries to unicode
    if "stack" in first_imap:
        stack_map = first_imap["stack"][:].view(np.ndarray).copy()
        stack_rmap = first_rmap["stack"][:].view(np.ndarray).copy()
    else:
        # Unstacked so the stack and prod axes are essentially the same.
        nprod = len(prod_map)
        stack_map = np.empty(nprod, dtype=[("prod", "<u4"), ("conjugate", "u1")])
        stack_map["conjugate"][:] = 0
        stack_map["prod"] = np.arange(nprod)
        stack_rmap = np.empty(nprod, dtype=[("stack", "<u4"), ("conjugate", "u1")])
        stack_rmap["conjugate"][:] = 0
        stack_rmap["stack"] = np.arange(nprod)
        # Efficiently slice prod axis, not stack axis.
        if stack_sel is not None:
            prod_sel = stack_sel
            stack_sel = None

    (
        stack_sel,
        stack_map,
        stack_rmap,
        prod_sel,
        prod_map,
        input_sel,
        input_map,
    ) = _resolve_stack_prod_input_sel(
        stack_sel, stack_map, stack_rmap, prod_sel, prod_map, input_sel, input_map
    )

    # Define dataset filter to convert vis datatype.
    def dset_filter(dataset, time_sel=None):
        # For compatibility with older caput.
        if time_sel is None:
            time_sel = slice(None)
        # A lot of the logic here is that h5py can only deal with one
        # *fancy* slice (that is 1 axis where the slice is an array).
        # Note that *time_sel* is always a normal slice, so don't have to worry
        # about it as much.
        attrs = getattr(dataset, "attrs", {})
        name = path.split(dataset.name)[-1]
        # Special treatement for pure sub-array dtypes, which get
        # modified by numpy to add dimensions when read.
        dtype = dataset.dtype
        if dtype.kind == "V" and not dtype.fields and dtype.shape:
            field_name = str(name.split("/")[-1])
            dtype = np.dtype([(field_name, dtype)])
            shape = dataset.shape
            # The datasets this effects are tiny, so just read them in.
            dataset = dataset[:].view(dtype)
            dataset.shape = shape

        axis = attrs["axis"]
        if axis[0] == "freq" and axis[1] in ("stack", "prod", "input"):
            # For large datasets, take great pains to down-select as
            # efficiently as possible.
            if axis[1] == "stack":
                msel = stack_sel
            elif axis[1] == "prod":
                msel = prod_sel
            else:
                msel = input_sel
            if isinstance(msel, np.ndarray) and isinstance(freq_sel, np.ndarray):
                # Two fancy slices: read through whichever axis keeps the
                # intermediate selection smallest.
                nfsel = np.sum(freq_sel) if freq_sel.dtype == bool else len(freq_sel)
                npsel = np.sum(msel) if msel.dtype == bool else len(msel)
                nfreq = len(andata_objs[0].index_map["freq"])
                nprod = len(andata_objs[0].index_map["prod"])
                frac_fsel = float(nfsel) / nfreq
                frac_psel = float(npsel) / nprod

                if frac_psel < frac_fsel:
                    dataset = dataset[:, msel, time_sel][freq_sel, :, :]
                else:
                    dataset = dataset[freq_sel, :, time_sel][:, msel, :]
            else:
                # At least one of *msel* and *freq_sel* is an
                # integer or slice object and h5py can do the full read
                # efficiently.
                dataset = dataset[freq_sel, msel, time_sel]
        else:
            # Dynamically figure out the axis ordering.
            axis = memh5.bytes_to_unicode(attrs["axis"])
            ndim = len(dataset.shape)  # h5py datasets don't have ndim.
            # Count the fancy (array) selections; booleans sum as 0/1.
            if ("freq" in axis and isinstance(freq_sel, np.ndarray)) + (
                "stack" in axis and isinstance(stack_sel, np.ndarray)
            ) + ("prod" in axis and isinstance(prod_sel, np.ndarray)) + (
                "input" in axis and isinstance(input_sel, np.ndarray)
            ) > 1:
                # At least two array slices. Incrementally down select.
                # First freq.
                dataset_sel = [slice(None)] * ndim
                for ii in range(ndim):
                    if axis[ii] == "freq":
                        dataset_sel[ii] = freq_sel
                # Assume the time is the fastest varying index
                # and down select here.
                dataset_sel[-1] = time_sel
                dataset = dataset[tuple(dataset_sel)]
                # And again for stack.
                dataset_sel = [slice(None)] * ndim
                for ii in range(ndim):
                    # Fixed: this previously compared the raw
                    # `attrs["axis"][ii]` entry (bytes in many archive files)
                    # against the str "stack", which never matched and
                    # silently skipped the stack down-selection; use the
                    # decoded `axis` like the freq/prod/input loops.
                    if axis[ii] == "stack":
                        dataset_sel[ii] = stack_sel
                dataset = dataset[tuple(dataset_sel)]
                # And again for prod.
                dataset_sel = [slice(None)] * ndim
                for ii in range(ndim):
                    if axis[ii] == "prod":
                        dataset_sel[ii] = prod_sel
                dataset = dataset[tuple(dataset_sel)]
                # And again for input.
                dataset_sel = [slice(None)] * ndim
                for ii in range(ndim):
                    if axis[ii] == "input":
                        dataset_sel[ii] = input_sel
                dataset = dataset[tuple(dataset_sel)]
            else:
                # At most one fancy selection: apply everything in one read.
                dataset_sel = [slice(None)] * ndim
                for ii in range(ndim):
                    if axis[ii] == "freq":
                        dataset_sel[ii] = freq_sel
                    elif axis[ii] == "stack":
                        dataset_sel[ii] = stack_sel
                    elif axis[ii] == "prod":
                        dataset_sel[ii] = prod_sel
                    elif axis[ii] == "input":
                        dataset_sel[ii] = input_sel
                    elif axis[ii] in CONCATENATION_AXES:
                        dataset_sel[ii] = time_sel
                dataset = dataset[tuple(dataset_sel)]

        # Change data type for the visibilities, if necessary.
        if re.match(ACQ_VIS_DATASETS, name) and dtype != np.complex64:
            data = dataset[:]
            dataset = np.empty(dataset.shape, dtype=np.complex64)
            dataset.real = data["r"]
            dataset.imag = data["i"]

        return dataset

    # The actual read, file by file.
    data = concatenate(
        andata_objs,
        out_group=out_group,
        start=start,
        stop=stop,
        datasets=datasets,
        dataset_filter=dset_filter,
        convert_attribute_strings=cls.convert_attribute_strings,
        convert_dataset_strings=cls.convert_dataset_strings,
    )

    # Andata (or memh5) should already do the right thing.
    # Explicitly close up files
    # for ad in andata_objs:
    #     ad.close()

    # Rejig the index map according to prod_sel and freq_sel.
    # Need to use numpy arrays to avoid weird cyclic reference issues.
    # (https://github.com/numpy/numpy/issues/1601)
    fmap = data.index_map["freq"][freq_sel].view(np.ndarray).copy()
    # pmap = data.index_map['prod'][prod_sel].view(np.ndarray).copy()
    # imap = data.index_map['input'][input_sel].view(np.ndarray).copy()
    data.create_index_map("freq", fmap)
    data.create_index_map("stack", stack_map)
    data.create_reverse_map("stack", stack_rmap)
    data.create_index_map("prod", prod_map)
    data.create_index_map("input", input_map)
    return data, input_sel
+ + + +# Routines for re-mapping the index_map/input to match up the order that is +# in the files, and the layout database + + +def _generate_input_map(serials, chans=None): + # Generate an input map in the correct format. If chans is None, just + # number from 0 upwards, otherwise use the channel numbers specified. + + # Define datatype of input map array + # TODO: Python 3 string issues + _imap_dtype = [ + ("chan_id", np.int64), + ("correlator_input", "U32"), + ] + + # Add in channel numbers correctly + if chans is None: + chan_iter = enumerate(serials) + else: + chan_iter = list(zip(chans, serials)) + + imap = np.array(list(chan_iter), dtype=_imap_dtype) + + return imap + + +def _get_versiontuple(afile): + if "acq" in afile.history: + archive_version = afile.history["acq"]["archive_version"] + else: + archive_version = afile.attrs["archive_version"] + + archive_version = memh5.bytes_to_unicode(archive_version) + + return versiontuple(archive_version) + + +def _remap_stone_abbot(afile): + # Generate an index_map/input for the old stone/abbot files + + # Really old files do not have an adc_serial attribute + if "adc_serial" not in afile.history["acq"]: + warnings.warn("Super old file. 
def _remap_stone_abbot(afile):
    """Generate an index_map/input for the old 8-input stone/abbot files."""
    # Generate an index_map/input for the old stone/abbot files

    # Really old files do not have an adc_serial attribute
    if "adc_serial" not in afile.history["acq"]:
        warnings.warn("Super old file. Cannot tell difference between stone and abbot.")
        serial = -1
    else:
        # Fetch and parse serial value
        serial = int(afile.history["acq"]["adc_serial"])

    # The serials are defined oddly in the files, use a dict to look them up
    serial_map = {1: "0003", 33: "0033", -1: "????"}  # Stone # Abbot # Unknown

    # Construct new array of index_map
    serial_pat = "29821-0000-%s-C%%i" % serial_map[serial]
    inputmap = _generate_input_map([serial_pat % ci for ci in range(8)])

    # Copy out old index_map/input if it exists
    if "input" in afile.index_map:
        afile.create_index_map("input_orig", np.array(afile.index_map["input"]))
        # del afile._data['index_map']._dict['input']
        afile.del_index_map("input")

    # Create new index map
    afile.create_index_map("input", inputmap)

    return afile


def _remap_blanchard(afile):
    """Remap the index_map/input of a blanchard correlator file."""
    # Remap a blanchard correlator file

    BPC_END = (
        1410586200.0  # 2014/09/13 05:30 UTC ~ when blanchard was moved into the crate
    )
    last_time = afile.time[-1]

    # Use time to check if blanchard was in the crate or not
    if last_time < BPC_END:
        # Find list of channels and adc serial using different methods depending
        # on the archive file version
        if _get_versiontuple(afile) < versiontuple("2.0.0"):
            # The older files have no index_map/input so we need to
            # guess/construct it.
            chanlist = list(range(16))
            adc_serial = afile.history["acq"]["adc_serial"][0]

        else:
            # The newer archive files have the index map, and so we can just
            # parse this
            chanlist = afile.index_map["input"]["chan"]
            adc_serial = afile.index_map["input"]["adc_serial"][0]

        # Construct new array of index_map
        serial_pat = "29821-0000-%s-C%%02i" % adc_serial
        inputmap = _generate_input_map([serial_pat % ci for ci in chanlist])

    else:
        # In-crate data is handled by the generic crate remapper (slot 0).
        _remap_crate_corr(afile, 0)
        return afile

    # Copy out old index_map/input if it exists
    if "input" in afile.index_map:
        afile.create_index_map("input_orig", np.array(afile.index_map["input"]))
        # del afile._data['index_map']._dict['input']
        afile.del_index_map("input")

    # Create new index map
    afile.create_index_map("input", inputmap)

    return afile


def _remap_first9ucrate(afile):
    """Remap the index_map/input of a first9ucrate file (slot 15)."""
    # Remap a first9ucrate file
    if _get_versiontuple(afile) < versiontuple("2.0.0"):
        warnings.warn("Remapping old format first9ucrate files is not supported.")
        return afile

    # Remap ignoring the fact that there was firt9ucrate data in the old format
    _remap_crate_corr(afile, 15)

    return afile


def _remap_slotX(afile):
    """Remap the index_map/input of a slotXX correlator file."""
    # Remap a slotXX correlator file

    # Figure out the slot number from e.g. instrument_name == "slot07"
    inst_name = afile.attrs["instrument_name"]
    slotnum = int(inst_name[4:])

    _remap_crate_corr(afile, slotnum)

    return afile


def _remap_crate_corr(afile, slot):
    """Worker routine: remap new-style crate files (blanchard/first9ucrate/slotX)."""
    # Worker routine for remapping the new style files for blanchard,
    # first9ucrate and slotX

    if _get_versiontuple(afile) < versiontuple("2.0.0"):
        raise Exception("Only functions with archive 2.0.0 files.")

    CRATE_CHANGE = 1412640000.0  # The crate serial changed over for layout 60
    last_time = afile.time[-1]

    if last_time < CRATE_CHANGE:
        crate_serial = "K7BP16-0002"
    else:
        crate_serial = "K7BP16-0004"

    # Fetch and remap the channel list
    chanlist = afile.index_map["input"]["chan"]
    channel_remapping = np.array(
        [12, 13, 14, 15, 8, 9, 10, 11, 4, 5, 6, 7, 0, 1, 2, 3]
    )  # Channel order in new scheme
    chanlist = channel_remapping[chanlist]

    # The slot remapping function (i.e. C(c) from doclib/165/channel_standards)
    slot_remapping = [
        80,
        16,
        64,
        0,
        208,
        144,
        192,
        128,
        240,
        176,
        224,
        160,
        112,
        48,
        96,
        32,
    ]

    # Create new list of serials
    serial_pat = crate_serial + ("%02i%%02i" % int(slot))
    serials = [serial_pat % ci for ci in chanlist]

    # Create a list of channel ids (taking into account that they are
    # meaningless for the old crate)
    if last_time >= CRATE_CHANGE:
        # NOTE(review): `slot_remapping[slot - 1]` with slot == 0 (blanchard
        # in-crate) wraps to the last entry -- confirm this is intended.
        chans = [slot_remapping[slot - 1] + ci for ci in chanlist]
    else:
        chans = chanlist

    inputmap = _generate_input_map(serials, chans)

    # Save and remove old index map
    afile.create_index_map("input_orig", np.array(afile.index_map["input"]))
    afile.del_index_map("input")

    # Create new index map
    afile.create_index_map("input", inputmap)

    return afile


def _remap_inputs(afile):
    """Dispatch to the correct input-remapping routine for this file's instrument."""
    # Master routine for remapping inputs. This tries to figure out which
    # instrument took the data, and then dispatch to the right routine to
    # generate the new index_map/input. This follows the logic in doclib:165

    # Eventually the change will be made in the correlator software and we
    # can stop remapping files after that time.

    # NOTE: need to be careful where you use afile.attrs versus
    # afile.history['acq'] for getting properties

    last_time = afile.time[-1]
    SA_END = 1397088000.0  # 2014/04/10 ~ last time stone and abbot were working

    # h5py should return a byte string for the attribute and so we need to decode
    # it
    inst_name = memh5.bytes_to_unicode(afile.attrs.get("instrument_name", b""))
    num_antenna = int(afile.history.get("acq", {}).get("n_antenna", "-1"))

    # Test if is abbot or stone
    if last_time < SA_END and num_antenna == 8:
        # Relies upon old files having the acq history
        _remap_stone_abbot(afile)

    elif inst_name == "blanchard":
        _remap_blanchard(afile)

    elif inst_name == "first9ucrate":
        _remap_first9ucrate(afile)

    elif inst_name[:4] == "slot":
        _remap_slotX(afile)

    else:
        warnings.warn("I don't know what this data is.")


def _insert_gains(data, input_sel):
    """Construct a full (freq, input, time) gain dataset and add it to `data`."""
    # Construct a full dataset for the gains and insert it into the CorrData
    # object
    # freq_sel is needed for selecting the relevant frequencies in old data

    # Input_sel is only used for pre archive_version 2.2, where there is no way
    # to know which header items to pull out.

    # For old versions the gains are stored in the attributes and need to be
    # extracted
    if ("archive_version" not in data.attrs) or versiontuple(
        memh5.bytes_to_unicode(data.attrs["archive_version"])
    ) < versiontuple("2.2.0"):
        # Hack to find the indices of the frequencies in the file
        fc = data.index_map["freq"]["centre"]
        fr = np.linspace(
            800, 400.0, 1024, endpoint=False
        )  # The should be the frequency channel

        # Compare with a tolerance (< 1e-4). Broken out into loop so we can deal
        # with the case where there are no matches
        fsel = []
        for freq in fc:
            fi = np.argwhere(np.abs(fr - freq) < 1e-4)

            if len(fi) == 1:
                fsel.append(fi[0, 0])

        # Initialise gains to one by default
        gain = np.ones((data.nfreq, data.ninput), dtype=np.complex64)

        try:
            ninput_orig = data.attrs["number_of_antennas"]
        except KeyError:
            ninput_orig = data.history["acq"]["number_of_antennas"]

        # In certain files this entry is a length-1 array, turn it into a
        # scalar if it is not
        if isinstance(ninput_orig, np.ndarray):
            ninput_orig = ninput_orig[0]

        if ninput_orig <= 16:
            # For 16 channel or earlier data, each channel has a simple
            # labelling for its gains
            keylist = [
                (channel, "antenna_scaler_gain" + str(channel))
                for channel in range(ninput_orig)
            ]
        else:
            # For 256 channel data this is more complicated

            # Construct list of keys for all gain entries
            keylist = [key for key in data.attrs.keys() if key[:2] == "ID"]

            # Extract the channel id from each key
            chanid = [key.split("_")[1] for key in keylist]

            # Sort the keylist according to the channel ids, as the inputs
            # should be sorted by channel id.
            keylist = sorted(zip(chanid, keylist))
        # Down select keylist based on input_sel.
        # NOTE(review): reconstructed indentation places this down-selection
        # at function level (input_sel_list is used by the gain loop below
        # for both branches) -- confirm against the original source.
        input_sel_list = list(np.arange(ninput_orig, dtype=int)[input_sel])
        keylist = [keylist[ii] for ii in input_sel_list]

        if len(fsel) != data.nfreq:
            warnings.warn(
                "Could not match all frequency channels. Skipping gain calculation."
            )
        else:
            # Iterate over the keys and extract the gains
            for chan, key in keylist:
                # Try and find gain entry
                if key in data.attrs:
                    g_data = data.attrs[key]
                elif key in data.history["acq"]:
                    g_data = data.history["acq"][key]
                else:
                    warnings.warn(
                        "Cannot find gain entry [%s] for channel %i" % (key, chan)
                    )
                    continue

                # Unpack the gain values and construct the gain array
                g_real, g_imag = g_data[1:-1:2], g_data[2:-1:2]
                g_exp = g_data[-1]

                g_full = (g_real + 1.0j * g_imag) * 2**g_exp

                # Select frequencies that are loaded from the file
                g_sel = g_full[fsel]

                gain[:, input_sel_list.index(chan)] = g_sel

        # Gain array must be specified for all times, repeat along the time axis
        gain = np.tile(gain[:, :, np.newaxis], (1, 1, data.ntime))

    else:
        gain = np.ones((data.nfreq, data.ninput, data.ntime), dtype=np.complex64)

        # Check that the gain datasets have been loaded
        if ("gain_coeff" not in data.datasets) or ("gain_exp" not in data.datasets):
            warnings.warn(
                "Required gain datasets not loaded from file (> v2.2.0), using unit gains."
            )

        else:
            # Extract the gain datasets from the file
            gain_exp = data.datasets["gain_exp"][:]
            gain_coeff = data.datasets["gain_coeff"][:]

            # Turn into a single array
            if gain_coeff.dtype == np.complex64:
                gain *= gain_coeff
            else:
                gain.real[:] = gain_coeff["r"]
                gain.imag[:] = gain_coeff["i"]
            gain *= 2 ** gain_exp[np.newaxis, :, :]

    # Add gain dataset to object, and create axis attribute
    gain_dset = data.create_dataset("gain", data=gain)
    gain_dset.attrs["axis"] = np.array(["freq", "input", "time"])


if __name__ == "__main__":
    import doctest

    doctest.testmod()
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/_modules/ch_util/cal_utils.html b/docs/_modules/ch_util/cal_utils.html new file mode 100644 index 00000000..9ff69968 --- /dev/null +++ b/docs/_modules/ch_util/cal_utils.html @@ -0,0 +1,3020 @@ + + + + + + ch_util.cal_utils — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for ch_util.cal_utils

+"""
+Tools for point source calibration
+
+This module contains tools for performing point-source calibration.
+"""
+
+from abc import ABCMeta, abstractmethod
+from datetime import datetime
+import inspect
+import logging
+from typing import Dict, Optional, Union
+
+import numpy as np
+import scipy.stats
+from scipy.optimize import curve_fit
+from scipy.interpolate import interp1d
+from scipy.linalg import lstsq, inv
+
+from caput import memh5, time as ctime
+from chimedb import dataset as ds
+from chimedb.dataset.utils import state_id_of_type, unique_unmasked_entry
+from ch_util import ephemeris, tools
+
+# Set up logging
+logger = logging.getLogger(__name__)
+logger.addHandler(logging.NullHandler())
+
+
+
class FitTransit(object, metaclass=ABCMeta):
    """Base class for fitting models to point source transits.

    The `fit` method should be used to populate the `param`, `param_cov`, `chisq`,
    and `ndof` attributes. The `predict` and `uncertainty` methods can then be used
    to obtain the model prediction for the response and uncertainty on this quantity
    at a given hour angle.

    Attributes
    ----------
    param : np.ndarray[..., nparam]
        Best-fit parameters.
    param_cov : np.ndarray[..., nparam, nparam]
        Covariance of the fit parameters.
    chisq : np.ndarray[...]
        Chi-squared of the fit.
    ndof : np.ndarray[...]
        Number of degrees of freedom.

    Abstract Methods
    ----------------
    Any subclass of FitTransit must define these methods:
        peak
        _fit
        _model
        _jacobian
    """

    # Class-wide cache of Student's-t quantiles, shared by all instances and
    # subclasses (see `tval`).
    _tval = {}
    # Labels for the fitted components; subclasses may override.
    # NOTE(review): np.string_ was removed in NumPy 2.0 (np.bytes_ is the
    # drop-in alias) -- confirm the numpy version pin.
    component = np.array(["complex"], dtype=np.string_)

    def __init__(self, *args, **kwargs):
        """Instantiates a FitTransit object.

        Parameters
        ----------
        param : np.ndarray[..., nparam]
            Best-fit parameters.
        param_cov : np.ndarray[..., nparam, nparam]
            Covariance of the fit parameters.
        chisq : np.ndarray[..., ncomponent]
            Chi-squared.
        ndof : np.ndarray[..., ncomponent]
            Number of degrees of freedom.
        """
        # Save keyword arguments as attributes
        self.param = kwargs.pop("param", None)
        self.param_cov = kwargs.pop("param_cov", None)
        self.chisq = kwargs.pop("chisq", None)
        self.ndof = kwargs.pop("ndof", None)
        # Anything left over is model configuration, forwarded when slicing
        # (see __getitem__).
        self.model_kwargs = kwargs
+[docs] + def predict(self, ha, elementwise=False): + """Predict the point source response. + + Parameters + ---------- + ha : np.ndarray[nha,] or float + The hour angle in degrees. + elementwise : bool + If False, then the model will be evaluated at the + requested hour angles for every set of parameters. + If True, then the model will be evaluated at a + separate hour angle for each set of parameters + (requires `ha.shape == self.N`). + + Returns + ------- + model : np.ndarray[..., nha] or float + Model for the point source response at the requested + hour angles. Complex valued. + """ + with np.errstate(all="ignore"): + mdl = self._model(ha, elementwise=elementwise) + return np.where(np.isfinite(mdl), mdl, 0.0 + 0.0j)
+ + +
    def uncertainty(self, ha, alpha=0.32, elementwise=False):
        """Predict the uncertainty on the point source response.

        Parameters
        ----------
        ha : np.ndarray[nha,] or float
            The hour angle in degrees.
        alpha : float
            Confidence level given by 1 - alpha.
        elementwise : bool
            If False, then the uncertainty will be evaluated at
            the requested hour angles for every set of parameters.
            If True, then the uncertainty will be evaluated at a
            separate hour angle for each set of parameters
            (requires `ha.shape == self.N`).

        Returns
        -------
        err : np.ndarray[..., nha]
            Uncertainty on the point source response at the
            requested hour angles.  Non-finite values are replaced by zero.
        """
        x = np.atleast_1d(ha)
        with np.errstate(all="ignore"):
            # _propagate_uncertainty is a module-level helper (defined
            # elsewhere in this file): propagate the parameter covariance
            # through the model jacobian, scaled by the Student's-t quantile.
            err = _propagate_uncertainty(
                self._jacobian(x, elementwise=elementwise),
                self.param_cov,
                self.tval(alpha, self.ndof),
            )
        return np.squeeze(np.where(np.isfinite(err), err, 0.0))
+ + +
+[docs] + def fit(self, ha, resp, resp_err, width=5, absolute_sigma=False, **kwargs): + """Apply subclass defined `_fit` method to multiple transits. + + This function can be used to fit the transit for multiple inputs + and frequencies. Populates the `param`, `param_cov`, `chisq`, and `ndof` + attributes. + + Parameters + ---------- + ha : np.ndarray[nha,] + Hour angle in degrees. + resp : np.ndarray[..., nha] + Measured response to the point source. Complex valued. + resp_err : np.ndarray[..., nha] + Error on the measured response. + width : np.ndarray[...] + Initial guess at the width (sigma) of the transit in degrees. + absolute_sigma : bool + Set to True if the errors provided are absolute. Set to False if + the errors provided are relative, in which case the parameter covariance + will be scaled by the chi-squared per degree-of-freedom. + """ + shp = resp.shape[:-1] + dtype = ha.dtype + + if not np.isscalar(width) and (width.shape != shp): + ValueError("Keyword with must be scalar or have shape %s." % str(shp)) + + self.param = np.full(shp + (self.nparam,), np.nan, dtype=dtype) + self.param_cov = np.full(shp + (self.nparam, self.nparam), np.nan, dtype=dtype) + self.chisq = np.full(shp + (self.ncomponent,), np.nan, dtype=dtype) + self.ndof = np.full(shp + (self.ncomponent,), 0, dtype=int) + + with np.errstate(all="ignore"): + for ind in np.ndindex(*shp): + wi = width if np.isscalar(width) else width[ind[: width.ndim]] + + err = resp_err[ind] + good = np.flatnonzero(err > 0.0) + + if (good.size // 2) <= self.nparam: + continue + + try: + param, param_cov, chisq, ndof = self._fit( + ha[good], + resp[ind][good], + err[good], + width=wi, + absolute_sigma=absolute_sigma, + **kwargs, + ) + except Exception as error: + logger.debug("Index %s failed with error: %s" % (str(ind), error)) + continue + + self.param[ind] = param + self.param_cov[ind] = param_cov + self.chisq[ind] = chisq + self.ndof[ind] = ndof
    @property
    def parameter_names(self):
        """
        Array of strings containing the name of the fit parameters.

        Returns
        -------
        parameter_names : np.ndarray[nparam,]
            Names of the parameters.
        """
        # Generic placeholder names; subclasses override with meaningful ones.
        # NOTE(review): np.string_ was removed in NumPy 2.0 (np.bytes_ is the
        # drop-in alias) -- confirm the numpy version pin.
        return np.array(["param%d" % p for p in range(self.nparam)], dtype=np.string_)

    @property
    def param_corr(self):
        """
        Parameter correlation matrix.

        Returns
        -------
        param_corr : np.ndarray[..., nparam, nparam]
            Correlation of the fit parameters.
        """
        # Normalise the covariance by the outer product of the standard
        # deviations; invert_no_zero leaves zero-variance entries at zero.
        idiag = tools.invert_no_zero(
            np.sqrt(np.diagonal(self.param_cov, axis1=-2, axis2=-1))
        )
        return self.param_cov * idiag[..., np.newaxis, :] * idiag[..., np.newaxis]

    @property
    def N(self):
        """
        Number of independent transit fits contained in this object.

        Returns
        -------
        N : tuple
            Numpy-style shape indicating the number of
            fits that the object contains. Is None
            if the object contains a single fit.
        """
        # Implicitly returns None when no fit has been stored yet.
        if self.param is not None:
            return self.param.shape[:-1] or None

    @property
    def nparam(self):
        """
        Number of parameters.

        Returns
        -------
        nparam : int
            Number of fit parameters.
        """
        return self.param.shape[-1]

    @property
    def ncomponent(self):
        """
        Number of components.

        Returns
        -------
        ncomponent : int
            Number of components (i.e, real and imag, amp and phase, complex) that have been fit.
        """
        return self.component.size

    def __getitem__(self, val):
        """Instantiates a new TransitFit object containing some subset of the fits."""

        if self.N is None:
            raise KeyError(
                "Attempting to slice TransitFit object containing single fit."
            )

        # Forward the stored model configuration so the slice behaves
        # identically to the parent object.
        return self.__class__(
            param=self.param[val],
            param_cov=self.param_cov[val],
            ndof=self.ndof[val],
            chisq=self.chisq[val],
            **self.model_kwargs,
        )
+[docs] + @abstractmethod + def peak(self): + """Calculate the peak of the transit. + + Any subclass of FitTransit must define this method. + """ + return
+ + + @abstractmethod + def _fit(self, ha, resp, resp_err, width=None, absolute_sigma=False): + """Fit data to the model. + + Any subclass of FitTransit must define this method. + + Parameters + ---------- + ha : np.ndarray[nha,] + Hour angle in degrees. + resp : np.ndarray[nha,] + Measured response to the point source. Complex valued. + resp_err : np.ndarray[nha,] + Error on the measured response. + width : np.ndarray + Initial guess at the width (sigma) of the transit in degrees. + absolute_sigma : bool + Set to True if the errors provided are absolute. Set to False if + the errors provided are relative, in which case the parameter covariance + will be scaled by the chi-squared per degree-of-freedom. + + Returns + ------- + param : np.ndarray[nparam,] + Best-fit model parameters. + param_cov : np.ndarray[nparam, nparam] + Covariance of the best-fit model parameters. + chisq : float + Chi-squared of the fit. + ndof : int + Number of degrees of freedom of the fit. + """ + return + + @abstractmethod + def _model(self, ha): + """Calculate the model for the point source response. + + Any subclass of FitTransit must define this method. + + Parameters + ---------- + ha : np.ndarray + Hour angle in degrees. + """ + return + + @abstractmethod + def _jacobian(self, ha): + """Calculate the jacobian of the model for the point source response. + + Any subclass of FitTransit must define this method. + + Parameters + ---------- + ha : np.ndarray + Hour angle in degrees. + + Returns + ------- + jac : np.ndarray[..., nparam, nha] + The jacobian defined as + jac[..., i, j] = d(model(ha)) / d(param[i]) evaluated at ha[j] + """ + return + +
+[docs] + @classmethod + def tval(cls, alpha, ndof): + """Quantile of a standardized Student's t random variable. + + This quantity is slow to compute. Past values will be cached + in a dictionary shared by all instances of the class. + + Parameters + ---------- + alpha : float + Calculate the quantile corresponding to the lower tail probability + 1 - alpha / 2. + ndof : np.ndarray or int + Number of degrees of freedom of the Student's t variable. + + Returns + ------- + tval : np.ndarray or float + Quantile of a standardized Student's t random variable. + """ + prob = 1.0 - 0.5 * alpha + + arr_ndof = np.atleast_1d(ndof) + tval = np.zeros(arr_ndof.shape, dtype=np.float32) + + for ind, nd in np.ndenumerate(arr_ndof): + key = (int(100.0 * prob), nd) + if key not in cls._tval: + cls._tval[key] = scipy.stats.t.ppf(prob, nd) + tval[ind] = cls._tval[key] + + if np.isscalar(ndof): + tval = np.squeeze(tval) + + return tval
+
+ + + +
+[docs] +class FitPoly(FitTransit): + """Base class for fitting polynomials to point source transits. + + Maps methods of np.polynomial to methods of the class for the + requested polynomial type. + """ + + def __init__(self, poly_type="standard", *args, **kwargs): + """Instantiates a FitPoly object. + + Parameters + ---------- + poly_type : str + Type of polynomial. Can be 'standard', 'hermite', or 'chebyshev'. + """ + super(FitPoly, self).__init__(poly_type=poly_type, *args, **kwargs) + + self._set_polynomial_model(poly_type) + + def _set_polynomial_model(self, poly_type): + """Map methods of np.polynomial to methods of the class.""" + if poly_type == "standard": + self._vander = np.polynomial.polynomial.polyvander + self._eval = np.polynomial.polynomial.polyval + self._deriv = np.polynomial.polynomial.polyder + self._root = np.polynomial.polynomial.polyroots + elif poly_type == "hermite": + self._vander = np.polynomial.hermite.hermvander + self._eval = np.polynomial.hermite.hermval + self._deriv = np.polynomial.hermite.hermder + self._root = np.polynomial.hermite.hermroots + elif poly_type == "chebyshev": + self._vander = np.polynomial.chebyshev.chebvander + self._eval = np.polynomial.chebyshev.chebval + self._deriv = np.polynomial.chebyshev.chebder + self._root = np.polynomial.chebyshev.chebroots + else: + raise ValueError( + "Do not recognize polynomial type %s." + "Options are 'standard', 'hermite', or 'chebyshev'." % poly_type + ) + + self.poly_type = poly_type + + def _fast_eval(self, ha, param=None, elementwise=False): + """Evaluate the polynomial at the requested hour angle.""" + if param is None: + param = self.param + + vander = self._vander(ha, param.shape[-1] - 1) + + if elementwise: + out = np.sum(vander * param, axis=-1) + elif param.ndim == 1: + out = np.dot(vander, param) + else: + out = np.matmul(param, np.rollaxis(vander, -1)) + + return np.squeeze(out, axis=-1) if np.isscalar(ha) else out
+ + + +
+[docs] +class FitRealImag(FitTransit): + """Base class for fitting models to the real and imag component. + + Assumes an independent fit to real and imaginary, and provides + methods for predicting the uncertainty on each. + """ + + component = np.array(["real", "imag"], dtype=np.string_) + +
+[docs] + def uncertainty_real(self, ha, alpha=0.32, elementwise=False): + """Predicts the uncertainty on real component at given hour angle(s). + + Parameters + ---------- + ha : np.ndarray[nha,] or float + Hour angle in degrees. + alpha : float + Confidence level given by 1 - alpha. + + Returns + ------- + err : np.ndarray[..., nha] or float + Uncertainty on the real component. + """ + x = np.atleast_1d(ha) + err = _propagate_uncertainty( + self._jacobian_real(x, elementwise=elementwise), + self.param_cov[..., : self.nparr, : self.nparr], + self.tval(alpha, self.ndofr), + ) + return np.squeeze(err, axis=-1) if np.isscalar(ha) else err
+ + +
+[docs] + def uncertainty_imag(self, ha, alpha=0.32, elementwise=False): + """Predicts the uncertainty on imag component at given hour angle(s). + + Parameters + ---------- + ha : np.ndarray[nha,] or float + Hour angle in degrees. + alpha : float + Confidence level given by 1 - alpha. + + Returns + ------- + err : np.ndarray[..., nha] or float + Uncertainty on the imag component. + """ + x = np.atleast_1d(ha) + err = _propagate_uncertainty( + self._jacobian_imag(x, elementwise=elementwise), + self.param_cov[..., self.nparr :, self.nparr :], + self.tval(alpha, self.ndofi), + ) + return np.squeeze(err, axis=-1) if np.isscalar(ha) else err
+ + +
+[docs] + def uncertainty(self, ha, alpha=0.32, elementwise=False): + """Predicts the uncertainty on the response at given hour angle(s). + + Returns the quadrature sum of the real and imag uncertainty. + + Parameters + ---------- + ha : np.ndarray[nha,] or float + Hour angle in degrees. + alpha : float + Confidence level given by 1 - alpha. + + Returns + ------- + err : np.ndarray[..., nha] or float + Uncertainty on the response. + """ + with np.errstate(all="ignore"): + err = np.sqrt( + self.uncertainty_real(ha, alpha=alpha, elementwise=elementwise) ** 2 + + self.uncertainty_imag(ha, alpha=alpha, elementwise=elementwise) ** 2 + ) + return err
+ + + def _jacobian(self, ha): + raise NotImplementedError( + "Fits to real and imaginary are independent. " + "Use _jacobian_real and _jacobian_imag instead." + ) + + @abstractmethod + def _jacobian_real(self, ha): + """Calculate the jacobian of the model for the real component.""" + return + + @abstractmethod + def _jacobian_imag(self, ha): + """Calculate the jacobian of the model for the imag component.""" + return + + @property + def nparam(self): + return self.nparr + self.npari
+ + + +
+[docs] +class FitPolyRealPolyImag(FitPoly, FitRealImag): + """Class that enables separate fits of a polynomial to real and imag components. + + Used to fit cross-polar response that is not well-described by the + FitPolyLogAmpPolyPhase used for co-polar response. + """ + + def __init__(self, poly_deg=5, even=False, odd=False, *args, **kwargs): + """Instantiates a FitPolyRealPolyImag object. + + Parameters + ---------- + poly_deg : int + Degree of the polynomial to fit to real and imaginary component. + """ + if even and odd: + raise RuntimeError("Cannot request both even AND odd.") + + super(FitPolyRealPolyImag, self).__init__( + poly_deg=poly_deg, even=even, odd=odd, *args, **kwargs + ) + + self.poly_deg = poly_deg + self.even = even + self.odd = odd + + ind = np.arange(self.poly_deg + 1) + if self.even: + self.coeff_index = np.flatnonzero((ind == 0) | ~(ind % 2)) + + elif self.odd: + self.coeff_index = np.flatnonzero((ind == 0) | (ind % 2)) + + else: + self.coeff_index = ind + + self.nparr = self.coeff_index.size + self.npari = self.nparr + +
+[docs] + def vander(self, ha, *args): + """Create the Vandermonde matrix.""" + A = self._vander(ha, self.poly_deg) + return A[:, self.coeff_index]
+ + +
+[docs] + def deriv(self, ha, param=None): + """Calculate the derivative of the transit.""" + if param is None: + param = self.param + + is_scalar = np.isscalar(ha) + ha = np.atleast_1d(ha) + + shp = param.shape[:-1] + + param_expanded_real = np.zeros(shp + (self.poly_deg + 1,), dtype=param.dtype) + param_expanded_real[..., self.coeff_index] = param[..., : self.nparr] + der1_real = self._deriv(param_expanded_real, m=1, axis=-1) + + param_expanded_imag = np.zeros(shp + (self.poly_deg + 1,), dtype=param.dtype) + param_expanded_imag[..., self.coeff_index] = param[..., self.nparr :] + der1_imag = self._deriv(param_expanded_imag, m=1, axis=-1) + + deriv = np.full(shp + (ha.size,), np.nan, dtype=np.complex64) + for ind in np.ndindex(*shp): + ider1_real = der1_real[ind] + ider1_imag = der1_imag[ind] + + if np.any(~np.isfinite(ider1_real)) or np.any(~np.isfinite(ider1_imag)): + continue + + deriv[ind] = self._eval(ha, ider1_real) + 1.0j * self._eval(ha, ider1_imag) + + return np.squeeze(deriv, axis=-1) if is_scalar else deriv
+ + + def _fit(self, ha, resp, resp_err, absolute_sigma=False): + """Fit polynomial to real and imaginary component. + + Use weighted least squares. + + Parameters + ---------- + ha : np.ndarray[nha,] + Hour angle in degrees. + resp : np.ndarray[nha,] + Measured response to the point source. Complex valued. + resp_err : np.ndarray[nha,] + Error on the measured response. + absolute_sigma : bool + Set to True if the errors provided are absolute. Set to False if + the errors provided are relative, in which case the parameter covariance + will be scaled by the chi-squared per degree-of-freedom. + + Returns + ------- + param : np.ndarray[nparam,] + Best-fit model parameters. + param_cov : np.ndarray[nparam, nparam] + Covariance of the best-fit model parameters. + chisq : np.ndarray[2,] + Chi-squared of the fit to amplitude and phase. + ndof : np.ndarray[2,] + Number of degrees of freedom of the fit to amplitude and phase. + """ + min_nfit = min(self.nparr, self.npari) + 1 + + # Prepare amplitude data + amp = np.abs(resp) + w0 = tools.invert_no_zero(resp_err) ** 2 + + # Only perform fit if there is enough data. 
+ this_flag = (amp > 0.0) & (w0 > 0.0) + ndata = int(np.sum(this_flag)) + if ndata < min_nfit: + raise RuntimeError("Number of data points less than number of parameters.") + + wf = w0 * this_flag.astype(np.float32) + + # Compute real and imaginary component of complex response + yr = np.real(resp) + yi = np.imag(resp) + + # Calculate vandermonde matrix + A = self.vander(ha) + + # Compute parameter covariance + cov = inv(np.dot(A.T, wf[:, np.newaxis] * A)) + + # Compute best-fit coefficients + coeffr = np.dot(cov, np.dot(A.T, wf * yr)) + coeffi = np.dot(cov, np.dot(A.T, wf * yi)) + + # Compute model estimate + mr = np.dot(A, coeffr) + mi = np.dot(A, coeffi) + + # Compute chisq per degree of freedom + ndofr = ndata - self.nparr + ndofi = ndata - self.npari + + ndof = np.array([ndofr, ndofi]) + chisq = np.array([np.sum(wf * (yr - mr) ** 2), np.sum(wf * (yi - mi) ** 2)]) + + # Scale the parameter covariance by chisq per degree of freedom. + # Equivalent to using RMS of the residuals to set the absolute error + # on the measurements. 
+ if not absolute_sigma: + scale_factor = chisq * tools.invert_no_zero(ndof.astype(np.float32)) + covr = cov * scale_factor[0] + covi = cov * scale_factor[1] + else: + covr = cov + covi = cov + + param = np.concatenate((coeffr, coeffi)) + + param_cov = np.zeros((self.nparam, self.nparam), dtype=np.float32) + param_cov[: self.nparr, : self.nparr] = covr + param_cov[self.nparr :, self.nparr :] = covi + + return param, param_cov, chisq, ndof + + def _model(self, ha, elementwise=False): + real = self._fast_eval( + ha, self.param[..., : self.nparr], elementwise=elementwise + ) + imag = self._fast_eval( + ha, self.param[..., self.nparr :], elementwise=elementwise + ) + + return real + 1.0j * imag + + def _jacobian_real(self, ha, elementwise=False): + jac = np.rollaxis(self.vander(ha), -1) + if not elementwise and self.N is not None: + slc = (None,) * len(self.N) + jac = jac[slc] + + return jac + + def _jacobian_imag(self, ha, elementwise=False): + jac = np.rollaxis(self.vander(ha), -1) + if not elementwise and self.N is not None: + slc = (None,) * len(self.N) + jac = jac[slc] + + return jac + + @property + def ndofr(self): + """Number of degrees of freedom for the real fit.""" + return self.ndof[..., 0] + + @property + def ndofi(self): + """Number of degrees of freedom for the imag fit.""" + return self.ndof[..., 1] + + @property + def parameter_names(self): + """Array of strings containing the name of the fit parameters.""" + return np.array( + ["%s_poly_real_coeff%d" % (self.poly_type, p) for p in range(self.nparr)] + + ["%s_poly_imag_coeff%d" % (self.poly_type, p) for p in range(self.npari)], + dtype=np.string_, + ) + +
+[docs] + def peak(self): + """Calculate the peak of the transit.""" + logger.warning("The peak is not defined for this model.") + return
+
+ + + +
+[docs] +class FitAmpPhase(FitTransit): + """Base class for fitting models to the amplitude and phase. + + Assumes an independent fit to amplitude and phase, and provides + methods for predicting the uncertainty on each. + """ + + component = np.array(["amplitude", "phase"], dtype=np.string_) + +
+[docs] + def uncertainty_amp(self, ha, alpha=0.32, elementwise=False): + """Predicts the uncertainty on amplitude at given hour angle(s). + + Parameters + ---------- + ha : np.ndarray[nha,] or float + Hour angle in degrees. + alpha : float + Confidence level given by 1 - alpha. + + Returns + ------- + err : np.ndarray[..., nha] or float + Uncertainty on the amplitude in fractional units. + """ + x = np.atleast_1d(ha) + err = _propagate_uncertainty( + self._jacobian_amp(x, elementwise=elementwise), + self.param_cov[..., : self.npara, : self.npara], + self.tval(alpha, self.ndofa), + ) + return np.squeeze(err, axis=-1) if np.isscalar(ha) else err
+ + +
+[docs] + def uncertainty_phi(self, ha, alpha=0.32, elementwise=False): + """Predicts the uncertainty on phase at given hour angle(s). + + Parameters + ---------- + ha : np.ndarray[nha,] or float + Hour angle in degrees. + alpha : float + Confidence level given by 1 - alpha. + + Returns + ------- + err : np.ndarray[..., nha] or float + Uncertainty on the phase in radians. + """ + x = np.atleast_1d(ha) + err = _propagate_uncertainty( + self._jacobian_phi(x, elementwise=elementwise), + self.param_cov[..., self.npara :, self.npara :], + self.tval(alpha, self.ndofp), + ) + return np.squeeze(err, axis=-1) if np.isscalar(ha) else err
+ + +
+[docs] + def uncertainty(self, ha, alpha=0.32, elementwise=False): + """Predicts the uncertainty on the response at given hour angle(s). + + Returns the quadrature sum of the amplitude and phase uncertainty. + + Parameters + ---------- + ha : np.ndarray[nha,] or float + Hour angle in degrees. + alpha : float + Confidence level given by 1 - alpha. + + Returns + ------- + err : np.ndarray[..., nha] or float + Uncertainty on the response. + """ + with np.errstate(all="ignore"): + err = np.abs(self._model(ha, elementwise=elementwise)) * np.sqrt( + self.uncertainty_amp(ha, alpha=alpha, elementwise=elementwise) ** 2 + + self.uncertainty_phi(ha, alpha=alpha, elementwise=elementwise) ** 2 + ) + return err
+ + + def _jacobian(self, ha): + raise NotImplementedError( + "Fits to amplitude and phase are independent. " + "Use _jacobian_amp and _jacobian_phi instead." + ) + + @abstractmethod + def _jacobian_amp(self, ha): + """Calculate the jacobian of the model for the amplitude.""" + return + + @abstractmethod + def _jacobian_phi(self, ha): + """Calculate the jacobian of the model for the phase.""" + return + + @property + def nparam(self): + return self.npara + self.nparp
+ + + +
+[docs] +class FitPolyLogAmpPolyPhase(FitPoly, FitAmpPhase): + """Class that enables separate fits of a polynomial to log amplitude and phase.""" + + def __init__(self, poly_deg_amp=5, poly_deg_phi=5, *args, **kwargs): + """Instantiates a FitPolyLogAmpPolyPhase object. + + Parameters + ---------- + poly_deg_amp : int + Degree of the polynomial to fit to log amplitude. + poly_deg_phi : int + Degree of the polynomial to fit to phase. + """ + super(FitPolyLogAmpPolyPhase, self).__init__( + poly_deg_amp=poly_deg_amp, poly_deg_phi=poly_deg_phi, *args, **kwargs + ) + + self.poly_deg_amp = poly_deg_amp + self.poly_deg_phi = poly_deg_phi + + self.npara = poly_deg_amp + 1 + self.nparp = poly_deg_phi + 1 + + def _fit( + self, + ha, + resp, + resp_err, + width=None, + absolute_sigma=False, + moving_window=0.3, + niter=5, + ): + """Fit polynomial to log amplitude and polynomial to phase. + + Use weighted least squares. The initial errors on log amplitude + are set to `resp_err / abs(resp)`. If the niter parameter is greater than 1, + then those errors will be updated with `resp_err / model_amp`, where `model_amp` + is the best-fit model for the amplitude from the previous iteration. The errors + on the phase are set to `resp_err / model_amp` where `model_amp` is the best-fit + model for the amplitude from the log amplitude fit. + + Parameters + ---------- + ha : np.ndarray[nha,] + Hour angle in degrees. + resp : np.ndarray[nha,] + Measured response to the point source. Complex valued. + resp_err : np.ndarray[nha,] + Error on the measured response. + width : float + Initial guess at the width (sigma) of the transit in degrees. + absolute_sigma : bool + Set to True if the errors provided are absolute. Set to False if + the errors provided are relative, in which case the parameter covariance + will be scaled by the chi-squared per degree-of-freedom. + niter : int + Number of iterations for the log amplitude fit. 
+ moving_window : float + Only fit hour angles within +/- window * width from the peak. + Note that the peak location is updated with each iteration. + Set to None to fit all hour angles where resp_err > 0.0. + + Returns + ------- + param : np.ndarray[nparam,] + Best-fit model parameters. + param_cov : np.ndarray[nparam, nparam] + Covariance of the best-fit model parameters. + chisq : np.ndarray[2,] + Chi-squared of the fit to amplitude and phase. + ndof : np.ndarray[2,] + Number of degrees of freedom of the fit to amplitude and phase. + """ + min_nfit = min(self.npara, self.nparp) + 1 + + window = width * moving_window if (width and moving_window) else None + + # Prepare amplitude data + model_amp = np.abs(resp) + w0 = tools.invert_no_zero(resp_err) ** 2 + + # Only perform fit if there is enough data. + this_flag = (model_amp > 0.0) & (w0 > 0.0) + ndata = int(np.sum(this_flag)) + if ndata < min_nfit: + raise RuntimeError("Number of data points less than number of parameters.") + + # Prepare amplitude data + ya = np.log(model_amp) + + # Prepare phase data. + phi = np.angle(resp) + phi0 = phi[np.argmin(np.abs(ha))] + + yp = phi - phi0 + yp += (yp < -np.pi) * 2 * np.pi - (yp > np.pi) * 2 * np.pi + yp += phi0 + + # Calculate vandermonde matrix + A = self._vander(ha, self.poly_deg_amp) + center = 0.0 + + # Iterate to obtain model estimate for amplitude + for kk in range(niter): + wk = w0 * model_amp**2 + + if window is not None: + if kk > 0: + center = self.peak(param=coeff) + + if np.isnan(center): + raise RuntimeError("No peak found.") + + wk *= (np.abs(ha - center) <= window).astype(np.float64) + + ndata = int(np.sum(wk > 0.0)) + if ndata < min_nfit: + raise RuntimeError( + "Number of data points less than number of parameters." 
+ ) + + C = np.dot(A.T, wk[:, np.newaxis] * A) + coeff = lstsq(C, np.dot(A.T, wk * ya))[0] + + model_amp = np.exp(np.dot(A, coeff)) + + # Compute final value for amplitude + center = self.peak(param=coeff) + + if np.isnan(center): + raise RuntimeError("No peak found.") + + wf = w0 * model_amp**2 + if window is not None: + wf *= (np.abs(ha - center) <= window).astype(np.float64) + + ndata = int(np.sum(wf > 0.0)) + if ndata < min_nfit: + raise RuntimeError( + "Number of data points less than number of parameters." + ) + + cova = inv(np.dot(A.T, wf[:, np.newaxis] * A)) + coeffa = np.dot(cova, np.dot(A.T, wf * ya)) + + mamp = np.dot(A, coeffa) + + # Compute final value for phase + A = self._vander(ha, self.poly_deg_phi) + + covp = inv(np.dot(A.T, wf[:, np.newaxis] * A)) + coeffp = np.dot(covp, np.dot(A.T, wf * yp)) + + mphi = np.dot(A, coeffp) + + # Compute chisq per degree of freedom + ndofa = ndata - self.npara + ndofp = ndata - self.nparp + + ndof = np.array([ndofa, ndofp]) + chisq = np.array([np.sum(wf * (ya - mamp) ** 2), np.sum(wf * (yp - mphi) ** 2)]) + + # Scale the parameter covariance by chisq per degree of freedom. + # Equivalent to using RMS of the residuals to set the absolute error + # on the measurements. + if not absolute_sigma: + scale_factor = chisq * tools.invert_no_zero(ndof.astype(np.float32)) + cova *= scale_factor[0] + covp *= scale_factor[1] + + param = np.concatenate((coeffa, coeffp)) + + param_cov = np.zeros((self.nparam, self.nparam), dtype=np.float32) + param_cov[: self.npara, : self.npara] = cova + param_cov[self.npara :, self.npara :] = covp + + return param, param_cov, chisq, ndof + +
+[docs] + def peak(self, param=None): + """Find the peak of the transit. + + Parameters + ---------- + param : np.ndarray[..., nparam] + Coefficients of the polynomial model for log amplitude. + Defaults to `self.param`. + + Returns + ------- + peak : np.ndarray[...] + Location of the maximum amplitude in degrees hour angle. + If the polynomial does not have a maximum, then NaN is returned. + """ + if param is None: + param = self.param + + der1 = self._deriv(param[..., : self.npara], m=1, axis=-1) + der2 = self._deriv(param[..., : self.npara], m=2, axis=-1) + + shp = der1.shape[:-1] + peak = np.full(shp, np.nan, dtype=der1.dtype) + + for ind in np.ndindex(*shp): + ider1 = der1[ind] + + if np.any(~np.isfinite(ider1)): + continue + + root = self._root(ider1) + xmax = np.real( + [ + rr + for rr in root + if (rr.imag == 0) and (self._eval(rr, der2[ind]) < 0.0) + ] + ) + + peak[ind] = xmax[np.argmin(np.abs(xmax))] if xmax.size > 0 else np.nan + + return peak
+ + + def _model(self, ha, elementwise=False): + amp = self._fast_eval( + ha, self.param[..., : self.npara], elementwise=elementwise + ) + phi = self._fast_eval( + ha, self.param[..., self.npara :], elementwise=elementwise + ) + + return np.exp(amp) * (np.cos(phi) + 1.0j * np.sin(phi)) + + def _jacobian_amp(self, ha, elementwise=False): + jac = self._vander(ha, self.poly_deg_amp) + if not elementwise: + jac = np.rollaxis(jac, -1) + if self.N is not None: + slc = (None,) * len(self.N) + jac = jac[slc] + + return jac + + def _jacobian_phi(self, ha, elementwise=False): + jac = self._vander(ha, self.poly_deg_phi) + if not elementwise: + jac = np.rollaxis(jac, -1) + if self.N is not None: + slc = (None,) * len(self.N) + jac = jac[slc] + + return jac + + @property + def ndofa(self): + """ + Number of degrees of freedom for the amplitude fit. + + Returns + ------- + ndofa : np.ndarray[...] + Number of degrees of freedom of the amplitude fit. + """ + return self.ndof[..., 0] + + @property + def ndofp(self): + """ + Number of degrees of freedom for the phase fit. + + Returns + ------- + ndofp : np.ndarray[...] + Number of degrees of freedom of the phase fit. + """ + return self.ndof[..., 1] + + @property + def parameter_names(self): + """Array of strings containing the name of the fit parameters.""" + return np.array( + ["%s_poly_amp_coeff%d" % (self.poly_type, p) for p in range(self.npara)] + + ["%s_poly_phi_coeff%d" % (self.poly_type, p) for p in range(self.nparp)], + dtype=np.string_, + )
+ + + +
+[docs] +class FitGaussAmpPolyPhase(FitPoly, FitAmpPhase): + """Class that enables fits of a gaussian to amplitude and a polynomial to phase.""" + + component = np.array(["complex"], dtype=np.string_) + npara = 3 + + def __init__(self, poly_deg_phi=5, *args, **kwargs): + """Instantiates a FitGaussAmpPolyPhase object. + + Parameters + ---------- + poly_deg_phi : int + Degree of the polynomial to fit to phase. + """ + super(FitGaussAmpPolyPhase, self).__init__( + poly_deg_phi=poly_deg_phi, *args, **kwargs + ) + + self.poly_deg_phi = poly_deg_phi + self.nparp = poly_deg_phi + 1 + + def _fit(self, ha, resp, resp_err, width=5, absolute_sigma=False, param0=None): + """Fit gaussian to amplitude and polynomial to phase. + + Uses non-linear least squares (`scipy.optimize.curve_fit`) to + fit the model to the complex valued data. + + Parameters + ---------- + ha : np.ndarray[nha,] + Hour angle in degrees. + resp : np.ndarray[nha,] + Measured response to the point source. Complex valued. + resp_err : np.ndarray[nha,] + Error on the measured response. + width : float + Initial guess at the width (sigma) of the transit in degrees. + absolute_sigma : bool + Set to True if the errors provided are absolute. Set to False if + the errors provided are relative, in which case the parameter covariance + will be scaled by the chi-squared per degree-of-freedom. + param0 : np.ndarray[nparam,] + Initial guess at the parameters for the Levenberg-Marquardt algorithm. + If these are not provided, then this function will make reasonable guesses. + + Returns + ------- + param : np.ndarray[nparam,] + Best-fit model parameters. + param_cov : np.ndarray[nparam, nparam] + Covariance of the best-fit model parameters. + chisq : float + Chi-squared of the fit. + ndof : int + Number of degrees of freedom of the fit. + """ + if ha.size < (min(self.npara, self.nparp) + 1): + raise RuntimeError("Number of data points less than number of parameters.") + + # We will fit the complex data. 
Break n-element complex array y(x) + # into 2n-element real array [Re{y(x)}, Im{y(x)}] for fit. + x = np.tile(ha, 2) + y = np.concatenate((resp.real, resp.imag)) + err = np.tile(resp_err, 2) + + # Initial estimate of parameter values: + # [peak_amplitude, centroid, fwhm, phi_0, phi_1, phi_2, ...] + if param0 is None: + param0 = [np.max(np.nan_to_num(np.abs(resp))), 0.0, 2.355 * width] + param0.append(np.median(np.nan_to_num(np.angle(resp, deg=True)))) + param0 += [0.0] * (self.nparp - 1) + param0 = np.array(param0) + + # Perform the fit. + param, param_cov = curve_fit( + self._get_fit_func(), + x, + y, + sigma=err, + p0=param0, + absolute_sigma=absolute_sigma, + jac=self._get_fit_jac(), + ) + + chisq = np.sum( + ( + np.abs(resp - self._model(ha, param=param)) + * tools.invert_no_zero(resp_err) + ) + ** 2 + ) + ndof = y.size - self.nparam + + return param, param_cov, chisq, ndof + +
+[docs] + def peak(self): + """Return the peak of the transit. + + Returns + ------- + peak : float + Centroid of the gaussian fit to amplitude. + """ + return self.param[..., 1]
+ + + def _get_fit_func(self): + """Generates a function that can be used by `curve_fit` to compute the model.""" + + def fit_func(x, *param): + """Function used by `curve_fit` to compute the model. + + Parameters + ---------- + x : np.ndarray[2 * nha,] + Hour angle in degrees replicated twice for the real + and imaginary components, i.e., `x = np.concatenate((ha, ha))`. + *param : floats + Parameters of the model. + + Returns + ------- + model : np.ndarray[2 * nha,] + Model for the complex valued point source response, + packaged as `np.concatenate((model.real, model.imag))`. + """ + peak_amplitude, centroid, fwhm = param[:3] + poly_coeff = param[3:] + + nreal = len(x) // 2 + xr = x[:nreal] + + dxr = _correct_phase_wrap(xr - centroid) + + model_amp = peak_amplitude * np.exp(-4.0 * np.log(2.0) * (dxr / fwhm) ** 2) + model_phase = self._eval(xr, poly_coeff) + + model = np.concatenate( + (model_amp * np.cos(model_phase), model_amp * np.sin(model_phase)) + ) + + return model + + return fit_func + + def _get_fit_jac(self): + """Generates a function that can be used by `curve_fit` to compute jacobian of the model.""" + + def fit_jac(x, *param): + """Function used by `curve_fit` to compute the jacobian. + + Parameters + ---------- + x : np.ndarray[2 * nha,] + Hour angle in degrees. Replicated twice for the real + and imaginary components, i.e., `x = np.concatenate((ha, ha))`. + *param : float + Parameters of the model. 
+ + Returns + ------- + jac : np.ndarray[2 * nha, nparam] + The jacobian defined as + jac[i, j] = d(model(ha)) / d(param[j]) evaluated at ha[i] + """ + + peak_amplitude, centroid, fwhm = param[:3] + poly_coeff = param[3:] + + nparam = len(param) + nx = len(x) + nreal = nx // 2 + + jac = np.empty((nx, nparam), dtype=x.dtype) + + dx = _correct_phase_wrap(x - centroid) + + dxr = dx[:nreal] + xr = x[:nreal] + + model_amp = peak_amplitude * np.exp(-4.0 * np.log(2.0) * (dxr / fwhm) ** 2) + model_phase = self._eval(xr, poly_coeff) + model = np.concatenate( + (model_amp * np.cos(model_phase), model_amp * np.sin(model_phase)) + ) + + dmodel_dphase = np.concatenate((-model[nreal:], model[:nreal])) + + jac[:, 0] = tools.invert_no_zero(peak_amplitude) * model + jac[:, 1] = 8.0 * np.log(2.0) * dx * tools.invert_no_zero(fwhm) ** 2 * model + jac[:, 2] = ( + 8.0 * np.log(2.0) * dx**2 * tools.invert_no_zero(fwhm) ** 3 * model + ) + jac[:, 3:] = ( + self._vander(x, self.poly_deg_phi) * dmodel_dphase[:, np.newaxis] + ) + + return jac + + return fit_jac + + def _model(self, ha, param=None, elementwise=False): + if param is None: + param = self.param + + # Evaluate phase + model_phase = self._fast_eval( + ha, param[..., self.npara :], elementwise=elementwise + ) + + # Evaluate amplitude + amp_param = param[..., : self.npara] + ndim1 = amp_param.ndim + if not elementwise and (ndim1 > 1) and not np.isscalar(ha): + ndim2 = ha.ndim + amp_param = amp_param[(slice(None),) * ndim1 + (None,) * ndim2] + ha = ha[(None,) * (ndim1 - 1) + (slice(None),) * ndim2] + + slc = (slice(None),) * (ndim1 - 1) + peak_amplitude = amp_param[slc + (0,)] + centroid = amp_param[slc + (1,)] + fwhm = amp_param[slc + (2,)] + + dha = _correct_phase_wrap(ha - centroid) + + model_amp = peak_amplitude * np.exp(-4.0 * np.log(2.0) * (dha / fwhm) ** 2) + + # Return complex valued quantity + return model_amp * (np.cos(model_phase) + 1.0j * np.sin(model_phase)) + + def _jacobian_amp(self, ha, elementwise=False): + amp_param 
= self.param[..., : self.npara] + + shp = amp_param.shape + ndim1 = amp_param.ndim + + if not elementwise: + shp = shp + ha.shape + + if ndim1 > 1: + ndim2 = ha.ndim + amp_param = amp_param[(slice(None),) * ndim1 + (None,) * ndim2] + ha = ha[(None,) * (ndim1 - 1) + (slice(None),) * ndim2] + + slc = (slice(None),) * (ndim1 - 1) + peak_amplitude = amp_param[slc + (0,)] + centroid = amp_param[slc + (1,)] + fwhm = amp_param[slc + (2,)] + + dha = _correct_phase_wrap(ha - centroid) + + jac = np.zeros(shp, dtype=ha.dtype) + jac[slc + (0,)] = tools.invert_no_zero(peak_amplitude) + jac[slc + (1,)] = 8.0 * np.log(2.0) * dha * tools.invert_no_zero(fwhm) ** 2 + jac[slc + (2,)] = 8.0 * np.log(2.0) * dha**2 * tools.invert_no_zero(fwhm) ** 3 + + return jac + + def _jacobian_phi(self, ha, elementwise=False): + jac = self._vander(ha, self.poly_deg_phi) + if not elementwise: + jac = np.rollaxis(jac, -1) + if self.N is not None: + slc = (None,) * len(self.N) + jac = jac[slc] + + return jac + + @property + def parameter_names(self): + """Array of strings containing the name of the fit parameters.""" + return np.array( + ["peak_amplitude", "centroid", "fwhm"] + + ["%s_poly_phi_coeff%d" % (self.poly_type, p) for p in range(self.nparp)], + dtype=np.string_, + ) + + @property + def ndofa(self): + """ + Number of degrees of freedom for the amplitude fit. + + Returns + ------- + ndofa : np.ndarray[...] + Number of degrees of freedom of the amplitude fit. + """ + return self.ndof[..., 0] + + @property + def ndofp(self): + """ + Number of degrees of freedom for the phase fit. + + Returns + ------- + ndofp : np.ndarray[...] + Number of degrees of freedom of the phase fit. + """ + return self.ndof[..., 0]
+ + + +def _propagate_uncertainty(jac, cov, tval): + """Propagate uncertainty on parameters to uncertainty on model prediction. + + Parameters + ---------- + jac : np.ndarray[..., nparam] (elementwise) or np.ndarray[..., nparam, nha] + The jacobian defined as + jac[..., i, j] = d(model(ha)) / d(param[i]) evaluated at ha[j] + cov : [..., nparam, nparam] + Covariance of model parameters. + tval : np.ndarray[...] + Quantile of a standardized Student's t random variable. + The 1-sigma uncertainties will be scaled by this value. + + Returns + ------- + err : np.ndarray[...] (elementwise) or np.ndarray[..., nha] + Uncertainty on the model. + """ + if jac.ndim == cov.ndim: + # Corresponds to non-elementwise analysis + df2 = np.sum(jac * np.matmul(cov, jac), axis=-2) + else: + # Corresponds to elementwise analysis + df2 = np.sum(jac * np.sum(cov * jac[..., np.newaxis], axis=-1), axis=-1) + + # Expand the tval array so that it can be broadcast against + # the sum squared error df2 + add_dim = df2.ndim - tval.ndim + if add_dim > 0: + tval = tval[(np.s_[...],) + (None,) * add_dim] + + return tval * np.sqrt(df2) + + +def _correct_phase_wrap(ha): + """Ensure hour angle is between -180 and 180 degrees. + + Parameters + ---------- + ha : np.ndarray or float + Hour angle in degrees. + + Returns + ------- + out : same as ha + Hour angle between -180 and 180 degrees. + """ + return ((ha + 180.0) % 360.0) - 180.0 + + +
def fit_point_source_map(
    ra,
    dec,
    submap,
    rms=None,
    dirty_beam=None,
    real_map=False,
    freq=600.0,
    ra0=None,
    dec0=None,
):
    """Fits a map of a point source to a model.

    Parameters
    ----------
    ra : np.ndarray[nra, ]
        Transit right ascension.
    dec : np.ndarray[ndec, ]
        Transit declination.
    submap : np.ndarray[..., nra, ndec]
        Region of the ringmap around the point source.
    rms : np.ndarray[..., nra]
        RMS error on the map.  Samples with rms == 0.0 are treated as
        flagged and excluded from the fit.
    dirty_beam : np.ndarray[..., nra, ndec] or [ra, dec, dirty_beam]
        Fourier transform of the weighting function used to create
        the map. If input, then the interpolated dirty beam will be used
        as the model for the point source response in the declination direction.
        Can either be an array that is the same size as submap, or a list/tuple
        of length 3 that contains [ra, dec, dirty_beam] since the shape of the
        dirty beam is likely to be larger than the shape of the subregion of the
        map, at least in the declination direction.
    real_map : bool
        If True (and a dirty beam was provided), fit the real-valued fringe
        model `func_real_dirty_gauss` instead of `func_dirty_gauss`.
    freq : float
        Frequency in MHz.  Only used to initialize the fringe-rate parameter
        of the real-map model.
    ra0 : float
        Initial guess for the right-ascension centroid.
        Defaults to the median of `ra`.
    dec0 : float
        Initial guess for the declination centroid.
        Defaults to the declination corresponding to the median of `el`.

    Returns
    -------
    param_name : list of str
        Names of the parameters.
    param : np.ndarray[..., nparam]
        Best-fit parameters for each item.
    param_cov : np.ndarray[..., nparam, nparam]
        Parameter covariance for each item.
    resid_rms : np.ndarray[...]
        Robust estimate (1.4826 * MAD) of the RMS of the fit residuals.
    """

    el = _dec_to_el(dec)

    # Check if dirty beam was input
    do_dirty = (dirty_beam is not None) and (
        (len(dirty_beam) == 3) or (dirty_beam.shape == submap.shape)
    )
    if do_dirty:
        if real_map:
            model = func_real_dirty_gauss
        else:
            model = func_dirty_gauss

        # Get parameter names through inspection.
        # NOTE: inspect.getargspec was removed in Python 3.11;
        # getfullargspec exposes the same `.args` attribute.
        param_name = inspect.getfullargspec(model(None)).args[1:]

        # Define dimensions of the dirty beam
        if len(dirty_beam) != 3:
            db_ra, db_dec, db = submap.ra, submap.dec, dirty_beam
        else:
            db_ra, db_dec, db = dirty_beam

        db_el = _dec_to_el(db_dec)

        # Define dimensions of the submap
        coord = [ra, el]

    else:
        model = func_2d_gauss
        param_name = inspect.getfullargspec(model).args[1:]

        # Create 1d vectors that span the (ra, dec) grid
        coord = [ra, dec]

    # Extract parameter names from function
    nparam = len(param_name)

    # Examine dimensions of input data
    dims = submap.shape
    ndims = len(dims)

    # If we are performing a single fit, then we need to recast shape to allow iteration
    if ndims == 2:
        submap = submap[np.newaxis, ...]
        if do_dirty:
            db = db[np.newaxis, ...]
        if rms is not None:
            rms = rms[np.newaxis, ...]

        dims = submap.shape

    dims = dims[0:-2]

    # Create arrays to hold best-fit parameters and
    # parameter covariance.  Initialize to NaN.
    param = np.full(dims + (nparam,), np.nan, dtype=np.float64)
    param_cov = np.full(dims + (nparam, nparam), np.nan, dtype=np.float64)
    resid_rms = np.full(dims, np.nan, dtype=np.float64)

    # Iterate over dimensions
    for index in np.ndindex(*dims):
        # Extract the RMS for this index.  In the process,
        # check for data flagged as bad (rms == 0.0).
        if rms is not None:
            good_ra = rms[index] > 0.0
            this_rms = np.tile(
                rms[index][good_ra, np.newaxis], [1, submap.shape[-1]]
            ).ravel()
        else:
            good_ra = np.ones(submap.shape[-2], dtype=bool)
            this_rms = None

        # Need more unflagged samples than parameters for the fit to be
        # constrained; otherwise leave the results as NaN.
        if np.sum(good_ra) <= nparam:
            continue

        # Extract map
        this_submap = submap[index][good_ra, :].ravel()
        this_coord = [coord[0][good_ra], coord[1]]

        # Specify initial estimates of parameter and parameter boundaries
        if ra0 is None:
            ra0 = np.median(ra)
        if dec0 is None:
            dec0 = _el_to_dec(np.median(el))
        offset0 = np.median(np.nan_to_num(this_submap))
        peak0 = np.max(np.nan_to_num(this_submap))

        p0_dict = {
            "peak_amplitude": peak0,
            "centroid_x": ra0,
            "centroid_y": dec0,
            "fwhm_x": 2.0,
            "fwhm_y": 2.0,
            "offset": offset0,
            "fringe_rate": 22.0 * freq * 1e6 / 3e8,
        }

        lb_dict = {
            "peak_amplitude": 0.0,
            "centroid_x": ra0 - 1.5,
            "centroid_y": dec0 - 0.75,
            "fwhm_x": 0.5,
            "fwhm_y": 0.5,
            "offset": offset0 - 2.0 * np.abs(offset0),
            "fringe_rate": -200.0,
        }

        ub_dict = {
            "peak_amplitude": 1.5 * peak0,
            "centroid_x": ra0 + 1.5,
            "centroid_y": dec0 + 0.75,
            "fwhm_x": 6.0,
            "fwhm_y": 6.0,
            "offset": offset0 + 2.0 * np.abs(offset0),
            "fringe_rate": 200.0,
        }

        p0 = np.array([p0_dict[key] for key in param_name])

        # NOTE: bounds are computed but deliberately not passed to curve_fit
        # below (the bounds keyword is commented out); kept for easy re-enable.
        bounds = (
            np.array([lb_dict[key] for key in param_name]),
            np.array([ub_dict[key] for key in param_name]),
        )

        # Define model
        if do_dirty:
            fdirty = interp1d(
                db_el,
                db[index][good_ra, :],
                axis=-1,
                copy=False,
                kind="cubic",
                bounds_error=False,
                fill_value=0.0,
            )
            this_model = model(fdirty)
        else:
            this_model = model

        # Perform the fit.  If there is an error,
        # then we leave parameter values as NaN.
        try:
            popt, pcov = curve_fit(
                this_model,
                this_coord,
                this_submap,
                p0=p0,
                sigma=this_rms,
                absolute_sigma=True,
            )  # , bounds=bounds)
        except Exception as error:
            print(
                "index %s: %s"
                % ("(" + ", ".join(["%d" % ii for ii in index]) + ")", error)
            )
            continue

        # Save the results
        param[index] = popt
        param_cov[index] = pcov

        # Calculate RMS of the residuals
        resid = this_submap - this_model(this_coord, *popt)
        resid_rms[index] = 1.4826 * np.median(np.abs(resid - np.median(resid)))

    # If this is a single fit, then remove singleton dimension
    if ndims == 2:
        param = param[0]
        param_cov = param_cov[0]
        resid_rms = resid_rms[0]
        submap = submap[0]
        if do_dirty:
            db = db[0]

    # Return the best-fit parameters and parameter covariance
    return param_name, param, param_cov, resid_rms
+ + + +
def func_2d_gauss(
    coord, peak_amplitude, centroid_x, centroid_y, fwhm_x, fwhm_y, offset
):
    """Parametric model for a point-source map: a 2D gaussian plus offset.

    Parameters
    ----------
    coord : (ra, dec)
        Tuple containing the right ascension and declination.  These should be
        coordinate vectors of length nra and ndec, respectively.
    peak_amplitude : float
        Model parameter.  Normalization of the gaussian.
    centroid_x : float
        Model parameter.  Centroid of the gaussian in degrees in the
        right ascension direction.
    centroid_y : float
        Model parameter.  Centroid of the gaussian in degrees in the
        declination direction.
    fwhm_x : float
        Model parameter.  Full width at half maximum of the gaussian
        in degrees in the right ascension direction.
    fwhm_y : float
        Model parameter.  Full width at half maximum of the gaussian
        in degrees in the declination direction.
    offset : float
        Model parameter.  Constant background value of the map.

    Returns
    -------
    model : np.ndarray[nra*ndec]
        Model prediction for the map of the point source.
    """
    x, y = coord

    # 4 ln 2 converts a FWHM into the gaussian exponent scale.
    scale = 4.0 * np.log(2.0)
    gauss_x = np.exp(-scale * ((x[:, np.newaxis] - centroid_x) / fwhm_x) ** 2)
    gauss_y = np.exp(-scale * ((y[np.newaxis, :] - centroid_y) / fwhm_y) ** 2)

    grid = peak_amplitude * gauss_x * gauss_y + offset

    return grid.ravel()
+ + + +
def func_2d_sinc_gauss(
    coord, peak_amplitude, centroid_x, centroid_y, fwhm_x, fwhm_y, offset
):
    """Parametric model for a point-source map: sinc in dec, gaussian in RA.

    Parameters
    ----------
    coord : (ra, dec)
        Tuple containing the right ascension and declination.  These should be
        coordinate vectors of length nra and ndec, respectively.
    peak_amplitude : float
        Model parameter.  Normalization of the gaussian.
    centroid_x : float
        Model parameter.  Centroid of the gaussian in degrees in the
        right ascension direction.
    centroid_y : float
        Model parameter.  Centroid of the sinc function in degrees in the
        declination direction.
    fwhm_x : float
        Model parameter.  Full width at half maximum of the gaussian
        in degrees in the right ascension direction.
    fwhm_y : float
        Model parameter.  Full width at half maximum of the sinc function
        in degrees in the declination direction.
    offset : float
        Model parameter.  Constant background value of the map.

    Returns
    -------
    model : np.ndarray[nra*ndec]
        Model prediction for the map of the point source.
    """
    x, y = coord

    envelope = np.exp(
        -4.0 * np.log(2.0) * ((x[:, np.newaxis] - centroid_x) / fwhm_x) ** 2
    )
    # 1.2075 scales the sinc argument so fwhm_y is its full width at half max.
    lobe = np.sinc(1.2075 * (y[np.newaxis, :] - centroid_y) / fwhm_y)

    grid = peak_amplitude * envelope * lobe + offset

    return grid.ravel()
+ + + +
def func_dirty_gauss(dirty_beam):
    """Build a point-source model: interpolated dirty beam in y, gaussian in x.

    This function is a wrapper that binds the interpolated dirty beam
    and returns the model function itself.

    Parameters
    ----------
    dirty_beam : scipy.interpolate.interp1d
        Interpolation function that takes as an argument el = sin(za)
        and outputs an np.ndarray[nel, nra] that represents the dirty
        beam evaluated at the same right ascension as the map.

    Returns
    -------
    dirty_gauss : callable
        Model function ``dirty_gauss(coord, peak_amplitude, centroid_x,
        centroid_y, fwhm_x, offset)`` returning the model map raveled to
        np.ndarray[nra*ndec].
    """

    def dirty_gauss(coord, peak_amplitude, centroid_x, centroid_y, fwhm_x, offset):
        """Evaluate the dirty-beam-times-gaussian model.

        Parameters
        ----------
        coord : [ra, el]
            Coordinate vectors of length nra and ndec, respectively.
        peak_amplitude : float
            Model parameter.  Normalization of the gaussian
            in the right ascension direction.
        centroid_x : float
            Model parameter.  Centroid of the gaussian in degrees in the
            right ascension direction.
        centroid_y : float
            Model parameter.  Centroid of the dirty beam in degrees in the
            declination direction.
        fwhm_x : float
            Model parameter.  Full width at half maximum of the gaussian
            in degrees in the right ascension direction.
        offset : float
            Model parameter.  Constant background value of the map.

        Returns
        -------
        model : np.ndarray[nra*ndec]
            Model prediction for the map of the point source.
        """
        x, y = coord

        gauss_x = np.exp(
            -4.0 * np.log(2.0) * ((x[:, np.newaxis] - centroid_x) / fwhm_x) ** 2
        )
        # Shift the dirty beam so its centre lands on the fitted declination.
        beam_y = dirty_beam(y - _dec_to_el(centroid_y))

        return (peak_amplitude * gauss_x * beam_y + offset).ravel()

    return dirty_gauss
+ + + +
def func_real_dirty_gauss(dirty_beam):
    """Build a point-source model: dirty beam in y, fringing gaussian in x.

    This function is a wrapper that binds the interpolated dirty beam
    and returns the model function itself.

    Parameters
    ----------
    dirty_beam : scipy.interpolate.interp1d
        Interpolation function that takes as an argument el = sin(za)
        and outputs an np.ndarray[nel, nra] that represents the dirty
        beam evaluated at the same right ascension as the map.

    Returns
    -------
    real_dirty_gauss : callable
        Model function ``real_dirty_gauss(coord, peak_amplitude, centroid_x,
        centroid_y, fwhm_x, offset, fringe_rate)`` returning the real-valued
        model map raveled to np.ndarray[nra*ndec].
    """

    def real_dirty_gauss(
        coord, peak_amplitude, centroid_x, centroid_y, fwhm_x, offset, fringe_rate
    ):
        """Evaluate the dirty-beam model with a fringing gaussian envelope.

        Parameters
        ----------
        coord : [ra, el]
            Coordinate vectors of length nra and ndec, respectively.
        peak_amplitude : float
            Model parameter.  Normalization of the gaussian
            in the right ascension direction.
        centroid_x : float
            Model parameter.  Centroid of the gaussian in degrees in the
            right ascension direction.
        centroid_y : float
            Model parameter.  Centroid of the dirty beam in degrees in the
            declination direction.
        fwhm_x : float
            Model parameter.  Full width at half maximum of the gaussian
            in degrees in the right ascension direction.
        offset : float
            Model parameter.  Constant background value of the map.
        fringe_rate : float
            Model parameter.  Frequency of the sinusoid.

        Returns
        -------
        model : np.ndarray[nra*ndec]
            Model prediction for the map of the point source.
        """
        x, y = coord

        envelope = (
            peak_amplitude
            * np.exp(
                -4.0 * np.log(2.0) * ((x[:, np.newaxis] - centroid_x) / fwhm_x) ** 2
            )
            * dirty_beam(y - _dec_to_el(centroid_y))
        ) + offset

        # Complex fringe pattern along the right-ascension direction.
        fringe = np.exp(
            2.0j
            * np.pi
            * np.cos(np.radians(centroid_y))
            * np.sin(-np.radians(x - centroid_x))
            * fringe_rate
        )

        return (envelope * fringe[:, np.newaxis]).real.ravel()

    return real_dirty_gauss
+ + + +
def guess_fwhm(freq, pol="X", dec=None, sigma=False, voltage=False, seconds=False):
    """Provide rough estimate of the FWHM of the CHIME primary beam pattern.

    It uses a linear fit to the median FWHM(nu) over all feeds of a given
    polarization for CygA transits.  CasA and TauA transits also showed
    good agreement with this relationship.

    Parameters
    ----------
    freq : float or np.ndarray
        Frequency in MHz.
    pol : string or bool
        Polarization, can be 'X'/'E' or 'Y'/'S'
    dec : float
        Declination of the source in radians.  If this quantity
        is input, then the FWHM is divided by cos(dec) to account
        for the increased rate at which a source rotates across
        the sky.  Default is do not correct for this effect.
    sigma : bool
        Return the standard deviation instead of the FWHM.
        Default is to return the FWHM.
    voltage : bool
        Return the value for a voltage beam, otherwise returns
        value for a power beam.
    seconds : bool
        Convert to elapsed time in units of seconds.
        Otherwise returns in units of degrees on the sky.

    Returns
    -------
    fwhm : float or np.ndarray
        Rough estimate of the FWHM (or standard deviation if sigma=True).
    """
    # Polynomial coefficients of sigma(freq), chosen by polarization.
    if pol in ("Y", "S"):
        coeff = [1.226e-06, -0.004097, 3.790]
    else:
        coeff = [7.896e-07, -0.003226, 3.717]

    # Estimate standard deviation
    width = np.polyval(coeff, freq)

    # Divide by declination to convert to degrees hour angle
    if dec is not None:
        width /= np.cos(dec)

    # If requested, convert degrees on the sky to seconds of elapsed time
    if seconds:
        width /= 360.0 / (24.0 * 3600.0)

    # A voltage beam is wider than a power beam by sqrt(2)
    if voltage:
        width *= np.sqrt(2)

    # Convert standard deviation to FWHM unless sigma was explicitly requested
    if not sigma:
        width *= 2.35482

    return width
+ + + +
def estimate_directional_scale(z, c=2.1):
    """Calculate robust, direction dependent estimate of scale.

    Parameters
    ----------
    z : np.ndarray
        1D array containing the data.
    c : float
        Cutoff in number of MAD.  Data points whose absolute value is
        larger than c * MAD from the median are saturated at the
        maximum value in the estimator.

    Returns
    -------
    zmed : float
        The median value of z.
    sa : float
        Estimate of scale for z <= zmed.
    sb : float
        Estimate of scale for z > zmed.
    """
    zmed = np.median(z)
    resid = z - zmed

    def _truncated_scale(dx, c=2.1):
        # Huber-style rho estimate: residuals beyond c MADs saturate at 1.
        num = float(dx.size)
        mad_scale = 1.4826 * np.median(np.abs(dx))

        normed = dx * tools.invert_no_zero(mad_scale)

        rho = (normed / c) ** 2
        rho[rho > 1.0] = 1.0

        return 1.54 * mad_scale * np.sqrt(2.0 * np.sum(rho) / num)

    # Note both halves include the samples exactly at the median.
    below = resid[resid <= 0.0]
    above = resid[resid >= 0.0]

    return zmed, _truncated_scale(below, c=c), _truncated_scale(above, c=c)
+ + + +
def fit_histogram(
    arr,
    bins="auto",
    rng=None,
    no_weight=False,
    test_normal=False,
    return_histogram=False,
):
    """
    Fit a gaussian to a histogram of the data.

    Parameters
    ----------
    arr : np.ndarray
        1D array containing the data.  Arrays with more than one dimension are flattened.
    bins : int or sequence of scalars or str
        - If `bins` is an int, it defines the number of equal-width bins in `rng`.
        - If `bins` is a sequence, it defines a monotonically increasing array of bin edges,
          including the rightmost edge, allowing for non-uniform bin widths.
        - If `bins` is a string, it defines a method for computing the bins.
    rng : (float, float)
        The lower and upper range of the bins.  If not provided, then the range spans
        the minimum to maximum value of `arr`.
    no_weight : bool
        Give equal weighting to each histogram bin.  Otherwise use proper weights based
        on number of counts observed in each bin.
    test_normal : bool
        Apply the Shapiro-Wilk and Anderson-Darling tests for normality to the data.
    return_histogram : bool
        Return the histogram.  Otherwise return only the best fit parameters and test statistics.

    Returns
    -------
    results : dict
        Dictionary containing the following fields:
        indmin : int
            Only bins whose index is greater than indmin were included in the fit.
        indmax : int
            Only bins whose index is less than indmax were included in the fit.
        xmin : float
            The data value corresponding to the centre of the `indmin` bin.
        xmax : float
            The data value corresponding to the centre of the `indmax` bin.
        par : [float, float, float]
            The parameters of the fit, ordered as [peak, mu, sigma].
        chisq : float
            The chi-squared of the fit.
        ndof : int
            The number of degrees of freedom of the fit.
        pte : float
            The probability to observe the chi-squared of the fit.

        If `return_histogram` is True, then `results` will also contain the
        following fields:

        bin_centre : np.ndarray
            The bin centre of the histogram.
        bin_count : np.ndarray
            The bin counts of the histogram.

        If `test_normal` is True, then `results` will also contain the following fields:

        shapiro : dict
            stat : float
                The Shapiro-Wilk test statistic.
            pte : float
                The probability to observe `stat` if the data were drawn from a gaussian.
        anderson : dict
            stat : float
                The Anderson-Darling test statistic.
            critical : list of float
                The critical values of the test statistic.
            alpha : list of float
                The significance levels corresponding to each critical value.
            pass : list of bool
                Boolean indicating if the data passes the test for each critical value.
    """
    # Make sure the data is 1D
    data = np.ravel(arr)

    # Histogram the data
    count, xbin = np.histogram(data, bins=bins, range=rng)
    cbin = 0.5 * (xbin[0:-1] + xbin[1:])

    cbin = cbin.astype(np.float64)
    count = count.astype(np.float64)

    # Form initial guess at parameter values using median and MAD
    nparams = 3
    par0 = np.zeros(nparams, dtype=np.float64)
    par0[0] = np.max(count)
    par0[1] = np.median(data)
    par0[2] = 1.48625 * np.median(np.abs(data - par0[1]))

    # Find the first zero points on either side of the median.
    # Walk left from the bin nearest the median until an empty bin
    # (or the left edge) stops the walk; the boolean added afterwards
    # steps back onto the last non-empty bin if we stopped on an empty one.
    cont = True
    indmin = np.argmin(np.abs(cbin - par0[1]))
    while cont:
        indmin -= 1
        cont = (count[indmin] > 0.0) and (indmin > 0)
    indmin += count[indmin] == 0.0

    # Same walk to the right of the median.
    cont = True
    indmax = np.argmin(np.abs(cbin - par0[1]))
    while cont:
        indmax += 1
        cont = (count[indmax] > 0.0) and (indmax < (len(count) - 1))
    indmax -= count[indmax] == 0.0

    # Restrict range of fit to between zero points
    x = cbin[indmin : indmax + 1]
    y = count[indmin : indmax + 1]
    # Multinomial error on the counts in each bin.
    yerr = np.sqrt(y * (1.0 - y / np.sum(y)))

    sigma = None if no_weight else yerr

    # Require positive values of amp and sigma
    bnd = (np.array([0.0, -np.inf, 0.0]), np.array([np.inf, np.inf, np.inf]))

    # Define the fitting function
    def gauss(x, peak, mu, sigma):
        return peak * np.exp(-((x - mu) ** 2) / (2.0 * sigma**2))

    # Perform the fit
    par, var_par = curve_fit(
        gauss,
        cbin[indmin : indmax + 1],
        count[indmin : indmax + 1],
        p0=par0,
        sigma=sigma,
        absolute_sigma=(not no_weight),
        bounds=bnd,
        method="trf",
    )

    # Calculate quality of fit
    chisq = np.sum(((y - gauss(x, *par)) / yerr) ** 2)
    ndof = np.size(y) - nparams
    pte = 1.0 - scipy.stats.chi2.cdf(chisq, ndof)

    # Store results in dictionary
    results_dict = {}
    results_dict["indmin"] = indmin
    results_dict["indmax"] = indmax
    results_dict["xmin"] = cbin[indmin]
    results_dict["xmax"] = cbin[indmax]
    results_dict["par"] = par
    results_dict["chisq"] = chisq
    results_dict["ndof"] = ndof
    results_dict["pte"] = pte

    if return_histogram:
        results_dict["bin_centre"] = cbin
        results_dict["bin_count"] = count

    # If requested, test normality of the main distribution.
    # Only samples between the fitted zero points are tested.
    if test_normal:
        flag = (data > cbin[indmin]) & (data < cbin[indmax])
        shap_stat, shap_pte = scipy.stats.shapiro(data[flag])

        results_dict["shapiro"] = {}
        results_dict["shapiro"]["stat"] = shap_stat
        results_dict["shapiro"]["pte"] = shap_pte

        ander_stat, ander_crit, ander_signif = scipy.stats.anderson(
            data[flag], dist="norm"
        )

        results_dict["anderson"] = {}
        results_dict["anderson"]["stat"] = ander_stat
        results_dict["anderson"]["critical"] = ander_crit
        results_dict["anderson"]["alpha"] = ander_signif
        results_dict["anderson"]["pass"] = ander_stat < ander_crit

    # Return dictionary
    return results_dict
+ + + +def _sliding_window(arr, window): + # Advanced numpy tricks + shape = arr.shape[:-1] + (arr.shape[-1] - window + 1, window) + strides = arr.strides + (arr.strides[-1],) + return np.lib.stride_tricks.as_strided(arr, shape=shape, strides=strides) + + +
def flag_outliers(raw, flag, window=25, nsigma=5.0):
    """Flag outliers with respect to rolling median.

    Parameters
    ----------
    raw : np.ndarray[nsample,]
        Raw data sampled at fixed rate.  Use the `flag` parameter to indicate
        missing or invalid data.
    flag : np.ndarray[nsample,]
        Boolean array where True indicates valid data and False indicates
        invalid data.
    window : int
        Window size (in number of samples) used to determine local median.
    nsigma : float
        Data is considered an outlier if it is greater than this number of
        median absolute deviations away from the local median.

    Returns
    -------
    not_outlier : np.ndarray[nsample,]
        Boolean array where True indicates valid data and False indicates data
        that is either an outlier or had flag = True.
    """
    # Force an even window size.
    if window % 2:
        window += 1

    nraw = raw.size
    dtype = raw.dtype

    def _rolling_view(series, width):
        # Zero-copy sliding-window view over the last axis.
        shp = series.shape[:-1] + (series.shape[-1] - width + 1, width)
        std = series.strides + (series.strides[-1],)
        return np.lib.stride_tricks.as_strided(series, shape=shp, strides=std)

    def _nanmedian_filter(series, width):
        # Rolling nan-median; NaN padding at the edges keeps the output
        # the same length as the input.
        half = width // 2 - 1
        padded = np.concatenate(
            (
                np.full((half,), np.nan, dtype=dtype),
                series,
                np.full((half + 1,), np.nan, dtype=dtype),
            )
        )
        return np.nanmedian(_rolling_view(padded, width), axis=-1)

    # Replace flagged samples with NaN so they are ignored by nanmedian.
    valid = np.flatnonzero(flag)
    data = np.full((nraw,), np.nan, dtype=dtype)
    data[valid] = raw[valid]

    # Local baseline from a rolling median.
    smooth = _nanmedian_filter(data, window)

    # Absolute residual about the local baseline.
    deviation = np.abs(data - smooth)

    # Robust (MAD-based) local scatter of the residuals, estimated over a
    # much wider window.
    scale = 1.4826 * _nanmedian_filter(deviation, 9 * window)

    # NaN comparisons evaluate False, so flagged samples stay rejected.
    return deviation < (nsigma * scale)
+ + + +
def interpolate_gain(freq, gain, weight, flag=None, length_scale=30.0):
    """Replace gain at flagged frequencies with interpolated values.

    Uses a gaussian process regression to perform the interpolation
    with a Matern function describing the covariance between frequencies.

    Parameters
    ----------
    freq : np.ndarray[nfreq,]
        Frequencies in MHz.
    gain : np.ndarray[nfreq, ninput]
        Complex gain for each input and frequency.
    weight : np.ndarray[nfreq, ninput]
        Uncertainty on the complex gain, expressed as inverse variance.
    flag : np.ndarray[nfreq, ninput]
        Boolean array indicating the good (True) and bad (False) gains.
        If not provided, then it will be determined by evaluating `weight > 0.0`.
    length_scale : float
        Correlation length in frequency in MHz.

    Returns
    -------
    interp_gain : np.ndarray[nfreq, ninput]
        For frequencies with `flag = True`, this will be equal to gain.  For frequencies with
        `flag = False`, this will be an interpolation of the gains with `flag = True`.
    interp_weight : np.ndarray[nfreq, ninput]
        For frequencies with `flag = True`, this will be equal to weight.  For frequencies with
        `flag = False`, this will be the expected uncertainty on the interpolation.
    """
    from sklearn import gaussian_process
    from sklearn.gaussian_process.kernels import Matern, ConstantKernel

    if flag is None:
        flag = weight > 0.0

    nfreq, ninput = gain.shape

    iscomplex = np.any(np.iscomplex(gain))

    # Outputs start as copies; only flagged samples are overwritten below.
    interp_gain = gain.copy()
    interp_weight = weight.copy()

    # Per-sample noise variance, passed to the regressor's kernel diagonal.
    alpha = tools.invert_no_zero(weight)

    # sklearn expects 2D (nsample, nfeature) inputs.
    x = freq.reshape(-1, 1)

    for ii in range(ninput):
        train = np.flatnonzero(flag[:, ii])
        test = np.flatnonzero(~flag[:, ii])

        if train.size > 0:
            xtest = x[test, :]

            xtrain = x[train, :]
            if iscomplex:
                # Fit real and imaginary parts jointly as two regression targets.
                ytrain = np.hstack(
                    (gain[train, ii, np.newaxis].real, gain[train, ii, np.newaxis].imag)
                )
            else:
                ytrain = gain[train, ii, np.newaxis].real

            # Mean subtract
            ytrain_mu = np.mean(ytrain, axis=0, keepdims=True)
            ytrain = ytrain - ytrain_mu

            # Get initial estimate of variance (1.4826 * MAD, squared and
            # summed over the real/imag targets).
            var = 0.5 * np.sum(
                (
                    1.4826
                    * np.median(
                        np.abs(ytrain - np.median(ytrain, axis=0, keepdims=True)),
                        axis=0,
                    )
                )
                ** 2
            )

            # Define kernel
            kernel = ConstantKernel(
                constant_value=var, constant_value_bounds=(0.01 * var, 100.0 * var)
            ) * Matern(length_scale=length_scale, length_scale_bounds="fixed", nu=1.5)

            # Regress against non-flagged data
            gp = gaussian_process.GaussianProcessRegressor(
                kernel=kernel, alpha=alpha[train, ii]
            )

            gp.fit(xtrain, ytrain)

            # Predict error
            ypred, err_ypred = gp.predict(xtest, return_std=True)

            # When the gains are not complex, ypred will have a single dimension for
            # sklearn version 1.1.2, but will have a second dimension of length 1 for
            # earlier versions.  The line below ensures consistent behavior.
            if ypred.ndim == 1:
                ypred = ypred[:, np.newaxis]

            # Add the subtracted mean back in when filling flagged samples.
            interp_gain[test, ii] = ypred[:, 0] + ytrain_mu[:, 0]
            if iscomplex:
                interp_gain[test, ii] += 1.0j * (ypred[:, 1] + ytrain_mu[:, 1])

            # When the gains are complex, err_ypred will have a second dimension
            # of length 2 for sklearn version 1.1.2, but will have a single dimension
            # for earlier versions.  The line below ensures consistent behavior.
            if err_ypred.ndim > 1:
                err_ypred = np.sqrt(np.sum(err_ypred**2, axis=-1) / err_ypred.shape[-1])

            interp_weight[test, ii] = tools.invert_no_zero(err_ypred**2)

        else:
            # No valid data
            interp_gain[:, ii] = 0.0 + 0.0j
            interp_weight[:, ii] = 0.0

    return interp_gain, interp_weight
+ + + +
def interpolate_gain_quiet(*args, **kwargs):
    """Call `interpolate_gain` with `ConvergenceWarnings` silenced.

    Accepts and passes all arguments and keyword arguments for
    `interpolate_gain`, and passes its return value through unchanged.
    """
    import warnings

    from sklearn.exceptions import ConvergenceWarning

    with warnings.catch_warnings():
        # Suppress only sklearn's convergence complaints; everything
        # else propagates normally.
        warnings.filterwarnings(
            "ignore", category=ConvergenceWarning, module="sklearn"
        )
        return interpolate_gain(*args, **kwargs)
+ + + +
def thermal_amplitude(delta_T, freq):
    """Compute the amplitude gain correction of the thermal model.

    Parameters
    ----------
    delta_T : float or array of floats
        Temperature difference (T - T_0) for which to find a gain correction.
    freq : float or array of floats
        Frequencies in MHz.

    Returns
    -------
    g : float or array of floats
        Gain amplitude corrections.  Multiply by data to correct it.
    """
    # Frequency-dependent slope of the fractional gain drift per degree,
    # from a quadratic fit in frequency.
    slope = np.polyval([-4.28268629e-09, 8.39576400e-06, -2.00612389e-03], freq)

    return 1.0 + slope * delta_T
def _el_to_dec(el):
    """Convert from el = sin(zenith angle) to declination in degrees."""
    za_deg = np.degrees(np.arcsin(el))
    return za_deg + ephemeris.CHIMELATITUDE


def _dec_to_el(dec):
    """Convert from declination in degrees to el = sin(zenith angle)."""
    za_rad = np.radians(dec - ephemeris.CHIMELATITUDE)
    return np.sin(za_rad)
def get_reference_times_file(
    times: np.ndarray,
    cal_file: memh5.MemGroup,
    logger: Optional[logging.Logger] = None,
) -> Dict[str, np.ndarray]:
    """For a given set of times determine when and how they were calibrated.

    This uses the pre-calculated calibration time reference files.

    Parameters
    ----------
    times
        Unix times of data points to be calibrated as floats.
    cal_file
        memh5 container which contains the reference times for calibration source
        transits.
    logger
        A logging object to use for messages.  If not provided, use a module level
        logger.

    Returns
    -------
    reftime_result : dict
        A dictionary containing four entries:

        - reftime: Unix time of same length as `times`.  Reference times of transit of the
          source used to calibrate the data at each time in `times`.  Returns `NaN` for
          times without a reference.
        - reftime_prev: The Unix time of the previous gain update.  Only set for time
          samples that need to be interpolated, otherwise `NaN`.
        - interp_start: The Unix time of the start of the interpolation period.  Only
          set for time samples that need to be interpolated, otherwise `NaN`.
        - interp_stop: The Unix time of the end of the interpolation period.  Only
          set for time samples that need to be interpolated, otherwise `NaN`.
    """

    if logger is None:
        logger = logging.getLogger(__name__)

    # Data from calibration file.
    # is_restart codes: 0 = gain update, 1 = kotekan/acquisition restart,
    # 2 = FPGA restart (inferred from the comparisons below).
    is_restart = cal_file["is_restart"][:]
    tref = cal_file["tref"][:]
    tstart = cal_file["tstart"][:]
    tend = cal_file["tend"][:]
    # Length of calibration file and of data points
    n_cal_file = len(tstart)
    ntimes = len(times)

    # Len of times, indices in cal_file.
    last_start_index = np.searchsorted(tstart, times, side="right") - 1
    # Len of times, indices in cal_file.
    last_end_index = np.searchsorted(tend, times, side="right") - 1
    # Check for times before first update or after last update.
    too_early = last_start_index < 0
    n_too_early = np.sum(too_early)
    if n_too_early > 0:
        msg = (
            "{0} out of {1} time entries have no reference update."
            + "Cannot correct gains for those entries."
        )
        logger.warning(msg.format(n_too_early, ntimes))
    # For times after the last update, I cannot be sure the calibration is valid
    # (could be that the cal file is incomplete. To be conservative, raise warning.)
    too_late = (last_start_index >= (n_cal_file - 1)) & (
        last_end_index >= (n_cal_file - 1)
    )
    n_too_late = np.sum(too_late)
    if n_too_late > 0:
        msg = (
            "{0} out of {1} time entries are beyond calibration file time values."
            + "Cannot correct gains for those entries."
        )
        logger.warning(msg.format(n_too_late, ntimes))

    # Array to contain reference times for each entry.
    # NaN for entries with no reference time.
    reftime = np.full(ntimes, np.nan, dtype=np.float64)
    # Array to hold reftimes of previous updates
    # (for entries that need interpolation).
    reftime_prev = np.full(ntimes, np.nan, dtype=np.float64)
    # Arrays to hold start and stop times of gain transition
    # (for entries that need interpolation).
    interp_start = np.full(ntimes, np.nan, dtype=np.float64)
    interp_stop = np.full(ntimes, np.nan, dtype=np.float64)

    # Acquisition restart.  We load an old gain.
    acqrestart = is_restart[last_start_index] == 1
    reftime[acqrestart] = tref[last_start_index][acqrestart]

    # FPGA restart.  Data not calibrated.
    # There shouldn't be any time points here.  Raise a warning if there are.
    fpga_restart = is_restart[last_start_index] == 2
    n_fpga_restart = np.sum(fpga_restart)
    if n_fpga_restart > 0:
        msg = (
            "{0} out of {1} time entries are after an FPGA restart but before the "
            + "next kotekan restart. Cannot correct gains for those entries."
        )
        logger.warning(msg.format(n_fpga_restart, ntimes))

    # This is a gain update
    gainupdate = is_restart[last_start_index] == 0

    # This is the simplest case.  Last update was a gain update and
    # it is finished.  No need to interpolate.
    calrange = (last_start_index == last_end_index) & gainupdate
    reftime[calrange] = tref[last_start_index][calrange]

    # The next cases might need interpolation.  Last update was a gain
    # update and it is *NOT* finished.  Update is in transition.
    gaintrans = last_start_index == (last_end_index + 1)

    # This update is in gain transition and previous update was an
    # FPGA restart.  Just use new gain, no interpolation.
    prev_is_fpga = is_restart[last_start_index - 1] == 2
    prev_is_fpga = prev_is_fpga & gaintrans & gainupdate
    reftime[prev_is_fpga] = tref[last_start_index][prev_is_fpga]

    # The next two cases need interpolation of gain corrections.
    # It's not possible to correct interpolated gains because the
    # products have been stacked.  Just interpolate the gain
    # corrections to avoid a sharp transition.

    # This update is in gain transition and previous update was a
    # Kotekan restart.  Need to interpolate gain corrections.
    prev_is_kotekan = is_restart[last_start_index - 1] == 1
    to_interpolate = prev_is_kotekan & gaintrans & gainupdate

    # This update is in gain transition and previous update was a
    # gain update.  Need to interpolate.
    prev_is_gain = is_restart[last_start_index - 1] == 0
    to_interpolate = to_interpolate | (prev_is_gain & gaintrans & gainupdate)

    # Reference time of this update
    reftime[to_interpolate] = tref[last_start_index][to_interpolate]
    # Reference time of previous update
    reftime_prev[to_interpolate] = tref[last_start_index - 1][to_interpolate]
    # Start and stop times of gain transition.
    interp_start[to_interpolate] = tstart[last_start_index][to_interpolate]
    interp_stop[to_interpolate] = tend[last_start_index][to_interpolate]

    # For times too early or too late, don't correct gain.
    # This might mean we don't correct gains right after the last update
    # that could in principle be corrected.  But there is no way to know
    # if the calibration file is up-to-date and the last update applies
    # to all entries that come after it.
    reftime[too_early | too_late] = np.nan

    # Test for un-identified NaNs
    known_bad_times = (too_early) | (too_late) | (fpga_restart)
    n_bad_times = np.sum(~np.isfinite(reftime[~known_bad_times]))
    if n_bad_times > 0:
        msg = (
            "{0} out of {1} time entries don't have a reference calibration time "
            + "without an identifiable cause. Cannot correct gains for those entries."
        )
        logger.warning(msg.format(n_bad_times, ntimes))

    # Bundle result in dictionary
    result = {
        "reftime": reftime,
        "reftime_prev": reftime_prev,
        "interp_start": interp_start,
        "interp_stop": interp_stop,
    }

    return result
+ + + +
+[docs] +def get_reference_times_dataset_id( + times: np.ndarray, + dataset_ids: np.ndarray, + logger: Optional[logging.Logger] = None, +) -> Dict[str, Union[np.ndarray, Dict]]: + """Calculate the relevant calibration reference times from the dataset IDs. + + .. warning:: + Dataset IDs before 2020/10/10 are corrupt so this routine won't work. + + Parameters + ---------- + times + Unix times of data points to be calibrated as floats. + dataset_ids + The dataset IDs as an array of strings. + logger + A logging object to use for messages. If not provided, use a module level + logger. + + Returns + ------- + reftime_result + A dictionary containing the results. See `get_reference_times_file` for a + description of the contents. + """ + if logger is None: + logger = logging.getLogger(__name__) + + # Dataset IDs before this date are untrustworthy + ds_start = ctime.datetime_to_unix(datetime(2020, 11, 1)) + if (times < ds_start).any(): + raise ValueError( + "Dataset IDs before 2020/11/01 are corrupt, so this method won't work. " + f"You passed in a time as early as {ctime.unix_to_datetime(times.min())}." + ) + + # The CHIME calibration sources + _source_dict = { + "cyga": ephemeris.CygA, + "casa": ephemeris.CasA, + "taua": ephemeris.TauA, + "vira": ephemeris.VirA, + } + + # Get the set of gain IDs for each time stamp + gain_ids = state_id_of_type(dataset_ids, "gains") + collapsed_ids = unique_unmasked_entry(gain_ids, axis=0) + unique_gains_ids = np.unique(collapsed_ids.compressed()) + + gain_info_dict = {} + + # For each gain update extract all the relevant information + for state_id in unique_gains_ids: + d = {} + gain_info_dict[state_id] = d + + # Extract the update ID + update_id = ds.DatasetState.from_id(state_id).data["data"]["update_id"] + + # Parse the ID for the required information + split_id = update_id.split("_") + # After restart we sometimes have only a timing update without a source + # reference. 
These aren't valid for our purposes here, and can be distinguished + # at the update_id doesn't contain source information, and is thus shorter + d["valid"] = any([src in split_id for src in _source_dict.keys()]) + d["interpolated"] = "transition" in split_id + # If it's not a valid update we shouldn't try to extract everything else + if not d["valid"]: + continue + + d["gen_time"] = ctime.datetime_to_unix(ctime.timestr_to_datetime(split_id[1])) + d["source_name"] = split_id[2].lower() + + # Calculate the source transit time, and sanity check it + source = _source_dict[d["source_name"]] + d["source_transit"] = ephemeris.transit_times( + source, d["gen_time"] - 24 * 3600.0 + ) + cal_diff_hours = (d["gen_time"] - d["source_transit"]) / 3600 + if cal_diff_hours > 3: + logger.warn( + f"Transit time ({ctime.unix_to_datetime(d['source_transit'])}) " + f"for source {d['source_name']} was a surprisingly long time " + f"before the gain update time ({cal_diff_hours} hours)." + ) + + # Array to store the extracted times in + reftime = np.zeros(len(collapsed_ids), dtype=np.float64) + reftime_prev = np.zeros(len(collapsed_ids), dtype=np.float64) + interp_start = np.zeros(len(collapsed_ids), dtype=np.float64) + interp_stop = np.zeros(len(collapsed_ids), dtype=np.float64) + + # Iterate forward through the updates, setting transit times, and keeping track of + # the last valid update. 
This is used to set the previous source transit and the + # interpolation start time for all blended updates + last_valid_non_interpolated = None + last_non_interpolated = None + for ii, state_id in enumerate(collapsed_ids): + valid_id = not np.ma.is_masked(state_id) + update = gain_info_dict[state_id] if valid_id else {} + valid = valid_id and update["valid"] + + if valid: + reftime[ii] = update["source_transit"] + elif last_valid_non_interpolated is not None: + reftime[ii] = reftime[last_valid_non_interpolated] + else: + reftime[ii] = np.nan + + if valid and update["interpolated"] and last_valid_non_interpolated is not None: + reftime_prev[ii] = reftime[last_valid_non_interpolated] + interp_start[ii] = times[last_non_interpolated] + else: + reftime_prev[ii] = np.nan + interp_start[ii] = np.nan + + if valid and not update["interpolated"]: + last_valid_non_interpolated = ii + if valid_id and not update["interpolated"]: + last_non_interpolated = ii + # To identify the end of the interpolation periods we need to iterate + # backwards in time. As before we need to keep track of the last valid update + # we see, and then we set the interpolation end in the same manner. + last_non_interpolated = None + for ii, state_id in list(enumerate(collapsed_ids))[::-1]: + valid_id = not np.ma.is_masked(state_id) + update = gain_info_dict[state_id] if valid_id else {} + valid = valid_id and update.get("valid", False) + + if valid and update["interpolated"] and last_non_interpolated is not None: + interp_stop[ii] = times[last_non_interpolated] + else: + interp_stop[ii] = np.nan + + if valid_id and not update["interpolated"]: + last_non_interpolated = ii + + return { + "reftime": reftime, + "reftime_prev": reftime_prev, + "interp_start": interp_start, + "interp_stop": interp_stop, + "update_info": gain_info_dict, + }
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/_modules/ch_util/chan_monitor.html b/docs/_modules/ch_util/chan_monitor.html new file mode 100644 index 00000000..8c08d65d --- /dev/null +++ b/docs/_modules/ch_util/chan_monitor.html @@ -0,0 +1,1460 @@ + + + + + + ch_util.chan_monitor — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for ch_util.chan_monitor

+"""Channel quality monitor routines"""
+
+import numpy as np
+import copy
+
+import caput.time as ctime
+
+from chimedb import data_index
+from ch_util import ephemeris, finder
+
+# Corrections to transit times due to 2deg rotation of cylinders:
+TTCORR = {"CygA": -94.4, "CasA": 152.3, "TauA": -236.9, "VirA": -294.5}
+
+CR = 2.0 * np.pi / 180.0  # Cylinder rotation in radians
+R = np.array(
+    [[np.cos(CR), -np.sin(CR)], [np.sin(CR), np.cos(CR)]]
+)  # Cylinder rotation matrix
+C = 2.9979e8
+PHI = ephemeris.CHIMELATITUDE * np.pi / 180.0  # DRAO Latitue
+SD = 24.0 * 3600.0 * ctime.SIDEREAL_S  # Sidereal day
+
+_DEFAULT_NODE_SPOOF = {"scinet_online": "/scratch/k/krs/jrs65/chime/archive/online/"}
+# _DEFAULT_NODE_SPOOF = {'gong': '/mnt/gong/archive'} # For tests on Marimba
+
+
+
+[docs] +class FeedLocator(object): + """This class contains functions that do all the computations to + determine feed positions from data. It also determines the quality + of data and returns a list of good inputs and frequencies. + + Uppon initialization, it receives visibility data around one or two + bright sources transits as well as corresponding meta-data. + + Parameters + ---------- + vis1, [vis2] : Visibility data around bright source transit + tm1, [tm2] : Timestamp corresponding to vis1 [vis2] + src1, [src2] : Ephemeris astronomical object corresponding to the + transit in vis1 [vis2] + freqs : frequency axis of vis1 [and vis2] + prods : Product axis of vis1 [and vis2] + inputs : inputs loaded in vis1 [and vis2] + pstns0 : positions of inputs as obtained from the layout database + bsipts : base inputs used to determine cross correlations loaded + (might become unecessary in the future) + """ + + def __init__( + self, + vis1, + vis2, + tm1, + tm2, + src1, + src2, + freqs, + prods, + inputs, + pstns0, + bsipts=None, + ): + """Basic initialization method""" + self.adj_freqs = False # frequencies adjacent in data? 
(used for some tests) + self.VERBOSE = False + self.PATH = True # True if Pathfinder, False if CHIME + + self.vis1 = vis1 + self.vis2 = vis2 + self.tm1 = tm1 + self.tm2 = tm2 + self.freqs = freqs + self.prods = prods + self.inputs = inputs + self.inprds = np.array( + [[self.inputs[prod[0]], self.inputs[prod[1]]] for prod in self.prods] + ) + self.pstns0 = pstns0 + self.bsipts = bsipts + + self.Nfr = len(freqs) + self.Npr = len(prods) + self.Nip = len(inputs) + self.c_bslns0, self.bslns0 = self.set_bslns0() + + self.source1 = src1 + self.source2 = src2 + self.dec1 = self.source1._dec + if self.source2 is not None: + self.dec2 = self.source2._dec + self.tt1 = ( + ephemeris.transit_times(self.source1, self.tm1[0], self.tm1[-1])[0] + + TTCORR[self.source1.name] + ) + if self.source2 is not None: + self.tt2 = ( + ephemeris.transit_times(self.source2, self.tm2[0], self.tm2[-1])[0] + + TTCORR[self.source2.name] + ) + + # Results place holders: + self.pass_xd1, self.pass_xd2 = None, None + self.pass_cont1, self.pass_cont2 = None, None + self.good_prods = None + self.good_freqs = None + self.good_prods_cons = None + self.good_freqs_cons = None + + # TODO: delete all incidences of self.dists_computed. Subs by + # initializing here to self.xdists1 = None, etc.. + self.dists_computed = False + # TODO: this might not be used... + # Possible to scramble expected baselines. 
For debug purposes only: + self.bslins0_scrambled = False + + def set_bslns0(self): + """ """ + prods = self.prods + pstns0 = self.pstns0 + + c_bslns0 = np.zeros((self.Npr, 2)) + bslns0 = np.zeros((self.Npr, 2)) + for ii, prd in enumerate(prods): + x0 = pstns0[prd[0], 0] + y0 = pstns0[prd[0], 1] + x1 = pstns0[prd[1], 0] + y1 = pstns0[prd[1], 1] + + # Expected baselines (cylinder coords): + c_bslns0[ii] = np.array([x0 - x1, y0 - y1]) + # Expected rotated baselines (Earth coords): + bslns0[ii] = np.dot(R, c_bslns0[ii]) + + return c_bslns0, bslns0 + + def phase_trans(self, vis, tm, tt): + """ """ + ph = np.zeros((self.Nfr, self.Npr)) + tridx = np.argmin(abs(tm - tt)) # Transit index + + # TODO: Could add a test to check if transit time falls + # in between time points (rather than close to a particular point) + # and take a wheighted average of the two points in this case: + for ii in range(self.Nfr): + for jj in range(self.Npr): + ph[ii, jj] = np.angle(vis[ii, jj, tridx]) + + return ph + + def getphases_tr(self): + """ """ + self.ph1 = self.phase_trans(self.vis1, self.tm1, self.tt1) + self.ph2 = self.phase_trans(self.vis2, self.tm2, self.tt2) + + return self.ph1, self.ph2 + +
+[docs] + def get_c_ydist( + self, ph1=None, ph2=None, good_freqs=None, tol=1.5, Nmax=20 + ): # knl=3,dfacts=[5,11,17],rms_tol=1.):#,stptol=1.5,stpdev=1.): + """N-S. Absolutelly assumes contiguous frequencies!!!""" + # TODO: add test for contiguous freqs!!! + + def yparams(xx, yy): + """ """ + a, b = np.polyfit(xx, yy, 1) + + ydiff = np.diff(yy, axis=0) + xdiff = np.diff(xx) + absydiff = abs(ydiff) + + m, mn = np.zeros(yy.shape[1]), np.zeros(yy.shape[1]) + for ii in range(ydiff.shape[1]): + slct = absydiff[:, ii] < 2.0 * np.pi - tol + m[ii] = np.nanmedian(ydiff[slct, ii] / xdiff[slct]) + mn[ii] = np.nanmean(ydiff[slct, ii] / xdiff[slct]) + + lines = np.array( + [ + (xx - xx[0])[:, np.newaxis] * slp[np.newaxis, :] + + yy[0][np.newaxis, :] + for slp in [a, m, mn] + ] + ) + # Options are: + # fit-slope-no-unwrap, median-slope-no-unwrap, mean-slope-no-unwrap + # the same three again with unwrapping and discont=2.*np.pi-tol + # the same three again with unwrapping and no discont + y_opts = np.zeros((9, yy.shape[0], yy.shape[1])) + y_opts[:3] = (yy[np.newaxis, ...] 
- lines + np.pi) % (2.0 * np.pi) - np.pi + y_opts[3:6] = np.unwrap(y_opts[:3], discont=2.0 * np.pi - tol, axis=1) + y_opts[6:] = np.unwrap(y_opts[:3], axis=1) + + # Get goodness of fit: + y_rms = np.zeros((y_opts.shape[0], y_opts.shape[2])) + for ii in range(y_opts.shape[0]): + opt = y_opts[ii] + a, b = np.polyfit(xx, opt, 1) + mod = xx[:, np.newaxis] * a[np.newaxis, :] + b[np.newaxis, :] + y_rms[ii] = np.nanstd(opt - mod, axis=0) + + y_idx = np.argmin(y_rms, axis=0) + + y_opts[:3] = y_opts[:3] + lines + y_opts[3:6] = y_opts[3:6] + lines + y_opts[6:] = y_opts[6:] + lines + y_nxt = y_opts[ + y_idx[np.newaxis, :], + np.arange(y_opts.shape[1])[:, np.newaxis], + np.arange(y_opts.shape[2])[np.newaxis, :], + ] + + a, b = np.polyfit(xx, y_nxt, 1) + + return y_nxt, a + + if ph1 is None: + ph1 = self.ph1 + if ph2 is None: + ph2 = self.ph2 + + yslope = (np.copy(ph1 - ph2) + np.pi) % (2.0 * np.pi) - np.pi + + if good_freqs is not None: + yslope = yslope[good_freqs] + fr = np.copy(self.freqs[good_freqs]) + + yslope = np.unwrap(yslope, discont=2.0 * np.pi - tol, axis=0) + yslope = yslope - yslope[0] + + yslp, a_prev = yparams(fr, yslope) # First iteration + for ii in range(Nmax): + yslp, a = yparams(fr, yslp) + a_incr = abs(a - a_prev) / (abs(a + a_prev) * 0.5) + pass_y = a_incr < 1e-2 + if a_incr.all(): + break + else: + a_prev = a + + # TODO: now it's only one per chan. Change rotation coda appropriatelly + c_ydists = ( + a + / 1e6 + * C + / ( + 2.0 + * np.pi + * ( + np.cos(np.pi / 2.0 - self.dec1 + PHI) + - np.cos(np.pi / 2.0 - self.dec2 + PHI) + ) + ) + ) + + return c_ydists
+ + +
+[docs] + def get_c_ydist_perfreq(self, ph1=None, ph2=None): + """Old N-S dists function. TO be used only in case a continuum of + frequencies is not available + """ + + if ph1 is None: + ph1 = self.ph1 + if ph2 is None: + ph2 = self.ph2 + + c_ydists0 = self.c_bslns0[:, 1] + + dist_period = abs( + C + / ( + self.freqs + * 1e6 + * ( + np.cos(np.pi / 2.0 - self.dec1 + PHI) + - np.cos(np.pi / 2.0 - self.dec2 + PHI) + ) + ) + ) + exp_dist = c_ydists0[np.newaxis, :] % dist_period[:, np.newaxis] + base_ctr = c_ydists0[np.newaxis, :] - exp_dist + base_up = base_ctr + dist_period[:, np.newaxis] + base_down = base_ctr - dist_period[:, np.newaxis] + + phdiff = ph1 - ph2 + c_ydists = ( + phdiff + * C + / ( + 2.0 + * np.pi + * self.freqs[:, np.newaxis] + * 1e6 + * ( + np.cos(np.pi / 2.0 - self.dec1 + PHI) + - np.cos(np.pi / 2.0 - self.dec2 + PHI) + ) + ) + ) + c_ydists = c_ydists % dist_period[:, np.newaxis] + dist_opts = np.array( + [c_ydists + base_up, c_ydists + base_ctr, c_ydists + base_down] + ) + idxs = np.argmin(abs(dist_opts - c_ydists0[np.newaxis, np.newaxis, :]), axis=0) + c_ydists = np.array( + [ + [dist_opts[idxs[ii, jj], ii, jj] for jj in range(self.Npr)] + for ii in range(self.Nfr) + ] + ) + + return c_ydists
+ + + # TODO: change to 'yparams' +
+[docs] + def params_ft(self, tm, vis, dec, x0_shift=5.0): + """Extract relevant parameters from source transit + visibility in two steps: + 1) FFT visibility + 2) Fit a gaussian to the transform + + Parameters + ---------- + tm : array-like + Independent variable (time) + trace : array-like + Dependent variable (visibility) + freq : float + Frenquency of the visibility trace, in MHz. + dec : float + Declination of source. Used for initial guess of + gaussian width. Defaults to CygA declination: 0.71 + + Returns + ------- + popt : array of float + List with optimal parameters: [A,mu,sig2] + pcov : array of float + Covariance matrix for optimal parameters. + For details see documentation on numpy.curve_fit + + """ + from scipy.optimize import curve_fit + + freqs = self.freqs + prods = self.prods + + # Gaussian function for fit: + def gaus(x, A, mu, sig2): + return A * np.exp(-((x - mu) ** 2) / (2.0 * sig2)) + + # FFT: + # TODO: add check to see if length is multiple of 2 + Nt = len(tm) + dt = tm[1] - tm[0] + ft = np.fft.fft(vis, axis=2) + fr = np.fft.fftfreq(Nt, dt) + # Re-order frequencies: + ft_ord = np.concatenate( + (ft[..., Nt // 2 + Nt % 2 :], ft[..., : Nt // 2 + Nt % 2]), axis=2 + ) + ft_ord = abs(ft_ord) + fr_ord = np.concatenate((fr[Nt // 2 + Nt % 2 :], fr[: Nt // 2 + Nt % 2])) + + # Gaussian fits: + # Initial guesses: + # distx_0 = self.bslns0[:,0] # Should all be either 0 or +-22 for Pathfinder + x0_shift = 5.0 + distx_0 = self.bslns0[:, 0] + x0_shift # Shift to test robustness + mu0 = ( + -2.0 + * np.pi + * freqs[:, np.newaxis] + * 1e6 + * distx_0[np.newaxis, :] + * np.sin(np.pi / 2.0 - dec) + / (3e8 * SD) + ) + ctr_idx = np.argmin( + abs(fr_ord[np.newaxis, np.newaxis, :] - mu0[..., np.newaxis]), axis=2 + ) + A0 = np.array( + [ + [ft_ord[ii, jj, ctr_idx[ii, jj]] for jj in range(self.Npr)] + for ii in range(self.Nfr) + ] + ) + # 1 deg => dt = 1*(pi/180)*(24*3600/2*pi) = 240s + sigsqr0 = 1.0 / (4.0 * np.pi**2 * (240.0 * np.cos(dec)) ** 2) + p0 = np.array( 
+ [ + [[A0[ii, jj], mu0[ii, jj], sigsqr0] for jj in range(self.Npr)] + for ii in range(self.Nfr) + ] + ) + # Perform fit: + # TODO: there must be a way to do the fits without the for-loops + prms = np.zeros((self.Nfr, self.Npr, 3)) + for ii in range(self.Nfr): + for jj in range(self.Npr): + try: + popt, pcov = curve_fit(gaus, fr_ord, ft_ord[ii, jj, :], p0[ii, jj]) + prms[ii, jj] = np.array(popt) + # TODO: look for the right exception: + except: + # TODO: Use masked arrays instead of None? + prms[ii, jj] = [None] * 3 + + return prms
+ + + # TODO: change to 'get_yparams' + def getparams_ft(self): + """ """ + # TODO: Add test to eliminate bad fits! + self.ft_prms1 = self.params_ft(self.tm1, self.vis1, self.dec1) + if self.source2 is not None: + self.ft_prms2 = self.params_ft(self.tm2, self.vis2, self.dec2) + else: + self.ft_prms2 = None + + return self.ft_prms1, self.ft_prms2 + + # TODO: change all occurences of 'get_xdist' to 'xdists' + # to make it more consistent +
+[docs] + def get_xdist(self, ft_prms, dec): + """E-W""" + xdists = ( + -ft_prms[..., 1] + * SD + * C + / ( + 2.0 + * np.pi + * self.freqs[:, np.newaxis] + * 1e6 + * np.sin(np.pi / 2.0 - dec) + ) + ) + + return xdists
+ + + def data_quality(self): + """ """ + if self.pass_xd1 is None: + if self.source2 is not None: + self.xdist_test( + self.c_xdists1, self.c_xdists2 + ) # Assigns self.pass_xd1, self.pass_xd2 + else: + # Slightly higher tolerance since it uses rotated dists + self.xdist_test(self.xdists1, tol=2.5) + + if self.pass_cont1 is None: + self.continuity_test() # Assigns self.pass_cont1 and self.pass_cont2 + + gpxd1, gfxd1 = self.good_prod_freq(self.pass_xd1) + gpc1, gfc1 = self.good_prod_freq(self.pass_cont1) + + if self.source2 is not None: + gpxd2, gfxd2 = self.good_prod_freq(self.pass_xd2) + gpc2, gfc2 = self.good_prod_freq(self.pass_cont2) + + self.good_prods = np.logical_and( + np.logical_or(gpc1, gpc2), np.logical_or(gpxd1, gpxd2) + ) # good prods + self.good_freqs = np.logical_and( + np.logical_or(gfc1, gfc2), np.logical_or(gfxd1, gfxd2) + ) # good freqs + + # TODO: Delete these conservative estimates? + self.good_prods_cons = np.logical_and( + np.logical_and(gpc1, gpc2), np.logical_and(gpxd1, gpxd2) + ) # Conservative good prods + self.good_freqs_cons = np.logical_and( + np.logical_and(gfc1, gfc2), np.logical_and(gfxd1, gfxd2) + ) # Conservative good freqs + else: + self.good_prods = np.logical_and(gpc1, gpxd1) # good prods + self.good_freqs = np.logical_and(gfc1, gfxd1) # good freqs + + if self.bsipts is not None: + self.set_good_ipts(self.bsipts) # good_prods to good_ipts + + def single_source_test(self): + """ """ + self.getparams_ft() + self.xdists1 = self.get_xdist(self.ft_prms1, self.dec1) + + self.data_quality() + + def get_dists(self): + """ """ + # Get x distances in Earth coords (EW) + self.getparams_ft() + self.xdists1 = self.get_xdist(self.ft_prms1, self.dec1) + self.xdists2 = self.get_xdist(self.ft_prms2, self.dec2) + # Preliminary test for bad freqs (needed for ydists): + if self.pass_cont1 is None: + self.continuity_test() # Assigns self.pass_cont1 and self.pass_cont2 + gpc1, gfc1 = self.good_prod_freq(self.pass_cont1) + gpc2, gfc2 = 
self.good_prod_freq(self.pass_cont2) + gf = np.logical_and(gfc1, gfc2) # Preliminary good freqs + # Get y distances in cylinder coordinates (NS rotated by 2 deg) + self.getphases_tr() + self.c_ydists = self.get_c_ydist(good_freqs=gf) + # Transform between Earth and cylinder coords + self.c_xdists1 = ( + self.xdists1 + self.c_ydists[np.newaxis, :] * np.sin(CR) + ) / np.cos(CR) + self.c_xdists2 = ( + self.xdists2 + self.c_ydists[np.newaxis, :] * np.sin(CR) + ) / np.cos(CR) + self.ydists1 = ( + self.xdists1 + self.c_ydists[np.newaxis, :] * np.sin(CR) + ) * np.tan(CR) + self.c_ydists[np.newaxis, :] * np.cos(CR) + self.ydists2 = ( + self.xdists2 + self.c_ydists[np.newaxis, :] * np.sin(CR) + ) * np.tan(CR) + self.c_ydists[np.newaxis, :] * np.cos(CR) + + self.dists_computed = True + + self.data_quality() + + return self.c_xdists1, self.c_xdists2, self.c_ydists + +
+[docs] + def set_good_ipts(self, base_ipts): + """Good_prods to good_ipts""" + inp_list = [inpt for inpt in self.inputs] # Full input list + self.good_ipts = np.zeros(self.inputs.shape, dtype=bool) + for ii, inprd in enumerate(self.inprds): + if inprd[0] not in base_ipts: + self.good_ipts[inp_list.index(inprd[0])] = self.good_prods[ii] + if inprd[1] not in base_ipts: + self.good_ipts[inp_list.index(inprd[1])] = self.good_prods[ii] + if (inprd[0] in base_ipts) and (inprd[1] in base_ipts): + self.good_ipts[inp_list.index(inprd[0])] = self.good_prods[ii] + self.good_ipts[inp_list.index(inprd[1])] = self.good_prods[ii] + # To make sure base inputs are tagged good: + for bsip in base_ipts: + self.good_ipts[inp_list.index(bsip)] = True
+ + + def solv_pos(self, dists, base_ipt): + """ """ + from scipy.linalg import svd + + # Matrix defining order of subtraction for baseline distances + M = np.zeros((self.Npr, self.Nip - 1)) + # Remove base_ipt as its position will be set to zero + sht_inp_list = [inpt for inpt in self.inputs if inpt != base_ipt] + for ii, inprd in enumerate(self.inprds): + if inprd[0] != base_ipt: + M[ii, sht_inp_list.index(inprd[0])] = 1.0 + if inprd[1] != base_ipt: + M[ii, sht_inp_list.index(inprd[1])] = -1.0 + U, s, Vh = svd(M) + # TODO: add test for small s values to zero. Check existing code for that. + # Pseudo-inverse: + psd_inv = np.dot(np.transpose(Vh) * (1.0 / s)[np.newaxis, :], np.transpose(U)) + # Positions: + pstns = np.dot(psd_inv, dists) + # Add position of base_input + inp_list = [inpt for inpt in self.inputs] # Full input list + bs_inpt_idx = inp_list.index(base_ipt) # Original index of base_ipt + pstns = np.insert(pstns, bs_inpt_idx, 0.0) + + return pstns + + def get_postns(self): + """ """ + self.c_xd1 = np.nanmedian(self.c_xdists1[self.good_freqs], axis=0) + self.c_xd2 = np.nanmedian(self.c_xdists2[self.good_freqs], axis=0) + # Solve positions: + self.c_y = self.solv_pos(self.c_ydists, self.bsipts[0]) + self.c_x1 = self.solv_pos(self.c_xd1, self.bsipts[0]) + self.c_x2 = self.solv_pos(self.c_xd2, self.bsipts[0]) + self.expy = self.solv_pos(self.c_bslns0[:, 1], self.bsipts[0]) + self.expx = self.solv_pos(self.c_bslns0[:, 0], self.bsipts[0]) + + return self.c_x1, self.c_x2, self.c_y + + def xdist_test(self, xds1, xds2=None, tol=2.0): + """ """ + + def get_centre(xdists, tol): + """Returns the median (across frequencies) of NS separation dists for each + baseline if this median is withing *tol* of a multiple of 22 meters. 
Else, + returns the multiple of 22 meters closest to this median (up to 3*22=66 meters) + """ + xmeds = np.nanmedian(xdists, axis=0) + cylseps = np.arange(-1, 2) * 22.0 if self.PATH else np.arange(-3, 4) * 22.0 + devs = abs(xmeds[:, np.newaxis] - cylseps[np.newaxis, :]) + devmins = devs.min(axis=1) + centres = np.array( + [ + ( + xmeds[ii] # return median + if devmins[ii] < tol # if reasonable + else cylseps[np.argmin(devs[ii])] + ) # or use closest value + for ii in range(devmins.size) + ] + ) + + return centres + + xcentre1 = get_centre(xds1, tol) + xerr1 = abs(xds1 - xcentre1[np.newaxis, :]) + self.pass_xd1 = xerr1 < tol + + if xds2 is not None: + xcentre2 = get_centre(xds2, tol) + xerr2 = abs(xds2 - xcentre2[np.newaxis, :]) + self.pass_xd2 = xerr2 < tol + else: + self.pass_xd2 = None + + return self.pass_xd1, self.pass_xd2 + +
+[docs] + def continuity_test(self, tol=0.2, knl=5): + """Call only if freqs are adjacent. + Uses xdists (Earth coords) instead of c_xdists (cylinder coords) + to allow for calling before ydists are computed. Doesn't make any + difference for this test. Results are used in computing y_dists. + """ + from scipy.signal import medfilt + + clean_xdists1 = medfilt(self.xdists1, kernel_size=[knl, 1]) + diffs1 = abs(self.xdists1 - clean_xdists1) + self.pass_cont1 = diffs1 < tol + + if self.source2 is not None: + clean_xdists2 = medfilt(self.xdists2, kernel_size=[knl, 1]) + diffs2 = abs(self.xdists2 - clean_xdists2) + self.pass_cont2 = diffs2 < tol + else: + self.pass_cont2 = None + + return self.pass_cont1, self.pass_cont2
+ + +
+[docs] + def good_prod_freq( + self, pass_rst, tol_ch1=0.3, tol_ch2=0.7, tol_fr1=0.6, tol_fr2=0.7 + ): + """Tries to determine overall bad products and overall bad frequencies + from a test_pass result. + """ + + # First iteration: + chans_score = np.sum(pass_rst, axis=0) / float(pass_rst.shape[0]) + freqs_score = np.sum(pass_rst, axis=1) / float(pass_rst.shape[1]) + good_chans = chans_score > tol_ch1 + good_freqs = freqs_score > tol_fr1 + # Second Iteration: + pass_gch = pass_rst[:, np.where(good_chans)[0]] # Only good channels + pass_gfr = pass_rst[np.where(good_freqs)[0], :] # Only good freqs + chans_score = np.sum(pass_gfr, axis=0) / float(pass_gfr.shape[0]) + freqs_score = np.sum(pass_gch, axis=1) / float(pass_gch.shape[1]) + good_chans = chans_score > tol_ch2 + good_freqs = freqs_score > tol_fr2 + + return good_chans, good_freqs
+
+ + + +
+[docs] +class ChanMonitor(object): + """This class provides the user interface to FeedLocator. + + It initializes instances of FeedLocator (normally one per polarization) + and returns results combined lists of results (good channels and positions, + agreement/disagreement with the layout database, etc.) + + Feed locator should not + have to sepparate the visibilities in data to run the test on and data not to run the + test on. ChanMonitor should make the sepparation and provide FeedLocator with the right + data cube to test. + + Parameters + ---------- + t1 [t2] : Initial [final] time for the test period. If t2 not provided it is + set to 1 sideral day after t1 + freq_sel + prod_sel + """ + + def __init__( + self, + t1, + t2=None, + freq_sel=None, + prod_sel=None, + bswp1=26, + bswp2=90, + bsep1=154, + bsep2=218, + ): + """Here t1 and t2 have to be unix time (floats)""" + self.t1 = t1 + if t2 is None: + self.t2 = self.t1 + SD + else: + self.t2 = t2 + + self.acq_list = None + self.night_acq_list = None + + self.finder = None + self.night_finder = None + + self.source1 = None + self.source2 = None + + # if prod_sel is not None: + self.prod_sel = prod_sel + # if freq_sel is not None: + self.freq_sel = freq_sel + + self.dat1 = None + self.dat2 = None + self.tm1 = None + self.tm2 = None + + self.freqs = None + self.prods = None + self.input_map = None + self.inputs = None + + self.corr_inputs = None + self.pwds = None + self.pstns = None + self.p1_idx, self.p2_idx = None, None + + self.bswp1 = bswp1 + self.bsep1 = bsep1 + self.bswp2 = bswp2 + self.bsep2 = bsep2 + +
+[docs] + @classmethod + def fromdate( + cls, + date, + freq_sel=None, + prod_sel=None, + bswp1=26, + bswp2=90, + bsep1=154, + bsep2=218, + ): + """Initialize class from date""" + t1 = ctime.datetime_to_unix(date) + return cls( + t1, + freq_sel=freq_sel, + prod_sel=prod_sel, + bswp1=bswp1, + bswp2=bswp2, + bsep1=bsep1, + bsep2=bsep2, + )
+ + + # TODO: this is kind of silly right now. + # If it is initialized from data, I should use the data given + # or not allow for that possibility. +
+[docs] + @classmethod + def fromdata(cls, data, freq_sel=None, prod_sel=None): + """Initialize class from andata object""" + t1 = data.time[0] + t2 = data.time[-1] + return cls(t1, t2, freq_sel=freq_sel, prod_sel=prod_sel)
+ + + # TODO: test for adjacent freqs to pass to FeedLocator + + def get_src_cndts(self): + """ """ + clr, ntt, srcs = self.get_sunfree_srcs() + + grd_dict = {"CygA": 4, "CasA": 4, "TauA": 3, "VirA": 1} + # Grades for each source + grds = [ + ( + grd_dict[src.name] - 2 + if ( + (src.name in ["CygA", "CasA"]) and (not ntt[ii]) + ) # CasA and CygA at daytime worse than TauA at night + else grd_dict[src.name] + ) + for ii, src in enumerate(srcs) + ] + + # Grade 0 if not clear of Sun: + grds = [grd if clr[ii] else 0 for ii, grd in enumerate(grds)] + + # Source candidates ordered in decreasing quality + src_cndts = [ + src + for grd, src in sorted( + zip(grds, srcs), key=lambda entry: entry[0], reverse=True + ) + if grd != 0 + ] + + return src_cndts + + def get_pol_prod_idx(self, pol_inpt_idx): + """ """ + pol_prod_idx = [] + for ii, prd in enumerate(self.prods): + if (prd[0] in pol_inpt_idx) and (prd[1] in pol_inpt_idx): + pol_prod_idx.append(ii) + + return pol_prod_idx + + def get_feedlocator(self, pol=1): + """ """ + if pol == 1: + pol_inpt_idx = self.p1_idx + bsipts = [self.bswp1, self.bsep1] + elif pol == 2: + pol_inpt_idx = self.p2_idx + bsipts = [self.bswp2, self.bsep2] + + pol_prod_idx = self.get_pol_prod_idx(pol_inpt_idx) + + inputs = self.inputs[pol_inpt_idx] + pstns = self.pstns[pol_inpt_idx] + prods = [] + for prd in self.prods[pol_prod_idx]: + idx0 = np.where(inputs == self.inputs[prd[0]])[0][0] + idx1 = np.where(inputs == self.inputs[prd[1]])[0][0] + prods.append((idx0, idx1)) + + if self.source2 is not None: + fl = FeedLocator( + self.dat1.vis[:, pol_prod_idx, :], + self.dat2.vis[:, pol_prod_idx, :], + self.tm1, + self.tm2, + self.source1, + self.source2, + self.freqs, + prods, + inputs, + pstns, + bsipts, + ) + else: + fl = FeedLocator( + self.dat1.vis[:, pol_prod_idx, :], + None, + self.tm1, + None, + self.source1, + self.source2, + self.freqs, + prods, + inputs, + pstns, + bsipts, + ) + + return fl + + def init_feedloc_p1(self): + """ """ + self.flp1 = 
self.get_feedlocator() + return self.flp1 + + def init_feedloc_p2(self): + """ """ + self.flp2 = self.get_feedlocator(pol=2) + return self.flp2 + + def get_cyl_pol(self, corr_inputs, pwds): + """ """ + wchp1, wchp2, echp1, echp2 = [], [], [], [] + for ii, inpt in enumerate(corr_inputs): + if pwds[ii]: + if inpt.reflector == "W_cylinder": + if inpt.pol == "S": + wchp1.append(ii) + else: + wchp2.append(ii) + elif inpt.reflector == "E_cylinder": + if inpt.pol == "S": + echp1.append(ii) + else: + echp2.append(ii) + else: + # This probably doesn't happen... + pass + else: + # TODO: this only makes sense for the pathfinder: + if ii < 64: + wchp1.append(ii) + elif ii < 128: + wchp2.append(ii) + elif ii < 192: + echp1.append(ii) + else: + echp2.append(ii) + + return [wchp1, wchp2, echp1, echp2] + + def get_pos_pol(self, corr_inputs, pwds): + """ """ + Ninpts = len(pwds) + p1_idx, p2_idx = [], [] + pstns = np.zeros((Ninpts, 2)) # In-cylinder positions + for ii, inpt in enumerate(corr_inputs): + if pwds[ii]: + pstns[ii, 0] = inpt.cyl * 22.0 + pstns[ii, 1] = -1.0 * inpt.pos + if inpt.pol == "S": + p1_idx.append(ii) + else: + p2_idx.append(ii) + else: + # TODO: this only makes sense for the pathfinder: + # Numbers were taken from layout database + if ii < 64: + pstns[ii, 0] = 0.0 + pstns[ii, 1] = -8.767 - 0.3048 * float(ii) + p1_idx.append(ii) + elif ii < 128: + pstns[ii, 0] = 0.0 + pstns[ii, 1] = -8.767 - 0.3048 * float(ii - 64) + p2_idx.append(ii) + elif ii < 192: + pstns[ii, 0] = 22.0 + pstns[ii, 1] = -8.7124 - 0.3048 * float(ii - 128) + p1_idx.append(ii) + else: + pstns[ii, 0] = 22.0 + pstns[ii, 1] = -8.7124 - 0.3048 * float(ii - 192) + p2_idx.append(ii) + + return pstns, p1_idx, p2_idx + +
+[docs] + def set_metadata(self, tms, input_map): + """Sets self.corr_inputs, self.pwds, self.pstns, self.p1_idx, self.p2_idx""" + from ch_util import tools + + # Get CHIME ON channels: + half_time = ctime.unix_to_datetime(tms[int(len(tms) // 2)]) + corr_inputs = tools.get_correlator_inputs(half_time) + self.corr_inputs = tools.reorder_correlator_inputs(input_map, corr_inputs) + pwds = tools.is_chime_on(self.corr_inputs) # Which inputs are CHIME ON antennas + self.pwds = np.array(pwds, dtype=bool) + # Get cylinders and polarizations + self.pstns, self.p1_idx, self.p2_idx = self.get_pos_pol( + self.corr_inputs, self.pwds + )
+ + + def determine_bad_gpu_nodes(self, data, frac_time_on=0.7): + node_on = np.any(data.vis[:].real != 0.0, axis=1) + + self.gpu_node_flag = np.sum(node_on, axis=1) > frac_time_on * node_on.shape[1] + + def get_prod_sel(self, data): + """ """ + from ch_util import tools + + input_map = data.input + tms = data.time + half_time = ctime.unix_to_datetime(tms[int(len(tms) // 2)]) + corr_inputs = tools.get_correlator_inputs(half_time) + corr_inputs = tools.reorder_correlator_inputs(input_map, corr_inputs) + pwds = tools.is_chime_on(corr_inputs) # Which inputs are CHIME ON antennas + + wchp1, wchp2, echp1, echp2 = self.get_cyl_pol(corr_inputs, pwds) + + # Ensure base channels are CHIME and ON + while not pwds[np.where(input_map["chan_id"] == self.bswp1)[0][0]]: + self.bswp1 += 1 + while not pwds[np.where(input_map["chan_id"] == self.bswp2)[0][0]]: + self.bswp2 += 1 + while not pwds[np.where(input_map["chan_id"] == self.bsep1)[0][0]]: + self.bsep1 += 1 + while not pwds[np.where(input_map["chan_id"] == self.bsep2)[0][0]]: + self.bsep2 += 1 + + prod_sel = [] + for ii, prod in enumerate(data.prod): + add_prod = False + add_prod = add_prod or ( + (prod[0] == self.bswp1 and prod[1] in echp1) + or (prod[1] == self.bswp1 and prod[0] in echp1) + ) + add_prod = add_prod or ( + (prod[0] == self.bswp2 and prod[1] in echp2) + or (prod[1] == self.bswp2 and prod[0] in echp2) + ) + add_prod = add_prod or ( + (prod[0] == self.bsep1 and prod[1] in wchp1) + or (prod[1] == self.bsep1 and prod[0] in wchp1) + ) + add_prod = add_prod or ( + (prod[0] == self.bsep2 and prod[1] in wchp2) + or (prod[1] == self.bsep2 and prod[0] in wchp2) + ) + + if add_prod: + prod_sel.append(ii) + + prod_sel.sort() + + return prod_sel, pwds + + def get_data(self): + """ """ + from ch_util import ni_utils + + self.set_acq_list() + src_cndts = self.get_src_cndts() + + for src in src_cndts: + results_list = self.get_results(src) + if len(results_list) != 0: + if self.source1 is None: + # Get prod_sel if not given: + 
if self.prod_sel is None: + # Load data with a single frequency to get prod_sel + dat = results_list[0].as_loaded_data(freq_sel=[0]) + self.prod_sel, pwds = self.get_prod_sel(dat) + # Load data: + self.source1 = src + self.dat1 = results_list[0].as_loaded_data( + prod_sel=self.prod_sel, freq_sel=self.freq_sel + ) + # TODO: correct process_synced_data to not crash when no NS + try: + self.dat1 = ni_utils.process_synced_data(self.dat1) + except: + pass + self.freqs = self.dat1.freq + self.prods = self.dat1.prod + self.input_map = self.dat1.input + self.inputs = self.input_map["chan_id"] + self.tm1 = self.dat1.time + # Set metadata (corr_inputs, pstns, polarizatins, etc... + self.set_metadata(self.tm1, self.input_map) + + # Determine what frequencies are bad + # due to gpu nodes that are down + self.determine_bad_gpu_nodes(self.dat1) + + # TODO: get corr_inputs for dat2 as well and compare to dat1 + elif self.source2 is None: + self.source2 = src + self.dat2 = results_list[0].as_loaded_data( + prod_sel=self.prod_sel, freq_sel=self.freq_sel + ) + # TODO: correct process_synced_data to not crash when no NS + try: + self.dat2 = ni_utils.process_synced_data(self.dat2) + except: + pass + self.tm2 = self.dat2.time + break + + return self.source1, self.source2 + +
+[docs] + def get_results(self, src, tdelt=2800): + """If self.finder exists, then it takes a deep copy of this object, + further restricts the time range to include only src transits, + and then queries the database to obtain a list of the acquisitions. + If self.finder does not exist, then it creates a finder object, + restricts the time range to include only src transits between + self.t1 and self.t2, and then queries the database to obtain a list + of the acquisitions. + """ + + if self.finder is not None: + f = copy.deepcopy(self.finder) + else: + f = finder.Finder(node_spoof=_DEFAULT_NODE_SPOOF) + f.filter_acqs((data_index.ArchiveInst.name == "pathfinder")) + f.only_corr() + f.set_time_range(self.t1, self.t2) + + f.include_transits(src, time_delta=tdelt) + + return f.get_results()
+ + +
+[docs] + def set_acq_list(self): + """This method sets four attributes. The first two attributes + are 'night_finder' and 'night_acq_list', which are the + finder object and list of acquisitions that + contain all night time data between self.t1 and self.t2. + The second two attributes are 'finder' and 'acq_list', + which are the finder object and list of acquisitions + that contain all data beween self.t1 and self.t2 with the + sunrise, sun transit, and sunset removed. + """ + + # Create a Finder object and focus on time range + f = finder.Finder(node_spoof=_DEFAULT_NODE_SPOOF) + f.filter_acqs((data_index.ArchiveInst.name == "pathfinder")) + f.only_corr() + f.set_time_range(self.t1, self.t2) + + # Create a list of acquisitions that only contain data collected at night + f_night = copy.deepcopy(f) + f_night.exclude_daytime() + + self.night_finder = f_night + self.night_acq_list = f_night.get_results() + + # Create a list of acquisitions that flag out sunrise, sun transit, and sunset + mm = ctime.unix_to_datetime(self.t1).month + dd = ctime.unix_to_datetime(self.t1).day + mm = mm + float(dd) / 30.0 + + fct = 3.0 + tol1 = (np.arctan((mm - 3.0) * fct) + np.pi / 2.0) * 10500.0 / np.pi + 1500.0 + tol2 = (np.pi / 2.0 - np.arctan((mm - 11.0) * fct)) * 10500.0 / np.pi + 1500.0 + ttol = np.minimum(tol1, tol2) + + fct = 5.0 + tol1 = (np.arctan((mm - 4.0) * fct) + np.pi / 2.0) * 2100.0 / np.pi + 6000.0 + tol2 = (np.pi / 2.0 - np.arctan((mm - 10.0) * fct)) * 2100.0 / np.pi + 6000.0 + rstol = np.minimum(tol1, tol2) + + f.exclude_sun(time_delta=ttol, time_delta_rise_set=rstol) + + self.finder = f + self.acq_list = f.get_results()
+ + +
+[docs] + def get_sunfree_srcs(self, srcs=None): + """This method uses the attributes 'night_acq_list' and + 'acq_list' to determine the srcs that transit + in the available data. If these attributes do not + exist, then the method 'set_acq_list' is called. + If srcs is not specified, then it defaults to the + brightest four radio point sources in the sky: + CygA, CasA, TauA, and VirA. + """ + + if self.acq_list is None: + self.set_acq_list() + + if srcs is None: + srcs = [ephemeris.CygA, ephemeris.CasA, ephemeris.TauA, ephemeris.VirA] + Ns = len(srcs) + + clr = [False] * Ns + ntt = [False] * Ns # night transit + + for ii, src in enumerate(srcs): + night_transit = np.array([]) + for acq in self.night_acq_list: + night_transit = np.append( + night_transit, ephemeris.transit_times(src, *acq[1]) + ) + + if night_transit.size: + ntt[ii] = True + + if src.name in ["CygA", "CasA"]: + transit = np.array([]) + for acq in self.acq_list: + transit = np.append(transit, ephemeris.transit_times(src, *acq[1])) + + if transit.size: + clr[ii] = True + + else: + clr[ii] = ntt[ii] + + return clr, ntt, srcs
+ + +
+[docs] + def single_source_check(self): + """Assumes self.source1 is NOT None""" + Nipts = len(self.inputs) + self.good_ipts = np.zeros(Nipts, dtype=bool) + self.good_freqs = None + + if len(self.p1_idx) > 0: + self.init_feedloc_p1() # Initiate FeedLocator + self.flp1.single_source_test() + self.good_freqs = self.flp1.good_freqs + good_frac = self.get_res_sing_src(self.flp1) + if good_frac < 0.6: + msg = """ +WARNING! +Less than 60% of P1 channels turned out good. +This may be due to a poor choice of base channel. +Consider re-running the test with different +bswp1 and bsep1 arguments +""" + print(msg) + + if len(self.p2_idx) > 0: + self.init_feedloc_p2() # Initiate FeedLocator + self.flp2.single_source_test() + good_frac = self.get_res_sing_src(self.flp2) + if good_frac < 0.6: + msg = """ +WARNING! +Less than 60% of P2 channels turned out good. +This may be due to a poor choice of base channel. +Consider re-running the test with different +bswp2 and bsep2 arguments +""" + print(msg) + + if self.good_freqs is None: + self.good_freqs = self.flp2.good_freqs + else: + self.good_freqs = np.logical_or(self.good_freqs, self.flp2.good_freqs) + + self.results_summary()
+ + + def full_check(self): + """ """ + if self.source1 is None: + self.get_data() + if self.source2 is None: + if self.source1 is None: + raise RuntimeError("No sources available.") + else: + self.single_source_check() + else: + Nipts = len(self.inputs) + self.good_ipts = np.zeros(Nipts, dtype=bool) + self.postns = np.zeros((Nipts, 2)) + self.expostns = np.zeros((Nipts, 2)) + self.good_freqs = None + if len(self.p1_idx) > 0: + self.init_feedloc_p1() # Initiate FeedLocator + self.flp1.get_dists() # Run tests + self.flp1.get_postns() # Solve for positions + + self.good_freqs = self.flp1.good_freqs + good_frac = self.get_test_res(self.flp1) + if good_frac < 0.6: + msg = """ +WARNING! +Less than 60% of P1 channels turned out good. +This may be due to a poor choice of base channel. +Consider re-running the test with different +bswp1 and bsep1 arguments +""" + print(msg) + + if len(self.p2_idx) > 0: + self.init_feedloc_p2() # Initiate FeedLocator + self.flp2.get_dists() # Run tests + self.flp2.get_postns() # Solve for positions + + if self.good_freqs is None: + self.good_freqs = self.flp2.good_freqs + else: + self.good_freqs = np.logical_or( + self.good_freqs, self.flp2.good_freqs + ) + good_frac = self.get_test_res(self.flp2) + if good_frac < 0.6: + msg = """ +WARNING! +Less than 60% of P2 channels turned out good. +This may be due to a poor choice of base channel. +Consider re-running the test with different +bswp2 and bsep2 arguments +""" + print(msg) + + self.results_summary() + + def results_summary(self): + """ """ + self.bad_ipts = self.input_map[np.logical_not(self.good_ipts)] + self.deemed_bad_but_good = self.input_map[ + np.logical_and(np.logical_not(self.pwds), self.good_ipts) + ] + self.bad_not_accounted = self.input_map[ + np.logical_and(self.pwds, np.logical_not(self.good_ipts)) + ] + if self.source2 is not None: + # TODO: maybe use only x-position. Y is too erratic... 
+ self.pos_err = np.sum((self.postns - self.expostns) ** 2, axis=1) ** 0.5 + self.wrong_position = self.input_map[self.pos_err > 1.0] + + def get_test_res(self, fl): + """ """ + for ii, ipt in enumerate(self.inputs): + for jj, fl_ipt in enumerate(fl.inputs): + if fl_ipt == ipt: + self.good_ipts[ii] = fl.good_ipts[jj] + # TODO: add some treatment for c_x2 (mean? test diff?) + self.postns[ii][0] = fl.c_x1[jj] + self.postns[ii][1] = fl.c_y[jj] + self.expostns[ii][0] = fl.expx[jj] + self.expostns[ii][1] = fl.expy[jj] + + good_frac = float(np.sum(fl.good_ipts)) / float(fl.good_ipts.size) + return good_frac + + def get_res_sing_src(self, fl): + """ """ + for ii, ipt in enumerate(self.inputs): + for jj, fl_ipt in enumerate(fl.inputs): + if fl_ipt == ipt: + self.good_ipts[ii] = fl.good_ipts[jj] + + good_frac = float(np.sum(fl.good_ipts)) / float(fl.good_ipts.size) + return good_frac
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/_modules/ch_util/data_quality.html b/docs/_modules/ch_util/data_quality.html new file mode 100644 index 00000000..e071e695 --- /dev/null +++ b/docs/_modules/ch_util/data_quality.html @@ -0,0 +1,1127 @@ + + + + + + ch_util.data_quality — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for ch_util.data_quality

+"""
+Data quality routines
+
+
+Data quality functions
+======================
+- :py:meth:`good_channels`
+
+
+Issues
+======
+
+Auxiliary functions are still lacking documentation.
+
+"""
+
+import numpy as np
+
+import caput.time as ctime
+
+import ch_util.ephemeris as ch_eph
+from ch_util import andata
+from ch_util import tools
+from ch_util import ni_utils
+
+
+
+[docs] +def good_channels( + data, + gain_tol=10.0, + noise_tol=2.0, + fit_tol=0.02, + test_freq=0, + noise_synced=None, + inputs=None, + res_plot=False, + verbose=True, +): + """Test data for misbehaving channels. + + Three tests are performed: + + 1. Excessively high digital gains, + 2. Compliance of noise to the radiometer equation and + 3. Goodness of fit to a template Tsky. + + See `Doclib:235 + <https://bao.phas.ubc.ca/doc/cgi-bin/general/documents/display?Id=235>`_ + file 'data_quality.pdf' for details on how the filters and tolerances work. + + Parameters + ---------- + data : ch_util.andata.CorrData object + Data to run test on. + If andata object contains cross-correlations, + test is performed on auto-correlations only. + gain_tol : float + Tolerance for digital gains filter. Flag channels whose + digital gain fractional absolute deviation + is above 'gain_tol' (default is 10.) + noise_tol : float + Tolerance for radiometer noise filter. Flag channels whose + noise rms is higher then 'noise_tol' times the expected + from the radiometer equation. (default = 2.) + fit_tol : float + Tolerance for the fit-to-Tsky filter. Flag channels whose + fractional rms for the 'gain' fit parameter is above + 'fit_tol' (default = 0.02) + test_freq : integer + Index of frequency to test. Default is 0. + noise_synced : boolean + Use this to force the code to call (or not call) + ni_utils.process_synced_data(). If not given, + the code will determine if syncronized noise injection was on. + For acquisitions newer then 20150626T200540Z_pathfinder_corr, + noise injection info is written in the attributes. For older + acquisitions the function _check_ni() is called to determine + if noise injection is On. + inputs : list of CorrInputs, optional + List of CorrInput objects describing the channels in this + dataset. This is optional, if not set (default), then it will + look the data up in the database. This option just allows + control of the database accesses. 
+ res_plot : boolean, optional + If True, a plot with all the tested channels and the + Tsky fits is generated. File naming is + `plot_fit_{timestamp}.pdf` + verbose : boolean, optional + Print out useful output as the tests are run. + + Returns + ------- + good_gains : list of int + 1. for channels that pass the gains filter, 0. otherwise. + good_noise : list of int + 1. for channels that pass the noise filter, 0. otherwise. + good_fit : list of int + 1. for channels that pass the fit-to-Tsky filter, + 0. otherwise. + test_chans : list of int + A list of the channels tested in the same order as they + appear in all the other lists returned + + Examples + -------- + + Run test on frequency index 3. data is an andata object: + + >>> good_gains, good_noise, good_fit, test_chans = good_channels(data,test_freq=3) + + And to create a plot of the results: + + >>> good_gains, good_noise, good_fit, test_chans = good_channels(data,test_freq=3,res_plot=True) + + """ + + if verbose: + print("Running data quality test.") + + # Determine if data has a gated visibility: + is_gated_format = "gated_vis0" in data + + # Get number of samples during an integration period: + if "gpu.gpu_intergration_period" in data.attrs: + # From attributes: + n_samp = data.attrs["gpu.gpu_intergration_period"][0] + else: + # Or from integration time and bandwidth: + t_step = ( + data.index_map["time"]["ctime"][1] - data.index_map["time"]["ctime"][0] + ) # Integration time + bwdth = data.index_map["freq"][0][1] * 1e6 # Bandwidth comes in MHz + n_samp = t_step * bwdth + + # Processing noise synced data, if noise_synced != False: + if noise_synced == False: + pass + elif noise_synced == True: + if is_gated_format: + # If data is gated, ignore noise_synced argument: + msg = ( + "Warning: noise_synced=True argument given " + + "but data seems to be gated.\n" + + "Ignoring noise_synced argument" + ) + print(msg) + else: + # Process noise synced data: + data = ni_utils.process_synced_data(data) + elif 
noise_synced == None: + # If noise_synced is not given, try to read ni_enable from data: + try: + # Newer data have a noise-injection flag + ni_enable = data.attrs["fpga.ni_enable"][0].astype(bool) + except: + # If no info is found, run function to determine ni_enable: + ni_enable = _check_ni(data, test_freq) + # If noise injection is enabled and data is not gated: + if ni_enable and not is_gated_format: + # Process noise synced data: + data = ni_utils.process_synced_data(data) + + # Read full product array in data: + prod_array_full = data.index_map["prod"] + # Get indices for auto-corrs: + autos_index, autos_chan = _get_autos_index(prod_array_full) + # Select auto-corrs and test_freq only: + visi = np.array([data.vis[test_freq, jj, :] for jj in autos_index]) + chan_array = np.array([chan for chan in autos_chan]) + tmstp = data.index_map["time"]["ctime"] + + # Remove non-chime channels (Noise source, RFI, 26m...): + visi, test_chans = _cut_non_chime(data, visi, chan_array, inputs) + + # Digital gains test: + if "gain" in data: + if verbose: + print("Testing quality of digital gains") + + good_gains = _gains_test(data, test_freq, test_chans, tol=gain_tol) + else: + if verbose: + msg = ( + "Could not obtain digital gains information from data. " + + "Ignoring gains test." 
+ ) + print(msg) + + good_gains = None + + # Radiometer noise test: + if verbose: + print("Testing noise levels") + + good_noise, rnt = _noise_test(visi, tmstp, n_samp, tol=noise_tol) + + # Standard channels to fit for Tsky: + if (good_gains is not None) and (good_noise is not None): + # Use channels that pass both tests + stand_chans = good_gains * good_noise + elif (good_gains is None) and (good_noise is None): + # If gains and noise tests are missing, run fit on all channels: + stand_chans = [1.0] * len(test_chans) + else: + if good_gains is not None: + # If only gains test was run + stand_chans = good_gains + if good_noise is not None: + # If only noise test was run + stand_chans = good_noise + + # Median filter visibilities for fit test: + cut_vis = _median_filter(visi) + + # Cut sun transit from visibilities: + if verbose: + print("Cutting Sun transist from visibilities") + cut_vis, cut_tmstp = _cut_sun_transit(cut_vis, tmstp) + + # Only run fit test if there are enough good channels + # and enogh time around Sun transits: + if np.sum(stand_chans) > 50 and len(cut_tmstp) > 100: + # Getting template visibility (most typical visibility): + if verbose: + print("Getting template visibility") + gn, Ts = _get_template(cut_vis, stand_chans) + + # Fit template to visibilities: + if verbose: + print("Fitting template to visibilities") + good_fit, popt, perr, sky = _fit_template(Ts, cut_vis, tol=fit_tol) + + # Create plot with results, if res_plot is True: + if res_plot: + print("Generating plots") + _create_plot( + visi, + tmstp, + cut_tmstp, + sky, + popt, + test_chans, + good_gains, + good_noise, + good_fit, + ) + else: + if verbose: + if not np.sum(stand_chans) > 50: + print("Not enough channels for fit test.") + if not len(cut_tmstp) > 100: + print("Not enough time around Sun transit.") + print("Skipping template fit test.") + good_fit = None + + if verbose: + # Computing some statistics to the filter: + Nact, Nnoisy, Ngains, Nfit, Nbad = _stats_print( + 
good_noise, good_gains, good_fit, test_chans + ) + + print("Finished running data quality test.") + + return good_gains, good_noise, good_fit, test_chans
+ + + +def _check_ni(data, test_freq=0): + """This is a quick and dirt function to determine if + noise injection was ON or OFF for acquisitions + older then ctime = 1435349183, when noise injection + info started to be written to the h5py files + + Parameters + ---------- + data : andata.CorrData + Data to check for noise injection. + test_freq : int + frequency bin within data, to be run the test on + + Returns + ------- + ni_on : boolean + True if noise injection is On, False otherwise. + """ + + visi = data.vis[test_freq].real + # Divide visibility in even and odd time bins + if visi.shape[1] % 2 == 0: + v_even = visi[:, 0::2] + else: + v_even = visi[:, 0:-1:2] # v_even and v_odd have same length + v_odd = visi[:, 1::2] + + # Average difference ON-OFF. Should be the same as Off-Off + # if noise injection is Off. + diff_on_off = np.mean(abs(v_even - v_odd)) + + # Divide odd visibility again in odd and even + if v_odd.shape[1] % 2 == 0: + v_1 = v_odd[:, 0::2] + else: + v_1 = v_odd[:, 0:-1:2] + v_2 = v_odd[:, 1::2] + + # Average difference OFF-OFF. + diff_off_off = np.mean(abs(v_1 - v_2)) + + # Ratio of differences. Sould be close to 1 + # if noise injection is off. + ratio = diff_on_off / diff_off_off + + if ratio > 3.0: + ni_on = True + else: + ni_on = False + + return ni_on + + +def _get_autos_index(prod_array): + """Obtain auto-correlation indices from the 'prod' index map + returned by andata. + """ + autos_index, autos_chan = [], [] + for ii in range(len(prod_array)): + if prod_array[ii][0] == prod_array[ii][1]: + autos_index.append(ii) + autos_chan.append(prod_array[ii][0]) + + return autos_index, autos_chan + + +def _get_prod_array(path): + """Function to get visibility product array from file path + + Useful when desired file is known but not the time span, so that + finder and as_reader are not useful. 
Or when file is not known + to alpenhorn + + Parameters: + *********** + path : string, path to file + + Returns: + ******** + prod_array : array-like, the visibility products. + """ + + # If given list of files, use first one: + if isinstance(path, list): + path = path[0] + + # Get file with single time, single frequency: + data_aux = andata.AnData.from_acq_h5(path, start=0, stop=1, freq_sel=0) + + return data_aux.index_map["prod"] + + +def _cut_non_chime(data, visi, chan_array, inputs=None): + """ + Remove non CHIME channels (noise injection, RFI antenna, + 26m, etc...) from visibility. Also remove channels marked + as powered-off in layout DB. + """ + + # Map of channels to corr. inputs: + input_map = data.input + tmstp = data.index_map["time"]["ctime"] # time stamp + # Datetime halfway through data: + half_time = ctime.unix_to_datetime(tmstp[int(len(tmstp) // 2)]) + # Get information on correlator inputs, if not already supplied + if inputs is None: + inputs = tools.get_correlator_inputs(half_time) + # Reorder inputs to have sema order as input map (and data) + inputs = tools.reorder_correlator_inputs(input_map, inputs) + # Get noise source channel index: + + # Test if inputs are attached to CHIME antenna and powered on: + pwds = tools.is_chime_on(inputs) + + for ii in range(len(inputs)): + # if ( (not tools.is_chime(inputs[ii])) + if (not pwds[ii]) and (ii in chan_array): + # Remove non-CHIME-on channels from visibility matrix... 
+ idx = np.where(chan_array == ii)[0][0] # index of channel + visi = np.delete(visi, idx, axis=0) + # ...and from product array: + chan_array = np.delete(chan_array, idx, axis=0) + + return visi, chan_array + + +def _noise_test(visi, tmstp, n_samp, tol): + """Calls radiom_noise to obtain radiometer statistics + and aplies the noise tolerance to get a list of + channels that pass the radiometer noise test + """ + Nchans = visi.shape[0] + # Array to hold radiom noise fractions + rnt = np.full((Nchans), np.nan) + + # Cut daytime from visibility: + visi_night, tmstp_night = _cut_daytime(visi, tmstp) + + run_noise_test = True + if tmstp_night is None: + # All data is in day-time + run_noise_test = False + elif (not isinstance(tmstp_night, list)) and (len(tmstp_night) < 20): + # To little night-time: + run_noise_test = False + + if not run_noise_test: + msg = "Not enough night-time for noise test. Ignoring noise test." + print(msg) + good_noise = None + rnt = None + else: + # Run noise test + for ii in range(Nchans): + # If multiple nights are present, result is a list: + if isinstance(tmstp_night, list): + rnt_aux = [] # rnt parameter for each night + for jj in range(len(visi_night)): + rnt_array, rnt_med, rnt_max, rnt_min = _radiom_noise( + visi_night[jj][ii, :].real, n_samp + ) + rnt_aux.append(rnt_med) + # Use median of rnt's as parameter: + rnt[ii] = np.median(rnt_aux) + else: + rnt_array, rnt_med, rnt_max, rnt_min = _radiom_noise( + visi_night[ii, :].real, n_samp + ) + # Use median of rnt's as parameter: + rnt[ii] = rnt_med + + # List of good noise channels (Initialized with all True): + good_noise = np.ones((Nchans)) + # Test noise against tolerance and isnan, isinf: + for ii in range(Nchans): + is_nan_inf = np.isnan(rnt[ii]) or np.isinf(rnt[ii]) + if is_nan_inf or rnt[ii] > tol: + good_noise[ii] = 0.0 + return good_noise, rnt + + +def _radiom_noise(trace, n_samp, wind=100): + """Generates radiometer noise test statistics""" + + # If window is < the length, use 
length of trace: + wind = min(len(trace), wind) + + # Window has to be even in length: + if wind % 2 == 1: + wind = wind - 1 + + # Separate trace in windows: + t_w = [trace[ii * wind : (ii + 1) * wind] for ii in range(int(len(trace) // wind))] + + # Estimate total Temp by median of each window: + T = [np.median(entry) for entry in t_w] + + # Subtract even - odd bins to get rid of general trends in data: + t_s = [ + [t_w[ii][jj] - t_w[ii][jj + 1] for jj in range(0, int(wind), 2)] + for ii in range(len(t_w)) + ] + + # RMS of each window: + # Use MAD to estimate RMS. More robust against RFI/correlator spikes. + # sqrt(2) factor is due to my subtracting even - odd time bins. + # 1.4826 factor is to go from MAD to RMS of a normal distribution: + # rms = [ np.std(entry)/np.sqrt(2) for entry in t_s ] # Using MAD to estimate rms for now + rms = [ + np.median([np.abs(entry[ii] - np.median(entry)) for ii in range(len(entry))]) + * 1.4826 + / np.sqrt(2) + for entry in t_s + ] + + # Radiometer equation proporcionality factor: + r_fact = (0.5 * n_samp) ** 0.5 + # Radiometer noise factor (should be ~1): + rnt = [rms[ii] * r_fact / (T[ii]) for ii in range(len(rms))] + + rnt_med = np.median(rnt) + rnt_max = np.max(rnt) + rnt_min = np.min(rnt) + + return rnt, rnt_med, rnt_max, rnt_min + + +def _cut_daytime(visi, tmstp): + """Returns visibilities with night time only. + Returns an array if a single night is present. + Returns a list of arrays if multiple nights are present. 
+ """ + + tstp = tmstp[1] - tmstp[0] # Get time step + + risings = ch_eph.solar_rising(tmstp[0], tmstp[-1]) + settings = ch_eph.solar_setting(tmstp[0], tmstp[-1]) + + if len(risings) == 0 and len(settings) == 0: + next_rising = ch_eph.solar_rising(tmstp[-1]) + next_setting = ch_eph.solar_setting(tmstp[-1]) + + if next_setting < next_rising: + # All data is in daylight time + cut_vis = None + cut_tmstp = None + else: + # All data is in night time + cut_vis = np.copy(visi) + cut_tmstp = tmstp + + elif len(settings) == 0: # Only one rising: + sr = risings[0] + # Find time bin index closest to solar rising: + idx = np.argmin(np.abs(tmstp - sr)) + + # Determine time limits to cut: + # (20 min after setting and before rising, if within range) + cut_low = max(0, idx - int(20.0 * 60.0 / tstp)) # lower limit of time cut + + # Cut daylight times: + cut_vis = np.copy(visi[:, :cut_low]) + cut_tmstp = tmstp[:cut_low] + + elif len(risings) == 0: # Only one setting: + ss = settings[0] + # Find time bin index closest to solar setting: + idx = np.argmin(np.abs(tmstp - ss)) + + # Determine time limits to cut: + # (20 min after setting and before rising, if within range) + cut_up = min( + len(tmstp), idx + int(20.0 * 60.0 / tstp) + ) # upper limit of time to cut + + # Cut daylight times: + cut_vis = np.copy(visi[:, cut_up:]) + cut_tmstp = tmstp[cut_up:] + + else: + cut_pairs = [] + if risings[0] > settings[0]: + cut_pairs.append([tmstp[0], settings[0]]) + for ii in range(1, len(settings)): + cut_pairs.append([risings[ii - 1], settings[ii]]) + if len(risings) == len(settings): + cut_pairs.append([risings[-1], tmstp[-1]]) + else: + for ii in range(len(settings)): + cut_pairs.append([risings[ii], settings[ii]]) + if len(risings) > len(settings): + cut_pairs.append([risings[-1], tmstp[-1]]) + + cut_tmstp = [] + cut_vis = [] + tmstp_remain = tmstp + vis_remain = np.copy(visi) + + for cp in cut_pairs: + # Find time bin index closest to cuts: + idx_low = np.argmin(np.abs(tmstp_remain - 
cp[0])) + idx_up = np.argmin(np.abs(tmstp_remain - cp[1])) + + # Determine time limits to cut: + # (20 min after setting and before rising, if within range) + cut_low = max( + 0, idx_low - int(20.0 * 60.0 / tstp) + ) # lower limit of time cut + cut_up = min( + len(tmstp_remain), idx_up + int(20.0 * 60.0 / tstp) + ) # upper limit of time to cut + + if len(tmstp_remain[:cut_low]) > 0: + cut_vis.append(vis_remain[:, :cut_low]) + cut_tmstp.append( + tmstp_remain[:cut_low] + ) # Append times before rising to cut_tmstp + vis_remain = vis_remain[:, cut_up:] + tmstp_remain = tmstp_remain[ + cut_up: + ] # Use times after setting for further cuts + if len(tmstp_remain) > 0: + # If there is a bit of night data in the end, append it: + cut_tmstp.append(tmstp_remain) + cut_vis.append(vis_remain) + + return cut_vis, cut_tmstp + + +def _gains_test(data, test_freq, test_chans, tol): + """Test channels for excessive digital gains.""" + + input_map = [entry[0] for entry in data.input] + + # Get gains: + # (only gains of channels being tested) + gains = abs( + np.array( + [data.gain[test_freq, input_map.index(chan), 0] for chan in test_chans] + ) + ) + + g_med = np.median(gains) # median + g_devs = [abs(entry - g_med) for entry in gains] # deviations from median + g_mad = np.median(g_devs) # MAD is insensitive to outlier deviations + g_frac_devs = [dev / g_mad for dev in g_devs] # Fractional deviations + + Nchans = len(gains) # Number of channels + + good_gains = np.ones(Nchans) # Good gains initialized to ones + + for ii in range(Nchans): + if g_frac_devs[ii] > tol: # Tolerance for gain deviations + good_gains[ii] = 0.0 + + return good_gains + + +def _stats_print(good_noise, good_gains, good_fit, test_chans): + """Generate a simple set of statistics for the test + and print them to screen. 
+ """ + print("\nFilter statistics:") + + good_chans = [1] * len(test_chans) + + Nact = len(test_chans) # Number of active channels + if good_noise is not None: + Nnoisy = Nact - int(np.sum(good_noise)) + print( + "Noisy channels: {0} out of {1} active channels ({2:2.1f}%)".format( + Nnoisy, Nact, Nnoisy * 100 / Nact + ) + ) + good_chans = good_chans * good_noise + else: + Nnoisy = None + if good_gains is not None: + Ngains = Nact - int(np.sum(good_gains)) + print( + "High digital gains: {0} out of {1} active channels ({2:2.1f}%)".format( + Ngains, Nact, Ngains * 100 / Nact + ) + ) + good_chans = good_chans * good_gains + else: + Ngains = None + if good_fit is not None: + Nfit = Nact - int(np.sum(good_fit)) + print( + "Bad fit to T_sky: {0} out of {1} active channels ({2:2.1f}%)".format( + Nfit, Nact, Nfit * 100 / Nact + ) + ) + good_chans = good_chans * good_fit + else: + Nfit = None + + # Obtain total number of bad channels: + + if not ((good_noise is None) and (good_gains is None) and (good_fit is None)): + Nbad = Nact - int(np.sum(good_chans)) + print( + "Overall bad: {0} out of {1} active channels ({2:2.1f}%)\n".format( + Nbad, Nact, Nbad * 100 / Nact + ) + ) + else: + Nbad = None + + return Nact, Nnoisy, Ngains, Nfit, Nbad + + +def _cut_sun_transit(cut_vis, tmstp, tcut=120.0): + """Cut sun transit times from visibilities. + + Parameters + ---------- + cut_vis : numpy 2D array + visibilities to cut (prod,time). + tmstp : numpy 1D array + time stamps (u-time) + tcut : float + time (in minutes) to cut on both sides of Sun transit. 
+ + """ + + # Start looking for transits tcut minutes before start time: + st_time = tmstp[0] - tcut * 60.0 + # Stop looking for transits tcut minutes after end time: + end_time = tmstp[-1] + tcut * 60.0 + + # Find Sun transits between start time and end time: + sun_trans = ch_eph.solar_transit(st_time, end_time) + + cut_tmstp = tmstp # Time stamps to be cut + tstp = tmstp[1] - tmstp[0] # Get time step + for st in sun_trans: + # Find time bin index closest to solar transit: + idx = np.argmin(np.abs(cut_tmstp - st)) + + # Determine time limits to cut: + # (tcut min on both sides of solar transit, if within range) + # lower limit of time cut: + cut_low = max(0, idx - int(tcut * 60.0 / tstp)) + # upper limit of time cut: + cut_up = min(len(cut_tmstp), idx + int(tcut * 60.0 / tstp)) + + # Cut times of solar transit: + cut_vis = np.concatenate((cut_vis[:, :cut_low], cut_vis[:, cut_up:]), axis=1) + cut_tmstp = np.concatenate((cut_tmstp[:cut_low], cut_tmstp[cut_up:])) + + return cut_vis, cut_tmstp + + +def _median_filter(visi, ks=3): + """Median filter visibilities for fit test.""" + from scipy.signal import medfilt + + # Median filter visibilities: + cut_vis = np.array( + [medfilt(visi[jj, :].real, kernel_size=ks) for jj in range(visi.shape[0])] + ) + return cut_vis + + +def _get_template(cut_vis_full, stand_chans): + """Obtain template visibility through an SVD. + This template will be compared to the actual + visibilities in _fit_template. 
+ """ + + # Full copy of visibilities without sun: + cut_vis = np.copy(cut_vis_full) + + # Cut out noisy and bad-gain channels: + cut_vis = np.array( + [cut_vis[jj, :] for jj in range(cut_vis.shape[0]) if stand_chans[jj]] + ) + + Nchans = cut_vis.shape[0] # Number of channels after cut + + # Perform a first cut of the most outlying visibilities: + # Remove the offset of the visibilities (aprox T_receiver): + vis_test = np.array( + [cut_vis[jj, :] - np.min(cut_vis[jj, :]) for jj in range(Nchans)] + ) + # Normalize visibilities: + vis_test = np.array( + [ + vis_test[jj, :] / (np.max(vis_test[jj, :]) - np.min(vis_test[jj, :])) + for jj in range(vis_test.shape[0]) + ] + ) + medn = np.median(vis_test, axis=0) # Median visibility across channels + devs = [np.sum(abs(entry - medn)) for entry in vis_test] # Deviations sumed in time + # Number of channels that can be ignored due + # to excessive deviations: + Ncut = 10 + # Find the channels with largest deviations: + indices = list(range(len(devs))) + del_ind = [] + for nn in range(Ncut): + max_ind = np.argmax(devs) + del_ind.append(indices[max_ind]) + devs = np.delete(devs, max_ind) + indices = np.delete(indices, max_ind) + # Cut-out channels with largest deviations: + cut_vis = np.array( + [cut_vis[jj, :] for jj in range(len(cut_vis)) if not (jj in del_ind)] + ) + + Nchans = cut_vis.shape[0] # Number of channels after cut + + # Find most typical channel within each frequency: + # Model: visi = gn*(Ts + Tr) = gn*Ts + gTr + # Where: + # gn : gains + # Ts : Sky temperature + # Tr : reciver temperature + # gTr = gn * Tr + + # Determine first guess for receiver temperature * gain: Tr * gn = gTr + # (lower value of low-pass filtered visibility) + gTr = np.array([np.min(cut_vis[jj, :]) for jj in range(Nchans)]) + # Matrix Vs is the visibilities minus the guessed receiver temperature * gn + # For the exact gTr it should be: Vs = visi - gTr = gn * Ts + Vs = np.array([cut_vis[jj, :] - gTr[jj] for jj in range(Nchans)]) + + ss = 1.0 
+ ss_diff = 1.0 + tries = 0 + while ss_diff > 0.01 or ss < 3.0: + # SVD on Vs: + U, s, V = np.linalg.svd(Vs, full_matrices=False) + + ss_diff = abs(ss - s[0] / s[1]) + ss = s[0] / s[1] + # print 'Ratio of first to second singular value: ', ss + + # Updated gains, sky temp, sky visibility and + # gTr approximations: + gn = U[:, 0] * s[0] # Only first singular value + Ts = V[0, :] # Only first singular value + Vs = np.outer(gn, Ts) # Outer product + # New gTr is visi minus spprox sky vis, averaged over time: + gTr = np.mean((cut_vis - Vs), axis=1) + Vs = np.array([cut_vis[jj] - gTr[jj] for jj in range(Nchans)]) + + tries += 1 + if tries == 100: + msg = ( + "SVD search for Tsky at freq {0} did NOT converge.\n" + + "Bad channels list might not be accurate." + ) + print(msg) + break + + # Solution could have negative gains and negative T sky: + if np.sum(gn) < 0.0: # Most gains are < 0 means sign is wrong + # if np.all(Ts < 0) and np.all(gn < 0): + Ts = Ts * (-1.0) + gn = gn * (-1.0) + + return gn, Ts + + +def _fit_template(Ts, cut_vis, tol): + """Fits template visibility to actual ones + to identify bad channels. + """ + + from scipy.optimize import curve_fit + + class Template(object): + def __init__(self, tmplt): + self.tmplt = tmplt + + def fit_func(self, t, gn, Tr): + return gn * self.tmplt[t] + Tr # Template*gains + receiver temper. 
+ + sky = Template(Ts) + + # Amplitude of template used in first guesses: + amp_t = np.max(Ts) - np.min(Ts) + min_t = np.min(Ts) + + popt, perr = [], [] + for chan in range(cut_vis.shape[0]): + # First guesses: + amp = np.max(cut_vis[chan]) - np.min(cut_vis[chan]) + gn0 = amp / amp_t # gain as fraction of amplitudes + Tr0 = np.min(cut_vis[chan]) - min_t * gn0 + p0 = [gn0, Tr0] # First guesses at parameters + + # Make the fit: + xdata = list(range(len(cut_vis[chan]))) + popt_aux, pcov = curve_fit(sky.fit_func, xdata, cut_vis[chan], p0) + perr_aux = np.sqrt(np.diag(pcov)) # Standard deviation of parameters + + popt.append(popt_aux) + perr.append(perr_aux) + + Nchans = cut_vis.shape[0] + good_fit = np.ones(Nchans) + for ii in range(Nchans): + neg_gain = popt[ii][0] < 0.0 # Negative gains fail the test + if neg_gain or (abs(perr[ii][0] / popt[ii][0]) > tol): + good_fit[ii] = 0.0 + + return good_fit, popt, perr, sky + + +def _create_plot( + visi, tmstp, cut_tmstp, sky, popt, test_chans, good_gains, good_noise, good_fit +): + """Creates plot of the visibilities and the fits + with labels for those that fail the tests + """ + import matplotlib + + matplotlib.use("PDF") + import matplotlib.pyplot as plt + import time + + # Visibilities to plot: + visi1 = visi # Raw data + tmstp1 = tmstp # Raw data + visi2 = np.array( + [ + [sky.fit_func(tt, popt[ii][0], popt[ii][1]) for tt in range(len(cut_tmstp))] + for ii in range(len(popt)) + ] + ) + tmstp2 = cut_tmstp + + # For title, use start time stamp: + title = "Good channels result for {0}".format( + ctime.unix_to_datetime(tmstp1[0]).date() + ) + + # I need to know the slot for each channel: + def get_slot(channel): + slot_array = [4, 2, 16, 14, 3, 1, 15, 13, 8, 6, 12, 10, 7, 5, 11, 9] + return slot_array[int(channel) // 16] + + fig = plt.figure(figsize=(8, 64)) + fig.suptitle(title, fontsize=16) + + if (tmstp1[-1] - tmstp1[0]) / (24.0 * 3600.0) > 3.0: + # Days since starting time + # Notice: same starting time for both + 
time_pl1 = (tmstp1 - tmstp1[0]) / (3600 * 24) + time_pl2 = (tmstp2 - tmstp1[0]) / (3600 * 24) + time_unit = "days" + else: + # Hours since starting time + time_pl1 = (tmstp1 - tmstp1[0]) / (3600) + time_pl2 = (tmstp2 - tmstp1[0]) / (3600) + time_unit = "hours" + + for ii in range(len(visi1)): + chan = test_chans[ii] + + # Determine position in subplot: + if chan < 64: + pos = chan * 4 + 1 + elif chan < 128: + pos = (chan - 64) * 4 + 2 + elif chan < 192: + pos = (chan - 128) * 4 + 3 + elif chan < 256: + pos = (chan - 192) * 4 + 4 + + # Create subplot: + plt.subplot(64, 4, pos) + + lab = "" + # Or print standard label: + if good_gains is not None: + if not good_gains[ii]: + lab = lab + "bad gains | " + if good_noise is not None: + if not good_noise[ii]: + lab = lab + "noisy | " + if not good_fit[ii]: + lab = lab + "bad fit" + + if lab != "": + plt.plot([], [], "1.0", label=lab) + plt.legend(loc="best", prop={"size": 6}) + + trace_pl1 = visi1[ii, :].real + plt.plot(time_pl1, trace_pl1, "b-") + + trace_pl2 = visi2[ii, :].real + plt.plot(time_pl2, trace_pl2, "r-") + + tm_brd = (time_pl1[-1] - time_pl1[0]) / 10.0 + plt.xlim(time_pl1[0] - tm_brd, time_pl1[-1] + tm_brd) + + # Determine limits of plots: + med = np.median(trace_pl1) + mad = np.median([abs(entry - med) for entry in trace_pl1]) + plt.ylim(med - 7.0 * mad, med + 7.0 * mad) + + # labels: + plt.ylabel("Ch{0} (Sl.{1})".format(chan, get_slot(chan)), fontsize=8) + + # Hide numbering: + frame = plt.gca() + frame.axes.get_yaxis().set_ticks([]) + if (chan != 63) and (chan != 127) and (chan != 191) and (chan != 255): + # Remove x-axis, except on bottom plots: + frame.axes.get_xaxis().set_ticks([]) + else: + # Change size of numbers in x axis: + frame.tick_params(axis="both", which="major", labelsize=10) + frame.tick_params(axis="both", which="minor", labelsize=8) + if chan == 127: + # Put x-labels on bottom plots: + if time_unit == "days": + plt.xlabel( + "Time (days since {0} UTC)".format( + 
ctime.unix_to_datetime(tmstp1[0]) + ), + fontsize=10, + ) + else: + plt.xlabel( + "Time (hours since {0} UTC)".format( + ctime.unix_to_datetime(tmstp1[0]) + ), + fontsize=10, + ) + + if chan == 0: + plt.title("West cyl. P1(N-S)", fontsize=12) + elif chan == 64: + plt.title("West cyl. P2(E-W)", fontsize=12) + elif chan == 128: + plt.title("East cyl. P1(N-S)", fontsize=12) + elif chan == 192: + plt.title("East cyl. P2(E-W)", fontsize=12) + + filename = "plot_fit_{0}.pdf".format(int(time.time())) + plt.savefig(filename) + plt.close() + print("Finished creating plot. File name: {0}".format(filename)) +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/_modules/ch_util/ephemeris.html b/docs/_modules/ch_util/ephemeris.html new file mode 100644 index 00000000..6037298f --- /dev/null +++ b/docs/_modules/ch_util/ephemeris.html @@ -0,0 +1,1138 @@ + + + + + + ch_util.ephemeris — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for ch_util.ephemeris

+"""
+Ephemeris routines
+
+The precession of the Earth's axis gives noticeable shifts in object
+positions over the life time of CHIME. To minimise the effects of this we
+need to be careful and consistent with our ephemeris calculations.
+Historically Right Ascension has been given with respect to the Vernal
+Equinox which has a significant (and unnecessary) precession in the origin of
+the RA axis. To avoid this we use the new Celestial Intermediate Reference
+System which does not suffer from this issue.
+
+Practically this means that when calculating RA, DEC coordinates for a source
+position at a *given time* you must be careful to obtain CIRS coordinates
+(and not equinox based ones). Internally using `ephemeris.object_coords` does
+exactly that for you, so for any lookup of coordinates you should use that on
+your requested body.
+
+Note that the actual coordinate positions of sources must be specified using
+RA, DEC coordinates in ICRS (which is roughly equivalent to J2000). The
+purpose of object_coords is to transform into new RA, DEC coordinates taking
+into account the precession and nutation of the Earth's polar axis since
+then.
+
+These kind of coordinate issues are tricky, confusing and hard to debug years
+later, so if you're unsure you are recommended to seek some advice.
+
+Constants
+=========
+
+:const:`CHIMELATITUDE`
+    CHIME's latitude [degrees].
+:const:`CHIMELONGITUDE`
+    CHIME's longitude [degrees].
+:const:`CHIMEALTITUDE`
+    CHIME's altitude [metres].
+:const:`SIDEREAL_S`
+    Number of SI seconds in a sidereal second [s/sidereal s]. You probably want
+    STELLAR_S instead.
+:const:`STELLAR_S`
+    Number of SI seconds in a stellar second [s/stellar s].
+:const:`CasA`
+    :class:`skyfield.starlib.Star` representing Cassiopeia A.
+:const:`CygA`
+    :class:`skyfield.starlib.Star` representing Cygnus A.
+:const:`TauA`
+    :class:`skyfield.starlib.Star` representing Taurus A.
+:const:`VirA`
+    :class:`skyfield.starlib.Star` representing Virgo A.
+
+
+Telescope Instances
+===================
+
+- :const:`chime`
+
+
+Ephemeris Functions
+===================
+
+- :py:meth:`skyfield_star_from_ra_dec`
+- :py:meth:`transit_times`
+- :py:meth:`solar_transit`
+- :py:meth:`lunar_transit`
+- :py:meth:`setting_times`
+- :py:meth:`solar_setting`
+- :py:meth:`lunar_setting`
+- :py:meth:`rising_times`
+- :py:meth:`solar_rising`
+- :py:meth:`lunar_rising`
+- :py:meth:`_is_skyfield_obj`
+- :py:meth:`peak_RA`
+- :py:meth:`get_source_dictionary`
+- :py:meth:`lsa`
+
+
+Time Utilities
+==============
+
+- :py:meth:`ensure_unix`
+- :py:meth:`chime_local_datetime`
+- :py:meth:`unix_to_datetime`
+- :py:meth:`datetime_to_unix`
+- :py:meth:`datetime_to_timestr`
+- :py:meth:`timestr_to_datetime`
+- :py:meth:`unix_to_skyfield_time`
+- :py:meth:`skyfield_time_to_unix`
+- :py:meth:`time_of_day`
+- :py:meth:`csd`
+- :py:meth:`csd_to_unix`
+- :py:meth:`unix_to_csd`
+- :py:meth:`parse_date`
+
+
+Miscellaneous Utilities
+=======================
+
+- :py:meth:`galt_pointing_model_ha`
+- :py:meth:`galt_pointing_model_dec`
+"""
+
+from datetime import datetime
+from typing import Union
+from numpy.core.multiarray import unravel_index
+
+# NOTE: Load Skyfield API but be sure to use skyfield_wrapper for loading data
+import skyfield.api
+
+import numpy as np
+
+from caput.interferometry import sphdist
+from caput.time import (
+    unix_to_datetime,
+    datetime_to_unix,
+    datetime_to_timestr,
+    timestr_to_datetime,
+    leap_seconds_between,
+    time_of_day,
+    Observer,
+    unix_to_skyfield_time,
+    skyfield_time_to_unix,
+    skyfield_star_from_ra_dec,
+    skyfield_wrapper,
+    ensure_unix,
+    SIDEREAL_S,
+    STELLAR_S,
+)
+
+# Calvin derived the horizontal position of the center of the focal lines...
+# ...and the elevation of the focal line from survey coordinates:
+# All altitudes given in meters above sea level
+CHIMELATITUDE = 49.3207092194
+CHIMELONGITUDE = -119.6236774310
+CHIMEALTITUDE = 555.372
+
+# Calvin also positioned the GBO/TONE Outrigger similarly.
+# GBO/TONE Outrigger
+TONELATITUDE = 38.4292962636
+TONELONGITUDE = -79.8451625395
+TONEALTITUDE = 810.000
+
+# Rough position for outriggers.
+# These will be updated as positioning gets refined.
+# https://bao.chimenet.ca/doc/documents/1727
+KKOLATITUDE = 49.41905
+KKOLONGITUDE = -120.5253
+KKOALTITUDE = 835
+
+# Aliases for backwards compatibility
+PCOLATITUDE = KKOLATITUDE
+PCOLONGITUDE = KKOLONGITUDE
+PCOALTITUDE = KKOALTITUDE
+
+GBOLATITUDE = 38.436122
+GBOLONGITUDE = -79.827922
+GBOALTITUDE = 2710 / 3.28084
+
+HCOLATITUDE = 40.8171082
+HCOLONGITUDE = -121.4689584
+HCOALTITUDE = 3346 / 3.28084
+
+# Create the Observer instances for CHIME and outriggers
+chime = Observer(
+    lon=CHIMELONGITUDE,
+    lat=CHIMELATITUDE,
+    alt=CHIMEALTITUDE,
+    lsd_start=datetime(2013, 11, 15),
+)
+
+tone = Observer(
+    lon=TONELONGITUDE,
+    lat=TONELATITUDE,
+    alt=TONEALTITUDE,
+    lsd_start=datetime(2013, 11, 15),
+)
+
+kko = Observer(
+    lon=KKOLONGITUDE,
+    lat=KKOLATITUDE,
+    alt=KKOALTITUDE,
+    lsd_start=datetime(2013, 11, 15),
+)
+
+gbo = Observer(
+    lon=GBOLONGITUDE,
+    lat=GBOLATITUDE,
+    alt=GBOALTITUDE,
+    lsd_start=datetime(2013, 11, 15),
+)
+
+hco = Observer(
+    lon=HCOLONGITUDE,
+    lat=HCOLATITUDE,
+    alt=HCOALTITUDE,
+    lsd_start=datetime(2013, 11, 15),
+)
+
+
+def _get_chime():
+    import warnings
+
+    warnings.warn("Use `ephemeris.chime` instead.", DeprecationWarning)
+    return chime
+
+
+
+[docs] +def galt_pointing_model_ha( + ha_in, dec_in, a=[-5.872, -0.5292, 5.458, -0.076, -0.707, 0.0, 0.0] +): + """Calculate pointing correction in hour angle for the Galt Telescope + See description of the pointing model by Lewis Knee CHIME document library + 754 https://bao.chimenet.ca/doc/documents/754 + + Parameters + ---------- + ha, dec : Skyfield Angle objects + Target hour angle and declination + + a : list of floats + List of coefficients (in arcmin) for the pointing model + (NOTE: it is very unlikely that a user will want to change these + from the defaults, which are taken from the pointing model as of + 2019-2-15) + + Returns + ------- + Skyfield Angle object + Angular offset in hour angle + """ + + from skyfield.positionlib import Angle + + ha = ha_in.radians + dec = dec_in.radians + + # hour angle pointing correction in arcmin + delta_ha_cos_dec = ( + a[0] + + a[1] * np.sin(dec) + + a[2] * np.cos(dec) + + a[3] * np.sin(ha) * np.sin(dec) + + a[4] * np.cos(ha) * np.sin(dec) + + a[5] * np.sin(ha) * np.cos(dec) + + a[6] * np.cos(ha) * np.cos(dec) + ) + + return Angle(degrees=(delta_ha_cos_dec / np.cos(dec)) / 60.0)
+ + + +
+[docs] +def galt_pointing_model_dec( + ha_in, dec_in, b=[1.081, 0.707, -0.076, 0.0, 0.0, 0.0, 0.0] +): + """Calculate pointing correction in declination for the Galt Telescope + See description of the pointing model by Lewis Knee CHIME document library + 754 https://bao.chimenet.ca/doc/documents/754 + + Parameters + ---------- + ha, dec : Skyfield Angle objects + Target hour angle and declination + + b : list of floats + List of coefficients (in arcmin) for the pointing model + (NOTE: it is very unlikely that a user will want to change these + from the defaults, which are taken from the pointing model as of + 2019-2-15) + + Returns + ------- + Skyfield Angle object + Angular offset in hour angle + """ + + from skyfield.positionlib import Angle + + ha = ha_in.radians + dec = dec_in.radians + + # declination pointing correction in arcmin + delta_dec = ( + b[0] + + b[1] * np.sin(ha) + + b[2] * np.cos(ha) + + b[3] * np.sin(dec) + + b[4] * np.cos(dec) + + b[5] * np.sin(dec) * np.cos(ha) + + b[6] * np.sin(dec) * np.sin(ha) + ) + + return Angle(degrees=delta_dec / 60.0)
+ + + +
+[docs] +def parse_date(datestring): + """Convert date string to a datetime object. + + Parameters + ---------- + datestring : string + Date as YYYYMMDD-AAA, where AAA is one of [UTC, PST, PDT] + + Returns + ------- + date : datetime + A python datetime object in UTC. + """ + from datetime import datetime, timedelta + import re + + rm = re.match("([0-9]{8})-([A-Z]{3})", datestring) + if rm is None: + msg = ( + "Wrong format for datestring: {0}.".format(datestring) + + "\nShould be YYYYMMDD-AAA, " + + "where AAA is one of [UTC,PST,PDT]" + ) + raise ValueError(msg) + + datestring = rm.group(1) + tzoffset = 0.0 + tz = rm.group(2) + + tzs = {"PDT": -7.0, "PST": -8.0, "EDT": -4.0, "EST": -5.0, "UTC": 0.0} + + if tz is not None: + try: + tzoffset = tzs[tz.upper()] + except KeyError: + print("Time zone {} not known. Known time zones:".format(tz)) + for key, value in tzs.items(): + print(key, value) + print("Using UTC{:+.1f}.".format(tzoffset)) + + return datetime.strptime(datestring, "%Y%m%d") - timedelta(hours=tzoffset)
+ + + +
+[docs] +def utc_lst_to_mjd(datestring, lst, obs=chime): + """Convert datetime string and LST to corresponding modified Julian Day + + Parameters + ---------- + datestring : string + Date as YYYYMMDD-AAA, where AAA is one of [UTC, PST, PDT] + lst : float + Local sidereal time at DRAO (CHIME) in decimal hours + obs : caput.Observer object + + Returns + ------- + mjd : float + Modified Julian Date corresponding to the given time. + """ + return ( + unix_to_skyfield_time( + obs.lsa_to_unix(lst * 360 / 24, datetime_to_unix(parse_date(datestring))) + ).tt + - 2400000.5 + )
+ + + +
+[docs] +def solar_transit(start_time, end_time=None, obs=chime): + """Find the Solar transits between two times for CHIME. + + Parameters + ---------- + start_time : float (UNIX time) or datetime + Start time to find transits. + end_time : float (UNIX time) or datetime, optional + End time for finding transits. If `None` default, search for 24 hours + after start time. + + Returns + ------- + transit_times : array_like + Array of transit times (in UNIX time). + + """ + + planets = skyfield_wrapper.ephemeris + sun = planets["sun"] + return obs.transit_times(sun, start_time, end_time)
+ + + +
+[docs] +def lunar_transit(start_time, end_time=None, obs=chime): + """Find the Lunar transits between two times for CHIME. + + Parameters + ---------- + start_time : float (UNIX time) or datetime + Start time to find transits. + end_time : float (UNIX time) or datetime, optional + End time for finding transits. If `None` default, search for 24 hours + after start time. + + Returns + ------- + transit_times : array_like + Array of transit times (in UNIX time). + + """ + + planets = skyfield_wrapper.ephemeris + moon = planets["moon"] + return obs.transit_times(moon, start_time, end_time)
+ + + +# Create CHIME specific versions of various calls. +lsa_to_unix = chime.lsa_to_unix +unix_to_lsa = chime.unix_to_lsa +unix_to_csd = chime.unix_to_lsd +csd_to_unix = chime.lsd_to_unix +csd = unix_to_csd +lsa = unix_to_lsa +transit_times = chime.transit_times +setting_times = chime.set_times +rising_times = chime.rise_times +CSD_ZERO = chime.lsd_start_day + + +
+[docs] +def transit_RA(time): + """No longer supported. Use `lsa` instead.""" + raise NotImplementedError( + "No longer supported. Use the better defined `lsa` instead." + )
+ + + +
+[docs] +def chime_local_datetime(*args): + """Create a :class:`datetime.datetime` object in Canada/Pacific timezone. + + Parameters + ---------- + *args + Any valid arguments to the constructor of :class:`datetime.datetime` + except *tzinfo*. Local date and time at CHIME. + + Returns + ------- + dt : :class:`datetime.datetime` + Timezone naive date and time but converted to UTC. + + """ + + from pytz import timezone + + tz = timezone("Canada/Pacific") + dt_naive = datetime(*args) + if dt_naive.tzinfo: + msg = "Time zone should not be supplied." + raise ValueError(msg) + dt_aware = tz.localize(dt_naive) + return dt_aware.replace(tzinfo=None) - dt_aware.utcoffset()
+ + + +
+[docs] +def solar_setting(start_time, end_time=None, obs=chime): + """Find the Solar settings between two times for CHIME. + + Parameters + ---------- + start_time : float (UNIX time) or datetime + Start time to find settings. + end_time : float (UNIX time) or datetime, optional + End time for finding settings. If `None` default, search for 24 hours + after start time. + + Returns + ------- + setting_times : array_like + Array of setting times (in UNIX time). + + """ + + planets = skyfield_wrapper.ephemeris + sun = planets["sun"] + # Use 0.6 degrees for the angular diameter of the Sun to be conservative: + return obs.set_times(sun, start_time, end_time, diameter=0.6)
+ + + +
+[docs] +def lunar_setting(start_time, end_time=None, obs=chime): + """Find the Lunar settings between two times for CHIME. + + Parameters + ---------- + start_time : float (UNIX time) or datetime + Start time to find settings. + end_time : float (UNIX time) or datetime, optional + End time for finding settings. If `None` default, search for 24 hours + after start time. + + Returns + ------- + setting_times : array_like + Array of setting times (in UNIX time). + + """ + + planets = skyfield_wrapper.ephemeris + moon = planets["moon"] + # Use 0.6 degrees for the angular diameter of the Moon to be conservative: + return obs.set_times(moon, start_time, end_time, diameter=0.6)
+ + + +
+[docs] +def solar_rising(start_time, end_time=None, obs=chime): + """Find the Solar risings between two times for CHIME. + + Parameters + ---------- + start_time : float (UNIX time) or datetime + Start time to find risings. + end_time : float (UNIX time) or datetime, optional + End time for finding risings. If `None` default, search for 24 hours + after start time. + + Returns + ------- + rising_times : array_like + Array of rising times (in UNIX time). + + """ + + planets = skyfield_wrapper.ephemeris + sun = planets["sun"] + # Use 0.6 degrees for the angular diameter of the Sun to be conservative: + return obs.rise_times(sun, start_time, end_time, diameter=0.6)
+ + + +
+[docs] +def lunar_rising(start_time, end_time=None, obs=chime): + """Find the Lunar risings between two times for CHIME. + + Parameters + ---------- + start_time : float (UNIX time) or datetime + Start time to find risings. + end_time : float (UNIX time) or datetime, optional + End time for finding risings. If `None` default, search for 24 hours after + start time. + + Returns + ------- + rising_times : array_like + Array of rising times (in UNIX time). + + """ + + planets = skyfield_wrapper.ephemeris + moon = planets["moon"] + # Use 0.6 degrees for the angular diameter of the Moon to be conservative: + return obs.rise_times(moon, start_time, end_time, diameter=0.6)
+ + + +def _is_skyfield_obj(body): + return ( + isinstance(body, skyfield.starlib.Star) + or isinstance(body, skyfield.vectorlib.VectorSum) + or isinstance(body, skyfield.jpllib.ChebyshevPosition) + ) + + +
+[docs] +def Star_cirs(ra, dec, epoch): + """Wrapper for skyfield.api.star that creates a position given CIRS + coordinates observed from CHIME + + Parameters + ---------- + ra, dec : skyfield.api.Angle + RA and dec of the source in CIRS coordinates + epoch : skyfield.api.Time + Time of the observation + + Returns + ------- + body : skyfield.api.Star + Star object in ICRS coordinates + """ + + from skyfield.api import Star + + return cirs_radec(Star(ra=ra, dec=dec, epoch=epoch))
+ + + +
+[docs] +def cirs_radec(body, date=None, deg=False, obs=chime): + """Converts a Skyfield body in CIRS coordinates at a given epoch to + ICRS coordinates observed from CHIME + + Parameters + ---------- + body : skyfield.api.Star + Skyfield Star object with positions in CIRS coordinates. + + Returns + ------- + new_body : skyfield.api.Star + Skyfield Star object with positions in ICRS coordinates + """ + + from skyfield.positionlib import Angle + from skyfield.api import Star + + ts = skyfield_wrapper.timescale + + epoch = ts.tt_jd(np.median(body.epoch)) + + pos = obs.skyfield_obs().at(epoch).observe(body) + + # Matrix CT transforms from CIRS to ICRF (https://rhodesmill.org/skyfield/time.html) + r_au, dec, ra = skyfield.functions.to_polar( + np.einsum("ij...,j...->i...", epoch.CT, pos.position.au) + ) + + return Star( + ra=Angle(radians=ra, preference="hours"), dec=Angle(radians=dec), epoch=epoch + )
+ + + +
+[docs] +def object_coords(body, date=None, deg=False, obs=chime): + """Calculates the RA and DEC of the source. + + Gives the ICRS coordinates if no date is given (=J2000), or if a date is + specified gives the CIRS coordinates at that epoch. + + This also returns the *apparent* position, including abberation and + deflection by gravitational lensing. This shifts the positions by up to + 20 arcseconds. + + Parameters + ---------- + body : skyfield source + skyfield.starlib.Star or skyfield.vectorlib.VectorSum or + skyfield.jpllib.ChebyshevPosition body representing the source. + date : float + Unix time at which to determine ra of source If None, use Jan 01 + 2000. + deg : bool + Return RA ascension in degrees if True, radians if false (default). + obs : `caput.time.Observer` + An observer instance to use. If not supplied use `chime`. For many + calculations changing from this default will make little difference. + + Returns + ------- + ra, dec: float + Position of the source. + """ + + if date is None: # No date, get ICRS coords + if isinstance(body, skyfield.starlib.Star): + ra, dec = body.ra.radians, body.dec.radians + else: + raise ValueError( + "Body is not fixed, cannot calculate coordinates without a date." + ) + + else: # Calculate CIRS position with all corrections + date = unix_to_skyfield_time(date) + radec = obs.skyfield_obs().at(date).observe(body).apparent().cirs_radec(date) + + ra, dec = radec[0].radians, radec[1].radians + + # If requested, convert to degrees + if deg: + ra = np.degrees(ra) + dec = np.degrees(dec) + + # Return + return ra, dec
+ + + +
+[docs] +def hadec_to_bmxy(ha_cirs, dec_cirs): + """Convert CIRS hour angle and declination to CHIME/FRB beam-model XY coordinates. + + Parameters + ---------- + ha_cirs : array_like + The CIRS Hour Angle in degrees. + dec_cirs : array_like + The CIRS Declination in degrees. + + Returns + ------- + bmx, bmy : array_like + The CHIME/FRB beam model X and Y coordinates in degrees as defined in + the beam-model coordinate conventions: + https://chime-frb-open-data.github.io/beam-model/#coordinate-conventions + """ + + from caput.interferometry import rotate_ypr, sph_to_ground + + from ch_util.tools import _CHIME_ROT + + # Convert CIRS coordinates to CHIME "ground fixed" XYZ coordinates, + # which constitute a unit vector pointing towards the point of interest, + # i.e., telescope cartesian unit-sphere coordinates. + # chx: The EW coordinate (increases to the East) + # chy: The NS coordinate (increases to the North) + # chz: The vertical coordinate (increases to the sky) + chx, chy, chz = sph_to_ground( + np.deg2rad(ha_cirs), np.deg2rad(CHIMELATITUDE), np.deg2rad(dec_cirs) + ) + + # Correct for CHIME telescope rotation with respect to North + ypr = np.array([np.deg2rad(-_CHIME_ROT), 0, 0]) + chx_rot, chy_rot, chz_rot = rotate_ypr(ypr, chx, chy, chz) + + # Convert rotated CHIME "ground fixed" XYZ coordinates to spherical polar coordinates + # with the pole towards almost-North and using CHIME's meridian as the prime meridian. + # Note that the azimuthal angle theta in these spherical polar coordinates increases + # to the West (to ensure that phi and theta here have the same meaning as the variables + # with the same names in the beam_model package and DocLib #1203). 
+ # phi (polar angle): almost-North = 0 deg; zenith = 90 deg; almost-South = 180 deg + # theta (azimuthal angle): almost-East = -90 deg; zenith = 0 deg; almost-West = +90 deg + phi = np.arccos(chy_rot) + theta = np.arctan2(-chx_rot, +chz_rot) + + # Convert polar angle and azimuth to CHIME/FRB beam model XY position + bmx = np.rad2deg(theta * np.sin(phi)) + bmy = np.rad2deg(np.pi / 2.0 - phi) + + return bmx, bmy
+ + + +
+[docs] +def bmxy_to_hadec(bmx, bmy): + """Convert CHIME/FRB beam-model XY coordinates to CIRS hour angle and declination. + + Parameters + ---------- + bmx, bmy : array_like + The CHIME/FRB beam model X and Y coordinates in degrees as defined in + the beam-model coordinate conventions: + https://chime-frb-open-data.github.io/beam-model/#coordinate-conventions + X is degrees west from the meridian + Y is degrees north from zenith + + Returns + ------- + ha_cirs : array_like + The CIRS Hour Angle in degrees. + dec_cirs : array_like + The CIRS Declination in degrees. + """ + import warnings + + from caput.interferometry import rotate_ypr, ground_to_sph + + from ch_util.tools import _CHIME_ROT + + # Convert CHIME/FRB beam model XY position to spherical polar coordinates + # with the pole towards almost-North and using CHIME's meridian as the prime + # meridian. Note that the CHIME/FRB beam model X coordinate increases westward + # and so does the azimuthal angle theta in these spherical polar coordinates + # (to ensure that phi and theta here have the same meaning as the variables + # with the same names in the beam_model package and DocLib #1203). + # phi (polar angle): almost-North = 0 deg; zenith = 90 deg; almost-South = 180 deg + # theta (azimuthal angle): almost-East = -90 deg; zenith = 0 deg; almost-West = +90 deg + phi = np.pi / 2.0 - np.deg2rad(bmy) + theta = np.deg2rad(bmx) / np.sin(phi) + + # Warn for input beam-model XY positions below the horizon + scalar_input = np.isscalar(theta) + theta = np.atleast_1d(theta) + if (theta < -1.0 * np.pi / 2.0).any() or (theta > np.pi / 2.0).any(): + warnings.warn("Input beam model XY coordinate(s) below horizon.") + if scalar_input: + theta = np.squeeze(theta) + + # Convert spherical polar coordinates to rotated CHIME "ground fixed" XYZ + # coordinates (i.e., cartesian unit-sphere coordinates, rotated to correct + # for the CHIME telescope's rotation with respect to North). 
+ # chx_rot: The almost-EW coordinate (increases to the almost-East) + # chy_rot: The almost-NS coordinate (increases to the almost-North) + # chz_rot: The vertical coordinate (increases to the sky) + chx_rot = np.sin(phi) * np.sin(-theta) + chy_rot = np.cos(phi) + chz_rot = np.sin(phi) * np.cos(-theta) + + # Undo correction for CHIME telescope rotation with respect to North + ypr = np.array([np.deg2rad(_CHIME_ROT), 0, 0]) + chx, chy, chz = rotate_ypr(ypr, chx_rot, chy_rot, chz_rot) + + # Convert CHIME "ground fixed" XYZ coordinates to CIRS hour angle and declination + ha_cirs, dec_cirs = ground_to_sph(chx, chy, np.deg2rad(CHIMELATITUDE)) + + return np.rad2deg(ha_cirs), np.rad2deg(dec_cirs)
+ + + +
+[docs] +def peak_RA(body, date=None, deg=False): + """Calculates the RA where a source is expected to peak in the beam. + Note that this is not the same as the RA where the source is at + transit, since the pathfinder is rotated with respect to north. + + Parameters + ---------- + body : ephem.FixedBody + skyfield.starlib.Star or skyfield.vectorlib.VectorSum or + skyfield.jpllib.ChebyshevPosition or Ephemeris body + representing the source. + date : float + Unix time at which to determine ra of source + If None, use Jan 01 2000. + Ignored if body is not a skyfield object + deg : bool + Return RA ascension in degrees if True, + radians if false (default). + + Returns + ------- + peak_ra : float + RA when the transiting source peaks. + """ + + _PF_ROT = np.radians(1.986) # Pathfinder rotation from north. + _PF_LAT = np.radians(CHIMELATITUDE) # Latitude of pathfinder + + # Extract RA and dec of object + ra, dec = object_coords(body, date=date) + + # Estimate the RA at which the transiting source peaks + ra = ra + np.tan(_PF_ROT) * (dec - _PF_LAT) / np.cos(_PF_LAT) + + # If requested, convert to degrees + if deg: + ra = np.degrees(ra) + + # Return + return ra
+ + + +# For backwards compatibility +
+[docs] +def get_doppler_shifted_freq(*args, **kwargs): + """Deprecated. Use `ch_util.hfbcat.get_doppler_shifted_freq`.""" + from . import hfbcat + + import warnings + + warnings.warn( + "Use `ch_util.hfbcat.get_doppler_shifted_freq` instead.", DeprecationWarning + ) + + return hfbcat.get_doppler_shifted_freq(*args, **kwargs)
+ + + +
+[docs] +def get_range_rate( + source: skyfield.starlib.Star, + date: Union[float, list], + obs: Observer = chime, +) -> Union[float, np.array]: + """Calculate rate at which distance between observer and source changes. + + Parameters + ---------- + source + Position(s) on the sky. + date + Unix time(s) for which to calculate range rate. + obs + An Observer instance to use. If not supplied use `chime`. For many + calculations changing from this default will make little difference. + + Returns + ------- + range_rate + Rate (in m/s) at which the distance between the observer and source + changes (i.e., the velocity of observer in direction of source, but + positive for observer and source moving appart). If either `source` + or `date` contains multiple entries, `range_rate` will be an array. + Otherwise, `range_rate` will be a float. + + Notes + ----- + Only one of `source` and `date` can contain multiple entries. + + This routine uses an :class:`skyfield.positionlib.Apparent` object + (rather than an :class:`skyfield.positionlib.Astrometric` object) to find + the velocity of the observatory and the position of the source. This + accounts for the gravitational deflection and the aberration of light. + It is unclear if the latter should be taken into account for this Doppler + shift calculation, but its effects are negligible. + """ + + if hasattr(source.ra._degrees, "__iter__") and hasattr(date, "__iter__"): + raise ValueError( + "Only one of `source` and `date` can contain multiple entries." 
+ ) + + # Convert unix times to skyfield times + date = unix_to_skyfield_time(date) + + # Create skyfield Apparent object of source position seen from observer + position = obs.skyfield_obs().at(date).observe(source).apparent() + + # Observer velocity vector in ICRS xyz coordinates in units of m/s + obs_vel_m_per_s = position.velocity.m_per_s + + # Normalized source position vector in ICRS xyz coordinates + source_pos_m = position.position.m + source_pos_norm = source_pos_m / np.linalg.norm(source_pos_m, axis=0) + + # Dot product of observer velocity and source position gives observer + # velocity in direction of source; flip sign to get range rate (positive + # for observer and source moving appart) + range_rate = -np.sum(obs_vel_m_per_s.T * source_pos_norm.T, axis=-1) + + return range_rate
+ + + +
+[docs] +def get_source_dictionary(*args): + """Returns a dictionary containing :class:`skyfield.starlib.Star` + objects for common radio point sources. This is useful for + obtaining the skyfield representation of a source from a string + containing its name. + + Parameters + ---------- + catalog_name : str + Name of the catalog. This must be the basename of the json file + in the `ch_util/catalogs` directory. Can take multiple catalogs, + with the first catalog favoured for any overlapping sources. + + Returns + ------- + src_dict : dictionary + Format is {'SOURCE_NAME': :class:`skyfield.starlib.Star`, ...} + + """ + + import os + import json + + src_dict = {} + for catalog_name in reversed(args): + path_to_catalog = os.path.join( + os.path.dirname(__file__), + "catalogs", + os.path.splitext(catalog_name)[0] + ".json", + ) + + with open(path_to_catalog, "r") as handler: + catalog = json.load(handler) + + for name, info in catalog.items(): + src_dict[name] = skyfield_star_from_ra_dec(info["ra"], info["dec"], name) + + return src_dict
+ + + +# Common radio point sources +source_dictionary = get_source_dictionary( + "primary_calibrators_perley2016", + "specfind_v2_5Jy_vollmer2009", + "atnf_psrcat", + "hfb_target_list", +) + +#: :class:`skyfield.starlib.Star` representing Cassiopeia A. +CasA = source_dictionary["CAS_A"] + +#: :class:`skyfield.starlib.Star` representing Cygnus A. +CygA = source_dictionary["CYG_A"] + +#: :class:`skyfield.starlib.Star` representing Taurus A. +TauA = source_dictionary["TAU_A"] + +#: :class:`skyfield.starlib.Star` representing Virgo A. +VirA = source_dictionary["VIR_A"] + + +if __name__ == "__main__": + import doctest + + doctest.testmod() +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/_modules/ch_util/finder.html b/docs/_modules/ch_util/finder.html new file mode 100644 index 00000000..40e85522 --- /dev/null +++ b/docs/_modules/ch_util/finder.html @@ -0,0 +1,2012 @@ + + + + + + ch_util.finder — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for ch_util.finder

+"""
+Data Index Searcher for CHIME
+
+Search routines for locating data withing the CHIME data index.
+
+Data tables
+===========
+
+- :py:class:`DataFlag`
+- :py:class:`DataFlagType`
+
+
+Exceptions
+==========
+
+- :py:class:`DataFlagged`
+
+
+High Level Index Searcher
+=========================
+
+- :py:class:`Finder`
+- :py:class:`DataIntervalList`
+- :py:class:`BaseDataInterval`
+- :py:class:`CorrDataInterval`
+- :py:class:`HKDataInterval`
+- :py:class:`WeatherDataInterval`
+- :py:class:`FlagInputDataInterval`
+- :py:class:`CalibrationGainDataInterval`
+- :py:class:`DigitalGainDataInterval`
+
+
+Routines
+========
+
+- :py:meth:`connect_database`
+- :py:meth:`files_in_range`
+
+"""
+
+import logging
+import os
+from os import path
+import time
+import socket
+import peewee as pw
+import re
+
+import caput.time as ctime
+
+import chimedb.core as db
+import chimedb.data_index as di
+from . import layout, ephemeris
+
+from chimedb.dataflag import DataFlagType, DataFlag
+
+from .holography import HolographySource, HolographyObservation
+
+# Module Constants
+# ================
+
+GF_REJECT = "gf_reject"
+GF_RAISE = "gf_raise"
+GF_WARN = "gf_warn"
+GF_ACCEPT = "gf_accept"
+
+
+# Initializing connection to database.
+# ====================================
+
+from ._db_tables import connect_peewee_tables as connect_database
+
+
+# High level interface to the data index
+# ======================================
+
+# The following are the info tables that we use to join over when using the
+# finder.
+_acq_info_table = [di.CorrAcqInfo, di.HKAcqInfo, di.RawadcAcqInfo]
+
+# Import list of tables that have a ``start_time`` and ``end_time``
+# field: they are necessary to do any time-based search.
+from chimedb.data_index.orm import file_info_table
+
+
+
+[docs] +class Finder(object): + """High level searching of the CHIME data index. + + This class gives a convenient way to search and filter data acquisitions + as well as time ranges of data within acquisitions. Search results + constitute a list of files within an acquisition as well as a time range for + the data within these files. Convenient methods are provided for loading + the precise time range of constituting a search result. + + This is intended to make the most common types of searches of CHIME data as + convenient as possible. However for very complex searches, it may be + necessary to resort to the lower level interface. + + Searching the index + =================== + + There are four ways that a search can be modified which may be combined in + any way. + + #. You can restrict the types of acquisition that are under + consideration, using methods whose names begin with ``only_``. + In this way, one can consider only, say, housekeeping acquisitions. + #. The second is to adjust the total time range under consideration. + This is achieved by assigning to :attr:`~Finder.time_range` or calling + methods beginning with ``set_time_range_``. The total time range affects + acquisitions under consideration as well as the data time ranges within + the acquisitions. Subsequent changes to the total time range under + consideration may only become more restrictive. + #. The data index may also be filtered by acquisition using methods whose + names begin with ``filter_acqs``. Again subsequent filtering are always + combined to become more restrictive. The attribute :attr:`~Finder.acqs` + lists the acquisitions currently included in the search for convenience + when searching interactively. + #. Time intervals within acquisitions are added using methods with names + beginning with ``include_``. Time intervals are defined in the + :attr:`~Finder.time_intervals` attribute, and are inclusive (you can + add as many as you want). + #. 
Finally, upon calling :meth:``get_results`` or :meth:``get_results_acq``, + one can pass an arbitrary condition on individual files, thereby + returning only a subset of files from each acquisition. + + Getting results + =============== + + Results of the search can be retrieved using methods whose names begin with + ``get_results`` An individual search result is constituted of a list of file + names and a time interval within these files. These can easily loaded into + memory using helper functions (see :class:`BaseDataInterval` and + :class:`DataIntervalList`). + + Parameters + ---------- + acqs : list of :class:`chimedb.data_index.ArchiveAcq` objects + Acquisitions to initially include in data search. Default is to search + all acquisitions. + node_spoof : dictionary + Normally, the DB will be queried to find which nodes are mounted on your + host. If you are on a machine that is cross-mounted, though, you can + enter a dictionary of "node_name": "mnt_root" pairs, specifying the + nodes to search and where they are mounted on your host. + + Examples + -------- + + To find all the correlator data between two times. + + >>> from ch_util import finder + >>> from datetime import datetime + >>> f = finder.Finder() + >>> f.only_corr() + >>> f.set_time_range(datetime(2014,02,24), datetime(2014,02,25)) + >>> f.print_results_summary() + interval | acquisition | offset from start (s) | length (s) | N files + 1 | 20140219T145849Z_abbot_corr | 378053.1 | 86400.0 | 25 + 2 | 20140224T051212Z_stone_corr | 0.0 | 67653.9 | 19 + Total 154053.858720 seconds of data. + + Search for transits of a given source. + + >>> from ch_util import ephemeris + >>> f.include_transits(ephemeris.CasA, time_delta=3600) + >>> f.print_results_summary() + interval | acquisition | offset from start (s) | length (s) | N files + 1 | 20140219T145849Z_abbot_corr | 452087.2 | 3600.0 | 2 + 2 | 20140224T051212Z_stone_corr | 55288.0 | 3600.0 | 2 + Total 7200.000000 seconds of data. 
+ + To read the data, + + >>> from ch_util import andata + >>> results_list = f.get_results() + >>> # Pick result number 1 + >>> result = results_list[0] + >>> # Pick product number 0 (autocorrelation) + >>> data = result.as_loaded_data(prod_sel=0) + >>> print data.vis.shape + (1024, 1, 360) + + More intricate filters on the acquisitions are possible. + + >>> import chimedb.data_index as di + >>> f = finder.Finder() + >>> # Find ALL 10ms cadence data correlated by 'stone' with 8 channels. + >>> f.filter_acqs((di.CorrAcqInfo.integration < 0.011) + ... & (di.CorrAcqInfo.integration > 0.009) + ... & (di.CorrAcqInfo.nfreq == 1024) + ... & (di.CorrAcqInfo.nprod == 36) + ... & (di.ArchiveInst.name == 'stone')) + >>> f.print_results_summary() + interval | acquisition | offset from start (s) | length (s) | N files + 1 | 20140211T020307Z_stone_corr | 0.0 | 391.8 | 108 + 2 | 20140128T135105Z_stone_corr | 0.0 | 4165.2 | 104 + 3 | 20131208T070336Z_stone_corr | 0.0 | 1429.8 | 377 + 4 | 20140212T014603Z_stone_corr | 0.0 | 2424.4 | 660 + 5 | 20131210T060233Z_stone_corr | 0.0 | 1875.3 | 511 + 6 | 20140210T021023Z_stone_corr | 0.0 | 874.1 | 240 + Total 11160.663510 seconds of data. + + Here is an example that uses node spoofing and also filters files within + acquisitions to include only LNA housekeeping files: + + >>> f = finder.Finder(node_spoof = {"gong" : "/mnt/gong/archive", + "suzu" : "/mnt/suzu/hk_data"}) + >>> f.only_hk() + >>> f.set_time_range(datetime(2014, 9, 1), datetime(2014, 10, 10)) + >>> f.print_results_summary() + # | acquisition |start (s)| len (s) |files | MB + 0 | 20140830T005410Z_ben_hk | 169549 | 419873 | 47 | 2093 + 1 | 20140905T203905Z_ben_hk | 0 | 16969 | 2 | 0 + 2 | 20140908T153116Z_ben_hk | 0 | 1116260 | 56 | 4 + 3 | 20141009T222415Z_ben_hk | 0 | 5745 | 2 | 0 + >>> res = f.get_results(file_condition = (di.HKFileInfo.atmel_name == "LNA")) + >>> for r in res: + ... print "No. files: %d" % (len(r[0])) + No. files: 8 + No. files: 1 + No. files: 19 + No. 
files: 1 + >>> data = res[0].as_loaded_data() + >>> for m in data.mux: + ... print "Mux %d: %s", (m, data.chan(m)) + Mux 0: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15] + Mux 1: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15] + Mux 2: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15] + >>> print "Here are the raw data for Mux 1, Channel 14:", data.tod(14, 1) + Here are the raw data for Mux 1, Channel 14: [ 1744.19091797 1766.34472656 1771.03356934 ..., 1928.61279297 1938.90075684 1945.53491211] + + In the above example, the restriction to LNA housekeeping could also have + been accomplished with the convenience method :meth:`Finder.set_hk_input`: + + >>> f.set_hk_input("LNA") + >>> res = f.get_results() + + """ + + # Constructors and setup + # ---------------------- + + def __init__(self, acqs=(), node_spoof=None): + import copy + + # Which nodes do we have available? + host = socket.gethostname().split(".")[0] + self._my_node = [] + self._node_spoof = node_spoof + + connect_database() + + if not node_spoof: + for n in ( + di.StorageNode.select() + .where(di.StorageNode.host == host) + .where(di.StorageNode.active) + ): + self._my_node.append(n) + else: + for key, val in node_spoof.items(): + self._my_node.append(di.StorageNode.get(name=key)) + + if not len(self._my_node): + raise RuntimeError( + "No nodes found. Perhaps you need to pass a 'node_spoof' parameter?" + ) + + # Get list of join tables. We make a copy because the user may alter + # this later through the only_XXX() methods. 
+ self._acq_info = copy.copy(_acq_info_table) + self._file_info = copy.copy(file_info_table) + + if acqs: + pass + else: + acqs = di.ArchiveAcq.select() + for i in self._acq_info: + acqs.join(i) + self._acqs = list(acqs) + self._time_range = (ephemeris.CSD_ZERO, time.time()) + self._time_intervals = None + self._time_exclusions = [] + self._atmel_restrict = None + self.min_interval = 240.0 + self._gf_mode = {"comment": GF_ACCEPT, "warning": GF_WARN, "severe": GF_REJECT} + self._data_flag_types = [] + # The following line cuts any acquisitions with no files. + # self.filter_acqs_by_files(True) + # This is very similar to the above line, but takes ~.5s instead of + # 12s. + acq_ids = [acq.id for acq in self.acqs] + if not acq_ids: + # Nothing to do. + return + condition = ( + (di.ArchiveAcq.id << acq_ids) + & (di.ArchiveFileCopy.node << self._my_node) + & (di.ArchiveFileCopy.has_file == "Y") + ) + selection = di.ArchiveAcq.select().join(di.ArchiveFile).join(di.ArchiveFileCopy) + self._acqs = list(selection.where(condition).group_by(di.ArchiveAcq)) + +
+[docs] + @classmethod + def offline(cls, acqs=()): + """Initialize :class:`~Finder` when not working on a storage node. + + Normally only data that is available on the present host is searched, + and as such :class:`~Finder` can't be used to browse the index when you + don't have access to the acctual data. Initializing using this method + spoofs the 'gong' and 'niedermayer' storage nodes (which should have a + full copy of the archive) such that the data index can be search the + full archive. + + """ + + node_spoof = {} + # for n in di.StorageNode.select(): + # node_spoof[n.name] = '' + # I think all the data live on at lease one of these -KM. + node_spoof["gong"] = "" + node_spoof["niedermayer"] = "" + return cls(acqs, node_spoof=node_spoof)
+ + + # Filters on the index + # -------------------- + + @property + def acqs(self): + """Acquisitions remaining in this search. + + Returns + ------- + acqs : list of :class:`chimedb.data_index.ArchiveAcq` objects + + """ + + return list(self._acqs) + + @property + def time_range(self): + """Time range to be included in search. + + Data files and acquisitions that do not overlap with this range are + excluded. Assigning to this is equivalent to calling + :meth:`~Finder.set_time_range`. + + Returns + ------- + time_range : tuple of 2 floats + Unix/POSIX beginning and end of the time range. + + """ + + return self._time_range + + @property + def time_intervals(self): + """Periods in time to be included. + + Periods are combined with `OR` unless list is empty, in which case no + filtering is performed. + + Returns + ------- + time_intervals : list of pairs of floats + Each entry is the Unix/POSIX beginning and end of the time interval + to be included. + + """ + + if self._time_intervals is None: + return [self.time_range] + else: + return list(self._time_intervals) + + def _append_time_interval(self, interval): + if self._time_intervals is None: + time_intervals = [] + else: + time_intervals = self._time_intervals + time_intervals.append(interval) + self._time_intervals = time_intervals + + @property + def time_exclusions(self): + """Periods in time to be excluded. + + Returns + ------- + time_exclusions : list of pairs of floats + Each entry is the Unix/POSIX beginning and end of the time interval + to be excluded. + + """ + + return list(self._time_exclusions) + + def _append_time_exclusion(self, interval): + self._time_exclusions.append(interval) + + @property + def min_interval(self): + """Minimum length of a block of data to be considered. + + This can be set to any number. The default is 240 seconds. + + Returns + ------- + min_interval : float + Length of time in seconds. 
+ + """ + + return self._min_interval + + @min_interval.setter + def min_interval(self, value): + self._min_interval = float(value) + + @property + def global_flag_mode(self): + """Global flag behaviour mode. + + Defines how global flags are treated when finding data. There are three + severities of global flag: comment, warning, and severe. There are + four possible behaviours when a search result overlaps a global flag, + represented by module constants: + + :GF_REJECT: Reject any data overlapping flag silently. + :GF_RAISE: Raise an exception when retrieving data intervals. + :GF_WARN: Send a warning when retrieving data intervals but proceed. + :GF_ACCEPT: Accept the data silently, ignoring the flag. + + The behaviour for all three severities is represented by a dictionary. + If no mode is set, then the default behaviour is + `{'comment' : GF_ACCEPT, 'warning' : GF_WARN, 'severe' : GF_REJECT}`. + + This is modified using :meth:`Finder.update_global_flag_mode`. + + Returns + ------- + global_flag_mode : dictionary with keys 'comment', 'warning', 'severe'. + Specifies finder behaviour. + + """ + + return dict(self._gf_mode) + + @property + def data_flag_types(self): + """Types of DataFlag to exclude from results.""" + return self._data_flag_types + + # Setting up filters on the data + # ------------------------------ + +
+[docs] + def update_global_flag_mode(self, comment=None, warning=None, severe=None): + """Update :attr:`Finder.global_flag_mode`, the global flag mode. + + Parameters + ---------- + comment : One of *GF_REJECT*, *GF_RAISE*, *GF_WARN*, or *GF_ACCEPT*. + warning : One of *GF_REJECT*, *GF_RAISE*, *GF_WARN*, or *GF_ACCEPT*. + severe : One of *GF_REJECT*, *GF_RAISE*, *GF_WARN*, or *GF_ACCEPT*. + + """ + + if comment: + _validate_gf_value(comment) + self._gf_mode["comment"] = comment + if warning: + _validate_gf_value(warning) + self._gf_mode["warning"] = warning + if severe: + _validate_gf_value(severe) + self._gf_mode["severe"] = severe
+ + +
+[docs] + def accept_all_global_flags(self): + """Set global flag behaviour to accept all data.""" + + self.update_global_flag_mode( + comment=GF_ACCEPT, warning=GF_ACCEPT, severe=GF_ACCEPT + )
+ + +
+[docs] + def only_corr(self): + """Only include correlator acquisitions in this search.""" + self._acq_info = [di.CorrAcqInfo] + self._file_info = [di.CorrFileInfo] + self.filter_acqs(True)
+ + +
+[docs] + def only_hk(self): + """Only include housekeeping acquisitions in this search.""" + self._acq_info = [di.HKAcqInfo] + self._file_info = [di.HKFileInfo] + self.filter_acqs(True)
+ + +
+[docs] + def only_rawadc(self): + """Only include raw ADC acquisitions in this search.""" + self._acq_info = [di.RawadcAcqInfo] + self._file_info = [di.RawadcFileInfo] + self.filter_acqs(True)
+ + +
+[docs] + def only_hfb(self): + """Only include HFB acquisitions in this search.""" + self._acq_info = [di.HFBAcqInfo] + self._file_info = [di.HFBFileInfo] + self.filter_acqs(True)
+ + +
+[docs] + def only_weather(self): + """Only include weather acquisitions in this search.""" + self._acq_info = [] + self._file_info = [di.WeatherFileInfo] + self.filter_acqs(di.AcqType.name == "weather")
+ + +
+[docs] + def only_chime_weather(self): + """Only include chime weather acquisitions in this search. + This excludes the old format mingun-weather.""" + self._acq_info = [] + self._file_info = [di.WeatherFileInfo] + self.filter_acqs(di.AcqType.name == "weather") + self.filter_acqs(di.ArchiveInst.name == "chime")
+ + +
+[docs] + def only_hkp(self): + """Only include Prometheus housekeeping data in this search""" + self._acq_info = [] + self._file_info = [di.HKPFileInfo] + self.filter_acqs(di.AcqType.name == "hkp")
+ + +
+[docs] + def only_digitalgain(self): + """Only include digital gain data in this search""" + self._acq_info = [] + self._file_info = [di.DigitalGainFileInfo] + self.filter_acqs(di.AcqType.name == "digitalgain")
+ + +
+[docs] + def only_gain(self): + """Only include calibration gain data in this search""" + self._acq_info = [] + self._file_info = [di.CalibrationGainFileInfo] + self.filter_acqs(di.AcqType.name == "gain")
+ + +
+[docs] + def only_flaginput(self): + """Only include input flag data in this search""" + self._acq_info = [] + self._file_info = [di.FlagInputFileInfo] + self.filter_acqs(di.AcqType.name == "flaginput")
+ + +
+[docs] + def filter_acqs(self, condition): + """Filter the acquisitions included in this search. + + Parameters + ---------- + condition : :mod:`peewee` comparison + Condition on any on :class:`chimedb.data_index.ArchiveAcq` or any + class joined to :class:`chimedb.data_index.ArchiveAcq`: using the + syntax from the :mod:`peewee` module [1]_. + + Examples + -------- + + >>> from ch_util import finder + >>> import chimedb.data_index as di + >>> f = finder.Finder() + >>> f.filter_acqs(di.ArchiveInst.name == 'stone') + >>> f.filter_acqs((di.AcqType == 'corr') & (di.CorrAcqInfo.nprod == 36)) + + See Also + -------- + + :meth:`Finder.filter_acqs_by_files` + + + References + ---------- + + .. [1] http://peewee.readthedocs.org/en/latest/peewee/querying.html + + """ + + # Get the acquisitions currently included. + acq_ids = [acq.id for acq in self.acqs] + if not acq_ids: + # Nothing to do. + return + # From these, only include those meeting the new condition. + # XXX simpler? + condition = (di.ArchiveAcq.id << acq_ids) & condition + + selection = di.ArchiveAcq.select().join(di.AcqType) + for i in self._acq_info: + selection = selection.switch(di.ArchiveAcq).join(i, pw.JOIN.LEFT_OUTER) + selection = selection.switch(di.ArchiveAcq).join(di.ArchiveInst) + self._acqs = list(selection.where(condition).group_by(di.ArchiveAcq))
+ + +
+[docs] + def filter_acqs_by_files(self, condition): + """Filter the acquisitions by the properties of its files. + + Because each acquisition has many files, this filter should be + significantly slower than :meth:`Finder.filter_acqs`. + + Parameters + ---------- + condition : :mod:`peewee` comparison + Condition on any on :class:`chimedb.data_index.ArchiveAcq`, + :class:`chimedb.data_index.ArchiveFile` or any class joined to + :class:`chimedb.data_index.ArchiveFile` using the syntax from the + :mod:`peewee` module [2]_. + + See Also + -------- + + :meth:`Finder.filter_acqs` + + Examples + -------- + + References + ---------- + + .. [2] http://peewee.readthedocs.org/en/latest/peewee/querying.html + + """ + # Get the acquisitions currently included. + acq_ids = [acq.id for acq in self.acqs] + if not acq_ids: + # Nothing to do. + return + condition = ( + (di.ArchiveAcq.id << acq_ids) + & (di.ArchiveFileCopy.node << self._my_node) + & (di.ArchiveFileCopy.has_file == "Y") + & (condition) + ) + selection = di.ArchiveAcq.select().join(di.ArchiveFile).join(di.ArchiveFileCopy) + info_cond = False + for i in self._file_info: + selection = selection.switch(di.ArchiveFile).join( + i, join_type=pw.JOIN.LEFT_OUTER + ) + # The following ensures that at least _one_ of the info tables is + # joined. + info_cond |= ~(i.start_time >> None) + self._acqs = list( + selection.where(condition & info_cond).group_by(di.ArchiveAcq) + )
+ + +
+[docs] + def set_time_range(self, start_time=None, end_time=None): + """Restrict the time range of the search. + + This method updates the :attr:`~Index.time_range` property and also + excludes any acquisitions that do not overlap with the new range. This + method always narrows the time range under consideration, never expands + it. + + Parameters + ---------- + start_time : float or :class:`datetime.datetime` + Unix/POSIX time or UTC start of desired time range. Optional. + end_time : float or :class:`datetime.datetime` + Unix/POSIX time or UTC end of desired time range. Optional. + + """ + # Update `self.time_range`. + if start_time is None: + start_time = 0 + if end_time is None: + end_time = time.time() + start_time = ctime.ensure_unix(start_time) + end_time = ctime.ensure_unix(end_time) + old_start_time, old_end_time = self.time_range + start_time = max(start_time, old_start_time) + end_time = min(end_time, old_end_time) + if start_time >= end_time: + msg = "No time spanned by search. start=%s, stop=%s" + msg = msg % (start_time, end_time) + raise ValueError(msg) + + # Delete any acquisitions that do not overlap with the new range. + cond = True + for i in self._file_info: + cond &= (i.start_time >> None) | ( + (i.start_time < end_time) & (i.finish_time > start_time) + ) + self.filter_acqs_by_files(cond) + + if not self._time_intervals is None: + time_intervals = _trim_intervals_range( + self.time_intervals, (start_time, end_time) + ) + self._time_intervals = time_intervals + time_exclusions = _trim_intervals_range( + self.time_exclusions, (start_time, end_time) + ) + self._time_exclusions = time_exclusions + self._time_range = (start_time, end_time)
+ + +
+[docs] + def set_time_range_global_flag(self, flag): + """Set time range to correspond to a global flag. + + Parameters + ---------- + flag : integer or string + Global flag ID or name, e.g. "run_pass1_a", or 11292. + + Notes + ----- + + Global flag ID numbers, names, and descriptions are listed at + http://bao.phas.ubc.ca/layout/event.php?filt_event_type_id=7 + + """ + + start_time, end_time = _get_global_flag_times_by_name_event_id(flag) + self.set_time_range(start_time, end_time)
+ + +
+[docs] + def set_time_range_season(self, year=None, season=None): + """Set the time range by as specific part of a given year. + + NOT YET IMPLEMENTED + + Parameters + ---------- + year : integer + Calender year + season : string + Month name (3 letter abbreviations are acceptable) or one of + 'winter', 'spring', 'summer', or 'fall'. + + """ + raise NotImplementedError()
+ + + def _format_time_interval(self, start_time, end_time): + if start_time is None: + start_time = 0 + if end_time is None: + end_time = time.time() + start_time = ctime.ensure_unix(start_time) + end_time = ctime.ensure_unix(end_time) + range_start, range_end = self.time_range + start_time = max(start_time, range_start) + end_time = min(end_time, range_end) + if start_time < end_time: + return (start_time, end_time) + else: + return None + +
+[docs] + def include_time_interval(self, start_time, end_time): + """Include a time interval. + + Examples + -------- + + First a certain layout is chosen + + >>> from ch_util import finder + >>> f = finder.Finder() + >>> f.set_time_range_layout(26) + >>> f.print_results_summary() + interval | acquisition | offset from start (s) | length (s) | N files + 1 | 20140311T192616Z_abbot_corr | 16412.8 | 667.1 | 1 + 2 | 20140312T001123Z_abbot_corr | 0.0 | 1150.5 | 314 + 3 | 20140312T003054Z_abbot_corr | 0.0 | 79889.4 | 23 + 4 | 20140312T224940Z_abbot_corr | 0.0 | 591.0 | 4 + 5 | 20140312T230108Z_abbot_corr | 0.0 | 171909.0 | 48 + 6 | 20140315T014330Z_abbot_corr | 0.0 | 35119.7 | 10 + 7 | 20140318T154959Z_abbot_corr | 0.0 | 51739.6 | 15 + 8 | 20140320T120437Z_abbot_corr | 0.0 | 186688.6 | 52 + 9 | 20140325T174231Z_abbot_corr | 0.0 | 86019.3 | 24 + 10 | 20140326T175440Z_abbot_corr | 0.0 | 286487.7 | 80 + 11 | 20140330T064125Z_abbot_corr | 0.0 | 2998.6 | 1590 + 12 | 20140330T102505Z_abbot_corr | 0.0 | 626385.0 | 174 + 13 | 20140403T000057Z_blanchard_corr | 0.0 | 54912.3 | 16 + 14 | 20140403T152314Z_blanchard_corr | 0.0 | 340637.8 | 94 + 15 | 20140408T222844Z_abbot_corr | 0.0 | 75589.3 | 21 + 16 | 20140409T184530Z_blanchard_corr | 0.0 | 3795.0 | 2 + 17 | 20140410T003326Z_blanchard_corr | 0.0 | 2173.7 | 72 + 18 | 20140409T165603Z_blanchard_corr | 0.0 | 4952.7 | 2 + Total 2011706.304970 seconds of data. + + To find a specific day in that layout choose the functionality + include_time_interval + + >>> from datetime import datetime + >>> f.include_time_interval(datetime(2014,04,8), datetime(2014,04,9)) + >>> f.print_results_summary() + interval | acquisition | offset from start (s) | length (s) | N files + 1 | 20140408T222844Z_abbot_corr | 0.0 | 5465.1 | 2 + Total 5465.059670 seconds of data. + + """ + + interval = self._format_time_interval(start_time, end_time) + if interval: + self._append_time_interval(interval)
+ + +
+[docs] + def exclude_time_interval(self, start_time, end_time): + """Exclude a time interval. + + Examples + -------- + + >>> from ch_util import finder + >>> from datetime import datetime + >>> f = finder.Finder() + >>> f.set_time_range(datetime(2014,04,04), datetime(2014,04,14)) + >>> # f.print_results_summary() will show all the files in this time range + >>> # Now want to exclude all data from 04, 10 to 04, 11 + >>> f.exclude_time_interval(datetime(2014,04,10),datetime(2014,04,11)) + >>> f.print_results_summary() + interval | acquisition | offset from start (s) | length (s) | N files + 1 | 20140330T102505Z_abbot_corr | 394484.2 | 231900.8 | 65 + 2 | 20140403T152314Z_blanchard_corr | 30988.4 | 309649.3 | 86 + 3 | 20140408T222844Z_abbot_corr | 0.0 | 75589.3 | 21 + 4 | 20140409T184530Z_blanchard_corr | 0.0 | 3795.0 | 2 + 5 | 20140409T165603Z_blanchard_corr | 0.0 | 4952.7 | 2 + 6 | 20140411T003404Z_blanchard_corr | 0.0 | 161606.5 | 45 + 7 | 20140411T000920Z_blanchard_corr | 0.0 | 1080.4 | 36 + 8 | 20140413T002319Z_blanchard_corr | 0.0 | 84981.7 | 24 + Total 873555.739000 seconds of data. + """ + + interval = self._format_time_interval(start_time, end_time) + if interval: + self._append_time_exclusion(interval)
+ + +
+[docs] + def include_global_flag(self, flag): + """Update :attr:`time_intervals` to include a global flag. + + Parameters + ---------- + flag : integer or string + Global flag ID or name, e.g. "run_pass1_a", or 11292. + + Notes + ----- + + Global flag ID numbers, names, and descriptions are listed at + http://bao.phas.ubc.ca/layout/event.php?filt_event_type_id=7 + + """ + + start_time, end_time = _get_global_flag_times_by_name_event_id(flag) + self.include_time_interval(start_time, end_time)
+ + +
+[docs] + def exclude_global_flag(self, flag): + """Update :attr:`time_intervals` to exclude a global flag. + + Parameters + ---------- + flag : integer or string + Global flag ID or name, e.g. "run_pass1_a", or 65. + + See Also + -------- + + Look under :meth:`include_global_flag` for a very similar example. + + Notes + ----- + + Global flag ID numbers, names, and descriptions are listed at + http://bao.phas.ubc.ca/layout/event.php?filt_event_type_id=7 + + """ + + start_time, end_time = _get_global_flag_times_by_name_event_id(flag) + self.exclude_time_interval(start_time, end_time)
+ + +
+[docs] + def exclude_data_flag_type(self, flag_type): + """Exclude times that overlap with DataFlags of this type. + + Parameters + ---------- + flag_type : string or list of string + Name of DataFlagType(s) to exclude from results, e.g. "rain". + """ + + if isinstance(flag_type, list): + self.data_flag_types.extend(flag_type) + else: + self.data_flag_types.append(flag_type)
+ + +
+[docs] + def include_RA_interval(self, start_RA, end_RA): + """Add time intervals to include passings of given right RA intervals + + Parameters + ---------- + start_RA : float + Starting right ascension in degrees. + end_RA : float + Ending right ascension in degrees. + + Examples + -------- + >>> from ch_util import finder + >>> from datetime import datetime + >>> f = finder.Finder() + >>> f.set_time_range(datetime(2014,04,04), datetime(2014,04,14)) + >>> f.include_RA_interval(90., 180.) + >>> f.print_results_summary() + interval | acquisition | offset from start (s) | length (s) | N files + 1 | 20140330T102505Z_abbot_corr | 398689.9 | 21541.0 | 7 + 2 | 20140330T102505Z_abbot_corr | 484854.0 | 21541.0 | 7 + 3 | 20140330T102505Z_abbot_corr | 571018.1 | 21541.0 | 7 + 4 | 20140403T152314Z_blanchard_corr | 35194.1 | 21541.0 | 7 + 5 | 20140403T152314Z_blanchard_corr | 121358.2 | 21541.0 | 7 + 6 | 20140403T152314Z_blanchard_corr | 207522.3 | 21541.0 | 7 + 7 | 20140403T152314Z_blanchard_corr | 293686.4 | 21541.0 | 6 + 8 | 20140408T222844Z_abbot_corr | 8491.2 | 21541.0 | 7 + 9 | 20140410T003326Z_blanchard_corr | 754.5 | 1419.2 | 48 + 10 | 20140410T031023Z_blanchard_corr | 0.0 | 1376.5 | 46 + 11 | 20140410T014136Z_blanchard_corr | 0.0 | 2347.4 | 78 + 12 | 20140411T003404Z_blanchard_corr | 397.4 | 21541.0 | 7 + 13 | 20140411T003404Z_blanchard_corr | 86561.5 | 21541.0 | 7 + 14 | 20140413T002319Z_blanchard_corr | 664.1 | 21541.0 | 7 + Total 242094.394565 seconds of data. + """ + + delta_RA = (end_RA - start_RA) % 360 + mid_RA = (start_RA + delta_RA / 2.0) % 360 + time_delta = delta_RA * 4 * 60.0 * ctime.SIDEREAL_S + self.include_transits(mid_RA, time_delta=time_delta)
+ + +
+[docs] + def exclude_RA_interval(self, start_RA, end_RA): + """Add time intervals to exclude passings of given right RA + intervals + + Parameters + ---------- + start_RA : float + Starting right ascension in degrees. + end_RA : float + Ending right ascension in degrees. + + Examples + -------- + Look under include_RA_interval for very similar example. + + """ + + delta_RA = (end_RA - start_RA) % 360 + mid_RA = (start_RA + delta_RA / 2.0) % 360 + time_delta = delta_RA * 4 * 60.0 * ctime.SIDEREAL_S + self.exclude_transits(mid_RA, time_delta=time_delta)
+ + +
+[docs] + def include_transits(self, body, time_delta=None): + """Add time intervals to include transits for given celestial body. + + Parameters + ---------- + body : :class:`ephem.Body` or float + Transiting celestial body. If a float, interpret as a right + ascension in degrees. + time_delta : float + Total amount of time to include surrounding the transit in + seconds. Default is to use twice the value of + :attr:`~Finder.min_interval`. + + Examples + -------- + >>> from ch_util import (finder, ephemeris) + >>> from datetime import datetime + >>> f = finder.Finder() + >>> f.set_time_range(datetime(2014,02,20), datetime(2014,02,22)) + >>> f.include_transits(ephemeris.CasA, time_delta=3600) + >>> f.print_results_summary() + interval | acquisition | offset from start (s) | length (s) | N files + 1 | 20140219T145849Z_abbot_corr | 107430.9 | 3600.0 | 2 + 2 | 20140219T145849Z_abbot_corr | 193595.0 | 3600.0 | 2 + 3 | 20140220T213252Z_stone_corr | 0.0 | 990.2 | 1 + 4 | 20140220T213252Z_stone_corr | 83554.3 | 3600.0 | 2 + Total 11790.181012 seconds of data. + + """ + + if not time_delta: + time_delta = self.min_interval * 2 + ttimes = ephemeris.transit_times(body, *self.time_range) + for ttime in ttimes: + self.include_time_interval( + ttime - time_delta / 2.0, ttime + time_delta / 2.0 + )
+ + +
+[docs] + def include_26m_obs(self, source, require_quality=True): + """Add time intervals to include 26m observations of a source. + + Parameters + ---------- + source : string + Source observed. Has to match name on database exactly. + require_quality : bool (default: True) + Require the quality flag to be zero (ie that the 26 m + pointing is trustworthy) or None + + Examples + -------- + >>> from ch_util import finder + >>> from datetime import datetime + >>> f = finder.Finder() + >>> f.only_corr() + >>> f.set_time_range(datetime(2017,8,1,10), datetime(2017,8,2)) + >>> f.filter_acqs((di.ArchiveInst.name == 'pathfinder')) + >>> f.include_26m_obs('CasA') + >>> f.print_results_summary() + # | acquisition |start (s)| len (s) |files | MB + 0 | 20170801T063349Z_pathfinder_corr | 12337 | 11350 | 2 | 153499 + 1 | 20170801T131035Z_pathfinder_corr | 0 | 6922 | 1 | 75911 + Total 18271 seconds, 229410 MB of data. + + """ + + connect_database() + sources = HolographySource.select() + sources = sources.where(HolographySource.name == source) + if len(sources) == 0: + msg = ( + "No sources found in the database that match: {0}\n".format(source) + + "Returning full time range" + ) + logging.warning(msg) + obs = ( + HolographyObservation.select() + .join(HolographySource) + .where(HolographyObservation.source << sources) + ) + if require_quality: + obs = obs.select().where( + (HolographyObservation.quality_flag == 0) + | (HolographyObservation.quality_flag == None) + ) + + found_obs = False + for ob in obs: + in_range = (self.time_range[1] > ob.start_time) and ( + self.time_range[0] < ob.finish_time + ) + if in_range: + found_obs = True + self.include_time_interval(ob.start_time, ob.finish_time) + if not found_obs: + msg = ( + "No observation of the source ({0}) was found within the time range.\n".format( + source + ) + + "Returning full time range" + ) + logging.warning(msg)
+ + +
+[docs] + def exclude_transits(self, body, time_delta): + """Add time intervals to exclude transits for given celestial body. + + Parameters + ---------- + body : :class:`ephem.Body` or float + Transiting celestial body. If a float, interpret as a right + ascension in degrees. + time_delta : float + Total amount of time to include surrounding the transit in + seconds. Default is to use twice the value of + :attr:`~Finder.min_interval`. + + Examples + -------- + >>> from ch_util import finder + >>> from datetime import datetime + >>> f = finder.Finder() + >>> f.set_time_range(datetime(2014,02,20), datetime(2014,02,22)) + >>> import ephem + >>> f.exclude_transits(ephem.Sun(), time_delta=43200) + >>> f.print_results_summary() + interval | acquisition | offset from start (s) | length (s) | N files + 1 | 20140219T145849Z_abbot_corr | 32453.1 | 51128.4 | 15 + 2 | 20140219T145849Z_abbot_corr | 126781.5 | 43193.0 | 13 + 3 | 20140219T145523Z_stone_corr | 32662.5 | 18126.9 | 6 + 4 | 20140220T213252Z_stone_corr | 16740.8 | 43193.0 | 13 + Total 155641.231275 seconds of data. + + """ + + if not time_delta: + time_delta = self.min_interval * 2 + ttimes = ephemeris.transit_times(body, *self.time_range) + for ttime in ttimes: + self.exclude_time_interval( + ttime - time_delta / 2.0, ttime + time_delta / 2.0 + )
+ + +
+[docs] + def exclude_daytime(self): + """Add time intervals to exclude all day time data.""" + + rise_times = ephemeris.solar_rising( + self.time_range[0] - 24 * 3600.0, self.time_range[1] + ) + + for rise_time in rise_times: + set_time = ephemeris.solar_setting(rise_time) + self.exclude_time_interval(rise_time, set_time)
+ + +
+[docs] + def exclude_nighttime(self): + """Add time intervals to exclude all night time data.""" + + set_times = ephemeris.solar_setting( + self.time_range[0] - 24 * 3600.0, self.time_range[1] + ) + + for set_time in set_times: + rise_time = ephemeris.solar_rising(set_time) + self.exclude_time_interval(set_time, rise_time)
+ + +
+[docs] + def exclude_sun(self, time_delta=4000.0, time_delta_rise_set=4000.0): + """Add time intervals to exclude sunrise, sunset, and sun transit. + + Parameters + ---------- + time_delta : float + Total amount of time to exclude surrounding the sun transit in + seconds. Default is to use 4000.0 seconds. + time_delta_rise_set : float + Total amount of time to exclude after sunrise and before sunset + in seconds. Default is to use 4000.0 seconds. + """ + + # Sunrise + rise_times = ephemeris.solar_rising( + self.time_range[0] - time_delta_rise_set, self.time_range[1] + ) + for rise_time in rise_times: + self.exclude_time_interval(rise_time, rise_time + time_delta_rise_set) + + # Sunset + set_times = ephemeris.solar_setting( + self.time_range[0], self.time_range[1] + time_delta_rise_set + ) + for set_time in set_times: + self.exclude_time_interval(set_time - time_delta_rise_set, set_time) + + # Sun transit + transit_times = ephemeris.solar_transit( + self.time_range[0] - time_delta / 2.0, self.time_range[1] + time_delta / 2.0 + ) + for transit_time in transit_times: + self.exclude_time_interval( + transit_time - time_delta / 2.0, transit_time + time_delta / 2.0 + )
+ + +
+[docs] + def set_hk_input(self, name): + """Restrict files to only one HK input type. + + This is a shortcut for specifying + ``file_condition = (chimedb.data_index.HKFileInfo.atmel_name == name)`` + in :meth:`get_results_acq`. Instead, one can simply call this function + with **name** as, e.g., "LNA", "FLA", and calls to + :meth:`get_results_acq` will be appropriately restricted. + + Parameters + ---------- + name : str + The name of the housekeeping input. + """ + self._atmel_restrict = di.HKFileInfo.atmel_name == name
+ + +
+[docs] + def get_results_acq(self, acq_ind, file_condition=None): + """Get search results restricted to a given acquisition. + + Parameters + ---------- + acq_ind : int + Index of :attr:`Finder.acqs` for the desired acquisition. + file_condition : :mod:`peewee` comparison + Any additional condition for filtering the files within the + acquisition. In general, this should be a filter on one of the file + information tables, e.g., :class:`CorrFileInfo`. + + Returns + ------- + interval_list : :class:`DataIntervalList` + Search results. + + """ + acq = self.acqs[acq_ind] + acq_start = acq.start_time + acq_finish = acq.finish_time + time_intervals = _trim_intervals_range( + self.time_intervals, (acq_start, acq_finish), self.min_interval + ) + time_intervals = _trim_intervals_exclusions( + time_intervals, self.time_exclusions, self.min_interval + ) + # Deal with all global flags. + for severity, mode in self.global_flag_mode.items(): + if mode is GF_ACCEPT: + # Do nothing. + continue + else: + # Need to actually get the flags. + global_flags = layout.global_flags_between( + acq_start, acq_finish, severity + ) + global_flag_names = [gf.name for gf in global_flags] + flag_times = [] + for f in global_flags: + start, stop = layout.get_global_flag_times(f.id) + if stop is None: + stop = time.time() + start = ctime.ensure_unix(start) + stop = ctime.ensure_unix(stop) + flag_times.append((start, stop)) + overlap = _check_intervals_overlap(time_intervals, flag_times) + if mode is GF_WARN: + if overlap: + msg = ( + "Global flag with severity '%s' present in data" + " search results and warning requested." + " Global flag name: %s" + % (severity, global_flag_names[overlap[1]]) + ) + logging.warning(msg) + elif mode is GF_RAISE: + if overlap: + msg = ( + "Global flag with severity '%s' present in data" + " search results and exception requested." 
+ " Global flag name: %s" + % (severity, global_flag_names[overlap[1]]) + ) + raise DataFlagged(msg) + elif mode is GF_REJECT: + if overlap: + time_intervals = _trim_intervals_exclusions( + time_intervals, flag_times, self.min_interval + ) + else: + raise RuntimeError("Finder has invalid global_flag_mode.") + # Do the same for Data flags + if len(self.data_flag_types) > 0: + df_types = [t.name for t in DataFlagType.select()] + for dft in self.data_flag_types: + if not dft in df_types: + raise RuntimeError("Could not find data flag type {}.".format(dft)) + flag_times = [] + for f in DataFlag.select().where( + DataFlag.type == DataFlagType.get(name=dft) + ): + start, stop = f.start_time, f.finish_time + if stop is None: + stop = time.time() + start = ctime.ensure_unix(start) + stop = ctime.ensure_unix(stop) + flag_times.append((start, stop)) + overlap = _check_intervals_overlap(time_intervals, flag_times) + if overlap: + time_intervals = _trim_intervals_exclusions( + time_intervals, flag_times, self.min_interval + ) + data_intervals = [] + if self._atmel_restrict: + if file_condition: + file_condition &= self._atmel_restrict + else: + file_condition = self._atmel_restrict + for time_interval in time_intervals: + filenames = files_in_range( + acq.id, + time_interval[0], + time_interval[1], + self._my_node, + file_condition, + self._node_spoof, + ) + filenames = sorted(filenames) + + tup = (filenames, time_interval) + if acq.type == di.AcqType.corr(): + data_intervals.append(CorrDataInterval(tup)) + elif acq.type == di.AcqType.hk(): + data_intervals.append(HKDataInterval(tup)) + elif acq.type == di.AcqType.weather(): + data_intervals.append(WeatherDataInterval(tup)) + elif acq.type == di.AcqType.flaginput(): + data_intervals.append(FlagInputDataInterval(tup)) + elif acq.type == di.AcqType.gain(): + data_intervals.append(CalibrationGainDataInterval(tup)) + elif acq.type == di.AcqType.digitalgain(): + data_intervals.append(DigitalGainDataInterval(tup)) + else: + 
data_intervals.append(BaseDataInterval(tup)) + + return DataIntervalList(data_intervals)
+ + +
+[docs] + def get_results(self, file_condition=None): + """Get all search results. + + Parameters + ---------- + file_condition : :mod:`peewee` comparison + Any additional condition for filtering the files within the + acquisition. In general, this should be a filter on one of the file + information tables, e.g., chimedb.data_index.CorrFileInfo. + + Returns + ------- + interval_list : :class:`DataIntervalList` + Search results. + cond : :mod:`peewee` comparison + Any extra filters, particularly filters on individual files. + + """ + + intervals = [] + for ii in range(len(self.acqs)): + intervals += self.get_results_acq(ii, file_condition) + return DataIntervalList(intervals)
+ + +
+[docs] + def print_acq_info(self): + """Print the acquisitions included in this search and thier properties. + + This method is convenient when searching the data index interactively + and you want to see what acquisitions remain after applying filters or + restricting the time range. + + See Also + -------- + :meth:`Finder.print_results_summary` + + """ + + print("acquisition | name | start | length (hrs) | N files") + row_proto = "%4d | %-36s | %s | %7.2f | %4d" + for ii, acq in enumerate(self.acqs): + start = acq.start_time + finish = acq.finish_time + length = (finish - start) / 3600.0 + start = ctime.unix_to_datetime(start) + start = start.strftime("%Y-%m-%d %H:%M") + name = acq.name + n_files = acq.n_timed_files + print(row_proto % (ii, name, start, length, n_files))
+ + +
+[docs] + def print_results_summary(self): + """Print a summary of the search results.""" + + row_proto = "%4d | %-36s | %7.f | %7.f | %4d | %6.f" + total_data = 0.0 + total_size = 0.0 + interval_number = 0 + titles = ("# ", " acquisition", "start (s)", "len (s) ", "files ", "MB ") + print("%5s|%-38s|%9s|%9s|%6s|%8s" % titles) + for ii, acq in enumerate(self.acqs): + acq_start = acq.start_time + intervals = self.get_results_acq(ii) + # if len(intervals): + # print intervals[0] + for interval in intervals: + offset = interval[1][0] - acq_start + length = interval[1][1] - interval[1][0] + n_files = len(interval[0]) + # Get total size of files by doing new query. + cond = ( + (di.ArchiveFile.acq == acq) + & (di.ArchiveFileCopy.node << self._my_node) + & (di.ArchiveFileCopy.has_file == "Y") + ) + info_cond = False + for i in self._file_info: + info_cond |= (i.finish_time >= interval[1][0]) & ( + i.start_time <= interval[1][1] + ) + size_q = di.ArchiveFile.select(pw.fn.Sum(di.ArchiveFile.size_b)).join( + di.ArchiveFileCopy + ) + for i in self._file_info: + size_q = size_q.switch(di.ArchiveFile).join( + i, join_type=pw.JOIN.LEFT_OUTER + ) + size_q = size_q.where(cond & info_cond) + try: + s = float(size_q.scalar()) / 1024**2 # MB. + except TypeError: + s = 0 + info = (interval_number, acq.name, offset, length, n_files, s) + print(row_proto % info) + total_data += length + total_size += s + interval_number += 1 + print("Total %6.f seconds, %6.f MB of data." % (total_data, total_size))
+
+ + + +def _trim_intervals_range(intervals, time_range, min_interval=0.0): + range_start, range_end = time_range + out = [] + for start, end in intervals: + start = max(start, range_start) + end = min(end, range_end) + if end <= start + min_interval: + continue + else: + out.append((start, end)) + return out + + +def _trim_intervals_exclusions(intervals, exclusions, min_interval=0.0): + for excl_start, excl_end in exclusions: + tmp_intervals = [] + for start, end in intervals: + if end <= excl_start or start >= excl_end: + if start + min_interval <= end: + tmp_intervals.append((start, end)) + continue + if end > excl_start and start + min_interval <= excl_start: + tmp_intervals.append((start, excl_start)) + if start < excl_end and excl_end + min_interval <= end: + tmp_intervals.append((excl_end, end)) + intervals = tmp_intervals + return intervals + + +def _check_intervals_overlap(intervals1, intervals2): + """Return the first pair of indexes that overlap.""" + for ii in range(len(intervals1)): + start1, stop1 = intervals1[ii] + for jj in range(len(intervals2)): + start2, stop2 = intervals2[jj] + if start1 < stop2 and start2 < stop1: + return ii, jj + + +def _validate_gf_value(value): + if not value in (GF_REJECT, GF_RAISE, GF_WARN, GF_ACCEPT): + raise ValueError( + "Global flag behaviour must be one of" + " the *GF_REJECT*, *GF_RAISE*, *GF_WARN*, *GF_ACCEPT*" + " constants from the finder module." + ) + + +def _get_global_flag_times_by_name_event_id(flag): + if isinstance(flag, str): + event = ( + layout.event.select() + .where(layout.event.active == True) + .join( + layout.global_flag, on=(layout.event.graph_obj == layout.global_flag.id) + ) + .where(layout.global_flag.name == flag) + .get() + ) + else: + event = layout.event.get(id=flag) + start = event.start.time + try: + end = event.end.time + except pw.DoesNotExist: + end = time.time() + return start, end + + +
+[docs] +class DataIntervalList(list): + """A list of data index search results. + + Just a normal python list of :class:`DataInterval`-derived objects with + some helper methods. Instances are created by calls to + :meth:`Finder.get_results`. + """ + +
+[docs] + def iter_reader(self): + """Iterate over data intervals converting to :class:`andata.Reader`. + + Returns + ------- + reader_iterator + Iterator over data intervals as :class:`andata.Reader` instances. + + """ + + for data_interval in self: + yield data_interval.as_reader()
+ + +
+[docs] + def iter_loaded_data(self, **kwargs): + """Iterate over data intervals loading as :class:`andata.AnData`. + + Parameters + ---------- + **kwargs : argument list + Pass any parameters accepted by the + :class:`BaseDataInverval`-derived class that you are using. + + Returns + ------- + loaded_data_iterator + Iterator over data intervals loaded into memory as + :class:`andata.BaseData`-derived instances. + + Examples + -------- + + Use this method to loop over data loaded into memory. + + >>> for data in interval_list.iter_loaded_data(): + ... pass + + Data is loaded into memory on each iteration. To immediately load all + data into memory, initialize a list using the iterator: + + >>> loaded_data_list = list(interval_list.iter_loaded_data()) + + """ + + for data_interval in self: + yield data_interval.as_loaded_data(**kwargs)
+
+ + + +
+[docs] +class BaseDataInterval(tuple): + """A single data index search result. + + Just a normal python tuple with some helper methods. Instances are created + by calls to :meth:`Finder.get_results`. + + A data interval as two elements: a list of filenames and a time range within + those files. + + You should generally only use the classes derived from this one (i.e., + :class:`CorrDataInterval`, etc.) + """ + + @property + def _reader_class(self): + # only dynamic imports of andata allowed in this module. + from . import andata + + return andata.BaseReader + +
+[docs] + def as_reader(self): + """Get data interval as an :class:`andata.Reader` instance. + + The :class:`andata.Reader` is initialized with the filename list part + of the data interval then the time range part of the data interval is + used as an arguments to :meth:`andata.Reader.select_time_range`. + + Returns + ------- + reader : :class:`andata.Reader` + + """ + rc = self._reader_class + reader = rc(self[0]) + reader.select_time_range(self[1][0], self[1][1]) + return reader
+ + +
+[docs] + def as_loaded_data(self, **kwargs): + """Load data interval to memory as an :class:`andata.AnData` instance. + + Parameters + ---------- + datasets : list of strings + Passed on to :meth:`andata.AnData.from_acq_h5` + + Returns + ------- + data : :class:`andata.AnData` + Data interval loaded into memory. + + """ + reader = self.as_reader() + for k, v in kwargs.items(): + if v is not None: + setattr(reader, k, v) + data = reader.read() + return data
+
+ + + +
+[docs] +class CorrDataInterval(BaseDataInterval): + """Derived class from :class:`BaseDataInterval` for correlator data.""" + + @property + def _reader_class(self): + # only dynamic imports of andata allowed in this module. + from . import andata + + return andata.CorrReader + +
+[docs] + def as_loaded_data(self, prod_sel=None, freq_sel=None, datasets=None): + """Load data interval to memory as an :class:`andata.CorrData` instance + + Parameters + ---------- + prod_sel : valid numpy index + Passed on to :meth:`andata.CorrData.from_acq_h5` + freq_sel : valid numpy index + Passed on to :meth:`andata.CorrData.from_acq_h5` + datasets : list of strings + Passed on to :meth:`andata.CorrData.from_acq_h5` + + Returns + ------- + data : :class:`andata.CorrData` + Data interval loaded into memory. + + """ + return super(CorrDataInterval, self).as_loaded_data( + prod_sel=prod_sel, freq_sel=freq_sel, datasets=datasets + )
+
+ + + +# Legacy. +DataInterval = CorrDataInterval + + +
+[docs] +class HKDataInterval(BaseDataInterval): + """Derived class from :class:`BaseDataInterval` for housekeeping data.""" + + @property + def _reader_class(self): + # only dynamic imports of andata allowed in this module. + from . import andata + + return andata.HKReader
+ + + +
+[docs] +class WeatherDataInterval(BaseDataInterval): + """Derived class from :class:`BaseDataInterval` for weather data.""" + + @property + def _reader_class(self): + # only dynamic imports of andata allowed in this module. + from . import andata + + return andata.WeatherReader
+ + + +
+[docs] +class FlagInputDataInterval(BaseDataInterval): + """Derived class from :class:`BaseDataInterval` for flag input data.""" + + @property + def _reader_class(self): + # only dynamic imports of andata allowed in this module. + from . import andata + + return andata.FlagInputReader
+ + + +
+[docs] +class DigitalGainDataInterval(BaseDataInterval): + """Derived class from :class:`BaseDataInterval` for digital gain data.""" + + @property + def _reader_class(self): + # only dynamic imports of andata allowed in this module. + from . import andata + + return andata.DigitalGainReader
+ + + +
+[docs] +class CalibrationGainDataInterval(BaseDataInterval): + """Derived class from :class:`BaseDataInterval` for calibration gain data.""" + + @property + def _reader_class(self): + # only dynamic imports of andata allowed in this module. + from . import andata + + return andata.CalibrationGainReader
+ + + +# Query routines +# ============== + + +
+[docs] +def files_in_range( + acq, start_time, end_time, node_list, extra_cond=None, node_spoof=None +): + """Get files for a given acquisition within a time range. + + Parameters + ---------- + acq : string or int + Which acquisition, by its name or id key. + start_time : float + POSIX/Unix time for the start or time range. + end_time : float + POSIX/Unix time for the end or time range. + node_list : list of `chimedb.data_index.StorageNode` objects + Only return files residing on the given nodes. + extra_cond : :mod:`peewee` comparison + Any additional expression for filtering files. + + Returns + ------- + file_names : list of strings + List of filenames, including the full path. + + """ + + if isinstance(acq, str): + acq_name = acq + acq = di.ArchiveAcq.get(di.ArchiveAcq.name == acq).acq + else: + acq_name = di.ArchiveAcq.get(di.ArchiveAcq.id == acq).name + + cond = ( + (di.ArchiveFile.acq == acq) + & (di.ArchiveFileCopy.node << node_list) + & (di.ArchiveFileCopy.has_file == "Y") + ) + info_cond = False + for i in file_info_table: + info_cond |= (i.finish_time >= start_time) & (i.start_time <= end_time) + + if extra_cond: + cond &= extra_cond + + query = ( + di.ArchiveFileCopy.select( + di.ArchiveFileCopy.node, + di.ArchiveFile.name, + di.StorageNode.root, + di.StorageNode.name.alias("node_name"), + ) + .join(di.StorageNode) + .switch(di.ArchiveFileCopy) + .join(di.ArchiveFile) + ) + for i in file_info_table: + query = query.switch(di.ArchiveFile).join(i, join_type=pw.JOIN.LEFT_OUTER) + query = query.where(cond & info_cond).objects() + + if not node_spoof: + return [path.join(af.root, acq_name, af.name) for af in query] + else: + return [path.join(node_spoof[af.node_name], acq_name, af.name) for af in query]
+ + + +# Exceptions +# ========== + +# This is the base CHIMEdb exception +from chimedb.core.exceptions import CHIMEdbError + + +
+[docs] +class DataFlagged(CHIMEdbError): + """Raised when data is affected by a global flag."""
+ + + +if __name__ == "__main__": + import doctest + + doctest.testmod() +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/_modules/ch_util/fluxcat.html b/docs/_modules/ch_util/fluxcat.html new file mode 100644 index 00000000..0ee85b65 --- /dev/null +++ b/docs/_modules/ch_util/fluxcat.html @@ -0,0 +1,1688 @@ + + + + + + ch_util.fluxcat — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for ch_util.fluxcat

+"""
+Catalog the measured flux densities of astronomical sources
+
+This module contains tools for cataloging astronomical sources
+and predicting their flux density at radio frequencies based on
+previous measurements.
+"""
+
+from abc import ABCMeta, abstractmethod
+import os
+import fnmatch
+import inspect
+import warnings
+
+from collections import OrderedDict
+import json
+import pickle
+
+import numpy as np
+import base64
+import datetime
+import time
+
+from caput import misc
+import caput.time as ctime
+
+from . import ephemeris
+from .tools import ensure_list
+
+# Define nominal frequency. Sources in catalog are ordered according to
+# their predicted flux density at this frequency. Also acts as default
+# pivot point in spectral fits.
+FREQ_NOMINAL = 600.0
+
+# Define the source collections that should be loaded when this module is imported.
+DIR_COLLECTIONS = os.path.join(os.path.dirname(__file__), "catalogs")
+DEFAULT_COLLECTIONS = [
+    os.path.join(DIR_COLLECTIONS, "primary_calibrators_perley2016.json"),
+    os.path.join(DIR_COLLECTIONS, "specfind_v2_5Jy_vollmer2009.json"),
+]
+
+
+# ==================================================================================
+
+[docs] +class FitSpectrum(object, metaclass=ABCMeta): + """A base class for modeling and fitting spectra. Any spectral model + used by FluxCatalog should be derived from this class. + + The `fit` method should be used to populate the `param`, `param_cov`, and `stats` + attributes. The `predict` and `uncertainty` methods can then be used to obtain + the flux density and uncertainty at arbitrary frequencies. + + Attributes + ---------- + param : np.ndarray[nparam, ] + Best-fit parameters. + param_cov : np.ndarray[nparam, nparam] + Covariance of the fit parameters. + stats : dict + Dictionary that contains statistics related to the fit. + Must include 'chisq' and 'ndof'. + + Abstract Methods + ---------------- + Any subclass of FitSpectrum must define these methods: + fit + _get_x + _fit_func + _deriv_fit_func + """ + + def __init__(self, param=None, param_cov=None, stats=None): + """Instantiates a FitSpectrum object.""" + + self.param = param + self.param_cov = param_cov + self.stats = stats + +
+[docs] + def predict(self, freq): + """Predicts the flux density at a particular frequency.""" + + x = self._get_x(freq) + + return self._fit_func(x, *self.param)
+ + +
+[docs] + def uncertainty(self, freq, alpha=0.32): + """Predicts the uncertainty on the flux density at a + particular frequency. + """ + + from scipy.stats import t + + prob = 1.0 - alpha / 2.0 + tval = t.ppf(prob, self.stats["ndof"]) + nparam = len(self.param) + + x = self._get_x(freq) + + dfdp = self._deriv_fit_func(x, *self.param) + + if hasattr(x, "__iter__"): + df2 = np.zeros(len(x), dtype=np.float64) + else: + df2 = 0.0 + + for ii in range(nparam): + for jj in range(nparam): + df2 += dfdp[ii] * dfdp[jj] * self.param_cov[ii][jj] + + return tval * np.sqrt(df2)
+ + + @abstractmethod + def fit(self, freq, flux, eflux, *args): + return + + @abstractmethod + def _get_x(self, freq): + return + + @staticmethod + @abstractmethod + def _fit_func(x, *param): + return + + @staticmethod + @abstractmethod + def _deriv_fit_func(x, *param): + return
+ + + +
+[docs] +class CurvedPowerLaw(FitSpectrum): + """ + Class to fit a spectrum to a polynomial in log-log space, given by + + .. math:: + \\ln{S} = a_{0} + a_{1} \\ln{\\nu'} + a_{2} \\ln{\\nu'}^2 + a_{3} \\ln{\\nu'}^3 + \\dots + + where :math:`S` is the flux density, :math:`\\nu'` is the (normalized) frequency, + and :math:`a_{i}` are the fit parameters. + + Parameters + ---------- + nparam : int + Number of parameters. This sets the order of the polynomial. + Default is 2 (powerlaw). + freq_pivot : float + The pivot frequency :math:`\\nu' = \\nu / freq_pivot`. + Default is :py:const:`FREQ_NOMINAL`. + """ + + def __init__(self, freq_pivot=FREQ_NOMINAL, nparam=2, *args, **kwargs): + """Instantiates a CurvedPowerLaw object""" + + super(CurvedPowerLaw, self).__init__(*args, **kwargs) + + # Set the additional model kwargs + self.freq_pivot = freq_pivot + + if self.param is not None: + self.nparam = len(self.param) + else: + self.nparam = nparam + + def fit(self, freq, flux, eflux, flag=None): + if flag is None: + flag = np.ones(len(freq), dtype=bool) + + # Make sure we have enough measurements + if np.sum(flag) >= self.nparam: + # Apply flag + fit_freq = freq[flag] + fit_flux = flux[flag] + fit_eflux = eflux[flag] + + # Convert to log space + x = self._get_x(fit_freq) + y = np.log(fit_flux) + + # Vandermonde matrix + A = self._vandermonde(x, self.nparam) + + # Data covariance matrix + C = np.diag((fit_eflux / fit_flux) ** 2.0) + + # Parameter estimate and covariance + param_cov = np.linalg.inv(np.dot(A.T, np.linalg.solve(C, A))) + + param = np.dot(param_cov, np.dot(A.T, np.linalg.solve(C, y))) + + # Compute residuals + resid = y - np.dot(A, param) + + # Change the overall normalization to linear units + param[0] = np.exp(param[0]) + + for ii in range(self.nparam): + for jj in range(self.nparam): + param_cov[ii, jj] *= (param[0] if ii == 0 else 1.0) * ( + param[0] if jj == 0 else 1.0 + ) + + # Save parameter estimate and covariance to instance + self.param = param.tolist() 
+ self.param_cov = param_cov.tolist() + + # Calculate statistics + if not isinstance(self.stats, dict): + self.stats = {} + self.stats["ndof"] = len(x) - self.nparam + self.stats["chisq"] = np.sum(resid**2 / np.diag(C)) + + # Return results + return self.param, self.param_cov, self.stats + + def _get_x(self, freq): + return np.log(freq / self.freq_pivot) + + @staticmethod + def _vandermonde(x, nparam): + return np.vstack(tuple([x**rank for rank in range(nparam)])).T + + @staticmethod + def _fit_func(x, *param): + return param[0] * np.exp( + np.sum( + [par * x ** (rank + 1) for rank, par in enumerate(param[1:])], axis=0 + ) + ) + + @staticmethod + def _deriv_fit_func(x, *param): + z = param[0] * np.exp( + np.sum( + [par * x ** (rank + 1) for rank, par in enumerate(param[1:])], axis=0 + ) + ) + + dfdp = np.array([z * x**rank for rank in range(len(param))]) + dfdp[0] /= param[0] + + return dfdp
+ + + +
+[docs] +class MetaFluxCatalog(type): + """Metaclass for FluxCatalog. Defines magic methods + for the class that can act on and provice access to the + catalog of all astronomical sources. + """ + + def __str__(self): + return self.string() + + def __iter__(self): + return self.iter() + + def __reversed__(self): + return self.reversed() + + def __len__(self): + return self.len() + + def __getitem__(self, key): + return self.get(key) + + def __contains__(self, item): + try: + obj = self.get(item) + except KeyError: + obj = None + + return obj is not None
+ + + +
+[docs] +class FluxCatalog(object, metaclass=MetaFluxCatalog): + """ + Class for cataloging astronomical sources and predicting + their flux density at radio frequencies based on spectral fits + to previous measurements. + + Class methods act upon and provide access to the catalog of + all sources. Instance methods act upon and provide access + to individual sources. All instances are stored in an + internal class dictionary. + + Attributes + ---------- + fields : list + List of attributes that are read-from and written-to the + JSON catalog files. + model_lookup : dict + Dictionary that provides access to the various models that + can be fit to the spectrum. These models should be + subclasses of FitSpectrum. + """ + + fields = [ + "ra", + "dec", + "alternate_names", + "model", + "model_kwargs", + "stats", + "param", + "param_cov", + "measurements", + ] + + model_lookup = {"CurvedPowerLaw": CurvedPowerLaw} + + _entries = {} + _collections = {} + _alternate_name_lookup = {} + + def __init__( + self, + name, + ra=None, + dec=None, + alternate_names=[], + model="CurvedPowerLaw", + model_kwargs=None, + stats=None, + param=None, + param_cov=None, + measurements=None, + overwrite=0, + ): + """ + Instantiates a FluxCatalog object for an astronomical source. + + Parameters + ---------- + name : string + Name of the source. The convention for the source name is to + use the MAIN_ID in the SIMBAD database in all uppercase letters + with spaces replaced by underscores. + + ra : float + Right Ascension in degrees. + + dec : float + Declination in degrees. + + alternate_names : list of strings + Alternate names for the source. Ideally should include all alternate names + present in the SIMBAD database using the naming convention specified above. + + model : string + Name of FitSpectrum subclass. + + model_kwargs : dict + Dictionary containing keywords required by the model. + + stats : dict + Dictionary containing statistics from model fit. 
+ + param : list, length nparam + Best-fit parameters. + + param_cov : 2D-list, size nparam x nparam + Estimate of covariance of fit parameters. + + measurements : 2D-list, size nmeas x 7 + List of measurements of the form: + [freq, flux, eflux, flag, catalog, epoch, citation]. + Should use the add_measurement method to populate this list. + + overwrite : int between 0 and 2 + Action to take in the event that this source is already in the catalog: + - 0 - Return the existing entry. + - 1 - Add the measurements to the existing entry. + - 2 - Overwrite the existing entry. + Default is 0. + """ + + # The name argument is a unique identifier into the catalog. + # Check if there is already a source in the catalog with the + # input name. If there is, then the behavior is set by the + # overwrite argument. + if (overwrite < 2) and (name in FluxCatalog): + # Return existing entry + print("%s already has an entry in catalog." % name, end=" ") + if overwrite == 0: + print("Returning existing entry.") + self = FluxCatalog[name] + + # Add any measurements to existing entry + elif overwrite == 1: + print("Adding measurements to existing entry.") + self = FluxCatalog[name] + if measurements is not None: + self.add_measurement(*measurements) + self.fit_model() + + else: + # Create new instance for this source. 
+ self.name = format_source_name(name) + + # Initialize object attributes + # Basic info: + self.ra = ra + self.dec = dec + + self.alternate_names = [ + format_source_name(aname) for aname in ensure_list(alternate_names) + ] + + # Measurements: + self.measurements = measurements + + # Best-fit model: + self.model = model + self.param = param + self.param_cov = param_cov + self.stats = stats + self.model_kwargs = model_kwargs + if self.model_kwargs is None: + self.model_kwargs = {} + + # Create model object + self._model = self.model_lookup[self.model]( + param=self.param, + param_cov=self.param_cov, + stats=self.stats, + **self.model_kwargs + ) + + # Populate the kwargs that were used + arg_list = misc.getfullargspec(self.model_lookup[self.model].__init__) + if len(arg_list.args) > 1: + keys = arg_list.args[1:] + for key in keys: + if hasattr(self._model, key): + self.model_kwargs[key] = getattr(self._model, key) + + if not self.model_kwargs: + self.model_kwargs = None + + # Save to class dictionary + self._entries[self.name] = self + + # Add alternate names to class dictionary so they can be searched quickly + for alt_name in self.alternate_names: + if alt_name in self._alternate_name_lookup: + warnings.warn( + "The alternate name %s is already held by the source %s." + % (alt_name, self._alternate_name_lookup[alt_name]) + ) + else: + self._alternate_name_lookup[alt_name] = self.name + +
+[docs] + def add_measurement( + self, freq, flux, eflux, flag=True, catalog=None, epoch=None, citation=None + ): + """Add entries to the list of measurements. Each argument/keyword + can be a list of items with length equal to 'len(flux)', or + alternatively a single item in which case the same value is used + for all measurements. + + Parameters + ---------- + freq : float, list of floats + Frequency in MHz. + + flux : float, list of floats + Flux density in Jansky. + + eflux : float, list of floats + Uncertainty on flux density in Jansky. + + flag : bool, list of bool + If True, use this measurement in model fit. + Default is True. + + catalog : string or None, list of strings or Nones + Name of the catalog from which this measurement originates. + Default is None. + + epoch : float or None, list of floats or Nones + Year when this measurement was taken. + Default is None. + + citation : string or None, list of strings or Nones + Citation where this measurement can be found + (e.g., 'Baars et al. (1977)'). + Default is None. + + """ + + # Ensure that all of the inputs are lists + # of the same length as flux + flux = ensure_list(flux) + nmeas = len(flux) + + freq = ensure_list(freq, nmeas) + eflux = ensure_list(eflux, nmeas) + flag = ensure_list(flag, nmeas) + catalog = ensure_list(catalog, nmeas) + epoch = ensure_list(epoch, nmeas) + citation = ensure_list(citation, nmeas) + + # Store as list + meas = [ + [ + freq[mm], + flux[mm], + eflux[mm], + flag[mm], + catalog[mm], + epoch[mm], + citation[mm], + ] + for mm in range(nmeas) + ] + + # Add measurements to internal list + if self.measurements is None: + self.measurements = meas + else: + self.measurements += meas + + # Sort internal list by frequency + isort = np.argsort(self.freq) + self.measurements = [self.measurements[mm] for mm in isort]
+ + +
+[docs] + def fit_model(self): + """Fit the measurements stored in the 'measurements' attribute with the + spectral model specified in the 'model' attribute. This populates the + 'param', 'param_cov', and 'stats' attributes. + """ + + arg_list = misc.getfullargspec(self._model.fit).args[1:] + + args = [self.freq[self.flag], self.flux[self.flag], self.eflux[self.flag]] + + if (self.epoch is not None) and ("epoch" in arg_list): + args.append(self.epoch[self.flag]) + + self.param, self.param_cov, self.stats = self._model.fit(*args)
+ + +
+[docs] + def plot(self, legend=True, catalog=True, residuals=False): + """Plot the measurements, best-fit model, and confidence interval. + + Parameters + ---------- + legend : bool + Show legend. Default is True. + + catalog : bool + If True, then label and color code the measurements according to + their catalog. If False, then label and color code the measurements + according to their citation. Default is True. + + residuals : bool + Plot the residuals instead of the measurements and best-fit model. + Default is False. + """ + + import matplotlib + import matplotlib.pyplot as plt + + # Define plot parameters + colors = ["blue", "darkorchid", "m", "plum", "mediumvioletred", "palevioletred"] + markers = ["o", "*", "s", "p", "^"] + sizes = [10, 12, 12, 12, 12] + + font = {"family": "sans-serif", "weight": "normal", "size": 16} + + plt.rc("font", **font) + + nplot = 500 + + # Plot the model fit and uncertainty + xrng = [np.floor(np.log10(self.freq.min())), np.ceil(np.log10(self.freq.max()))] + xrng = [min(xrng[0], 2.0), max(xrng[1], 3.0)] + + fplot = np.logspace(*xrng, num=nplot) + + xrng = [10.0**xx for xx in xrng] + + if residuals: + flux_hat = self.predict_flux(self.freq) + flux = (self.flux - flux_hat) / flux_hat + eflux = self.eflux / flux_hat + model = np.zeros_like(fplot) + delta = self.predict_uncertainty(fplot) / self.predict_flux(fplot) + ylbl = "Residuals: " + r"$(S - \hat{S}) / \hat{S}$" + yrng = [-0.50, 0.50] + else: + flux = self.flux + eflux = self.eflux + model = self.predict_flux(fplot) + delta = self.predict_uncertainty(fplot) + ylbl = "Flux Density [Jy]" + yrng = [model.min(), model.max()] + + plt.fill_between( + fplot, model - delta, model + delta, facecolor="darkgray", alpha=0.3 + ) + plt.plot( + fplot, + model - delta, + fplot, + model + delta, + color="black", + linestyle="-", + linewidth=0.5, + ) + + plt.plot( + fplot, model, color="black", linestyle="-", linewidth=1.0, label=self.model + ) + + # Plot the measurements + if catalog: + 
cat_uniq = list(set(self.catalog)) + else: + cat_uniq = list(set(self.citation)) + + # Loop over catalogs/citations + for ii, cat in enumerate(cat_uniq): + if catalog: + pind = np.array([cc == cat for cc in self.catalog]) + else: + pind = np.array([cc == cat for cc in self.citation]) + + if cat is None: + pcol = "black" + pmrk = "o" + psz = 10 + lbl = "Meas." + else: + pcol = colors[ii % len(colors)] + pmrk = markers[ii // len(colors)] + psz = sizes[ii // len(colors)] + lbl = cat + + plt.errorbar( + self.freq[pind], + flux[pind], + yerr=eflux[pind], + color=pcol, + marker=pmrk, + markersize=psz, + linestyle="None", + label=lbl, + ) + + # Set log axis + ax = plt.gca() + ax.set_xscale("log") + if not residuals: + ax.set_yscale("log") + plt.xlim(xrng) + plt.ylim(yrng) + + plt.grid(b=True, which="both") + + # Plot lines denoting CHIME band + plt.axvspan(400.0, 800.0, color="green", alpha=0.1) + + # Create a legend + if legend: + plt.legend(loc="lower left", numpoints=1, prop=font) + + # Set labels + plt.xlabel("Frequency [MHz]") + plt.ylabel(ylbl) + + # Create block with statistics + if not residuals: + txt = r"$\chi^2 = %0.2f$ $(%d)$" % (self.stats["chisq"], self.stats["ndof"]) + + plt.text( + 0.95, + 0.95, + txt, + horizontalalignment="right", + verticalalignment="top", + transform=ax.transAxes, + ) + + # Create title + ttl = self.name.replace("_", " ") + plt.title(ttl)
+ + +
+[docs] + def predict_flux(self, freq, epoch=None): + """Predict the flux density of the source at a particular + frequency and epoch. + + Parameters + ---------- + freq : float, np.array of floats + Frequency in MHz. + + epoch : float, np.array of floats + Year. Defaults to current year. + + Returns + ------- + flux : float, np.array of floats + Flux density in Jansky. + + """ + + arg_list = misc.getfullargspec(self._model.predict).args[1:] + + if (epoch is not None) and ("epoch" in arg_list): + args = [freq, epoch] + else: + args = [freq] + + flux = self._model.predict(*args) + + return flux
+ + +
+[docs] + def predict_uncertainty(self, freq, epoch=None): + """Calculate the uncertainty in the estimate of the flux density + of the source at a particular frequency and epoch. + + Parameters + ---------- + freq : float, np.array of floats + Frequency in MHz. + + epoch : float, np.array of floats + Year. Defaults to current year. + + Returns + ------- + flux_uncertainty : float, np.array of floats + Uncertainty on the flux density in Jansky. + + """ + + arg_list = misc.getfullargspec(self._model.uncertainty).args[1:] + + if (epoch is not None) and ("epoch" in arg_list): + args = [freq, epoch] + else: + args = [freq] + + flux_uncertainty = self._model.uncertainty(*args) + + return flux_uncertainty
+ + +
+[docs] + def to_dict(self): + """Returns an ordered dictionary containing attributes + for this instance object. Used to dump the information + stored in the instance object to a file. + + Returns + ------- + flux_body_dict : dict + Dictionary containing all attributes listed in + the 'fields' class attribute. + """ + + flux_body_dict = OrderedDict() + + for key in self.fields: + if hasattr(self, key) and (getattr(self, key) is not None): + flux_body_dict[key] = getattr(self, key) + + return flux_body_dict
+ + + def __str__(self): + """Returns a string containing basic information about the source. + Called by the print statement. + """ + source_string = ( + "{0:<25.25s} {1:>6.2f} {2:>6.2f} {3:>6d} {4:^15.1f} {5:^15.1f}".format( + self.name, + self.ra, + self.dec, + len(self), + self.predict_flux(FREQ_NOMINAL), + 100.0 + * self.predict_uncertainty(FREQ_NOMINAL) + / self.predict_flux(FREQ_NOMINAL), + ) + ) + + return source_string + + def __len__(self): + """Returns the number of measurements of the source.""" + return len(self.measurements) if self.measurements is not None else 0 + +
+[docs] + def print_measurements(self): + """Print all measurements.""" + + out = [] + + # Define header + hdr = "{0:<10s} {1:>8s} {2:>8s} {3:>6s} {4:>8s} {5:>8s} {6:<60s}".format( + "Frequency", "Flux", "Error", "Flag", "Catalog", "Epoch", "Citation" + ) + + units = "{0:<10s} {1:>8s} {2:>8s} {3:>6s} {4:>8s} {5:>8s} {6:<60s}".format( + "[MHz]", "[Jy]", "[%]", "", "", "", "" + ) + + # Setup the title + out.append("".join(["="] * max(len(hdr), len(units)))) + out.append("NAME: {0:s}".format(self.name.replace("_", " "))) + out.append("RA: {0:>6.2f} deg".format(self.ra)) + out.append("DEC: {0:>6.2f} deg".format(self.dec)) + out.append("{0:d} Measurements".format(len(self.measurements))) + + out.append("".join(["-"] * max(len(hdr), len(units)))) + out.append(hdr) + out.append(units) + out.append("".join(["-"] * max(len(hdr), len(units)))) + + # Add the measurements + for meas in self.measurements: + if meas[5] is None: + epoch_fmt = "{5:>8s}" + else: + epoch_fmt = "{5:>8.1f}" + + fmt_string = ( + "{0:<10.1f} {1:>8.1f} {2:>8.1f} {3:>6s} {4:>8s} " + + epoch_fmt + + " {6:<.60s}" + ) + + entry = fmt_string.format( + meas[0], + meas[1], + 100.0 * meas[2] / meas[1], + "Good" if meas[3] else "Bad", + meas[4] if meas[4] is not None else "--", + meas[5] if meas[5] is not None else "--", + meas[6] if meas[6] is not None else "--", + ) + + out.append(entry) + + # Print + print("\n".join(out))
+ + + @property + def skyfield(self): + """Skyfield star representation :class:`skyfield.starlib.Star` + for the source. + """ + return ctime.skyfield_star_from_ra_dec(self.ra, self.dec, self.name) + + @property + def freq(self): + """Frequency of measurements in MHz.""" + return np.array([meas[0] for meas in self.measurements]) + + @property + def flux(self): + """Flux measurements in Jansky.""" + return np.array([meas[1] for meas in self.measurements]) + + @property + def eflux(self): + """Error on the flux measurements in Jansky.""" + return np.array([meas[2] for meas in self.measurements]) + + @property + def flag(self): + """Boolean flag indicating what measurements are used + in the spectral fit. + """ + return np.array([meas[3] for meas in self.measurements]) + + @property + def catalog(self): + """Catalog from which each measurement originates.""" + return np.array([meas[4] for meas in self.measurements]) + + @property + def epoch(self): + """Year that each measurement occured.""" + return np.array([meas[5] for meas in self.measurements]) + + @property + def citation(self): + """Citation where more information on each measurement + can be found. + """ + return np.array([meas[6] for meas in self.measurements]) + + @property + def _sort_id(self): + """Sources in the catalog are ordered according to this + property. Currently use the predicted flux at FREQ_NOMINAL + in descending order. + """ + # Multipy by -1 so that we will + # sort from higher to lower flux + return -self.predict_flux( + FREQ_NOMINAL, epoch=get_epoch(datetime.datetime.now()) + ) + + # ============================================================= + # Class methods that act on the entire catalog + # ============================================================= + +
+[docs] + @classmethod + def string(cls): + """Print basic information about the sources in the catalog.""" + + catalog_string = [] + + # Print the header + hdr = "{0:<25s} {1:^6s} {2:^6s} {3:>6s} {4:^15s} {5:^15s}".format( + "Name", "RA", "Dec", "Nmeas", "Flux", "Error" + ) + + units = "{0:<25s} {1:^6s} {2:^6s} {3:>6s} {4:^15s} {5:^15s}".format( + "", + "[deg]", + "[deg]", + "", + "@%d MHz [Jy]" % FREQ_NOMINAL, + "@%d MHz [%%]" % FREQ_NOMINAL, + ) + + catalog_string.append("".join(["-"] * max(len(hdr), len(units)))) + catalog_string.append(hdr) + catalog_string.append(units) + catalog_string.append("".join(["-"] * max(len(hdr), len(units)))) + + # Loop over sorted entries and print + for key in cls.sort(): + catalog_string.append(cls[key].__str__()) + + return "\n".join(catalog_string)
+ + +
+[docs] + @classmethod + def from_dict(cls, name, flux_body_dict): + """Instantiates a FluxCatalog object for an astronomical source + from a dictionary of kwargs. Used when loading sources from a + JSON catalog file. + + Parameters + ---------- + name : str + Name of the astronomical source. + + flux_body_dict : dict + Dictionary containing some or all of the keyword arguments + listed in the __init__ function for this class. + + Returns + ------- + obj : FluxCatalog instance + Object that can be used to predict the flux of this source, + plot flux measurements, etc. + + """ + + arg_list = misc.getfullargspec(cls.__init__).args[2:] + + kwargs = { + field: flux_body_dict[field] + for field in arg_list + if field in flux_body_dict + } + + return cls(name, **kwargs)
+ + +
+[docs] + @classmethod + def get(cls, key): + """Searches the catalog for a source. First checks against the + 'name' of each entry, then checks against the 'alternate_names' + of each entry. + + Parameters + ---------- + key : str + Name of the astronomical source. + + Returns + ------- + obj : FluxCatalog instance + Object that can be used to predict the flux of this source, + plot flux measurements, etc. + + """ + + # Check that key is a string + if not isinstance(key, str): + raise TypeError("Provide source name as string.") + + fkey = format_source_name(key) + + # First check names + obj = cls._entries.get(fkey, None) + + # Next check alternate names + if obj is None: + afkey = cls._alternate_name_lookup.get(fkey, None) + if afkey is not None: + obj = cls._entries.get(afkey) + + # Check if the object was found + if obj is None: + raise KeyError("%s was not found." % fkey) + + # Return the body corresponding to this source + return obj
+ + +
+[docs] + @classmethod + def delete(cls, source_name): + """Deletes a source from the catalog. + + Parameters + ---------- + source_name : str + Name of the astronomical source. + + """ + + try: + obj = cls.get(source_name) + except KeyError: + key = None + else: + key = obj.name + + if key is not None: + obj = cls._entries.pop(key) + + for akey in obj.alternate_names: + cls._alternate_name_lookup.pop(akey, None) + + del obj
+ + +
+[docs] + @classmethod + def sort(cls): + """Sorts the entries in the catalog by their flux density + at FREQ_NOMINAL in descending order. + + Returns + ------- + names : list of str + List of source names in correct order. + + """ + + keys = [] + for name, body in cls._entries.items(): + keys.append((body._sort_id, name)) + + keys.sort() + + return [key[1] for key in keys]
+ + +
+[docs] + @classmethod + def keys(cls): + """Alias for sort. + + Returns + ------- + names : list of str + List of source names in correct order. + + """ + return cls.sort()
+ + +
+[docs] + @classmethod + def iter(cls): + """Iterates through the sources in the catalog. + + Returns + ------- + it : iterator + Provides the name of each source in the catalog + in the order specified by the 'sort' class method. + + """ + return iter(cls.sort())
+ + +
+[docs] + @classmethod + def reversed(cls): + """Iterates through the sources in the catalog + in reverse order. + + Returns + ------- + it : iterator + Provides the name of each source in the catalog + in the reverse order as that specified by the + 'sort' class method. + + """ + return reversed(cls.sort())
+ + +
+[docs] + @classmethod + def iteritems(cls): + """Iterates through the sources in the catalog. + + Returns + ------- + it : iterator + Provides (name, object) for each source in the catalog + in the order specified by the 'sort' class method. + + """ + return iter([(key, cls._entries[key]) for key in cls.sort()])
+ + +
+[docs] + @classmethod + def len(cls): + """Number of sources in the catalog. + + Returns + ------- + N : int + + """ + return len(cls._entries)
+ + +
+[docs] + @classmethod + def available_collections(cls): + """Search the local directory for potential collections that + can be loaded. + + Returns + ------- + collections : list of (str, [str, ...]) + List containing a tuple for each collection. The tuple contains + the filename of the collection (str) and the sources it contains + (list of str). + + """ + + # Determine the directory where this class is located + current_file = inspect.getfile(cls.__class__) + current_dir = os.path.abspath(os.path.dirname(os.path.dirname(current_file))) + + # Search this directory recursively for JSON files. + # Load each one that is found into a dictionary and + # return the number of sources and source names. + matches = [] + for root, dirnames, filenames in os.walk(current_dir): + for filename in fnmatch.filter(filenames, "*.json") + fnmatch.filter( + filenames, "*.pickle" + ): + full_path = os.path.join(root, filename) + + # Read into dictionary + with open(full_path, "r") as fp: + collection_dict = json.load(fp, object_hook=json_numpy_obj_hook) + + # Append (path, number of sources, source names) to list + matches.append((full_path, list(collection_dict.keys()))) + + # Return matches + return matches
+ + +
+[docs] + @classmethod + def print_available_collections(cls, verbose=False): + """Print information about the available collections. + + Parameters + ---------- + verbose : bool + If True, then print all source names in addition to the names + of the files and number of sources. Default is False. + """ + for cc in cls.available_collections(): + _print_collection_summary(*cc, verbose=verbose)
+ + +
+[docs] + @classmethod + def loaded_collections(cls): + """Return the collections that have been loaded. + + Returns + ------- + collections : list of (str, [str, ...]) + List containing a tuple for each collection. The tuple contains + the filename of the collection (str) and the sources it contains + (list of str). + """ + return list(cls._collections.items())
+ + +
+[docs] + @classmethod + def print_loaded_collections(cls, verbose=False): + """Print information about the collection that have been loaded. + + Parameters + ---------- + verbose : bool + If True, then print all source names in addition to the names + of the files and number of sources. Default is False. + """ + for cat, sources in cls._collections.items(): + _print_collection_summary(cat, sources, verbose=verbose)
+ + + @classmethod + def delete_loaded_collection(cls, cat): + sources_to_delete = cls._collections.pop(cat) + + for source_name in sources_to_delete: + cls.delete(source_name) + +
+[docs] + @classmethod + def dump(cls, filename): + """Dumps the contents of the catalog to a file. + + Parameters + ---------- + filename : str + Valid path name. Should have .json or .pickle extension. + + """ + + # Parse filename + filename = os.path.expandvars(os.path.expanduser(filename)) + path = os.path.abspath(os.path.dirname(filename)) + ext = os.path.splitext(filename)[1] + + if ext not in [".pickle", ".json"]: + raise ValueError("Do not recognize '%s' extension." % ext) + + try: + os.makedirs(path) + except OSError: + if not os.path.isdir(path): + raise + + # Sort based on _sort_id + keys = cls.sort() + + # Place a dictionary with the information + # stored in each object into an OrderedDict + output = OrderedDict() + for key in keys: + output[key] = cls._entries[key].to_dict() + + # Dump this dictionary to file + with open(filename, "w") as fp: + if ext == ".json": + json.dump(output, fp, cls=NumpyEncoder, indent=4) + elif ext == ".pickle": + pickle.dump(output, fp)
+ + +
+[docs] + @classmethod + def load(cls, filename, overwrite=0, set_globals=False, verbose=False): + """ + Load the contents of a file into the catalog. + + Parameters + ---------- + filename : str + Valid path name. Should have .json or .pickle extension. + overwrite : int between 0 and 2 + Action to take in the event that this source is already in the catalog: + - 0 - Return the existing entry. + - 1 - Add any measurements to the existing entry. + - 2 - Overwrite the existing entry. + Default is 0. + set_globals : bool + If True, this creates a variable in the global space + for each source in the file. Default is False. + verbose : bool + If True, print some basic info about the contents of + the file as it is loaded. Default is False. + """ + + # Parse filename + # Define collection name as basename of file without extension + filename = os.path.expandvars(os.path.expanduser(filename)) + collection_name, ext = os.path.splitext(os.path.basename(filename)) + + # Check if the file actually exists and has the correct extension + if not os.path.isfile(filename): + raise ValueError("%s does not exist." % filename) + + if ext not in [".pickle", ".json"]: + raise ValueError("Do not recognize '%s' extension." 
% ext) + + # Load contents of file into a dictionary + with open(filename, "r") as fp: + if ext == ".json": + collection_dict = json.load(fp, object_hook=json_numpy_obj_hook) + elif ext == ".pickle": + collection_dict = pickle.load(fp) + + # Add this to the list of files + cls._collections[collection_name] = list(collection_dict.keys()) + + # If requested, print some basic info about the collection + if verbose: + _print_collection_summary(cls._collections[collection_name]) + + # Loop through dictionary and add each source to the catalog + for key, value in collection_dict.items(): + # Add overwrite keyword + value["overwrite"] = overwrite + + # Create object for this source + obj = cls.from_dict(key, value) + + # If requested, create a variable in the global space + # containing the object for this source. + if set_globals: + varkey = varname(key) + globals()[varkey] = obj
+
+ + + +
+[docs] +def get_epoch(date): + """Return the epoch for a date. + + Parameters + ---------- + date : datetime.datetime + Date to calculate epoch + + Returns + ------- + epoch : float + The fractional-year epoch + """ + + def sinceEpoch(date): # returns seconds since epoch + return time.mktime(date.timetuple()) + + year = date.year + startOfThisYear = datetime.datetime(year=year, month=1, day=1) + startOfNextYear = datetime.datetime(year=year + 1, month=1, day=1) + + yearElapsed = sinceEpoch(date) - sinceEpoch(startOfThisYear) + yearDuration = sinceEpoch(startOfNextYear) - sinceEpoch(startOfThisYear) + fraction = yearElapsed / yearDuration + + return date.year + fraction
+ + + +
+[docs] +def varname(name): + """Create a python variable name from `name`. + + The variable name replaces spaces in `name` with + underscores and adds a leading underscore if `name` + starts with a digit. + + Parameters + ---------- + name : str + The name to create a variable name for + + Returns + ------- + varname : str + The python variable name. + """ + varname = name.replace(" ", "_") + + if varname[0].isdigit(): + varname = "_" + varname + + return varname
+ + + +
+[docs] +def format_source_name(input_name): + """Standardise the name of a source. + + Parameters + ---------- + input_name: str + The name to format + + Returns + formatted_name: str + The name after formatting. + """ + # Address some common naming conventions. + if input_name.startswith("NAME "): + # SIMBAD prefixes common source names with 'NAME '. + # Remove this. + output_name = input_name[5:] + + elif not any(char.isdigit() for char in input_name): + # We have been using PascalCase to denote common source names. + # Convert from CygA, HerA, PerB ---> Cyg A, Her A, Per B. + output_name = input_name[0] + for ii in range(1, len(input_name)): + if input_name[ii - 1].islower() and input_name[ii].isupper(): + output_name += " " + input_name[ii] + else: + output_name += input_name[ii] + + else: + # No funny business with the input_name in this case. + output_name = input_name + + # Remove multiple spaces. Replace single spaces with underscores. + output_name = "_".join(output_name.split()) + + # Put the name in all uppercase. + output_name = output_name.upper() + + # Return properly formatted name + return output_name
+ + + +
+[docs] +class NumpyEncoder(json.JSONEncoder): +
+[docs] + def default(self, obj): + """If input object is an ndarray it will be converted into a dict + holding dtype, shape and the data, base64 encoded. + """ + if isinstance(obj, np.ndarray): + if obj.flags["C_CONTIGUOUS"]: + obj_data = obj.data + else: + cont_obj = np.ascontiguousarray(obj) + assert cont_obj.flags["C_CONTIGUOUS"] + obj_data = cont_obj.data + data_b64 = base64.b64encode(obj_data) + return dict(__ndarray__=data_b64, dtype=str(obj.dtype), shape=obj.shape) + # Let the base class default method raise the TypeError + return json.JSONEncoder(self, obj)
+
+ + + +
+[docs] +def json_numpy_obj_hook(dct): + """Decodes a previously encoded numpy ndarray with proper shape and dtype. + + :param dct: (dict) json encoded ndarray + :return: (ndarray) if input was an encoded ndarray + """ + if isinstance(dct, dict) and "__ndarray__" in dct: + data = base64.b64decode(dct["__ndarray__"]) + return np.frombuffer(data, dct["dtype"]).reshape(dct["shape"]) + return dct
+ + + +def _print_collection_summary(collection_name, source_names, verbose=True): + """This prints out information about a collection of sources + in a standardized way. + + Parameters + ---------- + collection_name : str + Name of the collection. + + source_names : list of str + Names of the sources in the collection. + + verbose : bool + If true, then print out all of the source names. + """ + + ncol = 4 + nsrc = len(source_names) + + # Create a header containing the collection name and number of sources + header = collection_name + " (%d Sources)" % nsrc + print(header) + + # Print the sources contained in this collection + if verbose: + # Seperator + print("".join(["-"] * len(header))) + + # Source names + for ii in range(0, nsrc, ncol): + jj = min(ii + ncol, nsrc) + print((" ".join(["%-25s"] * (jj - ii))) % tuple(source_names[ii:jj])) + + # Space + print("") + + +# Load the default collections +for col in DEFAULT_COLLECTIONS: + FluxCatalog.load(col) +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/_modules/ch_util/hfbcat.html b/docs/_modules/ch_util/hfbcat.html new file mode 100644 index 00000000..6878ff89 --- /dev/null +++ b/docs/_modules/ch_util/hfbcat.html @@ -0,0 +1,290 @@ + + + + + + ch_util.hfbcat — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for ch_util.hfbcat

+"""
+Catalog of HFB test targets
+"""
+
+from __future__ import annotations
+import os
+import numpy as np
+from typing import TYPE_CHECKING, Union
+
+from . import ephemeris
+from .fluxcat import FluxCatalog
+from .tools import ensure_list
+
+if TYPE_CHECKING:
+    import skyfield.starlib.Star
+
+# Define the source collection that should be loaded when this module is imported.
+HFB_COLLECTION = os.path.join(
+    os.path.dirname(__file__), "catalogs", "hfb_target_list.json"
+)
+
+
+
class HFBCatalog(FluxCatalog):
    """
    Class for cataloguing HFB targets.

    Attributes
    ----------
    fields : list
        List of attributes that are read-from and written-to the
        JSON catalog files.
    """

    fields = [
        "ra",
        "dec",
        "alternate_names",
        "freq_abs",
    ]

    def __init__(
        self,
        name,
        ra=None,
        dec=None,
        alternate_names=None,
        freq_abs=None,
        overwrite=0,
    ):
        """
        Instantiate an HFBCatalog object for an HFB target.

        Parameters
        ----------
        name : string
            Name of the source.

        ra : float
            Right Ascension in degrees.

        dec : float
            Declination in degrees.

        alternate_names : list of strings, optional
            Alternate names for the source.  Defaults to an empty list.

        freq_abs : list of floats, optional
            Frequencies at which (the peaks of) absorption features are
            found.  Defaults to an empty list.

        overwrite : int between 0 and 2
            Action to take in the event that this source is already in the catalog:
            - 0 - Return the existing entry.
            - 1 - Add the measurements to the existing entry.
            - 2 - Overwrite the existing entry.
            Default is 0.
            BUG: Currently, `freq_abs` is always overwritten.
        """
        # None sentinels instead of mutable default arguments ([]): a
        # literal list default would be shared by every call of __init__,
        # so mutations would leak between catalog entries.
        if alternate_names is None:
            alternate_names = []
        if freq_abs is None:
            freq_abs = []

        super().__init__(
            name,
            ra=ra,
            dec=dec,
            alternate_names=alternate_names,
            overwrite=overwrite,
        )

        self.freq_abs = freq_abs
+ + + +
def get_doppler_shifted_freq(
    source: Union[skyfield.starlib.Star, str],
    date: Union[float, list],
    freq_rest: Union[float, list] = None,
    obs: ephemeris.Observer = ephemeris.chime,
) -> np.array:
    """Calculate Doppler shifted frequency of spectral feature with rest
    frequency `freq_rest`, seen towards source `source` at time `date`, due to
    Earth's motion and rotation, following the relativistic Doppler effect.

    Parameters
    ----------
    source
        Position(s) on the sky. If the input is a `str`, attempt to resolve
        this from `ch_util.hfbcat.HFBCatalog`.
    date
        Unix time(s) for which to calculate Doppler shift.
    freq_rest
        Rest frequency(ies) in MHz. If None, attempt to obtain rest frequency
        of absorption feature from `ch_util.hfbcat.HFBCatalog.freq_abs`.
    obs
        An Observer instance to use. If not supplied use `chime`. For many
        calculations changing from this default will make little difference.

    Returns
    -------
    freq_obs
        Doppler shifted frequencies in MHz. Array where rows correspond to the
        different input rest frequencies and columns correspond either to input
        times or to input sky positions (whichever contains multiple entries).

    Notes
    -----
    Only one of `source` and `date` can contain multiple entries.
    """

    from scipy.constants import c as speed_of_light

    # A source given by name is resolved through the HFB catalog.
    if isinstance(source, str):
        try:
            source = HFBCatalog[source].skyfield
        except KeyError:
            raise ValueError(f"Could not find source {source} in HFB catalog.")

    # No rest frequencies supplied: fall back to the catalogued
    # absorption-feature frequencies of this source.
    if freq_rest is None:
        if not source.names or source.names not in HFBCatalog:
            raise ValueError(
                "Rest frequencies must be supplied unless source can be found "
                "in ch_util.hfbcat.HFBCatalog. "
                f"Got source {source} with names {source.names}"
            )
        freq_rest = HFBCatalog[source.names].freq_abs

    # Column vector of rest frequencies, so they broadcast against the
    # (possibly multiple) times or sky positions.
    rest = np.asarray(ensure_list(freq_rest))[:, np.newaxis]

    # Rate of change of the observer-source distance
    # (positive when they are moving apart).
    v_radial = ephemeris.get_range_rate(source, date, obs)

    # Relativistic Doppler effect.
    beta = v_radial / speed_of_light
    return rest * np.sqrt((1.0 - beta) / (1.0 + beta))


# Load the HFB target list
HFBCatalog.load(HFB_COLLECTION)
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/_modules/ch_util/holography.html b/docs/_modules/ch_util/holography.html new file mode 100644 index 00000000..ade157ec --- /dev/null +++ b/docs/_modules/ch_util/holography.html @@ -0,0 +1,985 @@ + + + + + + ch_util.holography — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for ch_util.holography

+"""
+Holography observation tables.
+
+This module defines the tables:
+
+- :py:class:`HolographyObservation`
+- :py:class:`HolographySource`
+
+and the constants:
+
+- :py:const:`QUALITY_GOOD`
+- :py:const:`QUALITY_OFFSOURCE`
+- :py:const:`ONSOURCE_DIST_TO_FLAG`
+
+"""
+
+import os
+import warnings
+import zipfile
+import numpy as np
+import peewee as pw
+
+import caput.time as ctime
+from chimedb.core.orm import base_model
+
+from ch_util import ephemeris
+
+# Global variables and constants.
+# ================================
+
+QUALITY_GOOD = 0
+QUALITY_OFFSOURCE = 1
+QUALITY_BADGATING = 2
+QUALITY_NOISEOFF = 4
+ONSOURCE_DIST_TO_FLAG = 0.1
+
+# Tables in the for tracking Holography observations
+# ==================================================
+
+
+
class HolographySource(base_model):
    """A peewee model for the Holography sources.

    Attributes
    ----------
    name : str
        Unique name for the source. Be careful to avoid duplicates.
    ra, dec : float
        ICRS co-ordinates of the source.
    """

    # Unique source name; uniqueness is enforced at the database level.
    name = pw.CharField(max_length=128, unique=True)
    # ICRS right ascension / declination, in degrees (consumed as degrees
    # by HolographyObservation.create_from_ant_logs via Angle(degrees=...)).
    ra = pw.FloatField()
    dec = pw.FloatField()
+ + + +
+[docs] +class HolographyObservation(base_model): + """ + A peewee model for the holographic observations. + + Attributes + ---------- + source : foreign key + The source that we were observing. + start_time, finish_time : float + Start and end times of the source observation (as UNIX times). + notes : str + Any free form notes about the observation. + """ + + source = pw.ForeignKeyField(HolographySource, backref="observations") + start_time = pw.DoubleField() + finish_time = pw.DoubleField() + + quality_flag = ( + pw.BitField() + ) # maximum of 64 fields. If we need more, use BigBitField + off_source = quality_flag.flag(QUALITY_OFFSOURCE) + bad_gating = quality_flag.flag(QUALITY_BADGATING) + noise_off = quality_flag.flag(QUALITY_NOISEOFF) + + notes = pw.TextField(null=True) + +
    @classmethod
    def from_lst(
        cls,
        source,
        start_day,
        start_lst,
        duration_lst,
        quality_flag=QUALITY_GOOD,
        notes=None,
    ):
        """Create a HolographyObservation from a start day, a start LST,
        and a duration in sidereal hours.

        Parameters
        ----------
        source : HolographySource
            An instance of HolographySource.
        start_day : string
            Of format YYYYMMDD-ABT, ABT can be one of (UTC, PST, PDT).
        start_lst, duration_lst : float
            Hours and fraction of hours on a scale from 0-24.
        quality_flag : int, default : 0
            Flag for poor quality data. Good data is zero.
            Sets a bitmask in the HolographyObservation instance.
        notes : string, optional
            Any notes on this observation.

        Returns
        -------
        HolographyObservation
            The observation row created via ``cls.create``.
        """

        # Convert the start LST (hours) to local stellar angle (degrees)
        # anchored to the Unix time of the given start day.
        start_time = ephemeris.lsa_to_unix(
            start_lst * 360 / 24,
            ctime.datetime_to_unix(ephemeris.parse_date(start_day)),
        )
        # Duration is in sidereal hours; scale by SIDEREAL_S to get SI seconds.
        duration_unix = duration_lst * (3600.0) * ctime.SIDEREAL_S

        finish_time = start_time + duration_unix

        return cls.create(
            source=source,
            start_time=start_time,
            finish_time=finish_time,
            quality_flag=quality_flag,
            notes=notes,
        )
+ + + # Aliases of source names in the spreadsheet to ones we use in the database + # (hard-coded at initialization, but user should be able to edit) + source_alias = { + "B0329******": "B0329+54", + "B0950*****": "B0950+08", + "B1133+16*****": "B1133+16", + "B1929+10*****": "B1929+10", + "B0355+56": "B0355+54", + "3C218": "HydraA", + "C48": "3C48", + "3C_58": "3C58", + "3C348": "HerA", + "3C144": "TauA", + "PerB": "3C123", + "B0531+21*****": "B0531+21", + "B2016+28*****": "B2016+28", + "B1133*****": "B1133+16", + "B1937+21*****": "B1937+21", + "B2016*****": "B2016+28", + "B0950+08*****": "B0950+08", + "FAN": "FanRegion1", + "Fan Region 1": "FanRegion1", + "FAN1": "FanRegion1", + "Fan Region 2": "FanRegion2", + "FAN2": "FanRegion2", + "B0905*****": "B0905*****", + "VIRA": "VirA", + "3C274": "VirA", + "3C405": "CygA", + "3C461": "CasA", + "NCP_20H": "NCP 20H", + "NCP_4H": "NCP 4H", + } + + # read the .POST_REPORT file and pull out source name, time, and observation + # duration +
+[docs] + @classmethod + def parse_post_report(cls, post_report_file): + """ + read a .POST_REPORT file from the nsched program which controls the + John Galt Telescope and extract the source name, estimated start time, + DRAO sidereal day, commanded duration, and estimated finish time + + Parameters + ---------- + post_report_file : str + path to the .POST_REPORT file to read + + Returns + ------- + output_params : dictionary + output_params['src'] : HolographySource object or string + If the source is a known source in the holography database, + return the HolographySource object. If not, return the name + of the source as a string + output_params['SID'] : int + DRAO sidereal day at the beginning of the observation + output_params['start_time'] : skyfield time object + UTC time at the beginning of the observation + output_params['DURATION'] : float + Commanded duration of the observation in sidereal hours + output_params['finish_time'] : skyfield time object + Calculated UTC time at the end of the observation + Calculated as start_time + duration * caput.time.SIDEREAL_S + + """ + import re + + ts = ctime.skyfield_wrapper.timescale + + output_params = {} + + with open(post_report_file, "r") as f: + lines = [line for line in f] + for l in lines: + if (l.find("Source")) != -1: + srcnm = re.search("Source:\s+(.*?)\s+", l).group(1) + if srcnm in cls.source_alias: + srcnm = cls.source_alias[srcnm] + if (l.find("DURATION")) != -1: + output_params["DURATION"] = float( + re.search("DURATION:\s+(.*?)\s+", l).group(1) + ) + + # convert Julian Date to Skyfield time object + if (l.find("JULIAN DATE")) != -1: + output_params["start_time"] = ts.ut1( + jd=float(re.search("JULIAN DATE:\s+(.*?)\s+", l).group(1)) + ) + + if l.find("SID:") != -1: + output_params["SID"] = int(re.search("SID:\s(.*?)\s+", l).group(1)) + try: + output_params["src"] = HolographySource.get(name=srcnm) + except pw.DoesNotExist: + print("Missing", srcnm) + output_params["src"] = srcnm + + 
output_params["finish_time"] = ctime.unix_to_skyfield_time( + ctime.ensure_unix(output_params["start_time"]) + + output_params["DURATION"] * 3600.0 * ctime.SIDEREAL_S + ) + + output_params["quality_flag"] = QUALITY_GOOD + + return output_params
+ + +
+[docs] + @classmethod + def create_from_ant_logs( + cls, + logs, + verbose=False, + onsource_dist=0.1, + notes=None, + quality_flag=0, + **kwargs, + ): + """ + Read John Galt Telescope log files and create an entry in the + holography database corresponding to the exact times on source + + Parameters + ---------- + logs : list of strings + log file archives (.zip files) to pass to parse_ant_logs() + onsource_dist : float (default: 0.1) + maximum angular distance at which to consider the Galt Telescope + on source (in degrees) + + Returns + ------- + none + """ + + from caput.interferometry import sphdist + from skyfield.positionlib import Angle + + ts = ctime.skyfield_wrapper.timescale + DATE_FMT_STR = "%Y-%m-%d %H:%M:%S %z" + + pr_list, al_list = cls.parse_ant_logs(logs, return_post_report_params=True) + + for post_report_params, ant_log, curlog in zip(pr_list, al_list, logs): + print(" ") + if isinstance(post_report_params["src"], HolographySource): + if verbose: + print( + "Processing {} from {}".format( + post_report_params["src"].name, curlog + ) + ) + dist = sphdist( + Angle(degrees=post_report_params["src"].ra), + Angle(degrees=post_report_params["src"].dec), + ant_log["ra"], + ant_log["dec"], + ) + if verbose: + print("onsource_dist = {:.2f} deg".format(onsource_dist)) + onsource = np.where(dist.degrees < onsource_dist)[0] + + if len(onsource) > 0: + stdoffset = np.std(dist.degrees[onsource[0] : onsource[-1]]) + meanoffset = np.mean(dist.degrees[onsource[0] : onsource[-1]]) + obs = { + "src": post_report_params["src"], + "start_time": ant_log["t"][onsource[0]], + "finish_time": ant_log["t"][onsource[-1]], + "quality_flag": QUALITY_GOOD, + } + noteout = "from .ANT log " + ts.now().utc_strftime(DATE_FMT_STR) + if notes is not None: + noteout = notes + " " + noteout + if stdoffset > 0.05 or meanoffset > ONSOURCE_DIST_TO_FLAG: + obs["quality_flag"] += QUALITY_OFFSOURCE + print( + ( + "Mean offset: {:.4f}. Std offset: {:.4f}. " + "Setting quality flag to {}." 
+ ).format(meanoffset, stdoffset, QUALITY_OFFSOURCE) + ) + noteout = ( + "Questionable on source. Mean, STD(offset) : " + "{:.3f}, {:.3f}. {}".format(meanoffset, stdoffset, noteout) + ) + obs["quality_flag"] |= quality_flag + if verbose: + print( + "Times in .ANT log : {} {}".format( + ant_log["t"][onsource[0]].utc_strftime(DATE_FMT_STR), + ant_log["t"][onsource[-1]].utc_strftime(DATE_FMT_STR), + ) + ) + print( + "Times in .POST_REPORT: {} {}".format( + post_report_params["start_time"].utc_strftime( + DATE_FMT_STR + ), + post_report_params["finish_time"].utc_strftime( + DATE_FMT_STR + ), + ) + ) + print( + "Mean offset: {:.4f}. Std offset: {:.4f}.".format( + meanoffset, stdoffset + ) + ) + + cls.create_from_dict(obs, verbose=verbose, notes=noteout, **kwargs) + else: + print( + ( + "No on source time found for {}\n{} {}\n" + "Min distance from source {:.1f} degrees" + ).format( + curlog, + post_report_params["src"].name, + post_report_params["start_time"].utc_strftime( + "%Y-%m-%d %H:%M" + ), + np.min(dist.degrees), + ) + ) + else: + print( + "{} is not a HolographySource; need to add to database?".format( + post_report_params["src"] + ) + ) + print("Doing nothing")
+ + +
+[docs] + @classmethod + def create_from_dict( + cls, + dict, + notes=None, + start_tol=60.0, + dryrun=True, + replace_dup=False, + verbose=False, + ): + """ + Create a holography database entry from a dictionary + + This routine checks for duplicates and overwrites duplicates if and + only if `replace_dup = True` + + Parameters + ---------- + dict : dict + src : :py:class:`HolographySource` + A HolographySource object for the source + start_time + Start time as a Skyfield Time object + finish_time + Finish time as a Skyfield Time object + """ + DATE_FMT_STR = "%Y-%m-%d %H:%M:%S %Z" + + def check_for_duplicates(t, src, start_tol, ignore_src_mismatch=False): + """ + Check for duplicate holography observations, comparing the given + observation to the existing database + + Inputs + ------ + t: Skyfield Time object + beginning time of observation + src: HolographySource + target source + start_tol: float + Tolerance in seconds within which to search for duplicates + ignore_src_mismatch: bool (default: False) + If True, consider observations a match if the time matches + but the source does not + + Outputs + ------- + If a duplicate is found: :py:class:`HolographyObservation` object for the + existing entry in the database + + If no duplicate is found: None + """ + ts = ctime.skyfield_wrapper.timescale + + unixt = ctime.ensure_unix(t) + + dup_found = False + + existing_db_entry = cls.select().where( + cls.start_time.between(unixt - start_tol, unixt + start_tol) + ) + if len(existing_db_entry) > 0: + if len(existing_db_entry) > 1: + print("Multiple entries found.") + for entry in existing_db_entry: + tt = ts.utc(ctime.unix_to_datetime(entry.start_time)) + # LST = GST + east longitude + ttlst = np.mod(tt.gmst + DRAO_lon, 24.0) + + # Check if source name matches. If not, print a warning + # but proceed anyway. 
+ if src.name.upper() == entry.source.name.upper(): + dup_found = True + if verbose: + print("Observation is already in database.") + else: + if ignore_src_mismatch: + dup_found = True + print( + "** Observation at same time but with different " + + "sources in database: ", + src.name, + entry.source.name, + tt.utc_datetime().isoformat(), + ) + # if the observations match in start time and source, + # call them the same observation. Not the most strict + # check possible. + + if dup_found: + tf = ts.utc(ctime.unix_to_datetime(entry.finish_time)) + print( + "Tried to add : {} {}; LST={:.3f}".format( + src.name, t.utc_datetime().strftime(DATE_FMT_STR), ttlst + ) + ) + print( + "Existing entry: {} {}; LST={:.3f}".format( + entry.source.name, + tt.utc_datetime().strftime(DATE_FMT_STR), + ttlst, + ) + ) + if dup_found: + return existing_db_entry + else: + return None + + # DRAO longitude in hours + DRAO_lon = ephemeris.chime.longitude * 24.0 / 360.0 + + if verbose: + print(" ") + addtodb = True + + dup_entries = check_for_duplicates(dict["start_time"], dict["src"], start_tol) + + if dup_entries is not None: + if replace_dup: + if not dryrun: + for entry in dup_entries: + cls.delete_instance(entry) + if verbose: + print("Deleted observation from database and replacing.") + elif verbose: + print("Would have deleted observation and replaced (dry run).") + addtodb = True + else: + addtodb = False + for entry in dup_entries: + print( + "Not replacing duplicate {} observation {}".format( + entry.source.name, + ctime.unix_to_datetime(entry.start_time).strftime( + DATE_FMT_STR + ), + ) + ) + + # we've appended this observation to obslist. + # Now add to the database, if we're supposed to. 
+ if addtodb: + string = "Adding to database: {} {} to {}" + print( + string.format( + dict["src"].name, + dict["start_time"].utc_datetime().strftime(DATE_FMT_STR), + dict["finish_time"].utc_datetime().strftime(DATE_FMT_STR), + ) + ) + if dryrun: + print("Dry run; doing nothing") + else: + cls.create( + source=dict["src"], + start_time=ctime.ensure_unix(dict["start_time"]), + finish_time=ctime.ensure_unix(dict["finish_time"]), + quality_flag=dict["quality_flag"], + notes=notes, + )
+ + +
+[docs] + @classmethod + def parse_ant_logs(cls, logs, return_post_report_params=False): + """ + Unzip and parse .ANT log file output by nsched for John Galt Telescope + observations + + Parameters + ---------- + logs : list of strings + .ZIP filenames. Each .ZIP archive should include a .ANT file and + a .POST_REPORT file. This method unzips the archive, uses + `parse_post_report` to read the .POST_REPORT file and extract + the CHIME sidereal day corresponding to the DRAO sidereal day, + and then reads the lines in the .ANT file to obtain the pointing + history of the Galt Telescope during this observation. + + (The DRAO sidereal day is days since the clock in Ev Sheehan's + office at DRAO was reset. This clock is typically only reset every + few years, but it does not correspond to any defined date, so the + date must be figured out from the .POST_REPORT file, which reports + both the DRAO sidereal day and the UTC date and time. + + Known reset dates: 2017-11-21, 2019-3-10) + + Returns + ------- + + if output_params == False: + ant_data: A dictionary consisting of lists containing the LST, + hour angle, RA, and dec (all as Skyfield Angle objects), + CHIME sidereal day, and DRAO sidereal day. 
+ + if output_params == True + output_params: dictionary returned by `parse_post_report` + and + ant_data: described above + + Files + ----- + the .ANT and .POST_REPORT files in the input .zip archive are + extracted into /tmp/26mlog/<loginname>/ + """ + + from skyfield.positionlib import Angle + + DRAO_lon = ephemeris.CHIMELONGITUDE * 24.0 / 360.0 + + def sidlst_to_csd(sid, lst, sid_ref, t_ref): + """ + Convert an integer DRAO sidereal day and LST to a float + CHIME sidereal day + + Parameters + ---------- + sid : int + DRAO sidereal day + lst : float, in hours + local sidereal time + sid_ref : int + DRAO sidereal day at the reference time t_ref + t_ref : skyfield time object, Julian days + Reference time + + Returns + ------- + output : float + CHIME sidereal day + """ + csd_ref = int(ephemeris.csd(ctime.datetime_to_unix(t_ref.utc_datetime()))) + csd = sid - sid_ref + csd_ref + return csd + lst / ctime.SIDEREAL_S / 24.0 + + ant_data_list = [] + post_report_list = [] + + for log in logs: + doobs = True + + filename = log.split("/")[-1] + basedir = "/tmp/26mlog/{}/".format(os.getlogin()) + basename, extension = filename.split(".") + post_report_file = basename + ".POST_REPORT" + ant_file = basename + ".ANT" + + if extension == "zip": + try: + zipfile.ZipFile(log).extract(post_report_file, path=basedir) + except: + print( + "Failed to extract {} into {}. Moving right along...".format( + post_report_file, basedir + ) + ) + doobs = False + try: + zipfile.ZipFile(log).extract(ant_file, path=basedir) + except: + print( + "Failed to extract {} into {}. 
Moving right along...".format( + ant_file, basedir + ) + ) + doobs = False + + if doobs: + try: + post_report_params = cls.parse_post_report( + basedir + post_report_file + ) + + with open(os.path.join(basedir, ant_file), "r") as f: + lines = [line for line in f] + ant_data = {"sid": np.array([])} + lsth = [] + lstm = [] + lsts = [] + + hah = [] + ham = [] + has = [] + + decd = [] + decm = [] + decs = [] + + for l in lines: + arr = l.split() + + try: + lst_hms = [float(x) for x in arr[2].split(":")] + + # do last element first: if this is going to + # crash because a line in the log is incomplete, + # we don't want it to append to any of the lists + + decs.append(float(arr[8].replace('"', ""))) + decm.append(float(arr[7].replace("'", ""))) + decd.append(float(arr[6].replace("D", ""))) + + has.append(float(arr[5].replace("S", ""))) + ham.append(float(arr[4].replace("M", ""))) + hah.append(float(arr[3].replace("H", ""))) + + lsts.append(float(lst_hms[2])) + lstm.append(float(lst_hms[1])) + lsth.append(float(lst_hms[0])) + + ant_data["sid"] = np.append( + ant_data["sid"], int(arr[1]) + ) + except: + print( + "Failed in file {} for line \n{}".format( + ant_file, l + ) + ) + if len(ant_data["sid"]) != len(decs): + print("WARNING: mismatch in list lengths.") + + ant_data["lst"] = Angle(hours=(lsth, lstm, lsts)) + + ha = Angle(hours=(hah, ham, has)) + dec = Angle(degrees=(decd, decm, decs)) + + ant_data["ha"] = Angle( + radians=ha.radians + - ephemeris.galt_pointing_model_ha(ha, dec).radians, + preference="hours", + ) + + ant_data["dec_cirs"] = Angle( + radians=dec.radians + - ephemeris.galt_pointing_model_dec(ha, dec).radians, + preference="degrees", + ) + + ant_data["csd"] = sidlst_to_csd( + np.array(ant_data["sid"]), + ant_data["lst"].hours, + post_report_params["SID"], + post_report_params["start_time"], + ) + + ant_data["t"] = ctime.unix_to_skyfield_time( + ephemeris.csd_to_unix(ant_data["csd"]) + ) + + # Correct RA from equinox to CIRS coords (both in radians) + era 
= np.radians( + ctime.unix_to_era(ctime.ensure_unix(ant_data["t"])) + ) + gast = ant_data["t"].gast * 2 * np.pi / 24.0 + + ant_data["ra_cirs"] = Angle( + radians=ant_data["lst"].radians + - ant_data["ha"].radians + + (era - gast), + preference="hours", + ) + + obs = ephemeris.Star_cirs( + ra=ant_data["ra_cirs"], + dec=ant_data["dec_cirs"], + epoch=ant_data["t"], + ) + + ant_data["ra"] = obs.ra + ant_data["dec"] = obs.dec + + ant_data_list.append(ant_data) + post_report_list.append(post_report_params) + except: + print("Parsing {} failed".format(post_report_file)) + + if return_post_report_params: + return post_report_list, ant_data_list + return ant_data
+ + +
+[docs] + @classmethod + def create_from_post_reports( + cls, + logs, + start_tol=60.0, + dryrun=True, + replace_dup=False, + verbose=True, + notes=None, + ): + """Create holography database entry from .POST_REPORT log files + generated by the nsched controller for the Galt Telescope. + + Parameters + ---------- + logs : string + list of paths to archives. Filenames should be, eg, + 01DEC17_1814.zip. Must be only one period in the filename, + separating the extension. + + start_tol : float (optional; default: 60.) + Tolerance (in seconds) around which to search for duplicate + operations. + + dryrun : boolean (optional; default: True) + Dry run only; do not add entries to database + + replace_dup : boolean (optional; default: False) + Delete existing duplicate entries and replace. Only has effect if + dry_run == False + + notes : string or list of strings (optional; default: None) + notes to be added. If a string, the same note will be added to all + observations. If a list of strings (must be same length as logs), + each element of the list will be added to the corresponding + database entry. + Nota bene: the text "Added by create_from_post_reports" with the + current date and time will also be included in the notes database + entry. + + Example + ------- + from ch_util import holography as hl + import glob + + obs = hl.HolographyObservation + logs = glob.glob('/path/to/logs/*JUN18*.zip') + obs_list, dup_obs_list, missing = obs.create_from_post_reports(logs, dryrun=False) + """ + # check notes. 
Can be a string (in which case duplicate it), None (in + # which case do nothing) or a list (in which case use it if same length + # as logs, otherwise crash) + if notes is None: + print("Notes is None") + notesarr = [None] * len(logs) + elif isinstance(notes, str): + notesarr = [notes] * len(logs) + else: + assert len(notes) == len( + logs + ), "notes must be a string or a list the same length as logs" + notesarr = notes + + for log, note in zip(logs, notesarr): + if verbose: + print("Working on {}".format(log)) + filename = log.split("/")[-1] + # basedir = '/'.join(log.split('/')[:-1]) + '/' + basedir = "/tmp/" + + basename, extension = filename.split(".") + + post_report_file = basename + ".POST_REPORT" + + doobs = True + if extension == "zip": + try: + zipfile.ZipFile(log).extract(post_report_file, path=basedir) + except Exception: + print( + "failed to find {}. Moving right along...".format( + post_report_file + ) + ) + doobs = False + elif extension != "POST_REPORT": + print( + "WARNING: extension should be .zip or .POST_REPORT; is ", extension + ) + + if doobs: + # Read the post report file and pull out the HolographySource + # object, start time (LST), and duration (in LST hours) of the + # observation + output_params = cls.parse_post_report(basedir + post_report_file) + t = output_params["start_time"] + src = output_params["src"] + + # if the source was found, src would be a HolographySource + # object otherwise (ie the source is missing), it's a string + if isinstance(src, str): + warnings.warn( + f"Source {src} was not found for observation at time {t}." + ) + else: + cls.create_from_dict( + output_params, + notes=notes, + start_tol=start_tol, + dryrun=dryrun, + replace_dup=replace_dup, + verbose=verbose, + )
+
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/_modules/ch_util/layout.html b/docs/_modules/ch_util/layout.html new file mode 100644 index 00000000..8d0f27da --- /dev/null +++ b/docs/_modules/ch_util/layout.html @@ -0,0 +1,1646 @@ + + + + + + ch_util.layout — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for ch_util.layout

+"""
+Interface to the CHIME components and graphs
+
+This module interfaces to the layout tables in the CHIME database.
+
+The :mod:`peewee` module is used for the ORM to the MySQL database. Because the
+layouts are event-driven, you should never attempt to enter events by raw
+inserts to the :class:`event` or :class:`timestamp` tables, as you could create
+inconsistencies. Rather, use the methods which are described in this document to
+do such alterations robustly.
+
+For most uses, you probably want to import the following:
+
+>>> from datetime import datetime
+>>> import logging
+>>> logging.basicConfig(level = logging.INFO)
+>>> import peewee
+>>> import layout
+>>> layout.connect_database()
+
+.. note::
+   The database must now be explicitly connected. This should not be done within
+   an import statement.
+
+.. note::
+   The :mod:`logging` module can be set to the level of your preference, or not
+   imported altogether if you don't want log messages from the :mod:`layout`
+   module. Note that the :mod:`peewee` module sends a lot of messages to the
+   DEBUG stream.
+
+If you will be altering the layouts, you will need to register as a user:
+
+>>> layout.set_user("Ahincks")
+
+Use your CHIME wiki username here.  Make sure it starts with a capital letter.
+Note that different users have different permissions, stored in the
+:class:`user_permission` table. If you are simply reading from the layout,
+there is no need to register as a user.
+
+Choose Your Own Adventure
+=========================
+
+============================================= ==================================
+If you want to ...                            ... then see
+============================================= ==================================
+retrieve and examine layout graphs            :class:`graph`
+add components                                :meth:`component.add<ch_util._db_tables.component.add>`,
+                                              :func:`add_component<ch_util._db_tables.add_component>`
+remove components                             :meth:`component.remove<ch_util._db_tables.component.remove>`,
+                                              :func:`remove_component<ch_util._db_tables.remove_component>`
+make connexions                               :func:`make_connexion<ch_util._db_tables.make_connexion>`
+sever connexions                              :func:`sever_connexion<ch_util._db_tables.sever_connexion>`
+set component properties                      :meth:`component.set_property<ch_util._db_tables.component.set_property>`
+                                              :func:`set_property<ch_util._db_tables.set_property>`
+get component properties                      :meth:`component.get_property<ch_util._db_tables.component.get_property>`
+perform bulk updates                          :func:`enter_ltf`
+add component history notes                   :meth:`component.add_history<ch_util._db_tables.component.add_history>`
+add link to component documentation           :meth:`component.add_doc<ch_util._db_tables.component.add_doc>`
+create a global flag                          :meth:`global_flag.start<ch_util._db_tables.global_flag.start>`
+============================================= ==================================
+
+Functions
+=========
+
+- :py:func:`add_component<ch_util._db_tables.add_component>`
+- :py:func:`compare_connexion<ch_util._db_tables.compare_connexion>`
+- :py:func:`connect_database<ch_util._db_tables.connect_peewee_tables>`
+- :py:func:`enter_ltf`
+- :py:func:`make_connexion<ch_util._db_tables.make_connexion>`
+- :py:func:`remove_component<ch_util._db_tables.remove_component>`
+- :py:func:`set_user<ch_util._db_tables.set_user>`
+- :py:func:`sever_connexion<ch_util._db_tables.sever_connexion>`
+- :py:func:`global_flags_between`
+- :py:func:`get_global_flag_times`
+
+Classes
+=======
+
+- :py:class:`subgraph_spec`
+- :py:class:`graph`
+
+Database Models
+===============
+
+- :py:class:`component<ch_util._db_tables.component>`
+- :py:class:`component_history<ch_util._db_tables.component_history>`
+- :py:class:`component_type<ch_util._db_tables.component_type>`
+- :py:class:`component_type_rev<ch_util._db_tables.component_type_rev>`
+- :py:class:`component_doc<ch_util._db_tables.component_doc>`
+- :py:class:`connexion<ch_util._db_tables.connexion>`
+- :py:class:`external_repo<ch_util._db_tables.external_repo>`
+- :py:class:`event<ch_util._db_tables.event>`
+- :py:class:`event_type<ch_util._db_tables.event_type>`
+- :py:class:`graph_obj<ch_util._db_tables.graph_obj>`
+- :py:class:`global_flag<ch_util._db_tables.global_flag>`
+- :py:class:`predef_subgraph_spec<ch_util._db_tables.predef_subgraph_spec>`
+- :py:class:`predef_subgraph_spec_param<ch_util._db_tables.predef_subgraph_spec_param>`
+- :py:class:`property<ch_util._db_tables.property>`
+- :py:class:`property_component<ch_util._db_tables.property_component>`
+- :py:class:`property_type<ch_util._db_tables.property_type>`
+- :py:class:`timestamp<ch_util._db_tables.timestamp>`
+- :py:class:`user_permission<ch_util._db_tables.user_permission>`
+- :py:class:`user_permission_type<ch_util._db_tables.user_permission_type>`
+
+Exceptions
+==========
+
+- :py:class:`NoSubgraph<ch_util._db_tables.NoSubgraph>`
+- :py:class:`BadSubgraph<ch_util._db_tables.BadSubgraph>`
+- :py:class:`DoesNotExist<ch_util._db_tables.DoesNotExist>`
+- :py:class:`UnknownUser<ch_util._db_tables.UnknownUser>`
+- :py:class:`NoPermission<ch_util._db_tables.NoPermission>`
+- :py:class:`LayoutIntegrity<ch_util._db_tables.LayoutIntegrity>`
+- :py:class:`PropertyType<ch_util._db_tables.PropertyType>`
+- :py:class:`PropertyUnchanged<ch_util._db_tables.PropertyUnchanged>`
+- :py:class:`ClosestDraw<ch_util._db_tables.ClosestDraw>`
+- :py:class:`NotFound<chimedb.core.NotFoundError>`
+
+Constants
+=========
+
+- :py:const:`EVENT_AT`
+- :py:const:`EVENT_BEFORE`
+- :py:const:`EVENT_AFTER`
+- :py:const:`EVENT_ALL`
+- :py:const:`ORDER_ASC`
+- :py:const:`ORDER_DESC`
+"""
+
+import datetime
+import inspect
+import logging
+import networkx as nx
+import os
+import peewee as pw
+import re
+
+import chimedb.core
+
+import caput.time as ctime
+
+_property = property  # Do this since there is a class "property" in _db_tables.
+from ._db_tables import (
+    EVENT_AT,
+    EVENT_BEFORE,
+    EVENT_AFTER,
+    EVENT_ALL,
+    ORDER_ASC,
+    ORDER_DESC,
+    _check_fail,
+    _plural,
+    _are,
+    AlreadyExists,
+    NoSubgraph,
+    BadSubgraph,
+    DoesNotExist,
+    UnknownUser,
+    NoPermission,
+    LayoutIntegrity,
+    PropertyType,
+    PropertyUnchanged,
+    ClosestDraw,
+    set_user,
+    graph_obj,
+    global_flag_category,
+    global_flag,
+    component_type,
+    component_type_rev,
+    external_repo,
+    component,
+    component_history,
+    component_doc,
+    connexion,
+    property_type,
+    property_component,
+    property,
+    event_type,
+    timestamp,
+    event,
+    predef_subgraph_spec,
+    predef_subgraph_spec_param,
+    user_permission_type,
+    user_permission,
+    compare_connexion,
+    add_component,
+    remove_component,
+    set_property,
+    make_connexion,
+    sever_connexion,
+)
+
+# Legacy name
+from chimedb.core import NotFoundError as NotFound
+
+os.environ["TZ"] = "UTC"
+
+# Logging
+# =======
+
+# Set default logging handler to avoid "No handlers could be found for logger
+# 'layout'" warnings.
+from logging import NullHandler
+
+
+# All peewee-generated logs are logged to this namespace.
+logger = logging.getLogger("layout")
+logger.addHandler(NullHandler())
+
+
+# Layout!
+# =======
+
+
+
class subgraph_spec(object):
    """Specifications for extracting a subgraph from a full graph.

    The subgraph specification can be created from scratch by passing the
    appropriate parameters. It can also be pulled from the database using the
    class method :meth:`from_predef`.

    The parameters can be passed as ID's, names of component types or
    :obj:`component_type` instances.

    Parameters
    ----------
    start : integer, :obj:`component_type` or string
        The component type for the start of the subgraph.
    terminate : list of integers, of :obj:`component_type` or of strings
        Component type id's for terminating the subgraph.
    oneway : list of list of integer pairs, of :obj:`component_type` or of strings
        Pairs of component types for defining connexions that should only be
        traced one way when moving from the starting to terminating components.
    hide : list of integers, of :obj:`component_type` or of strings
        Component types for components that should be hidden and skipped over
        in the subgraph.

    Examples
    --------
    To look at subgraphs of components between the outer bulkhead and the
    correlator inputs, one could create the following specification:

    >>> import layout
    >>> from datetime import datetime
    >>> sg_spec = layout.subgraph_spec(start = "c-can thru",
                                       terminate = ["correlator input", "60m coax"],
                                       oneway = [],
                                       hide = ["60m coax", "SMA coax"])

    The subgraph starts at the C-Can bulkhead and terminates at the correlator
    input; in the other direction it terminates at a 60 m coaxial cable
    plugged into the bulkhead. The 60 m coaxial cables and SMA cables are
    hidden so that they are skipped over in the resulting subgraphs.

    All such subgraphs can then be loaded from the database:

    >>> sg = layout.graph.from_db(datetime(2014, 10, 5, 12, 0), sg_spec)

    Unexpectedly large subgraphs can be investigated by printing their LTF
    representation (see :meth:`graph.ltf`) and then cut short by appending
    further component types to :attr:`terminate` and :attr:`hide`:

    >>> sg_spec.terminate += ["200m coax", "HK hydra", "50m coax"]
    >>> sg_spec.hide += ["200m coax", "HK hydra", "50m coax"]

    A path can also be prevented from being traced backwards through a
    connexion by declaring that connexion one-way, e.g., from an SMA cable to
    an RFT thru (but not the reverse):

    >>> sg_spec.oneway = [["SMA coax", "RFT thru"]]
    """

    def __init__(self, start, terminate, oneway, hide):
        self.start = start
        self.terminate = terminate
        self.oneway = oneway
        self.hide = hide

    @classmethod
    def from_predef(cls, predef):
        """Create a subgraph specification from a predefined version in the DB.

        Parameters
        ----------
        predef : :class:`predef_subgraph_spec`
            A predefined subgraph specification in the DB.
        """
        s = predef.start_type.id
        t = []
        o = []
        h = []
        # Each parameter row carries an action code: T = terminate on type1,
        # O = one-way connexion from type1 to type2, H = hide type1.
        for param in predef_subgraph_spec_param.select(
            predef_subgraph_spec_param.action,
            predef_subgraph_spec_param.type1.alias("type1_id"),
            predef_subgraph_spec_param.type2.alias("type2_id"),
        ).where(predef_subgraph_spec_param.predef_subgraph_spec == predef):
            if param.action == "T":
                t.append(param.type1_id)
            elif param.action == "O":
                o.append([param.type1_id, param.type2_id])
            elif param.action == "H":
                h.append(param.type1_id)
            else:
                raise RuntimeError('Unknown subgraph action type "%s".' % param.action)
        return cls(s, t, o, h)

    @_property
    def start(self):
        """The component type ID starting the subgraph."""
        return self._start

    @start.setter
    def start(self, val):
        self._start = _id_from_multi(component_type, val)

    @_property
    def terminate(self):
        """The component type ID(s) terminating the subgraph."""
        return self._terminate

    @terminate.setter
    def terminate(self, val):
        self._terminate = [_id_from_multi(component_type, tt) for tt in val]

    @_property
    def oneway(self):
        """Pairs of component type ID(s) for one-way tracing of the subgraph."""
        return self._oneway

    @oneway.setter
    def oneway(self, val):
        self._oneway = [
            [
                _id_from_multi(component_type, oo[0]),
                _id_from_multi(component_type, oo[1]),
            ]
            for oo in val
        ]

    @_property
    def hide(self):
        """The component type ID(s) that are skipped over in the subgraph."""
        return self._hide

    @hide.setter
    def hide(self, val):
        self._hide = [_id_from_multi(component_type, h) for h in val]
class graph(nx.Graph):
    """A graph of connexions.

    This class inherits the
    `networkx.Graph <http://networkx.github.io/documentation/networkx-1.9.1/>`_
    class and adds CHIME-specific functionality.

    Use the :meth:`from_db` class method to construct a graph from the database.

    Parameters
    ----------
    time : datetime.datetime
        The time at which the graph is valid. Default is now().

    Examples
    --------
    To load a graph from the database, use the :meth:`from_db` class method:

    >>> import layout
    >>> from datetime import datetime
    >>> g = layout.graph.from_db(datetime(2014, 10, 5, 12, 0))

    Any of the `networkx.Graph` methods can then be used:

    >>> print g.order(), g.size()
    2483 2660

    The graph nodes are :obj:`component` objects; see :meth:`component` for
    searching by type or serial number, :meth:`node_property` for a node's
    public properties, :meth:`closest_of_type` for proximity searches, and
    :meth:`ltf` for the LTF representation of a subgraph built from a
    :class:`subgraph_spec`.
    """

    def __init__(self, time=None):
        """Initialise an empty graph valid at *time* (default: now)."""
        nx.Graph.__init__(self)

        # The original signature used ``time=datetime.datetime.now()``, which
        # evaluates the default once at import time, so every graph created
        # without an explicit time shared that stale timestamp.  Resolve the
        # default at call time instead.
        self._time = datetime.datetime.now() if time is None else time

        self._sg_spec = None
        self._sg_spec_start = None
        self._sn_dict = dict()
        self._ctype_dict = dict()

        # Cache all component types, revisions and property types now, since
        # these will be used constantly by the graph.
        component_type.fill_cache()
        component_type_rev.fill_cache()
        property_type.fill_cache()

        # Aliases.
        self.neighbours = self.neighbors
        self.neighbor_of_type = self.neighbour_of_type
+[docs] + @classmethod + def from_db(cls, time=datetime.datetime.now(), sg_spec=None, sg_start_sn=None): + """Create a new graph by reading the database. + + This method is designed to be efficient. It has customised SQL calls so that + only a couple of queries are required. Doing this with the standard peewee + functionality requires many more calls. + + This method will establish a connection to the database if it doesn't + already exist. + + Parameters + ---------- + time : datetime.datetime + The time at which the graph is valid. Default is now(). + sg_spec : :obj:`subgraph_spec` + The subgraph specificationto use; can be set to :obj:`None`. + sg_start_sn : string + If a serial number is specified, then only the subgraph starting with that + component will be returned. This parameter is ignored if sg_spec is + :obj:`None`. + + Returns + ------- + :obj:`graph` + If *sg_spec* is not :obj:`None`, and *sg_start_sn* is not specified, then + a list of :obj:`graph` objects is returned instead. + + Raises + ------ + If no graph is found, :exc:`NotFound` is raised. + """ + + # Initalise the database connections + connect_database() + + g = cls(time) + + # Add the connexions. 
+ sql = ( + "SELECT c1.*, c2.*, pt.id " + "FROM connexion c " + "JOIN component c1 ON c1.sn = c.comp_sn1 " + "JOIN event e1 ON e1.graph_obj_id = c1.id " + "JOIN timestamp e1t1 ON e1.start_id = e1t1.id " + "LEFT JOIN timestamp e1t2 ON e1.end_id = e1t2.id " + "JOIN component c2 ON c2.sn = c.comp_sn2 " + "JOIN event e2 ON e2.graph_obj_id = c2.id " + "JOIN timestamp e2t1 ON e2.start_id = e2t1.id " + "LEFT JOIN timestamp e2t2 ON e2.end_id = e2t2.id " + "JOIN event e ON e.graph_obj_id = c.id " + "JOIN event_type pt ON e.type_id = pt.id " + "JOIN timestamp t1 ON e.start_id = t1.id " + "LEFT JOIN timestamp t2 ON e.end_id = t2.id " + "WHERE e.active = 1 AND e1.type_id = 1 AND e2.type_id = 1 AND " + "e1t1.time <= '%s' AND " + "(e1.end_id IS NULL OR e1t2.time > '%s') AND " + "e2t1.time <= '%s' AND " + "(e2.end_id IS NULL OR e2t2.time > '%s') AND " + "t1.time <= '%s' AND " + "(e.end_id IS NULL OR t2.time > '%s');" + % (time, time, time, time, time, time) + ) + # print sql + conn_list = chimedb.core.proxy.execute_sql(sql) + for r in conn_list: + c1 = g._ensure_add(r[0], r[1], r[2], r[3]) + c2 = g._ensure_add(r[4], r[5], r[6], r[7]) + if r[8] == event_type.perm_connexion().id: + perm = True + else: + perm = False + g.add_edge(c1, c2, permanent=perm, hidden=False) + + # Add the properties. 
+ sql = ( + "SELECT p.*, c.*, pt.name " + "FROM property p " + "JOIN property_type pt ON p.type_id = pt.id " + "JOIN component c ON p.comp_sn = c.sn " + "JOIN event ce ON ce.graph_obj_id = c.id " + "JOIN timestamp ct1 ON ce.start_id = ct1.id " + "LEFT JOIN timestamp ct2 ON ce.end_id = ct2.id " + "JOIN event e ON e.graph_obj_id = p.id " + "JOIN timestamp t1 ON e.start_id = t1.id " + "LEFT JOIN timestamp t2 ON e.end_id = t2.id " + "WHERE e.active = 1 AND ce.type_id = 1 AND " + "ct1.time <= '%s' AND " + "(ce.end_id IS NULL OR ct2.time > '%s') AND " + "t1.time <= '%s' AND " + "(e.end_id IS NULL OR t2.time > '%s');" % (time, time, time, time) + ) + prop_list = chimedb.core.proxy.execute_sql(sql) + for r in prop_list: + p = property(id=r[0], comp=r[1], type=r[2], value=r[3]) + p.type = property_type.from_id(r[2]) + c = g._ensure_add(r[4], r[5], r[6], r[7]) + g.nodes[c][r[8]] = p + + if sg_spec: + return graph.from_graph(g, sg_spec, sg_start_sn) + else: + return g
+ + + def _ensure_add(self, id, sn, type, rev): + """Robustly add a component, avoiding duplication.""" + try: + c = self.component(comp=sn) + except NotFound: + # Component ID is a foreign key to graph_obj, so we need to make an + # instance of this for that. + g = graph_obj(id=id) + c = component(id=g, sn=sn, type=type, type_rev=rev) + + # We hydrate the component type and revision so that no further queries + # need to be made. When the graph was initialised, all of the types and + # revisions were cached, so the following requires no further queries. + c.type = component_type.from_id(type) + c.rev = component_type_rev.from_id(rev) + self.add_node(c) + self._sn_dict[sn] = c + try: + self._ctype_dict[type].append(c) + except KeyError: + self._ctype_dict[type] = [c] + return c + +
+[docs] + def node_property(self, n): + """Return the properties of a node excluding internally used properties. + + If you iterate over a nodes properties, you will also get the + internally-used properties (starting with an underscore). This method gets + the dictionary of properties without these "private" properties. + + Parameters + ---------- + node : node object + The node for which to get the properties. + + Returns + ------- + A dictionary of properties. + + Examples + -------- + >>> from ch_util import graph + >>> from datetime import datetime + >>> g = layout.graph.from_db(datetime(2014, 10, 5, 12, 0)) + >>> rft = g.component(comp = "RFTK07B") + >>> for p in g.node_property(rft).values(): + ... print "%s = %s %s" % (p.type.name, p.value, p.type.units if p.type.units else "") + attenuation = 10 dB + therm_avail = ch1 + """ + ret = dict() + for key, val in self.nodes[n].items(): + if key[0] != "_": + ret[key] = val + return ret
+ + +
+[docs] + def component(self, comp=None, type=None, sort_sn=False): + """Return a component or list of components from the graph. + + The components exist as graph nodes. This method provides searchable access + to them. + + Parameters + ---------- + comp : string or :obj:`component` + If not :obj:`None`, then return the component with this serial number, or + :obj:`None` if it does not exist in the graph. If this parameter is set, + then **type** is ignored. You can also pass a component object; the + instance of that component with the same serial number will be returned if + it exists in this graph. + type : string or :class:`component_type` + If not :obj:`None`, then only return components of this type. You may pass + either the name of the component type or an object. + + Returns + ------- + :class:`component` or list of such + If the **sn** parameter is passed, a single :class:`component` object is + returned. If the **type** parameter is passed, a list of + :class:`component` objects is returned. + + Raises + ------ + :exc:`NotFound` + Raised if no component is found. + + Examples + -------- + >>> from ch_util import graph + >>> from datetime import datetime + >>> g = layout.graph.from_db(datetime(2014, 10, 5, 12, 0)) + >>> print g.component("CXA0005A").type_rev.name + B + >>> for r in g.component(type = "reflector"): + ... print r.sn + E_cylinder + W_cylinder + 26m_dish + + """ + if comp: + ret = None + try: + sn = comp.sn + except AttributeError: + sn = comp + try: + ret = self._sn_dict[sn] + except KeyError: + raise NotFound('Serial number "%s" is not in the graph.' % (sn)) + elif not type: + ret = self.nodes() + else: + try: + type_id = type.id + type_name = type.name + except AttributeError: + type_id = component_type.from_name(type).id + type_name = type + try: + ret = list(self._ctype_dict[type_id]) + if sort_sn: + ret.sort(key=lambda x: x.sn) + except KeyError: + raise NotFound( + 'No components of type "%s" are in the graph.' 
% type_name + ) + return ret
+ + + def _subgraph_recurse(self, gr, comp1, sg, done, last_no_hide): + if comp1.type.id in sg.hide: + c1 = last_no_hide + hidden = True + else: + c1 = gr._ensure_add( + comp1.id, comp1.sn, comp1.type.id, comp1.rev.id if comp1.rev else None + ) + if not last_no_hide: + last_no_hide = c1 + for k, v in self.node_property(comp1).items(): + gr.nodes[c1][k] = v + hidden = False + if not last_no_hide: + last_no_hide = c1 + + done.append(comp1.sn) + for comp2 in self.neighbors(comp1): + # Watch for connexions in the wrong order. + check = [comp2.type.id, comp1.type.id] + if check in sg.oneway: + continue + + if comp2.type.id not in sg.hide: + c2 = gr._ensure_add( + comp2.id, + comp2.sn, + comp2.type.id, + comp2.rev.id if comp2.rev else None, + ) + for k, v in self.node_property(comp2).items(): + gr.nodes[c2][k] = v + + try: + gr.edges[c1, c2] + except KeyError: + if c1.sn != c2.sn: + if hidden: + perm = False + else: + perm = self.edges[comp1, comp2]["permanent"] + gr.add_edge(c1, c2, permanent=perm, hidden=hidden, _head=c1) + last_no_hide = c2 + + if comp2.type.id not in sg.terminate and comp2.sn not in done: + self._subgraph_recurse(gr, comp2, sg, done, last_no_hide) + return + +
+[docs] + @classmethod + def from_graph(cls, g, sg_spec=None, sg_start_sn=None): + """Find subgraphs within this graph. + + Parameters + ---------- + g : :obj:`graph` + The graph from which to get the new graph. + sg_spect : :obj:`subgraph_spec` + The subgraph specification to use; can be set to :obj:`None`. + + Returns + ------- + A list of :obj:`graph` objects, one for each subgraph found. If, however, + *g* is set to :obj:`None`, a reference to the input graph is returned. + """ + if sg_spec == None: + return g + if sg_spec.start in sg_spec.terminate: + raise BadSubgraph( + "You cannot terminate on the component type of the " + "starting component of your subgraph." + ) + if sg_spec.start in sg_spec.hide: + raise BadSubgraph( + "You cannot hide the component type of the " + "starting component of a subgraph." + ) + + ret = [] + for start_comp in g.component(type=component_type.from_id(sg_spec.start)): + if sg_start_sn: + if start_comp.sn != sg_start_sn: + continue + ret.append(cls(time=g.time)) + g._subgraph_recurse(ret[-1], start_comp, sg_spec, [], None) + ret[-1]._sg_spec = sg_spec + ret[-1]._sg_spec_start = ret[-1].component(comp=start_comp.sn) + + if len(ret) < 1: + raise NotFound("No subgraph was found.") + if sg_start_sn: + return ret[-1] + else: + return ret
+ + + def _print_chain(self, chain): + if len(chain) <= 1: + return "" + + ret = "" + ctype1 = chain[0].type.name + ctype2 = chain[-1].type.name + ret = "# %s to %s.\n" % (ctype1[0].upper() + ctype1[1:], ctype2) + for c in chain: + ret += c.sn + for prop, value in self.node_property(c).items(): + ret += " %s=%s" % (prop, value.value) + ret += "\n" + ret += "\n" + + return ret + + def _ltf_recurse(self, comp, done, last): + ret = "" + if last: + chain = [last, comp] + else: + chain = [comp] + done.append(comp) + while 1: + next_comp = list(set(self.neighbors(comp)) - set(done)) + if not len(next_comp) or comp.type.id in self.sg_spec.terminate: + ret += self._print_chain(chain) + break + + if len(next_comp) == 1: + chain.append(next_comp[0]) + done.append(next_comp[0]) + comp = next_comp[0] + elif len(next_comp) > 1: + done_print = False + for c in next_comp: + if not done_print: + ret += self._print_chain(chain) + done_print = True + done.append(c) + ret += self._ltf_recurse(c, done, chain[-1]) + break + else: + break + + return ret + +
+[docs] + def ltf(self): + """Get an LTF representation of the graph. The graph must be a subgraph, + i.e., generated with a :obj:`predef_subgraph_spec`. + + Returns + ------- + ltf : string + The LTF representation of the graph. + + Raises + ------ + :exc:`NoSubgraph` + Raised if no subgraph specification is associate with this layout. + + Examples + -------- + Get the LTF for a subgraph of antenna to HK. + + >>> import layout + >>> from datetime import datetime + >>> start = layout.component_type.get(name = "antenna").id + >>> terminate = [layout.component_type.get(name = "reflector").id, + layout.component_type.get(name = "cassette slot").id, + layout.component_type.get(name = "correlator input").id, + layout.component_type.get(name = "HK preamp").id, + layout.component_type.get(name = "HK hydra").id] + >>> hide = [layout.component_type.get(name = "reflector").id, + layout.component_type.get(name = "cassette slot").id, + layout.component_type.get(name = "HK preamp").id, + layout.component_type.get(name = "HK hydra").id] + >>> sg_spec = layout.subgraph_spec(start, terminate, [], hide) + >>> sg = layout.graph.from_db(datetime(2014, 11, 20, 12, 0), sg_spec, "ANT0108B") + >>> print sg.ltf() + # Antenna to correlator input. + ANT0108B pol1_orient=S pol2_orient=E + PL0108B1 + LNA0249B + CXA0239C + CANBJ6B + CXS0042 + RFTG00B attenuation=10 + FLA0196B + CXS0058 + K7BP16-00041606 + <BLANKLINE> + # Antenna to correlator input. + ANT0108B pol1_orient=S pol2_orient=E + PL0108B2 + LNA0296B + CXA0067B + CANBG6B + CXS0090 + RFTG01B attenuation=10 + FLA0269B + CXS0266 + K7BP16-00041506 + """ + if not self._sg_spec: + raise NoSubgraph( + "This layout is not a subgraph. You can only create " + "LTF representations of subgraphs generated from " + "predef_subgraph_spec objects." + ) + return self._ltf_recurse(self._sg_spec_start, [], None)
+ + +
+[docs] + def shortest_path_to_type(self, comp, type, type_exclude=None, ignore_draws=True): + """Searches for the shortest path to a component of a given type. + + Sometimes the closest component is through a long, convoluted path that you + do not wish to explore. You can cut out these cases by including a list of + component types that will block the search along a path. + + The component may be passed by object or by serial number; similarly for + component types. + + Parameters + ---------- + comp : :obj:`component` or string or list of one of these + The component(s) to search from. + type : :obj:`component_type` or string + The component type to find. + type_exclude : list of :obj:`component_type` or strings + Any components of this type will prematurely cut off a line of + investigation. + ignore_draws : boolean + It is possible that there be more than one component of a given type the + same distance from the starting component. If this parameter is set to + :obj:`True`, then just return the first one that is found. If set to + :obj:`False`, then raise an exception. + + Returns + ------- + comp: :obj:`component` or list of such + The closest component of the given type to **start**. If no path to a + component of the specified type exists, return :obj:`None`. + + Raises + ------ + :exc:`ClosestDraw` + Raised if there is no unique closest component and **ignore_draws** is set + to :obj:`False`. + + Examples + -------- + See the examples for :meth:`closest_of_type`. + """ + # Get the start node and the list of candidate end nodes. + one = False + if isinstance(comp, str) or isinstance(comp, component): + comp = [comp] + one = True + + start_list = [self.component(comp=c) for c in comp] + + # Find end_candidates. If there are none in this graph, return None. 
+ try: + end_candidate = self.component(type=type) + except NotFound: + return None if one else [None] * len(comp) + + if end_candidate is None: + return None if one else [None] * len(comp) + + # Get the list of components to exclude, based on the types in the + # **type_exclude** parameter. + exclude = [] + if type_exclude is not None: + if not isinstance(type_exclude, list): + type_exclude = [type_exclude] + for t in type_exclude: + try: + exclude += self.component(type=t) + except NotFound: + pass + + # Construct a subgraph without the excluded nodes + graph = self.subgraph(set(self.nodes()) - set(exclude)).copy() + + # Add a type marking node into the graph connected to all components of + # the type we are looking for + tn = "Type node marker" + graph.add_node(tn) + edges = [(tn, end) for end in end_candidate] + graph.add_edges_from(edges) + + # Get the shortest path to type by searching for the shortest path from + # the start to the type marker, the actual path is the same after + # removing the type marker + shortest = [] + for start in start_list: + try: + path = nx.shortest_path(graph, source=start, target=tn)[:-1] + except (nx.NetworkXError, nx.NetworkXNoPath): + path = None + + shortest.append(path) + + # Return the shortest path (or None if not found) + if one: + return shortest[0] + else: + return shortest
+ + +
    def closest_of_type(self, comp, type, type_exclude=None, ignore_draws=True):
        """Searches for the closest connected component of a given type.

        Sometimes the closest component is through a long, convoluted path that
        you do not wish to explore. You can cut out these cases by including a
        list of component types that will block the search along a path.

        The component may be passed by object or by serial number; similarly
        for component types.

        Parameters
        ----------
        comp : :obj:`component` or string or list of such
            The component to search from.
        type : :obj:`component_type` or string
            The component type to find.
        type_exclude : list of :obj:`component_type` or strings
            Any components of this type will prematurely cut off a line of
            investigation.
        ignore_draws : boolean
            It is possible that there be more than one component of a given
            type the same distance from the starting component. If this
            parameter is set to :obj:`True`, then just return the first one
            that is found. If set to :obj:`False`, then raise an exception.

        Returns
        -------
        comp : :obj:`component` or list of such
            The closest component of the given type to **start**. If no
            component of type is found :obj:`None` is returned.

        Raises
        ------
        :exc:`ClosestDraw`
            Raised if there is no unique closest component and **ignore_draws**
            is set to :obj:`False`.

        Examples
        --------
        Find the cassette slot an antenna is plugged into:

        >>> import layout
        >>> from datetime import datetime
        >>> g = layout.graph.from_db(datetime(2014, 11, 5, 12, 0))
        >>> print g.closest_of_type("ANT0044B", "cassette slot").sn
        CSS004C0

        The example above is simple as the two components are adjacent:

        >>> print [c.sn for c in g.shortest_path_to_type("ANT0044B", "cassette slot")]
        [u'ANT0044B', u'CSS004C0']

        In general, though, you need to take care when using this method and
        make judicious use of the **type_exclude** parameter. For example,
        consider the following example:

        >>> print g.closest_of_type("K7BP16-00040112", "RFT thru").sn
        RFTB15B

        It seems OK on the surface, but the path it has used is probably not
        what you want:

        >>> print [c.sn for c in g.shortest_path_to_type("K7BP16-00040112", "RFT thru")]
        [u'K7BP16-00040112', u'K7BP16-000401', u'K7BP16-00040101', u'FLA0280B', u'RFTB15B']

        We need to block the searcher from going into the correlator card slot
        and then back out another input, which we can do like so:

        >>> print g.closest_of_type("K7BP16-00040112", "RFT thru", type_exclude = "correlator card slot").sn
        RFTQ15B

        The reason the first search went through the correlator card slot is
        because there are delay cables and splitters involved.

        >>> print [c.sn for c in g.shortest_path_to_type("K7BP16-00040112", "RFT thru", type_exclude = "correlator card slot")]
        [u'K7BP16-00040112', u'CXS0279', u'CXA0018A', u'CXA0139B', u'SPL001AP2', u'SPL001A', u'SPL001AP3', u'CXS0281', u'RFTQ15B']

        The shortest path really was through the correlator card slot, until we
        explicitly rejected such paths.
        """

        # shortest_path_to_type returns either a single path (list of
        # components, or None) for a single input, or a list of such paths for
        # a list input.
        path = self.shortest_path_to_type(comp, type, type_exclude, ignore_draws)

        # The list comprehension handles the list-of-paths case; the TypeError
        # fallback covers the single-input case -- both when ``path`` is None
        # (not iterable) and, presumably, when it is a single path whose
        # component elements do not support indexing (``p[-1]`` raises
        # TypeError).  NOTE(review): this dispatch relies on component objects
        # being non-subscriptable -- confirm before restructuring.
        try:
            closest = [p[-1] if p is not None else None for p in path]
        except TypeError:
            closest = path[-1] if path is not None else None
        return closest
+[docs] + def neighbour_of_type(self, n, type): + """Get a list of neighbours of a given type. + + This is like the :meth:`networkx.Graph.neighbors` method, but selects only + the neighbours of the specified type. + + Parameters + ---------- + comp : :obj:`component` + A node in the graph. + type : :obj:`component_type` or string + The component type to find. + + Returns + ------- + nlist : A list of nodes of type **type** adjacent to **n**. + + Raises + ------ + :exc:`networkx.NetworkXError` + Raised if **n** is not in the graph. + """ + ret = [] + try: + type.name + except AttributeError: + type = component_type.from_name(type) + for nn in self.neighbours(n): + if nn.type == type: + ret.append(nn) + return ret
+ + + @_property + def time(self): + """The time of the graph. + + Returns + ------- + time : datetime.datetime + The time at which this graph existed. + """ + return self._time + + @_property + def sg_spec(self): + """The :obj:`subgraph_spec` (subgraph specification) used to get this graph. + + Returns + ------- + The :obj:`subgraph_spec` used to get this graph, if any. + """ + return self._sg_spec + + @_property + def sg_spec_start(self): + """The subgraph starting component. + + Returns + ------- + The :obj:`component` that was used to begin the subgraph, if any. + """ + return self._sg_spec_start
+ + + +# Private Functions +# ================== + + +def _add_to_sever(sn1, sn2, sever, fail_comp): + ok = True + for sn in (sn1, sn2): + try: + component.get(sn=sn) + except pw.DoesNotExist: + fail_comp.append(sn) + ok = False + if ok: + conn = connexion.from_pair(sn1, sn2) + sever.append(conn) + + +def _add_to_chain(chain, sn, prop, sever, fail_comp): + if sn == "//": + if not len(chain): + raise SyntaxError("Stray sever mark (//) in LTF.") + if chain[-1] == "//": + raise SyntaxError("Consecutive sever marks (//) in LTF.") + chain.append("//") + return + + if len(chain): + if chain[-1] == "//": + if len(chain) < 2: + raise SyntaxError( + 'Confused about chain ending in "%s". Is the ' + "first serial number valid?" % (chain[-1]) + ) + try: + _add_to_sever(chain[-2]["comp"].sn, sn, sever, fail_comp) + except KeyError: + pass + del chain[-2] + del chain[-1] + + chain.append(dict()) + try: + chain[-1]["comp"] = component.get(sn=sn) + for k in range(len(prop)): + if len(prop[k].split("=")) != 2: + raise SyntaxError('Confused by the property command "%s".' % prop[k]) + chain[-1][prop[k].split("=")[0]] = prop[k].split("=")[1] + except pw.DoesNotExist: + if not sn in fail_comp: + fail_comp.append(sn) + + +def _id_from_multi(cls, o): + if isinstance(o, int): + return o + elif isinstance(o, cls): + return o.id + else: + return cls.get(name=o).id + + +# Public Functions +# ================ + +from ._db_tables import connect_peewee_tables as connect_database + + +
+[docs] +def enter_ltf(ltf, time=datetime.datetime.now(), notes=None, force=False): + """Enter an LTF into the database. + + This is a special mark-up language for quickly entering events. See the "help" + box on the LTF page of the web interface for instructions. + + Parameters + ---------- + ltf : string + Pass either the path to a file containing the LTF, or a string containing + the LTF. + time : datetime.datetime + The time at which to apply the LTF. + notes : string + Notes for the timestamp. + force : bool + If :obj:`True`, then do nothing when events that would damage database + integrity are encountered; skip over them. If :obj:`False`, then a bad + propsed event will raise the appropriate exception. + """ + + try: + with open(ltf, "r") as myfile: + ltf = myfile.readlines() + except IOError: + try: + ltf = ltf.splitlines() + except AttributeError: + pass + chain = [] + fail_comp = [] + multi_sn = None + multi_prop = None + chain.append([]) + sever = [] + i = 0 + for l in ltf: + if len(l) and l[0] == "#": + continue + severed = False + try: + if l.split()[1] == "//": + severed = True + except IndexError: + pass + + if not len(l) or l.isspace() or severed or l[0:2] == "$$": + if severed: + _add_to_sever(l.split()[0], l.split()[2], sever, fail_comp) + if multi_sn: + _add_to_chain(chain[i], multi_sn, prop, sever, fail_comp) + multi_sn = False + chain.append([]) + i += 1 + continue + + l = l.replace("\n", "") + l = l.strip() + + sn = l.split()[0] + prop = l.split()[1:] + + # Check to see if this is a multiple-line SN. + if multi_sn: + if sn[0] == "+": + off = len(multi_sn) - len(sn) + else: + off = 0 + match = False + if len(multi_sn) == len(sn) + off: + for j in range(len(sn)): + if sn[j] != "." and sn[j] != "-" and sn[j] != "+": + if multi_sn[j + off] == "." 
or multi_sn[j + off] == "-": + match = True + multi_sn = ( + multi_sn[: j + off] + sn[j] + multi_sn[j + off + 1 :] + ) + multi_prop = prop + if not match: + _add_to_chain(chain[i], multi_sn, multi_prop, sever, fail_comp) + _add_to_chain(chain[i], sn, prop, sever, fail_comp) + multi_sn = None + multi_prop = [] + else: + if sn.find("+") >= 0 or sn.find("-") >= 0 or sn.find(".") >= 0: + multi_sn = sn + multi_prop = [] + else: + _add_to_chain(chain[i], sn, prop, sever, fail_comp) + + if multi_sn: + _add_to_chain(chain[i], multi_sn, multi_prop, sever, fail_comp) + + _check_fail( + fail_comp, + False, + DoesNotExist, + "The following component%s " + "%s not in the DB and must be added first" + % (_plural(fail_comp), _are(fail_comp)), + ) + + conn_list = [] + prop_list = [] + for c in chain: + for i in range(1, len(c)): + comp1 = c[i - 1]["comp"] + comp2 = c[i]["comp"] + if comp1.sn == comp2.sn: + logger.info( + "Skipping auto connexion: %s <=> %s." % (comp1.sn, comp2.sn) + ) + else: + conn = connexion.from_pair(comp1, comp2) + try: + if conn.is_permanent(time): + logger.info( + "Skipping permanent connexion: %s <=> %s." + % (comp1.sn, comp2.sn) + ) + elif conn not in conn_list: + conn_list.append(conn) + except pw.DoesNotExist: + conn_list.append(conn) + for i in range(len(c)): + comp = c[i]["comp"] + for p in c[i].keys(): + if p == "comp": + continue + try: + prop_list.append([comp, property_type.get(name=p), c[i][p]]) + except pw.DoesNotExist: + raise DoesNotExist('Property type "%s" does not exist.' % p) + make_connexion(conn_list, time, False, notes, force) + sever_connexion(sever, time, notes, force) + for p in prop_list: + p[0].set_property(p[1], p[2], time, notes)
+ + + +
+[docs] +def get_global_flag_times(flag): + """Convenience function to get global flag times by id or name. + + Parameters + ---------- + flag : integer or string + If an integer, this is a global flag id, e.g. `64`. If a string this is the + global flag's name e.g. 'run_pass0_e'. + + Returns + ------- + start : :class:`datetime.datetime` + Global flag start time (UTC). + end : :class:`datetime.datetime` or `None` + Global flag end time (UTC) or `None` if the flag hasn't ended. + + """ + + if isinstance(flag, str): + query_ = global_flag.select().where(global_flag.name == flag) + else: + query_ = global_flag.select().where(global_flag.id == flag) + + flag_ = query_.join(graph_obj).join(event).where(event.active == True).get() + + event_ = event.get(graph_obj=flag_.id, active=True) + + start = event_.start.time + try: + end = event_.end.time + except pw.DoesNotExist: + end = None + return start, end
+ + + +
+[docs] +def global_flags_between(start_time, end_time, severity=None): + """Find global flags that overlap a time interval. + + Parameters + ---------- + start_time + end_time + severity : str + One of 'comment', 'warning', 'severe', or None. + + Returns + ------- + flags : list + List of global_flag objects matching criteria. + + """ + + start_time = ctime.ensure_unix(start_time) + end_time = ctime.ensure_unix(end_time) + + query = global_flag.select() + if severity: + query = query.where(global_flag.severity == severity) + query = query.join(graph_obj).join(event).where(event.active == True) + + # Set aliases for the join + ststamp = timestamp.alias() + etstamp = timestamp.alias() + + # Add constraint for the start time + query = query.join(ststamp, on=event.start).where( + ststamp.time < ctime.unix_to_datetime(end_time) + ) + # Constrain the end time (being careful to deal with open events properly) + query = ( + query.switch(event) + .join(etstamp, on=event.end, join_type=pw.JOIN.LEFT_OUTER) + .where( + (etstamp.time > ctime.unix_to_datetime(start_time)) | event.end.is_null() + ) + ) + + return list(query)
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/_modules/ch_util/ni_utils.html b/docs/_modules/ch_util/ni_utils.html new file mode 100644 index 00000000..142929c2 --- /dev/null +++ b/docs/_modules/ch_util/ni_utils.html @@ -0,0 +1,1475 @@ + + + + + + ch_util.ni_utils — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for ch_util.ni_utils

+"""Tools for noise injection data"""
+
+import numpy as np
+import os
+import datetime
+from numpy import linalg as LA
+from scipy import linalg as sciLA
+import warnings
+import copy
+from caput import memh5
+from caput import mpiarray
+
+from . import andata
+
+
+
+[docs] +def process_synced_data(data, ni_params=None, only_off=False): + """Turn a synced noise source observation into gated form. + + This will decimate the visibility to only the noise source off bins, and + will add 1 or more gated on-off dataset according to the specification in + doclib:5. + + Parameters + ---------- + data : andata.CorrData + Correlator data with noise source switched synchronously with the + integration. + ni_params : dict + Dictionary with the noise injection parameters. Optional + for data after ctime=1435349183. ni_params has the following keys + - ni_period: Noise injection period in GPU integrations. + It is assummed to be the same for all the enabled noise sources + - ni_on_bins: A list of lists, one per enabled noise source, + with the corresponding ON gates (within a period). For each + noise source, the list contains the indices of the time frames + for which the source is ON. + Example: For 3 GPU integration period (3 gates: 0, 1, 2), two enabled + noise sources, one ON during gate 0, the other ON during gate 1, + and both OFF during gate 2, then + ``` + ni_params = {'ni_period':3, 'ni_on_bins':[[0], [1]]} + ``` + only_off : boolean + Only return the off dataset. Do not return gated datasets. + + Returns + ------- + newdata : andata.CorrData + Correlator data folded on the noise source. + + Comments + -------- + - The function assumes that the fpga frame counter, which is used to + determine the noise injection gating parameters, is unwrapped. + - For noise injection data before ctime=1435349183 (i.e. for noise + injection data before 20150626T200540Z_pathfinder_corr) the noise + injection information is not in the headers so this function cannot be + used to determine the noise injection parameters. A different method is + required. 
Although it is recommended to check the data directly in this + case, the previous version of this function assumed that + ni_params = {'ni_period':2, 'ni_on_bins':[[0],]} + for noise injection data before ctime=1435349183. Although this is not + always true, it is true for big old datasets like pass1g. + Use the value of ni_params recommended above to reproduce the + results of the old function with the main old datasets. + - Data (visibility, gain and weight datasets) are averaged for all the + off gates within the noise source period, and also for all the on + gates of each noise source. + - For the time index map, only one timestamp per noise period is kept + (no averaging) + """ + + if ni_params is None: + # ctime before which the noise injection information is not in the + # headers so this function cannot be used to determine the noise + # injection parameters. + ctime_no_noise_inj_data = 1435349183 + if data.index_map["time"]["ctime"][0] > ctime_no_noise_inj_data: + # All the data required to figure out the noise inj gating is in + # the data header + try: + ni_params = _find_ni_params(data) + except ValueError: + warn_str = ( + "There are no enabled noise sources for these data. " + "Returning input" + ) + warnings.warn(warn_str) + return data + else: + # This is data before ctime = 1435349183. Noise injection + # parameters are not in the data header. Raise error + t = datetime.datetime.utcfromtimestamp(ctime_no_noise_inj_data) + t_str = t.strftime("%Y %b %d %H:%M:%S UTC") + err_str = ( + "ni_params parameter is required for data before " + "%s (ctime=%i)." % (t_str, ctime_no_noise_inj_data) + ) + raise Exception(err_str) + + if len([s for s in data.datasets.keys() if "gated_vis" in s]): + # If there are datasets with gated_vis in their names then assume + # this is fast gating data, where the vis dataset has on+off and + # the vis_gatedxx has onxx-off. 
Process separatedly since in + # this case the noise injection parameters are not in gpu + # integration frames but in fpga frames and the gates are already + # separated + newdata = process_gated_data(data, only_off=only_off) + else: + # time bins with noise ON for each source (within a noise period) + # This is a list of lists, each list corresponding to the ON time bins + # for each noise source. + ni_on_bins = ni_params["ni_on_bins"] + + # Number of enabled noise sources + N_ni_sources = len(ni_on_bins) + + # Noise injection period (assume all sources have same period) + ni_period = ni_params["ni_period"] + + # time bins with all noise sources off (within a noise period) + ni_off_bins = np.delete(list(range(ni_period)), np.concatenate(ni_on_bins)) + + # Find largest number of exact noise injection periods + nt = ni_period * (data.ntime // ni_period) + + # Make sure we're distributed over something other than time + data.redistribute("freq") + + # Get distribution parameters + dist = isinstance(data.vis, memh5.MemDatasetDistributed) + comm = data.vis.comm + + # Construct new CorrData object for gated dataset + newdata = andata.CorrData.__new__(andata.CorrData) + if dist: + memh5.BasicCont.__init__(newdata, distributed=dist, comm=comm) + else: + memh5.BasicCont.__init__(newdata, distributed=dist) + memh5.copyattrs(data.attrs, newdata.attrs) + + # Add index maps to newdata + newdata.create_index_map("freq", data.index_map["freq"]) + newdata.create_index_map("prod", data.index_map["prod"]) + newdata.create_index_map("input", data.input) + # Extract timestamps for OFF bins. Only one timestamp per noise period is + # kept. These will be the timestamps for both the noise on ON and OFF data + time = data.index_map["time"][ni_off_bins[0] : nt : ni_period] + folding_period = time["ctime"][1] - time["ctime"][0] + folding_start = time["ctime"][0] + # Add index map for noise OFF timestamps. 
+ newdata.create_index_map("time", time) + + # Add datasets (for noise OFF) to newdata + # Extract the noise source off data + if len(ni_off_bins) > 1: + # Average all time bins with noise OFF within a period + vis_sky = [data.vis[..., gate:nt:ni_period] for gate in ni_off_bins] + vis_sky = np.mean(vis_sky, axis=0) + else: + vis_sky = data.vis[..., ni_off_bins[0] : nt : ni_period] + + # Turn vis_sky into MPIArray if we are distributed + if dist: + vis_sky = mpiarray.MPIArray.wrap(vis_sky, axis=0, comm=comm) + + # Add new visibility dataset + vis_dset = newdata.create_dataset("vis", data=vis_sky, distributed=dist) + memh5.copyattrs(data.vis.attrs, vis_dset.attrs) + + # Add gain dataset (if exists) for noise OFF data. + # Gain dataset also averaged (within a period) + # These will be the gains for both the noise on ON and OFF data + if "gain" in data: + if len(ni_off_bins) > 1: + gain = [data.gain[..., gate:nt:ni_period] for gate in ni_off_bins] + gain = np.mean(gain, axis=0) + else: + gain = data.gain[..., ni_off_bins[0] : nt : ni_period] + + # Turn gain into MPIArray if we are distributed + if dist: + gain = mpiarray.MPIArray.wrap(gain, axis=0, comm=comm) + + # Add new gain dataset + gain_dset = newdata.create_dataset("gain", data=gain, distributed=dist) + memh5.copyattrs(data.gain.attrs, gain_dset.attrs) + + # Pull out weight dataset if it exists. 
+ # vis_weight dataset also averaged (within a period) + # These will be the weights for both the noise on ON and OFF data + if "vis_weight" in data.flags: + if len(ni_off_bins) > 1: + vis_weight = [ + data.weight[..., gate:nt:ni_period] for gate in ni_off_bins + ] + vis_weight = np.mean(vis_weight, axis=0) + else: + vis_weight = data.weight[..., ni_off_bins[0] : nt : ni_period] + + # Turn vis_weight into MPIArray if we are distributed + if dist: + vis_weight = mpiarray.MPIArray.wrap(vis_weight, axis=0, comm=comm) + + # Add new vis_weight dataset + vis_weight_dset = newdata.create_flag( + "vis_weight", data=vis_weight, distributed=dist + ) + memh5.copyattrs(data.weight.attrs, vis_weight_dset.attrs) + + # Add gated datasets for each noise source: + if not only_off: + for i in range(N_ni_sources): + # Construct the noise source only data + vis_noise = [data.vis[..., gate:nt:ni_period] for gate in ni_on_bins[i]] + vis_noise = np.mean(vis_noise, axis=0) # Averaging + vis_noise -= vis_sky # Subtracting sky contribution + + # Turn vis_noise into MPIArray if we are distributed + if dist: + vis_noise = mpiarray.MPIArray.wrap(vis_noise, axis=0, comm=comm) + + # Add noise source dataset + gate_dset = newdata.create_dataset( + "gated_vis{0}".format(i + 1), data=vis_noise, distributed=dist + ) + gate_dset.attrs["axis"] = np.array( + ["freq", "prod", "gated_time{0}".format(i + 1)] + ) + gate_dset.attrs["folding_period"] = folding_period + gate_dset.attrs["folding_start"] = folding_start + + # Construct array of gate weights (sum = 0) + gw = np.zeros(ni_period, dtype=np.float64) + gw[ni_off_bins] = -1.0 / len(ni_off_bins) + gw[ni_on_bins[i]] = 1.0 / len(ni_on_bins[i]) + gate_dset.attrs["gate_weight"] = gw + + return newdata
+ + + +def _find_ni_params(data, verbose=0): + """ + Finds the noise injection gating parameters. + + Parameters + ---------- + data : andata.CorrData + Correlator data with noise source switched synchronously with the + integration. + verbose: bool + If True, print messages. + + Returns + ------- + ni_params : dict + Dictionary with the noise injection parameters. ni_params has the + following keys + ni_period: Noise injection period in GPU integrations. It is + assummed to be the same for all the enabled noise sources + ni_on_bins: A list of lists, one per enabled noise source, + with the corresponding ON gates (within a period). For each + noise source, the list contains the indices of the time frames + for which the source is ON. + + Example: For 3 GPU integration period (3 gates: 0, 1, 2), two enabled + noise sources, one ON during gate 0, the other ON during gate 1, + and both OFF during gate 2, then + ni_params = {'ni_period':3, 'ni_on_bins':[[0], [1]]} + + Comments + -------- + - The function assumes that the fpga frame counter, which is used to + determine the noise injection gating parameters, is unwrapped. + - For noise injection data before ctime=1435349183 (i.e. for noise + injection data before 20150626T200540Z_pathfinder_corr) the noise + injection information is not in the headers so this function cannot be + used to determine the noise injection parameters. A different method is + required (e.g. check the data directly). The previous version of this + function assumed that + ni_params = {'ni_period':2, 'ni_on_bins':[[0],]} + for noise injection data before ctime=1435349183. Although this is not + always true, it is true for big old datasets like pass1g. + Use the value of ni_params recommended above to reproduce the + results of the old function with the main old datasets. + """ + + # ctime before which the noise injection information is not in the headers + # so this function cannot be used to determine the noise injection + # parameters. 
+ ctime_no_noise_inj_data = 1435349183 + + # ctime of first data frame + ctime0 = data.index_map["time"]["ctime"][0] + if ctime0 < ctime_no_noise_inj_data: + # This is data before ctime = 1435349183. Noise injection parameters + # are not in the data header. Raise error + err_str = ( + "Noise injection parameters are not in the header for " + "these data. See help for details." + ) + raise Exception(err_str) + + ni_period = [] # Noise source period in GPU integrations + ni_high_time = [] # Noise source high time in GPU integrations + ni_offset = [] # Noise source offset in GPU integrations + ni_board = [] # Noise source PWM board + + # Noise inj information is in the headers. Assume the fpga frame + # counter is unwrapped + if verbose: + print("Reading noise injection data from header") + + # Read noise injection parameters from header. Currently the system + # Can handle up to two noise sources. Only the enabled sources are + # analyzed + if ("fpga.ni_enable" in data.attrs) and (data.attrs["fpga.ni_enable"][0]): + # It seems some old data.attrs may have 'fpga.ni_enable' but not + # 'fpga.ni_high_time' (this has to be checked!!) + if "fpga.ni_period" in data.attrs: + ni_period.append(data.attrs["fpga.ni_period"][0]) + else: + ni_period.append(2) + if verbose: + debug_str = ( + '"fpga.ni_period" not in data header. ' + "Assuming noise source period = 2" + ) + print(debug_str) + + if "fpga.ni_high_time" in data.attrs: + ni_high_time.append(data.attrs["fpga.ni_high_time"][0]) + else: + ni_high_time.append(1) + if verbose: + debug_str = ( + '"fpga.ni_high_time" not in data header. ' + "Assuming noise source high time = 1" + ) + print(debug_str) + + if "fpga.ni_offset" in data.attrs: + ni_offset.append(data.attrs["fpga.ni_offset"][0]) + else: + ni_offset.append(0) + if verbose: + debug_str = ( + '"fpga.ni_offset" not in data header. 
' + "Assuming noise source offset = 0" + ) + print(debug_str) + + if "fpga.ni_board" in data.attrs: + ni_board.append(data.attrs["fpga.ni_board"]) + else: + ni_board.append("") + if verbose: + debug_str = '"fpga.ni_board" not in data header.' + print(debug_str) + + if ("fpga.ni_enable_26m" in data.attrs) and (data.attrs["fpga.ni_enable_26m"][0]): + # It seems some old data.attrs may have 'fpga.ni_enable_26m' but + # not 'fpga.ni_high_time_26m' (this has to be checked!!) + if "fpga.ni_period_26m" in data.attrs: + ni_period.append(data.attrs["fpga.ni_period_26m"][0]) + else: + ni_period.append(2) + if verbose: + debug_str = ( + '"fpga.ni_period_26m" not in data header. ' + "Assuming noise source period = 2" + ) + print(debug_str) + + if "fpga.ni_high_time_26m" in data.attrs: + ni_high_time.append(data.attrs["fpga.ni_high_time_26m"][0]) + else: + ni_high_time.append(1) + if verbose: + debug_str = ( + '"fpga.ni_high_time_26m" not in data header.' + " Assuming noise source high time = 1" + ) + print(debug_str) + + if "fpga.ni_offset_26m" in data.attrs: + ni_offset.append(data.attrs["fpga.ni_offset_26m"][0]) + else: + ni_offset.append(0) + if verbose: + debug_str = ( + '"fpga.ni_offset_26m" not in data header. ' + "Assuming noise source offset = 0" + ) + print(debug_str) + + if "fpga.ni_board_26m" in data.attrs: + ni_board.append(data.attrs["fpga.ni_board_26m"]) + else: + ni_board.append("") + if verbose: + debug_str = '"fpga.ni_board_26m" not in data header.' + print(debug_str) + + # Number of enabled noise sources + N_ni_sources = len(ni_period) + if N_ni_sources == 0: + # There are not enabled noise sources. Raise error + raise ValueError("There are no enabled noise sources for these data") + + if np.any(np.array(ni_period - ni_period[0])): + # Enabled sources do not have same period. 
Raise error + raise Exception("Enabled sources do not have same period") + + # Period of first noise source (assume all have same period) + ni_period = ni_period[0] + + if verbose: + for i in range(N_ni_sources): + print("\nPWM signal from board %s is enabled" % ni_board[i]) + print("Period: %i GPU integrations" % ni_period) + print("High time: %i GPU integrations" % ni_high_time[i]) + print("FPGA offset: %i GPU integrations\n" % ni_offset[i]) + + # Number of fpga frames within a GPU integration + int_period = data.attrs["gpu.gpu_intergration_period"][0] + + # fpga counts for first period + fpga_counts = data.index_map["time"]["fpga_count"][:ni_period] + + # Start of high time for each noise source (within a noise period) + ni_on_start_bin = [ + np.argmin(np.remainder((fpga_counts // int_period - ni_offset[i]), ni_period)) + for i in range(N_ni_sources) + ] + + # time bins with noise ON for each source (within a noise period) + ni_on_bins = [ + np.arange(ni_on_start_bin[i], ni_on_start_bin[i] + ni_high_time[i]) + for i in range(N_ni_sources) + ] + + ni_params = {"ni_period": ni_period, "ni_on_bins": ni_on_bins} + + return ni_params + + +
+[docs] +def process_gated_data(data, only_off=False): + """ + Processes fast gating data and turns it into gated form. + + Parameters + ---------- + data : andata.CorrData + Correlator data with noise source switched synchronously with the + integration. + only_off : boolean + Only return the off dataset. Do not return gated datasets. + + Returns + ------- + newdata : andata.CorrData + Correlator data folded on the noise source. + + Comments + -------- + For now the correlator only supports fast gating with one gate + (gated_vis1) and 50% duty cycle. The vis dataset contains on+off + and the gated_vis1 contains on-off. This function returns a new + andata object with vis containing the off data only and gated_vis1 + as in the original andata object. The attribute + 'gpu.gpu_intergration_period' is divided by 2 since during an + integration half of the frames have on data. + """ + # Make sure we're distributed over something other than time + data.redistribute("freq") + + # Get distribution parameters + dist = isinstance(data.vis, memh5.MemDatasetDistributed) + comm = data.vis.comm + + # Construct new CorrData object for gated dataset + newdata = andata.CorrData.__new__(andata.CorrData) + if dist: + memh5.BasicCont.__init__(newdata, distributed=dist, comm=comm) + else: + memh5.BasicCont.__init__(newdata, distributed=dist) + memh5.copyattrs(data.attrs, newdata.attrs) + + # Add index maps to newdata + newdata.create_index_map("freq", data.index_map["freq"]) + newdata.create_index_map("prod", data.index_map["prod"]) + newdata.create_index_map("input", data.input) + newdata.create_index_map("time", data.index_map["time"]) + + # Add datasets (for noise OFF) to newdata + # Extract the noise source off data + vis_off = 0.5 * ( + data.vis[:].view(np.ndarray) - data["gated_vis1"][:].view(np.ndarray) + ) + + # Turn vis_off into MPIArray if we are distributed + if dist: + vis_off = mpiarray.MPIArray.wrap(vis_off, axis=0, comm=comm) + + # Add new visibility dataset + vis_dset 
= newdata.create_dataset("vis", data=vis_off, distributed=dist) + memh5.copyattrs(data.vis.attrs, vis_dset.attrs) + + # Add gain dataset (if exists) for vis_off. + # These will be the gains for both the noise on ON and OFF data + if "gain" in data: + gain = data.gain[:].view(np.ndarray) + # Turn gain into MPIArray if we are distributed + if dist: + gain = mpiarray.MPIArray.wrap(gain, axis=0, comm=comm) + + gain_dset = newdata.create_dataset("gain", data=gain, distributed=dist) + memh5.copyattrs(data.gain.attrs, gain_dset.attrs) + + # Pull out weight dataset if it exists. + # These will be the weights for both the noise on ON and OFF data + if "vis_weight" in data.flags: + vis_weight = data.weight[:].view(np.ndarray) + # Turn vis_weight into MPIArray if we are distributed + if dist: + vis_weight = mpiarray.MPIArray.wrap(vis_weight, axis=0, comm=comm) + + vis_weight_dset = newdata.create_flag( + "vis_weight", data=vis_weight, distributed=dist + ) + memh5.copyattrs(data.weight.attrs, vis_weight_dset.attrs) + + # Add gated dataset (only gated_vis1 currently supported by correlator + # with 50% duty cycle) + if not only_off: + gated_vis1 = data["gated_vis1"][:].view(np.ndarray) + # Turn gated_vis1 into MPIArray if we are distributed + if dist: + gated_vis1 = mpiarray.MPIArray.wrap(gated_vis1, axis=0, comm=comm) + + gate_dset = newdata.create_dataset( + "gated_vis1", data=gated_vis1, distributed=dist + ) + memh5.copyattrs(data["gated_vis1"].attrs, gate_dset.attrs) + + # The CHIME pipeline uses gpu.gpu_intergration_period to estimate the integration period + # for both the on and off gates. That number has to be changed (divided by 2) since + # with fast gating one integration period has 1/2 of data for the on gate and 1/2 + # for the off gate + newdata.attrs["gpu.gpu_intergration_period"] = ( + data.attrs["gpu.gpu_intergration_period"] // 2 + ) + + return newdata
+ + + +
class ni_data(object):
    """Provides analysis utilities for CHIME noise injection data.

    This is just a wrapper for all the utilities created in this module.

    Parameters
    ----------
    Reader_read_obj : andata.Reader.read() like object
        Contains noise injection data. Must have 'vis' and 'timestamp'
        properties. Assumed to contain all the
        Nadc_channels*(Nadc_channels+1)/2 correlation products, in chime's
        canonical vector, for an Nadc_channels x Nadc_channels correlation
        matrix.
    Nadc_channels : int
        Number of channels read in Reader_read_obj.
    adc_ch_ref : int in the range 0 <= adc_ch_ref <= Nadc_channels-1
        Reference channel (used to find on/off points).
    fbin_ref : int in the range 0 <= fbin_ref <= np.size(Reader_read_obj.vis, 0)-1
        Reference frequency bin (used to find on/off points).

    Methods
    -------
    subtract_sky_noise : Removes sky and system noise contributions from noise
        injection visibility data.
    get_ni_gains : Solve for gains from decimated sky-and-noise-subtracted
        visibilities.
    get_als_gains : Compute gains, sky and system noise covariance matrices
        from a combination of noise injection gains and point source gains.
    """

    def __init__(self, Reader_read_obj, Nadc_channels, adc_ch_ref=None, fbin_ref=None):
        """Processes raw noise injection data so it is ready to compute gains."""

        self.adc_channels = np.arange(Nadc_channels)
        self.Nadc_channels = Nadc_channels
        self.raw_vis = Reader_read_obj.vis
        self.Nfreqs = np.size(self.raw_vis, 0)  # Number of frequencies
        # Identity comparison with None ("is not None") instead of "!= None":
        # the latter is non-idiomatic (PEP 8) and unreliable for objects that
        # override __eq__ (e.g. numpy scalars/arrays).
        if adc_ch_ref is not None:
            self.adc_ch_ref = adc_ch_ref
        else:
            self.adc_ch_ref = self.adc_channels[0]  # Default reference channel

        if fbin_ref is not None:
            self.fbin_ref = fbin_ref
        else:  # Default reference frequency bin (rather arbitrary)
            self.fbin_ref = self.Nfreqs // 3

        self.timestamp = Reader_read_obj.timestamp
        try:
            self.f_MHz = Reader_read_obj.freq
        except AttributeError:
            # Deliberate best effort: some TimeStream types lack `freq`.
            pass

        self.subtract_sky_noise()

    def subtract_sky_noise(self):
        """Removes sky and system noise contributions from noise injection
        visibility data.

        See also
        --------
        subtract_sky_noise function
        """

        ni_dict = subtract_sky_noise(
            self.raw_vis,
            self.Nadc_channels,
            self.timestamp,
            self.adc_ch_ref,
            self.fbin_ref,
        )
        self.time_index_on = ni_dict["time_index_on"]
        self.time_index_off = ni_dict["time_index_off"]
        self.vis_on_dec = ni_dict["vis_on_dec"]
        self.vis_off_dec = ni_dict["vis_off_dec"]
        self.vis_dec_sub = ni_dict["vis_dec_sub"]
        self.timestamp_on_dec = ni_dict["timestamp_on_dec"]
        self.timestamp_off_dec = ni_dict["timestamp_off_dec"]
        self.timestamp_dec = ni_dict["timestamp_dec"]
        self.cor_prod_ref = ni_dict["cor_prod_ref"]

    def get_ni_gains(self, normalize_vis=False, masked_channels=None):
        """Computes gains and evalues from noise injection visibility data.

        See also
        --------
        ni_gains_evalues_tf

        Additional parameters
        ---------------------
        masked_channels : list of integers
            channels which are not considered in the calculation of the gains.
        """

        self.channels = np.arange(self.Nadc_channels)
        if masked_channels is not None:
            self.channels = np.delete(self.channels, masked_channels)

        self.Nchannels = len(self.channels)
        # Correlation product indices for selected channels
        cor_prod = gen_prod_sel(self.channels, total_N_channels=self.Nadc_channels)
        self.ni_gains, self.ni_evals = ni_gains_evalues_tf(
            self.vis_dec_sub[:, cor_prod, :], self.Nchannels, normalize_vis
        )

    def get_als_gains(self):
        """Compute gains, sky and system noise covariance matrices from a
        combination of noise injection gains and point source gains.

        .. note:: Not yet implemented — placeholder stub.
        """

        pass

    def save(self):
        """Save gain solutions.

        .. note:: Not yet implemented — placeholder stub.
        """

        pass
def gen_prod_sel(channels_to_select, total_N_channels):
    """Generates correlation product indices for selected channels.

    For a correlation matrix with total_N_channels total number of channels,
    generates indices for correlation products corresponding to channels in
    the list channels_to_select.

    Parameters
    ----------
    channels_to_select : list of integers
        Indices of channels to select
    total_N_channels : int
        Total number of channels

    Returns
    -------
    prod_sel : array
        indices of correlation products for channels in channels_to_select
    """

    wanted = set(channels_to_select)
    # Walk the upper triangle in canonical (row-major) order; enumerate
    # supplies the running correlation-product index.
    upper_triangle = (
        (row, col)
        for row in range(total_N_channels)
        for col in range(row, total_N_channels)
    )
    selected = [
        idx
        for idx, (row, col) in enumerate(upper_triangle)
        if row in wanted and col in wanted
    ]

    return np.array(selected)
+ + + +
def mat2utvec(A):
    """Vectorizes the upper triangle of the (hermitian) matrix A.

    Parameters
    ----------
    A : 2d array
        Hermitian matrix

    Returns
    -------
    1d array with vectorized form of upper triangle of A

    Example
    -------
    if A is a 3x3 matrix then the output vector is
    outvector = [A00, A01, A02, A11, A12, A22]

    See also
    --------
    utvec2mat
    """

    # Row/column index pairs covering the upper triangle, in row-major order.
    rows, cols = np.triu_indices(np.size(A, 0))
    return A[rows, cols]
+ + + +
def utvec2mat(n, utvec):
    """Recovers a hermitian matrix from its upper-triangle vectorized version.

    Parameters
    ----------
    n : int
        order of the output hermitian matrix
    utvec : 1d array
        vectorized form of upper triangle of output matrix

    Returns
    -------
    A : 2d array
        hermitian matrix
    """

    out = np.zeros((n, n), dtype=np.complex128)
    # Scatter the vector back into the upper triangle...
    out[np.triu_indices(n)] = utvec
    # ...then mirror the strict upper triangle (conjugated) into the lower.
    return out + np.triu(out, 1).conj().T
+ + + +
def ktrprod(A, B):
    """Khatri-Rao or column-wise Kronecker product of two matrices.

    A and B have the same number of columns.

    Parameters
    ----------
    A : 2d array
    B : 2d array

    Returns
    -------
    C : 2d array
        Khatri-Rao product of A and B
    """

    m = np.size(A, 0)
    p = np.size(B, 0)
    ncols = np.size(A, 1)
    # Broadcasting computes kron(A[:, i], B[:, i]) for every column at once:
    # element (r*p + s, i) equals A[r, i] * B[s, i].
    stacked = A[:, np.newaxis, :] * B[np.newaxis, :, :]
    # Cast so the output dtype matches the original's complex128 result.
    return stacked.reshape(m * p, ncols).astype(np.complex128)
+ + + +
def ni_als(R, g0, Gamma, Upsilon, maxsteps, abs_tol, rel_tol, weighted_als=True):
    """Implementation of the Alternating Least Squares algorithm for noise
    injection.

    Implements the Alternating Least Squares algorithm to recover the system
    gains, sky covariance matrix and system output noise covariance matrix
    from the data covariance matrix R. All the variables and definitions are as
    in http://bao.phas.ubc.ca/doc/library/doc_0103/rev_01/chime_calibration.pdf

    Parameters
    ----------
    R : 2d array
        Data covariance matrix
    g0 : 1d array
        First estimate of system gains
    Gamma : 2d array
        Matrix that characterizes parametrization of sky covariance matrix
    Upsilon : 2d array
        Matrix characterizing parametrization of system noise covariance matrix
    maxsteps : int
        Maximum number of iterations
    abs_tol : float
        Absolute tolerance on error function
    rel_tol : float
        Relative tolerance on error function
    weighted_als : bool
        If True, perform weighted ALS

    Returns
    -------
    g : 1d array
        System gains
    C : 2d array
        Sky covariance matrix
    N : 2d array
        System output noise covariance matrix
    err : 1d array
        Error function for every step

    See also
    --------
    http://bao.phas.ubc.ca/doc/library/doc_0103/rev_01/chime_calibration.pdf
    """

    g = g0.copy()
    G = np.diag(g)
    Nchannels = np.size(R, 0)  # Number of receiver channels
    rank_Gamma = np.size(Gamma, 1)  # Number of sky covariance matrix parameters
    # Calculate initial weight matrix.  For weighted ALS the whitening weight
    # is the inverse matrix square root of R.
    if weighted_als:
        inv_W = sciLA.sqrtm(R)
        W = LA.inv(inv_W)
    else:
        W = np.eye(Nchannels)
        inv_W = W.copy()

    W_kron_W = np.kron(W.conj(), W)
    G_kron_G = np.kron(G.conj(), G)
    # Stack the gain-modulated sky parametrization and the noise
    # parametrization into one design matrix; solve for both parameter
    # vectors in one weighted least-squares step.
    Psi = np.hstack((np.dot(G_kron_G, Gamma), Upsilon))
    psi = np.dot(np.dot(np.linalg.pinv(np.dot(W_kron_W, Psi)), W_kron_W), R)
    gamma = psi[:rank_Gamma]
    upsilon = psi[rank_Gamma:]
    # Estimate of sky covariance matrix
    C = np.dot(Gamma, gamma).reshape((Nchannels, Nchannels), order="F")
    # Estimate of output noise covariance matrix
    N = np.dot(Upsilon, upsilon).reshape((Nchannels, Nchannels), order="F")
    # Make sure C and N are positive (semi-)definite by clipping negative
    # eigenvalues to zero and reconstructing.
    evals, V = LA.eigh(C, "U")  # Get eigens of C
    D = np.diag(np.maximum(evals, 0.0))  # Replace negative eigenvalues by zeros
    C = np.dot(V, np.dot(D, V.conj().T))  # Positive (semi-)definite version of C
    evals, V = LA.eigh(N, "U")
    D = np.diag(np.maximum(evals, 0))
    N = np.dot(V, np.dot(D, V.conj().T))
    # Calculate error: Frobenius norm of the whitened model residual.
    err = [
        LA.norm(np.dot(W, np.dot(R - np.dot(G, np.dot(C, G.conj())) - N, W)), ord="fro")
    ]

    for i in range(1, maxsteps):
        # NOTE(review): this break condition stops as soon as the error is
        # *at or above* abs_tol, which looks inverted relative to the usual
        # "stop when converged below tolerance" — confirm intent against the
        # referenced calibration memo before changing.
        if (err[-1] >= abs_tol) or (
            (i > 1) and (abs(err[-2] - err[-1]) <= rel_tol * err[-2])
        ):
            break

        if weighted_als:
            inv_W = sciLA.sqrtm(R + np.dot(G, np.dot(C, G.conj())) + N)
            W = LA.inv(inv_W)
        else:
            W = np.eye(Nchannels)
            inv_W = W.copy()

        W_pow2 = np.dot(W, W)
        W_pow2GC = np.dot(W_pow2, np.dot(G, C))
        # Gain update: weighted least-squares solve using the Khatri-Rao
        # structure of the vectorized model.
        g = np.dot(
            LA.pinv(np.dot(C, np.dot(G.conj().T, W_pow2GC)).conj() * W_pow2),
            np.dot(
                ktrprod(W_pow2GC, W_pow2).conj().T,
                (R - N).reshape(Nchannels**2, order="F"),
            ),
        )

        G = np.diag(g)
        G_kron_G = np.kron(G.conj(), G)
        Psi = np.hstack((np.dot(G_kron_G, Gamma), Upsilon))
        # NOTE(review): W_kron_W is computed once before the loop but W is
        # re-derived every iteration above — this solve appears to use a
        # stale weight Kronecker product; verify against the memo.
        psi = np.dot(np.dot(np.linalg.pinv(np.dot(W_kron_W, Psi)), W_kron_W), R)
        gamma = psi[:rank_Gamma]
        upsilon = psi[rank_Gamma:]
        C = np.dot(Gamma, gamma).reshape((Nchannels, Nchannels), order="F")
        N = np.dot(Upsilon, upsilon).reshape((Nchannels, Nchannels), order="F")
        # Re-project C and N onto the positive semi-definite cone.
        evals, V = LA.eigh(C, "U")
        D = np.diag(np.maximum(evals, 0.0))
        C = np.dot(V, np.dot(D, V.conj().T))
        evals, V = LA.eigh(N, "U")
        D = np.diag(np.maximum(evals, 0))
        N = np.dot(V, np.dot(D, V.conj().T))
        err.append(
            LA.norm(
                np.dot(W, np.dot(R - np.dot(G, np.dot(C, G.conj())) - N, W)), ord="fro"
            )
        )

    return g, C, N, np.array(err)
def sort_evalues_mag(evalues):
    """Sorts eigenvalue array by magnitude for all frequencies and time frames.

    Parameters
    ----------
    evalues : 3d array
        Array of evalues. Its shape is [Nfreqs, Nevalues, Ntimeframes]

    Returns
    -------
    ev : 3d array
        Array of same shape as evalues
    """

    sorted_ev = np.zeros(evalues.shape, dtype=float)
    nfreq = np.size(sorted_ev, 0)
    ntime = np.size(sorted_ev, 2)
    for fi in range(nfreq):
        for ti in range(ntime):
            # Permutation that orders this slice by absolute value.
            order = np.argsort(np.abs(evalues[fi, :, ti]))
            sorted_ev[fi, :, ti] = evalues[fi, order, ti]

    return sorted_ev
+ + + +
def ni_gains_evalues(C, normalize_vis=False):
    """Basic algorithm to compute gains and evalues from noise injection data.

    C is a correlation matrix from which the gains are calculated.
    If normalize_vis = True, the visibility matrix is weighted by the diagonal
    matrix that turns it into a crosscorrelation coefficient matrix before the
    gain calculation. The eigenvalues are not sorted. The returned gain solution
    vector is normalized (LA.norm(g) = 1.)

    Parameters
    ----------
    C : 2d array
        Data covariance matrix from which the gains are calculated. It is
        assumed that both the sky and system noise contributions have already
        been subtracted using noise injection
    normalize_vis : bool
        If True, the visibility matrix is weighted by the diagonal matrix that
        turns it into a crosscorrelation coefficient matrix before the
        gain calculation.

    Returns
    -------
    g : 1d array
        Noise injection gains
    ev : 1d array
        Noise injection eigenvalues

    See also
    --------
    ni_gains_evalues_tf, subtract_sky_noise
    """

    n = np.size(C, 0)  # Number of receiver channels
    if normalize_vis:
        # Weight by the inverse square-root autocorrelations so the
        # eigen-decomposition is done on a correlation-coefficient matrix;
        # the un-weighting matrix restores the physical scale afterwards.
        root_autos = np.sqrt(np.diag(C).real)
        weight = np.diag(1 / root_autos)
        unweight = np.diag(root_autos)
    else:
        weight = np.identity(n)
        unweight = np.identity(n)

    ev, V = LA.eigh(np.dot(np.dot(weight, C), weight), "U")
    # Dominant eigenpair gives the rank-1 gain solution.
    top = ev.argmax()
    g = np.sqrt(ev[top]) * np.dot(unweight, V[:, top])

    return g, ev
+ + + +
def ni_gains_evalues_tf(
    vis_gated, Nchannels, normalize_vis=False, vis_on=None, vis_off=None, niter=0
):
    """Computes gains and evalues from noise injection visibility data.

    Gains and eigenvalues are calculated for all frames and
    frequencies in vis_gated. The returned gain solution
    vector is normalized (LA.norm(gains[f, :, t]) = 1.)

    Parameters
    ----------
    vis_gated : 3d array
        Visibility array in chime's canonical format. vis_gated has dimensions
        [frequency, corr. number, time]. It is assumed that both the sky and
        system noise contributions have already been subtracted using noise
        injection.
    Nchannels : int
        Order of the visibility matrix (number of channels)
    normalize_vis : bool
        If True, then the visibility matrix is weighted by the diagonal matrix
        that turns it into a crosscorrelation coefficient matrix before the
        gain calculation.
    vis_on : 3d array
        If input and normalize_vis is True, then vis_gated is weighted
        by the diagonal elements of the matrix vis_on.
        vis_on must be the same shape as vis_gated.
    vis_off : 3d array
        If input and normalize_vis is True, then vis_gated is weighted
        by the diagonal elements of the matrix: vis_on = vis_gated + vis_off.
        vis_off must be the same shape as vis_gated. Keyword vis_on
        supersedes keyword vis_off.
    niter : 0
        Number of iterations to perform. At each iteration, the diagonal
        elements of vis_gated are replaced with their rank 1 approximation.
        If niter == 0 (default), then no iterations are performed and the
        autocorrelations are used instead.

    Returns
    -------
    gains : 3d array
        Noise injection gains
    evals : 3d array
        Noise injection eigenvalues

    Dependencies
    ------------
    tools.normalise_correlations, tools.eigh_no_diagonal

    See also
    --------
    ni_gains_evalues, subtract_sky_noise
    """

    from .tools import normalise_correlations
    from .tools import eigh_no_diagonal

    # Determine the number of frequencies and time frames
    Nfreqs = np.size(vis_gated, 0)
    Ntimeframes = np.size(vis_gated, 2)

    # Create NaN matrices to hold the gains and eigenvalues.
    # FIX: the original used the `np.complex` builtin alias, which was
    # deprecated in NumPy 1.20 and later removed; `np.complex128` is the
    # equivalent concrete dtype.  np.full replaces the zeros*(nan+1j*nan)
    # trick with an explicit fill.
    gains = np.full(
        (Nfreqs, Nchannels, Ntimeframes), np.nan + 1j * np.nan, dtype=np.complex128
    )
    evals = np.full((Nfreqs, Nchannels, Ntimeframes), np.nan, dtype=np.float64)

    # Determine if we will weight by the square root of the autos
    # of the matrix vis_on = vis_gated + vis_off
    vis_on_is_input = (vis_on is not None) and (vis_on.shape == vis_gated.shape)
    vis_off_is_input = (vis_off is not None) and (vis_off.shape == vis_gated.shape)
    weight_by_autos_on = normalize_vis and (vis_on_is_input or vis_off_is_input)

    sqrt_autos = np.ones(Nchannels)

    # Loop through the input frequencies and time frames
    for f in range(Nfreqs):
        for t in range(Ntimeframes):
            # Create Nchannel x Nchannel matrix of noise-injection-on visibilities
            if weight_by_autos_on:
                if vis_on_is_input:
                    mat_slice_vis_on = utvec2mat(Nchannels, vis_on[f, :, t])
                else:
                    mat_slice_vis_on = utvec2mat(
                        Nchannels, np.add(vis_gated[f, :, t], vis_off[f, :, t])
                    )
            else:
                mat_slice_vis_on = None

            # Create Nchannel x Nchannel matrix of gated visibilities
            mat_slice_vis_gated = utvec2mat(Nchannels, vis_gated[f, :, t])

            # If requested, then normalize the gated visibilities
            # by the square root of the autocorrelations
            if normalize_vis:
                mat_slice_vis_gated, sqrt_autos = normalise_correlations(
                    mat_slice_vis_gated, norm=mat_slice_vis_on
                )

            # Solve for eigenvalues and eigenvectors.
            # The gain solutions for the zero'th feed
            # are forced to be real and positive.
            # This means that the phases of the gain
            # solutions are relative phases with respect
            # to the zero'th feed.
            try:
                eigenvals, eigenvecs = eigh_no_diagonal(
                    mat_slice_vis_gated, niter=niter
                )

                if eigenvecs[0, eigenvals.argmax()] < 0:
                    sign0 = -1
                else:
                    sign0 = 1

                gains[f, :, t] = (
                    sign0
                    * sqrt_autos
                    * eigenvecs[:, eigenvals.argmax()]
                    * np.sqrt(np.abs(eigenvals.max()))
                )
                evals[f, :, t] = eigenvals

            except LA.LinAlgError:
                # Leave this (f, t) slice as NaN when the eigensolver
                # fails to converge.
                pass

    return gains, evals
def subtract_sky_noise(vis, Nchannels, timestamp, adc_ch_ref, fbin_ref):
    """Removes sky and system noise contributions from noise injection
    visibility data.

    By looking at the autocorrelation of the reference channel adc_ch_ref
    for frequency bin fbin_ref, finds timestamp indices for which the signal is
    on and off. For every noise signal period, the subcycles with the noise
    signal on and off are averaged separately and then subtracted.

    It is assumed that there are at least 5 noise signal cycles in the data.
    The first and last noise on subcycles are discarded since those cycles may
    be truncated.

    Parameters
    ----------
    vis: 3d array
        Noise injection visibility array in chime's canonical format. vis has
        dimensions [frequency, corr. number, time].
    Nchannels : int
        Order of the visibility matrix (number of channels)
    timestamp : 1d array
        Timestamps for the visibility array vis
    adc_ch_ref : int in the range 0 <= adc_ch_ref <= N_channels-1
        Reference channel (typically, but not necessarily the channel
        corresponding to the directly injected noise signal) used to find
        timestamp indices for which the signal is on and off.
    fbin_ref : int in the range 0 <= fbin_ref <= np.size(vis, 0)-1
        frequency bin used to find timestamp indices for which the signal is
        on and off

    Returns
    -------
    A dictionary with keys
    time_index_on : 1d array
        timestamp indices for noise signal on.
    time_index_off : 1d array
        timestamp indices for noise signal off.
    timestamp_on_dec : 1d array
        timestamps for noise signal on after averaging.
    timestamp_off_dec : 1d array
        timestamps for noise signal off after averaging.
    timestamp_dec : 1d array
        timestamps for visibility data after averaging and subtracting on and
        off subcycles. These timestamps represent the time for every noise
        cycle and thus, these are the timestamps for the gain solutions.
    vis_on_dec : 3d array
        visibilities for noise signal on after averaging.
    vis_off_dec : 3d array
        visibilities for noise signal off after averaging.
    vis_dec_sub : 3d array
        visibilities data after averaging and subtracting on and
        off subcycles.
    cor_prod_ref : int
        correlation index corresponding to the autocorrelation of the reference
        channel
    """

    # Find correlation product of autocorrelation of ref channel in read data.
    # In the canonical upper-triangle ordering, the autocorrelation of
    # channel k sits at index k*N - k*(k-1)/2.
    cor_prod_auto = [k * Nchannels - (k * (k - 1)) // 2 for k in range(Nchannels)]
    cor_prod_ref = cor_prod_auto[adc_ch_ref]
    auto_ref = np.real(vis[fbin_ref, cor_prod_ref, :])

    # Find timestamp indices for noise signal on and off.
    # auto_ref points above/below auto_ref_mean are considered to be on/off.
    auto_ref_mean = np.mean(auto_ref)
    time_index_on = np.where(auto_ref >= auto_ref_mean)[0]
    time_index_off = np.where(auto_ref < auto_ref_mean)[0]
    # A gap (>1) between consecutive "on" indices marks the boundary between
    # two noise-on subsets.
    diff_index_on = np.diff(time_index_on)
    # Indices indicating ends of noise-on subsets
    index_end_on_cycle = time_index_on[np.where(diff_index_on > 1)[0]]
    # Indices indicating starts of noise-on subsets
    index_start_on_cycle = time_index_on[np.where(diff_index_on > 1)[0] + 1]
    vis_on_dec = []  # Decimated visibility on points
    vis_off_dec = []
    timestamp_on_dec = []  # Timestamps of visibility on points
    timestamp_off_dec = []
    timestamp_dec = []  # Timestamp of decimated visibility (on minus off)

    # For each interior cycle: the on subset spans
    # [index_start_on_cycle[i], index_end_on_cycle[i+1]] and the off subset
    # spans (index_end_on_cycle[i], index_start_on_cycle[i]) — note
    # index_end_on_cycle[i] < index_start_on_cycle[i] by construction.
    for i in range(len(index_end_on_cycle) - 1):
        # Visibilities with noise on for cycle i
        vis_on_cycle_i = vis[
            :, :, index_start_on_cycle[i] : index_end_on_cycle[i + 1] + 1
        ]
        # Visibilities with noise off for cycle i
        vis_off_cycle_i = vis[:, :, index_end_on_cycle[i] + 1 : index_start_on_cycle[i]]

        # New lines to find indices of maximum and minimum point of each cycle
        # based on the reference channel
        index_max_i = auto_ref[
            index_start_on_cycle[i] : index_end_on_cycle[i + 1] + 1
        ].argmax()
        index_min_i = auto_ref[
            index_end_on_cycle[i] + 1 : index_start_on_cycle[i]
        ].argmin()
        # Take the single peak (valley) sample rather than an average of the
        # on (off) subcycle.
        vis_on_dec.append(vis_on_cycle_i[:, :, index_max_i])
        vis_off_dec.append(vis_off_cycle_i[:, :, index_min_i])

        # Alternative (kept for reference): take the median over the subcycle
        # instead of the extreme sample:
        # vis_on_dec.append(np.median(vis_on_cycle_i.real, axis=2)+1j*np.median(vis_on_cycle_i.imag, axis=2))
        # vis_off_dec.append(np.median(vis_off_cycle_i.real, axis=2)+1j*np.median(vis_off_cycle_i.imag, axis=2))
        timestamp_on_dec.append(
            np.mean(timestamp[index_start_on_cycle[i] : index_end_on_cycle[i + 1] + 1])
        )
        timestamp_off_dec.append(
            np.mean(timestamp[index_end_on_cycle[i] + 1 : index_start_on_cycle[i]])
        )
        timestamp_dec.append(
            np.mean(
                timestamp[index_end_on_cycle[i] + 1 : index_end_on_cycle[i + 1] + 1]
            )
        )

    # Stack the per-cycle slices back into [freq, corr, cycle] arrays.
    vis_on_dec = np.dstack(vis_on_dec)
    vis_off_dec = np.dstack(vis_off_dec)
    vis_dec_sub = vis_on_dec - vis_off_dec
    timestamp_on_dec = np.array(timestamp_on_dec)
    timestamp_off_dec = np.array(timestamp_off_dec)
    timestamp_dec = np.array(timestamp_dec)

    return {
        "time_index_on": time_index_on,
        "time_index_off": time_index_off,
        "vis_on_dec": vis_on_dec,
        "vis_off_dec": vis_off_dec,
        "vis_dec_sub": vis_dec_sub,
        "timestamp_on_dec": timestamp_on_dec,
        "timestamp_off_dec": timestamp_off_dec,
        "timestamp_dec": timestamp_dec,
        "cor_prod_ref": cor_prod_ref,
    }
def gains2utvec_tf(gains):
    """Converts gain array to CHIME visibility format for all frequencies and
    time frames.

    For every frequency and time frame, converts a gain vector into an outer
    product matrix and then vectorizes its upper triangle to obtain a vector in
    the same format as the CHIME visibility matrix.

    Converting the gain arrays to CHIME visibility format makes it easier to
    apply the gain corrections to the visibility data. See example below.

    Parameters
    ----------
    gains : 3d array
        Input array with the gains for all frequencies, channels and time
        frames in the format of ni_gains_evalues_tf. gains has dimensions
        [frequency, channels, time].

    Returns
    -------
    G_ut : 3d array
        Output array with dimensions [frequency, corr. number, time]. For
        every frequency and time frame, contains the vectorized form of upper
        triangle for the outer product of the respective gain vector.

    Example
    -------
    To compute the gains from a set of noise injection pass0 data and apply the
    gains to the visibilities run:

    >>> from ch_util import andata
    >>> from ch_util import ni_utils as ni
    >>> data = andata.Reader('/scratch/k/krs/jrs65/chime_archive/20140916T173334Z_blanchard_corr/000[0-3]*.h5')
    >>> readdata = data.read()
    >>> nidata = ni.ni_data(readdata, 16)
    >>> nidata.get_ni_gains()
    >>> G_ut = ni.gains2utvec(nidata.ni_gains)
    >>> corrected_vis = nidata.vis_off_dec/G_ut

    See also
    --------
    gains2utvec, ni_gains_evalues_tf
    """

    Nfreqs = np.size(gains, 0)  # Number of frequencies
    Ntimeframes = np.size(gains, 2)  # Number of time frames
    Nchannels = np.size(gains, 1)
    Ncorrprods = Nchannels * (Nchannels + 1) // 2  # Number of correlation products
    # FIX: `np.complex` was removed in NumPy 1.20+; use the concrete
    # np.complex128 dtype instead.
    G_ut = np.zeros((Nfreqs, Ncorrprods, Ntimeframes), dtype=np.complex128)

    # Hoist the upper-triangle index computation out of the loop — it only
    # depends on Nchannels (the original recomputed it per (f, t) slice).
    iu = np.triu_indices(Nchannels)

    for f in range(Nfreqs):
        for t in range(Ntimeframes):
            g = gains[f, :, t]
            # Outer product g g^H, upper triangle vectorized in the
            # canonical CHIME correlation-product order.
            G_ut[f, :, t] = np.outer(g, g.conj())[iu]

    return G_ut
+ + + +
def gains2utvec(g):
    """Converts a vector into an outer product matrix and vectorizes its upper
    triangle to obtain a vector in same format as the CHIME visibility matrix.

    Parameters
    ----------
    g : 1d array
        gain vector

    Returns
    -------
    1d array with vectorized form of upper triangle for the outer product of g
    """

    # Outer product g g^H, then read off the upper triangle in row-major
    # (canonical correlation-product) order.
    order = len(g)
    outer = np.outer(g, np.conj(g))
    return outer[np.triu_indices(order)]
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/_modules/ch_util/plot.html b/docs/_modules/ch_util/plot.html new file mode 100644 index 00000000..0a3c858a --- /dev/null +++ b/docs/_modules/ch_util/plot.html @@ -0,0 +1,772 @@ + + + + + + ch_util.plot — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for ch_util.plot

+"""Plotting routines for CHIME data"""
+
+import numpy as np
+import scipy as sp
+import matplotlib.pyplot as plt
+import warnings
+import datetime
+import scipy.signal as sig
+
+import caput.time as ctime
+
+from . import andata
+from . import ephemeris
+
+
+
def waterfall(
    data, freq_sel=None, prod_sel=None, time_sel=None, part_sel=None, **kwargs
):
    """Two dimensional plot of a visibility vs time and frequency.

    Parameters
    ----------
    data : numpy array or :class:`~ch_util.andata.AnData` object
        Data to plot. If a numpy array, must be 2D or 3D.
    freq_sel : valid numpy index
        Selects data to include along the frequency axis.
    prod_sel : valid numpy index
        Selects data to include along the correlation product axis. If *data*
        is a 2D array, this argument is ignored.
    time_sel : valid numpy index
        Selects data to include along the time axis.
    part_sel : string, one of: 'real', 'imag', 'mag', 'phase' or 'complex'
        Selects what part of data to plot. If 'None', plot real part.

    Examples
    --------

    >>> data = np.ones((100, 100))
    >>> waterfall(data)

    >>> data = andata.AnData.from_acq("...")
    >>> waterfall(data, prod_sel=5, out_file='filename.png')

    To make a plot normalized by a baseline of the median-filtered
    power spectrum averaged over 200 time bins starting at bin 0 with
    a median filter window of 40 bins:

    >>> data = andata.AnData.from_acq("...")
    >>> med_filt_arg = ['new',200,0,40]
    >>> waterfall(data, prod_sel=21, med_filt=med_filt_arg)

    You can also make it save the calculated baseline to a file,
    by providing the filename:

    >>> data = andata.AnData.from_acq("...")
    >>> med_filt_arg = ['new',200,0,40,'base_filename.dat']
    >>> waterfall(data, prod_sel=21, med_filt=med_filt_arg)

    ...or to use a previously obtained baseline to normalize data:
    (where bsln is either a numpy array or a list with length equal
    to the frequency axis of the data)

    >>> data = andata.AnData.from_acq("...")
    >>> med_filt_arg = ['old',bsln]
    >>> waterfall(data, prod_sel=21, med_filt=med_filt_arg)

    To make a full day plot of 01/14/2014, rebinned to 4000 time bins:

    >>> data = andata.AnData.from_acq("...")
    >>> full_day_arg = [[2014,01,14],4000,'time']
    >>> waterfall(data, prod_sel=21, full_day=full_day_arg)
    """
    ########## Section for retrieving keyword arguments.##############
    # Please remove from the kwargs dictionary any arguments for
    # which you provided functionality in waterfall(). The resulting
    # dictionary is going to be passed on to imshow()

    aspect = kwargs.pop("aspect", None)  # float. Aspect ratio of image
    show_plot = kwargs.pop("show_plot", None)  # True or False. Interactive plot
    out_file = kwargs.pop("out_file", None)  # str. File name to save to
    res = kwargs.pop("res", None)  # int. Resolution of saved image in dpi
    title = kwargs.pop("title", None)  # str. Graph title.
    x_label = kwargs.pop("x_label", None)  # str.
    y_label = kwargs.pop("y_label", None)  # str.
    med_filt = kwargs.pop("med_filt", None)  # List of length 2 or 4. See examples.
    full_day = kwargs.pop("full_day", None)  # List of length 3. See examples.
    cbar_label = kwargs.pop("cbar_label", None)  # str. Colorbar label.

    ##################################################################

    # waterfall() does not accept 'complex'
    if part_sel == "complex":
        msg = 'waterfall() does not take "complex" for "part_sel"' " argument."
        raise ValueError(msg)

    fig = plt.figure()
    ax = fig.add_subplot(111)

    # Set title and axis labels, if given (use identity test with None —
    # "!= None" is non-idiomatic and unreliable for __eq__-overriding types):
    if title is not None:
        ax.set_title(title)
    if x_label is not None:
        ax.set_xlabel(x_label)
    if y_label is not None:
        ax.set_ylabel(y_label)

    # Preparing data shape for plotting:
    plt_data = _coerce_data_shape(
        data, freq_sel, prod_sel, time_sel, part_sel, axes=(1,)
    )
    if isinstance(data, andata.AnData):
        tmstp = _select_time(data, time_sel)

    # Apply median filter, if 'med_filt' is given:
    if med_filt is not None:
        msg = "Warning: Wrong value for 'med_filt'. Ignoring argument."
        if med_filt[0] == "new":
            # Apply median filter:
            plt_data, baseline = _med_filter(
                plt_data, med_filt[1], med_filt[2], med_filt[3]
            )
            if len(med_filt) == 5:
                # Save baseline to file. FIX: use a context manager so the
                # file is closed even if a write fails.
                with open(med_filt[4], "w") as file_base_out:
                    for ii in range(len(baseline)):
                        file_base_out.write("{0}\n".format(baseline[ii, 0]))
        elif med_filt[0] == "old":
            # Reshape baseline to ensure type and shape:
            baseline = np.array(med_filt[1]).reshape(len(med_filt[1]), 1)
            # Normalize data:
            plt_data = plt_data / baseline
        else:
            print(msg)

    # Shape data to full day, if 'full_day' is given:
    if full_day is not None:
        # NOTE(review): `tmstp` is only defined above when `data` is an
        # AnData instance — combining `full_day` with a bare numpy array
        # raises NameError; confirm callers never do this.
        plt_data = _full_day_shape(
            plt_data,
            tmstp,
            date=full_day[0],
            n_bins=full_day[1],
            axis=full_day[2],
            ax=ax,
        )

    # Call imshow reversing frequency order
    wtfl = ax.imshow(plt_data[::-1, :], **kwargs)

    # Adjust aspect ratio of image if aspect is provided:
    if aspect is not None:
        _force_aspect(ax, aspect)

        # Adjust colorbar size:
        if aspect >= 1.0:
            shrink = 1 / float(aspect)
        else:
            shrink = 1.0
        cbar = fig.colorbar(wtfl, shrink=shrink)
    else:
        cbar = fig.colorbar(wtfl)

    # Set label to colorbar, if given:
    if cbar_label is not None:
        cbar.set_label(cbar_label)

    # Output depends on keyword arguments.  Equality (not identity) tests
    # against True/False are kept deliberately so truthy values like 1
    # keep behaving as before:
    if show_plot == True:  # noqa: E712
        plt.show()
    elif (show_plot is not None) and (show_plot != False):  # noqa: E712
        msg = (
            'Optional keyword argument "show_plot" should receive either'
            ' "True" or "False". Received "{0}". Ignoring argument.'.format(show_plot)
        )
        warnings.warn(msg, SyntaxWarning)

    # Save to file if filename is provided:
    if out_file is not None:
        if res is not None:
            fig.savefig(out_file, dpi=res)
        else:
            fig.savefig(out_file)

    plt.close(fig)
def spectra(data, freq_sel=None, prod_sel=None, time_sel=None, part_sel=None, **kwargs):
    """Plots spectra at different times and for different correlation products.

    One curve (over the frequency axis) is drawn per (product, time) pair of
    the selected data.

    Parameters
    ----------
    data : numpy array or :class:`~ch_util.andata.AnData` object
        Data to plot.
    freq_sel, prod_sel, time_sel : valid numpy index
        Selections along the frequency, product and time axes.
    part_sel : string, one of: 'real', 'imag', 'mag', 'phase' or 'complex'
        Selects what part of data to plot. If 'None', plot real part.
    """

    # FIX: forward part_sel (the original accepted it but silently ignored
    # it, always plotting the real part).
    plt_data = _coerce_data_shape(
        data, freq_sel, prod_sel, time_sel, part_sel, axes=()
    )
    nprod = plt_data.shape[1]
    ntime = plt_data.shape[2]
    # FIX: the original indexed plt_data[:, ii, jj] with ii ranging over the
    # *time* count while indexing the *product* axis (and vice versa) —
    # an IndexError or transposed plot whenever ntime != nprod.
    for jj in range(nprod):
        for ii in range(ntime):
            plt.plot(plt_data[:, jj, ii])
+ + + +
def time_ordered(
    data, freq_sel=None, prod_sel=None, time_sel=None, part_sel=None, **kwargs
):
    """Plots data vs time for different frequencies and corr-pords.

    .. note:: Not yet implemented — this is a placeholder stub and calling
       it is currently a no-op.  The signature mirrors :func:`waterfall`
       and :func:`spectra` so it can be filled in consistently.
    """

    pass
def _coerce_data_shape(
    data, freq_sel=None, prod_sel=None, time_sel=None, part_sel=None, axes=()
):
    """Gets well shaped data array for plotting.

    Parameters
    ----------
    data : numpy array or :class:`~ch_util.andata.AnData` object
        Data to coerce.
    freq_sel : valid numpy index
        Selects data to include along the frequency axis. Default slices the
        full axis.
    prod_sel : valid numpy index
        Selects data to include along the correlation product axis. If *data*
        is a 2D array, this argument is ignored. Default slices the
        full axis.
    time_sel : valid numpy index
        Selects data to include along the time axis. Default slices the
        full axis.
    part_sel : string, one of: 'real', 'imag', 'mag', 'phase' or 'complex'
        Selects what part of data to plot. If 'None', plot real part.
    axes : tuple of axis numbers
        Axes to eliminate

    Returns
    -------
    plt_data : numpy array
        The dimensionality of the array is guaranteed to be
        ``plt_data.ndim == 3 - len(axes)``.

    Raises
    ------
    ValueError
        If data provided could not be coerced.

    Examples
    --------

    Lets start with simple slicing of numpy array data.

    >>> data = np.ones((5, 7, 3))
    >>> _coerce_data_shape(data, [2, 3, 4], 3, None).shape
    (3, 1, 3)

    Notice that the output is 3D even though normal numpy indexing would have
    eliminated the correlation-product axis. This is because the *axes*
    parameter is set to its default, meaning 3D output is required. If we
    instead tell it to eliminate the product axis:

    >>> _coerce_data_shape(data, [2, 3, 4], 3, None, axes=(1,)).shape
    (3, 3)

    If an axis to be eliminated is not length 1, a :exc:`ValueError` is raised.

    >>> _coerce_data_shape(data, [2, 3, 4], [3, 2], None, axes=(1,))
    Traceback (most recent call last)
    ...
    ValueError: Need to eliminate axis 1 but it is not length 1.

    The input data may be less than 3D. In this case *axes* indicates which
    axes are missing.

    >>> data = np.ones((2, 3))
    >>> _coerce_data_shape(data, 1, None, None, axes=(1,)).shape
    (1, 3)

    Example of selecting part to plot:

    >>> data = np.ones((2,3,4))*(5+6j)
    >>> _coerce_data_shape(data,None,1,None,part_sel='imag',axes=(1,))
    array([[ 6.,  6.,  6.,  6.],
           [ 6.,  6.,  6.,  6.]])

    All this works with :class:`~ch_util.andata.AnData` input data, where the
    visibilities are treated as a 3D array.
    """

    axes = sorted(axes)
    if isinstance(data, andata.AnData):
        # Work on the raw visibility array.
        data = data.vis
    if data.ndim != 3:
        if data.ndim != 3 - len(axes):
            msg = (
                "Could no interpret input data axes. Got %dD data and need"
                " coerse to %dD data"
            ) % (data.ndim, 3 - len(axes))
            raise ValueError(msg)
        # Temporarily make the data 3D (for slicing), will reshape in the end.
        shape = data.shape
        for axis in axes:
            shape = shape[:axis] + (1,) + shape[axis:]
        data = np.reshape(data, shape)
    # Select data.  Integer selections are wrapped in a list so the indexed
    # axis is kept (length 1) instead of being dropped.
    if isinstance(freq_sel, int):
        freq_sel = [freq_sel]
    elif freq_sel is None:
        freq_sel = slice(None)
    data = data[freq_sel]
    if isinstance(prod_sel, int):
        prod_sel = [prod_sel]
    elif prod_sel is None:
        prod_sel = slice(None)
    data = data[:, prod_sel]
    if isinstance(time_sel, int):
        time_sel = [time_sel]
    elif time_sel is None:
        time_sel = slice(None)
    data = data[:, :, time_sel]
    if data.ndim != 3:
        raise RuntimeError("Shouldn't have happend")
    # Now reshape to the correct dimensionality.  Axes are removed from the
    # highest index down so earlier removals don't shift later ones.
    shape = data.shape
    axes.reverse()
    for axis in axes:
        if not shape[axis] == 1:
            msg = "Need to eliminate axis %d but it is not length 1." % axis
            raise ValueError(msg)
        shape = shape[:axis] + shape[axis + 1 :]
    # In-place shape assignment: raises if the array is not contiguous
    # enough to be reshaped without a copy.
    data.shape = shape

    # Selects what part to plot:
    # Defaults to plotting real part of data.
    if part_sel == "real" or part_sel == None:
        data = data.real
    elif part_sel == "imag":
        data = data.imag
    elif part_sel == "mag":
        data = (data.real**2 + data.imag**2) ** (0.5)
    elif part_sel == "phase":
        # NOTE(review): np.arctan(imag/real) only covers (-pi/2, pi/2) and
        # divides by zero for purely imaginary samples; np.angle /
        # np.arctan2 would give full-quadrant phase — confirm whether the
        # restricted range is intentional.
        data = np.arctan(data.imag / data.real)
    elif part_sel == "complex":
        pass
    else:
        msg = (
            'Optional keyword argument "part_sel" has to receive'
            ' one of "real", "imag", "mag", "phase" or "complex".'
            ' Received "{0}"'.format(part_sel)
        )
        raise ValueError(msg)

    return data


def _select_time(data, time_sel):
    """Reshape time stamp vector according to 'time_sel'.

    Parameters
    ----------
    data : class:`~ch_util.andata.AnData` object
        Data to take time stamp from.
    time_sel : valid numpy index
        Selects data to include along the time axis. Default slices the
        full axis.

    Returns
    -------
    tmstp : numpy array
        time stamp with selected times
    """
    # NOTE(review): if `data` is not an AnData instance, `tmstp` is never
    # bound and the return raises UnboundLocalError — confirm callers
    # always pass AnData here.
    if isinstance(data, andata.AnData):
        tmstp = data.timestamp
        if isinstance(time_sel, int):
            time_sel = [time_sel]
        elif time_sel is None:
            time_sel = slice(None)
        tmstp = tmstp[time_sel]

    return tmstp


def _full_day_shape(data, tmstp, date, n_bins=8640, axis="solar", ax=None):
    """Rebin data in linear time or solar azimuth.

    Parameters
    ----------
    data : numpy array
        Data to plot. Must be 2D.
    tmstp : numpy array
        Time stamp of data to plot.
    ax : matplotlib.axes.Axes instance
        Axes to receive plot. Time/azimuth ticks
        and labels will be set accordingly.
    date : python list of length 3
        Date of day to plot in the format:
        [yyyy, mm, dd], all entries 'int'.
    n_bins : int
        Number of time/azimuth bins in new matrix.
    axis : str
        If 'solar': rebin by solar azimuth
        If 'time': rebin by time

    Returns
    -------
    Z : numpy ndarray
        New rebinned matrix

    Example
    -------
    For example of usage, see plot.waterfall() documentation
    """
    n_bins = int(n_bins)
    # Day boundaries in local time (hard-coded UTC-8 offset).
    start_time = datetime.datetime(date[0], date[1], date[2], 8, 0, 0)  # UTC-8
    end_time = start_time + datetime.timedelta(days=1)
    unix_start = ctime.datetime_to_unix(start_time)
    unix_end = ctime.datetime_to_unix(end_time)
    print("Re-binning full day data to plot")

    if axis == "solar":
        # Equal-width azimuth bins over [0, 2*pi).
        bin_width = float(2 * np.pi) / float(n_bins)
        bin_ranges = []
        for ii in range(n_bins):
            az1 = ii * bin_width
            az2 = az1 + bin_width
            bin_ranges.append([az1, az2])

        values_to_sum = []
        for ii in range(n_bins):
            values_to_sum.append([])

        # Samples shortly before the day start (after the day end) may wrap
        # in azimuth; they are only matched against the first (last) half of
        # the bins below to disambiguate.
        start_range = [unix_start - 1.5 * 3600, unix_start + 0.5 * 3600]
        end_range = [unix_end - 1.5 * 3600, unix_end + 0.5 * 3600]

        n_added = 0

        for ii in range(len(tmstp)):
            in_range = (tmstp[ii] > start_range[0]) and (tmstp[ii] < end_range[1])
            if in_range:
                # Solar azimuth at this timestamp, from the CHIME observer.
                sf_time = ctime.unix_to_skyfield_time(tmstp[ii])
                sun = ctime.skyfield_wrapper.ephemeris["sun"]
                obs = ephemeris.chime.skyfield_obs().at(sf_time)
                azim = obs.observe(sun).apparent().altaz()[1].radians

                in_start_range = (tmstp[ii] > start_range[0]) and (
                    tmstp[ii] < start_range[1]
                )
                in_end_range = (tmstp[ii] > end_range[0]) and (tmstp[ii] < end_range[1])

                if in_start_range:
                    for jj in range(int(n_bins // 2)):
                        if (azim > bin_ranges[jj][0]) and (azim <= bin_ranges[jj][1]):
                            values_to_sum[jj].append(ii)
                            n_added = n_added + 1
                            break
                elif in_end_range:
                    for jj in range(int(n_bins // 2)):
                        kk = n_bins - jj - 1
                        if (azim > bin_ranges[kk][0]) and (azim <= bin_ranges[kk][1]):
                            values_to_sum[kk].append(ii)
                            n_added = n_added + 1
                            break
                else:
                    for jj in range(n_bins):
                        if (azim > bin_ranges[jj][0]) and (azim <= bin_ranges[jj][1]):
                            values_to_sum[jj].append(ii)
                            n_added = n_added + 1
                            break

        # Set azimuth ticks, if given:
        if ax != None:
            tck_stp = n_bins / 6.0
            ticks = np.array(
                [
                    int(tck_stp),
                    int(2 * tck_stp),
                    int(3 * tck_stp),
                    int(4 * tck_stp),
                    int(5 * tck_stp),
                ]
            )
            ax.set_xticks(ticks)
            labels = ["60", "120", "180", "240", "300"]
            ax.set_xticklabels(labels)
            # Set label:
            ax.set_xlabel("Solar azimuth (degrees)")

    elif axis == "time":
        # Equal-width time bins over the 24-hour span.
        bin_width = float(86400) / float(n_bins)
        bin_ranges = []
        for ii in range(n_bins):
            t1 = unix_start + ii * bin_width
            t2 = t1 + bin_width
            bin_ranges.append([t1, t2])

        values_to_sum = []
        for ii in range(n_bins):
            values_to_sum.append([])

        n_added = 0

        for ii in range(len(tmstp)):
            in_range = (tmstp[ii] >= unix_start) and (tmstp[ii] <= unix_end)
            if in_range:
                time = tmstp[ii]
                for jj in range(n_bins):
                    if (time > bin_ranges[jj][0]) and (time <= bin_ranges[jj][1]):
                        values_to_sum[jj].append(ii)
                        n_added = n_added + 1
                        break

        # Set time ticks, if given:
        if ax != None:
            tck_stp = n_bins / 6.0
            ticks = np.array(
                [
                    int(tck_stp),
                    int(2 * tck_stp),
                    int(3 * tck_stp),
                    int(4 * tck_stp),
                    int(5 * tck_stp),
                ]
            )
            ax.set_xticks(ticks)
            labels = ["04:00", "08:00", "12:00", "16:00", "20:00"]
            ax.set_xticklabels(labels)
            # Set label:
            ax.set_xlabel("Time (UTC-8 hours)")

    print("Number of 10-second bins added to full day data: {0}".format(n_added))

    # Set new array to NaN for subsequent masking.
    # NOTE(review): the 1024-row output hard-codes the CHIME frequency-axis
    # length — confirm `data` always has 1024 frequency bins here.
    Z = np.ones((1024, n_bins))
    for ii in range(1024):
        for jj in range(n_bins):
            Z[ii, jj] = float("NaN")

    # Average the samples collected into each bin; empty bins stay NaN.
    for ii in range(n_bins):
        n_col = len(values_to_sum[ii])
        if n_col > 0:
            col = np.zeros((1024))
            for jj in range(n_col):
                col = col + data[:, values_to_sum[ii][jj]]
            Z[:, ii] = col / float(n_col)

    return Z


def _force_aspect(ax, aspect=1.0):
    """Force desired aspect ratio into image axes.
+ + Parameters + ---------- + ax : matplotlib.axes.Axes instance + Axes that will be set to the desired aspect ratio + aspect : float or int + Desired aspect ratio in horizontal/vertical order + + Motivation + ---------- + Apparently, the 'aspect' keyword argument in Imshow() is + not working properlly in this version of matplotlib (1.1.1rc) + + Example + ------- + + data = np.ones((100,200)) + fig = plt.figure() + ax = fig.add_subplot(111) + ax.imshow(data) + _force_aspect(ax,aspect=1.) + plt.show() + + Will produce a square solid image. + + """ + + im = ax.get_images() + extent = im[0].get_extent() + ax.set_aspect(abs((extent[1] - extent[0]) / float(extent[3] - extent[2])) / aspect) + + +def _med_filter(data, n_bins=200, i_bin=0, filt_window=37): + """Normalize a 2D array by its power spectrum averaged over 'n_bins' starting at 'i_bin'. + + Parameters + ---------- + data : numpy.ndarray + Data to be normalized + + n_bins : integer + Number of bins over which to average the power spectrum + + i_bin : integer + First bin of the range over which to average the power spectrum + + filt_window : integer + Width of the window for the median filter. The filter is applied + once with this window and a second time with 1/3 of this window width. + + Returns + ------- + rel_power : 2d array normalized by average power spectrum (baseline) + medfilt_baseline : Average power spectrum + + Issues + ------ + Assumes frequency in first index and time in second index + Assumes data has the standard 1024 frequency bins + + Comments + -------- + If entry is 0 in data and in baseline, entry is set to 1. 
in + normalized matrix + + """ + # If n_bins biger than array, average over entire array: + if data.shape[1] > n_bins: + sliced2darray = data[:, 0 : (n_bins - 1)] + else: + sliced2darray = data + + # Mean of range selected: + mean_arr = np.mean(sliced2darray, axis=-1) + # Standard deviation: + std_arr = np.std(sliced2darray, axis=-1) + # Standard deviation of the mean: + sigma = np.median(std_arr) / (sliced2darray.shape[1]) ** (0.5) + print("Taking median filter") + medfilt_arr = sig.medfilt(mean_arr, filt_window) + # Extract RFI: + non_rfi_mask = (mean_arr - medfilt_arr) < 5 * sigma + print("Number of good data points for baseline: ", np.sum(non_rfi_mask)) + print("out of 1024 points - ", np.sum(non_rfi_mask / float(1024)) * 100, "%") + # Interpolate result: + freq = np.linspace(400, 800, 1024) + interpolat_arr_baseline = np.interp( + freq, freq[non_rfi_mask], mean_arr[non_rfi_mask] + ) + # Median filter a second time: + small_window = int(filt_window // 3) + # Has to be odd: + if small_window % 2 == 0: + small_window = small_window + 1 + medfilt_baseline = np.reshape( + sig.medfilt(interpolat_arr_baseline, small_window), + (interpolat_arr_baseline.shape[0], 1), + ) + + # Boolean mask for entries where original data and baseline are zero: + mask = np.where(medfilt_baseline == 0, data, 1) == 0 + # Normalize data: + rel_power = data / medfilt_baseline + # Set masked entries to 1: + rel_power[mask] = 1.0 + + return rel_power, medfilt_baseline +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/_modules/ch_util/rfi.html b/docs/_modules/ch_util/rfi.html new file mode 100644 index 00000000..f568e105 --- /dev/null +++ b/docs/_modules/ch_util/rfi.html @@ -0,0 +1,1151 @@ + + + + + + ch_util.rfi — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for ch_util.rfi

+"""Tools for RFI flagging
+
+This module contains tools for finding and removing Radio Frequency Interference
+(RFI).
+
+Note that this generates masks where the elements containing RFI are marked as
+:obj:`True`, and the remaining elements are marked :obj:`False`. This is in
+contrast to the routines in :mod:`ch_pipeline.rfi` which generate an inverse
+noise weighting, where RFI containing elements are effectively :obj:`False`, and
+the remainder are :obj:`True`.
+
+There are general purpose routines for flagging RFI in `andata` like datasets:
+
+- :py:meth:`flag_dataset`
+- :py:meth:`number_deviations`
+
+For more control there are specific routines that can be called:
+
+- :py:meth:`mad_cut_2d`
+- :py:meth:`mad_cut_1d`
+- :py:meth:`mad_cut_rolling`
+- :py:meth:`spectral_cut`
+- :py:meth:`frequency_mask`
+- :py:meth:`sir1d`
+- :py:meth:`sir`
+"""
+
+import warnings
+import logging
+from typing import Tuple, Optional, Union
+
+import numpy as np
+import scipy.signal as sig
+
+from . import tools, ephemeris
+
+# Set up logging
+logger = logging.getLogger(__name__)
+logger.addHandler(logging.NullHandler())
+
+
+# Ranges of bad frequencies given by their start time (in unix time) and corresponding start and end frequencies (in MHz)
+# If the start time is not specified, t = [], the flag is applied to all CSDs
# Ranges of bad frequencies, keyed by instrument.  Each entry is
# [[start_time, end_time], [freq_start, freq_end]]: the times are UNIX
# seconds bounding when the flag applies (None means unbounded on that
# side, so [None, None] applies to all CSDs); frequencies are in MHz.
BAD_FREQUENCIES = {
    "chime": [
        ### Bad bands at first light
        [[None, None], [449.41, 450.98]],
        [[None, None], [454.88, 456.05]],
        [[None, None], [457.62, 459.18]],
        [[None, None], [483.01, 485.35]],
        [[None, None], [487.70, 494.34]],
        [[None, None], [497.85, 506.05]],
        [[None, None], [529.10, 536.52]],
        [[None, None], [541.60, 548.00]],
        ### Additional bad bands
        # Narrow, high power bands visible in sensitivities and
        # some longer baselines. There is some sporadic rfi in between
        # the two bands
        [[None, None], [460.15, 460.55]],
        [[None, None], [464.00, 470.32]],
        # 6 MHz band (reported by Simon)
        [[None, None], [505.85, 511.71]],
        # Bright band which has been present since early on
        [[None, None], [517.97, 525.00]],
        # UHF TV Channel 27 ending CSD 3212 inclusive (2022/08/24)
        # This is extended until CSD 3446 (2023/04/13) to account for gain errors
        [[None, 1681410777], [548.00, 554.49]],
        [[None, None], [564.65, 578.00]],
        # UHF TV Channel 32 ending CSD 3213 inclusive (2022/08/25)
        # This is extended until CSD 3446 (2023/04/13) to account for gain errors
        [[None, 1681410777], [578.00, 585.35]],
        # from CSD 2893 (2021/10/09 - ) UHF TV Channel 33 (reported by Seth)
        [[1633758888, None], [584.00, 590.00]],
        # UHF TV Channel 35
        [[1633758888, None], [596.00, 602.00]],
        # Low power band visible in long baselines
        [[None, None], [602.00, 607.82]],
        # from CSD 2243 (2019/12/31 - ) Rogers' new 600 MHz band
        [[1577755022, None], [617.00, 627.00]],
        [[None, None], [693.16, 693.55]],
        [[None, None], [694.34, 696.68]],
        # from CSD 2080 (2019/07/21 - ) Blobs, Channels 55 and 56
        [[1564051033, None], [716.00, 728.00]],
        [[None, None], [729.88, 745.12]],
        [[None, None], [746.29, 756.45]],
    ],
    "kko": [
        # Bad bands from statistical analysis of Jan 20, 2023 N2 data
        [[None, None], [433.59, 433.98]],
        [[None, None], [439.84, 440.62]],
        [[None, None], [483.20, 484.38]],
        [[None, None], [616.80, 626.95]],
        [[None, None], [799.61, 800.00]],
        # Notch filter stopband + leakage
        [[None, None], [710.55, 757.81]],
    ],
    "gbo": [],
    "hco": [],
}
+
+
+
def flag_dataset(
    data, freq_width=10.0, time_width=420.0, threshold=5.0, flag1d=False, rolling=False
):
    """RFI flag the dataset.

    This function wraps `number_deviations` and remains largely for
    backwards compatibility.  The pipeline code now calls
    `number_deviations` directly.

    Parameters
    ----------
    data : `andata.CorrData`
        Must contain vis and weight attributes that are both
        `np.ndarray[nfreq, nprod, ntime]`.  Note that this function does
        not work with CorrData that has been stacked over redundant
        baselines.
    freq_width : float
        Frequency interval in *MHz* to compare across.
    time_width : float
        Time interval in *seconds* to compare.
    threshold : float
        Threshold in MAD over which to cut out RFI.
    flag1d : bool, optional
        Only apply the MAD cut in the time direction. This is useful if the
        frequency coverage is sparse.
    rolling : bool
        Use a rolling window instead of distinct blocks.

    Returns
    -------
    mask : np.ndarray
        RFI mask, output shape is the same as input visibilities.

    Raises
    ------
    TypeError
        If `data` has neither a `time` nor an `ra` axis.
    """
    auto_ii, auto_vis, auto_ndev = number_deviations(
        data,
        freq_width=freq_width,
        time_width=time_width,
        flag1d=flag1d,
        rolling=rolling,
        stack=False,
    )

    # number_deviations returns auto_ii as an ndarray, which has no
    # `.index` method; convert so the per-product lookups below work.
    auto_ii = list(auto_ii)

    auto_mask = np.abs(auto_ndev) > threshold

    # Reference time used to select the time-dependent static flags.
    # Raise explicitly (consistent with number_deviations) instead of
    # letting an undefined `timestamp` surface as a NameError.
    if "time" in data.index_map:
        timestamp = data.time[0]
    elif "ra" in data.index_map:
        timestamp = ephemeris.csd_to_unix(data.attrs["lsd"])
    else:
        raise TypeError(
            f"Expected data type with a `time` or `ra` axis. Got {type(data)}."
        )

    # Merge in the static frequency mask.  NOTE: this previously did
    # `auto_ii, auto_mask = np.logical_or(...)`, which unpacked the
    # combined mask along its first axis and clobbered `auto_ii`.
    freq_mask = frequency_mask(data.freq[:], timestamp=timestamp)
    auto_mask = np.logical_or(auto_mask, freq_mask[:, np.newaxis, np.newaxis])

    # Create an empty mask for the full dataset
    mask = np.zeros(data.vis[:].shape, dtype=bool)

    # Loop over all products and flag if either input's autocorrelation
    # was flagged.
    for pi in range(data.nprod):
        ii, ij = data.index_map["prod"][pi]

        if ii in auto_ii:
            ai = auto_ii.index(ii)
            mask[:, pi] = np.logical_or(mask[:, pi], auto_mask[:, ai])

        if ij in auto_ii:
            aj = auto_ii.index(ij)
            mask[:, pi] = np.logical_or(mask[:, pi], auto_mask[:, aj])

    return mask
+ + + +
def number_deviations(
    data,
    freq_width=10.0,
    time_width=420.0,
    flag1d=False,
    apply_static_mask=False,
    rolling=False,
    stack=False,
    normalize=False,
    fill_value=None,
):
    """Calculate the number of median absolute deviations (MAD)
    of the autocorrelations from the local median.

    Parameters
    ----------
    data : `andata.CorrData`
        Must contain vis and weight attributes that are both
        `np.ndarray[nfreq, nprod, ntime]`.
    freq_width : float
        Frequency interval in *MHz* to compare across.
    time_width : float
        Time interval in *seconds* to compare across.
    flag1d : bool
        Only apply the MAD cut in the time direction. This is useful if the
        frequency coverage is sparse.
    apply_static_mask : bool
        Apply static mask obtained from `frequency_mask` before computing
        the median absolute deviation.
    rolling : bool
        Use a rolling window instead of distinct blocks.
    stack: bool
        Average over all autocorrelations.
    normalize : bool
        Normalize by the median value over time prior to averaging over
        autocorrelations. Only relevant if `stack` is True.
    fill_value: float
        Data that was already flagged as bad will be set to this value in
        the output array. Should be a large positive value that is greater
        than the threshold that will be placed. Default is float('Inf').

    Returns
    -------
    auto_ii: np.ndarray[ninput,]
        Index of the inputs that have been processed.
        If stack is True, then [0] will be returned.
    auto_vis: np.ndarray[nfreq, ninput, ntime]
        The autocorrelations that were used to calculate
        the number of deviations.
    ndev : np.ndarray[nfreq, ninput, ntime]
        Number of median absolute deviations of the autocorrelations
        from the local median.

    Raises
    ------
    TypeError
        If `data` has neither a `time` nor an `ra` axis.
    """
    # caput is only needed here; import lazily to keep module import light.
    from caput import memh5, mpiarray

    if fill_value is None:
        fill_value = float("Inf")

    # Check if dataset is parallel (distributed over MPI ranks).
    parallel = isinstance(data.vis, memh5.MemDatasetDistributed)

    data.redistribute("freq")

    # Extract the auto correlations
    auto_ii, auto_vis, auto_flag = get_autocorrelations(data, stack, normalize)

    # Calculate time interval in samples. If the data has an ra axis instead,
    # use an estimation of the time per sample (86164 s = 1 sidereal day).
    if "time" in data.index_map:
        twidth = int(time_width / np.median(np.abs(np.diff(data.time)))) + 1
        timestamp = data.time[0]
    elif "ra" in data.index_map:
        twidth = int(time_width * len(data.ra[:]) / 86164.0) + 1
        timestamp = ephemeris.csd_to_unix(data.attrs["lsd"])
    else:
        raise TypeError(
            f"Expected data type with a `time` or `ra` axis. Got {type(data)}."
        )

    # Create static flag of frequencies that are known to be bad
    # (True = good channel, hence the inversion of frequency_mask).
    static_flag = (
        ~frequency_mask(data.freq[:], timestamp=timestamp)
        if apply_static_mask
        else np.ones(len(data.freq[:]), dtype=bool)
    )[:, np.newaxis]

    if parallel:
        # Ensure these are distributed across frequency
        auto_vis = auto_vis.redistribute(0)
        auto_flag = auto_flag.redistribute(0)
        static_flag = mpiarray.MPIArray.wrap(static_flag[auto_vis.local_bounds], axis=0)

    # Calculate frequency interval in bins
    fwidth = (
        int(freq_width / np.median(np.abs(np.diff(data.freq)))) + 1 if not flag1d else 1
    )

    # Create an empty array for number of median absolute deviations
    ndev = np.zeros_like(auto_vis, dtype=np.float32)

    # When distributed, flags are gathered to every rank but the deviations
    # are written only into the local portion of `ndev`.
    auto_flag_view = auto_flag.allgather() if parallel else auto_flag
    static_flag_view = static_flag.allgather() if parallel else static_flag
    ndev_view = ndev.local_array if parallel else ndev

    # Loop over extracted autos and create a mask for each
    for ind in range(auto_vis.shape[1]):
        flg = static_flag_view & auto_flag_view[:, ind]
        # Gather entire array onto each rank
        arr = auto_vis[:, ind].allgather() if parallel else auto_vis[:, ind]
        # Use NaNs to ignore previously flagged data when computing the MAD
        arr = np.where(flg, arr.real, np.nan)
        local_bounds = auto_vis.local_bounds if parallel else slice(None)
        # Apply RFI flagger
        if rolling:
            # Limit bounds to the local portion of the array
            ndev_i = mad_cut_rolling(
                arr, twidth=twidth, fwidth=fwidth, mask=False, limit_range=local_bounds
            )
        elif flag1d:
            ndev_i = mad_cut_1d(arr[local_bounds, :], twidth=twidth, mask=False)
        else:
            ndev_i = mad_cut_2d(
                arr[local_bounds, :], twidth=twidth, fwidth=fwidth, mask=False
            )

        ndev_view[:, ind, :] = ndev_i

    # Fill any values equal to NaN with the user specified fill value
    ndev_view[~np.isfinite(ndev_view)] = fill_value

    return auto_ii, auto_vis, ndev
+ + + +
def get_autocorrelations(
    data, stack: bool = False, normalize: bool = False
) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
    """Extract autocorrelations from a data stack.

    Parameters
    ----------
    data : `andata.CorrData`
        Must contain vis and weight attributes that are both
        `np.ndarray[nfreq, nprod, ntime]`.
    stack: bool, optional
        Average over all autocorrelations.
    normalize : bool, optional
        Normalize by the median value over time prior to averaging over
        autocorrelations. Only relevant if `stack` is True.

    Returns
    -------
    auto_ii: np.ndarray[ninput,]
        Index of the inputs that have been processed.
        If stack is True, then [0] will be returned.
    auto_vis: np.ndarray[nfreq, ninput, ntime]
        The autocorrelations that were used to calculate
        the number of deviations.
    auto_flag: np.ndarray[nfreq, ninput, ntime]
        Indices where data weights are positive
    """
    # Extract the auto correlations: products where an input is paired
    # with itself.  auto_ii are the input indices, auto_pi the product
    # indices into the vis array.
    prod = data.index_map["prod"][data.index_map["stack"]["prod"]]
    auto_ii, auto_pi = np.array(
        list(zip(*[(pp[0], ind) for ind, pp in enumerate(prod) if pp[0] == pp[1]]))
    )

    auto_vis = data.vis[:, auto_pi, :].real.copy()

    # If requested, average over all inputs to construct the stacked
    # autocorrelations for the instrument (also known as the incoherent beam)
    if stack:
        # Weight each auto by whether its data weight is positive.
        weight = (data.weight[:, auto_pi, :] > 0.0).astype(np.float32)

        # Do not include bad inputs in the average.  input_flags can only
        # be trusted when the dataset has not been partially stacked.
        partial_stack = data.index_map["stack"].size < data.index_map["prod"].size

        if not partial_stack and hasattr(data, "input_flags"):
            input_flags = data.input_flags[:]
            logger.info(
                "There are on average %d good inputs."
                % np.mean(np.sum(input_flags, axis=0), axis=-1)
            )

            # Only apply when flags carry information (not all-good/all-bad).
            if np.any(input_flags) and not np.all(input_flags):
                logger.info("Applying input_flags to weight.")
                weight *= input_flags[np.newaxis, auto_ii, :].astype(weight.dtype)

        if normalize:
            # Divide each auto by its median over time (ignoring flagged
            # samples) so all inputs contribute at a comparable level.
            logger.info("Normalizing autocorrelations prior to stacking.")
            med_auto = nanmedian(
                np.where(weight, auto_vis, np.nan), axis=-1, keepdims=True
            )
            med_auto = np.where(np.isfinite(med_auto), med_auto, 0.0)
            auto_vis *= tools.invert_no_zero(med_auto)

        # Weighted average over inputs; invert_no_zero avoids division by
        # zero where no input contributed.
        norm = np.sum(weight, axis=1, keepdims=True)

        auto_vis = np.sum(
            weight * auto_vis, axis=1, keepdims=True
        ) * tools.invert_no_zero(norm)

        auto_flag = norm > 0.0
        auto_ii = np.zeros(1, dtype=int)

    else:
        auto_flag = data.weight[:, auto_pi, :] > 0.0

    return auto_ii, auto_vis, auto_flag
+ + + +
def spectral_cut(data, fil_window=15, only_autos=False):
    """Flag out the TV bands, or other constant spectral RFI.

    Parameters
    ----------
    data : `andata.obj`
        If `only_autos` shape is (freq, n_feeds, time), else (freq, n_prod,
        time).
    fil_window : integer
        Window of median filter for baseline of chime spectrum. Default is 15.
    only_autos : boolean
        Whether data contains only autos or not.

    Returns
    -------
    mask: np.ndarray[freq,time]
        RFI mask (no product axis).

    Raises
    ------
    TypeError
        If `data` has neither a `time` nor an `ra` axis (consistent with
        `number_deviations`; previously this surfaced as a NameError).
    """
    if only_autos:
        data_vis = data.vis[:].real
    else:
        # Recover the number of feeds from nprod = nfeed * (nfeed + 1) / 2
        # and select only the autocorrelation products.
        nfeed = int((2 * data.vis.shape[1]) ** 0.5)
        auto_ind = [tools.cmap(i, i, nfeed) for i in range(nfeed)]
        data_vis = data.vis[:, auto_ind].real

    # Incoherent beam: average over feeds, then over time.
    stack_autos = np.mean(data_vis, axis=1)
    stack_autos_time_ave = np.mean(stack_autos, axis=-1)

    # Reference time used to select the time-dependent static flags.
    if "time" in data.index_map:
        timestamp = data.time[0]
    elif "ra" in data.index_map:
        timestamp = ephemeris.csd_to_unix(data.attrs["lsd"])
    else:
        raise TypeError(
            f"Expected data type with a `time` or `ra` axis. Got {type(data)}."
        )

    # Locations of the generally decent frequency bands
    drawn_bool_mask = frequency_mask(data.freq[:], timestamp=timestamp)
    good_data = np.logical_not(drawn_bool_mask)

    # Calculate standard deviation of the average channel
    std_arr = np.std(stack_autos, axis=-1)
    sigma = np.median(std_arr) / np.sqrt(
        stack_autos.shape[1]
    )  # standard deviation of the mean

    # Smooth with a median filter, and then interpolate to estimate the
    # baseline of the spectrum
    fa = np.arange(data_vis.shape[0])
    medfilt = sig.medfilt(stack_autos_time_ave[good_data], fil_window)
    interpolat_arr_baseline = np.interp(fa, fa[good_data], medfilt)
    rel_pow = stack_autos_time_ave - interpolat_arr_baseline

    # Mask out frequencies with too much power above the baseline
    mask_1d = rel_pow > 10 * sigma

    # Broadcast the 1D frequency mask over the time axis
    mask = np.zeros((data_vis.shape[0], data_vis.shape[2]), dtype=bool)
    mask[:] = mask_1d[:, None]

    return mask
+ + + +
def frequency_mask(
    freq_centre: np.ndarray,
    freq_width: Optional[Union[np.ndarray, float]] = None,
    timestamp: Optional[Union[np.ndarray, float]] = None,
    instrument: Optional[str] = "chime",
) -> np.ndarray:
    """Flag known bad frequencies.

    Time dependent static RFI flags that affect the recent observations
    are included.

    Parameters
    ----------
    freq_centre
        Centre of each frequency channel.
    freq_width
        Width of each frequency channel.  If `None` (default) the width is
        inferred from the median separation of the frequency centres.  If
        supplied as an array it must be broadcastable against `freq_centre`.
    timestamp
        UNIX observing time.  If `None` (default) every listed band is
        masked regardless of its start/end times; otherwise only timestamps
        inside a band's validity window are masked.  If supplied as an
        array it must be broadcastable against `freq_centre`.
    instrument
        Telescope name. [kko, gbo, hco, chime (default)]

    Returns
    -------
    mask
        Boolean array marking the bad frequency channels.  The final shape
        is the result of broadcasting `freq_centre` and `timestamp`
        together.

    Raises
    ------
    ValueError
        If no flags are defined for `instrument`.
    """
    if freq_width is None:
        freq_width = np.abs(np.median(np.diff(freq_centre)))

    # Channel edges.
    half_width = freq_width / 2
    chan_lo = freq_centre - half_width
    chan_hi = freq_centre + half_width

    try:
        bad_bands = BAD_FREQUENCIES[instrument]
    except KeyError as e:
        raise ValueError(f"No RFI flags defined for {instrument}") from e

    # Broadcast to get the output mask shape.
    mask = np.zeros(np.broadcast(freq_centre, timestamp).shape, dtype=bool)

    for (t_start, t_end), (f_lo, f_hi) in bad_bands:
        # A channel is flagged if it overlaps the bad band at all.
        overlaps = (chan_hi > f_lo) & (chan_lo < f_hi)

        if timestamp is None:
            # No timestamp: the band applies unconditionally.
            in_window = True
        else:
            # Restrict to the band's validity window, where bounded.
            in_window = np.ones_like(timestamp, dtype=bool)
            if t_start is not None:
                in_window &= timestamp >= t_start
            if t_end is not None:
                in_window &= timestamp <= t_end

        mask |= in_window & overlaps

    return mask
+ + + +
def mad_cut_2d(data, fwidth=64, twidth=42, threshold=5.0, freq_flat=True, mask=True):
    """Mask out RFI using a median absolute deviation cut in time-frequency blocks.

    Parameters
    ----------
    data : np.ndarray[freq, time]
        Array of data to mask.  If `freq_flat` is True this array is
        modified in place (divided by its per-frequency median).
    fwidth : integer, optional
        Number of frequency samples to average median over.
    twidth : integer, optional
        Number of time samples to average median over.
    threshold : scalar, optional
        Number of median deviations above which we cut the data.
    freq_flat : boolean, optional
        Flatten in the frequency direction by dividing through by the median.
    mask : boolean, optional
        If True return the mask, if False return the number of
        median absolute deviations.

    Returns
    -------
    mask : np.ndarray[freq, time]
        Mask or number of median absolute deviations for each sample.
    """
    # Fall back to the NaN-aware median only when NaNs are present.
    median = nanmedian if np.any(~np.isfinite(data)) else np.median

    nfreq, ntime = data.shape

    out = np.ones(data.shape, dtype="bool" if mask else np.float64)

    if freq_flat:
        # Flatten: divide each row by its median over time (in place).
        data *= tools.invert_no_zero(median(data, axis=1))[:, np.newaxis]

    # Walk the array in (fwidth x twidth) tiles; partial tiles at the
    # edges are handled by clipping the end indices.  (Reshaping into
    # blocks would be faster but only works for exact multiples.)
    for f_start in range(0, nfreq, fwidth):
        f_stop = min(f_start + fwidth, nfreq)

        for t_start in range(0, ntime, twidth):
            t_stop = min(t_start + twidth, ntime)

            tile = data[f_start:f_stop, t_start:t_stop]

            centre = median(tile.flatten())
            dev = tile - centre
            scale = tools.invert_no_zero(median(np.abs(dev).flatten()))

            if mask:
                out[f_start:f_stop, t_start:t_stop] = (np.abs(dev) * scale) > threshold
            else:
                out[f_start:f_stop, t_start:t_stop] = dev * scale

    return out
+ + + +
def mad_cut_1d(data, twidth=42, threshold=5.0, mask=True):
    """Mask out RFI using a median absolute deviation cut in the time direction.

    This is useful for datasets with sparse frequency coverage.  It is
    functionally equivalent to :func:`mad_cut_2d` with `fwidth = 1`, but
    much faster.

    Parameters
    ----------
    data : np.ndarray[freq, time]
        Array of data to mask.
    twidth : integer, optional
        Number of time samples to average median over.
    threshold : scalar, optional
        Number of median deviations above which we cut the data.
    mask : boolean, optional
        If True return the mask, if False return the number of
        median absolute deviations.

    Returns
    -------
    mask : np.ndarray[freq, time]
        Mask or number of median absolute deviations for each sample.
    """
    # Fall back to the NaN-aware median only when NaNs are present.
    median = nanmedian if np.any(~np.isfinite(data)) else np.median

    ntime = data.shape[1]

    out = np.ones(data.shape, dtype="bool" if mask else np.float64)

    # Walk the time axis in chunks of `twidth`; the final chunk may be
    # shorter.
    for t_start in range(0, ntime, twidth):
        t_stop = min(t_start + twidth, ntime)

        chunk = data[:, t_start:t_stop]

        centre = median(chunk, axis=1)
        dev = chunk - centre[:, np.newaxis]
        scale = tools.invert_no_zero(median(np.abs(dev), axis=1)[:, np.newaxis])

        if mask:
            out[:, t_start:t_stop] = (np.abs(dev) * scale) > threshold
        else:
            out[:, t_start:t_stop] = dev * scale

    return out
+ + + +# Define several functions for creating 2D rolling window +def _rolling_window_lastaxis(a, window): + shape = a.shape[:-1] + (a.shape[-1] - window + 1, window) + strides = a.strides + (a.strides[-1],) + return np.lib.stride_tricks.as_strided(a, shape=shape, strides=strides) + + +def _rolling_window(a, window): + if not hasattr(window, "__iter__"): + return _rolling_window_lastaxis(a, window) + for i, win in enumerate(window): + a = a.swapaxes(i, -1) + a = _rolling_window_lastaxis(a, win) + a = a.swapaxes(-2, i) + return a + + +
def mad_cut_rolling(
    data,
    fwidth=64,
    twidth=42,
    threshold=5.0,
    freq_flat=True,
    mask=True,
    limit_range: slice = slice(None),
):
    """Mask out RFI by placing a cut on the absolute deviation.

    Compared to `mad_cut_2d`, this function calculates the median and
    median absolute deviation using a rolling 2D median filter, i.e., for
    every (freq, time) sample a separate estimate of these statistics is
    obtained for a window that is centered on that sample.

    For sparsely sampled frequency axis, set fwidth = 1.

    Parameters
    ----------
    data : np.ndarray[freq, time]
        Array of data to mask.  Modified in place when `freq_flat` is True.
    fwidth : integer, optional
        Number of frequency samples to calculate median over.
    twidth : integer, optional
        Number of time samples to calculate median over.
    threshold : scalar, optional
        Number of median absolute deviations above which we cut the data.
    freq_flat : boolean, optional
        Flatten in the frequency direction by dividing each frequency
        by the median over time.
    mask : boolean, optional
        If True return the mask, if False return the number of
        median absolute deviations.
    limit_range : slice, optional
        Data is limited to this range in the frequency axis. Defaults to
        slice(None).  NOTE(review): when not slice(None), `start` and
        `stop` are assumed to be non-None integers — confirm callers
        (e.g. number_deviations) always pass explicit bounds.

    Returns
    -------
    mask : np.ndarray[freq, time]
        Mask or number of median absolute deviations for each sample.
    """
    # Make sure we have an odd number of samples so windows are centred.
    fwidth += int(not (fwidth % 2))
    twidth += int(not (twidth % 2))

    foff = fwidth // 2
    toff = twidth // 2

    nfreq, ntime = data.shape

    # If requested, flatten over the frequency direction (in place).
    if freq_flat:
        mfd = tools.invert_no_zero(nanmedian(data, axis=1))
        data *= mfd[:, np.newaxis]

    # Add NaNs around the edges of the array so that we don't have to
    # treat the boundary windows separately (nanmedian ignores them).
    eshp = [nfreq + fwidth - 1, ntime + twidth - 1]
    exp_data = np.full(eshp, np.nan, dtype=data.dtype)
    exp_data[foff : foff + nfreq, toff : toff + ntime] = data

    if limit_range != slice(None):
        # Restrict to the requested frequency range, keeping the extra
        # 2*foff padded rows needed by the rolling window.
        expsl = slice(
            max(limit_range.start, 0),
            min(limit_range.stop + 2 * foff, exp_data.shape[0]),
        )
        dsl = slice(max(limit_range.start, 0), min(limit_range.stop, data.shape[0]))
        exp_data = exp_data[expsl, :]
        data = data[dsl, :]

    # Use numpy stride tricks to construct the rolling windowed data
    # (zero-copy view with two extra trailing window axes).
    win_data = _rolling_window(exp_data, (fwidth, twidth))

    # Compute the local median and median absolute deviation per sample.
    med = nanmedian(win_data, axis=(-2, -1))
    med_abs_dev = nanmedian(
        np.abs(win_data - med[..., np.newaxis, np.newaxis]), axis=(-2, -1)
    )

    inv_med_abs_dev = tools.invert_no_zero(med_abs_dev)

    # Calculate and return the mask or the number of median absolute deviations
    if mask:
        madmask = (np.abs(data - med) * inv_med_abs_dev) > threshold
    else:
        madmask = (data - med) * inv_med_abs_dev

    return madmask
def nanmedian(*args, **kwargs):
    """Call :func:`np.nanmedian` with the all-NaN RuntimeWarning suppressed.

    The NaN padding used by :func:`mad_cut_rolling` routinely produces
    all-NaN windows, so that warning is expected noise here.
    """
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", r"All-NaN (slice|axis) encountered")
        result = np.nanmedian(*args, **kwargs)
    return result


# Iterative HPF masking for identifying narrow-band features in gains or spectra
def highpass_delay_filter(freq, tau_cut, flag, epsilon=1e-10):
    """Construct a high-pass delay filter.

    The stop band will range from [-tau_cut, tau_cut].  DAYENU is used to
    construct the filter in the presence of masked frequencies.  See
    Ewall-Wice et al. 2021 (arXiv:2004.11397) for a description.

    Parameters
    ----------
    freq : np.ndarray[nfreq,]
        Frequency in MHz.
    tau_cut : float
        The half width of the stop band in micro-seconds.
    flag : np.ndarray[nfreq,]
        Boolean flag that indicates what frequencies are valid.
    epsilon : float
        The stop-band rejection of the filter.

    Returns
    -------
    pinv : np.ndarray[nfreq, nfreq]
        High pass delay filter.

    Raises
    ------
    ValueError
        If `flag` is not 1D with the same size as `freq`.  (Previously an
        `assert`, which is stripped when running under ``python -O``.)
    """
    nfreq = freq.size
    if flag.ndim != 1 or flag.size != nfreq:
        raise ValueError(
            f"flag must be 1D with size {nfreq}, got shape {flag.shape}"
        )

    # Outer product of the flags: only pairs of unmasked channels
    # contribute to (and survive in) the filter.
    mflag = (flag[:, np.newaxis] & flag[np.newaxis, :]).astype(np.float64)

    # Covariance model: identity (noise) plus a sinc kernel — a boxcar of
    # half-width tau_cut in delay space — boosted by 1/epsilon so the stop
    # band is strongly rejected by the pseudo-inverse.
    cov = np.eye(nfreq, dtype=np.float64)
    cov += (
        np.sinc(2.0 * tau_cut * (freq[:, np.newaxis] - freq[np.newaxis, :])) / epsilon
    )

    # Masked pseudo-inverse; cov * mflag is symmetric, so hermitian=True.
    pinv = np.linalg.pinv(cov * mflag, hermitian=True) * mflag

    return pinv
+ + + +
+[docs] +def iterative_hpf_masking( + freq, + y, + flag=None, + tau_cut=0.60, + epsilon=1e-10, + window=65, + threshold=6.0, + nperiter=1, + niter=40, + timestamp=None, +): + """Mask features in a spectrum that have significant power at high delays. + + Uses the following iterative procedure to generate the mask: + + - Apply a high-pass filter to the spectrum. + - For each frequency channel, calculate the median absolute + deviation of nearby frequency channels to get an estimate + of the noise. Divide the high-pass filtered spectrum by + the noise estimate. + - Mask excursions with the largest signal to noise. + - Regenerate the high-pass filter using the new mask. + - Repeat. + + The procedure stops when the maximum number of iterations is reached + or there are no excursions beyond some threshold. + + Parameters + ---------- + freq: np.ndarray[nfreq,] + Frequency in MHz. + y: np.ndarray[nfreq,] + Spectrum to search for narrowband features. + flag: np.ndarray[nfreq,] + Boolean flag where True indicates valid data. + tau_cut: float + Cutoff of the high-pass filter in microseconds. + epsilon: float + Stop-band rejection of the filter. + threshold: float + Number of median absolute deviations beyond which + a frequency channel is considered an outlier. + window: int + Width of the window used to estimate the noise + (by calculating a local median absolute deviation). + nperiter: int + Maximum number of frequency channels to flag + on any iteration. + niter: int + Maximum number of iterations. + timestamp : float + Start observing time (in unix time) + + Returns + ------- + yhpf: np.ndarray[nfreq,] + The high-pass filtered spectrum generated using + the mask from the last iteration. + flag: np.ndarray[nfreq,] + Boolean flag where True indicates valid data. + This is the logical complement to the mask + from the last iteration. + rsigma: np.ndarray[nfreq,] + The local median absolute deviation from the last + iteration. 
+ """ + + from caput import weighted_median + + assert y.ndim == 1 + + # Make sure the frequencies are float64, otherwise + # can have problems with construction of filter + freq = freq.astype(np.float64) + + # Make sure the size of the window is odd + window = window + int(not (window % 2)) + + # If an initial flag was not provided, then use the static rfi mask. + if flag is None: + flag = ~frequency_mask(freq, timestamp=timestamp) + + # We will be updating the flags on each iteration. Make a copy of + # the input so that we do not overwrite. + new_flag = flag.copy() + + # Iterate + itt = 0 + while itt < niter: + # Construct the filter using the current mask + NF = highpass_delay_filter(freq, tau_cut, new_flag, epsilon=epsilon) + + # Apply the filter + yhpf = np.matmul(NF, y) + + # Calculate the local median absolute deviation + ry = np.ascontiguousarray(yhpf.astype(np.float64)) + w = np.ascontiguousarray(new_flag.astype(np.float64)) + + rsigma = 1.48625 * weighted_median.moving_weighted_median( + np.abs(ry), w, window, method="split" + ) + + # Calculate the signal to noise + rs2n = np.abs(yhpf * tools.invert_no_zero(rsigma)) + + # Identify frequency channels that are above the signal to noise threshold + above_threshold = np.flatnonzero(rs2n > threshold) + + if above_threshold.size == 0: + break + + # Find the largest nperiter frequency channels that are above the threshold + ibad = above_threshold[np.argsort(-np.abs(yhpf[above_threshold]))[0:nperiter]] + + # Flag those frequency channels, increment the counter + new_flag[ibad] = False + + itt += 1 + + # Construct and apply the filter using the final flag + NF = highpass_delay_filter(freq, tau_cut, new_flag, epsilon=epsilon) + + yhpf = np.matmul(NF, y) + + return yhpf, new_flag, rsigma
+ + + +# Scale-invariant rank (SIR) functions +
+[docs] +def sir1d(basemask, eta=0.2): + """Numpy implementation of the scale-invariant rank (SIR) operator. + + For more information, see arXiv:1201.3364v2. + + Parameters + ---------- + basemask : numpy 1D array of boolean type + Array with the threshold mask previously generated. + 1 (True) for flagged points, 0 (False) otherwise. + eta : float + Aggressiveness of the method: with eta=0, no additional samples are + flagged and the function returns basemask. With eta=1, all samples + will be flagged. The authors in arXiv:1201.3364v2 seem to be convinced + that 0.2 is a mostly universally optimal value, but no optimization + has been done on CHIME data. + + Returns + ------- + mask : numpy 1D array of boolean type + The mask after the application of the (SIR) operator. Same shape and + type as basemask. + """ + n = basemask.size + psi = basemask.astype(np.float64) - 1.0 + eta + + M = np.zeros(n + 1, dtype=np.float64) + M[1:] = np.cumsum(psi) + + MP = np.minimum.accumulate(M)[:-1] + MQ = np.concatenate((np.maximum.accumulate(M[-2::-1])[-2::-1], M[-1, np.newaxis])) + + return (MQ - MP) >= 0.0
+ + + +
+[docs] +def sir(basemask, eta=0.2, only_freq=False, only_time=False): + """Apply the SIR operator over the frequency and time axes for each product. + + This is a wrapper for `sir1d`. It loops over times, applying `sir1d` + across the frequency axis. It then loops over frequencies, applying `sir1d` + across the time axis. It returns the logical OR of these two masks. + + Parameters + ---------- + basemask : np.ndarray[nfreq, nprod, ntime] of boolean type + The previously generated threshold mask. + 1 (True) for masked points, 0 (False) otherwise. + eta : float + Aggressiveness of the method: with eta=0, no additional samples are + flagged and the function returns basemask. With eta=1, all samples + will be flagged. + only_freq : bool + Only apply the SIR operator across the frequency axis. + only_time : bool + Only apply the SIR operator across the time axis. + + Returns + ------- + mask : np.ndarray[nfreq, nprod, ntime] of boolean type + The mask after the application of the SIR operator. + """ + if only_freq and only_time: + raise ValueError("Only one of only_freq and only_time can be True.") + + nfreq, nprod, ntime = basemask.shape + + newmask = basemask.astype(bool).copy() + + for pp in range(nprod): + if not only_time: + for tt in range(ntime): + newmask[:, pp, tt] |= sir1d(basemask[:, pp, tt], eta=eta) + + if not only_freq: + for ff in range(nfreq): + newmask[ff, pp, :] |= sir1d(basemask[ff, pp, :], eta=eta) + + return newmask
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/_modules/ch_util/timing.html b/docs/_modules/ch_util/timing.html new file mode 100644 index 00000000..09c761c0 --- /dev/null +++ b/docs/_modules/ch_util/timing.html @@ -0,0 +1,2840 @@ + + + + + + ch_util.timing — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for ch_util.timing

+"""Tools for timing jitter and delay corrections.
+
+This module contains tools for using noise sources to correct
+timing jitter and timing delay.
+
+
+Example
+=======
+
+The function :meth:`construct_delay_template` generates a delay template from
+measurements of the visibility between noise source inputs, which can
+be used to remove the timing jitter in other data.
+
+The user seldom needs to work with :meth:`construct_delay_template`
+directly and can instead use several high-level functions and containers
+that load the timing data, derive the timing correction using
+:meth:`construct_delay_template`, and then enable easy application of
+the timing correction to other data.
+
+For example, to load the timing data and derive the timing correction from
+a list of timing acquisition files (i.e., `YYYYMMSSTHHMMSSZ_chimetiming_corr`),
+use the following:
+
+    ```tdata = TimingData.from_acq_h5(timing_acq_filenames)```
+
+This results in a :class:`andata.CorrData` object that has additional
+methods avaiable for applying the timing correction to other data.
+For example, to obtain the complex gain for some freq, input, and time
+that upon multiplication will remove the timing jitter, use the following:
+
+    ```tgain, tweight = tdata.get_gain(freq, input, time)```
+
+To apply the timing correction to the visibilities in an :class:`andata.CorrData`
+object called `data`, use the following:
+
+    ```tdata.apply_timing_correction(data)```
+
+The timing acquisitions must cover the span of time that you wish to correct.
+If you have a list of data acquisition files and would like to obtain
+the appropriate timing correction by searching the archive for the
+corresponding timing acquisitons files, then use:
+
+    ```tdata = load_timing_correction(data_acq_filenames_full_path)```
+
+To print a summary of the timing correction, use:
+
+    ```print(tdata)```
+
+"""
+
+import os
+import glob
+import numpy as np
+import inspect
+import logging
+import gc
+
+import scipy.interpolate
+import scipy.optimize
+
+from . import tools, andata, rfi
+from caput import memh5, mpiarray, tod
+import caput.time as ctime
+
+FREQ_TO_OMEGA = 2.0 * np.pi * 1e-6
+FREQ_PIVOT = 600.0
+
+AXES = ["freq", "noise_source", "input", "time", "param"]
+
+DSET_SPEC = {
+    "tau": {"axis": ["noise_source", "time"], "flag": False},
+    "alpha": {"axis": ["noise_source", "time"], "flag": False},
+    "weight_tau": {"axis": ["noise_source", "time"], "flag": True},
+    "weight_alpha": {"axis": ["noise_source", "time"], "flag": True},
+    "static_phi": {"axis": ["freq", "noise_source"], "flag": False},
+    "static_amp": {"axis": ["freq", "noise_source"], "flag": False},
+    "weight_static_phi": {"axis": ["freq", "noise_source"], "flag": True},
+    "weight_static_amp": {"axis": ["freq", "noise_source"], "flag": True},
+    "static_phi_fit": {"axis": ["param", "noise_source"], "flag": False},
+    "num_freq": {"axis": ["noise_source", "time"], "flag": True},
+    "phi": {"axis": ["freq", "noise_source", "time"], "flag": False},
+    "amp": {"axis": ["freq", "noise_source", "time"], "flag": False},
+    "weight_phi": {"axis": ["freq", "noise_source", "time"], "flag": True},
+    "weight_amp": {"axis": ["freq", "noise_source", "time"], "flag": True},
+    "coeff_tau": {"axis": ["input", "noise_source"], "flag": False},
+    "coeff_alpha": {"axis": ["input", "noise_source"], "flag": False},
+    "reference_noise_source": {"axis": ["input"], "flag": False},
+}
+
+# Set up logging
+logger = logging.getLogger(__name__)
+logger.addHandler(logging.NullHandler())
+
+
+
+[docs] +class TimingCorrection(andata.BaseData): + """ + Container that holds a timing correction. + + Provides methods for applying that correction to other datasets. + """ + +
+[docs] + @classmethod + def from_dict(self, **kwargs): + """ + Instantiate a TimingCorrection object. + + Parameters + ---------- + freq: np.ndarray[nfreq, ] of dtype=('centre', 'width') + Frequencies in MHz that were used to construct the timing correction. + noise_source: np.ndarray[nsource,] of dtype=('chan_id', 'correlator_input') + Correlator inputs that were used to construct the timing correction. + input: np.ndarray[ninput, ] of dtype=('chan_id', 'correlator_input') + Correlator inputs to which the timing correction will be applied. + time: np.ndarray[ntime, ] + Unix time. + param: np.ndarray[nparam, ] + Parameters of the model fit to the static phase versus frequency. + tau: np.ndarray[nsource, ntime] + The actual timing correction, which is the relative delay of each of the + noise source inputs with respect to a reference input versus time. + weight_tau: np.ndarray[nsource, ntime] + Estimate of the uncertainty (inverse variance) on the timing correction. + static_phi: np.ndarray[nfreq, nsource] + The phase that was subtracted from each frequency and input prior to + fitting for the timing correction. This is necessary to remove the + approximately static ripple pattern caused by reflections. + weight_static_phi: np.ndarray[nfreq, nsource] + Inverse variance on static_phi. + static_phi_fit: np.ndarray[nparam, nsource] + Best-fit parameters of a fit to the static phase versus frequency + for each of the noise source inputs. + alpha: np.ndarray[nsource, ntime] + The coefficient of the spectral model of the amplitude variations of + each of the noise source inputs versus time. + weight_alpha: np.ndarray[nsource, ntime] + Estimate of the uncertainty (inverse variance) on the amplitude coefficients. + static_amp: np.ndarray[nfreq, nsource] + The amplitude that was subtracted from each frequency and input prior to + fitting for the amplitude variations. This is necessary to remove the + approximately static ripple pattern caused by reflections. 
+ weight_static_amp: np.ndarray[nfreq, nsource] + Inverse variance on static_amp. + num_freq: np.ndarray[nsource, ntime] + The number of frequencies used to determine the delay and alpha quantities. + If num_freq is 0, then that time is ignored when deriving the timing correction. + coeff_tau: np.ndarray[ninput, nsource] + If coeff is provided, then the timing correction applied to a particular + input will be the linear combination of the tau correction from the + noise source inputs, with the coefficients set by this array. + coeff_alpha: np.ndarray[ninput, nsource] + If coeff is provided, then the timing correction applied to a particular + input will be adjusted by the linear combination of the alpha correction + from the noise source inputs, with the coefficients set by this array. + reference_noise_source: np.ndarray[ninput] + The noise source input that was used as reference when fitting coeff_tau. + """ + index_map = {key: kwargs.pop(key) for key in AXES if key in kwargs} + datasets = {key: kwargs.pop(key) for key in DSET_SPEC.keys() if key in kwargs} + + # Run base initialiser + tcorr = TimingCorrection(**kwargs) + + # Create index maps + for name, data in index_map.items(): + tcorr.create_index_map(name, data) + + # Create datasets + for name, data in datasets.items(): + if data is None: + continue + spec = DSET_SPEC[name] + if spec["flag"]: + dset = tcorr.create_flag(name, data=data) + else: + dset = tcorr.create_dataset(name, data=data) + + dset.attrs["axis"] = np.array(spec["axis"], dtype=np.string_) + + return tcorr
+ + + @classmethod + def _interpret_and_read(cls, acq_files, start, stop, datasets, out_group): + # Instantiate an object for each file + objs = [cls.from_file(d, ondisk=False) for d in acq_files] + + # Reference all dynamic datasets to the static quantities + # defined in the first file + iref = 0 + + freq = objs[iref].freq + + # Determine the overall delay offset relative to the reference file + phi = np.stack([obj.static_phi[:] for obj in objs], axis=-1) + weight = np.stack([obj.weight_static_phi[:] for obj in objs], axis=-1) + + phi_ref = phi[..., iref, np.newaxis] + weight_ref = weight[..., iref, np.newaxis] + + flag = (weight > 0.0) & (weight_ref > 0.0) + err = np.sqrt(tools.invert_no_zero(weight) + tools.invert_no_zero(weight_ref)) + err *= flag.astype(err.dtype) + + dphi = phi - phi_ref + + for tt, obj in enumerate(objs): + for nn in range(dphi.shape[1]): + if np.sum(flag[:, nn, tt], dtype=int) > 2: + # Fit the difference in the static phase between this file and the + # reference file to a linear relationship with frequency. 
Uses + # nonlinear-least-squares that is insensitive to phase wrapping + param = fit_poly_to_phase( + freq, np.exp(1.0j * dphi[:, nn, tt]), err[:, nn, tt], nparam=2 + )[0] + + # Add the best-fit slope to the delay template for this file + obj.tau[nn, :] += param[1] + + # Determine the overall amplitude offset relative to the reference file + amp = np.stack([obj.static_amp[:] for obj in objs], axis=-1) + weight = np.stack([obj.weight_static_amp[:] for obj in objs], axis=-1) + + amp_ref = amp[..., iref, np.newaxis] + weight_ref = weight[..., iref, np.newaxis] + + flag = (weight > 0.0) & (weight_ref > 0.0) + weight = tools.invert_no_zero( + tools.invert_no_zero(weight) + tools.invert_no_zero(weight_ref) + ) + weight *= flag.astype(weight.dtype) + + damp = amp - amp_ref + + asc = amp_ref * _amplitude_scaling(freq[:, np.newaxis, np.newaxis]) + + alpha = np.sum(weight * asc * damp, axis=0) * tools.invert_no_zero( + np.sum(weight * asc**2, axis=0) + ) + + for tt, obj in enumerate(objs): + # Add the offset to the amplitude template for this file + obj.alpha[:] += alpha[:, tt, np.newaxis] + + # Now concatenate the files. Dynamic datasets will be concatenated. + # Static datasets will be extracted from the first file. + data = tod.concatenate( + objs, out_group=out_group, start=start, stop=stop, datasets=datasets + ) + + return data + + @property + def freq(self): + """Provide convenience access to the frequency bin centres.""" + return self.index_map["freq"]["centre"] + + @property + def noise_source(self): + """Provide convenience access to the noise source inputs. + + Note that in older versions of the timing correction, the + noise_source axis does not exist. Instead, the equivalent + quantity is labeled as input. Since the addition of the + coeff dataset it has become necessary to distinguish between the + noise source inputs from which the timing correction is derived + and the correlator inputs to which the timing correction is applied. 
+ """ + key = "noise_source" if "noise_source" in self.index_map else "input" + return self.index_map[key] + + @property + def nsource(self): + """Provide convenience access to the number of noise source inputs.""" + return self.noise_source.size + + @property + def input(self): + """Provide convenience access to the correlator inputs.""" + return self.index_map["input"] + + @property + def tau(self): + """Provide convenience access to the tau array.""" + return self.datasets["tau"] + + @property + def weight_tau(self): + """Provide convenience access to the weight_tau array.""" + if "weight_tau" not in self.flags: + # weight_tau does not exist. This is the case for timing + # corrections generated with older versions of the code. + # Create a default weight_tau dataset and return that. + if self.has_num_freq: + weight_tau = (self.num_freq[:] > 0).astype(np.float32) + else: + weight_tau = np.ones_like(self.tau[:]) + + dset = self.create_flag("weight_tau", data=weight_tau) + dset.attrs["axis"] = np.array( + DSET_SPEC["weight_tau"]["axis"], dtype=np.string_ + ) + + return self.flags["weight_tau"] + + @property + def static_phi(self): + """Provide convenience access to the static_phi array.""" + return self.datasets["static_phi"] + + @property + def weight_static_phi(self): + """Provide convenience access to the weight_static_phi array.""" + return self.flags["weight_static_phi"] + + @property + def static_phi_fit(self): + """Provide convenience access to the static_phi_fit array.""" + return self.datasets["static_phi_fit"] + + @property + def alpha(self): + """Provide convenience access to the alpha array.""" + return self.datasets["alpha"] + + @property + def weight_alpha(self): + """Provide convenience access to the weight_alpha array.""" + if "weight_alpha" not in self.flags: + # weight_alpha does not exist. This is the case for timing + # corrections generated with older versions of the code. + # Create a default weight_alpha dataset and return that. 
+ if self.has_num_freq: + weight_alpha = (self.num_freq[:] > 0).astype(np.float32) + else: + weight_alpha = np.ones_like(self.alpha[:]) + + scale = (self.amp_to_delay or 1.0) ** 2 + weight_alpha *= scale + + dset = self.create_flag("weight_alpha", data=weight_alpha) + dset.attrs["axis"] = np.array( + DSET_SPEC["weight_alpha"]["axis"], dtype=np.string_ + ) + + return self.flags["weight_alpha"] + + @property + def static_amp(self): + """Provide convenience access to the static_amp array.""" + return self.datasets["static_amp"] + + @property + def weight_static_amp(self): + """Provide convenience access to the weight_static_amp array.""" + return self.flags["weight_static_amp"] + + @property + def num_freq(self): + """Provide convenience access to the num_freq array.""" + return self.flags["num_freq"] + + @property + def has_num_freq(self): + """Inidicates if there is a num_freq flag that identifies missing data.""" + return "num_freq" in self.flags + + @property + def coeff_tau(self): + """Provide convenience access to the coeff_tau array.""" + return self.datasets["coeff_tau"] + + @property + def has_coeff_tau(self): + """Indicates if there are valid coeff that map noise source tau to inputs.""" + return ( + "coeff_tau" in self.datasets + and "noise_source" in self.index_map + and "input" in self.index_map + ) + + @property + def coeff_alpha(self): + """Provide convenience access to the coeff_alpha array.""" + return self.datasets["coeff_alpha"] + + @property + def has_coeff_alpha(self): + """Indicates if there are valid coeff that map noise source alpha to inputs.""" + return ( + "coeff_alpha" in self.datasets + and "noise_source" in self.index_map + and "input" in self.index_map + ) + + @property + def amp_to_delay(self): + """Return conversion from noise source amplitude variations to delay variations.""" + return self.attrs.get("amp_to_delay", None) + + @amp_to_delay.setter + def amp_to_delay(self, val): + """Sets the conversion from noise source amplitude 
variations to delay variations. + + Note that setting this quantity will result in the following modification to the + timing correction: tau --> tau - amp_to_delay * alpha. This can be used to remove + variations introduced by the noise source distribution system from the timing correction + using the amplitude variations as a proxy for temperature. + """ + if self.has_coeff_alpha: + raise AttributeError( + "The amplitude variations are already being used to " + "correct the delay variations through the coeff_alpha dataset." + ) + elif val is not None: + self.attrs["amp_to_delay"] = val + + @amp_to_delay.deleter + def amp_to_delay(self): + """Remove any conversion from noise source amplitude variations to delay variations.""" + if "amp_to_delay" in self.attrs: + del self.attrs["amp_to_delay"] + + @property + def has_amplitude(self): + """Determine if this timing correction contains amplitude data.""" + return "alpha" in self.datasets + + @property + def reference_noise_source(self): + """Return the index of the reference noise source.""" + if "reference_noise_source" in self.datasets: + iref = self.datasets["reference_noise_source"][:] + return iref if np.unique(iref).size > 1 else iref[0] + else: + return self.zero_delay_noise_source + + @property + def zero_delay_noise_source(self): + """Return the index of the noise source with zero delay.""" + zero_tau = np.flatnonzero(np.all(np.abs(self.tau[:]) < 1e-5, axis=-1)) + if zero_tau.size == 0: + raise AttributeError( + "Could not determine which input the delay template " + "is referenced with respect to." + ) + else: + return zero_tau[0] + +
+[docs] + def set_coeff( + self, + coeff_tau, + inputs, + noise_source, + coeff_alpha=None, + reference_noise_source=None, + ): + """Use coefficients to construct timing correction. + + Setting the coefficients changes how the timing corretion for a particular + correlator input is derived. Without coefficients, each input is matched + to the timing correction from a single noise source input through the + map_input_to_noise_source method. With coefficients, each input is a + linear combination of the timing correction from all noise source inputs. + + Parameters + ---------- + coeff_tau: np.ndarray[ninput, nsource] + The timing correction applied to a particular input will be the + linear combination of the tau correction from the noise source inputs, + with the coefficients set by this array. + inputs: np.ndarray[ninput, ] of dtype=('chan_id', 'correlator_input') + Correlator inputs to which the timing correction will be applied. + noise_source: np.ndarray[nsource,] of dtype=('chan_id', 'correlator_input') + Correlator inputs that were used to construct the timing correction. + coeff_alpha: np.ndarray[ninput, nsource] + The timing correction applied to a particular input will be adjusted by + the linear combination of the alpha correction from the noise source inputs, + with the coefficients set by this array. + reference_noise_source: np.ndarray[ninput,] + For each input, the index into noise_source that was used as + reference in the fit for coeff_tau. + """ + sn_lookup = { + sn: ii for ii, sn in enumerate(noise_source["correlator_input"][:]) + } + + reod = np.array( + [sn_lookup[sn] for sn in self.noise_source["correlator_input"][:]] + ) + + datasets = {"coeff_tau": coeff_tau} + if coeff_alpha is not None: + if self.amp_to_delay is None: + datasets["coeff_alpha"] = coeff_alpha + else: + raise AttributeError( + "The amplitude variations are already " + "being used to correct the delay variations " + "through the amp_to_delay parameter." 
+ ) + + for name, coeff in datasets.items(): + spec = DSET_SPEC[name] + if spec["flag"]: + dset = self.create_flag(name, data=coeff[:, reod]) + else: + dset = self.create_dataset(name, data=coeff[:, reod]) + dset.attrs["axis"] = np.array(spec["axis"], dtype=np.string_) + + if reference_noise_source is not None: + ref_sn_lookup = { + sn: ii for ii, sn in enumerate(self.noise_source["correlator_input"][:]) + } + + reference_reodered = np.array( + [ + ref_sn_lookup[sn] + for sn in noise_source["correlator_input"][reference_noise_source] + ] + ) + + name = "reference_noise_source" + spec = DSET_SPEC[name] + if spec["flag"]: + dset = self.create_flag(name, data=reference_reodered) + else: + dset = self.create_dataset(name, data=reference_reodered) + dset.attrs["axis"] = np.array(spec["axis"], dtype=np.string_) + + self.create_index_map("input", inputs)
+ + +
+[docs] + def delete_coeff(self): + """Stop using coefficients to construct timing correction. + + Calling this method will delete the `coeff_tau`, `coeff_alpha`, + and `reference_noise_source` datasets if they exist. + """ + for name in ["coeff_tau", "coeff_alpha", "reference_noise_source"]: + spec = DSET_SPEC[name] + group = self["flag"] if spec["flag"] else self + if name in group: + del group[name]
+ + +
+[docs] + def search_input(self, inputs): + """Find inputs in the input axis. + + Parameters + ---------- + inputs: np.ndarray[ninput,] of dtype=('chan_id', 'correlator_input') + + Returns + ------- + index: np.ndarray[ninput,] of .int + Indices of the input axis that yield the requested inputs. + """ + if not hasattr(self, "_input_lookup"): + self._input_lookup = { + sn: ind for ind, sn in enumerate(self.input["correlator_input"][:]) + } + + return np.array( + [self._input_lookup[sn] for sn in inputs["correlator_input"][:]] + )
+ + +
+[docs] + def set_global_reference_time(self, tref, window=0.0, interpolate=False, **kwargs): + """Normalize the delay and alpha template to the value at a single time. + + Useful for referencing the template to the value at the time that + you plan to calibrate. + + Parameters + ---------- + tref : unix time + Reference the templates to the values at this time. + window: float + Reference the templates to the median value over a window (in seconds) + around tref. If nonzero, this will override the interpolate keyword. + interpolate : bool + Interpolate the delay template to time tref. Otherwise take the measured time + nearest to tref. The get_tau method is use to perform the interpolation, and + kwargs for that method will be passed along. + """ + tref = ctime.ensure_unix(tref) + tref_string = ctime.unix_to_datetime(tref).strftime("%Y-%m-%d %H:%M:%S %Z") + logger.info("Referencing timing correction with respect to %s." % tref_string) + if window > 0.0: + iref = np.flatnonzero( + (self.time >= (tref - window)) & (self.time <= (tref + window)) + ) + if iref.size > 0: + logger.info( + "Using median of %d samples around reference time." % iref.size + ) + if self.has_num_freq: + tau_ref = np.zeros((self.nsource, 1), dtype=self.tau.dtype) + alpha_ref = np.zeros((self.nsource, 1), dtype=self.alpha.dtype) + + for ss in range(self.nsource): + good = np.flatnonzero(self.num_freq[ss, iref] > 0) + if good.size > 0: + tau_ref[ss] = np.median(self.tau[ss, iref[good]]) + alpha_ref[ss] = np.median(self.alpha[ss, iref[good]]) + + else: + tau_ref = np.median(self.tau[:, iref], axis=-1, keepdims=True) + alpha_ref = np.median(self.alpha[:, iref], axis=-1, keepdims=True) + + else: + raise ValueError( + "Timing correction not available for time %s." % tref_string + ) + + elif (tref < self.time[0]) or (tref > self.time[-1]): + raise ValueError( + "Timing correction not available for time %s." 
% tref_string + ) + + else: + if not interpolate: + kwargs["interp"] = "nearest" + + tau_ref, _ = self.get_tau(np.atleast_1d(tref), ignore_amp=True, **kwargs) + alpha_ref, _ = self.get_alpha(np.atleast_1d(tref), **kwargs) + + self.tau[:] = self.tau[:] - tau_ref + self.alpha[:] = self.alpha[:] - alpha_ref
+ + +
+[docs] + def set_reference_time( + self, + tref, + tstart, + tend=None, + tinit=None, + tau_init=None, + alpha_init=None, + interpolate=False, + **kwargs + ): + """Normalize the delay and alpha template to specific times. + + Required if applying the timing correction to data that has + already been calibrated. + + Parameters + ---------- + tref : np.ndarray[nref] + Reference the delays to the values at this unix time. + tstart : np.ndarray[nref] + Begin transition to the reference delay at this unix time. + tend : np.ndarray[nref] + Complete transition to the reference delay at this unix time. + tinit : float + Use the delay at this time for the period before the first tstart. + Takes prescendent over tau_init. + tau_init : np.ndarray[nsource] + Use this delay for times before the first tstart. Must provide a value + for each noise source input. If None, then will reference with respect + to the average delay over the full time series. + alpha_init : np.ndarray[nsource] + Use this alpha for times before the first tstart. Must provide a value + for each noise source input. If None, then will reference with respect + to the average alpha over the full time series. + interpolate : bool + Interpolate the delay template to times tref. Otherwise take the measured + times nearest to tref. The get_tau method is use to perform the + interpolation, and kwargs for that method will be passed along. 
+ """ + tref = np.atleast_1d(ctime.ensure_unix(tref)) + + if not interpolate: + kwargs["interp"] = "nearest" + + tau_ref, _ = self.get_tau(tref, ignore_amp=True, **kwargs) + alpha_ref, _ = self.get_alpha(tref, **kwargs) + + if tinit is not None: + tinit = ctime.ensure_unix(tinit) + tau_init, _ = self.get_tau(tinit, ignore_amp=True, **kwargs) + alpha_init, _ = self.get_alpha(tinit, **kwargs) + + if tau_init is None: + tau_init = np.zeros((tau_ref.shape[0], 1), dtype=tau_ref.dtype) + else: + if tau_init.size == tau_ref.shape[0]: + tau_init = tau_init[:, np.newaxis] + else: + raise ValueError( + "Initial tau has size %d, but there are %d noise sources." + % (tau_init.size, tau_ref.shape[0]) + ) + + if alpha_init is None: + alpha_init = np.zeros((alpha_ref.shape[0], 1), dtype=alpha_ref.dtype) + else: + if alpha_init.size == alpha_ref.shape[0]: + alpha_init = alpha_init[:, np.newaxis] + else: + raise ValueError( + "Initial alpha has size %d, but there are %d noise sources." + % (alpha_init.size, alpha_ref.shape[0]) + ) + + tau_ref = np.concatenate((tau_init, tau_ref), axis=-1) + alpha_ref = np.concatenate((alpha_init, alpha_ref), axis=-1) + + tstart = np.atleast_1d(ctime.ensure_unix(tstart)) + istart = np.digitize(self.time, tstart) + + if tend is not None: + tend = np.atleast_1d(ctime.ensure_unix(tend)) + iend = np.digitize(self.time, tend) + else: + tend = tstart + iend = istart + + coeff = np.full(self.time.size, 0.5, dtype=np.float32) + for ts, te in zip(tstart, tend): + if te > ts: + fill = np.flatnonzero((self.time >= ts) & (self.time <= te)) + coeff[fill] = np.hanning(2 * fill.size - 1)[0 : fill.size] + + coeff = coeff[np.newaxis, :] + tau_ref_full = coeff * tau_ref[:, istart] + (1.0 - coeff) * tau_ref[:, iend] + alpha_ref_full = ( + coeff * alpha_ref[:, istart] + (1.0 - coeff) * alpha_ref[:, iend] + ) + + self.tau[:] = self.tau[:] - tau_ref_full + self.alpha[:] = self.alpha[:] - alpha_ref_full
+ + +
    def get_tau(self, timestamp, ignore_amp=False, interp="linear", extrap_limit=None):
        """Return the delay for each noise source at the requested times.

        Uses the TimingInterpolator to interpolate to the requested times.

        Parameters
        ----------
        timestamp: np.ndarray[ntime,]
            Unix timestamp.
        ignore_amp: bool
            Do not apply a noise source based amplitude correction, even if one exists.
        interp: string
            Method to interpolate over time.  Options include 'linear', 'nearest',
            'zero', 'slinear', 'quadratic', 'cubic', 'previous', and 'next'.
        extrap_limit: float
            Do not extrapolate the underlying data beyond its boundaries by this
            amount in seconds.  Default is 2 integrations.

        Returns
        -------
        tau: np.ndarray[nsource, ntime]
            Delay as a function of time for each of the noise sources.
        weight : np.ndarray[nsource, ntime]
            The uncertainty on the delay, expressed as an inverse variance.
        """
        # Only use samples where at least one frequency survived, when that
        # bookkeeping is available.
        flag = self.num_freq[:] > 0 if self.has_num_freq else None

        if ignore_amp or (self.amp_to_delay is None) or not self.has_amplitude:
            # No amplitude-based correction requested or possible:
            # interpolate the raw delay template directly.
            tau_interpolator = TimingInterpolator(
                self.time[:],
                self.tau[:],
                weight=self.weight_tau[:],
                flag=flag,
                kind=interp,
                extrap_limit=extrap_limit,
            )

            tau, weight = tau_interpolator(timestamp)

        else:
            logger.info(
                "Correcting delay template using amplitude template "
                "with coefficient %0.1f." % self.amp_to_delay
            )

            # Determine which input the delay template is referenced to
            iref = self.zero_delay_noise_source

            # Subtract the referenced, scaled alpha template from the delay template
            tau_corrected = self.tau[:] - self.amp_to_delay * (
                self.alpha[:] - self.alpha[iref, np.newaxis, :]
            )

            # Propagate the uncertainties of the three terms above into a
            # combined inverse variance.
            weight_corrected = _weight_propagation_addition(
                self.weight_tau[:],
                self.weight_alpha[:] / self.amp_to_delay**2,
                self.weight_alpha[iref, np.newaxis, :] / self.amp_to_delay**2,
            )

            # Interpolate to the requested times
            tau_interpolator = TimingInterpolator(
                self.time[:],
                tau_corrected,
                weight=weight_corrected,
                flag=flag,
                kind=interp,
                extrap_limit=extrap_limit,
            )

            tau, weight = tau_interpolator(timestamp)

        return tau, weight
+ + +
+[docs] + def get_alpha(self, timestamp, interp="linear", extrap_limit=None): + """Return the amplitude variation for each noise source at the requested times. + + Uses the TimingInterpolator to interpolate to the requested times. + + Parameters + ---------- + timestamp: np.ndarray[ntime,] + Unix timestamp. + interp: string + Method to interpolate over time. Options include 'linear', 'nearest', + 'zero', 'slinear', 'quadratic', 'cubic', 'previous', and 'next'. + extrap_limit: float + Do not extrapolate the underlying data beyond its boundaries by this + amount in seconds. Default is 2 integrations. + + Returns + ------- + alpha: np.ndarray[nsource, ntime] + Amplitude coefficient as a function of time for each of the noise sources. + weight : np.ndarray[nsource, ntime] + The uncertainty on the amplitude coefficient, expressed as an inverse variance. + """ + flag = self.num_freq[:] > 0 if self.has_num_freq else None + + alpha_interpolator = TimingInterpolator( + self.time[:], + self.alpha[:], + weight=self.weight_alpha[:], + flag=flag, + kind=interp, + extrap_limit=extrap_limit, + ) + + alpha, weight = alpha_interpolator(timestamp) + + return alpha, weight
+ + +
    def get_stacked_tau(
        self, timestamp, inputs, prod, reverse_stack, input_flags=None, **kwargs
    ):
        """Return the appropriate delay for each stacked visibility at the requested time.

        Averages the delays from the noise source inputs that map to the set of redundant
        baseline included in each stacked visibility.  This yields the appropriate
        common-mode delay correction.  If input_flags is provided, then the bad inputs
        that were excluded from the stack are also excluded from the delay template averaging.

        Parameters
        ----------
        timestamp: np.ndarray[ntime,]
            Unix timestamp.
        inputs: np.ndarray[ninput,]
            Must contain 'correlator_input' field.
        prod: np.ndarray[nprod,]
            The products that were included in the stack.
            Typically found in the `index_map['prod']` attribute of the
            `andata.CorrData` object.
        reverse_stack: np.ndarray[nprod,] of dtype=('stack', 'conjugate')
            The index of the stack axis that each product went into.
            Typically found in `reverse_map['stack']` attribute
            of the `andata.CorrData`.
        input_flags : np.ndarray [ninput, ntime]
            Array indicating which inputs were good at each time.
            Non-zero value indicates that an input was good.

        Returns
        -------
        tau: np.ndarray[nstack, ntime]
            Delay as a function of time for each stacked visibility.
        """
        # Use the get_tau method to get the data for the noise source inputs
        # at the requested times.
        data, _ = self.get_tau(timestamp, **kwargs)

        if self.has_coeff_tau:
            # This tau correction has a coefficient array.
            # Find the coefficients for the requested inputs.
            reod = andata._convert_to_slice(self.search_input(inputs))
            coeff = self.coeff_tau[reod, :]

            # Determine how the noise source delays were referenced
            # when fitting for the coefficients.
            iref = self.reference_noise_source
            if np.isscalar(iref):
                # Single common reference: re-reference the delays here if it
                # differs from the current zero-delay source, then drop iref
                # so _stack does not apply a per-input reference correction.
                if iref != self.zero_delay_noise_source:
                    data = data - data[iref, np.newaxis, :]
                iref = None
            else:
                # Per-input references: select those for the requested inputs
                # and let _stack handle the re-referencing.
                iref = iref[reod]
        else:
            coeff = None
            iref = None

        # Stack the data from the noise source inputs
        return self._stack(
            data,
            inputs,
            prod,
            reverse_stack,
            coeff=coeff,
            input_flags=input_flags,
            reference_noise_source=iref,
        )
+ + +
+[docs] + def get_stacked_alpha( + self, timestamp, inputs, prod, reverse_stack, input_flags=None, **kwargs + ): + """Return the equivalent of `get_stacked_tau` for the noise source amplitude variations. + + Averages the alphas from the noise source inputs that map to the set of redundant + baseline included in each stacked visibility. If input_flags is provided, then the + bad inputs that were excluded from the stack are also excluded from the alpha + template averaging. This method can be used to generate a stacked alpha template + that can be used to correct a stacked tau template for variations in the noise source + distribution system. However, it is recommended that the tau template be corrected + before stacking. This is accomplished by setting the `amp_to_delay` property + prior to calling `get_stacked_tau`. + + Parameters + ---------- + timestamp: np.ndarray[ntime,] + Unix timestamp. + inputs: np.ndarray[ninput,] + Must contain 'correlator_input' field. + prod: np.ndarray[nprod,] + The products that were included in the stack. + Typically found in the `index_map['prod']` attribute of the + `andata.CorrData` object. + reverse_stack: np.ndarray[nprod,] of dtype=('stack', 'conjugate') + The index of the stack axis that each product went into. + Typically found in `reverse_map['stack']` attribute + of the `andata.CorrData`. + input_flags : np.ndarray [ninput, ntime] + Array indicating which inputs were good at each time. + Non-zero value indicates that an input was good. + + Returns + ------- + alpha: np.ndarray[nstack, ntime] + Noise source amplitude variation as a function of time for each stacked visibility. + """ + if not self.has_amplitude: + raise AttributeError( + "This timing correction does not include " + "an adjustment based on the noise soure amplitude." + ) + + if self.has_coeff_alpha: + # This alpha correction has a coefficient array. + # Find the coefficients for the requested inputs. 
+ reod = andata._convert_to_slice(self.search_input(inputs)) + coeff = self.coeff_alpha[reod, :] + else: + coeff = None + + # Use the get_alpha method to get the data for the noise source inputs. + data, _ = self.get_alpha(timestamp, **kwargs) + + # Stack the data from the noise source inputs + return self._stack( + data, inputs, prod, reverse_stack, coeff=coeff, input_flags=input_flags + )
    def _stack(
        self,
        data,
        inputs,
        prod,
        reverse_stack,
        coeff=None,
        input_flags=None,
        reference_noise_source=None,
    ):
        """Average per-noise-source data into a value per stacked visibility.

        Builds, for each stack, the linear combination of noise source values
        implied by the (possibly flagged) input pairs that went into that
        stack, and applies it to `data`.  Shared implementation behind
        `get_stacked_tau` and `get_stacked_alpha`.
        """
        stack_index = reverse_stack["stack"][:]
        stack_conj = reverse_stack["conjugate"][:].astype(bool)

        nstack = np.max(stack_index) + 1
        nprod = prod.size
        ninput = inputs.size

        # Sort the products based on the index of the stack axis they went into.
        isort = np.argsort(stack_index)
        sorted_stack_index = stack_index[isort]
        sorted_stack_conj = stack_conj[isort]
        sorted_prod = prod[isort]

        # Swap input_a/input_b for conjugated products so every pair is in
        # the orientation it contributed to the stack.
        temp = sorted_prod.copy()
        sorted_prod["input_a"] = np.where(
            sorted_stack_conj, temp["input_b"], temp["input_a"]
        )
        sorted_prod["input_b"] = np.where(
            sorted_stack_conj, temp["input_a"], temp["input_b"]
        )

        # Find boundaries into the sorted products that separate stacks.
        boundary = np.concatenate(
            (
                np.atleast_1d(0),
                np.flatnonzero(np.diff(sorted_stack_index) > 0) + 1,
                np.atleast_1d(nprod),
            )
        )

        # Check for coefficient array that encodes the contribution of
        # each noise source to each input.
        if coeff is None:
            # This timing correction does not have a coefficient array.
            # Construct from the output of the map_input_to_noise_source method:
            # each input gets weight 1.0 on its single mapped noise source.
            index = np.array(map_input_to_noise_source(inputs, self.noise_source))
            coeff = np.zeros((ninput, self.nsource), dtype=np.float64)
            coeff[np.arange(ninput), index] = 1.0

        # Expand the coefficient array to have single element time axis
        nsource = coeff.shape[-1]
        coeff = coeff[:, :, np.newaxis]

        # Construct separate coefficient array that handles the reference noise source
        with_ref = reference_noise_source is not None
        if with_ref:
            cref = np.zeros((ninput, nsource, nsource, 1), dtype=np.float64)
            cref[np.arange(ninput), reference_noise_source] = coeff

        # If input_flags was not provided, or if it is all True or all False, then we
        # assume all inputs are good and carry out a faster calculation.
        no_input_flags = (
            (input_flags is None) or not np.any(input_flags) or np.all(input_flags)
        )

        if no_input_flags:
            # No input flags provided.  All inputs considered good.
            uniq_input_flags = np.ones((ninput, 1), dtype=np.float64)
            index_time = slice(None)
        else:
            # Find the unique sets of input flags; index_time maps each time
            # sample back to its unique flag column, so the expensive loop
            # below only runs once per unique flag pattern.
            uniq_input_flags, index_time = np.unique(
                input_flags, return_inverse=True, axis=1
            )

        ntime_uniq = uniq_input_flags.shape[-1]

        # Initialize arrays to hold the stacked coefficients
        stack_coeff = np.zeros((nstack, nsource, ntime_uniq), dtype=np.float64)
        weight_norm = np.zeros((nstack, ntime_uniq), dtype=np.float64)

        # Loop over stacked products
        for ss, ssi in enumerate(np.unique(sorted_stack_index)):
            # Get the input pairs that went into this stack
            prodo = sorted_prod[boundary[ss] : boundary[ss + 1]]
            aa = prodo["input_a"]
            bb = prodo["input_b"]

            # Sum the difference in coefficients over pairs of inputs,
            # weighted by the product of the input flags for those inputs.
            ww = uniq_input_flags[aa] * uniq_input_flags[bb]
            weight_norm[ssi] = np.sum(ww, axis=0)
            stack_coeff[ssi] = np.sum(
                ww[:, np.newaxis, :] * (coeff[aa] - coeff[bb]), axis=0
            )

            if with_ref:
                # Remove the contribution of the per-input reference source.
                stack_coeff[ssi] -= np.sum(
                    ww[:, np.newaxis, :] * np.sum(cref[aa] - cref[bb], axis=2), axis=0
                )

        # The delay for each stacked product is a linear combination of the
        # delay from the noise source inputs.
        stacked_data = np.sum(
            stack_coeff[:, :, index_time] * data[np.newaxis, :, :], axis=1
        )
        stacked_data *= tools.invert_no_zero(weight_norm[:, index_time])

        return stacked_data
+[docs] + def get_timing_correction(self, freq, timestamp, **kwargs): + """Return the phase correction from each noise source at the requested frequency and time. + + Assumes the phase correction scales with frequency nu as phi = 2 pi nu tau and uses the + get_tau method to interpolate over time. It acccepts and passes along keyword arguments + for that method. + + Parameters + ---------- + freq: np.ndarray[nfreq, ] + Frequency in MHz. + timestamp: np.ndarray[ntime, ] + Unix timestamp. + + Returns + ------- + gain: np.ndarray[nfreq, nsource, ntime] + Complex gain containing a pure phase correction for each of the noise sources. + weight: np.ndarray[nfreq, nsource, ntime] + Uncerainty on the gain for each of the noise sources, expressed as an inverse variance. + """ + tau, wtau = self.get_tau(timestamp, **kwargs) + + gain = np.exp( + -1.0j + * FREQ_TO_OMEGA + * freq[:, np.newaxis, np.newaxis] + * tau[np.newaxis, :, :] + ) + + weight = ( + wtau[np.newaxis, :, :] + * tools.invert_no_zero(FREQ_TO_OMEGA * freq[:, np.newaxis, np.newaxis]) ** 2 + ) + + return gain, weight
+ + +
    def get_gain(self, freq, inputs, timestamp, **kwargs):
        """Return the complex gain for the requested frequencies, inputs, and times.

        Multiplying the visibilities by the outer product of these gains will remove
        the fluctuations in phase due to timing jitter.  This method uses the
        get_tau method.  It accepts and passes along keyword arguments for that method.

        Parameters
        ----------
        freq: np.ndarray[nfreq, ]
            Frequency in MHz.
        inputs: np.ndarray[ninput, ]
            Must contain 'correlator_input' field.
        timestamp: np.ndarray[ntime, ]
            Unix timestamps.

        Returns
        -------
        gain : np.ndarray[nfreq, ninput, ntime]
            Complex gain.  Multiplying the visibilities by the
            outer product of this vector at a given time and
            frequency will correct for the timing jitter.
        weight: np.ndarray[nfreq, ninput, ntime]
            Uncertainty on the gain expressed as an inverse variance.
        """
        if self.has_coeff_tau:
            # Get the delay template for the noise source inputs
            # at the requested times
            tau, wtau = self.get_tau(timestamp, **kwargs)

            # Work in variance space while accumulating uncertainties.
            vartau = tools.invert_no_zero(wtau)

            # Find the coefficients for the requested inputs
            reod = andata._convert_to_slice(self.search_input(inputs))

            C = self.coeff_tau[reod, :]

            # Different calculation dependening on whether or not the
            # reference noise source changes with input
            iref = self.reference_noise_source
            if np.isscalar(iref):
                # There is a single reference for all inputs.
                # Check if it is different than the current reference.
                if iref != self.zero_delay_noise_source:
                    tau = tau - tau[iref, np.newaxis, :]
                    vartau = vartau + vartau[iref, np.newaxis, :]

                # The delay for each input is a linear combination of the
                # delay from the noise source inputs
                tau = np.matmul(C, tau)
                vartau = np.matmul(C**2, vartau)

            else:
                # Find the reference for the requested inputs
                iref = iref[reod]

                # The delay for each input is a linear combination of the
                # delay from the noise source inputs, minus the per-input
                # reference delay scaled by the summed coefficients.
                sumC = np.sum(C, axis=-1, keepdims=True)

                tau = np.matmul(C, tau) - sumC * tau[iref, :]

                vartau = np.matmul(C**2, vartau) + sumC**2 * vartau[iref, :]

            # Check if we need to correct the delay using the noise source amplitude
            if self.has_amplitude and self.has_coeff_alpha:
                # Get the alpha template for the noise source inputs
                # at the requested times
                alpha, walpha = self.get_alpha(timestamp, **kwargs)

                varalpha = tools.invert_no_zero(walpha)

                Calpha = self.coeff_alpha[reod, :]

                # Adjust the delay for each input by the linear combination of the
                # amplitude from the noise source inputs
                tau += np.matmul(Calpha, alpha)

                vartau += np.matmul(Calpha**2, varalpha)

            # Scale by 2 pi nu to convert to gain
            gain = np.exp(
                -1.0j
                * FREQ_TO_OMEGA
                * freq[:, np.newaxis, np.newaxis]
                * tau[np.newaxis, :, :]
            )

            weight = tools.invert_no_zero(
                vartau[np.newaxis, :, :]
                * (FREQ_TO_OMEGA * freq[:, np.newaxis, np.newaxis]) ** 2
            )

        else:
            # Get the timing correction for the noise source inputs at the
            # requested times and frequencies
            gain, weight = self.get_timing_correction(freq, timestamp, **kwargs)

            # Determine which noise source to use for each input
            index = map_input_to_noise_source(inputs, self.noise_source)

            gain = gain[:, index, :]
            weight = weight[:, index, :]

        # Return gains
        return gain, weight
+ + +
    def apply_timing_correction(self, timestream, copy=False, **kwargs):
        """Apply the timing correction to another visibility dataset.

        This method uses the get_gain or get_stacked_tau method, depending
        on whether or not the visibilities have been stacked.  It accepts
        and passes along keyword arguments for those methods.

        Parameters
        ----------
        timestream : andata.CorrData / equivalent or np.ndarray[nfreq, nprod, ntime]
            If timestream is an np.ndarray containing the visibilities, then you
            must also pass the corresponding freq, prod, input, and time axis as kwargs.
            Otherwise these quantities are obtained from the attributes of CorrData.
            If the visibilities have been stacked, then you must additionally pass the
            stack and reverse_stack axis as kwargs, and (optionally) the input flags.
        copy : bool
            Create a copy of the input visibilities.  Apply the timing correction to
            the copy and return it, leaving the original untouched.  Default is False.
        freq : np.ndarray[nfreq, ]
            Frequency in MHz.
            Must be passed as keyword argument if timestream is an np.ndarray.
        prod: np.ndarray[nprod, ]
            Product map.
            Must be passed as keyword argument if timestream is an np.ndarray.
        time: np.ndarray[ntime, ]
            Unix time.
            Must be passed as keyword argument if timestream is an np.ndarray.
        input: np.ndarray[ninput, ] of dtype=('chan_id', 'correlator_input')
            Input axis.
            Must be passed as keyword argument if timestream is an np.ndarray.
        stack : np.ndarray[nstack, ]
            Stack axis.
            Must be passed as keyword argument if timestream is an np.ndarray
            and the visibilities have been stacked.
        reverse_stack : np.ndarray[nprod, ] of dtype=('stack', 'conjugate')
            The index of the stack axis that each product went into.
            Typically found in `reverse_map['stack']` attribute.
            Must be passed as keyword argument if timestream is an np.ndarray
            and the visibilities have been stacked.
        input_flags : np.ndarray [ninput, ntime]
            Array indicating which inputs were good at each time.  Non-zero value
            indicates that an input was good.  Optional.  Only used for stacked visibilities.

        Returns
        -------
        If copy == True:
            vis : np.ndarray[nfreq, nprod(nstack), ntime]
                New set of visibilities with timing correction applied.
        else:
            None
                Correction is applied to the input visibility data.  Also,
                if timestream is an andata.CorrData instance and the gain dataset exists,
                then it will be updated with the complex gains that have been applied.
        """
        if isinstance(timestream, np.ndarray):
            # Raw array input: all axis information must come from kwargs.
            is_obj = False

            vis = timestream if not copy else timestream.copy()

            freq = kwargs.pop("freq")
            prod = kwargs.pop("prod")
            inputs = kwargs.pop("input")
            timestamp = kwargs.pop("time")
            stack = kwargs.pop("stack", None)
            reverse_stack = kwargs.pop("reverse_stack", None)

        else:
            # CorrData-like input: kwargs override the object's own axes.
            is_obj = True

            # This works for both distributed and non-distributed datasets
            vis = timestream.vis[:].view(np.ndarray)

            if copy:
                vis = vis.copy()

            freq = kwargs.pop("freq") if "freq" in kwargs else timestream.freq[:]
            prod = (
                kwargs.pop("prod")
                if "prod" in kwargs
                else timestream.index_map["prod"][:]
            )
            inputs = kwargs.pop("input") if "input" in kwargs else timestream.input[:]
            timestamp = kwargs.pop("time") if "time" in kwargs else timestream.time[:]
            stack = (
                kwargs.pop("stack")
                if "stack" in kwargs
                else timestream.index_map["stack"][:]
            )
            reverse_stack = (
                kwargs.pop("reverse_stack")
                if "reverse_stack" in kwargs
                else timestream.reverse_map["stack"][:]
            )

        input_flags = kwargs.pop("input_flags", None)

        # Determine if the visibilities have been stacked
        is_stack = (
            (stack is not None)
            and (reverse_stack is not None)
            and (stack.size < prod.size)
        )

        if is_stack:
            logger.info("Applying timing correction to stacked data.")
            # Visibilities have been stacked.
            # Stack the timing correction before applying it.
            tau = self.get_stacked_tau(
                timestamp,
                inputs,
                prod,
                reverse_stack,
                input_flags=input_flags,
                **kwargs
            )

            # Fold the stacked amplitude correction into the delay when the
            # correction provides one.
            if self.has_amplitude and self.has_coeff_alpha:
                tau += self.get_stacked_alpha(
                    timestamp,
                    inputs,
                    prod,
                    reverse_stack,
                    input_flags=input_flags,
                    **kwargs
                )

            # Loop over local frequencies and apply the timing correction
            for ff in range(freq.size):
                vis[ff] *= np.exp(-1.0j * FREQ_TO_OMEGA * freq[ff] * tau)

        else:
            logger.info("Applying timing correction to unstacked data.")
            # Visibilities have not been stacked yet.  Use the timing correction as is.
            # Get the gain corrections for the times and frequencies in timestream.
            gain, _ = self.get_gain(freq, inputs, timestamp, **kwargs)

            # Loop over products and apply the timing correction
            for ii, (aa, bb) in enumerate(prod):
                vis[:, ii, :] *= gain[:, aa, :] * gain[:, bb, :].conj()

            # If andata object was input then update the gain
            # dataset so that we have record of what was done
            if is_obj and not copy and "gain" in timestream:
                timestream.gain[:] *= gain

        # If a copy was requested, then return the
        # new vis with phase correction applied
        if copy:
            return vis
+ + +
    def summary(self):
        """Provide a summary of the timing correction.

        Returns
        -------
        summary : list of strings
            Contains useful information about the timing correction.
            Specifically contains for each noise source input the
            time averaged phase offset and delay.  Also contains
            estimates of the variance in the timing for both the
            shortest and longest timescale probed by the underlying
            dataset.  Meant to be joined with new lines and printed.
        """
        # Longest timescale: total span of the data in hours; its scatter is
        # the plain standard deviation over time.
        span = (self.time[-1] - self.time[0]) / 3600.0
        sig_tau = np.std(self.tau[:], axis=-1)

        # Shortest timescale: the median integration step; its scatter is
        # estimated from adjacent-sample differences (each difference holds
        # two independent noise realizations, hence the factor of 2).
        step = np.median(np.diff(self.time))
        sig2_tau = np.sqrt(
            np.sum(np.diff(self.tau[:], axis=-1) ** 2, axis=-1)
            / (2.0 * (self.tau.shape[-1] - 1.0))
        )

        # Build the fixed-width table header.
        fmt = "%-10s %10s %10s %15s %15s"
        hdr = fmt % ("", "PHI0", "TAU0", "SIGMA(TAU)", "SIGMA(TAU)")
        per = fmt % ("", "", "", "@ %0.2f sec" % step, "@ %0.2f hr" % span)
        unt = fmt % ("INPUT", "[rad]", "[nsec]", "[psec]", "[psec]")
        line = "".join(["-"] * 65)
        summary = [line, hdr, per, unt, line]

        # One row per noise source input: static phase offset, static delay
        # (converted to nsec), and the two scatter estimates.
        fmt = "%-10s %10.2f %10.2f %15.2f %15.2f"
        for ii, inp in enumerate(self.noise_source):
            summary.append(
                fmt
                % (
                    inp["correlator_input"],
                    self.static_phi_fit[0, ii],
                    self.static_phi_fit[1, ii] * 1e-3,
                    sig2_tau[ii],
                    sig_tau[ii],
                )
            )

        return summary
+ + + def __repr__(self): + """Return a summary of the timing correction nicely formatted for printing. + + Calls the method summary and joins the list of strings with new lines. + """ + summary = self.summary() + summary.insert(0, self.__class__.__name__) + + return "\n".join(summary)
+ + + +
class TimingData(andata.CorrData, TimingCorrection):
    """
    Subclass of :class:`andata.CorrData` for timing data.

    Automatically computes the timing correction when data is loaded and
    inherits the methods of :class:`TimingCorrection` that enable the application
    of that correction to other datasets.
    """

    @classmethod
    def from_acq_h5(cls, acq_files, only_correction=False, **kwargs):
        """Load a list of acquisition files and computes the timing correction.

        Accepts and passes on all keyword arguments for andata.CorrData.from_acq_h5
        and the construct_delay_template function.

        Parameters
        ----------
        acq_files: str or list of str
            Path to file(s) containing the timing data.
        only_correction: bool
            Only return the timing correction.  Do not return the underlying
            data from which that correction was derived.

        Returns
        -------
        data: TimingData or TimingCorrection
        """
        # Separate the kwargs for construct_delay_template.  This is necessary
        # because andata will not accept extraneous kwargs.
        # NOTE: inspect.getargspec was removed in Python 3.11, so use
        # inspect.signature to identify the keyword parameters instead.
        sig = inspect.signature(construct_delay_template)
        cdt_kwargs_list = {
            name
            for name, par in sig.parameters.items()
            if par.default is not inspect.Parameter.empty
        } & set(kwargs)

        cdt_kwargs = {name: kwargs.pop(name) for name in cdt_kwargs_list}

        # Change some of the default parameters for CorrData.from_acq_h5 to reflect
        # the fact that this data will be used to compute a timing correction.
        apply_gain = kwargs.pop("apply_gain", False)
        datasets = kwargs.pop(
            "datasets", ["vis", "flags/vis_weight", "flags/frac_lost"]
        )

        # Load the data into an andata.CorrData object
        corr_data = super(TimingData, cls).from_acq_h5(
            acq_files, apply_gain=apply_gain, datasets=datasets, **kwargs
        )

        # Instantiate a TimingCorrection or TimingData object
        dist_kwargs = {"distributed": corr_data.distributed, "comm": corr_data.comm}
        data = (
            TimingCorrection(**dist_kwargs)
            if only_correction
            else TimingData(**dist_kwargs)
        )

        # Redefine input axis to contain only noise sources
        isource = np.unique(corr_data.prod.tolist())
        noise_source = corr_data.input[isource]
        data.create_index_map("noise_source", noise_source)

        # Copy over relevant data to the newly instantiated object
        if only_correction:
            # We are only returning a correction, so we only need to
            # copy over a subset of index_map.
            for name in ["time", "freq"]:
                data.create_index_map(name, corr_data.index_map[name][:])

        else:
            # We are returning the data in addition to the correction.
            # Redefine prod axis to contain only noise sources.
            prod = np.zeros(corr_data.prod.size, dtype=corr_data.prod.dtype)
            prod["input_a"] = andata._search_array(isource, corr_data.prod["input_a"])
            prod["input_b"] = andata._search_array(isource, corr_data.prod["input_b"])
            data.create_index_map("prod", prod)

            # Copy over remaining index maps
            for name, index_map in corr_data.index_map.items():
                if name not in data.index_map:
                    data.create_index_map(name, index_map[:])

            # Copy over the attributes
            memh5.copyattrs(corr_data.attrs, data.attrs)

            # Iterate over the datasets and copy them over
            for name, old_dset in corr_data.datasets.items():
                new_dset = data.create_dataset(
                    name, data=old_dset[:], distributed=old_dset.distributed
                )
                memh5.copyattrs(old_dset.attrs, new_dset.attrs)

            # Iterate over the flags and copy them over
            for name, old_dset in corr_data.flags.items():
                new_dset = data.create_flag(
                    name, data=old_dset[:], distributed=old_dset.distributed
                )
                memh5.copyattrs(old_dset.attrs, new_dset.attrs)

        # Construct delay template
        res = construct_delay_template(corr_data, **cdt_kwargs)

        # If we are only returning the timing correction, then remove
        # the amplitude and phase of the noise source
        if only_correction:
            for name in ["amp", "weight_amp", "phi", "weight_phi"]:
                res.pop(name)

        # Create index map containing names of parameters.
        # NOTE: np.string_ was removed in NumPy 2.0; np.bytes_ is the same type.
        param = ["intercept", "slope", "quad", "cube", "quart", "quint"]
        param = param[0 : res["static_phi_fit"].shape[0]]
        data.create_index_map("param", np.array(param, dtype=np.bytes_))

        # Create datasets containing the timing correction
        for name, arr in res.items():
            spec = DSET_SPEC[name]
            if spec["flag"]:
                dset = data.create_flag(name, data=arr)
            else:
                dset = data.create_dataset(name, data=arr)

            dset.attrs["axis"] = np.array(spec["axis"], dtype=np.bytes_)

        # Delete the temporary corr_data object to free its (potentially
        # large) visibility buffers before returning.
        del corr_data
        gc.collect()

        # Return timing data object
        return data

    def summary(self):
        """Provide a summary of the timing data and correction.

        Returns
        -------
        summary : list of strings
            Contains useful information about the timing correction
            and data.  Includes the reduction in the standard deviation
            of the phase after applying the timing correction.  This is
            presented as quantiles over frequency for each of the
            noise source products.
        """
        summary = super(TimingData, self).summary()

        # Apply the correction to this dataset's own visibilities so we can
        # compare the phase scatter before and after.
        vis = self.apply_timing_correction(
            self.vis[:],
            copy=True,
            freq=self.freq,
            time=self.time,
            prod=self.prod,
            input=self.noise_source,
        )

        phi_before = np.angle(self.vis[:])
        phi_after = np.angle(vis)

        # Remove the median phase and unwrap so the scatter estimate is not
        # biased by phase wrapping.
        phi_before = _correct_phase_wrap(
            phi_before - np.median(phi_before, axis=-1)[..., np.newaxis]
        )
        phi_after = _correct_phase_wrap(
            phi_after - np.median(phi_after, axis=-1)[..., np.newaxis]
        )

        # Robust (median absolute deviation style) scatter before and after.
        sig_before = np.median(
            np.abs(phi_before - np.median(phi_before, axis=-1)[..., np.newaxis]),
            axis=-1,
        )
        sig_after = np.median(
            np.abs(phi_after - np.median(phi_after, axis=-1)[..., np.newaxis]), axis=-1
        )

        ratio = sig_before * tools.invert_no_zero(sig_after)

        stats = np.percentile(ratio, [0, 25, 50, 75, 100], axis=0)

        fmt = "%-23s %5s %5s %8s %5s %5s"
        hdr1 = "Factor Reduction in RMS Phase Noise (Quantiles Over Frequency)"
        hdr2 = fmt % ("PRODUCT", "MIN", "25%", "MEDIAN", "75%", "MAX")
        line = "".join(["-"] * 65)
        summary += ["", line, hdr1, hdr2, line]

        # One row per cross-correlation product (auto-correlations skipped).
        fmt = "%-10s x %-10s %5d %5d %8d %5d %5d"
        for ii, pp in enumerate(self.prod):
            if pp[0] != pp[1]:
                summary.append(
                    fmt
                    % (
                        (
                            self.noise_source[pp[0]]["correlator_input"],
                            self.noise_source[pp[1]]["correlator_input"],
                        )
                        + tuple(stats[:, ii])
                    )
                )

        return summary
+ + + +
class TimingInterpolator(object):
    """Interpolation that is aware of flagged data and weights.

    Flagged data is ignored during the interpolation.  The weights from
    the data are propagated to obtain weights for the interpolated points.
    """

    def __init__(self, x, y, weight=None, flag=None, kind="linear", extrap_limit=None):
        """Instantiate a callable TimingInterpolator object.

        Parameters
        ----------
        x : np.ndarray[nsample,]
            The points where the data was sampled.
            Must be monotonically increasing.
        y : np.ndarray[..., nsample]
            The data to interpolate.
        weight : np.ndarray[..., nsample]
            The uncertainty on the data, expressed as an
            inverse variance.
        flag : np.ndarray[..., nsample]
            Boolean indicating if the data is to be
            included in the interpolation.
        kind : str
            String that specifies the kind of interpolation.
            The value `nearest`, `previous`, `next`, and `linear` will use
            custom methods that propagate uncertainty to obtain the interpolated
            weights.  The value `zero`, `slinear`, `quadratic`, and `cubic`
            will use spline interpolation from scipy.interpolation.interp1d
            and use the weight from the nearest point.
        extrap_limit : float
            Maximum amount, in the same units as `x`, by which evaluation
            points may fall outside the span of the data before a ValueError
            is raised.  Defaults to twice the median sample spacing.

        Returns
        -------
        interpolator : TimingInterpolator
            Callable that will interpolate the data that was provided
            to a new set of x values.
        """
        self.x = x
        self.y = y

        # Shape of everything except the sample axis; iterated over in __call__.
        self._shape = y.shape[:-1]

        # Store variances internally; unit variance when no weight was given.
        if weight is None:
            self.var = np.ones(y.shape, dtype=np.float32)
        else:
            self.var = tools.invert_no_zero(weight)

        if flag is None:
            self.flag = np.ones(y.shape, dtype=bool)
        else:
            self.flag = flag

        if extrap_limit is None:
            self._extrap_limit = 2.0 * np.median(np.diff(self.x))
        else:
            self._extrap_limit = extrap_limit

        # Custom uncertainty-propagating methods where available; otherwise
        # fall back to a scipy.interpolate based wrapper.
        self._interp = INTERPOLATION_LOOKUP.get(kind, _interpolation_scipy(kind))

    def __call__(self, xeval):
        """Interpolate the data.

        Parameters
        ----------
        xeval : np.ndarray[neval,]
            Evaluate the interpolant at these points.

        Returns
        -------
        yeval : np.ndarray[neval,]
            Interpolated values.
        weval : np.ndarray[neval,]
            Uncertainty on the interpolated values,
            expressed as an inverse variance.

        Raises
        ------
        ValueError
            If `xeval` extends beyond the span of the data by more than
            the extrapolation limit.
        """
        # Make sure we are not extrapolating too much
        dx_beg = self.x[0] - np.min(xeval)
        dx_end = np.max(xeval) - self.x[-1]

        if (dx_beg > self._extrap_limit) or (dx_end > self._extrap_limit):
            raise ValueError("Extrapolating beyond span of data.")

        # Create arrays to hold interpolation.
        # NOTE(review): for scalar xeval the output drops the evaluation axis —
        # presumably intentional; confirm against the custom interp functions.
        shape = self._shape if np.isscalar(xeval) else self._shape + (xeval.size,)

        yeval = np.zeros(shape, dtype=self.y.dtype)
        weval = np.zeros(shape, dtype=np.float32)

        # Loop over other axes and interpolate along last axis,
        # using only the unflagged samples for each slice.
        for ind in np.ndindex(*self._shape):
            to_interp = np.flatnonzero(self.flag[ind])
            if to_interp.size > 0:
                yeval[ind], weval[ind] = self._interp(
                    self.x[to_interp],
                    self.y[ind][to_interp],
                    self.var[ind][to_interp],
                    xeval,
                )

        return yeval, weval
+ + + +
def load_timing_correction(
    files, start=None, stop=None, window=43200.0, instrument="chime", **kwargs
):
    """Find and load the appropriate timing correction for a list of corr acquisition files.

    For example, if the instrument keyword is set to 'chime',
    then this function will accept all types of chime corr acquisition files,
    such as 'chimetiming', 'chimepb', 'chimeN2', 'chimecal', and then find
    the relevant set of 'chimetiming' files to load.

    Accepts and passes on all keyword arguments for the functions
    andata.CorrData.from_acq_h5 and construct_delay_template.

    Should consider modifying this method to use Finder at some point in future.

    Parameters
    ----------
    files : string or list of strings
        Absolute path to corr acquisition file(s).
    start : integer, optional
        What frame to start at in the full set of files.
    stop : integer, optional
        What frame to stop at in the full set of files.
    window : float
        Use the timing data -window from start and +window from stop.
        Default is 12 hours.
    instrument : string
        Name of the instrument.  Default is 'chime'.

    Returns
    -------
    data: TimingData

    Raises
    ------
    RuntimeError
        If the input files span multiple acquisitions, are not corr-type
        files from the requested instrument, or no matching timing
        acquisition can be found.
    """
    files = np.atleast_1d(files)

    # Check that a single acquisition was requested
    input_dirs = [os.path.dirname(ff) for ff in files]
    if len(set(input_dirs)) > 1:
        raise RuntimeError("Input files span multiple acquisitions!")

    # Extract relevant information from the filename.
    # Acquisition directories follow the <date>_<instrument>_<type> convention.
    node = os.path.dirname(input_dirs[0])
    acq = os.path.basename(input_dirs[0])

    acq_date, acq_inst, acq_type = acq.split("_")
    if not acq_inst.startswith(instrument) or (acq_type != "corr"):
        raise RuntimeError(
            "This function is only able to parse corr type files "
            "from the specified instrument (currently %s)." % instrument
        )

    # Search for all timing acquisitions on this node
    tdirs = sorted(
        glob.glob(os.path.join(node, "_".join(["*", instrument + "timing", acq_type])))
    )
    if not tdirs:
        raise RuntimeError("No timing acquisitions found on node %s." % node)

    # Determine the start time of the requested acquistion and the available timing acquisitions
    acq_start = ctime.datetime_to_unix(ctime.timestr_to_datetime(acq_date))

    tacq_start = np.array(
        [ctime.timestr_to_datetime(os.path.basename(tt)) for tt in tdirs]
    )
    tacq_start = ctime.datetime_to_unix(tacq_start)

    # Find the closest timing acquisition to the requested acquisition,
    # requiring the start times to agree to within a minute.
    iclose = np.argmin(np.abs(acq_start - tacq_start))
    if np.abs(acq_start - tacq_start[iclose]) > 60.0:
        raise RuntimeError("Cannot find appropriate timing acquisition for %s." % acq)

    # Grab all timing files from this acquisition
    tfiles = sorted(glob.glob(os.path.join(tdirs[iclose], "*.h5")))

    # Load only the time axes (datasets=()) to locate the relevant span cheaply.
    tdata = andata.CorrData.from_acq_h5(tfiles, datasets=())

    # Find relevant span of time
    data = andata.CorrData.from_acq_h5(files, start=start, stop=stop, datasets=())

    time_start = data.time[0] - window
    time_stop = data.time[-1] + window

    tstart = int(np.argmin(np.abs(time_start - tdata.time)))
    tstop = int(np.argmin(np.abs(time_stop - tdata.time)))

    # Load into TimingData object
    data = TimingData.from_acq_h5(tfiles, start=tstart, stop=tstop, **kwargs)

    return data
+ + + +# ancillary functions +# ------------------- + + +
def construct_delay_template(
    data,
    min_frac_kept=0.0,
    threshold=0.50,
    min_freq=420.0,
    max_freq=780.0,
    mask_rfi=False,
    max_iter_weight=None,
    check_amp=False,
    nsigma_amp=None,
    check_phi=True,
    nsigma_phi=None,
    nparam=2,
    static_phi=None,
    weight_static_phi=None,
    static_phi_fit=None,
    static_amp=None,
    weight_static_amp=None,
):
    """Construct a relative time delay template.

    Fits the phase of the cross-correlation between noise source inputs
    to a model that increases linearly with frequency.

    Parameters
    ----------
    data: andata.CorrData
        Correlation data.  Must contain the following attributes:
            freq: np.ndarray[nfreq, ]
                Frequency in MHz.
            vis: np.ndarray[nfreq, nprod, ntime]
                Upper-triangle, product packed visibility matrix
                containing ONLY the noise source inputs.
            weight: np.ndarray[nfreq, nprod, ntime]
                Flag indicating the data points to fit.
            flags/frac_lost: np.ndarray[nfreq, ntime]
                Flag indicating the fraction of data lost.
                If provided, then data will be weighted by the
                fraction of data that remains when solving
                for the delay template.
    min_frac_kept: float
        Do not include frequencies and times where the fraction
        of data that remains is less than this threshold.
        Default is 0.0.
    threshold: float
        A (frequency, input) must pass the checks specified above
        more than this fraction of the time, otherwise it will be
        flaged as bad for all times.  Default is 0.50.
    min_freq: float
        Minimum frequency in MHz to include in the fit.
        Default is 420.
    max_freq: float
        Maximum frequency in MHz to include in the fit.
        Default is 780.
    mask_rfi: bool
        Mask frequencies that occur within known RFI bands.  Note that the
        noise source data does not contain RFI, however the real-time pipeline
        does not distinguish between noise source inputs and sky inputs, and as
        a result will discard large amounts of data in these bands.
    max_iter_weight: int
        The weight for each frequency is estimated from the variance of the
        residuals of the template fit from the previous iteration.  Outliers
        are also flagged at each iteration with an increasingly aggresive threshold.
        This is the total number of times to iterate.  Setting to 1 corresponds
        to linear least squares.  Default is 1, unless check_amp or check_phi is True,
        in which case this defaults to the maximum number of thresholds provided.
    check_amp: bool
        Do not fit frequencies and times where the residual amplitude is an outlier.
        Default is False.
    nsigma_amp: list of float
        If check_amp is True, then residuals greater than this number of sigma
        will be considered an outlier.  Provide a list containing the value to be used
        at each iteration.  If the length of the list is less than max_iter_weight,
        then the last value in the list will be repeated for the remaining iterations.
        Default is [1000, 500, 200, 100, 50, 20, 10, 5].
    check_phi: bool
        Do not fit frequencies and times where the residual phase is an outlier.
        Default is True.
    nsigma_phi: list of float
        If check_phi is True, then residuals greater than this number of sigma
        will be considered an outlier.  Provide a list containing the value to be used
        at each iteration.  If the length of the list is less than max_iter_weight,
        then the last value in the list will be repeated for the remaining iterations.
        Default is [1000, 500, 200, 100, 50, 20, 10, 5].
    nparam: int
        Number of parameters for polynomial fit to the
        time averaged phase versus frequency.  Default is 2.
    static_phi: np.ndarray[nfreq, nsource]
        Subtract this quantity from the noise source phase prior to fitting
        for the timing correction.  If None, then this will be estimated from the median
        of the noise source phase over time.
    weight_static_phi: np.ndarray[nfreq, nsource]
        Inverse variance of the time averaged phased.  Set to zero for frequencies and inputs
        that are missing or should be ignored.  If None, then this will be estimated from the
        residuals of the fit.
    static_phi_fit: np.ndarray[nparam, nsource]
        Polynomial fit to static_phi versus frequency.
    static_amp: np.ndarray[nfreq, nsource]
        Subtract this quantity from the noise source amplitude prior to fitting
        for the amplitude variations.  If None, then this will be estimated from the median
        of the noise source amplitude over time.
    weight_static_amp: np.ndarray[nfreq, nsource]
        Inverse variance of the time averaged amplitude.  Set to zero for frequencies and inputs
        that are missing or should be ignored.  If None, then this will be estimated from the
        residuals of the fit.

    Returns
    -------
    phi: np.ndarray[nfreq, nsource, ntime]
        Phase of the signal from the noise source.
    weight_phi: np.ndarray[nfreq, nsource, ntime]
        Inverse variance of the phase of the signal from the noise source.
    tau: np.ndarray[nsource, ntime]
        Delay template for each noise source input.
    weight_tau: np.ndarray[nfreq, nsource]
        Estimate of the uncertainty on the delay template (inverse variance).
    static_phi: np.ndarray[nfreq, nsource]
        Time averaged phase versus frequency.
    weight_static_phi: np.ndarray[nfreq, nsource]
        Inverse variance of the time averaged phase.
    static_phi_fit: np.ndarray[nparam, nsource]
        Best-fit parameters of the polynomial fit to the
        time averaged phase versus frequency.
    amp: np.ndarray[nfreq, nsource, ntime]
        Amplitude of the signal from the noise source.
    weight_amp: np.ndarray[nfreq, nsource, ntime]
        Inverse variance of the amplitude of the signal from the noise source.
    alpha: np.ndarray[nsource, ntime]
        Amplitude coefficient for each noise source input.
    weight_alpha: np.ndarray[nfreq, nsource]
        Estimate of the uncertainty on the amplitude coefficient (inverse variance).
    static_amp: np.ndarray[nfreq, nsource]
        Time averaged amplitude versus frequency.
    weight_static_amp: np.ndarray[nfreq, nsource]
        Inverse variance of the time averaged amplitude.
    num_freq: np.ndarray[nsource, ntime]
        Number of frequencies used to construct the delay and amplitude templates.
    """
    # Check if we are distributed.  If so make sure we are distributed over time.
    parallel = isinstance(data.vis, memh5.MemDatasetDistributed)
    if parallel:
        data.redistribute("time")
        comm = data.vis.comm

    # Extract relevant datasets
    freq = data.freq[:]
    vis = data.vis[:].view(np.ndarray)
    weight = data.weight[:].view(np.ndarray)

    # Check dimensions.  nsource is recovered from the size of the packed
    # upper triangle: nprod = nsource * (nsource + 1) / 2.
    nfreq, nprod, ntime = vis.shape
    nsource = int((np.sqrt(8 * nprod + 1) - 1) // 2)
    ilocal = range(0, nsource)

    assert nfreq == freq.size
    assert nsource >= 2
    assert nparam >= 2

    # Resolve any distributed starting estimates to local ndarrays,
    # remembering which global source indices are held locally.
    if static_phi is not None:
        static_phi, sphi_shp, sphi_ind = _resolve_distributed(static_phi, axis=1)
        assert sphi_shp == (nfreq, nsource)

    if weight_static_phi is not None:
        weight_static_phi, wsphi_shp, wsphi_ind = _resolve_distributed(
            weight_static_phi, axis=1
        )
        assert wsphi_shp == (nfreq, nsource)

    if static_phi_fit is not None:
        static_phi_fit, sphifit_shp, sphifit_ind = _resolve_distributed(
            static_phi_fit, axis=1
        )
        assert sphifit_shp == (nparam, nsource)

    if static_amp is not None:
        static_amp, samp_shp, samp_ind = _resolve_distributed(static_amp, axis=1)
        assert samp_shp == (nfreq, nsource)

    if weight_static_amp is not None:
        weight_static_amp, wsamp_shp, wsamp_ind = _resolve_distributed(
            weight_static_amp, axis=1
        )
        assert wsamp_shp == (nfreq, nsource)

    # Set default nsigma for flagging outliers
    if nsigma_amp is None:
        nsigma_amp = [1000.0, 500.0, 200.0, 100.0, 50.0, 20.0, 10.0, 5.0]
    elif np.isscalar(nsigma_amp):
        nsigma_amp = [nsigma_amp]

    if nsigma_phi is None:
        nsigma_phi = [1000.0, 500.0, 200.0, 100.0, 50.0, 20.0, 10.0, 5.0]
    elif np.isscalar(nsigma_phi):
        nsigma_phi = [nsigma_phi]

    if max_iter_weight is None:
        max_iter_weight = max(
            len(nsigma_amp) + 1 if check_amp else 1,
            len(nsigma_phi) + 1 if check_phi else 1,
        )
    else:
        max_iter_weight = max(max_iter_weight, 1)

    # Pad the threshold lists to max_iter_weight by repeating the last value.
    nsigma_amp = [
        nsigma_amp[min(ii, len(nsigma_amp) - 1)] for ii in range(max_iter_weight)
    ]
    nsigma_phi = [
        nsigma_phi[min(ii, len(nsigma_phi) - 1)] for ii in range(max_iter_weight)
    ]

    # Compute amplitude of noise source signal from autocorrelation.
    # iauto are the packed-product indices of the autocorrelations.
    iauto = np.array([int(k * (2 * nsource - k + 1) // 2) for k in range(nsource)])

    amp = np.sqrt(vis[:, iauto, :].real)

    # Determine which data points to fit
    flg = amp > 0.0
    if weight is not None:
        flg &= weight[:, iauto, :] > 0.0

    # If requested discard frequencies and times that have high frac_lost
    if hasattr(data, "flags") and ("frac_lost" in data.flags):
        logger.info("Fraction of data kept must be greater than %0.2f." % min_frac_kept)

        frac_kept = 1.0 - data.flags["frac_lost"][:].view(np.ndarray)
        flg &= frac_kept[:, np.newaxis, :] >= min_frac_kept

    else:
        frac_kept = np.ones((nfreq, ntime), dtype=np.float32)

    # Restrict the range of frequencies that are fit to avoid bandpass edges
    limit_freq = (freq > min_freq) & (freq < max_freq)
    if mask_rfi:
        logger.info("Masking RFI bands.")
        limit_freq &= ~rfi.frequency_mask(
            freq, freq_width=data.index_map["freq"]["width"][:]
        )

    flg = (flg & limit_freq[:, np.newaxis, np.newaxis]).astype(np.float32)

    # If we only have two noise source inputs, then we use the cross-correlation
    # between them to characterize their relative response to the noise source signal.
    # If we have more than two noise source inputs, then we perform an eigenvalue
    # decomposition of the cross-correlation matrix to obtain an improved estimate
    # of the response of each input to the noise source signal.
    if nsource > 2:
        response = eigen_decomposition(vis, flg)

        phi = np.angle(response)
        amp = np.abs(response)

        ww = flg

    else:
        phi = np.zeros((nfreq, nsource, ntime), dtype=np.float32)
        phi[:, 1, :] = np.angle(vis[:, 1, :].conj())

        amp = np.sqrt(vis[:, iauto, :].real)

        ww = np.repeat(flg[:, 0, np.newaxis, :] * flg[:, 1, np.newaxis, :], 2, axis=1)

    # Scale the flag by the fraction of data that was kept
    ww *= frac_kept[:, np.newaxis, :]

    # If parallelized we need to redistribute over inputs for the
    # operations below, which require full frequency and time coverage.
    if parallel:
        amp = mpiarray.MPIArray.wrap(amp, axis=2, comm=comm)
        phi = mpiarray.MPIArray.wrap(phi, axis=2, comm=comm)
        ww = mpiarray.MPIArray.wrap(ww, axis=2, comm=comm)

        amp = amp.redistribute(1)
        phi = phi.redistribute(1)
        ww = ww.redistribute(1)

        # nsource and ilocal now describe only this rank's slice of sources.
        nsource = amp.local_shape[1]
        ilocal = range(amp.local_offset[1], amp.local_offset[1] + nsource)

        logger.info("I am processing %d noise source inputs." % nsource)

        amp = amp[:].view(np.ndarray)
        phi = phi[:].view(np.ndarray)
        ww = ww[:].view(np.ndarray)

    # If a frequency is flagged more than `threshold` fraction of the time, then flag it entirely
    ww *= (
        (
            np.sum(ww > 0.0, axis=-1, dtype=np.float32, keepdims=True)
            / float(ww.shape[-1])
        )
        > threshold
    ).astype(np.float32)

    logger.info(
        "%0.1f percent of frequencies will be used to construct timing correction."
        % (
            100.0
            * np.sum(np.any(ww > 0.0, axis=(1, 2)), dtype=np.float32)
            / float(ww.shape[0]),
        )
    )

    # If the starting values for the mean and variance were not provided,
    # then estimate them from the data.  Otherwise select the local sources
    # from the provided (possibly distributed) arrays.
    if static_phi is None:
        static_phi = _flagged_median(phi, ww, axis=-1)
    else:
        sphi_ind = np.array([sphi_ind.index(ilcl) for ilcl in ilocal])
        static_phi = static_phi[:, sphi_ind]

    if weight_static_phi is None:
        weight_static_phi = np.ones(ww.shape[0:2], dtype=np.float32)
    else:
        wsphi_ind = np.array([wsphi_ind.index(ilcl) for ilcl in ilocal])
        weight_static_phi = weight_static_phi[:, wsphi_ind]

    if static_amp is None:
        static_amp = _flagged_median(amp, ww, axis=-1)
    else:
        samp_ind = np.array([samp_ind.index(ilcl) for ilcl in ilocal])
        static_amp = static_amp[:, samp_ind]

    if weight_static_amp is None:
        weight_static_amp = np.ones(ww.shape[0:2], dtype=np.float32)
    else:
        wsamp_ind = np.array([wsamp_ind.index(ilcl) for ilcl in ilocal])
        weight_static_amp = weight_static_amp[:, wsamp_ind]

    # Fit frequency dependence of amplitude and phase
    # damp = asc * dalpha  and  dphi = omega * dtau
    asc = (
        _amplitude_scaling(freq[:, np.newaxis, np.newaxis])
        * static_amp[:, :, np.newaxis]
    )

    omega = FREQ_TO_OMEGA * freq[:, np.newaxis, np.newaxis]

    # Estimate variance of each frequency from residuals
    for iter_weight in range(max_iter_weight):
        msg = ["Iteration %d of %d" % (iter_weight + 1, max_iter_weight)]

        dphi = _correct_phase_wrap(phi - static_phi[:, :, np.newaxis])
        damp = amp - static_amp[:, :, np.newaxis]

        weight_amp = ww * weight_static_amp[:, :, np.newaxis]
        weight_phi = ww * weight_static_phi[:, :, np.newaxis]

        # Construct alpha template (weighted least-squares over frequency)
        alpha = np.sum(weight_amp * asc * damp, axis=0) * tools.invert_no_zero(
            np.sum(weight_amp * asc**2, axis=0)
        )

        # Construct delay template (weighted least-squares over frequency)
        tau = np.sum(weight_phi * omega * dphi, axis=0) * tools.invert_no_zero(
            np.sum(weight_phi * omega**2, axis=0)
        )

        # Calculate amplitude residuals
        ramp = damp - asc * alpha[np.newaxis, :, :]

        # Calculate phase residuals
        rphi = dphi - omega * tau[np.newaxis, :, :]

        # Calculate the mean and variance of the amplitude residuals
        inv_num = tools.invert_no_zero(np.sum(ww, axis=-1))
        mu_ramp = np.sum(ww * ramp, axis=-1) * inv_num
        var_ramp = (
            np.sum(ww * (ramp - mu_ramp[:, :, np.newaxis]) ** 2, axis=-1) * inv_num
        )

        # Calculate the mean and variance of the phase residuals
        mu_rphi = np.sum(ww * rphi, axis=-1) * inv_num
        var_rphi = (
            np.sum(ww * (rphi - mu_rphi[:, :, np.newaxis]) ** 2, axis=-1) * inv_num
        )

        # Update the static quantities
        static_amp = static_amp + mu_ramp
        static_phi = static_phi + mu_rphi

        weight_static_amp = tools.invert_no_zero(var_ramp)
        weight_static_phi = tools.invert_no_zero(var_rphi)

        # Flag outliers
        not_outlier = np.ones_like(ww)
        if check_amp:
            nsigma = np.abs(ramp) * np.sqrt(weight_static_amp[:, :, np.newaxis])
            not_outlier *= (nsigma < nsigma_amp[iter_weight]).astype(np.float32)
            msg.append("nsigma_amp = %0.1f" % nsigma_amp[iter_weight])

        if check_phi:
            nsigma = np.abs(rphi) * np.sqrt(weight_static_phi[:, :, np.newaxis])
            not_outlier *= (nsigma < nsigma_phi[iter_weight]).astype(np.float32)
            msg.append("nsigma_phi = %0.1f" % nsigma_phi[iter_weight])

        if check_amp or check_phi:
            ww *= not_outlier

        logger.info(" | ".join(msg))

    # Calculate the number of frequencies used in the fit.
    # NOTE: weight_amp/weight_phi here are those from the final iteration.
    num_freq = np.sum(weight_amp > 0.0, axis=0, dtype=int)

    # Calculate the uncertainties on the fit parameters
    weight_tau = np.sum(weight_phi * omega**2, axis=0)
    weight_alpha = np.sum(weight_amp * asc**2, axis=0)

    # Calculate the average delay over this period using non-linear
    # least squares that is insensitive to phase wrapping
    if static_phi_fit is None:
        err_static_phi = np.sqrt(tools.invert_no_zero(weight_static_phi))

        static_phi_fit = np.zeros((nparam, nsource), dtype=np.float64)
        for nn in range(nsource):
            if np.sum(err_static_phi[:, nn] > 0.0, dtype=int) > nparam:
                static_phi_fit[:, nn] = fit_poly_to_phase(
                    freq,
                    np.exp(1.0j * static_phi[:, nn]),
                    err_static_phi[:, nn],
                    nparam=nparam,
                )[0]

    else:
        sphifit_ind = np.array([sphifit_ind.index(ilcl) for ilcl in ilocal])
        static_phi_fit = static_phi_fit[:, sphifit_ind]

    # Convert the outputs to MPIArrays distributed over input
    if parallel:
        tau = mpiarray.MPIArray.wrap(tau, axis=0, comm=comm)
        alpha = mpiarray.MPIArray.wrap(alpha, axis=0, comm=comm)

        weight_tau = mpiarray.MPIArray.wrap(weight_tau, axis=0, comm=comm)
        weight_alpha = mpiarray.MPIArray.wrap(weight_alpha, axis=0, comm=comm)

        static_phi = mpiarray.MPIArray.wrap(static_phi, axis=1, comm=comm)
        static_amp = mpiarray.MPIArray.wrap(static_amp, axis=1, comm=comm)

        weight_static_phi = mpiarray.MPIArray.wrap(weight_static_phi, axis=1, comm=comm)
        weight_static_amp = mpiarray.MPIArray.wrap(weight_static_amp, axis=1, comm=comm)

        static_phi_fit = mpiarray.MPIArray.wrap(static_phi_fit, axis=1, comm=comm)

        num_freq = mpiarray.MPIArray.wrap(num_freq, axis=0, comm=comm)

        phi = mpiarray.MPIArray.wrap(phi, axis=1, comm=comm)
        amp = mpiarray.MPIArray.wrap(amp, axis=1, comm=comm)

        weight_phi = mpiarray.MPIArray.wrap(weight_phi, axis=1, comm=comm)
        weight_amp = mpiarray.MPIArray.wrap(weight_amp, axis=1, comm=comm)

        data.redistribute("freq")

    # Return results
    return dict(
        tau=tau,
        alpha=alpha,
        weight_tau=weight_tau,
        weight_alpha=weight_alpha,
        static_phi=static_phi,
        static_amp=static_amp,
        weight_static_phi=weight_static_phi,
        weight_static_amp=weight_static_amp,
        static_phi_fit=static_phi_fit,
        num_freq=num_freq,
        phi=phi,
        amp=amp,
        weight_phi=weight_phi,
        weight_amp=weight_amp,
    )
+ + + +
+[docs] +def map_input_to_noise_source(inputs, noise_sources): + """Find the appropriate noise source to use to correct the phase of each input. + + Searches for a noise source connected to the same slot, + then crate, then hut, then correlator. + + Parameters + ---------- + inputs: np.ndarray[ninput, ] of dtype=('chan_id', 'correlator_input') + The input axis from a data acquisition file. + noise_sources: np.ndarray[nsource, ] of dtype=('chan_id', 'correlator_input') + The noise sources. + """ + + # Define functions + def parse_serial(input_serial): + # Have to distinguish between CHIME WRH and ERH + # Otherwise serial numbers already have the + # desired hierarchical structure. + + # Serial from file is often bytes, ensure it is unicode + if not isinstance(input_serial, str): + input_serial = input_serial.decode("utf-8") + + if input_serial.startswith("FCC"): + if int(input_serial[3:5]) < 4: + name = "FCCW" + input_serial[3:] + else: + name = "FCCE" + input_serial[3:] + else: + name = input_serial + + return name + + def count_startswith(x, y): + cnt = 0 + for ii in range(min(len(x), len(y))): + if x[ii] == y[ii]: + cnt += 1 + else: + break + + return cnt + + # Create hierarchical identifier from serial number for the + # noise sources and requested inputs + input_names = list(map(parse_serial, inputs["correlator_input"])) + source_names = list(map(parse_serial, noise_sources["correlator_input"])) + + # Map each input to a noise source + imap = [ + np.argmax([count_startswith(inp, src) for src in source_names]) + for inp in input_names + ] + + return imap
+ + + +
+[docs] +def eigen_decomposition(vis, flag): + """Eigenvalue decomposition of the visibility matrix. + + Parameters + ---------- + vis: np.ndarray[nfreq, nprod, ntime] + Upper-triangle, product packed visibility matrix. + flag: np.ndarray[nfreq, nsource, ntime] (optional) + Array of 1 or 0 indicating the inputs that should be included + in the eigenvalue decomposition for each frequency and time. + + Returns + ------- + resp: np.ndarray[nfreq, nsource, ntime] + Eigenvector corresponding to the largest eigenvalue for + each frequency and time. + """ + nfreq, nprod, ntime = vis.shape + nsource = int((np.sqrt(8 * nprod + 1) - 1) // 2) + + # Do not bother performing the eigen-decomposition for + # times and frequencies that are entirely flagged + ind = np.where(np.any(flag, axis=1)) + ind = (ind[0], slice(None), ind[1]) + + # Indexing the flag and vis datasets with ind flattens + # the frequency and time dimension. This results in + # flg having shape (nfreq x ntime, nsource) and + # Q having shape (nfreq x ntime, nsource, nsource). + flg = flag[ind].astype(np.float32) + + Q = ( + flg[:, :, np.newaxis] + * flg[:, np.newaxis, :] + * tools.unpack_product_array(vis[ind], axis=1) + ) + + # Solve for eigenvectors and eigenvalues + evals, evecs = np.linalg.eigh(Q) + + # Set phase convention + sign0 = 1.0 - 2.0 * (evecs[:, np.newaxis, 0, -1].real < 0.0) + + # Determine response of each source + resp = np.zeros((nfreq, nsource, ntime), dtype=vis.dtype) + resp[ind] = flg * sign0 * evecs[:, :, -1] * evals[:, np.newaxis, -1] ** 0.5 + + return resp
+ + + +
def fit_poly_to_phase(freq, resp, resp_error, nparam=2):
    """Fit complex data versus frequency to a model consisting of a polynomial in phase.

    Nonlinear least squares algorithm is applied to the complex data to avoid problems
    caused by phase wrapping.

    Parameters
    ----------
    freq: np.ndarray[nfreq, ]
        Frequency in MHz.
    resp: np.ndarray[nfreq, ]
        Complex data with magnitude equal to 1.0.
    resp_error: np.ndarray[nfreq, ]
        Uncertainty on the complex data.
    nparam: int
        Number of parameters in the polynomial.
        Default is 2 (i.e, linear).

    Returns
    -------
    popt: np.ndarray[nparam, ]
        Best-fit parameters.  If the nonlinear fit fails, these are the
        initial (median-based) estimates instead.
    pcov: np.ndarray[nparam, nparam]
        Covariance of the best-fit parameters.  Assumes that it obtained
        a good fit and returns the errors necessary to achieve that.
        Zero if the fit failed.

    Raises
    ------
    RuntimeError
        If fewer than nparam + 1 data points have positive uncertainty.
    """
    # Only fit points with a valid (positive) uncertainty.
    flg = np.flatnonzero(resp_error > 0.0)

    if flg.size < (nparam + 1):
        msg = (
            "Number of data points must be greater than number of parameters (%d)."
            % nparam
        )
        raise RuntimeError(msg)

    # We will fit the complex data.  Break n-element complex array g(ra)
    # into 2n-element real array [Re{g(ra)}, Im{g(ra)}] for fit.
    y_complex = resp[flg]
    y = np.concatenate((y_complex.real, y_complex.imag)).astype(np.float64)

    x = np.tile(freq[flg], 2).astype(np.float64)

    err = np.tile(resp_error[flg], 2).astype(np.float64)

    # Initial guess for parameters: slope from the median phase gradient,
    # offset from the median wrapped residual phase.
    p0 = np.zeros(nparam, dtype=np.float64)
    p0[1] = np.median(
        np.diff(np.angle(y_complex)) / (FREQ_TO_OMEGA * np.diff(freq[flg]))
    )
    p0[0] = np.median(
        _correct_phase_wrap(np.angle(y_complex) - p0[1] * FREQ_TO_OMEGA * freq[flg])
    )

    # Try nonlinear least squares fit.  NOTE: the original placed the return
    # inside a `finally` block, which would silently swallow any exception
    # raised by the handler itself (flake8 B012); the return now follows
    # the try/except instead, with identical results on both paths.
    try:
        popt, pcov = scipy.optimize.curve_fit(
            _func_poly_phase, x, y, p0=p0.copy(), sigma=err, absolute_sigma=False
        )
    except Exception as excep:
        logger.warning("Nonlinear phase fit failed with error: %s" % excep)
        # Fit failed, return the initial parameter estimates
        popt = p0
        pcov = np.zeros((nparam, nparam), dtype=np.float64)

    return popt, pcov
+ + + +
+[docs] +def model_poly_phase(freq, *param): + """Evaluate a polynomial model for the phase. + + To be used with the parameters output from fit_poly_to_phase. + + Parameters + ---------- + freq: np.ndarray[nfreq, ] + Frequency in MHz. + *param: float + Coefficients of the polynomial. + + Returns + ------- + phi: np.ndarray[nfreq, ] + Phase in radians between -pi and +pi. + """ + x = FREQ_TO_OMEGA * freq + + model_phase = np.zeros_like(freq) + for pp, par in enumerate(param): + model_phase += par * x**pp + + model_phase = model_phase % (2.0 * np.pi) + model_phase -= 2.0 * np.pi * (model_phase > np.pi) + + return model_phase
# private functions
# -----------------
def _amplitude_scaling(freq):
    # Expected scaling of the noise-source amplitude with frequency,
    # relative to the pivot frequency FREQ_PIVOT.
    return np.sqrt(freq / FREQ_PIVOT)


def _flagged_median(data, flag, axis=0, keepdims=False):
    # Median of `data` along `axis` using only samples where `flag` is
    # nonzero; entries with no valid samples are left as zero.
    bflag = flag.astype(bool)
    nandata = np.full(data.shape, np.nan, dtype=data.dtype)
    nandata[bflag] = data[bflag]

    # np.sort pushes NaNs to the end, so the first `count` entries along
    # the axis are the valid samples and `count // 2` indexes the median.
    sortdata = np.sort(nandata, axis=axis)
    ieval = np.sum(np.isfinite(sortdata), axis=axis, dtype=int, keepdims=True) // 2

    med = np.zeros(ieval.shape, dtype=sortdata.dtype)
    for aind, sind in np.ndenumerate(ieval):
        find = list(aind)
        find[axis] = sind

        sdata = sortdata[tuple(find)]
        if np.isfinite(sdata):
            med[aind] = sdata

    if not keepdims:
        med = np.squeeze(med, axis=axis)

    return med


def _func_poly_phase(freq, *param):
    # Model used by scipy.optimize.curve_fit in fit_poly_to_phase: the
    # input is the frequency array tiled twice; the output is the
    # [cos(phase), sin(phase)] stack matching the [real, imag] data.
    nreal = len(freq) // 2

    x = FREQ_TO_OMEGA * freq[:nreal]

    model_phase = np.zeros(x.size, dtype=x.dtype)
    for pp, par in enumerate(param):
        model_phase += par * x**pp

    return np.concatenate((np.cos(model_phase), np.sin(model_phase)))


def _correct_phase_wrap(phi):
    # Wrap phase into [-pi, pi).
    return ((phi + np.pi) % (2.0 * np.pi)) - np.pi


def _weight_propagation_addition(*args):
    # Combine inverse-variance weights for a sum of quantities:
    # variances add, so invert, sum, and invert back.
    sum_variance = np.zeros_like(args[0])
    for weight in args:
        sum_variance += tools.invert_no_zero(weight)

    return tools.invert_no_zero(sum_variance)


def _resolve_distributed(arr, axis=1):
    # Return (local ndarray, global shape, list of global indices held
    # locally along `axis`) for either an MPIArray or a plain ndarray.
    if isinstance(arr, mpiarray.MPIArray):
        arr.redistribute(axis)
        global_shape = arr.global_shape
        ilocal = list(
            range(
                arr.local_offset[axis], arr.local_offset[axis] + arr.local_shape[axis]
            )
        )
    else:
        global_shape = arr.shape
        ilocal = list(range(0, global_shape[axis]))

    return arr[:].view(np.ndarray), global_shape, ilocal


def _search_nearest(x, xeval):
    # For each xeval, index of the nearest element of sorted array x.
    index_next = np.searchsorted(x, xeval, side="left")

    index_previous = np.maximum(0, index_next - 1)
    index_next = np.minimum(x.size - 1, index_next)

    index = np.where(
        np.abs(xeval - x[index_previous]) < np.abs(xeval - x[index_next]),
        index_previous,
        index_next,
    )

    return index


def _interpolation_nearest(x, y, var, xeval):
    # Nearest-neighbour interpolation; weight is the inverse variance of
    # the chosen neighbour.
    index = _search_nearest(x, xeval)

    yeval = y[index]
    weval = tools.invert_no_zero(var[index])

    return yeval, weval


def _interpolation_previous(x, y, var, xeval):
    # Hold the previous sample (clamped at the first sample).
    index = np.maximum(np.searchsorted(x, xeval, side="right") - 1, 0)
    return y[index], tools.invert_no_zero(var[index])


def _interpolation_next(x, y, var, xeval):
    # Hold the next sample (clamped at the last sample).
    index = np.minimum(np.searchsorted(x, xeval, side="left"), x.size - 1)
    return y[index], tools.invert_no_zero(var[index])


def _interpolation_linear(x, y, var, xeval):
    # Linear interpolation between bracketing samples, extrapolating from
    # the first/last pair outside the span; weights propagate the variance
    # of the two samples through the linear combination.
    index = np.searchsorted(x, xeval, side="left")

    ind1 = index - 1
    ind2 = index

    below = np.flatnonzero(ind1 == -1)
    if below.size > 0:
        ind1[below] = 0
        ind2[below] = 1

    above = np.flatnonzero(ind2 == x.size)
    if above.size > 0:
        ind1[above] = x.size - 2
        ind2[above] = x.size - 1

    adx1 = xeval - x[ind1]
    adx2 = x[ind2] - xeval

    norm = tools.invert_no_zero(adx1 + adx2)
    a1 = adx2 * norm
    a2 = adx1 * norm

    yeval = a1 * y[ind1] + a2 * y[ind2]
    weval = tools.invert_no_zero(a1**2 * var[ind1] + a2**2 * var[ind2])

    return yeval, weval


def _interpolation_scipy(kind):
    # Factory returning an interpolator with the same signature as the
    # other _interpolation_* functions, backed by scipy.interpolate.
    def _interp1d(x, y, var, xeval):
        interpolator = scipy.interpolate.interp1d(
            x, y, kind=kind, fill_value="extrapolate"
        )
        yeval = interpolator(xeval)

        # For the scipy interpolation, we do not attempt to propagate the errors.
        # Instead we just use the weight from the nearest point.
        index = _search_nearest(x, xeval)
        weval = tools.invert_no_zero(var[index])

        return yeval, weval

    return _interp1d


# Dispatch table mapping interpolation-scheme names to implementations.
INTERPOLATION_LOOKUP = {
    "nearest": _interpolation_nearest,
    "previous": _interpolation_previous,
    "next": _interpolation_next,
    "linear": _interpolation_linear,
}
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/_modules/ch_util/tools.html b/docs/_modules/ch_util/tools.html new file mode 100644 index 00000000..3c9e01ca --- /dev/null +++ b/docs/_modules/ch_util/tools.html @@ -0,0 +1,2913 @@ + + + + + + ch_util.tools — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for ch_util.tools

+"""
+Tools for CHIME analysis
+
+A collection of miscellaneous utility routines.
+
+
+Correlator Inputs
+=================
+
+Query the layout database to find out what is ultimately connected at the end
+of correlator inputs. This is done by calling the routine
+:func:`get_correlator_inputs`, which returns a list of the inputs. Routines
+such as :func:`get_feed_positions` operate on this list.
+
+- :py:meth:`get_correlator_inputs`
+- :py:meth:`get_feed_positions`
+- :py:meth:`get_feed_polarisations`
+- :py:meth:`is_array`
+- :py:meth:`is_array_x`
+- :py:meth:`is_array_y`
+- :py:meth:`is_array_on`
+- :py:meth:`is_chime`
+- :py:meth:`is_pathfinder`
+- :py:meth:`is_holographic`
+- :py:meth:`is_noise_source`
+- :py:meth:`reorder_correlator_inputs`
+- :py:meth:`redefine_stack_index_map`
+- :py:meth:`serial_to_id`
+- :py:meth:`serial_to_location`
+- :py:meth:`parse_chime_serial`
+- :py:meth:`parse_pathfinder_serial`
+- :py:meth:`parse_old_serial`
+- :py:meth:`get_noise_source_index`
+- :py:meth:`get_holographic_index`
+- :py:meth:`change_pathfinder_location`
+- :py:meth:`change_chime_location`
+- :py:meth:`beam_index2number`
+
+This can determine if we are connected to any of the following:
+
+- :py:class:`HolographyAntenna`
+- :py:class:`ArrayAntenna`
+- :py:class:`PathfinderAntenna`
+- :py:class:`CHIMEAntenna`
+- :py:class:`RFIAntenna`
+- :py:class:`NoiseSource`
+- :py:class:`Blank`
+
+Example
+-------
+
+Fetch the inputs for blanchard during layout 38::
+
+    >>> from datetime import datetime
+    >>> inputs = get_correlator_inputs(datetime(2016, 5, 23, 0), correlator='pathfinder')
+    >>> inputs[1]
+    CHIMEAntenna(id=1, reflector=u'W_cylinder', antenna=u'ANT0123B', powered=True, pos=9.071800000000001, input_sn=u'K7BP16-00040401', pol=u'S', corr=u'K7BP16-0004', cyl=0)
+    >>> print("NS position:", inputs[1].pos)
+    NS position: 9.0718
+    >>> print("Polarisation:", inputs[1].pol)
+    Polarisation: S
+    >>> inputs[3]
+    CHIMEAntenna(id=3, reflector=u'W_cylinder', antenna=u'ANT0128B', powered=True, pos=9.681400000000002, input_sn=u'K7BP16-00040403', pol=u'S', corr=u'K7BP16-0004', cyl=0)
+
+Housekeeping Inputs
+===================
+
+Functions
+---------
+
+- :py:meth:`antenna_to_lna`
+- :py:meth:`calibrate_temperature`
+- :py:meth:`hk_to_sensor`
+- :py:meth:`lna_to_antenna`
+- :py:meth:`sensor_to_hk`
+
+Classes
+-------
+
+- :py:class:`HKInput`
+
+
+Product Array Mapping
+=====================
+
+Tools for mapping between products stored in upper triangular format, and the
+underlying pairs of inputs.
+
+- :py:meth:`cmap`
+- :py:meth:`icmap`
+- :py:meth:`fast_pack_product_array`
+- :py:meth:`pack_product_array`
+- :py:meth:`unpack_product_array`
+
+
+Matrix Factorisation
+====================
+
+A few useful routines for factorising matrices, usually for calibration.
+
+- :py:meth:`eigh_no_diagonal`
+- :py:meth:`rankN_approx`
+- :py:meth:`normalise_correlations`
+- :py:meth:`apply_gain`
+- :py:meth:`subtract_rank1_signal`
+
+
+Fringestopping
+==============
+
+Routines for undoing the phase rotation of a fixed celestial source. The
+routine :func:`fringestop` is an easy to use routine for fringestopping data
+given a list of the feeds in the data. For more advanced usage
+:func:`fringestop_phase` can be used.
+
+- :py:meth:`fringestop_phase`
+- :py:meth:`fringestop`
+
+Miscellaneous
+=============
+
+- :py:meth:`ensure_list`
+"""
+
+import datetime
+import numpy as np
+import scipy.linalg as la
+import re
+from typing import Tuple
+
+from caput import pfb
+from caput.interferometry import projected_distance, fringestop_phase
+
+from ch_util import ephemeris
+
+# Currently the position between the Pathfinder and 26m have been
+# calibrated with holography, but positions between CHIME and
+# Pathfinder/26m have not (they were determined from high-res
+# satellite images and are only approximate).  We need to
+# use CHIME holography data to constrain distance [x, y, z] between
+# CHIME and 26m.  I then recommend defining our coordinate system
+# such that center of CHIME array is the origin (so leaving variable
+# _CHIME_POS alone, and updating _PF_POS and _26M_POS appropriately.)
+
+# CHIME geometry
+_CHIME_POS = [0.0, 0.0, 0.0]
+# CHIME rotation from north. Anti-clockwise looking at the ground (degrees).
+# See DocLib #695 for more information.
+_CHIME_ROT = -0.071
+
+# 26m geometry
+# [x, y, z] in metres relative to the CHIME origin (x eastward, y northward,
+# z upward); see the coordinate-system note above.
+_26M_POS = [254.162124, 21.853934, 18.93]
+_26M_B = 2.14  # m
+
+# Pathfinder geometry
+_PF_POS = [373.754961, -54.649866, 0.0]
+_PF_ROT = 1.986  # Pathfinder rotation from north (towards west) in degrees
+_PF_SPACE = 22.0  # Pathfinder cylinder spacing
+
+# KKO geometry
+_KKO_POS = [0.0, 0.0, 0.0]
+_KKO_ROT = 0.6874
+_KKO_ROLL = 0.5888
+_PCO_POS = _KKO_POS
+_PCO_ROT = _KKO_ROT  # Aliases for backwards-compatibility
+# KKO_ROT = rotation of cylinder axis from North. Anti-clockwise looking at the ground (degrees).
+# KKO_ROLL = roll of cylinder toward east from Vertical. Anti-clockwise looking North along the focal line.
+# See Doclib #1530 and #1121 for more information.
+
+# GBO geometry
+_GBO_POS = [0.0, 0.0, 0.0]
+_GBO_ROT = -27.3745
+_GBO_ROLL = -30.0871
+
+# HCO geometry
+_HCO_POS = [0.0, 0.0, 0.0]
+_HCO_ROT = -0.8023
+_HCO_ROLL = 1.0556
+
+
+# Site [latitude, longitude] in degrees.
+_LAT_LON = {
+    "chime": [49.3207125, -119.623670],
+    "pathfinder": [49.3202245, -119.6183635],
+    "galt_26m": [49.320909, -119.620174],
+    "gbo_tone": [38.4292962636, -79.8451625395],
+}
+
+# Classes
+# =======
+
+
+
class HKInput(object):
    """A housekeeping input.

    Parameters
    ----------
    atmel : :obj:`layout.component`
        The ATMEL board.
    chan : int
        The channel number.
    mux : int
        The mux number; if this HK stream has no multiplexer, this will simply
        remain as :obj:`None`.

    Attributes
    ----------
    atmel : :obj:`layout.component`
        The ATMEL board.
    chan : int
        The channel number.
    mux : int
        The mux number; if this HK stream has no multiplexer, this will simply
        remain as :obj:`None`.
    """

    atmel = None
    chan = None
    mux = None

    def __init__(self, atmel=None, chan=None, mux=None):
        self.atmel = atmel
        self.chan = chan
        self.mux = mux

    def __repr__(self):
        ret = "<HKInput atmel=%s chan=%d " % (self.atmel.sn, self.chan)
        # Compare against None explicitly: mux number 0 is a valid multiplexer
        # (sensor_to_hk can produce it) and must not be shown as "(no mux)".
        if self.mux is not None:
            ret += "mux=%d>" % self.mux
        else:
            ret += "(no mux)>"
        return ret
+ + + +
class CorrInput(object):
    """Base class for describing a correlator input.

    Meant to be subclassed by actual types of inputs.

    Attributes
    ----------
    input_sn : str
        Unique serial number of input.
    corr : str
        Unique serial number of correlator.
        Set to `None` if no correlator is connected.
    corr_order : int
        Order of input for correlator internal datastream.
    crate : int
        Crate number within the correlator.
        Set to `None` if correlator consists of single crate.
    slot : int
        Slot number of the fpga motherboard within the crate.
        Ranges from 0 to 15, left to right.
        Set to `None` if correlator consists of single slot.
    sma : int
        SMA number on the fpga motherboard within the slot.
        Ranges from 0 to 15, bottom to top.
    """

    def __init__(self, **input_dict):
        import inspect

        # Walk the MRO from base to subclass so subclass definitions win,
        # initialising every public attribute/property from the kwargs.
        for klass in inspect.getmro(type(self))[::-1]:
            for name, member in klass.__dict__.items():
                if name.startswith("_"):
                    continue
                value = input_dict.get(name, None)
                if not isinstance(member, property):
                    self.__dict__[name] = value
                elif member.fset is not None:
                    # Only assign through properties that define a setter.
                    member.fset(self, value)

    def _attribute_strings(self):
        # Derived/property values first (skipping any that are unset)...
        derived = [
            (name, getattr(self, name))
            for name in ["id", "crate", "slot", "sma", "corr_order", "delay"]
        ]
        parts = [
            "%s=%s" % (name, repr(value)) for name, value in derived if value is not None
        ]
        # ...then every public instance attribute.
        parts += [
            "%s=%s" % (name, repr(value))
            for name, value in self.__dict__.items()
            if not name.startswith("_")
        ]
        return parts

    def __repr__(self):
        return "%s(%s)" % (self.__class__.__name__, ", ".join(self._attribute_strings()))

    @property
    def id(self):
        """Channel ID. Automatically calculated from the serial number
        if id is not explicitly set.

        Returns
        -------
        id : int
            Channel id. Calculated from the serial.
        """
        try:
            return self._id
        except AttributeError:
            return serial_to_id(self.input_sn)

    @id.setter
    def id(self, val):
        if val is not None:
            self._id = val

    @property
    def corr_order(self):
        """Correlator-internal ordering, derived from the serial number."""
        return serial_to_location(self.input_sn)[0]

    @property
    def crate(self):
        """Crate number, derived from the serial number."""
        return serial_to_location(self.input_sn)[1]

    @property
    def slot(self):
        """Slot number, derived from the serial number."""
        return serial_to_location(self.input_sn)[2]

    @property
    def sma(self):
        """SMA number, derived from the serial number."""
        return serial_to_location(self.input_sn)[3]

    @property
    def delay(self):
        """The delay along the signal chain in seconds.

        Positive delay values mean signals arriving later than the nominal value.

        Note that these are always relative. Here CHIME inputs are chosen as
        the delay=0 reference.
        """
        return getattr(self, "_delay", 0)

    input_sn = None
    corr = None
+ + + +
class Blank(CorrInput):
    """An unconnected correlator input."""
+ + + +
class Antenna(CorrInput):
    """An antenna input.

    Attributes
    ----------
    reflector : str
        The name of the reflector the antenna is on.
    antenna : str
        Serial number of the antenna.
    rf_thru : str
        Serial number of the RF room thru that the connection passes.
    """

    # Populated per-instance by CorrInput.__init__ from keyword arguments.
    reflector = None
    antenna = None
    rf_thru = None
+ + + +
class RFIAntenna(Antenna):
    """Antenna dedicated to RFI monitoring."""
+ + + +
class NoiseSource(CorrInput):
    """Broad band noise calibration source."""
+ + + +
class ArrayAntenna(Antenna):
    """Antenna that is part of a cylindrical interferometric array.

    Attributes
    ----------
    cyl : int
        Index of the cylinder.
    pos : [x, y, z]
        Position of the antenna in meters in right-handed coordinates
        where x is eastward, y is northward, and z is upward.
    pol : str
        Orientation of the polarisation.
    flag : bool
        Flag indicating whether or not the antenna is good.
    """

    # Per-telescope geometry; subclasses override these class attributes.
    _rotation = 0.0
    _roll = 0.0
    _offset = [0.0] * 3

    cyl = None
    pol = None
    flag = None

    def _attribute_strings(self):
        parts = super(ArrayAntenna, self)._attribute_strings()
        if self.pos is not None:
            parts.append("pos=[%s]" % ", ".join("%0.2f" % coord for coord in self.pos))
        return parts

    @property
    def pos(self):
        if not hasattr(self, "_pos"):
            return None

        pos = self._pos

        if self._rotation:
            # Rotate the local coordinates about vertical by the telescope
            # rotation angle.
            theta = np.radians(self._rotation)
            cos_t, sin_t = np.cos(theta), np.sin(theta)
            pos = [
                cos_t * pos[0] - sin_t * pos[1],
                sin_t * pos[0] + cos_t * pos[1],
                pos[2],
            ]

        if any(self._offset):
            # Translate from the telescope-local origin to the global one.
            pos = [coord + shift for coord, shift in zip(pos, self._offset)]

        return pos

    @pos.setter
    def pos(self, val):
        # Accept any sequence of at least two coordinates; missing entries
        # (e.g. z) default to zero.
        if (val is not None) and hasattr(val, "__iter__") and (len(val) > 1):
            self._pos = [0.0] * 3
            for axis, coord in enumerate(val):
                self._pos[axis] = coord
+ + + +
class PathfinderAntenna(ArrayAntenna):
    """Antenna that is part of the Pathfinder.

    Attributes
    ----------
    powered : bool
        Flag indicating that the antenna is powered.
    """

    _rotation = _PF_ROT
    _offset = _PF_POS

    # The delay relative to other inputs isn't really known. Set to NaN so we
    # don't make any mistakes.
    _delay = np.nan

    powered = None
+ + + +
class CHIMEAntenna(ArrayAntenna):
    """Antenna that is part of CHIME."""

    _rotation = _CHIME_ROT
    _offset = _CHIME_POS
    # CHIME antennas define the delay zero point.
    _delay = 0
+ + + +
class KKOAntenna(ArrayAntenna):
    """KKO outrigger antenna for the CHIME/FRB project."""

    _rotation = _KKO_ROT
    _roll = _KKO_ROLL
    _offset = _KKO_POS
    # Delay relative to CHIME is not known.
    _delay = np.nan


# Alias kept for backwards-compatibility with the old outrigger name.
PCOAntenna = KKOAntenna
class GBOAntenna(ArrayAntenna):
    """GBO outrigger antenna for the CHIME/FRB project."""

    _rotation = _GBO_ROT
    _roll = _GBO_ROLL
    _offset = _GBO_POS
    # Delay relative to CHIME is not known.
    _delay = np.nan
+ + + +
class HCOAntenna(ArrayAntenna):
    """HCRO outrigger antenna for the CHIME/FRB project."""

    _rotation = _HCO_ROT
    _roll = _HCO_ROLL
    _offset = _HCO_POS
    # Delay relative to CHIME is not known.
    _delay = np.nan
+ + + +
class TONEAntenna(ArrayAntenna):
    """Antenna that is part of GBO/TONE Outrigger.

    Allows for a global rotation and offset.
    """

    _rotation = 0.00
    _offset = [0.00, 0.00, 0.00]
    _delay = np.nan
+ + + +
class HolographyAntenna(Antenna):
    """Antenna used for holography.

    Attributes
    ----------
    pos : [x, y, z]
        Position of the antenna in meters in right-handed coordinates
        where x is eastward, y is northward, and z is upward.
    pol : str
        Orientation of the polarisation.
    """

    pos = None
    pol = None
    _delay = 1.475e-6  # From doclib:1093
# Private Functions
# =================


def _ensure_graph(graph):
    """Return `graph` as a layout.graph, building one if a time was passed."""
    from . import layout

    try:
        graph.sg_spec
    except Exception:  # was a bare except; don't swallow KeyboardInterrupt etc.
        # Not already a graph; assume it is a time and construct one.
        graph = layout.graph(graph)
    return graph


def _get_feed_position(lay, rfl, foc, cas, slt, slot_factor):
    """Calculate feed position from node properties.

    Parameters
    ----------
    lay : layout.graph
        Layout instance to search from.
    rfl : layout.component
        Reflector.
    foc : layout.component
        Focal line slot.
    cas : layout.component
        Cassette.
    slt : layout.component
        Cassette slot.
    slot_factor : float
        1.5 for CHIME, 0.5 for Outriggers

    Returns
    -------
    pos : list
        x,y,z coordinates of the feed relative to the centre of the focal line.
    """
    try:
        pos = [0.0] * 3

        # Accumulate the offsets of every node along the chain.
        for node in [rfl, foc, cas, slt]:
            prop = lay.node_property(node)

            for ind, dim in enumerate(["x_offset", "y_offset", "z_offset"]):
                if dim in prop:
                    pos[ind] += float(prop[dim].value)  # in metres

        if "y_offset" not in lay.node_property(slt):
            # Fall back to deriving the N-S offset from the slot number
            # (0.3048 m = 1 ft spacing).
            pos[1] += (float(slt.sn[-1]) - slot_factor) * 0.3048

    except Exception:  # was a bare except
        # Any missing property or malformed serial means the position is
        # unknown.
        pos = None

    return pos


def _get_input_props(lay, corr_input, corr, rfl_path, rfi_antenna, noise_source):
    """Fetch all the required properties of an ADC channel or correlator input.

    Parameters
    ----------
    lay : layout.graph
        Layout instance to search from.
    corr_input : layout.component
        ADC channel or correlator input.
    corr : layout.component
        Correlator.
    rfl_path : [layout.component]
        Path from input to reflector, or None.
    rfi_antenna : layout.component
        Closest RFI antenna
    noise_source : layout.component
        Closest noise source.

    Returns
    -------
    channel : CorrInput
        An instance of `CorrInput` containing the channel properties.
    """

    if corr is not None:
        corr_sn = corr.sn
    else:
        corr_sn = None

    # Check if the correlator input component contains a chan_id property
    corr_prop = lay.node_property(corr_input)
    chan_id = int(corr_prop["chan_id"].value) if "chan_id" in corr_prop else None

    rfl = None
    cas = None
    slt = None
    ant = None
    pol = None
    rft = None
    if rfl_path is not None:
        rfl = rfl_path[-1]

        def find(name):
            # Unique component of the given type on the interior of the path.
            f = [a for a in rfl_path[1:-1] if a.type.name == name]
            return f[0] if len(f) == 1 else None

        foc = find("focal line slot")
        cas = find("cassette")
        slt = find("cassette slot")
        ant = find("antenna")
        pol = find("polarisation")

        for rft_name in ["rf room thru", "RFT thru"]:
            rft = find(rft_name)
            if rft is not None:
                break

    # If the antenna does not exist, it might be the RFI antenna, the noise
    # source, or empty
    if ant is None:
        if rfi_antenna is not None:
            rfl = lay.closest_of_type(
                rfi_antenna,
                "reflector",
                type_exclude=["correlator card slot", "ADC board"],
            )
            rfl_sn = rfl.sn if rfl is not None else None
            return RFIAntenna(
                id=chan_id,
                input_sn=corr_input.sn,
                corr=corr_sn,
                reflector=rfl_sn,
                antenna=rfi_antenna.sn,
            )

        # Check to see if it is a noise source
        if noise_source is not None:
            return NoiseSource(id=chan_id, input_sn=corr_input.sn, corr=corr_sn)

        # If we get to here, it's probably a blank input
        return Blank(id=chan_id, input_sn=corr_input.sn, corr=corr_sn)

    # Determine polarization from antenna properties
    try:
        keydict = {
            "H": "hpol_orient",
            "V": "vpol_orient",
            "1": "pol1_orient",
            "2": "pol2_orient",
        }

        pkey = keydict[pol.sn[-1]]
        pdir = lay.node_property(ant)[pkey].value

    except Exception:  # was a bare except; unknown polarisation
        pdir = None

    # Determine serial number of RF thru
    rft_sn = getattr(rft, "sn", None)

    # If the cassette does not exist, must be holography antenna
    if slt is None:
        return HolographyAntenna(
            id=chan_id,
            input_sn=corr_input.sn,
            corr=corr_sn,
            reflector=rfl.sn,
            pol=pdir,
            antenna=ant.sn,
            rf_thru=rft_sn,
            pos=_26M_POS,
        )

    # If we are still here, we are a CHIME/Pathfinder feed

    # Determine if the correlator input has been manually flagged as good or bad
    flag = (
        bool(int(corr_prop["manual_flag"].value))
        if "manual_flag" in corr_prop
        else True
    )

    # Map the cylinder name in the database into a number. This might
    # be worth changing, such that we could also map into letters
    # (i.e. A, B, C, D) to save confusion.
    pos_dict = {
        "W_cylinder": 0,
        "E_cylinder": 1,
        "cylinder_A": 2,
        "cylinder_B": 3,
        "cylinder_C": 4,
        "cylinder_D": 5,
        "pco_cylinder": 6,
        "gbo_cylinder": 7,
        "hcro_cylinder": 8,
    }

    cyl = pos_dict[rfl.sn]

    # Different conventions for CHIME, PCO, GBO, HCRO, and Pathfinder
    if cyl >= 2 and cyl <= 5:
        # Dealing with a CHIME feed

        # Determine position
        pos = _get_feed_position(
            lay=lay, rfl=rfl, foc=foc, cas=cas, slt=slt, slot_factor=1.5
        )

        # Return CHIMEAntenna object
        return CHIMEAntenna(
            id=chan_id,
            input_sn=corr_input.sn,
            corr=corr_sn,
            reflector=rfl.sn,
            cyl=cyl,
            pos=pos,
            pol=pdir,
            antenna=ant.sn,
            rf_thru=rft_sn,
            flag=flag,
        )

    elif cyl == 0 or cyl == 1:
        # Dealing with a pathfinder feed

        # Determine y_offset
        try:
            pos = [0.0] * 3

            pos[0] = cyl * _PF_SPACE

            cas_prop = lay.node_property(cas)
            slt_prop = lay.node_property(slt)

            d1 = float(cas_prop["dist_to_n_end"].value) / 100.0  # in metres
            d2 = float(slt_prop["dist_to_edge"].value) / 100.0  # in metres
            orient = cas_prop["slot_zero_pos"].value

            pos[1] = d1 + d2 if orient == "N" else d1 - d2

            # Turn into distance increasing from South to North.
            pos[1] = 20.0 - pos[1]

        except Exception:  # was a bare except
            pos = None

        # Try and determine if the FLA is powered or not. Paths without an
        # FLA (e.g. RFoF paths) are assumed to be powered on.
        pwd = True

        if rft is not None:
            rft_prop = lay.node_property(rft)

            if "powered" in rft_prop:
                pwd = rft_prop["powered"].value
                pwd = bool(int(pwd))

        # Return PathfinderAntenna object
        return PathfinderAntenna(
            id=chan_id,
            input_sn=corr_input.sn,
            corr=corr_sn,
            reflector=rfl.sn,
            cyl=cyl,
            pos=pos,
            pol=pdir,
            antenna=ant.sn,
            rf_thru=rft_sn,
            powered=pwd,
            flag=flag,
        )

    elif cyl == 6:
        # Dealing with an KKO feed

        # Determine position
        pos = _get_feed_position(
            lay=lay, rfl=rfl, foc=foc, cas=cas, slt=slt, slot_factor=0.5
        )

        # Return KKOAntenna object
        return KKOAntenna(
            id=chan_id,
            input_sn=corr_input.sn,
            corr=corr_sn,
            reflector=rfl.sn,
            cyl=cyl,
            pos=pos,
            pol=pdir,
            antenna=ant.sn,
            rf_thru=rft_sn,
            flag=flag,
        )

    elif cyl == 7:
        # Dealing with a GBO feed

        # Determine position
        pos = _get_feed_position(
            lay=lay, rfl=rfl, foc=foc, cas=cas, slt=slt, slot_factor=0.5
        )

        # Return GBOAntenna object
        return GBOAntenna(
            id=chan_id,
            input_sn=corr_input.sn,
            corr=corr_sn,
            reflector=rfl.sn,
            cyl=cyl,
            pos=pos,
            pol=pdir,
            antenna=ant.sn,
            rf_thru=rft_sn,
            flag=flag,
        )

    elif cyl == 8:
        # Dealing with a HCO feed

        # Determine position
        pos = _get_feed_position(
            lay=lay, rfl=rfl, foc=foc, cas=cas, slt=slt, slot_factor=0.5
        )

        # Return HCOAntenna object
        return HCOAntenna(
            id=chan_id,
            input_sn=corr_input.sn,
            corr=corr_sn,
            reflector=rfl.sn,
            cyl=cyl,
            pos=pos,
            pol=pdir,
            antenna=ant.sn,
            rf_thru=rft_sn,
            flag=flag,
        )


# Public Functions
# ================
def calibrate_temperature(raw):
    """Calibrate housekeeping temperatures.

    The offset used here is rough; the results are therefore not absolutely
    precise.

    Parameters
    ----------
    raw : numpy array
        The raw values.

    Returns
    -------
    t : numpy array
        The temperature in degrees Kelvin.
    """
    import numpy

    # Convert the raw reading (minus a rough offset of 150) into a
    # thermistor resistance...
    resistance = 2000.0 * (8320.0 / (raw - 150.0) - 1.0)
    # ...then invert the beta-model thermistor equation
    # (beta = 3950, R = 1e4 at T = 298 K).
    return 1.0 / (1.0 / 298.0 + numpy.log(resistance / 1.0e4) / 3950.0)
+ + + +
def antenna_to_lna(graph, ant, pol):
    """Find an LNA connected to an antenna.

    Parameters
    ----------
    graph : obj:`layout.graph` or :obj:`datetime.datetime`
        The graph in which to do the search. If you pass a time, then the graph
        will be constructed internally. (Note that the latter option will be
        quite slow if you do repeated calls!)
    ant : :obj:`layout.component`
        The antenna.
    pol : integer
        There can be up to two LNA's connected to the two polarisation outputs
        of an antenna. Select which by passing :obj:`1` or :obj:`2`. (Note that
        conversion to old-style naming 'A' and 'B' is done automatically.)

    Returns
    -------
    lna : :obj:`layout.component` or string
        The LNA.

    Raises
    ------
    :exc:`layout.NotFound`
        Raised if the polarisation connector could not be found in the graph.
    """
    from . import layout

    graph = _ensure_graph(graph)

    # Match either new-style ("1"/"2") or old-style ("B"/"C" = chr('A'+pol))
    # serial suffixes on the polarisation connector.
    wanted = (str(pol), chr(ord("A") + pol))
    pol_comp = None
    for candidate in graph.neighbour_of_type(graph.component(comp=ant), "polarisation"):
        if candidate.sn[-1] in wanted:
            pol_comp = candidate
            break
    if not pol_comp:
        raise layout.NotFound
    try:
        return graph.neighbour_of_type(pol_comp, "LNA")[0]
    except IndexError:
        return None
+ + + +
def lna_to_antenna(graph, lna):
    """Find an antenna connected to an LNA.

    Parameters
    ----------
    graph : obj:`layout.graph` or :obj:`datetime.datetime`
        The graph in which to do the search. If you pass a time, then the graph
        will be constructed internally. (Note that the latter option will be
        quite slow if you do repeated calls!)
    lna : :obj:`layout.component` or string
        The LNA.

    Returns
    -------
    antenna : :obj:`layout.component`
        The antenna.
    """
    lay = _ensure_graph(graph)
    lna_comp = lay.component(comp=lna)
    # Walk towards the antenna, but never through the long coax run.
    return lay.closest_of_type(lna_comp, "antenna", type_exclude="60m coax")
+ + + +
def sensor_to_hk(graph, comp):
    """Find what housekeeping channel a component is connected to.

    Parameters
    ----------
    graph : obj:`layout.graph` or :obj:`datetime.datetime`
        The graph in which to do the search. If you pass a time, then the graph
        will be constructed internally. (Note that the latter option will be
        quite slow if you do repeated calls!)
    comp : :obj:`layout.component` or string
        The component to search for (you can pass by serial number if you wish).
        Currently, only components of type LNA, FLA and RFT thru are accepted.

    Returns
    -------
    inp : :obj:`HKInput`
        The housekeeping input channel the sensor is connected to.
    """
    graph = _ensure_graph(graph)
    comp = graph.component(comp=comp)
    kind = comp.type.name

    if kind == "LNA":
        # LNA housekeeping goes through a multiplexer.
        mux = graph.closest_of_type(
            comp, "HK mux", type_exclude=["polarisation", "cassette", "60m coax"]
        )
        if not mux:
            return None
        try:
            hydra = graph.neighbour_of_type(comp, "HK hydra")[0]
        except IndexError:
            return None

        # Channels 0-7 sit on the "A" side of the mux, 8-15 on the "B" side.
        chan = int(hydra.sn[-1])
        if mux.sn[-1] == "B":
            chan += 8

        # Find the ATMEL board.
        atmel = graph.closest_of_type(
            hydra, "HK ATMega", type_exclude=["cassette", "antenna"]
        )

        return HKInput(atmel, chan, int(mux.sn[-2]))

    if kind in ("FLA", "RFT thru"):
        if kind == "FLA":
            # Hop from the FLA over to its RF thru, which carries the hydra.
            try:
                comp = graph.neighbour_of_type(comp, "RFT thru")[0]
            except IndexError:
                return None
        try:
            hydra = graph.neighbour_of_type(comp, "HK hydra")[0]
        except IndexError:
            return None

        # Find the ATMEL board; no multiplexer on this chain.
        atmel = graph.closest_of_type(
            hydra, "HK ATMega", type_exclude=["RFT thru", "FLA", "SMA coax"]
        )

        return HKInput(atmel, int(hydra.sn[-1]), None)

    raise ValueError("You can only pass components of type LNA, FLA or RFT thru.")
+ + + +
def hk_to_sensor(graph, inp):
    """Find what component a housekeeping channel is connected to.

    This method is for finding either LNA or FLA's that your housekeeping
    channel is connected to. (It currently cannot find accelerometers, other
    novel housekeeping instruments that may later exist; nor will it work if the
    FLA/LNA is connected via a very non-standard chain of components.)

    Parameters
    ----------
    graph : obj:`layout.graph` or :obj:`datetime.datetime`
        The graph in which to do the search. If you pass a time, then the graph
        will be constructed internally. (Note that the latter option will be
        quite slow if you do repeated calls!)
    inp : :obj:`HKInput`
        The housekeeping input to search.

    Returns
    -------
    comp : :obj:`layout.component`
        The LNA/FLA connected to the specified channel; :obj:`None` is returned
        if none is found.

    Raises
    ------
    :exc:`ValueError`
        Raised if one of the channels or muxes passed in **hk_chan** is out of
        range.
    """

    from . import layout

    graph = _ensure_graph(graph)

    # Inspect everything hanging off the ATMEL board to figure out what kind
    # of housekeeping chain this channel belongs to.
    for neighbour in graph.neighbours(graph.component(comp=inp.atmel)):
        if neighbour.type.name == "HK preamp":
            # A preamp heading out to FLA's.
            if not 0 <= inp.chan <= 7:
                raise ValueError(
                    "For FLA housekeeping, the channel number "
                    "must be in the range [0, 7]."
                )
            for hydra in graph.neighbour_of_type(neighbour, "HK hydra"):
                if hydra.sn[-1] == str(inp.chan):
                    return graph.closest_of_type(hydra, "FLA", type_exclude="HK preamp")

        if neighbour.type.name == "HK mux box":
            # A mux box heading out to LNA's.
            if not 0 <= inp.mux <= 7:
                raise ValueError(
                    "For LNA housekeeping, the mux number must be "
                    "in the range [0, 7]."
                )
            if not 0 <= inp.chan <= 15:
                raise ValueError(
                    "For LNA housekeeping, the channel number "
                    "must be in the range [0, 15]."
                )

            # Construct the S/N of the mux connector and get it.
            side = "A" if inp.chan < 8 else "B"
            sn = "%s%d%s" % (neighbour.sn, inp.mux, side)
            try:
                mux_card = graph.component(comp=sn)
            except layout.NotFound:
                return None

            # Find the closest preamp and the hydra cable corresponding to the
            # channel requested.
            preamp = graph.closest_of_type(
                mux_card, "HK preamp", type_exclude="HK mux box"
            )
            if not preamp:
                return None

            for hydra in graph.neighbour_of_type(preamp, "HK hydra"):
                if hydra.sn[-1] == str(inp.chan % 8):
                    try:
                        return graph.neighbour_of_type(hydra, "LNA")[0]
                    except IndexError:
                        return None
    return None
+ + + +
def parse_chime_serial(sn):
    """Parse a serial number into crate, slot, and SMA number.

    Parameters
    ----------
    sn: str
        Serial number to parse

    Returns
    -------
    crate: int
        Crate number
    slot: int
        Slot number
    sma: int
        SMA number

    Raises
    ------
    RuntimeError:
        `sn` did not have the correct format.
    """
    # Raw string: "\d" in a plain string literal is an invalid escape
    # sequence (SyntaxWarning on modern Python).
    mo = re.match(r"FCC(\d{2})(\d{2})(\d{2})", sn)

    if mo is None:
        raise RuntimeError(
            "Serial number %s does not match expected CHIME format." % sn
        )

    crate = int(mo.group(1))
    slot = int(mo.group(2))
    sma = int(mo.group(3))

    return crate, slot, sma
+ + + +
def parse_pathfinder_serial(sn):
    """Parse a pathfinder serial number into crate, slot, and SMA number.

    Parameters
    ----------
    sn: str
        Serial number to parse

    Returns
    -------
    crate: int
        Crate number
    slot: int
        Slot number
    sma: int
        SMA number

    Raises
    ------
    RuntimeError:
        `sn` did not have the correct format.
    """
    # Raw string: "\d"/"\w" in a plain string literal are invalid escape
    # sequences (SyntaxWarning on modern Python).
    mo = re.match(r"(\w{6}\-\d{4})(\d{2})(\d{2})", sn)

    if mo is None:
        raise RuntimeError(
            "Serial number %s does not match expected Pathfinder format." % sn
        )

    crate = mo.group(1)
    slot = int(mo.group(2))
    sma = int(mo.group(3))

    return crate, slot, sma
+ + + +
def parse_old_serial(sn):
    """Parse an old 8/16-channel serial number into slot, and SMA number.

    Parameters
    ----------
    sn: str
        Serial number to parse

    Returns
    -------
    slot: int
        Slot number
    sma: int
        SMA number

    Raises
    ------
    RuntimeError:
        `sn` did not have the correct format.
    """
    # Raw string: "\d" in a plain string literal is an invalid escape
    # sequence (SyntaxWarning on modern Python).
    mo = re.match(r"(\d{5}\-\d{4}\-\d{4})\-C(\d{1,2})", sn)

    if mo is None:
        raise RuntimeError(
            "Serial number %s does not match expected 8/16 channel format." % sn
        )

    slot = mo.group(1)
    sma = int(mo.group(2))

    return slot, sma
+ + + +
def serial_to_id(serial):
    """Get the channel ID corresponding to a correlator input serial number.

    Parameters
    ----------
    serial : string
        Correlator input serial number.

    Returns
    -------
    id : int
        The channel id, or -1 if it cannot be derived from the serial number
        (CHIME serials, whose ids live in the layout database, or serials in
        no known format).
    """

    # Map a slot and SMA to channel id for Pathfinder
    def get_pathfinder_channel(slot, sma):
        # Base channel id for each slot; slot 0 is unused.
        c = [
            None,
            80,
            16,
            64,
            0,
            208,
            144,
            192,
            128,
            240,
            176,
            224,
            160,
            112,
            48,
            96,
            32,
        ]
        return c[slot] + sma if slot > 0 else sma

    # Determine ID
    try:
        parse_chime_serial(serial)
    except RuntimeError:
        pass
    else:
        # CHIME chan_id is defined in the layout database, not the serial.
        return -1

    try:
        res = parse_pathfinder_serial(serial)
    except RuntimeError:
        pass
    else:
        return get_pathfinder_channel(*(res[1:]))

    try:
        res = parse_old_serial(serial)
    except RuntimeError:
        pass
    else:
        return res[1]

    return -1
+ + + +
def serial_to_location(serial):
    """Get the internal correlator ordering and the
    crate, slot, and sma number from a correlator input serial number.

    Parameters
    ----------
    serial : string
        Correlator input serial number.

    Returns
    -------
    location : 4-tuple
        (corr_order, crate, slot, sma)
    """

    unknown = (None, None, None, None)
    if serial is None:
        return unknown

    # Map slot and sma to the position within a crate.
    def crate_channel(slot, sma):
        sma_to_adc = [12, 13, 14, 15, 8, 9, 10, 11, 4, 5, 6, 7, 0, 1, 2, 3]
        return slot * 16 + sma_to_adc[sma]

    try:
        crate, slot, sma = parse_chime_serial(serial)
    except RuntimeError:
        pass
    else:
        return (crate * 256 + crate_channel(slot, sma), crate, slot, sma)

    try:
        _, slot, sma = parse_pathfinder_serial(serial)
    except RuntimeError:
        pass
    else:
        # Use convention that slot number starts at 0 for consistency with CHIME
        slot -= 1
        return (crate_channel(slot, sma), None, slot, sma)

    try:
        _, sma = parse_old_serial(serial)
    except RuntimeError:
        pass
    else:
        return (sma, None, None, sma)

    return unknown
+ + + +
def get_default_frequency_map_stream() -> Tuple[np.ndarray, np.ndarray]:
    """Get the default CHIME frequency map stream.

    Level order is [shuffle, crate, slot, link].

    Returns
    -------
    stream
        [shuffle, crate, slot, link] for each frequency bin
    stream_id
        stream_id for each map combination
        shuffle*2**12 + crate*2**8 + slot*2**4 + link
    """
    stream = np.empty((1024, 4), dtype=np.int32)

    # shuffle
    stream[:, 0] = 3
    # crate
    stream[:, 1] = np.tile(np.arange(2).repeat(16), 32)
    # slot
    stream[:, 2] = np.tile(np.arange(16), 64)
    # link
    stream[:, 3] = np.tile(np.arange(8).repeat(32), 4)

    # Pack the four 4-bit levels into a stream_id. The crate level occupies
    # bits 8-11 (factor 2**8) — it was previously scaled by 2**12, which
    # collided with the shuffle bits and disagreed with both the docstring
    # above and the decoder in `order_frequency_map_stream`.
    stream_id = (
        stream[:, 0] * 2**12 + stream[:, 1] * 2**8 + stream[:, 2] * 2**4 + stream[:, 3]
    ).astype(np.int64)

    return stream, stream_id
+ + + +
def order_frequency_map_stream(fmap: np.ndarray, stream_id: np.ndarray) -> np.ndarray:
    """Order stream_id components based on a frequency map.

    Level order is [shuffle, crate, slot, link]

    Parameters
    ----------
    fmap
        frequency map
    stream_id
        1-D array of stream_ids associated with each row in fmap

    Returns
    -------
    stream
        shuffle, crate, slot, link for each frequency
    """

    def unpack(sid: int) -> Tuple[int]:
        # Each level occupies 4 bits of the stream_id.
        return ((sid >> 12) & 15, (sid >> 8) & 15, (sid >> 4) & 15, sid & 15)

    decoded = [unpack(sid) for sid in stream_id[:]]

    # Collect, for every frequency bin, the decoded stream(s) that carry it.
    per_freq = [[] for _ in range(len(stream_id))]
    for levels, freqs in zip(decoded, fmap):
        for freq in freqs:
            per_freq[freq].append(levels)

    # TODO: maybe implement some checks here
    return np.array([entries[0] for entries in per_freq], dtype=np.int32)
+ + + +
def get_correlator_inputs(lay_time, correlator=None, connect=True):
    """Get the information for all channels in a layout.

    Parameters
    ----------
    lay_time : layout.graph or datetime
        layout.graph object, layout tag id, or datetime.
    correlator : str, optional
        Fetch only for specified correlator. Use the serial number in database,
        or `pathfinder` or `chime`, which will substitute the correct serial.
        If `None` return for all correlators.
        Option `tone` added for GBO 12 dish outrigger prototype array.
    connect : bool, optional
        Connect to database and set the user to Jrs65 prior to query.
        Default is True.

    Returns
    -------
    channels : list
        List of :class:`CorrInput` instances. Returns `None` for MPI ranks
        other than zero.
    """

    from ch_util import layout
    import networkx as nx
    from chimedb.core.connectdb import connect_this_rank

    coax_type = ["SMA coax", "3.25m SMA coax"]

    # Component types we never want to traverse through when searching.
    block = [
        "correlator card slot",
        "ADC board",
        "rf room bulkhead",
        "c-can bulkhead",
        "50m coax bundle",
        "HK hydra",
        "connector plate pol 1",
        "connector plate pol 2",
        "thermometer",
    ]

    # Replace short correlator names with the database serial number
    if isinstance(correlator, str):
        name = correlator.lower()
        if name == "pathfinder":
            correlator = "K7BP16-0004"
        elif name == "chime":
            correlator = "FCC"
        elif name in ("pco", "kko"):
            # KKO was formerly called PCO; both names map to the same serial
            correlator = "FCA"
        elif name == "gbo":
            correlator = "FCG"
        elif name == "tone":
            # A hack to return GBO correlator inputs. No layout/database
            # query is involved, so return immediately.
            return fake_tone_database()

    if not connect_this_rank():
        return None

    if connect:
        layout.connect_database(read_write=False)
        layout.set_user("Jrs65")

    # Build the layout graph for the given time (layout IDs are deprecated)
    if isinstance(lay_time, int):
        raise ValueError("Layout IDs are no longer supported.")
    elif isinstance(lay_time, datetime.datetime):
        layout_graph = layout.graph.from_db(lay_time)
    elif isinstance(lay_time, layout.graph):
        layout_graph = lay_time
    else:
        raise ValueError("Unsupported argument lay_time=%s" % repr(lay_time))

    # Fetch all the input components
    inputs = []
    try:
        inputs += layout_graph.component(type="ADC channel")
    except layout.NotFound:
        pass

    try:
        inputs += layout_graph.component(type="correlator input")
    except layout.NotFound:
        pass

    # Restrict the inputs processed to only those directly connected to the
    # specified correlator
    if correlator is not None:
        try:
            corr = layout_graph.component(correlator)
        except layout.NotFound:
            raise ValueError("Unknown correlator %s" % correlator)

        # Cut out SMA coaxes so we don't go outside of the correlator
        sg = set(layout_graph.nodes())
        for coty in coax_type:
            try:
                comp_coty = layout_graph.component(type=coty)
            except layout.NotFound:
                pass
            else:
                sg -= set(comp_coty)
        sg = layout_graph.subgraph(sg)

        # Use only inputs that are connected to the correlator
        inputs = nx.node_connected_component(sg, corr) & set(inputs)

    inputs = sorted(inputs, key=lambda adc: adc.sn)

    # Perform nearly all the graph queries in one huge batch to speed things
    # up, and pass the results into _get_input_props for further processing
    corrs = layout_graph.closest_of_type(inputs, "correlator", type_exclude=coax_type)

    rfls = layout_graph.shortest_path_to_type(inputs, "reflector", type_exclude=block)

    block.append("reflector")
    rfi_ants = layout_graph.closest_of_type(inputs, "RFI antenna", type_exclude=block)
    noise_sources = layout_graph.closest_of_type(
        inputs, "noise source", type_exclude=block
    )

    inputlist = [
        _get_input_props(layout_graph, *args)
        for args in zip(inputs, corrs, rfls, rfi_ants, noise_sources)
    ]

    # Filter to include only inputs attached to the given correlator. In theory
    # this shouldn't be necessary if the earlier filtering worked, but I think
    # it'll help catch some odd cases
    if correlator is not None:
        inputlist = [input_ for input_ in inputlist if input_.corr == correlator]

    # Sort by channel ID
    inputlist.sort(key=lambda input_: input_.id)

    return inputlist
+ + + +
+[docs] +def change_pathfinder_location(rotation=None, location=None, default=False): + """Change the orientation or location of Pathfinder. + + Parameters + ---------- + rotation : float + Rotation of the telescope from true north in degrees. + location: list + [x, y, z] of the telescope in meters, + where x is eastward, y is northward, and z is upward. + default: bool + Set parameters back to default value. Overides other keywords. + """ + + if default: + rotation = _PF_ROT + location = _PF_POS + + if rotation is not None: + PathfinderAntenna._rotation = rotation + + if location is not None: + offset = [location[ii] if ii < len(location) else 0.0 for ii in range(3)] + PathfinderAntenna._offset = offset
+ + + +
+[docs] +def change_chime_location(rotation=None, location=None, default=False): + """Change the orientation or location of CHIME. + + Parameters + ---------- + rotation : float + Rotation of the telescope from true north in degrees. + location: list + [x, y, z] of the telescope in meters, + where x is eastward, y is northward, and z is upward. + default: bool + Set parameters back to default value. Overides other keywords. + """ + + if default: + rotation = _CHIME_ROT + location = _CHIME_POS + + if rotation is not None: + CHIMEAntenna._rotation = rotation + + if location is not None: + offset = [location[ii] if ii < len(location) else 0.0 for ii in range(3)] + CHIMEAntenna._offset = offset
+ + + +
+[docs] +def get_feed_positions(feeds, get_zpos=False): + """Get the positions of the CHIME antennas. + + Parameters + ---------- + feeds : list of CorrInput + List of feeds to compute positions of. + get_zpos: bool + Return a third column with elevation information. + + Returns + ------- + positions : np.ndarray[nfeed, 2] + Array of feed positions. The first column is the E-W position + (increasing to the E), and the second is the N-S position (increasing + to the N). Non CHIME feeds get set to `NaN`. + """ + + # Extract positions for all array antennas or holographic antennas, fill other + # inputs with NaNs + pos = np.array( + [ + feed.pos if (is_array(feed) or is_holographic(feed)) else [np.nan] * 3 + for feed in feeds + ] + ) + + # Drop z coordinate if not explicitely requested + if not get_zpos: + pos = pos[:, 0:2] + + return pos
+ + + +
+[docs] +def fake_tone_database(): + """A fake correlator input database for GBO/TONE.""" + positions_and_polarizations = [ + ("S", [15.08, -1.61]), + ("E", [15.08, -1.61]), + ("S", [-9.19, -15.24]), + ("E", [-9.19, -15.24]), + ("S", [7.02, 14.93]), + ("E", [7.02, 14.93]), + ("S", [9.01, -5.02]), + ("E", [9.01, -5.02]), + ("S", [2.8, 2.67]), + ("E", [2.8, 2.67]), + ("S", [-1.66, 10.38]), + ("E", [-1.66, 10.38]), + ("S", [-7.63, -0.79]), + ("E", [-7.63, -0.79]), + ("S", [-15.43, -5.33]), + ("E", [-15.43, -5.33]), + ] + inputs = [] + for id, pol_ns_ew in enumerate(positions_and_polarizations): + inputs.append( + TONEAntenna( + id=id, + crate=0, + slot=0, + sma=0, + corr_order=0, + input_sn=f"TONE{id:04}", + corr="tone", + reflector=None, + antenna=f"ANT{id//2:04}", + rf_thru="N/A", + cyl=0, + pol=pol_ns_ew[0], + flag=True, + pos=[pol_ns_ew[1][0], pol_ns_ew[1][1], 0], + ) + ) + return inputs
+ + + +
+[docs] +def get_feed_polarisations(feeds): + """Get an array of the feed polarisations. + + Parameters + ---------- + feeds : list of CorrInput + List of feeds to compute positions of. + + Returns + ------- + pol : np.ndarray + Array of characters giving polarisation. If not an array feed returns '0'. + """ + pol = np.array([(f.pol if is_array(f) else "0") for f in feeds]) + + return pol
+ + + +
+[docs] +def is_array(feed): + """Is this feed part of an array? + + Parameters + ---------- + feed : CorrInput + + Returns + ------- + isarr : bool + """ + return isinstance(feed, ArrayAntenna)
+ + + +
+[docs] +def is_array_x(feed): + """Is this an X-polarisation antenna in an array?""" + return is_array(feed) and feed.pol == "E"
+ + + +
+[docs] +def is_array_y(feed): + """Is this a Y-polarisation antenna in an array?""" + return is_array(feed) and feed.pol == "S"
+ + + +
+[docs] +def is_chime(feed): + """Is this feed a CHIME antenna? + + Parameters + ---------- + feed : CorrInput + + Returns + ------- + ischime : bool + """ + return isinstance(feed, CHIMEAntenna)
+ + + +
+[docs] +def is_pathfinder(feed): + """Is this feed a Pathfinder antenna? + + Parameters + ---------- + feed : CorrInput + + Returns + ------- + ispathfinder : bool + """ + return isinstance(feed, PathfinderAntenna)
+ + + +
+[docs] +def is_holographic(feed): + """Is this feed a holographic antenna? + + Parameters + ---------- + feed : CorrInput + + Returns + ------- + isholo : bool + """ + return isinstance(feed, HolographyAntenna)
+ + + +
+[docs] +def get_holographic_index(inputs): + """Find the indices of the holography antennas. + + Parameters + ---------- + inputs : list of :class:`CorrInput` + + Returns + ------- + ixholo : list of int + Returns None if holographic antenna not found. + """ + ixholo = [ix for ix, inp in enumerate(inputs) if is_holographic(inp)] + return ixholo or None
+ + + +
+[docs] +def is_noise_source(inp): + """Is this correlator input connected to a noise source? + + Parameters + ---------- + inp : CorrInput + + Returns + ------- + isnoise : bool + """ + return isinstance(inp, NoiseSource)
+ + + +
+[docs] +def get_noise_source_index(inputs): + """Find the indices of the noise sources. + + Parameters + ---------- + inputs : list of :class:`CorrInput` + + Returns + ------- + ixns : list of int + Returns None if noise source not found. + """ + ixns = [ix for ix, inp in enumerate(inputs) if is_noise_source(inp)] + return ixns or None
+ + + +
+[docs] +def get_noise_channel(inputs): + """Returns the index of the noise source with + the lowest chan id (for backwards compatability). + """ + noise_sources = get_noise_source_index(inputs) + return (noise_sources or [None])[0]
+ + + +
+[docs] +def is_array_on(inputs, *args): + """Check if inputs are attached to an array antenna AND powered on AND flagged as good. + + Parameters + ---------- + inputs : CorrInput or list of CorrInput objects + + Returns + ------- + pwds : boolean or list of bools. + If list, it is the same length as inputs. Value is True if input is + attached to a ArrayAntenna *and* powered-on and False otherwise + """ + + if len(args) > 0: + raise RuntimeError("This routine no longer accepts a layout time argument.") + + # Treat scalar case + if isinstance(inputs, CorrInput): + return ( + is_array(inputs) + and getattr(inputs, "powered", True) + and getattr(inputs, "flag", True) + ) + + # Assume that the argument is a sequence otherwise + else: + return [is_array_on(inp) for inp in inputs]
+ + + +# Create an is_chime_on alias for backwards compatibility +is_chime_on = is_array_on + + +
+[docs] +def reorder_correlator_inputs(input_map, corr_inputs): + """Sort a list of correlator inputs into the order given in input map. + + Parameters + ---------- + input_map : np.ndarray + Index map of correlator inputs. + corr_inputs : list + List of :class:`CorrInput` objects, e.g. the output from + :func:`get_correlator_inputs`. + + Returns + ------- + corr_input_list: list + List of :class:`CorrInput` instances in the new order. Returns `None` + where the serial number had no matching entry in parameter ``corr_inputs``. + """ + serials = input_map["correlator_input"] + + sorted_inputs = [] + + for serial in serials: + for corr_input in corr_inputs: + if serial == corr_input.input_sn: + sorted_inputs.append(corr_input) + break + else: + sorted_inputs.append(None) + + return sorted_inputs
+ + + +
+[docs] +def redefine_stack_index_map(input_map, prod, stack, reverse_stack): + """Ensure that only baselines between array antennas are used to represent the stack. + + The correlator will have inputs that are not connected to array antennas. These inputs + are flagged as bad and are not included in the stack, however, products that contain + their `chan_id` can still be used to represent a characteristic baseline in the `stack` + index map. This method creates a new `stack` index map that, if possible, only contains + products between two array antennas. This new `stack` index map should be used when + calculating baseline distances to fringestop stacked data. + + Parameters + ---------- + input_map : list of :class:`CorrInput` + List describing the inputs as they are in the file, output from + `tools.get_correlator_inputs` + prod : np.ndarray[nprod,] of dtype=('input_a', 'input_b') + The correlation products as pairs of inputs. + stack : np.ndarray[nstack,] of dtype=('prod', 'conjugate') + The index into the `prod` axis of a characteristic baseline included in the stack. + reverse_stack : np.ndarray[nprod,] of dtype=('stack', 'conjugate') + The index into the `stack` axis that each `prod` belongs. + + Returns + ------- + stack_new : np.ndarray[nstack,] of dtype=('prod', 'conjugate') + The updated `stack` index map, where each element is an index to a product + consisting of a pair of array antennas. + stack_flag : np.ndarray[nstack,] of dtype=bool + Boolean flag that is True if this element of the stack index map is now valid, + and False if none of the baselines that were stacked contained array antennas. 
+ """ + feed_flag = np.array([is_array(inp) for inp in input_map]) + example_prod = prod[stack["prod"]] + stack_flag = feed_flag[example_prod["input_a"]] & feed_flag[example_prod["input_b"]] + + stack_new = stack.copy() + + bad_stack_index = np.flatnonzero(~stack_flag) + for ind in bad_stack_index: + this_stack = np.flatnonzero(reverse_stack["stack"] == ind) + for ts in this_stack: + tp = prod[ts] + if feed_flag[tp[0]] and feed_flag[tp[1]]: + stack_new[ind]["prod"] = ts + stack_new[ind]["conjugate"] = reverse_stack[ts]["conjugate"] + stack_flag[ind] = True + break + + return stack_new, stack_flag
+ + + +
+[docs] +def cmap(i, j, n): + """Given a pair of feed indices, return the pair index. + + Parameters + ---------- + i, j : integer + Feed index. + n : integer + Total number of feeds. + + Returns + ------- + pi : integer + Pair index. + """ + if i <= j: + return (n * (n + 1) // 2) - ((n - i) * (n - i + 1) // 2) + (j - i) + else: + return cmap(j, i, n)
+ + + +
+[docs] +def icmap(ix, n): + """Inverse feed map. + + Parameters + ---------- + ix : integer + Pair index. + n : integer + Total number of feeds. + + Returns + ------- + fi, fj : integer + Feed indices. + """ + for ii in range(n): + if cmap(ii, n - 1, n) >= ix: + break + + i = ii + j = ix - cmap(i, i, n) + i + return i, j
+ + + +
+[docs] +def unpack_product_array(prod_arr, axis=1, feeds=None): + """Expand packed products to correlation matrices. + + This turns an axis of the packed upper triangle set of products into the + full correlation matrices. It replaces the specified product axis with two + axes, one for each feed. By setting `feeds` this routine can also + pull out a subset of feeds. + + Parameters + ---------- + prod_arr : np.ndarray[..., nprod, :] + Array containing products packed in upper triangle format. + axis : int, optional + Axis the products are contained on. + feeds : list of int, optional + Indices of feeds to include. If :obj:`None` (default) use all feeds. + + Returns + ------- + corr_arr : np.ndarray[..., nfeed, nfeed, ...] + Expanded array. + """ + + nprod = prod_arr.shape[axis] + nfeed = int((2 * nprod) ** 0.5) + + if nprod != (nfeed * (nfeed + 1) // 2): + raise Exception( + "Product axis size does not look correct (not exactly n(n+1)/2)." + ) + + shape0 = prod_arr.shape[:axis] + shape1 = prod_arr.shape[(axis + 1) :] + + # Construct slice objects representing the axes before and after the product axis + slice0 = (np.s_[:],) * len(shape0) + slice1 = (np.s_[:],) * len(shape1) + + # If no feeds specified use all of them + feeds = list(range(nfeed)) if feeds is None else feeds + + outfeeds = len(feeds) + + exp_arr = np.zeros(shape0 + (outfeeds, outfeeds) + shape1, dtype=prod_arr.dtype) + + # Iterate over products and copy into correct location of expanded array + # Use a python loop, but should be fast if other axes are large + for ii, fi in enumerate(feeds): + for ij, fj in enumerate(feeds): + pi = cmap(fi, fj, nfeed) + + if fi <= fj: + exp_arr[slice0 + (ii, ij) + slice1] = prod_arr[slice0 + (pi,) + slice1] + else: + exp_arr[slice0 + (ii, ij) + slice1] = prod_arr[ + slice0 + (pi,) + slice1 + ].conj() + + return exp_arr
+ + + +
+[docs] +def pack_product_array(exp_arr, axis=1): + """Pack full correlation matrices into upper triangular form. + + It replaces the two feed axes of the matrix, with a single upper triangle product axis. + + + Parameters + ---------- + exp_arr : np.ndarray[..., nfeed, nfeed, ...] + Array of full correlation matrices. + axis : int, optional + Index of the first feed axis. The second feed axis must be the next one. + + Returns + ------- + prod_arr : np.ndarray[..., nprod, ...] + Array containing products packed in upper triangle format. + """ + + nfeed = exp_arr.shape[axis] + nprod = nfeed * (nfeed + 1) // 2 + + if nfeed != exp_arr.shape[axis + 1]: + raise Exception("Does not look like correlation matrices (axes must be equal).") + + shape0 = exp_arr.shape[:axis] + shape1 = exp_arr.shape[(axis + 2) :] + + slice0 = (np.s_[:],) * len(shape0) + slice1 = (np.s_[:],) * len(shape1) + + prod_arr = np.zeros(shape0 + (nprod,) + shape1, dtype=exp_arr.dtype) + + # Iterate over products and copy from correct location of expanded array + for pi in range(nprod): + fi, fj = icmap(pi, nfeed) + + prod_arr[slice0 + (pi,) + slice1] = exp_arr[slice0 + (fi, fj) + slice1] + + return prod_arr
+ + + +
+[docs] +def fast_pack_product_array(arr): + """ + Equivalent to ch_util.tools.pack_product_array(arr, axis=0), + but 10^5 times faster for full CHIME! + + Currently assumes that arr is a 2D array of shape (nfeeds, nfeeds), + and returns a 1D array of length (nfeed*(nfeed+1))/2. This case + is all we need for phase calibration, but pack_product_array() is + more general. + """ + + assert arr.ndim == 2 + assert arr.shape[0] == arr.shape[1] + + nfeed = arr.shape[0] + nprod = (nfeed * (nfeed + 1)) // 2 + + ret = np.zeros(nprod, dtype=np.float64) + iout = 0 + + for i in range(nfeed): + ret[iout : (iout + nfeed - i)] = arr[i, i:] + iout += nfeed - i + + return ret
+ + + +
+[docs] +def rankN_approx(A, rank=1): + """Create the rank-N approximation to the matrix A. + + Parameters + ---------- + A : np.ndarray + Matrix to approximate + rank : int, optional + + Returns + ------- + B : np.ndarray + Low rank approximation. + """ + + N = A.shape[0] + + evals, evecs = la.eigh(A, eigvals=(N - rank, N - 1)) + + return np.dot(evecs, evals * evecs.T.conj())
+ + + +
+[docs] +def eigh_no_diagonal(A, niter=5, eigvals=None): + """Eigenvalue decomposition ignoring the diagonal elements. + + The diagonal elements are iteratively replaced with those from a rank=1 approximation. + + Parameters + ---------- + A : np.ndarray[:, :] + Matrix to decompose. + niter : int, optional + Number of iterations to perform. + eigvals : (lo, hi), optional + Indices of eigenvalues to select (inclusive). + + Returns + ------- + evals : np.ndarray[:] + evecs : np.ndarray[:, :] + """ + + Ac = A.copy() + + if niter > 0: + Ac[np.diag_indices(Ac.shape[0])] = 0.0 + + for i in range(niter): + Ac[np.diag_indices(Ac.shape[0])] = rankN_approx(Ac).diagonal() + + return la.eigh(Ac, eigvals=eigvals)
+ + + +
+[docs] +def normalise_correlations(A, norm=None): + """Normalise to make a correlation matrix from a covariance matrix. + + Parameters + ---------- + A : np.ndarray[:, :] + Matrix to normalise. + norm : np.ndarray[:,:] + Normalize by diagonals of norm. + If None, then normalize by diagonals of A. + + Returns + ------- + X : np.ndarray[:, :] + Normalised correlation matrix. + ach : np.ndarray[:] + Array of the square root diagonal elements that normalise the matrix. + """ + + if norm is None: + ach = A.diagonal() ** 0.5 + else: + ach = norm.diagonal() ** 0.5 + + aci = invert_no_zero(ach) + + X = A * np.outer(aci, aci.conj()) + + return X, ach
+ + + +
+[docs] +def apply_gain(vis, gain, axis=1, out=None, prod_map=None): + """Apply per input gains to a set of visibilities packed in upper + triangular format. + + This allows us to apply the gains while minimising the intermediate + products created. + + Parameters + ---------- + vis : np.ndarray[..., nprod, ...] + Array of visibility products. + gain : np.ndarray[..., ninput, ...] + Array of gains. One gain per input. + axis : integer, optional + The axis along which the inputs (or visibilities) are + contained. Currently only supports axis=1. + out : np.ndarray + Array to place output in. If :obj:`None` create a new + array. This routine can safely use `out = vis`. + prod_map : ndarray of integer pairs + Gives the mapping from product axis to input pairs. If not supplied, + :func:`icmap` is used. + + Returns + ------- + out : np.ndarray + Visibility array with gains applied. Same shape as :obj:`vis`. + + """ + + nprod = vis.shape[axis] + ninput = gain.shape[axis] + + if prod_map is None and nprod != (ninput * (ninput + 1) // 2): + raise Exception("Number of inputs does not match the number of products.") + + if prod_map is not None: + if len(prod_map) != nprod: + msg = "Length of *prod_map* does not match number of input products." + raise ValueError(msg) + # Could check prod_map contents as well, but the loop should give a + # sensible error if this is wrong, and checking is expensive. + else: + prod_map = [icmap(pp, ninput) for pp in range(nprod)] + + if out is None: + out = np.empty_like(vis) + elif out.shape != vis.shape: + raise Exception("Output array is wrong shape.") + + # Iterate over input pairs and set gains + for pp in range(nprod): + # Determine the inputs. + ii, ij = prod_map[pp] + + # Fetch the gains + gi = gain[:, ii] + gj = gain[:, ij].conj() + + # Apply the gains and save into the output array. + out[:, pp] = vis[:, pp] * gi * gj + + return out
+ + + +
+[docs] +def subtract_rank1_signal(vis, signal, axis=1, out=None, prod_map=None): + """Subtract a rank 1 signal from a set of visibilities packed in upper + triangular format. + + This allows us to subtract the noise injection solutions + while minimising the intermediate products created. + + Parameters + ---------- + vis : np.ndarray[..., nprod, ...] + Array of visibility products. + signal : np.ndarray[..., ninput, ...] + Array of underlying signals. One signal per input. + axis : integer, optional + The axis along which the inputs (or visibilities) are + contained. Currently only supports axis=1. + out : np.ndarray + Array to place output in. If :obj:`None` create a new + array. This routine can safely use `out = vis`. + prod_map : ndarray of integer pairs + Gives the mapping from product axis to input pairs. If not supplied, + :func:`icmap` is used. + + Returns + ------- + out : np.ndarray + Visibility array with signal subtracted. Same shape as :obj:`vis`. + """ + + nprod = vis.shape[axis] + ninput = signal.shape[axis] + + if prod_map is None and nprod != (ninput * (ninput + 1) // 2): + raise Exception("Number of inputs does not match the number of products.") + + if prod_map is not None: + if len(prod_map) != nprod: + msg = "Length of *prod_map* does not match number of input products." + raise ValueError(msg) + # Could check prod_map contents as well, but the loop should give a + # sensible error if this is wrong, and checking is expensive. + else: + prod_map = [icmap(pp, ninput) for pp in range(nprod)] + + if out is None: + out = np.empty_like(vis) + elif out.shape != vis.shape: + raise Exception("Output array is wrong shape.") + + # Iterate over input pairs and set signals + for pp in range(nprod): + # Determine the inputs. + ii, ij = prod_map[pp] + + # Fetch the signals + si = signal[:, ii] + sj = signal[:, ij].conj() + + # Apply the signals and save into the output array. + out[:, pp] = vis[:, pp] - si * sj + + return out
+ + + +
+[docs] +def fringestop_time( + timestream, + times, + freq, + feeds, + src, + wterm=False, + bterm=True, + prod_map=None, + csd=False, + inplace=False, + static_delays=True, + obs=ephemeris.chime, +): + """Fringestop timestream data to a fixed source. + + Parameters + ---------- + timestream : np.ndarray[nfreq, nprod, times] + Array containing the visibility timestream. + times : np.ndarray[times] + The UNIX time of each sample, or (if csd=True), the CSD of each sample. + freq : np.ndarray[nfreq] + The frequencies in the array (in MHz). + feeds : list of CorrInputs + The feeds in the timestream. + src : skyfield source + skyfield.starlib.Star or skyfield.vectorlib.VectorSum or + skyfield.jpllib.ChebyshevPosition body representing the source. + wterm: bool, optional + Include elevation information in the calculation. + bterm: bool, optional + Include a correction for baselines including the 26m Galt telescope. + prod_map: np.ndarray[nprod] + The products in the `timestream` array. + csd: bool, optional + Interpret the times parameter as CSDs. + inplace: bool, optional + Fringestop the visibilities in place. If not set, leave the originals intact. + static_delays: bool, optional + Correct for static cable delays in the system. 
+ + Returns + ------- + fringestopped_timestream : np.ndarray[nfreq, nprod, times] + """ + + # Check the shapes match + nfeed = len(feeds) + nprod = len(prod_map) if prod_map is not None else nfeed * (nfeed + 1) // 2 + expected_shape = (len(freq), nprod, len(times)) + + if timestream.shape != expected_shape: + raise ValueError( + "The shape of the timestream %s does not match the expected shape %s" + % (timestream.shape, expected_shape) + ) + + delays = delay( + times, + feeds, + src, + wterm=wterm, + bterm=bterm, + prod_map=prod_map, + csd=csd, + static_delays=static_delays, + obs=obs, + ) + + # Set any non CHIME feeds to have zero phase + delays = np.nan_to_num(delays, copy=False) + + # If modifying inplace, loop to try and save some memory on large datasets + if inplace: + for fi, fr in enumerate(freq): + fs_phase = np.exp(2.0j * np.pi * delays * fr * 1e6) + timestream[fi] *= fs_phase + fs_timestream = timestream + # Otherwise we might as well generate the entire phase array in onestop + else: + fs_timestream = 2.0j * np.pi * delays * freq[:, np.newaxis, np.newaxis] * 1e6 + fs_timestream = np.exp(fs_timestream, out=fs_timestream) + fs_timestream *= timestream + + return fs_timestream
+ + + +# Cache the PFB object +_chime_pfb = pfb.PFB(4, 2048) + + +
+[docs] +def decorrelation( + timestream, + times, + feeds, + src, + wterm=True, + bterm=True, + prod_map=None, + csd=False, + inplace=False, + static_delays=True, +): + """Apply the decorrelation corrections to a timestream from observing a source. + + Parameters + ---------- + timestream : np.ndarray[nfreq, nprod, times] + Array containing the timestream. + times : np.ndarray[times] + The UNIX time of each sample, or (if csd=True), the CSD of each sample. + feeds : list of CorrInputs + The feeds in the timestream. + src : skyfield source + skyfield.starlib.Star or skyfield.vectorlib.VectorSum or + skyfield.jpllib.ChebyshevPosition body representing the source. + wterm: bool, optional + Include elevation information in the calculation. + bterm: bool, optional + Include a correction for baselines including the 26m Galt telescope. + prod_map: np.ndarray[nprod] + The products in the `timestream` array. + csd: bool, optional + Interpret the times parameter as CSDs. + inplace: bool, optional + Fringestop the visibilities in place. If not set, leave the originals intact. + static_delays: bool, optional + Correct for static cable delays in the system. + + Returns + ------- + corrected_timestream : np.ndarray[nfreq, nprod, times] + """ + + # Check the shapes match + nfeed = len(feeds) + nprod = len(prod_map) if prod_map is not None else nfeed * (nfeed + 1) // 2 + expected_shape = (nprod, len(times)) + + if timestream.shape[1:] != expected_shape: + raise ValueError( + "The shape of the timestream %s does not match the expected shape %s" + % (timestream.shape, expected_shape) + ) + + delays = delay( + times, + feeds, + src, + wterm=wterm, + bterm=bterm, + prod_map=prod_map, + csd=csd, + static_delays=static_delays, + ) + + # Set any non CHIME feeds to have zero delay + delays = np.nan_to_num(delays, copy=False) + + ratio_correction = invert_no_zero( + _chime_pfb.decorrelation_ratio(delays * 800e6)[np.newaxis, ...] 
+ ) + + if inplace: + timestream *= ratio_correction + else: + timestream = timestream * ratio_correction + + return timestream
+ + + +
+[docs] +def delay( + times, + feeds, + src, + wterm=True, + bterm=True, + prod_map=None, + csd=False, + static_delays=True, + obs=ephemeris.chime, +): + """Calculate the delay in a visibilities observing a given source. + + This includes both the geometric delay and static (cable) delays. + + Parameters + ---------- + times : np.ndarray[times] + The UNIX time of each sample, or (if csd=True), the CSD of each sample. + feeds : list of CorrInputs + The feeds in the timestream. + src : skyfield source + skyfield.starlib.Star or skyfield.vectorlib.VectorSum or + skyfield.jpllib.ChebyshevPosition body representing the source. + wterm: bool, optional + Include elevation information in the calculation. + bterm: bool, optional + Include a correction for baselines which include the 26m Galt telescope. + prod_map: np.ndarray[nprod] + The products in the `timestream` array. + csd: bool, optional + Interpret the times parameter as CSDs. + static_delays: bool, optional + If set the returned value includes both geometric and static delays. + If `False` only geometric delays are included. 
+ + Returns + ------- + delay : np.ndarray[nprod, nra] + """ + + import scipy.constants + + ra = (times % 1.0) * 360.0 if csd else obs.unix_to_lsa(times) + src_ra, src_dec = ephemeris.object_coords(src, times.mean(), obs=obs) + ha = (np.radians(ra) - src_ra)[np.newaxis, :] + latitude = np.radians(obs.latitude) + # Get feed positions / c + feedpos = get_feed_positions(feeds, get_zpos=wterm) / scipy.constants.c + feed_delays = np.array([f.delay for f in feeds]) + # Calculate the geometric delay between the feed and the reference position + delay_ref = -projected_distance(ha, latitude, src_dec, *feedpos.T[..., np.newaxis]) + + # Add in the static delays + if static_delays: + delay_ref += feed_delays[:, np.newaxis] + + # Calculate baseline separations and pack into product array + if prod_map is None: + delays = fast_pack_product_array( + delay_ref[:, np.newaxis] - delay_ref[np.newaxis, :] + ) + else: + delays = delay_ref[prod_map["input_a"]] - delay_ref[prod_map["input_b"]] + + # Add the b-term for baselines including the 26m Galt telescope + if bterm: + b_delay = _26M_B / scipy.constants.c * np.cos(src_dec) + + galt_feeds = get_holographic_index(feeds) + + galt_conj = np.where(np.isin(prod_map["input_a"], galt_feeds), -1, 0) + galt_noconj = np.where(np.isin(prod_map["input_b"], galt_feeds), 1, 0) + + conj_flag = galt_conj + galt_noconj + + delays += conj_flag[:, np.newaxis] * b_delay + + return delays
+ + + +
+[docs] +def beam_index2number(beam_index): + """Convert beam "index" (0-1023) to beam "number" (0-255, 1000-1255, etc.) + + The beam "number", with 1000s indicating the beam's East-West index and the + remainder going from 0 through 255 indicating the beam's North-South index, + is used in the CHIME/FRB beam_model package. + + Parameters + ---------- + beam_index : int or np.ndarray of int + The beam index or indices to be converted. + + Returns + ------- + beam_number : same as beam_index + The corresponding beam number or numbers. + """ + beam_ew_index = beam_index // 256 + beam_ns_index = beam_index % 256 + beam_number = 1000 * beam_ew_index + beam_ns_index + return beam_number
+ + + +
+[docs] +def invert_no_zero(*args, **kwargs): + """Deprecated - use 'caput.tools.invert_no_zero'""" + from caput import tools + import warnings + + warnings.warn( + f"Function invert_no_zero is deprecated - use 'caput.tools.invert_no_zero'", + category=DeprecationWarning, + ) + return tools.invert_no_zero(*args, **kwargs)
+ + + +
+[docs] +def ensure_list(obj, num=None): + """Ensure `obj` is list-like, optionally with the length `num`. + + If `obj` not a string but is iterable, it is returned as-is, + although a length different than `num`, if given, will result in a + `ValueError`. + + If `obj` is a string or non-iterable, a new list is created with + `num` copies of `obj` as elements. In this case, if `num` is not + given, it is taken to be 1. + + Parameters + ---------- + obj + The object to check. + num: int, optional + If given, also ensure that the list has `num` elements. + + + Returns + ------- + obj + The input object, or the newly created list + + Raises + ------ + ValueError: + `obj` was iterable but did not have a length of `num` + """ + if hasattr(obj, "__iter__") and not isinstance(obj, str): + nnum = len(obj) + if (num is not None) and (nnum != num): + raise ValueError("Input list has wrong size.") + else: + if num is not None: + obj = [obj] * num + else: + obj = [obj] + + return obj
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/_modules/index.html b/docs/_modules/index.html new file mode 100644 index 00000000..57cb9327 --- /dev/null +++ b/docs/_modules/index.html @@ -0,0 +1,124 @@ + + + + + + Overview: module code — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+ + +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/_sources/_autosummary/ch_util.andata.rst.txt b/docs/_sources/_autosummary/ch_util.andata.rst.txt new file mode 100644 index 00000000..e86c351a --- /dev/null +++ b/docs/_sources/_autosummary/ch_util.andata.rst.txt @@ -0,0 +1,48 @@ +ch\_util.andata +=============== + +.. automodule:: ch_util.andata + + + .. rubric:: Functions + + .. autosummary:: + + andata_from_acq1 + andata_from_archive2 + subclass_from_obj + versiontuple + + .. rubric:: Classes + + .. autosummary:: + + AnData + BaseData + BaseReader + CalibrationGainData + CalibrationGainReader + CorrData + CorrReader + DigitalGainData + DigitalGainReader + FlagInputData + FlagInputReader + GainData + GainFlagData + HKData + HKPData + HKPReader + HKReader + RawADCData + RawADCReader + Reader + WeatherData + WeatherReader + + .. rubric:: Exceptions + + .. autosummary:: + + AnDataError + \ No newline at end of file diff --git a/docs/_sources/_autosummary/ch_util.cal_utils.rst.txt b/docs/_sources/_autosummary/ch_util.cal_utils.rst.txt new file mode 100644 index 00000000..a4bf2a12 --- /dev/null +++ b/docs/_sources/_autosummary/ch_util.cal_utils.rst.txt @@ -0,0 +1,37 @@ +ch\_util.cal\_utils +=================== + +.. automodule:: ch_util.cal_utils + + + .. rubric:: Functions + + .. autosummary:: + + estimate_directional_scale + fit_histogram + fit_point_source_map + flag_outliers + func_2d_gauss + func_2d_sinc_gauss + func_dirty_gauss + func_real_dirty_gauss + get_reference_times_dataset_id + get_reference_times_file + guess_fwhm + interpolate_gain + interpolate_gain_quiet + thermal_amplitude + + .. rubric:: Classes + + .. 
autosummary:: + + FitAmpPhase + FitGaussAmpPolyPhase + FitPoly + FitPolyLogAmpPolyPhase + FitPolyRealPolyImag + FitRealImag + FitTransit + \ No newline at end of file diff --git a/docs/_sources/_autosummary/ch_util.chan_monitor.rst.txt b/docs/_sources/_autosummary/ch_util.chan_monitor.rst.txt new file mode 100644 index 00000000..620f2fe6 --- /dev/null +++ b/docs/_sources/_autosummary/ch_util.chan_monitor.rst.txt @@ -0,0 +1,13 @@ +ch\_util.chan\_monitor +====================== + +.. automodule:: ch_util.chan_monitor + + + .. rubric:: Classes + + .. autosummary:: + + ChanMonitor + FeedLocator + \ No newline at end of file diff --git a/docs/_sources/_autosummary/ch_util.data_quality.rst.txt b/docs/_sources/_autosummary/ch_util.data_quality.rst.txt new file mode 100644 index 00000000..15e5e503 --- /dev/null +++ b/docs/_sources/_autosummary/ch_util.data_quality.rst.txt @@ -0,0 +1,12 @@ +ch\_util.data\_quality +====================== + +.. automodule:: ch_util.data_quality + + + .. rubric:: Functions + + .. autosummary:: + + good_channels + \ No newline at end of file diff --git a/docs/_sources/_autosummary/ch_util.ephemeris.rst.txt b/docs/_sources/_autosummary/ch_util.ephemeris.rst.txt new file mode 100644 index 00000000..8c3db049 --- /dev/null +++ b/docs/_sources/_autosummary/ch_util.ephemeris.rst.txt @@ -0,0 +1,41 @@ +ch\_util.ephemeris +================== + +.. automodule:: ch_util.ephemeris + + + .. rubric:: Module Attributes + + .. autosummary:: + + CasA + CygA + TauA + VirA + + .. rubric:: Functions + + .. 
autosummary:: + + Star_cirs + bmxy_to_hadec + chime_local_datetime + cirs_radec + galt_pointing_model_dec + galt_pointing_model_ha + get_doppler_shifted_freq + get_range_rate + get_source_dictionary + hadec_to_bmxy + lunar_rising + lunar_setting + lunar_transit + object_coords + parse_date + peak_RA + solar_rising + solar_setting + solar_transit + transit_RA + utc_lst_to_mjd + \ No newline at end of file diff --git a/docs/_sources/_autosummary/ch_util.finder.rst.txt b/docs/_sources/_autosummary/ch_util.finder.rst.txt new file mode 100644 index 00000000..8fb16631 --- /dev/null +++ b/docs/_sources/_autosummary/ch_util.finder.rst.txt @@ -0,0 +1,33 @@ +ch\_util.finder +=============== + +.. automodule:: ch_util.finder + + + .. rubric:: Functions + + .. autosummary:: + + files_in_range + + .. rubric:: Classes + + .. autosummary:: + + BaseDataInterval + CalibrationGainDataInterval + CorrDataInterval + DataInterval + DataIntervalList + DigitalGainDataInterval + Finder + FlagInputDataInterval + HKDataInterval + WeatherDataInterval + + .. rubric:: Exceptions + + .. autosummary:: + + DataFlagged + \ No newline at end of file diff --git a/docs/_sources/_autosummary/ch_util.fluxcat.rst.txt b/docs/_sources/_autosummary/ch_util.fluxcat.rst.txt new file mode 100644 index 00000000..d858fe4f --- /dev/null +++ b/docs/_sources/_autosummary/ch_util.fluxcat.rst.txt @@ -0,0 +1,25 @@ +ch\_util.fluxcat +================ + +.. automodule:: ch_util.fluxcat + + + .. rubric:: Functions + + .. autosummary:: + + format_source_name + get_epoch + json_numpy_obj_hook + varname + + .. rubric:: Classes + + .. 
autosummary:: + + CurvedPowerLaw + FitSpectrum + FluxCatalog + MetaFluxCatalog + NumpyEncoder + \ No newline at end of file diff --git a/docs/_sources/_autosummary/ch_util.hfbcat.rst.txt b/docs/_sources/_autosummary/ch_util.hfbcat.rst.txt new file mode 100644 index 00000000..2f68c99f --- /dev/null +++ b/docs/_sources/_autosummary/ch_util.hfbcat.rst.txt @@ -0,0 +1,18 @@ +ch\_util.hfbcat +=============== + +.. automodule:: ch_util.hfbcat + + + .. rubric:: Functions + + .. autosummary:: + + get_doppler_shifted_freq + + .. rubric:: Classes + + .. autosummary:: + + HFBCatalog + \ No newline at end of file diff --git a/docs/_sources/_autosummary/ch_util.holography.rst.txt b/docs/_sources/_autosummary/ch_util.holography.rst.txt new file mode 100644 index 00000000..42c61b0a --- /dev/null +++ b/docs/_sources/_autosummary/ch_util.holography.rst.txt @@ -0,0 +1,13 @@ +ch\_util.holography +=================== + +.. automodule:: ch_util.holography + + + .. rubric:: Classes + + .. autosummary:: + + HolographyObservation + HolographySource + \ No newline at end of file diff --git a/docs/_sources/_autosummary/ch_util.layout.rst.txt b/docs/_sources/_autosummary/ch_util.layout.rst.txt new file mode 100644 index 00000000..f9826091 --- /dev/null +++ b/docs/_sources/_autosummary/ch_util.layout.rst.txt @@ -0,0 +1,21 @@ +ch\_util.layout +=============== + +.. automodule:: ch_util.layout + + + .. rubric:: Functions + + .. autosummary:: + + enter_ltf + get_global_flag_times + global_flags_between + + .. rubric:: Classes + + .. autosummary:: + + graph + subgraph_spec + \ No newline at end of file diff --git a/docs/_sources/_autosummary/ch_util.ni_utils.rst.txt b/docs/_sources/_autosummary/ch_util.ni_utils.rst.txt new file mode 100644 index 00000000..de9b0fbd --- /dev/null +++ b/docs/_sources/_autosummary/ch_util.ni_utils.rst.txt @@ -0,0 +1,30 @@ +ch\_util.ni\_utils +================== + +.. automodule:: ch_util.ni_utils + + + .. rubric:: Functions + + .. 
autosummary:: + + gains2utvec + gains2utvec_tf + gen_prod_sel + ktrprod + mat2utvec + ni_als + ni_gains_evalues + ni_gains_evalues_tf + process_gated_data + process_synced_data + sort_evalues_mag + subtract_sky_noise + utvec2mat + + .. rubric:: Classes + + .. autosummary:: + + ni_data + \ No newline at end of file diff --git a/docs/_sources/_autosummary/ch_util.plot.rst.txt b/docs/_sources/_autosummary/ch_util.plot.rst.txt new file mode 100644 index 00000000..36683a24 --- /dev/null +++ b/docs/_sources/_autosummary/ch_util.plot.rst.txt @@ -0,0 +1,14 @@ +ch\_util.plot +============= + +.. automodule:: ch_util.plot + + + .. rubric:: Functions + + .. autosummary:: + + spectra + time_ordered + waterfall + \ No newline at end of file diff --git a/docs/_sources/_autosummary/ch_util.rfi.rst.txt b/docs/_sources/_autosummary/ch_util.rfi.rst.txt new file mode 100644 index 00000000..7c079274 --- /dev/null +++ b/docs/_sources/_autosummary/ch_util.rfi.rst.txt @@ -0,0 +1,24 @@ +ch\_util.rfi +============ + +.. automodule:: ch_util.rfi + + + .. rubric:: Functions + + .. autosummary:: + + flag_dataset + frequency_mask + get_autocorrelations + highpass_delay_filter + iterative_hpf_masking + mad_cut_1d + mad_cut_2d + mad_cut_rolling + nanmedian + number_deviations + sir + sir1d + spectral_cut + \ No newline at end of file diff --git a/docs/_sources/_autosummary/ch_util.timing.rst.txt b/docs/_sources/_autosummary/ch_util.timing.rst.txt new file mode 100644 index 00000000..94e2112d --- /dev/null +++ b/docs/_sources/_autosummary/ch_util.timing.rst.txt @@ -0,0 +1,25 @@ +ch\_util.timing +=============== + +.. automodule:: ch_util.timing + + + .. rubric:: Functions + + .. autosummary:: + + construct_delay_template + eigen_decomposition + fit_poly_to_phase + load_timing_correction + map_input_to_noise_source + model_poly_phase + + .. rubric:: Classes + + .. 
autosummary:: + + TimingCorrection + TimingData + TimingInterpolator + \ No newline at end of file diff --git a/docs/_sources/_autosummary/ch_util.tools.rst.txt b/docs/_sources/_autosummary/ch_util.tools.rst.txt new file mode 100644 index 00000000..f8ae2e45 --- /dev/null +++ b/docs/_sources/_autosummary/ch_util.tools.rst.txt @@ -0,0 +1,79 @@ +ch\_util.tools +============== + +.. automodule:: ch_util.tools + + + .. rubric:: Functions + + .. autosummary:: + + antenna_to_lna + apply_gain + beam_index2number + calibrate_temperature + change_chime_location + change_pathfinder_location + cmap + decorrelation + delay + eigh_no_diagonal + ensure_list + fake_tone_database + fast_pack_product_array + fringestop_time + get_correlator_inputs + get_default_frequency_map_stream + get_feed_polarisations + get_feed_positions + get_holographic_index + get_noise_channel + get_noise_source_index + hk_to_sensor + icmap + invert_no_zero + is_array + is_array_on + is_array_x + is_array_y + is_chime + is_chime_on + is_holographic + is_noise_source + is_pathfinder + lna_to_antenna + normalise_correlations + order_frequency_map_stream + pack_product_array + parse_chime_serial + parse_old_serial + parse_pathfinder_serial + rankN_approx + redefine_stack_index_map + reorder_correlator_inputs + sensor_to_hk + serial_to_id + serial_to_location + subtract_rank1_signal + unpack_product_array + + .. rubric:: Classes + + .. autosummary:: + + Antenna + ArrayAntenna + Blank + CHIMEAntenna + CorrInput + GBOAntenna + HCOAntenna + HKInput + HolographyAntenna + KKOAntenna + NoiseSource + PCOAntenna + PathfinderAntenna + RFIAntenna + TONEAntenna + \ No newline at end of file diff --git a/docs/_sources/index.rst.txt b/docs/_sources/index.rst.txt new file mode 100644 index 00000000..2f802868 --- /dev/null +++ b/docs/_sources/index.rst.txt @@ -0,0 +1,21 @@ +.. ch_util documentation master file, created by + sphinx-quickstart on Thu Oct 10 12:52:16 2013. 
+ You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to ch_util's documentation! +=================================== + +Contents: + +.. toctree:: + :maxdepth: 2 + + reference + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/docs/_sources/reference.rst.txt b/docs/_sources/reference.rst.txt new file mode 100644 index 00000000..999e35fa --- /dev/null +++ b/docs/_sources/reference.rst.txt @@ -0,0 +1,4 @@ +API Reference +------------- + +.. automodule:: ch_util diff --git a/docs/_static/_sphinx_javascript_frameworks_compat.js b/docs/_static/_sphinx_javascript_frameworks_compat.js new file mode 100644 index 00000000..81415803 --- /dev/null +++ b/docs/_static/_sphinx_javascript_frameworks_compat.js @@ -0,0 +1,123 @@ +/* Compatability shim for jQuery and underscores.js. + * + * Copyright Sphinx contributors + * Released under the two clause BSD licence + */ + +/** + * small helper function to urldecode strings + * + * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/decodeURIComponent#Decoding_query_parameters_from_a_URL + */ +jQuery.urldecode = function(x) { + if (!x) { + return x + } + return decodeURIComponent(x.replace(/\+/g, ' ')); +}; + +/** + * small helper function to urlencode strings + */ +jQuery.urlencode = encodeURIComponent; + +/** + * This function returns the parsed url parameters of the + * current request. Multiple values per key are supported, + * it will always return arrays of strings for the value parts. 
+ */ +jQuery.getQueryParameters = function(s) { + if (typeof s === 'undefined') + s = document.location.search; + var parts = s.substr(s.indexOf('?') + 1).split('&'); + var result = {}; + for (var i = 0; i < parts.length; i++) { + var tmp = parts[i].split('=', 2); + var key = jQuery.urldecode(tmp[0]); + var value = jQuery.urldecode(tmp[1]); + if (key in result) + result[key].push(value); + else + result[key] = [value]; + } + return result; +}; + +/** + * highlight a given string on a jquery object by wrapping it in + * span elements with the given class name. + */ +jQuery.fn.highlightText = function(text, className) { + function highlight(node, addItems) { + if (node.nodeType === 3) { + var val = node.nodeValue; + var pos = val.toLowerCase().indexOf(text); + if (pos >= 0 && + !jQuery(node.parentNode).hasClass(className) && + !jQuery(node.parentNode).hasClass("nohighlight")) { + var span; + var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg"); + if (isInSVG) { + span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); + } else { + span = document.createElement("span"); + span.className = className; + } + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + node.parentNode.insertBefore(span, node.parentNode.insertBefore( + document.createTextNode(val.substr(pos + text.length)), + node.nextSibling)); + node.nodeValue = val.substr(0, pos); + if (isInSVG) { + var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect"); + var bbox = node.parentElement.getBBox(); + rect.x.baseVal.value = bbox.x; + rect.y.baseVal.value = bbox.y; + rect.width.baseVal.value = bbox.width; + rect.height.baseVal.value = bbox.height; + rect.setAttribute('class', className); + addItems.push({ + "parent": node.parentNode, + "target": rect}); + } + } + } + else if (!jQuery(node).is("button, select, textarea")) { + jQuery.each(node.childNodes, function() { + highlight(this, addItems); + }); + } + } + var addItems = []; + var 
result = this.each(function() { + highlight(this, addItems); + }); + for (var i = 0; i < addItems.length; ++i) { + jQuery(addItems[i].parent).before(addItems[i].target); + } + return result; +}; + +/* + * backward compatibility for jQuery.browser + * This will be supported until firefox bug is fixed. + */ +if (!jQuery.browser) { + jQuery.uaMatch = function(ua) { + ua = ua.toLowerCase(); + + var match = /(chrome)[ \/]([\w.]+)/.exec(ua) || + /(webkit)[ \/]([\w.]+)/.exec(ua) || + /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) || + /(msie) ([\w.]+)/.exec(ua) || + ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) || + []; + + return { + browser: match[ 1 ] || "", + version: match[ 2 ] || "0" + }; + }; + jQuery.browser = {}; + jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true; +} diff --git a/docs/_static/auto-render.min.js b/docs/_static/auto-render.min.js new file mode 100644 index 00000000..46d62af2 --- /dev/null +++ b/docs/_static/auto-render.min.js @@ -0,0 +1 @@ +!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t(require("katex")):"function"==typeof define&&define.amd?define(["katex"],t):"object"==typeof exports?exports.renderMathInElement=t(require("katex")):e.renderMathInElement=t(e.katex)}("undefined"!=typeof self?self:this,(function(e){return function(){"use strict";var t={771:function(t){t.exports=e}},n={};function r(e){var o=n[e];if(void 0!==o)return o.exports;var i=n[e]={exports:{}};return t[e](i,i.exports,r),i.exports}r.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return r.d(t,{a:t}),t},r.d=function(e,t){for(var n in t)r.o(t,n)&&!r.o(e,n)&&Object.defineProperty(e,n,{enumerable:!0,get:t[n]})},r.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)};var o={};return function(){r.d(o,{default:function(){return d}});var e=r(771),t=r.n(e);const n=function(e,t,n){let r=n,o=0;const 
i=e.length;for(;re.left.replace(/[-/\\^$*+?.()|[\]{}]/g,"\\$&"))).join("|")+")");for(;r=e.search(a),-1!==r;){r>0&&(o.push({type:"text",data:e.slice(0,r)}),e=e.slice(r));const a=t.findIndex((t=>e.startsWith(t.left)));if(r=n(t[a].right,e,t[a].left.length),-1===r)break;const l=e.slice(0,r+t[a].right.length),s=i.test(l)?l:e.slice(t[a].left.length,r);o.push({type:"math",data:s,rawData:l,display:t[a].display}),e=e.slice(r+t[a].right.length)}return""!==e&&o.push({type:"text",data:e}),o};const l=function(e,n){const r=a(e,n.delimiters);if(1===r.length&&"text"===r[0].type)return null;const o=document.createDocumentFragment();for(let e=0;e-1===e.indexOf(" "+t+" ")))&&s(r,t)}}};var d=function(e,t){if(!e)throw new Error("No element provided to render");const n={};for(const e in t)t.hasOwnProperty(e)&&(n[e]=t[e]);n.delimiters=n.delimiters||[{left:"$$",right:"$$",display:!0},{left:"\\(",right:"\\)",display:!1},{left:"\\begin{equation}",right:"\\end{equation}",display:!0},{left:"\\begin{align}",right:"\\end{align}",display:!0},{left:"\\begin{alignat}",right:"\\end{alignat}",display:!0},{left:"\\begin{gather}",right:"\\end{gather}",display:!0},{left:"\\begin{CD}",right:"\\end{CD}",display:!0},{left:"\\[",right:"\\]",display:!0}],n.ignoredTags=n.ignoredTags||["script","noscript","style","textarea","pre","code","option"],n.ignoredClasses=n.ignoredClasses||[],n.errorCallback=n.errorCallback||console.error,n.macros=n.macros||{},s(e,n)}}(),o=o.default}()})); \ No newline at end of file diff --git a/docs/_static/basic.css b/docs/_static/basic.css new file mode 100644 index 00000000..f316efcb --- /dev/null +++ b/docs/_static/basic.css @@ -0,0 +1,925 @@ +/* + * basic.css + * ~~~~~~~~~ + * + * Sphinx stylesheet -- basic theme. + * + * :copyright: Copyright 2007-2024 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. 
+ * + */ + +/* -- main layout ----------------------------------------------------------- */ + +div.clearer { + clear: both; +} + +div.section::after { + display: block; + content: ''; + clear: left; +} + +/* -- relbar ---------------------------------------------------------------- */ + +div.related { + width: 100%; + font-size: 90%; +} + +div.related h3 { + display: none; +} + +div.related ul { + margin: 0; + padding: 0 0 0 10px; + list-style: none; +} + +div.related li { + display: inline; +} + +div.related li.right { + float: right; + margin-right: 5px; +} + +/* -- sidebar --------------------------------------------------------------- */ + +div.sphinxsidebarwrapper { + padding: 10px 5px 0 10px; +} + +div.sphinxsidebar { + float: left; + width: 230px; + margin-left: -100%; + font-size: 90%; + word-wrap: break-word; + overflow-wrap : break-word; +} + +div.sphinxsidebar ul { + list-style: none; +} + +div.sphinxsidebar ul ul, +div.sphinxsidebar ul.want-points { + margin-left: 20px; + list-style: square; +} + +div.sphinxsidebar ul ul { + margin-top: 0; + margin-bottom: 0; +} + +div.sphinxsidebar form { + margin-top: 10px; +} + +div.sphinxsidebar input { + border: 1px solid #98dbcc; + font-family: sans-serif; + font-size: 1em; +} + +div.sphinxsidebar #searchbox form.search { + overflow: hidden; +} + +div.sphinxsidebar #searchbox input[type="text"] { + float: left; + width: 80%; + padding: 0.25em; + box-sizing: border-box; +} + +div.sphinxsidebar #searchbox input[type="submit"] { + float: left; + width: 20%; + border-left: none; + padding: 0.25em; + box-sizing: border-box; +} + + +img { + border: 0; + max-width: 100%; +} + +/* -- search page ----------------------------------------------------------- */ + +ul.search { + margin: 10px 0 0 20px; + padding: 0; +} + +ul.search li { + padding: 5px 0 5px 20px; + background-image: url(file.png); + background-repeat: no-repeat; + background-position: 0 7px; +} + +ul.search li a { + font-weight: bold; +} + +ul.search li 
p.context { + color: #888; + margin: 2px 0 0 30px; + text-align: left; +} + +ul.keywordmatches li.goodmatch a { + font-weight: bold; +} + +/* -- index page ------------------------------------------------------------ */ + +table.contentstable { + width: 90%; + margin-left: auto; + margin-right: auto; +} + +table.contentstable p.biglink { + line-height: 150%; +} + +a.biglink { + font-size: 1.3em; +} + +span.linkdescr { + font-style: italic; + padding-top: 5px; + font-size: 90%; +} + +/* -- general index --------------------------------------------------------- */ + +table.indextable { + width: 100%; +} + +table.indextable td { + text-align: left; + vertical-align: top; +} + +table.indextable ul { + margin-top: 0; + margin-bottom: 0; + list-style-type: none; +} + +table.indextable > tbody > tr > td > ul { + padding-left: 0em; +} + +table.indextable tr.pcap { + height: 10px; +} + +table.indextable tr.cap { + margin-top: 10px; + background-color: #f2f2f2; +} + +img.toggler { + margin-right: 3px; + margin-top: 3px; + cursor: pointer; +} + +div.modindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +div.genindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +/* -- domain module index --------------------------------------------------- */ + +table.modindextable td { + padding: 2px; + border-collapse: collapse; +} + +/* -- general body styles --------------------------------------------------- */ + +div.body { + min-width: 360px; + max-width: 800px; +} + +div.body p, div.body dd, div.body li, div.body blockquote { + -moz-hyphens: auto; + -ms-hyphens: auto; + -webkit-hyphens: auto; + hyphens: auto; +} + +a.headerlink { + visibility: hidden; +} + +a:visited { + color: #551A8B; +} + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > 
a.headerlink, +dt:hover > a.headerlink, +caption:hover > a.headerlink, +p.caption:hover > a.headerlink, +div.code-block-caption:hover > a.headerlink { + visibility: visible; +} + +div.body p.caption { + text-align: inherit; +} + +div.body td { + text-align: left; +} + +.first { + margin-top: 0 !important; +} + +p.rubric { + margin-top: 30px; + font-weight: bold; +} + +img.align-left, figure.align-left, .figure.align-left, object.align-left { + clear: left; + float: left; + margin-right: 1em; +} + +img.align-right, figure.align-right, .figure.align-right, object.align-right { + clear: right; + float: right; + margin-left: 1em; +} + +img.align-center, figure.align-center, .figure.align-center, object.align-center { + display: block; + margin-left: auto; + margin-right: auto; +} + +img.align-default, figure.align-default, .figure.align-default { + display: block; + margin-left: auto; + margin-right: auto; +} + +.align-left { + text-align: left; +} + +.align-center { + text-align: center; +} + +.align-default { + text-align: center; +} + +.align-right { + text-align: right; +} + +/* -- sidebars -------------------------------------------------------------- */ + +div.sidebar, +aside.sidebar { + margin: 0 0 0.5em 1em; + border: 1px solid #ddb; + padding: 7px; + background-color: #ffe; + width: 40%; + float: right; + clear: right; + overflow-x: auto; +} + +p.sidebar-title { + font-weight: bold; +} + +nav.contents, +aside.topic, +div.admonition, div.topic, blockquote { + clear: left; +} + +/* -- topics ---------------------------------------------------------------- */ + +nav.contents, +aside.topic, +div.topic { + border: 1px solid #ccc; + padding: 7px; + margin: 10px 0 10px 0; +} + +p.topic-title { + font-size: 1.1em; + font-weight: bold; + margin-top: 10px; +} + +/* -- admonitions ----------------------------------------------------------- */ + +div.admonition { + margin-top: 10px; + margin-bottom: 10px; + padding: 7px; +} + +div.admonition dt { + font-weight: bold; +} + 
+p.admonition-title { + margin: 0px 10px 5px 0px; + font-weight: bold; +} + +div.body p.centered { + text-align: center; + margin-top: 25px; +} + +/* -- content of sidebars/topics/admonitions -------------------------------- */ + +div.sidebar > :last-child, +aside.sidebar > :last-child, +nav.contents > :last-child, +aside.topic > :last-child, +div.topic > :last-child, +div.admonition > :last-child { + margin-bottom: 0; +} + +div.sidebar::after, +aside.sidebar::after, +nav.contents::after, +aside.topic::after, +div.topic::after, +div.admonition::after, +blockquote::after { + display: block; + content: ''; + clear: both; +} + +/* -- tables ---------------------------------------------------------------- */ + +table.docutils { + margin-top: 10px; + margin-bottom: 10px; + border: 0; + border-collapse: collapse; +} + +table.align-center { + margin-left: auto; + margin-right: auto; +} + +table.align-default { + margin-left: auto; + margin-right: auto; +} + +table caption span.caption-number { + font-style: italic; +} + +table caption span.caption-text { +} + +table.docutils td, table.docutils th { + padding: 1px 8px 1px 5px; + border-top: 0; + border-left: 0; + border-right: 0; + border-bottom: 1px solid #aaa; +} + +th { + text-align: left; + padding-right: 5px; +} + +table.citation { + border-left: solid 1px gray; + margin-left: 1px; +} + +table.citation td { + border-bottom: none; +} + +th > :first-child, +td > :first-child { + margin-top: 0px; +} + +th > :last-child, +td > :last-child { + margin-bottom: 0px; +} + +/* -- figures --------------------------------------------------------------- */ + +div.figure, figure { + margin: 0.5em; + padding: 0.5em; +} + +div.figure p.caption, figcaption { + padding: 0.3em; +} + +div.figure p.caption span.caption-number, +figcaption span.caption-number { + font-style: italic; +} + +div.figure p.caption span.caption-text, +figcaption span.caption-text { +} + +/* -- field list styles 
----------------------------------------------------- */ + +table.field-list td, table.field-list th { + border: 0 !important; +} + +.field-list ul { + margin: 0; + padding-left: 1em; +} + +.field-list p { + margin: 0; +} + +.field-name { + -moz-hyphens: manual; + -ms-hyphens: manual; + -webkit-hyphens: manual; + hyphens: manual; +} + +/* -- hlist styles ---------------------------------------------------------- */ + +table.hlist { + margin: 1em 0; +} + +table.hlist td { + vertical-align: top; +} + +/* -- object description styles --------------------------------------------- */ + +.sig { + font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace; +} + +.sig-name, code.descname { + background-color: transparent; + font-weight: bold; +} + +.sig-name { + font-size: 1.1em; +} + +code.descname { + font-size: 1.2em; +} + +.sig-prename, code.descclassname { + background-color: transparent; +} + +.optional { + font-size: 1.3em; +} + +.sig-paren { + font-size: larger; +} + +.sig-param.n { + font-style: italic; +} + +/* C++ specific styling */ + +.sig-inline.c-texpr, +.sig-inline.cpp-texpr { + font-family: unset; +} + +.sig.c .k, .sig.c .kt, +.sig.cpp .k, .sig.cpp .kt { + color: #0033B3; +} + +.sig.c .m, +.sig.cpp .m { + color: #1750EB; +} + +.sig.c .s, .sig.c .sc, +.sig.cpp .s, .sig.cpp .sc { + color: #067D17; +} + + +/* -- other body styles ----------------------------------------------------- */ + +ol.arabic { + list-style: decimal; +} + +ol.loweralpha { + list-style: lower-alpha; +} + +ol.upperalpha { + list-style: upper-alpha; +} + +ol.lowerroman { + list-style: lower-roman; +} + +ol.upperroman { + list-style: upper-roman; +} + +:not(li) > ol > li:first-child > :first-child, +:not(li) > ul > li:first-child > :first-child { + margin-top: 0px; +} + +:not(li) > ol > li:last-child > :last-child, +:not(li) > ul > li:last-child > :last-child { + margin-bottom: 0px; +} + +ol.simple ol p, +ol.simple ul p, +ul.simple ol p, +ul.simple ul p { + 
margin-top: 0; +} + +ol.simple > li:not(:first-child) > p, +ul.simple > li:not(:first-child) > p { + margin-top: 0; +} + +ol.simple p, +ul.simple p { + margin-bottom: 0; +} + +aside.footnote > span, +div.citation > span { + float: left; +} +aside.footnote > span:last-of-type, +div.citation > span:last-of-type { + padding-right: 0.5em; +} +aside.footnote > p { + margin-left: 2em; +} +div.citation > p { + margin-left: 4em; +} +aside.footnote > p:last-of-type, +div.citation > p:last-of-type { + margin-bottom: 0em; +} +aside.footnote > p:last-of-type:after, +div.citation > p:last-of-type:after { + content: ""; + clear: both; +} + +dl.field-list { + display: grid; + grid-template-columns: fit-content(30%) auto; +} + +dl.field-list > dt { + font-weight: bold; + word-break: break-word; + padding-left: 0.5em; + padding-right: 5px; +} + +dl.field-list > dd { + padding-left: 0.5em; + margin-top: 0em; + margin-left: 0em; + margin-bottom: 0em; +} + +dl { + margin-bottom: 15px; +} + +dd > :first-child { + margin-top: 0px; +} + +dd ul, dd table { + margin-bottom: 10px; +} + +dd { + margin-top: 3px; + margin-bottom: 10px; + margin-left: 30px; +} + +.sig dd { + margin-top: 0px; + margin-bottom: 0px; +} + +.sig dl { + margin-top: 0px; + margin-bottom: 0px; +} + +dl > dd:last-child, +dl > dd:last-child > :last-child { + margin-bottom: 0; +} + +dt:target, span.highlighted { + background-color: #fbe54e; +} + +rect.highlighted { + fill: #fbe54e; +} + +dl.glossary dt { + font-weight: bold; + font-size: 1.1em; +} + +.versionmodified { + font-style: italic; +} + +.system-message { + background-color: #fda; + padding: 5px; + border: 3px solid red; +} + +.footnote:target { + background-color: #ffa; +} + +.line-block { + display: block; + margin-top: 1em; + margin-bottom: 1em; +} + +.line-block .line-block { + margin-top: 0; + margin-bottom: 0; + margin-left: 1.5em; +} + +.guilabel, .menuselection { + font-family: sans-serif; +} + +.accelerator { + text-decoration: underline; +} + 
+.classifier { + font-style: oblique; +} + +.classifier:before { + font-style: normal; + margin: 0 0.5em; + content: ":"; + display: inline-block; +} + +abbr, acronym { + border-bottom: dotted 1px; + cursor: help; +} + +.translated { + background-color: rgba(207, 255, 207, 0.2) +} + +.untranslated { + background-color: rgba(255, 207, 207, 0.2) +} + +/* -- code displays --------------------------------------------------------- */ + +pre { + overflow: auto; + overflow-y: hidden; /* fixes display issues on Chrome browsers */ +} + +pre, div[class*="highlight-"] { + clear: both; +} + +span.pre { + -moz-hyphens: none; + -ms-hyphens: none; + -webkit-hyphens: none; + hyphens: none; + white-space: nowrap; +} + +div[class*="highlight-"] { + margin: 1em 0; +} + +td.linenos pre { + border: 0; + background-color: transparent; + color: #aaa; +} + +table.highlighttable { + display: block; +} + +table.highlighttable tbody { + display: block; +} + +table.highlighttable tr { + display: flex; +} + +table.highlighttable td { + margin: 0; + padding: 0; +} + +table.highlighttable td.linenos { + padding-right: 0.5em; +} + +table.highlighttable td.code { + flex: 1; + overflow: hidden; +} + +.highlight .hll { + display: block; +} + +div.highlight pre, +table.highlighttable pre { + margin: 0; +} + +div.code-block-caption + div { + margin-top: 0; +} + +div.code-block-caption { + margin-top: 1em; + padding: 2px 5px; + font-size: small; +} + +div.code-block-caption code { + background-color: transparent; +} + +table.highlighttable td.linenos, +span.linenos, +div.highlight span.gp { /* gp: Generic.Prompt */ + user-select: none; + -webkit-user-select: text; /* Safari fallback only */ + -webkit-user-select: none; /* Chrome/Safari */ + -moz-user-select: none; /* Firefox */ + -ms-user-select: none; /* IE10+ */ +} + +div.code-block-caption span.caption-number { + padding: 0.1em 0.3em; + font-style: italic; +} + +div.code-block-caption span.caption-text { +} + +div.literal-block-wrapper { + margin: 
1em 0; +} + +code.xref, a code { + background-color: transparent; + font-weight: bold; +} + +h1 code, h2 code, h3 code, h4 code, h5 code, h6 code { + background-color: transparent; +} + +.viewcode-link { + float: right; +} + +.viewcode-back { + float: right; + font-family: sans-serif; +} + +div.viewcode-block:target { + margin: -1px -10px; + padding: 0 10px; +} + +/* -- math display ---------------------------------------------------------- */ + +img.math { + vertical-align: middle; +} + +div.body div.math p { + text-align: center; +} + +span.eqno { + float: right; +} + +span.eqno a.headerlink { + position: absolute; + z-index: 1; +} + +div.math:hover a.headerlink { + visibility: visible; +} + +/* -- printout stylesheet --------------------------------------------------- */ + +@media print { + div.document, + div.documentwrapper, + div.bodywrapper { + margin: 0 !important; + width: 100%; + } + + div.sphinxsidebar, + div.related, + div.footer, + #top-link { + display: none; + } +} \ No newline at end of file diff --git a/docs/_static/css/badge_only.css b/docs/_static/css/badge_only.css new file mode 100644 index 00000000..c718cee4 --- /dev/null +++ b/docs/_static/css/badge_only.css @@ -0,0 +1 @@ +.clearfix{*zoom:1}.clearfix:after,.clearfix:before{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:FontAwesome;font-style:normal;font-weight:400;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#FontAwesome) format("svg")}.fa:before{font-family:FontAwesome;font-style:normal;font-weight:400;line-height:1}.fa:before,a .fa{text-decoration:inherit}.fa:before,a .fa,li 
.fa{display:inline-block}li .fa-large:before{width:1.875em}ul.fas{list-style-type:none;margin-left:2em;text-indent:-.8em}ul.fas li .fa{width:.8em}ul.fas li .fa-large:before{vertical-align:baseline}.fa-book:before,.icon-book:before{content:"\f02d"}.fa-caret-down:before,.icon-caret-down:before{content:"\f0d7"}.fa-caret-up:before,.icon-caret-up:before{content:"\f0d8"}.fa-caret-left:before,.icon-caret-left:before{content:"\f0d9"}.fa-caret-right:before,.icon-caret-right:before{content:"\f0da"}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60}.rst-versions .rst-current-version:after{clear:both;content:"";display:block}.rst-versions .rst-current-version .fa{color:#fcfcfc}.rst-versions .rst-current-version .fa-book,.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#f1c40f;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:grey;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:1px solid #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .fa-book,.rst-versions.rst-badge 
.icon-book{float:none;line-height:30px}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book,.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge>.rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width:768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}} \ No newline at end of file diff --git a/docs/_static/css/fonts/Roboto-Slab-Bold.woff b/docs/_static/css/fonts/Roboto-Slab-Bold.woff new file mode 100644 index 00000000..6cb60000 Binary files /dev/null and b/docs/_static/css/fonts/Roboto-Slab-Bold.woff differ diff --git a/docs/_static/css/fonts/Roboto-Slab-Bold.woff2 b/docs/_static/css/fonts/Roboto-Slab-Bold.woff2 new file mode 100644 index 00000000..7059e231 Binary files /dev/null and b/docs/_static/css/fonts/Roboto-Slab-Bold.woff2 differ diff --git a/docs/_static/css/fonts/Roboto-Slab-Regular.woff b/docs/_static/css/fonts/Roboto-Slab-Regular.woff new file mode 100644 index 00000000..f815f63f Binary files /dev/null and b/docs/_static/css/fonts/Roboto-Slab-Regular.woff differ diff --git a/docs/_static/css/fonts/Roboto-Slab-Regular.woff2 b/docs/_static/css/fonts/Roboto-Slab-Regular.woff2 new file mode 100644 index 00000000..f2c76e5b Binary files /dev/null and b/docs/_static/css/fonts/Roboto-Slab-Regular.woff2 differ diff --git a/docs/_static/css/fonts/fontawesome-webfont.eot b/docs/_static/css/fonts/fontawesome-webfont.eot new file mode 100644 index 00000000..e9f60ca9 Binary files /dev/null and b/docs/_static/css/fonts/fontawesome-webfont.eot differ diff --git a/docs/_static/css/fonts/fontawesome-webfont.svg b/docs/_static/css/fonts/fontawesome-webfont.svg new file mode 100644 index 00000000..855c845e --- /dev/null +++ b/docs/_static/css/fonts/fontawesome-webfont.svg @@ -0,0 +1,2671 @@ + + + + +Created by FontForge 20120731 at Mon Oct 
24 17:37:40 2016 + By ,,, +Copyright Dave Gandy 2016. All rights reserved. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/_static/css/fonts/fontawesome-webfont.ttf b/docs/_static/css/fonts/fontawesome-webfont.ttf new file mode 100644 index 00000000..35acda2f Binary files /dev/null and b/docs/_static/css/fonts/fontawesome-webfont.ttf differ diff --git a/docs/_static/css/fonts/fontawesome-webfont.woff b/docs/_static/css/fonts/fontawesome-webfont.woff new file mode 100644 index 00000000..400014a4 Binary files /dev/null and b/docs/_static/css/fonts/fontawesome-webfont.woff differ diff --git 
a/docs/_static/css/fonts/fontawesome-webfont.woff2 b/docs/_static/css/fonts/fontawesome-webfont.woff2 new file mode 100644 index 00000000..4d13fc60 Binary files /dev/null and b/docs/_static/css/fonts/fontawesome-webfont.woff2 differ diff --git a/docs/_static/css/fonts/lato-bold-italic.woff b/docs/_static/css/fonts/lato-bold-italic.woff new file mode 100644 index 00000000..88ad05b9 Binary files /dev/null and b/docs/_static/css/fonts/lato-bold-italic.woff differ diff --git a/docs/_static/css/fonts/lato-bold-italic.woff2 b/docs/_static/css/fonts/lato-bold-italic.woff2 new file mode 100644 index 00000000..c4e3d804 Binary files /dev/null and b/docs/_static/css/fonts/lato-bold-italic.woff2 differ diff --git a/docs/_static/css/fonts/lato-bold.woff b/docs/_static/css/fonts/lato-bold.woff new file mode 100644 index 00000000..c6dff51f Binary files /dev/null and b/docs/_static/css/fonts/lato-bold.woff differ diff --git a/docs/_static/css/fonts/lato-bold.woff2 b/docs/_static/css/fonts/lato-bold.woff2 new file mode 100644 index 00000000..bb195043 Binary files /dev/null and b/docs/_static/css/fonts/lato-bold.woff2 differ diff --git a/docs/_static/css/fonts/lato-normal-italic.woff b/docs/_static/css/fonts/lato-normal-italic.woff new file mode 100644 index 00000000..76114bc0 Binary files /dev/null and b/docs/_static/css/fonts/lato-normal-italic.woff differ diff --git a/docs/_static/css/fonts/lato-normal-italic.woff2 b/docs/_static/css/fonts/lato-normal-italic.woff2 new file mode 100644 index 00000000..3404f37e Binary files /dev/null and b/docs/_static/css/fonts/lato-normal-italic.woff2 differ diff --git a/docs/_static/css/fonts/lato-normal.woff b/docs/_static/css/fonts/lato-normal.woff new file mode 100644 index 00000000..ae1307ff Binary files /dev/null and b/docs/_static/css/fonts/lato-normal.woff differ diff --git a/docs/_static/css/fonts/lato-normal.woff2 b/docs/_static/css/fonts/lato-normal.woff2 new file mode 100644 index 00000000..3bf98433 Binary files /dev/null and 
b/docs/_static/css/fonts/lato-normal.woff2 differ diff --git a/docs/_static/css/theme.css b/docs/_static/css/theme.css new file mode 100644 index 00000000..19a446a0 --- /dev/null +++ b/docs/_static/css/theme.css @@ -0,0 +1,4 @@ +html{box-sizing:border-box}*,:after,:before{box-sizing:inherit}article,aside,details,figcaption,figure,footer,header,hgroup,nav,section{display:block}audio,canvas,video{display:inline-block;*display:inline;*zoom:1}[hidden],audio:not([controls]){display:none}*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}html{font-size:100%;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}body{margin:0}a:active,a:hover{outline:0}abbr[title]{border-bottom:1px dotted}b,strong{font-weight:700}blockquote{margin:0}dfn{font-style:italic}ins{background:#ff9;text-decoration:none}ins,mark{color:#000}mark{background:#ff0;font-style:italic;font-weight:700}.rst-content code,.rst-content tt,code,kbd,pre,samp{font-family:monospace,serif;_font-family:courier 
new,monospace;font-size:1em}pre{white-space:pre}q{quotes:none}q:after,q:before{content:"";content:none}small{font-size:85%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sup{top:-.5em}sub{bottom:-.25em}dl,ol,ul{margin:0;padding:0;list-style:none;list-style-image:none}li{list-style:none}dd{margin:0}img{border:0;-ms-interpolation-mode:bicubic;vertical-align:middle;max-width:100%}svg:not(:root){overflow:hidden}figure,form{margin:0}label{cursor:pointer}button,input,select,textarea{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle}button,input{line-height:normal}button,input[type=button],input[type=reset],input[type=submit]{cursor:pointer;-webkit-appearance:button;*overflow:visible}button[disabled],input[disabled]{cursor:default}input[type=search]{-webkit-appearance:textfield;-moz-box-sizing:content-box;-webkit-box-sizing:content-box;box-sizing:content-box}textarea{resize:vertical}table{border-collapse:collapse;border-spacing:0}td{vertical-align:top}.chromeframe{margin:.2em 0;background:#ccc;color:#000;padding:.2em 0}.ir{display:block;border:0;text-indent:-999em;overflow:hidden;background-color:transparent;background-repeat:no-repeat;text-align:left;direction:ltr;*line-height:0}.ir br{display:none}.hidden{display:none!important;visibility:hidden}.visuallyhidden{border:0;clip:rect(0 0 0 0);height:1px;margin:-1px;overflow:hidden;padding:0;position:absolute;width:1px}.visuallyhidden.focusable:active,.visuallyhidden.focusable:focus{clip:auto;height:auto;margin:0;overflow:visible;position:static;width:auto}.invisible{visibility:hidden}.relative{position:relative}big,small{font-size:100%}@media print{body,html,section{background:none!important}*{box-shadow:none!important;text-shadow:none!important;filter:none!important;-ms-filter:none!important}a,a:visited{text-decoration:underline}.ir 
a:after,a[href^="#"]:after,a[href^="javascript:"]:after{content:""}blockquote,pre{page-break-inside:avoid}thead{display:table-header-group}img,tr{page-break-inside:avoid}img{max-width:100%!important}@page{margin:.5cm}.rst-content .toctree-wrapper>p.caption,h2,h3,p{orphans:3;widows:3}.rst-content .toctree-wrapper>p.caption,h2,h3{page-break-after:avoid}}.btn,.fa:before,.icon:before,.rst-content .admonition,.rst-content .admonition-title:before,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .code-block-caption .headerlink:before,.rst-content .danger,.rst-content .eqno .headerlink:before,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning,.rst-content code.download span:first-child:before,.rst-content dl dt .headerlink:before,.rst-content h1 .headerlink:before,.rst-content h2 .headerlink:before,.rst-content h3 .headerlink:before,.rst-content h4 .headerlink:before,.rst-content h5 .headerlink:before,.rst-content h6 .headerlink:before,.rst-content p.caption .headerlink:before,.rst-content p .headerlink:before,.rst-content table>caption .headerlink:before,.rst-content tt.download span:first-child:before,.wy-alert,.wy-dropdown .caret:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before,.wy-menu-vertical li 
button.toctree-expand:before,input[type=color],input[type=date],input[type=datetime-local],input[type=datetime],input[type=email],input[type=month],input[type=number],input[type=password],input[type=search],input[type=tel],input[type=text],input[type=time],input[type=url],input[type=week],select,textarea{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:after,.clearfix:before{display:table;content:""}.clearfix:after{clear:both}/*! + * Font Awesome 4.7.0 by @davegandy - http://fontawesome.io - @fontawesome + * License - http://fontawesome.io/license (Font: SIL OFL 1.1, CSS: MIT License) + */@font-face{font-family:FontAwesome;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713);src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix&v=4.7.0) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#fontawesomeregular) format("svg");font-weight:400;font-style:normal}.fa,.icon,.rst-content .admonition-title,.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content code.download span:first-child,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink,.rst-content tt.download span:first-child,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li button.toctree-expand{display:inline-block;font:normal normal normal 14px/1 
FontAwesome;font-size:inherit;text-rendering:auto;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.fa-lg{font-size:1.33333em;line-height:.75em;vertical-align:-15%}.fa-2x{font-size:2em}.fa-3x{font-size:3em}.fa-4x{font-size:4em}.fa-5x{font-size:5em}.fa-fw{width:1.28571em;text-align:center}.fa-ul{padding-left:0;margin-left:2.14286em;list-style-type:none}.fa-ul>li{position:relative}.fa-li{position:absolute;left:-2.14286em;width:2.14286em;top:.14286em;text-align:center}.fa-li.fa-lg{left:-1.85714em}.fa-border{padding:.2em .25em .15em;border:.08em solid #eee;border-radius:.1em}.fa-pull-left{float:left}.fa-pull-right{float:right}.fa-pull-left.icon,.fa.fa-pull-left,.rst-content .code-block-caption .fa-pull-left.headerlink,.rst-content .eqno .fa-pull-left.headerlink,.rst-content .fa-pull-left.admonition-title,.rst-content code.download span.fa-pull-left:first-child,.rst-content dl dt .fa-pull-left.headerlink,.rst-content h1 .fa-pull-left.headerlink,.rst-content h2 .fa-pull-left.headerlink,.rst-content h3 .fa-pull-left.headerlink,.rst-content h4 .fa-pull-left.headerlink,.rst-content h5 .fa-pull-left.headerlink,.rst-content h6 .fa-pull-left.headerlink,.rst-content p .fa-pull-left.headerlink,.rst-content table>caption .fa-pull-left.headerlink,.rst-content tt.download span.fa-pull-left:first-child,.wy-menu-vertical li.current>a button.fa-pull-left.toctree-expand,.wy-menu-vertical li.on a button.fa-pull-left.toctree-expand,.wy-menu-vertical li button.fa-pull-left.toctree-expand{margin-right:.3em}.fa-pull-right.icon,.fa.fa-pull-right,.rst-content .code-block-caption .fa-pull-right.headerlink,.rst-content .eqno .fa-pull-right.headerlink,.rst-content .fa-pull-right.admonition-title,.rst-content code.download span.fa-pull-right:first-child,.rst-content dl dt .fa-pull-right.headerlink,.rst-content h1 .fa-pull-right.headerlink,.rst-content h2 .fa-pull-right.headerlink,.rst-content h3 .fa-pull-right.headerlink,.rst-content h4 .fa-pull-right.headerlink,.rst-content 
h5 .fa-pull-right.headerlink,.rst-content h6 .fa-pull-right.headerlink,.rst-content p .fa-pull-right.headerlink,.rst-content table>caption .fa-pull-right.headerlink,.rst-content tt.download span.fa-pull-right:first-child,.wy-menu-vertical li.current>a button.fa-pull-right.toctree-expand,.wy-menu-vertical li.on a button.fa-pull-right.toctree-expand,.wy-menu-vertical li button.fa-pull-right.toctree-expand{margin-left:.3em}.pull-right{float:right}.pull-left{float:left}.fa.pull-left,.pull-left.icon,.rst-content .code-block-caption .pull-left.headerlink,.rst-content .eqno .pull-left.headerlink,.rst-content .pull-left.admonition-title,.rst-content code.download span.pull-left:first-child,.rst-content dl dt .pull-left.headerlink,.rst-content h1 .pull-left.headerlink,.rst-content h2 .pull-left.headerlink,.rst-content h3 .pull-left.headerlink,.rst-content h4 .pull-left.headerlink,.rst-content h5 .pull-left.headerlink,.rst-content h6 .pull-left.headerlink,.rst-content p .pull-left.headerlink,.rst-content table>caption .pull-left.headerlink,.rst-content tt.download span.pull-left:first-child,.wy-menu-vertical li.current>a button.pull-left.toctree-expand,.wy-menu-vertical li.on a button.pull-left.toctree-expand,.wy-menu-vertical li button.pull-left.toctree-expand{margin-right:.3em}.fa.pull-right,.pull-right.icon,.rst-content .code-block-caption .pull-right.headerlink,.rst-content .eqno .pull-right.headerlink,.rst-content .pull-right.admonition-title,.rst-content code.download span.pull-right:first-child,.rst-content dl dt .pull-right.headerlink,.rst-content h1 .pull-right.headerlink,.rst-content h2 .pull-right.headerlink,.rst-content h3 .pull-right.headerlink,.rst-content h4 .pull-right.headerlink,.rst-content h5 .pull-right.headerlink,.rst-content h6 .pull-right.headerlink,.rst-content p .pull-right.headerlink,.rst-content table>caption .pull-right.headerlink,.rst-content tt.download span.pull-right:first-child,.wy-menu-vertical li.current>a 
button.pull-right.toctree-expand,.wy-menu-vertical li.on a button.pull-right.toctree-expand,.wy-menu-vertical li button.pull-right.toctree-expand{margin-left:.3em}.fa-spin{-webkit-animation:fa-spin 2s linear infinite;animation:fa-spin 2s linear infinite}.fa-pulse{-webkit-animation:fa-spin 1s steps(8) infinite;animation:fa-spin 1s steps(8) infinite}@-webkit-keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}@keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}.fa-rotate-90{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=1)";-webkit-transform:rotate(90deg);-ms-transform:rotate(90deg);transform:rotate(90deg)}.fa-rotate-180{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2)";-webkit-transform:rotate(180deg);-ms-transform:rotate(180deg);transform:rotate(180deg)}.fa-rotate-270{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=3)";-webkit-transform:rotate(270deg);-ms-transform:rotate(270deg);transform:rotate(270deg)}.fa-flip-horizontal{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=0, mirror=1)";-webkit-transform:scaleX(-1);-ms-transform:scaleX(-1);transform:scaleX(-1)}.fa-flip-vertical{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1)";-webkit-transform:scaleY(-1);-ms-transform:scaleY(-1);transform:scaleY(-1)}:root .fa-flip-horizontal,:root .fa-flip-vertical,:root .fa-rotate-90,:root .fa-rotate-180,:root 
.fa-rotate-270{filter:none}.fa-stack{position:relative;display:inline-block;width:2em;height:2em;line-height:2em;vertical-align:middle}.fa-stack-1x,.fa-stack-2x{position:absolute;left:0;width:100%;text-align:center}.fa-stack-1x{line-height:inherit}.fa-stack-2x{font-size:2em}.fa-inverse{color:#fff}.fa-glass:before{content:""}.fa-music:before{content:""}.fa-search:before,.icon-search:before{content:""}.fa-envelope-o:before{content:""}.fa-heart:before{content:""}.fa-star:before{content:""}.fa-star-o:before{content:""}.fa-user:before{content:""}.fa-film:before{content:""}.fa-th-large:before{content:""}.fa-th:before{content:""}.fa-th-list:before{content:""}.fa-check:before{content:""}.fa-close:before,.fa-remove:before,.fa-times:before{content:""}.fa-search-plus:before{content:""}.fa-search-minus:before{content:""}.fa-power-off:before{content:""}.fa-signal:before{content:""}.fa-cog:before,.fa-gear:before{content:""}.fa-trash-o:before{content:""}.fa-home:before,.icon-home:before{content:""}.fa-file-o:before{content:""}.fa-clock-o:before{content:""}.fa-road:before{content:""}.fa-download:before,.rst-content code.download span:first-child:before,.rst-content tt.download 
span:first-child:before{content:""}.fa-arrow-circle-o-down:before{content:""}.fa-arrow-circle-o-up:before{content:""}.fa-inbox:before{content:""}.fa-play-circle-o:before{content:""}.fa-repeat:before,.fa-rotate-right:before{content:""}.fa-refresh:before{content:""}.fa-list-alt:before{content:""}.fa-lock:before{content:""}.fa-flag:before{content:""}.fa-headphones:before{content:""}.fa-volume-off:before{content:""}.fa-volume-down:before{content:""}.fa-volume-up:before{content:""}.fa-qrcode:before{content:""}.fa-barcode:before{content:""}.fa-tag:before{content:""}.fa-tags:before{content:""}.fa-book:before,.icon-book:before{content:""}.fa-bookmark:before{content:""}.fa-print:before{content:""}.fa-camera:before{content:""}.fa-font:before{content:""}.fa-bold:before{content:""}.fa-italic:before{content:""}.fa-text-height:before{content:""}.fa-text-width:before{content:""}.fa-align-left:before{content:""}.fa-align-center:before{content:""}.fa-align-right:before{content:""}.fa-align-justify:before{content:""}.fa-list:before{content:""}.fa-dedent:before,.fa-outdent:before{content:""}.fa-indent:before{content:""}.fa-video-camera:before{content:""}.fa-image:before,.fa-photo:before,.fa-picture-o:before{content:""}.fa-pencil:before{content:""}.fa-map-marker:before{content:""}.fa-adjust:before{content:""}.fa-tint:before{content:""}.fa-edit:before,.fa-pencil-square-o:before{content:""}.fa-share-square-o:before{content:""}.fa-check-square-o:before{content:""}.fa-arrows:before{content:""}.fa-step-backward:before{content:""}.fa-fast-backward:before{content:""}.fa-backward:before{content:""}.fa-play:before{content:""}.fa-pause:before{content:""}.fa-stop:before{content:""}.fa-forward:before{content:""}.fa-fast-forward:before{content:""}.fa-step-forward:before{content:""}.fa-eject:before{content:""}.fa-chevron-left:before{content:""}.fa-chevron-right:before{content:""}.fa-plus-circle:before{content:""}.fa-minus-circle:before{content
:""}.fa-times-circle:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before{content:""}.fa-check-circle:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before{content:""}.fa-question-circle:before{content:""}.fa-info-circle:before{content:""}.fa-crosshairs:before{content:""}.fa-times-circle-o:before{content:""}.fa-check-circle-o:before{content:""}.fa-ban:before{content:""}.fa-arrow-left:before{content:""}.fa-arrow-right:before{content:""}.fa-arrow-up:before{content:""}.fa-arrow-down:before{content:""}.fa-mail-forward:before,.fa-share:before{content:""}.fa-expand:before{content:""}.fa-compress:before{content:""}.fa-plus:before{content:""}.fa-minus:before{content:""}.fa-asterisk:before{content:""}.fa-exclamation-circle:before,.rst-content .admonition-title:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before{content:""}.fa-gift:before{content:""}.fa-leaf:before{content:""}.fa-fire:before,.icon-fire:before{content:""}.fa-eye:before{content:""}.fa-eye-slash:before{content:""}.fa-exclamation-triangle:before,.fa-warning:before{content:""}.fa-plane:before{content:""}.fa-calendar:before{content:""}.fa-random:before{content:""}.fa-comment:before{content:""}.fa-magnet:before{content:""}.fa-chevron-up:before{content:""}.fa-chevron-down:before{content:""}.fa-retweet:before{content:""}.fa-shopping-cart:before{content:""}.fa-folder:before{content:""}.fa-folder-open:before{content:""}.fa-arrows-v:before{content:""}.fa-arrows-h:before{content:""}.fa-bar-chart-o:before,.fa-bar-chart:before{content:""}.fa-twitter-square:before{content:""}.fa-facebook-square:before{content:""}.fa-camera-retro:before{content:""}.fa-key:before{content:""}.fa-cogs:before,.fa-gears:before{content:""}.fa-comments:before{content:""}.fa-thumbs-o-up:before{content:""}.fa-thumbs-o-down:before{content:""}.fa-star-half:before
{content:""}.fa-heart-o:before{content:""}.fa-sign-out:before{content:""}.fa-linkedin-square:before{content:""}.fa-thumb-tack:before{content:""}.fa-external-link:before{content:""}.fa-sign-in:before{content:""}.fa-trophy:before{content:""}.fa-github-square:before{content:""}.fa-upload:before{content:""}.fa-lemon-o:before{content:""}.fa-phone:before{content:""}.fa-square-o:before{content:""}.fa-bookmark-o:before{content:""}.fa-phone-square:before{content:""}.fa-twitter:before{content:""}.fa-facebook-f:before,.fa-facebook:before{content:""}.fa-github:before,.icon-github:before{content:""}.fa-unlock:before{content:""}.fa-credit-card:before{content:""}.fa-feed:before,.fa-rss:before{content:""}.fa-hdd-o:before{content:""}.fa-bullhorn:before{content:""}.fa-bell:before{content:""}.fa-certificate:before{content:""}.fa-hand-o-right:before{content:""}.fa-hand-o-left:before{content:""}.fa-hand-o-up:before{content:""}.fa-hand-o-down:before{content:""}.fa-arrow-circle-left:before,.icon-circle-arrow-left:before{content:""}.fa-arrow-circle-right:before,.icon-circle-arrow-right:before{content:""}.fa-arrow-circle-up:before{content:""}.fa-arrow-circle-down:before{content:""}.fa-globe:before{content:""}.fa-wrench:before{content:""}.fa-tasks:before{content:""}.fa-filter:before{content:""}.fa-briefcase:before{content:""}.fa-arrows-alt:before{content:""}.fa-group:before,.fa-users:before{content:""}.fa-chain:before,.fa-link:before,.icon-link:before{content:""}.fa-cloud:before{content:""}.fa-flask:before{content:""}.fa-cut:before,.fa-scissors:before{content:""}.fa-copy:before,.fa-files-o:before{content:""}.fa-paperclip:before{content:""}.fa-floppy-o:before,.fa-save:before{content:""}.fa-square:before{content:""}.fa-bars:before,.fa-navicon:before,.fa-reorder:before{content:""}.fa-list-ul:before{content:""}.fa-list-ol:before{content:""}.fa-strikethrough:before{content:""}.fa-underline:before{content:""}.fa-table:before{content:""}.fa-magi
c:before{content:""}.fa-truck:before{content:""}.fa-pinterest:before{content:""}.fa-pinterest-square:before{content:""}.fa-google-plus-square:before{content:""}.fa-google-plus:before{content:""}.fa-money:before{content:""}.fa-caret-down:before,.icon-caret-down:before,.wy-dropdown .caret:before{content:""}.fa-caret-up:before{content:""}.fa-caret-left:before{content:""}.fa-caret-right:before{content:""}.fa-columns:before{content:""}.fa-sort:before,.fa-unsorted:before{content:""}.fa-sort-desc:before,.fa-sort-down:before{content:""}.fa-sort-asc:before,.fa-sort-up:before{content:""}.fa-envelope:before{content:""}.fa-linkedin:before{content:""}.fa-rotate-left:before,.fa-undo:before{content:""}.fa-gavel:before,.fa-legal:before{content:""}.fa-dashboard:before,.fa-tachometer:before{content:""}.fa-comment-o:before{content:""}.fa-comments-o:before{content:""}.fa-bolt:before,.fa-flash:before{content:""}.fa-sitemap:before{content:""}.fa-umbrella:before{content:""}.fa-clipboard:before,.fa-paste:before{content:""}.fa-lightbulb-o:before{content:""}.fa-exchange:before{content:""}.fa-cloud-download:before{content:""}.fa-cloud-upload:before{content:""}.fa-user-md:before{content:""}.fa-stethoscope:before{content:""}.fa-suitcase:before{content:""}.fa-bell-o:before{content:""}.fa-coffee:before{content:""}.fa-cutlery:before{content:""}.fa-file-text-o:before{content:""}.fa-building-o:before{content:""}.fa-hospital-o:before{content:""}.fa-ambulance:before{content:""}.fa-medkit:before{content:""}.fa-fighter-jet:before{content:""}.fa-beer:before{content:""}.fa-h-square:before{content:""}.fa-plus-square:before{content:""}.fa-angle-double-left:before{content:""}.fa-angle-double-right:before{content:""}.fa-angle-double-up:before{content:""}.fa-angle-double-down:before{content:""}.fa-angle-left:before{content:""}.fa-angle-right:before{content:""}.fa-angle-up:before{content:""}.fa-angle-down:before{content:""}.fa-desktop:before{content:""}.fa-l
aptop:before{content:""}.fa-tablet:before{content:""}.fa-mobile-phone:before,.fa-mobile:before{content:""}.fa-circle-o:before{content:""}.fa-quote-left:before{content:""}.fa-quote-right:before{content:""}.fa-spinner:before{content:""}.fa-circle:before{content:""}.fa-mail-reply:before,.fa-reply:before{content:""}.fa-github-alt:before{content:""}.fa-folder-o:before{content:""}.fa-folder-open-o:before{content:""}.fa-smile-o:before{content:""}.fa-frown-o:before{content:""}.fa-meh-o:before{content:""}.fa-gamepad:before{content:""}.fa-keyboard-o:before{content:""}.fa-flag-o:before{content:""}.fa-flag-checkered:before{content:""}.fa-terminal:before{content:""}.fa-code:before{content:""}.fa-mail-reply-all:before,.fa-reply-all:before{content:""}.fa-star-half-empty:before,.fa-star-half-full:before,.fa-star-half-o:before{content:""}.fa-location-arrow:before{content:""}.fa-crop:before{content:""}.fa-code-fork:before{content:""}.fa-chain-broken:before,.fa-unlink:before{content:""}.fa-question:before{content:""}.fa-info:before{content:""}.fa-exclamation:before{content:""}.fa-superscript:before{content:""}.fa-subscript:before{content:""}.fa-eraser:before{content:""}.fa-puzzle-piece:before{content:""}.fa-microphone:before{content:""}.fa-microphone-slash:before{content:""}.fa-shield:before{content:""}.fa-calendar-o:before{content:""}.fa-fire-extinguisher:before{content:""}.fa-rocket:before{content:""}.fa-maxcdn:before{content:""}.fa-chevron-circle-left:before{content:""}.fa-chevron-circle-right:before{content:""}.fa-chevron-circle-up:before{content:""}.fa-chevron-circle-down:before{content:""}.fa-html5:before{content:""}.fa-css3:before{content:""}.fa-anchor:before{content:""}.fa-unlock-alt:before{content:""}.fa-bullseye:before{content:""}.fa-ellipsis-h:before{content:""}.fa-ellipsis-v:before{content:""}.fa-rss-square:before{content:""}.fa-play-circle:before{content:""}.fa-ticket:before{content:""}.fa-minus-square:before{content:
""}.fa-minus-square-o:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before{content:""}.fa-level-up:before{content:""}.fa-level-down:before{content:""}.fa-check-square:before{content:""}.fa-pencil-square:before{content:""}.fa-external-link-square:before{content:""}.fa-share-square:before{content:""}.fa-compass:before{content:""}.fa-caret-square-o-down:before,.fa-toggle-down:before{content:""}.fa-caret-square-o-up:before,.fa-toggle-up:before{content:""}.fa-caret-square-o-right:before,.fa-toggle-right:before{content:""}.fa-eur:before,.fa-euro:before{content:""}.fa-gbp:before{content:""}.fa-dollar:before,.fa-usd:before{content:""}.fa-inr:before,.fa-rupee:before{content:""}.fa-cny:before,.fa-jpy:before,.fa-rmb:before,.fa-yen:before{content:""}.fa-rouble:before,.fa-rub:before,.fa-ruble:before{content:""}.fa-krw:before,.fa-won:before{content:""}.fa-bitcoin:before,.fa-btc:before{content:""}.fa-file:before{content:""}.fa-file-text:before{content:""}.fa-sort-alpha-asc:before{content:""}.fa-sort-alpha-desc:before{content:""}.fa-sort-amount-asc:before{content:""}.fa-sort-amount-desc:before{content:""}.fa-sort-numeric-asc:before{content:""}.fa-sort-numeric-desc:before{content:""}.fa-thumbs-up:before{content:""}.fa-thumbs-down:before{content:""}.fa-youtube-square:before{content:""}.fa-youtube:before{content:""}.fa-xing:before{content:""}.fa-xing-square:before{content:""}.fa-youtube-play:before{content:""}.fa-dropbox:before{content:""}.fa-stack-overflow:before{content:""}.fa-instagram:before{content:""}.fa-flickr:before{content:""}.fa-adn:before{content:""}.fa-bitbucket:before,.icon-bitbucket:before{content:""}.fa-bitbucket-square:before{content:""}.fa-tumblr:before{content:""}.fa-tumblr-square:before{content:""}.fa-long-arrow-down:before{content:""}.fa-long-arrow-up:before{content:""}.fa-long-arrow-left:before{content:""}.fa-long-arrow-right:before{content:""}.fa-a
pple:before{content:""}.fa-windows:before{content:""}.fa-android:before{content:""}.fa-linux:before{content:""}.fa-dribbble:before{content:""}.fa-skype:before{content:""}.fa-foursquare:before{content:""}.fa-trello:before{content:""}.fa-female:before{content:""}.fa-male:before{content:""}.fa-gittip:before,.fa-gratipay:before{content:""}.fa-sun-o:before{content:""}.fa-moon-o:before{content:""}.fa-archive:before{content:""}.fa-bug:before{content:""}.fa-vk:before{content:""}.fa-weibo:before{content:""}.fa-renren:before{content:""}.fa-pagelines:before{content:""}.fa-stack-exchange:before{content:""}.fa-arrow-circle-o-right:before{content:""}.fa-arrow-circle-o-left:before{content:""}.fa-caret-square-o-left:before,.fa-toggle-left:before{content:""}.fa-dot-circle-o:before{content:""}.fa-wheelchair:before{content:""}.fa-vimeo-square:before{content:""}.fa-try:before,.fa-turkish-lira:before{content:""}.fa-plus-square-o:before,.wy-menu-vertical li button.toctree-expand:before{content:""}.fa-space-shuttle:before{content:""}.fa-slack:before{content:""}.fa-envelope-square:before{content:""}.fa-wordpress:before{content:""}.fa-openid:before{content:""}.fa-bank:before,.fa-institution:before,.fa-university:before{content:""}.fa-graduation-cap:before,.fa-mortar-board:before{content:""}.fa-yahoo:before{content:""}.fa-google:before{content:""}.fa-reddit:before{content:""}.fa-reddit-square:before{content:""}.fa-stumbleupon-circle:before{content:""}.fa-stumbleupon:before{content:""}.fa-delicious:before{content:""}.fa-digg:before{content:""}.fa-pied-piper-pp:before{content:""}.fa-pied-piper-alt:before{content:""}.fa-drupal:before{content:""}.fa-joomla:before{content:""}.fa-language:before{content:""}.fa-fax:before{content:""}.fa-building:before{content:""}.fa-child:before{content:""}.fa-paw:before{content:""}.fa-spoon:before{content:""}.fa-cube:before{content:""}.fa-cubes:before{content:""}.fa-behance:before{content:""}.fa-behance-squa
re:before{content:""}.fa-steam:before{content:""}.fa-steam-square:before{content:""}.fa-recycle:before{content:""}.fa-automobile:before,.fa-car:before{content:""}.fa-cab:before,.fa-taxi:before{content:""}.fa-tree:before{content:""}.fa-spotify:before{content:""}.fa-deviantart:before{content:""}.fa-soundcloud:before{content:""}.fa-database:before{content:""}.fa-file-pdf-o:before{content:""}.fa-file-word-o:before{content:""}.fa-file-excel-o:before{content:""}.fa-file-powerpoint-o:before{content:""}.fa-file-image-o:before,.fa-file-photo-o:before,.fa-file-picture-o:before{content:""}.fa-file-archive-o:before,.fa-file-zip-o:before{content:""}.fa-file-audio-o:before,.fa-file-sound-o:before{content:""}.fa-file-movie-o:before,.fa-file-video-o:before{content:""}.fa-file-code-o:before{content:""}.fa-vine:before{content:""}.fa-codepen:before{content:""}.fa-jsfiddle:before{content:""}.fa-life-bouy:before,.fa-life-buoy:before,.fa-life-ring:before,.fa-life-saver:before,.fa-support:before{content:""}.fa-circle-o-notch:before{content:""}.fa-ra:before,.fa-rebel:before,.fa-resistance:before{content:""}.fa-empire:before,.fa-ge:before{content:""}.fa-git-square:before{content:""}.fa-git:before{content:""}.fa-hacker-news:before,.fa-y-combinator-square:before,.fa-yc-square:before{content:""}.fa-tencent-weibo:before{content:""}.fa-qq:before{content:""}.fa-wechat:before,.fa-weixin:before{content:""}.fa-paper-plane:before,.fa-send:before{content:""}.fa-paper-plane-o:before,.fa-send-o:before{content:""}.fa-history:before{content:""}.fa-circle-thin:before{content:""}.fa-header:before{content:""}.fa-paragraph:before{content:""}.fa-sliders:before{content:""}.fa-share-alt:before{content:""}.fa-share-alt-square:before{content:""}.fa-bomb:before{content:""}.fa-futbol-o:before,.fa-soccer-ball-o:before{content:""}.fa-tty:before{content:""}.fa-binoculars:before{content:""}.fa-plug:before{content:""}.fa-slideshare:before{content:""}.fa-twitch:before{conten
t:""}.fa-yelp:before{content:""}.fa-newspaper-o:before{content:""}.fa-wifi:before{content:""}.fa-calculator:before{content:""}.fa-paypal:before{content:""}.fa-google-wallet:before{content:""}.fa-cc-visa:before{content:""}.fa-cc-mastercard:before{content:""}.fa-cc-discover:before{content:""}.fa-cc-amex:before{content:""}.fa-cc-paypal:before{content:""}.fa-cc-stripe:before{content:""}.fa-bell-slash:before{content:""}.fa-bell-slash-o:before{content:""}.fa-trash:before{content:""}.fa-copyright:before{content:""}.fa-at:before{content:""}.fa-eyedropper:before{content:""}.fa-paint-brush:before{content:""}.fa-birthday-cake:before{content:""}.fa-area-chart:before{content:""}.fa-pie-chart:before{content:""}.fa-line-chart:before{content:""}.fa-lastfm:before{content:""}.fa-lastfm-square:before{content:""}.fa-toggle-off:before{content:""}.fa-toggle-on:before{content:""}.fa-bicycle:before{content:""}.fa-bus:before{content:""}.fa-ioxhost:before{content:""}.fa-angellist:before{content:""}.fa-cc:before{content:""}.fa-ils:before,.fa-shekel:before,.fa-sheqel:before{content:""}.fa-meanpath:before{content:""}.fa-buysellads:before{content:""}.fa-connectdevelop:before{content:""}.fa-dashcube:before{content:""}.fa-forumbee:before{content:""}.fa-leanpub:before{content:""}.fa-sellsy:before{content:""}.fa-shirtsinbulk:before{content:""}.fa-simplybuilt:before{content:""}.fa-skyatlas:before{content:""}.fa-cart-plus:before{content:""}.fa-cart-arrow-down:before{content:""}.fa-diamond:before{content:""}.fa-ship:before{content:""}.fa-user-secret:before{content:""}.fa-motorcycle:before{content:""}.fa-street-view:before{content:""}.fa-heartbeat:before{content:""}.fa-venus:before{content:""}.fa-mars:before{content:""}.fa-mercury:before{content:""}.fa-intersex:before,.fa-transgender:before{content:""}.fa-transgender-alt:before{content:""}.fa-venus-double:before{content:""}.fa-mars-double:before{content:""}.fa-venus-mars:before{content:""}.fa-m
ars-stroke:before{content:""}.fa-mars-stroke-v:before{content:""}.fa-mars-stroke-h:before{content:""}.fa-neuter:before{content:""}.fa-genderless:before{content:""}.fa-facebook-official:before{content:""}.fa-pinterest-p:before{content:""}.fa-whatsapp:before{content:""}.fa-server:before{content:""}.fa-user-plus:before{content:""}.fa-user-times:before{content:""}.fa-bed:before,.fa-hotel:before{content:""}.fa-viacoin:before{content:""}.fa-train:before{content:""}.fa-subway:before{content:""}.fa-medium:before{content:""}.fa-y-combinator:before,.fa-yc:before{content:""}.fa-optin-monster:before{content:""}.fa-opencart:before{content:""}.fa-expeditedssl:before{content:""}.fa-battery-4:before,.fa-battery-full:before,.fa-battery:before{content:""}.fa-battery-3:before,.fa-battery-three-quarters:before{content:""}.fa-battery-2:before,.fa-battery-half:before{content:""}.fa-battery-1:before,.fa-battery-quarter:before{content:""}.fa-battery-0:before,.fa-battery-empty:before{content:""}.fa-mouse-pointer:before{content:""}.fa-i-cursor:before{content:""}.fa-object-group:before{content:""}.fa-object-ungroup:before{content:""}.fa-sticky-note:before{content:""}.fa-sticky-note-o:before{content:""}.fa-cc-jcb:before{content:""}.fa-cc-diners-club:before{content:""}.fa-clone:before{content:""}.fa-balance-scale:before{content:""}.fa-hourglass-o:before{content:""}.fa-hourglass-1:before,.fa-hourglass-start:before{content:""}.fa-hourglass-2:before,.fa-hourglass-half:before{content:""}.fa-hourglass-3:before,.fa-hourglass-end:before{content:""}.fa-hourglass:before{content:""}.fa-hand-grab-o:before,.fa-hand-rock-o:before{content:""}.fa-hand-paper-o:before,.fa-hand-stop-o:before{content:""}.fa-hand-scissors-o:before{content:""}.fa-hand-lizard-o:before{content:""}.fa-hand-spock-o:before{content:""}.fa-hand-pointer-o:before{content:""}.fa-hand-peace-o:before{content:""}.fa-trademark:before{content:""}.fa-registered:before{content:""}.fa-creative-commons
:before{content:""}.fa-gg:before{content:""}.fa-gg-circle:before{content:""}.fa-tripadvisor:before{content:""}.fa-odnoklassniki:before{content:""}.fa-odnoklassniki-square:before{content:""}.fa-get-pocket:before{content:""}.fa-wikipedia-w:before{content:""}.fa-safari:before{content:""}.fa-chrome:before{content:""}.fa-firefox:before{content:""}.fa-opera:before{content:""}.fa-internet-explorer:before{content:""}.fa-television:before,.fa-tv:before{content:""}.fa-contao:before{content:""}.fa-500px:before{content:""}.fa-amazon:before{content:""}.fa-calendar-plus-o:before{content:""}.fa-calendar-minus-o:before{content:""}.fa-calendar-times-o:before{content:""}.fa-calendar-check-o:before{content:""}.fa-industry:before{content:""}.fa-map-pin:before{content:""}.fa-map-signs:before{content:""}.fa-map-o:before{content:""}.fa-map:before{content:""}.fa-commenting:before{content:""}.fa-commenting-o:before{content:""}.fa-houzz:before{content:""}.fa-vimeo:before{content:""}.fa-black-tie:before{content:""}.fa-fonticons:before{content:""}.fa-reddit-alien:before{content:""}.fa-edge:before{content:""}.fa-credit-card-alt:before{content:""}.fa-codiepie:before{content:""}.fa-modx:before{content:""}.fa-fort-awesome:before{content:""}.fa-usb:before{content:""}.fa-product-hunt:before{content:""}.fa-mixcloud:before{content:""}.fa-scribd:before{content:""}.fa-pause-circle:before{content:""}.fa-pause-circle-o:before{content:""}.fa-stop-circle:before{content:""}.fa-stop-circle-o:before{content:""}.fa-shopping-bag:before{content:""}.fa-shopping-basket:before{content:""}.fa-hashtag:before{content:""}.fa-bluetooth:before{content:""}.fa-bluetooth-b:before{content:""}.fa-percent:before{content:""}.fa-gitlab:before,.icon-gitlab:before{content:""}.fa-wpbeginner:before{content:""}.fa-wpforms:before{content:""}.fa-envira:before{content:""}.fa-universal-access:before{content:""}.fa-wheelchair-alt:before{content:""}.fa-question-circle-o:before{conten
t:""}.fa-blind:before{content:""}.fa-audio-description:before{content:""}.fa-volume-control-phone:before{content:""}.fa-braille:before{content:""}.fa-assistive-listening-systems:before{content:""}.fa-american-sign-language-interpreting:before,.fa-asl-interpreting:before{content:""}.fa-deaf:before,.fa-deafness:before,.fa-hard-of-hearing:before{content:""}.fa-glide:before{content:""}.fa-glide-g:before{content:""}.fa-sign-language:before,.fa-signing:before{content:""}.fa-low-vision:before{content:""}.fa-viadeo:before{content:""}.fa-viadeo-square:before{content:""}.fa-snapchat:before{content:""}.fa-snapchat-ghost:before{content:""}.fa-snapchat-square:before{content:""}.fa-pied-piper:before{content:""}.fa-first-order:before{content:""}.fa-yoast:before{content:""}.fa-themeisle:before{content:""}.fa-google-plus-circle:before,.fa-google-plus-official:before{content:""}.fa-fa:before,.fa-font-awesome:before{content:""}.fa-handshake-o:before{content:""}.fa-envelope-open:before{content:""}.fa-envelope-open-o:before{content:""}.fa-linode:before{content:""}.fa-address-book:before{content:""}.fa-address-book-o:before{content:""}.fa-address-card:before,.fa-vcard:before{content:""}.fa-address-card-o:before,.fa-vcard-o:before{content:""}.fa-user-circle:before{content:""}.fa-user-circle-o:before{content:""}.fa-user-o:before{content:""}.fa-id-badge:before{content:""}.fa-drivers-license:before,.fa-id-card:before{content:""}.fa-drivers-license-o:before,.fa-id-card-o:before{content:""}.fa-quora:before{content:""}.fa-free-code-camp:before{content:""}.fa-telegram:before{content:""}.fa-thermometer-4:before,.fa-thermometer-full:before,.fa-thermometer:before{content:""}.fa-thermometer-3:before,.fa-thermometer-three-quarters:before{content:""}.fa-thermometer-2:before,.fa-thermometer-half:before{content:""}.fa-thermometer-1:before,.fa-thermometer-quarter:before{content:""}.fa-thermometer-0:before,.fa-thermometer-empty:before{content:""}.fa-shower:befo
re{content:""}.fa-bath:before,.fa-bathtub:before,.fa-s15:before{content:""}.fa-podcast:before{content:""}.fa-window-maximize:before{content:""}.fa-window-minimize:before{content:""}.fa-window-restore:before{content:""}.fa-times-rectangle:before,.fa-window-close:before{content:""}.fa-times-rectangle-o:before,.fa-window-close-o:before{content:""}.fa-bandcamp:before{content:""}.fa-grav:before{content:""}.fa-etsy:before{content:""}.fa-imdb:before{content:""}.fa-ravelry:before{content:""}.fa-eercast:before{content:""}.fa-microchip:before{content:""}.fa-snowflake-o:before{content:""}.fa-superpowers:before{content:""}.fa-wpexplorer:before{content:""}.fa-meetup:before{content:""}.sr-only{position:absolute;width:1px;height:1px;padding:0;margin:-1px;overflow:hidden;clip:rect(0,0,0,0);border:0}.sr-only-focusable:active,.sr-only-focusable:focus{position:static;width:auto;height:auto;margin:0;overflow:visible;clip:auto}.fa,.icon,.rst-content .admonition-title,.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content code.download span:first-child,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink,.rst-content tt.download span:first-child,.wy-dropdown .caret,.wy-inline-validate.wy-inline-validate-danger .wy-input-context,.wy-inline-validate.wy-inline-validate-info .wy-input-context,.wy-inline-validate.wy-inline-validate-success .wy-input-context,.wy-inline-validate.wy-inline-validate-warning .wy-input-context,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li button.toctree-expand{font-family:inherit}.fa:before,.icon:before,.rst-content .admonition-title:before,.rst-content .code-block-caption 
.headerlink:before,.rst-content .eqno .headerlink:before,.rst-content code.download span:first-child:before,.rst-content dl dt .headerlink:before,.rst-content h1 .headerlink:before,.rst-content h2 .headerlink:before,.rst-content h3 .headerlink:before,.rst-content h4 .headerlink:before,.rst-content h5 .headerlink:before,.rst-content h6 .headerlink:before,.rst-content p.caption .headerlink:before,.rst-content p .headerlink:before,.rst-content table>caption .headerlink:before,.rst-content tt.download span:first-child:before,.wy-dropdown .caret:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before,.wy-menu-vertical li button.toctree-expand:before{font-family:FontAwesome;display:inline-block;font-style:normal;font-weight:400;line-height:1;text-decoration:inherit}.rst-content .code-block-caption a .headerlink,.rst-content .eqno a .headerlink,.rst-content a .admonition-title,.rst-content code.download a span:first-child,.rst-content dl dt a .headerlink,.rst-content h1 a .headerlink,.rst-content h2 a .headerlink,.rst-content h3 a .headerlink,.rst-content h4 a .headerlink,.rst-content h5 a .headerlink,.rst-content h6 a .headerlink,.rst-content p.caption a .headerlink,.rst-content p a .headerlink,.rst-content table>caption a .headerlink,.rst-content tt.download a span:first-child,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li a button.toctree-expand,a .fa,a .icon,a .rst-content .admonition-title,a .rst-content .code-block-caption .headerlink,a .rst-content .eqno .headerlink,a .rst-content code.download span:first-child,a .rst-content dl dt .headerlink,a 
.rst-content h1 .headerlink,a .rst-content h2 .headerlink,a .rst-content h3 .headerlink,a .rst-content h4 .headerlink,a .rst-content h5 .headerlink,a .rst-content h6 .headerlink,a .rst-content p.caption .headerlink,a .rst-content p .headerlink,a .rst-content table>caption .headerlink,a .rst-content tt.download span:first-child,a .wy-menu-vertical li button.toctree-expand{display:inline-block;text-decoration:inherit}.btn .fa,.btn .icon,.btn .rst-content .admonition-title,.btn .rst-content .code-block-caption .headerlink,.btn .rst-content .eqno .headerlink,.btn .rst-content code.download span:first-child,.btn .rst-content dl dt .headerlink,.btn .rst-content h1 .headerlink,.btn .rst-content h2 .headerlink,.btn .rst-content h3 .headerlink,.btn .rst-content h4 .headerlink,.btn .rst-content h5 .headerlink,.btn .rst-content h6 .headerlink,.btn .rst-content p .headerlink,.btn .rst-content table>caption .headerlink,.btn .rst-content tt.download span:first-child,.btn .wy-menu-vertical li.current>a button.toctree-expand,.btn .wy-menu-vertical li.on a button.toctree-expand,.btn .wy-menu-vertical li button.toctree-expand,.nav .fa,.nav .icon,.nav .rst-content .admonition-title,.nav .rst-content .code-block-caption .headerlink,.nav .rst-content .eqno .headerlink,.nav .rst-content code.download span:first-child,.nav .rst-content dl dt .headerlink,.nav .rst-content h1 .headerlink,.nav .rst-content h2 .headerlink,.nav .rst-content h3 .headerlink,.nav .rst-content h4 .headerlink,.nav .rst-content h5 .headerlink,.nav .rst-content h6 .headerlink,.nav .rst-content p .headerlink,.nav .rst-content table>caption .headerlink,.nav .rst-content tt.download span:first-child,.nav .wy-menu-vertical li.current>a button.toctree-expand,.nav .wy-menu-vertical li.on a button.toctree-expand,.nav .wy-menu-vertical li button.toctree-expand,.rst-content .btn .admonition-title,.rst-content .code-block-caption .btn .headerlink,.rst-content .code-block-caption .nav .headerlink,.rst-content .eqno .btn 
.headerlink,.rst-content .eqno .nav .headerlink,.rst-content .nav .admonition-title,.rst-content code.download .btn span:first-child,.rst-content code.download .nav span:first-child,.rst-content dl dt .btn .headerlink,.rst-content dl dt .nav .headerlink,.rst-content h1 .btn .headerlink,.rst-content h1 .nav .headerlink,.rst-content h2 .btn .headerlink,.rst-content h2 .nav .headerlink,.rst-content h3 .btn .headerlink,.rst-content h3 .nav .headerlink,.rst-content h4 .btn .headerlink,.rst-content h4 .nav .headerlink,.rst-content h5 .btn .headerlink,.rst-content h5 .nav .headerlink,.rst-content h6 .btn .headerlink,.rst-content h6 .nav .headerlink,.rst-content p .btn .headerlink,.rst-content p .nav .headerlink,.rst-content table>caption .btn .headerlink,.rst-content table>caption .nav .headerlink,.rst-content tt.download .btn span:first-child,.rst-content tt.download .nav span:first-child,.wy-menu-vertical li .btn button.toctree-expand,.wy-menu-vertical li.current>a .btn button.toctree-expand,.wy-menu-vertical li.current>a .nav button.toctree-expand,.wy-menu-vertical li .nav button.toctree-expand,.wy-menu-vertical li.on a .btn button.toctree-expand,.wy-menu-vertical li.on a .nav button.toctree-expand{display:inline}.btn .fa-large.icon,.btn .fa.fa-large,.btn .rst-content .code-block-caption .fa-large.headerlink,.btn .rst-content .eqno .fa-large.headerlink,.btn .rst-content .fa-large.admonition-title,.btn .rst-content code.download span.fa-large:first-child,.btn .rst-content dl dt .fa-large.headerlink,.btn .rst-content h1 .fa-large.headerlink,.btn .rst-content h2 .fa-large.headerlink,.btn .rst-content h3 .fa-large.headerlink,.btn .rst-content h4 .fa-large.headerlink,.btn .rst-content h5 .fa-large.headerlink,.btn .rst-content h6 .fa-large.headerlink,.btn .rst-content p .fa-large.headerlink,.btn .rst-content table>caption .fa-large.headerlink,.btn .rst-content tt.download span.fa-large:first-child,.btn .wy-menu-vertical li button.fa-large.toctree-expand,.nav 
.fa-large.icon,.nav .fa.fa-large,.nav .rst-content .code-block-caption .fa-large.headerlink,.nav .rst-content .eqno .fa-large.headerlink,.nav .rst-content .fa-large.admonition-title,.nav .rst-content code.download span.fa-large:first-child,.nav .rst-content dl dt .fa-large.headerlink,.nav .rst-content h1 .fa-large.headerlink,.nav .rst-content h2 .fa-large.headerlink,.nav .rst-content h3 .fa-large.headerlink,.nav .rst-content h4 .fa-large.headerlink,.nav .rst-content h5 .fa-large.headerlink,.nav .rst-content h6 .fa-large.headerlink,.nav .rst-content p .fa-large.headerlink,.nav .rst-content table>caption .fa-large.headerlink,.nav .rst-content tt.download span.fa-large:first-child,.nav .wy-menu-vertical li button.fa-large.toctree-expand,.rst-content .btn .fa-large.admonition-title,.rst-content .code-block-caption .btn .fa-large.headerlink,.rst-content .code-block-caption .nav .fa-large.headerlink,.rst-content .eqno .btn .fa-large.headerlink,.rst-content .eqno .nav .fa-large.headerlink,.rst-content .nav .fa-large.admonition-title,.rst-content code.download .btn span.fa-large:first-child,.rst-content code.download .nav span.fa-large:first-child,.rst-content dl dt .btn .fa-large.headerlink,.rst-content dl dt .nav .fa-large.headerlink,.rst-content h1 .btn .fa-large.headerlink,.rst-content h1 .nav .fa-large.headerlink,.rst-content h2 .btn .fa-large.headerlink,.rst-content h2 .nav .fa-large.headerlink,.rst-content h3 .btn .fa-large.headerlink,.rst-content h3 .nav .fa-large.headerlink,.rst-content h4 .btn .fa-large.headerlink,.rst-content h4 .nav .fa-large.headerlink,.rst-content h5 .btn .fa-large.headerlink,.rst-content h5 .nav .fa-large.headerlink,.rst-content h6 .btn .fa-large.headerlink,.rst-content h6 .nav .fa-large.headerlink,.rst-content p .btn .fa-large.headerlink,.rst-content p .nav .fa-large.headerlink,.rst-content table>caption .btn .fa-large.headerlink,.rst-content table>caption .nav .fa-large.headerlink,.rst-content tt.download .btn 
span.fa-large:first-child,.rst-content tt.download .nav span.fa-large:first-child,.wy-menu-vertical li .btn button.fa-large.toctree-expand,.wy-menu-vertical li .nav button.fa-large.toctree-expand{line-height:.9em}.btn .fa-spin.icon,.btn .fa.fa-spin,.btn .rst-content .code-block-caption .fa-spin.headerlink,.btn .rst-content .eqno .fa-spin.headerlink,.btn .rst-content .fa-spin.admonition-title,.btn .rst-content code.download span.fa-spin:first-child,.btn .rst-content dl dt .fa-spin.headerlink,.btn .rst-content h1 .fa-spin.headerlink,.btn .rst-content h2 .fa-spin.headerlink,.btn .rst-content h3 .fa-spin.headerlink,.btn .rst-content h4 .fa-spin.headerlink,.btn .rst-content h5 .fa-spin.headerlink,.btn .rst-content h6 .fa-spin.headerlink,.btn .rst-content p .fa-spin.headerlink,.btn .rst-content table>caption .fa-spin.headerlink,.btn .rst-content tt.download span.fa-spin:first-child,.btn .wy-menu-vertical li button.fa-spin.toctree-expand,.nav .fa-spin.icon,.nav .fa.fa-spin,.nav .rst-content .code-block-caption .fa-spin.headerlink,.nav .rst-content .eqno .fa-spin.headerlink,.nav .rst-content .fa-spin.admonition-title,.nav .rst-content code.download span.fa-spin:first-child,.nav .rst-content dl dt .fa-spin.headerlink,.nav .rst-content h1 .fa-spin.headerlink,.nav .rst-content h2 .fa-spin.headerlink,.nav .rst-content h3 .fa-spin.headerlink,.nav .rst-content h4 .fa-spin.headerlink,.nav .rst-content h5 .fa-spin.headerlink,.nav .rst-content h6 .fa-spin.headerlink,.nav .rst-content p .fa-spin.headerlink,.nav .rst-content table>caption .fa-spin.headerlink,.nav .rst-content tt.download span.fa-spin:first-child,.nav .wy-menu-vertical li button.fa-spin.toctree-expand,.rst-content .btn .fa-spin.admonition-title,.rst-content .code-block-caption .btn .fa-spin.headerlink,.rst-content .code-block-caption .nav .fa-spin.headerlink,.rst-content .eqno .btn .fa-spin.headerlink,.rst-content .eqno .nav .fa-spin.headerlink,.rst-content .nav .fa-spin.admonition-title,.rst-content code.download 
.btn span.fa-spin:first-child,.rst-content code.download .nav span.fa-spin:first-child,.rst-content dl dt .btn .fa-spin.headerlink,.rst-content dl dt .nav .fa-spin.headerlink,.rst-content h1 .btn .fa-spin.headerlink,.rst-content h1 .nav .fa-spin.headerlink,.rst-content h2 .btn .fa-spin.headerlink,.rst-content h2 .nav .fa-spin.headerlink,.rst-content h3 .btn .fa-spin.headerlink,.rst-content h3 .nav .fa-spin.headerlink,.rst-content h4 .btn .fa-spin.headerlink,.rst-content h4 .nav .fa-spin.headerlink,.rst-content h5 .btn .fa-spin.headerlink,.rst-content h5 .nav .fa-spin.headerlink,.rst-content h6 .btn .fa-spin.headerlink,.rst-content h6 .nav .fa-spin.headerlink,.rst-content p .btn .fa-spin.headerlink,.rst-content p .nav .fa-spin.headerlink,.rst-content table>caption .btn .fa-spin.headerlink,.rst-content table>caption .nav .fa-spin.headerlink,.rst-content tt.download .btn span.fa-spin:first-child,.rst-content tt.download .nav span.fa-spin:first-child,.wy-menu-vertical li .btn button.fa-spin.toctree-expand,.wy-menu-vertical li .nav button.fa-spin.toctree-expand{display:inline-block}.btn.fa:before,.btn.icon:before,.rst-content .btn.admonition-title:before,.rst-content .code-block-caption .btn.headerlink:before,.rst-content .eqno .btn.headerlink:before,.rst-content code.download span.btn:first-child:before,.rst-content dl dt .btn.headerlink:before,.rst-content h1 .btn.headerlink:before,.rst-content h2 .btn.headerlink:before,.rst-content h3 .btn.headerlink:before,.rst-content h4 .btn.headerlink:before,.rst-content h5 .btn.headerlink:before,.rst-content h6 .btn.headerlink:before,.rst-content p .btn.headerlink:before,.rst-content table>caption .btn.headerlink:before,.rst-content tt.download span.btn:first-child:before,.wy-menu-vertical li button.btn.toctree-expand:before{opacity:.5;-webkit-transition:opacity .05s ease-in;-moz-transition:opacity .05s ease-in;transition:opacity .05s ease-in}.btn.fa:hover:before,.btn.icon:hover:before,.rst-content 
.btn.admonition-title:hover:before,.rst-content .code-block-caption .btn.headerlink:hover:before,.rst-content .eqno .btn.headerlink:hover:before,.rst-content code.download span.btn:first-child:hover:before,.rst-content dl dt .btn.headerlink:hover:before,.rst-content h1 .btn.headerlink:hover:before,.rst-content h2 .btn.headerlink:hover:before,.rst-content h3 .btn.headerlink:hover:before,.rst-content h4 .btn.headerlink:hover:before,.rst-content h5 .btn.headerlink:hover:before,.rst-content h6 .btn.headerlink:hover:before,.rst-content p .btn.headerlink:hover:before,.rst-content table>caption .btn.headerlink:hover:before,.rst-content tt.download span.btn:first-child:hover:before,.wy-menu-vertical li button.btn.toctree-expand:hover:before{opacity:1}.btn-mini .fa:before,.btn-mini .icon:before,.btn-mini .rst-content .admonition-title:before,.btn-mini .rst-content .code-block-caption .headerlink:before,.btn-mini .rst-content .eqno .headerlink:before,.btn-mini .rst-content code.download span:first-child:before,.btn-mini .rst-content dl dt .headerlink:before,.btn-mini .rst-content h1 .headerlink:before,.btn-mini .rst-content h2 .headerlink:before,.btn-mini .rst-content h3 .headerlink:before,.btn-mini .rst-content h4 .headerlink:before,.btn-mini .rst-content h5 .headerlink:before,.btn-mini .rst-content h6 .headerlink:before,.btn-mini .rst-content p .headerlink:before,.btn-mini .rst-content table>caption .headerlink:before,.btn-mini .rst-content tt.download span:first-child:before,.btn-mini .wy-menu-vertical li button.toctree-expand:before,.rst-content .btn-mini .admonition-title:before,.rst-content .code-block-caption .btn-mini .headerlink:before,.rst-content .eqno .btn-mini .headerlink:before,.rst-content code.download .btn-mini span:first-child:before,.rst-content dl dt .btn-mini .headerlink:before,.rst-content h1 .btn-mini .headerlink:before,.rst-content h2 .btn-mini .headerlink:before,.rst-content h3 .btn-mini .headerlink:before,.rst-content h4 .btn-mini 
.headerlink:before,.rst-content h5 .btn-mini .headerlink:before,.rst-content h6 .btn-mini .headerlink:before,.rst-content p .btn-mini .headerlink:before,.rst-content table>caption .btn-mini .headerlink:before,.rst-content tt.download .btn-mini span:first-child:before,.wy-menu-vertical li .btn-mini button.toctree-expand:before{font-size:14px;vertical-align:-15%}.rst-content .admonition,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .danger,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning,.wy-alert{padding:12px;line-height:24px;margin-bottom:24px;background:#e7f2fa}.rst-content .admonition-title,.wy-alert-title{font-weight:700;display:block;color:#fff;background:#6ab0de;padding:6px 12px;margin:-12px -12px 12px}.rst-content .danger,.rst-content .error,.rst-content .wy-alert-danger.admonition,.rst-content .wy-alert-danger.admonition-todo,.rst-content .wy-alert-danger.attention,.rst-content .wy-alert-danger.caution,.rst-content .wy-alert-danger.hint,.rst-content .wy-alert-danger.important,.rst-content .wy-alert-danger.note,.rst-content .wy-alert-danger.seealso,.rst-content .wy-alert-danger.tip,.rst-content .wy-alert-danger.warning,.wy-alert.wy-alert-danger{background:#fdf3f2}.rst-content .danger .admonition-title,.rst-content .danger .wy-alert-title,.rst-content .error .admonition-title,.rst-content .error .wy-alert-title,.rst-content .wy-alert-danger.admonition-todo .admonition-title,.rst-content .wy-alert-danger.admonition-todo .wy-alert-title,.rst-content .wy-alert-danger.admonition .admonition-title,.rst-content .wy-alert-danger.admonition .wy-alert-title,.rst-content .wy-alert-danger.attention .admonition-title,.rst-content .wy-alert-danger.attention .wy-alert-title,.rst-content .wy-alert-danger.caution .admonition-title,.rst-content .wy-alert-danger.caution .wy-alert-title,.rst-content .wy-alert-danger.hint 
.admonition-title,.rst-content .wy-alert-danger.hint .wy-alert-title,.rst-content .wy-alert-danger.important .admonition-title,.rst-content .wy-alert-danger.important .wy-alert-title,.rst-content .wy-alert-danger.note .admonition-title,.rst-content .wy-alert-danger.note .wy-alert-title,.rst-content .wy-alert-danger.seealso .admonition-title,.rst-content .wy-alert-danger.seealso .wy-alert-title,.rst-content .wy-alert-danger.tip .admonition-title,.rst-content .wy-alert-danger.tip .wy-alert-title,.rst-content .wy-alert-danger.warning .admonition-title,.rst-content .wy-alert-danger.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-danger .admonition-title,.wy-alert.wy-alert-danger .rst-content .admonition-title,.wy-alert.wy-alert-danger .wy-alert-title{background:#f29f97}.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .warning,.rst-content .wy-alert-warning.admonition,.rst-content .wy-alert-warning.danger,.rst-content .wy-alert-warning.error,.rst-content .wy-alert-warning.hint,.rst-content .wy-alert-warning.important,.rst-content .wy-alert-warning.note,.rst-content .wy-alert-warning.seealso,.rst-content .wy-alert-warning.tip,.wy-alert.wy-alert-warning{background:#ffedcc}.rst-content .admonition-todo .admonition-title,.rst-content .admonition-todo .wy-alert-title,.rst-content .attention .admonition-title,.rst-content .attention .wy-alert-title,.rst-content .caution .admonition-title,.rst-content .caution .wy-alert-title,.rst-content .warning .admonition-title,.rst-content .warning .wy-alert-title,.rst-content .wy-alert-warning.admonition .admonition-title,.rst-content .wy-alert-warning.admonition .wy-alert-title,.rst-content .wy-alert-warning.danger .admonition-title,.rst-content .wy-alert-warning.danger .wy-alert-title,.rst-content .wy-alert-warning.error .admonition-title,.rst-content .wy-alert-warning.error .wy-alert-title,.rst-content .wy-alert-warning.hint .admonition-title,.rst-content .wy-alert-warning.hint 
.wy-alert-title,.rst-content .wy-alert-warning.important .admonition-title,.rst-content .wy-alert-warning.important .wy-alert-title,.rst-content .wy-alert-warning.note .admonition-title,.rst-content .wy-alert-warning.note .wy-alert-title,.rst-content .wy-alert-warning.seealso .admonition-title,.rst-content .wy-alert-warning.seealso .wy-alert-title,.rst-content .wy-alert-warning.tip .admonition-title,.rst-content .wy-alert-warning.tip .wy-alert-title,.rst-content .wy-alert.wy-alert-warning .admonition-title,.wy-alert.wy-alert-warning .rst-content .admonition-title,.wy-alert.wy-alert-warning .wy-alert-title{background:#f0b37e}.rst-content .note,.rst-content .seealso,.rst-content .wy-alert-info.admonition,.rst-content .wy-alert-info.admonition-todo,.rst-content .wy-alert-info.attention,.rst-content .wy-alert-info.caution,.rst-content .wy-alert-info.danger,.rst-content .wy-alert-info.error,.rst-content .wy-alert-info.hint,.rst-content .wy-alert-info.important,.rst-content .wy-alert-info.tip,.rst-content .wy-alert-info.warning,.wy-alert.wy-alert-info{background:#e7f2fa}.rst-content .note .admonition-title,.rst-content .note .wy-alert-title,.rst-content .seealso .admonition-title,.rst-content .seealso .wy-alert-title,.rst-content .wy-alert-info.admonition-todo .admonition-title,.rst-content .wy-alert-info.admonition-todo .wy-alert-title,.rst-content .wy-alert-info.admonition .admonition-title,.rst-content .wy-alert-info.admonition .wy-alert-title,.rst-content .wy-alert-info.attention .admonition-title,.rst-content .wy-alert-info.attention .wy-alert-title,.rst-content .wy-alert-info.caution .admonition-title,.rst-content .wy-alert-info.caution .wy-alert-title,.rst-content .wy-alert-info.danger .admonition-title,.rst-content .wy-alert-info.danger .wy-alert-title,.rst-content .wy-alert-info.error .admonition-title,.rst-content .wy-alert-info.error .wy-alert-title,.rst-content .wy-alert-info.hint .admonition-title,.rst-content .wy-alert-info.hint .wy-alert-title,.rst-content 
.wy-alert-info.important .admonition-title,.rst-content .wy-alert-info.important .wy-alert-title,.rst-content .wy-alert-info.tip .admonition-title,.rst-content .wy-alert-info.tip .wy-alert-title,.rst-content .wy-alert-info.warning .admonition-title,.rst-content .wy-alert-info.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-info .admonition-title,.wy-alert.wy-alert-info .rst-content .admonition-title,.wy-alert.wy-alert-info .wy-alert-title{background:#6ab0de}.rst-content .hint,.rst-content .important,.rst-content .tip,.rst-content .wy-alert-success.admonition,.rst-content .wy-alert-success.admonition-todo,.rst-content .wy-alert-success.attention,.rst-content .wy-alert-success.caution,.rst-content .wy-alert-success.danger,.rst-content .wy-alert-success.error,.rst-content .wy-alert-success.note,.rst-content .wy-alert-success.seealso,.rst-content .wy-alert-success.warning,.wy-alert.wy-alert-success{background:#dbfaf4}.rst-content .hint .admonition-title,.rst-content .hint .wy-alert-title,.rst-content .important .admonition-title,.rst-content .important .wy-alert-title,.rst-content .tip .admonition-title,.rst-content .tip .wy-alert-title,.rst-content .wy-alert-success.admonition-todo .admonition-title,.rst-content .wy-alert-success.admonition-todo .wy-alert-title,.rst-content .wy-alert-success.admonition .admonition-title,.rst-content .wy-alert-success.admonition .wy-alert-title,.rst-content .wy-alert-success.attention .admonition-title,.rst-content .wy-alert-success.attention .wy-alert-title,.rst-content .wy-alert-success.caution .admonition-title,.rst-content .wy-alert-success.caution .wy-alert-title,.rst-content .wy-alert-success.danger .admonition-title,.rst-content .wy-alert-success.danger .wy-alert-title,.rst-content .wy-alert-success.error .admonition-title,.rst-content .wy-alert-success.error .wy-alert-title,.rst-content .wy-alert-success.note .admonition-title,.rst-content .wy-alert-success.note .wy-alert-title,.rst-content .wy-alert-success.seealso 
.admonition-title,.rst-content .wy-alert-success.seealso .wy-alert-title,.rst-content .wy-alert-success.warning .admonition-title,.rst-content .wy-alert-success.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-success .admonition-title,.wy-alert.wy-alert-success .rst-content .admonition-title,.wy-alert.wy-alert-success .wy-alert-title{background:#1abc9c}.rst-content .wy-alert-neutral.admonition,.rst-content .wy-alert-neutral.admonition-todo,.rst-content .wy-alert-neutral.attention,.rst-content .wy-alert-neutral.caution,.rst-content .wy-alert-neutral.danger,.rst-content .wy-alert-neutral.error,.rst-content .wy-alert-neutral.hint,.rst-content .wy-alert-neutral.important,.rst-content .wy-alert-neutral.note,.rst-content .wy-alert-neutral.seealso,.rst-content .wy-alert-neutral.tip,.rst-content .wy-alert-neutral.warning,.wy-alert.wy-alert-neutral{background:#f3f6f6}.rst-content .wy-alert-neutral.admonition-todo .admonition-title,.rst-content .wy-alert-neutral.admonition-todo .wy-alert-title,.rst-content .wy-alert-neutral.admonition .admonition-title,.rst-content .wy-alert-neutral.admonition .wy-alert-title,.rst-content .wy-alert-neutral.attention .admonition-title,.rst-content .wy-alert-neutral.attention .wy-alert-title,.rst-content .wy-alert-neutral.caution .admonition-title,.rst-content .wy-alert-neutral.caution .wy-alert-title,.rst-content .wy-alert-neutral.danger .admonition-title,.rst-content .wy-alert-neutral.danger .wy-alert-title,.rst-content .wy-alert-neutral.error .admonition-title,.rst-content .wy-alert-neutral.error .wy-alert-title,.rst-content .wy-alert-neutral.hint .admonition-title,.rst-content .wy-alert-neutral.hint .wy-alert-title,.rst-content .wy-alert-neutral.important .admonition-title,.rst-content .wy-alert-neutral.important .wy-alert-title,.rst-content .wy-alert-neutral.note .admonition-title,.rst-content .wy-alert-neutral.note .wy-alert-title,.rst-content .wy-alert-neutral.seealso .admonition-title,.rst-content .wy-alert-neutral.seealso 
.wy-alert-title,.rst-content .wy-alert-neutral.tip .admonition-title,.rst-content .wy-alert-neutral.tip .wy-alert-title,.rst-content .wy-alert-neutral.warning .admonition-title,.rst-content .wy-alert-neutral.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-neutral .admonition-title,.wy-alert.wy-alert-neutral .rst-content .admonition-title,.wy-alert.wy-alert-neutral .wy-alert-title{color:#404040;background:#e1e4e5}.rst-content .wy-alert-neutral.admonition-todo a,.rst-content .wy-alert-neutral.admonition a,.rst-content .wy-alert-neutral.attention a,.rst-content .wy-alert-neutral.caution a,.rst-content .wy-alert-neutral.danger a,.rst-content .wy-alert-neutral.error a,.rst-content .wy-alert-neutral.hint a,.rst-content .wy-alert-neutral.important a,.rst-content .wy-alert-neutral.note a,.rst-content .wy-alert-neutral.seealso a,.rst-content .wy-alert-neutral.tip a,.rst-content .wy-alert-neutral.warning a,.wy-alert.wy-alert-neutral a{color:#2980b9}.rst-content .admonition-todo p:last-child,.rst-content .admonition p:last-child,.rst-content .attention p:last-child,.rst-content .caution p:last-child,.rst-content .danger p:last-child,.rst-content .error p:last-child,.rst-content .hint p:last-child,.rst-content .important p:last-child,.rst-content .note p:last-child,.rst-content .seealso p:last-child,.rst-content .tip p:last-child,.rst-content .warning p:last-child,.wy-alert p:last-child{margin-bottom:0}.wy-tray-container{position:fixed;bottom:0;left:0;z-index:600}.wy-tray-container li{display:block;width:300px;background:transparent;color:#fff;text-align:center;box-shadow:0 5px 5px 0 rgba(0,0,0,.1);padding:0 24px;min-width:20%;opacity:0;height:0;line-height:56px;overflow:hidden;-webkit-transition:all .3s ease-in;-moz-transition:all .3s ease-in;transition:all .3s ease-in}.wy-tray-container li.wy-tray-item-success{background:#27ae60}.wy-tray-container li.wy-tray-item-info{background:#2980b9}.wy-tray-container li.wy-tray-item-warning{background:#e67e22}.wy-tray-container 
li.wy-tray-item-danger{background:#e74c3c}.wy-tray-container li.on{opacity:1;height:56px}@media screen and (max-width:768px){.wy-tray-container{bottom:auto;top:0;width:100%}.wy-tray-container li{width:100%}}button{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle;cursor:pointer;line-height:normal;-webkit-appearance:button;*overflow:visible}button::-moz-focus-inner,input::-moz-focus-inner{border:0;padding:0}button[disabled]{cursor:default}.btn{display:inline-block;border-radius:2px;line-height:normal;white-space:nowrap;text-align:center;cursor:pointer;font-size:100%;padding:6px 12px 8px;color:#fff;border:1px solid rgba(0,0,0,.1);background-color:#27ae60;text-decoration:none;font-weight:400;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;box-shadow:inset 0 1px 2px -1px hsla(0,0%,100%,.5),inset 0 -2px 0 0 rgba(0,0,0,.1);outline-none:false;vertical-align:middle;*display:inline;zoom:1;-webkit-user-drag:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;-webkit-transition:all .1s linear;-moz-transition:all .1s linear;transition:all .1s linear}.btn-hover{background:#2e8ece;color:#fff}.btn:hover{background:#2cc36b;color:#fff}.btn:focus{background:#2cc36b;outline:0}.btn:active{box-shadow:inset 0 -1px 0 0 rgba(0,0,0,.05),inset 0 2px 0 0 rgba(0,0,0,.1);padding:8px 12px 6px}.btn:visited{color:#fff}.btn-disabled,.btn-disabled:active,.btn-disabled:focus,.btn-disabled:hover,.btn:disabled{background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled = 
false);filter:alpha(opacity=40);opacity:.4;cursor:not-allowed;box-shadow:none}.btn::-moz-focus-inner{padding:0;border:0}.btn-small{font-size:80%}.btn-info{background-color:#2980b9!important}.btn-info:hover{background-color:#2e8ece!important}.btn-neutral{background-color:#f3f6f6!important;color:#404040!important}.btn-neutral:hover{background-color:#e5ebeb!important;color:#404040}.btn-neutral:visited{color:#404040!important}.btn-success{background-color:#27ae60!important}.btn-success:hover{background-color:#295!important}.btn-danger{background-color:#e74c3c!important}.btn-danger:hover{background-color:#ea6153!important}.btn-warning{background-color:#e67e22!important}.btn-warning:hover{background-color:#e98b39!important}.btn-invert{background-color:#222}.btn-invert:hover{background-color:#2f2f2f!important}.btn-link{background-color:transparent!important;color:#2980b9;box-shadow:none;border-color:transparent!important}.btn-link:active,.btn-link:hover{background-color:transparent!important;color:#409ad5!important;box-shadow:none}.btn-link:visited{color:#9b59b6}.wy-btn-group .btn,.wy-control .btn{vertical-align:middle}.wy-btn-group{margin-bottom:24px;*zoom:1}.wy-btn-group:after,.wy-btn-group:before{display:table;content:""}.wy-btn-group:after{clear:both}.wy-dropdown{position:relative;display:inline-block}.wy-dropdown-active .wy-dropdown-menu{display:block}.wy-dropdown-menu{position:absolute;left:0;display:none;float:left;top:100%;min-width:100%;background:#fcfcfc;z-index:100;border:1px solid #cfd7dd;box-shadow:0 2px 2px 0 rgba(0,0,0,.1);padding:12px}.wy-dropdown-menu>dd>a{display:block;clear:both;color:#404040;white-space:nowrap;font-size:90%;padding:0 12px;cursor:pointer}.wy-dropdown-menu>dd>a:hover{background:#2980b9;color:#fff}.wy-dropdown-menu>dd.divider{border-top:1px solid #cfd7dd;margin:6px 0}.wy-dropdown-menu>dd.search{padding-bottom:12px}.wy-dropdown-menu>dd.search 
input[type=search]{width:100%}.wy-dropdown-menu>dd.call-to-action{background:#e3e3e3;text-transform:uppercase;font-weight:500;font-size:80%}.wy-dropdown-menu>dd.call-to-action:hover{background:#e3e3e3}.wy-dropdown-menu>dd.call-to-action .btn{color:#fff}.wy-dropdown.wy-dropdown-up .wy-dropdown-menu{bottom:100%;top:auto;left:auto;right:0}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu{background:#fcfcfc;margin-top:2px}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu a{padding:6px 12px}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu a:hover{background:#2980b9;color:#fff}.wy-dropdown.wy-dropdown-left .wy-dropdown-menu{right:0;left:auto;text-align:right}.wy-dropdown-arrow:before{content:" ";border-bottom:5px solid #f5f5f5;border-left:5px solid transparent;border-right:5px solid transparent;position:absolute;display:block;top:-4px;left:50%;margin-left:-3px}.wy-dropdown-arrow.wy-dropdown-arrow-left:before{left:11px}.wy-form-stacked select{display:block}.wy-form-aligned .wy-help-inline,.wy-form-aligned input,.wy-form-aligned label,.wy-form-aligned select,.wy-form-aligned textarea{display:inline-block;*display:inline;*zoom:1;vertical-align:middle}.wy-form-aligned .wy-control-group>label{display:inline-block;vertical-align:middle;width:10em;margin:6px 12px 0 0;float:left}.wy-form-aligned .wy-control{float:left}.wy-form-aligned .wy-control label{display:block}.wy-form-aligned .wy-control select{margin-top:6px}fieldset{margin:0}fieldset,legend{border:0;padding:0}legend{width:100%;white-space:normal;margin-bottom:24px;font-size:150%;*margin-left:-7px}label,legend{display:block}label{margin:0 0 
.3125em;color:#333;font-size:90%}input,select,textarea{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle}.wy-control-group{margin-bottom:24px;max-width:1200px;margin-left:auto;margin-right:auto;*zoom:1}.wy-control-group:after,.wy-control-group:before{display:table;content:""}.wy-control-group:after{clear:both}.wy-control-group.wy-control-group-required>label:after{content:" *";color:#e74c3c}.wy-control-group .wy-form-full,.wy-control-group .wy-form-halves,.wy-control-group .wy-form-thirds{padding-bottom:12px}.wy-control-group .wy-form-full input[type=color],.wy-control-group .wy-form-full input[type=date],.wy-control-group .wy-form-full input[type=datetime-local],.wy-control-group .wy-form-full input[type=datetime],.wy-control-group .wy-form-full input[type=email],.wy-control-group .wy-form-full input[type=month],.wy-control-group .wy-form-full input[type=number],.wy-control-group .wy-form-full input[type=password],.wy-control-group .wy-form-full input[type=search],.wy-control-group .wy-form-full input[type=tel],.wy-control-group .wy-form-full input[type=text],.wy-control-group .wy-form-full input[type=time],.wy-control-group .wy-form-full input[type=url],.wy-control-group .wy-form-full input[type=week],.wy-control-group .wy-form-full select,.wy-control-group .wy-form-halves input[type=color],.wy-control-group .wy-form-halves input[type=date],.wy-control-group .wy-form-halves input[type=datetime-local],.wy-control-group .wy-form-halves input[type=datetime],.wy-control-group .wy-form-halves input[type=email],.wy-control-group .wy-form-halves input[type=month],.wy-control-group .wy-form-halves input[type=number],.wy-control-group .wy-form-halves input[type=password],.wy-control-group .wy-form-halves input[type=search],.wy-control-group .wy-form-halves input[type=tel],.wy-control-group .wy-form-halves input[type=text],.wy-control-group .wy-form-halves input[type=time],.wy-control-group .wy-form-halves input[type=url],.wy-control-group 
.wy-form-halves input[type=week],.wy-control-group .wy-form-halves select,.wy-control-group .wy-form-thirds input[type=color],.wy-control-group .wy-form-thirds input[type=date],.wy-control-group .wy-form-thirds input[type=datetime-local],.wy-control-group .wy-form-thirds input[type=datetime],.wy-control-group .wy-form-thirds input[type=email],.wy-control-group .wy-form-thirds input[type=month],.wy-control-group .wy-form-thirds input[type=number],.wy-control-group .wy-form-thirds input[type=password],.wy-control-group .wy-form-thirds input[type=search],.wy-control-group .wy-form-thirds input[type=tel],.wy-control-group .wy-form-thirds input[type=text],.wy-control-group .wy-form-thirds input[type=time],.wy-control-group .wy-form-thirds input[type=url],.wy-control-group .wy-form-thirds input[type=week],.wy-control-group .wy-form-thirds select{width:100%}.wy-control-group .wy-form-full{float:left;display:block;width:100%;margin-right:0}.wy-control-group .wy-form-full:last-child{margin-right:0}.wy-control-group .wy-form-halves{float:left;display:block;margin-right:2.35765%;width:48.82117%}.wy-control-group .wy-form-halves:last-child,.wy-control-group .wy-form-halves:nth-of-type(2n){margin-right:0}.wy-control-group .wy-form-halves:nth-of-type(odd){clear:left}.wy-control-group .wy-form-thirds{float:left;display:block;margin-right:2.35765%;width:31.76157%}.wy-control-group .wy-form-thirds:last-child,.wy-control-group .wy-form-thirds:nth-of-type(3n){margin-right:0}.wy-control-group .wy-form-thirds:nth-of-type(3n+1){clear:left}.wy-control-group.wy-control-group-no-input .wy-control,.wy-control-no-input{margin:6px 0 0;font-size:90%}.wy-control-no-input{display:inline-block}.wy-control-group.fluid-input input[type=color],.wy-control-group.fluid-input input[type=date],.wy-control-group.fluid-input input[type=datetime-local],.wy-control-group.fluid-input input[type=datetime],.wy-control-group.fluid-input input[type=email],.wy-control-group.fluid-input 
input[type=month],.wy-control-group.fluid-input input[type=number],.wy-control-group.fluid-input input[type=password],.wy-control-group.fluid-input input[type=search],.wy-control-group.fluid-input input[type=tel],.wy-control-group.fluid-input input[type=text],.wy-control-group.fluid-input input[type=time],.wy-control-group.fluid-input input[type=url],.wy-control-group.fluid-input input[type=week]{width:100%}.wy-form-message-inline{padding-left:.3em;color:#666;font-size:90%}.wy-form-message{display:block;color:#999;font-size:70%;margin-top:.3125em;font-style:italic}.wy-form-message p{font-size:inherit;font-style:italic;margin-bottom:6px}.wy-form-message p:last-child{margin-bottom:0}input{line-height:normal}input[type=button],input[type=reset],input[type=submit]{-webkit-appearance:button;cursor:pointer;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;*overflow:visible}input[type=color],input[type=date],input[type=datetime-local],input[type=datetime],input[type=email],input[type=month],input[type=number],input[type=password],input[type=search],input[type=tel],input[type=text],input[type=time],input[type=url],input[type=week]{-webkit-appearance:none;padding:6px;display:inline-block;border:1px solid #ccc;font-size:80%;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;box-shadow:inset 0 1px 3px #ddd;border-radius:0;-webkit-transition:border .3s linear;-moz-transition:border .3s linear;transition:border .3s linear}input[type=datetime-local]{padding:.34375em 
.625em}input[disabled]{cursor:default}input[type=checkbox],input[type=radio]{padding:0;margin-right:.3125em;*height:13px;*width:13px}input[type=checkbox],input[type=radio],input[type=search]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}input[type=search]::-webkit-search-cancel-button,input[type=search]::-webkit-search-decoration{-webkit-appearance:none}input[type=color]:focus,input[type=date]:focus,input[type=datetime-local]:focus,input[type=datetime]:focus,input[type=email]:focus,input[type=month]:focus,input[type=number]:focus,input[type=password]:focus,input[type=search]:focus,input[type=tel]:focus,input[type=text]:focus,input[type=time]:focus,input[type=url]:focus,input[type=week]:focus{outline:0;outline:thin dotted\9;border-color:#333}input.no-focus:focus{border-color:#ccc!important}input[type=checkbox]:focus,input[type=file]:focus,input[type=radio]:focus{outline:thin dotted #333;outline:1px auto #129fea}input[type=color][disabled],input[type=date][disabled],input[type=datetime-local][disabled],input[type=datetime][disabled],input[type=email][disabled],input[type=month][disabled],input[type=number][disabled],input[type=password][disabled],input[type=search][disabled],input[type=tel][disabled],input[type=text][disabled],input[type=time][disabled],input[type=url][disabled],input[type=week][disabled]{cursor:not-allowed;background-color:#fafafa}input:focus:invalid,select:focus:invalid,textarea:focus:invalid{color:#e74c3c;border:1px solid #e74c3c}input:focus:invalid:focus,select:focus:invalid:focus,textarea:focus:invalid:focus{border-color:#e74c3c}input[type=checkbox]:focus:invalid:focus,input[type=file]:focus:invalid:focus,input[type=radio]:focus:invalid:focus{outline-color:#e74c3c}input.wy-input-large{padding:12px;font-size:100%}textarea{overflow:auto;vertical-align:top;width:100%;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif}select,textarea{padding:.5em .625em;display:inline-block;border:1px solid 
#ccc;font-size:80%;box-shadow:inset 0 1px 3px #ddd;-webkit-transition:border .3s linear;-moz-transition:border .3s linear;transition:border .3s linear}select{border:1px solid #ccc;background-color:#fff}select[multiple]{height:auto}select:focus,textarea:focus{outline:0}input[readonly],select[disabled],select[readonly],textarea[disabled],textarea[readonly]{cursor:not-allowed;background-color:#fafafa}input[type=checkbox][disabled],input[type=radio][disabled]{cursor:not-allowed}.wy-checkbox,.wy-radio{margin:6px 0;color:#404040;display:block}.wy-checkbox input,.wy-radio input{vertical-align:baseline}.wy-form-message-inline{display:inline-block;*display:inline;*zoom:1;vertical-align:middle}.wy-input-prefix,.wy-input-suffix{white-space:nowrap;padding:6px}.wy-input-prefix .wy-input-context,.wy-input-suffix .wy-input-context{line-height:27px;padding:0 8px;display:inline-block;font-size:80%;background-color:#f3f6f6;border:1px solid #ccc;color:#999}.wy-input-suffix .wy-input-context{border-left:0}.wy-input-prefix .wy-input-context{border-right:0}.wy-switch{position:relative;display:block;height:24px;margin-top:12px;cursor:pointer}.wy-switch:before{left:0;top:0;width:36px;height:12px;background:#ccc}.wy-switch:after,.wy-switch:before{position:absolute;content:"";display:block;border-radius:4px;-webkit-transition:all .2s ease-in-out;-moz-transition:all .2s ease-in-out;transition:all .2s ease-in-out}.wy-switch:after{width:18px;height:18px;background:#999;left:-3px;top:-3px}.wy-switch span{position:absolute;left:48px;display:block;font-size:12px;color:#ccc;line-height:1}.wy-switch.active:before{background:#1e8449}.wy-switch.active:after{left:24px;background:#27ae60}.wy-switch.disabled{cursor:not-allowed;opacity:.8}.wy-control-group.wy-control-group-error .wy-form-message,.wy-control-group.wy-control-group-error>label{color:#e74c3c}.wy-control-group.wy-control-group-error input[type=color],.wy-control-group.wy-control-group-error 
input[type=date],.wy-control-group.wy-control-group-error input[type=datetime-local],.wy-control-group.wy-control-group-error input[type=datetime],.wy-control-group.wy-control-group-error input[type=email],.wy-control-group.wy-control-group-error input[type=month],.wy-control-group.wy-control-group-error input[type=number],.wy-control-group.wy-control-group-error input[type=password],.wy-control-group.wy-control-group-error input[type=search],.wy-control-group.wy-control-group-error input[type=tel],.wy-control-group.wy-control-group-error input[type=text],.wy-control-group.wy-control-group-error input[type=time],.wy-control-group.wy-control-group-error input[type=url],.wy-control-group.wy-control-group-error input[type=week],.wy-control-group.wy-control-group-error textarea{border:1px solid #e74c3c}.wy-inline-validate{white-space:nowrap}.wy-inline-validate .wy-input-context{padding:.5em .625em;display:inline-block;font-size:80%}.wy-inline-validate.wy-inline-validate-success .wy-input-context{color:#27ae60}.wy-inline-validate.wy-inline-validate-danger .wy-input-context{color:#e74c3c}.wy-inline-validate.wy-inline-validate-warning .wy-input-context{color:#e67e22}.wy-inline-validate.wy-inline-validate-info .wy-input-context{color:#2980b9}.rotate-90{-webkit-transform:rotate(90deg);-moz-transform:rotate(90deg);-ms-transform:rotate(90deg);-o-transform:rotate(90deg);transform:rotate(90deg)}.rotate-180{-webkit-transform:rotate(180deg);-moz-transform:rotate(180deg);-ms-transform:rotate(180deg);-o-transform:rotate(180deg);transform:rotate(180deg)}.rotate-270{-webkit-transform:rotate(270deg);-moz-transform:rotate(270deg);-ms-transform:rotate(270deg);-o-transform:rotate(270deg);transform:rotate(270deg)}.mirror{-webkit-transform:scaleX(-1);-moz-transform:scaleX(-1);-ms-transform:scaleX(-1);-o-transform:scaleX(-1);transform:scaleX(-1)}.mirror.rotate-90{-webkit-transform:scaleX(-1) rotate(90deg);-moz-transform:scaleX(-1) rotate(90deg);-ms-transform:scaleX(-1) 
rotate(90deg);-o-transform:scaleX(-1) rotate(90deg);transform:scaleX(-1) rotate(90deg)}.mirror.rotate-180{-webkit-transform:scaleX(-1) rotate(180deg);-moz-transform:scaleX(-1) rotate(180deg);-ms-transform:scaleX(-1) rotate(180deg);-o-transform:scaleX(-1) rotate(180deg);transform:scaleX(-1) rotate(180deg)}.mirror.rotate-270{-webkit-transform:scaleX(-1) rotate(270deg);-moz-transform:scaleX(-1) rotate(270deg);-ms-transform:scaleX(-1) rotate(270deg);-o-transform:scaleX(-1) rotate(270deg);transform:scaleX(-1) rotate(270deg)}@media only screen and (max-width:480px){.wy-form button[type=submit]{margin:.7em 0 0}.wy-form input[type=color],.wy-form input[type=date],.wy-form input[type=datetime-local],.wy-form input[type=datetime],.wy-form input[type=email],.wy-form input[type=month],.wy-form input[type=number],.wy-form input[type=password],.wy-form input[type=search],.wy-form input[type=tel],.wy-form input[type=text],.wy-form input[type=time],.wy-form input[type=url],.wy-form input[type=week],.wy-form label{margin-bottom:.3em;display:block}.wy-form input[type=color],.wy-form input[type=date],.wy-form input[type=datetime-local],.wy-form input[type=datetime],.wy-form input[type=email],.wy-form input[type=month],.wy-form input[type=number],.wy-form input[type=password],.wy-form input[type=search],.wy-form input[type=tel],.wy-form input[type=time],.wy-form input[type=url],.wy-form input[type=week]{margin-bottom:0}.wy-form-aligned .wy-control-group label{margin-bottom:.3em;text-align:left;display:block;width:100%}.wy-form-aligned .wy-control{margin:1.5em 0 0}.wy-form-message,.wy-form-message-inline,.wy-form .wy-help-inline{display:block;font-size:80%;padding:6px 0}}@media screen and (max-width:768px){.tablet-hide{display:none}}@media screen and (max-width:480px){.mobile-hide{display:none}}.float-left{float:left}.float-right{float:right}.full-width{width:100%}.rst-content table.docutils,.rst-content 
table.field-list,.wy-table{border-collapse:collapse;border-spacing:0;empty-cells:show;margin-bottom:24px}.rst-content table.docutils caption,.rst-content table.field-list caption,.wy-table caption{color:#000;font:italic 85%/1 arial,sans-serif;padding:1em 0;text-align:center}.rst-content table.docutils td,.rst-content table.docutils th,.rst-content table.field-list td,.rst-content table.field-list th,.wy-table td,.wy-table th{font-size:90%;margin:0;overflow:visible;padding:8px 16px}.rst-content table.docutils td:first-child,.rst-content table.docutils th:first-child,.rst-content table.field-list td:first-child,.rst-content table.field-list th:first-child,.wy-table td:first-child,.wy-table th:first-child{border-left-width:0}.rst-content table.docutils thead,.rst-content table.field-list thead,.wy-table thead{color:#000;text-align:left;vertical-align:bottom;white-space:nowrap}.rst-content table.docutils thead th,.rst-content table.field-list thead th,.wy-table thead th{font-weight:700;border-bottom:2px solid #e1e4e5}.rst-content table.docutils td,.rst-content table.field-list td,.wy-table td{background-color:transparent;vertical-align:middle}.rst-content table.docutils td p,.rst-content table.field-list td p,.wy-table td p{line-height:18px}.rst-content table.docutils td p:last-child,.rst-content table.field-list td p:last-child,.wy-table td p:last-child{margin-bottom:0}.rst-content table.docutils .wy-table-cell-min,.rst-content table.field-list .wy-table-cell-min,.wy-table .wy-table-cell-min{width:1%;padding-right:0}.rst-content table.docutils .wy-table-cell-min input[type=checkbox],.rst-content table.field-list .wy-table-cell-min input[type=checkbox],.wy-table .wy-table-cell-min input[type=checkbox]{margin:0}.wy-table-secondary{color:grey;font-size:90%}.wy-table-tertiary{color:grey;font-size:80%}.rst-content table.docutils:not(.field-list) tr:nth-child(2n-1) td,.wy-table-backed,.wy-table-odd td,.wy-table-striped tr:nth-child(2n-1) 
td{background-color:#f3f6f6}.rst-content table.docutils,.wy-table-bordered-all{border:1px solid #e1e4e5}.rst-content table.docutils td,.wy-table-bordered-all td{border-bottom:1px solid #e1e4e5;border-left:1px solid #e1e4e5}.rst-content table.docutils tbody>tr:last-child td,.wy-table-bordered-all tbody>tr:last-child td{border-bottom-width:0}.wy-table-bordered{border:1px solid #e1e4e5}.wy-table-bordered-rows td{border-bottom:1px solid #e1e4e5}.wy-table-bordered-rows tbody>tr:last-child td{border-bottom-width:0}.wy-table-horizontal td,.wy-table-horizontal th{border-width:0 0 1px;border-bottom:1px solid #e1e4e5}.wy-table-horizontal tbody>tr:last-child td{border-bottom-width:0}.wy-table-responsive{margin-bottom:24px;max-width:100%;overflow:auto}.wy-table-responsive table{margin-bottom:0!important}.wy-table-responsive table td,.wy-table-responsive table th{white-space:nowrap}a{color:#2980b9;text-decoration:none;cursor:pointer}a:hover{color:#3091d1}a:visited{color:#9b59b6}html{height:100%}body,html{overflow-x:hidden}body{font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;font-weight:400;color:#404040;min-height:100%;background:#edf0f2}.wy-text-left{text-align:left}.wy-text-center{text-align:center}.wy-text-right{text-align:right}.wy-text-large{font-size:120%}.wy-text-normal{font-size:100%}.wy-text-small,small{font-size:80%}.wy-text-strike{text-decoration:line-through}.wy-text-warning{color:#e67e22!important}a.wy-text-warning:hover{color:#eb9950!important}.wy-text-info{color:#2980b9!important}a.wy-text-info:hover{color:#409ad5!important}.wy-text-success{color:#27ae60!important}a.wy-text-success:hover{color:#36d278!important}.wy-text-danger{color:#e74c3c!important}a.wy-text-danger:hover{color:#ed7669!important}.wy-text-neutral{color:#404040!important}a.wy-text-neutral:hover{color:#595959!important}.rst-content .toctree-wrapper>p.caption,h1,h2,h3,h4,h5,h6,legend{margin-top:0;font-weight:700;font-family:Roboto 
Slab,ff-tisa-web-pro,Georgia,Arial,sans-serif}p{line-height:24px;font-size:16px;margin:0 0 24px}h1{font-size:175%}.rst-content .toctree-wrapper>p.caption,h2{font-size:150%}h3{font-size:125%}h4{font-size:115%}h5{font-size:110%}h6{font-size:100%}hr{display:block;height:1px;border:0;border-top:1px solid #e1e4e5;margin:24px 0;padding:0}.rst-content code,.rst-content tt,code{white-space:nowrap;max-width:100%;background:#fff;border:1px solid #e1e4e5;font-size:75%;padding:0 5px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;color:#e74c3c;overflow-x:auto}.rst-content tt.code-large,code.code-large{font-size:90%}.rst-content .section ul,.rst-content .toctree-wrapper ul,.rst-content section ul,.wy-plain-list-disc,article ul{list-style:disc;line-height:24px;margin-bottom:24px}.rst-content .section ul li,.rst-content .toctree-wrapper ul li,.rst-content section ul li,.wy-plain-list-disc li,article ul li{list-style:disc;margin-left:24px}.rst-content .section ul li p:last-child,.rst-content .section ul li ul,.rst-content .toctree-wrapper ul li p:last-child,.rst-content .toctree-wrapper ul li ul,.rst-content section ul li p:last-child,.rst-content section ul li ul,.wy-plain-list-disc li p:last-child,.wy-plain-list-disc li ul,article ul li p:last-child,article ul li ul{margin-bottom:0}.rst-content .section ul li li,.rst-content .toctree-wrapper ul li li,.rst-content section ul li li,.wy-plain-list-disc li li,article ul li li{list-style:circle}.rst-content .section ul li li li,.rst-content .toctree-wrapper ul li li li,.rst-content section ul li li li,.wy-plain-list-disc li li li,article ul li li li{list-style:square}.rst-content .section ul li ol li,.rst-content .toctree-wrapper ul li ol li,.rst-content section ul li ol li,.wy-plain-list-disc li ol li,article ul li ol li{list-style:decimal}.rst-content .section ol,.rst-content .section ol.arabic,.rst-content .toctree-wrapper ol,.rst-content .toctree-wrapper ol.arabic,.rst-content 
section ol,.rst-content section ol.arabic,.wy-plain-list-decimal,article ol{list-style:decimal;line-height:24px;margin-bottom:24px}.rst-content .section ol.arabic li,.rst-content .section ol li,.rst-content .toctree-wrapper ol.arabic li,.rst-content .toctree-wrapper ol li,.rst-content section ol.arabic li,.rst-content section ol li,.wy-plain-list-decimal li,article ol li{list-style:decimal;margin-left:24px}.rst-content .section ol.arabic li ul,.rst-content .section ol li p:last-child,.rst-content .section ol li ul,.rst-content .toctree-wrapper ol.arabic li ul,.rst-content .toctree-wrapper ol li p:last-child,.rst-content .toctree-wrapper ol li ul,.rst-content section ol.arabic li ul,.rst-content section ol li p:last-child,.rst-content section ol li ul,.wy-plain-list-decimal li p:last-child,.wy-plain-list-decimal li ul,article ol li p:last-child,article ol li ul{margin-bottom:0}.rst-content .section ol.arabic li ul li,.rst-content .section ol li ul li,.rst-content .toctree-wrapper ol.arabic li ul li,.rst-content .toctree-wrapper ol li ul li,.rst-content section ol.arabic li ul li,.rst-content section ol li ul li,.wy-plain-list-decimal li ul li,article ol li ul li{list-style:disc}.wy-breadcrumbs{*zoom:1}.wy-breadcrumbs:after,.wy-breadcrumbs:before{display:table;content:""}.wy-breadcrumbs:after{clear:both}.wy-breadcrumbs>li{display:inline-block;padding-top:5px}.wy-breadcrumbs>li.wy-breadcrumbs-aside{float:right}.rst-content .wy-breadcrumbs>li code,.rst-content .wy-breadcrumbs>li tt,.wy-breadcrumbs>li .rst-content tt,.wy-breadcrumbs>li code{all:inherit;color:inherit}.breadcrumb-item:before{content:"/";color:#bbb;font-size:13px;padding:0 6px 0 3px}.wy-breadcrumbs-extra{margin-bottom:0;color:#b3b3b3;font-size:80%;display:inline-block}@media screen and (max-width:480px){.wy-breadcrumbs-extra,.wy-breadcrumbs li.wy-breadcrumbs-aside{display:none}}@media print{.wy-breadcrumbs 
li.wy-breadcrumbs-aside{display:none}}html{font-size:16px}.wy-affix{position:fixed;top:1.618em}.wy-menu a:hover{text-decoration:none}.wy-menu-horiz{*zoom:1}.wy-menu-horiz:after,.wy-menu-horiz:before{display:table;content:""}.wy-menu-horiz:after{clear:both}.wy-menu-horiz li,.wy-menu-horiz ul{display:inline-block}.wy-menu-horiz li:hover{background:hsla(0,0%,100%,.1)}.wy-menu-horiz li.divide-left{border-left:1px solid #404040}.wy-menu-horiz li.divide-right{border-right:1px solid #404040}.wy-menu-horiz a{height:32px;display:inline-block;line-height:32px;padding:0 16px}.wy-menu-vertical{width:300px}.wy-menu-vertical header,.wy-menu-vertical p.caption{color:#55a5d9;height:32px;line-height:32px;padding:0 1.618em;margin:12px 0 0;display:block;font-weight:700;text-transform:uppercase;font-size:85%;white-space:nowrap}.wy-menu-vertical ul{margin-bottom:0}.wy-menu-vertical li.divide-top{border-top:1px solid #404040}.wy-menu-vertical li.divide-bottom{border-bottom:1px solid #404040}.wy-menu-vertical li.current{background:#e3e3e3}.wy-menu-vertical li.current a{color:grey;border-right:1px solid #c9c9c9;padding:.4045em 2.427em}.wy-menu-vertical li.current a:hover{background:#d6d6d6}.rst-content .wy-menu-vertical li tt,.wy-menu-vertical li .rst-content tt,.wy-menu-vertical li code{border:none;background:inherit;color:inherit;padding-left:0;padding-right:0}.wy-menu-vertical li button.toctree-expand{display:block;float:left;margin-left:-1.2em;line-height:18px;color:#4d4d4d;border:none;background:none;padding:0}.wy-menu-vertical li.current>a,.wy-menu-vertical li.on a{color:#404040;font-weight:700;position:relative;background:#fcfcfc;border:none;padding:.4045em 1.618em}.wy-menu-vertical li.current>a:hover,.wy-menu-vertical li.on a:hover{background:#fcfcfc}.wy-menu-vertical li.current>a:hover button.toctree-expand,.wy-menu-vertical li.on a:hover button.toctree-expand{color:grey}.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a 
button.toctree-expand{display:block;line-height:18px;color:#333}.wy-menu-vertical li.toctree-l1.current>a{border-bottom:1px solid #c9c9c9;border-top:1px solid #c9c9c9}.wy-menu-vertical .toctree-l1.current .toctree-l2>ul,.wy-menu-vertical .toctree-l2.current .toctree-l3>ul,.wy-menu-vertical .toctree-l3.current .toctree-l4>ul,.wy-menu-vertical .toctree-l4.current .toctree-l5>ul,.wy-menu-vertical .toctree-l5.current .toctree-l6>ul,.wy-menu-vertical .toctree-l6.current .toctree-l7>ul,.wy-menu-vertical .toctree-l7.current .toctree-l8>ul,.wy-menu-vertical .toctree-l8.current .toctree-l9>ul,.wy-menu-vertical .toctree-l9.current .toctree-l10>ul,.wy-menu-vertical .toctree-l10.current .toctree-l11>ul{display:none}.wy-menu-vertical .toctree-l1.current .current.toctree-l2>ul,.wy-menu-vertical .toctree-l2.current .current.toctree-l3>ul,.wy-menu-vertical .toctree-l3.current .current.toctree-l4>ul,.wy-menu-vertical .toctree-l4.current .current.toctree-l5>ul,.wy-menu-vertical .toctree-l5.current .current.toctree-l6>ul,.wy-menu-vertical .toctree-l6.current .current.toctree-l7>ul,.wy-menu-vertical .toctree-l7.current .current.toctree-l8>ul,.wy-menu-vertical .toctree-l8.current .current.toctree-l9>ul,.wy-menu-vertical .toctree-l9.current .current.toctree-l10>ul,.wy-menu-vertical .toctree-l10.current .current.toctree-l11>ul{display:block}.wy-menu-vertical li.toctree-l3,.wy-menu-vertical li.toctree-l4{font-size:.9em}.wy-menu-vertical li.toctree-l2 a,.wy-menu-vertical li.toctree-l3 a,.wy-menu-vertical li.toctree-l4 a,.wy-menu-vertical li.toctree-l5 a,.wy-menu-vertical li.toctree-l6 a,.wy-menu-vertical li.toctree-l7 a,.wy-menu-vertical li.toctree-l8 a,.wy-menu-vertical li.toctree-l9 a,.wy-menu-vertical li.toctree-l10 a{color:#404040}.wy-menu-vertical li.toctree-l2 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l3 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l4 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l5 a:hover 
button.toctree-expand,.wy-menu-vertical li.toctree-l6 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l7 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l8 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l9 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l10 a:hover button.toctree-expand{color:grey}.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a,.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a,.wy-menu-vertical li.toctree-l4.current li.toctree-l5>a,.wy-menu-vertical li.toctree-l5.current li.toctree-l6>a,.wy-menu-vertical li.toctree-l6.current li.toctree-l7>a,.wy-menu-vertical li.toctree-l7.current li.toctree-l8>a,.wy-menu-vertical li.toctree-l8.current li.toctree-l9>a,.wy-menu-vertical li.toctree-l9.current li.toctree-l10>a,.wy-menu-vertical li.toctree-l10.current li.toctree-l11>a{display:block}.wy-menu-vertical li.toctree-l2.current>a{padding:.4045em 2.427em}.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a{padding:.4045em 1.618em .4045em 4.045em}.wy-menu-vertical li.toctree-l3.current>a{padding:.4045em 4.045em}.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a{padding:.4045em 1.618em .4045em 5.663em}.wy-menu-vertical li.toctree-l4.current>a{padding:.4045em 5.663em}.wy-menu-vertical li.toctree-l4.current li.toctree-l5>a{padding:.4045em 1.618em .4045em 7.281em}.wy-menu-vertical li.toctree-l5.current>a{padding:.4045em 7.281em}.wy-menu-vertical li.toctree-l5.current li.toctree-l6>a{padding:.4045em 1.618em .4045em 8.899em}.wy-menu-vertical li.toctree-l6.current>a{padding:.4045em 8.899em}.wy-menu-vertical li.toctree-l6.current li.toctree-l7>a{padding:.4045em 1.618em .4045em 10.517em}.wy-menu-vertical li.toctree-l7.current>a{padding:.4045em 10.517em}.wy-menu-vertical li.toctree-l7.current li.toctree-l8>a{padding:.4045em 1.618em .4045em 12.135em}.wy-menu-vertical li.toctree-l8.current>a{padding:.4045em 12.135em}.wy-menu-vertical li.toctree-l8.current li.toctree-l9>a{padding:.4045em 1.618em .4045em 
13.753em}.wy-menu-vertical li.toctree-l9.current>a{padding:.4045em 13.753em}.wy-menu-vertical li.toctree-l9.current li.toctree-l10>a{padding:.4045em 1.618em .4045em 15.371em}.wy-menu-vertical li.toctree-l10.current>a{padding:.4045em 15.371em}.wy-menu-vertical li.toctree-l10.current li.toctree-l11>a{padding:.4045em 1.618em .4045em 16.989em}.wy-menu-vertical li.toctree-l2.current>a,.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a{background:#c9c9c9}.wy-menu-vertical li.toctree-l2 button.toctree-expand{color:#a3a3a3}.wy-menu-vertical li.toctree-l3.current>a,.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a{background:#bdbdbd}.wy-menu-vertical li.toctree-l3 button.toctree-expand{color:#969696}.wy-menu-vertical li.current ul{display:block}.wy-menu-vertical li ul{margin-bottom:0;display:none}.wy-menu-vertical li ul li a{margin-bottom:0;color:#d9d9d9;font-weight:400}.wy-menu-vertical a{line-height:18px;padding:.4045em 1.618em;display:block;position:relative;font-size:90%;color:#d9d9d9}.wy-menu-vertical a:hover{background-color:#4e4a4a;cursor:pointer}.wy-menu-vertical a:hover button.toctree-expand{color:#d9d9d9}.wy-menu-vertical a:active{background-color:#2980b9;cursor:pointer;color:#fff}.wy-menu-vertical a:active button.toctree-expand{color:#fff}.wy-side-nav-search{display:block;width:300px;padding:.809em;margin-bottom:.809em;z-index:200;background-color:#2980b9;text-align:center;color:#fcfcfc}.wy-side-nav-search input[type=text]{width:100%;border-radius:50px;padding:6px 12px;border-color:#2472a4}.wy-side-nav-search img{display:block;margin:auto auto .809em;height:45px;width:45px;background-color:#2980b9;padding:5px;border-radius:100%}.wy-side-nav-search .wy-dropdown>a,.wy-side-nav-search>a{color:#fcfcfc;font-size:100%;font-weight:700;display:inline-block;padding:4px 6px;margin-bottom:.809em;max-width:100%}.wy-side-nav-search .wy-dropdown>a:hover,.wy-side-nav-search>a:hover{background:hsla(0,0%,100%,.1)}.wy-side-nav-search .wy-dropdown>a 
img.logo,.wy-side-nav-search>a img.logo{display:block;margin:0 auto;height:auto;width:auto;border-radius:0;max-width:100%;background:transparent}.wy-side-nav-search .wy-dropdown>a.icon img.logo,.wy-side-nav-search>a.icon img.logo{margin-top:.85em}.wy-side-nav-search>div.version{margin-top:-.4045em;margin-bottom:.809em;font-weight:400;color:hsla(0,0%,100%,.3)}.wy-nav .wy-menu-vertical header{color:#2980b9}.wy-nav .wy-menu-vertical a{color:#b3b3b3}.wy-nav .wy-menu-vertical a:hover{background-color:#2980b9;color:#fff}[data-menu-wrap]{-webkit-transition:all .2s ease-in;-moz-transition:all .2s ease-in;transition:all .2s ease-in;position:absolute;opacity:1;width:100%;opacity:0}[data-menu-wrap].move-center{left:0;right:auto;opacity:1}[data-menu-wrap].move-left{right:auto;left:-100%;opacity:0}[data-menu-wrap].move-right{right:-100%;left:auto;opacity:0}.wy-body-for-nav{background:#fcfcfc}.wy-grid-for-nav{position:absolute;width:100%;height:100%}.wy-nav-side{position:fixed;top:0;bottom:0;left:0;padding-bottom:2em;width:300px;overflow-x:hidden;overflow-y:hidden;min-height:100%;color:#9b9b9b;background:#343131;z-index:200}.wy-side-scroll{width:320px;position:relative;overflow-x:hidden;overflow-y:scroll;height:100%}.wy-nav-top{display:none;background:#2980b9;color:#fff;padding:.4045em .809em;position:relative;line-height:50px;text-align:center;font-size:100%;*zoom:1}.wy-nav-top:after,.wy-nav-top:before{display:table;content:""}.wy-nav-top:after{clear:both}.wy-nav-top a{color:#fff;font-weight:700}.wy-nav-top img{margin-right:12px;height:45px;width:45px;background-color:#2980b9;padding:5px;border-radius:100%}.wy-nav-top i{font-size:30px;float:left;cursor:pointer;padding-top:inherit}.wy-nav-content-wrap{margin-left:300px;background:#fcfcfc;min-height:100%}.wy-nav-content{padding:1.618em 
3.236em;height:100%;max-width:800px;margin:auto}.wy-body-mask{position:fixed;width:100%;height:100%;background:rgba(0,0,0,.2);display:none;z-index:499}.wy-body-mask.on{display:block}footer{color:grey}footer p{margin-bottom:12px}.rst-content footer span.commit tt,footer span.commit .rst-content tt,footer span.commit code{padding:0;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;font-size:1em;background:none;border:none;color:grey}.rst-footer-buttons{*zoom:1}.rst-footer-buttons:after,.rst-footer-buttons:before{width:100%;display:table;content:""}.rst-footer-buttons:after{clear:both}.rst-breadcrumbs-buttons{margin-top:12px;*zoom:1}.rst-breadcrumbs-buttons:after,.rst-breadcrumbs-buttons:before{display:table;content:""}.rst-breadcrumbs-buttons:after{clear:both}#search-results .search li{margin-bottom:24px;border-bottom:1px solid #e1e4e5;padding-bottom:24px}#search-results .search li:first-child{border-top:1px solid #e1e4e5;padding-top:24px}#search-results .search li a{font-size:120%;margin-bottom:12px;display:inline-block}#search-results .context{color:grey;font-size:90%}.genindextable li>ul{margin-left:24px}@media screen and (max-width:768px){.wy-body-for-nav{background:#fcfcfc}.wy-nav-top{display:block}.wy-nav-side{left:-300px}.wy-nav-side.shift{width:85%;left:0}.wy-menu.wy-menu-vertical,.wy-side-nav-search,.wy-side-scroll{width:auto}.wy-nav-content-wrap{margin-left:0}.wy-nav-content-wrap .wy-nav-content{padding:1.618em}.wy-nav-content-wrap.shift{position:fixed;min-width:100%;left:85%;top:0;height:100%;overflow:hidden}}@media screen and (min-width:1100px){.wy-nav-content-wrap{background:rgba(0,0,0,.05)}.wy-nav-content{margin:0;background:#fcfcfc}}@media print{.rst-versions,.wy-nav-side,footer{display:none}.wy-nav-content-wrap{margin-left:0}}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;z-index:400}.rst-versions 
a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60;*zoom:1}.rst-versions .rst-current-version:after,.rst-versions .rst-current-version:before{display:table;content:""}.rst-versions .rst-current-version:after{clear:both}.rst-content .code-block-caption .rst-versions .rst-current-version .headerlink,.rst-content .eqno .rst-versions .rst-current-version .headerlink,.rst-content .rst-versions .rst-current-version .admonition-title,.rst-content code.download .rst-versions .rst-current-version span:first-child,.rst-content dl dt .rst-versions .rst-current-version .headerlink,.rst-content h1 .rst-versions .rst-current-version .headerlink,.rst-content h2 .rst-versions .rst-current-version .headerlink,.rst-content h3 .rst-versions .rst-current-version .headerlink,.rst-content h4 .rst-versions .rst-current-version .headerlink,.rst-content h5 .rst-versions .rst-current-version .headerlink,.rst-content h6 .rst-versions .rst-current-version .headerlink,.rst-content p .rst-versions .rst-current-version .headerlink,.rst-content table>caption .rst-versions .rst-current-version .headerlink,.rst-content tt.download .rst-versions .rst-current-version span:first-child,.rst-versions .rst-current-version .fa,.rst-versions .rst-current-version .icon,.rst-versions .rst-current-version .rst-content .admonition-title,.rst-versions .rst-current-version .rst-content .code-block-caption .headerlink,.rst-versions .rst-current-version .rst-content .eqno .headerlink,.rst-versions .rst-current-version .rst-content code.download span:first-child,.rst-versions .rst-current-version .rst-content dl dt .headerlink,.rst-versions .rst-current-version .rst-content h1 .headerlink,.rst-versions .rst-current-version .rst-content h2 .headerlink,.rst-versions .rst-current-version .rst-content h3 .headerlink,.rst-versions 
.rst-current-version .rst-content h4 .headerlink,.rst-versions .rst-current-version .rst-content h5 .headerlink,.rst-versions .rst-current-version .rst-content h6 .headerlink,.rst-versions .rst-current-version .rst-content p .headerlink,.rst-versions .rst-current-version .rst-content table>caption .headerlink,.rst-versions .rst-current-version .rst-content tt.download span:first-child,.rst-versions .rst-current-version .wy-menu-vertical li button.toctree-expand,.wy-menu-vertical li .rst-versions .rst-current-version button.toctree-expand{color:#fcfcfc}.rst-versions .rst-current-version .fa-book,.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#f1c40f;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:grey;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:1px solid #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .fa-book,.rst-versions.rst-badge .icon-book{float:none;line-height:30px}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book,.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge>.rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width:768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}}.rst-content 
.toctree-wrapper>p.caption,.rst-content h1,.rst-content h2,.rst-content h3,.rst-content h4,.rst-content h5,.rst-content h6{margin-bottom:24px}.rst-content img{max-width:100%;height:auto}.rst-content div.figure,.rst-content figure{margin-bottom:24px}.rst-content div.figure .caption-text,.rst-content figure .caption-text{font-style:italic}.rst-content div.figure p:last-child.caption,.rst-content figure p:last-child.caption{margin-bottom:0}.rst-content div.figure.align-center,.rst-content figure.align-center{text-align:center}.rst-content .section>a>img,.rst-content .section>img,.rst-content section>a>img,.rst-content section>img{margin-bottom:24px}.rst-content abbr[title]{text-decoration:none}.rst-content.style-external-links a.reference.external:after{font-family:FontAwesome;content:"\f08e";color:#b3b3b3;vertical-align:super;font-size:60%;margin:0 .2em}.rst-content blockquote{margin-left:24px;line-height:24px;margin-bottom:24px}.rst-content pre.literal-block{white-space:pre;margin:0;padding:12px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;display:block;overflow:auto}.rst-content div[class^=highlight],.rst-content pre.literal-block{border:1px solid #e1e4e5;overflow-x:auto;margin:1px 0 24px}.rst-content div[class^=highlight] div[class^=highlight],.rst-content pre.literal-block div[class^=highlight]{padding:0;border:none;margin:0}.rst-content div[class^=highlight] td.code{width:100%}.rst-content .linenodiv pre{border-right:1px solid #e6e9ea;margin:0;padding:12px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;user-select:none;pointer-events:none}.rst-content div[class^=highlight] pre{white-space:pre;margin:0;padding:12px;display:block;overflow:auto}.rst-content div[class^=highlight] pre .hll{display:block;margin:0 -12px;padding:0 12px}.rst-content .linenodiv pre,.rst-content div[class^=highlight] pre,.rst-content 
pre.literal-block{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;font-size:12px;line-height:1.4}.rst-content div.highlight .gp,.rst-content div.highlight span.linenos{user-select:none;pointer-events:none}.rst-content div.highlight span.linenos{display:inline-block;padding-left:0;padding-right:12px;margin-right:12px;border-right:1px solid #e6e9ea}.rst-content .code-block-caption{font-style:italic;font-size:85%;line-height:1;padding:1em 0;text-align:center}@media print{.rst-content .codeblock,.rst-content div[class^=highlight],.rst-content div[class^=highlight] pre{white-space:pre-wrap}}.rst-content .admonition,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .danger,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning{clear:both}.rst-content .admonition-todo .last,.rst-content .admonition-todo>:last-child,.rst-content .admonition .last,.rst-content .admonition>:last-child,.rst-content .attention .last,.rst-content .attention>:last-child,.rst-content .caution .last,.rst-content .caution>:last-child,.rst-content .danger .last,.rst-content .danger>:last-child,.rst-content .error .last,.rst-content .error>:last-child,.rst-content .hint .last,.rst-content .hint>:last-child,.rst-content .important .last,.rst-content .important>:last-child,.rst-content .note .last,.rst-content .note>:last-child,.rst-content .seealso .last,.rst-content .seealso>:last-child,.rst-content .tip .last,.rst-content .tip>:last-child,.rst-content .warning .last,.rst-content .warning>:last-child{margin-bottom:0}.rst-content .admonition-title:before{margin-right:4px}.rst-content .admonition table{border-color:rgba(0,0,0,.1)}.rst-content .admonition table td,.rst-content .admonition table th{background:transparent!important;border-color:rgba(0,0,0,.1)!important}.rst-content .section ol.loweralpha,.rst-content .section 
ol.loweralpha>li,.rst-content .toctree-wrapper ol.loweralpha,.rst-content .toctree-wrapper ol.loweralpha>li,.rst-content section ol.loweralpha,.rst-content section ol.loweralpha>li{list-style:lower-alpha}.rst-content .section ol.upperalpha,.rst-content .section ol.upperalpha>li,.rst-content .toctree-wrapper ol.upperalpha,.rst-content .toctree-wrapper ol.upperalpha>li,.rst-content section ol.upperalpha,.rst-content section ol.upperalpha>li{list-style:upper-alpha}.rst-content .section ol li>*,.rst-content .section ul li>*,.rst-content .toctree-wrapper ol li>*,.rst-content .toctree-wrapper ul li>*,.rst-content section ol li>*,.rst-content section ul li>*{margin-top:12px;margin-bottom:12px}.rst-content .section ol li>:first-child,.rst-content .section ul li>:first-child,.rst-content .toctree-wrapper ol li>:first-child,.rst-content .toctree-wrapper ul li>:first-child,.rst-content section ol li>:first-child,.rst-content section ul li>:first-child{margin-top:0}.rst-content .section ol li>p,.rst-content .section ol li>p:last-child,.rst-content .section ul li>p,.rst-content .section ul li>p:last-child,.rst-content .toctree-wrapper ol li>p,.rst-content .toctree-wrapper ol li>p:last-child,.rst-content .toctree-wrapper ul li>p,.rst-content .toctree-wrapper ul li>p:last-child,.rst-content section ol li>p,.rst-content section ol li>p:last-child,.rst-content section ul li>p,.rst-content section ul li>p:last-child{margin-bottom:12px}.rst-content .section ol li>p:only-child,.rst-content .section ol li>p:only-child:last-child,.rst-content .section ul li>p:only-child,.rst-content .section ul li>p:only-child:last-child,.rst-content .toctree-wrapper ol li>p:only-child,.rst-content .toctree-wrapper ol li>p:only-child:last-child,.rst-content .toctree-wrapper ul li>p:only-child,.rst-content .toctree-wrapper ul li>p:only-child:last-child,.rst-content section ol li>p:only-child,.rst-content section ol li>p:only-child:last-child,.rst-content section ul li>p:only-child,.rst-content section ul 
li>p:only-child:last-child{margin-bottom:0}.rst-content .section ol li>ol,.rst-content .section ol li>ul,.rst-content .section ul li>ol,.rst-content .section ul li>ul,.rst-content .toctree-wrapper ol li>ol,.rst-content .toctree-wrapper ol li>ul,.rst-content .toctree-wrapper ul li>ol,.rst-content .toctree-wrapper ul li>ul,.rst-content section ol li>ol,.rst-content section ol li>ul,.rst-content section ul li>ol,.rst-content section ul li>ul{margin-bottom:12px}.rst-content .section ol.simple li>*,.rst-content .section ol.simple li ol,.rst-content .section ol.simple li ul,.rst-content .section ul.simple li>*,.rst-content .section ul.simple li ol,.rst-content .section ul.simple li ul,.rst-content .toctree-wrapper ol.simple li>*,.rst-content .toctree-wrapper ol.simple li ol,.rst-content .toctree-wrapper ol.simple li ul,.rst-content .toctree-wrapper ul.simple li>*,.rst-content .toctree-wrapper ul.simple li ol,.rst-content .toctree-wrapper ul.simple li ul,.rst-content section ol.simple li>*,.rst-content section ol.simple li ol,.rst-content section ol.simple li ul,.rst-content section ul.simple li>*,.rst-content section ul.simple li ol,.rst-content section ul.simple li ul{margin-top:0;margin-bottom:0}.rst-content .line-block{margin-left:0;margin-bottom:24px;line-height:24px}.rst-content .line-block .line-block{margin-left:24px;margin-bottom:0}.rst-content .topic-title{font-weight:700;margin-bottom:12px}.rst-content .toc-backref{color:#404040}.rst-content .align-right{float:right;margin:0 0 24px 24px}.rst-content .align-left{float:left;margin:0 24px 24px 0}.rst-content .align-center{margin:auto}.rst-content .align-center:not(table){display:block}.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content .toctree-wrapper>p.caption .headerlink,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 
.headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink{opacity:0;font-size:14px;font-family:FontAwesome;margin-left:.5em}.rst-content .code-block-caption .headerlink:focus,.rst-content .code-block-caption:hover .headerlink,.rst-content .eqno .headerlink:focus,.rst-content .eqno:hover .headerlink,.rst-content .toctree-wrapper>p.caption .headerlink:focus,.rst-content .toctree-wrapper>p.caption:hover .headerlink,.rst-content dl dt .headerlink:focus,.rst-content dl dt:hover .headerlink,.rst-content h1 .headerlink:focus,.rst-content h1:hover .headerlink,.rst-content h2 .headerlink:focus,.rst-content h2:hover .headerlink,.rst-content h3 .headerlink:focus,.rst-content h3:hover .headerlink,.rst-content h4 .headerlink:focus,.rst-content h4:hover .headerlink,.rst-content h5 .headerlink:focus,.rst-content h5:hover .headerlink,.rst-content h6 .headerlink:focus,.rst-content h6:hover .headerlink,.rst-content p.caption .headerlink:focus,.rst-content p.caption:hover .headerlink,.rst-content p .headerlink:focus,.rst-content p:hover .headerlink,.rst-content table>caption .headerlink:focus,.rst-content table>caption:hover .headerlink{opacity:1}.rst-content p a{overflow-wrap:anywhere}.rst-content .wy-table td p,.rst-content .wy-table td ul,.rst-content .wy-table th p,.rst-content .wy-table th ul,.rst-content table.docutils td p,.rst-content table.docutils td ul,.rst-content table.docutils th p,.rst-content table.docutils th ul,.rst-content table.field-list td p,.rst-content table.field-list td ul,.rst-content table.field-list th p,.rst-content table.field-list th ul{font-size:inherit}.rst-content .btn:focus{outline:2px solid}.rst-content table>caption .headerlink:after{font-size:12px}.rst-content .centered{text-align:center}.rst-content .sidebar{float:right;width:40%;display:block;margin:0 0 24px 24px;padding:24px;background:#f3f6f6;border:1px solid #e1e4e5}.rst-content .sidebar dl,.rst-content .sidebar p,.rst-content .sidebar 
ul{font-size:90%}.rst-content .sidebar .last,.rst-content .sidebar>:last-child{margin-bottom:0}.rst-content .sidebar .sidebar-title{display:block;font-family:Roboto Slab,ff-tisa-web-pro,Georgia,Arial,sans-serif;font-weight:700;background:#e1e4e5;padding:6px 12px;margin:-24px -24px 24px;font-size:100%}.rst-content .highlighted{background:#f1c40f;box-shadow:0 0 0 2px #f1c40f;display:inline;font-weight:700}.rst-content .citation-reference,.rst-content .footnote-reference{vertical-align:baseline;position:relative;top:-.4em;line-height:0;font-size:90%}.rst-content .citation-reference>span.fn-bracket,.rst-content .footnote-reference>span.fn-bracket{display:none}.rst-content .hlist{width:100%}.rst-content dl dt span.classifier:before{content:" : "}.rst-content dl dt span.classifier-delimiter{display:none!important}html.writer-html4 .rst-content table.docutils.citation,html.writer-html4 .rst-content table.docutils.footnote{background:none;border:none}html.writer-html4 .rst-content table.docutils.citation td,html.writer-html4 .rst-content table.docutils.citation tr,html.writer-html4 .rst-content table.docutils.footnote td,html.writer-html4 .rst-content table.docutils.footnote tr{border:none;background-color:transparent!important;white-space:normal}html.writer-html4 .rst-content table.docutils.citation td.label,html.writer-html4 .rst-content table.docutils.footnote td.label{padding-left:0;padding-right:0;vertical-align:top}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.field-list,html.writer-html5 .rst-content dl.footnote{display:grid;grid-template-columns:auto minmax(80%,95%)}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dt{display:inline-grid;grid-template-columns:max-content auto}html.writer-html5 .rst-content aside.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content div.citation{display:grid;grid-template-columns:auto 
auto minmax(.65rem,auto) minmax(40%,95%)}html.writer-html5 .rst-content aside.citation>span.label,html.writer-html5 .rst-content aside.footnote>span.label,html.writer-html5 .rst-content div.citation>span.label{grid-column-start:1;grid-column-end:2}html.writer-html5 .rst-content aside.citation>span.backrefs,html.writer-html5 .rst-content aside.footnote>span.backrefs,html.writer-html5 .rst-content div.citation>span.backrefs{grid-column-start:2;grid-column-end:3;grid-row-start:1;grid-row-end:3}html.writer-html5 .rst-content aside.citation>p,html.writer-html5 .rst-content aside.footnote>p,html.writer-html5 .rst-content div.citation>p{grid-column-start:4;grid-column-end:5}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.field-list,html.writer-html5 .rst-content dl.footnote{margin-bottom:24px}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dt{padding-left:1rem}html.writer-html5 .rst-content dl.citation>dd,html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dd,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dd,html.writer-html5 .rst-content dl.footnote>dt{margin-bottom:0}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.footnote{font-size:.9rem}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.footnote>dt{margin:0 .5rem .5rem 0;line-height:1.2rem;word-break:break-all;font-weight:400}html.writer-html5 .rst-content dl.citation>dt>span.brackets:before,html.writer-html5 .rst-content dl.footnote>dt>span.brackets:before{content:"["}html.writer-html5 .rst-content dl.citation>dt>span.brackets:after,html.writer-html5 .rst-content dl.footnote>dt>span.brackets:after{content:"]"}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref,html.writer-html5 .rst-content 
dl.footnote>dt>span.fn-backref{text-align:left;font-style:italic;margin-left:.65rem;word-break:break-word;word-spacing:-.1rem;max-width:5rem}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref>a,html.writer-html5 .rst-content dl.footnote>dt>span.fn-backref>a{word-break:keep-all}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref>a:not(:first-child):before,html.writer-html5 .rst-content dl.footnote>dt>span.fn-backref>a:not(:first-child):before{content:" "}html.writer-html5 .rst-content dl.citation>dd,html.writer-html5 .rst-content dl.footnote>dd{margin:0 0 .5rem;line-height:1.2rem}html.writer-html5 .rst-content dl.citation>dd p,html.writer-html5 .rst-content dl.footnote>dd p{font-size:.9rem}html.writer-html5 .rst-content aside.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content div.citation{padding-left:1rem;padding-right:1rem;font-size:.9rem;line-height:1.2rem}html.writer-html5 .rst-content aside.citation p,html.writer-html5 .rst-content aside.footnote p,html.writer-html5 .rst-content div.citation p{font-size:.9rem;line-height:1.2rem;margin-bottom:12px}html.writer-html5 .rst-content aside.citation span.backrefs,html.writer-html5 .rst-content aside.footnote span.backrefs,html.writer-html5 .rst-content div.citation span.backrefs{text-align:left;font-style:italic;margin-left:.65rem;word-break:break-word;word-spacing:-.1rem;max-width:5rem}html.writer-html5 .rst-content aside.citation span.backrefs>a,html.writer-html5 .rst-content aside.footnote span.backrefs>a,html.writer-html5 .rst-content div.citation span.backrefs>a{word-break:keep-all}html.writer-html5 .rst-content aside.citation span.backrefs>a:not(:first-child):before,html.writer-html5 .rst-content aside.footnote span.backrefs>a:not(:first-child):before,html.writer-html5 .rst-content div.citation span.backrefs>a:not(:first-child):before{content:" "}html.writer-html5 .rst-content aside.citation span.label,html.writer-html5 .rst-content aside.footnote 
span.label,html.writer-html5 .rst-content div.citation span.label{line-height:1.2rem}html.writer-html5 .rst-content aside.citation-list,html.writer-html5 .rst-content aside.footnote-list,html.writer-html5 .rst-content div.citation-list{margin-bottom:24px}html.writer-html5 .rst-content dl.option-list kbd{font-size:.9rem}.rst-content table.docutils.footnote,html.writer-html4 .rst-content table.docutils.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content aside.footnote-list aside.footnote,html.writer-html5 .rst-content div.citation-list>div.citation,html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.footnote{color:grey}.rst-content table.docutils.footnote code,.rst-content table.docutils.footnote tt,html.writer-html4 .rst-content table.docutils.citation code,html.writer-html4 .rst-content table.docutils.citation tt,html.writer-html5 .rst-content aside.footnote-list aside.footnote code,html.writer-html5 .rst-content aside.footnote-list aside.footnote tt,html.writer-html5 .rst-content aside.footnote code,html.writer-html5 .rst-content aside.footnote tt,html.writer-html5 .rst-content div.citation-list>div.citation code,html.writer-html5 .rst-content div.citation-list>div.citation tt,html.writer-html5 .rst-content dl.citation code,html.writer-html5 .rst-content dl.citation tt,html.writer-html5 .rst-content dl.footnote code,html.writer-html5 .rst-content dl.footnote tt{color:#555}.rst-content .wy-table-responsive.citation,.rst-content .wy-table-responsive.footnote{margin-bottom:0}.rst-content .wy-table-responsive.citation+:not(.citation),.rst-content .wy-table-responsive.footnote+:not(.footnote){margin-top:24px}.rst-content .wy-table-responsive.citation:last-child,.rst-content .wy-table-responsive.footnote:last-child{margin-bottom:24px}.rst-content table.docutils th{border-color:#e1e4e5}html.writer-html5 .rst-content table.docutils th{border:1px solid #e1e4e5}html.writer-html5 .rst-content table.docutils 
td>p,html.writer-html5 .rst-content table.docutils th>p{line-height:1rem;margin-bottom:0;font-size:.9rem}.rst-content table.docutils td .last,.rst-content table.docutils td .last>:last-child{margin-bottom:0}.rst-content table.field-list,.rst-content table.field-list td{border:none}.rst-content table.field-list td p{line-height:inherit}.rst-content table.field-list td>strong{display:inline-block}.rst-content table.field-list .field-name{padding-right:10px;text-align:left;white-space:nowrap}.rst-content table.field-list .field-body{text-align:left}.rst-content code,.rst-content tt{color:#000;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;padding:2px 5px}.rst-content code big,.rst-content code em,.rst-content tt big,.rst-content tt em{font-size:100%!important;line-height:normal}.rst-content code.literal,.rst-content tt.literal{color:#e74c3c;white-space:normal}.rst-content code.xref,.rst-content tt.xref,a .rst-content code,a .rst-content tt{font-weight:700;color:#404040;overflow-wrap:normal}.rst-content kbd,.rst-content pre,.rst-content samp{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace}.rst-content a code,.rst-content a tt{color:#2980b9}.rst-content dl{margin-bottom:24px}.rst-content dl dt{font-weight:700;margin-bottom:12px}.rst-content dl ol,.rst-content dl p,.rst-content dl table,.rst-content dl ul{margin-bottom:12px}.rst-content dl dd{margin:0 0 12px 24px;line-height:24px}.rst-content dl dd>ol:last-child,.rst-content dl dd>p:last-child,.rst-content dl dd>table:last-child,.rst-content dl dd>ul:last-child{margin-bottom:0}html.writer-html4 .rst-content dl:not(.docutils),html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple){margin-bottom:24px}html.writer-html4 .rst-content dl:not(.docutils)>dt,html.writer-html5 .rst-content 
dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt{display:table;margin:6px 0;font-size:90%;line-height:normal;background:#e7f2fa;color:#2980b9;border-top:3px solid #6ab0de;padding:6px;position:relative}html.writer-html4 .rst-content dl:not(.docutils)>dt:before,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt:before{color:#6ab0de}html.writer-html4 .rst-content dl:not(.docutils)>dt .headerlink,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink{color:#404040;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt{margin-bottom:6px;border:none;border-left:3px solid #ccc;background:#f0f0f0;color:#555}html.writer-html4 .rst-content dl:not(.docutils) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink{color:#404040;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils)>dt:first-child,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt:first-child{margin-top:0}html.writer-html4 .rst-content dl:not(.docutils) code.descclassname,html.writer-html4 .rst-content dl:not(.docutils) 
code.descname,html.writer-html4 .rst-content dl:not(.docutils) tt.descclassname,html.writer-html4 .rst-content dl:not(.docutils) tt.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descname{background-color:transparent;border:none;padding:0;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils) code.descname,html.writer-html4 .rst-content dl:not(.docutils) tt.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descname{font-weight:700}html.writer-html4 .rst-content dl:not(.docutils) .optional,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .optional{display:inline-block;padding:0 4px;color:#000;font-weight:700}html.writer-html4 .rst-content dl:not(.docutils) .property,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .property{display:inline-block;padding-right:8px;max-width:100%}html.writer-html4 .rst-content dl:not(.docutils) .k,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .k{font-style:italic}html.writer-html4 
.rst-content dl:not(.docutils) .descclassname,html.writer-html4 .rst-content dl:not(.docutils) .descname,html.writer-html4 .rst-content dl:not(.docutils) .sig-name,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .sig-name{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;color:#000}.rst-content .viewcode-back,.rst-content .viewcode-link{display:inline-block;color:#27ae60;font-size:80%;padding-left:24px}.rst-content .viewcode-back{display:block;float:right}.rst-content p.rubric{margin-bottom:12px;font-weight:700}.rst-content code.download,.rst-content tt.download{background:inherit;padding:inherit;font-weight:400;font-family:inherit;font-size:inherit;color:inherit;border:inherit;white-space:inherit}.rst-content code.download span:first-child,.rst-content tt.download span:first-child{-webkit-font-smoothing:subpixel-antialiased}.rst-content code.download span:first-child:before,.rst-content tt.download span:first-child:before{margin-right:4px}.rst-content .guilabel,.rst-content .menuselection{font-size:80%;font-weight:700;border-radius:4px;padding:2.4px 6px;margin:auto 2px}.rst-content .guilabel,.rst-content .menuselection{border:1px solid #7fbbe3;background:#e7f2fa}.rst-content :not(dl.option-list)>:not(dt):not(kbd):not(.kbd)>.kbd,.rst-content :not(dl.option-list)>:not(dt):not(kbd):not(.kbd)>kbd{color:inherit;font-size:80%;background-color:#fff;border:1px solid #a6a6a6;border-radius:4px;box-shadow:0 2px grey;padding:2.4px 6px;margin:auto 0}.rst-content .versionmodified{font-style:italic}@media screen and (max-width:480px){.rst-content 
.sidebar{width:100%}}span[id*=MathJax-Span]{color:#404040}.math{text-align:center}@font-face{font-family:Lato;src:url(fonts/lato-normal.woff2?bd03a2cc277bbbc338d464e679fe9942) format("woff2"),url(fonts/lato-normal.woff?27bd77b9162d388cb8d4c4217c7c5e2a) format("woff");font-weight:400;font-style:normal;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-bold.woff2?cccb897485813c7c256901dbca54ecf2) format("woff2"),url(fonts/lato-bold.woff?d878b6c29b10beca227e9eef4246111b) format("woff");font-weight:700;font-style:normal;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-bold-italic.woff2?0b6bb6725576b072c5d0b02ecdd1900d) format("woff2"),url(fonts/lato-bold-italic.woff?9c7e4e9eb485b4a121c760e61bc3707c) format("woff");font-weight:700;font-style:italic;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-normal-italic.woff2?4eb103b4d12be57cb1d040ed5e162e9d) format("woff2"),url(fonts/lato-normal-italic.woff?f28f2d6482446544ef1ea1ccc6dd5892) format("woff");font-weight:400;font-style:italic;font-display:block}@font-face{font-family:Roboto Slab;font-style:normal;font-weight:400;src:url(fonts/Roboto-Slab-Regular.woff2?7abf5b8d04d26a2cafea937019bca958) format("woff2"),url(fonts/Roboto-Slab-Regular.woff?c1be9284088d487c5e3ff0a10a92e58c) format("woff");font-display:block}@font-face{font-family:Roboto Slab;font-style:normal;font-weight:700;src:url(fonts/Roboto-Slab-Bold.woff2?9984f4a9bda09be08e83f2506954adbe) format("woff2"),url(fonts/Roboto-Slab-Bold.woff?bed5564a116b05148e3b3bea6fb1162a) format("woff");font-display:block} \ No newline at end of file diff --git a/docs/_static/doctools.js b/docs/_static/doctools.js new file mode 100644 index 00000000..4d67807d --- /dev/null +++ b/docs/_static/doctools.js @@ -0,0 +1,156 @@ +/* + * doctools.js + * ~~~~~~~~~~~ + * + * Base JavaScript utilities for all Sphinx HTML documentation. + * + * :copyright: Copyright 2007-2024 by the Sphinx team, see AUTHORS. 
+ * :license: BSD, see LICENSE for details. + * + */ +"use strict"; + +const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set([ + "TEXTAREA", + "INPUT", + "SELECT", + "BUTTON", +]); + +const _ready = (callback) => { + if (document.readyState !== "loading") { + callback(); + } else { + document.addEventListener("DOMContentLoaded", callback); + } +}; + +/** + * Small JavaScript module for the documentation. + */ +const Documentation = { + init: () => { + Documentation.initDomainIndexTable(); + Documentation.initOnKeyListeners(); + }, + + /** + * i18n support + */ + TRANSLATIONS: {}, + PLURAL_EXPR: (n) => (n === 1 ? 0 : 1), + LOCALE: "unknown", + + // gettext and ngettext don't access this so that the functions + // can safely bound to a different name (_ = Documentation.gettext) + gettext: (string) => { + const translated = Documentation.TRANSLATIONS[string]; + switch (typeof translated) { + case "undefined": + return string; // no translation + case "string": + return translated; // translation exists + default: + return translated[0]; // (singular, plural) translation tuple exists + } + }, + + ngettext: (singular, plural, n) => { + const translated = Documentation.TRANSLATIONS[singular]; + if (typeof translated !== "undefined") + return translated[Documentation.PLURAL_EXPR(n)]; + return n === 1 ? 
singular : plural; + }, + + addTranslations: (catalog) => { + Object.assign(Documentation.TRANSLATIONS, catalog.messages); + Documentation.PLURAL_EXPR = new Function( + "n", + `return (${catalog.plural_expr})` + ); + Documentation.LOCALE = catalog.locale; + }, + + /** + * helper function to focus on search bar + */ + focusSearchBar: () => { + document.querySelectorAll("input[name=q]")[0]?.focus(); + }, + + /** + * Initialise the domain index toggle buttons + */ + initDomainIndexTable: () => { + const toggler = (el) => { + const idNumber = el.id.substr(7); + const toggledRows = document.querySelectorAll(`tr.cg-${idNumber}`); + if (el.src.substr(-9) === "minus.png") { + el.src = `${el.src.substr(0, el.src.length - 9)}plus.png`; + toggledRows.forEach((el) => (el.style.display = "none")); + } else { + el.src = `${el.src.substr(0, el.src.length - 8)}minus.png`; + toggledRows.forEach((el) => (el.style.display = "")); + } + }; + + const togglerElements = document.querySelectorAll("img.toggler"); + togglerElements.forEach((el) => + el.addEventListener("click", (event) => toggler(event.currentTarget)) + ); + togglerElements.forEach((el) => (el.style.display = "")); + if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler); + }, + + initOnKeyListeners: () => { + // only install a listener if it is really needed + if ( + !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS && + !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS + ) + return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.altKey || event.ctrlKey || event.metaKey) return; + + if (!event.shiftKey) { + switch (event.key) { + case "ArrowLeft": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const prevLink = document.querySelector('link[rel="prev"]'); + if (prevLink && prevLink.href) { + window.location.href = prevLink.href; + 
event.preventDefault(); + } + break; + case "ArrowRight": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const nextLink = document.querySelector('link[rel="next"]'); + if (nextLink && nextLink.href) { + window.location.href = nextLink.href; + event.preventDefault(); + } + break; + } + } + + // some keyboard layouts may need Shift to get / + switch (event.key) { + case "/": + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break; + Documentation.focusSearchBar(); + event.preventDefault(); + } + }); + }, +}; + +// quick alias for translations +const _ = Documentation.gettext; + +_ready(Documentation.init); diff --git a/docs/_static/documentation_options.js b/docs/_static/documentation_options.js new file mode 100644 index 00000000..884b5727 --- /dev/null +++ b/docs/_static/documentation_options.js @@ -0,0 +1,13 @@ +const DOCUMENTATION_OPTIONS = { + VERSION: '24.6.0+10.g15fb19e', + LANGUAGE: 'en', + COLLAPSE_INDEX: false, + BUILDER: 'html', + FILE_SUFFIX: '.html', + LINK_SUFFIX: '.html', + HAS_SOURCE: true, + SOURCELINK_SUFFIX: '.txt', + NAVIGATION_WITH_KEYS: false, + SHOW_SEARCH_SUMMARY: true, + ENABLE_SEARCH_SHORTCUTS: true, +}; \ No newline at end of file diff --git a/docs/_static/file.png b/docs/_static/file.png new file mode 100644 index 00000000..a858a410 Binary files /dev/null and b/docs/_static/file.png differ diff --git a/docs/_static/jquery.js b/docs/_static/jquery.js new file mode 100644 index 00000000..c4c6022f --- /dev/null +++ b/docs/_static/jquery.js @@ -0,0 +1,2 @@ +/*! 
jQuery v3.6.0 | (c) OpenJS Foundation and other contributors | jquery.org/license */ +!function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,function(C,e){"use strict";var t=[],r=Object.getPrototypeOf,s=t.slice,g=t.flat?function(e){return t.flat.call(e)}:function(e){return t.concat.apply([],e)},u=t.push,i=t.indexOf,n={},o=n.toString,v=n.hasOwnProperty,a=v.toString,l=a.call(Object),y={},m=function(e){return"function"==typeof e&&"number"!=typeof e.nodeType&&"function"!=typeof e.item},x=function(e){return null!=e&&e===e.window},E=C.document,c={type:!0,src:!0,nonce:!0,noModule:!0};function b(e,t,n){var r,i,o=(n=n||E).createElement("script");if(o.text=e,t)for(r in c)(i=t[r]||t.getAttribute&&t.getAttribute(r))&&o.setAttribute(r,i);n.head.appendChild(o).parentNode.removeChild(o)}function w(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?n[o.call(e)]||"object":typeof e}var f="3.6.0",S=function(e,t){return new S.fn.init(e,t)};function p(e){var t=!!e&&"length"in e&&e.length,n=w(e);return!m(e)&&!x(e)&&("array"===n||0===t||"number"==typeof t&&0+~]|"+M+")"+M+"*"),U=new RegExp(M+"|>"),X=new RegExp(F),V=new RegExp("^"+I+"$"),G={ID:new RegExp("^#("+I+")"),CLASS:new RegExp("^\\.("+I+")"),TAG:new RegExp("^("+I+"|[*])"),ATTR:new RegExp("^"+W),PSEUDO:new RegExp("^"+F),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+M+"*(even|odd|(([+-]|)(\\d*)n|)"+M+"*(?:([+-]|)"+M+"*(\\d+)|))"+M+"*\\)|)","i"),bool:new RegExp("^(?:"+R+")$","i"),needsContext:new RegExp("^"+M+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+M+"*((?:-\\d)?\\d*)"+M+"*\\)|)(?=[^-]|$)","i")},Y=/HTML$/i,Q=/^(?:input|select|textarea|button)$/i,J=/^h\d$/i,K=/^[^{]+\{\s*\[native \w/,Z=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,ee=/[+~]/,te=new 
RegExp("\\\\[\\da-fA-F]{1,6}"+M+"?|\\\\([^\\r\\n\\f])","g"),ne=function(e,t){var n="0x"+e.slice(1)-65536;return t||(n<0?String.fromCharCode(n+65536):String.fromCharCode(n>>10|55296,1023&n|56320))},re=/([\0-\x1f\x7f]|^-?\d)|^-$|[^\0-\x1f\x7f-\uFFFF\w-]/g,ie=function(e,t){return t?"\0"===e?"\ufffd":e.slice(0,-1)+"\\"+e.charCodeAt(e.length-1).toString(16)+" ":"\\"+e},oe=function(){T()},ae=be(function(e){return!0===e.disabled&&"fieldset"===e.nodeName.toLowerCase()},{dir:"parentNode",next:"legend"});try{H.apply(t=O.call(p.childNodes),p.childNodes),t[p.childNodes.length].nodeType}catch(e){H={apply:t.length?function(e,t){L.apply(e,O.call(t))}:function(e,t){var n=e.length,r=0;while(e[n++]=t[r++]);e.length=n-1}}}function se(t,e,n,r){var i,o,a,s,u,l,c,f=e&&e.ownerDocument,p=e?e.nodeType:9;if(n=n||[],"string"!=typeof t||!t||1!==p&&9!==p&&11!==p)return n;if(!r&&(T(e),e=e||C,E)){if(11!==p&&(u=Z.exec(t)))if(i=u[1]){if(9===p){if(!(a=e.getElementById(i)))return n;if(a.id===i)return n.push(a),n}else if(f&&(a=f.getElementById(i))&&y(e,a)&&a.id===i)return n.push(a),n}else{if(u[2])return H.apply(n,e.getElementsByTagName(t)),n;if((i=u[3])&&d.getElementsByClassName&&e.getElementsByClassName)return H.apply(n,e.getElementsByClassName(i)),n}if(d.qsa&&!N[t+" "]&&(!v||!v.test(t))&&(1!==p||"object"!==e.nodeName.toLowerCase())){if(c=t,f=e,1===p&&(U.test(t)||z.test(t))){(f=ee.test(t)&&ye(e.parentNode)||e)===e&&d.scope||((s=e.getAttribute("id"))?s=s.replace(re,ie):e.setAttribute("id",s=S)),o=(l=h(t)).length;while(o--)l[o]=(s?"#"+s:":scope")+" "+xe(l[o]);c=l.join(",")}try{return H.apply(n,f.querySelectorAll(c)),n}catch(e){N(t,!0)}finally{s===S&&e.removeAttribute("id")}}}return g(t.replace($,"$1"),e,n,r)}function ue(){var r=[];return function e(t,n){return r.push(t+" ")>b.cacheLength&&delete e[r.shift()],e[t+" "]=n}}function le(e){return e[S]=!0,e}function ce(e){var t=C.createElement("fieldset");try{return!!e(t)}catch(e){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function 
fe(e,t){var n=e.split("|"),r=n.length;while(r--)b.attrHandle[n[r]]=t}function pe(e,t){var n=t&&e,r=n&&1===e.nodeType&&1===t.nodeType&&e.sourceIndex-t.sourceIndex;if(r)return r;if(n)while(n=n.nextSibling)if(n===t)return-1;return e?1:-1}function de(t){return function(e){return"input"===e.nodeName.toLowerCase()&&e.type===t}}function he(n){return function(e){var t=e.nodeName.toLowerCase();return("input"===t||"button"===t)&&e.type===n}}function ge(t){return function(e){return"form"in e?e.parentNode&&!1===e.disabled?"label"in e?"label"in e.parentNode?e.parentNode.disabled===t:e.disabled===t:e.isDisabled===t||e.isDisabled!==!t&&ae(e)===t:e.disabled===t:"label"in e&&e.disabled===t}}function ve(a){return le(function(o){return o=+o,le(function(e,t){var n,r=a([],e.length,o),i=r.length;while(i--)e[n=r[i]]&&(e[n]=!(t[n]=e[n]))})})}function ye(e){return e&&"undefined"!=typeof e.getElementsByTagName&&e}for(e in d=se.support={},i=se.isXML=function(e){var t=e&&e.namespaceURI,n=e&&(e.ownerDocument||e).documentElement;return!Y.test(t||n&&n.nodeName||"HTML")},T=se.setDocument=function(e){var t,n,r=e?e.ownerDocument||e:p;return r!=C&&9===r.nodeType&&r.documentElement&&(a=(C=r).documentElement,E=!i(C),p!=C&&(n=C.defaultView)&&n.top!==n&&(n.addEventListener?n.addEventListener("unload",oe,!1):n.attachEvent&&n.attachEvent("onunload",oe)),d.scope=ce(function(e){return a.appendChild(e).appendChild(C.createElement("div")),"undefined"!=typeof e.querySelectorAll&&!e.querySelectorAll(":scope fieldset div").length}),d.attributes=ce(function(e){return e.className="i",!e.getAttribute("className")}),d.getElementsByTagName=ce(function(e){return e.appendChild(C.createComment("")),!e.getElementsByTagName("*").length}),d.getElementsByClassName=K.test(C.getElementsByClassName),d.getById=ce(function(e){return a.appendChild(e).id=S,!C.getElementsByName||!C.getElementsByName(S).length}),d.getById?(b.filter.ID=function(e){var t=e.replace(te,ne);return function(e){return 
e.getAttribute("id")===t}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var n=t.getElementById(e);return n?[n]:[]}}):(b.filter.ID=function(e){var n=e.replace(te,ne);return function(e){var t="undefined"!=typeof e.getAttributeNode&&e.getAttributeNode("id");return t&&t.value===n}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var n,r,i,o=t.getElementById(e);if(o){if((n=o.getAttributeNode("id"))&&n.value===e)return[o];i=t.getElementsByName(e),r=0;while(o=i[r++])if((n=o.getAttributeNode("id"))&&n.value===e)return[o]}return[]}}),b.find.TAG=d.getElementsByTagName?function(e,t){return"undefined"!=typeof t.getElementsByTagName?t.getElementsByTagName(e):d.qsa?t.querySelectorAll(e):void 0}:function(e,t){var n,r=[],i=0,o=t.getElementsByTagName(e);if("*"===e){while(n=o[i++])1===n.nodeType&&r.push(n);return r}return o},b.find.CLASS=d.getElementsByClassName&&function(e,t){if("undefined"!=typeof t.getElementsByClassName&&E)return t.getElementsByClassName(e)},s=[],v=[],(d.qsa=K.test(C.querySelectorAll))&&(ce(function(e){var t;a.appendChild(e).innerHTML="",e.querySelectorAll("[msallowcapture^='']").length&&v.push("[*^$]="+M+"*(?:''|\"\")"),e.querySelectorAll("[selected]").length||v.push("\\["+M+"*(?:value|"+R+")"),e.querySelectorAll("[id~="+S+"-]").length||v.push("~="),(t=C.createElement("input")).setAttribute("name",""),e.appendChild(t),e.querySelectorAll("[name='']").length||v.push("\\["+M+"*name"+M+"*="+M+"*(?:''|\"\")"),e.querySelectorAll(":checked").length||v.push(":checked"),e.querySelectorAll("a#"+S+"+*").length||v.push(".#.+[+~]"),e.querySelectorAll("\\\f"),v.push("[\\r\\n\\f]")}),ce(function(e){e.innerHTML="";var 
t=C.createElement("input");t.setAttribute("type","hidden"),e.appendChild(t).setAttribute("name","D"),e.querySelectorAll("[name=d]").length&&v.push("name"+M+"*[*^$|!~]?="),2!==e.querySelectorAll(":enabled").length&&v.push(":enabled",":disabled"),a.appendChild(e).disabled=!0,2!==e.querySelectorAll(":disabled").length&&v.push(":enabled",":disabled"),e.querySelectorAll("*,:x"),v.push(",.*:")})),(d.matchesSelector=K.test(c=a.matches||a.webkitMatchesSelector||a.mozMatchesSelector||a.oMatchesSelector||a.msMatchesSelector))&&ce(function(e){d.disconnectedMatch=c.call(e,"*"),c.call(e,"[s!='']:x"),s.push("!=",F)}),v=v.length&&new RegExp(v.join("|")),s=s.length&&new RegExp(s.join("|")),t=K.test(a.compareDocumentPosition),y=t||K.test(a.contains)?function(e,t){var n=9===e.nodeType?e.documentElement:e,r=t&&t.parentNode;return e===r||!(!r||1!==r.nodeType||!(n.contains?n.contains(r):e.compareDocumentPosition&&16&e.compareDocumentPosition(r)))}:function(e,t){if(t)while(t=t.parentNode)if(t===e)return!0;return!1},j=t?function(e,t){if(e===t)return l=!0,0;var n=!e.compareDocumentPosition-!t.compareDocumentPosition;return n||(1&(n=(e.ownerDocument||e)==(t.ownerDocument||t)?e.compareDocumentPosition(t):1)||!d.sortDetached&&t.compareDocumentPosition(e)===n?e==C||e.ownerDocument==p&&y(p,e)?-1:t==C||t.ownerDocument==p&&y(p,t)?1:u?P(u,e)-P(u,t):0:4&n?-1:1)}:function(e,t){if(e===t)return l=!0,0;var n,r=0,i=e.parentNode,o=t.parentNode,a=[e],s=[t];if(!i||!o)return e==C?-1:t==C?1:i?-1:o?1:u?P(u,e)-P(u,t):0;if(i===o)return pe(e,t);n=e;while(n=n.parentNode)a.unshift(n);n=t;while(n=n.parentNode)s.unshift(n);while(a[r]===s[r])r++;return r?pe(a[r],s[r]):a[r]==p?-1:s[r]==p?1:0}),C},se.matches=function(e,t){return se(e,null,null,t)},se.matchesSelector=function(e,t){if(T(e),d.matchesSelector&&E&&!N[t+" "]&&(!s||!s.test(t))&&(!v||!v.test(t)))try{var n=c.call(e,t);if(n||d.disconnectedMatch||e.document&&11!==e.document.nodeType)return n}catch(e){N(t,!0)}return 0":{dir:"parentNode",first:!0}," 
":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(te,ne),e[3]=(e[3]||e[4]||e[5]||"").replace(te,ne),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||se.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&se.error(e[0]),e},PSEUDO:function(e){var t,n=!e[6]&&e[2];return G.CHILD.test(e[0])?null:(e[3]?e[2]=e[4]||e[5]||"":n&&X.test(n)&&(t=h(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(te,ne).toLowerCase();return"*"===e?function(){return!0}:function(e){return e.nodeName&&e.nodeName.toLowerCase()===t}},CLASS:function(e){var t=m[e+" "];return t||(t=new RegExp("(^|"+M+")"+e+"("+M+"|$)"))&&m(e,function(e){return t.test("string"==typeof e.className&&e.className||"undefined"!=typeof e.getAttribute&&e.getAttribute("class")||"")})},ATTR:function(n,r,i){return function(e){var t=se.attr(e,n);return null==t?"!="===r:!r||(t+="","="===r?t===i:"!="===r?t!==i:"^="===r?i&&0===t.indexOf(i):"*="===r?i&&-1:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i;function j(e,n,r){return m(n)?S.grep(e,function(e,t){return!!n.call(e,t,e)!==r}):n.nodeType?S.grep(e,function(e){return e===n!==r}):"string"!=typeof n?S.grep(e,function(e){return-1)[^>]*|#([\w-]+))$/;(S.fn.init=function(e,t,n){var r,i;if(!e)return this;if(n=n||D,"string"==typeof e){if(!(r="<"===e[0]&&">"===e[e.length-1]&&3<=e.length?[null,e,null]:q.exec(e))||!r[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(r[1]){if(t=t instanceof S?t[0]:t,S.merge(this,S.parseHTML(r[1],t&&t.nodeType?t.ownerDocument||t:E,!0)),N.test(r[1])&&S.isPlainObject(t))for(r in t)m(this[r])?this[r](t[r]):this.attr(r,t[r]);return this}return(i=E.getElementById(r[2]))&&(this[0]=i,this.length=1),this}return 
e.nodeType?(this[0]=e,this.length=1,this):m(e)?void 0!==n.ready?n.ready(e):e(S):S.makeArray(e,this)}).prototype=S.fn,D=S(E);var L=/^(?:parents|prev(?:Until|All))/,H={children:!0,contents:!0,next:!0,prev:!0};function O(e,t){while((e=e[t])&&1!==e.nodeType);return e}S.fn.extend({has:function(e){var t=S(e,this),n=t.length;return this.filter(function(){for(var e=0;e\x20\t\r\n\f]*)/i,he=/^$|^module$|\/(?:java|ecma)script/i;ce=E.createDocumentFragment().appendChild(E.createElement("div")),(fe=E.createElement("input")).setAttribute("type","radio"),fe.setAttribute("checked","checked"),fe.setAttribute("name","t"),ce.appendChild(fe),y.checkClone=ce.cloneNode(!0).cloneNode(!0).lastChild.checked,ce.innerHTML="",y.noCloneChecked=!!ce.cloneNode(!0).lastChild.defaultValue,ce.innerHTML="",y.option=!!ce.lastChild;var ge={thead:[1,"","
"],col:[2,"","
"],tr:[2,"","
"],td:[3,"","
"],_default:[0,"",""]};function ve(e,t){var n;return n="undefined"!=typeof e.getElementsByTagName?e.getElementsByTagName(t||"*"):"undefined"!=typeof e.querySelectorAll?e.querySelectorAll(t||"*"):[],void 0===t||t&&A(e,t)?S.merge([e],n):n}function ye(e,t){for(var n=0,r=e.length;n",""]);var me=/<|&#?\w+;/;function xe(e,t,n,r,i){for(var o,a,s,u,l,c,f=t.createDocumentFragment(),p=[],d=0,h=e.length;d\s*$/g;function je(e,t){return A(e,"table")&&A(11!==t.nodeType?t:t.firstChild,"tr")&&S(e).children("tbody")[0]||e}function De(e){return e.type=(null!==e.getAttribute("type"))+"/"+e.type,e}function qe(e){return"true/"===(e.type||"").slice(0,5)?e.type=e.type.slice(5):e.removeAttribute("type"),e}function Le(e,t){var n,r,i,o,a,s;if(1===t.nodeType){if(Y.hasData(e)&&(s=Y.get(e).events))for(i in Y.remove(t,"handle events"),s)for(n=0,r=s[i].length;n").attr(n.scriptAttrs||{}).prop({charset:n.scriptCharset,src:n.url}).on("load error",i=function(e){r.remove(),i=null,e&&t("error"===e.type?404:200,e.type)}),E.head.appendChild(r[0])},abort:function(){i&&i()}}});var _t,zt=[],Ut=/(=)\?(?=&|$)|\?\?/;S.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=zt.pop()||S.expando+"_"+wt.guid++;return this[e]=!0,e}}),S.ajaxPrefilter("json jsonp",function(e,t,n){var r,i,o,a=!1!==e.jsonp&&(Ut.test(e.url)?"url":"string"==typeof e.data&&0===(e.contentType||"").indexOf("application/x-www-form-urlencoded")&&Ut.test(e.data)&&"data");if(a||"jsonp"===e.dataTypes[0])return r=e.jsonpCallback=m(e.jsonpCallback)?e.jsonpCallback():e.jsonpCallback,a?e[a]=e[a].replace(Ut,"$1"+r):!1!==e.jsonp&&(e.url+=(Tt.test(e.url)?"&":"?")+e.jsonp+"="+r),e.converters["script json"]=function(){return o||S.error(r+" was not called"),o[0]},e.dataTypes[0]="json",i=C[r],C[r]=function(){o=arguments},n.always(function(){void 0===i?S(C).removeProp(r):C[r]=i,e[r]&&(e.jsonpCallback=t.jsonpCallback,zt.push(r)),o&&m(i)&&i(o[0]),o=i=void 0}),"script"}),y.createHTMLDocument=((_t=E.implementation.createHTMLDocument("").body).innerHTML="
",2===_t.childNodes.length),S.parseHTML=function(e,t,n){return"string"!=typeof e?[]:("boolean"==typeof t&&(n=t,t=!1),t||(y.createHTMLDocument?((r=(t=E.implementation.createHTMLDocument("")).createElement("base")).href=E.location.href,t.head.appendChild(r)):t=E),o=!n&&[],(i=N.exec(e))?[t.createElement(i[1])]:(i=xe([e],t,o),o&&o.length&&S(o).remove(),S.merge([],i.childNodes)));var r,i,o},S.fn.load=function(e,t,n){var r,i,o,a=this,s=e.indexOf(" ");return-1").append(S.parseHTML(e)).find(r):e)}).always(n&&function(e,t){a.each(function(){n.apply(this,o||[e.responseText,t,e])})}),this},S.expr.pseudos.animated=function(t){return S.grep(S.timers,function(e){return t===e.elem}).length},S.offset={setOffset:function(e,t,n){var r,i,o,a,s,u,l=S.css(e,"position"),c=S(e),f={};"static"===l&&(e.style.position="relative"),s=c.offset(),o=S.css(e,"top"),u=S.css(e,"left"),("absolute"===l||"fixed"===l)&&-1<(o+u).indexOf("auto")?(a=(r=c.position()).top,i=r.left):(a=parseFloat(o)||0,i=parseFloat(u)||0),m(t)&&(t=t.call(e,n,S.extend({},s))),null!=t.top&&(f.top=t.top-s.top+a),null!=t.left&&(f.left=t.left-s.left+i),"using"in t?t.using.call(e,f):c.css(f)}},S.fn.extend({offset:function(t){if(arguments.length)return void 0===t?this:this.each(function(e){S.offset.setOffset(this,t,e)});var e,n,r=this[0];return r?r.getClientRects().length?(e=r.getBoundingClientRect(),n=r.ownerDocument.defaultView,{top:e.top+n.pageYOffset,left:e.left+n.pageXOffset}):{top:0,left:0}:void 0},position:function(){if(this[0]){var 
e,t,n,r=this[0],i={top:0,left:0};if("fixed"===S.css(r,"position"))t=r.getBoundingClientRect();else{t=this.offset(),n=r.ownerDocument,e=r.offsetParent||n.documentElement;while(e&&(e===n.body||e===n.documentElement)&&"static"===S.css(e,"position"))e=e.parentNode;e&&e!==r&&1===e.nodeType&&((i=S(e).offset()).top+=S.css(e,"borderTopWidth",!0),i.left+=S.css(e,"borderLeftWidth",!0))}return{top:t.top-i.top-S.css(r,"marginTop",!0),left:t.left-i.left-S.css(r,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var e=this.offsetParent;while(e&&"static"===S.css(e,"position"))e=e.offsetParent;return e||re})}}),S.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(t,i){var o="pageYOffset"===i;S.fn[t]=function(e){return $(this,function(e,t,n){var r;if(x(e)?r=e:9===e.nodeType&&(r=e.defaultView),void 0===n)return r?r[i]:e[t];r?r.scrollTo(o?r.pageXOffset:n,o?n:r.pageYOffset):e[t]=n},t,e,arguments.length)}}),S.each(["top","left"],function(e,n){S.cssHooks[n]=Fe(y.pixelPosition,function(e,t){if(t)return t=We(e,n),Pe.test(t)?S(e).position()[n]+"px":t})}),S.each({Height:"height",Width:"width"},function(a,s){S.each({padding:"inner"+a,content:s,"":"outer"+a},function(r,o){S.fn[o]=function(e,t){var n=arguments.length&&(r||"boolean"!=typeof e),i=r||(!0===e||!0===t?"margin":"border");return $(this,function(e,t,n){var r;return x(e)?0===o.indexOf("outer")?e["inner"+a]:e.document.documentElement["client"+a]:9===e.nodeType?(r=e.documentElement,Math.max(e.body["scroll"+a],r["scroll"+a],e.body["offset"+a],r["offset"+a],r["client"+a])):void 0===n?S.css(e,t,i):S.style(e,t,n,i)},s,n?e:void 0,n)}})}),S.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(e,t){S.fn[t]=function(e){return this.on(t,e)}}),S.fn.extend({bind:function(e,t,n){return this.on(e,null,t,n)},unbind:function(e,t){return this.off(e,null,t)},delegate:function(e,t,n,r){return this.on(t,e,n,r)},undelegate:function(e,t,n){return 
1===arguments.length?this.off(e,"**"):this.off(t,e||"**",n)},hover:function(e,t){return this.mouseenter(e).mouseleave(t||e)}}),S.each("blur focus focusin focusout resize scroll click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup contextmenu".split(" "),function(e,n){S.fn[n]=function(e,t){return 0",d.insertBefore(c.lastChild,d.firstChild)}function d(){var a=y.elements;return"string"==typeof a?a.split(" "):a}function e(a,b){var c=y.elements;"string"!=typeof c&&(c=c.join(" ")),"string"!=typeof a&&(a=a.join(" ")),y.elements=c+" "+a,j(b)}function f(a){var b=x[a[v]];return b||(b={},w++,a[v]=w,x[w]=b),b}function g(a,c,d){if(c||(c=b),q)return c.createElement(a);d||(d=f(c));var e;return e=d.cache[a]?d.cache[a].cloneNode():u.test(a)?(d.cache[a]=d.createElem(a)).cloneNode():d.createElem(a),!e.canHaveChildren||t.test(a)||e.tagUrn?e:d.frag.appendChild(e)}function h(a,c){if(a||(a=b),q)return a.createDocumentFragment();c=c||f(a);for(var e=c.frag.cloneNode(),g=0,h=d(),i=h.length;i>g;g++)e.createElement(h[g]);return e}function i(a,b){b.cache||(b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag()),a.createElement=function(c){return y.shivMethods?g(c,a,b):b.createElem(c)},a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+d().join().replace(/[\w\-:]+/g,function(a){return b.createElem(a),b.frag.createElement(a),'c("'+a+'")'})+");return n}")(y,b.frag)}function j(a){a||(a=b);var d=f(a);return!y.shivCSS||p||d.hasCSS||(d.hasCSS=!!c(a,"article,aside,dialog,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}template{display:none}")),q||i(a,d),a}function k(a){for(var b,c=a.getElementsByTagName("*"),e=c.length,f=RegExp("^(?:"+d().join("|")+")$","i"),g=[];e--;)b=c[e],f.test(b.nodeName)&&g.push(b.applyElement(l(b)));return g}function l(a){for(var 
b,c=a.attributes,d=c.length,e=a.ownerDocument.createElement(A+":"+a.nodeName);d--;)b=c[d],b.specified&&e.setAttribute(b.nodeName,b.nodeValue);return e.style.cssText=a.style.cssText,e}function m(a){for(var b,c=a.split("{"),e=c.length,f=RegExp("(^|[\\s,>+~])("+d().join("|")+")(?=[[\\s,>+~#.:]|$)","gi"),g="$1"+A+"\\:$2";e--;)b=c[e]=c[e].split("}"),b[b.length-1]=b[b.length-1].replace(f,g),c[e]=b.join("}");return c.join("{")}function n(a){for(var b=a.length;b--;)a[b].removeNode()}function o(a){function b(){clearTimeout(g._removeSheetTimer),d&&d.removeNode(!0),d=null}var d,e,g=f(a),h=a.namespaces,i=a.parentWindow;return!B||a.printShived?a:("undefined"==typeof h[A]&&h.add(A),i.attachEvent("onbeforeprint",function(){b();for(var f,g,h,i=a.styleSheets,j=[],l=i.length,n=Array(l);l--;)n[l]=i[l];for(;h=n.pop();)if(!h.disabled&&z.test(h.media)){try{f=h.imports,g=f.length}catch(o){g=0}for(l=0;g>l;l++)n.push(f[l]);try{j.push(h.cssText)}catch(o){}}j=m(j.reverse().join("")),e=k(a),d=c(a,j)}),i.attachEvent("onafterprint",function(){n(e),clearTimeout(g._removeSheetTimer),g._removeSheetTimer=setTimeout(b,500)}),a.printShived=!0,a)}var p,q,r="3.7.3",s=a.html5||{},t=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,u=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,v="_html5shiv",w=0,x={};!function(){try{var a=b.createElement("a");a.innerHTML="",p="hidden"in a,q=1==a.childNodes.length||function(){b.createElement("a");var a=b.createDocumentFragment();return"undefined"==typeof a.cloneNode||"undefined"==typeof a.createDocumentFragment||"undefined"==typeof a.createElement}()}catch(c){p=!0,q=!0}}();var y={elements:s.elements||"abbr article aside audio bdi canvas data datalist details dialog figcaption figure footer header hgroup main mark meter nav output picture progress section summary template time 
video",version:r,shivCSS:s.shivCSS!==!1,supportsUnknownElements:q,shivMethods:s.shivMethods!==!1,type:"default",shivDocument:j,createElement:g,createDocumentFragment:h,addElements:e};a.html5=y,j(b);var z=/^$|\b(?:all|print)\b/,A="html5shiv",B=!q&&function(){var c=b.documentElement;return!("undefined"==typeof b.namespaces||"undefined"==typeof b.parentWindow||"undefined"==typeof c.applyElement||"undefined"==typeof c.removeNode||"undefined"==typeof a.attachEvent)}();y.type+=" print",y.shivPrint=o,o(b),"object"==typeof module&&module.exports&&(module.exports=y)}("undefined"!=typeof window?window:this,document); \ No newline at end of file diff --git a/docs/_static/js/html5shiv.min.js b/docs/_static/js/html5shiv.min.js new file mode 100644 index 00000000..cd1c674f --- /dev/null +++ b/docs/_static/js/html5shiv.min.js @@ -0,0 +1,4 @@ +/** +* @preserve HTML5 Shiv 3.7.3 | @afarkas @jdalton @jon_neal @rem | MIT/GPL2 Licensed +*/ +!function(a,b){function c(a,b){var c=a.createElement("p"),d=a.getElementsByTagName("head")[0]||a.documentElement;return c.innerHTML="x",d.insertBefore(c.lastChild,d.firstChild)}function d(){var a=t.elements;return"string"==typeof a?a.split(" "):a}function e(a,b){var c=t.elements;"string"!=typeof c&&(c=c.join(" ")),"string"!=typeof a&&(a=a.join(" ")),t.elements=c+" "+a,j(b)}function f(a){var b=s[a[q]];return b||(b={},r++,a[q]=r,s[r]=b),b}function g(a,c,d){if(c||(c=b),l)return c.createElement(a);d||(d=f(c));var e;return e=d.cache[a]?d.cache[a].cloneNode():p.test(a)?(d.cache[a]=d.createElem(a)).cloneNode():d.createElem(a),!e.canHaveChildren||o.test(a)||e.tagUrn?e:d.frag.appendChild(e)}function h(a,c){if(a||(a=b),l)return a.createDocumentFragment();c=c||f(a);for(var e=c.frag.cloneNode(),g=0,h=d(),i=h.length;i>g;g++)e.createElement(h[g]);return e}function i(a,b){b.cache||(b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag()),a.createElement=function(c){return 
t.shivMethods?g(c,a,b):b.createElem(c)},a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+d().join().replace(/[\w\-:]+/g,function(a){return b.createElem(a),b.frag.createElement(a),'c("'+a+'")'})+");return n}")(t,b.frag)}function j(a){a||(a=b);var d=f(a);return!t.shivCSS||k||d.hasCSS||(d.hasCSS=!!c(a,"article,aside,dialog,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}template{display:none}")),l||i(a,d),a}var k,l,m="3.7.3-pre",n=a.html5||{},o=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,p=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,q="_html5shiv",r=0,s={};!function(){try{var a=b.createElement("a");a.innerHTML="",k="hidden"in a,l=1==a.childNodes.length||function(){b.createElement("a");var a=b.createDocumentFragment();return"undefined"==typeof a.cloneNode||"undefined"==typeof a.createDocumentFragment||"undefined"==typeof a.createElement}()}catch(c){k=!0,l=!0}}();var t={elements:n.elements||"abbr article aside audio bdi canvas data datalist details dialog figcaption figure footer header hgroup main mark meter nav output picture progress section summary template time video",version:m,shivCSS:n.shivCSS!==!1,supportsUnknownElements:l,shivMethods:n.shivMethods!==!1,type:"default",shivDocument:j,createElement:g,createDocumentFragment:h,addElements:e};a.html5=t,j(b),"object"==typeof module&&module.exports&&(module.exports=t)}("undefined"!=typeof window?window:this,document); \ No newline at end of file diff --git a/docs/_static/js/theme.js b/docs/_static/js/theme.js new file mode 100644 index 00000000..1fddb6ee --- /dev/null +++ b/docs/_static/js/theme.js @@ -0,0 +1 @@ +!function(n){var e={};function t(i){if(e[i])return e[i].exports;var o=e[i]={i:i,l:!1,exports:{}};return 
n[i].call(o.exports,o,o.exports,t),o.l=!0,o.exports}t.m=n,t.c=e,t.d=function(n,e,i){t.o(n,e)||Object.defineProperty(n,e,{enumerable:!0,get:i})},t.r=function(n){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(n,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(n,"__esModule",{value:!0})},t.t=function(n,e){if(1&e&&(n=t(n)),8&e)return n;if(4&e&&"object"==typeof n&&n&&n.__esModule)return n;var i=Object.create(null);if(t.r(i),Object.defineProperty(i,"default",{enumerable:!0,value:n}),2&e&&"string"!=typeof n)for(var o in n)t.d(i,o,function(e){return n[e]}.bind(null,o));return i},t.n=function(n){var e=n&&n.__esModule?function(){return n.default}:function(){return n};return t.d(e,"a",e),e},t.o=function(n,e){return Object.prototype.hasOwnProperty.call(n,e)},t.p="",t(t.s=0)}([function(n,e,t){t(1),n.exports=t(3)},function(n,e,t){(function(){var e="undefined"!=typeof window?window.jQuery:t(2);n.exports.ThemeNav={navBar:null,win:null,winScroll:!1,winResize:!1,linkScroll:!1,winPosition:0,winHeight:null,docHeight:null,isRunning:!1,enable:function(n){var t=this;void 0===n&&(n=!0),t.isRunning||(t.isRunning=!0,e((function(e){t.init(e),t.reset(),t.win.on("hashchange",t.reset),n&&t.win.on("scroll",(function(){t.linkScroll||t.winScroll||(t.winScroll=!0,requestAnimationFrame((function(){t.onScroll()})))})),t.win.on("resize",(function(){t.winResize||(t.winResize=!0,requestAnimationFrame((function(){t.onResize()})))})),t.onResize()})))},enableSticky:function(){this.enable(!0)},init:function(n){n(document);var e=this;this.navBar=n("div.wy-side-scroll:first"),this.win=n(window),n(document).on("click","[data-toggle='wy-nav-top']",(function(){n("[data-toggle='wy-nav-shift']").toggleClass("shift"),n("[data-toggle='rst-versions']").toggleClass("shift")})).on("click",".wy-menu-vertical .current ul li a",(function(){var 
t=n(this);n("[data-toggle='wy-nav-shift']").removeClass("shift"),n("[data-toggle='rst-versions']").toggleClass("shift"),e.toggleCurrent(t),e.hashChange()})).on("click","[data-toggle='rst-current-version']",(function(){n("[data-toggle='rst-versions']").toggleClass("shift-up")})),n("table.docutils:not(.field-list,.footnote,.citation)").wrap("
"),n("table.docutils.footnote").wrap("
"),n("table.docutils.citation").wrap("
"),n(".wy-menu-vertical ul").not(".simple").siblings("a").each((function(){var t=n(this);expand=n(''),expand.on("click",(function(n){return e.toggleCurrent(t),n.stopPropagation(),!1})),t.prepend(expand)}))},reset:function(){var n=encodeURI(window.location.hash)||"#";try{var e=$(".wy-menu-vertical"),t=e.find('[href="'+n+'"]');if(0===t.length){var i=$('.document [id="'+n.substring(1)+'"]').closest("div.section");0===(t=e.find('[href="#'+i.attr("id")+'"]')).length&&(t=e.find('[href="#"]'))}if(t.length>0){$(".wy-menu-vertical .current").removeClass("current").attr("aria-expanded","false"),t.addClass("current").attr("aria-expanded","true"),t.closest("li.toctree-l1").parent().addClass("current").attr("aria-expanded","true");for(let n=1;n<=10;n++)t.closest("li.toctree-l"+n).addClass("current").attr("aria-expanded","true");t[0].scrollIntoView()}}catch(n){console.log("Error expanding nav for anchor",n)}},onScroll:function(){this.winScroll=!1;var n=this.win.scrollTop(),e=n+this.winHeight,t=this.navBar.scrollTop()+(n-this.winPosition);n<0||e>this.docHeight||(this.navBar.scrollTop(t),this.winPosition=n)},onResize:function(){this.winResize=!1,this.winHeight=this.win.height(),this.docHeight=$(document).height()},hashChange:function(){this.linkScroll=!0,this.win.one("hashchange",(function(){this.linkScroll=!1}))},toggleCurrent:function(n){var e=n.closest("li");e.siblings("li.current").removeClass("current").attr("aria-expanded","false"),e.siblings().find("li.current").removeClass("current").attr("aria-expanded","false");var t=e.find("> ul li");t.length&&(t.removeClass("current").attr("aria-expanded","false"),e.toggleClass("current").attr("aria-expanded",(function(n,e){return"true"==e?"false":"true"})))}},"undefined"!=typeof window&&(window.SphinxRtdTheme={Navigation:n.exports.ThemeNav,StickyNav:n.exports.ThemeNav}),function(){for(var n=0,e=["ms","moz","webkit","o"],t=0;t .katex { + max-width: 100%; +} +.katex-display > .katex > .katex-html { + max-width: 100%; + overflow-x: auto; 
+ overflow-y: hidden; + padding-left: 2px; + padding-right: 2px; + padding-bottom: 1px; + padding-top: 3px; +} +/* Increase margin around equations */ +.katex-display { + margin: 1.2em 0; +} +/* Equation number floats to the right and shows permalink for mouse hover + on the right side of equation number. */ +div.math { + position: relative; + padding-right: 2.5em; +} +.eqno { + height: 100%; + position: absolute; + right: 0; + padding-left: 5px; + padding-bottom: 5px; + padding-right: 1px; +} +.eqno:before { + /* Force vertical alignment of number */ + display: inline-block; + height: 100%; + vertical-align: middle; + content: ""; +} +.eqno .headerlink { + display: none; + visibility: hidden; + font-size: 14px; + padding-left: .3em; +} +.eqno:hover .headerlink { + display: inline-block; + visibility: visible; + margin-right: -1.05em; +} diff --git a/docs/_static/katex.min.js b/docs/_static/katex.min.js new file mode 100644 index 00000000..e701cfde --- /dev/null +++ b/docs/_static/katex.min.js @@ -0,0 +1 @@ +!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define([],t):"object"==typeof exports?exports.katex=t():e.katex=t()}("undefined"!=typeof self?self:this,(function(){return function(){"use strict";var e={d:function(t,r){for(var n in r)e.o(r,n)&&!e.o(t,n)&&Object.defineProperty(t,n,{enumerable:!0,get:r[n]})},o:function(e,t){return Object.prototype.hasOwnProperty.call(e,t)}},t={};e.d(t,{default:function(){return Yn}});class r{constructor(e,t){this.name=void 0,this.position=void 0,this.length=void 0,this.rawMessage=void 0;let n,o,s="KaTeX parse error: "+e;const i=t&&t.loc;if(i&&i.start<=i.end){const e=i.lexer.input;n=i.start,o=i.end,n===e.length?s+=" at end of input: ":s+=" at position "+(n+1)+": ";const t=e.slice(n,o).replace(/[^]/g,"$&\u0332");let r,a;r=n>15?"\u2026"+e.slice(n-15,n):e.slice(0,n),a=o+15":">","<":"<",'"':""","'":"'"},i=/[&><"']/g;const 
a=function(e){return"ordgroup"===e.type||"color"===e.type?1===e.body.length?a(e.body[0]):e:"font"===e.type?a(e.body):e};var l={contains:function(e,t){return-1!==e.indexOf(t)},deflt:function(e,t){return void 0===e?t:e},escape:function(e){return String(e).replace(i,(e=>s[e]))},hyphenate:function(e){return e.replace(o,"-$1").toLowerCase()},getBaseElem:a,isCharacterBox:function(e){const t=a(e);return"mathord"===t.type||"textord"===t.type||"atom"===t.type},protocolFromUrl:function(e){const t=/^[\x00-\x20]*([^\\/#?]*?)(:|�*58|�*3a|&colon)/i.exec(e);return t?":"!==t[2]?null:/^[a-zA-Z][a-zA-Z0-9+\-.]*$/.test(t[1])?t[1].toLowerCase():null:"_relative"}};const h={displayMode:{type:"boolean",description:"Render math in display mode, which puts the math in display style (so \\int and \\sum are large, for example), and centers the math on the page on its own line.",cli:"-d, --display-mode"},output:{type:{enum:["htmlAndMathml","html","mathml"]},description:"Determines the markup language of the output.",cli:"-F, --format "},leqno:{type:"boolean",description:"Render display math in leqno style (left-justified tags)."},fleqn:{type:"boolean",description:"Render display math flush left."},throwOnError:{type:"boolean",default:!0,cli:"-t, --no-throw-on-error",cliDescription:"Render errors (in the color given by --error-color) instead of throwing a ParseError exception when encountering an error."},errorColor:{type:"string",default:"#cc0000",cli:"-c, --error-color ",cliDescription:"A color string given in the format 'rgb' or 'rrggbb' (no #). 
This option determines the color of errors rendered by the -t option.",cliProcessor:e=>"#"+e},macros:{type:"object",cli:"-m, --macro ",cliDescription:"Define custom macro of the form '\\foo:expansion' (use multiple -m arguments for multiple macros).",cliDefault:[],cliProcessor:(e,t)=>(t.push(e),t)},minRuleThickness:{type:"number",description:"Specifies a minimum thickness, in ems, for fraction lines, `\\sqrt` top lines, `{array}` vertical lines, `\\hline`, `\\hdashline`, `\\underline`, `\\overline`, and the borders of `\\fbox`, `\\boxed`, and `\\fcolorbox`.",processor:e=>Math.max(0,e),cli:"--min-rule-thickness ",cliProcessor:parseFloat},colorIsTextColor:{type:"boolean",description:"Makes \\color behave like LaTeX's 2-argument \\textcolor, instead of LaTeX's one-argument \\color mode change.",cli:"-b, --color-is-text-color"},strict:{type:[{enum:["warn","ignore","error"]},"boolean","function"],description:"Turn on strict / LaTeX faithfulness mode, which throws an error if the input uses features that are not supported by LaTeX.",cli:"-S, --strict",cliDefault:!1},trust:{type:["boolean","function"],description:"Trust the input, enabling all HTML features such as \\url.",cli:"-T, --trust"},maxSize:{type:"number",default:1/0,description:"If non-zero, all user-specified sizes, e.g. in \\rule{500em}{500em}, will be capped to maxSize ems. Otherwise, elements and spaces can be arbitrarily large",processor:e=>Math.max(0,e),cli:"-s, --max-size ",cliProcessor:parseInt},maxExpand:{type:"number",default:1e3,description:"Limit the number of macro expansions to the specified number, to prevent e.g. infinite macro loops. 
If set to Infinity, the macro expander will try to fully expand as in LaTeX.",processor:e=>Math.max(0,e),cli:"-e, --max-expand ",cliProcessor:e=>"Infinity"===e?1/0:parseInt(e)},globalGroup:{type:"boolean",cli:!1}};function c(e){if(e.default)return e.default;const t=e.type,r=Array.isArray(t)?t[0]:t;if("string"!=typeof r)return r.enum[0];switch(r){case"boolean":return!1;case"string":return"";case"number":return 0;case"object":return{}}}class m{constructor(e){this.displayMode=void 0,this.output=void 0,this.leqno=void 0,this.fleqn=void 0,this.throwOnError=void 0,this.errorColor=void 0,this.macros=void 0,this.minRuleThickness=void 0,this.colorIsTextColor=void 0,this.strict=void 0,this.trust=void 0,this.maxSize=void 0,this.maxExpand=void 0,this.globalGroup=void 0,e=e||{};for(const t in h)if(h.hasOwnProperty(t)){const r=h[t];this[t]=void 0!==e[t]?r.processor?r.processor(e[t]):e[t]:c(r)}}reportNonstrict(e,t,r){let o=this.strict;if("function"==typeof o&&(o=o(e,t,r)),o&&"ignore"!==o){if(!0===o||"error"===o)throw new n("LaTeX-incompatible input and strict mode is set to 'error': "+t+" ["+e+"]",r);"warn"===o?"undefined"!=typeof console&&console.warn("LaTeX-incompatible input and strict mode is set to 'warn': "+t+" ["+e+"]"):"undefined"!=typeof console&&console.warn("LaTeX-incompatible input and strict mode is set to unrecognized '"+o+"': "+t+" ["+e+"]")}}useStrictBehavior(e,t,r){let n=this.strict;if("function"==typeof n)try{n=n(e,t,r)}catch(e){n="error"}return!(!n||"ignore"===n)&&(!0===n||"error"===n||("warn"===n?("undefined"!=typeof console&&console.warn("LaTeX-incompatible input and strict mode is set to 'warn': "+t+" ["+e+"]"),!1):("undefined"!=typeof console&&console.warn("LaTeX-incompatible input and strict mode is set to unrecognized '"+n+"': "+t+" ["+e+"]"),!1)))}isTrusted(e){if(e.url&&!e.protocol){const t=l.protocolFromUrl(e.url);if(null==t)return!1;e.protocol=t}const t="function"==typeof this.trust?this.trust(e):this.trust;return Boolean(t)}}class 
p{constructor(e,t,r){this.id=void 0,this.size=void 0,this.cramped=void 0,this.id=e,this.size=t,this.cramped=r}sup(){return u[d[this.id]]}sub(){return u[g[this.id]]}fracNum(){return u[f[this.id]]}fracDen(){return u[b[this.id]]}cramp(){return u[y[this.id]]}text(){return u[x[this.id]]}isTight(){return this.size>=2}}const u=[new p(0,0,!1),new p(1,0,!0),new p(2,1,!1),new p(3,1,!0),new p(4,2,!1),new p(5,2,!0),new p(6,3,!1),new p(7,3,!0)],d=[4,5,4,5,6,7,6,7],g=[5,5,5,5,7,7,7,7],f=[2,3,4,5,6,7,6,7],b=[3,3,5,5,7,7,7,7],y=[1,1,3,3,5,5,7,7],x=[0,1,2,3,2,3,2,3];var w={DISPLAY:u[0],TEXT:u[2],SCRIPT:u[4],SCRIPTSCRIPT:u[6]};const v=[{name:"latin",blocks:[[256,591],[768,879]]},{name:"cyrillic",blocks:[[1024,1279]]},{name:"armenian",blocks:[[1328,1423]]},{name:"brahmic",blocks:[[2304,4255]]},{name:"georgian",blocks:[[4256,4351]]},{name:"cjk",blocks:[[12288,12543],[19968,40879],[65280,65376]]},{name:"hangul",blocks:[[44032,55215]]}];const k=[];function S(e){for(let t=0;t=k[t]&&e<=k[t+1])return!0;return!1}v.forEach((e=>e.blocks.forEach((e=>k.push(...e)))));const M=80,z={doubleleftarrow:"M262 157\nl10-10c34-36 62.7-77 86-123 3.3-8 5-13.3 5-16 0-5.3-6.7-8-20-8-7.3\n 0-12.2.5-14.5 1.5-2.3 1-4.8 4.5-7.5 10.5-49.3 97.3-121.7 169.3-217 216-28\n 14-57.3 25-88 33-6.7 2-11 3.8-13 5.5-2 1.7-3 4.2-3 7.5s1 5.8 3 7.5\nc2 1.7 6.3 3.5 13 5.5 68 17.3 128.2 47.8 180.5 91.5 52.3 43.7 93.8 96.2 124.5\n 157.5 9.3 8 15.3 12.3 18 13h6c12-.7 18-4 18-10 0-2-1.7-7-5-15-23.3-46-52-87\n-86-123l-10-10h399738v-40H218c328 0 0 0 0 0l-10-8c-26.7-20-65.7-43-117-69 2.7\n-2 6-3.7 10-5 36.7-16 72.3-37.3 107-64l10-8h399782v-40z\nm8 0v40h399730v-40zm0 194v40h399730v-40z",doublerightarrow:"M399738 392l\n-10 10c-34 36-62.7 77-86 123-3.3 8-5 13.3-5 16 0 5.3 6.7 8 20 8 7.3 0 12.2-.5\n 14.5-1.5 2.3-1 4.8-4.5 7.5-10.5 49.3-97.3 121.7-169.3 217-216 28-14 57.3-25 88\n-33 6.7-2 11-3.8 13-5.5 2-1.7 3-4.2 
3-7.5s-1-5.8-3-7.5c-2-1.7-6.3-3.5-13-5.5-68\n-17.3-128.2-47.8-180.5-91.5-52.3-43.7-93.8-96.2-124.5-157.5-9.3-8-15.3-12.3-18\n-13h-6c-12 .7-18 4-18 10 0 2 1.7 7 5 15 23.3 46 52 87 86 123l10 10H0v40h399782\nc-328 0 0 0 0 0l10 8c26.7 20 65.7 43 117 69-2.7 2-6 3.7-10 5-36.7 16-72.3 37.3\n-107 64l-10 8H0v40zM0 157v40h399730v-40zm0 194v40h399730v-40z",leftarrow:"M400000 241H110l3-3c68.7-52.7 113.7-120\n 135-202 4-14.7 6-23 6-25 0-7.3-7-11-21-11-8 0-13.2.8-15.5 2.5-2.3 1.7-4.2 5.8\n-5.5 12.5-1.3 4.7-2.7 10.3-4 17-12 48.7-34.8 92-68.5 130S65.3 228.3 18 247\nc-10 4-16 7.7-18 11 0 8.7 6 14.3 18 17 47.3 18.7 87.8 47 121.5 85S196 441.3 208\n 490c.7 2 1.3 5 2 9s1.2 6.7 1.5 8c.3 1.3 1 3.3 2 6s2.2 4.5 3.5 5.5c1.3 1 3.3\n 1.8 6 2.5s6 1 10 1c14 0 21-3.7 21-11 0-2-2-10.3-6-25-20-79.3-65-146.7-135-202\n l-3-3h399890zM100 241v40h399900v-40z",leftbrace:"M6 548l-6-6v-35l6-11c56-104 135.3-181.3 238-232 57.3-28.7 117\n-45 179-50h399577v120H403c-43.3 7-81 15-113 26-100.7 33-179.7 91-237 174-2.7\n 5-6 9-10 13-.7 1-7.3 1-20 1H6z",leftbraceunder:"M0 6l6-6h17c12.688 0 19.313.3 20 1 4 4 7.313 8.3 10 13\n 35.313 51.3 80.813 93.8 136.5 127.5 55.688 33.7 117.188 55.8 184.5 66.5.688\n 0 2 .3 4 1 18.688 2.7 76 4.3 172 5h399450v120H429l-6-1c-124.688-8-235-61.7\n-331-161C60.687 138.7 32.312 99.3 7 54L0 41V6z",leftgroup:"M400000 80\nH435C64 80 168.3 229.4 21 260c-5.9 1.2-18 0-18 0-2 0-3-1-3-3v-38C76 61 257 0\n 435 0h399565z",leftgroupunder:"M400000 262\nH435C64 262 168.3 112.6 21 82c-5.9-1.2-18 0-18 0-2 0-3 1-3 3v38c76 158 257 219\n 435 219h399565z",leftharpoon:"M0 267c.7 5.3 3 10 7 14h399993v-40H93c3.3\n-3.3 10.2-9.5 20.5-18.5s17.8-15.8 22.5-20.5c50.7-52 88-110.3 112-175 4-11.3 5\n-18.3 3-21-1.3-4-7.3-6-18-6-8 0-13 .7-15 2s-4.7 6.7-8 16c-42 98.7-107.3 174.7\n-196 228-6.7 4.7-10.7 8-12 10-1.3 2-2 5.7-2 11zm100-26v40h399900v-40z",leftharpoonplus:"M0 267c.7 5.3 3 10 7 14h399993v-40H93c3.3-3.3 10.2-9.5\n 20.5-18.5s17.8-15.8 22.5-20.5c50.7-52 88-110.3 112-175 4-11.3 5-18.3 3-21-1.3\n-4-7.3-6-18-6-8 0-13 
.7-15 2s-4.7 6.7-8 16c-42 98.7-107.3 174.7-196 228-6.7 4.7\n-10.7 8-12 10-1.3 2-2 5.7-2 11zm100-26v40h399900v-40zM0 435v40h400000v-40z\nm0 0v40h400000v-40z",leftharpoondown:"M7 241c-4 4-6.333 8.667-7 14 0 5.333.667 9 2 11s5.333\n 5.333 12 10c90.667 54 156 130 196 228 3.333 10.667 6.333 16.333 9 17 2 .667 5\n 1 9 1h5c10.667 0 16.667-2 18-6 2-2.667 1-9.667-3-21-32-87.333-82.667-157.667\n-152-211l-3-3h399907v-40zM93 281 H400000 v-40L7 241z",leftharpoondownplus:"M7 435c-4 4-6.3 8.7-7 14 0 5.3.7 9 2 11s5.3 5.3 12\n 10c90.7 54 156 130 196 228 3.3 10.7 6.3 16.3 9 17 2 .7 5 1 9 1h5c10.7 0 16.7\n-2 18-6 2-2.7 1-9.7-3-21-32-87.3-82.7-157.7-152-211l-3-3h399907v-40H7zm93 0\nv40h399900v-40zM0 241v40h399900v-40zm0 0v40h399900v-40z",lefthook:"M400000 281 H103s-33-11.2-61-33.5S0 197.3 0 164s14.2-61.2 42.5\n-83.5C70.8 58.2 104 47 142 47 c16.7 0 25 6.7 25 20 0 12-8.7 18.7-26 20-40 3.3\n-68.7 15.7-86 37-10 12-15 25.3-15 40 0 22.7 9.8 40.7 29.5 54 19.7 13.3 43.5 21\n 71.5 23h399859zM103 281v-40h399897v40z",leftlinesegment:"M40 281 V428 H0 V94 H40 V241 H400000 v40z\nM40 281 V428 H0 V94 H40 V241 H400000 v40z",leftmapsto:"M40 281 V448H0V74H40V241H400000v40z\nM40 281 V448H0V74H40V241H400000v40z",leftToFrom:"M0 147h400000v40H0zm0 214c68 40 115.7 95.7 143 167h22c15.3 0 23\n-.3 23-1 0-1.3-5.3-13.7-16-37-18-35.3-41.3-69-70-101l-7-8h399905v-40H95l7-8\nc28.7-32 52-65.7 70-101 10.7-23.3 16-35.7 16-37 0-.7-7.7-1-23-1h-22C115.7 265.3\n 68 321 0 361zm0-174v-40h399900v40zm100 154v40h399900v-40z",longequal:"M0 50 h400000 v40H0z m0 194h40000v40H0z\nM0 50 h400000 v40H0z m0 194h40000v40H0z",midbrace:"M200428 334\nc-100.7-8.3-195.3-44-280-108-55.3-42-101.7-93-139-153l-9-14c-2.7 4-5.7 8.7-9 14\n-53.3 86.7-123.7 153-211 199-66.7 36-137.3 56.3-212 62H0V214h199568c178.3-11.7\n 311.7-78.3 403-201 6-8 9.7-12 11-12 .7-.7 6.7-1 18-1s17.3.3 18 1c1.3 0 5 4 11\n 12 44.7 59.3 101.3 106.3 170 141s145.3 54.3 229 60h199572v120z",midbraceunder:"M199572 214\nc100.7 8.3 195.3 44 280 108 55.3 42 101.7 93 139 153l9 14c2.7-4 
5.7-8.7 9-14\n 53.3-86.7 123.7-153 211-199 66.7-36 137.3-56.3 212-62h199568v120H200432c-178.3\n 11.7-311.7 78.3-403 201-6 8-9.7 12-11 12-.7.7-6.7 1-18 1s-17.3-.3-18-1c-1.3 0\n-5-4-11-12-44.7-59.3-101.3-106.3-170-141s-145.3-54.3-229-60H0V214z",oiintSize1:"M512.6 71.6c272.6 0 320.3 106.8 320.3 178.2 0 70.8-47.7 177.6\n-320.3 177.6S193.1 320.6 193.1 249.8c0-71.4 46.9-178.2 319.5-178.2z\nm368.1 178.2c0-86.4-60.9-215.4-368.1-215.4-306.4 0-367.3 129-367.3 215.4 0 85.8\n60.9 214.8 367.3 214.8 307.2 0 368.1-129 368.1-214.8z",oiintSize2:"M757.8 100.1c384.7 0 451.1 137.6 451.1 230 0 91.3-66.4 228.8\n-451.1 228.8-386.3 0-452.7-137.5-452.7-228.8 0-92.4 66.4-230 452.7-230z\nm502.4 230c0-111.2-82.4-277.2-502.4-277.2s-504 166-504 277.2\nc0 110 84 276 504 276s502.4-166 502.4-276z",oiiintSize1:"M681.4 71.6c408.9 0 480.5 106.8 480.5 178.2 0 70.8-71.6 177.6\n-480.5 177.6S202.1 320.6 202.1 249.8c0-71.4 70.5-178.2 479.3-178.2z\nm525.8 178.2c0-86.4-86.8-215.4-525.7-215.4-437.9 0-524.7 129-524.7 215.4 0\n85.8 86.8 214.8 524.7 214.8 438.9 0 525.7-129 525.7-214.8z",oiiintSize2:"M1021.2 53c603.6 0 707.8 165.8 707.8 277.2 0 110-104.2 275.8\n-707.8 275.8-606 0-710.2-165.8-710.2-275.8C311 218.8 415.2 53 1021.2 53z\nm770.4 277.1c0-131.2-126.4-327.6-770.5-327.6S248.4 198.9 248.4 330.1\nc0 130 128.8 326.4 772.7 326.4s770.5-196.4 770.5-326.4z",rightarrow:"M0 241v40h399891c-47.3 35.3-84 78-110 128\n-16.7 32-27.7 63.7-33 95 0 1.3-.2 2.7-.5 4-.3 1.3-.5 2.3-.5 3 0 7.3 6.7 11 20\n 11 8 0 13.2-.8 15.5-2.5 2.3-1.7 4.2-5.5 5.5-11.5 2-13.3 5.7-27 11-41 14.7-44.7\n 39-84.5 73-119.5s73.7-60.2 119-75.5c6-2 9-5.7 9-11s-3-9-9-11c-45.3-15.3-85\n-40.5-119-75.5s-58.3-74.8-73-119.5c-4.7-14-8.3-27.3-11-40-1.3-6.7-3.2-10.8-5.5\n-12.5-2.3-1.7-7.5-2.5-15.5-2.5-14 0-21 3.7-21 11 0 2 2 10.3 6 25 20.7 83.3 67\n 151.7 139 205zm0 0v40h399900v-40z",rightbrace:"M400000 542l\n-6 6h-17c-12.7 0-19.3-.3-20-1-4-4-7.3-8.3-10-13-35.3-51.3-80.8-93.8-136.5-127.5\ns-117.2-55.8-184.5-66.5c-.7 
0-2-.3-4-1-18.7-2.7-76-4.3-172-5H0V214h399571l6 1\nc124.7 8 235 61.7 331 161 31.3 33.3 59.7 72.7 85 118l7 13v35z",rightbraceunder:"M399994 0l6 6v35l-6 11c-56 104-135.3 181.3-238 232-57.3\n 28.7-117 45-179 50H-300V214h399897c43.3-7 81-15 113-26 100.7-33 179.7-91 237\n-174 2.7-5 6-9 10-13 .7-1 7.3-1 20-1h17z",rightgroup:"M0 80h399565c371 0 266.7 149.4 414 180 5.9 1.2 18 0 18 0 2 0\n 3-1 3-3v-38c-76-158-257-219-435-219H0z",rightgroupunder:"M0 262h399565c371 0 266.7-149.4 414-180 5.9-1.2 18 0 18\n 0 2 0 3 1 3 3v38c-76 158-257 219-435 219H0z",rightharpoon:"M0 241v40h399993c4.7-4.7 7-9.3 7-14 0-9.3\n-3.7-15.3-11-18-92.7-56.7-159-133.7-199-231-3.3-9.3-6-14.7-8-16-2-1.3-7-2-15-2\n-10.7 0-16.7 2-18 6-2 2.7-1 9.7 3 21 15.3 42 36.7 81.8 64 119.5 27.3 37.7 58\n 69.2 92 94.5zm0 0v40h399900v-40z",rightharpoonplus:"M0 241v40h399993c4.7-4.7 7-9.3 7-14 0-9.3-3.7-15.3-11\n-18-92.7-56.7-159-133.7-199-231-3.3-9.3-6-14.7-8-16-2-1.3-7-2-15-2-10.7 0-16.7\n 2-18 6-2 2.7-1 9.7 3 21 15.3 42 36.7 81.8 64 119.5 27.3 37.7 58 69.2 92 94.5z\nm0 0v40h399900v-40z m100 194v40h399900v-40zm0 0v40h399900v-40z",rightharpoondown:"M399747 511c0 7.3 6.7 11 20 11 8 0 13-.8 15-2.5s4.7-6.8\n 8-15.5c40-94 99.3-166.3 178-217 13.3-8 20.3-12.3 21-13 5.3-3.3 8.5-5.8 9.5\n-7.5 1-1.7 1.5-5.2 1.5-10.5s-2.3-10.3-7-15H0v40h399908c-34 25.3-64.7 57-92 95\n-27.3 38-48.7 77.7-64 119-3.3 8.7-5 14-5 16zM0 241v40h399900v-40z",rightharpoondownplus:"M399747 705c0 7.3 6.7 11 20 11 8 0 13-.8\n 15-2.5s4.7-6.8 8-15.5c40-94 99.3-166.3 178-217 13.3-8 20.3-12.3 21-13 5.3-3.3\n 8.5-5.8 9.5-7.5 1-1.7 1.5-5.2 1.5-10.5s-2.3-10.3-7-15H0v40h399908c-34 25.3\n-64.7 57-92 95-27.3 38-48.7 77.7-64 119-3.3 8.7-5 14-5 16zM0 435v40h399900v-40z\nm0-194v40h400000v-40zm0 0v40h400000v-40z",righthook:"M399859 241c-764 0 0 0 0 0 40-3.3 68.7-15.7 86-37 10-12 15-25.3\n 15-40 0-22.7-9.8-40.7-29.5-54-19.7-13.3-43.5-21-71.5-23-17.3-1.3-26-8-26-20 0\n-13.3 8.7-20 26-20 38 0 71 11.2 99 33.5 0 0 7 5.6 21 16.7 14 11.2 21 33.5 21\n 66.8s-14 61.2-42 83.5c-28 
22.3-61 33.5-99 33.5L0 241z M0 281v-40h399859v40z",rightlinesegment:"M399960 241 V94 h40 V428 h-40 V281 H0 v-40z\nM399960 241 V94 h40 V428 h-40 V281 H0 v-40z",rightToFrom:"M400000 167c-70.7-42-118-97.7-142-167h-23c-15.3 0-23 .3-23\n 1 0 1.3 5.3 13.7 16 37 18 35.3 41.3 69 70 101l7 8H0v40h399905l-7 8c-28.7 32\n-52 65.7-70 101-10.7 23.3-16 35.7-16 37 0 .7 7.7 1 23 1h23c24-69.3 71.3-125 142\n-167z M100 147v40h399900v-40zM0 341v40h399900v-40z",twoheadleftarrow:"M0 167c68 40\n 115.7 95.7 143 167h22c15.3 0 23-.3 23-1 0-1.3-5.3-13.7-16-37-18-35.3-41.3-69\n-70-101l-7-8h125l9 7c50.7 39.3 85 86 103 140h46c0-4.7-6.3-18.7-19-42-18-35.3\n-40-67.3-66-96l-9-9h399716v-40H284l9-9c26-28.7 48-60.7 66-96 12.7-23.333 19\n-37.333 19-42h-46c-18 54-52.3 100.7-103 140l-9 7H95l7-8c28.7-32 52-65.7 70-101\n 10.7-23.333 16-35.7 16-37 0-.7-7.7-1-23-1h-22C115.7 71.3 68 127 0 167z",twoheadrightarrow:"M400000 167\nc-68-40-115.7-95.7-143-167h-22c-15.3 0-23 .3-23 1 0 1.3 5.3 13.7 16 37 18 35.3\n 41.3 69 70 101l7 8h-125l-9-7c-50.7-39.3-85-86-103-140h-46c0 4.7 6.3 18.7 19 42\n 18 35.3 40 67.3 66 96l9 9H0v40h399716l-9 9c-26 28.7-48 60.7-66 96-12.7 23.333\n-19 37.333-19 42h46c18-54 52.3-100.7 103-140l9-7h125l-7 8c-28.7 32-52 65.7-70\n 101-10.7 23.333-16 35.7-16 37 0 .7 7.7 1 23 1h22c27.3-71.3 75-127 143-167z",tilde1:"M200 55.538c-77 0-168 73.953-177 73.953-3 0-7\n-2.175-9-5.437L2 97c-1-2-2-4-2-6 0-4 2-7 5-9l20-12C116 12 171 0 207 0c86 0\n 114 68 191 68 78 0 168-68 177-68 4 0 7 2 9 5l12 19c1 2.175 2 4.35 2 6.525 0\n 4.35-2 7.613-5 9.788l-19 13.05c-92 63.077-116.937 75.308-183 76.128\n-68.267.847-113-73.952-191-73.952z",tilde2:"M344 55.266c-142 0-300.638 81.316-311.5 86.418\n-8.01 3.762-22.5 10.91-23.5 5.562L1 120c-1-2-1-3-1-4 0-5 3-9 8-10l18.4-9C160.9\n 31.9 283 0 358 0c148 0 188 122 331 122s314-97 326-97c4 0 8 2 10 7l7 21.114\nc1 2.14 1 3.21 1 4.28 0 5.347-3 9.626-7 10.696l-22.3 12.622C852.6 158.372 751\n 181.476 676 181.476c-149 0-189-126.21-332-126.21z",tilde3:"M786 59C457 59 32 175.242 13 175.242c-6 
0-10-3.457\n-11-10.37L.15 138c-1-7 3-12 10-13l19.2-6.4C378.4 40.7 634.3 0 804.3 0c337 0\n 411.8 157 746.8 157 328 0 754-112 773-112 5 0 10 3 11 9l1 14.075c1 8.066-.697\n 16.595-6.697 17.492l-21.052 7.31c-367.9 98.146-609.15 122.696-778.15 122.696\n -338 0-409-156.573-744-156.573z",tilde4:"M786 58C457 58 32 177.487 13 177.487c-6 0-10-3.345\n-11-10.035L.15 143c-1-7 3-12 10-13l22-6.7C381.2 35 637.15 0 807.15 0c337 0 409\n 177 744 177 328 0 754-127 773-127 5 0 10 3 11 9l1 14.794c1 7.805-3 13.38-9\n 14.495l-20.7 5.574c-366.85 99.79-607.3 139.372-776.3 139.372-338 0-409\n -175.236-744-175.236z",vec:"M377 20c0-5.333 1.833-10 5.5-14S391 0 397 0c4.667 0 8.667 1.667 12 5\n3.333 2.667 6.667 9 10 19 6.667 24.667 20.333 43.667 41 57 7.333 4.667 11\n10.667 11 18 0 6-1 10-3 12s-6.667 5-14 9c-28.667 14.667-53.667 35.667-75 63\n-1.333 1.333-3.167 3.5-5.5 6.5s-4 4.833-5 5.5c-1 .667-2.5 1.333-4.5 2s-4.333 1\n-7 1c-4.667 0-9.167-1.833-13.5-5.5S337 184 337 178c0-12.667 15.667-32.333 47-59\nH213l-171-1c-8.667-6-13-12.333-13-19 0-4.667 4.333-11.333 13-20h359\nc-16-25.333-24-45-24-59z",widehat1:"M529 0h5l519 115c5 1 9 5 9 10 0 1-1 2-1 3l-4 22\nc-1 5-5 9-11 9h-2L532 67 19 159h-2c-5 0-9-4-11-9l-5-22c-1-6 2-12 8-13z",widehat2:"M1181 0h2l1171 176c6 0 10 5 10 11l-2 23c-1 6-5 10\n-11 10h-1L1182 67 15 220h-1c-6 0-10-4-11-10l-2-23c-1-6 4-11 10-11z",widehat3:"M1181 0h2l1171 236c6 0 10 5 10 11l-2 23c-1 6-5 10\n-11 10h-1L1182 67 15 280h-1c-6 0-10-4-11-10l-2-23c-1-6 4-11 10-11z",widehat4:"M1181 0h2l1171 296c6 0 10 5 10 11l-2 23c-1 6-5 10\n-11 10h-1L1182 67 15 340h-1c-6 0-10-4-11-10l-2-23c-1-6 4-11 
10-11z",widecheck1:"M529,159h5l519,-115c5,-1,9,-5,9,-10c0,-1,-1,-2,-1,-3l-4,-22c-1,\n-5,-5,-9,-11,-9h-2l-512,92l-513,-92h-2c-5,0,-9,4,-11,9l-5,22c-1,6,2,12,8,13z",widecheck2:"M1181,220h2l1171,-176c6,0,10,-5,10,-11l-2,-23c-1,-6,-5,-10,\n-11,-10h-1l-1168,153l-1167,-153h-1c-6,0,-10,4,-11,10l-2,23c-1,6,4,11,10,11z",widecheck3:"M1181,280h2l1171,-236c6,0,10,-5,10,-11l-2,-23c-1,-6,-5,-10,\n-11,-10h-1l-1168,213l-1167,-213h-1c-6,0,-10,4,-11,10l-2,23c-1,6,4,11,10,11z",widecheck4:"M1181,340h2l1171,-296c6,0,10,-5,10,-11l-2,-23c-1,-6,-5,-10,\n-11,-10h-1l-1168,273l-1167,-273h-1c-6,0,-10,4,-11,10l-2,23c-1,6,4,11,10,11z",baraboveleftarrow:"M400000 620h-399890l3 -3c68.7 -52.7 113.7 -120 135 -202\nc4 -14.7 6 -23 6 -25c0 -7.3 -7 -11 -21 -11c-8 0 -13.2 0.8 -15.5 2.5\nc-2.3 1.7 -4.2 5.8 -5.5 12.5c-1.3 4.7 -2.7 10.3 -4 17c-12 48.7 -34.8 92 -68.5 130\ns-74.2 66.3 -121.5 85c-10 4 -16 7.7 -18 11c0 8.7 6 14.3 18 17c47.3 18.7 87.8 47\n121.5 85s56.5 81.3 68.5 130c0.7 2 1.3 5 2 9s1.2 6.7 1.5 8c0.3 1.3 1 3.3 2 6\ns2.2 4.5 3.5 5.5c1.3 1 3.3 1.8 6 2.5s6 1 10 1c14 0 21 -3.7 21 -11\nc0 -2 -2 -10.3 -6 -25c-20 -79.3 -65 -146.7 -135 -202l-3 -3h399890z\nM100 620v40h399900v-40z M0 241v40h399900v-40zM0 241v40h399900v-40z",rightarrowabovebar:"M0 241v40h399891c-47.3 35.3-84 78-110 128-16.7 32\n-27.7 63.7-33 95 0 1.3-.2 2.7-.5 4-.3 1.3-.5 2.3-.5 3 0 7.3 6.7 11 20 11 8 0\n13.2-.8 15.5-2.5 2.3-1.7 4.2-5.5 5.5-11.5 2-13.3 5.7-27 11-41 14.7-44.7 39\n-84.5 73-119.5s73.7-60.2 119-75.5c6-2 9-5.7 9-11s-3-9-9-11c-45.3-15.3-85-40.5\n-119-75.5s-58.3-74.8-73-119.5c-4.7-14-8.3-27.3-11-40-1.3-6.7-3.2-10.8-5.5\n-12.5-2.3-1.7-7.5-2.5-15.5-2.5-14 0-21 3.7-21 11 0 2 2 10.3 6 25 20.7 83.3 67\n151.7 139 205zm96 379h399894v40H0zm0 
0h399904v40H0z",baraboveshortleftharpoon:"M507,435c-4,4,-6.3,8.7,-7,14c0,5.3,0.7,9,2,11\nc1.3,2,5.3,5.3,12,10c90.7,54,156,130,196,228c3.3,10.7,6.3,16.3,9,17\nc2,0.7,5,1,9,1c0,0,5,0,5,0c10.7,0,16.7,-2,18,-6c2,-2.7,1,-9.7,-3,-21\nc-32,-87.3,-82.7,-157.7,-152,-211c0,0,-3,-3,-3,-3l399351,0l0,-40\nc-398570,0,-399437,0,-399437,0z M593 435 v40 H399500 v-40z\nM0 281 v-40 H399908 v40z M0 281 v-40 H399908 v40z",rightharpoonaboveshortbar:"M0,241 l0,40c399126,0,399993,0,399993,0\nc4.7,-4.7,7,-9.3,7,-14c0,-9.3,-3.7,-15.3,-11,-18c-92.7,-56.7,-159,-133.7,-199,\n-231c-3.3,-9.3,-6,-14.7,-8,-16c-2,-1.3,-7,-2,-15,-2c-10.7,0,-16.7,2,-18,6\nc-2,2.7,-1,9.7,3,21c15.3,42,36.7,81.8,64,119.5c27.3,37.7,58,69.2,92,94.5z\nM0 241 v40 H399908 v-40z M0 475 v-40 H399500 v40z M0 475 v-40 H399500 v40z",shortbaraboveleftharpoon:"M7,435c-4,4,-6.3,8.7,-7,14c0,5.3,0.7,9,2,11\nc1.3,2,5.3,5.3,12,10c90.7,54,156,130,196,228c3.3,10.7,6.3,16.3,9,17c2,0.7,5,1,9,\n1c0,0,5,0,5,0c10.7,0,16.7,-2,18,-6c2,-2.7,1,-9.7,-3,-21c-32,-87.3,-82.7,-157.7,\n-152,-211c0,0,-3,-3,-3,-3l399907,0l0,-40c-399126,0,-399993,0,-399993,0z\nM93 435 v40 H400000 v-40z M500 241 v40 H400000 v-40z M500 241 v40 H400000 v-40z",shortrightharpoonabovebar:"M53,241l0,40c398570,0,399437,0,399437,0\nc4.7,-4.7,7,-9.3,7,-14c0,-9.3,-3.7,-15.3,-11,-18c-92.7,-56.7,-159,-133.7,-199,\n-231c-3.3,-9.3,-6,-14.7,-8,-16c-2,-1.3,-7,-2,-15,-2c-10.7,0,-16.7,2,-18,6\nc-2,2.7,-1,9.7,3,21c15.3,42,36.7,81.8,64,119.5c27.3,37.7,58,69.2,92,94.5z\nM500 241 v40 H399408 v-40z M500 435 v40 H400000 v-40z"};class A{constructor(e){this.children=void 0,this.classes=void 0,this.height=void 0,this.depth=void 0,this.maxFontSize=void 0,this.style=void 0,this.children=e,this.classes=[],this.height=0,this.depth=0,this.maxFontSize=0,this.style={}}hasClass(e){return l.contains(this.classes,e)}toNode(){const e=document.createDocumentFragment();for(let t=0;te.toText())).join("")}}var 
T={"AMS-Regular":{32:[0,0,0,0,.25],65:[0,.68889,0,0,.72222],66:[0,.68889,0,0,.66667],67:[0,.68889,0,0,.72222],68:[0,.68889,0,0,.72222],69:[0,.68889,0,0,.66667],70:[0,.68889,0,0,.61111],71:[0,.68889,0,0,.77778],72:[0,.68889,0,0,.77778],73:[0,.68889,0,0,.38889],74:[.16667,.68889,0,0,.5],75:[0,.68889,0,0,.77778],76:[0,.68889,0,0,.66667],77:[0,.68889,0,0,.94445],78:[0,.68889,0,0,.72222],79:[.16667,.68889,0,0,.77778],80:[0,.68889,0,0,.61111],81:[.16667,.68889,0,0,.77778],82:[0,.68889,0,0,.72222],83:[0,.68889,0,0,.55556],84:[0,.68889,0,0,.66667],85:[0,.68889,0,0,.72222],86:[0,.68889,0,0,.72222],87:[0,.68889,0,0,1],88:[0,.68889,0,0,.72222],89:[0,.68889,0,0,.72222],90:[0,.68889,0,0,.66667],107:[0,.68889,0,0,.55556],160:[0,0,0,0,.25],165:[0,.675,.025,0,.75],174:[.15559,.69224,0,0,.94666],240:[0,.68889,0,0,.55556],295:[0,.68889,0,0,.54028],710:[0,.825,0,0,2.33334],732:[0,.9,0,0,2.33334],770:[0,.825,0,0,2.33334],771:[0,.9,0,0,2.33334],989:[.08167,.58167,0,0,.77778],1008:[0,.43056,.04028,0,.66667],8245:[0,.54986,0,0,.275],8463:[0,.68889,0,0,.54028],8487:[0,.68889,0,0,.72222],8498:[0,.68889,0,0,.55556],8502:[0,.68889,0,0,.66667],8503:[0,.68889,0,0,.44445],8504:[0,.68889,0,0,.66667],8513:[0,.68889,0,0,.63889],8592:[-.03598,.46402,0,0,.5],8594:[-.03598,.46402,0,0,.5],8602:[-.13313,.36687,0,0,1],8603:[-.13313,.36687,0,0,1],8606:[.01354,.52239,0,0,1],8608:[.01354,.52239,0,0,1],8610:[.01354,.52239,0,0,1.11111],8611:[.01354,.52239,0,0,1.11111],8619:[0,.54986,0,0,1],8620:[0,.54986,0,0,1],8621:[-.13313,.37788,0,0,1.38889],8622:[-.13313,.36687,0,0,1],8624:[0,.69224,0,0,.5],8625:[0,.69224,0,0,.5],8630:[0,.43056,0,0,1],8631:[0,.43056,0,0,1],8634:[.08198,.58198,0,0,.77778],8635:[.08198,.58198,0,0,.77778],8638:[.19444,.69224,0,0,.41667],8639:[.19444,.69224,0,0,.41667],8642:[.19444,.69224,0,0,.41667],8643:[.19444,.69224,0,0,.41667],8644:[.1808,.675,0,0,1],8646:[.1808,.675,0,0,1],8647:[.1808,.675,0,0,1],8648:[.19444,.69224,0,0,.83334],8649:[.1808,.675,0,0,1],8650:[.19444,.69224,0,0,.83334],865
1:[.01354,.52239,0,0,1],8652:[.01354,.52239,0,0,1],8653:[-.13313,.36687,0,0,1],8654:[-.13313,.36687,0,0,1],8655:[-.13313,.36687,0,0,1],8666:[.13667,.63667,0,0,1],8667:[.13667,.63667,0,0,1],8669:[-.13313,.37788,0,0,1],8672:[-.064,.437,0,0,1.334],8674:[-.064,.437,0,0,1.334],8705:[0,.825,0,0,.5],8708:[0,.68889,0,0,.55556],8709:[.08167,.58167,0,0,.77778],8717:[0,.43056,0,0,.42917],8722:[-.03598,.46402,0,0,.5],8724:[.08198,.69224,0,0,.77778],8726:[.08167,.58167,0,0,.77778],8733:[0,.69224,0,0,.77778],8736:[0,.69224,0,0,.72222],8737:[0,.69224,0,0,.72222],8738:[.03517,.52239,0,0,.72222],8739:[.08167,.58167,0,0,.22222],8740:[.25142,.74111,0,0,.27778],8741:[.08167,.58167,0,0,.38889],8742:[.25142,.74111,0,0,.5],8756:[0,.69224,0,0,.66667],8757:[0,.69224,0,0,.66667],8764:[-.13313,.36687,0,0,.77778],8765:[-.13313,.37788,0,0,.77778],8769:[-.13313,.36687,0,0,.77778],8770:[-.03625,.46375,0,0,.77778],8774:[.30274,.79383,0,0,.77778],8776:[-.01688,.48312,0,0,.77778],8778:[.08167,.58167,0,0,.77778],8782:[.06062,.54986,0,0,.77778],8783:[.06062,.54986,0,0,.77778],8785:[.08198,.58198,0,0,.77778],8786:[.08198,.58198,0,0,.77778],8787:[.08198,.58198,0,0,.77778],8790:[0,.69224,0,0,.77778],8791:[.22958,.72958,0,0,.77778],8796:[.08198,.91667,0,0,.77778],8806:[.25583,.75583,0,0,.77778],8807:[.25583,.75583,0,0,.77778],8808:[.25142,.75726,0,0,.77778],8809:[.25142,.75726,0,0,.77778],8812:[.25583,.75583,0,0,.5],8814:[.20576,.70576,0,0,.77778],8815:[.20576,.70576,0,0,.77778],8816:[.30274,.79383,0,0,.77778],8817:[.30274,.79383,0,0,.77778],8818:[.22958,.72958,0,0,.77778],8819:[.22958,.72958,0,0,.77778],8822:[.1808,.675,0,0,.77778],8823:[.1808,.675,0,0,.77778],8828:[.13667,.63667,0,0,.77778],8829:[.13667,.63667,0,0,.77778],8830:[.22958,.72958,0,0,.77778],8831:[.22958,.72958,0,0,.77778],8832:[.20576,.70576,0,0,.77778],8833:[.20576,.70576,0,0,.77778],8840:[.30274,.79383,0,0,.77778],8841:[.30274,.79383,0,0,.77778],8842:[.13597,.63597,0,0,.77778],8843:[.13597,.63597,0,0,.77778],8847:[.03517,.54986,0,0,.77778
],8848:[.03517,.54986,0,0,.77778],8858:[.08198,.58198,0,0,.77778],8859:[.08198,.58198,0,0,.77778],8861:[.08198,.58198,0,0,.77778],8862:[0,.675,0,0,.77778],8863:[0,.675,0,0,.77778],8864:[0,.675,0,0,.77778],8865:[0,.675,0,0,.77778],8872:[0,.69224,0,0,.61111],8873:[0,.69224,0,0,.72222],8874:[0,.69224,0,0,.88889],8876:[0,.68889,0,0,.61111],8877:[0,.68889,0,0,.61111],8878:[0,.68889,0,0,.72222],8879:[0,.68889,0,0,.72222],8882:[.03517,.54986,0,0,.77778],8883:[.03517,.54986,0,0,.77778],8884:[.13667,.63667,0,0,.77778],8885:[.13667,.63667,0,0,.77778],8888:[0,.54986,0,0,1.11111],8890:[.19444,.43056,0,0,.55556],8891:[.19444,.69224,0,0,.61111],8892:[.19444,.69224,0,0,.61111],8901:[0,.54986,0,0,.27778],8903:[.08167,.58167,0,0,.77778],8905:[.08167,.58167,0,0,.77778],8906:[.08167,.58167,0,0,.77778],8907:[0,.69224,0,0,.77778],8908:[0,.69224,0,0,.77778],8909:[-.03598,.46402,0,0,.77778],8910:[0,.54986,0,0,.76042],8911:[0,.54986,0,0,.76042],8912:[.03517,.54986,0,0,.77778],8913:[.03517,.54986,0,0,.77778],8914:[0,.54986,0,0,.66667],8915:[0,.54986,0,0,.66667],8916:[0,.69224,0,0,.66667],8918:[.0391,.5391,0,0,.77778],8919:[.0391,.5391,0,0,.77778],8920:[.03517,.54986,0,0,1.33334],8921:[.03517,.54986,0,0,1.33334],8922:[.38569,.88569,0,0,.77778],8923:[.38569,.88569,0,0,.77778],8926:[.13667,.63667,0,0,.77778],8927:[.13667,.63667,0,0,.77778],8928:[.30274,.79383,0,0,.77778],8929:[.30274,.79383,0,0,.77778],8934:[.23222,.74111,0,0,.77778],8935:[.23222,.74111,0,0,.77778],8936:[.23222,.74111,0,0,.77778],8937:[.23222,.74111,0,0,.77778],8938:[.20576,.70576,0,0,.77778],8939:[.20576,.70576,0,0,.77778],8940:[.30274,.79383,0,0,.77778],8941:[.30274,.79383,0,0,.77778],8994:[.19444,.69224,0,0,.77778],8995:[.19444,.69224,0,0,.77778],9416:[.15559,.69224,0,0,.90222],9484:[0,.69224,0,0,.5],9488:[0,.69224,0,0,.5],9492:[0,.37788,0,0,.5],9496:[0,.37788,0,0,.5],9585:[.19444,.68889,0,0,.88889],9586:[.19444,.74111,0,0,.88889],9632:[0,.675,0,0,.77778],9633:[0,.675,0,0,.77778],9650:[0,.54986,0,0,.72222],9651:[0,.54986,0,
0,.72222],9654:[.03517,.54986,0,0,.77778],9660:[0,.54986,0,0,.72222],9661:[0,.54986,0,0,.72222],9664:[.03517,.54986,0,0,.77778],9674:[.11111,.69224,0,0,.66667],9733:[.19444,.69224,0,0,.94445],10003:[0,.69224,0,0,.83334],10016:[0,.69224,0,0,.83334],10731:[.11111,.69224,0,0,.66667],10846:[.19444,.75583,0,0,.61111],10877:[.13667,.63667,0,0,.77778],10878:[.13667,.63667,0,0,.77778],10885:[.25583,.75583,0,0,.77778],10886:[.25583,.75583,0,0,.77778],10887:[.13597,.63597,0,0,.77778],10888:[.13597,.63597,0,0,.77778],10889:[.26167,.75726,0,0,.77778],10890:[.26167,.75726,0,0,.77778],10891:[.48256,.98256,0,0,.77778],10892:[.48256,.98256,0,0,.77778],10901:[.13667,.63667,0,0,.77778],10902:[.13667,.63667,0,0,.77778],10933:[.25142,.75726,0,0,.77778],10934:[.25142,.75726,0,0,.77778],10935:[.26167,.75726,0,0,.77778],10936:[.26167,.75726,0,0,.77778],10937:[.26167,.75726,0,0,.77778],10938:[.26167,.75726,0,0,.77778],10949:[.25583,.75583,0,0,.77778],10950:[.25583,.75583,0,0,.77778],10955:[.28481,.79383,0,0,.77778],10956:[.28481,.79383,0,0,.77778],57350:[.08167,.58167,0,0,.22222],57351:[.08167,.58167,0,0,.38889],57352:[.08167,.58167,0,0,.77778],57353:[0,.43056,.04028,0,.66667],57356:[.25142,.75726,0,0,.77778],57357:[.25142,.75726,0,0,.77778],57358:[.41951,.91951,0,0,.77778],57359:[.30274,.79383,0,0,.77778],57360:[.30274,.79383,0,0,.77778],57361:[.41951,.91951,0,0,.77778],57366:[.25142,.75726,0,0,.77778],57367:[.25142,.75726,0,0,.77778],57368:[.25142,.75726,0,0,.77778],57369:[.25142,.75726,0,0,.77778],57370:[.13597,.63597,0,0,.77778],57371:[.13597,.63597,0,0,.77778]},"Caligraphic-Regular":{32:[0,0,0,0,.25],65:[0,.68333,0,.19445,.79847],66:[0,.68333,.03041,.13889,.65681],67:[0,.68333,.05834,.13889,.52653],68:[0,.68333,.02778,.08334,.77139],69:[0,.68333,.08944,.11111,.52778],70:[0,.68333,.09931,.11111,.71875],71:[.09722,.68333,.0593,.11111,.59487],72:[0,.68333,.00965,.11111,.84452],73:[0,.68333,.07382,0,.54452],74:[.09722,.68333,.18472,.16667,.67778],75:[0,.68333,.01445,.05556,.76195],76:[0,.
68333,0,.13889,.68972],77:[0,.68333,0,.13889,1.2009],78:[0,.68333,.14736,.08334,.82049],79:[0,.68333,.02778,.11111,.79611],80:[0,.68333,.08222,.08334,.69556],81:[.09722,.68333,0,.11111,.81667],82:[0,.68333,0,.08334,.8475],83:[0,.68333,.075,.13889,.60556],84:[0,.68333,.25417,0,.54464],85:[0,.68333,.09931,.08334,.62583],86:[0,.68333,.08222,0,.61278],87:[0,.68333,.08222,.08334,.98778],88:[0,.68333,.14643,.13889,.7133],89:[.09722,.68333,.08222,.08334,.66834],90:[0,.68333,.07944,.13889,.72473],160:[0,0,0,0,.25]},"Fraktur-Regular":{32:[0,0,0,0,.25],33:[0,.69141,0,0,.29574],34:[0,.69141,0,0,.21471],38:[0,.69141,0,0,.73786],39:[0,.69141,0,0,.21201],40:[.24982,.74947,0,0,.38865],41:[.24982,.74947,0,0,.38865],42:[0,.62119,0,0,.27764],43:[.08319,.58283,0,0,.75623],44:[0,.10803,0,0,.27764],45:[.08319,.58283,0,0,.75623],46:[0,.10803,0,0,.27764],47:[.24982,.74947,0,0,.50181],48:[0,.47534,0,0,.50181],49:[0,.47534,0,0,.50181],50:[0,.47534,0,0,.50181],51:[.18906,.47534,0,0,.50181],52:[.18906,.47534,0,0,.50181],53:[.18906,.47534,0,0,.50181],54:[0,.69141,0,0,.50181],55:[.18906,.47534,0,0,.50181],56:[0,.69141,0,0,.50181],57:[.18906,.47534,0,0,.50181],58:[0,.47534,0,0,.21606],59:[.12604,.47534,0,0,.21606],61:[-.13099,.36866,0,0,.75623],63:[0,.69141,0,0,.36245],65:[0,.69141,0,0,.7176],66:[0,.69141,0,0,.88397],67:[0,.69141,0,0,.61254],68:[0,.69141,0,0,.83158],69:[0,.69141,0,0,.66278],70:[.12604,.69141,0,0,.61119],71:[0,.69141,0,0,.78539],72:[.06302,.69141,0,0,.7203],73:[0,.69141,0,0,.55448],74:[.12604,.69141,0,0,.55231],75:[0,.69141,0,0,.66845],76:[0,.69141,0,0,.66602],77:[0,.69141,0,0,1.04953],78:[0,.69141,0,0,.83212],79:[0,.69141,0,0,.82699],80:[.18906,.69141,0,0,.82753],81:[.03781,.69141,0,0,.82699],82:[0,.69141,0,0,.82807],83:[0,.69141,0,0,.82861],84:[0,.69141,0,0,.66899],85:[0,.69141,0,0,.64576],86:[0,.69141,0,0,.83131],87:[0,.69141,0,0,1.04602],88:[0,.69141,0,0,.71922],89:[.18906,.69141,0,0,.83293],90:[.12604,.69141,0,0,.60201],91:[.24982,.74947,0,0,.27764],93:[.24982,.74947,0,0,.27
764],94:[0,.69141,0,0,.49965],97:[0,.47534,0,0,.50046],98:[0,.69141,0,0,.51315],99:[0,.47534,0,0,.38946],100:[0,.62119,0,0,.49857],101:[0,.47534,0,0,.40053],102:[.18906,.69141,0,0,.32626],103:[.18906,.47534,0,0,.5037],104:[.18906,.69141,0,0,.52126],105:[0,.69141,0,0,.27899],106:[0,.69141,0,0,.28088],107:[0,.69141,0,0,.38946],108:[0,.69141,0,0,.27953],109:[0,.47534,0,0,.76676],110:[0,.47534,0,0,.52666],111:[0,.47534,0,0,.48885],112:[.18906,.52396,0,0,.50046],113:[.18906,.47534,0,0,.48912],114:[0,.47534,0,0,.38919],115:[0,.47534,0,0,.44266],116:[0,.62119,0,0,.33301],117:[0,.47534,0,0,.5172],118:[0,.52396,0,0,.5118],119:[0,.52396,0,0,.77351],120:[.18906,.47534,0,0,.38865],121:[.18906,.47534,0,0,.49884],122:[.18906,.47534,0,0,.39054],160:[0,0,0,0,.25],8216:[0,.69141,0,0,.21471],8217:[0,.69141,0,0,.21471],58112:[0,.62119,0,0,.49749],58113:[0,.62119,0,0,.4983],58114:[.18906,.69141,0,0,.33328],58115:[.18906,.69141,0,0,.32923],58116:[.18906,.47534,0,0,.50343],58117:[0,.69141,0,0,.33301],58118:[0,.62119,0,0,.33409],58119:[0,.47534,0,0,.50073]},"Main-Bold":{32:[0,0,0,0,.25],33:[0,.69444,0,0,.35],34:[0,.69444,0,0,.60278],35:[.19444,.69444,0,0,.95833],36:[.05556,.75,0,0,.575],37:[.05556,.75,0,0,.95833],38:[0,.69444,0,0,.89444],39:[0,.69444,0,0,.31944],40:[.25,.75,0,0,.44722],41:[.25,.75,0,0,.44722],42:[0,.75,0,0,.575],43:[.13333,.63333,0,0,.89444],44:[.19444,.15556,0,0,.31944],45:[0,.44444,0,0,.38333],46:[0,.15556,0,0,.31944],47:[.25,.75,0,0,.575],48:[0,.64444,0,0,.575],49:[0,.64444,0,0,.575],50:[0,.64444,0,0,.575],51:[0,.64444,0,0,.575],52:[0,.64444,0,0,.575],53:[0,.64444,0,0,.575],54:[0,.64444,0,0,.575],55:[0,.64444,0,0,.575],56:[0,.64444,0,0,.575],57:[0,.64444,0,0,.575],58:[0,.44444,0,0,.31944],59:[.19444,.44444,0,0,.31944],60:[.08556,.58556,0,0,.89444],61:[-.10889,.39111,0,0,.89444],62:[.08556,.58556,0,0,.89444],63:[0,.69444,0,0,.54305],64:[0,.69444,0,0,.89444],65:[0,.68611,0,0,.86944],66:[0,.68611,0,0,.81805],67:[0,.68611,0,0,.83055],68:[0,.68611,0,0,.88194],69:[0,.68611,0
,0,.75555],70:[0,.68611,0,0,.72361],71:[0,.68611,0,0,.90416],72:[0,.68611,0,0,.9],73:[0,.68611,0,0,.43611],74:[0,.68611,0,0,.59444],75:[0,.68611,0,0,.90138],76:[0,.68611,0,0,.69166],77:[0,.68611,0,0,1.09166],78:[0,.68611,0,0,.9],79:[0,.68611,0,0,.86388],80:[0,.68611,0,0,.78611],81:[.19444,.68611,0,0,.86388],82:[0,.68611,0,0,.8625],83:[0,.68611,0,0,.63889],84:[0,.68611,0,0,.8],85:[0,.68611,0,0,.88472],86:[0,.68611,.01597,0,.86944],87:[0,.68611,.01597,0,1.18888],88:[0,.68611,0,0,.86944],89:[0,.68611,.02875,0,.86944],90:[0,.68611,0,0,.70277],91:[.25,.75,0,0,.31944],92:[.25,.75,0,0,.575],93:[.25,.75,0,0,.31944],94:[0,.69444,0,0,.575],95:[.31,.13444,.03194,0,.575],97:[0,.44444,0,0,.55902],98:[0,.69444,0,0,.63889],99:[0,.44444,0,0,.51111],100:[0,.69444,0,0,.63889],101:[0,.44444,0,0,.52708],102:[0,.69444,.10903,0,.35139],103:[.19444,.44444,.01597,0,.575],104:[0,.69444,0,0,.63889],105:[0,.69444,0,0,.31944],106:[.19444,.69444,0,0,.35139],107:[0,.69444,0,0,.60694],108:[0,.69444,0,0,.31944],109:[0,.44444,0,0,.95833],110:[0,.44444,0,0,.63889],111:[0,.44444,0,0,.575],112:[.19444,.44444,0,0,.63889],113:[.19444,.44444,0,0,.60694],114:[0,.44444,0,0,.47361],115:[0,.44444,0,0,.45361],116:[0,.63492,0,0,.44722],117:[0,.44444,0,0,.63889],118:[0,.44444,.01597,0,.60694],119:[0,.44444,.01597,0,.83055],120:[0,.44444,0,0,.60694],121:[.19444,.44444,.01597,0,.60694],122:[0,.44444,0,0,.51111],123:[.25,.75,0,0,.575],124:[.25,.75,0,0,.31944],125:[.25,.75,0,0,.575],126:[.35,.34444,0,0,.575],160:[0,0,0,0,.25],163:[0,.69444,0,0,.86853],168:[0,.69444,0,0,.575],172:[0,.44444,0,0,.76666],176:[0,.69444,0,0,.86944],177:[.13333,.63333,0,0,.89444],184:[.17014,0,0,0,.51111],198:[0,.68611,0,0,1.04166],215:[.13333,.63333,0,0,.89444],216:[.04861,.73472,0,0,.89444],223:[0,.69444,0,0,.59722],230:[0,.44444,0,0,.83055],247:[.13333,.63333,0,0,.89444],248:[.09722,.54167,0,0,.575],305:[0,.44444,0,0,.31944],338:[0,.68611,0,0,1.16944],339:[0,.44444,0,0,.89444],567:[.19444,.44444,0,0,.35139],710:[0,.69444,0,0,.575],711:
[0,.63194,0,0,.575],713:[0,.59611,0,0,.575],714:[0,.69444,0,0,.575],715:[0,.69444,0,0,.575],728:[0,.69444,0,0,.575],729:[0,.69444,0,0,.31944],730:[0,.69444,0,0,.86944],732:[0,.69444,0,0,.575],733:[0,.69444,0,0,.575],915:[0,.68611,0,0,.69166],916:[0,.68611,0,0,.95833],920:[0,.68611,0,0,.89444],923:[0,.68611,0,0,.80555],926:[0,.68611,0,0,.76666],928:[0,.68611,0,0,.9],931:[0,.68611,0,0,.83055],933:[0,.68611,0,0,.89444],934:[0,.68611,0,0,.83055],936:[0,.68611,0,0,.89444],937:[0,.68611,0,0,.83055],8211:[0,.44444,.03194,0,.575],8212:[0,.44444,.03194,0,1.14999],8216:[0,.69444,0,0,.31944],8217:[0,.69444,0,0,.31944],8220:[0,.69444,0,0,.60278],8221:[0,.69444,0,0,.60278],8224:[.19444,.69444,0,0,.51111],8225:[.19444,.69444,0,0,.51111],8242:[0,.55556,0,0,.34444],8407:[0,.72444,.15486,0,.575],8463:[0,.69444,0,0,.66759],8465:[0,.69444,0,0,.83055],8467:[0,.69444,0,0,.47361],8472:[.19444,.44444,0,0,.74027],8476:[0,.69444,0,0,.83055],8501:[0,.69444,0,0,.70277],8592:[-.10889,.39111,0,0,1.14999],8593:[.19444,.69444,0,0,.575],8594:[-.10889,.39111,0,0,1.14999],8595:[.19444,.69444,0,0,.575],8596:[-.10889,.39111,0,0,1.14999],8597:[.25,.75,0,0,.575],8598:[.19444,.69444,0,0,1.14999],8599:[.19444,.69444,0,0,1.14999],8600:[.19444,.69444,0,0,1.14999],8601:[.19444,.69444,0,0,1.14999],8636:[-.10889,.39111,0,0,1.14999],8637:[-.10889,.39111,0,0,1.14999],8640:[-.10889,.39111,0,0,1.14999],8641:[-.10889,.39111,0,0,1.14999],8656:[-.10889,.39111,0,0,1.14999],8657:[.19444,.69444,0,0,.70277],8658:[-.10889,.39111,0,0,1.14999],8659:[.19444,.69444,0,0,.70277],8660:[-.10889,.39111,0,0,1.14999],8661:[.25,.75,0,0,.70277],8704:[0,.69444,0,0,.63889],8706:[0,.69444,.06389,0,.62847],8707:[0,.69444,0,0,.63889],8709:[.05556,.75,0,0,.575],8711:[0,.68611,0,0,.95833],8712:[.08556,.58556,0,0,.76666],8715:[.08556,.58556,0,0,.76666],8722:[.13333,.63333,0,0,.89444],8723:[.13333,.63333,0,0,.89444],8725:[.25,.75,0,0,.575],8726:[.25,.75,0,0,.575],8727:[-.02778,.47222,0,0,.575],8728:[-.02639,.47361,0,0,.575],8729:[-.02639,.4736
1,0,0,.575],8730:[.18,.82,0,0,.95833],8733:[0,.44444,0,0,.89444],8734:[0,.44444,0,0,1.14999],8736:[0,.69224,0,0,.72222],8739:[.25,.75,0,0,.31944],8741:[.25,.75,0,0,.575],8743:[0,.55556,0,0,.76666],8744:[0,.55556,0,0,.76666],8745:[0,.55556,0,0,.76666],8746:[0,.55556,0,0,.76666],8747:[.19444,.69444,.12778,0,.56875],8764:[-.10889,.39111,0,0,.89444],8768:[.19444,.69444,0,0,.31944],8771:[.00222,.50222,0,0,.89444],8773:[.027,.638,0,0,.894],8776:[.02444,.52444,0,0,.89444],8781:[.00222,.50222,0,0,.89444],8801:[.00222,.50222,0,0,.89444],8804:[.19667,.69667,0,0,.89444],8805:[.19667,.69667,0,0,.89444],8810:[.08556,.58556,0,0,1.14999],8811:[.08556,.58556,0,0,1.14999],8826:[.08556,.58556,0,0,.89444],8827:[.08556,.58556,0,0,.89444],8834:[.08556,.58556,0,0,.89444],8835:[.08556,.58556,0,0,.89444],8838:[.19667,.69667,0,0,.89444],8839:[.19667,.69667,0,0,.89444],8846:[0,.55556,0,0,.76666],8849:[.19667,.69667,0,0,.89444],8850:[.19667,.69667,0,0,.89444],8851:[0,.55556,0,0,.76666],8852:[0,.55556,0,0,.76666],8853:[.13333,.63333,0,0,.89444],8854:[.13333,.63333,0,0,.89444],8855:[.13333,.63333,0,0,.89444],8856:[.13333,.63333,0,0,.89444],8857:[.13333,.63333,0,0,.89444],8866:[0,.69444,0,0,.70277],8867:[0,.69444,0,0,.70277],8868:[0,.69444,0,0,.89444],8869:[0,.69444,0,0,.89444],8900:[-.02639,.47361,0,0,.575],8901:[-.02639,.47361,0,0,.31944],8902:[-.02778,.47222,0,0,.575],8968:[.25,.75,0,0,.51111],8969:[.25,.75,0,0,.51111],8970:[.25,.75,0,0,.51111],8971:[.25,.75,0,0,.51111],8994:[-.13889,.36111,0,0,1.14999],8995:[-.13889,.36111,0,0,1.14999],9651:[.19444,.69444,0,0,1.02222],9657:[-.02778,.47222,0,0,.575],9661:[.19444,.69444,0,0,1.02222],9667:[-.02778,.47222,0,0,.575],9711:[.19444,.69444,0,0,1.14999],9824:[.12963,.69444,0,0,.89444],9825:[.12963,.69444,0,0,.89444],9826:[.12963,.69444,0,0,.89444],9827:[.12963,.69444,0,0,.89444],9837:[0,.75,0,0,.44722],9838:[.19444,.69444,0,0,.44722],9839:[.19444,.69444,0,0,.44722],10216:[.25,.75,0,0,.44722],10217:[.25,.75,0,0,.44722],10815:[0,.68611,0,0,.9],10927:[.1
9667,.69667,0,0,.89444],10928:[.19667,.69667,0,0,.89444],57376:[.19444,.69444,0,0,0]},"Main-BoldItalic":{32:[0,0,0,0,.25],33:[0,.69444,.11417,0,.38611],34:[0,.69444,.07939,0,.62055],35:[.19444,.69444,.06833,0,.94444],37:[.05556,.75,.12861,0,.94444],38:[0,.69444,.08528,0,.88555],39:[0,.69444,.12945,0,.35555],40:[.25,.75,.15806,0,.47333],41:[.25,.75,.03306,0,.47333],42:[0,.75,.14333,0,.59111],43:[.10333,.60333,.03306,0,.88555],44:[.19444,.14722,0,0,.35555],45:[0,.44444,.02611,0,.41444],46:[0,.14722,0,0,.35555],47:[.25,.75,.15806,0,.59111],48:[0,.64444,.13167,0,.59111],49:[0,.64444,.13167,0,.59111],50:[0,.64444,.13167,0,.59111],51:[0,.64444,.13167,0,.59111],52:[.19444,.64444,.13167,0,.59111],53:[0,.64444,.13167,0,.59111],54:[0,.64444,.13167,0,.59111],55:[.19444,.64444,.13167,0,.59111],56:[0,.64444,.13167,0,.59111],57:[0,.64444,.13167,0,.59111],58:[0,.44444,.06695,0,.35555],59:[.19444,.44444,.06695,0,.35555],61:[-.10889,.39111,.06833,0,.88555],63:[0,.69444,.11472,0,.59111],64:[0,.69444,.09208,0,.88555],65:[0,.68611,0,0,.86555],66:[0,.68611,.0992,0,.81666],67:[0,.68611,.14208,0,.82666],68:[0,.68611,.09062,0,.87555],69:[0,.68611,.11431,0,.75666],70:[0,.68611,.12903,0,.72722],71:[0,.68611,.07347,0,.89527],72:[0,.68611,.17208,0,.8961],73:[0,.68611,.15681,0,.47166],74:[0,.68611,.145,0,.61055],75:[0,.68611,.14208,0,.89499],76:[0,.68611,0,0,.69777],77:[0,.68611,.17208,0,1.07277],78:[0,.68611,.17208,0,.8961],79:[0,.68611,.09062,0,.85499],80:[0,.68611,.0992,0,.78721],81:[.19444,.68611,.09062,0,.85499],82:[0,.68611,.02559,0,.85944],83:[0,.68611,.11264,0,.64999],84:[0,.68611,.12903,0,.7961],85:[0,.68611,.17208,0,.88083],86:[0,.68611,.18625,0,.86555],87:[0,.68611,.18625,0,1.15999],88:[0,.68611,.15681,0,.86555],89:[0,.68611,.19803,0,.86555],90:[0,.68611,.14208,0,.70888],91:[.25,.75,.1875,0,.35611],93:[.25,.75,.09972,0,.35611],94:[0,.69444,.06709,0,.59111],95:[.31,.13444,.09811,0,.59111],97:[0,.44444,.09426,0,.59111],98:[0,.69444,.07861,0,.53222],99:[0,.44444,.05222,0,.53222],100:[0,
.69444,.10861,0,.59111],101:[0,.44444,.085,0,.53222],102:[.19444,.69444,.21778,0,.4],103:[.19444,.44444,.105,0,.53222],104:[0,.69444,.09426,0,.59111],105:[0,.69326,.11387,0,.35555],106:[.19444,.69326,.1672,0,.35555],107:[0,.69444,.11111,0,.53222],108:[0,.69444,.10861,0,.29666],109:[0,.44444,.09426,0,.94444],110:[0,.44444,.09426,0,.64999],111:[0,.44444,.07861,0,.59111],112:[.19444,.44444,.07861,0,.59111],113:[.19444,.44444,.105,0,.53222],114:[0,.44444,.11111,0,.50167],115:[0,.44444,.08167,0,.48694],116:[0,.63492,.09639,0,.385],117:[0,.44444,.09426,0,.62055],118:[0,.44444,.11111,0,.53222],119:[0,.44444,.11111,0,.76777],120:[0,.44444,.12583,0,.56055],121:[.19444,.44444,.105,0,.56166],122:[0,.44444,.13889,0,.49055],126:[.35,.34444,.11472,0,.59111],160:[0,0,0,0,.25],168:[0,.69444,.11473,0,.59111],176:[0,.69444,0,0,.94888],184:[.17014,0,0,0,.53222],198:[0,.68611,.11431,0,1.02277],216:[.04861,.73472,.09062,0,.88555],223:[.19444,.69444,.09736,0,.665],230:[0,.44444,.085,0,.82666],248:[.09722,.54167,.09458,0,.59111],305:[0,.44444,.09426,0,.35555],338:[0,.68611,.11431,0,1.14054],339:[0,.44444,.085,0,.82666],567:[.19444,.44444,.04611,0,.385],710:[0,.69444,.06709,0,.59111],711:[0,.63194,.08271,0,.59111],713:[0,.59444,.10444,0,.59111],714:[0,.69444,.08528,0,.59111],715:[0,.69444,0,0,.59111],728:[0,.69444,.10333,0,.59111],729:[0,.69444,.12945,0,.35555],730:[0,.69444,0,0,.94888],732:[0,.69444,.11472,0,.59111],733:[0,.69444,.11472,0,.59111],915:[0,.68611,.12903,0,.69777],916:[0,.68611,0,0,.94444],920:[0,.68611,.09062,0,.88555],923:[0,.68611,0,0,.80666],926:[0,.68611,.15092,0,.76777],928:[0,.68611,.17208,0,.8961],931:[0,.68611,.11431,0,.82666],933:[0,.68611,.10778,0,.88555],934:[0,.68611,.05632,0,.82666],936:[0,.68611,.10778,0,.88555],937:[0,.68611,.0992,0,.82666],8211:[0,.44444,.09811,0,.59111],8212:[0,.44444,.09811,0,1.18221],8216:[0,.69444,.12945,0,.35555],8217:[0,.69444,.12945,0,.35555],8220:[0,.69444,.16772,0,.62055],8221:[0,.69444,.07939,0,.62055]},"Main-Italic":{32:[0,0,0,0,.2
5],33:[0,.69444,.12417,0,.30667],34:[0,.69444,.06961,0,.51444],35:[.19444,.69444,.06616,0,.81777],37:[.05556,.75,.13639,0,.81777],38:[0,.69444,.09694,0,.76666],39:[0,.69444,.12417,0,.30667],40:[.25,.75,.16194,0,.40889],41:[.25,.75,.03694,0,.40889],42:[0,.75,.14917,0,.51111],43:[.05667,.56167,.03694,0,.76666],44:[.19444,.10556,0,0,.30667],45:[0,.43056,.02826,0,.35778],46:[0,.10556,0,0,.30667],47:[.25,.75,.16194,0,.51111],48:[0,.64444,.13556,0,.51111],49:[0,.64444,.13556,0,.51111],50:[0,.64444,.13556,0,.51111],51:[0,.64444,.13556,0,.51111],52:[.19444,.64444,.13556,0,.51111],53:[0,.64444,.13556,0,.51111],54:[0,.64444,.13556,0,.51111],55:[.19444,.64444,.13556,0,.51111],56:[0,.64444,.13556,0,.51111],57:[0,.64444,.13556,0,.51111],58:[0,.43056,.0582,0,.30667],59:[.19444,.43056,.0582,0,.30667],61:[-.13313,.36687,.06616,0,.76666],63:[0,.69444,.1225,0,.51111],64:[0,.69444,.09597,0,.76666],65:[0,.68333,0,0,.74333],66:[0,.68333,.10257,0,.70389],67:[0,.68333,.14528,0,.71555],68:[0,.68333,.09403,0,.755],69:[0,.68333,.12028,0,.67833],70:[0,.68333,.13305,0,.65277],71:[0,.68333,.08722,0,.77361],72:[0,.68333,.16389,0,.74333],73:[0,.68333,.15806,0,.38555],74:[0,.68333,.14028,0,.525],75:[0,.68333,.14528,0,.76888],76:[0,.68333,0,0,.62722],77:[0,.68333,.16389,0,.89666],78:[0,.68333,.16389,0,.74333],79:[0,.68333,.09403,0,.76666],80:[0,.68333,.10257,0,.67833],81:[.19444,.68333,.09403,0,.76666],82:[0,.68333,.03868,0,.72944],83:[0,.68333,.11972,0,.56222],84:[0,.68333,.13305,0,.71555],85:[0,.68333,.16389,0,.74333],86:[0,.68333,.18361,0,.74333],87:[0,.68333,.18361,0,.99888],88:[0,.68333,.15806,0,.74333],89:[0,.68333,.19383,0,.74333],90:[0,.68333,.14528,0,.61333],91:[.25,.75,.1875,0,.30667],93:[.25,.75,.10528,0,.30667],94:[0,.69444,.06646,0,.51111],95:[.31,.12056,.09208,0,.51111],97:[0,.43056,.07671,0,.51111],98:[0,.69444,.06312,0,.46],99:[0,.43056,.05653,0,.46],100:[0,.69444,.10333,0,.51111],101:[0,.43056,.07514,0,.46],102:[.19444,.69444,.21194,0,.30667],103:[.19444,.43056,.08847,0,.46],104:[0
,.69444,.07671,0,.51111],105:[0,.65536,.1019,0,.30667],106:[.19444,.65536,.14467,0,.30667],107:[0,.69444,.10764,0,.46],108:[0,.69444,.10333,0,.25555],109:[0,.43056,.07671,0,.81777],110:[0,.43056,.07671,0,.56222],111:[0,.43056,.06312,0,.51111],112:[.19444,.43056,.06312,0,.51111],113:[.19444,.43056,.08847,0,.46],114:[0,.43056,.10764,0,.42166],115:[0,.43056,.08208,0,.40889],116:[0,.61508,.09486,0,.33222],117:[0,.43056,.07671,0,.53666],118:[0,.43056,.10764,0,.46],119:[0,.43056,.10764,0,.66444],120:[0,.43056,.12042,0,.46389],121:[.19444,.43056,.08847,0,.48555],122:[0,.43056,.12292,0,.40889],126:[.35,.31786,.11585,0,.51111],160:[0,0,0,0,.25],168:[0,.66786,.10474,0,.51111],176:[0,.69444,0,0,.83129],184:[.17014,0,0,0,.46],198:[0,.68333,.12028,0,.88277],216:[.04861,.73194,.09403,0,.76666],223:[.19444,.69444,.10514,0,.53666],230:[0,.43056,.07514,0,.71555],248:[.09722,.52778,.09194,0,.51111],338:[0,.68333,.12028,0,.98499],339:[0,.43056,.07514,0,.71555],710:[0,.69444,.06646,0,.51111],711:[0,.62847,.08295,0,.51111],713:[0,.56167,.10333,0,.51111],714:[0,.69444,.09694,0,.51111],715:[0,.69444,0,0,.51111],728:[0,.69444,.10806,0,.51111],729:[0,.66786,.11752,0,.30667],730:[0,.69444,0,0,.83129],732:[0,.66786,.11585,0,.51111],733:[0,.69444,.1225,0,.51111],915:[0,.68333,.13305,0,.62722],916:[0,.68333,0,0,.81777],920:[0,.68333,.09403,0,.76666],923:[0,.68333,0,0,.69222],926:[0,.68333,.15294,0,.66444],928:[0,.68333,.16389,0,.74333],931:[0,.68333,.12028,0,.71555],933:[0,.68333,.11111,0,.76666],934:[0,.68333,.05986,0,.71555],936:[0,.68333,.11111,0,.76666],937:[0,.68333,.10257,0,.71555],8211:[0,.43056,.09208,0,.51111],8212:[0,.43056,.09208,0,1.02222],8216:[0,.69444,.12417,0,.30667],8217:[0,.69444,.12417,0,.30667],8220:[0,.69444,.1685,0,.51444],8221:[0,.69444,.06961,0,.51444],8463:[0,.68889,0,0,.54028]},"Main-Regular":{32:[0,0,0,0,.25],33:[0,.69444,0,0,.27778],34:[0,.69444,0,0,.5],35:[.19444,.69444,0,0,.83334],36:[.05556,.75,0,0,.5],37:[.05556,.75,0,0,.83334],38:[0,.69444,0,0,.77778],39:[0,.694
44,0,0,.27778],40:[.25,.75,0,0,.38889],41:[.25,.75,0,0,.38889],42:[0,.75,0,0,.5],43:[.08333,.58333,0,0,.77778],44:[.19444,.10556,0,0,.27778],45:[0,.43056,0,0,.33333],46:[0,.10556,0,0,.27778],47:[.25,.75,0,0,.5],48:[0,.64444,0,0,.5],49:[0,.64444,0,0,.5],50:[0,.64444,0,0,.5],51:[0,.64444,0,0,.5],52:[0,.64444,0,0,.5],53:[0,.64444,0,0,.5],54:[0,.64444,0,0,.5],55:[0,.64444,0,0,.5],56:[0,.64444,0,0,.5],57:[0,.64444,0,0,.5],58:[0,.43056,0,0,.27778],59:[.19444,.43056,0,0,.27778],60:[.0391,.5391,0,0,.77778],61:[-.13313,.36687,0,0,.77778],62:[.0391,.5391,0,0,.77778],63:[0,.69444,0,0,.47222],64:[0,.69444,0,0,.77778],65:[0,.68333,0,0,.75],66:[0,.68333,0,0,.70834],67:[0,.68333,0,0,.72222],68:[0,.68333,0,0,.76389],69:[0,.68333,0,0,.68056],70:[0,.68333,0,0,.65278],71:[0,.68333,0,0,.78472],72:[0,.68333,0,0,.75],73:[0,.68333,0,0,.36111],74:[0,.68333,0,0,.51389],75:[0,.68333,0,0,.77778],76:[0,.68333,0,0,.625],77:[0,.68333,0,0,.91667],78:[0,.68333,0,0,.75],79:[0,.68333,0,0,.77778],80:[0,.68333,0,0,.68056],81:[.19444,.68333,0,0,.77778],82:[0,.68333,0,0,.73611],83:[0,.68333,0,0,.55556],84:[0,.68333,0,0,.72222],85:[0,.68333,0,0,.75],86:[0,.68333,.01389,0,.75],87:[0,.68333,.01389,0,1.02778],88:[0,.68333,0,0,.75],89:[0,.68333,.025,0,.75],90:[0,.68333,0,0,.61111],91:[.25,.75,0,0,.27778],92:[.25,.75,0,0,.5],93:[.25,.75,0,0,.27778],94:[0,.69444,0,0,.5],95:[.31,.12056,.02778,0,.5],97:[0,.43056,0,0,.5],98:[0,.69444,0,0,.55556],99:[0,.43056,0,0,.44445],100:[0,.69444,0,0,.55556],101:[0,.43056,0,0,.44445],102:[0,.69444,.07778,0,.30556],103:[.19444,.43056,.01389,0,.5],104:[0,.69444,0,0,.55556],105:[0,.66786,0,0,.27778],106:[.19444,.66786,0,0,.30556],107:[0,.69444,0,0,.52778],108:[0,.69444,0,0,.27778],109:[0,.43056,0,0,.83334],110:[0,.43056,0,0,.55556],111:[0,.43056,0,0,.5],112:[.19444,.43056,0,0,.55556],113:[.19444,.43056,0,0,.52778],114:[0,.43056,0,0,.39167],115:[0,.43056,0,0,.39445],116:[0,.61508,0,0,.38889],117:[0,.43056,0,0,.55556],118:[0,.43056,.01389,0,.52778],119:[0,.43056,.01389,0,.72222],1
20:[0,.43056,0,0,.52778],121:[.19444,.43056,.01389,0,.52778],122:[0,.43056,0,0,.44445],123:[.25,.75,0,0,.5],124:[.25,.75,0,0,.27778],125:[.25,.75,0,0,.5],126:[.35,.31786,0,0,.5],160:[0,0,0,0,.25],163:[0,.69444,0,0,.76909],167:[.19444,.69444,0,0,.44445],168:[0,.66786,0,0,.5],172:[0,.43056,0,0,.66667],176:[0,.69444,0,0,.75],177:[.08333,.58333,0,0,.77778],182:[.19444,.69444,0,0,.61111],184:[.17014,0,0,0,.44445],198:[0,.68333,0,0,.90278],215:[.08333,.58333,0,0,.77778],216:[.04861,.73194,0,0,.77778],223:[0,.69444,0,0,.5],230:[0,.43056,0,0,.72222],247:[.08333,.58333,0,0,.77778],248:[.09722,.52778,0,0,.5],305:[0,.43056,0,0,.27778],338:[0,.68333,0,0,1.01389],339:[0,.43056,0,0,.77778],567:[.19444,.43056,0,0,.30556],710:[0,.69444,0,0,.5],711:[0,.62847,0,0,.5],713:[0,.56778,0,0,.5],714:[0,.69444,0,0,.5],715:[0,.69444,0,0,.5],728:[0,.69444,0,0,.5],729:[0,.66786,0,0,.27778],730:[0,.69444,0,0,.75],732:[0,.66786,0,0,.5],733:[0,.69444,0,0,.5],915:[0,.68333,0,0,.625],916:[0,.68333,0,0,.83334],920:[0,.68333,0,0,.77778],923:[0,.68333,0,0,.69445],926:[0,.68333,0,0,.66667],928:[0,.68333,0,0,.75],931:[0,.68333,0,0,.72222],933:[0,.68333,0,0,.77778],934:[0,.68333,0,0,.72222],936:[0,.68333,0,0,.77778],937:[0,.68333,0,0,.72222],8211:[0,.43056,.02778,0,.5],8212:[0,.43056,.02778,0,1],8216:[0,.69444,0,0,.27778],8217:[0,.69444,0,0,.27778],8220:[0,.69444,0,0,.5],8221:[0,.69444,0,0,.5],8224:[.19444,.69444,0,0,.44445],8225:[.19444,.69444,0,0,.44445],8230:[0,.123,0,0,1.172],8242:[0,.55556,0,0,.275],8407:[0,.71444,.15382,0,.5],8463:[0,.68889,0,0,.54028],8465:[0,.69444,0,0,.72222],8467:[0,.69444,0,.11111,.41667],8472:[.19444,.43056,0,.11111,.63646],8476:[0,.69444,0,0,.72222],8501:[0,.69444,0,0,.61111],8592:[-.13313,.36687,0,0,1],8593:[.19444,.69444,0,0,.5],8594:[-.13313,.36687,0,0,1],8595:[.19444,.69444,0,0,.5],8596:[-.13313,.36687,0,0,1],8597:[.25,.75,0,0,.5],8598:[.19444,.69444,0,0,1],8599:[.19444,.69444,0,0,1],8600:[.19444,.69444,0,0,1],8601:[.19444,.69444,0,0,1],8614:[.011,.511,0,0,1],8617:[.011,.
511,0,0,1.126],8618:[.011,.511,0,0,1.126],8636:[-.13313,.36687,0,0,1],8637:[-.13313,.36687,0,0,1],8640:[-.13313,.36687,0,0,1],8641:[-.13313,.36687,0,0,1],8652:[.011,.671,0,0,1],8656:[-.13313,.36687,0,0,1],8657:[.19444,.69444,0,0,.61111],8658:[-.13313,.36687,0,0,1],8659:[.19444,.69444,0,0,.61111],8660:[-.13313,.36687,0,0,1],8661:[.25,.75,0,0,.61111],8704:[0,.69444,0,0,.55556],8706:[0,.69444,.05556,.08334,.5309],8707:[0,.69444,0,0,.55556],8709:[.05556,.75,0,0,.5],8711:[0,.68333,0,0,.83334],8712:[.0391,.5391,0,0,.66667],8715:[.0391,.5391,0,0,.66667],8722:[.08333,.58333,0,0,.77778],8723:[.08333,.58333,0,0,.77778],8725:[.25,.75,0,0,.5],8726:[.25,.75,0,0,.5],8727:[-.03472,.46528,0,0,.5],8728:[-.05555,.44445,0,0,.5],8729:[-.05555,.44445,0,0,.5],8730:[.2,.8,0,0,.83334],8733:[0,.43056,0,0,.77778],8734:[0,.43056,0,0,1],8736:[0,.69224,0,0,.72222],8739:[.25,.75,0,0,.27778],8741:[.25,.75,0,0,.5],8743:[0,.55556,0,0,.66667],8744:[0,.55556,0,0,.66667],8745:[0,.55556,0,0,.66667],8746:[0,.55556,0,0,.66667],8747:[.19444,.69444,.11111,0,.41667],8764:[-.13313,.36687,0,0,.77778],8768:[.19444,.69444,0,0,.27778],8771:[-.03625,.46375,0,0,.77778],8773:[-.022,.589,0,0,.778],8776:[-.01688,.48312,0,0,.77778],8781:[-.03625,.46375,0,0,.77778],8784:[-.133,.673,0,0,.778],8801:[-.03625,.46375,0,0,.77778],8804:[.13597,.63597,0,0,.77778],8805:[.13597,.63597,0,0,.77778],8810:[.0391,.5391,0,0,1],8811:[.0391,.5391,0,0,1],8826:[.0391,.5391,0,0,.77778],8827:[.0391,.5391,0,0,.77778],8834:[.0391,.5391,0,0,.77778],8835:[.0391,.5391,0,0,.77778],8838:[.13597,.63597,0,0,.77778],8839:[.13597,.63597,0,0,.77778],8846:[0,.55556,0,0,.66667],8849:[.13597,.63597,0,0,.77778],8850:[.13597,.63597,0,0,.77778],8851:[0,.55556,0,0,.66667],8852:[0,.55556,0,0,.66667],8853:[.08333,.58333,0,0,.77778],8854:[.08333,.58333,0,0,.77778],8855:[.08333,.58333,0,0,.77778],8856:[.08333,.58333,0,0,.77778],8857:[.08333,.58333,0,0,.77778],8866:[0,.69444,0,0,.61111],8867:[0,.69444,0,0,.61111],8868:[0,.69444,0,0,.77778],8869:[0,.69444,0,0,.7777
8],8872:[.249,.75,0,0,.867],8900:[-.05555,.44445,0,0,.5],8901:[-.05555,.44445,0,0,.27778],8902:[-.03472,.46528,0,0,.5],8904:[.005,.505,0,0,.9],8942:[.03,.903,0,0,.278],8943:[-.19,.313,0,0,1.172],8945:[-.1,.823,0,0,1.282],8968:[.25,.75,0,0,.44445],8969:[.25,.75,0,0,.44445],8970:[.25,.75,0,0,.44445],8971:[.25,.75,0,0,.44445],8994:[-.14236,.35764,0,0,1],8995:[-.14236,.35764,0,0,1],9136:[.244,.744,0,0,.412],9137:[.244,.745,0,0,.412],9651:[.19444,.69444,0,0,.88889],9657:[-.03472,.46528,0,0,.5],9661:[.19444,.69444,0,0,.88889],9667:[-.03472,.46528,0,0,.5],9711:[.19444,.69444,0,0,1],9824:[.12963,.69444,0,0,.77778],9825:[.12963,.69444,0,0,.77778],9826:[.12963,.69444,0,0,.77778],9827:[.12963,.69444,0,0,.77778],9837:[0,.75,0,0,.38889],9838:[.19444,.69444,0,0,.38889],9839:[.19444,.69444,0,0,.38889],10216:[.25,.75,0,0,.38889],10217:[.25,.75,0,0,.38889],10222:[.244,.744,0,0,.412],10223:[.244,.745,0,0,.412],10229:[.011,.511,0,0,1.609],10230:[.011,.511,0,0,1.638],10231:[.011,.511,0,0,1.859],10232:[.024,.525,0,0,1.609],10233:[.024,.525,0,0,1.638],10234:[.024,.525,0,0,1.858],10236:[.011,.511,0,0,1.638],10815:[0,.68333,0,0,.75],10927:[.13597,.63597,0,0,.77778],10928:[.13597,.63597,0,0,.77778],57376:[.19444,.69444,0,0,0]},"Math-BoldItalic":{32:[0,0,0,0,.25],48:[0,.44444,0,0,.575],49:[0,.44444,0,0,.575],50:[0,.44444,0,0,.575],51:[.19444,.44444,0,0,.575],52:[.19444,.44444,0,0,.575],53:[.19444,.44444,0,0,.575],54:[0,.64444,0,0,.575],55:[.19444,.44444,0,0,.575],56:[0,.64444,0,0,.575],57:[.19444,.44444,0,0,.575],65:[0,.68611,0,0,.86944],66:[0,.68611,.04835,0,.8664],67:[0,.68611,.06979,0,.81694],68:[0,.68611,.03194,0,.93812],69:[0,.68611,.05451,0,.81007],70:[0,.68611,.15972,0,.68889],71:[0,.68611,0,0,.88673],72:[0,.68611,.08229,0,.98229],73:[0,.68611,.07778,0,.51111],74:[0,.68611,.10069,0,.63125],75:[0,.68611,.06979,0,.97118],76:[0,.68611,0,0,.75555],77:[0,.68611,.11424,0,1.14201],78:[0,.68611,.11424,0,.95034],79:[0,.68611,.03194,0,.83666],80:[0,.68611,.15972,0,.72309],81:[.19444,.68611,0,0,
.86861],82:[0,.68611,.00421,0,.87235],83:[0,.68611,.05382,0,.69271],84:[0,.68611,.15972,0,.63663],85:[0,.68611,.11424,0,.80027],86:[0,.68611,.25555,0,.67778],87:[0,.68611,.15972,0,1.09305],88:[0,.68611,.07778,0,.94722],89:[0,.68611,.25555,0,.67458],90:[0,.68611,.06979,0,.77257],97:[0,.44444,0,0,.63287],98:[0,.69444,0,0,.52083],99:[0,.44444,0,0,.51342],100:[0,.69444,0,0,.60972],101:[0,.44444,0,0,.55361],102:[.19444,.69444,.11042,0,.56806],103:[.19444,.44444,.03704,0,.5449],104:[0,.69444,0,0,.66759],105:[0,.69326,0,0,.4048],106:[.19444,.69326,.0622,0,.47083],107:[0,.69444,.01852,0,.6037],108:[0,.69444,.0088,0,.34815],109:[0,.44444,0,0,1.0324],110:[0,.44444,0,0,.71296],111:[0,.44444,0,0,.58472],112:[.19444,.44444,0,0,.60092],113:[.19444,.44444,.03704,0,.54213],114:[0,.44444,.03194,0,.5287],115:[0,.44444,0,0,.53125],116:[0,.63492,0,0,.41528],117:[0,.44444,0,0,.68102],118:[0,.44444,.03704,0,.56666],119:[0,.44444,.02778,0,.83148],120:[0,.44444,0,0,.65903],121:[.19444,.44444,.03704,0,.59028],122:[0,.44444,.04213,0,.55509],160:[0,0,0,0,.25],915:[0,.68611,.15972,0,.65694],916:[0,.68611,0,0,.95833],920:[0,.68611,.03194,0,.86722],923:[0,.68611,0,0,.80555],926:[0,.68611,.07458,0,.84125],928:[0,.68611,.08229,0,.98229],931:[0,.68611,.05451,0,.88507],933:[0,.68611,.15972,0,.67083],934:[0,.68611,0,0,.76666],936:[0,.68611,.11653,0,.71402],937:[0,.68611,.04835,0,.8789],945:[0,.44444,0,0,.76064],946:[.19444,.69444,.03403,0,.65972],947:[.19444,.44444,.06389,0,.59003],948:[0,.69444,.03819,0,.52222],949:[0,.44444,0,0,.52882],950:[.19444,.69444,.06215,0,.50833],951:[.19444,.44444,.03704,0,.6],952:[0,.69444,.03194,0,.5618],953:[0,.44444,0,0,.41204],954:[0,.44444,0,0,.66759],955:[0,.69444,0,0,.67083],956:[.19444,.44444,0,0,.70787],957:[0,.44444,.06898,0,.57685],958:[.19444,.69444,.03021,0,.50833],959:[0,.44444,0,0,.58472],960:[0,.44444,.03704,0,.68241],961:[.19444,.44444,0,0,.6118],962:[.09722,.44444,.07917,0,.42361],963:[0,.44444,.03704,0,.68588],964:[0,.44444,.13472,0,.52083],965:[0,.4444
4,.03704,0,.63055],966:[.19444,.44444,0,0,.74722],967:[.19444,.44444,0,0,.71805],968:[.19444,.69444,.03704,0,.75833],969:[0,.44444,.03704,0,.71782],977:[0,.69444,0,0,.69155],981:[.19444,.69444,0,0,.7125],982:[0,.44444,.03194,0,.975],1009:[.19444,.44444,0,0,.6118],1013:[0,.44444,0,0,.48333],57649:[0,.44444,0,0,.39352],57911:[.19444,.44444,0,0,.43889]},"Math-Italic":{32:[0,0,0,0,.25],48:[0,.43056,0,0,.5],49:[0,.43056,0,0,.5],50:[0,.43056,0,0,.5],51:[.19444,.43056,0,0,.5],52:[.19444,.43056,0,0,.5],53:[.19444,.43056,0,0,.5],54:[0,.64444,0,0,.5],55:[.19444,.43056,0,0,.5],56:[0,.64444,0,0,.5],57:[.19444,.43056,0,0,.5],65:[0,.68333,0,.13889,.75],66:[0,.68333,.05017,.08334,.75851],67:[0,.68333,.07153,.08334,.71472],68:[0,.68333,.02778,.05556,.82792],69:[0,.68333,.05764,.08334,.7382],70:[0,.68333,.13889,.08334,.64306],71:[0,.68333,0,.08334,.78625],72:[0,.68333,.08125,.05556,.83125],73:[0,.68333,.07847,.11111,.43958],74:[0,.68333,.09618,.16667,.55451],75:[0,.68333,.07153,.05556,.84931],76:[0,.68333,0,.02778,.68056],77:[0,.68333,.10903,.08334,.97014],78:[0,.68333,.10903,.08334,.80347],79:[0,.68333,.02778,.08334,.76278],80:[0,.68333,.13889,.08334,.64201],81:[.19444,.68333,0,.08334,.79056],82:[0,.68333,.00773,.08334,.75929],83:[0,.68333,.05764,.08334,.6132],84:[0,.68333,.13889,.08334,.58438],85:[0,.68333,.10903,.02778,.68278],86:[0,.68333,.22222,0,.58333],87:[0,.68333,.13889,0,.94445],88:[0,.68333,.07847,.08334,.82847],89:[0,.68333,.22222,0,.58056],90:[0,.68333,.07153,.08334,.68264],97:[0,.43056,0,0,.52859],98:[0,.69444,0,0,.42917],99:[0,.43056,0,.05556,.43276],100:[0,.69444,0,.16667,.52049],101:[0,.43056,0,.05556,.46563],102:[.19444,.69444,.10764,.16667,.48959],103:[.19444,.43056,.03588,.02778,.47697],104:[0,.69444,0,0,.57616],105:[0,.65952,0,0,.34451],106:[.19444,.65952,.05724,0,.41181],107:[0,.69444,.03148,0,.5206],108:[0,.69444,.01968,.08334,.29838],109:[0,.43056,0,0,.87801],110:[0,.43056,0,0,.60023],111:[0,.43056,0,.05556,.48472],112:[.19444,.43056,0,.08334,.50313],113:[.19
444,.43056,.03588,.08334,.44641],114:[0,.43056,.02778,.05556,.45116],115:[0,.43056,0,.05556,.46875],116:[0,.61508,0,.08334,.36111],117:[0,.43056,0,.02778,.57246],118:[0,.43056,.03588,.02778,.48472],119:[0,.43056,.02691,.08334,.71592],120:[0,.43056,0,.02778,.57153],121:[.19444,.43056,.03588,.05556,.49028],122:[0,.43056,.04398,.05556,.46505],160:[0,0,0,0,.25],915:[0,.68333,.13889,.08334,.61528],916:[0,.68333,0,.16667,.83334],920:[0,.68333,.02778,.08334,.76278],923:[0,.68333,0,.16667,.69445],926:[0,.68333,.07569,.08334,.74236],928:[0,.68333,.08125,.05556,.83125],931:[0,.68333,.05764,.08334,.77986],933:[0,.68333,.13889,.05556,.58333],934:[0,.68333,0,.08334,.66667],936:[0,.68333,.11,.05556,.61222],937:[0,.68333,.05017,.08334,.7724],945:[0,.43056,.0037,.02778,.6397],946:[.19444,.69444,.05278,.08334,.56563],947:[.19444,.43056,.05556,0,.51773],948:[0,.69444,.03785,.05556,.44444],949:[0,.43056,0,.08334,.46632],950:[.19444,.69444,.07378,.08334,.4375],951:[.19444,.43056,.03588,.05556,.49653],952:[0,.69444,.02778,.08334,.46944],953:[0,.43056,0,.05556,.35394],954:[0,.43056,0,0,.57616],955:[0,.69444,0,0,.58334],956:[.19444,.43056,0,.02778,.60255],957:[0,.43056,.06366,.02778,.49398],958:[.19444,.69444,.04601,.11111,.4375],959:[0,.43056,0,.05556,.48472],960:[0,.43056,.03588,0,.57003],961:[.19444,.43056,0,.08334,.51702],962:[.09722,.43056,.07986,.08334,.36285],963:[0,.43056,.03588,0,.57141],964:[0,.43056,.1132,.02778,.43715],965:[0,.43056,.03588,.02778,.54028],966:[.19444,.43056,0,.08334,.65417],967:[.19444,.43056,0,.05556,.62569],968:[.19444,.69444,.03588,.11111,.65139],969:[0,.43056,.03588,0,.62245],977:[0,.69444,0,.08334,.59144],981:[.19444,.69444,0,.08334,.59583],982:[0,.43056,.02778,0,.82813],1009:[.19444,.43056,0,.08334,.51702],1013:[0,.43056,0,.05556,.4059],57649:[0,.43056,0,.02778,.32246],57911:[.19444,.43056,0,.08334,.38403]},"SansSerif-Bold":{32:[0,0,0,0,.25],33:[0,.69444,0,0,.36667],34:[0,.69444,0,0,.55834],35:[.19444,.69444,0,0,.91667],36:[.05556,.75,0,0,.55],37:[.05556,
.75,0,0,1.02912],38:[0,.69444,0,0,.83056],39:[0,.69444,0,0,.30556],40:[.25,.75,0,0,.42778],41:[.25,.75,0,0,.42778],42:[0,.75,0,0,.55],43:[.11667,.61667,0,0,.85556],44:[.10556,.13056,0,0,.30556],45:[0,.45833,0,0,.36667],46:[0,.13056,0,0,.30556],47:[.25,.75,0,0,.55],48:[0,.69444,0,0,.55],49:[0,.69444,0,0,.55],50:[0,.69444,0,0,.55],51:[0,.69444,0,0,.55],52:[0,.69444,0,0,.55],53:[0,.69444,0,0,.55],54:[0,.69444,0,0,.55],55:[0,.69444,0,0,.55],56:[0,.69444,0,0,.55],57:[0,.69444,0,0,.55],58:[0,.45833,0,0,.30556],59:[.10556,.45833,0,0,.30556],61:[-.09375,.40625,0,0,.85556],63:[0,.69444,0,0,.51945],64:[0,.69444,0,0,.73334],65:[0,.69444,0,0,.73334],66:[0,.69444,0,0,.73334],67:[0,.69444,0,0,.70278],68:[0,.69444,0,0,.79445],69:[0,.69444,0,0,.64167],70:[0,.69444,0,0,.61111],71:[0,.69444,0,0,.73334],72:[0,.69444,0,0,.79445],73:[0,.69444,0,0,.33056],74:[0,.69444,0,0,.51945],75:[0,.69444,0,0,.76389],76:[0,.69444,0,0,.58056],77:[0,.69444,0,0,.97778],78:[0,.69444,0,0,.79445],79:[0,.69444,0,0,.79445],80:[0,.69444,0,0,.70278],81:[.10556,.69444,0,0,.79445],82:[0,.69444,0,0,.70278],83:[0,.69444,0,0,.61111],84:[0,.69444,0,0,.73334],85:[0,.69444,0,0,.76389],86:[0,.69444,.01528,0,.73334],87:[0,.69444,.01528,0,1.03889],88:[0,.69444,0,0,.73334],89:[0,.69444,.0275,0,.73334],90:[0,.69444,0,0,.67223],91:[.25,.75,0,0,.34306],93:[.25,.75,0,0,.34306],94:[0,.69444,0,0,.55],95:[.35,.10833,.03056,0,.55],97:[0,.45833,0,0,.525],98:[0,.69444,0,0,.56111],99:[0,.45833,0,0,.48889],100:[0,.69444,0,0,.56111],101:[0,.45833,0,0,.51111],102:[0,.69444,.07639,0,.33611],103:[.19444,.45833,.01528,0,.55],104:[0,.69444,0,0,.56111],105:[0,.69444,0,0,.25556],106:[.19444,.69444,0,0,.28611],107:[0,.69444,0,0,.53056],108:[0,.69444,0,0,.25556],109:[0,.45833,0,0,.86667],110:[0,.45833,0,0,.56111],111:[0,.45833,0,0,.55],112:[.19444,.45833,0,0,.56111],113:[.19444,.45833,0,0,.56111],114:[0,.45833,.01528,0,.37222],115:[0,.45833,0,0,.42167],116:[0,.58929,0,0,.40417],117:[0,.45833,0,0,.56111],118:[0,.45833,.01528,0,.5],119:[0,.45833
,.01528,0,.74445],120:[0,.45833,0,0,.5],121:[.19444,.45833,.01528,0,.5],122:[0,.45833,0,0,.47639],126:[.35,.34444,0,0,.55],160:[0,0,0,0,.25],168:[0,.69444,0,0,.55],176:[0,.69444,0,0,.73334],180:[0,.69444,0,0,.55],184:[.17014,0,0,0,.48889],305:[0,.45833,0,0,.25556],567:[.19444,.45833,0,0,.28611],710:[0,.69444,0,0,.55],711:[0,.63542,0,0,.55],713:[0,.63778,0,0,.55],728:[0,.69444,0,0,.55],729:[0,.69444,0,0,.30556],730:[0,.69444,0,0,.73334],732:[0,.69444,0,0,.55],733:[0,.69444,0,0,.55],915:[0,.69444,0,0,.58056],916:[0,.69444,0,0,.91667],920:[0,.69444,0,0,.85556],923:[0,.69444,0,0,.67223],926:[0,.69444,0,0,.73334],928:[0,.69444,0,0,.79445],931:[0,.69444,0,0,.79445],933:[0,.69444,0,0,.85556],934:[0,.69444,0,0,.79445],936:[0,.69444,0,0,.85556],937:[0,.69444,0,0,.79445],8211:[0,.45833,.03056,0,.55],8212:[0,.45833,.03056,0,1.10001],8216:[0,.69444,0,0,.30556],8217:[0,.69444,0,0,.30556],8220:[0,.69444,0,0,.55834],8221:[0,.69444,0,0,.55834]},"SansSerif-Italic":{32:[0,0,0,0,.25],33:[0,.69444,.05733,0,.31945],34:[0,.69444,.00316,0,.5],35:[.19444,.69444,.05087,0,.83334],36:[.05556,.75,.11156,0,.5],37:[.05556,.75,.03126,0,.83334],38:[0,.69444,.03058,0,.75834],39:[0,.69444,.07816,0,.27778],40:[.25,.75,.13164,0,.38889],41:[.25,.75,.02536,0,.38889],42:[0,.75,.11775,0,.5],43:[.08333,.58333,.02536,0,.77778],44:[.125,.08333,0,0,.27778],45:[0,.44444,.01946,0,.33333],46:[0,.08333,0,0,.27778],47:[.25,.75,.13164,0,.5],48:[0,.65556,.11156,0,.5],49:[0,.65556,.11156,0,.5],50:[0,.65556,.11156,0,.5],51:[0,.65556,.11156,0,.5],52:[0,.65556,.11156,0,.5],53:[0,.65556,.11156,0,.5],54:[0,.65556,.11156,0,.5],55:[0,.65556,.11156,0,.5],56:[0,.65556,.11156,0,.5],57:[0,.65556,.11156,0,.5],58:[0,.44444,.02502,0,.27778],59:[.125,.44444,.02502,0,.27778],61:[-.13,.37,.05087,0,.77778],63:[0,.69444,.11809,0,.47222],64:[0,.69444,.07555,0,.66667],65:[0,.69444,0,0,.66667],66:[0,.69444,.08293,0,.66667],67:[0,.69444,.11983,0,.63889],68:[0,.69444,.07555,0,.72223],69:[0,.69444,.11983,0,.59722],70:[0,.69444,.13372,0,.5694
5],71:[0,.69444,.11983,0,.66667],72:[0,.69444,.08094,0,.70834],73:[0,.69444,.13372,0,.27778],74:[0,.69444,.08094,0,.47222],75:[0,.69444,.11983,0,.69445],76:[0,.69444,0,0,.54167],77:[0,.69444,.08094,0,.875],78:[0,.69444,.08094,0,.70834],79:[0,.69444,.07555,0,.73611],80:[0,.69444,.08293,0,.63889],81:[.125,.69444,.07555,0,.73611],82:[0,.69444,.08293,0,.64584],83:[0,.69444,.09205,0,.55556],84:[0,.69444,.13372,0,.68056],85:[0,.69444,.08094,0,.6875],86:[0,.69444,.1615,0,.66667],87:[0,.69444,.1615,0,.94445],88:[0,.69444,.13372,0,.66667],89:[0,.69444,.17261,0,.66667],90:[0,.69444,.11983,0,.61111],91:[.25,.75,.15942,0,.28889],93:[.25,.75,.08719,0,.28889],94:[0,.69444,.0799,0,.5],95:[.35,.09444,.08616,0,.5],97:[0,.44444,.00981,0,.48056],98:[0,.69444,.03057,0,.51667],99:[0,.44444,.08336,0,.44445],100:[0,.69444,.09483,0,.51667],101:[0,.44444,.06778,0,.44445],102:[0,.69444,.21705,0,.30556],103:[.19444,.44444,.10836,0,.5],104:[0,.69444,.01778,0,.51667],105:[0,.67937,.09718,0,.23889],106:[.19444,.67937,.09162,0,.26667],107:[0,.69444,.08336,0,.48889],108:[0,.69444,.09483,0,.23889],109:[0,.44444,.01778,0,.79445],110:[0,.44444,.01778,0,.51667],111:[0,.44444,.06613,0,.5],112:[.19444,.44444,.0389,0,.51667],113:[.19444,.44444,.04169,0,.51667],114:[0,.44444,.10836,0,.34167],115:[0,.44444,.0778,0,.38333],116:[0,.57143,.07225,0,.36111],117:[0,.44444,.04169,0,.51667],118:[0,.44444,.10836,0,.46111],119:[0,.44444,.10836,0,.68334],120:[0,.44444,.09169,0,.46111],121:[.19444,.44444,.10836,0,.46111],122:[0,.44444,.08752,0,.43472],126:[.35,.32659,.08826,0,.5],160:[0,0,0,0,.25],168:[0,.67937,.06385,0,.5],176:[0,.69444,0,0,.73752],184:[.17014,0,0,0,.44445],305:[0,.44444,.04169,0,.23889],567:[.19444,.44444,.04169,0,.26667],710:[0,.69444,.0799,0,.5],711:[0,.63194,.08432,0,.5],713:[0,.60889,.08776,0,.5],714:[0,.69444,.09205,0,.5],715:[0,.69444,0,0,.5],728:[0,.69444,.09483,0,.5],729:[0,.67937,.07774,0,.27778],730:[0,.69444,0,0,.73752],732:[0,.67659,.08826,0,.5],733:[0,.69444,.09205,0,.5],915:[0,.69444,.
13372,0,.54167],916:[0,.69444,0,0,.83334],920:[0,.69444,.07555,0,.77778],923:[0,.69444,0,0,.61111],926:[0,.69444,.12816,0,.66667],928:[0,.69444,.08094,0,.70834],931:[0,.69444,.11983,0,.72222],933:[0,.69444,.09031,0,.77778],934:[0,.69444,.04603,0,.72222],936:[0,.69444,.09031,0,.77778],937:[0,.69444,.08293,0,.72222],8211:[0,.44444,.08616,0,.5],8212:[0,.44444,.08616,0,1],8216:[0,.69444,.07816,0,.27778],8217:[0,.69444,.07816,0,.27778],8220:[0,.69444,.14205,0,.5],8221:[0,.69444,.00316,0,.5]},"SansSerif-Regular":{32:[0,0,0,0,.25],33:[0,.69444,0,0,.31945],34:[0,.69444,0,0,.5],35:[.19444,.69444,0,0,.83334],36:[.05556,.75,0,0,.5],37:[.05556,.75,0,0,.83334],38:[0,.69444,0,0,.75834],39:[0,.69444,0,0,.27778],40:[.25,.75,0,0,.38889],41:[.25,.75,0,0,.38889],42:[0,.75,0,0,.5],43:[.08333,.58333,0,0,.77778],44:[.125,.08333,0,0,.27778],45:[0,.44444,0,0,.33333],46:[0,.08333,0,0,.27778],47:[.25,.75,0,0,.5],48:[0,.65556,0,0,.5],49:[0,.65556,0,0,.5],50:[0,.65556,0,0,.5],51:[0,.65556,0,0,.5],52:[0,.65556,0,0,.5],53:[0,.65556,0,0,.5],54:[0,.65556,0,0,.5],55:[0,.65556,0,0,.5],56:[0,.65556,0,0,.5],57:[0,.65556,0,0,.5],58:[0,.44444,0,0,.27778],59:[.125,.44444,0,0,.27778],61:[-.13,.37,0,0,.77778],63:[0,.69444,0,0,.47222],64:[0,.69444,0,0,.66667],65:[0,.69444,0,0,.66667],66:[0,.69444,0,0,.66667],67:[0,.69444,0,0,.63889],68:[0,.69444,0,0,.72223],69:[0,.69444,0,0,.59722],70:[0,.69444,0,0,.56945],71:[0,.69444,0,0,.66667],72:[0,.69444,0,0,.70834],73:[0,.69444,0,0,.27778],74:[0,.69444,0,0,.47222],75:[0,.69444,0,0,.69445],76:[0,.69444,0,0,.54167],77:[0,.69444,0,0,.875],78:[0,.69444,0,0,.70834],79:[0,.69444,0,0,.73611],80:[0,.69444,0,0,.63889],81:[.125,.69444,0,0,.73611],82:[0,.69444,0,0,.64584],83:[0,.69444,0,0,.55556],84:[0,.69444,0,0,.68056],85:[0,.69444,0,0,.6875],86:[0,.69444,.01389,0,.66667],87:[0,.69444,.01389,0,.94445],88:[0,.69444,0,0,.66667],89:[0,.69444,.025,0,.66667],90:[0,.69444,0,0,.61111],91:[.25,.75,0,0,.28889],93:[.25,.75,0,0,.28889],94:[0,.69444,0,0,.5],95:[.35,.09444,.02778,0,.5],97
:[0,.44444,0,0,.48056],98:[0,.69444,0,0,.51667],99:[0,.44444,0,0,.44445],100:[0,.69444,0,0,.51667],101:[0,.44444,0,0,.44445],102:[0,.69444,.06944,0,.30556],103:[.19444,.44444,.01389,0,.5],104:[0,.69444,0,0,.51667],105:[0,.67937,0,0,.23889],106:[.19444,.67937,0,0,.26667],107:[0,.69444,0,0,.48889],108:[0,.69444,0,0,.23889],109:[0,.44444,0,0,.79445],110:[0,.44444,0,0,.51667],111:[0,.44444,0,0,.5],112:[.19444,.44444,0,0,.51667],113:[.19444,.44444,0,0,.51667],114:[0,.44444,.01389,0,.34167],115:[0,.44444,0,0,.38333],116:[0,.57143,0,0,.36111],117:[0,.44444,0,0,.51667],118:[0,.44444,.01389,0,.46111],119:[0,.44444,.01389,0,.68334],120:[0,.44444,0,0,.46111],121:[.19444,.44444,.01389,0,.46111],122:[0,.44444,0,0,.43472],126:[.35,.32659,0,0,.5],160:[0,0,0,0,.25],168:[0,.67937,0,0,.5],176:[0,.69444,0,0,.66667],184:[.17014,0,0,0,.44445],305:[0,.44444,0,0,.23889],567:[.19444,.44444,0,0,.26667],710:[0,.69444,0,0,.5],711:[0,.63194,0,0,.5],713:[0,.60889,0,0,.5],714:[0,.69444,0,0,.5],715:[0,.69444,0,0,.5],728:[0,.69444,0,0,.5],729:[0,.67937,0,0,.27778],730:[0,.69444,0,0,.66667],732:[0,.67659,0,0,.5],733:[0,.69444,0,0,.5],915:[0,.69444,0,0,.54167],916:[0,.69444,0,0,.83334],920:[0,.69444,0,0,.77778],923:[0,.69444,0,0,.61111],926:[0,.69444,0,0,.66667],928:[0,.69444,0,0,.70834],931:[0,.69444,0,0,.72222],933:[0,.69444,0,0,.77778],934:[0,.69444,0,0,.72222],936:[0,.69444,0,0,.77778],937:[0,.69444,0,0,.72222],8211:[0,.44444,.02778,0,.5],8212:[0,.44444,.02778,0,1],8216:[0,.69444,0,0,.27778],8217:[0,.69444,0,0,.27778],8220:[0,.69444,0,0,.5],8221:[0,.69444,0,0,.5]},"Script-Regular":{32:[0,0,0,0,.25],65:[0,.7,.22925,0,.80253],66:[0,.7,.04087,0,.90757],67:[0,.7,.1689,0,.66619],68:[0,.7,.09371,0,.77443],69:[0,.7,.18583,0,.56162],70:[0,.7,.13634,0,.89544],71:[0,.7,.17322,0,.60961],72:[0,.7,.29694,0,.96919],73:[0,.7,.19189,0,.80907],74:[.27778,.7,.19189,0,1.05159],75:[0,.7,.31259,0,.91364],76:[0,.7,.19189,0,.87373],77:[0,.7,.15981,0,1.08031],78:[0,.7,.3525,0,.9015],79:[0,.7,.08078,0,.73787],80:[0,.7,.
08078,0,1.01262],81:[0,.7,.03305,0,.88282],82:[0,.7,.06259,0,.85],83:[0,.7,.19189,0,.86767],84:[0,.7,.29087,0,.74697],85:[0,.7,.25815,0,.79996],86:[0,.7,.27523,0,.62204],87:[0,.7,.27523,0,.80532],88:[0,.7,.26006,0,.94445],89:[0,.7,.2939,0,.70961],90:[0,.7,.24037,0,.8212],160:[0,0,0,0,.25]},"Size1-Regular":{32:[0,0,0,0,.25],40:[.35001,.85,0,0,.45834],41:[.35001,.85,0,0,.45834],47:[.35001,.85,0,0,.57778],91:[.35001,.85,0,0,.41667],92:[.35001,.85,0,0,.57778],93:[.35001,.85,0,0,.41667],123:[.35001,.85,0,0,.58334],125:[.35001,.85,0,0,.58334],160:[0,0,0,0,.25],710:[0,.72222,0,0,.55556],732:[0,.72222,0,0,.55556],770:[0,.72222,0,0,.55556],771:[0,.72222,0,0,.55556],8214:[-99e-5,.601,0,0,.77778],8593:[1e-5,.6,0,0,.66667],8595:[1e-5,.6,0,0,.66667],8657:[1e-5,.6,0,0,.77778],8659:[1e-5,.6,0,0,.77778],8719:[.25001,.75,0,0,.94445],8720:[.25001,.75,0,0,.94445],8721:[.25001,.75,0,0,1.05556],8730:[.35001,.85,0,0,1],8739:[-.00599,.606,0,0,.33333],8741:[-.00599,.606,0,0,.55556],8747:[.30612,.805,.19445,0,.47222],8748:[.306,.805,.19445,0,.47222],8749:[.306,.805,.19445,0,.47222],8750:[.30612,.805,.19445,0,.47222],8896:[.25001,.75,0,0,.83334],8897:[.25001,.75,0,0,.83334],8898:[.25001,.75,0,0,.83334],8899:[.25001,.75,0,0,.83334],8968:[.35001,.85,0,0,.47222],8969:[.35001,.85,0,0,.47222],8970:[.35001,.85,0,0,.47222],8971:[.35001,.85,0,0,.47222],9168:[-99e-5,.601,0,0,.66667],10216:[.35001,.85,0,0,.47222],10217:[.35001,.85,0,0,.47222],10752:[.25001,.75,0,0,1.11111],10753:[.25001,.75,0,0,1.11111],10754:[.25001,.75,0,0,1.11111],10756:[.25001,.75,0,0,.83334],10758:[.25001,.75,0,0,.83334]},"Size2-Regular":{32:[0,0,0,0,.25],40:[.65002,1.15,0,0,.59722],41:[.65002,1.15,0,0,.59722],47:[.65002,1.15,0,0,.81111],91:[.65002,1.15,0,0,.47222],92:[.65002,1.15,0,0,.81111],93:[.65002,1.15,0,0,.47222],123:[.65002,1.15,0,0,.66667],125:[.65002,1.15,0,0,.66667],160:[0,0,0,0,.25],710:[0,.75,0,0,1],732:[0,.75,0,0,1],770:[0,.75,0,0,1],771:[0,.75,0,0,1],8719:[.55001,1.05,0,0,1.27778],8720:[.55001,1.05,0,0,1.27778],872
1:[.55001,1.05,0,0,1.44445],8730:[.65002,1.15,0,0,1],8747:[.86225,1.36,.44445,0,.55556],8748:[.862,1.36,.44445,0,.55556],8749:[.862,1.36,.44445,0,.55556],8750:[.86225,1.36,.44445,0,.55556],8896:[.55001,1.05,0,0,1.11111],8897:[.55001,1.05,0,0,1.11111],8898:[.55001,1.05,0,0,1.11111],8899:[.55001,1.05,0,0,1.11111],8968:[.65002,1.15,0,0,.52778],8969:[.65002,1.15,0,0,.52778],8970:[.65002,1.15,0,0,.52778],8971:[.65002,1.15,0,0,.52778],10216:[.65002,1.15,0,0,.61111],10217:[.65002,1.15,0,0,.61111],10752:[.55001,1.05,0,0,1.51112],10753:[.55001,1.05,0,0,1.51112],10754:[.55001,1.05,0,0,1.51112],10756:[.55001,1.05,0,0,1.11111],10758:[.55001,1.05,0,0,1.11111]},"Size3-Regular":{32:[0,0,0,0,.25],40:[.95003,1.45,0,0,.73611],41:[.95003,1.45,0,0,.73611],47:[.95003,1.45,0,0,1.04445],91:[.95003,1.45,0,0,.52778],92:[.95003,1.45,0,0,1.04445],93:[.95003,1.45,0,0,.52778],123:[.95003,1.45,0,0,.75],125:[.95003,1.45,0,0,.75],160:[0,0,0,0,.25],710:[0,.75,0,0,1.44445],732:[0,.75,0,0,1.44445],770:[0,.75,0,0,1.44445],771:[0,.75,0,0,1.44445],8730:[.95003,1.45,0,0,1],8968:[.95003,1.45,0,0,.58334],8969:[.95003,1.45,0,0,.58334],8970:[.95003,1.45,0,0,.58334],8971:[.95003,1.45,0,0,.58334],10216:[.95003,1.45,0,0,.75],10217:[.95003,1.45,0,0,.75]},"Size4-Regular":{32:[0,0,0,0,.25],40:[1.25003,1.75,0,0,.79167],41:[1.25003,1.75,0,0,.79167],47:[1.25003,1.75,0,0,1.27778],91:[1.25003,1.75,0,0,.58334],92:[1.25003,1.75,0,0,1.27778],93:[1.25003,1.75,0,0,.58334],123:[1.25003,1.75,0,0,.80556],125:[1.25003,1.75,0,0,.80556],160:[0,0,0,0,.25],710:[0,.825,0,0,1.8889],732:[0,.825,0,0,1.8889],770:[0,.825,0,0,1.8889],771:[0,.825,0,0,1.8889],8730:[1.25003,1.75,0,0,1],8968:[1.25003,1.75,0,0,.63889],8969:[1.25003,1.75,0,0,.63889],8970:[1.25003,1.75,0,0,.63889],8971:[1.25003,1.75,0,0,.63889],9115:[.64502,1.155,0,0,.875],9116:[1e-5,.6,0,0,.875],9117:[.64502,1.155,0,0,.875],9118:[.64502,1.155,0,0,.875],9119:[1e-5,.6,0,0,.875],9120:[.64502,1.155,0,0,.875],9121:[.64502,1.155,0,0,.66667],9122:[-99e-5,.601,0,0,.66667],9123:[.64502,
1.155,0,0,.66667],9124:[.64502,1.155,0,0,.66667],9125:[-99e-5,.601,0,0,.66667],9126:[.64502,1.155,0,0,.66667],9127:[1e-5,.9,0,0,.88889],9128:[.65002,1.15,0,0,.88889],9129:[.90001,0,0,0,.88889],9130:[0,.3,0,0,.88889],9131:[1e-5,.9,0,0,.88889],9132:[.65002,1.15,0,0,.88889],9133:[.90001,0,0,0,.88889],9143:[.88502,.915,0,0,1.05556],10216:[1.25003,1.75,0,0,.80556],10217:[1.25003,1.75,0,0,.80556],57344:[-.00499,.605,0,0,1.05556],57345:[-.00499,.605,0,0,1.05556],57680:[0,.12,0,0,.45],57681:[0,.12,0,0,.45],57682:[0,.12,0,0,.45],57683:[0,.12,0,0,.45]},"Typewriter-Regular":{32:[0,0,0,0,.525],33:[0,.61111,0,0,.525],34:[0,.61111,0,0,.525],35:[0,.61111,0,0,.525],36:[.08333,.69444,0,0,.525],37:[.08333,.69444,0,0,.525],38:[0,.61111,0,0,.525],39:[0,.61111,0,0,.525],40:[.08333,.69444,0,0,.525],41:[.08333,.69444,0,0,.525],42:[0,.52083,0,0,.525],43:[-.08056,.53055,0,0,.525],44:[.13889,.125,0,0,.525],45:[-.08056,.53055,0,0,.525],46:[0,.125,0,0,.525],47:[.08333,.69444,0,0,.525],48:[0,.61111,0,0,.525],49:[0,.61111,0,0,.525],50:[0,.61111,0,0,.525],51:[0,.61111,0,0,.525],52:[0,.61111,0,0,.525],53:[0,.61111,0,0,.525],54:[0,.61111,0,0,.525],55:[0,.61111,0,0,.525],56:[0,.61111,0,0,.525],57:[0,.61111,0,0,.525],58:[0,.43056,0,0,.525],59:[.13889,.43056,0,0,.525],60:[-.05556,.55556,0,0,.525],61:[-.19549,.41562,0,0,.525],62:[-.05556,.55556,0,0,.525],63:[0,.61111,0,0,.525],64:[0,.61111,0,0,.525],65:[0,.61111,0,0,.525],66:[0,.61111,0,0,.525],67:[0,.61111,0,0,.525],68:[0,.61111,0,0,.525],69:[0,.61111,0,0,.525],70:[0,.61111,0,0,.525],71:[0,.61111,0,0,.525],72:[0,.61111,0,0,.525],73:[0,.61111,0,0,.525],74:[0,.61111,0,0,.525],75:[0,.61111,0,0,.525],76:[0,.61111,0,0,.525],77:[0,.61111,0,0,.525],78:[0,.61111,0,0,.525],79:[0,.61111,0,0,.525],80:[0,.61111,0,0,.525],81:[.13889,.61111,0,0,.525],82:[0,.61111,0,0,.525],83:[0,.61111,0,0,.525],84:[0,.61111,0,0,.525],85:[0,.61111,0,0,.525],86:[0,.61111,0,0,.525],87:[0,.61111,0,0,.525],88:[0,.61111,0,0,.525],89:[0,.61111,0,0,.525],90:[0,.61111,0,0,.525],91:[.08333,
.69444,0,0,.525],92:[.08333,.69444,0,0,.525],93:[.08333,.69444,0,0,.525],94:[0,.61111,0,0,.525],95:[.09514,0,0,0,.525],96:[0,.61111,0,0,.525],97:[0,.43056,0,0,.525],98:[0,.61111,0,0,.525],99:[0,.43056,0,0,.525],100:[0,.61111,0,0,.525],101:[0,.43056,0,0,.525],102:[0,.61111,0,0,.525],103:[.22222,.43056,0,0,.525],104:[0,.61111,0,0,.525],105:[0,.61111,0,0,.525],106:[.22222,.61111,0,0,.525],107:[0,.61111,0,0,.525],108:[0,.61111,0,0,.525],109:[0,.43056,0,0,.525],110:[0,.43056,0,0,.525],111:[0,.43056,0,0,.525],112:[.22222,.43056,0,0,.525],113:[.22222,.43056,0,0,.525],114:[0,.43056,0,0,.525],115:[0,.43056,0,0,.525],116:[0,.55358,0,0,.525],117:[0,.43056,0,0,.525],118:[0,.43056,0,0,.525],119:[0,.43056,0,0,.525],120:[0,.43056,0,0,.525],121:[.22222,.43056,0,0,.525],122:[0,.43056,0,0,.525],123:[.08333,.69444,0,0,.525],124:[.08333,.69444,0,0,.525],125:[.08333,.69444,0,0,.525],126:[0,.61111,0,0,.525],127:[0,.61111,0,0,.525],160:[0,0,0,0,.525],176:[0,.61111,0,0,.525],184:[.19445,0,0,0,.525],305:[0,.43056,0,0,.525],567:[.22222,.43056,0,0,.525],711:[0,.56597,0,0,.525],713:[0,.56555,0,0,.525],714:[0,.61111,0,0,.525],715:[0,.61111,0,0,.525],728:[0,.61111,0,0,.525],730:[0,.61111,0,0,.525],770:[0,.61111,0,0,.525],771:[0,.61111,0,0,.525],776:[0,.61111,0,0,.525],915:[0,.61111,0,0,.525],916:[0,.61111,0,0,.525],920:[0,.61111,0,0,.525],923:[0,.61111,0,0,.525],926:[0,.61111,0,0,.525],928:[0,.61111,0,0,.525],931:[0,.61111,0,0,.525],933:[0,.61111,0,0,.525],934:[0,.61111,0,0,.525],936:[0,.61111,0,0,.525],937:[0,.61111,0,0,.525],8216:[0,.61111,0,0,.525],8217:[0,.61111,0,0,.525],8242:[0,.61111,0,0,.525],9251:[.11111,.21944,0,0,.525]}};const 
B={slant:[.25,.25,.25],space:[0,0,0],stretch:[0,0,0],shrink:[0,0,0],xHeight:[.431,.431,.431],quad:[1,1.171,1.472],extraSpace:[0,0,0],num1:[.677,.732,.925],num2:[.394,.384,.387],num3:[.444,.471,.504],denom1:[.686,.752,1.025],denom2:[.345,.344,.532],sup1:[.413,.503,.504],sup2:[.363,.431,.404],sup3:[.289,.286,.294],sub1:[.15,.143,.2],sub2:[.247,.286,.4],supDrop:[.386,.353,.494],subDrop:[.05,.071,.1],delim1:[2.39,1.7,1.98],delim2:[1.01,1.157,1.42],axisHeight:[.25,.25,.25],defaultRuleThickness:[.04,.049,.049],bigOpSpacing1:[.111,.111,.111],bigOpSpacing2:[.166,.166,.166],bigOpSpacing3:[.2,.2,.2],bigOpSpacing4:[.6,.611,.611],bigOpSpacing5:[.1,.143,.143],sqrtRuleThickness:[.04,.04,.04],ptPerEm:[10,10,10],doubleRuleSep:[.2,.2,.2],arrayRuleWidth:[.04,.04,.04],fboxsep:[.3,.3,.3],fboxrule:[.04,.04,.04]},C={"\xc5":"A","\xd0":"D","\xde":"o","\xe5":"a","\xf0":"d","\xfe":"o","\u0410":"A","\u0411":"B","\u0412":"B","\u0413":"F","\u0414":"A","\u0415":"E","\u0416":"K","\u0417":"3","\u0418":"N","\u0419":"N","\u041a":"K","\u041b":"N","\u041c":"M","\u041d":"H","\u041e":"O","\u041f":"N","\u0420":"P","\u0421":"C","\u0422":"T","\u0423":"y","\u0424":"O","\u0425":"X","\u0426":"U","\u0427":"h","\u0428":"W","\u0429":"W","\u042a":"B","\u042b":"X","\u042c":"B","\u042d":"3","\u042e":"X","\u042f":"R","\u0430":"a","\u0431":"b","\u0432":"a","\u0433":"r","\u0434":"y","\u0435":"e","\u0436":"m","\u0437":"e","\u0438":"n","\u0439":"n","\u043a":"n","\u043b":"n","\u043c":"m","\u043d":"n","\u043e":"o","\u043f":"n","\u0440":"p","\u0441":"c","\u0442":"o","\u0443":"y","\u0444":"b","\u0445":"x","\u0446":"n","\u0447":"n","\u0448":"w","\u0449":"w","\u044a":"a","\u044b":"m","\u044c":"a","\u044d":"e","\u044e":"m","\u044f":"r"};function N(e,t,r){if(!T[t])throw new Error("Font metrics not found for font: "+t+".");let n=e.charCodeAt(0),o=T[t][n];if(!o&&e[0]in C&&(n=C[e[0]].charCodeAt(0),o=T[t][n]),o||"text"!==r||S(n)&&(o=T[t][77]),o)return{depth:o[0],height:o[1],italic:o[2],skew:o[3],width:o[4]}}const q={};const 
I=[[1,1,1],[2,1,1],[3,1,1],[4,2,1],[5,2,1],[6,3,1],[7,4,2],[8,6,3],[9,7,6],[10,8,7],[11,10,9]],R=[.5,.6,.7,.8,.9,1,1.2,1.44,1.728,2.074,2.488],H=function(e,t){return t.size<2?e:I[e-1][t.size-1]};class O{constructor(e){this.style=void 0,this.color=void 0,this.size=void 0,this.textSize=void 0,this.phantom=void 0,this.font=void 0,this.fontFamily=void 0,this.fontWeight=void 0,this.fontShape=void 0,this.sizeMultiplier=void 0,this.maxSize=void 0,this.minRuleThickness=void 0,this._fontMetrics=void 0,this.style=e.style,this.color=e.color,this.size=e.size||O.BASESIZE,this.textSize=e.textSize||this.size,this.phantom=!!e.phantom,this.font=e.font||"",this.fontFamily=e.fontFamily||"",this.fontWeight=e.fontWeight||"",this.fontShape=e.fontShape||"",this.sizeMultiplier=R[this.size-1],this.maxSize=e.maxSize,this.minRuleThickness=e.minRuleThickness,this._fontMetrics=void 0}extend(e){const t={style:this.style,size:this.size,textSize:this.textSize,color:this.color,phantom:this.phantom,font:this.font,fontFamily:this.fontFamily,fontWeight:this.fontWeight,fontShape:this.fontShape,maxSize:this.maxSize,minRuleThickness:this.minRuleThickness};for(const r in e)e.hasOwnProperty(r)&&(t[r]=e[r]);return new O(t)}havingStyle(e){return this.style===e?this:this.extend({style:e,size:H(this.textSize,e)})}havingCrampedStyle(){return this.havingStyle(this.style.cramp())}havingSize(e){return this.size===e&&this.textSize===e?this:this.extend({style:this.style.text(),size:e,textSize:e,sizeMultiplier:R[e-1]})}havingBaseStyle(e){e=e||this.style.text();const t=H(O.BASESIZE,e);return this.size===t&&this.textSize===O.BASESIZE&&this.style===e?this:this.extend({style:e,size:t})}havingBaseSizing(){let e;switch(this.style.id){case 4:case 5:e=3;break;case 6:case 7:e=1;break;default:e=6}return this.extend({style:this.style.text(),size:e})}withColor(e){return this.extend({color:e})}withPhantom(){return this.extend({phantom:!0})}withFont(e){return this.extend({font:e})}withTextFontFamily(e){return 
this.extend({fontFamily:e,font:""})}withTextFontWeight(e){return this.extend({fontWeight:e,font:""})}withTextFontShape(e){return this.extend({fontShape:e,font:""})}sizingClasses(e){return e.size!==this.size?["sizing","reset-size"+e.size,"size"+this.size]:[]}baseSizingClasses(){return this.size!==O.BASESIZE?["sizing","reset-size"+this.size,"size"+O.BASESIZE]:[]}fontMetrics(){return this._fontMetrics||(this._fontMetrics=function(e){let t;if(t=e>=5?0:e>=3?1:2,!q[t]){const e=q[t]={cssEmPerMu:B.quad[t]/18};for(const r in B)B.hasOwnProperty(r)&&(e[r]=B[r][t])}return q[t]}(this.size)),this._fontMetrics}getColor(){return this.phantom?"transparent":this.color}}O.BASESIZE=6;var E=O;const L={pt:1,mm:7227/2540,cm:7227/254,in:72.27,bp:1.00375,pc:12,dd:1238/1157,cc:14856/1157,nd:685/642,nc:1370/107,sp:1/65536,px:1.00375},D={ex:!0,em:!0,mu:!0},V=function(e){return"string"!=typeof e&&(e=e.unit),e in L||e in D||"ex"===e},P=function(e,t){let r;if(e.unit in L)r=L[e.unit]/t.fontMetrics().ptPerEm/t.sizeMultiplier;else if("mu"===e.unit)r=t.fontMetrics().cssEmPerMu;else{let o;if(o=t.style.isTight()?t.havingStyle(t.style.text()):t,"ex"===e.unit)r=o.fontMetrics().xHeight;else{if("em"!==e.unit)throw new n("Invalid unit: '"+e.unit+"'");r=o.fontMetrics().quad}o!==t&&(r*=o.sizeMultiplier/t.sizeMultiplier)}return Math.min(e.number*r,t.maxSize)},F=function(e){return+e.toFixed(4)+"em"},G=function(e){return e.filter((e=>e)).join(" ")},U=function(e,t,r){if(this.classes=e||[],this.attributes={},this.height=0,this.depth=0,this.maxFontSize=0,this.style=r||{},t){t.style.isTight()&&this.classes.push("mtight");const e=t.getColor();e&&(this.style.color=e)}},Y=function(e){const t=document.createElement(e);t.className=G(this.classes);for(const e in this.style)this.style.hasOwnProperty(e)&&(t.style[e]=this.style[e]);for(const e in this.attributes)this.attributes.hasOwnProperty(e)&&t.setAttribute(e,this.attributes[e]);for(let e=0;e",t};class W{constructor(e,t,r,n){this.children=void 0,this.attributes=void 
0,this.classes=void 0,this.height=void 0,this.depth=void 0,this.width=void 0,this.maxFontSize=void 0,this.style=void 0,U.call(this,e,r,n),this.children=t||[]}setAttribute(e,t){this.attributes[e]=t}hasClass(e){return l.contains(this.classes,e)}toNode(){return Y.call(this,"span")}toMarkup(){return X.call(this,"span")}}class _{constructor(e,t,r,n){this.children=void 0,this.attributes=void 0,this.classes=void 0,this.height=void 0,this.depth=void 0,this.maxFontSize=void 0,this.style=void 0,U.call(this,t,n),this.children=r||[],this.setAttribute("href",e)}setAttribute(e,t){this.attributes[e]=t}hasClass(e){return l.contains(this.classes,e)}toNode(){return Y.call(this,"a")}toMarkup(){return X.call(this,"a")}}class j{constructor(e,t,r){this.src=void 0,this.alt=void 0,this.classes=void 0,this.height=void 0,this.depth=void 0,this.maxFontSize=void 0,this.style=void 0,this.alt=t,this.src=e,this.classes=["mord"],this.style=r}hasClass(e){return l.contains(this.classes,e)}toNode(){const e=document.createElement("img");e.src=this.src,e.alt=this.alt,e.className="mord";for(const t in this.style)this.style.hasOwnProperty(t)&&(e.style[t]=this.style[t]);return e}toMarkup(){let e=''+l.escape(this.alt)+'=n[0]&&e<=n[1])return r.name}}return null}(this.text.charCodeAt(0));l&&this.classes.push(l+"_fallback"),/[\xee\xef\xed\xec]/.test(this.text)&&(this.text=$[this.text])}hasClass(e){return l.contains(this.classes,e)}toNode(){const e=document.createTextNode(this.text);let t=null;this.italic>0&&(t=document.createElement("span"),t.style.marginRight=F(this.italic)),this.classes.length>0&&(t=t||document.createElement("span"),t.className=G(this.classes));for(const e in this.style)this.style.hasOwnProperty(e)&&(t=t||document.createElement("span"),t.style[e]=this.style[e]);return t?(t.appendChild(e),t):e}toMarkup(){let e=!1,t="0&&(r+="margin-right:"+this.italic+"em;");for(const e in this.style)this.style.hasOwnProperty(e)&&(r+=l.hyphenate(e)+":"+this.style[e]+";");r&&(e=!0,t+=' 
style="'+l.escape(r)+'"');const n=l.escape(this.text);return e?(t+=">",t+=n,t+="",t):n}}class K{constructor(e,t){this.children=void 0,this.attributes=void 0,this.children=e||[],this.attributes=t||{}}toNode(){const e=document.createElementNS("http://www.w3.org/2000/svg","svg");for(const t in this.attributes)Object.prototype.hasOwnProperty.call(this.attributes,t)&&e.setAttribute(t,this.attributes[t]);for(let t=0;t':''}}class Q{constructor(e){this.attributes=void 0,this.attributes=e||{}}toNode(){const e=document.createElementNS("http://www.w3.org/2000/svg","line");for(const t in this.attributes)Object.prototype.hasOwnProperty.call(this.attributes,t)&&e.setAttribute(t,this.attributes[t]);return e}toMarkup(){let e="","\\gt",!0),se(ie,le,ye,"\u2208","\\in",!0),se(ie,le,ye,"\ue020","\\@not"),se(ie,le,ye,"\u2282","\\subset",!0),se(ie,le,ye,"\u2283","\\supset",!0),se(ie,le,ye,"\u2286","\\subseteq",!0),se(ie,le,ye,"\u2287","\\supseteq",!0),se(ie,he,ye,"\u2288","\\nsubseteq",!0),se(ie,he,ye,"\u2289","\\nsupseteq",!0),se(ie,le,ye,"\u22a8","\\models"),se(ie,le,ye,"\u2190","\\leftarrow",!0),se(ie,le,ye,"\u2264","\\le"),se(ie,le,ye,"\u2264","\\leq",!0),se(ie,le,ye,"<","\\lt",!0),se(ie,le,ye,"\u2192","\\rightarrow",!0),se(ie,le,ye,"\u2192","\\to"),se(ie,he,ye,"\u2271","\\ngeq",!0),se(ie,he,ye,"\u2270","\\nleq",!0),se(ie,le,xe,"\xa0","\\ "),se(ie,le,xe,"\xa0","\\space"),se(ie,le,xe,"\xa0","\\nobreakspace"),se(ae,le,xe,"\xa0","\\ "),se(ae,le,xe,"\xa0"," 
"),se(ae,le,xe,"\xa0","\\space"),se(ae,le,xe,"\xa0","\\nobreakspace"),se(ie,le,xe,null,"\\nobreak"),se(ie,le,xe,null,"\\allowbreak"),se(ie,le,be,",",","),se(ie,le,be,";",";"),se(ie,he,me,"\u22bc","\\barwedge",!0),se(ie,he,me,"\u22bb","\\veebar",!0),se(ie,le,me,"\u2299","\\odot",!0),se(ie,le,me,"\u2295","\\oplus",!0),se(ie,le,me,"\u2297","\\otimes",!0),se(ie,le,we,"\u2202","\\partial",!0),se(ie,le,me,"\u2298","\\oslash",!0),se(ie,he,me,"\u229a","\\circledcirc",!0),se(ie,he,me,"\u22a1","\\boxdot",!0),se(ie,le,me,"\u25b3","\\bigtriangleup"),se(ie,le,me,"\u25bd","\\bigtriangledown"),se(ie,le,me,"\u2020","\\dagger"),se(ie,le,me,"\u22c4","\\diamond"),se(ie,le,me,"\u22c6","\\star"),se(ie,le,me,"\u25c3","\\triangleleft"),se(ie,le,me,"\u25b9","\\triangleright"),se(ie,le,fe,"{","\\{"),se(ae,le,we,"{","\\{"),se(ae,le,we,"{","\\textbraceleft"),se(ie,le,pe,"}","\\}"),se(ae,le,we,"}","\\}"),se(ae,le,we,"}","\\textbraceright"),se(ie,le,fe,"{","\\lbrace"),se(ie,le,pe,"}","\\rbrace"),se(ie,le,fe,"[","\\lbrack",!0),se(ae,le,we,"[","\\lbrack",!0),se(ie,le,pe,"]","\\rbrack",!0),se(ae,le,we,"]","\\rbrack",!0),se(ie,le,fe,"(","\\lparen",!0),se(ie,le,pe,")","\\rparen",!0),se(ae,le,we,"<","\\textless",!0),se(ae,le,we,">","\\textgreater",!0),se(ie,le,fe,"\u230a","\\lfloor",!0),se(ie,le,pe,"\u230b","\\rfloor",!0),se(ie,le,fe,"\u2308","\\lceil",!0),se(ie,le,pe,"\u2309","\\rceil",!0),se(ie,le,we,"\\","\\backslash"),se(ie,le,we,"\u2223","|"),se(ie,le,we,"\u2223","\\vert"),se(ae,le,we,"|","\\textbar",!0),se(ie,le,we,"\u2225","\\|"),se(ie,le,we,"\u2225","\\Vert"),se(ae,le,we,"\u2225","\\textbardbl"),se(ae,le,we,"~","\\textasciitilde"),se(ae,le,we,"\\","\\textbackslash"),se(ae,le,we,"^","\\textasciicircum"),se(ie,le,ye,"\u2191","\\uparrow",!0),se(ie,le,ye,"\u21d1","\\Uparrow",!0),se(ie,le,ye,"\u2193","\\downarrow",!0),se(ie,le,ye,"\u21d3","\\Downarrow",!0),se(ie,le,ye,"\u2195","\\updownarrow",!0),se(ie,le,ye,"\u21d5","\\Updownarrow",!0),se(ie,le,ge,"\u2210","\\coprod"),se(ie,le,ge,"\u22c1","\\bigv
ee"),se(ie,le,ge,"\u22c0","\\bigwedge"),se(ie,le,ge,"\u2a04","\\biguplus"),se(ie,le,ge,"\u22c2","\\bigcap"),se(ie,le,ge,"\u22c3","\\bigcup"),se(ie,le,ge,"\u222b","\\int"),se(ie,le,ge,"\u222b","\\intop"),se(ie,le,ge,"\u222c","\\iint"),se(ie,le,ge,"\u222d","\\iiint"),se(ie,le,ge,"\u220f","\\prod"),se(ie,le,ge,"\u2211","\\sum"),se(ie,le,ge,"\u2a02","\\bigotimes"),se(ie,le,ge,"\u2a01","\\bigoplus"),se(ie,le,ge,"\u2a00","\\bigodot"),se(ie,le,ge,"\u222e","\\oint"),se(ie,le,ge,"\u222f","\\oiint"),se(ie,le,ge,"\u2230","\\oiiint"),se(ie,le,ge,"\u2a06","\\bigsqcup"),se(ie,le,ge,"\u222b","\\smallint"),se(ae,le,ue,"\u2026","\\textellipsis"),se(ie,le,ue,"\u2026","\\mathellipsis"),se(ae,le,ue,"\u2026","\\ldots",!0),se(ie,le,ue,"\u2026","\\ldots",!0),se(ie,le,ue,"\u22ef","\\@cdots",!0),se(ie,le,ue,"\u22f1","\\ddots",!0),se(ie,le,we,"\u22ee","\\varvdots"),se(ie,le,ce,"\u02ca","\\acute"),se(ie,le,ce,"\u02cb","\\grave"),se(ie,le,ce,"\xa8","\\ddot"),se(ie,le,ce,"~","\\tilde"),se(ie,le,ce,"\u02c9","\\bar"),se(ie,le,ce,"\u02d8","\\breve"),se(ie,le,ce,"\u02c7","\\check"),se(ie,le,ce,"^","\\hat"),se(ie,le,ce,"\u20d7","\\vec"),se(ie,le,ce,"\u02d9","\\dot"),se(ie,le,ce,"\u02da","\\mathring"),se(ie,le,de,"\ue131","\\@imath"),se(ie,le,de,"\ue237","\\@jmath"),se(ie,le,we,"\u0131","\u0131"),se(ie,le,we,"\u0237","\u0237"),se(ae,le,we,"\u0131","\\i",!0),se(ae,le,we,"\u0237","\\j",!0),se(ae,le,we,"\xdf","\\ss",!0),se(ae,le,we,"\xe6","\\ae",!0),se(ae,le,we,"\u0153","\\oe",!0),se(ae,le,we,"\xf8","\\o",!0),se(ae,le,we,"\xc6","\\AE",!0),se(ae,le,we,"\u0152","\\OE",!0),se(ae,le,we,"\xd8","\\O",!0),se(ae,le,ce,"\u02ca","\\'"),se(ae,le,ce,"\u02cb","\\`"),se(ae,le,ce,"\u02c6","\\^"),se(ae,le,ce,"\u02dc","\\~"),se(ae,le,ce,"\u02c9","\\="),se(ae,le,ce,"\u02d8","\\u"),se(ae,le,ce,"\u02d9","\\."),se(ae,le,ce,"\xb8","\\c"),se(ae,le,ce,"\u02da","\\r"),se(ae,le,ce,"\u02c7","\\v"),se(ae,le,ce,"\xa8",'\\"'),se(ae,le,ce,"\u02dd","\\H"),se(ae,le,ce,"\u25ef","\\textcircled");const 
ve={"--":!0,"---":!0,"``":!0,"''":!0};se(ae,le,we,"\u2013","--",!0),se(ae,le,we,"\u2013","\\textendash"),se(ae,le,we,"\u2014","---",!0),se(ae,le,we,"\u2014","\\textemdash"),se(ae,le,we,"\u2018","`",!0),se(ae,le,we,"\u2018","\\textquoteleft"),se(ae,le,we,"\u2019","'",!0),se(ae,le,we,"\u2019","\\textquoteright"),se(ae,le,we,"\u201c","``",!0),se(ae,le,we,"\u201c","\\textquotedblleft"),se(ae,le,we,"\u201d","''",!0),se(ae,le,we,"\u201d","\\textquotedblright"),se(ie,le,we,"\xb0","\\degree",!0),se(ae,le,we,"\xb0","\\degree"),se(ae,le,we,"\xb0","\\textdegree",!0),se(ie,le,we,"\xa3","\\pounds"),se(ie,le,we,"\xa3","\\mathsterling",!0),se(ae,le,we,"\xa3","\\pounds"),se(ae,le,we,"\xa3","\\textsterling",!0),se(ie,he,we,"\u2720","\\maltese"),se(ae,he,we,"\u2720","\\maltese");const ke='0123456789/@."';for(let e=0;e{if(G(e.classes)!==G(t.classes)||e.skew!==t.skew||e.maxFontSize!==t.maxFontSize)return!1;if(1===e.classes.length){const t=e.classes[0];if("mbin"===t||"mord"===t)return!1}for(const r in e.style)if(e.style.hasOwnProperty(r)&&e.style[r]!==t.style[r])return!1;for(const r in t.style)if(t.style.hasOwnProperty(r)&&e.style[r]!==t.style[r])return!1;return!0},Ie=function(e){let t=0,r=0,n=0;for(let o=0;ot&&(t=s.height),s.depth>r&&(r=s.depth),s.maxFontSize>n&&(n=s.maxFontSize)}e.height=t,e.depth=r,e.maxFontSize=n},Re=function(e,t,r,n){const o=new W(e,t,r,n);return Ie(o),o},He=(e,t,r,n)=>new W(e,t,r,n),Oe=function(e){const t=new A(e);return Ie(t),t},Ee=function(e,t,r){let n,o="";switch(e){case"amsrm":o="AMS";break;case"textrm":o="Main";break;case"textsf":o="SansSerif";break;case"texttt":o="Typewriter";break;default:o=e}return 
n="textbf"===t&&"textit"===r?"BoldItalic":"textbf"===t?"Bold":"textit"===t?"Italic":"Regular",o+"-"+n},Le={mathbf:{variant:"bold",fontName:"Main-Bold"},mathrm:{variant:"normal",fontName:"Main-Regular"},textit:{variant:"italic",fontName:"Main-Italic"},mathit:{variant:"italic",fontName:"Main-Italic"},mathnormal:{variant:"italic",fontName:"Math-Italic"},mathbb:{variant:"double-struck",fontName:"AMS-Regular"},mathcal:{variant:"script",fontName:"Caligraphic-Regular"},mathfrak:{variant:"fraktur",fontName:"Fraktur-Regular"},mathscr:{variant:"script",fontName:"Script-Regular"},mathsf:{variant:"sans-serif",fontName:"SansSerif-Regular"},mathtt:{variant:"monospace",fontName:"Typewriter-Regular"}},De={vec:["vec",.471,.714],oiintSize1:["oiintSize1",.957,.499],oiintSize2:["oiintSize2",1.472,.659],oiiintSize1:["oiiintSize1",1.304,.499],oiiintSize2:["oiiintSize2",1.98,.659]};var Ve={fontMap:Le,makeSymbol:Ne,mathsym:function(e,t,r,n){return void 0===n&&(n=[]),"boldsymbol"===r.font&&Ce(e,"Main-Bold",t).metrics?Ne(e,"Main-Bold",t,r,n.concat(["mathbf"])):"\\"===e||"main"===oe[t][e].font?Ne(e,"Main-Regular",t,r,n):Ne(e,"AMS-Regular",t,r,n.concat(["amsrm"]))},makeSpan:Re,makeSvgSpan:He,makeLineSpan:function(e,t,r){const n=Re([e],[],t);return n.height=Math.max(r||t.fontMetrics().defaultRuleThickness,t.minRuleThickness),n.style.borderBottomWidth=F(n.height),n.maxFontSize=1,n},makeAnchor:function(e,t,r,n){const o=new _(e,t,r,n);return Ie(o),o},makeFragment:Oe,wrapFragment:function(e,t){return e instanceof A?Re([],[e],t):e},makeVList:function(e,t){const{children:r,depth:n}=function(e){if("individualShift"===e.positionType){const t=e.children,r=[t[0]],n=-t[0].shift-t[0].elem.depth;let o=n;for(let e=1;e0)return Ne(s,h,o,t,i.concat(c));if(l){let e,n;if("boldsymbol"===l){const t=function(e,t,r,n,o){return"textord"!==o&&Ce(e,"Math-BoldItalic",t).metrics?{fontName:"Math-BoldItalic",fontClass:"boldsymbol"}:{fontName:"Main-Bold",fontClass:"mathbf"}}(s,o,0,0,r);e=t.fontName,n=[t.fontClass]}else 
a?(e=Le[l].fontName,n=[l]):(e=Ee(l,t.fontWeight,t.fontShape),n=[l,t.fontWeight,t.fontShape]);if(Ce(s,e,o).metrics)return Ne(s,e,o,t,i.concat(n));if(ve.hasOwnProperty(s)&&"Typewriter"===e.slice(0,10)){const r=[];for(let a=0;a{const r=Re(["mspace"],[],t),n=P(e,t);return r.style.marginRight=F(n),r},staticSvg:function(e,t){const[r,n,o]=De[e],s=new J(r),i=new K([s],{width:F(n),height:F(o),style:"width:"+F(n),viewBox:"0 0 "+1e3*n+" "+1e3*o,preserveAspectRatio:"xMinYMin"}),a=He(["overlay"],[i],t);return a.height=o,a.style.height=F(o),a.style.width=F(n),a},svgData:De,tryCombineChars:e=>{for(let t=0;t{const r=t.classes[0],n=e.classes[0];"mbin"===r&&l.contains(et,n)?t.classes[0]="mord":"mbin"===n&&l.contains(Qe,r)&&(e.classes[0]="mord")}),{node:i},a,h),ot(o,((e,t)=>{const r=at(t),n=at(e),o=r&&n?e.hasClass("mtight")?Ye[r][n]:Ue[r][n]:null;if(o)return Ve.makeGlue(o,s)}),{node:i},a,h),o},ot=function(e,t,r,n,o){n&&e.push(n);let s=0;for(;sr=>{e.splice(t+1,0,r),s++})(s)}n&&e.pop()},st=function(e){return e instanceof A||e instanceof _||e instanceof W&&e.hasClass("enclosing")?e:null},it=function(e,t){const r=st(e);if(r){const e=r.children;if(e.length){if("right"===t)return it(e[e.length-1],"right");if("left"===t)return it(e[0],"left")}}return e},at=function(e,t){return e?(t&&(e=it(e,t)),rt[e.classes[0]]||null):null},lt=function(e,t){const r=["nulldelimiter"].concat(e.baseSizingClasses());return Je(t.concat(r))},ht=function(e,t,r){if(!e)return Je();if(We[e.type]){let n=We[e.type](e,t);if(r&&t.size!==r.size){n=Je(t.sizingClasses(r),[n],t);const e=t.sizeMultiplier/r.sizeMultiplier;n.height*=e,n.depth*=e}return n}throw new n("Got group of unknown type: '"+e.type+"'")};function ct(e,t){const r=Je(["base"],e,t),n=Je(["strut"]);return n.style.height=F(r.height+r.depth),r.depth&&(n.style.verticalAlign=F(-r.depth)),r.children.unshift(n),r}function mt(e,t){let r=null;1===e.length&&"tag"===e[0].type&&(r=e[0].tag,e=e[0].body);const n=nt(e,t,"root");let 
o;2===n.length&&n[1].hasClass("tag")&&(o=n.pop());const s=[];let i,a=[];for(let e=0;e0&&(s.push(ct(a,t)),a=[]),s.push(n[e]));a.length>0&&s.push(ct(a,t)),r?(i=ct(nt(r,t,!0)),i.classes=["tag"],s.push(i)):o&&s.push(o);const l=Je(["katex-html"],s);if(l.setAttribute("aria-hidden","true"),i){const e=i.children[0];e.style.height=F(l.height+l.depth),l.depth&&(e.style.verticalAlign=F(-l.depth))}return l}function pt(e){return new A(e)}class ut{constructor(e,t,r){this.type=void 0,this.attributes=void 0,this.children=void 0,this.classes=void 0,this.type=e,this.attributes={},this.children=t||[],this.classes=r||[]}setAttribute(e,t){this.attributes[e]=t}getAttribute(e){return this.attributes[e]}toNode(){const e=document.createElementNS("http://www.w3.org/1998/Math/MathML",this.type);for(const t in this.attributes)Object.prototype.hasOwnProperty.call(this.attributes,t)&&e.setAttribute(t,this.attributes[t]);this.classes.length>0&&(e.className=G(this.classes));for(let t=0;t0&&(e+=' class ="'+l.escape(G(this.classes))+'"'),e+=">";for(let t=0;t",e}toText(){return this.children.map((e=>e.toText())).join("")}}class dt{constructor(e){this.text=void 0,this.text=e}toNode(){return document.createTextNode(this.text)}toMarkup(){return l.escape(this.toText())}toText(){return this.text}}var gt={MathNode:ut,TextNode:dt,SpaceNode:class{constructor(e){this.width=void 0,this.character=void 0,this.width=e,this.character=e>=.05555&&e<=.05556?"\u200a":e>=.1666&&e<=.1667?"\u2009":e>=.2222&&e<=.2223?"\u2005":e>=.2777&&e<=.2778?"\u2005\u200a":e>=-.05556&&e<=-.05555?"\u200a\u2063":e>=-.1667&&e<=-.1666?"\u2009\u2063":e>=-.2223&&e<=-.2222?"\u205f\u2063":e>=-.2778&&e<=-.2777?"\u2005\u2063":null}toNode(){if(this.character)return document.createTextNode(this.character);{const e=document.createElementNS("http://www.w3.org/1998/Math/MathML","mspace");return e.setAttribute("width",F(this.width)),e}}toMarkup(){return this.character?""+this.character+"":''}toText(){return this.character?this.character:" 
"}},newDocumentFragment:pt};const ft=function(e,t,r){return!oe[t][e]||!oe[t][e].replace||55349===e.charCodeAt(0)||ve.hasOwnProperty(e)&&r&&(r.fontFamily&&"tt"===r.fontFamily.slice(4,6)||r.font&&"tt"===r.font.slice(4,6))||(e=oe[t][e].replace),new gt.TextNode(e)},bt=function(e){return 1===e.length?e[0]:new gt.MathNode("mrow",e)},yt=function(e,t){if("texttt"===t.fontFamily)return"monospace";if("textsf"===t.fontFamily)return"textit"===t.fontShape&&"textbf"===t.fontWeight?"sans-serif-bold-italic":"textit"===t.fontShape?"sans-serif-italic":"textbf"===t.fontWeight?"bold-sans-serif":"sans-serif";if("textit"===t.fontShape&&"textbf"===t.fontWeight)return"bold-italic";if("textit"===t.fontShape)return"italic";if("textbf"===t.fontWeight)return"bold";const r=t.font;if(!r||"mathnormal"===r)return null;const n=e.mode;if("mathit"===r)return"italic";if("boldsymbol"===r)return"textord"===e.type?"bold":"bold-italic";if("mathbf"===r)return"bold";if("mathbb"===r)return"double-struck";if("mathfrak"===r)return"fraktur";if("mathscr"===r||"mathcal"===r)return"script";if("mathsf"===r)return"sans-serif";if("mathtt"===r)return"monospace";let o=e.text;if(l.contains(["\\imath","\\jmath"],o))return null;oe[n][o]&&oe[n][o].replace&&(o=oe[n][o].replace);return N(o,Ve.fontMap[r].fontName,n)?Ve.fontMap[r].variant:null},xt=function(e,t,r){if(1===e.length){const n=vt(e[0],t);return r&&n instanceof ut&&"mo"===n.type&&(n.setAttribute("lspace","0em"),n.setAttribute("rspace","0em")),[n]}const n=[];let o;for(let r=0;r0&&(e.text=e.text.slice(0,1)+"\u0338"+e.text.slice(1),n.pop())}}}n.push(s),o=s}return n},wt=function(e,t,r){return bt(xt(e,t,r))},vt=function(e,t){if(!e)return new gt.MathNode("mrow");if(_e[e.type]){return _e[e.type](e,t)}throw new n("Got group of unknown type: '"+e.type+"'")};function kt(e,t,r,n,o){const s=xt(e,r);let i;i=1===s.length&&s[0]instanceof ut&&l.contains(["mrow","mtable"],s[0].type)?s[0]:new gt.MathNode("mrow",s);const a=new gt.MathNode("annotation",[new 
gt.TextNode(t)]);a.setAttribute("encoding","application/x-tex");const h=new gt.MathNode("semantics",[i,a]),c=new gt.MathNode("math",[h]);c.setAttribute("xmlns","http://www.w3.org/1998/Math/MathML"),n&&c.setAttribute("display","block");const m=o?"katex":"katex-mathml";return Ve.makeSpan([m],[c])}const St=function(e){return new E({style:e.displayMode?w.DISPLAY:w.TEXT,maxSize:e.maxSize,minRuleThickness:e.minRuleThickness})},Mt=function(e,t){if(t.displayMode){const r=["katex-display"];t.leqno&&r.push("leqno"),t.fleqn&&r.push("fleqn"),e=Ve.makeSpan(r,[e])}return e},zt=function(e,t,r){const n=St(r);let o;if("mathml"===r.output)return kt(e,t,n,r.displayMode,!0);if("html"===r.output){const t=mt(e,n);o=Ve.makeSpan(["katex"],[t])}else{const s=kt(e,t,n,r.displayMode,!1),i=mt(e,n);o=Ve.makeSpan(["katex"],[s,i])}return Mt(o,r)};const At={widehat:"^",widecheck:"\u02c7",widetilde:"~",utilde:"~",overleftarrow:"\u2190",underleftarrow:"\u2190",xleftarrow:"\u2190",overrightarrow:"\u2192",underrightarrow:"\u2192",xrightarrow:"\u2192",underbrace:"\u23df",overbrace:"\u23de",overgroup:"\u23e0",undergroup:"\u23e1",overleftrightarrow:"\u2194",underleftrightarrow:"\u2194",xleftrightarrow:"\u2194",Overrightarrow:"\u21d2",xRightarrow:"\u21d2",overleftharpoon:"\u21bc",xleftharpoonup:"\u21bc",overrightharpoon:"\u21c0",xrightharpoonup:"\u21c0",xLeftarrow:"\u21d0",xLeftrightarrow:"\u21d4",xhookleftarrow:"\u21a9",xhookrightarrow:"\u21aa",xmapsto:"\u21a6",xrightharpoondown:"\u21c1",xleftharpoondown:"\u21bd",xrightleftharpoons:"\u21cc",xleftrightharpoons:"\u21cb",xtwoheadleftarrow:"\u219e",xtwoheadrightarrow:"\u21a0",xlongequal:"=",xtofrom:"\u21c4",xrightleftarrows:"\u21c4",xrightequilibrium:"\u21cc",xleftequilibrium:"\u21cb","\\cdrightarrow":"\u2192","\\cdleftarrow":"\u2190","\\cdlongequal":"="},Tt={overrightarrow:[["rightarrow"],.888,522,"xMaxYMin"],overleftarrow:[["leftarrow"],.888,522,"xMinYMin"],underrightarrow:[["rightarrow"],.888,522,"xMaxYMin"],underleftarrow:[["leftarrow"],.888,522,"xMinYMin
"],xrightarrow:[["rightarrow"],1.469,522,"xMaxYMin"],"\\cdrightarrow":[["rightarrow"],3,522,"xMaxYMin"],xleftarrow:[["leftarrow"],1.469,522,"xMinYMin"],"\\cdleftarrow":[["leftarrow"],3,522,"xMinYMin"],Overrightarrow:[["doublerightarrow"],.888,560,"xMaxYMin"],xRightarrow:[["doublerightarrow"],1.526,560,"xMaxYMin"],xLeftarrow:[["doubleleftarrow"],1.526,560,"xMinYMin"],overleftharpoon:[["leftharpoon"],.888,522,"xMinYMin"],xleftharpoonup:[["leftharpoon"],.888,522,"xMinYMin"],xleftharpoondown:[["leftharpoondown"],.888,522,"xMinYMin"],overrightharpoon:[["rightharpoon"],.888,522,"xMaxYMin"],xrightharpoonup:[["rightharpoon"],.888,522,"xMaxYMin"],xrightharpoondown:[["rightharpoondown"],.888,522,"xMaxYMin"],xlongequal:[["longequal"],.888,334,"xMinYMin"],"\\cdlongequal":[["longequal"],3,334,"xMinYMin"],xtwoheadleftarrow:[["twoheadleftarrow"],.888,334,"xMinYMin"],xtwoheadrightarrow:[["twoheadrightarrow"],.888,334,"xMaxYMin"],overleftrightarrow:[["leftarrow","rightarrow"],.888,522],overbrace:[["leftbrace","midbrace","rightbrace"],1.6,548],underbrace:[["leftbraceunder","midbraceunder","rightbraceunder"],1.6,548],underleftrightarrow:[["leftarrow","rightarrow"],.888,522],xleftrightarrow:[["leftarrow","rightarrow"],1.75,522],xLeftrightarrow:[["doubleleftarrow","doublerightarrow"],1.75,560],xrightleftharpoons:[["leftharpoondownplus","rightharpoonplus"],1.75,716],xleftrightharpoons:[["leftharpoonplus","rightharpoondownplus"],1.75,716],xhookleftarrow:[["leftarrow","righthook"],1.08,522],xhookrightarrow:[["lefthook","rightarrow"],1.08,522],overlinesegment:[["leftlinesegment","rightlinesegment"],.888,522],underlinesegment:[["leftlinesegment","rightlinesegment"],.888,522],overgroup:[["leftgroup","rightgroup"],.888,342],undergroup:[["leftgroupunder","rightgroupunder"],.888,342],xmapsto:[["leftmapsto","rightarrow"],1.5,522],xtofrom:[["leftToFrom","rightToFrom"],1.75,528],xrightleftarrows:[["baraboveleftarrow","rightarrowabovebar"],1.75,901],xrightequilibrium:[["baraboveshortleftharpoon","ri
ghtharpoonaboveshortbar"],1.75,716],xleftequilibrium:[["shortbaraboveleftharpoon","shortrightharpoonabovebar"],1.75,716]};var Bt=function(e,t,r,n,o){let s;const i=e.height+e.depth+r+n;if(/fbox|color|angl/.test(t)){if(s=Ve.makeSpan(["stretchy",t],[],o),"fbox"===t){const e=o.color&&o.getColor();e&&(s.style.borderColor=e)}}else{const e=[];/^[bx]cancel$/.test(t)&&e.push(new Q({x1:"0",y1:"0",x2:"100%",y2:"100%","stroke-width":"0.046em"})),/^x?cancel$/.test(t)&&e.push(new Q({x1:"0",y1:"100%",x2:"100%",y2:"0","stroke-width":"0.046em"}));const r=new K(e,{width:"100%",height:F(i)});s=Ve.makeSvgSpan([],[r],o)}return s.height=i,s.style.height=F(i),s},Ct=function(e){const t=new gt.MathNode("mo",[new gt.TextNode(At[e.replace(/^\\/,"")])]);return t.setAttribute("stretchy","true"),t},Nt=function(e,t){const{span:r,minWidth:n,height:o}=function(){let r=4e5;const n=e.label.slice(1);if(l.contains(["widehat","widecheck","widetilde","utilde"],n)){const s="ordgroup"===(o=e.base).type?o.body.length:1;let i,a,l;if(s>5)"widehat"===n||"widecheck"===n?(i=420,r=2364,l=.42,a=n+"4"):(i=312,r=2340,l=.34,a="tilde4");else{const e=[1,1,2,2,3,3][s];"widehat"===n||"widecheck"===n?(r=[0,1062,2364,2364,2364][e],i=[0,239,300,360,420][e],l=[0,.24,.3,.3,.36,.42][e],a=n+e):(r=[0,600,1033,2339,2340][e],i=[0,260,286,306,312][e],l=[0,.26,.286,.3,.306,.34][e],a="tilde"+e)}const h=new J(a),c=new K([h],{width:"100%",height:F(l),viewBox:"0 0 "+r+" "+i,preserveAspectRatio:"none"});return{span:Ve.makeSvgSpan([],[c],t),minWidth:0,height:l}}{const e=[],o=Tt[n],[s,i,a]=o,l=a/1e3,h=s.length;let c,m;if(1===h){c=["hide-tail"],m=[o[3]]}else if(2===h)c=["halfarrow-left","halfarrow-right"],m=["xMinYMin","xMaxYMin"];else{if(3!==h)throw new Error("Correct katexImagesData or update code here to support\n "+h+" children.");c=["brace-left","brace-center","brace-right"],m=["xMinYMin","xMidYMin","xMaxYMin"]}for(let n=0;n0&&(r.style.minWidth=F(n)),r};function qt(e,t){if(!e||e.type!==t)throw new Error("Expected node of type "+t+", 
but got "+(e?"node of type "+e.type:String(e)));return e}function It(e){const t=Rt(e);if(!t)throw new Error("Expected node of symbol group type, but got "+(e?"node of type "+e.type:String(e)));return t}function Rt(e){return e&&("atom"===e.type||re.hasOwnProperty(e.type))?e:null}const Ht=(e,t)=>{let r,n,o;e&&"supsub"===e.type?(n=qt(e.base,"accent"),r=n.base,e.base=r,o=function(e){if(e instanceof W)return e;throw new Error("Expected span but got "+String(e)+".")}(ht(e,t)),e.base=n):(n=qt(e,"accent"),r=n.base);const s=ht(r,t.havingCrampedStyle());let i=0;if(n.isShifty&&l.isCharacterBox(r)){const e=l.getBaseElem(r);i=ee(ht(e,t.havingCrampedStyle())).skew}const a="\\c"===n.label;let h,c=a?s.height+s.depth:Math.min(s.height,t.fontMetrics().xHeight);if(n.isStretchy)h=Nt(n,t),h=Ve.makeVList({positionType:"firstBaseline",children:[{type:"elem",elem:s},{type:"elem",elem:h,wrapperClasses:["svg-align"],wrapperStyle:i>0?{width:"calc(100% - "+F(2*i)+")",marginLeft:F(2*i)}:void 0}]},t);else{let e,r;"\\vec"===n.label?(e=Ve.staticSvg("vec",t),r=Ve.svgData.vec[1]):(e=Ve.makeOrd({mode:n.mode,text:n.label},t,"textord"),e=ee(e),e.italic=0,r=e.width,a&&(c+=e.depth)),h=Ve.makeSpan(["accent-body"],[e]);const o="\\textcircled"===n.label;o&&(h.classes.push("accent-full"),c=s.height);let l=i;o||(l-=r/2),h.style.left=F(l),"\\textcircled"===n.label&&(h.style.top=".2em"),h=Ve.makeVList({positionType:"firstBaseline",children:[{type:"elem",elem:s},{type:"kern",size:-c},{type:"elem",elem:h}]},t)}const m=Ve.makeSpan(["mord","accent"],[h],t);return o?(o.children[0]=m,o.height=Math.max(m.height,o.height),o.classes[0]="mord",o):m},Ot=(e,t)=>{const r=e.isStretchy?Ct(e.label):new gt.MathNode("mo",[ft(e.label,e.mode)]),n=new gt.MathNode("mover",[vt(e.base,t),r]);return n.setAttribute("accent","true"),n},Et=new 
RegExp(["\\acute","\\grave","\\ddot","\\tilde","\\bar","\\breve","\\check","\\hat","\\vec","\\dot","\\mathring"].map((e=>"\\"+e)).join("|"));je({type:"accent",names:["\\acute","\\grave","\\ddot","\\tilde","\\bar","\\breve","\\check","\\hat","\\vec","\\dot","\\mathring","\\widecheck","\\widehat","\\widetilde","\\overrightarrow","\\overleftarrow","\\Overrightarrow","\\overleftrightarrow","\\overgroup","\\overlinesegment","\\overleftharpoon","\\overrightharpoon"],props:{numArgs:1},handler:(e,t)=>{const r=Ze(t[0]),n=!Et.test(e.funcName),o=!n||"\\widehat"===e.funcName||"\\widetilde"===e.funcName||"\\widecheck"===e.funcName;return{type:"accent",mode:e.parser.mode,label:e.funcName,isStretchy:n,isShifty:o,base:r}},htmlBuilder:Ht,mathmlBuilder:Ot}),je({type:"accent",names:["\\'","\\`","\\^","\\~","\\=","\\u","\\.",'\\"',"\\c","\\r","\\H","\\v","\\textcircled"],props:{numArgs:1,allowedInText:!0,allowedInMath:!0,argTypes:["primitive"]},handler:(e,t)=>{const r=t[0];let n=e.parser.mode;return"math"===n&&(e.parser.settings.reportNonstrict("mathVsTextAccents","LaTeX's accent "+e.funcName+" works only in text mode"),n="text"),{type:"accent",mode:n,label:e.funcName,isStretchy:!1,isShifty:!0,base:r}},htmlBuilder:Ht,mathmlBuilder:Ot}),je({type:"accentUnder",names:["\\underleftarrow","\\underrightarrow","\\underleftrightarrow","\\undergroup","\\underlinesegment","\\utilde"],props:{numArgs:1},handler:(e,t)=>{let{parser:r,funcName:n}=e;const o=t[0];return{type:"accentUnder",mode:r.mode,label:n,base:o}},htmlBuilder:(e,t)=>{const r=ht(e.base,t),n=Nt(e,t),o="\\utilde"===e.label?.12:0,s=Ve.makeVList({positionType:"top",positionData:r.height,children:[{type:"elem",elem:n,wrapperClasses:["svg-align"]},{type:"kern",size:o},{type:"elem",elem:r}]},t);return Ve.makeSpan(["mord","accentunder"],[s],t)},mathmlBuilder:(e,t)=>{const r=Ct(e.label),n=new gt.MathNode("munder",[vt(e.base,t),r]);return n.setAttribute("accentunder","true"),n}});const Lt=e=>{const t=new gt.MathNode("mpadded",e?[e]:[]);return 
t.setAttribute("width","+0.6em"),t.setAttribute("lspace","0.3em"),t};je({type:"xArrow",names:["\\xleftarrow","\\xrightarrow","\\xLeftarrow","\\xRightarrow","\\xleftrightarrow","\\xLeftrightarrow","\\xhookleftarrow","\\xhookrightarrow","\\xmapsto","\\xrightharpoondown","\\xrightharpoonup","\\xleftharpoondown","\\xleftharpoonup","\\xrightleftharpoons","\\xleftrightharpoons","\\xlongequal","\\xtwoheadrightarrow","\\xtwoheadleftarrow","\\xtofrom","\\xrightleftarrows","\\xrightequilibrium","\\xleftequilibrium","\\\\cdrightarrow","\\\\cdleftarrow","\\\\cdlongequal"],props:{numArgs:1,numOptionalArgs:1},handler(e,t,r){let{parser:n,funcName:o}=e;return{type:"xArrow",mode:n.mode,label:o,body:t[0],below:r[0]}},htmlBuilder(e,t){const r=t.style;let n=t.havingStyle(r.sup());const o=Ve.wrapFragment(ht(e.body,n,t),t),s="\\x"===e.label.slice(0,2)?"x":"cd";let i;o.classes.push(s+"-arrow-pad"),e.below&&(n=t.havingStyle(r.sub()),i=Ve.wrapFragment(ht(e.below,n,t),t),i.classes.push(s+"-arrow-pad"));const a=Nt(e,t),l=-t.fontMetrics().axisHeight+.5*a.height;let h,c=-t.fontMetrics().axisHeight-.5*a.height-.111;if((o.depth>.25||"\\xleftequilibrium"===e.label)&&(c-=o.depth),i){const e=-t.fontMetrics().axisHeight+i.height+.5*a.height+.111;h=Ve.makeVList({positionType:"individualShift",children:[{type:"elem",elem:o,shift:c},{type:"elem",elem:a,shift:l},{type:"elem",elem:i,shift:e}]},t)}else h=Ve.makeVList({positionType:"individualShift",children:[{type:"elem",elem:o,shift:c},{type:"elem",elem:a,shift:l}]},t);return h.children[0].children[0].children[1].classes.push("svg-align"),Ve.makeSpan(["mrel","x-arrow"],[h],t)},mathmlBuilder(e,t){const r=Ct(e.label);let n;if(r.setAttribute("minsize","x"===e.label.charAt(0)?"1.75em":"3.0em"),e.body){const o=Lt(vt(e.body,t));if(e.below){const s=Lt(vt(e.below,t));n=new gt.MathNode("munderover",[r,s,o])}else n=new gt.MathNode("mover",[r,o])}else if(e.below){const o=Lt(vt(e.below,t));n=new gt.MathNode("munder",[r,o])}else n=Lt(),n=new 
gt.MathNode("mover",[r,n]);return n}});const Dt=Ve.makeSpan;function Vt(e,t){const r=nt(e.body,t,!0);return Dt([e.mclass],r,t)}function Pt(e,t){let r;const n=xt(e.body,t);return"minner"===e.mclass?r=new gt.MathNode("mpadded",n):"mord"===e.mclass?e.isCharacterBox?(r=n[0],r.type="mi"):r=new gt.MathNode("mi",n):(e.isCharacterBox?(r=n[0],r.type="mo"):r=new gt.MathNode("mo",n),"mbin"===e.mclass?(r.attributes.lspace="0.22em",r.attributes.rspace="0.22em"):"mpunct"===e.mclass?(r.attributes.lspace="0em",r.attributes.rspace="0.17em"):"mopen"===e.mclass||"mclose"===e.mclass?(r.attributes.lspace="0em",r.attributes.rspace="0em"):"minner"===e.mclass&&(r.attributes.lspace="0.0556em",r.attributes.width="+0.1111em")),r}je({type:"mclass",names:["\\mathord","\\mathbin","\\mathrel","\\mathopen","\\mathclose","\\mathpunct","\\mathinner"],props:{numArgs:1,primitive:!0},handler(e,t){let{parser:r,funcName:n}=e;const o=t[0];return{type:"mclass",mode:r.mode,mclass:"m"+n.slice(5),body:Ke(o),isCharacterBox:l.isCharacterBox(o)}},htmlBuilder:Vt,mathmlBuilder:Pt});const Ft=e=>{const t="ordgroup"===e.type&&e.body.length?e.body[0]:e;return"atom"!==t.type||"bin"!==t.family&&"rel"!==t.family?"mord":"m"+t.family};je({type:"mclass",names:["\\@binrel"],props:{numArgs:2},handler(e,t){let{parser:r}=e;return{type:"mclass",mode:r.mode,mclass:Ft(t[0]),body:Ke(t[1]),isCharacterBox:l.isCharacterBox(t[1])}}}),je({type:"mclass",names:["\\stackrel","\\overset","\\underset"],props:{numArgs:2},handler(e,t){let{parser:r,funcName:n}=e;const o=t[1],s=t[0];let i;i="\\stackrel"!==n?Ft(o):"mrel";const 
a={type:"op",mode:o.mode,limits:!0,alwaysHandleSupSub:!0,parentIsSupSub:!1,symbol:!1,suppressBaseShift:"\\stackrel"!==n,body:Ke(o)},h={type:"supsub",mode:s.mode,base:a,sup:"\\underset"===n?null:s,sub:"\\underset"===n?s:null};return{type:"mclass",mode:r.mode,mclass:i,body:[h],isCharacterBox:l.isCharacterBox(h)}},htmlBuilder:Vt,mathmlBuilder:Pt}),je({type:"pmb",names:["\\pmb"],props:{numArgs:1,allowedInText:!0},handler(e,t){let{parser:r}=e;return{type:"pmb",mode:r.mode,mclass:Ft(t[0]),body:Ke(t[0])}},htmlBuilder(e,t){const r=nt(e.body,t,!0),n=Ve.makeSpan([e.mclass],r,t);return n.style.textShadow="0.02em 0.01em 0.04px",n},mathmlBuilder(e,t){const r=xt(e.body,t),n=new gt.MathNode("mstyle",r);return n.setAttribute("style","text-shadow: 0.02em 0.01em 0.04px"),n}});const Gt={">":"\\\\cdrightarrow","<":"\\\\cdleftarrow","=":"\\\\cdlongequal",A:"\\uparrow",V:"\\downarrow","|":"\\Vert",".":"no arrow"},Ut=e=>"textord"===e.type&&"@"===e.text;function Yt(e,t,r){const n=Gt[e];switch(n){case"\\\\cdrightarrow":case"\\\\cdleftarrow":return r.callFunction(n,[t[0]],[t[1]]);case"\\uparrow":case"\\downarrow":{const e={type:"atom",text:n,mode:"math",family:"rel"},o={type:"ordgroup",mode:"math",body:[r.callFunction("\\\\cdleft",[t[0]],[]),r.callFunction("\\Big",[e],[]),r.callFunction("\\\\cdright",[t[1]],[])]};return r.callFunction("\\\\cdparent",[o],[])}case"\\\\cdlongequal":return r.callFunction("\\\\cdlongequal",[],[]);case"\\Vert":{const e={type:"textord",text:"\\Vert",mode:"math"};return r.callFunction("\\Big",[e],[])}default:return{type:"textord",text:" ",mode:"math"}}}je({type:"cdlabel",names:["\\\\cdleft","\\\\cdright"],props:{numArgs:1},handler(e,t){let{parser:r,funcName:n}=e;return{type:"cdlabel",mode:r.mode,side:n.slice(4),label:t[0]}},htmlBuilder(e,t){const r=t.havingStyle(t.style.sup()),n=Ve.wrapFragment(ht(e.label,r,t),t);return n.classes.push("cd-label-"+e.side),n.style.bottom=F(.8-n.depth),n.height=0,n.depth=0,n},mathmlBuilder(e,t){let r=new 
gt.MathNode("mrow",[vt(e.label,t)]);return r=new gt.MathNode("mpadded",[r]),r.setAttribute("width","0"),"left"===e.side&&r.setAttribute("lspace","-1width"),r.setAttribute("voffset","0.7em"),r=new gt.MathNode("mstyle",[r]),r.setAttribute("displaystyle","false"),r.setAttribute("scriptlevel","1"),r}}),je({type:"cdlabelparent",names:["\\\\cdparent"],props:{numArgs:1},handler(e,t){let{parser:r}=e;return{type:"cdlabelparent",mode:r.mode,fragment:t[0]}},htmlBuilder(e,t){const r=Ve.wrapFragment(ht(e.fragment,t),t);return r.classes.push("cd-vert-arrow"),r},mathmlBuilder(e,t){return new gt.MathNode("mrow",[vt(e.fragment,t)])}}),je({type:"textord",names:["\\@char"],props:{numArgs:1,allowedInText:!0},handler(e,t){let{parser:r}=e;const o=qt(t[0],"ordgroup").body;let s="";for(let e=0;e=1114111)throw new n("\\@char with invalid code point "+s);return a<=65535?i=String.fromCharCode(a):(a-=65536,i=String.fromCharCode(55296+(a>>10),56320+(1023&a))),{type:"textord",mode:r.mode,text:i}}});const Xt=(e,t)=>{const r=nt(e.body,t.withColor(e.color),!1);return Ve.makeFragment(r)},Wt=(e,t)=>{const r=xt(e.body,t.withColor(e.color)),n=new gt.MathNode("mstyle",r);return n.setAttribute("mathcolor",e.color),n};je({type:"color",names:["\\textcolor"],props:{numArgs:2,allowedInText:!0,argTypes:["color","original"]},handler(e,t){let{parser:r}=e;const n=qt(t[0],"color-token").color,o=t[1];return{type:"color",mode:r.mode,color:n,body:Ke(o)}},htmlBuilder:Xt,mathmlBuilder:Wt}),je({type:"color",names:["\\color"],props:{numArgs:1,allowedInText:!0,argTypes:["color"]},handler(e,t){let{parser:r,breakOnTokenText:n}=e;const o=qt(t[0],"color-token").color;r.gullet.macros.set("\\current@color",o);const s=r.parseExpression(!0,n);return{type:"color",mode:r.mode,color:o,body:s}},htmlBuilder:Xt,mathmlBuilder:Wt}),je({type:"cr",names:["\\\\"],props:{numArgs:0,numOptionalArgs:0,allowedInText:!0},handler(e,t,r){let{parser:n}=e;const 
o="["===n.gullet.future().text?n.parseSizeGroup(!0):null,s=!n.settings.displayMode||!n.settings.useStrictBehavior("newLineInDisplayMode","In LaTeX, \\\\ or \\newline does nothing in display mode");return{type:"cr",mode:n.mode,newLine:s,size:o&&qt(o,"size").value}},htmlBuilder(e,t){const r=Ve.makeSpan(["mspace"],[],t);return e.newLine&&(r.classes.push("newline"),e.size&&(r.style.marginTop=F(P(e.size,t)))),r},mathmlBuilder(e,t){const r=new gt.MathNode("mspace");return e.newLine&&(r.setAttribute("linebreak","newline"),e.size&&r.setAttribute("height",F(P(e.size,t)))),r}});const _t={"\\global":"\\global","\\long":"\\\\globallong","\\\\globallong":"\\\\globallong","\\def":"\\gdef","\\gdef":"\\gdef","\\edef":"\\xdef","\\xdef":"\\xdef","\\let":"\\\\globallet","\\futurelet":"\\\\globalfuture"},jt=e=>{const t=e.text;if(/^(?:[\\{}$&#^_]|EOF)$/.test(t))throw new n("Expected a control sequence",e);return t},$t=(e,t,r,n)=>{let o=e.gullet.macros.get(r.text);null==o&&(r.noexpand=!0,o={tokens:[r],numArgs:0,unexpandable:!e.gullet.isExpandable(r.text)}),e.gullet.macros.set(t,o,n)};je({type:"internal",names:["\\global","\\long","\\\\globallong"],props:{numArgs:0,allowedInText:!0},handler(e){let{parser:t,funcName:r}=e;t.consumeSpaces();const o=t.fetch();if(_t[o.text])return"\\global"!==r&&"\\\\globallong"!==r||(o.text=_t[o.text]),qt(t.parseFunction(),"internal");throw new n("Invalid token after macro prefix",o)}}),je({type:"internal",names:["\\def","\\gdef","\\edef","\\xdef"],props:{numArgs:0,allowedInText:!0,primitive:!0},handler(e){let{parser:t,funcName:r}=e,o=t.gullet.popToken();const s=o.text;if(/^(?:[\\{}$&#^_]|EOF)$/.test(s))throw new n("Expected a control sequence",o);let i,a=0;const l=[[]];for(;"{"!==t.gullet.future().text;)if(o=t.gullet.popToken(),"#"===o.text){if("{"===t.gullet.future().text){i=t.gullet.future(),l[a].push("{");break}if(o=t.gullet.popToken(),!/^[1-9]$/.test(o.text))throw new n('Invalid argument number "'+o.text+'"');if(parseInt(o.text)!==a+1)throw new 
n('Argument number "'+o.text+'" out of order');a++,l.push([])}else{if("EOF"===o.text)throw new n("Expected a macro definition");l[a].push(o.text)}let{tokens:h}=t.gullet.consumeArg();return i&&h.unshift(i),"\\edef"!==r&&"\\xdef"!==r||(h=t.gullet.expandTokens(h),h.reverse()),t.gullet.macros.set(s,{tokens:h,numArgs:a,delimiters:l},r===_t[r]),{type:"internal",mode:t.mode}}}),je({type:"internal",names:["\\let","\\\\globallet"],props:{numArgs:0,allowedInText:!0,primitive:!0},handler(e){let{parser:t,funcName:r}=e;const n=jt(t.gullet.popToken());t.gullet.consumeSpaces();const o=(e=>{let t=e.gullet.popToken();return"="===t.text&&(t=e.gullet.popToken()," "===t.text&&(t=e.gullet.popToken())),t})(t);return $t(t,n,o,"\\\\globallet"===r),{type:"internal",mode:t.mode}}}),je({type:"internal",names:["\\futurelet","\\\\globalfuture"],props:{numArgs:0,allowedInText:!0,primitive:!0},handler(e){let{parser:t,funcName:r}=e;const n=jt(t.gullet.popToken()),o=t.gullet.popToken(),s=t.gullet.popToken();return $t(t,n,s,"\\\\globalfuture"===r),t.gullet.pushToken(s),t.gullet.pushToken(o),{type:"internal",mode:t.mode}}});const Zt=function(e,t,r){const n=N(oe.math[e]&&oe.math[e].replace||e,t,r);if(!n)throw new Error("Unsupported symbol "+e+" and font size "+t+".");return n},Kt=function(e,t,r,n){const o=r.havingBaseStyle(t),s=Ve.makeSpan(n.concat(o.sizingClasses(r)),[e],r),i=o.sizeMultiplier/r.sizeMultiplier;return s.height*=i,s.depth*=i,s.maxFontSize=o.sizeMultiplier,s},Jt=function(e,t,r){const n=t.havingBaseStyle(r),o=(1-t.sizeMultiplier/n.sizeMultiplier)*t.fontMetrics().axisHeight;e.classes.push("delimcenter"),e.style.top=F(o),e.height-=o,e.depth+=o},Qt=function(e,t,r,n,o,s){const i=function(e,t,r,n){return Ve.makeSymbol(e,"Size"+t+"-Regular",r,n)}(e,t,o,n),a=Kt(Ve.makeSpan(["delimsizing","size"+t],[i],n),w.TEXT,n,s);return r&&Jt(a,n,w.TEXT),a},er=function(e,t,r){let 
n;n="Size1-Regular"===t?"delim-size1":"delim-size4";return{type:"elem",elem:Ve.makeSpan(["delimsizinginner",n],[Ve.makeSpan([],[Ve.makeSymbol(e,t,r)])])}},tr=function(e,t,r){const n=T["Size4-Regular"][e.charCodeAt(0)]?T["Size4-Regular"][e.charCodeAt(0)][4]:T["Size1-Regular"][e.charCodeAt(0)][4],o=new J("inner",function(e,t){switch(e){case"\u239c":return"M291 0 H417 V"+t+" H291z M291 0 H417 V"+t+" H291z";case"\u2223":return"M145 0 H188 V"+t+" H145z M145 0 H188 V"+t+" H145z";case"\u2225":return"M145 0 H188 V"+t+" H145z M145 0 H188 V"+t+" H145zM367 0 H410 V"+t+" H367z M367 0 H410 V"+t+" H367z";case"\u239f":return"M457 0 H583 V"+t+" H457z M457 0 H583 V"+t+" H457z";case"\u23a2":return"M319 0 H403 V"+t+" H319z M319 0 H403 V"+t+" H319z";case"\u23a5":return"M263 0 H347 V"+t+" H263z M263 0 H347 V"+t+" H263z";case"\u23aa":return"M384 0 H504 V"+t+" H384z M384 0 H504 V"+t+" H384z";case"\u23d0":return"M312 0 H355 V"+t+" H312z M312 0 H355 V"+t+" H312z";case"\u2016":return"M257 0 H300 V"+t+" H257z M257 0 H300 V"+t+" H257zM478 0 H521 V"+t+" H478z M478 0 H521 V"+t+" H478z";default:return""}}(e,Math.round(1e3*t))),s=new K([o],{width:F(n),height:F(t),style:"width:"+F(n),viewBox:"0 0 "+1e3*n+" "+Math.round(1e3*t),preserveAspectRatio:"xMinYMin"}),i=Ve.makeSvgSpan([],[s],r);return i.height=t,i.style.height=F(t),i.style.width=F(n),{type:"elem",elem:i}},rr={type:"kern",size:-.008},nr=["|","\\lvert","\\rvert","\\vert"],or=["\\|","\\lVert","\\rVert","\\Vert"],sr=function(e,t,r,n,o,s){let i,a,h,c,m="",p=0;i=h=c=e,a=null;let 
u="Size1-Regular";"\\uparrow"===e?h=c="\u23d0":"\\Uparrow"===e?h=c="\u2016":"\\downarrow"===e?i=h="\u23d0":"\\Downarrow"===e?i=h="\u2016":"\\updownarrow"===e?(i="\\uparrow",h="\u23d0",c="\\downarrow"):"\\Updownarrow"===e?(i="\\Uparrow",h="\u2016",c="\\Downarrow"):l.contains(nr,e)?(h="\u2223",m="vert",p=333):l.contains(or,e)?(h="\u2225",m="doublevert",p=556):"["===e||"\\lbrack"===e?(i="\u23a1",h="\u23a2",c="\u23a3",u="Size4-Regular",m="lbrack",p=667):"]"===e||"\\rbrack"===e?(i="\u23a4",h="\u23a5",c="\u23a6",u="Size4-Regular",m="rbrack",p=667):"\\lfloor"===e||"\u230a"===e?(h=i="\u23a2",c="\u23a3",u="Size4-Regular",m="lfloor",p=667):"\\lceil"===e||"\u2308"===e?(i="\u23a1",h=c="\u23a2",u="Size4-Regular",m="lceil",p=667):"\\rfloor"===e||"\u230b"===e?(h=i="\u23a5",c="\u23a6",u="Size4-Regular",m="rfloor",p=667):"\\rceil"===e||"\u2309"===e?(i="\u23a4",h=c="\u23a5",u="Size4-Regular",m="rceil",p=667):"("===e||"\\lparen"===e?(i="\u239b",h="\u239c",c="\u239d",u="Size4-Regular",m="lparen",p=875):")"===e||"\\rparen"===e?(i="\u239e",h="\u239f",c="\u23a0",u="Size4-Regular",m="rparen",p=875):"\\{"===e||"\\lbrace"===e?(i="\u23a7",a="\u23a8",c="\u23a9",h="\u23aa",u="Size4-Regular"):"\\}"===e||"\\rbrace"===e?(i="\u23ab",a="\u23ac",c="\u23ad",h="\u23aa",u="Size4-Regular"):"\\lgroup"===e||"\u27ee"===e?(i="\u23a7",c="\u23a9",h="\u23aa",u="Size4-Regular"):"\\rgroup"===e||"\u27ef"===e?(i="\u23ab",c="\u23ad",h="\u23aa",u="Size4-Regular"):"\\lmoustache"===e||"\u23b0"===e?(i="\u23a7",c="\u23ad",h="\u23aa",u="Size4-Regular"):"\\rmoustache"!==e&&"\u23b1"!==e||(i="\u23ab",c="\u23a9",h="\u23aa",u="Size4-Regular");const d=Zt(i,u,o),g=d.height+d.depth,f=Zt(h,u,o),b=f.height+f.depth,y=Zt(c,u,o),x=y.height+y.depth;let v=0,k=1;if(null!==a){const e=Zt(a,u,o);v=e.height+e.depth,k=2}const S=g+x+v,M=S+Math.max(0,Math.ceil((t-S)/(k*b)))*k*b;let z=n.fontMetrics().axisHeight;r&&(z*=n.sizeMultiplier);const A=M/2-z,T=[];if(m.length>0){const 
e=M-g-x,t=Math.round(1e3*M),r=function(e,t){switch(e){case"lbrack":return"M403 1759 V84 H666 V0 H319 V1759 v"+t+" v1759 h347 v-84\nH403z M403 1759 V0 H319 V1759 v"+t+" v1759 h84z";case"rbrack":return"M347 1759 V0 H0 V84 H263 V1759 v"+t+" v1759 H0 v84 H347z\nM347 1759 V0 H263 V1759 v"+t+" v1759 h84z";case"vert":return"M145 15 v585 v"+t+" v585 c2.667,10,9.667,15,21,15\nc10,0,16.667,-5,20,-15 v-585 v"+-t+" v-585 c-2.667,-10,-9.667,-15,-21,-15\nc-10,0,-16.667,5,-20,15z M188 15 H145 v585 v"+t+" v585 h43z";case"doublevert":return"M145 15 v585 v"+t+" v585 c2.667,10,9.667,15,21,15\nc10,0,16.667,-5,20,-15 v-585 v"+-t+" v-585 c-2.667,-10,-9.667,-15,-21,-15\nc-10,0,-16.667,5,-20,15z M188 15 H145 v585 v"+t+" v585 h43z\nM367 15 v585 v"+t+" v585 c2.667,10,9.667,15,21,15\nc10,0,16.667,-5,20,-15 v-585 v"+-t+" v-585 c-2.667,-10,-9.667,-15,-21,-15\nc-10,0,-16.667,5,-20,15z M410 15 H367 v585 v"+t+" v585 h43z";case"lfloor":return"M319 602 V0 H403 V602 v"+t+" v1715 h263 v84 H319z\nMM319 602 V0 H403 V602 v"+t+" v1715 H319z";case"rfloor":return"M319 602 V0 H403 V602 v"+t+" v1799 H0 v-84 H319z\nMM319 602 V0 H403 V602 v"+t+" v1715 H319z";case"lceil":return"M403 1759 V84 H666 V0 H319 V1759 v"+t+" v602 h84z\nM403 1759 V0 H319 V1759 v"+t+" v602 h84z";case"rceil":return"M347 1759 V0 H0 V84 H263 V1759 v"+t+" v602 h84z\nM347 1759 V0 h-84 V1759 v"+t+" v602 h84z";case"lparen":return"M863,9c0,-2,-2,-5,-6,-9c0,0,-17,0,-17,0c-12.7,0,-19.3,0.3,-20,1\nc-5.3,5.3,-10.3,11,-15,17c-242.7,294.7,-395.3,682,-458,1162c-21.3,163.3,-33.3,349,\n-36,557 
l0,"+(t+84)+"c0.2,6,0,26,0,60c2,159.3,10,310.7,24,454c53.3,528,210,\n949.7,470,1265c4.7,6,9.7,11.7,15,17c0.7,0.7,7,1,19,1c0,0,18,0,18,0c4,-4,6,-7,6,-9\nc0,-2.7,-3.3,-8.7,-10,-18c-135.3,-192.7,-235.5,-414.3,-300.5,-665c-65,-250.7,-102.5,\n-544.7,-112.5,-882c-2,-104,-3,-167,-3,-189\nl0,-"+(t+92)+"c0,-162.7,5.7,-314,17,-454c20.7,-272,63.7,-513,129,-723c65.3,\n-210,155.3,-396.3,270,-559c6.7,-9.3,10,-15.3,10,-18z";case"rparen":return"M76,0c-16.7,0,-25,3,-25,9c0,2,2,6.3,6,13c21.3,28.7,42.3,60.3,\n63,95c96.7,156.7,172.8,332.5,228.5,527.5c55.7,195,92.8,416.5,111.5,664.5\nc11.3,139.3,17,290.7,17,454c0,28,1.7,43,3.3,45l0,"+(t+9)+"\nc-3,4,-3.3,16.7,-3.3,38c0,162,-5.7,313.7,-17,455c-18.7,248,-55.8,469.3,-111.5,664\nc-55.7,194.7,-131.8,370.3,-228.5,527c-20.7,34.7,-41.7,66.3,-63,95c-2,3.3,-4,7,-6,11\nc0,7.3,5.7,11,17,11c0,0,11,0,11,0c9.3,0,14.3,-0.3,15,-1c5.3,-5.3,10.3,-11,15,-17\nc242.7,-294.7,395.3,-681.7,458,-1161c21.3,-164.7,33.3,-350.7,36,-558\nl0,-"+(t+144)+"c-2,-159.3,-10,-310.7,-24,-454c-53.3,-528,-210,-949.7,\n-470,-1265c-4.7,-6,-9.7,-11.7,-15,-17c-0.7,-0.7,-6.7,-1,-18,-1z";default:throw new Error("Unknown stretchy delimiter.")}}(m,Math.round(1e3*e)),o=new J(m,r),s=(p/1e3).toFixed(3)+"em",i=(t/1e3).toFixed(3)+"em",a=new K([o],{width:s,height:i,viewBox:"0 0 "+p+" "+t}),l=Ve.makeSvgSpan([],[a],n);l.height=t/1e3,l.style.width=s,l.style.height=i,T.push({type:"elem",elem:l})}else{if(T.push(er(c,u,o)),T.push(rr),null===a){const e=M-g-x+.016;T.push(tr(h,e,n))}else{const e=(M-g-x-v)/2+.016;T.push(tr(h,e,n)),T.push(rr),T.push(er(a,u,o)),T.push(rr),T.push(tr(h,e,n))}T.push(rr),T.push(er(i,u,o))}const B=n.havingBaseStyle(w.TEXT),C=Ve.makeVList({positionType:"bottom",positionData:A,children:T},B);return Kt(Ve.makeSpan(["delimsizing","mult"],[C],B),w.TEXT,n,s)},ir=.08,ar=function(e,t,r,n,o){const s=function(e,t,r){t*=1e3;let 
n="";switch(e){case"sqrtMain":n=function(e,t){return"M95,"+(622+e+t)+"\nc-2.7,0,-7.17,-2.7,-13.5,-8c-5.8,-5.3,-9.5,-10,-9.5,-14\nc0,-2,0.3,-3.3,1,-4c1.3,-2.7,23.83,-20.7,67.5,-54\nc44.2,-33.3,65.8,-50.3,66.5,-51c1.3,-1.3,3,-2,5,-2c4.7,0,8.7,3.3,12,10\ns173,378,173,378c0.7,0,35.3,-71,104,-213c68.7,-142,137.5,-285,206.5,-429\nc69,-144,104.5,-217.7,106.5,-221\nl"+e/2.075+" -"+e+"\nc5.3,-9.3,12,-14,20,-14\nH400000v"+(40+e)+"H845.2724\ns-225.272,467,-225.272,467s-235,486,-235,486c-2.7,4.7,-9,7,-19,7\nc-6,0,-10,-1,-12,-3s-194,-422,-194,-422s-65,47,-65,47z\nM"+(834+e)+" "+t+"h400000v"+(40+e)+"h-400000z"}(t,M);break;case"sqrtSize1":n=function(e,t){return"M263,"+(601+e+t)+"c0.7,0,18,39.7,52,119\nc34,79.3,68.167,158.7,102.5,238c34.3,79.3,51.8,119.3,52.5,120\nc340,-704.7,510.7,-1060.3,512,-1067\nl"+e/2.084+" -"+e+"\nc4.7,-7.3,11,-11,19,-11\nH40000v"+(40+e)+"H1012.3\ns-271.3,567,-271.3,567c-38.7,80.7,-84,175,-136,283c-52,108,-89.167,185.3,-111.5,232\nc-22.3,46.7,-33.8,70.3,-34.5,71c-4.7,4.7,-12.3,7,-23,7s-12,-1,-12,-1\ns-109,-253,-109,-253c-72.7,-168,-109.3,-252,-110,-252c-10.7,8,-22,16.7,-34,26\nc-22,17.3,-33.3,26,-34,26s-26,-26,-26,-26s76,-59,76,-59s76,-60,76,-60z\nM"+(1001+e)+" "+t+"h400000v"+(40+e)+"h-400000z"}(t,M);break;case"sqrtSize2":n=function(e,t){return"M983 "+(10+e+t)+"\nl"+e/3.13+" -"+e+"\nc4,-6.7,10,-10,18,-10 H400000v"+(40+e)+"\nH1013.1s-83.4,268,-264.1,840c-180.7,572,-277,876.3,-289,913c-4.7,4.7,-12.7,7,-24,7\ns-12,0,-12,0c-1.3,-3.3,-3.7,-11.7,-7,-25c-35.3,-125.3,-106.7,-373.3,-214,-744\nc-10,12,-21,25,-33,39s-32,39,-32,39c-6,-5.3,-15,-14,-27,-26s25,-30,25,-30\nc26.7,-32.7,52,-63,76,-91s52,-60,52,-60s208,722,208,722\nc56,-175.3,126.3,-397.3,211,-666c84.7,-268.7,153.8,-488.2,207.5,-658.5\nc53.7,-170.3,84.5,-266.8,92.5,-289.5z\nM"+(1001+e)+" 
"+t+"h400000v"+(40+e)+"h-400000z"}(t,M);break;case"sqrtSize3":n=function(e,t){return"M424,"+(2398+e+t)+"\nc-1.3,-0.7,-38.5,-172,-111.5,-514c-73,-342,-109.8,-513.3,-110.5,-514\nc0,-2,-10.7,14.3,-32,49c-4.7,7.3,-9.8,15.7,-15.5,25c-5.7,9.3,-9.8,16,-12.5,20\ns-5,7,-5,7c-4,-3.3,-8.3,-7.7,-13,-13s-13,-13,-13,-13s76,-122,76,-122s77,-121,77,-121\ns209,968,209,968c0,-2,84.7,-361.7,254,-1079c169.3,-717.3,254.7,-1077.7,256,-1081\nl"+e/4.223+" -"+e+"c4,-6.7,10,-10,18,-10 H400000\nv"+(40+e)+"H1014.6\ns-87.3,378.7,-272.6,1166c-185.3,787.3,-279.3,1182.3,-282,1185\nc-2,6,-10,9,-24,9\nc-8,0,-12,-0.7,-12,-2z M"+(1001+e)+" "+t+"\nh400000v"+(40+e)+"h-400000z"}(t,M);break;case"sqrtSize4":n=function(e,t){return"M473,"+(2713+e+t)+"\nc339.3,-1799.3,509.3,-2700,510,-2702 l"+e/5.298+" -"+e+"\nc3.3,-7.3,9.3,-11,18,-11 H400000v"+(40+e)+"H1017.7\ns-90.5,478,-276.2,1466c-185.7,988,-279.5,1483,-281.5,1485c-2,6,-10,9,-24,9\nc-8,0,-12,-0.7,-12,-2c0,-1.3,-5.3,-32,-16,-92c-50.7,-293.3,-119.7,-693.3,-207,-1200\nc0,-1.3,-5.3,8.7,-16,30c-10.7,21.3,-21.3,42.7,-32,64s-16,33,-16,33s-26,-26,-26,-26\ns76,-153,76,-153s77,-151,77,-151c0.7,0.7,35.7,202,105,604c67.3,400.7,102,602.7,104,\n606zM"+(1001+e)+" "+t+"h400000v"+(40+e)+"H1017.7z"}(t,M);break;case"sqrtTall":n=function(e,t,r){return"M702 "+(e+t)+"H400000"+(40+e)+"\nH742v"+(r-54-t-e)+"l-4 4-4 4c-.667.7 -2 1.5-4 2.5s-4.167 1.833-6.5 2.5-5.5 1-9.5 1\nh-12l-28-84c-16.667-52-96.667 -294.333-240-727l-212 -643 -85 170\nc-4-3.333-8.333-7.667-13 -13l-13-13l77-155 77-156c66 199.333 139 419.667\n219 661 l218 661zM702 "+t+"H400000v"+(40+e)+"H742z"}(t,M,r)}return n}(e,n,r),i=new J(e,s),a=new K([i],{width:"400em",height:F(t),viewBox:"0 0 400000 "+r,preserveAspectRatio:"xMinYMin slice"});return 
Ve.makeSvgSpan(["hide-tail"],[a],o)},lr=["(","\\lparen",")","\\rparen","[","\\lbrack","]","\\rbrack","\\{","\\lbrace","\\}","\\rbrace","\\lfloor","\\rfloor","\u230a","\u230b","\\lceil","\\rceil","\u2308","\u2309","\\surd"],hr=["\\uparrow","\\downarrow","\\updownarrow","\\Uparrow","\\Downarrow","\\Updownarrow","|","\\|","\\vert","\\Vert","\\lvert","\\rvert","\\lVert","\\rVert","\\lgroup","\\rgroup","\u27ee","\u27ef","\\lmoustache","\\rmoustache","\u23b0","\u23b1"],cr=["<",">","\\langle","\\rangle","/","\\backslash","\\lt","\\gt"],mr=[0,1.2,1.8,2.4,3],pr=[{type:"small",style:w.SCRIPTSCRIPT},{type:"small",style:w.SCRIPT},{type:"small",style:w.TEXT},{type:"large",size:1},{type:"large",size:2},{type:"large",size:3},{type:"large",size:4}],ur=[{type:"small",style:w.SCRIPTSCRIPT},{type:"small",style:w.SCRIPT},{type:"small",style:w.TEXT},{type:"stack"}],dr=[{type:"small",style:w.SCRIPTSCRIPT},{type:"small",style:w.SCRIPT},{type:"small",style:w.TEXT},{type:"large",size:1},{type:"large",size:2},{type:"large",size:3},{type:"large",size:4},{type:"stack"}],gr=function(e){if("small"===e.type)return"Main-Regular";if("large"===e.type)return"Size"+e.size+"-Regular";if("stack"===e.type)return"Size4-Regular";throw new Error("Add support for delim type '"+e.type+"' here.")},fr=function(e,t,r,n){for(let o=Math.min(2,3-n.style.size);ot)return r[o]}return r[r.length-1]},br=function(e,t,r,n,o,s){let i;"<"===e||"\\lt"===e||"\u27e8"===e?e="\\langle":">"!==e&&"\\gt"!==e&&"\u27e9"!==e||(e="\\rangle"),i=l.contains(cr,e)?pr:l.contains(lr,e)?dr:ur;const a=fr(e,t,i,n);return"small"===a.type?function(e,t,r,n,o,s){const i=Ve.makeSymbol(e,"Main-Regular",o,n),a=Kt(i,t,n,s);return r&&Jt(a,n,t),a}(e,a.style,r,n,o,s):"large"===a.type?Qt(e,a.size,r,n,o,s):sr(e,t,r,n,o,s)};var yr={sqrtImage:function(e,t){const r=t.havingBaseSizing(),n=fr("\\surd",e*r.sizeMultiplier,dr,r);let o=r.sizeMultiplier;const s=Math.max(0,t.minRuleThickness-t.fontMetrics().sqrtRuleThickness);let 
i,a,l=0,h=0,c=0;return"small"===n.type?(c=1e3+1e3*s+80,e<1?o=1:e<1.4&&(o=.7),l=(1+s+ir)/o,h=(1+s)/o,i=ar("sqrtMain",l,c,s,t),i.style.minWidth="0.853em",a=.833/o):"large"===n.type?(c=1080*mr[n.size],h=(mr[n.size]+s)/o,l=(mr[n.size]+s+ir)/o,i=ar("sqrtSize"+n.size,l,c,s,t),i.style.minWidth="1.02em",a=1/o):(l=e+s+ir,h=e+s,c=Math.floor(1e3*e+s)+80,i=ar("sqrtTall",l,c,s,t),i.style.minWidth="0.742em",a=1.056),i.height=h,i.style.height=F(l),{span:i,advanceWidth:a,ruleWidth:(t.fontMetrics().sqrtRuleThickness+s)*o}},sizedDelim:function(e,t,r,o,s){if("<"===e||"\\lt"===e||"\u27e8"===e?e="\\langle":">"!==e&&"\\gt"!==e&&"\u27e9"!==e||(e="\\rangle"),l.contains(lr,e)||l.contains(cr,e))return Qt(e,t,!1,r,o,s);if(l.contains(hr,e))return sr(e,mr[t],!1,r,o,s);throw new n("Illegal delimiter: '"+e+"'")},sizeToMaxHeight:mr,customSizedDelim:br,leftRightDelim:function(e,t,r,n,o,s){const i=n.fontMetrics().axisHeight*n.sizeMultiplier,a=5/n.fontMetrics().ptPerEm,l=Math.max(t-i,r+i),h=Math.max(l/500*901,2*l-a);return br(e,h,!0,n,o,s)}};const 
xr={"\\bigl":{mclass:"mopen",size:1},"\\Bigl":{mclass:"mopen",size:2},"\\biggl":{mclass:"mopen",size:3},"\\Biggl":{mclass:"mopen",size:4},"\\bigr":{mclass:"mclose",size:1},"\\Bigr":{mclass:"mclose",size:2},"\\biggr":{mclass:"mclose",size:3},"\\Biggr":{mclass:"mclose",size:4},"\\bigm":{mclass:"mrel",size:1},"\\Bigm":{mclass:"mrel",size:2},"\\biggm":{mclass:"mrel",size:3},"\\Biggm":{mclass:"mrel",size:4},"\\big":{mclass:"mord",size:1},"\\Big":{mclass:"mord",size:2},"\\bigg":{mclass:"mord",size:3},"\\Bigg":{mclass:"mord",size:4}},wr=["(","\\lparen",")","\\rparen","[","\\lbrack","]","\\rbrack","\\{","\\lbrace","\\}","\\rbrace","\\lfloor","\\rfloor","\u230a","\u230b","\\lceil","\\rceil","\u2308","\u2309","<",">","\\langle","\u27e8","\\rangle","\u27e9","\\lt","\\gt","\\lvert","\\rvert","\\lVert","\\rVert","\\lgroup","\\rgroup","\u27ee","\u27ef","\\lmoustache","\\rmoustache","\u23b0","\u23b1","/","\\backslash","|","\\vert","\\|","\\Vert","\\uparrow","\\Uparrow","\\downarrow","\\Downarrow","\\updownarrow","\\Updownarrow","."];function vr(e,t){const r=Rt(e);if(r&&l.contains(wr,r.text))return r;throw new n(r?"Invalid delimiter '"+r.text+"' after '"+t.funcName+"'":"Invalid delimiter type '"+e.type+"'",e)}function kr(e){if(!e.body)throw new Error("Bug: The leftright ParseNode wasn't fully parsed.")}je({type:"delimsizing",names:["\\bigl","\\Bigl","\\biggl","\\Biggl","\\bigr","\\Bigr","\\biggr","\\Biggr","\\bigm","\\Bigm","\\biggm","\\Biggm","\\big","\\Big","\\bigg","\\Bigg"],props:{numArgs:1,argTypes:["primitive"]},handler:(e,t)=>{const r=vr(t[0],e);return{type:"delimsizing",mode:e.parser.mode,size:xr[e.funcName].size,mclass:xr[e.funcName].mclass,delim:r.text}},htmlBuilder:(e,t)=>"."===e.delim?Ve.makeSpan([e.mclass]):yr.sizedDelim(e.delim,e.size,t,e.mode,[e.mclass]),mathmlBuilder:e=>{const t=[];"."!==e.delim&&t.push(ft(e.delim,e.mode));const r=new 
gt.MathNode("mo",t);"mopen"===e.mclass||"mclose"===e.mclass?r.setAttribute("fence","true"):r.setAttribute("fence","false"),r.setAttribute("stretchy","true");const n=F(yr.sizeToMaxHeight[e.size]);return r.setAttribute("minsize",n),r.setAttribute("maxsize",n),r}}),je({type:"leftright-right",names:["\\right"],props:{numArgs:1,primitive:!0},handler:(e,t)=>{const r=e.parser.gullet.macros.get("\\current@color");if(r&&"string"!=typeof r)throw new n("\\current@color set to non-string in \\right");return{type:"leftright-right",mode:e.parser.mode,delim:vr(t[0],e).text,color:r}}}),je({type:"leftright",names:["\\left"],props:{numArgs:1,primitive:!0},handler:(e,t)=>{const r=vr(t[0],e),n=e.parser;++n.leftrightDepth;const o=n.parseExpression(!1);--n.leftrightDepth,n.expect("\\right",!1);const s=qt(n.parseFunction(),"leftright-right");return{type:"leftright",mode:n.mode,body:o,left:r.text,right:s.delim,rightColor:s.color}},htmlBuilder:(e,t)=>{kr(e);const r=nt(e.body,t,!0,["mopen","mclose"]);let n,o,s=0,i=0,a=!1;for(let e=0;e{kr(e);const r=xt(e.body,t);if("."!==e.left){const t=new gt.MathNode("mo",[ft(e.left,e.mode)]);t.setAttribute("fence","true"),r.unshift(t)}if("."!==e.right){const t=new gt.MathNode("mo",[ft(e.right,e.mode)]);t.setAttribute("fence","true"),e.rightColor&&t.setAttribute("mathcolor",e.rightColor),r.push(t)}return bt(r)}}),je({type:"middle",names:["\\middle"],props:{numArgs:1,primitive:!0},handler:(e,t)=>{const r=vr(t[0],e);if(!e.parser.leftrightDepth)throw new n("\\middle without preceding \\left",r);return{type:"middle",mode:e.parser.mode,delim:r.text}},htmlBuilder:(e,t)=>{let r;if("."===e.delim)r=lt(t,[]);else{r=yr.sizedDelim(e.delim,1,t,e.mode,[]);const n={delim:e.delim,options:t};r.isMiddle=n}return r},mathmlBuilder:(e,t)=>{const r="\\vert"===e.delim||"|"===e.delim?ft("|","text"):ft(e.delim,e.mode),n=new gt.MathNode("mo",[r]);return n.setAttribute("fence","true"),n.setAttribute("lspace","0.05em"),n.setAttribute("rspace","0.05em"),n}});const Sr=(e,t)=>{const 
r=Ve.wrapFragment(ht(e.body,t),t),n=e.label.slice(1);let o,s=t.sizeMultiplier,i=0;const a=l.isCharacterBox(e.body);if("sout"===n)o=Ve.makeSpan(["stretchy","sout"]),o.height=t.fontMetrics().defaultRuleThickness/s,i=-.5*t.fontMetrics().xHeight;else if("phase"===n){const e=P({number:.6,unit:"pt"},t),n=P({number:.35,unit:"ex"},t);s/=t.havingBaseSizing().sizeMultiplier;const a=r.height+r.depth+e+n;r.style.paddingLeft=F(a/2+e);const l=Math.floor(1e3*a*s),c="M400000 "+(h=l)+" H0 L"+h/2+" 0 l65 45 L145 "+(h-80)+" H400000z",m=new K([new J("phase",c)],{width:"400em",height:F(l/1e3),viewBox:"0 0 400000 "+l,preserveAspectRatio:"xMinYMin slice"});o=Ve.makeSvgSpan(["hide-tail"],[m],t),o.style.height=F(a),i=r.depth+e+n}else{/cancel/.test(n)?a||r.classes.push("cancel-pad"):"angl"===n?r.classes.push("anglpad"):r.classes.push("boxpad");let s=0,l=0,h=0;/box/.test(n)?(h=Math.max(t.fontMetrics().fboxrule,t.minRuleThickness),s=t.fontMetrics().fboxsep+("colorbox"===n?0:h),l=s):"angl"===n?(h=Math.max(t.fontMetrics().defaultRuleThickness,t.minRuleThickness),s=4*h,l=Math.max(0,.25-r.depth)):(s=a?.2:0,l=s),o=Bt(r,n,s,l,t),/fbox|boxed|fcolorbox/.test(n)?(o.style.borderStyle="solid",o.style.borderWidth=F(h)):"angl"===n&&.049!==h&&(o.style.borderTopWidth=F(h),o.style.borderRightWidth=F(h)),i=r.depth+l,e.backgroundColor&&(o.style.backgroundColor=e.backgroundColor,e.borderColor&&(o.style.borderColor=e.borderColor))}var h;let c;if(e.backgroundColor)c=Ve.makeVList({positionType:"individualShift",children:[{type:"elem",elem:o,shift:i},{type:"elem",elem:r,shift:0}]},t);else{const e=/cancel|phase/.test(n)?["svg-align"]:[];c=Ve.makeVList({positionType:"individualShift",children:[{type:"elem",elem:r,shift:0},{type:"elem",elem:o,shift:i,wrapperClasses:e}]},t)}return/cancel/.test(n)&&(c.height=r.height,c.depth=r.depth),/cancel/.test(n)&&!a?Ve.makeSpan(["mord","cancel-lap"],[c],t):Ve.makeSpan(["mord"],[c],t)},Mr=(e,t)=>{let r=0;const n=new 
gt.MathNode(e.label.indexOf("colorbox")>-1?"mpadded":"menclose",[vt(e.body,t)]);switch(e.label){case"\\cancel":n.setAttribute("notation","updiagonalstrike");break;case"\\bcancel":n.setAttribute("notation","downdiagonalstrike");break;case"\\phase":n.setAttribute("notation","phasorangle");break;case"\\sout":n.setAttribute("notation","horizontalstrike");break;case"\\fbox":n.setAttribute("notation","box");break;case"\\angl":n.setAttribute("notation","actuarial");break;case"\\fcolorbox":case"\\colorbox":if(r=t.fontMetrics().fboxsep*t.fontMetrics().ptPerEm,n.setAttribute("width","+"+2*r+"pt"),n.setAttribute("height","+"+2*r+"pt"),n.setAttribute("lspace",r+"pt"),n.setAttribute("voffset",r+"pt"),"\\fcolorbox"===e.label){const r=Math.max(t.fontMetrics().fboxrule,t.minRuleThickness);n.setAttribute("style","border: "+r+"em solid "+String(e.borderColor))}break;case"\\xcancel":n.setAttribute("notation","updiagonalstrike downdiagonalstrike")}return e.backgroundColor&&n.setAttribute("mathbackground",e.backgroundColor),n};je({type:"enclose",names:["\\colorbox"],props:{numArgs:2,allowedInText:!0,argTypes:["color","text"]},handler(e,t,r){let{parser:n,funcName:o}=e;const s=qt(t[0],"color-token").color,i=t[1];return{type:"enclose",mode:n.mode,label:o,backgroundColor:s,body:i}},htmlBuilder:Sr,mathmlBuilder:Mr}),je({type:"enclose",names:["\\fcolorbox"],props:{numArgs:3,allowedInText:!0,argTypes:["color","color","text"]},handler(e,t,r){let{parser:n,funcName:o}=e;const s=qt(t[0],"color-token").color,i=qt(t[1],"color-token").color,a=t[2];return{type:"enclose",mode:n.mode,label:o,backgroundColor:i,borderColor:s,body:a}},htmlBuilder:Sr,mathmlBuilder:Mr}),je({type:"enclose",names:["\\fbox"],props:{numArgs:1,argTypes:["hbox"],allowedInText:!0},handler(e,t){let{parser:r}=e;return{type:"enclose",mode:r.mode,label:"\\fbox",body:t[0]}}}),je({type:"enclose",names:["\\cancel","\\bcancel","\\xcancel","\\sout","\\phase"],props:{numArgs:1},handler(e,t){let{parser:r,funcName:n}=e;const 
o=t[0];return{type:"enclose",mode:r.mode,label:n,body:o}},htmlBuilder:Sr,mathmlBuilder:Mr}),je({type:"enclose",names:["\\angl"],props:{numArgs:1,argTypes:["hbox"],allowedInText:!1},handler(e,t){let{parser:r}=e;return{type:"enclose",mode:r.mode,label:"\\angl",body:t[0]}}});const zr={};function Ar(e){let{type:t,names:r,props:n,handler:o,htmlBuilder:s,mathmlBuilder:i}=e;const a={type:t,numArgs:n.numArgs||0,allowedInText:!1,numOptionalArgs:0,handler:o};for(let e=0;e{if(!e.parser.settings.displayMode)throw new n("{"+e.envName+"} can be used only in display mode.")};function Rr(e){if(-1===e.indexOf("ed"))return-1===e.indexOf("*")}function Hr(e,t,r){let{hskipBeforeAndAfter:o,addJot:s,cols:i,arraystretch:a,colSeparationType:l,autoTag:h,singleRow:c,emptySingleRow:m,maxNumCols:p,leqno:u}=t;if(e.gullet.beginGroup(),c||e.gullet.macros.set("\\cr","\\\\\\relax"),!a){const t=e.gullet.expandMacroAsText("\\arraystretch");if(null==t)a=1;else if(a=parseFloat(t),!a||a<0)throw new n("Invalid \\arraystretch: "+t)}e.gullet.beginGroup();let d=[];const g=[d],f=[],b=[],y=null!=h?[]:void 0;function x(){h&&e.gullet.macros.set("\\@eqnsw","1",!0)}function w(){y&&(e.gullet.macros.get("\\df@tag")?(y.push(e.subparse([new Nr("\\df@tag")])),e.gullet.macros.set("\\df@tag",void 0,!0)):y.push(Boolean(h)&&"1"===e.gullet.macros.get("\\@eqnsw")))}for(x(),b.push(qr(e));;){let t=e.parseExpression(!1,c?"\\end":"\\\\");e.gullet.endGroup(),e.gullet.beginGroup(),t={type:"ordgroup",mode:e.mode,body:t},r&&(t={type:"styling",mode:e.mode,style:r,body:[t]}),d.push(t);const o=e.fetch().text;if("&"===o){if(p&&d.length===p){if(c||l)throw new n("Too many tab characters: &",e.nextToken);e.settings.reportNonstrict("textEnv","Too few columns specified in the {array} column 
argument.")}e.consume()}else{if("\\end"===o){w(),1===d.length&&"styling"===t.type&&0===t.body[0].body.length&&(g.length>1||!m)&&g.pop(),b.length0&&(x+=.25),c.push({pos:x,isDashed:e[t]})}for(v(i[0]),r=0;r0&&(p+=y,le)))for(r=0;r=a)continue;(o>0||e.hskipBeforeAndAfter)&&(i=l.deflt(c.pregap,u),0!==i&&(z=Ve.makeSpan(["arraycolsep"],[]),z.style.width=F(i),M.push(z)));let d=[];for(r=0;r0){const e=Ve.makeLineSpan("hline",t,m),r=Ve.makeLineSpan("hdashline",t,m),n=[{type:"elem",elem:h,shift:0}];for(;c.length>0;){const t=c.pop(),o=t.pos-k;t.isDashed?n.push({type:"elem",elem:r,shift:o}):n.push({type:"elem",elem:e,shift:o})}h=Ve.makeVList({positionType:"individualShift",children:n},t)}if(0===T.length)return Ve.makeSpan(["mord"],[h],t);{let e=Ve.makeVList({positionType:"individualShift",children:T},t);return e=Ve.makeSpan(["tag"],[e],t),Ve.makeFragment([h,e])}},Lr={c:"center ",l:"left ",r:"right "},Dr=function(e,t){const r=[],n=new gt.MathNode("mtd",[],["mtr-glue"]),o=new gt.MathNode("mtd",[],["mml-eqn-num"]);for(let s=0;s0){const t=e.cols;let r="",n=!1,o=0,i=t.length;"separator"===t[0].type&&(a+="top ",o=1),"separator"===t[t.length-1].type&&(a+="bottom ",i-=1);for(let e=o;e0?"left ":"",a+=c[c.length-1].length>0?"right ":"";for(let e=1;e-1?"alignat":"align",s="split"===e.envName,i=Hr(e.parser,{cols:r,addJot:!0,autoTag:s?void 0:Rr(e.envName),emptySingleRow:!0,colSeparationType:o,maxNumCols:s?2:void 0,leqno:e.parser.settings.leqno},"display");let a,l=0;const h={type:"ordgroup",mode:e.mode,body:[]};if(t[0]&&"ordgroup"===t[0].type){let e="";for(let r=0;r0&&c&&(n=1),r[e]={type:"align",align:t,pregap:n,postgap:0}}return i.colSeparationType=c?"align":"alignat",i};Ar({type:"array",names:["array","darray"],props:{numArgs:1},handler(e,t){const r=(Rt(t[0])?[t[0]]:qt(t[0],"ordgroup").body).map((function(e){const t=It(e).text;if(-1!=="lcr".indexOf(t))return{type:"align",align:t};if("|"===t)return{type:"separator",separator:"|"};if(":"===t)return{type:"separator",separator:":"};throw new 
n("Unknown column alignment: "+t,e)})),o={cols:r,hskipBeforeAndAfter:!0,maxNumCols:r.length};return Hr(e.parser,o,Or(e.envName))},htmlBuilder:Er,mathmlBuilder:Dr}),Ar({type:"array",names:["matrix","pmatrix","bmatrix","Bmatrix","vmatrix","Vmatrix","matrix*","pmatrix*","bmatrix*","Bmatrix*","vmatrix*","Vmatrix*"],props:{numArgs:0},handler(e){const t={matrix:null,pmatrix:["(",")"],bmatrix:["[","]"],Bmatrix:["\\{","\\}"],vmatrix:["|","|"],Vmatrix:["\\Vert","\\Vert"]}[e.envName.replace("*","")];let r="c";const o={hskipBeforeAndAfter:!1,cols:[{type:"align",align:r}]};if("*"===e.envName.charAt(e.envName.length-1)){const t=e.parser;if(t.consumeSpaces(),"["===t.fetch().text){if(t.consume(),t.consumeSpaces(),r=t.fetch().text,-1==="lcr".indexOf(r))throw new n("Expected l or c or r",t.nextToken);t.consume(),t.consumeSpaces(),t.expect("]"),t.consume(),o.cols=[{type:"align",align:r}]}}const s=Hr(e.parser,o,Or(e.envName)),i=Math.max(0,...s.body.map((e=>e.length)));return s.cols=new Array(i).fill({type:"align",align:r}),t?{type:"leftright",mode:e.mode,body:[s],left:t[0],right:t[1],rightColor:void 0}:s},htmlBuilder:Er,mathmlBuilder:Dr}),Ar({type:"array",names:["smallmatrix"],props:{numArgs:0},handler(e){const t=Hr(e.parser,{arraystretch:.5},"script");return t.colSeparationType="small",t},htmlBuilder:Er,mathmlBuilder:Dr}),Ar({type:"array",names:["subarray"],props:{numArgs:1},handler(e,t){const r=(Rt(t[0])?[t[0]]:qt(t[0],"ordgroup").body).map((function(e){const t=It(e).text;if(-1!=="lc".indexOf(t))return{type:"align",align:t};throw new n("Unknown column alignment: "+t,e)}));if(r.length>1)throw new n("{subarray} can contain only one column");let o={cols:r,hskipBeforeAndAfter:!1,arraystretch:.5};if(o=Hr(e.parser,o,"script"),o.body.length>0&&o.body[0].length>1)throw new n("{subarray} can contain only one column");return o},htmlBuilder:Er,mathmlBuilder:Dr}),Ar({type:"array",names:["cases","dcases","rcases","drcases"],props:{numArgs:0},handler(e){const 
t=Hr(e.parser,{arraystretch:1.2,cols:[{type:"align",align:"l",pregap:0,postgap:1},{type:"align",align:"l",pregap:0,postgap:0}]},Or(e.envName));return{type:"leftright",mode:e.mode,body:[t],left:e.envName.indexOf("r")>-1?".":"\\{",right:e.envName.indexOf("r")>-1?"\\}":".",rightColor:void 0}},htmlBuilder:Er,mathmlBuilder:Dr}),Ar({type:"array",names:["align","align*","aligned","split"],props:{numArgs:0},handler:Vr,htmlBuilder:Er,mathmlBuilder:Dr}),Ar({type:"array",names:["gathered","gather","gather*"],props:{numArgs:0},handler(e){l.contains(["gather","gather*"],e.envName)&&Ir(e);const t={cols:[{type:"align",align:"c"}],addJot:!0,colSeparationType:"gather",autoTag:Rr(e.envName),emptySingleRow:!0,leqno:e.parser.settings.leqno};return Hr(e.parser,t,"display")},htmlBuilder:Er,mathmlBuilder:Dr}),Ar({type:"array",names:["alignat","alignat*","alignedat"],props:{numArgs:1},handler:Vr,htmlBuilder:Er,mathmlBuilder:Dr}),Ar({type:"array",names:["equation","equation*"],props:{numArgs:0},handler(e){Ir(e);const t={autoTag:Rr(e.envName),emptySingleRow:!0,singleRow:!0,maxNumCols:1,leqno:e.parser.settings.leqno};return Hr(e.parser,t,"display")},htmlBuilder:Er,mathmlBuilder:Dr}),Ar({type:"array",names:["CD"],props:{numArgs:0},handler(e){return Ir(e),function(e){const t=[];for(e.gullet.beginGroup(),e.gullet.macros.set("\\cr","\\\\\\relax"),e.gullet.beginGroup();;){t.push(e.parseExpression(!1,"\\\\")),e.gullet.endGroup(),e.gullet.beginGroup();const r=e.fetch().text;if("&"!==r&&"\\\\"!==r){if("\\end"===r){0===t[t.length-1].length&&t.pop();break}throw new n("Expected \\\\ or \\cr or \\end",e.nextToken)}e.consume()}let r=[];const o=[r];for(let a=0;a-1);else{if(!("<>AV".indexOf(o)>-1))throw new n('Expected one of "<>AV=|." 
after @',l[t]);for(let e=0;e<2;e++){let r=!0;for(let h=t+1;h{const r=e.font,n=t.withFont(r);return ht(e.body,n)},Gr=(e,t)=>{const r=e.font,n=t.withFont(r);return vt(e.body,n)},Ur={"\\Bbb":"\\mathbb","\\bold":"\\mathbf","\\frak":"\\mathfrak","\\bm":"\\boldsymbol"};je({type:"font",names:["\\mathrm","\\mathit","\\mathbf","\\mathnormal","\\mathbb","\\mathcal","\\mathfrak","\\mathscr","\\mathsf","\\mathtt","\\Bbb","\\bold","\\frak"],props:{numArgs:1,allowedInArgument:!0},handler:(e,t)=>{let{parser:r,funcName:n}=e;const o=Ze(t[0]);let s=n;return s in Ur&&(s=Ur[s]),{type:"font",mode:r.mode,font:s.slice(1),body:o}},htmlBuilder:Fr,mathmlBuilder:Gr}),je({type:"mclass",names:["\\boldsymbol","\\bm"],props:{numArgs:1},handler:(e,t)=>{let{parser:r}=e;const n=t[0],o=l.isCharacterBox(n);return{type:"mclass",mode:r.mode,mclass:Ft(n),body:[{type:"font",mode:r.mode,font:"boldsymbol",body:n}],isCharacterBox:o}}}),je({type:"font",names:["\\rm","\\sf","\\tt","\\bf","\\it","\\cal"],props:{numArgs:0,allowedInText:!0},handler:(e,t)=>{let{parser:r,funcName:n,breakOnTokenText:o}=e;const{mode:s}=r,i=r.parseExpression(!0,o);return{type:"font",mode:s,font:"math"+n.slice(1),body:{type:"ordgroup",mode:r.mode,body:i}}},htmlBuilder:Fr,mathmlBuilder:Gr});const Yr=(e,t)=>{let r=t;return"display"===e?r=r.id>=w.SCRIPT.id?r.text():w.DISPLAY:"text"===e&&r.size===w.DISPLAY.size?r=w.TEXT:"script"===e?r=w.SCRIPT:"scriptscript"===e&&(r=w.SCRIPTSCRIPT),r},Xr=(e,t)=>{const r=Yr(e.size,t.style),n=r.fracNum(),o=r.fracDen();let s;s=t.havingStyle(n);const i=ht(e.numer,s,t);if(e.continued){const e=8.5/t.fontMetrics().ptPerEm,r=3.5/t.fontMetrics().ptPerEm;i.height=i.height0?3*c:7*c,u=t.fontMetrics().denom1):(h>0?(m=t.fontMetrics().num2,p=c):(m=t.fontMetrics().num3,p=3*c),u=t.fontMetrics().denom2),l){const e=t.fontMetrics().axisHeight;m-i.depth-(e+.5*h){let r=new gt.MathNode("mfrac",[vt(e.numer,t),vt(e.denom,t)]);if(e.hasBarLine){if(e.barSize){const n=P(e.barSize,t);r.setAttribute("linethickness",F(n))}}else 
r.setAttribute("linethickness","0px");const n=Yr(e.size,t.style);if(n.size!==t.style.size){r=new gt.MathNode("mstyle",[r]);const e=n.size===w.DISPLAY.size?"true":"false";r.setAttribute("displaystyle",e),r.setAttribute("scriptlevel","0")}if(null!=e.leftDelim||null!=e.rightDelim){const t=[];if(null!=e.leftDelim){const r=new gt.MathNode("mo",[new gt.TextNode(e.leftDelim.replace("\\",""))]);r.setAttribute("fence","true"),t.push(r)}if(t.push(r),null!=e.rightDelim){const r=new gt.MathNode("mo",[new gt.TextNode(e.rightDelim.replace("\\",""))]);r.setAttribute("fence","true"),t.push(r)}return bt(t)}return r};je({type:"genfrac",names:["\\dfrac","\\frac","\\tfrac","\\dbinom","\\binom","\\tbinom","\\\\atopfrac","\\\\bracefrac","\\\\brackfrac"],props:{numArgs:2,allowedInArgument:!0},handler:(e,t)=>{let{parser:r,funcName:n}=e;const o=t[0],s=t[1];let i,a=null,l=null,h="auto";switch(n){case"\\dfrac":case"\\frac":case"\\tfrac":i=!0;break;case"\\\\atopfrac":i=!1;break;case"\\dbinom":case"\\binom":case"\\tbinom":i=!1,a="(",l=")";break;case"\\\\bracefrac":i=!1,a="\\{",l="\\}";break;case"\\\\brackfrac":i=!1,a="[",l="]";break;default:throw new Error("Unrecognized genfrac command")}switch(n){case"\\dfrac":case"\\dbinom":h="display";break;case"\\tfrac":case"\\tbinom":h="text"}return{type:"genfrac",mode:r.mode,continued:!1,numer:o,denom:s,hasBarLine:i,leftDelim:a,rightDelim:l,size:h,barSize:null}},htmlBuilder:Xr,mathmlBuilder:Wr}),je({type:"genfrac",names:["\\cfrac"],props:{numArgs:2},handler:(e,t)=>{let{parser:r,funcName:n}=e;const o=t[0],s=t[1];return{type:"genfrac",mode:r.mode,continued:!0,numer:o,denom:s,hasBarLine:!0,leftDelim:null,rightDelim:null,size:"display",barSize:null}}}),je({type:"infix",names:["\\over","\\choose","\\atop","\\brace","\\brack"],props:{numArgs:0,infix:!0},handler(e){let 
t,{parser:r,funcName:n,token:o}=e;switch(n){case"\\over":t="\\frac";break;case"\\choose":t="\\binom";break;case"\\atop":t="\\\\atopfrac";break;case"\\brace":t="\\\\bracefrac";break;case"\\brack":t="\\\\brackfrac";break;default:throw new Error("Unrecognized infix genfrac command")}return{type:"infix",mode:r.mode,replaceWith:t,token:o}}});const _r=["display","text","script","scriptscript"],jr=function(e){let t=null;return e.length>0&&(t=e,t="."===t?null:t),t};je({type:"genfrac",names:["\\genfrac"],props:{numArgs:6,allowedInArgument:!0,argTypes:["math","math","size","text","math","math"]},handler(e,t){let{parser:r}=e;const n=t[4],o=t[5],s=Ze(t[0]),i="atom"===s.type&&"open"===s.family?jr(s.text):null,a=Ze(t[1]),l="atom"===a.type&&"close"===a.family?jr(a.text):null,h=qt(t[2],"size");let c,m=null;h.isBlank?c=!0:(m=h.value,c=m.number>0);let p="auto",u=t[3];if("ordgroup"===u.type){if(u.body.length>0){const e=qt(u.body[0],"textord");p=_r[Number(e.text)]}}else u=qt(u,"textord"),p=_r[Number(u.text)];return{type:"genfrac",mode:r.mode,numer:n,denom:o,continued:!1,hasBarLine:c,barSize:m,leftDelim:i,rightDelim:l,size:p}},htmlBuilder:Xr,mathmlBuilder:Wr}),je({type:"infix",names:["\\above"],props:{numArgs:1,argTypes:["size"],infix:!0},handler(e,t){let{parser:r,funcName:n,token:o}=e;return{type:"infix",mode:r.mode,replaceWith:"\\\\abovefrac",size:qt(t[0],"size").value,token:o}}}),je({type:"genfrac",names:["\\\\abovefrac"],props:{numArgs:3,argTypes:["math","size","math"]},handler:(e,t)=>{let{parser:r,funcName:n}=e;const o=t[0],s=function(e){if(!e)throw new Error("Expected non-null, but got "+String(e));return e}(qt(t[1],"infix").size),i=t[2],a=s.number>0;return{type:"genfrac",mode:r.mode,numer:o,denom:i,continued:!1,hasBarLine:a,barSize:s,leftDelim:null,rightDelim:null,size:"auto"}},htmlBuilder:Xr,mathmlBuilder:Wr});const $r=(e,t)=>{const r=t.style;let 
n,o;"supsub"===e.type?(n=e.sup?ht(e.sup,t.havingStyle(r.sup()),t):ht(e.sub,t.havingStyle(r.sub()),t),o=qt(e.base,"horizBrace")):o=qt(e,"horizBrace");const s=ht(o.base,t.havingBaseStyle(w.DISPLAY)),i=Nt(o,t);let a;if(o.isOver?(a=Ve.makeVList({positionType:"firstBaseline",children:[{type:"elem",elem:s},{type:"kern",size:.1},{type:"elem",elem:i}]},t),a.children[0].children[0].children[1].classes.push("svg-align")):(a=Ve.makeVList({positionType:"bottom",positionData:s.depth+.1+i.height,children:[{type:"elem",elem:i},{type:"kern",size:.1},{type:"elem",elem:s}]},t),a.children[0].children[0].children[0].classes.push("svg-align")),n){const e=Ve.makeSpan(["mord",o.isOver?"mover":"munder"],[a],t);a=o.isOver?Ve.makeVList({positionType:"firstBaseline",children:[{type:"elem",elem:e},{type:"kern",size:.2},{type:"elem",elem:n}]},t):Ve.makeVList({positionType:"bottom",positionData:e.depth+.2+n.height+n.depth,children:[{type:"elem",elem:n},{type:"kern",size:.2},{type:"elem",elem:e}]},t)}return Ve.makeSpan(["mord",o.isOver?"mover":"munder"],[a],t)};je({type:"horizBrace",names:["\\overbrace","\\underbrace"],props:{numArgs:1},handler(e,t){let{parser:r,funcName:n}=e;return{type:"horizBrace",mode:r.mode,label:n,isOver:/^\\over/.test(n),base:t[0]}},htmlBuilder:$r,mathmlBuilder:(e,t)=>{const r=Ct(e.label);return new gt.MathNode(e.isOver?"mover":"munder",[vt(e.base,t),r])}}),je({type:"href",names:["\\href"],props:{numArgs:2,argTypes:["url","original"],allowedInText:!0},handler:(e,t)=>{let{parser:r}=e;const n=t[1],o=qt(t[0],"url").url;return r.settings.isTrusted({command:"\\href",url:o})?{type:"href",mode:r.mode,href:o,body:Ke(n)}:r.formatUnsupportedCmd("\\href")},htmlBuilder:(e,t)=>{const r=nt(e.body,t,!1);return Ve.makeAnchor(e.href,[],r,t)},mathmlBuilder:(e,t)=>{let r=wt(e.body,t);return r instanceof ut||(r=new ut("mrow",[r])),r.setAttribute("href",e.href),r}}),je({type:"href",names:["\\url"],props:{numArgs:1,argTypes:["url"],allowedInText:!0},handler:(e,t)=>{let{parser:r}=e;const 
n=qt(t[0],"url").url;if(!r.settings.isTrusted({command:"\\url",url:n}))return r.formatUnsupportedCmd("\\url");const o=[];for(let e=0;e{let{parser:r,funcName:o,token:s}=e;const i=qt(t[0],"raw").string,a=t[1];let l;r.settings.strict&&r.settings.reportNonstrict("htmlExtension","HTML extension is disabled on strict mode");const h={};switch(o){case"\\htmlClass":h.class=i,l={command:"\\htmlClass",class:i};break;case"\\htmlId":h.id=i,l={command:"\\htmlId",id:i};break;case"\\htmlStyle":h.style=i,l={command:"\\htmlStyle",style:i};break;case"\\htmlData":{const e=i.split(",");for(let t=0;t{const r=nt(e.body,t,!1),n=["enclosing"];e.attributes.class&&n.push(...e.attributes.class.trim().split(/\s+/));const o=Ve.makeSpan(n,r,t);for(const t in e.attributes)"class"!==t&&e.attributes.hasOwnProperty(t)&&o.setAttribute(t,e.attributes[t]);return o},mathmlBuilder:(e,t)=>wt(e.body,t)}),je({type:"htmlmathml",names:["\\html@mathml"],props:{numArgs:2,allowedInText:!0},handler:(e,t)=>{let{parser:r}=e;return{type:"htmlmathml",mode:r.mode,html:Ke(t[0]),mathml:Ke(t[1])}},htmlBuilder:(e,t)=>{const r=nt(e.html,t,!1);return Ve.makeFragment(r)},mathmlBuilder:(e,t)=>wt(e.mathml,t)});const Zr=function(e){if(/^[-+]? *(\d+(\.\d*)?|\.\d+)$/.test(e))return{number:+e,unit:"bp"};{const t=/([-+]?) 
*(\d+(?:\.\d*)?|\.\d+) *([a-z]{2})/.exec(e);if(!t)throw new n("Invalid size: '"+e+"' in \\includegraphics");const r={number:+(t[1]+t[2]),unit:t[3]};if(!V(r))throw new n("Invalid unit: '"+r.unit+"' in \\includegraphics.");return r}};je({type:"includegraphics",names:["\\includegraphics"],props:{numArgs:1,numOptionalArgs:1,argTypes:["raw","url"],allowedInText:!1},handler:(e,t,r)=>{let{parser:o}=e,s={number:0,unit:"em"},i={number:.9,unit:"em"},a={number:0,unit:"em"},l="";if(r[0]){const e=qt(r[0],"raw").string.split(",");for(let t=0;t{const r=P(e.height,t);let n=0;e.totalheight.number>0&&(n=P(e.totalheight,t)-r);let o=0;e.width.number>0&&(o=P(e.width,t));const s={height:F(r+n)};o>0&&(s.width=F(o)),n>0&&(s.verticalAlign=F(-n));const i=new j(e.src,e.alt,s);return i.height=r,i.depth=n,i},mathmlBuilder:(e,t)=>{const r=new gt.MathNode("mglyph",[]);r.setAttribute("alt",e.alt);const n=P(e.height,t);let o=0;if(e.totalheight.number>0&&(o=P(e.totalheight,t)-n,r.setAttribute("valign",F(-o))),r.setAttribute("height",F(n+o)),e.width.number>0){const n=P(e.width,t);r.setAttribute("width",F(n))}return r.setAttribute("src",e.src),r}}),je({type:"kern",names:["\\kern","\\mkern","\\hskip","\\mskip"],props:{numArgs:1,argTypes:["size"],primitive:!0,allowedInText:!0},handler(e,t){let{parser:r,funcName:n}=e;const o=qt(t[0],"size");if(r.settings.strict){const e="m"===n[1],t="mu"===o.value.unit;e?(t||r.settings.reportNonstrict("mathVsTextUnits","LaTeX's "+n+" supports only mu units, not "+o.value.unit+" units"),"math"!==r.mode&&r.settings.reportNonstrict("mathVsTextUnits","LaTeX's "+n+" works only in math mode")):t&&r.settings.reportNonstrict("mathVsTextUnits","LaTeX's "+n+" doesn't support mu units")}return{type:"kern",mode:r.mode,dimension:o.value}},htmlBuilder(e,t){return Ve.makeGlue(e.dimension,t)},mathmlBuilder(e,t){const r=P(e.dimension,t);return new 
gt.SpaceNode(r)}}),je({type:"lap",names:["\\mathllap","\\mathrlap","\\mathclap"],props:{numArgs:1,allowedInText:!0},handler:(e,t)=>{let{parser:r,funcName:n}=e;const o=t[0];return{type:"lap",mode:r.mode,alignment:n.slice(5),body:o}},htmlBuilder:(e,t)=>{let r;"clap"===e.alignment?(r=Ve.makeSpan([],[ht(e.body,t)]),r=Ve.makeSpan(["inner"],[r],t)):r=Ve.makeSpan(["inner"],[ht(e.body,t)]);const n=Ve.makeSpan(["fix"],[]);let o=Ve.makeSpan([e.alignment],[r,n],t);const s=Ve.makeSpan(["strut"]);return s.style.height=F(o.height+o.depth),o.depth&&(s.style.verticalAlign=F(-o.depth)),o.children.unshift(s),o=Ve.makeSpan(["thinbox"],[o],t),Ve.makeSpan(["mord","vbox"],[o],t)},mathmlBuilder:(e,t)=>{const r=new gt.MathNode("mpadded",[vt(e.body,t)]);if("rlap"!==e.alignment){const t="llap"===e.alignment?"-1":"-0.5";r.setAttribute("lspace",t+"width")}return r.setAttribute("width","0px"),r}}),je({type:"styling",names:["\\(","$"],props:{numArgs:0,allowedInText:!0,allowedInMath:!1},handler(e,t){let{funcName:r,parser:n}=e;const o=n.mode;n.switchMode("math");const s="\\("===r?"\\)":"$",i=n.parseExpression(!1,s);return n.expect(s),n.switchMode(o),{type:"styling",mode:n.mode,style:"text",body:i}}}),je({type:"text",names:["\\)","\\]"],props:{numArgs:0,allowedInText:!0,allowedInMath:!1},handler(e,t){throw new n("Mismatched "+e.funcName)}});const Kr=(e,t)=>{switch(t.style.size){case w.DISPLAY.size:return e.display;case w.TEXT.size:return e.text;case w.SCRIPT.size:return e.script;case w.SCRIPTSCRIPT.size:return e.scriptscript;default:return e.text}};je({type:"mathchoice",names:["\\mathchoice"],props:{numArgs:4,primitive:!0},handler:(e,t)=>{let{parser:r}=e;return{type:"mathchoice",mode:r.mode,display:Ke(t[0]),text:Ke(t[1]),script:Ke(t[2]),scriptscript:Ke(t[3])}},htmlBuilder:(e,t)=>{const r=Kr(e,t),n=nt(r,t,!1);return Ve.makeFragment(n)},mathmlBuilder:(e,t)=>{const r=Kr(e,t);return wt(r,t)}});const Jr=(e,t,r,n,o,s,i)=>{e=Ve.makeSpan([],[e]);const a=r&&l.isCharacterBox(r);let h,c,m;if(t){const 
e=ht(t,n.havingStyle(o.sup()),n);c={elem:e,kern:Math.max(n.fontMetrics().bigOpSpacing1,n.fontMetrics().bigOpSpacing3-e.depth)}}if(r){const e=ht(r,n.havingStyle(o.sub()),n);h={elem:e,kern:Math.max(n.fontMetrics().bigOpSpacing2,n.fontMetrics().bigOpSpacing4-e.height)}}if(c&&h){const t=n.fontMetrics().bigOpSpacing5+h.elem.height+h.elem.depth+h.kern+e.depth+i;m=Ve.makeVList({positionType:"bottom",positionData:t,children:[{type:"kern",size:n.fontMetrics().bigOpSpacing5},{type:"elem",elem:h.elem,marginLeft:F(-s)},{type:"kern",size:h.kern},{type:"elem",elem:e},{type:"kern",size:c.kern},{type:"elem",elem:c.elem,marginLeft:F(s)},{type:"kern",size:n.fontMetrics().bigOpSpacing5}]},n)}else if(h){const t=e.height-i;m=Ve.makeVList({positionType:"top",positionData:t,children:[{type:"kern",size:n.fontMetrics().bigOpSpacing5},{type:"elem",elem:h.elem,marginLeft:F(-s)},{type:"kern",size:h.kern},{type:"elem",elem:e}]},n)}else{if(!c)return e;{const t=e.depth+i;m=Ve.makeVList({positionType:"bottom",positionData:t,children:[{type:"elem",elem:e},{type:"kern",size:c.kern},{type:"elem",elem:c.elem,marginLeft:F(s)},{type:"kern",size:n.fontMetrics().bigOpSpacing5}]},n)}}const p=[m];if(h&&0!==s&&!a){const e=Ve.makeSpan(["mspace"],[],n);e.style.marginRight=F(s),p.unshift(e)}return Ve.makeSpan(["mop","op-limits"],p,n)},Qr=["\\smallint"],en=(e,t)=>{let r,n,o,s=!1;"supsub"===e.type?(r=e.sup,n=e.sub,o=qt(e.base,"op"),s=!0):o=qt(e,"op");const i=t.style;let a,h=!1;if(i.size===w.DISPLAY.size&&o.symbol&&!l.contains(Qr,o.name)&&(h=!0),o.symbol){const e=h?"Size2-Regular":"Size1-Regular";let r="";if("\\oiint"!==o.name&&"\\oiiint"!==o.name||(r=o.name.slice(1),o.name="oiint"===r?"\\iint":"\\iiint"),a=Ve.makeSymbol(o.name,e,"math",t,["mop","op-symbol",h?"large-op":"small-op"]),r.length>0){const 
e=a.italic,n=Ve.staticSvg(r+"Size"+(h?"2":"1"),t);a=Ve.makeVList({positionType:"individualShift",children:[{type:"elem",elem:a,shift:0},{type:"elem",elem:n,shift:h?.08:0}]},t),o.name="\\"+r,a.classes.unshift("mop"),a.italic=e}}else if(o.body){const e=nt(o.body,t,!0);1===e.length&&e[0]instanceof Z?(a=e[0],a.classes[0]="mop"):a=Ve.makeSpan(["mop"],e,t)}else{const e=[];for(let r=1;r{let r;if(e.symbol)r=new ut("mo",[ft(e.name,e.mode)]),l.contains(Qr,e.name)&&r.setAttribute("largeop","false");else if(e.body)r=new ut("mo",xt(e.body,t));else{r=new ut("mi",[new dt(e.name.slice(1))]);const t=new ut("mo",[ft("\u2061","text")]);r=e.parentIsSupSub?new ut("mrow",[r,t]):pt([r,t])}return r},rn={"\u220f":"\\prod","\u2210":"\\coprod","\u2211":"\\sum","\u22c0":"\\bigwedge","\u22c1":"\\bigvee","\u22c2":"\\bigcap","\u22c3":"\\bigcup","\u2a00":"\\bigodot","\u2a01":"\\bigoplus","\u2a02":"\\bigotimes","\u2a04":"\\biguplus","\u2a06":"\\bigsqcup"};je({type:"op",names:["\\coprod","\\bigvee","\\bigwedge","\\biguplus","\\bigcap","\\bigcup","\\intop","\\prod","\\sum","\\bigotimes","\\bigoplus","\\bigodot","\\bigsqcup","\\smallint","\u220f","\u2210","\u2211","\u22c0","\u22c1","\u22c2","\u22c3","\u2a00","\u2a01","\u2a02","\u2a04","\u2a06"],props:{numArgs:0},handler:(e,t)=>{let{parser:r,funcName:n}=e,o=n;return 1===o.length&&(o=rn[o]),{type:"op",mode:r.mode,limits:!0,parentIsSupSub:!1,symbol:!0,name:o}},htmlBuilder:en,mathmlBuilder:tn}),je({type:"op",names:["\\mathop"],props:{numArgs:1,primitive:!0},handler:(e,t)=>{let{parser:r}=e;const n=t[0];return{type:"op",mode:r.mode,limits:!1,parentIsSupSub:!1,symbol:!1,body:Ke(n)}},htmlBuilder:en,mathmlBuilder:tn});const 
nn={"\u222b":"\\int","\u222c":"\\iint","\u222d":"\\iiint","\u222e":"\\oint","\u222f":"\\oiint","\u2230":"\\oiiint"};je({type:"op",names:["\\arcsin","\\arccos","\\arctan","\\arctg","\\arcctg","\\arg","\\ch","\\cos","\\cosec","\\cosh","\\cot","\\cotg","\\coth","\\csc","\\ctg","\\cth","\\deg","\\dim","\\exp","\\hom","\\ker","\\lg","\\ln","\\log","\\sec","\\sin","\\sinh","\\sh","\\tan","\\tanh","\\tg","\\th"],props:{numArgs:0},handler(e){let{parser:t,funcName:r}=e;return{type:"op",mode:t.mode,limits:!1,parentIsSupSub:!1,symbol:!1,name:r}},htmlBuilder:en,mathmlBuilder:tn}),je({type:"op",names:["\\det","\\gcd","\\inf","\\lim","\\max","\\min","\\Pr","\\sup"],props:{numArgs:0},handler(e){let{parser:t,funcName:r}=e;return{type:"op",mode:t.mode,limits:!0,parentIsSupSub:!1,symbol:!1,name:r}},htmlBuilder:en,mathmlBuilder:tn}),je({type:"op",names:["\\int","\\iint","\\iiint","\\oint","\\oiint","\\oiiint","\u222b","\u222c","\u222d","\u222e","\u222f","\u2230"],props:{numArgs:0},handler(e){let{parser:t,funcName:r}=e,n=r;return 1===n.length&&(n=nn[n]),{type:"op",mode:t.mode,limits:!1,parentIsSupSub:!1,symbol:!0,name:n}},htmlBuilder:en,mathmlBuilder:tn});const on=(e,t)=>{let r,n,o,s,i=!1;if("supsub"===e.type?(r=e.sup,n=e.sub,o=qt(e.base,"operatorname"),i=!0):o=qt(e,"operatorname"),o.body.length>0){const e=o.body.map((e=>{const t=e.text;return"string"==typeof t?{type:"textord",mode:e.mode,text:t}:e})),r=nt(e,t.withFont("mathrm"),!0);for(let e=0;e{let{parser:r,funcName:n}=e;const o=t[0];return{type:"operatorname",mode:r.mode,body:Ke(o),alwaysHandleSupSub:"\\operatornamewithlimits"===n,limits:!1,parentIsSupSub:!1}},htmlBuilder:on,mathmlBuilder:(e,t)=>{let r=xt(e.body,t.withFont("mathrm")),n=!0;for(let e=0;ee.toText())).join("");r=[new gt.TextNode(e)]}const o=new gt.MathNode("mi",r);o.setAttribute("mathvariant","normal");const s=new gt.MathNode("mo",[ft("\u2061","text")]);return e.parentIsSupSub?new 
gt.MathNode("mrow",[o,s]):gt.newDocumentFragment([o,s])}}),Br("\\operatorname","\\@ifstar\\operatornamewithlimits\\operatorname@"),$e({type:"ordgroup",htmlBuilder(e,t){return e.semisimple?Ve.makeFragment(nt(e.body,t,!1)):Ve.makeSpan(["mord"],nt(e.body,t,!0),t)},mathmlBuilder(e,t){return wt(e.body,t,!0)}}),je({type:"overline",names:["\\overline"],props:{numArgs:1},handler(e,t){let{parser:r}=e;const n=t[0];return{type:"overline",mode:r.mode,body:n}},htmlBuilder(e,t){const r=ht(e.body,t.havingCrampedStyle()),n=Ve.makeLineSpan("overline-line",t),o=t.fontMetrics().defaultRuleThickness,s=Ve.makeVList({positionType:"firstBaseline",children:[{type:"elem",elem:r},{type:"kern",size:3*o},{type:"elem",elem:n},{type:"kern",size:o}]},t);return Ve.makeSpan(["mord","overline"],[s],t)},mathmlBuilder(e,t){const r=new gt.MathNode("mo",[new gt.TextNode("\u203e")]);r.setAttribute("stretchy","true");const n=new gt.MathNode("mover",[vt(e.body,t),r]);return n.setAttribute("accent","true"),n}}),je({type:"phantom",names:["\\phantom"],props:{numArgs:1,allowedInText:!0},handler:(e,t)=>{let{parser:r}=e;const n=t[0];return{type:"phantom",mode:r.mode,body:Ke(n)}},htmlBuilder:(e,t)=>{const r=nt(e.body,t.withPhantom(),!1);return Ve.makeFragment(r)},mathmlBuilder:(e,t)=>{const r=xt(e.body,t);return new gt.MathNode("mphantom",r)}}),je({type:"hphantom",names:["\\hphantom"],props:{numArgs:1,allowedInText:!0},handler:(e,t)=>{let{parser:r}=e;const n=t[0];return{type:"hphantom",mode:r.mode,body:n}},htmlBuilder:(e,t)=>{let r=Ve.makeSpan([],[ht(e.body,t.withPhantom())]);if(r.height=0,r.depth=0,r.children)for(let e=0;e{const r=xt(Ke(e.body),t),n=new gt.MathNode("mphantom",r),o=new gt.MathNode("mpadded",[n]);return o.setAttribute("height","0px"),o.setAttribute("depth","0px"),o}}),je({type:"vphantom",names:["\\vphantom"],props:{numArgs:1,allowedInText:!0},handler:(e,t)=>{let{parser:r}=e;const n=t[0];return{type:"vphantom",mode:r.mode,body:n}},htmlBuilder:(e,t)=>{const 
r=Ve.makeSpan(["inner"],[ht(e.body,t.withPhantom())]),n=Ve.makeSpan(["fix"],[]);return Ve.makeSpan(["mord","rlap"],[r,n],t)},mathmlBuilder:(e,t)=>{const r=xt(Ke(e.body),t),n=new gt.MathNode("mphantom",r),o=new gt.MathNode("mpadded",[n]);return o.setAttribute("width","0px"),o}}),je({type:"raisebox",names:["\\raisebox"],props:{numArgs:2,argTypes:["size","hbox"],allowedInText:!0},handler(e,t){let{parser:r}=e;const n=qt(t[0],"size").value,o=t[1];return{type:"raisebox",mode:r.mode,dy:n,body:o}},htmlBuilder(e,t){const r=ht(e.body,t),n=P(e.dy,t);return Ve.makeVList({positionType:"shift",positionData:-n,children:[{type:"elem",elem:r}]},t)},mathmlBuilder(e,t){const r=new gt.MathNode("mpadded",[vt(e.body,t)]),n=e.dy.number+e.dy.unit;return r.setAttribute("voffset",n),r}}),je({type:"internal",names:["\\relax"],props:{numArgs:0,allowedInText:!0},handler(e){let{parser:t}=e;return{type:"internal",mode:t.mode}}}),je({type:"rule",names:["\\rule"],props:{numArgs:2,numOptionalArgs:1,argTypes:["size","size","size"]},handler(e,t,r){let{parser:n}=e;const o=r[0],s=qt(t[0],"size"),i=qt(t[1],"size");return{type:"rule",mode:n.mode,shift:o&&qt(o,"size").value,width:s.value,height:i.value}},htmlBuilder(e,t){const r=Ve.makeSpan(["mord","rule"],[],t),n=P(e.width,t),o=P(e.height,t),s=e.shift?P(e.shift,t):0;return r.style.borderRightWidth=F(n),r.style.borderTopWidth=F(o),r.style.bottom=F(s),r.width=n,r.height=o+s,r.depth=-s,r.maxFontSize=1.125*o*t.sizeMultiplier,r},mathmlBuilder(e,t){const r=P(e.width,t),n=P(e.height,t),o=e.shift?P(e.shift,t):0,s=t.color&&t.getColor()||"black",i=new gt.MathNode("mspace");i.setAttribute("mathbackground",s),i.setAttribute("width",F(r)),i.setAttribute("height",F(n));const a=new gt.MathNode("mpadded",[i]);return o>=0?a.setAttribute("height",F(o)):(a.setAttribute("height",F(o)),a.setAttribute("depth",F(-o))),a.setAttribute("voffset",F(o)),a}});const 
an=["\\tiny","\\sixptsize","\\scriptsize","\\footnotesize","\\small","\\normalsize","\\large","\\Large","\\LARGE","\\huge","\\Huge"];je({type:"sizing",names:an,props:{numArgs:0,allowedInText:!0},handler:(e,t)=>{let{breakOnTokenText:r,funcName:n,parser:o}=e;const s=o.parseExpression(!1,r);return{type:"sizing",mode:o.mode,size:an.indexOf(n)+1,body:s}},htmlBuilder:(e,t)=>{const r=t.havingSize(e.size);return sn(e.body,r,t)},mathmlBuilder:(e,t)=>{const r=t.havingSize(e.size),n=xt(e.body,r),o=new gt.MathNode("mstyle",n);return o.setAttribute("mathsize",F(r.sizeMultiplier)),o}}),je({type:"smash",names:["\\smash"],props:{numArgs:1,numOptionalArgs:1,allowedInText:!0},handler:(e,t,r)=>{let{parser:n}=e,o=!1,s=!1;const i=r[0]&&qt(r[0],"ordgroup");if(i){let e="";for(let t=0;t{const r=Ve.makeSpan([],[ht(e.body,t)]);if(!e.smashHeight&&!e.smashDepth)return r;if(e.smashHeight&&(r.height=0,r.children))for(let e=0;e{const r=new gt.MathNode("mpadded",[vt(e.body,t)]);return e.smashHeight&&r.setAttribute("height","0px"),e.smashDepth&&r.setAttribute("depth","0px"),r}}),je({type:"sqrt",names:["\\sqrt"],props:{numArgs:1,numOptionalArgs:1},handler(e,t,r){let{parser:n}=e;const o=r[0],s=t[0];return{type:"sqrt",mode:n.mode,body:s,index:o}},htmlBuilder(e,t){let r=ht(e.body,t.havingCrampedStyle());0===r.height&&(r.height=t.fontMetrics().xHeight),r=Ve.wrapFragment(r,t);const n=t.fontMetrics().defaultRuleThickness;let o=n;t.style.idr.height+r.depth+s&&(s=(s+c-r.height-r.depth)/2);const m=a.height-r.height-s-l;r.style.paddingLeft=F(h);const p=Ve.makeVList({positionType:"firstBaseline",children:[{type:"elem",elem:r,wrapperClasses:["svg-align"]},{type:"kern",size:-(r.height+m)},{type:"elem",elem:a},{type:"kern",size:l}]},t);if(e.index){const r=t.havingStyle(w.SCRIPTSCRIPT),n=ht(e.index,r,t),o=.6*(p.height-p.depth),s=Ve.makeVList({positionType:"shift",positionData:-o,children:[{type:"elem",elem:n}]},t),i=Ve.makeSpan(["root"],[s]);return Ve.makeSpan(["mord","sqrt"],[i,p],t)}return 
Ve.makeSpan(["mord","sqrt"],[p],t)},mathmlBuilder(e,t){const{body:r,index:n}=e;return n?new gt.MathNode("mroot",[vt(r,t),vt(n,t)]):new gt.MathNode("msqrt",[vt(r,t)])}});const ln={display:w.DISPLAY,text:w.TEXT,script:w.SCRIPT,scriptscript:w.SCRIPTSCRIPT};je({type:"styling",names:["\\displaystyle","\\textstyle","\\scriptstyle","\\scriptscriptstyle"],props:{numArgs:0,allowedInText:!0,primitive:!0},handler(e,t){let{breakOnTokenText:r,funcName:n,parser:o}=e;const s=o.parseExpression(!0,r),i=n.slice(1,n.length-5);return{type:"styling",mode:o.mode,style:i,body:s}},htmlBuilder(e,t){const r=ln[e.style],n=t.havingStyle(r).withFont("");return sn(e.body,n,t)},mathmlBuilder(e,t){const r=ln[e.style],n=t.havingStyle(r),o=xt(e.body,n),s=new gt.MathNode("mstyle",o),i={display:["0","true"],text:["0","false"],script:["1","false"],scriptscript:["2","false"]}[e.style];return s.setAttribute("scriptlevel",i[0]),s.setAttribute("displaystyle",i[1]),s}});$e({type:"supsub",htmlBuilder(e,t){const r=function(e,t){const r=e.base;if(r)return"op"===r.type?r.limits&&(t.style.size===w.DISPLAY.size||r.alwaysHandleSupSub)?en:null:"operatorname"===r.type?r.alwaysHandleSupSub&&(t.style.size===w.DISPLAY.size||r.limits)?on:null:"accent"===r.type?l.isCharacterBox(r.base)?Ht:null:"horizBrace"===r.type&&!e.sub===r.isOver?$r:null;return null}(e,t);if(r)return r(e,t);const{base:n,sup:o,sub:s}=e,i=ht(n,t);let a,h;const c=t.fontMetrics();let m=0,p=0;const u=n&&l.isCharacterBox(n);if(o){const e=t.havingStyle(t.style.sup());a=ht(o,e,t),u||(m=i.height-e.fontMetrics().supDrop*e.sizeMultiplier/t.sizeMultiplier)}if(s){const e=t.havingStyle(t.style.sub());h=ht(s,e,t),u||(p=i.depth+e.fontMetrics().subDrop*e.sizeMultiplier/t.sizeMultiplier)}let d;d=t.style===w.DISPLAY?c.sup1:t.style.cramped?c.sup3:c.sup2;const g=t.sizeMultiplier,f=F(.5/c.ptPerEm/g);let b,y=null;if(h){const t=e.base&&"op"===e.base.type&&e.base.name&&("\\oiint"===e.base.name||"\\oiiint"===e.base.name);(i instanceof 
Z||t)&&(y=F(-i.italic))}if(a&&h){m=Math.max(m,d,a.depth+.25*c.xHeight),p=Math.max(p,c.sub2);const e=4*c.defaultRuleThickness;if(m-a.depth-(h.height-p)0&&(m+=t,p-=t)}const r=[{type:"elem",elem:h,shift:p,marginRight:f,marginLeft:y},{type:"elem",elem:a,shift:-m,marginRight:f}];b=Ve.makeVList({positionType:"individualShift",children:r},t)}else if(h){p=Math.max(p,c.sub1,h.height-.8*c.xHeight);const e=[{type:"elem",elem:h,marginLeft:y,marginRight:f}];b=Ve.makeVList({positionType:"shift",positionData:p,children:e},t)}else{if(!a)throw new Error("supsub must have either sup or sub.");m=Math.max(m,d,a.depth+.25*c.xHeight),b=Ve.makeVList({positionType:"shift",positionData:-m,children:[{type:"elem",elem:a,marginRight:f}]},t)}const x=at(i,"right")||"mord";return Ve.makeSpan([x],[i,Ve.makeSpan(["msupsub"],[b])],t)},mathmlBuilder(e,t){let r,n,o=!1;e.base&&"horizBrace"===e.base.type&&(n=!!e.sup,n===e.base.isOver&&(o=!0,r=e.base.isOver)),!e.base||"op"!==e.base.type&&"operatorname"!==e.base.type||(e.base.parentIsSupSub=!0);const s=[vt(e.base,t)];let i;if(e.sub&&s.push(vt(e.sub,t)),e.sup&&s.push(vt(e.sup,t)),o)i=r?"mover":"munder";else if(e.sub)if(e.sup){const r=e.base;i=r&&"op"===r.type&&r.limits&&t.style===w.DISPLAY||r&&"operatorname"===r.type&&r.alwaysHandleSupSub&&(t.style===w.DISPLAY||r.limits)?"munderover":"msubsup"}else{const r=e.base;i=r&&"op"===r.type&&r.limits&&(t.style===w.DISPLAY||r.alwaysHandleSupSub)||r&&"operatorname"===r.type&&r.alwaysHandleSupSub&&(r.limits||t.style===w.DISPLAY)?"munder":"msub"}else{const r=e.base;i=r&&"op"===r.type&&r.limits&&(t.style===w.DISPLAY||r.alwaysHandleSupSub)||r&&"operatorname"===r.type&&r.alwaysHandleSupSub&&(r.limits||t.style===w.DISPLAY)?"mover":"msup"}return new gt.MathNode(i,s)}}),$e({type:"atom",htmlBuilder(e,t){return Ve.mathsym(e.text,e.mode,t,["m"+e.family])},mathmlBuilder(e,t){const r=new gt.MathNode("mo",[ft(e.text,e.mode)]);if("bin"===e.family){const 
n=yt(e,t);"bold-italic"===n&&r.setAttribute("mathvariant",n)}else"punct"===e.family?r.setAttribute("separator","true"):"open"!==e.family&&"close"!==e.family||r.setAttribute("stretchy","false");return r}});const hn={mi:"italic",mn:"normal",mtext:"normal"};$e({type:"mathord",htmlBuilder(e,t){return Ve.makeOrd(e,t,"mathord")},mathmlBuilder(e,t){const r=new gt.MathNode("mi",[ft(e.text,e.mode,t)]),n=yt(e,t)||"italic";return n!==hn[r.type]&&r.setAttribute("mathvariant",n),r}}),$e({type:"textord",htmlBuilder(e,t){return Ve.makeOrd(e,t,"textord")},mathmlBuilder(e,t){const r=ft(e.text,e.mode,t),n=yt(e,t)||"normal";let o;return o="text"===e.mode?new gt.MathNode("mtext",[r]):/[0-9]/.test(e.text)?new gt.MathNode("mn",[r]):"\\prime"===e.text?new gt.MathNode("mo",[r]):new gt.MathNode("mi",[r]),n!==hn[o.type]&&o.setAttribute("mathvariant",n),o}});const cn={"\\nobreak":"nobreak","\\allowbreak":"allowbreak"},mn={" ":{},"\\ ":{},"~":{className:"nobreak"},"\\space":{},"\\nobreakspace":{className:"nobreak"}};$e({type:"spacing",htmlBuilder(e,t){if(mn.hasOwnProperty(e.text)){const r=mn[e.text].className||"";if("text"===e.mode){const n=Ve.makeOrd(e,t,"textord");return n.classes.push(r),n}return Ve.makeSpan(["mspace",r],[Ve.mathsym(e.text,e.mode,t)],t)}if(cn.hasOwnProperty(e.text))return Ve.makeSpan(["mspace",cn[e.text]],[],t);throw new n('Unknown type of space "'+e.text+'"')},mathmlBuilder(e,t){let r;if(!mn.hasOwnProperty(e.text)){if(cn.hasOwnProperty(e.text))return new gt.MathNode("mspace");throw new n('Unknown type of space "'+e.text+'"')}return r=new gt.MathNode("mtext",[new gt.TextNode("\xa0")]),r}});const pn=()=>{const e=new gt.MathNode("mtd",[]);return e.setAttribute("width","50%"),e};$e({type:"tag",mathmlBuilder(e,t){const r=new gt.MathNode("mtable",[new gt.MathNode("mtr",[pn(),new gt.MathNode("mtd",[wt(e.body,t)]),pn(),new gt.MathNode("mtd",[wt(e.tag,t)])])]);return r.setAttribute("width","100%"),r}});const un={"\\text":void 
0,"\\textrm":"textrm","\\textsf":"textsf","\\texttt":"texttt","\\textnormal":"textrm"},dn={"\\textbf":"textbf","\\textmd":"textmd"},gn={"\\textit":"textit","\\textup":"textup"},fn=(e,t)=>{const r=e.font;return r?un[r]?t.withTextFontFamily(un[r]):dn[r]?t.withTextFontWeight(dn[r]):t.withTextFontShape(gn[r]):t};je({type:"text",names:["\\text","\\textrm","\\textsf","\\texttt","\\textnormal","\\textbf","\\textmd","\\textit","\\textup"],props:{numArgs:1,argTypes:["text"],allowedInArgument:!0,allowedInText:!0},handler(e,t){let{parser:r,funcName:n}=e;const o=t[0];return{type:"text",mode:r.mode,body:Ke(o),font:n}},htmlBuilder(e,t){const r=fn(e,t),n=nt(e.body,r,!0);return Ve.makeSpan(["mord","text"],n,r)},mathmlBuilder(e,t){const r=fn(e,t);return wt(e.body,r)}}),je({type:"underline",names:["\\underline"],props:{numArgs:1,allowedInText:!0},handler(e,t){let{parser:r}=e;return{type:"underline",mode:r.mode,body:t[0]}},htmlBuilder(e,t){const r=ht(e.body,t),n=Ve.makeLineSpan("underline-line",t),o=t.fontMetrics().defaultRuleThickness,s=Ve.makeVList({positionType:"top",positionData:r.height,children:[{type:"kern",size:o},{type:"elem",elem:n},{type:"kern",size:3*o},{type:"elem",elem:r}]},t);return Ve.makeSpan(["mord","underline"],[s],t)},mathmlBuilder(e,t){const r=new gt.MathNode("mo",[new gt.TextNode("\u203e")]);r.setAttribute("stretchy","true");const n=new gt.MathNode("munder",[vt(e.body,t),r]);return n.setAttribute("accentunder","true"),n}}),je({type:"vcenter",names:["\\vcenter"],props:{numArgs:1,argTypes:["original"],allowedInText:!1},handler(e,t){let{parser:r}=e;return{type:"vcenter",mode:r.mode,body:t[0]}},htmlBuilder(e,t){const r=ht(e.body,t),n=t.fontMetrics().axisHeight,o=.5*(r.height-n-(r.depth+n));return Ve.makeVList({positionType:"shift",positionData:o,children:[{type:"elem",elem:r}]},t)},mathmlBuilder(e,t){return new gt.MathNode("mpadded",[vt(e.body,t)],["vcenter"])}}),je({type:"verb",names:["\\verb"],props:{numArgs:0,allowedInText:!0},handler(e,t,r){throw new n("\\verb 
ended by end of line instead of matching delimiter")},htmlBuilder(e,t){const r=bn(e),n=[],o=t.havingStyle(t.style.text());for(let t=0;te.body.replace(/ /g,e.star?"\u2423":"\xa0");var yn=Xe;const xn="[ \r\n\t]",wn="(\\\\[a-zA-Z@]+)"+xn+"*",vn="[\u0300-\u036f]",kn=new RegExp(vn+"+$"),Sn="("+xn+"+)|\\\\(\n|[ \r\t]+\n?)[ \r\t]*|([!-\\[\\]-\u2027\u202a-\ud7ff\uf900-\uffff]"+vn+"*|[\ud800-\udbff][\udc00-\udfff]"+vn+"*|\\\\verb\\*([^]).*?\\4|\\\\verb([^*a-zA-Z]).*?\\5|"+wn+"|\\\\[^\ud800-\udfff])";class Mn{constructor(e,t){this.input=void 0,this.settings=void 0,this.tokenRegex=void 0,this.catcodes=void 0,this.input=e,this.settings=t,this.tokenRegex=new RegExp(Sn,"g"),this.catcodes={"%":14,"~":13}}setCatcode(e,t){this.catcodes[e]=t}lex(){const e=this.input,t=this.tokenRegex.lastIndex;if(t===e.length)return new Nr("EOF",new Cr(this,t,t));const r=this.tokenRegex.exec(e);if(null===r||r.index!==t)throw new n("Unexpected character: '"+e[t]+"'",new Nr(e[t],new Cr(this,t,t+1)));const o=r[6]||r[3]||(r[2]?"\\ ":" ");if(14===this.catcodes[o]){const t=e.indexOf("\n",this.tokenRegex.lastIndex);return-1===t?(this.tokenRegex.lastIndex=e.length,this.settings.reportNonstrict("commentAtEnd","% comment has no terminating newline; LaTeX would fail because of commenting the end of math mode (e.g. 
$)")):this.tokenRegex.lastIndex=t+1,this.lex()}return new Nr(o,new Cr(this,t,this.tokenRegex.lastIndex))}}class zn{constructor(e,t){void 0===e&&(e={}),void 0===t&&(t={}),this.current=void 0,this.builtins=void 0,this.undefStack=void 0,this.current=t,this.builtins=e,this.undefStack=[]}beginGroup(){this.undefStack.push({})}endGroup(){if(0===this.undefStack.length)throw new n("Unbalanced namespace destruction: attempt to pop global namespace; please report this as a bug");const e=this.undefStack.pop();for(const t in e)e.hasOwnProperty(t)&&(null==e[t]?delete this.current[t]:this.current[t]=e[t])}endGroups(){for(;this.undefStack.length>0;)this.endGroup()}has(e){return this.current.hasOwnProperty(e)||this.builtins.hasOwnProperty(e)}get(e){return this.current.hasOwnProperty(e)?this.current[e]:this.builtins[e]}set(e,t,r){if(void 0===r&&(r=!1),r){for(let t=0;t0&&(this.undefStack[this.undefStack.length-1][e]=t)}else{const t=this.undefStack[this.undefStack.length-1];t&&!t.hasOwnProperty(e)&&(t[e]=this.current[e])}null==t?delete this.current[e]:this.current[e]=t}}var An=Tr;Br("\\noexpand",(function(e){const t=e.popToken();return e.isExpandable(t.text)&&(t.noexpand=!0,t.treatAsRelax=!0),{tokens:[t],numArgs:0}})),Br("\\expandafter",(function(e){const t=e.popToken();return e.expandOnce(!0),{tokens:[t],numArgs:0}})),Br("\\@firstoftwo",(function(e){return{tokens:e.consumeArgs(2)[0],numArgs:0}})),Br("\\@secondoftwo",(function(e){return{tokens:e.consumeArgs(2)[1],numArgs:0}})),Br("\\@ifnextchar",(function(e){const t=e.consumeArgs(3);e.consumeSpaces();const r=e.future();return 1===t[0].length&&t[0][0].text===r.text?{tokens:t[1],numArgs:0}:{tokens:t[2],numArgs:0}})),Br("\\@ifstar","\\@ifnextchar *{\\@firstoftwo{#1}}"),Br("\\TextOrMath",(function(e){const t=e.consumeArgs(2);return"text"===e.mode?{tokens:t[0],numArgs:0}:{tokens:t[1],numArgs:0}}));const Tn={0:0,1:1,2:2,3:3,4:4,5:5,6:6,7:7,8:8,9:9,a:10,A:10,b:11,B:11,c:12,C:12,d:13,D:13,e:14,E:14,f:15,F:15};Br("\\char",(function(e){let 
t,r=e.popToken(),o="";if("'"===r.text)t=8,r=e.popToken();else if('"'===r.text)t=16,r=e.popToken();else if("`"===r.text)if(r=e.popToken(),"\\"===r.text[0])o=r.text.charCodeAt(1);else{if("EOF"===r.text)throw new n("\\char` missing argument");o=r.text.charCodeAt(0)}else t=10;if(t){if(o=Tn[r.text],null==o||o>=t)throw new n("Invalid base-"+t+" digit "+r.text);let s;for(;null!=(s=Tn[e.future().text])&&s{let o=e.consumeArg().tokens;if(1!==o.length)throw new n("\\newcommand's first argument must be a macro name");const s=o[0].text,i=e.isDefined(s);if(i&&!t)throw new n("\\newcommand{"+s+"} attempting to redefine "+s+"; use \\renewcommand");if(!i&&!r)throw new n("\\renewcommand{"+s+"} when command "+s+" does not yet exist; use \\newcommand");let a=0;if(o=e.consumeArg().tokens,1===o.length&&"["===o[0].text){let t="",r=e.expandNextToken();for(;"]"!==r.text&&"EOF"!==r.text;)t+=r.text,r=e.expandNextToken();if(!t.match(/^\s*[0-9]+\s*$/))throw new n("Invalid number of arguments: "+t);a=parseInt(t),o=e.consumeArg().tokens}return e.macros.set(s,{tokens:o,numArgs:a}),""};Br("\\newcommand",(e=>Bn(e,!1,!0))),Br("\\renewcommand",(e=>Bn(e,!0,!1))),Br("\\providecommand",(e=>Bn(e,!0,!0))),Br("\\message",(e=>{const t=e.consumeArgs(1)[0];return console.log(t.reverse().map((e=>e.text)).join("")),""})),Br("\\errmessage",(e=>{const t=e.consumeArgs(1)[0];return console.error(t.reverse().map((e=>e.text)).join("")),""})),Br("\\show",(e=>{const t=e.popToken(),r=t.text;return console.log(t,e.macros.get(r),yn[r],oe.math[r],oe.text[r]),""})),Br("\\bgroup","{"),Br("\\egroup","}"),Br("~","\\nobreakspace"),Br("\\lq","`"),Br("\\rq","'"),Br("\\aa","\\r a"),Br("\\AA","\\r A"),Br("\\textcopyright","\\html@mathml{\\textcircled{c}}{\\char`\xa9}"),Br("\\copyright","\\TextOrMath{\\textcopyright}{\\text{\\textcopyright}}"),Br("\\textregistered","\\html@mathml{\\textcircled{\\scriptsize 
R}}{\\char`\xae}"),Br("\u212c","\\mathscr{B}"),Br("\u2130","\\mathscr{E}"),Br("\u2131","\\mathscr{F}"),Br("\u210b","\\mathscr{H}"),Br("\u2110","\\mathscr{I}"),Br("\u2112","\\mathscr{L}"),Br("\u2133","\\mathscr{M}"),Br("\u211b","\\mathscr{R}"),Br("\u212d","\\mathfrak{C}"),Br("\u210c","\\mathfrak{H}"),Br("\u2128","\\mathfrak{Z}"),Br("\\Bbbk","\\Bbb{k}"),Br("\xb7","\\cdotp"),Br("\\llap","\\mathllap{\\textrm{#1}}"),Br("\\rlap","\\mathrlap{\\textrm{#1}}"),Br("\\clap","\\mathclap{\\textrm{#1}}"),Br("\\mathstrut","\\vphantom{(}"),Br("\\underbar","\\underline{\\text{#1}}"),Br("\\not",'\\html@mathml{\\mathrel{\\mathrlap\\@not}}{\\char"338}'),Br("\\neq","\\html@mathml{\\mathrel{\\not=}}{\\mathrel{\\char`\u2260}}"),Br("\\ne","\\neq"),Br("\u2260","\\neq"),Br("\\notin","\\html@mathml{\\mathrel{{\\in}\\mathllap{/\\mskip1mu}}}{\\mathrel{\\char`\u2209}}"),Br("\u2209","\\notin"),Br("\u2258","\\html@mathml{\\mathrel{=\\kern{-1em}\\raisebox{0.4em}{$\\scriptsize\\frown$}}}{\\mathrel{\\char`\u2258}}"),Br("\u2259","\\html@mathml{\\stackrel{\\tiny\\wedge}{=}}{\\mathrel{\\char`\u2258}}"),Br("\u225a","\\html@mathml{\\stackrel{\\tiny\\vee}{=}}{\\mathrel{\\char`\u225a}}"),Br("\u225b","\\html@mathml{\\stackrel{\\scriptsize\\star}{=}}{\\mathrel{\\char`\u225b}}"),Br("\u225d","\\html@mathml{\\stackrel{\\tiny\\mathrm{def}}{=}}{\\mathrel{\\char`\u225d}}"),Br("\u225e","\\html@mathml{\\stackrel{\\tiny\\mathrm{m}}{=}}{\\mathrel{\\char`\u225e}}"),Br("\u225f","\\html@mathml{\\stackrel{\\tiny?}{=}}{\\mathrel{\\char`\u225f}}"),Br("\u27c2","\\perp"),Br("\u203c","\\mathclose{!\\mkern-0.8mu!}"),Br("\u220c","\\notni"),Br("\u231c","\\ulcorner"),Br("\u231d","\\urcorner"),Br("\u231e","\\llcorner"),Br("\u231f","\\lrcorner"),Br("\xa9","\\copyright"),Br("\xae","\\textregistered"),Br("\ufe0f","\\textregistered"),Br("\\ulcorner",'\\html@mathml{\\@ulcorner}{\\mathop{\\char"231c}}'),Br("\\urcorner",'\\html@mathml{\\@urcorner}{\\mathop{\\char"231d}}'),Br("\\llcorner",'\\html@mathml{\\@llcorner}{\\mathop{\\char"231e}}'),
Br("\\lrcorner",'\\html@mathml{\\@lrcorner}{\\mathop{\\char"231f}}'),Br("\\vdots","\\mathord{\\varvdots\\rule{0pt}{15pt}}"),Br("\u22ee","\\vdots"),Br("\\varGamma","\\mathit{\\Gamma}"),Br("\\varDelta","\\mathit{\\Delta}"),Br("\\varTheta","\\mathit{\\Theta}"),Br("\\varLambda","\\mathit{\\Lambda}"),Br("\\varXi","\\mathit{\\Xi}"),Br("\\varPi","\\mathit{\\Pi}"),Br("\\varSigma","\\mathit{\\Sigma}"),Br("\\varUpsilon","\\mathit{\\Upsilon}"),Br("\\varPhi","\\mathit{\\Phi}"),Br("\\varPsi","\\mathit{\\Psi}"),Br("\\varOmega","\\mathit{\\Omega}"),Br("\\substack","\\begin{subarray}{c}#1\\end{subarray}"),Br("\\colon","\\nobreak\\mskip2mu\\mathpunct{}\\mathchoice{\\mkern-3mu}{\\mkern-3mu}{}{}{:}\\mskip6mu\\relax"),Br("\\boxed","\\fbox{$\\displaystyle{#1}$}"),Br("\\iff","\\DOTSB\\;\\Longleftrightarrow\\;"),Br("\\implies","\\DOTSB\\;\\Longrightarrow\\;"),Br("\\impliedby","\\DOTSB\\;\\Longleftarrow\\;");const Cn={",":"\\dotsc","\\not":"\\dotsb","+":"\\dotsb","=":"\\dotsb","<":"\\dotsb",">":"\\dotsb","-":"\\dotsb","*":"\\dotsb",":":"\\dotsb","\\DOTSB":"\\dotsb","\\coprod":"\\dotsb","\\bigvee":"\\dotsb","\\bigwedge":"\\dotsb","\\biguplus":"\\dotsb","\\bigcap":"\\dotsb","\\bigcup":"\\dotsb","\\prod":"\\dotsb","\\sum":"\\dotsb","\\bigotimes":"\\dotsb","\\bigoplus":"\\dotsb","\\bigodot":"\\dotsb","\\bigsqcup":"\\dotsb","\\And":"\\dotsb","\\longrightarrow":"\\dotsb","\\Longrightarrow":"\\dotsb","\\longleftarrow":"\\dotsb","\\Longleftarrow":"\\dotsb","\\longleftrightarrow":"\\dotsb","\\Longleftrightarrow":"\\dotsb","\\mapsto":"\\dotsb","\\longmapsto":"\\dotsb","\\hookrightarrow":"\\dotsb","\\doteq":"\\dotsb","\\mathbin":"\\dotsb","\\mathrel":"\\dotsb","\\relbar":"\\dotsb","\\Relbar":"\\dotsb","\\xrightarrow":"\\dotsb","\\xleftarrow":"\\dotsb","\\DOTSI":"\\dotsi","\\int":"\\dotsi","\\oint":"\\dotsi","\\iint":"\\dotsi","\\iiint":"\\dotsi","\\iiiint":"\\dotsi","\\idotsint":"\\dotsi","\\DOTSX":"\\dotsx"};Br("\\dots",(function(e){let t="\\dotso";const r=e.expandAfterFuture().text;return r in 
Cn?t=Cn[r]:("\\not"===r.slice(0,4)||r in oe.math&&l.contains(["bin","rel"],oe.math[r].group))&&(t="\\dotsb"),t}));const Nn={")":!0,"]":!0,"\\rbrack":!0,"\\}":!0,"\\rbrace":!0,"\\rangle":!0,"\\rceil":!0,"\\rfloor":!0,"\\rgroup":!0,"\\rmoustache":!0,"\\right":!0,"\\bigr":!0,"\\biggr":!0,"\\Bigr":!0,"\\Biggr":!0,$:!0,";":!0,".":!0,",":!0};Br("\\dotso",(function(e){return e.future().text in Nn?"\\ldots\\,":"\\ldots"})),Br("\\dotsc",(function(e){const t=e.future().text;return t in Nn&&","!==t?"\\ldots\\,":"\\ldots"})),Br("\\cdots",(function(e){return e.future().text in Nn?"\\@cdots\\,":"\\@cdots"})),Br("\\dotsb","\\cdots"),Br("\\dotsm","\\cdots"),Br("\\dotsi","\\!\\cdots"),Br("\\dotsx","\\ldots\\,"),Br("\\DOTSI","\\relax"),Br("\\DOTSB","\\relax"),Br("\\DOTSX","\\relax"),Br("\\tmspace","\\TextOrMath{\\kern#1#3}{\\mskip#1#2}\\relax"),Br("\\,","\\tmspace+{3mu}{.1667em}"),Br("\\thinspace","\\,"),Br("\\>","\\mskip{4mu}"),Br("\\:","\\tmspace+{4mu}{.2222em}"),Br("\\medspace","\\:"),Br("\\;","\\tmspace+{5mu}{.2777em}"),Br("\\thickspace","\\;"),Br("\\!","\\tmspace-{3mu}{.1667em}"),Br("\\negthinspace","\\!"),Br("\\negmedspace","\\tmspace-{4mu}{.2222em}"),Br("\\negthickspace","\\tmspace-{5mu}{.277em}"),Br("\\enspace","\\kern.5em "),Br("\\enskip","\\hskip.5em\\relax"),Br("\\quad","\\hskip1em\\relax"),Br("\\qquad","\\hskip2em\\relax"),Br("\\tag","\\@ifstar\\tag@literal\\tag@paren"),Br("\\tag@paren","\\tag@literal{({#1})}"),Br("\\tag@literal",(e=>{if(e.macros.get("\\df@tag"))throw new n("Multiple \\tag");return"\\gdef\\df@tag{\\text{#1}}"})),Br("\\bmod","\\mathchoice{\\mskip1mu}{\\mskip1mu}{\\mskip5mu}{\\mskip5mu}\\mathbin{\\rm mod}\\mathchoice{\\mskip1mu}{\\mskip1mu}{\\mskip5mu}{\\mskip5mu}"),Br("\\pod","\\allowbreak\\mathchoice{\\mkern18mu}{\\mkern8mu}{\\mkern8mu}{\\mkern8mu}(#1)"),Br("\\pmod","\\pod{{\\rm mod}\\mkern6mu#1}"),Br("\\mod","\\allowbreak\\mathchoice{\\mkern18mu}{\\mkern12mu}{\\mkern12mu}{\\mkern12mu}{\\rm 
mod}\\,\\,#1"),Br("\\newline","\\\\\\relax"),Br("\\TeX","\\textrm{\\html@mathml{T\\kern-.1667em\\raisebox{-.5ex}{E}\\kern-.125emX}{TeX}}");const qn=F(T["Main-Regular"]["T".charCodeAt(0)][1]-.7*T["Main-Regular"]["A".charCodeAt(0)][1]);Br("\\LaTeX","\\textrm{\\html@mathml{L\\kern-.36em\\raisebox{"+qn+"}{\\scriptstyle A}\\kern-.15em\\TeX}{LaTeX}}"),Br("\\KaTeX","\\textrm{\\html@mathml{K\\kern-.17em\\raisebox{"+qn+"}{\\scriptstyle A}\\kern-.15em\\TeX}{KaTeX}}"),Br("\\hspace","\\@ifstar\\@hspacer\\@hspace"),Br("\\@hspace","\\hskip #1\\relax"),Br("\\@hspacer","\\rule{0pt}{0pt}\\hskip #1\\relax"),Br("\\ordinarycolon",":"),Br("\\vcentcolon","\\mathrel{\\mathop\\ordinarycolon}"),Br("\\dblcolon",'\\html@mathml{\\mathrel{\\vcentcolon\\mathrel{\\mkern-.9mu}\\vcentcolon}}{\\mathop{\\char"2237}}'),Br("\\coloneqq",'\\html@mathml{\\mathrel{\\vcentcolon\\mathrel{\\mkern-1.2mu}=}}{\\mathop{\\char"2254}}'),Br("\\Coloneqq",'\\html@mathml{\\mathrel{\\dblcolon\\mathrel{\\mkern-1.2mu}=}}{\\mathop{\\char"2237\\char"3d}}'),Br("\\coloneq",'\\html@mathml{\\mathrel{\\vcentcolon\\mathrel{\\mkern-1.2mu}\\mathrel{-}}}{\\mathop{\\char"3a\\char"2212}}'),Br("\\Coloneq",'\\html@mathml{\\mathrel{\\dblcolon\\mathrel{\\mkern-1.2mu}\\mathrel{-}}}{\\mathop{\\char"2237\\char"2212}}'),Br("\\eqqcolon",'\\html@mathml{\\mathrel{=\\mathrel{\\mkern-1.2mu}\\vcentcolon}}{\\mathop{\\char"2255}}'),Br("\\Eqqcolon",'\\html@mathml{\\mathrel{=\\mathrel{\\mkern-1.2mu}\\dblcolon}}{\\mathop{\\char"3d\\char"2237}}'),Br("\\eqcolon",'\\html@mathml{\\mathrel{\\mathrel{-}\\mathrel{\\mkern-1.2mu}\\vcentcolon}}{\\mathop{\\char"2239}}'),Br("\\Eqcolon",'\\html@mathml{\\mathrel{\\mathrel{-}\\mathrel{\\mkern-1.2mu}\\dblcolon}}{\\mathop{\\char"2212\\char"2237}}'),Br("\\colonapprox",'\\html@mathml{\\mathrel{\\vcentcolon\\mathrel{\\mkern-1.2mu}\\approx}}{\\mathop{\\char"3a\\char"2248}}'),Br("\\Colonapprox",'\\html@mathml{\\mathrel{\\dblcolon\\mathrel{\\mkern-1.2mu}\\approx}}{\\mathop{\\char"2237\\char"2248}}'),Br("\\colonsim",'\\html@ma
thml{\\mathrel{\\vcentcolon\\mathrel{\\mkern-1.2mu}\\sim}}{\\mathop{\\char"3a\\char"223c}}'),Br("\\Colonsim",'\\html@mathml{\\mathrel{\\dblcolon\\mathrel{\\mkern-1.2mu}\\sim}}{\\mathop{\\char"2237\\char"223c}}'),Br("\u2237","\\dblcolon"),Br("\u2239","\\eqcolon"),Br("\u2254","\\coloneqq"),Br("\u2255","\\eqqcolon"),Br("\u2a74","\\Coloneqq"),Br("\\ratio","\\vcentcolon"),Br("\\coloncolon","\\dblcolon"),Br("\\colonequals","\\coloneqq"),Br("\\coloncolonequals","\\Coloneqq"),Br("\\equalscolon","\\eqqcolon"),Br("\\equalscoloncolon","\\Eqqcolon"),Br("\\colonminus","\\coloneq"),Br("\\coloncolonminus","\\Coloneq"),Br("\\minuscolon","\\eqcolon"),Br("\\minuscoloncolon","\\Eqcolon"),Br("\\coloncolonapprox","\\Colonapprox"),Br("\\coloncolonsim","\\Colonsim"),Br("\\simcolon","\\mathrel{\\sim\\mathrel{\\mkern-1.2mu}\\vcentcolon}"),Br("\\simcoloncolon","\\mathrel{\\sim\\mathrel{\\mkern-1.2mu}\\dblcolon}"),Br("\\approxcolon","\\mathrel{\\approx\\mathrel{\\mkern-1.2mu}\\vcentcolon}"),Br("\\approxcoloncolon","\\mathrel{\\approx\\mathrel{\\mkern-1.2mu}\\dblcolon}"),Br("\\notni","\\html@mathml{\\not\\ni}{\\mathrel{\\char`\u220c}}"),Br("\\limsup","\\DOTSB\\operatorname*{lim\\,sup}"),Br("\\liminf","\\DOTSB\\operatorname*{lim\\,inf}"),Br("\\injlim","\\DOTSB\\operatorname*{inj\\,lim}"),Br("\\projlim","\\DOTSB\\operatorname*{proj\\,lim}"),Br("\\varlimsup","\\DOTSB\\operatorname*{\\overline{lim}}"),Br("\\varliminf","\\DOTSB\\operatorname*{\\underline{lim}}"),Br("\\varinjlim","\\DOTSB\\operatorname*{\\underrightarrow{lim}}"),Br("\\varprojlim","\\DOTSB\\operatorname*{\\underleftarrow{lim}}"),Br("\\gvertneqq","\\html@mathml{\\@gvertneqq}{\u2269}"),Br("\\lvertneqq","\\html@mathml{\\@lvertneqq}{\u2268}"),Br("\\ngeqq","\\html@mathml{\\@ngeqq}{\u2271}"),Br("\\ngeqslant","\\html@mathml{\\@ngeqslant}{\u2271}"),Br("\\nleqq","\\html@mathml{\\@nleqq}{\u2270}"),Br("\\nleqslant","\\html@mathml{\\@nleqslant}{\u2270}"),Br("\\nshortmid","\\html@mathml{\\@nshortmid}{\u2224}"),Br("\\nshortparallel","\\html@mathml
{\\@nshortparallel}{\u2226}"),Br("\\nsubseteqq","\\html@mathml{\\@nsubseteqq}{\u2288}"),Br("\\nsupseteqq","\\html@mathml{\\@nsupseteqq}{\u2289}"),Br("\\varsubsetneq","\\html@mathml{\\@varsubsetneq}{\u228a}"),Br("\\varsubsetneqq","\\html@mathml{\\@varsubsetneqq}{\u2acb}"),Br("\\varsupsetneq","\\html@mathml{\\@varsupsetneq}{\u228b}"),Br("\\varsupsetneqq","\\html@mathml{\\@varsupsetneqq}{\u2acc}"),Br("\\imath","\\html@mathml{\\@imath}{\u0131}"),Br("\\jmath","\\html@mathml{\\@jmath}{\u0237}"),Br("\\llbracket","\\html@mathml{\\mathopen{[\\mkern-3.2mu[}}{\\mathopen{\\char`\u27e6}}"),Br("\\rrbracket","\\html@mathml{\\mathclose{]\\mkern-3.2mu]}}{\\mathclose{\\char`\u27e7}}"),Br("\u27e6","\\llbracket"),Br("\u27e7","\\rrbracket"),Br("\\lBrace","\\html@mathml{\\mathopen{\\{\\mkern-3.2mu[}}{\\mathopen{\\char`\u2983}}"),Br("\\rBrace","\\html@mathml{\\mathclose{]\\mkern-3.2mu\\}}}{\\mathclose{\\char`\u2984}}"),Br("\u2983","\\lBrace"),Br("\u2984","\\rBrace"),Br("\\minuso","\\mathbin{\\html@mathml{{\\mathrlap{\\mathchoice{\\kern{0.145em}}{\\kern{0.145em}}{\\kern{0.1015em}}{\\kern{0.0725em}}\\circ}{-}}}{\\char`\u29b5}}"),Br("\u29b5","\\minuso"),Br("\\darr","\\downarrow"),Br("\\dArr","\\Downarrow"),Br("\\Darr","\\Downarrow"),Br("\\lang","\\langle"),Br("\\rang","\\rangle"),Br("\\uarr","\\uparrow"),Br("\\uArr","\\Uparrow"),Br("\\Uarr","\\Uparrow"),Br("\\N","\\mathbb{N}"),Br("\\R","\\mathbb{R}"),Br("\\Z","\\mathbb{Z}"),Br("\\alef","\\aleph"),Br("\\alefsym","\\aleph"),Br("\\Alpha","\\mathrm{A}"),Br("\\Beta","\\mathrm{B}"),Br("\\bull","\\bullet"),Br("\\Chi","\\mathrm{X}"),Br("\\clubs","\\clubsuit"),Br("\\cnums","\\mathbb{C}"),Br("\\Complex","\\mathbb{C}"),Br("\\Dagger","\\ddagger"),Br("\\diamonds","\\diamondsuit"),Br("\\empty","\\emptyset"),Br("\\Epsilon","\\mathrm{E}"),Br("\\Eta","\\mathrm{H}"),Br("\\exist","\\exists"),Br("\\harr","\\leftrightarrow"),Br("\\hArr","\\Leftrightarrow"),Br("\\Harr","\\Leftrightarrow"),Br("\\hearts","\\heartsuit"),Br("\\image","\\Im"),Br("\\infin","\\infty"),B
r("\\Iota","\\mathrm{I}"),Br("\\isin","\\in"),Br("\\Kappa","\\mathrm{K}"),Br("\\larr","\\leftarrow"),Br("\\lArr","\\Leftarrow"),Br("\\Larr","\\Leftarrow"),Br("\\lrarr","\\leftrightarrow"),Br("\\lrArr","\\Leftrightarrow"),Br("\\Lrarr","\\Leftrightarrow"),Br("\\Mu","\\mathrm{M}"),Br("\\natnums","\\mathbb{N}"),Br("\\Nu","\\mathrm{N}"),Br("\\Omicron","\\mathrm{O}"),Br("\\plusmn","\\pm"),Br("\\rarr","\\rightarrow"),Br("\\rArr","\\Rightarrow"),Br("\\Rarr","\\Rightarrow"),Br("\\real","\\Re"),Br("\\reals","\\mathbb{R}"),Br("\\Reals","\\mathbb{R}"),Br("\\Rho","\\mathrm{P}"),Br("\\sdot","\\cdot"),Br("\\sect","\\S"),Br("\\spades","\\spadesuit"),Br("\\sub","\\subset"),Br("\\sube","\\subseteq"),Br("\\supe","\\supseteq"),Br("\\Tau","\\mathrm{T}"),Br("\\thetasym","\\vartheta"),Br("\\weierp","\\wp"),Br("\\Zeta","\\mathrm{Z}"),Br("\\argmin","\\DOTSB\\operatorname*{arg\\,min}"),Br("\\argmax","\\DOTSB\\operatorname*{arg\\,max}"),Br("\\plim","\\DOTSB\\mathop{\\operatorname{plim}}\\limits"),Br("\\bra","\\mathinner{\\langle{#1}|}"),Br("\\ket","\\mathinner{|{#1}\\rangle}"),Br("\\braket","\\mathinner{\\langle{#1}\\rangle}"),Br("\\Bra","\\left\\langle#1\\right|"),Br("\\Ket","\\left|#1\\right\\rangle");const In=e=>t=>{const r=t.consumeArg().tokens,n=t.consumeArg().tokens,o=t.consumeArg().tokens,s=t.consumeArg().tokens,i=t.macros.get("|"),a=t.macros.get("\\|");t.macros.beginGroup();const l=t=>r=>{e&&(r.macros.set("|",i),o.length&&r.macros.set("\\|",a));let s=t;if(!t&&o.length){"|"===r.future().text&&(r.popToken(),s=!0)}return{tokens:s?o:n,numArgs:0}};t.macros.set("|",l(!1)),o.length&&t.macros.set("\\|",l(!0));const h=t.consumeArg().tokens,c=t.expandTokens([...s,...h,...r]);return 
t.macros.endGroup(),{tokens:c.reverse(),numArgs:0}};Br("\\bra@ket",In(!1)),Br("\\bra@set",In(!0)),Br("\\Braket","\\bra@ket{\\left\\langle}{\\,\\middle\\vert\\,}{\\,\\middle\\vert\\,}{\\right\\rangle}"),Br("\\Set","\\bra@set{\\left\\{\\:}{\\;\\middle\\vert\\;}{\\;\\middle\\Vert\\;}{\\:\\right\\}}"),Br("\\set","\\bra@set{\\{\\,}{\\mid}{}{\\,\\}}"),Br("\\angln","{\\angl n}"),Br("\\blue","\\textcolor{##6495ed}{#1}"),Br("\\orange","\\textcolor{##ffa500}{#1}"),Br("\\pink","\\textcolor{##ff00af}{#1}"),Br("\\red","\\textcolor{##df0030}{#1}"),Br("\\green","\\textcolor{##28ae7b}{#1}"),Br("\\gray","\\textcolor{gray}{#1}"),Br("\\purple","\\textcolor{##9d38bd}{#1}"),Br("\\blueA","\\textcolor{##ccfaff}{#1}"),Br("\\blueB","\\textcolor{##80f6ff}{#1}"),Br("\\blueC","\\textcolor{##63d9ea}{#1}"),Br("\\blueD","\\textcolor{##11accd}{#1}"),Br("\\blueE","\\textcolor{##0c7f99}{#1}"),Br("\\tealA","\\textcolor{##94fff5}{#1}"),Br("\\tealB","\\textcolor{##26edd5}{#1}"),Br("\\tealC","\\textcolor{##01d1c1}{#1}"),Br("\\tealD","\\textcolor{##01a995}{#1}"),Br("\\tealE","\\textcolor{##208170}{#1}"),Br("\\greenA","\\textcolor{##b6ffb0}{#1}"),Br("\\greenB","\\textcolor{##8af281}{#1}"),Br("\\greenC","\\textcolor{##74cf70}{#1}"),Br("\\greenD","\\textcolor{##1fab54}{#1}"),Br("\\greenE","\\textcolor{##0d923f}{#1}"),Br("\\goldA","\\textcolor{##ffd0a9}{#1}"),Br("\\goldB","\\textcolor{##ffbb71}{#1}"),Br("\\goldC","\\textcolor{##ff9c39}{#1}"),Br("\\goldD","\\textcolor{##e07d10}{#1}"),Br("\\goldE","\\textcolor{##a75a05}{#1}"),Br("\\redA","\\textcolor{##fca9a9}{#1}"),Br("\\redB","\\textcolor{##ff8482}{#1}"),Br("\\redC","\\textcolor{##f9685d}{#1}"),Br("\\redD","\\textcolor{##e84d39}{#1}"),Br("\\redE","\\textcolor{##bc2612}{#1}"),Br("\\maroonA","\\textcolor{##ffbde0}{#1}"),Br("\\maroonB","\\textcolor{##ff92c6}{#1}"),Br("\\maroonC","\\textcolor{##ed5fa6}{#1}"),Br("\\maroonD","\\textcolor{##ca337c}{#1}"),Br("\\maroonE","\\textcolor{##9e034e}{#1}"),Br("\\purpleA","\\textcolor{##ddd7ff}{#1}"),Br("\\purpleB","\\textco
lor{##c6b9fc}{#1}"),Br("\\purpleC","\\textcolor{##aa87ff}{#1}"),Br("\\purpleD","\\textcolor{##7854ab}{#1}"),Br("\\purpleE","\\textcolor{##543b78}{#1}"),Br("\\mintA","\\textcolor{##f5f9e8}{#1}"),Br("\\mintB","\\textcolor{##edf2df}{#1}"),Br("\\mintC","\\textcolor{##e0e5cc}{#1}"),Br("\\grayA","\\textcolor{##f6f7f7}{#1}"),Br("\\grayB","\\textcolor{##f0f1f2}{#1}"),Br("\\grayC","\\textcolor{##e3e5e6}{#1}"),Br("\\grayD","\\textcolor{##d6d8da}{#1}"),Br("\\grayE","\\textcolor{##babec2}{#1}"),Br("\\grayF","\\textcolor{##888d93}{#1}"),Br("\\grayG","\\textcolor{##626569}{#1}"),Br("\\grayH","\\textcolor{##3b3e40}{#1}"),Br("\\grayI","\\textcolor{##21242c}{#1}"),Br("\\kaBlue","\\textcolor{##314453}{#1}"),Br("\\kaGreen","\\textcolor{##71B307}{#1}");const Rn={"^":!0,_:!0,"\\limits":!0,"\\nolimits":!0};class Hn{constructor(e,t,r){this.settings=void 0,this.expansionCount=void 0,this.lexer=void 0,this.macros=void 0,this.stack=void 0,this.mode=void 0,this.settings=t,this.expansionCount=0,this.feed(e),this.macros=new zn(An,t.macros),this.mode=r,this.stack=[]}feed(e){this.lexer=new Mn(e,this.settings)}switchMode(e){this.mode=e}beginGroup(){this.macros.beginGroup()}endGroup(){this.macros.endGroup()}endGroups(){this.macros.endGroups()}future(){return 0===this.stack.length&&this.pushToken(this.lexer.lex()),this.stack[this.stack.length-1]}popToken(){return this.future(),this.stack.pop()}pushToken(e){this.stack.push(e)}pushTokens(e){this.stack.push(...e)}scanArgument(e){let t,r,n;if(e){if(this.consumeSpaces(),"["!==this.future().text)return null;t=this.popToken(),({tokens:n,end:r}=this.consumeArg(["]"]))}else({tokens:n,start:t,end:r}=this.consumeArg());return this.pushToken(new Nr("EOF",r.loc)),this.pushTokens(n),t.range(r,"")}consumeSpaces(){for(;;){if(" "!==this.future().text)break;this.stack.pop()}}consumeArg(e){const t=[],r=e&&e.length>0;r||this.consumeSpaces();const o=this.future();let s,i=0,a=0;do{if(s=this.popToken(),t.push(s),"{"===s.text)++i;else if("}"===s.text){if(--i,-1===i)throw 
new n("Extra }",s)}else if("EOF"===s.text)throw new n("Unexpected end of input in a macro argument, expected '"+(e&&r?e[a]:"}")+"'",s);if(e&&r)if((0===i||1===i&&"{"===e[a])&&s.text===e[a]){if(++a,a===e.length){t.splice(-a,a);break}}else a=0}while(0!==i||r);return"{"===o.text&&"}"===t[t.length-1].text&&(t.pop(),t.shift()),t.reverse(),{tokens:t,start:o,end:s}}consumeArgs(e,t){if(t){if(t.length!==e+1)throw new n("The length of delimiters doesn't match the number of args!");const r=t[0];for(let e=0;ethis.settings.maxExpand)throw new n("Too many expansions: infinite loop or need to increase maxExpand setting")}expandOnce(e){const t=this.popToken(),r=t.text,o=t.noexpand?null:this._getExpansion(r);if(null==o||e&&o.unexpandable){if(e&&null==o&&"\\"===r[0]&&!this.isDefined(r))throw new n("Undefined control sequence: "+r);return this.pushToken(t),!1}this.countExpansion(1);let s=o.tokens;const i=this.consumeArgs(o.numArgs,o.delimiters);if(o.numArgs){s=s.slice();for(let e=s.length-1;e>=0;--e){let t=s[e];if("#"===t.text){if(0===e)throw new n("Incomplete placeholder at end of macro body",t);if(t=s[--e],"#"===t.text)s.splice(e+1,1);else{if(!/^[1-9]$/.test(t.text))throw new n("Not a valid argument number",t);s.splice(e,2,...i[+t.text-1])}}}}return this.pushTokens(s),s.length}expandAfterFuture(){return this.expandOnce(),this.future()}expandNextToken(){for(;;)if(!1===this.expandOnce()){const e=this.stack.pop();return e.treatAsRelax&&(e.text="\\relax"),e}throw new Error}expandMacro(e){return this.macros.has(e)?this.expandTokens([new Nr(e)]):void 0}expandTokens(e){const t=[],r=this.stack.length;for(this.pushTokens(e);this.stack.length>r;)if(!1===this.expandOnce(!0)){const e=this.stack.pop();e.treatAsRelax&&(e.noexpand=!1,e.treatAsRelax=!1),t.push(e)}return this.countExpansion(t.length),t}expandMacroAsText(e){const t=this.expandMacro(e);return t?t.map((e=>e.text)).join(""):t}_getExpansion(e){const t=this.macros.get(e);if(null==t)return t;if(1===e.length){const 
t=this.lexer.catcodes[e];if(null!=t&&13!==t)return}const r="function"==typeof t?t(this):t;if("string"==typeof r){let e=0;if(-1!==r.indexOf("#")){const t=r.replace(/##/g,"");for(;-1!==t.indexOf("#"+(e+1));)++e}const t=new Mn(r,this.settings),n=[];let o=t.lex();for(;"EOF"!==o.text;)n.push(o),o=t.lex();n.reverse();return{tokens:n,numArgs:e}}return r}isDefined(e){return this.macros.has(e)||yn.hasOwnProperty(e)||oe.math.hasOwnProperty(e)||oe.text.hasOwnProperty(e)||Rn.hasOwnProperty(e)}isExpandable(e){const t=this.macros.get(e);return null!=t?"string"==typeof t||"function"==typeof t||!t.unexpandable:yn.hasOwnProperty(e)&&!yn[e].primitive}}const On=/^[\u208a\u208b\u208c\u208d\u208e\u2080\u2081\u2082\u2083\u2084\u2085\u2086\u2087\u2088\u2089\u2090\u2091\u2095\u1d62\u2c7c\u2096\u2097\u2098\u2099\u2092\u209a\u1d63\u209b\u209c\u1d64\u1d65\u2093\u1d66\u1d67\u1d68\u1d69\u1d6a]/,En=Object.freeze({"\u208a":"+","\u208b":"-","\u208c":"=","\u208d":"(","\u208e":")","\u2080":"0","\u2081":"1","\u2082":"2","\u2083":"3","\u2084":"4","\u2085":"5","\u2086":"6","\u2087":"7","\u2088":"8","\u2089":"9","\u2090":"a","\u2091":"e","\u2095":"h","\u1d62":"i","\u2c7c":"j","\u2096":"k","\u2097":"l","\u2098":"m","\u2099":"n","\u2092":"o","\u209a":"p","\u1d63":"r","\u209b":"s","\u209c":"t","\u1d64":"u","\u1d65":"v","\u2093":"x","\u1d66":"\u03b2","\u1d67":"\u03b3","\u1d68":"\u03c1","\u1d69":"\u03d5","\u1d6a":"\u03c7","\u207a":"+","\u207b":"-","\u207c":"=","\u207d":"(","\u207e":")","\u2070":"0","\xb9":"1","\xb2":"2","\xb3":"3","\u2074":"4","\u2075":"5","\u2076":"6","\u2077":"7","\u2078":"8","\u2079":"9","\u1d2c":"A","\u1d2e":"B","\u1d30":"D","\u1d31":"E","\u1d33":"G","\u1d34":"H","\u1d35":"I","\u1d36":"J","\u1d37":"K","\u1d38":"L","\u1d39":"M","\u1d3a":"N","\u1d3c":"O","\u1d3e":"P","\u1d3f":"R","\u1d40":"T","\u1d41":"U","\u2c7d":"V","\u1d42":"W","\u1d43":"a","\u1d47":"b","\u1d9c":"c","\u1d48":"d","\u1d49":"e","\u1da0":"f","\u1d4d":"g","\u02b0":"h","\u2071":"i","\u02b2":"j","\u1d4f":"k","\u02e1":"l","\u1d
50":"m","\u207f":"n","\u1d52":"o","\u1d56":"p","\u02b3":"r","\u02e2":"s","\u1d57":"t","\u1d58":"u","\u1d5b":"v","\u02b7":"w","\u02e3":"x","\u02b8":"y","\u1dbb":"z","\u1d5d":"\u03b2","\u1d5e":"\u03b3","\u1d5f":"\u03b4","\u1d60":"\u03d5","\u1d61":"\u03c7","\u1dbf":"\u03b8"}),Ln={"\u0301":{text:"\\'",math:"\\acute"},"\u0300":{text:"\\`",math:"\\grave"},"\u0308":{text:'\\"',math:"\\ddot"},"\u0303":{text:"\\~",math:"\\tilde"},"\u0304":{text:"\\=",math:"\\bar"},"\u0306":{text:"\\u",math:"\\breve"},"\u030c":{text:"\\v",math:"\\check"},"\u0302":{text:"\\^",math:"\\hat"},"\u0307":{text:"\\.",math:"\\dot"},"\u030a":{text:"\\r",math:"\\mathring"},"\u030b":{text:"\\H"},"\u0327":{text:"\\c"}},Dn={"\xe1":"a\u0301","\xe0":"a\u0300","\xe4":"a\u0308","\u01df":"a\u0308\u0304","\xe3":"a\u0303","\u0101":"a\u0304","\u0103":"a\u0306","\u1eaf":"a\u0306\u0301","\u1eb1":"a\u0306\u0300","\u1eb5":"a\u0306\u0303","\u01ce":"a\u030c","\xe2":"a\u0302","\u1ea5":"a\u0302\u0301","\u1ea7":"a\u0302\u0300","\u1eab":"a\u0302\u0303","\u0227":"a\u0307","\u01e1":"a\u0307\u0304","\xe5":"a\u030a","\u01fb":"a\u030a\u0301","\u1e03":"b\u0307","\u0107":"c\u0301","\u1e09":"c\u0327\u0301","\u010d":"c\u030c","\u0109":"c\u0302","\u010b":"c\u0307","\xe7":"c\u0327","\u010f":"d\u030c","\u1e0b":"d\u0307","\u1e11":"d\u0327","\xe9":"e\u0301","\xe8":"e\u0300","\xeb":"e\u0308","\u1ebd":"e\u0303","\u0113":"e\u0304","\u1e17":"e\u0304\u0301","\u1e15":"e\u0304\u0300","\u0115":"e\u0306","\u1e1d":"e\u0327\u0306","\u011b":"e\u030c","\xea":"e\u0302","\u1ebf":"e\u0302\u0301","\u1ec1":"e\u0302\u0300","\u1ec5":"e\u0302\u0303","\u0117":"e\u0307","\u0229":"e\u0327","\u1e1f":"f\u0307","\u01f5":"g\u0301","\u1e21":"g\u0304","\u011f":"g\u0306","\u01e7":"g\u030c","\u011d":"g\u0302","\u0121":"g\u0307","\u0123":"g\u0327","\u1e27":"h\u0308","\u021f":"h\u030c","\u0125":"h\u0302","\u1e23":"h\u0307","\u1e29":"h\u0327","\xed":"i\u0301","\xec":"i\u0300","\xef":"i\u0308","\u1e2f":"i\u0308\u0301","\u0129":"i\u0303","\u012b":"i\u0304","\u012d":"i\u0306
","\u01d0":"i\u030c","\xee":"i\u0302","\u01f0":"j\u030c","\u0135":"j\u0302","\u1e31":"k\u0301","\u01e9":"k\u030c","\u0137":"k\u0327","\u013a":"l\u0301","\u013e":"l\u030c","\u013c":"l\u0327","\u1e3f":"m\u0301","\u1e41":"m\u0307","\u0144":"n\u0301","\u01f9":"n\u0300","\xf1":"n\u0303","\u0148":"n\u030c","\u1e45":"n\u0307","\u0146":"n\u0327","\xf3":"o\u0301","\xf2":"o\u0300","\xf6":"o\u0308","\u022b":"o\u0308\u0304","\xf5":"o\u0303","\u1e4d":"o\u0303\u0301","\u1e4f":"o\u0303\u0308","\u022d":"o\u0303\u0304","\u014d":"o\u0304","\u1e53":"o\u0304\u0301","\u1e51":"o\u0304\u0300","\u014f":"o\u0306","\u01d2":"o\u030c","\xf4":"o\u0302","\u1ed1":"o\u0302\u0301","\u1ed3":"o\u0302\u0300","\u1ed7":"o\u0302\u0303","\u022f":"o\u0307","\u0231":"o\u0307\u0304","\u0151":"o\u030b","\u1e55":"p\u0301","\u1e57":"p\u0307","\u0155":"r\u0301","\u0159":"r\u030c","\u1e59":"r\u0307","\u0157":"r\u0327","\u015b":"s\u0301","\u1e65":"s\u0301\u0307","\u0161":"s\u030c","\u1e67":"s\u030c\u0307","\u015d":"s\u0302","\u1e61":"s\u0307","\u015f":"s\u0327","\u1e97":"t\u0308","\u0165":"t\u030c","\u1e6b":"t\u0307","\u0163":"t\u0327","\xfa":"u\u0301","\xf9":"u\u0300","\xfc":"u\u0308","\u01d8":"u\u0308\u0301","\u01dc":"u\u0308\u0300","\u01d6":"u\u0308\u0304","\u01da":"u\u0308\u030c","\u0169":"u\u0303","\u1e79":"u\u0303\u0301","\u016b":"u\u0304","\u1e7b":"u\u0304\u0308","\u016d":"u\u0306","\u01d4":"u\u030c","\xfb":"u\u0302","\u016f":"u\u030a","\u0171":"u\u030b","\u1e7d":"v\u0303","\u1e83":"w\u0301","\u1e81":"w\u0300","\u1e85":"w\u0308","\u0175":"w\u0302","\u1e87":"w\u0307","\u1e98":"w\u030a","\u1e8d":"x\u0308","\u1e8b":"x\u0307","\xfd":"y\u0301","\u1ef3":"y\u0300","\xff":"y\u0308","\u1ef9":"y\u0303","\u0233":"y\u0304","\u0177":"y\u0302","\u1e8f":"y\u0307","\u1e99":"y\u030a","\u017a":"z\u0301","\u017e":"z\u030c","\u1e91":"z\u0302","\u017c":"z\u0307","\xc1":"A\u0301","\xc0":"A\u0300","\xc4":"A\u0308","\u01de":"A\u0308\u0304","\xc3":"A\u0303","\u0100":"A\u0304","\u0102":"A\u0306","\u1eae":"A\u0306\u0301","\u1eb0":"A\
u0306\u0300","\u1eb4":"A\u0306\u0303","\u01cd":"A\u030c","\xc2":"A\u0302","\u1ea4":"A\u0302\u0301","\u1ea6":"A\u0302\u0300","\u1eaa":"A\u0302\u0303","\u0226":"A\u0307","\u01e0":"A\u0307\u0304","\xc5":"A\u030a","\u01fa":"A\u030a\u0301","\u1e02":"B\u0307","\u0106":"C\u0301","\u1e08":"C\u0327\u0301","\u010c":"C\u030c","\u0108":"C\u0302","\u010a":"C\u0307","\xc7":"C\u0327","\u010e":"D\u030c","\u1e0a":"D\u0307","\u1e10":"D\u0327","\xc9":"E\u0301","\xc8":"E\u0300","\xcb":"E\u0308","\u1ebc":"E\u0303","\u0112":"E\u0304","\u1e16":"E\u0304\u0301","\u1e14":"E\u0304\u0300","\u0114":"E\u0306","\u1e1c":"E\u0327\u0306","\u011a":"E\u030c","\xca":"E\u0302","\u1ebe":"E\u0302\u0301","\u1ec0":"E\u0302\u0300","\u1ec4":"E\u0302\u0303","\u0116":"E\u0307","\u0228":"E\u0327","\u1e1e":"F\u0307","\u01f4":"G\u0301","\u1e20":"G\u0304","\u011e":"G\u0306","\u01e6":"G\u030c","\u011c":"G\u0302","\u0120":"G\u0307","\u0122":"G\u0327","\u1e26":"H\u0308","\u021e":"H\u030c","\u0124":"H\u0302","\u1e22":"H\u0307","\u1e28":"H\u0327","\xcd":"I\u0301","\xcc":"I\u0300","\xcf":"I\u0308","\u1e2e":"I\u0308\u0301","\u0128":"I\u0303","\u012a":"I\u0304","\u012c":"I\u0306","\u01cf":"I\u030c","\xce":"I\u0302","\u0130":"I\u0307","\u0134":"J\u0302","\u1e30":"K\u0301","\u01e8":"K\u030c","\u0136":"K\u0327","\u0139":"L\u0301","\u013d":"L\u030c","\u013b":"L\u0327","\u1e3e":"M\u0301","\u1e40":"M\u0307","\u0143":"N\u0301","\u01f8":"N\u0300","\xd1":"N\u0303","\u0147":"N\u030c","\u1e44":"N\u0307","\u0145":"N\u0327","\xd3":"O\u0301","\xd2":"O\u0300","\xd6":"O\u0308","\u022a":"O\u0308\u0304","\xd5":"O\u0303","\u1e4c":"O\u0303\u0301","\u1e4e":"O\u0303\u0308","\u022c":"O\u0303\u0304","\u014c":"O\u0304","\u1e52":"O\u0304\u0301","\u1e50":"O\u0304\u0300","\u014e":"O\u0306","\u01d1":"O\u030c","\xd4":"O\u0302","\u1ed0":"O\u0302\u0301","\u1ed2":"O\u0302\u0300","\u1ed6":"O\u0302\u0303","\u022e":"O\u0307","\u0230":"O\u0307\u0304","\u0150":"O\u030b","\u1e54":"P\u0301","\u1e56":"P\u0307","\u0154":"R\u0301","\u0158":"R\u030c","\u1e58":"R\u03
07","\u0156":"R\u0327","\u015a":"S\u0301","\u1e64":"S\u0301\u0307","\u0160":"S\u030c","\u1e66":"S\u030c\u0307","\u015c":"S\u0302","\u1e60":"S\u0307","\u015e":"S\u0327","\u0164":"T\u030c","\u1e6a":"T\u0307","\u0162":"T\u0327","\xda":"U\u0301","\xd9":"U\u0300","\xdc":"U\u0308","\u01d7":"U\u0308\u0301","\u01db":"U\u0308\u0300","\u01d5":"U\u0308\u0304","\u01d9":"U\u0308\u030c","\u0168":"U\u0303","\u1e78":"U\u0303\u0301","\u016a":"U\u0304","\u1e7a":"U\u0304\u0308","\u016c":"U\u0306","\u01d3":"U\u030c","\xdb":"U\u0302","\u016e":"U\u030a","\u0170":"U\u030b","\u1e7c":"V\u0303","\u1e82":"W\u0301","\u1e80":"W\u0300","\u1e84":"W\u0308","\u0174":"W\u0302","\u1e86":"W\u0307","\u1e8c":"X\u0308","\u1e8a":"X\u0307","\xdd":"Y\u0301","\u1ef2":"Y\u0300","\u0178":"Y\u0308","\u1ef8":"Y\u0303","\u0232":"Y\u0304","\u0176":"Y\u0302","\u1e8e":"Y\u0307","\u0179":"Z\u0301","\u017d":"Z\u030c","\u1e90":"Z\u0302","\u017b":"Z\u0307","\u03ac":"\u03b1\u0301","\u1f70":"\u03b1\u0300","\u1fb1":"\u03b1\u0304","\u1fb0":"\u03b1\u0306","\u03ad":"\u03b5\u0301","\u1f72":"\u03b5\u0300","\u03ae":"\u03b7\u0301","\u1f74":"\u03b7\u0300","\u03af":"\u03b9\u0301","\u1f76":"\u03b9\u0300","\u03ca":"\u03b9\u0308","\u0390":"\u03b9\u0308\u0301","\u1fd2":"\u03b9\u0308\u0300","\u1fd1":"\u03b9\u0304","\u1fd0":"\u03b9\u0306","\u03cc":"\u03bf\u0301","\u1f78":"\u03bf\u0300","\u03cd":"\u03c5\u0301","\u1f7a":"\u03c5\u0300","\u03cb":"\u03c5\u0308","\u03b0":"\u03c5\u0308\u0301","\u1fe2":"\u03c5\u0308\u0300","\u1fe1":"\u03c5\u0304","\u1fe0":"\u03c5\u0306","\u03ce":"\u03c9\u0301","\u1f7c":"\u03c9\u0300","\u038e":"\u03a5\u0301","\u1fea":"\u03a5\u0300","\u03ab":"\u03a5\u0308","\u1fe9":"\u03a5\u0304","\u1fe8":"\u03a5\u0306","\u038f":"\u03a9\u0301","\u1ffa":"\u03a9\u0300"};class Vn{constructor(e,t){this.mode=void 0,this.gullet=void 0,this.settings=void 0,this.leftrightDepth=void 0,this.nextToken=void 0,this.mode="math",this.gullet=new Hn(e,t,this.mode),this.settings=t,this.leftrightDepth=0}expect(e,t){if(void 
0===t&&(t=!0),this.fetch().text!==e)throw new n("Expected '"+e+"', got '"+this.fetch().text+"'",this.fetch());t&&this.consume()}consume(){this.nextToken=null}fetch(){return null==this.nextToken&&(this.nextToken=this.gullet.expandNextToken()),this.nextToken}switchMode(e){this.mode=e,this.gullet.switchMode(e)}parse(){this.settings.globalGroup||this.gullet.beginGroup(),this.settings.colorIsTextColor&&this.gullet.macros.set("\\color","\\textcolor");try{const e=this.parseExpression(!1);return this.expect("EOF"),this.settings.globalGroup||this.gullet.endGroup(),e}finally{this.gullet.endGroups()}}subparse(e){const t=this.nextToken;this.consume(),this.gullet.pushToken(new Nr("}")),this.gullet.pushTokens(e);const r=this.parseExpression(!1);return this.expect("}"),this.nextToken=t,r}parseExpression(e,t){const r=[];for(;;){"math"===this.mode&&this.consumeSpaces();const n=this.fetch();if(-1!==Vn.endOfExpression.indexOf(n.text))break;if(t&&n.text===t)break;if(e&&yn[n.text]&&yn[n.text].infix)break;const o=this.parseAtom(t);if(!o)break;"internal"!==o.type&&r.push(o)}return"text"===this.mode&&this.formLigatures(r),this.handleInfixNodes(r)}handleInfixNodes(e){let t,r=-1;for(let o=0;o=0&&this.settings.reportNonstrict("unicodeTextInMathMode",'Latin-1/Unicode text character "'+t[0]+'" used in math mode',e);const r=oe[this.mode][t].group,n=Cr.range(e);let s;if(te.hasOwnProperty(r)){const e=r;s={type:"atom",mode:this.mode,family:e,loc:n,text:t}}else s={type:r,mode:this.mode,loc:n,text:t};o=s}else{if(!(t.charCodeAt(0)>=128))return null;this.settings.strict&&(S(t.charCodeAt(0))?"math"===this.mode&&this.settings.reportNonstrict("unicodeTextInMathMode",'Unicode text character "'+t[0]+'" used in math mode',e):this.settings.reportNonstrict("unknownSymbol",'Unrecognized Unicode character "'+t[0]+'" ('+t.charCodeAt(0)+")",e)),o={type:"textord",mode:"text",loc:Cr.range(e),text:t}}if(this.consume(),r)for(let t=0;t0 + var meq1 = "^(" + C + ")?" 
+ V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 + var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1 + var s_v = "^(" + C + ")?" + v; // vowel in stem + + this.stemWord = function (w) { + var stem; + var suffix; + var firstch; + var origword = w; + + if (w.length < 3) + return w; + + var re; + var re2; + var re3; + var re4; + + firstch = w.substr(0,1); + if (firstch == "y") + w = firstch.toUpperCase() + w.substr(1); + + // Step 1a + re = /^(.+?)(ss|i)es$/; + re2 = /^(.+?)([^s])s$/; + + if (re.test(w)) + w = w.replace(re,"$1$2"); + else if (re2.test(w)) + w = w.replace(re2,"$1$2"); + + // Step 1b + re = /^(.+?)eed$/; + re2 = /^(.+?)(ed|ing)$/; + if (re.test(w)) { + var fp = re.exec(w); + re = new RegExp(mgr0); + if (re.test(fp[1])) { + re = /.$/; + w = w.replace(re,""); + } + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1]; + re2 = new RegExp(s_v); + if (re2.test(stem)) { + w = stem; + re2 = /(at|bl|iz)$/; + re3 = new RegExp("([^aeiouylsz])\\1$"); + re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re2.test(w)) + w = w + "e"; + else if (re3.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + else if (re4.test(w)) + w = w + "e"; + } + } + + // Step 1c + re = /^(.+?)y$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(s_v); + if (re.test(stem)) + w = stem + "i"; + } + + // Step 2 + re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step2list[suffix]; + } + + // Step 3 + re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step3list[suffix]; + } + + // Step 4 + re = 
/^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; + re2 = /^(.+?)(s|t)(ion)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + if (re.test(stem)) + w = stem; + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1] + fp[2]; + re2 = new RegExp(mgr1); + if (re2.test(stem)) + w = stem; + } + + // Step 5 + re = /^(.+?)e$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + re2 = new RegExp(meq1); + re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) + w = stem; + } + re = /ll$/; + re2 = new RegExp(mgr1); + if (re.test(w) && re2.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + + // and turn initial Y back to y + if (firstch == "y") + w = firstch.toLowerCase() + w.substr(1); + return w; + } +} + diff --git a/docs/_static/minus.png b/docs/_static/minus.png new file mode 100644 index 00000000..d96755fd Binary files /dev/null and b/docs/_static/minus.png differ diff --git a/docs/_static/plus.png b/docs/_static/plus.png new file mode 100644 index 00000000..7107cec9 Binary files /dev/null and b/docs/_static/plus.png differ diff --git a/docs/_static/pygments.css b/docs/_static/pygments.css new file mode 100644 index 00000000..0d49244e --- /dev/null +++ b/docs/_static/pygments.css @@ -0,0 +1,75 @@ +pre { line-height: 125%; } +td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +.highlight .hll { background-color: #ffffcc } +.highlight { background: #eeffcc; } +.highlight .c { color: #408090; font-style: italic } /* Comment */ 
+.highlight .err { border: 1px solid #FF0000 } /* Error */ +.highlight .k { color: #007020; font-weight: bold } /* Keyword */ +.highlight .o { color: #666666 } /* Operator */ +.highlight .ch { color: #408090; font-style: italic } /* Comment.Hashbang */ +.highlight .cm { color: #408090; font-style: italic } /* Comment.Multiline */ +.highlight .cp { color: #007020 } /* Comment.Preproc */ +.highlight .cpf { color: #408090; font-style: italic } /* Comment.PreprocFile */ +.highlight .c1 { color: #408090; font-style: italic } /* Comment.Single */ +.highlight .cs { color: #408090; background-color: #fff0f0 } /* Comment.Special */ +.highlight .gd { color: #A00000 } /* Generic.Deleted */ +.highlight .ge { font-style: italic } /* Generic.Emph */ +.highlight .ges { font-weight: bold; font-style: italic } /* Generic.EmphStrong */ +.highlight .gr { color: #FF0000 } /* Generic.Error */ +.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ +.highlight .gi { color: #00A000 } /* Generic.Inserted */ +.highlight .go { color: #333333 } /* Generic.Output */ +.highlight .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */ +.highlight .gs { font-weight: bold } /* Generic.Strong */ +.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ +.highlight .gt { color: #0044DD } /* Generic.Traceback */ +.highlight .kc { color: #007020; font-weight: bold } /* Keyword.Constant */ +.highlight .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */ +.highlight .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */ +.highlight .kp { color: #007020 } /* Keyword.Pseudo */ +.highlight .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */ +.highlight .kt { color: #902000 } /* Keyword.Type */ +.highlight .m { color: #208050 } /* Literal.Number */ +.highlight .s { color: #4070a0 } /* Literal.String */ +.highlight .na { color: #4070a0 } /* Name.Attribute */ +.highlight .nb { color: #007020 } /* Name.Builtin */ 
+.highlight .nc { color: #0e84b5; font-weight: bold } /* Name.Class */ +.highlight .no { color: #60add5 } /* Name.Constant */ +.highlight .nd { color: #555555; font-weight: bold } /* Name.Decorator */ +.highlight .ni { color: #d55537; font-weight: bold } /* Name.Entity */ +.highlight .ne { color: #007020 } /* Name.Exception */ +.highlight .nf { color: #06287e } /* Name.Function */ +.highlight .nl { color: #002070; font-weight: bold } /* Name.Label */ +.highlight .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */ +.highlight .nt { color: #062873; font-weight: bold } /* Name.Tag */ +.highlight .nv { color: #bb60d5 } /* Name.Variable */ +.highlight .ow { color: #007020; font-weight: bold } /* Operator.Word */ +.highlight .w { color: #bbbbbb } /* Text.Whitespace */ +.highlight .mb { color: #208050 } /* Literal.Number.Bin */ +.highlight .mf { color: #208050 } /* Literal.Number.Float */ +.highlight .mh { color: #208050 } /* Literal.Number.Hex */ +.highlight .mi { color: #208050 } /* Literal.Number.Integer */ +.highlight .mo { color: #208050 } /* Literal.Number.Oct */ +.highlight .sa { color: #4070a0 } /* Literal.String.Affix */ +.highlight .sb { color: #4070a0 } /* Literal.String.Backtick */ +.highlight .sc { color: #4070a0 } /* Literal.String.Char */ +.highlight .dl { color: #4070a0 } /* Literal.String.Delimiter */ +.highlight .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */ +.highlight .s2 { color: #4070a0 } /* Literal.String.Double */ +.highlight .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */ +.highlight .sh { color: #4070a0 } /* Literal.String.Heredoc */ +.highlight .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */ +.highlight .sx { color: #c65d09 } /* Literal.String.Other */ +.highlight .sr { color: #235388 } /* Literal.String.Regex */ +.highlight .s1 { color: #4070a0 } /* Literal.String.Single */ +.highlight .ss { color: #517918 } /* Literal.String.Symbol */ +.highlight .bp { color: 
#007020 } /* Name.Builtin.Pseudo */ +.highlight .fm { color: #06287e } /* Name.Function.Magic */ +.highlight .vc { color: #bb60d5 } /* Name.Variable.Class */ +.highlight .vg { color: #bb60d5 } /* Name.Variable.Global */ +.highlight .vi { color: #bb60d5 } /* Name.Variable.Instance */ +.highlight .vm { color: #bb60d5 } /* Name.Variable.Magic */ +.highlight .il { color: #208050 } /* Literal.Number.Integer.Long */ \ No newline at end of file diff --git a/docs/_static/searchtools.js b/docs/_static/searchtools.js new file mode 100644 index 00000000..b08d58c9 --- /dev/null +++ b/docs/_static/searchtools.js @@ -0,0 +1,620 @@ +/* + * searchtools.js + * ~~~~~~~~~~~~~~~~ + * + * Sphinx JavaScript utilities for the full-text search. + * + * :copyright: Copyright 2007-2024 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ +"use strict"; + +/** + * Simple result scoring code. + */ +if (typeof Scorer === "undefined") { + var Scorer = { + // Implement the following function to further tweak the score for each result + // The function takes a result array [docname, title, anchor, descr, score, filename] + // and returns the new score. + /* + score: result => { + const [docname, title, anchor, descr, score, filename] = result + return score + }, + */ + + // query matches the full name of an object + objNameMatch: 11, + // or matches in the last dotted part of the object name + objPartialMatch: 6, + // Additive scores depending on the priority of the object + objPrio: { + 0: 15, // used to be importantResults + 1: 5, // used to be objectResults + 2: -5, // used to be unimportantResults + }, + // Used when the priority is not in the mapping. 
+ objPrioDefault: 0, + + // query found in title + title: 15, + partialTitle: 7, + // query found in terms + term: 5, + partialTerm: 2, + }; +} + +const _removeChildren = (element) => { + while (element && element.lastChild) element.removeChild(element.lastChild); +}; + +/** + * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#escaping + */ +const _escapeRegExp = (string) => + string.replace(/[.*+\-?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string + +const _displayItem = (item, searchTerms, highlightTerms) => { + const docBuilder = DOCUMENTATION_OPTIONS.BUILDER; + const docFileSuffix = DOCUMENTATION_OPTIONS.FILE_SUFFIX; + const docLinkSuffix = DOCUMENTATION_OPTIONS.LINK_SUFFIX; + const showSearchSummary = DOCUMENTATION_OPTIONS.SHOW_SEARCH_SUMMARY; + const contentRoot = document.documentElement.dataset.content_root; + + const [docName, title, anchor, descr, score, _filename] = item; + + let listItem = document.createElement("li"); + let requestUrl; + let linkUrl; + if (docBuilder === "dirhtml") { + // dirhtml builder + let dirname = docName + "/"; + if (dirname.match(/\/index\/$/)) + dirname = dirname.substring(0, dirname.length - 6); + else if (dirname === "index/") dirname = ""; + requestUrl = contentRoot + dirname; + linkUrl = requestUrl; + } else { + // normal html builders + requestUrl = contentRoot + docName + docFileSuffix; + linkUrl = docName + docLinkSuffix; + } + let linkEl = listItem.appendChild(document.createElement("a")); + linkEl.href = linkUrl + anchor; + linkEl.dataset.score = score; + linkEl.innerHTML = title; + if (descr) { + listItem.appendChild(document.createElement("span")).innerHTML = + " (" + descr + ")"; + // highlight search terms in the description + if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js + highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted")); + } + else if (showSearchSummary) + fetch(requestUrl) + .then((responseData) => 
responseData.text()) + .then((data) => { + if (data) + listItem.appendChild( + Search.makeSearchSummary(data, searchTerms, anchor) + ); + // highlight search terms in the summary + if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js + highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted")); + }); + Search.output.appendChild(listItem); +}; +const _finishSearch = (resultCount) => { + Search.stopPulse(); + Search.title.innerText = _("Search Results"); + if (!resultCount) + Search.status.innerText = Documentation.gettext( + "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories." + ); + else + Search.status.innerText = _( + "Search finished, found ${resultCount} page(s) matching the search query." + ).replace('${resultCount}', resultCount); +}; +const _displayNextItem = ( + results, + resultCount, + searchTerms, + highlightTerms, +) => { + // results left, load the summary and display it + // this is intended to be dynamic (don't sub resultsCount) + if (results.length) { + _displayItem(results.pop(), searchTerms, highlightTerms); + setTimeout( + () => _displayNextItem(results, resultCount, searchTerms, highlightTerms), + 5 + ); + } + // search finished, update title and status message + else _finishSearch(resultCount); +}; +// Helper function used by query() to order search results. +// Each input is an array of [docname, title, anchor, descr, score, filename]. +// Order the results by score (in opposite order of appearance, since the +// `_displayNextItem` function uses pop() to retrieve items) and then alphabetically. +const _orderResultsByScoreThenName = (a, b) => { + const leftScore = a[4]; + const rightScore = b[4]; + if (leftScore === rightScore) { + // same score: sort alphabetically + const leftTitle = a[1].toLowerCase(); + const rightTitle = b[1].toLowerCase(); + if (leftTitle === rightTitle) return 0; + return leftTitle > rightTitle ? 
-1 : 1; // inverted is intentional + } + return leftScore > rightScore ? 1 : -1; +}; + +/** + * Default splitQuery function. Can be overridden in ``sphinx.search`` with a + * custom function per language. + * + * The regular expression works by splitting the string on consecutive characters + * that are not Unicode letters, numbers, underscores, or emoji characters. + * This is the same as ``\W+`` in Python, preserving the surrogate pair area. + */ +if (typeof splitQuery === "undefined") { + var splitQuery = (query) => query + .split(/[^\p{Letter}\p{Number}_\p{Emoji_Presentation}]+/gu) + .filter(term => term) // remove remaining empty strings +} + +/** + * Search Module + */ +const Search = { + _index: null, + _queued_query: null, + _pulse_status: -1, + + htmlToText: (htmlString, anchor) => { + const htmlElement = new DOMParser().parseFromString(htmlString, 'text/html'); + for (const removalQuery of [".headerlink", "script", "style"]) { + htmlElement.querySelectorAll(removalQuery).forEach((el) => { el.remove() }); + } + if (anchor) { + const anchorContent = htmlElement.querySelector(`[role="main"] ${anchor}`); + if (anchorContent) return anchorContent.textContent; + + console.warn( + `Anchored content block not found. Sphinx search tries to obtain it via DOM query '[role=main] ${anchor}'. Check your theme or template.` + ); + } + + // if anchor not specified or not found, fall back to main content + const docContent = htmlElement.querySelector('[role="main"]'); + if (docContent) return docContent.textContent; + + console.warn( + "Content block not found. Sphinx search tries to obtain it via DOM query '[role=main]'. Check your theme or template." 
+ ); + return ""; + }, + + init: () => { + const query = new URLSearchParams(window.location.search).get("q"); + document + .querySelectorAll('input[name="q"]') + .forEach((el) => (el.value = query)); + if (query) Search.performSearch(query); + }, + + loadIndex: (url) => + (document.body.appendChild(document.createElement("script")).src = url), + + setIndex: (index) => { + Search._index = index; + if (Search._queued_query !== null) { + const query = Search._queued_query; + Search._queued_query = null; + Search.query(query); + } + }, + + hasIndex: () => Search._index !== null, + + deferQuery: (query) => (Search._queued_query = query), + + stopPulse: () => (Search._pulse_status = -1), + + startPulse: () => { + if (Search._pulse_status >= 0) return; + + const pulse = () => { + Search._pulse_status = (Search._pulse_status + 1) % 4; + Search.dots.innerText = ".".repeat(Search._pulse_status); + if (Search._pulse_status >= 0) window.setTimeout(pulse, 500); + }; + pulse(); + }, + + /** + * perform a search for something (or wait until index is loaded) + */ + performSearch: (query) => { + // create the required interface elements + const searchText = document.createElement("h2"); + searchText.textContent = _("Searching"); + const searchSummary = document.createElement("p"); + searchSummary.classList.add("search-summary"); + searchSummary.innerText = ""; + const searchList = document.createElement("ul"); + searchList.classList.add("search"); + + const out = document.getElementById("search-results"); + Search.title = out.appendChild(searchText); + Search.dots = Search.title.appendChild(document.createElement("span")); + Search.status = out.appendChild(searchSummary); + Search.output = out.appendChild(searchList); + + const searchProgress = document.getElementById("search-progress"); + // Some themes don't use the search progress node + if (searchProgress) { + searchProgress.innerText = _("Preparing search..."); + } + Search.startPulse(); + + // index already loaded, the 
browser was quick! + if (Search.hasIndex()) Search.query(query); + else Search.deferQuery(query); + }, + + _parseQuery: (query) => { + // stem the search terms and add them to the correct list + const stemmer = new Stemmer(); + const searchTerms = new Set(); + const excludedTerms = new Set(); + const highlightTerms = new Set(); + const objectTerms = new Set(splitQuery(query.toLowerCase().trim())); + splitQuery(query.trim()).forEach((queryTerm) => { + const queryTermLower = queryTerm.toLowerCase(); + + // maybe skip this "word" + // stopwords array is from language_data.js + if ( + stopwords.indexOf(queryTermLower) !== -1 || + queryTerm.match(/^\d+$/) + ) + return; + + // stem the word + let word = stemmer.stemWord(queryTermLower); + // select the correct list + if (word[0] === "-") excludedTerms.add(word.substr(1)); + else { + searchTerms.add(word); + highlightTerms.add(queryTermLower); + } + }); + + if (SPHINX_HIGHLIGHT_ENABLED) { // set in sphinx_highlight.js + localStorage.setItem("sphinx_highlight_terms", [...highlightTerms].join(" ")) + } + + // console.debug("SEARCH: searching for:"); + // console.info("required: ", [...searchTerms]); + // console.info("excluded: ", [...excludedTerms]); + + return [query, searchTerms, excludedTerms, highlightTerms, objectTerms]; + }, + + /** + * execute search (requires search index to be loaded) + */ + _performSearch: (query, searchTerms, excludedTerms, highlightTerms, objectTerms) => { + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const titles = Search._index.titles; + const allTitles = Search._index.alltitles; + const indexEntries = Search._index.indexentries; + + // Collect multiple result groups to be sorted separately and then ordered. + // Each is an array of [docname, title, anchor, descr, score, filename]. 
+ const normalResults = []; + const nonMainIndexResults = []; + + _removeChildren(document.getElementById("search-progress")); + + const queryLower = query.toLowerCase().trim(); + for (const [title, foundTitles] of Object.entries(allTitles)) { + if (title.toLowerCase().trim().includes(queryLower) && (queryLower.length >= title.length/2)) { + for (const [file, id] of foundTitles) { + const score = Math.round(Scorer.title * queryLower.length / title.length); + const boost = titles[file] === title ? 1 : 0; // add a boost for document titles + normalResults.push([ + docNames[file], + titles[file] !== title ? `${titles[file]} > ${title}` : title, + id !== null ? "#" + id : "", + null, + score + boost, + filenames[file], + ]); + } + } + } + + // search for explicit entries in index directives + for (const [entry, foundEntries] of Object.entries(indexEntries)) { + if (entry.includes(queryLower) && (queryLower.length >= entry.length/2)) { + for (const [file, id, isMain] of foundEntries) { + const score = Math.round(100 * queryLower.length / entry.length); + const result = [ + docNames[file], + titles[file], + id ? "#" + id : "", + null, + score, + filenames[file], + ]; + if (isMain) { + normalResults.push(result); + } else { + nonMainIndexResults.push(result); + } + } + } + } + + // lookup as object + objectTerms.forEach((term) => + normalResults.push(...Search.performObjectSearch(term, objectTerms)) + ); + + // lookup as search terms in fulltext + normalResults.push(...Search.performTermsSearch(searchTerms, excludedTerms)); + + // let the scorer override scores with a custom scoring function + if (Scorer.score) { + normalResults.forEach((item) => (item[4] = Scorer.score(item))); + nonMainIndexResults.forEach((item) => (item[4] = Scorer.score(item))); + } + + // Sort each group of results by score and then alphabetically by name. 
+ normalResults.sort(_orderResultsByScoreThenName); + nonMainIndexResults.sort(_orderResultsByScoreThenName); + + // Combine the result groups in (reverse) order. + // Non-main index entries are typically arbitrary cross-references, + // so display them after other results. + let results = [...nonMainIndexResults, ...normalResults]; + + // remove duplicate search results + // note the reversing of results, so that in the case of duplicates, the highest-scoring entry is kept + let seen = new Set(); + results = results.reverse().reduce((acc, result) => { + let resultStr = result.slice(0, 4).concat([result[5]]).map(v => String(v)).join(','); + if (!seen.has(resultStr)) { + acc.push(result); + seen.add(resultStr); + } + return acc; + }, []); + + return results.reverse(); + }, + + query: (query) => { + const [searchQuery, searchTerms, excludedTerms, highlightTerms, objectTerms] = Search._parseQuery(query); + const results = Search._performSearch(searchQuery, searchTerms, excludedTerms, highlightTerms, objectTerms); + + // for debugging + //Search.lastresults = results.slice(); // a copy + // console.info("search results:", Search.lastresults); + + // print the results + _displayNextItem(results, results.length, searchTerms, highlightTerms); + }, + + /** + * search for object names + */ + performObjectSearch: (object, objectTerms) => { + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const objects = Search._index.objects; + const objNames = Search._index.objnames; + const titles = Search._index.titles; + + const results = []; + + const objectSearchCallback = (prefix, match) => { + const name = match[4] + const fullname = (prefix ? prefix + "." : "") + name; + const fullnameLower = fullname.toLowerCase(); + if (fullnameLower.indexOf(object) < 0) return; + + let score = 0; + const parts = fullnameLower.split("."); + + // check for different match types: exact matches of full name or + // "last name" (i.e. 
last dotted part) + if (fullnameLower === object || parts.slice(-1)[0] === object) + score += Scorer.objNameMatch; + else if (parts.slice(-1)[0].indexOf(object) > -1) + score += Scorer.objPartialMatch; // matches in last name + + const objName = objNames[match[1]][2]; + const title = titles[match[0]]; + + // If more than one term searched for, we require other words to be + // found in the name/title/description + const otherTerms = new Set(objectTerms); + otherTerms.delete(object); + if (otherTerms.size > 0) { + const haystack = `${prefix} ${name} ${objName} ${title}`.toLowerCase(); + if ( + [...otherTerms].some((otherTerm) => haystack.indexOf(otherTerm) < 0) + ) + return; + } + + let anchor = match[3]; + if (anchor === "") anchor = fullname; + else if (anchor === "-") anchor = objNames[match[1]][1] + "-" + fullname; + + const descr = objName + _(", in ") + title; + + // add custom score for some objects according to scorer + if (Scorer.objPrio.hasOwnProperty(match[2])) + score += Scorer.objPrio[match[2]]; + else score += Scorer.objPrioDefault; + + results.push([ + docNames[match[0]], + fullname, + "#" + anchor, + descr, + score, + filenames[match[0]], + ]); + }; + Object.keys(objects).forEach((prefix) => + objects[prefix].forEach((array) => + objectSearchCallback(prefix, array) + ) + ); + return results; + }, + + /** + * search for full-text terms in the index + */ + performTermsSearch: (searchTerms, excludedTerms) => { + // prepare search + const terms = Search._index.terms; + const titleTerms = Search._index.titleterms; + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const titles = Search._index.titles; + + const scoreMap = new Map(); + const fileMap = new Map(); + + // perform the search on the required terms + searchTerms.forEach((word) => { + const files = []; + const arr = [ + { files: terms[word], score: Scorer.term }, + { files: titleTerms[word], score: Scorer.title }, + ]; + // add support for partial matches + 
if (word.length > 2) { + const escapedWord = _escapeRegExp(word); + if (!terms.hasOwnProperty(word)) { + Object.keys(terms).forEach((term) => { + if (term.match(escapedWord)) + arr.push({ files: terms[term], score: Scorer.partialTerm }); + }); + } + if (!titleTerms.hasOwnProperty(word)) { + Object.keys(titleTerms).forEach((term) => { + if (term.match(escapedWord)) + arr.push({ files: titleTerms[term], score: Scorer.partialTitle }); + }); + } + } + + // no match but word was a required one + if (arr.every((record) => record.files === undefined)) return; + + // found search word in contents + arr.forEach((record) => { + if (record.files === undefined) return; + + let recordFiles = record.files; + if (recordFiles.length === undefined) recordFiles = [recordFiles]; + files.push(...recordFiles); + + // set score for the word in each file + recordFiles.forEach((file) => { + if (!scoreMap.has(file)) scoreMap.set(file, {}); + scoreMap.get(file)[word] = record.score; + }); + }); + + // create the mapping + files.forEach((file) => { + if (!fileMap.has(file)) fileMap.set(file, [word]); + else if (fileMap.get(file).indexOf(word) === -1) fileMap.get(file).push(word); + }); + }); + + // now check if the files don't contain excluded terms + const results = []; + for (const [file, wordList] of fileMap) { + // check if all requirements are matched + + // as search terms with length < 3 are discarded + const filteredTermCount = [...searchTerms].filter( + (term) => term.length > 2 + ).length; + if ( + wordList.length !== searchTerms.size && + wordList.length !== filteredTermCount + ) + continue; + + // ensure that none of the excluded terms is in the search result + if ( + [...excludedTerms].some( + (term) => + terms[term] === file || + titleTerms[term] === file || + (terms[term] || []).includes(file) || + (titleTerms[term] || []).includes(file) + ) + ) + break; + + // select one (max) score for the file. 
+ const score = Math.max(...wordList.map((w) => scoreMap.get(file)[w])); + // add result to the result list + results.push([ + docNames[file], + titles[file], + "", + null, + score, + filenames[file], + ]); + } + return results; + }, + + /** + * helper function to return a node containing the + * search summary for a given text. keywords is a list + * of stemmed words. + */ + makeSearchSummary: (htmlText, keywords, anchor) => { + const text = Search.htmlToText(htmlText, anchor); + if (text === "") return null; + + const textLower = text.toLowerCase(); + const actualStartPosition = [...keywords] + .map((k) => textLower.indexOf(k.toLowerCase())) + .filter((i) => i > -1) + .slice(-1)[0]; + const startWithContext = Math.max(actualStartPosition - 120, 0); + + const top = startWithContext === 0 ? "" : "..."; + const tail = startWithContext + 240 < text.length ? "..." : ""; + + let summary = document.createElement("p"); + summary.classList.add("context"); + summary.textContent = top + text.substr(startWithContext, 240).trim() + tail; + + return summary; + }, +}; + +_ready(Search.init); diff --git a/docs/_static/sphinx_highlight.js b/docs/_static/sphinx_highlight.js new file mode 100644 index 00000000..8a96c69a --- /dev/null +++ b/docs/_static/sphinx_highlight.js @@ -0,0 +1,154 @@ +/* Highlighting utilities for Sphinx HTML documentation. */ +"use strict"; + +const SPHINX_HIGHLIGHT_ENABLED = true + +/** + * highlight a given string on a node by wrapping it in + * span elements with the given class name. 
+ */ +const _highlight = (node, addItems, text, className) => { + if (node.nodeType === Node.TEXT_NODE) { + const val = node.nodeValue; + const parent = node.parentNode; + const pos = val.toLowerCase().indexOf(text); + if ( + pos >= 0 && + !parent.classList.contains(className) && + !parent.classList.contains("nohighlight") + ) { + let span; + + const closestNode = parent.closest("body, svg, foreignObject"); + const isInSVG = closestNode && closestNode.matches("svg"); + if (isInSVG) { + span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); + } else { + span = document.createElement("span"); + span.classList.add(className); + } + + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + const rest = document.createTextNode(val.substr(pos + text.length)); + parent.insertBefore( + span, + parent.insertBefore( + rest, + node.nextSibling + ) + ); + node.nodeValue = val.substr(0, pos); + /* There may be more occurrences of search term in this node. So call this + * function recursively on the remaining fragment. + */ + _highlight(rest, addItems, text, className); + + if (isInSVG) { + const rect = document.createElementNS( + "http://www.w3.org/2000/svg", + "rect" + ); + const bbox = parent.getBBox(); + rect.x.baseVal.value = bbox.x; + rect.y.baseVal.value = bbox.y; + rect.width.baseVal.value = bbox.width; + rect.height.baseVal.value = bbox.height; + rect.setAttribute("class", className); + addItems.push({ parent: parent, target: rect }); + } + } + } else if (node.matches && !node.matches("button, select, textarea")) { + node.childNodes.forEach((el) => _highlight(el, addItems, text, className)); + } +}; +const _highlightText = (thisNode, text, className) => { + let addItems = []; + _highlight(thisNode, addItems, text, className); + addItems.forEach((obj) => + obj.parent.insertAdjacentElement("beforebegin", obj.target) + ); +}; + +/** + * Small JavaScript module for the documentation. 
+ */ +const SphinxHighlight = { + + /** + * highlight the search words provided in localstorage in the text + */ + highlightSearchWords: () => { + if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight + + // get and clear terms from localstorage + const url = new URL(window.location); + const highlight = + localStorage.getItem("sphinx_highlight_terms") + || url.searchParams.get("highlight") + || ""; + localStorage.removeItem("sphinx_highlight_terms") + url.searchParams.delete("highlight"); + window.history.replaceState({}, "", url); + + // get individual terms from highlight string + const terms = highlight.toLowerCase().split(/\s+/).filter(x => x); + if (terms.length === 0) return; // nothing to do + + // There should never be more than one element matching "div.body" + const divBody = document.querySelectorAll("div.body"); + const body = divBody.length ? divBody[0] : document.querySelector("body"); + window.setTimeout(() => { + terms.forEach((term) => _highlightText(body, term, "highlighted")); + }, 10); + + const searchBox = document.getElementById("searchbox"); + if (searchBox === null) return; + searchBox.appendChild( + document + .createRange() + .createContextualFragment( + '" + ) + ); + }, + + /** + * helper function to hide the search marks again + */ + hideSearchWords: () => { + document + .querySelectorAll("#searchbox .highlight-link") + .forEach((el) => el.remove()); + document + .querySelectorAll("span.highlighted") + .forEach((el) => el.classList.remove("highlighted")); + localStorage.removeItem("sphinx_highlight_terms") + }, + + initEscapeListener: () => { + // only install a listener if it is really needed + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey) return; + 
if (DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS && (event.key === "Escape")) { + SphinxHighlight.hideSearchWords(); + event.preventDefault(); + } + }); + }, +}; + +_ready(() => { + /* Do not call highlightSearchWords() when we are on the search page. + * It will highlight words from the *previous* search query. + */ + if (typeof Search === "undefined") SphinxHighlight.highlightSearchWords(); + SphinxHighlight.initEscapeListener(); +}); diff --git a/docs/genindex.html b/docs/genindex.html new file mode 100644 index 00000000..cddce558 --- /dev/null +++ b/docs/genindex.html @@ -0,0 +1,1594 @@ + + + + + + Index — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ + +

Index

+ +
+ A + | B + | C + | D + | E + | F + | G + | H + | I + | J + | K + | L + | M + | N + | O + | P + | R + | S + | T + | U + | V + | W + | Z + +
+

A

+ + + +
+ +

B

+ + + +
+ +

C

+ + + +
+ +

D

+ + + +
+ +

E

+ + + +
+ +

F

+ + + +
+ +

G

+ + + +
+ +

H

+ + + +
+ +

I

+ + + +
+ +

J

+ + +
+ +

K

+ + + +
+ +

L

+ + + +
+ +

M

+ + + +
+ +

N

+ + + +
+ +

O

+ + + +
+ +

P

+ + + +
+ +

R

+ + + +
+ +

S

+ + + +
+ +

T

+ + + +
+ +

U

+ + + +
+ +

V

+ + + +
+ +

W

+ + + +
+ +

Z

+ + +
+ + + +
+
+
+ +
+ +
+

© Copyright 2013–2024, CHIME Collaboration.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/index.html b/docs/index.html new file mode 100644 index 00000000..3b773b69 --- /dev/null +++ b/docs/index.html @@ -0,0 +1,133 @@ + + + + + + + Welcome to ch_util’s documentation! — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Welcome to ch_util’s documentation!

+

Contents:

+ +
+
+

Indices and tables

+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/objects.inv b/docs/objects.inv new file mode 100644 index 00000000..e67085a5 Binary files /dev/null and b/docs/objects.inv differ diff --git a/docs/py-modindex.html b/docs/py-modindex.html new file mode 100644 index 00000000..48cf738c --- /dev/null +++ b/docs/py-modindex.html @@ -0,0 +1,204 @@ + + + + + + Python Module Index — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ + +

Python Module Index

+ +
+ c +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
 
+ c
+ ch_util +
    + ch_util.andata +
    + ch_util.cal_utils +
    + ch_util.chan_monitor +
    + ch_util.data_quality +
    + ch_util.ephemeris +
    + ch_util.finder +
    + ch_util.fluxcat +
    + ch_util.hfbcat +
    + ch_util.holography +
    + ch_util.layout +
    + ch_util.ni_utils +
    + ch_util.plot +
    + ch_util.rfi +
    + ch_util.timing +
    + ch_util.tools +
+ + +
+
+
+ +
+ +
+

© Copyright 2013–2024, CHIME Collaboration.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/reference.html b/docs/reference.html new file mode 100644 index 00000000..8c3f7c95 --- /dev/null +++ b/docs/reference.html @@ -0,0 +1,191 @@ + + + + + + + API Reference — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

API Reference

+

General CHIME utilities

+
+

Submodules

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

andata

Analysis data format

cal_utils

Tools for point source calibration

chan_monitor

Channel quality monitor routines

data_quality

Data quality routines

ephemeris

Ephemeris routines

finder

Data Index Searcher for CHIME

fluxcat

Catalog the measured flux densities of astronomical sources

hfbcat

Catalog of HFB test targets

holography

Holography observation tables.

layout

Interface to the CHIME components and graphs

ni_utils

Tools for noise injection data

plot

Plotting routines for CHIME data

rfi

Tools for RFI flagging

timing

Tools for timing jitter and delay corrections.

tools

Tools for CHIME analysis

+
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/search.html b/docs/search.html new file mode 100644 index 00000000..7f25a498 --- /dev/null +++ b/docs/search.html @@ -0,0 +1,129 @@ + + + + + + Search — ch_util 24.6.0+10.g15fb19e documentation + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ + + + +
+ +
+ +
+
+
+ +
+ +
+

© Copyright 2013–2024, CHIME Collaboration.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/docs/searchindex.js b/docs/searchindex.js new file mode 100644 index 00000000..be19b3ad --- /dev/null +++ b/docs/searchindex.js @@ -0,0 +1 @@ +Search.setIndex({"alltitles": {"API Reference": [[16, null]], "Choose Your Own Adventure": [[9, "choose-your-own-adventure"]], "Correlator Inputs": [[14, "correlator-inputs"]], "Data quality functions": [[3, "data-quality-functions"]], "Data tables": [[5, "data-tables"]], "Database Models": [[9, "database-models"]], "Ephemeris Functions": [[4, "ephemeris-functions"]], "Fringestopping": [[14, "fringestopping"]], "High Level Index Searcher": [[5, "high-level-index-searcher"]], "Housekeeping Inputs": [[14, "housekeeping-inputs"]], "Indices and tables": [[15, "indices-and-tables"]], "Issues": [[3, "issues"]], "Matrix Factorisation": [[14, "matrix-factorisation"]], "Miscellaneous": [[14, "miscellaneous"]], "Miscellaneous Utilities": [[4, "miscellaneous-utilities"]], "Product Array Mapping": [[14, "product-array-mapping"]], "Routines": [[5, "routines"]], "Submodules": [[16, "submodules"]], "Telescope Instances": [[4, "telescope-instances"]], "Time Utilities": [[4, "time-utilities"]], "Welcome to ch_util\u2019s documentation!": [[15, null]], "ch_util.andata": [[0, null]], "ch_util.cal_utils": [[1, null]], "ch_util.chan_monitor": [[2, null]], "ch_util.data_quality": [[3, null]], "ch_util.ephemeris": [[4, null]], "ch_util.finder": [[5, null]], "ch_util.fluxcat": [[6, null]], "ch_util.hfbcat": [[7, null]], "ch_util.holography": [[8, null]], "ch_util.layout": [[9, null]], "ch_util.ni_utils": [[10, null]], "ch_util.plot": [[11, null]], "ch_util.rfi": [[12, null]], "ch_util.timing": [[13, null]], "ch_util.tools": [[14, null]]}, "docnames": ["_autosummary/ch_util.andata", "_autosummary/ch_util.cal_utils", "_autosummary/ch_util.chan_monitor", "_autosummary/ch_util.data_quality", "_autosummary/ch_util.ephemeris", "_autosummary/ch_util.finder", "_autosummary/ch_util.fluxcat", 
"_autosummary/ch_util.hfbcat", "_autosummary/ch_util.holography", "_autosummary/ch_util.layout", "_autosummary/ch_util.ni_utils", "_autosummary/ch_util.plot", "_autosummary/ch_util.rfi", "_autosummary/ch_util.timing", "_autosummary/ch_util.tools", "index", "reference"], "envversion": {"sphinx": 62, "sphinx.domains.c": 3, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 9, "sphinx.domains.index": 1, "sphinx.domains.javascript": 3, "sphinx.domains.math": 2, "sphinx.domains.python": 4, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.intersphinx": 1, "sphinx.ext.viewcode": 1}, "filenames": ["_autosummary/ch_util.andata.rst", "_autosummary/ch_util.cal_utils.rst", "_autosummary/ch_util.chan_monitor.rst", "_autosummary/ch_util.data_quality.rst", "_autosummary/ch_util.ephemeris.rst", "_autosummary/ch_util.finder.rst", "_autosummary/ch_util.fluxcat.rst", "_autosummary/ch_util.hfbcat.rst", "_autosummary/ch_util.holography.rst", "_autosummary/ch_util.layout.rst", "_autosummary/ch_util.ni_utils.rst", "_autosummary/ch_util.plot.rst", "_autosummary/ch_util.rfi.rst", "_autosummary/ch_util.timing.rst", "_autosummary/ch_util.tools.rst", "index.rst", "reference.rst"], "indexentries": {"accept_all_global_flags() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.accept_all_global_flags", false]], "acqs (ch_util.finder.finder property)": [[5, "ch_util.finder.Finder.acqs", false]], "add_measurement() (ch_util.fluxcat.fluxcatalog method)": [[6, "ch_util.fluxcat.FluxCatalog.add_measurement", false]], "alpha (ch_util.timing.timingcorrection property)": [[13, "ch_util.timing.TimingCorrection.alpha", false]], "amp_to_delay (ch_util.timing.timingcorrection property)": [[13, "ch_util.timing.TimingCorrection.amp_to_delay", false]], "andata (in module ch_util.andata)": [[0, "ch_util.andata.AnData", false]], "andata_from_acq1() (in module ch_util.andata)": [[0, "ch_util.andata.andata_from_acq1", false]], "andata_from_archive2() (in module 
ch_util.andata)": [[0, "ch_util.andata.andata_from_archive2", false]], "andataerror": [[0, "ch_util.andata.AnDataError", false]], "antenna (ch_util.tools.antenna attribute)": [[14, "ch_util.tools.Antenna.antenna", false]], "antenna (class in ch_util.tools)": [[14, "ch_util.tools.Antenna", false]], "antenna_to_lna() (in module ch_util.tools)": [[14, "ch_util.tools.antenna_to_lna", false]], "apply_gain() (in module ch_util.tools)": [[14, "ch_util.tools.apply_gain", false]], "apply_timing_correction() (ch_util.timing.timingcorrection method)": [[13, "ch_util.timing.TimingCorrection.apply_timing_correction", false]], "arrayantenna (class in ch_util.tools)": [[14, "ch_util.tools.ArrayAntenna", false]], "as_loaded_data() (ch_util.finder.basedatainterval method)": [[5, "ch_util.finder.BaseDataInterval.as_loaded_data", false]], "as_loaded_data() (ch_util.finder.corrdatainterval method)": [[5, "ch_util.finder.CorrDataInterval.as_loaded_data", false]], "as_reader() (ch_util.finder.basedatainterval method)": [[5, "ch_util.finder.BaseDataInterval.as_reader", false]], "atmel (ch_util.andata.hkdata property)": [[0, "ch_util.andata.HKData.atmel", false]], "atmel (ch_util.tools.hkinput attribute)": [[14, "ch_util.tools.HKInput.atmel", false]], "available_collections() (ch_util.fluxcat.fluxcatalog class method)": [[6, "ch_util.fluxcat.FluxCatalog.available_collections", false]], "basedata (class in ch_util.andata)": [[0, "ch_util.andata.BaseData", false]], "basedatainterval (class in ch_util.finder)": [[5, "ch_util.finder.BaseDataInterval", false]], "basereader (class in ch_util.andata)": [[0, "ch_util.andata.BaseReader", false]], "beam_index2number() (in module ch_util.tools)": [[14, "ch_util.tools.beam_index2number", false]], "blank (class in ch_util.tools)": [[14, "ch_util.tools.Blank", false]], "bmxy_to_hadec() (in module ch_util.ephemeris)": [[4, "ch_util.ephemeris.bmxy_to_hadec", false]], "cal (ch_util.andata.basedata property)": [[0, "ch_util.andata.BaseData.cal", false]], 
"calibrate_temperature() (in module ch_util.tools)": [[14, "ch_util.tools.calibrate_temperature", false]], "calibrationgaindata (class in ch_util.andata)": [[0, "ch_util.andata.CalibrationGainData", false]], "calibrationgaindatainterval (class in ch_util.finder)": [[5, "ch_util.finder.CalibrationGainDataInterval", false]], "calibrationgainreader (class in ch_util.andata)": [[0, "ch_util.andata.CalibrationGainReader", false]], "casa (in module ch_util.ephemeris)": [[4, "ch_util.ephemeris.CasA", false]], "catalog (ch_util.fluxcat.fluxcatalog property)": [[6, "ch_util.fluxcat.FluxCatalog.catalog", false]], "ch_util": [[16, "module-ch_util", false]], "ch_util.andata": [[0, "module-ch_util.andata", false]], "ch_util.cal_utils": [[1, "module-ch_util.cal_utils", false]], "ch_util.chan_monitor": [[2, "module-ch_util.chan_monitor", false]], "ch_util.data_quality": [[3, "module-ch_util.data_quality", false]], "ch_util.ephemeris": [[4, "module-ch_util.ephemeris", false]], "ch_util.finder": [[5, "module-ch_util.finder", false]], "ch_util.fluxcat": [[6, "module-ch_util.fluxcat", false]], "ch_util.hfbcat": [[7, "module-ch_util.hfbcat", false]], "ch_util.holography": [[8, "module-ch_util.holography", false]], "ch_util.layout": [[9, "module-ch_util.layout", false]], "ch_util.ni_utils": [[10, "module-ch_util.ni_utils", false]], "ch_util.plot": [[11, "module-ch_util.plot", false]], "ch_util.rfi": [[12, "module-ch_util.rfi", false]], "ch_util.timing": [[13, "module-ch_util.timing", false]], "ch_util.tools": [[14, "module-ch_util.tools", false]], "chan (ch_util.tools.hkinput attribute)": [[14, "ch_util.tools.HKInput.chan", false]], "chan() (ch_util.andata.hkdata method)": [[0, "ch_util.andata.HKData.chan", false]], "change_chime_location() (in module ch_util.tools)": [[14, "ch_util.tools.change_chime_location", false]], "change_pathfinder_location() (in module ch_util.tools)": [[14, "ch_util.tools.change_pathfinder_location", false]], "chanmonitor (class in ch_util.chan_monitor)": 
[[2, "ch_util.chan_monitor.ChanMonitor", false]], "chime_local_datetime() (in module ch_util.ephemeris)": [[4, "ch_util.ephemeris.chime_local_datetime", false]], "chimeantenna (class in ch_util.tools)": [[14, "ch_util.tools.CHIMEAntenna", false]], "chisq (ch_util.cal_utils.fittransit attribute)": [[1, "ch_util.cal_utils.FitTransit.chisq", false]], "cirs_radec() (in module ch_util.ephemeris)": [[4, "ch_util.ephemeris.cirs_radec", false]], "citation (ch_util.fluxcat.fluxcatalog property)": [[6, "ch_util.fluxcat.FluxCatalog.citation", false]], "closest_of_type() (ch_util.layout.graph method)": [[9, "ch_util.layout.graph.closest_of_type", false]], "cmap() (in module ch_util.tools)": [[14, "ch_util.tools.cmap", false]], "coeff_alpha (ch_util.timing.timingcorrection property)": [[13, "ch_util.timing.TimingCorrection.coeff_alpha", false]], "coeff_tau (ch_util.timing.timingcorrection property)": [[13, "ch_util.timing.TimingCorrection.coeff_tau", false]], "component() (ch_util.layout.graph method)": [[9, "ch_util.layout.graph.component", false]], "compute_time (ch_util.andata.digitalgaindata property)": [[0, "ch_util.andata.DigitalGainData.compute_time", false]], "construct_delay_template() (in module ch_util.timing)": [[13, "ch_util.timing.construct_delay_template", false]], "continuity_test() (ch_util.chan_monitor.feedlocator method)": [[2, "ch_util.chan_monitor.FeedLocator.continuity_test", false]], "convert_time() (ch_util.andata.basedata static method)": [[0, "ch_util.andata.BaseData.convert_time", false]], "corr (ch_util.tools.corrinput attribute)": [[14, "ch_util.tools.CorrInput.corr", false]], "corr_order (ch_util.tools.corrinput attribute)": [[14, "ch_util.tools.CorrInput.corr_order", false]], "corrdata (class in ch_util.andata)": [[0, "ch_util.andata.CorrData", false]], "corrdatainterval (class in ch_util.finder)": [[5, "ch_util.finder.CorrDataInterval", false]], "corrinput (class in ch_util.tools)": [[14, "ch_util.tools.CorrInput", false]], "corrreader (class in 
ch_util.andata)": [[0, "ch_util.andata.CorrReader", false]], "crate (ch_util.tools.corrinput attribute)": [[14, "ch_util.tools.CorrInput.crate", false]], "create_cal() (ch_util.andata.basedata method)": [[0, "ch_util.andata.BaseData.create_cal", false]], "create_flag() (ch_util.andata.basedata method)": [[0, "ch_util.andata.BaseData.create_flag", false]], "create_from_ant_logs() (ch_util.holography.holographyobservation class method)": [[8, "ch_util.holography.HolographyObservation.create_from_ant_logs", false]], "create_from_dict() (ch_util.holography.holographyobservation class method)": [[8, "ch_util.holography.HolographyObservation.create_from_dict", false]], "create_from_post_reports() (ch_util.holography.holographyobservation class method)": [[8, "ch_util.holography.HolographyObservation.create_from_post_reports", false]], "create_reverse_map() (ch_util.andata.basedata method)": [[0, "ch_util.andata.BaseData.create_reverse_map", false]], "curvedpowerlaw (class in ch_util.fluxcat)": [[6, "ch_util.fluxcat.CurvedPowerLaw", false]], "cyga (in module ch_util.ephemeris)": [[4, "ch_util.ephemeris.CygA", false]], "cyl (ch_util.tools.arrayantenna attribute)": [[14, "ch_util.tools.ArrayAntenna.cyl", false]], "data_class (ch_util.andata.basereader attribute)": [[0, "ch_util.andata.BaseReader.data_class", false]], "data_class (ch_util.andata.calibrationgainreader attribute)": [[0, "ch_util.andata.CalibrationGainReader.data_class", false]], "data_class (ch_util.andata.corrreader attribute)": [[0, "ch_util.andata.CorrReader.data_class", false]], "data_class (ch_util.andata.digitalgainreader attribute)": [[0, "ch_util.andata.DigitalGainReader.data_class", false]], "data_class (ch_util.andata.flaginputreader attribute)": [[0, "ch_util.andata.FlagInputReader.data_class", false]], "data_class (ch_util.andata.hkpreader attribute)": [[0, "ch_util.andata.HKPReader.data_class", false]], "data_class (ch_util.andata.hkreader attribute)": [[0, "ch_util.andata.HKReader.data_class", 
false]], "data_class (ch_util.andata.rawadcreader attribute)": [[0, "ch_util.andata.RawADCReader.data_class", false]], "data_class (ch_util.andata.weatherreader attribute)": [[0, "ch_util.andata.WeatherReader.data_class", false]], "data_flag_types (ch_util.finder.finder property)": [[5, "ch_util.finder.Finder.data_flag_types", false]], "dataflagged": [[5, "ch_util.finder.DataFlagged", false]], "datainterval (in module ch_util.finder)": [[5, "ch_util.finder.DataInterval", false]], "dataintervallist (class in ch_util.finder)": [[5, "ch_util.finder.DataIntervalList", false]], "dataset_id (ch_util.andata.corrdata property)": [[0, "ch_util.andata.CorrData.dataset_id", false]], "dataset_name_allowed() (ch_util.andata.basedata method)": [[0, "ch_util.andata.BaseData.dataset_name_allowed", false]], "dataset_name_allowed() (ch_util.andata.weatherdata method)": [[0, "ch_util.andata.WeatherData.dataset_name_allowed", false]], "datasets (ch_util.andata.basedata property)": [[0, "ch_util.andata.BaseData.datasets", false]], "decorrelation() (in module ch_util.tools)": [[14, "ch_util.tools.decorrelation", false]], "default() (ch_util.fluxcat.numpyencoder method)": [[6, "ch_util.fluxcat.NumpyEncoder.default", false]], "del_reverse_map() (ch_util.andata.basedata method)": [[0, "ch_util.andata.BaseData.del_reverse_map", false]], "delay (ch_util.tools.corrinput property)": [[14, "ch_util.tools.CorrInput.delay", false]], "delay() (in module ch_util.tools)": [[14, "ch_util.tools.delay", false]], "delete() (ch_util.fluxcat.fluxcatalog class method)": [[6, "ch_util.fluxcat.FluxCatalog.delete", false]], "delete_coeff() (ch_util.timing.timingcorrection method)": [[13, "ch_util.timing.TimingCorrection.delete_coeff", false]], "deriv() (ch_util.cal_utils.fitpolyrealpolyimag method)": [[1, "ch_util.cal_utils.FitPolyRealPolyImag.deriv", false]], "digitalgaindata (class in ch_util.andata)": [[0, "ch_util.andata.DigitalGainData", false]], "digitalgaindatainterval (class in ch_util.finder)": [[5, 
"ch_util.finder.DigitalGainDataInterval", false]], "digitalgainreader (class in ch_util.andata)": [[0, "ch_util.andata.DigitalGainReader", false]], "doesnotexist (ch_util.holography.holographyobservation attribute)": [[8, "ch_util.holography.HolographyObservation.DoesNotExist", false]], "doesnotexist (ch_util.holography.holographysource attribute)": [[8, "ch_util.holography.HolographySource.DoesNotExist", false]], "dump() (ch_util.fluxcat.fluxcatalog class method)": [[6, "ch_util.fluxcat.FluxCatalog.dump", false]], "eflux (ch_util.fluxcat.fluxcatalog property)": [[6, "ch_util.fluxcat.FluxCatalog.eflux", false]], "eigen_decomposition() (in module ch_util.timing)": [[13, "ch_util.timing.eigen_decomposition", false]], "eigh_no_diagonal() (in module ch_util.tools)": [[14, "ch_util.tools.eigh_no_diagonal", false]], "ensure_list() (in module ch_util.tools)": [[14, "ch_util.tools.ensure_list", false]], "enter_ltf() (in module ch_util.layout)": [[9, "ch_util.layout.enter_ltf", false]], "epoch (ch_util.fluxcat.fluxcatalog property)": [[6, "ch_util.fluxcat.FluxCatalog.epoch", false]], "estimate_directional_scale() (in module ch_util.cal_utils)": [[1, "ch_util.cal_utils.estimate_directional_scale", false]], "exclude_data_flag_type() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.exclude_data_flag_type", false]], "exclude_daytime() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.exclude_daytime", false]], "exclude_global_flag() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.exclude_global_flag", false]], "exclude_nighttime() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.exclude_nighttime", false]], "exclude_ra_interval() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.exclude_RA_interval", false]], "exclude_sun() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.exclude_sun", false]], "exclude_time_interval() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.exclude_time_interval", 
false]], "exclude_transits() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.exclude_transits", false]], "fake_tone_database() (in module ch_util.tools)": [[14, "ch_util.tools.fake_tone_database", false]], "fast_pack_product_array() (in module ch_util.tools)": [[14, "ch_util.tools.fast_pack_product_array", false]], "feedlocator (class in ch_util.chan_monitor)": [[2, "ch_util.chan_monitor.FeedLocator", false]], "fields (ch_util.fluxcat.fluxcatalog attribute)": [[6, "ch_util.fluxcat.FluxCatalog.fields", false]], "fields (ch_util.hfbcat.hfbcatalog attribute)": [[7, "ch_util.hfbcat.HFBCatalog.fields", false]], "files_in_range() (in module ch_util.finder)": [[5, "ch_util.finder.files_in_range", false]], "filter_acqs() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.filter_acqs", false]], "filter_acqs_by_files() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.filter_acqs_by_files", false]], "finder (class in ch_util.finder)": [[5, "ch_util.finder.Finder", false]], "fit() (ch_util.cal_utils.fittransit method)": [[1, "ch_util.cal_utils.FitTransit.fit", false]], "fit_histogram() (in module ch_util.cal_utils)": [[1, "ch_util.cal_utils.fit_histogram", false]], "fit_model() (ch_util.fluxcat.fluxcatalog method)": [[6, "ch_util.fluxcat.FluxCatalog.fit_model", false]], "fit_point_source_map() (in module ch_util.cal_utils)": [[1, "ch_util.cal_utils.fit_point_source_map", false]], "fit_poly_to_phase() (in module ch_util.timing)": [[13, "ch_util.timing.fit_poly_to_phase", false]], "fitampphase (class in ch_util.cal_utils)": [[1, "ch_util.cal_utils.FitAmpPhase", false]], "fitgaussamppolyphase (class in ch_util.cal_utils)": [[1, "ch_util.cal_utils.FitGaussAmpPolyPhase", false]], "fitpoly (class in ch_util.cal_utils)": [[1, "ch_util.cal_utils.FitPoly", false]], "fitpolylogamppolyphase (class in ch_util.cal_utils)": [[1, "ch_util.cal_utils.FitPolyLogAmpPolyPhase", false]], "fitpolyrealpolyimag (class in ch_util.cal_utils)": [[1, 
"ch_util.cal_utils.FitPolyRealPolyImag", false]], "fitrealimag (class in ch_util.cal_utils)": [[1, "ch_util.cal_utils.FitRealImag", false]], "fitspectrum (class in ch_util.fluxcat)": [[6, "ch_util.fluxcat.FitSpectrum", false]], "fittransit (class in ch_util.cal_utils)": [[1, "ch_util.cal_utils.FitTransit", false]], "flag (ch_util.andata.flaginputdata property)": [[0, "ch_util.andata.FlagInputData.flag", false]], "flag (ch_util.fluxcat.fluxcatalog property)": [[6, "ch_util.fluxcat.FluxCatalog.flag", false]], "flag (ch_util.tools.arrayantenna attribute)": [[14, "ch_util.tools.ArrayAntenna.flag", false]], "flag_dataset() (in module ch_util.rfi)": [[12, "ch_util.rfi.flag_dataset", false]], "flag_outliers() (in module ch_util.cal_utils)": [[1, "ch_util.cal_utils.flag_outliers", false]], "flaginputdata (class in ch_util.andata)": [[0, "ch_util.andata.FlagInputData", false]], "flaginputdatainterval (class in ch_util.finder)": [[5, "ch_util.finder.FlagInputDataInterval", false]], "flaginputreader (class in ch_util.andata)": [[0, "ch_util.andata.FlagInputReader", false]], "flags (ch_util.andata.basedata property)": [[0, "ch_util.andata.BaseData.flags", false]], "flux (ch_util.fluxcat.fluxcatalog property)": [[6, "ch_util.fluxcat.FluxCatalog.flux", false]], "fluxcatalog (class in ch_util.fluxcat)": [[6, "ch_util.fluxcat.FluxCatalog", false]], "format_source_name() (in module ch_util.fluxcat)": [[6, "ch_util.fluxcat.format_source_name", false]], "freq (ch_util.andata.corrdata property)": [[0, "ch_util.andata.CorrData.freq", false]], "freq (ch_util.andata.corrreader property)": [[0, "ch_util.andata.CorrReader.freq", false]], "freq (ch_util.andata.gaindata property)": [[0, "ch_util.andata.GainData.freq", false]], "freq (ch_util.fluxcat.fluxcatalog property)": [[6, "ch_util.fluxcat.FluxCatalog.freq", false]], "freq (ch_util.timing.timingcorrection property)": [[13, "ch_util.timing.TimingCorrection.freq", false]], "freq_sel (ch_util.andata.corrreader property)": [[0, 
"ch_util.andata.CorrReader.freq_sel", false]], "frequency_mask() (in module ch_util.rfi)": [[12, "ch_util.rfi.frequency_mask", false]], "fringestop_time() (in module ch_util.tools)": [[14, "ch_util.tools.fringestop_time", false]], "from_acq_h5() (ch_util.andata.basedata class method)": [[0, "ch_util.andata.BaseData.from_acq_h5", false]], "from_acq_h5() (ch_util.andata.corrdata class method)": [[0, "ch_util.andata.CorrData.from_acq_h5", false]], "from_acq_h5() (ch_util.andata.hkdata class method)": [[0, "ch_util.andata.HKData.from_acq_h5", false]], "from_acq_h5() (ch_util.andata.hkpdata class method)": [[0, "ch_util.andata.HKPData.from_acq_h5", false]], "from_acq_h5() (ch_util.timing.timingdata class method)": [[13, "ch_util.timing.TimingData.from_acq_h5", false]], "from_acq_h5_fast() (ch_util.andata.corrdata class method)": [[0, "ch_util.andata.CorrData.from_acq_h5_fast", false]], "from_db() (ch_util.layout.graph class method)": [[9, "ch_util.layout.graph.from_db", false]], "from_dict() (ch_util.fluxcat.fluxcatalog class method)": [[6, "ch_util.fluxcat.FluxCatalog.from_dict", false]], "from_dict() (ch_util.timing.timingcorrection class method)": [[13, "ch_util.timing.TimingCorrection.from_dict", false]], "from_graph() (ch_util.layout.graph class method)": [[9, "ch_util.layout.graph.from_graph", false]], "from_lst() (ch_util.holography.holographyobservation class method)": [[8, "ch_util.holography.HolographyObservation.from_lst", false]], "from_predef() (ch_util.layout.subgraph_spec class method)": [[9, "ch_util.layout.subgraph_spec.from_predef", false]], "fromdata() (ch_util.chan_monitor.chanmonitor class method)": [[2, "ch_util.chan_monitor.ChanMonitor.fromdata", false]], "fromdate() (ch_util.chan_monitor.chanmonitor class method)": [[2, "ch_util.chan_monitor.ChanMonitor.fromdate", false]], "func_2d_gauss() (in module ch_util.cal_utils)": [[1, "ch_util.cal_utils.func_2d_gauss", false]], "func_2d_sinc_gauss() (in module ch_util.cal_utils)": [[1, 
"ch_util.cal_utils.func_2d_sinc_gauss", false]], "func_dirty_gauss() (in module ch_util.cal_utils)": [[1, "ch_util.cal_utils.func_dirty_gauss", false]], "func_real_dirty_gauss() (in module ch_util.cal_utils)": [[1, "ch_util.cal_utils.func_real_dirty_gauss", false]], "gain (ch_util.andata.calibrationgaindata property)": [[0, "ch_util.andata.CalibrationGainData.gain", false]], "gain (ch_util.andata.corrdata property)": [[0, "ch_util.andata.CorrData.gain", false]], "gain (ch_util.andata.digitalgaindata property)": [[0, "ch_util.andata.DigitalGainData.gain", false]], "gain_coeff (ch_util.andata.digitalgaindata property)": [[0, "ch_util.andata.DigitalGainData.gain_coeff", false]], "gain_exp (ch_util.andata.digitalgaindata property)": [[0, "ch_util.andata.DigitalGainData.gain_exp", false]], "gaindata (class in ch_util.andata)": [[0, "ch_util.andata.GainData", false]], "gainflagdata (class in ch_util.andata)": [[0, "ch_util.andata.GainFlagData", false]], "gains2utvec() (in module ch_util.ni_utils)": [[10, "ch_util.ni_utils.gains2utvec", false]], "gains2utvec_tf() (in module ch_util.ni_utils)": [[10, "ch_util.ni_utils.gains2utvec_tf", false]], "galt_pointing_model_dec() (in module ch_util.ephemeris)": [[4, "ch_util.ephemeris.galt_pointing_model_dec", false]], "galt_pointing_model_ha() (in module ch_util.ephemeris)": [[4, "ch_util.ephemeris.galt_pointing_model_ha", false]], "gboantenna (class in ch_util.tools)": [[14, "ch_util.tools.GBOAntenna", false]], "gen_prod_sel() (in module ch_util.ni_utils)": [[10, "ch_util.ni_utils.gen_prod_sel", false]], "get() (ch_util.fluxcat.fluxcatalog class method)": [[6, "ch_util.fluxcat.FluxCatalog.get", false]], "get_alpha() (ch_util.timing.timingcorrection method)": [[13, "ch_util.timing.TimingCorrection.get_alpha", false]], "get_als_gains() (ch_util.ni_utils.ni_data method)": [[10, "ch_util.ni_utils.ni_data.get_als_gains", false]], "get_autocorrelations() (in module ch_util.rfi)": [[12, "ch_util.rfi.get_autocorrelations", false]], 
"get_c_ydist() (ch_util.chan_monitor.feedlocator method)": [[2, "ch_util.chan_monitor.FeedLocator.get_c_ydist", false]], "get_c_ydist_perfreq() (ch_util.chan_monitor.feedlocator method)": [[2, "ch_util.chan_monitor.FeedLocator.get_c_ydist_perfreq", false]], "get_correlator_inputs() (in module ch_util.tools)": [[14, "ch_util.tools.get_correlator_inputs", false]], "get_default_frequency_map_stream() (in module ch_util.tools)": [[14, "ch_util.tools.get_default_frequency_map_stream", false]], "get_doppler_shifted_freq() (in module ch_util.ephemeris)": [[4, "ch_util.ephemeris.get_doppler_shifted_freq", false]], "get_doppler_shifted_freq() (in module ch_util.hfbcat)": [[7, "ch_util.hfbcat.get_doppler_shifted_freq", false]], "get_epoch() (in module ch_util.fluxcat)": [[6, "ch_util.fluxcat.get_epoch", false]], "get_feed_polarisations() (in module ch_util.tools)": [[14, "ch_util.tools.get_feed_polarisations", false]], "get_feed_positions() (in module ch_util.tools)": [[14, "ch_util.tools.get_feed_positions", false]], "get_gain() (ch_util.timing.timingcorrection method)": [[13, "ch_util.timing.TimingCorrection.get_gain", false]], "get_global_flag_times() (in module ch_util.layout)": [[9, "ch_util.layout.get_global_flag_times", false]], "get_holographic_index() (in module ch_util.tools)": [[14, "ch_util.tools.get_holographic_index", false]], "get_ni_gains() (ch_util.ni_utils.ni_data method)": [[10, "ch_util.ni_utils.ni_data.get_ni_gains", false]], "get_noise_channel() (in module ch_util.tools)": [[14, "ch_util.tools.get_noise_channel", false]], "get_noise_source_index() (in module ch_util.tools)": [[14, "ch_util.tools.get_noise_source_index", false]], "get_range_rate() (in module ch_util.ephemeris)": [[4, "ch_util.ephemeris.get_range_rate", false]], "get_reference_times_dataset_id() (in module ch_util.cal_utils)": [[1, "ch_util.cal_utils.get_reference_times_dataset_id", false]], "get_reference_times_file() (in module ch_util.cal_utils)": [[1, 
"ch_util.cal_utils.get_reference_times_file", false]], "get_results() (ch_util.chan_monitor.chanmonitor method)": [[2, "ch_util.chan_monitor.ChanMonitor.get_results", false]], "get_results() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.get_results", false]], "get_results_acq() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.get_results_acq", false]], "get_source_dictionary() (in module ch_util.ephemeris)": [[4, "ch_util.ephemeris.get_source_dictionary", false]], "get_source_index() (ch_util.andata.calibrationgaindata method)": [[0, "ch_util.andata.CalibrationGainData.get_source_index", false]], "get_source_index() (ch_util.andata.flaginputdata method)": [[0, "ch_util.andata.FlagInputData.get_source_index", false]], "get_stacked_alpha() (ch_util.timing.timingcorrection method)": [[13, "ch_util.timing.TimingCorrection.get_stacked_alpha", false]], "get_stacked_tau() (ch_util.timing.timingcorrection method)": [[13, "ch_util.timing.TimingCorrection.get_stacked_tau", false]], "get_sunfree_srcs() (ch_util.chan_monitor.chanmonitor method)": [[2, "ch_util.chan_monitor.ChanMonitor.get_sunfree_srcs", false]], "get_tau() (ch_util.timing.timingcorrection method)": [[13, "ch_util.timing.TimingCorrection.get_tau", false]], "get_timing_correction() (ch_util.timing.timingcorrection method)": [[13, "ch_util.timing.TimingCorrection.get_timing_correction", false]], "get_xdist() (ch_util.chan_monitor.feedlocator method)": [[2, "ch_util.chan_monitor.FeedLocator.get_xdist", false]], "global_flag_mode (ch_util.finder.finder property)": [[5, "ch_util.finder.Finder.global_flag_mode", false]], "global_flags_between() (in module ch_util.layout)": [[9, "ch_util.layout.global_flags_between", false]], "good_channels() (in module ch_util.data_quality)": [[3, "ch_util.data_quality.good_channels", false]], "good_prod_freq() (ch_util.chan_monitor.feedlocator method)": [[2, "ch_util.chan_monitor.FeedLocator.good_prod_freq", false]], "graph (class in ch_util.layout)": [[9, 
"ch_util.layout.graph", false]], "group_name_allowed() (ch_util.andata.basedata method)": [[0, "ch_util.andata.BaseData.group_name_allowed", false]], "group_name_allowed() (ch_util.andata.weatherdata method)": [[0, "ch_util.andata.WeatherData.group_name_allowed", false]], "guess_fwhm() (in module ch_util.cal_utils)": [[1, "ch_util.cal_utils.guess_fwhm", false]], "hadec_to_bmxy() (in module ch_util.ephemeris)": [[4, "ch_util.ephemeris.hadec_to_bmxy", false]], "has_amplitude (ch_util.timing.timingcorrection property)": [[13, "ch_util.timing.TimingCorrection.has_amplitude", false]], "has_coeff_alpha (ch_util.timing.timingcorrection property)": [[13, "ch_util.timing.TimingCorrection.has_coeff_alpha", false]], "has_coeff_tau (ch_util.timing.timingcorrection property)": [[13, "ch_util.timing.TimingCorrection.has_coeff_tau", false]], "has_num_freq (ch_util.timing.timingcorrection property)": [[13, "ch_util.timing.TimingCorrection.has_num_freq", false]], "hcoantenna (class in ch_util.tools)": [[14, "ch_util.tools.HCOAntenna", false]], "hfbcatalog (class in ch_util.hfbcat)": [[7, "ch_util.hfbcat.HFBCatalog", false]], "hide (ch_util.layout.subgraph_spec property)": [[9, "ch_util.layout.subgraph_spec.hide", false]], "highpass_delay_filter() (in module ch_util.rfi)": [[12, "ch_util.rfi.highpass_delay_filter", false]], "hk_to_sensor() (in module ch_util.tools)": [[14, "ch_util.tools.hk_to_sensor", false]], "hkdata (class in ch_util.andata)": [[0, "ch_util.andata.HKData", false]], "hkdatainterval (class in ch_util.finder)": [[5, "ch_util.finder.HKDataInterval", false]], "hkinput (class in ch_util.tools)": [[14, "ch_util.tools.HKInput", false]], "hkpdata (class in ch_util.andata)": [[0, "ch_util.andata.HKPData", false]], "hkpreader (class in ch_util.andata)": [[0, "ch_util.andata.HKPReader", false]], "hkreader (class in ch_util.andata)": [[0, "ch_util.andata.HKReader", false]], "holographyantenna (class in ch_util.tools)": [[14, "ch_util.tools.HolographyAntenna", false]], 
"holographyobservation (class in ch_util.holography)": [[8, "ch_util.holography.HolographyObservation", false]], "holographysource (class in ch_util.holography)": [[8, "ch_util.holography.HolographySource", false]], "icmap() (in module ch_util.tools)": [[14, "ch_util.tools.icmap", false]], "id (ch_util.tools.corrinput property)": [[14, "ch_util.tools.CorrInput.id", false]], "include_26m_obs() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.include_26m_obs", false]], "include_global_flag() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.include_global_flag", false]], "include_ra_interval() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.include_RA_interval", false]], "include_time_interval() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.include_time_interval", false]], "include_transits() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.include_transits", false]], "input (ch_util.andata.corrreader property)": [[0, "ch_util.andata.CorrReader.input", false]], "input (ch_util.andata.gainflagdata property)": [[0, "ch_util.andata.GainFlagData.input", false]], "input (ch_util.timing.timingcorrection property)": [[13, "ch_util.timing.TimingCorrection.input", false]], "input_flags (ch_util.andata.corrdata property)": [[0, "ch_util.andata.CorrData.input_flags", false]], "input_sel (ch_util.andata.corrreader property)": [[0, "ch_util.andata.CorrReader.input_sel", false]], "input_sn (ch_util.tools.corrinput attribute)": [[14, "ch_util.tools.CorrInput.input_sn", false]], "interpolate_gain() (in module ch_util.cal_utils)": [[1, "ch_util.cal_utils.interpolate_gain", false]], "interpolate_gain_quiet() (in module ch_util.cal_utils)": [[1, "ch_util.cal_utils.interpolate_gain_quiet", false]], "invert_no_zero() (in module ch_util.tools)": [[14, "ch_util.tools.invert_no_zero", false]], "is_array() (in module ch_util.tools)": [[14, "ch_util.tools.is_array", false]], "is_array_on() (in module ch_util.tools)": [[14, 
"ch_util.tools.is_array_on", false]], "is_array_x() (in module ch_util.tools)": [[14, "ch_util.tools.is_array_x", false]], "is_array_y() (in module ch_util.tools)": [[14, "ch_util.tools.is_array_y", false]], "is_chime() (in module ch_util.tools)": [[14, "ch_util.tools.is_chime", false]], "is_chime_on() (in module ch_util.tools)": [[14, "ch_util.tools.is_chime_on", false]], "is_holographic() (in module ch_util.tools)": [[14, "ch_util.tools.is_holographic", false]], "is_noise_source() (in module ch_util.tools)": [[14, "ch_util.tools.is_noise_source", false]], "is_pathfinder() (in module ch_util.tools)": [[14, "ch_util.tools.is_pathfinder", false]], "iter() (ch_util.fluxcat.fluxcatalog class method)": [[6, "ch_util.fluxcat.FluxCatalog.iter", false]], "iter_loaded_data() (ch_util.finder.dataintervallist method)": [[5, "ch_util.finder.DataIntervalList.iter_loaded_data", false]], "iter_reader() (ch_util.finder.dataintervallist method)": [[5, "ch_util.finder.DataIntervalList.iter_reader", false]], "iterative_hpf_masking() (in module ch_util.rfi)": [[12, "ch_util.rfi.iterative_hpf_masking", false]], "iteritems() (ch_util.fluxcat.fluxcatalog class method)": [[6, "ch_util.fluxcat.FluxCatalog.iteritems", false]], "json_numpy_obj_hook() (in module ch_util.fluxcat)": [[6, "ch_util.fluxcat.json_numpy_obj_hook", false]], "keys() (ch_util.fluxcat.fluxcatalog class method)": [[6, "ch_util.fluxcat.FluxCatalog.keys", false]], "kkoantenna (class in ch_util.tools)": [[14, "ch_util.tools.KKOAntenna", false]], "ktrprod() (in module ch_util.ni_utils)": [[10, "ch_util.ni_utils.ktrprod", false]], "len() (ch_util.fluxcat.fluxcatalog class method)": [[6, "ch_util.fluxcat.FluxCatalog.len", false]], "lna_to_antenna() (in module ch_util.tools)": [[14, "ch_util.tools.lna_to_antenna", false]], "load() (ch_util.fluxcat.fluxcatalog class method)": [[6, "ch_util.fluxcat.FluxCatalog.load", false]], "load_timing_correction() (in module ch_util.timing)": [[13, "ch_util.timing.load_timing_correction", 
false]], "loaded_collections() (ch_util.fluxcat.fluxcatalog class method)": [[6, "ch_util.fluxcat.FluxCatalog.loaded_collections", false]], "ltf() (ch_util.layout.graph method)": [[9, "ch_util.layout.graph.ltf", false]], "lunar_rising() (in module ch_util.ephemeris)": [[4, "ch_util.ephemeris.lunar_rising", false]], "lunar_setting() (in module ch_util.ephemeris)": [[4, "ch_util.ephemeris.lunar_setting", false]], "lunar_transit() (in module ch_util.ephemeris)": [[4, "ch_util.ephemeris.lunar_transit", false]], "mad_cut_1d() (in module ch_util.rfi)": [[12, "ch_util.rfi.mad_cut_1d", false]], "mad_cut_2d() (in module ch_util.rfi)": [[12, "ch_util.rfi.mad_cut_2d", false]], "mad_cut_rolling() (in module ch_util.rfi)": [[12, "ch_util.rfi.mad_cut_rolling", false]], "map_input_to_noise_source() (in module ch_util.timing)": [[13, "ch_util.timing.map_input_to_noise_source", false]], "mat2utvec() (in module ch_util.ni_utils)": [[10, "ch_util.ni_utils.mat2utvec", false]], "metafluxcatalog (class in ch_util.fluxcat)": [[6, "ch_util.fluxcat.MetaFluxCatalog", false]], "metrics() (ch_util.andata.hkpdata static method)": [[0, "ch_util.andata.HKPData.metrics", false]], "min_interval (ch_util.finder.finder property)": [[5, "ch_util.finder.Finder.min_interval", false]], "model_lookup (ch_util.fluxcat.fluxcatalog attribute)": [[6, "ch_util.fluxcat.FluxCatalog.model_lookup", false]], "model_poly_phase() (in module ch_util.timing)": [[13, "ch_util.timing.model_poly_phase", false]], "module": [[0, "module-ch_util.andata", false], [1, "module-ch_util.cal_utils", false], [2, "module-ch_util.chan_monitor", false], [3, "module-ch_util.data_quality", false], [4, "module-ch_util.ephemeris", false], [5, "module-ch_util.finder", false], [6, "module-ch_util.fluxcat", false], [7, "module-ch_util.hfbcat", false], [8, "module-ch_util.holography", false], [9, "module-ch_util.layout", false], [10, "module-ch_util.ni_utils", false], [11, "module-ch_util.plot", false], [12, "module-ch_util.rfi", false], 
[13, "module-ch_util.timing", false], [14, "module-ch_util.tools", false], [16, "module-ch_util", false]], "mux (ch_util.andata.hkdata property)": [[0, "ch_util.andata.HKData.mux", false]], "mux (ch_util.tools.hkinput attribute)": [[14, "ch_util.tools.HKInput.mux", false]], "n (ch_util.cal_utils.fittransit property)": [[1, "ch_util.cal_utils.FitTransit.N", false]], "name (ch_util.holography.holographysource attribute)": [[8, "ch_util.holography.HolographySource.name", false]], "nchan() (ch_util.andata.hkdata method)": [[0, "ch_util.andata.HKData.nchan", false]], "ncomponent (ch_util.cal_utils.fittransit property)": [[1, "ch_util.cal_utils.FitTransit.ncomponent", false]], "ndof (ch_util.cal_utils.fittransit attribute)": [[1, "ch_util.cal_utils.FitTransit.ndof", false]], "ndofa (ch_util.cal_utils.fitgaussamppolyphase property)": [[1, "ch_util.cal_utils.FitGaussAmpPolyPhase.ndofa", false]], "ndofa (ch_util.cal_utils.fitpolylogamppolyphase property)": [[1, "ch_util.cal_utils.FitPolyLogAmpPolyPhase.ndofa", false]], "ndofi (ch_util.cal_utils.fitpolyrealpolyimag property)": [[1, "ch_util.cal_utils.FitPolyRealPolyImag.ndofi", false]], "ndofp (ch_util.cal_utils.fitgaussamppolyphase property)": [[1, "ch_util.cal_utils.FitGaussAmpPolyPhase.ndofp", false]], "ndofp (ch_util.cal_utils.fitpolylogamppolyphase property)": [[1, "ch_util.cal_utils.FitPolyLogAmpPolyPhase.ndofp", false]], "ndofr (ch_util.cal_utils.fitpolyrealpolyimag property)": [[1, "ch_util.cal_utils.FitPolyRealPolyImag.ndofr", false]], "neighbour_of_type() (ch_util.layout.graph method)": [[9, "ch_util.layout.graph.neighbour_of_type", false]], "nfreq (ch_util.andata.corrdata property)": [[0, "ch_util.andata.CorrData.nfreq", false]], "nfreq (ch_util.andata.gaindata property)": [[0, "ch_util.andata.GainData.nfreq", false]], "ni_als() (in module ch_util.ni_utils)": [[10, "ch_util.ni_utils.ni_als", false]], "ni_data (class in ch_util.ni_utils)": [[10, "ch_util.ni_utils.ni_data", false]], "ni_gains_evalues() (in module 
ch_util.ni_utils)": [[10, "ch_util.ni_utils.ni_gains_evalues", false]], "ni_gains_evalues_tf() (in module ch_util.ni_utils)": [[10, "ch_util.ni_utils.ni_gains_evalues_tf", false]], "ninput (ch_util.andata.gainflagdata property)": [[0, "ch_util.andata.GainFlagData.ninput", false]], "nmux (ch_util.andata.hkdata property)": [[0, "ch_util.andata.HKData.nmux", false]], "node_property() (ch_util.layout.graph method)": [[9, "ch_util.layout.graph.node_property", false]], "noise_source (ch_util.timing.timingcorrection property)": [[13, "ch_util.timing.TimingCorrection.noise_source", false]], "noisesource (class in ch_util.tools)": [[14, "ch_util.tools.NoiseSource", false]], "normalise_correlations() (in module ch_util.tools)": [[14, "ch_util.tools.normalise_correlations", false]], "notes (ch_util.holography.holographyobservation attribute)": [[8, "ch_util.holography.HolographyObservation.notes", false]], "nparam (ch_util.cal_utils.fitampphase property)": [[1, "ch_util.cal_utils.FitAmpPhase.nparam", false]], "nparam (ch_util.cal_utils.fitrealimag property)": [[1, "ch_util.cal_utils.FitRealImag.nparam", false]], "nparam (ch_util.cal_utils.fittransit property)": [[1, "ch_util.cal_utils.FitTransit.nparam", false]], "nprod (ch_util.andata.corrdata property)": [[0, "ch_util.andata.CorrData.nprod", false]], "nsource (ch_util.andata.calibrationgaindata property)": [[0, "ch_util.andata.CalibrationGainData.nsource", false]], "nsource (ch_util.timing.timingcorrection property)": [[13, "ch_util.timing.TimingCorrection.nsource", false]], "ntime (ch_util.andata.basedata property)": [[0, "ch_util.andata.BaseData.ntime", false]], "ntime (ch_util.andata.gainflagdata property)": [[0, "ch_util.andata.GainFlagData.ntime", false]], "num_freq (ch_util.timing.timingcorrection property)": [[13, "ch_util.timing.TimingCorrection.num_freq", false]], "number_deviations() (in module ch_util.rfi)": [[12, "ch_util.rfi.number_deviations", false]], "numpyencoder (class in ch_util.fluxcat)": [[6, 
"ch_util.fluxcat.NumpyEncoder", false]], "object_coords() (in module ch_util.ephemeris)": [[4, "ch_util.ephemeris.object_coords", false]], "offline() (ch_util.finder.finder class method)": [[5, "ch_util.finder.Finder.offline", false]], "oneway (ch_util.layout.subgraph_spec property)": [[9, "ch_util.layout.subgraph_spec.oneway", false]], "only_chime_weather() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.only_chime_weather", false]], "only_corr() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.only_corr", false]], "only_digitalgain() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.only_digitalgain", false]], "only_flaginput() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.only_flaginput", false]], "only_gain() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.only_gain", false]], "only_hfb() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.only_hfb", false]], "only_hk() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.only_hk", false]], "only_hkp() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.only_hkp", false]], "only_rawadc() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.only_rawadc", false]], "only_weather() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.only_weather", false]], "order_frequency_map_stream() (in module ch_util.tools)": [[14, "ch_util.tools.order_frequency_map_stream", false]], "pack_product_array() (in module ch_util.tools)": [[14, "ch_util.tools.pack_product_array", false]], "param (ch_util.cal_utils.fittransit attribute)": [[1, "ch_util.cal_utils.FitTransit.param", false]], "param (ch_util.fluxcat.fitspectrum attribute)": [[6, "ch_util.fluxcat.FitSpectrum.param", false]], "param_corr (ch_util.cal_utils.fittransit property)": [[1, "ch_util.cal_utils.FitTransit.param_corr", false]], "param_cov (ch_util.cal_utils.fittransit attribute)": [[1, "ch_util.cal_utils.FitTransit.param_cov", false]], "param_cov 
(ch_util.fluxcat.fitspectrum attribute)": [[6, "ch_util.fluxcat.FitSpectrum.param_cov", false]], "parameter_names (ch_util.cal_utils.fitgaussamppolyphase property)": [[1, "ch_util.cal_utils.FitGaussAmpPolyPhase.parameter_names", false]], "parameter_names (ch_util.cal_utils.fitpolylogamppolyphase property)": [[1, "ch_util.cal_utils.FitPolyLogAmpPolyPhase.parameter_names", false]], "parameter_names (ch_util.cal_utils.fitpolyrealpolyimag property)": [[1, "ch_util.cal_utils.FitPolyRealPolyImag.parameter_names", false]], "parameter_names (ch_util.cal_utils.fittransit property)": [[1, "ch_util.cal_utils.FitTransit.parameter_names", false]], "params_ft() (ch_util.chan_monitor.feedlocator method)": [[2, "ch_util.chan_monitor.FeedLocator.params_ft", false]], "parse_ant_logs() (ch_util.holography.holographyobservation class method)": [[8, "ch_util.holography.HolographyObservation.parse_ant_logs", false]], "parse_chime_serial() (in module ch_util.tools)": [[14, "ch_util.tools.parse_chime_serial", false]], "parse_date() (in module ch_util.ephemeris)": [[4, "ch_util.ephemeris.parse_date", false]], "parse_old_serial() (in module ch_util.tools)": [[14, "ch_util.tools.parse_old_serial", false]], "parse_pathfinder_serial() (in module ch_util.tools)": [[14, "ch_util.tools.parse_pathfinder_serial", false]], "parse_post_report() (ch_util.holography.holographyobservation class method)": [[8, "ch_util.holography.HolographyObservation.parse_post_report", false]], "pathfinderantenna (class in ch_util.tools)": [[14, "ch_util.tools.PathfinderAntenna", false]], "pcoantenna (in module ch_util.tools)": [[14, "ch_util.tools.PCOAntenna", false]], "peak() (ch_util.cal_utils.fitgaussamppolyphase method)": [[1, "ch_util.cal_utils.FitGaussAmpPolyPhase.peak", false]], "peak() (ch_util.cal_utils.fitpolylogamppolyphase method)": [[1, "ch_util.cal_utils.FitPolyLogAmpPolyPhase.peak", false]], "peak() (ch_util.cal_utils.fitpolyrealpolyimag method)": [[1, "ch_util.cal_utils.FitPolyRealPolyImag.peak", 
false]], "peak() (ch_util.cal_utils.fittransit method)": [[1, "ch_util.cal_utils.FitTransit.peak", false]], "peak_ra() (in module ch_util.ephemeris)": [[4, "ch_util.ephemeris.peak_RA", false]], "plot() (ch_util.fluxcat.fluxcatalog method)": [[6, "ch_util.fluxcat.FluxCatalog.plot", false]], "pol (ch_util.tools.arrayantenna attribute)": [[14, "ch_util.tools.ArrayAntenna.pol", false]], "pol (ch_util.tools.holographyantenna attribute)": [[14, "ch_util.tools.HolographyAntenna.pol", false]], "pos (ch_util.tools.arrayantenna attribute)": [[14, "ch_util.tools.ArrayAntenna.pos", false]], "pos (ch_util.tools.holographyantenna attribute)": [[14, "ch_util.tools.HolographyAntenna.pos", false]], "powered (ch_util.tools.pathfinderantenna attribute)": [[14, "ch_util.tools.PathfinderAntenna.powered", false]], "predict() (ch_util.cal_utils.fittransit method)": [[1, "ch_util.cal_utils.FitTransit.predict", false]], "predict() (ch_util.fluxcat.fitspectrum method)": [[6, "ch_util.fluxcat.FitSpectrum.predict", false]], "predict_flux() (ch_util.fluxcat.fluxcatalog method)": [[6, "ch_util.fluxcat.FluxCatalog.predict_flux", false]], "predict_uncertainty() (ch_util.fluxcat.fluxcatalog method)": [[6, "ch_util.fluxcat.FluxCatalog.predict_uncertainty", false]], "print_acq_info() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.print_acq_info", false]], "print_available_collections() (ch_util.fluxcat.fluxcatalog class method)": [[6, "ch_util.fluxcat.FluxCatalog.print_available_collections", false]], "print_loaded_collections() (ch_util.fluxcat.fluxcatalog class method)": [[6, "ch_util.fluxcat.FluxCatalog.print_loaded_collections", false]], "print_measurements() (ch_util.fluxcat.fluxcatalog method)": [[6, "ch_util.fluxcat.FluxCatalog.print_measurements", false]], "print_results_summary() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.print_results_summary", false]], "process_gated_data() (in module ch_util.ni_utils)": [[10, "ch_util.ni_utils.process_gated_data", false]], 
"process_synced_data() (in module ch_util.ni_utils)": [[10, "ch_util.ni_utils.process_synced_data", false]], "prod (ch_util.andata.corrdata property)": [[0, "ch_util.andata.CorrData.prod", false]], "prod (ch_util.andata.corrreader property)": [[0, "ch_util.andata.CorrReader.prod", false]], "prod_sel (ch_util.andata.corrreader property)": [[0, "ch_util.andata.CorrReader.prod_sel", false]], "prodstack (ch_util.andata.corrdata property)": [[0, "ch_util.andata.CorrData.prodstack", false]], "rankn_approx() (in module ch_util.tools)": [[14, "ch_util.tools.rankN_approx", false]], "rawadcdata (class in ch_util.andata)": [[0, "ch_util.andata.RawADCData", false]], "rawadcreader (class in ch_util.andata)": [[0, "ch_util.andata.RawADCReader", false]], "read() (ch_util.andata.basereader method)": [[0, "ch_util.andata.BaseReader.read", false]], "read() (ch_util.andata.corrreader method)": [[0, "ch_util.andata.CorrReader.read", false]], "reader (in module ch_util.andata)": [[0, "ch_util.andata.Reader", false]], "redefine_stack_index_map() (in module ch_util.tools)": [[14, "ch_util.tools.redefine_stack_index_map", false]], "reference_noise_source (ch_util.timing.timingcorrection property)": [[13, "ch_util.timing.TimingCorrection.reference_noise_source", false]], "reflector (ch_util.tools.antenna attribute)": [[14, "ch_util.tools.Antenna.reflector", false]], "reorder_correlator_inputs() (in module ch_util.tools)": [[14, "ch_util.tools.reorder_correlator_inputs", false]], "resample() (ch_util.andata.gainflagdata method)": [[0, "ch_util.andata.GainFlagData.resample", false]], "resample() (ch_util.andata.hkpdata method)": [[0, "ch_util.andata.HKPData.resample", false]], "reversed() (ch_util.fluxcat.fluxcatalog class method)": [[6, "ch_util.fluxcat.FluxCatalog.reversed", false]], "rf_thru (ch_util.tools.antenna attribute)": [[14, "ch_util.tools.Antenna.rf_thru", false]], "rfiantenna (class in ch_util.tools)": [[14, "ch_util.tools.RFIAntenna", false]], "save() (ch_util.ni_utils.ni_data 
method)": [[10, "ch_util.ni_utils.ni_data.save", false]], "search_input() (ch_util.timing.timingcorrection method)": [[13, "ch_util.timing.TimingCorrection.search_input", false]], "search_update_id() (ch_util.andata.gainflagdata method)": [[0, "ch_util.andata.GainFlagData.search_update_id", false]], "search_update_time() (ch_util.andata.gainflagdata method)": [[0, "ch_util.andata.GainFlagData.search_update_time", false]], "select() (ch_util.andata.hkpdata method)": [[0, "ch_util.andata.HKPData.select", false]], "select_freq_physical() (ch_util.andata.corrreader method)": [[0, "ch_util.andata.CorrReader.select_freq_physical", false]], "select_freq_range() (ch_util.andata.corrreader method)": [[0, "ch_util.andata.CorrReader.select_freq_range", false]], "select_prod_autos() (ch_util.andata.corrreader method)": [[0, "ch_util.andata.CorrReader.select_prod_autos", false]], "select_prod_by_input() (ch_util.andata.corrreader method)": [[0, "ch_util.andata.CorrReader.select_prod_by_input", false]], "select_prod_pairs() (ch_util.andata.corrreader method)": [[0, "ch_util.andata.CorrReader.select_prod_pairs", false]], "select_time_range() (ch_util.andata.basereader method)": [[0, "ch_util.andata.BaseReader.select_time_range", false]], "sensor_to_hk() (in module ch_util.tools)": [[14, "ch_util.tools.sensor_to_hk", false]], "serial_to_id() (in module ch_util.tools)": [[14, "ch_util.tools.serial_to_id", false]], "serial_to_location() (in module ch_util.tools)": [[14, "ch_util.tools.serial_to_location", false]], "set_acq_list() (ch_util.chan_monitor.chanmonitor method)": [[2, "ch_util.chan_monitor.ChanMonitor.set_acq_list", false]], "set_coeff() (ch_util.timing.timingcorrection method)": [[13, "ch_util.timing.TimingCorrection.set_coeff", false]], "set_global_reference_time() (ch_util.timing.timingcorrection method)": [[13, "ch_util.timing.TimingCorrection.set_global_reference_time", false]], "set_good_ipts() (ch_util.chan_monitor.feedlocator method)": [[2, 
"ch_util.chan_monitor.FeedLocator.set_good_ipts", false]], "set_hk_input() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.set_hk_input", false]], "set_metadata() (ch_util.chan_monitor.chanmonitor method)": [[2, "ch_util.chan_monitor.ChanMonitor.set_metadata", false]], "set_reference_time() (ch_util.timing.timingcorrection method)": [[13, "ch_util.timing.TimingCorrection.set_reference_time", false]], "set_time_range() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.set_time_range", false]], "set_time_range_global_flag() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.set_time_range_global_flag", false]], "set_time_range_season() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.set_time_range_season", false]], "sg_spec (ch_util.layout.graph property)": [[9, "ch_util.layout.graph.sg_spec", false]], "sg_spec_start (ch_util.layout.graph property)": [[9, "ch_util.layout.graph.sg_spec_start", false]], "shortest_path_to_type() (ch_util.layout.graph method)": [[9, "ch_util.layout.graph.shortest_path_to_type", false]], "single_source_check() (ch_util.chan_monitor.chanmonitor method)": [[2, "ch_util.chan_monitor.ChanMonitor.single_source_check", false]], "sir() (in module ch_util.rfi)": [[12, "ch_util.rfi.sir", false]], "sir1d() (in module ch_util.rfi)": [[12, "ch_util.rfi.sir1d", false]], "skyfield (ch_util.fluxcat.fluxcatalog property)": [[6, "ch_util.fluxcat.FluxCatalog.skyfield", false]], "slot (ch_util.tools.corrinput attribute)": [[14, "ch_util.tools.CorrInput.slot", false]], "sma (ch_util.tools.corrinput attribute)": [[14, "ch_util.tools.CorrInput.sma", false]], "solar_rising() (in module ch_util.ephemeris)": [[4, "ch_util.ephemeris.solar_rising", false]], "solar_setting() (in module ch_util.ephemeris)": [[4, "ch_util.ephemeris.solar_setting", false]], "solar_transit() (in module ch_util.ephemeris)": [[4, "ch_util.ephemeris.solar_transit", false]], "sort() (ch_util.fluxcat.fluxcatalog class method)": [[6, 
"ch_util.fluxcat.FluxCatalog.sort", false]], "sort_evalues_mag() (in module ch_util.ni_utils)": [[10, "ch_util.ni_utils.sort_evalues_mag", false]], "source (ch_util.andata.calibrationgaindata property)": [[0, "ch_util.andata.CalibrationGainData.source", false]], "source (ch_util.holography.holographyobservation attribute)": [[8, "ch_util.holography.HolographyObservation.source", false]], "source_flags (ch_util.andata.flaginputdata property)": [[0, "ch_util.andata.FlagInputData.source_flags", false]], "source_gains (ch_util.andata.calibrationgaindata property)": [[0, "ch_util.andata.CalibrationGainData.source_gains", false]], "source_weights (ch_util.andata.calibrationgaindata property)": [[0, "ch_util.andata.CalibrationGainData.source_weights", false]], "spectra() (in module ch_util.plot)": [[11, "ch_util.plot.spectra", false]], "spectral_cut() (in module ch_util.rfi)": [[12, "ch_util.rfi.spectral_cut", false]], "stack (ch_util.andata.corrdata property)": [[0, "ch_util.andata.CorrData.stack", false]], "star_cirs() (in module ch_util.ephemeris)": [[4, "ch_util.ephemeris.Star_cirs", false]], "start (ch_util.layout.subgraph_spec property)": [[9, "ch_util.layout.subgraph_spec.start", false]], "static_amp (ch_util.timing.timingcorrection property)": [[13, "ch_util.timing.TimingCorrection.static_amp", false]], "static_phi (ch_util.timing.timingcorrection property)": [[13, "ch_util.timing.TimingCorrection.static_phi", false]], "static_phi_fit (ch_util.timing.timingcorrection property)": [[13, "ch_util.timing.TimingCorrection.static_phi_fit", false]], "stats (ch_util.fluxcat.fitspectrum attribute)": [[6, "ch_util.fluxcat.FitSpectrum.stats", false]], "string() (ch_util.fluxcat.fluxcatalog class method)": [[6, "ch_util.fluxcat.FluxCatalog.string", false]], "subclass_from_obj() (in module ch_util.andata)": [[0, "ch_util.andata.subclass_from_obj", false]], "subgraph_spec (class in ch_util.layout)": [[9, "ch_util.layout.subgraph_spec", false]], "subtract_rank1_signal() (in 
module ch_util.tools)": [[14, "ch_util.tools.subtract_rank1_signal", false]], "subtract_sky_noise() (ch_util.ni_utils.ni_data method)": [[10, "ch_util.ni_utils.ni_data.subtract_sky_noise", false]], "subtract_sky_noise() (in module ch_util.ni_utils)": [[10, "ch_util.ni_utils.subtract_sky_noise", false]], "summary() (ch_util.timing.timingcorrection method)": [[13, "ch_util.timing.TimingCorrection.summary", false]], "summary() (ch_util.timing.timingdata method)": [[13, "ch_util.timing.TimingData.summary", false]], "tau (ch_util.timing.timingcorrection property)": [[13, "ch_util.timing.TimingCorrection.tau", false]], "taua (in module ch_util.ephemeris)": [[4, "ch_util.ephemeris.TauA", false]], "temperature (ch_util.andata.weatherdata property)": [[0, "ch_util.andata.WeatherData.temperature", false]], "terminate (ch_util.layout.subgraph_spec property)": [[9, "ch_util.layout.subgraph_spec.terminate", false]], "thermal_amplitude() (in module ch_util.cal_utils)": [[1, "ch_util.cal_utils.thermal_amplitude", false]], "time (ch_util.andata.basedata property)": [[0, "ch_util.andata.BaseData.time", false]], "time (ch_util.andata.gainflagdata property)": [[0, "ch_util.andata.GainFlagData.time", false]], "time (ch_util.andata.weatherdata property)": [[0, "ch_util.andata.WeatherData.time", false]], "time (ch_util.layout.graph property)": [[9, "ch_util.layout.graph.time", false]], "time_exclusions (ch_util.finder.finder property)": [[5, "ch_util.finder.Finder.time_exclusions", false]], "time_intervals (ch_util.finder.finder property)": [[5, "ch_util.finder.Finder.time_intervals", false]], "time_ordered() (in module ch_util.plot)": [[11, "ch_util.plot.time_ordered", false]], "time_range (ch_util.finder.finder property)": [[5, "ch_util.finder.Finder.time_range", false]], "timestamp (ch_util.andata.basedata property)": [[0, "ch_util.andata.BaseData.timestamp", false]], "timingcorrection (class in ch_util.timing)": [[13, "ch_util.timing.TimingCorrection", false]], "timingdata (class in 
ch_util.timing)": [[13, "ch_util.timing.TimingData", false]], "timinginterpolator (class in ch_util.timing)": [[13, "ch_util.timing.TimingInterpolator", false]], "to_dict() (ch_util.fluxcat.fluxcatalog method)": [[6, "ch_util.fluxcat.FluxCatalog.to_dict", false]], "tod() (ch_util.andata.hkdata method)": [[0, "ch_util.andata.HKData.tod", false]], "toneantenna (class in ch_util.tools)": [[14, "ch_util.tools.TONEAntenna", false]], "transit_ra() (in module ch_util.ephemeris)": [[4, "ch_util.ephemeris.transit_RA", false]], "tval() (ch_util.cal_utils.fittransit class method)": [[1, "ch_util.cal_utils.FitTransit.tval", false]], "uncertainty() (ch_util.cal_utils.fitampphase method)": [[1, "ch_util.cal_utils.FitAmpPhase.uncertainty", false]], "uncertainty() (ch_util.cal_utils.fitrealimag method)": [[1, "ch_util.cal_utils.FitRealImag.uncertainty", false]], "uncertainty() (ch_util.cal_utils.fittransit method)": [[1, "ch_util.cal_utils.FitTransit.uncertainty", false]], "uncertainty() (ch_util.fluxcat.fitspectrum method)": [[6, "ch_util.fluxcat.FitSpectrum.uncertainty", false]], "uncertainty_amp() (ch_util.cal_utils.fitampphase method)": [[1, "ch_util.cal_utils.FitAmpPhase.uncertainty_amp", false]], "uncertainty_imag() (ch_util.cal_utils.fitrealimag method)": [[1, "ch_util.cal_utils.FitRealImag.uncertainty_imag", false]], "uncertainty_phi() (ch_util.cal_utils.fitampphase method)": [[1, "ch_util.cal_utils.FitAmpPhase.uncertainty_phi", false]], "uncertainty_real() (ch_util.cal_utils.fitrealimag method)": [[1, "ch_util.cal_utils.FitRealImag.uncertainty_real", false]], "unpack_product_array() (in module ch_util.tools)": [[14, "ch_util.tools.unpack_product_array", false]], "update_global_flag_mode() (ch_util.finder.finder method)": [[5, "ch_util.finder.Finder.update_global_flag_mode", false]], "update_id (ch_util.andata.gainflagdata property)": [[0, "ch_util.andata.GainFlagData.update_id", false]], "utc_lst_to_mjd() (in module ch_util.ephemeris)": [[4, 
"ch_util.ephemeris.utc_lst_to_mjd", false]], "utvec2mat() (in module ch_util.ni_utils)": [[10, "ch_util.ni_utils.utvec2mat", false]], "vander() (ch_util.cal_utils.fitpolyrealpolyimag method)": [[1, "ch_util.cal_utils.FitPolyRealPolyImag.vander", false]], "varname() (in module ch_util.fluxcat)": [[6, "ch_util.fluxcat.varname", false]], "versiontuple() (in module ch_util.andata)": [[0, "ch_util.andata.versiontuple", false]], "vira (in module ch_util.ephemeris)": [[4, "ch_util.ephemeris.VirA", false]], "vis (ch_util.andata.corrdata property)": [[0, "ch_util.andata.CorrData.vis", false]], "waterfall() (in module ch_util.plot)": [[11, "ch_util.plot.waterfall", false]], "weatherdata (class in ch_util.andata)": [[0, "ch_util.andata.WeatherData", false]], "weatherdatainterval (class in ch_util.finder)": [[5, "ch_util.finder.WeatherDataInterval", false]], "weatherreader (class in ch_util.andata)": [[0, "ch_util.andata.WeatherReader", false]], "weight (ch_util.andata.calibrationgaindata property)": [[0, "ch_util.andata.CalibrationGainData.weight", false]], "weight (ch_util.andata.corrdata property)": [[0, "ch_util.andata.CorrData.weight", false]], "weight_alpha (ch_util.timing.timingcorrection property)": [[13, "ch_util.timing.TimingCorrection.weight_alpha", false]], "weight_static_amp (ch_util.timing.timingcorrection property)": [[13, "ch_util.timing.TimingCorrection.weight_static_amp", false]], "weight_static_phi (ch_util.timing.timingcorrection property)": [[13, "ch_util.timing.TimingCorrection.weight_static_phi", false]], "weight_tau (ch_util.timing.timingcorrection property)": [[13, "ch_util.timing.TimingCorrection.weight_tau", false]], "zero_delay_noise_source (ch_util.timing.timingcorrection property)": [[13, "ch_util.timing.TimingCorrection.zero_delay_noise_source", false]]}, "objects": {"": [[16, 0, 0, "-", "ch_util"]], "ch_util": [[0, 0, 0, "-", "andata"], [1, 0, 0, "-", "cal_utils"], [2, 0, 0, "-", "chan_monitor"], [3, 0, 0, "-", "data_quality"], [4, 0, 0, "-", 
"ephemeris"], [5, 0, 0, "-", "finder"], [6, 0, 0, "-", "fluxcat"], [7, 0, 0, "-", "hfbcat"], [8, 0, 0, "-", "holography"], [9, 0, 0, "-", "layout"], [10, 0, 0, "-", "ni_utils"], [11, 0, 0, "-", "plot"], [12, 0, 0, "-", "rfi"], [13, 0, 0, "-", "timing"], [14, 0, 0, "-", "tools"]], "ch_util.andata": [[0, 1, 1, "", "AnData"], [0, 2, 1, "", "AnDataError"], [0, 3, 1, "", "BaseData"], [0, 3, 1, "", "BaseReader"], [0, 3, 1, "", "CalibrationGainData"], [0, 3, 1, "", "CalibrationGainReader"], [0, 3, 1, "", "CorrData"], [0, 3, 1, "", "CorrReader"], [0, 3, 1, "", "DigitalGainData"], [0, 3, 1, "", "DigitalGainReader"], [0, 3, 1, "", "FlagInputData"], [0, 3, 1, "", "FlagInputReader"], [0, 3, 1, "", "GainData"], [0, 3, 1, "", "GainFlagData"], [0, 3, 1, "", "HKData"], [0, 3, 1, "", "HKPData"], [0, 3, 1, "", "HKPReader"], [0, 3, 1, "", "HKReader"], [0, 3, 1, "", "RawADCData"], [0, 3, 1, "", "RawADCReader"], [0, 1, 1, "", "Reader"], [0, 3, 1, "", "WeatherData"], [0, 3, 1, "", "WeatherReader"], [0, 6, 1, "", "andata_from_acq1"], [0, 6, 1, "", "andata_from_archive2"], [0, 6, 1, "", "subclass_from_obj"], [0, 6, 1, "", "versiontuple"]], "ch_util.andata.BaseData": [[0, 4, 1, "", "cal"], [0, 5, 1, "", "convert_time"], [0, 5, 1, "", "create_cal"], [0, 5, 1, "", "create_flag"], [0, 5, 1, "", "create_reverse_map"], [0, 5, 1, "", "dataset_name_allowed"], [0, 4, 1, "", "datasets"], [0, 5, 1, "", "del_reverse_map"], [0, 4, 1, "", "flags"], [0, 5, 1, "", "from_acq_h5"], [0, 5, 1, "", "group_name_allowed"], [0, 4, 1, "", "ntime"], [0, 4, 1, "", "time"], [0, 4, 1, "", "timestamp"]], "ch_util.andata.BaseReader": [[0, 1, 1, "", "data_class"], [0, 5, 1, "", "read"], [0, 5, 1, "", "select_time_range"]], "ch_util.andata.CalibrationGainData": [[0, 4, 1, "", "gain"], [0, 5, 1, "", "get_source_index"], [0, 4, 1, "", "nsource"], [0, 4, 1, "", "source"], [0, 4, 1, "", "source_gains"], [0, 4, 1, "", "source_weights"], [0, 4, 1, "", "weight"]], "ch_util.andata.CalibrationGainReader": [[0, 1, 1, "", 
"data_class"]], "ch_util.andata.CorrData": [[0, 4, 1, "", "dataset_id"], [0, 4, 1, "", "freq"], [0, 5, 1, "", "from_acq_h5"], [0, 5, 1, "", "from_acq_h5_fast"], [0, 4, 1, "", "gain"], [0, 4, 1, "", "input_flags"], [0, 4, 1, "", "nfreq"], [0, 4, 1, "", "nprod"], [0, 4, 1, "", "prod"], [0, 4, 1, "", "prodstack"], [0, 4, 1, "", "stack"], [0, 4, 1, "", "vis"], [0, 4, 1, "", "weight"]], "ch_util.andata.CorrReader": [[0, 1, 1, "", "data_class"], [0, 4, 1, "", "freq"], [0, 4, 1, "", "freq_sel"], [0, 4, 1, "", "input"], [0, 4, 1, "", "input_sel"], [0, 4, 1, "", "prod"], [0, 4, 1, "", "prod_sel"], [0, 5, 1, "", "read"], [0, 5, 1, "", "select_freq_physical"], [0, 5, 1, "", "select_freq_range"], [0, 5, 1, "", "select_prod_autos"], [0, 5, 1, "", "select_prod_by_input"], [0, 5, 1, "", "select_prod_pairs"]], "ch_util.andata.DigitalGainData": [[0, 4, 1, "", "compute_time"], [0, 4, 1, "", "gain"], [0, 4, 1, "", "gain_coeff"], [0, 4, 1, "", "gain_exp"]], "ch_util.andata.DigitalGainReader": [[0, 1, 1, "", "data_class"]], "ch_util.andata.FlagInputData": [[0, 4, 1, "", "flag"], [0, 5, 1, "", "get_source_index"], [0, 4, 1, "", "source_flags"]], "ch_util.andata.FlagInputReader": [[0, 1, 1, "", "data_class"]], "ch_util.andata.GainData": [[0, 4, 1, "", "freq"], [0, 4, 1, "", "nfreq"]], "ch_util.andata.GainFlagData": [[0, 4, 1, "", "input"], [0, 4, 1, "", "ninput"], [0, 4, 1, "", "ntime"], [0, 5, 1, "", "resample"], [0, 5, 1, "", "search_update_id"], [0, 5, 1, "", "search_update_time"], [0, 4, 1, "", "time"], [0, 4, 1, "", "update_id"]], "ch_util.andata.HKData": [[0, 4, 1, "", "atmel"], [0, 5, 1, "", "chan"], [0, 5, 1, "", "from_acq_h5"], [0, 4, 1, "", "mux"], [0, 5, 1, "", "nchan"], [0, 4, 1, "", "nmux"], [0, 5, 1, "", "tod"]], "ch_util.andata.HKPData": [[0, 5, 1, "", "from_acq_h5"], [0, 5, 1, "", "metrics"], [0, 5, 1, "", "resample"], [0, 5, 1, "", "select"]], "ch_util.andata.HKPReader": [[0, 1, 1, "", "data_class"]], "ch_util.andata.HKReader": [[0, 1, 1, "", "data_class"]], 
"ch_util.andata.RawADCReader": [[0, 1, 1, "", "data_class"]], "ch_util.andata.WeatherData": [[0, 5, 1, "", "dataset_name_allowed"], [0, 5, 1, "", "group_name_allowed"], [0, 4, 1, "", "temperature"], [0, 4, 1, "", "time"]], "ch_util.andata.WeatherReader": [[0, 1, 1, "", "data_class"]], "ch_util.cal_utils": [[1, 3, 1, "", "FitAmpPhase"], [1, 3, 1, "", "FitGaussAmpPolyPhase"], [1, 3, 1, "", "FitPoly"], [1, 3, 1, "", "FitPolyLogAmpPolyPhase"], [1, 3, 1, "", "FitPolyRealPolyImag"], [1, 3, 1, "", "FitRealImag"], [1, 3, 1, "", "FitTransit"], [1, 6, 1, "", "estimate_directional_scale"], [1, 6, 1, "", "fit_histogram"], [1, 6, 1, "", "fit_point_source_map"], [1, 6, 1, "", "flag_outliers"], [1, 6, 1, "", "func_2d_gauss"], [1, 6, 1, "", "func_2d_sinc_gauss"], [1, 6, 1, "", "func_dirty_gauss"], [1, 6, 1, "", "func_real_dirty_gauss"], [1, 6, 1, "", "get_reference_times_dataset_id"], [1, 6, 1, "", "get_reference_times_file"], [1, 6, 1, "", "guess_fwhm"], [1, 6, 1, "", "interpolate_gain"], [1, 6, 1, "", "interpolate_gain_quiet"], [1, 6, 1, "", "thermal_amplitude"]], "ch_util.cal_utils.FitAmpPhase": [[1, 4, 1, "", "nparam"], [1, 5, 1, "", "uncertainty"], [1, 5, 1, "", "uncertainty_amp"], [1, 5, 1, "", "uncertainty_phi"]], "ch_util.cal_utils.FitGaussAmpPolyPhase": [[1, 4, 1, "", "ndofa"], [1, 4, 1, "", "ndofp"], [1, 4, 1, "", "parameter_names"], [1, 5, 1, "", "peak"]], "ch_util.cal_utils.FitPolyLogAmpPolyPhase": [[1, 4, 1, "", "ndofa"], [1, 4, 1, "", "ndofp"], [1, 4, 1, "", "parameter_names"], [1, 5, 1, "", "peak"]], "ch_util.cal_utils.FitPolyRealPolyImag": [[1, 5, 1, "", "deriv"], [1, 4, 1, "", "ndofi"], [1, 4, 1, "", "ndofr"], [1, 4, 1, "", "parameter_names"], [1, 5, 1, "", "peak"], [1, 5, 1, "", "vander"]], "ch_util.cal_utils.FitRealImag": [[1, 4, 1, "", "nparam"], [1, 5, 1, "", "uncertainty"], [1, 5, 1, "", "uncertainty_imag"], [1, 5, 1, "", "uncertainty_real"]], "ch_util.cal_utils.FitTransit": [[1, 4, 1, "", "N"], [1, 1, 1, "", "chisq"], [1, 5, 1, "", "fit"], [1, 4, 1, "", 
"ncomponent"], [1, 1, 1, "", "ndof"], [1, 4, 1, "", "nparam"], [1, 1, 1, "", "param"], [1, 4, 1, "", "param_corr"], [1, 1, 1, "", "param_cov"], [1, 4, 1, "", "parameter_names"], [1, 5, 1, "", "peak"], [1, 5, 1, "", "predict"], [1, 5, 1, "", "tval"], [1, 5, 1, "", "uncertainty"]], "ch_util.chan_monitor": [[2, 3, 1, "", "ChanMonitor"], [2, 3, 1, "", "FeedLocator"]], "ch_util.chan_monitor.ChanMonitor": [[2, 5, 1, "", "fromdata"], [2, 5, 1, "", "fromdate"], [2, 5, 1, "", "get_results"], [2, 5, 1, "", "get_sunfree_srcs"], [2, 5, 1, "", "set_acq_list"], [2, 5, 1, "", "set_metadata"], [2, 5, 1, "", "single_source_check"]], "ch_util.chan_monitor.FeedLocator": [[2, 5, 1, "", "continuity_test"], [2, 5, 1, "", "get_c_ydist"], [2, 5, 1, "", "get_c_ydist_perfreq"], [2, 5, 1, "", "get_xdist"], [2, 5, 1, "", "good_prod_freq"], [2, 5, 1, "", "params_ft"], [2, 5, 1, "", "set_good_ipts"]], "ch_util.data_quality": [[3, 6, 1, "", "good_channels"]], "ch_util.ephemeris": [[4, 7, 1, "", "CasA"], [4, 7, 1, "", "CygA"], [4, 6, 1, "", "Star_cirs"], [4, 7, 1, "", "TauA"], [4, 7, 1, "", "VirA"], [4, 6, 1, "", "bmxy_to_hadec"], [4, 6, 1, "", "chime_local_datetime"], [4, 6, 1, "", "cirs_radec"], [4, 6, 1, "", "galt_pointing_model_dec"], [4, 6, 1, "", "galt_pointing_model_ha"], [4, 6, 1, "", "get_doppler_shifted_freq"], [4, 6, 1, "", "get_range_rate"], [4, 6, 1, "", "get_source_dictionary"], [4, 6, 1, "", "hadec_to_bmxy"], [4, 6, 1, "", "lunar_rising"], [4, 6, 1, "", "lunar_setting"], [4, 6, 1, "", "lunar_transit"], [4, 6, 1, "", "object_coords"], [4, 6, 1, "", "parse_date"], [4, 6, 1, "", "peak_RA"], [4, 6, 1, "", "solar_rising"], [4, 6, 1, "", "solar_setting"], [4, 6, 1, "", "solar_transit"], [4, 6, 1, "", "transit_RA"], [4, 6, 1, "", "utc_lst_to_mjd"]], "ch_util.finder": [[5, 3, 1, "", "BaseDataInterval"], [5, 3, 1, "", "CalibrationGainDataInterval"], [5, 3, 1, "", "CorrDataInterval"], [5, 2, 1, "", "DataFlagged"], [5, 1, 1, "", "DataInterval"], [5, 3, 1, "", "DataIntervalList"], [5, 3, 1, 
"", "DigitalGainDataInterval"], [5, 3, 1, "", "Finder"], [5, 3, 1, "", "FlagInputDataInterval"], [5, 3, 1, "", "HKDataInterval"], [5, 3, 1, "", "WeatherDataInterval"], [5, 6, 1, "", "files_in_range"]], "ch_util.finder.BaseDataInterval": [[5, 5, 1, "", "as_loaded_data"], [5, 5, 1, "", "as_reader"]], "ch_util.finder.CorrDataInterval": [[5, 5, 1, "", "as_loaded_data"]], "ch_util.finder.DataIntervalList": [[5, 5, 1, "", "iter_loaded_data"], [5, 5, 1, "", "iter_reader"]], "ch_util.finder.Finder": [[5, 5, 1, "", "accept_all_global_flags"], [5, 4, 1, "", "acqs"], [5, 4, 1, "", "data_flag_types"], [5, 5, 1, "", "exclude_RA_interval"], [5, 5, 1, "", "exclude_data_flag_type"], [5, 5, 1, "", "exclude_daytime"], [5, 5, 1, "", "exclude_global_flag"], [5, 5, 1, "", "exclude_nighttime"], [5, 5, 1, "", "exclude_sun"], [5, 5, 1, "", "exclude_time_interval"], [5, 5, 1, "", "exclude_transits"], [5, 5, 1, "", "filter_acqs"], [5, 5, 1, "", "filter_acqs_by_files"], [5, 5, 1, "", "get_results"], [5, 5, 1, "", "get_results_acq"], [5, 4, 1, "", "global_flag_mode"], [5, 5, 1, "", "include_26m_obs"], [5, 5, 1, "", "include_RA_interval"], [5, 5, 1, "", "include_global_flag"], [5, 5, 1, "", "include_time_interval"], [5, 5, 1, "", "include_transits"], [5, 4, 1, "", "min_interval"], [5, 5, 1, "", "offline"], [5, 5, 1, "", "only_chime_weather"], [5, 5, 1, "", "only_corr"], [5, 5, 1, "", "only_digitalgain"], [5, 5, 1, "", "only_flaginput"], [5, 5, 1, "", "only_gain"], [5, 5, 1, "", "only_hfb"], [5, 5, 1, "", "only_hk"], [5, 5, 1, "", "only_hkp"], [5, 5, 1, "", "only_rawadc"], [5, 5, 1, "", "only_weather"], [5, 5, 1, "", "print_acq_info"], [5, 5, 1, "", "print_results_summary"], [5, 5, 1, "", "set_hk_input"], [5, 5, 1, "", "set_time_range"], [5, 5, 1, "", "set_time_range_global_flag"], [5, 5, 1, "", "set_time_range_season"], [5, 4, 1, "", "time_exclusions"], [5, 4, 1, "", "time_intervals"], [5, 4, 1, "", "time_range"], [5, 5, 1, "", "update_global_flag_mode"]], "ch_util.fluxcat": [[6, 3, 1, "", 
"CurvedPowerLaw"], [6, 3, 1, "", "FitSpectrum"], [6, 3, 1, "", "FluxCatalog"], [6, 3, 1, "", "MetaFluxCatalog"], [6, 3, 1, "", "NumpyEncoder"], [6, 6, 1, "", "format_source_name"], [6, 6, 1, "", "get_epoch"], [6, 6, 1, "", "json_numpy_obj_hook"], [6, 6, 1, "", "varname"]], "ch_util.fluxcat.FitSpectrum": [[6, 1, 1, "", "param"], [6, 1, 1, "", "param_cov"], [6, 5, 1, "", "predict"], [6, 1, 1, "", "stats"], [6, 5, 1, "", "uncertainty"]], "ch_util.fluxcat.FluxCatalog": [[6, 5, 1, "", "add_measurement"], [6, 5, 1, "", "available_collections"], [6, 4, 1, "", "catalog"], [6, 4, 1, "", "citation"], [6, 5, 1, "", "delete"], [6, 5, 1, "", "dump"], [6, 4, 1, "", "eflux"], [6, 4, 1, "", "epoch"], [6, 1, 1, "", "fields"], [6, 5, 1, "", "fit_model"], [6, 4, 1, "", "flag"], [6, 4, 1, "", "flux"], [6, 4, 1, "", "freq"], [6, 5, 1, "", "from_dict"], [6, 5, 1, "", "get"], [6, 5, 1, "", "iter"], [6, 5, 1, "", "iteritems"], [6, 5, 1, "", "keys"], [6, 5, 1, "", "len"], [6, 5, 1, "", "load"], [6, 5, 1, "", "loaded_collections"], [6, 1, 1, "", "model_lookup"], [6, 5, 1, "", "plot"], [6, 5, 1, "", "predict_flux"], [6, 5, 1, "", "predict_uncertainty"], [6, 5, 1, "", "print_available_collections"], [6, 5, 1, "", "print_loaded_collections"], [6, 5, 1, "", "print_measurements"], [6, 5, 1, "", "reversed"], [6, 4, 1, "", "skyfield"], [6, 5, 1, "", "sort"], [6, 5, 1, "", "string"], [6, 5, 1, "", "to_dict"]], "ch_util.fluxcat.NumpyEncoder": [[6, 5, 1, "", "default"]], "ch_util.hfbcat": [[7, 3, 1, "", "HFBCatalog"], [7, 6, 1, "", "get_doppler_shifted_freq"]], "ch_util.hfbcat.HFBCatalog": [[7, 1, 1, "", "fields"]], "ch_util.holography": [[8, 3, 1, "", "HolographyObservation"], [8, 3, 1, "", "HolographySource"]], "ch_util.holography.HolographyObservation": [[8, 1, 1, "", "DoesNotExist"], [8, 5, 1, "", "create_from_ant_logs"], [8, 5, 1, "", "create_from_dict"], [8, 5, 1, "", "create_from_post_reports"], [8, 5, 1, "", "from_lst"], [8, 1, 1, "", "notes"], [8, 5, 1, "", "parse_ant_logs"], [8, 5, 1, "", 
"parse_post_report"], [8, 1, 1, "", "source"]], "ch_util.holography.HolographySource": [[8, 1, 1, "", "DoesNotExist"], [8, 1, 1, "", "name"]], "ch_util.layout": [[9, 6, 1, "", "enter_ltf"], [9, 6, 1, "", "get_global_flag_times"], [9, 6, 1, "", "global_flags_between"], [9, 3, 1, "", "graph"], [9, 3, 1, "", "subgraph_spec"]], "ch_util.layout.graph": [[9, 5, 1, "", "closest_of_type"], [9, 5, 1, "", "component"], [9, 5, 1, "", "from_db"], [9, 5, 1, "", "from_graph"], [9, 5, 1, "", "ltf"], [9, 5, 1, "", "neighbour_of_type"], [9, 5, 1, "", "node_property"], [9, 4, 1, "", "sg_spec"], [9, 4, 1, "", "sg_spec_start"], [9, 5, 1, "", "shortest_path_to_type"], [9, 4, 1, "", "time"]], "ch_util.layout.subgraph_spec": [[9, 5, 1, "", "from_predef"], [9, 4, 1, "", "hide"], [9, 4, 1, "", "oneway"], [9, 4, 1, "", "start"], [9, 4, 1, "", "terminate"]], "ch_util.ni_utils": [[10, 6, 1, "", "gains2utvec"], [10, 6, 1, "", "gains2utvec_tf"], [10, 6, 1, "", "gen_prod_sel"], [10, 6, 1, "", "ktrprod"], [10, 6, 1, "", "mat2utvec"], [10, 6, 1, "", "ni_als"], [10, 3, 1, "", "ni_data"], [10, 6, 1, "", "ni_gains_evalues"], [10, 6, 1, "", "ni_gains_evalues_tf"], [10, 6, 1, "", "process_gated_data"], [10, 6, 1, "", "process_synced_data"], [10, 6, 1, "", "sort_evalues_mag"], [10, 6, 1, "", "subtract_sky_noise"], [10, 6, 1, "", "utvec2mat"]], "ch_util.ni_utils.ni_data": [[10, 5, 1, "", "get_als_gains"], [10, 5, 1, "", "get_ni_gains"], [10, 5, 1, "", "save"], [10, 5, 1, "", "subtract_sky_noise"]], "ch_util.plot": [[11, 6, 1, "", "spectra"], [11, 6, 1, "", "time_ordered"], [11, 6, 1, "", "waterfall"]], "ch_util.rfi": [[12, 6, 1, "", "flag_dataset"], [12, 6, 1, "", "frequency_mask"], [12, 6, 1, "", "get_autocorrelations"], [12, 6, 1, "", "highpass_delay_filter"], [12, 6, 1, "", "iterative_hpf_masking"], [12, 6, 1, "", "mad_cut_1d"], [12, 6, 1, "", "mad_cut_2d"], [12, 6, 1, "", "mad_cut_rolling"], [12, 6, 1, "", "number_deviations"], [12, 6, 1, "", "sir"], [12, 6, 1, "", "sir1d"], [12, 6, 1, "", 
"spectral_cut"]], "ch_util.timing": [[13, 3, 1, "", "TimingCorrection"], [13, 3, 1, "", "TimingData"], [13, 3, 1, "", "TimingInterpolator"], [13, 6, 1, "", "construct_delay_template"], [13, 6, 1, "", "eigen_decomposition"], [13, 6, 1, "", "fit_poly_to_phase"], [13, 6, 1, "", "load_timing_correction"], [13, 6, 1, "", "map_input_to_noise_source"], [13, 6, 1, "", "model_poly_phase"]], "ch_util.timing.TimingCorrection": [[13, 4, 1, "", "alpha"], [13, 4, 1, "", "amp_to_delay"], [13, 5, 1, "", "apply_timing_correction"], [13, 4, 1, "", "coeff_alpha"], [13, 4, 1, "", "coeff_tau"], [13, 5, 1, "", "delete_coeff"], [13, 4, 1, "", "freq"], [13, 5, 1, "", "from_dict"], [13, 5, 1, "", "get_alpha"], [13, 5, 1, "", "get_gain"], [13, 5, 1, "", "get_stacked_alpha"], [13, 5, 1, "", "get_stacked_tau"], [13, 5, 1, "", "get_tau"], [13, 5, 1, "", "get_timing_correction"], [13, 4, 1, "", "has_amplitude"], [13, 4, 1, "", "has_coeff_alpha"], [13, 4, 1, "", "has_coeff_tau"], [13, 4, 1, "", "has_num_freq"], [13, 4, 1, "", "input"], [13, 4, 1, "", "noise_source"], [13, 4, 1, "", "nsource"], [13, 4, 1, "", "num_freq"], [13, 4, 1, "", "reference_noise_source"], [13, 5, 1, "", "search_input"], [13, 5, 1, "", "set_coeff"], [13, 5, 1, "", "set_global_reference_time"], [13, 5, 1, "", "set_reference_time"], [13, 4, 1, "", "static_amp"], [13, 4, 1, "", "static_phi"], [13, 4, 1, "", "static_phi_fit"], [13, 5, 1, "", "summary"], [13, 4, 1, "", "tau"], [13, 4, 1, "", "weight_alpha"], [13, 4, 1, "", "weight_static_amp"], [13, 4, 1, "", "weight_static_phi"], [13, 4, 1, "", "weight_tau"], [13, 4, 1, "", "zero_delay_noise_source"]], "ch_util.timing.TimingData": [[13, 5, 1, "", "from_acq_h5"], [13, 5, 1, "", "summary"]], "ch_util.tools": [[14, 3, 1, "", "Antenna"], [14, 3, 1, "", "ArrayAntenna"], [14, 3, 1, "", "Blank"], [14, 3, 1, "", "CHIMEAntenna"], [14, 3, 1, "", "CorrInput"], [14, 3, 1, "", "GBOAntenna"], [14, 3, 1, "", "HCOAntenna"], [14, 3, 1, "", "HKInput"], [14, 3, 1, "", "HolographyAntenna"], [14, 
3, 1, "", "KKOAntenna"], [14, 3, 1, "", "NoiseSource"], [14, 1, 1, "", "PCOAntenna"], [14, 3, 1, "", "PathfinderAntenna"], [14, 3, 1, "", "RFIAntenna"], [14, 3, 1, "", "TONEAntenna"], [14, 6, 1, "", "antenna_to_lna"], [14, 6, 1, "", "apply_gain"], [14, 6, 1, "", "beam_index2number"], [14, 6, 1, "", "calibrate_temperature"], [14, 6, 1, "", "change_chime_location"], [14, 6, 1, "", "change_pathfinder_location"], [14, 6, 1, "", "cmap"], [14, 6, 1, "", "decorrelation"], [14, 6, 1, "", "delay"], [14, 6, 1, "", "eigh_no_diagonal"], [14, 6, 1, "", "ensure_list"], [14, 6, 1, "", "fake_tone_database"], [14, 6, 1, "", "fast_pack_product_array"], [14, 6, 1, "", "fringestop_time"], [14, 6, 1, "", "get_correlator_inputs"], [14, 6, 1, "", "get_default_frequency_map_stream"], [14, 6, 1, "", "get_feed_polarisations"], [14, 6, 1, "", "get_feed_positions"], [14, 6, 1, "", "get_holographic_index"], [14, 6, 1, "", "get_noise_channel"], [14, 6, 1, "", "get_noise_source_index"], [14, 6, 1, "", "hk_to_sensor"], [14, 6, 1, "", "icmap"], [14, 6, 1, "", "invert_no_zero"], [14, 6, 1, "", "is_array"], [14, 6, 1, "", "is_array_on"], [14, 6, 1, "", "is_array_x"], [14, 6, 1, "", "is_array_y"], [14, 6, 1, "", "is_chime"], [14, 6, 1, "", "is_chime_on"], [14, 6, 1, "", "is_holographic"], [14, 6, 1, "", "is_noise_source"], [14, 6, 1, "", "is_pathfinder"], [14, 6, 1, "", "lna_to_antenna"], [14, 6, 1, "", "normalise_correlations"], [14, 6, 1, "", "order_frequency_map_stream"], [14, 6, 1, "", "pack_product_array"], [14, 6, 1, "", "parse_chime_serial"], [14, 6, 1, "", "parse_old_serial"], [14, 6, 1, "", "parse_pathfinder_serial"], [14, 6, 1, "", "rankN_approx"], [14, 6, 1, "", "redefine_stack_index_map"], [14, 6, 1, "", "reorder_correlator_inputs"], [14, 6, 1, "", "sensor_to_hk"], [14, 6, 1, "", "serial_to_id"], [14, 6, 1, "", "serial_to_location"], [14, 6, 1, "", "subtract_rank1_signal"], [14, 6, 1, "", "unpack_product_array"]], "ch_util.tools.Antenna": [[14, 1, 1, "", "antenna"], [14, 1, 1, "", 
"reflector"], [14, 1, 1, "", "rf_thru"]], "ch_util.tools.ArrayAntenna": [[14, 1, 1, "", "cyl"], [14, 1, 1, "", "flag"], [14, 1, 1, "", "pol"], [14, 1, 1, "", "pos"]], "ch_util.tools.CorrInput": [[14, 1, 1, "", "corr"], [14, 1, 1, "", "corr_order"], [14, 1, 1, "", "crate"], [14, 4, 1, "", "delay"], [14, 4, 1, "", "id"], [14, 1, 1, "", "input_sn"], [14, 1, 1, "", "slot"], [14, 1, 1, "", "sma"]], "ch_util.tools.HKInput": [[14, 1, 1, "", "atmel"], [14, 1, 1, "", "chan"], [14, 1, 1, "", "mux"]], "ch_util.tools.HolographyAntenna": [[14, 1, 1, "", "pol"], [14, 1, 1, "", "pos"]], "ch_util.tools.PathfinderAntenna": [[14, 1, 1, "", "powered"]]}, "objnames": {"0": ["py", "module", "Python module"], "1": ["py", "attribute", "Python attribute"], "2": ["py", "exception", "Python exception"], "3": ["py", "class", "Python class"], "4": ["py", "property", "Python property"], "5": ["py", "method", "Python method"], "6": ["py", "function", "Python function"], "7": ["py", "data", "Python data"]}, "objtypes": {"0": "py:module", "1": "py:attribute", "2": "py:exception", "3": "py:class", "4": "py:property", "5": "py:method", "6": "py:function", "7": "py:data"}, "terms": {"": [0, 1, 2, 4, 5, 6, 7, 8, 9, 10, 13, 14], "0": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14], "00": [7, 14], "000": 10, "000000": 5, "0001": 0, "0002": 0, "0004": 14, "000401": 9, "00040101": 9, "00040112": 9, "00040401": 14, "00040403": 14, "00041506": 9, "00041606": 9, "009": 5, "01": [4, 11], "011": 5, "01446": 4, "01dec17_1814": 8, "02": [3, 5], "03356934": 5, "04": 5, "05": 14, "059670": 5, "06": 7, "0718": 14, "071800000000001": 14, "076": 4, "07t21": 7, "081": 4, "0x7f31ed2c8790": 9, "0x7f31ed323fd0": 9, "0x7fd1b2cda710": 9, "0x7fd1b2cda810": 9, "0x7fd1b2cfb7d0": 9, "1": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 13, 14], "10": [0, 1, 3, 5, 8, 9, 12, 13, 14], "100": [7, 11, 13], "1000": [13, 14], "1023": 14, "1024": [0, 5], "103": 0, "104": 5, "107430": 5, "108": 5, "1080": 5, "10m": 5, "11": [5, 8, 9], "110": 
7, "11160": 5, "1116260": 5, "11292": 5, "11350": 5, "11397": 12, "1150": 5, "11790": 5, "11l": 9, "12": [4, 5, 9, 13, 14], "1201": 12, "120853": 9, "120876": 9, "121358": 5, "12337": 5, "1255": 14, "126781": 5, "13": 5, "1376": 5, "14": [5, 11], "1419": 5, "1429": 5, "1435349183": 10, "15": [0, 4, 5, 12, 14], "153499": 5, "154": 2, "154053": 5, "155641": 5, "1590": 5, "16": [5, 10, 14], "161606": 5, "16412": 5, "16740": 5, "169549": 5, "16969": 5, "17": 5, "1717809534": 7, "171909": 5, "174": 5, "1744": 5, "1766": 5, "1771": 5, "18": [5, 7], "180": 5, "181012": 5, "18126": 5, "18271": 5, "186688": 5, "187": 4, "1875": 5, "19": 5, "19091797": 5, "1928": 5, "193595": 5, "1938": 5, "1945": 5, "1977": 6, "1d": [0, 1, 10, 12, 14], "1e": 12, "1min": 0, "2": [0, 1, 2, 3, 4, 5, 6, 7, 10, 12, 13, 14], "20": [2, 4, 5, 9, 13], "200": [9, 11, 13], "2000": 4, "2004": 12, "200m": 9, "2011706": 5, "20131208t070336z_stone_corr": 5, "20131210t060233z_stone_corr": 5, "2014": [5, 9, 11], "20140128t135105z_stone_corr": 5, "20140210t021023z_stone_corr": 5, "20140211t020307z_stone_corr": 5, "20140212t014603z_stone_corr": 5, "20140219t145523z_stone_corr": 5, "20140219t145849z_abbot_corr": 5, "20140220t213252z_stone_corr": 5, "20140224t051212z_stone_corr": 5, "20140311t192616z_abbot_corr": 5, "20140312t001123z_abbot_corr": 5, "20140312t003054z_abbot_corr": 5, "20140312t224940z_abbot_corr": 5, "20140312t230108z_abbot_corr": 5, "20140315t014330z_abbot_corr": 5, "20140318t154959z_abbot_corr": 5, "20140320t120437z_abbot_corr": 5, "20140325t174231z_abbot_corr": 5, "20140326t175440z_abbot_corr": 5, "20140330t064125z_abbot_corr": 5, "20140330t102505z_abbot_corr": 5, "20140403t000057z_blanchard_corr": 5, "20140403t152314z_blanchard_corr": 5, "20140408t222844z_abbot_corr": 5, "20140409t165603z_blanchard_corr": 5, "20140409t184530z_blanchard_corr": 5, "20140410t003326z_blanchard_corr": 5, "20140410t014136z_blanchard_corr": 5, "20140410t031023z_blanchard_corr": 5, "20140411t000920z_blanchard_corr": 
5, "20140411t003404z_blanchard_corr": 5, "20140413t002319z_blanchard_corr": 5, "20140830t005410z_ben_hk": 5, "20140905t203905z_ben_hk": 5, "20140908t153116z_ben_hk": 5, "20140916t173334z_blanchard_corr": 10, "20141009t222415z_ben_hk": 5, "20150626t200540z_pathfinder_corr": [3, 10], "2016": [0, 14], "2017": [5, 8], "20170801t063349z_pathfinder_corr": 5, "20170801t131035z_pathfinder_corr": 5, "2019": [4, 8], "2020": 1, "2021": 12, "2024": [7, 9], "207522": 5, "2093": 5, "21": [0, 5, 8, 11], "21541": 5, "2173": 5, "218": 2, "22": [0, 4, 5, 9], "229410": 5, "23": [0, 5, 14], "231275": 5, "231900": 5, "2347": 5, "235": 3, "24": [4, 5, 8], "240": 5, "242094": 5, "2424": 5, "2451545": 4, "2483": 9, "25": [1, 5], "255": 14, "26": [2, 5], "2660": 9, "26m": [5, 14], "26m_dish": 9, "26mlog": 8, "2800": 2, "286487": 5, "293686": 5, "299": 4, "2998": 5, "2d": [0, 6, 10, 11, 12, 14], "2l": 9, "3": [0, 1, 2, 3, 5, 6, 8, 9, 10, 14], "30": [0, 1], "304970": 5, "309649": 5, "30988": 5, "31": [0, 9], "314": 5, "32": [1, 6], "32453": 5, "32662": 5, "32l": 9, "3364v2": 12, "340637": 5, "34472656": 5, "350": 4, "35119": 5, "35194": 5, "36": [0, 5], "360": 5, "3600": 5, "377": 5, "378053": 5, "3795": 5, "38": 14, "391": 5, "39112": 4, "394484": 5, "394565": 5, "397": 5, "398689": 5, "3d": [10, 11], "3x3": 10, "4": [0, 5, 14], "40": [4, 11, 12], "4000": [5, 11], "4165": 5, "419873": 5, "42": 12, "420": [12, 13], "43193": 5, "43200": [5, 13], "45": [5, 7], "452087": 5, "458": 4, "46": 5, "47": 5, "48": 5, "484854": 5, "4952": 5, "5": [0, 1, 2, 4, 5, 9, 10, 11, 12, 13, 14], "50": [7, 9, 10, 13], "500": 13, "50m": 9, "511": 5, "51128": 5, "51739": 5, "52": 5, "5292": 4, "53491211": 5, "54": [7, 9], "5465": 5, "54912": 5, "55288": 5, "56": 5, "571018": 5, "5745": 5, "58": 4, "591": 5, "6": [2, 5, 12], "60": [8, 9], "600": [1, 6, 7], "60m": 9, "61279297": 5, "626385": 5, "63321": 4, "64": [9, 12], "65": [5, 12], "660": 5, "663510": 5, "664": 5, "667": 5, "67653": 5, "681400000000002": 14, 
"6922": 5, "7": [0, 2, 5, 6, 9], "70593": 4, "707": 4, "71": 2, "72": 5, "7200": 5, "73392": 4, "739000": 5, "754": [4, 5], "75589": 5, "75911": 5, "78": 5, "780": 13, "79889": 5, "8": [0, 5, 14], "80": 5, "81178": 4, "83": 4, "83554": 5, "8491": 5, "84981": 5, "858720": 5, "86": 5, "86019": 5, "86400": 5, "86561": 5, "86642": 4, "86815": 4, "872": 4, "873555": 5, "874": 5, "9": [5, 9, 14], "90": [2, 5], "90075684": 5, "903": 9, "94": 5, "990": 5, "A": [0, 1, 2, 3, 4, 5, 6, 8, 9, 10, 13, 14], "AND": 14, "And": 3, "At": 10, "Be": 8, "But": 9, "By": [0, 10, 14], "For": [0, 1, 2, 3, 4, 7, 9, 10, 12, 13, 14], "If": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14], "In": [0, 5, 9, 14], "It": [1, 2, 4, 6, 9, 10, 12, 13, 14], "Its": 10, "NOT": [2, 5], "No": [4, 5], "ON": 10, "OR": [5, 12], "Of": 8, "On": 3, "One": [5, 9, 14], "TO": 2, "The": [0, 1, 2, 4, 5, 6, 8, 9, 10, 12, 13, 14], "There": [5, 9, 12, 14], "These": [0, 1, 4, 5, 6, 10, 14], "To": [4, 5, 6, 7, 9, 10, 11, 13], "With": [12, 13], "__init__": 6, "_check_ni": 3, "_deriv_fit_func": 6, "_fit": 1, "_fit_func": 6, "_get_x": 6, "_id": 9, "_is_skyfield_obj": 4, "_jacobian": 1, "_model": 1, "_rev_id": 9, "_rev_nam": 9, "_type_id": 9, "_type_nam": 9, "a00": 10, "a01": 10, "a02": 10, "a11": 10, "a12": 10, "a22": 10, "a_": 6, "aaa": 4, "abber": 4, "abbrevi": 5, "aberr": 4, "abl": 0, "about": [0, 6, 8, 13], "abov": [3, 5, 6, 8, 9, 10, 12, 13], "abs_tol": 10, "absolut": [1, 3, 10, 12, 13, 14], "absolute_sigma": 1, "absolutelli": 2, "absorpt": 7, "abstract": [1, 6], "abt": 8, "acccept": 13, "acceleromet": 14, "accept": [0, 1, 5, 13, 14], "accept_all_global_flag": 5, "access": [0, 3, 5, 6, 9, 13], "accomplish": [5, 13], "accord": [6, 10], "account": [1, 4], "acctual": 5, "ach": 14, "achiev": [5, 13], "acq": [0, 5], "acq_fil": [0, 13], "acq_ind": 5, "acq_list": 2, "acqtyp": 5, "acquisit": [0, 2, 3, 5, 13], "acquisiton": 13, "across": [1, 12], "act": 6, "action": [6, 7], "actual": [4, 9, 13, 14], "ad": [5, 8, 9, 12, 14], 
"adc": [0, 5], "adc_ch_ref": 10, "add": [0, 5, 6, 7, 8, 9, 10], "add_compon": 9, "add_doc": 9, "add_histori": 9, "add_measur": 6, "addit": [0, 5, 6, 10, 12, 13], "addition": [0, 13], "adjac": [2, 9], "adjust": [5, 13], "advanc": 14, "advic": 4, "affect": [0, 5, 12], "after": [2, 4, 5, 6, 10, 12, 13], "again": 5, "against": [6, 12], "aggres": 13, "aggress": 12, "agreement": [1, 2], "ahinck": 9, "al": [6, 10, 12], "algorithm": [10, 13], "alia": [0, 5, 6, 8, 14], "alias": 0, "all": [0, 1, 2, 3, 5, 6, 8, 9, 10, 12, 13, 14], "allow": [0, 1, 2, 3, 9, 14], "allow_nan": 6, "along": [0, 1, 9, 11, 13, 14], "alpha": [1, 6, 13], "alpha_init": 13, "alreadi": [6, 7, 9, 10, 12, 13], "also": [0, 1, 2, 4, 5, 8, 9, 10, 11, 13, 14], "alter": 9, "altern": [6, 7, 10], "alternate_nam": [6, 7], "although": [10, 14], "altitud": 4, "altogeth": 9, "alwai": [5, 7, 10, 14], "amount": [5, 13], "amp": [1, 13], "amp_to_delai": 13, "amplitud": [1, 13], "an": [0, 1, 3, 4, 5, 6, 7, 8, 9, 10, 12, 13, 14], "analog": 0, "analysi": [0, 10, 14], "andata": [2, 3, 5, 10, 11, 12, 13], "andata_from_acq1": 0, "andata_from_archive2": 0, "andataerror": 0, "anderson": 1, "angl": [1, 4, 7, 8], "angular": [4, 8], "ani": [0, 1, 2, 4, 5, 6, 8, 9, 12, 14], "anoth": [9, 13], "ant": [8, 9, 14], "ant0044b": 9, "ant0108b": 9, "ant0123b": 14, "ant0128b": 14, "ant_data": 8, "antenna": [9, 14], "antenna_to_lna": 14, "api": [4, 15], "appar": 4, "appart": 4, "appear": [0, 3, 9], "appli": [0, 1, 5, 9, 10, 12, 13, 14], "applic": [12, 13], "apply_gain": [0, 14], "apply_static_mask": 12, "apply_timing_correct": 13, "appropri": [0, 5, 9, 13], "approxim": [0, 10, 13, 14], "ar": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 13, 14], "arbitrari": [5, 6], "archiv": [0, 5, 8, 13], "archiveacq": 5, "archivefil": 5, "archiveinst": 5, "arcmin": 4, "arcsecond": 4, "arg": [0, 1, 4, 6, 8, 9, 14], "argument": [0, 1, 4, 5, 6, 11, 13], "around": [1, 2, 8, 13], "arr": [1, 14], "arrai": [0, 1, 2, 4, 6, 7, 10, 11, 12, 13], "array_lik": 4, 
"arrayantenna": 14, "arriv": 14, "arxiv": 12, "as_loaded_data": 5, "as_read": 5, "ascens": [1, 4, 5, 6, 7], "ascii": 6, "assign": [0, 5], "associ": [9, 14], "assum": [1, 2, 10, 13, 14], "astrometr": 4, "astronom": [2, 6], "atmega": 9, "atmega49704949575721220150": 9, "atmega50874956504915100100": 9, "atmel": [0, 14], "atmel_nam": 5, "attach": 14, "attempt": [6, 7, 9], "attenu": 9, "attr": 0, "attribut": [0, 1, 2, 3, 4, 5, 6, 7, 10, 12, 13], "author": 12, "auto": [0, 1, 3, 12], "auto_flag": 12, "auto_ii": 12, "auto_vi": 12, "autocorrel": [5, 10, 12], "automat": [13, 14], "auxiliari": 3, "avaiabl": 13, "avail": [2, 5, 6], "available_collect": 6, "averag": [10, 11, 12, 13], "avoid": [4, 8, 13], "awai": 1, "awar": 13, "ax": [0, 12, 14], "axi": [0, 1, 2, 4, 11, 12, 13, 14], "axis_nam": 0, "b": [4, 9, 10, 14], "baar": 6, "back": [9, 14], "background": 1, "backward": [9, 12, 14], "bad": [1, 2, 9, 12, 13, 14], "bad_sg": 9, "badsubgraph": 9, "band": [0, 12, 13, 14], "bandwidth": 0, "bao": [4, 5, 10], "base": [0, 1, 2, 4, 5, 6, 7, 8, 9, 10, 13, 14], "base64": 6, "base_filenam": 11, "base_ipt": 2, "base_model": 8, "basedata": [0, 5, 13], "basedatainterv": 5, "basedatainverv": 5, "baselin": [0, 11, 12, 13, 14], "basemask": 12, "basenam": 4, "baseread": 0, "basi": [0, 6], "basic": [2, 6, 10], "basicconfig": 9, "basiccont": 0, "beam": [1, 4, 14], "beam_index": 14, "beam_index2numb": 14, "beam_model": 14, "beam_numb": 14, "becaus": [5, 9], "becom": [2, 5, 13], "been": [1, 4, 5, 6, 10, 12, 13], "befor": [0, 1, 2, 5, 10, 12, 13], "begin": [5, 8, 9, 13], "behavior": 6, "behaviour": 5, "belong": 14, "below": 10, "bene": 8, "best": [0, 1, 6, 13], "between": [0, 1, 2, 4, 5, 6, 7, 9, 13, 14], "beween": 2, "beyond": [12, 13], "big": [0, 10], "bin": [0, 1, 10, 11, 13, 14], "bin_centr": 1, "bin_count": 1, "bitmask": 8, "blanchard": 14, "blank": 14, "block": [5, 9, 12], "blockhous": 0, "bmx": 4, "bmxy_to_hadec": 4, "bmy": 4, "board": [0, 14], "bodi": [4, 5, 14], "bool": [0, 1, 4, 5, 6, 9, 
10, 12, 13, 14], "boolean": [0, 1, 3, 6, 8, 9, 10, 12, 13, 14], "both": [0, 8, 10, 12, 13, 14], "bottom": 14, "boundari": 13, "box": 9, "brack": 0, "bracket": 0, "briefli": 9, "bright": 2, "brightest": 2, "broad": 14, "broadcast": 12, "brows": 5, "bsep1": 2, "bsep2": 2, "bsipt": 2, "bsln": 11, "bswp1": 2, "bswp2": 2, "bterm": 14, "bug": 7, "bulk": 9, "bulkhead": 9, "c": [1, 9, 10], "c_xdist": 2, "ca": [4, 5, 10], "cabl": [9, 14], "cach": 1, "cadenc": [0, 5], "cal": 0, "cal_fil": 1, "calcul": [0, 1, 4, 6, 7, 8, 10, 11, 12, 14], "calend": 5, "calibr": [0, 1, 5, 13, 14], "calibrate_temperatur": 14, "calibrationgaindata": 0, "calibrationgaindatainterv": 5, "calibrationgainread": 0, "call": [1, 2, 3, 5, 6, 9, 12, 13, 14], "callabl": [0, 13], "can": [0, 1, 4, 5, 6, 7, 8, 9, 11, 12, 13, 14], "canad0b": 9, "canada": 4, "canbg6b": 9, "canbj6b": 9, "canbl0b": 9, "canbl1b": 9, "cannot": [10, 14], "canon": 10, "capit": 9, "caput": [0, 4, 7, 8, 14], "card": [0, 9], "care": [4, 8, 9], "cas_a": 4, "casa": [1, 2, 4, 5], "case": [0, 1, 2, 5, 6, 9, 10, 13, 14], "cassett": 9, "cassiopeia": 4, "catalog": [4, 6, 7], "catalog_nam": 4, "catalogu": 7, "caus": [6, 13], "caution": 0, "celesti": [4, 5, 14], "center": 12, "centr": [0, 1, 12, 13], "centroid": 1, "centroid_i": 1, "centroid_x": 1, "certain": 5, "ch1": 9, "ch7": 9, "ch_pipelin": 12, "chain": 14, "chan": [0, 5, 14], "chan_id": [13, 14], "chang": [0, 4, 5, 7, 13, 14], "change_chime_loc": 14, "change_pathfinder_loc": 14, "chanmonitor": 2, "channel": [0, 2, 3, 5, 10, 12, 14], "channels_to_select": 10, "charact": [6, 14], "character": 10, "characterist": 14, "chb036c7": 9, "chdir": 0, "chebyshev": 1, "chebyshevposit": [4, 14], "check": [6, 8, 10, 13, 14], "check_amp": 13, "check_circular": 6, "check_phi": 13, "chi": 1, "chime": [0, 1, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14, 16], "chime_arch": 10, "chime_calibr": 10, "chime_local_datetim": 4, "chime_weath": 0, "chimealtitud": 4, "chimeantenna": 14, "chimec": 13, "chimedb": 5, 
"chimedberror": 5, "chimelatitud": 4, "chimelongitud": 4, "chimen2": 13, "chimenet": 4, "chimepb": 13, "chimetim": 13, "chisq": [1, 6], "choos": 5, "chosen": [5, 14], "chronolog": 0, "cir": 4, "circular": 6, "cirs_radec": 4, "citat": 6, "cl": 0, "class": [0, 1, 2, 5, 6, 7, 8, 9, 10, 13, 14], "classmethod": [0, 1, 2, 5, 6, 8, 9, 13], "clock": 8, "closest": 9, "closest_of_typ": 9, "closestdraw": 9, "cmap": 14, "co": [1, 8], "coax": 9, "coaxial": 9, "code": [3, 6, 12], "coeff": 13, "coeff_alpha": 13, "coeff_tau": 13, "coeffici": [0, 1, 4, 10, 13], "collect": [0, 6, 14], "color": 6, "column": [0, 7, 10, 14], "combin": [0, 2, 5, 10, 13, 14], "comm": 0, "comm_world": 0, "command": 8, "comment": [5, 9, 10], "common": [4, 5, 13], "commun": 0, "comp": [0, 9, 14], "compact": 6, "compar": [6, 12], "compare_connexion": 9, "comparison": 5, "compat": [12, 14], "complement": 12, "complet": 13, "complex": [1, 5, 11, 13], "complianc": 3, "compliant": 6, "complic": 9, "compoenet": 9, "compon": [0, 1, 9, 14], "component_doc": 9, "component_histori": 9, "component_typ": 9, "component_type_rev": 9, "comput": [0, 1, 2, 10, 12, 13, 14], "compute_tim": 0, "cond": 5, "condit": 5, "confid": [1, 6], "confus": 4, "conjug": [0, 13, 14], "connect": [9, 13, 14], "connect_databas": [5, 9], "connector": 14, "connexion": 9, "consid": [1, 5, 8, 9, 10, 12, 13], "consider": 5, "consist": [0, 1, 4, 6, 8, 13, 14], "constant": [1, 4, 5, 8, 9, 12], "constitut": 5, "construct": [9, 12, 13, 14], "construct_delay_templ": 13, "constructor": [4, 6], "contain": [0, 1, 2, 3, 4, 6, 7, 8, 9, 10, 12, 13, 14], "content": [1, 6, 15], "contigu": [0, 2], "continuity_test": 2, "continuum": 2, "contrast": [0, 12], "contribut": 10, "control": [0, 3, 8, 12], "conveni": [0, 5, 9, 13], "convent": [4, 6], "convergencewarn": 1, "convers": [13, 14], "convert": [0, 1, 4, 5, 6, 10, 14], "convert_tim": 0, "convinc": 12, "convolut": 9, "coord": [1, 2, 7], "coordin": [1, 4, 14], "copi": [0, 2, 5, 13, 14], "cor_prod_ref": 10, "corr": 
[5, 10, 11, 13, 14], "corr_arr": 14, "corr_input": [2, 14], "corr_input_list": 14, "corr_ord": 14, "corracqinfo": 5, "corrdata": [0, 3, 5, 10, 12, 13], "corrdatainterv": 5, "correct": [1, 4, 6, 10, 13, 14], "corrected_timestream": 14, "corrected_vi": 10, "correctli": 0, "correl": [0, 1, 2, 3, 5, 9, 10, 11, 13], "correlator_input": 13, "correspond": [0, 1, 2, 4, 5, 7, 8, 10, 13, 14], "corret": 13, "corrfileinfo": 5, "corrinput": [3, 14], "corrread": 0, "corrupt": 1, "could": [5, 9, 14], "count": 1, "counter": 10, "coupl": 9, "covari": [1, 2, 6, 10, 13, 14], "cover": 13, "coverag": 12, "crate": [13, 14], "creat": [0, 1, 2, 3, 4, 5, 6, 8, 9, 10, 13, 14], "create_c": 0, "create_dataset": 0, "create_flag": 0, "create_from_ant_log": 8, "create_from_dict": 8, "create_from_post_report": 8, "create_reverse_map": 0, "criteria": 9, "critic": 1, "cross": [1, 2, 3, 5, 13], "crosscorrel": 10, "csd": [4, 14], "csd_to_unix": 4, "css004c0": 9, "ctime": 10, "cube": 2, "cubic": 13, "current": [5, 6, 7, 8, 14], "curve_fit": 2, "curvedpowerlaw": 6, "custom": [6, 13], "customis": 9, "cut": [9, 12], "cutoff": [1, 12], "cxa0005a": 9, "cxa0018a": 9, "cxa0067b": 9, "cxa0139b": 9, "cxa0239c": 9, "cxs0015": 9, "cxs0016": 9, "cxs0017": 9, "cxs0042": 9, "cxs0058": 9, "cxs0090": 9, "cxs0266": 9, "cxs0279": 9, "cxs0281": 9, "cycl": 10, "cyg_a": 4, "cyga": [1, 2, 4], "cygnu": 4, "cyl": 14, "cylind": [2, 14], "cylindr": 14, "d": [5, 14], "dai": [2, 4, 5, 6, 8, 11], "damag": 9, "darl": 1, "dat": 11, "data": [0, 1, 2, 4, 6, 8, 10, 11, 12, 13, 14], "data_acq_filenames_full_path": 13, "data_class": 0, "data_flag_typ": 5, "data_group": 0, "data_index": 5, "data_path": 0, "databas": [2, 3, 5, 6, 8, 14], "dataflag": 5, "dataflagtyp": 5, "datafram": 0, "datainterv": 5, "dataintervallist": 5, "dataset": [0, 1, 3, 5, 10, 12, 13], "dataset_id": [0, 1], "dataset_name_allow": 0, "datastream": [0, 14], "date": [2, 4, 6, 7, 8], "datestr": 4, "datetim": [0, 4, 5, 6, 9, 14], "datetime_to_timestr": 4, 
"datetime_to_unix": 4, "dayenu": 12, "db": [5, 9], "dct": 6, "debug": [4, 9], "dec": [1, 2, 4, 6, 7, 8], "dec0": 1, "dec_cir": 4, "dec_in": 4, "decim": [4, 10], "declin": [1, 2, 4, 6, 7], "decod": 6, "decompos": 14, "decomposit": [13, 14], "decorrel": 14, "deep": 2, "default": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 13, 14], "defin": [1, 4, 5, 6, 8, 9], "definit": 10, "deflect": 4, "deg": 4, "degre": [1, 4, 5, 6, 7, 8, 14], "del_reverse_map": 0, "delai": [9, 12, 13, 14], "delet": [0, 6, 8, 13], "delete_coeff": 13, "delta_t": 1, "densiti": 6, "depend": [1, 2, 10, 12, 13], "deprec": [0, 4, 14], "deriv": [1, 5, 6, 13], "descend": 6, "describ": [1, 3, 8, 9, 14], "descript": [0, 1, 4, 5, 12], "design": 9, "desir": [0, 5], "detail": [2, 3, 9], "determin": [0, 1, 2, 3, 4, 10, 13, 14], "deviat": [1, 3, 12, 13], "df": 0, "di": 5, "diagon": [10, 14], "dict": [0, 1, 6, 8, 10], "dictionari": [0, 1, 4, 5, 6, 8, 9, 10], "did": [9, 14], "differ": [1, 2, 4, 7, 9, 10, 11, 14], "digit": [0, 3, 5, 6], "digitalgain": 0, "digitalgaindata": 0, "digitalgaindatainterv": 5, "digitalgainread": 0, "dimens": [1, 10], "dimension": [1, 11], "dimmens": 10, "direct": [1, 4, 9, 12], "directli": [10, 12, 13], "directori": [4, 6], "dirti": 1, "dirty_beam": 1, "dirty_gauss": 1, "disagr": 2, "discard": [10, 13], "dish": 14, "disk": 0, "dist": 2, "distanc": [4, 8, 9, 14], "distinct": 12, "distinguish": 13, "distribut": [0, 13], "divid": [1, 10, 12], "do": [0, 1, 2, 5, 8, 9, 10, 13, 14], "doc": [0, 4, 10], "doc_0103": 10, "doclib": [3, 10], "document": [0, 2, 3, 4, 9], "doe": [0, 1, 2, 4, 8, 9, 12, 13], "doesn": [2, 9], "doesnotexist": [8, 9], "don": [5, 9], "done": [0, 9, 12, 14], "doppler": [4, 7], "dot": [0, 6], "down": 0, "drao": [4, 8], "drawn": 1, "driven": 9, "drop": 0, "dry": 8, "dry_run": 8, "dryrun": 8, "dt": 4, "dtype": [0, 6, 13, 14], "due": [0, 7, 13], "dump": 6, "dup_obs_list": 8, "duplic": 8, "durat": 8, "duration_lst": 8, "dure": [6, 8, 10, 13, 14], "duti": 10, "e": [0, 1, 2, 4, 5, 6, 9, 
10, 12, 13, 14], "e_cylind": 9, "each": [0, 1, 5, 6, 8, 9, 10, 12, 13, 14], "earth": [2, 4, 7], "easi": [0, 13, 14], "easier": 10, "easili": [5, 9], "east": 14, "eastward": 14, "edg": [0, 1], "effect": [1, 4, 7, 8, 12], "effici": [0, 9], "eflux": 6, "eg": 8, "eigen_decomposit": 13, "eigenvalu": [10, 13, 14], "eigenvector": 13, "eigh_no_diagon": [10, 14], "eigval": 14, "either": [1, 4, 7, 9, 11, 14], "el": 1, "elaps": 1, "element": [0, 5, 6, 8, 10, 12, 14], "elementwis": 1, "elev": 14, "elimin": 6, "els": [9, 12, 13], "empti": 5, "en": 5, "enabl": [1, 10, 13], "encod": [6, 9], "encount": 9, "end": [0, 1, 4, 5, 8, 9, 12, 14], "end_ra": 5, "end_tim": [4, 5, 9], "ensur": [6, 14], "ensure_ascii": 6, "ensure_list": 14, "ensure_unix": 4, "enter": [5, 9], "enter_ltf": 9, "entri": [0, 1, 4, 5, 6, 7, 8, 14], "envelop": 1, "ephem": [4, 5], "ephemeri": [2, 5, 7], "epoch": [4, 6], "epsilon": 12, "equal": [1, 6, 11, 13], "equat": 3, "equinox": 4, "equival": [0, 4, 5, 12, 13, 14], "err": [1, 10], "error": [1, 6, 9, 10, 13], "escap": 6, "establish": 9, "estim": [1, 6, 8, 10, 12, 13], "estimate_directional_scal": 1, "et": [6, 12], "eta": 12, "etc": [2, 5, 6, 9, 14], "ev": [8, 10], "eval": [10, 14], "evalu": [1, 10, 13], "evec": 14, "even": [1, 13], "event": [5, 6, 7, 9], "event_aft": 9, "event_al": 9, "event_at": 9, "event_befor": 9, "event_typ": 9, "everi": [1, 8, 10, 12], "ewal": 12, "exact": 8, "exactli": [4, 5], "examin": 9, "exampl": [0, 3, 5, 7, 8, 9, 10, 11, 13, 14], "except": [0, 4, 5, 9], "excess": 3, "exclud": [5, 9, 13], "exclude_data_flag_typ": 5, "exclude_daytim": 5, "exclude_global_flag": 5, "exclude_nighttim": 5, "exclude_ra_interv": 5, "exclude_sun": 5, "exclude_time_interv": 5, "exclude_transit": 5, "excurs": 12, "exist": [0, 2, 6, 7, 8, 9, 13, 14], "exp_arr": 14, "expand": [5, 14], "expect": [1, 3, 4, 9], "explicitli": [9, 14], "explor": 9, "expon": 0, "express": [0, 1, 5, 13], "extens": [6, 8], "external_repo": 9, "extra": 5, "extra_cond": 5, "extrac": 0, 
"extract": [2, 8, 9, 12], "extrap_limit": 13, "extrapol": 13, "f": [5, 10], "fake": 14, "fake_tone_databas": 14, "fall": 5, "fals": [0, 1, 3, 4, 6, 8, 9, 10, 12, 13, 14], "fanci": 0, "fashion": 0, "fast": 10, "fast_pack_product_arrai": 14, "faster": [12, 14], "fastest": 0, "favour": 4, "fbin_ref": 10, "featur": [7, 12], "feed": [1, 2, 14], "feedloc": 2, "fetch": 14, "few": [8, 14], "fft": 2, "fi": 14, "field": [1, 6, 7, 13], "figur": 8, "fil_window": 12, "file": [0, 1, 3, 4, 5, 6, 7, 8, 9, 11, 13, 14], "file_condit": 5, "file_format": 0, "file_nam": 5, "filenam": [0, 5, 6, 8, 11], "files_in_rang": 5, "fill_valu": 12, "filt_event_type_id": 5, "filter": [3, 5, 11, 12], "filter_acq": 5, "filter_acqs_by_fil": 5, "final": [5, 12], "find": [0, 1, 4, 5, 9, 10, 12, 13, 14], "finder": [2, 13], "fine": 0, "finish": 8, "finish_tim": 8, "first": [0, 2, 4, 5, 6, 9, 10, 13, 14], "fit": [1, 2, 3, 6, 13], "fit_histogram": 1, "fit_model": 6, "fit_point_source_map": 1, "fit_poly_to_phas": 13, "fit_tol": 3, "fitampphas": 1, "fitgaussamppolyphas": 1, "fitpoli": 1, "fitpolylogamppolyphas": 1, "fitpolyrealpolyimag": 1, "fitrealimag": 1, "fitspectrum": 6, "fittransit": 1, "fix": [1, 9, 14], "fixedbodi": 4, "fj": 14, "fla": [0, 5, 9, 14], "fla0073b": 9, "fla0159b": 9, "fla0196b": 9, "fla0269b": 9, "fla0280b": 9, "flag": [0, 1, 3, 5, 6, 8, 9, 12, 13, 14], "flag1d": 12, "flag_dataset": 12, "flag_outli": 1, "flag_typ": 5, "flage": 13, "flaginput": 0, "flaginputdata": 0, "flaginputdatainterv": 5, "flaginputread": 0, "flatten": [1, 12], "float": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 13, 14], "fluctuat": 13, "flux": 6, "flux_body_dict": 6, "flux_uncertainti": 6, "fluxcat": 7, "fluxcatalog": [6, 7], "fmap": 14, "fname": 0, "foat": 1, "fold": 10, "follow": [1, 7, 9, 10, 12, 13, 14], "foo": 0, "forc": [3, 9], "foreign": 8, "form": [6, 8, 10, 14], "format": [0, 4, 5, 6, 8, 10, 14], "format_source_nam": 6, "formatted_nam": 6, "found": [0, 6, 7, 9, 13, 14], "four": [1, 2, 5], "fourier": 1, "fpga": [10, 
14], "frac_lost": 13, "fraction": [1, 3, 6, 8, 13], "frame": [0, 10, 13], "frb": [4, 14], "free": 8, "freedom": 1, "frenquenc": 2, "freq": [0, 1, 2, 6, 12, 13, 14], "freq_ab": 7, "freq_centr": 12, "freq_flat": 12, "freq_high": 0, "freq_low": 0, "freq_nomin": 6, "freq_ob": 7, "freq_pivot": 6, "freq_rest": 7, "freq_sel": [0, 2, 5, 11], "freq_step": 0, "freq_width": 12, "freqeunci": 12, "frequenc": [0, 1, 2, 3, 6, 7, 10, 11, 12, 13, 14], "frequency_mask": 12, "fringestop_phas": 14, "fringestop_tim": 14, "fringestopped_timestream": 14, "from": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 13, 14], "from_acq": 11, "from_acq_h5": [0, 5, 13], "from_acq_h5_fast": 0, "from_db": 9, "from_dict": [6, 13], "from_graph": 9, "from_lst": 8, "from_predef": 9, "fromat": 10, "fromdat": 2, "fromdata": 2, "ft_prm": 2, "full": [0, 1, 5, 9, 11, 13, 14], "full_dai": 11, "full_day_arg": 11, "func_2d_gauss": 1, "func_2d_sinc_gauss": 1, "func_dirty_gauss": 1, "func_real_dirty_gauss": 1, "function": [0, 1, 2, 5, 6, 7, 9, 10, 11, 12, 13, 14], "further": 2, "futur": [2, 13], "fwhm": 1, "fwhm_x": 1, "fwhm_y": 1, "fwidth": 12, "g": [0, 1, 5, 6, 9, 10, 14], "g0": 10, "g_ut": 10, "gain": [0, 1, 3, 5, 10, 13, 14], "gain_coeff": 0, "gain_exp": 0, "gain_tol": 3, "gaindata": 0, "gainflagdata": 0, "gains2utvec": 10, "gains2utvec_tf": 10, "galt": [4, 8, 14], "galt_pointing_model_dec": 4, "galt_pointing_model_ha": 4, "gamma": 10, "gate": 10, "gated_vis1": 10, "gaussian": [1, 2], "gbo": [12, 14], "gboantenna": 14, "gen_prod_sel": 10, "gener": [0, 3, 5, 8, 9, 10, 12, 13, 14, 16], "geometr": 14, "get": [0, 5, 6, 7, 9, 12, 14], "get_alpha": 13, "get_als_gain": 10, "get_autocorrel": 12, "get_c_ydist": 2, "get_c_ydist_perfreq": 2, "get_correlator_input": 14, "get_default_frequency_map_stream": 14, "get_doppler_shifted_freq": [4, 7], "get_epoch": 6, "get_feed_polaris": 14, "get_feed_posit": 14, "get_gain": 13, "get_global_flag_tim": 9, "get_holographic_index": 14, "get_ni_gain": 10, "get_noise_channel": 14, 
"get_noise_source_index": 14, "get_properti": 9, "get_range_r": 4, "get_reference_times_dataset_id": 1, "get_reference_times_fil": 1, "get_result": [2, 5], "get_results_acq": 5, "get_source_dictionari": 4, "get_source_index": 0, "get_stacked_alpha": 13, "get_stacked_tau": 13, "get_sunfree_src": 2, "get_tau": 13, "get_timing_correct": 13, "get_xdist": 2, "get_zpo": 14, "gf_accept": 5, "gf_rais": 5, "gf_reject": 5, "gf_warn": 5, "github": 4, "give": [0, 1, 4, 5, 14], "given": [0, 1, 3, 4, 5, 6, 9, 13, 14], "glob": [0, 8], "global": [5, 6, 9, 14], "global_flag": 9, "global_flag_mod": 5, "global_flags_between": 9, "go": [9, 14], "gong": 5, "good": [1, 2, 3, 8, 13, 14], "good_channel": 3, "good_fit": 3, "good_freq": 2, "good_gain": 3, "good_ipt": 2, "good_nois": 3, "good_prod": 2, "good_prod_freq": 2, "gpu": 10, "gpu_intergration_period": 10, "grain": 0, "graph": [9, 14], "graph_obj": 9, "gravit": 4, "greater": [1, 12, 13], "grid": 0, "group": 0, "group_name_allow": 0, "guarante": 6, "guess": [1, 2], "guess_fwhm": 1, "h5": [0, 10], "h5_data": [0, 13], "h5py": 0, "ha": [1, 4, 5, 8, 9, 10, 12, 13, 14], "ha_cir": 4, "ha_in": 4, "had": [1, 14], "hadec_to_bmxi": 4, "half": [1, 10, 12], "hand": 14, "happen": 0, "hard": 4, "has_amplitud": 13, "has_coeff_alpha": 13, "has_coeff_tau": 13, "has_num_freq": 13, "hasn": 9, "have": [0, 1, 2, 5, 6, 9, 10, 12, 13, 14], "hco": 12, "hcoantenna": 14, "hcro": 14, "hdf5": 0, "header": [0, 10], "help": 9, "helper": 5, "here": [2, 5, 9, 14], "hermit": 1, "hermitian": 10, "hfb": [5, 7], "hfbcat": 4, "hfbcatalog": 7, "hi": 14, "hidden": 9, "hide": 9, "hierarchi": 0, "high": [0, 3, 12, 13], "higher": 3, "highpass_delay_filt": 12, "histogram": 1, "histor": 4, "histori": [8, 9], "hk": [0, 5, 9, 14], "hk_chan": 14, "hk_data": 5, "hk_to_sensor": 14, "hkdata": 0, "hkdatainterv": 5, "hkfileinfo": 5, "hkinput": 14, "hkp": 0, "hkpdata": 0, "hkpreader": 0, "hkr00": 9, "hkreader": 0, "hl": 8, "hold": [0, 6, 13], "holograph": [8, 14], "holographi": 14, 
"holographyantenna": 14, "holographyobserv": 8, "holographyobservationdoesnotexist": 8, "holographysourc": 8, "holographysourcedoesnotexist": 8, "host": 5, "hour": [1, 4, 8, 13], "housekeep": [0, 5], "how": [0, 1, 3, 5, 9, 13], "howev": [5, 9, 13, 14], "hpa0002a": 9, "html": 5, "http": [4, 5, 10], "hut": 13, "hydra": 9, "i": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14], "icmap": 14, "icr": [4, 8], "id": [0, 1, 5, 9, 14], "ideal": 6, "identifi": 13, "ie": 5, "ignor": [4, 5, 9, 11, 13, 14], "ignore_amp": 13, "ignore_draw": 9, "illustr": 9, "imag": [1, 11], "imaginari": 1, "immedi": 5, "implement": [5, 9, 10, 12], "import": [0, 5, 7, 8, 9, 10, 14], "includ": [0, 1, 2, 4, 5, 6, 8, 9, 11, 13, 14], "include_": 5, "include_26m_ob": 5, "include_global_flag": 5, "include_ra_interv": 5, "include_time_interv": 5, "include_transit": 5, "inclus": [5, 14], "incom": 6, "inconsist": 9, "increas": [1, 13, 14], "increasingli": 13, "indent": 6, "independ": [1, 2], "index": [0, 1, 3, 10, 11, 12, 13, 14, 15], "index_map": [0, 13], "indic": [0, 1, 6, 10, 12, 13, 14], "individu": [5, 6], "indmax": 1, "indmin": 1, "inf": 12, "infin": 6, "infinit": 6, "info": [3, 6, 9], "inform": [5, 6, 10, 12, 13, 14], "inherit": [0, 9, 13], "inid": 13, "initi": [1, 2, 5, 8], "inject": [3, 10, 14], "inner": 9, "inp": 14, "inplac": 14, "input": [0, 1, 2, 3, 5, 6, 7, 8, 9, 10, 12, 13], "input_a": 14, "input_b": 14, "input_dict": 14, "input_flag": [0, 13], "input_map": [2, 14], "input_nam": 6, "input_sel": 0, "input_sn": 14, "insert": [6, 9], "instal": 0, "instanc": [0, 1, 2, 5, 6, 7, 8, 9, 13, 14], "instanti": [0, 1, 6, 7, 13], "instead": [0, 1, 2, 4, 5, 6, 9, 10, 12, 13], "instruct": 9, "instrument": [12, 13, 14], "int": [0, 1, 3, 5, 6, 7, 8, 10, 12, 13, 14], "intact": 14, "integ": [0, 3, 5, 6, 9, 10, 12, 13, 14], "integr": [5, 9, 10, 13], "intend": [0, 5], "interact": 5, "interest": 9, "interfac": [2, 5, 9], "interfer": 12, "interferometr": 14, "intermedi": [4, 14], "intern": [0, 4, 6, 9, 14], 
"interp": 13, "interp1d": [1, 13], "interp_gain": 1, "interp_start": 1, "interp_stop": 1, "interp_weight": 1, "interpol": [1, 13], "interpolate_gain": 1, "interpolate_gain_quiet": 1, "interpret": [0, 5, 14], "interv": [5, 6, 9, 12], "interval_list": 5, "intput": 0, "intric": 5, "invalid": 1, "invari": 12, "invers": [0, 1, 12, 13, 14], "invert_no_zero": 14, "investig": 9, "involv": 9, "io": 4, "is_arrai": 14, "is_array_i": 14, "is_array_on": 14, "is_array_x": 14, "is_chim": 14, "is_chime_on": 14, "is_holograph": 14, "is_noise_sourc": 14, "is_pathfind": 14, "is_regex": 0, "isarr": 14, "ischim": 14, "isholo": 14, "isnois": 14, "ispathfind": 14, "issu": 4, "item": [0, 1, 6], "item_separ": 6, "iter": [5, 6, 9, 10, 12, 13, 14], "iter_loaded_data": 5, "iter_read": 5, "iterative_hpf_mask": 12, "iteritem": 6, "its": [0, 4, 5, 9, 10, 13], "itself": 0, "ix": 14, "ixholo": 14, "ixn": 14, "j": 14, "j2000": 4, "jan": [0, 4], "janski": 6, "javascript": 6, "jitter": 13, "john": 8, "join": [5, 13], "jpllib": [4, 14], "jrs65": [10, 14], "json": [4, 6, 7], "json_numpy_obj_hook": 6, "jsonencod": 6, "judici": 9, "julian": 4, "jun18": 8, "just": [3, 5, 9, 10], "k": 10, "k7bp16": [9, 14], "kei": [5, 6, 8, 10], "kelvin": 14, "kept": 10, "key_separ": 6, "keyword": [1, 6, 10, 13, 14], "khatri": 10, "kind": [4, 13], "kko": [12, 14], "kkoantenna": 14, "knee": 4, "knl": 2, "known": [8, 12, 13], "kr": 10, "kroneck": 10, "ktrprod": 10, "kwarg": [0, 1, 4, 5, 6, 8, 9, 11, 13, 14], "la": 10, "label": [0, 6, 13], "lack": 3, "laid": 0, "languag": 9, "larg": [12, 13], "larger": 1, "largest": [12, 13], "last": [9, 10, 12, 13], "later": [4, 14], "latest": 5, "latitud": 4, "latter": [4, 14], "lay_tim": 14, "layout": [0, 2, 5, 14], "layoutintegr": 9, "lead": 6, "least": [1, 10, 13], "leav": [0, 13, 14], "left": 14, "legend": 6, "len": [5, 6], "length": [0, 1, 5, 6, 8, 11, 13, 14], "length_scal": 1, "lens": 4, "less": [1, 13], "let": [9, 14], "letter": [5, 6, 9], "level": [0, 1, 6, 9, 13, 14], "lewi": 4, 
"librari": [4, 10], "life": 4, "light": 4, "like": [0, 1, 2, 9, 10, 12, 13, 14], "limit": 12, "limit_rang": 12, "line": [8, 9, 13], "linear": [1, 13], "linearli": 13, "link": [9, 14], "list": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 14], "littl": [4, 7], "ln": 6, "lna": [5, 14], "lna0249b": 9, "lna0296b": 9, "lna_to_antenna": 14, "lo": 14, "load": [0, 2, 5, 6, 9, 13], "load_timing_correct": 13, "loaded_collect": 6, "loaded_data_iter": 5, "loaded_data_list": 5, "local": [1, 4, 6, 12], "locat": [1, 2, 5, 14], "log": [1, 6, 8, 9], "logger": 1, "logic": 12, "loginnam": 8, "long": 9, "longer": 4, "longest": 13, "longitud": 4, "look": [0, 3, 5, 9, 10], "lookup": 4, "loop": [5, 12], "lost": 13, "lot": 9, "low": 14, "lower": [0, 1, 5], "lowest": 14, "lsa": 4, "lst": [4, 8], "ltf": 9, "lunar": 4, "lunar_ris": 4, "lunar_set": 4, "lunar_transit": 4, "m": [4, 5, 9], "machin": 5, "mad": [1, 12], "mad_cut_1d": 12, "mad_cut_2d": 12, "mad_cut_rol": 12, "mag": 11, "magic": 6, "magnitud": [10, 13], "mai": [0, 5, 9, 10, 14], "main": [0, 10], "main_id": 6, "make": [0, 2, 4, 5, 7, 9, 10, 11, 14], "make_connexion": 9, "manag": 9, "mani": [4, 5, 7, 9], "map": [0, 1, 10, 13], "map_input_to_noise_sourc": 13, "mark": [9, 12], "mask": [12, 13], "mask_rfi": 13, "masked_channel": 10, "mat2utvec": 10, "match": [0, 5, 9, 13, 14], "matern": 1, "matric": [10, 14], "matrix": [1, 2, 10, 13], "max": 0, "max_freq": 13, "max_iter_weight": 13, "maximum": [1, 8, 10, 12, 13], "maxstep": 10, "mb": 5, "mean": [0, 4, 14], "meant": [13, 14], "measur": [1, 6, 7, 13], "med_filt": 11, "med_filt_arg": 11, "median": [1, 11, 12, 13], "member": 6, "memdiskgroup": 0, "memeb": 9, "memgroup": [0, 1], "memh5": [0, 1], "memori": [0, 5], "meridian": 4, "messag": [1, 9], "meta": [0, 2], "metaclass": 6, "metafluxcatalog": 6, "meter": 14, "meth": 5, "method": [0, 1, 2, 5, 6, 8, 9, 10, 12, 13, 14], "metr": 4, "metric": 0, "metric_nam": 0, "mhz": [0, 1, 2, 6, 7, 12, 13, 14], "micro": 12, "microsecond": 12, "might": 2, "min": 
0, "min_frac_kept": 13, "min_freq": 13, "min_interv": 5, "mingun": 5, "mingun_weath": 0, "minimis": [4, 14], "minimum": [1, 5, 13], "misbehav": 3, "miss": [1, 8, 13], "mistakenli": 9, "mjd": 4, "mnt": 5, "mnt_root": 5, "mode": [5, 13], "model": [1, 4, 6, 8, 13], "model_kwarg": 6, "model_lookup": 6, "model_poly_phas": 13, "modifi": [4, 5, 13], "modul": [1, 4, 5, 6, 8, 9, 10, 12, 13, 15], "monitor": [2, 14], "monoton": [1, 13], "month": 5, "more": [0, 1, 5, 6, 9, 10, 12, 13, 14], "most": [0, 5, 6, 9], "mostli": 12, "motherboard": 14, "motion": 7, "mount": 5, "move": [4, 9], "mpi": [0, 14], "mu": [1, 2], "much": [0, 12], "multi": 0, "multipl": [0, 1, 4, 7, 13], "multiplex": [0, 14], "multipli": [1, 13], "must": [1, 4, 6, 8, 9, 10, 11, 12, 13, 14], "mux": [0, 5, 14], "mysql": 9, "n": [0, 1, 2, 5, 6, 9, 10, 14], "n_channel": 10, "n_feed": 12, "n_prod": 12, "nadc_channel": 10, "naiv": 4, "name": [0, 1, 3, 4, 5, 6, 7, 8, 9, 12, 13, 14], "nan": [1, 6, 14], "narrow": 5, "narrowband": 12, "nchan": 0, "nchannel": 10, "ncompon": 1, "ndarrai": [0, 1, 6, 12, 13, 14], "ndec": 1, "ndev": 12, "ndof": [1, 6], "ndofa": 1, "ndofi": 1, "ndofp": 1, "ndofr": 1, "nearbi": 12, "nearest": 13, "necessari": [5, 13], "necessaritli": 10, "need": [0, 1, 4, 9, 13, 14], "neg": 6, "neglig": 4, "neighbor": 9, "neighbour": 9, "neighbour_of_typ": 9, "nel": 1, "networkx": 9, "networkxerror": 9, "nevalu": 10, "never": [5, 9], "new": [0, 4, 5, 9, 10, 11, 12, 13, 14], "new_bodi": 4, "newdata": 10, "newer": 3, "newli": 14, "newlin": 6, "next": [13, 14], "nfeed": 14, "nfreq": [0, 1, 5, 10, 12, 13, 14], "nha": 1, "ni": 10, "ni_al": 10, "ni_data": 10, "ni_gain": 10, "ni_gains_evalu": 10, "ni_gains_evalues_tf": 10, "ni_on_bin": 10, "ni_param": 10, "ni_period": 10, "ni_util": 3, "nidata": 10, "niedermay": 5, "night": [2, 5], "night_acq_list": 2, "night_find": 2, "ninput": [0, 1, 12, 13, 14], "niter": [10, 12, 14], "nlist": 9, "nmax": 2, "nmea": 6, "nmux": 0, "no_weight": 1, "node": [5, 9], "node_list": 5, 
"node_nam": 5, "node_properti": 9, "node_spoof": 5, "nois": [3, 10, 12, 13, 14], "noise_sourc": 13, "noise_sync": 3, "noise_tol": 3, "noisesourc": 14, "nomin": 14, "non": [0, 1, 6, 13, 14], "none": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14], "nonlinear": 13, "nonzero": 13, "nopermiss": 9, "nor": 14, "norm": [10, 14], "normal": [1, 2, 5, 6, 10, 11, 12, 13, 14], "normalis": 14, "normalise_correl": [10, 14], "normalize_vi": 10, "north": [4, 14], "northward": 14, "nosubgraph": 9, "not_outli": 1, "nota": 8, "note": [0, 4, 5, 7, 8, 9, 12, 13, 14], "notfound": [9, 14], "noth": 9, "notic": 4, "novel": 14, "now": [0, 5, 9, 10, 12, 14], "np": [0, 1, 6, 7, 10, 11, 12, 13, 14], "nparam": [1, 6, 13], "nperit": 12, "nprod": [0, 5, 12, 13, 14], "nra": [1, 14], "nref": 13, "nsampl": [1, 13], "nsched": 8, "nsigma": 1, "nsigma_amp": 13, "nsigma_phi": 13, "nsourc": [0, 13], "nstack": [13, 14], "ntime": [0, 12, 13], "ntimefram": 10, "nu": [1, 6, 13], "null": 14, "num": 14, "num_freq": 13, "number": [0, 1, 4, 5, 6, 9, 10, 12, 13, 14], "number_devi": 12, "numpi": [0, 1, 2, 5, 6, 11, 12, 14], "numpyencod": 6, "nutat": 4, "o": 0, "ob": [4, 7, 8, 14], "obj": [0, 6, 12, 14], "object": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 14], "object_coord": 4, "obs_list": 8, "observ": [1, 4, 5, 7, 8, 10, 12, 14], "observatori": 4, "obtain": [1, 2, 4, 6, 7, 8, 10, 11, 12, 13], "occur": [0, 6, 13], "odd": 1, "off": [9, 10], "offic": 8, "offlin": 5, "offset": [1, 4, 5, 13, 14], "ok": 9, "old": [2, 5, 10, 11, 14], "older": [3, 13], "one": [0, 1, 2, 4, 5, 7, 8, 9, 10, 11, 13, 14], "ones": [4, 11], "onewai": 9, "onli": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 13, 14], "only_": 5, "only_auto": 12, "only_chime_weath": 5, "only_corr": 5, "only_correct": 13, "only_digitalgain": 5, "only_flaginput": 5, "only_freq": 12, "only_gain": 5, "only_hfb": 5, "only_hk": 5, "only_hkp": 5, "only_off": 10, "only_rawadc": 5, "only_tim": 12, "only_weath": 5, "onsource_dist": 8, "onsource_dist_to_flag": 8, "onto": 0, 
"open": 4, "oper": [8, 12, 14], "optim": [2, 12], "option": [0, 3, 4, 5, 8, 10, 12, 13, 14], "order": [0, 1, 3, 6, 9, 10, 14], "order_asc": 9, "order_desc": 9, "order_frequency_map_stream": 14, "ordin": 8, "org": 5, "orient": 14, "origin": [4, 6, 10, 13, 14], "orm": 9, "other": [3, 9, 10, 12, 13, 14], "otherwis": [1, 3, 4, 6, 12, 13, 14], "our": [4, 9], "out": [0, 3, 8, 9, 12, 14], "out_fil": 11, "out_group": 0, "outer": [9, 10, 13], "outlier": [1, 12, 13], "output": [1, 3, 6, 8, 10, 12, 13, 14], "output_param": 8, "outrigg": 14, "outsid": 0, "outvector": 10, "over": [0, 1, 4, 5, 9, 11, 12, 13], "overal": 2, "overid": 14, "overlap": [4, 5, 9], "overload": 0, "overrid": 13, "overwrit": [6, 7, 8], "overwritten": 7, "p": 9, "p1_idx": 2, "p2_idx": 2, "pacif": 4, "pack": [13, 14], "pack_product_arrai": 14, "packag": 14, "packet": 0, "page": [9, 15], "pair": [0, 5, 9, 14], "panda": 0, "par": 1, "param": [1, 6, 13], "param_corr": 1, "param_cov": [1, 6], "param_nam": 1, "paramet": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14], "parameter": 1, "parameter_nam": 1, "parametr": 10, "params_ft": 2, "pars": [0, 8, 14], "parse_ant_log": 8, "parse_chime_seri": 14, "parse_d": 4, "parse_old_seri": 14, "parse_pathfinder_seri": 14, "parse_post_report": 8, "part": [0, 5, 11, 14], "part_sel": 11, "particular": [0, 6, 13], "particularli": 5, "pass": [0, 1, 3, 5, 8, 9, 12, 13, 14], "pass0": 10, "pass1g": 10, "pass_rst": 2, "past": 1, "path": [5, 6, 8, 9, 13], "pathfind": [4, 5, 14], "pathfinderantenna": 14, "pattern": [0, 1, 13], "pcoantenna": 14, "pcov": [2, 13], "pdf": [3, 10], "pdt": [4, 8], "peak": [1, 4, 7], "peak_amplitud": 1, "peak_ra": 4, "peewe": [5, 8, 9], "peform": 10, "per": [1, 2, 9, 10, 14], "perform": [0, 1, 3, 5, 9, 10, 13, 14], "period": [1, 5, 8, 10, 13], "permiss": 9, "permit": 0, "ph1": 2, "ph2": 2, "pha": [5, 10], "phase": [1, 11, 13, 14], "phi": 13, "php": 5, "physic": 0, "pi": [13, 14], "pick": [0, 5, 13], "pickl": 6, "pinv": 12, "pipelin": [0, 12, 13], 
"pivot": 6, "pixel": 1, "pl0108b1": 9, "pl0108b2": 9, "place": [6, 12, 14], "plan": 13, "plot": [3, 6], "plot_fit_": 3, "plug": 9, "png": 11, "po": 14, "point": [1, 2, 4, 5, 7, 8, 10, 12, 13], "pol": [1, 14], "pol1_ori": 9, "pol2_ori": 9, "polar": [1, 2, 4], "polaris": [0, 14], "poly_deg": 1, "poly_deg_amp": 1, "poly_deg_phi": 1, "poly_typ": 1, "polynomi": [1, 6, 13], "poor": [0, 8], "popt": [2, 13], "popul": [1, 6], "pord": 11, "posit": [2, 4, 7, 12, 14], "positionlib": 4, "posix": [0, 5], "possibl": [5, 9, 14], "post": 0, "post_report": 8, "post_report_fil": 8, "postiv": 14, "potenti": 6, "power": [1, 11, 12, 14], "powerlaw": 6, "practic": 4, "pre": 1, "preamp": 9, "preced": 0, "precess": 4, "precis": [5, 14], "predef": 9, "predef_subgraph_spec": 9, "predef_subgraph_spec_param": 9, "predefin": 9, "predict": [1, 6], "predict_flux": 6, "predict_uncertainti": 6, "prefer": [0, 9], "prematur": 9, "prescend": 13, "presenc": 12, "present": [0, 5, 6, 13], "pretti": 6, "prevent": 6, "previou": [1, 6, 10, 13], "previous": [6, 11, 12], "primari": 1, "print": [0, 3, 5, 6, 9, 13, 14], "print_acq_info": 5, "print_available_collect": 6, "print_loaded_collect": 6, "print_measur": 6, "print_results_summari": 5, "prior": [12, 13, 14], "privat": 9, "probabl": [1, 4, 9], "probe": 13, "problem": 13, "proce": 5, "procedur": 12, "process": [1, 10, 12], "process_gated_data": 10, "process_synced_data": [3, 10], "prod": [0, 2, 13, 14], "prod_arr": 14, "prod_map": 14, "prod_sel": [0, 2, 5, 10, 11], "prodstack": 0, "produc": 0, "product": [0, 2, 5, 10, 11, 12, 13], "profer": 0, "program": 8, "project": 14, "prometheu": 5, "propag": 13, "proper": [1, 6], "properti": [0, 1, 5, 6, 9, 10, 13, 14], "property_compon": 9, "property_typ": 9, "propertytyp": 9, "propertyunchang": 9, "props": 9, "prototyp": 14, "provic": 6, "provid": [0, 1, 2, 5, 6, 9, 10, 11, 13], "pst": [4, 8], "pstn": 2, "pstns0": 2, "pte": 1, "pull": [9, 14], "pure": 13, "purpos": [4, 12], "pwd": [2, 14], "python": [4, 5, 6], 
"quadrat": 13, "quadratur": 1, "qualiti": [2, 5, 8], "quality_flag": 8, "quality_good": 8, "quality_offsourc": 8, "quantil": [1, 13], "quantiti": [1, 13], "queri": [2, 5, 9, 14], "quickli": 9, "quit": 14, "r": [5, 9, 10], "ra": [1, 4, 5, 6, 7, 8], "ra0": 1, "radian": [1, 4, 13], "radio": [2, 4, 6, 12], "radiomet": 3, "rain": 5, "rais": [0, 5, 6, 9, 14], "random": 1, "rang": [0, 1, 2, 4, 5, 10, 12, 14], "range_r": 4, "rank": [10, 12, 14], "rankn_approx": 14, "rao": 10, "rate": [1, 4], "rather": [0, 4, 9], "raw": [0, 1, 5, 9, 10, 14], "rawadcdata": 0, "rawadcread": 0, "re": [4, 5], "reach": 12, "read": [0, 5, 6, 7, 8, 9, 10], "readdata": 10, "reader": [0, 5, 10], "reader_iter": 5, "reader_read_obj": 10, "readi": 10, "readout": 9, "readthedoc": 5, "real": [0, 1, 11, 13], "real_dirty_gauss": 1, "real_map": 1, "realli": 9, "reason": 9, "rebin": 11, "receiv": 2, "recent": [0, 12], "recommend": [4, 10, 13], "recov": 10, "recurs": 6, "recursionerror": 6, "redefine_stack_index_map": 14, "reduct": 13, "redund": [12, 13], "refer": [0, 1, 4, 5, 6, 9, 10, 13, 14, 15], "referenc": 13, "reference_noise_sourc": 13, "reflect": 13, "reflector": [9, 14], "reftim": 1, "reftime_prev": 1, "reftime_result": 1, "regardless": 12, "regener": 12, "region": 1, "regist": 9, "regress": [1, 6], "regrid": 0, "regular": 0, "reject": [5, 9, 12], "rel": [1, 10, 13, 14], "rel_tol": 10, "relat": 6, "relationship": 1, "relativist": 7, "relev": [1, 2, 12, 13], "remain": [0, 5, 9, 12, 13, 14], "remaind": [12, 14], "remov": [2, 9, 10, 12, 13], "remove_compon": 9, "renorm": 0, "reorder_correlator_input": 14, "repeat": [12, 13, 14], "replac": [1, 6, 8, 10, 14], "replace_dup": 8, "report": 8, "repositori": 0, "repres": [0, 1, 4, 5, 10, 14], "represent": [4, 6, 9], "reproduc": 10, "request": [0, 1, 4, 13], "requir": [0, 1, 5, 6, 9, 10, 13], "require_qu": 5, "res_plot": 3, "resampl": 0, "reset": 8, "resid": 5, "residu": [6, 13], "resolv": 7, "resort": 5, "resp": [1, 13], "resp_err": 1, "resp_error": 13, 
"respect": [0, 1, 4, 10, 13], "respons": 1, "rest": 7, "restrict": [0, 2, 5], "result": [1, 2, 3, 5, 10, 12, 13, 14], "results_list": 5, "retriev": [5, 9], "return": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 13, 14], "return_histogram": 1, "return_post_report_param": 8, "rev_01": 10, "revers": [0, 6], "reverse_map": [0, 13], "reverse_stack": [13, 14], "rf": 14, "rf_thru": 14, "rfi": [13, 14], "rfiantenna": 14, "rft": [9, 14], "rfta15b": 9, "rftb15b": 9, "rftg00b": 9, "rftg01b": 9, "rftk07b": 9, "rftq00b": 9, "rftq01b": 9, "rftq15b": 9, "right": [1, 2, 4, 5, 6, 7, 14], "rightmost": 1, "ringmap": 1, "rippl": 13, "rise": 4, "rising_tim": 4, "rm": [1, 3], "rng": 1, "robust": 1, "robustli": 9, "roll": [1, 12], "room": 14, "root": [0, 14], "rotat": [1, 4, 7, 14], "rough": [1, 14], "roughli": 4, "routin": [1, 2, 3, 4, 8, 11, 12, 14], "row": [7, 14], "rsigma": 12, "rule": 0, "run": [2, 3, 7, 8, 10], "run_pass0_": 9, "run_pass1_a": 5, "runtimeerror": 14, "sa": 1, "safe": 14, "sai": 5, "same": [0, 1, 3, 4, 6, 8, 9, 10, 12, 13, 14], "sampl": [0, 1, 12, 13, 14], "satur": 1, "save": [10, 11], "sb": 1, "scalar": [0, 1, 12], "scale": [1, 8, 12, 13], "scheme": 0, "scipi": [1, 13], "scratch": [9, 10], "search": [0, 4, 5, 6, 8, 9, 12, 13, 14, 15], "search_input": 13, "search_update_id": 0, "search_update_tim": 0, "searchabl": 9, "searcher": 9, "season": 5, "second": [0, 1, 2, 4, 5, 8, 12, 13, 14], "see": [0, 1, 2, 3, 4, 5, 9, 10, 12], "seek": 4, "seem": [9, 12], "seen": 7, "seldom": 13, "select": [0, 9, 10, 11, 14], "select_freq_phys": 0, "select_freq_rang": 0, "select_prod_auto": 0, "select_prod_by_input": 0, "select_prod_pair": 0, "select_time_rang": [0, 5], "self": [0, 1, 2], "send": [5, 9], "sensibl": 6, "sensor": 14, "sensor_to_hk": 14, "separ": [1, 6, 8, 9, 12], "separatedli": 10, "seppar": 2, "sequenc": 1, "seri": [0, 13], "serial": [6, 9, 14], "serial_to_id": 14, "serial_to_loc": 14, "set": [0, 1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 13, 14], "set_acq_list": 2, "set_coeff": 13, 
"set_glob": 6, "set_global_reference_tim": 13, "set_good_ipt": 2, "set_hk_input": 5, "set_metadata": 2, "set_properti": 9, "set_reference_tim": 13, "set_time_rang": 5, "set_time_range_": 5, "set_time_range_global_flag": 5, "set_time_range_layout": 5, "set_time_range_season": 5, "set_us": 9, "setting_tim": 4, "sever": [5, 9, 13], "sever_connexion": 9, "sg": 9, "sg_spec": 9, "sg_spec_start": 9, "sg_spect": 9, "sg_start_sn": 9, "shape": [0, 1, 5, 6, 10, 12, 14], "shapiro": 1, "share": 1, "sheehan": 8, "shift": [4, 7], "short": 9, "shortcut": 5, "shortest": [9, 13], "shortest_path_to_typ": 9, "should": [0, 1, 2, 4, 5, 6, 8, 9, 12, 13, 14], "show": [1, 5, 6, 9], "shuffl": 14, "si": 4, "sid": 8, "sider": [2, 4, 8], "sidereal_": [4, 8], "sig2": 2, "sigma": [1, 13], "signal": [10, 12, 13, 14], "signific": [1, 4, 12], "significantli": 5, "silenc": 1, "silent": 5, "simbad": 6, "similar": 5, "similarli": [0, 9], "simpl": 9, "simpli": [5, 6, 9, 14], "sin": 1, "sinc": [1, 4, 8, 10, 13], "singl": [0, 1, 5, 6, 7, 9, 13, 14], "single_source_check": 2, "sinusoid": 1, "sir": 12, "sir1d": 12, "size": [1, 6, 9, 10], "skip": [0, 6, 9], "skipkei": 6, "sky": [1, 2, 4, 7, 10, 13], "skyfield": [4, 6, 7, 8, 14], "skyfield_star_from_ra_dec": 4, "skyfield_time_to_unix": 4, "slice": [0, 12], "slinear": 13, "slot": [9, 13, 14], "slow": [1, 14], "slower": 5, "slowest": 0, "slt_type": 9, "sma": [9, 14], "smaller": 9, "sn": [9, 14], "so": [0, 1, 4, 8, 9, 10], "solar": 4, "solar_ris": 4, "solar_set": 4, "solar_transit": 4, "solut": [10, 14], "solv": [10, 13], "some": [4, 5, 6, 9, 12, 13], "someth": 0, "sometim": 9, "sort": [6, 10, 14], "sort_evalues_mag": 10, "sort_kei": 6, "sort_sn": 9, "sourc": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14], "source1": 2, "source_flag": 0, "source_gain": 0, "source_nam": [0, 4, 6], "source_weight": 0, "south": 14, "space": 6, "span": [1, 13], "spars": 12, "special": 9, "specif": [0, 5, 6, 9, 10, 12, 13], "specifi": [0, 2, 4, 5, 6, 9, 12, 13, 14], 
"specificationto": 9, "spectra": [6, 11], "spectral": [0, 6, 7, 12, 13], "spectral_cut": 12, "spectrum": [6, 11, 12], "spl001a": 9, "spl001ap2": 9, "spl001ap3": 9, "spline": 13, "split": 0, "splitter": 9, "spoof": 5, "spring": 5, "sql": 9, "squar": [1, 10, 13, 14], "src": [2, 8, 14], "src1": 2, "src2": 2, "src_dict": 4, "stack": [0, 12, 13, 14], "stack_flag": 14, "stack_new": 14, "stack_sel": 0, "standard": [1, 9, 13, 14], "standardis": 6, "star": [4, 6, 7, 14], "star_cir": 4, "starlib": [4, 6, 7, 14], "start": [0, 1, 4, 5, 6, 8, 9, 11, 12, 13], "start_dai": 8, "start_lst": 8, "start_ra": 5, "start_tim": [0, 4, 5, 8, 9], "start_tol": 8, "stat": [1, 6], "statement": 9, "static": [0, 12, 13, 14], "static_amp": 13, "static_delai": 14, "static_phi": 13, "static_phi_fit": 13, "station": 0, "statist": [1, 6, 12], "std": 0, "stellar": 4, "stellar_": 4, "step": [2, 10], "still": [3, 14], "stone": 5, "stop": [0, 8, 12, 13], "stop_tim": 0, "storag": 5, "storagenod": 5, "store": [0, 6, 9, 14], "str": [0, 1, 4, 5, 6, 7, 8, 9, 12, 13, 14], "stream": [9, 14], "stream_id": 14, "string": [0, 1, 4, 5, 6, 7, 8, 9, 11, 13, 14], "student": 1, "style": [1, 14], "subclass": [0, 1, 6, 13, 14], "subclass_from_obj": 0, "subcycl": 10, "subgraph": 9, "subgraph_spec": 9, "submap": 1, "submodul": 15, "subregion": 1, "subsequ": 5, "subset": [0, 5, 14], "substitut": 14, "subtl": 9, "subtract": [10, 13, 14], "subtract_rank1_sign": 14, "subtract_sky_nois": 10, "suffer": 4, "sum": [0, 1], "summari": [5, 13], "summer": 5, "sun": [2, 5], "sunris": [2, 5], "sunset": [2, 5], "supersed": 10, "suppli": [0, 4, 7, 12, 14], "support": [0, 4, 10, 14], "suppos": 0, "sure": 9, "surfac": 9, "surround": 5, "suzu": 5, "switch": 10, "sync": 10, "synchron": 10, "syncron": 3, "synonym": 0, "syntax": 5, "system": [0, 4, 10, 13, 14], "t": [1, 2, 5, 6, 9, 10, 14], "t1": 2, "t2": 2, "t_0": 1, "tabl": [8, 9], "tag": 14, "tail": 1, "take": [0, 1, 2, 4, 6, 7, 9, 13], "taken": [4, 6, 14], "target": [4, 7], "tau": 13, 
"tau_a": 4, "tau_cut": 12, "tau_init": 13, "taua": [1, 2, 4], "tauru": 4, "tdata": 13, "tdelt": 2, "telescop": [8, 12, 14], "temperatur": [0, 1, 14], "templat": [3, 13], "tend": 13, "termin": 9, "test": [0, 1, 2, 3, 6, 7], "test_acq": 0, "test_andata": 0, "test_chan": 3, "test_freq": 3, "test_norm": 1, "test_pass": 2, "text": 8, "tgain": 13, "than": [1, 4, 5, 9, 12, 13, 14], "thei": [0, 1, 3, 5, 9, 13, 14], "them": [0, 9], "therebi": 5, "therefor": 14, "therm_avail": 9, "thermal": 1, "thermal_amplitud": 1, "thi": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14], "thier": 5, "third": 14, "those": [0, 5, 10, 13, 14], "though": [5, 9], "three": [3, 5, 9], "threshold": [12, 13], "through": [6, 9, 12, 13, 14], "thru": [9, 14], "thu": 10, "time": [0, 1, 2, 3, 5, 7, 8, 9, 10, 11, 12, 14], "time_delta": 5, "time_delta_rise_set": 5, "time_exclus": 5, "time_index_off": 10, "time_index_on": 10, "time_interv": 5, "time_of_dai": 4, "time_ord": 11, "time_rang": 5, "time_sel": [0, 11], "time_width": 12, "timescal": 13, "timestamp": [0, 2, 3, 9, 10, 12, 13], "timestamp_dec": 10, "timestamp_off_dec": 10, "timestamp_on_dec": 10, "timestap": 10, "timestr_to_datetim": 4, "timestream": [13, 14], "timezon": 4, "timing_acq_filenam": 13, "timingcorrect": 13, "timingdata": 13, "timinginterpol": 13, "tinit": 13, "tm": 2, "tm1": 2, "tm2": 2, "tmp": 8, "to_dict": 6, "tod": [0, 5], "todata": 0, "togeth": 12, "tol": 2, "tol_ch1": 2, "tol_ch2": 2, "tol_fr1": 2, "tol_fr2": 2, "toler": [3, 8, 10], "tone": 14, "toneantenna": 14, "took": 0, "tool": [1, 6, 10, 12, 13], "top": 14, "total": [0, 5, 10, 13, 14], "total_n_channel": 10, "toward": 7, "trace": [2, 9], "tranpos": 0, "transform": [1, 2, 4], "transit": [1, 2, 4, 5, 13], "transit_ra": 4, "transit_tim": 4, "transpos": 0, "treat": 5, "tref": 13, "tri": 2, "triangl": [10, 13, 14], "triangular": 14, "tricki": 4, "true": [0, 1, 3, 4, 5, 6, 8, 9, 10, 12, 13, 14], "truncat": 10, "trustworthi": 5, "try": 0, "tsky": 3, "tstart": 13, "tupl": [0, 1, 5, 
6, 12, 14], "turn": [9, 10, 14], "tv": 12, "tval": 1, "tweight": 13, "twice": 5, "twidth": 12, "two": [0, 2, 4, 5, 7, 9, 10, 11, 12, 14], "type": [0, 1, 4, 5, 6, 7, 8, 9, 10, 12, 13, 14], "type_exclud": 9, "type_rev": 9, "typeerror": 6, "typic": [8, 10, 13], "tzinfo": 4, "u": [9, 14], "ubc": [5, 10], "ultim": 14, "uncerainti": 13, "uncertainti": [1, 6, 13], "uncertainty_amp": 1, "uncertainty_imag": 1, "uncertainty_phi": 1, "uncertainty_r": 1, "unchang": 0, "unclear": 4, "unconnect": 14, "under": 5, "underli": [0, 13, 14], "underscor": [6, 9], "undo": 14, "unecessari": 2, "unexpect": 0, "unicod": 0, "uniform": 1, "union": 7, "uniqu": [0, 8, 9, 14], "unit": [1, 7, 9], "univers": 12, "unix": [0, 1, 2, 4, 5, 7, 8, 12, 13, 14], "unix_to_csd": 4, "unix_to_datetim": 4, "unix_to_skyfield_tim": 4, "unknownus": 9, "unless": [5, 13], "unlik": 4, "unnecessari": 4, "unpack_product_arrai": 14, "unstack": 0, "unsur": 4, "until": 9, "untouch": 13, "unwrap": 10, "unzip": 8, "up": [0, 3, 4, 9, 14], "updat": [0, 1, 5, 9, 13, 14], "update_global_flag_mod": 5, "update_id": 0, "update_tim": 0, "upon": [5, 6, 13], "upper": [0, 1, 10, 13, 14], "uppercas": 6, "uppon": 2, "upsilon": 10, "upward": 14, "us": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14], "usag": 14, "user": [2, 4, 9, 13, 14], "user_permiss": 9, "user_permission_typ": 9, "usernam": 9, "usual": [9, 14], "utc": [4, 5, 8, 9], "utc_lst_to_mjd": 4, "util": [10, 14, 16], "utvec": 10, "utvec2mat": 10, "v": [0, 11], "valid": [0, 1, 4, 5, 6, 9, 11, 12, 13, 14], "valu": [0, 1, 5, 6, 9, 10, 12, 13, 14], "valueerror": [0, 6, 14], "vander": 1, "vandermond": 1, "vari": 0, "variabl": [1, 2, 6, 10], "varianc": [1, 13], "variat": 13, "variou": 6, "varnam": 6, "vector": [1, 10, 13], "vectorlib": [4, 14], "vectorsum": [4, 14], "veloc": 4, "verbos": [3, 6, 8], "veri": [0, 4, 5, 14], "vernal": 4, "version": [0, 6, 9, 10, 13], "versiontupl": 0, "versu": 13, "vi": [0, 2, 5, 10, 12, 13, 14], "via": [0, 14], "vir_a": 4, "vira": [2, 4], "virgo": 
4, "vis1": 2, "vis2": 2, "vis_dec_sub": 10, "vis_gat": 10, "vis_off": 10, "vis_off_dec": 10, "vis_on": 10, "vis_on_dec": 10, "vis_weight": 0, "visibl": [0, 2, 10, 11, 12, 13, 14], "voltag": 1, "w": [2, 14], "w_cylind": [9, 14], "wa": [0, 3, 6, 8, 9, 12, 13, 14], "wai": [5, 9], "want": [0, 4, 5, 9], "warn": [5, 9], "waterfal": 11, "we": [0, 4, 8, 9, 12, 14], "weather": [0, 5], "weatherdata": 0, "weatherdatainterv": 5, "weatherread": 0, "web": 9, "weight": [0, 1, 10, 12, 13], "weight_alpha": 13, "weight_amp": 13, "weight_phi": 13, "weight_static_amp": 13, "weight_static_phi": 13, "weight_tau": 13, "weighted_": 10, "well": [1, 2, 5, 9], "went": [9, 13], "were": [1, 8, 12, 13, 14], "west": [4, 14], "what": [0, 5, 6, 9, 11, 12, 13, 14], "when": [0, 1, 4, 5, 6, 9, 12, 13, 14], "where": [0, 1, 4, 5, 6, 7, 11, 12, 13, 14], "whether": [0, 12, 13, 14], "which": [0, 1, 2, 4, 5, 6, 7, 8, 9, 10, 12, 13, 14], "whichev": 7, "while": 14, "whitespac": 6, "whose": [0, 1, 3, 5], "wice": 12, "width": [1, 2, 12, 13], "wiki": 9, "wild": 0, "wilk": 1, "window": [1, 11, 12, 13], "winter": 5, "wise": 10, "wish": [9, 13, 14], "within": [0, 5, 9, 10, 12, 13, 14], "withing": 5, "without": [0, 1, 9, 13], "won": 1, "work": [1, 3, 5, 12, 13, 14], "would": [6, 9, 13], "wrap": [12, 13], "wrapper": [1, 4, 10, 12], "written": [3, 6, 7], "wterm": 14, "x": [1, 4, 6, 10, 13, 14], "x0_shift": 2, "xdist": 2, "xmax": 1, "xmin": 1, "xy": 4, "y": [1, 4, 12, 13, 14], "y_dist": 2, "ydist": 2, "year": [4, 5, 6, 8], "yet": 5, "yhpf": 12, "yield": [0, 13], "you": [0, 4, 5, 6, 9, 11, 13, 14], "your": [4, 5, 14], "yyymmdd": 8, "yyyymmdd": 4, "yyyymmssthhmmssz_chimetiming_corr": 13, "z": [1, 14], "za": 1, "zenith": 4, "zero": [5, 8, 13, 14], "zero_delay_noise_sourc": 13, "zip": 8, "zmed": 1}, "titles": ["ch_util.andata", "ch_util.cal_utils", "ch_util.chan_monitor", "ch_util.data_quality", "ch_util.ephemeris", "ch_util.finder", "ch_util.fluxcat", "ch_util.hfbcat", "ch_util.holography", "ch_util.layout", 
"ch_util.ni_utils", "ch_util.plot", "ch_util.rfi", "ch_util.timing", "ch_util.tools", "Welcome to ch_util\u2019s documentation!", "API Reference"], "titleterms": {"": 15, "adventur": 9, "andata": 0, "api": 16, "arrai": 14, "cal_util": 1, "ch_util": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15], "chan_monitor": 2, "choos": 9, "correl": 14, "data": [3, 5], "data_qu": 3, "databas": 9, "document": 15, "ephemeri": 4, "factoris": 14, "finder": 5, "fluxcat": 6, "fringestop": 14, "function": [3, 4], "hfbcat": 7, "high": 5, "holographi": 8, "housekeep": 14, "index": 5, "indic": 15, "input": 14, "instanc": 4, "issu": 3, "layout": 9, "level": 5, "map": 14, "matrix": 14, "miscellan": [4, 14], "model": 9, "ni_util": 10, "own": 9, "plot": 11, "product": 14, "qualiti": 3, "refer": 16, "rfi": 12, "routin": 5, "searcher": 5, "submodul": 16, "tabl": [5, 15], "telescop": 4, "time": [4, 13], "tool": 14, "util": 4, "welcom": 15, "your": 9}}) \ No newline at end of file