From 1d68f13aa857012a90479806981044d2e67bd24f Mon Sep 17 00:00:00 2001 From: Bing Li Date: Wed, 7 Aug 2024 14:28:34 -0400 Subject: [PATCH] reorganized repo --- .DS_Store | Bin 6148 -> 8196 bytes src/.DS_Store | Bin 0 -> 6148 bytes src/tavi/.DS_Store | Bin 0 -> 6148 bytes src/tavi/__init__.py | 1 + src/tavi/data/fit.py | 155 ++++ src/tavi/data/nexus_reader.py | 127 +++ src/tavi/data/scan.py | 273 ++++++ src/tavi/data/scan_group.py | 252 ++++++ src/tavi/data/spice_to_nexus.py | 850 ++++++++++++++++++ src/tavi/data/tavi_data.py | 303 +++++++ src/tavi/instrument/.DS_Store | Bin 0 -> 6148 bytes .../instrument/instrument_params/cg4c.json | 71 ++ .../instrument/instrument_params/hb3.json | 75 ++ .../instrument_params/python_dicts/cg4c.py | 105 +++ .../instrument_params/python_dicts/hb3.py | 104 +++ .../python_dicts/takin_test.py | 113 +++ .../instrument_params/takin_test.json | 75 ++ .../resolution/backups/cooper_nathans_bak.py | 196 ++++ .../resolution/backups/reso_ellipses_bak.py | 374 ++++++++ .../instrument/resolution/cooper_nathans.py | 281 ++++++ .../instrument/resolution/reso_ellipses.py | 427 +++++++++ src/tavi/instrument/tas.py | 338 +++++++ src/tavi/instrument/tas_cmponents.py | 306 +++++++ src/tavi/plotter.py | 166 ++++ src/tavi/sample/powder.py | 22 + src/tavi/sample/sample.py | 196 ++++ src/tavi/sample/xtal.py | 86 ++ src/tavi/spice_to_hdf5_backup.py | 77 ++ src/tavi/utilities.py | 156 ++++ 29 files changed, 5129 insertions(+) create mode 100644 src/.DS_Store create mode 100644 src/tavi/.DS_Store create mode 100644 src/tavi/data/fit.py create mode 100644 src/tavi/data/nexus_reader.py create mode 100644 src/tavi/data/scan.py create mode 100644 src/tavi/data/scan_group.py create mode 100644 src/tavi/data/spice_to_nexus.py create mode 100644 src/tavi/data/tavi_data.py create mode 100644 src/tavi/instrument/.DS_Store create mode 100644 src/tavi/instrument/instrument_params/cg4c.json create mode 100644 src/tavi/instrument/instrument_params/hb3.json create mode 
100644 src/tavi/instrument/instrument_params/python_dicts/cg4c.py create mode 100644 src/tavi/instrument/instrument_params/python_dicts/hb3.py create mode 100644 src/tavi/instrument/instrument_params/python_dicts/takin_test.py create mode 100644 src/tavi/instrument/instrument_params/takin_test.json create mode 100755 src/tavi/instrument/resolution/backups/cooper_nathans_bak.py create mode 100755 src/tavi/instrument/resolution/backups/reso_ellipses_bak.py create mode 100755 src/tavi/instrument/resolution/cooper_nathans.py create mode 100755 src/tavi/instrument/resolution/reso_ellipses.py create mode 100644 src/tavi/instrument/tas.py create mode 100644 src/tavi/instrument/tas_cmponents.py create mode 100644 src/tavi/plotter.py create mode 100755 src/tavi/sample/powder.py create mode 100755 src/tavi/sample/sample.py create mode 100755 src/tavi/sample/xtal.py create mode 100644 src/tavi/spice_to_hdf5_backup.py create mode 100644 src/tavi/utilities.py diff --git a/.DS_Store b/.DS_Store index 049f7bc64f1e53f8f262ec9413bb86b76421ca08..04512f2d906d908ced185b967c1e31bbaed4feb0 100644 GIT binary patch delta 165 zcmZoMXmOBWU|?W$DortDU;r^WfEYvza8E20o2aMAD6lbLH}hr%jz7$c**Q2SHn1=X zZ02FHXJTh&C}t>PNS@rl)*xbLW~`%NXk=ciqfl*WV5p;DYGg54gxy#es;DfuC@&{J yFCAppW)C(o#?4MVXPCJpxPbz$Aj>uja(ri=%rD|O*`J4lgAw8mhRyLjbC?0vf+3p# delta 112 zcmZp1XfcprU|?W$DortDU=RQ@Ie-{Mvv5r;6q~50$jG-bU^g=(-)0^Gd#1^e!rYs! 
yggF=|7Oq^(&cPwb3{(jO0^C5t6{Kln;dkcA{4$;(BN&(<#(`{L*c{I@hZz7UW)cMe diff --git a/src/.DS_Store b/src/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..1e197c2ff0e1b323f62e7b1622ee82418e66d28c GIT binary patch literal 6148 zcmeHK%SyvQ6rE|KO({Ya3SADkE!Y<712-YoA26Z|m70*E!I&vc+AK;TYyBa=#P9Lm znTc4eTM>IN%$)n2$sEW$7-QUq%ci}ze{gy+dP<&C`JxHsz_*eegB84j^0}V5KTQ*vK7g;vukr|q0b+m{AO<#< z0dpqUt<9x?R!$5M13xf;`-6an=o&0Fs;vV$ygp;xLPP-_-x7$zplh(y2oVsjO96E$ zH%|<%%fT;9o@=nwsLL5wGs8G$=IZgn)$HIGDxGmxBlW}pF|f)&U7HS`|L5?_tbOFK zmXJjZ5Ci{=0d5Wbp$CgHXY04+;aMx7-9tmcyb=`<&{r-2VBkK|RzV#Xs6(D>u+)g7 TpkI{((nUZKLLD*i3k-Y!;)qIW literal 0 HcmV?d00001 diff --git a/src/tavi/.DS_Store b/src/tavi/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..d8328b40542632505f0a08227ec1a57aa1b4560b GIT binary patch literal 6148 zcmeHK!EVz)5S?vJNOG$l}jovR@(jmuHaBmu(1Uet~ZJua)=`N-2PC% zq~F7v-7O+bdf@=5-H~SB?(B@M_oDS?h)9fP*?_1=L_VC++r_ZKc%OaEwrr#W6#R^I zUQg0erC8srWxIe=z$x&*DZppfql^-2DEa;TrjxR$n;^IqTif2VosQq}_xw-sH#Lu& zq?y&DWO~7?bEV4kZl0v?i+n!ry*N=-lN41x)dfYCBjw|VqRP~Kq-IrC>)P0C_+7s{ z?(HuYN5OGl4hC3OGI2eHQ>){`8-pDPhzmzF{9~Nh0 z!aad@AWi~dCR9@XNQ7D8bdN9V@`TNvA%E`)Yv8Cm1)Kt#72xlKgfm7KTZ8)3fkC$b zKo4PUm~$_|HPT{au{DStm@uV4Qz~qWAxt^q(&j}LTZ5*Ygl#^AeY3C~iqLPz^QBHF z5gBx&Q@|;(s=&5c_W1rkd$|8!EpjWTfK%X6DImPl_;dtH_HJDx$9JuSe}=PhUTaX7 kV6fM*p72$?57&l0$rE5?u{DSY%>5D2GPuDh@K+W134Hy~_5c6? 
literal 0 HcmV?d00001 diff --git a/src/tavi/__init__.py b/src/tavi/__init__.py index 9c510912..266ff930 100644 --- a/src/tavi/__init__.py +++ b/src/tavi/__init__.py @@ -2,6 +2,7 @@ Contains the entry point for the application """ +# TODO template try: from ._version import __version__ # noqa: F401 except ImportError: diff --git a/src/tavi/data/fit.py b/src/tavi/data/fit.py new file mode 100644 index 00000000..f1fa58bd --- /dev/null +++ b/src/tavi/data/fit.py @@ -0,0 +1,155 @@ +import matplotlib.pyplot as plt +import numpy as np +from lmfit import models + + +class Fit(object): + """Save information about fits""" + + models = { + # ---------- peak models --------------- + "Gaussian": models.GaussianModel, + "Lorentzian": models.LorentzianModel, + "Voigt": models.VoigtModel, + "PseudoVoigt": models.PseudoVoigtModel, + "DampedOscillator": models.DampedOscillatorModel, + "DampedHarmonicOscillator": models.DampedHarmonicOscillatorModel, + # ---------- background models ------------ + "Constant": models.ConstantModel, + "Linear": models.LinearModel, + "Quadratic": models.QuadraticModel, + "Polynomial": models.PolynomialModel, + "Exponential": models.ExponentialModel, + "PowerLaw": models.PowerLawModel, + # --------- expression --------------- + "Expression": models.ExpressionModel, + "Spline": models.SplineModel, + } + + def __init__(self, x, y, err=None, fit_range=None): + """ + initialize a fit model + + Args: + x (list) + y (list) + err (list | None) + fit_range (tuple) + """ + + self.range = fit_range + self.x = np.array(x) + self.y = np.array(y) + self.err = err + + # trim the range + if fit_range is not None: + fit_min, fit_max = fit_range + mask = np.bitwise_and(x > fit_min, x < fit_max) + self.x = self.x[mask] + self.y = self.y[mask] + if self.err is not None: + self.err = self.err[mask] + + self.background_models = [] + self.signal_models = [] + self.num_backgrounds = 0 + self.num_signals = 0 + self.FIT_STATUS = None + self.chi_squred = 0 + self.PLOT_SEPARATELY 
= False + + def add_background( + self, + model="Constant", + p0=None, + min=None, + max=None, + fixed=None, + expr=None, + ): + """Set the model for background + + Args: + model (str): Constant, Linear, Quadratic, Polynomial, + Exponential, PowerLaw + p0 (tuple | None): inital parameters + min (tuple | None): minimum + max (tuple | None): maximum + fixed (tuple | None): tuple of flags + expr (tuple| None ): constraint expressions + """ + self.num_backgrounds += 1 + + # add prefix if more than one background + if self.num_backgrounds > 1: + prefix = f"b{self.num_backgrounds}_" + else: + prefix = "" + model = Fit.models[model](prefix=prefix, nan_policy="propagate") + + pass + + # pars = model.guess(self.y, x=self.x) + # pars["c"].set(value=0.7, vary=True, expr="") + + self.background_models.append(model) + + def add_signal( + self, + model="Gaussian", + p0=None, + min=None, + max=None, + expr=None, + ): + """Set the model for background + + Args: + model (str): Constant, Linear, Quadratic, Polynomial, + Exponential, PowerLaw + p0 (tuple | None): inital parameters + min (tuple | None): minimum + max (tuple | None): maximum + expr (str| None ): constraint expression + """ + self.num_signals += 1 + prefix = f"s{self.num_signals}_" + model = Fit.models[model](prefix=prefix, nan_policy="propagate") + print(model.param_names) + self.signal_models.append(model) + pars = model.guess(self.y, x=self.x) + pars["c"].set(value=0.7, vary=True, expr="") + + def perform_fit(self): + + model = np.sum(self.signal_models) + + if self.num_backgrounds > 0: + model += np.sum(self.background_models) + + if self.err is None: + # pars = model.guess(self.y, x=self.x) + out = model.fit(self.y, pars, x=self.x) + else: + pars = model.fit(self.y, x=self.x, weights=self.err) + + # out = model.fit(self.y, pars, x=self.x) + print(out.fit_report(min_correl=0.25)) + + +# # plot fitting results +# fig, ax = plt.subplots() +# if std is None: +# ax.plot(self.scan_data[x_str], self.scan_data[y_str], "o") 
+# else: +# ax.errorbar(x, y, yerr=std, fmt="o") +# ax.plot(x, out.best_fit, "-") + +# if "scan_title" in self.scan_params: +# ax.set_title(self.scan_params["scan_title"]) +# ax.set_xlabel(x_str) +# ax.set_ylabel(y_str) +# ax.grid(alpha=0.6) +# ax.set_ylim(bottom=0) +# plt.tight_layout() diff --git a/src/tavi/data/nexus_reader.py b/src/tavi/data/nexus_reader.py new file mode 100644 index 00000000..3b2b5247 --- /dev/null +++ b/src/tavi/data/nexus_reader.py @@ -0,0 +1,127 @@ +import numpy as np + + +def nexus_to_dict(nexus_entry): + """Reads a nexus entry, convert to dictionaries of data and meta_data + + Args: + nexus entry + + Returns: + meta_data (dict) + data (dict) + """ + + def dataset_to_string(ds): + return str(ds.asstr()[...]) + + scan_info = { + "scan": int(nexus_entry.name[-4:]), # last 4 digits are scan number + "time": dataset_to_string(nexus_entry["start_time"]), + "scan_title": dataset_to_string(nexus_entry["title"]), + # "preset_type": "normal", + "preset_channel": dataset_to_string(nexus_entry["monitor/mode"]), + "preset_value": float(nexus_entry["monitor/preset"][...]), + "def_y": nexus_entry["data"].attrs["signal"], + "def_x": nexus_entry["data"].attrs["axes"], + } + + sample_ub_info = { + "samplename": dataset_to_string(nexus_entry["sample/name"]), + "lattice_constants": nexus_entry["sample/unit_cell"][...], + "ub_matrix": np.reshape(nexus_entry["sample/orientation_matrix"][...], (3, 3)), + # "mode": # UB mode + "palne_normal": nexus_entry["sample/plane_normal"][...], + "ubconf": dataset_to_string(nexus_entry["sample/ub_conf"]), + } + + instrument = dataset_to_string(nexus_entry["instrument/name"]) + + instrument_info = { + "instrument": instrument, + # "monochromator": + # "analyzer": + "sense": dataset_to_string(nexus_entry["instrument/monochromator/sense"]) + + dataset_to_string(nexus_entry["sample/sense"]) + + dataset_to_string(nexus_entry["instrument/analyser/sense"]), + "collimation": nexus_entry["instrument/collimator/divergence_x"][...], + 
} + + num = np.size(nexus_entry["data/" + scan_info["def_x"]]) + + data = { + "Pt.": np.arange(start=1, stop=num + 1, step=1), + # detector + "detector": nexus_entry["instrument/detector/data"][...], + # monitor + "time": nexus_entry["monitor/time"][...], + "monitor": nexus_entry["monitor/monitor"][...], + "mcu": nexus_entry["monitor/mcu"][...], + # sample + "s1": nexus_entry["sample/s1"][...], + "s2": nexus_entry["sample/s2"][...], + "sgl": nexus_entry["sample/sgl"][...], + "sgu": nexus_entry["sample/sgu"][...], + "stl": nexus_entry["sample/stl"][...], + "stu": nexus_entry["sample/stu"][...], + "q": nexus_entry["sample/q"][...], + "qh": nexus_entry["sample/qh"][...], + "qk": nexus_entry["sample/qk"][...], + "ql": nexus_entry["sample/ql"][...], + "en": nexus_entry["sample/en"][...], + } + + # analyser + ana_str = ( + ("a1", "a2", "afocus", "ef") + tuple([f"qm{i+1}" for i in range(8)]) + tuple([f"xm{i+1}" for i in range(8)]) + ) + nexus_ana_str = nexus_entry["instrument/analyser"].keys() + for key in ana_str: + if key in nexus_ana_str: + data.update({key: nexus_entry["instrument/analyser/" + key][...]}) + + # monochromator + mono_str = ("m1", "m2", "ei", "focal_length", "mfocus", "marc", "mtrans") + nexus_mono_str = nexus_entry["instrument/monochromator"].keys() + for key in mono_str: + if key in nexus_mono_str: + data.update({key: nexus_entry["instrument/monochromator/" + key][...]}) + + # slits + slits_str1 = ("bat", "bab", "bal", "bar", "bbt", "bbb", "bbl", "bbr") + slits_str2 = ("slita_lf", "slita_rt", "slita_tp", "slitb_bt", "slitb_lf", "slitb_rt", "slitb_tp") + slits_str3 = ("slit_pre_bt", "slit_pre_lf", "slit_pre_rt", "slit_pre_tp") + slit_str = slits_str1 + slits_str2 + slits_str3 + + nexus_slit_str = nexus_entry["instrument/slit"].keys() + for key in slit_str: + if key in nexus_slit_str: + data.update({key: nexus_entry["instrument/slit/" + key][...]}) + + # temprature + temperatue_str = ( + "coldtip", + "tsample", + "temp_a", + "temp_2", + "vti", + 
"sample", + "temp", + "dr_tsample", + "dr_temp", + ) + for t in nexus_entry["sample"].keys(): + if t in temperatue_str: + data.update({t: nexus_entry["sample/" + t][...]}) + + return scan_info, sample_ub_info, instrument_info, data + + +def nexus_to_SPICE(nexus_entry): + """Reads a nexus entry, convert to a SPICE scan file + + Args: + nexus entry + + """ + pass diff --git a/src/tavi/data/scan.py b/src/tavi/data/scan.py new file mode 100644 index 00000000..d8d031e9 --- /dev/null +++ b/src/tavi/data/scan.py @@ -0,0 +1,273 @@ +import numpy as np +import matplotlib.pyplot as plt + +from tavi.tavi_data.nexus_reader import nexus_to_dict + + +class Scan(object): + """ + Manage a single measued scan + + Attributes: + scan_info (dict): + sample_ub_info (dict): + instrument_info (dict): + data (dict): dictionary contains lists of scan data + + Methods: + load_scan + generate_curve + plot_curve + + """ + + def __init__(self, nexus_entry=None): + """Initialze an empty scan if nexus entry not provided""" + + self.scan_info = None + self.sample_ub_info = None + self.instrument_info = None + self.data = None + + if nexus_entry is not None: + self.load_scan(nexus_entry) + + def load_scan(self, nexus_entry): + """Unpack metadata and data from nexus_entry + + Args: + nexus_entry: + """ + + scan_info, sample_ub_info, instrument_info, data = nexus_to_dict(nexus_entry) + self.scan_info = scan_info + self.sample_ub_info = sample_ub_info + self.instrument_info = instrument_info + self.data = data + + def get_scan_info(self): + """Return scan_info in metadata. + + Returns: + dict: dictionay of scan_info metadata. + """ + return self.scan_info + + def get_sample_ub_info(self): + """Return sample_UB_info in metadata. + + Returns: + dict: dictionay of sample_UB_info metadata. + """ + return self.sample_ub_info + + def get_instrument_info(self): + """Return instrument_info in metadata. + + Returns: + dict: dictionay of instrument_info metadata. 
+ """ + return self.instrument_info + + # def save_metadata(self, metadata_entry): + # """Save metadata_entry into file + + # Args: + # metadata_entry (dict): {key: value} + # """ + # pass + + def get_data_entry(self, entry_name): + """Return data entry based on entry_name + + Args: + entry_name (str): a key of the dictionay in data + + Returns: + tuple: data entry + """ + return self.data[entry_name] + + def generate_curve( + self, + x_str=None, + y_str=None, + norm_channel=None, + norm_val=1, + rebin_type=None, + rebin_step=0, + ): + """Generate a curve from a single scan to plot, + with the options to + normalize the y-axis and rebin x-axis. + + Args: + x_str (str): string of x axis + y_str (str): string of x axis + norm_channel (str): None, "time", "monitor" or "mcu" + norm_val (float): + rebin_type (str): None, "tol", or "grid" + rebin_size (float): + + Returns: + x: + y: + xerr: if rebin + yerr: if "detector" + xlabel: + ylabel: + title: "scan number: scan title" + label: run number as legend if overplotting multiple curves + """ + + if x_str is None: + x_str = self.scan_info["def_x"] + + if y_str is None: + y_str = self.scan_info["def_y"] + + x_raw = self.data[x_str] + y_raw = self.data[y_str] + + # xerr NOT used + xerr = None + yerr = None + + if rebin_type is None: # no rebin + x = x_raw + y = y_raw + # normalize y-axis without rebining along x-axis + if norm_channel is not None: + norm = self.data[norm_channel] / norm_val + y = y / norm + if yerr is not None: + yerr = yerr / norm + # errror bars for detector only + if "det" in y_str: + yerr = np.sqrt(y) + else: + if rebin_step > 0: + match rebin_type: + case "tol": # x weighted by normalization channel + x_grid = np.arange( + np.min(x_raw) + rebin_step / 2, + np.max(x_raw) + rebin_step / 2, + rebin_step, + ) + x = np.zeros_like(x_grid) + y = np.zeros_like(x_grid) + cts = np.zeros_like(x_grid) + wts = np.zeros_like(x_grid) + + if norm_channel is not None: # rebin and renorm + norm = self.data[norm_channel] 
+ for i, x0 in enumerate(x_raw): + idx = np.nanargmax(x_grid + rebin_step / 2 >= x0) + y[idx] += y_raw[i] + x[idx] += x_raw[i] * norm[i] + cts[idx] += norm[i] + + # errror bars for detector only + if "det" in y_str: + yerr = np.sqrt(y) / cts * norm_val + y = y / cts * norm_val + x = x / cts + + else: # rebin, no renorm + weight_channel = self.scan_info["preset_channel"] + weight = self.data[weight_channel] + for i, x0 in enumerate(x_raw): + idx = np.nanargmax(x_grid + rebin_step / 2 >= x0) + y[idx] += y_raw[i] + x[idx] += x_raw[i] * weight[i] + wts[idx] += weight[i] + cts[idx] += 1 + + # errror bars for detector only + if "det" in y_str: + yerr = np.sqrt(y) / cts + y = y / cts + x = x / wts + case "grid": + x = np.arange( + np.min(x_raw) + rebin_step / 2, + np.max(x_raw) + rebin_step / 2, + rebin_step, + ) + y = np.zeros_like(x) + cts = np.zeros_like(x) + + if norm_channel is not None: # rebin and renorm + norm = self.data[norm_channel] + for i, x0 in enumerate(x_raw): + idx = np.nanargmax(x + rebin_step / 2 >= x0) + y[idx] += y_raw[i] + cts[idx] += norm[i] + + # errror bars for detector only + if "det" in y_str: + yerr = np.sqrt(y) / cts * norm_val + y = y / cts * norm_val + + else: # rebin, no renorm + for i, x0 in enumerate(x_raw): + idx = np.nanargmax(x + rebin_step / 2 >= x0) + y[idx] += y_raw[i] + cts[idx] += 1 + + # errror bars for detector only + if "det" in y_str: + yerr = np.sqrt(y) / cts + y = y / cts + + case _: + print('Unrecogonized rebin type. 
Needs to be "tol" or "grid".') + else: + print("Rebin step needs to be greater than zero.") + + # generate labels and title + if norm_channel is not None: + if norm_channel == "time": + norm_channel = "seconds" + if norm_val == 1: + ylabel = y_str + "/ " + norm_channel + else: + ylabel = y_str + f" / {norm_val} " + norm_channel + else: + preset_val = self.scan_info["preset_value"] + ylabel = y_str + f" / {preset_val} " + self.scan_info["preset_channel"] + + xlabel = x_str + label = "scan " + str(self.scan_info["scan"]) + title = label + ": " + self.scan_info["scan_title"] + + return (x, y, xerr, yerr, xlabel, ylabel, title, label) + + def plot_curve( + self, + x_str=None, + y_str=None, + norm_channel=None, + norm_val=1, + rebin_type=None, + rebin_step=0, + ): + """Plot a 1D curve gnerated from a singal scan in a new window""" + + x, y, xerr, yerr, xlabel, ylabel, title, _ = self.generate_curve( + x_str, + y_str, + norm_channel, + norm_val, + rebin_type, + rebin_step, + ) + + fig, ax = plt.subplots() + ax.errorbar(x, y, xerr=xerr, yerr=yerr, fmt="o") + ax.set_title(title) + ax.set_xlabel(xlabel) + ax.set_ylabel(ylabel) + ax.grid(alpha=0.6) + + fig.show() diff --git a/src/tavi/data/scan_group.py b/src/tavi/data/scan_group.py new file mode 100644 index 00000000..1e109875 --- /dev/null +++ b/src/tavi/data/scan_group.py @@ -0,0 +1,252 @@ +import matplotlib.pyplot as plt +import numpy as np + + +class ScanGroup(object): + """ + Manage combined scans + + Atributes: + name (string): Name of combined scans + signals (list of Scan objects): + backgrounds (list of Scan objects): + signal_axes (list): Can be ["s1", "s2", "detector"], + Or ["s1", "s2",["det_1", "det_2", "det_3"]]. 
+ Default is (None, None, None) + background_axes (list): Default is (None, None, None) + + Methods: + generate_curve + plot_curve + generate_waterfall + plot_waterfall + generate_contour + plot_contour + """ + + def __init__( + self, + signals, + backgrounds=None, + signal_axes=(None, None, None), + background_axes=(None, None, None), + ): + + self.signals = signals + self.backgrounds = backgrounds + self.signal_axes = list(signal_axes) + self.background_axes = list(background_axes) + self.name = "" + + def generate_curve(self): + pass + + def plot_curve(self): + pass + + # TODO background subtraction + # TODO non-orthogonal axes for constant E contours + + def generate_contour( + self, + norm_channel=None, + norm_val=1, + rebin_steps=(None, None), + ): + """Generate a 2D contour plot""" + + num_scans = np.size(self.signals) + + signal_x, signal_y, signal_z = self.signal_axes + + if np.size(signal_x) == 1: + signal_x = [signal_x] * num_scans + xlabel = signal_x[0] + if np.size(signal_y) == 1: + signal_y = [signal_y] * num_scans + ylabel = signal_y[0] + if np.size(signal_z) == 1: + signal_z = [signal_z] * num_scans + zlabel = signal_z[0] + + # shape = (num_scans, num_pts) + x_array = [scan.data[signal_x[i]] for i, scan in enumerate(self.signals)] + y_array = [scan.data[signal_y[i]] for i, scan in enumerate(self.signals)] + + x_min = np.min([np.min(np.round(x, 3)) for x in x_array]) + x_max = np.max([np.max(np.round(x, 3)) for x in x_array]) + y_min = np.min([np.min(np.round(y, 3)) for y in y_array]) + y_max = np.max([np.max(np.round(y, 3)) for y in y_array]) + + # TODO problem if irregular size + x_step, y_step = rebin_steps + if x_step is None: + x_unique = np.unique(np.concatenate([np.unique(np.round(x, 1)) for x in x_array])) + x_diff = np.unique(np.round(np.diff(x_unique), 1)) + x_diff = x_diff[x_diff > 0] + x_step = x_diff[0] + + if y_step is None: + y_unique = np.unique(np.concatenate([np.unique(np.round(y, 1)) for y in y_array])) + y_diff = 
np.unique(np.round(np.diff(y_unique), 1)) + y_diff = y_diff[y_diff > 0] + y_step = y_diff[0] + + x_list = np.round(np.arange(x_min, x_max + x_step / 2, x_step), 3) + y_list = np.round(np.arange(y_min, y_max + y_step / 2, y_step), 3) + # shape = (num_pts, num_scans) + xv, yv = np.meshgrid(x_list, y_list) + + # finding bin boxes + cts = np.zeros_like(xv) + z = np.zeros_like(xv) + for i in range(num_scans): + scan = self.signals[i] + scan_len = np.size(scan.data[signal_z[i]]) + for j in range(scan_len): + # if SCAN_ALONG_Y: + x0 = scan.data[signal_x[i]][j] + y0 = scan.data[signal_y[i]][j] + z0 = scan.data[signal_z[i]][j] + idx = np.nanargmax(x_list + x_step / 2 >= x0) + idy = np.nanargmax(y_list + y_step / 2 >= y0) + z[idy, idx] += z0 + if norm_channel is None: + cts[idy, idx] += 1 + else: + cts[idy, idx] += scan.data[norm_channel][j] / norm_val + + z = z / cts + + title = self.name + if norm_channel is not None: + zlabel += f" / {norm_val} " + norm_channel + title += f" nomralized by {norm_val} " + norm_channel + + return (xv, yv, z, x_step, y_step, xlabel, ylabel, zlabel, title) + + def plot_contour(self, contour_plot, cmap="turbo", vmax=100, ylim=None, xlim=None): + """Plot contour""" + + x, y, z, _, _, xlabel, ylabel, zlabel, title = contour_plot + + fig, ax = plt.subplots() + p = ax.pcolormesh(x, y, z, shading="auto", cmap=cmap, vmax=vmax) + fig.colorbar(p, ax=ax) + ax.set_title(title) + ax.set_xlabel(xlabel) + ax.set_ylabel(ylabel) + ax.grid(alpha=0.6) + + if xlim is not None: + ax.set_xlim(left=xlim[0], right=xlim[1]) + if ylim is not None: + ax.set_ylim(bottom=ylim[0], top=ylim[1]) + + fig.show() + + def plot_waterfall(self, contour_plot, shifts=None, ylim=None, xlim=None, fmt="o"): + """Plot waterfall plot. + + Note: + Horizontal is Y-axis, vertical is Z-axis. Stacked along X-axis. 
+ """ + + x, y, z, _, _, xlabel, ylabel, zlabel, title = contour_plot + + num = len(x[0]) + + if shifts is not None: + if np.size(shifts) == 1: + shifts = (shifts,) * num + else: + shifts = (0,) * num + + fig, ax = plt.subplots() + shift = 0 + for i in range(num): + + if np.isnan(z[:, i]).all(): # all nan + continue + else: + p = ax.errorbar( + x=y[:, i], + y=z[:, i] + shift, + fmt=fmt, + label=f"{xlabel}={np.round(x[0,i],3)}, shift={shift}", + ) + shift += shifts[i] + + ax.set_title(title) + ax.set_xlabel(ylabel) + ax.set_ylabel(zlabel) + ax.grid(alpha=0.6) + ax.legend() + if xlim is not None: + ax.set_xlim(left=xlim[0], right=xlim[1]) + if ylim is not None: + ax.set_ylim(bottom=ylim[0], top=ylim[1]) + fig.show() + + # def generate_waterfall_scans( + # self, + # rebin_type=None, + # rebin_step=0, + # norm_channel=None, + # norm_val=1, + # ): + # curves = [] + # num_scans = np.size(self.signals) + # signal_x, signal_y, _ = self.signal_axes + + # if np.size(signal_x) == 1: + # signal_x = [signal_x] * num_scans + # xlabel = signal_x[0] + # if np.size(signal_y) == 1: + # signal_y = [signal_y] * num_scans + # ylabel = signal_y[0] + # if norm_channel is not None: + # ylabel += f" / {norm_val} " + norm_channel + + # title = self.name + + # for i, signal in enumerate(self.signals): + # x, y, _, yerr, _, _, _, label = signal.generate_curve( + # x_str=signal_x[i], + # y_str=signal_y[i], + # norm_channel=norm_channel, + # norm_val=norm_val, + # rebin_type=rebin_type, + # rebin_step=rebin_step, + # ) + # curve = (x, y, yerr, label) + # curves.append(curve) + # waterfall = curves, xlabel, ylabel, title + # return waterfall + + # def plot_waterfall_scans(self, waterfall, shifts=None, ylim=None, xlim=None, fmt="o"): + + # curves, xlabel, ylabel, title = waterfall + # if shifts is not None: + # if np.size(shifts) == 1: + # shifts = (shifts,) * len(curves) + # else: + # shifts = (0,) * len(curves) + + # fig, ax = plt.subplots() + # shift = 0 + # for i, curve in enumerate(curves): 
+ # shift += shifts[i] + # x, y, yerr, label = curve + # ax.errorbar(x, y + shift, yerr=yerr, label=label, fmt=fmt) + + # ax.set_title(title) + # ax.set_xlabel(xlabel) + # ax.set_ylabel(ylabel) + # ax.legend() + # if xlim is not None: + # ax.set_xlim(left=xlim[0], right=xlim[1]) + # if ylim is not None: + # ax.set_ylim(bottom=ylim[0], top=ylim[1]) + + # fig.show() diff --git a/src/tavi/data/spice_to_nexus.py b/src/tavi/data/spice_to_nexus.py new file mode 100644 index 00000000..3d0fcd41 --- /dev/null +++ b/src/tavi/data/spice_to_nexus.py @@ -0,0 +1,850 @@ +import numpy as np +import h5py +import os +from pathlib import Path +from datetime import datetime + +# from tavi.instrument.instrument_params.takin_test import instrument_params + + +def read_spice(file_name): + """Reads an ascii generated by spice, and returns a header structure and a data table + + Args: + file_name (str): a string containing the filename + + Returns: + spice_data (numpy.array): an array containing all columns/rows + headers (dict): a dictionary containing information from the commented lines. 
+ col_headers (list): + unused (dict): not yet used in hdf5 + """ + with open(file_name, encoding="utf-8") as f: + all_content = f.readlines() + header_list = [line.strip() for line in all_content if "#" in line] + col_name_index = header_list.index("# col_headers =") + 1 + col_names = header_list[col_name_index].strip("#").split() + header_list.pop(col_name_index) + header_list.pop(col_name_index - 1) + + spice_data = np.genfromtxt(file_name, comments="#") + + col_headers = col_names + headers = {} + unused = [] + + for line in header_list: + line = line.strip("# ") + if "=" in line: # empty field + if line[-1] == "=": + unused.append(line[:-2]) # remove " =" + else: + parts = line.split("=") + key = parts[0].strip() + val = "=".join(parts[1:])[1:] # remove the fisrt space charactor + headers[key] = val + elif "completed" in line or "stopped" in line: # last line + parts = line.split(" ") + headers["end_time"] = parts[3] + " " + parts[0] + " " + parts[1] + else: # empty field + unused.append(line) + + return spice_data, col_headers, headers, unused + + +def read_spice_ub(ub_file): + """Reads ub info from UBConf + + Args: + ub_file (str): a string containing the filename + + Returns: + ubconf (dict) + """ + ubconf = {} + with open(ub_file, encoding="utf-8") as f: + all_content = f.readlines() + + for idx, line in enumerate(all_content): + if line.strip() == "": + continue # skip if empty + elif line.strip()[0] == "[": + continue # skiplines like "[xx]" + + ub_dict = {} + key, val = line.strip().split("=") + if key == "Mode": + if all_content[idx - 1].strip() == "[UBMode]": + ub_dict["UBMode"] = int(val) + elif all_content[idx - 1].strip() == "[AngleMode]": + ub_dict["AngleMode"] = int(val) + else: + if "," in line: # vector + ub_dict[key] = np.array([float(v) for v in val.strip('"').split(",")]) + else: # float number + ub_dict[key] = float(val) + + ubconf.update(ub_dict) + + return ubconf + + +def spicelogs_to_nexus(nxentry): + """Format info from SPICElogs into 
Nexus format""" + spice_logs = nxentry["SPICElogs"] + + # Create the GROUPS + nxentry.attrs["NX_class"] = "NXentry" + nxentry.attrs["EX_required"] = "true" + + nxentry.create_group("instrument") + nxentry["instrument"].attrs["NX_class"] = "NXinstrument" + nxentry["instrument"].attrs["EX_required"] = "true" + + nxentry["instrument/"].create_group("source") + nxentry["instrument/source"].attrs["NX_class"] = "NXsource" + nxentry["instrument/source"].attrs["EX_required"] = "true" + + nxmono = nxentry["instrument/"].create_group("monochromator") + nxmono.attrs["NX_class"] = "NXcrystal" + nxmono.attrs["EX_required"] = "true" + + nxana = nxentry["instrument/"].create_group("analyser") + nxana.attrs["NX_class"] = "NXcrystal" + nxana.attrs["EX_required"] = "true" + + nxentry["instrument/"].create_group("detector") + nxentry["instrument/detector"].attrs["NX_class"] = "NXdetector" + nxentry["instrument/detector"].attrs["EX_required"] = "true" + + nxentry.create_group("sample") + nxentry["sample"].attrs["NX_class"] = "NXsample" + nxentry["sample"].attrs["EX_required"] = "true" + + nxentry.create_group("monitor") + nxentry["monitor"].attrs["NX_class"] = "NXmonitor" + nxentry["monitor"].attrs["EX_required"] = "true" + + nxentry.create_group("data") + nxentry["data"].attrs["NX_class"] = "NXdata" + nxentry["data"].attrs["EX_required"] = "true" + + nxentry["instrument/"].create_group("collimator") + nxentry["instrument/collimator"].attrs["NX_class"] = "NXcollimator" + + nxslit = nxentry["instrument/"].create_group("slit") + nxslit.attrs["NX_class"] = "NXslit" + + nxflipper = nxentry["instrument/"].create_group("flipper") + nxflipper.attrs["NX_class"] = "NXflipper" + + # nxentry["instrument"].create_group("filter") + # nxentry["instrument"].attrs["NX_class"] = "NXfilter" + + # Create the FIELDS + + nxentry.create_dataset(name="title", data=spice_logs.attrs["scan_title"], maxshape=None) + nxentry["title"].attrs["type"] = "NX_CHAR" + nxentry["title"].attrs["EX_required"] = "true" + + 
# TODO timezone + start_date_time = "{} {}".format(spice_logs.attrs["date"], spice_logs.attrs["time"]) + start_time = datetime.strptime(start_date_time, "%m/%d/%Y %I:%M:%S %p").isoformat() + + nxentry.create_dataset(name="start_time", data=start_time, maxshape=None) + nxentry["start_time"].attrs["type"] = "NX_DATE_TIME" + nxentry["start_time"].attrs["EX_required"] = "true" + + if "end_time" in spice_logs.attrs: # last scan never finished + end_date_time = spice_logs.attrs["end_time"] + end_time = datetime.strptime(end_date_time, "%m/%d/%Y %I:%M:%S %p").isoformat() + nxentry.create_dataset(name="end_time", data=end_time, maxshape=None) + nxentry["end_time"].attrs["type"] = "NX_DATE_TIME" + + # Valid enumeration values for root['/entry']['definition'] are: + # NXtas + nxentry.create_dataset(name="definition", data="NXtas", maxshape=None) + nxentry["definition"].attrs["type"] = "NX_CHAR" + nxentry["definition"].attrs["EX_required"] = "true" + + # --------------------------- instrument --------------------------- + nxentry["instrument"].create_dataset(name="name", data=spice_logs.attrs["instrument"], maxshape=None) + nxentry["instrument/name"].attrs["type"] = "NX_CHAR" + + # --------------------------- source --------------------------- + nxentry["instrument/source"].create_dataset(name="name", data="HFIR", maxshape=None) + nxentry["instrument/source/name"].attrs["type"] = "NX_CHAR" + nxentry["instrument/source/name"].attrs["EX_required"] = "true" + + # Valid enumeration values for root['/entry/instrument/source']['probe'] are: + # neutron + # x-ray + + nxentry["instrument/source"].create_dataset(name="probe", data="neutron", maxshape=None) + nxentry["instrument/source/probe"].attrs["type"] = "NX_CHAR" + nxentry["instrument/source/probe"].attrs["EX_required"] = "true" + + # Effective distance from sample Distance as seen by radiation from sample. + # This number should be negative to signify that it is upstream of the sample. 
+ # nxentry["instrument/source"].attrs["distance"] = -0.0 + + # --------------------------- collimators --------------------------- + nxentry["instrument/collimator"].create_dataset(name="type", data="Soller", maxshape=None) + nxentry["instrument/collimator/type"].attrs["type"] = "NX_CHAR" + + div_x = [float(v) for v in list(spice_logs.attrs["collimation"].split("-"))] + nxentry["instrument/collimator"].create_dataset(name="divergence_x", data=div_x, maxshape=None) + nxentry["instrument/collimator/divergence_x"].attrs["type"] = "NX_ANGLE" + nxentry["instrument/collimator/divergence_x"].attrs["units"] = "min" + + # --------------------------- monochromator --------------------------- + nxmono.create_dataset(name="ei", data=spice_logs["ei"], maxshape=None) + nxmono["ei"].attrs["type"] = "NX_FLOAT" + nxmono["ei"].attrs["EX_required"] = "true" + # nxmono["ei"].attrs["axis"] = "1" + nxmono["ei"].attrs["units"] = "meV" + + nxmono.create_dataset(name="type", data=spice_logs.attrs["monochromator"], maxshape=None) + nxmono.attrs["type"] = "NX_CHAR" + + nxmono.create_dataset(name="m1", data=spice_logs["m1"], maxshape=None) + nxmono["m1"].attrs["type"] = "NX_FLOAT" + nxmono["m1"].attrs["units"] = "degrees" + + nxmono.create_dataset(name="m2", data=spice_logs["m2"], maxshape=None) + nxmono["m2"].attrs["type"] = "NX_FLOAT" + nxmono["m2"].attrs["units"] = "degrees" + + if "mfocus" in spice_logs.keys(): + nxmono.create_dataset(name="mfocus", data=spice_logs["mfocus"], maxshape=None) + nxmono["mfocus"].attrs["type"] = "NX_FLOAT" + + if "marc" in spice_logs.keys(): + nxmono.create_dataset(name="marc", data=spice_logs["marc"], maxshape=None) + nxmono["marc"].attrs["type"] = "NX_FLOAT" + + if "mtrans" in spice_logs.keys(): + nxmono.create_dataset(name="mtrans", data=spice_logs["mtrans"], maxshape=None) + nxmono["mtrans"].attrs["type"] = "NX_FLOAT" + + if "focal_length" in spice_logs.keys(): + nxmono.create_dataset(name="focal_length", data=spice_logs["focal_length"], maxshape=None) + 
nxmono["focal_length"].attrs["type"] = "NX_FLOAT" + + nxmono.create_dataset(name="sense", data=spice_logs.attrs["sense"][0], maxshape=None) + nxmono.attrs["type"] = "NX_CHAR" + + # nxmono.create_dataset(name="rotation_angle", data=1.0, maxshape=None) + # nxmono["rotation_angle"].attrs["type"] = "NX_FLOAT" + # nxmono["rotation_angle"].attrs["EX_required"] = "true" + # nxmono["rotation_angle"].attrs["units"] = "NX_ANGLE" + + # --------------------------- analyzer --------------------------- + + nxana.create_dataset(name="ef", data=spice_logs["ef"], maxshape=None) + nxana["ef"].attrs["type"] = "NX_FLOAT" + nxana["ef"].attrs["EX_required"] = "true" + # nxana["ef"].attrs["axis"] = "1" + nxana["ef"].attrs["units"] = "meV" + + nxana.create_dataset(name="type", data=spice_logs.attrs["analyzer"], maxshape=None) + nxana.attrs["type"] = "NX_CHAR" + + nxana.create_dataset(name="a1", data=spice_logs["a1"], maxshape=None) + nxana["a1"].attrs["type"] = "NX_FLOAT" + nxana["a1"].attrs["units"] = "degrees" + + nxana.create_dataset(name="a2", data=spice_logs["a2"], maxshape=None) + nxana["a2"].attrs["type"] = "NX_FLOAT" + nxana["a2"].attrs["units"] = "degrees" + + if "afocus" in spice_logs.keys(): + nxana.create_dataset(name="afocus", data=spice_logs["afocus"], maxshape=None) + nxana["afocus"].attrs["type"] = "NX_FLOAT" + + if spice_logs.attrs["instrument"] == "CG4C": + for i in range(8): # qm1--qm8, xm1 -- xm8 + nxana.create_dataset(name=f"qm{i+1}", data=spice_logs[f"qm{i+1}"], maxshape=None) + nxana.create_dataset(name=f"xm{i+1}", data=spice_logs[f"xm{i+1}"], maxshape=None) + + nxana.create_dataset(name="sense", data=spice_logs.attrs["sense"][2], maxshape=None) + nxana.attrs["type"] = "NX_CHAR" + + # nxana.create_dataset(name="rotation_angle", data=1.0, maxshape=None) + # nxana["rotation_angle"].attrs["type"] = "NX_FLOAT" + # nxana["rotation_angle"].attrs["EX_required"] = "true" + # nxana["rotation_angle"].attrs["units"] = "NX_ANGLE" + + # nxana.create_dataset(name="polar_angle", 
data=1.0, maxshape=None) + # nxana["polar_angle"].attrs["type"] = "NX_FLOAT" + # nxana["polar_angle"].attrs["EX_required"] = "true" + # nxana["polar_angle"].attrs["units"] = "NX_ANGLE" + + # --------------------------- detector --------------------------- + + nxentry["instrument/detector"].create_dataset(name="data", data=spice_logs["detector"], maxshape=None, dtype="int") + nxentry["instrument/detector/data"].attrs["type"] = "NX_INT" + nxentry["instrument/detector/data"].attrs["EX_required"] = "true" + nxentry["instrument/detector/data"].attrs["units"] = "counts" + + # TODO HB1 polarized experiment + + # nxentry["instrument/detector"].create_dataset(name="polar_angle", data=1.0, maxshape=None) + # nxentry["instrument/detector/polar_angle"].attrs["type"] = "NX_FLOAT" + # nxentry["instrument/detector/polar_angle"].attrs["EX_required"] = "true" + # nxentry["instrument/detector/polar_angle"].attrs["units"] = "NX_ANGLE" + + # ---------------------------- flipper --------------------------------- + if "fguide" in spice_logs.keys(): + nxflipper.create_dataset(name="fguide", data=spice_logs["fguide"], maxshape=None) + nxflipper["fguide"].attrs["type"] = "NX_FLOAT" + if "hguide" in spice_logs.keys(): + nxflipper.create_dataset(name="hguide", data=spice_logs["hguide"], maxshape=None) + nxflipper["hguide"].attrs["type"] = "NX_FLOAT" + if "vguide" in spice_logs.keys(): + nxflipper.create_dataset(name="vguide", data=spice_logs["vguide"], maxshape=None) + nxflipper["vguide"].attrs["type"] = "NX_FLOAT" + + # TODO Helmohtz coils guide fields: tbguide, aguide, bguide + # + # --------------------------- slits --------------------------- + + slits_str1 = ("bat", "bab", "bal", "bar", "bbt", "bbb", "bbl", "bbr") + slits_str2 = ("slita_lf", "slita_rt", "slita_tp", "slitb_bt", "slitb_lf", "slitb_rt", "slitb_tp") + slits_str3 = ("slit_pre_bt", "slit_pre_lf", "slit_pre_rt", "slit_pre_tp") + + slits_str = (slits_str1, slits_str2, slits_str3) + + for slit_str in slits_str: + if slit_str[0] 
in spice_logs.keys(): + for st in slit_str: + nxslit.create_dataset(name=st, data=spice_logs[st]) + nxslit[st].attrs["type"] = "NX_FLOAT" + nxslit[st].attrs["units"] = "cm" + + # --------------------------- sample --------------------------- + + nxentry["sample"].create_dataset(name="name", data=spice_logs.attrs["samplename"], maxshape=None) + nxentry["sample/name"].attrs["type"] = "NX_CHAR" + nxentry["sample/name"].attrs["EX_required"] = "true" + + nxentry["sample"].create_dataset(name="qh", data=spice_logs["h"], maxshape=None) + nxentry["sample/qh"].attrs["type"] = "NX_FLOAT" + nxentry["sample/qh"].attrs["EX_required"] = "true" + # nxentry["sample/qh"].attrs["axis"] = "1" + # nxentry["sample/qh"].attrs["units"] = "NX_DIMENSIONLESS" + + nxentry["sample"].create_dataset(name="qk", data=spice_logs["k"], maxshape=None) + nxentry["sample/qk"].attrs["type"] = "NX_FLOAT" + nxentry["sample/qk"].attrs["EX_required"] = "true" + # nxentry["sample/qk"].attrs["axis"] = "1" + # nxentry["sample/qk"].attrs["units"] = "NX_DIMENSIONLESS" + + nxentry["sample"].create_dataset(name="ql", data=spice_logs["l"], maxshape=None) + nxentry["sample/ql"].attrs["type"] = "NX_FLOAT" + nxentry["sample/ql"].attrs["EX_required"] = "true" + # nxentry["sample/ql"].attrs["axis"] = "1" + # nxentry["sample/ql"].attrs["units"] = "NX_DIMENSIONLESS" + + nxentry["sample"].create_dataset(name="en", data=spice_logs["e"], maxshape=None) + nxentry["sample/en"].attrs["type"] = "NX_FLOAT" + nxentry["sample/en"].attrs["EX_required"] = "true" + # nxentry["sample/en"].attrs["axis"] = "1" + nxentry["sample/en"].attrs["units"] = "meV" + + nxentry["sample"].create_dataset(name="sgu", data=spice_logs["sgu"], maxshape=None) + nxentry["sample/sgu"].attrs["type"] = "NX_FLOAT" + nxentry["sample/sgu"].attrs["EX_required"] = "true" + nxentry["sample/sgu"].attrs["units"] = "degrees" + + nxentry["sample"].create_dataset(name="sgl", data=spice_logs["sgl"], maxshape=None) + nxentry["sample/sgl"].attrs["type"] = "NX_FLOAT" + 
nxentry["sample/sgl"].attrs["EX_required"] = "true" + nxentry["sample/sgl"].attrs["units"] = "degrees" + + nxentry["sample"].create_dataset(name="unit_cell", data=spice_logs.attrs["latticeconstants"], maxshape=None) + nxentry["sample/unit_cell"].attrs["type"] = "NX_FLOAT" + nxentry["sample/unit_cell"].attrs["EX_required"] = "true" + # nxentry["sample/unit_cell"].attrs["units"] = "NX_LENGTH" + + nxentry["sample"].create_dataset(name="orientation_matrix", data=spice_logs.attrs["ubmatrix"], maxshape=None) + nxentry["sample/orientation_matrix"].attrs["type"] = "NX_FLOAT" + nxentry["sample/orientation_matrix"].attrs["EX_required"] = "true" + nxentry["sample/orientation_matrix"].attrs["units"] = "NX_DIMENSIONLESS" + + nxentry["sample"].create_dataset(name="ub_conf", data=spice_logs.attrs["ubconf"].split(".")[0], maxshape=None) + nxentry["sample/ub_conf"].attrs["type"] = "NX_CHAR" + + nxentry["sample"].create_dataset(name="plane_normal", data=spice_logs.attrs["plane_normal"], maxshape=None) + nxentry["sample/plane_normal"].attrs["type"] = "NX_FLOAT" + + nxentry["sample"].create_dataset(name="q", data=spice_logs["q"], maxshape=None) + nxentry["sample/q"].attrs["type"] = "NX_FLOAT" + nxentry["sample/q"].attrs["units"] = "Angstrom^-1" + + nxentry["sample"].create_dataset(name="stu", data=spice_logs["stu"], maxshape=None) + nxentry["sample/stu"].attrs["type"] = "NX_FLOAT" + nxentry["sample/stu"].attrs["units"] = "degrees" + + nxentry["sample"].create_dataset(name="stl", data=spice_logs["stl"], maxshape=None) + nxentry["sample/stl"].attrs["type"] = "NX_FLOAT" + nxentry["sample/stl"].attrs["units"] = "degrees" + + nxentry["sample"].create_dataset(name="s1", data=spice_logs["s1"], maxshape=None) + nxentry["sample/s1"].attrs["type"] = "NX_FLOAT" + nxentry["sample/s1"].attrs["units"] = "degrees" + + nxentry["sample"].create_dataset(name="s2", data=spice_logs["s2"], maxshape=None) + nxentry["sample/s2"].attrs["type"] = "NX_FLOAT" + nxentry["sample/s2"].attrs["units"] = "degrees" + 
+ nxentry["sample"].create_dataset(name="type", data=spice_logs.attrs["sampletype"], maxshape=None) + nxentry["sample/type"].attrs["type"] = "NX_CHAR" + + nxentry["sample"].create_dataset(name="sense", data=spice_logs.attrs["sense"][1], maxshape=None) + nxentry["sample"].attrs["type"] = "NX_CHAR" + + # nxentry["sample"].create_dataset(name="rotation_angle", data=1.0, maxshape=None) + # nxentry["sample/rotation_angle"].attrs["type"] = "NX_FLOAT" + # nxentry["sample/rotation_angle"].attrs["EX_required"] = "true" + # nxentry["sample/rotation_angle"].attrs["units"] = "NX_ANGLE" + + # nxentry["sample"].create_dataset(name="polar_angle", data=1.0, maxshape=None) + # nxentry["sample/polar_angle"].attrs["type"] = "NX_FLOAT" + # nxentry["sample/polar_angle"].attrs["EX_required"] = "true" + # nxentry["sample/polar_angle"].attrs["units"] = "NX_ANGLE" + + # --------------------------- monitor --------------------------- + # Valid enumeration values for root['/entry/monitor']['mode'] are: + # monitor + # time + # mcu + + if spice_logs.attrs["preset_type"] == "normal": + + preset_channel = spice_logs.attrs["preset_channel"] + + nxentry["monitor"].create_dataset(name="mode", data=preset_channel, maxshape=None) + nxentry["monitor/mode"].attrs["type"] = "NX_CHAR" + nxentry["monitor/mode"].attrs["EX_required"] = "true" + + nxentry["monitor"].create_dataset(name="preset", data=spice_logs.attrs["preset_value"], maxshape=None) + nxentry["monitor/preset"].attrs["type"] = "NX_FLOAT" + nxentry["monitor/preset"].attrs["EX_required"] = "true" + + nxentry["monitor"].create_dataset(name="time", data=spice_logs["time"], maxshape=None) + nxentry["monitor/time"].attrs["type"] = "NX_FLOAT" + nxentry["monitor/time"].attrs["units"] = "seconds" + + nxentry["monitor"].create_dataset(name="monitor", data=spice_logs["monitor"], maxshape=None) + nxentry["monitor/monitor"].attrs["type"] = "NX_INT" + nxentry["monitor/monitor"].attrs["units"] = "counts" + + nxentry["monitor"].create_dataset(name="mcu", 
data=spice_logs["mcu"], maxshape=None) + nxentry["monitor/mcu"].attrs["type"] = "NX_FLOAT" + + nxentry["monitor"].create_dataset(name="data", data=spice_logs[preset_channel], maxshape=None) + nxentry["monitor/data"].attrs["type"] = "NX_FLOAT" + nxentry["monitor/data"].attrs["EX_required"] = "true" + # nxentry["monitor/data"].attrs["units"] = "counts" + + # TODO polarized exp at HB1 + elif spice_logs.attrs["preset_type"] == "countfile": + print("Polarization data, not yet supported.") + + else: + print("Unrecogonized preset type. ") + + # --------------------------- data links --------------------------- + + def find_val(val, grp, prefix=""): + + for obj_name, obj in grp.items(): + if obj_name in ("SPICElogs", "data"): + continue + else: + path = f"{prefix}/{obj_name}" + if val == obj_name: + return path + elif isinstance(obj, h5py.Group): # test for group (go down) + gpath = find_val(val, obj, path) + if gpath: + return gpath + + nexus_dict = {"h": "qh", "k": "qk", "l": "ql", "e": "en"} + def_x = spice_logs.attrs["def_x"] + def_y = spice_logs.attrs["def_y"] + if def_x in nexus_dict: + def_x = nexus_dict[def_x] + + path_x = find_val(def_x, nxentry) + path_y = find_val(def_y, nxentry) + if def_y == "detector" or def_y == "monitor": + path_y += "/data" + + # Create the LINKS + nxentry["data/" + def_y] = h5py.SoftLink(nxentry.name + path_y) + nxentry["data/" + def_y + "/"].attrs["target"] = nxentry.name + path_y + # if def_y == "detector" or def_y == "monitor": + # nxentry["data"].attrs["signal"] = "data" + # else: + nxentry["data"].attrs["signal"] = def_y + + # Create the LINKS + nxentry["data/" + def_x] = h5py.SoftLink(nxentry.name + path_x) + nxentry["data/" + def_x + "/"].attrs["target"] = nxentry.name + path_x + + if def_x in nexus_dict: + nxentry["data"].attrs["axes"] = nexus_dict[def_x] + + else: + nxentry["data"].attrs["axes"] = def_x + + # # Create the DOC strings + # nxentry["definition"].attrs["EX_doc"] = "Official NeXus NXDL schema to which this file 
conforms " + # nxentry["sample/name"].attrs["EX_doc"] = "Descriptive name of sample " + # nxentry["monitor/mode"].attrs[ + # "EX_doc" + # ] = "Count to a preset value based on either clock time (timer) or received monitor counts (monitor). " + # nxentry["monitor/preset"].attrs["EX_doc"] = "preset value for time or monitor " + # nxentry["monitor/data"].attrs["EX_doc"] = "Total integral monitor counts " + # nxentry["data"].attrs[ + # "EX_doc" + # ] = "One of the ei,ef,qh,qk,ql,en should get a primary=1 attribute to denote the main scan axis " + + # Create the ATTRIBUTES + # root["/"].attrs["default"] = "entry" + # root["/entry"].attrs["default"] = "data" + # nxentry["data"].attrs["signal"] = "data" + # nxentry["data/data"].attrs["signal"] = "1" + + # -------------------------------------------------------------------------------------- + + # # /entry/sample + # nxsample = nxentry.create_group("sample") + # nxsample.attrs["name"] = headers["samplename"] + # nxsample.attrs["type"] = headers["sampletype"] + # nxsample.attrs["mosiac"] = headers["samplemosaic"] + + # TODO sample environment -- temperture, magnetic field, pressure + temperatue_str = ( + "coldtip", + "tsample", + "temp_a", + "temp_2", + "vti", + "sample", + "temp", + "dr_tsample", + "dr_temp", + ) + for t in temperatue_str: + if t in spice_logs.keys(): + nxentry["sample"].create_dataset(name=t, data=spice_logs[t], maxshape=None) + nxentry["sample/" + t].attrs["type"] = "NX_FLOAT" + nxentry["sample/" + t].attrs["units"] = "K" + + # TODO field + # TODO pressure + + +# TODO +def instrument_info_to_nexus(nxentry, instrument_params): + """Extra info missing in SPICE, for resolution calculation + + Args: + nxentry: + instrument_params (dict) + + Note: + This function does NOT overwrite exsiting parameters in SPICE, + it only adds extra info to the nexus file. 
+ """ + source = instrument_params["source"] + mono = instrument_params["monochromator"] + monitor = instrument_params["monitor"] + ana = instrument_params["analyser"] + detector = instrument_params["detector"] + collimators = instrument_params["collimators"] + # --------------------------- source --------------------------- + # rectangular or circular + # divide by np.sqrt(12) if rectangular + # Diameter D/4 if spherical + nxentry["instrument/source"].create_dataset(name="shape", data=source["shape"], maxshape=None) + nxentry["instrument/source/shape"].attrs["type"] = "NX_CHAR" + + nxentry["instrument/source"].create_dataset(name="width", data=source["width"], maxshape=None) + nxentry["instrument/source/width"].attrs["type"] = "NX_FLOAT" + nxentry["instrument/source/width"].attrs["units"] = "cm" + + nxentry["instrument/source"].create_dataset(name="height", data=source["height"], maxshape=None) + nxentry["instrument/source/height"].attrs["type"] = "NX_FLOAT" + nxentry["instrument/source/height"].attrs["units"] = "cm" + + # --------------------------- monochromator --------------------------- + nxentry["instrument/monochromator"].create_dataset(name="d_spacing", data=mono["d_spacing"], maxshape=None) + nxentry["instrument/monochromator/d_spacing"].attrs["type"] = "NX_FLOAT" + nxentry["instrument/monochromator/d_spacing"].attrs["units"] = "Angstrom" + + nxentry["instrument/monochromator"].create_dataset(name="mosaic", data=mono["mosaic"], maxshape=None) + nxentry["instrument/monochromator/mosaic"].attrs["type"] = "NX_FLOAT" + nxentry["instrument/monochromator/mosaic"].attrs["units"] = "min" + + nxentry["instrument/monochromator"].create_dataset(name="mosaic_v", data=mono["mosaic_v"], maxshape=None) + nxentry["instrument/monochromator/mosaic_v"].attrs["type"] = "NX_FLOAT" + nxentry["instrument/monochromator/mosaic_v"].attrs["units"] = "min" + + # divide by np.sqrt(12) if rectangular + nxentry["instrument/monochromator"].create_dataset(name="width", 
data=mono["width"], maxshape=None) + nxentry["instrument/monochromator/width"].attrs["type"] = "NX_FLOAT" + nxentry["instrument/monochromator/width"].attrs["units"] = "cm" + + # divide by np.sqrt(12) if rectangular + nxentry["instrument/monochromator"].create_dataset(name="height", data=mono["height"], maxshape=None) + nxentry["instrument/monochromator/height"].attrs["type"] = "NX_FLOAT" + nxentry["instrument/monochromator/height"].attrs["units"] = "cm" + + # divide by np.sqrt(12) if rectangular + nxentry["instrument/monochromator"].create_dataset(name="depth", data=mono["depth"], maxshape=None) + nxentry["instrument/monochromator/depth"].attrs["type"] = "NX_FLOAT" + nxentry["instrument/monochromator/depth"].attrs["units"] = "cm" + + # horizontal focusing + nxentry["instrument/monochromator"].create_dataset(name="curvh", data=mono["curvh"], maxshape=None) + nxentry["instrument/monochromator/curvh"].attrs["type"] = "NX_FLOAT" + nxentry["instrument/monochromator/curvh"].attrs["units"] = "degrees" + + # vertical focusing + nxentry["instrument/monochromator"].create_dataset(name="curvv", data=mono["curvv"], maxshape=None) + nxentry["instrument/monochromator/curvv"].attrs["type"] = "NX_FLOAT" + nxentry["instrument/monochromator/curvv"].attrs["units"] = "degrees" + + # --------------------------- monitor --------------------------- + nxentry["monitor"].create_dataset(name="width", data=monitor["width"], maxshape=None) + nxentry["monitor/width"].attrs["type"] = "NX_FLOAT" + nxentry["monitor/width"].attrs["units"] = "cm" + + # divide by np.sqrt(12) if rectangular + nxentry["monitor"].create_dataset(name="height", data=monitor["height"], maxshape=None) + nxentry["monitor/height"].attrs["type"] = "NX_FLOAT" + nxentry["monitor/height"].attrs["units"] = "cm" + + # --------------------------- analyzer --------------------------- + nxentry["instrument/analyser"].create_dataset(name="d_spacing", data=ana["d_spacing"], maxshape=None) + 
nxentry["instrument/analyser/d_spacing"].attrs["type"] = "NX_FLOAT" + nxentry["instrument/analyser/d_spacing"].attrs["units"] = "Angstrom" + + nxentry["instrument/analyser"].create_dataset(name="mosaic", data=ana["mosaic"], maxshape=None) + nxentry["instrument/analyser/mosaic"].attrs["type"] = "NX_FLOAT" + nxentry["instrument/analyser/mosaic"].attrs["units"] = "min" + + nxentry["instrument/analyser"].create_dataset(name="mosaic_v", data=ana["mosaic_v"], maxshape=None) + nxentry["instrument/analyser/mosaic_v"].attrs["type"] = "NX_FLOAT" + nxentry["instrument/analyser/mosaic_v"].attrs["units"] = "min" + + # divide by np.sqrt(12) if rectangular + nxentry["instrument/analyser"].create_dataset(name="width", data=ana["width"], maxshape=None) + nxentry["instrument/analyser/width"].attrs["type"] = "NX_FLOAT" + nxentry["instrument/analyser/width"].attrs["units"] = "cm" + + # divide by np.sqrt(12) if rectangular + nxentry["instrument/analyser"].create_dataset(name="height", data=ana["height"], maxshape=None) + nxentry["instrument/analyser/height"].attrs["type"] = "NX_FLOAT" + nxentry["instrument/analyser/height"].attrs["units"] = "cm" + + # divide by np.sqrt(12) if rectangular + nxentry["instrument/analyser"].create_dataset(name="depth", data=ana["depth"], maxshape=None) + nxentry["instrument/analyser/depth"].attrs["type"] = "NX_FLOAT" + nxentry["instrument/analyser/depth"].attrs["units"] = "cm" + + # horizontal focusing + nxentry["instrument/analyser"].create_dataset(name="curvh", data=ana["curvh"], maxshape=None) + nxentry["instrument/analyser/curvh"].attrs["type"] = "NX_FLOAT" + nxentry["instrument/analyser/curvh"].attrs["units"] = "degrees" + + # vertical focusing + nxentry["instrument/analyser"].create_dataset(name="curvv", data=ana["curvv"], maxshape=None) + nxentry["instrument/analyser/curvv"].attrs["type"] = "NX_FLOAT" + nxentry["instrument/analyser/curvv"].attrs["units"] = "degrees" + + # --------------------------- detector --------------------------- + # 
rectangular or circular + # divide by np.sqrt(12) if rectangular + # Diameter D/4 if spherical + nxentry["instrument/detector"].create_dataset(name="shape", data=detector["shape"], maxshape=None) + nxentry["instrument/detector/shape"].attrs["type"] = "NX_CHAR" + + # divide by np.sqrt(12) if rectangular + nxentry["instrument/detector"].create_dataset(name="width", data=detector["width"], maxshape=None) + nxentry["instrument/detector/width"].attrs["type"] = "NX_FLOAT" + nxentry["instrument/detector/width"].attrs["units"] = "cm" + + # divide by np.sqrt(12) if rectangular + nxentry["instrument/detector"].create_dataset(name="height", data=detector["height"], maxshape=None) + nxentry["instrument/detector/height"].attrs["type"] = "NX_FLOAT" + nxentry["instrument/detector/height"].attrs["units"] = "cm" + + # --------------------------- collimators --------------------------- + nxentry["instrument/collimator"].create_dataset(name="divergence_y", data=[150, 270, 300, 600], maxshape=None) + nxentry["instrument/collimator/divergence_y"].attrs["type"] = "NX_ANGLE" + nxentry["instrument/collimator/divergence_y"].attrs["units"] = "min" + + # --------------------------- distances --------------------------- + nxentry["instrument"].create_dataset(name="dist_src_mono", data=650.0, maxshape=None) + nxentry["instrument/dist_src_mono"].attrs["type"] = "NX_FLOAT" + nxentry["instrument/dist_src_mono"].attrs["units"] = "cm" + + nxentry["instrument"].create_dataset(name="dist_mono_sample", data=190.0, maxshape=None) + nxentry["instrument/dist_mono_sample"].attrs["type"] = "NX_FLOAT" + nxentry["instrument/dist_mono_sample"].attrs["units"] = "cm" + + nxentry["instrument"].create_dataset(name="dist_sample_ana", data=160.0, maxshape=None) + nxentry["instrument/dist_sample_ana"].attrs["type"] = "NX_FLOAT" + nxentry["instrument/dist_sample_ana"].attrs["units"] = "cm" + + nxentry["instrument"].create_dataset(name="dist_ana_det", data=60.0, maxshape=None) + 
nxentry["instrument/dist_ana_det"].attrs["type"] = "NX_FLOAT" + nxentry["instrument/dist_ana_det"].attrs["units"] = "cm" + + nxentry["instrument"].create_dataset(name="dist_mono_monitor", data=86.0, maxshape=None) + nxentry["instrument/dist_mono_monitor"].attrs["type"] = "NX_FLOAT" + nxentry["instrument/dist_mono_monitor"].attrs["units"] = "cm" + + # -----------------------------------sample------------------------------------- + nxentry["sample"].create_dataset(name="shape", data="cylindrical", maxshape=None) + nxentry["sample/shape"].attrs["type"] = "NX_CHAR" + + nxentry["sample"].create_dataset(name="width", data=1.0, maxshape=None) + nxentry["sample/width"].attrs["type"] = "NX_FLOAT" + nxentry["sample/width"].attrs["units"] = "cm" + + nxentry["sample"].create_dataset(name="height", data=1.0, maxshape=None) + nxentry["sample/height"].attrs["type"] = "NX_FLOAT" + nxentry["sample/height"].attrs["units"] = "cm" + + nxentry["sample"].create_dataset(name="depth", data=1.0, maxshape=None) + nxentry["sample/depth"].attrs["type"] = "NX_FLOAT" + nxentry["sample/depth"].attrs["units"] = "cm" + + nxentry["sample"].create_dataset(name="mosaic", data=1.0, maxshape=None) + nxentry["sample/mosaic"].attrs["type"] = "NX_FLOAT" + nxentry["sample/mosaic"].attrs["units"] = "min" + + nxentry["sample"].create_dataset(name="mosaic_v", data=1.0, maxshape=None) + nxentry["sample/mosaic_v"].attrs["type"] = "NX_FLOAT" + nxentry["sample/mosaic_v"].attrs["units"] = "min" + + +def convert_spice_to_nexus(path_to_spice_folder, path_to_hdf5): + """Load data from spice folder. 
Convert to a nexus file + + Args: + path_to_spice_folder (str): spice folder, ends with '/' + path_to_nexus (str): path to hdf5 data file, ends with '.h5' + instrument_config: python file contains instrument configuration parameters + """ + + print(f"Converting {path_to_spice_folder} to {path_to_hdf5}") + + p = Path(path_to_spice_folder) + + with h5py.File(path_to_hdf5, "w") as root: + + # ----------------------------- ub info ------------------------------------ + ub_files = sorted((p / "UBConf").glob("*.ini")) + tmp_ub_files = sorted((p / "UBConf/tmp").glob("*.ini")) + ub_entries = root.create_group("UBConf") + ub_entries.attrs["NX_class"] = "NXcollection" + ub_entries.attrs["X_required"] = "false" + + for ub_file in ub_files + tmp_ub_files: + ub_entry_name = ub_file.parts[-1].split(".")[0] + ub_entry = ub_entries.create_group(ub_entry_name) + ub_conf = read_spice_ub(ub_file) + for ub_item in ub_conf.items(): + k, v = ub_item + ub_entry.create_dataset(name=k, data=v, maxshape=None) + + # --------------------------- scan info ------------------------------------ + scans = sorted((p / "Datafiles").glob("*.dat")) + instrument_str = scans[0].parts[-1].split("_")[0] + + # read in exp_info from the first scan and save as attibutes of the file + _, _, headers, _ = read_spice(scans[0]) + ipts = headers["proposal"] + exp_num = headers["experiment_number"] + + # root.attrs["name"] = headers["experiment"] + # root.attrs["users"] = headers["users"] + # root.attrs["local_contact"] = headers["local_contact"] + + # convert SPICE scans into nexus entries + for scan in scans: # ignoring unused keys + spice_data, col_headers, headers, unused = read_spice(scan) + + # pre-processing + scan_num = ((scan.parts[-1].split("_"))[-1]).split(".")[0] # e.g. 
"scan0001" + if "scan_title" in unused: + headers["scan_title"] = "" + + # /entry/SPICElogs0 + nxentry = root.create_group(scan_num) + spice_logs = nxentry.create_group("SPICElogs") + spice_logs.attrs["NX_class"] = "NXcollection" + spice_logs.attrs["X_required"] = "false" + spice_logs.attrs["instrument"] = instrument_str + + # metadata to attibutes + exp_str = ["scan_title", "users", "local_contact", "experiment"] + for k, v in headers.items(): + + if "," in v and k not in exp_str: # vectors + spice_logs.attrs[k] = np.array([float(v0) for v0 in v.split(",")]) + elif v.replace(".", "").isnumeric(): # numebrs only + if v.isdigit(): # int + spice_logs.attrs[k] = int(v) + else: # float + spice_logs.attrs[k] = float(v) + # separate COM/FWHM and its errorbar + elif k == "Center of Mass": + com, e_com = v.split("+/-") + spice_logs.attrs["COM"] = float(com) + spice_logs.attrs["COM_err"] = float(e_com) + elif k == "Full Width Half-Maximum": + fwhm, e_fwhm = v.split("+/-") + spice_logs.attrs["FWHM"] = float(fwhm) + spice_logs.attrs["FWHM_err"] = float(e_fwhm) + else: # other crap, keep as is + spice_logs.attrs[k] = v + + # motor position table to datasets + if spice_data.ndim == 1: # empty data or 1 point only + if len(spice_data): # 1 point only + for idx, col_header in enumerate(col_headers): + spice_logs.create_dataset(col_header, data=spice_data[idx]) + else: # empty + pass + else: # nomarl data + for idx, col_header in enumerate(col_headers): + spice_logs.create_dataset(col_header, data=spice_data[:, idx]) + + spicelogs_to_nexus(nxentry) + # instrument_info_to_nexus(nxentry, instrument_config) + + # Create the ATTRIBUTES + root.attrs["file_name"] = os.path.abspath(f"IPTS{ipts}_{instrument_str}_exp{exp_num}") + root.attrs["file_time"] = datetime.now().isoformat() + root.attrs["h5py_version"] = h5py.version.version + root.attrs["HDF5_Version"] = h5py.version.hdf5_version + + +if __name__ == "__main__": + spice_folder = "./tests/test_data_folder/exp424/" # CG4C + # 
spice_folder = "./tests/test_data_folder/exp758/" # NB3 + # spice_folder = "./tests/test_data_folder/exp932/" # HB1 polarized + # h5_file_name = "./tests/test_data_folder/tavi_exp758.h5" + nexus_file_name = "./tests/test_data_folder/nexus_exp424.h5" + + convert_spice_to_nexus(spice_folder, nexus_file_name) + # TODO update instrument parameters and sample parameters of a single scan + # instrument_params) diff --git a/src/tavi/data/tavi_data.py b/src/tavi/data/tavi_data.py new file mode 100644 index 00000000..a56012cb --- /dev/null +++ b/src/tavi/data/tavi_data.py @@ -0,0 +1,303 @@ +import h5py + +from tavi.tavi_data.scan import Scan +from tavi.tavi_data.scan_group import ScanGroup +from tavi.tavi_data.spice_to_nexus import convert_spice_to_nexus + + +class TAVI_Data(object): + """TAVI data file manager. + + TAVI_data contains four possible categories, including + - data, a list of 1D scans, raws data. + - process_data, including combined scan, 2D maps or dispersion plot + - fit, contains fitting info including model, parameters and reduced chi_squared + - plot, contains one or more scans and/or fits + + Attributes: + hdf5_path: save path to hdf5 + self.scans: list of Scan instances + + """ + + def __init__(self): + """Initialization""" + self.file_path = None + self.data = {} + self.processed_data = {} + self.fits = {} + self.plots = {} + + def new_tavi_file(self, file_path): + """Create a new tavi file""" + self.file_path = file_path + with h5py.File(file_path, "w") as root: + root.create_group("data") + root.create_group("processed_data") + root.create_group("fits") + root.create_group("plots") + + def load_nexus_data_from_disk(self, path_to_hdf5): + """Load hdf5 data from path_to_hdf5. + + Args: + path_to_hdf5 (str): path to hdf5 data file + OVERWRITE (bool): overwrite exsiting data if Ture, oterwise append new scans in data. 
+ Do not change processed_data, fit or plot + """ + # TODO check if file exists + + with h5py.File(self.file_path, "a") as tavi_file, h5py.File(path_to_hdf5, "r") as data_file: + # IPTS1234_HB3_exp567 + data_id = data_file.attrs["file_name"].split("/")[-1] + grp = tavi_file["data"].create_group(data_id) + + scans = {} + for entry in data_file: + data_file.copy(source=data_file[entry], dest=grp, expand_soft=True) + + if entry[0:4] == "scan": + s = Scan(data_file[entry]) + scans.update({entry: s}) + + self.data.update({data_id: scans}) + + def load_spice_data_from_disk(self, path_to_spice_folder, OVERWRITE=True): + """Load data from the SPICE folder at path_to_spice_folder. + + Args: + path_to_spice_folder (str): path to spice folder + OVERWRITE (bool): overwrite existing data if True, otherwise append new scans in data. + Do not change processed_data, fit or plot + """ + + # exp_info = [ + # "experiment", + # "experiment_number", + # "proposal", + # "users", + # "local_contact", + # ] + + # p = Path(path_to_spice_folder) + # scans = sorted((p / "Datafiles").glob("*")) + # instrument, exp = scans[0].parts[-1].split("_")[0:2] + + # # read in exp_info from the first scan and save as attributes of the file + # _, _, headers, _ = read_spice(scans[0]) + # ipts = headers["proposal"] + # data_id = f"IPTS{ipts}_{instrument}_{exp}" # e.g. "IPTS1234_HB3_exp567" + # data_entries = [] + + # for scan in scans: # ignoring unused keys + # spice_data, col_headers, headers, unused = read_spice(scan) + # scan_id = ((scan.parts[-1].split("_"))[-1]).split(".")[0] # e.g. 
"scan0001" + + # meta_data = {"scan_id": scan_id} + + # for k, v in headers.items(): + # if k not in exp_info: # ignore common keys in single scans + # if "," in v and k != "scan_title": # vectors + # meta_data.update({k: np.array([float(v0) for v0 in v.split(",")])}) + # elif v.replace(".", "").isnumeric(): # numebrs only + # if v.isdigit(): # int + # meta_data.update({k: int(v)}) + # else: # float + # meta_data.update({k: float(v)}) + # # separate COM/FWHM and its errorbar + # elif k == "Center of Mass": + # com, e_com = v.split("+/-") + # meta_data.update({"COM": float(com)}) + # meta_data.update({"COM_err": float(e_com)}) + # elif k == "Full Width Half-Maximum": + # fwhm, e_fwhm = v.split("+/-") + # meta_data.update({"FWHM": float(fwhm)}) + # meta_data.update({"FWHM_err": float(e_fwhm)}) + # else: # other crap, keep as is + # if k not in exp_info: + # meta_data.update({k: v}) + + # data = {} + + # if spice_data.ndim == 1: # empty data or 1 point only + # if len(spice_data): # 1 point only + # for idx, col_header in enumerate(col_headers): + # data.update({col_header: spice_data[idx]}) + # else: # empty + # pass + # else: # nomarl data + # for idx, col_header in enumerate(col_headers): + # data.update({col_header: spice_data[:, idx]}) + + # s = Scan() + # s.set_metadata(meta_data) + # s.set_data(data) + + # data_entries.append(s) + + # self.data.update({data_id: data_entries}) + + def load_data_from_oncat(self, user_credentials, ipts_info, OVERWRITE=True): + """Load data from ONCat based on user_credentials and ipts_info. + + Args: + user_credentials (str): username and password. + scipts_infoans (str): ipts number and exp number. + OVERWRITE (bool): overwrite exsiting data if Ture, oterwise append. 
+ Do not change processed_data, fit or plot + """ + pass + + def open_tavi_file(self, file_path): + """Open existing tavi file""" + self.file_path = file_path + with h5py.File(file_path, "a") as tavi_file: + + # load datasets in data folder + for data_id in tavi_file["data"].keys(): + dataset = tavi_file["data"][data_id] + scans = {} + for entry in dataset: + if entry[0:4] == "scan": + s = Scan(dataset[entry]) + scans.update({entry: s}) + self.data.update({data_id: scans}) + + # TODO + # load processed_data fits, and plots + + def save_to_file(self, path_to_hdf5): + """Save current data to a hdf5 on disk at path_to_hdf5 + + Args: + path_to_hdf5 (str): path to hdf5 data file, ends with '.h5' + """ + pass + + def delete_data_entry(self, entry_id): + """Delete a date entry from file based on entry_id. + + If an data entry if deleted, also delete the fits, scan_groups, plots + that contains the conresponding entry. + + Args: + entry_id (str): ID of entry to be deteled. + """ + pass + + def select_entries(self, conditions): + """Return a list of data entry IDs which satisfis the conditions. + + All data points in a scan have to satisfy all conditions. + + Args: + conditions (dict): scan[key] equals value + or in the range of (vaule[0], value[1]) + for example: {"IPTS":1234, "temperature":(2,5)} + + Returns: + tuple: entry IDs + + """ + pass + + # """Load data from spice folder. 
+ + # Args: + # path_to_spice_folder (str): spice folder, ends with '/' + # path_to_hdf5 (str): path to hdf5 data file, ends with '.h5' + # """ + # exp_info = [ + # "experiment", + # "experiment_number", + # "proposal", + # "users", + # "local_contact", + # ] + + # p = Path(path_to_spice_folder) + + # with h5py.File(path_to_hdf5, "w") as f: + + # scans = sorted((p / "Datafiles").glob("*")) + # instrument_str, exp_str = scans[0].parts[-1].split("_")[0:2] + + # # read in exp_info from the first scan and save as attibutes of the file + # _, _, headers, _ = read_spice(scans[0]) + # ipts = headers["proposal"] + # exp_id = "IPTS" + ipts + "_" + instrument_str + + # grp_data = f.create_group("data_" + exp_id) + # grp_processed_data = f.create_group("processed_data") + # grp_fit = f.create_group("fits") + # grp_plot = f.create_group("plots") + + # for k, v in headers.items(): + # if k in exp_info: + # grp_data.attrs[k] = v + + # # read scans into dataset1 + # for scan in scans: # ignoring unused keys + # spice_data, col_headers, headers, unused = read_spice(scan) + + # scan_num = ((scan.parts[-1].split("_"))[-1]).split(".")[0] + # scan_id = exp_str + "_" + scan_num + # scan_entry = grp_data.create_group(scan_id) + # scan_entry.attrs["scan_id"] = scan_id + + # for k, v in headers.items(): + # if k not in exp_info: # ignore common keys in single scans + # if "," in v and k != "scan_title": # vectors + # scan_entry.attrs[k] = np.array([float(v0) for v0 in v.split(",")]) + # elif v.replace(".", "").isnumeric(): # numebrs only + # if v.isdigit(): # int + # scan_entry.attrs[k] = int(v) + # else: # float + # scan_entry.attrs[k] = float(v) + # # separate COM/FWHM and its errorbar + # elif k == "Center of Mass": + # com, e_com = v.split("+/-") + # scan_entry.attrs["COM"] = float(com) + # scan_entry.attrs["COM_err"] = float(e_com) + # elif k == "Full Width Half-Maximum": + # fwhm, e_fwhm = v.split("+/-") + # scan_entry.attrs["FWHM"] = float(fwhm) + # scan_entry.attrs["FWHM_err"] = 
float(e_fwhm) + # else: # other crap, keep as is + # if k not in exp_info: + # scan_entry.attrs[k] = v + + # if spice_data.ndim == 1: # empty data or 1 point only + # if len(spice_data): # 1 point only + # for idx, col_header in enumerate(col_headers): + # scan_entry.create_dataset(col_header, data=spice_data[idx]) + # else: # empty + # pass + # else: # nomarl data + # for idx, col_header in enumerate(col_headers): + # scan_entry.create_dataset(col_header, data=spice_data[:, idx]) + + def get_selected(self): + return "scan0001" + + def generate_scan_group( + self, + signals=None, + backgrounds=None, + signal_axes=(None, None, None), + background_axes=(None, None, None), + ): + """Generate a scan group.""" + sg = ScanGroup(signals, backgrounds, signal_axes, background_axes) + + return sg + + +if __name__ == "__main__": + spice_folder = "./tests/test_data_folder/exp416/" + # h5_file_name = "./tests/test_data_folder/tavi_exp758.h5" + nexus_file_name = "./tests/test_data_folder/nexus_exp424.h5" + convert_spice_to_nexus(spice_folder, nexus_file_name) + + # data = TAVI_Data() + # data.load_data_from_disk(h5_file_name) diff --git a/src/tavi/instrument/.DS_Store b/src/tavi/instrument/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..e6a5f205e0156ef89b1cf6acba0fd8bcff1b84d9 GIT binary patch literal 6148 zcmeHK!D`z;5S?|LMouWW&_a$2y&CE?IHdF<-1G;kkRICLN|9(qm0e@W@xd5$PJXCg z((mb;-Hl0cdTdF`49vXo?2M#+1G^p~Qsa4kK-4Fq2+G*$!Q3Of&blF;5ZMAM_lz0M z>5>XE)NEu+;2|==y?aI_I##1+cl$S?DW!;{=xCkS7gc6$g*MYJf(6noRcLpEVoN4l z)YGi88P_j`!UUZ&(vx}rjn|VD>XP@}jDIS>J`e}An7K=&$ z`8#Wxv^2%c6_j~_l+PbaliS7E&P`ss+QctJy{I?o?=P2!N27s09F0~3y?iq~80gXK zeAS2&%fe5usrp!wm0zZ-P=_%GnF-8v3^^-E)qu=5C(*S$H+i9 z%%Z)=`0?`1!hkUFR~g{*K|>kifUQS&bilX~0N6#?34ER{cFA|GhJJyva9-{5l#mMEWcpvHnam@}e4%m7`1R{R~ M91UWGfq%-tFSWf`;{X5v literal 0 HcmV?d00001 diff --git a/src/tavi/instrument/instrument_params/cg4c.json b/src/tavi/instrument/instrument_params/cg4c.json new file mode 100644 index 
00000000..dc26bc51 --- /dev/null +++ b/src/tavi/instrument/instrument_params/cg4c.json @@ -0,0 +1,71 @@ +{ + "source": { + "shape": "rectangular", + "width": 7.0, + "height": 15.0 + }, + "guide": { + "in_use": false, + "div_h": 0.0, + "div_v": 0.0 + }, + "monochromator": { + "type": "PG002", + "mosaic": 30, + "mosaic_v": 30, + "sense": -1, + "shape": "rectangular", + "width": 7, + "height": 15, + "depth": 0.2, + "curved_h": false, + "curvh": 0.0, + "optimally_curved_h": false, + "curved_v": true, + "curvv": 60.4, + "optimally_curved_v": false + }, + "goniometer": { + "sense": 1, + "type": "Y-ZX" + }, + "monitor": {}, + "analyzer": { + "type": "Pg002", + "d_spacing": 3.35416, + "mosaic": 90, + "mosaic_v": 90, + "sense": -1, + "shape": "rectangular", + "width": 20.0, + "height": 18.0, + "depth": 0.2, + "curved_h": false, + "curvh": 0.0, + "optimally_curved_h": false, + "curved_v": true, + "curvv": 163.2, + "optimally_curved_v": false + }, + "detector": { + "shape": "rectangular", + "width": 5, + "height": 10.0 + }, + "distances": { + "src_mono": 530.0, + "mono_sample": 160.0, + "sample_ana": 106.0, + "ana_det": 50.0 + }, + "collimators": { + "h_pre_mono": 40, + "h_pre_sample": 100, + "h_post_sample": 80, + "h_post_ana": 120, + "v_pre_mono": 600, + "v_pre_sample": 600, + "v_post_sample": 600, + "v_post_ana": 600 + } +} \ No newline at end of file diff --git a/src/tavi/instrument/instrument_params/hb3.json b/src/tavi/instrument/instrument_params/hb3.json new file mode 100644 index 00000000..02de5cda --- /dev/null +++ b/src/tavi/instrument/instrument_params/hb3.json @@ -0,0 +1,75 @@ +{ + "source": { + "shape": "rectangular", + "width": 15.0, + "height": 15.0 + }, + "guide": { + "in_use": false, + "div_h": 0.0, + "div_v": 0.0 + }, + "monochromator": { + "type": "PG002", + "mosaic": 30, + "mosaic_v": 30, + "sense": -1, + "shape": "rectangular", + "width": 7.62, + "height": 10.16, + "depth": 0.25, + "curved_h": false, + "curvh": 0.0, + "optimally_curved_h": false, + 
"curved_v": false, + "curvv": 0.4, + "optimally_curved_v": false + }, + "monitor": { + "width": 5, + "height": 12 + }, + "goniometer": { + "sense": 1, + "type": "Y-ZX" + }, + "analyzer": { + "type": "Pg002", + "d_spacing": 3.35416, + "mosaic": 40, + "mosaic_v": 40, + "sense": -1, + "shape": "rectangular", + "width": 7.62, + "height": 7, + "depth": 0.2, + "curved_h": false, + "curvh": 0.0, + "optimally_curved_h": false, + "curved_v": true, + "curvv": 163.2, + "optimally_curved_v": false + }, + "detector": { + "shape": "rectangular", + "width": 4, + "height": 12.0 + }, + "distances": { + "src_mono": 650.0, + "mono_sample": 190.0, + "sample_ana": 106.0, + "ana_det": 60.0, + "mono_monitor": 86.0 + }, + "collimators": { + "h_pre_mono": 40, + "h_pre_sample": 40, + "h_post_sample": 40, + "h_post_ana": 120, + "v_pre_mono": 150, + "v_pre_sample": 270, + "v_post_sample": 300, + "v_post_ana": 600 + } +} \ No newline at end of file diff --git a/src/tavi/instrument/instrument_params/python_dicts/cg4c.py b/src/tavi/instrument/instrument_params/python_dicts/cg4c.py new file mode 100644 index 00000000..fb406d94 --- /dev/null +++ b/src/tavi/instrument/instrument_params/python_dicts/cg4c.py @@ -0,0 +1,105 @@ +from tavi.utilities import * + + +source = { + "shape": "rectangular", # rectangular or circular + # divide by np.sqrt(12) if rectangular + # Diameter D/4 if spherical + "width": 7, # / np.sqrt(12), # in cm + "height": 15, # / np.sqrt(12), # in cm +} + + +# guide before monochromator +# guide = { +# "in_use": False, +# "div_h": 0.0, +# "div_v": 0.0, +# } + +monochromator = { + "type": "PG002", + "d_spacing": mono_ana_xtal["PG002"], + "mosaic": 30, # horizontal mosaic + "mosaic_v": 30, # vertical mosaic, if anisotropic + "sense": -1, + # divide by np.sqrt(12) if rectangular + # Diameter D/4 if spherical + "width": 7.0, # / np.sqrt(12), # in cm + "height": 15.0, # / np.sqrt(12), # in cm + "depth": 0.2, # / np.sqrt(12), # in cm + # horizontal focusing + "curvh": 0.0, # in cm^-1 + 
# vertical focusing + "curvv": 60.4, # at Ei = 4 meV, in cm^-1 +} + +monitor = { + # divide by np.sqrt(12) if rectangular + # Diameter D/4 if spherical + # "width": 5 / np.sqrt(12), + # "height": 12 / np.sqrt(12), +} + +goniometer = { + "sense": +1, + "type": "Y-ZX", # s1-sgl-sgu + # CTAX's angle convention is actually YZ-X, + # but the UB calculation in SPICE is done using the convetion Y-ZX +} + +analyzer = { + "type": "Pg002", + "d_spacing": mono_ana_xtal["Pg002"], + "mosaic": 90, # horizontal mosaic + "mosaic_v": 90, # vertical mosaic, if anisotropic + "sense": -1, + # divide by np.sqrt(12) if rectangular + # Diameter D/4 if spherical + "width": 20.0 / np.sqrt(12), + "height": 15.0 / np.sqrt(12), + "depth": 0.2 / np.sqrt(12), + # horizontal focusing + "curvh": 0.0, + # vertical focusing + "curvv": 163.2, # in cm^-1 +} +detector = { + "shape": "rectangular", # rectangular or circular + # divide by np.sqrt(12) if rectangular + # Diameter D/4 if spherical + "width": 5 / np.sqrt(12), + "height": 10 / np.sqrt(12), +} + +distances = { + "src_mono": 530.0, # in cm + "mono_sample": 160.0, + "sample_ana": 106.0, + "ana_det": 50.0, + # "mono_monitor": 86.0, +} + +collimators = { # in units of mins of arc + "h_pre_mono": 40, + "h_pre_sample": 100, + "h_post_sample": 80, + "h_post_ana": 120, + "v_pre_mono": 600, + "v_pre_sample": 600, + "v_post_sample": 600, + "v_post_ana": 600, +} + + +cg4c_config_params = { + "source": source, + # "guide": guide, + "monochromator": monochromator, + "goniometer": goniometer, + "monitor": monitor, + "analyzer": analyzer, + "detector": detector, + "distances": distances, + "collimators": collimators, +} diff --git a/src/tavi/instrument/instrument_params/python_dicts/hb3.py b/src/tavi/instrument/instrument_params/python_dicts/hb3.py new file mode 100644 index 00000000..6dd1f5d8 --- /dev/null +++ b/src/tavi/instrument/instrument_params/python_dicts/hb3.py @@ -0,0 +1,104 @@ +import numpy as np +from tavi.utilities import * + + +source = { + 
"shape": "rectangular", # rectangular or circular + # divide by np.sqrt(12) if rectangular + # Diameter D/4 if spherical + "width": 15, # / np.sqrt(12), # in cm + "height": 15, # / np.sqrt(12), # in cm +} + + +# guide before monochromator +guide = { + "in_use": False, + "div_h": 0.0, + "div_v": 0.0, +} + +monochromator = { + "type": "PG002", + "d_spacing": mono_ana_xtal["PG002"], + "mosaic": 30, # horizontal mosaic + "mosaic_v": 30, # vertical mosaic, if anisotropic + "sense": -1, + # divide by np.sqrt(12) if rectangular + # Diameter D/4 if spherical + "width": 7.62, # / np.sqrt(12), + "height": 10.16, # / np.sqrt(12), + "depth": 0.25, # / np.sqrt(12), + # horizontal focusing + "curvh": 0.0, + # vertical focusing + "curvv": 0.0, +} + +monitor = { + # divide by np.sqrt(12) if rectangular + # Diameter D/4 if spherical + "width": 5 / np.sqrt(12), + "height": 12 / np.sqrt(12), +} + +goniometer = { + "sense": +1, + "type": "Y-ZX", # s1-sgl-sgu +} + +analyzer = { + "type": "Pg002", + "d_spacing": mono_ana_xtal["Pg002"], + "mosaic": 40, # horizontal mosaic + "mosaic_v": 40, # vertical mosaic, if anisotropic + "sense": -1, + # divide by np.sqrt(12) if rectangular + # Diameter D/4 if spherical + "width": 7.62, # / np.sqrt(12), + "height": 7, # / np.sqrt(12), + "depth": 0.2, # / np.sqrt(12), + # horizontal focusing + "curvh": 0.0, + # vertical focusing + "curvv": 0.0, +} +detector = { + "shape": "rectangular", # rectangular or circular + # divide by np.sqrt(12) if rectangular + # Diameter D/4 if spherical + "width": 4, # / np.sqrt(12), + "height": 12, # / np.sqrt(12), +} + +distances = { + "src_mono": 650.0, # in cm + "mono_sample": 190.0, + "sample_ana": 160.0, + "ana_det": 60.0, + "mono_monitor": 86.0, +} + +collimators = { # in units of mins of arc + "h_pre_mono": 48, + "h_pre_sample": 40, + "h_post_sample": 40, + "h_post_ana": 120, + "v_pre_mono": 150, + "v_pre_sample": 270, + "v_post_sample": 300, + "v_post_ana": 600, +} + + +config_params = { + "source": source, + 
"guide": guide, + "monochromator": monochromator, + "goniometer": goniometer, + "monitor": monitor, + "analyzer": analyzer, + "detector": detector, + "distances": distances, + "collimators": collimators, +} diff --git a/src/tavi/instrument/instrument_params/python_dicts/takin_test.py b/src/tavi/instrument/instrument_params/python_dicts/takin_test.py new file mode 100644 index 00000000..f83d1dd5 --- /dev/null +++ b/src/tavi/instrument/instrument_params/python_dicts/takin_test.py @@ -0,0 +1,113 @@ +import numpy as np +from tavi.utilities import * + + +source = { + "shape": "rectangular", # rectangular or circular + # divide by np.sqrt(12) if rectangular + # Diameter D/4 if spherical + "width": 6.0, # in cm + "height": 12.0, # in cm +} + + +# guide before monochromator +guide = { + "in_use": False, + "div_h": 15.0, # min of arc + "div_v": 15.0, # min of arc +} + +monochromator = { + "type": "PG002", + # "d_spacing": mono_ana_xtal["PG002"], + "mosaic": 45, # horizontal mosaic + "mosaic_v": 45, # vertical mosaic, if anisotropic + "sense": +1, + # divide by np.sqrt(12) if rectangular + # Diameter D/4 if spherical + "width": 12.0, + "height": 8.0, + "depth": 0.15, + # horizontal focusing + "curved_h": False, + "curvh": 0.0, + "optimally_curved_h": False, + # vertical focusing + "curved_v": False, + "curvv": 0.0, + "optimally_curved_v": False, +} + +monitor = { + "shape": "rectangular", + # divide by np.sqrt(12) if rectangular + # Diameter D/4 if spherical + "width": 5, # / np.sqrt(12), + "height": 12, # / np.sqrt(12), +} + +goniometer = { + "sense": -1, + "type": "Y-ZX", # s1-sgl-sgu +} + +analyzer = { + "type": "Pg002", + "d_spacing": mono_ana_xtal["Pg002"], + "mosaic": 45, # horizontal mosaic + "mosaic_v": 45, # vertical mosaic, if anisotropic + "sense": +1, + # divide by np.sqrt(12) if rectangular + # Diameter D/4 if spherical + "width": 12.0, + "height": 8.0, + "depth": 0.3, + # horizontal focusing + "curved_h": False, + "curvh": 0.0, + "optimally_curved_h": False, + 
# vertical focusing + "curved_v": False, + "curvv": 0.0, + "optimally_curved_v": False, +} +detector = { + "shape": "rectangular", # rectangular or circular + # divide by np.sqrt(12) if rectangular + # Diameter D/4 if spherical + "width": 1.5, # cm + "height": 5.0, # cm +} + +distances = { # in units of cm + "src_mono": 10.0, + "mono_sample": 200.0, + "sample_ana": 115.0, + "ana_det": 85.0, + # "mono_monitor": 86.0 , +} + +collimators = { # in units of mins of arc + "h_pre_mono": 30, + "h_pre_sample": 30, + "h_post_sample": 30, + "h_post_ana": 30, + "v_pre_mono": 30, + "v_pre_sample": 30, + "v_post_sample": 30, + "v_post_ana": 30, +} + + +instrument_params = { + "source": source, + "guide": guide, + "monochromator": monochromator, + "monitor": monitor, + "goniometer": goniometer, + "analyzer": analyzer, + "detector": detector, + "distances": distances, + "collimators": collimators, +} diff --git a/src/tavi/instrument/instrument_params/takin_test.json b/src/tavi/instrument/instrument_params/takin_test.json new file mode 100644 index 00000000..95638b4f --- /dev/null +++ b/src/tavi/instrument/instrument_params/takin_test.json @@ -0,0 +1,75 @@ +{ + "source": { + "shape": "rectangular", + "width": 6.0, + "height": 12.0 + }, + "guide": { + "in_use": false, + "div_h": 15.0, + "div_v": 15.0 + }, + "monochromator": { + "type": "PG002", + "mosaic": 45, + "mosaic_v": 45, + "sense": 1, + "shape": "rectangular", + "width": 12.0, + "height": 8.0, + "depth": 0.15, + "curved_h": false, + "curvh": 0.0, + "optimally_curved_h": false, + "curved_v": false, + "curvv": 0.0, + "optimally_curved_v": false + }, + "monitor": { + "shape": "rectangular", + "width": 5, + "height": 12 + }, + "goniometer": { + "sense": -1, + "type": "Y-ZX" + }, + "analyzer": { + "type": "Pg002", + "d_spacing": 3.35416, + "mosaic": 45, + "mosaic_v": 45, + "sense": 1, + "shape": "rectangular", + "width": 12.0, + "height": 8.0, + "depth": 0.3, + "curved_h": false, + "curvh": 0.0, + "optimally_curved_h": false, + 
"curved_v": false, + "curvv": 0.0, + "optimally_curved_v": false + }, + "detector": { + "shape": "rectangular", + "width": 1.5, + "height": 5.0 + }, + "distances": { + "src_mono": 10.0, + "mono_sample": 200.0, + "sample_ana": 115.0, + "ana_det": 85.0 + }, + "collimators": { + "h_pre_mono": 30, + "h_pre_sample": 30, + "h_post_sample": 30, + "h_post_ana": 30, + "v_pre_mono": 30, + "v_pre_sample": 30, + "v_post_sample": 30, + "v_post_ana": 30 + } +} \ No newline at end of file diff --git a/src/tavi/instrument/resolution/backups/cooper_nathans_bak.py b/src/tavi/instrument/resolution/backups/cooper_nathans_bak.py new file mode 100755 index 00000000..53b4c756 --- /dev/null +++ b/src/tavi/instrument/resolution/backups/cooper_nathans_bak.py @@ -0,0 +1,196 @@ +import numpy as np +import numpy.linalg as la +from tavi.utilities import * +from tavi.instrument.tas import TAS +from tavi.instrument.resolution.reso_ellipses import ResoEllipsoid + + +class CN(TAS): + """Copper-Nathans method + + Attibutes: + _mat_f + _mat_g + + + Methods: + cooper_nathans: resolution in Q-local frame + cooper_nathans_hkle: resolution in (H,K,L,E) frame + + """ + + # 4 soller slits collimators + NUM_COLLS = 4 + IDX_COLL0_H = 0 + IDX_COLL0_V = 2 + IDX_COLL1_H = 1 + IDX_COLL1_V = 3 + IDX_COLL2_H = 4 + IDX_COLL2_V = 6 + IDX_COLL3_H = 5 + IDX_COLL3_V = 7 + # 1 monochromator and 1 analyzer + NUM_MONOS = 1 + NUM_ANAS = 1 + IDX_MONO0_H = 0 + IDX_MONO0_V = 1 + IDX_ANA0_H = 2 + IDX_ANA0_V = 3 + + def __init__(self): + super().__init__() + + # constants independent of q and eng + self._mat_f = None + self._mat_g = None + + def cooper_nathans_hkle(self, ei, ef, hkl, R0=False): + """Calculate Cooper-Nathans resolution fucntion in hkle frame""" + + angles = self.find_angles(hkl, ei, ef) + r_mat = self.goniometer.r_mat(angles[1:]) + ub_mat = self.sample.ub_matrix + conv_mat = 2 * np.pi * r_mat @ ub_mat + q_lab = conv_mat @ hkl + q = np.linalg.norm(q_lab) + rez_hkle = self.cooper_nathans(ei, ef, q, R0) + + ki = 
np.sqrt(ei / ksq2eng) + kf = np.sqrt(ef / ksq2eng) + + # opposite sign of s2 + phi = get_angle(ki, q, kf) * np.sign(angles[0]) * (-1) + # phi = np.deg2rad(0) + + conv_mat_4d = np.eye(4) + # conv_mat_4d[0:3, 0:3] = rot_y(phi * rad2deg) @ rot_x(90) @ conv_mat + conv_mat_4d[0:3, 0:3] = ( + np.array( + [ + [np.sin(phi), 0, np.cos(phi)], + [np.cos(phi), 0, -np.sin(phi)], + [0, 1, 0], + ] + ) + @ conv_mat + ) + + rez_hkle.mat = conv_mat_4d.T @ rez_hkle.mat @ conv_mat_4d + self.frame = "HKLE" + return rez_hkle + + def cooper_nathans(self, ei, ef, q, R0=False): + """Calculate resolution using Cooper-Nathans method + + Args: + ei (float): incident energy, in units of meV + ef (float): final energy, in units of meV + q (float | tuple of floats): momentum transfer, in units of inverse angstrom + R0 (bool): calculate normalization factor if True + """ + self.frame = "Qlocal" + if self._mat_f is None: + # matrix F, divergence of monochromator and analyzer, [pop75] Appendix 1 + mat_f = np.zeros(((CN.NUM_MONOS + CN.NUM_ANAS) * 2, (CN.NUM_MONOS + CN.NUM_ANAS) * 2)) + mat_f[CN.IDX_MONO0_H, CN.IDX_MONO0_H] = 1.0 / self.monochromator.mosaic**2 + mat_f[CN.IDX_MONO0_V, CN.IDX_MONO0_V] = 1.0 / self.monochromator.mosaic_v**2 + mat_f[CN.IDX_ANA0_H, CN.IDX_ANA0_H] = 1.0 / self.analyzer.mosaic**2 + mat_f[CN.IDX_ANA0_V, CN.IDX_ANA0_V] = 1.0 / self.analyzer.mosaic_v**2 + self._mat_f = mat_f + + if self._mat_g is None: + # matrix G, divergence of collimators, [pop75] Appendix 1 + mat_g = np.zeros((CN.NUM_COLLS * 2, CN.NUM_COLLS * 2)) + mat_g[CN.IDX_COLL0_H, CN.IDX_COLL0_H] = 1.0 / self.collimators.h_pre_mono**2 + mat_g[CN.IDX_COLL0_V, CN.IDX_COLL0_V] = 1.0 / self.collimators.v_pre_mono**2 + mat_g[CN.IDX_COLL1_H, CN.IDX_COLL1_H] = 1.0 / self.collimators.h_pre_sample**2 + mat_g[CN.IDX_COLL1_V, CN.IDX_COLL1_V] = 1.0 / self.collimators.v_pre_sample**2 + mat_g[CN.IDX_COLL2_H, CN.IDX_COLL2_H] = 1.0 / self.collimators.h_post_sample**2 + mat_g[CN.IDX_COLL2_V, CN.IDX_COLL2_V] = 1.0 / 
self.collimators.v_post_sample**2 + mat_g[CN.IDX_COLL3_H, CN.IDX_COLL3_H] = 1.0 / self.collimators.h_post_ana**2 + mat_g[CN.IDX_COLL3_V, CN.IDX_COLL3_V] = 1.0 / self.collimators.v_post_ana**2 + + self._mat_g = mat_g + + if isinstance(q, tuple | list): + if len(q) == 3: + h, k, l = q + q = self.sample.hkl2q((h, k, l)) + else: + print("Length of q is not 3.") + elif isinstance(q, float): + pass + else: + print("q is not float or tupe or list.") + + ki = np.sqrt(ei / ksq2eng) + kf = np.sqrt(ef / ksq2eng) + en = ei - ef + + two_theta = get_angle(ki, kf, q) * self.goniometer.sense + # theta_s = two_theta / 2 + + # phi = , always has the oppositie sign of theta_s + phi = get_angle(ki, q, kf) * self.goniometer.sense * (-1) + + theta_m = get_angle_bragg(ki, self.monochromator.d_spacing) * self.monochromator.sense + theta_a = get_angle_bragg(kf, self.analyzer.d_spacing) * self.analyzer.sense + + # TODO + # curved monochromator and analyzer + + # TODO + # reflection efficiency + + # TODO + # matrix A,Y=AU, tranform from collimators angular divergence to ki-kf frame + mat_a = np.zeros((6, 2 * CN.NUM_COLLS)) + mat_a[0, CN.IDX_COLL0_H] = 0.5 * ki / np.tan(theta_m) + mat_a[0, CN.IDX_COLL1_H] = -0.5 * ki / np.tan(theta_m) + mat_a[1, CN.IDX_COLL1_H] = ki + mat_a[2, CN.IDX_COLL1_V] = -ki + + mat_a[3, CN.IDX_COLL2_H] = 0.5 * kf / np.tan(theta_a) + mat_a[3, CN.IDX_COLL3_H] = -0.5 * kf / np.tan(theta_a) + mat_a[4, CN.IDX_COLL2_H] = kf + mat_a[5, CN.IDX_COLL2_V] = kf + + # matrix B, X=BY, transform from ki-kf frame to momentum transfer q-frame + mat_b = np.zeros((4, 6)) + mat_b[0:3, 0:3] = rotation_matrix_2d(phi) + mat_b[0:3, 3:6] = rotation_matrix_2d(phi - two_theta) * (-1) + mat_b[3, 0] = 2 * ksq2eng * ki + mat_b[3, 3] = -2 * ksq2eng * kf + + # matrix C, constrinat between mono/ana mosaic and collimator divergence + mat_c = np.zeros(((CN.NUM_MONOS + CN.NUM_ANAS) * 2, CN.NUM_COLLS * 2)) + mat_c[CN.IDX_MONO0_H, CN.IDX_COLL0_H] = 0.5 + mat_c[CN.IDX_MONO0_H, CN.IDX_COLL1_H] = 0.5 + 
mat_c[CN.IDX_MONO0_V, CN.IDX_COLL0_V] = 0.5 / np.sin(theta_m) + mat_c[CN.IDX_MONO0_V, CN.IDX_COLL1_V] = -0.5 / np.sin(theta_m) + + mat_c[CN.IDX_ANA0_H, CN.IDX_COLL2_H] = 0.5 + mat_c[CN.IDX_ANA0_H, CN.IDX_COLL3_H] = 0.5 + mat_c[CN.IDX_ANA0_V, CN.IDX_COLL2_V] = 0.5 / np.sin(theta_a) + mat_c[CN.IDX_ANA0_V, CN.IDX_COLL3_V] = -0.5 / np.sin(theta_a) + + mat_h = mat_c.T @ self._mat_f @ mat_c + mat_hg_inv = la.inv(mat_h + self._mat_g) + mat_ba = mat_b @ mat_a + mat_cov = mat_ba @ mat_hg_inv @ mat_ba.T + + # TODO smaple mosaic???? + mat_cov[1, 1] += q**2 * self.sample.mosaic**2 + mat_cov[2, 2] += q**2 * self.sample.mosaic_v**2 + + mat_reso = la.inv(mat_cov) * sig2fwhm**2 + + # TODO normalization factor + if R0: # calculate + r0 = 1 + else: + r0 = 0 + + rez = ResoEllipsoid(mat_reso, r0) + + return rez diff --git a/src/tavi/instrument/resolution/backups/reso_ellipses_bak.py b/src/tavi/instrument/resolution/backups/reso_ellipses_bak.py new file mode 100755 index 00000000..5a4423f8 --- /dev/null +++ b/src/tavi/instrument/resolution/backups/reso_ellipses_bak.py @@ -0,0 +1,374 @@ +import numpy as np +import numpy.linalg as la +import mpl_toolkits.mplot3d as mplot3d +import matplotlib +import matplotlib.pyplot as plt +from tavi.utilities import * + + +np.set_printoptions(floatmode="fixed", precision=4) + + +class ResoEllipsoid(object): + """Manage the resolution ellipoid + + Attributs: + frame (str): "q", "hkl", or "proj" + STATUS (None | bool): True if resolution calculation is successful + mat (float): 4 by 4 resolution matrix + r0 (float | None): Normalization factor + + Methods: + + """ + + g_eps = 1e-8 + + def __init__(self): + + self.q = None + self.en = None + self.frame = None + self.projection = None + + self.STATUS = None + self.mat = None + self.r0 = None + self.coh_fwhms = None + self.incoh_fwhms = None + self.principal_fwhms = None + + # def __init__(self, mat, r0): + + # self.mat = mat + # self.r0 = r0 + # self.coh_fwhms = None + # self.incoh_fwhms = None + # 
self.principal_fwhms = None + + # if np.isnan(r0) or np.isinf(r0) or np.isnan(mat.any()) or np.isinf(mat.any()): + # self.STATUS = False + # else: + # self.STATUS = True + + def ellipsoid_volume(self): + """volume of the ellipsoid""" + pass + + @staticmethod + def quadric_proj(quadric, idx): + """projects along one axis of the quadric""" + + if np.abs(quadric[idx, idx]) < ResoEllipsoid.g_eps: + return np.delete(np.delete(quadric, idx, axis=0), idx, axis=1) + + # row/column along which to perform the orthogonal projection + vec = 0.5 * (quadric[idx, :] + quadric[:, idx]) # symmetrise if not symmetric + vec /= np.sqrt(quadric[idx, idx]) # normalise to indexed component + proj_op = np.outer(vec, vec) # projection operator + ortho_proj = quadric - proj_op # projected quadric + + # comparing with simple projection + # rank = len(quadric) + # vec /= np.sqrt(np.dot(vec, vec)) + # proj_op = np.outer(vec, vec) + # ortho_proj = np.dot((np.identity(rank) - proj_op), quadric) + + # remove row/column that was projected out + # print("\nProjected row/column %d:\n%s\n->\n%s.\n" % (idx, str(quadric), str(ortho_proj))) + return np.delete(np.delete(ortho_proj, idx, axis=0), idx, axis=1) + + def coh_fwhms_calc(self): + """Coherent FWHMs""" + reso = self.mat + fwhms = [] + + for i in range(len(reso)): + fwhms.append(sig2fwhm / np.sqrt(reso[i, i])) + fwhms = np.array(fwhms) + self.coh_fwhms = fwhms + + def incoh_fwhms_calc(self): + """Incoherent FWHMs""" + + reso = self.mat + proj_q_para = ResoEllipsoid.quadric_proj(reso, 3) + proj_q_para = ResoEllipsoid.quadric_proj(proj_q_para, 2) + proj_q_para = ResoEllipsoid.quadric_proj(proj_q_para, 1) + + proj_q_perp = ResoEllipsoid.quadric_proj(reso, 3) + proj_q_perp = ResoEllipsoid.quadric_proj(proj_q_perp, 2) + proj_q_perp = ResoEllipsoid.quadric_proj(proj_q_perp, 0) + + proj_q_up = ResoEllipsoid.quadric_proj(reso, 3) + proj_q_up = ResoEllipsoid.quadric_proj(proj_q_up, 1) + proj_q_up = ResoEllipsoid.quadric_proj(proj_q_up, 0) + + proj_en = 
ResoEllipsoid.quadric_proj(reso, 2) + proj_en = ResoEllipsoid.quadric_proj(proj_en, 1) + proj_en = ResoEllipsoid.quadric_proj(proj_en, 0) + + fwhms = np.array( + [ + 1.0 / np.sqrt(np.abs(proj_q_para[0, 0])) * sig2fwhm, + 1.0 / np.sqrt(np.abs(proj_q_perp[0, 0])) * sig2fwhm, + 1.0 / np.sqrt(np.abs(proj_q_up[0, 0])) * sig2fwhm, + 1.0 / np.sqrt(np.abs(proj_en[0, 0])) * sig2fwhm, + ] + ) + self.incoh_fwhms = fwhms + + def principal_fwhms_calc(self): + """FWHMs of principal axes in 4D""" + + evals, _ = la.eig(self.mat) + fwhms = np.array(1.0 / np.sqrt(np.abs(evals)) * sig2fwhm) + self.principal_fwhms = fwhms + + @staticmethod + def descr_ellipse(quadric): + """2D ellipse""" + + evals, evecs = la.eig(quadric) # evecs normalized already + fwhms = 1.0 / np.sqrt(np.abs(evals)) * sig2fwhm + # angles = np.array([np.arctan2(evecs[1][0], evecs[0][0])]) + + return (fwhms, evecs) + + @staticmethod + def _gen_ellipse_cut(mat, axes=(0, 1)): + """generate a 2D ellipsis by removing other axes""" + q_res = mat + for i in (3, 2, 1, 0): + if i not in axes: + q_res = np.delete(np.delete(q_res, i, axis=0), i, axis=1) + + fwhms, vec = ResoEllipsoid.descr_ellipse(q_res) + return (fwhms, vec) + + @staticmethod + def _gen_ellipse_project(mat, axes=(0, 1)): + """generate a 2D ellipsis by projecting out other axes""" + q_res = mat + # Qres_QxE_proj = np.delete(np.delete(self.mat, 2, axis=0), 2, axis=1) + for i in (3, 2, 1, 0): + if i not in axes: + q_res = ResoEllipsoid.quadric_proj(q_res, i) + + fwhms, rot = ResoEllipsoid.descr_ellipse(q_res) + return (fwhms, rot) + + def calc_ellipses(self, verbose=True): + """Calculate FWHMs and rotation angles""" + self.coh_fwhms_calc() + self.incoh_fwhms_calc() + self.principal_fwhms_calc() + + if verbose: + print(f"Coherent-elastic fwhms:{self.coh_fwhms}") + print(f"Incoherent-elastic fwhms: {self.incoh_fwhms}") + print(f"Principal axes fwhms: {self.principal_fwhms}") + + # 2d sliced ellipses + + fwhms_QxE, rot_QxE = 
ResoEllipsoid._gen_ellipse_cut(self.mat, axes=(0, 3)) + fwhms_QyE, rot_QyE = ResoEllipsoid._gen_ellipse_cut(self.mat, axes=(1, 3)) + fwhms_QzE, rot_QzE = ResoEllipsoid._gen_ellipse_cut(self.mat, axes=(2, 3)) + + fwhms_QxQy, rot_QxQy = ResoEllipsoid._gen_ellipse_cut(self.mat, axes=(0, 1)) + fwhms_QyQz, rot_QyQz = ResoEllipsoid._gen_ellipse_cut(self.mat, axes=(1, 2)) + fwhms_QxQz, rot_QxQz = ResoEllipsoid._gen_ellipse_cut(self.mat, axes=(0, 2)) + + if verbose: + print(f"2d Qx,E slice fwhms and vectors: {fwhms_QxE}, {rot_QxE}") + print(f"2d Qy,E slice fwhms and vectors:{fwhms_QyE},{rot_QyE}") + print(f"2d Qz,E slice fwhms and vectors:{fwhms_QzE},{rot_QzE}") + print(f"2d Qx,Qy slice fwhms and vectors:{fwhms_QxQy},{rot_QxQy}") + print(f"2d Qy,Qz slice fwhms and vectors:{fwhms_QyQz},{rot_QyQz}") + print(f"2d Qx,Qz slice fwhms and vectors:{fwhms_QxQz},{rot_QxQz}") + + # 2d projected ellipses + # Qres_QxE_proj = np.delete(np.delete(self.mat, 2, axis=0), 2, axis=1) + + (fwhms_QxE_proj, rot_QxE_proj) = ResoEllipsoid._gen_ellipse_project(self.mat, axes=(0, 3)) + (fwhms_QyE_proj, rot_QyE_proj) = ResoEllipsoid._gen_ellipse_project(self.mat, axes=(1, 3)) + (fwhms_QzE_proj, rot_QzE_proj) = ResoEllipsoid._gen_ellipse_project(self.mat, axes=(2, 3)) + (fwhms_QxQy_proj, rot_QxQy_proj) = ResoEllipsoid._gen_ellipse_project(self.mat, axes=(0, 1)) + (fwhms_QyQz_proj, rot_QyQz_proj) = ResoEllipsoid._gen_ellipse_project(self.mat, axes=(1, 2)) + (fwhms_QxQz_proj, rot_QxQz_proj) = ResoEllipsoid._gen_ellipse_project(self.mat, axes=(0, 2)) + + if verbose: + print(f"2d Qx,E projection fwhms and vectors: {fwhms_QxE_proj}, {rot_QxE_proj}") + print(f"2d Qy,E projection fwhms and vectors: {fwhms_QyE_proj}, {rot_QyE_proj}") + print(f"2d Qz,E projection fwhms and vectors: {fwhms_QzE_proj}, {rot_QzE_proj}") + print(f"2d Qx,Qy projection fwhms and vectors: {fwhms_QxQy_proj}, {rot_QxQy_proj}") + print(f"2d Qy,Qz projection fwhms and vectors: {fwhms_QyQz_proj}, {rot_QyQz_proj}") + print(f"2d Qx,Qz 
projection fwhms and vectors: {fwhms_QxQz_proj}, {rot_QxQz_proj}") + + results = { + "fwhms_QxE": fwhms_QxE, + "rot_QxE": rot_QxE, + "fwhms_QyE": fwhms_QyE, + "rot_QyE": rot_QyE, + "fwhms_QzE": fwhms_QzE, + "rot_QzE": rot_QzE, + "fwhms_QxQy": fwhms_QxQy, + "rot_QxQy": rot_QxQy, + "fwhms_QyQz": fwhms_QyQz, + "rot_QyQz": rot_QyQz, + "fwhms_QxQz": fwhms_QxQz, + "rot_QxQz": rot_QxQz, + "fwhms_QxE_proj": fwhms_QxE_proj, + "rot_QxE_proj": rot_QxE_proj, + "fwhms_QyE_proj": fwhms_QyE_proj, + "rot_QyE_proj": rot_QyE_proj, + "fwhms_QzE_proj": fwhms_QzE_proj, + "rot_QzE_proj": rot_QzE_proj, + "fwhms_QxQy_proj": fwhms_QxQy_proj, + "rot_QxQy_proj": rot_QxQy_proj, + "fwhms_QyQz_proj": fwhms_QyQz_proj, + "rot_QyQz_proj": rot_QyQz_proj, + "fwhms_QxQz_proj": fwhms_QxQz_proj, + "rot_QxQz_proj": rot_QxQz_proj, + } + + return results + + def plot_ellipses( + self, + ellis, + verbose=True, + plot_results=True, + file="", + dpi=600, + ellipse_points=128, + use_tex=False, + ): + """Plot 2D and 3D ellipses""" + matplotlib.rc("text", usetex=use_tex) + + def ellfkt(rad, vecs, phi): + return np.dot( + vecs, + np.array( + [ + rad[0] * np.cos(phi), + rad[1] * np.sin(phi), + ], + ), + ) + + phi = np.linspace(0, 2.0 * np.pi, ellipse_points) + + ell_QxE = ellfkt(ellis["fwhms_QxE"] * 0.5, ellis["rot_QxE"], phi) + ell_QyE = ellfkt(ellis["fwhms_QyE"] * 0.5, ellis["rot_QyE"], phi) + ell_QzE = ellfkt(ellis["fwhms_QzE"] * 0.5, ellis["rot_QzE"], phi) + ell_QxQy = ellfkt(ellis["fwhms_QxQy"] * 0.5, ellis["rot_QxQy"], phi) + ell_QyQz = ellfkt(ellis["fwhms_QyQz"] * 0.5, ellis["rot_QyQz"], phi) + ell_QxQz = ellfkt(ellis["fwhms_QxQz"] * 0.5, ellis["rot_QxQz"], phi) + + ell_QxE_proj = ellfkt(ellis["fwhms_QxE_proj"] * 0.5, ellis["rot_QxE_proj"], phi) + ell_QyE_proj = ellfkt(ellis["fwhms_QyE_proj"] * 0.5, ellis["rot_QyE_proj"], phi) + ell_QzE_proj = ellfkt(ellis["fwhms_QzE_proj"] * 0.5, ellis["rot_QzE_proj"], phi) + ell_QxQy_proj = ellfkt(ellis["fwhms_QxQy_proj"] * 0.5, ellis["rot_QxQy_proj"], phi) + 
ell_QyQz_proj = ellfkt(ellis["fwhms_QyQz_proj"] * 0.5, ellis["rot_QyQz_proj"], phi) + ell_QxQz_proj = ellfkt(ellis["fwhms_QxQz_proj"] * 0.5, ellis["rot_QxQz_proj"], phi) + + labelQpara = "Qpara (1/A)" + labelQperp = "Qperp (1/A)" + labelQup = "Qup (1/A)" + + if use_tex: + labelQpara = "$Q_{\parallel}$ (\AA$^{-1}$)" + labelQperp = "$Q_{\perp}$ (\AA$^{-1}$)" + labelQup = "$Q_{up}$ (\AA$^{-1}$)" + + # Qpara, E axis + + fig = plt.figure() + subplot_QxE = fig.add_subplot(231) + subplot_QxE.set_xlabel(labelQpara) + subplot_QxE.set_ylabel("E (meV)") + subplot_QxE.plot(ell_QxE[0], ell_QxE[1], c="black", linestyle="dashed") + subplot_QxE.plot(ell_QxE_proj[0], ell_QxE_proj[1], c="black", linestyle="solid") + subplot_QxE.grid(alpha=0.6) + + # Qperp, E axis + subplot_QyE = fig.add_subplot(232) + subplot_QyE.set_xlabel(labelQperp) + subplot_QyE.set_ylabel("E (meV)") + subplot_QyE.plot(ell_QyE[0], ell_QyE[1], c="black", linestyle="dashed") + subplot_QyE.plot(ell_QyE_proj[0], ell_QyE_proj[1], c="black", linestyle="solid") + subplot_QyE.grid(alpha=0.6) + # Qup, E axis + subplot_QzE = fig.add_subplot(233) + subplot_QzE.set_xlabel(labelQup) + subplot_QzE.set_ylabel("E (meV)") + subplot_QzE.plot(ell_QzE[0], ell_QzE[1], c="black", linestyle="dashed") + subplot_QzE.plot(ell_QzE_proj[0], ell_QzE_proj[1], c="black", linestyle="solid") + subplot_QzE.grid(alpha=0.6) + # Qpara, Qperp axis + subplot_QxQy = fig.add_subplot(234) + subplot_QxQy.set_xlabel(labelQpara) + subplot_QxQy.set_ylabel(labelQperp) + subplot_QxQy.plot(ell_QxQy[0], ell_QxQy[1], c="black", linestyle="dashed") + subplot_QxQy.plot(ell_QxQy_proj[0], ell_QxQy_proj[1], c="black", linestyle="solid") + subplot_QxQy.grid(alpha=0.6) + subplot_QxQy.set_aspect("equal") + # Qperp, Qup axis + subplot_QyQz = fig.add_subplot(235) + subplot_QyQz.set_xlabel(labelQperp) + subplot_QyQz.set_ylabel(labelQup) + subplot_QyQz.plot(ell_QyQz[0], ell_QyQz[1], c="black", linestyle="dashed") + subplot_QyQz.plot(ell_QyQz_proj[0], ell_QyQz_proj[1], 
c="black", linestyle="solid") + subplot_QyQz.grid(alpha=0.6) + subplot_QyQz.set_aspect("equal") + # Qpara, Qup axis + subplot_QxQz = fig.add_subplot(236) + subplot_QxQz.set_xlabel(labelQpara) + subplot_QxQz.set_ylabel(labelQup) + subplot_QxQz.plot(ell_QxQz[0], ell_QxQz[1], c="black", linestyle="dashed") + subplot_QxQz.plot(ell_QxQz_proj[0], ell_QxQz_proj[1], c="black", linestyle="solid") + subplot_QxQz.grid(alpha=0.6) + subplot_QxQz.set_aspect("equal") + + plt.tight_layout() + + # 3d plot + fig3d = plt.figure() + subplot3d = fig3d.add_subplot(111, projection="3d") + + subplot3d.set_xlabel(labelQpara) + subplot3d.set_ylabel(labelQperp) + # subplot3d.set_ylabel(labelQup) + subplot3d.set_zlabel("E (meV)") + + # xE + subplot3d.plot(ell_QxE[0], ell_QxE[1], zs=0.0, zdir="y", c="black", linestyle="dashed") + subplot3d.plot(ell_QxE_proj[0], ell_QxE_proj[1], zs=0.0, zdir="y", c="black", linestyle="solid") + # yE + subplot3d.plot(ell_QyE[0], ell_QyE[1], zs=0.0, zdir="x", c="black", linestyle="dashed") + subplot3d.plot(ell_QyE_proj[0], ell_QyE_proj[1], zs=0.0, zdir="x", c="black", linestyle="solid") + # zE + # subplot3d.plot(ell_QzE[0], ell_QzE[1], zs=0., zdir="x", c="black", linestyle="dashed") + # subplot3d.plot(ell_QzE_proj[0], ell_QzE_proj[1], zs=0., zdir="x", c="black", linestyle="solid") + # xy + subplot3d.plot(ell_QxQy[0], ell_QxQy[1], zs=0.0, zdir="z", c="black", linestyle="dashed") + subplot3d.plot(ell_QxQy_proj[0], ell_QxQy_proj[1], zs=0.0, zdir="z", c="black", linestyle="solid") + + # save plots to files + if file != "": + import os + + splitext = os.path.splitext(file) + file3d = splitext[0] + "_3d" + splitext[1] + + if verbose: + print('Saving 2d plot to "%s".' % file) + print('Saving 3d plot to "%s".' 
% file3d) + fig.savefig(file, dpi=dpi) + fig3d.savefig(file3d, dpi=dpi) + + # show plots + if plot_results: + plt.show() diff --git a/src/tavi/instrument/resolution/cooper_nathans.py b/src/tavi/instrument/resolution/cooper_nathans.py new file mode 100755 index 00000000..d34bc95d --- /dev/null +++ b/src/tavi/instrument/resolution/cooper_nathans.py @@ -0,0 +1,281 @@ +import numpy as np +import numpy.linalg as la +from tavi.utilities import * +from tavi.instrument.tas import TAS +from tavi.instrument.resolution.reso_ellipses import ResoEllipsoid + + +class CN(TAS): + """Copper-Nathans method + + Attibutes: + _mat_f + _mat_g + + Methods: + cooper_nathans + + """ + + g_esp = 1e-8 + + # 4 soller slits collimators + NUM_COLLS = 4 + IDX_COLL0_H = 0 + IDX_COLL0_V = 2 + IDX_COLL1_H = 1 + IDX_COLL1_V = 3 + IDX_COLL2_H = 4 + IDX_COLL2_V = 6 + IDX_COLL3_H = 5 + IDX_COLL3_V = 7 + # 1 monochromator and 1 analyzer + NUM_MONOS = 1 + NUM_ANAS = 1 + IDX_MONO0_H = 0 + IDX_MONO0_V = 1 + IDX_ANA0_H = 2 + IDX_ANA0_V = 3 + + def __init__(self): + super().__init__() + + # constants independent of q and eng + self._mat_f = None + self._mat_g = None + + def cooper_nathans( + self, + ei, + ef, + hkl, + projection=((1, 0, 0), (0, 1, 0), (0, 0, 1)), + R0=False, + ): + """Calculate resolution using Cooper-Nathans method + + Args: + ei (float): incident energy, in units of meV + ef (float): final energy, in units of meV + hkl (tuple of floats): momentum transfer, miller indices in reciprocal lattice + R0 (bool): calculate normalization factor if True + projection (tuple): three non-coplaner vectors. 
If projection is None, the calculation is done in local Q frame + + """ + + rez = ResoEllipsoid() + rez.projection = projection + + if self._mat_f is None: + # matrix F, divergence of monochromator and analyzer, [pop75] Appendix 1 + mat_f = np.zeros(((CN.NUM_MONOS + CN.NUM_ANAS) * 2, (CN.NUM_MONOS + CN.NUM_ANAS) * 2)) + mat_f[CN.IDX_MONO0_H, CN.IDX_MONO0_H] = 1.0 / self.monochromator.mosaic**2 + mat_f[CN.IDX_MONO0_V, CN.IDX_MONO0_V] = 1.0 / self.monochromator.mosaic_v**2 + mat_f[CN.IDX_ANA0_H, CN.IDX_ANA0_H] = 1.0 / self.analyzer.mosaic**2 + mat_f[CN.IDX_ANA0_V, CN.IDX_ANA0_V] = 1.0 / self.analyzer.mosaic_v**2 + self._mat_f = mat_f + + if self._mat_g is None: + # matrix G, divergence of collimators, [pop75] Appendix 1 + mat_g = np.zeros((CN.NUM_COLLS * 2, CN.NUM_COLLS * 2)) + mat_g[CN.IDX_COLL0_H, CN.IDX_COLL0_H] = 1.0 / self.collimators.h_pre_mono**2 + mat_g[CN.IDX_COLL0_V, CN.IDX_COLL0_V] = 1.0 / self.collimators.v_pre_mono**2 + mat_g[CN.IDX_COLL1_H, CN.IDX_COLL1_H] = 1.0 / self.collimators.h_pre_sample**2 + mat_g[CN.IDX_COLL1_V, CN.IDX_COLL1_V] = 1.0 / self.collimators.v_pre_sample**2 + mat_g[CN.IDX_COLL2_H, CN.IDX_COLL2_H] = 1.0 / self.collimators.h_post_sample**2 + mat_g[CN.IDX_COLL2_V, CN.IDX_COLL2_V] = 1.0 / self.collimators.v_post_sample**2 + mat_g[CN.IDX_COLL3_H, CN.IDX_COLL3_H] = 1.0 / self.collimators.h_post_ana**2 + mat_g[CN.IDX_COLL3_V, CN.IDX_COLL3_V] = 1.0 / self.collimators.v_post_ana**2 + + self._mat_g = mat_g + + # determine frame + if isinstance(hkl, tuple | list) and len(hkl) == 3: + + if projection is None: # Local Q frame + rez.frame = "q" + rez.angles = (90, 90, 90) + rez.STATUS = True + + elif projection == ((1, 0, 0), (0, 1, 0), (0, 0, 1)): # HKL + rez.frame = "hkl" + rez.q = hkl + rez.hkl = hkl + rez.angles = ( + self.sample.gamma_star, + self.sample.alpha_star, + self.sample.beta_star, + ) + rez.STATUS = True + + else: # customized projection + p1, p2, p3 = projection + reciprocal_vecs = [ + self.sample.a_star_vec, + 
self.sample.b_star_vec, + self.sample.c_star_vec, + ] + v1 = np.sum([p1[i] * vec for (i, vec) in enumerate(reciprocal_vecs)], axis=0) + v2 = np.sum([p2[i] * vec for (i, vec) in enumerate(reciprocal_vecs)], axis=0) + v3 = np.sum([p3[i] * vec for (i, vec) in enumerate(reciprocal_vecs)], axis=0) + + if np.dot(v1, np.cross(v2, v3)) < CN.g_esp: + # TODO + print("Left handed!") + if np.abs(np.dot(v1, np.cross(v2, v3))) < CN.g_esp: + print("Projection vectors need to be non-coplanar.") + else: + mat_w = np.array([p1, p2, p3]).T + mat_w_inv = np.array( + [ + np.cross(p2, p3), + np.cross(p3, p1), + np.cross(p1, p2), + ] + ) / np.dot(p1, np.cross(p2, p3)) + + hkl_prime = mat_w_inv @ hkl + rez.frame = "proj" + rez.q = hkl_prime + rez.hkl = hkl + + rez.angles = ( + get_angle_vec(v1, v2), + get_angle_vec(v2, v3), + get_angle_vec(v3, v1), + ) + + angles = self.find_angles(hkl, ei, ef) # s2, s1, sgl, sgu + if angles is not None: + r_mat = self.goniometer.r_mat(angles[1:]) # s1, sgl, sgu + ub_mat = self.sample.ub_matrix + conv_mat = 2 * np.pi * r_mat @ ub_mat + q_lab = conv_mat @ hkl + q_mod = np.linalg.norm(q_lab) + rez.STATUS = True + else: + rez.STATUS = False + + else: + print("q needs to be a tupe of size 3.") + rez.STATUS = False + + if rez.STATUS: + ki = np.sqrt(ei / ksq2eng) + kf = np.sqrt(ef / ksq2eng) + en = ei - ef + rez.en = en + + two_theta = get_angle(ki, kf, q_mod) * self.goniometer.sense + + # phi = , always has the oppositie sign of s2 + phi = get_angle(ki, q_mod, kf) * self.goniometer.sense * (-1) + + theta_m = get_angle_bragg(ki, self.monochromator.d_spacing) * self.monochromator.sense + theta_a = get_angle_bragg(kf, self.analyzer.d_spacing) * self.analyzer.sense + + # TODO + # curved monochromator and analyzer + + # TODO + # reflection efficiency + + # TODO + # matrix A,Y=AU, tranform from collimators angular divergence to ki-kf frame + mat_a = np.zeros((6, 2 * CN.NUM_COLLS)) + mat_a[0, CN.IDX_COLL0_H] = 0.5 * ki / np.tan(theta_m) + mat_a[0, CN.IDX_COLL1_H] = 
-0.5 * ki / np.tan(theta_m) + mat_a[1, CN.IDX_COLL1_H] = ki + mat_a[2, CN.IDX_COLL1_V] = -ki + + mat_a[3, CN.IDX_COLL2_H] = 0.5 * kf / np.tan(theta_a) + mat_a[3, CN.IDX_COLL3_H] = -0.5 * kf / np.tan(theta_a) + mat_a[4, CN.IDX_COLL2_H] = kf + mat_a[5, CN.IDX_COLL2_V] = kf + + # matrix B, X=BY, transform from ki-kf frame to momentum transfer q-frame + mat_b = np.zeros((4, 6)) + mat_b[0:3, 0:3] = rotation_matrix_2d(phi) + mat_b[0:3, 3:6] = rotation_matrix_2d(phi - two_theta) * (-1) + mat_b[3, 0] = 2 * ksq2eng * ki + mat_b[3, 3] = -2 * ksq2eng * kf + + # matrix C, constrinat between mono/ana mosaic and collimator divergence + mat_c = np.zeros(((CN.NUM_MONOS + CN.NUM_ANAS) * 2, CN.NUM_COLLS * 2)) + mat_c[CN.IDX_MONO0_H, CN.IDX_COLL0_H] = 0.5 + mat_c[CN.IDX_MONO0_H, CN.IDX_COLL1_H] = 0.5 + mat_c[CN.IDX_MONO0_V, CN.IDX_COLL0_V] = 0.5 / np.sin(theta_m) + mat_c[CN.IDX_MONO0_V, CN.IDX_COLL1_V] = -0.5 / np.sin(theta_m) + + mat_c[CN.IDX_ANA0_H, CN.IDX_COLL2_H] = 0.5 + mat_c[CN.IDX_ANA0_H, CN.IDX_COLL3_H] = 0.5 + mat_c[CN.IDX_ANA0_V, CN.IDX_COLL2_V] = 0.5 / np.sin(theta_a) + mat_c[CN.IDX_ANA0_V, CN.IDX_COLL3_V] = -0.5 / np.sin(theta_a) + + mat_h = mat_c.T @ self._mat_f @ mat_c + self._mat_g + mat_h_inv = la.inv(mat_h) + mat_ba = mat_b @ mat_a + mat_cov = mat_ba @ mat_h_inv @ mat_ba.T + + # TODO smaple mosaic???? 
+ mat_cov[1, 1] += q_mod**2 * self.sample.mosaic**2 + mat_cov[2, 2] += q_mod**2 * self.sample.mosaic_v**2 + + mat_reso = la.inv(mat_cov) * sig2fwhm**2 + + if rez.frame == "q": + rez.mat = mat_reso + rez.q = (q_mod, 0, 0) + + elif rez.frame == "hkl": + + conv_mat_4d = np.eye(4) + conv_mat_4d[0:3, 0:3] = ( + np.array( + [ + [np.sin(phi), 0, np.cos(phi)], + [np.cos(phi), 0, -np.sin(phi)], + [0, 1, 0], + ] + ) + @ conv_mat + ) + rez.mat = conv_mat_4d.T @ mat_reso @ conv_mat_4d + + elif rez.frame == "proj": + + conv_mat_4d = np.eye(4) + conv_mat_4d[0:3, 0:3] = ( + np.array( + [ + [np.sin(phi), 0, np.cos(phi)], + [np.cos(phi), 0, -np.sin(phi)], + [0, 1, 0], + ] + ) + @ conv_mat + @ mat_w + ) + rez.mat = conv_mat_4d.T @ mat_reso @ conv_mat_4d + + # TODO check normalization factor + # ------------------------------------------------------------------------- + # - if the instruments works in kf=const mode and the scans are counted for + # or normalised to monitor counts no ki^3 or kf^3 factor is needed. + # - if the instrument works in ki=const mode the kf^3 factor is needed. 
+ + if R0: # calculate + r0 = np.pi**2 / 4 / np.sin(theta_m) / np.sin(theta_a) + r0 *= np.sqrt(np.linalg.det(self._mat_f) / np.linalg.det(mat_h)) + else: + r0 = 0 + + rez.r0 = r0 + + if np.isnan(rez.r0) or np.isinf(rez.r0) or np.isnan(rez.mat.any()) or np.isinf(rez.mat.any()): + rez.STATUS = False + else: + rez.STATUS = True + + rez.set_labels() + return rez diff --git a/src/tavi/instrument/resolution/reso_ellipses.py b/src/tavi/instrument/resolution/reso_ellipses.py new file mode 100755 index 00000000..60a856b7 --- /dev/null +++ b/src/tavi/instrument/resolution/reso_ellipses.py @@ -0,0 +1,427 @@ +import numpy as np +import numpy.linalg as la +import matplotlib.pyplot as plt +from mpl_toolkits.axisartist import Subplot, Axes +from mpl_toolkits.axisartist.grid_finder import MaxNLocator +from mpl_toolkits.axisartist.grid_helper_curvelinear import GridHelperCurveLinear +from tavi.utilities import * + +np.set_printoptions(floatmode="fixed", precision=4) + + +class ResoCurve(object): + """1D Gaussian curve + + Attributes: + cen (float) + fwhm (float) + xlabel + ylabel + title + legend + + Methods: + generate_curve + generate_plot + """ + + def __init__(self): + self.center: float = None + self.fwhm = None + self.r0 = None + self.x = None + self.y = None + self.xlabel = None + self.ylabel = None + self.title = None + self.legend = None + + def generate_curve(self, num_of_sigmas=3, points=100): + """Generate points on the Gaussian curve""" + + sigma = self.fwhm / sig2fwhm + cen = self.cen + self.x = np.linspace( + -num_of_sigmas * sigma + cen, + num_of_sigmas * sigma + cen, + points, + ) + amp = self.r0 / np.sqrt(2 * np.pi) / sigma + self.y = amp * np.exp(-((self.x - cen) ** 2) / sigma**2 / 2) + + def generate_plot(self, ax, c="black", linestyle="solid"): + """Generate the plot""" + self.generate_curve() + + s = ax.plot( + self.x, + self.y, + c=c, + linestyle=linestyle, + label=self.legend, + ) + ax.set_xlabel(self.xlabel) + ax.set_ylabel(self.ylabel) + 
ax.set_title(self.title) + ax.grid(alpha=0.6) + ax.legend() + + +class ResoEllipse(object): + """2D ellipses + + Attributes: + mat(2 by 2 matrix) + centers (tuple): 2 by 1 + fwhms (tuple): 2 by 1 + vecs: eigen-vectors + angle: angle between the two plotting axes, in degrees + axes_labels (str) + ORIGIN (bool|None): shift the origin if True + + Methods: + generate_ellipse(ellipse_points=128) + generate_axes() + generate_plot(ax, c="black", linestyle="solid") + plot() + + """ + + def __init__(self): + self.mat = None + self.centers = None + self.fwhms = None + self.vecs = None + self.angle = None + self.axes_labels = None + + self.ORIGIN = None + self.grid_helper = None + + def generate_ellipse(self, ellipse_points=128): + """Generate points on a ellipse""" + + phi = np.linspace(0, 2.0 * np.pi, ellipse_points) + + pts = np.dot( + self.vecs, + np.array( + [ + self.fwhms[0] / 2 * np.cos(phi), + self.fwhms[1] / 2 * np.sin(phi), + ], + ), + ) + if self.ORIGIN: + pts[0] += self.centers[0] + pts[1] += self.centers[1] + return pts + + def _tr(self, x, y): + x, y = np.asarray(x), np.asarray(y) + return x + y / np.tan(self.angle / 180 * np.pi), y + + def _inv_tr(self, x, y): + x, y = np.asarray(x), np.asarray(y) + return x - y / np.tan(self.angle / 180 * np.pi), y + + def generate_axes(self): + """Generate grid helper""" + + if not np.abs(self.angle - 90) < 1e-2: # regular axes + self.grid_helper = GridHelperCurveLinear( + (self._tr, self._inv_tr), + grid_locator1=MaxNLocator(integer=True, steps=[1]), + grid_locator2=MaxNLocator(integer=True, steps=[1]), + ) + + def generate_plot(self, ax, c="black", linestyle="solid"): + """Gnerate the ellipse for plotting""" + + pts = self.generate_ellipse() + + if self.grid_helper is None: + + s = ax.plot( + pts[0], + pts[1], + c=c, + linestyle=linestyle, + ) + else: # askew axes + s = ax.plot( + *self._tr(pts[0], pts[1]), + c=c, + linestyle=linestyle, + ) + + ax.set_xlabel(self.axes_labels[0]) + ax.set_ylabel(self.axes_labels[1]) + 
ax.grid(alpha=0.6) + + return None + + def plot(self): + """Plot the ellipsis.""" + + fig = plt.figure() + ax = Subplot(fig, 1, 1, 1, grid_helper=self.grid_helper) + fig.add_subplot(ax) + self.generate_plot(ax) + fig.show() + + +class ResoEllipsoid(object): + """Manage the 4D resolution ellipoid + + Attributs: + STATUS (None | bool): True if resolution calculation is successful + frame (str): "q", "hkl", or "proj" + projection (tuple): three non-coplanar vectors + angles (tuple): angles between the projection vectors + q (tuple): momentum transfer (h', k', l') in the coordinate system specified by projection + hkl (tuple): momentum transfer (h, k, l) + en (float): energy transfer + + mat (float): 4 by 4 resolution matrix + r0 (float | None): Normalization factor + + Methods: + generate_ellipse(axes=(0, 1), PROJECTION=False, ORIGIN=True) + set_labels() + plot(): plot all six combination of axes + + """ + + g_eps = 1e-8 + + def __init__(self): + + self.STATUS = None + self.q = None + self.hkl = None + self.en = None + self.frame = None + self.projection = None + self.angles = None + self.axes_labels = None + + self.mat = None + self.r0 = None + + def volume(self): + """volume of the ellipsoid""" + pass + + def coh_fwhms(self, axis=None): + """Coherent FWHM""" + + curve = ResoCurve() + idx = int(axis) + curve.fwhm = np.array(sig2fwhm / np.sqrt(self.mat[idx, idx])) + if idx == 3: + curve.cen = self.en + else: + curve.cen = self.q[idx] + + curve.r0 = self.r0 + curve.xlabel = self.axes_labels[idx] + curve.title = f"q={np.round(self.hkl,3)}, en={np.round(self.en,3)}" + curve.legend = f"coherent FWHM={np.round(curve.fwhm, 3)}" + return curve + + def incoh_fwhms(self, axis=None): + """Incoherent FWHMs""" + + curve = ResoCurve() + idx = int(axis) + + reso = self.mat + for i in (3, 2, 1, 0): + if not i == idx: + reso = ResoEllipsoid.quadric_proj(reso, i) + + curve.fwhm = (1.0 / np.sqrt(np.abs(reso[0, 0])) * sig2fwhm,) + if idx == 3: + curve.cen = self.en + else: + curve.cen = 
self.q[idx] + + curve.r0 = self.r0 + curve.xlabel = self.axes_labels[idx] + curve.title = f"q={np.round(self.hkl,3)}, en={np.round(self.en,3)}" + curve.legend = f"incoherent FWHM={np.round(curve.fwhm , 3)}" + return curve + + # def principal_fwhms_calc(self): + # """FWHMs of principal axes in 4D""" + + # evals, _ = la.eig(self.mat) + # fwhms = np.array(1.0 / np.sqrt(np.abs(evals)) * sig2fwhm) + # self.principal_fwhms = fwhms + + @staticmethod + def quadric_proj(quadric, idx): + """projects along one axis of the quadric""" + + if np.abs(quadric[idx, idx]) < ResoEllipsoid.g_eps: + return np.delete(np.delete(quadric, idx, axis=0), idx, axis=1) + + # row/column along which to perform the orthogonal projection + vec = 0.5 * (quadric[idx, :] + quadric[:, idx]) # symmetrise if not symmetric + vec /= np.sqrt(quadric[idx, idx]) # normalise to indexed component + proj_op = np.outer(vec, vec) # projection operator + ortho_proj = quadric - proj_op # projected quadric + + # comparing with simple projection + # rank = len(quadric) + # vec /= np.sqrt(np.dot(vec, vec)) + # proj_op = np.outer(vec, vec) + # ortho_proj = np.dot((np.identity(rank) - proj_op), quadric) + + # remove row/column that was projected out + # print("\nProjected row/column %d:\n%s\n->\n%s.\n" % (idx, str(quadric), str(ortho_proj))) + return np.delete(np.delete(ortho_proj, idx, axis=0), idx, axis=1) + + def generate_ellipse(self, axes=(0, 1), PROJECTION=False, ORIGIN=True): + """Gnerate a 2D ellipse by either making a cut or projection + + Arguments: + axes(tuple) + PROJECTION: slice if False + ORIGIN: shift the center if True + + """ + ellipse = ResoEllipse() + ellipse.ORIGIN = ORIGIN + qe_list = np.concatenate((self.q, self.en), axis=None) + # axes = np.sort(axes) + # match tuple(np.sort(axes)): + match tuple(axes): + case (0, 1) | (1, 0): + ellipse.angle = np.round(self.angles[0], 2) + case (1, 2) | (2, 1): + ellipse.angle = np.round(self.angles[1], 2) + case (0, 2) | (2, 0): + ellipse.angle = 
np.round(self.angles[2], 2) + case _: + ellipse.angle = 90 + + if PROJECTION: + q_res = self.mat + ellipse.centers = (qe_list[axes[0]], qe_list[axes[1]]) + ellipse.axes_labels = (self.axes_labels[axes[0]], self.axes_labels[axes[1]]) + # Qres_QxE_proj = np.delete(np.delete(self.mat, 2, axis=0), 2, axis=1) + for i in (3, 2, 1, 0): + if i not in axes: + q_res = ResoEllipsoid.quadric_proj(q_res, i) + ellipse.mat = q_res + + else: # make a slice + q_res = self.mat + ellipse.centers = (qe_list[axes[0]], qe_list[axes[1]]) + ellipse.axes_labels = (self.axes_labels[axes[0]], self.axes_labels[axes[1]]) + for i in (3, 2, 1, 0): + if i not in axes: + q_res = np.delete(np.delete(q_res, i, axis=0), i, axis=1) + ellipse.mat = q_res + + evals, evecs = la.eig(ellipse.mat) # evecs normalized already + fwhms = 1.0 / np.sqrt(np.abs(evals)) * sig2fwhm + # angles = np.array([np.arctan2(evecs[1][0], evecs[0][0])]) + ellipse.fwhms = fwhms + ellipse.vecs = evecs + ellipse.generate_axes() + + return ellipse + + def set_labels(self): + """Set axes labels based on the frame""" + # determine frame + match self.frame: + case "q": + self.axes_labels = ("Q_para (1/A)", "Q_perp (1/A)", "Q_up (1/A)", "E (meV)") + case "hkl": + self.axes_labels = ("H (r.l.u.)", "K (r.l.u.)", "L (r.l.u.)", "E (meV)") + case "proj": + self.axes_labels = ( + f"{self.projection[0]}", + f"{self.projection[1]}", + f"{self.projection[2]}", + "E (meV)", + ) + + # TODO + def calc_ellipses(self, verbose=True): + """Calculate FWHMs and rotation angles""" + + # 2d sliced ellipses + + elps_qx_en = self.generate_ellipse(axes=(0, 3), PROJECTION=False) + # elps_qx_en.plot() + + elps_qy_en = self.generate_ellipse(axes=(1, 3), PROJECTION=False) + elps_qz_en = self.generate_ellipse(axes=(2, 3), PROJECTION=False) + + elps_qx_qy = self.generate_ellipse(axes=(0, 1), PROJECTION=False) + elps_qx_qy.plot() + elps_qy_qz = self.generate_ellipse(axes=(1, 2), PROJECTION=False) + elps_qx_qz = self.generate_ellipse(axes=(0, 2), PROJECTION=False) 
+ elps_qx_qz.plot() + plt.show() + + # 2d projected ellipses + # Qres_QxE_proj = np.delete(np.delete(self.mat, 2, axis=0), 2, axis=1) + + elps_proj_qx_en = self.generate_ellipse(axes=(0, 3), PROJECTION=True) + elps_proj_qy_en = self.generate_ellipse(axes=(1, 3), PROJECTION=True) + elps_proj_qz_en = self.generate_ellipse(axes=(2, 3), PROJECTION=True) + + elps_proj_qx_qy = self.generate_ellipse(axes=(0, 1), PROJECTION=True) + elps_proj_qy_qz = self.generate_ellipse(axes=(1, 2), PROJECTION=True) + elps_proj_qx_qz = self.generate_ellipse(axes=(0, 2), PROJECTION=True) + + return None + + def plot(self): + """Plot all 2D ellipses""" + + # fig = plt.figure() + fig = plt.figure(figsize=(10, 6)) + elps_qx_en = self.generate_ellipse(axes=(0, 3), PROJECTION=False) + ax = fig.add_subplot(231, axes_class=Axes, grid_helper=elps_qx_en.grid_helper) + elps_qx_en.generate_plot(ax, c="black", linestyle="solid") + elps_proj_qx_en = self.generate_ellipse(axes=(0, 3), PROJECTION=True) + elps_proj_qx_en.generate_plot(ax, c="black", linestyle="dashed") + + elps_qy_en = self.generate_ellipse(axes=(1, 3), PROJECTION=False) + ax = fig.add_subplot(232, axes_class=Axes, grid_helper=elps_qy_en.grid_helper) + elps_qy_en.generate_plot(ax, c="black", linestyle="solid") + elps_proj_qy_en = self.generate_ellipse(axes=(1, 3), PROJECTION=True) + elps_proj_qy_en.generate_plot(ax, c="black", linestyle="dashed") + + elps_qz_en = self.generate_ellipse(axes=(2, 3), PROJECTION=False) + ax = fig.add_subplot(233, axes_class=Axes, grid_helper=elps_qz_en.grid_helper) + elps_qz_en.generate_plot(ax, c="black", linestyle="solid") + elps_proj_qz_en = self.generate_ellipse(axes=(2, 3), PROJECTION=True) + elps_proj_qz_en.generate_plot(ax, c="black", linestyle="dashed") + + elps_qx_qy = self.generate_ellipse(axes=(0, 1), PROJECTION=False) + ax = fig.add_subplot(234, axes_class=Axes, grid_helper=elps_qx_qy.grid_helper) + elps_qx_qy.generate_plot(ax, c="black", linestyle="solid") + elps_proj_qx_qy = 
self.generate_ellipse(axes=(0, 1), PROJECTION=True) + elps_proj_qx_qy.generate_plot(ax, c="black", linestyle="dashed") + + elps_qy_qz = self.generate_ellipse(axes=(1, 2), PROJECTION=False) + ax = fig.add_subplot(235, axes_class=Axes, grid_helper=elps_qy_qz.grid_helper) + elps_qy_qz.generate_plot(ax, c="black", linestyle="solid") + elps_proj_qy_qz = self.generate_ellipse(axes=(1, 2), PROJECTION=True) + elps_proj_qy_qz.generate_plot(ax, c="black", linestyle="dashed") + + elps_qx_qz = self.generate_ellipse(axes=(0, 2), PROJECTION=False) + ax = fig.add_subplot(236, axes_class=Axes, grid_helper=elps_qx_qz.grid_helper) + elps_qx_qz.generate_plot(ax, c="black", linestyle="solid") + elps_proj_qx_qz = self.generate_ellipse(axes=(0, 2), PROJECTION=True) + elps_proj_qx_qz.generate_plot(ax, c="black", linestyle="dashed") + + fig.tight_layout(pad=2) diff --git a/src/tavi/instrument/tas.py b/src/tavi/instrument/tas.py new file mode 100644 index 00000000..62287aeb --- /dev/null +++ b/src/tavi/instrument/tas.py @@ -0,0 +1,338 @@ +import math +import json +import numpy as np +from tavi.utilities import * +from tavi.instrument.tas_cmponents import * +from tavi.sample.xtal import Xtal +from tavi.sample.powder import Powder + + +class TAS(object): + """Triple-axis instrument class. 
Manage instrument congigutarion parameters + + Attibutes: + + Methods: + load_instrument(config_params): + load_sample(sample_params): + + + """ + + def __init__(self, path_to_json=None): + """load default instrument configuration""" + # TODO + self.source = None + self.collimators = None + self.guide = None + self.monochromator = None + self.monitor = None + self.goniometer = None + self.analyzer = None + self.detector = None + self.arms = None + self.sample = None + + if path_to_json is not None: + self.load_instrument_from_json(path_to_json) + + def load_instrument_from_dicts(self, config_params): + """Load instrument configuration from a json file""" + + self.source = Source(config_params["source"]) + self.collimators = Collimators(config_params["collimators"]) + self.guide = Guide(config_params["guide"]) + self.monochromator = Monochromator(config_params["monochromator"]) + self.monitor = Monitor(config_params["monitor"]) + self.goniometer = Goniometer(config_params["goniometer"]) + self.analyzer = Analyzer(config_params["analyzer"]) + self.detector = Detector(config_params["detector"]) + self.arms = Arms(config_params["distances"]) + + def load_instrument_from_json(self, path_to_json): + """Load instrument configuration from a json file""" + + with open(path_to_json, "r", encoding="utf-8") as file: + config_params = json.load(file) + + self.load_instrument_from_dicts(config_params) + + # def save_instrument(self): + # """Save configuration into a dictionary""" + # pass + + def load_sample(self, sample): + """Load sample info""" + self.sample = sample + + def load_sample_from_json(self, path_to_json): + """Load sample info""" + + with open(path_to_json, "r", encoding="utf-8") as file: + sample_params = json.load(file) + lattice_params = ( + sample_params["a"], + sample_params["b"], + sample_params["c"], + sample_params["alpha"], + sample_params["beta"], + sample_params["gamma"], + ) + + if sample_params["type"] == "xtal": + sample = 
Xtal(lattice_params=lattice_params) + sample.ub_matrix = np.array(sample_params["ub_matrix"]).reshape(3, 3) + sample.plane_normal = np.array(sample_params["plane_normal"]) + elif sample_params["type"] == "powder": + sample = Powder(lattice_params=lattice_params) + else: + print("Sample type needs to be either xtal or powder.") + + param_dict = ("shape", "width", "height", "depth", "mosaic", "mosaic_v") + + for key, val in sample_params.items(): + + match key: + case "height" | "width" | "depth": + setattr(sample, key, val * cm2angstrom) + case "mosaic" | "mosaic_v": + setattr(sample, key, val * min2rad) + case _: + if key in param_dict: + setattr(sample, key, val) + + self.load_sample(sample) + + @staticmethod + def q_lab(two_theta, ki, kf): + """Momentum transfer q in lab frame. + + Note: + Only for a single detector in the scattering plane. + Depends on inciendnt and final wavevector ki, kf + and two theta, phi is zero. + """ + return np.array( + [ + -kf * np.sin(two_theta / rad2deg), + 0, + ki - kf * np.cos(two_theta / rad2deg), + ] + ) + + def _find_u_from_2peaks(self, peaks, angles, ki, kf): + """Calculate UB matrix from two peaks + + Args: + peaks (list): lists of two tuples, [(h1,k1,l1), (h2,k2,l2)] + angles (list): lists of goniometer angles + ki (float): incident neutron wavelength, in inv Ang + kf (float): final neutron wavelength, in inv Ang + + Returns: + u_mat: 3 by 3 unitary matrix + in_plane_ref: vector of peak1 in q_sample frame (zero goniometer angles) + plane_normal: normal vector of the scattering plane in q_sample frame, + always pointing up (positive y-axis) + """ + + b_mat = self.sample.b_mat() + peak1, peak2 = peaks + q_hkl1 = b_mat @ peak1 + q_hkl2p = b_mat @ peak2 + q_hkl3 = np.cross(q_hkl1, q_hkl2p) + q_hkl_2 = np.cross(q_hkl3, q_hkl1) + + q_hkl_mat = np.array( + [ + q_hkl1 / np.linalg.norm(q_hkl1), + q_hkl_2 / np.linalg.norm(q_hkl_2), + q_hkl3 / np.linalg.norm(q_hkl3), + ] + ).T + + # find r_inv + angles1, angles2 = angles + two_theta1, 
_, _, _ = angles1 + two_theta2, _, _, _ = angles2 + + q_lab1 = TAS.q_lab(two_theta1, ki=ki, kf=kf) + q_lab2 = TAS.q_lab(two_theta2, ki=ki, kf=kf) + + # Goniometer angles all zeros in q_sample frame + q_sample1 = self.goniometer.r_mat_inv(angles1[1:]) @ q_lab1 + q_sample2p = self.goniometer.r_mat_inv(angles2[1:]) @ q_lab2 + q_sample3 = np.cross(q_sample1, q_sample2p) + q_sample2 = np.cross(q_sample3, q_sample1) + + q_sample1 = q_sample1 / np.linalg.norm(q_sample1) + q_sample2 = q_sample2 / np.linalg.norm(q_sample2) + q_sample3 = q_sample3 / np.linalg.norm(q_sample3) + + q_sample_mat = np.array([q_sample1, q_sample2, q_sample3]).T + + u_mat = q_sample_mat @ np.linalg.inv(q_hkl_mat) + + plane_normal = q_sample3 + if plane_normal[1] < 0: # plane normal always up along +Y + plane_normal = -plane_normal + + in_plane_ref = q_sample1 + + return u_mat, in_plane_ref, plane_normal + + def find_ub(self, peaks, angles, ei=13.5, ef=None): + """calculate UB matrix from peaks and motor positions + + Args: + peaks (list) + angles (list) + ei (float): incident neutron energy, in meV + ef (float): final neutron energy, in meV + + """ + + self.sample.ub_peaks = peaks + self.sample.ub_angles = angles + + ki = np.sqrt(ei / ksq2eng) + if ef is None: + kf = ki + else: + kf = np.sqrt(ef / ksq2eng) + + if not len(peaks) == len(angles): + print("Number of peaks and angles provided do not match.") + + if len(peaks) == 2: + b_mat = self.sample.b_mat() + ( + u_mat, + in_plane_ref, + plane_normal, + ) = self._find_u_from_2peaks(peaks, angles, ki, kf) + ub_matrix = u_mat @ b_mat + + self.sample.in_plane_ref = in_plane_ref + self.sample.plane_normal = plane_normal + + # TODO + elif len(peaks) == 3: # find_ub_from_3peaks + pass + # TODO + elif len(peaks) > 3: # find_ub_from_mulitple_peaks + pass + else: + print("I don't even know what you're doing.") + + self.sample.ub_matrix = ub_matrix + inv_ub_matrix = np.linalg.inv(ub_matrix) + self.sample.inv_ub_matrix = inv_ub_matrix + + # 
print(np.round(ub_matrix, 6)) + + def find_two_theta(self, peak, ei=13.5, ef=None): + """calculate two theta angle of a given peak + + Args: + peak (tuple): (h, k, l) of a peak + ei (float): incident neutron energy, in meV + ef (float): final neutron energy, in meV + Return + two_theta: in degrees + """ + S2_MIN_DEG = 1 + + hkl = np.array(peak) + ki = np.sqrt(ei / ksq2eng) + if ef is None: + kf = ki + else: + kf = np.sqrt(ef / ksq2eng) + + b_mat = self.sample.b_mat() + q_sq = 4 * np.pi**2 * hkl.T @ b_mat.T @ b_mat @ hkl + q_norm = np.sqrt(q_sq) + + # two_theta = np.arccos((ki**2 + kf**2 - q_sq) / (2 * ki * kf)) + two_theta = get_angle(ki, kf, q_norm) + if two_theta is None: + print(f"Triangle cannot be closed at q={hkl}, en={ei-ef} meV.") + return None + elif two_theta * rad2deg < S2_MIN_DEG: + print(f"s2 is smaller than {S2_MIN_DEG} deg at q={hkl}.") + return None + else: + return two_theta * rad2deg * self.goniometer.sense + + def find_angles(self, peak, ei=13.5, ef=None): + """calculate motor positions for a given peak if UB matrix has been determined + + Args: + peak (tuple): (h, k, l) of a peak + ei (float): incident neutron energy, in meV + ef (float): final neutron energy, in meV + + """ + S2_MIN_DEG = 1 + + hkl = np.array(peak) + ki = np.sqrt(ei / ksq2eng) + if ef is None: + kf = ki + else: + kf = np.sqrt(ef / ksq2eng) + + b_mat = self.sample.b_mat() + + q_sq = 4 * np.pi**2 * hkl.T @ b_mat.T @ b_mat @ hkl + q_norm = np.sqrt(q_sq) + + # two_theta = np.arccos((ki**2 + kf**2 - q_sq) / (2 * ki * kf)) * self.goniometer.sense + two_theta = get_angle(ki, kf, q_norm) + + if two_theta is None: + print(f"Triangle cannot be closed at q={hkl}, en={ei-ef} meV.") + angles = None + elif two_theta * rad2deg < S2_MIN_DEG: + print(f"s2 is smaller than {S2_MIN_DEG} deg at q={hkl}.") + angles = None + else: + two_theta = two_theta * rad2deg * self.goniometer.sense + q = self.sample.ub_matrix @ hkl + t1 = q / np.linalg.norm(q) + + eps = 1e-8 # zero + plane_normal = 
self.sample.plane_normal + in_plane_ref = self.sample.in_plane_ref + + # Minimal tilts + if np.dot(t1, plane_normal) < eps: # t1 in plane + t3 = plane_normal + t2 = np.cross(t3, t1) + else: # t1 not in plane, need to change tilts + if np.linalg.norm(np.cross(plane_normal, t1)) < eps: + # oops, t1 along plane_normal + t2 = in_plane_ref + t3 = np.cross(t1, t2) + else: + t2p = np.cross(plane_normal, t1) + t3 = np.cross(t1, t2p) + t2 = np.cross(t3, t1) + + t_mat = np.array( + [t1, t2 / np.linalg.norm(t2), t3 / np.linalg.norm(t3)], + ).T + + t_mat_inv = np.linalg.inv(t_mat) + + q_lab1 = TAS.q_lab(two_theta, ki, kf) / q_norm + q_lab2 = np.array([q_lab1[2], 0, -q_lab1[0]]) + q_lab3 = np.array([0, 1, 0]) + + q_lab_mat = np.array([q_lab1, q_lab2, q_lab3]).T + r_mat = q_lab_mat @ t_mat_inv + + angles = np.round((two_theta,) + self.goniometer.angles_from_r_mat(r_mat), 6) + + return angles diff --git a/src/tavi/instrument/tas_cmponents.py b/src/tavi/instrument/tas_cmponents.py new file mode 100644 index 00000000..34000d67 --- /dev/null +++ b/src/tavi/instrument/tas_cmponents.py @@ -0,0 +1,306 @@ +import numpy as np +from tavi.utilities import * + + +class Source(object): + """Neutron source""" + + def __init__(self, param_dict): + + self.name = None + self.shape = "rectangular" + self.width = None + self.height = None + + for key, val in param_dict.items(): + match key: + case "width" | "height": + # divide by np.sqrt(12) if rectangular + # Diameter D/4 if spherical + if param_dict["shape"] == "rectangular": + setattr(self, key, val / np.sqrt(12) * cm2angstrom) + elif param_dict["shape"] == "circular": + setattr(self, key, val / 4 * cm2angstrom) + else: + print("Source shape needs to be either rectangular or circular.") + case _: + setattr(self, key, val) + + +class Guide(object): + """Guide""" + + def __init__(self, param_dict): + + self.in_use = False + self.div_h = None + self.div_v = None + + for key, val in param_dict.items(): + match key: + case "div_h" | "div_v": + 
setattr(self, key, val * min2rad) + case _: + setattr(self, key, val) + + +class Monochromator(object): + """Monochromator""" + + def __init__(self, param_dict): + + self.type = "PG002" + self.d_spacing = mono_ana_xtal["PG002"] + self.mosaic = 45 # horizontal mosaic, min of arc + self.mosaic_v = 45 # vertical mosaic, if anisotropic + self.sense = -1 + + # divide by np.sqrt(12) if rectangular + # Diameter D/4 if spherical + self.shape = "rectangular" # or spherical + self.width = 12.0 + self.height = 8.0 + self.depth = 0.15 + # horizontal focusing + self.curved_h = False + self.curvh = 0.0 + self.optimally_curved_h = False + # vertical focusing + self.curved_v = False + self.curvv = 0.0 + self.optimally_curved_v = False + + for key, val in param_dict.items(): + match key: + case "mosaic" | "mosaic_v" | "curvh" | "curvv": + setattr(self, key, val * min2rad) + case "width" | "height" | "depth": + # divide by np.sqrt(12) if rectangular + # Diameter D/4 if spherical + if param_dict["shape"] == "rectangular": + setattr(self, key, val / np.sqrt(12) * cm2angstrom) + elif param_dict["shape"] == "spherical": + setattr(self, key, val / 4 * cm2angstrom) + else: + print("Monochromator shape needs to be either rectangular or spherical.") + case _: + setattr(self, key, val) + self.d_spacing = mono_ana_xtal[self.type] + + +class Collimators(object): + """collimitor divergences, in mins of arc""" + + def __init__(self, param_dict): + self.h_pre_mono = 30 # mins of arc + self.h_pre_sample = 30 + self.h_post_sample = 30 + self.h_post_ana = 30 + self.v_pre_mono = 30 + self.v_pre_sample = 30 + self.v_post_sample = 30 + self.v_post_ana = 30 + + for key, val in param_dict.items(): + if key in ( + "h_pre_mono", + "h_pre_sample", + "h_post_sample", + "h_post_ana", + "v_pre_mono", + "v_pre_sample", + "v_post_sample", + "v_post_ana", + ): + setattr(self, key, val * min2rad) + else: + setattr(self, key, val) + + +# TODO +class Monitor(object): + + def __init__(self, param_dict): + + 
self.shape = "rectangular" # or spherical + self.width = 5 * cm2angstrom + self.height = 12 * cm2angstrom + + for key, val in param_dict.items(): + + match key: + case "width" | "height": + # divide by np.sqrt(12) if rectangular + # Diameter D/4 if spherical + if param_dict["shape"] == "rectangular": + setattr(self, key, val / np.sqrt(12) * cm2angstrom) + elif param_dict["shape"] == "spherical": + setattr(self, key, val / 4 * cm2angstrom) + else: + print("Monitor shape needs to be either rectangular or spherical.") + case _: + setattr(self, key, val) + + +class Analyzer(object): + + def __init__(self, param_dict): + + self.type = "Pg002" + self.d_spacing = mono_ana_xtal["Pg002"] + self.mosaic = 45 # horizontal mosaic, min of arc + self.mosaic_v = 45 # vertical mosaic, if anisotropic + self.sense = -1 + # divide by np.sqrt(12) if rectangular + # Diameter D/4 if spherical + self.shape = "rectangular" + self.width = 12.0 + self.height = 8.0 + self.depth = 0.3 + # horizontal focusing + self.curved_h = False + self.curvh = 0.0 + self.optimally_curved_h = False + # vertical focusing + self.curved_v = False + self.curvv = 0.0 + self.optimally_curved_v = False + + for key, val in param_dict.items(): + match key: + case "mosaic" | "mosaic_v" | "curvh" | "curvv": + setattr(self, key, val * min2rad) + case "width" | "height" | "depth": + # divide by np.sqrt(12) if rectangular + # Diameter D/4 if spherical + if param_dict["shape"] == "rectangular": + setattr(self, key, val / np.sqrt(12) * cm2angstrom) + elif param_dict["shape"] == "spherical": + setattr(self, key, val / 4 * cm2angstrom) + else: + print("Analyzer shape needs to be either rectangular or spherical.") + + setattr(self, key, val * cm2angstrom) + case _: + setattr(self, key, val) + self.d_spacing = mono_ana_xtal[self.type] + + +# TODO +class Detector(object): + + def __init__(self, param_dict): + self.shape = "rectangular" + self.width = 1.5 # in cm + self.height = 5.0 # in cm + + for key, val in param_dict.items(): 
+ match key: + case "width" | "height": + # divide by np.sqrt(12) if rectangular + # Diameter D/4 if spherical + if param_dict["shape"] == "rectangular": + setattr(self, key, val / np.sqrt(12) * cm2angstrom) + elif param_dict["shape"] == "spherical": + setattr(self, key, val / 4 * cm2angstrom) + else: + print("Detector shape needs to be either rectangular or spherical.") + case _: + setattr(self, key, val) + + +class Arms(object): + """lengths of arms, in units of cm""" + + def __init__(self, param_dict): + # in units of cm + self.src_mono = 0 + self.mono_sample = 0 + self.sample_ana = 0 + self.ana_det = 0 + self.mono_monitor = 0 + + for key, val in param_dict.items(): + setattr(self, key, val * cm2angstrom) + + +class Goniometer(object): + """Goniometer table, type = Y-ZX or YZ-X""" + + def __init__(self, param_dict): + self.type = "Y-ZX" # Y-mZ-X for Huber stage at HB1A and HB3 + self.sense = -1 + + for key, val in param_dict.items(): + setattr(self, key, val) + + def r_mat(self, angles): + "rotation matrix" + + omega, sgl, sgu = angles # s2, s1, sgl, sgu + match self.type: + + case "Y-ZX": # HB3 + r_mat = rot_y(omega) @ rot_z(-1 * sgl) @ rot_x(sgu) + case "YZ-X": # CG4C + r_mat = rot_y(omega) @ rot_z(sgl) @ rot_x(-1 * sgu) + case _: + r_mat = None + print("Unknow goniometer type. 
Curruntly support Y-ZX and YZ-X") + + return r_mat + + def r_mat_inv( + self, + angles, + ): + """inverse of rotation matrix""" + # return np.linalg.inv(self.r_mat(angles)) + return self.r_mat(angles).T + + def angles_from_r_mat(self, r_mat): + """Calculate goniometer angles from the R matrix + + Note: + range of np.arcsin is -pi/2 to pi/2 + range of np.atan2 is -pi to pi + """ + + match self.type: + + case "Y-ZX" | "YZ-X": # Y-mZ-X (s1, sgl, sgu) for HB1A and HB3, Y-Z-mX (s1, sgl, sgu) for CG4C + + # sgl1 = np.arcsin(r_mat[1, 0]) * rad2deg + # sgl2 = np.arccos(np.sqrt(r_mat[0, 0] ** 2 + r_mat[2, 0] ** 2)) * rad2deg + sgl = np.arctan2(r_mat[1, 0], np.sqrt(r_mat[0, 0] ** 2 + r_mat[2, 0] ** 2)) * rad2deg + + # sgu1 = np.arcsin(-r_mat[1, 2] / np.sqrt(r_mat[0, 0] ** 2 + r_mat[2, 0] ** 2)) * rad2deg + # sgu2 = np.arccos(r_mat[1, 1] / np.sqrt(r_mat[0, 0] ** 2 + r_mat[2, 0] ** 2)) * rad2deg + sgu = ( + np.arctan2( + -r_mat[1, 2] / np.sqrt(r_mat[0, 0] ** 2 + r_mat[2, 0] ** 2), + r_mat[1, 1] / np.sqrt(r_mat[0, 0] ** 2 + r_mat[2, 0] ** 2), + ) + * rad2deg + ) + + # omega1 = np.arcsin(-r_mat[2, 0] / np.sqrt(r_mat[0, 0] ** 2 + r_mat[2, 0] ** 2)) * rad2deg + # omega2 = np.arccos(r_mat[0, 0] / np.sqrt(r_mat[0, 0] ** 2 + r_mat[2, 0] ** 2)) * rad2deg + omega = ( + np.arctan2( + -r_mat[2, 0] / np.sqrt(r_mat[0, 0] ** 2 + r_mat[2, 0] ** 2), + r_mat[0, 0] / np.sqrt(r_mat[0, 0] ** 2 + r_mat[2, 0] ** 2), + ) + * rad2deg + ) + match self.type: + case "Y-ZX": + angles = (omega, -1 * sgl, sgu) + case "YZ-X": + angles = (omega, sgl, -1 * sgu) + + case _: + angles = None + print("Unknow goniometer type. 
Curruntly support Y-ZX and YZ-X.") + + return angles diff --git a/src/tavi/plotter.py b/src/tavi/plotter.py new file mode 100644 index 00000000..0c957562 --- /dev/null +++ b/src/tavi/plotter.py @@ -0,0 +1,166 @@ +import matplotlib.pylab as plt +from mpl_toolkits.axisartist import Axes +import numpy as np + + +class Plot1DManager(object): + """Manage a plot""" + + def __init__(self) -> None: + + _, self.ax = plt.subplots() + self.title = None + self.xlim = None + self.ylim = None + self.xlabel = None + self.ylabel = None + + def set_labels(self): + + if self.xlim is not None: + self.ax.set_xlim(left=self.xlim[0], right=self.xlim[1]) + if self.ylim is not None: + self.ax.set_ylim(bottom=self.ylim[0], top=self.ylim[1]) + + self.ax.set_title(self.title) + self.ax.set_xlabel(self.xlabel) + self.ax.set_ylabel(self.ylabel) + self.ax.grid(alpha=0.6) + self.ax.legend() + + def rez_plot_1D(self, tas, projection, hkl, en, ef, R0, axis): + + rez = tas.cooper_nathans( + ei=ef + en, + ef=ef, + hkl=hkl, + projection=projection, + R0=R0, + ) + if rez.STATUS: + curve1 = rez.coh_fwhms(axis=axis) + curve2 = rez.incoh_fwhms(axis=axis) + + curve1.generate_plot(self.ax, c="black", linestyle="solid") + curve2.generate_plot(self.ax, c="black", linestyle="dashed") + + self.set_labels() + return rez + + def plot_curve(self, x, y, xerr=None, yerr=None, xlabel=None, ylabel=None, title=None, label=None, fmt="o"): + self.title = title + self.xlabel = xlabel + self.ylabel = ylabel + + self.ax.errorbar(x=x, y=y, xerr=xerr, yerr=yerr, label=label, fmt=fmt) + + self.set_labels() + + +class Plot2DManager(object): + """Manage a plot""" + + def __init__(self, grid_helper=None) -> None: + + self.fig = plt.figure(figsize=(10, 6)) + self.ax = self.fig.add_subplot(111, axes_class=Axes, grid_helper=grid_helper) + + self.title = None + self.xlim = None + self.ylim = None + self.zlim = None + self.xlabel = None + self.ylabel = None + self.zlabel = None + self.shading = "auto" + self.cmap = "turbo" + + def 
set_labels(self): + + if self.xlim is not None: + self.ax.set_xlim(left=self.xlim[0], right=self.xlim[1]) + if self.ylim is not None: + self.ax.set_ylim(bottom=self.ylim[0], top=self.ylim[1]) + + self.ax.set_title(self.title) + self.ax.set_xlabel(self.xlabel) + self.ax.set_ylabel(self.ylabel) + self.ax.grid(alpha=0.6) + self.ax.legend() + + def plot_contour( + self, + x, + y, + z, + x_step=None, + y_step=None, + xlabel=None, + ylabel=None, + zlabel=None, + title=None, + ): + if self.zlim is not None: + vmin, vmax = self.zlim + + self.xlabel = xlabel + self.ylabel = ylabel + self.zlabel = zlabel + self.title = title + + p = self.ax.pcolormesh( + x, + y, + z, + shading=self.shading, + cmap=self.cmap, + vmin=vmin, + vmax=vmax, + ) + + self.fig.colorbar(p, ax=self.ax) + self.set_labels() + + def rez_plot(self, tas, projection, q1, q2, q3, en, ef, R0): + + qe_list = np.empty((4,), dtype=object) + plot_axes = [] + perp_axes = [] + for idx, qe in enumerate((q1, q2, q3, en)): + if np.size(qe) > 1: + plot_axes.append(idx) + qe_list[idx] = np.arange(qe[0], qe[1] + qe[2] / 2, qe[2]) + else: + perp_axes.append(idx) + qe_list[idx] = np.array([qe]) + qe_list[3] = qe_list[3] + ef + + has_axes = False + p1, p2, p3 = projection + + for q1 in qe_list[0]: + for q2 in qe_list[1]: + for q3 in qe_list[2]: + for ei0 in qe_list[3]: + + h, k, l = tuple(np.array(p1) * q1 + np.array(p2) * q2 + np.array(p3) * q3) + + rez = tas.cooper_nathans( + ei=ei0, + ef=ef, + hkl=(h, k, l), + projection=projection, + R0=R0, + ) + if rez.STATUS: + elps = rez.generate_ellipse(axes=tuple(plot_axes), PROJECTION=False) + elps.generate_plot(self.ax, c="black", linestyle="solid") + elps_proj = rez.generate_ellipse(axes=tuple(plot_axes), PROJECTION=True) + elps_proj.generate_plot(self.ax, c="black", linestyle="dashed") + + # if has_axes: + projection = projection + ("en",) + self.ax.set_title( + f"{projection[perp_axes[0]]}={qe_list[perp_axes[0]][0]}, " + + f"{projection[perp_axes[1]]}={qe_list[perp_axes[1]][0]}" 
+ ) diff --git a/src/tavi/sample/powder.py b/src/tavi/sample/powder.py new file mode 100755 index 00000000..708923ca --- /dev/null +++ b/src/tavi/sample/powder.py @@ -0,0 +1,22 @@ +import numpy as np +from tavi.sample.sample import Sample + + +class Powder(Sample): + """Powder sample + + Attibutes: + type (str): "powder" + + """ + + def __init__(self, lattice_params): + super().__init__(lattice_params) + self.type = "powder" + + +if __name__ == "__main__": + powder = Powder(lattice_params=(1, 1, 1, 90, 90, 120)) + + print(powder.b_mat() / 2 / np.pi) + print(powder.b_mat() @ np.array([0, 1, 0]) / 2 / np.pi) diff --git a/src/tavi/sample/sample.py b/src/tavi/sample/sample.py new file mode 100755 index 00000000..5ec8916a --- /dev/null +++ b/src/tavi/sample/sample.py @@ -0,0 +1,196 @@ +import numpy as np +from tavi.utilities import * + +np.set_printoptions(floatmode="fixed", precision=4) + + +class Sample(object): + """ + Attributes: + shape (str): "cuboid" or "cylindrical" + width (float): in units of cm + height (float): in units of cm + depth (float): in units of cm + + mosaic (fload): in units of minutes of arc + mosaic_v (fload): verital mosaic if anisotropic, in units of minutes of arc + + a, b, c lattice constants in Angstrom + alpha, beta, gamma angles in degrees + a_vec, b_vec, c_vec real sapce lattice vector + _v_abg V_alpha_beta_gamma = unit_cell_volume/(abc) + a_star, b_star, c_star lattice constants in inverse Angstrom + alpha_star, beta_star, gamma_star reciprocal angles in degrees + a_star_vec, b_star_vec, c_star_vec reciprocal lattice vector + i_star, j_star, k_star bases for the reciprocal space lattice vectors + + + + + Methods: + real_vec_cart + reciprocal_vec_cart + reciprocal_latt_params + reciprocal_basis + b_mat + + + Static Methods: + v_alpha_beta_gamma_calc(alpha,m beta, gamma) + + + """ + + def __init__(self, lattice_params=(1, 1, 1, 90, 90, 90)): + + # parameters for resolution calculation + self.shape = "cuboid" + self.width = 1.0 # * 
cm2angstrom + self.height = 1.0 # * cm2angstrom + self.depth = 1.0 # * cm2angstrom + self.mosaic = 30 # * min2rad # horizontal mosaic + self.mosaic_v = 30 # * min2rad # vertical mosaic + + self.update_lattice(lattice_params) + + def update_lattice(self, lattice_params=(1, 1, 1, 90, 90, 90)): + """update real and reciprocal space lattice parameters and vectors""" + + a, b, c, alpha, beta, gamma = lattice_params + self.a = a + self.b = b + self.c = c + self.alpha = alpha + self.beta = beta + self.gamma = gamma + + self._v_abg = Sample.v_alpha_beta_gamma_calc(alpha, beta, gamma) + self.a_vec, self.b_vec, self.c_vec = self.real_vec_cart() + ( + self.a_star, + self.b_star, + self.c_star, + self.alpha_star, + self.beta_star, + self.gamma_star, + ) = self.reciprocal_latt_params() + + self.a_star_vec, self.b_star_vec, self.c_star_vec = self.reciprocal_vec_cart() + + @staticmethod + def v_alpha_beta_gamma_calc(alpha, beta, gamma): + """ + Calculate V_alpha_bet_gamma = Volume/(abc) + Volume = a * (b x c) + """ + cos_alpha = np.cos(alpha / 180 * np.pi) + cos_beta = np.cos(beta / 180 * np.pi) + cos_gamma = np.cos(gamma / 180 * np.pi) + v_alpha_beta_gamma = np.sqrt( + 1 - cos_alpha**2 - cos_beta**2 - cos_gamma**2 + 2 * cos_alpha * cos_beta * cos_gamma + ) + return v_alpha_beta_gamma + + def real_vec_cart(self): + """ + Calculate the real space lattice vectors in Cartesian coordiantes + """ + cos_alpha = np.cos(self.alpha / 180 * np.pi) + cos_beta = np.cos(self.beta / 180 * np.pi) + cos_gamma = np.cos(self.gamma / 180 * np.pi) + sin_gamma = np.sin(self.gamma / 180 * np.pi) + + ac = np.array([self.a, 0, 0]) + bc = np.array( + [ + self.b * cos_gamma, + self.b * sin_gamma, + 0, + ] + ) + + cc = np.array( + [ + self.c * cos_beta, + self.c * (cos_alpha - cos_gamma * cos_beta) / sin_gamma, + self.c * self._v_abg / sin_gamma, + ] + ) + # ac = np.round(ac, 8) + # bc = np.round(bc, 8) + # cc = np.round(cc, 8) + return (ac, bc, cc) + + def reciprocal_latt_params(self): + """Calculate the 
reciprocal lattice parameters and angles""" + sin_alpha = np.sin(self.alpha / 180 * np.pi) + cos_alpha = np.cos(self.alpha / 180 * np.pi) + sin_beta = np.sin(self.beta / 180 * np.pi) + cos_beta = np.cos(self.beta / 180 * np.pi) + cos_gamma = np.cos(self.gamma / 180 * np.pi) + sin_gamma = np.sin(self.gamma / 180 * np.pi) + + a_star = sin_alpha / self.a / self._v_abg * np.pi * 2 + b_star = sin_beta / self.b / self._v_abg * np.pi * 2 + c_star = sin_gamma / self.c / self._v_abg * np.pi * 2 + alpha_star = np.arccos((cos_beta * cos_gamma - cos_alpha) / sin_beta / sin_gamma) / np.pi * 180 + beta_star = np.arccos((cos_gamma * cos_alpha - cos_beta) / sin_alpha / sin_gamma) / np.pi * 180 + gamma_star = np.arccos((cos_alpha * cos_beta - cos_gamma) / sin_beta / sin_alpha) / np.pi * 180 + # a_star = np.round(a_star, 8) + # b_star = np.round(b_star, 8) + # c_star = np.round(c_star, 8) + # alpha_star = np.round(alpha_star, 8) + # beta_star = np.round(beta_star, 8) + # gamma_star = np.round(gamma_star, 8) + + return (a_star, b_star, c_star, alpha_star, beta_star, gamma_star) + + def reciprocal_vec_cart(self): + """ + Calculate the reciprocal space lattice vectors in the Cartesian coordinates + """ + v = self._v_abg * self.a * self.b * self.c + prefactor = 2 * np.pi / v + a_star_vec = np.cross(self.b_vec, self.c_vec) * prefactor + b_star_vec = np.cross(self.c_vec, self.a_vec) * prefactor + c_star_vec = np.cross(self.a_vec, self.b_vec) * prefactor + + return (a_star_vec, b_star_vec, c_star_vec) + + def hkl2q(self, hkl): + """Convert (h,k,l) to q, in units of inverse Angstrom""" + (h, k, l) = hkl + q = np.linalg.norm(h * self.a_star_vec + k * self.b_star_vec + l * self.c_star_vec) + return q + + def b_mat(self): + """ + Calculate the B matrix + B * (h,k,l) gives Q in terms of i_star, j_star, k_star + """ + b_mat = np.array( + [ + [ + self.a_star, + self.b_star * np.cos(self.gamma_star / 180 * np.pi), + self.c_star * np.cos(self.beta_star / 180 * np.pi), + ], + [ + 0, + self.b_star * 
np.sin(self.gamma_star / 180 * np.pi), + -self.c_star * np.sin(self.beta_star / 180 * np.pi) * np.cos(self.alpha / 180 * np.pi), + ], + [0, 0, 2 * np.pi / self.c], + ] + ) + b_mat = b_mat / 2 / np.pi + b_mat = np.round(b_mat, 8) + return b_mat + + def reciprocal_basis(self): + """Calculate the reciprocal basis vectors i_star, j_star, k_star""" + i_star = self.a_star_vec / np.linalg.norm(self.a_star_vec) + a_star_perp = np.cross(np.cross(self.a_star_vec, self.b_star_vec), self.a_star_vec) + j_star = a_star_perp / np.linalg.norm(a_star_perp) + k_star = np.cross(i_star, j_star) + return (i_star, j_star, k_star) diff --git a/src/tavi/sample/xtal.py b/src/tavi/sample/xtal.py new file mode 100755 index 00000000..c82df735 --- /dev/null +++ b/src/tavi/sample/xtal.py @@ -0,0 +1,86 @@ +import numpy as np +from tavi.utilities import * +from tavi.sample.sample import Sample + + +class Xtal(Sample): + """Singel crystal sample + + Attibutes: + type (str): "xtal" + inv_ub_matrix + in_plane_ref: in plane vector in Qsample frame, goniometers at zero + plane_normal: normal vector in Qsample frame, goniometers at zero + u (tuple) + v (tuple) + Methods: + + + """ + + def __init__(self, lattice_params=(1, 1, 1, 90, 90, 90)): + super().__init__(lattice_params) + self.type = "xtal" + + self.ub_peaks = None + self.ub_angles = None + self.ub_matrix = None + self.inv_ub_matrix = None + + self.plane_normal = None + self.in_plane_ref = None + + self.i_star, self.j_star, self.k_star = self.reciprocal_basis() + + @property + def u(self): + """u vector, in reciprocal lattice unit, along beam""" + return self.ub_matrix_to_uv(self.ub_matrix)[0] + + @property + def v(self): + """ + v vector, in reciprocal lattice unit, + in the horizaontal scattering plane + """ + return self.ub_matrix_to_uv(self.ub_matrix)[1] + + @staticmethod + def ub_matrix_to_uv(ub_matrix): + """ "Calculate u and v vector from UB matrix""" + inv_ub_matrix = np.linalg.inv(ub_matrix) + u = inv_ub_matrix @ np.array([0, 0, 1]) + v 
= inv_ub_matrix @ np.array([1, 0, 0]) + return (u, v) + + @staticmethod + def ub_matrix_to_lattice_params(ub_matrix): + """Calculate lattice parameters from UB matrix""" + g_mat = np.linalg.inv(ub_matrix.T @ ub_matrix) + a = np.sqrt(g_mat[0, 0]) + b = np.sqrt(g_mat[1, 1]) + c = np.sqrt(g_mat[2, 2]) + alpha = np.arccos((g_mat[1, 2] + g_mat[2, 1]) / (2 * b * c)) * rad2deg + beta = np.arccos((g_mat[0, 2] + g_mat[2, 0]) / (2 * a * c)) * rad2deg + gamma = np.arccos((g_mat[0, 1] + g_mat[1, 0]) / (2 * a * b)) * rad2deg + return (a, b, c, alpha, beta, gamma) + + def uv_to_ub_matrix(self, u, v): + """Calculate UB matrix from u and v vector, and lattice parameters""" + + b_mat = self.b_mat() + t1 = b_mat @ u + t2_prime = b_mat @ v + t3 = np.cross(t1, t2_prime) + t2 = np.cross(t3, t1) + t_mat = np.array( + [ + t1 / np.linalg.norm(t1), + t2 / np.linalg.norm(t2), + t3 / np.linalg.norm(t3), + ] + ).T + q_mat = np.array([[0, 0, 1], [1, 0, 0], [0, 1, 0]]).T + ub_matrix = q_mat @ np.linalg.inv(t_mat) @ b_mat + + return ub_matrix diff --git a/src/tavi/spice_to_hdf5_backup.py b/src/tavi/spice_to_hdf5_backup.py new file mode 100644 index 00000000..60eb8eb3 --- /dev/null +++ b/src/tavi/spice_to_hdf5_backup.py @@ -0,0 +1,77 @@ + @staticmethod + def convert_spice_to_hdf5(path_to_spice_folder, path_to_hdf5): + """Load data from spice folder. 
+ + Args: + path_to_spice_folder (str): spice folder, ends with '/' + path_to_hdf5 (str): path to hdf5 data file, ends with '.h5' + """ + exp_info = [ + "experiment", + "experiment_number", + "proposal", + "users", + "local_contact", + ] + + p = Path(path_to_spice_folder) + + with h5py.File(path_to_hdf5, "w") as f: + + scans = sorted((p / "Datafiles").glob("*")) + instrument_str, exp_str = scans[0].parts[-1].split("_")[0:2] + + # read in exp_info from the first scan and save as attibutes of the file + _, _, headers, _ = TAVI_Data._read_spice(scans[0]) + ipts = headers["proposal"] + exp_id = "IPTS" + ipts + "_" + instrument_str + + grp_data = f.create_group("data_" + exp_id) + grp_processed_data = f.create_group("processed_data") + grp_fit = f.create_group("fits") + grp_plot = f.create_group("plots") + + for k, v in headers.items(): + if k in exp_info: + grp_data.attrs[k] = v + + # read scans into dataset1 + for scan in scans: # ignoring unused keys + spice_data, col_headers, headers, unused = TAVI_Data._read_spice(scan) + + scan_num = ((scan.parts[-1].split("_"))[-1]).split(".")[0] + scan_id = exp_str + "_" + scan_num + scan_entry = grp_data.create_group(scan_id) + scan_entry.attrs["scan_id"] = scan_id + + for k, v in headers.items(): + if k not in exp_info: # ignore common keys in single scans + if "," in v and k != "scan_title": # vectors + scan_entry.attrs[k] = np.array([float(v0) for v0 in v.split(",")]) + elif v.replace(".", "").isnumeric(): # numebrs only + if v.isdigit(): # int + scan_entry.attrs[k] = int(v) + else: # float + scan_entry.attrs[k] = float(v) + # separate COM/FWHM and its errorbar + elif k == "Center of Mass": + com, e_com = v.split("+/-") + scan_entry.attrs["COM"] = float(com) + scan_entry.attrs["COM_err"] = float(e_com) + elif k == "Full Width Half-Maximum": + fwhm, e_fwhm = v.split("+/-") + scan_entry.attrs["FWHM"] = float(fwhm) + scan_entry.attrs["FWHM_err"] = float(e_fwhm) + else: # other crap, keep as is + if k not in exp_info: + 
scan_entry.attrs[k] = v + + if spice_data.ndim == 1: # empty data or 1 point only + if len(spice_data): # 1 point only + for idx, col_header in enumerate(col_headers): + scan_entry.create_dataset(col_header, data=spice_data[idx]) + else: # empty + pass + else: # nomarl data + for idx, col_header in enumerate(col_headers): + scan_entry.create_dataset(col_header, data=spice_data[:, idx]) diff --git a/src/tavi/utilities.py b/src/tavi/utilities.py new file mode 100644 index 00000000..e4fcc191 --- /dev/null +++ b/src/tavi/utilities.py @@ -0,0 +1,156 @@ +import numpy as np + +# -------------------------------------------------------------------------- +# constants +# -------------------------------------------------------------------------- + +# import scipy.constants as co +# ksq2E = (co.Planck / co.elementary_charge / 2.0 / np.pi) ** 2.0 * co.elementary_charge / 2.0 / co.neutron_mass * 1e23 +ksq2eng = 2.072124855 # calculated with scipy.constants using the formula above + +sig2fwhm = 2.0 * np.sqrt(2.0 * np.log(2.0)) +cm2angstrom = 1e8 +min2rad = 1.0 / 60.0 / 180.0 * np.pi +rad2deg = 180.0 / np.pi + +# -------------------------------------------------------------------------- +# d_spacing table from Shirane Appendix 3, in units of Angstrom, from +# -------------------------------------------------------------------------- +mono_ana_xtal = { + "PG002": 3.35416, + "Pg002": 3.35416, + "PG004": 1.67708, + "Cu111": 2.08717, + "Cu220": 1.27813, + "Ge111": 3.26627, + "Ge220": 2.00018, + "Ge311": 1.70576, + "Ge331": 1.29789, + "Be002": 1.79160, + "Be110": 1.14280, + "Heusler": 3.435, # Cu2MnAl(111) +} + + +# -------------------------------------------------------------------------- +# helper functions +# -------------------------------------------------------------------------- +def eng2k(en): + """convert energy in meV to wave vector k in inverse Angstrom""" + return + + +def get_angle(a, b, c): + """In a triangle with sides a,b and c, get angle between a and b in radian + 
Note:
+    return value in [0,pi]"""
+    acos = (a**2 + b**2 - c**2) / (2 * a * b)
+    if acos > 1 or acos < -1:
+        angle = None
+    else:
+        angle = np.arccos(acos)
+    return angle
+
+
+def get_angle_vec(v1, v2):
+    """Get the angle in degrees between two vectors v1 and v2"""
+    return np.arccos(np.dot(v1, v2) / np.linalg.norm(v1) / np.linalg.norm(v2)) / np.pi * 180
+
+
+def get_angle_bragg(q, d_spaceing):
+    """return angle based on Bragg's law, in radian
+    2d sin(theta) = lambda = 2 pi /q
+    """
+    return np.arcsin(np.pi / (d_spaceing * q))
+
+
+def rotation_matrix_2d(phi):
+    """rotate the coordinate system by angle of phi about z-axis
+
+    Args:
+        phi (float): angle in radian
+
+    Note:
+        This is to rotate the coordinate system, NOT the vector!
+
+
+    """
+    s = np.sin(phi)
+    c = np.cos(phi)
+    mat = np.array(
+        [
+            [c, s, 0],
+            [-s, c, 0],
+            [0, 0, 1],
+        ]
+    )
+    return mat
+
+
+def rot_x(nu):
+    """rotation matrix about x-axis by angle nu
+
+    Args:
+        nu (float): angle in degrees
+
+    Note:
+        Using Mantid convention, beam along z, y is up, x in plane
+    """
+
+    angle = nu / rad2deg
+    c = np.cos(angle)
+    s = np.sin(angle)
+    mat = np.array(
+        [
+            [1, 0, 0],
+            [0, c, -s],
+            [0, s, c],
+        ]
+    )
+    return mat
+
+
+def rot_y(omega):
+    """rotation matrix about y-axis by angle omega
+
+    Args:
+        omega (float): angle in degrees
+
+    Note:
+        Using Mantid convention, beam along z, y is up, x in plane
+    """
+
+    angle = omega / rad2deg
+    c = np.cos(angle)
+    s = np.sin(angle)
+    mat = np.array(
+        [
+            [c, 0, s],
+            [0, 1, 0],
+            [-s, 0, c],
+        ]
+    )
+    return mat
+
+
+def rot_z(mu):
+    """rotation matrix about z-axis by angle mu
+
+    Args:
+        mu (float): angle in degrees
+
+    Note:
+        Using Mantid convention, beam along z, y is up, x in plane
+    """
+
+    angle = mu / rad2deg
+    c = np.cos(angle)
+    s = np.sin(angle)
+    mat = np.array(
+        [
+            [c, -s, 0],
+            [s, c, 0],
+            [0, 0, 1],
+        ]
+    )
+    return mat