refactor tavi data classes
Bing Li committed Aug 27, 2024
1 parent 854fa9d commit fd1aec3
Showing 6 changed files with 210 additions and 95 deletions.
1 change: 1 addition & 0 deletions src/tavi/data/nexus_reader.py
@@ -129,6 +129,7 @@ def dataset_to_string(ds):
return scan_info, sample_ub_info, instrument_info, data


# TODO
def nexus_to_SPICE(nexus_entry):
"""Reads a nexus entry, convert to a SPICE scan file
197 changes: 177 additions & 20 deletions src/tavi/data/scan.py
@@ -1,17 +1,153 @@
from typing import NamedTuple, Optional

import matplotlib.pyplot as plt
import numpy as np
from tavi.data.nexus_reader import nexus_to_dict


class ScanInfo(NamedTuple):
"""Metadata containing scan information"""

scan_num: Optional[int] = None
time: Optional[str] = None
scan_title: str = ""
preset_type: str = "normal"
preset_channel: str = "time"
preset_value: float = 1.0
def_y: str = "detector"
def_x: str = "s1"


class SampleUBInfo(NamedTuple):
"""Metadata about sample and UB matrix"""

sample_name: Optional[str] = None
lattice_constants: tuple[
float,
float,
float,
float,
float,
float,
] = (1.0, 1.0, 1.0, 90.0, 90.0, 90.0)
ub_matrix: Optional[np.ndarray] = None
# mode: int = 0 # mode for UB determination in SPICE
plane_normal: Optional[np.ndarray] = None
in_plane_ref: Optional[np.ndarray] = None
ubconf: Optional[str] = None # path to UB configuration file


class InstrumentInfo(NamedTuple):
"""Metadata about instrument configuration"""

instrument_name: str = ""
# monochromator:
# analyzer:
# TODO
sense: int = 1
collimation: tuple[float, float, float, float] = (60, 60, 60, 60)
# TODO vertical collimation??
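
For orientation, a minimal sketch of constructing these metadata tuples with keyword arguments; every value below is invented, and omitted fields keep the declared defaults:

from tavi.data.scan import InstrumentInfo, ScanInfo

# Hypothetical values; any field not passed falls back to its default.
scan_info = ScanInfo(scan_num=42, preset_channel="mcu", preset_value=5.0, def_x="s1")
instrument_info = InstrumentInfo(instrument_name="CG4C", sense=-1)

# NamedTuples are immutable; _replace returns an updated copy.
scan_info = scan_info._replace(scan_title="hypothetical (1, 0, 0) scan")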


class ScanData(NamedTuple):
"""Data points in a measured scan"""

# Pt.
detector: Optional[tuple[int, ...]] = None
# monitor
time: Optional[tuple[float, ...]] = None
monitor: Optional[tuple[int, ...]] = None
mcu: Optional[tuple[float, ...]] = None
# monochromator
m1: Optional[tuple[float, ...]] = None
m2: Optional[tuple[float, ...]] = None
ei: Optional[tuple[float, ...]] = None
focal_length: Optional[tuple[float, ...]] = None
mfocus: Optional[tuple[float, ...]] = None
marc: Optional[tuple[float, ...]] = None
mtrans: Optional[tuple[float, ...]] = None
# analyzer
ef: Optional[tuple[float, ...]] = None
a1: Optional[tuple[float, ...]] = None
a2: Optional[tuple[float, ...]] = None
afocus: Optional[tuple[float, ...]] = None
# ctax double-focused analyzer
qm: Optional[
tuple[
tuple[float, ...],
tuple[float, ...],
tuple[float, ...],
tuple[float, ...],
tuple[float, ...],
tuple[float, ...],
tuple[float, ...],
tuple[float, ...],
]
] = None
xm: Optional[
tuple[
tuple[float, ...],
tuple[float, ...],
tuple[float, ...],
tuple[float, ...],
tuple[float, ...],
tuple[float, ...],
tuple[float, ...],
tuple[float, ...],
]
] = None
# goniometer motor angles
s1: Optional[tuple[float, ...]] = None
s2: Optional[tuple[float, ...]] = None
sgl: Optional[tuple[float, ...]] = None
sgu: Optional[tuple[float, ...]] = None
stl: Optional[tuple[float, ...]] = None
stu: Optional[tuple[float, ...]] = None
chi: Optional[tuple[float, ...]] = None
phi: Optional[tuple[float, ...]] = None
# slits
slit_pre_bt: Optional[tuple[float, ...]] = None
slit_pre_tp: Optional[tuple[float, ...]] = None
slit_pre_lf: Optional[tuple[float, ...]] = None
slit_pre_rt: Optional[tuple[float, ...]] = None
slit_aft_bt: Optional[tuple[float, ...]] = None
slit_aft_tp: Optional[tuple[float, ...]] = None
slit_aft_lf: Optional[tuple[float, ...]] = None
slit_aft_rt: Optional[tuple[float, ...]] = None
# Q-E space
q: Optional[tuple[float, ...]] = None
qh: Optional[tuple[float, ...]] = None
qk: Optional[tuple[float, ...]] = None
ql: Optional[tuple[float, ...]] = None
en: Optional[tuple[float, ...]] = None
# temperature
temp: Optional[tuple[float, ...]] = None
temp_a: Optional[tuple[float, ...]] = None
temp_2: Optional[tuple[float, ...]] = None
coldtip: Optional[tuple[float, ...]] = None
tsample: Optional[tuple[float, ...]] = None
sample: Optional[tuple[float, ...]] = None
vti: Optional[tuple[float, ...]] = None
dr_tsample: Optional[tuple[float, ...]] = None
dr_temp: Optional[tuple[float, ...]] = None
lt: Optional[tuple[float, ...]] = None
ht: Optional[tuple[float, ...]] = None
sorb_temp: Optional[tuple[float, ...]] = None
sorb: Optional[tuple[float, ...]] = None
sample_ht: Optional[tuple[float, ...]] = None
# field
persistent_field: Optional[tuple[float, ...]] = None
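
Every column of ScanData is optional, so anything not recorded in a scan simply stays None. A minimal sketch with invented numbers, converting a column to a numpy array for normalization:

import numpy as np

from tavi.data.scan import ScanData

# Only the recorded columns are passed; all other fields default to None.
data = ScanData(
    detector=(120, 340, 560, 330, 110),
    monitor=(60000,) * 5,
    s1=(34.0, 34.5, 35.0, 35.5, 36.0),
)

counts = np.asarray(data.detector, dtype=float)
norm_counts = counts / np.asarray(data.monitor)  # monitor-normalized intensity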


class Scan(object):
"""
Manage a single measured scan
Attributes:
scan_info (dict):
sample_ub_info (dict):
instrument_info (dict):
data (dict): dictionary contains lists of scan data
scan_info (ScanInfo):
sample_ub_info (SampleUBInfo):
instrument_info (InstrumentInfo):
data (ScanData): data columns of the measured scan
Methods:
load_scan
@@ -20,29 +156,50 @@ class Scan(object):
"""

def __init__(self, nexus_entry=None):
"""Initialze an empty scan if nexus entry not provided"""
def __init__(self) -> None:
self.scan_info: Optional[ScanInfo] = None
self.sample_ub_info: Optional[SampleUBInfo] = None
self.instrument_info: Optional[InstrumentInfo] = None
self.data: Optional[ScanData] = None

self.scan_info = None
self.sample_ub_info = None
self.instrument_info = None
self.data = None

if nexus_entry is not None:
self.load_scan(nexus_entry)

def load_scan(self, nexus_entry):
"""Unpack metadata and data from nexus_entry
# TODO
@staticmethod
def unpack_nexus(nexus_entry):
"""Reads a nexus entry, convert to dictionaries of data and meta_data
Args:
nexus_entry:
nexus entry
Returns:
meta_data (dict)
data (dict)
"""
scan_info = {
"scan": int(nexus_entry.name[-4:]), # last 4 digits are scan number
"time": dataset_to_string(nexus_entry["start_time"]),
"scan_title": dataset_to_string(nexus_entry["title"]),
# "preset_type": "normal",
"preset_channel": dataset_to_string(nexus_entry["monitor/mode"]),
"preset_value": float(nexus_entry["monitor/preset"][...]),
"def_y": nexus_entry["data"].attrs["signal"],
"def_x": nexus_entry["data"].attrs["axes"],
}
return (scan_info, sample_ub_info, instrument_info, scan_data)

scan_info, sample_ub_info, instrument_info, data = nexus_to_dict(nexus_entry)
def load_scan(self, nexus_entry):
"""Unpack metadata and data from nexus_entry"""

# scan_info, sample_ub_info, instrument_info, data = nexus_to_dict(nexus_entry)
(
scan_info,
sample_ub_info,
instrument_info,
scan_data,
) = Scan.unpack_nexus(nexus_entry)
self.scan_info = scan_info
self.sample_ub_info = sample_ub_info
self.instrument_info = instrument_info
self.data = data
self.data = scan_data

def get_scan_info(self):
"""Return scan_info in metadata.
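
Once the TODO in unpack_nexus is filled in, the refactored Scan is presumably driven along these lines; the file name and entry key below are hypothetical:

import h5py

from tavi.data.scan import Scan

with h5py.File("IPTS1234_HB3_exp567.h5", "r") as nexus_file:
    scan = Scan()
    scan.load_scan(nexus_file["scan0042"])

print(scan.scan_info)        # metadata unpacked from the entry
print(scan.sample_ub_info)   # sample and UB-matrix metadata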
92 changes: 17 additions & 75 deletions src/tavi/data/tavi.py
@@ -11,12 +11,12 @@ class TAVI(object):
TAVI_data contains four possible categories, including
- data, a list of 1D scans, raw data.
- processed_data, including combined scans, 2D maps or dispersion plots
- fit, contains fitting info including model, parameters and reduced chi_squared
- plot, contains one or more scans and/or fits
- fits, contains fitting info including model, parameters and reduced chi_squared
- plots, contains one or more scans and/or fits
Attributes:
hdf5_path: save path to hdf5
self.scans: list of Scan instances
file_path: path to a tavi file
"""

Expand All @@ -31,10 +31,13 @@ def __init__(self):
def new_tavi_file(self, file_path):
"""Create a new tavi file"""
self.file_path = file_path
h5py.get_config().track_order = True
with h5py.File(file_path, "w") as root:
root.create_group("data")
root.create_group("processed_data")
root.create_group("fits")
root.create_group(
"fits",
)
root.create_group("plots")

def load_nexus_data_from_disk(self, path_to_hdf5):
@@ -45,7 +48,7 @@ def load_nexus_data_from_disk(self, path_to_hdf5):
OVERWRITE (bool): overwrite existing data if True, otherwise append new scans in data.
Do not change processed_data, fit or plot
"""
# TODO check if file exsits
# TODO validate path

with h5py.File(self.file_path, "a") as tavi_file, h5py.File(path_to_hdf5, "r") as data_file:
# IPTS1234_HB3_exp567
@@ -62,7 +65,8 @@ def load_nexus_data_from_disk(self, path_to_hdf5):

self.data.update({data_id: scans})
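
The two-file pattern above pairs naturally with h5py's Group.copy for pulling NeXus entries into the tavi file's data group; a rough sketch with illustrative names, not the hidden implementation:

import h5py

with h5py.File("tavi_file.h5", "a") as tavi_file, h5py.File("nexus_data.h5", "r") as data_file:
    dest = tavi_file["data"].require_group("IPTS1234_HB3_exp567")
    for entry_name in data_file:
        # copy duplicates the whole entry, datasets and attributes included
        data_file.copy(data_file[entry_name], dest, name=entry_name)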

def load_spice_data_from_disk(self, path_to_spice_folder, OVERWRITE=True):
# TODO
def load_spice_data_from_disk(self, path_to_spice_folder):
"""Load hdf5 data from path_to_hdf5.
Args:
@@ -71,71 +75,7 @@ def load_spice_data_from_disk(self, path_to_spice_folder, OVERWRITE=True):
Do not change processed_data, fit or plot
"""

# exp_info = [
# "experiment",
# "experiment_number",
# "proposal",
# "users",
# "local_contact",
# ]

# p = Path(path_to_spice_folder)
# scans = sorted((p / "Datafiles").glob("*"))
# instrument, exp = scans[0].parts[-1].split("_")[0:2]

# # read in exp_info from the first scan and save as attibutes of the file
# _, _, headers, _ = read_spice(scans[0])
# ipts = headers["proposal"]
# data_id = f"IPTS{ipts}_{instrument}_{exp}" # e.g. "IPTS1234_HB3_exp567"
# data_entries = []

# for scan in scans: # ignoring unused keys
# spice_data, col_headers, headers, unused = read_spice(scan)
# scan_id = ((scan.parts[-1].split("_"))[-1]).split(".")[0] # e.g. "scan0001"

# meta_data = {"scan_id": scan_id}

# for k, v in headers.items():
# if k not in exp_info: # ignore common keys in single scans
# if "," in v and k != "scan_title": # vectors
# meta_data.update({k: np.array([float(v0) for v0 in v.split(",")])})
# elif v.replace(".", "").isnumeric(): # numebrs only
# if v.isdigit(): # int
# meta_data.update({k: int(v)})
# else: # float
# meta_data.update({k: float(v)})
# # separate COM/FWHM and its errorbar
# elif k == "Center of Mass":
# com, e_com = v.split("+/-")
# meta_data.update({"COM": float(com)})
# meta_data.update({"COM_err": float(e_com)})
# elif k == "Full Width Half-Maximum":
# fwhm, e_fwhm = v.split("+/-")
# meta_data.update({"FWHM": float(fwhm)})
# meta_data.update({"FWHM_err": float(e_fwhm)})
# else: # other crap, keep as is
# if k not in exp_info:
# meta_data.update({k: v})

# data = {}

# if spice_data.ndim == 1: # empty data or 1 point only
# if len(spice_data): # 1 point only
# for idx, col_header in enumerate(col_headers):
# data.update({col_header: spice_data[idx]})
# else: # empty
# pass
# else: # nomarl data
# for idx, col_header in enumerate(col_headers):
# data.update({col_header: spice_data[:, idx]})

# s = Scan()
# s.set_metadata(meta_data)
# s.set_data(data)

# data_entries.append(s)

# self.data.update({data_id: data_entries})
pass

def load_data_from_oncat(self, user_credentials, ipts_info, OVERWRITE=True):
"""Load data from ONCat based on user_credentials and ipts_info.
@@ -148,10 +88,12 @@ def load_data_from_oncat(self, user_credentials, ipts_info, OVERWRITE=True):
"""
pass

def open_tavi_file(self, file_path):
def open_tavi_file(self, tavi_file_path):
"""Open existing tavi file"""
self.file_path = file_path
with h5py.File(file_path, "a") as tavi_file:
# TODO validate path
self.file_path = tavi_file_path

with h5py.File(tavi_file_path, "a") as tavi_file:
# load datasets in data folder
for data_id in tavi_file["data"].keys():
dataset = tavi_file["data"][data_id]
Binary file removed test_data/tavi_exp424.h5
Binary file not shown.
Binary file modified test_data/tavi_test.h5
Binary file not shown.
15 changes: 15 additions & 0 deletions tests/test_tavi.py
@@ -0,0 +1,15 @@
# -*- coding: utf-8 -*-
import h5py

from tavi.data.tavi import TAVI


def test_new_tavi_file():
    tavi = TAVI()
    tavi_file_name = "./test_data/tavi_test.h5"
    tavi.new_tavi_file(tavi_file_name)

    with h5py.File(tavi_file_name, "r") as f:
        keys = list(f.keys())  # materialize the keys before the file is closed

    assert keys[1] == "processed_data"
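
A hypothetical companion check in the same spirit, asserting the full group layout in creation order:

def test_new_tavi_file_group_order():
    tavi = TAVI()
    tavi_file_name = "./test_data/tavi_test.h5"
    tavi.new_tavi_file(tavi_file_name)

    with h5py.File(tavi_file_name, "r") as f:
        keys = list(f.keys())

    assert keys == ["data", "processed_data", "fits", "plots"]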
