diff --git a/scripts/test_data_conversion.py b/scripts/test_data_conversion.py index e9441d28..f5079f77 100644 --- a/scripts/test_data_conversion.py +++ b/scripts/test_data_conversion.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +# -*- coding: utf-8 -*- from tavi.data.spice_to_nexus import convert_spice_to_nexus from tavi.data.tavi import TAVI diff --git a/src/tavi/data/spice_to_nexus.py b/src/tavi/data/spice_to_nexus.py index 47e96a7d..7e8c3fcc 100644 --- a/src/tavi/data/spice_to_nexus.py +++ b/src/tavi/data/spice_to_nexus.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- import os from datetime import datetime from pathlib import Path @@ -6,89 +7,116 @@ import numpy as np -def read_spice(file_name): +def _read_spice(file_name): """Reads an ascii generated by spice, and returns a header structure and a data table Args: file_name (str): a string containing the filename Returns: - spice_data (numpy.array): an array containing all columns/rows + spice_data (np.ndarray): an array containing all columns/rows headers (dict): a dictionary containing information from the commented lines. - col_headers (list): - unused (dict): not yet used in hdf5 + col_headers (tuple): name of each column in spice_data + unused (tuple): not yet used in hdf5 """ with open(file_name, encoding="utf-8") as f: all_content = f.readlines() - header_list = [line.strip() for line in all_content if "#" in line] - col_name_index = header_list.index("# col_headers =") + 1 - col_names = header_list[col_name_index].strip("#").split() - header_list.pop(col_name_index) - header_list.pop(col_name_index - 1) - spice_data = np.genfromtxt(file_name, comments="#") + metadata = [line.strip() for line in all_content if "#" in line] + index_col_name = metadata.index("# col_headers =") + col_headers = tuple(metadata[index_col_name + 1].strip("#").split()) + metadata_list = metadata[:index_col_name] + + index_sum_count = [i for i, header in enumerate(metadata) if header.startswith("# Sum of Counts =")] + # in case "Sum of Counts" doesn't exist + # happens to the last scan if the beam goes down + if len(index_sum_count) != 0: + metadata_list += metadata[index_sum_count[0] + 1 :] - col_headers = col_names headers = {} unused = [] - for line in header_list: - line = line.strip("# ") - if "=" in line: # empty field - if line[-1] == "=": - unused.append(line[:-2]) # remove " =" - else: - parts = line.split("=") - key = parts[0].strip() - val = "=".join(parts[1:])[1:] # remove the fisrt space charactor - headers[key] = val - elif "completed" in line or "stopped" in line: # last line + for metadata_entry in metadata_list: + line = metadata_entry.strip("# ") + + if "completed" in line or "stopped" in line: # last line parts = line.split(" ") headers["end_time"] = parts[3] + " " + parts[0] + " " + parts[1] - else: # empty field + # elif line[-1] == "=": # empty line + # unused.append(line[:-2]) # remove " =" + elif "=" in line: # useful line + parts = line.split("=") + key = parts[0].strip() + val = "=".join(parts[1:])[1:] # remove the first space character + headers[key] = val + else: # unrecognized line, keep in unused
unused.append(line) + unused = tuple(unused) - return spice_data, col_headers, headers, unused - + spice_data = np.genfromtxt(file_name, comments="#") -def read_spice_ub(ub_file): - """Reads ub info from UBConf + return spice_data, col_headers, headers, unused - Args: - ub_file (str): a string containing the filename - Returns: - ubconf (dict) - """ +def _read_spice_ub(ub_file_name: str) -> dict: + """Reads ub info from UBConf""" ubconf = {} - with open(ub_file, encoding="utf-8") as f: + with open(ub_file_name, encoding="utf-8") as f: all_content = f.readlines() - for idx, line in enumerate(all_content): - if line.strip() == "": - continue # skip if empty - elif line.strip()[0] == "[": - continue # skiplines like "[xx]" - - ub_dict = {} - key, val = line.strip().split("=") - if key == "Mode": - if all_content[idx - 1].strip() == "[UBMode]": - ub_dict["UBMode"] = int(val) - elif all_content[idx - 1].strip() == "[AngleMode]": - ub_dict["AngleMode"] = int(val) - else: - if "," in line: # vector - ub_dict[key] = np.array([float(v) for v in val.strip('"').split(",")]) - else: # float number - ub_dict[key] = float(val) - - ubconf.update(ub_dict) + for idx, line in enumerate(all_content): + if line.strip() == "": + continue # skip if empty + elif line.strip()[0] == "[": + continue # skip lines like "[xx]" + + ub_dict = {} + key, val = line.strip().split("=") + if key == "Mode": + if all_content[idx - 1].strip() == "[UBMode]": + ub_dict["UBMode"] = int(val) + elif all_content[idx - 1].strip() == "[AngleMode]": + ub_dict["AngleMode"] = int(val) + else: + if "," in line: # vector + ub_dict[key] = np.array([float(v) for v in val.strip('"').split(",")]) + else: # float number + ub_dict[key] = float(val) + + ubconf.update(ub_dict) return ubconf -def spicelogs_to_nexus(nxentry): +def _format_spice_header(headers: dict) -> dict: + + formatted_headers = {} + + exp_str = ["scan_title", "users", "local_contact", "experiment"] + for k, v in headers.items(): + if "," in v and k not in exp_str: # vectors + formatted_headers[k] = np.array([float(v0) for v0 in v.split(",")]) + elif v.replace(".", "").isnumeric(): # numbers only + if v.isdigit(): # int + formatted_headers[k] = int(v) + else: # float + formatted_headers[k] = float(v) + # separate COM/FWHM and its errorbar + elif k == "Center of Mass": + com, e_com = v.split("+/-") + formatted_headers["COM"] = float(com) + formatted_headers["COM_err"] = float(e_com) + elif k == "Full Width Half-Maximum": + fwhm, e_fwhm = v.split("+/-") + formatted_headers["FWHM"] = float(fwhm) + formatted_headers["FWHM_err"] = float(e_fwhm) + else: # anything else, keep as is + formatted_headers[k] = v + + return formatted_headers + + +def _spicelogs_to_nexus(nxentry): """Format info from SPICElogs into Nexus format""" spice_logs = nxentry["SPICElogs"] @@ -140,31 +168,32 @@ def spicelogs_to_nexus(nxentry): # nxentry["instrument"].create_group("filter") # nxentry["instrument"].attrs["NX_class"] = "NXfilter" - # Create the FIELDS + # Valid enumeration values for root['/entry']['definition'] are: + # NXtas + nxentry.create_dataset(name="definition", data="NXtas", maxshape=None) + nxentry["definition"].attrs["type"] = "NX_CHAR" + nxentry["definition"].attrs["EX_required"] = "true" - nxentry.create_dataset(name="title", data=spice_logs.attrs["scan_title"], maxshape=None) - nxentry["title"].attrs["type"] = "NX_CHAR" - nxentry["title"].attrs["EX_required"] = "true" + try: # scan info + nxentry.create_dataset(name="title", data=spice_logs.attrs["scan_title"], maxshape=None) + 
nxentry["title"].attrs["type"] = "NX_CHAR" + nxentry["title"].attrs["EX_required"] = "true" - # TODO timezone - start_date_time = "{} {}".format(spice_logs.attrs["date"], spice_logs.attrs["time"]) - start_time = datetime.strptime(start_date_time, "%m/%d/%Y %I:%M:%S %p").isoformat() + # TODO timezone + start_date_time = "{} {}".format(spice_logs.attrs["date"], spice_logs.attrs["time"]) + start_time = datetime.strptime(start_date_time, "%m/%d/%Y %I:%M:%S %p").isoformat() - nxentry.create_dataset(name="start_time", data=start_time, maxshape=None) - nxentry["start_time"].attrs["type"] = "NX_DATE_TIME" - nxentry["start_time"].attrs["EX_required"] = "true" + nxentry.create_dataset(name="start_time", data=start_time, maxshape=None) + nxentry["start_time"].attrs["type"] = "NX_DATE_TIME" + nxentry["start_time"].attrs["EX_required"] = "true" - if "end_time" in spice_logs.attrs: # last scan never finished + # if "end_time" in spice_logs.attrs: # last scan never finished end_date_time = spice_logs.attrs["end_time"] end_time = datetime.strptime(end_date_time, "%m/%d/%Y %I:%M:%S %p").isoformat() nxentry.create_dataset(name="end_time", data=end_time, maxshape=None) nxentry["end_time"].attrs["type"] = "NX_DATE_TIME" - - # Valid enumeration values for root['/entry']['definition'] are: - # NXtas - nxentry.create_dataset(name="definition", data="NXtas", maxshape=None) - nxentry["definition"].attrs["type"] = "NX_CHAR" - nxentry["definition"].attrs["EX_required"] = "true" + except KeyError: + pass # --------------------------- instrument --------------------------- nxentry["instrument"].create_dataset(name="name", data=spice_logs.attrs["instrument"], maxshape=None) @@ -187,149 +216,135 @@ def spicelogs_to_nexus(nxentry): # This number should be negative to signify that it is upstream of the sample. 
# nxentry["instrument/source"].attrs["distance"] = -0.0 - # --------------------------- collimators --------------------------- - nxentry["instrument/collimator"].create_dataset(name="type", data="Soller", maxshape=None) - nxentry["instrument/collimator/type"].attrs["type"] = "NX_CHAR" + try: # -------------------collimators------------- + nxentry["instrument/collimator"].create_dataset(name="type", data="Soller", maxshape=None) + nxentry["instrument/collimator/type"].attrs["type"] = "NX_CHAR" - div_x = [float(v) for v in list(spice_logs.attrs["collimation"].split("-"))] - nxentry["instrument/collimator"].create_dataset(name="divergence_x", data=div_x, maxshape=None) - nxentry["instrument/collimator/divergence_x"].attrs["type"] = "NX_ANGLE" - nxentry["instrument/collimator/divergence_x"].attrs["units"] = "min" + div_x = [float(v) for v in list(spice_logs.attrs["collimation"].split("-"))] + nxentry["instrument/collimator"].create_dataset(name="divergence_x", data=div_x, maxshape=None) + nxentry["instrument/collimator/divergence_x"].attrs["type"] = "NX_ANGLE" + nxentry["instrument/collimator/divergence_x"].attrs["units"] = "min" + except KeyError: + pass - # --------------------------- monochromator --------------------------- - nxmono.create_dataset(name="ei", data=spice_logs["ei"], maxshape=None) - nxmono["ei"].attrs["type"] = "NX_FLOAT" - nxmono["ei"].attrs["EX_required"] = "true" - # nxmono["ei"].attrs["axis"] = "1" - nxmono["ei"].attrs["units"] = "meV" + try: # ----------------- monochromator------------ + nxmono.create_dataset(name="ei", data=spice_logs["ei"], maxshape=None) + nxmono["ei"].attrs["type"] = "NX_FLOAT" + nxmono["ei"].attrs["EX_required"] = "true" + # nxmono["ei"].attrs["axis"] = "1" + nxmono["ei"].attrs["units"] = "meV" - nxmono.create_dataset(name="type", data=spice_logs.attrs["monochromator"], maxshape=None) - nxmono.attrs["type"] = "NX_CHAR" + nxmono.create_dataset(name="type", data=spice_logs.attrs["monochromator"], maxshape=None) + nxmono.attrs["type"] = "NX_CHAR" - try: nxmono.create_dataset(name="m1", data=spice_logs["m1"], maxshape=None) nxmono["m1"].attrs["type"] = "NX_FLOAT" nxmono["m1"].attrs["units"] = "degrees" - except KeyError: - pass - nxmono.create_dataset(name="m2", data=spice_logs["m2"], maxshape=None) - nxmono["m2"].attrs["type"] = "NX_FLOAT" - nxmono["m2"].attrs["units"] = "degrees" + nxmono.create_dataset(name="m2", data=spice_logs["m2"], maxshape=None) + nxmono["m2"].attrs["type"] = "NX_FLOAT" + nxmono["m2"].attrs["units"] = "degrees" - try: nxmono.create_dataset(name="mfocus", data=spice_logs["mfocus"], maxshape=None) nxmono["mfocus"].attrs["type"] = "NX_FLOAT" - except KeyError: - pass - try: nxmono.create_dataset(name="marc", data=spice_logs["marc"], maxshape=None) nxmono["marc"].attrs["type"] = "NX_FLOAT" - except KeyError: - pass - try: nxmono.create_dataset(name="mtrans", data=spice_logs["mtrans"], maxshape=None) nxmono["mtrans"].attrs["type"] = "NX_FLOAT" - except KeyError: - pass - try: nxmono.create_dataset(name="focal_length", data=spice_logs["focal_length"], maxshape=None) nxmono["focal_length"].attrs["type"] = "NX_FLOAT" - except KeyError: - pass - nxmono.create_dataset(name="sense", data=spice_logs.attrs["sense"][0], maxshape=None) - nxmono.attrs["type"] = "NX_CHAR" + nxmono.create_dataset(name="sense", data=spice_logs.attrs["sense"][0], maxshape=None) + nxmono.attrs["type"] = "NX_CHAR" - # nxmono.create_dataset(name="rotation_angle", data=1.0, maxshape=None) - # nxmono["rotation_angle"].attrs["type"] = "NX_FLOAT" - # 
nxmono["rotation_angle"].attrs["EX_required"] = "true" - # nxmono["rotation_angle"].attrs["units"] = "NX_ANGLE" + # nxmono.create_dataset(name="rotation_angle", data=1.0, maxshape=None) + # nxmono["rotation_angle"].attrs["type"] = "NX_FLOAT" + # nxmono["rotation_angle"].attrs["EX_required"] = "true" + # nxmono["rotation_angle"].attrs["units"] = "NX_ANGLE" + except KeyError: + pass - # --------------------------- analyzer --------------------------- + try: # -------------- analyzer --------------- - nxana.create_dataset(name="ef", data=spice_logs["ef"], maxshape=None) - nxana["ef"].attrs["type"] = "NX_FLOAT" - nxana["ef"].attrs["EX_required"] = "true" - # nxana["ef"].attrs["axis"] = "1" - nxana["ef"].attrs["units"] = "meV" + nxana.create_dataset(name="ef", data=spice_logs["ef"], maxshape=None) + nxana["ef"].attrs["type"] = "NX_FLOAT" + nxana["ef"].attrs["EX_required"] = "true" + # nxana["ef"].attrs["axis"] = "1" + nxana["ef"].attrs["units"] = "meV" - nxana.create_dataset(name="type", data=spice_logs.attrs["analyzer"], maxshape=None) - nxana.attrs["type"] = "NX_CHAR" + nxana.create_dataset(name="type", data=spice_logs.attrs["analyzer"], maxshape=None) + nxana.attrs["type"] = "NX_CHAR" - nxana.create_dataset(name="a1", data=spice_logs["a1"], maxshape=None) - nxana["a1"].attrs["type"] = "NX_FLOAT" - nxana["a1"].attrs["units"] = "degrees" + nxana.create_dataset(name="a1", data=spice_logs["a1"], maxshape=None) + nxana["a1"].attrs["type"] = "NX_FLOAT" + nxana["a1"].attrs["units"] = "degrees" - nxana.create_dataset(name="a2", data=spice_logs["a2"], maxshape=None) - nxana["a2"].attrs["type"] = "NX_FLOAT" - nxana["a2"].attrs["units"] = "degrees" + nxana.create_dataset(name="a2", data=spice_logs["a2"], maxshape=None) + nxana["a2"].attrs["type"] = "NX_FLOAT" + nxana["a2"].attrs["units"] = "degrees" - if "afocus" in spice_logs.keys(): nxana.create_dataset(name="afocus", data=spice_logs["afocus"], maxshape=None) nxana["afocus"].attrs["type"] = "NX_FLOAT" - if spice_logs.attrs["instrument"] == "CG4C": for i in range(8): # qm1--qm8, xm1 -- xm8 nxana.create_dataset(name=f"qm{i+1}", data=spice_logs[f"qm{i+1}"], maxshape=None) nxana.create_dataset(name=f"xm{i+1}", data=spice_logs[f"xm{i+1}"], maxshape=None) - nxana.create_dataset(name="sense", data=spice_logs.attrs["sense"][2], maxshape=None) - nxana.attrs["type"] = "NX_CHAR" + nxana.create_dataset(name="sense", data=spice_logs.attrs["sense"][2], maxshape=None) + nxana.attrs["type"] = "NX_CHAR" - # nxana.create_dataset(name="rotation_angle", data=1.0, maxshape=None) - # nxana["rotation_angle"].attrs["type"] = "NX_FLOAT" - # nxana["rotation_angle"].attrs["EX_required"] = "true" - # nxana["rotation_angle"].attrs["units"] = "NX_ANGLE" + # nxana.create_dataset(name="rotation_angle", data=1.0, maxshape=None) + # nxana["rotation_angle"].attrs["type"] = "NX_FLOAT" + # nxana["rotation_angle"].attrs["EX_required"] = "true" + # nxana["rotation_angle"].attrs["units"] = "NX_ANGLE" - # nxana.create_dataset(name="polar_angle", data=1.0, maxshape=None) - # nxana["polar_angle"].attrs["type"] = "NX_FLOAT" - # nxana["polar_angle"].attrs["EX_required"] = "true" - # nxana["polar_angle"].attrs["units"] = "NX_ANGLE" + # nxana.create_dataset(name="polar_angle", data=1.0, maxshape=None) + # nxana["polar_angle"].attrs["type"] = "NX_FLOAT" + # nxana["polar_angle"].attrs["EX_required"] = "true" + # nxana["polar_angle"].attrs["units"] = "NX_ANGLE" - # --------------------------- detector --------------------------- + except KeyError: + pass - 
nxentry["instrument/detector"].create_dataset(name="data", data=spice_logs["detector"], maxshape=None, dtype="int") - nxentry["instrument/detector/data"].attrs["type"] = "NX_INT" - nxentry["instrument/detector/data"].attrs["EX_required"] = "true" - nxentry["instrument/detector/data"].attrs["units"] = "counts" + try: # ------------ detector ----------- + nxentry["instrument/detector"].create_dataset( + name="data", data=spice_logs["detector"], maxshape=None, dtype="int" + ) + nxentry["instrument/detector/data"].attrs["type"] = "NX_INT" + nxentry["instrument/detector/data"].attrs["EX_required"] = "true" + nxentry["instrument/detector/data"].attrs["units"] = "counts" - # TODO HB1 polarized experiment + # nxentry["instrument/detector"].create_dataset(name="polar_angle", data=1.0, maxshape=None) + # nxentry["instrument/detector/polar_angle"].attrs["type"] = "NX_FLOAT" + # nxentry["instrument/detector/polar_angle"].attrs["EX_required"] = "true" + # nxentry["instrument/detector/polar_angle"].attrs["units"] = "NX_ANGLE" - # nxentry["instrument/detector"].create_dataset(name="polar_angle", data=1.0, maxshape=None) - # nxentry["instrument/detector/polar_angle"].attrs["type"] = "NX_FLOAT" - # nxentry["instrument/detector/polar_angle"].attrs["EX_required"] = "true" - # nxentry["instrument/detector/polar_angle"].attrs["units"] = "NX_ANGLE" + except KeyError: + pass - # ---------------------------- flipper --------------------------------- - if "fguide" in spice_logs.keys(): + # TODO HB1 polarized experiment + try: # --------------flipper ---------------- nxflipper.create_dataset(name="fguide", data=spice_logs["fguide"], maxshape=None) nxflipper["fguide"].attrs["type"] = "NX_FLOAT" - if "hguide" in spice_logs.keys(): + nxflipper.create_dataset(name="hguide", data=spice_logs["hguide"], maxshape=None) nxflipper["hguide"].attrs["type"] = "NX_FLOAT" - if "vguide" in spice_logs.keys(): + nxflipper.create_dataset(name="vguide", data=spice_logs["vguide"], maxshape=None) nxflipper["vguide"].attrs["type"] = "NX_FLOAT" + except KeyError: + pass + # TODO Helmohtz coils guide fields: tbguide, aguide, bguide # # --------------------------- slits --------------------------- slits_str1 = ("bat", "bab", "bal", "bar", "bbt", "bbb", "bbl", "bbr") - slits_str2 = ( - "slita_lf", - "slita_rt", - "slita_tp", - "slita_bt", - "slitb_lf", - "slitb_rt", - "slitb_tp", - "slitb_bt", - ) + slits_str2 = ("slita_lf", "slita_rt", "slita_tp", "slita_bt", "slitb_lf", "slitb_rt", "slitb_tp", "slitb_bt") slits_str3 = ("slit_pre_bt", "slit_pre_lf", "slit_pre_rt", "slit_pre_tp") slits_str = (slits_str1, slits_str2, slits_str3) @@ -341,140 +356,140 @@ def spicelogs_to_nexus(nxentry): nxslit[st].attrs["type"] = "NX_FLOAT" nxslit[st].attrs["units"] = "cm" - # --------------------------- sample --------------------------- - - nxentry["sample"].create_dataset(name="name", data=spice_logs.attrs["samplename"], maxshape=None) - nxentry["sample/name"].attrs["type"] = "NX_CHAR" - nxentry["sample/name"].attrs["EX_required"] = "true" - - nxentry["sample"].create_dataset(name="qh", data=spice_logs["h"], maxshape=None) - nxentry["sample/qh"].attrs["type"] = "NX_FLOAT" - nxentry["sample/qh"].attrs["EX_required"] = "true" - # nxentry["sample/qh"].attrs["axis"] = "1" - # nxentry["sample/qh"].attrs["units"] = "NX_DIMENSIONLESS" - - nxentry["sample"].create_dataset(name="qk", data=spice_logs["k"], maxshape=None) - nxentry["sample/qk"].attrs["type"] = "NX_FLOAT" - nxentry["sample/qk"].attrs["EX_required"] = "true" - # nxentry["sample/qk"].attrs["axis"] = "1" - 
# nxentry["sample/qk"].attrs["units"] = "NX_DIMENSIONLESS" - - nxentry["sample"].create_dataset(name="ql", data=spice_logs["l"], maxshape=None) - nxentry["sample/ql"].attrs["type"] = "NX_FLOAT" - nxentry["sample/ql"].attrs["EX_required"] = "true" - # nxentry["sample/ql"].attrs["axis"] = "1" - # nxentry["sample/ql"].attrs["units"] = "NX_DIMENSIONLESS" - - nxentry["sample"].create_dataset(name="en", data=spice_logs["e"], maxshape=None) - nxentry["sample/en"].attrs["type"] = "NX_FLOAT" - nxentry["sample/en"].attrs["EX_required"] = "true" - # nxentry["sample/en"].attrs["axis"] = "1" - nxentry["sample/en"].attrs["units"] = "meV" - - nxentry["sample"].create_dataset(name="sgu", data=spice_logs["sgu"], maxshape=None) - nxentry["sample/sgu"].attrs["type"] = "NX_FLOAT" - nxentry["sample/sgu"].attrs["EX_required"] = "true" - nxentry["sample/sgu"].attrs["units"] = "degrees" - - nxentry["sample"].create_dataset(name="sgl", data=spice_logs["sgl"], maxshape=None) - nxentry["sample/sgl"].attrs["type"] = "NX_FLOAT" - nxentry["sample/sgl"].attrs["EX_required"] = "true" - nxentry["sample/sgl"].attrs["units"] = "degrees" - - nxentry["sample"].create_dataset(name="unit_cell", data=spice_logs.attrs["latticeconstants"], maxshape=None) - nxentry["sample/unit_cell"].attrs["type"] = "NX_FLOAT" - nxentry["sample/unit_cell"].attrs["EX_required"] = "true" - # nxentry["sample/unit_cell"].attrs["units"] = "NX_LENGTH" - - nxentry["sample"].create_dataset(name="orientation_matrix", data=spice_logs.attrs["ubmatrix"], maxshape=None) - nxentry["sample/orientation_matrix"].attrs["type"] = "NX_FLOAT" - nxentry["sample/orientation_matrix"].attrs["EX_required"] = "true" - nxentry["sample/orientation_matrix"].attrs["units"] = "NX_DIMENSIONLESS" - - nxentry["sample"].create_dataset(name="ub_conf", data=spice_logs.attrs["ubconf"].split(".")[0], maxshape=None) - nxentry["sample/ub_conf"].attrs["type"] = "NX_CHAR" - - nxentry["sample"].create_dataset(name="plane_normal", data=spice_logs.attrs["plane_normal"], maxshape=None) - nxentry["sample/plane_normal"].attrs["type"] = "NX_FLOAT" - - nxentry["sample"].create_dataset(name="q", data=spice_logs["q"], maxshape=None) - nxentry["sample/q"].attrs["type"] = "NX_FLOAT" - nxentry["sample/q"].attrs["units"] = "Angstrom^-1" - - nxentry["sample"].create_dataset(name="stu", data=spice_logs["stu"], maxshape=None) - nxentry["sample/stu"].attrs["type"] = "NX_FLOAT" - nxentry["sample/stu"].attrs["units"] = "degrees" - - nxentry["sample"].create_dataset(name="stl", data=spice_logs["stl"], maxshape=None) - nxentry["sample/stl"].attrs["type"] = "NX_FLOAT" - nxentry["sample/stl"].attrs["units"] = "degrees" - - nxentry["sample"].create_dataset(name="s1", data=spice_logs["s1"], maxshape=None) - nxentry["sample/s1"].attrs["type"] = "NX_FLOAT" - nxentry["sample/s1"].attrs["units"] = "degrees" - - nxentry["sample"].create_dataset(name="s2", data=spice_logs["s2"], maxshape=None) - nxentry["sample/s2"].attrs["type"] = "NX_FLOAT" - nxentry["sample/s2"].attrs["units"] = "degrees" - - nxentry["sample"].create_dataset(name="type", data=spice_logs.attrs["sampletype"], maxshape=None) - nxentry["sample/type"].attrs["type"] = "NX_CHAR" - - nxentry["sample"].create_dataset(name="sense", data=spice_logs.attrs["sense"][1], maxshape=None) - nxentry["sample"].attrs["type"] = "NX_CHAR" - - nxentry["sample"].create_dataset(name="Pt.", data=spice_logs["Pt."], maxshape=None) - nxentry["sample"].attrs["type"] = "NX_CHAR" - - # nxentry["sample"].create_dataset(name="rotation_angle", data=1.0, maxshape=None) - # 
nxentry["sample/rotation_angle"].attrs["type"] = "NX_FLOAT" - # nxentry["sample/rotation_angle"].attrs["EX_required"] = "true" - # nxentry["sample/rotation_angle"].attrs["units"] = "NX_ANGLE" - - # nxentry["sample"].create_dataset(name="polar_angle", data=1.0, maxshape=None) - # nxentry["sample/polar_angle"].attrs["type"] = "NX_FLOAT" - # nxentry["sample/polar_angle"].attrs["EX_required"] = "true" - # nxentry["sample/polar_angle"].attrs["units"] = "NX_ANGLE" - - # --------------------------- monitor --------------------------- - # Valid enumeration values for root['/entry/monitor']['mode'] are: - # monitor - # time - # mcu - - if spice_logs.attrs["preset_type"] == "normal": - preset_channel = spice_logs.attrs["preset_channel"] - - nxentry["monitor"].create_dataset(name="mode", data=preset_channel, maxshape=None) - nxentry["monitor/mode"].attrs["type"] = "NX_CHAR" - nxentry["monitor/mode"].attrs["EX_required"] = "true" - - nxentry["monitor"].create_dataset(name="preset", data=spice_logs.attrs["preset_value"], maxshape=None) - nxentry["monitor/preset"].attrs["type"] = "NX_FLOAT" - nxentry["monitor/preset"].attrs["EX_required"] = "true" - - nxentry["monitor"].create_dataset(name="time", data=spice_logs["time"], maxshape=None) - nxentry["monitor/time"].attrs["type"] = "NX_FLOAT" - nxentry["monitor/time"].attrs["units"] = "seconds" - - nxentry["monitor"].create_dataset(name="monitor", data=spice_logs["monitor"], maxshape=None) - nxentry["monitor/monitor"].attrs["type"] = "NX_INT" - nxentry["monitor/monitor"].attrs["units"] = "counts" - - nxentry["monitor"].create_dataset(name="mcu", data=spice_logs["mcu"], maxshape=None) - nxentry["monitor/mcu"].attrs["type"] = "NX_FLOAT" + try: # -----------------sample ----------------- + + nxentry["sample"].create_dataset(name="name", data=spice_logs.attrs["samplename"], maxshape=None) + nxentry["sample/name"].attrs["type"] = "NX_CHAR" + nxentry["sample/name"].attrs["EX_required"] = "true" + + nxentry["sample"].create_dataset(name="qh", data=spice_logs["h"], maxshape=None) + nxentry["sample/qh"].attrs["type"] = "NX_FLOAT" + nxentry["sample/qh"].attrs["EX_required"] = "true" + # nxentry["sample/qh"].attrs["axis"] = "1" + # nxentry["sample/qh"].attrs["units"] = "NX_DIMENSIONLESS" + + nxentry["sample"].create_dataset(name="qk", data=spice_logs["k"], maxshape=None) + nxentry["sample/qk"].attrs["type"] = "NX_FLOAT" + nxentry["sample/qk"].attrs["EX_required"] = "true" + # nxentry["sample/qk"].attrs["axis"] = "1" + # nxentry["sample/qk"].attrs["units"] = "NX_DIMENSIONLESS" + + nxentry["sample"].create_dataset(name="ql", data=spice_logs["l"], maxshape=None) + nxentry["sample/ql"].attrs["type"] = "NX_FLOAT" + nxentry["sample/ql"].attrs["EX_required"] = "true" + # nxentry["sample/ql"].attrs["axis"] = "1" + # nxentry["sample/ql"].attrs["units"] = "NX_DIMENSIONLESS" + + nxentry["sample"].create_dataset(name="en", data=spice_logs["e"], maxshape=None) + nxentry["sample/en"].attrs["type"] = "NX_FLOAT" + nxentry["sample/en"].attrs["EX_required"] = "true" + # nxentry["sample/en"].attrs["axis"] = "1" + nxentry["sample/en"].attrs["units"] = "meV" + + nxentry["sample"].create_dataset(name="sgu", data=spice_logs["sgu"], maxshape=None) + nxentry["sample/sgu"].attrs["type"] = "NX_FLOAT" + nxentry["sample/sgu"].attrs["EX_required"] = "true" + nxentry["sample/sgu"].attrs["units"] = "degrees" + + nxentry["sample"].create_dataset(name="sgl", data=spice_logs["sgl"], maxshape=None) + nxentry["sample/sgl"].attrs["type"] = "NX_FLOAT" + nxentry["sample/sgl"].attrs["EX_required"] = "true" + 
nxentry["sample/sgl"].attrs["units"] = "degrees" + + nxentry["sample"].create_dataset(name="unit_cell", data=spice_logs.attrs["latticeconstants"], maxshape=None) + nxentry["sample/unit_cell"].attrs["type"] = "NX_FLOAT" + nxentry["sample/unit_cell"].attrs["EX_required"] = "true" + # nxentry["sample/unit_cell"].attrs["units"] = "NX_LENGTH" + + nxentry["sample"].create_dataset(name="orientation_matrix", data=spice_logs.attrs["ubmatrix"], maxshape=None) + nxentry["sample/orientation_matrix"].attrs["type"] = "NX_FLOAT" + nxentry["sample/orientation_matrix"].attrs["EX_required"] = "true" + nxentry["sample/orientation_matrix"].attrs["units"] = "NX_DIMENSIONLESS" + + nxentry["sample"].create_dataset(name="ub_conf", data=spice_logs.attrs["ubconf"].split(".")[0], maxshape=None) + nxentry["sample/ub_conf"].attrs["type"] = "NX_CHAR" + + nxentry["sample"].create_dataset(name="plane_normal", data=spice_logs.attrs["plane_normal"], maxshape=None) + nxentry["sample/plane_normal"].attrs["type"] = "NX_FLOAT" + + nxentry["sample"].create_dataset(name="q", data=spice_logs["q"], maxshape=None) + nxentry["sample/q"].attrs["type"] = "NX_FLOAT" + nxentry["sample/q"].attrs["units"] = "Angstrom^-1" + + nxentry["sample"].create_dataset(name="stu", data=spice_logs["stu"], maxshape=None) + nxentry["sample/stu"].attrs["type"] = "NX_FLOAT" + nxentry["sample/stu"].attrs["units"] = "degrees" + + nxentry["sample"].create_dataset(name="stl", data=spice_logs["stl"], maxshape=None) + nxentry["sample/stl"].attrs["type"] = "NX_FLOAT" + nxentry["sample/stl"].attrs["units"] = "degrees" + + nxentry["sample"].create_dataset(name="s1", data=spice_logs["s1"], maxshape=None) + nxentry["sample/s1"].attrs["type"] = "NX_FLOAT" + nxentry["sample/s1"].attrs["units"] = "degrees" + + nxentry["sample"].create_dataset(name="s2", data=spice_logs["s2"], maxshape=None) + nxentry["sample/s2"].attrs["type"] = "NX_FLOAT" + nxentry["sample/s2"].attrs["units"] = "degrees" + + nxentry["sample"].create_dataset(name="type", data=spice_logs.attrs["sampletype"], maxshape=None) + nxentry["sample/type"].attrs["type"] = "NX_CHAR" + + nxentry["sample"].create_dataset(name="sense", data=spice_logs.attrs["sense"][1], maxshape=None) + nxentry["sample"].attrs["type"] = "NX_CHAR" + + nxentry["sample"].create_dataset(name="Pt.", data=spice_logs["Pt."], maxshape=None) + nxentry["sample"].attrs["type"] = "NX_CHAR" + + # nxentry["sample"].create_dataset(name="rotation_angle", data=1.0, maxshape=None) + # nxentry["sample/rotation_angle"].attrs["type"] = "NX_FLOAT" + # nxentry["sample/rotation_angle"].attrs["EX_required"] = "true" + # nxentry["sample/rotation_angle"].attrs["units"] = "NX_ANGLE" + + # nxentry["sample"].create_dataset(name="polar_angle", data=1.0, maxshape=None) + # nxentry["sample/polar_angle"].attrs["type"] = "NX_FLOAT" + # nxentry["sample/polar_angle"].attrs["EX_required"] = "true" + # nxentry["sample/polar_angle"].attrs["units"] = "NX_ANGLE" + except KeyError: + pass - nxentry["monitor"].create_dataset(name="data", data=spice_logs[preset_channel], maxshape=None) - nxentry["monitor/data"].attrs["type"] = "NX_FLOAT" - nxentry["monitor/data"].attrs["EX_required"] = "true" - # nxentry["monitor/data"].attrs["units"] = "counts" + try: # --------------------------- monitor --------------------------- + # Valid enumeration values for root['/entry/monitor']['mode'] are:monitor/time/ mcu + if spice_logs.attrs["preset_type"] == "normal": + preset_channel = spice_logs.attrs["preset_channel"] - # TODO polarized exp at HB1 - elif spice_logs.attrs["preset_type"] == 
"countfile": - print("Polarization data, not yet supported.") + nxentry["monitor"].create_dataset(name="mode", data=preset_channel, maxshape=None) + nxentry["monitor/mode"].attrs["type"] = "NX_CHAR" + nxentry["monitor/mode"].attrs["EX_required"] = "true" - else: - print("Unrecogonized preset type. ") + nxentry["monitor"].create_dataset(name="preset", data=spice_logs.attrs["preset_value"], maxshape=None) + nxentry["monitor/preset"].attrs["type"] = "NX_FLOAT" + nxentry["monitor/preset"].attrs["EX_required"] = "true" + + nxentry["monitor"].create_dataset(name="time", data=spice_logs["time"], maxshape=None) + nxentry["monitor/time"].attrs["type"] = "NX_FLOAT" + nxentry["monitor/time"].attrs["units"] = "seconds" + + nxentry["monitor"].create_dataset(name="monitor", data=spice_logs["monitor"], maxshape=None) + nxentry["monitor/monitor"].attrs["type"] = "NX_INT" + nxentry["monitor/monitor"].attrs["units"] = "counts" + + nxentry["monitor"].create_dataset(name="mcu", data=spice_logs["mcu"], maxshape=None) + nxentry["monitor/mcu"].attrs["type"] = "NX_FLOAT" + + nxentry["monitor"].create_dataset(name="data", data=spice_logs[preset_channel], maxshape=None) + nxentry["monitor/data"].attrs["type"] = "NX_FLOAT" + nxentry["monitor/data"].attrs["EX_required"] = "true" + # nxentry["monitor/data"].attrs["units"] = "counts" + + # TODO polarized exp at HB1 + elif spice_logs.attrs["preset_type"] == "countfile": + print("Polarization data, not yet supported.") + + else: + print("Unrecogonized preset type. ") + except KeyError: + pass # --------------------------- data links --------------------------- @@ -563,24 +578,21 @@ def find_val(val, grp, prefix=""): ) + ("lt", "ht", "sorb_temp", "sorb", "sample_ht") ) - for t in temperatue_str: - try: + # TODO field + field_str = ("persistent_field",) + # TODO pressure + try: + for t in temperatue_str: nxentry["sample"].create_dataset(name=t, data=spice_logs[t], maxshape=None) nxentry["sample/" + t].attrs["type"] = "NX_FLOAT" nxentry["sample/" + t].attrs["units"] = "K" - except KeyError: - pass - # TODO field - field_str = ("persistent_field",) - for f in field_str: - try: + for f in field_str: nxentry["sample"].create_dataset(name=f, data=spice_logs[f], maxshape=None) nxentry["sample/" + t].attrs["type"] = "NX_FLOAT" nxentry["sample/" + t].attrs["units"] = "T" - except KeyError: - pass - # TODO pressure + except KeyError: + pass # TODO @@ -770,98 +782,74 @@ def instrument_info_to_nexus(nxentry, instrument_params): nxentry["sample/mosaic_v"].attrs["units"] = "min" -def convert_spice_to_nexus(path_to_spice_folder, path_to_hdf5): +def convert_spice_to_nexus( + path_to_spice_folder: str, + path_to_hdf5: str, +) -> None: """Load data from spice folder. 
Convert to a nexus file Args: path_to_spice_folder (str): spice folder, ends with '/' path_to_nexus (str): path to hdf5 data file, ends with '.h5' - instrument_config: python file contains instrument configuration parameters """ print(f"Converting {path_to_spice_folder} to {path_to_hdf5}") p = Path(path_to_spice_folder) + ub_files = sorted((p / "UBConf").glob("*.ini")) + tmp_ub_files = sorted((p / "UBConf/tmp").glob("*.ini")) + ub_conf_dicts = {ub_file.parts[-1].split(".")[0]: _read_spice_ub(ub_file) for ub_file in ub_files + tmp_ub_files} + + scans = sorted((p / "Datafiles").glob("*.dat")) + instrument_str = scans[0].parts[-1].split("_")[0] + # read in exp_info from the first scan and save as attributes of the file + _, _, headers, _ = _read_spice(scans[0]) + ipts = headers["proposal"] + exp_num = headers["experiment_number"] + with h5py.File(path_to_hdf5, "w") as root: # ----------------------------- ub info ------------------------------------ - ub_files = sorted((p / "UBConf").glob("*.ini")) - tmp_ub_files = sorted((p / "UBConf/tmp").glob("*.ini")) - ub_entries = root.create_group("UBConf") + ub_entries = root.create_group("UBConf", track_order=True) ub_entries.attrs["NX_class"] = "NXcollection" ub_entries.attrs["X_required"] = "false" - for ub_file in ub_files + tmp_ub_files: - ub_entry_name = ub_file.parts[-1].split(".")[0] - ub_entry = ub_entries.create_group(ub_entry_name) - ub_conf = read_spice_ub(ub_file) - for ub_item in ub_conf.items(): - k, v = ub_item + for ub_name, ub_data in ub_conf_dicts.items(): + ub_entry = ub_entries.create_group(ub_name, track_order=True) + for k, v in ub_data.items(): ub_entry.create_dataset(name=k, data=v, maxshape=None) - # --------------------------- scan info ------------------------------------ - scans = sorted((p / "Datafiles").glob("*.dat")) - instrument_str = scans[0].parts[-1].split("_")[0] - - # read in exp_info from the first scan and save as attibutes of the file - _, _, headers, _ = read_spice(scans[0]) - ipts = headers["proposal"] - exp_num = headers["experiment_number"] - - # root.attrs["name"] = headers["experiment"] - # root.attrs["users"] = headers["users"] - # root.attrs["local_contact"] = headers["local_contact"] - - # convert SPICE scans into nexus entries for scan in scans: # ignoring unused keys - spice_data, col_headers, headers, unused = read_spice(scan) + spice_data, col_headers, headers, unused = _read_spice(scan) + formatted_headers = _format_spice_header(headers) - # pre-processing + # create SPICElogs scan_num = ((scan.parts[-1].split("_"))[-1]).split(".")[0] # e.g. 
"scan0001" - if "scan_title" in unused: - headers["scan_title"] = "" - - # /entry/SPICElogs0 nxentry = root.create_group(scan_num) spice_logs = nxentry.create_group("SPICElogs") spice_logs.attrs["NX_class"] = "NXcollection" spice_logs.attrs["X_required"] = "false" spice_logs.attrs["instrument"] = instrument_str - # metadata to attibutes - exp_str = ["scan_title", "users", "local_contact", "experiment"] - for k, v in headers.items(): - if "," in v and k not in exp_str: # vectors - spice_logs.attrs[k] = np.array([float(v0) for v0 in v.split(",")]) - elif v.replace(".", "").isnumeric(): # numebrs only - if v.isdigit(): # int - spice_logs.attrs[k] = int(v) - else: # float - spice_logs.attrs[k] = float(v) - # separate COM/FWHM and its errorbar - elif k == "Center of Mass": - com, e_com = v.split("+/-") - spice_logs.attrs["COM"] = float(com) - spice_logs.attrs["COM_err"] = float(e_com) - elif k == "Full Width Half-Maximum": - fwhm, e_fwhm = v.split("+/-") - spice_logs.attrs["FWHM"] = float(fwhm) - spice_logs.attrs["FWHM_err"] = float(e_fwhm) - else: # other crap, keep as is - spice_logs.attrs[k] = v - - # motor position table to datasets - if spice_data.ndim == 1: # empty data or 1 point only - if len(spice_data): # 1 point only + # write SPICElogs attributes + for k, v in formatted_headers.items(): + spice_logs.attrs[k] = v + + # write SPICElogs datasets + spice_data_shape = spice_data.shape + if len(spice_data_shape) == 1: + if spice_data_shape[0] == 1: # 1 row ony for idx, col_header in enumerate(col_headers): spice_logs.create_dataset(col_header, data=spice_data[idx]) - else: # empty + else: # ignore if empty pass - else: # nomarl data + elif len(spice_data_shape) == 2: # nomarl data with mutiple rows + print(scan_num) + print(spice_data.shape) for idx, col_header in enumerate(col_headers): spice_logs.create_dataset(col_header, data=spice_data[:, idx]) + _spicelogs_to_nexus(nxentry) - spicelogs_to_nexus(nxentry) # instrument_info_to_nexus(nxentry, instrument_config) # Create the ATTRIBUTES diff --git a/src/tavi/data/tavi.py b/src/tavi/data/tavi.py index a3b5ca19..c2f005d6 100644 --- a/src/tavi/data/tavi.py +++ b/src/tavi/data/tavi.py @@ -31,14 +31,15 @@ def __init__(self): def new_tavi_file(self, file_path): """Create a new tavi file""" self.file_path = file_path - h5py.get_config().track_order = True - with h5py.File(file_path, "w") as root: - root.create_group("data") - root.create_group("processed_data") - root.create_group( - "fits", - ) - root.create_group("plots") + + try: + with h5py.File(file_path, "w", track_order=True) as root: + root.create_group("data", track_order=True) + root.create_group("processed_data", track_order=True) + root.create_group("fits", track_order=True) + root.create_group("plots", track_order=True) + except OSError: + print(f"Cannot create tavi file at {file_path}") def load_nexus_data_from_disk(self, path_to_hdf5): """Load hdf5 data from path_to_hdf5. 
diff --git a/test_data/nexus_exp1031.h5 b/test_data/nexus_exp1031.h5 index b23e38ec..aaf7196e 100644 Binary files a/test_data/nexus_exp1031.h5 and b/test_data/nexus_exp1031.h5 differ diff --git a/test_data/nexus_exp416.h5 b/test_data/nexus_exp416.h5 index 110b241c..d908f570 100644 Binary files a/test_data/nexus_exp416.h5 and b/test_data/nexus_exp416.h5 differ diff --git a/test_data/nexus_exp424.h5 b/test_data/nexus_exp424.h5 index 5c28a7e4..6818d443 100644 Binary files a/test_data/nexus_exp424.h5 and b/test_data/nexus_exp424.h5 differ diff --git a/test_data/nexus_exp710.h5 b/test_data/nexus_exp710.h5 index e847afae..7f6c17f2 100644 Binary files a/test_data/nexus_exp710.h5 and b/test_data/nexus_exp710.h5 differ diff --git a/test_data/nexus_exp758.h5 b/test_data/nexus_exp758.h5 deleted file mode 100644 index 436801f0..00000000 Binary files a/test_data/nexus_exp758.h5 and /dev/null differ diff --git a/test_data/nexus_exp815.h5 b/test_data/nexus_exp815.h5 new file mode 100644 index 00000000..90f6e7b7 Binary files /dev/null and b/test_data/nexus_exp815.h5 differ diff --git a/test_data/nexus_exp932.h5 b/test_data/nexus_exp932.h5 index 0ec0c7b1..4d1d404f 100644 Binary files a/test_data/nexus_exp932.h5 and b/test_data/nexus_exp932.h5 differ diff --git a/test_data/tavi_test.h5 b/test_data/tavi_test.h5 index e69de29b..aba89906 100644 Binary files a/test_data/tavi_test.h5 and b/test_data/tavi_test.h5 differ diff --git a/test_data/tavi_test_exp1031.h5 b/test_data/tavi_test_exp1031.h5 deleted file mode 100644 index e9fd3f21..00000000 Binary files a/test_data/tavi_test_exp1031.h5 and /dev/null differ diff --git a/test_data/tavi_test_exp410.h5 b/test_data/tavi_test_exp410.h5 deleted file mode 100644 index df05c7ba..00000000 Binary files a/test_data/tavi_test_exp410.h5 and /dev/null differ diff --git a/test_data/tavi_test_exp424.h5 b/test_data/tavi_test_exp424.h5 deleted file mode 100644 index 67eb6fec..00000000 Binary files a/test_data/tavi_test_exp424.h5 and /dev/null differ diff --git a/test_data/tavi_test_exp710.h5 b/test_data/tavi_test_exp710.h5 deleted file mode 100644 index f1892db1..00000000 Binary files a/test_data/tavi_test_exp710.h5 and /dev/null differ diff --git a/tests/test_tavi.py b/tests/test_tavi.py deleted file mode 100644 index 8cd29b1a..00000000 --- a/tests/test_tavi.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -* -import h5py - -from tavi.data.tavi import TAVI - - -def test_new_tavi_file(): - tavi = TAVI() - tavi_file_name = "./test_data/tavi_test.h5" - tavi.new_tavi_file(tavi_file_name) - - with h5py.File(tavi_file_name, "r") as f: - keys = f.keys() - - assert keys[1] == "processed_data" diff --git a/tests/test_tavi_data.py b/tests/test_tavi_data.py new file mode 100644 index 00000000..a75160bd --- /dev/null +++ b/tests/test_tavi_data.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -* +import h5py +import numpy as np + +from tavi.data.spice_to_nexus import _format_spice_header, _read_spice, _read_spice_ub, convert_spice_to_nexus +from tavi.data.tavi import TAVI + + +def test_read_spice(): + spice_file = "./test_data/exp424/Datafiles/CG4C_exp0424_scan0001.dat" + spice_data, col_headers, headers, unused = _read_spice(spice_file) + assert spice_data.shape == (2, 55) + assert headers["scan_title"] == "" + assert len(unused) == 0 + + # unfinished scan + spice_file = "./test_data/exp416/Datafiles/CG4C_exp0416_scan0050.dat" + spice_data, col_headers, headers, unused = _read_spice(spice_file) + assert spice_data.shape == (16, 56) + + +def test_read_spice_ub(): + spice_ub_file = 
"./test_data/exp424/UBConf/UB02Jul2024_14108PM.ini" + ubconf = _read_spice_ub(spice_ub_file) + assert np.allclose(ubconf["Energy"], 4.8) + assert len(ubconf) == 13 + + +def test_format_spice_header(): + spice_file = "./test_data/exp424/Datafiles/CG4C_exp0424_scan0001.dat" + _, _, headers, _ = _read_spice(spice_file) + formatted_headers = _format_spice_header(headers) + assert "COM" in formatted_headers.keys() + assert isinstance(formatted_headers["scan"], int) + + +def test_spice_to_nexus_conversion(): + exp_nums = [416, 424, 710, 815, 932, 1031] + + for exp_num in exp_nums: + spice_folder = f"./test_data/exp{exp_num}/" + nexus_file_name = f"./test_data/nexus_exp{exp_num}.h5" + convert_spice_to_nexus(spice_folder, nexus_file_name) + + +def test_new_tavi_file(): + tavi = TAVI() + tavi_file_name = "./test_data/tavi_test.h5" + tavi.new_tavi_file(tavi_file_name) + + with h5py.File(tavi_file_name, "r") as f: + keys = [key for key in f["/"].keys()] + # check if groups preserves the order + assert keys[0] == "data" + assert keys[1] == "processed_data" + assert keys[2] == "fits" + assert keys[3] == "plots"