| 1 | +# -*- coding: utf-8 -*- |
| 2 | +import json |
| 3 | +import os |
| 4 | +from datetime import datetime |
| 5 | +from pathlib import Path |
1 | 6 | from typing import Optional
2 | 7 |
3 | 8 | import numpy as np
4 | 9 |
| 10 | +from tavi.data.spice_reader import _create_spicelogs, read_spice_datafile |
5 | 11 |
6 | | -class NexusDict(object): |
7 | | -    """Read in dictionaries from DAS logs, instrument configuration json or sample json, |
8 | | -    format into NeXus style: nxentry_dict = {"attrs":{"attr1":attr1, ...}, "dataset":dataset} |
9 | 12 |
10 | | -    Attributes: |
11 | | -        daslogs_dict (dict) |
12 | | -        instrument_dict (dict) |
13 | | -        sample_dict (dict) |
14 | | -        nxentry_dict (dict) |
| 13 | +class NXdataset(dict): |
| 14 | +    """Dataset in a format consistent with NeXus, containing attrs and dataset""" |
15 | 15 |
| 16 | + def __init__(self, ds, **kwargs): |
| 17 | +        if ds is None:  # leave the dict empty so that empty datasets can be skipped later |
| 18 | +            return None |
16 | 19 |
17 | | -    Methods: |
18 | | -        set_attrs() |
19 | | -        get_attrs() |
20 | | -        get_dataset() |
21 | | -        set_dataset() |
| 20 | + attr_dict = {} |
| 21 | + for k, v in kwargs.items(): |
| 22 | + attr_dict.update({k: v}) |
22 | 23 |
23 | | -    """ |
| 24 | + match kwargs: |
| 25 | + case {"type": "NX_CHAR"}: |
| 26 | + dataset = str(ds) |
| 27 | + case {"type": "NX_INT"}: |
| 28 | + dataset = np.array([int(d) for d in ds]) |
| 29 | + case {"type": "NX_FLOAT"}: |
| 30 | + dataset = np.array([float(d) for d in ds]) |
| 31 | + case _: |
| 32 | + dataset = ds |
24 | 33 |
25 | | -    def __init__( |
26 | | -        self, |
27 | | -        daslogs_dict: Optional[dict] = None, |
28 | | -        instrument_dict: Optional[dict] = None, |
29 | | -        sample_dict: Optional[dict] = None, |
30 | | -    ) -> None: |
31 | | -        self.daslogs_dict = daslogs_dict |
32 | | -        self.instrument_dict = instrument_dict |
33 | | -        self.sample_dict = sample_dict |
34 | | -        self.nxentry_dict: dict = {"attrs": {}} |
35 | | - |
36 | | -    def set_attrs(self, **kwargs): |
37 | | -        for k, v in kwargs.items(): |
38 | | -            self.nxentry_dict["attrs"].update({k: v}) |
| 34 | + return super().__init__([("dataset", dataset), ("attrs", attr_dict)]) |
| 35 | + |
| 36 | + def get_attr(self, key: str, default=None): |
| 37 | + val = self["attrs"].get(key) |
| 38 | + return val if val is not None else default |
39 | 39 |
40 | | -    def get_attrs(self, key: str, default=None): |
41 | | -        val = self.nxentry_dict["attrs"].get(key) |
| 40 | + def get_dataset(self, default=None): |
| 41 | + val = self["dataset"] |
42 | 42 | return val if val is not None else default
43 | 43 |
44 | | -    def get_dataset(self, key: str, default=None): |
45 | | -        val = self.nxentry_dict.get(key) |
46 | | -        return val["dataset"] if val is not None else default |
47 | 44 |
48 | | -    def set_dataset(self, key: str, dataset: str | dict | np.ndarray, **kwargs): |
49 | | -        """Set dataset with proper format if dataset is a string or an array. Take directly if dictionary.""" |
| 45 | +class NXentry(dict): |
| 46 | + """Entry in a format consistent with NeXus""" |
50 | 47 |
| 48 | +    def __init__(self, **kwargs): |
51 | 49 |         attr_dict = {}
| 50 | +        dataset_list = [] |
52 | 51 |         for k, v in kwargs.items():
53 | | -            attr_dict.update({k: v}) |
54 | | -        if "type" in kwargs.keys(): |
55 | | -            match kwargs["type"]: |
56 | | -                case "NX_CHAR": |
57 | | -                    dataset = str(dataset) |
58 | | -                case "NX_INT": |
59 | | -                    dataset = dataset |
60 | | -                case "NX_FLOAT": |
61 | | -                    dataset = dataset |
62 | | - |
63 | | -        self.nxentry_dict.update({key: {"dataset": dataset}}) |
64 | | -        self.nxentry_dict[key].update({"attrs": attr_dict}) |
65 | | - |
66 | | -    def set_dataset_from( |
67 | | -        self, |
68 | | -        key: str, |
69 | | -        source: Literal["DAS_DATA", "DAS_METADATA", "INSTRU", "SAMPLE"] = "DAS_DATA", |
70 | | -        daslogs_key: Optional[str] = None, |
71 | | -        **kwargs, |
72 | | -    ): |
73 | | -        match source: |
74 | | -            case "DAS_DATA": |
75 | | -                if self.daslogs_dict is None: |
76 | | -                    raise ValueError("Cannot find DAS logs.") |
77 | | -            case "DAS_METADATA": |
78 | | -                if self.daslogs_dict is None: |
79 | | -                    raise ValueError("Cannot find DAS logs.") |
80 | | -            case "INSTRU": |
81 | | -                if self.instrument_dict is None: |
82 | | -                    raise ValueError("Cannot find instrument configuration dict.") |
83 | | -            case "SMAPLE": |
84 | | -                if self.sample_config_params is None: |
85 | | -                    raise ValueError("Cannot find sample configuration dict.") |
86 | | -            case _: |
87 | | -                raise ValueError(f"Unrecogonized source {source}.") |
88 | | - |
89 | | -        match source: |
90 | | -            case "DAS_DATA": |
91 | | -                try: |
92 | | -                    val = self.daslogs_dict[key] if daslogs_key is None else self.daslogs_dict[daslogs_key] |
93 | | -                except KeyError: |
94 | | -                    print(f"Variable {key} cannot be found in DAS logs.") |
95 | | -                    return self.nxentry_dict |
96 | | - |
97 | | -                self.nxentry_dict.update({key: val}) |
98 | | -                if not kwargs: |
99 | | -                    return self.nxentry_dict |
100 | | - |
101 | | -                attr_dict = {} |
102 | | -                for k, v in kwargs.items(): |
103 | | -                    attr_dict.update({k: v}) |
104 | | -                self.nxentry_dict[key].update({"attrs": attr_dict}) |
105 | | -                return self.nxentry_dict |
106 | | -            case "DAS_METADATA": |
107 | | -                pass |
108 | | -            case "INSTRU": |
109 | | -                pass |
110 | | -            case "SMAPLE": |
111 | | -                pass |
112 | | - |
113 | | - |
114 | | -def spicelogs_to_nested_dict( |
115 | | -    spicelogs: dict, |
116 | | -    instrument_dict: Optional[dict], |
117 | | -    sample_dict: Optional[dict], |
| 52 | + if isinstance(v, NXdataset) or isinstance(v, NXentry): |
| 53 | + if not v: # ignore if empty |
| 54 | + pass |
| 55 | + else: |
| 56 | + dataset_list.append((k, v)) |
| 57 | + else: # must be an attribute |
| 58 | + attr_dict.update({k: v}) |
| 59 | + return super().__init__([("attrs", attr_dict)] + dataset_list) |
| 60 | + |
| 61 | + def add_dataset(self, key: str, ds: NXdataset): |
| 62 | + if not ds: # ignore if empty |
| 63 | + pass |
| 64 | + else: |
| 65 | + self.update({key: ds}) |
| 66 | + |
| 67 | + |
| 68 | +def spice_scan_to_nxdict( |
| 69 | + path_to_scan_file: str, |
| 70 | + path_to_instrument_json: Optional[str] = None, |
| 71 | + path_to_sample_json: Optional[str] = None, |
| 72 | +) -> NXentry: |
| 73 | + """Format SPICE data in a nested dictionary format""" |
| 74 | + |
| 75 | +    # parse instrument and sample json files |
| 76 | + instrument_config_params = None |
| 77 | + if path_to_instrument_json is not None: |
| 78 | + instrument_config = Path(path_to_instrument_json) |
| 79 | + if not instrument_config.is_file(): |
| 80 | + raise ValueError(f"Invalid instrument json file path: {path_to_instrument_json}") |
| 81 | + with open(instrument_config, "r", encoding="utf-8") as file: |
| 82 | + instrument_config_params = json.load(file) |
| 83 | + |
| 84 | + sample_config_params = None |
| 85 | + if path_to_sample_json is not None: |
| 86 | + sample_config = Path(path_to_sample_json) |
| 87 | + if not sample_config.is_file(): |
| 88 | +            raise ValueError(f"Invalid sample json file path: {path_to_sample_json}") |
| 89 | + with open(sample_config, "r", encoding="utf-8") as file: |
| 90 | + sample_config_params = json.load(file) |
| 91 | + |
| 92 | + spicelogs = _create_spicelogs(path_to_scan_file) |
| 93 | + metadata = spicelogs["metadata"] |
| 94 | + |
| 95 | + nxsource = NXentry( |
| 96 | + name=NXdataset(ds="HFIR", type="NX_CHAR", EX_required="true"), |
| 97 | + probe=NXdataset(ds="neutron", type="NX_CHAR", EX_required="true"), |
| 98 | + NX_class="NXsource", |
| 99 | + EX_required="true", |
| 100 | + ) |
| 101 | + |
| 102 | + nxmono = NXentry( |
| 103 | + ei=NXdataset(ds=spicelogs.get("ei"), type="NX_FLOAT", EX_required="true", units="meV"), |
| 104 | + type=NXdataset(ds=metadata.get("monochromator"), type="NX_CHAR"), |
| 105 | +        sense=NXdataset(ds=metadata["sense"][0], type="NX_CHAR"), |
| 106 | + m1=NXdataset(ds=spicelogs.get("m1"), type="NX_FLOAT", units="degrees"), |
| 107 | + m2=NXdataset(ds=spicelogs.get("m2"), type="NX_FLOAT", units="degrees"), |
| 108 | + NX_class="NXcrystal", |
| 109 | + EX_required="true", |
| 110 | + ) |
| 111 | + nxmono.add_dataset("mfocus", NXdataset(ds=spicelogs.get("mfocus"), type="NX_FLOAT")) |
| 112 | + nxmono.add_dataset("marc", NXdataset(ds=spicelogs.get("marc"), type="NX_FLOAT")) |
| 113 | + nxmono.add_dataset("mtrans", NXdataset(ds=spicelogs.get("mtrans"), type="NX_FLOAT")) |
| 114 | + nxmono.add_dataset("focal_length", NXdataset(ds=spicelogs.get("focal_length"), type="NX_FLOAT")) |
| 115 | + |
| 116 | + nxana = NXentry( |
| 117 | + ef=NXdataset(ds=spicelogs.get("ef"), type="NX_FLOAT", EX_required="true", units="meV"), |
| 118 | + type=NXdataset(ds=metadata.get("analyzer"), type="NX_CHAR"), |
| 119 | + sense=NXdataset(ds=metadata["sense"][2], type="NX_CHAR"), |
| 120 | + a1=NXdataset(ds=spicelogs.get("a1"), type="NX_FLOAT", units="degrees"), |
| 121 | + a2=NXdataset(ds=spicelogs.get("a2"), type="NX_FLOAT", units="degrees"), |
| 122 | + afocus=NXdataset(ds=spicelogs.get("afocus"), type="NX_FLOAT"), |
| 123 | + NX_class="NXcrystal", |
| 124 | + EX_required="true", |
| 125 | + ) |
| 126 | + for i in range(8): |
| 127 | + nxana.add_dataset(key=f"qm{i+1}", ds=NXdataset(ds=spicelogs.get(f"qm{i+1}"), type="NX_FLOAT")) |
| 128 | + nxana.add_dataset(key=f"xm{i+1}", ds=NXdataset(ds=spicelogs.get(f"xm{i+1}"), type="NX_FLOAT")) |
| 129 | + |
| 130 | + nxdet = NXentry( |
| 131 | + data=NXdataset(ds=spicelogs.get("detector"), type="NX_INT", EX_required="true", units="counts"), |
| 132 | + NX_class="NXdetector", |
| 133 | + EX_required="true", |
| 134 | + ) |
| 135 | + |
| 136 | + nxinstrument = NXentry( |
| 137 | + source=nxsource, |
| 138 | + monochromator=nxmono, |
| 139 | + analyzer=nxana, |
| 140 | + detector=nxdet, |
| 141 | + name=NXdataset(ds=metadata.get("instrument"), type="NX_CHAR"), |
| 142 | + NX_class="NXinstrument", |
| 143 | + EX_required="true", |
| 144 | + ) |
| 145 | + preset_type = metadata.get("preset_type") |
| 146 | + if preset_type == "normal": |
| 147 | + preset_channel = metadata.get("preset_channel") |
| 148 | + nxmonitor = NXentry( |
| 149 | + mode=NXdataset(ds=preset_channel, type="NX_CHAR", EX_required="true"), |
| 150 | + preset=NXdataset(ds=metadata.get("preset_value"), type="NX_FLOAT", EX_required="true"), |
| 151 | + time=NXdataset(ds=spicelogs.get("time"), type="NX_FLOAT", units="seconds"), |
| 152 | + monitor=NXdataset(ds=spicelogs.get("monitor"), type="NX_INT", units="counts"), |
| 153 | + mcu=NXdataset(ds=spicelogs.get("mcu"), type="NX_FLOAT", units="mcu"), |
| 154 | + # TODO link to the right channel |
| 155 | + # data=NXdataset(), |
| 156 | + NX_class="NXmonitor", |
| 157 | + EX_required="true", |
| 158 | + ) |
| 159 | + # TODO polarized exp at HB1 |
| 160 | + elif preset_type == "countfile": |
| 161 | + print("Polarization data, not yet supported.") |
| 162 | + nxmonitor = NXentry(NX_class="NXmonitor", EX_required="true") |
| 163 | + else: |
| 164 | +        print(f"Unrecognized preset type {preset_type}.") |
| 165 | + nxmonitor = NXentry(NX_class="NXmonitor", EX_required="true") |
| 166 | + |
| 167 | + # ------------------------------------------------------------------ |
| 168 | + nxsample = NXentry(NX_class="NXsample", EX_required="true") |
| 169 | + |
| 170 | + # TODO all sample environment variable names needed! |
| 171 | +    temperature_str = ( |
| 172 | + ("temp", "temp_a", "temp_2", "coldtip", "tsample", "sample") |
| 173 | + + ("vti", "dr_tsample", "dr_temp") |
| 174 | + + ("lt", "ht", "sorb_temp", "sorb", "sample_ht") |
| 175 | + ) |
| 176 | + |
| 177 | + field_str = ("persistent_field", "mag_i") |
| 178 | + |
| 179 | + # TODO timezone |
| 180 | + start_date_time = "{} {}".format(metadata.get("date"), metadata.get("time")) |
| 181 | + start_time = datetime.strptime(start_date_time, "%m/%d/%Y %I:%M:%S %p").isoformat() |
| 182 | +    # TODO what if the last scan never finished? |
| 183 | + # if "end_time" in das_logs.attrs: |
| 184 | + end_date_time = metadata.get("end_time") |
| 185 | + end_time = datetime.strptime(end_date_time, "%m/%d/%Y %I:%M:%S %p").isoformat() |
| 186 | + |
| 187 | + nxscan = NXentry( |
| 188 | + SPICElogs=spicelogs, |
| 189 | + definition=NXdataset(ds="NXtas", type="NX_CHAR", EX_required="true"), |
| 190 | + title=NXdataset(ds=metadata.get("scan_title"), type="NX_CHAR", EX_required="true"), |
| 191 | + start_time=NXdataset(ds=start_time, type="NX_DATE_TIME", EX_required="true"), |
| 192 | + end_time=NXdataset(ds=end_time, type="NX_DATE_TIME"), |
| 193 | + instrument=nxinstrument, |
| 194 | + monitor=nxmonitor, |
| 195 | + NX_class="NXentry", |
| 196 | + EX_required="true", |
| 197 | + ) |
| 198 | + return nxscan |
| 199 | + |
| 200 | + |
| 201 | +def spice_data_to_nxdict( |
| 202 | + path_to_spice_folder: str, |
| 203 | + scan_num: Optional[int] = None, |
| 204 | + path_to_instrument_json: Optional[str] = None, |
| 205 | + path_to_sample_json: Optional[str] = None, |
118 | 206 | ) -> dict:
| 207 | +    """Read SPICE data files into nested dictionary, ready to be converted to NeXus format |
119 | 208 |
120 | | -    nxsource = NexusDict() |
121 | | -    nxsource.set_attrs(NX_class="NXsource", EX_required="true") |
122 | | -    nxsource.set_dataset(key="name", dataset="HFIR", type="NX_CHAR", EX_required="true") |
123 | | -    nxsource.set_dataset(key="probe", dataset="neutron", type="NX_CHAR", EX_required="true") |
124 | | - |
125 | | -    # Effective distance from sample Distance as seen by radiation from sample. |
126 | | -    # This number should be negative to signify that it is upstream of the sample. |
127 | | -    # nxsource.attrs["distance"] = -0.0 |
128 | | - |
129 | | -    nxmono = NexusDict(daslogs_dict=spicelogs) |
130 | | -    nxmono.set_attrs(NX_class="NXcrystal", EX_required="true") |
131 | | -    # nxmono.set_dataset_from(source="DAS_DATA", key="ei", type="NX_FLOAT", EX_required="true", unit="meV") |
132 | | - |
133 | | -    return {} |
| 209 | + Args: |
| 210 | + path_to_spice_folder (str): path to a SPICE folder |
| 211 | +        scan_num (int): if None, read all scans in the folder; otherwise read only the scan with this number |
| 212 | +        path_to_instrument_json (str): path to the instrument configuration json file |
| 213 | +        path_to_sample_json (str): path to the sample json file |
| 214 | + """ |
| 215 | + if path_to_spice_folder[-1] != "/": |
| 216 | + path_to_spice_folder += "/" |
| 217 | + |
| 218 | + scan_list = os.listdir(path_to_spice_folder + "Datafiles/") |
| 219 | + if scan_num is None: # read all scans in folder |
| 220 | + filter_keyword = ".dat" |
| 221 | + else: # read one scan only |
| 222 | + filter_keyword = f"scan{scan_num:04}.dat" |
| 223 | + |
| 224 | + scan_list = [path_to_spice_folder + "Datafiles/" + scan for scan in scan_list if scan.endswith(filter_keyword)] |
| 225 | + scan_list.sort() |
| 226 | + |
| 227 | + # get IPTS number and instrument string |
| 228 | + first_scan = scan_list[0] |
| 229 | + (_, _, headers, _, _) = read_spice_datafile(first_scan) |
| 230 | + ipts = headers["proposal"] |
| 231 | + spice_file_name = first_scan.split("/")[-1] # e.g. CG4C_exp0424_scan0001.dat |
| 232 | + instrument_str, exp_num, _ = spice_file_name.split("_") |
| 233 | + dataset_name = f"IPTS{ipts}_{instrument_str}_{exp_num}" |
| 234 | + |
| 235 | + nexus_dict = {} |
| 236 | + for path_to_scan_file in scan_list: |
| 237 | +        # NOTE: scan name assumed to be the trailing part of the file name, e.g. "scan0001" |
| 238 | +        scan_name = Path(path_to_scan_file).stem.split("_")[-1] |
| 239 | +        scan_dict = spice_scan_to_nxdict( |
| 240 | +            path_to_scan_file, |
| 241 | +            path_to_instrument_json, |
| 242 | +            path_to_sample_json, |
| 243 | +        ) |
| 244 | + |
| 245 | +        nexus_dict.update({scan_name: scan_dict}) |
| 246 | + nexus_dict[scan_name]["attrs"].update({"dataset_name": dataset_name}) |
| 247 | + |
| 248 | + return nexus_dict |
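
A minimal usage sketch of the new containers (not part of the diff). The module path `tavi.data.nexus_builder` and all values below are assumptions for illustration; the point is the `{"dataset": ..., "attrs": ...}` nesting that `NXdataset` and `NXentry` produce.

```python
# Usage sketch only -- module path and values are assumptions, not part of this PR.
from tavi.data.nexus_builder import NXdataset, NXentry  # hypothetical module path

# A dataset with NeXus-style attributes; type="NX_FLOAT" converts ds to a float array.
ei = NXdataset(ds=[4.8, 4.8, 4.8], type="NX_FLOAT", units="meV")
print(ei["dataset"])         # [4.8 4.8 4.8]
print(ei.get_attr("units"))  # meV

# An entry keeps NXdataset/NXentry children as items; plain keyword values become attributes.
mono = NXentry(ei=ei, NX_class="NXcrystal", EX_required="true")
print(mono["attrs"])                 # {'NX_class': 'NXcrystal', 'EX_required': 'true'}
print(mono["ei"].get_attr("units"))  # meV

# Empty datasets (ds=None) are silently skipped by add_dataset, which is how the
# monochromator block handles optional motors such as "mfocus".
mono.add_dataset("mfocus", NXdataset(ds=None, type="NX_FLOAT"))
print("mfocus" in mono)  # False
```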
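A similar sketch for the folder-level entry point; the folder path and scan number are hypothetical, and the folder is expected to contain the `Datafiles/` subfolder that `spice_data_to_nxdict` looks for.

```python
# Usage sketch only -- paths, scan number, and module path are assumptions.
from tavi.data.nexus_builder import spice_data_to_nxdict  # hypothetical module path

nexus_dict = spice_data_to_nxdict(
    path_to_spice_folder="/path/to/exp424/",  # must contain a Datafiles/ subfolder
    scan_num=34,                              # None would convert every scan in the folder
)

# Each entry is keyed by its scan name and tagged with the IPTS/instrument dataset name.
for scan_name, scan in nexus_dict.items():
    print(scan_name, scan["attrs"]["dataset_name"])
```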