From 2155b0277fd0ba99544d1c37c828c3dce8674ec5 Mon Sep 17 00:00:00 2001 From: Daniel Hollarek Date: Thu, 6 Feb 2025 22:11:32 +0100 Subject: [PATCH] Exported processors to opXRD; Updated get_library_version to not use deprecated pkg_resources --- special/contribs.py | 107 ------------ special/processors/ase_db.py | 55 ------- special/processors/cod.py | 76 --------- special/processors/icsd.py | 39 ----- special/processors/opxrd.py | 152 ------------------ special/processors/rruff.py | 90 ----------- special/tools/__init__.py | 0 tests/t_tools/t_spg_converter.py | 7 +- xrdpattern/pattern/pattern.py | 2 +- xrdpattern/pattern/visualization.py | 3 +- .../tools}/__init__.py | 0 .../tools/binary_analyser.py | 0 {special => xrdpattern}/tools/csv_label.py | 0 .../tools/spg_converter.py | 0 .../tools/spg_formulas.txt | 0 xrdpattern/xrd/experiment.py | 6 +- 16 files changed, 10 insertions(+), 527 deletions(-) delete mode 100644 special/contribs.py delete mode 100644 special/processors/ase_db.py delete mode 100644 special/processors/cod.py delete mode 100644 special/processors/icsd.py delete mode 100644 special/processors/opxrd.py delete mode 100644 special/processors/rruff.py delete mode 100644 special/tools/__init__.py rename {special/processors => xrdpattern/tools}/__init__.py (100%) rename {special => xrdpattern}/tools/binary_analyser.py (100%) rename {special => xrdpattern}/tools/csv_label.py (100%) rename {special => xrdpattern}/tools/spg_converter.py (100%) rename {special => xrdpattern}/tools/spg_formulas.txt (100%) diff --git a/special/contribs.py b/special/contribs.py deleted file mode 100644 index a353497..0000000 --- a/special/contribs.py +++ /dev/null @@ -1,107 +0,0 @@ -import os -import shutil -import tempfile - -from special.processors.opxrd import OpXRDProcessor -from xrdpattern.xrd import XrayInfo - - -# ------------------------------------------------------------- - -class ContributionProcessor(OpXRDProcessor): - def parse_INT(self): - db0 = self.get_db(dirname='breitung_schweidler_0', suffixes=['raw']) - db1 = self.get_db(dirname='breitung_schweidler_1', suffixes=['raw']) - merged = db0 + db1 - merged.save(dirpath=os.path.join(self.final_dirpath, 'INT')) - - def parse_CNRS(self): - db = self.get_db(dirname='coudert_hardiagon_0', suffixes=['json']) - db.save(dirpath=os.path.join(self.final_dirpath, 'CNRS')) - - - def parse_USC(self): - db0 = self.get_db(dirname='hodge_alwen_0', xray_info=self.cu_xray) - db1 = self.get_db(dirname='hodge_alwen_1', xray_info=self.cu_xray) - merged = db0 + db1 - merged.save(dirpath=self.get_final_dirpath('USC')) - - - def parse_LBNL(self): - perovskite_db = self.get_csv_db(dirname='sutter-fella_singh_0', orientation='horizontal') - perovskite_db += self.get_csv_db(dirname='sutter-fella_kodalle_0', orientation='horizontal', suffixes=['dat','csv']) - xray_info = XrayInfo(primary_wavelength=1.23984, secondary_wavelength=None) - perovskite_db.set_xray(xray_info=xray_info) - perovskite_db.save(dirpath=self.get_final_dirpath('LBNL','A_PEROVSKITES_1')) - - db_B = self.get_csv_db(dirname='sutter-fella_abdelsamie_0', orientation='horizontal') - db_B.set_xray(xray_info=xray_info) - db_B.save(dirpath=self.get_final_dirpath('LBNL','B_PEROVSKITES_2')) - - mn_sb_db = self.get_csv_db(dirname='sutter-fella_heymans_0', suffixes=['xlsx'], orientation='vertical') - mn_sb_db.save(dirpath=self.get_final_dirpath('LBNL','C_MnSbO_annealing')) - - uio_db = self.get_csv_db(dirname='sutter-fella_hu_0', orientation='horizontal') - 
uio_db.save(dirpath=self.get_final_dirpath('LBNL','D_UiO_compounds')) - - - - - def parse_EMPA(self): - db0 = self.get_db(dirname='siol_wieczorek_0', xray_info=self.cu_xray) - db1 = self.get_db(dirname='siol_zhuk_0', xray_info=self.cu_xray) - merged = db0 + db1 - merged.save(dirpath=self.get_final_dirpath('EMPA')) - - - def parse_IKFT(self): - db = self.get_db(dirname='wolf_wolf_0', xray_info=self.cu_xray) - db.save(dirpath=self.get_final_dirpath('IKFT')) - - - def parse_HKUST(self): - db = self.get_db(dirname='zhang_cao_0', use_cif_labels=True, suffixes=['txt'], xray_info=self.cu_xray) - db.save(dirpath=self.get_final_dirpath('HKUST','A_in_house')) - - db1 = self.get_db(dirname='zhang_cao_1', xray_info=self.cu_xray) - db1.save(dirpath=self.get_final_dirpath('HKUST', 'B_accumulated')) - - # -------------------------------------------------------------------- - - def prepare_zips(self): - dir_content_names = os.listdir(self.final_dirpath) - dir_content_paths = [os.path.join(self.final_dirpath, name) for name in dir_content_names] - dirpaths = [d for d in dir_content_paths if os.path.isdir(d)] - - in_situ_dirs = [d for d in dirpaths if 'LBNL' in d] - non_situ_dirs = [d for d in dirpaths if d not in in_situ_dirs] - - print(f'In situ dirs: {in_situ_dirs}') - print(f'Non situ dirs: {non_situ_dirs}') - - in_situ_fpath = os.path.join(self.final_dirpath, 'opxrd_in_situ.zip') - self._zip_dirs(in_situ_dirs, output_fpath=in_situ_fpath) - - non_situ_fpath = os.path.join(self.final_dirpath, 'opxrd.zip') - self._zip_dirs(non_situ_dirs, output_fpath=non_situ_fpath) - - @staticmethod - def _zip_dirs(dirpaths : list[str], output_fpath : str): - if len(dirpaths) == 0: - raise ValueError('No directories to zip') - - tmp_dir = tempfile.mktemp() - for d in dirpaths: - print(f'Copying {d} to {tmp_dir}') - tmp_dirpath = os.path.join(tmp_dir, os.path.basename(d)) - shutil.copytree(d,tmp_dirpath) - print(f'Zipping {tmp_dir} to {output_fpath}') - parts = output_fpath.split('.')[:-1] - output_fpath = '.'.join(parts) - shutil.make_archive(base_name=output_fpath, format='zip', root_dir=tmp_dir) - - -if __name__ == "__main__": - processor = ContributionProcessor(root_dirpath='/home/daniel/aimat/data/opXRD/') - processor.parse_all() - # processor.prepare_zips() \ No newline at end of file diff --git a/special/processors/ase_db.py b/special/processors/ase_db.py deleted file mode 100644 index e8bfa9f..0000000 --- a/special/processors/ase_db.py +++ /dev/null @@ -1,55 +0,0 @@ -import os.path - -import numpy as np -from ase.db import connect -from ase.db.core import Database -from numpy.typing import NDArray - -from xrdpattern.crystal import CrystalPhase, CrystalBase, AtomicSite -from xrdpattern.pattern import XrdPattern -from xrdpattern.xrd import PowderExperiment, XrdAnode - -# ------------------------------------- - -def get_xrdpattern(database: Database, index: int, add_labels : bool = True) -> XrdPattern: - row = database.get(id=index) - two_theta_values = get_as_float_arr('angle', row=row) - intensities = get_as_float_arr('intensity', row=row) - - if add_labels: - atom = database.get_atoms(id=index) - base = make_base(chemical_symbols=atom.get_chemical_symbols(), fract_positions=atom.get_positions()) - a, b, c, alpha, beta, gamma = atom.get_cell_lengths_and_angles().tolist() - phase = CrystalPhase(base=base, lengths=(a, b, c), angles=(alpha, beta, gamma)) - experiment = PowderExperiment(phases=[phase], xray_info=XrdAnode.Cu.get_xray_info()) - p = XrdPattern(two_theta_values=np.array(two_theta_values), 
intensities=np.array(intensities), powder_experiment=experiment) - else: - p = XrdPattern.make_unlabeled(two_theta_values=two_theta_values, intensities=intensities) - - return p - -def get_as_float_arr(name : str, row) -> list[float]: - return eval(getattr(row, name)) - -def make_base(chemical_symbols : list[str], fract_positions : NDArray) -> CrystalBase: - if not len(chemical_symbols) == len(fract_positions): - raise ValueError('The number of chemical symbols and positions must be equal') - - atoms : list[AtomicSite] = [] - for symbol, (x,y,z) in zip(chemical_symbols, fract_positions): - atoms.append(AtomicSite(species_str=symbol, x=x, y=y, z=z, occupancy=1)) - - return CrystalBase(atoms) - -if __name__ == "__main__": - processing_dirpath = '/home/daniel/aimat/data/opXRD/processed/zhang_cao_1' - database_fpath = os.path.join(processing_dirpath,'caobin.db') - print(f'Reading form database at {database_fpath}') - data = connect(database_fpath) - - print(f'Reading data from database containing {data.count()} entries') - for idx in range(1, data.count()+1): - xrdpattern = get_xrdpattern(data, index=idx, add_labels=False) - xrdpattern.save(fpath=os.path.join(processing_dirpath, 'data', f'pattern_{idx}.json'), force_overwrite=True) - print(f'Saved pattern {idx} to file') - diff --git a/special/processors/cod.py b/special/processors/cod.py deleted file mode 100644 index dc1af49..0000000 --- a/special/processors/cod.py +++ /dev/null @@ -1,76 +0,0 @@ -import json -import os -import tempfile - -import numpy as np -import requests - -from xrdpattern.crystal import CrystalPhase, CrystalBase -from xrdpattern.pattern import XrdPattern -from xrdpattern.xrd import PowderExperiment - - -# ------------------------------------------------- - -def retrieve_cod_data(json_fpath : str, out_dirpath : str): - with open(json_fpath, 'r') as f: - content = f.read() - - the_dict = json.loads(content) - print(f'done reading json. Contains {len(the_dict)} entries') - - for cod_id, data_dict in the_dict.items(): - num = cod_id.split('/')[-1] - fname = f"COD_{num}" - save_fpath = os.path.join(out_dirpath, f'{fname}.json') - try: - pattern = parse_cod_cif(num=num) - print(f'Successfully parsed structure number {num} and saved file at {save_fpath}') - except BaseException as e: - a,b,c = data_dict['cell_a'], data_dict['cell_b'], data_dict['cell_c'] - a,b,c = (10*a,10*b,10*c) - alpha, beta, gamma = data_dict['cell_alpha'], data_dict['cell_beta'], data_dict['cell_gamma'] - spg_num = data_dict['sg_number'] - - x, y = data_dict['x'], data_dict['y'] - phase = CrystalPhase(lengths=(a,b,c), angles=(alpha,beta,gamma), spacegroup=spg_num, base=CrystalBase()) - powder_experiment = PowderExperiment.from_single_phase(phase=phase) - pattern = XrdPattern(two_theta_values=np.array(x), intensities=np.array(y), powder_experiment=powder_experiment) - - print(f'Failed to extract COD pattern {num} due to error {e}. 
Falling back on provided data') - - pattern.save(fpath=save_fpath, force_overwrite=True) - -def parse_cod_cif(num : int) -> XrdPattern: - base_url = 'https://www.crystallography.net/cod' - cif_request_url = f'{base_url}/{num}.cif' - cif_content = requests.get(url=cif_request_url).content.decode() - - try: - hkl_request_url = f'{base_url}/{num}.hkl' - hkl_content = requests.get(url=hkl_request_url).content.decode() - loops = hkl_content.split(f'loop_') - xfields = ["_pd_proc_2theta_corrected", "_pd_meas_2theta_scan", "_pd_meas_2theta"] - - for l in loops: - l = l.strip() - if any([x in l for x in xfields]): - cif_content += f'loop_\n{l}' - except: - pass - - temp_fpath = tempfile.mktemp(suffix='.cif') - with open(temp_fpath, 'w') as f: - f.write(cif_content) - - return XrdPattern.load(fpath=temp_fpath, mute=True) - -if __name__ == "__main__": - # cod_int = 1508528 - # pattern = parse_cod_cif(num=cod_int) - # pattern.save(fpath=f'./thisjson.json', force_overwrite=True) - - j_fpath = '/home/daniel/aimat/data/opXRD/processed/coudert_hardiagon_0/data/extracted_data.json' - the_out_dirpath = '/home/daniel/aimat/data/opXRD/processed/coudert_hardiagon_0/data/' - retrieve_cod_data(json_fpath=j_fpath, out_dirpath=the_out_dirpath) - print(f'done') diff --git a/special/processors/icsd.py b/special/processors/icsd.py deleted file mode 100644 index a367fc1..0000000 --- a/special/processors/icsd.py +++ /dev/null @@ -1,39 +0,0 @@ -import json -import os -import tempfile -import uuid - -from xrdpattern.pattern import XrdPattern - -icsd_json_path = '/home/daniel/Drive/data/workspace/icsd/dataset.json' -keywords = ['fullprof', 'rietveld', 'powder'] -save_dir = '/home/daniel/Drive/data/workspace/icsd_extracted' - -with open(icsd_json_path, 'r') as f: - the_dict = json.loads(f.read()) -print(f'Loaded icsd dataset') - - -for num, content in the_dict.items(): - lower_content = content.lower() - if not any([word in lower_content for word in keywords]): - print(f'Could not find any matching keywords in cif file number {num}') - continue - else: - print(f'Found keyword matching powder diffraction in cif file number {num}! 
\n' - f'Starting pattern extraction') - - try: - tmp_fpath = tempfile.mktemp(suffix='.cif') - lines = content.split('\n') - content = '\n'.join(lines[1:]) - - with open(tmp_fpath, 'w') as f: - f.write(content) - pattern = XrdPattern.load(fpath=tmp_fpath) - fpath = os.path.join(save_dir, str(uuid.uuid4())) - pattern.save(fpath=fpath) - print(f'Successfullly extracted pattern from cif and wrote information to {fpath}') - except Exception as e: - print(f'An error occured during extraction: {e}') - diff --git a/special/processors/opxrd.py b/special/processors/opxrd.py deleted file mode 100644 index 3f41f6d..0000000 --- a/special/processors/opxrd.py +++ /dev/null @@ -1,152 +0,0 @@ -import os -from logging import Logger -from typing import Optional - -import pandas as pd - -from holytools.logging import LoggerFactory -from special.tools.csv_label import get_powder_experiment, get_label_mapping -from holytools.devtools import ModuleInspector -from holytools.fsys import PathTools -from holytools.logging.tools import log_execution -from xrdpattern.crystal import CrystalPhase -from xrdpattern.pattern import PatternDB -from xrdpattern.xrd import PowderExperiment, XrayInfo, XrdAnode - - -# ------------------------------------------- - -class OpXRDProcessor: - def __init__(self, root_dirpath : str): - self.root_dirpath : str = root_dirpath - self.processed_dirpath : str = os.path.join(root_dirpath, 'processed') - self.final_dirpath : str = os.path.join(root_dirpath, 'final') - self.cu_xray : XrayInfo = XrdAnode.Cu.get_xray_info() - self.logger : Logger = LoggerFactory.get_logger(name=__name__) - - # --------------------------------------- - # Parsing individual contributions - - def parse_all(self): - methods = ModuleInspector.get_methods(self) - parse_methods = [m for m in methods if not m.__name__.endswith('all') and 'parse' in m.__name__] - - for mthd in parse_methods: - print(f'mthd name = {mthd.__name__}') - mthd() - - def get_db(self, dirname: str, - suffixes : Optional[list[str]] = None, - use_cif_labels : bool = False, - xray_info : Optional[XrayInfo] = None, - csv_orientation : Optional[str] = None, - strict : bool = False) -> PatternDB: - self.logger.info(f'Started processing contribution {dirname}') - data_dirpath = os.path.join(self.processed_dirpath, dirname, 'data') - contrib_dirpath = os.path.join(self.processed_dirpath, dirname) - pattern_db = PatternDB.load(dirpath=data_dirpath, suffixes=suffixes, csv_orientation=csv_orientation, strict=strict) - - self.attach_metadata(pattern_db, dirname=dirname) - self.attach_labels(pattern_db=pattern_db, contrib_dirpath=contrib_dirpath, use_cif_labels=use_cif_labels) - if xray_info: - pattern_db.set_xray(xray_info=xray_info) - for p in pattern_db.patterns: - p.metadata.remove_filename() - self.logger.info(f'Finished processing contribution {dirname}') - - return pattern_db - - def get_csv_db(self, dirname: str, orientation : str, suffixes: Optional[list[str]] = None) -> PatternDB: - return self.get_db(dirname=dirname, csv_orientation=orientation, suffixes=suffixes, strict=False) - - # --------------------------------------- - # Parsing steps - - @log_execution - def attach_metadata(self, pattern_db : PatternDB, dirname : str): - form_dirpath = os.path.join(self.processed_dirpath, dirname, 'form.txt') - with open(form_dirpath, "r") as file: - lines = file.readlines() - form_data = {} - for line in lines: - if '=' in line: - key, value = line.strip().split('=', 1) - form_data[key] = value - - for p in pattern_db.patterns: - p.metadata.contributor_name 
= form_data["name_of_advisor"] - p.metadata.institution = form_data["contributing_institution"] - - - def attach_labels(self, pattern_db : PatternDB, contrib_dirpath: str, use_cif_labels : bool): - if use_cif_labels: - self.attach_cif_labels(pattern_db) - else: - self.attach_csv_labels(pattern_db, contrib_dirpath=contrib_dirpath) - - @log_execution - def attach_cif_labels(self, pattern_db : PatternDB): - for fpath, patterns in pattern_db.fpath_dict.items(): - dirpath = os.path.dirname(fpath) - cif_fnames = [fname for fname in os.listdir(dirpath) if PathTools.get_suffix(fname) == 'cif'] - - phases = [] - for fname in cif_fnames: - cif_fpath = os.path.join(dirpath, fname) - attached_cif_content = OpXRDProcessor.read_file(fpath=cif_fpath) - crystal_phase = OpXRDProcessor.safe_cif_read(cif_content=attached_cif_content) - phases.append(crystal_phase) - - phases = [p for p in phases if not p is None] - powder_experiment = PowderExperiment.from_multi_phase(phases=phases) - for p in patterns: - p.powder_experiment = powder_experiment - - - @log_execution - def attach_csv_labels(self, pattern_db : PatternDB, contrib_dirpath : str): - csv_fpath = os.path.join(contrib_dirpath, 'labels.csv') - - if not os.path.isfile(csv_fpath): - print(f'No labels available for contribution {os.path.basename(contrib_dirpath)}') - return - - for p in pattern_db.patterns: - if p.powder_experiment.is_nonempty(): - raise ValueError(f"Pattern {p.get_name()} is already labeled") - - data = pd.read_csv(csv_fpath, skiprows=1) - phases = [get_label_mapping(data=data, phase_num=num) for num in range(2)] - for pattern_fpath, file_patterns in pattern_db.fpath_dict.items(): - powder_experiment = get_powder_experiment(pattern_fpath=pattern_fpath, contrib_dirpath=contrib_dirpath, phases=phases) - - for p in file_patterns: - p.powder_experiment = powder_experiment - - - @log_execution - def save(self, pattern_db : PatternDB, dirname : str): - out_dirpath = os.path.join(self.final_dirpath, dirname) - if not os.path.isdir(out_dirpath): - os.makedirs(out_dirpath) - pattern_db.save(dirpath=out_dirpath, force_overwrite=True) - - # ----------------------------- - # Helper methods - - @staticmethod - def read_file(fpath: str) -> str: - with open(fpath, 'r') as file: - cif_content = file.read() - return cif_content - - @staticmethod - def safe_cif_read(cif_content: str) -> Optional[CrystalPhase]: - try: - extracted_phase = CrystalPhase.from_cif(cif_content) - except: - extracted_phase = None - return extracted_phase - - def get_final_dirpath(self, *path_elements : str): - return os.path.join(self.final_dirpath, *path_elements) \ No newline at end of file diff --git a/special/processors/rruff.py b/special/processors/rruff.py deleted file mode 100644 index 2b7c9f6..0000000 --- a/special/processors/rruff.py +++ /dev/null @@ -1,90 +0,0 @@ -import os -from dataclasses import dataclass - -from special.tools.spg_converter import SpacegroupConverter -from xrdpattern.crystal import CrystalBase, CrystalPhase -from xrdpattern.pattern import XrdPattern -from xrdpattern.xrd import PowderExperiment, XrayInfo - - -# --------------------------------------------------------- - -@dataclass -class RRUFFLabels: - spacegroup: str - lattice_parameters: list - wavelength: float - - -def extract_labels(fpath : str) -> RRUFFLabels: - spg = None - lattice_parameters = None - wavelength = None - - with open(fpath, 'r') as file: - lines = file.readlines() - - for line in lines: - if "CELL PARAMETERS:" in line: - lattice_parameters = list(map(float, 
line.split(':')[1].strip().split())) - elif "SPACE GROUP:" in line: - spg = line.split(':')[1].strip() - elif "X-RAY WAVELENGTH:" in line: - wavelength = float(line.split(':')[1].strip()) - - print(f'Cell params, space group, wavelength = {lattice_parameters, spg, wavelength}') - return RRUFFLabels(spacegroup=spg, lattice_parameters=lattice_parameters, wavelength=wavelength) - - -def extract_basename(fname): - basename = "__".join(fname.split("__")[:2]) - return basename - - -not_found_spgs = set() - -if __name__ == "__main__": - rruff_dirpath = '/home/daniel/Drive/data/workspace/rruff/' - pattern_dirpath = os.path.join(rruff_dirpath,'patterns') - structures_dirpath = os.path.join(rruff_dirpath,'structures') - output_dirpath = os.path.join(rruff_dirpath,'lps') - - STRUCTURE_NAME_FPATH_MAP = {} - copper_wavelength = 1.541838 - - for name in os.listdir(structures_dirpath): - struct_basename = extract_basename(fname=name) - print(f'Struct basename = {struct_basename}') - STRUCTURE_NAME_FPATH_MAP[struct_basename] = os.path.join(structures_dirpath, name) - - - pattern_names = os.listdir(pattern_dirpath) - for name in pattern_names: - pattern_fpath = os.path.join(pattern_dirpath, name) - base_name = extract_basename(name) - print(f'Base_name = {base_name}') - - try: - struct_fpath = STRUCTURE_NAME_FPATH_MAP[base_name] - print(f'\nExtracting labels from path: {struct_fpath}') - labels = extract_labels(fpath=struct_fpath) - spacegroup = SpacegroupConverter.to_int(labels.spacegroup) - - pattern = XrdPattern.load(fpath=pattern_fpath) - for p in labels.lattice_parameters: - if not isinstance(p, float): - raise ValueError(f'Expected float, got {type(p)}') - if not isinstance(spacegroup, int): - raise ValueError(f'Expected int, got {type(spacegroup)}') - - a,b,c,alpha, beta, gamma = labels.lattice_parameters - - crystal_structure = CrystalPhase(lengths=(a,b,c), angles=(alpha, beta, gamma), spacegroup=spacegroup, base=CrystalBase()) - artifacts = XrayInfo(primary_wavelength=labels.wavelength, secondary_wavelength=copper_wavelength) - pattern.powder_experiment = PowderExperiment(phases=[crystal_structure], xray_info=artifacts, is_simulated=False) - pattern.save(fpath=os.path.join(output_dirpath, base_name)) - - except Exception as e: - print(f'Error tryin to parse {base_name}: {e.__class__}') - - print(f'Not found spgs are {not_found_spgs}') \ No newline at end of file diff --git a/special/tools/__init__.py b/special/tools/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/tests/t_tools/t_spg_converter.py b/tests/t_tools/t_spg_converter.py index 3cee683..65e450b 100644 --- a/tests/t_tools/t_spg_converter.py +++ b/tests/t_tools/t_spg_converter.py @@ -1,3 +1,5 @@ +from holytools.events import Timer + from special.tools.spg_converter import SpacegroupConverter from holytools.devtools import Unittest @@ -7,25 +9,24 @@ class SpacegroupConversionTest(Unittest): @classmethod def setUpClass(cls): cls.spg_integers = range(1, 231) + print(f'- Obtaining spg formulas') cls.spg_formulas = SpacegroupConverter.get_all_formulas() + print(f'- Setup complete\n') def test_to_int(self): for spg in self.spg_formulas: spg_int = SpacegroupConverter.to_int(spg) - # print(f'Converted {spg} to {spg_int}') self.assertIsInstance(spg_int, int) def test_to_formula(self): for spg in self.spg_integers: formula = SpacegroupConverter.to_formula(spg) - # print(f'Converted {spg} to {formula}') self.assertIsInstance(formula, str) def test_roundtrip(self): for spg in self.spg_integers: formula = 
SpacegroupConverter.to_formula(spg) spg2 = SpacegroupConverter.to_int(formula) - # print(f'Converted {spg} to {formula} and back to {spg2}') self.assertEqual(spg, spg2) diff --git a/xrdpattern/pattern/pattern.py b/xrdpattern/pattern/pattern.py index 3fa8522..40ad65e 100644 --- a/xrdpattern/pattern/pattern.py +++ b/xrdpattern/pattern/pattern.py @@ -97,7 +97,7 @@ def to_strictly_increasing(x : NDArray, y : NDArray): return x, y - def __eq__(self, other : XrdData): + def __eq__(self, other : XrdPattern): for attr in fields(self): v1, v2 = getattr(self, attr.name), getattr(other, attr.name) if isinstance(v1, np.ndarray): diff --git a/xrdpattern/pattern/visualization.py b/xrdpattern/pattern/visualization.py index f719927..a63f297 100644 --- a/xrdpattern/pattern/visualization.py +++ b/xrdpattern/pattern/visualization.py @@ -7,10 +7,11 @@ from matplotlib.ticker import MaxNLocator from mpl_toolkits.axes_grid1 import make_axes_locatable -from special.tools.spg_converter import SpacegroupConverter +from xrdpattern.tools.spg_converter import SpacegroupConverter from .pattern import XrdPattern + # ----------------------------------------- def multiplot(patterns : list[XrdPattern], start_idx : int): diff --git a/special/processors/__init__.py b/xrdpattern/tools/__init__.py similarity index 100% rename from special/processors/__init__.py rename to xrdpattern/tools/__init__.py diff --git a/special/tools/binary_analyser.py b/xrdpattern/tools/binary_analyser.py similarity index 100% rename from special/tools/binary_analyser.py rename to xrdpattern/tools/binary_analyser.py diff --git a/special/tools/csv_label.py b/xrdpattern/tools/csv_label.py similarity index 100% rename from special/tools/csv_label.py rename to xrdpattern/tools/csv_label.py diff --git a/special/tools/spg_converter.py b/xrdpattern/tools/spg_converter.py similarity index 100% rename from special/tools/spg_converter.py rename to xrdpattern/tools/spg_converter.py diff --git a/special/tools/spg_formulas.txt b/xrdpattern/tools/spg_formulas.txt similarity index 100% rename from special/tools/spg_formulas.txt rename to xrdpattern/tools/spg_formulas.txt diff --git a/xrdpattern/xrd/experiment.py b/xrdpattern/xrd/experiment.py index d812030..c545a5c 100644 --- a/xrdpattern/xrd/experiment.py +++ b/xrdpattern/xrd/experiment.py @@ -3,9 +3,9 @@ import math from dataclasses import dataclass, field from enum import Enum +from importlib.metadata import version from typing import Optional -import pkg_resources import torch from holytools.abstract import JsonDataclass @@ -206,5 +206,5 @@ def __eq__(self, other : Metadata): def remove_filename(self): self.filename = None -def get_library_version(library_name): - return pkg_resources.get_distribution(library_name).version +def get_library_version(library_name : str): + return version(library_name)
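A minimal usage sketch of the replacement API from the final hunk (illustration only, not part of the patch): importlib.metadata ships with the standard library from Python 3.8 onward, and version() raises PackageNotFoundError where pkg_resources.get_distribution raised DistributionNotFound, so callers that caught the old exception need the new one.

    from importlib.metadata import version, PackageNotFoundError

    def get_library_version(library_name: str) -> str:
        # Mirrors the patched helper; the try/except fallback is a hypothetical
        # addition for callers that previously handled DistributionNotFound.
        try:
            return version(library_name)   # e.g. version('numpy') -> '1.26.4'
        except PackageNotFoundError:
            return 'unknown'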