From 35df82e20721e5bd574c90582e10ddad54fe30bc Mon Sep 17 00:00:00 2001
From: RoyStegeman
Date: Thu, 20 Jun 2024 14:28:17 +0100
Subject: [PATCH] pre-commit

---
 validphys2/src/validphys/loader.py | 41 +++++++++++++-----------------
 1 file changed, 18 insertions(+), 23 deletions(-)

diff --git a/validphys2/src/validphys/loader.py b/validphys2/src/validphys/loader.py
index 602d87c4d3..7412281a8a 100644
--- a/validphys2/src/validphys/loader.py
+++ b/validphys2/src/validphys/loader.py
@@ -14,7 +14,6 @@
 import sys
 import tarfile
 import tempfile
-from typing import List
 import urllib.parse as urls
 
 import requests
@@ -296,7 +295,7 @@ def available_hyperscans(self):
         return []
 
     @property
-    @functools.lru_cache()
+    @functools.lru_cache
     def available_theories(self):
         """Return a string token for each of the available theories"""
         theory_token = 'theory_'
@@ -306,7 +305,7 @@ def available_theories(self):
         }
 
     @property
-    @functools.lru_cache()
+    @functools.lru_cache
     def available_ekos(self):
         """Return a string token for each of the available theories"""
         return {
@@ -314,7 +313,7 @@ def available_ekos(self):
         }
 
     @property
-    @functools.lru_cache()
+    @functools.lru_cache
     def _available_old_datasets(self):
         """Provide all available datasets
         At the moment this means cominbing the new and olf format datasets
@@ -329,7 +328,7 @@ def _available_old_datasets(self):
         }
 
     @property
-    @functools.lru_cache()
+    @functools.lru_cache
     def available_datasets(self):
         """Provide all available datasets other then positivitiy and integrability.
         At the moment this only returns old datasets for which we have a translation available
@@ -339,7 +338,7 @@ def available_datasets(self):
         return set(old_datasets)
 
     @property
-    @functools.lru_cache()
+    @functools.lru_cache
     def implemented_datasets(self):
         """Provide all implemented datasets that can be found in the datafiles folder
         regardless of whether they can be used for fits (i.e., whether they include a theory),
@@ -351,7 +350,7 @@ def implemented_datasets(self):
         return datasets
 
     @property
-    @functools.lru_cache()
+    @functools.lru_cache
     def available_pdfs(self):
         return lhaindex.expand_local_names('*')
 
@@ -512,17 +511,15 @@ def check_commondata(
             setname, metadata, legacy=True, datafile=datafile, sysfile=sysfile, plotfiles=plotfiles
         )
 
-    @functools.lru_cache()
+    @functools.lru_cache
     def check_theoryID(self, theoryID):
         theoryID = str(theoryID)
         theopath = self._theories_path / f"theory_{theoryID}"
         if not theopath.exists():
-            raise TheoryNotFound(
-                "Could not find theory {}. Folder '{}' not found".format(theoryID, theopath)
-            )
+            raise TheoryNotFound(f"Could not find theory {theoryID}. Folder '{theopath}' not found")
         return TheoryIDSpec(theoryID, theopath, self.theorydb_folder)
 
-    @functools.lru_cache()
+    @functools.lru_cache
     def check_eko(self, theoryID):
         """Check the eko (and the parent theory) both exists and returns the path to it"""
         theory = self.check_theoryID(theoryID)
@@ -554,7 +551,7 @@ def check_fktable(self, theoryID, setname, cfac):
         fkpath = theopath / 'fastkernel' / ('FK_%s.dat' % setname)
         if not fkpath.exists():
             raise FKTableNotFound(
-                "Could not find FKTable for set '{}'. File '{}' not found".format(setname, fkpath)
+                f"Could not find FKTable for set '{setname}'. File '{fkpath}' not found"
             )
 
         cfactors = self.check_cfactor(theoryID, setname, cfac)
@@ -802,7 +799,7 @@ def check_dataset(
             rules=rules,
         )
 
-    def check_experiment(self, name: str, datasets: List[DataSetSpec]) -> DataGroupSpec:
+    def check_experiment(self, name: str, datasets: list[DataSetSpec]) -> DataGroupSpec:
         """Loader method for instantiating DataGroupSpec objects. The NNPDF::Experiment
         object can then be instantiated using the load method.
 
@@ -1089,15 +1086,13 @@ def _remote_files_from_url(self, url, index, thing='files'):
             resp = requests.get(index_url)
             resp.raise_for_status()
         except Exception as e:
-            raise RemoteLoaderError(
-                "Failed to fetch remote {} index {}: {}".format(thing, index_url, e)
-            ) from e
+            raise RemoteLoaderError(f"Failed to fetch remote {thing} index {index_url}: {e}") from e
 
         try:
             info = resp.json()['files']
         except Exception as e:
             raise RemoteLoaderError(
-                "Malformed index {}. Expecting json with a key 'files': {}".format(index_url, e)
+                f"Malformed index {index_url}. Expecting json with a key 'files': {e}"
             ) from e
 
         return {file.split('.')[0]: url + file for file in info}
@@ -1112,31 +1107,31 @@ def remote_files(self, urls, index, thing='files'):
         return d
 
     @property
-    @functools.lru_cache()
+    @functools.lru_cache
    def remote_fits(self):
         return self.remote_files(self.fit_urls, self.fit_index, thing="fits")
 
     @property
-    @functools.lru_cache()
+    @functools.lru_cache
     def remote_hyperscans(self):
         return self.remote_files(self.hyperscan_url, self.hyperscan_index, thing="hyperscan")
 
     @property
-    @functools.lru_cache()
+    @functools.lru_cache
     def remote_theories(self):
         token = 'theory_'
         rt = self.remote_files(self.theory_urls, self.theory_index, thing="theories")
         return {k[len(token) :]: v for k, v in rt.items()}
 
     @property
-    @functools.lru_cache()
+    @functools.lru_cache
     def remote_ekos(self):
         token = 'eko_'
         rt = self.remote_files(self.eko_urls, self.eko_index, thing="ekos")
         return {k[len(token) :]: v for k, v in rt.items()}
 
     @property
-    @functools.lru_cache()
+    @functools.lru_cache
     def remote_nnpdf_pdfs(self):
         return self.remote_files(self.nnpdf_pdfs_urls, self.nnpdf_pdfs_index, thing="PDFs")
 
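
Note (not part of the patch): the two non-mechanical changes above rely on features of the Python versions this codebase targets. Since Python 3.8, functools.lru_cache can be applied as a bare decorator (equivalent to lru_cache(maxsize=128)), and since Python 3.9 the built-in list is subscriptable in annotations (PEP 585), which is why typing.List can be dropped. A minimal sketch illustrating both, using a hypothetical expensive_lookup function that stands in for the loader's cached checks:

import functools


# Python >= 3.8: lru_cache may be used without parentheses,
# which behaves the same as @functools.lru_cache(maxsize=128).
@functools.lru_cache
def expensive_lookup(name: str) -> list[int]:  # PEP 585 (3.9+): built-in list in annotations
    # Hypothetical stand-in for a costly filesystem or network lookup.
    return [ord(c) for c in name]


print(expensive_lookup("theory_700"))  # computed on the first call
print(expensive_lookup("theory_700"))  # served from the cache
print(expensive_lookup.cache_info())   # CacheInfo(hits=1, misses=1, maxsize=128, currsize=1)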