diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 0000000..cec4431 --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,2 @@ +7668662f988eb94e162c82f8281a4f4f5bdafd00 +73e5560b4b19e445d7fc3957cb31fa9da600eb42 diff --git a/.github/workflows/linter.yml b/.github/workflows/linter.yml new file mode 100644 index 0000000..82ad245 --- /dev/null +++ b/.github/workflows/linter.yml @@ -0,0 +1,7 @@ +name: Linter +on: [pull_request] +jobs: + call-workflow: + uses: ISISComputingGroup/reusable-workflows/.github/workflows/linters.yml@main + with: + compare-branch: origin/master diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml deleted file mode 100644 index 8026f07..0000000 --- a/.github/workflows/pylint.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: Pylint - -on: [push] - -jobs: - build: - - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v2 - - name: Set up Python 3.8 - uses: actions/setup-python@v1 - with: - python-version: 3.8 - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install pylint - - name: Analysing the code with pylint - run: pylint --rcfile=.pylintrc $(find . -name "*.py" | xargs) \ No newline at end of file diff --git a/config.py b/config.py index a75b662..983730d 100644 --- a/config.py +++ b/config.py @@ -4,15 +4,18 @@ # pylint: disable=too-few-public-methods from enum import Enum + from LSI import LSI_Param # pylint: disable=import-error -from pvdb import STATIC_PV_DATABASE, Records # pylint: disable=unused-import + +from pvdb import Records # pylint: disable=unused-import + class Constants: """ Constants used by the LSICorrelator """ - DELTA_T = 0.0524288 # Magic number, seems to be time between measurements. + DELTA_T = 0.0524288 # Magic number, seems to be time between measurements. 
SLEEP_BETWEEN_MEASUREMENTS = 0.5 DATA_DIR = r"c:\Data" SIMULATE_ARCHIVE_DAT_FILE_NAME = "LSICORR_IOC_test_archive_save.dat" @@ -23,6 +26,7 @@ class Macro(Enum): """ Macros used by the LSICorrelator """ + SIMULATE = {"macro": "SIMULATE"} ADDRESS = {"macro": "ADDR"} FILEPATH = {"macro": "FILEPATH"} @@ -51,6 +55,7 @@ def add_prefix(self, prefix): """ return f"{prefix}{self.value}" + class LSiPVSeverity(Enum): """ Severity states of the LSi PV @@ -61,10 +66,12 @@ class LSiPVSeverity(Enum): INFO = "INFO" INVALID = "INVALID" + class Defaults: """ Default values for the LSICorrelator """ + # Set up the PV database defaults = { Records.CORRELATIONTYPE.value: LSI_Param.CorrelationType.AUTO, @@ -90,22 +97,24 @@ class Defaults: Records.OUTPUTFILE.value: "No data taken yet", Records.SIM.value: 0, Records.DISABLE.value: 0, - Records.MIN_TIME_LAG.value: 200 + Records.MIN_TIME_LAG.value: 200, } metadata_records = [ - Records.SCATTERING_ANGLE, - Records.MEASUREMENTDURATION, - Records.LASER_WAVELENGTH, - Records.SOLVENT_REFRACTIVE_INDEX, - Records.SOLVENT_VISCOSITY, - Records.SAMPLE_TEMP - ] + Records.SCATTERING_ANGLE, + Records.MEASUREMENTDURATION, + Records.LASER_WAVELENGTH, + Records.SOLVENT_REFRACTIVE_INDEX, + Records.SOLVENT_VISCOSITY, + Records.SAMPLE_TEMP, + ] + class Schema: """ File schema for the LSICorrelator """ + FILE_SCHEME = """{datetime} Pseudo Cross Correlation Scattering angle:\t{scattering_angle:.1f} diff --git a/correlator_driver_functions.py b/correlator_driver_functions.py index def6e20..88ecd6f 100644 --- a/correlator_driver_functions.py +++ b/correlator_driver_functions.py @@ -4,27 +4,30 @@ """ # pylint: disable=wrong-import-position -from __future__ import print_function, unicode_literals, division, absolute_import +from __future__ import absolute_import, division, print_function, unicode_literals -import sys import os +import sys import traceback from functools import wraps -from typing import Dict, TextIO, Tuple from time import sleep +from typing import 
Dict, TextIO, Tuple sys.path.insert(1, os.path.join(os.getenv("EPICS_KIT_ROOT"), "support", "lsicorr_vendor", "master")) sys.path.insert(2, os.path.join(os.getenv("EPICS_KIT_ROOT"), "ISIS", "inst_servers", "master")) import numpy as np # pylint: disable=import-error +from LSICorrelator import ( + LSICorrelator, # pylint: disable=import-error, wrong-import-position, wrong-import-order +) +from server_common.utilities import ( + print_and_log, # pylint: disable=import-error, wrong-import-position, wrong-import-order +) -from data_file_interaction import DataArrays, DataFile from config import Constants, Macro +from data_file_interaction import DataArrays, DataFile from mocked_correlator_api import MockedCorrelatorAPI -from server_common.utilities import print_and_log # pylint: disable=import-error, wrong-import-position, wrong-import-order -from LSICorrelator import LSICorrelator # pylint: disable=import-error, wrong-import-position, wrong-import-order - def _error_handler(func): """ @@ -33,6 +36,7 @@ def _error_handler(func): @param func: The function to wrap. @return: The wrapped function. 
""" + # pylint: disable=inconsistent-return-statements @wraps(func) def _wrapper(*args, **kwargs): @@ -41,6 +45,7 @@ def _wrapper(*args, **kwargs): # pylint: disable=broad-except except Exception: print_and_log(traceback.format_exc(), src="lsi ") + return _wrapper @@ -65,8 +70,8 @@ def __init__(self, macros: Dict[str, str], simulated: bool = False) -> None: except KeyError as key_error: raise RuntimeError("No IP address specified, cannot start") from key_error firmware_revision = macros.get( - Macro.FIRMWARE_REVISION.name, - Macro.FIRMWARE_REVISION.value["default"]) + Macro.FIRMWARE_REVISION.name, Macro.FIRMWARE_REVISION.value["default"] + ) if simulated: self.mocked_api = MockedCorrelatorAPI() @@ -80,7 +85,9 @@ def __init__(self, macros: Dict[str, str], simulated: bool = False) -> None: self.has_data = False @staticmethod - def remove_data_with_time_lags_lower_than_minimum(lags: np.ndarray, corr: np.ndarray, min_time_lag: float) -> Tuple[np.ndarray, np.ndarray]: # pylint: disable=line-too-long + def remove_data_with_time_lags_lower_than_minimum( + lags: np.ndarray, corr: np.ndarray, min_time_lag: float + ) -> Tuple[np.ndarray, np.ndarray]: # pylint: disable=line-too-long """ Remove lags and corresponding corr values which have lags values below the minimum time lag @param lags (np.ndarray): The original time lags values to remove values from @@ -92,12 +99,14 @@ def remove_data_with_time_lags_lower_than_minimum(lags: np.ndarray, corr: np.nda """ indices = [count for count in range(0, len(lags)) if lags[count] < min_time_lag] - lags = np.delete(lags,indices) - corr = np.delete(corr,indices) + lags = np.delete(lags, indices) + corr = np.delete(corr, indices) return lags, corr - def get_data_as_arrays(self, min_time_lag) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray]: # pylint: disable=line-too-long + def get_data_as_arrays( + self, min_time_lag + ) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray]: # pylint: 
disable=line-too-long """ Collects the correlation function, time lags, raw traces and time trace as numpy arrays. The correlation function and time lags are filtered to finite values only. @@ -124,7 +133,7 @@ def get_data_as_arrays(self, min_time_lag) -> Tuple[np.ndarray, np.ndarray, np.n trace_b = np.asarray(self.device.TraceChB) # Time axis is number of data points collected * scaling factor - trace_time = np.arange(len(trace_a))*Constants.DELTA_T + trace_time = np.arange(len(trace_a)) * Constants.DELTA_T return corr, lags, trace_a, trace_b, trace_time @@ -158,7 +167,9 @@ def take_data(self, min_time_lag) -> None: self.corr = corr self.lags = lags - def save_data(self, min_time_lag: float, user_file: TextIO, archive_file: TextIO, metadata: Dict) -> None: # pylint: disable=line-too-long + def save_data( + self, min_time_lag: float, user_file: TextIO, archive_file: TextIO, metadata: Dict + ) -> None: # pylint: disable=line-too-long """ Save the data to file. @param min_time_lag (float): The minimum time lag to include. 
diff --git a/correlator_pcaspy.py b/correlator_pcaspy.py index c67d666..557b45d 100644 --- a/correlator_pcaspy.py +++ b/correlator_pcaspy.py @@ -1,38 +1,40 @@ """ Correlator pcaspy and IOC Elements of the LSiCorrelator IOC """ -from __future__ import print_function, unicode_literals, division, absolute_import -from datetime import datetime +from __future__ import absolute_import, division, print_function, unicode_literals import argparse -import sys import os -import traceback -from typing import Dict, Any +import sys import time +import traceback # pylint: disable=wrong-import-position - from concurrent.futures import ThreadPoolExecutor +from datetime import datetime +from typing import Any, Dict sys.path.insert(1, os.path.join(os.getenv("EPICS_KIT_ROOT"), "support", "lsicorr_vendor", "master")) sys.path.insert(2, os.path.join(os.getenv("EPICS_KIT_ROOT"), "ISIS", "inst_servers", "master")) -from pcaspy import SimpleServer, Driver # pylint: disable=import-error -from pcaspy.alarm import Alarm, Severity # pylint: disable=import-error - from BlockServer.core.file_path_manager import FILEPATH_MANAGER # pylint: disable=import-error -from server_common.utilities import print_and_log # pylint: disable=import-error +from pcaspy import Driver, SimpleServer # pylint: disable=import-error +from pcaspy.alarm import Alarm, Severity # pylint: disable=import-error from server_common.channel_access import ChannelAccess # pylint: disable=import-error -from server_common.helpers import register_ioc_start, get_macro_values # pylint: disable=import-error +from server_common.helpers import ( # pylint: disable=import-error + get_macro_values, + register_ioc_start, +) +from server_common.utilities import print_and_log # pylint: disable=import-error +from config import PV, Constants, Defaults, LSiPVSeverity, Macro from correlator_driver_functions import LSiCorrelatorVendorInterface, _error_handler from pvdb import STATIC_PV_DATABASE, Records -from config import Constants, PV, 
LSiPVSeverity, Macro, Defaults NANOSECONDS_TO_SECONDS = 1e9 + def get_base_pv(reason: str) -> str: """ Trims trailing :SP off a PV name @@ -52,14 +54,15 @@ def remove_non_ascii(text_to_check: str) -> str: @return (str): The cleaned text """ # Remove anything other than alphanumerics and dashes/underscores - parsed_text = [char for char in text_to_check if char.isalnum() or char in '-_'] - return ''.join(parsed_text) + parsed_text = [char for char in text_to_check if char.isalnum() or char in "-_"] + return "".join(parsed_text) class LSiCorrelatorIOC(Driver): """ A class containing pcaspy and IOC elements of the LSiCorrelator IOC. """ + # pylint: disable=too-many-instance-attributes def __init__(self, pv_prefix: str, macros: Dict[str, str]) -> None: """ @@ -72,10 +75,11 @@ def __init__(self, pv_prefix: str, macros: Dict[str, str]) -> None: try: self.user_filepath = macros[Macro.FILEPATH.name] except KeyError as key_error: - raise RuntimeError(f"No file path specified to save data to: {key_error}".format( - key_error)) from key_error + raise RuntimeError( + f"No file path specified to save data to: {key_error}" + ) from key_error - self.simulated = macros[Macro.SIMULATE.name] == "1" # type: bool + self.simulated = macros[Macro.SIMULATE.name] == "1" # type: bool if self.simulated: print("WARNING!
Started in simulation mode") @@ -94,7 +98,7 @@ def __init__(self, pv_prefix: str, macros: Dict[str, str]) -> None: if not os.path.isdir(self.user_filepath): self.update_error_pv_print_and_log( f"LSiCorrelatorDriver: {self.user_filepath} is invalid file path", - LSiPVSeverity.MAJOR.value + LSiPVSeverity.MAJOR.value, ) for record, default_value in defaults.items(): @@ -104,7 +108,9 @@ def __init__(self, pv_prefix: str, macros: Dict[str, str]) -> None: self.updatePVs() - def update_error_pv_print_and_log(self, error: str, severity: LSiPVSeverity = LSiPVSeverity.INFO, src: str = "LSI") -> None: # pylint: disable=line-too-long + def update_error_pv_print_and_log( + self, error: str, severity: LSiPVSeverity = LSiPVSeverity.INFO, src: str = "LSI" + ) -> None: # pylint: disable=line-too-long """ Updates the error PV with the provided error message, then prints and logs the error @param error (str): The error message to write to the error PV @@ -164,7 +170,9 @@ def update_param_and_fields(self, reason: str, value: Any) -> None: self.update_error_pv_print_and_log(f"{err}") @_error_handler - def update_pv_and_write_to_device(self, reason: str, value: Any, update_setpoint: bool = False) -> None: # pylint: disable=line-too-long + def update_pv_and_write_to_device( + self, reason: str, value: Any, update_setpoint: bool = False + ) -> None: # pylint: disable=line-too-long """ Helper function to update the value of a PV held in this driver and sets the value on the device. 
@@ -226,15 +234,10 @@ def write(self, reason: str, value: Any) -> None: if reason.endswith(":SP"): # Update both SP and non-SP fields THREADPOOL.submit( - self.update_pv_and_write_to_device, - get_base_pv(reason), - value, - update_setpoint=True) + self.update_pv_and_write_to_device, get_base_pv(reason), value, update_setpoint=True + ) else: - THREADPOOL.submit( - self.update_pv_and_write_to_device, - reason, - value) + THREADPOOL.submit(self.update_pv_and_write_to_device, reason, value) @_error_handler def read(self, reason: str) -> Any: @@ -283,8 +286,7 @@ def take_data(self) -> None: self.update_pv_and_write_to_device(Records.TAKING_DATA.name, True) - for repeat in range(first_repetition, no_repetitions+1): - + for repeat in range(first_repetition, no_repetitions + 1): self.update_pv_and_write_to_device(Records.CURRENT_REPETITION.name, repeat) if repeat == first_repetition and wait_at_start or repeat != first_repetition: @@ -299,15 +301,19 @@ def take_data(self) -> None: self.set_array_pv_value(Records.LAGS.name, self.driver.lags) # Save data to file - with open(self.get_user_filename(), "w+", encoding="utf-8") as user_file, \ - open(self.get_archive_filename(), "w+", encoding="utf-8") as archive_file: - self.driver.save_data(min_time_lag,user_file, archive_file, self.get_metadata()) + with open(self.get_user_filename(), "w+", encoding="utf-8") as user_file, open( + self.get_archive_filename(), "w+", encoding="utf-8" + ) as archive_file: + self.driver.save_data( + min_time_lag, user_file, archive_file, self.get_metadata() + ) else: # No data returned, correlator may be disconnected self.update_pv_and_write_to_device(Records.CONNECTED.name, False) self.update_error_pv_print_and_log( "LSiCorrelatorDriver: No data read, device could be disconnected", - LSiPVSeverity.INVALID) + LSiPVSeverity.INVALID, + ) self.set_disconnected_alarms(True) self.update_pv_and_write_to_device(Records.TAKING_DATA.name, False) @@ -338,8 +344,8 @@ def get_archive_filename(self) -> str: 
""" if self.simulated: full_filename = os.path.join( - self.user_filepath, - Constants.SIMULATE_ARCHIVE_DAT_FILE_NAME) + self.user_filepath, Constants.SIMULATE_ARCHIVE_DAT_FILE_NAME + ) else: timestamp = datetime.now().strftime("%Y-%m-%dT%H_%M_%S") run_number = ChannelAccess.caget(PV.RUNNUMBER.add_prefix(prefix=self.pv_prefix)) @@ -371,7 +377,7 @@ def get_user_filename(self) -> str: experiment_name = ChannelAccess.caget(PV.TITLE.add_prefix(prefix=self.pv_prefix)) # Remove characters that are not allowed in filename and replace with underscore (_) - compressed_experiment_name=remove_non_ascii(experiment_name) #pylint: disable=unused-variable + compressed_experiment_name = remove_non_ascii(experiment_name) # pylint: disable=unused-variable filename = f"{run_number}_{compressed_experiment_name}_{timestamp}.dat" # Update last used filename PV @@ -392,7 +398,9 @@ def serve_forever(ioc_name: str, pv_prefix: str, macros: Dict[str, str]) -> None @return: None """ - ioc_name_with_pv_prefix = "{pv_prefix}{ioc_name}:".format(pv_prefix=pv_prefix, ioc_name=ioc_name) # pylint: disable=line-too-long, consider-using-f-string + ioc_name_with_pv_prefix = "{pv_prefix}{ioc_name}:".format( + pv_prefix=pv_prefix, ioc_name=ioc_name + ) # pylint: disable=line-too-long, consider-using-f-string print_and_log(ioc_name_with_pv_prefix) server = SimpleServer() @@ -401,7 +409,8 @@ def serve_forever(ioc_name: str, pv_prefix: str, macros: Dict[str, str]) -> None # Run heartbeat IOC, this is done with a different prefix server.createPV( prefix="f{pv_prefix}CS:IOC:{ioc_name}:DEVIOS:", - pvdb={"HEARTBEAT": {"type": "int", "value": 0}}) + pvdb={"HEARTBEAT": {"type": "int", "value": 0}}, + ) # Looks like it does nothing, but this creates *and automatically registers* the driver # (via metaclasses in pcaspy). 
See declaration of DriverType in pcaspy/driver.py for details @@ -429,8 +438,9 @@ def main(): ) parser.add_argument("--ioc_name", required=True, type=str) - parser.add_argument("--pv_prefix", required=True, type=str, - help="The PV prefix of this instrument.") + parser.add_argument( + "--pv_prefix", required=True, type=str, help="The PV prefix of this instrument." + ) args = parser.parse_args() @@ -440,11 +450,7 @@ def main(): macros = get_macro_values() - serve_forever( - args.ioc_name, - args.pv_prefix, - macros - ) + serve_forever(args.ioc_name, args.pv_prefix, macros) if __name__ == "__main__": diff --git a/data_file_interaction.py b/data_file_interaction.py index dd35f00..c2a4250 100644 --- a/data_file_interaction.py +++ b/data_file_interaction.py @@ -1,15 +1,16 @@ """ Contains the data_file_interaction class which is used to interact with the data file. """ -from typing import Dict, TextIO, Tuple -from io import StringIO -from datetime import datetime + from dataclasses import dataclass +from datetime import datetime +from io import StringIO +from typing import Dict, TextIO, Tuple import numpy as np # pylint: disable=import-error -from pvdb import Records from config import Schema +from pvdb import Records @dataclass @@ -24,13 +25,16 @@ class DataArrays: trace_b: np.ndarray trace_time: np.ndarray + class DataFile: """ A data transfer object to store and format data to write to file. """ @staticmethod - def create_file_data(data_arrays: DataArrays, user_file: TextIO, archive_file: TextIO, metadata: Dict) -> 'DataFile': # pylint: disable=line-too-long + def create_file_data( + data_arrays: DataArrays, user_file: TextIO, archive_file: TextIO, metadata: Dict + ) -> "DataFile": # pylint: disable=line-too-long """ Create a data transfer object to store and format data to write to file. @param data_arrays (DataArrays): A data transfer object to store the relevant data ndarrays. 
@@ -42,11 +46,15 @@ def create_file_data(data_arrays: DataArrays, user_file: TextIO, archive_file: T @return (DataFile): A data transfer object to store and format data to write to file. """ data_file = DataFile(data_arrays, user_file, archive_file, metadata) - correlation_string, raw_channel_data_string = data_file._format_correlation_and_raw_channel_data() # pylint: disable=line-too-long, protected-access + correlation_string, raw_channel_data_string = ( + data_file._format_correlation_and_raw_channel_data() + ) # pylint: disable=line-too-long, protected-access data_file._structure_file_data(correlation_string, raw_channel_data_string) # pylint: disable=protected-access return data_file - def __init__(self, data_arrays: DataArrays, user_file: TextIO, archive_file: TextIO, metadata: Dict) -> None: # pylint: disable=line-too-long + def __init__( + self, data_arrays: DataArrays, user_file: TextIO, archive_file: TextIO, metadata: Dict + ) -> None: # pylint: disable=line-too-long """ Initialize the data transfer object. @param data_arrays (DataArrays): A data transfer object to store the relevant data ndarrays @@ -69,20 +77,17 @@ def _format_correlation_and_raw_channel_data(self) -> Tuple[StringIO, StringIO]: to write to file. 
""" - correlation_data = np.vstack(( - self.data_arrays.time_lags, - self.data_arrays.correlation)).T + correlation_data = np.vstack((self.data_arrays.time_lags, self.data_arrays.correlation)).T - raw_channel_data = np.vstack(( - self.data_arrays.trace_time, - self.data_arrays.trace_a, - self.data_arrays.trace_b)).T + raw_channel_data = np.vstack( + (self.data_arrays.trace_time, self.data_arrays.trace_a, self.data_arrays.trace_b) + ).T correlation_file = StringIO() - np.savetxt(correlation_file, correlation_data, delimiter='\t', fmt='%1.6e') + np.savetxt(correlation_file, correlation_data, delimiter="\t", fmt="%1.6e") correlation_string = correlation_file.getvalue() raw_channel_data_file = StringIO() - np.savetxt(raw_channel_data_file, raw_channel_data, delimiter='\t', fmt='%.6f') + np.savetxt(raw_channel_data_file, raw_channel_data, delimiter="\t", fmt="%.6f") raw_channel_data_string = raw_channel_data_file.getvalue() return correlation_string, raw_channel_data_string @@ -93,7 +98,9 @@ def _structure_file_data(self, correlation_string, raw_channel_data_string) -> N @param raw_channel_data_string (StringIO): The raw channel data to write to file. 
@return (None): None """ - correlation_string, raw_channel_data_string = self._format_correlation_and_raw_channel_data() # pylint: disable=line-too-long + correlation_string, raw_channel_data_string = ( + self._format_correlation_and_raw_channel_data() + ) # pylint: disable=line-too-long self.save_file = Schema.FILE_SCHEME.format( datetime=datetime.now().strftime("%m/%d/%Y\t%H:%M %p"), scattering_angle=self.metadata[Records.SCATTERING_ANGLE.name], @@ -105,7 +112,7 @@ def _structure_file_data(self, correlation_string, raw_channel_data_string) -> N avg_count_A=np.mean(self.data_arrays.trace_a), avg_count_B=np.mean(self.data_arrays.trace_b), correlation_function=correlation_string, - count_rate_history=raw_channel_data_string + count_rate_history=raw_channel_data_string, ) def write_to_file(self) -> None: diff --git a/mocked_correlator_api.py b/mocked_correlator_api.py index 176f3e7..61f41b7 100644 --- a/mocked_correlator_api.py +++ b/mocked_correlator_api.py @@ -1,14 +1,17 @@ """ Contains Mocked Correlator API for testing """ + from time import time -from mock import MagicMock # pylint: disable=import-error import numpy as np # pylint: disable=import-error +from mock import MagicMock # pylint: disable=import-error from pvdb import Records -elements_in_float_array = Records.CORRELATION_FUNCTION.value.database_entries["CORRELATION_FUNCTION"]["count"] # pylint: disable=line-too-long +elements_in_float_array = Records.CORRELATION_FUNCTION.value.database_entries[ + "CORRELATION_FUNCTION" +]["count"] # pylint: disable=line-too-long DATA_USED_IN_IOC_SYSTEM_TESTS = np.linspace(0, elements_in_float_array, elements_in_float_array) @@ -17,6 +20,7 @@ class MockedCorrelatorAPI: """ MockedCorrelatorAPI is a MagicMock object that can be used in place of a real correlator. 
""" + # pylint: disable=too-many-arguments, too-many-instance-attributes def __init__(self): self.device = MagicMock() @@ -70,7 +74,9 @@ def configure(self): raise RuntimeError("LSI --- Cannot configure: Measurement active") if self.device.disconnected: - raise RuntimeError("LSI --- Cannot configure: Correlator disconnected or measurement active") # pylint: disable=line-too-long + raise RuntimeError( + "LSI --- Cannot configure: Correlator disconnected or measurement active" + ) # pylint: disable=line-too-long def update(self): """ diff --git a/pvdb.py b/pvdb.py index 4ffcbfc..27d5099 100644 --- a/pvdb.py +++ b/pvdb.py @@ -1,20 +1,29 @@ """ Contains the PV definitions for the LSI_Param Enum """ -from __future__ import print_function, unicode_literals, division, absolute_import -import sys + +from __future__ import absolute_import, division, print_function, unicode_literals + import os -from functools import partial +import sys from enum import Enum +from functools import partial sys.path.insert(1, os.path.join(os.getenv("EPICS_KIT_ROOT"), "Support", "lsicorr_vendor", "master")) -from LSICorrelator import LSICorrelator # pylint: disable=import-error, wrong-import-position from LSI import LSI_Param # pylint: disable=import-error, wrong-import-position +from LSICorrelator import LSICorrelator # pylint: disable=import-error, wrong-import-position # pylint: disable=wrong-import-position, unused-import -from record import (Record, populate_enum_pv, float_pv_with_unit, do_nothing, - PARAM_FIELDS_BINARY, INT_AS_FLOAT_PV, CHAR_PV_FIELDS, FLOAT_ARRAY) +from record import ( + CHAR_PV_FIELDS, + FLOAT_ARRAY, + INT_AS_FLOAT_PV, + PARAM_FIELDS_BINARY, + Record, + float_pv_with_unit, + populate_enum_pv, +) def convert_pv_enum_to_lsi_enum(enum_class, pv_value): @@ -33,6 +42,7 @@ def convert_pv_enum_to_lsi_enum(enum_class, pv_value): return enum_as_list[pv_value] + def convert_lsi_enum_to_pv_value(enum_class, current_state): """ Takes a driver parameter and returns its associated 
enum value for the PV @@ -52,6 +62,7 @@ class Records(Enum): """ Enum containing the PV names for the LSI_Param Enum """ + @staticmethod def keys(): """ @@ -60,108 +71,84 @@ def keys(): """ return [member.name for member in Records] - CORRELATIONTYPE = Record("CORRELATIONTYPE", - populate_enum_pv(LSI_Param.CorrelationType), - convert_to_pv=partial( - convert_lsi_enum_to_pv_value, - LSI_Param.CorrelationType), - convert_from_pv=partial( - convert_pv_enum_to_lsi_enum, - LSI_Param.CorrelationType), - device_setter=LSICorrelator.setCorrelationType, - has_setpoint=True - ) - - NORMALIZATION = Record("NORMALIZATION", - populate_enum_pv(LSI_Param.Normalization), - convert_from_pv=partial( - convert_pv_enum_to_lsi_enum, - LSI_Param.Normalization), - convert_to_pv=partial( - convert_lsi_enum_to_pv_value, - LSI_Param.Normalization), - device_setter=LSICorrelator.setNormalization, - has_setpoint=True - ) - - MEASUREMENTDURATION = Record("MEASUREMENTDURATION", - INT_AS_FLOAT_PV, - convert_from_pv=round, - device_setter=LSICorrelator.setMeasurementDuration, - has_setpoint=True - ) - - SWAPCHANNELS = Record("SWAPCHANNELS", - populate_enum_pv(LSI_Param.SwapChannels), - convert_from_pv=partial( - convert_pv_enum_to_lsi_enum, - LSI_Param.SwapChannels), - convert_to_pv=partial( - convert_lsi_enum_to_pv_value, - LSI_Param.SwapChannels), - device_setter=LSICorrelator.setSwapChannels, - has_setpoint=True - ) - - SAMPLINGTIMEMULTIT = Record("SAMPLINGTIMEMULTIT", - populate_enum_pv(LSI_Param.SamplingTimeMultiT), - convert_from_pv=partial( - convert_pv_enum_to_lsi_enum, - LSI_Param.SamplingTimeMultiT), - convert_to_pv=partial( - convert_lsi_enum_to_pv_value, - LSI_Param.SamplingTimeMultiT), - device_setter=LSICorrelator.setSamplingTimeMultiT, - has_setpoint=True - ) - - TRANSFERRATE = Record("TRANSFERRATE", - populate_enum_pv(LSI_Param.TransferRate), - convert_from_pv=partial( - convert_pv_enum_to_lsi_enum, - LSI_Param.TransferRate), - convert_to_pv=partial( - 
convert_lsi_enum_to_pv_value, - LSI_Param.TransferRate), - device_setter=LSICorrelator.setTransferRate, - has_setpoint=True - ) - - OVERLOADLIMIT = Record("OVERLOADLIMIT", - {'type': 'float', 'prec': 0, 'value': 0.0, 'unit': 'Mcps'}, - convert_from_pv=round, - device_setter=LSICorrelator.setOverloadLimit, - has_setpoint=True - ) - - OVERLOADINTERVAL = Record("OVERLOADINTERVAL", - INT_AS_FLOAT_PV, - convert_from_pv=round, - device_setter=LSICorrelator.setOverloadTimeInterval, - has_setpoint=True - ) - - ERRORMSG = Record("ERRORMSG", - CHAR_PV_FIELDS - ) - - EXPERIMENTNAME = Record("EXPERIMENTNAME", - CHAR_PV_FIELDS, - has_setpoint=True - ) + CORRELATIONTYPE = Record( + "CORRELATIONTYPE", + populate_enum_pv(LSI_Param.CorrelationType), + convert_to_pv=partial(convert_lsi_enum_to_pv_value, LSI_Param.CorrelationType), + convert_from_pv=partial(convert_pv_enum_to_lsi_enum, LSI_Param.CorrelationType), + device_setter=LSICorrelator.setCorrelationType, + has_setpoint=True, + ) + + NORMALIZATION = Record( + "NORMALIZATION", + populate_enum_pv(LSI_Param.Normalization), + convert_from_pv=partial(convert_pv_enum_to_lsi_enum, LSI_Param.Normalization), + convert_to_pv=partial(convert_lsi_enum_to_pv_value, LSI_Param.Normalization), + device_setter=LSICorrelator.setNormalization, + has_setpoint=True, + ) + + MEASUREMENTDURATION = Record( + "MEASUREMENTDURATION", + INT_AS_FLOAT_PV, + convert_from_pv=round, + device_setter=LSICorrelator.setMeasurementDuration, + has_setpoint=True, + ) + + SWAPCHANNELS = Record( + "SWAPCHANNELS", + populate_enum_pv(LSI_Param.SwapChannels), + convert_from_pv=partial(convert_pv_enum_to_lsi_enum, LSI_Param.SwapChannels), + convert_to_pv=partial(convert_lsi_enum_to_pv_value, LSI_Param.SwapChannels), + device_setter=LSICorrelator.setSwapChannels, + has_setpoint=True, + ) + + SAMPLINGTIMEMULTIT = Record( + "SAMPLINGTIMEMULTIT", + populate_enum_pv(LSI_Param.SamplingTimeMultiT), + convert_from_pv=partial(convert_pv_enum_to_lsi_enum, 
LSI_Param.SamplingTimeMultiT), + convert_to_pv=partial(convert_lsi_enum_to_pv_value, LSI_Param.SamplingTimeMultiT), + device_setter=LSICorrelator.setSamplingTimeMultiT, + has_setpoint=True, + ) + + TRANSFERRATE = Record( + "TRANSFERRATE", + populate_enum_pv(LSI_Param.TransferRate), + convert_from_pv=partial(convert_pv_enum_to_lsi_enum, LSI_Param.TransferRate), + convert_to_pv=partial(convert_lsi_enum_to_pv_value, LSI_Param.TransferRate), + device_setter=LSICorrelator.setTransferRate, + has_setpoint=True, + ) + + OVERLOADLIMIT = Record( + "OVERLOADLIMIT", + {"type": "float", "prec": 0, "value": 0.0, "unit": "Mcps"}, + convert_from_pv=round, + device_setter=LSICorrelator.setOverloadLimit, + has_setpoint=True, + ) + + OVERLOADINTERVAL = Record( + "OVERLOADINTERVAL", + INT_AS_FLOAT_PV, + convert_from_pv=round, + device_setter=LSICorrelator.setOverloadTimeInterval, + has_setpoint=True, + ) + + ERRORMSG = Record("ERRORMSG", CHAR_PV_FIELDS) + + EXPERIMENTNAME = Record("EXPERIMENTNAME", CHAR_PV_FIELDS, has_setpoint=True) OUTPUTFILE = Record("OUTPUTFILE", CHAR_PV_FIELDS) - START = Record("START", - PARAM_FIELDS_BINARY, - has_setpoint=True - ) + START = Record("START", PARAM_FIELDS_BINARY, has_setpoint=True) - STOP = Record("STOP", - PARAM_FIELDS_BINARY, - convert_from_pv=bool, - has_setpoint=True - ) + STOP = Record("STOP", PARAM_FIELDS_BINARY, convert_from_pv=bool, has_setpoint=True) CORRELATION_FUNCTION = Record("CORRELATION_FUNCTION", FLOAT_ARRAY) @@ -171,79 +158,41 @@ def keys(): TRACEB = Record("TRACEB", FLOAT_ARRAY) - REPETITIONS = Record("REPETITIONS", - INT_AS_FLOAT_PV, - convert_from_pv=round, - has_setpoint=True - ) - - CURRENT_REPETITION = Record("CURRENT_REPETITION", - INT_AS_FLOAT_PV - ) - - RUNNING = Record("RUNNING", - PARAM_FIELDS_BINARY, - convert_from_pv=bool - ) - - TAKING_DATA = Record("TAKING_DATA", - PARAM_FIELDS_BINARY, - convert_from_pv=bool - ) - - WAITING = Record('WAITING', PARAM_FIELDS_BINARY, - convert_from_pv=bool - ) - - WAIT_AT_START = 
Record('WAIT_AT_START', PARAM_FIELDS_BINARY, - convert_from_pv=bool, - has_setpoint=True - ) - - CONNECTED = Record("CONNECTED", - PARAM_FIELDS_BINARY, - convert_from_pv=bool - ) - - SCATTERING_ANGLE = Record("SCATTERING_ANGLE", - float_pv_with_unit("degree"), - has_setpoint=True - ) - - SAMPLE_TEMP = Record("SAMPLE_TEMP", - float_pv_with_unit("K"), - has_setpoint=True - ) - - SOLVENT_VISCOSITY = Record("SOLVENT_VISCOSITY", - float_pv_with_unit("mPas"), - has_setpoint=True - ) - - SOLVENT_REFRACTIVE_INDEX = Record("SOLVENT_REFRACTIVE_INDEX", - float_pv_with_unit(""), - has_setpoint=True - ) - - LASER_WAVELENGTH = Record("LASER_WAVELENGTH", - float_pv_with_unit("nm"), - has_setpoint=True - ) - - SIM = Record("SIM", - {'type': 'enum', 'enums': ["NO", "YES"]}, - convert_from_pv=bool - ) - - DISABLE = Record("DISABLE", - {'type': 'enum', 'enums': ["NO", "YES"]}, - convert_from_pv=bool - ) - - WAIT = Record("WAIT",float_pv_with_unit('s'), has_setpoint = True) - - MIN_TIME_LAG =Record('MIN_TIME_LAG',float_pv_with_unit('ns'), has_setpoint= True ) + REPETITIONS = Record("REPETITIONS", INT_AS_FLOAT_PV, convert_from_pv=round, has_setpoint=True) + + CURRENT_REPETITION = Record("CURRENT_REPETITION", INT_AS_FLOAT_PV) + + RUNNING = Record("RUNNING", PARAM_FIELDS_BINARY, convert_from_pv=bool) + + TAKING_DATA = Record("TAKING_DATA", PARAM_FIELDS_BINARY, convert_from_pv=bool) + + WAITING = Record("WAITING", PARAM_FIELDS_BINARY, convert_from_pv=bool) + + WAIT_AT_START = Record( + "WAIT_AT_START", PARAM_FIELDS_BINARY, convert_from_pv=bool, has_setpoint=True + ) + + CONNECTED = Record("CONNECTED", PARAM_FIELDS_BINARY, convert_from_pv=bool) + + SCATTERING_ANGLE = Record("SCATTERING_ANGLE", float_pv_with_unit("degree"), has_setpoint=True) + + SAMPLE_TEMP = Record("SAMPLE_TEMP", float_pv_with_unit("K"), has_setpoint=True) + + SOLVENT_VISCOSITY = Record("SOLVENT_VISCOSITY", float_pv_with_unit("mPas"), has_setpoint=True) + + SOLVENT_REFRACTIVE_INDEX = Record( + "SOLVENT_REFRACTIVE_INDEX", 
float_pv_with_unit(""), has_setpoint=True + ) + + LASER_WAVELENGTH = Record("LASER_WAVELENGTH", float_pv_with_unit("nm"), has_setpoint=True) + + SIM = Record("SIM", {"type": "enum", "enums": ["NO", "YES"]}, convert_from_pv=bool) + + DISABLE = Record("DISABLE", {"type": "enum", "enums": ["NO", "YES"]}, convert_from_pv=bool) + + WAIT = Record("WAIT", float_pv_with_unit("s"), has_setpoint=True) + MIN_TIME_LAG = Record("MIN_TIME_LAG", float_pv_with_unit("ns"), has_setpoint=True) STATIC_PV_DATABASE = {} diff --git a/record.py b/record.py index 37c15ca..92e368e 100644 --- a/record.py +++ b/record.py @@ -1,49 +1,42 @@ """ Contains information used to define a PCASpy PV, its fields and how its values are read and set. """ -from typing import Dict, Optional, Callable + from enum import Enum +from typing import Callable, Dict, Optional + from pcaspy.alarm import AlarmStrings, SeverityStrings # pylint: disable=import-error PARAM_FIELDS_BINARY = { - 'type': 'enum', - 'enums': ["NO", "YES"], - 'info_field': { - 'archive': 'VAL', - 'INTEREST': 'HIGH' - } + "type": "enum", + "enums": ["NO", "YES"], + "info_field": {"archive": "VAL", "INTEREST": "HIGH"}, } INT_AS_FLOAT_PV = { - 'type': 'float', - 'prec': 0, - 'value': 0.0, - 'info_field': { - 'archive': 'VAL', - 'INTEREST': 'HIGH' - } + "type": "float", + "prec": 0, + "value": 0.0, + "info_field": {"archive": "VAL", "INTEREST": "HIGH"}, } CHAR_PV_FIELDS = { - 'type': 'char', - 'count': 400, - 'info_field': { - 'archive': 'VAL', - 'INTEREST': 'HIGH' - } + "type": "char", + "count": 400, + "info_field": {"archive": "VAL", "INTEREST": "HIGH"}, } -FLOAT_ARRAY = {'type': 'float', 'count': 400} +FLOAT_ARRAY = {"type": "float", "count": 400} # Truncate as enum can only contain 16 states -ALARM_STAT_PV_FIELDS = {'type': 'enum', 'enums': AlarmStrings[:16]} -ALARM_SEVR_PV_FIELDS = {'type': 'enum', 'enums': SeverityStrings} +ALARM_STAT_PV_FIELDS = {"type": "enum", "enums": AlarmStrings[:16]} +ALARM_SEVR_PV_FIELDS = {"type": "enum", "enums": 
SeverityStrings} def populate_enum_pv(enum: Enum): """ Creates an enum PV definition """ - return {'type': 'enum', 'enums': [member.name for member in enum]} + return {"type": "enum", "enums": [member.name for member in enum]} def float_pv_with_unit(unit: str): @@ -55,7 +48,13 @@ def float_pv_with_unit(unit: str): pv_definition (Dict): Contains the fields which define the PV """ - return {'type': 'float', 'unit': unit, 'info_field': {'archive': 'VAL', 'INTEREST': 'HIGH'}, 'prec': 3} + return { + "type": "float", + "unit": unit, + "info_field": {"archive": "VAL", "INTEREST": "HIGH"}, + "prec": 3, + } + # pylint: disable=unused-argument def null_device_setter(*args, **kwargs): @@ -89,11 +88,15 @@ class Record: """ # pylint: disable=too-many-arguments - def __init__(self, name: str, pv_definition: Dict, - has_setpoint: Optional[bool] = False, - convert_from_pv: Optional[Callable] = do_nothing, - convert_to_pv: Optional[Callable] = do_nothing, - device_setter: Optional[Callable] = null_device_setter): + def __init__( + self, + name: str, + pv_definition: Dict, + has_setpoint: Optional[bool] = False, + convert_from_pv: Optional[Callable] = do_nothing, + convert_to_pv: Optional[Callable] = do_nothing, + device_setter: Optional[Callable] = null_device_setter, + ): self.name = name self.pv_definition = pv_definition self.convert_from_pv = convert_from_pv @@ -126,22 +129,17 @@ def add_standard_fields(self) -> Dict: """ new_fields = {} - if 'count' in self.pv_definition: - new_fields.update({f"{self.name}.NELM": { - 'type': 'int', - 'value': self.pv_definition['count']} - }) - - new_fields.update({f"{self.name}.NORD": { - 'type': 'int', - 'value': 0} - }) - - if 'unit' in self.pv_definition: - new_fields.update({f"{self.name}.EGU": { - 'type': 'string', - 'value': self.pv_definition['unit']} - }) + if "count" in self.pv_definition: + new_fields.update( + {f"{self.name}.NELM": {"type": "int", "value": self.pv_definition["count"]}} + ) + + 
new_fields.update({f"{self.name}.NORD": {"type": "int", "value": 0}}) + + if "unit" in self.pv_definition: + new_fields.update( + {f"{self.name}.EGU": {"type": "string", "value": self.pv_definition["unit"]}} + ) return new_fields diff --git a/test_utils/test_data.py b/test_utils/test_data.py index 97deedd..783f518 100644 --- a/test_utils/test_data.py +++ b/test_utils/test_data.py @@ -1,6 +1,7 @@ """ Contains test data for the LSI Correlator unit tests """ + import os import numpy as np # pylint: disable=import-error @@ -8,14 +9,12 @@ current_path = os.path.dirname(os.path.realpath(__file__)) corr, lags = np.genfromtxt( - os.path.join(current_path, "correlation_function.csv"), - delimiter=",", - unpack=True) + os.path.join(current_path, "correlation_function.csv"), delimiter=",", unpack=True +) trace_a, trace_b, trace_time = np.genfromtxt( - os.path.join(current_path, "raw_data.csv"), - delimiter=',', - unpack=True) + os.path.join(current_path, "raw_data.csv"), delimiter=",", unpack=True +) test_data_file = os.path.join(current_path, "test_data.dat") diff --git a/tests.py b/tests.py index cadd84a..d23ff55 100644 --- a/tests.py +++ b/tests.py @@ -1,23 +1,19 @@ """ Contains Unit Tests for LSI Correlator """ -from tempfile import NamedTemporaryFile + import unittest -import numpy as np # pylint: disable=import-error +from tempfile import NamedTemporaryFile +import numpy as np # pylint: disable=import-error from correlator_driver_functions import LSiCorrelatorVendorInterface from pvdb import Records - from test_utils import test_data # pylint: disable=line-too-long, invalid-name -macros = { - "SIMULATE": "1", - "ADDR": "127.0.0.1", - "FIRMWARE_REVISION": "4.0.0.3" - } +macros = {"SIMULATE": "1", "ADDR": "127.0.0.1", "FIRMWARE_REVISION": "4.0.0.3"} class LSICorrelatorTests(unittest.TestCase): @@ -35,7 +31,9 @@ def setUp(self): self.device = self.driver.device self.mocked_api.disconnected = False - def 
test_GIVEN_device_disconnected_WHEN_data_taken_THEN_device_reads_no_data_and_disconnected(self): + def test_GIVEN_device_disconnected_WHEN_data_taken_THEN_device_reads_no_data_and_disconnected( + self, + ): """ Test that the device reads no data when disconnected """ @@ -56,7 +54,9 @@ def test_GIVEN_device_connected_WHEN_data_taken_THEN_device_reads_has_data_and_c self.assertTrue(self.driver.has_data) self.assertTrue(self.driver.is_connected) - def test_GIVEN_device_connected_WHEN_data_taken_THEN_driver_updated_with_correlation_and_time_lags(self): + def test_GIVEN_device_connected_WHEN_data_taken_THEN_driver_updated_with_correlation_and_time_lags( + self, + ): """ Test that the driver updates with the correlation and time lags """ @@ -68,7 +68,9 @@ def test_GIVEN_device_connected_WHEN_data_taken_THEN_driver_updated_with_correla self.assertTrue(np.allclose(self.driver.corr, test_data.corr_without_nans)) self.assertTrue(np.allclose(self.driver.lags, test_data.lags_without_nans)) - def test_GIVEN_device_has_data_WHEN_data_retrieved_from_device_THEN_time_trace_made_and_no_nans_in_correlation(self): + def test_GIVEN_device_has_data_WHEN_data_retrieved_from_device_THEN_time_trace_made_and_no_nans_in_correlation( + self, + ): """ Test that the time trace is made and no nans in correlation """ @@ -92,7 +94,9 @@ def test_WHEN_data_taken_THEN_start_called(self): self.driver.take_data(0) self.device.start.assert_called_once() - def test_WHEN_data_taken_AND_measurement_on_THEN_update_called_WHEN_measurement_off_THEN_update_not_called(self): + def test_WHEN_data_taken_AND_measurement_on_THEN_update_called_WHEN_measurement_off_THEN_update_not_called( + self, + ): """ Test that the update method is called when data is taken and measurement is on """ @@ -101,7 +105,9 @@ def test_WHEN_data_taken_AND_measurement_on_THEN_update_called_WHEN_measurement_ self.assertGreater(self.mocked_api.update_count, starting_update_count) 
self.assertFalse(self.mocked_api.update_called_when_measurement_not_on) - def test_WHEN_save_data_THEN_metadata_written_AND_correlation_data_written_AND_traces_written(self): + def test_WHEN_save_data_THEN_metadata_written_AND_correlation_data_written_AND_traces_written( + self, + ): """ Test that the metadata and correlation data are written and the traces are written """ @@ -117,17 +123,17 @@ def test_WHEN_save_data_THEN_metadata_written_AND_correlation_data_written_AND_t Records.LASER_WAVELENGTH.name: 642, Records.SOLVENT_REFRACTIVE_INDEX.name: 1.33, Records.SOLVENT_VISCOSITY.name: 1, - Records.SAMPLE_TEMP.name: 298 + Records.SAMPLE_TEMP.name: 298, } # Save data to two temporary files that are discarded - with NamedTemporaryFile(mode="w+") as user_file, NamedTemporaryFile(mode="w+") as archive_file: - + with NamedTemporaryFile(mode="w+") as user_file, NamedTemporaryFile( + mode="w+" + ) as archive_file: self.driver.save_data(0, user_file, archive_file, metadata) # Read test_data.dat with open(test_data.test_data_file, mode="r", encoding="utf-8") as test_data_file: - test_actual_data = test_data_file.read() # Go back to start of files after write and ignore firstline that has timestamp on it @@ -139,5 +145,5 @@ def test_WHEN_save_data_THEN_metadata_written_AND_correlation_data_written_AND_t self.assertEqual(test_actual_data, file.read()) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main()