diff --git a/.github/workflows/actions.yml b/.github/workflows/actions.yml
index e597372..6e10d33 100644
--- a/.github/workflows/actions.yml
+++ b/.github/workflows/actions.yml
@@ -51,7 +51,6 @@ jobs:
       - name: Generate coverage report
         run: |
           pip install pytest pytest-cov pywget
-          pytest --cov=./ --cov-report=xml
           pip install -r extras_require.txt
           pytest --cov=./ --cov-report=xml
diff --git a/SciFiReaders/readers/microscopy/em/tem/emd_reader.py b/SciFiReaders/readers/microscopy/em/tem/emd_reader.py
index 86a078c..198bafc 100644
--- a/SciFiReaders/readers/microscopy/em/tem/emd_reader.py
+++ b/SciFiReaders/readers/microscopy/em/tem/emd_reader.py
@@ -264,6 +264,18 @@ def get_image(self):
                                                                name='y', units='nm',
                                                                quantity='distance',
                                                                dimension_type='spatial'))
+
+            # for diffraction patterns
+            if '1/' in self.metadata['BinaryResult']['PixelUnitX']:
+                self.datasets[-1].set_dimension(0, sidpy.Dimension(np.arange(self.data_array.shape[0]) * (scale_x / 1e18),
+                                                                   name='u', units='1/nm',
+                                                                   quantity='reciprocal distance',
+                                                                   dimension_type='reciprocal'))
+                self.datasets[-1].set_dimension(1, sidpy.Dimension(np.arange(self.data_array.shape[1]) * (scale_y / 1e18),
+                                                                   name='v', units='1/nm',
+                                                                   quantity='reciprocal distance',
+                                                                   dimension_type='reciprocal'))
+
         else:
             # There is a problem with random access of data due to chunking in hdf5 files
             # Speed-up copied from hyperspy.ioplugins.EMDReader.FEIEMDReader
diff --git a/tests/readers/microscopy/spm/afm/test_gwy.py b/tests/readers/microscopy/spm/afm/test_gwy.py
new file mode 100644
index 0000000..f1e6e77
--- /dev/null
+++ b/tests/readers/microscopy/spm/afm/test_gwy.py
@@ -0,0 +1,33 @@
+import pytest
+import sidpy
+import SciFiReaders as sr
+from pywget import wget
+import os
+try:
+    import gwyfile
+except ImportError:
+    import pip
+    pip.main(['install', 'gwyfile'])
+root_path = "https://github.com/pycroscopy/SciFiDatasets/blob/main/data/microscopy/spm/afm/"
+
+@pytest.fixture
+def gwy_file():
+    file_path = 'PTO_110_Virgin0001.gwy'
+    wget.download(root_path + "PTO_110_Virgin0001.gwy?raw=true", out=file_path)
+    yield file_path
+    os.remove(file_path)
+
+def test_load_test_gwy_file(gwy_file):
+    data_translator = sr.GwyddionReader(gwy_file)
+    datasets = data_translator.read(verbose=False)
+    assert len(datasets) == 4, f"Number of datasets should be 4 but is instead {len(datasets)}"
+    channel_names = ['HeightRetrace', 'AmplitudeRetrace', 'DeflectionRetrace', 'PhaseRetrace']
+    channel_units = ['m', 'm', 'm', 'deg']
+    channel_labels = [['x (m)', 'y (m)'], ['x (m)', 'y (m)'], ['x (m)', 'y (m)'], ['x (m)', 'y (m)']]
+    for ind, dataset in enumerate(datasets):
+        assert isinstance(dataset, sidpy.sid.dataset.Dataset), f"Dataset No. {ind} not read in as sidpy dataset but was instead read in as {type(dataset)}"
+        assert dataset.shape[0] == 256, f"Dataset[{ind}] should be of size 256 but was read in as {dataset.shape[0]}"
+        assert isinstance(dataset._axes[0], sidpy.sid.dimension.Dimension), f"Dataset dimensions should be sidpy Dimension objects, but are instead {type(dataset._axes[0])}"
+        assert dataset.quantity == channel_names[ind], "Dataset has inconsistent channel names"
+        assert dataset.units == channel_units[ind], "Dataset has inconsistent unit names"
+        assert dataset.labels == channel_labels[ind], "Dataset has inconsistent channel labels"
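
For readers unfamiliar with the sidpy calls used in the emd_reader.py hunk, the sketch below shows the same reciprocal-dimension pattern in isolation. It is only an illustration: the metadata dict, the scale values, and the 256 x 256 array are invented stand-ins for what the reader actually extracts from a Velox EMD file.

import numpy as np
import sidpy

# Hypothetical stand-ins for values the reader pulls out of the EMD file.
metadata = {'BinaryResult': {'PixelUnitX': '1/m'}}
scale_x = scale_y = 5.0e15   # assumed pixel size after the reader's unit scaling

dataset = sidpy.Dataset.from_array(np.zeros((256, 256)))

if '1/' in metadata['BinaryResult']['PixelUnitX']:
    # Diffraction pattern: label both axes as reciprocal distances in 1/nm,
    # mirroring the branch added to get_image() in the diff above.
    dataset.set_dimension(0, sidpy.Dimension(np.arange(dataset.shape[0]) * (scale_x / 1e18),
                                             name='u', units='1/nm',
                                             quantity='reciprocal distance',
                                             dimension_type='reciprocal'))
    dataset.set_dimension(1, sidpy.Dimension(np.arange(dataset.shape[1]) * (scale_y / 1e18),
                                             name='v', units='1/nm',
                                             quantity='reciprocal distance',
                                             dimension_type='reciprocal'))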