From c25941bfd9a196e4fb5e19fae2fe7cf79454d423 Mon Sep 17 00:00:00 2001 From: leendertvanwolfswinkel Date: Mon, 23 Dec 2024 13:41:27 +0100 Subject: [PATCH] More pruning --- deps_write.py | 603 ------------------------ processing/grid_creation_algorithm.py | 141 ------ processing/providers.py | 2 - processing/schematisation_algorithms.py | 481 ------------------- tests/test_dependencies.py | 17 - 5 files changed, 1244 deletions(-) delete mode 100644 deps_write.py delete mode 100644 processing/grid_creation_algorithm.py diff --git a/deps_write.py b/deps_write.py deleted file mode 100644 index 9bb5b382..00000000 --- a/deps_write.py +++ /dev/null @@ -1,603 +0,0 @@ -"""Handle dependencies: installation and checking/logging. - -See :doc:`linked_external-dependencies_readme` -(``external-dependencies/README.rst``) for a full explanation of the -dependency handling. - -``python3 dependencies.py`` runs ``generate_constraints_txt()``: it generates -``constraints.txt``. - -:py:func:`ensure_everything_installed()` checks if :py:data:`DEPENDENCIES` are -installed and installs them if needed. - -:py:func:`check_importability()` double-checks if everything is importable. It also -logs the locations. - -Note that we use *logging* in ``check_importability()`` as we want to have the -result in the logfile. The rest of the module uses ``print()`` statements -because it gets executed before any logging has been configured. - -As we're called directly from ``__init__.py``, the imports should be -resticted. No qgis message boxes and so! - -""" -from collections import namedtuple -from pathlib import Path - -import importlib -import logging -import os -import pkg_resources -import platform -import setuptools # noqa: https://github.com/pypa/setuptools/issues/2993 -import shutil -import subprocess -import sys -import tarfile - - -# in case the dependency is a tar, the constraint should be the -# explicit version (e.g. "==3.8.0") -Dependency = namedtuple("Dependency", ["name", "package", "constraint", "tar"]) - -#: List of expected dependencies. -DEPENDENCIES = [ - Dependency("SQLAlchemy", "sqlalchemy", "==2.0.6", False), - Dependency("GeoAlchemy2", "geoalchemy2", "==0.15.*", False), - Dependency("pyqtgraph", "pyqtgraph", ">=0.13.2", False), - Dependency( - "importlib-resources", "importlib_resources", "", False - ), # backward compat. alembic - Dependency( - "zipp", "zipp", "", False - ), # backward compat. 
alembic - Dependency("Mako", "mako", "", False), - Dependency("cftime", "cftime", ">=1.5.0", False), # threedigrid[results] - Dependency("alembic", "alembic", "==1.8.*", False), - Dependency("threedigrid", "threedigrid", "==2.2.*", False), - Dependency("threedi-schema", "threedi_schema", "==0.230.0.dev0", False), - Dependency("threedidepth", "threedidepth", "==0.6.3", False), - Dependency("click", "click", ">=8.0", False), - Dependency("packaging", "packaging", "", False), - Dependency("typing-extensions", "typing_extensions", ">=4.2.0", False), - Dependency( - "colorama", "colorama", "", False - ), # dep of click and threedi-modelchecker (windows) - Dependency("networkx", "networkx", "", False), - Dependency("condenser", "condenser", ">=0.2.1", False), - Dependency("Shapely", "shapely", ">=2.0.0", False), - Dependency("threedigrid-builder", "threedigrid_builder", "==1.17.*", False), - Dependency("h5netcdf", "h5netcdf", "", False), - Dependency("greenlet", "greenlet", "!=0.4.17", False), - Dependency("threedi-mi-utils", "threedi_mi_utils", "==0.1.4", False), -] - -# On Windows, the hdf5 binary and thus h5py version depends on the QGis version -# QGis upgraded from hdf5 == 1.10.7 to hdf5 == 1.14.0 in QGis 3.28.6 -QGIS_VERSION = 34000 -if QGIS_VERSION < 32806 and platform.system() == "Windows": - SUPPORTED_HDF5_VERSIONS = ["1.10.7"] - H5PY_DEPENDENCY = Dependency("h5py", "h5py", "==2.10.0", False) -elif QGIS_VERSION >= 34000 and platform.system() == "Windows": - SUPPORTED_HDF5_VERSIONS = ["1.14.0"] - H5PY_DEPENDENCY = Dependency("h5py", "h5py", "==3.10.0", False) -else: - SUPPORTED_HDF5_VERSIONS = ["1.14.0"] - H5PY_DEPENDENCY = Dependency("h5py", "h5py", "==3.8.0", True) - -if QGIS_VERSION < 32811 and platform.system() == "Windows": - WINDOWS_PLATFORM_DEPENDENCIES = [ - Dependency("scipy", "scipy", "==1.6.2", True), - ] -elif QGIS_VERSION >= 34000 and platform.system() == "Windows": - WINDOWS_PLATFORM_DEPENDENCIES = [ - Dependency("scipy", "scipy", "==1.13.0", True), - ] -else: - WINDOWS_PLATFORM_DEPENDENCIES = [ - Dependency("scipy", "scipy", "==1.10.1", False), - ] - -# If you add a dependency, also adjust external-dependencies/populate.sh -INTERESTING_IMPORTS = ["numpy", "osgeo", "pip", "setuptools"] - -OUR_DIR = Path(__file__).parent - -logger = logging.getLogger(__name__) - - - - -def ensure_everything_installed(): - """Check if DEPENDENCIES are installed and install them if missing.""" - - _remove_old_distributions(DEPENDENCIES + WINDOWS_PLATFORM_DEPENDENCIES + [H5PY_DEPENDENCY], _prev_dependencies_target_dir()) - - # If required, create deps folder and prepend to the path - target_dir = _dependencies_target_dir(create=True) - if str(target_dir) not in sys.path: - print(f"Prepending {target_dir} to sys.path") - sys.path.insert(0, str(target_dir)) - - _refresh_python_import_mechanism() - - profile_python_names = [item.name for item in _dependencies_target_dir().iterdir()] - print("Contents of our deps dir:\n %s" % "\n ".join(profile_python_names)) - - print("sys.path:") - for directory in sys.path: - print(" - %s" % directory) - - _ensure_prerequisite_is_installed() - - missing = _check_presence(DEPENDENCIES) - restart_required = False - if platform.system() == "Windows": - missing += _check_presence(WINDOWS_PLATFORM_DEPENDENCIES) - if not _ensure_h5py_installed(): - restart_required = True - - if missing: - print("Missing dependencies:") - for deps in missing: - print(deps.name) - - try: - _install_dependencies(missing, target_dir=target_dir) - except RuntimeError: - # In case some 
libraries are already imported, we cannot uninstall - # because QGIS acquires a lock on dll/pyd-files. Therefore - # we need to restart Qgis. - restart_required = True - pass - - restart_marker = Path(target_dir / "restarted.marker") - - if restart_required or not restart_marker.exists(): - if _is_windows(): - pass - # Always update the import mechanism - _refresh_python_import_mechanism() - - else: - print("Dependencies up to date") - - -def _ensure_h5py_installed(): - """ - On Windows Qgis comes with a hdf5 version installed. - This plugin uses the h5py python package, which is built against a specific version - of HDF5. The Qgis HDF5 version and the HDF5 version of the h5py package must be the - same, otherwise it will not work. In the external-dependencies folder we supply a - Windows version of h5py built using HDF5 1.10.7. On pypi there is no h5py 2.10.0 package available - built with Python 3.9 and HDF5 1.10.7. We need creat such wheel ourselves. - - The following situations can occur: - - | QGIS HDF5 = 1.10.7 | QGIS HDF5 = 1.14.0 - -----------------------------------|---------------------|--------------- - h5py build with 1.10.7 | A: Good | B: Qgis crash - h5py build with 1.14.0 | A: Qgis crash | B: Good - h5py build with other HDF5 version | A: Qgis crash | B: Qgis crash - - The different situations are marked A, B, and C in the table above. - - In version 3.28.6, QGis updated their HDF5.dll binary from 1.10.7 to 1.14.0. - - When the h5py is built for the qgis-included HDF5 DLL, - everything is good and the plugin can be loaded without any problems. - - A crash occurs when a user upgrades/downgrades their Qgis version when - the ThreediToolbox is already installed with a specific version of h5py. - In these cases we also need to upgrade/downgrade the h5py version installed with - ThreediToolbox. - - We use the H5pyMarker to mark the installed h5py version. This is because we cannot check the version - by importing h5py, as Qgis will crash if the HDF5 and h5py binaries do not match. - """ - if QGIS_VERSION < 32806 and platform.system() == "Windows": - hdf5_version = "1.10.7" - else: - hdf5_version = "1.14.0" - h5py_missing = _check_presence([H5PY_DEPENDENCY]) - marker_version = H5pyMarker.version() - if h5py_missing: - return _install_h5py(hdf5_version) - - if hdf5_version in SUPPORTED_HDF5_VERSIONS: - if marker_version == hdf5_version: - # Do nothing - pass - else: - return _install_h5py(hdf5_version) - - return True - - -def _install_h5py(hdf5_version: str): - if hdf5_version not in SUPPORTED_HDF5_VERSIONS: - # raise an error because we cannot continue - message = ( - f"Unsupported HDF5 version: {hdf5_version}. " - f"The following HDF5 versions are supported: {SUPPORTED_HDF5_VERSIONS}" - ) - raise RuntimeError(message) - - # In case the (old) h5py library is already imported, we cannot uninstall - # h5py because the windows acquires a lock on the *.dll-files. Therefore - # we need to restart Qgis. - # _uninstall_dependency(H5PY_DEPENDENCY) - try: - _install_dependencies([H5PY_DEPENDENCY], target_dir=_dependencies_target_dir()) - except RuntimeError: - return False - H5pyMarker.create(hdf5_version) - return True - - -class H5pyMarker: - """Marker indicating with which HDF5 binaries the h5py is installed. 
- - Currently, there is 1 supported HDF5 version: - - 1.10.7: use h5py from the external-dependencies folder in this repo - """ - - H5PY_MARKER = OUR_DIR / ".h5py_marker" - - @classmethod - def version(cls) -> str: - if cls.H5PY_MARKER.exists(): - with open(cls.H5PY_MARKER, "r") as marker: - version = marker.readline() - return version - else: - return "" - - @classmethod - def create(cls, version: str): - with open(cls.H5PY_MARKER, "w") as marker: - marker.write(version) - - @classmethod - def remove(cls): - cls.H5PY_MARKER.unlink() - - -def _ensure_prerequisite_is_installed(prerequisite="pip"): - """Check the basics: pip. - - People using OSGEO custom installs sometimes exclude those - dependencies. Our installation scripts fail, then, because of the missing - 'pip'. - - """ - try: - importlib.import_module(prerequisite) - except Exception as e: - msg = ( - "%s. 'pip', which we need, is missing. It is normally included with " - "python. You are *probably* using a custom minimal OSGEO release. " - "Please re-install with 'pip' included." - ) % e - print(msg) - raise RuntimeError(msg) - - -def _dependencies_target_dir(our_dir=OUR_DIR, create=False) -> Path: - """Return (and create) the desired deps folder - - This is the 'deps' subdirectory of the plugin home folder - - """ - target_dir = our_dir / "deps" - if not target_dir.exists() and create: - print(f"Creating target dir {target_dir}") - target_dir.mkdir() - - return target_dir - - -def _prev_dependencies_target_dir(our_dir=OUR_DIR) -> Path: - """Return python dir inside our profile - - Return two dirs up if we're inside the plugins dir. This was the - previous installation folder of the dependencies. - """ - if "plugins" in str(our_dir).lower(): - return OUR_DIR.parent.parent - - -def _remove_old_distributions(dependencies, path): - """Remove old distributions of dependencies - - In previous version of the Toolbox, depencencies were - stored in the users 'python' folder. This caused - versioning conflicts (as these dependencies were - not removed when the plugin was uninstalled). - - Removes all folders and files that contain the - dependency name or package name - """ - succeeded = True - files_to_remove = [ - node - for node in os.listdir(str(path)) - for dependency in dependencies - if (dependency.package in node or dependency.name in node) - ] - - for f in files_to_remove: - dep_path = str(path / f) - - try: - if os.path.exists(dep_path): - if os.path.isfile(dep_path): - print(f"Deleting file {f} from {path}") - os.remove(dep_path) - else: - print(f"Deleting folder {f} from {path}") - shutil.rmtree(dep_path) - except PermissionError as e: - print(f"Unable to remove {dep_path} ({str(e)})") - succeeded = False - - return succeeded - - -def check_importability(): - """Check if the dependendies are importable and log the locations. - - If something is not importable, which should not happen, it raises an - ImportError automatically. Which is exactly what we want, because we - cannot continue. 
- - """ - packages = [dependency.package for dependency in DEPENDENCIES] - packages += INTERESTING_IMPORTS - logger.info("sys.path:\n %s", "\n ".join(sys.path)) - deps_in_target_dir = [item.name for item in _dependencies_target_dir().iterdir()] - logger.info( - "Contents of our dependency dir:\n %s", - "\n ".join(deps_in_target_dir), - ) - for package in packages: - imported_package = importlib.import_module(package) - logger.info( - "Import '%s' found at \n '%s'", package, imported_package.__file__ - ) - - -def _uninstall_dependency(dependency): - print("Trying to uninstalling dependency %s" % dependency.name) - if dependency.tar: - # just remove the folders - path = _dependencies_target_dir() - items_to_remove = [node for node in os.listdir(str(path)) if (dependency.package in node or dependency.name in node)] - for f in items_to_remove: - dep_path = str(path / f) - - try: - if os.path.exists(dep_path): - if os.path.isfile(dep_path): - print(f"Deleting file {f} from {path}") - os.remove(dep_path) - else: - print(f"Deleting folder {f} from {path}") - shutil.rmtree(dep_path) - except PermissionError as e: - print(f"Unable to remove {dep_path} ({str(e)})") - return - - python_interpreter = _get_python_interpreter() - startupinfo = None - if _is_windows(): - startupinfo = subprocess.STARTUPINFO() - # Prevents terminal screens from popping up - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - process = subprocess.Popen( - [ - python_interpreter, - "-m", - "pip", - "uninstall", - "--yes", - (dependency.name), - ], - universal_newlines=True, - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - startupinfo=startupinfo, - ) - # The input/output/error stream handling is a bit involved, but it is - # necessary because of a python bug on windows 7, see - # https://bugs.python.org/issue3905 . - i, o, e = (process.stdin, process.stdout, process.stderr) - i.close() - result = o.read() + e.read() - o.close() - e.close() - print(result) - exit_code = process.wait() - if exit_code: - print("Uninstalling %s failed" % dependency.name) - - -def _install_dependencies(dependencies, target_dir): - if not dependencies: - return - - python_interpreter = _get_python_interpreter() - base_command = [ - python_interpreter, - "-m", - "pip", - "install", - "--upgrade", - "--no-deps", - "--find-links", - str(OUR_DIR / "external-dependencies"), - "--no-index", - "--target", - str(target_dir), - ] - - dialog = None - bar = None - startupinfo = None - if _is_windows(): - pass - - for count, dependency in enumerate(dependencies): - _uninstall_dependency(dependency) - print("Installing '%s' into %s" % (dependency.name, target_dir)) - if dialog: - dialog.setLabelText(f"Installing {dependency.name}") - - if dependency.tar: - # Just extract the tar into the target folder, we already now it exists - tar_path = f"{(OUR_DIR / 'external-dependencies')}/{dependency.name}-{dependency.constraint[2:]}.tar" - tar = tarfile.open(tar_path) - tar.extractall(str(target_dir)) - tar.close() - else: - command = base_command + [dependency.name + dependency.constraint] - - process = subprocess.Popen( - command, - universal_newlines=True, - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - startupinfo=startupinfo, - ) - # The input/output/error stream handling is a bit involved, but it is - # necessary because of a python bug on windows 7, see - # https://bugs.python.org/issue3905 . 
- i, o, e = (process.stdin, process.stdout, process.stderr) - i.close() - result = o.read() + e.read() - o.close() - e.close() - print(result) - exit_code = process.wait() - if exit_code: - if dialog: - dialog.close() - - raise RuntimeError( - f"Installing {dependency.name} failed ({exit_code}) ({result})" - ) - - print("Installed %s into %s" % (dependency.name, target_dir)) - if dependency.package in sys.modules: - print("Unloading old %s module" % dependency.package) - del sys.modules[dependency.package] - # check_importability() will be called soon, which will import them again. - # By removing them from sys.modules, we prevent older versions from - # sticking around. - - if bar: - bar.setValue(int((count / len(dependencies)) * 100)) - bar.update() - - if dialog: - dialog.close() - - -def _is_windows(): - """Return whether we are starting from QGIS on Windows.""" - executable = sys.executable - _, filename = os.path.split(executable) - if "python3" in filename.lower(): - return False - elif "qgis" in filename.lower(): - if platform.system().lower() == "darwin": - return False - else: - return True - else: - raise EnvironmentError("Unexpected value for sys.executable: %s" % executable) - - -def _get_python_interpreter(): - """Return the path to the python3 interpreter. - - Under linux sys.executable is set to the python3 interpreter used by Qgis. - However, under Windows/Mac this is not the case and sys.executable refers to the - Qgis start-up script. - """ - interpreter = None - executable = sys.executable - directory, _ = os.path.split(executable) - if _is_windows(): - interpreter = os.path.join(directory, "python3.exe") - elif platform.system().lower() == "darwin": - interpreter = os.path.join(directory, "bin", "python3") - else: - interpreter = executable - - assert os.path.exists(interpreter) # safety check - return interpreter - - -def _check_presence(dependencies): - """Check if all dependencies are present. Return missing dependencies.""" - missing = [] - for dependency in dependencies: - requirement = dependency.name + dependency.constraint - print("Checking presence of %s..." % requirement) - try: - result = pkg_resources.require(requirement) - print("Requirement %s found: %s" % (requirement, result)) - except pkg_resources.DistributionNotFound as e: - print( - "Dependency '%s' (%s) not found (%s)" - % (dependency.name, dependency.constraint, str(e)) - ) - missing.append(dependency) - except pkg_resources.VersionConflict as e: - print( - 'Version conflict:\n' - f' Installed: {e.dist}\n' - f' Required: {e.req}' - ) - if isinstance(e, pkg_resources.ContextualVersionConflict): - print(f' By: {e.required_by}') - missing.append(dependency) - except Exception as e: - print( - "Installing dependency '%s' (%s) went wrong (%s)" - % (dependency.name, dependency.constraint, str(e)) - ) - missing.append(dependency) - return missing - - -def _refresh_python_import_mechanism(): - """Refresh the import mechanism. - - This is required when deps are dynamically installed/removed. The modules - 'importlib' and 'pkg_resources' need to update their internal data structures. - """ - # This function should be called if any modules are created/installed while your - # program is running to guarantee all finders will notice the new module’s existence. 
- importlib.invalidate_caches() - - # https://stackoverflow.com/questions/58612272/pkg-resources-get-distributionmymodule-version-not-updated-after-reload - # Apparantely pkg_resources needs to be reloaded to be up-to-date with newly installed packages - importlib.reload(pkg_resources) - - -def generate_constraints_txt(target_dir=OUR_DIR): - """Called from the ``__main__`` to generate ``constraints.txt``.""" - constraints_file = target_dir / "constraints.txt" - lines = ["# Generated by dependencies.py"] - lines += [(dependency.name + dependency.constraint) for dependency in DEPENDENCIES] - lines.append("") - constraints_file.write_text("\n".join(lines)) - print("Wrote constraints to %s" % constraints_file) - - -if __name__ == "__main__": # pragma: no cover - generate_constraints_txt() diff --git a/processing/grid_creation_algorithm.py b/processing/grid_creation_algorithm.py deleted file mode 100644 index bcef07c3..00000000 --- a/processing/grid_creation_algorithm.py +++ /dev/null @@ -1,141 +0,0 @@ -import os -from collections import OrderedDict -from qgis.PyQt.QtCore import QCoreApplication -from qgis.core import ( - QgsProcessingAlgorithm, - QgsProcessingException, - QgsProcessingParameterFile, - QgsProcessingParameterFileDestination, - QgsVectorLayer, -) -from threedi_results_analysis.processing.processing_utils import gridadmin2geopackage, load_computational_layers -import logging -import io - - -class ThreeDiGenerateCompGridAlgorithm(QgsProcessingAlgorithm): - """ - Generate a gridadmin.h5 file out of Spatialite database and convert it to GeoPackage. - Created layers will be added to the map canvas after successful conversion. - """ - - INPUT_SPATIALITE = "INPUT_SPATIALITE" - OUTPUT = "OUTPUT" - LAYERS_TO_ADD = OrderedDict() - - def flags(self): - return super().flags() | QgsProcessingAlgorithm.FlagNoThreading - - def tr(self, string): - return QCoreApplication.translate("Processing", string) - - def createInstance(self): - return ThreeDiGenerateCompGridAlgorithm() - - def name(self): - return "threedi_generate_computational_grid" - - def displayName(self): - return self.tr("Computational grid from schematisation") - - def group(self): - return self.tr("Computational Grid") - - def groupId(self): - return "computational_grid" - - def shortHelpString(self): - return self.tr("Generate computational grid from schematization") - - def initAlgorithm(self, config=None): - - self.addParameter( - QgsProcessingParameterFile( - self.INPUT_SPATIALITE, - self.tr("Input SpatiaLite file"), - behavior=QgsProcessingParameterFile.File, - extension="sqlite", - ) - ) - - self.addParameter( - QgsProcessingParameterFileDestination( - self.OUTPUT, self.tr("Output computational grid file"), fileFilter="*.gpkg", - ) - ) - - def processAlgorithm(self, parameters, context, feedback): - input_spatialite = self.parameterAsString(parameters, self.INPUT_SPATIALITE, context) - if not input_spatialite: - raise QgsProcessingException(self.invalidSourceError(parameters, self.INPUT_SPATIALITE)) - - uri = input_spatialite + "|layername=v2_global_settings" - feedback.pushInfo(f"Reading DEM settings from: {uri}") - settings_lyr = QgsVectorLayer(uri, "glob_settings", "ogr") - if not settings_lyr.isValid(): - err = f"Global Spatialite settings table could not be loaded from {uri}\n" "Check your Spatialite file." 
- raise QgsProcessingException(f"Incorrect input Spatialite file:\n{err}") - try: - settings_feat = next(settings_lyr.getFeatures()) - except StopIteration: - err = f"No global settings entries in {uri}" "Check your Spatialite file." - raise QgsProcessingException(f"Incorrect input Spatialite file:\n{err}") - set_dem_rel_path = settings_feat["dem_file"] - if set_dem_rel_path: - input_spatialite_dir = os.path.dirname(input_spatialite) - set_dem_path = os.path.join(input_spatialite_dir, set_dem_rel_path) - feedback.pushInfo(f"DEM raster referenced in Spatialite settings:\n{set_dem_path}") - if not os.path.exists(set_dem_path): - set_dem_path = None - info = "The DEM referenced in the Spatialite settings doesn't exist - skipping." - feedback.pushInfo(info) - else: - set_dem_path = None - info = "There is no DEM file referenced in the Spatialite settings - skipping." - feedback.pushInfo(info) - output_gpkg_file = self.parameterAsFileOutput(parameters, self.OUTPUT, context) - if output_gpkg_file is None: - raise QgsProcessingException(self.invalidSourceError(parameters, self.OUTPUT)) - # If user is writing to the temporary file then QGIS adds '.file' extension, so we need to change it. - output_file_without_extension = output_gpkg_file.rsplit(".", 1)[0] - gridadmin_file = f"{output_file_without_extension}.h5" - if output_gpkg_file.endswith(".file"): - output_gpkg_file = f"{output_file_without_extension}.gpkg" - - def progress_rep(progress, info): - feedback.setProgress(int(progress * 100)) - feedback.pushInfo(info) - - # Capture threedigridbuilder logging - # logger = logging.getLogger("threedigrid_builder.grid.connection_nodes") - assert logger.hasHandlers() # Check whether we have the right one - log_capture_string = io.StringIO() - ch = logging.StreamHandler(log_capture_string) - ch.setFormatter(logging.Formatter(fmt='%(levelname)-8s :: %(message)s')) - ch.setLevel(logging.DEBUG) - logger.addHandler(ch) - try: - make_gridadmin(input_spatialite, set_dem_path, gridadmin_file, progress_callback=progress_rep) - except SchematisationError as e: - err = f"Creating grid file failed with the following error: {repr(e)}" - raise QgsProcessingException(err) - finally: - # Pull the contents back into a string and close the stream - log_contents = log_capture_string.getvalue() - log_capture_string.close() - logger.removeHandler(ch) - if log_contents: - feedback.pushWarning("3Di gridbuilder log:") - feedback.pushWarning(log_contents) - - feedback.setProgress(0) - gpkg_layers = gridadmin2geopackage(gridadmin_file, output_gpkg_file, context, feedback) - self.LAYERS_TO_ADD.update(gpkg_layers) - - return {self.OUTPUT: output_gpkg_file} - - def postProcessAlgorithm(self, context, feedback): - project = context.project() - load_computational_layers(self.LAYERS_TO_ADD, project) - self.LAYERS_TO_ADD.clear() - return {} diff --git a/processing/providers.py b/processing/providers.py index ad1cecd5..47a1a7ff 100644 --- a/processing/providers.py +++ b/processing/providers.py @@ -3,7 +3,6 @@ from qgis.PyQt.QtGui import QIcon # from threedi_results_analysis.processing.dwf_calculation_algorithm import DWFCalculatorAlgorithm from threedi_results_analysis.processing.gpkg_conversion_algorithm import ThreeDiConvertToGpkgAlgorithm -from threedi_results_analysis.processing.grid_creation_algorithm import ThreeDiGenerateCompGridAlgorithm from threedi_results_analysis.processing.cross_sectional_discharge_algorithm import CrossSectionalDischargeAlgorithm from threedi_results_analysis.processing.leak_detector_algorithms import ( 
DetectLeakingObstaclesAlgorithm, @@ -41,7 +40,6 @@ def loadAlgorithms(self, *args, **kwargs): self.addAlgorithm(MigrateAlgorithm()) # self.addAlgorithm(ImportHydXAlgorithm()) self.addAlgorithm(ThreeDiConvertToGpkgAlgorithm()) - self.addAlgorithm(ThreeDiGenerateCompGridAlgorithm()) # self.addAlgorithm(ImportSufHydAlgorithm()) # self.addAlgorithm(GuessIndicatorAlgorithm()) self.addAlgorithm(CrossSectionalDischargeAlgorithm()) diff --git a/processing/schematisation_algorithms.py b/processing/schematisation_algorithms.py index d55c33dc..580c6649 100644 --- a/processing/schematisation_algorithms.py +++ b/processing/schematisation_algorithms.py @@ -11,7 +11,6 @@ *************************************************************************** """ -import csv import os import shutil @@ -30,15 +29,11 @@ from threedi_results_analysis.utils.utils import backup_sqlite from qgis.PyQt.QtCore import QCoreApplication from qgis.core import ( - QgsProject, QgsProcessingAlgorithm, # QgsProcessingException, - QgsProcessingParameterBoolean, QgsProcessingParameterFile, - QgsProcessingParameterFileDestination, # QgsProcessingParameterFolderDestination, # QgsProcessingParameterString, - QgsVectorLayer, ) @@ -128,479 +123,3 @@ def tr(self, string): def createInstance(self): return MigrateAlgorithm() - - -# class CheckSchematisationAlgorithm(QgsProcessingAlgorithm): - # """ - # Run the schematisation checker - # """ - - # INPUT = "INPUT" - # OUTPUT = "OUTPUT" - # ADD_TO_PROJECT = "ADD_TO_PROJECT" - - # def initAlgorithm(self, config): - # self.addParameter( - # QgsProcessingParameterFile( - # self.INPUT, self.tr("3Di Spatialite"), extension="sqlite" - # ) - # ) - - # self.addParameter( - # QgsProcessingParameterFileDestination( - # self.OUTPUT, self.tr("Output"), fileFilter="csv" - # ) - # ) - - # self.addParameter( - # QgsProcessingParameterBoolean( - # self.ADD_TO_PROJECT, self.tr("Add result to project"), defaultValue=True - # ) - # ) - - # def processAlgorithm(self, parameters, context, feedback): - # self.add_to_project = self.parameterAsBoolean( - # parameters, self.ADD_TO_PROJECT, context - # ) - # self.output_file_path = None - # input_filename = self.parameterAsFile(parameters, self.INPUT, context) - # threedi_db = get_threedi_database(filename=input_filename, feedback=feedback) - # if not threedi_db: - # return {self.OUTPUT: None} - # try: - # model_checker = ThreediModelChecker(threedi_db) - # except errors.MigrationMissingError: - # feedback.pushWarning( - # "The selected 3Di model does not have the latest migration. Please " - # "migrate your model to the latest version." 
- # ) - # return {self.OUTPUT: None} - # schema = threedi_db.schema - # schema.set_spatial_indexes() - # generated_output_file_path = self.parameterAsFileOutput( - # parameters, self.OUTPUT, context - # ) - # self.output_file_path = f"{os.path.splitext(generated_output_file_path)[0]}.csv" - # session = model_checker.db.get_session() - # session.model_checker_context = model_checker.context - # total_checks = len(model_checker.config.checks) - # progress_per_check = 100.0 / total_checks - # checks_passed = 0 - # try: - # with open(self.output_file_path, "w", newline="") as output_file: - # writer = csv.writer(output_file) - # writer.writerow( - # [ - # "level", - # "error_code", - # "id", - # "table", - # "column", - # "value", - # "description", - # ] - # ) - # for i, check in enumerate(model_checker.checks(level="info")): - # model_errors = check.get_invalid(session) - # for error_row in model_errors: - # writer.writerow( - # [ - # check.level.name, - # check.error_code, - # error_row.id, - # check.table.name, - # check.column.name, - # getattr(error_row, check.column.name), - # check.description(), - # ] - # ) - # checks_passed += 1 - # feedback.setProgress(int(checks_passed * progress_per_check)) - # except PermissionError: - # # PermissionError happens for example when a user has the file already open - # # with Excel on Windows, which locks the file. - # feedback.pushWarning( - # f"Not enough permissions to write the file '{self.output_file_path}'.\n\n" - # "The file may be used by another program. Please close all " - # "other programs using the file or select another output " - # "file." - # ) - # return {self.OUTPUT: None} - - # return {self.OUTPUT: self.output_file_path} - - # def postProcessAlgorithm(self, context, feedback): - # if self.add_to_project: - # if self.output_file_path: - # result_layer = QgsVectorLayer( - # self.output_file_path, "3Di schematisation errors" - # ) - # QgsProject.instance().addMapLayer(result_layer) - # return {self.OUTPUT: self.output_file_path} - - # def name(self): - # """ - # Returns the algorithm name, used for identifying the algorithm. This - # string should be fixed for the algorithm, and must not be localised. - # The name should be unique within each provider. Names should contain - # lowercase alphanumeric characters only and no spaces or other - # formatting characters. - # """ - # return "check_schematisation" - - # def displayName(self): - # """ - # Returns the translated algorithm name, which should be used for any - # user-visible display of the algorithm name. - # """ - # return self.tr("Check Schematisation") - - # def group(self): - # """ - # Returns the name of the group this algorithm belongs to. This string - # should be localised. - # """ - # return self.tr(self.groupId()) - - # def groupId(self): - # """ - # Returns the unique ID of the group this algorithm belongs to. This - # string should be fixed for the algorithm, and must not be localised. - # The group id should be unique within each provider. Group id should - # contain lowercase alphanumeric characters only and no spaces or other - # formatting characters. 
- # """ - # return "Schematisation" - - # def tr(self, string): - # return QCoreApplication.translate("Processing", string) - - # def createInstance(self): - # return CheckSchematisationAlgorithm() - - -# class ImportSufHydAlgorithm(QgsProcessingAlgorithm): -# """ -# Import data from SufHyd to a 3Di Spatialite -# """ -# -# INPUT_SUFHYD_FILE = "INPUT_SUFHYD_FILE" -# TARGET_SQLITE = "TARGET_SQLITE" -# -# def initAlgorithm(self, config): -# self.addParameter( -# QgsProcessingParameterFile(self.INPUT_SUFHYD_FILE, self.tr("Sufhyd file"), extension="hyd")) -# -# self.addParameter( -# QgsProcessingParameterFile( -# self.TARGET_SQLITE, -# "Target 3Di Sqlite", -# extension="sqlite" -# ) -# ) -# -# def processAlgorithm(self, parameters, context, feedback): -# sufhyd_file = self.parameterAsString(parameters, self.INPUT_SUFHYD_FILE, context) -# out_path = self.parameterAsFile(parameters, self.TARGET_SQLITE, context) -# threedi_db = get_threedi_database(filename=out_path, feedback=feedback) -# if not threedi_db: -# return {} -# try: -# schema = threedi_db.schema -# schema.validate_schema() -# -# except errors.MigrationMissingError: -# feedback.pushWarning( -# "The selected 3Di spatialite does not have the latest database schema version. Please migrate this " -# "spatialite and try again: Processing > Toolbox > 3Di > Schematisation > Migrate spatialite" -# ) -# return {} -# -# importer = Importer(sufhyd_file, threedi_db) -# importer.run_import() -# -# return {} -# -# def name(self): -# return "import_sufhyd" -# -# def displayName(self): -# return self.tr("Import Sufhyd") -# -# def group(self): -# return self.tr(self.groupId()) -# -# def groupId(self): -# return "Schematisation" -# -# def tr(self, string): -# return QCoreApplication.translate("Processing", string) -# -# def createInstance(self): -# return ImportSufHydAlgorithm() -# -# -# class GuessIndicatorAlgorithm(QgsProcessingAlgorithm): -# """ -# Guess manhole indicator, pipe friction and manhole storage -# area. -# """ -# -# TARGET_SQLITE = "TARGET_SQLITE" -# PIPE_FRICTION = "PIPE_FRICTION" -# MANHOLE_INDICATOR = "MANHOLE_INDICATOR" -# MANHOLE_AREA = "MANHOLE_AREA" -# ONLY_NULL_FIELDS = "ONLY_NULL_FIELDS" -# -# def initAlgorithm(self, config): -# -# self.addParameter( -# QgsProcessingParameterFile( -# self.TARGET_SQLITE, -# "Target 3Di Sqlite", -# extension="sqlite" -# ) -# ) -# -# self.addParameter( -# QgsProcessingParameterBoolean( -# name=self.PIPE_FRICTION, -# description="Pipe friction", -# defaultValue=True, -# ) -# ) -# -# self.addParameter( -# QgsProcessingParameterBoolean( -# name=self.MANHOLE_INDICATOR, -# description="Manhole indicator", -# defaultValue=True, -# ) -# ) -# -# self.addParameter( -# QgsProcessingParameterBoolean( -# name=self.MANHOLE_AREA, -# description="Manhole area (only fills NULL fields)", -# defaultValue=True, -# ) -# ) -# -# self.addParameter( -# QgsProcessingParameterBoolean( -# name=self.ONLY_NULL_FIELDS, -# description="Only fill NULL fields", -# defaultValue=True, -# ) -# ) -# -# def processAlgorithm(self, parameters, context, feedback): -# out_path = self.parameterAsFile(parameters, self.TARGET_SQLITE, context) -# threedi_db = get_threedi_database(filename=out_path, feedback=feedback) -# if not threedi_db: -# return {} -# try: -# schema = threedi_db.schema -# schema.validate_schema() -# -# except errors.MigrationMissingError: -# feedback.pushWarning( -# "The selected 3Di spatialite does not have the latest database schema version. 
Please migrate this " -# "spatialite and try again: Processing > Toolbox > 3Di > Schematisation > Migrate spatialite" -# ) -# return {} -# -# checks = [] -# -# if parameters[self.MANHOLE_INDICATOR]: -# checks.append("manhole_indicator") -# -# if parameters[self.PIPE_FRICTION]: -# checks.append("pipe_friction") -# -# if parameters[self.MANHOLE_AREA]: -# checks.append("manhole_area") -# -# guesser = guess_indicators_utils.Guesser(threedi_db) -# msg = guesser.run(checks, parameters[self.ONLY_NULL_FIELDS]) -# -# feedback.pushInfo(f"Guess indicators ready: {msg}") -# -# return {} -# -# def name(self): -# return "guess_indicators" -# -# def displayName(self): -# return self.tr("Guess Indicators") -# -# def group(self): -# return self.tr(self.groupId()) -# -# def groupId(self): -# return "Schematisation" -# -# def tr(self, string): -# return QCoreApplication.translate("Processing", string) -# -# def createInstance(self): -# return GuessIndicatorAlgorithm() -# -# -# class ImportHydXAlgorithm(QgsProcessingAlgorithm): -# """ -# Import data from GWSW HydX to a 3Di Spatialite -# """ -# -# INPUT_DATASET_NAME = "INPUT_DATASET_NAME" -# HYDX_DOWNLOAD_DIRECTORY = "HYDX_DOWNLOAD_DIRECTORY" -# INPUT_HYDX_DIRECTORY = "INPUT_HYDX_DIRECTORY" -# TARGET_SQLITE = "TARGET_SQLITE" -# -# def initAlgorithm(self, config): -# self.addParameter( -# QgsProcessingParameterFile( -# self.TARGET_SQLITE, "Target 3Di Spatialite", extension="sqlite" -# ) -# ) -# -# self.addParameter( -# QgsProcessingParameterFile( -# self.INPUT_HYDX_DIRECTORY, -# "GWSW HydX directory (local)", -# behavior=QgsProcessingParameterFile.Folder, -# optional=True, -# ) -# ) -# -# self.addParameter( -# QgsProcessingParameterString( -# self.INPUT_DATASET_NAME, "GWSW dataset name (online)", optional=True -# ) -# ) -# -# self.addParameter( -# QgsProcessingParameterFolderDestination( -# self.HYDX_DOWNLOAD_DIRECTORY, -# "Destination directory for GWSW HydX dataset download", -# optional=True, -# ) -# ) -# -# def processAlgorithm(self, parameters, context, feedback): -# hydx_dataset_name = self.parameterAsString( -# parameters, self.INPUT_DATASET_NAME, context -# ) -# hydx_download_dir = self.parameterAsString( -# parameters, self.HYDX_DOWNLOAD_DIRECTORY, context -# ) -# hydx_path = self.parameterAsString( -# parameters, self.INPUT_HYDX_DIRECTORY, context -# ) -# out_path = self.parameterAsFile(parameters, self.TARGET_SQLITE, context) -# threedi_db = get_threedi_database(filename=out_path, feedback=feedback) -# if not threedi_db: -# raise QgsProcessingException( -# f"Unable to connect to 3Di spatialite '{out_path}'" -# ) -# try: -# schema = threedi_db.schema -# schema.validate_schema() -# -# except errors.MigrationMissingError: -# raise QgsProcessingException( -# "The selected 3Di spatialite does not have the latest database schema version. Please migrate this " -# "spatialite and try again: Processing > Toolbox > 3Di > Schematisation > Migrate spatialite" -# ) -# if not (hydx_dataset_name or hydx_path): -# raise QgsProcessingException( -# "Either 'GWSW HydX directory (local)' or 'GWSW dataset name (online)' must be filled in!" -# ) -# if hydx_dataset_name and hydx_path: -# feedback.pushWarning( -# "Both 'GWSW dataset name (online)' and 'GWSW HydX directory (local)' are filled in. " -# "'GWSW dataset name (online)' will be ignored. This dataset will not be downloaded." 
-# ) -# elif hydx_dataset_name: -# try: -# hydx_download_path = Path(hydx_download_dir) -# hydx_download_dir_is_valid = hydx_download_path.is_dir() -# except TypeError: -# hydx_download_dir_is_valid = False -# if parameters[self.HYDX_DOWNLOAD_DIRECTORY] == "TEMPORARY_OUTPUT": -# hydx_download_dir_is_valid = True -# if not hydx_download_dir_is_valid: -# raise QgsProcessingException( -# f"'Destination directory for HydX dataset download' ({hydx_download_path}) is not a valid directory" -# ) -# hydx_path = download_hydx( -# dataset_name=hydx_dataset_name, -# target_directory=hydx_download_path, -# wait_times=[0.1, 1, 2, 3, 4, 5, 10], -# feedback=feedback, -# ) -# # hydx_path will be None if user has canceled the process during download -# if feedback.isCanceled(): -# raise QgsProcessingException("Process canceled") -# feedback.pushInfo(f"Starting import of {hydx_path} to {out_path}") -# log_path = Path(out_path).parent / "import_hydx.log" -# write_logging_to_file(log_path) -# feedback.pushInfo(f"Logging will be written to {log_path}") -# run_import_export(export_type="threedi", hydx_path=hydx_path, out_path=out_path) -# return {} -# -# def name(self): -# """ -# Returns the algorithm name, used for identifying the algorithm. This -# string should be fixed for the algorithm, and must not be localised. -# The name should be unique within each provider. Names should contain -# lowercase alphanumeric characters only and no spaces or other -# formatting characters. -# """ -# return "import_hydx" -# -# def displayName(self): -# """ -# Returns the translated algorithm name, which should be used for any -# user-visible display of the algorithm name. -# """ -# return self.tr("Import GWSW HydX") -# -# def shortHelpString(self): -# return """ -#

-#         Introduction
-#         Use this processing algorithm to import data in the format of the Dutch "Gegevenswoordenboek Stedelijk Water (GWSW)". Either select a previously downloaded local dataset, or download a dataset directly from the server.
-#         A log file will be created in the same directory as the Target 3Di Spatialite. Please check this log file after the import has completed.
-#
-#         Parameters
-#         Target 3Di Spatialite: Spatialite (.sqlite) file that contains the layers required by 3Di. Imported data will be added to any data already contained in the 3Di Spatialite.
-#         GWSW HydX directory (local): Use this option if you have already downloaded a GWSW HydX dataset to a local directory.
-#         GWSW dataset name (online): Use this option if you want to download a GWSW HydX dataset.
-#         Destination directory for GWSW HydX dataset download: If you have chosen to download a GWSW HydX dataset, this is the directory it will be downloaded to.

-# """ -# -# def group(self): -# """ -# Returns the name of the group this algorithm belongs to. This string -# should be localised. -# """ -# return self.tr(self.groupId()) -# -# def groupId(self): -# """ -# Returns the unique ID of the group this algorithm belongs to. This -# string should be fixed for the algorithm, and must not be localised. -# The group id should be unique within each provider. Group id should -# contain lowercase alphanumeric characters only and no spaces or other -# formatting characters. -# """ -# return "Schematisation" -# -# def tr(self, string): -# return QCoreApplication.translate("Processing", string) -# -# def createInstance(self): -# return ImportHydXAlgorithm() diff --git a/tests/test_dependencies.py b/tests/test_dependencies.py index 61cdfd41..b7a780d9 100644 --- a/tests/test_dependencies.py +++ b/tests/test_dependencies.py @@ -1,6 +1,5 @@ from pathlib import Path from threedi_results_analysis import dependencies -from threedi_results_analysis.dependencies import Dependency import mock import os @@ -55,22 +54,6 @@ def test_install_dependencies(tmpdir): dependencies.ensure_everything_installed() -# def test_uninstall_dependency(tmpdir, monkeypatch): - # python_path = os.getenv("PYTHONPATH", "") - # new_python_path = f"{python_path}:{tmpdir}" - # monkeypatch.setenv("PYTHONPATH", new_python_path) - - # small_dependencies = [ - # Dependency("threedi-modelchecker", "threedi_modelchecker", ">=1.0.0", False) - # ] - # dependencies._install_dependencies( - # small_dependencies, target_dir=tmpdir - # ) - # dependencies._uninstall_dependency(small_dependencies[0]) - # for directory in os.listdir(tmpdir): - # assert "threedi_modelchecker" not in directory - - def test_install_dependencies_with_error(tmpdir): wrong_dependencies = [missing_dependency] with pytest.raises(RuntimeError):