From dbc23059e9ce83f1972b2258eb0f12cfe11969f6 Mon Sep 17 00:00:00 2001 From: leendertvanwolfswinkel Date: Wed, 21 Aug 2024 15:04:50 +0200 Subject: [PATCH 01/17] bump threedi deps for testing --- dependencies.py | 6 +++--- external-dependencies/populate.sh | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/dependencies.py b/dependencies.py index 7b85e055..8cd289a3 100644 --- a/dependencies.py +++ b/dependencies.py @@ -59,8 +59,8 @@ Dependency("cftime", "cftime", ">=1.5.0", False), # threedigrid[results] Dependency("alembic", "alembic", "==1.8.*", False), Dependency("threedigrid", "threedigrid", "==2.2.*", False), - Dependency("threedi-schema", "threedi_schema", "==0.219.*", False), - Dependency("threedi-modelchecker", "threedi_modelchecker", "==2.6.*", False), + Dependency("threedi-schema", "threedi_schema", "==0.224.*", False), + Dependency("threedi-modelchecker", "threedi_modelchecker", "==2.10.*", False), Dependency("threedidepth", "threedidepth", "==0.6.3", False), Dependency("click", "click", ">=8.0", False), Dependency("packaging", "packaging", "", False), @@ -71,7 +71,7 @@ Dependency("networkx", "networkx", "", False), Dependency("condenser", "condenser", ">=0.2.1", False), Dependency("Shapely", "shapely", ">=2.0.0", False), - Dependency("threedigrid-builder", "threedigrid_builder", "==1.14.*", False), + Dependency("threedigrid-builder", "threedigrid_builder", "==1.17.*", False), Dependency("hydxlib", "hydxlib", "==1.5.2", False), Dependency("h5netcdf", "h5netcdf", "", False), Dependency("greenlet", "greenlet", "!=0.4.17", False), diff --git a/external-dependencies/populate.sh b/external-dependencies/populate.sh index 44726778..92459400 100755 --- a/external-dependencies/populate.sh +++ b/external-dependencies/populate.sh @@ -67,7 +67,7 @@ wget https://files.pythonhosted.org/packages/cd/84/66072ee12c3e79061f183c09a24be # Download windows wheels (cp39, win, amd64) wget https://files.pythonhosted.org/packages/b2/8e/83d9e3bff5c0ff7a0ec7e850c785916e616ab20d8793943f9e1d2a987fab/shapely-2.0.0-cp39-cp39-win_amd64.whl -wget https://files.pythonhosted.org/packages/75/b7/7849ad6f25e17f72f4c7e411b36b456205db2c501ad495bfc97c1cd3b813/threedigrid_builder-1.14.4-cp39-cp39-win_amd64.whl +wget https://files.pythonhosted.org/packages/96/63/bc1f271b291be432f4db390dc8bee6854ee1894b72816f142f57801b3f5b/threedigrid_builder-1.17.0-cp311-cp311-win_amd64.whl wget https://files.pythonhosted.org/packages/b3/89/1d3b78577a6b2762cb254f6ce5faec9b7c7b23052d1cdb7237273ff37d10/greenlet-2.0.2-cp39-cp39-win_amd64.whl#sha256=db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564 wget https://files.pythonhosted.org/packages/5f/d6/5f59a5e5570c4414d94c6da4c97731deab832cbd14eaf23189d54a92d1e1/cftime-1.6.2-cp39-cp39-win_amd64.whl#sha256=86fe550b94525c327578a90b2e13418ca5ba6c636d5efe3edec310e631757eea @@ -75,8 +75,8 @@ wget https://files.pythonhosted.org/packages/5f/d6/5f59a5e5570c4414d94c6da4c9773 # Download linux wheels (both cp38 and cp310) wget https://files.pythonhosted.org/packages/06/07/0700e5e33c44bc87e19953244c29f73669cfb6f19868899170f9c7e34554/shapely-2.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl wget https://files.pythonhosted.org/packages/4e/03/f3bcb7d96aef6d56b62e2f25996f161c05f92a45d452165be2007b756e0f/shapely-2.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl -wget 
https://files.pythonhosted.org/packages/0b/22/8868b2266baa2f8f9747100232807de4151fd720322ee5d5d724d6415d63/threedigrid_builder-1.14.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl -wget https://files.pythonhosted.org/packages/bd/8c/db51b687ee80cf28787f7b862b62e77e096001513176252d824384d2739c/threedigrid_builder-1.14.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl +wget https://files.pythonhosted.org/packages/b7/6a/828470fc4c45cb3f8f046f3b737a6a527ad7d9959916ce05c153d8d5ae60/threedigrid_builder-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl +wget https://files.pythonhosted.org/packages/d4/22/08b7f0c1c28f5227e640fc5f216853b90b8ae616e72944a35e822cffc6ec/threedigrid_builder-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl wget https://files.pythonhosted.org/packages/6e/11/a1f1af20b6a1a8069bc75012569d030acb89fd7ef70f888b6af2f85accc6/greenlet-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470 wget https://files.pythonhosted.org/packages/e1/17/d8042d82f44c08549b535bf2e7d1e87aa1863df5ed6cf1cf773eb2dfdf67/cftime-1.6.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=acb294fdb80e33545ae54b4421df35c4e578708a5ffce1c00408b2294e70ecef wget https://files.pythonhosted.org/packages/44/51/bc9d47beee47afda1d335f05efa848dc403bd183344f03d431281518e8ab/cftime-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl#sha256=7a820e16357dbdc9723b2059f7178451de626a8b2e5f80b9d91a77e3dac42133 From 1ca6085e40b4d1d664e12e952f4343796a76748d Mon Sep 17 00:00:00 2001 From: leendertvanwolfswinkel Date: Wed, 21 Aug 2024 16:01:45 +0200 Subject: [PATCH 02/17] bump geoalchemy2 --- dependencies.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dependencies.py b/dependencies.py index 8cd289a3..2dbef613 100644 --- a/dependencies.py +++ b/dependencies.py @@ -47,7 +47,7 @@ #: List of expected dependencies. 
DEPENDENCIES = [ Dependency("SQLAlchemy", "sqlalchemy", "==2.0.6", False), - Dependency("GeoAlchemy2", "geoalchemy2", "==0.13.*", False), + Dependency("GeoAlchemy2", "geoalchemy2", "==0.15.*", False), Dependency("pyqtgraph", "pyqtgraph", ">=0.13.2", False), Dependency( "importlib-resources", "importlib_resources", "", False From ff1b2eaaa20ab40a1046016190cc7aa48d78104e Mon Sep 17 00:00:00 2001 From: leendertvanwolfswinkel Date: Wed, 21 Aug 2024 16:26:45 +0200 Subject: [PATCH 03/17] Disabled incompatible stuff --- .../guess_indicator/test_guess_indicators.py | 224 ------ processing/providers.py | 16 +- processing/schematisation_algorithms.py | 660 +++++++++--------- tests/test_spatialalchemy.py | 79 --- 4 files changed, 338 insertions(+), 641 deletions(-) delete mode 100644 processing/deps/guess_indicator/test_guess_indicators.py delete mode 100644 tests/test_spatialalchemy.py diff --git a/processing/deps/guess_indicator/test_guess_indicators.py b/processing/deps/guess_indicator/test_guess_indicators.py deleted file mode 100644 index fe91858b..00000000 --- a/processing/deps/guess_indicator/test_guess_indicators.py +++ /dev/null @@ -1,224 +0,0 @@ -from pathlib import Path -from sqlalchemy import update -from threedi_results_analysis.processing.deps.sql_models.constants import Constants -from threedi_results_analysis.processing.deps.sql_models.model_schematisation import BoundaryCondition1D -from threedi_results_analysis.processing.deps.sql_models.model_schematisation import ConnectionNode -from threedi_results_analysis.processing.deps.sql_models.model_schematisation import Manhole -from threedi_results_analysis.processing.deps.sql_models.model_schematisation import Pipe -from threedi_results_analysis.processing.deps.sql_models.model_schematisation import Pumpstation -from threedi_results_analysis.tests.test_init import TEST_DATA_DIR -from threedi_results_analysis.processing.deps.guess_indicator import guess_indicators_utils -from threedi_results_analysis.utils.threedi_database import ThreediDatabase - -import pytest -import shutil - - -def get_pump_or_outlet_manholes(session, ids): - """Get either manhole_outlet OR manhole_pump from sqlite. - :param ids: [list] this contains ids from either manhole_outlet or manhole_pump - :return: a dict with key=manhole.id, value=manhole.manhole_indicator - """ - manholes_id_indicator = {} - # only return manhole - sql_manholes = ( - session.query(Manhole) - .join(Manhole.connection_node) - .filter(ConnectionNode.id.in_(ids)) - ) - for manhole in sql_manholes: - # fill dict - manholes_id_indicator[manhole.id] = manhole.manhole_indicator - return manholes_id_indicator - - -def get_all_manholes(session): - """Get all manholes from sqlite (also manhole_outlet, manhole_pump). - :return: a dict with key=manhole.id, value=manhole.manhole_indicator""" - manholes_id_indicator = {} - sql_manholes = session.query(Manhole) - for manhole in sql_manholes: - # fill dict - manholes_id_indicator[manhole.id] = manhole.manhole_indicator - return manholes_id_indicator - - -def get_manholes(session, get_only_outlets=False, get_only_pumps=False, get_all=False): - """Get manholes from sqlite: OR only manhole_outlet, OR only manhole_pumps, OR - all manholes (manhole_outlet + manhole_pump + other manholes). - :return: manholes_id_indicator = a dict with key=id, value=manhole_indicator - :param session: sqlalchemy.orm.session - :param get_only_outlets: (boolean) only get manholes that are located on an outlet. - This means that a 1D boundary condition is located on the manhole. 
- :param get_only_pumps: (boolean) only get manholes that are located on an startnode - of a pumpstation. - :param get_all: manhole on outlet + manhole on pumpstation + other manholes - :return: manholes_id_indicator: (dict) - """ - # only 1 of kwargs may be True - assert sum([get_only_outlets, get_only_pumps, get_all]) == 1 - if get_only_pumps: - ids = session.query(Pumpstation.connection_node_start_id) - elif get_only_outlets: - ids = session.query(BoundaryCondition1D.connection_node_id) - if get_only_pumps or get_only_outlets: - manholes_id_indicator = get_pump_or_outlet_manholes(session, ids) - if get_all: - manholes_id_indicator = get_all_manholes(session) - return manholes_id_indicator - - -def get_all_pipes(session): - pipes = {} - # only return manhole - sql_pipes = session.query(Pipe) - for pipe in sql_pipes: - # fill dict - pipes[pipe.id] = (pipe.friction_type, pipe.friction_value, pipe.material) - return pipes - - -def empty_manhole_indicator(session, manholes_pre_empty): - """Empty column manhole_indicator (set is to NULL) of table v2_manhole.""" - manholes_ids = list(manholes_pre_empty) - update_query = ( - update(Manhole) - .where(Manhole.id.in_(manholes_ids)) - .values(manhole_indicator=None) - ) - session.execute(update_query) - session.commit() - - -def empty_pipe_friction_value(session, pipes_pre_empty): - """empty column manhole_indicator (set is to NULL) of table v2_manhole""" - pipes_ids = list(pipes_pre_empty) - update_query = ( - update(Pipe) - .where(Pipe.id.in_(pipes_ids)) - .values(friction_value=None, friction_type=None) - ) - session.execute(update_query) - session.commit() - - -@pytest.fixture() -def db(tmpdir): - """Copy original sqlite to tmpdir as we modify the sqlite data (with sqlalchemy - in these tests. Pytest fixes cleanup op tmpdir: "entries older than 3 temporary - directories will be removed".""" - sqlite_filename = "v2_bergermeer.sqlite" - orig_sqlite_path = TEST_DATA_DIR / "testmodel" / "v2_bergermeer" / sqlite_filename - tmp_sqlite_dir = Path(tmpdir) - tmp_sqlite_path = tmp_sqlite_dir / sqlite_filename - shutil.copy2(orig_sqlite_path, tmp_sqlite_path) - db_type = "spatialite" - db_set = {"db_path": tmp_sqlite_path} - db = ThreediDatabase(db_set, db_type) - return db - - -def test_guess_manhole_indicator(db): - session = db.get_session() - - # before we empty manholes, first get their [(id, manhole_indicator)] - manholes_pumps_pre_empty = get_manholes(session, get_only_pumps=True) - manholes_outlets_pre_empty = get_manholes(session, get_only_outlets=True) - all_manholes_pre_empty = get_manholes(session, get_all=True) - assert all_manholes_pre_empty, ( - "sqlite should have manholes, otherwise there is " "nothing to test" - ) - - # now lets empty column 'manhole_indicator' in all manholes - empty_manhole_indicator(session, all_manholes_pre_empty) - - all_manholes_pre_guess = get_manholes(session, get_all=True) - # all dict values should be None (<-- still test-prework, not actual testing), If - # it fails then empty_manhole_selection() does not a good job - assert not all(list(all_manholes_pre_guess.values())) - - # now put guesser to work - guesser = guess_indicators_utils.Guesser(db) - guesser.guess_manhole_indicator(only_empty_fields=False) - - # get a new session - session = db.get_session() - - manholes_pumps_after_guess = get_manholes(session, get_only_pumps=True) - manholes_outlet_after_guess = get_manholes(session, get_only_outlets=True) - all_manholes_after_guess = get_manholes(session, get_all=True) - - # manholes_pumps should have been 
updated to MANHOLE_INDICATOR_PUMPSTATION - if manholes_pumps_pre_empty: - manholes_pumps = list(manholes_pumps_after_guess.values()) - expected_value = Constants.MANHOLE_INDICATOR_PUMPSTATION - assert all([expected_value == manhole for manhole in manholes_pumps]) - - # manholes_outlets should have been updated to MANHOLE_INDICATOR_OUTLET - if manholes_outlets_pre_empty: - manholes_outlets = list(manholes_outlet_after_guess.values()) - expected_value = Constants.MANHOLE_INDICATOR_OUTLET - assert all([expected_value == manhole for manhole in manholes_outlets]) - - for k in manholes_outlets_pre_empty: - all_manholes_after_guess.pop(k, None) - - # The rest of the manholes should have been updated to MANHOLE_INDICATOR_MANHOLE - # To check this, we first pop the manholes_pumps and manholes_outlets from - # all_manholes - for manhole_dict in [manholes_pumps_pre_empty, manholes_outlets_pre_empty]: - for manhole_id in manhole_dict: - all_manholes_after_guess.pop(manhole_id, None) - - all_manholes = list(all_manholes_after_guess.values()) - expected_value = Constants.MANHOLE_INDICATOR_MANHOLE - assert all([expected_value == manhole for manhole in all_manholes]) - - -def test_guess_pipe_friction(db): - session = db.get_session() - pipes_pre_empty = get_all_pipes(session) - assert pipes_pre_empty, ( - "sqlite should have pipes, otherwise there is nothing to " "test" - ) - - # now lets empty columns 'friction_type' and 'friction_value' in all pipes - empty_pipe_friction_value(session, pipes_pre_empty) - - pipes_after_emtpy = get_all_pipes(session) - friction_types = [x[0] for x in list(pipes_after_emtpy.values())] - friction_values = [x[1] for x in list(pipes_after_emtpy.values())] - # all friction_types and friction_values should be None (<-- still test-prework, - # not actual testing), If it fails then empty_pipe_friction_value() doesnt work - assert not all(friction_types) - assert not all(friction_values) - - # now put guesser to work - guesser = guess_indicators_utils.Guesser(db) - guesser.guess_pipe_friction(only_empty_fields=False) - - # get a new session - session = db.get_session() - pipes_after_guess = get_all_pipes(session) - - # all friction_types must have been updated to FRICTION_TYPE_MANNING - assert all( - [ - x[0] == Constants.FRICTION_TYPE_MANNING - for x in list(pipes_after_guess.values()) - ] - ) - - material_frictionvalue_mapping = {} - # Constants.TABLE_MANNING is a set with tup - for material, friction_value in Constants.TABLE_MANNING: - material_frictionvalue_mapping[material] = friction_value - - # all friction_values 'x[1]' must have been updated to correct friction_value - # this friction_values depends on material 'x[2]' - assert all( - [ - x[1] == material_frictionvalue_mapping[x[2]] - for x in list(pipes_after_guess.values()) - ] - ) diff --git a/processing/providers.py b/processing/providers.py index 2841ad4d..56f80d4d 100644 --- a/processing/providers.py +++ b/processing/providers.py @@ -1,7 +1,7 @@ # See https://docs.qgis.org/3.10/en/docs/pyqgis_developer_cookbook/processing.html from qgis.core import QgsProcessingProvider from qgis.PyQt.QtGui import QIcon -from threedi_results_analysis.processing.dwf_calculation_algorithm import DWFCalculatorAlgorithm +# from threedi_results_analysis.processing.dwf_calculation_algorithm import DWFCalculatorAlgorithm from threedi_results_analysis.processing.gpkg_conversion_algorithm import ThreeDiConvertToGpkgAlgorithm from threedi_results_analysis.processing.grid_creation_algorithm import ThreeDiGenerateCompGridAlgorithm from 
threedi_results_analysis.processing.cross_sectional_discharge_algorithm import CrossSectionalDischargeAlgorithm @@ -12,9 +12,9 @@ from threedi_results_analysis.processing.schematisation_algorithms import ( CheckSchematisationAlgorithm, MigrateAlgorithm, - ImportSufHydAlgorithm, - GuessIndicatorAlgorithm, - ImportHydXAlgorithm, + # ImportSufHydAlgorithm, + # GuessIndicatorAlgorithm, + # ImportHydXAlgorithm, ) from threedi_results_analysis.processing.threedidepth_algorithms import ThreediDepthAlgorithm, ThreediMaxDepthAlgorithm @@ -28,14 +28,14 @@ class ThreediProvider(QgsProcessingProvider): def loadAlgorithms(self, *args, **kwargs): self.addAlgorithm(ThreediDepthAlgorithm()) self.addAlgorithm(ThreediMaxDepthAlgorithm()) - self.addAlgorithm(DWFCalculatorAlgorithm()) + # self.addAlgorithm(DWFCalculatorAlgorithm()) self.addAlgorithm(CheckSchematisationAlgorithm()) self.addAlgorithm(MigrateAlgorithm()) - self.addAlgorithm(ImportHydXAlgorithm()) + # self.addAlgorithm(ImportHydXAlgorithm()) self.addAlgorithm(ThreeDiConvertToGpkgAlgorithm()) self.addAlgorithm(ThreeDiGenerateCompGridAlgorithm()) - self.addAlgorithm(ImportSufHydAlgorithm()) - self.addAlgorithm(GuessIndicatorAlgorithm()) + # self.addAlgorithm(ImportSufHydAlgorithm()) + # self.addAlgorithm(GuessIndicatorAlgorithm()) self.addAlgorithm(CrossSectionalDischargeAlgorithm()) self.addAlgorithm(DetectLeakingObstaclesAlgorithm()) self.addAlgorithm(DetectLeakingObstaclesWithDischargeThresholdAlgorithm()) diff --git a/processing/schematisation_algorithms.py b/processing/schematisation_algorithms.py index fbe014f1..d388de48 100644 --- a/processing/schematisation_algorithms.py +++ b/processing/schematisation_algorithms.py @@ -15,17 +15,17 @@ import os import shutil -from hydxlib.scripts import run_import_export -from hydxlib.scripts import write_logging_to_file +# from hydxlib.scripts import run_import_export +# from hydxlib.scripts import write_logging_to_file from pathlib import Path from sqlalchemy.exc import OperationalError, DatabaseError -from threedi_results_analysis.processing.deps.sufhyd.import_sufhyd_main import Importer -from threedi_results_analysis.processing.deps.guess_indicator import guess_indicators_utils +# from threedi_results_analysis.processing.deps.sufhyd.import_sufhyd_main import Importer +# from threedi_results_analysis.processing.deps.guess_indicator import guess_indicators_utils from threedi_schema import ThreediDatabase from threedi_modelchecker import ThreediModelChecker from threedi_schema import errors -from threedi_results_analysis.processing.download_hydx import download_hydx +# from threedi_results_analysis.processing.download_hydx import download_hydx from threedi_results_analysis.utils.utils import backup_sqlite from qgis.PyQt.QtCore import QCoreApplication @@ -279,328 +279,328 @@ def createInstance(self): return CheckSchematisationAlgorithm() -class ImportSufHydAlgorithm(QgsProcessingAlgorithm): - """ - Import data from SufHyd to a 3Di Spatialite - """ - - INPUT_SUFHYD_FILE = "INPUT_SUFHYD_FILE" - TARGET_SQLITE = "TARGET_SQLITE" - - def initAlgorithm(self, config): - self.addParameter( - QgsProcessingParameterFile(self.INPUT_SUFHYD_FILE, self.tr("Sufhyd file"), extension="hyd")) - - self.addParameter( - QgsProcessingParameterFile( - self.TARGET_SQLITE, - "Target 3Di Sqlite", - extension="sqlite" - ) - ) - - def processAlgorithm(self, parameters, context, feedback): - sufhyd_file = self.parameterAsString(parameters, self.INPUT_SUFHYD_FILE, context) - out_path = self.parameterAsFile(parameters, 
self.TARGET_SQLITE, context) - threedi_db = get_threedi_database(filename=out_path, feedback=feedback) - if not threedi_db: - return {} - try: - schema = threedi_db.schema - schema.validate_schema() - - except errors.MigrationMissingError: - feedback.pushWarning( - "The selected 3Di spatialite does not have the latest database schema version. Please migrate this " - "spatialite and try again: Processing > Toolbox > 3Di > Schematisation > Migrate spatialite" - ) - return {} - - importer = Importer(sufhyd_file, threedi_db) - importer.run_import() - - return {} - - def name(self): - return "import_sufhyd" - - def displayName(self): - return self.tr("Import Sufhyd") - - def group(self): - return self.tr(self.groupId()) - - def groupId(self): - return "Schematisation" - - def tr(self, string): - return QCoreApplication.translate("Processing", string) - - def createInstance(self): - return ImportSufHydAlgorithm() - - -class GuessIndicatorAlgorithm(QgsProcessingAlgorithm): - """ - Guess manhole indicator, pipe friction and manhole storage - area. - """ - - TARGET_SQLITE = "TARGET_SQLITE" - PIPE_FRICTION = "PIPE_FRICTION" - MANHOLE_INDICATOR = "MANHOLE_INDICATOR" - MANHOLE_AREA = "MANHOLE_AREA" - ONLY_NULL_FIELDS = "ONLY_NULL_FIELDS" - - def initAlgorithm(self, config): - - self.addParameter( - QgsProcessingParameterFile( - self.TARGET_SQLITE, - "Target 3Di Sqlite", - extension="sqlite" - ) - ) - - self.addParameter( - QgsProcessingParameterBoolean( - name=self.PIPE_FRICTION, - description="Pipe friction", - defaultValue=True, - ) - ) - - self.addParameter( - QgsProcessingParameterBoolean( - name=self.MANHOLE_INDICATOR, - description="Manhole indicator", - defaultValue=True, - ) - ) - - self.addParameter( - QgsProcessingParameterBoolean( - name=self.MANHOLE_AREA, - description="Manhole area (only fills NULL fields)", - defaultValue=True, - ) - ) - - self.addParameter( - QgsProcessingParameterBoolean( - name=self.ONLY_NULL_FIELDS, - description="Only fill NULL fields", - defaultValue=True, - ) - ) - - def processAlgorithm(self, parameters, context, feedback): - out_path = self.parameterAsFile(parameters, self.TARGET_SQLITE, context) - threedi_db = get_threedi_database(filename=out_path, feedback=feedback) - if not threedi_db: - return {} - try: - schema = threedi_db.schema - schema.validate_schema() - - except errors.MigrationMissingError: - feedback.pushWarning( - "The selected 3Di spatialite does not have the latest database schema version. 
Please migrate this " - "spatialite and try again: Processing > Toolbox > 3Di > Schematisation > Migrate spatialite" - ) - return {} - - checks = [] - - if parameters[self.MANHOLE_INDICATOR]: - checks.append("manhole_indicator") - - if parameters[self.PIPE_FRICTION]: - checks.append("pipe_friction") - - if parameters[self.MANHOLE_AREA]: - checks.append("manhole_area") - - guesser = guess_indicators_utils.Guesser(threedi_db) - msg = guesser.run(checks, parameters[self.ONLY_NULL_FIELDS]) - - feedback.pushInfo(f"Guess indicators ready: {msg}") - - return {} - - def name(self): - return "guess_indicators" - - def displayName(self): - return self.tr("Guess Indicators") - - def group(self): - return self.tr(self.groupId()) - - def groupId(self): - return "Schematisation" - - def tr(self, string): - return QCoreApplication.translate("Processing", string) - - def createInstance(self): - return GuessIndicatorAlgorithm() - - -class ImportHydXAlgorithm(QgsProcessingAlgorithm): - """ - Import data from GWSW HydX to a 3Di Spatialite - """ - - INPUT_DATASET_NAME = "INPUT_DATASET_NAME" - HYDX_DOWNLOAD_DIRECTORY = "HYDX_DOWNLOAD_DIRECTORY" - INPUT_HYDX_DIRECTORY = "INPUT_HYDX_DIRECTORY" - TARGET_SQLITE = "TARGET_SQLITE" - - def initAlgorithm(self, config): - self.addParameter( - QgsProcessingParameterFile( - self.TARGET_SQLITE, "Target 3Di Spatialite", extension="sqlite" - ) - ) - - self.addParameter( - QgsProcessingParameterFile( - self.INPUT_HYDX_DIRECTORY, - "GWSW HydX directory (local)", - behavior=QgsProcessingParameterFile.Folder, - optional=True, - ) - ) - - self.addParameter( - QgsProcessingParameterString( - self.INPUT_DATASET_NAME, "GWSW dataset name (online)", optional=True - ) - ) - - self.addParameter( - QgsProcessingParameterFolderDestination( - self.HYDX_DOWNLOAD_DIRECTORY, - "Destination directory for GWSW HydX dataset download", - optional=True, - ) - ) - - def processAlgorithm(self, parameters, context, feedback): - hydx_dataset_name = self.parameterAsString( - parameters, self.INPUT_DATASET_NAME, context - ) - hydx_download_dir = self.parameterAsString( - parameters, self.HYDX_DOWNLOAD_DIRECTORY, context - ) - hydx_path = self.parameterAsString( - parameters, self.INPUT_HYDX_DIRECTORY, context - ) - out_path = self.parameterAsFile(parameters, self.TARGET_SQLITE, context) - threedi_db = get_threedi_database(filename=out_path, feedback=feedback) - if not threedi_db: - raise QgsProcessingException( - f"Unable to connect to 3Di spatialite '{out_path}'" - ) - try: - schema = threedi_db.schema - schema.validate_schema() - - except errors.MigrationMissingError: - raise QgsProcessingException( - "The selected 3Di spatialite does not have the latest database schema version. Please migrate this " - "spatialite and try again: Processing > Toolbox > 3Di > Schematisation > Migrate spatialite" - ) - if not (hydx_dataset_name or hydx_path): - raise QgsProcessingException( - "Either 'GWSW HydX directory (local)' or 'GWSW dataset name (online)' must be filled in!" - ) - if hydx_dataset_name and hydx_path: - feedback.pushWarning( - "Both 'GWSW dataset name (online)' and 'GWSW HydX directory (local)' are filled in. " - "'GWSW dataset name (online)' will be ignored. This dataset will not be downloaded." 
- ) - elif hydx_dataset_name: - try: - hydx_download_path = Path(hydx_download_dir) - hydx_download_dir_is_valid = hydx_download_path.is_dir() - except TypeError: - hydx_download_dir_is_valid = False - if parameters[self.HYDX_DOWNLOAD_DIRECTORY] == "TEMPORARY_OUTPUT": - hydx_download_dir_is_valid = True - if not hydx_download_dir_is_valid: - raise QgsProcessingException( - f"'Destination directory for HydX dataset download' ({hydx_download_path}) is not a valid directory" - ) - hydx_path = download_hydx( - dataset_name=hydx_dataset_name, - target_directory=hydx_download_path, - wait_times=[0.1, 1, 2, 3, 4, 5, 10], - feedback=feedback, - ) - # hydx_path will be None if user has canceled the process during download - if feedback.isCanceled(): - raise QgsProcessingException("Process canceled") - feedback.pushInfo(f"Starting import of {hydx_path} to {out_path}") - log_path = Path(out_path).parent / "import_hydx.log" - write_logging_to_file(log_path) - feedback.pushInfo(f"Logging will be written to {log_path}") - run_import_export(export_type="threedi", hydx_path=hydx_path, out_path=out_path) - return {} - - def name(self): - """ - Returns the algorithm name, used for identifying the algorithm. This - string should be fixed for the algorithm, and must not be localised. - The name should be unique within each provider. Names should contain - lowercase alphanumeric characters only and no spaces or other - formatting characters. - """ - return "import_hydx" - - def displayName(self): - """ - Returns the translated algorithm name, which should be used for any - user-visible display of the algorithm name. - """ - return self.tr("Import GWSW HydX") - - def shortHelpString(self): - return """ -

<h3>Introduction</h3>
- <p>Use this processing algorithm to import data in the format of the Dutch "Gegevenswoordenboek Stedelijk Water (GWSW)". Either select a previously downloaded local dataset, or download a dataset directly from the server.</p>
- <p>A log file will be created in the same directory as the Target 3Di Spatialite. Please check this log file after the import has completed.</p>
- <h3>Parameters</h3>
- <h4>Target 3Di Spatialite</h4>
- <p>Spatialite (.sqlite) file that contains the layers required by 3Di. Imported data will be added to any data already contained in the 3Di Spatialite.</p>
- <h4>GWSW HydX directory (local)</h4>
- <p>Use this option if you have already downloaded a GWSW HydX dataset to a local directory.</p>
- <h4>GWSW dataset name (online)</h4>
- <p>Use this option if you want to download a GWSW HydX dataset.</p>
- <h4>Destination directory for GWSW HydX dataset download</h4>
- <p>If you have chosen to download a GWSW HydX dataset, this is the directory it will be downloaded to.</p>
- """ - - def group(self): - """ - Returns the name of the group this algorithm belongs to. This string - should be localised. - """ - return self.tr(self.groupId()) - - def groupId(self): - """ - Returns the unique ID of the group this algorithm belongs to. This - string should be fixed for the algorithm, and must not be localised. - The group id should be unique within each provider. Group id should - contain lowercase alphanumeric characters only and no spaces or other - formatting characters. - """ - return "Schematisation" - - def tr(self, string): - return QCoreApplication.translate("Processing", string) - - def createInstance(self): - return ImportHydXAlgorithm() +# class ImportSufHydAlgorithm(QgsProcessingAlgorithm): +# """ +# Import data from SufHyd to a 3Di Spatialite +# """ +# +# INPUT_SUFHYD_FILE = "INPUT_SUFHYD_FILE" +# TARGET_SQLITE = "TARGET_SQLITE" +# +# def initAlgorithm(self, config): +# self.addParameter( +# QgsProcessingParameterFile(self.INPUT_SUFHYD_FILE, self.tr("Sufhyd file"), extension="hyd")) +# +# self.addParameter( +# QgsProcessingParameterFile( +# self.TARGET_SQLITE, +# "Target 3Di Sqlite", +# extension="sqlite" +# ) +# ) +# +# def processAlgorithm(self, parameters, context, feedback): +# sufhyd_file = self.parameterAsString(parameters, self.INPUT_SUFHYD_FILE, context) +# out_path = self.parameterAsFile(parameters, self.TARGET_SQLITE, context) +# threedi_db = get_threedi_database(filename=out_path, feedback=feedback) +# if not threedi_db: +# return {} +# try: +# schema = threedi_db.schema +# schema.validate_schema() +# +# except errors.MigrationMissingError: +# feedback.pushWarning( +# "The selected 3Di spatialite does not have the latest database schema version. Please migrate this " +# "spatialite and try again: Processing > Toolbox > 3Di > Schematisation > Migrate spatialite" +# ) +# return {} +# +# importer = Importer(sufhyd_file, threedi_db) +# importer.run_import() +# +# return {} +# +# def name(self): +# return "import_sufhyd" +# +# def displayName(self): +# return self.tr("Import Sufhyd") +# +# def group(self): +# return self.tr(self.groupId()) +# +# def groupId(self): +# return "Schematisation" +# +# def tr(self, string): +# return QCoreApplication.translate("Processing", string) +# +# def createInstance(self): +# return ImportSufHydAlgorithm() +# +# +# class GuessIndicatorAlgorithm(QgsProcessingAlgorithm): +# """ +# Guess manhole indicator, pipe friction and manhole storage +# area. 
+# """ +# +# TARGET_SQLITE = "TARGET_SQLITE" +# PIPE_FRICTION = "PIPE_FRICTION" +# MANHOLE_INDICATOR = "MANHOLE_INDICATOR" +# MANHOLE_AREA = "MANHOLE_AREA" +# ONLY_NULL_FIELDS = "ONLY_NULL_FIELDS" +# +# def initAlgorithm(self, config): +# +# self.addParameter( +# QgsProcessingParameterFile( +# self.TARGET_SQLITE, +# "Target 3Di Sqlite", +# extension="sqlite" +# ) +# ) +# +# self.addParameter( +# QgsProcessingParameterBoolean( +# name=self.PIPE_FRICTION, +# description="Pipe friction", +# defaultValue=True, +# ) +# ) +# +# self.addParameter( +# QgsProcessingParameterBoolean( +# name=self.MANHOLE_INDICATOR, +# description="Manhole indicator", +# defaultValue=True, +# ) +# ) +# +# self.addParameter( +# QgsProcessingParameterBoolean( +# name=self.MANHOLE_AREA, +# description="Manhole area (only fills NULL fields)", +# defaultValue=True, +# ) +# ) +# +# self.addParameter( +# QgsProcessingParameterBoolean( +# name=self.ONLY_NULL_FIELDS, +# description="Only fill NULL fields", +# defaultValue=True, +# ) +# ) +# +# def processAlgorithm(self, parameters, context, feedback): +# out_path = self.parameterAsFile(parameters, self.TARGET_SQLITE, context) +# threedi_db = get_threedi_database(filename=out_path, feedback=feedback) +# if not threedi_db: +# return {} +# try: +# schema = threedi_db.schema +# schema.validate_schema() +# +# except errors.MigrationMissingError: +# feedback.pushWarning( +# "The selected 3Di spatialite does not have the latest database schema version. Please migrate this " +# "spatialite and try again: Processing > Toolbox > 3Di > Schematisation > Migrate spatialite" +# ) +# return {} +# +# checks = [] +# +# if parameters[self.MANHOLE_INDICATOR]: +# checks.append("manhole_indicator") +# +# if parameters[self.PIPE_FRICTION]: +# checks.append("pipe_friction") +# +# if parameters[self.MANHOLE_AREA]: +# checks.append("manhole_area") +# +# guesser = guess_indicators_utils.Guesser(threedi_db) +# msg = guesser.run(checks, parameters[self.ONLY_NULL_FIELDS]) +# +# feedback.pushInfo(f"Guess indicators ready: {msg}") +# +# return {} +# +# def name(self): +# return "guess_indicators" +# +# def displayName(self): +# return self.tr("Guess Indicators") +# +# def group(self): +# return self.tr(self.groupId()) +# +# def groupId(self): +# return "Schematisation" +# +# def tr(self, string): +# return QCoreApplication.translate("Processing", string) +# +# def createInstance(self): +# return GuessIndicatorAlgorithm() +# +# +# class ImportHydXAlgorithm(QgsProcessingAlgorithm): +# """ +# Import data from GWSW HydX to a 3Di Spatialite +# """ +# +# INPUT_DATASET_NAME = "INPUT_DATASET_NAME" +# HYDX_DOWNLOAD_DIRECTORY = "HYDX_DOWNLOAD_DIRECTORY" +# INPUT_HYDX_DIRECTORY = "INPUT_HYDX_DIRECTORY" +# TARGET_SQLITE = "TARGET_SQLITE" +# +# def initAlgorithm(self, config): +# self.addParameter( +# QgsProcessingParameterFile( +# self.TARGET_SQLITE, "Target 3Di Spatialite", extension="sqlite" +# ) +# ) +# +# self.addParameter( +# QgsProcessingParameterFile( +# self.INPUT_HYDX_DIRECTORY, +# "GWSW HydX directory (local)", +# behavior=QgsProcessingParameterFile.Folder, +# optional=True, +# ) +# ) +# +# self.addParameter( +# QgsProcessingParameterString( +# self.INPUT_DATASET_NAME, "GWSW dataset name (online)", optional=True +# ) +# ) +# +# self.addParameter( +# QgsProcessingParameterFolderDestination( +# self.HYDX_DOWNLOAD_DIRECTORY, +# "Destination directory for GWSW HydX dataset download", +# optional=True, +# ) +# ) +# +# def processAlgorithm(self, parameters, context, feedback): +# hydx_dataset_name = 
self.parameterAsString( +# parameters, self.INPUT_DATASET_NAME, context +# ) +# hydx_download_dir = self.parameterAsString( +# parameters, self.HYDX_DOWNLOAD_DIRECTORY, context +# ) +# hydx_path = self.parameterAsString( +# parameters, self.INPUT_HYDX_DIRECTORY, context +# ) +# out_path = self.parameterAsFile(parameters, self.TARGET_SQLITE, context) +# threedi_db = get_threedi_database(filename=out_path, feedback=feedback) +# if not threedi_db: +# raise QgsProcessingException( +# f"Unable to connect to 3Di spatialite '{out_path}'" +# ) +# try: +# schema = threedi_db.schema +# schema.validate_schema() +# +# except errors.MigrationMissingError: +# raise QgsProcessingException( +# "The selected 3Di spatialite does not have the latest database schema version. Please migrate this " +# "spatialite and try again: Processing > Toolbox > 3Di > Schematisation > Migrate spatialite" +# ) +# if not (hydx_dataset_name or hydx_path): +# raise QgsProcessingException( +# "Either 'GWSW HydX directory (local)' or 'GWSW dataset name (online)' must be filled in!" +# ) +# if hydx_dataset_name and hydx_path: +# feedback.pushWarning( +# "Both 'GWSW dataset name (online)' and 'GWSW HydX directory (local)' are filled in. " +# "'GWSW dataset name (online)' will be ignored. This dataset will not be downloaded." +# ) +# elif hydx_dataset_name: +# try: +# hydx_download_path = Path(hydx_download_dir) +# hydx_download_dir_is_valid = hydx_download_path.is_dir() +# except TypeError: +# hydx_download_dir_is_valid = False +# if parameters[self.HYDX_DOWNLOAD_DIRECTORY] == "TEMPORARY_OUTPUT": +# hydx_download_dir_is_valid = True +# if not hydx_download_dir_is_valid: +# raise QgsProcessingException( +# f"'Destination directory for HydX dataset download' ({hydx_download_path}) is not a valid directory" +# ) +# hydx_path = download_hydx( +# dataset_name=hydx_dataset_name, +# target_directory=hydx_download_path, +# wait_times=[0.1, 1, 2, 3, 4, 5, 10], +# feedback=feedback, +# ) +# # hydx_path will be None if user has canceled the process during download +# if feedback.isCanceled(): +# raise QgsProcessingException("Process canceled") +# feedback.pushInfo(f"Starting import of {hydx_path} to {out_path}") +# log_path = Path(out_path).parent / "import_hydx.log" +# write_logging_to_file(log_path) +# feedback.pushInfo(f"Logging will be written to {log_path}") +# run_import_export(export_type="threedi", hydx_path=hydx_path, out_path=out_path) +# return {} +# +# def name(self): +# """ +# Returns the algorithm name, used for identifying the algorithm. This +# string should be fixed for the algorithm, and must not be localised. +# The name should be unique within each provider. Names should contain +# lowercase alphanumeric characters only and no spaces or other +# formatting characters. +# """ +# return "import_hydx" +# +# def displayName(self): +# """ +# Returns the translated algorithm name, which should be used for any +# user-visible display of the algorithm name. +# """ +# return self.tr("Import GWSW HydX") +# +# def shortHelpString(self): +# return """ +#

<h3>Introduction</h3>
+# <p>Use this processing algorithm to import data in the format of the Dutch "Gegevenswoordenboek Stedelijk Water (GWSW)". Either select a previously downloaded local dataset, or download a dataset directly from the server.</p>
+# <p>A log file will be created in the same directory as the Target 3Di Spatialite. Please check this log file after the import has completed.</p>
+# <h3>Parameters</h3>
+# <h4>Target 3Di Spatialite</h4>
+# <p>Spatialite (.sqlite) file that contains the layers required by 3Di. Imported data will be added to any data already contained in the 3Di Spatialite.</p>
+# <h4>GWSW HydX directory (local)</h4>
+# <p>Use this option if you have already downloaded a GWSW HydX dataset to a local directory.</p>
+# <h4>GWSW dataset name (online)</h4>
+# <p>Use this option if you want to download a GWSW HydX dataset.</p>
+# <h4>Destination directory for GWSW HydX dataset download</h4>
+# <p>If you have chosen to download a GWSW HydX dataset, this is the directory it will be downloaded to.</p>
+# """ +# +# def group(self): +# """ +# Returns the name of the group this algorithm belongs to. This string +# should be localised. +# """ +# return self.tr(self.groupId()) +# +# def groupId(self): +# """ +# Returns the unique ID of the group this algorithm belongs to. This +# string should be fixed for the algorithm, and must not be localised. +# The group id should be unique within each provider. Group id should +# contain lowercase alphanumeric characters only and no spaces or other +# formatting characters. +# """ +# return "Schematisation" +# +# def tr(self, string): +# return QCoreApplication.translate("Processing", string) +# +# def createInstance(self): +# return ImportHydXAlgorithm() diff --git a/tests/test_spatialalchemy.py b/tests/test_spatialalchemy.py deleted file mode 100644 index 4e5145b0..00000000 --- a/tests/test_spatialalchemy.py +++ /dev/null @@ -1,79 +0,0 @@ -from geoalchemy2.types import Geometry -from sqlalchemy import Column -from sqlalchemy import Integer -from sqlalchemy import String -from threedi_results_analysis.utils.threedi_database import ThreediDatabase -from sqlalchemy.orm import declarative_base -import logging -import os.path -import tempfile -import unittest - - -logger = logging.getLogger(__name__) - - -Base = declarative_base() - - -class User(Base): - __tablename__ = "users" - id = Column(Integer, primary_key=True) - name = Column(String) - - def __repr__(self): - return "" % (self.name) - - -class GeoTable(Base): - __tablename__ = "geotable" - id = Column(Integer, primary_key=True) - name = Column(String) - geom = Column( - Geometry(geometry_type="POINT", srid=4326, management=True, spatial_index=True) - ) - - def __repr__(self): - return "" % (self.geom) - - -class TestSpatialAlchemyWithSpatialite(unittest.TestCase): - def setUp(self): - self.tmp_directory = tempfile.mkdtemp() - self.file_path = os.path.join(self.tmp_directory, "testdb.sqlite") - - db = ThreediDatabase( - {"db_file": self.file_path, "db_path": self.file_path}, echo=True - ) - db.create_db() - self.engine = db.get_engine() - self.session = db.get_session() - - Base.metadata.bind = self.engine - Base.metadata.create_all(self.engine) - - def test_insert_and_get_normal_table(self): - user = User(name="test") - self.session.add(user) - self.session.commit() - - self.assertIsNotNone(user.id) - self.assertEqual(self.session.query(User).count(), 1) - user = self.session.query(User).limit(1)[0] - - self.assertEqual(user.name, "test") - - def test_insert_and_get_geo_data(self): - geo_table = GeoTable(geom="srid=4326;POINT(1.01234567 4.01234567)") - self.session.add(geo_table) - self.session.commit() - - self.assertIsNotNone(geo_table.id) - - self.assertEqual(self.session.query(GeoTable).count(), 1) - geo_table = self.session.query(GeoTable).limit(1)[0] - self.assertIsNotNone(geo_table.geom) - - def tearDown(self): - self.session.close_all() - os.remove(self.file_path) From a82c20bdbce7bf2933d4234db8fa750ede5f98d1 Mon Sep 17 00:00:00 2001 From: leendertvanwolfswinkel Date: Wed, 21 Aug 2024 17:49:15 +0200 Subject: [PATCH 04/17] flake8 --- processing/schematisation_algorithms.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/processing/schematisation_algorithms.py b/processing/schematisation_algorithms.py index d388de48..4a3334bd 100644 --- a/processing/schematisation_algorithms.py +++ b/processing/schematisation_algorithms.py @@ -17,7 +17,7 @@ # from hydxlib.scripts import run_import_export # from hydxlib.scripts import write_logging_to_file -from pathlib import Path 
+# from pathlib import Path from sqlalchemy.exc import OperationalError, DatabaseError # from threedi_results_analysis.processing.deps.sufhyd.import_sufhyd_main import Importer # from threedi_results_analysis.processing.deps.guess_indicator import guess_indicators_utils @@ -32,12 +32,12 @@ from qgis.core import ( QgsProject, QgsProcessingAlgorithm, - QgsProcessingException, + # QgsProcessingException, QgsProcessingParameterBoolean, QgsProcessingParameterFile, QgsProcessingParameterFileDestination, - QgsProcessingParameterFolderDestination, - QgsProcessingParameterString, + # QgsProcessingParameterFolderDestination, + # QgsProcessingParameterString, QgsVectorLayer, ) From 84b490b4b2a4cd9e8643a8fec9c980b75f736274 Mon Sep 17 00:00:00 2001 From: Ben van Basten Date: Thu, 22 Aug 2024 11:44:05 +0200 Subject: [PATCH 05/17] updated deps --- dependencies.py | 4 ++-- external-dependencies/populate.sh | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/dependencies.py b/dependencies.py index 2dbef613..274cf4bb 100644 --- a/dependencies.py +++ b/dependencies.py @@ -29,17 +29,18 @@ from PyQt5.QtWidgets import QProgressDialog from qgis.core import Qgis -import setuptools # noqa: https://github.com/pypa/setuptools/issues/2993 import importlib import logging import os import pkg_resources import platform +import setuptools # noqa: https://github.com/pypa/setuptools/issues/2993 import shutil import subprocess import sys import tarfile + # in case the dependency is a tar, the constraint should be the # explicit version (e.g. "==3.8.0") Dependency = namedtuple("Dependency", ["name", "package", "constraint", "tar"]) @@ -72,7 +73,6 @@ Dependency("condenser", "condenser", ">=0.2.1", False), Dependency("Shapely", "shapely", ">=2.0.0", False), Dependency("threedigrid-builder", "threedigrid_builder", "==1.17.*", False), - Dependency("hydxlib", "hydxlib", "==1.5.2", False), Dependency("h5netcdf", "h5netcdf", "", False), Dependency("greenlet", "greenlet", "!=0.4.17", False), Dependency("threedi-mi-utils", "threedi_mi_utils", "==0.1.2", False), diff --git a/external-dependencies/populate.sh b/external-dependencies/populate.sh index 92459400..d85c997b 100755 --- a/external-dependencies/populate.sh +++ b/external-dependencies/populate.sh @@ -67,7 +67,7 @@ wget https://files.pythonhosted.org/packages/cd/84/66072ee12c3e79061f183c09a24be # Download windows wheels (cp39, win, amd64) wget https://files.pythonhosted.org/packages/b2/8e/83d9e3bff5c0ff7a0ec7e850c785916e616ab20d8793943f9e1d2a987fab/shapely-2.0.0-cp39-cp39-win_amd64.whl -wget https://files.pythonhosted.org/packages/96/63/bc1f271b291be432f4db390dc8bee6854ee1894b72816f142f57801b3f5b/threedigrid_builder-1.17.0-cp311-cp311-win_amd64.whl +wget https://files.pythonhosted.org/packages/e0/cb/3b8c21f69f807e1436bd467df7d9a99e0ec68ea84b97e8ec5d0ce3a45aef/threedigrid_builder-1.17.0-cp39-cp39-win_amd64.whl#sha256=cf409d5dcd341122bd907210bb02e03d3818ba8f9cc83111bf5b770175c43400 wget https://files.pythonhosted.org/packages/b3/89/1d3b78577a6b2762cb254f6ce5faec9b7c7b23052d1cdb7237273ff37d10/greenlet-2.0.2-cp39-cp39-win_amd64.whl#sha256=db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564 wget https://files.pythonhosted.org/packages/5f/d6/5f59a5e5570c4414d94c6da4c97731deab832cbd14eaf23189d54a92d1e1/cftime-1.6.2-cp39-cp39-win_amd64.whl#sha256=86fe550b94525c327578a90b2e13418ca5ba6c636d5efe3edec310e631757eea From 26b9e4ba9bc1d3d1f37c2fc4791ed93b59be943d Mon Sep 17 00:00:00 2001 From: leendertvanwolfswinkel Date: Mon, 26 Aug 2024 10:56:19 +0200 Subject: 
[PATCH 06/17] 0.224.1.dev0 --- dependencies.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dependencies.py b/dependencies.py index 274cf4bb..b8e11204 100644 --- a/dependencies.py +++ b/dependencies.py @@ -60,7 +60,7 @@ Dependency("cftime", "cftime", ">=1.5.0", False), # threedigrid[results] Dependency("alembic", "alembic", "==1.8.*", False), Dependency("threedigrid", "threedigrid", "==2.2.*", False), - Dependency("threedi-schema", "threedi_schema", "==0.224.*", False), + Dependency("threedi-schema", "threedi_schema", "==0.224.1.dev0", False), Dependency("threedi-modelchecker", "threedi_modelchecker", "==2.10.*", False), Dependency("threedidepth", "threedidepth", "==0.6.3", False), Dependency("click", "click", ">=8.0", False), From 2589e0fa92a4d1bf0161c14f8bc19c68b207de20 Mon Sep 17 00:00:00 2001 From: leendertvanwolfswinkel Date: Tue, 3 Sep 2024 15:09:15 +0200 Subject: [PATCH 07/17] Bump threedi-* dependencies threedi-schema==0.224.1 threedigrid-builder==1.17.1 threedi-modelchecker==2.10.2 --- dependencies.py | 2 +- external-dependencies/populate.sh | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/dependencies.py b/dependencies.py index b8e11204..274cf4bb 100644 --- a/dependencies.py +++ b/dependencies.py @@ -60,7 +60,7 @@ Dependency("cftime", "cftime", ">=1.5.0", False), # threedigrid[results] Dependency("alembic", "alembic", "==1.8.*", False), Dependency("threedigrid", "threedigrid", "==2.2.*", False), - Dependency("threedi-schema", "threedi_schema", "==0.224.1.dev0", False), + Dependency("threedi-schema", "threedi_schema", "==0.224.*", False), Dependency("threedi-modelchecker", "threedi_modelchecker", "==2.10.*", False), Dependency("threedidepth", "threedidepth", "==0.6.3", False), Dependency("click", "click", ">=8.0", False), diff --git a/external-dependencies/populate.sh b/external-dependencies/populate.sh index d85c997b..6a46c5b0 100755 --- a/external-dependencies/populate.sh +++ b/external-dependencies/populate.sh @@ -67,7 +67,7 @@ wget https://files.pythonhosted.org/packages/cd/84/66072ee12c3e79061f183c09a24be # Download windows wheels (cp39, win, amd64) wget https://files.pythonhosted.org/packages/b2/8e/83d9e3bff5c0ff7a0ec7e850c785916e616ab20d8793943f9e1d2a987fab/shapely-2.0.0-cp39-cp39-win_amd64.whl -wget https://files.pythonhosted.org/packages/e0/cb/3b8c21f69f807e1436bd467df7d9a99e0ec68ea84b97e8ec5d0ce3a45aef/threedigrid_builder-1.17.0-cp39-cp39-win_amd64.whl#sha256=cf409d5dcd341122bd907210bb02e03d3818ba8f9cc83111bf5b770175c43400 +wget https://files.pythonhosted.org/packages/fe/e3/15a630b4cfd787bba84fb43beac7a25ae29c2b417ad7c4cfadae129a0819/threedigrid_builder-1.17.1-cp39-cp39-win_amd64.whl#sha256=24e06c136e399e1d6299c0a9b3e4c869084aba5878c833487fb13390aec10af8 wget https://files.pythonhosted.org/packages/b3/89/1d3b78577a6b2762cb254f6ce5faec9b7c7b23052d1cdb7237273ff37d10/greenlet-2.0.2-cp39-cp39-win_amd64.whl#sha256=db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564 wget https://files.pythonhosted.org/packages/5f/d6/5f59a5e5570c4414d94c6da4c97731deab832cbd14eaf23189d54a92d1e1/cftime-1.6.2-cp39-cp39-win_amd64.whl#sha256=86fe550b94525c327578a90b2e13418ca5ba6c636d5efe3edec310e631757eea @@ -75,8 +75,8 @@ wget https://files.pythonhosted.org/packages/5f/d6/5f59a5e5570c4414d94c6da4c9773 # Download linux wheels (both cp38 and cp310) wget https://files.pythonhosted.org/packages/06/07/0700e5e33c44bc87e19953244c29f73669cfb6f19868899170f9c7e34554/shapely-2.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl wget 
https://files.pythonhosted.org/packages/4e/03/f3bcb7d96aef6d56b62e2f25996f161c05f92a45d452165be2007b756e0f/shapely-2.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl -wget https://files.pythonhosted.org/packages/b7/6a/828470fc4c45cb3f8f046f3b737a6a527ad7d9959916ce05c153d8d5ae60/threedigrid_builder-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl -wget https://files.pythonhosted.org/packages/d4/22/08b7f0c1c28f5227e640fc5f216853b90b8ae616e72944a35e822cffc6ec/threedigrid_builder-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl +wget https://files.pythonhosted.org/packages/53/67/a3f7e0ec8936a2882c97eb1e595ea91c3a5de4869d023064bbc8886d6ab7/threedigrid_builder-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl +wget https://files.pythonhosted.org/packages/d0/6f/19252e21244e19adf6816cbc6b9d5f1f3cae72b9bd62f1f7da1bc484c9b1/threedigrid_builder-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl wget https://files.pythonhosted.org/packages/6e/11/a1f1af20b6a1a8069bc75012569d030acb89fd7ef70f888b6af2f85accc6/greenlet-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470 wget https://files.pythonhosted.org/packages/e1/17/d8042d82f44c08549b535bf2e7d1e87aa1863df5ed6cf1cf773eb2dfdf67/cftime-1.6.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=acb294fdb80e33545ae54b4421df35c4e578708a5ffce1c00408b2294e70ecef wget https://files.pythonhosted.org/packages/44/51/bc9d47beee47afda1d335f05efa848dc403bd183344f03d431281518e8ab/cftime-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl#sha256=7a820e16357dbdc9723b2059f7178451de626a8b2e5f80b9d91a77e3dac42133 From f4873b33c4d26d081da95b3533c91a086b143747 Mon Sep 17 00:00:00 2001 From: leendertvanwolfswinkel Date: Mon, 23 Dec 2024 12:13:15 +0100 Subject: [PATCH 08/17] Update dependencies.py --- dependencies.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dependencies.py b/dependencies.py index 274cf4bb..ac4443fa 100644 --- a/dependencies.py +++ b/dependencies.py @@ -60,8 +60,8 @@ Dependency("cftime", "cftime", ">=1.5.0", False), # threedigrid[results] Dependency("alembic", "alembic", "==1.8.*", False), Dependency("threedigrid", "threedigrid", "==2.2.*", False), - Dependency("threedi-schema", "threedi_schema", "==0.224.*", False), - Dependency("threedi-modelchecker", "threedi_modelchecker", "==2.10.*", False), + Dependency("threedi-schema", "threedi_schema", "==0.230.0.dev0", False), + Dependency("threedi-modelchecker", "threedi_modelchecker", "==2.14.1*", False), Dependency("threedidepth", "threedidepth", "==0.6.3", False), Dependency("click", "click", ">=8.0", False), Dependency("packaging", "packaging", "", False), From 13ff96528c058fbf0aebdd88c5518973910e5ade Mon Sep 17 00:00:00 2001 From: leendertvanwolfswinkel Date: Mon, 23 Dec 2024 12:21:26 +0100 Subject: [PATCH 09/17] Update dependencies.py --- dependencies.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dependencies.py b/dependencies.py index ac12dad9..b2f30e97 100644 --- a/dependencies.py +++ b/dependencies.py @@ -61,7 +61,7 @@ Dependency("alembic", "alembic", "==1.8.*", False), Dependency("threedigrid", "threedigrid", "==2.2.*", False), Dependency("threedi-schema", "threedi_schema", "==0.230.0.dev0", False), - Dependency("threedi-modelchecker", "threedi_modelchecker", "==2.14.1*", False), + Dependency("threedi-modelchecker", "threedi_modelchecker", "==2.14.1", False), 
Dependency("threedidepth", "threedidepth", "==0.6.3", False), Dependency("click", "click", ">=8.0", False), Dependency("packaging", "packaging", "", False), From 320abcedfb1270866e93f07aec681e8a7490f621 Mon Sep 17 00:00:00 2001 From: leendertvanwolfswinkel Date: Mon, 23 Dec 2024 12:29:15 +0100 Subject: [PATCH 10/17] Remove model checker --- dependencies.py | 2 +- processing/providers.py | 4 ++-- tests/test_dependencies.py | 28 ++++++++++++++-------------- 3 files changed, 17 insertions(+), 17 deletions(-) diff --git a/dependencies.py b/dependencies.py index b2f30e97..db53ff9e 100644 --- a/dependencies.py +++ b/dependencies.py @@ -61,7 +61,7 @@ Dependency("alembic", "alembic", "==1.8.*", False), Dependency("threedigrid", "threedigrid", "==2.2.*", False), Dependency("threedi-schema", "threedi_schema", "==0.230.0.dev0", False), - Dependency("threedi-modelchecker", "threedi_modelchecker", "==2.14.1", False), + # Dependency("threedi-modelchecker", "threedi_modelchecker", "==2.14.1", False), Dependency("threedidepth", "threedidepth", "==0.6.3", False), Dependency("click", "click", ">=8.0", False), Dependency("packaging", "packaging", "", False), diff --git a/processing/providers.py b/processing/providers.py index 6748bb5c..7b12162f 100644 --- a/processing/providers.py +++ b/processing/providers.py @@ -15,7 +15,7 @@ RastersToNetCDFAlgorithm, ) from threedi_results_analysis.processing.schematisation_algorithms import ( - CheckSchematisationAlgorithm, + # CheckSchematisationAlgorithm, MigrateAlgorithm, # ImportSufHydAlgorithm, # GuessIndicatorAlgorithm, @@ -47,7 +47,7 @@ def loadAlgorithms(self, *args, **kwargs): self.addAlgorithm(ThreediDepthAlgorithm()) self.addAlgorithm(ThreediMaxDepthAlgorithm()) # self.addAlgorithm(DWFCalculatorAlgorithm()) - self.addAlgorithm(CheckSchematisationAlgorithm()) + # self.addAlgorithm(CheckSchematisationAlgorithm()) self.addAlgorithm(MigrateAlgorithm()) # self.addAlgorithm(ImportHydXAlgorithm()) self.addAlgorithm(ThreeDiConvertToGpkgAlgorithm()) diff --git a/tests/test_dependencies.py b/tests/test_dependencies.py index 963afe41..61cdfd41 100644 --- a/tests/test_dependencies.py +++ b/tests/test_dependencies.py @@ -55,20 +55,20 @@ def test_install_dependencies(tmpdir): dependencies.ensure_everything_installed() -def test_uninstall_dependency(tmpdir, monkeypatch): - python_path = os.getenv("PYTHONPATH", "") - new_python_path = f"{python_path}:{tmpdir}" - monkeypatch.setenv("PYTHONPATH", new_python_path) - - small_dependencies = [ - Dependency("threedi-modelchecker", "threedi_modelchecker", ">=1.0.0", False) - ] - dependencies._install_dependencies( - small_dependencies, target_dir=tmpdir - ) - dependencies._uninstall_dependency(small_dependencies[0]) - for directory in os.listdir(tmpdir): - assert "threedi_modelchecker" not in directory +# def test_uninstall_dependency(tmpdir, monkeypatch): + # python_path = os.getenv("PYTHONPATH", "") + # new_python_path = f"{python_path}:{tmpdir}" + # monkeypatch.setenv("PYTHONPATH", new_python_path) + + # small_dependencies = [ + # Dependency("threedi-modelchecker", "threedi_modelchecker", ">=1.0.0", False) + # ] + # dependencies._install_dependencies( + # small_dependencies, target_dir=tmpdir + # ) + # dependencies._uninstall_dependency(small_dependencies[0]) + # for directory in os.listdir(tmpdir): + # assert "threedi_modelchecker" not in directory def test_install_dependencies_with_error(tmpdir): From 46fc4558886c3b1d98b4fe9b1b4ca7307b4d4e47 Mon Sep 17 00:00:00 2001 From: leendertvanwolfswinkel Date: Mon, 23 Dec 2024 12:35:52 
+0100 Subject: [PATCH 11/17] ... --- dependencies.py | 1 - processing/schematisation_algorithms.py | 296 ++++++++++++------------ 2 files changed, 148 insertions(+), 149 deletions(-) diff --git a/dependencies.py b/dependencies.py index db53ff9e..30914e2a 100644 --- a/dependencies.py +++ b/dependencies.py @@ -61,7 +61,6 @@ Dependency("alembic", "alembic", "==1.8.*", False), Dependency("threedigrid", "threedigrid", "==2.2.*", False), Dependency("threedi-schema", "threedi_schema", "==0.230.0.dev0", False), - # Dependency("threedi-modelchecker", "threedi_modelchecker", "==2.14.1", False), Dependency("threedidepth", "threedidepth", "==0.6.3", False), Dependency("click", "click", ">=8.0", False), Dependency("packaging", "packaging", "", False), diff --git a/processing/schematisation_algorithms.py b/processing/schematisation_algorithms.py index 4a3334bd..d55c33dc 100644 --- a/processing/schematisation_algorithms.py +++ b/processing/schematisation_algorithms.py @@ -23,7 +23,7 @@ # from threedi_results_analysis.processing.deps.guess_indicator import guess_indicators_utils from threedi_schema import ThreediDatabase -from threedi_modelchecker import ThreediModelChecker +# from threedi_modelchecker import ThreediModelChecker from threedi_schema import errors # from threedi_results_analysis.processing.download_hydx import download_hydx @@ -130,153 +130,153 @@ def createInstance(self): return MigrateAlgorithm() -class CheckSchematisationAlgorithm(QgsProcessingAlgorithm): - """ - Run the schematisation checker - """ - - INPUT = "INPUT" - OUTPUT = "OUTPUT" - ADD_TO_PROJECT = "ADD_TO_PROJECT" - - def initAlgorithm(self, config): - self.addParameter( - QgsProcessingParameterFile( - self.INPUT, self.tr("3Di Spatialite"), extension="sqlite" - ) - ) - - self.addParameter( - QgsProcessingParameterFileDestination( - self.OUTPUT, self.tr("Output"), fileFilter="csv" - ) - ) - - self.addParameter( - QgsProcessingParameterBoolean( - self.ADD_TO_PROJECT, self.tr("Add result to project"), defaultValue=True - ) - ) - - def processAlgorithm(self, parameters, context, feedback): - self.add_to_project = self.parameterAsBoolean( - parameters, self.ADD_TO_PROJECT, context - ) - self.output_file_path = None - input_filename = self.parameterAsFile(parameters, self.INPUT, context) - threedi_db = get_threedi_database(filename=input_filename, feedback=feedback) - if not threedi_db: - return {self.OUTPUT: None} - try: - model_checker = ThreediModelChecker(threedi_db) - except errors.MigrationMissingError: - feedback.pushWarning( - "The selected 3Di model does not have the latest migration. Please " - "migrate your model to the latest version." 
- ) - return {self.OUTPUT: None} - schema = threedi_db.schema - schema.set_spatial_indexes() - generated_output_file_path = self.parameterAsFileOutput( - parameters, self.OUTPUT, context - ) - self.output_file_path = f"{os.path.splitext(generated_output_file_path)[0]}.csv" - session = model_checker.db.get_session() - session.model_checker_context = model_checker.context - total_checks = len(model_checker.config.checks) - progress_per_check = 100.0 / total_checks - checks_passed = 0 - try: - with open(self.output_file_path, "w", newline="") as output_file: - writer = csv.writer(output_file) - writer.writerow( - [ - "level", - "error_code", - "id", - "table", - "column", - "value", - "description", - ] - ) - for i, check in enumerate(model_checker.checks(level="info")): - model_errors = check.get_invalid(session) - for error_row in model_errors: - writer.writerow( - [ - check.level.name, - check.error_code, - error_row.id, - check.table.name, - check.column.name, - getattr(error_row, check.column.name), - check.description(), - ] - ) - checks_passed += 1 - feedback.setProgress(int(checks_passed * progress_per_check)) - except PermissionError: - # PermissionError happens for example when a user has the file already open - # with Excel on Windows, which locks the file. - feedback.pushWarning( - f"Not enough permissions to write the file '{self.output_file_path}'.\n\n" - "The file may be used by another program. Please close all " - "other programs using the file or select another output " - "file." - ) - return {self.OUTPUT: None} - - return {self.OUTPUT: self.output_file_path} - - def postProcessAlgorithm(self, context, feedback): - if self.add_to_project: - if self.output_file_path: - result_layer = QgsVectorLayer( - self.output_file_path, "3Di schematisation errors" - ) - QgsProject.instance().addMapLayer(result_layer) - return {self.OUTPUT: self.output_file_path} - - def name(self): - """ - Returns the algorithm name, used for identifying the algorithm. This - string should be fixed for the algorithm, and must not be localised. - The name should be unique within each provider. Names should contain - lowercase alphanumeric characters only and no spaces or other - formatting characters. - """ - return "check_schematisation" - - def displayName(self): - """ - Returns the translated algorithm name, which should be used for any - user-visible display of the algorithm name. - """ - return self.tr("Check Schematisation") - - def group(self): - """ - Returns the name of the group this algorithm belongs to. This string - should be localised. - """ - return self.tr(self.groupId()) - - def groupId(self): - """ - Returns the unique ID of the group this algorithm belongs to. This - string should be fixed for the algorithm, and must not be localised. - The group id should be unique within each provider. Group id should - contain lowercase alphanumeric characters only and no spaces or other - formatting characters. 
- """ - return "Schematisation" - - def tr(self, string): - return QCoreApplication.translate("Processing", string) - - def createInstance(self): - return CheckSchematisationAlgorithm() +# class CheckSchematisationAlgorithm(QgsProcessingAlgorithm): + # """ + # Run the schematisation checker + # """ + + # INPUT = "INPUT" + # OUTPUT = "OUTPUT" + # ADD_TO_PROJECT = "ADD_TO_PROJECT" + + # def initAlgorithm(self, config): + # self.addParameter( + # QgsProcessingParameterFile( + # self.INPUT, self.tr("3Di Spatialite"), extension="sqlite" + # ) + # ) + + # self.addParameter( + # QgsProcessingParameterFileDestination( + # self.OUTPUT, self.tr("Output"), fileFilter="csv" + # ) + # ) + + # self.addParameter( + # QgsProcessingParameterBoolean( + # self.ADD_TO_PROJECT, self.tr("Add result to project"), defaultValue=True + # ) + # ) + + # def processAlgorithm(self, parameters, context, feedback): + # self.add_to_project = self.parameterAsBoolean( + # parameters, self.ADD_TO_PROJECT, context + # ) + # self.output_file_path = None + # input_filename = self.parameterAsFile(parameters, self.INPUT, context) + # threedi_db = get_threedi_database(filename=input_filename, feedback=feedback) + # if not threedi_db: + # return {self.OUTPUT: None} + # try: + # model_checker = ThreediModelChecker(threedi_db) + # except errors.MigrationMissingError: + # feedback.pushWarning( + # "The selected 3Di model does not have the latest migration. Please " + # "migrate your model to the latest version." + # ) + # return {self.OUTPUT: None} + # schema = threedi_db.schema + # schema.set_spatial_indexes() + # generated_output_file_path = self.parameterAsFileOutput( + # parameters, self.OUTPUT, context + # ) + # self.output_file_path = f"{os.path.splitext(generated_output_file_path)[0]}.csv" + # session = model_checker.db.get_session() + # session.model_checker_context = model_checker.context + # total_checks = len(model_checker.config.checks) + # progress_per_check = 100.0 / total_checks + # checks_passed = 0 + # try: + # with open(self.output_file_path, "w", newline="") as output_file: + # writer = csv.writer(output_file) + # writer.writerow( + # [ + # "level", + # "error_code", + # "id", + # "table", + # "column", + # "value", + # "description", + # ] + # ) + # for i, check in enumerate(model_checker.checks(level="info")): + # model_errors = check.get_invalid(session) + # for error_row in model_errors: + # writer.writerow( + # [ + # check.level.name, + # check.error_code, + # error_row.id, + # check.table.name, + # check.column.name, + # getattr(error_row, check.column.name), + # check.description(), + # ] + # ) + # checks_passed += 1 + # feedback.setProgress(int(checks_passed * progress_per_check)) + # except PermissionError: + # # PermissionError happens for example when a user has the file already open + # # with Excel on Windows, which locks the file. + # feedback.pushWarning( + # f"Not enough permissions to write the file '{self.output_file_path}'.\n\n" + # "The file may be used by another program. Please close all " + # "other programs using the file or select another output " + # "file." 
+ # ) + # return {self.OUTPUT: None} + + # return {self.OUTPUT: self.output_file_path} + + # def postProcessAlgorithm(self, context, feedback): + # if self.add_to_project: + # if self.output_file_path: + # result_layer = QgsVectorLayer( + # self.output_file_path, "3Di schematisation errors" + # ) + # QgsProject.instance().addMapLayer(result_layer) + # return {self.OUTPUT: self.output_file_path} + + # def name(self): + # """ + # Returns the algorithm name, used for identifying the algorithm. This + # string should be fixed for the algorithm, and must not be localised. + # The name should be unique within each provider. Names should contain + # lowercase alphanumeric characters only and no spaces or other + # formatting characters. + # """ + # return "check_schematisation" + + # def displayName(self): + # """ + # Returns the translated algorithm name, which should be used for any + # user-visible display of the algorithm name. + # """ + # return self.tr("Check Schematisation") + + # def group(self): + # """ + # Returns the name of the group this algorithm belongs to. This string + # should be localised. + # """ + # return self.tr(self.groupId()) + + # def groupId(self): + # """ + # Returns the unique ID of the group this algorithm belongs to. This + # string should be fixed for the algorithm, and must not be localised. + # The group id should be unique within each provider. Group id should + # contain lowercase alphanumeric characters only and no spaces or other + # formatting characters. + # """ + # return "Schematisation" + + # def tr(self, string): + # return QCoreApplication.translate("Processing", string) + + # def createInstance(self): + # return CheckSchematisationAlgorithm() # class ImportSufHydAlgorithm(QgsProcessingAlgorithm): From 83b59abcbab2e64efaa6438f4358cb40a415a2a3 Mon Sep 17 00:00:00 2001 From: leendertvanwolfswinkel Date: Mon, 23 Dec 2024 12:53:14 +0100 Subject: [PATCH 12/17] More pruning --- dependencies.py | 2 - deps_write.py | 603 ++++++++ processing/grid_creation_algorithm.py | 3 +- threedi_plugin.py | 9 - tool_water_balance/__init__.py | 3 - tool_water_balance/calculation.py | 1001 ------------- tool_water_balance/config.py | 368 ----- tool_water_balance/media/deltares-152x152.png | Bin 5733 -> 0 bytes tool_water_balance/media/deltares.png | Bin 2637 -> 0 bytes tool_water_balance/media/icon_toolbox.png | Bin 1780 -> 0 bytes .../media/weight-scale-deltares.png | Bin 1759 -> 0 bytes tool_water_balance/media/weight-scale.png | Bin 745 -> 0 bytes tool_water_balance/models/__init__.py | 0 tool_water_balance/models/wb_item.py | 66 - tool_water_balance/tests.py | 411 ------ tool_water_balance/tools.py | 147 -- tool_water_balance/utils.py | 286 ---- tool_water_balance/views/__init__.py | 0 tool_water_balance/views/custom_pg_Items.py | 64 - tool_water_balance/views/widgets.py | 1238 ----------------- 20 files changed, 604 insertions(+), 3597 deletions(-) create mode 100644 deps_write.py delete mode 100644 tool_water_balance/__init__.py delete mode 100644 tool_water_balance/calculation.py delete mode 100644 tool_water_balance/config.py delete mode 100644 tool_water_balance/media/deltares-152x152.png delete mode 100644 tool_water_balance/media/deltares.png delete mode 100644 tool_water_balance/media/icon_toolbox.png delete mode 100644 tool_water_balance/media/weight-scale-deltares.png delete mode 100644 tool_water_balance/media/weight-scale.png delete mode 100644 tool_water_balance/models/__init__.py delete mode 100644 tool_water_balance/models/wb_item.py delete mode 100644 
tool_water_balance/tests.py
 delete mode 100644 tool_water_balance/tools.py
 delete mode 100644 tool_water_balance/utils.py
 delete mode 100644 tool_water_balance/views/__init__.py
 delete mode 100644 tool_water_balance/views/custom_pg_Items.py
 delete mode 100644 tool_water_balance/views/widgets.py

diff --git a/dependencies.py b/dependencies.py
index 30914e2a..bc0f74d0 100644
--- a/dependencies.py
+++ b/dependencies.py
@@ -71,10 +71,8 @@
     Dependency("networkx", "networkx", "", False),
     Dependency("condenser", "condenser", ">=0.2.1", False),
     Dependency("Shapely", "shapely", ">=2.0.0", False),
-    Dependency("threedigrid-builder", "threedigrid_builder", "==1.17.*", False),
     Dependency("h5netcdf", "h5netcdf", "", False),
     Dependency("greenlet", "greenlet", "!=0.4.17", False),
-    Dependency("threedi-mi-utils", "threedi_mi_utils", "==0.1.4", False),
 ]

 # On Windows, the hdf5 binary and thus h5py version depends on the QGis version
diff --git a/deps_write.py b/deps_write.py
new file mode 100644
index 00000000..9bb5b382
--- /dev/null
+++ b/deps_write.py
@@ -0,0 +1,603 @@
+"""Handle dependencies: installation and checking/logging.
+
+See :doc:`linked_external-dependencies_readme`
+(``external-dependencies/README.rst``) for a full explanation of the
+dependency handling.
+
+``python3 dependencies.py`` runs ``generate_constraints_txt()``: it generates
+``constraints.txt``.
+
+:py:func:`ensure_everything_installed()` checks if :py:data:`DEPENDENCIES` are
+installed and installs them if needed.
+
+:py:func:`check_importability()` double-checks if everything is importable. It also
+logs the locations.
+
+Note that we use *logging* in ``check_importability()`` as we want to have the
+result in the logfile. The rest of the module uses ``print()`` statements
+because it gets executed before any logging has been configured.
+
+As we're called directly from ``__init__.py``, the imports should be
+restricted. No qgis message boxes and the like!
+
+"""
+from collections import namedtuple
+from pathlib import Path
+
+import importlib
+import logging
+import os
+import pkg_resources
+import platform
+import setuptools  # noqa: https://github.com/pypa/setuptools/issues/2993
+import shutil
+import subprocess
+import sys
+import tarfile
+
+
+# in case the dependency is a tar, the constraint should be the
+# explicit version (e.g. "==3.8.0")
+Dependency = namedtuple("Dependency", ["name", "package", "constraint", "tar"])
+
+#: List of expected dependencies.
+DEPENDENCIES = [
+    Dependency("SQLAlchemy", "sqlalchemy", "==2.0.6", False),
+    Dependency("GeoAlchemy2", "geoalchemy2", "==0.15.*", False),
+    Dependency("pyqtgraph", "pyqtgraph", ">=0.13.2", False),
+    Dependency(
+        "importlib-resources", "importlib_resources", "", False
+    ),  # backward compat. alembic
+    Dependency(
+        "zipp", "zipp", "", False
+    ),  # backward compat.
alembic + Dependency("Mako", "mako", "", False), + Dependency("cftime", "cftime", ">=1.5.0", False), # threedigrid[results] + Dependency("alembic", "alembic", "==1.8.*", False), + Dependency("threedigrid", "threedigrid", "==2.2.*", False), + Dependency("threedi-schema", "threedi_schema", "==0.230.0.dev0", False), + Dependency("threedidepth", "threedidepth", "==0.6.3", False), + Dependency("click", "click", ">=8.0", False), + Dependency("packaging", "packaging", "", False), + Dependency("typing-extensions", "typing_extensions", ">=4.2.0", False), + Dependency( + "colorama", "colorama", "", False + ), # dep of click and threedi-modelchecker (windows) + Dependency("networkx", "networkx", "", False), + Dependency("condenser", "condenser", ">=0.2.1", False), + Dependency("Shapely", "shapely", ">=2.0.0", False), + Dependency("threedigrid-builder", "threedigrid_builder", "==1.17.*", False), + Dependency("h5netcdf", "h5netcdf", "", False), + Dependency("greenlet", "greenlet", "!=0.4.17", False), + Dependency("threedi-mi-utils", "threedi_mi_utils", "==0.1.4", False), +] + +# On Windows, the hdf5 binary and thus h5py version depends on the QGis version +# QGis upgraded from hdf5 == 1.10.7 to hdf5 == 1.14.0 in QGis 3.28.6 +QGIS_VERSION = 34000 +if QGIS_VERSION < 32806 and platform.system() == "Windows": + SUPPORTED_HDF5_VERSIONS = ["1.10.7"] + H5PY_DEPENDENCY = Dependency("h5py", "h5py", "==2.10.0", False) +elif QGIS_VERSION >= 34000 and platform.system() == "Windows": + SUPPORTED_HDF5_VERSIONS = ["1.14.0"] + H5PY_DEPENDENCY = Dependency("h5py", "h5py", "==3.10.0", False) +else: + SUPPORTED_HDF5_VERSIONS = ["1.14.0"] + H5PY_DEPENDENCY = Dependency("h5py", "h5py", "==3.8.0", True) + +if QGIS_VERSION < 32811 and platform.system() == "Windows": + WINDOWS_PLATFORM_DEPENDENCIES = [ + Dependency("scipy", "scipy", "==1.6.2", True), + ] +elif QGIS_VERSION >= 34000 and platform.system() == "Windows": + WINDOWS_PLATFORM_DEPENDENCIES = [ + Dependency("scipy", "scipy", "==1.13.0", True), + ] +else: + WINDOWS_PLATFORM_DEPENDENCIES = [ + Dependency("scipy", "scipy", "==1.10.1", False), + ] + +# If you add a dependency, also adjust external-dependencies/populate.sh +INTERESTING_IMPORTS = ["numpy", "osgeo", "pip", "setuptools"] + +OUR_DIR = Path(__file__).parent + +logger = logging.getLogger(__name__) + + + + +def ensure_everything_installed(): + """Check if DEPENDENCIES are installed and install them if missing.""" + + _remove_old_distributions(DEPENDENCIES + WINDOWS_PLATFORM_DEPENDENCIES + [H5PY_DEPENDENCY], _prev_dependencies_target_dir()) + + # If required, create deps folder and prepend to the path + target_dir = _dependencies_target_dir(create=True) + if str(target_dir) not in sys.path: + print(f"Prepending {target_dir} to sys.path") + sys.path.insert(0, str(target_dir)) + + _refresh_python_import_mechanism() + + profile_python_names = [item.name for item in _dependencies_target_dir().iterdir()] + print("Contents of our deps dir:\n %s" % "\n ".join(profile_python_names)) + + print("sys.path:") + for directory in sys.path: + print(" - %s" % directory) + + _ensure_prerequisite_is_installed() + + missing = _check_presence(DEPENDENCIES) + restart_required = False + if platform.system() == "Windows": + missing += _check_presence(WINDOWS_PLATFORM_DEPENDENCIES) + if not _ensure_h5py_installed(): + restart_required = True + + if missing: + print("Missing dependencies:") + for deps in missing: + print(deps.name) + + try: + _install_dependencies(missing, target_dir=target_dir) + except RuntimeError: + # In case some 
libraries are already imported, we cannot uninstall
+        # because QGIS acquires a lock on dll/pyd-files. Therefore
+        # we need to restart Qgis.
+        restart_required = True
+        pass
+
+    restart_marker = Path(target_dir / "restarted.marker")
+
+    if restart_required or not restart_marker.exists():
+        if _is_windows():
+            pass
+        # Always update the import mechanism
+        _refresh_python_import_mechanism()
+
+    else:
+        print("Dependencies up to date")
+
+
+def _ensure_h5py_installed():
+    """
+    On Windows, Qgis comes with an HDF5 version installed.
+    This plugin uses the h5py python package, which is built against a specific version
+    of HDF5. The Qgis HDF5 version and the HDF5 version of the h5py package must be the
+    same, otherwise it will not work. In the external-dependencies folder we supply a
+    Windows version of h5py built using HDF5 1.10.7. On pypi there is no h5py 2.10.0 package available
+    built with Python 3.9 and HDF5 1.10.7. We need to create such a wheel ourselves.
+
+    The following situations can occur:
+
+                                        | QGIS HDF5 = 1.10.7  | QGIS HDF5 = 1.14.0
+    -----------------------------------|---------------------|---------------
+    h5py built with 1.10.7             | A: Good             | B: Qgis crash
+    h5py built with 1.14.0             | A: Qgis crash       | B: Good
+    h5py built with other HDF5 version | A: Qgis crash       | B: Qgis crash
+
+    The situations are marked A and B in the table above, one column per
+    QGis-supplied HDF5 version.
+
+    In version 3.28.6, QGis updated their HDF5.dll binary from 1.10.7 to 1.14.0.
+
+    When h5py is built for the qgis-included HDF5 DLL,
+    everything is good and the plugin can be loaded without any problems.
+
+    A crash occurs when a user upgrades/downgrades their Qgis version when
+    the ThreediToolbox is already installed with a specific version of h5py.
+    In these cases we also need to upgrade/downgrade the h5py version installed with
+    ThreediToolbox.
+
+    We use the H5pyMarker to mark the installed h5py version. This is because we cannot check the version
+    by importing h5py, as Qgis will crash if the HDF5 and h5py binaries do not match.
+    """
+    if QGIS_VERSION < 32806 and platform.system() == "Windows":
+        hdf5_version = "1.10.7"
+    else:
+        hdf5_version = "1.14.0"
+    h5py_missing = _check_presence([H5PY_DEPENDENCY])
+    marker_version = H5pyMarker.version()
+    if h5py_missing:
+        return _install_h5py(hdf5_version)
+
+    if hdf5_version in SUPPORTED_HDF5_VERSIONS:
+        if marker_version == hdf5_version:
+            # Do nothing
+            pass
+        else:
+            return _install_h5py(hdf5_version)
+
+    return True
+
+
+def _install_h5py(hdf5_version: str):
+    if hdf5_version not in SUPPORTED_HDF5_VERSIONS:
+        # raise an error because we cannot continue
+        message = (
+            f"Unsupported HDF5 version: {hdf5_version}. "
+            f"The following HDF5 versions are supported: {SUPPORTED_HDF5_VERSIONS}"
+        )
+        raise RuntimeError(message)
+
+    # In case the (old) h5py library is already imported, we cannot uninstall
+    # h5py because Windows acquires a lock on the *.dll files. Therefore
+    # we need to restart Qgis.
+    # _uninstall_dependency(H5PY_DEPENDENCY)
+    try:
+        _install_dependencies([H5PY_DEPENDENCY], target_dir=_dependencies_target_dir())
+    except RuntimeError:
+        return False
+    H5pyMarker.create(hdf5_version)
+    return True
+
+
+class H5pyMarker:
+    """Marker indicating with which HDF5 binaries the h5py is installed.
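+
+    A minimal usage sketch (illustrative only; it assumes the marker file
+    next to this module is writable)::
+
+        H5pyMarker.create("1.14.0")  # remember which HDF5 h5py was built for
+        assert H5pyMarker.version() == "1.14.0"
+        H5pyMarker.remove()  # drop the marker to force a fresh check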
+
+    The supported HDF5 versions are listed in ``SUPPORTED_HDF5_VERSIONS``
+    (1.10.7 or 1.14.0, depending on the QGis version):
+    - 1.10.7: use h5py from the external-dependencies folder in this repo
+    """
+
+    H5PY_MARKER = OUR_DIR / ".h5py_marker"
+
+    @classmethod
+    def version(cls) -> str:
+        if cls.H5PY_MARKER.exists():
+            with open(cls.H5PY_MARKER, "r") as marker:
+                version = marker.readline()
+            return version
+        else:
+            return ""
+
+    @classmethod
+    def create(cls, version: str):
+        with open(cls.H5PY_MARKER, "w") as marker:
+            marker.write(version)
+
+    @classmethod
+    def remove(cls):
+        cls.H5PY_MARKER.unlink()
+
+
+def _ensure_prerequisite_is_installed(prerequisite="pip"):
+    """Check the basics: pip.
+
+    People using OSGEO custom installs sometimes exclude those
+    dependencies. Our installation scripts fail, then, because of the missing
+    'pip'.
+
+    """
+    try:
+        importlib.import_module(prerequisite)
+    except Exception as e:
+        msg = (
+            "%s. 'pip', which we need, is missing. It is normally included with "
+            "python. You are *probably* using a custom minimal OSGEO release. "
+            "Please re-install with 'pip' included."
+        ) % e
+        print(msg)
+        raise RuntimeError(msg)
+
+
+def _dependencies_target_dir(our_dir=OUR_DIR, create=False) -> Path:
+    """Return (and create) the desired deps folder
+
+    This is the 'deps' subdirectory of the plugin home folder
+
+    """
+    target_dir = our_dir / "deps"
+    if not target_dir.exists() and create:
+        print(f"Creating target dir {target_dir}")
+        target_dir.mkdir()
+
+    return target_dir
+
+
+def _prev_dependencies_target_dir(our_dir=OUR_DIR) -> Path:
+    """Return python dir inside our profile
+
+    Return two dirs up if we're inside the plugins dir. This was the
+    previous installation folder of the dependencies.
+    """
+    if "plugins" in str(our_dir).lower():
+        return OUR_DIR.parent.parent
+
+
+def _remove_old_distributions(dependencies, path):
+    """Remove old distributions of dependencies
+
+    In previous versions of the Toolbox, dependencies were
+    stored in the user's 'python' folder. This caused
+    versioning conflicts (as these dependencies were
+    not removed when the plugin was uninstalled).
+
+    Removes all folders and files that contain the
+    dependency name or package name
+    """
+    succeeded = True
+    files_to_remove = [
+        node
+        for node in os.listdir(str(path))
+        for dependency in dependencies
+        if (dependency.package in node or dependency.name in node)
+    ]
+
+    for f in files_to_remove:
+        dep_path = str(path / f)
+
+        try:
+            if os.path.exists(dep_path):
+                if os.path.isfile(dep_path):
+                    print(f"Deleting file {f} from {path}")
+                    os.remove(dep_path)
+                else:
+                    print(f"Deleting folder {f} from {path}")
+                    shutil.rmtree(dep_path)
+        except PermissionError as e:
+            print(f"Unable to remove {dep_path} ({str(e)})")
+            succeeded = False
+
+    return succeeded
+
+
+def check_importability():
+    """Check if the dependencies are importable and log the locations.
+
+    If something is not importable, which should not happen, it raises an
+    ImportError automatically. Which is exactly what we want, because we
+    cannot continue.
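+
+    An illustrative log line (the format comes from the ``logger.info()``
+    call below; the path is hypothetical)::
+
+        Import 'threedigrid' found at
+            '<plugin dir>/deps/threedigrid/__init__.py'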
+
+    """
+    packages = [dependency.package for dependency in DEPENDENCIES]
+    packages += INTERESTING_IMPORTS
+    logger.info("sys.path:\n    %s", "\n    ".join(sys.path))
+    deps_in_target_dir = [item.name for item in _dependencies_target_dir().iterdir()]
+    logger.info(
+        "Contents of our dependency dir:\n    %s",
+        "\n    ".join(deps_in_target_dir),
+    )
+    for package in packages:
+        imported_package = importlib.import_module(package)
+        logger.info(
+            "Import '%s' found at \n    '%s'", package, imported_package.__file__
+        )
+
+
+def _uninstall_dependency(dependency):
+    print("Trying to uninstall dependency %s" % dependency.name)
+    if dependency.tar:
+        # just remove the folders
+        path = _dependencies_target_dir()
+        items_to_remove = [node for node in os.listdir(str(path)) if (dependency.package in node or dependency.name in node)]
+        for f in items_to_remove:
+            dep_path = str(path / f)
+
+            try:
+                if os.path.exists(dep_path):
+                    if os.path.isfile(dep_path):
+                        print(f"Deleting file {f} from {path}")
+                        os.remove(dep_path)
+                    else:
+                        print(f"Deleting folder {f} from {path}")
+                        shutil.rmtree(dep_path)
+            except PermissionError as e:
+                print(f"Unable to remove {dep_path} ({str(e)})")
+        return
+
+    python_interpreter = _get_python_interpreter()
+    startupinfo = None
+    if _is_windows():
+        startupinfo = subprocess.STARTUPINFO()
+        # Prevents terminal screens from popping up
+        startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
+    process = subprocess.Popen(
+        [
+            python_interpreter,
+            "-m",
+            "pip",
+            "uninstall",
+            "--yes",
+            (dependency.name),
+        ],
+        universal_newlines=True,
+        stdin=subprocess.PIPE,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+        startupinfo=startupinfo,
+    )
+    # The input/output/error stream handling is a bit involved, but it is
+    # necessary because of a python bug on windows 7, see
+    # https://bugs.python.org/issue3905 .
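+    # In short: close stdin first so pip can never block waiting for input,
+    # then drain stdout/stderr completely before wait() so a full pipe
+    # buffer cannot deadlock the child process.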
+    i, o, e = (process.stdin, process.stdout, process.stderr)
+    i.close()
+    result = o.read() + e.read()
+    o.close()
+    e.close()
+    print(result)
+    exit_code = process.wait()
+    if exit_code:
+        print("Uninstalling %s failed" % dependency.name)
+
+
+def _install_dependencies(dependencies, target_dir):
+    if not dependencies:
+        return
+
+    python_interpreter = _get_python_interpreter()
+    base_command = [
+        python_interpreter,
+        "-m",
+        "pip",
+        "install",
+        "--upgrade",
+        "--no-deps",
+        "--find-links",
+        str(OUR_DIR / "external-dependencies"),
+        "--no-index",
+        "--target",
+        str(target_dir),
+    ]
+
+    dialog = None
+    bar = None
+    startupinfo = None
+    if _is_windows():
+        pass
+
+    for count, dependency in enumerate(dependencies):
+        _uninstall_dependency(dependency)
+        print("Installing '%s' into %s" % (dependency.name, target_dir))
+        if dialog:
+            dialog.setLabelText(f"Installing {dependency.name}")
+
+        if dependency.tar:
+            # Just extract the tar into the target folder; we already know it exists
+            tar_path = f"{(OUR_DIR / 'external-dependencies')}/{dependency.name}-{dependency.constraint[2:]}.tar"
+            tar = tarfile.open(tar_path)
+            tar.extractall(str(target_dir))
+            tar.close()
+        else:
+            command = base_command + [dependency.name + dependency.constraint]
+
+            process = subprocess.Popen(
+                command,
+                universal_newlines=True,
+                stdin=subprocess.PIPE,
+                stdout=subprocess.PIPE,
+                stderr=subprocess.PIPE,
+                startupinfo=startupinfo,
+            )
+            # The input/output/error stream handling is a bit involved, but it is
+            # necessary because of a python bug on windows 7, see
+            # https://bugs.python.org/issue3905 .
+            i, o, e = (process.stdin, process.stdout, process.stderr)
+            i.close()
+            result = o.read() + e.read()
+            o.close()
+            e.close()
+            print(result)
+            exit_code = process.wait()
+            if exit_code:
+                if dialog:
+                    dialog.close()
+
+                raise RuntimeError(
+                    f"Installing {dependency.name} failed ({exit_code}) ({result})"
+                )
+
+        print("Installed %s into %s" % (dependency.name, target_dir))
+        if dependency.package in sys.modules:
+            print("Unloading old %s module" % dependency.package)
+            del sys.modules[dependency.package]
+            # check_importability() will be called soon, which will import them again.
+            # By removing them from sys.modules, we prevent older versions from
+            # sticking around.
+
+        if bar:
+            bar.setValue(int((count / len(dependencies)) * 100))
+            bar.update()
+
+    if dialog:
+        dialog.close()
+
+
+def _is_windows():
+    """Return whether we are starting from QGIS on Windows."""
+    executable = sys.executable
+    _, filename = os.path.split(executable)
+    if "python3" in filename.lower():
+        return False
+    elif "qgis" in filename.lower():
+        if platform.system().lower() == "darwin":
+            return False
+        else:
+            return True
+    else:
+        raise EnvironmentError("Unexpected value for sys.executable: %s" % executable)
+
+
+def _get_python_interpreter():
+    """Return the path to the python3 interpreter.
+
+    Under linux sys.executable is set to the python3 interpreter used by Qgis.
+    However, under Windows/Mac this is not the case and sys.executable refers to the
+    Qgis start-up script.
+    """
+    interpreter = None
+    executable = sys.executable
+    directory, _ = os.path.split(executable)
+    if _is_windows():
+        interpreter = os.path.join(directory, "python3.exe")
+    elif platform.system().lower() == "darwin":
+        interpreter = os.path.join(directory, "bin", "python3")
+    else:
+        interpreter = executable
+
+    assert os.path.exists(interpreter)  # safety check
+    return interpreter
+
+
+def _check_presence(dependencies):
+    """Check if all dependencies are present. Return missing dependencies."""
+    missing = []
+    for dependency in dependencies:
+        requirement = dependency.name + dependency.constraint
+        print("Checking presence of %s..." % requirement)
+        try:
+            result = pkg_resources.require(requirement)
+            print("Requirement %s found: %s" % (requirement, result))
+        except pkg_resources.DistributionNotFound as e:
+            print(
+                "Dependency '%s' (%s) not found (%s)"
+                % (dependency.name, dependency.constraint, str(e))
+            )
+            missing.append(dependency)
+        except pkg_resources.VersionConflict as e:
+            print(
+                'Version conflict:\n'
+                f'    Installed: {e.dist}\n'
+                f'    Required: {e.req}'
+            )
+            if isinstance(e, pkg_resources.ContextualVersionConflict):
+                print(f'    By: {e.required_by}')
+            missing.append(dependency)
+        except Exception as e:
+            print(
+                "Installing dependency '%s' (%s) went wrong (%s)"
+                % (dependency.name, dependency.constraint, str(e))
+            )
+            missing.append(dependency)
+    return missing
+
+
+def _refresh_python_import_mechanism():
+    """Refresh the import mechanism.
+
+    This is required when deps are dynamically installed/removed. The modules
+    'importlib' and 'pkg_resources' need to update their internal data structures.
+    """
+    # This function should be called if any modules are created/installed while your
+    # program is running to guarantee all finders will notice the new module's existence.
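+    # A typical call order after a runtime install (hypothetical package
+    # name; the helpers are the ones defined above):
+    #
+    #   _install_dependencies(missing, target_dir=_dependencies_target_dir())
+    #   _refresh_python_import_mechanism()
+    #   importlib.import_module("some_new_package")  # now resolvable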
+    importlib.invalidate_caches()
+
+    # https://stackoverflow.com/questions/58612272/pkg-resources-get-distributionmymodule-version-not-updated-after-reload
+    # Apparently pkg_resources needs to be reloaded to be up-to-date with newly installed packages
+    importlib.reload(pkg_resources)
+
+
+def generate_constraints_txt(target_dir=OUR_DIR):
+    """Called from the ``__main__`` to generate ``constraints.txt``."""
+    constraints_file = target_dir / "constraints.txt"
+    lines = ["# Generated by dependencies.py"]
+    lines += [(dependency.name + dependency.constraint) for dependency in DEPENDENCIES]
+    lines.append("")
+    constraints_file.write_text("\n".join(lines))
+    print("Wrote constraints to %s" % constraints_file)
+
+
+if __name__ == "__main__":  # pragma: no cover
+    generate_constraints_txt()
diff --git a/processing/grid_creation_algorithm.py b/processing/grid_creation_algorithm.py
index 76d71d18..bcef07c3 100644
--- a/processing/grid_creation_algorithm.py
+++ b/processing/grid_creation_algorithm.py
@@ -8,7 +8,6 @@
     QgsProcessingParameterFileDestination,
     QgsVectorLayer,
 )
-from threedigrid_builder import make_gridadmin, SchematisationError
 from threedi_results_analysis.processing.processing_utils import gridadmin2geopackage, load_computational_layers
 import logging
 import io
@@ -108,7 +107,7 @@ def progress_rep(progress, info):
         feedback.pushInfo(info)

     # Capture threedigridbuilder logging
-    logger = logging.getLogger("threedigrid_builder.grid.connection_nodes")
+    # logger = logging.getLogger("threedigrid_builder.grid.connection_nodes")
     assert logger.hasHandlers()  # Check whether we have the right one
     log_capture_string = io.StringIO()
     ch = logging.StreamHandler(log_capture_string)
diff --git a/threedi_plugin.py b/threedi_plugin.py
index 1bf0ea66..3b589d0b 100644
--- a/threedi_plugin.py
+++ b/threedi_plugin.py
@@ -32,7 +32,6 @@
 from threedi_results_analysis.tool_graph.graph import ThreeDiGraph
 from threedi_results_analysis.tool_sideview.sideview import ThreeDiSideView
 from threedi_results_analysis.tool_statistics.statistics import StatisticsTool
-from threedi_results_analysis.tool_water_balance import WaterBalanceTool
 from threedi_results_analysis.tool_watershed.watershed_analysis import (
     ThreeDiWatershedAnalyst,
 )
@@ -95,7 +94,6 @@ def initGui(self):
         self.graph_tool = ThreeDiGraph(iface, self.model)
         self.sideview_tool = ThreeDiSideView(iface, self.model)
         self.stats_tool = StatisticsTool(iface, self.model)
-        self.water_balance_tool = WaterBalanceTool(iface, self.model)
         self.watershed_tool = ThreeDiWatershedAnalyst(iface, self.model)
         self.logfile_tool = ShowLogfile(iface)
         self.temporal_manager = TemporalManager(self.model)
@@ -108,7 +106,6 @@
             (self.graph_tool, False),
             (self.sideview_tool, False),
             (self.stats_tool, False),
-            (self.water_balance_tool, False),
             (self.watershed_tool, False),
             (self.logfile_tool, True)
         ]
@@ -203,12 +200,6 @@
         self.model.result_changed.connect(self.stats_tool.result_changed)
         self.model.grid_changed.connect(self.stats_tool.grid_changed)

-        # water balance signals
-        self.model.result_added.connect(self.water_balance_tool.result_added)
-        self.model.result_removed.connect(self.water_balance_tool.result_removed)
-        self.model.result_changed.connect(self.water_balance_tool.result_changed)
-        self.model.grid_changed.connect(self.water_balance_tool.grid_changed)
-
         for tool, _ in self.tools:
             self.dockwidget.add_custom_actions(tool.get_custom_actions())
diff --git a/tool_water_balance/__init__.py b/tool_water_balance/__init__.py
deleted file mode
100644 index 8291125d..00000000 --- a/tool_water_balance/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .tools import WaterBalanceTool - -WaterBalanceTool # flake8 diff --git a/tool_water_balance/calculation.py b/tool_water_balance/calculation.py deleted file mode 100644 index b89595fb..00000000 --- a/tool_water_balance/calculation.py +++ /dev/null @@ -1,1001 +0,0 @@ -from collections import defaultdict -from copy import deepcopy -import logging - -import numpy as np -import numpy.ma as ma -from qgis.core import QgsCoordinateTransform -from qgis.core import QgsFeatureRequest -from qgis.core import QgsPointXY -from qgis.core import QgsProject -from threedigrid_builder.constants import LineType -from threedigrid_builder.constants import NodeType - -from .config import AGG_CUMULATIVE_FLOW -from .config import AGG_FLOW -from .config import GRAPH_SERIES -from .config import INPUT_SERIES -from .config import TIME_UNITS_TO_SECONDS -from .utils import WrappedResult - -NO_ENDPOINT_ID = -9999 - -LINE_TYPES_1D = { - LineType.LINE_1D_EMBEDDED, - LineType.LINE_1D_ISOLATED, - LineType.LINE_1D_CONNECTED, - LineType.LINE_1D_LONG_CRESTED, - LineType.LINE_1D_SHORT_CRESTED, - LineType.LINE_1D_DOUBLE_CONNECTED, -} -LINE_TYPES_1D2D = { - LineType.LINE_1D2D_SINGLE_CONNECTED_CLOSED, - LineType.LINE_1D2D_SINGLE_CONNECTED_OPEN_WATER, - LineType.LINE_1D2D_DOUBLE_CONNECTED_CLOSED, - LineType.LINE_1D2D_DOUBLE_CONNECTED_OPEN_WATER, - LineType.LINE_1D2D_POSSIBLE_BREACH, - LineType.LINE_1D2D_ACTIVE_BREACH, - LineType.LINE_1D2D_GROUNDWATER, - 58, # Also LINE_1D2D_GROUNDWATER? -} -NODE_TYPES_1D = { - NodeType.NODE_1D_NO_STORAGE, - NodeType.NODE_1D_STORAGE, - NodeType.NODE_1D_BOUNDARIES, -} -NODE_TYPES_2D = { - NodeType.NODE_2D_OPEN_WATER, - NodeType.NODE_2D_BOUNDARIES, -} -NODE_TYPES_2D_GROUNDWATER = { - NodeType.NODE_2D_GROUNDWATER_BOUNDARIES, - NodeType.NODE_2D_GROUNDWATER, -} -NODE_TYPES_BOUNDARIES = { - NodeType.NODE_1D_BOUNDARIES, - NodeType.NODE_2D_BOUNDARIES, -} - -logger = logging.getLogger(__name__) - - -class WaterBalanceCalculation(object): - - def __init__(self, result, polygon, mapcrs): - self.wrapped_result = WrappedResult(result) - self.polygon = polygon - self.mapcrs = mapcrs - - logger.info("polygon of wb area: %s", self.polygon.asWkt()) - - ga = self.wrapped_result.threedi_result.gridadmin - - # total nr of x-dir (horizontal in topview) 2d lines - nr_2d_x_dir = ga.get_from_meta("liutot") - # total nr of y-dir (vertical in topview) 2d lines - nr_2d_y_dir = ga.get_from_meta("livtot") - # total nr of 2d lines - nr_2d = ga.get_from_meta("l2dtot") - # total nr of groundwater lines - start_gr = ga.get_from_meta("lgrtot") - - # get range of horizontal (in top view) surface water line ids - x2d_surf_range_min = 1 - x2d_surf_range_max = nr_2d_x_dir - self.x2d_surf_range = list( - range(x2d_surf_range_min, x2d_surf_range_max + 1) - ) # noqa - - # get range of vertical (in top view) surface water line ids - y2d_surf_range_min = x2d_surf_range_max + 1 - y2d_surf_range_max = x2d_surf_range_max + nr_2d_y_dir - self.y2d_surf_range = list(range(y2d_surf_range_min, y2d_surf_range_max + 1)) - - # get range of vertical (in side view) line ids in the gridadmin. 
- # These lines represent surface-groundwater (vertical) flow - vert_flow_range_min = y2d_surf_range_max + 1 - vert_flow_range_max = y2d_surf_range_max + nr_2d - self.vert_flow_range = list(range(vert_flow_range_min, vert_flow_range_max + 1)) - - if ga.has_groundwater: - # total nr of x-dir (horizontal in topview) 2d groundwater lines - x_grndwtr_range_min = start_gr + 1 - x_grndwtr_range_max = start_gr + nr_2d_x_dir - self.x_grndwtr_range = list( - range(x_grndwtr_range_min, x_grndwtr_range_max + 1) - ) - - # total nr of y-dir (vertical in topview) 2d groundwater lines - y_grndwtr_range_min = x_grndwtr_range_max + 1 - y_grndwtr_range_max = x_grndwtr_range_max + nr_2d - self.y_grndwtr_range = list( - range(y_grndwtr_range_min, y_grndwtr_range_max + 1) - ) - - line_selection, pump_selection = self._select_lines_and_pumps() - point_selection = self._select_points() - - def features_to_ids(features): - return [f["id"] for f in features] - - self.flowline_ids = { - k: features_to_ids(v) for k, v in line_selection.items() - } - self.pump_ids = { - k: features_to_ids(v) for k, v in pump_selection.items() - } - self.node_ids = { - k: features_to_ids(v) for k, v in point_selection.items() - } - - self.qgs_lines, self.qgs_points = self.convert_features( - line_selection=line_selection, - pump_selection=pump_selection, - point_selection=point_selection, - ) - - self.time, self.flow = self._get_aggregated_flows() - - @property - def label(self): - return self.wrapped_result.parent_text + " | " + self.wrapped_result.text - - def filter_series(self, key, series): - return [s for s in series if s[key] in self.wrapped_result] - - @property - def result(self): - return self.wrapped_result.result - - def _select_lines_and_pumps(self): - """Returns a tuple of dictionaries with features by category: - - line_selection = { - '1d_in': [...], - '1d_out': [...], - '1d_bound_in': [...], - ... - } - - pump_selection = { - 'in': [...], - 'out': [...], - } - - returned value = (line_selection, pump_selection) - """ - # TODO: implement model_part. One of the problems of not having - # this implemented is that the on hover map highlight selects all - # links, even when the 2D or 1D modelpart is selected in the combo box. - - # the '_out' and '_in' indicate the draw direction of the flow_line. - # a flow line can have in 1 simulation both positive and negative - # discharge (with extend to the draw direction). Later on, in - # get_aggregated_flows() this numpy timeserie is clipped with - # max=0 for flow in 1 direction and min=0 for flow in the opposite - # direction. - line_selection = { - "1d_in": [], - "1d_out": [], - "1d_bound_in": [], - "1d_bound_out": [], - "2d_in": [], - "2d_out": [], - "2d_bound_in": [], - "2d_bound_out": [], - # 1d2d flow lines intersect polygon (1d is inside polygon) - "1d__1d_2d_flow": [], - # 1d2d flow lines intersect polygon (2d is inside polygon) - "2d__1d_2d_flow": [], - # 1d2d exchange lines are within polygon (both nodes inside) - "1d_2d_exch": [], - "2d_groundwater_in": [], - "2d_groundwater_out": [], - "2d_vertical_infiltration": [], - # TODO: add 1d_2d_groundwater? 
- } - pump_selection = {"in": [], "out": []} - - lines = self.wrapped_result.lines - points = self.wrapped_result.points - pumps = self.wrapped_result.pumps - - # all links in and out - # use bounding box and spatial index to prefilter lines - request_filter = QgsFeatureRequest().setFilterRect( - self.polygon.get().boundingBox() - ) - for line in lines.getFeatures(request_filter): - line_type = line.attribute('line_type') - - if line_type == LineType.LINE_2D_VERTICAL: - geom = line.geometry().asPolyline() - # 2d vertical infiltration line is handmade diagonal (drawn - # from 2d point 15m towards south-west ). Thus, if at-least - # its startpoint is within polygon then include the line - if self.polygon.contains(QgsPointXY(geom[0])): - line_selection["2d_vertical_infiltration"].append(line) - - # test if lines are crossing boundary of polygon - if line.geometry().crosses(self.polygon): - geom = line.geometry().asPolyline() - # check if flow is in or out by testing if startpoint - # is inside polygon --> out - outgoing = self.polygon.contains(QgsPointXY(geom[0])) - # check if flow is in or out by testing if endpoint - # is inside polygon --> in - incoming = self.polygon.contains(QgsPointXY(geom[-1])) - - if incoming and outgoing: - # skip lines that do have start- and end vertex outside of - # polygon - pass - elif outgoing: - if line_type in LINE_TYPES_1D: - line_selection["1d_out"].append(line) - elif line_type in LINE_TYPES_1D2D: - # draw direction of 1d_2d is always from 2d node to - # 1d node. So when 2d node is inside polygon (and 1d - # node is not) we define it as a '2d__1d_2d_flow' link - # because - line_selection["2d__1d_2d_flow"].append(line) - elif incoming: - if line_type in LINE_TYPES_1D: - line_selection["1d_in"].append(line) - elif line_type in LINE_TYPES_1D2D: - # draw direction of 1d_2d is always from 2d node to - # 1d node. So when 1d node is inside polygon (and 2d - # node is not) we define it as a '1d__1d_2d_flow' link - line_selection["1d__1d_2d_flow"].append(line) - - if line_type == LineType.LINE_2D and not (incoming and outgoing): - # 2d lines are a separate story: discharge on a 2d - # link in the nc can be positive and negative during 1 - # simulation - like you would expect - but we also have - # to account for 2d link direction. We have to determine - # two things: - - # A) is 2d link a vertical or horizontal one. Why? - # vertical 2d lines (calc cells above each other): - # when positive discharge then flow is to north, negative - # discharge then flow southwards, while horizontal 2d lines - # (calc cells next to each other) yields positive discharge - # is flow to the east, negative is flow to west - - # B) how the start and endpoint are located with - # reference to each other. Why? a positive discharge on - # a vertical link in the north of your polygon DECREASES - # the volume in the polygon, while a positive discharge on - # a vertical link in the south of your polygon INCREASES - # the volume in the polygon). - - # so why not only determine (B)? - # because then a positive discharge on a diagonal 2d link - - # in topview e.g. left up to right down - can mean flow - # to east. But it can also mean flow to the north. 
If we - # know it is a vertical link we can be sure flow is to the - # north (thats why we need to know (A) - - # TODO: after I made this code Martijn Siemerink adviced: - # 2d links drawing direction is always from south to north - # OR west to east, so it not required to get start- and - # endpoint of a 2d link - - start_x = geom[0][0] - start_y = geom[0][1] - end_x = geom[-1][0] - end_y = geom[-1][1] - - # horizontal line? - if line.id() in self.x2d_surf_range: - # startpoint in polygon? - if self.polygon.contains(QgsPointXY(geom[0])): - # directed to east? - # long coords increase going east, so: - if end_x > start_x: - # thus, positive q means flow to east. - # Startpoint is in polygon. Endpoint is - # located eastwards of startpoint, so positive - # q means flow goes OUT!! of polygon - line_selection["2d_out"].append(line) - else: - line_selection["2d_in"].append(line) - # endpoint in polygon? - elif self.polygon.contains(QgsPointXY(geom[-1])): - # directed to east? - # long coords increase going east - if end_x > start_x: - # positive q means flow to east. Endpoint is - # inside polygon and located eastwards of - # startpoint, so positive q means flow goes - # INTO!! polygon - line_selection["2d_in"].append(line) - else: - line_selection["2d_out"].append(line) - - # vertical line? - if line.id() in self.y2d_surf_range: - # startpoint in polygon? - if self.polygon.contains(QgsPointXY(geom[0])): - # directed to north? - # lat coords increase going north, so: - if end_y > start_y: - # thus, positive q means flow to north. - # Startpoint is in polygon. Endpoint is - # located northwards of startpoint, so positive - # q means flow goes OUT!! of polygon - line_selection["2d_out"].append(line) - else: - line_selection["2d_in"].append(line) - # endpoint in polygon? - elif self.polygon.contains(QgsPointXY(geom[-1])): - # directed to north? - # lat coords increase going north, so: - if end_y > start_y: - # positive q means flow to north. Endpoint is - # inside polygon and located northwards of - # startpoint, so flow goes INTO!! polygon - line_selection["2d_in"].append(line) - else: - line_selection["2d_out"].append(line) - - if line_type == LineType.LINE_2D_GROUNDWATER and not (incoming and outgoing): - - start_x = geom[0][0] - start_y = geom[0][1] - end_x = geom[-1][0] - end_y = geom[-1][1] - - # horizontal line? - if line.id() in self.x_grndwtr_range: - # startpoint in polygon? - if self.polygon.contains(QgsPointXY(geom[0])): - if end_x > start_x: - line_selection["2d_groundwater_out"].append(line) - else: - line_selection["2d_groundwater_in"].append(line) - # endpoint in polygon? - elif self.polygon.contains(QgsPointXY(geom[-1])): - if end_x > start_x: - line_selection["2d_groundwater_in"].append(line) - else: - line_selection["2d_groundwater_out"].append(line) - # vertical line? - if line.id() in self.y_grndwtr_range: - # startpoint in polygon? 
- if self.polygon.contains(QgsPointXY(geom[0])): - if end_y > start_y: - line_selection["2d_groundwater_out"].append(line) - else: - line_selection["2d_groundwater_in"].append(line) - elif self.polygon.contains(QgsPointXY(geom[-1])): - if end_y > start_y: - line_selection["2d_groundwater_in"].append(line) - else: - line_selection["2d_groundwater_out"].append(line) - - elif line_type in LINE_TYPES_1D2D and line.geometry().within(self.polygon): - line_selection["1d_2d_exch"].append(line) - - # find boundaries in polygon - node_types_csv = ",".join(str(n.value) for n in NODE_TYPES_BOUNDARIES) - request_filter = ( - QgsFeatureRequest() - .setFilterRect(self.polygon.get().boundingBox()) - .setFilterExpression(f"node_type in ({node_types_csv})") - ) - - # all boundaries in polygon - for bound in points.getFeatures(request_filter): - if self.polygon.contains(QgsPointXY(bound.geometry().asPoint())): - # find link connected to boundary - request_filter_bound = QgsFeatureRequest().setFilterExpression( - f"calculation_node_id_start == {bound.id()} OR" - f"calculation_node_id_end == {bound.id()}" - ) - bound_lines = lines.getFeatures(request_filter_bound) - for bound_line in bound_lines: - if bound_line["calculation_node_id_start"] == bound["id"]: - if bound["node_type"] == NodeType.NODE_1D_BOUNDARIES: - line_selection["1d_bound_in"].append(bound_line) - else: # 2d - line_selection["2d_bound_in"].append(bound_line) - else: # out - if bound["node_type"] == NodeType.NODE_1D_BOUNDARIES: - line_selection["1d_bound_out"].append(bound_line) - else: # 2d - line_selection["2d_bound_out"].append(bound_line) - - # pumps - # use bounding box and spatial index to prefilter pumps - if pumps is None: - f_pumps = [] - else: - request_filter = QgsFeatureRequest().setFilterRect( - self.polygon.get().boundingBox() - ) - f_pumps = pumps.getFeatures(request_filter) - - for pump in f_pumps: - # test if lines are crossing boundary of polygon - pump_geometry = pump.geometry() - if pump_geometry.intersects(self.polygon): - pump_end_node_id = pump["node_idx2"] - linestring = pump_geometry.asPolyline() - # check if flow is in or out by testing if startpoint - # is inside polygon --> out - startpoint = QgsPointXY(linestring[0]) - endpoint = QgsPointXY(linestring[-1]) - outgoing = self.polygon.contains(startpoint) - # check if flow is in or out by testing if endpoint - # is inside polygon --> in - incoming = self.polygon.contains(endpoint) if not pump_end_node_id == NO_ENDPOINT_ID else False - - if incoming and outgoing: - # skip - pass - elif outgoing: - pump_selection["out"].append(pump) - elif incoming: - pump_selection["in"].append(pump) - - return line_selection, pump_selection - - def _select_points(self): - """Returns a dictionary with features by category: - - { - '1d': [..., ...], - '2d': [..., ...], - '2d_groundwater': [..., ...], - } - """ - - point_selection = {"1d": [], "2d": [], "2d_groundwater": []} - - # use bounding box and spatial index to prefilter lines - request_filter = QgsFeatureRequest().setFilterRect( - self.polygon.get().boundingBox() - ) - node_types = NODE_TYPES_1D | NODE_TYPES_2D | NODE_TYPES_2D_GROUNDWATER - node_types_csv = ",".join(str(n.value) for n in node_types) - request_filter.setFilterExpression(f"node_type in ({node_types_csv})") - # todo: check if boundary nodes could not have rain, infiltration, etc. 
- - node_type_map = {} - node_type_map.update({n.value: "1d" for n in NODE_TYPES_1D}) - node_type_map.update({n.value: "2d" for n in NODE_TYPES_2D}) - node_type_map.update({n.value: "2d_groundwater" - for n in NODE_TYPES_2D_GROUNDWATER}) - - for point in self.wrapped_result.points.getFeatures(request_filter): - # test if points are contained by polygon - if self.polygon.contains(point.geometry()): - point_selection[node_type_map[point['node_type']]].append(point) - return point_selection - - def _get_aggregated_flows(self): - """ - Returns a tuple (times, all_flows) defined as: - - times = array of timestamps - all_flows = array with shape (np.size(times, 0), len(INPUT_SERIES)) - """ - # constants referenced in record array - # shared by links and nodes - TYPE_1D = "1d" - TYPE_2D = "2d" - TYPE_2D_GROUNDWATER = "2d_groundwater" - # links only - TYPE_1D_BOUND_IN = "1d_bound_in" - TYPE_2D_BOUND_IN = "2d_bound_in" - TYPE_1D__1D_2D_EXCH = "1d__1d_2d_exch" - TYPE_2D__1D_2D_EXCH = "2d__1d_2d_exch" - TYPE_1D__1D_2D_FLOW = "1d__1d_2d_flow" - TYPE_2D__1D_2D_FLOW = "2d__1d_2d_flow" - TYPE_2D_VERTICAL_INFILTRATION = "2d_vertical_infiltration" - - ALL_TYPES = [ - TYPE_1D, - TYPE_2D, - TYPE_2D_GROUNDWATER, - TYPE_1D_BOUND_IN, - TYPE_2D_BOUND_IN, - TYPE_1D__1D_2D_EXCH, - TYPE_2D__1D_2D_EXCH, - TYPE_1D__1D_2D_FLOW, - TYPE_2D__1D_2D_FLOW, - TYPE_2D_VERTICAL_INFILTRATION, - ] - - NTYPE_MAXLEN = 25 - assert ( - max(list(map(len, ALL_TYPES))) <= NTYPE_MAXLEN - ), "NTYPE_MAXLEN insufficiently large for all values" - NTYPE_DTYPE = "U%s" % NTYPE_MAXLEN - - # LINKS - ####### - - # create numpy table with flowlink information - tlink = [] # id, 1d or 2d, in or out - for idx in self.flowline_ids["2d_in"]: - tlink.append((idx, TYPE_2D, 1)) - for idx in self.flowline_ids["2d_out"]: - tlink.append((idx, TYPE_2D, -1)) - - for idx in self.flowline_ids["2d_bound_in"]: - tlink.append((idx, TYPE_2D_BOUND_IN, 1)) - for idx in self.flowline_ids["2d_bound_out"]: - tlink.append((idx, TYPE_2D_BOUND_IN, -1)) - - for idx in self.flowline_ids["1d_in"]: - tlink.append((idx, TYPE_1D, 1)) - for idx in self.flowline_ids["1d_out"]: - tlink.append((idx, TYPE_1D, -1)) - - for idx in self.flowline_ids["1d_bound_in"]: - tlink.append((idx, TYPE_1D_BOUND_IN, 1)) - for idx in self.flowline_ids["1d_bound_out"]: - tlink.append((idx, TYPE_1D_BOUND_IN, -1)) - - for idx in self.flowline_ids["2d_groundwater_in"]: - tlink.append((idx, TYPE_2D_GROUNDWATER, 1)) - for idx in self.flowline_ids["2d_groundwater_out"]: - tlink.append((idx, TYPE_2D_GROUNDWATER, -1)) - - for idx in self.flowline_ids["2d_vertical_infiltration"]: - tlink.append((idx, TYPE_2D_VERTICAL_INFILTRATION, 1)) - - # 1d_2d flow intersects the polygon: - # the in- or out flow for 1d2d is different than flows dirs above: - # - discharge from 1d to 2d is always positive in the .nc - # - discharge from 2d to 1d is always negative in the .nc - # 1d__1d_2d_flow: 1d node is inside polygon, 2d node is outside. 
- # - positive discharge means flow outwards polygon - # - negative discharge means flow inwards polygon - # 2d__1d_2d_flow: 1d node is outside polygon, 2d node is inside - # - positive discharge means flow inwards polygon - # - negative discharge means flow outwards polygon - for idx in self.flowline_ids["1d__1d_2d_flow"]: - tlink.append((idx, TYPE_1D__1D_2D_FLOW, -1)) - # 1d_2d_out: 1d node is outside polygon, 2d node is inside - for idx in self.flowline_ids["2d__1d_2d_flow"]: - tlink.append((idx, TYPE_2D__1D_2D_FLOW, 1)) - # 1d_2d within the polygon (from 1d perspective so everything flipped) - for idx in self.flowline_ids["1d_2d_exch"]: - tlink.append((idx, TYPE_1D__1D_2D_EXCH, -1)) - # 1d_2d within the polygon (from 2d perspective) - for idx in self.flowline_ids["1d_2d_exch"]: - tlink.append((idx, TYPE_2D__1D_2D_EXCH, 1)) - - np_link = np.array( - tlink, dtype=[("id", int), ("ntype", NTYPE_DTYPE), ("dir", int)] - ) - - # sort for faster reading of netcdf - np_link.sort(axis=0) - - # create masks - mask_2d = np_link["ntype"] != TYPE_2D - mask_1d = np_link["ntype"] != TYPE_1D - mask_2d_bound = np_link["ntype"] != TYPE_2D_BOUND_IN - mask_1d_bound = np_link["ntype"] != TYPE_1D_BOUND_IN - - mask_1d__1d_2d_flow = np_link["ntype"] != TYPE_1D__1D_2D_FLOW - mask_2d__1d_2d_flow = np_link["ntype"] != TYPE_2D__1D_2D_FLOW - mask_1d__1d_2d_exch = np_link["ntype"] != TYPE_1D__1D_2D_EXCH - mask_2d__1d_2d_exch = np_link["ntype"] != TYPE_2D__1D_2D_EXCH - mask_2d_groundwater = np_link["ntype"] != TYPE_2D_GROUNDWATER - mask_2d_vertical_infiltration = ( - np_link["ntype"] != TYPE_2D_VERTICAL_INFILTRATION - ) - - threedi_result = self.wrapped_result.threedi_result - - # get all flows through incoming and outgoing flows - times = threedi_result.get_timestamps(parameter="q_cum") - - all_flows = np.zeros(shape=(len(times), len(INPUT_SERIES))) - # total_location = np.zeros(shape=(np.size(np_link, 0), 2)) - - # non-2d links - pos_pref = 0 - neg_pref = 0 - - if np_link.size > 0: - for ts_idx, t in enumerate(times): - # (1) inflow and outflow through 1d and 2d - # vol = threedi_result.get_values_by_timestep_nr('q', ts_idx, - # np_link['id']) * np_link['dir'] # * dt - - flow_pos = ( - threedi_result.get_values_by_timestep_nr( - "q_cum_positive", ts_idx, np_link["id"] - ) - * np_link["dir"] - ) - flow_neg = ( - threedi_result.get_values_by_timestep_nr( - "q_cum_negative", ts_idx, np_link["id"] - ) - * np_link["dir"] - * -1 - ) - - in_sum = flow_pos - pos_pref - out_sum = flow_neg - neg_pref - pos_pref = flow_pos - neg_pref = flow_neg - - # 2d flow (2d_in) - all_flows[ts_idx, 0] = ( - ma.masked_array(in_sum, mask=mask_2d).clip(min=0).sum() - + ma.masked_array(out_sum, mask=mask_2d).clip(min=0).sum() - ) - # 2d flow (2d_out) - all_flows[ts_idx, 1] = ( - ma.masked_array(in_sum, mask=mask_2d).clip(max=0).sum() - + ma.masked_array(out_sum, mask=mask_2d).clip(max=0).sum() - ) - - # 1d flow (1d_in) - all_flows[ts_idx, 2] = ( - ma.masked_array(in_sum, mask=mask_1d).clip(min=0).sum() - + ma.masked_array(out_sum, mask=mask_1d).clip(min=0).sum() - ) - # 1d flow (1d_out) - all_flows[ts_idx, 3] = ( - ma.masked_array(in_sum, mask=mask_1d).clip(max=0).sum() - + ma.masked_array(out_sum, mask=mask_1d).clip(max=0).sum() - ) - - # 2d bound (2d_bound_in) - all_flows[ts_idx, 4] = ( - ma.masked_array(in_sum, mask=mask_2d_bound).clip(min=0).sum() - + ma.masked_array(out_sum, mask=mask_2d_bound).clip(min=0).sum() - ) - # 2d bound (2d_bound_out) - all_flows[ts_idx, 5] = ( - ma.masked_array(in_sum, mask=mask_2d_bound).clip(max=0).sum() - + 
ma.masked_array(out_sum, mask=mask_2d_bound).clip(max=0).sum() - ) - - # 1d bound (1d_bound_in) - all_flows[ts_idx, 6] = ( - ma.masked_array(in_sum, mask=mask_1d_bound).clip(min=0).sum() - + ma.masked_array(out_sum, mask=mask_1d_bound).clip(min=0).sum() - ) - # 1d bound (1d_bound_out) - all_flows[ts_idx, 7] = ( - ma.masked_array(in_sum, mask=mask_1d_bound).clip(max=0).sum() - + ma.masked_array(out_sum, mask=mask_1d_bound).clip(max=0).sum() - ) - - # 1d__1d_2d_flow_in - all_flows[ts_idx, 8] = ( - ma.masked_array(in_sum, mask=mask_1d__1d_2d_flow).clip(min=0).sum() - + ma.masked_array(out_sum, mask=mask_1d__1d_2d_flow) - .clip(min=0) - .sum() - ) - # 1d__1d_2d_flow_out - all_flows[ts_idx, 9] = ( - ma.masked_array(in_sum, mask=mask_1d__1d_2d_flow).clip(max=0).sum() - + ma.masked_array(out_sum, mask=mask_1d__1d_2d_flow) - .clip(max=0) - .sum() - ) - - # 2d__1d_2d_flow_in - all_flows[ts_idx, 30] = ( - ma.masked_array(in_sum, mask=mask_2d__1d_2d_flow).clip(min=0).sum() - + ma.masked_array(out_sum, mask=mask_2d__1d_2d_flow) - .clip(min=0) - .sum() - ) - # 2d__1d_2d_flow_out - all_flows[ts_idx, 31] = ( - ma.masked_array(in_sum, mask=mask_2d__1d_2d_flow).clip(max=0).sum() - + ma.masked_array(out_sum, mask=mask_2d__1d_2d_flow) - .clip(max=0) - .sum() - ) - - # 1d (1d__1d_2d_exch_in) - all_flows[ts_idx, 10] = ( - ma.masked_array(in_sum, mask=mask_1d__1d_2d_exch).clip(min=0).sum() - + ma.masked_array(out_sum, mask=mask_1d__1d_2d_exch) - .clip(min=0) - .sum() - ) - # 1d (1d__1d_2d_exch_out) - all_flows[ts_idx, 11] = ( - ma.masked_array(in_sum, mask=mask_1d__1d_2d_exch).clip(max=0).sum() - + ma.masked_array(out_sum, mask=mask_1d__1d_2d_exch) - .clip(max=0) - .sum() - ) - - # 2d (2d__1d_2d_exch_in) - all_flows[ts_idx, 32] = ( - ma.masked_array(in_sum, mask=mask_2d__1d_2d_exch).clip(min=0).sum() - + ma.masked_array(out_sum, mask=mask_2d__1d_2d_exch) - .clip(min=0) - .sum() - ) - # 2d (2d__1d_2d_exch_out) - all_flows[ts_idx, 33] = ( - ma.masked_array(in_sum, mask=mask_2d__1d_2d_exch).clip(max=0).sum() - + ma.masked_array(out_sum, mask=mask_2d__1d_2d_exch) - .clip(max=0) - .sum() - ) - - # 2d groundwater (2d_groundwater_in) - all_flows[ts_idx, 23] = ( - ma.masked_array(in_sum, mask=mask_2d_groundwater).clip(min=0).sum() - + ma.masked_array(out_sum, mask=mask_2d_groundwater) - .clip(min=0) - .sum() - ) - # 2d groundwater (2d_groundwater_out) - all_flows[ts_idx, 24] = ( - ma.masked_array(in_sum, mask=mask_2d_groundwater).clip(max=0).sum() - + ma.masked_array(out_sum, mask=mask_2d_groundwater) - .clip(max=0) - .sum() - ) - - # NOTE: positive vertical infiltration is from surface to - # groundwater node. We make this negative because it's - # 'sink-like', and to make it in line with the - # infiltration_rate_simple which also has a -1 multiplication - # factor. 
- # 2d_vertical_infiltration (2d_vertical_infiltration_pos) - all_flows[ts_idx, 28] = ( - -1 - * ma.masked_array(in_sum, mask=mask_2d_vertical_infiltration) - .clip(min=0) - .sum() - + ma.masked_array(out_sum, mask=mask_2d_vertical_infiltration) - .clip(min=0) - .sum() - ) - # 2d_vertical_infiltration (2d_vertical_infiltration_neg) - all_flows[ts_idx, 29] = ( - -1 - * ma.masked_array(in_sum, mask=mask_2d_vertical_infiltration) - .clip(max=0) - .sum() - + ma.masked_array(out_sum, mask=mask_2d_vertical_infiltration) - .clip(max=0) - .sum() - ) - - # PUMPS - ####### - - tpump = [] - for idx in self.pump_ids["in"]: - tpump.append((idx, 1)) - for idx in self.pump_ids["out"]: - tpump.append((idx, -1)) - np_pump = np.array(tpump, dtype=[("id", int), ("dir", int)]) - np_pump.sort(axis=0) - - if np_pump.size > 0: - # pumps - pump_pref = 0 - for ts_idx, t in enumerate(times): - # (2) inflow and outflow through pumps - pump_flow = ( - threedi_result.get_values_by_timestep_nr( - "q_pump_cum", ts_idx, np_pump["id"] - ) - * np_pump["dir"] - ) - - flow_dt = pump_flow - pump_pref - pump_pref = pump_flow - - in_sum = flow_dt.clip(min=0) - out_sum = flow_dt.clip(max=0) - - all_flows[ts_idx, 12] = in_sum.sum() - all_flows[ts_idx, 13] = out_sum.sum() - - # NODES - ####### - - tnode = [] # id, 1d or 2d, in or out - for idx in self.node_ids["2d"]: - tnode.append((idx, TYPE_2D)) - for idx in self.node_ids["1d"]: - tnode.append((idx, TYPE_1D)) - for idx in self.node_ids["2d_groundwater"]: - tnode.append((idx, TYPE_2D_GROUNDWATER)) - NTYPE_DTYPE - np_node = np.array(tnode, dtype=[("id", int), ("ntype", NTYPE_DTYPE)]) - np_node.sort(axis=0) - - mask_2d_nodes = np_node["ntype"] != TYPE_2D - mask_1d_nodes = np_node["ntype"] != TYPE_1D - mask_2d_groundwater_nodes = np_node["ntype"] != TYPE_2D_GROUNDWATER - - np_2d_node = ma.masked_array(np_node["id"], mask=mask_2d_nodes).compressed() - np_1d_node = ma.masked_array(np_node["id"], mask=mask_1d_nodes).compressed() - np_2d_groundwater_node = ma.masked_array( - np_node["id"], mask=mask_2d_groundwater_nodes - ).compressed() - - for parameter, agg_method, node, pnr, factor in [ - ("rain", "_cum", np_2d_node, 14, 1), - # TODO: in old model results this parameter is called - # 'infiltration_rate', thus it is not backwards compatible right - # now - ("infiltration_rate_simple", "_cum", np_2d_node, 15, -1), - # TODO: inefficient because we look up q_lat data twice - ("q_lat", "_cum", np_2d_node, 16, 1), - ("q_lat", "_cum", np_1d_node, 17, 1), - ("leak", "_cum", np_2d_groundwater_node, 26, 1), - ("rain", "_cum", np_1d_node, 27, 1), - ("intercepted_volume", "_current", np_2d_node, 34, -1), - ("q_sss", "_cum", np_2d_node, 35, 1), - ]: - - if node.size > 0: - if parameter + agg_method in threedi_result.available_vars: - values_pref = 0 - for ts_idx, t in enumerate(times): - values = threedi_result.get_values_by_timestep_nr( - parameter + agg_method, ts_idx, node - ).sum() - values_dt = values - values_pref - values_pref = values - all_flows[ts_idx, pnr] = values_dt * factor - t_pref = 0 - - for ts_idx, t in enumerate(times): - if ts_idx == 0: - # just to make sure machine precision distortion - # is reduced for the first timestamp (everything - # should be 0 - all_flows[ts_idx] = all_flows[ts_idx] / (times[1] - t) - else: - all_flows[ts_idx] = all_flows[ts_idx] / (t - t_pref) - t_pref = t - - if np_node.size > 0: - # delta volume - t_pref = 0 - for ts_idx, t in enumerate(times): - # delta volume - if ts_idx == 0: - # volume difference first timestep is always 0 - all_flows[ts_idx, 
18] = 0 - all_flows[ts_idx, 19] = 0 - all_flows[ts_idx, 25] = 0 - - vol_current = threedi_result.get_values_by_timestep_nr( - "vol_current", ts_idx, np_node["id"] - ) - td_vol_pref = ma.masked_array(vol_current, mask=mask_2d_nodes).sum() - od_vol_pref = ma.masked_array(vol_current, mask=mask_1d_nodes).sum() - td_vol_pref_gw = ma.masked_array( - vol_current, mask=mask_2d_groundwater_nodes - ).sum() - t_pref = t - else: - vol_current = threedi_result.get_values_by_timestep_nr( - "vol_current", ts_idx, np_node["id"] - ) - - td_vol = ma.masked_array(vol_current, mask=mask_2d_nodes).sum() - od_vol = ma.masked_array(vol_current, mask=mask_1d_nodes).sum() - td_vol_gw = ma.masked_array( - vol_current, mask=mask_2d_groundwater_nodes - ).sum() - - dt = t - t_pref - all_flows[ts_idx, 18] = (td_vol - td_vol_pref) / dt - all_flows[ts_idx, 19] = (od_vol - od_vol_pref) / dt - all_flows[ts_idx, 25] = (td_vol_gw - td_vol_pref_gw) / dt - - td_vol_pref = td_vol - od_vol_pref = od_vol - td_vol_pref_gw = td_vol_gw - t_pref = t - all_flows = np.nan_to_num(all_flows) - - return times, all_flows - - def convert_features(self, line_selection, pump_selection, point_selection): - """ - Return lines, points tuple of dicts, for the visualization - """ - transform = QgsCoordinateTransform( - self.wrapped_result.lines.crs(), self.mapcrs, QgsProject.instance(), - ) - qgs_lines = defaultdict(list) - for c, fl in line_selection.items(): - for f in fl: - geom = f.geometry() - geom.transform(transform) - geom = geom.asPolyline() - qgs_lines[c.rsplit('_in')[0].rsplit('_out')[0]].append(geom) - qgs_points = defaultdict(list) - for c, fl in pump_selection.items(): - for f in fl: - geom = f.geometry() - geom.transform(transform) - geom = geom.asPoint() - qgs_points["pumps_hoover"].append(geom) - for c, fl in point_selection.items(): - for f in fl: - geom = f.geometry() - geom.transform(transform) - geom = geom.asPoint() - qgs_points[c].append(geom) - return qgs_lines, qgs_points - - def get_graph_data(self, agg, time_units): - """ - Return data corresponding to the graph series. 
- """ - time = self.time - flow = self.flow - - flow_index = dict(INPUT_SERIES) - - graph_data = deepcopy(GRAPH_SERIES) - for idx, item in enumerate(graph_data): - item["fill_color"] = [ - int(c) for c in item["def_fill_color"].split(",") - ] - item["pen_color"] = [ - int(c) for c in item["def_pen_color"].split(",") - ] - # determine the flow indices for this items series - flow_indices = [ - flow_index[serie] for serie in item["series"] - ] - - # populate with data from flow - item["values"] = {} - assert item["default_method"] in ("net", "gross") - if item["default_method"] == "net": - sum = flow[:, flow_indices].sum(axis=1) - item["values"]["in"] = sum.clip(min=0) - item["values"]["out"] = sum.clip(max=0) - elif item["default_method"] == "gross": - sum_pos = np.zeros(shape=(np.size(time, 0),)) - sum_neg = np.zeros(shape=(np.size(time, 0),)) - for nr in flow_indices: - sum_pos += flow[:, nr].clip(min=0) - sum_neg += flow[:, nr].clip(max=0) - item["values"]["in"] = sum_pos - item["values"]["out"] = sum_neg - - if agg == AGG_CUMULATIVE_FLOW: - # aggregate the serie - diff = np.append([0], np.diff(time)) - item["values"]["in"] = np.cumsum( - diff * item["values"]["in"], axis=0 - ) - item["values"]["out"] = np.cumsum( - diff * item["values"]["out"], axis=0 - ) - - time = self.time / TIME_UNITS_TO_SECONDS[time_units] - agg_label = { - AGG_FLOW: ("Flow", "m³/s"), - AGG_CUMULATIVE_FLOW: ("Cumulative flow", "m³"), - }[agg] - - return { - "time": time, - "time_label": ("time", time_units), - "values": {item["name"]: item for item in graph_data}, - "values_label": agg_label - } diff --git a/tool_water_balance/config.py b/tool_water_balance/config.py deleted file mode 100644 index 8cee8987..00000000 --- a/tool_water_balance/config.py +++ /dev/null @@ -1,368 +0,0 @@ -# Qt linestyles, linecolors etc -# http://pyqt.sourceforge.net/Docs/PyQt4/qpen.html - -# QColor constructed from the given CMYK color values: -# c (cyan), m (magenta), y (yellow), k (black), and a (alpha-channel, -# i.e. transparency (0=totally transparant)). 
-# all numbers are integers between 0-256 - -FILL_TRANSP = ",150" -PEN_TRANSP = ",180" -FILL_TRANSP_VOL_CHANGE = ",0" - -# serie_name, index, modelpart for bars, modelpart for graph -INPUT_SERIES = [ - ("2d_in", 0), - ("2d_out", 1), - ("1d_in", 2), - ("1d_out", 3), - ("2d_bound_in", 4), - ("2d_bound_out", 5), - ("1d_bound_in", 6), - ("1d_bound_out", 7), - ("1d__1d_2d_flow_in", 8), - ("1d__1d_2d_flow_out", 9), - ("1d__1d_2d_exch_in", 10), - ("1d__1d_2d_exch_out", 11), - ("pump_in", 12), - ("pump_out", 13), - ("rain", 14), - ("infiltration_rate_simple", 15), - ("lat_2d", 16), - ("lat_1d", 17), - ("d_2d_vol", 18), - ("d_1d_vol", 19), - ("error_2d", 20), - ("error_1d", 21), - ("error_1d_2d", 22), - ("2d_groundwater_in", 23), - ("2d_groundwater_out", 24), - ("d_2d_groundwater_vol", 25), - ("leak", 26), - ("inflow", 27), - ("2d_vertical_infiltration_pos", 28), - ("2d_vertical_infiltration_neg", 29), - ("2d__1d_2d_flow_in", 30), - ("2d__1d_2d_flow_out", 31), - ("2d__1d_2d_exch_in", 32), - ("2d__1d_2d_exch_out", 33), - ("intercepted_volume", 34), - ("q_sss", 35), -] - -# graph series settings -GRAPH_SERIES = [ - { - "name": "2D flow", - "default_method": "gross", - "order": 1, - "def_fill_color": "63,81,181" + FILL_TRANSP, - "def_pen_color": "63,81,181" + PEN_TRANSP, - "series": ["2d_in", "2d_out"], - }, - { - "name": "2D boundary flow", - "default_method": "gross", - "order": 2, - "def_fill_color": "156,39,176" + FILL_TRANSP, - "def_pen_color": "156,39,176" + PEN_TRANSP, - "series": ["2d_bound_in", "2d_bound_out"], - }, - { - "name": "1D flow", - "default_method": "gross", - "order": 3, - "def_fill_color": "0,188,212" + FILL_TRANSP, - "def_pen_color": "0,188,212" + PEN_TRANSP, - "series": ["1d_in", "1d_out"], - }, - { - "name": "1D boundary flow", - "default_method": "gross", - "order": 4, - "def_fill_color": "156,39,176" + FILL_TRANSP, - "def_pen_color": "156,39,176" + PEN_TRANSP, - "series": ["1d_bound_in", "1d_bound_out"], - }, - { - "name": "2D flow to 1D (domain exchange)", - "default_method": "gross", - "order": 5, - "def_fill_color": "240,210,50" + FILL_TRANSP, - "def_pen_color": "240,210,50" + PEN_TRANSP, - "series": ["1d__1d_2d_exch_in", "1d__1d_2d_exch_out"], - }, - { - "name": "2D flow to 1D", - "default_method": "gross", - "order": 5, - "def_fill_color": "100,220,5" + FILL_TRANSP, - "def_pen_color": "100,220,5" + PEN_TRANSP, - "series": [ - "1d__1d_2d_flow_in", - "1d__1d_2d_flow_out", - "2d__1d_2d_flow_in", - "2d__1d_2d_flow_out", - ], - }, - { - "name": "pumps", - "default_method": "gross", - "order": 6, - "def_fill_color": "255,235,59" + FILL_TRANSP, - "def_pen_color": "255,235,59" + PEN_TRANSP, - "series": ["pump_in", "pump_out"], - }, - { - "name": "rain on 2D", - "default_method": "net", - "order": 7, - "def_fill_color": "0,150,136" + FILL_TRANSP, - "def_pen_color": "0,150,136" + PEN_TRANSP, - "series": ["rain"], - }, - { - "name": "lateral flow to 1D", - "default_method": "net", - "order": 8, - "def_fill_color": "76,175,80" + FILL_TRANSP, - "def_pen_color": "76,175,80" + PEN_TRANSP, - "series": ["lat_1d"], - }, - { - "name": "lateral flow to 2D", - "default_method": "net", - "order": 9, - "def_fill_color": "176,175,80" + FILL_TRANSP, - "def_pen_color": "176,175,80" + PEN_TRANSP, - "series": ["lat_2d"], - }, - { - "name": "constant infiltration", - "default_method": "net", - "order": 10, - "def_fill_color": "121,85,72" + FILL_TRANSP, - "def_pen_color": "121,85,72" + PEN_TRANSP, - "series": ["infiltration_rate_simple"], - }, - { - "name": "volume change 2D", - "default_method": 
"net", - "order": 11, - "def_fill_color": "244,67,54" + FILL_TRANSP_VOL_CHANGE, - "def_pen_color": "244,67,54" + PEN_TRANSP, - "series": ["d_2d_vol"], - }, - { - "name": "volume change 1D", - "default_method": "net", - "order": 12, - "def_fill_color": "255,152,0" + FILL_TRANSP_VOL_CHANGE, - "def_pen_color": "255,152,0" + PEN_TRANSP, - "series": ["d_1d_vol"], - }, - { - "name": "groundwater flow", - "default_method": "gross", - "order": 2.5, - "def_fill_color": "0,0,128" + FILL_TRANSP, - "def_pen_color": "0,0,128" + PEN_TRANSP, - "series": ["2d_groundwater_in", "2d_groundwater_out"], - }, - { - "name": "volume change groundwater", - "default_method": "net", - "order": 11.5, - "def_fill_color": "100,149,237" + FILL_TRANSP_VOL_CHANGE, - "def_pen_color": "100,149,237" + PEN_TRANSP, - "series": ["d_2d_groundwater_vol"], - }, - { - "name": "leakage", - "default_method": "net", - "order": 10.5, - "def_fill_color": "221,160,221" + FILL_TRANSP, - "def_pen_color": "221,160,221" + PEN_TRANSP, - "series": ["leak"], - }, - { - "name": "in/exfiltration (domain exchange)", - "default_method": "gross", - "order": 10.6, - "def_fill_color": "121,160,191" + FILL_TRANSP, - "def_pen_color": "121,160,191" + PEN_TRANSP, - "series": [ - "2d_vertical_infiltration_pos", - "2d_vertical_infiltration_neg", - ], - }, - { - "name": "interception", - "default_method": "net", - "order": 10.7, - "def_fill_color": "181,60,221" + FILL_TRANSP, - "def_pen_color": "181,60,221" + PEN_TRANSP, - "series": ["intercepted_volume"], - }, - { - "name": "0D rainfall runoff on 1D", - "default_method": "net", - "order": 7.1, - "def_fill_color": "50,130,136" + FILL_TRANSP, - "def_pen_color": "50,130,136" + PEN_TRANSP, - "series": ["inflow"], - }, - { - "name": "surface sources and sinks", - "default_method": "net", - "order": 7.2, - "def_fill_color": "204,255,51" + FILL_TRANSP, - "def_pen_color": "204,255,51" + PEN_TRANSP, - "series": ["q_sss"], - }, -] - -# uniqueness test -_series = [serie for item in GRAPH_SERIES for serie in item["series"]] -assert len(set(_series)) == len(_series) - -# barchart in/out series -BC_IO_SERIES = [ - { - # 'label_name': '1D: 1D-2D flow', - "label_name": "1D: 2D flow to 1D", - "in": ["1d__1d_2d_flow_in"], - "out": ["1d__1d_2d_flow_out"], - "type": "1d", - }, - { - # 'label_name': '2D: 1D-2D flow', - "label_name": "2D: 2D flow to 1D", - "in": ["2d__1d_2d_flow_in"], - "out": ["2d__1d_2d_flow_out"], - "type": "2d", - }, - { - # 'label_name': '1D-2D flow (all domains)', - "label_name": "2D flow to 1D (all domains)", - # does this make sense? 
- "in": ["1d__1d_2d_flow_in", "2d__1d_2d_flow_in"], - "out": ["1d__1d_2d_flow_out", "2d__1d_2d_flow_out"], - "type": "NETVOL", - }, - { - # 'label_name': '1D: 1D-2D exchange', - "label_name": "1D: 2D flow to 1D (domain exchange)", - "in": ["1d__1d_2d_exch_in"], - "out": ["1d__1d_2d_exch_out"], - "type": "1d", - }, - { - # 'label_name': '2D: 1D-2D exchange', - "label_name": "2D: 2D flow to 1D (domain exchange)", - "in": ["2d__1d_2d_exch_in"], - "out": ["2d__1d_2d_exch_out"], - "type": "2d", - }, - { - "label_name": "net change in storage", - "in": ["d_2d_vol"], - "out": ["d_2d_vol"], - "type": "2d", - }, - { - "label_name": "net change in storage", - "in": ["d_1d_vol"], - "out": ["d_1d_vol"], - "type": "1d", - }, - { - "label_name": "net change in storage", - "in": ["d_2d_groundwater_vol"], - "out": ["d_2d_groundwater_vol"], - "type": "2d_groundwater", - }, - { - "label_name": "leakage", - "in": ["leak"], - "out": ["leak"], - "type": "2d_groundwater", - }, - { - "label_name": "constant infiltration", - "in": ["infiltration_rate_simple"], - "out": ["infiltration_rate_simple"], - "type": "2d", - }, - {"label_name": "2D flow", "in": ["2d_in"], "out": ["2d_out"], "type": "2d"}, - {"label_name": "1D flow", "in": ["1d_in"], "out": ["1d_out"], "type": "1d"}, - { - "label_name": "groundwater flow", - "in": ["2d_groundwater_in"], - "out": ["2d_groundwater_out"], - "type": "2d_groundwater", - }, - { - "label_name": "lateral flow to 2D", - "in": ["lat_2d"], - "out": ["lat_2d"], - "type": "2d", - }, - { - "label_name": "lateral flow to 1D", - "in": ["lat_1d"], - "out": ["lat_1d"], - "type": "1d", - }, - { - "label_name": "2D boundary flow", - "in": ["2d_bound_in"], - "out": ["2d_bound_out"], - "type": "2d", - }, - { - "label_name": "1D boundary flow", - "in": ["1d_bound_in"], - "out": ["1d_bound_out"], - "type": "1d", - }, - { - "label_name": "0D rainfall runoff on 1D", - "in": ["inflow"], - "out": ["inflow"], - "type": "1d", - }, - { - "label_name": "in/exfiltration (domain exchange)", - # NOTE: for the argument why pos is out and neg is in, see the - # comment in ``WaterBalanceCalculation.get_aggregated_flows`` - "in": ["2d_vertical_infiltration_neg"], - "out": ["2d_vertical_infiltration_pos"], - "type": "2d_vert", - }, - { - "label_name": "change in storage", - "in": ["d_2d_vol", "d_2d_groundwater_vol", "d_1d_vol"], - "out": ["d_2d_vol", "d_2d_groundwater_vol", "d_1d_vol"], - "type": "NETVOL", - }, - {"label_name": "pump", "in": ["pump_in"], "out": ["pump_out"], "type": "1d"}, - {"label_name": "rain on 2D", "in": ["rain"], "out": ["rain"], "type": "2d"}, - { - "label_name": "interception", - "in": ["intercepted_volume"], - "out": ["intercepted_volume"], - "type": "2d", - }, - { - "label_name": "surface sources and sinks", - "in": ["q_sss"], - "out": ["q_sss"], - "type": "2d", - }, -] - -TIME_UNITS_TO_SECONDS = {"hrs": 3600, "mins": 60, "s": 1} - -AGG_CUMULATIVE_FLOW = "m³ cumulative" -AGG_FLOW = "m³/s" diff --git a/tool_water_balance/media/deltares-152x152.png b/tool_water_balance/media/deltares-152x152.png deleted file mode 100644 index 8fd0d0d57d8811562101685e818fe5e2b416ea3e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 5733 zcmaJ_XIK+kw+>A`n7=&`Sg)0-*>30xG?VG(lSENUwtQ z-n)QOMXDg;#q*tWzWd{zJI~DQGVlAYwbx$Hp4k(jr}KcCf|UXQ08p!|DZ{SH{?9msiAuwYc+7y~yuj1R&S1yHbcvq1sXT@dyt7z$zg*yAHg4geqmp$*{}xV9Dq z>EB?qb#cF90|0XOeccgAClm%~gR)1vLP1{}T0lUwEfiz|))v!tS3)_U)%-kB z27Wq*NIxf}tS#uiJW$RTawXt`!XSXYF3zrA5ML2%i;{*l$$0@kB 
z9#F~669oi|+!aQOiHifJq(sCe!BSGf_@h;&WHT&C^~~Tvk^0w}ym-@D)PX>#-{a;VbOw#r3y> zGRh0-iFU`J-CTjc6%jUWk1$Zsm8bt1f{Xh!=e+qjUK6Xcm!cbmrk35lA6KBWu+mt&*$rFXZxOp17xjFwGMLh>M zjGLE(n>$d+Kni%r1np|;=HtcxCth0{qVDR2LAWAO>dH{im4XNwZ3~f-Rg#dDR+3Ut zl2H^FSGlX8ETy8LETeQ643?CXk(K_NtL%n+-+ht%SG~Wuw*T%6_+PoASI&t3p4k62(SLVcWzX;GKWTf__$TpE zu2HlPd#EY{#fBFlOC5rXMG7;H4;9d|ooNYm zGKZA2VWXah!$?@FSK+sQYO*zgh~ ztM&`boNTw#jl_JeY2$M0F^4a_6)yx(4Jj@XbCNZORi~rcwr{FP=Q^B!ltYJ3H2r6) zNCo3{kZJtx10(L}R*FKlX?CO_(#e|booJFPh%zsHdQj)beVfwtbI+^CoSmk8xj%m~ z)Vw{)yOQEOvf!)B>N`e_;usV^H9V`N3Gpu#jUy4VT1a^l?i=2$%!+>kPTzecl4;c{ zWo`wchRZ`9It3hig@{qm}BF7+-k5;zObQPjM5)|^?iGcO_N zvwB-W{qZ2P<2Iy*qUPZY37HI0*Eyx3(PO{e`1A5zq1f=e$Gfq)rx(e-P-=KgWZKT{ zc7ypI?}xOp#F=ELvOczRaV}Ot@5+>>h!$P((_m+l@}^vZKJA?8ciPRTX0jjHxLrR! za2UL9`-65)-Z|tN-ZRNE?cp>pKKe^OS1!RecPFpO+V_;A866vJybEMbs79M*k&X66 z4RS7Gm(&?szAGGCB&fS;KOQ$7^}xE9c)w{^|6!sxQ>eXBrMporu)&)>cBlBw0nqv6PH;zBtCMb%e~E@Fhe|2zF1GX1egq zN!0K-TgCO#JOm9@C(kqPon@?A5A8m>-}%s$K!~Rw+xUP=QT5s@>X+UI{FZi3k6o&L z8yLT7^Z*`m#Zu}!pAeOMIG4R@+Jx?xHVf{pei$WS>p4@HIaTA4y`1~tZ>jOFC#o#h zsmgAWo9*=J%X9k+UplaL3d(*o+f9%Z-74YqBKij;bsQ)OYvKy1x6>ccSIt0wY997D z6D+LS@5<$^vn3P;i3>oa)i{2^AR1A5d`+rE8v)dI_j=xJi;Kk$A$v%Ost$9R8wI z)J7eQXi~jSW&KnuR^PjOffX;T=nrohVw;XEQ6{o$(VIEvlmDT^w)q1*`C@6w!KYT{ zOMPs!)y(Wb`%ewqXUiXcF&!|^aBbVF5h5Mn-TVmvKp8_)ON~i16~y{9PGirj2tW}z zhM-|6)Ye=VZ`DYi|3Jal^2ZB;hN-9SoM3Vz%!`3->!{hNucpgDrx}^>l>X*A->Zk*G2!3OHk5n5FvN$)jeq!x z)(mqd5GKRj27c2HWIL5LNnI@;y|s~t$FN|n5?>98cDlTD^(WXxEs~5l0z}An+s~>Xkfv?+!>s6g9oTc^HE8+OrHT3RbQyHaN z_aB)*m+}sco=U8VA0`R7MM;3?E^&;75dm^a3AX!i1cM1wi7T5J`jlF|%Zsm4JEWCC znKbwfQf7dxh7j(&3qJ<=*PP!wKhs^< z45_tC)m>1)Ey6el?;Hdqehm14FR9qd8+NYdj!!b1wy^PiBg#@b;X;uQchF2HepXME z+wi%ZqhiGCi~)a}`(&o&*$=p~BPV%rr@j-lz{F7-mw%TSXMm$kkMZHJI5c~q36mHP z)LY@=+B70p0_4ZUQcT)&q2;l;Qs_$ot*xdSIhcf*eGD`yPnsR46}QO|E94#R6FXzU zk9FqUREipFb(ErHu(LC)AXNl32RDtIxa6tgqOC8?gD8Tqk8-(M7I7rSU)qE7yWoz* zcQKa=0`yf~LU0GdzG?FbPoe4p9y`gP!&Hqz5!ywn31hV08H>n$z{!J}5#mL=96;1Z zx_rB}&}+po5+_-r>Iz{UtE$9d7>Uq?G9!UNF-3U0IJ}Xv`reC}5Hr@oRGVaq@F)0* z(Z)~m_Gu*jje7liTZC@(9n?8$C~@0k<7a6Cbu7xK90QB<8xm%%P=!t-vQZ=Y!o@T` zLjdU^u^t7xU^_xwRERXWZ&}>0DH(l4R(PB#X}#cp30g^2a2(xsvP)CEw#KXoAHF5+ ztc~pi6a~KyuhrOKD#&qe(|5klYc%T!a48hCH7bB(%BM?IzI&XL;01HhLZQZ#@Y9qa zfr*?o5ya=&?v0-h(en}uzw}X%&5syns)9t*9Ctez63Wdv14!R{DP0lxmu7qY0g(dfGMLRw3v;rdU=QlFS zjVqp*8L;^cA57A{i>YCANm0iOpaMUur>}dz0rsXTknD`h)sK8fWokPfpeNy!rj%s! zd=p7yKcb<}KmU-hAosFyk$}fu4l;8S#@i#hR#x$ zwJ#R=6fel|^QtcDe@l*X7lvRI&b%)ye6uEln{5>j!F(8~sp1RWFEg##Tyx5u4ZjnrS)cD9UZ?x-qp z=d*oLz{~R(l|8C3AYJBdUFP(bII~}lJ5y4-;H)*+-{=M*!Psy>KcGRYi610d_WFY}+TzE6(!FCcAeKYQP) zUbe2uCk71elDMvQN%mP7U`Uu_= z;0<~#&NPGs0)tcjU;rQt*#o;>i?fGh78ONlP4i8c6&W_dp=6VghzC~gb3w8Xa~PT` z?`bvO_+--}Hb<#3z;~$RLYGliWQD>J{!T)o(UrNYe- zb@i!L^WR-1M?;`0k`XPOM&Q{APda>y?aVK)c%>B0c?0Fli?wsl98XtWFRnxTm#itL zujF|X7JlS0U7zG{t$Dk5mq%}CyK`R7Bpn;cpqgfdx?ZZ(b58x}?NMT@(RKonw*U#- z#DEjC9JpGhUf&}Op-4+hm6ffbt=?nU%+Ht^_z~&;X7*^rGPsZbMh!K}GM-%8R(nEE zC691l+?tNb6Rxa*a`DMcC$MBbh!(cxmrM=PoM5FM z8xNECbeXL_M9ewj`mDQj44TVmwV%FoVx3GFhzOgaF)s#JzLdcVYE1<;*St!sAT5&m z`hK~(v2OZ{@tXb&HS=h4UxP0@`IG!@o=(}l)E;;L-XQc`EmB=K@;}Q%8<%J&TkonnnN#0>fWfDct-S_!hRrDV7ztUZBKZ z2MN2Cu4&C=tu)5YwQ#m-Aq}`8Rr}sBQS&1UKIo=%bpJ5+l-L^Xu)1IM3(1JliBjY= zbw^>JBg^Vu#vtEZH~EDl3NuHN{$LQ3)GaK3d#cV5(02x;#jaT95P!|ft9olip!qu2 zu@K!^l7%XHCzbR;uq-A~lt9y{yUix6((9)fiHXPSa>?{5kWLZUYEa8cP|EscRm~D? z^3Jm2@kV`&yZpeXTLVdVrTe0wuVS&XIe9;=98g3BbJfbm97Pq)H*ZHybBhK__cHl29EgZmFDD3b3QJhTS_49uq8Pm08KQ2;(E(d;W$4|? 
zx84$hIkAJ!FR7N3=SW)I(6kd0t)|z>8kIZ}QXN*4@C&Qpa4zcE5#-QXf8vNAB&zIryQTSV zrwuXMc%3u+ptyo;b92d|RqF(#v?mi!@(qJGG-4BKbuY%S&R2-gMt+dnOWgiE%Pdq) zN6PD0C!<1Y(8bQ3a2V#tdU#u&y+7vuf;Qis9pk!Xvm(&#BTFUz66Yuu*)x%;L%N2A z&&%Re47x=GcZsGNs8Om-Wq6i5PuyjgGKVr%8Ddf>!_M53PVvSn}4+dBs38XqAYq%P)g_ac^T=q z_)}$()UT?$;fyT3HcRhRum_#|s;OW@)fQ&n=L*K7LC zC1xM<%@cYWZz7Ak%%xM?g3r~fc`j2^!#G1~K+a52u>Qk#aA};hBOw{dd0V zFS}f(?^2hg;|jRkpE8$m1$Ei{p3I%Oy6{4wrbr?%Yg`Qeh{OT(x- z@%&`@nma$QiS*0OfmN-rZKFV}NP()H4rl1BqbT+pw{ix~DSPGpE4#_wlM2oE_e6Mj63HQZ(*}pM743^CTqtH-NGBGmH}<$IrQwm${`wl*d?Uyb2eyhgKpD zE)72At6kJAZu8|7;I7^YW0&HOy)6yqpS@qb6m63X8uyenekA?q;od`{Nz=+Y^=n$3 zm$k2&OhbFVx+8*0a)bkh|xok7ril+f_9Xq@P!O*@M4=cWnot_At!aR?3*gU5IUXg@ODt#>s{Iva-~b2mDzKD$>42Q5-S;pxNX;Y_s>foQ9Z-0q$#q z;wt}|T~pslxoXc39Jp7dE7GMW`W@b*r9dkhrc;n#sV6td8y(#O2jDak-(0t0Ap`g`I9KD%^u&0*V;PntBU0FFH+Ma%FiPh g8{LBaVzLHEX^WKwl-=w7{o_|%MMt?*(K`5l0Qksb82|tP diff --git a/tool_water_balance/media/deltares.png b/tool_water_balance/media/deltares.png deleted file mode 100644 index 5ef8801b4450611f15dafab5a37180160bab79f4..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 2637 zcmaJ@X;@Ne8wPhB*V0@wL~;iNgamg5gaUPxQpd6oj)KIFie)A?Q*Kj=;gvO+j@hb} z=BQ`^TFb5f*A>U}FFR zfsCjWVx)S+FMoPE>VJXNp6}FywTu)aixMWv(wGte6vP%L01zslnFK@vOm@1s2k-@f zG@)EZj4XyuBd~;g1aldKkn=@qHVEV!AQv%NJU|9X0Ft-@Kj>_88x+E2`$40zbWge{ z1mJKfJ0w8VjxYvm2aknkLj(LFzH)+EfDg!+5IKLlKuVDNK|kmc)br(SBoy)iBIEf% z|4Ax_9svmvN&pBJfq}C;Q78xwhd_B@aX6F*1nr4JBR$nG4vz97U~vTWI>^Tds*WaM zClVrwdMmlq4Zhcszbt1C55O5pZd`K*p5A1yaXP3PeE4 zl5j;bu22A3R%9j!Q)GTnb*BH6z!!a%6-YlGlls7ra;6B0LU=Bh^a)6(|9>c-{~0Zn zMFRik`#*)HjC2uzj0B{@6bVb+xJ1Y0P$EKz1YpX95{6K?{nIH%aD+0Ulp_>DLZWaG zmuRklEliWTe!$b|1gbzPV+vRRmFNdmD84`rY;7B3f!6+07gAegWlSy8| z!B|gnFbRkI#3c$@DSSX6`^07c%U$v+)6e+U*jxt(Q(*T?GQ^bWn*w4Ee}| zINo;ijpnVGH`A$a>c$#glQmx7WlRAXj5+Jvt}YHO!!)@%n`2HdxTd=pLM5VViQntP z$l8WErr+lo9Y|(@D_#>DIAG11oT{fKPaDQcpZQ!&0>%){KIg~237-d&W@u?MjgRil zmPsFO`o)W0kSa~Oe7_cWf*F&nzF?j6a8JAPa;)`i5j^Al>GlKd@@+&D-HWh(U1JR& zMI#ufFS66kFtO8_d57$!R>HzH8Z;2)nZN3VD|dkz&&2t;k5u5Pz_tAV!5H*i`&M59 z$DsZ{Grr)y}V>xs*j`U9k}`t z_POrKi>8ibhu_y9Y2_J;0!$ul2+ec;GxV*2OYPih7$;P&7nLz;@`JvdY|tajk{T`b zWp1;bgI_SyLk}>Wrb=P@*G?SI@;_R%@7^T(=5?`#J$0oZYww<~jv-oO?`$wrM*Q@) z$agj8!C#KWSV`eP&eBUWhk(EbKSBm-nusw)L`Eem_W7B*uvW&VZqP{Pn)w{dg~0h8 z``2OmI?_(*XeTuHY#>e7hu6Ez|eB*TTc4^V;K=yoLhG zVYWwk`NdB*Zb{g=yF!nAWy18zs_a)8oL8Ep_wku70Y}@rF*l<-AwxGqTGmCwEKfL4 z9L5Jt^PG@3REofuVv=+infl^QSJ9~rugJO8Ic{lQGZkkQJ?$Q=5P#^m9w|}TUfMk2 z(z7q?QB-}D=$%%sLCTe@Rnrr-n2D96o!>1MK3B-_BUQt5N3?#uKc}-_e9;J9lDN_& z&LMVZwvAy8KV|M-<^gj@IcTA#_B`cnUO3d!J4|5gX|k`F_VWG)=Y8%LmHy!aOXe2k zj7`14rpf*fEsrd|y?lsa@4U`3rd`@s{S*5(_!ewvZJ)|?NP%;k-gU|6#J9Yi>xu4N zRaOr)AV%}i_vhW4yzWeRrAKv6v=Js8>?=dt2=cc#-^%DJ0s@?T5WhyY*P^0hziYOmZ`h}JeJD;=-I>Csv? zW|Jw4ik&O&UU#TKt~^}kTKyDmcKOU@nYP8|p16b%FVgHc*ThrjkDSd4I!W>GIo9}0 z^96IgI5BE#?R54qW9N_JZ1zQdUM{bD?(r_o=;wwi=epV?eAN5tW7Sq63ochEr;Glm zf~|lxcXlLe0zLHDfU6I%l7KcRa?a*p&Fj z>rACX^CMV`O@#6*3Zo42LY#XrE+GCPqsMhLxzSn0*oclC)VsP4|6-PY zMb$)I3BI%pUeM;#!|t~-dP)Nv+|IhI(7!%!Y`xWF=$)US8+M-3^)u!f=No5BpViHe zkDn?C`kQI_?e!yDuv}e7@z~LhjwPbzfn(fju618$n>Q5I{OLOQRRpOnId%AJ?TWpF zC%6A4zzpnnP>Qox_%<3`ny{}fk_p67t6R!$o$#mq*8e7P4b;{tC(zf*)l?-IeYqkf zu_szDIQMA&xo33btZGuM2vdWPTufrRLR?bH!Mb#KpB(xN? 
znJAk2w6uqk-6HGzClgdR^n3btWxfMw-E*|gyp5V?vv|spw#newvq#(5*Y4Yh+!xVI z$DlI@-ybB-S(%L5H0vMUFQ*NYXvyGEe{rSO5AixJZV9tEJCr-j_3owMu7-0mF(mwY z{lMIIv(ejL3e1~4SVufOVeQfsR5z}z;a n;FjM@FvUwI?=9yWL7Jdvc3P8OC@>f>C<<)jmq2^hYp=((y}P$}@AFvRyh%f8OBVb2>;31= z=bP{Id>?@idj0whf0;aG@?2{S)>?)OAp}xNPM$pZdR<-Je4s!dXnZKr#YJ=X?ca|8 zKrnn*tc9AI>bXD>&^b_mh!-uTp}zippuq5>fBWLLH-N}s0YtuZ1i&SrgW;#bEvp!? zS^+q37SSwoR9XQ@u>Z#i2m)+ivw#Nz$ii!L;K9C@K?6}dDkpB)Oy;9=AXV_A#YYG% z0tB~3Sil;DF~~w5;dx*TSVMID=TKp19?MqVbPqHTvIb-%OnmEe2qh8uJR*;eb%0P9 ztJKgTto4Pmrw1byr{;Z+Yj;kf#|dGKhs_zn?H&Ad^^@D$V$F+Q-u6POZv_HuHj4^5 zNMpFZ>KAm+nhikot-n*g@o8*03_=Vl$MX>R9#Six;=QV|OdR_e=1iVMS!t<1YY|NH z^yz;;+}POo@RFs=CO)@$(}x-W2{P~EgfudjBXiIF%>T-C0RAcjl^fTB7Fekm5Nohr z4-+to#JiCVi3PM~{$mb%;M^*r6}W3UOAd$N&!&bxQ~uqzsH0^ z*uerAgUDtGO`T3vRTZgp3WV^_JQNP&7CcI3&*RXKYXL3>@;uP30kB}Y)5x%cR1rEi zJ%ij(4?w4Ks40trkRTW&fCXD1n#&N2w^Ld&0s(|VAwpVXJcH@Fjs*gFE5J=FAd~Sc zliHzTghF70Yy#yt2!=oyP|817Czm1JnWm(q1SJH@(I{zA$#$l!Z-Bbh4D=L3whO5h zLMtki3WQ(*Aq8?UHx>y<31~%$*MpFfQKM=|Boa7EF>T5e>JA=ev@GSNSe*ILj@ z@Jz$naNj$S>qbclQV{L8cgw&?Yj8|~jB?0FqrBL3js-tiLTPc7vxzhQ{-uIb zsTd3aNaGQQSUl zoT8$3)k@$BkOcC<3j6@b;0_FS*_1Ivyc~_?cTo4hv#g!}W!^lSBG#FuU@Qe|xpVvk zUOSU!%f%yf{OS}1E5I7UpPj(5-R}UzfCS*(G(+WP{b$n(=s$R2Z^%6&Cdc+MUt`Ck z8(2GiH2>_C#{+rZK;8=CnQp4QZcH?a&_yT&U{L~W7;td|dEX4B0A0vJ#KHCx`*JT& zxRBvD}j4D7h%t-B=N2sp7rH;R*>jSk!-oh^9>6) z3b-1KAONI*8y>*+50G2X#bTfm(Ao2J&pbWerK`sD^)}|MSS}=<+eq42^8<>_TaLxv_rjy>gdJS3jhXjJ@5q zYuzhAOHi)`7|sT4TQYU&KvRn-;7e`}xn&dsyf8nB9`@liyaVoMurp*5X$ ziaKbKQ6j{FfYTCN7ng=7Fn34Xs$u5s-87s zKEmoJ4euQJMt585vIM`ot(9nhf9yfzE!Vo6e!b|fOG-+BuK_)Pe#XBZ;OnDN;3gmq zr~pnS^U&k-`>ww6=7PE-hbIAi_4~6hCbpZgT~Zrst81;>-&prvN6V>`R{%4Be+=h< z11yiXqrf9W7cVL=4`fEOe(pvi*C&!N|IR`@ULUHspe;bH!oN`czzS!}R}?P=t^ghe zHUWL99Jm*_2Kalt{Wx$T+ZJxC%n7^R6^L0HPs~Kr?W)u$`kFE3_1?(&4{{q%bgx^t z&H~ebNx+|h*}wxpSuzJ4U@dSQmV1WoGp6vo~zPq!?AIdne zxnxeyP{n{!pb3}{Tn4NHwj@*-?7-Z3^mhYi<5H~#9)D>2p0E{Ni*mb&h*dgVH(CWO z1bBL7@vFdj04gh;Q0*5!@AudVeKk7^&hOpwJ?~eh?Fiog%duT6Z+#3nFj$4eN$3FY z#f?lRmbE!RLol3Cv$*VeSA^SDRTX(f#eL@GKW*K2a^Jqr`21jS!~V|&tn)2_8K6@I zGrnuxJr6#!JKtEDd)+s`^YOa}s>g0x zbmw8f1qiKwIoGf5qwR;DBesun(UBfGeS%2sn*@fEU6cd*^DIAXrIJWiQ^HXwSXf;6f z2&4q6d7>s=L~aui-*80##Azujts&TJfQ>-!2*f&|A}xA8gxYZ++)1@R2A1noXICaVpVHxhM1Dv0ADeO@U}?u1m7rnt>d5>ow>SP7O(EFaQ= zlfX{kMGt@MR@+OO^1@W8tO+3TxcH2&aiG9)ooRaNE5w(%{inKQ>t zW-4_ikBk8E3&po&GruyCOlbN`;E7XkR}4qf|zc)`XuH1HXN^QIYz=eO9&F zwb5SS*1z!+dLaAk^d2Pg)RVE6@Z-jr{fP{ud~Y6%w2m^K z3n=Has|U+dEi&A<;Vxot#s5N#!Rz;%aMlk4UEa{HEjtmSN`?>!lvVtU&J%W50 z7^>757#dm_7=8hT8eT9klo~KFyh>nTu$sZZAYL$MSD+10f+@+{-GzZ+Rj;xUkjGiz z5m^kh={g8AI%&+V01C2~c>21sKjdU#=jYA8@i2^mfpNR1i(`n#@uOF4GeiPKSRd%` zmiACmO37`SbV*)hOW+^J8)i46mIPexY<2C@J>qrYk{|a*PhnBv8%`=m<{xr12%B`S zw^i_L*9{#Pfs|UYnL5UkXWmWavJF04`F&pTy*=jB|MxDOxB7R?(c1X#Q|mdJrY?FL zD(iS5P*-cC0*6@FHvdR*mea4xv;_8@SZ>KBHn4f`ED0>vna3n zVS{V@pNl2uE6#U^6rK&vj@y~)UuA#Tx@^W@b2Wka9{WWdR(b@>Og(ZtE_{*Yj#i@` zvuAkBxBZ~(pO?3w(_;Gk1shA()n1TT@J#-Z{^ozTXFU3TM->>WswJ)wB`Jv|saDBF zsX&Us$iUEC*T7QOz$C=L+{)0@%FtNbz{twLK*G165k*68eoAIqC2kFw0w-*N8YDqB t1m~xflqVLYGL)B>>t*I;7bhncr0V4trO$q6BL!5%;OXk;vd$@?2>|$`C*%MC diff --git a/tool_water_balance/models/__init__.py b/tool_water_balance/models/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tool_water_balance/models/wb_item.py b/tool_water_balance/models/wb_item.py deleted file mode 100644 index 569b116a..00000000 --- a/tool_water_balance/models/wb_item.py +++ /dev/null @@ -1,66 +0,0 @@ -from collections import OrderedDict -from random import randint -from threedi_results_analysis.models.base import BaseModel -from 
threedi_results_analysis.models.base_fields import CheckboxField -from threedi_results_analysis.models.base_fields import ColorField -from threedi_results_analysis.models.base_fields import ValueField -from threedi_results_analysis.utils.color import COLOR_LIST - -import numpy as np - - -EMPTY_TIMESERIES = np.array([], dtype=float) - - -def select_default_color(item_field): - """ - return color for lines - :param item_field: ItemField object - :return: tuple with the 3 color bands (values between 0-256) - """ - - model = item_field.row.model - colors = OrderedDict([(str(color), color) for color in COLOR_LIST]) - - for item in model.rows: - if str(item.color.value) in colors: - del colors[str(item.color.value)] - - if len(colors) >= 1: - return list(colors.values())[0] - - # predefined colors are all used, return random color - return (randint(0, 256), randint(0, 256), randint(0, 256), int(180)) - - -class WaterbalanceItemModel(BaseModel): - """Model implementation for water balance graph items""" - - ts = np.array([0]) - - class Fields(object): - """Fields and functions of ModelItem""" - - active = CheckboxField( - show=True, default_value=True, column_width=20, column_name="" - ) - fill_color = ColorField( - show=False, - column_width=30, - column_name="", - default_value=select_default_color, - ) - pen_color = ColorField( - show=True, - column_width=30, - column_name="", - default_value=select_default_color, - ) - name = ValueField( - show=True, - column_width=120, - column_name="Category" - ) - default_method = ValueField(show=False) - order = ValueField(show=False) - series = ValueField(show=False) diff --git a/tool_water_balance/tests.py b/tool_water_balance/tests.py deleted file mode 100644 index 5d496a75..00000000 --- a/tool_water_balance/tests.py +++ /dev/null @@ -1,411 +0,0 @@ -from pathlib import Path -from qgis.core import QgsCoordinateReferenceSystem -from qgis.core import QgsCoordinateTransform -from qgis.core import QgsGeometry -from qgis.core import QgsPointXY -from qgis.core import QgsProject -from threedi_results_analysis.tests.test_init import TEST_DATA_DIR -from threedi_results_analysis.tests.utilities import ensure_qgis_app_is_initialized - -from .tools import WaterBalanceCalculation -from .tools import WaterBalanceCalculationManager -from .views.widgets import INPUT_SERIES -from .views.widgets import WaterBalanceWidget -from .views.widgets import BarManager - -import mock -import numpy as np -import pytest - - -TESTMODEL_DIR = TEST_DATA_DIR / "testmodel" / "v2_bergermeer" -MODEL_SQLITE_PATH = TESTMODEL_DIR / "v2_bergermeer.sqlite" -assert Path.is_dir(TESTMODEL_DIR), "testmodel dir not found" -assert Path.is_file(MODEL_SQLITE_PATH), "modelspatialite not found" - - -# links_expected consists of (lines, pumps) -LINKS_EXPECTED = ( - { - "1d_in": [29136], - "1d_out": [], - "1d_bound_in": [], - "1d_bound_out": [], - "2d_in": [ - 3710, - 6573, - 10314, - 1557, - 8964, - 8900, - 8901, - 3689, - 3690, - 4879, - ], - "2d_out": [ - 10369, - 5348, - 4895, - 3730, - 10347, - ], - "2d_bound_in": [], - "2d_bound_out": [], - "1d__1d_2d_flow": [], - "2d__1d_2d_flow": [], - "1d_2d_exch": [30649, 31654], - "2d_groundwater_in": [ - 22984, - 17968, - 25375, - 26725, - 20121, - 25311, - 25312, - 20100, - 20101, - 21290, - ], - "2d_groundwater_out": [ - 26780, - 21306, - 20141, - 21759, - 26758, - ], - "2d_vertical_infiltration": [ - 14931, - 11591, - 12517, - 12503, - 12504, - 11592, - ], - }, - {"in": [], "out": []}, -) - -NODES_EXPECTED = { - "1d": [11553, 12961], - "2d": [554, 555, 1466, 
1467, 1480, 3894], - "2d_groundwater": [5928, 5929, 6840, 6841, 6854, 9268], -} - -TIMESTEPS_EXPECTED = np.array( - [ - 0.0, - 300.46375545, - 602.91879098, - 918.32741104, - 1204.65442229, - 1504.12043258, - 1801.25668355, - ] -) - - -def _helper_round_numpy(one_array): - """round numpy values to certain decimal precision so that we can test it more easily""" - DECIMAL_PRECISION = 6 - return np.around(one_array, decimals=DECIMAL_PRECISION) - - -def _helper_get_input_series_id(input_serie_name): - id_found = [ - id[1] for id in INPUT_SERIES if id[0] == input_serie_name - ] - assert len(id_found) == 1 - return id_found[0] - - -def _helper_calculate_agg_flow(aggregated_flows, input_serie_id): - timesteps = aggregated_flows[0] - all_time_series = aggregated_flows[1] - cum_flow = 0 - prev_t = 0 - for timestep_index, time in enumerate(timesteps): - dt = time - prev_t - prev_t = time - flow = all_time_series[timestep_index, input_serie_id] * dt - cum_flow += flow - return cum_flow - - -@pytest.fixture -def wb_polygon(): - """WaterBalancePolyon is to select links, nodes, pumps from model schematisation""" - polygon_points = [ - QgsPointXY(4.70635793604164299, 52.64214387449186461), - QgsPointXY(4.70644905107772882, 52.64329192394655621), - QgsPointXY(4.70765176955406783, 52.64332836996099019), - QgsPointXY(4.70806178721645541, 52.6419889789305202), - QgsPointXY(4.70725997489889636, 52.64173385682948236), - QgsPointXY(4.70635793604164299, 52.64214387449186461), - ] - polygon = QgsGeometry.fromPolygonXY([polygon_points]) - tr = QgsCoordinateTransform( - QgsCoordinateReferenceSystem( - 4326, QgsCoordinateReferenceSystem.PostgisCrsId - ), - QgsCoordinateReferenceSystem( - 28992, QgsCoordinateReferenceSystem.PostgisCrsId - ), - QgsProject.instance(), - ) - polygon.transform(tr) - assert polygon.isGeosValid(), "polygon is GeoInvalid. 
WaterBalance tests will fail" - return polygon - - -@pytest.fixture() -def wb_calculation(three_di_result_item, wb_polygon): - three_di_result_item - ensure_qgis_app_is_initialized() - mapcrs = QgsCoordinateReferenceSystem( - 4326, QgsCoordinateReferenceSystem.PostgisCrsId - ) - wb_calculation = WaterBalanceCalculation( - result=three_di_result_item, - polygon=wb_polygon, - mapcrs=mapcrs, - ) - return wb_calculation - - -def test_get_incoming_and_outcoming_link_ids(wb_calculation): - links = wb_calculation.flowline_ids, wb_calculation.pump_ids - assert links == LINKS_EXPECTED - - -def test_get_nodes(wb_calculation): - nodes = wb_calculation.node_ids - assert nodes == NODES_EXPECTED - - -def test_time_steps_get_aggregated_flows(wb_calculation): - """test A) number of timesteps, B) wheter we get a time series for each link, - pump and node""" - time = wb_calculation.time - flow = wb_calculation.flow - - assert ( - _helper_round_numpy(time) == _helper_round_numpy(TIMESTEPS_EXPECTED) - ).all(), ( - "aggregation timesteps array is not what we expected (even when " - "we round numbers)" - ) - assert len(time) == len(flow), ( - "Number of time_steps is not equal to number of values in time_series" - ) - - assert len(INPUT_SERIES) == flow.shape[1], ( - "For all INPUT_SERIES elements " "a time series should be calculated" - ) - - -def test_get_aggregated_flows_2d_and_1d(wb_calculation): - aggregated_flows = wb_calculation.time, wb_calculation.flow - - EXPECTED_CUMM_2D_IN = 719.93930714 - ID = _helper_get_input_series_id("2d_in") - cumm_flow = _helper_calculate_agg_flow(aggregated_flows, ID) - assert _helper_round_numpy(cumm_flow) == _helper_round_numpy(EXPECTED_CUMM_2D_IN) - - EXPECTED_CUMM_2D_OUT = -12690.85412108924 - ID = _helper_get_input_series_id("2d_out") - cumm_flow = _helper_calculate_agg_flow(aggregated_flows, ID) - assert _helper_round_numpy(cumm_flow) == _helper_round_numpy(EXPECTED_CUMM_2D_OUT) - - EXPECTED_CUMM_1D_IN = 7.1771299119876979e-09 - ID = _helper_get_input_series_id("1d_in") - cumm_flow = _helper_calculate_agg_flow(aggregated_flows, ID) - assert _helper_round_numpy(cumm_flow) == _helper_round_numpy(EXPECTED_CUMM_1D_IN) - - EXPECTED_CUMM_1D_OUT = -2832.9581887459126 - ID = _helper_get_input_series_id("1d_out") - cumm_flow = _helper_calculate_agg_flow(aggregated_flows, ID) - assert _helper_round_numpy(cumm_flow) == _helper_round_numpy(EXPECTED_CUMM_1D_OUT) - - -@pytest.fixture() -@mock.patch( - "threedi_results_analysis.tool_water_balance.views.widgets.SelectPolygonTool" -) -def wb_widget(pt, wb_calculation): - ensure_qgis_app_is_initialized() - manager = WaterBalanceCalculationManager - iface = mock.Mock() - wb_widget = WaterBalanceWidget( - "3Di water balance", iface=iface, manager=manager, - ) - wb_widget.calc = wb_calculation - return wb_widget - - -def test_wb_widget_get_io_series_net(wb_widget): - io_series_net = wb_widget._get_io_series_net() - expected = [ - { - "label_name": "2D flow to 1D (all domains)", - "in": ["1d__1d_2d_flow_in", "2d__1d_2d_flow_in"], - "out": ["1d__1d_2d_flow_out", "2d__1d_2d_flow_out"], - "type": "NETVOL", - }, - { - "label_name": "leakage", - "in": ["leak"], - "out": ["leak"], - "type": "2d_groundwater", - }, - { - "label_name": "constant infiltration", - "in": ["infiltration_rate_simple"], - "out": ["infiltration_rate_simple"], - "type": "2d", - }, - {"label_name": "2D flow", "in": ["2d_in"], "out": ["2d_out"], "type": "2d"}, - {"label_name": "1D flow", "in": ["1d_in"], "out": ["1d_out"], "type": "1d"}, - { - "label_name": "groundwater flow", 
- "in": ["2d_groundwater_in"], - "out": ["2d_groundwater_out"], - "type": "2d_groundwater", - }, - { - "label_name": "lateral flow to 2D", - "in": ["lat_2d"], - "out": ["lat_2d"], - "type": "2d", - }, - { - "label_name": "lateral flow to 1D", - "in": ["lat_1d"], - "out": ["lat_1d"], - "type": "1d", - }, - { - "label_name": "2D boundary flow", - "in": ["2d_bound_in"], - "out": ["2d_bound_out"], - "type": "2d", - }, - { - "label_name": "1D boundary flow", - "in": ["1d_bound_in"], - "out": ["1d_bound_out"], - "type": "1d", - }, - { - "label_name": "0D rainfall runoff on 1D", - "in": ["inflow"], - "out": ["inflow"], - "type": "1d", - }, - { - "label_name": "change in storage", - "in": ["d_2d_vol", "d_2d_groundwater_vol", "d_1d_vol"], - "out": ["d_2d_vol", "d_2d_groundwater_vol", "d_1d_vol"], - "type": "NETVOL", - }, - {"label_name": "pump", "in": ["pump_in"], "out": ["pump_out"], "type": "1d"}, - {"label_name": "rain on 2D", "in": ["rain"], "out": ["rain"], "type": "2d"}, - { - "label_name": "interception", - "in": ["intercepted_volume"], - "out": ["intercepted_volume"], - "type": "2d", - }, - { - "label_name": "surface sources and sinks", - "in": ["q_sss"], - "out": ["q_sss"], - "type": "2d", - }, - ] - assert io_series_net == expected - - -def test_barmanger_2d_groundwater(wb_widget): - io_series_2d_groundwater = wb_widget._get_io_series_2d_groundwater() - bm_2d_groundwater = BarManager(io_series_2d_groundwater) - expected_labels = [ - "groundwater flow", - "in/exfiltration (domain exchange)", - "leakage", - "net change in storage", - ] - assert bm_2d_groundwater.xlabels == expected_labels - - -def helper_get_flows_and_dvol(domain=None): - STORAGE_CHANGE_LABELS = ["net change in storage", "change in storage"] - sum_inflow = 0 - sum_outflow = 0 - d_vol = 0 - for idx, label in enumerate(domain.xlabels): - inflow = domain.end_balance_in[idx] - outflow = domain.end_balance_out[idx] - if label in STORAGE_CHANGE_LABELS: - d_vol += inflow + outflow - else: - sum_inflow += inflow - sum_outflow += outflow - return sum_inflow, sum_outflow, d_vol - - -def test_water_balance_closure(wb_calculation, wb_widget, wb_polygon): - # The netto inflows and outflows of the three sub-domains (1d, 2d, - # 2d_groundwater) must equal the netto inflow and outflow""" - time = wb_calculation.time - flow = wb_calculation.flow - t1 = min(time) - t2 = max(time) - - io_series_net = wb_widget._get_io_series_net() - io_series_2d = wb_widget._get_io_series_2d() - io_series_2d_groundwater = wb_widget._get_io_series_2d_groundwater() - io_series_1d = wb_widget._get_io_series_1d() - - bm_net = BarManager(io_series_net) - bm_2d = BarManager(io_series_2d) - bm_2d_groundwater = BarManager(io_series_2d_groundwater) - bm_1d = BarManager(io_series_1d) - - # netto domain - bm_net.calc_balance(time, flow, t1, t2, net=True) - sum_inflow, sum_outflow, d_vol_net = helper_get_flows_and_dvol(domain=bm_net) - assert _helper_round_numpy(d_vol_net) == _helper_round_numpy( - sum([sum_inflow, sum_outflow]) - ) - - # 1d domain - bm_1d.calc_balance(time, flow, t1, t2) - sum_inflow, sum_outflow, d_vol_1d = helper_get_flows_and_dvol(domain=bm_1d) - assert _helper_round_numpy(d_vol_1d) == _helper_round_numpy( - sum([sum_inflow, sum_outflow]) - ) - - # 2d domain - bm_2d.calc_balance(time, flow, t1, t2) - sum_inflow, sum_outflow, d_vol_2d = helper_get_flows_and_dvol(domain=bm_2d) - assert _helper_round_numpy(d_vol_2d) == _helper_round_numpy( - sum([sum_inflow, sum_outflow]) - ) - - # 2d_groundwater domain - bm_2d_groundwater.calc_balance( - time, flow, 
t1, t2, invert=["in/exfiltration (domain exchange)"] - ) - sum_inflow, sum_outflow, d_vol_2d_gr = helper_get_flows_and_dvol( - domain=bm_2d_groundwater - ) - assert _helper_round_numpy(d_vol_2d_gr) == _helper_round_numpy( - sum([sum_inflow, sum_outflow]) - ) - - # the sum of volume changes in the 3 sub-domains must equal volume change - # of the netto domain - assert _helper_round_numpy( - sum([d_vol_1d, d_vol_2d, d_vol_2d_gr]) - ) == _helper_round_numpy(d_vol_net) diff --git a/tool_water_balance/tools.py b/tool_water_balance/tools.py deleted file mode 100644 index 25eb7dbf..00000000 --- a/tool_water_balance/tools.py +++ /dev/null @@ -1,147 +0,0 @@ -from pathlib import Path -import logging - -from qgis.PyQt.QtCore import Qt -from threedi_results_analysis.utils.user_messages import StatusProgressBar -from threedi_results_analysis.threedi_plugin_tool import ThreeDiPluginTool - -from .calculation import WaterBalanceCalculation -from .utils import WrappedResult -from .views.widgets import WaterBalanceWidget - -logger = logging.getLogger(__name__) - - -class WaterBalanceTool(ThreeDiPluginTool): - """QGIS Plugin Implementation.""" - - def __init__(self, iface, model): - """Constructor. - :param iface: An interface instance that will be passed to this class - which provides the hook by which you can manipulate the QGIS - application at run time. - :type iface: QgsInterface - """ - super().__init__() - self.iface = iface - self.icon_path = str(Path(__file__).parent.parent / 'icons' / 'weight-scale.png') - self.menu_text = u"Water balance tool" - - self.is_active = False - self.widget = None - self.manager = WaterBalanceCalculationManager(model=model, iface=iface) - - def run(self): - if self.is_active: - return - - widget = WaterBalanceWidget( - "3Di Water balance", manager=self.manager, iface=self.iface, - ) - widget.closingWidget.connect(self.on_close_child_widget) - self.iface.addDockWidget(Qt.BottomDockWidgetArea, widget) - widget.show() - - self.is_active = True - self.widget = widget - # TODO connect signals of results changes - - def on_unload(self): - """Cleanup necessary items here when plugin dockwidget is closed""" - if self.widget is not None: - self.widget.close() - self.active = False - - def on_close_child_widget(self): - """Cleanup necessary items here when plugin widget is closed""" - self.widget.closingWidget.disconnect(self.on_close_child_widget) - self.widget = None - self.is_active = False - # TODO disconnect signals of result changes - - def result_added(self, result): - self.action_icon.setEnabled(self.manager.model.number_of_results() > 0) - if self.is_active: - progress_bar = StatusProgressBar(1, "Calculating water balance...") - self.widget.add_result(result) - progress_bar.increase_progress() - - def result_removed(self, result): - if self.is_active: - self.widget.remove_result(result) - - def result_changed(self, result): - if self.is_active: - self.widget.change_result(result) - - def grid_changed(self, grid): - if self.is_active: - self.widget.change_grid(grid) - - -class WaterBalanceCalculationManager: - """ - Reset the cache - """ - def __init__(self, model, iface): - self.model = model - self.iface = iface - self._calculations = {} - self._polygon = None - - def add_result(self, result): - if self.polygon is None: - return False - - wrapped_result = WrappedResult(result) - if not wrapped_result.has_required_vars(): - return - if not wrapped_result.has_synchronized_timestamps(): - return - - polygon = self.polygon.transformed(crs=wrapped_result.lines.crs()) - mapcrs 
= self.iface.mapCanvas().mapSettings().destinationCrs() - calculation = WaterBalanceCalculation( - result=result, polygon=polygon, mapcrs=mapcrs, - ) - - self._calculations[result.path] = calculation - return True - - def remove_result(self, result): - try: - del self._calculations[result.path] - return True - except KeyError: - return False - - @property - def polygon(self): - return self._polygon - - @polygon.setter - def polygon(self, polygon): - if polygon is None: - self._polygon = None - self._calculations = {} - return - - self._polygon = polygon - for result in self.model.get_results(checked_only=False): - self.add_result(result) - - def __getitem__(self, result): - return self._calculations[result.path] - - def __iter__(self): - for calculation in self._calculations.values(): - yield calculation.result - - def __bool__(self): - return bool(self._calculations) - - def __contains__(self, result): - return result.path in self._calculations - - def __len__(self): - return len(self._calculations) diff --git a/tool_water_balance/utils.py b/tool_water_balance/utils.py deleted file mode 100644 index 8fdd9a35..00000000 --- a/tool_water_balance/utils.py +++ /dev/null @@ -1,286 +0,0 @@ - -from qgis.core import QgsCoordinateTransform -from qgis.core import QgsGeometry -from qgis.core import QgsProject -from qgis.PyQt.QtWidgets import QMessageBox - -from itertools import zip_longest -from logging import getLogger - -import numpy as np - -logger = getLogger(__name__) - - -class PolygonWithCRS: - def __init__(self, polygon, crs): - self.polygon = polygon - self.crs = crs - - def transformed(self, crs): - polygon = QgsGeometry(self.polygon) - qct = QgsCoordinateTransform(self.crs, crs, QgsProject.instance()) - polygon.transform(qct) - return polygon - - -class WrappedResult: - def __init__(self, result): - self.result = result - - def _get_layer_by_name(self, layer_name): - layer_id = self.result.parent().layer_ids[layer_name] - return QgsProject.instance().mapLayer(layer_id) - - @property - def lines(self): - return self._get_layer_by_name('flowline') - - @property - def points(self): - return self._get_layer_by_name('node') - - @property - def pumps(self): - return None # TODO - - @property - def text(self): - return self.result.text() - - @property - def parent_text(self): - return self.result.parent().text() - - @property - def threedi_result(self): - return self.result.threedi_result - - def has_required_vars(self): - if self.threedi_result.aggregate_result_admin is None: - self._pop_up_no_agg_found() - return False - - missing_agg_vars = self._get_missing_agg_vars(self.threedi_result) - if missing_agg_vars: - self._pop_up_missing_agg_vars(missing_agg_vars) - return False - return True - - def has_synchronized_timestamps(self): - threedi_result = self.threedi_result - t_q_cum = threedi_result.get_timestamps(parameter="q_cum") - t_vol_c = threedi_result.get_timestamps(parameter="vol_current") - if not (t_q_cum == t_vol_c).all(): - self._pop_up_not_synchronized_timestamps( - t_q_cum.tolist(), t_vol_c.tolist() - ) - return False - return True - - def _get_missing_agg_vars(self, threedi_result): - """Returns a list with tuples of aggregation vars (vol, discharge) + - methods (cum, current, etc) that are not (but should be) in the - v2_aggregation_settings - - 1. some vars_methods are always required: minimum_agg_vars - 2. some vars methods are required when included in the model - schematisation (e.g. pumps, laterals). 
- """ - check_available_vars = threedi_result.available_vars - - ga = threedi_result.gridadmin - gr = threedi_result.result_admin - - minimum_agg_vars = [ - ("q_cum_negative", "negative cumulative discharge"), - ("q_cum_positive", "negative cumulative discharge"), - ("q_cum", "cumulative discharge"), - ("vol_current", "current volume"), - ] - - # some vars must be aggregated when included in the model - # schematisation (e.g. pumps, laterals). problem is that threedigrid - # does not support e.g. ga.has_lateral, ga.has_leakage etc. For those - # fields, we read the threedigrid metadata. - simulated_vars_nodes = ga.nodes._meta.get_fields(only_names=True) - - if gr.has_pumpstations: - to_add = ("q_pump_cum", "cumulative pump discharge") - minimum_agg_vars.append(to_add) - - # TODO: wait for threedigrid's e.g. 'gr.has_rained') - # u'rain' is always in simulated_vars_nodes. So it does not make sense - # to check there. Thus, we're gonna read the nc's rain data - if np.nanmax(gr.nodes.rain) > 0: - to_add = ("rain_cum", "cumulative rain") - minimum_agg_vars.append(to_add) - - # gr.has_simple_infiltration and gr.has_interception are added to - # threedigrid some months after groundwater release. To coop with the - # .h5 that has been created in that period we use the meta data - try: - if gr.has_simple_infiltration: - to_add = ( - "infiltration_rate_simple_cum", - "cumulative infiltration rate", - ) - minimum_agg_vars.append(to_add) - except AttributeError: - if "infiltration" in simulated_vars_nodes: - to_add = ( - "infiltration_rate_simple_cum", - "cumulative infiltration rate", - ) - minimum_agg_vars.append(to_add) - - try: - if gr.has_interception: - to_add = ("intercepted_volume_current", "current interception") - minimum_agg_vars.append(to_add) - except AttributeError: - # gr.has_interception is added to threedigrid some months after - # groundwater release. To coop with .h5 that has been created in - # that period we read the simulated_vars_nodes - if "intercepted_volume" in simulated_vars_nodes: - to_add = ("intercepted_volume_current", "current interception") - minimum_agg_vars.append(to_add) - - if "q_lat" in simulated_vars_nodes: - to_add = ("q_lat_cum", "cumulative lateral discharge") - minimum_agg_vars.append(to_add) - - if "leak" in simulated_vars_nodes: - to_add = ("leak_cum", "cumulative leakage") - minimum_agg_vars.append(to_add) - - if "q_sss" in simulated_vars_nodes: - if np.count_nonzero(gr.nodes.timeseries(indexes=slice(0, -1)).q_sss) > 0: - minimum_agg_vars.append( - ("q_sss_cum", "cumulative surface sources and sinks") - ) - - missing_vars = [] - for required_var in minimum_agg_vars: - if required_var[0] not in check_available_vars: - msg = "the aggregation nc misses aggregation: %s", required_var[1] - logger.error(msg) - missing_vars.append(required_var[1]) - return missing_vars - - def _pop_up_no_agg_found(self): - header = "Error: No aggregation netcdf found" - msg = ( - "The Water balance tool requires an 'aggregate_results_3di.nc' " - "but this file could not be found. 
Please make sure you run " - "your simulation using the 'v2_aggregation_settings' table " - "with the following variables:" - "\n\ncurrent:" - "\n- volume" - "\n- interception (in case model has interception)" - "\n\ncumulative:" - "\n- rain" - "\n- discharge" - "\n- leakage (in case model has leakage)" - "\n- laterals (in case model has laterals)" - "\n- pump discharge (in case model has pumps)" - "\n- simple_infiltration (in case model has " - "simple_infiltration)" - "\n- sources and sinks (in case model has sources and sinks)" - "\n\npositive cumulative:" - "\n- discharge" - "\n\nnegative cumulative:" - "\n- discharge" - ) - QMessageBox.warning(None, header, msg) - - def _pop_up_missing_agg_vars(self, missing_vars): - header = "Error: Missing aggregation settings" - msg = ( - "The Water balance tool found the 'aggregate_results_3di.nc' but " - "the file does not include all required aggregation " - "variables. Please add them to the sqlite table " - "'v2_aggregation_settings' and run your simulation again. The " - "required variables are:" - "\n\ncurrent:" - "\n- volume" - "\n- interception (in case model has interception)" - "\n\ncumulative:" - "\n- rain" - "\n- discharge" - "\n- leakage (in case model has leakage)" - "\n- laterals (in case model has laterals)" - "\n- pump discharge (in case model has pumps)" - "\n- simple_infiltration (in case model has " - "simple_infiltration)" - "\n- sources and sinks (in case model has sources and sinks)" - "\n\npositive cumulative:" - "\n- discharge" - "\n\nnegative cumulative:" - "\n- discharge" - "\n\nYour aggregation .nc misses the following variables:\n" - + ", ".join(missing_vars) - ) - QMessageBox.warning(None, header, msg) - - def _pop_up_not_synchronized_timestamps(self, a, b): - header = "Error: timestamps are not synchronized" - table = "\n".join(f"{p} {q}" for p, q in zip_longest(a, b)) - msg = "q_cum and vol_current have different timesteps:\n" + table - logger.warning(msg) - QMessageBox.warning(None, header, msg) - - def __contains__(self, flow_name): - """ Return if a flow is relevant for the wrapped result. 
""" - if flow_name in { - "2D boundary flow", - "2D flow", - "lateral flow to 2D", - "rain on 2D", - "surface sources and sinks", - "volume change 2D", - }: - return self.result.threedi_result.gridadmin.has_2d - if flow_name in { - "1D boundary flow", - "1D flow", - "lateral flow to 1D", - "volume change 1D", - }: - return self.result.threedi_result.gridadmin.has_1d - if flow_name in { - "volume change groundwater", - "leakage", - "in/exfiltration (domain exchange)", - }: - return self.result.threedi_result.gridadmin.has_groundwater - if flow_name in { - "1D: 2D flow to 1D (domain exchange)", - "1D: 2D flow to 1D", - "2D flow to 1D (all domains)", - "2D flow to 1D (domain exchange)", - "2D flow to 1D", - "2D: 2D flow to 1D (domain exchange)", - "2D: 2D flow to 1D", - }: - return ( - self.result.threedi_result.gridadmin.has_1d and - self.result.threedi_result.gridadmin.has_2d - ) - if flow_name in { - "pumps", - "pump", - }: - return self.result.threedi_result.gridadmin.has_pumpstations - if flow_name == "groundwater flow": - return self.result.threedi_result.gridadmin.has_groundwater_flow - if flow_name in { - "simple infiltration", - "constant infiltration", - }: - return self.result.threedi_result.gridadmin.has_simple_infiltration - if flow_name == "interception": - return self.result.threedi_result.gridadmin.has_interception - if flow_name == "0D rainfall runoff on 1D": - return self.result.threedi_result.gridadmin.has_0d - return True # everything else relevant diff --git a/tool_water_balance/views/__init__.py b/tool_water_balance/views/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tool_water_balance/views/custom_pg_Items.py b/tool_water_balance/views/custom_pg_Items.py deleted file mode 100644 index dfe3fecf..00000000 --- a/tool_water_balance/views/custom_pg_Items.py +++ /dev/null @@ -1,64 +0,0 @@ -from pyqtgraph import AxisItem -from qgis.PyQt.QtCore import Qt - - -class RotateLabelAxisItem(AxisItem): - """ - Extends AxisItem to draw labels with an angle. - - Useful when using long text-labels which will otherwise overlap, for - example in a bar-graph. - - Currently only properly supported for the 'bottom' axis orientation. - """ - - def __init__(self, angle=15, *args, **kwargs): - """ - Initialize an new RotateLabelAxe with provided angle. - - RotateLabelAxe is a subclass of AxisItem. Other args and kwargs are - passed on to AxisItem. For more information, see - pyqtgraph.graphicsItems.AxisItem. - - Args: - angle: (int) angle of the labels, defaults to 15. 
- *args: - **kwargs: - """ - self.angle = angle - AxisItem.__init__(self, *args, **kwargs) - - def boundingRect(self): - # make big outer labels survive to the drawPicture() phase - return super().boundingRect().adjusted(-500, 0, 500, 0) - - def drawPicture(self, p, axisSpec, tickSpecs, textSpecs): - p.setRenderHint(p.Antialiasing, False) - p.setRenderHint(p.TextAntialiasing, True) - - # draw long line along axis - pen, p1, p2 = axisSpec - p.setPen(pen) - p.drawLine(p1, p2) - p.translate(0.5, 0) # resolves some damn pixel ambiguity - - # draw ticks - for pen, p1, p2 in tickSpecs: - p.setPen(pen) - p.drawLine(p1, p2) - - # Draw all text - if self.style["tickFont"] is not None: - p.setFont(self.style["tickFont"]) - p.setPen(self.pen()) - - text_flags = Qt.TextDontClip | Qt.AlignLeft | Qt.AlignTop - for rect, flags, text in textSpecs: - p.save() - p.translate(rect.center()) - p.rotate(self.angle) - p.scale(0.9, 0.9) - p.translate(-rect.center()) - rect.setX(rect.center().x()) - p.drawText(rect, text_flags, text) - p.restore() diff --git a/tool_water_balance/views/widgets.py b/tool_water_balance/views/widgets.py deleted file mode 100644 index 7bb7c721..00000000 --- a/tool_water_balance/views/widgets.py +++ /dev/null @@ -1,1238 +0,0 @@ -from qgis.core import Qgis -from qgis.core import QgsFeatureRequest -from qgis.core import QgsVectorLayer -from qgis.core import QgsWkbTypes -from qgis.core import QgsGeometry -from qgis.gui import QgsHighlight -from qgis.gui import QgsMapToolIdentify -from qgis.gui import QgsRubberBand -from qgis.gui import QgsVertexMarker -from qgis.PyQt.QtCore import pyqtSignal -from qgis.PyQt.QtCore import QEvent -from qgis.PyQt.QtCore import QMetaObject -from qgis.PyQt.QtCore import QSize -from qgis.PyQt.QtCore import Qt -from qgis.PyQt.QtGui import QBrush -from qgis.PyQt.QtGui import QColor -from qgis.PyQt.QtGui import QPalette -from qgis.PyQt.QtGui import QPixmap -from qgis.PyQt.QtGui import QTransform -from qgis.PyQt.QtWidgets import QAbstractItemView -from qgis.PyQt.QtWidgets import QComboBox -from qgis.PyQt.QtWidgets import QDockWidget -from qgis.PyQt.QtWidgets import QHBoxLayout -from qgis.PyQt.QtWidgets import QLabel -from qgis.PyQt.QtWidgets import QPushButton -from qgis.PyQt.QtWidgets import QSizePolicy -from qgis.PyQt.QtWidgets import QSpacerItem -from qgis.PyQt.QtWidgets import QSplitter -from qgis.PyQt.QtWidgets import QTabWidget -from qgis.PyQt.QtWidgets import QTableView -from qgis.PyQt.QtWidgets import QVBoxLayout -from qgis.PyQt.QtWidgets import QWidget -from threedi_results_analysis import PLUGIN_DIR -from threedi_results_analysis.tool_water_balance.views.custom_pg_Items import RotateLabelAxisItem -from threedi_results_analysis.utils.user_messages import messagebar_message, StatusProgressBar - -from ..utils import PolygonWithCRS -from ..config import AGG_CUMULATIVE_FLOW -from ..config import AGG_FLOW -from ..config import BC_IO_SERIES -from ..config import GRAPH_SERIES -from ..config import INPUT_SERIES -from ..config import TIME_UNITS_TO_SECONDS -from ..models.wb_item import WaterbalanceItemModel - -from collections import defaultdict -from copy import deepcopy -from itertools import chain -import functools -import logging -import numpy as np -import pyqtgraph as pg - - -logger = logging.getLogger(__name__) - -MSG_TITLE = "Water balance tool" -QCOLOR_RED = QColor(255, 0, 0) -POLYGON_TYPES = { - QgsWkbTypes.MultiPolygon, - QgsWkbTypes.MultiPolygonZ, - QgsWkbTypes.MultiPolygon25D, - QgsWkbTypes.Polygon, - QgsWkbTypes.PolygonZ, - QgsWkbTypes.Polygon25D, -} 
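
RotateLabelAxisItem, imported above, exists to keep the bar chart's long category labels readable. A hypothetical usage sketch; the labels and sizes are invented, and it assumes the class is importable and a Qt event loop is available, which QGIS normally provides:

    import pyqtgraph as pg

    app = pg.mkQApp()  # stand-in for the QApplication that QGIS provides
    axis = RotateLabelAxisItem(angle=20, orientation="bottom")
    axis.setTicks([[(i, label) for i, label in enumerate(
        ["2D flow", "groundwater flow", "net change in storage"])]])
    plot = pg.PlotWidget(axisItems={"bottom": axis})
    axis.setHeight(75)  # extra room so the angled labels are not clipped
    plot.show()
    app.exec_()
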
-VOLUME_CHANGE_SERIE_NAMES = { - "volume change 2D", - "volume change groundwater", - "volume change 1D", -} - -SERIES_NAME_TO_LINE_TYPES = { - "2D flow": ["2d"], - "2D boundary flow": ["2d_bound"], - "1D flow": ["1d"], - "1D boundary flow": ["1d_bound"], - "2D flow to 1D (domain exchange)": ["1d_2d_exch"], - "2D flow to 1D": ["1d__1d_2d_flow", "2d__1d_2d_flow"], - "pumps": ["pumps_hoover"], - "groundwater flow": ["2d_groundwater"], - "in/exfiltration (domain exchange)": [ - "2d_vertical_infiltration_pos", - "2d_vertical_infiltration_neg", - ], -} - -SERIES_NAME_TO_NODE_TYPES = { - "volume change": ["1d", "2d", "2d_groundwater"], - "volume change 2D": ["2d"], - "volume change 1D": ["1d"], - "volume change groundwater": ["2d_groundwater"], - "rain on 2D": ["2d"], - "0D rainfall runoff on 1D": ["1d"], - "lateral flow to 1D": ["1d"], - "lateral flow to 2D": ["2d"], - "leakage": ["2d"], - "interception": ["2d"], - "constant infiltration": ["2d"], - "external (rain and laterals)": ["1d", "2d"], - "surface sources and sinks": ["2d"], -} - - -# some helper functions and classes -####################### - - -def _get_request_filter(ids): - ids_flat = list(set([i for j in list(ids.values()) for i in j])) - return QgsFeatureRequest().setFilterFids(ids_flat) - - -def _get_feature_iterator(layer, request_filter): - # mainly pumps are often not present - if layer: - return layer.getFeatures(request_filter) - else: - return [] - -####################### - - -@functools.total_ordering -class Bar(object): - """Bar for water balance barchart with positive and negative components.""" - - SERIES_INDEX = dict(INPUT_SERIES) - - def __init__(self, label_name, in_series, out_series, type): - self.label_name = label_name - self.in_series = in_series - self.out_series = out_series - self.type = type - self._balance_in = None - self._balance_out = None - - @staticmethod - def _get_time_indices(ts, t1, t2): - """Time series indices in range t1-t2.""" - idx_x1 = np.searchsorted(ts, t1) - if not t2: - idx_x2 = len(ts) - else: - idx_x2 = np.searchsorted(ts, t2) - return np.arange(idx_x1, idx_x2) - - @property - def end_balance_in(self): - return self._balance_in - - def set_end_balance_in(self, ts, ts_series, t1=0, t2=None): - idxs = [self.SERIES_INDEX[name] for name in self.in_series] - ts_indices_sliced = self._get_time_indices(ts, t1, t2) - ts_deltas = np.concatenate(([0], np.diff(ts))) - # shape = (N_idxs, len(ts)) - balance_tmp = (ts_deltas * ts_series[:, idxs].T).clip(min=0) - self._balance_in = balance_tmp[:, ts_indices_sliced].sum() - - @property - def end_balance_out(self): - return self._balance_out - - def set_end_balance_out(self, ts, ts_series, t1=0, t2=None): - idxs = [self.SERIES_INDEX[name] for name in self.out_series] - ts_indices_sliced = self._get_time_indices(ts, t1, t2) - ts_deltas = np.concatenate(([0], np.diff(ts))) - balance_tmp = (ts_deltas * ts_series[:, idxs].T).clip(max=0) - self._balance_out = balance_tmp[:, ts_indices_sliced].sum() - - def calc_balance(self, ts, ts_series, t1=0, t2=None): - """Calculate balance values.""" - self.set_end_balance_in(ts, ts_series, t1, t2) - self.set_end_balance_out(ts, ts_series, t1, t2) - if self.is_storage_like: - self.convert_to_net() - - def convert_to_net(self): - """Make a bar that contains the net value (positive or negative).""" - # NOTE: use addition because out is negative - net_val = self._balance_in + self._balance_out - if net_val > 0: - self._balance_in = net_val - self._balance_out = 0 - else: - self._balance_in = 0 - self._balance_out = 
net_val
-
-    def invert(self):
-        """Flip positive to negative and vice versa."""
-        self._balance_in, self._balance_out = (
-            -1 * self._balance_out,
-            -1 * self._balance_in,
-        )
-
-    @property
-    def is_storage_like(self):
-        return "storage" in self.label_name
-
-    # add sorting
-    def __lt__(self, other):
-        # TODO: label_names are not unique, should add 'type' to make a
-        # primary key
-        if not self.is_storage_like and other.is_storage_like:
-            return True
-        elif self.is_storage_like and not other.is_storage_like:
-            return False
-        return self.label_name < other.label_name
-
-
-class BarManager(object):
-    def __init__(self, series):
-        self.series = series
-        self.bars = sorted(
-            [
-                Bar(
-                    label_name=x["label_name"],
-                    in_series=x["in"],
-                    out_series=x["out"],
-                    type=x["type"],
-                )
-                for x in series
-            ]
-        )
-
-    def calc_balance(self, ts, ts_series, t1, t2, net=False, invert=[]):
-        for b in self.bars:
-            b.calc_balance(ts, ts_series, t1=t1, t2=t2)
-            if net:
-                b.convert_to_net()
-            if b.label_name in invert:
-                b.invert()
-
-    @property
-    def x(self):
-        return np.arange(len(self.bars))
-
-    @property
-    def xlabels(self):
-        return [b.label_name for b in self.bars]
-
-    @property
-    def end_balance_in(self):
-        return [b.end_balance_in for b in self.bars]
-
-    @property
-    def end_balance_out(self):
-        return [b.end_balance_out for b in self.bars]
-
-
-class WaterbalanceItemTable(QTableView):
-    hoverExitAllRows = pyqtSignal()  # exit the whole widget
-    hoverExitRow = pyqtSignal(str)
-    hoverEnterRow = pyqtSignal(str)
-
-    def __init__(self, parent=None):
-        super().__init__(parent)
-        self.setStyleSheet("QTreeView::item:hover{background-color:#FFFF00;}")
-        self.setMouseTracking(True)
-        self.verticalHeader().hide()
-        self.horizontalHeader().setStretchLastSection(True)
-        self.setSelectionBehavior(QAbstractItemView.SelectRows)
-        self.model = None
-
-        self._last_hovered_row = -1
-        self.viewport().installEventFilter(self)
-
-    def on_close(self):
-        """
-        Unload the widget: stop mouse tracking and remove the event filter.
-        """
-        self.setMouseTracking(False)
-        self.viewport().removeEventFilter(self)
-
-    def closeEvent(self, event):
-        """
-        Override of QWidget.closeEvent to clean up before closing.
-        :param event: QEvent
-        """
-        self.on_close()
-        event.accept()
-
-    def eventFilter(self, widget, event):
-        result = QTableView.eventFilter(self, widget, event)
-        if widget is not self.viewport():
-            return result
-
-        if event.type() == QEvent.Leave:
-            self.hoverExitAllRows.emit()
-            new_row = -1
-        elif event.type() == QEvent.MouseMove:
-            new_row = self.indexAt(event.pos()).row()
-        else:
-            return result
-
-        old_row = self._last_hovered_row
-
-        if old_row >= 0 and new_row != old_row:
-            old_name = self.model.rows[old_row].name.value
-            self.hover_exit(old_row)
-            self.hoverExitRow.emit(old_name)
-
-        if new_row >= 0 and new_row != old_row:
-            new_name = self.model.rows[new_row].name.value
-            self.hover_enter(new_row)
-            self.hoverEnterRow.emit(new_name)
-
-        self._last_hovered_row = new_row
-        return result
-
-    def hover_enter(self, row_nr):
-        item = self.model.rows[row_nr]
-        name = item.name.value
-
-        if name in VOLUME_CHANGE_SERIE_NAMES:
-            item.fill_color.value = item.fill_color.value[:3] + [0]
-            item.pen_color.value = item.pen_color.value[:3] + [255]
-        else:
-            item.fill_color.value = item.fill_color.value[:3] + [220]
-            item.pen_color.value = item.pen_color.value[:3] + [255]
-
-    def hover_exit(self, row_nr):
-        item = self.model.rows[row_nr]
-        name = item.name.value
-
-        if name in VOLUME_CHANGE_SERIE_NAMES:
-            item.fill_color.value = item.fill_color.value[:3] + 
[0]
-            item.pen_color.value = item.pen_color.value[:3] + [180]
-        else:
-            item.fill_color.value = item.fill_color.value[:3] + [150]
-            item.pen_color.value = item.pen_color.value[:3] + [180]
-
-    def setModel(self, model):
-        super().setModel(model)
-        self.model = model
-        self.model.set_column_sizes_on_view(self)
-
-
-class WaterBalancePlotWidget(pg.PlotWidget):
-    def __init__(self, model, result):
-        super().__init__()
-        self.model = model
-        self.result = result
-        self.showGrid(True, True, 0.5)
-        self.hideButtons()
-        self.setLabel("bottom", "time", "hrs")
-        self.setLabel("left", "flow", "m³/s")
-        # Auto SI prefix scaling doesn't work properly with m3, m2 etc.
-        self.getAxis("left").enableAutoSIPrefix(False)
-
-        self._plot_data_items = None
-
-    def redraw_water_balance(self, time, time_label, values, values_label):
-        """
-        Each plot data item depends on the previous one so that it can be
-        stacked on top of it. Therefore items are added to the graph in
-        reversed order.
-        """
-        self.clear()
-        zeros = np.zeros(shape=(np.size(time, 0),))
-        zero_serie = pg.PlotDataItem(
-            x=time,
-            y=zeros,
-            connect="finite",
-            pen=pg.mkPen(color=QColor(0, 0, 0, 200), width=1),
-        )
-        self.addItem(zero_serie, ignoreBounds=True)
-
-        # All item.name.value entries (e.g. '1d-2d flow', 'pumps', 'rain')
-        # have both an 'in' and an 'out' flow: two lines that together form a
-        # graph. However, the volume change lines are summed into one line
-        # (so no separate in and out).
-        self._plot_data_items = defaultdict(dict)
-        for d7n in ["in", "out"]:
-            prev_serie = zeros
-            prev_pldi = zero_serie
-            for item in self.model.rows:
-                name = item.name.value
-                _plots = self._plot_data_items[name]
-                if item.active.value:
-                    if name in VOLUME_CHANGE_SERIE_NAMES:
-                        pen_color = item.pen_color.value
-                        not_cum_serie = (
-                            values[name]["values"]["in"] + values[name]["values"]["out"]
-                        )
-                        plot_item = pg.PlotDataItem(
-                            x=time,
-                            y=not_cum_serie,
-                            connect="finite",
-                            pen=pg.mkPen(
-                                color=QColor(*pen_color), width=4, style=Qt.DashDotLine
-                            ),
-                        )
-                        # only get 1 line (the sum of 'in' and 'out')
-                        _plots["sum"] = plot_item
-
-                    else:  # name not in VOLUME_CHANGE_SERIE_NAMES
-                        pen_color = item.pen_color.value
-                        fill_color = item.fill_color.value
-                        cum_serie = prev_serie + values[name]["values"][d7n]
-                        plot_item = pg.PlotDataItem(
-                            x=time,
-                            y=cum_serie,
-                            connect="finite",
-                            pen=pg.mkPen(color=QColor(*pen_color), width=1),
-                        )
-                        fill = pg.FillBetweenItem(
-                            prev_pldi, plot_item, pg.mkBrush(*fill_color)
-                        )
-                        # keep reference
-                        _plots[d7n] = plot_item
-                        _plots[d7n + "fill"] = fill
-                        prev_serie = cum_serie
-                        prev_pldi = plot_item
-
-        # add PlotItems to graph
-        for d7n in ["in", "out"]:
-            for item in reversed(self.model.rows):
-                name = item.name.value
-                _plots = self._plot_data_items[name]
-                if item.active.value:
-                    if name in VOLUME_CHANGE_SERIE_NAMES:
-                        self.addItem(_plots["sum"], ignoreBounds=True)
-                    else:  # name not in VOLUME_CHANGE_SERIE_NAMES
-                        self.addItem(_plots[d7n], ignoreBounds=True)
-                        self.addItem(_plots[d7n + "fill"], ignoreBounds=True)
-
-        # set range to contents
-        x_min = min(pdi.xData.min() for pdi in self.plotItem.listDataItems())
-        x_max = max(pdi.xData.max() for pdi in self.plotItem.listDataItems())
-        self.setXRange(x_min, x_max, padding=None, update=True)
-
-        y_min = min(pdi.yData.min() for pdi in self.plotItem.listDataItems())
-        y_max = max(pdi.yData.max() for pdi in self.plotItem.listDataItems())
-        self.setYRange(y_min, y_max, padding=None, update=True)
-
-        # one would say a simple autorange should work, but it does not
-        # self.autoRange(padding=0)
-
-        # set labels
-        
self.setLabel("left", *values_label) - self.setLabel("bottom", *time_label) - - # set labels for in and out fluxes TODO fix? - text_upper = pg.TextItem(text="out", anchor=(0, 1), angle=-90) - text_upper.setPos(0, 0) - text_lower = pg.TextItem(text="in", anchor=(1, 1), angle=-90) - text_lower.setPos(0, 0) - self.addItem(text_upper) - self.addItem(text_lower) - - def hover_enter_plot_highlight(self, name): - if name not in self._plot_data_items: # meaning it is not active - return - plots = self._plot_data_items[name] - item = [item for item in self.model.rows if item.name.value == name][0] - if "in" in plots: - plots["in"].setPen(color=item.pen_color.value, width=1) - plots["infill"].setBrush(pg.mkBrush(item.fill_color.value)) - if "out" in plots: - plots["out"].setPen(color=item.pen_color.value, width=1) - plots["outfill"].setBrush(pg.mkBrush(item.fill_color.value)) - if "sum" in plots: - plots["sum"].setPen( - color=item.pen_color.value, width=4, style=Qt.DashDotLine - ) - - def hover_exit_plot_highlight(self, name): - if name not in self._plot_data_items: # meaning it is not active - return - plots = self._plot_data_items[name] - item = [item for item in self.model.rows if item.name.value == name][0] - if "in" in plots: - plots["in"].setPen(color=item.pen_color.value, width=1) - plots["infill"].setBrush(pg.mkBrush(item.fill_color.value)) - if "out" in plots: - plots["out"].setPen(color=item.pen_color.value, width=1) - plots["outfill"].setBrush(pg.mkBrush(item.fill_color.value)) - if "sum" in plots: - plots["sum"].setPen( - color=item.pen_color.value, width=4, style=Qt.DashDotLine - ) - - -class WaterBalanceWidget(QDockWidget): - closingWidget = pyqtSignal() - - def __init__(self, title, iface, manager): - super().__init__(title) - - self.iface = iface - self.manager = manager - self.wb_polygon_highlight = None - - # setup ui - self.setup_ui(self) - - self.model = WaterbalanceItemModel() - self.model.dataChanged.connect(self.data_changed) - self.wb_item_table.setModel(self.model) - self.selection_vis = SelectionVisualisation(iface.mapCanvas()) - - # fill comboboxes with selections - self.agg_combo_box.insertItems(0, [AGG_FLOW, AGG_CUMULATIVE_FLOW]) - self.ts_units_combo_box.insertItems(0, ["hrs", "mins", "s"]) - - # add listeners - self.select_polygon_button.clicked.connect(self._set_map_tool) - self.chart_button.clicked.connect(self.show_barchart) - - self.wb_item_table.hoverEnterRow.connect(self.hover_enter_action) - self.wb_item_table.hoverExitRow.connect(self.hover_exit_action) - self.activate_all_button.clicked.connect(self.activate_layers) - self.deactivate_all_button.clicked.connect(self.deactivate_layers) - - self.tab_widget.currentChanged.connect(self.tab_changed) - self.agg_combo_box.currentIndexChanged.connect(self.combo_changed) - self.ts_units_combo_box.currentIndexChanged.connect(self.combo_changed) - - # initially turn on tool - self._set_map_tool() - - @property - def agg(self): - return self.agg_combo_box.currentText() - - @property - def time_units(self): - return self.ts_units_combo_box.currentText() - - def _get_io_series_net(self): - io_series_net = [ - x - for x in BC_IO_SERIES - if ( - x["type"] in ["2d", "2d_vert", "2d_groundwater", "1d"] - and "storage" not in x["label_name"] - and "exchange" not in x["label_name"] - and x["label_name"] != "1D: 2D flow to 1D" - and x["label_name"] != "2D: 2D flow to 1D" - and x["label_name"] != "1D: 2D flow to 1D (domain exchange)" - and x["label_name"] != "2D: 2D flow to 1D (domain exchange)" - ) - or x["type"] == "NETVOL" - ] - 
return io_series_net - - def _get_io_series_2d(self): - io_series_2d = [ - x - for x in BC_IO_SERIES - if x["type"] in ["2d", "2d_vert"] - and x["label_name"] != "1D: 2D flow to 1D" - and x["label_name"] != "1D: 2D flow to 1D (domain exchange)" - ] - return io_series_2d - - def _get_io_series_2d_groundwater(self): - io_series_2d_groundwater = [ - x for x in BC_IO_SERIES if x["type"] in ["2d_groundwater", "2d_vert"] - ] - return io_series_2d_groundwater - - def _get_io_series_1d(self): - io_series_1d = [ - x - for x in BC_IO_SERIES - if x["type"] == "1d" - and x["label_name"] != "2D: 2D flow to 1D" - and x["label_name"] != "2D: 2D flow to 1D (domain exchange)" - ] - return io_series_1d - - def show_barchart(self): - """ - Show a pop-up with a barchart based on the current temporal view in the - plot widget - """ - if not self.manager: - return - - # determine the time in seconds from the current plot state - plot_widget = self.tab_widget.currentWidget() - viewbox_state = plot_widget.getPlotItem().getViewBox().getState() - view_range = viewbox_state["viewRange"] - t1, t2 = view_range[0] - t1 = t1 * TIME_UNITS_TO_SECONDS[self.time_units] - t2 = t2 * TIME_UNITS_TO_SECONDS[self.time_units] - - self.wb_tabbed_view = QTabWidget() - - io_series_net = self._get_io_series_net() - io_series_2d = self._get_io_series_2d() - io_series_2d_groundwater = self._get_io_series_2d_groundwater() - io_series_1d = self._get_io_series_1d() - - for result in self.manager: - calc = self.manager[result] - tab_label = calc.label - time, flow = calc.time, calc.flow - relevant = functools.partial(calc.filter_series, "label_name") - - bm_net = BarManager(relevant(io_series_net)) - bm_2d = BarManager(relevant(io_series_2d)) - bm_2d_groundwater = BarManager(relevant(io_series_2d_groundwater)) - bm_1d = BarManager(relevant(io_series_1d)) - - bm_net.calc_balance(time, flow, t1, t2, net=True) - bm_2d.calc_balance(time, flow, t1, t2) - bm_2d_groundwater.calc_balance( - time, flow, t1, t2, invert=["in/exfiltration (domain exchange)"] - ) - bm_1d.calc_balance(time, flow, t1, t2) - - t_start = max(0, t1) - wb_barchart_widget = pg.GraphicsView(self.wb_tabbed_view) - - layout = pg.GraphicsLayout() - wb_barchart_widget.setCentralItem(layout) - text = "Water balance from t=%.2f to t=%.2f \n Model name: %s" % ( - t_start, - t2, - calc.wrapped_result.threedi_result.short_model_slug, - ) - layout.addLabel(text, row=0, col=0, colspan=3) - - def get_keyword_indexes(input_list, keyword): - """Return a list of indexes from `input_list` which contain the - `keyword`""" - bar_indexes_to_mark = [] - for index, label in enumerate(input_list): - if keyword in label: - bar_indexes_to_mark.append(index) - return bar_indexes_to_mark - - # We want to mark some bars with a different color. Labels with the key - # 'domain exchange' and the last label ('change in storage'). 
- domain_exchange_key = "(domain exchange)" - standard_in_brush = QBrush(QColor(0, 122, 204)) - standard_out_brush = QBrush(QColor(255, 128, 0)) - - domain_exchange_in_brush = QBrush( - QColor(0, 122, 204), style=Qt.BDiagPattern - ) # Qt.BDiagPattern - domain_exchange_in_brush.setTransform(QTransform().scale(0.01, 0.01)) - domain_exchange_out_brush = QBrush(QColor(255, 128, 0), style=Qt.BDiagPattern) - domain_exchange_out_brush.setTransform(QTransform().scale(0.01, 0.01)) - change_storage_brush = QBrush(QColor("grey")) - - # ##### - # Net # - # ##### - - domain_exchange_indexes = get_keyword_indexes( - bm_net.xlabels, domain_exchange_key - ) - in_brushes = [standard_in_brush] * (len(bm_net.xlabels) - 1) - for i in domain_exchange_indexes: - in_brushes[i] = domain_exchange_in_brush - in_brushes.append(change_storage_brush) - out_brushes = [standard_out_brush] * (len(bm_net.xlabels) - 1) - for i in domain_exchange_indexes: - out_brushes[i] = domain_exchange_out_brush - out_brushes.append(change_storage_brush) - - bg_net_in = pg.BarGraphItem( - x=bm_net.x, height=bm_net.end_balance_in, width=0.6, brushes=in_brushes - ) - bg_net_out = pg.BarGraphItem( - x=bm_net.x, height=bm_net.end_balance_out, width=0.6, brushes=out_brushes - ) - axis_net = RotateLabelAxisItem(25, "bottom") - net_plot = layout.addPlot( - row=1, col=0, colspan=2, axisItems={"bottom": axis_net} - ) - net_plot.addItem(bg_net_in) - net_plot.addItem(bg_net_out) - axis_net.setHeight(100) - axis_net.setTicks([list(zip(bm_net.x, bm_net.xlabels))]) - - net_plot.setTitle("Net water balance") - y_axis = net_plot.getAxis("left") - y_axis.setLabel("volume (m³)") - net_plot.getViewBox().setLimits(xMin=-1, xMax=max(bm_net.x) + 2) - - # # ###### - # # Logo # - # # ###### - - path_3di_logo = str(PLUGIN_DIR / "icons" / "icon.png") - logo_3di = QPixmap(path_3di_logo) - logo_3di = logo_3di.scaledToHeight(40) - label_3di = QLabel() - label_3di.setPixmap(logo_3di) - - path_topsector_logo = str(PLUGIN_DIR / "icons" / "topsector_small.png") - logo_topsector = QPixmap(path_topsector_logo) - logo_topsector = logo_topsector.scaledToHeight(40) - label_topsector = QLabel() - label_topsector.setPixmap(logo_topsector) - - path_deltares_logo = str(PLUGIN_DIR / "icons" / "deltares_small.png") - logo_deltares = QPixmap(path_deltares_logo) - logo_deltares = logo_deltares.scaledToHeight(40) - label_deltares = QLabel() - label_deltares.setPixmap(logo_deltares) - - logo_label_text = QLabel("Powered by 3Di, Topsector Water and Deltares") - - powered_by_widget = QWidget() - pallete = QPalette(QColor("white")) - powered_by_widget.setAutoFillBackground(True) - powered_by_widget.setPalette(pallete) - powered_by_layout = QVBoxLayout() - powered_by_widget.setMaximumHeight(130) - - logo_container = QWidget() - logo_container.setMaximumWidth(300) - logo_container_layout = QHBoxLayout() - logo_container_layout.addWidget(label_3di) - logo_container_layout.addWidget(label_topsector) - logo_container_layout.addWidget(label_deltares) - logo_container.setLayout(logo_container_layout) - - powered_by_layout.addWidget(logo_label_text) - powered_by_layout.addWidget(logo_container) - - powered_by_widget.setLayout(powered_by_layout) - logo_ProxyWidget = layout.scene().addWidget(powered_by_widget) - layout.addItem(logo_ProxyWidget, row=1, col=2) - - # # #### - # # 2D # - # # #### - - domain_exchange_indexes = get_keyword_indexes( - bm_2d.xlabels, domain_exchange_key - ) - in_brushes = [standard_in_brush] * (len(bm_2d.xlabels) - 1) - for i in domain_exchange_indexes: - in_brushes[i] = 
domain_exchange_in_brush - in_brushes.append(change_storage_brush) - out_brushes = [standard_out_brush] * (len(bm_2d.xlabels) - 1) - for i in domain_exchange_indexes: - out_brushes[i] = domain_exchange_out_brush - out_brushes.append(change_storage_brush) - - surface_in = pg.BarGraphItem( - x=bm_2d.x, height=bm_2d.end_balance_in, width=0.6, brushes=in_brushes - ) - surface_out = pg.BarGraphItem( - x=bm_2d.x, height=bm_2d.end_balance_out, width=0.6, brushes=out_brushes - ) - axis_surface = RotateLabelAxisItem(25, "bottom") - surface_plot = layout.addPlot(row=2, col=0, axisItems={"bottom": axis_surface}) - surface_plot.addItem(surface_in) - surface_plot.addItem(surface_out) - axis_surface.setHeight(100) - axis_surface.setTicks([list(zip(bm_net.x, bm_2d.xlabels))]) - - surface_plot.setTitle("2D surface water domain") - y_axis = surface_plot.getAxis("left") - y_axis.setLabel("volume (m³)") - - surface_plot.getViewBox().setLimits(xMin=-1, xMax=max(bm_2d.x) + 2) - - # # ################ - # # 2D groundwater # - # # ################ - - domain_exchange_indexes = get_keyword_indexes( - bm_2d_groundwater.xlabels, domain_exchange_key - ) - in_brushes = [standard_in_brush] * (len(bm_2d_groundwater.xlabels) - 1) - for i in domain_exchange_indexes: - in_brushes[i] = domain_exchange_in_brush - in_brushes.append(change_storage_brush) - out_brushes = [standard_out_brush] * (len(bm_2d_groundwater.xlabels) - 1) - for i in domain_exchange_indexes: - out_brushes[i] = domain_exchange_out_brush - out_brushes.append(change_storage_brush) - - groundwater_in = pg.BarGraphItem( - x=bm_2d_groundwater.x, - height=bm_2d_groundwater.end_balance_in, - width=0.6, - brushes=in_brushes, - ) - groundwater_out = pg.BarGraphItem( - x=bm_2d_groundwater.x, - height=bm_2d_groundwater.end_balance_out, - width=0.6, - brushes=out_brushes, - ) - axis_groundwater = RotateLabelAxisItem(25, "bottom") - groundwater_plot = layout.addPlot( - row=2, col=1, axisItems={"bottom": axis_groundwater} - ) - groundwater_plot.addItem(groundwater_in) - groundwater_plot.addItem(groundwater_out) - axis_groundwater.setHeight(100) - axis_groundwater.setTicks( - [list(zip(bm_2d_groundwater.x, bm_2d_groundwater.xlabels))] - ) - - groundwater_plot.setTitle("2D groundwater domain") - y_axis = groundwater_plot.getAxis("left") - y_axis.setLabel("volume (m³)") - groundwater_plot.getViewBox().setLimits( - xMin=-1, xMax=max(bm_2d_groundwater.x) + 2 - ) - - # # #### - # # 1D # - # # #### - - domain_exchange_indexes = get_keyword_indexes( - bm_1d.xlabels, domain_exchange_key - ) - in_brushes = [standard_in_brush] * (len(bm_1d.xlabels) - 1) - for i in domain_exchange_indexes: - in_brushes[i] = domain_exchange_in_brush - in_brushes.append(change_storage_brush) - out_brushes = [standard_out_brush] * (len(bm_1d.xlabels) - 1) - for i in domain_exchange_indexes: - out_brushes[i] = domain_exchange_out_brush - out_brushes.append(change_storage_brush) - - network1d_in = pg.BarGraphItem( - x=bm_1d.x, height=bm_1d.end_balance_in, width=0.6, brushes=in_brushes - ) - network1d_out = pg.BarGraphItem( - x=bm_1d.x, height=bm_1d.end_balance_out, width=0.6, brushes=out_brushes - ) - axis_network1d = RotateLabelAxisItem(25, "bottom") - network1d_plot = layout.addPlot( - row=2, col=2, axisItems={"bottom": axis_network1d} - ) - network1d_plot.addItem(network1d_in) - network1d_plot.addItem(network1d_out) - axis_network1d.setHeight(100) - axis_network1d.setTicks([list(zip(bm_1d.x, bm_1d.xlabels))]) - - network1d_plot.setTitle("1D network domain") - y_axis = network1d_plot.getAxis("left") 
- y_axis.setLabel("volume (m³)") - network1d_plot.getViewBox().setLimits(xMin=-1, xMax=max(bm_1d.x) + 2) - - # Link y-axes - surface_plot.setYLink(groundwater_plot) - surface_plot.setYLink(network1d_plot) - network1d_plot.setYLink(groundwater_plot) - - # Set y-range so all bars are visible - y_min = min( - bm_2d.end_balance_out - + bm_2d_groundwater.end_balance_out - + bm_1d.end_balance_out - ) - y_max = max( - bm_2d.end_balance_in - + bm_2d_groundwater.end_balance_in - + bm_1d.end_balance_in - ) - network1d_plot.setYRange(min=y_min, max=y_max) - - self.wb_tabbed_view.addTab(wb_barchart_widget, tab_label) - - self.wb_tabbed_view.setWindowTitle("Waterbalance") - self.wb_tabbed_view.resize(1000, 600) - self.wb_tabbed_view.show() - - def data_changed(self, index): - if self.model.columns[index.column()].name == "active": - self.update_water_balance(reset_model=False) - - def combo_changed(self, index): - self.update_water_balance(reset_model=False) - - def tab_changed(self, index): - self.update_water_balance(reset_model=False) - - def hover_enter_action(self, name): - if not self.manager: - return - - # plot highlight - self.tab_widget.currentWidget().hover_enter_plot_highlight(name) - - # map geometry highlight - result = self.tab_widget.currentWidget().result - calc = self.manager[result] - - # note that using getitem on qgs_lines and qgs_points works because - # they are defaultdict(list) - line_geoms = list(chain(*( - calc.qgs_lines[t] for t in SERIES_NAME_TO_LINE_TYPES.get(name, []) - ))) - point_geoms = list(chain(*( - calc.qgs_points[t] for t in SERIES_NAME_TO_NODE_TYPES.get(name, []) - ))) - - self.selection_vis.update(line_geoms, point_geoms) - - def hover_exit_action(self, name): - if not self.manager: - return - - # plot highlight - self.tab_widget.currentWidget().hover_exit_plot_highlight(name) - - # map geometry highlight - self.selection_vis.reset() - - def _set_map_tool(self): - self.iface.mapCanvas().setMapTool(self.map_tool_select_polygon) - - def _unset_map_tool(self): - self.iface.mapCanvas().unsetMapTool(self.map_tool_select_polygon) - - def activate_layers(self): - self.model.dataChanged.disconnect(self.data_changed) - for item in self.model.rows: - item.active.value = True - self.model.dataChanged.connect(self.data_changed) - self.update_water_balance(reset_model=False) - - def deactivate_layers(self): - self.model.dataChanged.disconnect(self.data_changed) - for item in self.model.rows: - item.active.value = False - self.model.dataChanged.connect(self.data_changed) - self.update_water_balance(reset_model=False) - - def get_table_data(self): - """ - Only the config, no actual result data. The link between item and data - will be the names of the series. This will be fed to insertRows on the - model. - """ - table_data = deepcopy(GRAPH_SERIES) - - for item in table_data: - item["active"] = True - item["fill_color"] = [ - int(c) for c in item["def_fill_color"].split(",") - ] - item["pen_color"] = [ - int(c) for c in item["def_pen_color"].split(",") - ] - - return table_data - - def update_water_balance(self, index=None, reset_model=False): - """ - Redraw plots after comboboxes or active tab changes. 
- """ - if not self.manager: - self.model.removeRows(0, len(self.model.rows)) - return - - plot_widget = self.tab_widget.currentWidget() - calc = self.manager[plot_widget.result] - - if reset_model: - table_data = calc.filter_series( - key="name", series=self.get_table_data(), - ) - self.model.removeRows(0, len(self.model.rows)) - self.model.insertRows(table_data) - - graph_data = calc.get_graph_data(agg=self.agg, time_units=self.time_units) - plot_widget.redraw_water_balance(**graph_data) - - def closeEvent(self, event): - self.select_polygon_button.clicked.disconnect(self._set_map_tool) - self.chart_button.clicked.disconnect(self.show_barchart) - - self.wb_item_table.hoverEnterRow.disconnect(self.hover_enter_action) - self.wb_item_table.hoverExitRow.disconnect(self.hover_exit_action) - self.activate_all_button.clicked.disconnect(self.activate_layers) - self.deactivate_all_button.clicked.disconnect(self.deactivate_layers) - - self.tab_widget.currentChanged.disconnect(self.tab_changed) - self.agg_combo_box.currentIndexChanged.disconnect(self.combo_changed) - self.ts_units_combo_box.currentIndexChanged.disconnect(self.combo_changed) - - self.unset_wb_polygon() - self._unset_map_tool() - self.closingWidget.emit() - event.accept() - - def setup_ui(self, dock_widget): - """ - initiate main Qt building blocks of interface - :param dock_widget: QDockWidget instance - """ - - dock_widget.setObjectName("dock_widget") - dock_widget.setAttribute(Qt.WA_DeleteOnClose) - - self.dock_widget_content = QWidget(self) - self.dock_widget_content.setObjectName("dockWidgetContent") - - self.main_vlayout = QVBoxLayout(self) - self.dock_widget_content.setLayout(self.main_vlayout) - - # add button to add objects to graphs - self.button_bar_hlayout = QHBoxLayout(self) - self.select_polygon_button = QPushButton("Select Polygon", self) - self.select_polygon_button.setCheckable(True) - self.button_bar_hlayout.addWidget(self.select_polygon_button) - self.chart_button = QPushButton(self) - self.button_bar_hlayout.addWidget(self.chart_button) - - self.agg_combo_box = QComboBox(self) - self.button_bar_hlayout.addWidget(self.agg_combo_box) - self.ts_units_combo_box = QComboBox(self) - self.button_bar_hlayout.addWidget(self.ts_units_combo_box) - - # now first add a QSpacerItem so that the QPushButton (added sub- - # sequently) are aligned on the right-side of the button_bar_hlayout - spacer_item = QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum) - self.button_bar_hlayout.addItem(spacer_item) - - self.activate_all_button = QPushButton(self) - self.button_bar_hlayout.addWidget( - self.activate_all_button, alignment=Qt.AlignRight - ) - - self.deactivate_all_button = QPushButton(self) - self.button_bar_hlayout.addWidget( - self.deactivate_all_button, alignment=Qt.AlignRight - ) - - self.main_vlayout.addLayout(self.button_bar_hlayout) - - # add tabWidget for graphWidgets - self.splitter = QSplitter(self) - - # tab widget for the plots - self.tab_widget = QTabWidget(self.splitter) - self.tab_widget.setMinimumSize(QSize(240, 250)) - - self.splitter.addWidget(self.tab_widget) - - # table - self.wb_item_table = WaterbalanceItemTable(self.splitter) - self.wb_item_table.setMinimumSize(QSize(120, 0)) - self.splitter.addWidget(self.wb_item_table) - - self.splitter.setSizes([360, 120]) - self.splitter.setCollapsible(0, False) - self.splitter.setStretchFactor(0, 8) - self.splitter.setCollapsible(1, False) - self.splitter.setStretchFactor(1, 1) - - self.main_vlayout.addWidget(self.splitter) - - # add dockwidget - 
dock_widget.setWidget(self.dock_widget_content) - self.chart_button.setText("Show total balance") - self.activate_all_button.setText("activate all") - self.deactivate_all_button.setText("deactivate all") - QMetaObject.connectSlotsByName(dock_widget) - - # add selection maptool - self.map_tool_select_polygon = SelectPolygonTool( - widget=self, canvas=self.iface.mapCanvas(), - ) - self.map_tool_select_polygon.setButton(self.select_polygon_button) - self.map_tool_select_polygon.setCursor(Qt.CrossCursor) - - def add_result(self, result, update=True): - if not self.manager.add_result(result): - return - plot_widget = WaterBalancePlotWidget(model=self.model, result=result) - tab_label = self.manager[result].label - self.tab_widget.addTab(plot_widget, tab_label) - if update: - self.update_water_balance(reset_model=True) - - def _get_tab_index(self, result): - tab_widget = self.tab_widget - for tab_index in range(tab_widget.count()): - if tab_widget.widget(tab_index).result is result: - return tab_index - - def remove_result(self, result): - if not self.manager.remove_result(result): - return - tab_index = self._get_tab_index(result) - update = tab_index == self.tab_widget.currentIndex() - self.tab_widget.removeTab(tab_index) - if update: - self.update_water_balance(reset_model=True) - - def change_grid(self, grid): - results = [] - grid.model().get_results_from_item(grid, False, results) - for result in results: - if result not in self.manager: - return - - tab_index = self._get_tab_index(result) - tab_label = self.manager[result].label - self.tab_widget.setTabText(tab_index, tab_label) - - def change_result(self, result): - if result not in self.manager: - return - tab_index = self._get_tab_index(result) - tab_label = self.manager[result].label - self.tab_widget.setTabText(tab_index, tab_label) - - def set_wb_polygon(self, polygon, layer): - """ Highlight and set the current water balance polygon.""" - - # highlight must be done before transform - highlight = QgsHighlight(self.iface.mapCanvas(), polygon, layer) - highlight.setColor(QColor(0, 0, 255, 127)) - # highlight.setWidth(3) - - self.wb_polygon_highlight = highlight - self.manager.polygon = PolygonWithCRS(polygon=polygon, crs=layer.crs()) - progress_bar = StatusProgressBar( - len(self.manager), "Calculating water balance", - ) - for idx, result in enumerate(self.manager): - self.add_result(result, update=False) - progress_bar.increase_progress() - - self.update_water_balance(reset_model=True) - - def unset_wb_polygon(self): - """ De-highlight and unset the current water balance polygon.""" - if self.manager.polygon is None: - return - self.iface.mapCanvas().scene().removeItem(self.wb_polygon_highlight) - self.wb_polygon_highlight = None - self.manager.polygon = None - self.tab_widget.clear() - self.model.removeRows(0, len(self.model.rows)) - - -class SelectionVisualisation(object): - """Visualize selected lines and points.""" - - def __init__(self, canvas, color=QCOLOR_RED): - self.canvas = canvas - self.color = color - self.vertex_markers = [] - self.lines = [] - self.points = [] - - @functools.cached_property - def rb_line(self): - rb_line = QgsRubberBand(self.canvas, QgsWkbTypes.LineGeometry) - rb_line.setColor(self.color) - rb_line.setLineStyle(Qt.DotLine) - rb_line.setWidth(3) - return rb_line - - def show(self): - # visualize lines - multiline = QgsGeometry().fromMultiPolylineXY(self.lines) - self.rb_line.setToGeometry(multiline, None) - # visualize points - for p in self.points: - marker = QgsVertexMarker(self.canvas) - 
marker.setCenter(p)
-            marker.setIconType(QgsVertexMarker.ICON_BOX)
-            marker.setColor(self.color)
-            marker.setVisible(True)
-            self.vertex_markers.append(marker)
-
-    def reset(self):
-        self.rb_line.reset(QgsWkbTypes.LineGeometry)
-        for m in self.vertex_markers:
-            m.setVisible(False)
-            # vertex markers are owned by the canvas, so we must explicitly
-            # delete them
-            self.canvas.scene().removeItem(m)
-        self.vertex_markers = []
-        self.lines = []
-        self.points = []
-
-    def update(self, lines, points):
-        """lines and points are lists of QgsPolylines and QgsPoints, respectively."""
-        self.reset()
-        self.lines = lines
-        self.points = points
-        self.show()
-
-    def close(self):
-        self.reset()
-        # delete the rubberband we've been re-using
-        self.canvas.scene().removeItem(self.rb_line)
-
-
-class SelectPolygonTool(QgsMapToolIdentify):
-    def __init__(self, widget, *args, **kwargs):
-        super().__init__(*args, **kwargs)
-        self.widget = widget
-
-        # select at most one feature
-        self.identifyMenu().setAllowMultipleReturn(False)
-
-    def canvasReleaseEvent(self, event):
-        self.widget.unset_wb_polygon()
-        layer_list = []
-        for layer in self.parent().layers():
-            if isinstance(layer, QgsVectorLayer):
-                if layer.wkbType() in POLYGON_TYPES:
-                    layer_list.append(layer)
-        identify_results = self.identify(
-            x=int(event.pos().x()),
-            y=int(event.pos().y()),
-            layerList=layer_list,
-            mode=self.IdentifyMode.LayerSelection,
-        )
-        if not identify_results:
-            msg = 'No polygons found in this location.'
-            messagebar_message(MSG_TITLE, msg, Qgis.Warning, 3)
-            return
-
-        identify_result = identify_results[0]
-        layer = identify_result.mLayer
-        feature = identify_result.mFeature
-
-        polygon = feature.geometry()
-        if not polygon.wkbType() in POLYGON_TYPES:
-            msg = 'Not a (suitable) polygon.'
-            messagebar_message(MSG_TITLE, msg, Qgis.Warning, 3)
-            return
-
-        self.widget.set_wb_polygon(polygon=polygon, layer=layer)
From 1d5747ee2c52f610e5dda969bdd6370bc8cd4595 Mon Sep 17 00:00:00 2001
From: leendertvanwolfswinkel
Date: Mon, 23 Dec 2024 13:01:03 +0100
Subject: [PATCH 13/17] Update dependencies.py

---
 dependencies.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/dependencies.py b/dependencies.py
index bc0f74d0..d7dbe992 100644
--- a/dependencies.py
+++ b/dependencies.py
@@ -59,7 +59,7 @@
     Dependency("Mako", "mako", "", False),
     Dependency("cftime", "cftime", ">=1.5.0", False),  # threedigrid[results]
     Dependency("alembic", "alembic", "==1.8.*", False),
-    Dependency("threedigrid", "threedigrid", "==2.2.*", False),
+    Dependency("threedigrid", "threedigrid", "==2.3.*", False),
     Dependency("threedi-schema", "threedi_schema", "==0.230.0.dev0", False),
     Dependency("threedidepth", "threedidepth", "==0.6.3", False),
     Dependency("click", "click", ">=8.0", False),

From 5bf6365387f33f9cec7d96250bf12bbdf92409f3 Mon Sep 17 00:00:00 2001
From: leendertvanwolfswinkel
Date: Mon, 23 Dec 2024 13:07:15 +0100
Subject: [PATCH 14/17] Update dependencies.py

---
 dependencies.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/dependencies.py b/dependencies.py
index d7dbe992..e13fa740 100644
--- a/dependencies.py
+++ b/dependencies.py
@@ -73,6 +73,7 @@
     Dependency("Shapely", "shapely", ">=2.0.0", False),
     Dependency("h5netcdf", "h5netcdf", "", False),
     Dependency("greenlet", "greenlet", "!=0.4.17", False),
+    Dependency("threedi-mi-utils", "threedi_mi_utils", "==0.1.5", False),
 ]

 # On Windows, the hdf5 binary and thus h5py version depends on the QGis version
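A pin such as ==0.1.5 above can be verified at runtime with pkg_resources, in the same spirit as the _check_presence() helper in the deleted deps_write.py further down; the satisfied() wrapper below is only a sketch, not plugin code:

    import pkg_resources

    def satisfied(name: str, constraint: str) -> bool:
        """Return True if an installed distribution matches name + constraint."""
        try:
            pkg_resources.require(name + constraint)  # e.g. "threedi-mi-utils==0.1.5"
            return True
        except (pkg_resources.DistributionNotFound, pkg_resources.VersionConflict):
            return False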
From b2f9b4b27a0936ab95e0b6f31a825745fd71776a Mon Sep 17 00:00:00 2001
From: leendertvanwolfswinkel
Date: Mon, 23 Dec 2024 13:13:49 +0100
Subject: [PATCH 15/17] Update providers.py

---
 processing/providers.py | 10 ----------
 1 file changed, 10 deletions(-)

diff --git a/processing/providers.py b/processing/providers.py
index 7b12162f..ad1cecd5 100644
--- a/processing/providers.py
+++ b/processing/providers.py
@@ -14,16 +14,6 @@
 from threedi_results_analysis.processing.rasters_to_netcdf_algorithm import (
     RastersToNetCDFAlgorithm,
 )
-from threedi_results_analysis.processing.schematisation_algorithms import (
-    # CheckSchematisationAlgorithm,
-    MigrateAlgorithm,
-    # ImportSufHydAlgorithm,
-    # GuessIndicatorAlgorithm,
-    # ImportHydXAlgorithm,
-)
-from threedi_results_analysis.processing.schematisation_algorithms import (
-    ImportSufHydAlgorithm,
-)
 from threedi_results_analysis.processing.schematisation_algorithms import (
     MigrateAlgorithm,
 )

From c25941bfd9a196e4fb5e19fae2fe7cf79454d423 Mon Sep 17 00:00:00 2001
From: leendertvanwolfswinkel
Date: Mon, 23 Dec 2024 13:41:27 +0100
Subject: [PATCH 16/17] More pruning

---
 deps_write.py                           | 603 ------------------------
 processing/grid_creation_algorithm.py   | 141 ------
 processing/providers.py                 |   2 -
 processing/schematisation_algorithms.py | 481 -------------------
 tests/test_dependencies.py              |  17 -
 5 files changed, 1244 deletions(-)
 delete mode 100644 deps_write.py
 delete mode 100644 processing/grid_creation_algorithm.py

diff --git a/deps_write.py b/deps_write.py
deleted file mode 100644
index 9bb5b382..00000000
--- a/deps_write.py
+++ /dev/null
@@ -1,603 +0,0 @@
-"""Handle dependencies: installation and checking/logging.
-
-See :doc:`linked_external-dependencies_readme`
-(``external-dependencies/README.rst``) for a full explanation of the
-dependency handling.
-
-``python3 dependencies.py`` runs ``generate_constraints_txt()``: it generates
-``constraints.txt``.
-
-:py:func:`ensure_everything_installed()` checks if :py:data:`DEPENDENCIES` are
-installed and installs them if needed.
-
-:py:func:`check_importability()` double-checks if everything is importable. It also
-logs the locations.
-
-Note that we use *logging* in ``check_importability()`` as we want to have the
-result in the logfile. The rest of the module uses ``print()`` statements
-because it gets executed before any logging has been configured.
-
-As we're called directly from ``__init__.py``, the imports should be
-restricted. No qgis message boxes and such!
-
-"""
-from collections import namedtuple
-from pathlib import Path
-
-import importlib
-import logging
-import os
-import pkg_resources
-import platform
-import setuptools  # noqa: https://github.com/pypa/setuptools/issues/2993
-import shutil
-import subprocess
-import sys
-import tarfile
-
-
-# in case the dependency is a tar, the constraint should be the
-# explicit version (e.g. "==3.8.0")
-Dependency = namedtuple("Dependency", ["name", "package", "constraint", "tar"])
-
-#: List of expected dependencies.
-DEPENDENCIES = [
-    Dependency("SQLAlchemy", "sqlalchemy", "==2.0.6", False),
-    Dependency("GeoAlchemy2", "geoalchemy2", "==0.15.*", False),
-    Dependency("pyqtgraph", "pyqtgraph", ">=0.13.2", False),
-    Dependency(
-        "importlib-resources", "importlib_resources", "", False
-    ),  # backward compat. alembic
-    Dependency(
-        "zipp", "zipp", "", False
-    ),  # backward compat. 
alembic - Dependency("Mako", "mako", "", False), - Dependency("cftime", "cftime", ">=1.5.0", False), # threedigrid[results] - Dependency("alembic", "alembic", "==1.8.*", False), - Dependency("threedigrid", "threedigrid", "==2.2.*", False), - Dependency("threedi-schema", "threedi_schema", "==0.230.0.dev0", False), - Dependency("threedidepth", "threedidepth", "==0.6.3", False), - Dependency("click", "click", ">=8.0", False), - Dependency("packaging", "packaging", "", False), - Dependency("typing-extensions", "typing_extensions", ">=4.2.0", False), - Dependency( - "colorama", "colorama", "", False - ), # dep of click and threedi-modelchecker (windows) - Dependency("networkx", "networkx", "", False), - Dependency("condenser", "condenser", ">=0.2.1", False), - Dependency("Shapely", "shapely", ">=2.0.0", False), - Dependency("threedigrid-builder", "threedigrid_builder", "==1.17.*", False), - Dependency("h5netcdf", "h5netcdf", "", False), - Dependency("greenlet", "greenlet", "!=0.4.17", False), - Dependency("threedi-mi-utils", "threedi_mi_utils", "==0.1.4", False), -] - -# On Windows, the hdf5 binary and thus h5py version depends on the QGis version -# QGis upgraded from hdf5 == 1.10.7 to hdf5 == 1.14.0 in QGis 3.28.6 -QGIS_VERSION = 34000 -if QGIS_VERSION < 32806 and platform.system() == "Windows": - SUPPORTED_HDF5_VERSIONS = ["1.10.7"] - H5PY_DEPENDENCY = Dependency("h5py", "h5py", "==2.10.0", False) -elif QGIS_VERSION >= 34000 and platform.system() == "Windows": - SUPPORTED_HDF5_VERSIONS = ["1.14.0"] - H5PY_DEPENDENCY = Dependency("h5py", "h5py", "==3.10.0", False) -else: - SUPPORTED_HDF5_VERSIONS = ["1.14.0"] - H5PY_DEPENDENCY = Dependency("h5py", "h5py", "==3.8.0", True) - -if QGIS_VERSION < 32811 and platform.system() == "Windows": - WINDOWS_PLATFORM_DEPENDENCIES = [ - Dependency("scipy", "scipy", "==1.6.2", True), - ] -elif QGIS_VERSION >= 34000 and platform.system() == "Windows": - WINDOWS_PLATFORM_DEPENDENCIES = [ - Dependency("scipy", "scipy", "==1.13.0", True), - ] -else: - WINDOWS_PLATFORM_DEPENDENCIES = [ - Dependency("scipy", "scipy", "==1.10.1", False), - ] - -# If you add a dependency, also adjust external-dependencies/populate.sh -INTERESTING_IMPORTS = ["numpy", "osgeo", "pip", "setuptools"] - -OUR_DIR = Path(__file__).parent - -logger = logging.getLogger(__name__) - - - - -def ensure_everything_installed(): - """Check if DEPENDENCIES are installed and install them if missing.""" - - _remove_old_distributions(DEPENDENCIES + WINDOWS_PLATFORM_DEPENDENCIES + [H5PY_DEPENDENCY], _prev_dependencies_target_dir()) - - # If required, create deps folder and prepend to the path - target_dir = _dependencies_target_dir(create=True) - if str(target_dir) not in sys.path: - print(f"Prepending {target_dir} to sys.path") - sys.path.insert(0, str(target_dir)) - - _refresh_python_import_mechanism() - - profile_python_names = [item.name for item in _dependencies_target_dir().iterdir()] - print("Contents of our deps dir:\n %s" % "\n ".join(profile_python_names)) - - print("sys.path:") - for directory in sys.path: - print(" - %s" % directory) - - _ensure_prerequisite_is_installed() - - missing = _check_presence(DEPENDENCIES) - restart_required = False - if platform.system() == "Windows": - missing += _check_presence(WINDOWS_PLATFORM_DEPENDENCIES) - if not _ensure_h5py_installed(): - restart_required = True - - if missing: - print("Missing dependencies:") - for deps in missing: - print(deps.name) - - try: - _install_dependencies(missing, target_dir=target_dir) - except RuntimeError: - # In case some 
libraries are already imported, we cannot uninstall
-            # because QGIS acquires a lock on dll/pyd-files. Therefore
-            # we need to restart Qgis.
-            restart_required = True
-            pass
-
-    restart_marker = Path(target_dir / "restarted.marker")
-
-    if restart_required or not restart_marker.exists():
-        if _is_windows():
-            pass
-        # Always update the import mechanism
-        _refresh_python_import_mechanism()
-
-    else:
-        print("Dependencies up to date")
-
-
-def _ensure_h5py_installed():
-    """
-    On Windows Qgis comes with an HDF5 version installed.
-    This plugin uses the h5py python package, which is built against a specific version
-    of HDF5. The Qgis HDF5 version and the HDF5 version of the h5py package must be the
-    same, otherwise it will not work. In the external-dependencies folder we supply a
-    Windows version of h5py built using HDF5 1.10.7. On pypi there is no h5py 2.10.0
-    package available built with Python 3.9 and HDF5 1.10.7, so we need to create such
-    a wheel ourselves.
-
-    The following situations can occur:
-
-                                        | QGIS HDF5 = 1.10.7 | QGIS HDF5 = 1.14.0
-    ------------------------------------|--------------------|-------------------
-    h5py built with 1.10.7              | A: Good            | B: Qgis crash
-    h5py built with 1.14.0              | A: Qgis crash      | B: Good
-    h5py built with other HDF5 version  | A: Qgis crash      | B: Qgis crash
-
-    The two situations are marked A and B in the table above.
-
-    In version 3.28.6, QGis updated their HDF5.dll binary from 1.10.7 to 1.14.0.
-
-    When h5py is built for the qgis-included HDF5 DLL,
-    everything is good and the plugin can be loaded without any problems.
-
-    A crash occurs when a user upgrades/downgrades their Qgis version while
-    ThreediToolbox is already installed with a specific version of h5py.
-    In these cases we also need to upgrade/downgrade the h5py version installed with
-    ThreediToolbox.
-
-    We use the H5pyMarker to mark the installed h5py version. This is because we cannot
-    check the version by importing h5py, as Qgis will crash if the HDF5 and h5py
-    binaries do not match.
-    """
-    if QGIS_VERSION < 32806 and platform.system() == "Windows":
-        hdf5_version = "1.10.7"
-    else:
-        hdf5_version = "1.14.0"
-    h5py_missing = _check_presence([H5PY_DEPENDENCY])
-    marker_version = H5pyMarker.version()
-    if h5py_missing:
-        return _install_h5py(hdf5_version)
-
-    if hdf5_version in SUPPORTED_HDF5_VERSIONS:
-        if marker_version == hdf5_version:
-            # Do nothing
-            pass
-        else:
-            return _install_h5py(hdf5_version)
-
-    return True
-
-
-def _install_h5py(hdf5_version: str):
-    if hdf5_version not in SUPPORTED_HDF5_VERSIONS:
-        # raise an error because we cannot continue
-        message = (
-            f"Unsupported HDF5 version: {hdf5_version}. "
-            f"The following HDF5 versions are supported: {SUPPORTED_HDF5_VERSIONS}"
-        )
-        raise RuntimeError(message)
-
-    # In case the (old) h5py library is already imported, we cannot uninstall
-    # h5py because the windows acquires a lock on the *.dll-files. Therefore
-    # we need to restart Qgis.
-    # _uninstall_dependency(H5PY_DEPENDENCY)
-    try:
-        _install_dependencies([H5PY_DEPENDENCY], target_dir=_dependencies_target_dir())
-    except RuntimeError:
-        return False
-    H5pyMarker.create(hdf5_version)
-    return True
-
-
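# A sketch of the compatibility rule documented above: h5py reports the HDF5
# version it was built against via h5py.version.hdf5_version. Illustrative
# only; the plugin deliberately avoids importing h5py for this check, since a
# mismatch would crash QGIS before the version could be read.
import h5py

SUPPORTED_HDF5_VERSIONS = ["1.14.0"]  # as defined in dependencies.py

if h5py.version.hdf5_version not in SUPPORTED_HDF5_VERSIONS:
    raise RuntimeError(
        f"h5py was built against HDF5 {h5py.version.hdf5_version}, "
        f"expected one of {SUPPORTED_HDF5_VERSIONS}"
    )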
-class H5pyMarker:
-    """Marker indicating with which HDF5 binaries the h5py is installed.
-
-    Currently, there is 1 supported HDF5 version:
-    - 1.10.7: use h5py from the external-dependencies folder in this repo
-    """
-
-    H5PY_MARKER = OUR_DIR / ".h5py_marker"
-
-    @classmethod
-    def version(cls) -> str:
-        if cls.H5PY_MARKER.exists():
-            with open(cls.H5PY_MARKER, "r") as marker:
-                version = marker.readline()
-                return version
-        else:
-            return ""
-
-    @classmethod
-    def create(cls, version: str):
-        with open(cls.H5PY_MARKER, "w") as marker:
-            marker.write(version)
-
-    @classmethod
-    def remove(cls):
-        cls.H5PY_MARKER.unlink()
-
-
-def _ensure_prerequisite_is_installed(prerequisite="pip"):
-    """Check the basics: pip.
-
-    People using OSGEO custom installs sometimes exclude those
-    dependencies. Our installation scripts fail, then, because of the missing
-    'pip'.
-
-    """
-    try:
-        importlib.import_module(prerequisite)
-    except Exception as e:
-        msg = (
-            "%s. 'pip', which we need, is missing. It is normally included with "
-            "python. You are *probably* using a custom minimal OSGEO release. "
-            "Please re-install with 'pip' included."
-        ) % e
-        print(msg)
-        raise RuntimeError(msg)
-
-
-def _dependencies_target_dir(our_dir=OUR_DIR, create=False) -> Path:
-    """Return (and create) the desired deps folder
-
-    This is the 'deps' subdirectory of the plugin home folder
-
-    """
-    target_dir = our_dir / "deps"
-    if not target_dir.exists() and create:
-        print(f"Creating target dir {target_dir}")
-        target_dir.mkdir()
-
-    return target_dir
-
-
-def _prev_dependencies_target_dir(our_dir=OUR_DIR) -> Path:
-    """Return python dir inside our profile
-
-    Return two dirs up if we're inside the plugins dir. This was the
-    previous installation folder of the dependencies.
-    """
-    if "plugins" in str(our_dir).lower():
-        return OUR_DIR.parent.parent
-
-
-def _remove_old_distributions(dependencies, path):
-    """Remove old distributions of dependencies
-
-    In previous versions of the Toolbox, dependencies were
-    stored in the user's 'python' folder. This caused
-    versioning conflicts (as these dependencies were
-    not removed when the plugin was uninstalled).
-
-    Removes all folders and files that contain the
-    dependency name or package name.
-    """
-    succeeded = True
-    files_to_remove = [
-        node
-        for node in os.listdir(str(path))
-        for dependency in dependencies
-        if (dependency.package in node or dependency.name in node)
-    ]
-
-    for f in files_to_remove:
-        dep_path = str(path / f)
-
-        try:
-            if os.path.exists(dep_path):
-                if os.path.isfile(dep_path):
-                    print(f"Deleting file {f} from {path}")
-                    os.remove(dep_path)
-                else:
-                    print(f"Deleting folder {f} from {path}")
-                    shutil.rmtree(dep_path)
-        except PermissionError as e:
-            print(f"Unable to remove {dep_path} ({str(e)})")
-            succeeded = False
-
-    return succeeded
-
-
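# An illustrative round-trip of the marker pattern above: record the HDF5
# build after a successful h5py install and compare it on the next start-up
# (the version string here is just an example).
H5pyMarker.create("1.14.0")
assert H5pyMarker.version() == "1.14.0"
H5pyMarker.remove()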
-def check_importability():
-    """Check if the dependencies are importable and log the locations.
-
-    If something is not importable, which should not happen, it raises an
-    ImportError automatically, which is exactly what we want, because we
-    cannot continue.
-
-    """
-    packages = [dependency.package for dependency in DEPENDENCIES]
-    packages += INTERESTING_IMPORTS
-    logger.info("sys.path:\n    %s", "\n    ".join(sys.path))
-    deps_in_target_dir = [item.name for item in _dependencies_target_dir().iterdir()]
-    logger.info(
-        "Contents of our dependency dir:\n    %s",
-        "\n    ".join(deps_in_target_dir),
-    )
-    for package in packages:
-        imported_package = importlib.import_module(package)
-        logger.info(
-            "Import '%s' found at \n    '%s'", package, imported_package.__file__
-        )
-
-
-def _uninstall_dependency(dependency):
-    print("Trying to uninstall dependency %s" % dependency.name)
-    if dependency.tar:
-        # just remove the folders
-        path = _dependencies_target_dir()
-        items_to_remove = [node for node in os.listdir(str(path)) if (dependency.package in node or dependency.name in node)]
-        for f in items_to_remove:
-            dep_path = str(path / f)
-
-            try:
-                if os.path.exists(dep_path):
-                    if os.path.isfile(dep_path):
-                        print(f"Deleting file {f} from {path}")
-                        os.remove(dep_path)
-                    else:
-                        print(f"Deleting folder {f} from {path}")
-                        shutil.rmtree(dep_path)
-            except PermissionError as e:
-                print(f"Unable to remove {dep_path} ({str(e)})")
-        return
-
-    python_interpreter = _get_python_interpreter()
-    startupinfo = None
-    if _is_windows():
-        startupinfo = subprocess.STARTUPINFO()
-        # Prevents terminal screens from popping up
-        startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
-    process = subprocess.Popen(
-        [
-            python_interpreter,
-            "-m",
-            "pip",
-            "uninstall",
-            "--yes",
-            (dependency.name),
-        ],
-        universal_newlines=True,
-        stdin=subprocess.PIPE,
-        stdout=subprocess.PIPE,
-        stderr=subprocess.PIPE,
-        startupinfo=startupinfo,
-    )
-    # The input/output/error stream handling is a bit involved, but it is
-    # necessary because of a python bug on windows 7, see
-    # https://bugs.python.org/issue3905 .
-    i, o, e = (process.stdin, process.stdout, process.stderr)
-    i.close()
-    result = o.read() + e.read()
-    o.close()
-    e.close()
-    print(result)
-    exit_code = process.wait()
-    if exit_code:
-        print("Uninstalling %s failed" % dependency.name)
-
-
-def _install_dependencies(dependencies, target_dir):
-    if not dependencies:
-        return
-
-    python_interpreter = _get_python_interpreter()
-    base_command = [
-        python_interpreter,
-        "-m",
-        "pip",
-        "install",
-        "--upgrade",
-        "--no-deps",
-        "--find-links",
-        str(OUR_DIR / "external-dependencies"),
-        "--no-index",
-        "--target",
-        str(target_dir),
-    ]
-
-    dialog = None
-    bar = None
-    startupinfo = None
-    if _is_windows():
-        pass
-
-    for count, dependency in enumerate(dependencies):
-        _uninstall_dependency(dependency)
-        print("Installing '%s' into %s" % (dependency.name, target_dir))
-        if dialog:
-            dialog.setLabelText(f"Installing {dependency.name}")
-
-        if dependency.tar:
-            # Just extract the tar into the target folder; we already know it exists.
-            tar_path = f"{(OUR_DIR / 'external-dependencies')}/{dependency.name}-{dependency.constraint[2:]}.tar"
-            tar = tarfile.open(tar_path)
-            tar.extractall(str(target_dir))
-            tar.close()
-        else:
-            command = base_command + [dependency.name + dependency.constraint]
-
-            process = subprocess.Popen(
-                command,
-                universal_newlines=True,
-                stdin=subprocess.PIPE,
-                stdout=subprocess.PIPE,
-                stderr=subprocess.PIPE,
-                startupinfo=startupinfo,
-            )
-            # The input/output/error stream handling is a bit involved, but it is
-            # necessary because of a python bug on windows 7, see
-            # https://bugs.python.org/issue3905 .
- i, o, e = (process.stdin, process.stdout, process.stderr) - i.close() - result = o.read() + e.read() - o.close() - e.close() - print(result) - exit_code = process.wait() - if exit_code: - if dialog: - dialog.close() - - raise RuntimeError( - f"Installing {dependency.name} failed ({exit_code}) ({result})" - ) - - print("Installed %s into %s" % (dependency.name, target_dir)) - if dependency.package in sys.modules: - print("Unloading old %s module" % dependency.package) - del sys.modules[dependency.package] - # check_importability() will be called soon, which will import them again. - # By removing them from sys.modules, we prevent older versions from - # sticking around. - - if bar: - bar.setValue(int((count / len(dependencies)) * 100)) - bar.update() - - if dialog: - dialog.close() - - -def _is_windows(): - """Return whether we are starting from QGIS on Windows.""" - executable = sys.executable - _, filename = os.path.split(executable) - if "python3" in filename.lower(): - return False - elif "qgis" in filename.lower(): - if platform.system().lower() == "darwin": - return False - else: - return True - else: - raise EnvironmentError("Unexpected value for sys.executable: %s" % executable) - - -def _get_python_interpreter(): - """Return the path to the python3 interpreter. - - Under linux sys.executable is set to the python3 interpreter used by Qgis. - However, under Windows/Mac this is not the case and sys.executable refers to the - Qgis start-up script. - """ - interpreter = None - executable = sys.executable - directory, _ = os.path.split(executable) - if _is_windows(): - interpreter = os.path.join(directory, "python3.exe") - elif platform.system().lower() == "darwin": - interpreter = os.path.join(directory, "bin", "python3") - else: - interpreter = executable - - assert os.path.exists(interpreter) # safety check - return interpreter - - -def _check_presence(dependencies): - """Check if all dependencies are present. Return missing dependencies.""" - missing = [] - for dependency in dependencies: - requirement = dependency.name + dependency.constraint - print("Checking presence of %s..." % requirement) - try: - result = pkg_resources.require(requirement) - print("Requirement %s found: %s" % (requirement, result)) - except pkg_resources.DistributionNotFound as e: - print( - "Dependency '%s' (%s) not found (%s)" - % (dependency.name, dependency.constraint, str(e)) - ) - missing.append(dependency) - except pkg_resources.VersionConflict as e: - print( - 'Version conflict:\n' - f' Installed: {e.dist}\n' - f' Required: {e.req}' - ) - if isinstance(e, pkg_resources.ContextualVersionConflict): - print(f' By: {e.required_by}') - missing.append(dependency) - except Exception as e: - print( - "Installing dependency '%s' (%s) went wrong (%s)" - % (dependency.name, dependency.constraint, str(e)) - ) - missing.append(dependency) - return missing - - -def _refresh_python_import_mechanism(): - """Refresh the import mechanism. - - This is required when deps are dynamically installed/removed. The modules - 'importlib' and 'pkg_resources' need to update their internal data structures. - """ - # This function should be called if any modules are created/installed while your - # program is running to guarantee all finders will notice the new module’s existence. 
-    importlib.invalidate_caches()
-
-    # https://stackoverflow.com/questions/58612272/pkg-resources-get-distributionmymodule-version-not-updated-after-reload
-    # Apparently pkg_resources needs to be reloaded to be up-to-date with newly installed packages
-    importlib.reload(pkg_resources)
-
-
-def generate_constraints_txt(target_dir=OUR_DIR):
-    """Called from the ``__main__`` to generate ``constraints.txt``."""
-    constraints_file = target_dir / "constraints.txt"
-    lines = ["# Generated by dependencies.py"]
-    lines += [(dependency.name + dependency.constraint) for dependency in DEPENDENCIES]
-    lines.append("")
-    constraints_file.write_text("\n".join(lines))
-    print("Wrote constraints to %s" % constraints_file)
-
-
-if __name__ == "__main__":  # pragma: no cover
-    generate_constraints_txt()
diff --git a/processing/grid_creation_algorithm.py b/processing/grid_creation_algorithm.py
deleted file mode 100644
index bcef07c3..00000000
--- a/processing/grid_creation_algorithm.py
+++ /dev/null
@@ -1,141 +0,0 @@
-import os
-from collections import OrderedDict
-from qgis.PyQt.QtCore import QCoreApplication
-from qgis.core import (
-    QgsProcessingAlgorithm,
-    QgsProcessingException,
-    QgsProcessingParameterFile,
-    QgsProcessingParameterFileDestination,
-    QgsVectorLayer,
-)
-from threedi_results_analysis.processing.processing_utils import gridadmin2geopackage, load_computational_layers
-from threedigrid_builder import make_gridadmin, SchematisationError
-import logging
-import io
-
-
-class ThreeDiGenerateCompGridAlgorithm(QgsProcessingAlgorithm):
-    """
-    Generate a gridadmin.h5 file out of Spatialite database and convert it to GeoPackage.
-    Created layers will be added to the map canvas after successful conversion.
-    """
-
-    INPUT_SPATIALITE = "INPUT_SPATIALITE"
-    OUTPUT = "OUTPUT"
-    LAYERS_TO_ADD = OrderedDict()
-
-    def flags(self):
-        return super().flags() | QgsProcessingAlgorithm.FlagNoThreading
-
-    def tr(self, string):
-        return QCoreApplication.translate("Processing", string)
-
-    def createInstance(self):
-        return ThreeDiGenerateCompGridAlgorithm()
-
-    def name(self):
-        return "threedi_generate_computational_grid"
-
-    def displayName(self):
-        return self.tr("Computational grid from schematisation")
-
-    def group(self):
-        return self.tr("Computational Grid")
-
-    def groupId(self):
-        return "computational_grid"
-
-    def shortHelpString(self):
-        return self.tr("Generate computational grid from schematisation")
-
-    def initAlgorithm(self, config=None):
-
-        self.addParameter(
-            QgsProcessingParameterFile(
-                self.INPUT_SPATIALITE,
-                self.tr("Input SpatiaLite file"),
-                behavior=QgsProcessingParameterFile.File,
-                extension="sqlite",
-            )
-        )
-
-        self.addParameter(
-            QgsProcessingParameterFileDestination(
-                self.OUTPUT, self.tr("Output computational grid file"), fileFilter="*.gpkg",
-            )
-        )
-
-    def processAlgorithm(self, parameters, context, feedback):
-        input_spatialite = self.parameterAsString(parameters, self.INPUT_SPATIALITE, context)
-        if not input_spatialite:
-            raise QgsProcessingException(self.invalidSourceError(parameters, self.INPUT_SPATIALITE))
-
-        uri = input_spatialite + "|layername=v2_global_settings"
-        feedback.pushInfo(f"Reading DEM settings from: {uri}")
-        settings_lyr = QgsVectorLayer(uri, "glob_settings", "ogr")
-        if not settings_lyr.isValid():
-            err = f"Global Spatialite settings table could not be loaded from {uri}\n" "Check your Spatialite file."
-            raise QgsProcessingException(f"Incorrect input Spatialite file:\n{err}")
-        try:
-            settings_feat = next(settings_lyr.getFeatures())
-        except StopIteration:
-            err = f"No global settings entries in {uri}\n" "Check your Spatialite file."
-            raise QgsProcessingException(f"Incorrect input Spatialite file:\n{err}")
-        set_dem_rel_path = settings_feat["dem_file"]
-        if set_dem_rel_path:
-            input_spatialite_dir = os.path.dirname(input_spatialite)
-            set_dem_path = os.path.join(input_spatialite_dir, set_dem_rel_path)
-            feedback.pushInfo(f"DEM raster referenced in Spatialite settings:\n{set_dem_path}")
-            if not os.path.exists(set_dem_path):
-                set_dem_path = None
-                info = "The DEM referenced in the Spatialite settings doesn't exist - skipping."
-                feedback.pushInfo(info)
-        else:
-            set_dem_path = None
-            info = "There is no DEM file referenced in the Spatialite settings - skipping."
-            feedback.pushInfo(info)
-        output_gpkg_file = self.parameterAsFileOutput(parameters, self.OUTPUT, context)
-        if output_gpkg_file is None:
-            raise QgsProcessingException(self.invalidSourceError(parameters, self.OUTPUT))
-        # If user is writing to the temporary file then QGIS adds '.file' extension, so we need to change it.
-        output_file_without_extension = output_gpkg_file.rsplit(".", 1)[0]
-        gridadmin_file = f"{output_file_without_extension}.h5"
-        if output_gpkg_file.endswith(".file"):
-            output_gpkg_file = f"{output_file_without_extension}.gpkg"
-
-        def progress_rep(progress, info):
-            feedback.setProgress(int(progress * 100))
-            feedback.pushInfo(info)
-
-        # Capture threedigrid_builder logging
-        logger = logging.getLogger("threedigrid_builder.grid.connection_nodes")
-        assert logger.hasHandlers()  # Check whether we have the right one
-        log_capture_string = io.StringIO()
-        ch = logging.StreamHandler(log_capture_string)
-        ch.setFormatter(logging.Formatter(fmt='%(levelname)-8s :: %(message)s'))
-        ch.setLevel(logging.DEBUG)
-        logger.addHandler(ch)
-        try:
-            make_gridadmin(input_spatialite, set_dem_path, gridadmin_file, progress_callback=progress_rep)
-        except SchematisationError as e:
-            err = f"Creating grid file failed with the following error: {repr(e)}"
-            raise QgsProcessingException(err)
-        finally:
-            # Pull the contents back into a string and close the stream
-            log_contents = log_capture_string.getvalue()
-            log_capture_string.close()
-            logger.removeHandler(ch)
-        if log_contents:
-            feedback.pushWarning("3Di gridbuilder log:")
-            feedback.pushWarning(log_contents)
-
-        feedback.setProgress(0)
-        gpkg_layers = gridadmin2geopackage(gridadmin_file, output_gpkg_file, context, feedback)
-        self.LAYERS_TO_ADD.update(gpkg_layers)
-
-        return {self.OUTPUT: output_gpkg_file}
-
-    def postProcessAlgorithm(self, context, feedback):
-        project = context.project()
-        load_computational_layers(self.LAYERS_TO_ADD, project)
-        self.LAYERS_TO_ADD.clear()
-        return {}
diff --git a/processing/providers.py b/processing/providers.py
index ad1cecd5..47a1a7ff 100644
--- a/processing/providers.py
+++ b/processing/providers.py
@@ -3,7 +3,6 @@
 from qgis.PyQt.QtGui import QIcon
 # from threedi_results_analysis.processing.dwf_calculation_algorithm import DWFCalculatorAlgorithm
 from threedi_results_analysis.processing.gpkg_conversion_algorithm import ThreeDiConvertToGpkgAlgorithm
-from threedi_results_analysis.processing.grid_creation_algorithm import ThreeDiGenerateCompGridAlgorithm
 from threedi_results_analysis.processing.cross_sectional_discharge_algorithm import CrossSectionalDischargeAlgorithm
 from threedi_results_analysis.processing.leak_detector_algorithms import (
     DetectLeakingObstaclesAlgorithm,
@@ -41,7 +40,6 @@ def loadAlgorithms(self, *args, **kwargs):
         self.addAlgorithm(MigrateAlgorithm())
         # self.addAlgorithm(ImportHydXAlgorithm())
         self.addAlgorithm(ThreeDiConvertToGpkgAlgorithm())
-        self.addAlgorithm(ThreeDiGenerateCompGridAlgorithm())
         # self.addAlgorithm(ImportSufHydAlgorithm())
         # self.addAlgorithm(GuessIndicatorAlgorithm())
         self.addAlgorithm(CrossSectionalDischargeAlgorithm())
diff --git a/processing/schematisation_algorithms.py b/processing/schematisation_algorithms.py
index d55c33dc..580c6649 100644
--- a/processing/schematisation_algorithms.py
+++ b/processing/schematisation_algorithms.py
@@ -11,7 +11,6 @@
 ***************************************************************************
 """
-import csv
 import os
 import shutil
@@ -30,15 +29,11 @@
 from threedi_results_analysis.utils.utils import backup_sqlite
 from qgis.PyQt.QtCore import QCoreApplication
 from qgis.core import (
-    QgsProject,
     QgsProcessingAlgorithm,
     # QgsProcessingException,
-    QgsProcessingParameterBoolean,
     QgsProcessingParameterFile,
-    QgsProcessingParameterFileDestination,
     # QgsProcessingParameterFolderDestination,
     # QgsProcessingParameterString,
-    QgsVectorLayer,
 )
@@ -128,479 +123,3 @@ def tr(self, string):
 
     def createInstance(self):
         return MigrateAlgorithm()
-
-
-# class CheckSchematisationAlgorithm(QgsProcessingAlgorithm):
-    # """
-    # Run the schematisation checker
-    # """
-
-    # INPUT = "INPUT"
-    # OUTPUT = "OUTPUT"
-    # ADD_TO_PROJECT = "ADD_TO_PROJECT"
-
-    # def initAlgorithm(self, config):
-        # self.addParameter(
-            # QgsProcessingParameterFile(
-                # self.INPUT, self.tr("3Di Spatialite"), extension="sqlite"
-            # )
-        # )
-
-        # self.addParameter(
-            # QgsProcessingParameterFileDestination(
-                # self.OUTPUT, self.tr("Output"), fileFilter="csv"
-            # )
-        # )
-
-        # self.addParameter(
-            # QgsProcessingParameterBoolean(
-                # self.ADD_TO_PROJECT, self.tr("Add result to project"), defaultValue=True
-            # )
-        # )
-
-    # def processAlgorithm(self, parameters, context, feedback):
-        # self.add_to_project = self.parameterAsBoolean(
-            # parameters, self.ADD_TO_PROJECT, context
-        # )
-        # self.output_file_path = None
-        # input_filename = self.parameterAsFile(parameters, self.INPUT, context)
-        # threedi_db = get_threedi_database(filename=input_filename, feedback=feedback)
-        # if not threedi_db:
-            # return {self.OUTPUT: None}
-        # try:
-            # model_checker = ThreediModelChecker(threedi_db)
-        # except errors.MigrationMissingError:
-            # feedback.pushWarning(
-                # "The selected 3Di model does not have the latest migration. Please "
-                # "migrate your model to the latest version."
-            # )
-            # return {self.OUTPUT: None}
-        # schema = threedi_db.schema
-        # schema.set_spatial_indexes()
-        # generated_output_file_path = self.parameterAsFileOutput(
-            # parameters, self.OUTPUT, context
-        # )
-        # self.output_file_path = f"{os.path.splitext(generated_output_file_path)[0]}.csv"
-        # session = model_checker.db.get_session()
-        # session.model_checker_context = model_checker.context
-        # total_checks = len(model_checker.config.checks)
-        # progress_per_check = 100.0 / total_checks
-        # checks_passed = 0
-        # try:
-            # with open(self.output_file_path, "w", newline="") as output_file:
-                # writer = csv.writer(output_file)
-                # writer.writerow(
-                    # [
-                        # "level",
-                        # "error_code",
-                        # "id",
-                        # "table",
-                        # "column",
-                        # "value",
-                        # "description",
-                    # ]
-                # )
-                # for i, check in enumerate(model_checker.checks(level="info")):
-                    # model_errors = check.get_invalid(session)
-                    # for error_row in model_errors:
-                        # writer.writerow(
-                            # [
-                                # check.level.name,
-                                # check.error_code,
-                                # error_row.id,
-                                # check.table.name,
-                                # check.column.name,
-                                # getattr(error_row, check.column.name),
-                                # check.description(),
-                            # ]
-                        # )
-                    # checks_passed += 1
-                    # feedback.setProgress(int(checks_passed * progress_per_check))
-        # except PermissionError:
-            # # PermissionError happens for example when a user has the file already open
-            # # with Excel on Windows, which locks the file.
-            # feedback.pushWarning(
-                # f"Not enough permissions to write the file '{self.output_file_path}'.\n\n"
-                # "The file may be used by another program. Please close all "
-                # "other programs using the file or select another output "
-                # "file."
-            # )
-            # return {self.OUTPUT: None}
-
-        # return {self.OUTPUT: self.output_file_path}
-
-    # def postProcessAlgorithm(self, context, feedback):
-        # if self.add_to_project:
-            # if self.output_file_path:
-                # result_layer = QgsVectorLayer(
-                    # self.output_file_path, "3Di schematisation errors"
-                # )
-                # QgsProject.instance().addMapLayer(result_layer)
-        # return {self.OUTPUT: self.output_file_path}

    # def name(self):
        # """
        # Returns the algorithm name, used for identifying the algorithm. This
        # string should be fixed for the algorithm, and must not be localised.
        # The name should be unique within each provider. Names should contain
        # lowercase alphanumeric characters only and no spaces or other
        # formatting characters.
        # """
        # return "check_schematisation"

    # def displayName(self):
        # """
        # Returns the translated algorithm name, which should be used for any
        # user-visible display of the algorithm name.
        # """
        # return self.tr("Check Schematisation")

    # def group(self):
        # """
        # Returns the name of the group this algorithm belongs to. This string
        # should be localised.
        # """
        # return self.tr(self.groupId())

    # def groupId(self):
        # """
        # Returns the unique ID of the group this algorithm belongs to. This
        # string should be fixed for the algorithm, and must not be localised.
        # The group id should be unique within each provider. Group id should
        # contain lowercase alphanumeric characters only and no spaces or other
        # formatting characters.
        # """
        # return "Schematisation"

    # def tr(self, string):
        # return QCoreApplication.translate("Processing", string)

    # def createInstance(self):
        # return CheckSchematisationAlgorithm()


# class ImportSufHydAlgorithm(QgsProcessingAlgorithm):
#     """
#     Import data from SufHyd to a 3Di Spatialite
#     """
#
#     INPUT_SUFHYD_FILE = "INPUT_SUFHYD_FILE"
#     TARGET_SQLITE = "TARGET_SQLITE"
#
#     def initAlgorithm(self, config):
#         self.addParameter(
#             QgsProcessingParameterFile(self.INPUT_SUFHYD_FILE, self.tr("Sufhyd file"), extension="hyd"))
#
#         self.addParameter(
#             QgsProcessingParameterFile(
#                 self.TARGET_SQLITE,
#                 "Target 3Di Sqlite",
#                 extension="sqlite"
#             )
#         )
#
#     def processAlgorithm(self, parameters, context, feedback):
#         sufhyd_file = self.parameterAsString(parameters, self.INPUT_SUFHYD_FILE, context)
#         out_path = self.parameterAsFile(parameters, self.TARGET_SQLITE, context)
#         threedi_db = get_threedi_database(filename=out_path, feedback=feedback)
#         if not threedi_db:
#             return {}
#         try:
#             schema = threedi_db.schema
#             schema.validate_schema()
#
#         except errors.MigrationMissingError:
#             feedback.pushWarning(
#                 "The selected 3Di spatialite does not have the latest database schema version. Please migrate this "
#                 "spatialite and try again: Processing > Toolbox > 3Di > Schematisation > Migrate spatialite"
#             )
#             return {}
#
#         importer = Importer(sufhyd_file, threedi_db)
#         importer.run_import()
#
#         return {}
#
#     def name(self):
#         return "import_sufhyd"
#
#     def displayName(self):
#         return self.tr("Import Sufhyd")
#
#     def group(self):
#         return self.tr(self.groupId())
#
#     def groupId(self):
#         return "Schematisation"
#
#     def tr(self, string):
#         return QCoreApplication.translate("Processing", string)
#
#     def createInstance(self):
#         return ImportSufHydAlgorithm()
#
#
# class GuessIndicatorAlgorithm(QgsProcessingAlgorithm):
#     """
#     Guess manhole indicator, pipe friction and manhole storage
#     area.
#     """
#
#     TARGET_SQLITE = "TARGET_SQLITE"
#     PIPE_FRICTION = "PIPE_FRICTION"
#     MANHOLE_INDICATOR = "MANHOLE_INDICATOR"
#     MANHOLE_AREA = "MANHOLE_AREA"
#     ONLY_NULL_FIELDS = "ONLY_NULL_FIELDS"
#
#     def initAlgorithm(self, config):
#
#         self.addParameter(
#             QgsProcessingParameterFile(
#                 self.TARGET_SQLITE,
#                 "Target 3Di Sqlite",
#                 extension="sqlite"
#             )
#         )
#
#         self.addParameter(
#             QgsProcessingParameterBoolean(
#                 name=self.PIPE_FRICTION,
#                 description="Pipe friction",
#                 defaultValue=True,
#             )
#         )
#
#         self.addParameter(
#             QgsProcessingParameterBoolean(
#                 name=self.MANHOLE_INDICATOR,
#                 description="Manhole indicator",
#                 defaultValue=True,
#             )
#         )
#
#         self.addParameter(
#             QgsProcessingParameterBoolean(
#                 name=self.MANHOLE_AREA,
#                 description="Manhole area (only fills NULL fields)",
#                 defaultValue=True,
#             )
#         )
#
#         self.addParameter(
#             QgsProcessingParameterBoolean(
#                 name=self.ONLY_NULL_FIELDS,
#                 description="Only fill NULL fields",
#                 defaultValue=True,
#             )
#         )
#
#     def processAlgorithm(self, parameters, context, feedback):
#         out_path = self.parameterAsFile(parameters, self.TARGET_SQLITE, context)
#         threedi_db = get_threedi_database(filename=out_path, feedback=feedback)
#         if not threedi_db:
#             return {}
#         try:
#             schema = threedi_db.schema
#             schema.validate_schema()
#
#         except errors.MigrationMissingError:
#             feedback.pushWarning(
#                 "The selected 3Di spatialite does not have the latest database schema version. Please migrate this "
#                 "spatialite and try again: Processing > Toolbox > 3Di > Schematisation > Migrate spatialite"
#             )
#             return {}
#
#         checks = []
#
#         if parameters[self.MANHOLE_INDICATOR]:
#             checks.append("manhole_indicator")
#
#         if parameters[self.PIPE_FRICTION]:
#             checks.append("pipe_friction")
#
#         if parameters[self.MANHOLE_AREA]:
#             checks.append("manhole_area")
#
#         guesser = guess_indicators_utils.Guesser(threedi_db)
#         msg = guesser.run(checks, parameters[self.ONLY_NULL_FIELDS])
#
#         feedback.pushInfo(f"Guess indicators ready: {msg}")
#
#         return {}
#
#     def name(self):
#         return "guess_indicators"
#
#     def displayName(self):
#         return self.tr("Guess Indicators")
#
#     def group(self):
#         return self.tr(self.groupId())
#
#     def groupId(self):
#         return "Schematisation"
#
#     def tr(self, string):
#         return QCoreApplication.translate("Processing", string)
#
#     def createInstance(self):
#         return GuessIndicatorAlgorithm()
#
#
# class ImportHydXAlgorithm(QgsProcessingAlgorithm):
#     """
#     Import data from GWSW HydX to a 3Di Spatialite
#     """
#
#     INPUT_DATASET_NAME = "INPUT_DATASET_NAME"
#     HYDX_DOWNLOAD_DIRECTORY = "HYDX_DOWNLOAD_DIRECTORY"
#     INPUT_HYDX_DIRECTORY = "INPUT_HYDX_DIRECTORY"
#     TARGET_SQLITE = "TARGET_SQLITE"
#
#     def initAlgorithm(self, config):
#         self.addParameter(
#             QgsProcessingParameterFile(
#                 self.TARGET_SQLITE, "Target 3Di Spatialite", extension="sqlite"
#             )
#         )
#
#         self.addParameter(
#             QgsProcessingParameterFile(
#                 self.INPUT_HYDX_DIRECTORY,
#                 "GWSW HydX directory (local)",
#                 behavior=QgsProcessingParameterFile.Folder,
#                 optional=True,
#             )
#         )
#
#         self.addParameter(
#             QgsProcessingParameterString(
#                 self.INPUT_DATASET_NAME, "GWSW dataset name (online)", optional=True
#             )
#         )
#
#         self.addParameter(
#             QgsProcessingParameterFolderDestination(
#                 self.HYDX_DOWNLOAD_DIRECTORY,
#                 "Destination directory for GWSW HydX dataset download",
#                 optional=True,
#             )
#         )
#
#     def processAlgorithm(self, parameters, context, feedback):
#         hydx_dataset_name = self.parameterAsString(
#             parameters, self.INPUT_DATASET_NAME, context
#         )
#         hydx_download_dir = self.parameterAsString(
#             parameters, self.HYDX_DOWNLOAD_DIRECTORY, context
#         )
#         hydx_path = self.parameterAsString(
#             parameters, self.INPUT_HYDX_DIRECTORY, context
#         )
#         out_path = self.parameterAsFile(parameters, self.TARGET_SQLITE, context)
#         threedi_db = get_threedi_database(filename=out_path, feedback=feedback)
#         if not threedi_db:
#             raise QgsProcessingException(
#                 f"Unable to connect to 3Di spatialite '{out_path}'"
#             )
#         try:
#             schema = threedi_db.schema
#             schema.validate_schema()
#
#         except errors.MigrationMissingError:
#             raise QgsProcessingException(
#                 "The selected 3Di spatialite does not have the latest database schema version. Please migrate this "
#                 "spatialite and try again: Processing > Toolbox > 3Di > Schematisation > Migrate spatialite"
#             )
#         if not (hydx_dataset_name or hydx_path):
#             raise QgsProcessingException(
#                 "Either 'GWSW HydX directory (local)' or 'GWSW dataset name (online)' must be filled in!"
#             )
#         if hydx_dataset_name and hydx_path:
#             feedback.pushWarning(
#                 "Both 'GWSW dataset name (online)' and 'GWSW HydX directory (local)' are filled in. "
#                 "'GWSW dataset name (online)' will be ignored. This dataset will not be downloaded."
-#             )
-#         elif hydx_dataset_name:
-#             try:
-#                 hydx_download_path = Path(hydx_download_dir)
-#                 hydx_download_dir_is_valid = hydx_download_path.is_dir()
-#             except TypeError:
-#                 hydx_download_dir_is_valid = False
-#             if parameters[self.HYDX_DOWNLOAD_DIRECTORY] == "TEMPORARY_OUTPUT":
-#                 hydx_download_dir_is_valid = True
-#             if not hydx_download_dir_is_valid:
-#                 raise QgsProcessingException(
-#                     f"'Destination directory for HydX dataset download' ({hydx_download_path}) is not a valid directory"
-#                 )
-#             hydx_path = download_hydx(
-#                 dataset_name=hydx_dataset_name,
-#                 target_directory=hydx_download_path,
-#                 wait_times=[0.1, 1, 2, 3, 4, 5, 10],
-#                 feedback=feedback,
-#             )
-#             # hydx_path will be None if user has canceled the process during download
-#             if feedback.isCanceled():
-#                 raise QgsProcessingException("Process canceled")
-#         feedback.pushInfo(f"Starting import of {hydx_path} to {out_path}")
-#         log_path = Path(out_path).parent / "import_hydx.log"
-#         write_logging_to_file(log_path)
-#         feedback.pushInfo(f"Logging will be written to {log_path}")
-#         run_import_export(export_type="threedi", hydx_path=hydx_path, out_path=out_path)
-#         return {}
-#
-#     def name(self):
-#         """
-#         Returns the algorithm name, used for identifying the algorithm. This
-#         string should be fixed for the algorithm, and must not be localised.
-#         The name should be unique within each provider. Names should contain
-#         lowercase alphanumeric characters only and no spaces or other
-#         formatting characters.
-#         """
-#         return "import_hydx"
-#
-#     def displayName(self):
-#         """
-#         Returns the translated algorithm name, which should be used for any
-#         user-visible display of the algorithm name.
-#         """
-#         return self.tr("Import GWSW HydX")
-#
-#     def shortHelpString(self):
-#         return """
-#             <h3>Introduction</h3>
-#             <p>Use this processing algorithm to import data in the format of the Dutch "Gegevenswoordenboek Stedelijk Water (GWSW)". Either select a previously downloaded local dataset, or download a dataset directly from the server.</p>
-#             <p>A log file will be created in the same directory as the Target 3Di Spatialite. Please check this log file after the import has completed.</p>
-#             <h3>Parameters</h3>
-#             <h4>Target 3Di Spatialite</h4>
-#             <p>Spatialite (.sqlite) file that contains the layers required by 3Di. Imported data will be added to any data already contained in the 3Di Spatialite.</p>
-#             <h4>GWSW HydX directory (local)</h4>
-#             <p>Use this option if you have already downloaded a GWSW HydX dataset to a local directory.</p>
-#             <h4>GWSW dataset name (online)</h4>
-#             <p>Use this option if you want to download a GWSW HydX dataset.</p>
-#             <h4>Destination directory for GWSW HydX dataset download</h4>
-#             <p>If you have chosen to download a GWSW HydX dataset, this is the directory it will be downloaded to.</p>
-#         """
-#
-#     def group(self):
-#         """
-#         Returns the name of the group this algorithm belongs to. This string
-#         should be localised.
-#         """
-#         return self.tr(self.groupId())
-#
-#     def groupId(self):
-#         """
-#         Returns the unique ID of the group this algorithm belongs to. This
-#         string should be fixed for the algorithm, and must not be localised.
-#         The group id should be unique within each provider. Group id should
-#         contain lowercase alphanumeric characters only and no spaces or other
-#         formatting characters.
-#         """
-#         return "Schematisation"
-#
-#     def tr(self, string):
-#         return QCoreApplication.translate("Processing", string)
-#
-#     def createInstance(self):
-#         return ImportHydXAlgorithm()
diff --git a/tests/test_dependencies.py b/tests/test_dependencies.py
index 61cdfd41..b7a780d9 100644
--- a/tests/test_dependencies.py
+++ b/tests/test_dependencies.py
@@ -1,6 +1,5 @@
 from pathlib import Path
 from threedi_results_analysis import dependencies
-from threedi_results_analysis.dependencies import Dependency
 import mock
 import os
@@ -55,22 +54,6 @@ def test_install_dependencies(tmpdir):
     dependencies.ensure_everything_installed()
 
 
-# def test_uninstall_dependency(tmpdir, monkeypatch):
-    # python_path = os.getenv("PYTHONPATH", "")
-    # new_python_path = f"{python_path}:{tmpdir}"
-    # monkeypatch.setenv("PYTHONPATH", new_python_path)
-
-    # small_dependencies = [
-    #     Dependency("threedi-modelchecker", "threedi_modelchecker", ">=1.0.0", False)
-    # ]
-    # dependencies._install_dependencies(
-    #     small_dependencies, target_dir=tmpdir
-    # )
-    # dependencies._uninstall_dependency(small_dependencies[0])
-    # for directory in os.listdir(tmpdir):
-    #     assert "threedi_modelchecker" not in directory
-
-
 def test_install_dependencies_with_error(tmpdir):
     wrong_dependencies = [missing_dependency]
     with pytest.raises(RuntimeError):

From 7a189033481fb78b548e4f02667980aa51e478e0 Mon Sep 17 00:00:00 2001
From: leendertvanwolfswinkel
Date: Wed, 8 Jan 2025 11:35:35 +0100
Subject: [PATCH 17/17] threedi_schema 0.230.0.dev1

---
 dependencies.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/dependencies.py b/dependencies.py
index e13fa740..c6900ea9 100644
--- a/dependencies.py
+++ b/dependencies.py
@@ -60,7 +60,7 @@
     Dependency("cftime", "cftime", ">=1.5.0", False),  # threedigrid[results]
    Dependency("alembic", "alembic", "==1.8.*", False),
    Dependency("threedigrid", "threedigrid", "==2.3.*", False),
-    Dependency("threedi-schema", "threedi_schema", "==0.230.0.dev0", False),
+    Dependency("threedi-schema", "threedi_schema", "==0.230.0.dev1", False),
     Dependency("threedidepth", "threedidepth", "==0.6.3", False),
     Dependency("click", "click", ">=8.0", False),
     Dependency("packaging", "packaging", "", False),
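
A note on the subprocess handling deleted in patch 1: the manual stdin/stdout/stderr
wiring in _uninstall_dependency and _install_dependencies works around
https://bugs.python.org/issue3905, which only affects long-obsolete Python versions.
On the Python 3 interpreters current QGIS ships, a sketch along these lines (same pip
arguments; the helper name and default interpreter path are illustrative, not part of
this patch series) captures the output in a single call:

    import subprocess
    import sys


    def uninstall_package(name, python_interpreter=sys.executable):
        """Sketch: run 'pip uninstall' and print its combined output."""
        process = subprocess.run(
            [python_interpreter, "-m", "pip", "uninstall", "--yes", name],
            capture_output=True,  # replaces the manual PIPE/read/close dance
            text=True,            # replaces universal_newlines=True
        )
        print(process.stdout + process.stderr)
        return process.returncode == 0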
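The _check_presence helper deleted above leans on pkg_resources, which setuptools has
deprecated. A minimal sketch of the same check on importlib.metadata, assuming
Python >= 3.8 and the packaging library (which the plugin's own dependency list
already pins), could look like this; the function name is illustrative:

    from importlib.metadata import PackageNotFoundError, version

    from packaging.requirements import Requirement


    def is_present(requirement_string):
        """Sketch: check one requirement, e.g. 'threedi-schema==0.230.0.dev1'."""
        requirement = Requirement(requirement_string)
        try:
            installed = version(requirement.name)
        except PackageNotFoundError:
            return False
        # prereleases=True keeps .dev pins such as 0.230.0.dev1 satisfiable;
        # an empty specifier set accepts any installed version.
        return requirement.specifier.contains(installed, prereleases=True)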
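The deleted grid_creation_algorithm.py captured threedigrid_builder's log records by
temporarily attaching a StringIO handler around the make_gridadmin call. The pattern
generalises to any named logger; a self-contained sketch (function and argument names
are illustrative):

    import io
    import logging


    def run_with_captured_logs(logger_name, func, *args, **kwargs):
        """Sketch: run func while collecting a named logger's output as text."""
        logger = logging.getLogger(logger_name)
        stream = io.StringIO()
        handler = logging.StreamHandler(stream)
        handler.setFormatter(logging.Formatter(fmt="%(levelname)-8s :: %(message)s"))
        handler.setLevel(logging.DEBUG)
        logger.addHandler(handler)
        try:
            result = func(*args, **kwargs)
            return result, stream.getvalue()
        finally:
            # detach the handler even if func raised, so it does not leak
            logger.removeHandler(handler)
            stream.close()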