align 11x with master #284

Merged
11 commits merged on Feb 3, 2025
2 changes: 1 addition & 1 deletion Dockerfile
@@ -1,7 +1,7 @@
FROM geonode/geonode-base:latest-ubuntu-22.04
RUN rm -rf /usr/src/geonode
RUN git clone https://github.com/GeoNode/geonode.git /usr/src/geonode
RUN cd /usr/src/geonode && git fetch --all && git checkout master && cd -
RUN cd /usr/src/geonode && git fetch --all && git checkout 4.4.x && cd -
RUN mkdir -p /usr/src/importer

RUN cd ..
3 changes: 3 additions & 0 deletions README.md
@@ -1,6 +1,9 @@
![PyPI - Downloads](https://img.shields.io/pypi/dm/geonode-importer) ![GitHub Actions Workflow Status](https://img.shields.io/github/actions/workflow/status/GeoNode/geonode-importer/runtests.yml) ![GitHub top language](https://img.shields.io/github/languages/top/GeoNode/geonode-importer)

# NOTE:
GeoNode 5 (master branch) includes the importer in its core. This repository and the `geonode-importer` package will be maintained for GeoNode <= 4.4.x.

For more information, see https://github.com/GeoNode/geonode/issues/12368.

# geonode-importer

3 changes: 3 additions & 0 deletions importer/celery_tasks.py
@@ -367,6 +367,9 @@ def create_geonode_resource(
handler_module_path, resource, _exec, **kwargs
)

# assign the geonode resource to the ExecutionRequest
orchestrator.update_execution_request_obj(_exec, {"geonode_resource": resource})

# at the end recall the import_orchestrator for the next step
import_orchestrator.apply_async(
(
2 changes: 2 additions & 0 deletions importer/handlers/common/metadata.py
@@ -78,6 +78,8 @@ def import_resource(self, files: dict, execution_id: str, **kwargs):
self.handle_metadata_resource(_exec, dataset, original_handler)

dataset.refresh_from_db()
# assign the resource to the execution_obj
orchestrator.update_execution_request_obj(_exec, {"geonode_resource": dataset})

orchestrator.evaluate_execution_progress(
execution_id, handler_module_path=str(self)
2 changes: 1 addition & 1 deletion importer/handlers/common/raster.py
@@ -104,7 +104,7 @@ def extract_params_from_data(_data, action=None):
"""
if action == exa.COPY.value:
title = json.loads(_data.get("defaults"))
return {"title": title.pop("title")}, _data
return {"title": title.pop("title"), "store_spatial_file": True}, _data

return {
"skip_existing_layers": _data.pop("skip_existing_layers", "False"),
2 changes: 1 addition & 1 deletion importer/handlers/common/remote.py
@@ -84,7 +84,7 @@ def extract_params_from_data(_data, action=None):
"""
if action == exa.COPY.value:
title = json.loads(_data.get("defaults"))
return {"title": title.pop("title")}, _data
return {"title": title.pop("title"), "store_spatial_file": True}, _data

return {
"source": _data.pop("source", "upload"),
10 changes: 5 additions & 5 deletions importer/handlers/common/vector.py
@@ -114,7 +114,7 @@ def extract_params_from_data(_data, action=None):
"""
if action == exa.COPY.value:
title = json.loads(_data.get("defaults"))
return {"title": title.pop("title")}, _data
return {"title": title.pop("title"), "store_spatial_file": True}, _data

return {
"skip_existing_layers": _data.pop("skip_existing_layers", "False"),
@@ -220,7 +220,7 @@ def perform_last_step(execution_id):
that the execution is completed
"""
_exec = BaseHandler.perform_last_step(execution_id=execution_id)
if _exec and not _exec.input_params.get("store_spatial_file", False):
if _exec and not _exec.input_params.get("store_spatial_file", True):
resources = ResourceHandlerInfo.objects.filter(execution_request=_exec)
# getting all assets list
assets = filter(None, [get_default_asset(x.resource) for x in resources])
@@ -607,7 +607,7 @@ def create_geonode_resource(
resource_manager.set_thumbnail(None, instance=saved_dataset)

ResourceBase.objects.filter(alternate=alternate).update(dirty_state=False)

saved_dataset.refresh_from_db()
return saved_dataset

@@ -805,13 +805,13 @@ def _import_resource_rollback(self, exec_id, instance_name=None, *args, **kwargs
"Dynamic model does not exists, removing ogr2ogr table in progress"
)
if instance_name is None:
logger.info("No table created, skipping...")
logger.warning("No table created, skipping...")
return
db_name = os.getenv("DEFAULT_BACKEND_DATASTORE", "datastore")
with connections[db_name].cursor() as cursor:
cursor.execute(f"DROP TABLE {instance_name}")
except Exception as e:
logger.info(e)
logger.warning(e)
pass

def _publish_resource_rollback(self, exec_id, instance_name=None, *args, **kwargs):
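The `store_spatial_file` flag ties most of these handler changes together: the COPY branch of `extract_params_from_data` now forces it to `True` (here and in the raster, remote, shapefile and 3D Tiles handlers), and `perform_last_step` now treats a missing flag as `True`, so the asset cleanup is skipped and cloned resources keep their spatial files. A minimal sketch of the COPY branch, assuming the class in `importer/handlers/common/vector.py` is `BaseVectorFileHandler` and that a configured GeoNode/Django environment is available:

```python
import json

# Assumed import path and class name, taken from the repository layout rather than this diff.
from importer.handlers.common.vector import BaseVectorFileHandler

# "defaults" arrives as a JSON string, as in the copy flow exercised by the tests.
_data = {"defaults": json.dumps({"title": "title_of_the_cloned_resource"})}

params, remaining = BaseVectorFileHandler.extract_params_from_data(_data, action="copy")

# With this PR, params == {"title": "title_of_the_cloned_resource", "store_spatial_file": True},
# so perform_last_step() will no longer drop the assets attached to the cloned dataset.
print(params)
```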
2 changes: 1 addition & 1 deletion importer/handlers/shapefile/handler.py
@@ -85,7 +85,7 @@ def extract_params_from_data(_data, action=None):
"""
if action == exa.COPY.value:
title = json.loads(_data.get("defaults"))
return {"title": title.pop("title")}, _data
return {"title": title.pop("title"), "store_spatial_file": True}, _data

additional_params = {
"skip_existing_layers": _data.pop("skip_existing_layers", "False"),
48 changes: 27 additions & 21 deletions importer/handlers/tiles3d/handler.py
@@ -56,13 +56,15 @@ def can_handle(_data) -> bool:
This endpoint will return True or False if with the info provided
the handler is able to handle the file or not
"""
base = _data.get("base_file")
if not base:
try:
base = _data.get("base_file")
if not base:
return False
ext = base.split(".")[-1] if isinstance(base, str) else base.name.split(".")[-1]
if ext in ["json"] and Tiles3DFileHandler.is_3dtiles_json(base):
return True
except Exception:
return False
ext = base.split(".")[-1] if isinstance(base, str) else base.name.split(".")[-1]
input_filename = os.path.basename(base if isinstance(base, str) else base.name)
if ext in ["json"] and "tileset.json" in input_filename:
return True
return False

@staticmethod
@@ -90,25 +92,29 @@ def is_valid(files, user):
)

try:
with open(_file, "r") as _readed_file:
_file = json.loads(_readed_file.read())
# required key described in the specification of 3dtiles
# https://docs.ogc.org/cs/22-025r4/22-025r4.html#toc92
is_valid = all(
key in _file.keys() for key in ("asset", "geometricError", "root")
)

if not is_valid:
raise Invalid3DTilesException(
"The provided 3DTiles is not valid, some of the mandatory keys are missing. Mandatory keys are: 'asset', 'geometricError', 'root'"
)
_file = Tiles3DFileHandler.is_3dtiles_json(_file)

Tiles3DFileHandler.validate_3dtile_payload(payload=_file)

except Exception as e:
raise Invalid3DTilesException(e)

return True

@staticmethod
def is_3dtiles_json(_file):
with open(_file, "r") as _readed_file:
_file = json.loads(_readed_file.read())
# required key described in the specification of 3dtiles
# https://docs.ogc.org/cs/22-025r4/22-025r4.html#toc92
is_valid = all(key in _file.keys() for key in ("asset", "geometricError", "root"))

if not is_valid:
raise Invalid3DTilesException(
"The provided 3DTiles is not valid, some of the mandatory keys are missing. Mandatory keys are: 'asset', 'geometricError', 'root'"
)

return _file

@staticmethod
def validate_3dtile_payload(payload):
@@ -124,7 +130,7 @@ def validate_3dtile_payload(payload):
"The mandatory 'boundingVolume' for the key 'root' is missing"
)

error = payload.get("root", {}).get("geometricError", None)
error = payload.get("geometricError", None) or payload.get("root", {}).get("geometricError", None)
if error is None:
raise Invalid3DTilesException(
"The mandatory 'geometricError' for the key 'root' is missing"
@@ -138,7 +144,7 @@ def extract_params_from_data(_data, action=None):
"""
if action == exa.COPY.value:
title = json.loads(_data.get("defaults"))
return {"title": title.pop("title")}, _data
return {"title": title.pop("title"), "store_spatial_file": True}, _data

return {
"skip_existing_layers": _data.pop("skip_existing_layers", "False"),
@@ -212,7 +218,7 @@ def create_geonode_resource(
asset=None,
):
# we want just the tileset.json as location of the asset
asset.location = [path for path in asset.location if "tileset.json" in path]
asset.location = [path for path in asset.location if path.endswith(".json")]
asset.save()

resource = super().create_geonode_resource(
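The 3D Tiles refactor moves the tileset parsing into the new `is_3dtiles_json` helper: `can_handle` now parses the JSON and checks the mandatory keys instead of matching on the `tileset.json` filename, and `is_valid` delegates to the same helper before running `validate_3dtile_payload`. A rough usage sketch, assuming a configured GeoNode environment; the sample tileset mirrors the one used in the tests:

```python
import json
import tempfile

# Import path taken from the file touched in this diff.
from importer.handlers.tiles3d.handler import Tiles3DFileHandler

# Minimal tileset carrying the three mandatory keys: 'asset', 'geometricError', 'root'.
tileset = {
    "asset": {"version": "1.1"},
    "geometricError": 1.0,
    "root": {"boundingVolume": {"box": []}, "geometricError": 0.0},
}

with tempfile.NamedTemporaryFile("w", suffix=".json", delete=False) as tmp:
    json.dump(tileset, tmp)
    path = tmp.name

# can_handle no longer cares about the file name, only about the parsed content.
print(Tiles3DFileHandler.can_handle({"base_file": path}))  # expected: True

# is_3dtiles_json returns the parsed dict, or raises Invalid3DTilesException
# when one of the mandatory keys is missing.
payload = Tiles3DFileHandler.is_3dtiles_json(path)
Tiles3DFileHandler.validate_3dtile_payload(payload=payload)
```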
7 changes: 3 additions & 4 deletions importer/handlers/tiles3d/tests.py
@@ -91,7 +91,7 @@ def test_extract_params_from_data(self):
action="copy",
)

self.assertEqual(actual, {"title": "title_of_the_cloned_resource"})
self.assertEqual(actual, {'store_spatial_file': True, 'title': 'title_of_the_cloned_resource'})

def test_is_valid_should_raise_exception_if_the_3dtiles_is_invalid(self):
data = {
@@ -140,7 +140,7 @@ def test_validate_should_raise_exception_for_invalid_asset_key(self):
def test_validate_should_raise_exception_for_invalid_root_boundingVolume(self):
_json = {
"asset": {"version": "1.1"},
"geometricError": 1.0,
"geometricError": 1.0,
"root": {"foo": {"box": []}, "geometricError": 0.0},
}
_path = "/tmp/tileset.json"
@@ -159,7 +159,6 @@ def test_validate_should_raise_exception_for_invalid_root_boundingVolume(self):
def test_validate_should_raise_exception_for_invalid_root_geometricError(self):
_json = {
"asset": {"version": "1.1"},
"geometricError": 1.0,
"root": {"boundingVolume": {"box": []}, "foo": 0.0},
}
_path = "/tmp/tileset.json"
@@ -170,7 +169,7 @@ def test_validate_should_raise_exception_for_invalid_root_geometricError(self):

self.assertIsNotNone(_exc)
self.assertTrue(
"The mandatory 'geometricError' for the key 'root' is missing"
"The provided 3DTiles is not valid, some of the mandatory keys are missing. Mandatory keys are: 'asset', 'geometricError', 'root'"
in str(_exc.exception.detail)
)
os.remove(_path)
2 changes: 1 addition & 1 deletion importer/migrations/0006_dataset_migration.py
@@ -11,7 +11,7 @@ def dataset_migration(apps, _):
pk__in=NewResources.objects.values_list("resource_id", flat=True)
).exclude(subtype__in=["remote", None]):
# generating orchestrator expected data file
if not old_resource.files:
if not hasattr(old_resource, "files"):
if old_resource.is_vector():
converted_files = [{"base_file": "placeholder.shp"}]
else:
54 changes: 54 additions & 0 deletions importer/migrations/0007_align_resourcehandler_with_asset.py
@@ -0,0 +1,54 @@
# Generated by Django 3.2.15 on 2022-10-04 13:03

import logging
from django.db import migrations
from importer.orchestrator import orchestrator
from geonode.layers.models import Dataset
from geonode.assets.utils import get_default_asset
from geonode.utils import get_allowed_extensions

logger = logging.getLogger("django")

def dataset_migration(apps, _):
NewResources = apps.get_model("importer", "ResourceHandlerInfo")
for old_resource in Dataset.objects.exclude(
pk__in=NewResources.objects.values_list("resource_id", flat=True)
).exclude(subtype__in=["remote", None]):
# generating orchestrator expected data file
if old_resource.resourcehandlerinfo_set.first() is None:
if get_default_asset(old_resource):
available_choices = get_allowed_extensions()
not_main_files = ["xml", "sld", "zip", "kmz"]
base_file_choices = set(x for x in available_choices if x not in not_main_files)
output_files = dict()
for _file in get_default_asset(old_resource).location:
if _file.split(".")[-1] in base_file_choices:
output_files.update({"base_file": _file})
break
else:
if old_resource.is_vector():
output_files = {"base_file": "placeholder.shp"}
else:
output_files = {"base_file": "placeholder.tiff"}

handler = orchestrator.get_handler(output_files)
if handler is None:
logger.error(f"Handler not found for resource: {old_resource}")
continue
handler.create_resourcehandlerinfo(
handler_module_path=str(handler),
resource=old_resource,
execution_id=None
)
else:
logger.debug("ResourceHandlerInfo already exists for the resource")


class Migration(migrations.Migration):
dependencies = [
("importer", "0006_dataset_migration"),
]

operations = [
migrations.RunPython(dataset_migration),
]
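The migration backfills `ResourceHandlerInfo` rows for datasets imported before the asset alignment, picking the first asset file whose extension counts as a main file and otherwise falling back to a placeholder name so that `orchestrator.get_handler` can still resolve a handler. A small sketch of that lookup, assuming a configured GeoNode environment (the placeholder names mirror the fallback above):

```python
from importer.orchestrator import orchestrator

# The orchestrator resolves a handler from the "base_file" entry,
# exactly as the migration does with its placeholder fallbacks.
vector_handler = orchestrator.get_handler({"base_file": "placeholder.shp"})
raster_handler = orchestrator.get_handler({"base_file": "placeholder.tiff"})

# get_handler returns None when no registered handler accepts the file,
# which is why the migration logs an error and skips such resources.
print(vector_handler, raster_handler)
```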
5 changes: 5 additions & 0 deletions importer/orchestrator.py
@@ -332,6 +332,11 @@ def update_execution_request_status(
task_args=celery_task_request.args
)

def update_execution_request_obj(self, _exec_obj, payload):
ExecutionRequest.objects.filter(pk=_exec_obj.pk).update(**payload)
_exec_obj.refresh_from_db()
return _exec_obj

def _last_step(self, execution_id, handler_module_path):
"""
Last hookable step for each handler before mark the execution as completed
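`update_execution_request_obj` is the counterpart of the handler-side changes above: both `create_geonode_resource` in `celery_tasks.py` and the metadata handler use it to attach the freshly created GeoNode resource to its `ExecutionRequest`. A minimal sketch of the call, with an assumed import path for the `ExecutionRequest` model and a hypothetical execution id:

```python
from importer.orchestrator import orchestrator
from geonode.layers.models import Dataset
from geonode.resource.models import ExecutionRequest  # assumed import path

execution_id = "00000000-0000-0000-0000-000000000000"  # hypothetical id
_exec = ExecutionRequest.objects.get(exec_id=execution_id)
resource = Dataset.objects.first()

# Persists the link with a queryset update() and returns the refreshed instance,
# so later steps can read _exec.geonode_resource directly.
_exec = orchestrator.update_execution_request_obj(_exec, {"geonode_resource": resource})
```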
2 changes: 1 addition & 1 deletion importer/tests/end2end/test_end2end.py
@@ -543,7 +543,7 @@ def test_import_wms(self):
"lookup": resource_to_take,
"parse_remote_metadata": True,
}
initial_name = res.title
initial_name = res.title.lower().replace(" ", "_")
assert_payload = {
"subtype": "remote",
"title": res.title,
2 changes: 1 addition & 1 deletion importer/tests/unit/test_task.py
@@ -245,7 +245,7 @@ def test_publish_resource_if_overwrite_should_not_call_the_publishing(
"""
try:
with self.assertRaises(Exception):
get_resource.return_falue = True
get_resource.return_value = True
publish_resources.return_value = True
extract_resource_to_publish.return_value = [
{"crs": 4326, "name": "dataset3"}