
Commit b2b038c

[Fixes #201] Implement SldFileHandler (#203)
* [Fixes #201] Refactor metadata common and add SLD handler
* Fix SLD file handler and black formatting
1 parent: e29c70e

27 files changed: +243 −91 lines

README.md

Lines changed: 3 additions & 1 deletion

@@ -20,6 +20,7 @@ In GeoNode 4.1 `geonode-importer` replaced the previous importer logic.
 - **CSV** - Vector
 - **GeoTiff** - Raster
 - **XML** - Update XML file for a given resource
+- **SLD** - Update SLD file for a given resource

 **IMPORTANT**: At the moment the importer doesn't support overwriting/skipping existing layers from the UI. Every upload will create a new dataset.
 Overwriting a layer (`overwrite_existing_layer`) and skipping an already existing layer (`skip_existing_layers`) is supported through the API.
@@ -107,7 +108,8 @@ IMPORTER_HANDLERS = os.getenv('IMPORTER_HANDLERS', [
     'importer.handlers.kml.handler.KMLFileHandler',
     'importer.handlers.csv.handler.CSVFileHandler',
     'importer.handlers.geotiff.handler.GeoTiffFileHandler',
-    'importer.handlers.xml.handler.XMLFileHandler
+    'importer.handlers.xml.handler.XMLFileHandler',
+    'importer.handlers.sld.handler.SLDFileHandler'
 ])

 ```
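As a quick illustration of why the new handler is appended to `IMPORTER_HANDLERS`, here is a minimal sketch (not the importer's actual dispatch code) of how the registered handlers drive routing: each handler exposes a `can_handle()` hook, and the `SLDFileHandler.can_handle` added in this commit accepts payloads whose `base_file` ends in `.sld`. The payload dicts below are hypothetical.

```python
from importer.handlers.sld.handler import SLDFileHandler

# Hypothetical upload payloads, shaped like the "files" dict the importer passes around.
sld_payload = {"base_file": "roads_style.sld"}
vector_payload = {"base_file": "roads.gpkg"}

# can_handle() checks the .sld extension on either a string path
# or a file-like object's .name attribute.
assert SLDFileHandler.can_handle(sld_payload) is True
assert SLDFileHandler.can_handle(vector_payload) is False
```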

importer/__init__.py

Lines changed: 1 addition & 1 deletion

@@ -20,7 +20,7 @@

 project_dir = os.path.dirname(os.path.abspath(__file__))

-VERSION = (1, 0, 7)
+VERSION = (1, 0, 8)
 __version__ = ".".join([str(i) for i in VERSION])
 __author__ = "geosolutions-it"
 __email__ = "info@geosolutionsgroup.com"

importer/handlers/base.py

Lines changed: 0 additions & 1 deletion

@@ -67,7 +67,6 @@ def can_handle_xml_file(self) -> bool:
         """
         return True

-
     @property
     def can_handle_sld_file(self) -> bool:
         """

importer/handlers/common/metadata.py

Lines changed: 37 additions & 9 deletions

@@ -3,27 +3,36 @@
 from importer.handlers.base import BaseHandler
 from importer.handlers.xml.serializer import MetadataFileSerializer
 from importer.utils import ImporterRequestAction as ira
+from importer.orchestrator import orchestrator
+from django.shortcuts import get_object_or_404
+from geonode.layers.models import Dataset

 logger = logging.getLogger(__name__)


 class MetadataFileHandler(BaseHandler):
     """
-    Handler to import KML files into GeoNode data db
+    Handler to import metadata files into GeoNode data db
     It must provide the task_lists required to comple the upload
     """

     ACTIONS = {
-        exa.IMPORT.value: (
-            "start_import",
-            "importer.import_resource"
-        ),
-        ira.ROLLBACK.value: ()
+        exa.IMPORT.value: ("start_import", "importer.import_resource"),
+        ira.ROLLBACK.value: (),
     }

     @staticmethod
-    def has_serializer(_data) -> bool:
-        return MetadataFileSerializer
+    def has_serializer(data) -> bool:
+        _base = data.get("base_file")
+        if not _base:
+            return False
+        if (
+            _base.endswith("xml") or _base.endswith("sld")
+            if isinstance(_base, str)
+            else _base.name.endswith("xml") or _base.name.endswith("sld")
+        ):
+            return MetadataFileSerializer
+        return False

     @property
     def supported_file_extension_config(self):
@@ -47,5 +56,24 @@ def perform_last_step(execution_id):
         pass

     def import_resource(self, files: dict, execution_id: str, **kwargs):
-        pass
+        _exec = orchestrator.get_execution_object(execution_id)
+        # getting the dataset
+        alternate = _exec.input_params.get("dataset_title")
+        dataset = get_object_or_404(Dataset, alternate=alternate)
+
+        # retrieving the handler used for the dataset
+        original_handler = orchestrator.load_handler(
+            dataset.resourcehandlerinfo_set.first().handler_module_path
+        )()
+
+        self.handle_metadata_resource(_exec, dataset, original_handler)
+
+        dataset.refresh_from_db()
+
+        orchestrator.evaluate_execution_progress(
+            execution_id, handler_module_path=str(self)
+        )
+        return dataset

+    def handle_metadata_resource(self, _exec, dataset, original_handler):
+        raise NotImplementedError
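The net effect of this refactor is a template method: `MetadataFileHandler.import_resource` now resolves the target `Dataset` (via the `dataset_title` input param) and the handler that originally imported it, then delegates to `handle_metadata_resource`, which each concrete metadata handler overrides. A minimal sketch of such a subclass, using a hypothetical `MyMetadataFileHandler` that only logs the delegation:

```python
import logging

from importer.handlers.common.metadata import MetadataFileHandler

logger = logging.getLogger(__name__)


class MyMetadataFileHandler(MetadataFileHandler):
    """Hypothetical subclass, for illustration only."""

    def handle_metadata_resource(self, _exec, dataset, original_handler):
        # _exec is the execution request object returned by
        # orchestrator.get_execution_object(), dataset the resolved Dataset,
        # and original_handler the handler that first imported that dataset.
        # A real implementation applies the uploaded file to the dataset
        # (see the SLD handler below); here we only log the call.
        logger.info(
            "Applying %s to %s (originally imported by %s)",
            _exec.input_params.get("files", {}).get("base_file"),
            dataset.alternate,
            type(original_handler).__name__,
        )
```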

importer/handlers/common/raster.py

Lines changed: 3 additions & 4 deletions

@@ -25,7 +25,6 @@
 from osgeo import gdal
 from importer.celery_app import importer_app
 from geonode.storage.manager import storage_manager
-from geonode.geoserver.helpers import get_store

 logger = logging.getLogger(__name__)

@@ -250,7 +249,7 @@ def identify_authority(self, layer):
             raise Exception(
                 "CRS authority code not found, fallback to default behaviour"
             )
-        except:
+        except Exception:
            spatial_ref = layer.GetSpatialRef()
            spatial_ref.AutoIdentifyEPSG()
            _name = spatial_ref.GetAuthorityName(None) or spatial_ref.GetAttrValue(
@@ -526,7 +525,7 @@ def rollback(
         step_index = steps.index(rollback_from_step)
         # the start_import, start_copy etc.. dont do anything as step, is just the start
         # so there is nothing to rollback
-        steps_to_rollback = steps[1 : step_index + 1]
+        steps_to_rollback = steps[1 : step_index + 1]  # noqa
         if not steps_to_rollback:
             return
         # reversing the tuple to going backwards with the rollback
@@ -536,7 +535,7 @@
             istance_name = (
                 find_key_recursively(kwargs, "new_dataset_alternate") or args[3]
             )
-        except:
+        except Exception:
            pass

        logger.warning(

importer/handlers/common/tests_vector.py

Lines changed: 2 additions & 2 deletions

@@ -226,7 +226,7 @@ def test_import_with_ogr2ogr_without_errors_should_call_the_right_command(

         _open.assert_called_once()
         _open.assert_called_with(
-            f"/usr/bin/ogr2ogr --config PG_USE_COPY YES -f PostgreSQL PG:\" dbname='test_geonode_data' host="
+            "/usr/bin/ogr2ogr --config PG_USE_COPY YES -f PostgreSQL PG:\" dbname='test_geonode_data' host="
             + os.getenv("DATABASE_HOST", "localhost")
             + " port=5432 user='geonode_data' password='geonode_data' \" \""
             + self.valid_files.get("base_file")
@@ -256,7 +256,7 @@ def test_import_with_ogr2ogr_with_errors_should_raise_exception(self, _open):

         _open.assert_called_once()
         _open.assert_called_with(
-            f"/usr/bin/ogr2ogr --config PG_USE_COPY YES -f PostgreSQL PG:\" dbname='test_geonode_data' host="
+            "/usr/bin/ogr2ogr --config PG_USE_COPY YES -f PostgreSQL PG:\" dbname='test_geonode_data' host="
             + os.getenv("DATABASE_HOST", "localhost")
             + " port=5432 user='geonode_data' password='geonode_data' \" \""
             + self.valid_files.get("base_file")

importer/handlers/common/vector.py

Lines changed: 5 additions & 5 deletions

@@ -279,7 +279,7 @@ def identify_authority(self, layer):
             raise Exception(
                 "CRS authority code not found, fallback to default behaviour"
             )
-        except:
+        except Exception:
            spatial_ref = layer.GetSpatialRef()
            spatial_ref.AutoIdentifyEPSG()
            _name = spatial_ref.GetAuthorityName(None) or spatial_ref.GetAttrValue(
@@ -375,7 +375,7 @@ def import_resource(self, files: dict, execution_id: str, **kwargs) -> str:
         )

         # prepare the async chord workflow with the on_success and on_fail methods
-        workflow = chord(group_to_call)(
+        workflow = chord(group_to_call)(  # noqa
             import_next_step.s(
                 execution_id,
                 str(self),  # passing the handler module path
@@ -530,7 +530,7 @@ def create_dynamic_model_fields(
         # ones we have the schema, here we create a list of chunked value
         # so the async task will handle max of 30 field per task
         list_chunked = [
-            layer_schema[i : i + 30] for i in range(0, len(layer_schema), 30)
+            layer_schema[i : i + 30] for i in range(0, len(layer_schema), 30)  # noqa
         ]

         # definition of the celery group needed to run the async workflow.
@@ -777,7 +777,7 @@ def rollback(
         step_index = steps.index(rollback_from_step)
         # the start_import, start_copy etc.. dont do anything as step, is just the start
         # so there is nothing to rollback
-        steps_to_rollback = steps[1 : step_index + 1]
+        steps_to_rollback = steps[1 : step_index + 1]  # noqa
         if not steps_to_rollback:
             return
         # reversing the tuple to going backwards with the rollback
@@ -787,7 +787,7 @@
             instance_name = (
                 find_key_recursively(kwargs, "new_dataset_alternate") or args[3]
             )
-        except:
+        except Exception:
            pass

        logger.warning(

importer/handlers/csv/handler.py

Lines changed: 1 addition & 1 deletion

@@ -208,7 +208,7 @@ def create_dynamic_model_fields(
         # ones we have the schema, here we create a list of chunked value
         # so the async task will handle max of 30 field per task
         list_chunked = [
-            layer_schema[i : i + 30] for i in range(0, len(layer_schema), 30)
+            layer_schema[i : i + 30] for i in range(0, len(layer_schema), 30)  # noqa
         ]

         # definition of the celery group needed to run the async workflow.

importer/handlers/csv/tests.py

Lines changed: 2 additions & 2 deletions

@@ -165,11 +165,11 @@ def test_import_with_ogr2ogr_without_errors_should_call_the_right_command(

         _open.assert_called_once()
         _open.assert_called_with(
-            f"/usr/bin/ogr2ogr --config PG_USE_COPY YES -f PostgreSQL PG:\" dbname='test_geonode_data' host="
+            "/usr/bin/ogr2ogr --config PG_USE_COPY YES -f PostgreSQL PG:\" dbname='test_geonode_data' host="
             + os.getenv("DATABASE_HOST", "localhost")
             + " port=5432 user='geonode_data' password='geonode_data' \" \""
             + self.valid_csv
-            + '" -nln alternate "dataset" -oo KEEP_GEOM_COLUMNS=NO -lco GEOMETRY_NAME=geometry -oo "GEOM_POSSIBLE_NAMES=geom*,the_geom*,wkt_geom" -oo "X_POSSIBLE_NAMES=x,long*" -oo "Y_POSSIBLE_NAMES=y,lat*"',
+            + '" -nln alternate "dataset" -oo KEEP_GEOM_COLUMNS=NO -lco GEOMETRY_NAME=geometry -oo "GEOM_POSSIBLE_NAMES=geom*,the_geom*,wkt_geom" -oo "X_POSSIBLE_NAMES=x,long*" -oo "Y_POSSIBLE_NAMES=y,lat*"',  # noqa
             stdout=-1,
             stderr=-1,
             shell=True,  # noqa

importer/handlers/geojson/tests.py

Lines changed: 1 addition & 1 deletion

@@ -131,7 +131,7 @@ def test_import_with_ogr2ogr_without_errors_should_call_the_right_command(

         _open.assert_called_once()
         _open.assert_called_with(
-            f"/usr/bin/ogr2ogr --config PG_USE_COPY YES -f PostgreSQL PG:\" dbname='test_geonode_data' host="
+            "/usr/bin/ogr2ogr --config PG_USE_COPY YES -f PostgreSQL PG:\" dbname='test_geonode_data' host="
             + os.getenv("DATABASE_HOST", "localhost")
             + " port=5432 user='geonode_data' password='geonode_data' \" \""
             + self.valid_files.get("base_file")

importer/handlers/gpkg/tests.py

Lines changed: 3 additions & 6 deletions

@@ -1,4 +1,3 @@
-import copy
 import os
 import shutil
 from django.test import TestCase, override_settings
@@ -117,15 +116,13 @@ def test_can_handle_should_return_false_for_other_files(self):
     def test_single_message_error_handler(self):
         # lets copy the file to the temporary folder
         # later will be removed
-        shutil.copy(self.valid_gpkg, '/tmp')
+        shutil.copy(self.valid_gpkg, "/tmp")
         exec_id = orchestrator.create_execution_request(
             user=get_user_model().objects.first(),
             func_name="funct1",
             step="step",
             input_params={
-                "files": {
-                    "base_file": '/tmp/valid.gpkg'
-                },
+                "files": {"base_file": "/tmp/valid.gpkg"},
                 "skip_existing_layer": True,
                 "handler_module_path": str(self.handler),
             },
@@ -148,4 +145,4 @@ def test_single_message_error_handler(self):
         )

         self.assertEqual("FAILURE", TaskResult.objects.get(task_id=str(exec_id)).status)
-        self.assertFalse(os.path.exists('/tmp/valid.gpkg'))
+        self.assertFalse(os.path.exists("/tmp/valid.gpkg"))

importer/handlers/shapefile/tests.py

Lines changed: 1 addition & 1 deletion

@@ -148,7 +148,7 @@ def test_import_with_ogr2ogr_without_errors_should_call_the_right_command(

         _open.assert_called_once()
         _open.assert_called_with(
-            f"/usr/bin/ogr2ogr --config PG_USE_COPY YES -f PostgreSQL PG:\" dbname='test_geonode_data' host="
+            "/usr/bin/ogr2ogr --config PG_USE_COPY YES -f PostgreSQL PG:\" dbname='test_geonode_data' host="
             + os.getenv("DATABASE_HOST", "localhost")
             + " port=5432 user='geonode_data' password='geonode_data' \" \""
             + self.valid_shp.get("base_file")

importer/handlers/sld/__init__.py

Whitespace-only changes.

importer/handlers/sld/exceptions.py

Lines changed: 9 additions & 0 deletions

@@ -0,0 +1,9 @@
+from rest_framework.exceptions import APIException
+from rest_framework import status
+
+
+class InvalidSldException(APIException):
+    status_code = status.HTTP_400_BAD_REQUEST
+    default_detail = "The sld provided is invalid"
+    default_code = "invalid_sld"
+    category = "importer"

importer/handlers/sld/handler.py

Lines changed: 62 additions & 0 deletions

@@ -0,0 +1,62 @@
+import logging
+
+from geonode.resource.manager import resource_manager
+from importer.handlers.common.metadata import MetadataFileHandler
+from importer.handlers.sld.exceptions import InvalidSldException
+from owslib.etree import etree as dlxml
+
+logger = logging.getLogger(__name__)
+
+
+class SLDFileHandler(MetadataFileHandler):
+    """
+    Handler to import SLD files into GeoNode data db
+    It must provide the task_lists required to comple the upload
+    """
+
+    @staticmethod
+    def can_handle(_data) -> bool:
+        """
+        This endpoint will return True or False if with the info provided
+        the handler is able to handle the file or not
+        """
+        base = _data.get("base_file")
+        if not base:
+            return False
+        return (
+            base.endswith(".sld")
+            if isinstance(base, str)
+            else base.name.endswith(".sld")
+        )
+
+    @staticmethod
+    def is_valid(files, user):
+        """
+        Define basic validation steps
+        """
+        # calling base validation checks
+
+        try:
+            with open(files.get("base_file")) as _xml:
+                dlxml.fromstring(_xml.read().encode())
+        except Exception as err:
+            raise InvalidSldException(
+                f"Uploaded document is not SLD or is invalid: {str(err)}"
+            )
+        return True
+
+    def handle_metadata_resource(self, _exec, dataset, original_handler):
+        if original_handler.can_handle_sld_file:
+            original_handler.handle_sld_file(dataset, _exec)
+        else:
+            _path = _exec.input_params.get("files", {}).get(
+                "sld_file", _exec.input_params.get("base_file", {})
+            )
+            resource_manager.exec(
+                "set_style",
+                None,
+                instance=dataset,
+                sld_file=_exec.input_params.get("files", {}).get("sld_file", ""),
+                sld_uploaded=True if _path else False,
+                vals={"dirty_state": True},
+            )
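To see the validation path end to end, here is a minimal usage sketch, assuming a local file at the hypothetical path `/tmp/style.sld`: `is_valid()` simply parses the candidate file as XML and wraps any error in the `InvalidSldException` defined above.

```python
from importer.handlers.sld.handler import SLDFileHandler
from importer.handlers.sld.exceptions import InvalidSldException

try:
    # Parses /tmp/style.sld as XML; a missing or malformed document raises
    # InvalidSldException (surfaced as HTTP 400 through the API).
    SLDFileHandler.is_valid(files={"base_file": "/tmp/style.sld"}, user=None)
    print("SLD accepted")
except InvalidSldException as exc:
    print(f"SLD rejected: {exc}")
```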
