diff --git a/geonode/geoserver/createlayer/forms.py b/geonode/geoserver/createlayer/forms.py
deleted file mode 100644
index 671ab669ab5..00000000000
--- a/geonode/geoserver/createlayer/forms.py
+++ /dev/null
@@ -1,45 +0,0 @@
-#########################################################################
-#
-# Copyright (C) 2017 OSGeo
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see .
-#
-#########################################################################
-
-from django import forms
-from django.utils.translation import gettext_lazy as _
-
-GEOMETRY_TYPES = (
- ("Point", _("Points")),
- ("LineString", _("Lines")),
- ("Polygon", _("Polygons")),
-)
-
-
-class NewDatasetForm(forms.Form):
- """
- A form to create an empty layer in PostGIS.
- """
-
- name = forms.CharField(label=_("Dataset name"), max_length=255)
- title = forms.CharField(label=_("Dataset title"), max_length=255)
- geometry_type = forms.ChoiceField(label=_("Geometry type"), choices=GEOMETRY_TYPES)
-
- permissions = forms.CharField(
- widget=forms.HiddenInput(attrs={"name": "permissions", "id": "permissions"}), required=False
- )
-
- attributes = forms.CharField(
- widget=forms.HiddenInput(attrs={"name": "attributes", "id": "attributes"}), required=False, empty_value="{}"
- )
diff --git a/geonode/geoserver/createlayer/urls.py b/geonode/geoserver/createlayer/urls.py
index 5521de8d847..f3b257d5e12 100644
--- a/geonode/geoserver/createlayer/urls.py
+++ b/geonode/geoserver/createlayer/urls.py
@@ -16,8 +16,3 @@
# along with this program. If not, see .
#
#########################################################################
-
-from django.urls import re_path
-from . import views
-
-urlpatterns = [re_path(r"$", views.dataset_create, name="dataset_create")]
diff --git a/geonode/geoserver/createlayer/views.py b/geonode/geoserver/createlayer/views.py
deleted file mode 100644
index 0a165172da7..00000000000
--- a/geonode/geoserver/createlayer/views.py
+++ /dev/null
@@ -1,63 +0,0 @@
-#########################################################################
-#
-# Copyright (C) 2017 OSGeo
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see .
-#
-#########################################################################
-
-import json
-
-from django.contrib.auth.decorators import login_required
-from django.shortcuts import render
-from django.template.defaultfilters import slugify
-from django.shortcuts import redirect
-
-from geonode.security.permissions import DEFAULT_PERMS_SPEC
-
-from .forms import NewDatasetForm
-from .utils import create_dataset
-
-
-@login_required
-def dataset_create(request, template="createlayer/dataset_create.html"):
- """
- Create an empty layer.
- """
- error = None
- if request.method == "POST":
- form = NewDatasetForm(request.POST)
- if form.is_valid():
- try:
- name = form.cleaned_data["name"]
- name = slugify(name.replace(".", "_"))
- title = form.cleaned_data["title"]
- geometry_type = form.cleaned_data["geometry_type"]
- attributes = form.cleaned_data["attributes"]
- permissions = DEFAULT_PERMS_SPEC
- layer = create_dataset(name, title, request.user.username, geometry_type, attributes)
- layer.set_permissions(json.loads(permissions), created=True)
- return redirect(layer)
- except Exception as e:
- error = f"{e} ({type(e)})"
- else:
- form = NewDatasetForm()
-
- ctx = {
- "form": form,
- "is_dataset": True,
- "error": error,
- }
-
- return render(request, template, context=ctx)
diff --git a/geonode/upload/api/views.py b/geonode/upload/api/views.py
index bd6d30f4d84..1ba3e8efd03 100644
--- a/geonode/upload/api/views.py
+++ b/geonode/upload/api/views.py
@@ -149,7 +149,7 @@ def create(self, request, *args, **kwargs):
}
# clone the memory files into local file system
- if "url" not in _data:
+ if "url" not in _data and not _data.get("is_empty", False):
storage_manager = StorageManager(
remote_files={k: v for k, v in _data.items() if k.endswith("_file")},
concrete_storage_manager=FileSystemStorageManager(),
diff --git a/geonode/upload/celery_tasks.py b/geonode/upload/celery_tasks.py
index f7a15a72a96..ac7fc7970ed 100644
--- a/geonode/upload/celery_tasks.py
+++ b/geonode/upload/celery_tasks.py
@@ -21,6 +21,7 @@
from typing import Optional
from celery import Task
+from django.db import connections
from django.utils import timezone
from django.utils.module_loading import import_string
from django.utils.translation import gettext_lazy
@@ -54,6 +55,7 @@
IMPORTER_RESOURCE_CREATION_RATE_LIMIT,
)
from geonode.upload.utils import (
+ DEFAULT_PK_COLUMN_NAME,
call_rollback_function,
call_on_failure,
error_handler,
@@ -399,7 +401,7 @@ def publish_resource(
_overwrite = _exec.input_params.get("overwrite_existing_layer")
_publisher = DataPublisher(handler_module_path)
-
+ kwargs.update({"exec_id": execution_id})
# extracting the crs and the resource name, are needed for publish the resource
data = _publisher.extract_resource_to_publish(_files, action, layer_name, alternate, **kwargs)
if data:
@@ -747,10 +749,25 @@ def _create_field(dynamic_model_schema, field, _kwargs):
row_to_insert.append(_create_field(dynamic_model_schema, field, _kwargs))
if row_to_insert:
- # the build creation improves the overall permformance with the DB
- FieldSchema.objects.bulk_create(row_to_insert, 30)
+ if dynamic_model_schema.managed:
+                # If the dynamic model schema is managed, we have to save each single field
+                # one by one. Doing this allows Django to create the columns in the database
+ for field in row_to_insert:
+ if field.name == DEFAULT_PK_COLUMN_NAME:
+                        # Django automatically creates a column named "id" and uses it as the primary key by default;
+                        # if we try to create the FID column as needed, it will raise an error.
+ # in this way we will just update the name from ID to FID
+ with connections[os.getenv("DEFAULT_BACKEND_DATASTORE", "datastore")].cursor() as cursor:
+ cursor.execute(
+ f"ALTER TABLE {dynamic_model_schema.name} RENAME COLUMN id TO {DEFAULT_PK_COLUMN_NAME};"
+ )
+ else:
+ field.save()
+ else:
+                # the bulk creation improves the overall performance with the DB
+ FieldSchema.objects.bulk_create(row_to_insert, 30)
+ # fixing the schema model in django
- del row_to_insert
return "dynamic_model", layer_name, execution_id
diff --git a/geonode/upload/datastore.py b/geonode/upload/datastore.py
index 4fb3eb8c995..c50a702021c 100644
--- a/geonode/upload/datastore.py
+++ b/geonode/upload/datastore.py
@@ -44,12 +44,10 @@ def input_is_valid(self):
"""
Perform basic validation steps
"""
- if self.files:
- return self.handler.is_valid(self.files, self.user, execution_id=self.execution_id)
url = orchestrator.get_execution_object(exec_id=self.execution_id).input_params.get("url")
if url:
return self.handler.is_valid_url(url)
- return False
+ return self.handler.is_valid(self.files, self.user, execution_id=self.execution_id)
def _import_and_register(self, execution_id, task_name, **kwargs):
"""
diff --git a/geonode/upload/handlers/README.md b/geonode/upload/handlers/README.md
index 1c85117d55e..7d67ca55cb3 100644
--- a/geonode/upload/handlers/README.md
+++ b/geonode/upload/handlers/README.md
@@ -99,7 +99,7 @@ class BaseVectorFileHandler(BaseHandler):
return
@staticmethod
- def create_ogr2ogr_command(files, original_name, ovverwrite_layer, alternate):
+ def create_ogr2ogr_command(files, original_name, ovverwrite_layer, alternate, **kwargs):
"""
Define the ogr2ogr command to be executed.
This is a default command that is needed to import a vector file. For Raster file
diff --git a/geonode/upload/handlers/common/vector.py b/geonode/upload/handlers/common/vector.py
index b5f900a2528..a5642ec52df 100644
--- a/geonode/upload/handlers/common/vector.py
+++ b/geonode/upload/handlers/common/vector.py
@@ -28,7 +28,7 @@
from geonode.security.permissions import _to_compact_perms
from geonode.storage.manager import StorageManager
from geonode.upload.publisher import DataPublisher
-from geonode.upload.utils import call_rollback_function
+from geonode.upload.utils import DEFAULT_PK_COLUMN_NAME, call_rollback_function
import json
import logging
import os
@@ -115,15 +115,13 @@ class BaseVectorFileHandler(BaseHandler):
ira.UPSERT.value: ("start_import", "geonode.upload.upsert_data", "geonode.upload.refresh_geonode_resource"),
}
- default_pk_column_name = "fid"
-
@property
def have_table(self):
return True
@property
def default_geometry_column_name(self):
- return "geometry"
+ return "geom"
@property
def supported_file_extension_config(self):
@@ -254,7 +252,7 @@ def _delete_resource(self, resource, catalog, workspace):
catalog.delete(res, purge="all", recurse=True)
@staticmethod
- def create_ogr2ogr_command(files, original_name, ovverwrite_layer, alternate):
+ def create_ogr2ogr_command(files, original_name, ovverwrite_layer, alternate, **kwargs):
"""
Define the ogr2ogr command to be executed.
This is a default command that is needed to import a vector file
@@ -279,7 +277,7 @@ def create_ogr2ogr_command(files, original_name, ovverwrite_layer, alternate):
# vrt file is aready created in import_resource and vrt will be auto detected by ogr2ogr
# and also the base_file will work so can be used as alternative for fallback which will also be autodeteced by ogr2ogr.
input_file = files.get("temp_vrt_file") or files.get("base_file")
- options += f'"{input_file}"' + f" -lco FID={BaseVectorFileHandler.default_pk_column_name} "
+ options += f'"{input_file}"' + f" -lco FID={DEFAULT_PK_COLUMN_NAME} "
options += f'-nln {alternate} "{original_name}"'
@@ -609,7 +607,10 @@ def setup_dynamic_model(
- celery_group -> the celery group of the field creation
"""
- layer_name = self.fixup_name(layer.GetName())
+ layer_name = self.fixup_name(layer.GetName() if isinstance(layer, ogr.Layer) else layer)
+ is_dynamic_model_managed = orchestrator.get_execution_object(execution_id).input_params.get(
+ "is_dynamic_model_managed", False
+ )
workspace = DataPublisher(None).workspace
user_datasets = Dataset.objects.filter(owner=username, alternate__iexact=f"{workspace.name}:{layer_name}")
dynamic_schema = ModelSchema.objects.filter(name__iexact=layer_name)
@@ -632,7 +633,7 @@ def setup_dynamic_model(
dynamic_schema = ModelSchema.objects.create(
name=layer_name,
db_name="datastore",
- managed=False,
+ managed=is_dynamic_model_managed,
db_table_name=layer_name,
)
elif (
@@ -648,7 +649,7 @@ def setup_dynamic_model(
dynamic_schema, _ = ModelSchema.objects.get_or_create(
name=layer_name,
db_name="datastore",
- managed=False,
+ managed=is_dynamic_model_managed,
db_table_name=layer_name,
)
else:
@@ -674,25 +675,8 @@ def create_dynamic_model_fields(
return_celery_group: bool = True,
):
# retrieving the field schema from ogr2ogr and converting the type to Django Types
- layer_schema = [
- {"name": self.fixup_name(x.name), "class_name": self._get_type(x), "null": True} for x in layer.schema
- ]
- if (
- layer.GetGeometryColumn()
- or self.default_geometry_column_name
- and ogr.GeometryTypeToName(layer.GetGeomType()) not in ["Geometry Collection", "Unknown (any)", "None"]
- ):
- # the geometry colum is not returned rom the layer.schema, so we need to extract it manually
- layer_schema += [
- {
- "name": layer.GetGeometryColumn() or self.default_geometry_column_name,
- "class_name": GEOM_TYPE_MAPPING.get(
- self.promote_to_multi(ogr.GeometryTypeToName(layer.GetGeomType()))
- ),
- "dim": (2 if not ogr.GeometryTypeToName(layer.GetGeomType()).lower().startswith("3d") else 3),
- "authority": self.identify_authority(layer),
- }
- ]
+
+ layer_schema = self._define_dynamic_layer_schema(layer, execution_id=execution_id)
if not return_celery_group:
return layer_schema
@@ -717,6 +701,29 @@ def create_dynamic_model_fields(
return dynamic_model_schema, celery_group
+ def _define_dynamic_layer_schema(self, layer, **kwargs):
+ layer_schema = [
+ {"name": self.fixup_name(x.name), "class_name": self._get_type(x), "null": True} for x in layer.schema
+ ]
+ if (
+ layer.GetGeometryColumn()
+ or self.default_geometry_column_name
+ and ogr.GeometryTypeToName(layer.GetGeomType()) not in ["Geometry Collection", "Unknown (any)", "None"]
+ ):
+            # the geometry column is not returned from the layer.schema, so we need to extract it manually
+ layer_schema += [
+ {
+ "name": layer.GetGeometryColumn() or self.default_geometry_column_name,
+ "class_name": GEOM_TYPE_MAPPING.get(
+ self.promote_to_multi(ogr.GeometryTypeToName(layer.GetGeomType()))
+ ),
+ "dim": (2 if not ogr.GeometryTypeToName(layer.GetGeomType()).lower().startswith("3d") else 3),
+ "authority": self.identify_authority(layer),
+ }
+ ]
+
+ return layer_schema
+
def promote_to_multi(self, geometry_name: str):
"""
If needed change the name of the geometry, by promoting it to Multi
@@ -773,8 +780,7 @@ def create_geonode_resource(
self.handle_xml_file(saved_dataset, _exec)
self.handle_sld_file(saved_dataset, _exec)
-
- resource_manager.set_thumbnail(None, instance=saved_dataset)
+ self.handle_thumbnail(saved_dataset, _exec)
ResourceBase.objects.filter(alternate=alternate).update(dirty_state=False)
@@ -784,19 +790,25 @@ def create_geonode_resource(
if settings.IMPORTER_ENABLE_DYN_MODELS and self.have_table:
from django.db import connections
+ # then we can check for the PK
column = None
connection = connections["datastore"]
table_name = saved_dataset.alternate.split(":")[1]
+
+ schema = ModelSchema.objects.filter(name=table_name).first()
+ schema.managed = False
+ schema.save()
+
with connection.cursor() as cursor:
column = connection.introspection.get_primary_key_columns(cursor, table_name)
if column:
+                # the relative model schema was already fetched above and forced to
+                # NOT managed; here we flag the introspected PK column on its field schema
field = FieldSchema.objects.filter(name=column[0], model_schema__name=table_name).first()
if field:
field.kwargs.update({"primary_key": True})
field.save()
else:
- # getting the relative model schema
- schema = ModelSchema.objects.filter(name=table_name).first()
# creating the field needed as primary key
pk_field = FieldSchema(
name=column[0],
@@ -871,6 +883,9 @@ def handle_sld_file(self, saved_dataset: Dataset, _exec: ExecutionRequest):
vals={"dirty_state": True},
)
+ def handle_thumbnail(self, saved_dataset: Dataset, _exec: ExecutionRequest):
+ resource_manager.set_thumbnail(None, instance=saved_dataset)
+
def create_resourcehandlerinfo(
self,
handler_module_path: str,
@@ -1079,7 +1094,7 @@ def upsert_validation(self, files, execution_id, **kwargs: dict) -> Tuple[bool,
if "authority" in field and not skip_geom_eval:
if db_value := target_field.model_schema.as_model().objects.first():
skip_geom_eval = True
- if not str(db_value.geometry.srid) in field["authority"]:
+ if not str(db_value.geom.srid) in field["authority"]:
message = f"The file provided have a different authority ({field['authority']}) compared to the one in the DB: {db_value}"
raise UpsertException(message)
@@ -1117,7 +1132,7 @@ def __get_new_and_original_schema(self, files, execution_id):
layer = layers[0]
# evaluate if some of the fid entry is null. if is null we stop the workflow
# the user should provide the completed list with the fid set
- sql_query = f'SELECT * FROM "{layer.GetName()}" WHERE "fid" IS NULL'
+ sql_query = f'SELECT * FROM "{layer.GetName()}" WHERE "{DEFAULT_PK_COLUMN_NAME}" IS NULL'
# Execute the SQL query to the layer
result = all_layers.ExecuteSQL(sql_query)
@@ -1346,7 +1361,7 @@ def validate_feature(self, feature):
def extract_upsert_key(self, exec_obj, dynamic_model_instance):
# first we check if the upsert key is passed by the call
- key = exec_obj.input_params.get("upsert_key", "fid")
+ key = exec_obj.input_params.get("upsert_key", DEFAULT_PK_COLUMN_NAME)
if not key:
# if the upsert key is not passed, we use the primary key as upsert key
# the primary key is defined in the Fields of the dynamic model
@@ -1431,7 +1446,7 @@ def import_next_step(
actual_step,
layer_name,
alternate,
- exa.UPLOAD.value,
+ _exec.input_params.get("action", exa.UPLOAD.value),
)
import_orchestrator.apply_async(task_params, kwargs)
@@ -1439,7 +1454,7 @@ def import_next_step(
call_rollback_function(
execution_id,
handlers_module_path=handlers_module_path,
- prev_action=exa.UPLOAD.value,
+ prev_action=_exec.input_params.get("action", exa.UPLOAD.value),
layer=layer_name,
alternate=alternate,
error=e,
@@ -1476,7 +1491,7 @@ def import_with_ogr2ogr(
ogr_exe = shutil.which("ogr2ogr")
options = orchestrator.load_handler(handler_module_path).create_ogr2ogr_command(
- files, original_name, ovverwrite_layer, alternate
+ files, original_name, ovverwrite_layer, alternate, execution_id=execution_id
)
_datastore = settings.DATABASES["datastore"]
diff --git a/geonode/upload/handlers/csv/handler.py b/geonode/upload/handlers/csv/handler.py
index 7032364ab76..8170670e450 100644
--- a/geonode/upload/handlers/csv/handler.py
+++ b/geonode/upload/handlers/csv/handler.py
@@ -124,7 +124,7 @@ def get_ogr2ogr_driver(self):
return ogr.GetDriverByName("CSV")
@staticmethod
- def create_ogr2ogr_command(files, original_name, ovverwrite_layer, alternate):
+ def create_ogr2ogr_command(files, original_name, ovverwrite_layer, alternate, **kwargs):
"""
Define the ogr2ogr command to be executed.
This is a default command that is needed to import a vector file
diff --git a/geonode/upload/handlers/csv/tests.py b/geonode/upload/handlers/csv/tests.py
index e7109b71ba8..3ce2b48f586 100644
--- a/geonode/upload/handlers/csv/tests.py
+++ b/geonode/upload/handlers/csv/tests.py
@@ -176,7 +176,7 @@ def test_import_with_ogr2ogr_without_errors_should_call_the_right_command(self,
+ '\' " "'
+ self.valid_csv
+ '" -lco FID=fid'
- + ' -nln alternate "dataset" -oo KEEP_GEOM_COLUMNS=NO -lco GEOMETRY_NAME=geometry -oo "GEOM_POSSIBLE_NAMES=geom*,the_geom*,wkt_geom" -oo "X_POSSIBLE_NAMES=x,long*" -oo "Y_POSSIBLE_NAMES=y,lat*"', # noqa
+ + ' -nln alternate "dataset" -oo KEEP_GEOM_COLUMNS=NO -lco GEOMETRY_NAME=geom -oo "GEOM_POSSIBLE_NAMES=geom*,the_geom*,wkt_geom" -oo "X_POSSIBLE_NAMES=x,long*" -oo "Y_POSSIBLE_NAMES=y,lat*"', # noqa
stdout=-1,
stderr=-1,
shell=True, # noqa
diff --git a/geonode/upload/handlers/empty_dataset/__init__.py b/geonode/upload/handlers/empty_dataset/__init__.py
new file mode 100644
index 00000000000..3d2698aa4e1
--- /dev/null
+++ b/geonode/upload/handlers/empty_dataset/__init__.py
@@ -0,0 +1,18 @@
+#########################################################################
+#
+# Copyright (C) 2025 OSGeo
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#########################################################################
diff --git a/geonode/upload/handlers/empty_dataset/handler.py b/geonode/upload/handlers/empty_dataset/handler.py
new file mode 100644
index 00000000000..2dc7342b86b
--- /dev/null
+++ b/geonode/upload/handlers/empty_dataset/handler.py
@@ -0,0 +1,252 @@
+#########################################################################
+#
+# Copyright (C) 2024 OSGeo
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#########################################################################
+import logging
+
+from celery import chord, group
+from geonode.upload.api.exceptions import ImportException
+from geonode.upload.handlers.base import BaseHandler
+from geonode.upload.handlers.empty_dataset.utils import (
+ add_attributes_to_xml,
+ apply_restrictions_to_xml,
+ should_apply_restrictions,
+ validate_attributes,
+ base_xml,
+)
+from geonode.upload.handlers.utils import EMPTY_DATASET_SUPPORTED_TYPES, GEOM_TYPE_MAPPING, drop_dynamic_model_schema
+from geonode.upload.orchestrator import orchestrator
+from geonode.upload.handlers.common.vector import BaseVectorFileHandler, import_next_step
+from geonode.upload.handlers.empty_dataset.serializer import EmptyDatasetSerializer
+from geonode.resource.enumerator import ExecutionRequestAction as exa
+from geonode.upload.utils import DEFAULT_PK_COLUMN_NAME
+
+
+logger = logging.getLogger("importer")
+
+BBOX = [-180, -90, 180, 90]
+
+
+class EmptyDatasetHandler(BaseVectorFileHandler):
+ """
+    Handler to create an empty dataset in the GeoNode data db.
+    It must provide the task_lists required to complete the upload
+ """
+
+    # we don't need the upload action for the empty dataset
+ TASKS = BaseVectorFileHandler.TASKS.copy()
+ TASKS.pop(exa.UPLOAD.value)
+ # but we need the create one via the UI
+ TASKS.update(
+ {
+ exa.CREATE.value: (
+ "start_import",
+ "geonode.upload.import_resource",
+ "geonode.upload.publish_resource",
+ "geonode.upload.create_geonode_resource",
+ )
+ }
+ )
+
+ @property
+ def supported_file_extension_config(self):
+ return {}
+
+ @property
+ def default_geometry_column_name(self):
+ return "geom"
+
+ @staticmethod
+ def can_handle(_data) -> bool:
+ """
+ This endpoint will return True or False if with the info provided
+ the handler is able to handle the file or not
+ """
+ base = _data.get("base_file")
+ if not base and _data.get("is_empty"):
+ return True
+
+ return False
+
+ @staticmethod
+ def can_do(action) -> bool:
+ """
+        This endpoint will return True or False depending on whether the
+        handler is able to handle the provided action or not
+ """
+ return action in EmptyDatasetHandler.TASKS
+
+ @staticmethod
+ def has_serializer(_data) -> bool:
+ """
+ This endpoint should return (if set) the custom serializer used in the API
+ to validate the input resource
+ """
+ if _data.get("attributes", None) is not None and _data.get("action") in EmptyDatasetHandler.TASKS:
+ return EmptyDatasetSerializer
+
+ @staticmethod
+ def is_valid(files, user, **kwargs):
+ # improve the logic once the structure is defined
+ exec_obj = orchestrator.get_execution_object(exec_id=kwargs.get("execution_id"))
+ params = exec_obj.input_params
+ if "geom" in params and "title" in params and "attributes" in params and params.get("is_empty"):
+ return True
+ else:
+ raise ImportException("The payload provided is not valid for an empty dataset")
+
+ @staticmethod
+ def extract_params_from_data(_data, action=None):
+ """
+ Remove from the _data the params that needs to save into the executionRequest object
+ all the other are returned
+ """
+ return {
+ "title": _data.pop("title", None),
+ "geom": _data.pop("geom", None),
+ "attributes": _data.pop("attributes", None),
+ "action": _data.pop("action", None),
+ "is_empty": _data.pop("is_empty", True),
+ "is_dynamic_model_managed": _data.pop("is_dynamic_model_managed", True),
+ }, _data
+
+ def import_resource(self, files, execution_id, **kwargs):
+ # define the dynamic model
+ try:
+
+ logger.info("Total number of layers available: 1")
+ exec_obj = orchestrator.get_execution_object(execution_id)
+
+ _input = {**exec_obj.input_params, **{"total_layers": 1}}
+ orchestrator.update_execution_request_status(execution_id=str(execution_id), input_params=_input)
+ dynamic_model = None
+
+ input_params = exec_obj.input_params
+
+ layer_name = self.fixup_name(input_params["title"])
+ task_name = "geonode.upload.import_resource"
+
+ (
+ dynamic_model,
+ alternate,
+ celery_group,
+ ) = self.setup_dynamic_model(
+ input_params.get("title"),
+ execution_id=execution_id,
+ should_be_overwritten=False,
+ username=exec_obj.user,
+ )
+
+ group_to_call = group(
+ celery_group.set(link_error=["dynamic_model_error_callback"]),
+ # ogr_res.set(link_error=["dynamic_model_error_callback"]),
+ )
+ # prepare the async chord workflow with the on_success and on_fail methods
+ workflow = chord(group_to_call)( # noqa
+ import_next_step.s(
+ execution_id,
+ str(self), # passing the handler module path
+ task_name,
+ layer_name,
+ alternate,
+ **kwargs,
+ )
+ )
+ return [layer_name], [alternate], execution_id
+ except Exception as e:
+ logger.error(e)
+ if dynamic_model:
+ """
+ In case of fail, we want to delete the dynamic_model schema and his field
+ to keep the DB in a consistent state
+ """
+ drop_dynamic_model_schema(dynamic_model)
+ raise e
+
+ def _get_type(self, _type):
+ return EMPTY_DATASET_SUPPORTED_TYPES.get(_type)
+
+ def _define_dynamic_layer_schema(self, layer, **kwargs):
+ exec_obj = orchestrator.get_execution_object(kwargs.get("execution_id"))
+ input_params = exec_obj.input_params
+ layer_schema = [
+ {
+ "name": self.fixup_name(name),
+ "class_name": self._get_type(options["type"]),
+ "null": options.get("nillable", False),
+ }
+ for name, options in input_params["attributes"].items()
+ ]
+ layer_schema += [
+ {
+ "name": self.default_geometry_column_name,
+ "class_name": GEOM_TYPE_MAPPING.get(
+ self.promote_to_multi(input_params.get("geom", self.default_geometry_column_name))
+ ),
+ "dim": 2,
+ "authority": "EPSG:4326",
+ }
+ ]
+ layer_schema += [
+ {
+ "name": DEFAULT_PK_COLUMN_NAME,
+ "class_name": "django.db.models.BigAutoField",
+ "null": False,
+ "primary_key": True,
+ }
+ ]
+ return layer_schema
+
+ def extract_resource_to_publish(self, files, action, layer_name, alternate, **kwargs):
+ return [{"name": alternate or layer_name, "crs": "EPSG:4326", "exec_id": kwargs.get("exec_id")}]
+
+ def handle_thumbnail(self, saved_dataset, _exec):
+ """
+ we can skip the thumbnail creation for an empty dataset
+ """
+ pass
+
+ @staticmethod
+ def publish_resources(resources, catalog, store, workspace):
+ # creating the gs resource as always
+ BaseVectorFileHandler().publish_resources(resources, catalog, store, workspace)
+ res = resources[0]
+ exec_obj = orchestrator.get_execution_object(exec_id=res.get("exec_id"))
+ attributes = exec_obj.input_params.get("attributes")
+ normalized_attributes = {BaseHandler().fixup_name(key): value for key, value in attributes.items()}
+ validate_attributes(normalized_attributes)
+
+ xml = add_attributes_to_xml(
+ {
+ **{DEFAULT_PK_COLUMN_NAME: {"type": "integer", "nillable": False}},
+ **normalized_attributes,
+ # include geometry as an available attribute
+ "geom": {"type": exec_obj.input_params.get("geom"), "nillable": False},
+ },
+ base_xml.format(name=res.get("name")),
+ )
+
+ if should_apply_restrictions(normalized_attributes):
+ xml = apply_restrictions_to_xml(normalized_attributes, xml)
+
+ url = (
+ f"{catalog.service_url}/workspaces/{workspace.name}/datastores/{store.name}/featuretypes/{res.get('name')}"
+ )
+
+ req = catalog.http_request(url, data=xml, method="PUT", headers={"Content-Type": "application/xml"})
+ req.raise_for_status()
+ return True
diff --git a/geonode/upload/handlers/empty_dataset/serializer.py b/geonode/upload/handlers/empty_dataset/serializer.py
new file mode 100644
index 00000000000..b40aad68816
--- /dev/null
+++ b/geonode/upload/handlers/empty_dataset/serializer.py
@@ -0,0 +1,37 @@
+#########################################################################
+#
+# Copyright (C) 2024 OSGeo
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+#########################################################################
+from rest_framework import serializers
+from dynamic_rest.serializers import DynamicModelSerializer
+
+from geonode.base.models import ResourceBase
+
+
+class EmptyDatasetSerializer(DynamicModelSerializer):
+ class Meta:
+ ref_name = "EmptyDatasetSerializer"
+ model = ResourceBase
+ view_name = "importer_upload"
+ fields = ("title", "geom", "attributes", "action", "is_empty", "is_dynamic_model_managed")
+
+ title = serializers.CharField()
+ geom = serializers.CharField()
+ attributes = serializers.JSONField()
+ action = serializers.CharField(required=True)
+ is_empty = serializers.BooleanField(default=True, read_only=True, required=False)
+ is_dynamic_model_managed = serializers.BooleanField(default=True, read_only=True, required=False)
diff --git a/geonode/upload/handlers/empty_dataset/tests.py b/geonode/upload/handlers/empty_dataset/tests.py
new file mode 100644
index 00000000000..449d717d344
--- /dev/null
+++ b/geonode/upload/handlers/empty_dataset/tests.py
@@ -0,0 +1,247 @@
+#########################################################################
+#
+# Copyright (C) 2024 OSGeo
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+#########################################################################
+from collections import namedtuple
+from unittest.mock import MagicMock
+from django.test import TestCase
+from geonode.resource.models import ExecutionRequest
+from geonode.upload.handlers.empty_dataset.handler import EmptyDatasetHandler
+from django.contrib.auth import get_user_model
+from geonode.base.populate_test_data import create_single_dataset
+from geonode.upload.handlers.empty_dataset.utils import add_attributes_to_xml, validate_attributes, base_xml
+
+
+class FakeObj:
+ name = "GeoNode"
+ status_code = 201
+
+ def raise_for_status(self):
+ return
+
+
+class TestEmptyDatasetHandler(TestCase):
+ databases = ("default", "datastore")
+
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
+ cls.handler = EmptyDatasetHandler()
+ cls.user, _ = get_user_model().objects.get_or_create(username="admin")
+ cls.owner = get_user_model().objects.first()
+ cls.layer = create_single_dataset(name="empty_dataset", owner=cls.owner)
+ cls.attributes = {
+ "field_str": {"type": "string"},
+ "field_int": {"type": "integer"},
+ "field_date": {"type": "date"},
+ "field_float": {"type": "float"},
+ "field_str_options": {"type": "string", "nillable": False, "options": ["A", "B", "C"]},
+ "field_int_options": {"type": "integer", "nillable": False, "options": [1, 2, 3]},
+ "field_int_range": {"type": "integer", "nillable": False, "range": {"min": 1, "max": 10}},
+ "field_float_options": {"type": "float", "nillable": False, "options": [1.2, 2.4, 3.6]},
+ "field_float_range": {"type": "float", "nillable": False, "range": {"min": 1.5, "max": 10.5}},
+ }
+
+ def test_supported_file_extension_config_should_be_empty(self):
+ """
+ Config should not be exposed to be listed in the upload panel
+ """
+ self.assertDictEqual(self.handler.supported_file_extension_config, {})
+
+ def test_can_handle(self):
+ """
+ Should be true if the is_empty key is passed in the payload, false if not
+ """
+ payload = {"is_empty": True}
+ self.assertTrue(self.handler.can_handle(payload))
+ payload = {"is_empty": False}
+ self.assertFalse(self.handler.can_handle(payload))
+ payload = {"base_file": "some file"}
+ self.assertFalse(self.handler.can_handle(payload))
+
+ def test_can_do(self):
+ """
+ Handler should return True if can do the CREATE operation but not the UPLOAD
+ """
+ self.assertTrue(self.handler.can_do("create"))
+ self.assertFalse(self.handler.can_do("upload"))
+
+ def test_has_serializer(self):
+ """
+        Should return the serializer if the attributes are present and if it is a create operation
+ """
+ payload = {"attributes": "some attributes", "action": "create"}
+ self.assertIsNotNone(self.handler.has_serializer(payload))
+ payload = {"attributes": "some attributes", "action": "upload"}
+ self.assertIsNone(self.handler.has_serializer(payload))
+ payload = {"action": "create"}
+ self.assertIsNone(self.handler.has_serializer(payload))
+
+ def test_is_valid(self):
+ """
+ Should be true if the payload is correct as expected
+ """
+ try:
+ exec_req = ExecutionRequest.objects.create(
+ user=self.user,
+ func_name="test",
+ input_params={
+ "geom": "Geometry",
+ "title": "Geometry",
+ "attributes": {
+ "field_str": {"type": "string"},
+ "field_int": {"type": "integer"},
+ },
+ "is_empty": True,
+ },
+ )
+
+ self.assertTrue(self.handler.is_valid({}, self.user, execution_id=str(exec_req.exec_id)))
+ finally:
+ if exec_req:
+ exec_req.delete()
+
+ def test_is_valid_is_false(self):
+ """
+ Should be false if the payload is not as expected
+ """
+ try:
+ exec_req = ExecutionRequest.objects.create(
+ user=self.user,
+ func_name="test",
+ input_params={
+ "geom": "Geometry",
+ "title": "Geometry",
+ "is_empty": True,
+ },
+ )
+ with self.assertRaises(Exception):
+ self.handler.is_valid({}, self.user, execution_id=str(exec_req.exec_id))
+ finally:
+ if exec_req:
+ exec_req.delete()
+
+ def test__define_dynamic_layer_schema(self):
+ try:
+ exec_req = ExecutionRequest.objects.create(
+ user=self.user,
+ func_name="test",
+ input_params={
+ "geom": "Geometry",
+ "title": "Geometry",
+ "attributes": {
+ "field_str": {"type": "string"},
+ "field_int": {"type": "integer"},
+ },
+ "is_empty": True,
+ },
+ )
+
+ expected_schema = [
+ {"name": "field_int", "class_name": "django.db.models.IntegerField", "null": False},
+ {"name": "field_str", "class_name": "django.db.models.CharField", "null": False},
+ {"name": "geom", "class_name": None, "dim": 2, "authority": "EPSG:4326"},
+ {"name": "fid", "class_name": "django.db.models.BigAutoField", "null": False, "primary_key": True},
+ ]
+ output_schema = self.handler._define_dynamic_layer_schema(None, execution_id=str(exec_req.exec_id))
+ self.assertEqual(expected_schema, output_schema)
+
+ finally:
+ if exec_req:
+ exec_req.delete()
+
+ def test_publish_resources(self):
+ """
+ Should publish the resource via the GeoServer catalog.
+ If we add the attributes, the call will also create the layer
+        so we don't have to call the super()
+ """
+ try:
+ exec_req = ExecutionRequest.objects.create(
+ user=self.user,
+ func_name="test",
+ input_params={
+ "geom": "Geometry",
+ "title": "Geometry",
+ "attributes": {
+ "field_str": {"type": "string"},
+ "field_int": {"type": "integer"},
+ },
+ "is_empty": True,
+ },
+ )
+ foo = FakeObj()
+ catalog = MagicMock()
+ catalog.http_request.return_value = foo
+ resources = [{"name": "my_empty_dataset", "crs": "EPSG:4326", "exec_id": str(exec_req.exec_id)}]
+ self.assertTrue(self.handler.publish_resources(resources, catalog, foo, foo))
+ finally:
+ if exec_req:
+ exec_req.delete()
+
+ def test_publish_resources_raise_exp_from_geoserver(self):
+ """
+ Should publish the resource via the GeoServer catalog.
+ If we add the attributes, the call will also create the layer
+        so we don't have to call the super()
+ """
+ try:
+ exec_req = ExecutionRequest.objects.create(
+ user=self.user,
+ func_name="test",
+ input_params={
+ "geom": "Geometry",
+ "title": "Geometry",
+ "attributes": {
+ "field_str": {"type": "string"},
+ "field_int": {"type": "integer"},
+ },
+ "is_empty": True,
+ },
+ )
+ foo = namedtuple("FakeObj", field_names=["name", "status_code"])
+ foo.name = "GeoNode"
+ foo.status_code = 500
+ catalog = MagicMock()
+ catalog.http_request.return_value = foo
+ resources = [{"name": "my_empty_dataset", "crs": "EPSG:4326", "exec_id": str(exec_req.exec_id)}]
+ with self.assertRaises(Exception):
+ self.handler.publish_resources(resources, catalog, foo, foo)
+ finally:
+ if exec_req:
+ exec_req.delete()
+
+ def test_utils_validate_attributes(self):
+ """
+ attributes should be valid
+ """
+ self.assertTrue(validate_attributes(self.attributes))
+
+ broken_attributes = {"field_float": {"fakeey": "float"}}
+ with self.assertRaises(Exception) as k:
+ validate_attributes(broken_attributes)
+
+ self.assertEqual(str(k.exception), "None is not a valid type for attribute field_float")
+
+    def test_utils_add_attributes_to_xml(self):
+        """
+        attributes should be appended to the base featureType xml
+        """
+        subset_attributes = {"field_str": {"type": "string"}, "field_int": {"type": "integer"}}
+        expected = b"<featureType><name>myname</name><nativeName>myname</nativeName><title>myname</title><srs>EPSG:4326</srs><latLonBoundingBox><minx>-180</minx><maxx>180</maxx><miny>-90</miny><maxy>90</maxy><crs>EPSG:4326</crs></latLonBoundingBox><attributes><attribute><name>field_str</name><binding>java.lang.String</binding><nillable>false</nillable></attribute><attribute><name>field_int</name><binding>java.lang.Integer</binding><nillable>false</nillable></attribute></attributes></featureType>"  # noqa
+ actual_xml = add_attributes_to_xml(subset_attributes, base_xml.format(name="myname"))
+ self.assertEqual(expected, actual_xml)
diff --git a/geonode/upload/handlers/empty_dataset/utils.py b/geonode/upload/handlers/empty_dataset/utils.py
new file mode 100644
index 00000000000..21e0ef4046e
--- /dev/null
+++ b/geonode/upload/handlers/empty_dataset/utils.py
@@ -0,0 +1,126 @@
+#########################################################################
+#
+# Copyright (C) 2024 OSGeo
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+#########################################################################
+import logging
+
+import xml.etree.ElementTree as ET
+from geonode.upload.api.exceptions import ImportException
+
+
+logger = logging.getLogger("importer")
+
+BBOX = [-180, -90, 180, 90]
+DATA_QUALITY_MESSAGE = "Created with GeoNode"
+ATTRIBUTE_TYPE_MAP = {
+ "string": "java.lang.String",
+ "float": "java.lang.Float",
+ "integer": "java.lang.Integer",
+ "date": "java.sql.Date",
+ "Point": "com.vividsolutions.jts.geom.Point",
+ "LineString": "com.vividsolutions.jts.geom.LineString",
+ "Polygon": "com.vividsolutions.jts.geom.Polygon",
+}
+RESTRICTION_OPTIONS_TYPE_MAP = {"string": "string", "float": "float", "integer": "int"}
+
+base_xml = (
+    "<featureType>"
+    "<name>{name}</name>"
+    "<nativeName>{name}</nativeName>"
+    "<title>{name}</title>"
+    "<srs>EPSG:4326</srs>"
+    f"<latLonBoundingBox><minx>{BBOX[0]}</minx><maxx>{BBOX[2]}</maxx><miny>{BBOX[1]}</miny><maxy>{BBOX[3]}</maxy>"
+    "<crs>EPSG:4326</crs></latLonBoundingBox>"
+    "<attributes></attributes>"
+    "</featureType>"
+)
+
+
+def validate_attributes(attributes_dict):
+ for name in attributes_dict:
+ info = attributes_dict[name]
+ attr_type = info.get("type")
+ attr_options = info.get("options")
+ attr_range = info.get("range")
+ if len(name) == 0:
+ msg = f"You must provide an attribute name for attribute of type {attr_type}"
+ logger.error(msg)
+ raise ImportException(msg)
+ if not ATTRIBUTE_TYPE_MAP.get(attr_type):
+ msg = f"{attr_type} is not a valid type for attribute {name}"
+ logger.error(msg)
+ raise ImportException(msg)
+ if attr_type == "date" and attr_options:
+ msg = f"{attr_type} does not support options restriction"
+ logger.error(msg)
+ raise ImportException(msg)
+ if attr_type in ["date", "string"] and attr_range:
+ msg = f"{attr_type} does not support range restriction"
+ logger.error(msg)
+ raise ImportException(msg)
+ return True
+
+
+def should_apply_restrictions(attributes_dict):
+ for name in attributes_dict:
+ info = attributes_dict[name]
+ attr_options = info.get("options")
+ attr_range = info.get("range")
+ if attr_options or attr_range:
+ return True
+ return False
+
+
+def add_attributes_to_xml(attributes_dict, xml):
+ root = ET.fromstring(xml)
+ attributes_tag = root.find("attributes")
+ for name in attributes_dict:
+ info = attributes_dict[name]
+ attr_name = name
+ attr_type = ATTRIBUTE_TYPE_MAP.get(info.get("type"))
+ attr_nillable = "false"
+ if info.get("nillable"):
+ attr_nillable = "true"
+ attribute_tag = ET.SubElement(attributes_tag, "attribute")
+ ET.SubElement(attribute_tag, "name").text = f"{attr_name}"
+ ET.SubElement(attribute_tag, "binding").text = f"{attr_type}"
+ ET.SubElement(attribute_tag, "nillable").text = f"{attr_nillable}"
+ return ET.tostring(root)
+
+
+def apply_restrictions_to_xml(attributes_dict, xml):
+ root = ET.fromstring(xml)
+ attributes_tag = root.find("attributes")
+ for attribute in attributes_tag.findall("attribute"):
+ name = attribute.find("name").text
+ info = attributes_dict.get(name, None)
+ if info:
+ restrictions_range = info.get("range")
+ if restrictions_range:
+ min_restrictions_range = restrictions_range.get("min", None)
+ max_restrictions_range = restrictions_range.get("max", None)
+ range_tag = ET.SubElement(attribute, "range")
+ if min_restrictions_range is not None:
+ ET.SubElement(range_tag, "min").text = f"{min_restrictions_range}"
+ if max_restrictions_range is not None:
+ ET.SubElement(range_tag, "max").text = f"{max_restrictions_range}"
+ restrictions_options = info.get("options")
+ if restrictions_options:
+ options_tag = ET.SubElement(attribute, "options")
+ for option in restrictions_options:
+ ET.SubElement(options_tag, RESTRICTION_OPTIONS_TYPE_MAP.get(info.get("type"))).text = f"{option}"
+ return ET.tostring(root)
diff --git a/geonode/upload/handlers/geojson/handler.py b/geonode/upload/handlers/geojson/handler.py
index 4829674faee..73ccab474d7 100644
--- a/geonode/upload/handlers/geojson/handler.py
+++ b/geonode/upload/handlers/geojson/handler.py
@@ -123,7 +123,7 @@ def get_ogr2ogr_driver(self):
return ogr.GetDriverByName("GeoJSON")
@staticmethod
- def create_ogr2ogr_command(files, original_name, ovverwrite_layer, alternate):
+ def create_ogr2ogr_command(files, original_name, ovverwrite_layer, alternate, **kwargs):
"""
Define the ogr2ogr command to be executed.
This is a default command that is needed to import a vector file
diff --git a/geonode/upload/handlers/geojson/tests.py b/geonode/upload/handlers/geojson/tests.py
index c037a86592e..00e88f7059c 100644
--- a/geonode/upload/handlers/geojson/tests.py
+++ b/geonode/upload/handlers/geojson/tests.py
@@ -145,7 +145,7 @@ def test_import_with_ogr2ogr_without_errors_should_call_the_right_command(self,
+ '\' " "'
+ self.valid_files.get("base_file")
+ '" -lco FID=fid'
- + ' -nln alternate "dataset" -lco GEOMETRY_NAME=geometry',
+ + ' -nln alternate "dataset" -lco GEOMETRY_NAME=geom',
stdout=-1,
stderr=-1,
shell=True, # noqa
diff --git a/geonode/upload/handlers/kml/handler.py b/geonode/upload/handlers/kml/handler.py
index 0250959ae2d..ecdc12b41ba 100644
--- a/geonode/upload/handlers/kml/handler.py
+++ b/geonode/upload/handlers/kml/handler.py
@@ -149,7 +149,7 @@ def handle_xml_file(self, saved_dataset, _exec):
pass
@staticmethod
- def create_ogr2ogr_command(files, original_name, ovverwrite_layer, alternate):
+ def create_ogr2ogr_command(files, original_name, ovverwrite_layer, alternate, **kwargs):
"""
Define the ogr2ogr command to be executed.
This is a default command that is needed to import a vector file
diff --git a/geonode/upload/handlers/shapefile/handler.py b/geonode/upload/handlers/shapefile/handler.py
index 8548692cb7f..0dc04a47ab8 100644
--- a/geonode/upload/handlers/shapefile/handler.py
+++ b/geonode/upload/handlers/shapefile/handler.py
@@ -164,7 +164,7 @@ def get_ogr2ogr_driver(self):
return ogr.GetDriverByName("ESRI Shapefile")
@staticmethod
- def create_ogr2ogr_command(files, original_name, ovverwrite_layer, alternate):
+ def create_ogr2ogr_command(files, original_name, ovverwrite_layer, alternate, **kwargs):
"""
Define the ogr2ogr command to be executed.
This is a default command that is needed to import a vector file
diff --git a/geonode/upload/handlers/shapefile/tests.py b/geonode/upload/handlers/shapefile/tests.py
index 6624f58dd90..7bbe23045ca 100644
--- a/geonode/upload/handlers/shapefile/tests.py
+++ b/geonode/upload/handlers/shapefile/tests.py
@@ -172,7 +172,7 @@ def test_import_with_ogr2ogr_without_errors_should_call_the_right_command(self,
+ '\' " "'
+ self.valid_shp.get("base_file")
+ '" -lco FID=fid'
- + ' -nln alternate "dataset" -lco precision=no -lco GEOMETRY_NAME=geometry ',
+ + ' -nln alternate "dataset" -lco precision=no -lco GEOMETRY_NAME=geom ',
stdout=-1,
stderr=-1,
shell=True, # noqa
diff --git a/geonode/upload/handlers/utils.py b/geonode/upload/handlers/utils.py
index ea5900b728e..61b3a1b6778 100644
--- a/geonode/upload/handlers/utils.py
+++ b/geonode/upload/handlers/utils.py
@@ -31,6 +31,13 @@
logger = logging.getLogger("importer")
+EMPTY_DATASET_SUPPORTED_TYPES = {
+ "integer": "django.db.models.IntegerField",
+ "date": "django.db.models.DateField",
+ "float": "django.db.models.FloatField",
+ "string": "django.db.models.CharField",
+}
+
STANDARD_TYPE_MAPPING = {
"Integer64": "django.db.models.IntegerField",
diff --git a/geonode/upload/settings.py b/geonode/upload/settings.py
index 1496f62a0a2..aee5c730dfc 100644
--- a/geonode/upload/settings.py
+++ b/geonode/upload/settings.py
@@ -38,4 +38,5 @@
"geonode.upload.handlers.tiles3d.handler.Tiles3DFileHandler",
"geonode.upload.handlers.remote.tiles3d.RemoteTiles3DResourceHandler",
"geonode.upload.handlers.remote.wms.RemoteWMSResourceHandler",
+ "geonode.upload.handlers.empty_dataset.handler.EmptyDatasetHandler",
]
diff --git a/geonode/upload/utils.py b/geonode/upload/utils.py
index 610f89ac6ca..29128a6867a 100644
--- a/geonode/upload/utils.py
+++ b/geonode/upload/utils.py
@@ -37,6 +37,9 @@
from django.template.loader import render_to_string
+DEFAULT_PK_COLUMN_NAME = "fid"
+
+
def get_max_upload_size(slug):
try:
max_size = UploadSizeLimit.objects.get(slug=slug).max_size
diff --git a/geonode/urls.py b/geonode/urls.py
index 9cdd06680d3..b80cfbbe5de 100644
--- a/geonode/urls.py
+++ b/geonode/urls.py
@@ -170,11 +170,6 @@
]
if check_ogc_backend(geoserver.BACKEND_PACKAGE):
- if settings.CREATE_LAYER:
- urlpatterns += [ # '',
- re_path(r"^createlayer/", include("geonode.geoserver.createlayer.urls")),
- ]
-
from geonode.geoserver.views import get_capabilities
# GeoServer Helper Views