review points
mwigham committed Mar 1, 2024
1 parent 0b49c2c commit d0ee38d
Showing 17 changed files with 2,058 additions and 152 deletions.
108 changes: 44 additions & 64 deletions src/apis/dataset/dataset_api.py
@@ -3,7 +3,7 @@
from flask_restx import Namespace, Resource
from apis.dataset.DataCatalogLODHandler import DataCatalogLODHandler
from util.mime_type_util import MimeType
from models.ResourceURILevel import ResourceURILevel
from models.DatasetApiUriLevel import DatasetApiUriLevel
import util.ld_util
from util.APIUtil import APIUtil

@@ -81,7 +81,7 @@ def get(self, number=None):
All triples for the Dataset and its DataDownloads are included.
"""
dataset_uri = util.ld_util.generate_lod_resource_uri(
ResourceURILevel.DATASET, number, current_app.config["BENG_DATA_DOMAIN"]
DatasetApiUriLevel.DATASET, number, current_app.config["BENG_DATA_DOMAIN"]
)
# check if resource exists
if self.is_dataset(dataset_uri) is False:
@@ -97,7 +97,7 @@ def get(self, number=None):
best_match = request.accept_mimetypes.best_match(
lod_server_supported_mime_types
)
mime_type = None
mime_type = MimeType.JSON_LD # we choose to set a default if the user has not specified
if best_match is not None:
mime_type = MimeType(best_match)

@@ -118,25 +118,18 @@ def get(self, number=None):
"Could not generate an HTML view for this resource",
)

if mime_type:
# other content formats
logger.info(f"Get the serialization for dataset {dataset_uri}.")
res_string = DataCatalogLODHandler(current_app.config).get_dataset(
dataset_uri, mime_format=mime_type.to_ld_format()
)
if res_string:
return Response(res_string, mimetype=mime_type.value)
logger.error(
f"Could not fetch the serialization for dataset {dataset_uri}."
)
return APIUtil.toErrorResponse(
"bad_request", "Invalid URI or return format"
)

logger.error("Not a proper mime type in the request.")
# other content formats
logger.info(f"Get the serialization for dataset {dataset_uri}.")
res_string = DataCatalogLODHandler(current_app.config).get_dataset(
dataset_uri, mime_format=mime_type.to_ld_format()
)
if res_string:
return Response(res_string, mimetype=mime_type.value)
logger.error(
f"Could not fetch the serialization for dataset {dataset_uri}."
)
return APIUtil.toErrorResponse(
"internal_server_error",
"Error: No mime type detected...",
"bad_request", "Invalid URI or return format"
)

def is_dataset(self, dataset_uri: str) -> bool:
@@ -172,7 +165,7 @@ def get(self, number=None):
All triples describing the DataCatalog and its Datasets are included.
"""
data_catalog_uri = util.ld_util.generate_lod_resource_uri(
ResourceURILevel.DATACATALOG,
DatasetApiUriLevel.DATACATALOG,
number,
current_app.config["BENG_DATA_DOMAIN"],
)
@@ -191,7 +184,7 @@ def get(self, number=None):
best_match = request.accept_mimetypes.best_match(
lod_server_supported_mime_types
)
mime_type = None
mime_type = MimeType.JSON_LD # we choose to set a default if the user has not specified
if best_match is not None:
mime_type = MimeType(best_match)

@@ -213,27 +206,21 @@ def get(self, number=None):
"internal_server_error",
"Could not generate an HTML view for this resource",
)
if mime_type:
# other mime types
logger.info(
f"Getting the RDF in proper serialization format for data catalog: {data_catalog_uri}."
)
res_string = DataCatalogLODHandler(current_app.config).get_data_catalog(
data_catalog_uri, mime_format=mime_type.to_ld_format()
)
if res_string:
return Response(res_string, mimetype=mime_type.value)
logger.error(
f"Error in fetching the serialization for data catalog: {data_catalog_uri}."
)
return APIUtil.toErrorResponse(
"bad_request", "Invalid URI or return format"
)

logger.error("Not a proper mime type in the request.")
# other mime types
logger.info(
f"Getting the RDF in proper serialization format for data catalog: {data_catalog_uri}."
)
res_string = DataCatalogLODHandler(current_app.config).get_data_catalog(
data_catalog_uri, mime_format=mime_type.to_ld_format()
)
if res_string:
return Response(res_string, mimetype=mime_type.value)
logger.error(
f"Error in fetching the serialization for data catalog: {data_catalog_uri}."
)
return APIUtil.toErrorResponse(
"internal_server_error",
"Error: No mime type detected...",
"bad_request", "Invalid URI or return format"
)

def is_data_catalog(self, data_catalog_uri: str) -> bool:
@@ -271,7 +258,7 @@ class LODDataDownloadAPI(LODDataAPI):
def get(self, number=None):
"""Get the RDF for the DataDownload."""
data_download_uri = util.ld_util.generate_lod_resource_uri(
ResourceURILevel.DATADOWNLOAD,
DatasetApiUriLevel.DATADOWNLOAD,
number,
current_app.config["BENG_DATA_DOMAIN"],
)
@@ -287,7 +274,7 @@ def get(self, number=None):
best_match = request.accept_mimetypes.best_match(
lod_server_supported_mime_types
)
mime_type = None
mime_type = MimeType.JSON_LD # we choose to set a default if the user has not specified
if best_match is not None:
mime_type = MimeType(best_match)

@@ -310,27 +297,20 @@ def get(self, number=None):
"Could not generate an HTML view for this resource",
)

if mime_type:
# other return formats
logger.info(
f"Getting the RDF in proper serialization format for data download: {data_download_uri}."
)
res_string = DataCatalogLODHandler(current_app.config).get_data_download(
data_download_uri, mime_format=mime_type.to_ld_format()
)
if res_string:
return Response(res_string, mimetype=mime_type.value)
logger.error(
f"Error in fetching the serialization for data download: {data_download_uri}."
)
return APIUtil.toErrorResponse(
"bad_request", "Invalid URI or return format"
)

logger.error("Not a proper mime type in the request.")
# other return formats
logger.info(
f"Getting the RDF in proper serialization format for data download: {data_download_uri}."
)
res_string = DataCatalogLODHandler(current_app.config).get_data_download(
data_download_uri, mime_format=mime_type.to_ld_format()
)
if res_string:
return Response(res_string, mimetype=mime_type.value)
logger.error(
f"Error in fetching the serialization for data download: {data_download_uri}."
)
return APIUtil.toErrorResponse(
"internal_server_error",
"Error: No mime type detected...",
"bad_request", "Invalid URI or return format"
)

def is_data_download(self, data_download_uri: str) -> bool:
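The change repeated across all three handlers in this file is the JSON-LD default: `mime_type = None` becomes `mime_type = MimeType.JSON_LD`, which makes the old `if mime_type:` guard and the "No mime type detected" error branch redundant. A minimal sketch of that defaulting logic, using a simplified stand-in for the project's `MimeType` enum (the real one lives in `util.mime_type_util`):

```python
# Sketch only: the content-negotiation default introduced in this commit,
# with a reduced stand-in enum instead of util.mime_type_util.MimeType.
from enum import Enum
from typing import Optional


class MimeType(Enum):
    JSON_LD = "application/ld+json"
    TURTLE = "text/turtle"


def negotiate(best_match: Optional[str]) -> MimeType:
    """Pick the response format, defaulting to JSON-LD when nothing matched."""
    mime_type = MimeType.JSON_LD  # default if the client did not ask for a format
    if best_match is not None:
        mime_type = MimeType(best_match)  # look the enum member up by value
    return mime_type


assert negotiate(None) is MimeType.JSON_LD
assert negotiate("text/turtle") is MimeType.TURTLE
```

With the default in place every request reaches the serialisation step, so the remaining failure mode (an empty serialisation result) maps to the `bad_request` response instead of the old "No mime type detected" error.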
48 changes: 22 additions & 26 deletions src/apis/resource/resource_api.py
@@ -5,7 +5,7 @@

import util.ld_util

from models.ResourceURILevel import ResourceURILevel
from models.ResourceApiUriLevel import ResourceApiUriLevel
from util.APIUtil import APIUtil
from util.mime_type_util import MimeType

@@ -39,14 +39,14 @@ def get(self, identifier, cat_type="program"):
best_match = request.accept_mimetypes.best_match(
lod_server_supported_mime_types
)
mime_type = None
mime_type = MimeType.JSON_LD
if best_match is not None:
mime_type = MimeType(best_match)

lod_url = None
try:
lod_url = util.ld_util.generate_lod_resource_uri(
ResourceURILevel(cat_type),
ResourceApiUriLevel(cat_type),
identifier,
current_app.config.get("BENG_DATA_DOMAIN"),
)
@@ -78,33 +78,29 @@ def get(self, identifier, cat_type="program"):
"Could not generate an HTML view for this resource.",
)

if mime_type:
logger.info(
f"Getting the RDF in the proper serialization format for {lod_url}."
)
rdf_graph = util.ld_util.get_lod_resource_from_rdf_store(
lod_url,
current_app.config.get("SPARQL_ENDPOINT"),
current_app.config.get("URI_NISV_ORGANISATION"),
logger.info(
f"Getting the RDF in the proper serialization format for {lod_url}."
)
rdf_graph = util.ld_util.get_lod_resource_from_rdf_store(
lod_url,
current_app.config.get("SPARQL_ENDPOINT"),
current_app.config.get("URI_NISV_ORGANISATION"),
)
if rdf_graph is not None:
serialised_graph = rdf_graph.serialize(
format=mime_type.to_ld_format(), auto_compact=True
)
if rdf_graph is not None:
serialised_graph = rdf_graph.serialize(
format=mime_type.to_ld_format(), auto_compact=True
)
if serialised_graph:
return Response(serialised_graph, mimetype=mime_type.value)
else:
return APIUtil.toErrorResponse(
"internal_server_error", "Serialisation failed"
)
if serialised_graph:
return Response(serialised_graph, mimetype=mime_type.value)
else:
return APIUtil.toErrorResponse(
"internal_server_error",
"No graph created. Check your resource type and identifier",
"internal_server_error", "Serialisation failed"
)

logger.error("No mime type was given.")
return APIUtil.toErrorResponse("bad_request", "No mime type detected...")
else:
return APIUtil.toErrorResponse(
"internal_server_error",
"No graph created. Check your resource type and identifier",
)

def _get_lod_view_resource(
self, resource_url: str, sparql_endpoint: str, nisv_organisation_uri: str
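Here too the `if mime_type:` guard disappears, so every request ends in the same rdflib serialisation call. An illustrative sketch of that step, assuming `MimeType.JSON_LD.to_ld_format()` resolves to rdflib's `"json-ld"` format name (the graph contents below are made up for the example; built-in JSON-LD support needs rdflib >= 6):

```python
# Illustrative graph; in the API the graph comes from get_lod_resource_from_rdf_store.
from rdflib import Graph, Literal, URIRef
from rdflib.namespace import RDFS

g = Graph()
g.add(
    (
        URIRef("http://data.example.org/id/program/1234"),
        RDFS.label,
        Literal("Example programme"),
    )
)

# auto_compact compacts the JSON-LD output against the graph's namespace bindings.
serialised = g.serialize(format="json-ld", auto_compact=True)
print(serialised)
```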
@@ -2,11 +2,7 @@


@unique
class ResourceURILevel(Enum):
PROGRAM = "program"
SERIES = "series"
SEASON = "season"
SCENE = "scene"
class DatasetApiUriLevel(Enum):
DATASET = "dataset"
DATACATALOG = "datacatalog"
DATADOWNLOAD = "datadownload"
9 changes: 9 additions & 0 deletions src/models/ResourceApiUriLevel.py
@@ -0,0 +1,9 @@
from enum import Enum, unique


@unique
class ResourceApiUriLevel(Enum):
PROGRAM = "program"
SERIES = "series"
SEASON = "season"
SCENE = "scene"
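The single `ResourceURILevel` enum is split into `DatasetApiUriLevel` (dataset, datacatalog, datadownload) and `ResourceApiUriLevel` (program, series, season, scene), so each API imports only the levels it can serve. A hypothetical sketch of how such a level enum can drive URI construction; `build_uri` is illustrative and not the project's `util.ld_util.generate_lod_resource_uri`, though the `/id/<level>/<identifier>` path shape mirrors the fixture URLs in the test changes below:

```python
from enum import Enum, unique


@unique
class ResourceApiUriLevel(Enum):
    PROGRAM = "program"
    SERIES = "series"
    SEASON = "season"
    SCENE = "scene"


def build_uri(level: ResourceApiUriLevel, identifier: str, domain: str) -> str:
    """Compose a LOD resource URI from level, identifier and data domain (illustrative)."""
    return f"{domain.rstrip('/')}/id/{level.value}/{identifier}"


# Example with a made-up identifier and domain:
print(build_uri(ResourceApiUriLevel.PROGRAM, "1234", "http://data.example.org"))
# http://data.example.org/id/program/1234
```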
32 changes: 28 additions & 4 deletions src/tests/unit_tests/apis/dataset/conftest.py
@@ -1,19 +1,43 @@
import pytest


"""------------------------ DATA CATALOG HANDLER ----------------------"""
@pytest.fixture(scope="module")
def datacatalog_url():
def genDatacatalogURL(identifier):
return f"/id/datacatalog/{identifier}"

return genDatacatalogURL


@pytest.fixture(scope="module")
def datadownload_url():
def genDatadownloadURL(identifier):
return f"/id/datadownload/{identifier}"

return genDatadownloadURL


@pytest.fixture(scope="module")
def dataset_url():
def genDatasetURL(identifier):
return f"/id/dataset/{identifier}"

return genDatasetURL


@pytest.fixture(scope="module")
def i_datacatalog(load_file_as_graph):
return load_file_as_graph(__file__, "example_data_catalog.ttl")
"""Returns graph of an example data catalog"""
return load_file_as_graph(__file__, "data_catalog_unit_test.ttl")


@pytest.fixture(scope="module")
def i_dataset(load_file_as_graph):
return load_file_as_graph(__file__, "example_dataset.ttl")
"""Returns graph of an example dataset"""
return load_file_as_graph(__file__, "dataset.json")


@pytest.fixture(scope="module")
def i_datadownload(load_file_as_graph):
return load_file_as_graph(__file__, "example_datadownload.ttl")
"""Returns graph of an example datadownload"""
return load_file_as_graph(__file__, "datadownload.json")
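The URL fixtures change from fixed strings to factories, so a test can build a URL for whichever identifier it exercises. A self-contained example of the pattern (the test itself is illustrative, not taken from this commit):

```python
import pytest


@pytest.fixture(scope="module")
def dataset_url():
    # Factory-style fixture, mirroring the new conftest.py shape.
    def gen_dataset_url(identifier):
        return f"/id/dataset/{identifier}"

    return gen_dataset_url


def test_dataset_url_builds_per_identifier(dataset_url):
    # The test receives the factory and calls it with the identifier it needs.
    assert dataset_url("0001") == "/id/dataset/0001"
    assert dataset_url("abc-123") == "/id/dataset/abc-123"
```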
@@ -6,7 +6,7 @@


from util.ld_util import generate_lod_resource_uri
from models.ResourceURILevel import ResourceURILevel
from models.DatasetApiUriLevel import DatasetApiUriLevel
from util.mime_type_util import MimeType
from rdflib import Graph
from rdflib.compare import to_isomorphic, graph_diff
@@ -21,18 +21,18 @@

DUMMY_DATA_DOWNLOAD_ID = "0001"
DUMMY_DATA_DOWNLOAD_URI = generate_lod_resource_uri(
ResourceURILevel.DATADOWNLOAD, DUMMY_DATA_DOWNLOAD_ID, DUMMY_BENG_DATA_DOMAIN
DatasetApiUriLevel.DATADOWNLOAD, DUMMY_DATA_DOWNLOAD_ID, DUMMY_BENG_DATA_DOMAIN
)


DUMMY_DATASET_ID = "0001"
DUMMY_DATASET_URI = generate_lod_resource_uri(
ResourceURILevel.DATASET, DUMMY_DATASET_ID, DUMMY_BENG_DATA_DOMAIN
DatasetApiUriLevel.DATASET, DUMMY_DATASET_ID, DUMMY_BENG_DATA_DOMAIN
)

DUMMY_DATA_CATALOG_ID = "0001"
DUMMY_DATA_CATALOG_URI = generate_lod_resource_uri(
ResourceURILevel.DATACATALOG, DUMMY_DATA_CATALOG_ID, DUMMY_BENG_DATA_DOMAIN
DatasetApiUriLevel.DATACATALOG, DUMMY_DATA_CATALOG_ID, DUMMY_BENG_DATA_DOMAIN
)

XML_ENCODING_DECLARATION = '<?xml version="1.0" encoding="utf-8"?>'