diff --git a/data_management/prov.py b/data_management/prov.py index 765fe32f..3f4b285d 100644 --- a/data_management/prov.py +++ b/data_management/prov.py @@ -1,49 +1,96 @@ import io import json -from prov.constants import PROV_ROLE, PROV_TYPE +from django.conf import settings +from prov.constants import PROV, PROV_ROLE, PROV_TYPE import prov.dot +from prov.identifier import QualifiedName import prov.model import prov.serializers +from rdflib import Graph from data_management.views import external_object from . import models -def _generate_object_meta(obj): +DCAT_VOCAB_PREFIX = 'dcat' +DCMITYPE_VOCAB_PREFIX = 'dcmitype' +DCTERMS_VOCAB_PREFIX = 'dcterms' +FAIR_VOCAB_PREFIX = 'fair' +FOAF_VOCAB_PREFIX = 'foaf' + + +def _generate_object_meta(obj, vocab_namespaces): data = [] - data.append(('last_updated', obj.last_updated)) + data.append( + ( + QualifiedName(vocab_namespaces[DCTERMS_VOCAB_PREFIX], 'modified'), + obj.last_updated, + ) + ) if obj.storage_location: - data.append(('storage', str(obj.storage_location))) + data.append( + ( + QualifiedName(PROV, 'atLocation'), + str(obj.storage_location), + ) + ) if obj.description: - data.append(('description', obj.description)) + data.append( + ( + QualifiedName(vocab_namespaces[DCTERMS_VOCAB_PREFIX], 'description'), + obj.description, + ) + ) for data_product in obj.data_products.all(): - data.append(('namespace', str(data_product.namespace))) - data.append(('name', str(data_product.name))) - data.append(('version', str(data_product.version))) + data.append( + ( + QualifiedName(vocab_namespaces[FAIR_VOCAB_PREFIX], 'namespace'), + str(data_product.namespace), + ) + ) + data.append( + ( + QualifiedName(vocab_namespaces[DCTERMS_VOCAB_PREFIX], 'title'), + str(data_product.name), + ) + ) + data.append( + ( + QualifiedName(vocab_namespaces[DCAT_VOCAB_PREFIX], 'hasVersion'), + str(data_product.version), + ) + ) if obj.file_type is not None: - data.append(('file_type', str(obj.file_type.name))) + data.append( + ( + QualifiedName(vocab_namespaces[DCTERMS_VOCAB_PREFIX], 'format'), + str(obj.file_type.name), + ) + ) return data -def _add_author_agents(authors, doc, entity): +def _add_author_agents(authors, doc, entity, reg_uri_prefix, vocab_namespaces): """ Add the authors to the entity as agents. 
@param authors: a list of authors from the Author table @param doc: a ProvDocument that the agent will belong to @param entity: the entity to attach the authors to + @param reg_uri_prefix: a str containing the name of the prefix + @param vocab_namespaces: a dict containing the Namespaces for the vocab """ for author in authors: - agent_id = f'api/author/{author.id}' + agent_id = f'{reg_uri_prefix}:api/author/{author.id}' agent = doc.get_record(agent_id) # check to see if we have already created an agent for this author if len(agent) > 0: @@ -54,21 +101,35 @@ def _add_author_agents(authors, doc, entity): author_agent = doc.agent( agent_id, { - PROV_TYPE: 'prov:Person', - 'name': author.name, - 'identifier': author.identifier, + PROV_TYPE: QualifiedName(PROV, 'Person'), + QualifiedName( + vocab_namespaces[FOAF_VOCAB_PREFIX], 'name' + ): author.name, + QualifiedName( + vocab_namespaces[DCTERMS_VOCAB_PREFIX], 'identifier' + ): author.identifier, }, ) - doc.wasAttributedTo(entity, author_agent, None, {PROV_ROLE: 'author'}) + doc.wasAttributedTo( + entity, + author_agent, + None, + {PROV_ROLE: QualifiedName( + vocab_namespaces[DCTERMS_VOCAB_PREFIX], 'creator')}, + ) -def _add_code_repo_release(cr_activity, doc, code_repo): +def _add_code_repo_release( + cr_activity, doc, code_repo, reg_uri_prefix, vocab_namespaces +): """ Add code repo release to the code run activity. @param cr_activity: a prov.activity representing the code run @param doc: a ProvDocument that the entities will belong to @param code_repo: a code_repo object + @param reg_uri_prefix: a str containing the name of the prefix + @param vocab_namespaces: a dict containing the Namespaces for the vocab """ try: @@ -78,73 +139,224 @@ def _add_code_repo_release(cr_activity, doc, code_repo): if code_repo_release is None: code_release_entity = doc.entity( - f'api/code_repo/{code_repo.id}', - ( - *_generate_object_meta(code_repo), - ), + f'{reg_uri_prefix}:api/object/{code_repo.id}', + (*_generate_object_meta(code_repo, vocab_namespaces),), ) else: code_release_entity = doc.entity( - f'api/code_repo_release/{code_repo_release.id}', + f'{reg_uri_prefix}:api/code_repo_release/{code_repo_release.id}', ( - *_generate_object_meta(code_repo), - ('name', code_repo_release.name), - ('version', code_repo_release.version), - ('website', code_repo_release.website), + ( + PROV_TYPE, + QualifiedName(vocab_namespaces[DCMITYPE_VOCAB_PREFIX], 'Software') + ), + *_generate_object_meta(code_repo, vocab_namespaces), + ( + QualifiedName(vocab_namespaces[DCTERMS_VOCAB_PREFIX], 'title'), + code_repo_release.name, + ), + ( + QualifiedName(vocab_namespaces[DCAT_VOCAB_PREFIX], 'hasVersion'), + code_repo_release.version, + ), + ( + QualifiedName(vocab_namespaces[FAIR_VOCAB_PREFIX], 'website'), + code_repo_release.website, + ), ), ) - _add_author_agents(code_repo.authors.all(), doc, code_release_entity) - doc.used(cr_activity, code_release_entity, None, None, {PROV_ROLE: 'software'}) + _add_author_agents( + code_repo.authors.all(), + doc, + code_release_entity, + reg_uri_prefix, + vocab_namespaces, + ) + doc.used( + cr_activity, + code_release_entity, + None, + None, + {PROV_ROLE: QualifiedName(vocab_namespaces[FAIR_VOCAB_PREFIX], 'software')}, + ) + + +def _add_code_run(dp_entity, doc, code_run, reg_uri_prefix, vocab_namespaces): + """ + Add code repo release to the code run activity. 
+ + @param dp_entity: a prov.entity representing the data_product + @param doc: a ProvDocument that the entities will belong to + @param code_run: a code_run object + @param reg_uri_prefix: a str containing the name of the prefix + @param vocab_namespaces: a dict containing the Namespaces for the vocab + + @return a prov.activity representing the code run + + """ + cr_activity = doc.activity( + f'{reg_uri_prefix}:api/code_run/{code_run.id}', + str(code_run.run_date), + None, + { + PROV_TYPE: QualifiedName(vocab_namespaces[FAIR_VOCAB_PREFIX], 'Run'), + QualifiedName( + vocab_namespaces[DCTERMS_VOCAB_PREFIX], 'description' + ): code_run.description, + }, + ) + + doc.wasGeneratedBy(dp_entity, cr_activity) + + user_authors = models.UserAuthor.objects.filter(user=code_run.updated_by) + if len(user_authors) == 0: + run_agent = doc.agent( + f'{reg_uri_prefix}:api/user/{code_run.updated_by.id}', + { + PROV_TYPE: QualifiedName(PROV, 'Person'), + QualifiedName( + vocab_namespaces[FOAF_VOCAB_PREFIX], 'name' + ): code_run.updated_by.full_name(), + }, + ) + else: + # we have an author linked to the user + agent_id = f'{reg_uri_prefix}:api/author/{user_authors[0].author.id}' + agent = doc.get_record(agent_id) + # check to see if we have already created an agent for this author + if len(agent) > 0: + # The prov documentation says a ProvRecord is returned, but actually a + # list of ProvRecord is returned + run_agent = agent[0] + else: + run_agent = doc.agent( + agent_id, + { + PROV_TYPE: QualifiedName(PROV, 'Person'), + QualifiedName( + vocab_namespaces[FOAF_VOCAB_PREFIX], 'name' + ): user_authors[0].author.name, + QualifiedName( + vocab_namespaces[FAIR_VOCAB_PREFIX], 'identifier' + ): user_authors[0].author.identifier, + }, + ) + doc.wasStartedBy( + cr_activity, + run_agent, + None, + str(code_run.run_date), + None, + {PROV_ROLE: QualifiedName(vocab_namespaces[FAIR_VOCAB_PREFIX], 'code_runner')}, + ) + + return cr_activity -def _add_external_object(doc, data_product, data_product_entity): + +def _add_external_object( + doc, data_product, data_product_entity, reg_uri_prefix, vocab_namespaces +): """ Add an external_object entity to the provenance document for the given data product. 
@param doc: a ProvDocument that the entity will belong to @param data_product: a data_product from the DataProduct table @param data_product_entity: a prov.entity representing the data_product + @param reg_uri_prefix: a str containing the name of the prefix + @param vocab_namespaces: a dict containing the Namespaces for the vocab """ # check for external object linked to the data product try: external_object = data_product.external_object - except ( - models.DataProduct.external_object.RelatedObjectDoesNotExist, - ): + except (models.DataProduct.external_object.RelatedObjectDoesNotExist,): return data = [] - data.append(('title', external_object.title)) - data.append(('release_date', external_object.release_date)) - data.append(('version', external_object.version)) + data.append( + (PROV_TYPE, QualifiedName(vocab_namespaces[DCAT_VOCAB_PREFIX], 'Dataset')) + ) + + data.append( + ( + QualifiedName(vocab_namespaces[DCTERMS_VOCAB_PREFIX], 'title'), + external_object.title, + ) + ) + data.append( + ( + QualifiedName(vocab_namespaces[DCTERMS_VOCAB_PREFIX], 'issued'), + external_object.release_date, + ) + ) + data.append( + ( + QualifiedName(vocab_namespaces[DCAT_VOCAB_PREFIX], 'hasVersion'), + external_object.version, + ) + ) if external_object.identifier: - data.append(('identifier', external_object.identifier)) + data.append( + ( + QualifiedName(vocab_namespaces[DCTERMS_VOCAB_PREFIX], 'identifier'), + external_object.identifier, + ) + ) if external_object.alternate_identifier: - data.append(('alternate_identifier', external_object.alternate_identifier)) + data.append( + ( + QualifiedName( + vocab_namespaces[FAIR_VOCAB_PREFIX], 'alternate_identifier' + ), + external_object.alternate_identifier, + ) + ) if external_object.alternate_identifier_type: data.append( - ('alternate_identifier_type', external_object.alternate_identifier_type) + ( + QualifiedName( + vocab_namespaces[FAIR_VOCAB_PREFIX], 'alternate_identifier_type' + ), + external_object.alternate_identifier_type, + ) ) if external_object.description: - data.append(('description', external_object.description)) + data.append( + ( + QualifiedName(vocab_namespaces[DCTERMS_VOCAB_PREFIX], 'description'), + external_object.description, + ) + ) if external_object.original_store: - data.append(('original_store', str(external_object.original_store))) + data.append( + ( + QualifiedName(PROV, 'atLocation'), + str(external_object.original_store), + ) + ) external_object_entity = doc.entity( - f'api/external_object/{external_object.id}', (*data,) + f'{reg_uri_prefix}:api/external_object/{external_object.id}', (*data,) ) doc.specializationOf(external_object_entity, data_product_entity) def _add_linked_files( - cr_activity, doc, dp_entity, dp_id, input_objects, object_components + cr_activity, + doc, + dp_entity, + dp_id, + input_objects, + object_components, + reg_uri_prefix, + vocab_namespaces, ): """ Add linked files to the code run activity. 
@@ -156,6 +368,8 @@ def _add_linked_files( @param input_objects: boolean, 'True' if the object_components represent input objects @param object_components: a list of object_components from the ObjectComponent table + @param reg_uri_prefix: a str containing the name of the prefix + @param vocab_namespaces: a dict containing the Namespaces for the vocab """ for component in object_components: @@ -167,24 +381,40 @@ def _add_linked_files( # we have already added the original data product continue - file_id = f'api/data_product/{data_product.id}' + file_id = f'{reg_uri_prefix}:api/data_product/{data_product.id}' file_entity = doc.entity( file_id, ( - (PROV_TYPE, 'file'), - *_generate_object_meta(obj), + ( + PROV_TYPE, + QualifiedName(vocab_namespaces[DCAT_VOCAB_PREFIX], 'Dataset'), + ), + *_generate_object_meta(obj, vocab_namespaces), ), ) # add external object linked to the data product - _add_external_object(doc, data_product, file_entity) + _add_external_object( + doc, data_product, file_entity, reg_uri_prefix, vocab_namespaces + ) - _add_author_agents(obj.authors.all(), doc, file_entity) + _add_author_agents( + obj.authors.all(), doc, file_entity, reg_uri_prefix, vocab_namespaces + ) if input_objects: # add link to the code run doc.used( - cr_activity, file_entity, None, None, {PROV_ROLE: 'input data'}) + cr_activity, + file_entity, + None, + None, + { + PROV_ROLE: QualifiedName( + vocab_namespaces[FAIR_VOCAB_PREFIX], 'input_data' + ) + }, + ) # add the link to the data product doc.wasDerivedFrom(dp_entity, file_entity) else: @@ -192,46 +422,82 @@ def _add_linked_files( doc.wasGeneratedBy(file_entity, cr_activity) -def _add_model_config(cr_activity, doc, model_config): +def _add_model_config(cr_activity, doc, model_config, reg_uri_prefix, vocab_namespaces): """ Add model config to the code run activity. @param cr_activity: a prov.activity representing the code run @param doc: a ProvDocument that the entities will belong to @param model_config: a model_config object + @param reg_uri_prefix: a str containing the name of the prefix + @param vocab_namespaces: a dict containing the Namespaces for the vocab """ model_config_entity = doc.entity( - f'api/object/{model_config.id}', (*_generate_object_meta(model_config),) + f'{reg_uri_prefix}:api/object/{model_config.id}', + (*_generate_object_meta(model_config, vocab_namespaces),), ) - _add_author_agents(model_config.authors.all(), doc, model_config_entity) + _add_author_agents( + model_config.authors.all(), + doc, + model_config_entity, + reg_uri_prefix, + vocab_namespaces, + ) doc.used( - cr_activity, model_config_entity, None, None, {PROV_ROLE: 'model configuration'} + cr_activity, + model_config_entity, + None, + None, + { + PROV_ROLE: QualifiedName( + vocab_namespaces[FAIR_VOCAB_PREFIX], 'model_configuration' + ) + }, ) -def _add_submission_script(cr_activity, doc, submission_script): +def _add_submission_script( + cr_activity, doc, submission_script, reg_uri_prefix, vocab_namespaces +): """ Add submission script to the code run activity. 
@param cr_activity: a prov.activity representing the code run @param doc: a ProvDocument that the entities will belong to @param submission_script: a submission_script object + @param reg_uri_prefix: a str containing the name of the prefix + @param vocab_namespaces: a dict containing the Namespaces for the vocab """ submission_script_entity = doc.entity( - 'api/object/' + str(submission_script.id), - (*_generate_object_meta(submission_script),), + f'{reg_uri_prefix}:api/object/{submission_script.id}', + ( + ( + PROV_TYPE, + QualifiedName(vocab_namespaces[DCMITYPE_VOCAB_PREFIX], 'Software') + ), + *_generate_object_meta(submission_script, vocab_namespaces),), ) - _add_author_agents(submission_script.authors.all(), doc, submission_script_entity) + _add_author_agents( + submission_script.authors.all(), + doc, + submission_script_entity, + reg_uri_prefix, + vocab_namespaces, + ) doc.used( cr_activity, submission_script_entity, None, None, - {PROV_ROLE: 'submission script'}, + { + PROV_ROLE: QualifiedName( + vocab_namespaces[FAIR_VOCAB_PREFIX], 'submission_script' + ) + }, ) @@ -254,21 +520,50 @@ def generate_prov_document(data_product, request): """ url = request.build_absolute_uri('/') + cenral_registry_url = settings.CENTRAL_REGISTRY_URL + if not cenral_registry_url.endswith('/'): + cenral_registry_url = f'{cenral_registry_url}/' doc = prov.model.ProvDocument() - doc.set_default_namespace(url) + + if url == cenral_registry_url: + # we are using the main registry + reg_uri_prefix = 'reg' + doc.add_namespace(reg_uri_prefix, cenral_registry_url) + else: + # we are using a local registry + reg_uri_prefix = 'lreg' + doc.add_namespace(reg_uri_prefix, url) + + # the vocab namespace is always the main registry + doc.add_namespace(FAIR_VOCAB_PREFIX, f'{cenral_registry_url}vocab/#') + # we need to tell SONAR to ignore 'http' in the vocab URLs + doc.add_namespace(DCAT_VOCAB_PREFIX, 'http://www.w3.org/ns/dcat#') # NOSONAR + doc.add_namespace(DCMITYPE_VOCAB_PREFIX, 'http://purl.org/dc/dcmitype/') # NOSONAR + doc.add_namespace(DCTERMS_VOCAB_PREFIX, 'http://purl.org/dc/terms/') # NOSONAR + doc.add_namespace(FOAF_VOCAB_PREFIX, 'http://xmlns.com/foaf/spec/#') # NOSONAR + + vocab_namespaces = {} + for namespace in doc.get_registered_namespaces(): + vocab_namespaces[namespace.prefix] = namespace # add the data product dp_entity = doc.entity( - 'api/data_product/' + str(data_product.id), + f'{reg_uri_prefix}:api/data_product/{data_product.id}', ( - (PROV_TYPE, 'file'), - *_generate_object_meta(data_product.object), + (PROV_TYPE, QualifiedName(vocab_namespaces[DCAT_VOCAB_PREFIX], 'Dataset')), + *_generate_object_meta(data_product.object, vocab_namespaces), ), ) - _add_author_agents(data_product.object.authors.all(), doc, dp_entity) - _add_external_object(doc, data_product, dp_entity) + _add_author_agents( + data_product.object.authors.all(), + doc, + dp_entity, + reg_uri_prefix, + vocab_namespaces, + ) + _add_external_object(doc, data_product, dp_entity, reg_uri_prefix, vocab_namespaces) # add the activity, i.e. 
the code run components = data_product.object.components.all() @@ -279,47 +574,38 @@ def generate_prov_document(data_product, request): # there is no code run so we cannot add any more provenance data return doc - cr_activity = doc.activity( - 'api/code_run/' + str(code_run.id), - str(code_run.run_date), - None, - { - PROV_TYPE: 'run', - 'description': code_run.description, - }, - ) - - doc.wasGeneratedBy(dp_entity, cr_activity) - - run_agent = doc.agent( - f'api/user/{code_run.updated_by.id}', - { - PROV_TYPE: 'prov:Person', - 'name': code_run.updated_by.full_name(), - }, - ) - doc.wasStartedBy( - cr_activity, - run_agent, - None, - str(code_run.run_date), - None, - {PROV_ROLE: 'code runner'}, - ) + # add the code run, this is the central activity + cr_activity = _add_code_run( + dp_entity, doc, code_run, reg_uri_prefix, vocab_namespaces) # add the code repo release if code_run.code_repo is not None: - _add_code_repo_release(cr_activity, doc, code_run.code_repo) + _add_code_repo_release( + cr_activity, doc, code_run.code_repo, reg_uri_prefix, vocab_namespaces + ) # add the model config if code_run.model_config is not None: - _add_model_config(cr_activity, doc, code_run.model_config) + _add_model_config( + cr_activity, doc, code_run.model_config, reg_uri_prefix, vocab_namespaces + ) # add the submission script - _add_submission_script(cr_activity, doc, code_run.submission_script) + _add_submission_script( + cr_activity, doc, code_run.submission_script, reg_uri_prefix, vocab_namespaces + ) # add input files - _add_linked_files(cr_activity, doc, dp_entity, None, True, code_run.inputs.all()) + _add_linked_files( + cr_activity, + doc, + dp_entity, + None, + True, + code_run.inputs.all(), + reg_uri_prefix, + vocab_namespaces, + ) # add additional output files _add_linked_files( @@ -329,6 +615,8 @@ def generate_prov_document(data_product, request): data_product.id, False, code_run.outputs.all(), + reg_uri_prefix, + vocab_namespaces, ) return doc @@ -375,6 +663,20 @@ def serialize_prov_document( buf.seek(0) return buf.read() + elif format_ == 'json-ld': + with io.StringIO() as buf: + serializer = prov.serializers.get('rdf') + serializer(doc).serialize(buf) + buf.seek(0) + graph = Graph() + graph.parse(data=buf.read(), format='trig') + # we should be able to use `context = dict(graph.namespaces())` but this + # appears not to work in RDFlib 5.0.0 + context = {} + for prefix, uri in graph.namespaces(): + context[prefix] = str(uri) + return graph.serialize(format='json-ld', indent=4, context=context) + else: with io.StringIO() as buf: serializer = prov.serializers.get('json') diff --git a/data_management/rest/views.py b/data_management/rest/views.py index 5df9b32b..6d7015c7 100644 --- a/data_management/rest/views.py +++ b/data_management/rest/views.py @@ -67,6 +67,19 @@ def render(self, data, media_type=None, renderer_context=None): return data +class JSONLDRenderer(renderers.BaseRenderer): + """ + Custom renderer for returning JSON-LD data. + """ + media_type = 'application/ld+json' + format = 'json-ld' + charset = 'utf8' + render_style = 'text' + + def render(self, data, media_type=None, renderer_context=None): + return data + + class ProvnRenderer(renderers.BaseRenderer): """ Custom renderer for returning PROV-N data (as defined in https://www.w3.org/TR/2013/REC-prov-n-20130430/). @@ -95,27 +108,32 @@ def render(self, data, media_type=None, renderer_context=None): class ProvReportView(views.APIView): """ - API view for returning a PROV report for a DataProduct. 
+ ***The provenance report for a `DataProduct`.*** + + The provenance report can be generated as `JSON`, `JSON-LD`, `XML` or `PROV-N`. + Optionally `JPEG` and `SVG` versions of the provenance may be available. + + ### Query parameters: + + `attributes` (optional): A boolean, when `True` (default) show additional + attributes of the objects on the image - This report can be returned as JSON (default) or XML or PROV-N using the custom - renderers. In addition if GraphViz is installed then JPEG and SVG renderers are also - available. + `aspect_ratio` (optional): A float used to define the ratio for the `JPEG` and + `SVG` images. The default is 0.71, which is equivalent to A4 landscape. - This method makes use of the following optional query parameters: - aspect_ratio: a float used to define the ratio for images - dpi: a float used to define the dpi for images - show_attributes: a boolean, shows attributes of elements when True + `dpi` (optional): A float used to define the dpi for the `JPEG` and `SVG` images """ try: Dot(prog='dot').create() # GraphViz is installed so the JPEG and SVG renderers are made available. renderer_classes = [renderers.BrowsableAPIRenderer, renderers.JSONRenderer, - JPEGRenderer, SVGRenderer, XMLRenderer, ProvnRenderer] + JSONLDRenderer, JPEGRenderer, SVGRenderer, XMLRenderer, + ProvnRenderer] except FileNotFoundError: # GraphViz is not installed so the JPEG and SVG renderers are NOT available. renderer_classes = [renderers.BrowsableAPIRenderer, renderers.JSONRenderer, - XMLRenderer, ProvnRenderer] + JSONLDRenderer, XMLRenderer, ProvnRenderer] def get(self, request, pk): data_product = get_object_or_404(models.DataProduct, pk=pk) diff --git a/data_management/tests/init_prov_db.py b/data_management/tests/init_prov_db.py index f6d3234c..15270293 100644 --- a/data_management/tests/init_prov_db.py +++ b/data_management/tests/init_prov_db.py @@ -12,6 +12,7 @@ StorageRoot, Issue, Namespace, + UserAuthor, ) from django.contrib.auth import get_user_model @@ -23,13 +24,11 @@ def reset_db(): Namespace.objects.all().delete() -def init_db(test=True): +def init_db(): user = get_user_model().objects.first() - - if test: - get_user_model().objects.create(username="testusera") - get_user_model().objects.create(username="testuserb") - get_user_model().objects.create(username="testuserc") + usera = get_user_model().objects.create(username="testusera") + get_user_model().objects.create(username="testuserb") + get_user_model().objects.create(username="testuserc") sr_github = StorageRoot.objects.create( updated_by=user, @@ -132,6 +131,7 @@ def init_db(test=True): a1 = Author.objects.create(updated_by=user, name="Ivana Valenti") a2 = Author.objects.create(updated_by=user, name="Maria Cipriani") a3 = Author.objects.create(updated_by=user, name="Rosanna Massabeti") + UserAuthor.objects.get_or_create(updated_by=user, user=usera, author=a1) o_code = Object.objects.create(updated_by=user, storage_location=sl_code) o_code_2 = Object.objects.create(updated_by=user, storage_location=sl_code) @@ -295,7 +295,7 @@ def init_db(test=True): cr2.outputs.set([o_output_3.components.first()]) cr3 = CodeRun.objects.create( - updated_by=user, + updated_by=usera, run_date="2021-07-17T19:21:11Z", submission_script=o_script, ) @@ -306,5 +306,4 @@ def init_db(test=True): if __name__ == "__main__": - # reset_db() - init_db(test=False) + init_db() diff --git a/data_management/tests/test_api.py b/data_management/tests/test_api.py index b696b25c..94bd3e89 100644 --- a/data_management/tests/test_api.py +++ 
b/data_management/tests/test_api.py @@ -822,6 +822,38 @@ def test_filter_by_key(self): class ProvAPITests(TestCase): + + DCAT_DATASET = "dcat:Dataset" + DCAT_HAS_VERSION = "dcat:hasVersion" + DCTERMS_CREATOR = "dcterms:creator" + DCTERMS_DESCRIPTION = "dcterms:description" + DCTERMS_FORMAT = "dcterms:format" + DCTERMS_IDENTIFIER = "dcterms:identifier" + DCTERMS_ISSUED = "dcterms:issued" + DCTERMS_MODIFIED = "dcterms:modified" + DCTERMS_TITLE = "dcterms:title" + FOAF_NAME = "foaf:name" + LREG_AUTHOR = "lreg:api/author/" + LREG_CODE_RUN = "lreg:api/code_run/" + LREG_DATA_PRODUCT = "lreg:api/data_product/" + LREG_OBJECT = "lreg:api/object/" + LREG_USER = "lreg:api/user/" + FAIR_INPUT_DATA = "fair:input_data" + FAIR_NAMESPACE = "fair:namespace" + PROV_AGENT = "prov:agent" + PROV_ACTIVITY = "prov:activity" + PROV_AT_LOCATION = "prov:atLocation" + PROV_ENTITY = "prov:entity" + PROV_GENERAL_ENTITY = "prov:generalEntity" + PROV_GENERATED_ENTITY = "prov:generatedEntity" + PROV_SPECIFIC_ENTITY = "prov:specificEntity" + PROV_PERSON = "prov:Person" + PROV_ROLE = "prov:role" + PROV_TYPE = "prov:type" + PROV_USED_ENTITY = "prov:usedEntity" + PROV_QUALIFIED_NAME = "prov:QUALIFIED_NAME" + XSD_DATE_TIME = "xsd:dateTime" + def setUp(self): self.user = get_user_model().objects.create(username="Test User") init_prov_db() @@ -838,251 +870,336 @@ def test_get_json(self): results = response.json() expected_result = { - "prov:type": "file", - "storage": "https://data.scrc.uk/api/text_file/input/1", - "description": "input 1 object", - "namespace": "prov", - "name": "this/is/cr/test/input/1", - "version": "0.2.0", + self.PROV_TYPE: {"$": self.DCAT_DATASET, "type": self.PROV_QUALIFIED_NAME}, + self.PROV_AT_LOCATION: "https://data.scrc.uk/api/text_file/input/1", + self.DCTERMS_DESCRIPTION: "input 1 object", + self.FAIR_NAMESPACE: "prov", + self.DCTERMS_TITLE: "this/is/cr/test/input/1", + self.DCAT_HAS_VERSION: "0.2.0", } - prov_out = results["entity"]["api/data_product/1"] - del prov_out["last_updated"] + prov_out = results["entity"][f"{self.LREG_DATA_PRODUCT}1"] + del prov_out[self.DCTERMS_MODIFIED] self.assertEqual(prov_out, expected_result) expected_result = { - "prov:type": "file", - "storage": "https://data.scrc.uk/api/text_file/output/1", - "description": "output 1 object", - "namespace": "prov", - "name": "this/is/cr/test/output/1", - "version": "0.2.0", + self.PROV_TYPE: {"$": self.DCAT_DATASET, "type": self.PROV_QUALIFIED_NAME}, + self.PROV_AT_LOCATION: "https://data.scrc.uk/api/text_file/output/1", + self.DCTERMS_DESCRIPTION: "output 1 object", + self.FAIR_NAMESPACE: "prov", + self.DCTERMS_TITLE: "this/is/cr/test/output/1", + self.DCAT_HAS_VERSION: "0.2.0", } - prov_out = results["entity"]["api/data_product/2"] - del prov_out["last_updated"] + prov_out = results["entity"][f"{self.LREG_DATA_PRODUCT}2"] + del prov_out[self.DCTERMS_MODIFIED] self.assertEqual(prov_out, expected_result) expected_result = { - "prov:type": "file", - "storage": "https://data.scrc.uk/api/text_file/output/2", - "description": "output 2 object", - "namespace": "prov", - "name": "this/is/cr/test/output/2", - "version": "0.2.0", + self.PROV_TYPE: {"$": self.DCAT_DATASET, "type": self.PROV_QUALIFIED_NAME}, + self.PROV_AT_LOCATION: "https://data.scrc.uk/api/text_file/output/2", + self.DCTERMS_DESCRIPTION: "output 2 object", + self.FAIR_NAMESPACE: "prov", + self.DCTERMS_TITLE: "this/is/cr/test/output/2", + self.DCAT_HAS_VERSION: "0.2.0", } - prov_out = results["entity"]["api/data_product/3"] - del prov_out["last_updated"] + prov_out = 
results["entity"][f"{self.LREG_DATA_PRODUCT}3"] + del prov_out[self.DCTERMS_MODIFIED] self.assertEqual(prov_out, expected_result) expected_result = { - "prov:type": "file", - "storage": "https://data.scrc.uk/api/text_file/input/2", - "description": "input 2 object", - "namespace": "prov", - "name": "this/is/cr/test/input/2", - "version": "0.2.0", + self.PROV_TYPE: {"$": self.DCAT_DATASET, "type": self.PROV_QUALIFIED_NAME}, + self.PROV_AT_LOCATION: "https://data.scrc.uk/api/text_file/input/2", + self.DCTERMS_DESCRIPTION: "input 2 object", + self.FAIR_NAMESPACE: "prov", + self.DCTERMS_TITLE: "this/is/cr/test/input/2", + self.DCAT_HAS_VERSION: "0.2.0", } - prov_out = results["entity"]["api/data_product/4"] - del prov_out["last_updated"] + prov_out = results["entity"][f"{self.LREG_DATA_PRODUCT}4"] + del prov_out[self.DCTERMS_MODIFIED] self.assertEqual(prov_out, expected_result) expected_result = { - "prov:type": "file", - "storage": "https://data.scrc.uk/api/text_file/input/3", - "description": "input 3 object", - "namespace": "prov", - "name": "this/is/cr/test/input/3", - "version": "0.2.0", + self.PROV_TYPE: {"$": self.DCAT_DATASET, "type": self.PROV_QUALIFIED_NAME}, + self.PROV_AT_LOCATION: "https://data.scrc.uk/api/text_file/input/3", + self.DCTERMS_DESCRIPTION: "input 3 object", + self.FAIR_NAMESPACE: "prov", + self.DCTERMS_TITLE: "this/is/cr/test/input/3", + self.DCAT_HAS_VERSION: "0.2.0", } - prov_out = results["entity"]["api/data_product/5"] - del prov_out["last_updated"] + prov_out = results["entity"][f"{self.LREG_DATA_PRODUCT}5"] + del prov_out[self.DCTERMS_MODIFIED] self.assertEqual(prov_out, expected_result) expected_result = { - "title": "this is cr test input 1", - "release_date": {"$": "2020-07-10T18:38:00+00:00", "type": "xsd:dateTime"}, - "original_store": "https://example.org/file_strore/1.txt", - "version": "0.2.0", - "alternate_identifier": "this_is_cr_test_input_1", - "alternate_identifier_type": "text", - "description": "this is code run test input 1", + self.PROV_TYPE: {"$": self.DCAT_DATASET, "type": self.PROV_QUALIFIED_NAME}, + self.DCTERMS_TITLE: "this is cr test input 1", + self.DCTERMS_ISSUED: { + "$": "2020-07-10T18:38:00+00:00", + "type": self.XSD_DATE_TIME, + }, + self.PROV_AT_LOCATION: "https://example.org/file_strore/1.txt", + self.DCAT_HAS_VERSION: "0.2.0", + "fair:alternate_identifier": "this_is_cr_test_input_1", + "fair:alternate_identifier_type": "text", + self.DCTERMS_DESCRIPTION: "this is code run test input 1", } - self.assertEqual(results["entity"]["api/external_object/1"], expected_result) + self.assertEqual( + results["entity"]["lreg:api/external_object/1"], expected_result + ) expected_result = { - "title": "this is cr test output 1", - "release_date": {"$": "2021-07-10T18:38:00+00:00", "type": "xsd:dateTime"}, - "original_store": "https://example.org/file_strore/2.txt", - "version": "0.2.0", - "alternate_identifier": "this_is_cr_test_output_1", - "alternate_identifier_type": "text", - "description": "this is code run test output 1", - "identifier": "this_is_cr_test_output_1_id", + self.PROV_TYPE: {"$": self.DCAT_DATASET, "type": self.PROV_QUALIFIED_NAME}, + self.DCTERMS_TITLE: "this is cr test output 1", + self.DCTERMS_ISSUED: { + "$": "2021-07-10T18:38:00+00:00", + "type": self.XSD_DATE_TIME, + }, + self.PROV_AT_LOCATION: "https://example.org/file_strore/2.txt", + self.DCAT_HAS_VERSION: "0.2.0", + "fair:alternate_identifier": "this_is_cr_test_output_1", + "fair:alternate_identifier_type": "text", + self.DCTERMS_DESCRIPTION: "this is code run test 
output 1", + self.DCTERMS_IDENTIFIER: "this_is_cr_test_output_1_id", } - self.assertEqual(results["entity"]["api/external_object/2"], expected_result) + self.assertEqual( + results["entity"]["lreg:api/external_object/2"], expected_result + ) expected_result = { - "title": "this is cr test output 2", - "release_date": {"$": "2021-07-10T18:38:00+00:00", "type": "xsd:dateTime"}, - "version": "0.2.0", - "identifier": "this_is_cr_test_output_2", + self.PROV_TYPE: {"$": self.DCAT_DATASET, "type": self.PROV_QUALIFIED_NAME}, + self.DCTERMS_TITLE: "this is cr test output 2", + self.DCTERMS_ISSUED: { + "$": "2021-07-10T18:38:00+00:00", + "type": self.XSD_DATE_TIME, + }, + self.DCAT_HAS_VERSION: "0.2.0", + self.DCTERMS_IDENTIFIER: "this_is_cr_test_output_2", } - self.assertEqual(results["entity"]["api/external_object/3"], expected_result) + self.assertEqual( + results["entity"]["lreg:api/external_object/3"], expected_result + ) expected_result = { - "storage": "https://github.comScottishCovidResponse/SCRCdata repository", - "name": "ScottishCovidResponse/SCRCdata", - "version": "0.1.0", - "website": "https://github.com/ScottishCovidResponse/SCRCdata", + self.PROV_AT_LOCATION: "https://github.comScottishCovidResponse/SCRCdata repository", + self.DCTERMS_TITLE: "ScottishCovidResponse/SCRCdata", + self.DCAT_HAS_VERSION: "0.1.0", + "fair:website": "https://github.com/ScottishCovidResponse/SCRCdata", + self.PROV_TYPE: { + "$": "dcmitype:Software", + "type": self.PROV_QUALIFIED_NAME, + }, } - prov_out = results["entity"]["api/code_repo_release/1"] - del prov_out["last_updated"] + prov_out = results["entity"]["lreg:api/code_repo_release/1"] + del prov_out[self.DCTERMS_MODIFIED] self.assertEqual(prov_out, expected_result) expected_result = { - "storage": "https://data.scrc.uk/api/text_file/15/?format=text" + self.PROV_AT_LOCATION: "https://data.scrc.uk/api/text_file/15/?format=text" } - prov_out = results["entity"]["api/object/3"] - del prov_out["last_updated"] + prov_out = results["entity"][f"{self.LREG_OBJECT}3"] + del prov_out[self.DCTERMS_MODIFIED] self.assertEqual(prov_out, expected_result) expected_result = { - "file_type": "text file", - "storage": "https://data.scrc.uk/api/text_file/16/?format=text", + self.DCTERMS_FORMAT: "text file", + self.PROV_AT_LOCATION: "https://data.scrc.uk/api/text_file/16/?format=text", + self.PROV_TYPE: { + "$": "dcmitype:Software", + "type": self.PROV_QUALIFIED_NAME, + }, } - prov_out = results["entity"]["api/object/4"] - del prov_out["last_updated"] + prov_out = results["entity"][f"{self.LREG_OBJECT}4"] + del prov_out[self.DCTERMS_MODIFIED] self.assertEqual(prov_out, expected_result) expected_result = { - "api/code_run/1": { + f"{self.LREG_CODE_RUN}1": { "prov:startTime": "2021-07-17T18:21:11+00:00", - "prov:type": "run", - "description": "Test run", + self.PROV_TYPE: {"$": "fair:Run", "type": self.PROV_QUALIFIED_NAME}, + self.DCTERMS_DESCRIPTION: "Test run", } } self.assertEqual(results["activity"], expected_result) - expected_result = { - "api/author/3": {"prov:type": "prov:Person", "name": "Rosanna Massabeti"}, - "api/user/1": {"prov:type": "prov:Person", "name": "User Not Found"}, - "api/author/1": {"prov:type": "prov:Person", "name": "Ivana Valenti"}, - "api/author/2": {"prov:type": "prov:Person", "name": "Maria Cipriani"}, + f"{self.LREG_AUTHOR}3": { + self.PROV_TYPE: { + "$": self.PROV_PERSON, + "type": self.PROV_QUALIFIED_NAME, + }, + self.FOAF_NAME: "Rosanna Massabeti", + }, + f"{self.LREG_USER}1": { + self.PROV_TYPE: { + "$": self.PROV_PERSON, + "type": 
self.PROV_QUALIFIED_NAME, + }, + self.FOAF_NAME: "User Not Found", + }, + f"{self.LREG_AUTHOR}1": { + self.PROV_TYPE: { + "$": self.PROV_PERSON, + "type": self.PROV_QUALIFIED_NAME, + }, + self.FOAF_NAME: "Ivana Valenti", + }, + f"{self.LREG_AUTHOR}2": { + self.PROV_TYPE: { + "$": self.PROV_PERSON, + "type": self.PROV_QUALIFIED_NAME, + }, + self.FOAF_NAME: "Maria Cipriani", + }, } self.assertEqual(results["agent"], expected_result) expected_result = { "_:id2": { - "prov:specificEntity": "api/external_object/2", - "prov:generalEntity": "api/data_product/2", + self.PROV_SPECIFIC_ENTITY: "lreg:api/external_object/2", + self.PROV_GENERAL_ENTITY: f"{self.LREG_DATA_PRODUCT}2", }, "_:id8": { - "prov:specificEntity": "api/external_object/1", - "prov:generalEntity": "api/data_product/1", + self.PROV_SPECIFIC_ENTITY: "lreg:api/external_object/1", + self.PROV_GENERAL_ENTITY: f"{self.LREG_DATA_PRODUCT}1", }, "_:id18": { - "prov:specificEntity": "api/external_object/3", - "prov:generalEntity": "api/data_product/3", + self.PROV_SPECIFIC_ENTITY: "lreg:api/external_object/3", + self.PROV_GENERAL_ENTITY: f"{self.LREG_DATA_PRODUCT}3", }, } self.assertEqual(results["specializationOf"], expected_result) expected_result = { "_:id5": { - "prov:activity": "api/code_run/1", - "prov:entity": "api/code_repo_release/1", - "prov:role": "software", + self.PROV_ACTIVITY: f"{self.LREG_CODE_RUN}1", + self.PROV_ENTITY: "lreg:api/code_repo_release/1", + self.PROV_ROLE: { + "$": "fair:software", + "type": self.PROV_QUALIFIED_NAME, + }, }, "_:id6": { - "prov:activity": "api/code_run/1", - "prov:entity": "api/object/3", - "prov:role": "model configuration", + self.PROV_ACTIVITY: f"{self.LREG_CODE_RUN}1", + self.PROV_ENTITY: f"{self.LREG_OBJECT}3", + self.PROV_ROLE: { + "$": "fair:model_configuration", + "type": self.PROV_QUALIFIED_NAME, + }, }, "_:id7": { - "prov:activity": "api/code_run/1", - "prov:entity": "api/object/4", - "prov:role": "submission script", + self.PROV_ACTIVITY: f"{self.LREG_CODE_RUN}1", + self.PROV_ENTITY: f"{self.LREG_OBJECT}4", + self.PROV_ROLE: { + "$": "fair:submission_script", + "type": self.PROV_QUALIFIED_NAME, + }, }, "_:id10": { - "prov:activity": "api/code_run/1", - "prov:entity": "api/data_product/1", - "prov:role": "input data", + self.PROV_ACTIVITY: f"{self.LREG_CODE_RUN}1", + self.PROV_ENTITY: f"{self.LREG_DATA_PRODUCT}1", + self.PROV_ROLE: { + "$": self.FAIR_INPUT_DATA, + "type": self.PROV_QUALIFIED_NAME, + }, }, "_:id13": { - "prov:activity": "api/code_run/1", - "prov:entity": "api/data_product/4", - "prov:role": "input data", + self.PROV_ACTIVITY: f"{self.LREG_CODE_RUN}1", + self.PROV_ENTITY: f"{self.LREG_DATA_PRODUCT}4", + self.PROV_ROLE: { + "$": self.FAIR_INPUT_DATA, + "type": self.PROV_QUALIFIED_NAME, + }, }, "_:id16": { - "prov:activity": "api/code_run/1", - "prov:entity": "api/data_product/5", - "prov:role": "input data", + self.PROV_ACTIVITY: f"{self.LREG_CODE_RUN}1", + self.PROV_ENTITY: f"{self.LREG_DATA_PRODUCT}5", + self.PROV_ROLE: { + "$": self.FAIR_INPUT_DATA, + "type": self.PROV_QUALIFIED_NAME, + }, }, } self.assertEqual(results["used"], expected_result) expected_result = { "_:id1": { - "prov:entity": "api/data_product/2", - "prov:agent": "api/author/3", - "prov:role": "author", + self.PROV_ENTITY: f"{self.LREG_DATA_PRODUCT}2", + self.PROV_AGENT: f"{self.LREG_AUTHOR}3", + self.PROV_ROLE: { + "$": self.DCTERMS_CREATOR, + "type": self.PROV_QUALIFIED_NAME, + }, }, "_:id9": { - "prov:entity": "api/data_product/1", - "prov:agent": "api/author/1", - "prov:role": "author", + 
self.PROV_ENTITY: f"{self.LREG_DATA_PRODUCT}1", + self.PROV_AGENT: f"{self.LREG_AUTHOR}1", + self.PROV_ROLE: { + "$": self.DCTERMS_CREATOR, + "type": self.PROV_QUALIFIED_NAME, + }, }, "_:id12": { - "prov:entity": "api/data_product/4", - "prov:agent": "api/author/2", - "prov:role": "author", + self.PROV_ENTITY: f"{self.LREG_DATA_PRODUCT}4", + self.PROV_AGENT: f"{self.LREG_AUTHOR}2", + self.PROV_ROLE: { + "$": self.DCTERMS_CREATOR, + "type": self.PROV_QUALIFIED_NAME, + }, }, "_:id15": { - "prov:entity": "api/data_product/5", - "prov:agent": "api/author/3", - "prov:role": "author", + self.PROV_ENTITY: f"{self.LREG_DATA_PRODUCT}5", + self.PROV_AGENT: f"{self.LREG_AUTHOR}3", + self.PROV_ROLE: { + "$": self.DCTERMS_CREATOR, + "type": self.PROV_QUALIFIED_NAME, + }, }, "_:id19": { - "prov:entity": "api/data_product/3", - "prov:agent": "api/author/3", - "prov:role": "author", + self.PROV_ENTITY: f"{self.LREG_DATA_PRODUCT}3", + self.PROV_AGENT: f"{self.LREG_AUTHOR}3", + self.PROV_ROLE: { + "$": self.DCTERMS_CREATOR, + "type": self.PROV_QUALIFIED_NAME, + }, }, } self.assertEqual(results["wasAttributedTo"], expected_result) expected_result = { "_:id11": { - "prov:generatedEntity": "api/data_product/2", - "prov:usedEntity": "api/data_product/1", + self.PROV_GENERATED_ENTITY: f"{self.LREG_DATA_PRODUCT}2", + self.PROV_USED_ENTITY: f"{self.LREG_DATA_PRODUCT}1", }, "_:id14": { - "prov:generatedEntity": "api/data_product/2", - "prov:usedEntity": "api/data_product/4", + self.PROV_GENERATED_ENTITY: f"{self.LREG_DATA_PRODUCT}2", + self.PROV_USED_ENTITY: f"{self.LREG_DATA_PRODUCT}4", }, "_:id17": { - "prov:generatedEntity": "api/data_product/2", - "prov:usedEntity": "api/data_product/5", + self.PROV_GENERATED_ENTITY: f"{self.LREG_DATA_PRODUCT}2", + self.PROV_USED_ENTITY: f"{self.LREG_DATA_PRODUCT}5", }, } self.assertEqual(results["wasDerivedFrom"], expected_result) expected_result = { "_:id3": { - "prov:entity": "api/data_product/2", - "prov:activity": "api/code_run/1", + self.PROV_ENTITY: f"{self.LREG_DATA_PRODUCT}2", + self.PROV_ACTIVITY: f"{self.LREG_CODE_RUN}1", }, "_:id20": { - "prov:entity": "api/data_product/3", - "prov:activity": "api/code_run/1", + self.PROV_ENTITY: f"{self.LREG_DATA_PRODUCT}3", + self.PROV_ACTIVITY: f"{self.LREG_CODE_RUN}1", }, } self.assertEqual(results["wasGeneratedBy"], expected_result) expected_result = { "_:id4": { - "prov:activity": "api/code_run/1", - "prov:trigger": "api/user/1", + self.PROV_ACTIVITY: f"{self.LREG_CODE_RUN}1", + "prov:trigger": f"{self.LREG_USER}1", "prov:time": "2021-07-17T18:21:11+00:00", - "prov:role": "code runner", + self.PROV_ROLE: { + "$": "fair:code_runner", + "type": self.PROV_QUALIFIED_NAME, + }, } } self.assertEqual(results["wasStartedBy"], expected_result) @@ -1092,27 +1209,43 @@ def test_get_provn(self): client.force_authenticate(user=self.user) url = reverse("prov_report", kwargs={"pk": 1}) response = client.get( - url, format="provn", HTTP_ACCEPT="text/provenance-notation", HTTP_HOST='localhost' + url, + format="provn", + HTTP_ACCEPT="text/provenance-notation", + HTTP_HOST="localhost", ) self.assertEqual(response.status_code, 200) self.assertEqual( response["Content-Type"], "text/provenance-notation; charset=utf8" ) - result_bits = response.data.split("last_updated") + result_bits = response.data.split(self.DCTERMS_MODIFIED) result_end = result_bits[1].split("xsd:dateTime, ", 1)[1] result = result_bits[0] + result_end expected_result = """document - default + prefix lreg + prefix fair + prefix dcat + prefix dcmitype + prefix dcterms + prefix 
foaf - entity(api/data_product/1, [prov:type="file", storage="https://data.scrc.uk/api/text_file/input/1", description="input 1 object", namespace="prov", name="this/is/cr/test/input/1", version="0.2.0"]) - agent(api/author/1, [prov:type="prov:Person", name="Ivana Valenti"]) - wasAttributedTo(api/data_product/1, api/author/1, [prov:role="author"]) - entity(api/external_object/1, [title="this is cr test input 1", release_date="2020-07-10T18:38:00+00:00" %% xsd:dateTime, version="0.2.0", alternate_identifier="this_is_cr_test_input_1", alternate_identifier_type="text", description="this is code run test input 1", original_store="https://example.org/file_strore/1.txt"]) - specializationOf(api/external_object/1, api/data_product/1) + entity(lreg:api/data_product/1, [prov:type='dcat:Dataset', prov:atLocation="https://data.scrc.uk/api/text_file/input/1", dcterms:description="input 1 object", fair:namespace="prov", dcterms:title="this/is/cr/test/input/1", dcat:hasVersion="0.2.0"]) + agent(lreg:api/author/1, [prov:type='prov:Person', foaf:name="Ivana Valenti"]) + wasAttributedTo(lreg:api/data_product/1, lreg:api/author/1, [prov:role='dcterms:creator']) + entity(lreg:api/external_object/1, [prov:type='dcat:Dataset', dcterms:title="this is cr test input 1", dcterms:issued="2020-07-10T18:38:00+00:00" %% xsd:dateTime, dcat:hasVersion="0.2.0", fair:alternate_identifier="this_is_cr_test_input_1", fair:alternate_identifier_type="text", dcterms:description="this is code run test input 1", prov:atLocation="https://example.org/file_strore/1.txt"]) + specializationOf(lreg:api/external_object/1, lreg:api/data_product/1) endDocument""" self.assertEqual(result, expected_result) + def test_get_json_ld(self): + client = APIClient() + client.force_authenticate(user=self.user) + url = reverse("prov_report", kwargs={"pk": 1}) + response = client.get(url, format="json-ld", HTTP_ACCEPT="application/ld+json") + self.assertEqual(response.status_code, 200) + self.assertEqual(response["Content-Type"], "application/ld+json; charset=utf8") + def test_get_xml(self): client = APIClient() client.force_authenticate(user=self.user) @@ -1143,8 +1276,8 @@ def test_get_no_repo(self): url = reverse("prov_report", kwargs={"pk": 7}) response = client.get(url, format="xml", HTTP_ACCEPT="text/xml") self.assertEqual(response["Content-Type"], "text/xml; charset=utf8") - self.assertNotContains(response, "api/code_repo/", 200) - self.assertNotContains(response, "api/code_repo_release/", 200) + self.assertNotContains(response, "lreg:api/code_repo/", 200) + self.assertNotContains(response, "lreg:api/code_repo_release/", 200) def test_get_no_repo_release(self): client = APIClient() @@ -1152,4 +1285,4 @@ def test_get_no_repo_release(self): url = reverse("prov_report", kwargs={"pk": 6}) response = client.get(url, format="xml", HTTP_ACCEPT="text/xml") self.assertEqual(response["Content-Type"], "text/xml; charset=utf8") - self.assertContains(response, "api/code_repo/", status_code=200) + self.assertContains(response, f"{self.LREG_OBJECT}", status_code=200) diff --git a/drams/dev-settings.py b/drams/dev-settings.py index cc5445e3..b0813ef8 100644 --- a/drams/dev-settings.py +++ b/drams/dev-settings.py @@ -29,6 +29,8 @@ ALLOWED_HOSTS = ['data.scrc.uk', '127.0.0.1', 'localhost'] +# The URL of the central public registry +CENTRAL_REGISTRY_URL = 'https://data.scrc.uk/' # Application definition diff --git a/drams/local-settings.py b/drams/local-settings.py index 349381c5..19f5dee6 100644 --- a/drams/local-settings.py +++ b/drams/local-settings.py @@ 
-29,6 +29,8 @@
 ALLOWED_HOSTS = ['127.0.0.1', 'localhost']
+# The URL of the central public registry
+CENTRAL_REGISTRY_URL = 'https://data.scrc.uk/'
 # Application definition
diff --git a/drams/settings.py b/drams/settings.py
index 8c88559b..8cd673e0 100644
--- a/drams/settings.py
+++ b/drams/settings.py
@@ -35,6 +35,8 @@
 ALLOWED_HOSTS = ['data.scrc.uk', '127.0.0.1', 'localhost']
+# The URL of the central public registry
+CENTRAL_REGISTRY_URL = 'https://data.scrc.uk/'
 # Application definition
diff --git a/drams/test-settings.py b/drams/test-settings.py
index 0c61667c..2e56430a 100644
--- a/drams/test-settings.py
+++ b/drams/test-settings.py
@@ -29,6 +29,8 @@
 ALLOWED_HOSTS = ['data.scrc.uk', '127.0.0.1', 'localhost']
+# The URL of the central public registry
+CENTRAL_REGISTRY_URL = 'https://data.scrc.uk/'
 # Application definition
diff --git a/drams/vagrant-settings.py b/drams/vagrant-settings.py
index 658ed71d..38aa0b3d 100644
--- a/drams/vagrant-settings.py
+++ b/drams/vagrant-settings.py
@@ -29,6 +29,8 @@
 ALLOWED_HOSTS = ['192.168.20.10', '127.0.0.1', 'localhost']
+# The URL of the central public registry
+CENTRAL_REGISTRY_URL = 'https://data.scrc.uk/'
 # Application definition
diff --git a/local-requirements.txt b/local-requirements.txt
index 2a165b4f..a3b943fd 100644
--- a/local-requirements.txt
+++ b/local-requirements.txt
@@ -26,7 +26,7 @@ pyparsing==2.4.7
 python-dateutil==2.8.1
 python3-openid==3.1.0
 pytz==2020.1
-rdflib==5.0.0
+rdflib==6.0.1
 requests==2.25.0
 requests-oauthlib==1.3.0
 six==1.14.0
diff --git a/requirements.txt b/requirements.txt
index 9fb4e2d8..c5c7e8fa 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -27,7 +27,7 @@ pyparsing==2.4.7
 python-dateutil==2.8.1
 python3-openid==3.1.0
 pytz==2020.1
-rdflib==5.0.0
+rdflib==6.0.1
 requests==2.23.0
 requests-oauthlib==1.3.0
 six==1.14.0
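
A minimal, self-contained sketch (not part of the patch) of the attribute style the refactored _generate_object_meta and _add_author_agents switch to: namespaces are registered on the ProvDocument and every attribute key is a QualifiedName drawn from a recognised vocabulary (dcterms, dcat, foaf) rather than a bare string. The 'lreg' URL is a placeholder for a local registry.

import prov.model
from prov.constants import PROV, PROV_ROLE, PROV_TYPE
from prov.identifier import QualifiedName

doc = prov.model.ProvDocument()
doc.add_namespace('lreg', 'http://localhost:8000/')        # placeholder local registry
doc.add_namespace('dcterms', 'http://purl.org/dc/terms/')
doc.add_namespace('dcat', 'http://www.w3.org/ns/dcat#')
doc.add_namespace('foaf', 'http://xmlns.com/foaf/spec/#')
ns = {n.prefix: n for n in doc.get_registered_namespaces()}

# entity attributes as (QualifiedName, value) pairs, the shape _generate_object_meta builds
data_product = doc.entity(
    'lreg:api/data_product/1',
    (
        (PROV_TYPE, QualifiedName(ns['dcat'], 'Dataset')),
        (QualifiedName(ns['dcterms'], 'title'), 'this/is/cr/test/input/1'),
        (QualifiedName(ns['dcat'], 'hasVersion'), '0.2.0'),
    ),
)
author = doc.agent(
    'lreg:api/author/1',
    {
        PROV_TYPE: QualifiedName(PROV, 'Person'),
        QualifiedName(ns['foaf'], 'name'): 'Ivana Valenti',
    },
)
doc.wasAttributedTo(
    data_product, author, None, {PROV_ROLE: QualifiedName(ns['dcterms'], 'creator')}
)

# PROV-N output now uses prefixed names, e.g.
#   entity(lreg:api/data_product/1, [prov:type='dcat:Dataset', dcterms:title="...", ...])
print(doc.get_provn())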
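
The reg/lreg prefix choice in generate_prov_document can be read as a small pure function. This standalone restatement (the function name is mine, not the module's) just makes the rule explicit: records are qualified with reg: only when the service answering the request is the central registry named in CENTRAL_REGISTRY_URL, otherwise with lreg:.

def registry_prefix(request_root, central_registry_url):
    """Return 'reg' for the central public registry, 'lreg' for a local registry."""
    if not central_registry_url.endswith('/'):
        central_registry_url = f'{central_registry_url}/'
    return 'reg' if request_root == central_registry_url else 'lreg'

assert registry_prefix('https://data.scrc.uk/', 'https://data.scrc.uk') == 'reg'
assert registry_prefix('http://localhost:8000/', 'https://data.scrc.uk/') == 'lreg'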
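
A sketch of the route the new 'json-ld' branch of serialize_prov_document takes: the prov package has no JSON-LD serializer of its own, so the document is first written out as RDF (TriG), re-parsed with rdflib and then emitted as JSON-LD. The explicit context dict works around dict(graph.namespaces()) not behaving as expected under rdflib 5.0.0; with the pin moved to rdflib 6.0.1 the two forms should be interchangeable.

import io

import prov.serializers
from rdflib import Graph


def to_json_ld(doc):
    """Return a JSON-LD string for a prov.model.ProvDocument."""
    with io.StringIO() as buf:
        prov.serializers.get('rdf')(doc).serialize(buf)  # prov's RDF serializer writes TriG
        buf.seek(0)
        graph = Graph()
        graph.parse(data=buf.read(), format='trig')
    context = {prefix: str(uri) for prefix, uri in graph.namespaces()}
    return graph.serialize(format='json-ld', indent=4, context=context)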
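
Because JSON-LD is just another RDF syntax, the renderer's output can be re-parsed with rdflib as a quick smoke test. Whether every record carries an explicit rdf:type in the serialized output depends on the prov RDF serializer, so treat the entity count below as indicative rather than authoritative.

from rdflib import Graph, Namespace
from rdflib.namespace import RDF

PROV_NS = Namespace('http://www.w3.org/ns/prov#')


def summarise_report(json_ld_text):
    """Print a rough summary of a JSON-LD provenance report."""
    graph = Graph()
    graph.parse(data=json_ld_text, format='json-ld')
    entities = set(graph.subjects(RDF.type, PROV_NS.Entity))
    print(f'{len(graph)} triples, {len(entities)} typed prov:Entity records')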
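
A client-side sketch of the content negotiation the updated ProvReportView supports. The host, report path and token are placeholders, not values from the patch (resolve the real route with reverse('prov_report', kwargs={'pk': ...})), and the JPEG/SVG renderers are only offered when GraphViz is installed on the server.

import requests

BASE_URL = 'http://localhost:8000'                # placeholder registry host
HEADERS = {'Authorization': 'token <api-token>'}  # placeholder credentials

ACCEPT_TYPES = {
    'json': 'application/json',
    'json-ld': 'application/ld+json',   # served by the new JSONLDRenderer
    'xml': 'text/xml',
    'provn': 'text/provenance-notation',
}

for name, accept in ACCEPT_TYPES.items():
    response = requests.get(
        f'{BASE_URL}/api/prov_report/1',           # placeholder path to the report
        headers={**HEADERS, 'Accept': accept},
        timeout=30,
    )
    print(name, response.status_code, response.headers.get('Content-Type'))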