diff --git a/README.md b/README.md
index 82ce2b2d..86a20bdf 100644
--- a/README.md
+++ b/README.md
@@ -212,7 +212,7 @@ Single file mode:
 Batch mode:
 
 ```bash
-./manage.py import_projects -f path/to/dir/with/json/files/
+./manage.py import_projects -d path/to/dir/with/json/files/
 ```
 
 Available commands: `import_projects`, `import_datasets`, `import_partners`.
diff --git a/core/fixtures/contact-types.json b/core/fixtures/contact-types.json
index 1ded1e33..b73e1436 100644
--- a/core/fixtures/contact-types.json
+++ b/core/fixtures/contact-types.json
@@ -1,4 +1,7 @@
 [
+    {
+        "name": "Researcher"
+    },
     {
         "name": "Principal_Investigator"
     },
diff --git a/core/fixtures/elu-dataset.json b/core/fixtures/elu-dataset.json
index d91e9cbb..4ef9f643 100644
--- a/core/fixtures/elu-dataset.json
+++ b/core/fixtures/elu-dataset.json
@@ -5,7 +5,7 @@
     "type": "object",
     "allOf": [
         {
-            "$ref": "https://git-r3lab.uni.lu/pinar.alper/metadata-tools/raw/master/metadata_tools/resources/elu-core.json"
+            "$ref": "https://raw.githubusercontent.com/elixir-luxembourg/json-schemas/master/schemas/elu-core.json"
         }
     ],
     "properties": {
@@ -145,6 +145,7 @@
             "US",
             "PS",
             "IS",
+            "IP",
             "Other"
         ]
     },
diff --git a/core/fixtures/elu-project.json b/core/fixtures/elu-project.json
index 57508b9a..aae37d60 100644
--- a/core/fixtures/elu-project.json
+++ b/core/fixtures/elu-project.json
@@ -11,7 +11,7 @@
     "type": "object",
     "allOf": [
         {
-            "$ref": "https://git-r3lab.uni.lu/pinar.alper/metadata-tools/raw/master/metadata_tools/resources/elu-study.json"
+            "$ref": "https://raw.githubusercontent.com/elixir-luxembourg/json-schemas/master/schemas/elu-study.json"
         }
     ],
     "properties": {
diff --git a/core/fixtures/elu-study.json b/core/fixtures/elu-study.json
index 8c42df8e..c3324226 100644
--- a/core/fixtures/elu-study.json
+++ b/core/fixtures/elu-study.json
@@ -13,7 +13,7 @@
     "type": "object",
     "allOf": [
         {
-            "$ref": "https://git-r3lab.uni.lu/pinar.alper/metadata-tools/raw/master/metadata_tools/resources/elu-core.json"
+            "$ref": "https://raw.githubusercontent.com/elixir-luxembourg/json-schemas/master/schemas/elu-core.json"
         }
     ],
     "properties": {
diff --git a/core/forms/data_declaration.py b/core/forms/data_declaration.py
index 726a8ffe..4a7a5707 100644
--- a/core/forms/data_declaration.py
+++ b/core/forms/data_declaration.py
@@ -1,11 +1,12 @@
 from django import forms
+from django.forms import ValidationError
 from django.shortcuts import get_object_or_404
 from django.urls import reverse_lazy
 
+from core.forms.use_restriction import UseRestrictionForm
 from core.models import DataDeclaration, Partner, Contract, GDPRRole
 from core.models.contract import PartnerRole
-from django.forms import ValidationError
-from core.forms.use_restriction import UseRestrictionForm
+
 
 class DataDeclarationEditForm(forms.ModelForm):
diff --git a/core/forms/dataset.py b/core/forms/dataset.py
index 8ad5f843..054f02ec 100644
--- a/core/forms/dataset.py
+++ b/core/forms/dataset.py
@@ -46,7 +46,8 @@ def clean(self):
         if contract.project:
             if str(contract.project.id) != proj:
                 project_inconsistency = True
-                self.add_error('project', "Dataset has existing link to Project {} via {}. Please remove link before updating this field.".format(contract.project.acronym, obj))
+                error_msg = f"Dataset has existing link to Project {contract.project.acronym} via {obj}. Please remove link before updating this field."
+                self.add_error('project', error_msg)
 
         if project_inconsistency:
             errors.append("Unable to update project information.")
diff --git a/core/forms/use_restriction.py b/core/forms/use_restriction.py
index 0c56ac65..0c3ac305 100644
--- a/core/forms/use_restriction.py
+++ b/core/forms/use_restriction.py
@@ -2,6 +2,7 @@
 from django.forms import CharField, ModelForm, Select
 
 from core.models import UseRestriction, RestrictionClass
+from core.models.use_restriction import USE_RESTRICTION_CHOICES
 
 
 class UseRestrictionForm(ModelForm):
@@ -11,16 +12,19 @@ class UseRestrictionForm(ModelForm):
 
     class Meta:
         model = UseRestriction
-        fields = ('restriction_class', 'notes')
+        fields = ('use_restriction_rule', 'restriction_class', 'notes', 'use_class_note')
 
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         class_choices = [(None, "-----------------------")]
         class_choices.extend([(d.code, d.name) for d in RestrictionClass.objects.all()])
-        self.fields['restriction_class'] = CharField(label='Category', help_text='Select the category of restrictions. These are \'GA4GH Consent Codes\'', required=True, widget=Select(choices=class_choices, attrs={'class': 'dummy-select'}))
+        self.fields['restriction_class'] = CharField(label='Use category', help_text='Select the category of restrictions. These are \'GA4GH Consent Codes\'', required=True, widget=Select(choices=class_choices, attrs={'class': 'dummy-select'}))
         self.fields['notes'].widget.attrs['cols'] = '70'
-        self.fields['notes'].widget.attrs['rows'] = '1'
+        self.fields['notes'].widget.attrs['rows'] = '5'
+        self.fields['use_restriction_rule'] = CharField(label='Use Restriction Rule', help_text='Does the rule constrain or forbid?', required=False, widget=Select(choices=USE_RESTRICTION_CHOICES, attrs={'class': 'dummy-select'}))
+        self.fields['use_class_note'].widget.attrs['cols'] = '70'
+        self.fields['use_class_note'].widget.attrs['rows'] = '3'
 
     def clean(self):
@@ -36,4 +40,6 @@ def is_empty(self):
         cleaned_data = super().clean()
         restriction_class = cleaned_data.get('restriction_class')
         notes = cleaned_data.get('notes')
-        return not restriction_class and not notes
\ No newline at end of file
+        use_class_note = cleaned_data.get('use_class_note')
+        use_restriction_rule = cleaned_data.get('use_restriction_rule')
+        return not restriction_class and not notes and not use_class_note and not use_restriction_rule
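For context, the `USE_RESTRICTION_CHOICES` object imported above is referenced later in this diff (`core/models/use_restriction.py`) as both `choices=USE_RESTRICTION_CHOICES` and `USE_RESTRICTION_CHOICES.NO_CONSTRAINTS`. A definition along these lines would satisfy both usages — this is only a sketch, assuming `django-model-utils` is available; the real definition in the codebase may differ:

```python
# Hypothetical sketch of the choices object in core/models/use_restriction.py.
# Assumes django-model-utils; names taken from the verbose_name in this diff.
from model_utils import Choices

USE_RESTRICTION_CHOICES = Choices(
    'CONSTRAINTS',      # the rule constrains use
    'NO_CONSTRAINTS',   # the rule imposes no constraints (model default below)
    'FORBIDDEN',        # the rule forbids use
)
```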
os.path.join(self.base_path, self.schema_name))
             logger.warn("Will try to load the schema from URL...")
 
         try:
@@ -51,16 +54,18 @@
             return
         except:
             logger.error("Error (2/2) loading schema from URI for JSON validation...: " + str(e))
-            logger.error("URL = " + self.base_url + self.schema_name)
+            logger.error("URL = " + os.path.join(self.base_url, self.schema_name))
             raise Exception('Cannot load schema for JSON validation')
 
     def _load_schema_from_disk(self):
-        with open(self.base_path + self.schema_name, 'r') as opened_file:
+        file_path = os.path.join(self.base_path, self.schema_name)
+        with open(file_path, 'r') as opened_file:
             return json.load(opened_file)
 
     def _load_schema_from_url(self):
-        with urllib.request.urlopen(self.base_url + self.schema_name) as url:
+        schema_url = os.path.join(self.base_url, self.schema_name)
+        with urllib.request.urlopen(schema_url) as url:
             return json.loads(url.read().decode())
 
 class DatasetJSONSchemaValidator(BaseJSONSchemaValidator):
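A minimal illustration of how these validators are driven by the importers below — the class and call signature are taken from this diff, but the payload is invented:

```python
# Sketch: validate a list of items the way BaseImporter.import_object_list does.
from core.importer.JSONSchemaValidator import DatasetJSONSchemaValidator
from core.utils import DaisyLogger

logger = DaisyLogger(__name__)
validator = DatasetJSONSchemaValidator()

items = [{"name": "Hypothetical dataset"}]  # invented payload
# Raises JSONSchemaValidationError if an item does not match elu-dataset.json.
# The schema itself is loaded from disk first, then from the remote URL as a fallback.
validator.validate_items(items, logger)
```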
+ """ + class DateImportException(Exception): pass @@ -24,35 +32,116 @@ class DateImportException(Exception): @property def json_schema_validator(self): - raise NotImplementedError + """ + This validator will be used against the imported data + """ + raise NotImplementedError('You must implement `json_schema_validator` in your importer class') + + @property + def json_schema_uri(self): + """ + This attribute is used for detecting whether the importer can handle given json + """ + raise NotImplementedError('You must implement `json_schema_uri` in your importer class') + + def can_process_json(self, json_string: str) -> bool: + """ + Checks whether the imported JSON has the same "$schema" URI as the importer class (in `json_schema_uri` property) + """ + try: + object = json.loads(json_string) + return self.can_process_object(object) + except: + message = f'Couldn\'t check if the imported object has same "$schema" as the importer ({self.__class__.__name__}: {self.json_schema_uri}) - something went wrong while parsing the file' + self.logger.warn(message) + return False + + def can_process_object(self, json_object: Dict) -> bool: + """ + Checks whether the object has the same "$schema" URI as the importer class (in `json_schema_uri` property) + """ + if not json_object.get('$schema', False): + self.logger.debug('The imported object has no "$schema" attribute') + return False + if self.json_schema_uri == json_object.get('$schema'): + message = f'The imported object has the same "$schema" ({self.json_schema_uri}) as the importer ({self.__class__.__name__})' + self.logger.debug(message) + return True + schema_name = json_object.get('$schema') + message = f'The imported object has different "$schema" ({schema_name}) than the importer ({self.__class__.__name__}: {self.json_schema_uri})' + self.logger.debug(message) + return False + + def import_json_file(self, path_to_the_file: str, stop_on_error=False, verbose=False, validate=True) -> bool: + """ + Opens, loads and imports a JSON file. + """ + self.logger.info(f'Opening the file: {path_to_the_file}') + with open(path_to_the_file, encoding='utf-8') as json_file: + json_file_contents = json_file.read() + result = self.import_json(json_file_contents, stop_on_error, verbose) + self.logger.info(f'Successfully completed import for the file: {path_to_the_file}') + return result - def import_json(self, json_string, stop_on_error=False, verbose=False): - self.logger.info(f'Import ({self.__class__.__name__}) started for file') + def import_json(self, json_string: str, stop_on_error=False, verbose=False, validate=True) -> bool: result = True + importer_class_name = self.__class__.__name__ + self.logger.info(f'Attempting to use "{importer_class_name}" to parse and import the JSON') json_list = json.loads(json_string)['items'] - self.json_schema_validator.validate_items(json_list, self.logger) + result = self.import_object_list(json_list, stop_on_error, verbose) + status = 'success' if result else 'failed' + self.logger.info(f'Import ({importer_class_name}) result: {status}') + return result + + def import_object_list(self, json_list: List[Dict], stop_on_error=False, verbose=False, validate=True) -> bool: + """ + Validates and imports a list of objects. 
+ """ + result = True + if validate: + validator_name = self.json_schema_validator.__class__.__name__ + self.logger.debug(f'Validating the file with "{validator_name}" against JSON schema...') + self.json_schema_validator.validate_items(json_list, self.logger) + self.logger.debug('...JSON schema is OK!') + else: + self.logger.debug(f'Proceeding without using the validation') + count = len(json_list) + verb = 'are' if count > 1 else 'is' + self.logger.debug(f'There {verb} {count} object(s) to be imported. Starting the process...') for item in json_list: - self.logger.debug(' * Importing item: "{}"...'.format(item.get('name', 'N/A'))) - try: - self.process_json(item) - except Exception as e: - self.logger.error('Import failed') - self.logger.error(str(e)) - if verbose: - import traceback - ex = traceback.format_exception(*sys.exc_info()) - self.logger.error('\n'.join([e for e in ex])) - if stop_on_error: - raise e - result = False - self.logger.info('... completed') - self.logger.info('Import ({}) result for file: {}'.format(self.__class__.__name__, 'success' if result else 'failed')) + result = self.import_object(item, stop_on_error, verbose) and result + self.logger.debug('Finished importing the object(s)') + return result + + def import_object(self, item: Dict, stop_on_error=False, verbose=False): + """ + Tries to import a single object + """ + item_name = item.get('name', 'N/A').encode('utf-8') + self.logger.debug(f'Trying to import item: "{item_name}"') + try: + result = self.process_json(item) + except Exception as e: + self.logger.error('Import failed: ') + self.logger.error(str(e)) + if verbose: + import traceback + ex = traceback.format_exception(*sys.exc_info()) + self.logger.error('\n'.join([e for e in ex])) + if stop_on_error: + raise e + result = False + self.logger.debug(f'Successfully imported item: {item_name}') return result def process_json(self, import_dict): raise NotImplementedError("Abstract method: Implement this method in the child class.") - def process_contacts(self, contacts_list): + def process_contacts(self, contacts_list: List[Dict]): + if not isinstance(contacts_list, list): + self.logger.warn('Contact list is not a list... Please check the imported file.') + return [], [], [] + local_custodians = [] local_personnel = [] external_contacts = [] @@ -60,67 +149,18 @@ def process_contacts(self, contacts_list): first_name = contact_dict.get('first_name').strip() last_name = contact_dict.get('last_name').strip() email = contact_dict.get('email', '').strip() - full_name = f"{first_name} {last_name}" - role_name = contact_dict.get('role') - _is_local_contact = self.is_local_contact(contact_dict) - if _is_local_contact: - user = User.objects.filter(first_name__icontains=first_name.lower(), - last_name__icontains=last_name.lower()) - if len(user) > 1: - users = User.objects.filter(first_name__icontains=first_name.lower(), - last_name__icontains=last_name.lower(), - email=email) - if len(users) != 1: - msg = 'Something went wrong - there are two contacts with the same first and last name, and it''s impossible to differentiate them' - self.logger.warning(msg, full_name) - user = users.first() - elif len(user) == 1: - user = user.first() - else: - user = None - if user is None: - self.logger.warning('No user found for %s - hence an inactive user will be created', full_name) - - usr_name = first_name.lower() + '.' 
-                    user = User.objects.create(username=usr_name,
-                                               password='',
-                                               first_name=first_name,
-                                               last_name=last_name,
-                                               is_active=False,
-                                               email=email)
-                    user.staff = True
-
-                if role_name == PRINCIPAL_INVESTIGATOR:
-                    g = Group.objects.get(name=GroupConstants.VIP.value)
-                    user.groups.add(g)
-
-                user.save()
+            role_name = self.validate_contact_type(contact_dict.get('role'))
+            affiliations = contact_dict.get('affiliations', [])
+            if self.is_local_contact(contact_dict):
+                user = self.process_local_contact(first_name, last_name, email, role_name, affiliations)
 
                 if role_name == PRINCIPAL_INVESTIGATOR:
                     local_custodians.append(user)
                 else:
                     local_personnel.append(user)
             else:
-                contact = (Contact.objects.filter(first_name__icontains=first_name.lower(),
-                                                  last_name__icontains=last_name.lower()) | Contact.objects.filter(
-                    first_name__icontains=first_name.upper(), last_name__icontains=last_name.upper())).first()
-                if contact is None:
-                    contact_type_pi, _ = ContactType.objects.get_or_create(name=role_name)
-                    contact, _ = Contact.objects.get_or_create(
-                        first_name=first_name,
-                        last_name=last_name,
-                        email=email,
-                        type=contact_type_pi
-                    )
-                    affiliations = contact_dict.get('affiliations')
-                    for affiliation in affiliations:
-                        partner = Partner.objects.filter(name=affiliation)
-                        if len(partner):
-                            contact.partners.add(partner[0])
-                        else:
-                            self.logger.warning('no partner found for the affiliation: ' + affiliation)
-                    contact.save()
-                external_contacts.append(contact)
+                contact = self.process_external_contact(first_name, last_name, email, role_name, affiliations)
+                external_contacts.append(contact)
 
         return local_custodians, local_personnel, external_contacts
 
@@ -136,17 +176,85 @@ def process_date(self, date_string):
             year = match.group(1)
             month = match.group(2)
             day = match.group(3)
-            date_str = "{}-{}-{}".format(year, month, day)
+            date_str = f"{year}-{month}-{day}"
             try:
                 r = datetime.strptime(date_str, "%Y-%m-%d").date()
                 return r
             except (TypeError, ValueError):
-                raise self.DateImportException("Couldn't parse the following date: " + str(date_string))
+                raise self.DateImportException(f"Couldn't parse the following date: {str(date_string)}")
         else:
-            raise self.DateImportException("Couldn't parse the following date: " + str(date_string))
+            raise self.DateImportException(f"Couldn't parse the following date: {str(date_string)}")
 
     @staticmethod
     def is_local_contact(contact_dict):
         home_organisation = Partner.objects.get(acronym=settings.COMPANY)
-        _is_local_contact = home_organisation.name in contact_dict.get("affiliations")
+        _is_local_contact = home_organisation.name in contact_dict.get("affiliations") or home_organisation.acronym in contact_dict.get("affiliations")
         return _is_local_contact
+
+    def validate_contact_type(self, contact_type):
+        try:
+            ContactType.objects.get(name=contact_type)
+        except ContactType.DoesNotExist:
+            self.logger.warning(f'Unknown contact type: {contact_type}. Setting to "Other".')
Setting to "Other".') + contact_type = 'Other' + return contact_type + + def process_local_contact(self, first_name, last_name, email, role_name, affiliations): + user = User.objects.filter(first_name__icontains=first_name,last_name__icontains=last_name) + if len(user) > 1: + users = User.objects.filter(first_name__icontains=first_name, + last_name__icontains=last_name, + email=email) + if len(users) != 1: + msg = 'Something went wrong - there are two contacts with the same first and last name, and it''s impossible to differentiate them' + self.logger.warning(msg) + user = users.first() + elif len(user) == 1: + user = user.first() + else: + user = None + if user is None: + self.logger.warning(f"No user found for '{first_name} {last_name}' - hence an inactive user will be created") + + usr_name = first_name.lower() + '.' + last_name.lower() + user = User.objects.create(username=usr_name, + password='', + first_name=first_name, + last_name=last_name, + is_active=False, + email=email) + user.staff = True + + if role_name == PRINCIPAL_INVESTIGATOR: + g = Group.objects.get(name=GroupConstants.VIP.value) + user.groups.add(g) + user.save() + return user + + def process_external_contact(self, first_name, last_name, email, role_name, affiliations): + contact = ( + Contact.objects.filter( + first_name__icontains=first_name, + last_name__icontains=last_name, + partners__name__in=affiliations) | + Contact.objects.filter( + first_name__icontains=first_name, + last_name__icontains=last_name, + partners__acronym__in=affiliations) + ).first() + if contact is None: + contact = Contact.objects.create( + first_name=first_name, + last_name=last_name, + email=email, + type=ContactType.objects.get(name=role_name) + ) + for affiliation in affiliations: + partner = Partner.objects.filter(name=affiliation) + if len(partner): + contact.partners.add(partner[0]) + else: + self.logger.warning(f"Cannot link contact '{first_name} {last_name}' to partner. No partner found for the affiliation: {affiliation}") + contact.save() + return contact + diff --git a/core/importer/datasets_exporter.py b/core/importer/datasets_exporter.py index 910dde66..acf05510 100644 --- a/core/importer/datasets_exporter.py +++ b/core/importer/datasets_exporter.py @@ -34,7 +34,7 @@ def export_to_file(self, file_handle, stop_on_error=False, verbose=False): logger.error('Dataset export failed') logger.error(str(e)) result = False - logger.info('Dataset export complete see file: {}'.format(file_handle)) + logger.info(f'Dataset export complete see file: {file_handle}') return result def export_to_buffer(self, buffer, stop_on_error=False, verbose=False): @@ -44,13 +44,14 @@ def export_to_buffer(self, buffer, stop_on_error=False, verbose=False): else: objects = Dataset.objects.all() for dataset in objects: - logger.debug(' * Exporting dataset: "{}"...'.format(dataset.__str__())) + dataset_repr = str(dataset) + logger.debug(f' * Exporting dataset: "{dataset_repr}"...') try: pd = dataset.to_dict() pd["source"] = settings.SERVER_URL dataset_dicts.append(pd) except Exception as e: - logger.error('Export failed for dataset {}'.format(dataset.title)) + logger.error(f'Export failed for dataset {dataset.title}') logger.error(str(e)) if verbose: import traceback @@ -60,7 +61,7 @@ def export_to_buffer(self, buffer, stop_on_error=False, verbose=False): raise e logger.debug(" ... 
complete!") json.dump({ - "$schema": "https://git-r3lab.uni.lu/pinar.alper/metadata-tools/raw/master/metadata_tools/resources/elu-dataset.json", + "$schema": "https://raw.githubusercontent.com/elixir-luxembourg/json-schemas/master/schemas/elu-dataset.json", "items": dataset_dicts}, buffer, indent=4) return buffer diff --git a/core/importer/datasets_importer.py b/core/importer/datasets_importer.py index ec22b4ab..2f524bc5 100644 --- a/core/importer/datasets_importer.py +++ b/core/importer/datasets_importer.py @@ -15,6 +15,7 @@ class DatasetsImporter(BaseImporter): """ json_schema_validator = DatasetJSONSchemaValidator() + json_schema_uri = 'https://raw.githubusercontent.com/elixir-luxembourg/json-schemas/master/schemas/elu-dataset.json' def process_json(self, dataset_dict): try: @@ -30,7 +31,8 @@ def process_json(self, dataset_dict): dataset = None if dataset: - self.logger.warning("Dataset with title '{}' already found. It will be updated.".format(title)) + title_to_show = title.encode('utf8') + self.logger.warning(f"Dataset with title '{title_to_show}' already found. It will be updated.") else: dataset = Dataset.objects.create(title=title) @@ -63,16 +65,20 @@ def process_json(self, dataset_dict): for local_custodian in local_custodians: local_custodian.assign_permissions_to_dataset(dataset) - self.process_datadeclarations(dataset_dict, dataset) + studies_map = self.process_datadeclarations(dataset_dict, dataset) + # Must be run after processing data declarations + self.process_studies(dataset_dict, studies_map) + + # Must be run after processing data declarations legal_bases = self.process_legal_bases(dataset_dict, dataset) if legal_bases: dataset.legal_basis_definitions.set(legal_bases, bulk=False) - # self.process_studies(dataset_dict, dataset) - dataset.save() + return True + # @staticmethod # def process_local_custodians(dataset_dict): # result = [] @@ -104,19 +110,22 @@ def process_json(self, dataset_dict): def process_project(self, project_acronym): try: - project = Project.objects.get(acronym=project_acronym.strip()) + acronym = project_acronym.strip() + project = Project.objects.get(acronym=acronym) return project except Project.DoesNotExist: - self.logger.warning("Tried to find project with acronym ='{}'; it was not found. Will try to look for the acronym...".format(project_acronym)) + msg = f"Tried to find project with acronym ='{acronym}'; it was not found. Will try to look for the acronym..." + self.logger.warning(msg) try: - project = Project.objects.get(title=project_acronym.strip()) + project = Project.objects.get(title=acronym) return project except Project.DoesNotExist: - self.logger.warning("Tried to find project with title ='{}'; it was not found. Will create a new one.".format(project_acronym.strip())) + msg = f"Tried to find project with title ='{acronym}'; it was not found. Will create a new one." 
+                self.logger.warning(msg)
                 project = Project.objects.create(
-                    acronym=project_acronym.strip(),
-                    title=project_acronym.strip()
+                    acronym=acronym,
+                    title=acronym
                 )
                 return project
 
@@ -221,15 +230,19 @@ def process_acl_info(self, storage_location_dict):
         return None
 
     def process_datadeclarations(self, dataset_dict, dataset):
-
+        studies_map = {}
         datadec_dicts = dataset_dict.get('data_declarations', [])
 
         for ddec_dict in datadec_dicts:
-            self.process_datadeclaration(ddec_dict, dataset)
+            data_declaration, studies_map_key = self.process_datadeclaration(ddec_dict, dataset)
+            studies_map[studies_map_key] = data_declaration
+
+        return studies_map
 
     def process_datadeclaration(self, datadec_dict, dataset):
         try:
             title = datadec_dict['title']
+            title_to_show = title.encode('utf-8')
         except KeyError:
             raise DatasetImportError(data='Data declaration title missing')
 
@@ -239,10 +252,14 @@
             datadec = None
 
         if datadec:
-            self.logger.warning("Data declaration with title '{}' already found. It will be updated.".format(title))
+            msg = f"Data declaration with title '{title_to_show}' already found. It will be updated."
+            self.logger.warning(msg)
         else:
            datadec = DataDeclaration.objects.create(title=title, dataset=dataset)
 
+        if 'source_study' not in datadec_dict or len(datadec_dict.get('source_study')) == 0:
+            self.logger.warning(f"Data declaration with title '{title_to_show}' has no `source_study` set - there will be a problem processing study/cohort data.")
+
         datadec.has_special_subjects = datadec_dict.get('has_special_subjects', False)
         datadec.data_types_notes = datadec_dict.get('data_type_notes', None)
         datadec.deidentification_method = self.process_deidentification_method(datadec_dict)
@@ -255,7 +272,7 @@
         datadec.comments = datadec_dict.get('source_notes', None)
         datadec.embargo_date = datadec_dict.get('embargo_date', None)
         datadec.storage_duration_criteria = datadec_dict.get("storage_duration_criteria", None)
-        datadec.storage_end_date = datadec_dict.get("storage_end_date", None)
+        datadec.end_of_storage_duration = datadec_dict.get("storage_end_date", None)
 
         if 'data_types' in datadec_dict:
             datadec.data_types_received.set(self.process_datatypes(datadec_dict))
@@ -271,12 +288,14 @@
         datadec.save()
         datadec.updated = True
 
+        return datadec, datadec_dict.get('source_study')
+
     def process_datatypes(self, datadec_dict):
         datatypes = []
         for datatype_str in datadec_dict.get('data_types', []):
             datatype_str = datatype_str.strip()
             try:
-                datatype = DataType.objects.get(name=datatype_str)
+                datatype, _ = DataType.objects.get_or_create(name=datatype_str)
             except DataType.DoesNotExist:
                 self.logger.error('Import failed')
                 raise DatasetImportError(data=f'Cannot find data type: "{datatype_str}".')
@@ -470,23 +489,53 @@
         return legal_basis_obj
 
-    def process_studies(self, dataset_object):
+    def process_studies(self, dataset_dict, studies_map):
 
         def _process_study(study):
             name = study.get('name', '')
+            safe_name = name.encode('utf-8')
             description = study.get('description', '')
             has_ethics_approval = study.get('has_ethics_approval', False)
             ethics_approval_notes = study.get('ethics_approval_notes', '')
-            url = study.get('url', '')  # TODO: Currently this is lost
-            local_custodians, local_personnel, external_contacts = self.process_contacts(study.get("contacts", []))
+            url = study.get('url', '')
 
-            cohort = Cohort(
-                ethics_confirmation=has_ethics_approval,
-                comments=description,
-                title=name,
-            )
+            try:
+                cohort = Cohort.objects.get(title=name)
+            except Cohort.DoesNotExist:
+                cohort = None
+
+            if cohort:
+                msg = f"Cohort with title '{safe_name}' already found. All fields are going to be updated."
+                self.logger.warning(msg)
+            else:
+                cohort = Cohort.objects.create(title=name)
+            cohort.description = description
+            cohort.ethics_confirmation = has_ethics_approval
+            cohort.ethics_notes = ethics_approval_notes
+            cohort.cohort_web_page = url
+            cohort.save()
+            cohort.updated = True
+
+            local_custodians, local_personnel, external_contacts = self.process_contacts(study.get("contacts", []))
             cohort.owners.set(external_contacts)
+            cohort.save()
+            cohort.updated = True
+            msg = f"Cohort '{safe_name}' imported successfully. Will try to link it to the data declaration..."
+            self.logger.info(msg)
+
+            try:
+                data_declaration = studies_map.get(name)
+                if data_declaration is None:
+                    raise KeyError()
+                if not isinstance(data_declaration, DataDeclaration):
+                    raise KeyError()
+                data_declaration.cohorts.add(cohort)
+                data_declaration.save()
+                safe_title = data_declaration.title.encode('utf8')
+                self.logger.info(f"Cohort '{safe_name}' linked successfully to data declaration '{safe_title}'")
+            except:
+                self.logger.warning(f"The data declaration for the study '{safe_name}' was not found.")
 
         if 'studies' not in dataset_dict:
             return
diff --git a/core/importer/elx_submission_importer.py b/core/importer/elx_submission_importer.py
index fbecf7f4..7703b1bd 100644
--- a/core/importer/elx_submission_importer.py
+++ b/core/importer/elx_submission_importer.py
@@ -1,15 +1,12 @@
 import json
 import sys
 
-
 from core.exceptions import DatasetImportError
-
+from core.importer.base_importer import BaseImporter
+from core.importer.projects_importer import ProjectsImporter
 from core.models import Contact, Dataset, Project, ContactType
-
 from core.utils import DaisyLogger
-from .base_importer import BaseImporter
-from .projects_importer import ProjectsImporter
+
 
 logger = DaisyLogger(__name__)
 
@@ -20,8 +17,7 @@ class DishSubmissionImporter(BaseImporter):
     and create relevant Dataset, Collaboration, (external Project) and DataDeclaration records in DAISY
     """
 
-    class DateImportException(Exception):
-        pass
+    schema_name = ''
 
     def __init__(self, elixir_project_name):
         self.elixir_project_name = elixir_project_name
@@ -30,7 +26,8 @@ def import_json(self, json_string, stop_on_error=False, verbose=False):
         try:
             logger.info('Import started')
             submission_dict = json.loads(json_string)
-            logger.debug(' * Importing Data Declaration: "{}"...'.format(submission_dict['name']))
+            submission_name = submission_dict['name'].encode('utf8')
+            logger.debug(f' * Importing Data Declaration: "{submission_name}"...')
 
             if self.is_elixir_submission(submission_dict):
                 project = Project.objects.filter(acronym=self.elixir_project_name).first()
@@ -158,8 +155,8 @@ def process_submission_as_dataset(self, submission_dict, project):
         dataset = Dataset.objects.filter(title=elu_accession.strip()).first()
 
         if dataset is not None:
-            logger.warning(
-                "Dataset with title '{}' already found. It will be updated.".format(elu_accession.strip()))
+            msg = f"Dataset with title '{elu_accession.strip()}' already found. It will be updated."
+            logger.warning(msg)
         else:
             dataset = Dataset.objects.create(title=elu_accession.strip())
 
@@ -170,8 +167,7 @@
         title = submission_dict['name']
         scope_str = 'Elixir' if submission_dict['scope'] == 'e' else 'LCSB Collaboration'
         local_project_str = submission_dict.get('local_project', '')
-        dataset.comments = "ELU Accession: {}\nTitle: {}\nCreated On: {}\nScope: {}\nSubmitted to Project: {}".format(
-            elu_accession, title, created_on_str, scope_str, local_project_str)
+        dataset.comments = f"ELU Accession: {elu_accession}\nTitle: {title}\nCreated On: {created_on_str}\nScope: {scope_str}\nSubmitted to Project: {local_project_str}"
 
         local_custodians, local_personnel, external_contacts = self.process_contacts(submission_dict)
diff --git a/core/importer/partners_exporter.py b/core/importer/partners_exporter.py
index 82cf1861..54676099 100644
--- a/core/importer/partners_exporter.py
+++ b/core/importer/partners_exporter.py
@@ -21,7 +21,7 @@ def export_to_file(self, file_handle, stop_on_error=False, verbose=False):
             logger.error(str(e))
             result = False
 
-        logger.info('Partner export complete see file: {}'.format(file_handle))
+        logger.info(f'Partner export complete, see file: {file_handle}')
         return result
 
 
@@ -30,7 +30,7 @@ def export_to_buffer(self, buffer, stop_on_error=False, verbose=False):
         partner_dicts = []
         partners = Partner.objects.all()
         for partner in partners:
-            logger.debug(' * Exporting partner: "{}"...'.format(partner.name))
+            logger.debug(f' * Exporting partner: "{partner.name}"...')
             try:
                 pd = partner.to_dict()
                 pd["source"] = settings.SERVER_URL
@@ -47,7 +47,7 @@
                 raise e
         logger.debug(" ... complete!")
         json.dump({
-            "$schema": "https://git-r3lab.uni.lu/pinar.alper/metadata-tools/raw/master/metadata_tools/resources/elu-institution.json",
+            "$schema": "https://raw.githubusercontent.com/elixir-luxembourg/json-schemas/master/schemas/elu-institution.json",
            "items": partner_dicts}, buffer , indent=4)
         return buffer
diff --git a/core/importer/partners_importer.py b/core/importer/partners_importer.py
index 44831bdc..c58de2e9 100644
--- a/core/importer/partners_importer.py
+++ b/core/importer/partners_importer.py
@@ -7,13 +7,12 @@ class PartnersImporter(BaseImporter):
     """
     `PartersImporter`, should be able to fill the database with institutions information, based on JSON file
     complying to the schema in:
-    https://git-r3lab.uni.lu/pinar.alper/metadata-tools/blob/master/metadata_tools/resources/elu-institution.json
+    https://raw.githubusercontent.com/elixir-luxembourg/json-schemas/master/schemas/elu-institution.json
 
     Usage example:
-        def import_partner():
-            with open("partners.json", "r") as file_with_partners:
-                importer = PartnersImporter()
-                importer.import_json(file_with_partners.read())
+        def import_partners():
+            importer = PartnersImporter()
+            importer.import_json_file("partners.json")
     """
 
     json_schema_validator = InstitutionJSONSchemaValidator()
diff --git a/core/importer/projects_exporter.py b/core/importer/projects_exporter.py
index 88291d8c..4999c1cf 100644
--- a/core/importer/projects_exporter.py
+++ b/core/importer/projects_exporter.py
@@ -35,7 +35,7 @@
             logger.error('Project export failed')
             logger.error(str(e))
             result = False
-        logger.info('Project export complete see file: {}'.format(file_handle))
+        logger.info(f'Project export complete, see file: {file_handle}')
         return result
 
 
@@ -48,13 +48,14 @@ def export_to_buffer(self, buffer, stop_on_error=False, verbose=False):
         objects = Project.objects.all()
         for project in objects:
-            logger.debug(' * Exporting project: "{}"...'.format(project.acronym))
+            logger.debug(f' * Exporting project: "{project.acronym}"...')
             try:
                 pd = project.to_dict()
                 pd["source"] = settings.SERVER_URL
                 project_dicts.append(pd)
             except Exception as e:
-                logger.error('Export failed for project {}'.format(project.__str__()))
+                project_repr = str(project)
+                logger.error(f'Export failed for project {project_repr}')
                 logger.error(str(e))
                 if verbose:
                     import traceback
@@ -64,7 +65,7 @@
                     raise e
         logger.debug(" ... complete!")
         json.dump({
-            "$schema": "https://git-r3lab.uni.lu/pinar.alper/metadata-tools/raw/master/metadata_tools/resources/elu-project.json",
+            "$schema": "https://raw.githubusercontent.com/elixir-luxembourg/json-schemas/master/schemas/elu-project.json",
            "items": project_dicts}, buffer , indent=4)
         return buffer
diff --git a/core/importer/projects_importer.py b/core/importer/projects_importer.py
index f901f43a..5f463bdf 100644
--- a/core/importer/projects_importer.py
+++ b/core/importer/projects_importer.py
@@ -7,16 +7,16 @@ class ProjectsImporter(BaseImporter):
     """
     `ProjectsImporter`, should be able to fill the database with projects' information, based on JSON file
     complying to the schema in:
-    https://git-r3lab.uni.lu/pinar.alper/metadata-tools/blob/master/metadata_tools/resources/elu-project.json
+    https://raw.githubusercontent.com/elixir-luxembourg/json-schemas/master/schemas/elu-project.json
 
     Usage example:
         def import_projects():
-            with open("projects.json", "r") as file_with_projects:
-                importer = ProjectsImporter()
-                importer.import_json(file_with_projects.read())
+            importer = ProjectsImporter()
+            importer.import_json_file("projects.json")
     """
 
     json_schema_validator = ProjectJSONSchemaValidator()
+    json_schema_uri = 'https://raw.githubusercontent.com/elixir-luxembourg/json-schemas/master/schemas/elu-project.json'
 
     def process_json(self, project_dict):
         publications = [self.process_publication(publication_dict)
@@ -47,7 +47,8 @@
                 elu_accession=elu_accession
             )
         else:
-            self.logger.warning("Project with acronym '{}' already found. It will be updated.".format(acronym))
+            acronym_to_show = acronym.encode('utf8')
+            self.logger.warning(f"Project with acronym '{acronym_to_show}' already found. It will be updated.")
         project.title = name
         project.description = description
         project.has_cner = has_cner
@@ -60,8 +61,9 @@
             if 'start_date' in project_dict and project_dict.get('start_date') and len(project_dict.get('start_date')) > 0:
                 project.start_date = self.process_date(project_dict.get('start_date'))
         except self.DateImportException:
+            date_str = project_dict.get('start_date')
             message = "\tCouldn't import the 'start_date'. Does it follow the '%Y-%m-%d' format?\n\t"
-            message = message + 'Was: "{}". '.format(project_dict.get('start_date'))
+            message = message + f'Was: "{date_str}". '
             message = message + "Continuing with empty value."
             self.logger.warning(message)
 
@@ -69,8 +71,9 @@
             if 'end_date' in project_dict and project_dict.get('end_date') and len(project_dict.get('end_date')) > 0:
                 project.end_date = self.process_date(project_dict.get('end_date'))
         except self.DateImportException:
+            date_str = project_dict.get('end_date')
             message = "\tCouldn't import the 'end_date'. Does it follow the '%Y-%m-%d' format?\n\t"
-            message = message + 'Was: "{}". '.format(project_dict.get('end_date'))
+            message = message + f'Was: "{date_str}". '
             message = message + "Continuing with empty value."
             self.logger.warning(message)
 
@@ -95,6 +98,8 @@
         for local_custodian in local_custodians:
             local_custodian.assign_permissions_to_dataset(project)
 
+        return True
+
     @staticmethod
     def process_publication(publication_dict):
         # First, try to find if the publication is already in our database
@@ -102,17 +107,13 @@
 
         # Search by DOI
         if 'doi' in publication_dict and len(publication_dict.get('doi')) > 0:
-            publication = Publication.objects.filter(doi=publication_dict.get('doi'))
-            if len(publication):
-                publication = publication[0]
+            if Publication.objects.filter(doi=publication_dict.get('doi')).count() == 1:
+                publication = Publication.objects.get(doi=publication_dict.get('doi'))
 
         # Search by citation string
         if publication is None and 'citation_string' in publication_dict and len(publication_dict.get('citation_string')) > 0:
-            publication = Publication.objects.filter(citation=publication_dict.get('citation_string'))
-            if len(publication):
-                publication = publication[0]
-            else:
-                publication = None
+            if Publication.objects.filter(citation=publication_dict.get('citation_string')).count() == 1:
+                publication = Publication.objects.get(citation=publication_dict.get('citation_string'))
 
         # Create a new one if it does not exist
         if publication is None:
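Because every importer now exposes `json_schema_uri`, a caller can pick the right importer for an arbitrary export file by probing `can_process_json`. A sketch of that dispatch — not part of this diff, shown only to illustrate the intent of the new property:

```python
# Sketch: choose an importer based on the "$schema" of the file being imported.
from core.importer.datasets_importer import DatasetsImporter
from core.importer.projects_importer import ProjectsImporter

def import_any(path_to_file: str) -> bool:
    with open(path_to_file, encoding='utf-8') as f:
        contents = f.read()
    for importer in (ProjectsImporter(), DatasetsImporter()):
        if importer.can_process_json(contents):
            return importer.import_json(contents)
    raise ValueError(f'No importer matches the "$schema" of {path_to_file}')
```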
diff --git a/core/management/commands/_private.py b/core/management/commands/_private.py
index 5787fc1b..ca584e4f 100644
--- a/core/management/commands/_private.py
+++ b/core/management/commands/_private.py
@@ -13,12 +13,14 @@ def add_arguments(self, parser):
             '-d',
             '--directory',
             help='Directory with JSON files',
-            default=False)
+            default=False
+        )
         parser.add_argument(
             '-f',
             '--file',
             help='Path to JSON file',
-            default=False)
+            default=False
+        )
         parser.add_argument(
             '--verbose',
             action='store_true',
@@ -32,22 +34,23 @@ def handle(self, *args, **options):
         try:
-            verbose = options.get('verbose')
-            exxit = options.get('exit')
             importer = self.get_importer()
+
+            verbose = options.get('verbose')
+            should_exit_on_error = options.get('exit')
             path_to_json_file = options.get('file')
             path_to_json_directory = options.get('directory')
 
             if not(path_to_json_directory or path_to_json_file):
-                raise CommandError('Either directory or file must be specified!')
+                raise CommandError('Either directory (--directory) or file (--file) argument must be specified!')
 
             # Import files from directory
             if path_to_json_directory:
-                self.import_directory(importer, path_to_json_directory, verbose, exxit)
+                self.import_directory(importer, path_to_json_directory, verbose, should_exit_on_error)
 
             # Import records from file
             if path_to_json_file:
-                self.import_file(importer, path_to_json_file, verbose, exxit)
+                self.import_file(importer, path_to_json_file, verbose, should_exit_on_error)
 
             self.stdout.write(self.style.SUCCESS("Import was successful!"))
 
@@ -60,20 +63,18 @@
     def get_importer(self):
         raise NotImplementedError("Abstract method: Implement this method in the child class.")
 
-    def import_directory(self, importer, dir_path, verbose, exxit):
+    def import_directory(self, importer, dir_path, verbose, should_exit_on_error):
         for json_file_path in os.listdir(dir_path):
             if json_file_path.endswith(JSON_SUFFIX):
-                self.import_file(importer, json_file_path, verbose, exxit)
+                correct_path = os.path.join(dir_path, json_file_path)
+                self.import_file(importer, correct_path, verbose, should_exit_on_error)
 
-    def import_file(self, importer, full_path, verbose, exxit):
-        with open(full_path) as json_file:
-            json_file_contents = json_file.read()
-            self.stdout.write("Importing file %s" % full_path)
-            result = importer.import_json(json_file_contents, verbose=verbose)
-            if not result:
-                self.stdout.write(self.style.ERROR("Import failed"))
-                if exxit:
-                    raise CommandError('Exited after error.')
+    def import_file(self, importer, full_path, verbose, should_exit_on_error):
+        if importer.import_json_file(full_path, should_exit_on_error, verbose=verbose):
+            return
+        self.stdout.write(self.style.ERROR("Import failed"))
+        if should_exit_on_error:
+            raise CommandError('Exited after error.')
 
 
 class ExportBaseCommand(BaseCommand):
@@ -84,9 +85,13 @@ def add_arguments(self, parser):
             '-f',
             '--file',
             help='Path to JSON file',
-            default=False)
+            default=False
+        )
 
     def handle(self, *args, **options):
+        if not(options.get('file')):
+            raise CommandError('File (--file) argument must be specified!')
+
         try:
             path_to_json_file = options.get('file')
             with open(path_to_json_file, mode="w+", encoding='utf-8') as json_file:
@@ -95,7 +100,7 @@
 
             self.stdout.write(self.style.SUCCESS("Export complete!"))
         except Exception as e:
-            msg = f"Something went wrong during the import ({__file__}:class {self.__class__.__name__})! Is the path valid? Is the file valid? Details:"
+            msg = f"Something went wrong during the export ({__file__}:class {self.__class__.__name__})! Details:"
             self.stderr.write(
                 self.style.ERROR(msg))
             self.stderr.write(self.style.ERROR(str(e)))
diff --git a/core/management/commands/load_demo_data.py b/core/management/commands/load_demo_data.py
index 8e61650f..e7fa4080 100644
--- a/core/management/commands/load_demo_data.py
+++ b/core/management/commands/load_demo_data.py
@@ -16,19 +16,15 @@ class Command(BaseCommand):
 
     def _load_demo_projects(self):
         projects_json_file = os.path.join(DEMO_DATA_DIR, 'projects.json')
-        with open(projects_json_file, encoding='utf-8') as json_file:
-            json_file_contents = json_file.read()
-            importer = ProjectsImporter()
-            importer.import_json(json_file_contents, False, True)
-            self.stdout.write(self.style.SUCCESS("Project import successful!"))
+        importer = ProjectsImporter()
+        importer.import_json_file(projects_json_file, False, True)
+        self.stdout.write(self.style.SUCCESS("Project import successful!"))
 
     def _load_demo_datasets(self):
         dataset_json_file = os.path.join(DEMO_DATA_DIR, 'datasets.json')
-        with open(dataset_json_file, encoding='utf-8') as json_file:
-            json_file_contents = json_file.read()
-            importer = DatasetsImporter()
-            importer.import_json(json_file_contents, False, True)
-            self.stdout.write(self.style.SUCCESS("Dataset import successful!"))
+        importer = DatasetsImporter()
+        importer.import_json_file(dataset_json_file, False, True)
+        self.stdout.write(self.style.SUCCESS("Dataset import successful!"))
 
     def _create_demo_superuser(self):
         if User.objects.filter(username='admin').count() == 0:
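The reworked commands can also be exercised from Python (for example in tests or data-migration scripts) through Django's `call_command`. A sketch, under the assumption that the option names map to keyword arguments in the usual way; the file paths are invented:

```python
# Sketch: invoking the import commands programmatically.
from django.core.management import call_command

call_command('import_projects', file='projects.json', verbose=True)
call_command('import_datasets', directory='path/to/dir/with/json/files/')
```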
partner institute {}".format(ins)) + raise FixtureImportError(data=f"unknown partner institute {ins}") c.institutes.set(institutes) if 'owners' in cohort: owners = [] diff --git a/core/migrations/0014_cohort_url_ethics_notes.py b/core/migrations/0014_cohort_url_ethics_notes.py new file mode 100644 index 00000000..e0b7e722 --- /dev/null +++ b/core/migrations/0014_cohort_url_ethics_notes.py @@ -0,0 +1,23 @@ +# Generated by Django 2.2.13 on 2021-03-24 23:11 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('core', '0013_userestriction_use_class_note'), + ] + + operations = [ + migrations.AddField( + model_name='cohort', + name='cohort_web_page', + field=models.URLField(blank=True, help_text='If the cohort has a webpage, please provide its URL link here.', verbose_name='Cohorts URL page'), + ), + migrations.AddField( + model_name='cohort', + name='ethics_notes', + field=models.TextField(blank=True, default='', help_text='Provide notes on ethics approval. If it does not exist, please state justifications here.', null=True, verbose_name='Ethics Approval notes'), + ), + ] diff --git a/core/models/access.py b/core/models/access.py index 0956ebf4..3659e687 100644 --- a/core/models/access.py +++ b/core/models/access.py @@ -48,7 +48,7 @@ class Meta: ) def __str__(self): - return 'Access given to dataset {}: {}'.format(self.dataset.title, self.access_notes) + return f'Access given to dataset {self.dataset.title}: {self.access_notes}' @property def display_locations(self): diff --git a/core/models/cohort.py b/core/models/cohort.py index f74eba46..0b60a554 100644 --- a/core/models/cohort.py +++ b/core/models/cohort.py @@ -18,6 +18,16 @@ class AppMeta: verbose_name='Confirmation of Ethics Approval?', help_text='Is the existence of the study\'s ethics approval confirmed by the cohort owner.') + ethics_notes = models.TextField(verbose_name='Ethics Approval notes', + default='', + help_text='Provide notes on ethics approval. 
diff --git a/core/models/contact.py b/core/models/contact.py
index d4a2a486..e55afd9c 100644
--- a/core/models/contact.py
+++ b/core/models/contact.py
@@ -51,10 +51,10 @@ class AppMeta:
 
     def __str__(self):
-        return "{} {} ({})".format(self.first_name, self.last_name, self.type.name)
+        return f"{self.first_name} {self.last_name} ({self.type.name})"
 
     def full_name(self):
-        return "{} {}".format(self.first_name, self.last_name)
+        return f"{self.first_name} {self.last_name}"
 
     def to_dict(self):
         partners_dict = []
diff --git a/core/models/contact_type.py b/core/models/contact_type.py
index c94c38d3..42100a2f 100644
--- a/core/models/contact_type.py
+++ b/core/models/contact_type.py
@@ -18,4 +18,4 @@ class Meta:
         verbose_name='Name of the contact type', unique=True)
 
     def __str__(self):
-        return "{}".format(self.name)
+        return f"{self.name}"
diff --git a/core/models/document.py b/core/models/document.py
index eaa884a7..f111aa6f 100644
--- a/core/models/document.py
+++ b/core/models/document.py
@@ -16,9 +16,7 @@ def get_file_name(instance, filename):
     Return the path of the final path of the document on the filsystem.
     """
     now = timezone.now().strftime('%Y/%m/%d')
-    return 'documents/{}/{}/{}_{}'.format(
-        instance.content_type.name, now, instance.object_id, filename
-    )
+    return f'documents/{instance.content_type.name}/{now}/{instance.object_id}_{filename}'
 
 
 class Document(CoreModel):
@@ -59,7 +57,7 @@ class Meta:
         null=True)
 
     def __str__(self):
-        return "{} ({})".format(self.content.name, self.content_object)
+        return f"{self.content.name} ({self.content_object})"
 
     @property
     def shortname(self):
diff --git a/core/models/document_type.py b/core/models/document_type.py
index 719b8c43..66e7f6cb 100644
--- a/core/models/document_type.py
+++ b/core/models/document_type.py
@@ -16,4 +16,4 @@ class Meta:
         verbose_name='Name of the type of the document')
 
     def __str__(self):
-        return "{}".format(self.name)
+        return f"{self.name}"
\ No newline at end of file
diff --git a/core/models/legal_basis.py b/core/models/legal_basis.py
index 328d9817..0d27832c 100644
--- a/core/models/legal_basis.py
+++ b/core/models/legal_basis.py
@@ -46,7 +46,8 @@ class Meta:
 
     def __str__(self):
-        return 'Legal Bases for dataset {}: {}.'.format(self.dataset.title, ",".join(str(lbt.code) for lbt in self.legal_basis_types.all()))
+        legal_basis_types = ",".join(str(lbt.code) for lbt in self.legal_basis_types.all())
+        return f'Legal Basis for dataset {self.dataset.title}: {legal_basis_types}.'
     def to_dict(self):
         return {
diff --git a/core/models/legal_basis_type.py b/core/models/legal_basis_type.py
index f1c4ceda..13093273 100644
--- a/core/models/legal_basis_type.py
+++ b/core/models/legal_basis_type.py
@@ -20,7 +20,7 @@ class Meta:
         verbose_name='Name', unique=True)
 
     def __str__(self):
-        return "{} [{}]".format(self.name, self.code)
+        return f"{self.name} [{self.code}]"
 
     def to_dict(self):
         return {
diff --git a/core/models/share.py b/core/models/share.py
index 927c1a0c..a1e2d89c 100644
--- a/core/models/share.py
+++ b/core/models/share.py
@@ -63,4 +63,4 @@ class Meta:
     )
 
     def __str__(self):
-        return 'Share/Transfer of {} with {}.'.format(self.dataset.title, self.partner.name)
+        return f'Share/Transfer of {self.dataset.title} with {self.partner.name}'
diff --git a/core/models/storage_location.py b/core/models/storage_location.py
index 0fe26961..c9578ec2 100644
--- a/core/models/storage_location.py
+++ b/core/models/storage_location.py
@@ -58,6 +58,6 @@ class Meta:
     )
 
     def __str__(self):
-        return '{} - {} - {}'.format(self.category, self.backend.name, self.location_description)
+        return f'{self.category} - {self.backend.name} - {self.location_description}'
 
 
diff --git a/core/models/storage_resource.py b/core/models/storage_resource.py
index 25234309..973c28f8 100644
--- a/core/models/storage_resource.py
+++ b/core/models/storage_resource.py
@@ -38,4 +38,4 @@ class Meta:
 
 
     def __str__(self):
-        return "{}".format(self.name)
+        return f"{self.name}"
diff --git a/core/models/use_restriction.py b/core/models/use_restriction.py
index c09bb64e..d9e6dca0 100644
--- a/core/models/use_restriction.py
+++ b/core/models/use_restriction.py
@@ -24,26 +24,26 @@ class Meta:
         help_text='The data declaration to which this restriction applies.')
 
     # use_class after renaming
-    restriction_class = models.CharField(verbose_name='Restriction class',
+    restriction_class = models.CharField(verbose_name='Use Category',
                                          max_length=20,
                                          blank=True,
                                          null=True,
                                          help_text='Select the GA4GH code for the restriction. Refer to \'GA4GH Consent Codes\' for a detailed explanation of each.')
 
     # use_class_note after renaming
-    notes = models.TextField(verbose_name='Description',
+    notes = models.TextField(verbose_name='Use Restriction note',
                              max_length=255,
                              blank=True,
                              null=True,
                              help_text='Provide a free text description of the restriction.')
 
-    use_class_note = models.TextField(verbose_name='Use restriction class note',
+    use_class_note = models.TextField(verbose_name='Use Category note',
                                       max_length=255,
                                       blank=True,
                                       null=True,
                                       help_text='A question asked when collecting the restriction class')
 
-    use_restriction_rule = models.TextField(verbose_name='Does the rule forbid (FORBIDDEN), constraint (CONSTRAINTS) or have no constraints (NO_CONSTRAINTS)?',
+    use_restriction_rule = models.TextField(verbose_name='Use Restriction Rule',
                                             choices=USE_RESTRICTION_CHOICES,
                                             default=USE_RESTRICTION_CHOICES.NO_CONSTRAINTS,
                                             blank=False,
@@ -54,15 +54,35 @@ def clone_shallow(self):
         clone = UseRestriction()
         clone.restriction_class = self.restriction_class
         clone.notes = self.notes
+        clone.use_class_note = self.use_class_note
+        clone.use_restriction_rule = self.use_restriction_rule
         return clone
 
     def __str__(self):
-        return "{} - {}".format(self.restriction_class, self.notes)
+        if self.data_declaration_id is None:
+            title = '(no DataDeclaration coupled)'
+        else:
+            title = self.data_declaration.title or '(DataDeclaration with no title)'
+        return f"{self.restriction_class} - on {title} - {self.notes}"
 
     def to_dict(self):
+        """
+        Used for import/export - the keys are conformant to the schema
+        """
         return {
             "use_class": self.restriction_class,
             "use_class_note": self.use_class_note,
             "use_restriction_note": self.notes,
             "use_restriction_rule": self.use_restriction_rule
+        }
+
+    def serialize(self):
+        """
+        Used for forms - the keys are conformant to the django model
+        """
+        return {
+            "restriction_class": self.restriction_class,
+            "use_class_note": self.use_class_note,
+            "notes": self.notes,
+            "use_restriction_rule": self.use_restriction_rule
         }
\ No newline at end of file
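The distinction between the two serializers is easy to miss: `to_dict` speaks the JSON-schema vocabulary (`use_class`, `use_restriction_note`) while `serialize` speaks the model/form vocabulary (`restriction_class`, `notes`). A quick contrast — the values are invented, though `GRU` is a real GA4GH consent code:

```python
# Sketch: same instance, two key vocabularies.
from core.models import UseRestriction

restriction = UseRestriction(restriction_class='GRU', notes='General research use only.')

restriction.to_dict()    # {'use_class': 'GRU', 'use_restriction_note': 'General research use only.', ...}
restriction.serialize()  # {'restriction_class': 'GRU', 'notes': 'General research use only.', ...}
```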
diff --git a/core/models/user.py b/core/models/user.py
index 84e3fe7e..d23e8e26 100644
--- a/core/models/user.py
+++ b/core/models/user.py
@@ -95,7 +95,7 @@ def __str__(self):
         return fullname or self.username
 
     def save(self, *args, **kw):
-        self.full_name = '{0} {1}'.format(self.first_name, self.last_name)
+        self.full_name = f'{self.first_name} {self.last_name}'
         super(User, self).save(*args, **kw)
 
     def is_part_of(self, *args):
diff --git a/core/tests/data/datasets.json b/core/tests/data/datasets.json
index bd7ea583..e494aee5 100644
--- a/core/tests/data/datasets.json
+++ b/core/tests/data/datasets.json
@@ -329,6 +329,7 @@
             "ombudsman": "Biobank, Principle Investigator",
             "subjects_category": "cases_and_controls",
             "has_special_subjects": false,
+            "storage_end_date": "2030-05-10",
             "consent_status": "homogeneous",
             "use_restrictions": [
                 {
diff --git a/core/tests/importer/test_datasets_importer.py b/core/tests/importer/test_datasets_importer.py
index f4e3f7d9..55ee413c 100644
--- a/core/tests/importer/test_datasets_importer.py
+++ b/core/tests/importer/test_datasets_importer.py
@@ -14,7 +14,7 @@ def test_dummy(celery_session_worker, storage_resources, can_defer_constraint_ch
 
 @pytest.mark.django_db
-def test_import_datasets(celery_session_worker, storage_resources, data_types, partners, gdpr_roles, can_defer_constraint_checks):
+def test_import_datasets(celery_session_worker, storage_resources, contact_types, data_types, partners, gdpr_roles, can_defer_constraint_checks):
     VIP = factories.VIPGroup()
 
     factories.UserFactory.create(first_name='Igor', last_name='Teal', groups=[VIP], email="user@uni.edu")
@@ -25,9 +25,9 @@
     factories.UserFactory.create(first_name='Ali', last_name='Gator', groups=[VIP], email="user@uni.edu")
 
     data_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), "../data/datasets.json")
-    with open(data_file, "r") as f:
-        importer = DatasetsImporter()
-        importer.import_json(f.read(), True)
+    importer = DatasetsImporter()
+    importer.import_json_file(data_file, True)
+
     assert 5 == Dataset.objects.all().count()
     assert 4 == Project.objects.all().count()
@@ -53,3 +53,6 @@
 
     ddecs = DataDeclaration.objects.all()
     assert 5 == ddecs.count()
+
+    ddec = DataDeclaration.objects.get(title='XYZ')
+    assert "2030-05-10" == ddec.end_of_storage_duration.strftime("%Y-%m-%d")
diff --git a/core/tests/importer/test_import_projects.py b/core/tests/importer/test_projects_importer.py
similarity index 88%
rename from core/tests/importer/test_import_projects.py
rename to core/tests/importer/test_projects_importer.py
index 0dc7e0aa..f0a2ce01 100644
--- a/core/tests/importer/test_import_projects.py
+++ b/core/tests/importer/test_projects_importer.py
@@ -8,7 +8,7 @@
 
 @pytest.mark.django_db
-def test_import_projects(celery_session_worker, partners):
+def test_import_projects(celery_session_worker, contact_types, partners):
 
     VIP = factories.VIPGroup()
@@ -22,9 +22,9 @@
     factories.UserFactory.create(first_name='James', last_name='BK')
 
     projects_json = os.path.join(os.path.dirname(os.path.realpath(__file__)), "../data/projects.json")
-    with open(projects_json, "r") as file_with_projects:
-        importer = ProjectsImporter()
-        importer.import_json(file_with_projects.read(), True)
+    importer = ProjectsImporter()
+    importer.import_json_file(projects_json, True)
+
     projects = Project.objects.all()
     assert 2 == projects.count()
     project1 = Project.objects.filter(acronym='In vitro disease modeling').first()
@@ -45,3 +45,8 @@
     assert 11 == project2.start_date.month
     assert 1 == project2.start_date.day
     assert 1 == project2.publications.count()
+
+
+@pytest.mark.django_db
+def test_process_publication(*args, **kwargs):
+    pass
\ No newline at end of file
diff --git a/elixir_daisy/settings.py b/elixir_daisy/settings.py
index 60aca6c9..2b992f36 100644
--- a/elixir_daisy/settings.py
+++ b/elixir_daisy/settings.py
@@ -177,10 +177,10 @@
     'disable_existing_loggers': False,
     'formatters': {
         'verbose': {
-            'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
+            'format': u'%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
         },
         'simple': {
-            'format': '%(levelname)s %(message)s'
+            'format': u'%(levelname)s %(message)s'
         },
     },
     'filters': {
@@ -201,7 +201,7 @@
         'mail_admins': {
             'level': 'ERROR',
             'filters': ['require_debug_false'],
-            'class': 'django.utils.log.AdminEmailHandler',
+            'class': 'django.utils.log.AdminEmailHandler'
         },
         'sql': {
             'level': 'DEBUG',
diff --git a/notification/email_sender.py b/notification/email_sender.py
index 19f29cb9..5af371c8 100644
--- a/notification/email_sender.py
+++ b/notification/email_sender.py
@@ -23,7 +23,7 @@ def send_the_email(sender_email, recipients, subject, template, context):
     context['profile_url'] = reverse('profile')
 
     # prepare email
-    subject = "{p} {s}".format(p=SUBJECT_PREFIX, s=subject)
+    subject = f"{SUBJECT_PREFIX} {subject}"
"{p} {s}".format(p=SUBJECT_PREFIX, s=subject) + subject = f"{SUBJECT_PREFIX} {subject}" text_message = render_to_string('%s.txt' % template, context) html_message = render_to_string('%s.html' % template, context) msg = EmailMultiAlternatives( diff --git a/test/factories.py b/test/factories.py index 76beea0e..f6c133d1 100644 --- a/test/factories.py +++ b/test/factories.py @@ -46,9 +46,9 @@ class Meta: first_name = factory.Faker('first_name') last_name = factory.Faker('last_name') - last_name = factory.Faker('email') - full_name = factory.LazyAttribute(lambda x: '{0}.{1}'.format(x.first_name, x.last_name).lower()) - username = factory.LazyAttribute(lambda x: '{0}.{1}@uni.lux'.format(x.first_name, x.last_name).lower()) + email = factory.Faker('email') + full_name = factory.LazyAttribute(lambda x: f'{x.first_name}.{x.last_name}'.lower()) + username = factory.LazyAttribute(lambda x: f'{x.first_name}.{x.last_name}@uni.lux'.lower()) @factory.post_generation def groups(self, create, extracted, **kwargs): diff --git a/web/admin.py b/web/admin.py index 900ffce2..4f3dec80 100644 --- a/web/admin.py +++ b/web/admin.py @@ -45,6 +45,10 @@ class StorageResourceAdmin(admin.ModelAdmin): form = StorageResourceForm +class ProjectAdmin(admin.ModelAdmin): + exclude = ('gene_terms', 'disease_terms', 'phenotype_terms', 'study_terms') + + # DAISY core models admin.site.site_header = 'DAISY administration' admin.site.register(Access) @@ -65,7 +69,7 @@ class StorageResourceAdmin(admin.ModelAdmin): admin.site.register(Partner) admin.site.register(PartnerRole) # contract.py admin.site.register(PersonalDataType) -admin.site.register(Project) +admin.site.register(Project, ProjectAdmin) admin.site.register(Publication) admin.site.register(RestrictionClass) admin.site.register(SensitivityClass) diff --git a/web/static/css/daisy.scss b/web/static/css/daisy.scss index cf1e8ae4..a332144d 100644 --- a/web/static/css/daisy.scss +++ b/web/static/css/daisy.scss @@ -411,4 +411,9 @@ i.reset-form-button { border-radius: 3px 3px 3px 3px; padding: 0.25rem 0.5rem 0.25rem 0.5rem; text-align: center; +} + +/* "Add new" button visual fix - no underline under the button */ +a.no-underline:hover { + text-decoration: none; } \ No newline at end of file diff --git a/web/templates/_includes/card_list.html b/web/templates/_includes/card_list.html index 82ca0fb0..d20c99cf 100644 --- a/web/templates/_includes/card_list.html +++ b/web/templates/_includes/card_list.html @@ -32,7 +32,7 @@

{{ list_title }}

{% if add_new_link %}
-                <a href="{{ add_new_link }}">
+                <a href="{{ add_new_link }}" class="no-underline">
diff --git a/web/templates/cohorts/cohort.html b/web/templates/cohorts/cohort.html
index e5b2693b..0b5a4dcd 100644
--- a/web/templates/cohorts/cohort.html
+++ b/web/templates/cohorts/cohort.html
@@ -27,8 +27,12 @@

{{ cohort }}

     {% endfor %}
+    URL
+    {{ cohort.cohort_web_page | default:'-' }}
     Ethics confirmation
     {{ cohort.ethics_confirmation | yesno }}
+    Ethics confirmation notes
+    {{ cohort.ethics_notes | default:'-' }}
Accession number
{{ cohort.elu_accession | default:'-'}}
Remarks
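The two rows added above read `cohort.cohort_web_page` and `cohort.ethics_notes`, so the Cohort model must expose fields under exactly those names. A minimal sketch of what the template assumes (field names come from the template variables; the concrete field types on the real Cohort model in `core/models` may differ):

```python
from django.db import models


class Cohort(models.Model):
    """Sketch only - names are taken from the template above, types are assumptions."""
    cohort_web_page = models.URLField(blank=True, null=True)     # new "URL" row
    ethics_confirmation = models.BooleanField(default=True)      # rendered with the |yesno filter
    ethics_notes = models.TextField(blank=True, null=True)       # new "Ethics confirmation notes" row
    elu_accession = models.CharField(max_length=50, blank=True, null=True)  # "Accession number" row

    class Meta:
        app_label = 'core'
```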
diff --git a/web/templates/data_declarations/data_declaration.html b/web/templates/data_declarations/data_declaration.html index ccc8fa08..36e3f63b 100644 --- a/web/templates/data_declarations/data_declaration.html +++ b/web/templates/data_declarations/data_declaration.html @@ -41,7 +41,7 @@

{{ object.title }}

{{ object.comments | default:"" }}
{% endif %}
Source Cohort(s)
-    {{ data_declaration.cohorts.all | join:', ' }}
+    {{ object.cohorts.all | join:', ' }}
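The fix above works because this page is rendered by a `DetailView`: Django injects the instance into the context as `object` (plus a lowercased model-name alias, here `datadeclaration`), never as `data_declaration`, and the template engine silently renders missing variables as empty, which is why the cohort list used to come out blank. A minimal sketch of the naming rule (the real `DatadeclarationDetailView` lives in `web/views/data_declarations.py` and may set other options):

```python
from django.views.generic import DetailView

from core.models import DataDeclaration


class DatadeclarationDetailView(DetailView):
    model = DataDeclaration
    template_name = 'data_declarations/data_declaration.html'
    # DetailView exposes the instance as `object` and as `datadeclaration`
    # (the model name, lowercased); `data_declaration` was never in the
    # context. Setting the alias explicitly would have been the other fix:
    # context_object_name = 'data_declaration'
```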
diff --git a/web/templates/data_declarations/data_declaration_form_edit.html b/web/templates/data_declarations/data_declaration_form_edit.html index 393d784b..2cf1f70d 100644 --- a/web/templates/data_declarations/data_declaration_form_edit.html +++ b/web/templates/data_declarations/data_declaration_form_edit.html @@ -80,10 +80,13 @@

{% block card_title %}Edit Data Declaration{% endblock %}
             {% include '_includes/field.html' with field=restriction_form.restriction_class %}
+            {% include '_includes/field.html' with field=restriction_form.use_restriction_rule %}
-            {% include '_includes/field.html' with field=restriction_form.notes %}
-
-
+
+            {% include '_includes/field.html' with field=restriction_form.use_class_note %}
+            {% include '_includes/field.html' with field=restriction_form.notes %}
+
+            {% comment %} {% endcomment %}
         {% endfor %}
diff --git a/web/templates/datasets/dataset.html b/web/templates/datasets/dataset.html
index 61d4edd9..6e911664 100644
--- a/web/templates/datasets/dataset.html
+++ b/web/templates/datasets/dataset.html
@@ -49,10 +49,10 @@

{{ dataset.title }}

{% if request.user.is_superuser %}

         {% if dataset.is_published %}
-            unpublish
+            undo
         {% else %}
-            publish
         {% endif %}

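Note that the template above shows the publish/undo links only when `request.user.is_superuser`, while the views wired up later in this diff (`web/views/datasets.py`, `web/views/projects.py`) are guarded by `@staff_member_required`, which admits any active user with `is_staff=True`. If the button visibility and the endpoint permission are meant to match, a stricter decorator is one option; this is a suggestion, not part of the changeset:

```python
from django.contrib.auth.decorators import user_passes_test

# Mirrors the template's `request.user.is_superuser` check on the server side.
superuser_required = user_passes_test(lambda u: u.is_active and u.is_superuser)
```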
diff --git a/web/templatetags/daisy_utils.py b/web/templatetags/daisy_utils.py
index 0ddd4a72..69a31149 100644
--- a/web/templatetags/daisy_utils.py
+++ b/web/templatetags/daisy_utils.py
@@ -117,14 +117,7 @@ def render(self, context):
             icon = 'radio_button_checked'
             clazz = 'active'
 
-        # return html
-        return """<li class="{clazz}"><a href="{url}"><i class="material-icons">{icon}</i> {facet_name} ({facet_count})</a></li>""".format(
-            url=url,
-            icon=icon,
-            clazz=clazz,
-            facet_name=current_facet[0],
-            facet_count=current_facet[1],
-        )
+        return f'<li class="{clazz}"><a href="{url}"><i class="material-icons">{icon}</i> {current_facet[0]} ({current_facet[1]})</a></li>'
 
 
 @register.tag
diff --git a/web/urls.py b/web/urls.py
index 56f73a3b..af1701e5 100644
--- a/web/urls.py
+++ b/web/urls.py
@@ -13,11 +13,11 @@
 from web.views.dashboard import dashboard
 from web.views.data_declarations import DatadeclarationDetailView, DatadeclarationEditView
 from web.views.datasets import DatasetDetailView, DatasetEditView, dataset_list, \
-    DatasetCreateView, DatasetDelete
+    DatasetCreateView, DatasetDelete, publish_dataset, unpublish_dataset
 from web.views.export import contacts_export, cohorts_export, contracts_export, datasets_export, partners_export, projects_export
 from web.views.partner import PartnerCreateView, PartnerDelete, partner_search_view, PartnerDetailView, PartnerEditView, publish_partner
-from web.views.projects import ProjectCreateView, ProjectEditView, ProjectDetailView, ProjectDelete
+from web.views.projects import ProjectCreateView, ProjectEditView, ProjectDetailView, ProjectDelete, publish_project, unpublish_project
 from web.views.publication import PublicationCreateView, PublicationListView, PublicationEditView, \
     add_publication_to_project, remove_publication_from_project, pick_publication_for_project
@@ -36,7 +36,7 @@
     path('definitions/cohorts/export', cohorts_export, name="cohorts_export"),
     path('definitions/cohorts/add', CohortCreateView.as_view(), name='cohort_add'),
     path('definitions/cohorts/<int:pk>/edit', CohortEditView.as_view(), name="cohort_edit"),
-    path('definitions/cohorts/<int:pk>//delete', CohortDelete.as_view(), name="cohort_delete"),
+    path('definitions/cohorts/<int:pk>/delete', CohortDelete.as_view(), name="cohort_delete"),
     path('definitions/cohorts/<int:pk>/', CohortDetailView.as_view(), name="cohort"),
 
     path('contracts/', contract_list, name="contracts"),
@@ -85,6 +85,8 @@
     path('dataset/<int:pk>/', DatasetDetailView.as_view(), name="dataset"),
     path('dataset/<int:pk>/edit', DatasetEditView.as_view(), name="dataset_edit"),
     path('dataset/<int:pk>/delete', DatasetDelete.as_view(), name="dataset_delete"),
+    path('dataset/<int:pk>/publish', publish_dataset, name="dataset_publish"),
+    path('dataset/<int:pk>/unpublish', unpublish_dataset, name="dataset_unpublish"),
@@ -142,6 +144,8 @@
     path('project/<int:pk>/edit', ProjectEditView.as_view(), name="project_edit"),
     path('project/<int:pk>/delete', ProjectDelete.as_view(), name="project_delete"),
     path('project/<int:pk>/', ProjectDetailView.as_view(), name="project"),
+    path('project/<int:pk>/publish', publish_project, name="project_publish"),
+    path('project/<int:pk>/unpublish', unpublish_project, name="project_unpublish"),
     path('project/<int:pk>/add-personnel', add_personnel_to_project, name="add_personnel_to_project"),
     path('project/<int:pk>/add-dataset', datasets.DatasetCreateView.as_view(), name="datasets_add_to_project"),
     path('project/<int:pk>/del-personnel/<int:user_pk>', remove_personnel_from_project,
diff --git a/web/views/contact.py b/web/views/contact.py
index fbdb6a0f..412573f0 100644
--- a/web/views/contact.py
+++ b/web/views/contact.py
@@ -59,7 +59,7 @@ def add_contact_to_project(request, pk):
     else:
         error_messages = []
         for field, error in form.errors.items():
-            error_message = "{}: {}".format(field, error[0])
+            error_message = f"{field}: {error[0]}"
             error_messages.append(error_message)
         add_message(request, messages.ERROR, "\n".join(error_messages))
     return redirect(to='project', pk=pk)
diff --git a/web/views/contracts.py b/web/views/contracts.py
index 94c8ceda..b6de069c 100644
--- a/web/views/contracts.py
+++ b/web/views/contracts.py
@@ -165,7 +165,7 @@ def partner_role_delete(request, pk):
 # else:
 #     error_messages = []
 #     for field, error in form.errors.items():
-#         error_message = "{}: {}".format(field, error[0])
+# 
error_message = f"{field}: {error[0]}" # error_messages.append(error_message) # messages.add_message(request, messages.ERROR, "\n".join(error_messages)) # return redirect(to='contract', pk=pk) diff --git a/web/views/data_declarations.py b/web/views/data_declarations.py index 417f43b2..62ae93c0 100644 --- a/web/views/data_declarations.py +++ b/web/views/data_declarations.py @@ -1,24 +1,29 @@ import json +import operator + +from functools import reduce from django.contrib import messages -from django.http import HttpResponse +from django.core.paginator import Paginator +from django.db import IntegrityError, transaction +from django.db.models import Q +from django.http import HttpResponse, JsonResponse, HttpResponseBadRequest from django.shortcuts import render, get_object_or_404, redirect from django.views.decorators.http import require_http_methods -from functools import reduce -from django.urls import reverse_lazy from django.views.generic import DetailView, UpdateView +from django.urls import reverse_lazy + from haystack.query import SearchQuerySet -from django.db import IntegrityError, transaction + from core.constants import Permissions -from core.forms import DataDeclarationForm, DataDeclarationSubFormOther, DataDeclarationSubFormNew, \ - DataDeclarationSubFormFromExisting, DataDeclarationEditForm +from core.forms import DataDeclarationForm, DataDeclarationSubFormOther, DataDeclarationSubFormNew +from core.forms import DataDeclarationSubFormFromExisting, DataDeclarationEditForm from core.forms.data_declaration import RestrictionFormset from core.models import Dataset, Partner, DataDeclaration, UseRestriction -from core.utils import DaisyLogger from core.permissions import permission_required, CheckerMixin, constants -from django.core.paginator import Paginator -from django.http import JsonResponse, HttpResponseBadRequest -from django.db.models import Q +from core.utils import DaisyLogger + + log = DaisyLogger(__name__) @@ -191,12 +196,10 @@ class DatadeclarationEditView(CheckerMixin, UpdateView): def get(self, request, *args, **kwargs): - data_declaration = self.get_object() declaration_form = DataDeclarationEditForm(instance=data_declaration) - restriction_data = [{'restriction_class': l.restriction_class, 'notes': l.notes} - for l in data_declaration.data_use_restrictions.all()] + restriction_data = [restriction.serialize() for restriction in data_declaration.data_use_restrictions.all()] restriction_formset = RestrictionFormset(initial=restriction_data) return render(request, self.template_name, { 'form': declaration_form, @@ -211,7 +214,6 @@ def post(self, request, **kwargs): declaration_form = DataDeclarationEditForm(request.POST, instance=data_declaration) restriction_formset = RestrictionFormset(request.POST) - import operator formset_valid = reduce(operator.and_, [res_form.is_valid() for res_form in restriction_formset], True) if declaration_form.is_valid() and formset_valid: @@ -225,7 +227,7 @@ def post(self, request, **kwargs): restriction = restriction_form.save(commit=False) restriction.data_declaration = data_declaration restriction.save() - messages.add_message(request, messages.SUCCESS, "data declaration {} edited".format(data_declaration.title)) + messages.add_message(request, messages.SUCCESS, f"data declaration {data_declaration.title} edited") except IntegrityError: #If the transaction failed messages.add_message(request, messages.ERROR, "An error occurred when saving data declaration") diff --git a/web/views/datasets.py b/web/views/datasets.py index 
9d56a122..326a0ce2 100644
--- a/web/views/datasets.py
+++ b/web/views/datasets.py
@@ -1,5 +1,6 @@
 from django.conf import settings
 from django.contrib import messages
+from django.contrib.admin.views.decorators import staff_member_required
 from django.shortcuts import render, redirect, get_object_or_404
 from django.urls import reverse_lazy
 from django.views.generic import CreateView, DetailView, UpdateView, DeleteView
@@ -130,3 +131,17 @@ def get_context_data(self, **kwargs):
+@staff_member_required
+def publish_dataset(request, pk):
+    dataset = get_object_or_404(Dataset, pk=pk)
+    dataset.is_published = True
+    dataset.save()
+    return redirect(reverse_lazy('dataset', kwargs={'pk': dataset.id}))
+
+
+@staff_member_required
+def unpublish_dataset(request, pk):
+    dataset = get_object_or_404(Dataset, pk=pk)
+    dataset.is_published = False
+    dataset.save()
+    return redirect(reverse_lazy('dataset', kwargs={'pk': dataset.id}))
\ No newline at end of file
diff --git a/web/views/documents.py b/web/views/documents.py
index 963f3dac..d28ecfc7 100644
--- a/web/views/documents.py
+++ b/web/views/documents.py
@@ -19,10 +19,10 @@ def rfc5987_content_disposition(file_name):
     ascii_name = unicodedata.normalize('NFKD', file_name).encode('ascii', 'ignore').decode()
-    header = 'attachment; filename="{}"'.format(ascii_name)
+    header = f'attachment; filename="{ascii_name}"'
     if ascii_name != file_name:
         quoted_name = urlquote(file_name)
-        header += '; filename*=UTF-8\'\'{}'.format(quoted_name)
+        header += f'; filename*=UTF-8\'\'{quoted_name}'
     return header
diff --git a/web/views/projects.py b/web/views/projects.py
index a119dbe6..ab458935 100644
--- a/web/views/projects.py
+++ b/web/views/projects.py
@@ -1,5 +1,6 @@
 from django.conf import settings
 from django.contrib import messages
+from django.contrib.admin.views.decorators import staff_member_required
 from django.contrib.contenttypes.models import ContentType
 from django.db import transaction, IntegrityError
 from django.http import HttpResponse
@@ -265,3 +266,18 @@ def get_context_data(self, **kwargs):
         context['id'] = self.object.id
         return context
+
+@staff_member_required
+def publish_project(request, pk):
+    project = get_object_or_404(Project, pk=pk)
+    project.is_published = True
+    project.save()
+    return redirect(reverse_lazy('project', kwargs={'pk': project.id}))
+
+
+@staff_member_required
+def unpublish_project(request, pk):
+    project = get_object_or_404(Project, pk=pk)
+    project.is_published = False
+    project.save()
+    return redirect(reverse_lazy('project', kwargs={'pk': project.id}))
\ No newline at end of file
diff --git a/web/views/publication.py b/web/views/publication.py
index 3413940b..1221057c 100644
--- a/web/views/publication.py
+++ b/web/views/publication.py
@@ -91,7 +91,7 @@ def add_publication_to_project(request, pk):
     else:
         error_messages = []
         for field, error in form.errors.items():
-            error_message = "{}: {}".format(field, error[0])
+            error_message = f"{field}: {error[0]}"
             error_messages.append(error_message)
         add_message(request, messages.ERROR, "\n".join(error_messages))
     return redirect(to='project', pk=pk)
diff --git a/web/views/users.py b/web/views/users.py
index 6b68dcae..a66cad96 100644
--- a/web/views/users.py
+++ b/web/views/users.py
@@ -24,7 +24,7 @@ def add_personnel_to_project(request, pk):
     else:
         error_messages = []
         for field, error in form.errors.items():
-            error_message = "{}: {}".format(field, error[0])
+            error_message = f"{field}: {error[0]}"
             error_messages.append(error_message)
         add_message(request, messages.ERROR, "\n".join(error_messages))
     return redirect(to='project', pk=pk)