Improve linting with Flake8 #494

Open
wants to merge 5 commits into base: develop
Changes from all commits
5 changes: 4 additions & 1 deletion .github/workflows/main.yml
@@ -30,9 +30,12 @@ jobs:
- name: Check code formatting with Black
run: docker-compose exec -T web black --check .

- name: Lint code with Flake8
run: docker-compose exec -T web flake8

- name: Execute the tests
run: docker-compose exec -T web python setup.py pytest

- name: Stop containers
if: always()
run: docker-compose down
run: docker-compose down
4 changes: 4 additions & 0 deletions .pre-commit-config.yaml
@@ -4,6 +4,10 @@ repos:
hooks:
- id: black
language_version: python3
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v2.0.0
hooks:
- id: flake8
- repo: https://github.com/pre-commit/mirrors-eslint
rev: v8.15.0
hooks:
34 changes: 22 additions & 12 deletions conftest.py
@@ -1,21 +1,18 @@
import os

import pytest
from django.apps import apps
from django.conf import settings
from django.contrib.auth.models import Group, Permission
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import Group
from django.db.backends.postgresql.features import DatabaseFeatures
from guardian.shortcuts import assign_perm

from core.constants import Groups as GroupConstants
from core.management.commands.load_initial_data import Command as CommandLoadInitialData
from core.permissions import GROUP_PERMISSIONS


FIXTURE_DIR = os.path.join(settings.BASE_DIR, "core", "fixtures")

## FAKE LDAP DIRECTORY

# FAKE LDAP DIRECTORY
LCSB = ("OU=LCSB,OU=Faculties,OU=UNI-Users,DC=uni,DC=lux", {"ou": ["LCSB"]})
disabled_lcsb = (
"OU=LCSB,OU=Faculties,OU=UNI-DisabledUsers,DC=uni,DC=lux",
@@ -33,7 +30,10 @@ def make_fake_ldap_user(
"""
Create a fake LDAP user
"""
enc = lambda x: x.encode("utf-8")

def enc(x):
return x.encode("utf-8")

dn = "LCSB,OU=Faculties,OU=UNI-Users,DC=uni,DC=lux"
if is_external:
dn = "Administration,OU=FSTC,OU=Faculties,OU=UNI-Users,DC=uni,DC=lux"
@@ -120,22 +120,28 @@ def celery_session_worker(celery_session_worker):
# those users must correspond to those created in the LDAP tree
@pytest.fixture
def user_normal(django_user_model):
u = django_user_model.objects.create(username="normal.user", password="password")
u = django_user_model.objects.create_user(
username="normal.user", password="password", email="normal.user@email.com"
)
u.save()
return u


@pytest.fixture
def user_vip(django_user_model):
u = django_user_model.objects.create(username="pi.number1", password="password")
u = django_user_model.objects.create_user(
username="pi.number1", password="password", email="Pi@email.com"
)
g, _ = Group.objects.get_or_create(name=GroupConstants.VIP.value)
u.groups.add(g)
return u


@pytest.fixture
def user_data_steward(django_user_model):
u = django_user_model.objects.create(username="data.steward", password="password")
u = django_user_model.objects.create_user(
username="data.steward", password="password", email="data.steward@email.com"
)
g, _ = Group.objects.get_or_create(name=GroupConstants.DATA_STEWARD.value)
u.groups.add(g)
return u
@@ -176,12 +182,16 @@ def users(django_user_model, user_normal, user_vip, user_data_steward):
"""
password = "password"

u = django_user_model.objects.create(username="pi.number2", password=password)
u = django_user_model.objects.create_user(
username="pi.number2", password=password, email="Pi2@email.com"
)
g, _ = Group.objects.get_or_create(name=GroupConstants.VIP.value)
u.groups.add(g)
u.save()

u = django_user_model.objects.create(username="external.user", password=password)
u = django_user_model.objects.create_user(
username="external.user", password=password, email="external.user@email.com"
)
u.save()
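
For context, a minimal sketch (a hypothetical pytest-django test, not part of this diff) of the practical difference between objects.create() and objects.create_user() used above: only the latter hashes the password, so check_password() against the raw string succeeds.

import pytest


@pytest.mark.django_db
def test_create_user_hashes_password(django_user_model):
    # create() stores the raw string in the password column, so the check fails.
    raw = django_user_model.objects.create(username="raw.user", password="password")
    # create_user() runs the password through the configured hasher before saving.
    hashed = django_user_model.objects.create_user(
        username="hashed.user", password="password"
    )
    assert not raw.check_password("password")
    assert hashed.check_password("password")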


2 changes: 1 addition & 1 deletion core/apps.py
@@ -9,7 +9,7 @@ class CoreConfig(AppConfig):
name = "core"

def ready(self):
import core.models.signals
import core.models.signals # noqa: F401

# Prevent use of conflicting versions of import JSON schemas
json_schema_uri = getattr(settings, "IMPORT_JSON_SCHEMAS_URI")
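
A minimal sketch (hypothetical app config; package and module names are illustrative) of the pattern behind the added # noqa: F401: the signals module is imported only for its side effect of registering receivers, so flake8 would otherwise report it as an unused import.

from django.apps import AppConfig


class ExampleConfig(AppConfig):
    name = "example"

    def ready(self):
        # Imported for its side effect only: loading the module connects the
        # @receiver handlers. Without the inline marker flake8 reports F401.
        import example.signals  # noqa: F401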
5 changes: 4 additions & 1 deletion core/forms/access.py
@@ -16,7 +16,10 @@ class Meta:
"access_notes": Textarea(attrs={"rows": 2, "cols": 40}),
}
heading = "Record Access"
heading_help = "Specify who can access the data and for how long. You can define access of each person or describe group of users with access in remarks below."
heading_help = (
"Specify who can access the data and for how long. You can define access of each person or "
"describe group of users with access in remarks below."
)

def __init__(self, *args, **kwargs):
dataset = kwargs.pop("dataset", None)
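
A minimal standalone sketch of the mechanism used for the wrapped heading_help strings throughout this diff: adjacent string literals inside parentheses are concatenated at compile time, so the resulting value is identical to the original single long line while each source line stays within flake8's line-length limit (E501).

heading_help = (
    "Specify who can access the data and for how long. "
    "You can define access of each person or describe group of users "
    "with access in remarks below."
)
# No separator is inserted between the parts; the literals join seamlessly.
assert "for how long. You can define" in heading_help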
6 changes: 5 additions & 1 deletion core/forms/data_declaration.py
@@ -204,7 +204,11 @@ class Meta:
model = DataDeclaration
fields = ["title"]
heading = "Add new Data Declaration"
heading_help = "Dataset can have one or many subsets, called data declarations. These can capture data from a particular partner, cohort or data of a particular type. Define first declaration below. More declarations can be added later."
heading_help = (
"Dataset can have one or many subsets, called data declarations. These can capture data from "
"a particular partner, cohort or data of a particular type. Define first declaration below. "
"More declarations can be added later."
)

def __init__(self, *args, **kwargs):
self.dataset = kwargs.pop("dataset")
5 changes: 4 additions & 1 deletion core/forms/legal_basis.py
@@ -10,7 +10,10 @@ class Meta:
fields = "__all__"
exclude = []
heading = "Add Legal Basis"
heading_help = "Capture the legal grounds for processing of this dataset under GDPR (personal data only). This can require support from your data stewards and data protection officer (DPO)."
heading_help = (
"Capture the legal grounds for processing of this dataset under GDPR (personal data only). "
"This can require support from your data stewards and data protection officer (DPO)."
)

def __init__(self, *args, **kwargs):
dataset = kwargs.pop("dataset", None)
2 changes: 1 addition & 1 deletion core/forms/partner.py
@@ -1,5 +1,5 @@
from django.forms import ModelForm
from django import forms

from core.models import Partner


4 changes: 1 addition & 3 deletions core/forms/permission.py
@@ -1,8 +1,6 @@
from django import forms

from guardian.shortcuts import get_objects_for_user, assign_perm, remove_perm

from core.models import Dataset, User
from core.models import User
from core import constants


5 changes: 4 additions & 1 deletion core/forms/storage_location.py
@@ -10,7 +10,10 @@ class Meta:
fields = "__all__"
exclude = []
heading = "Add Storage Location"
heading_help = "Storage details ensure easy data retrieval and management. Record main location of the dataset. More storage locations can be added later."
heading_help = (
"Storage details ensure easy data retrieval and management. Record main location of the "
"dataset. More storage locations can be added later."
)

def __init__(self, *args, **kwargs):
dataset = kwargs.pop("dataset", None)
2 changes: 0 additions & 2 deletions core/forms/use_restriction.py
@@ -1,4 +1,3 @@
from django.core.exceptions import ValidationError
from django.forms import CharField, ModelForm, Select

from core.models import UseRestriction, RestrictionClass
@@ -46,7 +45,6 @@ def __init__(self, *args, **kwargs):
def clean(self):
cleaned_data = super().clean()
restriction_class = cleaned_data.get("restriction_class")
notes = cleaned_data.get("notes")
if not restriction_class:
self.add_error(
"restriction_class", "Please select a valid restriction class"
5 changes: 2 additions & 3 deletions core/importer/JSONSchemaValidator.py
@@ -1,8 +1,7 @@
import json
import jsonschema
import os
import sys
import urllib.request
import urllib

from django.conf import settings

@@ -74,7 +73,7 @@ def _load_schema(self, schema_name):

try:
return self._load_schema_from_url(schema_name)
except:
except (urllib.error.URLError, json.JSONDecodeError) as e:
logger.error(
"Error (2/2) loading schema from URI for JSON validation...: " + str(e)
)
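
A minimal standalone sketch (the function name and logging are illustrative, not the importer's actual API) of the pattern behind replacing the bare except: catching only the failures a remote schema load can actually raise keeps unrelated bugs from being silently swallowed.

import json
import logging
import urllib.error
import urllib.request


def load_remote_json(url: str):
    try:
        with urllib.request.urlopen(url) as response:
            return json.loads(response.read().decode("utf-8"))
    except (urllib.error.URLError, json.JSONDecodeError) as e:
        # Network failures and malformed documents are logged and handled here;
        # anything unexpected (e.g. an AttributeError) still propagates.
        logging.getLogger(__name__).error("Could not load schema from %s: %s", url, e)
        return None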
26 changes: 18 additions & 8 deletions core/importer/base_importer.py
@@ -69,8 +69,11 @@ def can_process_json(self, json_string: str) -> bool:
try:
object = json.loads(json_string)
return self.can_process_object(object)
except:
message = f'Couldn\'t check if the imported object has same "$schema" as the importer ({self.__class__.__name__}: {self.json_schema_uri}) - something went wrong while parsing the file'
except json.JSONDecodeError:
message = (
f'Couldn\'t check if the imported object has same "$schema" as the importer '
f"({self.__class__.__name__}: {self.json_schema_uri}) - something went wrong while parsing the file"
)
self.logger.warning(message)
return False

@@ -82,11 +85,17 @@ def can_process_object(self, json_object: Dict) -> bool:
self.logger.debug('The imported object has no "$schema" attribute')
return False
if self.json_schema_uri == json_object.get("$schema"):
message = f'The imported object has the same "$schema" ({self.json_schema_uri}) as the importer ({self.__class__.__name__})'
message = (
f'The imported object has the same "$schema" ({self.json_schema_uri}) as the importer '
f"({self.__class__.__name__})"
)
self.logger.debug(message)
return True
schema_name = json_object.get("$schema")
message = f'The imported object has different "$schema" ({schema_name}) than the importer ({self.__class__.__name__}: {self.json_schema_uri})'
message = (
f'The imported object has different "$schema" ({schema_name}) than the importer '
f"({self.__class__.__name__}: {self.json_schema_uri})"
)
self.logger.debug(message)
return False

@@ -128,7 +137,7 @@ def import_object_list(self, json_list: List[Dict]) -> bool:
self.json_schema_validator.validate_items(json_list, self.logger)
self.logger.debug("...JSON schema is OK!")
else:
self.logger.debug(f"Proceeding without using the validation")
self.logger.debug("Proceeding without using the validation")
count = len(json_list)
verb = "are" if count > 1 else "is"
self.logger.debug(
@@ -170,7 +179,7 @@ def publish_object(self, object) -> bool:
try:
object.publish(save=True)
result = True
except AttributeError as e:
except AttributeError:
self.logger.warning(
f"Publishing this type of entity ({object._meta.object_name}) is not implemented - item is not published."
)
@@ -245,7 +254,7 @@ def is_local_contact(contact_dict):

def validate_contact_type(self, contact_type):
try:
contact_type_obj = ContactType.objects.get(name=contact_type)
ContactType.objects.get(name=contact_type)
except ContactType.DoesNotExist:
self.logger.warning(
f'Unknown contact type: {contact_type}. Setting to "Other".'
@@ -328,7 +337,8 @@ def process_external_contact(
contact.partners.add(partner[0])
else:
self.logger.warning(
f"Cannot link contact '{first_name} {last_name}' to partner. No partner found for the affiliation: {affiliation}"
f"Cannot link contact '{first_name} {last_name}' to partner. No partner found for the "
f"affiliation: {affiliation}"
)
contact.save()
return contact
3 changes: 1 addition & 2 deletions core/importer/datasets_importer.py
@@ -7,7 +7,6 @@
DataDeclaration,
Dataset,
DataType,
Partner,
Project,
StorageResource,
Share,
@@ -583,7 +582,7 @@ def _process_study(study):
self.logger.info(
f"Cohort '{safe_name}' linked successfully to data declaration '{safe_title}'"
)
except:
except KeyError:
self.logger.warning(
f"The data declaration for the study '{safe_name}' not found. "
)
13 changes: 9 additions & 4 deletions core/importer/elx_submission_importer.py
@@ -3,8 +3,7 @@

from core.exceptions import DatasetImportError
from core.importer.base_importer import BaseImporter
from core.importer.projects_importer import ProjectsImporter
from core.models import Contact, Dataset, Project, ContactType
from core.models import Dataset, Project
from core.utils import DaisyLogger


@@ -34,7 +33,7 @@ def import_json(self, json_string, stop_on_error=False, verbose=False):
acronym=self.elixir_project_name
).first()

dataset = self.process_submission_as_dataset(submission_dict, project)
self.process_submission_as_dataset(submission_dict, project)
# contract = self.process_submission_as_contract(submission_dict, project)

# for study_dict in submission_dict.get('studies', []):
@@ -169,7 +168,13 @@ def process_submission_as_dataset(self, submission_dict, project):
"Elixir" if submission_dict["scope"] == "e" else "LCSB Collaboration"
)
local_project_str = submission_dict.get("local_project", "")
dataset.comments = f"ELU Accession: {elu_accession}\nTitle: {title}\nCreated On: {created_on_str}\nScope: {scope_str}\nSubmitted to Project: {local_project_str}"
dataset.comments = (
f"ELU Accession: {elu_accession}\n"
f"Title: {title}\n"
f"Created On: {created_on_str}\n"
f"Scope: {scope_str}\n"
f"Submitted to Project: {local_project_str}"
)

local_custodians, local_personnel, external_contacts = self.process_contacts(
submission_dict
2 changes: 1 addition & 1 deletion core/importer/partners_importer.py
@@ -1,5 +1,5 @@
from core.importer.base_importer import BaseImporter
from core.models.partner import Partner, SECTOR_CATEGORY
from core.models.partner import Partner
from core.importer.JSONSchemaValidator import InstitutionJSONSchemaValidator
from core.exceptions import PartnerImportError

9 changes: 2 additions & 7 deletions core/importer/projects_importer.py
@@ -1,6 +1,6 @@
from core.importer.base_importer import BaseImporter
from core.importer.JSONSchemaValidator import ProjectJSONSchemaValidator
from core.models import Partner, Project, Publication
from core.models import Project, Publication
from core.exceptions import ProjectImportError


@@ -178,12 +178,7 @@ def _process_date_attribute(self, project_obj, project_dict, attribute_name):
)
except self.DateImportException:
date_str = project_dict.get(attribute_name)
message = (
f"\tCouldn"
't import the "{attribute_name}". Does it follow the '
"%Y-%m-%d"
" format?\n\t"
)
message = f'\tCouldn\'t import the "{attribute_name}". Does it follow the %Y-%m-%d format?\n\t'
message = message + f'Was: "{date_str}". '
message = message + "Continuing with empty value."
self.logger.warning(message)