diff --git a/.env.example b/.env.example
index f3162eb2a..67b196a2f 100644
--- a/.env.example
+++ b/.env.example
@@ -1,3 +1,4 @@
+CORS_ALLOWED_ORIGINS=http://localhost:8001,http://localhost:8002
DJANGO_SETTINGS_MODULE=euphrosyne.settings
DB_USER=
DB_NAME=euphrosyne
@@ -11,6 +12,9 @@ EMAIL_PORT=1025
EMAIL_HOST_USER=
EMAIL_HOST_PASSWORD=
EMAIL_USE_TLS=false
+ELASTICSEARCH_HOST=http://localhost:9200
+ELASTICSEARCH_USERNAME=
+ELASTICSEARCH_PASSWORD=
EROS_HTTP_TOKEN=
EUPHROSYNE_TOOLS_API_URL=http://localhost:8001
DEFAULT_FROM_EMAIL=alexandre.hajjar@beta.gouv.fr
diff --git a/README.md b/README.md
index 6de6c45b9..6a1a55c02 100644
--- a/README.md
+++ b/README.md
@@ -28,10 +28,14 @@ Le contenu du fichier peut-être copié dans un nouveau fichier `.env` pour para
| Nom de la variable | Description |
| ------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+| CORS_ALLOWED_ORIGINS | Valeur du header Access-Control-Allow-Origin pour les réponses sur les endpoints de l'api REST |
| DJANGO_SETTINGS_MODULE | Chemin Python vers le module settings Django. |
| DB\_\* | Variables relatives à la configuration de la base de données. |
| DJANGO_SECRET_KEY | [Clé secrète](https://docs.djangoproject.com/en/4.1/ref/settings/#std-setting-SECRET_KEY) utilisée par Django pour la signature cryptographique. Est également utilisé pour signer les tokens JWT (Euphrosyne Tools API). |
| DJANGO_DEBUG | Optionnel. Mode debug de Django. |
+| ELASTICSEARCH_HOST | Hôte de l'instance Elasticsearch (catalogue de données) |
+| ELASTICSEARCH_USERNAME | Credentials pour l'instance Elasticsearch (catalogue de données) |
+| ELASTICSEARCH_PASSWORD | Credentials pour l'instance Elasticsearch (catalogue de données) |
| EMAIL_HOST | Configuration du service d'e-mail. |
| EMAIL_PORT | " |
| EMAIL_HOST_USER | " |
diff --git a/cron.json b/cron.json
index 1bac487e8..8c7a02566 100644
--- a/cron.json
+++ b/cron.json
@@ -8,6 +8,10 @@
"command": "0 */6 * * * python manage.py check_project_data_availability",
"size": "S"
},
+ {
+ "command": "5 */6 * * * python manage.py index_elasticsearch_catalog",
+ "size": "S"
+ },
{
"command": "0 0 * * * python manage.py run_checks",
"size": "S"
diff --git a/data_request/__init__.py b/data_request/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/data_request/admin.py b/data_request/admin.py
new file mode 100644
index 000000000..e8c9d3950
--- /dev/null
+++ b/data_request/admin.py
@@ -0,0 +1,182 @@
+from django.contrib import admin, messages
+from django.db.models import Model, QuerySet
+from django.http import HttpRequest
+from django.http.response import HttpResponse
+from django.utils import timezone
+from django.utils.safestring import mark_safe
+from django.utils.translation import gettext
+from django.utils.translation import gettext_lazy as _
+
+from euphro_tools.exceptions import EuphroToolsException
+from lab.admin.mixins import LabAdminAllowedMixin
+from lab.permissions import is_lab_admin
+
+from .data_links import send_links
+from .models import DataAccessEvent, DataRequest
+
+
+class ReadonlyInlineMixin:
+
+ def has_change_permission(
+ self,
+ request: HttpRequest,
+ obj: Model | None = None, # pylint: disable=unused-argument
+ ) -> bool:
+ return False
+
+ def has_delete_permission(
+ self,
+ request: HttpRequest,
+ obj: Model | None = None, # pylint: disable=unused-argument
+ ) -> bool:
+ return is_lab_admin(request.user)
+
+ def has_add_permission(
+ self,
+ request: HttpRequest,
+ obj: Model | None = None, # pylint: disable=unused-argument
+ ) -> bool:
+ return False
+
+ def has_view_permission(
+ self,
+ request: HttpRequest,
+ obj: Model | None = None, # pylint: disable=unused-argument
+ ) -> bool:
+ return is_lab_admin(request.user)
+
+
+@admin.action(description=_("Accept request(s) (send download links)"))
+def action_send_links(
+ # pylint: disable=unused-argument
+ modeladmin: "DataRequestAdmin",
+ request: HttpRequest,
+ queryset: QuerySet[DataRequest],
+):
+ for data_request in queryset:
+ try:
+ send_links(data_request)
+ except EuphroToolsException as error:
+ modeladmin.message_user(
+ request,
+ _("Error sending links to %(email)s for %(data_request)s: %(error)s")
+ % {
+ "data_request": data_request,
+ "error": error,
+ "email": data_request.user_email,
+ },
+ level=messages.ERROR,
+ )
+ continue
+ data_request.sent_at = timezone.now()
+ if not data_request.request_viewed:
+ data_request.request_viewed = True
+ data_request.save()
+
+
+class BeenSeenListFilter(admin.SimpleListFilter):
+ # Human-readable title which will be displayed in the
+ # right admin sidebar just above the filter options.
+ title = _("has been sent")
+
+ # Parameter for the filter that will be used in the URL query.
+ parameter_name = "been_sent"
+
+ def lookups(self, request, model_admin):
+ return [
+ ("1", _("Yes")),
+ ("0", _("No")),
+ ]
+
+ def queryset(self, request: HttpRequest, queryset: QuerySet[DataRequest]):
+ if not self.value():
+ return queryset
+ return queryset.filter(sent_at__isnull=self.value() == "0")
+
+
+class DataAccessEventInline(ReadonlyInlineMixin, admin.TabularInline):
+ model = DataAccessEvent
+ extra = 0
+
+ fields = ("path", "access_time")
+ readonly_fields = ("path", "access_time")
+
+
+class RunInline(ReadonlyInlineMixin, admin.TabularInline):
+ model = DataRequest.runs.through
+ verbose_name = "Run"
+ verbose_name_plural = "Runs"
+ extra = 0
+
+ fields = ("run",)
+
+
+@admin.register(DataRequest)
+class DataRequestAdmin(LabAdminAllowedMixin, admin.ModelAdmin):
+ actions = [action_send_links]
+ list_filter = [BeenSeenListFilter]
+
+ list_display = (
+ "created",
+ "sent_at",
+ "user_email",
+ "user_first_name",
+ "user_last_name",
+ "display_viewed",
+ )
+
+ fields = (
+ "created",
+ "sent_at",
+ "user_email",
+ "user_first_name",
+ "user_last_name",
+ "user_institution",
+ "description",
+ )
+ readonly_fields = (
+ "created",
+ "user_email",
+ "user_first_name",
+ "user_last_name",
+ "user_institution",
+ "description",
+ )
+
+ inlines = [RunInline, DataAccessEventInline]
+
+ def has_change_permission(self, request: HttpRequest, obj: Model | None = None):
+ return False
+
+ def change_view(
+ self,
+ request: HttpRequest,
+ object_id: str,
+ form_url: str = "",
+ extra_context: dict[str, bool] | None = None,
+ ) -> HttpResponse:
+ obj = self.get_object(request, object_id)
+ if obj and not obj.request_viewed:
+ obj.request_viewed = True
+ obj.save()
+ response = super().change_view(request, object_id, form_url, extra_context)
+ return response
+
+ def changelist_view(
+ self, request: HttpRequest, extra_context: dict[str, str] | None = None
+ ):
+ extra_context = extra_context or {}
+ extra_context["title"] = gettext("Data requests")
+ return super().changelist_view(request, extra_context)
+
+ @admin.display(description=_("Is sent"), boolean=True)
+ def is_sent(self, obj: "DataRequest") -> bool:
+ return obj.sent_at is not None
+
+ @admin.display(description="")
+ def display_viewed(self, obj: "DataRequest") -> str:
+ if obj.request_viewed:
+ return ""
+ return mark_safe(
+        f'<span class="fr-badge fr-badge--new fr-badge--sm">{_("New")}</span>'
+ )
diff --git a/data_request/api_urls.py b/data_request/api_urls.py
new file mode 100644
index 000000000..0e165138f
--- /dev/null
+++ b/data_request/api_urls.py
@@ -0,0 +1,16 @@
+from django.urls import path
+
+from . import api_views
+
+urlpatterns = (
+ path(
+ "",
+ api_views.DataRequestCreateAPIView.as_view(),
+ name="create",
+ ),
+ path(
+ "access-event",
+ api_views.DataAccessEventCreateAPIView.as_view(),
+ name="create-access-event",
+ ),
+)
diff --git a/data_request/api_views.py b/data_request/api_views.py
new file mode 100644
index 000000000..d28413c17
--- /dev/null
+++ b/data_request/api_views.py
@@ -0,0 +1,52 @@
+from rest_framework import generics, serializers
+
+from data_request.emails import send_data_request_created_email
+from euphro_auth.jwt.authentication import EuphrosyneAdminJWTAuthentication
+from lab.runs.models import Run
+
+from .models import DataAccessEvent, DataRequest
+
+
+class DataRequestSerializer(serializers.ModelSerializer):
+ runs = serializers.PrimaryKeyRelatedField(
+ many=True,
+ queryset=Run.objects.only_not_embargoed(),
+ allow_empty=False,
+ )
+
+ class Meta:
+ model = DataRequest
+ fields = [
+ "user_email",
+ "user_first_name",
+ "user_last_name",
+ "user_institution",
+ "description",
+ "runs",
+ ]
+
+
+class DataRequestCreateAPIView(generics.CreateAPIView):
+ queryset = DataRequest.objects.all()
+ serializer_class = DataRequestSerializer
+
+ def perform_create(self, serializer: DataRequestSerializer):
+ super().perform_create(serializer)
+ send_data_request_created_email(serializer.instance.user_email)
+
+
+class DataAccessEventSerializer(serializers.ModelSerializer):
+ data_request = serializers.PrimaryKeyRelatedField(
+ queryset=DataRequest.objects.all(),
+ allow_empty=False,
+ )
+
+ class Meta:
+ model = DataAccessEvent
+ fields = ["data_request", "path"]
+
+
+class DataAccessEventCreateAPIView(generics.CreateAPIView):
+ queryset = DataAccessEvent.objects.all()
+ serializer_class = DataAccessEventSerializer
+ authentication_classes = [EuphrosyneAdminJWTAuthentication]
diff --git a/data_request/apps.py b/data_request/apps.py
new file mode 100644
index 000000000..0e60c0b7d
--- /dev/null
+++ b/data_request/apps.py
@@ -0,0 +1,6 @@
+from django.apps import AppConfig
+
+
+class DataRequestConfig(AppConfig):
+ default_auto_field = "django.db.models.BigAutoField"
+ name = "data_request"
diff --git a/data_request/data_links.py b/data_request/data_links.py
new file mode 100644
index 000000000..9f0e7874d
--- /dev/null
+++ b/data_request/data_links.py
@@ -0,0 +1,44 @@
+import datetime
+import typing
+
+from euphro_tools.download_urls import (
+ DataType,
+ fetch_token_for_run_data,
+ generate_download_url,
+)
+
+from .emails import LinkDict, send_data_email
+from .models import DataRequest
+
+NUM_DAYS_VALID = 7
+
+
+def send_links(data_request: DataRequest):
+ links: list[LinkDict] = []
+ expiration = datetime.datetime.now() + datetime.timedelta(days=NUM_DAYS_VALID)
+ for run in data_request.runs.all():
+ for data_type in typing.get_args(DataType):
+ project_name = run.project.name
+ token = fetch_token_for_run_data(
+ run.project.slug,
+ run.label,
+ data_type,
+ expiration=expiration,
+ data_request_id=str(data_request.id),
+ )
+ links.append(
+ {
+ "name": f"{run.label} ({project_name})",
+ "url": generate_download_url(
+ data_type=data_type,
+ project_slug=run.project.slug,
+ run_label=run.label,
+ token=token,
+ ),
+ "data_type": data_type,
+ }
+ )
+ send_data_email(
+ context={"links": links, "expiration_date": expiration},
+ email=data_request.user_email,
+ )
diff --git a/data_request/emails.py b/data_request/emails.py
new file mode 100644
index 000000000..741bd974b
--- /dev/null
+++ b/data_request/emails.py
@@ -0,0 +1,65 @@
+import datetime
+import logging
+import smtplib
+import typing
+
+from django.core import mail
+from django.template.loader import render_to_string
+from django.utils.html import strip_tags
+from django.utils.translation import gettext as _
+
+logger = logging.getLogger(__name__)
+
+
+class LinkDict(typing.TypedDict):
+ name: str
+ url: str
+ data_type: typing.Literal["raw_data", "processed_data"]
+
+
+class DataEmailContext(typing.TypedDict):
+ links: list[LinkDict]
+ expiration_date: datetime.datetime
+
+
+def send_data_request_created_email(
+ email: str,
+):
+ subject = _("[New AGLAE] Data request received")
+ template_path = "data_request/email/data-request-created.html"
+ _send_mail(subject, email, template_path)
+
+
+def send_data_email(
+ email: str,
+ context: DataEmailContext,
+):
+ subject = _("Your New AGLAE data links")
+ template_path = "data_request/email/data-links.html"
+ _send_mail(subject, email, template_path, context)
+
+
+def _send_mail(
+ subject: str,
+ email: str,
+ template_path: str,
+ context: typing.Mapping[str, typing.Any] | None = None,
+):
+ html_message = render_to_string(template_path, context=context)
+ plain_message = strip_tags(html_message)
+
+ try:
+ mail.send_mail(
+ subject,
+ plain_message,
+ from_email=None,
+ recipient_list=[email],
+ html_message=html_message,
+ )
+ except (smtplib.SMTPException, ConnectionError) as e:
+ logger.error(
+ "Error sending data request email to %s. Reason: %s",
+ email,
+ str(e),
+ )
+ raise e
diff --git a/data_request/migrations/0001_initial.py b/data_request/migrations/0001_initial.py
new file mode 100644
index 000000000..38c142217
--- /dev/null
+++ b/data_request/migrations/0001_initial.py
@@ -0,0 +1,59 @@
+# Generated by Django 5.0.6 on 2024-07-04 08:50
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ initial = True
+
+ dependencies = [
+ ("lab", "0039_era_remove_period_period_unique_theso_joconde_id_and_more"),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name="DataRequest",
+ fields=[
+ (
+ "id",
+ models.BigAutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ (
+ "request_viewed",
+ models.BooleanField(
+ default=False,
+ help_text="Has been viewed by an admin",
+ verbose_name="Viewed",
+ ),
+ ),
+ (
+ "sent_at",
+ models.DateTimeField(blank=True, null=True, verbose_name="Sent at"),
+ ),
+ (
+ "user_email",
+ models.EmailField(max_length=254, verbose_name="User emaio"),
+ ),
+ ("user_first_name", models.CharField(max_length=150)),
+ ("user_last_name", models.CharField(max_length=150)),
+ ("user_institution", models.CharField(blank=True, max_length=255)),
+ ("description", models.TextField(blank=True)),
+ ("created", models.DateTimeField(auto_now_add=True)),
+ ("modified", models.DateTimeField(auto_now=True)),
+ (
+ "runs",
+ models.ManyToManyField(related_name="data_requests", to="lab.run"),
+ ),
+ ],
+ options={
+ "verbose_name": "Data Request",
+ "verbose_name_plural": "Data Requests",
+ },
+ ),
+ ]
diff --git a/data_request/migrations/0002_alter_datarequest_options_alter_datarequest_created_and_more.py b/data_request/migrations/0002_alter_datarequest_options_alter_datarequest_created_and_more.py
new file mode 100644
index 000000000..d99abd028
--- /dev/null
+++ b/data_request/migrations/0002_alter_datarequest_options_alter_datarequest_created_and_more.py
@@ -0,0 +1,57 @@
+# Generated by Django 5.0.6 on 2024-07-15 13:55
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("data_request", "0001_initial"),
+ ]
+
+ operations = [
+ migrations.AlterModelOptions(
+ name="datarequest",
+ options={
+ "verbose_name": "data request",
+ "verbose_name_plural": "data requests",
+ },
+ ),
+ migrations.AlterField(
+ model_name="datarequest",
+ name="created",
+ field=models.DateTimeField(auto_now_add=True, verbose_name="Created"),
+ ),
+ migrations.AlterField(
+ model_name="datarequest",
+ name="description",
+ field=models.TextField(blank=True, verbose_name="Description"),
+ ),
+ migrations.AlterField(
+ model_name="datarequest",
+ name="modified",
+ field=models.DateTimeField(auto_now=True, verbose_name="Modified"),
+ ),
+ migrations.AlterField(
+ model_name="datarequest",
+ name="user_email",
+ field=models.EmailField(max_length=254, verbose_name="User email"),
+ ),
+ migrations.AlterField(
+ model_name="datarequest",
+ name="user_first_name",
+ field=models.CharField(max_length=150, verbose_name="First name"),
+ ),
+ migrations.AlterField(
+ model_name="datarequest",
+ name="user_institution",
+ field=models.CharField(
+ blank=True, max_length=255, verbose_name="Institution"
+ ),
+ ),
+ migrations.AlterField(
+ model_name="datarequest",
+ name="user_last_name",
+ field=models.CharField(max_length=150, verbose_name="Last name"),
+ ),
+ ]
diff --git a/data_request/migrations/0003_dataaccessevent.py b/data_request/migrations/0003_dataaccessevent.py
new file mode 100644
index 000000000..3aa00f944
--- /dev/null
+++ b/data_request/migrations/0003_dataaccessevent.py
@@ -0,0 +1,52 @@
+# Generated by Django 5.0.7 on 2024-07-19 13:53
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ (
+ "data_request",
+ "0002_alter_datarequest_options_alter_datarequest_created_and_more",
+ ),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name="DataAccessEvent",
+ fields=[
+ (
+ "id",
+ models.BigAutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ ("path", models.CharField(max_length=255, verbose_name="Path")),
+ (
+ "access_time",
+ models.DateTimeField(auto_now_add=True, verbose_name="Access time"),
+ ),
+ (
+ "modified",
+ models.DateTimeField(auto_now=True, verbose_name="Modified"),
+ ),
+ (
+ "data_request",
+ models.ForeignKey(
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="data_access_events",
+ to="data_request.datarequest",
+ ),
+ ),
+ ],
+ options={
+ "verbose_name": "data access event",
+ "verbose_name_plural": "data access events",
+ },
+ ),
+ ]
diff --git a/data_request/migrations/__init__.py b/data_request/migrations/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/data_request/models.py b/data_request/models.py
new file mode 100644
index 000000000..087eeb5fb
--- /dev/null
+++ b/data_request/models.py
@@ -0,0 +1,43 @@
+from django.db import models
+from django.utils.translation import gettext_lazy as _
+
+
+class DataRequest(models.Model):
+
+ class Meta:
+ verbose_name = _("data request")
+ verbose_name_plural = _("data requests")
+
+ runs = models.ManyToManyField("lab.Run", related_name="data_requests")
+
+ request_viewed = models.BooleanField(
+ _("Viewed"), help_text=_("Has been viewed by an admin"), default=False
+ )
+
+ sent_at = models.DateTimeField(_("Sent at"), null=True, blank=True)
+
+ user_email = models.EmailField(_("User email"))
+ user_first_name = models.CharField(_("First name"), max_length=150)
+ user_last_name = models.CharField(_("Last name"), max_length=150)
+ user_institution = models.CharField(_("Institution"), max_length=255, blank=True)
+
+ description = models.TextField(_("Description"), blank=True)
+
+ created = models.DateTimeField(_("Created"), auto_now_add=True)
+ modified = models.DateTimeField(_("Modified"), auto_now=True)
+
+
+class DataAccessEvent(models.Model):
+
+ class Meta:
+ verbose_name = _("data access event")
+ verbose_name_plural = _("data access events")
+
+ data_request = models.ForeignKey(
+ DataRequest, on_delete=models.CASCADE, related_name="data_access_events"
+ )
+
+ path = models.CharField(_("Path"), max_length=255)
+
+ access_time = models.DateTimeField(_("Access time"), auto_now_add=True)
+ modified = models.DateTimeField(_("Modified"), auto_now=True)
diff --git a/data_request/nav.py b/data_request/nav.py
new file mode 100644
index 000000000..4aaf58c73
--- /dev/null
+++ b/data_request/nav.py
@@ -0,0 +1,24 @@
+from django.http import HttpRequest
+from django.urls import reverse
+from django.utils.translation import gettext as _
+
+from lab.nav import NavItemJson
+from lab.permissions import is_lab_admin
+
+from .models import DataRequest
+
+
+def get_nav_items(request: HttpRequest) -> list[NavItemJson]:
+ items: list[NavItemJson] = []
+ if is_lab_admin(request.user):
+ items.append(
+ {
+ "title": _("Data requests"),
+ "href": reverse("admin:data_request_datarequest_changelist"),
+ "iconName": "fr-icon-download-line",
+ "exactPath": False,
+ "extraPath": None,
+ "badge": DataRequest.objects.filter(request_viewed=False).count(),
+ }
+ )
+ return items
diff --git a/data_request/templates/admin/data_request/datarequest/change_form.html b/data_request/templates/admin/data_request/datarequest/change_form.html
new file mode 100644
index 000000000..b03b98c54
--- /dev/null
+++ b/data_request/templates/admin/data_request/datarequest/change_form.html
@@ -0,0 +1,12 @@
+{% extends "admin/change_form.html" %}
+{% load admin_urls %}
+{% block object-tools %}
+
+
+
+{% endblock %}
\ No newline at end of file
diff --git a/data_request/templates/data_request/email/data-links.html b/data_request/templates/data_request/email/data-links.html
new file mode 100644
index 000000000..52148305e
--- /dev/null
+++ b/data_request/templates/data_request/email/data-links.html
@@ -0,0 +1,31 @@
+{% load i18n %}
+
+{% autoescape off %}
+
+
+
+
+ {% trans "Data Request" %}
+
+
+ {% trans "Data Request" %}
+ {% trans "Thank you for your interest in New AGLAE data catalog. We have processed your request and generated the following links for you to download the data you have selected:" %}
+
+
+ {% for link in links %}
+ -
+ {{ link.name }}
+ - {% if link.data_type == "raw_data" %}{% trans "Raw data" %}{% else %}{% trans "Processed data" %}{% endif %}
+
+ {% endfor %}
+
+
+ {% trans "Please click on the links above to download the data. If you have any further questions or need assistance, please feel free to contact us." %}
+ {% blocktranslate with formatted_exp_date=expiration_date|date:"DATETIME_FORMAT"%}Note that the links will be active until {{ formatted_exp_date }}.{% endblocktranslate %}
+
+
+ {% trans "Thank you" %},
+ {% trans "The New AGLAE Team" %}
+
+
+{% endautoescape %}
diff --git a/data_request/templates/data_request/email/data-request-created.html b/data_request/templates/data_request/email/data-request-created.html
new file mode 100644
index 000000000..a9c5956e4
--- /dev/null
+++ b/data_request/templates/data_request/email/data-request-created.html
@@ -0,0 +1,18 @@
+{% load i18n %}
+
+{% autoescape off %}
+
+
+
+
+ {% trans "Data request received" %}
+
+
+ {% trans "Data Request" %}
+    {% trans "We have received your data request. A member of the New AGLAE team will review it as soon as possible and get back to you." %}
+
+ {% trans "Have a good day," %}
+ {% trans "The New AGLAE Team" %}
+
+
+{% endautoescape %}
diff --git a/data_request/tests/__init__.py b/data_request/tests/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/data_request/tests/factories.py b/data_request/tests/factories.py
new file mode 100644
index 000000000..f06be51fc
--- /dev/null
+++ b/data_request/tests/factories.py
@@ -0,0 +1,27 @@
+import factory
+
+from lab.tests.factories import RunFactory
+
+from ..models import DataRequest
+
+
+class DataRequestFactory(factory.django.DjangoModelFactory):
+ class Meta:
+ model = DataRequest
+
+ user_first_name = factory.Faker("first_name")
+ user_last_name = factory.Faker("last_name")
+ user_email = factory.LazyAttribute(
+ lambda u: f"{u.user_first_name}.{u.user_last_name}@example.com".lower()
+ )
+ user_institution = factory.Faker("company")
+ description = factory.Faker("text")
+
+
+class DataRequestWithRunsFactory(DataRequestFactory):
+ @factory.post_generation
+ def runs(obj: DataRequest, create: bool, *args, **kwargs):
+ if not create:
+ return
+ for _ in range(3):
+ obj.runs.add(RunFactory()) # pylint: disable=no-member
diff --git a/data_request/tests/test_admin.py b/data_request/tests/test_admin.py
new file mode 100644
index 000000000..d40f0eee1
--- /dev/null
+++ b/data_request/tests/test_admin.py
@@ -0,0 +1,128 @@
+import datetime
+from unittest import mock
+
+from django.contrib.admin.sites import AdminSite
+from django.test import RequestFactory, TestCase
+from django.urls import reverse
+
+from euphro_auth.tests import factories as auth_factories
+from euphro_tools.exceptions import EuphroToolsException
+
+from ..admin import BeenSeenListFilter, DataRequestAdmin, action_send_links
+from ..models import DataRequest
+from . import factories
+
+
+class TestAdminActionSendLink(TestCase):
+ def setUp(self):
+ self.admin = DataRequestAdmin(DataRequest, admin_site=AdminSite())
+ patcher = mock.patch("data_request.admin.send_links")
+ self.send_links_mock = patcher.start()
+ self.addCleanup(patcher.stop)
+
+ def test_send_links(self):
+ # Test calls send_links with the correct arguments
+ # Test set sent_at to the current time
+ dr = factories.DataRequestWithRunsFactory(sent_at=None, request_viewed=False)
+
+ action_send_links(
+ self.admin, RequestFactory(), DataRequest.objects.filter(id=dr.id)
+ )
+
+ self.send_links_mock.assert_called_with(dr)
+ dr.refresh_from_db()
+ assert dr.sent_at is not None
+ assert dr.request_viewed is True
+
+ def test_send_links_when_euphro_tools_exception(self):
+ model_admin_mock = mock.MagicMock()
+ dr = factories.DataRequestWithRunsFactory(sent_at=None, request_viewed=False)
+ self.send_links_mock.side_effect = EuphroToolsException()
+
+ action_send_links(
+ model_admin_mock, RequestFactory(), DataRequest.objects.filter(id=dr.id)
+ )
+
+ self.send_links_mock.reset_mock()
+ model_admin_mock.message_user.assert_called_once()
+ dr.refresh_from_db()
+ assert dr.sent_at is None
+ assert dr.request_viewed is False
+
+
+class TestAdminFilterBeenSeen(TestCase):
+ def setUp(self):
+ self.admin = DataRequestAdmin(DataRequest, admin_site=AdminSite())
+ self.dr_sent = factories.DataRequestFactory(sent_at=datetime.datetime.now())
+ self.dr_not_sent = factories.DataRequestFactory(sent_at=None)
+
+ def test_filter_when_no_value(self):
+ request = RequestFactory()
+ f = BeenSeenListFilter(
+ request=request, params={}, model=DataRequest, model_admin=self.admin
+ )
+ assert f.queryset(request, DataRequest.objects.all()).count() == 2
+
+ def test_filter_when_value_is_0(self):
+ request = RequestFactory()
+ f = BeenSeenListFilter(
+ request=request,
+ params={"been_sent": "0"},
+ model=DataRequest,
+ model_admin=self.admin,
+ )
+ assert list(f.queryset(request, DataRequest.objects.all()).all()) == [
+ self.dr_not_sent
+ ]
+
+ def test_filter_when_value_is_1(self):
+ request = RequestFactory()
+ f = BeenSeenListFilter(
+ request=request,
+ params={"been_sent": "1"},
+ model=DataRequest,
+ model_admin=self.admin,
+ )
+ assert list(f.queryset(request, DataRequest.objects.all()).all()) == [
+ self.dr_sent
+ ]
+
+
+class TestAdminDataRequest(TestCase):
+ def setUp(self):
+ self.admin = DataRequestAdmin(DataRequest, admin_site=AdminSite())
+
+ def test_change_view_set_request_viewed(self):
+ data_request = factories.DataRequestFactory(request_viewed=False)
+ request = RequestFactory().get(
+ reverse("admin:data_request_datarequest_change", args=[data_request.id])
+ )
+ request.user = auth_factories.LabAdminUserFactory()
+ self.admin.change_view(
+ request,
+ str(data_request.id),
+ )
+
+ data_request.refresh_from_db()
+ assert data_request.request_viewed
+
+ def test_display_is_sent(self):
+ assert (
+ self.admin.is_sent(
+ factories.DataRequestFactory(sent_at=datetime.datetime.now())
+ )
+ is True
+ )
+ assert self.admin.is_sent(factories.DataRequestFactory(sent_at=None)) is False
+
+ def test_display_viewed(self):
+ assert (
+ ''
+ in self.admin.display_viewed(
+ factories.DataRequestFactory(request_viewed=False)
+ )
+ )
+ assert (
+ self.admin.display_viewed(factories.DataRequestFactory(request_viewed=True))
+ == ""
+ )
diff --git a/data_request/tests/test_api_urls.py b/data_request/tests/test_api_urls.py
new file mode 100644
index 000000000..bad04e5ad
--- /dev/null
+++ b/data_request/tests/test_api_urls.py
@@ -0,0 +1,64 @@
+from unittest import mock
+
+import pytest
+from django.test import Client
+
+from euphro_auth.jwt.tokens import EuphroToolsAPIToken
+from lab.tests import factories as lab_factories
+
+from . import factories
+
+BASE_BODY_DATA = {
+ "user_email": "dev@witold.fr",
+ "user_first_name": "Dev",
+ "user_last_name": "Witold",
+ "user_institution": "Witold Institute of Technology",
+ "description": "I need this data for my research.",
+}
+
+
+@pytest.mark.django_db
+def test_create_view():
+ with mock.patch("data_request.api_views.send_data_request_created_email"):
+ data = {
+ **BASE_BODY_DATA,
+ "runs": [lab_factories.NotEmbargoedRun().id],
+ }
+ client = Client()
+ response = client.post("/api/data-request/", data=data)
+
+ assert response.status_code == 201
+
+
+@pytest.mark.django_db
+def test_create_view_with_embargoed_run():
+ with mock.patch("data_request.api_views.send_data_request_created_email"):
+ data = {
+ **BASE_BODY_DATA,
+ "runs": [lab_factories.RunFactory().id],
+ }
+ client = Client()
+ response = client.post("/api/data-request/", data=data)
+
+ assert response.status_code == 400
+ assert "runs" in response.json()
+
+
+@pytest.mark.django_db
+def test_data_access_event_create():
+ data_request = factories.DataRequestFactory()
+ token = EuphroToolsAPIToken.for_euphrosyne()
+ data = {
+ "data_request": data_request.id,
+ "path": "path/to/data",
+ }
+
+ response = Client().post(
+ "/api/data-request/access-event",
+ data=data,
+ headers={"Authorization": f"Bearer {token}"},
+ content_type="application/json",
+ )
+
+ assert response.status_code == 201
+ assert data_request.data_access_events.count() == 1
diff --git a/data_request/tests/test_api_views.py b/data_request/tests/test_api_views.py
new file mode 100644
index 000000000..c0fae3f6f
--- /dev/null
+++ b/data_request/tests/test_api_views.py
@@ -0,0 +1,47 @@
+from unittest import mock
+
+import pytest
+from rest_framework import serializers
+
+from lab.tests import factories
+
+from ..api_views import DataRequestCreateAPIView, DataRequestSerializer
+
+
+@pytest.mark.django_db
+def test_data_request_create_api_view_send_mail():
+ data = DataRequestSerializer(
+ None,
+ {
+ "user_email": "dev@euphrosyne.fr",
+ "user_first_name": "Dev",
+ "user_last_name": "euphrosyne",
+ "user_institution": "euphrosyne Institute of Technology",
+ "description": "I need this data for my research.",
+ "runs": [factories.NotEmbargoedRun().id],
+ },
+ )
+ data.is_valid(raise_exception=True)
+
+ with mock.patch(
+ "data_request.api_views.send_data_request_created_email"
+ ) as send_mail_mock:
+ DataRequestCreateAPIView().perform_create(serializer=data)
+ send_mail_mock.assert_called_with("dev@euphrosyne.fr")
+
+
+@pytest.mark.django_db
+def test_data_request_create_api_view_when_embargoed_run():
+ data = DataRequestSerializer(
+ None,
+ {
+ "user_email": "dev@euphrosyne.fr",
+ "user_first_name": "Dev",
+ "user_last_name": "euphrosyne",
+ "user_institution": "euphrosyne Institute of Technology",
+ "description": "I need this data for my research.",
+ "runs": [factories.RunFactory().id],
+ },
+ )
+ with pytest.raises(serializers.ValidationError):
+ data.is_valid(raise_exception=True)
diff --git a/data_request/tests/test_data_links.py b/data_request/tests/test_data_links.py
new file mode 100644
index 000000000..71fb4ee09
--- /dev/null
+++ b/data_request/tests/test_data_links.py
@@ -0,0 +1,87 @@
+import datetime
+from unittest import mock
+
+from django.test import TestCase
+
+from ..data_links import NUM_DAYS_VALID, send_links
+from . import factories
+
+
+class DataLinksTestCase(TestCase):
+ def setUp(self):
+ patcher = mock.patch(
+ "data_request.data_links.fetch_token_for_run_data", return_value="token"
+ )
+ self.fetch_token_mock = patcher.start()
+ self.addCleanup(patcher.stop)
+
+ patcher = mock.patch(
+ "data_request.data_links.generate_download_url", return_value="http://url"
+ )
+ self.generate_download_url_mock = patcher.start()
+ self.addCleanup(patcher.stop)
+
+ patcher = mock.patch("data_request.data_links.send_data_email")
+ self.send_data_email_mock = patcher.start()
+ self.addCleanup(patcher.stop)
+
+ def test_send_links_set_expiration(self):
+ dr = factories.DataRequestWithRunsFactory()
+ now = datetime.datetime.now()
+ expiration = now + datetime.timedelta(days=NUM_DAYS_VALID)
+
+ with mock.patch("data_request.data_links.datetime.datetime") as datetime_mock:
+ datetime_mock.now.return_value = now
+ send_links(dr)
+ # data_type raw_data
+ assert (
+ self.fetch_token_mock.call_args_list[0][1]["expiration"] == expiration
+ )
+ # data_type processed_data
+ assert (
+ self.fetch_token_mock.call_args_list[1][1]["expiration"] == expiration
+ )
+
+ def test_send_data_email_context(self):
+ dr = factories.DataRequestFactory()
+ dr.runs.add(
+ factories.RunFactory(project__name="Project 1", label="Run 1"),
+ )
+
+ send_links(dr)
+
+ # data_type raw_data
+ assert self.send_data_email_mock.call_args_list[0][1]["context"]["links"] == [
+ {"name": "Run 1 (Project 1)", "url": "http://url", "data_type": "raw_data"},
+ {
+ "name": "Run 1 (Project 1)",
+ "url": "http://url",
+ "data_type": "processed_data",
+ },
+ ]
+ self.generate_download_url_mock.assert_has_calls(
+ [
+ mock.call(
+ data_type="raw_data",
+ project_slug="project-1",
+ run_label="Run 1",
+ token="token",
+ ),
+ mock.call(
+ data_type="processed_data",
+ project_slug="project-1",
+ run_label="Run 1",
+ token="token",
+ ),
+ ],
+ any_order=True,
+ )
+
+ def test_send_data_email_context_when_multiple_runs(self):
+ dr = factories.DataRequestWithRunsFactory()
+
+ send_links(dr)
+
+ assert (
+ len(self.send_data_email_mock.call_args_list[0][1]["context"]["links"]) == 6
+ )
diff --git a/data_request/tests/test_emails.py b/data_request/tests/test_emails.py
new file mode 100644
index 000000000..b41bb8814
--- /dev/null
+++ b/data_request/tests/test_emails.py
@@ -0,0 +1,38 @@
+from django.core import mail
+from django.test import SimpleTestCase
+from django.utils.translation import gettext
+
+from ..emails import send_data_email, send_data_request_created_email
+
+
+class DataRequestEmailsTestCase(SimpleTestCase):
+ def test_send_data_request_created_email(self):
+ send_data_request_created_email("test@test.fr")
+
+ assert len(mail.outbox) == 1
+ assert mail.outbox[0].subject == gettext("[New AGLAE] Data request received")
+ assert mail.outbox[0].to == ["test@test.fr"]
+
+ def test_send_data_email(self):
+ send_data_email(
+ "test@test.fr",
+ {
+ "links": [
+ {
+ "name": "Run 1 (Project 1)",
+ "url": "http://url",
+ "data_type": "raw_data",
+ },
+ {
+ "name": "Run 1 (Project 1)",
+ "url": "http://url",
+ "data_type": "processed_data",
+ },
+ ],
+ "expiration_date": "2021-07-01 00:00:00",
+ },
+ )
+
+ assert len(mail.outbox) == 1
+ assert mail.outbox[0].subject == gettext("Your New AGLAE data links")
+ assert mail.outbox[0].to == ["test@test.fr"]
diff --git a/data_request/tests/test_nav.py b/data_request/tests/test_nav.py
new file mode 100644
index 000000000..4f3f35278
--- /dev/null
+++ b/data_request/tests/test_nav.py
@@ -0,0 +1,30 @@
+import pytest
+from django.test import RequestFactory
+from django.utils.translation import gettext
+
+from euphro_auth.tests import factories as auth_factories
+
+from ..nav import get_nav_items
+from . import factories
+
+
+@pytest.mark.django_db
+def test_data_request_nav():
+ request = RequestFactory()
+ request.user = auth_factories.StaffUserFactory()
+
+ # pylint: disable=use-implicit-booleaness-not-comparison
+ assert get_nav_items(request) == []
+
+ request.user = auth_factories.LabAdminUserFactory()
+ factories.DataRequestFactory(request_viewed=False)
+ assert get_nav_items(request) == [
+ {
+ "title": gettext("Data requests"),
+ "href": "/data_request/datarequest/",
+ "iconName": "fr-icon-download-line",
+ "exactPath": False,
+ "extraPath": None,
+ "badge": 1,
+ }
+ ]
diff --git a/euphro_auth/jwt/authentication.py b/euphro_auth/jwt/authentication.py
new file mode 100644
index 000000000..6763506a8
--- /dev/null
+++ b/euphro_auth/jwt/authentication.py
@@ -0,0 +1,69 @@
+from datetime import timedelta
+
+from rest_framework_simplejwt.authentication import JWTAuthentication
+from rest_framework_simplejwt.exceptions import (
+ AuthenticationFailed,
+ InvalidToken,
+ TokenError,
+)
+from rest_framework_simplejwt.settings import api_settings
+from rest_framework_simplejwt.tokens import Token
+
+from ..models import User
+
+
+class EuphrosyneBackendToken(Token):
+ """
+ Custom token class for Euphrosyne backend communication.
+ """
+
+ token_type = "euphrosyne_backend"
+ lifetime = timedelta(minutes=5)
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+ def verify(self) -> None:
+ # Subclass to remove JTI_CLAIM and token type check
+ self.check_exp()
+
+
+class EuphrosyneAdminJWTAuthentication(JWTAuthentication):
+ """Used to authenticate "euphrosyne" admin user in backend-to-backend communication.
+    For example when receiving data requests from Euphro Tools."""
+
+ def get_user(self, validated_token):
+ try:
+ user_id = validated_token[api_settings.USER_ID_CLAIM]
+ except KeyError as error:
+ raise InvalidToken(
+ "Token contained no recognizable user identification"
+ ) from error
+
+ if user_id != "euphrosyne":
+ raise AuthenticationFailed("Invalid user identification")
+
+ user = User(email="euphrosyne", is_lab_admin=True)
+
+ return user
+
+ def get_validated_token(self, raw_token: bytes) -> Token:
+ messages = []
+ try:
+ return EuphrosyneBackendToken(raw_token)
+ except TokenError as e:
+ messages.append(
+ {
+ "token_class": EuphrosyneBackendToken.__name__,
+ "token_type": EuphrosyneBackendToken.token_type,
+ "message": e.args[0],
+ }
+ )
+
+ raise InvalidToken(
+ {
+ "detail": "Given token not valid for "
+ f"{EuphrosyneBackendToken.token_type} type",
+ "messages": messages,
+ }
+ )
diff --git a/euphro_auth/jwt/tests/test_authentification.py b/euphro_auth/jwt/tests/test_authentification.py
new file mode 100644
index 000000000..6ed5f94dd
--- /dev/null
+++ b/euphro_auth/jwt/tests/test_authentification.py
@@ -0,0 +1,16 @@
+import pytest
+from rest_framework_simplejwt.exceptions import AuthenticationFailed
+
+from ...tests import factories
+from ..authentication import EuphrosyneAdminJWTAuthentication
+from ..tokens import EuphroToolsAPIToken
+
+
+@pytest.mark.django_db
+def test_euphrosyne_admin_jwt_authentication():
+ auth = EuphrosyneAdminJWTAuthentication()
+
+ assert auth.get_user(EuphroToolsAPIToken.for_euphrosyne())
+
+ with pytest.raises(AuthenticationFailed):
+ auth.get_user(EuphroToolsAPIToken.for_user(factories.StaffUserFactory()))
diff --git a/euphro_auth/jwt/tokens.py b/euphro_auth/jwt/tokens.py
index aff3365a3..58e1b299f 100644
--- a/euphro_auth/jwt/tokens.py
+++ b/euphro_auth/jwt/tokens.py
@@ -24,4 +24,6 @@ class EuphroToolsAPIToken(RefreshToken):
def for_euphrosyne(cls):
token = cls()
token[rf_simplejwt_api_settings.USER_ID_CLAIM] = "euphrosyne"
+ token["projects"] = []
+ token["is_admin"] = True
return token
diff --git a/euphro_auth/tests/factories.py b/euphro_auth/tests/factories.py
new file mode 100644
index 000000000..2611e1123
--- /dev/null
+++ b/euphro_auth/tests/factories.py
@@ -0,0 +1,20 @@
+import factory
+import factory.fuzzy
+from django.contrib.auth import get_user_model
+
+
+class StaffUserFactory(factory.django.DjangoModelFactory):
+ class Meta:
+ model = get_user_model()
+
+ first_name = factory.Faker("first_name")
+ last_name = factory.Faker("last_name")
+ email = factory.LazyAttribute(
+ lambda u: f"{u.first_name}.{u.last_name}@example.com".lower()
+ )
+ password = factory.Faker("password")
+ is_staff = True
+
+
+class LabAdminUserFactory(StaffUserFactory):
+ is_lab_admin = True
diff --git a/euphro_auth/tests/test_jwt_tokens.py b/euphro_auth/tests/test_jwt_tokens.py
new file mode 100644
index 000000000..6e65274bb
--- /dev/null
+++ b/euphro_auth/tests/test_jwt_tokens.py
@@ -0,0 +1,22 @@
+from django.test import TestCase
+
+from lab.tests import factories as lab_factories
+
+from ..jwt import tokens
+
+
+class TestJWTToken(TestCase):
+ def test_refresh_token_for_user(self):
+ project = lab_factories.ProjectWithLeaderFactory()
+ token = tokens.EuphroRefreshToken.for_user(project.leader.user)
+ assert token.payload["user_id"] == project.leader.user.id
+ assert token.payload["is_admin"] is False
+ assert token.payload["projects"] == [
+ {"id": project.id, "slug": project.slug, "name": project.name}
+ ]
+
+ def test_api_token_for_euphrosyne(self):
+ token = tokens.EuphroToolsAPIToken.for_euphrosyne()
+ assert token.payload["user_id"] == "euphrosyne"
+ assert token.payload["is_admin"] is True
+ assert isinstance(token.payload["projects"], list)
diff --git a/euphro_tools/download_urls.py b/euphro_tools/download_urls.py
new file mode 100644
index 000000000..1b9f879ec
--- /dev/null
+++ b/euphro_tools/download_urls.py
@@ -0,0 +1,56 @@
+"""Everything related to getting signed download link from euphro tools."""
+
+import os
+from datetime import datetime
+from typing import Literal
+
+import requests
+
+from euphro_auth.jwt.tokens import EuphroToolsAPIToken
+
+from .exceptions import EuphroToolsException
+from .utils import get_run_data_path
+
+DataType = Literal["raw_data", "processed_data"]
+
+
+def generate_download_url(
+ project_slug: str, run_label: str, data_type: DataType, token: str
+) -> str:
+ """Generate a download URL for a run's data.
+ Token can be obtained by calling fetch_token_for_run_data function."""
+ return (
+ os.environ["EUPHROSYNE_TOOLS_API_URL"]
+ + "/data/run-data-zip"
+ + f"?token={token}&path={get_run_data_path(project_slug, run_label, data_type)}"
+ )
+
+
+def fetch_token_for_run_data(
+ project_slug: str,
+ run_label: str,
+ data_type: DataType,
+ expiration: datetime | None = None,
+ data_request_id: str | None = None,
+) -> str:
+ query_params = f"?path={get_run_data_path(project_slug, run_label, data_type)}"
+ if expiration:
+        query_params += f"&expiration={expiration.isoformat()}"  # NOTE(review): query_params is never used below — token_url rebuilds the path query itself, so expiration is silently dropped from the request; confirm intent
+ token_url = (
+ os.environ["EUPHROSYNE_TOOLS_API_URL"]
+ + f"/data/{project_slug}/token"
+ + f"?path={get_run_data_path(project_slug, run_label, data_type)}"
+ )
+ if data_request_id:
+ token_url += f"&data_request={data_request_id}"
+ bearer_token = EuphroToolsAPIToken.for_euphrosyne().access_token
+ try:
+ request = requests.get(
+ token_url,
+ timeout=5,
+ headers={"Authorization": f"Bearer {bearer_token}"},
+ )
+ request.raise_for_status()
+ except (requests.HTTPError, requests.ConnectionError) as error:
+ raise EuphroToolsException from error
+ return request.json()["token"]
diff --git a/euphro_tools/exceptions.py b/euphro_tools/exceptions.py
new file mode 100644
index 000000000..bd112bb7f
--- /dev/null
+++ b/euphro_tools/exceptions.py
@@ -0,0 +1,2 @@
+class EuphroToolsException(Exception):
+ pass
diff --git a/euphro_tools/tests/test_download_urls.py b/euphro_tools/tests/test_download_urls.py
new file mode 100644
index 000000000..9367e71e3
--- /dev/null
+++ b/euphro_tools/tests/test_download_urls.py
@@ -0,0 +1,74 @@
+from datetime import datetime
+from unittest import mock
+
+import pytest
+from django.test import TestCase
+from requests.exceptions import HTTPError
+
+from euphro_tools.exceptions import EuphroToolsException
+
+from ..download_urls import fetch_token_for_run_data, generate_download_url
+
+
+class TestDownloadUrls(TestCase):
+ def setUp(self):
+ patcher = mock.patch("euphro_tools.download_urls.requests.get")
+ self.requests_get_mock = patcher.start()
+ self.addCleanup(patcher.stop)
+
+ patcher = mock.patch(
+ "euphro_tools.download_urls.EuphroToolsAPIToken.for_euphrosyne"
+ )
+ self.token_mock = patcher.start()
+ self.addCleanup(patcher.stop)
+
+ patcher = mock.patch.dict(
+ "os.environ",
+ {
+ "EUPHROSYNE_TOOLS_API_URL": "http://example.com",
+ },
+ )
+ patcher.start()
+ self.addCleanup(patcher.stop)
+
+ self.token_mock.return_value.access_token = "access"
+
+ def test_generate_download_url(self):
+ url = generate_download_url("project_slug", "run_label", "raw_data", "token")
+ assert (
+ url
+ # pylint: disable=line-too-long
+ == "http://example.com/data/run-data-zip?token=token&path=projects/project_slug/runs/run_label/raw_data"
+ )
+
+ def test_fetch_token_for_run_data(self):
+ now = datetime.now()
+ self.requests_get_mock.return_value.json.return_value = {"token": "token"}
+
+ token = fetch_token_for_run_data(
+ "project_slug", "run_label", "raw_data", expiration=now
+ )
+
+ assert token == "token"
+ assert self.requests_get_mock.call_args[0][0] == (
+ # pylint: disable=line-too-long
+ "http://example.com/data/project_slug/token?path=projects/project_slug/runs/run_label/raw_data"
+ )
+ assert self.requests_get_mock.call_args[1]["headers"] == {
+ "Authorization": "Bearer access"
+ }
+
+ def test_fetch_token_for_run_data_with_data_request_id(self):
+ self.requests_get_mock.return_value.json.return_value = {"token": "token"}
+ fetch_token_for_run_data(
+ "project_slug", "run_label", "raw_data", data_request_id="1"
+ )
+ assert "data_request=1" in self.requests_get_mock.call_args[0][0]
+
+ def test_fetch_token_for_run_data_raise_euphro_tools_exception(
+ self,
+ ):
+ self.requests_get_mock.side_effect = HTTPError()
+
+ with pytest.raises(EuphroToolsException):
+ fetch_token_for_run_data("project_slug", "run_label", "raw_data")
diff --git a/euphro_tools/tests/test_utils.py b/euphro_tools/tests/test_utils.py
new file mode 100644
index 000000000..eb4e9eb48
--- /dev/null
+++ b/euphro_tools/tests/test_utils.py
@@ -0,0 +1,23 @@
+from ..utils import _get_project_path, _get_run_path, get_run_data_path
+
+
+def test_get_project_path():
+ assert _get_project_path("test-project") == "projects/test-project"
+
+
+def test_get_run_path():
+ assert (
+ _get_run_path("test-project", "test-run")
+ == "projects/test-project/runs/test-run"
+ )
+
+
+def test_get_run_data_path():
+ assert (
+ get_run_data_path("test-project", "test-run", "raw_data")
+ == "projects/test-project/runs/test-run/raw_data"
+ )
+ assert (
+ get_run_data_path("test-project", "test-run", "processed_data")
+ == "projects/test-project/runs/test-run/processed_data"
+ )
diff --git a/euphro_tools/utils.py b/euphro_tools/utils.py
new file mode 100644
index 000000000..bfe788780
--- /dev/null
+++ b/euphro_tools/utils.py
@@ -0,0 +1,15 @@
+from typing import Literal
+
+
+def get_run_data_path(
+ project_slug: str, run_label: str, data_type: Literal["raw_data", "processed_data"]
+):
+ return _get_run_path(project_slug, run_label) + f"/{data_type}"
+
+
+def _get_run_path(project_slug: str, run_label: str):
+ return _get_project_path(project_slug) + f"/runs/{run_label}"
+
+
+def _get_project_path(project_slug: str):
+ return f"projects/{project_slug}"
diff --git a/euphrosyne/api_urls.py b/euphrosyne/api_urls.py
index a00fd6d8d..2950cbddd 100644
--- a/euphrosyne/api_urls.py
+++ b/euphrosyne/api_urls.py
@@ -14,4 +14,5 @@
),
path("auth/", include("euphro_auth.api_urls")),
path("lab/", include("lab.api_urls")),
+ path("data-request/", include("data_request.api_urls")),
]
diff --git a/euphrosyne/assets/css/utils.css b/euphrosyne/assets/css/utils.css
index cbe1babdc..32197fd46 100644
--- a/euphrosyne/assets/css/utils.css
+++ b/euphrosyne/assets/css/utils.css
@@ -16,6 +16,11 @@
justify-content: space-between;
}
+.flex-container---end {
+ display: flex;
+ justify-content: end;
+}
+
.flex-flow--wrap {
flex-flow: wrap;
}
diff --git a/euphrosyne/assets/js/components/header/HeaderNav.tsx b/euphrosyne/assets/js/components/header/HeaderNav.tsx
index e7535786d..98b809272 100644
--- a/euphrosyne/assets/js/components/header/HeaderNav.tsx
+++ b/euphrosyne/assets/js/components/header/HeaderNav.tsx
@@ -56,6 +56,11 @@ export default function HeaderNav({ currentPath, items }: HeaderNavProps) {
}
>
{item.title}
+          {!!item.badge && (
+            <span className="fr-badge fr-badge--sm">
+              {item.badge}
+            </span>
+          )}
))}
diff --git a/euphrosyne/assets/js/web-components/dating-open-theso-type-ahead.ts b/euphrosyne/assets/js/web-components/dating-open-theso-type-ahead.ts
new file mode 100644
index 000000000..d358f7756
--- /dev/null
+++ b/euphrosyne/assets/js/web-components/dating-open-theso-type-ahead.ts
@@ -0,0 +1,31 @@
+import { Result } from "../type-ahead-list.component";
+import {
+ OpenThesoTypeAhead,
+ SearchType,
+ OpenThesoResult,
+} from "./open-theso-type-ahead";
+
+export class DatingOpenThesoTypeAhead extends OpenThesoTypeAhead {
+ searchType: SearchType = "fullpathSearch";
+
+ connectedCallback(): void {
+ this.thesorusId = this.getAttribute("thesorus-id") || "";
+ super.connectedCallback();
+ }
+
+  async fetchResults(query: string): Promise<Result[]> {
+ const data = await this.doFetch(query);
+ return data.map((item: OpenThesoResult[]) => ({
+ label: item.map((i) => i.label).join(" > "),
+ id: item.slice(-1)[0].id,
+ })) as Result[];
+ }
+}
+
+customElements.define(
+ "dating-open-theso-type-ahead",
+ DatingOpenThesoTypeAhead,
+ {
+ extends: "div",
+ },
+);
diff --git a/euphrosyne/assets/js/web-components/material-type-ahead.ts b/euphrosyne/assets/js/web-components/material-type-ahead.ts
index 6130cf6b8..125b6fce9 100644
--- a/euphrosyne/assets/js/web-components/material-type-ahead.ts
+++ b/euphrosyne/assets/js/web-components/material-type-ahead.ts
@@ -1,34 +1,21 @@
-import { TypeAheadList, Result } from "../type-ahead-list.component";
-
-interface OpenThesoResult {
- id: string;
- arkId: string;
- label: string;
-}
+import { Result } from "../type-ahead-list.component";
+import {
+ OpenThesoResult,
+ OpenThesoTypeAhead,
+ SearchType,
+} from "./open-theso-type-ahead";
// eslint-disable-next-line @typescript-eslint/no-unused-vars
-class MaterialTypeAhead extends TypeAheadList {
-  async fetchResults(query: string): Promise<Result[]> {
- const q = encodeURIComponent(query);
- const response = await fetch(
- `https://opentheso.huma-num.fr/opentheso/openapi/v1/concept/th291/autocomplete/${q}?lang=fr&exactMatch=false`,
- );
+class MaterialTypeAhead extends OpenThesoTypeAhead {
+ thesorusId = "th291";
+ searchType: SearchType = "autocomplete";
- if (response && response.status === 404) {
- return [];
- }
- if (!response || !response.ok) {
- throw new Error("Failed to fetch results");
- }
-
- const data = await response.json();
- if (!data || !data.length) {
- return [];
- }
+  async fetchResults(query: string): Promise<Result[]> {
+ const data = await this.doFetch(query);
return data.map((item: OpenThesoResult) => ({
label: item.label,
id: item.id,
- }));
+ })) as Result[];
}
}
diff --git a/euphrosyne/assets/js/web-components/open-theso-type-ahead.ts b/euphrosyne/assets/js/web-components/open-theso-type-ahead.ts
new file mode 100644
index 000000000..019e00373
--- /dev/null
+++ b/euphrosyne/assets/js/web-components/open-theso-type-ahead.ts
@@ -0,0 +1,44 @@
+import { TypeAheadList } from "../type-ahead-list.component";
+
+export type SearchType = "fullpathSearch" | "autocomplete";
+
+export interface OpenThesoResult {
+ id: string;
+ arkId: string;
+ label: string;
+}
+
+export abstract class OpenThesoTypeAhead extends TypeAheadList {
+ thesorusId?: string;
+ abstract searchType: SearchType;
+
+ async doFetch(query: string): Promise {
+ if (!this.thesorusId) {
+ throw new Error("thesorus-id attribute is required");
+ }
+ const q = encodeURIComponent(query);
+
+ let url;
+
+ if (this.searchType === "fullpathSearch") {
+ url = `https://opentheso.huma-num.fr/opentheso/openapi/v1/concept/${this.thesorusId}/search/fullpath?q=${q}&lang=fr&exactMatch=false`;
+ } else {
+ // autocomplete
+ url = `https://opentheso.huma-num.fr/opentheso/openapi/v1/concept/${this.thesorusId}/autocomplete/${q}?lang=fr&exactMatch=false`;
+ }
+ const response = await fetch(url);
+
+ if (response && response.status === 404) {
+ return [];
+ }
+ if (!response || !response.ok) {
+ throw new Error("Failed to fetch results");
+ }
+
+ const data = await response.json();
+ if (!data || !data.length) {
+ return [];
+ }
+ return data;
+ }
+}
diff --git a/euphrosyne/assets/js/web-components/period-type-ahead.ts b/euphrosyne/assets/js/web-components/period-type-ahead.ts
deleted file mode 100644
index 98a30c2a0..000000000
--- a/euphrosyne/assets/js/web-components/period-type-ahead.ts
+++ /dev/null
@@ -1,37 +0,0 @@
-import { TypeAheadList, Result } from "../type-ahead-list.component";
-
-interface OpenThesoResult {
- id: string;
- arkId: string;
- label: string;
-}
-
-// eslint-disable-next-line @typescript-eslint/no-unused-vars
-class PeriodTypeAhead extends TypeAheadList {
-  async fetchResults(query: string): Promise<Result[]> {
- const q = encodeURIComponent(query);
- const response = await fetch(
- `https://opentheso.huma-num.fr/opentheso/openapi/v1/concept/th289/search/fullpath?q=${q}&lang=fr&exactMatch=false`,
- );
-
- if (response && response.status === 404) {
- return [];
- }
- if (!response || !response.ok) {
- throw new Error("Failed to fetch results");
- }
-
- const data = await response.json();
- if (!data || !data.length) {
- return [];
- }
- return data.map((item: OpenThesoResult[]) => ({
- label: item.map((i) => i.label).join(" > "),
- id: item.slice(-1)[0].id,
- }));
- }
-}
-
-customElements.define("period-type-ahead", PeriodTypeAhead, {
- extends: "div",
-});
diff --git a/euphrosyne/nav.py b/euphrosyne/nav.py
new file mode 100644
index 000000000..06b6dddfe
--- /dev/null
+++ b/euphrosyne/nav.py
@@ -0,0 +1,9 @@
+from django.http import HttpRequest
+
+from data_request.nav import get_nav_items as data_request_get_nav_items
+from lab.nav import NavItemJson
+from lab.nav import get_nav_items as lab_get_nav_items
+
+
+def get_nav_items(request: HttpRequest) -> list[NavItemJson]:
+ return lab_get_nav_items(request) + data_request_get_nav_items(request)
diff --git a/euphrosyne/settings.py b/euphrosyne/settings.py
index 55e310563..e21b6fca7 100644
--- a/euphrosyne/settings.py
+++ b/euphrosyne/settings.py
@@ -18,6 +18,7 @@
import dj_database_url
import psycopg2
import sentry_sdk
+from django.http import HttpRequest
from sentry_sdk.integrations.django import DjangoIntegration
# pylint: disable=abstract-class-instantiated
@@ -46,6 +47,12 @@
["localhost", ".scalingo.io"] if not DEBUG else []
)
+CORS_ALLOWED_ORIGINS = (
+ os.environ["CORS_ALLOWED_ORIGINS"].split(",")
+ if os.getenv("CORS_ALLOWED_ORIGINS")
+ else []
+)
+
CSRF_TRUSTED_ORIGINS = os.getenv("CSRF_TRUSTED_ORIGINS", "").split()
SITE_URL = os.environ["SITE_URL"]
@@ -54,6 +61,7 @@
# Application definition
INSTALLED_APPS = [
+ "corsheaders",
"euphrosyne.apps.AdminConfig",
"euphro_auth",
"django.forms",
@@ -68,11 +76,13 @@
"graphene_django",
"django_filters",
"lab",
+ "data_request",
"orcid_oauth",
"static_pages",
] + (["debug_toolbar"] if DEBUG else [])
MIDDLEWARE = (["debug_toolbar.middleware.DebugToolbarMiddleware"] if DEBUG else []) + [
+ "corsheaders.middleware.CorsMiddleware",
"django.middleware.security.SecurityMiddleware",
"whitenoise.middleware.WhiteNoiseMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
@@ -315,3 +325,16 @@ def build_development_db_name(base_db_name):
GRAPHENE = {"SCHEMA": "lab.schema.schema"}
HDF5_ENABLE = os.getenv("HDF5_ENABLE", "false") == "true"
+
+ELASTICSEARCH_HOST = os.getenv("ELASTICSEARCH_HOST")
+ELASTICSEARCH_USERNAME = os.getenv("ELASTICSEARCH_USERNAME")
+ELASTICSEARCH_PASSWORD = os.getenv("ELASTICSEARCH_PASSWORD")
+
+
+def _get_nav_items(request: HttpRequest) -> list:
+ from .nav import get_nav_items # pylint: disable=import-outside-toplevel
+
+ return get_nav_items(request)
+
+
+NAV_GET_NAV_ITEMS = _get_nav_items
diff --git a/euphrosyne/tests/test_nav.py b/euphrosyne/tests/test_nav.py
new file mode 100644
index 000000000..07eea48fa
--- /dev/null
+++ b/euphrosyne/tests/test_nav.py
@@ -0,0 +1,65 @@
+import pytest
+from django.test import RequestFactory
+
+from euphro_auth.tests import factories as auth_factories
+
+from ..nav import get_nav_items
+
+
+@pytest.mark.django_db
+def test_get_nav_items_for_admin():
+ request = RequestFactory().get("/")
+ request.user = auth_factories.LabAdminUserFactory()
+ assert get_nav_items(request) == [
+ {
+ "title": "Tableau de bord",
+ "href": "/",
+ "iconName": "fr-icon-calendar-line",
+ "exactPath": True,
+ "extraPath": [],
+ },
+ {
+ "title": "Projets",
+ "href": "/lab/project/",
+ "iconName": "fr-icon-survey-line",
+ "extraPath": ["/lab/run/"],
+ "exactPath": False,
+ },
+ {
+ "title": "Utilisateurs",
+ "href": "/euphro_auth/user/",
+ "iconName": "fr-icon-user-line",
+ "exactPath": False,
+ "extraPath": ["/euphro_auth/userinvitation/"],
+ },
+ {
+ "title": "Demandes de données",
+ "href": "/data_request/datarequest/",
+ "iconName": "fr-icon-download-line",
+ "exactPath": False,
+ "extraPath": None,
+ "badge": 0,
+ },
+ ]
+
+
+@pytest.mark.django_db
+def test_get_nav_items_for_staff():
+ request = RequestFactory().get("/")
+ request.user = auth_factories.StaffUserFactory()
+ assert get_nav_items(request) == [
+ {
+ "title": "Projets",
+ "href": "/lab/project/",
+ "iconName": "fr-icon-survey-line",
+ "extraPath": ["/lab/run/"],
+ "exactPath": False,
+ },
+ {
+ "title": "Compte",
+ "href": f"/euphro_auth/user/{request.user.id}/change/",
+ "iconName": "fr-icon-user-line",
+ "exactPath": False,
+ "extraPath": [],
+ },
+ ]
diff --git a/euphrosyne/tests/test_script_json_data.py b/euphrosyne/tests/test_script_json_data.py
index b23281344..79b96fa71 100644
--- a/euphrosyne/tests/test_script_json_data.py
+++ b/euphrosyne/tests/test_script_json_data.py
@@ -2,7 +2,8 @@
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
-from lab.tests.factories import ProjectWithLeaderFactory, StaffUserFactory
+from euphro_auth.tests.factories import StaffUserFactory
+from lab.tests.factories import ProjectWithLeaderFactory
class TestScriptJsonData(TestCase):
diff --git a/lab/api_urls.py b/lab/api_urls.py
index 5ce18a119..b39767ff6 100644
--- a/lab/api_urls.py
+++ b/lab/api_urls.py
@@ -1,4 +1,4 @@
-from django.urls import path
+from django.urls import include, path
from .api_views.calendar import CalendarView
from .api_views.objectgroup import get_eros_object
@@ -12,7 +12,7 @@
urlpatterns = [
path(
- "calendar",
+ "calendar/",
CalendarView.as_view(),
name="calendar",
),
@@ -51,4 +51,5 @@
get_eros_object,
name="objectgroup-c2rmf-fetch",
),
+ path("catalog/", include("lab.elasticsearch.api_urls"), name="catalog-api"),
]
diff --git a/lab/api_views/serializers.py b/lab/api_views/serializers.py
index 5552029f4..f294e79ba 100644
--- a/lab/api_views/serializers.py
+++ b/lab/api_views/serializers.py
@@ -117,10 +117,15 @@ class Meta:
class _RunObjectGroupObjectGroupSerializer(serializers.ModelSerializer):
+ dating = serializers.SerializerMethodField()
+
class Meta:
model = ObjectGroup
fields = ("label", "id", "object_count", "dating", "materials")
+ def get_dating(self, obj: ObjectGroup):
+ return obj.dating_era.label if obj.dating_era else ""
+
class RunObjectGroupSerializer(serializers.ModelSerializer):
objectgroup = _RunObjectGroupObjectGroupSerializer()
diff --git a/lab/elasticsearch/__init__.py b/lab/elasticsearch/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/lab/elasticsearch/api_urls.py b/lab/elasticsearch/api_urls.py
new file mode 100644
index 000000000..f70dd4efa
--- /dev/null
+++ b/lab/elasticsearch/api_urls.py
@@ -0,0 +1,21 @@
+from django.urls import path
+
+from . import api_views
+
+urlpatterns = (
+ path(
+ "search",
+ api_views.search,
+ name="search",
+ ),
+ path(
+ "aggregate",
+ api_views.aggregate_field,
+ name="aggregate",
+ ),
+ path(
+ "aggregate-created",
+ api_views.aggregate_created,
+ name="aggregate-created",
+ ),
+)
diff --git a/lab/elasticsearch/api_views.py b/lab/elasticsearch/api_views.py
new file mode 100644
index 000000000..89f527de0
--- /dev/null
+++ b/lab/elasticsearch/api_views.py
@@ -0,0 +1,37 @@
+from rest_framework.decorators import api_view
+from rest_framework.response import Response
+
+from .client import CatalogClient
+
+
+@api_view(["POST"])
+def search(request):
+ """Catalog search endpoint"""
+ results = CatalogClient().search(**request.data)
+ return Response(results)
+
+
+@api_view(["GET"])
+def aggregate_field(request):
+ """Catalog aggregation endpoint"""
+ exclude = None
+ if "field" not in request.query_params:
+ return Response({"error": "'field' query param is required"}, status=400)
+ if "exclude" in request.query_params:
+ exclude = request.query_params["exclude"].split(",")
+ results = CatalogClient().aggregate_terms(
+ request.query_params["field"],
+ query=request.query_params.get("query"),
+ exclude=exclude,
+ )
+ return Response(results)
+
+
+@api_view(["GET"])
+def aggregate_created(request):
+    """Catalog date-histogram aggregation endpoint on the "created" field (by year)"""
+ results = CatalogClient().aggregate_date(
+ "created",
+ "year",
+ )
+ return Response(results)
diff --git a/lab/elasticsearch/catalog.py b/lab/elasticsearch/catalog.py
new file mode 100644
index 000000000..397eece0c
--- /dev/null
+++ b/lab/elasticsearch/catalog.py
@@ -0,0 +1,281 @@
+import logging
+from typing import TypedDict
+
+from slugify import slugify
+
+from lab.methods.dto import method_model_to_dto
+from lab.models import ObjectGroup, Project
+from lab.objects.c2rmf import ErosHTTPError, fetch_full_objectgroup_from_eros
+from lab.participations.models import Participation
+from lab.runs.models import Run
+from lab.thesauri.opentheso import (
+ fetch_era_parent_ids_from_id,
+ fetch_period_parent_ids_from_id,
+)
+
+from .documents import (
+ CatalogItem,
+ LeaderDoc,
+ ObjectGroupDoc,
+ ObjectPageData,
+ ProjectPageData,
+ RunDoc,
+)
+
+logger = logging.getLogger(__name__)
+
+
+class LocationDict(TypedDict):
+ lat: float
+ lon: float
+
+
+class DatingDict(TypedDict, total=False):
+ dating_period_label: str | None
+ dating_period_theso_huma_num_id: str | None
+ dating_period_theso_huma_num_parent_ids: list[str] | None
+ dating_era_label: str | None
+ dating_era_theso_huma_num_id: str | None
+ dating_era_theso_huma_num_parent_ids: list[str] | None
+
+
+def _create_leader_doc(leader: Participation):
+ doc = LeaderDoc(
+ user_first_name=leader.user.first_name,
+ user_last_name=leader.user.last_name,
+ )
+ if leader.institution:
+ doc.institution_name = leader.institution.name # type: ignore[assignment]
+ doc.institution_country = leader.institution.country # type: ignore[assignment]
+ return doc
+
+
+def _create_project_page_data(
+ runs: list[Run], object_groups: list[ObjectGroup], leader: Participation | None
+):
+ page_data = ProjectPageData(leader=_create_leader_doc(leader) if leader else None)
+ for run in runs:
+ page_data.add_run(
+ run=RunDoc(
+ id=run.id,
+ label=run.label,
+ start_date=run.start_date,
+ particle_type=run.particle_type,
+ energy_in_kev=run.energy_in_keV,
+ beamline=run.beamline,
+ project_slug=run.project.slug,
+ is_data_embargoed=run.is_data_embargoed,
+ ),
+ methods=method_model_to_dto(run),
+ )
+ for object_group in object_groups:
+ discovery_place_label: str | None = None
+ dating_era_label: str | None = None
+ dating_period_label: str | None = None
+ if object_group.c2rmf_id:
+ # Fetch object group from EROS
+ object_group = (
+ fetch_full_objectgroup_from_eros(
+ c2rmf_id=object_group.c2rmf_id, object_group=object_group
+ )
+ or object_group
+ )
+ else:
+ # Fetch thesauri information for non-EROS object groups
+ if object_group.dating_period:
+ dating_period_label = object_group.dating_period.label
+ if object_group.dating_era:
+ dating_era_label = object_group.dating_era.label
+
+ if object_group.discovery_place_location:
+ discovery_place_label = object_group.discovery_place_location.label
+ page_data.add_object_group(
+ object_group=ObjectGroupDoc(
+ **{
+ "id": object_group.id,
+ "c2rmf_id": object_group.c2rmf_id,
+ "label": object_group.label,
+ "materials": object_group.materials,
+ "discovery_place_label": discovery_place_label,
+ "collection": object_group.collection,
+ "inventory": object_group.inventory,
+ "dating_period_label": dating_period_label,
+ "dating_era_label": dating_era_label,
+ }
+ ),
+ objects=list(
+ object_group.object_set.values("label", "inventory", "collection")
+ ),
+ )
+ return page_data
+
+
+def _create_object_group_page_data(projects: list[Project], runs: list[Run]):
+ page_data = ObjectPageData()
+ for run in runs:
+ page_data.add_run(
+ run=RunDoc(
+ id=run.id,
+ label=run.label,
+ start_date=run.start_date,
+ particle_type=run.particle_type,
+ energy_in_kev=run.energy_in_keV,
+ beamline=run.beamline,
+ project_slug=run.project.slug,
+ is_data_embargoed=run.is_data_embargoed,
+ ),
+ methods=method_model_to_dto(run),
+ )
+ for project in projects:
+ page_data.add_project(
+ name=project.name,
+ slug=project.slug,
+ leader=_create_leader_doc(project.leader) if project.leader else None,
+ )
+ return page_data
+
+
+# pylint: disable=too-many-arguments
+def build_project_catalog_document(
+ project: Project,
+ materials: list[str],
+ leader: Participation | None,
+ object_groups: list[ObjectGroup],
+ object_group_locations: list[LocationDict],
+ runs: list[Run],
+):
+ page_data = _create_project_page_data(
+ leader=leader, runs=runs, object_groups=object_groups
+ )
+ _id = f"project-{project.id}"
+ catalog_item = CatalogItem(
+ meta={"id": _id},
+ category="project",
+ id=_id,
+ name=project.name,
+ slug=project.slug,
+ materials=materials,
+ comments=project.comments,
+ status=str(project.status),
+ created=project.created,
+ project_page_data=page_data,
+ discovery_place_points=object_group_locations,
+ is_data_available=project.is_data_available,
+ )
+ return catalog_item
+
+
+# pylint: disable=too-many-arguments, too-many-locals
+def build_object_group_catalog_document(
+ object_group: ObjectGroup,
+ projects: list[Project],
+ runs: list[Run],
+ is_data_available: bool,
+):
+ # Page data
+ page_data = _create_object_group_page_data(projects=projects, runs=runs)
+
+ # Location
+ location_geopoint: LocationDict | None = None
+ location_label: str | None = None
+ locations = []
+
+ if object_group.c2rmf_id:
+ # Fetch object group from EROS
+ object_group = _fetch_object_group_from_eros(
+ c2rmf_id=object_group.c2rmf_id, object_group=object_group
+ )
+
+ if (
+ object_group.discovery_place_location
+ and object_group.discovery_place_location.latitude
+ and object_group.discovery_place_location.longitude
+ ):
+ location_geopoint = {
+ "lat": object_group.discovery_place_location.latitude,
+ "lon": object_group.discovery_place_location.longitude,
+ }
+ location_label = object_group.discovery_place_location.label
+ locations = [location_geopoint]
+
+ # Dating
+ dating_dict: DatingDict = {}
+ for field_name in ["dating_period", "dating_era"]:
+ fetch_parent_ids_fn = (
+ fetch_era_parent_ids_from_id
+ if field_name == "dating_era"
+ else fetch_period_parent_ids_from_id
+ )
+ if getattr(object_group, field_name):
+ theso_huma_num_parent_ids = fetch_parent_ids_fn(
+ getattr(object_group, field_name).concept_id
+ )
+ dating_dict = {
+ **dating_dict,
+ f"{field_name}_label": getattr( # type: ignore
+ object_group, field_name
+ ).label,
+ f"{field_name}_theso_huma_num_id": getattr( # type: ignore
+ object_group, field_name
+ ).concept_id,
+ # type: ignore
+ f"{field_name}_theso_huma_num_parent_ids": theso_huma_num_parent_ids,
+ }
+ _id = f"object-{object_group.id}"
+ catalog_item = CatalogItem(
+ meta={"id": _id},
+ id=_id,
+ category="object",
+ name=object_group.label,
+ slug=slugify(object_group.label) + f"-{object_group.id}",
+ is_data_available=is_data_available,
+ created=object_group.created,
+ materials=object_group.materials,
+ object_page_data=page_data,
+ c2rmf_id=object_group.c2rmf_id,
+ collection=object_group.collection,
+ inventory_number=object_group.inventory,
+        inventory_numbers=[object_group.object_set.values_list("inventory", flat=True)],  # NOTE(review): wraps the QuerySet object itself in a one-element list (not its values); overwritten below via catalog_item.inventory_numbers — confirm intent
+ discovery_place_label=location_label,
+ discovery_place_point=location_geopoint,
+ discovery_place_points=locations,
+ **dating_dict,
+ )
+
+ collections = []
+ inventory_numbers = []
+ if object_group.collection:
+ collections.append(object_group.collection)
+ if object_group.inventory:
+ inventory_numbers.append(object_group.inventory)
+ for obj in object_group.object_set.all():
+ catalog_item.add_object(
+ label=obj.label,
+ collection=obj.collection,
+ inventory=obj.inventory,
+ )
+ if obj.collection:
+ collections.append(obj.collection)
+ if obj.inventory:
+ inventory_numbers.append(obj.inventory)
+
+ catalog_item.collections = list(set(collections)) # type: ignore[assignment]
+ catalog_item.inventory_numbers = list(
+ set(inventory_numbers)
+ ) # type: ignore[assignment]
+
+ return catalog_item
+
+
+def _fetch_object_group_from_eros(
+ c2rmf_id: str, object_group: ObjectGroup
+) -> ObjectGroup:
+ try:
+ object_group_with_eros_information = fetch_full_objectgroup_from_eros(
+ c2rmf_id=c2rmf_id,
+ object_group=object_group,
+ )
+ except ErosHTTPError as error:
+ logger.error("Failed to fetch object group from EROS: %s", error, exc_info=True)
+ return object_group
+ return object_group_with_eros_information or object_group
diff --git a/lab/elasticsearch/client.py b/lab/elasticsearch/client.py
new file mode 100644
index 000000000..1d13cd790
--- /dev/null
+++ b/lab/elasticsearch/client.py
@@ -0,0 +1,132 @@
+import logging
+from typing import Generic, TypedDict, TypeVar, Unpack
+
+from django.conf import settings
+from opensearchpy import OpenSearch
+
+from lab.elasticsearch import queries
+from lab.objects.models import ObjectGroup
+from lab.projects.models import Project
+from lab.runs.models import Run
+
+from .catalog import (
+ LocationDict,
+ build_object_group_catalog_document,
+ build_project_catalog_document,
+)
+from .documents import CatalogItem
+
+logger = logging.getLogger(__name__)
+
_T = TypeVar("_T")


class Singleton(type, Generic[_T]):
    """Metaclass caching one instance per concrete class.

    The first call instantiates the class; every later call returns that
    same object. The cache lives on the metaclass and is keyed by class,
    so distinct classes sharing this metaclass get distinct instances.
    """

    _instances: dict["Singleton[_T]", _T] = {}

    def __call__(cls, *args, **kwargs):
        try:
            return cls._instances[cls]
        except KeyError:
            instance = super().__call__(*args, **kwargs)
            cls._instances[cls] = instance
            return instance
+
+
class ObjectGroupExtraDict(TypedDict):
    """Aggregated indexing context for one object group across projects."""

    # All projects whose runs reference this object group.
    projects: list[Project]
    # All runs (across those projects) referencing this object group.
    runs: list[Run]
    # True if at least one related project has its data available
    # (merged with |= in CatalogClient.index_from_projects).
    is_data_available: bool
+
+
class CatalogClient(metaclass=Singleton):
    """Singleton wrapper around the OpenSearch 'catalog' index.

    Instantiating it validates the Django settings, opens the connection
    and ensures the CatalogItem index/mapping exists.

    Raises:
        ValueError: if any ELASTICSEARCH_* setting is empty or missing.
    """

    def __init__(self):
        # Fail fast on missing configuration instead of at first request.
        for setting in [
            "ELASTICSEARCH_USERNAME",
            "ELASTICSEARCH_PASSWORD",
            "ELASTICSEARCH_HOST",
        ]:
            if not getattr(settings, setting):
                raise ValueError(
                    "Incorrect elasticsearch configuration. %s is missing" % setting
                )
        # ELASTICSEARCH_HOST is expected as "<protocol>://<host>".
        user, password, (protocol, host) = (
            settings.ELASTICSEARCH_USERNAME,
            settings.ELASTICSEARCH_PASSWORD,
            settings.ELASTICSEARCH_HOST.split("://"),
        )
        # NOTE(review): use_ssl is hard-coded to False even when the
        # configured protocol is https -- confirm this is intended.
        self.client = client = OpenSearch(
            hosts=[f"{protocol}://{user}:{password}@{host}"],
            http_compress=True,  # enables gzip compression for request bodies
            use_ssl=False,
            ssl_assert_hostname=False,
            ssl_show_warn=False,
        )
        # Create the "catalog" index and mapping if they do not exist yet.
        CatalogItem.init(using=client)

    def search(self, **kwargs: Unpack[queries.QueryParams]):
        """Run a filtered search on the catalog index from query params."""
        query = queries.filter_query(kwargs)
        return self.client.search(index="catalog", body=query)

    def aggregate_terms(
        self, field: str, query: str | None = None, exclude: list[str] | None = None
    ):
        """Terms aggregation on *field*, optionally constrained/excluded."""
        return self.client.search(
            queries.terms_agg(field, query=query, exclude=exclude)
        )

    def aggregate_date(self, field: str, interval: str):
        """Date-histogram aggregation on *field* per calendar *interval*."""
        return self.client.search(queries.date_historiogram_agg(field, interval))

    def index_from_projects(self, projects: list[Project]):
        """Index projects and related object groups"""
        # Accumulate, per object group, every project/run referencing it so
        # each group is saved once with merged metadata.
        objectgroups_dict: dict[ObjectGroup, ObjectGroupExtraDict] = {}
        for project in projects:
            leader = project.leader
            runs = list(project.runs.all())
            # De-duplicate object groups shared by several runs.
            objectgroups = list(
                set(obj for run in runs for obj in run.run_object_groups.all())
            )
            materials = []
            locations: list[LocationDict] = []
            for objectgroup in objectgroups:
                materials.extend(objectgroup.materials)
                # Only keep fully-specified coordinates.
                if (
                    objectgroup.discovery_place_location
                    and objectgroup.discovery_place_location.latitude
                    and objectgroup.discovery_place_location.longitude
                ):
                    locations.append(
                        {
                            "lat": objectgroup.discovery_place_location.latitude,
                            "lon": objectgroup.discovery_place_location.longitude,
                        }
                    )
                if objectgroup not in objectgroups_dict:
                    objectgroups_dict[objectgroup] = {
                        "projects": [project],
                        "runs": runs,
                        "is_data_available": project.is_data_available,
                    }
                else:
                    objectgroups_dict[objectgroup]["runs"].extend(runs)
                    objectgroups_dict[objectgroup]["projects"].append(project)
                    # Data counts as available if ANY related project has it.
                    objectgroups_dict[objectgroup][
                        "is_data_available"
                    ] |= project.is_data_available
            logger.debug("Saving project %s", str(project))
            item = build_project_catalog_document(
                project=project,
                materials=list(set(materials)),
                leader=leader,
                object_groups=objectgroups,
                object_group_locations=locations,
                runs=runs,
            )
            item.save(using=self.client)
        # Second pass: one document per object group with merged context.
        for obj, extra in objectgroups_dict.items():
            logger.debug("Saving object group %s", str(obj))
            item = build_object_group_catalog_document(
                object_group=obj,
                projects=extra["projects"],
                runs=extra["runs"],
                is_data_available=extra["is_data_available"],
            )
            item.save(using=self.client)
diff --git a/lab/elasticsearch/documents.py b/lab/elasticsearch/documents.py
new file mode 100644
index 000000000..df3ce6f9d
--- /dev/null
+++ b/lab/elasticsearch/documents.py
@@ -0,0 +1,185 @@
+from typing import TypedDict
+
+import opensearchpy as os
+
+from lab.methods.dto import DetectorDTO, MethodDTO
+
+
class _ObjectDict(TypedDict):
    """Plain-dict shape of one object row passed to page-data builders."""

    label: str
    collection: str
    inventory: str
+
+
class ObjectDoc(os.InnerDoc):
    """Nested document for a single object inside an object group."""

    label = os.Text()
    collection = os.Keyword()
    inventory = os.Keyword()
+
+
class ObjectGroupDoc(os.InnerDoc):
    """Nested document describing one object group (used in page data)."""

    id = os.Keyword()
    c2rmf_id = os.Keyword()
    label = os.Text()
    materials = os.Text(multi=True)
    discovery_place_label = os.Text()
    collection = os.Keyword()
    inventory = os.Keyword()
    dating_period_label = os.Text()
    dating_era_label = os.Text()
    objects = os.Object(ObjectDoc, multi=True)

    def add_object(self, label: str, collection: str, inventory: str):
        # Append one nested object row to this group.
        self.objects.append(
            ObjectDoc(label=label, collection=collection, inventory=inventory)
        )
+
+
class LeaderDoc(os.InnerDoc):
    """Project leader identity and institution."""

    user_first_name = os.Keyword()
    user_last_name = os.Keyword()
    institution_name = os.Keyword()
    institution_country = os.Keyword()
+
+
class DetectorDoc(os.InnerDoc):
    """Detector used by an analysis method, with its filter names."""

    name = os.Keyword()
    filters = os.Keyword(multi=True)
+
+
class MethodDoc(os.InnerDoc):
    """Analysis method and the detectors it used."""

    name = os.Keyword()
    detectors = os.Object(DetectorDoc, multi=True)

    def add_detector(self, name: str, filters: list[str]):
        # Append one detector entry (name + filters) to this method.
        self.detectors.append(DetectorDoc(name=name, filters=filters))
+
+
class RunDoc(os.InnerDoc):
    """Nested document describing one experiment run."""

    id = os.Keyword()
    label = os.Text()
    start_date = os.Date()
    particle_type = os.Keyword()
    energy_in_kev = os.Keyword()
    beamline = os.Keyword()
    methods = os.Object(MethodDoc, multi=True)
    project_slug = os.Keyword()
    is_data_embargoed = os.Boolean()

    def add_method(
        self,
        name: str,
        detectors: list[DetectorDTO] | None = None,
    ):
        # Append a method, attaching its detectors when provided.
        method_doc = MethodDoc(name=name)
        if detectors:
            for detector in detectors:
                method_doc.add_detector(detector.name, detector.filters)
        self.methods.append(method_doc)
+
+
class ProjectDoc(os.InnerDoc):
    """Minimal project reference (used in object page data)."""

    name = os.Text()
    slug = os.Keyword()
    leader = os.Object(LeaderDoc)
+
+
class ProjectPageData(os.InnerDoc):
    """Nested data attached to a project catalog item."""

    leader = os.Object(LeaderDoc)
    runs = os.Object(RunDoc, multi=True)
    object_groups = os.Object(ObjectGroupDoc, multi=True)

    def add_object_group(
        self,
        object_group: ObjectGroupDoc,
        objects: list[_ObjectDict] | None = None,
    ):
        """Append *object_group*, first attaching the given object rows."""
        for obj in objects or []:
            object_group.add_object(
                obj["label"], obj["collection"], obj["inventory"]
            )
        self.object_groups.append(object_group)

    def add_run(
        self,
        run: RunDoc,
        methods: list[MethodDTO] | None = None,
    ):
        """Append *run*, first attaching the given methods."""
        for method in methods or []:
            run.add_method(method.name, method.detectors)
        self.runs.append(run)
+
+
class ObjectPageData(os.InnerDoc):
    """Nested data attached to an object-group catalog item."""

    runs = os.Object(RunDoc, multi=True)
    projects = os.Object(ProjectDoc, multi=True)

    def add_run(
        self,
        run: RunDoc,
        methods: list[MethodDTO] | None = None,
    ):
        """Append *run*, first attaching the given methods."""
        for method in methods or []:
            run.add_method(method.name, method.detectors)
        self.runs.append(run)

    def add_project(
        self,
        name: str,
        slug: str,
        leader: LeaderDoc | None = None,
    ):
        """Append a project reference built from the given fields."""
        self.projects.append(ProjectDoc(name=name, slug=slug, leader=leader))
+
+
class CatalogItem(os.Document):
    """Top-level catalog document: one per project or per object group.

    ``category`` discriminates the two kinds; category-specific fields are
    grouped below.
    """

    class Index:
        # Both categories share the single "catalog" index.
        name = "catalog"

    # Fields common to both categories.
    id = os.Keyword()
    category = os.Keyword()  # "project" or "object"
    name = os.Text()
    slug = os.Keyword()
    created = os.Date()
    materials = os.Keyword(multi=True)
    is_data_available = os.Boolean()

    # Nested detail-page payloads.
    project_page_data = os.Object(ProjectPageData)
    object_page_data = os.Object(ObjectPageData)

    # Project specific fields
    comments = os.Text()
    status = os.Keyword()
    discovery_place_points = os.GeoPoint(multi=True)

    # Object specific fields
    c2rmf_id = os.Keyword()
    discovery_place_label = os.Text()
    discovery_place_point = os.GeoPoint()
    collection = os.Text()
    inventory_number = os.Keyword()
    objects = os.Object(ObjectDoc, multi=True)
    collections = os.Keyword(multi=True)
    inventory_numbers = os.Keyword(multi=True)

    # Dating fields (theso / Huma-Num thesaurus identifiers).
    dating_period_label = os.Text()
    dating_period_theso_huma_num_id = os.Keyword()
    dating_period_theso_huma_num_parent_ids = os.Keyword(multi=True)
    dating_era_label = os.Text()
    dating_era_theso_huma_num_id = os.Keyword()
    dating_era_theso_huma_num_parent_ids = os.Keyword(multi=True)

    def add_object(
        self,
        label: str,
        collection: str,
        inventory: str,
    ):
        # Append one nested object row to this catalog item.
        self.objects.append(
            ObjectDoc(label=label, collection=collection, inventory=inventory)
        )
diff --git a/lab/elasticsearch/queries.py b/lab/elasticsearch/queries.py
new file mode 100644
index 000000000..a0a0e66fc
--- /dev/null
+++ b/lab/elasticsearch/queries.py
@@ -0,0 +1,301 @@
+import dataclasses
+import datetime
+from typing import Literal, NotRequired, TypedDict, cast
+
+from lab.projects.models import Project
+
+
class GeoPoint(TypedDict):
    """A latitude/longitude pair as used by ES geo queries."""

    lat: float
    lon: float
+
+
class Location(TypedDict):
    """Bounding-box corners for a geo_bounding_box query (any subset)."""

    top_left: NotRequired[GeoPoint]
    bottom_right: NotRequired[GeoPoint]
    top_right: NotRequired[GeoPoint]
    bottom_left: NotRequired[GeoPoint]
+
+
class QueryParams(TypedDict, total=False):
    """Accepted catalog search parameters (all optional).

    NOTE(review): the pagination offset is declared here as ``_from`` but
    ``filter_query`` pops the key ``"from"`` -- confirm which spelling
    callers actually send.
    """

    q: str
    status: Project.Status
    materials: list[str]
    dating_period_ids: list[str]
    dating_era_ids: list[str]
    category: Literal["project", "object"]
    c2rmf_id: str
    created_from: datetime.datetime
    created_to: datetime.datetime
    location: Location
    collection: str
    inventory: str
    is_data_available: bool
    _from: int
    size: int
    sort: Literal["asc", "desc"]
+
+
+@dataclasses.dataclass
+class Query:
+ must: list[dict]
+ filter: list[dict]
+
+ def __init__(self):
+ self.must = []
+ self.filter = []
+
+ def build_query(
+ self,
+ params: QueryParams,
+ size: int | None = None,
+ _from: int | None = None,
+ sort: Literal["asc", "desc"] | None = None,
+ ):
+ self._process_params(params)
+ if self.must or self.filter:
+ query = {
+ "query": {
+ "bool": {
+ "filter": self.filter,
+ "must": self.must,
+ },
+ },
+ }
+ else:
+ query = match_all_query()
+ query["sort"] = _sort_expression("created", "desc")
+ query = _paginate_query(query, size, _from)
+ if sort:
+ query["sort"] = _sort_expression("created", sort)
+ return query
+
+ def _process_params(self, params: QueryParams):
+ """Process query params into ES query.
+ Populates self.must and self.filter with query expressions."""
+ for key, value in params.items():
+ if (not isinstance(value, bool) and not value) or value is None:
+ continue
+
+ # Filter queries -- if we add more filters we could create match fn
+ # like _match_param_to_filter
+ if key == "category":
+ value = cast(Literal["project", "object"], value)
+ self.filter.append(_category_filter(value))
+ continue
+
+ # Must queries
+ query = self._match_param_to_query(key, params)
+ if query:
+ self.must.append(query)
+
+ # pylint: disable=too-many-return-statements
+ def _match_param_to_query(
+ self,
+ key: str,
+ params: QueryParams,
+ ):
+ match key:
+ case "q":
+ return _search_box_query(
+ params["q"],
+ fields=["name", "collections", "collection", "materials"],
+ )
+ case "status":
+ return _status_query(params["status"])
+ case "materials":
+ return _materials_query(params["materials"])
+ case "dating_period_ids":
+ return _dating_period_query(params["dating_period_ids"])
+ case "dating_era_ids":
+ return _dating_era_query(params["dating_era_ids"])
+ case "category":
+ return _category_filter(params["category"])
+ case "c2rmf_id":
+ return _c2rmf_id_query(params["c2rmf_id"])
+ case "created_from":
+ return _created_query(created_from=params["created_from"])
+ case "created_to":
+ return _created_query(created_to=params["created_to"])
+ case "location":
+ return _discovery_place_query(params["location"])
+ case "collection":
+ return _collection_query(params["collection"])
+ case "inventory":
+ return _inventory_query(params["inventory"])
+ case "is_data_available":
+ return _is_data_available_query(params["is_data_available"])
+
+
def match_all_query():
    """Return a body matching every document in the index."""
    return {"query": {"match_all": {}}}
+
+
def filter_query(
    params: QueryParams,
):
    """Build a full search body from *params* (pops pagination/sort keys).

    Accepts both the ``_from`` spelling declared by ``QueryParams`` and the
    raw ES-style ``from`` key; previously ``_from`` was silently ignored,
    so a caller-supplied offset never reached pagination.

    Note: *params* is mutated (pagination/sort keys are removed).
    """
    size = params.pop("size", None)
    # pop("_from", ...) runs first, so whichever spelling is present wins.
    _from = params.pop("from", params.pop("_from", None))  # type: ignore
    sort = params.pop("sort", None)
    return Query().build_query(
        params,
        size=size,
        _from=_from,
        sort=sort,
    )
+
+
+def terms_agg(field: str, query: str | None = None, exclude: list[str] | None = None):
+ expr = {field: {"terms": {"field": field}}}
+ if query:
+ expr[field]["terms"]["include"] = f".*{query}.*"
+ if exclude:
+ expr[field]["terms"]["exclude"] = "|".join(exclude)
+ return {"size": 0, "aggs": expr}
+
+
def date_historiogram_agg(field: str, interval: str):
    """Zero-hit date-histogram aggregation on *field* per calendar *interval*.

    (Name kept for compatibility; ES calls this a "date histogram".)
    """
    histogram = {"field": field, "calendar_interval": interval}
    return {"size": 0, "aggs": {field: {"date_histogram": histogram}}}
+
+
+def _paginate_query(query: dict, size: int | None = None, _from: int | None = None):
+ return {
+ **query,
+ "from": _from or 0,
+ "size": size or 10,
+ }
+
+
+def _search_box_query(q: str, fields: list[str]):
+ return {
+ "multi_match": {
+ "query": q,
+ "fields": fields,
+ },
+ }
+
+
def _status_query(status: Project.Status):
    # Exact match on the project "status" keyword field.
    return _term_query("status", status)  # type: ignore
+
+
def _materials_query(materials: list[str]):
    # Match documents containing ANY of the given materials (terms = OR).
    return _terms_query("materials", materials)
+
+
def _dating_era_query(dating_ids: list[str]):
    # Match on era thesaurus parent ids, so querying a broad era also
    # matches documents tagged with one of its descendants.
    return _terms_query("dating_era_theso_huma_num_parent_ids", dating_ids)
+
+
def _dating_period_query(dating_ids: list[str]):
    # Match on period thesaurus parent ids (same broad-match logic as eras).
    return _terms_query("dating_period_theso_huma_num_parent_ids", dating_ids)
+
+
def _category_filter(category: Literal["project", "object"]):
    # Exact match on the document category; used in filter context.
    return _term_query("category", category)
+
+
def _c2rmf_id_query(c2rmf_id: str):
    # Exact match on the C2RMF identifier.
    return _term_query("c2rmf_id", c2rmf_id)
+
+
+def _created_query(
+ created_from: datetime.datetime | None = None,
+ created_to: datetime.datetime | None = None,
+):
+ return {
+ "range": {
+ "created": {
+ "gte": (created_from or datetime.datetime.min),
+ "lte": (created_to or datetime.datetime.max),
+ },
+ },
+ }
+
+
def _is_data_available_query(is_data_available: bool):
    # Exact boolean match (False is a meaningful value here).
    return _term_query("is_data_available", is_data_available)
+
+
# https://opensearch.org/docs/latest/query-dsl/geo-and-xy/geo-bounding-box/
def _discovery_place_query(location: Location):
    # Geo bounding-box clause on "discovery_place_points"; *location*
    # supplies the corner points (see the Location TypedDict).
    return {
        "geo_bounding_box": {
            "discovery_place_points": {
                **location,
            },
        }
    }
+
+
def _collection_query(collection: str):
    # Match the collection either as full text (multi_match over the
    # top-level and nested collection fields) or as exact terms on the
    # same fields; any one match suffices (bool/should).
    return _should_query(
        _search_box_query(
            collection,
            [
                "collection",
                "project_page_data.object_groups.collection",
                "project_page_data.object_groups.objects.collection",
            ],
        ),
        _should_query(
            _terms_query("collection", [collection]),
            _term_query("project_page_data.object_groups.collection", collection),
            _term_query(
                "project_page_data.object_groups.objects.collection", collection
            ),
        ),
    )
+
+
def _inventory_query(inventory: str):
    # Exact match on any of the inventory fields: the aggregated
    # top-level list or the nested per-object-group/per-object fields.
    return _should_query(
        _terms_query("inventory_numbers", [inventory]),
        _term_query("project_page_data.object_groups.inventory", inventory),
        _term_query("project_page_data.object_groups.objects.inventory", inventory),
    )
+
+
+def _should_query(*queries: dict):
+ return {
+ "bool": {
+ "should": [*queries],
+ },
+ }
+
+
+def _terms_query(field: str, values: list[str]):
+ return {
+ "terms": {
+ field: values,
+ },
+ }
+
+
+def _term_query(field: str, value: str | bool):
+ return {
+ "term": {
+ field: value,
+ },
+ }
+
+
+def _sort_expression(field: str, order: Literal["asc", "desc"]):
+ exp = {
+ field: {"order": order},
+ }
+ return [exp]
diff --git a/lab/elasticsearch/tests/__init__.py b/lab/elasticsearch/tests/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/lab/elasticsearch/tests/_mock.py b/lab/elasticsearch/tests/_mock.py
new file mode 100644
index 000000000..39f4c9986
--- /dev/null
+++ b/lab/elasticsearch/tests/_mock.py
@@ -0,0 +1,131 @@
+# pylint: disable=line-too-long
+
+import datetime
+
# Exhaustive parameter set exercising every branch of the query builder.
BASE_SEARCH_PARAMS = {
    "q": "q",
    "status": "Status.DATA_AVAILABLE",
    "materials": ["material1", "material2"],
    "dating_period_ids": ["period1", "period2"],
    "dating_era_ids": ["era1", "era2"],
    "category": "project",
    "c2rmf_id": "c2rmf_id",
    "created_from": datetime.datetime(2021, 1, 1).strftime("%Y-%m-%d"),
    "created_to": datetime.datetime(2021, 12, 31).strftime("%Y-%m-%d"),
    "location": {
        "top_left": {"lat": 1.0, "lon": 1.0},
        "bottom_right": {"lat": 1.0, "lon": 1.0},
    },
    "collection": "collection",
    "inventory": "inventory",
    "is_data_available": True,
    "_from": 0,
    "size": 10,
    "sort": "asc",
}
+
+
# Expected ES body produced by Query.build_query(BASE_SEARCH_PARAMS):
# category lands in filter context, everything else in must.
BASE_SEARCH_PARAMS_RELATED_QUERY = {
    "query": {
        "bool": {
            "filter": [{"term": {"category": "project"}}],
            "must": [
                {
                    "multi_match": {
                        "query": "q",
                        "fields": ["name", "collections", "collection", "materials"],
                    }
                },
                {"term": {"status": "Status.DATA_AVAILABLE"}},
                {"terms": {"materials": ["material1", "material2"]}},
                {
                    "terms": {
                        "dating_period_theso_huma_num_parent_ids": [
                            "period1",
                            "period2",
                        ]
                    }
                },
                {"terms": {"dating_era_theso_huma_num_parent_ids": ["era1", "era2"]}},
                {"term": {"c2rmf_id": "c2rmf_id"}},
                {
                    "range": {
                        "created": {
                            "gte": "2021-01-01",
                            "lte": datetime.datetime(9999, 12, 31, 23, 59, 59, 999999),
                        }
                    }
                },
                {
                    "range": {
                        "created": {
                            "gte": datetime.datetime(1, 1, 1, 0, 0),
                            "lte": "2021-12-31",
                        }
                    }
                },
                {
                    "geo_bounding_box": {
                        "discovery_place_points": {
                            "top_left": {"lat": 1.0, "lon": 1.0},
                            "bottom_right": {"lat": 1.0, "lon": 1.0},
                        }
                    }
                },
                {
                    "bool": {
                        "should": [
                            {
                                "multi_match": {
                                    "query": "collection",
                                    "fields": [
                                        "collection",
                                        "project_page_data.object_groups.collection",
                                        "project_page_data.object_groups.objects.collection",
                                    ],
                                }
                            },
                            {
                                "bool": {
                                    "should": [
                                        {"terms": {"collection": ["collection"]}},
                                        {
                                            "term": {
                                                "project_page_data.object_groups.collection": "collection"
                                            }
                                        },
                                        {
                                            "term": {
                                                "project_page_data.object_groups.objects.collection": "collection"
                                            }
                                        },
                                    ]
                                }
                            },
                        ]
                    }
                },
                {
                    "bool": {
                        "should": [
                            {"terms": {"inventory_numbers": ["inventory"]}},
                            {
                                "term": {
                                    "project_page_data.object_groups.inventory": "inventory"
                                }
                            },
                            {
                                "term": {
                                    "project_page_data.object_groups.objects.inventory": "inventory"
                                }
                            },
                        ]
                    }
                },
                {"term": {"is_data_available": True}},
            ],
        }
    },
    "from": 0,
    "size": 10,
}
diff --git a/lab/elasticsearch/tests/conftest.py b/lab/elasticsearch/tests/conftest.py
new file mode 100644
index 000000000..c29903996
--- /dev/null
+++ b/lab/elasticsearch/tests/conftest.py
@@ -0,0 +1,13 @@
+from unittest import mock
+
+import pytest
+
+
@pytest.fixture(
    autouse=True,
    scope="package",
)
def elasticsearch_client():
    """Auto-patch the OpenSearch class for every test in this package."""
    with mock.patch("opensearchpy.OpenSearch") as client_mock:
        yield client_mock
diff --git a/lab/elasticsearch/tests/test_api_views.py b/lab/elasticsearch/tests/test_api_views.py
new file mode 100644
index 000000000..7e37a624d
--- /dev/null
+++ b/lab/elasticsearch/tests/test_api_views.py
@@ -0,0 +1,64 @@
+import json
+from unittest import mock
+
+from django.test import Client, SimpleTestCase
+
+from ._mock import BASE_SEARCH_PARAMS
+
BASE_API_URL = "/api/lab/catalog"


class TestProjectListView(SimpleTestCase):
    """API-level tests for the catalog endpoints (CatalogClient mocked)."""

    def setUp(self):
        self.client = Client()

    @mock.patch("lab.elasticsearch.api_views.CatalogClient")
    def test_search_view(self, mock_cls: mock.MagicMock):
        # The JSON POST body must be forwarded verbatim as search kwargs.
        mock_cls.return_value.search.return_value = {"results": []}
        params = BASE_SEARCH_PARAMS
        response = self.client.post(
            f"{BASE_API_URL}/search",
            json.dumps(params),
            content_type="application/json",
        )

        mock_cls.return_value.search.assert_called_once_with(**params)

        assert response.status_code == 200
        assert response.json() == {"results": []}

    def test_aggregate_field_view(self):
        # Query-string params map to aggregate_terms(field, query, exclude).
        with mock.patch("lab.elasticsearch.api_views.CatalogClient") as mock_cls:
            mock_cls.return_value.aggregate_terms.return_value = {"results": []}
            response = self.client.get(
                f"{BASE_API_URL}/aggregate"
                + "?field=field"
                + "&query=query"
                + "&exclude=exclude1,exclude2"
            )

            mock_cls.return_value.aggregate_terms.assert_called_once_with(
                "field",
                query="query",
                exclude=["exclude1", "exclude2"],
            )

        assert response.status_code == 200

    def test_aggregate_field_view_when_field_is_missing(self):
        # "field" is mandatory for the aggregate endpoint.
        response = self.client.get(f"{BASE_API_URL}/aggregate")
        assert response.status_code == 400
        assert response.json() == {"error": "'field' query param is required"}

    def test_aggregate_created_view(self):
        # The created histogram endpoint always aggregates per year.
        with mock.patch("lab.elasticsearch.api_views.CatalogClient") as mock_cls:
            mock_cls.return_value.aggregate_date.return_value = {"results": []}
            response = self.client.get(f"{BASE_API_URL}/aggregate-created")

            mock_cls.return_value.aggregate_date.assert_called_once_with(
                "created",
                "year",
            )

        assert response.status_code == 200
        assert response.json() == {"results": []}
diff --git a/lab/elasticsearch/tests/test_catalog.py b/lab/elasticsearch/tests/test_catalog.py
new file mode 100644
index 000000000..557831120
--- /dev/null
+++ b/lab/elasticsearch/tests/test_catalog.py
@@ -0,0 +1,231 @@
+from unittest import mock
+
+import pytest
+from slugify import slugify
+
+from lab.objects.c2rmf import ErosHTTPError
+from lab.tests import factories
+
+from ..catalog import (
+ _create_project_page_data,
+ _fetch_object_group_from_eros,
+ build_object_group_catalog_document,
+ build_project_catalog_document,
+)
+
+
@pytest.mark.django_db
def test_build_project_catalog_document():
    """The project document must carry all top-level and page-data fields."""
    run = factories.RunFactory(project=factories.ProjectWithLeaderFactory())
    project = run.project
    objectgroups = factories.ObjectGroupFactory.create_batch(3)
    for objectgroup in objectgroups:
        run.run_object_groups.add(objectgroup)

    document = build_project_catalog_document(
        project=project,
        materials=["or", "verre"],
        leader=project.leader,
        object_groups=objectgroups,
        object_group_locations=[{"lat": 0, "lon": 0}],
        runs=[run],
    )

    assert document.id == f"project-{project.id}"
    assert document.category == "project"
    assert document.name == project.name
    assert document.slug == project.slug
    assert document.materials == ["or", "verre"]
    assert document.comments == project.comments
    assert document.status == str(project.status)
    assert document.created == project.created
    assert document.project_page_data.leader == {
        "user_first_name": document.project_page_data.leader.user_first_name,
        "user_last_name": document.project_page_data.leader.user_last_name,
    }
    assert document.project_page_data.runs == [
        {
            "id": run.id,
            "beamline": run.beamline,
            "start_date": run.start_date,
            "label": run.label,
            "energy_in_kev": run.energy_in_keV,
            "particle_type": run.particle_type,
            "project_slug": run.project.slug,
            "is_data_embargoed": run.is_data_embargoed,
        }
    ]
    assert document.project_page_data.object_groups == [
        {
            "c2rmf_id": objectgroup.c2rmf_id,
            "collection": objectgroup.collection,
            "dating_era_label": objectgroup.dating_era.label,
            "dating_period_label": objectgroup.dating_period.label,
            "discovery_place_label": (
                objectgroup.discovery_place.label
                if objectgroup.discovery_place
                else None
            ),
            "id": objectgroup.id,
            "inventory": objectgroup.inventory,
            "label": objectgroup.label,
            "materials": objectgroup.materials,
            # NOTE: "object" shadows the builtin here; kept as-is.
            "objects": [
                {
                    "label": object.label,
                    "inventory": object.inventory,
                    "collection": object.collection,
                }
                for object in objectgroup.object_set.all()
            ],
        }
        for objectgroup in objectgroups
    ]
    assert document.discovery_place_points == [{"lat": 0, "lon": 0}]
    assert document.is_data_available == project.is_data_available
+
+
@pytest.mark.django_db
def test_build_object_group_catalog_document():
    """The object document must embed runs/projects and thesaurus dating ids."""
    dating_period = factories.PeriodFactory(concept_id=123)
    dating_era = factories.EraFactory(concept_id=345)
    discovery_place = factories.LocationFactory()
    object_group = factories.ObjectGroupFactory(
        dating_period=dating_period,
        dating_era=dating_era,
        discovery_place_location=discovery_place,
        inventory="123",
    )
    factories.ObjectFactory.create_batch(3, group=object_group, inventory="456")

    project = factories.ProjectWithLeaderFactory()
    run = factories.RunFactory(project=project)
    run.run_object_groups.add(object_group)

    # Thesaurus lookups are network calls -- mock the parent-id fetchers.
    with mock.patch(
        "lab.elasticsearch.catalog.fetch_period_parent_ids_from_id",
        return_value=[345, 567],
    ) as fetch_period_mock:
        with mock.patch(
            "lab.elasticsearch.catalog.fetch_era_parent_ids_from_id",
            return_value=[890, 445],
        ) as fetch_era_mock:
            document = build_object_group_catalog_document(
                object_group=object_group,
                runs=[run],
                projects=[run.project],
                is_data_available=True,
            )

    fetch_period_mock.assert_called_once_with(123)
    fetch_era_mock.assert_called_once_with(345)

    assert document.object_page_data.runs == [
        {
            "id": run.id,
            "beamline": run.beamline,
            "start_date": run.start_date,
            "label": run.label,
            "energy_in_kev": run.energy_in_keV,
            "particle_type": run.particle_type,
            "project_slug": run.project.slug,
            "is_data_embargoed": run.is_data_embargoed,
        }
    ]
    assert document.object_page_data.projects == [
        {
            "name": project.name,
            "slug": project.slug,
            "leader": {
                "user_first_name": project.leader.user.first_name,
                "user_last_name": project.leader.user.last_name,
            },
        }
    ]

    assert document.meta.id == f"object-{object_group.id}"
    assert document.id == f"object-{object_group.id}"
    assert document.category == "object"
    assert document.slug == slugify(object_group.label) + f"-{object_group.id}"
    assert document.is_data_available is True
    assert document.materials == object_group.materials
    assert document.collection == object_group.collection
    assert document.c2rmf_id == object_group.c2rmf_id
    assert document.inventory_number == object_group.inventory
    # Inventory numbers aggregate the group's and its objects' inventories.
    assert set(document.inventory_numbers) == set(["456", "123"])
    assert document.name == object_group.label
    assert len(document.objects) == object_group.object_set.count()
    assert all(
        field in doc
        for doc in document.objects
        for field in ["label", "inventory", "collection"]
    )

    assert document.discovery_place_label == object_group.discovery_place_location.label
    assert document.discovery_place_point == {
        "lat": object_group.discovery_place_location.latitude,
        "lon": object_group.discovery_place_location.longitude,
    }
    assert document.discovery_place_points == [
        {
            "lat": object_group.discovery_place_location.latitude,
            "lon": object_group.discovery_place_location.longitude,
        }
    ]

    assert document.dating_period_label == dating_period.label
    assert document.dating_period_theso_huma_num_id == 123
    assert document.dating_period_theso_huma_num_parent_ids == [345, 567]

    assert document.dating_era_label == dating_era.label
    assert document.dating_era_theso_huma_num_id == 345
    assert document.dating_era_theso_huma_num_parent_ids == [890, 445]
+
+
@pytest.mark.django_db
@mock.patch("lab.elasticsearch.catalog.fetch_full_objectgroup_from_eros")
def test_build_object_group_calls_eros_if_c2rmf_id(eros_mock: mock.MagicMock):
    # An object group with a C2RMF id must be enriched from EROS first.
    object_group = factories.ObjectGroupFactory(
        c2rmf_id="abc",
    )
    eros_mock.return_value = object_group
    build_object_group_catalog_document(
        object_group=object_group,
        runs=[],
        projects=[],
        is_data_available=True,
    )

    eros_mock.assert_called_once_with(c2rmf_id="abc", object_group=object_group)
+
+
@pytest.mark.django_db
@mock.patch("lab.elasticsearch.catalog.fetch_full_objectgroup_from_eros")
def test_create_project_page_data_calls_eros_if_c2rmf_id(eros_mock: mock.MagicMock):
    # Project page data must also enrich C2RMF-identified groups from EROS.
    object_group = factories.ObjectGroupFactory(
        c2rmf_id="abc",
    )
    eros_mock.return_value = object_group
    _create_project_page_data(
        runs=[factories.RunFactory()], object_groups=[object_group], leader=None
    )

    eros_mock.assert_called_once_with(c2rmf_id="abc", object_group=object_group)
+
+
@pytest.mark.django_db
@mock.patch("lab.elasticsearch.catalog.fetch_full_objectgroup_from_eros")
def test_fetch_object_group_from_eros_if_eros_fails_returns_og(
    eros_mock: mock.MagicMock,
):
    # On an EROS HTTP failure the original object group is returned unchanged.
    object_group = factories.ObjectGroupFactory(
        c2rmf_id="abc",
    )
    eros_mock.side_effect = ErosHTTPError
    assert (
        _fetch_object_group_from_eros(
            c2rmf_id="abc",
            object_group=object_group,
        )
        == object_group
    )
diff --git a/lab/elasticsearch/tests/test_client.py b/lab/elasticsearch/tests/test_client.py
new file mode 100644
index 000000000..53d6baa72
--- /dev/null
+++ b/lab/elasticsearch/tests/test_client.py
@@ -0,0 +1,113 @@
+from unittest import mock
+
+import pytest
+from pytest import fixture
+
+from lab.tests import factories as lab_factories
+
+from ..client import CatalogClient
+from ._mock import BASE_SEARCH_PARAMS, BASE_SEARCH_PARAMS_RELATED_QUERY
+
+
@fixture(name="catalog_client", scope="function")
def catalog_client_fixture():
    # Patch OpenSearch so CatalogClient() builds against a mock transport.
    # NOTE(review): CatalogClient is a Singleton, so after the first test the
    # same cached instance (and mock client) is reused -- confirm cross-test
    # call history cannot leak into assert_called_with checks.
    with mock.patch("lab.elasticsearch.client.OpenSearch"):
        yield CatalogClient()
+
+
@fixture(autouse=True)
def set_es_settings(settings):
    # CatalogClient.__init__ raises if any of these three settings is empty.
    settings.ELASTICSEARCH_USERNAME = "user"
    settings.ELASTICSEARCH_PASSWORD = "password"
    settings.ELASTICSEARCH_HOST = "http://localhost:9200"
+
+
def test_search(catalog_client: CatalogClient):
    # search() must translate the params into the expected ES body + sort.
    catalog_client.search(**BASE_SEARCH_PARAMS)  # type: ignore
    catalog_client.client.search.assert_called_with(
        index="catalog",
        body={
            **BASE_SEARCH_PARAMS_RELATED_QUERY,
            "sort": [{"created": {"order": "asc"}}],
        },
    )
+
+
def test_aggregate_terms(catalog_client: CatalogClient):
    # aggregate_terms() forwards the terms-aggregation body to the client.
    catalog_client.aggregate_terms("field", query="query", exclude=["exclude"])
    catalog_client.client.search.assert_called_with(
        {
            "size": 0,
            "aggs": {
                "field": {
                    "terms": {
                        "field": "field",
                        "include": ".*query.*",
                        "exclude": "exclude",
                    }
                }
            },
        }
    )
+
+
def test_aggregate_date(catalog_client: CatalogClient):
    # aggregate_date() forwards the date-histogram body to the client.
    catalog_client.aggregate_date("field", "interval")
    catalog_client.client.search.assert_called_with(
        {
            "size": 0,
            "aggs": {
                "field": {
                    "date_histogram": {
                        "field": "field",
                        "calendar_interval": "interval",
                    }
                }
            },
        }
    )
+
+
@pytest.mark.django_db
def test_index_from_projects(catalog_client: CatalogClient):
    """index_from_projects builds one project doc and one object-group doc."""
    dating_period = lab_factories.PeriodFactory(concept_id=123)
    discovery_place = lab_factories.LocationFactory()
    object_group = lab_factories.ObjectGroupFactory(
        dating_period=dating_period,
        discovery_place_location=discovery_place,
        inventory="123",
    )
    lab_factories.ObjectFactory.create_batch(3, group=object_group, inventory="456")

    project = lab_factories.ProjectWithLeaderFactory()
    run = lab_factories.RunFactory(project=project)
    run.run_object_groups.add(object_group)

    # Mock both document builders to assert on the exact forwarded payloads.
    with mock.patch(
        "lab.elasticsearch.client.build_object_group_catalog_document"
    ) as build_object_group_mock:
        with mock.patch(
            "lab.elasticsearch.client.build_project_catalog_document"
        ) as build_project_group_mock:
            catalog_client.index_from_projects(projects=[project])

            build_project_group_mock.assert_called_once_with(
                project=project,
                materials=list(set(object_group.materials)),
                leader=project.leader,
                object_groups=[object_group],
                object_group_locations=[
                    {
                        "lat": float(discovery_place.latitude),
                        "lon": float(discovery_place.longitude),
                    }
                ],
                runs=[run],
            )

            build_object_group_mock.assert_called_once_with(
                object_group=object_group,
                projects=[project],
                runs=[run],
                is_data_available=project.is_data_available,
            )
diff --git a/lab/elasticsearch/tests/test_queries.py b/lab/elasticsearch/tests/test_queries.py
new file mode 100644
index 000000000..ea100ac74
--- /dev/null
+++ b/lab/elasticsearch/tests/test_queries.py
@@ -0,0 +1,53 @@
+from unittest import mock
+
+from .. import queries
+from ._mock import BASE_SEARCH_PARAMS, BASE_SEARCH_PARAMS_RELATED_QUERY
+
+
+def test_build_query_with_all_params():
+ query = queries.Query().build_query(BASE_SEARCH_PARAMS)
+ assert query == BASE_SEARCH_PARAMS_RELATED_QUERY
+
+
+def test_build_query_with_no_params_return_match_all():
+ query = queries.Query().build_query({})
+ assert query == {
+ "query": {"match_all": {}},
+ "sort": [{"created": {"order": "desc"}}],
+ "from": 0,
+ "size": 10,
+ }
+
+
+def test_terms_agg_query():
+ query = queries.terms_agg("materials", query="or", exclude=["argent"])
+ assert query == {
+ "size": 0,
+ "aggs": {
+ "materials": {
+ "terms": {
+ "field": "materials",
+ "include": ".*or.*",
+ "exclude": "argent",
+ }
+ }
+ },
+ }
+
+
+def test_date_historiogram_agg_query():
+ query = queries.date_historiogram_agg("created", "year")
+ assert query == {
+ "size": 0,
+ "aggs": {
+ "created": {
+ "date_histogram": {"field": "created", "calendar_interval": "year"}
+ }
+ },
+ }
+
+
+def test_filter_query():
+ with mock.patch("lab.elasticsearch.queries.Query.build_query") as mock_build_query:
+ queries.filter_query({"size": 50, "from": 40, "sort": "desc"})
+ mock_build_query.assert_called_with({}, size=50, _from=40, sort="desc")
diff --git a/lab/management/commands/index_elasticsearch_catalog.py b/lab/management/commands/index_elasticsearch_catalog.py
new file mode 100644
index 000000000..99f115a79
--- /dev/null
+++ b/lab/management/commands/index_elasticsearch_catalog.py
@@ -0,0 +1,22 @@
+import logging
+
+from django.core.management.base import BaseCommand
+
+from ...elasticsearch.client import CatalogClient
+from ...models import Project
+
+logger = logging.getLogger(__name__)
+
+
+class Command(BaseCommand):
+ def handle(self, *args, **options):
+ projects = (
+ Project.objects.only_finished()
+ .only_public()
+ .order_by("-created")
+ .prefetch_related("runs__run_object_groups")
+ .distinct()
+ )
+ self.stdout.write(f"Found {len(projects)} projects to index")
+
+ CatalogClient().index_from_projects(projects)
diff --git a/lab/migrations/0038_location_unique_geonames_id.py b/lab/migrations/0038_location_unique_geonames_id.py
new file mode 100644
index 000000000..60e0de9e3
--- /dev/null
+++ b/lab/migrations/0038_location_unique_geonames_id.py
@@ -0,0 +1,19 @@
+# Generated by Django 5.0.6 on 2024-06-11 08:35
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("lab", "0038_alter_run_filters_for_detector_he1_and_more"),
+ ]
+
+ operations = [
+ migrations.AddConstraint(
+ model_name="location",
+ constraint=models.UniqueConstraint(
+ fields=("geonames_id",), name="unique_geonames_id"
+ ),
+ ),
+ ]
diff --git a/lab/migrations/0039_era_remove_period_period_unique_theso_joconde_id_and_more.py b/lab/migrations/0039_era_remove_period_period_unique_theso_joconde_id_and_more.py
new file mode 100644
index 000000000..d51484e8d
--- /dev/null
+++ b/lab/migrations/0039_era_remove_period_period_unique_theso_joconde_id_and_more.py
@@ -0,0 +1,160 @@
+# Generated by Django 5.0.6 on 2024-06-21 10:08
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+
+def move_period_to_era(apps, _):
+ """We have to move period to era because we saved Opentheso Humanum 'era'
+ with the field name 'period'."""
+ period_model = apps.get_model("lab", "Period")
+ era_model = apps.get_model("lab", "Era")
+ objectgroup_model = apps.get_model("lab", "ObjectGroup")
+ for period in period_model.objects.all():
+ era = era_model.objects.create(
+ label=period.label, concept_id=period.theso_joconde_id
+ )
+ for og in objectgroup_model.objects.filter(dating=period):
+ og.dating_era = era
+ og.save()
+ period.delete()
+
+
+def reverse_move_period_to_era(apps, _):
+ period_model = apps.get_model("lab", "Period")
+ era_model = apps.get_model("lab", "Era")
+ objectgroup_model = apps.get_model("lab", "ObjectGroup")
+ for era in era_model.objects.all():
+ period = period_model.objects.create(
+ label=era.label, theso_joconde_id=era.concept_id
+ )
+ for og in objectgroup_model.objects.filter(dating_era=era):
+ og.dating = period
+ og.save()
+ era.delete()
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("lab", "0038_location_unique_geonames_id"),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name="Era",
+ fields=[
+ (
+ "id",
+ models.BigAutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ (
+ "concept_id",
+ models.CharField(
+ blank=True,
+ max_length=255,
+ null=True,
+ verbose_name="Concept ID on Open Theso",
+ ),
+ ),
+ ("label", models.CharField(max_length=255, verbose_name="Label")),
+ ],
+ options={
+ "abstract": False,
+ },
+ ),
+ migrations.AddConstraint(
+ model_name="era",
+ constraint=models.UniqueConstraint(
+ fields=("label", "concept_id"),
+ name="era_thesorus_concept_unique_label_concept_id",
+ ),
+ ),
+ migrations.AddConstraint(
+ model_name="era",
+ constraint=models.UniqueConstraint(
+ fields=("concept_id",), name="era_thesorus_concept_unique_concept_id"
+ ),
+ ),
+ migrations.AddField(
+ model_name="objectgroup",
+ name="dating_era",
+ field=models.ForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to="lab.era",
+ verbose_name="Era",
+ ),
+ ),
+ migrations.RunSQL(
+ "SET CONSTRAINTS ALL IMMEDIATE;", "SET CONSTRAINTS ALL DEFERRED;"
+ ),
+ migrations.RunPython(
+ move_period_to_era,
+ reverse_move_period_to_era,
+ ),
+ migrations.RunSQL(
+ "SET CONSTRAINTS ALL DEFERRED;", "SET CONSTRAINTS ALL IMMEDIATE;"
+ ),
+ migrations.RemoveConstraint(
+ model_name="period",
+ name="period_unique_theso_joconde_id",
+ ),
+ migrations.RemoveConstraint(
+ model_name="period",
+ name="period_unique_label_theso_joconde_id",
+ ),
+ migrations.RemoveField(
+ model_name="objectgroup",
+ name="dating",
+ ),
+ migrations.RemoveField(
+ model_name="period",
+ name="theso_joconde_id",
+ ),
+ migrations.AddField(
+ model_name="objectgroup",
+ name="dating_period",
+ field=models.ForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ to="lab.period",
+ verbose_name="Period",
+ ),
+ ),
+ migrations.AddField(
+ model_name="period",
+ name="concept_id",
+ field=models.CharField(
+ blank=True,
+ max_length=255,
+ null=True,
+ verbose_name="Concept ID on Open Theso",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="period",
+ name="label",
+ field=models.CharField(max_length=255, verbose_name="Label"),
+ ),
+ migrations.AddConstraint(
+ model_name="period",
+ constraint=models.UniqueConstraint(
+ fields=("label", "concept_id"),
+ name="period_thesorus_concept_unique_label_concept_id",
+ ),
+ ),
+ migrations.AddConstraint(
+ model_name="period",
+ constraint=models.UniqueConstraint(
+ fields=("concept_id",), name="period_thesorus_concept_unique_concept_id"
+ ),
+ ),
+ ]
diff --git a/lab/nav.py b/lab/nav.py
new file mode 100644
index 000000000..dcc692fe1
--- /dev/null
+++ b/lab/nav.py
@@ -0,0 +1,65 @@
+import typing
+
+from django.http import HttpRequest
+from django.urls import reverse
+from django.utils.translation import gettext as _
+
+from lab.permissions import is_lab_admin
+
+
+class NavItemJson(typing.TypedDict):
+ title: str
+ href: str
+ iconName: str
+ extraPath: list[str] | None
+ exactPath: bool
+ badge: typing.NotRequired[int | None]
+
+
+def get_nav_items(request: HttpRequest) -> list[NavItemJson]:
+ items: list[NavItemJson] = [
+ {
+ "title": str(_("Projects")),
+ "href": reverse("admin:lab_project_changelist"),
+ "iconName": "fr-icon-survey-line",
+ "extraPath": [reverse("admin:lab_run_changelist")],
+ "exactPath": False,
+ }
+ ]
+
+ if request.user:
+ if is_lab_admin(request.user):
+ items.insert(
+ 0,
+ {
+ "title": str(_("Dashboard")),
+ "href": reverse("admin:index"),
+ "iconName": "fr-icon-calendar-line",
+ "exactPath": True,
+ "extraPath": [],
+ },
+ )
+ items.append(
+ {
+ "title": str(_("Users")),
+ "href": reverse("admin:euphro_auth_user_changelist"),
+ "iconName": "fr-icon-user-line",
+ "exactPath": False,
+ "extraPath": [
+ reverse("admin:euphro_auth_userinvitation_changelist")
+ ],
+ }
+ )
+ else: # non-admin user
+ items.append(
+ {
+ "title": str(_("Account")),
+ "href": reverse(
+ "admin:euphro_auth_user_change", args=[request.user.id]
+ ),
+ "iconName": "fr-icon-user-line",
+ "exactPath": False,
+ "extraPath": [],
+ }
+ )
+ return items
diff --git a/lab/objects/assets/js/services.ts b/lab/objects/assets/js/services.ts
index d2e4d47a5..0380dfc0c 100644
--- a/lab/objects/assets/js/services.ts
+++ b/lab/objects/assets/js/services.ts
@@ -5,7 +5,7 @@ interface ObjectGroupResponseElement {
id: number;
label: string;
object_count: number;
- dating: string;
+ dating: string; // this is linked to Django ObjectGroup.dating_era
materials: string[];
}
@@ -32,6 +32,9 @@ export async function fetchRunObjectGroups(
}));
}
+/**
+ * Fetches the object groups available to import for a run.
+ */
export async function fetchAvailableObjectGroups(
runId: string,
): Promise {
diff --git a/lab/objects/c2rmf.py b/lab/objects/c2rmf.py
index b6b8c60fe..a8030cb53 100644
--- a/lab/objects/c2rmf.py
+++ b/lab/objects/c2rmf.py
@@ -1,10 +1,13 @@
import os
import typing
+from functools import lru_cache
from typing import Any
import requests
-from ..models import ObjectGroup, Period
+from lab.thesauri.models import Era
+
+from ..models import ObjectGroup
class ErosHTTPError(requests.RequestException):
@@ -50,6 +53,7 @@ class ErosData(typing.TypedDict):
images: typing.NotRequired[list[ErosImage]]
+@lru_cache
def _fetch_object_group_from_eros(c2rmf_id: str) -> ErosData | None:
"""Fetch object group from EROS."""
token = os.environ["EROS_HTTP_TOKEN"]
@@ -90,7 +94,7 @@ def fetch_full_objectgroup_from_eros(
updated_og.label = data["title"]
if data.get("dtfrom") or data.get("period"):
dating_label = data.get("dtfrom") or data["period"]
- updated_og.dating = Period(label=dating_label)
+ updated_og.dating_era = Era(label=dating_label)
updated_og.collection = data.get("collection") or ""
updated_og.inventory = data.get("inv") or ""
updated_og.materials = (data.get("support") or "").split(" / ")
diff --git a/lab/objects/forms.py b/lab/objects/forms.py
index 57a32bad2..e2b12ffcd 100644
--- a/lab/objects/forms.py
+++ b/lab/objects/forms.py
@@ -9,7 +9,7 @@
from . import widgets
from .c2rmf import ErosHTTPError, fetch_partial_objectgroup_from_eros
-from .models import Location, ObjectGroup, Period
+from .models import Era, Location, ObjectGroup, Period
logger = logging.getLogger(__name__)
@@ -39,7 +39,8 @@ class Meta:
"add_type",
"label",
"object_count",
- "dating",
+ "dating_era",
+ "dating_period",
"materials",
"discovery_place_location",
"inventory",
@@ -51,20 +52,22 @@ class Meta:
Click on suggestion or add a comma to add to the list."
),
"discovery_place_location": _("Start typing to search for a location"),
- "dating": _("Start typing to search for a period"),
+ "dating_period": _("Start typing to search for a period"),
}
widgets = {
"materials": TagsInput(),
"discovery_place_location": widgets.LocationAutoCompleteWidget(),
- "dating": widgets.DatingAutoCompleteWidget(),
+ "dating_period": widgets.PeriodDatingAutoCompleteWidget(),
+ "dating_era": widgets.EraDatingAutoCompleteWidget(),
}
def __init__(self, *args, instance: ObjectGroup | None = None, **kwargs):
super().__init__(*args, **kwargs, instance=instance) # type: ignore[misc]
# We must check if attribute is in self.fields because it can be removed
# in admin view when page is readlonly.
- if "dating" in self.fields:
- self.fields["dating"].required = True
+ for field_name in ["dating_period", "dating_era"]:
+ if field_name in self.fields:
+ self.fields[field_name].required = True
# Set object count initial value
if "object_count" in self.fields:
self.fields["object_count"].initial = 1
@@ -82,11 +85,12 @@ def __init__(self, *args, instance: ObjectGroup | None = None, **kwargs):
self.fields["discovery_place_location"].widget.instance = (
instance.discovery_place_location
)
- self.fields["dating"].widget.instance = instance.dating
+ self.fields["dating_period"].widget.instance = instance.dating_period
+ self.fields["dating_era"].widget.instance = instance.dating_era
def full_clean(self):
self.try_populate_discovery_place_location()
- self.try_populate_dating()
+ self.try_populate_dating_models()
return super().full_clean()
def try_populate_discovery_place_location(self):
@@ -116,16 +120,17 @@ def try_populate_discovery_place_location(self):
) # make a copy of the data because self.data is immutable
self.data["discovery_place_location"] = location.pk
- def try_populate_dating(self):
- if not self.data.get("dating") and self.data.get("dating__label"):
- period, _ = Period.objects.get_or_create(
- label=self.data["dating__label"],
- theso_joconde_id=self.data.get("dating__theso_joconde_id"),
- )
- self.data = (
- self.data.copy()
- ) # make a copy of the data because self.data is immutable
- self.data["dating"] = period.pk
+ def try_populate_dating_models(self):
+ for field_name, theso_model in [("dating_period", Period), ("dating_era", Era)]:
+ if not self.data.get(field_name) and self.data.get(f"{field_name}__label"):
+ period, _ = theso_model.objects.get_or_create(
+ label=self.data[f"{field_name}__label"],
+ concept_id=self.data.get(f"{field_name}__concept_id"),
+ )
+ self.data = (
+ self.data.copy()
+ ) # make a copy of the data because self.data is immutable
+ self.data[field_name] = period.pk
def is_multipart(self) -> Any:
if not self.instance.id:
@@ -197,7 +202,8 @@ class Meta:
"c2rmf_id",
"label",
"object_count",
- "dating",
+ "dating_era",
+ "dating_period",
"materials",
"discovery_place_location",
"inventory",
diff --git a/lab/objects/models.py b/lab/objects/models.py
index 625e250d3..19ace09d2 100644
--- a/lab/objects/models.py
+++ b/lab/objects/models.py
@@ -4,27 +4,7 @@
from shared.models import TimestampedModel
-
-class Period(models.Model):
- label = models.CharField(_("Name"), max_length=255)
-
- theso_joconde_id = models.CharField(
- "Joconde Thesorus ID", max_length=255, null=True, blank=True
- )
-
- def __str__(self) -> str:
- return str(self.label)
-
- class Meta:
- constraints = [
- models.UniqueConstraint(
- fields=["label", "theso_joconde_id"],
- name="period_unique_label_theso_joconde_id",
- ),
- models.UniqueConstraint(
- fields=["theso_joconde_id"], name="period_unique_theso_joconde_id"
- ),
- ]
+from ..thesauri.models import Era, Period
class Location(models.Model):
@@ -40,6 +20,7 @@ class Meta:
models.UniqueConstraint(
fields=["latitude", "longitude"], name="unique_lat_long"
),
+ models.UniqueConstraint(fields=["geonames_id"], name="unique_geonames_id"),
]
def __str__(self) -> str:
@@ -65,10 +46,17 @@ class ObjectGroup(TimestampedModel):
max_length=255,
blank=True,
)
- dating = models.ForeignKey(
+ dating_period = models.ForeignKey(
Period,
on_delete=models.SET_NULL,
- verbose_name=_("Dating"),
+ verbose_name=_("Period"),
+ blank=True,
+ null=True,
+ )
+ dating_era = models.ForeignKey(
+ Era,
+ on_delete=models.SET_NULL,
+ verbose_name=_("Era"),
blank=True,
null=True,
)
@@ -103,9 +91,10 @@ def __str__(self) -> str:
"object_count": self.object_count
}
label = "({}) {}".format(count_str, label)
- materials = ", ".join(self.materials)
-
- return f"{label} - {self.dating} - {materials}"
+ if self.materials:
+ materials = ", ".join(self.materials)
+ label = f"{label} - {materials}"
+ return label
class Meta:
verbose_name = _("Object / Sample")
diff --git a/lab/objects/tests/forms/test_objectgroup_form.py b/lab/objects/tests/forms/test_objectgroup_form.py
index b335c72e1..d605b7b78 100644
--- a/lab/objects/tests/forms/test_objectgroup_form.py
+++ b/lab/objects/tests/forms/test_objectgroup_form.py
@@ -1,6 +1,8 @@
import pytest
from django.forms import widgets
+from lab.thesauri.models import Era
+
from ...forms import ObjectGroupAddChoices, ObjectGroupForm
from ...models import Location, ObjectGroup, Period
@@ -120,31 +122,46 @@ def test_try_populate_discovery_place_location_updates_lat_and_long():
@pytest.mark.django_db
def test_try_populate_dating_create_period():
label = "Moyen âge"
- theso_joconde_id = 1234
+ concept_id = 1234
form = ObjectGroupForm()
form.data = {
- "dating__label": label,
- "dating__theso_joconde_id": theso_joconde_id,
+ "dating_period__label": label,
+ "dating_period__concept_id": concept_id,
+ }
+
+ form.try_populate_dating_models()
+
+ dating_period = Period.objects.get(label=label, concept_id=concept_id)
+ assert form.data["dating_period"] == dating_period.pk
+
+
+@pytest.mark.django_db
+def test_try_populate_dating_find_period():
+ dating = Period.objects.create(label="Moyen âge", concept_id=1234)
+
+ form = ObjectGroupForm()
+ form.data = {
+ "dating_period__label": "Moyen âge",
+ "dating_period__concept_id": 1234,
}
- form.try_populate_dating()
+ form.try_populate_dating_models()
- dating = Period.objects.get(label=label, theso_joconde_id=theso_joconde_id)
- assert form.data["dating"] == dating.pk
+ assert form.data["dating_period"] == dating.pk
@pytest.mark.django_db
-def test_try_populate_dating_find_dating():
- dating = Period.objects.create(label="Moyen âge", theso_joconde_id=1234)
+def test_try_populate_dating_find_era():
+ dating = Era.objects.create(label="IIe siècle", concept_id=1234)
form = ObjectGroupForm()
form.data = {
- "dating__label": "Moyen âge",
- "dating__theso_joconde_id": 1234,
+ "dating_era__label": "IIe siècle",
+ "dating_era__concept_id": 1234,
}
- form.try_populate_dating()
+ form.try_populate_dating_models()
- assert form.data["dating"] == dating.pk
+ assert form.data["dating_era"] == dating.pk
diff --git a/lab/objects/tests/test_c2mrf.py b/lab/objects/tests/test_c2mrf.py
index da28ebe0f..af02f726d 100644
--- a/lab/objects/tests/test_c2mrf.py
+++ b/lab/objects/tests/test_c2mrf.py
@@ -4,7 +4,7 @@
fetch_full_objectgroup_from_eros,
fetch_partial_objectgroup_from_eros,
)
-from ..models import ObjectGroup, Period
+from ..models import Era, ObjectGroup
EROS_RESPONSE = {
"ctechnique": "majolique lustrée",
@@ -39,7 +39,7 @@ def test_fetch_full_objectgroup_from_eros(_):
assert og.object_count == 1
assert og.c2rmf_id == "C2RMF65980"
assert og.label == "Majolique"
- assert isinstance(og.dating, Period)
- assert og.dating.label == "1500"
+ assert isinstance(og.dating_era, Era)
+ assert og.dating_era.label == "1500"
assert og.inventory == "ODUT 01107"
assert og.materials == ["terre cuite"]
diff --git a/lab/objects/tests/test_object_formset.py b/lab/objects/tests/test_object_formset.py
index 6c25e0dfe..78f97a428 100644
--- a/lab/objects/tests/test_object_formset.py
+++ b/lab/objects/tests/test_object_formset.py
@@ -147,7 +147,7 @@ def setUp(self):
self.inline = ObjectInline(ObjectGroup, admin_site=AdminSite())
self.objectgroup = ObjectGroup(
label="Object group",
- dating=Period.objects.get_or_create(label="XIXe")[0],
+ dating_period=Period.objects.get_or_create(label="XIXe")[0],
materials=["wood"],
object_count=0,
)
diff --git a/lab/objects/tests/test_objectgroup_admin.py b/lab/objects/tests/test_objectgroup_admin.py
index 659f0102a..4219f3bf9 100644
--- a/lab/objects/tests/test_objectgroup_admin.py
+++ b/lab/objects/tests/test_objectgroup_admin.py
@@ -5,6 +5,7 @@
from django.test import RequestFactory, TestCase
from django.urls import reverse
+from euphro_auth.tests import factories as auth_factories
from lab.tests import factories
from ..admin import ObjectGroupAdmin
@@ -33,7 +34,7 @@ def test_change_objectgroup_is_allowed_if_project_member(self):
assert self.objectgroup_admin.has_change_permission(request, self.object_group)
def test_change_objectgroup_is_forbidden_for_non_participant(self):
- user = factories.StaffUserFactory()
+ user = auth_factories.StaffUserFactory()
request = RequestFactory().get(
reverse(CHANGE_VIEWNAME, args=[self.object_group.id])
)
diff --git a/lab/objects/widgets.py b/lab/objects/widgets.py
index d95c40935..e1a1dd7f3 100644
--- a/lab/objects/widgets.py
+++ b/lab/objects/widgets.py
@@ -2,9 +2,10 @@
from django.forms import widgets
+from lab.thesauri.models import ThesorusConceptModel
from lab.widgets import AutoCompleteWidget
-from .models import Location, Period
+from .models import Era, Location, Period
class ImportFromInput(widgets.TextInput):
@@ -68,13 +69,41 @@ class Media:
class DatingAutoCompleteWidget(AutoCompleteWidget):
+ model: type[ThesorusConceptModel] | None = None
+
template_name = "widgets/dating_autocomplete_widget.html"
+ def get_context(
+ self, name: str, value: Any, attrs: dict[str, Any] | None
+ ) -> dict[str, Any]:
+ context = super().get_context(name, value, attrs)
+ if self.model is None:
+ raise ValueError("model is required")
+ context["widget"]["field_name"] = self.model.__name__.lower()
+ # pylint: disable=no-member
+ context["widget"]["opentheso_theso_id"] = self.model.OPENTHESO_THESO_ID
+ return context
+
+
+class PeriodDatingAutoCompleteWidget(DatingAutoCompleteWidget):
model = Period
+ typeahead_list_webcomponent_name = "period-type-ahead"
class Media:
js = (
- "web-components/period-type-ahead.js",
+ "web-components/dating-open-theso-type-ahead.js",
"js/widgets/dating-autocomplete-widget.js",
)
css = {"all": ("css/widgets/autocomplete-widget.css",)}
+
+
+class EraDatingAutoCompleteWidget(DatingAutoCompleteWidget):
+ model = Era
+ typeahead_list_webcomponent_name = "era-type-ahead"
+
+ class Media:
+ js = (
+ "web-components/dating-open-theso-type-ahead.js",
+ "js/widgets/era-autocomplete-widget.js",
+ )
+ css = {"all": ("css/widgets/autocomplete-widget.css",)}
diff --git a/lab/projects/models.py b/lab/projects/models.py
index ea0a16e75..d49dd05f3 100644
--- a/lab/projects/models.py
+++ b/lab/projects/models.py
@@ -4,7 +4,6 @@
from django.conf import settings
from django.core.exceptions import ValidationError
from django.db import models
-from django.db.models import Q
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from slugify import slugify
@@ -27,10 +26,7 @@ def only_finished(self):
return self.filter(runs__end_date__lt=timezone.now())
def only_public(self):
- not_embargoed_projects = Run.objects.filter(
- Q(embargo_date__lt=timezone.now())
- ).values_list("project_id", flat=True)
- return self.filter(confidential=False, id__in=not_embargoed_projects)
+ return self.filter(confidential=False, runs__isnull=False)
def filter_by_status(self, status: "Project.Status"):
if status == Project.Status.TO_SCHEDULE:
diff --git a/lab/projects/tests/test_admin.py b/lab/projects/tests/test_admin.py
index f3dc5f54f..6d1948df3 100644
--- a/lab/projects/tests/test_admin.py
+++ b/lab/projects/tests/test_admin.py
@@ -11,13 +11,9 @@
from django.utils.formats import date_format
from django.utils.translation import gettext_lazy as _
+from euphro_auth.tests.factories import LabAdminUserFactory
from lab.models import Institution
-from lab.tests.factories import (
- LabAdminUserFactory,
- ProjectFactory,
- ProjectWithLeaderFactory,
- RunFactory,
-)
+from lab.tests.factories import ProjectFactory, ProjectWithLeaderFactory, RunFactory
from ..admin import (
BeamTimeRequestInline,
diff --git a/lab/runs/models.py b/lab/runs/models.py
index d9c554a42..298aa73ea 100644
--- a/lab/runs/models.py
+++ b/lab/runs/models.py
@@ -11,6 +11,9 @@ class RunManager(models.Manager):
def only_finished(self):
return super().get_queryset().filter(end_date__lt=timezone.now())
+ def only_not_embargoed(self):
+ return super().get_queryset().filter(embargo_date__lte=timezone.now())
+
class Run(TimestampedModel, MethodModel):
class Meta:
@@ -87,3 +90,7 @@ def next_status(self) -> Status:
except IndexError as exception:
raise AttributeError("Run has no next status") from exception
return next_status
+
+ @property
+ def is_data_embargoed(self):
+ return self.embargo_date is None or self.embargo_date > timezone.now().date()
diff --git a/lab/runs/tests/test_admin.py b/lab/runs/tests/test_admin.py
index ec82dbbc4..54163c98c 100644
--- a/lab/runs/tests/test_admin.py
+++ b/lab/runs/tests/test_admin.py
@@ -9,6 +9,7 @@
from django.urls import reverse
from django.utils import timezone
+from euphro_auth.tests import factories as auth_factories
from lab.projects.models import Project
from lab.tests import factories
@@ -21,7 +22,7 @@ class TestRunAdminPermissions(TestCase):
def setUp(self):
self.run_admin = RunAdmin(model=Run, admin_site=AdminSite())
self.project = factories.ProjectWithLeaderFactory()
- self.lab_admin = factories.LabAdminUserFactory()
+ self.lab_admin = auth_factories.LabAdminUserFactory()
self.member = get_user_model().objects.get(participation__project=self.project)
self.new_run = factories.RunFactory(
status=Run.Status.CREATED, project=self.project
@@ -70,9 +71,9 @@ def setUp(self):
self.run_admin = RunAdmin(model=Run, admin_site=AdminSite())
self.project = factories.ProjectWithLeaderFactory()
self.leader_user = self.project.leader.user
- self.member_user = factories.StaffUserFactory()
+ self.member_user = auth_factories.StaffUserFactory()
self.project.members.add(self.member_user)
- self.lab_admin_user = factories.LabAdminUserFactory()
+ self.lab_admin_user = auth_factories.LabAdminUserFactory()
self.run = factories.RunFactory(project=self.project)
def test_project_is_readonly_when_change(self):
@@ -136,28 +137,28 @@ def test_get_project_when_editing(self):
def test_project_when_adding_from_project(self):
project = factories.ProjectFactory()
request = RequestFactory().get(f"{self.add_url}?project={project.id}")
- user = request.user = factories.StaffUserFactory()
+ user = request.user = auth_factories.LabAdminUserFactory()
project.members.add(user)
# pylint: disable=protected-access
assert self.run_admin._get_project(request) == project
def test_project_is_none_when_adding(self):
request = RequestFactory().get(reverse("admin:lab_run_add"))
- request.user = factories.StaffUserFactory()
+ request.user = auth_factories.LabAdminUserFactory()
# pylint: disable=protected-access
assert self.run_admin._get_project(request) is None
def test_project_is_none_when_not_a_member(self):
project = factories.ProjectFactory()
request = RequestFactory().get(f"{self.add_url}?project={project.id}")
- request.user = factories.StaffUserFactory()
+ request.user = auth_factories.StaffUserFactory()
# pylint: disable=protected-access
assert self.run_admin._get_project(request) is None
def test_project_when_admin(self):
project = factories.ProjectFactory()
request = RequestFactory().get(f"{self.add_url}?project={project.id}")
- request.user = factories.LabAdminUserFactory()
+ request.user = auth_factories.LabAdminUserFactory()
# pylint: disable=protected-access
assert self.run_admin._get_project(request) == project
@@ -165,8 +166,8 @@ def test_project_when_admin(self):
class TestRunAdminViewAsLeader(TestCase):
def setUp(self):
self.request_factory = RequestFactory()
- self.staff_user = factories.StaffUserFactory()
- self.project_leader_user = factories.StaffUserFactory()
+ self.staff_user = auth_factories.StaffUserFactory()
+ self.project_leader_user = auth_factories.StaffUserFactory()
self.project = factories.ProjectFactory()
self.project.participation_set.create(
user=self.project_leader_user, is_leader=True
@@ -262,7 +263,7 @@ def test_changing_run(self):
class TestRunAdminViewAsAdmin(TestCase):
def setUp(self):
self.request_factory = RequestFactory()
- self.admin_user = factories.LabAdminUserFactory()
+ self.admin_user = auth_factories.LabAdminUserFactory()
self.admin_project_1 = factories.ProjectFactory()
self.admin_project_1.participation_set.create(
user=self.admin_user, is_leader=True
@@ -356,7 +357,7 @@ def setUp(self):
)
+ f"?project={self.project.id}"
)
- self.admin_user = factories.LabAdminUserFactory()
+ self.admin_user = auth_factories.LabAdminUserFactory()
self.request.user = self.admin_user
self.request.resolver_match = MagicMock()
self.run_admin = RunAdmin(Run, admin_site=AdminSite())
@@ -381,7 +382,7 @@ class TestRunAdminScheduleAction(TestCase):
def setUp(self):
self.run = factories.RunFactory()
self.project = self.run.project
- self.admin_user = factories.LabAdminUserFactory()
+ self.admin_user = auth_factories.LabAdminUserFactory()
self.run_admin = RunAdmin(Run, admin_site=AdminSite())
now = timezone.now()
@@ -452,7 +453,7 @@ def test_schedule_action_as_non_admin(self):
),
data=self.correct_data,
)
- request.user = factories.StaffUserFactory()
+ request.user = auth_factories.StaffUserFactory()
self.run.project.members.add(request.user)
with pytest.raises(PermissionDenied):
diff --git a/lab/runs/tests/test_admin_change_state_action.py b/lab/runs/tests/test_admin_change_state_action.py
index ba3c0f4c5..81a17fa1e 100644
--- a/lab/runs/tests/test_admin_change_state_action.py
+++ b/lab/runs/tests/test_admin_change_state_action.py
@@ -10,6 +10,8 @@
from django.test.client import RequestFactory
from django.urls import reverse
+from euphro_auth.tests import factories as auth_factories
+
from ...tests import factories
from .. import admin_actions
from ..admin import RunAdmin
@@ -125,7 +127,7 @@ def test_no_method_selected_does_not_allow_for_ask_exec():
def test_member_can_ask_for_execution():
run = factories.RunFactory.build(status=Run.Status.CREATED)
- user = factories.StaffUserFactory.build()
+ user = auth_factories.StaffUserFactory.build()
admin_actions.validate_execute_needs_admin(user, run)
@@ -136,7 +138,7 @@ def test_member_can_ask_for_execution():
)
def test_admin_can_change_state(status):
run = factories.RunFactory.build(status=status)
- user = factories.LabAdminUserFactory.build()
+ user = auth_factories.LabAdminUserFactory.build()
admin_actions.validate_execute_needs_admin(user, run)
@@ -147,7 +149,7 @@ def test_admin_can_change_state(status):
)
def test_member_cant_do_more_than_ask_for_execution(status):
run = factories.RunFactory.build(status=status)
- user = factories.StaffUserFactory.build()
+ user = auth_factories.StaffUserFactory.build()
with pytest.raises(ValidationError):
admin_actions.validate_execute_needs_admin(user, run)
diff --git a/lab/runs/tests/test_models.py b/lab/runs/tests/test_models.py
index a4d7398f8..b3a6bc1e6 100644
--- a/lab/runs/tests/test_models.py
+++ b/lab/runs/tests/test_models.py
@@ -1,6 +1,7 @@
import pytest
from ...models import Run
+from ...tests import factories
def test_next_action():
@@ -10,3 +11,13 @@ def test_next_action():
assert Run(status=Run.Status.ONGOING).next_status() == Run.Status.FINISHED
with pytest.raises(AttributeError):
Run(status=Run.Status.FINISHED).next_status()
+
+
+@pytest.mark.django_db
+def test_not_embargoed_qs():
+ factories.RunFactory() # create embargoed run
+ not_embargoed_run = factories.NotEmbargoedRun()
+
+ qs = Run.objects.only_not_embargoed()
+ assert qs.count() == 1
+ assert qs.first() == not_embargoed_run
diff --git a/lab/schema.py b/lab/schema.py
index 2a612daef..fab9e6944 100644
--- a/lab/schema.py
+++ b/lab/schema.py
@@ -1,167 +1,18 @@
# pylint: disable=no-member,unused-argument
-import itertools
from datetime import datetime
from typing import Literal
import graphene
-from django.db.models import F, Prefetch, Q, Sum
+from django.db.models import F, Sum
from django.utils import timezone
-from graphene_django import DjangoObjectType
-from euphro_auth.models import User
-
-from .methods.dto import method_model_to_dto
-from .models import Institution, Object, ObjectGroup, Participation, Project, Run
+from .models import ObjectGroup, Project, Run
StatPeriodLiteral = Literal["all", "year"]
THIS_YEAR_START_DT = datetime(timezone.now().year, 1, 1)
-class ObjectType(DjangoObjectType):
- class Meta:
- model = Object
- fields = ("label", "collection")
-
-
-class ObjectGroupType(DjangoObjectType):
- data_available = graphene.Boolean(required=True)
- discovery_place = graphene.String()
- dating = graphene.String()
-
- class Meta:
- model = ObjectGroup
- fields = (
- "id",
- "c2rmf_id",
- "label",
- "materials",
- "discovery_place",
- "collection",
- "dating",
- "object_set",
- "runs",
- "data_available",
- "created",
- )
-
- def resolve_data_available(self, info):
- if not hasattr(self, "is_data_available"):
- raise AttributeError(
- "is_data_available must be annotated on the queryset in the \
- Quey section (lab.schema.Query). See \
- https://docs.djangoproject.com/en/4.2/topics/db/aggregation/"
- )
- return self.is_data_available
-
- def resolve_discovery_place(self, info):
- return self.discovery_place_location.label if self.discovery_place else ""
-
- def resolve_dating(self, info):
- return self.dating.label if self.dating else ""
-
-
-class InstitutionType(DjangoObjectType):
- class Meta:
- model = Institution
- fields = ("name", "country")
-
-
-class UserType(DjangoObjectType):
- class Meta:
- model = User
- fields = ("first_name", "last_name")
-
-
-class LeaderType(DjangoObjectType):
- class Meta:
- model = Participation
- fields = ("user", "institution")
-
-
-class DetectorType(graphene.ObjectType):
- name = graphene.String(required=True)
- enabled = graphene.Boolean(required=True)
- filters = graphene.List(graphene.String, required=True)
-
-
-class MethodType(graphene.ObjectType):
- name = graphene.String(required=True)
- enabled = graphene.Boolean(required=True)
- detectors = graphene.List(DetectorType, required=True)
-
-
-class RunType(DjangoObjectType):
- methods = graphene.List(MethodType)
-
- class Meta:
- model = Run
- fields = (
- "label",
- "start_date",
- "particle_type",
- "energy_in_keV",
- "beamline",
- "methods",
- "project",
- )
-
- def resolve_methods(self, info):
- methods = []
- for method_dto in method_model_to_dto(self):
- method = MethodType(name=method_dto.name, detectors=[])
- for detector_dto in method_dto.detectors:
- method.detectors.append(
- DetectorType(
- name=detector_dto.name,
- filters=detector_dto.filters,
- )
- )
- methods.append(method)
- return methods
-
-
-class ProjectType(DjangoObjectType):
- class Meta:
- model = Project
- fields = (
- "name",
- "status",
- "comments",
- "runs",
- "leader",
- "object_groups",
- "slug",
- "created",
- )
-
- object_group_materials = graphene.List(graphene.String)
- status = graphene.String()
- leader = graphene.Field(LeaderType)
- object_groups = graphene.List(ObjectGroupType)
-
- def resolve_status(self, info):
- return self.status
-
- def resolve_object_group_materials(self, info):
- project_materials = ObjectGroup.objects.filter(runs__project=self).values_list(
- "materials", flat=True
- )
- return set(itertools.chain(*project_materials))
-
- def resolve_leader(self, info):
- return self.leader
-
- def resolve_object_groups(self, info):
- return list(
- set(
- objectgroup
- for run in self.runs.all()
- for objectgroup in run.run_object_groups.all()
- )
- )
-
-
class LabStatType(graphene.ObjectType):
total_projects = graphene.Int()
total_object_groups = graphene.Int()
@@ -207,42 +58,8 @@ def get_total_hours(period: StatPeriodLiteral):
class Query(graphene.ObjectType):
- last_projects = graphene.List(ProjectType, limit=graphene.Int())
- project_detail = graphene.Field(ProjectType, slug=graphene.String())
- object_group_detail = graphene.Field(ObjectGroupType, pk=graphene.String())
stats = graphene.Field(LabStatsType)
- def resolve_last_projects(self, info, limit=None):
- projects = (
- Project.objects.only_finished()
- .only_public()
- .order_by("-created")
- .distinct()
- )
- if limit:
- projects = projects[:limit]
- return projects
-
- def resolve_project_detail(self, _, slug):
- return (
- Project.objects.only_finished()
- .prefetch_related(
- "runs",
- "runs__run_object_groups",
- "runs__run_object_groups__object_set",
- )
- .filter(slug=slug)
- .first()
- )
-
- def resolve_object_group_detail(self, _, pk): # pylint: disable=invalid-name
- return (
- ObjectGroup.objects.filter(pk=pk)
- .prefetch_related(Prefetch("runs", queryset=Run.objects.only_finished()))
- .annotate(is_data_available=Q(runs__project__is_data_available=True))
- .first()
- )
-
def resolve_stats(self, info):
return {
"all": LabStatType(
diff --git a/lab/static/css/admin/objectgroup.css b/lab/static/css/admin/objectgroup.css
index fad6e6b93..59551b24e 100644
--- a/lab/static/css/admin/objectgroup.css
+++ b/lab/static/css/admin/objectgroup.css
@@ -34,7 +34,7 @@ input[type="submit"][disabled].default {
margin-bottom: 1rem;
}
-.field-discovery_place_location, .field-dating, .field-materials {
+.field-discovery_place_location, .field-dating_period, .field-dating_era, .field-materials {
overflow: initial;
}
diff --git a/lab/static/js/widgets/dating-autocomplete-widget.js b/lab/static/js/widgets/dating-autocomplete-widget.js
index 748ee0592..0ef5d0c0d 100644
--- a/lab/static/js/widgets/dating-autocomplete-widget.js
+++ b/lab/static/js/widgets/dating-autocomplete-widget.js
@@ -8,7 +8,7 @@
const parentField = event.target.closest(baseSelector);
parentField.querySelector(`${baseSelector}__label`).value = label;
- parentField.querySelector(`${baseSelector}__theso_joconde_id`).value = id;
+ parentField.querySelector(`${baseSelector}__concept_id`).value = id;
parentField.querySelector(".typeahead-list").classList.add("hidden");
}
@@ -19,21 +19,20 @@
const idInput = datingField.querySelector(`${baseSelector}__id`);
if (idInput.value && idInput.value !== "") {
idInput.value = "";
- datingField.querySelector(`${baseSelector}__theso_joconde_id`).value = "";
+ datingField.querySelector(`${baseSelector}__concept_id`).value = "";
}
}
document.addEventListener("DOMContentLoaded", function () {
- // Add event listeners to institution input elements
+ // Add event listeners to dating input elements
document.querySelectorAll(`${baseSelector}`).forEach((el) => {
el.querySelector(`${baseSelector}__label`).addEventListener(
"input",
onInput,
);
- el.querySelector("div[is='period-type-ahead']").addEventListener(
- "result-click",
- onResultClicked,
- );
+ el.querySelector(
+ "div[is='dating-open-theso-type-ahead']",
+ ).addEventListener("result-click", onResultClicked);
});
});
})();
diff --git a/lab/templates/widgets/dating_autocomplete_widget.html b/lab/templates/widgets/dating_autocomplete_widget.html
index 5aeb0aa46..038f08b8d 100644
--- a/lab/templates/widgets/dating_autocomplete_widget.html
+++ b/lab/templates/widgets/dating_autocomplete_widget.html
@@ -1,9 +1,34 @@
\ No newline at end of file
diff --git a/lab/templatetags/list_results.py b/lab/templatetags/list_results.py
index 0d49339cd..1fc9aa4c9 100644
--- a/lab/templatetags/list_results.py
+++ b/lab/templatetags/list_results.py
@@ -1,7 +1,6 @@
import datetime
-import typing
from itertools import groupby
-from typing import Any, Callable, TypedDict
+from typing import Callable, TypedDict
from django import template
from django.contrib.admin.templatetags.admin_list import (
@@ -14,7 +13,6 @@
from django.db.models.functions import TruncMonth
from django.db.models.query import QuerySet
from django.utils.html import format_html
-from django_stubs_ext import WithAnnotations
from lab.projects.models import ProjectQuerySet
@@ -132,24 +130,16 @@ class MonthAnnotation(TypedDict):
month: datetime.datetime
-if typing.TYPE_CHECKING:
- MonthAnnotedProject = WithAnnotations[Project, MonthAnnotation] # type: ignore
-else:
- MonthAnnotedProject = WithAnnotations[Project]
-
-
def group_results_by_month(changelist: ChangeList):
- rl: MonthAnnotedProject = changelist.result_list.annotate(
+ rl = changelist.result_list.annotate(
month=TruncMonth("first_run_date") # type: ignore[arg-type]
)
changelist.result_list = rl
return _group_results(changelist, attr_getter_fn=lambda p: p.month)
-def _group_results(
- changelist: ChangeList, attr_getter_fn: Callable[[MonthAnnotedProject], Any]
-):
+def _group_results(changelist: ChangeList, attr_getter_fn: Callable):
"""
Group the results from a ChangeList based on a specific attribute.
diff --git a/lab/templatetags/nav.py b/lab/templatetags/nav.py
index 2620effd9..b93eb58a6 100644
--- a/lab/templatetags/nav.py
+++ b/lab/templatetags/nav.py
@@ -1,72 +1,17 @@
import json
-from typing import NotRequired, TypedDict
from django import template
+from django.conf import settings
from django.http import HttpRequest
-from django.urls import reverse
-from django.utils.translation import gettext_lazy as _
-from lab.permissions import is_lab_admin
+from ..nav import NavItemJson
register = template.Library()
-class NavItemJson(TypedDict):
- title: str
- href: str
- iconName: str
- extraPath: list[str] | None
- exactPath: bool
- badge: NotRequired[int | None]
-
-
@register.simple_tag()
def nav_items_json(request: HttpRequest):
- items: list[NavItemJson] = [
- {
- "title": str(_("Projects")),
- "href": reverse("admin:lab_project_changelist"),
- "iconName": "fr-icon-survey-line",
- "extraPath": [reverse("admin:lab_run_changelist")],
- "exactPath": False,
- }
- ]
-
- if request.user:
- if is_lab_admin(request.user):
- items.insert(
- 0,
- {
- "title": str(_("Dashboard")),
- "href": reverse("admin:index"),
- "iconName": "fr-icon-calendar-line",
- "exactPath": True,
- "extraPath": [],
- },
- )
- items.append(
- {
- "title": str(_("Users")),
- "href": reverse("admin:euphro_auth_user_changelist"),
- "iconName": "fr-icon-user-line",
- "exactPath": False,
- "extraPath": [
- reverse("admin:euphro_auth_userinvitation_changelist")
- ],
- }
- )
- else: # non-admin user
- items.append(
- {
- "title": str(_("Account")),
- "href": reverse(
- "admin:euphro_auth_user_change", args=[request.user.id]
- ),
- "iconName": "fr-icon-user-line",
- "exactPath": False,
- "extraPath": [],
- }
- )
+ items: list[NavItemJson] = settings.NAV_GET_NAV_ITEMS(request)
data = json.dumps({"currentPath": request.path, "items": items})
return data
diff --git a/lab/tests/api_views/test_calendar.py b/lab/tests/api_views/test_calendar.py
index 8850f6bf7..ebbafc13a 100644
--- a/lab/tests/api_views/test_calendar.py
+++ b/lab/tests/api_views/test_calendar.py
@@ -5,8 +5,9 @@
from django.urls import reverse
from django.utils import timezone
+from euphro_auth.tests.factories import LabAdminUserFactory, StaffUserFactory
from lab.api_views.calendar import CalendarSerializer, CalendarView
-from lab.tests.factories import LabAdminUserFactory, RunFactory, StaffUserFactory
+from lab.tests.factories import RunFactory
class CalendarViewTestCase(TestCase):
diff --git a/lab/tests/api_views/test_objects.py b/lab/tests/api_views/test_objects.py
index 33a7313f3..e91b1e9c0 100644
--- a/lab/tests/api_views/test_objects.py
+++ b/lab/tests/api_views/test_objects.py
@@ -3,17 +3,16 @@
from django.test import Client, TestCase
from django.urls import reverse
+from euphro_auth.tests import factories as auth_factories
from lab.objects.c2rmf import ErosHTTPError
-from .. import factories
-
class TestProjectListView(TestCase):
def setUp(self):
self.client = client = Client()
self.api_url = reverse("api:objectgroup-c2rmf-fetch")
- client.force_login(factories.LabAdminUserFactory())
+ client.force_login(auth_factories.LabAdminUserFactory())
def test_route_auth(self):
client = Client()
diff --git a/lab/tests/api_views/test_project.py b/lab/tests/api_views/test_project.py
index 773c7a104..c36b77305 100644
--- a/lab/tests/api_views/test_project.py
+++ b/lab/tests/api_views/test_project.py
@@ -2,6 +2,7 @@
from django.urls import reverse
from django.utils import timezone
+from euphro_auth.tests import factories as auth_factories
from lab.api_views.project import (
IsLabAdminUser,
ProjectList,
@@ -18,7 +19,7 @@ def setUp(self):
self.client = client = Client()
self.api_url = reverse("api:project-list")
- client.force_login(factories.LabAdminUserFactory())
+ client.force_login(auth_factories.LabAdminUserFactory())
self.june_project = factories.RunFactory(
start_date="2023-06-01 10:00:00", end_date="2023-06-30 10:00:00"
@@ -113,7 +114,7 @@ def setUp(self):
self.client = client = Client()
self.api_url = reverse("api:project-upcoming-list")
- client.force_login(factories.LabAdminUserFactory())
+ client.force_login(auth_factories.LabAdminUserFactory())
self.last_month_project = factories.RunFactory(
start_date=timezone.now() - timezone.timedelta(days=30),
diff --git a/lab/tests/api_views/test_run.py b/lab/tests/api_views/test_run.py
index 9a112430c..505c62161 100644
--- a/lab/tests/api_views/test_run.py
+++ b/lab/tests/api_views/test_run.py
@@ -1,6 +1,8 @@
from django.test import Client, TestCase
from django.urls import reverse
+from euphro_auth.tests import factories as auth_factories
+
from .. import factories
@@ -10,7 +12,7 @@ def setUp(self):
self.client = client = Client()
self.api_url = reverse("api:run-detail-methods", args=[run.id])
- client.force_login(factories.LabAdminUserFactory())
+ client.force_login(auth_factories.LabAdminUserFactory())
self.june_project = factories.RunFactory(
start_date="2023-06-01 10:00:00", end_date="2023-06-30 10:00:00"
diff --git a/lab/tests/api_views/test_run_objectgroup.py b/lab/tests/api_views/test_run_objectgroup.py
index c41c245b5..56dd2e3b6 100644
--- a/lab/tests/api_views/test_run_objectgroup.py
+++ b/lab/tests/api_views/test_run_objectgroup.py
@@ -3,6 +3,8 @@
import pytest
from rest_framework.permissions import IsAdminUser
+from euphro_auth.tests import factories as auth_factories
+
from ... import models
from ...api_views import run_objectgroup as views
from ...api_views import serializers
@@ -23,7 +25,7 @@ def test_run_objectgroup_mixin_permissions():
@pytest.mark.django_db
@pytest.mark.usefixtures("run")
def test_run_objectgroup_mixin_get_queryset_when_user():
- user = factories.StaffUserFactory()
+ user = auth_factories.StaffUserFactory()
view = views.RunObjectGroupMixin()
view.request = mock.MagicMock(user=user)
@@ -35,7 +37,7 @@ def test_run_objectgroup_mixin_get_queryset_when_user():
def test_run_objectgroup_mixin_get_queryset_when_member(
run: models.Run,
):
- user = factories.StaffUserFactory()
+ user = auth_factories.StaffUserFactory()
run.project.members.add(user)
view = views.RunObjectGroupMixin()
@@ -46,7 +48,7 @@ def test_run_objectgroup_mixin_get_queryset_when_member(
@pytest.mark.django_db
@pytest.mark.usefixtures("run")
def test_run_objectgroup_mixin_get_queryset_when_admin():
- user = factories.LabAdminUserFactory()
+ user = auth_factories.LabAdminUserFactory()
view = views.RunObjectGroupMixin()
view.request = mock.MagicMock(user=user)
@@ -81,7 +83,7 @@ def test_run_object_group_view_get_queryset(run: models.Run):
def test_run_object_group_view_perform_create_when_admin(run: models.Run):
view = views.RunObjectGroupView()
view.kwargs = {"run_id": run.id}
- view.request = mock.MagicMock(user=factories.LabAdminUserFactory())
+ view.request = mock.MagicMock(user=auth_factories.LabAdminUserFactory())
objectgroup = factories.ObjectGroupFactory()
serializer = serializers.RunObjectGroupCreateSerializer(
@@ -97,7 +99,7 @@ def test_run_object_group_view_perform_create_when_admin(run: models.Run):
@pytest.mark.django_db
def test_run_object_group_view_perform_create_when_member(run: models.Run):
- user = factories.StaffUserFactory()
+ user = auth_factories.StaffUserFactory()
view = views.RunObjectGroupView()
view.kwargs = {"run_id": run.id}
view.request = mock.MagicMock(user=user)
@@ -119,7 +121,7 @@ def test_run_object_group_view_perform_create_when_member(run: models.Run):
@pytest.mark.django_db
def test_run_object_group_view_perform_create_when_not_member(run: models.Run):
- user = factories.StaffUserFactory()
+ user = auth_factories.StaffUserFactory()
view = views.RunObjectGroupView()
view.kwargs = {"run_id": run.id}
view.request = mock.MagicMock(user=user)
@@ -142,7 +144,7 @@ def test_run_object_group_available_view_lookup_field():
def test_run_object_group_available_view_get_queryset_when_admin():
project = factories.ProjectFactory()
view = views.RunObjectGroupAvailableListView()
- view.request = mock.MagicMock(user=factories.LabAdminUserFactory())
+ view.request = mock.MagicMock(user=auth_factories.LabAdminUserFactory())
run_1 = factories.RunFactory(project=project)
run_2 = factories.RunFactory(project=project)
@@ -157,7 +159,7 @@ def test_run_object_group_available_view_get_queryset_when_admin():
@pytest.mark.django_db
def test_run_object_group_available_view_get_queryset_when_member():
project = factories.ProjectFactory()
- user = factories.StaffUserFactory()
+ user = auth_factories.StaffUserFactory()
project.members.add(user)
view = views.RunObjectGroupAvailableListView()
@@ -175,7 +177,7 @@ def test_run_object_group_available_view_get_queryset_when_member():
@pytest.mark.django_db
def test_run_object_group_available_view_get_queryset_when_not_member():
- user = factories.StaffUserFactory()
+ user = auth_factories.StaffUserFactory()
view = views.RunObjectGroupAvailableListView()
view.request = mock.MagicMock(user=user)
@@ -191,7 +193,7 @@ def test_run_object_group_available_view_get_queryset_when_not_member():
def test_run_object_group_available_view_get_queryset_when_run_has_object():
project = factories.ProjectFactory()
view = views.RunObjectGroupAvailableListView()
- view.request = mock.MagicMock(user=factories.LabAdminUserFactory())
+ view.request = mock.MagicMock(user=auth_factories.LabAdminUserFactory())
run_1 = factories.RunFactory(project=project)
run_2 = factories.RunFactory(project=project)
diff --git a/lab/tests/factories.py b/lab/tests/factories.py
index 0443fd309..64456f78a 100644
--- a/lab/tests/factories.py
+++ b/lab/tests/factories.py
@@ -2,30 +2,16 @@
import factory
import factory.fuzzy
-from django.contrib.auth import get_user_model
+
+from euphro_auth.tests.factories import StaffUserFactory
+from lab.thesauri.models import Era
from ..models import Object, ObjectGroup, Participation, Period, Project, Run
+from ..objects.models import Location
NOW = datetime.now(tz=timezone.utc)
-class StaffUserFactory(factory.django.DjangoModelFactory):
- class Meta:
- model = get_user_model()
-
- first_name = factory.Faker("first_name")
- last_name = factory.Faker("last_name")
- email = factory.LazyAttribute(
- lambda u: f"{u.first_name}.{u.last_name}@example.com".lower()
- )
- password = factory.Faker("password")
- is_staff = True
-
-
-class LabAdminUserFactory(StaffUserFactory):
- is_lab_admin = True
-
-
class ParticipationFactory(factory.django.DjangoModelFactory):
class Meta:
model = Participation
@@ -107,6 +93,10 @@ def _adjust_kwargs(cls, **kwargs):
}
class NotEmbargoedRun(RunFactory):
    # A run whose embargo date is already in the past (one day before module
    # import time), so it is returned by Run.objects.only_not_embargoed()
    # (see lab/runs/tests/test_models.py::test_not_embargoed_qs).
    # NOTE(review): name lacks the "Factory" suffix used by sibling factories
    # (RunFactory, EraFactory, ...) — consider NotEmbargoedRunFactory.
    embargo_date = NOW.date() - timedelta(days=1)
+
+
class RunReadyToAskExecFactory(RunFactory):
# pylint: disable=no-member
status = Run.Status.CREATED.value
@@ -120,6 +110,13 @@ class Meta:
label = factory.Faker("date")
class EraFactory(factory.django.DjangoModelFactory):
    """Factory for ``Era`` thesaurus concepts (lab.thesauri.models.Era)."""

    class Meta:
        model = Era

    # NOTE(review): Faker("date") mirrors PeriodFactory but yields a date
    # string as the era label — confirm this is intentional.
    label = factory.Faker("date")
+
+
class ObjectFactory(factory.django.DjangoModelFactory):
class Meta:
model = Object
@@ -132,10 +129,20 @@ class Meta:
model = ObjectGroup
label = factory.Faker("name")
- dating = factory.SubFactory(PeriodFactory)
+ dating_period = factory.SubFactory(PeriodFactory)
+ dating_era = factory.SubFactory(EraFactory)
materials = factory.fuzzy.FuzzyChoice(["wood", "stone", "glass", "metal"], list)
object_count = 3
@factory.post_generation
def objects(self, *args, **kwargs):
return ObjectFactory.create_batch(3, group_id=self.id)
+
+
class LocationFactory(factory.django.DjangoModelFactory):
    """Factory for ``Location`` objects (lab.objects.models.Location)."""

    class Meta:
        model = Location

    label = factory.Faker("name")
    # Faker providers return Decimal coordinates within valid lat/long ranges.
    latitude = factory.Faker("latitude")
    longitude = factory.Faker("longitude")
diff --git a/lab/tests/managers/test_project_manager.py b/lab/tests/managers/test_project_manager.py
index 9f98c9ecd..9ef20bdd4 100644
--- a/lab/tests/managers/test_project_manager.py
+++ b/lab/tests/managers/test_project_manager.py
@@ -31,26 +31,6 @@ def test_project_only_public_confidential(self):
)
self.assertEqual(Project.objects.only_public().count(), 1)
- def test_project_only_public_embargo(self):
- public_project = Project.objects.create(
- name="Public Project", confidential=False
- )
- RunFactory(
- project=public_project,
- embargo_date=timezone.now() - timezone.timedelta(days=1),
- )
- empbargoed_project = Project.objects.create(
- name="Embargoes Project", confidential=False
- )
- RunFactory(
- project=empbargoed_project,
- embargo_date=timezone.now() + timezone.timedelta(days=1),
- )
-
- qs = Project.objects.only_public()
- self.assertEqual(qs.count(), 1)
- self.assertEqual(qs.first().id, public_project.id)
-
def test_project_only_public_when_no_run(self):
public_project = Project.objects.create(
name="Public Project", confidential=False
diff --git a/lab/tests/templatetags/test_nav.py b/lab/tests/templatetags/test_nav.py
index 101afd449..ad881468c 100644
--- a/lab/tests/templatetags/test_nav.py
+++ b/lab/tests/templatetags/test_nav.py
@@ -1,8 +1,9 @@
import json
from unittest import mock
+import pytest
from django.urls import reverse
-from django.utils.translation import gettext_lazy as _
+from django.utils.translation import gettext as _
from euphro_auth.models import User
from lab.templatetags.nav import nav_items_json
@@ -32,23 +33,26 @@ def test_nav_items_json_when_user():
assert data["currentPath"] == "/path"
+@pytest.mark.django_db
def test_nav_items_json_when_admin():
data = json.loads(
nav_items_json(
mock.MagicMock(user=mock.MagicMock(is_lab_admin=True), path="/path")
)
)
- assert len(data["items"]) == 3
+ assert len(data["items"]) == 4
assert data["items"][0]["title"] == str(_("Dashboard"))
assert data["items"][0]["href"] == reverse("admin:index")
assert data["items"][0]["iconName"] == "fr-icon-calendar-line"
assert data["items"][0]["extraPath"] == []
assert data["items"][0]["exactPath"] is True
+
assert data["items"][1]["title"] == str(_("Projects"))
assert data["items"][1]["href"] == reverse("admin:lab_project_changelist")
assert data["items"][1]["iconName"] == "fr-icon-survey-line"
assert data["items"][1]["extraPath"] == [reverse("admin:lab_run_changelist")]
assert data["items"][1]["exactPath"] is False
+
assert data["items"][2]["title"] == str(_("Users"))
assert data["items"][2]["href"] == reverse("admin:euphro_auth_user_changelist")
assert data["items"][2]["iconName"] == "fr-icon-user-line"
@@ -57,4 +61,13 @@ def test_nav_items_json_when_admin():
]
assert data["items"][2]["exactPath"] is False
+ assert data["items"][3]["title"] == str(_("Data requests"))
+ assert data["items"][3]["href"] == reverse(
+ "admin:data_request_datarequest_changelist"
+ )
+ assert data["items"][3]["iconName"] == "fr-icon-download-line"
+ assert data["items"][3]["exactPath"] is False
+ assert data["items"][3]["extraPath"] is None
+ assert data["items"][3]["badge"] == 0
+
assert data["currentPath"] == "/path"
diff --git a/lab/tests/test_opentheso.py b/lab/tests/test_opentheso.py
new file mode 100644
index 000000000..6ad71e70e
--- /dev/null
+++ b/lab/tests/test_opentheso.py
@@ -0,0 +1,20 @@
+from unittest import mock
+
+from ..thesauri.opentheso import fetch_parent_ids_from_id
+
+
@mock.patch("lab.thesauri.opentheso.requests")
def test_fetch_parent_ids_from_id(request_mock: mock.MagicMock):
    """The helper skips the first (common-root) entry and keeps each key's id tail."""
    payload = {  # type: ignore
        "ELEMENT/IGNORED_ELEMENT": {},
        "ELEMENT/FIRST": {},
        "ELEMENT/BLABLA/SECOND": {},
    }
    request_mock.get.return_value.json.return_value = payload

    parent_ids = fetch_parent_ids_from_id("theso_id", "concept_id")

    assert parent_ids == ["FIRST", "SECOND"]
    # pylint: disable=line-too-long
    expected_url = "https://opentheso.huma-num.fr/opentheso/openapi/v1/concept/theso_id/concept_id/expansion?way=top"
    request_mock.get.assert_called_once_with(expected_url, timeout=5)
diff --git a/lab/tests/test_schema.py b/lab/tests/test_schema.py
index 6aa7bcc64..5bdbf0100 100644
--- a/lab/tests/test_schema.py
+++ b/lab/tests/test_schema.py
@@ -4,10 +4,9 @@
from django.utils import timezone
from graphene.test import Client
-from lab.objects.models import Location
from lab.schema import schema
-from .factories import FinishedProject, ObjectGroupFactory, ProjectFactory, RunFactory
+from .factories import ObjectGroupFactory, ProjectFactory, RunFactory
@pytest.fixture(name="client")
@@ -15,137 +14,6 @@ def graphene_client():
return Client(schema)
-@pytest.mark.django_db
-def test_resolve_last_projects_are_finished(client):
- finished_project = FinishedProject()
- ProjectFactory() # not finished project
- executed = client.execute(
- """
- query {
- lastProjects(limit: 3) {
- name
- status
- comments
- slug
- }
- }
- """
- )
-
- assert executed == {
- "data": {
- "lastProjects": [
- {
- "name": finished_project.name,
- "status": "Status.FINISHED",
- "comments": finished_project.comments,
- "slug": finished_project.slug,
- }
- ]
- }
- }
-
-
-@pytest.mark.django_db
-def test_resolve_last_projects_are_confidential(client):
- public_project = ProjectFactory(name="Finished project", confidential=False)
- public_project.runs.create(
- start_date=timezone.now() - timezone.timedelta(days=2),
- end_date=timezone.now() - timezone.timedelta(days=1),
- embargo_date=timezone.now() - timezone.timedelta(days=1),
- )
- confidential_project = ProjectFactory(
- name="Not Finished Project", confidential=True
- )
- confidential_project.runs.create(
- start_date=timezone.now() - timezone.timedelta(days=1),
- end_date=timezone.now() - timezone.timedelta(days=1),
- )
- executed = client.execute(
- """
- query {
- lastProjects(limit: 3) {
- name
- status
- comments
- slug
- }
- }
- """
- )
-
- assert executed == {
- "data": {
- "lastProjects": [
- {
- "name": public_project.name,
- "status": "Status.FINISHED",
- "comments": public_project.comments,
- "slug": public_project.slug,
- }
- ]
- }
- }
-
-
-@pytest.mark.django_db
-def test_resolve_project_detail(client):
- project = FinishedProject()
- executed = client.execute(
- """
- query {
- projectDetail(slug: "%s") {
- name
- comments
- slug
- }
- }
- """
- % project.slug
- )
-
- assert executed == {
- "data": {
- "projectDetail": {
- "name": project.name,
- "comments": project.comments,
- "slug": project.slug,
- }
- }
- }
-
-
-@pytest.mark.django_db
-def test_resolve_object_group_detail(client):
- location = Location.objects.create(label="Location")
- objectgroup = ObjectGroupFactory(discovery_place_location=location)
- objectgroup.runs.add(FinishedProject().runs.first())
- executed = client.execute(
- """
- query {
- objectGroupDetail(pk: "%s") {
- label
- discoveryPlace
- collection
- dataAvailable
- }
- }
- """
- % objectgroup.id
- )
-
- assert executed == {
- "data": {
- "objectGroupDetail": {
- "label": objectgroup.label,
- "discoveryPlace": objectgroup.discovery_place,
- "collection": objectgroup.collection,
- "dataAvailable": False,
- }
- }
- }
-
-
@pytest.mark.django_db
def test_resolve_stats(client):
last_year_project = ProjectFactory()
@@ -199,7 +67,7 @@ def test_resolve_stats(client):
@pytest.mark.django_db
-def test_resolve_when_nothing(client):
+def test_resolve_stats_when_nothing(client):
executed = client.execute(
"""
query StatsQuery {
diff --git a/lab/thesauri/__init__.py b/lab/thesauri/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/lab/thesauri/models.py b/lab/thesauri/models.py
new file mode 100644
index 000000000..0f26034a7
--- /dev/null
+++ b/lab/thesauri/models.py
@@ -0,0 +1,40 @@
+from django.db import models
+
+
class ThesorusConceptModel(models.Model):
    """Abstract base for a concept taken from an Opentheso thesaurus.

    Concrete subclasses (``Period``, ``Era``) set ``OPENTHESO_THESO_ID`` to
    the id of the thesaurus that backs them.
    """

    # NOTE(review): "Thesorus" (class name and constraint names below) looks
    # like a misspelling of "Thesaurus"; renaming would ripple into subclasses
    # and generated migrations, so it is only flagged here.

    # Opentheso thesaurus id; None on the abstract base, overridden by
    # concrete subclasses.
    OPENTHESO_THESO_ID: str | None = None

    # Concept identifier on Opentheso; optional, so a concept may exist with
    # only a free-text label.
    concept_id = models.CharField(
        "Concept ID on Open Theso",
        max_length=255,
        null=True,
        blank=True,
    )
    # Human-readable label of the concept.
    label = models.CharField("Label", max_length=255)

    class Meta:
        abstract = True

        constraints = [
            # NOTE(review): the (label, concept_id) constraint is implied by
            # the concept_id-only constraint below whenever concept_id is not
            # NULL (and NULLs never conflict) — confirm both are intended.
            models.UniqueConstraint(
                fields=["label", "concept_id"],
                name="%(class)s_thesorus_concept_unique_label_concept_id",
            ),
            models.UniqueConstraint(
                fields=["concept_id"],
                name="%(class)s_thesorus_concept_unique_concept_id",
            ),
        ]

    def __str__(self) -> str:
        # Cast so non-str label values (e.g. lazy strings) render consistently.
        return str(self.label)
+
+
class Period(ThesorusConceptModel):
    # Opentheso thesaurus id for dating periods.
    OPENTHESO_THESO_ID = "th287"
+
+
class Era(ThesorusConceptModel):
    # Opentheso thesaurus id for eras.
    OPENTHESO_THESO_ID = "th289"
diff --git a/lab/thesauri/opentheso.py b/lab/thesauri/opentheso.py
new file mode 100644
index 000000000..6844ccb34
--- /dev/null
+++ b/lab/thesauri/opentheso.py
@@ -0,0 +1,44 @@
+import logging
+from functools import lru_cache
+
+import requests
+
+from .models import Era, Period
+
+logger = logging.getLogger(__name__)
+
+
@lru_cache(maxsize=256)
def fetch_parent_ids_from_id(theso_id: str, concept_id: str) -> list[str]:
    """Return the ids of the ancestor concepts of ``concept_id`` in ``theso_id``.

    Queries the Opentheso "expansion" endpoint (way=top) and extracts the
    last path segment of each returned key, skipping the first entry, which
    is common to all branches. Returns an empty list on any network or HTTP
    error, or when the response body is not a JSON object.

    NOTE(review): results — including the empty list returned on transient
    errors — are memoized for the lifetime of the process. The cache is
    bounded (maxsize=256) so an unbounded stream of distinct concept ids
    cannot grow memory without limit.
    """
    try:
        response = requests.get(
            # pylint: disable=line-too-long
            f"https://opentheso.huma-num.fr/opentheso/openapi/v1/concept/{theso_id}/{concept_id}/expansion?way=top",
            timeout=5,
        )
    except requests.exceptions.RequestException as e:
        logger.error("Failed to fetch parent ids from OpenTheso: %s", e)
        return []
    if not response.ok:
        logger.error(
            "Failed to fetch parent ids from OpenTheso: %s %s",
            response.status_code,
            response.text,
        )
        return []
    data = response.json()
    if data and isinstance(data, dict):
        # We exclude the first item as it is common to all branches.
        # Dicts preserve JSON insertion order, so [1:] drops exactly that item.
        return [key.split("/")[-1] for key in list(data)[1:]]
    logger.error(
        "Failed to fetch parent ids from OpenTheso. Invalid response. %s", response.text
    )
    return []
+
+
def fetch_era_parent_ids_from_id(concept_id: str) -> list[str]:
    """Ancestor concept ids of an era concept, from the ``Era`` thesaurus."""
    return fetch_parent_ids_from_id(Era.OPENTHESO_THESO_ID, concept_id)
+
+
def fetch_period_parent_ids_from_id(concept_id: str) -> list[str]:
    """Ancestor concept ids of a period concept, from the ``Period`` thesaurus."""
    return fetch_parent_ids_from_id(Period.OPENTHESO_THESO_ID, concept_id)
diff --git a/locale/fr/LC_MESSAGES/django.mo b/locale/fr/LC_MESSAGES/django.mo
index 6aee655f5..021d0388c 100644
Binary files a/locale/fr/LC_MESSAGES/django.mo and b/locale/fr/LC_MESSAGES/django.mo differ
diff --git a/locale/fr/LC_MESSAGES/django.po b/locale/fr/LC_MESSAGES/django.po
index 878568016..1ed520c70 100644
--- a/locale/fr/LC_MESSAGES/django.po
+++ b/locale/fr/LC_MESSAGES/django.po
@@ -2,7 +2,7 @@ msgid ""
msgstr ""
"Project-Id-Version: 0.1\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2024-09-27 13:55+0200\n"
+"POT-Creation-Date: 2024-10-23 12:27+0200\n"
"PO-Revision-Date: 2021-09-09 19:04+0200\n"
"Language: \n"
"MIME-Version: 1.0\n"
@@ -10,6 +10,132 @@ msgstr ""
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=2; plural=(n > 1);\n"
+msgid "Accept request(s) (send download links)"
+msgstr "Accepter les demandes (envoyer les liens de téléchargement)"
+
+#, python-format
+msgid "Error sending links to %(email)s for %(data_request)s: %(error)s"
+msgstr ""
+"Une erreur est survenue lors de l'envoi des liens à %(email)s pour "
+"%(data_request)s : %(error)s"
+
+msgid "has been sent"
+msgstr "envoyé"
+
+msgid "Yes"
+msgstr "Oui"
+
+msgid "No"
+msgstr "Non"
+
+msgid "Data requests"
+msgstr "Demandes de données"
+
+msgid "Is sent"
+msgstr "Envoyé"
+
+msgid "[New AGLAE] Data request received"
+msgstr "[New AGLAE] Demande de données reçue"
+
+msgid "Your New AGLAE data links"
+msgstr "Vos liens de données New AGLAE"
+
+msgid "data request"
+msgstr "demande de données"
+
+msgid "data requests"
+msgstr "demandes de données"
+
+msgid "Viewed"
+msgstr "Vue"
+
+msgid "Has been viewed by an admin"
+msgstr "A été vue par un admin"
+
+msgid "Sent at"
+msgstr "Date d'envoi"
+
+msgid "User email"
+msgstr "Adresse email de l'utilisateur"
+
+msgid "First name"
+msgstr "Prénom"
+
+msgid "Last name"
+msgstr "Nom"
+
+msgid "Institution"
+msgstr "Institution"
+
+msgid "Description"
+msgstr "Description"
+
+msgid "Created"
+msgstr "Créé"
+
+msgid "Modified"
+msgstr "Modifié"
+
+msgid "data access event"
+msgstr "événement d'accès aux données"
+
+msgid "data access events"
+msgstr "événements d'accès aux données"
+
+msgid "Path"
+msgstr "Chemin"
+
+msgid "Access time"
+msgstr "Heure d'accès"
+
+msgid "Data Request"
+msgstr "Demande de données"
+
+msgid ""
+"Thank you for your interest in New AGLAE data catalog. We have processed "
+"your request and generated the following links for you to download the data "
+"you have selected:"
+msgstr ""
+"Merci pour votre intérêt pour le catalogue de données de New AGLAE. Nous "
+"avons traité votre demande et généré les liens suivants pour télécharger les "
+"données que vous avez sélectionnées :"
+
+msgid "Raw data"
+msgstr "Données brutes"
+
+msgid "Processed data"
+msgstr "Données travaillées"
+
+msgid ""
+"Please click on the links above to download the data. If you have any "
+"further questions or need assistance, please feel free to contact us."
+msgstr ""
+"Cliquez sur les liens ci-dessus pour télécharger les données. Si vous avez "
+"des questions ou besoin d'aide, n'hésitez pas à nous contacter."
+
+#, python-format
+msgid "Note that the links will be active until %(formatted_exp_date)s."
+msgstr "Les liens sont disponibles jusqu'au %(formatted_exp_date)s."
+
+msgid "Thank you"
+msgstr "Merci"
+
+msgid "The New AGLAE Team"
+msgstr "L'équipe New AGLAE"
+
+msgid "Data request received"
+msgstr "Demande de données reçue"
+
+msgid ""
+"We have received your data request. A member of the New AGLAE team will "
+"review it as soon as posible and get back to you."
+msgstr ""
+"Nous avons bien reçu votre demande de données. Un membre de l'équipe New "
+"AGLAE la traitera dès que possible."
+
+msgid "Have a good day,"
+msgstr "Bonne journée,"
+
msgid "Related project"
msgstr "Projet apparenté"
@@ -22,12 +148,6 @@ msgstr "Statut"
msgid "Invitation completed"
msgstr "Invitation complétée"
-msgid "Yes"
-msgstr "Oui"
-
-msgid "No"
-msgstr "Non"
-
msgid "Profile"
msgstr "Profil"
@@ -80,12 +200,6 @@ msgstr "[Euphrosyne] Invitation à s'inscrire"
msgid "Email"
msgstr "Email"
-msgid "First name"
-msgstr "Prénom"
-
-msgid "Last name"
-msgstr "Nom"
-
msgid "An account with this email already exists."
msgstr "Un compte avec cet email existe déjà."
@@ -362,6 +476,12 @@ msgstr "filtres {}"
msgid "{} / {} filters choice"
msgstr "choix de filtres {} / {}"
+msgid "Projects"
+msgstr "Projets"
+
+msgid "Account"
+msgstr "Compte"
+
msgid ""
"We cannot process the CSV file because it is not valid. Please refer to the "
"template to fill it properly."
@@ -432,8 +552,11 @@ msgstr "Nombre d'objets"
msgid "Inventory number"
msgstr "Inventaire"
-msgid "Dating"
-msgstr "Datation"
+msgid "Period"
+msgstr "Période"
+
+msgid "Era"
+msgstr "Époque"
msgid "Materials"
msgstr "Matériaux"
@@ -510,9 +633,6 @@ msgstr ""
msgid "Renaming this project is not posible for now : %s"
msgstr "Renommer ce projet est impossible pour le moment : %s"
-msgid "Projects"
-msgstr "Projets"
-
msgid "To schedule"
msgstr "A planifier"
@@ -544,9 +664,6 @@ msgstr "Aucun chef de projet"
msgid "User"
msgstr "Utilisateur"
-msgid "Institution"
-msgstr "Institution"
-
msgid "Scheduled"
msgstr "Planifié"
@@ -684,9 +801,6 @@ msgstr "Embargo permanent"
msgid "The end date must be after the start date"
msgstr "La date de fin doit être après la date de début"
-msgid "Created"
-msgstr "Créé"
-
msgid "Proton"
msgstr "Proton"
@@ -961,9 +1075,6 @@ msgstr "Télécharger les données brutes du run %(label)s"
msgid "Download processed data of run %(label)s"
msgstr "Télécharger les données travaillées du run %(label)s"
-msgid "Account"
-msgstr "Compte"
-
msgid "Run(s)"
msgstr "Run(s)"
@@ -981,9 +1092,6 @@ msgstr "Vérifiez vos informations"
msgid "Date this entry was first created"
msgstr "Date de création de cette entrée"
-msgid "Modified"
-msgstr "Modifié"
-
msgid "Date this entry was most recently changed."
msgstr "Date de la dernière modification de cette entrée"
diff --git a/pylint.txt b/pylint.txt
new file mode 100644
index 000000000..e69de29bb
diff --git a/requirements/base.txt b/requirements/base.txt
index 8bfb70394..21cbfb7c8 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -1,5 +1,6 @@
-i https://pypi.org/simple
docutils==0.21.2
+django-cors-headers==4.3.1
dj-database-url==2.2.0
django-debug-toolbar==4.4.6
django-filter==24.3
@@ -10,6 +11,7 @@ django-stubs[compatible-mypy]==5.1.0 # not dev because used in code (WithAnnotat
graphene-django==3.2.2
gunicorn==23.0.0
markdown==3.7
+opensearch-py==2.5.0
psycopg2==2.9.10
python-dotenv==1.0.1
python3-openid==3.2.0