Merge pull request #147 from NOWUM/dev
Add more test cases
maurerle authored Aug 14, 2023
2 parents 6a1253c + b7c38e7 commit 856d660
Showing 61 changed files with 1,233 additions and 707 deletions.
66 changes: 2 additions & 64 deletions .github/workflows/main.yml
@@ -86,6 +86,7 @@ jobs:

- name: Install requirements
run: |
sudo apt-get update && sudo apt-get install --no-install-recommends -y glpk-utils python3-swiglpk && sudo rm -rf /var/lib/apt/lists/*
pip install --upgrade --upgrade-strategy eager -r requirements-dev.txt -e .
- name: Run tests
@@ -102,7 +103,7 @@ jobs:
result.xml
uplaod_coverage_results:
upload_coverage_results:
needs: test
runs-on: ubuntu-latest
name: "Upload code coverage"
@@ -217,66 +218,3 @@ jobs:
- name: Push the Docker image to GitHub Container Registry
run: |
docker push ${{ steps.get_tag.outputs.DOCKER_TAG }}
deploy_dev:
needs: [ build_dev_image ]
runs-on: ubuntu-latest
concurrency: ssh-connection # only one ssh connection at a time
name: "Deploy dev image"
if: ${{ success() && github.actor != 'dependabot[bot]' }}
steps:
- name: Update deployment status - start
uses: bobheadxi/deployments@v1.4.0
id: deployment
with:
step: start
token: ${{ github.token }}
env: Development
no_override: false
desc: "Development deployment for main branch"
ref: "main" # dev deployment of main branch
transient: true

- name: Install VPN
run: |
sudo /sbin/modprobe tun
sudo apt install openconnect
- name: Connect VPN
run: |
echo "${{ secrets.VPN_PASS }}" | sudo openconnect ${{ secrets.VPN_URL }} --background --user=${{ secrets.VPN_USER }} --passwd-on-stdin
- name: Deploy docker container on private server
uses: appleboy/ssh-action@v0.1.4
with:
host: ${{ secrets.SSH_URL }}
username: ${{ secrets.SSH_USER }}
password: ${{ secrets.SSH_PASS }}
script: |
docker system prune -af
docker pull ${{ needs.build_dev_image.outputs.image_tag }}
docker ps --filter publish=9000
docker rm -f $(docker ps --filter publish=9000 -aq)
docker run -d -p 9000:8080 --name "dev" ${{ needs.build_dev_image.outputs.image_tag }}
- name: Disconnect VPN
if: ${{ always() }}
run: |
sudo pkill openconnect
- name: Get env url
id: get_env_url
run: |
ENV_URL="http://${{ secrets.SSH_URL }}:9000"
echo ::set-output name=ENV_URL::"${ENV_URL}"
- name: Update deployment status - finish
uses: bobheadxi/deployments@v1.4.0
if: always()
with:
step: finish
token: ${{ github.token }}
status: ${{ job.status }}
deployment_id: ${{ steps.deployment.outputs.deployment_id }}
env_url: ${{ steps.get_env_url.outputs.env_url }}
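The added step in main.yml installs the GLPK solver (glpk-utils) and its Python bindings (python3-swiglpk) via apt-get before the tests run. Below is a minimal smoke-test sketch of what a check for that toolchain could look like; the test itself is hypothetical and not part of this commit, and it only assumes the swiglpk module and the glpsol binary that the apt-get line installs.

```python
# Hypothetical smoke test: confirm the GLPK toolchain installed by the
# workflow's apt-get step is actually usable in the test environment.
import shutil

import pytest


def test_glpk_toolchain_available():
    # python3-swiglpk provides the `swiglpk` module, a thin SWIG wrapper
    # around the GLPK C API; glp_version() returns a version string like "5.0".
    swiglpk = pytest.importorskip("swiglpk")
    assert swiglpk.glp_version()

    # glpk-utils ships the `glpsol` command-line solver.
    assert shutil.which("glpsol") is not None
```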
102 changes: 0 additions & 102 deletions .github/workflows/pull-request-done.yml
@@ -40,46 +40,6 @@ jobs:
MATRIX_CONTEXT: ${{ toJSON(matrix) }}
run: echo "$MATRIX_CONTEXT"

preview_delete:
runs-on: ubuntu-latest
concurrency: ssh-connection # only one ssh connection at a time
name: "Delete preview"
if: ${{ github.actor != 'dependabot[bot]' }}
steps:
- name: Update deployment status - deactivate
uses: bobheadxi/deployments@v1.4.0
id: deactivate
with:
step: deactivate-env
token: ${{ github.token }}
env: PR-${{ github.event.number }}-Preview
desc: "Preview deployment for PR #${{ github.event.number }} was pruned."

- name: Install VPN
run: |
sudo /sbin/modprobe tun
sudo apt install openconnect
- name: Connect VPN
run: |
echo "${{ secrets.VPN_PASS }}" | sudo openconnect ${{ secrets.VPN_URL }} --background --user=${{ secrets.VPN_USER }} --passwd-on-stdin
- name: Stop docker container on private server
uses: appleboy/ssh-action@v0.1.4
with:
host: ${{ secrets.SSH_URL }}
username: ${{ secrets.SSH_USER }}
password: ${{ secrets.SSH_PASS }}
script: |
docker ps --filter publish=$((9000 + ${{ github.event.number }}))
docker rm -f $(docker ps --filter publish=$((9000 + ${{ github.event.number }})) -aq) > /dev/null || true
- name: Disconnect VPN
if: ${{ always() }}
run: |
sudo pkill openconnect
create_release:
runs-on: "ubuntu-latest"
if: github.event.pull_request.merged == true && startsWith( github.head_ref, 'release/')
@@ -180,65 +140,3 @@ jobs:
run: |
docker push ${{ steps.get_tag.outputs.DOCKER_TAG }}
deploy_prod:
needs: [ create_release, build_prod_image ]
runs-on: ubuntu-latest
concurrency: ssh-connection # only one ssh connection at a time
name: "Deploy production image"
if: ${{ success() && github.actor != 'dependabot[bot]' }}
steps:
- name: Update deployment status - start
uses: bobheadxi/deployments@v1.4.0
id: deployment
with:
step: start
token: ${{ github.token }}
env: Production
no_override: false
desc: "Production deployment for latest release"
ref: "v${{ needs.create_release.outputs.version }}" # tag of current release
transient: true

- name: Install VPN
run: |
sudo /sbin/modprobe tun
sudo apt install openconnect
- name: Connect VPN
run: |
echo "${{ secrets.VPN_PASS }}" | sudo openconnect ${{ secrets.VPN_URL }} --background --user=${{ secrets.VPN_USER }} --passwd-on-stdin
- name: Deploy docker container on private server
uses: appleboy/ssh-action@v0.1.4
with:
host: ${{ secrets.SSH_URL }}
username: ${{ secrets.SSH_USER }}
password: ${{ secrets.SSH_PASS }}
script: |
docker system prune -af
docker pull ${{ needs.build_prod_image.outputs.image_tag }}
docker ps --filter publish=8080
docker rm -f $(docker ps --filter publish=8080 -aq)
docker run -d -p 8080:8080 --name "production" ${{ needs.build_prod_image.outputs.image_tag }}
- name: Disconnect VPN
if: ${{ always() }}
run: |
sudo pkill openconnect
- name: Get env url
id: get_env_url
run: |
ENV_URL="http://${{ secrets.SSH_URL }}:8080"
echo ::set-output name=ENV_URL::"${ENV_URL}"
- name: Update deployment status - finish
uses: bobheadxi/deployments@v1.4.0
if: always()
with:
step: finish
token: ${{ github.token }}
status: ${{ job.status }}
deployment_id: ${{ steps.deployment.outputs.deployment_id }}
env_url: ${{ steps.get_env_url.outputs.env_url }}
67 changes: 2 additions & 65 deletions .github/workflows/pull-request.yml
@@ -91,6 +91,7 @@ jobs:

- name: Install requirements
run: |
sudo apt-get update && sudo apt-get install --no-install-recommends -y glpk-utils python3-swiglpk && sudo rm -rf /var/lib/apt/lists/*
pip install --upgrade --upgrade-strategy eager -r requirements-dev.txt -e .
- name: Run tests
@@ -107,7 +108,7 @@ jobs:
result.xml
uplaod_coverage_results:
upload_coverage_results:
needs: test
runs-on: ubuntu-latest
name: "Upload code coverage"
@@ -183,67 +184,3 @@ jobs:
- name: Push the Docker image to GitHub Container Registry
run: |
docker push ${{ steps.get_tag.outputs.DOCKER_TAG }}
deploy_pr:
needs: [ build_pr_image ]
runs-on: ubuntu-latest
concurrency: ssh-connection # only one ssh connection at a time
name: "Deploy preview image"
if: ${{ success() && github.actor != 'dependabot[bot]' }}
steps:
- name: Update deployment status - start
uses: bobheadxi/deployments@v1.4.0
id: deployment
with:
step: start
token: ${{ github.token }}
env: PR-${{ github.event.number }}-Preview
no_override: false
desc: "Preview deployment for PR #${{ github.event.number }}"
ref: ${{ github.head_ref }}
transient: true

- name: Install VPN
run: |
sudo /sbin/modprobe tun
sudo apt install openconnect
- name: Connect VPN
run: |
echo "${{ secrets.VPN_PASS }}" | sudo openconnect ${{ secrets.VPN_URL }} --background --user=${{ secrets.VPN_USER }} --passwd-on-stdin
- name: Deploy docker container on private server
uses: appleboy/ssh-action@v0.1.4
with:
host: ${{ secrets.SSH_URL }}
username: ${{ secrets.SSH_USER }}
password: ${{ secrets.SSH_PASS }}
script: |
docker system prune -af
docker pull ${{ needs.build_pr_image.outputs.image_tag }}
docker ps --filter publish=$((9000 + ${{ github.event.number }}))
docker rm -f $(docker ps --filter publish=$((9000 + ${{ github.event.number }})) -aq)
docker run -d -p $((9000 + ${{ github.event.number }})):8080 --name "pr-preview-$((9000 + ${{ github.event.number }}))" ${{ needs.build_pr_image.outputs.image_tag }}
- name: Disconnect VPN
if: ${{ always() }}
run: |
sudo pkill openconnect
- name: Get env url
id: get_env_url
run: |
ENV_URL="http://${{ secrets.SSH_URL }}:$((9000 + ${{ github.event.number }} ))"
echo ::set-output name=ENV_URL::"${ENV_URL}"
- name: Update deployment status - finish
uses: bobheadxi/deployments@v1.4.0
if: always()
with:
step: finish
token: ${{ github.token }}
status: ${{ job.status }}
deployment_id: ${{ steps.deployment.outputs.deployment_id }}
env_url: ${{ steps.get_env_url.outputs.env_url }}
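The removed deploy_pr and preview_delete jobs derived a per-PR preview port as 9000 plus the pull request number and reused it for the published port, the container lookup, and the environment URL. A small illustration of that arithmetic follows; the helper function is hypothetical.

```python
# Hypothetical helper mirroring the port scheme of the removed preview jobs:
# preview port = 9000 + pull request number.
def preview_port(pr_number: int, base: int = 9000) -> int:
    return base + pr_number


# For this pull request (#147) the preview would have been served on
# port 9147, i.e. http://<SSH_URL>:9147.
assert preview_port(147) == 9147
```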
1 change: 1 addition & 0 deletions .gitignore
@@ -50,6 +50,7 @@ coverage.xml
.hypothesis/
.pytest_cache/
cover/
generated/

# Translations
*.mo
29 changes: 22 additions & 7 deletions ensysmod/api/api.py
@@ -1,8 +1,23 @@
from fastapi import APIRouter

from .endpoints import users, authentication, energy_sources, datasets, energy_commodities, energy_sinks, \
energy_storages, energy_transmissions, energy_conversions, regions, ts_capacity_max, ts_operation_rate_fix, \
ts_operation_rate_max, ts_capacity_fix, energy_models, datasets_permissions
from .endpoints import (
authentication,
datasets,
datasets_permissions,
energy_commodities,
energy_conversions,
energy_models,
energy_sinks,
energy_sources,
energy_storages,
energy_transmissions,
regions,
ts_capacity_fix,
ts_capacity_max,
ts_operation_rate_fix,
ts_operation_rate_max,
users,
)

api_router = APIRouter()
api_router.include_router(authentication.router, prefix="/auth", tags=["Authentication"])
@@ -18,7 +33,7 @@
api_router.include_router(energy_transmissions.router, prefix="/transmissions", tags=["Energy Transmissions"])
api_router.include_router(energy_models.router, prefix="/models", tags=["Energy Models"])

api_router.include_router(ts_capacity_fix.router, prefix="/fix-capacities", tags=["TS Capacities Fix"])
api_router.include_router(ts_capacity_max.router, prefix="/max-capacities", tags=["TS Capacities Max"])
api_router.include_router(ts_operation_rate_fix.router, prefix="/fix-operation-rates", tags=["TS Operation Rates Fix"])
api_router.include_router(ts_operation_rate_max.router, prefix="/max-operation-rates", tags=["TS Operation Rates Max"])
api_router.include_router(ts_capacity_fix.router, prefix="/fix-capacities", tags=["Fix Capacities"])
api_router.include_router(ts_capacity_max.router, prefix="/max-capacities", tags=["Max Capacities"])
api_router.include_router(ts_operation_rate_fix.router, prefix="/fix-operation-rates", tags=["Fix Operation Rates"])
api_router.include_router(ts_operation_rate_max.router, prefix="/max-operation-rates", tags=["Max Operation Rates"])
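The tag renames above only change how the time-series routers are grouped in the generated OpenAPI docs. A minimal sketch of how the new tag names surface on a mounted app; it assumes ensysmod's api_router imports cleanly in the target environment and uses only standard FastAPI attributes.

```python
# Sketch: inspect the OpenAPI tags registered via include_router after this change.
from fastapi import FastAPI

from ensysmod.api.api import api_router

app = FastAPI()
app.include_router(api_router)

# APIRoute objects keep the tags passed to include_router, so the docs UI now
# groups the time-series endpoints under "Fix Capacities", "Max Capacities",
# "Fix Operation Rates" and "Max Operation Rates" instead of the old "TS ..." names.
tags = {tag for route in app.routes for tag in getattr(route, "tags", [])}
print(sorted(tags))
```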
23 changes: 13 additions & 10 deletions ensysmod/api/endpoints/datasets.py
@@ -4,12 +4,12 @@
from io import BytesIO
from typing import List

from fastapi import APIRouter, Depends, HTTPException, status, UploadFile, File
from fastapi import APIRouter, Depends, File, HTTPException, UploadFile, status
from fastapi.encoders import jsonable_encoder
from fastapi.responses import FileResponse
from sqlalchemy.orm import Session

from ensysmod import schemas, model, crud
from ensysmod import crud, model, schemas
from ensysmod.api import deps, permissions
from ensysmod.core.file_download import export_data
from ensysmod.core.file_upload import process_dataset_zip_archive
@@ -19,10 +19,10 @@


@router.get("/", response_model=List[schemas.Dataset])
def all_datasets(db: Session = Depends(deps.get_db),
current: model.User = Depends(deps.get_current_user),
skip: int = 0,
limit: int = 100) -> List[schemas.Dataset]:
def get_all_datasets(db: Session = Depends(deps.get_db),
current: model.User = Depends(deps.get_current_user),
skip: int = 0,
limit: int = 100) -> List[schemas.Dataset]:
"""
Retrieve all datasets.
"""
Expand All @@ -36,7 +36,7 @@ def get_dataset(dataset_id: int,
"""
Retrieve a dataset.
"""
return crud.dataset.get(db, dataset_id)
return crud.dataset.get(db=db, id=dataset_id)


@router.post("/", response_model=schemas.Dataset,
@@ -83,10 +83,13 @@ def remove_dataset(dataset_id: int,


@router.post("/{dataset_id}/upload", response_model=schemas.ZipArchiveUploadResult)
def upload_zip_archive(dataset_id: int,
def upload_dataset_zip(dataset_id: int,
file: UploadFile = File(...),
db: Session = Depends(deps.get_db),
current: model.User = Depends(deps.get_current_user)):
"""
Upload a dataset as zip.
"""
if file.content_type not in ["application/x-zip-compressed", "application/zip", "application/zip-compressed"]:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
detail=f"File must be a zip archive. You provided {file.content_type}!")
@@ -107,11 +110,11 @@ def upload_zip_archive(dataset_id: int,


@router.get("/{dataset_id}/download")
def download_zip_archive(dataset_id: int,
def download_dataset_zip(dataset_id: int,
db: Session = Depends(deps.get_db),
current: model.User = Depends(deps.get_current_user)):
"""
Downloads the dataset as zip
Download a dataset as zip.
"""
dataset = crud.dataset.get(db=db, id=dataset_id)
if dataset is None:
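For context on the renamed dataset endpoints (get_all_datasets, upload_dataset_zip, download_dataset_zip), here is a hedged client-side sketch of the upload/download round trip. The base URL, the bearer token, and the /datasets mount prefix are assumptions, not taken from this diff; only the path suffixes and the accepted zip content types appear in the code above.

```python
# Hypothetical client round trip for the dataset upload/download endpoints.
import requests

BASE_URL = "http://localhost:8080"             # assumed server address
headers = {"Authorization": "Bearer <token>"}  # token from the auth endpoint, assumed
dataset_id = 1                                 # example dataset id

# Upload: the handler rejects anything that is not a zip content type
# ("application/zip", "application/x-zip-compressed", "application/zip-compressed").
with open("dataset.zip", "rb") as fh:
    resp = requests.post(
        f"{BASE_URL}/datasets/{dataset_id}/upload",
        files={"file": ("dataset.zip", fh, "application/zip")},
        headers=headers,
    )
resp.raise_for_status()

# Download: the endpoint returns the dataset as a zip archive.
resp = requests.get(f"{BASE_URL}/datasets/{dataset_id}/download", headers=headers)
with open("dataset_export.zip", "wb") as out:
    out.write(resp.content)
```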