Merge pull request #47 from gabrielmscampos/refactor/oc-deployment
Deployment migration to CERN's PaaS
gabrielmscampos authored Jun 21, 2024
2 parents 002f2a9 + 2c9ffb0 commit f2c02bb
Showing 36 changed files with 995 additions and 198 deletions.
55 changes: 55 additions & 0 deletions backend/Dockerfile
@@ -0,0 +1,55 @@
# The default python installed in this release is 3.6.9
# Originally the application was designed for 3.6.8; bumping the minor
# version is not a problem
FROM ubuntu:18.04

ENV LANG=C.UTF-8

ARG ROOT_BIN=root_v6.24.08.Linux-ubuntu18-x86_64-gcc7.5.tar.gz

WORKDIR /opt

RUN apt-get update -qq \
    && ln -sf /usr/share/zoneinfo/UTC /etc/localtime \
    && apt-get -y install wget \
    && wget https://raw.githubusercontent.com/root-project/root-docker/6.24.06-ubuntu20.04/ubuntu/packages \
    && apt-get -y install $(cat packages) python3-pip \
    && rm -rf /var/lib/apt/lists/* \
    && wget https://root.cern/download/${ROOT_BIN} \
    && tar -xzvf ${ROOT_BIN} \
    && rm -f ${ROOT_BIN} \
    && echo /opt/root/lib >> /etc/ld.so.conf \
    && ldconfig

ENV ROOTSYS /opt/root
ENV PATH $ROOTSYS/bin:$PATH
ENV PYTHONPATH $ROOTSYS/lib:$PYTHONPATH
ENV CLING_STANDARD_PCH none
ENV PYTHONUNBUFFERED=1

ARG UID=1000
ARG GID=1000

ENV USERNAME=app
ENV HOME=/home/$USERNAME
ENV APP_HOME=$HOME/backend

RUN mkdir -p $HOME
RUN mkdir $APP_HOME
RUN addgroup --system --gid $GID $USERNAME && adduser --system --ingroup $USERNAME --uid $UID $USERNAME

WORKDIR $APP_HOME

COPY requirements.txt $APP_HOME
RUN pip3 install --upgrade pip
RUN pip3 install -r requirements.txt

COPY backend $APP_HOME/backend
COPY run.sh $APP_HOME

RUN chown -R $USERNAME:$USERNAME $APP_HOME

USER $USERNAME

EXPOSE 5000
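
The ROOT release unpacked above is exposed to Python through the PYTHONPATH entry pointing at $ROOTSYS/lib. A quick sanity check that could be run inside the built image, assuming the standard PyROOT bindings shipped with the tarball (illustration only, not part of the commit):

# Minimal check that the PyROOT bindings from /opt/root/lib are importable
# inside the image built from this Dockerfile.
import ROOT

# For the root_v6.24.08 tarball above this should report something like "6.24/08".
print(ROOT.gROOT.GetVersion())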

26 changes: 23 additions & 3 deletions backend/db.py
@@ -4,6 +4,7 @@
 import logging
 import sqlalchemy
 from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.exc import ProgrammingError
 from sqlalchemy import (
     Text,
     Column,
@@ -277,7 +278,20 @@ def check_gui_file(file):
 
 
 # setup DB
-def setup_db():
+def setup_db(db_path):
+    db_path = "/".join(db_path.split("/")[:-1]) + "/postgres"
+    try:
+        engine = sqlalchemy.create_engine(db_path)
+        conn = engine.connect()
+        conn.execute("COMMIT")
+        conn.execute(f"CREATE DATABASE {os.getenv('DB_NAME')}")
+        conn.close()
+        engine.dispose()
+    except ProgrammingError as err:
+        if "already exists" in str(err):
+            print("Database already exists")
+        else:
+            raise err
     Base.metadata.create_all(engine)


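The URI rewrite at the top of setup_db() swaps the final path segment for the default postgres maintenance database, so CREATE DATABASE can be issued from outside the target database. A small illustration of the effect (the URI below is made up, not taken from the deployment; this snippet is not part of the commit):

# Illustrative only: the real URI comes from get_formatted_db_uri() and the DB_* environment variables.
db_path = "postgresql://hdqm:secret@db-host:5432/hdqm"

# Same rewrite as in setup_db(): drop the last path segment and point at "postgres".
maintenance_uri = "/".join(db_path.split("/")[:-1]) + "/postgres"
print(maintenance_uri)  # postgresql://hdqm:secret@db-host:5432/postgres
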
@@ -286,6 +300,12 @@ def setup_db():
     from dotenv import load_dotenv
 
     load_dotenv()
-    db_path = os.environ.get("HDQM2_DB_PATH")
+    db_path = get_formatted_db_uri(
+        username=os.getenv("DB_USERNAME"),
+        password=os.getenv("DB_PASSWORD"),
+        host=os.getenv("DB_HOST"),
+        port=os.getenv("DB_PORT"),
+        db_name=os.getenv("DB_NAME"),
+    )
     create_session(db_path)
-    setup_db()
+    setup_db(db_path)
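
get_formatted_db_uri() is called in the new __main__ block but defined elsewhere in the module, outside this diff. A minimal sketch of what such a helper presumably returns, shown only as an assumption and not part of the commit:

def get_formatted_db_uri(username, password, host, port, db_name):
    # Hypothetical reconstruction: build a PostgreSQL URI for SQLAlchemy from the
    # individual DB_* environment variables read in __main__.
    return f"postgresql://{username}:{password}@{host}:{port}/{db_name}"
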
14 changes: 7 additions & 7 deletions backend/dqm_extractor.py
@@ -258,17 +258,17 @@ def process_gui_root(file, trend_cfgs, mes):
         fmt="%(asctime)s %(levelname)-8s %(message)s", datefmt="%Y-%m-%d %H:%M:%S"
     )
 
-    handler = TimedRotatingFileHandler(
-        LOGPATH, when="h", interval=24, backupCount=int(NLOGS)
-    )
-    handler.setFormatter(formatter)
-    handler.setLevel(LOGLEVEL)
+    if os.getenv("LOG_TO_FILE"):
+        handler = TimedRotatingFileHandler(
+            LOGPATH, when="h", interval=24, backupCount=int(NLOGS)
+        )
+        handler.setFormatter(formatter)
+        handler.setLevel(LOGLEVEL)
+        logger.addHandler(handler)
 
     handler2 = logging.StreamHandler(sys.stdout)
     handler2.setFormatter(formatter)
     handler2.setLevel(LOGLEVEL)
-
-    logger.addHandler(handler)
     logger.addHandler(handler2)
 
     logger.info("Start " + str(__file__))
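With this change the rotating file handler is only attached when the LOG_TO_FILE environment variable is set; since os.getenv() returns the raw string, any non-empty value (even "false") enables it. A minimal standalone sketch of the pattern, with hypothetical names standing in for the module's LOGPATH, NLOGS and LOGLEVEL settings (not part of the commit):

import logging
import os
import sys
from logging.handlers import TimedRotatingFileHandler

logger = logging.getLogger("extractor_example")  # hypothetical logger name
formatter = logging.Formatter("%(asctime)s %(levelname)-8s %(message)s")

# File logging is opt-in: attach the rotating handler only when LOG_TO_FILE is non-empty.
if os.getenv("LOG_TO_FILE"):
    file_handler = TimedRotatingFileHandler("extractor.log", when="h", interval=24, backupCount=5)
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)

# Console logging stays unconditional, which suits a containerised deployment.
stream_handler = logging.StreamHandler(sys.stdout)
stream_handler.setFormatter(formatter)
logger.addHandler(stream_handler)
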
3 changes: 1 addition & 2 deletions backend/rr_extractor.py
@@ -62,8 +62,7 @@ def get_rr_run(run: int) -> dict:
     Why not use the runregistry python package?
     """
     logger.info("Get RR data for run %s ..." % run)
-    # TODO: replace with production one once 1.0.0 runregistry is published.
-    url = "https://cmsrunregistry-qa.web.cern.ch/api/runs_filtered_ordered"
+    url = "https://cmsrunregistry.web.cern.ch/api/runs_filtered_ordered"
     request = """
     {
         "page" : 0,
23 changes: 0 additions & 23 deletions deployment/nginx/conf.d/hdqm-test.conf

This file was deleted.

23 changes: 0 additions & 23 deletions deployment/nginx/conf.d/hdqm.conf

This file was deleted.

38 changes: 0 additions & 38 deletions deployment/nginx/nginx.conf

This file was deleted.

11 changes: 0 additions & 11 deletions deployment/service/hdqm-extract-test.service

This file was deleted.

9 changes: 0 additions & 9 deletions deployment/service/hdqm-extract-test.timer

This file was deleted.

13 changes: 0 additions & 13 deletions deployment/service/hdqm-extract.service

This file was deleted.

9 changes: 0 additions & 9 deletions deployment/service/hdqm-extract.timer

This file was deleted.

10 changes: 0 additions & 10 deletions deployment/service/hdqm-test.service

This file was deleted.

10 changes: 0 additions & 10 deletions deployment/service/hdqm.service

This file was deleted.

12 changes: 12 additions & 0 deletions frontend/Dockerfile
@@ -0,0 +1,12 @@
FROM nginx:stable-alpine

COPY frontend /usr/share/nginx/html

RUN chmod -R 777 /var/log/nginx /var/cache/nginx /var/run \
&& chgrp -R 0 /etc/nginx \
&& chmod -R g+rwX /etc/nginx \
&& rm /etc/nginx/conf.d/default.conf

EXPOSE 8081

CMD ["nginx", "-g", "daemon off;"]
81 changes: 81 additions & 0 deletions oc/dev/configmaps/nginx.yaml
@@ -0,0 +1,81 @@
apiVersion: v1
kind: ConfigMap
metadata:
  name: nginx-reverse-proxy-conf
  namespace: cms-hdqm-dev
data:
  nginx.conf: |
    worker_processes 4;
    error_log /var/log/nginx/error.log warn;
    pid /var/run/nginx.pid;
    events { worker_connections 1024; }
    http {
        ssl_session_cache shared:SSL:10m;
        ssl_session_timeout 30m;
        proxy_cache_path /var/cache/nginx levels=1:2 keys_zone=one:8m max_size=3000m inactive=600m;
        proxy_temp_path /var/tmp;
        include /etc/nginx/mime.types;
        default_type application/octet-stream;
        sendfile on;
        keepalive_timeout 65;
        gzip_comp_level 6;
        gzip_vary on;
        gzip_min_length 1000;
        gzip_proxied any;
        gzip_types text/plain text/css application/json application/x-javascript text/xml application/xml application/xml+rss text/javascript;
        gzip_buffers 16 8k;
        upstream app_backend {
            server backend:5000 fail_timeout=0;
        }
        server {
            listen 8081;
            server_name localhost;
            # Disable buffering for an authenticating proxy
            proxy_buffering off;
            # Allow big headers (oauth2-proxy cookies) - OKD routers allow headers up to 32k
            proxy_buffer_size 32k;
            proxy_buffers 8 64k;
            proxy_busy_buffers_size 64k;
            # Do not restrict upload size (consider setting a limit here if application does not need large request bodies)
            client_max_body_size 0;
            location /oauth2/ {
                proxy_pass http://cern-auth-proxy:4180; # TODO: must match the Release name used to deploy the cern-auth-proxy service
                proxy_set_header Host $host;
                proxy_set_header X-Scheme $scheme;
                proxy_set_header X-Auth-Request-Redirect $request_uri;
            }
            location / {
                auth_request /oauth2/auth;
                error_page 401 = /oauth2/start;
                root /usr/share/nginx/html;
                index index.html;
                expires -1;
                add_header 'Access-Control-Allow-Origin' '*';
                add_header Pragma "no-cache";
                add_header Cache-Control "no-store, no-cache, must-revalidate, post-check=0, pre-check=0";
                try_files $uri$args $uri$args/ $uri $uri/ /index.html =404;
            }
            location /api {
                auth_request /oauth2/auth;
                error_page 401 = /oauth2/start;
                proxy_pass http://app_backend/api;
                proxy_hide_header 'Access-Control-Allow-Origin';
                add_header 'Access-Control-Allow-Origin' '*';
                proxy_read_timeout 90;
            }
        }
    }
(Diff truncated: the remaining changed files of this commit are not shown.)
