Add new service account (#313)
* New requirements: PyJWT

Also upgrade cloud-info-provider

* Token generator

* Add an AccessToken generator

Also split into 2 separate files for better management

* Improve code of generator

* Add new script

* Update cloud-info-provider

* Fixing tests and linting

* Try to fix python testing

* Update IISAS-FedCloud-cloud.yaml (#311)

Add vo.usegalaxy.eu

* vo.fuvex.es is not operational anymore

* Specify py version

* No need for setuptools

* Test token generator

* Add missing library

* Improve code

* Call the token generator

* Further improvements

---------

Co-authored-by: astalosj <73936420+astalosj@users.noreply.github.com>
enolfc and astalosj authored Oct 17, 2023
1 parent d2e98ca commit 1a94d45
Showing 14 changed files with 593 additions and 231 deletions.
13 changes: 9 additions & 4 deletions .github/workflows/python.yml
@@ -1,6 +1,8 @@
name: Python testing

on: pull_request
on:
- pull_request
- push

jobs:
test:
@@ -9,11 +9,14 @@ jobs:

steps:
- uses: actions/checkout@v4
- name: Set up Python
- name: Set up Python
uses: actions/setup-python@v4
- name: Test cloud-info-generator
with:
python-version: '3.11'
- name: Test cloud-info-generator
run: |
cd cloud-info
pip install -r requirements.txt
pip install .
python3 -m cloud_info_catchall.test
cd cloud_info_catchall
python3 -m unittest
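
The test step now relies on plain `unittest` discovery inside `cloud_info_catchall` instead of a single hard-coded test module. As a rough sketch (not the repository's actual tests, which are not part of this diff), a minimal module that discovery would pick up could look like the following, assuming the package and its requirements are installed as in the step above; `read_secrets` is defined in `config_generator.py` further down:

```
import os
import tempfile
import unittest

from cloud_info_catchall.config_generator import read_secrets


class TestReadSecrets(unittest.TestCase):
    def test_read_secrets_parses_yaml(self):
        # write a throwaway secrets file in the documented format
        with tempfile.NamedTemporaryFile("w", suffix=".yaml", delete=False) as f:
            f.write('site-account:\n  access_token: "abc"\n')
            path = f.name
        try:
            secrets = read_secrets(path)
        finally:
            os.unlink(path)
        self.assertEqual(secrets["site-account"]["access_token"], "abc")


if __name__ == "__main__":
    unittest.main()
```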
27 changes: 23 additions & 4 deletions cloud-info/ams-wrapper.sh
@@ -34,20 +34,39 @@ curl -f "https://$AMS_HOST/v1/projects/$AMS_PROJECT/topics/$AMS_TOPIC?key=$AMS_T

# Attempt to generate the site configuration
AUTO_CONFIG_PATH="$(mktemp -d)"

# First get valid access token
export CHECKIN_SECRETS_FILE="$CHECKIN_SECRETS_PATH/secrets.yaml"
if VO_SECRETS_PATH="$AUTO_CONFIG_PATH/vos" config-generator > "$AUTO_CONFIG_PATH/site.yaml"; then
# this worked, let's update the env
export CHECKIN_SECRETS_PATH="$AUTO_CONFIG_PATH/vos"
export CLOUD_INFO_CONFIG="$AUTO_CONFIG_PATH/site.yaml"
# TODO(enolfc): avoid creating new tokens for every provider
export ACCESS_TOKEN_FILE="$AUTO_CONFIG_PATH/token.yaml"
USE_ACCESS_TOKEN=0
if token-generator; then
    # TODO(enolfc): even if the step below fails, we should use the access token
    # as it will provide access to more projects
if SECRETS_FILE="$ACCESS_TOKEN_FILE" config-generator > "$AUTO_CONFIG_PATH/site.yaml"; then
# this worked, let's update the env
export CHECKIN_SECRETS_PATH="$AUTO_CONFIG_PATH/vos"
export CLOUD_INFO_CONFIG="$AUTO_CONFIG_PATH/site.yaml"
USE_ACCESS_TOKEN=1
fi
fi

# Any OS related parameter should be available as env variables
if test "$CHECKIN_SECRETS_PATH" = ""; then
# Case 1: manual config
cloud-info-provider-service --yaml-file "$CLOUD_INFO_CONFIG" \
--middleware "$CLOUD_INFO_MIDDLEWARE" \
--ignore-share-errors \
--format glue21 > cloud-info.out
elif test "$USE_ACCESS_TOKEN" -eq 1; then
# Case 2: access token style
cloud-info-provider-service --yaml-file "$CLOUD_INFO_CONFIG" \
--middleware "$CLOUD_INFO_MIDDLEWARE" \
--ignore-share-errors \
--auth-refresher accesstoken \
--format glue21 > cloud-info.out
else
# Case 3: oidc refresh style
cloud-info-provider-service --yaml-file "$CLOUD_INFO_CONFIG" \
--middleware "$CLOUD_INFO_MIDDLEWARE" \
--ignore-share-errors \
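
For orientation, the `site.yaml` that `config-generator` writes to `$AUTO_CONFIG_PATH/site.yaml` has the shape produced by `generate_shares_config` in the next file. A small sketch with placeholder site, VO and project values:

```
# Shape of the generated site.yaml, derived from generate_shares_config()
# in config_generator.py below; all names and ids here are placeholders.
import yaml

site_config = {
    "site": {"name": "EXAMPLE-SITE"},
    "compute": {
        "shares": {
            # with access-token secrets, build_share() also puts an
            # "access_token" entry under "auth" (see share_discovery.py)
            "vo.example.eu": {"auth": {"project_id": "0123456789abcdef"}},
        }
    },
}
print(yaml.dump(site_config))
```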
147 changes: 57 additions & 90 deletions cloud-info/cloud_info_catchall/config_generator.py
@@ -1,115 +1,82 @@
"""Discover projects for cloud-info-povider and generate configuration
Takes its own configuration from env variables:
CHECKIN_SECRETS_FILE: yaml file with the check-in secrets to get access tokens
CHECKIN_OIDC_TOKEN: URL for token refresh
SECRETS_FILE: yaml file with the secrets to access shares
The yaml includes as many credentials as wanted in 2 formats
```
---
secret_name:
  client_id: "client id"
  client_secret: "client_secret"
  refresh_token: "refresh_token"
other_secret:
  access_token: "access token"
```
Any other formats will be ignored
VO_SECRETS_PATH: directory to create VO structure with credentials
for cloud-info-provider
TOKEN_URL: URL to refresh tokens
OS_AUTH_URL, OS_IDENTITY_PROVIDER, OS_PROTOCOL: OpenStack endpoint config
SITE_NAME: site name
"""

import logging
import os

import fedcloudclient.endpoint as fedcli
import yaml
from cloud_info_provider.auth_refreshers.oidc_refresh import OidcRefreshToken
from cloud_info_catchall.share_discovery import (
AccessTokenShareDiscovery,
RefresherShareDiscovery,
)


class ShareDiscovery:
def __init__(self, auth_url, identity_provider, protocol, token_url, vo_dir):
self.auth_url = auth_url
self.identity_provider = identity_provider
self.protocol = protocol
self.token_url = token_url
self.vo_dir = vo_dir

def refresh_token(self, secret):
# fake the options for refreshing
# avoids code duplication but not very clean
class Opt:
timeout = 10

refresher = OidcRefreshToken(Opt)
return refresher._refresh_token(
self.token_url,
secret.get("client_id", None),
secret.get("client_secret", None),
secret.get("refresh_token", None),
"openid email profile voperson_id eduperson_entitlement",
)

def get_token_shares(self, access_token):
# rely on fedcloudclient for getting token
# exchange access_token for Keystone token
shares = {}
try:
token = fedcli.retrieve_unscoped_token(
self.auth_url, access_token, self.protocol
)
except fedcli.TokenException:
# this check-in account does not have access to the site, ignore
return shares
projects = fedcli.get_projects_from_single_site(self.auth_url, token)
for p in projects:
vo = p.get("VO", None)
if not vo:
logging.warning(
"Discarding project %s as it does not have VO property", p["name"]
)
continue
if not p.get("enabled", False):
logging.warning("Discarding project %s as it is not enabled", p["name"])
continue
shares[vo] = {"auth": {"project_id": p["id"]}}
return shares
def read_secrets(secrets_file):
with open(secrets_file, "r") as f:
return yaml.load(f.read(), Loader=yaml.SafeLoader)

def generate_shares(self, secrets):
shares = {}
for s in secrets:
# not our thing
if not isinstance(secrets[s], dict):
continue
access_token = self.refresh_token(secrets[s])
token_shares = self.get_token_shares(access_token)
shares.update(token_shares)
# create the directory structure for the cloud-info-provider
for d in token_shares:
dir_path = os.path.join(self.vo_dir, d)
os.makedirs(dir_path, exist_ok=True)
for field in "client_id", "client_secret", "refresh_token":
with open(os.path.join(dir_path, field), "w+") as f:
f.write(secrets[s].get(field, None) or "")
if not shares:
logging.error("No shares generated!")
raise Exception("No shares found!")
return shares

def generate_config(self, site_name, secrets):
shares = self.generate_shares(secrets)
return {"site": {"name": site_name}, "compute": {"shares": shares}}
def generate_shares(config, secrets):
"""calls the share discovery class according to the secret type
that we have"""
shares = {}
for s in secrets:
        # skip entries that are not credential dictionaries
if not isinstance(secrets[s], dict):
continue
if "client_id" in secrets[s] and "refresh_token" in secrets[s]:
discoverer = RefresherShareDiscovery(config, secrets[s])
elif "access_token" in secrets[s]:
discoverer = AccessTokenShareDiscovery(config, secrets[s])
token_shares = discoverer.get_token_shares()
shares.update(token_shares)
if not shares:
logging.error("No shares generated!")
raise Exception("No shares found!")
return shares


def read_secrets(secrets_file):
with open(secrets_file, "r") as f:
return yaml.load(f.read(), Loader=yaml.SafeLoader)
def generate_shares_config(config, secrets):
shares = generate_shares(config, secrets)
return {"site": {"name": config["site_name"]}, "compute": {"shares": shares}}


def main():
logging.basicConfig()
# get config from env
checkin_secrets_file = os.environ["CHECKIN_SECRETS_FILE"]
checkin_token_url = os.environ["CHECKIN_OIDC_TOKEN"]
os_auth_url = os.environ["OS_AUTH_URL"]
os_identity_provider = os.environ["OS_IDENTITY_PROVIDER"]
os_protocol = os.environ["OS_PROTOCOL"]
site_name = os.environ["SITE_NAME"]
vo_dir = os.environ["VO_SECRETS_PATH"]
secrets = read_secrets(checkin_secrets_file)
disc = ShareDiscovery(
os_auth_url, os_identity_provider, os_protocol, checkin_token_url, vo_dir
)
config = disc.generate_config(site_name, secrets)
print(yaml.dump(config))
secrets_file = os.environ["SECRETS_FILE"]
config = {
"auth_url": os.environ["OS_AUTH_URL"],
"identity_provider": os.environ["OS_IDENTITY_PROVIDER"],
"protocol": os.environ["OS_PROTOCOL"],
"site_name": os.environ["SITE_NAME"],
"token_url": os.environ.get("TOKEN_URL", ""),
"vo_dir": os.environ.get("VO_SECRETS_PATH", ""),
}
secrets = read_secrets(secrets_file)
shares_config = generate_shares_config(config, secrets)
print(yaml.dump(shares_config))


if __name__ == "__main__":
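
The dispatch in `generate_shares` keys off the layout of each secret, as described in the module docstring. An illustration of the two supported layouts, with invented names and values (the discoverer classes come from `share_discovery.py`, shown next):

```
# Placeholder secrets illustrating what generate_shares() expects.
secrets = {
    # client_id + refresh_token present: handled by RefresherShareDiscovery
    "refresh-style-account": {
        "client_id": "client-id",
        "client_secret": "client-secret",
        "refresh_token": "refresh-token",
    },
    # access_token present: handled by AccessTokenShareDiscovery
    "token-style-account": {
        "access_token": "eyJ...",
    },
    # not a dict: skipped by generate_shares()
    "unrelated-entry": "ignored",
}
```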
99 changes: 99 additions & 0 deletions cloud-info/cloud_info_catchall/share_discovery.py
@@ -0,0 +1,99 @@
"""Discover projects for cloud-info-povider and generate configuration
"""

import logging
import os

import fedcloudclient.endpoint as fedcli
from cloud_info_provider.auth_refreshers.oidc_refresh import OidcRefreshToken


class ShareDiscovery:
def __init__(self, config, secret):
self.auth_url = config["auth_url"]
self.identity_provider = config["identity_provider"]
self.protocol = config["protocol"]
self.secret = secret

def build_share(self, project, access_token):
return {"auth": {"project_id": project["id"]}}

def get_token_shares(self):
access_token = self.get_token()
# rely on fedcloudclient for getting token
# exchange access_token for Keystone token
shares = {}
try:
token = fedcli.retrieve_unscoped_token(
self.auth_url, access_token, self.protocol
)
except fedcli.TokenException:
# this check-in account does not have access to the site, ignore
return shares
projects = fedcli.get_projects_from_single_site(self.auth_url, token)
for p in projects:
vo = p.get("VO", None)
if not vo:
logging.warning(
"Discarding project %s as it does not have VO property", p["name"]
)
continue
if not p.get("enabled", False):
logging.warning("Discarding project %s as it is not enabled", p["name"])
continue
shares[vo] = self.build_share(p, access_token)
self.config_shares(shares, access_token)
return shares

def config_shares(self, shares, access_token):
"""do any additional configuration to support the shares"""
pass

def get_token(self):
raise NotImplementedError


class RefresherShareDiscovery(ShareDiscovery):
"""Refreshes tokens using a refresh token and creates a VO configuration
for its refresh again by cloud-info-provider"""

def __init__(self, config, secret):
super().__init__(config, secret)
self.token_url = config["token_url"]
self.vo_dir = config["vo_dir"]

def get_token(self):
# fake the options for refreshing
# avoids code duplication but not very clean
class Opt:
timeout = 10

refresher = OidcRefreshToken(Opt)
return refresher._refresh_token(
self.token_url,
self.secret.get("client_id", None),
self.secret.get("client_secret", None),
self.secret.get("refresh_token", None),
"openid email profile voperson_id eduperson_entitlement",
)

def config_shares(self, shares, access_token):
# create the directory structure for the cloud-info-provider
for d in shares:
dir_path = os.path.join(self.vo_dir, d)
os.makedirs(dir_path, exist_ok=True)
for field in "client_id", "client_secret", "refresh_token":
with open(os.path.join(dir_path, field), "w+") as f:
f.write(self.secret.get(field, None) or "")


class AccessTokenShareDiscovery(ShareDiscovery):
"""Uses existing access token to create VO configuration"""

def get_token(self):
return self.secret["access_token"]

def build_share(self, project, access_token):
s = super().build_share(project, access_token)
s["auth"].update({"access_token": access_token})
return s
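
A short sketch of exercising the access-token variant directly, assuming the package and its dependencies are installed; `get_token` and `build_share` only touch the secret and the project dict, so no site is contacted. Config values and tokens are placeholders:

```
from cloud_info_catchall.share_discovery import AccessTokenShareDiscovery

config = {
    "auth_url": "https://keystone.example.org:5000/v3",
    "identity_provider": "egi.eu",
    "protocol": "openid",
}
secret = {"access_token": "eyJ..."}

discoverer = AccessTokenShareDiscovery(config, secret)
token = discoverer.get_token()  # returns the access token from the secret
share = discoverer.build_share({"id": "0123456789abcdef"}, token)
# share == {"auth": {"project_id": "0123456789abcdef", "access_token": "eyJ..."}}
```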