Allow db config via separate env vars
zechmeister committed Feb 26, 2024
1 parent bba3f2e commit dc1e071
Showing 2 changed files with 39 additions and 18 deletions.
2 changes: 1 addition & 1 deletion README.md
@@ -11,7 +11,7 @@ queries:
  timestampColumn: createdAt
```
-Set the `DBURI` environment variable to a connection string libpq recognises.
+Set the `DBURI` environment variable to a connection string libpq recognises. Alternatively, set `DB_NAME`, `DB_HOST`, `DB_USER`, `DB_PASSWORD` and `DB_PORT`.

This script assumes to be scheduled hourly. It collects data for defined queries for the last hour. So you probably want to use it with a cronjob like this:
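The README change above admits two equivalent configurations. A minimal sketch of the equivalence, with made-up credentials (none of these values come from the repository):

```python
import os

# Hypothetical credentials, for illustration only.
os.environ.update(
    {
        "DB_USER": "metrics",
        "DB_PASSWORD": "s3cret",
        "DB_HOST": "db.example.com",
        "DB_PORT": "5432",
        "DB_NAME": "app",
    }
)

# The separate variables compose into the same libpq-style URI that
# DBURI would otherwise carry directly.
uri = (
    f"postgresql://{os.environ['DB_USER']}"
    f":{os.environ['DB_PASSWORD']}"
    f"@{os.environ['DB_HOST']}"
    f":{os.environ['DB_PORT']}"
    f"/{os.environ['DB_NAME']}"
)
assert uri == "postgresql://metrics:s3cret@db.example.com:5432/app"
```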
55 changes: 38 additions & 17 deletions query_and_submit.py
@@ -8,11 +8,23 @@

metrics_token = os.environ.get("METRICS_WEBHOOK_TOKEN")

+db_uri = (
+    os.environ.get("DBURI")
+    if "DBURI" in os.environ
+    else (
+        f"postgresql://{os.environ['DB_USER']}"
+        f":{os.environ['DB_PASSWORD']}"
+        f"@{os.environ['DB_HOST']}"
+        f":{os.environ['DB_PORT']}"
+        f"/{os.environ['DB_NAME']}"
+    )
+)
+
with open("/opt/config.yaml") as f:
-  config = yaml.safe_load(f)
+    config = yaml.safe_load(f)

if "preExecuteScript" in config:
-  os.system(config["preExecuteScript"])
+    os.system(config["preExecuteScript"])

one_hour_ago = dt.datetime.now() - dt.timedelta(hours=1)
interval_start = one_hour_ago.strftime("%Y-%m-%dT%H:00:00Z")
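A quick sketch of the hourly window the hunk above computes. The diff does not show how `interval_end` is set, so treating it as the current hour is an assumption here:

```python
import datetime as dt

# For a run at 14:05, the previous full hour is reported.
now = dt.datetime(2024, 2, 26, 14, 5)
one_hour_ago = now - dt.timedelta(hours=1)
interval_start = one_hour_ago.strftime("%Y-%m-%dT%H:00:00Z")
# Assumption: interval_end is the current hour, formatted the same way.
interval_end = now.strftime("%Y-%m-%dT%H:00:00Z")
assert (interval_start, interval_end) == (
    "2024-02-26T13:00:00Z",
    "2024-02-26T14:00:00Z",
)
```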
@@ -21,24 +33,33 @@
metrics = {}

for metric_name, query in config["queries"].items():
-  sql = query["countSql"]
-  condition_keyword = "and" if re.search("where", sql, re.IGNORECASE) else "where"
-  timestamp_col = query["timestampColumn"]
-  metrics[metric_name] = json.loads(subprocess.check_output(["psql", os.environ["DBURI"], "-Atc", f"{sql} {condition_keyword} \"{timestamp_col}\" between '{interval_start}' and '{interval_end}'"]))
+    sql = query["countSql"]
+    condition_keyword = "and" if re.search("where", sql, re.IGNORECASE) else "where"
+    timestamp_col = query["timestampColumn"]
+    metrics[metric_name] = json.loads(
+        subprocess.check_output(
+            [
+                "psql",
+                db_uri,
+                "-Atc",
+                f"{sql} {condition_keyword} \"{timestamp_col}\" between '{interval_start}' and '{interval_end}'",
+            ]
+        )
+    )

response = requests.post(
-  url=config["endpoint"],
-  json={
-    "startInterval": interval_start,
-    "endInterval": interval_end,
-    **metrics,
-  },
-  headers={
-    "xc-token": metrics_token,
-  }
+    url=config["endpoint"],
+    json={
+        "startInterval": interval_start,
+        "endInterval": interval_end,
+        **metrics,
+    },
+    headers={
+        "xc-token": metrics_token,
+    },
)

if response.ok:
-  print("export done")
+    print("export done")
else:
-  print("export failed")
+    print("export failed")
