This repository has been archived by the owner on Jul 15, 2020. It is now read-only.

Commit

Merge pull request #13 from venicegeo/14500-Audit_Log_Part2
Added code to run the logger with correct details
dbazile authored Feb 8, 2017
2 parents 141ced2 + 24d15d4 commit 8ca5d80
Showing 31 changed files with 305 additions and 412 deletions.
1 change: 1 addition & 0 deletions README.md
@@ -162,3 +162,4 @@ cf set-env bf-api PIAZZA_API_KEY <valid Piazza API key>
| `CATALOG` | Overrides the Beachfront Image Catalog hostname |
| `SECRET_KEY` | Overrides the randomly-generated secret key used by Flask for session I/O |
| `DEBUG_MODE` | Set to `1` to start the server in debug mode. Note that this will have some fairly noisy logs. |
| `MUTE_LOGS` | Set to `1` to mute the logs (happens by default in test mode) |
10 changes: 6 additions & 4 deletions bfapi/__init__.py
@@ -15,9 +15,11 @@

import bfapi.logger


DEBUG_MODE = os.getenv('DEBUG_MODE') == '1'
if DEBUG_MODE:
print('*' * 80, '\u26A0 SERVER IS RUNNING IN DEBUG MODE'.center(80), '*' * 80, sep='\n\n\n')
MUTE_LOGS = os.getenv('MUTE_LOGS') == '1'


bfapi.logger.init(DEBUG_MODE)
bfapi.logger.init(
debug=DEBUG_MODE,
muted=MUTE_LOGS,
)
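
Because `bfapi/__init__.py` reads these flags at import time, a test run can silence logging simply by exporting `MUTE_LOGS` before the package is first imported. A minimal sketch, assuming a pytest-style `conftest.py` (the file name and layout are assumptions, not part of this commit):

```python
# conftest.py -- hypothetical test bootstrap
import os

# Must be set before bfapi is imported, because the logger is initialized
# (and muted) as a side effect of importing the package.
os.environ['MUTE_LOGS'] = '1'

import bfapi  # noqa: E402
```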
4 changes: 2 additions & 2 deletions bfapi/db/__init__.py
@@ -113,7 +113,7 @@ def _install():
def _install_if_needed():
log = logging.getLogger(__name__)

log.info('Checking to see if installation is required')
log.info('Checking to see if installation is required', action='Checking for installation')

# Load SQL script
try:
@@ -132,7 +132,7 @@ def _install_if_needed():
raise InstallationError('schema execution failed', err)

if is_installed:
log.info('Schema exists and will not be reinstalled')
log.info('Schema exists and will not be reinstalled', action='Schema exists and will not be reinstalled')
return

_install()
29 changes: 28 additions & 1 deletion bfapi/db/jobs.py
@@ -12,14 +12,16 @@
# specific language governing permissions and limitations under the License.

from datetime import datetime

import logging
from bfapi.db import Connection, ResultProxy

def delete_job_user(
conn: Connection,
*,
job_id: str,
user_id: str) -> bool:
log = logging.getLogger(__name__)
log.info('Db delete job user', action='database delete record')
query = """
DELETE FROM __beachfront__job_user
WHERE job_id = %(job_id)s
@@ -51,6 +53,8 @@ def insert_detection(
job_id: str,
feature_collection: str) -> None:
# FIXME -- I know we can do better than this...
log = logging.getLogger(__name__)
log.info('Db insert detection', action='database insert record')
query = """
INSERT INTO __beachfront__detection (job_id, feature_id, geometry)
SELECT %(job_id)s AS job_id,
@@ -79,6 +83,9 @@ def insert_job(
tide_min_24h: float,
tide_max_24h: float,
user_id: str) -> None:

log = logging.getLogger(__name__)
log.info('Db insert job', action='database insert record')
query = """
INSERT INTO __beachfront__job (job_id, algorithm_id, algorithm_name, algorithm_version, created_by, name,
scene_id, status, tide, tide_min_24h, tide_max_24h)
@@ -107,6 +114,8 @@ def insert_job_failure(
error_message: str,
execution_step: str,
job_id: str) -> None:
log = logging.getLogger(__name__)
log.info('Db insert job failure', action='database insert record')
query = """
INSERT INTO __beachfront__job_error (job_id, error_message, execution_step)
VALUES (%(job_id)s, %(error_message)s, %(execution_step)s)
@@ -124,6 +133,8 @@ def insert_job_user(
*,
job_id: str,
user_id: str) -> None:
log = logging.getLogger(__name__)
log.info('Db insert job user', action='database insert record')
query = """
INSERT INTO __beachfront__job_user (job_id, user_id)
VALUES (%(job_id)s, %(user_id)s)
@@ -141,6 +152,8 @@ def select_detections(
*,
job_id: str) -> ResultProxy:
# Construct the GeoJSON directly where the data lives
log = logging.getLogger(__name__)
log.info('Db select detection', action='database query record')
query = """
SELECT to_json(fc)::text AS "feature_collection"
FROM (SELECT 'FeatureCollection' AS "type",
@@ -165,6 +178,8 @@ def select_job(
conn: Connection,
*,
job_id: str) -> ResultProxy:
log = logging.getLogger(__name__)
log.info('Db select job', action='database query record')
query = """
SELECT j.job_id, j.algorithm_name, j.algorithm_version, j.created_by, j.created_on, j.name, j.scene_id, j.status, j.tide, j.tide_min_24h, j.tide_max_24h,
e.error_message, e.execution_step,
@@ -185,6 +200,8 @@ def select_jobs_for_inputs(
*,
algorithm_id: str,
scene_id: str) -> ResultProxy:
log = logging.getLogger(__name__)
log.info('Db select jobs for inputs', action='database query record')
query = """
SELECT job_id,
CASE status
@@ -210,6 +227,8 @@ def select_jobs_for_productline(
*,
productline_id: str,
since: datetime) -> ResultProxy:
log = logging.getLogger(__name__)
log.info('Db select jobs for productline', action='database query record')
query = """
SELECT j.job_id, j.algorithm_name, j.algorithm_version, j.created_by, j.created_on, j.name, j.scene_id, j.status, j.tide, j.tide_min_24h, j.tide_max_24h,
ST_AsGeoJSON(s.geometry) AS geometry, s.sensor_name, s.captured_on
@@ -232,6 +251,8 @@ def select_jobs_for_scene(
conn: Connection,
*,
scene_id: str) -> ResultProxy:
log = logging.getLogger(__name__)
log.info('Db select jobs for scene', action='database query record')
query = """
SELECT j.job_id, j.algorithm_name, j.algorithm_version, j.created_by, j.created_on, j.name, j.scene_id, j.status, j.tide, j.tide_min_24h, j.tide_max_24h,
ST_AsGeoJSON(s.geometry) AS geometry, s.sensor_name, s.captured_on
@@ -251,6 +272,8 @@ def select_jobs_for_user(
conn: Connection,
*,
user_id: str) -> ResultProxy:
log = logging.getLogger(__name__)
log.info('Db select jobs for users', action='database query record')
query = """
SELECT j.job_id, j.algorithm_name, j.algorithm_version, j.created_by, j.created_on, j.name, j.scene_id, j.status, j.tide, j.tide_min_24h, j.tide_max_24h,
e.error_message, e.execution_step,
@@ -269,6 +292,8 @@ def select_jobs_for_user(


def select_outstanding_jobs(conn: Connection) -> ResultProxy:
log = logging.getLogger(__name__)
log.info('Db select outstanding jobs', action='database query record')
query = """
SELECT job_id,
DATE_TRUNC('second', NOW() - created_on) AS age
@@ -284,6 +309,8 @@ def update_status(
*,
job_id: str,
status: str) -> None:
log = logging.getLogger(__name__)
log.info('Db update status', action='database update record')
query = """
UPDATE __beachfront__job
SET status = %(status)s
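
Every helper in `bfapi/db/jobs.py` now follows the same shape: get a module-level logger, write an audit line with an `action` keyword, then run the parameterized query. A minimal sketch of that pattern with a hypothetical table and helper name (the `action` keyword is handled by the project's custom logger class, not the stdlib, and the `conn.execute` call in the elided lines may differ in detail):

```python
import logging

from bfapi.db import Connection, ResultProxy


def select_example(
        conn: Connection,
        *,
        record_id: str) -> ResultProxy:
    log = logging.getLogger(__name__)
    # Audit entry is written before the database is touched
    log.info('Db select example', action='database query record')
    query = """
        SELECT record_id, created_on
          FROM __beachfront__example
         WHERE record_id = %(record_id)s
        """
    return conn.execute(query, dict(record_id=record_id))
```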
14 changes: 13 additions & 1 deletion bfapi/db/productlines.py
@@ -12,14 +12,16 @@
# specific language governing permissions and limitations under the License.

from datetime import date

import logging
from bfapi.db import Connection, ResultProxy


def delete_productline(
conn: Connection,
*,
productline_id: str) -> ResultProxy:
log = logging.getLogger(__name__)
log.info('Db delete productline', action='database delete record')
query = """
UPDATE __beachfront__productline
SET deleted = TRUE
@@ -45,6 +47,8 @@ def insert_productline(
start_on: date,
stop_on: date = None,
user_id: str) -> ResultProxy:
log = logging.getLogger(__name__)
log.info('Db insert productline', action='database insert record')
query = """
INSERT INTO __beachfront__productline (productline_id, algorithm_id, algorithm_name, category, created_by, max_cloud_cover, name, owned_by, spatial_filter_id, start_on, stop_on, bbox)
VALUES (%(productline_id)s, %(algorithm_id)s, %(algorithm_name)s, %(category)s, %(user_id)s, %(max_cloud_cover)s, %(name)s, %(user_id)s, %(spatial_filter_id)s, %(start_on)s, %(stop_on)s, ST_MakeEnvelope(%(min_x)s, %(min_y)s, %(max_x)s, %(max_y)s))
@@ -74,6 +78,8 @@ def insert_productline_job(
*,
job_id: str,
productline_id: str) -> ResultProxy:
log = logging.getLogger(__name__)
log.info('Db insert productline job', action='database insert record')
query = """
INSERT INTO __beachfront__productline_job (job_id, productline_id)
VALUES (%(job_id)s, %(productline_id)s)
@@ -87,6 +93,8 @@ def insert_productline_job(


def select_all(conn: Connection):
log = logging.getLogger(__name__)
log.info('Db select all', action='database query record')
query = """
SELECT productline_id, algorithm_id, algorithm_name, category, compute_mask, created_by,
created_on, max_cloud_cover, name, owned_by, spatial_filter_id, start_on, stop_on,
@@ -102,6 +110,8 @@ def select_productline(
conn: Connection,
*,
productline_id: str) -> ResultProxy:
log = logging.getLogger(__name__)
log.info('Db select productline', action='database query record')
query = """
SELECT productline_id, algorithm_id, algorithm_name, category, compute_mask, created_by,
created_on, max_cloud_cover, name, owned_by, spatial_filter_id, start_on, stop_on,
@@ -125,6 +135,8 @@ def select_summary_for_scene(
min_y: float,
max_x: float,
max_y: float) -> ResultProxy:
log = logging.getLogger(__name__)
log.info('Db select summary for scene', action='database query record')
query = """
SELECT productline_id, algorithm_id, name, owned_by
FROM __beachfront__productline
4 changes: 3 additions & 1 deletion bfapi/db/scenes.py
@@ -13,7 +13,7 @@

import json
from datetime import datetime

import logging
from bfapi.db import Connection


@@ -27,6 +27,8 @@ def insert(
resolution: int,
scene_id: str,
sensor_name: str) -> str:
log = logging.getLogger(__name__)
log.info('Db insert scene', action='database insert record')
query = """
INSERT INTO __beachfront__scene (scene_id, captured_on, catalog_uri, cloud_cover, geometry, resolution, sensor_name)
VALUES (%(scene_id)s, %(captured_on)s, %(catalog_uri)s, %(cloud_cover)s, ST_GeomFromGeoJSON(%(geometry)s),
8 changes: 7 additions & 1 deletion bfapi/db/users.py
@@ -12,12 +12,14 @@
# specific language governing permissions and limitations under the License.

from bfapi.db import Connection, ResultProxy

import logging

def select_user(
conn: Connection,
*,
user_id: str) -> ResultProxy:
log = logging.getLogger(__name__)
log.info('Db select user', action='database query record')
query = """
SELECT u.user_id, u.user_name, u.api_key, u.created_on
FROM __beachfront__user u
@@ -33,6 +35,8 @@ def select_user_by_api_key(
conn: Connection,
*,
api_key: str) -> ResultProxy:
log = logging.getLogger(__name__)
log.info('Db select user by api', action='database query record')
query = """
SELECT u.user_id, u.user_name, u.api_key, u.created_on
FROM __beachfront__user u
@@ -50,6 +54,8 @@ def insert_user(
user_id: str,
user_name: str,
api_key: str) -> None:
log = logging.getLogger(__name__)
log.info('Db insert user', action='database insert record')
query = """
INSERT INTO __beachfront__user (user_id, user_name, api_key)
VALUES (%(user_id)s, %(user_name)s, %(api_key)s)
9 changes: 7 additions & 2 deletions bfapi/logger.py
@@ -14,9 +14,9 @@
import datetime
import os
import logging.config

import sys


ACTOR_SYSTEM = 'SYSTEM'
APP_NAME = 'beachfront'
FACILITY = 1
@@ -39,14 +39,19 @@
}


def init(debug: bool = False):
def init(*, debug: bool, muted: bool):
logging.basicConfig(
format=FORMAT,
level=logging.DEBUG if debug else logging.INFO,
stream=sys.stdout,
style='{',
)
logging.setLoggerClass(AuditableLogger)

# Prevent spamming test outputs
if muted:
logging.root.handlers = [logging.NullHandler()]

log = logging.getLogger(__name__)
log.debug('Initialized')

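
The `action=...` keywords used throughout the database and service modules are not part of the standard `logging` API; they are accepted by the `AuditableLogger` class that `init()` installs via `logging.setLoggerClass()`. The class itself lies outside this hunk, so the following is only an illustrative sketch of the mechanism, not the project's actual implementation:

```python
import logging


class AuditableLogger(logging.Logger):
    """Logger whose info() accepts audit keywords such as `action` and `actee`."""

    def info(self, msg, *args, action: str = None, actee: str = None, **kwargs):
        if action:
            # Fold the audit fields into the message; the real class may
            # instead emit a syslog-style audit record.
            msg = '{} [action={!r}, actee={!r}]'.format(msg, action, actee)
        super().info(msg, *args, **kwargs)


logging.setLoggerClass(AuditableLogger)
logging.basicConfig(level=logging.INFO)
log = logging.getLogger('example')
log.info('Db select job', action='database query record')
```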
12 changes: 11 additions & 1 deletion bfapi/server.py
@@ -15,6 +15,7 @@
from flask_cors import CORS

from bfapi import config, db, middleware, routes, service
from bfapi import DEBUG_MODE, MUTE_LOGS

FALLBACK_MIMETYPE = 'text/plain'

@@ -61,6 +62,14 @@ def banner():
if not key.isupper() or 'PASSWORD' in key:
continue
configurations.append('{key:>38} : {value}'.format(key=key, value=value))

warnings = []
if DEBUG_MODE:
warnings.append(' \u26A0 WARNING: SERVER IS RUNNING IN DEBUG MODE\n')

if MUTE_LOGS:
warnings.append(' \u26A0 WARNING: LOGS ARE MUTED\n')

print(
'-' * 120,
'',
@@ -69,13 +78,14 @@ def banner():
'',
*configurations,
'',
*warnings,
'-' * 120,
sep='\n',
flush=True
)


def init(app):
def init(app: flask.Flask):
banner()
config.validate()
db.init()
2 changes: 2 additions & 0 deletions bfapi/service/algorithms.py
@@ -55,6 +55,7 @@ def serialize(self):

def list_all() -> List[Algorithm]:
log = logging.getLogger(__name__)
log.info('Algorithms service list all algorithms', action='Service algorithms list all')
try:
log.info('Fetching beachfront services from Piazza', action='fetch services')
services = piazza.get_services('^BF_Algo_')
@@ -84,6 +85,7 @@

def get(service_id: str) -> Algorithm:
log = logging.getLogger(__name__)
log.info('Algorithms service get algorithms', action='service algorithms get')
try:
log.info('Fetch beachfront service `%s` from Piazza', service_id, action='fetch service', actee=service_id)
service = piazza.get_service(service_id)