diff --git a/README.md b/README.md index 92cc9b74..38ea69b9 100644 --- a/README.md +++ b/README.md @@ -162,3 +162,4 @@ cf set-env bf-api PIAZZA_API_KEY | `CATALOG` | Overrides the Beachfront Image Catalog hostname | | `SECRET_KEY` | Overrides the randomly-generated secret key used by Flask for session I/O | | `DEBUG_MODE` | Set to `1` to start the server in debug mode. Note that this will have some fairly noisy logs. | +| `MUTE_LOGS` | Set to `1` to mute the logs (happens by default in test mode) | diff --git a/bfapi/__init__.py b/bfapi/__init__.py index fa482e87..4fc6ef6f 100644 --- a/bfapi/__init__.py +++ b/bfapi/__init__.py @@ -15,9 +15,11 @@ import bfapi.logger - DEBUG_MODE = os.getenv('DEBUG_MODE') == '1' -if DEBUG_MODE: - print('*' * 80, '\u26A0 SERVER IS RUNNING IN DEBUG MODE'.center(80), '*' * 80, sep='\n\n\n') +MUTE_LOGS = os.getenv('MUTE_LOGS') == '1' + -bfapi.logger.init(DEBUG_MODE) +bfapi.logger.init( + debug=DEBUG_MODE, + muted=MUTE_LOGS, +) diff --git a/bfapi/db/__init__.py b/bfapi/db/__init__.py index 80245ff3..a65dd60f 100644 --- a/bfapi/db/__init__.py +++ b/bfapi/db/__init__.py @@ -113,7 +113,7 @@ def _install(): def _install_if_needed(): log = logging.getLogger(__name__) - log.info('Checking to see if installation is required') + log.info('Checking to see if installation is required', action='Checking for installation') # Load SQL script try: @@ -132,7 +132,7 @@ def _install_if_needed(): raise InstallationError('schema execution failed', err) if is_installed: - log.info('Schema exists and will not be reinstalled') + log.info('Schema exists and will not be reinstalled', action='Schema exists and will not be reinstalled') return _install() diff --git a/bfapi/db/jobs.py b/bfapi/db/jobs.py index a59e17d7..715e7df8 100644 --- a/bfapi/db/jobs.py +++ b/bfapi/db/jobs.py @@ -12,7 +12,7 @@ # specific language governing permissions and limitations under the License. from datetime import datetime - +import logging from bfapi.db import Connection, ResultProxy def delete_job_user( @@ -20,6 +20,8 @@ def delete_job_user( *, job_id: str, user_id: str) -> bool: + log = logging.getLogger(__name__) + log.info('Db delete job user', action='database delete record') query = """ DELETE FROM __beachfront__job_user WHERE job_id = %(job_id)s @@ -51,6 +53,8 @@ def insert_detection( job_id: str, feature_collection: str) -> None: # FIXME -- I know we can do better than this... 
+ log = logging.getLogger(__name__) + log.info('Db insert detection', action='database insert record') query = """ INSERT INTO __beachfront__detection (job_id, feature_id, geometry) SELECT %(job_id)s AS job_id, @@ -79,6 +83,9 @@ def insert_job( tide_min_24h: float, tide_max_24h: float, user_id: str) -> None: + + log = logging.getLogger(__name__) + log.info('Db insert job', action='database insert record') query = """ INSERT INTO __beachfront__job (job_id, algorithm_id, algorithm_name, algorithm_version, created_by, name, scene_id, status, tide, tide_min_24h, tide_max_24h) @@ -107,6 +114,8 @@ def insert_job_failure( error_message: str, execution_step: str, job_id: str) -> None: + log = logging.getLogger(__name__) + log.info('Db insert job failure', action='database insert record') query = """ INSERT INTO __beachfront__job_error (job_id, error_message, execution_step) VALUES (%(job_id)s, %(error_message)s, %(execution_step)s) @@ -124,6 +133,8 @@ def insert_job_user( *, job_id: str, user_id: str) -> None: + log = logging.getLogger(__name__) + log.info('Db insert job user', action='database insert record') query = """ INSERT INTO __beachfront__job_user (job_id, user_id) VALUES (%(job_id)s, %(user_id)s) @@ -141,6 +152,8 @@ def select_detections( *, job_id: str) -> ResultProxy: # Construct the GeoJSON directly where the data lives + log = logging.getLogger(__name__) + log.info('Db select detections', action='database query record') query = """ SELECT to_json(fc)::text AS "feature_collection" FROM (SELECT 'FeatureCollection' AS "type", @@ -165,6 +178,8 @@ def select_job( conn: Connection, *, job_id: str) -> ResultProxy: + log = logging.getLogger(__name__) + log.info('Db select job', action='database query record') query = """ SELECT j.job_id, j.algorithm_name, j.algorithm_version, j.created_by, j.created_on, j.name, j.scene_id, j.status, j.tide, j.tide_min_24h, j.tide_max_24h, e.error_message, e.execution_step, @@ -185,6 +200,8 @@ def select_jobs_for_inputs( *, algorithm_id: str, scene_id: str) -> ResultProxy: + log = logging.getLogger(__name__) + log.info('Db select jobs for inputs', action='database query record') query = """ SELECT job_id, CASE status @@ -210,6 +227,8 @@ def select_jobs_for_productline( *, productline_id: str, since: datetime) -> ResultProxy: + log = logging.getLogger(__name__) + log.info('Db select jobs for productline', action='database query record') query = """ SELECT j.job_id, j.algorithm_name, j.algorithm_version, j.created_by, j.created_on, j.name, j.scene_id, j.status, j.tide, j.tide_min_24h, j.tide_max_24h, ST_AsGeoJSON(s.geometry) AS geometry, s.sensor_name, s.captured_on @@ -232,6 +251,8 @@ def select_jobs_for_scene( conn: Connection, *, scene_id: str) -> ResultProxy: + log = logging.getLogger(__name__) + log.info('Db select jobs for scene', action='database query record') query = """ SELECT j.job_id, j.algorithm_name, j.algorithm_version, j.created_by, j.created_on, j.name, j.scene_id, j.status, j.tide, j.tide_min_24h, j.tide_max_24h, ST_AsGeoJSON(s.geometry) AS geometry, s.sensor_name, s.captured_on @@ -251,6 +272,8 @@ def select_jobs_for_user( conn: Connection, *, user_id: str) -> ResultProxy: + log = logging.getLogger(__name__) + log.info('Db select jobs for user', action='database query record') query = """ SELECT j.job_id, j.algorithm_name, j.algorithm_version, j.created_by, j.created_on, j.name, j.scene_id, j.status, j.tide, j.tide_min_24h, j.tide_max_24h, e.error_message, e.execution_step, @@ -269,6 +292,8 @@ def select_jobs_for_user( def 
select_outstanding_jobs(conn: Connection) -> ResultProxy: + log = logging.getLogger(__name__) + log.info('Db select outstanding jobs', action='database query record') query = """ SELECT job_id, DATE_TRUNC('second', NOW() - created_on) AS age @@ -284,6 +309,8 @@ def update_status( *, job_id: str, status: str) -> None: + log = logging.getLogger(__name__) + log.info('Db update status', action='database update record') query = """ UPDATE __beachfront__job SET status = %(status)s diff --git a/bfapi/db/productlines.py b/bfapi/db/productlines.py index 53f9e00d..eaf46265 100644 --- a/bfapi/db/productlines.py +++ b/bfapi/db/productlines.py @@ -12,7 +12,7 @@ # specific language governing permissions and limitations under the License. from datetime import date - +import logging from bfapi.db import Connection, ResultProxy @@ -20,6 +20,8 @@ def delete_productline( conn: Connection, *, productline_id: str) -> ResultProxy: + log = logging.getLogger(__name__) + log.info('Db delete productline', action='database delete record') query = """ UPDATE __beachfront__productline SET deleted = TRUE @@ -45,6 +47,8 @@ def insert_productline( start_on: date, stop_on: date = None, user_id: str) -> ResultProxy: + log = logging.getLogger(__name__) + log.info('Db insert productline', action='database insert record') query = """ INSERT INTO __beachfront__productline (productline_id, algorithm_id, algorithm_name, category, created_by, max_cloud_cover, name, owned_by, spatial_filter_id, start_on, stop_on, bbox) VALUES (%(productline_id)s, %(algorithm_id)s, %(algorithm_name)s, %(category)s, %(user_id)s, %(max_cloud_cover)s, %(name)s, %(user_id)s, %(spatial_filter_id)s, %(start_on)s, %(stop_on)s, ST_MakeEnvelope(%(min_x)s, %(min_y)s, %(max_x)s, %(max_y)s)) @@ -74,6 +78,8 @@ def insert_productline_job( *, job_id: str, productline_id: str) -> ResultProxy: + log = logging.getLogger(__name__) + log.info('Db insert productline job', action='database insert record') query = """ INSERT INTO __beachfront__productline_job (job_id, productline_id) VALUES (%(job_id)s, %(productline_id)s) @@ -87,6 +93,8 @@ def insert_productline_job( def select_all(conn: Connection): + log = logging.getLogger(__name__) + log.info('Db select all', action='database query record') query = """ SELECT productline_id, algorithm_id, algorithm_name, category, compute_mask, created_by, created_on, max_cloud_cover, name, owned_by, spatial_filter_id, start_on, stop_on, @@ -102,6 +110,8 @@ def select_productline( conn: Connection, *, productline_id: str) -> ResultProxy: + log = logging.getLogger(__name__) + log.info('Db select productline', action='database query record') query = """ SELECT productline_id, algorithm_id, algorithm_name, category, compute_mask, created_by, created_on, max_cloud_cover, name, owned_by, spatial_filter_id, start_on, stop_on, @@ -125,6 +135,8 @@ def select_summary_for_scene( min_y: float, max_x: float, max_y: float) -> ResultProxy: + log = logging.getLogger(__name__) + log.info('Db select summary for scene', action='database query record') query = """ SELECT productline_id, algorithm_id, name, owned_by FROM __beachfront__productline diff --git a/bfapi/db/scenes.py b/bfapi/db/scenes.py index 4b183c7f..0157ff2e 100644 --- a/bfapi/db/scenes.py +++ b/bfapi/db/scenes.py @@ -13,7 +13,7 @@ import json from datetime import datetime - +import logging from bfapi.db import Connection @@ -27,6 +27,8 @@ def insert( resolution: int, scene_id: str, sensor_name: str) -> str: + log = logging.getLogger(__name__) + log.info('Db insert scene', 
action='database insert record') query = """ INSERT INTO __beachfront__scene (scene_id, captured_on, catalog_uri, cloud_cover, geometry, resolution, sensor_name) VALUES (%(scene_id)s, %(captured_on)s, %(catalog_uri)s, %(cloud_cover)s, ST_GeomFromGeoJSON(%(geometry)s), diff --git a/bfapi/db/users.py b/bfapi/db/users.py index a30af722..47136e84 100644 --- a/bfapi/db/users.py +++ b/bfapi/db/users.py @@ -12,12 +12,14 @@ # specific language governing permissions and limitations under the License. from bfapi.db import Connection, ResultProxy - +import logging def select_user( conn: Connection, *, user_id: str) -> ResultProxy: + log = logging.getLogger(__name__) + log.info('Db select user', action='database query record') query = """ SELECT u.user_id, u.user_name, u.api_key, u.created_on FROM __beachfront__user u @@ -33,6 +35,8 @@ def select_user_by_api_key( conn: Connection, *, api_key: str) -> ResultProxy: + log = logging.getLogger(__name__) + log.info('Db select user by api key', action='database query record') query = """ SELECT u.user_id, u.user_name, u.api_key, u.created_on FROM __beachfront__user u @@ -50,6 +54,8 @@ def insert_user( user_id: str, user_name: str, api_key: str) -> None: + log = logging.getLogger(__name__) + log.info('Db insert user', action='database insert record') query = """ INSERT INTO __beachfront__user (user_id, user_name, api_key) VALUES (%(user_id)s, %(user_name)s, %(api_key)s) diff --git a/bfapi/logger.py b/bfapi/logger.py index ae4b7c06..07906dc6 100644 --- a/bfapi/logger.py +++ b/bfapi/logger.py @@ -14,9 +14,9 @@ import datetime import os import logging.config - import sys + ACTOR_SYSTEM = 'SYSTEM' APP_NAME = 'beachfront' FACILITY = 1 @@ -39,7 +39,7 @@ } -def init(debug: bool = False): +def init(*, debug: bool, muted: bool): logging.basicConfig( format=FORMAT, level=logging.DEBUG if debug else logging.INFO, @@ -47,6 +47,11 @@ def init(debug: bool = False): style='{', ) logging.setLoggerClass(AuditableLogger) + + # Prevent spamming test outputs + if muted: + logging.root.handlers = [logging.NullHandler()] + log = logging.getLogger(__name__) log.debug('Initialized') diff --git a/bfapi/server.py b/bfapi/server.py index 7d188601..65bfa4a2 100644 --- a/bfapi/server.py +++ b/bfapi/server.py @@ -15,6 +15,7 @@ from flask_cors import CORS from bfapi import config, db, middleware, routes, service +from bfapi import DEBUG_MODE, MUTE_LOGS FALLBACK_MIMETYPE = 'text/plain' @@ -61,6 +62,14 @@ def banner(): if not key.isupper() or 'PASSWORD' in key: continue configurations.append('{key:>38} : {value}'.format(key=key, value=value)) + + warnings = [] + if DEBUG_MODE: + warnings.append(' \u26A0 WARNING: SERVER IS RUNNING IN DEBUG MODE\n') + + if MUTE_LOGS: + warnings.append(' \u26A0 WARNING: LOGS ARE MUTED\n') + print( '-' * 120, '', @@ -69,13 +78,14 @@ def banner(): '', *configurations, '', + *warnings, '-' * 120, sep='\n', flush=True ) -def init(app): +def init(app: flask.Flask): banner() config.validate() db.init() diff --git a/bfapi/service/algorithms.py b/bfapi/service/algorithms.py index 2ccfc7da..9e29a8be 100644 --- a/bfapi/service/algorithms.py +++ b/bfapi/service/algorithms.py @@ -55,6 +55,7 @@ def serialize(self): def list_all() -> List[Algorithm]: log = logging.getLogger(__name__) + log.info('Algorithms service list all algorithms', action='service algorithms list all') try: log.info('Fetching beachfront services from Piazza', action='fetch services') services = piazza.get_services('^BF_Algo_') @@ -84,6 +85,7 @@ def list_all() -> List[Algorithm]: def get(service_id: str) -> 
Algorithm: log = logging.getLogger(__name__) + log.info('Algorithms service get algorithm', action='service algorithms get') try: log.info('Fetch beachfront service `%s` from Piazza', service_id, action='fetch service', actee=service_id) service = piazza.get_service(service_id) diff --git a/bfapi/service/geoserver.py b/bfapi/service/geoserver.py index 6dd1a06d..bdb72939 100644 --- a/bfapi/service/geoserver.py +++ b/bfapi/service/geoserver.py @@ -24,7 +24,8 @@ def install_if_needed(): log = logging.getLogger(__name__) - log.info('Checking to see if installation is required') + log.info('Geoserver service install if needed', action='service geoserver install') + log.info('Checking to see if installation is required', action='Check for required installation') is_installed = True @@ -37,13 +38,14 @@ install_style(DETECTIONS_STYLE_ID) if is_installed: - log.info('GeoServer components exist and will not be reinstalled') + log.info('GeoServer components exist and will not be reinstalled', action='GeoServer exists') else: - log.info('Installation complete!') + log.info('Installation complete!', action='GeoServer installed') def install_layer(layer_id: str): log = logging.getLogger(__name__) + log.info('Geoserver service install layer', action='service geoserver install') log.info('Installing `%s`', layer_id) try: response = requests.post( @@ -131,6 +133,7 @@ def install_layer(layer_id: str): def install_style(style_id: str): log = logging.getLogger(__name__) + log.info('Geoserver service install style', action='service geoserver install') log.info('Installing `%s`', style_id) try: response = requests.post( @@ -197,6 +200,7 @@ def install_style(style_id: str): def layer_exists(layer_id: str) -> bool: log = logging.getLogger(__name__) + log.info('Geoserver service layer exists', action='service geoserver check') try: response = requests.get( 'http://{}/geoserver/rest/layers/{}'.format( # FIXME -- https please @@ -214,6 +218,7 @@ def layer_exists(layer_id: str) -> bool: def style_exists(style_id: str) -> bool: log = logging.getLogger(__name__) + log.info('Geoserver service style exists', action='service geoserver check') try: response = requests.get( 'http://{}/geoserver/rest/styles/{}'.format( # FIXME -- https please diff --git a/bfapi/service/jobs.py b/bfapi/service/jobs.py index c96ff7be..3d706731 100644 --- a/bfapi/service/jobs.py +++ b/bfapi/service/jobs.py @@ -105,6 +105,7 @@ def create( job_name: str, planet_api_key: str) -> Job: log = logging.getLogger(__name__) + log.info('Job service create', action='service job create', actor=user_id) # Fetch prerequisites try: @@ -197,6 +198,7 @@ def create( def forget(user_id: str, job_id: str) -> None: log = logging.getLogger(__name__) + log.info('Job service forget', action='service job forget', actor=user_id) conn = db.get_connection() try: if not db.jobs.exists(conn, job_id=job_id): @@ -212,6 +214,7 @@ def forget(user_id: str, job_id: str) -> None: def get(user_id: str, job_id: str) -> Job: log = logging.getLogger(__name__) + log.info('Job service get', action='service job get', actor=user_id) conn = db.get_connection() try: @@ -248,6 +251,7 @@ def get(user_id: str, job_id: str) -> Job: def get_all(user_id: str) -> List[Job]: log = logging.getLogger(__name__) + log.info('Job service get all', action='service job get all', actor=user_id) conn = db.get_connection() try: @@ -284,6 +288,7 @@ def get_all(user_id: str) -> List[Job]: def get_by_productline(productline_id: str, since: datetime) -> List[Job]: log = 
logging.getLogger(__name__) + log.info('Job service get by productline', action='service job get by productline') conn = db.get_connection() try: @@ -318,6 +323,7 @@ def get_by_productline(productline_id: str, since: datetime) -> List[Job]: def get_by_scene(scene_id: str) -> List[Job]: log = logging.getLogger(__name__) + log.info('Job service get by scene', action='service job get by scene') conn = db.get_connection() try: @@ -357,6 +363,7 @@ def get_detections(job_id: str) -> str: """ log = logging.getLogger(__name__) + log.info('Job service get detections', action='service job get detections') conn = db.get_connection() log.info('Packaging detections for ', job_id) @@ -384,7 +391,8 @@ def start_worker( raise Error('worker already started') log = logging.getLogger(__name__) - log.info('Starting worker thread') + log.info('Job service start worker', action='service job start worker') + log.info('Starting worker thread', action='Worker started') _worker = Worker(job_ttl, interval) _worker.start() @@ -394,6 +402,7 @@ def stop_worker(): if not _worker: return log = logging.getLogger(__name__) + log.info('Job service stop worker', action='service job stop worker') log.info('Stopping worker thread') _worker.terminate() _worker = None diff --git a/bfapi/service/piazza.py b/bfapi/service/piazza.py index 1a69e6e6..de857c79 100644 --- a/bfapi/service/piazza.py +++ b/bfapi/service/piazza.py @@ -12,7 +12,7 @@ # specific language governing permissions and limitations under the License. from typing import List - +import logging import requests import time @@ -70,6 +70,8 @@ def __init__( # def create_trigger(*, data_inputs: dict, event_type_id: str, name: str, service_id: str) -> str: + log = logging.getLogger(__name__) + log.info('Piazza service create trigger', action='service piazza create trigger') try: response = requests.post( 'https://{}/trigger'.format(PIAZZA), @@ -118,6 +120,8 @@ def create_trigger(*, data_inputs: dict, event_type_id: str, name: str, service_ def deploy(data_id: str, *, poll_interval: int = 3, max_poll_attempts: int = 10) -> str: + log = logging.getLogger(__name__) + log.info('Piazza service deploy', action='service piazza deploy') try: response = requests.post( 'https://{}/deployment'.format(PIAZZA), @@ -169,6 +173,8 @@ def deploy(data_id: str, *, poll_interval: int = 3, max_poll_attempts: int = 10) def execute(service_id: str, data_inputs: dict, data_output: list = None) -> str: + log = logging.getLogger(__name__) + log.info('Piazza service execute', action='service piazza execute') try: response = requests.post( 'https://{}/job'.format(PIAZZA), @@ -210,6 +216,8 @@ def execute(service_id: str, data_inputs: dict, data_output: list = None) -> str def get_file(data_id: str) -> requests.Response: + log = logging.getLogger(__name__) + log.info('Piazza service get file', action='service piazza get file') try: response = requests.get( 'https://{}/file/{}'.format(PIAZZA, data_id), @@ -228,6 +236,8 @@ def get_file(data_id: str) -> requests.Response: def get_service(service_id: str) -> ServiceDescriptor: + log = logging.getLogger(__name__) + log.info('Piazza service get service', action='service piazza get service') try: response = requests.get( 'https://{}/service/{}'.format(PIAZZA, service_id), @@ -253,6 +263,8 @@ def get_service(service_id: str) -> ServiceDescriptor: def get_services(pattern: str, count: int = 100) -> List[ServiceDescriptor]: + log = logging.getLogger(__name__) + log.info('Piazza service get services', action='service piazza get services') try: response = 
requests.get( 'https://{}/service'.format(PIAZZA), @@ -282,6 +294,8 @@ def get_services(pattern: str, count: int = 100) -> List[ServiceDescriptor]: def get_status(job_id: str) -> Status: + log = logging.getLogger(__name__) + log.info('Piazza service get status', action='service piazza get status') try: response = requests.get( 'https://{}/job/{}'.format(PIAZZA, job_id), @@ -354,6 +368,8 @@ def get_status(job_id: str) -> Status: def get_triggers(name: str) -> list: + log = logging.getLogger(__name__) + log.info('Piazza service get trigger', action='service piazza get trigger') try: response = requests.post( 'https://{}/trigger/query'.format(PIAZZA), @@ -394,6 +410,8 @@ def register_service( timeout: int = 60, url: str, version: str = '0.0') -> str: + log = logging.getLogger(__name__) + log.info('Piazza service register service', action='service piazza register service') try: response = requests.post( 'https://{}/service'.format(PIAZZA), diff --git a/bfapi/service/productlines.py b/bfapi/service/productlines.py index 7569c761..024be789 100644 --- a/bfapi/service/productlines.py +++ b/bfapi/service/productlines.py @@ -103,6 +103,7 @@ def create_productline( stop_on: date, user_id: str) -> ProductLine: log = logging.getLogger(__name__) + log.info('Productline service create productline', action='service productline create productline') algorithm = service.algorithms.get(algorithm_id) productline_id = _create_id() log.info('Creating product line <%s>', productline_id) @@ -147,6 +148,7 @@ def create_productline( def delete_productline(user_id: str, productline_id: str) -> None: log = logging.getLogger(__name__) + log.info('Productline service delete productline', action='service productline delete productline') conn = db.get_connection() try: @@ -168,6 +170,7 @@ def delete_productline(user_id: str, productline_id: str) -> None: def get_all() -> List[ProductLine]: log = logging.getLogger(__name__) + log.info('Productline service get all', action='service productline get all') conn = db.get_connection() try: diff --git a/bfapi/service/scenes.py b/bfapi/service/scenes.py index 60ea095b..bb3f4815 100644 --- a/bfapi/service/scenes.py +++ b/bfapi/service/scenes.py @@ -78,6 +78,7 @@ def __init__( def activate(scene: Scene, planet_api_key: str, user_id: str) -> Optional[str]: log = logging.getLogger(__name__) + log.info('Scenes service activate scene', action='service scenes activate scene') if scene.status == STATUS_ACTIVE: return scene.geotiff_multispectral @@ -110,6 +111,8 @@ def activate(scene: Scene, planet_api_key: str, user_id: str) -> Optional[str]: def create_download_url(scene_id: str, planet_api_key: str = '') -> str: # HACK HACK HACK HACK HACK HACK HACK HACK HACK HACK HACK HACK HACK HACK HACK # FIXME -- hopefully this endpoint can move into the IA Broker eventually + log = logging.getLogger(__name__) + log.info('Scenes service create download url', action='service scenes create download url') return 'https://bf-api.{}/v0/scene/{}.TIF?planet_api_key={}'.format( DOMAIN, scene_id, @@ -120,6 +123,7 @@ def create_download_url(scene_id: str, planet_api_key: str = '') -> str: def get(scene_id: str, planet_api_key: str, *, with_tides: bool = True) -> Scene: log = logging.getLogger(__name__) + log.info('Scenes service get scene', action='service scenes get scene') platform, external_id = _parse_scene_id(scene_id) diff --git a/bfapi/service/users.py b/bfapi/service/users.py index a87d2581..eaff0858 100644 --- a/bfapi/service/users.py +++ b/bfapi/service/users.py @@ -41,6 +41,7 @@ def __init__( def 
authenticate_via_api_key(api_key: str) -> User: log = logging.getLogger(__name__) + log.info('Users service auth api key', action='service users auth api key') if not PATTERN_API_KEY.match(api_key): log.error('Cannot verify malformed API key: "%s"', api_key) @@ -71,6 +72,7 @@ def authenticate_via_api_key(api_key: str) -> User: def authenticate_via_geoaxis(auth_code: str) -> User: log = logging.getLogger(__name__) + log.info('Users service auth geoaxis', action='service users auth geoaxis') access_token = _request_geoaxis_access_token(auth_code) user_id, user_name = _fetch_geoaxis_profile(access_token) @@ -86,6 +88,7 @@ def authenticate_via_geoaxis(auth_code: str) -> User: def get_by_id(user_id: str) -> User: log = logging.getLogger(__name__) + log.info('Users service get by id', action='service users get by id') log.debug('Searching database for user "%s"', user_id) conn = db.get_connection() diff --git a/test/db/test_init.py b/test/db/test_init.py index 6ca4d9ff..25aba682 100644 --- a/test/db/test_init.py +++ b/test/db/test_init.py @@ -21,7 +21,7 @@ def setUp(self): self.conn = unittest.mock.Mock() def test_does_things(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') class InstallTest(unittest.TestCase): @@ -29,4 +29,4 @@ def setUp(self): self.conn = unittest.mock.Mock() def test_does_things(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') diff --git a/test/db/test_jobs.py b/test/db/test_jobs.py index 4c2a5f77..497d7816 100644 --- a/test/db/test_jobs.py +++ b/test/db/test_jobs.py @@ -21,13 +21,13 @@ def setUp(self): self.conn = unittest.mock.Mock() def test_sends_correct_query(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_sends_correct_parameters(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_throws_when_connection_throws(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') class ExistsTest(unittest.TestCase): @@ -35,13 +35,13 @@ def setUp(self): self.conn = unittest.mock.Mock() def test_sends_correct_query(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_sends_correct_parameters(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_throws_when_connection_throws(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') class InsertDetectionTest(unittest.TestCase): @@ -49,13 +49,13 @@ def setUp(self): self.conn = unittest.mock.Mock() def test_sends_correct_query(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_sends_correct_parameters(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_throws_when_connection_throws(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') class InsertJobTest(unittest.TestCase): @@ -63,13 +63,13 @@ def setUp(self): self.conn = unittest.mock.Mock() def test_sends_correct_query(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_sends_correct_parameters(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_throws_when_connection_throws(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') class InsertJobFailureTest(unittest.TestCase): @@ -77,13 +77,13 @@ def setUp(self): self.conn = unittest.mock.Mock() def test_sends_correct_query(self): - self.skipTest('Not implemented') + 
self.skipTest('Not yet implemented') def test_sends_correct_parameters(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_throws_when_connection_throws(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') class InsertJobUserTest(unittest.TestCase): @@ -91,13 +91,13 @@ def setUp(self): self.conn = unittest.mock.Mock() def test_sends_correct_query(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_sends_correct_parameters(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_throws_when_connection_throws(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') class SelectJobTest(unittest.TestCase): @@ -105,13 +105,13 @@ def setUp(self): self.conn = unittest.mock.Mock() def test_sends_correct_query(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_sends_correct_parameters(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_throws_when_connection_throws(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') class SelectJobsForInputsTest(unittest.TestCase): @@ -119,13 +119,13 @@ def setUp(self): self.conn = unittest.mock.Mock() def test_sends_correct_query(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_sends_correct_parameters(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_throws_when_connection_throws(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') class SelectJobsForProductlineTest(unittest.TestCase): @@ -133,13 +133,13 @@ def setUp(self): self.conn = unittest.mock.Mock() def test_sends_correct_query(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_sends_correct_parameters(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_throws_when_connection_throws(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') class SelectJobsForSceneTest(unittest.TestCase): @@ -147,13 +147,13 @@ def setUp(self): self.conn = unittest.mock.Mock() def test_sends_correct_query(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_sends_correct_parameters(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_throws_when_connection_throws(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') class SelectJobsForUserTest(unittest.TestCase): @@ -161,13 +161,13 @@ def setUp(self): self.conn = unittest.mock.Mock() def test_sends_correct_query(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_sends_correct_parameters(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_throws_when_connection_throws(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') class SelectSummaryForStatusTest(unittest.TestCase): @@ -175,13 +175,13 @@ def setUp(self): self.conn = unittest.mock.Mock() def test_sends_correct_query(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_sends_correct_parameters(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_throws_when_connection_throws(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') class 
UpdateStatusTest(unittest.TestCase): @@ -189,10 +189,10 @@ def setUp(self): self.conn = unittest.mock.Mock() def test_sends_correct_query(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_sends_correct_parameters(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_throws_when_connection_throws(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') diff --git a/test/db/test_productlines.py b/test/db/test_productlines.py index 6d4e56d2..3bd89aa5 100644 --- a/test/db/test_productlines.py +++ b/test/db/test_productlines.py @@ -20,19 +20,19 @@ class InsertProductLineTest(unittest.TestCase): def setUp(self): self.conn = unittest.mock.Mock() def test_sends_correct_query(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_sends_correct_parameters(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_throws_when_execution_fails(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') class InsertProductLineJobTest(unittest.TestCase): def setUp(self): self.conn = unittest.mock.Mock() def test_sends_correct_query(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_sends_correct_parameters(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_throws_when_execution_fails(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') diff --git a/test/db/test_scenes.py b/test/db/test_scenes.py index 6d93c9cb..9373a557 100644 --- a/test/db/test_scenes.py +++ b/test/db/test_scenes.py @@ -21,10 +21,10 @@ def setUp(self): self.conn = unittest.mock.Mock() def test_sends_correct_query(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_sends_correct_parameters(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_throws_when_connection_throws(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') diff --git a/test/db/test_users.py b/test/db/test_users.py index e94af743..087c4cde 100644 --- a/test/db/test_users.py +++ b/test/db/test_users.py @@ -21,13 +21,13 @@ def setUp(self): self.conn = unittest.mock.Mock() def test_sends_correct_query(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_sends_correct_parameters(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_throws_when_connection_throws(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') class SelectUserByApiKeyTest(unittest.TestCase): @@ -35,13 +35,13 @@ def setUp(self): self.conn = unittest.mock.Mock() def test_sends_correct_query(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_sends_correct_parameters(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_throws_when_connection_throws(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') class InsertOrUpdateUserTest(unittest.TestCase): @@ -49,10 +49,10 @@ def setUp(self): self.conn = unittest.mock.Mock() def test_sends_correct_query(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_sends_correct_parameters(self): - self.skipTest('Not implemented') + self.skipTest('Not yet implemented') def test_throws_when_connection_throws(self): - self.skipTest('Not implemented') \ 
No newline at end of file + self.skipTest('Not yet implemented') diff --git a/test/fixtures/environment-vars.test.sh b/test/fixtures/environment-vars.test.sh index 00351e3c..4dd4cfb3 100644 --- a/test/fixtures/environment-vars.test.sh +++ b/test/fixtures/environment-vars.test.sh @@ -1,3 +1,4 @@ +MUTE_LOGS=1 GEOAXIS='test-geoaxis.test.localdomain' GEOAXIS_CLIENT_ID='test-geoaxis-client-id' GEOAXIS_SECRET='test-geoaxis-secret' diff --git a/test/helpers.py b/test/helpers.py index 85f9279d..50fbd96b 100644 --- a/test/helpers.py +++ b/test/helpers.py @@ -11,8 +11,10 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. -import typing +import io +import logging import unittest.mock +from typing import List import sqlalchemy.engine as _sqlalchemyengine import sqlalchemy.exc as _sqlalchemyexc @@ -30,16 +32,37 @@ def mock_database(): return mock +def get_logger(name: str, fmt: str = '%(levelname)s - %(message)s'): + wrapper = LoggerWrapper(name, fmt) + wrapper.install() + return wrapper + + +class LoggerWrapper: + def __init__(self, name: str, fmt: str): + self._stream = io.StringIO() + self._logger = logging.getLogger(name) + self._handler = logging.StreamHandler(self._stream) + self._handler.setFormatter(logging.Formatter(fmt)) + + def install(self): + self._logger.handlers.clear() + self._logger.addHandler(self._handler) + + def destroy(self): + self._logger.handlers.clear() + + @property + def lines(self) -> List[str]: + return self._stream.getvalue().splitlines() + + class MockDBConnection(unittest.mock.Mock): def __init__(self, *args, **kwargs): super().__init__(spec=_sqlalchemyengine.Connection, *args, **kwargs) self._original_get_connection = None self._original_print_diagnostics = None - self.transactions = [] # type: typing.List[unittest.mock.Mock] - - # - # Lifecycle - # + self.transactions = [] # type: List[unittest.mock.Mock] def begin(self): transaction = unittest.mock.Mock(spec=_sqlalchemyengine.Transaction) @@ -57,10 +80,6 @@ def destroy(self): bfapi.db.get_connection = self._original_get_connection bfapi.db.print_diagnostics = self._original_print_diagnostics - # - # Assertion Helpers - # - @property def executed(self): return self.execute.called diff --git a/test/routes/test_init.py b/test/routes/test_init.py index cf3eb450..852ad686 100644 --- a/test/routes/test_init.py +++ b/test/routes/test_init.py @@ -11,7 +11,6 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. 
-import logging import unittest from unittest.mock import call, patch @@ -45,17 +44,11 @@ def test_redirects_to_geoaxis_oauth_authorization(self): class LoginTest(unittest.TestCase): def setUp(self): - self._logger = logging.getLogger('bfapi.routes') - self._logger.disabled = True - self.mock_authenticate = self.create_mock('bfapi.service.users.authenticate_via_geoaxis', return_value=None) self.mock_redirect = self.create_mock('flask.redirect') self.request = self.create_mock('flask.request', path='/login', args={}) self.session = self.create_mock('flask.session', spec=dict) - def tearDown(self): - self._logger.disabled = False - def create_mock(self, target, **kwargs): patcher = patch(target, **kwargs) self.addCleanup(patcher.stop) @@ -111,15 +104,9 @@ def test_rejects_when_users_service_throws(self): class LogoutTest(unittest.TestCase): def setUp(self): - self._logger = logging.getLogger('bfapi.routes') - self._logger.disabled = True - self.session = self.create_mock('flask.session', spec=dict) self.request = self.create_mock('flask.request') - def tearDown(self): - self._logger.disabled = False - def create_mock(self, target, **kwargs): patcher = patch(target, **kwargs) self.addCleanup(patcher.stop) diff --git a/test/service/test_algorithms.py b/test/service/test_algorithms.py index 925154ea..41755e1b 100644 --- a/test/service/test_algorithms.py +++ b/test/service/test_algorithms.py @@ -11,7 +11,6 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. -import logging import unittest from unittest.mock import patch, MagicMock @@ -20,13 +19,6 @@ @patch('bfapi.service.piazza.get_services') class ListAllTest(unittest.TestCase): - def setUp(self): - self._logger = logging.getLogger('bfapi.service.algorithms') - self._logger.disabled = True - - def tearDown(self): - self._logger.disabled = False - def test_requests_algorithms_from_piazza(self, mock: MagicMock): algorithms.list_all() self.assertEqual(('^BF_Algo_',), mock.call_args[0]) @@ -118,13 +110,6 @@ def test_throws_when_piazza_throws(self, mock: MagicMock): @patch('bfapi.service.piazza.get_service') class GetTest(unittest.TestCase): - def setUp(self): - self._logger = logging.getLogger('bfapi.service.algorithms') - self._logger.disabled = True - - def tearDown(self): - self._logger.disabled = False - def test_requests_algorithms_from_piazza(self, mock: MagicMock): mock.return_value = create_service() algorithms.get('test-service-id') diff --git a/test/service/test_geoserver.py b/test/service/test_geoserver.py index 4c494bde..1ec8fd2d 100644 --- a/test/service/test_geoserver.py +++ b/test/service/test_geoserver.py @@ -11,7 +11,6 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. 
-import logging import unittest import xml.etree.ElementTree as et from unittest.mock import patch @@ -26,13 +25,6 @@ @rm.Mocker() class InstallIfNeededTest(unittest.TestCase): - def setUp(self): - self._logger = logging.getLogger('bfapi.service.geoserver') - self._logger.disabled = True - - def tearDown(self): - self._logger.disabled = False - def test_calls_correct_urls(self, m: rm.Mocker): m.get('/geoserver/rest/layers/bfdetections') m.get('/geoserver/rest/styles/bfdetections') @@ -65,13 +57,6 @@ def test_installs_detections_style_if_missing(self, m): @rm.Mocker() class InstallLayerTest(unittest.TestCase): - def setUp(self): - self._logger = logging.getLogger('bfapi.service.geoserver') - self._logger.disabled = True - - def tearDown(self): - self._logger.disabled = False - def test_calls_correct_url(self, m: rm.Mocker): m.post('/geoserver/rest/workspaces/piazza/datastores/piazza/featuretypes') geoserver.install_layer('test-layer-id') @@ -93,8 +78,8 @@ def test_sends_correct_payload(self, m: rm.Mocker): self.assertEqual('180.0', xml.findtext('./nativeBoundingBox/maxx')) self.assertEqual('90.0', xml.findtext('./nativeBoundingBox/maxy')) self.assertEqual('test-layer-id', xml.findtext('./metadata/entry[@key="JDBC_VIRTUAL_TABLE"]/virtualTable/name')) - self.assertRegex(xml.findtext('./metadata/entry[@key="JDBC_VIRTUAL_TABLE"]/virtualTable/sql'), - r'SELECT \* FROM \w+') + self.assertIn('SELECT * FROM __beachfront__geoserver', + xml.findtext('./metadata/entry[@key="JDBC_VIRTUAL_TABLE"]/virtualTable/sql')) def test_throws_on_http_error(self, m: rm.Mocker): m.post('/geoserver/rest/workspaces/piazza/datastores/piazza/featuretypes', status_code=500) @@ -110,13 +95,6 @@ def test_throws_if_geoserver_is_unreachable(self, _): @rm.Mocker() class InstallStyleTest(unittest.TestCase): - def setUp(self): - self._logger = logging.getLogger('bfapi.service.geoserver') - self._logger.disabled = True - - def tearDown(self): - self._logger.disabled = False - def test_calls_correct_url_when_creating_sld(self, m: rm.Mocker): m.post('/geoserver/rest/styles') m.put('/geoserver/rest/layers/bfdetections') @@ -185,13 +163,6 @@ def test_throws_if_geoserver_is_unreachable_when_setting_default_style(self, m: @rm.Mocker() class LayerExistsTest(unittest.TestCase): - def setUp(self): - self._logger = logging.getLogger('bfapi.service.geoserver') - self._logger.disabled = True - - def tearDown(self): - self._logger.disabled = False - def test_calls_correct_url(self, m: rm.Mocker): m.get('/geoserver/rest/layers/test-layer-id') geoserver.layer_exists('test-layer-id') @@ -219,13 +190,6 @@ def test_throws_if_geoserver_is_unreachable(self, _): @rm.Mocker() class StyleExistsTest(unittest.TestCase): - def setUp(self): - self._logger = logging.getLogger('bfapi.service.geoserver') - self._logger.disabled = True - - def tearDown(self): - self._logger.disabled = False - def test_calls_correct_url(self, m: rm.Mocker): m.get('/geoserver/rest/styles/test-style-id') geoserver.style_exists('test-style-id') diff --git a/test/service/test_jobs.py b/test/service/test_jobs.py index 01a86377..9b854335 100644 --- a/test/service/test_jobs.py +++ b/test/service/test_jobs.py @@ -11,9 +11,7 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. 
-import io import json -import logging import unittest from datetime import datetime, timedelta from unittest.mock import call, patch, Mock @@ -34,8 +32,7 @@ class CreateJobTest(unittest.TestCase): def setUp(self): self._mockdb = helpers.mock_database() - self._logger = logging.getLogger('bfapi.service.jobs') - self._logger.disabled = True + self.logger = helpers.get_logger('bfapi.service.jobs') self.mock_requests = rm.Mocker() # type: rm.Mocker self.mock_requests.start() @@ -49,20 +46,7 @@ def setUp(self): def tearDown(self): self._mockdb.destroy() - self._logger.disabled = False - - def create_logstream(self) -> io.StringIO: - def cleanup(): - self._logger.propagate = True - - self._logger.propagate = False - self._logger.disabled = False - stream = io.StringIO() - handler = logging.StreamHandler(stream) - handler.setFormatter(logging.Formatter('%(levelname)s - %(message)s')) - self._logger.addHandler(handler) - self.addCleanup(cleanup) - return stream + self.logger.destroy() def create_mock(self, target_name): patcher = patch(target_name) @@ -313,54 +297,54 @@ def test_does_not_create_database_record_if_cannot_start(self): def test_logs_creation_success(self): self.mock_get_algo.return_value = create_algorithm() self.mock_get_scene.return_value = create_scene() - logstream = self.create_logstream() jobs.create('test-user-id', 'test-scene-id', 'test-service-id', 'test-name', 'test-planet-api-key') self.assertEqual([ + 'INFO - Job service create', 'INFO - Dispatching to ', - ], logstream.getvalue().splitlines()) + ], self.logger.lines) def test_logs_creation_failure_during_algorithm_retrieval(self): self.mock_get_algo.side_effect = algorithms.NotFound('test-algo-id') - logstream = self.create_logstream() with self.assertRaises(jobs.PreprocessingError): jobs.create('test-user-id', 'test-scene-id', 'test-algo-id', 'test-name', 'test-planet-api-key') self.assertEqual([ + 'INFO - Job service create', 'ERROR - Preprocessing error: algorithm `test-algo-id` does not exist', - ], logstream.getvalue().splitlines()) + ], self.logger.lines) def test_logs_creation_failure_during_scene_retrieval(self): self.mock_get_algo.return_value = create_algorithm() self.mock_get_scene.side_effect = scenes.NotFound('test-scene-id') - logstream = self.create_logstream() with self.assertRaises(jobs.PreprocessingError): jobs.create('test-user-id', 'test-scene-id', 'test-algo-id', 'test-name', 'test-planet-api-key') self.assertEqual([ + 'INFO - Job service create', 'ERROR - Preprocessing error: scene `test-scene-id` not found in catalog', - ], logstream.getvalue().splitlines()) + ], self.logger.lines) def test_logs_creation_failure_during_execution(self): self.mock_get_algo.return_value = create_algorithm() self.mock_get_scene.return_value = create_scene() self.mock_execute.side_effect = piazza.ServerError(400) - logstream = self.create_logstream() with self.assertRaises(piazza.ServerError): jobs.create('test-user-id', 'test-scene-id', 'test-algo-id', 'test-name', 'test-planet-api-key') self.assertEqual([ + 'INFO - Job service create', 'INFO - Dispatching to ', 'ERROR - Could not execute via Piazza: Piazza server error (HTTP 400)' - ], logstream.getvalue().splitlines()) + ], self.logger.lines) def test_logs_creation_failure_during_database_insert(self): self.mock_get_algo.return_value = create_algorithm() self.mock_get_scene.return_value = create_scene() self.mock_insert_job.side_effect = helpers.create_database_error() - logstream = self.create_logstream() with self.assertRaises(DatabaseError): 
jobs.create('test-user-id', 'test-scene-id', 'test-algo-id', 'test-name', 'test-planet-api-key') self.assertEqual([ + 'INFO - Job service create', 'INFO - Dispatching to ', "ERROR - Could not save job to database", - ], logstream.getvalue().splitlines()) + ], self.logger.lines) def test_throws_when_piazza_throws(self): self.mock_get_algo.return_value = create_algorithm() @@ -406,12 +390,11 @@ def test_throws_when_algorithm_is_unknown(self): class ForgetJobTest(unittest.TestCase): def setUp(self): self._mockdb = helpers.mock_database() - self._logger = logging.getLogger('bfapi.service.jobs') - self._logger.disabled = True + self.logger = helpers.get_logger('bfapi.service.jobs') def tearDown(self): self._mockdb.destroy() - self._logger.disabled = False + self.logger.destroy() def test_deletes_job_user_record(self, _, mock_delete: Mock): jobs.forget('test-user-id', 'test-job-id') @@ -436,12 +419,11 @@ def test_throws_if_delete_failed(self, _, mock_delete: Mock): class GetAllJobsTest(unittest.TestCase): def setUp(self): self._mockdb = helpers.mock_database() - self._logger = logging.getLogger('bfapi.service.jobs') - self._logger.disabled = True + self.logger = helpers.get_logger('bfapi.service.jobs') def tearDown(self): self._mockdb.destroy() - self._logger.disabled = False + self.logger.destroy() def test_returns_a_list_of_jobs(self, mock: Mock): mock.return_value.fetchall.return_value = [create_job_db_record()] @@ -551,12 +533,11 @@ def test_handles_database_errors_gracefully(self, mock: Mock): class GetDetectionsTest(unittest.TestCase): def setUp(self): self._mockdb = helpers.mock_database() - self._logger = logging.getLogger('bfapi.service.jobs') - self._logger.disabled = True + self.logger = helpers.get_logger('bfapi.service.jobs') def tearDown(self): self._mockdb.destroy() - self._logger.disabled = False + self.logger.destroy() def test_returns_a_stringified_feature_collection(self, _, mock_select_detections: Mock): mock_select_detections.return_value.scalar.return_value = '{"type":"FeatureCollection","features":[]}' @@ -595,12 +576,11 @@ def test_throws_if_job_not_found(self, mock_exists: Mock, _): class GetJobTest(unittest.TestCase): def setUp(self): self._mockdb = helpers.mock_database() - self._logger = logging.getLogger('bfapi.service.jobs') - self._logger.disabled = True + self.logger = helpers.get_logger('bfapi.service.jobs') def tearDown(self): self._mockdb.destroy() - self._logger.disabled = False + self.logger.destroy() def test_returns_job(self, mock_select: Mock, _): mock_select.return_value.fetchone.return_value = create_job_db_record() @@ -697,12 +677,11 @@ def test_adds_job_to_user_tracker(self, mock_select: Mock, mock_insert: Mock): class GetByProductlineTest(unittest.TestCase): def setUp(self): self._mockdb = helpers.mock_database() - self._logger = logging.getLogger('bfapi.service.jobs') - self._logger.disabled = True + self.logger = helpers.get_logger('bfapi.service.jobs') def tearDown(self): self._mockdb.destroy() - self._logger.disabled = False + self.logger.destroy() def test_returns_a_list_of_jobs(self, mock: Mock): mock.return_value.fetchall.return_value = [create_job_db_record()] @@ -816,12 +795,11 @@ def test_handles_database_errors_gracefully(self, mock: Mock): class GetBySceneTest(unittest.TestCase): def setUp(self): self._mockdb = helpers.mock_database() - self._logger = logging.getLogger('bfapi.service.jobs') - self._logger.disabled = True + self.logger = helpers.get_logger('bfapi.service.jobs') def tearDown(self): self._mockdb.destroy() - 
self._logger.disabled = False + self.logger.destroy() def test_returns_a_list_of_jobs(self, mock: Mock): mock.return_value.fetchall.return_value = [create_job_db_record()] @@ -930,13 +908,12 @@ def test_handles_database_errors_gracefully(self, mock: Mock): class StartWorkerTest(unittest.TestCase): def setUp(self): self._mockdb = helpers.mock_database() - self._logger = logging.getLogger('bfapi.service.jobs') - self._logger.disabled = True + self.logger = helpers.get_logger('bfapi.service.jobs') def tearDown(self): jobs.stop_worker() self._mockdb.destroy() - self._logger.disabled = False + self.logger.destroy() def test_module_does_not_start_worker_until_invoked(self, _): self.assertIsNone(jobs._worker) @@ -967,12 +944,11 @@ def test_throws_if_worker_already_exists(self, _): class StopWorkerTest(unittest.TestCase): def setUp(self): self._mockdb = helpers.mock_database() - self._logger = logging.getLogger('bfapi.service.jobs') - self._logger.disabled = True + self.logger = helpers.get_logger('bfapi.service.jobs') def tearDown(self): self._mockdb.destroy() - self._logger.disabled = False + self.logger.destroy() def test_stops_worker(self, _): jobs.start_worker() @@ -996,9 +972,8 @@ class WorkerRunTest(unittest.TestCase): def setUp(self): self._mockdb = helpers.mock_database() - self._logger = logging.getLogger('bfapi.service.jobs.worker') - self._logger.disabled = True - self._logger_for_module = logging.getLogger('bfapi.service.jobs') + self.logger = helpers.get_logger('bfapi.service.jobs.worker') + self._logger_for_module = helpers.get_logger('bfapi.service.jobs') self._logger_for_module.disabled = True self.mock_sleep = self.create_mock('time.sleep') @@ -1013,22 +988,9 @@ def setUp(self): def tearDown(self): self._mockdb.destroy() - self._logger.disabled = False + self.logger.destroy() self._logger_for_module.disabled = False - def create_logstream(self) -> io.StringIO: - def cleanup(): - self._logger.propagate = True - - self._logger.propagate = False - self._logger.disabled = False - stream = io.StringIO() - handler = logging.StreamHandler(stream) - handler.setFormatter(logging.Formatter('%(levelname)s - %(message)s')) - self._logger.addHandler(handler) - self.addCleanup(cleanup) - return stream - def create_mock(self, target_name): patcher = patch(target_name) self.addCleanup(patcher.stop) @@ -1068,18 +1030,16 @@ def test_honors_interval(self): self.assertEqual(call(1234), self.mock_sleep.call_args) def test_logs_empty_cycles(self): - logstream = self.create_logstream() worker = self.create_worker() worker.run() self.assertEqual([ 'INFO - Nothing to do; next run at {:%TZ}'.format(datetime.utcnow()), 'INFO - Stopped', - ], logstream.getvalue().splitlines()) + ], self.logger.lines) def test_logs_jobs_failing_during_execution(self): self.mock_select_jobs.return_value.fetchall.return_value = [create_job_db_summary()] self.mock_getstatus.return_value = piazza.Status(piazza.STATUS_ERROR) - logstream = self.create_logstream() worker = self.create_worker() worker.run() self.assertEqual([ @@ -1087,14 +1047,13 @@ def test_logs_jobs_failing_during_execution(self): 'INFO - <001/test-job-id> polled (Error; age=7 days, 12:34:56)', 'INFO - Cycle complete; next run at {:%TZ}'.format(datetime.utcnow()), 'INFO - Stopped', - ], logstream.getvalue().splitlines()) + ], self.logger.lines) def test_logs_jobs_failing_during_geometry_resolution(self): self.mock_select_jobs.return_value.fetchall.return_value = [create_job_db_summary(age=ONE_WEEK)] self.mock_getstatus.return_value = 
piazza.Status(piazza.STATUS_SUCCESS, data_id='test-execution-output-id') self.mock_getfile.side_effect = piazza.ServerError(404) - logstream = self.create_logstream() worker = self.create_worker() worker.run() self.assertEqual([ @@ -1104,7 +1063,7 @@ def test_logs_jobs_failing_during_geometry_resolution(self): 'ERROR - <001/test-job-id> Could not resolve detections data ID: during postprocessing, could not fetch execution output: Piazza server error (HTTP 404)', 'INFO - Cycle complete; next run at {:%TZ}'.format(datetime.utcnow()), 'INFO - Stopped', - ], logstream.getvalue().splitlines()) + ], self.logger.lines) def test_logs_jobs_failing_during_geometry_retrieval(self): def getfile(id_): @@ -1116,7 +1075,6 @@ def getfile(id_): self.mock_getstatus.return_value = piazza.Status(piazza.STATUS_SUCCESS, data_id='test-execution-output-id') self.mock_getfile.side_effect = getfile - logstream = self.create_logstream() worker = self.create_worker() worker.run() self.assertEqual([ @@ -1127,7 +1085,7 @@ def getfile(id_): 'ERROR - <001/test-job-id> Could not fetch data ID : Piazza server error (HTTP 404)', 'INFO - Cycle complete; next run at {:%TZ}'.format(datetime.utcnow()), 'INFO - Stopped', - ], logstream.getvalue().splitlines()) + ], self.logger.lines) def test_logs_jobs_failing_during_geometry_insertion(self): self.mock_select_jobs.return_value.fetchall.return_value = [create_job_db_summary(age=ONE_WEEK)] @@ -1136,7 +1094,6 @@ def test_logs_jobs_failing_during_geometry_insertion(self): self.mock_getfile.return_value.text = 'lorem ipsum' self.mock_insert_detections.side_effect = helpers.create_database_error() - logstream = self.create_logstream() worker = self.create_worker() worker.run() self.assertEqual([ @@ -1148,12 +1105,11 @@ def test_logs_jobs_failing_during_geometry_insertion(self): 'ERROR - <001/test-job-id> Could not save status and detections to database', 'INFO - Cycle complete; next run at {:%TZ}'.format(datetime.utcnow()), 'INFO - Stopped', - ], logstream.getvalue().splitlines()) + ], self.logger.lines) def test_logs_jobs_that_are_still_running(self): self.mock_select_jobs.return_value.fetchall.return_value = [create_job_db_summary(age=timedelta(minutes=20))] self.mock_getstatus.return_value = piazza.Status(piazza.STATUS_RUNNING) - logstream = self.create_logstream() worker = self.create_worker() worker.run() self.assertEqual([ @@ -1161,14 +1117,13 @@ def test_logs_jobs_that_are_still_running(self): 'INFO - <001/test-job-id> polled (Running; age=0:20:00)', 'INFO - Cycle complete; next run at {:%TZ}'.format(datetime.utcnow()), 'INFO - Stopped', - ], logstream.getvalue().splitlines()) + ], self.logger.lines) def test_logs_jobs_that_succeed(self): self.mock_select_jobs.return_value.fetchall.return_value = [create_job_db_summary()] self.mock_getstatus.return_value = piazza.Status(piazza.STATUS_SUCCESS, data_id='test-execution-output-id') self.mock_getfile.return_value.json.return_value = create_execution_output() self.mock_getfile.return_value.text = 'X' * 2048000 - logstream = self.create_logstream() worker = self.create_worker() worker.run() self.assertEqual([ @@ -1179,12 +1134,11 @@ def test_logs_jobs_that_succeed(self): 'INFO - <001/test-job-id> Saving detections to database (2.0MB)', 'INFO - Cycle complete; next run at {:%TZ}'.format(datetime.utcnow()), 'INFO - Stopped', - ], logstream.getvalue().splitlines()) + ], self.logger.lines) def test_logs_jobs_that_time_out(self): self.mock_select_jobs.return_value.fetchall.return_value = [create_job_db_summary(age=ONE_WEEK)] 
self.mock_getstatus.return_value = piazza.Status(piazza.STATUS_RUNNING) - logstream = self.create_logstream() worker = self.create_worker() worker.run() self.assertEqual([ @@ -1193,12 +1147,11 @@ def test_logs_jobs_that_time_out(self): 'WARNING - <001/test-job-id> appears to have stalled and will no longer be tracked', 'INFO - Cycle complete; next run at {:%TZ}'.format(datetime.utcnow()), 'INFO - Stopped', - ], logstream.getvalue().splitlines()) + ], self.logger.lines) def test_logs_piazza_server_errors(self): self.mock_select_jobs.return_value.fetchall.return_value = [create_job_db_summary(age=ONE_WEEK)] self.mock_getstatus.side_effect = piazza.ServerError(500) - logstream = self.create_logstream() worker = self.create_worker() worker.run() self.assertEqual([ @@ -1206,12 +1159,11 @@ def test_logs_piazza_server_errors(self): 'ERROR - <001/test-job-id> call to Piazza failed: Piazza server error (HTTP 500)', 'INFO - Cycle complete; next run at {:%TZ}'.format(datetime.utcnow()), 'INFO - Stopped', - ], logstream.getvalue().splitlines()) + ], self.logger.lines) def test_logs_piazza_response_parsing_errors(self): self.mock_select_jobs.return_value.fetchall.return_value = [create_job_db_summary(age=ONE_WEEK)] self.mock_getstatus.side_effect = piazza.InvalidResponse('test-error', 'lorem ipsum') - logstream = self.create_logstream() worker = self.create_worker() worker.run() self.assertEqual([ @@ -1219,12 +1171,11 @@ def test_logs_piazza_response_parsing_errors(self): 'ERROR - <001/test-job-id> call to Piazza failed: invalid Piazza response: test-error', 'INFO - Cycle complete; next run at {:%TZ}'.format(datetime.utcnow()), 'INFO - Stopped', - ], logstream.getvalue().splitlines()) + ], self.logger.lines) def test_logs_piazza_auth_failures(self): self.mock_select_jobs.return_value.fetchall.return_value = [create_job_db_summary(age=ONE_WEEK)] self.mock_getstatus.side_effect = piazza.Unauthorized() - logstream = self.create_logstream() worker = self.create_worker() worker.run() self.assertEqual([ @@ -1232,7 +1183,7 @@ def test_logs_piazza_auth_failures(self): 'ERROR - <001/test-job-id> credentials rejected during polling!', 'INFO - Cycle complete; next run at {:%TZ}'.format(datetime.utcnow()), 'INFO - Stopped', - ], logstream.getvalue().splitlines()) + ], self.logger.lines) def test_throws_when_database_throws_during_discovery(self): self.mock_select_jobs.side_effect = helpers.create_database_error() diff --git a/test/service/test_productlines.py b/test/service/test_productlines.py index b0d43f75..0870421d 100644 --- a/test/service/test_productlines.py +++ b/test/service/test_productlines.py @@ -11,7 +11,6 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. 
-import logging import unittest from datetime import datetime from unittest.mock import patch, MagicMock @@ -21,7 +20,6 @@ from bfapi.db import DatabaseError from bfapi.service import productlines from bfapi.service.algorithms import Algorithm, NotFound, ValidationError -from bfapi.service.jobs import Job DATE_START = datetime.utcfromtimestamp(1400000000) DATE_STOP = datetime.utcfromtimestamp(1500000000) @@ -33,12 +31,9 @@ class CreateProductlineTest(unittest.TestCase): def setUp(self): self._mockdb = helpers.mock_database() - self._logger = logging.getLogger('bfapi.service.productlines') - self._logger.disabled = True def tearDown(self): self._mockdb.destroy() - self._logger.disabled = False def test_returns_productline(self, _): record = productlines.create_productline( @@ -315,15 +310,12 @@ def test_gracefully_handles_database_error(self, _): class DeleteProductLineTest(unittest.TestCase): def setUp(self): self._mockdb = helpers.mock_database() - self._logger = logging.getLogger('bfapi.service.productlines') - self._logger.disabled = True self.mock_delete = self.create_mock('bfapi.db.productlines.delete_productline') self.mock_select = self.create_mock('bfapi.db.productlines.select_productline') def tearDown(self): self._mockdb.destroy() - self._logger.disabled = False def create_mock(self, target_name): patcher = patch(target_name) @@ -366,12 +358,9 @@ def test_throws_on_database_error_during_delete(self): class GetAllTest(unittest.TestCase): def setUp(self): self._mockdb = helpers.mock_database() - self._logger = logging.getLogger('bfapi.service.productlines') - self._logger.disabled = True def tearDown(self): self._mockdb.destroy() - self._logger.disabled = False def test_returns_list_of_productlines(self, mock: MagicMock): mock.return_value.fetchall.return_value = [create_productline_db_record()] @@ -473,25 +462,6 @@ def create_algorithm(): ) -def create_job(job_id: str = 'test-job-id'): - return Job( - algorithm_name='test-algo-name', - algorithm_version='test-algo-version', - created_by='test-created-by', - created_on=datetime.utcnow(), - geometry={}, - job_id=job_id, - name='test-job-name', - time_of_collect=datetime.utcnow(), - scene_sensor_name='test-scene-sensor-name', - scene_id='test-scene-id', - status='test-status', - tide=5.4321, - tide_min_24h=-10.0, - tide_max_24h=10.0, - ) - - def create_productline_db_record(record_id: str = 'test-productline-id'): return { 'productline_id': record_id, diff --git a/test/service/test_scenes.py b/test/service/test_scenes.py index 408d8a8d..bea110e8 100644 --- a/test/service/test_scenes.py +++ b/test/service/test_scenes.py @@ -12,7 +12,6 @@ # specific language governing permissions and limitations under the License. 
import json -import logging import unittest from datetime import datetime from unittest.mock import patch @@ -28,13 +27,6 @@ @rm.Mocker() class ActivateSceneTest(unittest.TestCase): - def setUp(self): - self._logger = logging.getLogger('bfapi.service.scenes') - self._logger.disabled = True - - def tearDown(self): - self._logger.disabled = False - def create_mock(self, target_name, **kwargs): patcher = patch(target_name, **kwargs) self.addCleanup(patcher.stop) @@ -123,8 +115,6 @@ def test_accepts_blank_planet_api_key(self): class GetSceneTest(unittest.TestCase): def setUp(self): self._mockdb = helpers.mock_database() - self._logger = logging.getLogger('bfapi.service.scenes') - self._logger.disabled = True self.mock_requests = rm.Mocker() # type: rm.Mocker self.mock_requests.start() @@ -132,7 +122,6 @@ def setUp(self): def tearDown(self): self._mockdb.destroy() - self._logger.disabled = False def test_calls_correct_url_for_planetscope_scene(self): self.mock_requests.get('/planet/planetscope/test-scene-id', text=RESPONSE_SCENE_INACTIVE) diff --git a/test/service/test_users.py b/test/service/test_users.py index 55d9c0d5..2ea2ec49 100644 --- a/test/service/test_users.py +++ b/test/service/test_users.py @@ -11,9 +11,7 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. -import io import json -import logging import unittest from datetime import datetime from unittest.mock import patch @@ -21,19 +19,19 @@ import requests import requests_mock as rm +from test import helpers + from bfapi.db import DatabaseError from bfapi.service import users -from test import helpers API_KEY = '0123456789abcdef0123456789abcdef' class AuthenticateViaGeoaxisTest(unittest.TestCase): def setUp(self): - self._logger = logging.getLogger('bfapi.service.users') - self._logger.disabled = True self._mockdb = helpers.mock_database() + self.logger = helpers.get_logger('bfapi.service.users') self.mock_insert_user = self.create_mock('bfapi.db.users.insert_user') self.mock_get_by_id = self.create_mock('bfapi.service.users.get_by_id') self.mock_requests = rm.Mocker() # type: rm.Mocker @@ -42,26 +40,13 @@ def setUp(self): def tearDown(self): self._mockdb.destroy() - self._logger.disabled = False + self.logger.destroy() def create_mock(self, target_name): patcher = patch(target_name) self.addCleanup(patcher.stop) return patcher.start() - def create_logstream(self) -> io.StringIO: - def cleanup(): - self._logger.propagate = True - - self._logger.propagate = False - self._logger.disabled = False - stream = io.StringIO() - handler = logging.StreamHandler(stream) - handler.setFormatter(logging.Formatter('%(levelname)s - %(message)s')) - self._logger.addHandler(handler) - self.addCleanup(cleanup) - return stream - def test_calls_correct_url_for_token_request(self): self.mock_requests.post('/ms_oauth/oauth2/endpoints/oauthservice/tokens', text=RESPONSE_GEOAXIS_TOKEN_ISSUED) self.mock_requests.get('/ms_oauth/resources/userprofile/me', text=RESPONSE_GEOAXIS_PROFILE) @@ -231,112 +216,99 @@ def test_logs_success_for_new_user(self): self.mock_requests.post('/ms_oauth/oauth2/endpoints/oauthservice/tokens', text=RESPONSE_GEOAXIS_TOKEN_ISSUED) self.mock_requests.get('/ms_oauth/resources/userprofile/me', text=RESPONSE_GEOAXIS_PROFILE) self.mock_get_by_id.return_value = None - logstream = self.create_logstream() users.authenticate_via_geoaxis('test-auth-code') self.assertEqual([ + 'INFO - Users service auth geoaxis', 'INFO - Creating user 
account for "cn=test-commonname, OU=test-org-unit, O=test-org, C=test-country"', 'INFO - User "cn=test-commonname, OU=test-org-unit, O=test-org, C=test-country" has logged in successfully', - ], logstream.getvalue().splitlines()) + ], self.logger.lines) def test_logs_success_for_existing_user(self): self.mock_requests.post('/ms_oauth/oauth2/endpoints/oauthservice/tokens', text=RESPONSE_GEOAXIS_TOKEN_ISSUED) self.mock_requests.get('/ms_oauth/resources/userprofile/me', text=RESPONSE_GEOAXIS_PROFILE) self.mock_get_by_id.return_value = create_user_db_record() - logstream = self.create_logstream() users.authenticate_via_geoaxis('test-auth-code') self.assertEqual([ + 'INFO - Users service auth geoaxis', 'INFO - User "cn=test-commonname, OU=test-org-unit, O=test-org, C=test-country" has logged in successfully', - ], logstream.getvalue().splitlines()) + ], self.logger.lines) def test_logs_failure_when_geoaxis_denies_token_request(self): self.mock_requests.post('/ms_oauth/oauth2/endpoints/oauthservice/tokens', text=RESPONSE_GEOAXIS_TOKEN_DENIED, status_code=401) self.mock_requests.get('/ms_oauth/resources/userprofile/me', text=RESPONSE_GEOAXIS_PROFILE) - logstream = self.create_logstream() with self.assertRaises(users.Unauthorized): users.authenticate_via_geoaxis('test-auth-code') self.assertEqual([ + 'INFO - Users service auth geoaxis', 'ERROR - GeoAxis rejected user auth code: {', ' "error": "invalid_grant",', ' "error_description": "Invalid Grant: grant has been revoked; grant_type=azc "', '}', - ], logstream.getvalue().splitlines()) + ], self.logger.lines) def test_logs_failure_when_geoaxis_denies_profile_request(self): self.mock_requests.post('/ms_oauth/oauth2/endpoints/oauthservice/tokens', text=RESPONSE_GEOAXIS_TOKEN_ISSUED) self.mock_requests.get('/ms_oauth/resources/userprofile/me', text=RESPONSE_GEOAXIS_PROFILE_UNAUTHORIZED, status_code=401) - logstream = self.create_logstream() with self.assertRaises(users.Unauthorized): users.authenticate_via_geoaxis('test-auth-code') self.assertEqual([ + 'INFO - Users service auth geoaxis', 'ERROR - GeoAxis rejected access token: {', ' "message": "Failed in authorization",', ' "oicErrorCode": "IDAAS-12345"', '}', - ], logstream.getvalue().splitlines()) + ], self.logger.lines) def test_logs_failure_when_geoaxis_throws_during_token_request(self): self.mock_requests.post('/ms_oauth/oauth2/endpoints/oauthservice/tokens', text='oh noes', status_code=500) self.mock_requests.get('/ms_oauth/resources/userprofile/me', text=RESPONSE_GEOAXIS_PROFILE) - logstream = self.create_logstream() with self.assertRaises(users.GeoaxisError): users.authenticate_via_geoaxis('test-auth-code') self.assertEqual([ + 'INFO - Users service auth geoaxis', 'ERROR - GeoAxis returned HTTP 500: oh noes', - ], logstream.getvalue().splitlines()) + ], self.logger.lines) def test_logs_failure_when_geoaxis_throws_during_profile_request(self): self.mock_requests.post('/ms_oauth/oauth2/endpoints/oauthservice/tokens', text=RESPONSE_GEOAXIS_TOKEN_ISSUED) self.mock_requests.get('/ms_oauth/resources/userprofile/me', text='oh noes', status_code=500) - logstream = self.create_logstream() with self.assertRaises(users.GeoaxisError): users.authenticate_via_geoaxis('test-auth-code') self.assertEqual([ + 'INFO - Users service auth geoaxis', 'ERROR - GeoAxis returned HTTP 500: oh noes', - ], logstream.getvalue().splitlines()) + ], self.logger.lines) def test_logs_failure_from_database_insert(self): self.mock_requests.post('/ms_oauth/oauth2/endpoints/oauthservice/tokens', 
text=RESPONSE_GEOAXIS_TOKEN_ISSUED) self.mock_requests.get('/ms_oauth/resources/userprofile/me', text=RESPONSE_GEOAXIS_PROFILE) self.mock_get_by_id.return_value = None self.mock_insert_user.side_effect = helpers.create_database_error() - logstream = self.create_logstream() with self.assertRaises(DatabaseError): users.authenticate_via_geoaxis('test-auth-code') self.assertEqual([ + 'INFO - Users service auth geoaxis', 'INFO - Creating user account for "cn=test-commonname, OU=test-org-unit, O=test-org, C=test-country"', 'ERROR - Could not save user account "cn=test-commonname, OU=test-org-unit, O=test-org, C=test-country" to database', - ], logstream.getvalue().splitlines()) + ], self.logger.lines) class AuthenticateViaApiKeyTest(unittest.TestCase): def setUp(self): self._mockdb = helpers.mock_database() - self._logger = logging.getLogger('bfapi.service.users') - self._logger.disabled = True + + self.logger = helpers.get_logger('bfapi.service.users') self.mock_select_user_by_api_key = self.create_mock('bfapi.db.users.select_user_by_api_key') def tearDown(self): self._mockdb.destroy() - self._logger.disabled = False + self.logger.destroy() def create_mock(self, target_name): patcher = patch(target_name) self.addCleanup(patcher.stop) return patcher.start() - def create_logstream(self) -> io.StringIO: - def cleanup(): - self._logger.propagate = True - - self._logger.propagate = False - self._logger.disabled = False - stream = io.StringIO() - handler = logging.StreamHandler(stream) - handler.setFormatter(logging.Formatter('%(levelname)s - %(message)s')) - self._logger.addHandler(handler) - self.addCleanup(cleanup) - return stream - def test_returns_a_user(self): self.mock_select_user_by_api_key.return_value.fetchone.return_value = create_user_db_record() user = users.authenticate_via_api_key(API_KEY) @@ -374,69 +346,55 @@ def test_throws_when_api_key_is_malformed(self): def test_logs_success(self): self.mock_select_user_by_api_key.return_value.fetchone.return_value = create_user_db_record() - logstream = self.create_logstream() users.authenticate_via_api_key(API_KEY) self.assertEqual([ - # Intentionally blank - ], logstream.getvalue().splitlines()) + 'INFO - Users service auth api key', + ], self.logger.lines) def test_logs_failure_from_unauthorized_api_key(self): self.mock_select_user_by_api_key.return_value.fetchone.return_value = None - logstream = self.create_logstream() with self.assertRaises(users.Unauthorized): users.authenticate_via_api_key(API_KEY) self.assertEqual([ + 'INFO - Users service auth api key', 'ERROR - Unauthorized API key "0123456789abcdef0123456789abcdef"' - ], logstream.getvalue().splitlines()) + ], self.logger.lines) def test_logs_failure_from_malformed_api_key(self): self.mock_select_user_by_api_key.return_value.fetchone.return_value = None - logstream = self.create_logstream() with self.assertRaises(users.MalformedAPIKey): users.authenticate_via_api_key('definitely not correctly formed') self.assertEqual([ + 'INFO - Users service auth api key', 'ERROR - Cannot verify malformed API key: "definitely not correctly formed"' - ], logstream.getvalue().splitlines()) + ], self.logger.lines) def test_logs_failure_from_database_select(self): self.mock_select_user_by_api_key.side_effect = helpers.create_database_error() - logstream = self.create_logstream() with self.assertRaises(DatabaseError): users.authenticate_via_api_key(API_KEY) self.assertEqual([ + 'INFO - Users service auth api key', """ERROR - Database query for API key "0123456789abcdef0123456789abcdef" failed""", - ], 
logstream.getvalue().splitlines()) + ], self.logger.lines) class GetByIdTest(unittest.TestCase): def setUp(self): self._mockdb = helpers.mock_database() - self._logger = logging.getLogger('bfapi.service.users') - self._logger.disabled = True + + self.logger = helpers.get_logger('bfapi.service.users') self.mock_select_user = self.create_mock('bfapi.db.users.select_user') def tearDown(self): self._mockdb.destroy() - self._logger.disabled = False + self.logger.destroy() def create_mock(self, target_name): patcher = patch(target_name) self.addCleanup(patcher.stop) return patcher.start() - def create_logstream(self) -> io.StringIO: - def cleanup(): - self._logger.propagate = True - - self._logger.propagate = False - self._logger.disabled = False - stream = io.StringIO() - handler = logging.StreamHandler(stream) - handler.setFormatter(logging.Formatter('%(levelname)s - %(message)s')) - self._logger.addHandler(handler) - self.addCleanup(cleanup) - return stream - def test_throws_when_database_query_fails(self): self.mock_select_user.side_effect = helpers.create_database_error() with self.assertRaises(DatabaseError): @@ -469,12 +427,12 @@ def test_assigns_correct_api_key(self): def test_logs_database_failure_during_select(self): self.mock_select_user.side_effect = helpers.create_database_error() - logstream = self.create_logstream() with self.assertRaises(DatabaseError): users.get_by_id('test-user-id') self.assertEqual([ + 'INFO - Users service get by id', """ERROR - Database query for user ID "test-user-id" failed""", - ], logstream.getvalue().splitlines()) + ], self.logger.lines) # diff --git a/test/test_middleware.py b/test/test_middleware.py index 992b067c..f9b9646f 100644 --- a/test/test_middleware.py +++ b/test/test_middleware.py @@ -11,13 +11,13 @@ # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. 
-import io -import logging import unittest from unittest.mock import call, patch import flask +from test import helpers + from bfapi.service import users from bfapi import middleware @@ -60,16 +60,10 @@ def test_adds_correct_cache_control(self): class AuthFilterTest(unittest.TestCase): def setUp(self): - self._logger = logging.getLogger('bfapi.middleware') - self._logger.disabled = True - self.mock_authenticate = self.create_mock('bfapi.service.users.authenticate_via_api_key', side_effect=create_user) self.request = self.create_mock('flask.request', spec=flask.Request) self.session = self.create_mock('flask.session', new={}) - def tearDown(self): - self._logger.disabled = False - def create_mock(self, target_name, **kwargs): patcher = patch(target_name, **kwargs) self.addCleanup(patcher.stop) @@ -162,9 +156,7 @@ class CSRFFilterTest(unittest.TestCase): maxDiff = 4096 def setUp(self): - self._logger = logging.getLogger('bfapi.middleware') - self._logger.disabled = True - + self.logger = helpers.get_logger('bfapi.middleware') self.request = self.create_mock('flask.request', spec=flask.Request, headers={}, @@ -174,26 +166,13 @@ def setUp(self): is_xhr=False) def tearDown(self): - self._logger.disabled = False + self.logger.destroy() def create_mock(self, target_name, **kwargs): patcher = patch(target_name, **kwargs) self.addCleanup(patcher.stop) return patcher.start() - def create_logstream(self) -> io.StringIO: - def cleanup(): - self._logger.propagate = True - - self._logger.propagate = False - self._logger.disabled = False - stream = io.StringIO() - handler = logging.StreamHandler(stream) - handler.setFormatter(logging.Formatter('%(levelname)s - %(message)s')) - self._logger.addHandler(handler) - self.addCleanup(cleanup) - return stream - def test_allows_non_cors_requests(self): endpoints = ( '/v0/services', @@ -298,7 +277,6 @@ def test_rejects_cors_requests_not_marked_as_xhr(self): self.assertEqual(('Access Denied: CORS request validation failed', 403), response) def test_logs_rejection_of_cors_requests_from_unknown_origin(self): - logstream = self.create_logstream() origins = ( 'http://beachfront.geointservices.io', # Not HTTPS 'http://bf-swagger.geointservices.io', # Not HTTPS @@ -318,10 +296,9 @@ def test_logs_rejection_of_cors_requests_from_unknown_origin(self): 'WARNING - Possible CSRF attempt: endpoint=`/protected` origin=`http://instaspotifriendspacebooksterifygram.com` referrer=`None` ip=`1.2.3.4` is_xhr=`True`', 'WARNING - Possible CSRF attempt: endpoint=`/protected` origin=`https://beachfront.geointservices.io.totallynotaphishingattempt.com` referrer=`None` ip=`1.2.3.4` is_xhr=`True`', 'WARNING - Possible CSRF attempt: endpoint=`/protected` origin=`https://bf-swagger.geointservices.io.totallynotaphishingattempt.com` referrer=`None` ip=`1.2.3.4` is_xhr=`True`', - ], logstream.getvalue().splitlines()) + ], self.logger.lines) def test_logs_rejection_of_cors_preflights_from_unknown_origin(self): - logstream = self.create_logstream() origins = ( 'http://beachfront.geointservices.io', # Not HTTPS 'http://bf-swagger.geointservices.io', # Not HTTPS @@ -342,10 +319,9 @@ def test_logs_rejection_of_cors_preflights_from_unknown_origin(self): 'WARNING - Possible CSRF attempt: endpoint=`/protected` origin=`http://instaspotifriendspacebooksterifygram.com` referrer=`None` ip=`1.2.3.4` is_xhr=`True`', 'WARNING - Possible CSRF attempt: endpoint=`/protected` origin=`https://beachfront.geointservices.io.totallynotaphishingattempt.com` referrer=`None` ip=`1.2.3.4` is_xhr=`True`', 'WARNING - Possible 
CSRF attempt: endpoint=`/protected` origin=`https://bf-swagger.geointservices.io.totallynotaphishingattempt.com` referrer=`None` ip=`1.2.3.4` is_xhr=`True`', - ], logstream.getvalue().splitlines()) + ], self.logger.lines) def test_logs_rejection_of_cors_requests_not_marked_as_xhr(self): - logstream = self.create_logstream() origins = AUTHORIZED_ORIGINS for origin in origins: self.request.reset_mock() @@ -365,10 +341,9 @@ def test_logs_rejection_of_cors_requests_not_marked_as_xhr(self): 'WARNING - Possible CSRF attempt: endpoint=`/protected` origin=`https://bf-swagger.stage.geointservices.io` referrer=`None` ip=`1.2.3.4` is_xhr=`False`', 'WARNING - Possible CSRF attempt: endpoint=`/protected` origin=`https://bf-swagger.arbitrary.subdomain.geointservices.io` referrer=`None` ip=`1.2.3.4` is_xhr=`False`', 'WARNING - Possible CSRF attempt: endpoint=`/protected` origin=`https://localhost:8080` referrer=`None` ip=`1.2.3.4` is_xhr=`False`', - ], logstream.getvalue().splitlines()) + ], self.logger.lines) def test_logs_rejection_of_cors_requests_that_look_spoofed(self): - logstream = self.create_logstream() origins = AUTHORIZED_ORIGINS for origin in origins: self.request.reset_mock() @@ -389,13 +364,12 @@ def test_logs_rejection_of_cors_requests_that_look_spoofed(self): 'WARNING - Possible CSRF attempt: endpoint=`/protected` origin=`None` referrer=`https://bf-swagger.stage.geointservices.io` ip=`1.2.3.4` is_xhr=`False`', 'WARNING - Possible CSRF attempt: endpoint=`/protected` origin=`None` referrer=`https://bf-swagger.arbitrary.subdomain.geointservices.io` ip=`1.2.3.4` is_xhr=`False`', 'WARNING - Possible CSRF attempt: endpoint=`/protected` origin=`None` referrer=`https://localhost:8080` ip=`1.2.3.4` is_xhr=`False`', - ], logstream.getvalue().splitlines()) + ], self.logger.lines) class HTTPSFilterTest(unittest.TestCase): def setUp(self): - self._logger = logging.getLogger('bfapi.middleware') - self._logger.disabled = True + self.logger = helpers.get_logger('bfapi.middleware') self.request = self.create_mock('flask.request', path='/test-path', referrer='http://test-referrer') @@ -404,21 +378,8 @@ def create_mock(self, target_name, **kwargs): self.addCleanup(patcher.stop) return patcher.start() - def create_logstream(self) -> io.StringIO: - def cleanup(): - self._logger.propagate = True - - self._logger.propagate = False - self._logger.disabled = False - stream = io.StringIO() - handler = logging.StreamHandler(stream) - handler.setFormatter(logging.Formatter('%(levelname)s - %(message)s')) - self._logger.addHandler(handler) - self.addCleanup(cleanup) - return stream - def tearDown(self): - self._logger.disabled = False + self.logger.destroy() def test_rejects_non_https_requests(self): self.request.is_secure = False @@ -427,11 +388,10 @@ def test_rejects_non_https_requests(self): def test_logs_rejection(self): self.request.is_secure = False - logstream = self.create_logstream() middleware.https_filter() self.assertEqual([ 'WARNING - Rejecting non-HTTPS request: endpoint=`/test-path` referrer=`http://test-referrer`', - ], logstream.getvalue().splitlines()) + ], self.logger.lines) #
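The tests throughout this patch replace the old per-test `create_logstream()` / `logging.getLogger(...).disabled` plumbing with `helpers.get_logger(...)`, asserting against `self.logger.lines` and restoring state with `self.logger.destroy()`. The helper itself is not part of this diff, so the following is only a minimal sketch of what `test/helpers.get_logger` might look like, assuming it keeps the `'%(levelname)s - %(message)s'` format that the removed `create_logstream()` methods used; the real implementation may differ.

    import logging
    from typing import List


    class _CapturingHandler(logging.Handler):
        """Appends each formatted log record to a shared list."""

        def __init__(self, lines: List[str]):
            super().__init__()
            self.setFormatter(logging.Formatter('%(levelname)s - %(message)s'))
            self._lines = lines

        def emit(self, record: logging.LogRecord) -> None:
            self._lines.append(self.format(record))


    class RecordingLogger:
        """Captures a named logger's output as 'LEVEL - message' strings."""

        def __init__(self, name: str):
            self.lines: List[str] = []
            self._logger = logging.getLogger(name)
            self._handler = _CapturingHandler(self.lines)
            self._was_propagating = self._logger.propagate

            # Keep captured records out of the real logging pipeline while the test runs
            self._logger.propagate = False
            self._logger.disabled = False
            self._logger.addHandler(self._handler)

        def destroy(self) -> None:
            """Detach the capturing handler and restore the logger's previous state."""
            self._logger.removeHandler(self._handler)
            self._logger.propagate = self._was_propagating


    def get_logger(name: str) -> RecordingLogger:
        return RecordingLogger(name)

One caveat: `WorkerRunTest` also toggles `.disabled` on the object returned by `get_logger('bfapi.service.jobs')`. A wrapper like the sketch above would not forward that attribute to the underlying logger, so the real helper presumably either returns the logger itself or proxies attribute access.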