diff --git a/src/connectors/aws_collect.py b/src/connectors/aws_collect.py
index 327098606..75aafc12e 100644
--- a/src/connectors/aws_collect.py
+++ b/src/connectors/aws_collect.py
@@ -8,6 +8,7 @@
     ClientError,
     DataNotFoundError,
 )
+from aiohttp.client_exceptions import ServerTimeoutError
 from collections import defaultdict, namedtuple
 import csv
 from datetime import datetime, timedelta
@@ -1244,7 +1245,7 @@ async def load_task_response(client, task):
         ):
             yield x
 
-    except (ClientError, DataNotFoundError) as e:
+    except (ClientError, DataNotFoundError, ServerTimeoutError) as e:
         log.info(format_exception_only(e))
         for x in process_aws_response(task, e):
             yield x
diff --git a/src/connectors/salesforce_event_log.py b/src/connectors/salesforce_event_log.py
index 89c17a910..9be4a2d65 100644
--- a/src/connectors/salesforce_event_log.py
+++ b/src/connectors/salesforce_event_log.py
@@ -143,6 +143,11 @@ def ingest(table_name, options):
             # This will create a single line JSON file containing an array of objects
             json.dump(list(reader), f)
 
+        if shutil.disk_usage("/").free < 2**30:
+            # running out of disk space, next run will catch up
+            break
+
+
     # Copy all the staged .json files into the landing table
     log.info(f'Uploading all files to Snowflake stage: {table_name}.')
     db.copy_file_to_table_stage(table_name, os.path.join(temp_dir, '*.json'))