Dev (#3)
* Changed the SQL to no longer get all partitions from the partition table, only the online and online_active tables. Small change to the main thread for better CPU management

* Added a modified library for the Azure caller

* Added extra logging to cover errors when attempting calls to Azure, added a health check to see if Azure can be reached, and changed the database calls to only get the last 3 months of data (see the sketch after this list). Also includes PasswordHandler.py, as it was missing from the include in previous versions but was in released versions

* Changed the health check timeout to 10 seconds and removed the second call to the health check

* Added retries for AWS calls, changed the format to match the ARN needed for Security Hub, and added enhanced logging

* Increased the version number in the installation script and added the account ID back in as a check for starting the AWS thread

* Changed the names of the config items for Azure Sentinel to match the Sentinel UI
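
The 3-month window mentioned above is not part of the files shown below. A minimal sketch of what such a filter could look like with pyodbc, assuming a hypothetical INCIDENTS table and an EVENT_TIME column (both names are illustrative, not taken from the real schema):

import pyodbc

def fetch_recent_incidents(connection: pyodbc.Connection):
    # Restrict the result set to the last 3 months instead of reading the full history.
    # Table and column names here are placeholders; the real query lives in the
    # database layer that this commit changed.
    query = (
        "SELECT * FROM INCIDENTS "
        "WHERE EVENT_TIME >= DATEADD(month, -3, GETDATE()) "
        "ORDER BY EVENT_TIME"
    )
    cursor = connection.cursor()
    cursor.execute(query)
    return cursor.fetchall()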
michaelNevinFP authored May 6, 2021
1 parent 474f7af commit 2dd5c11
Showing 7 changed files with 77 additions and 44 deletions.
2 changes: 0 additions & 2 deletions ASFFMapper.py
@@ -426,7 +426,6 @@ def map_sql_to_azure():
if cleaned_findings.__len__() >= 1:
date_time_obj = datetime.datetime.strptime(cleaned_findings[-1]["CreatedAt"], '%Y-%m-%dT%H:%M:%SZ')
offset_time = date_time_obj + datetime.timedelta(seconds=1)
#Persistence().set_date(str(offset_time), 'AzureUpdateDate')
return cleaned_findings, str(offset_time)
return cleaned_findings, None
else:
@@ -577,7 +576,6 @@ def map_sql_to_asff():
if cleaned_findings.__len__() >= 1:
date_time_obj = datetime.datetime.strptime(cleaned_findings[-1]["CreatedAt"], '%Y-%m-%dT%H:%M:%SZ')
offset_time = date_time_obj + datetime.timedelta(seconds=1)
Persistence().set_date(str(offset_time), 'AWSUpdateDate')
return cleaned_findings, str(offset_time)
return cleaned_findings, None
else:
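Both mappers derive the offset the same way: take the CreatedAt of the newest cleaned finding and add one second, so the next poll starts just past the last record already handled. Note that the Persistence().set_date calls were removed or commented out here; the date is now persisted only after a successful send (see the Azure and Security Hub handlers below). A minimal sketch of the offset step in isolation:

import datetime

def next_offset(cleaned_findings):
    # CreatedAt of the newest finding plus one second, formatted the same way
    # the mappers return it; None when there is nothing to send.
    if len(cleaned_findings) >= 1:
        created_at = datetime.datetime.strptime(
            cleaned_findings[-1]["CreatedAt"], '%Y-%m-%dT%H:%M:%SZ')
        return str(created_at + datetime.timedelta(seconds=1))
    return None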
9 changes: 2 additions & 7 deletions CloudHandlers/AzureDataCollector.py
@@ -1,11 +1,7 @@
#!/usr/bin/env python3
import requests
from datacollectorapiMOD import client
import logging
import collections

from requests import ReadTimeout

from Config import Configurations, Persistence


@@ -20,10 +16,9 @@ def azure_api_call(customer_id, shared_key, log_type, send_list, offset_time):
Persistence().set_date(str(offset_time), 'AzureUpdateDate')



def azure_data_collector(json_records, offset_time):
customer_id = Configurations.get_configurations()['AzureCustomerId']
shared_key = Configurations.get_configurations()['AzureSharedKey']
customer_id = Configurations.get_configurations()['AzureWorkspaceID']
shared_key = Configurations.get_configurations()['AzurePrimaryKey']
log_type = Configurations.get_configurations()['LogName']

numbers_deque = collections.deque(json_records)
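The Azure credentials are now read under the names used in the Sentinel UI. A minimal sketch of pulling the renamed keys straight from the JSON file, matching the fields shown in the config.json diff further down:

import json

def get_azure_settings(path='config.json'):
    # Keys renamed in this commit to match the Sentinel UI:
    #   AzureCustomerId -> AzureWorkspaceID
    #   AzureSharedKey  -> AzurePrimaryKey
    with open(path, 'r') as f:
        config = json.load(f)
    return config['AzureWorkspaceID'], config['AzurePrimaryKey'], config['LogName']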
66 changes: 50 additions & 16 deletions CloudHandlers/SecurityHubTool.py
@@ -1,6 +1,9 @@
import json
import os
import logging
import collections
import botocore
from botocore.config import Config
from Config import Configurations, Insights, Persistence
import boto3
from botocore.exceptions import EndpointConnectionError, ParamValidationError, ClientError
@@ -10,16 +13,23 @@ def enable_security_hub():

try:
aws_connection().enable_security_hub()
except aws_connection().exceptions.ResourceConflictException as exception:
logging.info(exception)
except (aws_connection().exceptions.ResourceConflictException, botocore.exceptions.ReadTimeoutError) as exception:
if type(exception) == botocore.exceptions.ReadTimeoutError:
logging.error(exception)
elif type(exception) == aws_connection().exceptions.ResourceConflictException:
logging.info('Account is already subscribed to Security Hub')


def enable_import_findings_for_product():

test = Configurations.get_arn()
try:
aws_connection().enable_import_findings_for_product(ProductArn=Configurations.get_arn())
except aws_connection().exceptions.ResourceConflictException as exception:
logging.info(exception)
aws_connection().enable_import_findings_for_product(ProductArn=test)
except (aws_connection().exceptions.ResourceConflictException, botocore.exceptions.ReadTimeoutError) as exception:
if type(exception) == botocore.exceptions.ReadTimeoutError:
logging.error(exception)
elif type(exception) == aws_connection().exceptions.ResourceConflictException:
logging.info('Account Already has enabled import findings for this product')


class CreateInsight:
@@ -87,10 +97,17 @@ def second(self):
def aws_connection():
keys = Configurations()

config = Config(
retries={
'max_attempts': 5,
'mode': 'standard'
}
)
client = boto3.client('securityhub',
aws_access_key_id=keys.get_configurations()['aws_access_key_id'],
aws_secret_access_key=keys.get_configurations()['aws_secret_access_key'],
region_name=keys.get_configurations()['region_name']
region_name=keys.get_configurations()['region_name'],
config=config
)
return client

@@ -111,7 +128,7 @@ def number_1():
CreateInsight.second(CreateInsight())


def amazon_security_hub(asff_findings):
def amazon_security_hub(asff_findings, offset_time):
try:

numbers_deque = collections.deque(asff_findings)
@@ -120,18 +137,35 @@ def amazon_security_hub(asff_findings):
send_list = [numbers_deque.popleft() for _i in range(99)]

try:
aws_connection().batch_import_findings(Findings=send_list)
except ParamValidationError as pv:

logging.error(pv.args[0])
response = aws_connection().batch_import_findings(Findings=send_list)
if response['ResponseMetadata']['HTTPStatusCode'] == 200:
Persistence().set_date(offset_time, 'AWSUpdateDate')
failed_count = response['FailedCount']
success_count = response['SuccessCount']
logging.info(
f' Security Hub successful response, FailedCount : {failed_count}, SuccessCount : {success_count}')
if failed_count:
FailedFindings = json.dumps(response['FailedFindings'])
logging.error(f'Failed Findings - {FailedFindings}')
except ParamValidationError as e:

logging.error(e.args[0])
else:
send_list = ([numbers_deque.popleft() for _i in range(len(numbers_deque))])

try:
aws_connection().batch_import_findings(Findings=send_list)
except ParamValidationError as pv:

logging.error(pv.args[0])
response = aws_connection().batch_import_findings(Findings=send_list)
if response['ResponseMetadata']['HTTPStatusCode'] == 200:
Persistence().set_date(offset_time, 'AWSUpdateDate')
failed_count = response['FailedCount']
success_count = response['SuccessCount']
logging.info(f' Security Hub successful response, FailedCount : {failed_count}, SuccessCount : {success_count}')
if failed_count:
FailedFindings = json.dumps(response['FailedFindings'])
logging.error(f'Failed Findings - {FailedFindings}')
except ParamValidationError as e:

logging.error(e.args[0])

except (EndpointConnectionError, ClientError) as exception:

@@ -178,6 +212,6 @@ def insight_creator():
CreateInsight.second(CreateInsight())
i += 1

except (EndpointConnectionError, ClientError) as exception:
except (EndpointConnectionError, ClientError, botocore.exceptions.ReadTimeoutError) as exception:

logging.error(exception)
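
Taken together, the hunks above give the Security Hub client a standard-mode retry policy and treat a read timeout as something the caller logs and backs off from. A condensed sketch of that combination, using only the calls visible in this diff (the limit of 100 findings per BatchImportFindings request is an AWS constraint, which is why the caller slices the deque first):

import logging
import boto3
import botocore
from botocore.config import Config

def build_securityhub_client(access_key, secret_key, region):
    # Up to 5 attempts with botocore's 'standard' retry mode, as configured
    # in aws_connection() above.
    retry_config = Config(retries={'max_attempts': 5, 'mode': 'standard'})
    return boto3.client('securityhub',
                        aws_access_key_id=access_key,
                        aws_secret_access_key=secret_key,
                        region_name=region,
                        config=retry_config)

def send_batch(client, send_list):
    # Send one slice of at most 100 findings and log the per-batch outcome.
    try:
        response = client.batch_import_findings(Findings=send_list)
    except botocore.exceptions.ReadTimeoutError as exception:
        # The AWS thread catches this and sleeps for 5 minutes before retrying.
        logging.error(exception)
        raise
    if response['ResponseMetadata']['HTTPStatusCode'] == 200:
        logging.info(f"FailedCount: {response['FailedCount']}, "
                     f"SuccessCount: {response['SuccessCount']}")
    return response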
4 changes: 1 addition & 3 deletions Config.py
@@ -115,13 +115,11 @@ def get_configurations():
@staticmethod
def get_arn():
region = ''
aws_account_id = ''
with open('Config.json', 'r') as f:
parsed_json = json.load(f)
region = parsed_json['region_name']
aws_account_id = parsed_json['AwsAccountId']
f.close()
return f'arn:aws:securityhub:{region}:{aws_account_id}:product/forcepoint/forcepoint-dlp'
return f'arn:aws:securityhub:{region}:365761988620:product/forcepoint/forcepoint-dlp'


class Insights:
31 changes: 19 additions & 12 deletions DLPExporter.py
@@ -1,6 +1,8 @@
# !/usr/bin/env python3
import os
import time

import botocore
import pyodbc
import requests
from requests import ReadTimeout
@@ -59,26 +61,26 @@ def __init__(self):

def run(self):
while True:
customer_id = Configurations.get_configurations()['AzureCustomerId']
shared_key = Configurations.get_configurations()['AzureSharedKey']
customer_id = Configurations.get_configurations()['AzureWorkspaceID']
shared_key = Configurations.get_configurations()['AzurePrimaryKey']
log_type = Configurations.get_configurations()['LogName']
api = client.DataCollectorAPIClient(customer_id, shared_key)

try:
health = api.health_check(log_type)
if health.status_code != 500:
#Do logging for test version
# Do logging for test version
json_file, offset_time = Mapper.map_sql_to_azure()
if not json_file:
#logging.info("No Data received, azure thread is sleeping for 5 minutes before retrying")
# logging.info("No Data received, azure thread is sleeping for 5 minutes before retrying")
time.sleep(300)
elif (len(json_file)) >= 1:
azure_data_collector(json_file, offset_time)
else:
logging.error("Azure cannot be reached, azure thread is sleeping for 5 minutes before retrying")
time.sleep(300)
except (requests.exceptions.ConnectionError, ReadTimeout) as e:
logging.error(f'{e}: error occurred, Azure threading is sleeping for 5 minutes before retrying')
logging.error(f'{e}: error occurred, Azure thread is sleeping for 5 minutes before retrying')
time.sleep(300)


@@ -93,11 +95,16 @@ def run(self):

while True:

json_file = Mapper.map_sql_to_asff()
json_file, offset_time = Mapper.map_sql_to_asff()
if not json_file:
time.sleep(300)
elif (len(json_file)) >= 1:
amazon_security_hub(json_file)
try:
amazon_security_hub(json_file, offset_time)
except botocore.exceptions.ReadTimeoutError as exception:
logging.error(f'{exception}: error occurred, AWS thread is sleeping for 5 minutes before '
f'retrying')
time.sleep(300)


class CreateInsight(Thread):
@@ -128,8 +135,8 @@ def run(self):
elif config['Database_Connection']['Trusted_Connection'] == 'no' and (args.key == 0 or '0'):
config.set_key(args.key)
try:
if config['AwsAccountId'] and config['aws_access_key_id'] \
and config['aws_secret_access_key'] and config['region_name']:
if config['AwsAccountId'] and config['aws_access_key_id'] and config['aws_secret_access_key'] \
and config['region_name']:
logging.info('AWS is configured on')
try:
if DatabaseConnection.get_connection() != 'none':
@@ -150,7 +157,7 @@ def run(self):
"aws_access_key_id, aws_secret_access_key, region_name)")

try:
if config['AzureCustomerId'] and config['AzureSharedKey']:
if config['AzureWorkspaceID'] and config['AzurePrimaryKey']:
logging.info('Azure is configured on')

try:
@@ -167,7 +174,7 @@ def run(self):
else:
logging.info("configure the config.json if you need azure")
except KeyError:
logging.info("Ignore if not using Sentinel. Some fields are missing from the config (AzureCustomerId, "
"AzureSharedKey)")
logging.info("Ignore if not using Sentinel. Some fields are missing from the config (AzureWorkspaceID, "
"AzurePrimaryKey)")

os.system("pause")
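
The Azure thread now gates every cycle on a reachability check and backs off for five minutes when Azure is unreachable or no data is returned, and the AWS thread applies the same back-off when a read times out. A stripped-down sketch of one Azure cycle; the health_check call belongs to the modified datacollectorapiMOD client, and its 10-second timeout (from the commit message) is internal to that library, so it is only noted in a comment here:

import time
import logging

RETRY_SLEEP = 300  # 5 minutes, the back-off used throughout DLPExporter.py

def azure_cycle(api, log_type, mapper, sender):
    # One iteration of the Azure thread's loop: check reachability first,
    # then map and send, sleeping on every failure path.
    health = api.health_check(log_type)  # times out after 10 seconds inside the client
    if health.status_code == 500:
        logging.error('Azure cannot be reached, sleeping for 5 minutes before retrying')
        time.sleep(RETRY_SLEEP)
        return
    json_file, offset_time = mapper()
    if not json_file:
        time.sleep(RETRY_SLEEP)
        return
    sender(json_file, offset_time)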
4 changes: 2 additions & 2 deletions config.json
@@ -14,7 +14,7 @@
"UID": "",
"PWD": ""
},
"AzureCustomerId": "",
"AzureSharedKey": "",
"AzureWorkspaceID": "",
"AzurePrimaryKey": "",
"LogName": "ForcepointDLPEvents"
}
5 changes: 3 additions & 2 deletions makeFile.ps1
@@ -91,12 +91,13 @@ if ($azureInstall -eq $false) {
}


Write-Output [96m Creating fp-dlp-exporter-aws-azure-v1-8-2.zip[0m
Write-Output [96m Creating fp-dlp-exporter-aws-azure-v1-8-3.zip[0m
Write-Output -----------------------------
Write-Output -
Write-Output -

Compress-Archive .\fp-dlp-exporter-aws-azure-v1 .\fp-dlp-exporter-aws-azure-v1-8-2.zip

Compress-Archive .\fp-dlp-exporter-aws-azure-v1 .\fp-dlp-exporter-aws-azure-v1-8-3.zip
Write-Output  Clean up[0m
Write-Output -----------------------------
Write-Output -
