Merge pull request #1 from Forcepoint/dev
Added password encryption
michaelNevinFP authored Mar 24, 2021
2 parents cbec3fe + 40857cf commit 06e6fb8
Showing 9 changed files with 225 additions and 104 deletions.
196 changes: 122 additions & 74 deletions ASFFMapper.py
@@ -1,6 +1,7 @@
import json
import datetime

import logging
from logger import LogConfig
from JsonFormatClass import JsonFormatClass, Network, Finding, RelatedFinding, Resource, Details, ProductFields, \
Severity, UserDefinedFields
from Config import Configurations, Persistence
@@ -279,83 +280,128 @@ def map_sql_to_azure():
policy_events = execute_policy_events(partition, row[0])

for policy in policy_events:
extra_data_sql = ExtraData()
event_data = query_events(partition, row[0])
service_id = event_data[0].SERVICE_ID
source_id = event_data[0].SOURCE_ID

user_data = query_users(source_id)
domain_id = user_data[0].DOMAIN_ID

services_data = query_services(service_id)
destination_data = query_destinations_users(partition, row[0])
formatted_date = __iso8601_format(
json.dumps(event_data[0].INSERT_DATE, indent=4, sort_keys=True,
default=str)[1:-4])

findings_object = Finding()
findings_object.GeneratorId = str(event_data[0].ID)
findings_object.CreatedAt = formatted_date
findings_object.Description = event_data[0].SUBJECT or "Description Not found"

findings_object.Protocol = services_data[0].PROTOCOL_ID.split("_")[1]
if domain_id is not None:
findings_object.SourceDomain = query_domains(domain_id)[0].NAME
else:
findings_object.SourceDomain = 'none'
findings_object.SourceIpV4 = user_data[0].IP
findings_object.SourcePort = event_data[0].PORT

if destination_data[0][0].DOMAIN_ID is not None:
findings_object.DestinationDomain = query_domains(destination_data[0][0].DOMAIN_ID)[0].NAME
else:
findings_object.DestinationDomain = 'none'

findings_object.DestinationIpV4 = destination_data[0][0].IP
findings_object.DestinationPort = destination_data[0][0].PORT

findings_object.ExternalId = event_data[0].EXTERNAL_ID

findings_object.Title = 'Forcepoint DLP Incident'
findings_object.UpdatedAt = formatted_date

findings_object.ForcepointDLPSourceIP = user_data[0].IP
findings_object.Text = services_data[0].CHANNEL_NAME
findings_object.UpdatedAt = formatted_date
findings_object.UpdatedBy = services_data[0].AGENT_NAME
try:

extra_data_sql = ExtraData()
event_data = query_events(partition, row[0])
service_id = event_data[0].SERVICE_ID
source_id = event_data[0].SOURCE_ID

user_data = query_users(source_id)
domain_id = user_data[0].DOMAIN_ID if len(user_data) > 0 else None

services_data = query_services(service_id)
destination_data = query_destinations_users(partition, row[0])
formatted_date = __iso8601_format(
json.dumps(event_data[0].INSERT_DATE, indent=4, sort_keys=True,
default=str)[1:-4])

findings_object = Finding()
findings_object.GeneratorId = str(event_data[0].ID)
findings_object.CreatedAt = formatted_date
findings_object.Description = event_data[0].SUBJECT or "Description Not found"

findings_object.Protocol = services_data[0].PROTOCOL_ID.split("_")[1]
if domain_id is not None:
findings_object.SourceDomain = query_domains(domain_id)[0].NAME
else:
findings_object.SourceDomain = 'none'
findings_object.SourceIpV4 = user_data[0].IP if len(user_data) > 0 else None
findings_object.SourcePort = event_data[0].PORT if len(event_data) > 0 else None

try:
if destination_data[0][0].DOMAIN_ID is not None:
findings_object.DestinationDomain = \
query_domains(destination_data[0][0].DOMAIN_ID)[0].NAME if len(
destination_data) > 0 else None
else:
findings_object.DestinationDomain = 'none'
except IndexError:
findings_object.DestinationDomain = 'none'

try:
findings_object.DestinationIpV4 = destination_data[0][0].IP if len(
destination_data) > 0 else None
except IndexError:
findings_object.DestinationIpV4 = 'none'
try:
findings_object.DestinationPort = destination_data[0][0].PORT if len(
destination_data) > 0 else None
except IndexError:
findings_object.DestinationPort = 'none'

findings_object.ExternalId = event_data[0].EXTERNAL_ID if len(event_data) > 0 else None

findings_object.Title = 'Forcepoint DLP Incident'
findings_object.UpdatedAt = formatted_date

findings_object.ForcepointDLPSourceIP = user_data[0].IP if len(user_data) > 0 else None
findings_object.Text = services_data[0].CHANNEL_NAME if len(services_data) > 0 else None
findings_object.UpdatedAt = formatted_date
findings_object.UpdatedBy = services_data[0].AGENT_NAME if len(services_data) > 0 else None

extra_data_sql.extra_data_handler([query_policy_categories(policy.POLICY_CATEGORY_ID)[0].NAME],
'RuleName')

extra_data_sql.extra_data_handler([user_data[0].EMAIL if len(user_data) > 0 else None],
'SourceEmail')
extra_data_sql.extra_data_handler([user_data[0].EXTRA_DATA if len(user_data) > 0 else None],
'SourceExtraData')
extra_data_sql.extra_data_handler([user_data[0].FULL_NAME if len(user_data) > 0 else None],
'SourceFullName')
extra_data_sql.extra_data_handler([user_data[0].HOSTNAME if len(user_data) > 0 else None],
'SourceHostname')
extra_data_sql.extra_data_handler([user_data[0].LOGIN_NAME if len(user_data) > 0 else None],
'SourceLoginName')

for i in range(len(destination_data)):
if len(destination_data[i]) > 0:
extra_data_sql.extra_data_handler([destination_data[i][0].COMMON_NAME],
'DestinationCommonName')
extra_data_sql.extra_data_handler([destination_data[i][0].HOSTNAME], 'DestinationHostname')
extra_data_sql.extra_data_handler([destination_data[i][0].EMAIL], 'DestinationEmail')
extra_data_sql.extra_data_handler([destination_data[i][0].EXTRA_DATA],
'DestinationExtraData')
if i >= 1:
if destination_data[i][0].DOMAIN_ID is not None:
extra_data_sql.extra_data_handler([query_domains(destination_data[i][0].DOMAIN_ID)[
0].NAME], 'DestinationDomain')

else:
findings_object.DestinationDomain = 'none'
extra_data_sql.extra_data_handler([destination_data[i][0].IP], 'DestinationIpV4')
extra_data_sql.extra_data_handler([destination_data[i][0].PORT], 'DestinationPort')
else:
findings_object.DestinationCommonName = 'none'
findings_object.DestinationHostname = 'none'
findings_object.DestinationEmail = 'none'
findings_object.DestinationExtraData = 'none'
findings_object.DestinationDomain = 'none'
findings_object.DestinationIpV4 = 'none'
findings_object.DestinationPort = 'none'

extra_data_sql.extra_data_handler([query_policy_categories(policy.POLICY_CATEGORY_ID)[0].NAME],
'RuleName')
extra_data_sql.extra_data_handler([user_data[0].EMAIL], 'SourceEmail')
extra_data_sql.extra_data_handler([user_data[0].EXTRA_DATA], 'SourceExtraData')
extra_data_sql.extra_data_handler([user_data[0].FULL_NAME], 'SourceFullName')
extra_data_sql.extra_data_handler([user_data[0].HOSTNAME], 'SourceHostname')
extra_data_sql.extra_data_handler([user_data[0].LOGIN_NAME], 'SourceLoginName')
# extra_data_sql.extra_data_handler([user_data[0].Username], 'SourceUsername')
findings_object.Type = 'Forcepoint DLP'

for i in range(len(destination_data)):
extra_data_sql.extra_data_handler([destination_data[i][0].COMMON_NAME], 'DestinationCommonName')
# user_defined_extras['DestinationUsername.' + str(i + 1)]
extra_data_sql.extra_data_handler([destination_data[i][0].HOSTNAME], 'DestinationHostname')
extra_data_sql.extra_data_handler([destination_data[i][0].EMAIL], 'DestinationEmail')
extra_data_sql.extra_data_handler([destination_data[i][0].EXTRA_DATA], 'DestinationExtraData')
if i >= 1:
    if destination_data[i][0].DOMAIN_ID is not None:
        extra_data_sql.extra_data_handler([query_domains(destination_data[i][0].DOMAIN_ID)[
                                               0].NAME], 'DestinationDomain')
    else:
        findings_object.DestinationDomain = 'none'
    extra_data_sql.extra_data_handler([destination_data[i][0].IP], 'DestinationIpV4')
    extra_data_sql.extra_data_handler([destination_data[i][0].PORT], 'DestinationPort')

findings_object.Id = 'incident_Id-%s-rule_id-%s' % (row[0], policy[0])
findings_object.Severity = policy.SEVERITY or 'none'
findings_object.PolicyCategoryId = str(query_policy_categories(policy.POLICY_CATEGORY_ID)[0].ID)
findings_object.__dict__.update(extra_data_sql.extra_data_storage)
logs.append(findings_object.__dict__)

except IndexError:
    event_data = query_events(partition, row[0])

    findings_object.Type = 'Forcepoint DLP'
    try:
        formatted_date = __iso8601_format(
            json.dumps(event_data[0].INSERT_DATE, indent=4, sort_keys=True,
                       default=str)[1:-4])
    except Exception:
        formatted_date = None

    findings_object.Id = 'incident_Id-%s-rule_id-%s' % (row[0], policy[0])
    findings_object.Severity = policy.SEVERITY
    findings_object.PolicyCategoryId = str(query_policy_categories(policy.POLICY_CATEGORY_ID)[0].ID)
    findings_object.__dict__.update(extra_data_sql.extra_data_storage)
    logs.append(findings_object.__dict__)
    row_as_list = [x for x in policy]
    f = open("unsent-events.txt", "a")
    f.writelines(f'Event ID: {str(row_as_list[1])}, Created at: {str(formatted_date)}\n')
    f.close()
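The ExtraData class that accumulates these fields is defined elsewhere in the repository, so the following is only a hypothetical sketch inferred from the call sites above: extra_data_handler takes a list of values plus a field name, and the numbered-suffix handling for repeated destinations is guessed from the old 'DestinationUsername.' + str(i + 1) comment.

# Hypothetical sketch of the ExtraData helper used throughout map_sql_to_azure;
# the real definition is not part of this diff.
class ExtraData:
    def __init__(self):
        self.extra_data_storage = {}

    def extra_data_handler(self, values, name):
        # Store the first occurrence under the bare name; repeats (e.g. a second
        # destination) land under name.2, name.3, and so on.
        for value in values:
            if value is None:
                continue
            key, n = name, 1
            while key in self.extra_data_storage:
                n += 1
                key = '%s.%s' % (name, n)
            self.extra_data_storage[key] = value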

logs_object = JsonFormatClass()

@@ -387,6 +433,7 @@ def map_sql_to_azure():

def map_sql_to_asff():
event_list, partitions = get_events('AWSUpdateDate')
LogConfig()

if bool(partitions):

@@ -520,7 +567,8 @@ def map_sql_to_asff():

for i in range(len(cleaned_findings)):
cleaned_findings[i]['Severity']['Label'] = __severity_label(cleaned_findings[i]['Severity']['Normalized'])
cleaned_findings[i]['Severity']['Normalized'] = (__normalized_severity(__normalized_severity_from_db(cleaned_findings[i]['Severity']['Normalized'])))
cleaned_findings[i]['Severity']['Normalized'] = (
__normalized_severity(__normalized_severity_from_db(cleaned_findings[i]['Severity']['Normalized'])))

if len(cleaned_findings) >= 1:
date_time_obj = datetime.datetime.strptime(cleaned_findings[-1]["CreatedAt"], '%Y-%m-%dT%H:%M:%SZ')
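The private __iso8601_format helper is not part of this diff. Given that json.dumps(event_data[0].INSERT_DATE, default=str)[1:-4] yields a string like 2021-03-24 10:15:30.123, and that map_sql_to_asff later re-parses CreatedAt with '%Y-%m-%dT%H:%M:%SZ', a plausible sketch is:

import datetime

# Plausible sketch of the private __iso8601_format helper (outside this diff).
# Input is assumed to look like "2021-03-24 10:15:30.123"; output matches the
# '%Y-%m-%dT%H:%M:%SZ' format that map_sql_to_asff parses later.
def __iso8601_format(date_string):
    parsed = datetime.datetime.strptime(date_string.split(".")[0], "%Y-%m-%d %H:%M:%S")
    return parsed.strftime("%Y-%m-%dT%H:%M:%SZ")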
32 changes: 18 additions & 14 deletions CloudHandlers/AzureDataCollector.py
@@ -1,33 +1,37 @@
#!/usr/bin/env python3
import requests
from datacollectorapi import client
from datacollectorapiMOD import client
import logging
import collections

from requests import ReadTimeout

from Config import Configurations


def azure_api_call(customer_id, shared_key, log_type, send_list):
api = client.DataCollectorAPIClient(customer_id, shared_key)
try:
result = api.post_data(log_type, send_list, timeout=30.0)
if result:
logging.info(f'azure api response: {result}')
except (requests.exceptions.ConnectionError, ReadTimeout) as e:
logging.error(f'{e}: error occurred')


def azure_data_collector(json_records):
customer_id = Configurations.get_configurations()['AzureCustomerId']
shared_key = Configurations.get_configurations()['AzureSharedKey']
log_type = Configurations.get_configurations()['LogName']

api = client.DataCollectorAPIClient(customer_id, shared_key)

numbers_deque = collections.deque(json_records)

while bool(numbers_deque):

if len(numbers_deque) > 50:
send_list = [numbers_deque.popleft() for _i in range(49)]
try:
api.post_data(log_type, send_list)
except requests.exceptions.ConnectionError:
logging.error('Connection to Azure can not be established')
azure_api_call(customer_id, shared_key, log_type, send_list)

else:
send_list = ([numbers_deque.popleft() for _i in range(len(numbers_deque))])
try:
api.post_data(log_type, send_list)
except requests.exceptions.ConnectionError:
logging.error('Connection to Azure can not be established')



azure_api_call(customer_id, shared_key, log_type, send_list)
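The refactor routes every send through azure_api_call, so ConnectionError and ReadTimeout are logged rather than crashing the export loop. The deque batching itself is unchanged; a standalone sketch of its behaviour (no Azure client required):

import collections

# Sketch: the loop above pops at most 49 records per call while more than 50
# remain, then drains the remainder in one final batch.
records = list(range(120))  # assumed sample payload
numbers_deque = collections.deque(records)
batches = []
while numbers_deque:
    if len(numbers_deque) > 50:
        batches.append([numbers_deque.popleft() for _i in range(49)])
    else:
        batches.append([numbers_deque.popleft() for _i in range(len(numbers_deque))])

print([len(b) for b in batches])  # -> [49, 49, 22]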
36 changes: 36 additions & 0 deletions Config.py
@@ -1,8 +1,42 @@
import json
from pathlib import Path
from PasswordEncryption.PasswordHandler import decrypt_password, encrypt_password

secure_key = ''


def get_key():
return secure_key


def set_key(key):
global secure_key
secure_key = key


def get_db_config():
with open('Config.json', 'r') as jsonFile:
parsed_json = json.load(jsonFile)
return parsed_json


def set_db_config(config):
with open('Config.json', 'w') as jsonFile:
json.dump(config, jsonFile, indent=4)


class DatabaseConnection:

@staticmethod
def save_password(password, username):
key, encrypted_pwd = encrypt_password(password)
config = get_db_config()
config["Database_Connection"]['PWD'] = encrypted_pwd
config["Database_Connection"]['UID'] = username
set_db_config(config)
set_key(key)
return key

@staticmethod
def get_connection():
with open('Config.json', 'r') as jsonFile:
@@ -16,6 +50,8 @@ def get_connection():
return 'none'
else:
del connection_string_list['Trusted_Connection']
connection_string_list['PWD'] = decrypt_password(secure_key, connection_string_list['PWD']).decode(
"utf-8")
if connection_string_list['Server'] == "" or connection_string_list['Database'] == "" or \
connection_string_list['UID'] == "" or connection_string_list['PWD'] == "":
return 'none'
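PasswordEncryption/PasswordHandler.py is one of the changed files whose diff is not shown here. Judging only from the call sites in Config.py (encrypt_password returns a key and ciphertext pair, decrypt_password returns bytes), a minimal Fernet-based sketch might look like this; the actual implementation may differ:

# Hypothetical sketch of PasswordEncryption/PasswordHandler.py, inferred from
# how Config.py calls it; assumes the cryptography package's Fernet.
from cryptography.fernet import Fernet


def encrypt_password(password):
    # Generate a fresh key and return it with the ciphertext, both as str,
    # so the ciphertext can be written back into Config.json.
    key = Fernet.generate_key()
    token = Fernet(key).encrypt(password.encode("utf-8"))
    return key.decode("utf-8"), token.decode("utf-8")


def decrypt_password(key, encrypted_password):
    # Returns bytes; Config.get_connection() calls .decode("utf-8") on the result.
    return Fernet(key.encode("utf-8")).decrypt(encrypted_password.encode("utf-8"))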
17 changes: 13 additions & 4 deletions DLPExporter.py
@@ -12,7 +12,7 @@
from xmltodict import parse
from threading import Thread
import logging
from pathlib import Path
import argparse


class ManualAWS(Thread):
@@ -61,7 +61,6 @@ def run(self):
time.sleep(300)
elif (len(json_file)) >= 1:
azure_data_collector(json_file)
time.sleep(300)


class AWSAutoCheck(Thread):
@@ -80,7 +79,6 @@ def run(self):
time.sleep(300)
elif (len(json_file)) >= 1:
amazon_security_hub(json_file)
time.sleep(300)


class CreateInsight(Thread):
@@ -97,8 +95,19 @@
if __name__ == "__main__":
LogConfig()

parser = argparse.ArgumentParser(description='DLPExporter')
parser.add_argument('--key', action="store", dest='key', default='0')
parser.add_argument('--password', action="store", dest='password', default='0')
parser.add_argument('--username', action="store", dest='username', default='0')
args = parser.parse_args()
config = Configurations.get_configurations()

if config['Database_Connection']['Trusted_Connection'] == 'no' and args.key == '0':
    # No key supplied: encrypt the given password, store it, and save the key.
    key = DatabaseConnection.save_password(args.password, args.username)
    f = open("secret-key.txt", "w")
    f.write(key)
    f.close()
elif config['Database_Connection']['Trusted_Connection'] == 'no' and args.key != '0':
    set_key(args.key)  # module-level set_key from Config; stores the key for this run
try:
if config['AwsAccountId'] and config['aws_access_key_id'] \
and config['aws_secret_access_key'] and config['region_name']:
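Assuming the argparse wiring above, a hypothetical first run supplies the credentials to encrypt, and later runs pass back the saved key (the user name and password values here are placeholders):

python DLPExporter.py --username dlp_user --password 's3cr3t'    (first run: encrypts the password into Config.json and writes secret-key.txt)
python DLPExporter.py --key <contents of secret-key.txt>         (subsequent runs reuse the stored key)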
4 changes: 2 additions & 2 deletions DatabaseConnector.py
@@ -32,14 +32,14 @@ def get_events(cloud_provider):
try:
partitions = get_partitions()

events_query = 'SELECT * FROM [wbsn-data-security].[dbo].[PA_EVENTS_%s] '
events_query = 'SELECT TOP (49) * FROM [wbsn-data-security].[dbo].[PA_EVENTS_%s] '
where_less_than_90 = 'where INSERT_DATE >= dateadd(MM, -3, getdate()) and [INSERT_DATE] > Convert(datetime, ' \
'\'%s\' ) '
if len(partitions) > 1:
events_query = events_query % partitions[0]
events_query += where_less_than_90 % Persistence.get_date(cloud_provider)[cloud_provider]
for partition in partitions:
events_query += 'UNION ALL SELECT * FROM [wbsn-data-security].[dbo].[PA_EVENTS_%S]' % partition
events_query += 'UNION ALL SELECT * FROM [wbsn-data-security].[dbo].[PA_EVENTS_%s]' % partition
events_query += where_less_than_90 % Persistence.get_date(cloud_provider)[cloud_provider]
else:

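The %S to %s correction above matters because %S is not a valid printf-style conversion in Python, so the old UNION ALL branch raised before any SQL was ever sent:

'PA_EVENTS_%s' % '20210101'    # -> 'PA_EVENTS_20210101'
'PA_EVENTS_%S' % '20210101'    # raises ValueError: unsupported format character 'S'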
[Diffs for the remaining 4 changed files did not load.]
