# Enable or disable the available processing modules [on/off].
# If you add a custom processing module to your Cuckoo setup, you have to add
# a dedicated entry in this file, or it won't be executed.
# You can also add additional options under the section of your module and
# they will be available in your Python class.
# (A commented-out example module and matching config section is included at
# the end of this file.)

# Requires the following software/dependencies inside the VM, as described in:
# https://www.fireeye.com/blog/threat-research/2016/02/greater_visibilityt.html
# Windows 7 SP1, .NET at least 4.5, PowerShell 5 preferably over v4
# KB3109118 - Script block logging backport update for WMF4
#   x64 - https://cuckoo.sh/vmcloak/Windows6.1-KB3109118-v4-x64.msu
#   x32 - https://cuckoo.sh/vmcloak/Windows6.1-KB3109118-v4-x86.msu
# KB2819745 - WMF 4 (Windows Management Framework version 4) update for Windows 7
#   x64 - https://cuckoo.sh/vmcloak/Windows6.1-KB2819745-x64-MultiPkg.msu
#   x32 - https://cuckoo.sh/vmcloak/Windows6.1-KB2819745-x86-MultiPkg.msu
# KB3191566 - https://www.microsoft.com/en-us/download/details.aspx?id=54616
# You should create the following registry entries:
# reg add "HKEY_LOCAL_MACHINE\SOFTWARE\Policies\Microsoft\Windows\PowerShell\ModuleLogging\ModuleNames" /v * /t REG_SZ /d * /f /reg:64
# reg add "HKEY_LOCAL_MACHINE\SOFTWARE\Policies\Microsoft\Windows\PowerShell\ScriptBlockLogging" /v EnableScriptBlockLogging /t REG_DWORD /d 00000001 /f /reg:64
# reg add "HKEY_LOCAL_MACHINE\SOFTWARE\Policies\Microsoft\Windows\PowerShell\Transcription" /v EnableTranscripting /t REG_DWORD /d 00000001 /f /reg:64
# reg add "HKEY_LOCAL_MACHINE\SOFTWARE\Policies\Microsoft\Windows\PowerShell\Transcription" /v OutputDirectory /t REG_SZ /d C:\PSTranscipts /f /reg:64
# reg add "HKEY_LOCAL_MACHINE\SOFTWARE\Policies\Microsoft\Windows\PowerShell\Transcription" /v EnableInvocationHeader /t REG_DWORD /d 00000001 /f /reg:64
[curtain]
enabled = yes

[sysmon]
enabled = no

[analysisinfo]
enabled = yes

# FLARE capa -> to update rules: utils/community.py -cr
# install -> cd /tmp && git clone --recurse-submodules https://github.com/fireeye/capa.git && cd capa && git submodule update --init rules && pip3 install .
[flare_capa]
enabled = no
# Generate it always, or only on demand (the user needs to click a button to
# generate it); the module still has to be enabled to use the on-demand feature
on_demand = no
# Analyze binary payloads
static = no
# Analyze CAPE payloads
cape = no
# Analyze ProcDump
procdump = no

[decompression]
enabled = no

[dumptls]
enabled = no

[behavior]
enabled = yes

[debug]
enabled = yes

[detections]
enabled = yes
# Signatures
behavior = yes
yara = yes
suricata = yes
virustotal = no
clamav = no

[dropped]
enabled = yes
# Amount of text to carve from plaintext files (bytes)
buffer = 8192

# It is intended in CAPE for procdump to replace procmemory...
[procdump]
enabled = yes

# ... but this mechanism may still be switched on
[procmemory]
enabled = yes
strings = yes

[procmon]
enabled = no

[memory]
enabled = no

[usage]
enabled = no

[network]
enabled = yes
sort_pcap = no
# DNS whitelisting to ignore domains/IPs configured in network.py.
# This should be disabled when utilizing InetSim/Remnux, as we end up resolving
# the IP from fakedns, which would then remove all domains associated with that
# resolved IP
dnswhitelist = yes
# additional entries
dnswhitelist_file = extra/whitelist_domains.txt
ipwhitelist = no
ipwhitelist_file = extra/whitelist_ips.txt
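
# The two whitelist files referenced above are plain text lists; a minimal,
# illustrative sketch of their assumed format (one entry per line, the entries
# below are examples only, not shipped defaults):
#
#   # extra/whitelist_domains.txt
#   windowsupdate.com
#   teredo.ipv6.microsoft.com
#
#   # extra/whitelist_ips.txt
#   192.0.2.1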

# Should the server use a compressed version of behavioural logs? This helps
# to save space in Mongo, accelerates searches and reduces the size of the
# final JSON report.
[loop_detection]
enabled = no

[static]
enabled = yes
# Scan for UserDB.TXT signature matches
userdb_signature = no
# Enable a WHOIS lookup for the target domain of URL analyses
whois = yes
# If you want to use the Procyon Java decompiler, install procyon with
# 'apt install procyon-decompiler',
# or use the latest jar file and replace it in the path option.
# Uncomment the procyon_path option.
# In testing, it generally seems to produce the best Java decompilation;
# if using the apt version, set it to /usr/bin/procyon
#procyon_path = data/procyon.jar

[strings]
enabled = yes
nullterminated_only = no
minchars = 5

[trid]
# Specify the path to the trid binary to use for static analysis.
enabled = no
identifier = data/trid/trid
definitions = data/trid/triddefs.trd

[targetinfo]
enabled = yes

[virustotal]
enabled = yes
on_demand = no
timeout = 60
# remove empty detections
remove_empty = yes
# Add your VirusTotal API key here. The default API key, kindly provided
# by the VirusTotal team, should enable you with a sufficient throughput;
# while being shared with all our users, it shouldn't affect your use.
key = a0283a2c3d55728300d064874239b5346fb991317e8449fe43c902879d758088
do_file_lookup = yes
do_url_lookup = yes
urlscrub = (^http:\/\/serw\.clicksor\.com\/redir\.php\?url=|&InjectedParam=.+$)

[suricata]
# For notes on getting this to work, check the install_suricata function:
# https://github.com/doomedraven/Tools/blob/master/Sandbox/cape2.sh
enabled = yes
# Runmode "cli" or "socket"
runmode = socket
# Output files
# if evelog is specified, it will be used instead of the per-protocol log files
evelog = eve.json
# per-protocol log files
#
#alertlog = alert.json
#httplog = http.json
#tlslog = tls.json
#sshlog = ssh.json
#dnslog = dns.json
fileslog = files-json.log
filesdir = files
# Amount of text to carve from plaintext files (bytes)
buffer = 8192
# Used for creating an archive of extracted files
7zbin = /usr/bin/7z
zippass = infected
## Runmode "cli" options
bin = /usr/bin/suricata
conf = /etc/suricata/suricata.yaml
## Runmode "socket" options
socket_file = /tmp/suricata-command.socket

[cif]
enabled = no
# URL of the CIF server
url = https://your-cif-server.com/api
# CIF API key
key = your-api-key-here
# time to wait for the server to respond, in seconds
timeout = 60
# minimum confidence level of returned results:
# 25=not confident, 50=automated, 75=somewhat confident, 85=very confident, 95=certain
# defaults to 85
confidence = 85
# don't log queries by default; set to 'no' to log queries
nolog = yes
# max number of results per query
per_lookup_limit = 20
# max number of queries per analysis
per_analysis_limit = 200

[CAPE]
enabled = yes

# Deduplicate screenshots
[deduplication]
# Available hash functions:
#   ahash:      Average hash
#   phash:      Perceptual hash
#   dhash:      Difference hash
#   whash-haar: Haar wavelet hash
#   whash-db4:  Daubechies wavelet hash
# (see the commented imagehash sketch after the [ja3] section below)
enabled = yes
hashmethod = ahash

[vba2graph]
# Mac - brew install graphviz
# Ubuntu - sudo apt-get install graphviz
# Arch - sudo pacman -S graphviz
# sudo pip3 install networkx>=2.1 graphviz>=0.8.4 pydot>=1.2.4
enabled = yes
on_demand = yes

# ja3 fingerprint db with descriptions
# https://github.com/trisulnsm/trisul-scripts/blob/master/lua/frontend_scripts/reassembly/ja3/prints/ja3fingerprint.json
[ja3]
ja3_path = data/ja3/ja3fingerprint.json
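
# The hash methods listed under [deduplication] above map onto the Python
# "imagehash" library. A minimal sketch of comparing two screenshots, assuming
# "pip3 install pillow imagehash" and purely illustrative file names:
#
#   from PIL import Image
#   import imagehash
#
#   h1 = imagehash.average_hash(Image.open("shot_0001.jpg"))   # "ahash"
#   h2 = imagehash.average_hash(Image.open("shot_0002.jpg"))
#   # subtracting two hashes gives the Hamming distance; 0 means duplicates
#   if h1 - h2 == 0:
#       print("duplicate screenshot")
#   # phash -> imagehash.phash, dhash -> imagehash.dhash,
#   # whash-haar/whash-db4 -> imagehash.whash(img, mode="haar"/"db4")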

[maliciousmacrobot]
# https://maliciousmacrobot.readthedocs.io
# Install mmbot:
#   sudo pip3 install mmbot
# Create/set the required paths.
# Populate benign_path and malicious_path with appropriate macro maldocs (try
# the tests/samples in the GitHub repo):
# https://github.com/egaus/MaliciousMacroBot/tree/master/tests/samples
# Create modeldata.pickle with your maldocs (this does not append to the model, it overwrites it):
#
#   mmb = MaliciousMacroBot(benign_path, malicious_path, model_path, retain_sample_contents=False)
#   result = mmb.mmb_init_model(modelRebuild=True)
#
# Copy your model file and vocab.txt to your model_path
enabled = no
benign_path = /opt/cuckoo/data/mmbot/benign
malicious_path = /opt/cuckoo/data/mmbot/malicious
model_path = /opt/cuckoo/data/mmbot/model

[xlsdeobf]
# pip3 install git+https://github.com/DissectMalware/XLMMacroDeobfuscator.git
enabled = no
on_demand = no

[boxjs]
enabled = no
timeout = 60
url = http://your_super_box_js:9000

# Extractors
[mwcp]
enabled = yes
modules_path = modules/processing/parsers/mwcp/

[ratdecoders]
enabled = yes
modules_path = modules/processing/parsers/RATDecoders/

[malduck]
enabled = yes
modules_path = modules/processing/parsers/malduck/

[CAPE_extractors]
enabled = yes
# Must end with /
modules_path = modules/processing/parsers/CAPE/
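
# As noted at the top of this file, a custom processing module needs its own
# section here, and any options you add become available in the Python class.
# A minimal, illustrative sketch assuming the standard Cuckoo-style Processing
# base class; the "[mymodule]" section name, the "timeout" option and the file
# name are hypothetical examples, not part of CAPE:
#
#   [mymodule]
#   enabled = yes
#   timeout = 30
#
#   # modules/processing/mymodule.py
#   from lib.cuckoo.common.abstracts import Processing
#
#   class MyModule(Processing):
#       def run(self):
#           self.key = "mymodule"  # key under which results appear in the report
#           # options read from the [mymodule] section of this file
#           timeout = int(self.options.get("timeout", 30))
#           return {"timeout_used": timeout}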