diff --git a/acknowledgment.md b/acknowledgment.md index 25a09abac44..4b0c8612d5e 100644 --- a/acknowledgment.md +++ b/acknowledgment.md @@ -25,5 +25,6 @@ * kalos * Spiralem * John Peng +* mabj * And many others <3 diff --git a/agent/agent.py b/agent/agent.py index 02c02eb0587..898dd8fad2d 100644 --- a/agent/agent.py +++ b/agent/agent.py @@ -22,7 +22,7 @@ import http.server import socketserver -AGENT_VERSION = "0.10" +AGENT_VERSION = "0.11" AGENT_FEATURES = [ "execpy", "pinning", "logs", "largefile", "unicodepath", ] @@ -36,6 +36,10 @@ state = dict() state["status"] = STATUS_INIT +# To send output to stdout, comment out these 2 lines +sys.stdout = StringIO() +sys.stderr = StringIO() + class MiniHTTPRequestHandler(http.server.SimpleHTTPRequestHandler): server_version = "Cuckoo Agent" @@ -196,7 +200,7 @@ def json_success(message, **kwargs): @app.route("/") def get_index(): return json_success( - "Cuckoo Agent!", version=AGENT_VERSION, features=AGENT_FEATURES + "CAPE Agent!", version=AGENT_VERSION, features=AGENT_FEATURES ) @app.route("/status") @@ -428,10 +432,5 @@ def do_kill(): parser = argparse.ArgumentParser() parser.add_argument("host", nargs="?", default="0.0.0.0") parser.add_argument("port", nargs="?", default="8000") - parser.add_argument("--redirout", action="store_true", default=False) args = parser.parse_args() - if args.redirout: - sys.stdout = StringIO() - sys.stderr = StringIO() - app.run(host=args.host, port=int(args.port)) diff --git a/analyzer/windows/analyzer.py b/analyzer/windows/analyzer.py index fce22395258..9241007dc81 100644 --- a/analyzer/windows/analyzer.py +++ b/analyzer/windows/analyzer.py @@ -808,9 +808,7 @@ def dump_file(self, filepath, metadata="", pids=False, category="files"): log.info("Error dumping file from path \"%s\": %s", filepath, e) return - #filename = "%s_%s" % (sha256[:16], os.path.basename(filepath)) - filename = os.path.basename(filepath) - upload_path = os.path.join(category, filename) + upload_path = os.path.join(category, sha256) try: upload_to_host( diff --git a/analyzer/windows/dll/capemon.dll b/analyzer/windows/dll/capemon.dll index 127719b3545..0b216b65f66 100755 Binary files a/analyzer/windows/dll/capemon.dll and b/analyzer/windows/dll/capemon.dll differ diff --git a/analyzer/windows/dll/capemon_x64.dll b/analyzer/windows/dll/capemon_x64.dll index 678a2b93955..878c9e51658 100755 Binary files a/analyzer/windows/dll/capemon_x64.dll and b/analyzer/windows/dll/capemon_x64.dll differ diff --git a/analyzer/windows/dll/logs_dispatcher.exe b/analyzer/windows/dll/logs_dispatcher.exe deleted file mode 100644 index 09ea1838d03..00000000000 Binary files a/analyzer/windows/dll/logs_dispatcher.exe and /dev/null differ diff --git a/analyzer/windows/dll/zer0m0n.sys b/analyzer/windows/dll/zer0m0n.sys deleted file mode 100644 index ca9ade1c25c..00000000000 Binary files a/analyzer/windows/dll/zer0m0n.sys and /dev/null differ diff --git a/analyzer/windows/dll/zer0m0n_x64.sys b/analyzer/windows/dll/zer0m0n_x64.sys deleted file mode 100644 index 1289ead1ac8..00000000000 Binary files a/analyzer/windows/dll/zer0m0n_x64.sys and /dev/null differ diff --git a/analyzer/windows/lib/core/packages.py b/analyzer/windows/lib/core/packages.py index 8eaffefe7a9..8b0b74d410d 100644 --- a/analyzer/windows/lib/core/packages.py +++ b/analyzer/windows/lib/core/packages.py @@ -58,7 +58,7 @@ def choose_package(file_type, file_name, exports, target): return "rar" elif "Macromedia Flash" in file_type or file_name.endswith(".swf"): return "swf" - elif file_name.endswith((b".py", ".pyc"))
or "Python script" in file_type: + elif file_name.endswith((".py", ".pyc")) or "Python script" in file_type: return "python" elif file_name.endswith(".msi"): return "msi" diff --git a/analyzer/windows/modules/packages/Compression.py b/analyzer/windows/modules/packages/Compression.py index c5390120a78..be5edbf1941 100644 --- a/analyzer/windows/modules/packages/Compression.py +++ b/analyzer/windows/modules/packages/Compression.py @@ -16,8 +16,8 @@ def __init__(self, options={}, config=None): self.config = config self.options = options self.pids = [] - self.options["dll"] = "Compression.dll" - self.options["dll_64"] = "Compression_x64.dll" + self.options["compression"] = "1" + self.options["procdump"] = "0" def start(self, path): args = self.options.get("arguments") diff --git a/analyzer/windows/modules/packages/Compression_dll.py b/analyzer/windows/modules/packages/Compression_dll.py index bfa1e4337a3..aef39ffced9 100644 --- a/analyzer/windows/modules/packages/Compression_dll.py +++ b/analyzer/windows/modules/packages/Compression_dll.py @@ -18,8 +18,8 @@ def __init__(self, options={}, config=None): """@param options: options dict.""" self.config = config self.options = options - self.options["dll"] = "Compression.dll" - self.options["dll_64"] = "Compression_x64.dll" + self.options["compression"] = "1" + self.options["procdump"] = "0" #if self.config.timeout > 10: # self.config.timeout = 5 diff --git a/analyzer/windows/modules/packages/Compression_doc.py b/analyzer/windows/modules/packages/Compression_doc.py index 4fc360b7ed2..b9f2571b4d9 100644 --- a/analyzer/windows/modules/packages/Compression_doc.py +++ b/analyzer/windows/modules/packages/Compression_doc.py @@ -22,8 +22,8 @@ def __init__(self, options={}, config=None): """@param options: options dict.""" self.config = config self.options = options - self.options["dll"] = "Compression.dll" - self.options["dll_64"] = "Compression_x64.dll" + self.options["compression"] = "1" + self.options["procdump"] = "0" def start(self, path): self.options["dll"] = "Compression.dll" diff --git a/analyzer/windows/modules/packages/Compression_js.py b/analyzer/windows/modules/packages/Compression_js.py index 771891be639..e6d9725620b 100644 --- a/analyzer/windows/modules/packages/Compression_js.py +++ b/analyzer/windows/modules/packages/Compression_js.py @@ -17,8 +17,8 @@ def __init__(self, options={}, config=None): """@param options: options dict.""" self.config = config self.options = options - self.options["dll"] = "Compression.dll" - self.options["dll_64"] = "Compression_x64.dll" + self.options["compression"] = "1" + self.options["procdump"] = "0" def start(self, path): wscript = self.get_path("wscript.exe") diff --git a/analyzer/windows/modules/packages/Compression_pdf.py b/analyzer/windows/modules/packages/Compression_pdf.py index 2aea118d32a..31d0ec16af7 100644 --- a/analyzer/windows/modules/packages/Compression_pdf.py +++ b/analyzer/windows/modules/packages/Compression_pdf.py @@ -15,8 +15,8 @@ def __init__(self, options={}, config=None): """@param options: options dict.""" self.config = config self.options = options - self.options["dll"] = "Compression.dll" - self.options["dll_64"] = "Compression_x64.dll" + self.options["compression"] = "1" + self.options["procdump"] = "0" def start(self, path): reader = self.get_path_glob("Adobe Reader") diff --git a/analyzer/windows/modules/packages/Compression_zip.py b/analyzer/windows/modules/packages/Compression_zip.py index 41c7c54baf6..5ff586528bf 100644 --- 
a/analyzer/windows/modules/packages/Compression_zip.py +++ b/analyzer/windows/modules/packages/Compression_zip.py @@ -35,8 +35,8 @@ def __init__(self, options={}, config=None): self.config = config self.options = options self.pids = [] - self.options["dll"] = "Compression.dll" - self.options["dll_64"] = "Compression_x64.dll" + self.options["compression"] = "1" + self.options["procdump"] = "0" def extract_zip(self, zip_path, extract_path, password, recursion_depth): """Extracts a nested ZIP file. diff --git a/analyzer/windows/modules/packages/Emotet.py b/analyzer/windows/modules/packages/Emotet.py index 70ac6c69a9e..48dfd81fcf2 100644 --- a/analyzer/windows/modules/packages/Emotet.py +++ b/analyzer/windows/modules/packages/Emotet.py @@ -16,8 +16,10 @@ def __init__(self, options={}, config=None): self.config = config self.options = options self.pids = [] - self.options["dll"] = "Extraction.dll" - self.options["exclude-apis"] = "RegOpenKeyExA" + self.options["extraction"] = "1" + self.options["procdump"] = "0" + self.options["single-process"] = "1" + self.options["exclude-apis"] = "RegOpenKeyExA:SendMessageA" def start(self, path): args = self.options.get("arguments") diff --git a/analyzer/windows/modules/packages/Emotet_batch.py b/analyzer/windows/modules/packages/Emotet_batch.py index 45d1416e086..4fd7127d2b4 100644 --- a/analyzer/windows/modules/packages/Emotet_batch.py +++ b/analyzer/windows/modules/packages/Emotet_batch.py @@ -32,8 +32,8 @@ def __init__(self, options={}, config=None): self.config = config self.options = options self.pids = [] - self.options["dll"] = "Extraction.dll" - self.options["dll_64"] = "Extraction_x64.dll" + self.options["extraction"] = "1" + self.options["procdump"] = "0" self.options["exclude-apis"] = "RegOpenKeyExA" self.options["single-process"] = "1" diff --git a/analyzer/windows/modules/packages/Emotet_doc.py b/analyzer/windows/modules/packages/Emotet_doc.py index 0cafcbc1432..1fba5700564 100644 --- a/analyzer/windows/modules/packages/Emotet_doc.py +++ b/analyzer/windows/modules/packages/Emotet_doc.py @@ -2,7 +2,6 @@ # Copyright(C) 2019 Kevin O'Reilly (kevoreilly@gmail.com) # See the file 'docs/LICENSE' for copying permission. 
-from __future__ import absolute_import import os from lib.common.abstracts import Package @@ -19,7 +18,8 @@ def __init__(self, options={}, config=None): """@param options: options dict.""" self.config = config self.options = options - self.options["dll"] = "Extraction.dll" + self.options["extraction"] = "1" + self.options["procdump"] = "0" self.options["exclude-apis"] = "RegOpenKeyExA" def start(self, path): diff --git a/analyzer/windows/modules/packages/Extraction.py b/analyzer/windows/modules/packages/Extraction.py index eda9beacb97..e61ad2ecf94 100644 --- a/analyzer/windows/modules/packages/Extraction.py +++ b/analyzer/windows/modules/packages/Extraction.py @@ -4,23 +4,27 @@ from __future__ import absolute_import import os +import shutil from lib.common.abstracts import Package class Extraction(Package): """CAPE Extraction analysis package.""" + #PATHS = [ + # ("SystemRoot", "system32"), + #] def __init__(self, options={}, config=None): """@param options: options dict.""" self.config = config self.options = options self.pids = [] - self.options["dll"] = "Extraction.dll" - self.options["dll_64"] = "Extraction_x64.dll" + self.options["extraction"] = "1" + self.options["procdump"] = "0" def start(self, path): arguments = self.options.get("arguments") - + # If the file doesn't have an extension, add .exe # See CWinApp::SetCurrentHandles(), it will throw # an exception that will crash the app if it does @@ -29,5 +33,5 @@ def start(self, path): new_path = path + ".exe" os.rename(path, new_path) path = new_path - + return self.execute(path, arguments, path) diff --git a/analyzer/windows/modules/packages/Extraction_dll.py b/analyzer/windows/modules/packages/Extraction_dll.py index b5f5964d8f2..b749b950e2a 100644 --- a/analyzer/windows/modules/packages/Extraction_dll.py +++ b/analyzer/windows/modules/packages/Extraction_dll.py @@ -18,8 +18,7 @@ def __init__(self, options={}, config=None): """@param options: options dict.""" self.config = config self.options = options - self.options["dll"] = "Extraction.dll" - self.options["dll_64"] = "Extraction_x64.dll" + self.options["extraction"] = "1" def start(self, path): rundll32 = self.get_path("rundll32.exe") @@ -47,4 +46,4 @@ def start(self, path): rundll32 = newname return self.execute(rundll32, args, path) - + diff --git a/analyzer/windows/modules/packages/Extraction_jar.py b/analyzer/windows/modules/packages/Extraction_jar.py index f3630ead6cf..1862afefb54 100644 --- a/analyzer/windows/modules/packages/Extraction_jar.py +++ b/analyzer/windows/modules/packages/Extraction_jar.py @@ -15,8 +15,8 @@ def __init__(self, options={}, config=None): """@param options: options dict.""" self.config = config self.options = options - self.options["dll"] = "Extraction.dll" - self.options["dll_64"] = "Extraction_x64.dll" + self.options["extraction"] = "1" + self.options["procdump"] = "0" def start(self, path): java = self.get_path_glob("Java") diff --git a/analyzer/windows/modules/packages/Extraction_js.py b/analyzer/windows/modules/packages/Extraction_js.py index 07c6b12ee25..430f7accf69 100644 --- a/analyzer/windows/modules/packages/Extraction_js.py +++ b/analyzer/windows/modules/packages/Extraction_js.py @@ -17,8 +17,8 @@ def __init__(self, options={}, config=None): """@param options: options dict.""" self.config = config self.options = options - self.options["dll"] = "Extraction.dll" - self.options["dll_64"] = "Extraction_x64.dll" + self.options["extraction"] = "1" + self.options["procdump"] = "0" def start(self, path): wscript = self.get_path("wscript.exe") 
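[Editor's note, not part of the diff] The package hunks above and below all apply the same change: instead of selecting a dedicated per-behaviour monitor DLL (Extraction.dll, Compression.dll, ...), a package now only sets option flags understood by the single all-in-one capemon build (extraction=1, compression=1, procdump=0, ...). A minimal sketch of a new package written against this scheme follows; the class name Extraction_vbs and the wscript.exe lookup are illustrative assumptions that mirror the pattern in these hunks, not files from this commit.

    from lib.common.abstracts import Package

    class Extraction_vbs(Package):
        """Hypothetical VBS analysis package using the all-in-one monitor."""
        PATHS = [
            ("SystemRoot", "system32", "wscript.exe"),
        ]

        def __init__(self, options={}, config=None):
            """@param options: options dict."""
            self.config = config
            self.options = options
            # behaviour flags instead of a per-feature DLL
            self.options["extraction"] = "1"
            self.options["procdump"] = "0"

        def start(self, path):
            wscript = self.get_path("wscript.exe")
            args = "\"%s\"" % path
            return self.execute(wscript, args, path)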
diff --git a/analyzer/windows/modules/packages/Extraction_pdf.py b/analyzer/windows/modules/packages/Extraction_pdf.py index 93668e208c1..937c864c0a9 100644 --- a/analyzer/windows/modules/packages/Extraction_pdf.py +++ b/analyzer/windows/modules/packages/Extraction_pdf.py @@ -15,7 +15,8 @@ def __init__(self, options={}, config=None): """@param options: options dict.""" self.config = config self.options = options - self.options["dll"] = "Extraction.dll" + self.options["extraction"] = "1" + self.options["procdump"] = "0" def start(self, path): reader = self.get_path_glob("Adobe Reader") diff --git a/analyzer/windows/modules/packages/Extraction_ps1.py b/analyzer/windows/modules/packages/Extraction_ps1.py index 50eab2b80e5..7361fbf2f37 100644 --- a/analyzer/windows/modules/packages/Extraction_ps1.py +++ b/analyzer/windows/modules/packages/Extraction_ps1.py @@ -17,8 +17,8 @@ def __init__(self, options={}, config=None): """@param options: options dict.""" self.config = config self.options = options - self.options["dll"] = "Extraction.dll" - self.options["dll_64"] = "Extraction_x64.dll" + self.options["extraction"] = "1" + self.options["procdump"] = "0" def start(self, path): powershell = self.get_path_glob("PowerShell") diff --git a/analyzer/windows/modules/packages/Extraction_regsvr.py b/analyzer/windows/modules/packages/Extraction_regsvr.py index f4c81f05465..5c9d93be012 100644 --- a/analyzer/windows/modules/packages/Extraction_regsvr.py +++ b/analyzer/windows/modules/packages/Extraction_regsvr.py @@ -4,6 +4,7 @@ from __future__ import absolute_import import os +import shutil from lib.common.abstracts import Package @@ -17,8 +18,8 @@ def __init__(self, options={}, config=None): """@param options: options dict.""" self.config = config self.options = options - self.options["dll"] = "Extraction.dll" - self.options["dll_64"] = "Extraction_x64.dll" + self.options["extraction"] = "1" + self.options["procdump"] = "0" def start(self, path): regsvr32 = self.get_path("regsvr32.exe") diff --git a/analyzer/windows/modules/packages/Extraction_zip.py b/analyzer/windows/modules/packages/Extraction_zip.py index 4cfa27a50de..bf78e1f2b72 100644 --- a/analyzer/windows/modules/packages/Extraction_zip.py +++ b/analyzer/windows/modules/packages/Extraction_zip.py @@ -31,8 +31,8 @@ def __init__(self, options={}, config=None): self.config = config self.options = options self.pids = [] - self.options["dll"] = "Extraction.dll" - self.options["dll_64"] = "Extraction_x64.dll" + self.options["extraction"] = "1" + self.options["procdump"] = "0" def extract_zip(self, zip_path, extract_path, password, recursion_depth): """Extracts a nested ZIP file. 
@@ -103,8 +103,8 @@ def get_infos(self, zip_path): def start(self, path): root = os.environ["TEMP"] password = self.options.get("password") - exe_regex = re.compile(b'(\.exe|\.scr|\.msi|\.bat|\.lnk|\.js|\.jse|\.vbs|\.vbe|\.wsf)$',flags=re.IGNORECASE) - dll_regex = re.compile(b'(\.dll|\.ocx)$',flags=re.IGNORECASE) + exe_regex = re.compile('(\.exe|\.scr|\.msi|\.bat|\.lnk|\.js|\.jse|\.vbs|\.vbe|\.wsf)$',flags=re.IGNORECASE) + dll_regex = re.compile('(\.dll|\.ocx)$',flags=re.IGNORECASE) zipinfos = self.get_infos(path) self.extract_zip(path, root, password, 0) diff --git a/analyzer/windows/modules/packages/Hancitor.py b/analyzer/windows/modules/packages/Hancitor.py index 3b32be7c5b5..e4c594d8a61 100644 --- a/analyzer/windows/modules/packages/Hancitor.py +++ b/analyzer/windows/modules/packages/Hancitor.py @@ -16,8 +16,7 @@ def __init__(self, options={}, config=None): self.config = config self.options = options self.pids = [] - self.options["dll"] = "Hancitor.dll" - #self.options["dll_64"] = "Hancitor_x64.dll" + self.options["hancitor"] = "1" def start(self, path): args = self.options.get("arguments") diff --git a/analyzer/windows/modules/packages/Hancitor_dll.py b/analyzer/windows/modules/packages/Hancitor_dll.py new file mode 100644 index 00000000000..134a2b041b6 --- /dev/null +++ b/analyzer/windows/modules/packages/Hancitor_dll.py @@ -0,0 +1,43 @@ +# Copyright (C) 2010-2015 Cuckoo Foundation. +# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org +# See the file 'docs/LICENSE' for copying permission. + +from __future__ import absolute_import +import os +import shutil + +from lib.common.abstracts import Package + +class Hancitor_Dll(Package): + """DLL analysis package.""" + PATHS = [ + ("SystemRoot", "system32", "rundll32.exe"), + ] + + def start(self, path): + rundll32 = self.get_path("rundll32.exe") + function = self.options.get("function", "#1") + arguments = self.options.get("arguments") + dllloader = self.options.get("dllloader") + self.options["hancitor"] = "1" + + # Check file extension. + ext = os.path.splitext(path)[-1].lower() + # If the file doesn't have the proper .dll extension force it + # and rename it. This is needed for rundll32 to execute correctly. + # See ticket #354 for details. 
+ if ext != ".dll": + new_path = path + ".dll" + os.rename(path, new_path) + path = new_path + + args = "\"{0}\",{1}".format(path, function) + if arguments: + args += " {0}".format(arguments) + + if dllloader: + newname = os.path.join(os.path.dirname(rundll32), dllloader) + shutil.copy(rundll32, newname) + rundll32 = newname + + return self.execute(rundll32, args, path) diff --git a/analyzer/windows/modules/packages/Hancitor_doc.py b/analyzer/windows/modules/packages/Hancitor_doc.py index 9880887ccfe..73bd87549ef 100644 --- a/analyzer/windows/modules/packages/Hancitor_doc.py +++ b/analyzer/windows/modules/packages/Hancitor_doc.py @@ -20,8 +20,7 @@ def __init__(self, options={}, config=None): """@param options: options dict.""" self.config = config self.options = options - self.options["dll"] = "Hancitor.dll" - #self.options["dll_64"] = "Hancitor_x64.dll" + self.options["hancitor"] = "1" def start(self, path): word = self.get_path_glob("Microsoft Office Word") diff --git a/analyzer/windows/modules/packages/Injection.py b/analyzer/windows/modules/packages/Injection.py index f0da00261af..1322ebf057b 100644 --- a/analyzer/windows/modules/packages/Injection.py +++ b/analyzer/windows/modules/packages/Injection.py @@ -16,8 +16,8 @@ def __init__(self, options={}, config=None): self.config = config self.options = options self.pids = [] - self.options["dll"] = "Injection.dll" - self.options["dll_64"] = "Injection_x64.dll" + self.options["injection"] = "1" + self.options["procdump"] = "0" def start(self, path): args = self.options.get("arguments") diff --git a/analyzer/windows/modules/packages/Injection_dll.py b/analyzer/windows/modules/packages/Injection_dll.py index 7a066e7b36e..82c762d6cad 100644 --- a/analyzer/windows/modules/packages/Injection_dll.py +++ b/analyzer/windows/modules/packages/Injection_dll.py @@ -19,9 +19,9 @@ def __init__(self, options={}, config=None): self.config = config self.options = options self.pids = [] - self.options["dll"] = "Injection.dll" - self.options["dll_64"] = "Injection_x64.dll" - + self.options["injection"] = "1" + self.options["procdump"] = "0" + def start(self, path): rundll32 = self.get_path("rundll32.exe") function = self.options.get("function", "#1") diff --git a/analyzer/windows/modules/packages/Injection_doc.py b/analyzer/windows/modules/packages/Injection_doc.py index d4ef05d447f..13ccf1bcafc 100644 --- a/analyzer/windows/modules/packages/Injection_doc.py +++ b/analyzer/windows/modules/packages/Injection_doc.py @@ -20,8 +20,8 @@ def __init__(self, options={}, config=None): """@param options: options dict.""" self.config = config self.options = options - self.options["dll"] = "Injection.dll" - self.options["dll_64"] = "Injection_x64.dll" + self.options["injection"] = "1" + self.options["procdump"] = "0" def start(self, path): word = self.get_path_glob("Microsoft Office Word") diff --git a/analyzer/windows/modules/packages/Injection_js.py b/analyzer/windows/modules/packages/Injection_js.py index 743f3d4e2aa..a37b0d4ce3f 100644 --- a/analyzer/windows/modules/packages/Injection_js.py +++ b/analyzer/windows/modules/packages/Injection_js.py @@ -17,8 +17,8 @@ def __init__(self, options={}, config=None): """@param options: options dict.""" self.config = config self.options = options - self.options["dll"] = "Injection.dll" - self.options["dll_64"] = "Injection_x64.dll" + self.options["injection"] = "1" + self.options["procdump"] = "0" def start(self, path): wscript = self.get_path("wscript.exe") diff --git a/analyzer/windows/modules/packages/Injection_pdf.py 
b/analyzer/windows/modules/packages/Injection_pdf.py index eff42e3daa9..334ae69b3aa 100644 --- a/analyzer/windows/modules/packages/Injection_pdf.py +++ b/analyzer/windows/modules/packages/Injection_pdf.py @@ -15,7 +15,8 @@ def __init__(self, options={}, config=None): """@param options: options dict.""" self.config = config self.options = options - self.options["dll"] = "Injection.dll" + self.options["injection"] = "1" + self.options["procdump"] = "0" def start(self, path): reader = self.get_path_glob("Adobe Reader") diff --git a/analyzer/windows/modules/packages/Injection_ps1.py b/analyzer/windows/modules/packages/Injection_ps1.py index 3715d1a32d3..c801654a8bd 100644 --- a/analyzer/windows/modules/packages/Injection_ps1.py +++ b/analyzer/windows/modules/packages/Injection_ps1.py @@ -1,8 +1,10 @@ # Copyright (C) 2010-2015 Cuckoo Foundation. # This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org # See the file 'docs/LICENSE' for copying permission. -import os + from __future__ import absolute_import +import os + from lib.common.abstracts import Package class Injection_PS1(Package): @@ -15,7 +17,8 @@ def __init__(self, options={}, config=None): """@param options: options dict.""" self.config = config self.options = options - self.options["dll"] = "Injection.dll" + self.options["injection"] = "1" + self.options["procdump"] = "0" def start(self, path): powershell = self.get_path_glob("PowerShell") diff --git a/analyzer/windows/modules/packages/Injection_zip.py b/analyzer/windows/modules/packages/Injection_zip.py index 8238249e6f0..08f69180b41 100644 --- a/analyzer/windows/modules/packages/Injection_zip.py +++ b/analyzer/windows/modules/packages/Injection_zip.py @@ -35,8 +35,8 @@ def __init__(self, options={}, config=None): self.config = config self.options = options self.pids = [] - self.options["dll"] = "Injection.dll" - self.options["dll_64"] = "Injection_x64.dll" + self.options["injection"] = "1" + self.options["procdump"] = "0" def extract_zip(self, zip_path, extract_path, password, recursion_depth): """Extracts a nested ZIP file. 
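[Editor's note, not part of the diff] The Injection_* packages above follow the same flag-based convention. At submission time these monitor flags travel in the task's options string; the changelog below gives combo=1,extraction=1,injection=1,compression=1 as an example for capemon. A rough sketch of passing such flags through the REST API is shown here; the URL, port and form field names are assumptions about a typical deployment and may differ from your install.

    import requests

    # Hypothetical endpoint; adjust host/port/path to your CAPE web instance.
    CAPE_URL = "http://127.0.0.1:8000/api/tasks/create/file/"

    with open("sample.exe", "rb") as sample:
        resp = requests.post(
            CAPE_URL,
            files={"file": ("sample.exe", sample)},
            # monitor behaviour flags, comma separated, as in the changelog example
            data={"package": "Injection", "options": "injection=1,procdump=0"},
        )
    print(resp.json())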
diff --git a/analyzer/windows/modules/packages/Shellcode-Extraction.py b/analyzer/windows/modules/packages/Shellcode-Extraction.py index 65b126208bb..df7e0f93cd5 100644 --- a/analyzer/windows/modules/packages/Shellcode-Extraction.py +++ b/analyzer/windows/modules/packages/Shellcode-Extraction.py @@ -22,22 +22,22 @@ def __init__(self, options={}, config=None): self.config = config self.options = options self.pids = [] - self.options["dll"] = "Extraction.dll" - + self.options["extraction"] = "1" + self.options["procdump"] = "0" + log.info("Timeout: " + str(self.config.timeout)) def start(self, path): - self.options["dll"] = "Extraction.dll" loaderpath = "bin\\loader.exe" #arguments = path arguments = "shellcode " + path - + # we need to move out of the analyzer directory # due to a check in monitor dll basepath = os.path.dirname(path) newpath = os.path.join(basepath, os.path.basename(loaderpath)) shutil.copy(loaderpath, newpath) - + log.info("[-] newpath : "+newpath) log.info("[-] arguments : "+arguments) #log.info("[-] Path: "+path) diff --git a/changelog.md b/changelog.md index 141650a1f60..5a75f43e455 100644 --- a/changelog.md +++ b/changelog.md @@ -1,3 +1,12 @@ +### [17-01-2020] +* BSON data compression to remove API spamming, [details](http://security.neurolabs.club/2019/12/inline-loop-detection-for-compressing.html), thanks @mabj +* Many bug fixes in cleaners.py, thanks @Enzok + +### [14-01-2020] +* Fix local_settings +* Move to all-in-one DLLs; example option to capemon: combo=1,extraction=1,injection=1,compression=1 +* Fix ratelimit enabled/disabled in /api/ +* Agent now by default sets output to StringIO to make it work with pythonw without extra args ### [08-01-2020] * Screenshot deduplication algorithm is configurable now and default set to ahash, pr #10, thanks @wmetcalf diff --git a/conf/processing.conf b/conf/processing.conf index eae450e2ec4..9b5de956546 100644 --- a/conf/processing.conf +++ b/conf/processing.conf @@ -69,6 +69,11 @@ dnswhitelist = yes # additional entries dnswhitelist_file = extra/whitelist_domains.txt +# Should the server use a compressed version of behavioural logs? This helps +# in saving space in Mongo, accelerates searches and reduces the size of the +# final JSON report. 
+[loop_detection] +enabled = no [static] enabled = yes @@ -107,28 +112,9 @@ do_url_lookup = yes urlscrub = (^http:\/\/serw\.clicksor\.com\/redir\.php\?url=|&InjectedParam=.+$) [suricata] -# Notes on getting this to work: -# Install Suricata 2.1 beta -# $ sudo add-apt-repository ppa:oisf/suricata-beta -# $ sudo apt-get update -# $ sudo apt-get install libhtp1 suricata -# Create /etc/suricata/rules/cuckoo.rules: -# echo "alert http any any -> any any (msg:\"FILE store all\"; filestore; noalert; sid:15; rev:1;)" | sudo tee /etc/suricata/rules/cuckoo.rules -# Edit /etc/suricata/suricata.yaml -# Ensure the eve log is enabled -# You can disable the fast and unified2-alert logs to save space -# Enable file-store, set force-md5 to yes -# Enable file-log -# Add " - cuckoo.rules" to the list under "rules-files:" -# Under "reassembly:", which is under "stream:", set depth to 0 (without any measurement unit) -# Set request-body-limit and response-body-limit to 0 (without any measurement unit), under "default-config:" -# You may also need the following line under "stream:", but test it without it first -# async-oneside: yes -# Set EXTERNAL_NET to "any" -# You can use the etupdate script to install and update the Emerging Threats ruleset -# https://github.com/seanthegeek/etupdate - -##GlobalSettings +# Notes on getting this to work check install_suricata function: +# https://github.com/doomedraven/Tools/blob/master/Cuckoo/cuckoo3.sh + enabled = yes #Runmode "cli" or "socket" runmode = socket @@ -155,7 +141,6 @@ zippass = infected bin = /usr/bin/suricata conf = /etc/suricata/suricata.yaml ##Runmode "socket" Options -pylib_dir = /usr/lib/python2.7/dist-packages/suricatasc/ socket_file = /var/run/suricata/suricata-command.socket [cif] diff --git a/conf/proxmox.conf b/conf/proxmox.conf new file mode 100644 index 00000000000..46493c8c7fa --- /dev/null +++ b/conf/proxmox.conf @@ -0,0 +1,68 @@ +[proxmox] +# The name or IP where to reach the Proxmox REST API. May include a port number +# separated by colon if different from the default 8006. +hostname = REPLACE_ME + +# Name and password of the user to use for logging into the Proxmox REST API. +# The username might need to include a realm name appended with an @. +username = REPLACE_ME +password = REPLACE_ME + +# Default network interface. +interface = REPLACE_ME + +# Specify a comma-separated list of available machines to be used. For each +# specified ID you have to define a dedicated section containing the details +# on the respective machine. (E.g. cuckoo1,cuckoo2,cuckoo3) +machines = cuckoo1 + + +[cuckoo1] +# Specify the label name of the current machine as specified in your +# libvirt configuration. +label = cuckoo1 + +# Specify the operating system platform used by current machine +# [windows/darwin/linux]. +platform = windows + +# Specify the IP address of the current virtual machine. Make sure that the +# IP address is valid and that the host machine is able to reach it. If not, +# the analysis will fail. You may want to configure your network settings in +# /etc/libvirt//networks/ +ip = 192.168.122.105 + +# (Optional) Specify the snapshot name to use. If you do not specify a snapshot +# name, the KVM MachineManager will use the current snapshot. +# Example (Snapshot1 is the snapshot name): +# snapshot = Snapshot1 + +# (Optional) Specify the name of the network interface that should be used +# when dumping network traffic from this machine with tcpdump. 
If specified, +# overrides the default interface specified in auxiliary.conf +# Example (virbr0 is the interface name): +# interface = virbr0 + +# (Optional) Specify the IP of the Result Server, as your virtual machine sees it. +# The Result Server will always bind to the address and port specified in cuckoo.conf, +# however you could set up your virtual network to use NAT/PAT, so you can specify here +# the IP address for the Result Server as your machine sees it. If you don't specify an +# address here, the machine will use the default value from cuckoo.conf. +# NOTE: if you set this option you have to set result server IP to 0.0.0.0 in cuckoo.conf. +# Example: +# resultserver_ip = 192.168.122.101 + +# (Optional) Specify the port for the Result Server, as your virtual machine sees it. +# The Result Server will always bind to the address and port specified in cuckoo.conf, +# however you could set up your virtual network to use NAT/PAT, so you can specify here +# the port for the Result Server as your machine sees it. If you don't specify a port +# here, the machine will use the default value from cuckoo.conf. +# Example: +# resultserver_port = 2042 + +# (Optional) Set your own tags. These are comma separated and help to identify +# specific VMs. You can run samples on VMs with tag you require. +# Note that the x64 tag is currently special. For submitted 64-bit PE files, +# the x64 tag will automatically be added, forcing them to be run on a 64-bit +# VM. For this reason, make sure all 64-bit VMs have the x64 tag. +# tags = windows_xp_sp3,x32,acrobat_reader_6 diff --git a/data/yara/CAPE/AgentTeslav2.yar b/data/yara/CAPE/AgentTeslav2.yar new file mode 100644 index 00000000000..b1e0cb5771f --- /dev/null +++ b/data/yara/CAPE/AgentTeslav2.yar @@ -0,0 +1,25 @@ +rule AgentTeslav2 { + meta: + author = "ditekshen" + description = "AgenetTesla Type 2 Keylogger payload" + cape_type = "AgentTesla Type 2 Payload" + strings: + $s1 = "get_kbHook" ascii + $s2 = "GetPrivateProfileString" ascii + $s3 = "get_OSFullName" ascii + $s4 = "get_PasswordHash" ascii + $s5 = "#\"(')'*)+','-'/.0.1.2.3.54647498:9;9=<>=?=" wide + $s6 = "\"!#!$!%!&!-," wide + $s7 = "logins" fullword wide + $s8 = "keylog" fullword wide + $s9 = "1.85 (Hash, version 2, native byte-order)" wide + + $cl1 = "Postbox" fullword ascii + $cl2 = "BlackHawk" fullword ascii + $cl3 = "WaterFox" fullword ascii + $cl4 = "CyberFox" fullword ascii + $cl5 = "IceDragon" fullword ascii + $cl6 = "Thunderbird" fullword ascii + condition: + (uint16(0) == 0x5a4d and 6 of ($s*)) or (6 of ($s*) and 2 of ($cl*)) +} diff --git a/data/yara/CAPE/AveMaria.yar b/data/yara/CAPE/AveMaria.yar new file mode 100644 index 00000000000..dfea35e4e50 --- /dev/null +++ b/data/yara/CAPE/AveMaria.yar @@ -0,0 +1,28 @@ +rule AveMaria { + meta: + author = "ditekshen" + description = "AveMaria variant payload" + cape_type = "AveMaria Payload" + strings: + $s1_1 = "PK11_CheckUserPassword" fullword ascii + $s1_2 = "PK11_Authenticate" fullword ascii + $s1_3 = "PK11SDR_Decrypt" fullword ascii + $s1_4 = "SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\Winlogon\\SpecialAccounts\\UserList" fullword ascii + $s1_5 = "AVE_MARIA" ascii wide + $s1_6 = "127.0.0." 
ascii + + $s2_1 = "RDPClip" fullword wide + $s2_2 = "Grabber" fullword wide + $s2_3 = "Ave_Maria Stealer OpenSource" wide + $s2_4 = "\\MidgetPorn\\workspace\\MsgBox.exe" wide + $s2_5 = "@\\cmd.exe" wide + $s2_6 = "/n:%temp%\\ellocnak.xml" wide + $s2_7 = "Hey I'm Admin" wide + + $d1 = "softokn3.dll" fullword wide + $d2 = "nss3.dll" fullword wide + $d3 = "logins.json" wide + $d4 = "Asend.db" fullword wide + condition: + (uint16(0) == 0x5a4d and (4 of ($s2*) and 2 of ($d*)) or (all of ($s1*))) or ((4 of ($s1*) and 2 of ($d*)) or (all of ($s1*))) +} diff --git a/data/yara/CAPE/DanaBot.yar b/data/yara/CAPE/DanaBot.yar deleted file mode 100644 index 5d79572829f..00000000000 --- a/data/yara/CAPE/DanaBot.yar +++ /dev/null @@ -1,13 +0,0 @@ -rule DanaBot -{ - meta: - author = "kevoreilly" - description = "DanaBot decrypt function" - cape_type = "DanaBot Payload" - - strings: - $decrypt1 = {83 FA 20 88 CD 7C 3F 66 89 08 66 89 48 02 66 89 48 04 66 89 48 06 83 EA 10 DD 00 DD 14 02 DD 54 02 08 89 C1 83 E1 07 83 E9 08 29 C8 01 CA 01 D0 F7 DA DD 14 02 DD 54 02 08 83 C2 10 7C F4 DD C0 D9 F7 C3} - - condition: - uint16(0) == 0x5A4D and any of them -} diff --git a/data/yara/CAPE/ISRStealer.yar b/data/yara/CAPE/ISRStealer.yar new file mode 100644 index 00000000000..035cda3463b --- /dev/null +++ b/data/yara/CAPE/ISRStealer.yar @@ -0,0 +1,17 @@ +rule ISRStealer { + meta: + author = "ditekshen" + description = "ISRStealer payload" + cape_type = "ISRStealer Payload" + strings: + $s1 = "&password=" wide + $s2 = "&pcname=" wide + $s3 = "MSVBVM60.DLL" ascii + $s4 = "MSVBVM60.DLL" wide + $s5 = "Core Software For : Public" wide + $s6 = "" wide + $s7 = "" wide + $s8 = "/scomma" wide + condition: + (uint16(0) == 0x5a4d and filesize < 4000KB and 6 of them) or all of them +} diff --git a/data/yara/CAPE/Laturo.yar b/data/yara/CAPE/Laturo.yar new file mode 100644 index 00000000000..01a40b1ff0e --- /dev/null +++ b/data/yara/CAPE/Laturo.yar @@ -0,0 +1,24 @@ +rule Laturo { + meta: + author = "ditekshen" + description = "Laturo information stealer payload" + cape_type = "Laturo Payload" + strings: + $str1 = "cmd.exe /c ping 127.0.0.1" ascii wide + $str2 = "cmd.exe /c start" ascii wide + $str3 = "\\RapidLoader\\" ascii + $str4 = "loader/gate.php" ascii wide + $str5 = "Hwid:" ascii wide + $str6 = "Special:" ascii wide + $str7 = "logs=%s" ascii + $data1 = "cookies.%u.txt" nocase ascii wide + $data2 = "passwords.%u.txt" nocase ascii wide + $data3 = "credentials.%u.txt" nocase ascii wide + $data4 = "cards.%u.txt" nocase ascii wide + $data5 = "autofill.%u.txt" nocase ascii wide + $data6 = "loginusers.vdf" ascii wide + $data7 = "screenshot.bmp" nocase ascii wide + $data8 = "webcam.bmp" nocase ascii wide + condition: + uint16(0) == 0x5a4d and 5 of ($str*) and 1 of ($data*) +} diff --git a/data/yara/CAPE/LimeRAT.yar b/data/yara/CAPE/LimeRAT.yar new file mode 100644 index 00000000000..cb6f3989cf8 --- /dev/null +++ b/data/yara/CAPE/LimeRAT.yar @@ -0,0 +1,18 @@ +rule LimeRAT { + meta: + author = "ditekshen" + description = "LimeRAT payload" + cape_type = "LimeRAT Payload" + strings: + $s1 = "schtasks /create /f /sc ONLOGON /RL HIGHEST /tn LimeRAT-Admin /tr" wide + $s2 = "\\vboxhook.dll" fullword wide + $s3 = "Win32_Processor.deviceid=\"CPU0\"" fullword wide + $s4 = "select CommandLine from Win32_Process where Name='{0}'" wide + $s5 = "Minning..." fullword wide + $s6 = "Regasm.exe" fullword wide + $s7 = "Flood!" 
fullword wide + $s8 = "Rans-Status" fullword wide + $s9 = "cmd.exe /c ping 0" wide + condition: + uint16(0) == 0x5a4d and 5 of them +} diff --git a/data/yara/CAPE/QuasarRAT.yar b/data/yara/CAPE/QuasarRAT.yar new file mode 100644 index 00000000000..ddcdf40cbb2 --- /dev/null +++ b/data/yara/CAPE/QuasarRAT.yar @@ -0,0 +1,22 @@ +rule QuasarRAT { + meta: + author = "ditekshen" + description = "QuasarRAT payload" + cape_type = "QuasarRAT Payload" + strings: + $s1 = "GetKeyloggerLogsResponse" fullword ascii + $s2 = "GetKeyloggerLogs" fullword ascii + $s3 = "/>Log created on" wide + $s4 = "User: {0}{3}Pass: {1}{3}Host: {2}" wide + $s5 = "Domain: {1}{0}Cookie Name: {2}{0}Value: {3}{0}Path: {4}{0}Expired: {5}{0}HttpOnly: {6}{0}Secure: {7}" wide + $s6 = "grabber_" wide + $s7 = "" ascii + $s8 = "k__BackingField" fullword ascii + $s9 = "" ascii + $s10 = "add_OnHotKeysDown" ascii + $mutex = "QSR_MUTEX_" ascii wide + $ua1 = "Mozilla/5.0 (Windows NT 6.3; rv:48.0) Gecko/20100101 Firefox/48.0" fullword wide + $us2 = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.75.14 (KHTML, like Gecko) Version/7.0.3 Safari/7046A194A" fullword wide + condition: + uint16(0) == 0x5a4d and ($mutex or (all of ($ua*) and 2 of them) or 6 of ($s*)) +} diff --git a/data/yara/CAPE/REvil.yar b/data/yara/CAPE/REvil.yar index e2ba109fa44..0cadc304364 100644 --- a/data/yara/CAPE/REvil.yar +++ b/data/yara/CAPE/REvil.yar @@ -5,12 +5,15 @@ rule REvil description = "REvil Payload" cape_type = "REvil Payload" strings: - $RE1 = "expand 32-byte kexpand 16-byte k" ascii fullword - $RE2 = "sysshadow" ascii fullword - $RE3 = "SCROLLBAR" ascii fullword - $RE4 = "msctfime ui" ascii fullword - $RE5 = "\\BaseNamedObjects\\%S" wide fullword - $decode = {33 D2 8A 9C 3D FC FE FF FF 8B C7 0F B6 CB F7 75 0C 8B 45 08 0F B6 04 02 03 C6 03 C8 0F B6 F1 8A 84 35 FC FE FF FF 88 84 3D FC FE FF FF 47 88 9C 35 FC FE FF FF 81 FF 00 01 00 00 72 C3} + $OtherRE1 = "expand 32-byte kexpand 16-byte k" ascii fullword + $OtherRE2 = "sysshadow" ascii fullword + $OtherRE3 = "SCROLLBAR" ascii fullword + $OtherRE4 = "msctfime ui" ascii fullword + $OtherRE5 = "\\BaseNamedObjects\\%S" wide fullword + $RE_dec = "rwdec_x86_debug.pdb" ascii + $GCREvil_string_decoder_opcodes = {33 D2 8A 9C 3D FC FE FF FF 8B C7 0F B6 CB F7 75 0C 8B 45 08 0F B6 04 02 03 C6 03 C8 0F B6 F1 8A 84 35 FC FE FF FF 88 84 3D FC FE FF FF 47 88 9C 35 FC FE FF FF 81 FF 00 01 00 00 72 C3 } + $REvil_string_decoder_opcodes1 = {8B C1 8A 1C 39 33 D2 0F B6 CB F7 75 ?? 8B 45 ?? 0F B6 04 02 03 C6 03 C8 0F B6 F1 8B 4D ?? 8A 04 3E 88 04 39 41 88 1C 3E 89 4D ?? 
81 F9 00 01 00 00 } condition: - uint16(0) == 0x5A4D and $decode and any of ($RE*) + uint16(0) == 0x5a4d + and (($GCREvil_string_decoder_opcodes and any of ($OtherRE*)) or any of ($REvil_string_decoder_opcodes*)) and not $RE_dec } diff --git a/data/yara/CAPE/SCInject.yar b/data/yara/CAPE/SCInject.yar new file mode 100644 index 00000000000..0b71632bccd --- /dev/null +++ b/data/yara/CAPE/SCInject.yar @@ -0,0 +1,28 @@ +rule SCInject { + meta: + author = "ditekshen" + description = "Shellcode injector and downloader via RegAsm.exe payload" + cape_type = "Shellcode injector and downloader Payload" + strings: + $s1 = "wininet.dll" fullword ascii + $s2 = "ShellExecuteW" fullword ascii + $s3 = "SHCreateDirectoryExW" fullword ascii + $s4 = "Software\\Microsoft\\Windows\\CurrentVersion\\RunOnce" fullword ascii + $s5 = "Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko" fullword ascii + + $o1 = "msvbvm60.dll" fullword wide + $o2 = "\\syswow64\\" fullword wide + $o3 = "\\system32\\" fullword wide + $o4 = "\\Microsoft.NET\\Framework\\" fullword wide + $o5 = "USERPROFILE=" wide nocase + $o6 = "windir=" fullword wide + $o7 = "APPDATA=" nocase wide + $o8 = "RegAsm.exe" fullword wide + + $url1 = "https://drive.google.com/uc?export=download&id=" ascii + $url2 = "https://onedrive.live.com/download?cid=" ascii + $url3 = "http://myurl/myfile.bin" fullword ascii + $url4 = "http" ascii // fallback + condition: + all of ($s*) and 2 of ($o*) and 1 of ($url*) +} diff --git a/data/yara/CAPE/XpertRAT.yar b/data/yara/CAPE/XpertRAT.yar new file mode 100644 index 00000000000..14de402a36b --- /dev/null +++ b/data/yara/CAPE/XpertRAT.yar @@ -0,0 +1,28 @@ +rule XpertRAT { + meta: + author = "ditekshen" + description = "XpertRAT payload" + cape_type = "XpertRAT Payload" + strings: + $v1 = "[XpertRAT-Mutex]" fullword wide + $v2 = "XPERTPLUGIN" fullword wide + $v3 = "+Xpert+3." 
wide + $v4 = "keylog.tmp" fullword wide + $v5 = "\\TempReg.reg" fullword wide + + $s1 = "ClsKeylogger" fullword ascii nocase + $s2 = "clsCamShot" fullword ascii nocase + $s3 = "ClsShellCommand" fullword ascii nocase + $s4 = "ClsRemoteDesktop" fullword ascii nocase + $s5 = "ClsScreenRemote" fullword ascii nocase + $s6 = "ClsSoundRemote" fullword ascii nocase + $s7 = "MdlHidder" fullword ascii + $s8 = "modKeylog" fullword ascii + $s9 = "modWipe" fullword ascii + $s10 = "modDelProcInUse" fullword ascii + $s11= "Socket_DataArrival" fullword ascii + $s12 = "cZip_EndCompress" fullword ascii + + condition: + uint16(0) == 0x5a4d and (3 of ($v*) or 6 of ($s*)) +} diff --git a/lib/cuckoo/common/cape_utils.py b/lib/cuckoo/common/cape_utils.py index 9757cd1cfac..c9b82648202 100644 --- a/lib/cuckoo/common/cape_utils.py +++ b/lib/cuckoo/common/cape_utils.py @@ -36,7 +36,7 @@ HAS_MALWARECONFIGS = False print("Missed RATDecoders -> pip3 install git+https://github.com/kevthehermit/RATDecoders") -cape_decoders = os.path.join(CUCKOO_ROOT, "modules", "processing", "parsers", "malwareconfig") +cape_decoders = os.path.join(CUCKOO_ROOT, "modules", "processing", "parsers", "CAPE") CAPE_DECODERS = [ os.path.basename(decoder)[:-3] for decoder in glob.glob(cape_decoders + "/[!_]*.py") @@ -44,7 +44,7 @@ for name in CAPE_DECODERS: try: - file, pathname, description = imp.find_module(name, [CAPE_DECODERS]) + file, pathname, description = imp.find_module(name, [cape_decoders]) module = imp.load_module(name, file, pathname, description) malware_parsers[name] = module except (ImportError, IndexError) as e: diff --git a/lib/cuckoo/common/compressor.py b/lib/cuckoo/common/compressor.py new file mode 100644 index 00000000000..3850725af8b --- /dev/null +++ b/lib/cuckoo/common/compressor.py @@ -0,0 +1,186 @@ +import os +import struct +import binascii +import logging + +log = logging.getLogger(__name__) + +try: + import bson + HAVE_BSON = True +except ImportError: + HAVE_BSON = False +else: + # The BSON module provided by pymongo works through its "BSON" class. + if hasattr(bson, "BSON"): + bson_decode = lambda d: bson.BSON(d).decode() + # The BSON module provided by "pip install bson" works through the + # "loads" function (just like pickle etc.) 
+ elif hasattr(bson, "loads"): + bson_decode = lambda d: bson.loads(d) + else: + HAVE_BSON = False + +class NGram: + def __init__(self, order): + self.order = order + self.buffer = [] + + def add(self, element): + tmp = None + if not element: + return tmp + + if len(self.buffer) == self.order * 2: + tmp = self.buffer.pop(0) + + if type(element) == list: + self.buffer.append(element) + else: + self.buffer.append([element, 1]) + + self.analyse() + return tmp + + def analyse(self): + tmp = [c[0][0] for c in self.buffer] + if tmp[0:self.order] == tmp[self.order:]: + for i in range(self.order): + self.buffer[i][1] += self.buffer[i+self.order][1] + self.buffer = self.buffer[0:self.order] + +class Compressor: + def __init__(self, level): + self.level = level + self.ngrams = [ NGram(i) for i in range(1,level+1) ] + self.final = [] + + def add(self, element): + head, tail = (self.ngrams[0], self.ngrams[1:]) + out = head.add(element) + + for t in tail: + out = t.add(out) + + if out: + self.final.append(out) + + def flush(self): + for i in range(len(self.ngrams)): + current_buffer = self.ngrams[i].buffer + for out in current_buffer: + for u in range(i+1, len(self.ngrams)): + out = self.ngrams[u].add(out) + if out: + self.final.append(out) + +class CuckooBsonCompressor: + def __init__(self): + self.threads = {} + self.callmap = {} + self.head = [] + self.ccounter = 0 + + def __next_message(self): + data = self.fd_in.read(4) + if not data: + return (False, False) + _size = struct.unpack('I', data)[0] + data += self.fd_in.read(_size - 4) + self.raw_data = data + return (data, bson_decode(data)) + + def run(self, file_path): + if not os.path.isfile(file_path) or not os.stat(file_path).st_size: + log.warning('File %s does not exist or is invalid.', file_path) + return False + + self.fd_in = open(file_path, 'rb') + + msg = '---' + while msg: + data, msg = self.__next_message() + + if msg: + mtype = msg.get('type') # message type [debug, new_process, info] + if mtype not in ['debug', 'new_process', 'info']: + _id = msg.get('I', -1) + if not self.category.startswith('__'): + tid = msg.get('T', -1) + time = msg.get('t', 0) + + if tid not in self.threads: + self.threads[tid] = Compressor(100) + + csum = self.checksum(msg) + self.ccounter += 1 + v = (csum, self.ccounter, time) + self.threads[tid].add(v) + + if csum not in self.callmap: + self.callmap[csum] = msg + else: + self.head.append(data) + else: + self.category = msg.get('category', 'None') + self.head.append(data) + + self.fd_in.close() + + return self.flush(file_path) + + def flush(self, file_path): + # This function flushes the ngram buffers within each compressor and merges + # the threads' compressed call lists, trying to preserve the original order + + compressed_path = file_path + '.compressed' + if os.path.isfile(compressed_path): + os.remove(compressed_path) + + fd = open(compressed_path, 'wb') + + for d in self.head: + fd.write(d) + + final = [] + for tid, c in self.threads.items(): + c.flush() + for element, repeated in c.final: + data = self.callmap.get(element[0]).copy() + data['r'] += repeated + data['t'] = element[2] + data['order'] = element[1] + final.append(data) + + final.sort(key=lambda x: x['order']) + + if final and os.path.isfile(compressed_path): + for d in final: + d.pop('order') + edata = bson.BSON.encode(d) + fd.write(edata) + + os.rename(file_path, '{}.raw'.format(file_path)) + os.symlink('{}.compressed'.format(file_path), file_path) + else: + return False + + return True + + def checksum(self, msg): + # This function calculates a 4-byte checksum 
for each call + # this value is used for identifying a call setup. + + index = msg.get('I', -1) + args = ''.join([ str(c) for c in msg['args'] ]) + content = [ + str(index), # api call + str(msg['T']), # thread id + str(msg['R']), # caller + str(args), # call args + str(self.category), # category + str(msg['P']) # parentcaller + ] + content = ''.join(content) + + return binascii.crc32(bytes(content, 'utf8')) diff --git a/lib/cuckoo/common/suri_specs.py b/lib/cuckoo/common/suri_specs.py new file mode 100644 index 00000000000..269434d8265 --- /dev/null +++ b/lib/cuckoo/common/suri_specs.py @@ -0,0 +1,183 @@ +argsd = { + "pcap-file": [ + { + "name": "filename", + "required": 1, + }, + { + "name": "output-dir", + "required": 1, + }, + { + "name": "tenant", + "type": int, + "required": 0, + }, + { + "name": "continuous", + "required": 0, + }, + { + "name": "delete-when-done", + "required": 0, + }, + ], + "pcap-file-continuous": [ + { + "name": "filename", + "required": 1, + }, + { + "name": "output-dir", + "required": 1, + }, + { + "name": "continuous", + "val": True, + "required": 1, + }, + { + "name": "tenant", + "type": int, + "required": 0, + }, + { + "name": "delete-when-done", + "required": 0, + }, + ], + "iface-stat": [ + { + "name": "iface", + "required": 1, + }, + ], + "conf-get": [ + { + "name": "variable", + "required": 1, + } + ], + "unregister-tenant-handler": [ + { + "name": "id", + "type": int, + "required": 1, + }, + { + "name": "htype", + "required": 1, + }, + { + "name": "hargs", + "type": int, + "required": 0, + }, + ], + "register-tenant-handler": [ + { + "name": "id", + "type": int, + "required": 1, + }, + { + "name": "htype", + "required": 1, + }, + { + "name": "hargs", + "type": int, + "required": 0, + }, + ], + "unregister-tenant": [ + { + "name": "id", + "type": int, + "required": 1, + }, + ], + "register-tenant": [ + { + "name": "id", + "type": int, + "required": 1, + }, + { + "name": "filename", + "required": 1, + }, + ], + "reload-tenant": [ + { + "name": "id", + "type": int, + "required": 1, + }, + { + "name": "filename", + "required": 1, + }, + ], + "add-hostbit": [ + { + "name": "ipaddress", + "required": 1, + }, + { + "name": "hostbit", + "required": 1, + }, + { + "name": "expire", + "type": int, + "required": 1, + }, + ], + "remove-hostbit": [ + { + "name": "ipaddress", + "required": 1, + }, + { + "name": "hostbit", + "required": 1, + }, + ], + "list-hostbit": [ + { + "name": "ipaddress", + "required": 1, + }, + ], + "memcap-set": [ + { + "name": "config", + "required": 1, + }, + { + "name": "memcap", + "required": 1, + }, + ], + "memcap-show": [ + { + "name": "config", + "required": 1, + }, + ], + "dataset-add": [ + { + "name": "setname", + "required": 1, + }, + { + "name": "settype", + "required": 1, + }, + { + "name": "datavalue", + "required": 1, + }, + ], + } diff --git a/lib/cuckoo/common/suricatasc.py b/lib/cuckoo/common/suricatasc.py new file mode 100644 index 00000000000..6bb8a8b1b63 --- /dev/null +++ b/lib/cuckoo/common/suricatasc.py @@ -0,0 +1,273 @@ +#!/usr/bin/python +# Copyright(C) 2012 Open Information Security Foundation + +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 2 of the License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. + +try: + import simplejson as json +except ImportError: + import json +import readline +import select +import sys +from socket import AF_UNIX, error, socket + +from .suri_specs import argsd + +SURICATASC_VERSION = "1.0" +VERSION = "0.2" +INC_SIZE = 1024 + + +class SuricataException(Exception): + """ + Generic class for suricatasc exception + """ + def __init__(self, value): + super(SuricataException, self).__init__(value) + self.value = value + + def __str__(self): + return str(self.value) + + +class SuricataNetException(SuricataException): + """ + Exception raised when a network error occurs + """ + + +class SuricataCommandException(SuricataException): + """ + Exception raised when the command is incorrect + """ + + +class SuricataReturnException(SuricataException): + """ + Exception raised when return message is incorrect + """ + + +class SuricataCompleter: + def __init__(self, words): + self.words = words + self.generator = None + + def complete(self, text): + for word in self.words: + if word.startswith(text): + yield word + + def __call__(self, text, state): + if state == 0: + self.generator = self.complete(text) + try: + return next(self.generator) + except StopIteration: + return None + return None + + +class SuricataSC: + def __init__(self, sck_path, verbose=False): + self.basic_commands = [ + "shutdown", + "quit", + "pcap-file-number", + "pcap-file-list", + "pcap-last-processed", + "pcap-interrupt", + "iface-list", + ] + self.fn_commands = [ + "pcap-file", + "pcap-file-continuous", + "iface-stat", + "conf-get", + "unregister-tenant-handler", + "register-tenant-handler", + "unregister-tenant", + "register-tenant", + "reload-tenant", + "add-hostbit", + "remove-hostbit", + "list-hostbit", + "memcap-set", + "memcap-show", + "dataset-add", + ] + self.cmd_list = self.basic_commands + self.fn_commands + self.sck_path = sck_path + self.verbose = verbose + self.socket = socket(AF_UNIX) + + def json_recv(self): + cmdret = None + data = "" + while True: + if sys.version < '3': + data += self.socket.recv(INC_SIZE) + else: + data += self.socket.recv(INC_SIZE).decode('iso-8859-1') + if data.endswith('\n'): + cmdret = json.loads(data) + break + return cmdret + + def send_command(self, command, arguments=None): + if command not in self.cmd_list and command != 'command-list': + raise SuricataCommandException("Command not found: {}".format(command)) + + cmdmsg = {} + cmdmsg['command'] = command + if arguments: + cmdmsg['arguments'] = arguments + if self.verbose: + print("SND: " + json.dumps(cmdmsg)) + cmdmsg_str = json.dumps(cmdmsg) + "\n" + if sys.version < '3': + self.socket.send(cmdmsg_str) + else: + self.socket.send(bytes(cmdmsg_str, 'iso-8859-1')) + + ready = select.select([self.socket], [], [], 600) + if ready[0]: + cmdret = self.json_recv() + else: + cmdret = None + if not cmdret: + raise SuricataReturnException("Unable to get message from server") + + if self.verbose: + print("RCV: "+ json.dumps(cmdret)) + + return cmdret + + def connect(self): + try: + if self.socket == None: + self.socket = socket(AF_UNIX) + self.socket.connect(self.sck_path) + except error as err: + raise SuricataNetException(err) + + self.socket.settimeout(10) + #send version + if self.verbose: + print("SND: " + json.dumps({"version": VERSION})) + if 
sys.version < '3': + self.socket.send(json.dumps({"version": VERSION})) + else: + self.socket.send(bytes(json.dumps({"version": VERSION}), 'iso-8859-1')) + + ready = select.select([self.socket], [], [], 600) + if ready[0]: + cmdret = self.json_recv() + else: + cmdret = None + + if not cmdret: + raise SuricataReturnException("Unable to get message from server") + + if self.verbose: + print("RCV: "+ json.dumps(cmdret)) + + if cmdret["return"] == "NOK": + raise SuricataReturnException("Error: %s" % (cmdret["message"])) + + cmdret = self.send_command("command-list") + + # we silently ignore NOK as this means server is old + if cmdret["return"] == "OK": + self.cmd_list = cmdret["message"]["commands"] + self.cmd_list.append("quit") + + def close(self): + self.socket.close() + self.socket = None + + def execute(self, command): + full_cmd = command.split() + cmd = full_cmd[0] + cmd_specs = argsd[cmd] + required_args_count = len([d["required"] for d in cmd_specs if d["required"] and not "val" in d]) + arguments = dict() + for c, spec in enumerate(cmd_specs, 1): + spec_type = str if "type" not in spec else spec["type"] + if spec["required"]: + if spec.get("val"): + arguments[spec["name"]] = spec_type(spec["val"]) + continue + try: + arguments[spec["name"]] = spec_type(full_cmd[c]) + except IndexError: + phrase = " at least" if required_args_count != len(cmd_specs) else "" + msg = "Missing arguments: expected{} {}".format(phrase, required_args_count) + raise SuricataCommandException(msg) + except ValueError as ve: + raise SuricataCommandException("Erroneous arguments: {}".format(ve)) + elif c < len(full_cmd): + arguments[spec["name"]] = spec_type(full_cmd[c]) + return cmd, arguments + + def parse_command(self, command): + arguments = None + cmd = command.split()[0] if command else None + if cmd in self.cmd_list: + if cmd in self.fn_commands: + cmd, arguments = getattr(self, "execute")(command=command) + else: + raise SuricataCommandException("Unknown command {}".format(command)) + return cmd, arguments + + def interactive(self): + print("Command list: " + ", ".join(self.cmd_list)) + try: + readline.set_completer(SuricataCompleter(self.cmd_list)) + readline.set_completer_delims(";") + readline.parse_and_bind('tab: complete') + while True: + if sys.version < '3': + command = raw_input(">>> ").strip() + else: + command = input(">>> ").strip() + if command == "quit": + break + try: + cmd, arguments = self.parse_command(command) + except SuricataCommandException as err: + print(err) + continue + try: + cmdret = self.send_command(cmd, arguments) + except IOError as err: + # try to reconnect and resend command + print("Connection lost, trying to reconnect") + try: + self.close() + self.connect() + except SuricataNetException as err: + print("Can't reconnect to suricata socket, discarding command") + continue + cmdret = self.send_command(cmd, arguments) + #decode json message + if cmdret["return"] == "NOK": + print("Error:") + print(json.dumps(cmdret["message"], sort_keys=True, indent=4, separators=(',', ': '))) + else: + print("Success:") + print(json.dumps(cmdret["message"], sort_keys=True, indent=4, separators=(',', ': '))) + except KeyboardInterrupt: + print("[!] 
Interrupted") diff --git a/lib/cuckoo/common/web_utils.py b/lib/cuckoo/common/web_utils.py index 752e816cabd..0b23a52b8d2 100644 --- a/lib/cuckoo/common/web_utils.py +++ b/lib/cuckoo/common/web_utils.py @@ -107,7 +107,7 @@ def fix_section_permission(path): log.info("[-] Missed dependency pefile") -# Submission hooks to set options based on some naming patrons +# Submission hooks to set options based on some naming patterns def recon(filename, orig_options, timeout, enforce_timeout): filename = filename.lower() if not isinstance(filename, str): diff --git a/lib/cuckoo/core/database.py b/lib/cuckoo/core/database.py index 2ed368dcade..e130ba653bc 100644 --- a/lib/cuckoo/core/database.py +++ b/lib/cuckoo/core/database.py @@ -444,7 +444,10 @@ def delete_tag_orphans(session, ctx): def __del__(self): """Disconnects pool.""" - self.engine.dispose() + try: + self.engine.dispose() + except KeyError: + pass def _connect_database(self, connection_string): """Connect to a Database. @@ -706,9 +709,10 @@ def guest_set_status(self, task_id, status): session = self.Session() try: guest = session.query(Guest).filter_by(task_id=task_id).first() - guest.status = status - session.commit() - session.refresh(guest) + if guest is not None: + guest.status = status + session.commit() + session.refresh(guest) except SQLAlchemyError as e: log.exception("Database error logging guest start: {0}".format(e)) session.rollback() diff --git a/lib/cuckoo/core/plugins.py b/lib/cuckoo/core/plugins.py index fb4b960c3ca..d993a5095ae 100644 --- a/lib/cuckoo/core/plugins.py +++ b/lib/cuckoo/core/plugins.py @@ -351,10 +351,14 @@ def run(self): self.results["malfamily_tag"] = "VirusTotal" # fall back to ClamAV detection - if not family and self.results["info"]["category"] == "file" and "clamav" in self.results.get("target", {}).get("file", {}) and self.results["target"]["file"]["clamav"] and self.results["target"]["file"]["clamav"].startswith("Win.Trojan."): - words = re.findall(r"[A-Za-z0-9]+", self.results["target"]["file"]["clamav"]) - family = words[2] - self.results["malfamily_tag"] = "ClamAV" + if not family and self.results["info"]["category"] == "file" and "clamav" in self.results.get("target", {}).get("file", {}) and self.results["target"]["file"]["clamav"]: + for detection in self.results["target"]["file"]["clamav"]: + if family: + break + elif detection.startswith("Win.Trojan."): + words = re.findall(r"[A-Za-z0-9]+", detection) + family = words[2] + self.results["malfamily_tag"] = "ClamAV" if self.results.get("cape", False): self.results["malfamily"] = self.results["cape"] @@ -646,6 +650,8 @@ def __init__(self, task, results, reprocess=False): # remove unwanted/duplicate information from reporting for process in results["behavior"]["processes"]: process["calls"].begin_reporting() + # required to convert object to list + process["calls"] = list(process["calls"]) self.results = results self.analysis_path = os.path.join(CUCKOO_ROOT, "storage", "analyses", str(task["id"])) diff --git a/lib/cuckoo/core/scheduler.py b/lib/cuckoo/core/scheduler.py index 54b2a267f48..b8af1864157 100644 --- a/lib/cuckoo/core/scheduler.py +++ b/lib/cuckoo/core/scheduler.py @@ -345,8 +345,11 @@ def launch_analysis(self): if self.cfg.cuckoo.memory_dump or self.task.memory: try: dump_path = get_memdump_path(self.task.id) - free_space_monitor(RAM=True) - machinery.dump_memory(self.machine.label, dump_path) + need_space, space_available = free_space_monitor(os.path.dirname(dump_path), return_value=True) + if need_space: + log.error("Not enough free disk 
space! Could not dump ram (Only %d MB!)", space_available) + else: + machinery.dump_memory(self.machine.label, dump_path) except NotImplementedError: log.error("The memory dump functionality is not available " "for the current machine manager.") diff --git a/modules/machinery/kvmremote.py b/modules/machinery/kvmremote.py index c201b7ce656..913f3830271 100644 --- a/modules/machinery/kvmremote.py +++ b/modules/machinery/kvmremote.py @@ -47,7 +47,7 @@ def _initialize(self, module_name): super(KVMRemote, self)._initialize(module_name) hypervs_labels = self.options.get("kvmremote")["hypervisors"] - hypervs_labels = ("".join(hypervs_labels.split())).split(b",") + hypervs_labels = ("".join(hypervs_labels.split())).split(",") for machine in self.machines(): machine_cfg = self.options.get(machine.label) @@ -88,46 +88,48 @@ def dump_memory(self, label, path): @param path: path to where to store the memory dump. """ - # ssh and create save file then copy to path + # ssh and create save file then copy to path try: # create the memory dump file ourselves first so it doesn't end up root/root 0600 # it'll still be owned by root, so we can't delete it, but at least we can read it fd = open(path, "w") fd.close() + try: + from subprocess import DEVNULL # py3k + except ImportError: + import os + DEVNULL = open(os.devnull, 'wb') - try: - from subprocess import DEVNULL # py3k - except ImportError: - import os - DEVNULL = open(os.devnull, 'wb') + # this triggers local dump - # this triggers local dump + #self.vms[label].coreDump(path, flags=libvirt.VIR_DUMP_MEMORY_ONLY) - #self.vms[label].coreDump(path, flags=libvirt.VIR_DUMP_MEMORY_ONLY) - - machine_label = None - hypverv_cfg = None - # use first + machine_label = None + hypverv_cfg = None + # use first for machine in self.machines(): machine_cfg = self.options.get(machine.label) hyperv_cfg = self.options.get(machine_cfg.hypervisor) - break + break - remote_host = hyperv_cfg['remote_host'] + remote_host = hyperv_cfg['remote_host'] - log.info("Dumping volatile memory remotely @ %s (%s)" % (remote_host, label)) + log.info("Dumping volatile memory remotely @ %s (%s)" % + (remote_host, label)) - remote_output = subprocess.check_output(['ssh', remote_host, "virsh", "dump", "--memory-only", label, "/data/memory/%s.memory.dump" % (label) ], stderr=DEVNULL) - log.debug("Copying memory from remote host") - remote_output = subprocess.check_output(['scp', '-q', remote_host + ":/data/memory/%s.memory.dump" % label, path ], stderr=DEVNULL) - log.debug("Removing memory from remote host") - remote_output = subprocess.check_output(['ssh', remote_host, "rm", "-f", "/data/memory/%s.memory.dump" % (label) ], stderr=DEVNULL) + remote_output = subprocess.check_output( + ['ssh', remote_host, "virsh", "dump", "--memory-only", label, "/data/memory/%s.memory.dump" % (label)], stderr=DEVNULL) + log.debug("Copying memory from remote host") + remote_output = subprocess.check_output( + ['scp', '-q', remote_host + ":/data/memory/%s.memory.dump" % label, path], stderr=DEVNULL) + log.debug("Removing memory from remote host") + remote_output = subprocess.check_output( + ['ssh', remote_host, "rm", "-f", "/data/memory/%s.memory.dump" % (label)], stderr=DEVNULL) - if not os.path.isfile(path): - raise CuckooMachineError("Error dumping memory virtual machine " - "{0}: {1}".format(label, "file not found")) + if not os.path.isfile(path): + raise CuckooMachineError("Error dumping memory virtual machine " + "{0}: {1}".format(label, "file not found")) except libvirt.libvirtError as e: raise 
CuckooMachineError("Error dumping memory virtual machine " - "{0}: {1}".format(label, e)) - + "{0}: {1}".format(label, e)) \ No newline at end of file diff --git a/modules/machinery/proxmox.py b/modules/machinery/proxmox.py new file mode 100644 index 00000000000..a4a7215ce71 --- /dev/null +++ b/modules/machinery/proxmox.py @@ -0,0 +1,230 @@ +# Copyright (C) 2017 Menlo Security +# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org +# See the file 'docs/LICENSE' for copying permission. + +import logging +import time +try: + from proxmoxer import ProxmoxAPI, ResourceException +except ImportError: + print("Missed dependency: pip3 install proxmoxer==1.0.3") + +from lib.cuckoo.common.abstracts import Machinery +from lib.cuckoo.common.config import config +from lib.cuckoo.common.exceptions import CuckooCriticalError +from lib.cuckoo.common.exceptions import CuckooMachineError + +# silence overly verbose INFO level logging default of proxmoxer module +logging.getLogger("proxmoxer").setLevel(logging.WARNING) + +log = logging.getLogger(__name__) +cfg = Config() + +class Proxmox(Machinery): + """Manage Proxmox sandboxes.""" + def __init__(self): + super(Proxmox, self).__init__() + self.node = None + self.vm = None + self.timeout = int(cfg.timeouts.vm_state) + + def _initialize_check(self): + """Ensures that credentials have been entered into the config file. + @raise CuckooCriticalError: if no credentials were provided + """ + if not self.options.proxmox.username or not self.options.proxmox.password: + raise CuckooCriticalError( + "Proxmox credentials are missing, please add them to " + "the Proxmox machinery configuration file." + ) + if not self.options.proxmox.hostname: + raise CuckooCriticalError("Proxmox hostname not set") + + super(Proxmox, self)._initialize_check() + + def find_vm(self, label): + """Find a VM in the Proxmox cluster and remember its node and vm proxy + objects for extraction of additional data by other methods. + + @param label: the label of the VM to be compared to the VM's name in + Proxmox. + @raise CuckooMachineError: if the VM cannot be found.""" + proxmox = ProxmoxAPI(self.options.proxmox.hostname, + user=self.options.proxmox.username, + password=self.options.proxmox.password, + verify_ssl=False) + + # /cluster/resources[type=vm] will give us all VMs no matter which node + # they reside on + try: + vms = proxmox.cluster.resources.get(type="vm") + except ResourceException as e: + raise CuckooMachineError("Error enumerating VMs: %s" % e) + + for vm in vms: + if vm["name"] == label: + # dynamically address + # /nodes//{qemu,lxc,openvz,...}/ to get handle on + # VM + node = proxmox.nodes(vm["node"]) + hv = node.__getattr__(vm["type"]) + vm = hv.__getattr__(str(vm["vmid"])) + + # remember various request proxies for subsequent actions + self.node = node + self.vm = vm + return + + raise CuckooMachineError("Not found") + + def wait_for_task(self, taskid): + """Wait for long-running Proxmox task to finish. + + Only to be called after successfully having called find_vm() or having + otherwise initialised the Proxmox node object to work against. 
+ + @param taskid: id of Proxmox task to wait for + @raise CuckooMachineError: if task status cannot be determined.""" + if not self.node: + raise CuckooMachineError( + "BUG: Target Proxmox node not initialized.") + + elapsed = 0 + while elapsed < self.timeout: + try: + task = self.node.tasks(taskid).status.get() + except ResourceException as e: + raise CuckooMachineError("Error getting status of task " + "%s: %s" % (taskid, e)) + + if task["status"] == "stopped": + return task + + log.debug("Waiting for task %s to finish: %s", taskid, task) + time.sleep(1) + elapsed += 1 + + return None + + def find_snapshot(self, label): + """Find a specific or the most current snapshot of a VM. + + Only to be called after successfully having called find_vm() or having + otherwise initialised the VM object to work against. + + @param label: VM label for additional parameter retrieval + @raise CuckooMachineError: if snapshots cannot be enumerated.""" + # use a statically configured snapshot name if configured without any + # additional checks. User has to make sure it exists then. + snapshot = self.db.view_machine_by_label(label).snapshot + if snapshot: + return snapshot + + if not self.vm: + raise CuckooMachineError("BUG: Target VM not initialized.") + + # heuristically determine the most recent snapshot if no snapshot name + # is explicitly configured. + log.debug("No snapshot configured for VM %s, determining most recent " + "one", label) + try: + snapshots = self.vm.snapshot.get() + except ResourceException as e: + raise CuckooMachineError("Error enumerating snapshots: %s" % e) + + snaptime = 0 + snapshot = None + for snap in snapshots: + # ignore "meta-snapshot" current which is the current state + if snap["name"] == "current": + continue + + if snap["snaptime"] > snaptime: + snaptime = snap["snaptime"] + snapshot = snap["name"] + + return snapshot + + def rollback(self, label): + """Roll back a VM's status to a statically configured or the most recent + snapshot. + + @param label: VM label for lookup in Proxmox and additional parameter + retrieval. + @raise CuckooMachineError: if snapshot cannot be found, reverting the + machine to the snapshot cannot be triggered + or times out or fails for another reason.""" + + snapshot = self.find_snapshot(label) + if not snapshot: + raise CuckooMachineError("No snapshot found - check config") + + try: + log.debug("Reverting VM %s to snapshot %s", label, snapshot) + taskid = self.vm.snapshot(snapshot).rollback.post() + except ResourceException as e: + raise CuckooMachineError("Couldn't trigger rollback to " + "snapshot %s: %s" % (snapshot, e)) + + task = self.wait_for_task(taskid) + if not task: + raise CuckooMachineError("Timeout expired while rolling back to " + "snapshot %s" % snapshot) + if task["exitstatus"] != "OK": + raise CuckooMachineError("Rollback to snapshot %s failed: %s" + % (snapshot, task["exitstatus"])) + + def start(self, label, task): + """Roll back VM to known-pristine snapshot and optionally start it if + not already running after reverting to the snapshot. + + @param label: VM label for lookup by name in Proxmox and additional + parameter retrieval.
+ @raise CuckooMachineError: if snapshot cannot be found, reverting the + machine to the snapshot or starting the VM + cannot be triggered or times out or fails + for another reason.""" + self.find_vm(label) + self.rollback(label) + + try: + status = self.vm.status.current.get() + except ResourceException as e: + raise CuckooMachineError("Couldn't get status: %s" % e) + + if status["status"] == "running": + log.debug("VM already running after rollback, no need to start it") + return + + try: + log.debug("Starting VM %s", label) + taskid = self.vm.status.start.post() + except ResourceException as e: + raise CuckooMachineError("Couldn't trigger start: %s" % e) + + task = self.wait_for_task(taskid) + if not task: + raise CuckooMachineError("Timeout expired while starting") + if task["exitstatus"] != "OK": + raise CuckooMachineError("Start failed: %s" % task["exitstatus"]) + + def stop(self, label): + """Do a hard shutdown of the VM. + + @param label: VM label for lookup by name in Proxmox. + @raise CuckooMachineError: if VM cannot be found or stopping it cannot + be triggered or times out or fails for + another reason.""" + self.find_vm(label) + + try: + log.debug("Stopping VM %s", label) + taskid = self.vm.status.stop.post() + except ResourceException as e: + raise CuckooMachineError("Couldn't trigger stop: %s" % e) + + task = self.wait_for_task(taskid) + if not task: + raise CuckooMachineError("Timeout expired while stopping") + if task["exitstatus"] != "OK": + raise CuckooMachineError("Stop failed: %s" % task["exitstatus"]) diff --git a/modules/machinery/vmwareserver.py b/modules/machinery/vmwareserver.py index 3454fdbb343..caebf9b4b99 100644 --- a/modules/machinery/vmwareserver.py +++ b/modules/machinery/vmwareserver.py @@ -57,7 +57,7 @@ def _check_snapshot(self, vmx_path, snapshot): " listSnapshots " + "\"" + vmx_path + "\"" try: - p = subprocess.Popen(check_string, universal_newlines=True,, shell=True) + p = subprocess.Popen(check_string, universal_newlines=True, shell=True) output, _ = p.communicate() except OSError as e: raise CuckooMachineError("Unable to get snapshot list for %s. 
" diff --git a/modules/processing/CAPE.py b/modules/processing/CAPE.py index 3e396bb9f2b..c310be2ce89 100644 --- a/modules/processing/CAPE.py +++ b/modules/processing/CAPE.py @@ -23,7 +23,6 @@ import re import hashlib import imp -import datetime from lib.cuckoo.common.abstracts import Processing from lib.cuckoo.common.constants import CUCKOO_ROOT @@ -169,29 +168,19 @@ def process_file(self, file_path, CAPE_output, append_file, metadata={}): if file_path.endswith("_info.txt"): return - texttypes = [ - "ASCII", - "Windows Registry text", - "XML document text", - "Unicode text", - ] - - textchars = bytearray({7, 8, 9, 10, 12, 13, 27} | set(range(0x20, 0x100)) - {0x7f}) - is_binary_file = lambda bytes: bool(bytes.translate(None, textchars)) - file_info = File(file_path, metadata.get("metadata", "")).get_all() # Get the file data - with open(file_info["path"], "rb") as file_open: - file_data = file_open.read(buf + 1) - - if is_binary_file(file_data[:8192]): - file_info["data"] = None - else: - if len(file_data) > buf: - file_info["data"] = convert_to_printable(file_data[:buf] + " ") - else: - file_info["data"] = convert_to_printable(file_data) + try: + with open(file_info["path"], "r") as file_open: + file_data = file_open.read(buf + 1) + + if len(file_data) > buf: + file_info["data"] = convert_to_printable(file_data[:buf] + " ") + else: + file_info["data"] = convert_to_printable(file_data) + except UnicodeDecodeError as e: + pass if metadata.get("pids", False): if len(metadata["pids"]) == 1: @@ -440,14 +429,14 @@ def process_file(self, file_path, CAPE_output, append_file, metadata={}): "payload", "config", "loader" - ] + ] try: for type in extraction_types: if type in hit["meta"]["cape_type"].lower(): file_info["cape_type"] = hit["meta"]["cape_type"] cape_name = hit["name"].replace('_', ' ') - except: - pass + except Exception as e: + print(e) type_strings = file_info["type"].split() if "-bit" not in file_info["cape_type"]: if type_strings[0] in ("PE32+", "PE32"): diff --git a/modules/processing/behavior.py b/modules/processing/behavior.py index 88fbdb789d2..61e369dd37a 100644 --- a/modules/processing/behavior.py +++ b/modules/processing/behavior.py @@ -11,10 +11,12 @@ from lib.cuckoo.common.abstracts import Processing from lib.cuckoo.common.config import Config from lib.cuckoo.common.netlog import BsonParser +from lib.cuckoo.common.compressor import CuckooBsonCompressor from lib.cuckoo.common.utils import convert_to_printable, pretty_print_arg, pretty_print_retval, logtime, default_converter, bytes2str log = logging.getLogger(__name__) cfg = Config() +cfg_process = Config("processing") def fix_key(key): """Fix a registry key to have it normalized. @@ -345,9 +347,11 @@ def _parse(self, row): class Processes: """Processes analyzer.""" - def __init__(self, logs_path): + def __init__(self, logs_path, task): """@param logs_path: logs path.""" + self.task = task self._logs_path = logs_path + self.options = dict((value.strip() for value in option.split("=", 1)) for option in self.task["options"].split(",") if option and '=' in option) def run(self): """Run analysis. 
@@ -368,6 +372,10 @@ def run(self): for file_name in os.listdir(self._logs_path): file_path = os.path.join(self._logs_path, file_name) + # Check if Loop Detection is enabled globally or locally (as an option) + if cfg_process.loop_detection.enabled or self.options.get("loop_detection"): + self.compress_log_file(file_path) + if os.path.isdir(file_path): continue @@ -400,6 +408,19 @@ def run(self): return results + def compress_log_file(self, file_path): + if file_path.endswith(".bson") and os.stat(file_path).st_size: + if not CuckooBsonCompressor().run(file_path): + log.warning("Could not execute loop detection analysis.") + else: + log.info("BSON was compressed successfully.") + return True + else: + log.warning("Nonexistent or empty BSON file \"%s\".", file_path) + + return False + + class Summary: """Generates summary information.""" @@ -1189,7 +1210,7 @@ def run(self): @return: results dict. """ behavior = {} - behavior["processes"] = Processes(self.logs_path).run() + behavior["processes"] = Processes(self.logs_path, self.task).run() instances = [ Anomaly(), diff --git a/modules/processing/dropped.py b/modules/processing/dropped.py index 361e977bb07..747c0cbeaa0 100644 --- a/modules/processing/dropped.py +++ b/modules/processing/dropped.py @@ -41,52 +41,25 @@ def run(self): file_path = os.path.join(dir_name, file_name) file_info = File(file_path=file_path).get_all() file_info.update(meta.get(file_info["path"], {})) - dropped_files.append(file_info) - - for dir_name, dir_names, file_names in os.walk(self.package_files): - for file_name in file_names: - file_path = os.path.join(dir_name, file_name) - file_info = File(file_path=file_path).get_all() - dropped_files.append(file_info) - - return dropped_files - - # ToDo adapt - textchars = bytearray({7, 8, 9, 10, 12, 13, 27} | set(range(0x20, 0x100)) - {0x7f}) - is_binary_file = lambda bytes: bool(bytes.translate(None, textchars)) - file_names = os.listdir(self.dropped_path) - for file_name in file_names: - file_path = os.path.join(self.dropped_path, file_name) - if not os.path.isfile(file_path): - continue - if file_name.endswith("_info.txt"): - continue - guest_paths = [line.strip() for line in open(file_path + "_info.txt")] - guest_name = guest_paths[0].split("\\")[-1] - file_info = File(file_path=file_path, guest_paths=guest_paths, file_name=guest_name).get_all() - texttypes = [ - "ASCII", - "Windows Registry text", - "XML document text", - "Unicode text", - ] - readit = False - for texttype in texttypes: - if texttype in file_info["type"]: - readit = True - break - - if is_binary_file(open(file_info["path"], 'rb').read(8192)): - pass - else: - if readit: + guest_path = file_info["filepath"] + guest_name = guest_path.split("\\")[-1] + file_info["guest_paths"] = [guest_path] + file_info["name"] = guest_name + try: with open(file_info["path"], "r") as drop_open: filedata = drop_open.read(buf + 1) if len(filedata) > buf: file_info["data"] = convert_to_printable(filedata[:buf] + " ") else: file_info["data"] = convert_to_printable(filedata) + except UnicodeDecodeError as e: + pass + dropped_files.append(file_info) - dropped_files.append(file_info) + for dir_name, dir_names, file_names in os.walk(self.package_files): + for file_name in file_names: + file_path = os.path.join(dir_name, file_name) + file_info = File(file_path=file_path).get_all() + dropped_files.append(file_info) return dropped_files diff --git a/modules/processing/parsers/CAPE/SCInject.py b/modules/processing/parsers/CAPE/SCInject.py new file mode 100644 index 
00000000000..02281ef1bb8 --- /dev/null +++ b/modules/processing/parsers/CAPE/SCInject.py @@ -0,0 +1,19 @@ +try: + import re2 as re +except ImportError: + import re + +url_regex = re.compile('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+') + +def config(data): + urls_dict = dict() + + try: + urls_dict["URLs"] = [url.lower() for url in url_regex.findall(data)] + except Exception as e: + print(e) + + if "URLs" in urls_dict and len(urls_dict["URLs"]) > 1: + return urls_dict + + return None diff --git a/modules/processing/parsers/mwcp/DridexDropper.py b/modules/processing/parsers/mwcp/DridexLoader.py similarity index 97% rename from modules/processing/parsers/mwcp/DridexDropper.py rename to modules/processing/parsers/mwcp/DridexLoader.py index 97f81ed630c..b42619652fb 100644 --- a/modules/processing/parsers/mwcp/DridexDropper.py +++ b/modules/processing/parsers/mwcp/DridexLoader.py @@ -25,7 +25,7 @@ MAX_IP_STRING_SIZE = 16 # aaa.bbb.ccc.ddd\0 -class DridexDropper(Parser): +class DridexLoader(Parser): DESCRIPTION = 'DridexDropper configuration parser.' AUTHOR = 'kevoreilly' @@ -43,7 +43,7 @@ def run(self): line, c2va_offset = False, False for match in matches: - if match.rule != 'DridexDropper': + if match.rule != 'DridexLoader': continue for item in match.strings: diff --git a/modules/processing/static.py b/modules/processing/static.py index 2f5123e01c1..2ab823321ff 100644 --- a/modules/processing/static.py +++ b/modules/processing/static.py @@ -205,7 +205,8 @@ def _get_custom_attrs(self): item["value"] = convert_to_printable(valueval) ret.append(item) return ret - except: + except Exception as e: + print(e) return None def _get_assembly_refs(self): @@ -227,7 +228,8 @@ def _get_assembly_refs(self): ret.append(item) return ret - except: + except Exception as e: + print(e) return None def _get_assembly_info(self): @@ -240,7 +242,8 @@ def _get_assembly_info(self): if line.startswith("Version:"): ret["version"] = convert_to_printable(line[8:].strip()) return ret - except: + except Exception as e: + print(e) return None def _get_type_refs(self): @@ -259,7 +262,8 @@ def _get_type_refs(self): ret.append(item) return sorted(ret) - except: + except Exception as e: + print(e) return None def run(self): @@ -308,7 +312,10 @@ def _get_peid_signatures(self): try: sig_path = os.path.join(CUCKOO_ROOT, "data", "peutils", "UserDB.TXT") signatures = peutils.SignatureDatabase(sig_path) - return set(list(signatures.match_all(self.pe, ep_only=True))) + result = signatures.match_all(self.pe, ep_only=True) + if not result: + return None + return set(list(result)) except Exception as e: log.error(e, exc_info=True) return None @@ -1366,6 +1373,46 @@ def run(self): return None results = self._parse(self.file_path) return results +''' +class HwpDocument(object): + """Static analysis of HWP documents.""" + + def __init__(self, filepath, results): + self.filepath = filepath + self.files = {} + #self.ex = ExtractManager.for_task(task_id) + + def unpack_hwp(self): + """Unpacks ole-based zip files.""" + ole = olefile.OleFileIO(self.filepath) + streams = ole.listdir() + for stream in streams: + stream_name = '/'.join(stream) + content = ole.openstream(stream).read() + try: + stream_content = zlib.decompress(ole.openstream(stream).read(), -15) + self.files[stream_name] = stream_content + except Exception as e: + print(e) + ole.close() + + def extract_eps(self): + """Extract some information from Encapsulated Post Script files.""" + ret = [] + for filename, content in self.files.items(): + if
filename.lower().endswith(".eps") or filename.lower().endswith(".ps"): + ret.append(content) + return ret + + def run(self): + self.unpack_hwp() + + self.ex.peek_office(self.files) + + return { + "eps": self.extract_eps() + } +''' class Java(object): """Java Static Analysis""" @@ -1646,6 +1693,8 @@ def run(self): static = PDF(self.file_path).run() elif HAVE_OLETOOLS and package in ("doc", "ppt", "xls", "pub"): static = Office(self.file_path, self.results).run() + #elif HAVE_OLETOOLS and package in ("hwp", "hwp"): + # static = HwpDocument(self.file_path, self.results).run() elif "Java Jar" in thetype or self.task["target"].endswith(".jar"): decomp_jar = self.options.get("procyon_path", None) if decomp_jar and not os.path.exists(decomp_jar): diff --git a/modules/processing/suricata.py b/modules/processing/suricata.py index 35b4cf0cc54..ebd504e3618 100644 --- a/modules/processing/suricata.py +++ b/modules/processing/suricata.py @@ -4,7 +4,7 @@ from __future__ import absolute_import import datetime -import json +import simplejson as json import logging import os import shutil @@ -63,30 +63,22 @@ def run(self): SURICATA_FILE_BUFFER = self.options.get("buffer", 8192) Z7_PATH = self.options.get("7zbin", None) FILES_ZIP_PASS = self.options.get("zippass", None) - SURICATA_FILE_COPY_DST_DIR = self.options.get("file_copy_dest_dir", None) - SURICATA_FILE_COPY_MAGIC_RE = self.options.get("file_magic_re", None) - if SURICATA_FILE_COPY_MAGIC_RE: - try: - SURICATA_FILE_COPY_MAGIC_RE = re.compile(SURICATA_FILE_COPY_MAGIC_RE) - except: - log.warning("Failed to compile suricata copy magic RE" % (SURICATA_FILE_COPY_MAGIC_RE)) - SURICATA_FILE_COPY_MAGIC_RE = None + # Socket SURICATA_SOCKET_PATH = self.options.get("socket_file", None) - SURICATA_SOCKET_PYLIB = self.options.get("pylib_dir", None) # Command Line SURICATA_BIN = self.options.get("bin", None) - suricata = {} - suricata["alerts"]=[] - suricata["tls"]=[] - suricata["perf"]=[] - suricata["files"]=[] - suricata["http"]=[] - suricata["dns"]=[] - suricata["ssh"]=[] - suricata["file_info"]=[] + suricata = dict() + suricata["alerts"] = [] + suricata["tls"] = [] + suricata["perf"] = [] + suricata["files"] = [] + suricata["http"] = [] + suricata["dns"] = [] + suricata["ssh"] = [] + suricata["fileinfo"] = [] suricata["eve_log_full_path"] = None suricata["alert_log_full_path"] = None @@ -131,10 +123,10 @@ def run(self): pass if not os.path.exists(SURICATA_CONF): - log.warning("Unable to Run Suricata: Conf File %s Does Not Exist" % (SURICATA_CONF)) + log.warning("Unable to Run Suricata: Conf File {} Does Not Exist".format(SURICATA_CONF)) return suricata["alerts"] if not os.path.exists(self.pcap_path): - log.warning("Unable to Run Suricata: Pcap file %s Does Not Exist" % (self.pcap_path)) + log.warning("Unable to Run Suricata: Pcap file {} Does Not Exist".format(self.pcap_path)) return suricata["alerts"] # Add to this if you wish to ignore any SIDs for the suricata alert logs @@ -152,19 +144,18 @@ def run(self): ] if SURICATA_RUNMODE == "socket": - if SURICATA_SOCKET_PYLIB != None: - sys.path.append(SURICATA_SOCKET_PYLIB) try: - from suricatasc import SuricataSC + #from suricatasc import SuricataSC + from lib.cuckoo.common.suricatasc import SuricataSC except Exception as e: - log.warning("Failed to import suricatasc lib %s" % (e)) + log.warning("Failed to import suricatasc lib {}".format(e)) return suricata loopcnt = 0 maxloops = 24 loopsleep = 5 - args = {} + args = dict() args["filename"] = self.pcap_path args["output-dir"] = self.logs_path @@ -173,36 +164,39 
@@ def run(self): suris.connect() suris.send_command("pcap-file",args) except Exception as e: - log.warning("Failed to connect to socket and send command %s: %s" % (SURICATA_SOCKET_PATH, e)) + log.warning("Failed to connect to socket and send command {}: {}".format(SURICATA_SOCKET_PATH, e)) return suricata while loopcnt < maxloops: try: pcap_flist = suris.send_command("pcap-file-list") current_pcap = suris.send_command("pcap-current") - log.debug("pcapfile list: %s current pcap: %s" % (pcap_flist, current_pcap)) + log.debug("pcapfile list: {} current pcap: {}".format(pcap_flist, current_pcap)) - if self.pcap_path not in pcap_flist["message"]["files"] and current_pcap["message"] != self.pcap_path: + if self.pcap_path not in pcap_flist["message"]["files"] and \ + current_pcap["message"] != self.pcap_path: log.debug("Pcap not in list and not current pcap lets assume it's processed") break else: loopcnt = loopcnt + 1 time.sleep(loopsleep) except Exception as e: - log.warning("Failed to get pcap status breaking out of loop %s" % (e)) + log.warning("Failed to get pcap status breaking out of loop {}".format(e)) break if loopcnt == maxloops: - log.warning("Loop timeout of %ssec occured waiting for file %s to finish processing" % (maxloops * loopsleep, pcapfile)) + logstr = "Loop timeout of {} sec occurred waiting for file {} to finish processing" + log.warning(logstr.format(maxloops * loopsleep, current_pcap)) return suricata elif SURICATA_RUNMODE == "cli": if not os.path.exists(SURICATA_BIN): - log.warning("Unable to Run Suricata: Bin File %s Does Not Exist" % (SURICATA_CONF)) + log.warning("Unable to Run Suricata: Bin File {} Does Not Exist".format(SURICATA_CONF)) return suricata["alerts"] - cmd = "%s -c %s -k none -l %s -r %s" % (SURICATA_BIN,SURICATA_CONF,self.logs_path,self.pcap_path) - ret,stdout,stderr = self.cmd_wrapper(cmd) + cmdstr = "{} -c {} -k none -l {} -r {}" + cmd = cmdstr.format(SURICATA_BIN, SURICATA_CONF, self.logs_path, self.pcap_path) + ret, stdout, stderr = self.cmd_wrapper(cmd) if ret != 0: - log.warning("Suricata returned a Exit Value Other than Zero %s" % (stderr)) - return suricata + log.warning("Suricata returned a Exit Value Other than Zero {}".format(stderr)) + return suricata else: log.warning("Unknown Suricata Runmode") @@ -223,12 +217,13 @@ def run(self): if not datalist: log.warning("Suricata: Failed to find usable Suricata log file") + parsed_files = [] for data in datalist: for line in data.splitlines(): try: parsed = json.loads(line) except: - log.warning("Suricata: Failed to parse line as json" % (line)) + log.warning("Suricata: Failed to parse line {} as json".format(line)) continue if 'event_type' in parsed: @@ -276,8 +271,10 @@ def run(self): hlog["dstport"] = parsed["dest_port"] hlog["dstip"] = parsed["dest_ip"] hlog["timestamp"] = parsed["timestamp"].replace("T", " ") - keyword = ("uri", "length", "hostname", "status", "http_method", "contenttype", "ua", "referrer") - keyword_suri = ("url", "length", "hostname", "status", "http_method", "http_content_type", "http_user_agent", "http_refer") + keyword = ("uri", "length", "hostname", "status", "http_method", "contenttype", "ua", + "referrer") + keyword_suri = ("url", "length", "hostname", "status", "http_method", "http_content_type", + "http_user_agent", "http_refer") for key, key_s in zip(keyword, keyword_suri): try: hlog[key] = parsed["http"].get(key_s, "None") @@ -301,62 +298,76 @@ def run(self): suricata["ssh"].append(parsed) elif parsed["event_type"] == "dns": suricata["dns"].append(parsed) - - if 
os.path.exists(SURICATA_FILE_LOG_FULL_PATH): - suricata["file_log_full_path"] = SURICATA_FILE_LOG_FULL_PATH - f = open(SURICATA_FILE_LOG_FULL_PATH, "rb").readlines() - for l in f: - try: - d = json.loads(l) - except: - log.warning("failed to load JSON from file log") - continue - # Some log entries do not have an id - if "id" not in d: - continue - src_file = "%s/file.%s" % (SURICATA_FILES_DIR_FULL_PATH,d["id"]) - if os.path.exists(src_file): - if SURICATA_FILE_COPY_MAGIC_RE and SURICATA_FILE_COPY_DST_DIR and os.path.exists(SURICATA_FILE_COPY_DST_DIR): + elif parsed["event_type"] == "fileinfo": + flog = dict() + flog["http_host"] = parsed.get("http", {}).get("hostname", "") + flog["http_uri"] = parsed.get("http", {}).get("url", "") + flog["http_referer"] = parsed.get("http", {}).get("referer", "") + flog["http_user_agent"] = parsed.get("http", {}).get("http_user_agent", "") + flog["protocol"] = parsed.get("proto", "") + flog["magic"] = parsed.get("fileinfo", {}).get("magic", "") + flog["size"] = parsed.get("fileinfo", {}).get("size", "") + flog["stored"] = parsed.get("fileinfo", {}).get("stored", "") + flog["sha256"] = parsed.get("fileinfo", {}).get("sha256", "") + flog["md5"] = parsed.get("fileinfo", {}).get("md5", "") + flog["filename"] = parsed.get("fileinfo", {}).get("filename", "") + if "/" in flog["filename"]: + flog["filename"] = flog["filename"].split("/")[-1] + parsed_files.append(flog) + + if parsed_files: + for sfile in parsed_files: + if sfile.get("stored", False): + filename = sfile["sha256"] + src_file = "{}/{}/{}".format(SURICATA_FILES_DIR_FULL_PATH, filename[0:2], filename) + dst_file = "{}/{}".format(SURICATA_FILES_DIR_FULL_PATH, filename) + if os.path.exists(src_file): try: - m = re.search(SURICATA_FILE_COPY_MAGIC_RE,d["magic"]) - if m: - dst_file = "%s/%s" % (SURICATA_FILE_COPY_DST_DIR,d["md5"]) - shutil.copy2(src_file,dst_file) - log.warning("copied %s to %s" % (src_file,dst_file)) - except Exception as e: - log.warning("Unable to copy suricata file: %s" % e) - file_info = File(file_path=src_file).get_all() - texttypes = [ - "ASCII", - "Windows Registry text", - "XML document text", - "Unicode text", - ] - readit = False - for texttype in texttypes: - if texttype in file_info["type"]: - readit = True + shutil.move(src_file, dst_file) + except OSError as e: + log.warning("Unable to move suricata file: {}".format(e)) break - if readit: - with open(file_info["path"], "rb") as drop_open: - filedata = drop_open.read(SURICATA_FILE_BUFFER + 1) - if len(filedata) > SURICATA_FILE_BUFFER: - file_info["data"] = convert_to_printable(filedata[:SURICATA_FILE_BUFFER] + " ") - else: - file_info["data"] = convert_to_printable(filedata) - d["file_info"] = file_info - if "/" in d["filename"]: - d["filename"] = d["filename"].split("/")[-1] - suricata["files"].append(d) - else: - log.warning("Suricata: Failed to find file log at %s" % (SURICATA_FILE_LOG_FULL_PATH)) - - if SURICATA_FILES_DIR_FULL_PATH and os.path.exists(SURICATA_FILES_DIR_FULL_PATH) and Z7_PATH and os.path.exists(Z7_PATH): - # /usr/bin/7z a -pinfected -y files.zip files files-json.log - cmd = "cd %s && %s a -p%s -y files.zip %s %s" % (self.logs_path,Z7_PATH,FILES_ZIP_PASS,SURICATA_FILE_LOG,SURICATA_FILES_DIR) - ret,stdout,stderr = self.cmd_wrapper(cmd) - if ret != 0: - log.warning("Suricata: Failed to create %s/files.zip" % (self.logs_path)) + texttypes = [ + "ASCII", + "Windows Registry text", + "XML document text", + "Unicode text", + ] + readit = False + file_info = File(file_path=dst_file).get_all() + for texttype in 
texttypes: + if texttype in file_info["type"]: + readit = True + break + if readit: + with open(file_info["path"], "r") as drop_open: + filedata = drop_open.read(SURICATA_FILE_BUFFER + 1) + if len(filedata) > SURICATA_FILE_BUFFER: + file_info["data"] = convert_to_printable( + filedata[:SURICATA_FILE_BUFFER] + " ") + else: + file_info["data"] = convert_to_printable(filedata) + sfile["file_info"] = file_info + suricata["files"].append(sfile) + with open(SURICATA_FILE_LOG_FULL_PATH, "w") as drop_log: + drop_log.write(json.dumps(suricata["files"], indent=4)) + + # Cleanup file subdirectories left behind by messy Suricata + for d in [dirpath for (dirpath, dirnames, filenames) in os.walk(SURICATA_FILES_DIR_FULL_PATH) + if len(dirnames) == 0 and len(filenames) == 0]: + try: + shutil.rmtree(d) + except OSError as e: + log.warning("Unable to delete suricata file subdirectories: {}".format(e)) + + if SURICATA_FILES_DIR_FULL_PATH and os.path.exists(SURICATA_FILES_DIR_FULL_PATH) and Z7_PATH \ + and os.path.exists(Z7_PATH): + # /usr/bin/7z a -pinfected -y files.zip files-json.log files + cmdstr = "cd {} && {} a -p{} -y files.zip {} {}" + cmd = cmdstr.format(self.logs_path, Z7_PATH, FILES_ZIP_PASS, SURICATA_FILE_LOG, SURICATA_FILES_DIR) + ret, stdout, stderr = self.cmd_wrapper(cmd) + if ret > 1: + log.warning("Suricata: Failed to create {}/files.zip - Error {}".format(self.logs_path, ret)) suricata["alerts"] = self.sort_by_timestamp(suricata["alerts"]) suricata["http"] = self.sort_by_timestamp(suricata["http"]) diff --git a/modules/reporting/compressresults.py b/modules/reporting/compressresults.py index 9ec40e53493..a56b3e46bbc 100644 --- a/modules/reporting/compressresults.py +++ b/modules/reporting/compressresults.py @@ -3,7 +3,7 @@ # See the file 'docs/LICENSE' for copying permission. 
from __future__ import absolute_import -import json +import simplejson as json from bson import ObjectId from bson.binary import Binary import zlib @@ -36,7 +36,7 @@ def run(self, results): results[keyword] = Binary(compressed_data) # compress behaviour analysis (enhanced & summary) - for keyword in ("enhanced", "summary"): + for keyword in ("enhanced",): if keyword in results["behavior"]: compressed_behavior_enhanced = zlib.compress(JSONEncoder().encode(results["behavior"][keyword]).encode('utf8')) results["behavior"][keyword] = Binary(compressed_behavior_enhanced) diff --git a/modules/reporting/submitCAPE.py b/modules/reporting/submitCAPE.py index f4876af7ec1..eec15304862 100644 --- a/modules/reporting/submitCAPE.py +++ b/modules/reporting/submitCAPE.py @@ -41,7 +41,7 @@ "Compression", "Compression_dll", "Compression_doc", "Compression_zip", "Compression_js", "Compression_pdf", "Debugger", "Debugger_dll", "Debugger_doc", "DumpOnAPI", "Doppelganging", "Emotet", "Emotet_doc", "EvilGrab", "Extraction", "Extraction_dll", "Extraction_regsvr", "Extraction_zip", "Extraction_ps1", "Extraction_jar", "Extraction_pdf", "Extraction_js", - "Hancitor", "Hancitor_doc", "IcedID", "Injection", "Injection_dll", "Injection_doc", "Injection_pdf", "Injection_zip", + "Hancitor", "Hancitor_dll", "Hancitor_doc", "IcedID", "Injection", "Injection_dll", "Injection_doc", "Injection_pdf", "Injection_zip", "Injection_ps1", "Injection_js", "PlugX", "PlugXPayload", "PlugX_dll", "PlugX_doc", "PlugX_zip", "QakBot", "RegBinary", "Sedreco", "Sedreco_dll", "Shellcode-Extraction", "TrickBot", "TrickBot_doc", "UPX", "UPX_dll", "Ursnif" ] @@ -451,6 +451,8 @@ def run(self, results): package = 'Hancitor_doc' elif parent_package in ('exe', 'Injection', 'Compression'): package = 'Hancitor' + elif parent_package in ('dll', 'Injection_dll', 'Compression_dll'): + package = 'Hancitor_dll' # if 'RegBinary' in detections or 'CreatesLargeKey' in detections: elif 'RegBinary' in detections: diff --git a/modules/signatures/cape_extracted.py b/modules/signatures/cape_extracted.py index 4c1c54725e8..a3c54d2208b 100644 --- a/modules/signatures/cape_extracted.py +++ b/modules/signatures/cape_extracted.py @@ -29,6 +29,8 @@ def run(self): if "CAPE" in self.results: for cape in self.results.get("CAPE", []): capetype = cape.get("cape_type", "") + if not capetype: + capetype = cape.get("description", "") yara = cape.get("cape_yara", "") process = cape.get("process_name", "") if capetype and process: diff --git a/modules/signatures/cypherit_mutex.py b/modules/signatures/cypherit_mutex.py new file mode 100644 index 00000000000..3ae7c8678f5 --- /dev/null +++ b/modules/signatures/cypherit_mutex.py @@ -0,0 +1,41 @@ +# Copyright (C) 2019 ditekshen +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see .
+ +from lib.cuckoo.common.abstracts import Signature + +class CypherITMutexes(Signature): + name = "cypherit_mutexes" + description = "Creates known CypherIT/Frenchy Shellcode mutexes" + severity = 3 + categories = ["trojan"] + families = ["AgentTesla", "HawkEye", "Nanocore", "Formbook", "Remcos", "Njrat", "Azorult", "Fareit", "Lokibot", "Predator"] + authors = ["ditekshen"] + minimum = "0.5" + ttp = ["T1045"] + + def run(self): + indicators = [ + "frenchy_shellcode_\d+$", + "Startup_shellcode_\d+$", + ] + + for indicator in indicators: + match = self.check_mutex(pattern=indicator, regex=True, all=True) + if match: + for mut in match: + self.data.append({"mutex": mut}) + return True + + return False \ No newline at end of file diff --git a/modules/signatures/disables_notificationcenter.py b/modules/signatures/disables_notificationcenter.py new file mode 100644 index 00000000000..d8dd6ce1010 --- /dev/null +++ b/modules/signatures/disables_notificationcenter.py @@ -0,0 +1,38 @@ +# Copyright (C) 2019 ditekshen +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +from lib.cuckoo.common.abstracts import Signature + +class DisablesNotificationCenter(Signature): + name = "disables_notificationcenter" + description = "Disables Windows Notification Center" + severity = 3 + categories = ["generic"] + authors = ["ditekshen"] + minimum = "0.5" + ttp = ["T1089"] + + def run(self): + indicators = [ + ".*\\\\Explorer\\\\DisableNotificationCenter$", + ] + + for indicator in indicators: + match = self.check_write_key(pattern=indicator, regex=True) + if match: + self.data.append({"regkey": match}) + return True + + return False \ No newline at end of file diff --git a/modules/signatures/disables_smartscreen.py b/modules/signatures/disables_smartscreen.py new file mode 100644 index 00000000000..a1399627888 --- /dev/null +++ b/modules/signatures/disables_smartscreen.py @@ -0,0 +1,51 @@ +# Copyright (C) 2019 ditekshen +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+ +from lib.cuckoo.common.abstracts import Signature + +class DisablesSmartScreen(Signature): + name = "disables_smartscreen" + description = "Modifies or disables Windows SmartScreen" + severity = 3 + categories = ["generic"] + authors = ["ditekshen"] + minimum = "0.5" + ttp = ["T1089"] + + def run(self): + re_match = False + cmd_match = False + indicators = [ + ".*\\\\Windows\\\\CurrentVersion\\\\explorer\\\\SmartScreenEnabled$", + ".*\\\\Windows\\\\CurrentVersion\\\\AppHost\\\\SmartScreenEnabled$", + ".*\\\\MicrosoftEdge\\\\PhishingFilter$", + ] + + for indicator in indicators: + match = self.check_write_key(pattern=indicator, regex=True) + if match: + self.data.append({"regkey": match}) + re_match = True + + cmdpat = ".*\"SmartScreenEnabled\".*\"Off\".*" + match = self.check_executed_command(pattern=cmdpat, regex=True) + if match: + self.data.append({"command": match}) + cmd_match = True + + if re_match or cmd_match: + return True + + return False \ No newline at end of file diff --git a/modules/signatures/disables_winfirewall.py b/modules/signatures/disables_winfirewall.py new file mode 100644 index 00000000000..fdc42408887 --- /dev/null +++ b/modules/signatures/disables_winfirewall.py @@ -0,0 +1,40 @@ +# Copyright (C) 2019 ditekshen +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +from lib.cuckoo.common.abstracts import Signature + +class DisablesWindowsFirewall(Signature): + name = "disables_winfirewall" + description = "Disables Windows firewall" + severity = 3 + categories = ["generic"] + authors = ["ditekshen"] + minimum = "0.5" + ttp = ["T1089"] + + def run(self): + indicators = [ + "netsh\s+firewall\s+set.*disable", + "netsh\s+advfirewall\s+set.*off", + ] + + for indicator in indicators: + match = self.check_executed_command(pattern=indicator, regex=True, all=True) + if match: + for fwcmd in match: + self.data.append({"command": fwcmd}) + return True + + return False \ No newline at end of file diff --git a/modules/signatures/downloader_phorpiex_mutex.py b/modules/signatures/downloader_phorpiex_mutex.py new file mode 100644 index 00000000000..0e7bc8f8a2d --- /dev/null +++ b/modules/signatures/downloader_phorpiex_mutex.py @@ -0,0 +1,38 @@ +# Copyright (C) 2019 ditekshen +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+ +from lib.cuckoo.common.abstracts import Signature + +class PhorpiexMutexes(Signature): + name = "phorpiex_mutexes" + description = "Creates known Phorpiex mutexes" + severity = 3 + categories = ["downloader", "dropper"] + families = ["Phorpiex"] + authors = ["ditekshen"] + minimum = "0.5" + + def run(self): + indicators = [ + ".:-Tldr-:.", + ] + + for indicator in indicators: + match_mutex = self.check_mutex(pattern=indicator, regex=True) + if match_mutex: + self.data.append({"mutex": match_mutex}) + return True + + return False \ No newline at end of file diff --git a/modules/signatures/infostealer_predatorthethief.py b/modules/signatures/infostealer_predatorthethief.py new file mode 100644 index 00000000000..10cf763076a --- /dev/null +++ b/modules/signatures/infostealer_predatorthethief.py @@ -0,0 +1,69 @@ +# Copyright (C) 2019 ditekshen +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +# References: +# https://any.run/malware-trends/predator +# https://securelist.com/a-predatory-tale/89779/ +# https://fumik0.com/2018/10/15/predator-the-thief-in-depth-analysis-v2-3-5/ + +from lib.cuckoo.common.abstracts import Signature + +class PredatorTheThiefMutexes(Signature): + name = "predatorthethief_mutexes" + description = "Creates Predator The Thief information stealer mutexes" + severity = 3 + categories = ["infostealer"] + families = ["PredatorTheThief"] + authors = ["ditekshen"] + minimum = "0.5" + + def run(self): + indicators = [ + "SyystemServs", + ] + + for indicator in indicators: + match = self.check_mutex(pattern=indicator, regex=True) + if match: + self.data.append({"mutex": match}) + return True + + return False + +class PredatorTheThiefFiles(Signature): + name = "predatorthethief_files" + description = "Creates Predator The Thief information stealer files" + severity = 3 + categories = ["infostealer"] + families = ["PredatorTheThief"] + authors = ["ditekshen"] + minimum = "0.5" + + def run(self): + indicators = [ + ".*\\\\vlmi\{lulz\}yg\.col$", + ".*\\\\forms\.(log|txt)$", + ".*\\\\cards\.(log|txt)$", + ".*\\\\password\.(log|txt)$", + ".*\\\\Information\.(log|txt)$", + ] + + for indicator in indicators: + match = self.check_file(pattern=indicator, regex=True) + if match: + self.data.append({"file": match}) + return True + + return False diff --git a/modules/signatures/malware_data_encryption.py b/modules/signatures/malware_data_encryption.py new file mode 100644 index 00000000000..1ad41a3baf4 --- /dev/null +++ b/modules/signatures/malware_data_encryption.py @@ -0,0 +1,115 @@ +# Copyright (C) 2019 ditekshen +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +from lib.cuckoo.common.abstracts import Signature + +class EncryptDataAgentTeslaHTTP(Signature): + name = "encrypt_data_agenttesla_http" + description = "AgentTesla HTTP variant keylogger detected encrypting data potentially prior to exfiltrating it" + severity = 3 + categories = ["keylogger", "infostealer"] + families = ["AgentTesla"] + authors = ["ditekshen"] + minimum = "0.5" + ttp = ["T1022"] + evented = True + + def __init__(self, *args, **kwargs): + Signature.__init__(self, *args, **kwargs) + self.match = False + + filter_apinames = set(["CryptEncrypt"]) + + def on_call(self, call, process): + buff = self.get_argument(call, "Buffer") + if buff: + if "type=" in buff and "hwid=" in buff and "pcname=" in buff: + self.match = True + self.data.append({"data": buff}) + + def on_complete(self): + return self.match + +class EncryptDataNanoCore(Signature): + name = "encrypt_data_nanocore" + description = "NanoCore keylogger detected encrypting data potentially prior to exfiltrating it" + severity = 3 + categories = ["keylogger", "infostealer"] + families = ["NanoCore"] + authors = ["ditekshen"] + minimum = "0.5" + ttp = ["T1022"] + evented = True + + def __init__(self, *args, **kwargs): + Signature.__init__(self, *args, **kwargs) + self.username = str() + self.hostname = str() + self.match = False + + filter_apinames = set(["CryptEncrypt", "GetComputerNameW", "GetUserNameW"]) + + def on_call(self, call, process): + if call["api"] == "GetComputerNameW": + self.hostname = self.get_argument(call, "ComputerName") + if call["api"] == "GetUserNameW": + self.username = self.get_argument(call, "Name") + if call["api"] == "CryptEncrypt": + buff = self.get_argument(call, "Buffer") + if buff: + if buff.startswith("\\x00\\x00\\x00\\x00\\x12") and "\\x0c\\x07" in buff: + if self.hostname and self.username: + combo = self.hostname + "\\" + self.username + if combo in buff: + self.match = True + + def on_complete(self): + return self.match + +class EncryptDataAgentTeslaHTTPT2(Signature): + name = "encrypt_data_agentteslat2_http" + description = "AgentTesla v2 HTTP variant keylogger detected encrypting data potentially prior to exfiltrating it" + severity = 3 + categories = ["keylogger", "infostealer"] + families = ["Predator"] + authors = ["ditekshen"] + minimum = "0.5" + ttp = ["T1022"] + evented = True + + def __init__(self, *args, **kwargs): + Signature.__init__(self, *args, **kwargs) + self.username = str() + self.hostname = str() + self.match = False + + filter_apinames = set(["CryptEncrypt", "GetComputerNameW", "GetUserNameW"]) + + def on_call(self, call, process): + if call["api"] == "GetComputerNameW": + self.hostname = self.get_argument(call, "ComputerName") + if call["api"] == "GetUserNameW": + self.username = self.get_argument(call, "Name") + if call["api"] == "CryptEncrypt": + buff = self.get_argument(call, "Buffer") + if buff: + if "22afd24bbac47acb3f315177181b7d3cb7cd0270291f14b3" in buff: + if self.hostname and self.username: + combo = self.hostname + "/" + self.username + if combo in buff: + self.match = True + + def on_complete(self): + return self.match diff --git a/modules/signatures/network_temp_file_storage.py
b/modules/signatures/network_temp_file_storage.py new file mode 100644 index 00000000000..1d4a35ec221 --- /dev/null +++ b/modules/signatures/network_temp_file_storage.py @@ -0,0 +1,48 @@ +# Copyright (C) 2019 ditekshen +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +from lib.cuckoo.common.abstracts import Signature + +class NetworkTempFileService(Signature): + name = "network_temp_file_storage" + description = "Queries or connects to anonymous/temporary file storage service" + severity = 2 + categories = ["network"] + authors = ["ditekshen"] + minimum = "1.2" + + def run(self): + domain_indicators = [ + "plik.root.gg", + "gp.tt", + "wetransfer.com", + "send-anywhere.com", + "sendgb.com", + "send.firefox.com", + "volafile.org", + "uploadfiles.io", + "sendpace.com", + "filedropper.com", + "myairbridge.com" + ] + + found_matches = False + + for indicator in domain_indicators: + if self.check_domain(pattern=indicator): + self.data.append({"domain" : indicator}) + found_matches = True + + return found_matches \ No newline at end of file diff --git a/modules/signatures/ransomware_revil_mutex.py b/modules/signatures/ransomware_revil_mutex.py new file mode 100644 index 00000000000..66c42ae1517 --- /dev/null +++ b/modules/signatures/ransomware_revil_mutex.py @@ -0,0 +1,54 @@ +# Copyright (C) 2019 ditekshen +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+ +from lib.cuckoo.common.abstracts import Signature + +class RevilMutexes(Signature): + name = "revil_mutexes" + description = "Creates known REvil/Sodinokibi ransomware mutexes" + severity = 3 + categories = ["ransomware"] + families = ["REvil", "Sodinokibi"] + authors = ["ditekshen"] + minimum = "0.5" + + def run(self): + indicators = [ + "112983B0-B4C9-4F9B-96C4-E5394FB8A5B4", + "1DB960B8-E5C3-F077-5D68-EEE2E637EE0B", + "206D87E0-0E60-DF25-DD8F-8E4E7D1E3BF0", + "3555A3D6-37B3-0919-F7BE-F3AAB5B6644A", + "552FFA80-3393-423d-8671-7BA046BB5906", + "6CAC559B-02B4-D929-3675-2706BBB8CF66", + "859B4E91-BAF1-3DBB-E616-E9E99E851136", + "879EBE58-4C9F-A6BE-96A3-4C51826CEC2F", + "95B97D2B-4513-2041-E8A5-AC7446F12075", + "BF29B630-7648-AADF-EC8A-94647D2349D6", + "C126B3B3-6B51-F91C-6FDF-DD2C70FA45E6", + "C19C0A84-FA11-3F9C-C3BC-0BCB16922ABF", + "C817795D-7756-05BF-A69E-6ED0CE91EAC4", + "D382D713-AA87-457D-DDD3-C3DDD8DFBC96", + "DAE678E1-967E-6A19-D564-F7FCA6E7AEBC", + "FB864EC7-B361-EA6D-545C-E1A167CCBE95", + "FDC9FA6E-8257-3E98-2600-E72145612F09", + ] + + for indicator in indicators: + match_mutex = self.check_mutex(pattern=indicator, regex=True) + if match_mutex: + self.data.append({"mutex": match_mutex}) + return True + + return False \ No newline at end of file diff --git a/modules/signatures/rat_karagany.py b/modules/signatures/rat_karagany.py new file mode 100644 index 00000000000..9108946ba81 --- /dev/null +++ b/modules/signatures/rat_karagany.py @@ -0,0 +1,78 @@ +# Copyright (C) 2019 ditekshen +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+ +try: + import re2 as re +except ImportError: + import re + +from lib.cuckoo.common.abstracts import Signature + +class KaraganyEventObjects(Signature): + name = "karagany_system_event_objects" + description = "Creates system event objects associated with Karagany/xFrost RAT" + severity = 3 + categories = ["RAT"] + families = ["Karagany", "xFrost"] + authors = ["ditekshen"] + minimum = "0.5" + + def __init__(self, *args, **kwargs): + Signature.__init__(self, *args, **kwargs) + self.match = False + self.event_objects = [ + "__klg__", + "__pickill__", + "__klgkillsoft__", + ] + + filter_apinames = set(["NtCreateEvent", "NtCreateEventEx"]) + + def on_call(self, call, process): + event = self.get_argument(call, "EventName") + if event: + for obj in self.event_objects: + if obj in event: + self.match = True + self.data.append({"system_event_object": event}) + + def on_complete(self): + return self.match + +class KaraganyFiles(Signature): + name = "karagany_files" + description = "Creates files/directories associated with Karagany/xFrost RAT" + severity = 3 + categories = ["RAT"] + families = ["Karagany", "xFrost"] + authors = ["ditekshen"] + minimum = "0.5" + + def on_complete(self): + indicators = [ + ".*\\\\up_stat.txt$", + ".*\\\\stat_ag.txt$", + ".*\\\\serv_stat.txt$", + ".*\\\\svchost\d+\.txt$", + ".*\\\\Update\\\\Tmp\\\\.*", + ] + + for indicator in indicators: + match = self.check_write_file(pattern=indicator, regex=True, all=True) + if match: + self.data.append({"path": match}) + return True + + return False \ No newline at end of file diff --git a/modules/signatures/rat_xpert.py b/modules/signatures/rat_xpert.py new file mode 100644 index 00000000000..eb5143d62d0 --- /dev/null +++ b/modules/signatures/rat_xpert.py @@ -0,0 +1,62 @@ +# Copyright (C) 2019 ditekshen +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see .
+
+from lib.cuckoo.common.abstracts import Signature
+
+class XpertRATMutexes(Signature):
+    name = "xpertrat_mutexes"
+    description = "Creates Xpert RAT mutexes"
+    severity = 3
+    categories = ["RAT"]
+    families = ["Xpert"]
+    authors = ["ditekshen"]
+    minimum = "0.5"
+
+    def run(self):
+        indicators = [
+            "G2L6E3O1-E775-G5J4-R4C2-P5F660S1R4A8",
+        ]
+
+        for indicator in indicators:
+            match_mutex = self.check_mutex(pattern=indicator, regex=True)
+            if match_mutex:
+                self.data.append({"mutex": match_mutex})
+                return True
+
+        return False
+
+class XpertRATFiles(Signature):
+    name = "xpertrat_files"
+    description = "Creates Xpert RAT files"
+    severity = 3
+    categories = ["RAT"]
+    families = ["Xpert"]
+    authors = ["ditekshen"]
+    minimum = "0.5"
+
+    def run(self):
+        indicators = [
+            ".*\\\\ut$",
+            ".*\\\\Temp\\\\.*\.bmp",
+            ".*\\\\G2L6E3O1-E775-G5J4-R4C2-P5F660S1R4A8$",
+        ]
+
+        for indicator in indicators:
+            match = self.check_write_file(pattern=indicator, regex=True)
+            if match:
+                self.data.append({"file": match})
+                return True
+
+        return False
\ No newline at end of file
diff --git a/modules/signatures/reads_self.py b/modules/signatures/reads_self.py
index eb339c444f5..b1b1e768c1b 100644
--- a/modules/signatures/reads_self.py
+++ b/modules/signatures/reads_self.py
@@ -28,7 +28,7 @@ def __hash__(self):
         return hash(self.__repr__())
 
     def set_file_pos(self, buffer):
-        self.fpos = struct.unpack_from("Q", buffer)[0]
+        self.fpos = struct.unpack_from("Q", buffer.encode("utf-8"))[0]
 
     def read(self, len):
         self.fpos = self.fpos + len
diff --git a/modules/signatures/windows_utilities.py b/modules/signatures/windows_utilities.py
index df62d9ebdd3..6bd447f8b62 100644
--- a/modules/signatures/windows_utilities.py
+++ b/modules/signatures/windows_utilities.py
@@ -243,7 +243,7 @@ class WMICCommandSuspicious(Signature):
     evented = True
 
     def run(self):
-        arguments = [
+        self.arguments = [
            "antivirusproduct",
            "baseboard",
            "bios",
@@ -276,7 +276,7 @@ def run(self):
            for argument in self.arguments:
                if argument in lower:
                    ret = True
-                    self.data.append({"command" : cmdline})
+                    self.data.append({"command": cmdline})
 
        return ret
diff --git a/systemd/README.md b/systemd/README.md
index af3b38c838e..ef5e6f7f21f 100644
--- a/systemd/README.md
+++ b/systemd/README.md
@@ -8,8 +8,16 @@ These files help run all the various parts of CAPE as systemd services, so that
 - `cuckoo-wsgi.service` - Runs the Cuckoo web interface as a WSGI application using Gunicorn bound to `127.0.0.1:8000`
 
 ## Setup
+0. Raise the systemd default file-descriptor limits, otherwise the services will hit `too many open files` errors
+    sudo sed -i "s/#DefaultLimitNOFILE=/DefaultLimitNOFILE=1048576/g" /etc/systemd/user.conf
+    sudo sed -i "s/#DefaultLimitNOFILE=/DefaultLimitNOFILE=1048576/g" /etc/systemd/system.conf
 
-1. Install virtualenv
+* To verify the change:
+   ```bash
+   systemctl show cuckoo-processor | grep LimitNOFILE  # replace cuckoo-processor with any of the other services once they are installed
+   ```
+
+1. (optional) Install virtualenv
 
     ```bash
     sudo apt-get install -y python3-virtualenv
@@ -28,17 +36,14 @@ These files help run all the various parts of CAPE as systemd services, so that
     sudo su cuckoo
     ```
 
-5. Create a virtualenv at `/opt/CAPE/venv`
+5. (optional) Create a virtualenv at `/opt/CAPE/venv`
 
     ```bash
     virtualenv /opt/CAPE/venv
     ```
 
-6. Install required Python packages inside the virtualenv
-
-    ```bash
-    /opt/CAPE/venv/bin/pip3 install -U -r /opt/CAPE/requirements.txt
-    ```
+6. (optional) Install required Python packages inside the virtualenv
+    * dependencies are now installed by https://github.com/doomedraven/Tools/blob/master/Cuckoo/cuckoo3.sh
 
 7. Edit configuration files in `/opt/CAPE/conf` as needed
 
 8. Return to your user
@@ -47,11 +52,11 @@ These files help run all the various parts of CAPE as systemd services, so that
     exit
     ```
 
-9. Install the `systemd` service unit configuration files
+9. Install the `systemd` service unit configuration files (if you use a virtualenv, edit `ExecStart=` in each unit: comment out the current line and uncomment the virtualenv one)
 
     ```bash
-    sudo cp /opt/CAPE/systemd/*.service /opt/systemd/system
-    sudo cp /opt/CAPE/systemd/*.timer /opt/systemd/system
+    sudo cp /opt/CAPE/systemd/*.service /etc/systemd/system
+    sudo cp /opt/CAPE/systemd/*.timer /etc/systemd/system
     sudo sudo systemctl daemon-reload
     sudo systemctl enable suricata-update.service
     sudo systemctl enable suricata-update.timer
@@ -64,11 +69,11 @@ These files help run all the various parts of CAPE as systemd services, so that
 10. Start the services for the first time
 
     ```bash
-    sudo service suricata-update start
-    sudo service cuckoo-rooter start
-    sudo service cuckoo-processor start
-    sudo service cuckoo start
-    sudo service cuckoo-wsgi start
+    sudo systemctl start suricata-update.service
+    sudo systemctl start cuckoo-rooter.service
+    sudo systemctl start cuckoo-processor.service
+    sudo systemctl start cuckoo.service
+    sudo systemctl start cuckoo-wsgi.service
     ```
 
 ## Troubleshooting
@@ -76,7 +81,7 @@ These files help run all the various parts of CAPE as systemd services, so that
 To view the status and console output of a service:
 
 ```bash
-service cuckoo status
+sudo systemctl status cuckoo
 ```
 
 To view the full output of a service (including crashed services):
diff --git a/systemd/cuckoo-processor.service b/systemd/cuckoo-processor.service
index 4f9ff7bc782..92cc9af7a40 100644
--- a/systemd/cuckoo-processor.service
+++ b/systemd/cuckoo-processor.service
@@ -1,11 +1,13 @@
 [Unit]
 Description=Cuckoo report processor
-Documentation=https://github.com/kevoreilly/CAPE
+Documentation=https://github.com/kevoreilly/CAPEv2
 Wants=cuckoo-rooter.service
 After=cuckoo-rooter.service
 
 [Service]
-ExecStart=/opt/CAPE/venv/bin/python3 /opt/CAPE/utils/process.py -p7 auto
+WorkingDirectory=/opt/CAPE/utils/
+ExecStart=/usr/bin/python process.py -p7 auto
+#ExecStart=/opt/CAPE/venv/bin/python3 /opt/CAPE/utils/process.py -p7 auto
 User=cuckoo
 Group=cuckoo
 Restart=always
diff --git a/systemd/cuckoo-rooter.service b/systemd/cuckoo-rooter.service
index 9a93f11633d..2c7405df587 100644
--- a/systemd/cuckoo-rooter.service
+++ b/systemd/cuckoo-rooter.service
@@ -1,11 +1,13 @@
 [Unit]
 Description=Cuckoo rooter
-Documentation=https://github.com/kevoreilly/CAPE
+Documentation=https://github.com/kevoreilly/CAPEv2
 Wants=network-online.target
 After=syslog.target network.target
 
 [Service]
-ExecStart=/opt/CAPE/venv/bin/python3 /opt/CAPE/utils/rooter.py
+WorkingDirectory=/opt/CAPE/utils/
+ExecStart=/usr/bin/python3 rooter.py
+#ExecStart=/opt/CAPE/venv/bin/python3 /opt/CAPE/utils/rooter.py
 User=root
 Group=root
 Restart=always
diff --git a/systemd/cuckoo-wsgi.service b/systemd/cuckoo-wsgi.service
index 470ca04d8fc..d9f4f5a2a78 100644
--- a/systemd/cuckoo-wsgi.service
+++ b/systemd/cuckoo-wsgi.service
@@ -1,12 +1,13 @@
 [Unit]
 Description=Cuckoo WSGI app
-Documentation=https://github.com/kevoreilly/CAPE
+Documentation=https://github.com/kevoreilly/CAPEv2
 Wants=cuckoo.service
 After=cuckoo.service
 
 [Service]
-WorkingDirectory=/opt/CAPE/web
-ExecStart=/opt/CAPE/venv/bin/gunicorn -b 127.0.0.1:8000 web.wsgi +WorkingDirectory=/opt/CAPEv2/web +ExecStart=/usr/bin/python3 manage.py runserver 0.0.0.0:8000 +#ExecStart=/opt/CAPEv2/venv/bin/gunicorn -b 127.0.0.1:8000 web.wsgi User=cuckoo Group=cuckoo Restart=always diff --git a/systemd/cuckoo.service b/systemd/cuckoo.service index 98a0eac448e..e401437dfd1 100644 --- a/systemd/cuckoo.service +++ b/systemd/cuckoo.service @@ -1,11 +1,11 @@ [Unit] Description=Cuckoo -Documentation=https://github.com/kevoreilly/CAPE -Wants=cuckoo-cuckoo-processor.service -After=cuckoo-cuckoo-processor.service +Documentation=https://github.com/kevoreilly/CAPEv2 [Service] -ExecStart=/opt/CAPE/venv/bin/python3 /opt/CAPE/cuckoo.py +WorkingDirectory=/opt/CAPEv2/ +ExecStart=/usr/bin/python3 cuckoo.py +#ExecStart=/opt/CAPEv2/venv/bin/python3 /opt/CAPEv2/cuckoo.py User=cuckoo Group=cuckoo Restart=always diff --git a/systemd/suricata-update.service b/systemd/suricata-update.service index baf9c4dcb9c..dad62b72d80 100644 --- a/systemd/suricata-update.service +++ b/systemd/suricata-update.service @@ -5,7 +5,7 @@ Description=suricata-update User=root Group=root Type=oneshot -ExecStart=/opt/CAPE/venv/bin/suricata-update +ExecStart=/opt/CAPEv2/venv/bin/suricata-update [Install] WantedBy=multi-user.target diff --git a/utils/api.py b/utils/api.py index 92a37cfda62..40d5e3caf0b 100644 --- a/utils/api.py +++ b/utils/api.py @@ -107,7 +107,7 @@ def tasks_create_file(): shrike_sid = request.forms.get("shrike_sid", None) shrike_refer = request.forms.get("shrike_refer", None) static = bool(request.POST.get("static", False)) - unique = bool(request.form.get("unique", False)) + unique = bool(request.forms.get("unique", False)) if memory.upper() == 'FALSE' or memory == '0': memory = False else: @@ -124,7 +124,7 @@ def tasks_create_file(): if unique and db.check_file_uniq(File(temp_file_path).get_sha256()): resp = {"error": True, "error_value": "Duplicated file, disable unique option to force submission"} - return jsonize(resp, response=True) + return jsonize(resp) if pcap: if data.filename.lower().endswith(".saz"): @@ -138,7 +138,7 @@ def tasks_create_file(): else: resp = {"error": True, "error_value": "Failed to convert PCAP to SAZ"} - return jsonize(resp, response=True) + return jsonize(resp) else: path = temp_file_path task_id = db.add_pcap(file_path=path) @@ -441,7 +441,7 @@ def tasks_iocs(task_id, detail=False): if buf is None: resp = {"error": True, "error_value": "Sample not found in database"} - return jsonize(resp, response=True) + return jsonize(resp) data = {} if "tr_extractor" in buf: diff --git a/utils/cleaners.py b/utils/cleaners.py index fbe31bc1150..830734e45f2 100644 --- a/utils/cleaners.py +++ b/utils/cleaners.py @@ -34,6 +34,7 @@ # Initialize the database connection. 
db = Database() +mdb = rep_config.mongodb.get("db", "cuckoo") def connect_to_mongo(): conn = False @@ -42,7 +43,6 @@ def connect_to_mongo(): from pymongo import MongoClient host = rep_config.mongodb.get("host", "127.0.0.1") port = rep_config.mongodb.get("port", 27017) - mdb = rep_config.mongodb.get("db", "cuckoo") user = rep_config.mongodb.get("username", None) password = rep_config.mongodb.get("password", None) try: @@ -52,7 +52,7 @@ def connect_to_mongo(): username=user, password=password, authSource=mdb - )[mdb] + ) except Exception as e: log.warning("Unable to connect to MongoDB database: {}, {}".format(mdb, e)) @@ -97,9 +97,9 @@ def delete_data(tid): def delete_mongo_data(tid): try: - results_db = connect_to_mongo() + results_db = connect_to_mongo()[mdb] analyses = results_db.analysis.find({"info.id": int(tid)}) - if analyses.count > 0: + if analyses.count() > 0: for analysis in analyses: log.info("deleting MongoDB data for Task #{0}".format(tid)) for process in analysis.get("behavior", {}).get("processes", []): @@ -145,10 +145,10 @@ def cuckoo_clean(): print("Can't connect to mongo") return try: - conn.drop_database(conn._Database__name) + conn.drop_database(mdb) conn.close() except: - log.warning("Unable to drop MongoDB database: %s", conn._Database__name) + log.warning("Unable to drop MongoDB database: %s", mdb) if rep_config.elasticsearchdb and rep_config.elasticsearchdb.enabled and not rep_config.elasticsearchdb.searchonly: es = False @@ -269,8 +269,7 @@ def cuckoo_clean_failed_url_tasks(): # logger (init_logging()) logs to a file which will be deleted. create_structure() init_console_logging() - - results_db = connect_to_mongo() + results_db = connect_to_mongo()[mdb] if not results_db: log.info("Can't connect to mongo") return @@ -294,8 +293,7 @@ def cuckoo_clean_lower_score(args): create_structure() init_console_logging() id_arr = [] - - results_db = connect_to_mongo() + results_db = connect_to_mongo()[mdb] if not results_db: log.info("Can't connect to mongo") return @@ -322,7 +320,7 @@ def cuckoo_clean_before_day(args): init_console_logging() id_arr = [] - results_db = connect_to_mongo() + results_db = connect_to_mongo()[mdb] if not results_db: log.info("Can't connect to mongo") return @@ -362,8 +360,7 @@ def cuckoo_clean_sorted_pcap_dump(): # logger (init_logging()) logs to a file which will be deleted. 
create_structure() init_console_logging() - - results_db = connect_to_mongo() + results_db = connect_to_mongo()[mdb] if not results_db: log.info("Can't connect to mongo") return @@ -400,7 +397,7 @@ def cuckoo_clean_pending_tasks(): create_structure() init_console_logging() - results_db = connect_to_mongo() + results_db = connect_to_mongo()[mdb] if not results_db: log.info("Can't connect to mongo") return diff --git a/utils/dist.py b/utils/dist.py index 1d1867f88b3..af5dd06e842 100644 --- a/utils/dist.py +++ b/utils/dist.py @@ -616,7 +616,10 @@ def submit_tasks(self, node_id, pend_tasks_num, options_like=False, force_push_p if node.name != "master": # don't do nothing if nothing in pending # Get tasks from main_db submitted through web interface - main_db_tasks = main_db.list_tasks(status=TASK_PENDING, order_by=desc("priority"), options_like=options_like)#, limit=pend_tasks_num) + main_db_tasks = main_db.list_tasks(status=TASK_PENDING, order_by=desc("priority"), options_like=options_like, limit=pend_tasks_num) + if not main_db_tasks: + return + if main_db_tasks: for t in main_db_tasks: force_push = False @@ -684,7 +687,7 @@ def submit_tasks(self, node_id, pend_tasks_num, options_like=False, force_push_p else: main_db.set_status(t.id, TASK_DISTRIBUTED) limit += 1 - if limit == pend_tasks_num: + if limit == pend_tasks_num or limit == len(main_db_tasks): db.close() return @@ -793,7 +796,7 @@ def run(self): STATUSES = statuses #first submit tasks with specified node - self.submit_tasks(node.id, MINIMUMQUEUE[node.name]*2, "%node={}%".format(node.name), force_push_push=True) + self.submit_tasks(node.id, MINIMUMQUEUE[node.name], "%node={}%".format(node.name), force_push_push=True) # Balance the tasks, works fine if no tags are set node_name = min(STATUSES, key=lambda k: STATUSES[k]["completed"] + STATUSES[k]["pending"]) diff --git a/web/analysis/urls.py b/web/analysis/urls.py index 6bc1e989b4d..30fc736a330 100644 --- a/web/analysis/urls.py +++ b/web/analysis/urls.py @@ -19,7 +19,8 @@ url(r"^shrike/(?P\d+)/$", views.shrike, name='shrike'), url(r"^remove/(?P\d+)/$", views.remove, name='remove'), url(r"^chunk/(?P\d+)/(?P\d+)/(?P\d+)/$", views.chunk, name='chunk'), - url(r"^filtered/(?P\d+)/(?P\d+)/(?P\w+)/(?P[!]?[A-Za-z_0-9,%]*)/$", views.filtered_chunk, name='filtered_chunk'), + url(r"^filtered/(?P\d+)/(?P\d+)/(?P\w+)/(?P[!]?[A-Za-z_0-9,%]*)/(?P\w+)/(?P\w+)/$", + views.filtered_chunk, name='filtered_chunk'), url(r"^search/(?P\d+)/$", views.search_behavior, name='search_behavior'), url(r"^search/$", views.search, name='search'), url(r"^pending/$", views.pending, name='pending'), diff --git a/web/analysis/views.py b/web/analysis/views.py index fa1cabfa5bf..87e3d57340b 100644 --- a/web/analysis/views.py +++ b/web/analysis/views.py @@ -377,7 +377,7 @@ def chunk(request, task_id, pid, pagenum): @require_safe @conditional_login_required(login_required, settings.WEB_AUTHENTICATION) -def filtered_chunk(request, task_id, pid, category, apilist): +def filtered_chunk(request, task_id, pid, category, apilist, caller, tid): """Filters calls for call category. @param task_id: cuckoo task id @param pid: pid you want calls @@ -422,6 +422,8 @@ def filtered_chunk(request, task_id, pid, category, apilist): apis = apilist.split(',') apis[:] = [s.strip().lower() for s in apis if len(s.strip())] + tid = int(tid) + # Populate dict, fetching data from all calls and selecting only appropriate category/APIs. 
for call in process["calls"]: if enabledconf["mongodb"]: @@ -429,7 +431,11 @@ def filtered_chunk(request, task_id, pid, category, apilist): if es_as_db: chunk = es.search(index=fullidx, doc_type="calls", q="_id: \"%s\"" % call)['hits']['hits'][0]['_source'] for call in chunk["calls"]: - if category == "all" or call["category"] == category: + # filter by call or tid + if caller != "null" or tid != 0: + if call["caller"] == caller and call["thread_id"] == tid: + filtered_process["calls"].append(call) + elif category == "all" or call["category"] == category: if len(apis) > 0: add_call = -1 for api in apis: diff --git a/web/api/views.py b/web/api/views.py index bbb351022d1..946765c1093 100644 --- a/web/api/views.py +++ b/web/api/views.py @@ -166,14 +166,14 @@ def index(request): if apiconf.filecreate.get("enabled"): raterps = apiconf.filecreate.get("rps", None) raterpm = apiconf.filecreate.get("rpm", None) - rateblock = True + rateblock = limiter if apiconf.taskiocs.get("enabled"): raterps = apiconf.taskiocs.get("rps") raterpm = apiconf.taskiocs.get("rpm") - rateblock = True + rateblock = limiter @ratelimit(key="ip", rate=raterps, block=rateblock) @ratelimit(key="ip", rate=raterpm, block=rateblock) def tasks_iocs(request, task_id, detail=None): @@ -685,7 +685,7 @@ def tasks_create_file(request): if apiconf.urlcreate.get("enabled"): raterps = apiconf.urlcreate.get("rps", None) raterpm = apiconf.urlcreate.get("rpm", None) - rateblock = True + rateblock = limiter @ratelimit(key="ip", rate=raterps, block=rateblock) @ratelimit(key="ip", rate=raterpm, block=rateblock) @csrf_exempt @@ -792,7 +792,7 @@ def tasks_create_url(request): if apiconf.urlcreate.get("enabled"): raterps = apiconf.urlcreate.get("rps", None) raterpm = apiconf.urlcreate.get("rpm", None) - rateblock = True + rateblock = limiter @ratelimit(key="ip", rate=raterps, block=rateblock) @@ -911,7 +911,7 @@ def tasks_create_dlnexec(request): if apiconf.vtdl.get("enabled"): raterps = apiconf.vtdl.get("rps", None) raterpm = apiconf.vtdl.get("rpm", None) - rateblock = True + rateblock = limiter @ratelimit(key="ip", rate=raterps, block=rateblock) @ratelimit(key="ip", rate=raterpm, block=rateblock) @csrf_exempt @@ -1047,7 +1047,7 @@ def tasks_vtdl(request): if apiconf.fileview.get("enabled"): raterps = apiconf.fileview.get("rps", None) raterpm = apiconf.fileview.get("rpm", None) - rateblock = True + rateblock = limiter @ratelimit(key="ip", rate=raterps, block=rateblock) @ratelimit(key="ip", rate=raterpm, block=rateblock) def files_view(request, md5=None, sha1=None, sha256=None, sample_id=None): @@ -1102,7 +1102,7 @@ def files_view(request, md5=None, sha1=None, sha256=None, sample_id=None): if apiconf.tasksearch.get("enabled"): raterps = apiconf.tasksearch.get("rps", None) raterpm = apiconf.tasksearch.get("rpm", None) - rateblock = True + rateblock = limiter @ratelimit(key="ip", rate=raterps, block=rateblock) @ratelimit(key="ip", rate=raterpm, block=rateblock) def tasks_search(request, md5=None, sha1=None, sha256=None): @@ -1162,7 +1162,7 @@ def tasks_search(request, md5=None, sha1=None, sha256=None): if apiconf.extendedtasksearch.get("enabled"): raterps = apiconf.extendedtasksearch.get("rps", None) raterpm = apiconf.extendedtasksearch.get("rpm", None) - rateblock = True + rateblock = limiter @ratelimit(key="ip", rate=raterps, block=rateblock) @ratelimit(key="ip", rate=raterpm, block=rateblock) @csrf_exempt @@ -1354,7 +1354,7 @@ def ext_tasks_search(request): if apiconf.tasklist.get("enabled"): raterps = apiconf.tasklist.get("rps", None) raterpm 
= apiconf.tasklist.get("rpm", None) - rateblock = True + rateblock = limiter @ratelimit(key="ip", rate=raterps, block=rateblock) @ratelimit(key="ip", rate=raterpm, block=rateblock) def tasks_list(request, offset=None, limit=None, window=None): @@ -1425,7 +1425,7 @@ def tasks_list(request, offset=None, limit=None, window=None): if apiconf.taskview.get("enabled"): raterps = apiconf.taskview.get("rps", None) raterpm = apiconf.taskview.get("rpm", None) - rateblock = True + rateblock = limiter @ratelimit(key="ip", rate=raterps, block=rateblock) @ratelimit(key="ip", rate=raterpm, block=rateblock) def tasks_view(request, task_id): @@ -1465,7 +1465,7 @@ def tasks_view(request, task_id): if apiconf.taskresched.get("enabled"): raterps = apiconf.taskresched.get("rps", None) raterpm = apiconf.taskresched.get("rpm", None) - rateblock = True + rateblock = limiter @ratelimit(key="ip", rate=raterps, block=rateblock) @ratelimit(key="ip", rate=raterpm, block=rateblock) def tasks_reschedule(request, task_id): @@ -1497,7 +1497,7 @@ def tasks_reschedule(request, task_id): if apiconf.taskdelete.get("enabled"): raterps = apiconf.taskdelete.get("rps", None) raterpm = apiconf.taskdelete.get("rpm", None) - rateblock = True + rateblock = limiter @ratelimit(key="ip", rate=raterps, block=rateblock) @ratelimit(key="ip", rate=raterpm, block=rateblock) def tasks_delete(request, task_id): @@ -1530,7 +1530,7 @@ def tasks_delete(request, task_id): if apiconf.taskstatus.get("enabled"): raterps = apiconf.taskstatus.get("rps", None) raterpm = apiconf.taskstatus.get("rpm", None) - rateblock = True + rateblock = limiter @ratelimit(key="ip", rate=raterps, block=rateblock) @ratelimit(key="ip", rate=raterpm, block=rateblock) def tasks_status(request, task_id): @@ -1556,7 +1556,7 @@ def tasks_status(request, task_id): if apiconf.taskreport.get("enabled"): raterps = apiconf.taskreport.get("rps") raterpm = apiconf.taskreport.get("rpm") - rateblock = True + rateblock = limiter @ratelimit(key="ip", rate=raterps, block=rateblock) @ratelimit(key="ip", rate=raterpm, block=rateblock) def tasks_report(request, task_id, report_format="json"): @@ -1649,7 +1649,7 @@ def tasks_report(request, task_id, report_format="json"): if apiconf.taskiocs.get("enabled"): raterps = apiconf.taskiocs.get("rps") raterpm = apiconf.taskiocs.get("rpm") - rateblock = True + rateblock = limiter @ratelimit(key="ip", rate=raterps, block=rateblock) @ratelimit(key="ip", rate=raterpm, block=rateblock) def tasks_iocs(request, task_id, detail=None): @@ -1882,7 +1882,7 @@ def tasks_iocs(request, task_id, detail=None): if apiconf.taskscreenshot.get("enabled"): raterps = apiconf.taskscreenshot.get("rps") raterpm = apiconf.taskscreenshot.get("rpm") - rateblock = True + rateblock = limiter @ratelimit(key="ip", rate=raterps, block=rateblock) @ratelimit(key="ip", rate=raterpm, block=rateblock) def tasks_screenshot(request, task_id, screenshot="all"): @@ -1935,7 +1935,7 @@ def tasks_screenshot(request, task_id, screenshot="all"): if apiconf.taskpcap.get("enabled"): raterps = apiconf.taskpcap.get("rps") raterpm = apiconf.taskpcap.get("rpm") - rateblock = True + rateblock = limiter @ratelimit(key="ip", rate=raterps, block=rateblock) @ratelimit(key="ip", rate=raterpm, block=rateblock) def tasks_pcap(request, task_id): @@ -1971,7 +1971,7 @@ def tasks_pcap(request, task_id): if apiconf.taskdropped.get("enabled"): raterps = apiconf.taskdropped.get("rps") raterpm = apiconf.taskdropped.get("rpm") - rateblock = True + rateblock = limiter @ratelimit(key="ip", rate=raterps, block=rateblock) 
@ratelimit(key="ip", rate=raterpm, block=rateblock) def tasks_dropped(request, task_id): @@ -2012,7 +2012,7 @@ def tasks_dropped(request, task_id): if apiconf.tasksurifile.get("enabled"): raterps = apiconf.tasksurifile.get("rps") raterpm = apiconf.tasksurifile.get("rpm") - rateblock = True + rateblock = limiter @ratelimit(key="ip", rate=raterps, block=rateblock) @ratelimit(key="ip", rate=raterpm, block=rateblock) def tasks_surifile(request, task_id): @@ -2049,7 +2049,7 @@ def tasks_surifile(request, task_id): if apiconf.rollingsuri.get("enabled"): raterps = apiconf.rollingsuri.get("rps") raterpm = apiconf.rollingsuri.get("rpm") - rateblock = True + rateblock = limiter @ratelimit(key="ip", rate=raterps, block=rateblock) @ratelimit(key="ip", rate=raterpm, block=rateblock) @@ -2084,7 +2084,7 @@ def tasks_rollingsuri(request, window=60): if apiconf.rollingshrike.get("enabled"): raterps = apiconf.rollingshrike.get("rps") raterpm = apiconf.rollingshrike.get("rpm") - rateblock = True + rateblock = limiter @ratelimit(key="ip", rate=raterps, block=rateblock) @ratelimit(key="ip", rate=raterpm, block=rateblock) @@ -2129,7 +2129,7 @@ def tasks_rollingshrike(request, window=60, msgfilter=None): if apiconf.taskprocmemory.get("enabled"): raterps = apiconf.taskprocmemory.get("rps") raterpm = apiconf.taskprocmemory.get("rpm") - rateblock = True + rateblock = limiter @ratelimit(key="ip", rate=raterps, block=rateblock) @ratelimit(key="ip", rate=raterpm, block=rateblock) def tasks_procmemory(request, task_id, pid="all"): @@ -2204,7 +2204,7 @@ def tasks_procmemory(request, task_id, pid="all"): if apiconf.taskfullmemory.get("enabled"): raterps = apiconf.taskfullmemory.get("rps") raterpm = apiconf.taskfullmemory.get("rpm") - rateblock = True + rateblock = limiter @ratelimit(key="ip", rate=raterps, block=rateblock) @ratelimit(key="ip", rate=raterpm, block=rateblock) def tasks_fullmemory(request, task_id): @@ -2256,7 +2256,7 @@ def tasks_fullmemory(request, task_id): if apiconf.sampledl.get("enabled"): raterps = apiconf.sampledl.get("rps") raterpm = apiconf.sampledl.get("rpm") - rateblock = True + rateblock = limiter @ratelimit(key="ip", rate=raterps, block=rateblock) @ratelimit(key="ip", rate=raterpm, block=rateblock) def get_files(request, stype, value): @@ -2300,7 +2300,7 @@ def get_files(request, stype, value): if apiconf.machinelist.get("enabled"): raterps = apiconf.machinelist.get("rps") raterpm = apiconf.machinelist.get("rpm") - rateblock = True + rateblock = limiter @ratelimit(key="ip", rate=raterps, block=rateblock) @ratelimit(key="ip", rate=raterpm, block=rateblock) def machines_list(request): @@ -2324,7 +2324,7 @@ def machines_list(request): if apiconf.machineview.get("enabled"): raterps = apiconf.machineview.get("rps") raterpm = apiconf.machineview.get("rpm") - rateblock = True + rateblock = limiter @ratelimit(key="ip", rate=raterps, block=rateblock) @ratelimit(key="ip", rate=raterpm, block=rateblock) def machines_view(request, name=None): @@ -2350,7 +2350,7 @@ def machines_view(request, name=None): if apiconf.cuckoostatus.get("enabled"): raterps = apiconf.cuckoostatus.get("rps") raterpm = apiconf.cuckoostatus.get("rpm") - rateblock = True + rateblock = limiter @ratelimit(key="ip", rate=raterps, block=rateblock) @ratelimit(key="ip", rate=raterpm, block=rateblock) def cuckoo_status(request): @@ -2384,7 +2384,7 @@ def cuckoo_status(request): if apiconf.cuckoostatus.get("enabled"): raterps = apiconf.cuckoostatus.get("rps") raterpm = apiconf.cuckoostatus.get("rpm") - rateblock = True + rateblock = limiter 
@ratelimit(key="ip", rate=raterps, block=rateblock) @ratelimit(key="ip", rate=raterpm, block=rateblock) diff --git a/web/static/css/style.css b/web/static/css/style.css index 673db15991a..25acd06c5ac 100644 --- a/web/static/css/style.css +++ b/web/static/css/style.css @@ -133,7 +133,12 @@ a.tag-label { .page-header { margin-bottom: 10px; } - +.filter-box { + display : none; + padding-top: 15px; + height : auto; + width : 40%; +} /* Fix the icon tab override */ .nav-tabs > .active > a > [class^="icon-"],.nav-tabs>.active>a>[class*=" icon-"] { background-image:url("../img/glyphicons-halflings.png") !important; } @@ -179,3 +184,7 @@ pre { background-color: white; color: #5bc0de; } +#filter-toggle { + display: inline-block; + cursor: pointer; +} diff --git a/web/submission/views.py b/web/submission/views.py index e276edf9aaf..6a079d3a1f4 100644 --- a/web/submission/views.py +++ b/web/submission/views.py @@ -163,10 +163,6 @@ def index(request, resubmit_hash=False): if options: options += "," options += "procmemdump=1,procdump=1" - #else: - # if options: - # options += "," - # options += "procdump=1" if request.POST.get("process_memory"): if options: diff --git a/web/templates/analysis/behavior/_processes.html b/web/templates/analysis/behavior/_processes.html index 1a1101a89c7..6da41b1333d 100644 --- a/web/templates/analysis/behavior/_processes.html +++ b/web/templates/analysis/behavior/_processes.html @@ -1,134 +1,164 @@ {% load analysis_tags %}
- {% include "analysis/behavior/_search.html" %} - {% for process in analysis.behavior.processes %} -
-
{{process.process_name}}, PID: {{process.process_id}}, Parent PID: {{process.parent_id}} -
Full Path: {{process.module_path}} - {% if process.environ.CommandLine %} -
Command Line: {{ process.environ.CommandLine }} - {% endif %} + {% include "analysis/behavior/_search.html" %} + {% for process in analysis.behavior.processes %} +
+
+ {{process.process_name}}, PID: {{process.process_id}}, Parent PID: {{process.parent_id}} +
Full Path: {{process.module_path}} + {% if process.environ.CommandLine %} +
Command Line: {{ process.environ.CommandLine }} + {% endif %}
- default - registry - filesystem - network - process - threading - services - device - synchronization - crypto - browser - all -
- - + +
+ + +

Additional Filters

+
+
+
+ + +
+
+ +
+
@@ -136,17 +166,17 @@ $(document).ready(function() { $("#badge_default_{{process.process_id}}").click(function() { load_chunk({{process.process_id}}, 1); }); - $("#badge_network_{{process.process_id}}").click(function() { load_filtered_chunk({{process.process_id}}, "network"); }); - $("#badge_filesystem_{{process.process_id}}").click(function() { load_filtered_chunk({{process.process_id}}, "filesystem"); }); - $("#badge_registry_{{process.process_id}}").click(function() { load_filtered_chunk({{process.process_id}}, "registry"); }); - $("#badge_process_{{process.process_id}}").click(function() { load_filtered_chunk({{process.process_id}}, "process"); }); - $("#badge_threading_{{process.process_id}}").click(function() { load_filtered_chunk({{process.process_id}}, "threading"); }); - $("#badge_services_{{process.process_id}}").click(function() { load_filtered_chunk({{process.process_id}}, "services"); }); - $("#badge_device_{{process.process_id}}").click(function() { load_filtered_chunk({{process.process_id}}, "device"); }); - $("#badge_sync_{{process.process_id}}").click(function() { load_filtered_chunk({{process.process_id}}, "synchronization"); }); - $("#badge_crypto_{{process.process_id}}").click(function() { load_filtered_chunk({{process.process_id}}, "crypto"); }); - $("#badge_browser_{{process.process_id}}").click(function() { load_filtered_chunk({{process.process_id}}, "browser"); }); - $("#badge_all_{{process.process_id}}").click(function() { load_filtered_chunk({{process.process_id}}, "all"); }); + $("#badge_network_{{process.process_id}}").click(function() { load_filtered_chunk({{process.process_id}}, "network", "null", 0); }); + $("#badge_filesystem_{{process.process_id}}").click(function() { load_filtered_chunk({{process.process_id}}, "filesystem", "null", 0); }); + $("#badge_registry_{{process.process_id}}").click(function() { load_filtered_chunk({{process.process_id}}, "registry", "null", 0); }); + $("#badge_process_{{process.process_id}}").click(function() { load_filtered_chunk({{process.process_id}}, "process", "null", 0); }); + $("#badge_threading_{{process.process_id}}").click(function() { load_filtered_chunk({{process.process_id}}, "threading", "null", 0); }); + $("#badge_services_{{process.process_id}}").click(function() { load_filtered_chunk({{process.process_id}}, "services", "null", 0); }); + $("#badge_device_{{process.process_id}}").click(function() { load_filtered_chunk({{process.process_id}}, "device", "null", 0); }); + $("#badge_sync_{{process.process_id}}").click(function() { load_filtered_chunk({{process.process_id}}, "synchronization", "null", 0); }); + $("#badge_crypto_{{process.process_id}}").click(function() { load_filtered_chunk({{process.process_id}}, "crypto", "null", 0); }); + $("#badge_browser_{{process.process_id}}").click(function() { load_filtered_chunk({{process.process_id}}, "browser", "null", 0); }); + $("#badge_all_{{process.process_id}}").click(function() { load_filtered_chunk({{process.process_id}}, "all", "null", 0); }); $('.tab-content').on('click', '.call-link', function (event) { cid = $(this).attr('data-cid'); pid = $(this).attr('data-pid'); @@ -154,6 +184,18 @@ event.preventDefault(); }); + // toggles additional filtering + $("#filter-toggle").click(function(){ + $(".filter-box").toggle() + }) + // submits filters to backend + $("#submit-filter").click(function(){ + var pid = {{process.process_id}}; + var caller = ($("#callerfilter_"+pid).val() == "") ? 
"null" : $("#callerfilter_"+pid).val(); + var tid = ($("#tidfilter_"+pid).val() == "") ? 0 : $("#tidfilter_"+pid).val(); + console.log(caller, tid) + load_filtered_chunk(pid, "all", caller, tid); + }) }); @@ -168,9 +210,9 @@
- {% endfor %} + {% endfor %}
diff --git a/web/templates/analysis/dropped/index.html b/web/templates/analysis/dropped/index.html index 16e1911664f..71a92bdae1e 100644 --- a/web/templates/analysis/dropped/index.html +++ b/web/templates/analysis/dropped/index.html @@ -5,7 +5,7 @@ - + diff --git a/web/templates/analysis/network/_suricata_files.html b/web/templates/analysis/network/_suricata_files.html index 8fe4ade8356..013558356fe 100644 --- a/web/templates/analysis/network/_suricata_files.html +++ b/web/templates/analysis/network/_suricata_files.html @@ -45,7 +45,7 @@ - - {% endif %} - {% if file.id %} - {% if file.file_info.data %} - + - {% endif %} - {% endif %} + {% endif %}
File name{{file.name}}{{file.name|safe}}
Associated Filenames
MD5{{file.md5}} + {{file.md5}} {% if file.moloch_md5_url %} [MLCH] {% endif %} @@ -57,7 +57,7 @@
SHA256{{file.file_info.sha256}} + {{file.sha256}} {% if file.moloch_sha256_url %} [MLCH] {% endif %} @@ -112,21 +112,19 @@ Search for Analysis
Download + Download {% if file.file_info.data %} -  Display Text +  Display Text {% endif %}
{{file.file_info.data}}
{% endfor %} diff --git a/web/templates/analysis/procdump/index.html b/web/templates/analysis/procdump/index.html index b6c89e85c99..d78e3b617a7 100644 --- a/web/templates/analysis/procdump/index.html +++ b/web/templates/analysis/procdump/index.html @@ -80,6 +80,8 @@ {% for sign in file.cape_yara %} {% if sign.meta.cape_type %}
  • {{sign.meta.cape_type}}
  • + {% elif sign.meta.description %} +
  • {{sign.meta.description}}
  • {% endif %} {% endfor %} diff --git a/web/templates/analysis/report.html b/web/templates/analysis/report.html index f801f395870..36d01f78958 100644 --- a/web/templates/analysis/report.html +++ b/web/templates/analysis/report.html @@ -45,7 +45,7 @@ {% if analysis.behavior.processes %}
  • Behavioral Analysis
  • {% endif %}
  • Network Analysis
  • {% if analysis.info.category != "pcap" and analysis.info.category != "static" %} -
  • Dropped Files ({{analysis.dropped}})
  • +
  • Dropped Files
  • {% if analysis.procmemory|length %}
  • Process Memory ({{analysis.procmemory|length}})
  • {% endif %} {% if analysis.memory %}
  • Memory Analysis
  • {% endif %} {% if analysis.procdump|length %}
  • Process Dumps ({{analysis.procdump|length}})
  • {% endif %} diff --git a/web/templates/analysis/search.html b/web/templates/analysis/search.html index 322c0921a3f..561812a9962 100644 --- a/web/templates/analysis/search.html +++ b/web/templates/analysis/search.html @@ -8,7 +8,7 @@
    - +
    diff --git a/web/templates/submission/index.html b/web/templates/submission/index.html index aba9f5d1c77..71b10d8e92a 100644 --- a/web/templates/submission/index.html +++ b/web/templates/submission/index.html @@ -362,6 +362,10 @@ referrer Specify the referrer to be used for URL tasks, overriding the default Google referrer + + loop_detection + Set this option to 1 to enable loop detection (compress call logs - behavior analysis) + static Check if config can be extracted statically, if not extracts, send to vm @@ -370,15 +374,15 @@ Dl&Exec add headers example dnl_user_agent: "CAPE the best", dnl_referer: google - + servicename - for service package Specify the service name - + servicedesc - for service package Service description - + arguments - for service package Service arguments diff --git a/web/web/local_settings.py b/web/web/local_settings.py index a76f6ccd3f3..206737fac3d 100644 --- a/web/web/local_settings.py +++ b/web/web/local_settings.py @@ -29,11 +29,11 @@ # Allow verbose debug error message in case of application fault. # It's strongly suggested to set it to False if you are serving the # web application from a web server front-end (i.e. Apache). -DEBUG = True +DEBUG = False # A list of strings representing the host/domain names that this Django site # can serve. # Values in this list can be fully qualified names (e.g. 'www.example.com'). # When DEBUG is True or when running tests, host validation is disabled; any # host will be accepted. Thus it's usually only necessary to set it in production. -ALLOWED_HOSTS = ["*"] \ No newline at end of file +ALLOWED_HOSTS = ["*"] diff --git a/web/web/settings.py b/web/web/settings.py index c37bf8d8e60..0eab32f1497 100644 --- a/web/web/settings.py +++ b/web/web/settings.py @@ -259,7 +259,7 @@ LOCAL_SETTINGS except NameError: try: - from local_settings import * + from .local_settings import * except ImportError: pass
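
The new signature modules match on regex indicator lists passed to `check_mutex()`/`check_write_file()` via the `pattern=` keyword, and a missing comma between adjacent string literals silustrates silently merges two indicators into one pattern. A minimal, purely illustrative sketch (not part of this changeset; the sample mutex and path values are hypothetical) for sanity-checking such a list with the standard `re` module before shipping a signature:

```python
# Illustrative sanity check for signature indicator lists -- not part of this changeset.
# Indicator strings are copied from rat_xpert.py above; the sample artifacts are hypothetical.
import re

xpertrat_file_indicators = [
    ".*\\\\ut$",
    ".*\\\\Temp\\\\.*\.bmp",   # each indicator must be its own list element (note the comma)
    ".*\\\\G2L6E3O1-E775-G5J4-R4C2-P5F660S1R4A8$",
]

sample_paths = [
    "C:\\Users\\user\\AppData\\Local\\Temp\\screen.bmp",                        # hypothetical
    "C:\\Users\\user\\AppData\\Roaming\\G2L6E3O1-E775-G5J4-R4C2-P5F660S1R4A8",  # hypothetical
]

for indicator in xpertrat_file_indicators:
    compiled = re.compile(indicator)          # raises re.error if the pattern is malformed
    hits = [p for p in sample_paths if compiled.search(p)]
    print("%-50s -> %s" % (indicator, hits))
```

Similarly, the `utils/cleaners.py` hunk changes `connect_to_mongo()` to return the `MongoClient` itself, with callers selecting the database by its configured name (`connect_to_mongo()[mdb]`). A small sketch of that call pattern, assuming hypothetical connection values and a reachable mongod:

```python
# Illustrative only -- hypothetical host/port/db values.
from pymongo import MongoClient

mdb = "cuckoo"                          # reporting.conf mongodb "db" value
conn = MongoClient("127.0.0.1", 27017)  # what connect_to_mongo() now returns (the client)
results_db = conn[mdb]                  # equivalent of connect_to_mongo()[mdb]
analysis = results_db.analysis.find_one({"info.id": 1})  # per-task lookup, as in delete_mongo_data()
conn.drop_database(mdb)                 # full wipe, as in cuckoo_clean(), without touching _Database__name
conn.close()
```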