From 6c9efa0a9b3a06f45a9de98de81fcc111477c777 Mon Sep 17 00:00:00 2001
From: mbushkov
Date: Tue, 28 May 2024 19:08:06 +0200
Subject: [PATCH] Version bump to 3.4.7.4. Syncing recent changes. (#1091)

---
 CHANGELOG.md                                  |  49 +-
 api_client/python/grr_api_client/flow.py      |  21 -
 grr/core/grr_response_core/lib/parsers/all.py |   8 -
 .../lib/parsers/linux_release_parser.py       | 277 -------
 .../lib/parsers/linux_release_parser_test.py  | 230 ------
 grr/proto/grr_response_proto/api/flow.proto   |  81 --
 grr/proto/grr_response_proto/deprecated.proto |  82 +++
 .../grr_response_server/fleetspeak_cps.py     |   2 +-
 grr/server/grr_response_server/flow.py        |  30 +-
 grr/server/grr_response_server/flow_test.py   |  64 +-
 .../flows/general/registry_init.py            |   1 +
 .../flows/general/software.py                 | 399 ++++++++++
 .../flows/general/software_test.py            | 690 +++++++++++++++++
 .../gui/api_call_robot_router.py              |  38 +-
 .../gui/api_call_robot_router_test.py         |  67 +-
 .../gui/api_call_router.py                    |  24 -
 .../api_call_router_with_approval_checks.py   |  16 -
 ...i_call_router_with_approval_checks_test.py |  24 -
 .../gui/api_call_router_without_checks.py     |  14 -
 .../gui/api_integration_tests/flow_test.py    | 109 ---
 .../gui/api_plugins/flow.py                   | 336 ++-------
 .../gui/api_plugins/flow_test.py              | 695 ++----------------
 .../gui/api_plugins/mig_flow.py               |  68 --
 .../gui/api_plugins/user_test.py              |  10 +-
 .../selenium_tests/v2/approval_page_test.py   |  10 +-
 .../gui/selenium_tests/v2/flow_test.py        |  66 +-
 .../clients_form/clients_form.ng.html         |  20 +-
 .../new_hunt/clients_form/clients_form.scss   |   4 +
 .../new_hunt/clients_form/clients_form.ts     |  35 +-
 .../clients_form/clients_form_test.ts         |  42 ++
 .../gui/ui/lib/api/api_interfaces.ts          |  40 -
 version.ini                                   |   2 +-
 32 files changed, 1603 insertions(+), 1951 deletions(-)
 delete mode 100644 grr/core/grr_response_core/lib/parsers/linux_release_parser.py
 delete mode 100644 grr/core/grr_response_core/lib/parsers/linux_release_parser_test.py
 create mode 100644 grr/server/grr_response_server/flows/general/software.py
 create mode 100644 grr/server/grr_response_server/flows/general/software_test.py

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 383d240fc..f8f1ecda9 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,15 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ## [Unreleased]
 
+### Added
+
+### Removed
+
+* Removed the `ListFlowApplicableParsers` API method.
+* Removed the `ListParsedFlowResults` API method.
+
+## [3.4.7.4] - 2024-05-28
+
 ### Removed
 
 * Removed support for Chipsec based flows.
@@ -13,13 +22,20 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
   by individual and combination of system name, release and version).
 * Removed support for foreman rules using `uname` of an endpoint (this can be
   simulated by using 3 rules for system name, release and version).
-* GRR server Debian package is removed when github actions are updated. The
-  docker image and Docker Compose stack (see section "Added") are the
-  recommended wait of running GRR in the future.
 * Removed the `provides` field from the `Artifact` message. This change has
   been done in anticipation of the removal of the same field from the official
   GitHub repository (ForensicArtifacts/artifacts#275).
-
+* **GRR server Debian package**. We stopped providing the GRR server Debian
+  package as the main way of distributing GRR server and client binaries.
+  Instead, we make the GRR Docker image the preferred way of running GRR
+  in a demo or production environment.
+  See the documentation
+  [here](https://grr-doc.readthedocs.io/en/latest/installing-and-running-grr/via-docker-compose.html).
+* **Artifact parsers**. ArtifactCollector flow supported parsing collected files
+  and output of executed commands. Its parsers were not properly maintained,
+  were often outdated and fragile. We've converted selected parsers
+  into standalone flows (`CollectDistroInfo`, `CollectInstalledSoftware`,
+  `CollectHardwareInfo`) and removed the artifact parsing subsystem.
+  The ArtifactCollector now works as if the "apply_parsers" argument
+  is set to False. At some point the "apply_parsers" argument will be
+  deprecated completely.
 
 ### Added
 
 * GRR docker image which contains all grr server components and client
@@ -31,6 +47,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
    to decode a crowdstrike quarantine encoded file, given as a
    `BinaryChunkIterator`.
 
+### Fixed
+
+* YARA memory scanning improvements (matching context options, consuming less
+  bandwidth).
 
 ### API removed
 
@@ -58,19 +77,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ### Planned for removal
 
-Note: GRR release 3.4.7.1 is the **last release** containing the following
-features:
-
-* **Artifact parsers**. ArtifactCollector flow supports parsing collected files
-  and output of executed commands. Its parsers are not properly maintained,
-  are often outdated and fragile. We're going to convert selected parsers
-  into standalone flows and remove the artifact parsing subsystem:
-  the ArtifactCollector will always work as if "apply_parsers" arguments
-  attribute is set to False. Afterwards the "apply_parsers" attribute will be
-  deprecated completely. We will provide documentation on integrating
-  GRR and ArtifactCollector with well-maintained parsing frameworks like
-  [Plaso](https://plaso.readthedocs.io/en/latest/index.html).
-
 * **Built-in cron jobs**. Built-in cron jobs are primarily used for periodic
   hunts. We will provide documentation on how to easily replicate the current
   functionality using external scheduling systems (like Linux cron,
@@ -80,15 +86,6 @@ features:
   when migrating it to external schedulers, please reach out to us via email
   or GitHub.
 
-* **GRR server Debian package**. We will stop providing the GRR server Debian
-  package as the main way of distributing GRR server and client binaries.
-  Instead we will make GRR Docker image a preferred way for running GRR in a
-  demo or production environment.
-
-If your workflow depends on any of the above, please feel free reach out to
-us via [grr-users](https://groups.google.com/forum/#!forum/grr-users) Google
-Group or [GitHub](https://github.com/google/grr/issues).
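For API consumers that relied on the removed `ListParsedFlowResults` /
`ListFlowApplicableParsers` methods, the following is a minimal migration
sketch using `grr_api_client` (not part of the patch; the endpoint URL,
credentials, and client id are hypothetical placeholders):

```python
# Migration sketch: instead of parsing ArtifactCollector results on the fly
# with the removed ListParsedResults(), start one of the new standalone flows
# and read its already-structured results.
from grr_api_client import api

# Hypothetical endpoint and credentials.
grrapi = api.InitHttp(api_endpoint="http://localhost:8000",
                      auth=("admin", "password"))

client = grrapi.Client("C.0123456789abcdef")  # Hypothetical client id.

# CollectInstalledSoftware does its own parsing, so ListResults() already
# yields structured SoftwarePackages payloads -- no separate parse step.
flow = client.CreateFlow(name="CollectInstalledSoftware")
flow.WaitUntilDone()

for result in flow.ListResults():
    print(result.payload)
```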
- ## [3.4.7.1] - 2023-10-23 ### Added diff --git a/api_client/python/grr_api_client/flow.py b/api_client/python/grr_api_client/flow.py index 08ac6ba98..a04538f2f 100644 --- a/api_client/python/grr_api_client/flow.py +++ b/api_client/python/grr_api_client/flow.py @@ -86,27 +86,6 @@ def ListResults(self) -> utils.ItemsIterator[FlowResult]: items = self._context.SendIteratorRequest("ListFlowResults", args) return utils.MapItemsIterator(lambda data: FlowResult(data=data), items) - def ListParsedResults(self) -> utils.ItemsIterator[FlowResult]: - args = flow_pb2.ApiListParsedFlowResultsArgs( - client_id=self.client_id, flow_id=self.flow_id - ) - items = self._context.SendIteratorRequest("ListParsedFlowResults", args) - return utils.MapItemsIterator(lambda data: FlowResult(data=data), items) - - def ListApplicableParsers( - self, - ) -> flow_pb2.ApiListFlowApplicableParsersResult: - """Lists parsers that are applicable to results of the flow.""" - args = flow_pb2.ApiListFlowApplicableParsersArgs( - client_id=self.client_id, flow_id=self.flow_id - ) - - result = self._context.SendRequest("ListFlowApplicableParsers", args) - if not isinstance(result, flow_pb2.ApiListFlowApplicableParsersResult): - raise TypeError(f"Unexpected type: '{type(result)}'") - - return result - def GetExportedResultsArchive(self, plugin_name) -> utils.BinaryChunkIterator: args = flow_pb2.ApiGetExportedFlowResultsArgs( client_id=self.client_id, flow_id=self.flow_id, plugin_name=plugin_name diff --git a/grr/core/grr_response_core/lib/parsers/all.py b/grr/core/grr_response_core/lib/parsers/all.py index c76875024..a295513d2 100644 --- a/grr/core/grr_response_core/lib/parsers/all.py +++ b/grr/core/grr_response_core/lib/parsers/all.py @@ -1,16 +1,8 @@ #!/usr/bin/env python """A module for registering all known parsers.""" -from grr_response_core.lib import parsers -from grr_response_core.lib.parsers import linux_release_parser - def Register(): """Adds all known parsers to the registry.""" # pyformat: disable - - # File multi-parsers. - parsers.MULTI_FILE_PARSER_FACTORY.Register( - "LinuxReleaseInfo", linux_release_parser.LinuxReleaseParser) - # pyformat: enable diff --git a/grr/core/grr_response_core/lib/parsers/linux_release_parser.py b/grr/core/grr_response_core/lib/parsers/linux_release_parser.py deleted file mode 100644 index bd863b4da..000000000 --- a/grr/core/grr_response_core/lib/parsers/linux_release_parser.py +++ /dev/null @@ -1,277 +0,0 @@ -#!/usr/bin/env python -"""Simple parsers for Linux Release files.""" - -import collections -import re -from typing import IO, Iterable, Iterator - -from grr_response_core.lib import parsers -from grr_response_core.lib import utils -from grr_response_core.lib.rdfvalues import anomaly as rdf_anomaly -from grr_response_core.lib.rdfvalues import client as rdf_client -from grr_response_core.lib.rdfvalues import paths as rdf_paths -from grr_response_core.lib.rdfvalues import protodict as rdf_protodict -from grr_response_core.lib.util import precondition - -# Parameters identifying the Linux OS-type and version in /etc/os-release. -_SYSTEMD_OS_RELEASE_NAME = 'NAME' -_SYSTEMD_OS_RELEASE_VERSION = 'VERSION_ID' - -ParsedRelease = collections.namedtuple('ParsedRelease', 'release, major, minor') -WeightedReleaseFile = collections.namedtuple( - 'WeightedReleaseFile', 'weight, path, processor' -) - - -class ReleaseParseHandler: - """Base class for distribution data file parse handlers.""" - - def __init__(self, contents): - """Initialise the parser, presenting file contents to parse. 
- - Args: - contents: file contents that are to be parsed. - """ - precondition.AssertOptionalType(contents, str) - self.contents = contents - - def Parse(self): - """Parse the contents of the release file and return results. - - Return: - A tuple with two items. The first is a Boolean value that determines - whether or not a complete result is given ; that is, whether or not the - parse is conclusive. The second is a ParsedRelease object that defines - the distribution information. - """ - raise NotImplementedError('parse() missing') - - -class LsbReleaseParseHandler(ReleaseParseHandler): - """Parse /etc/lsb-release file.""" - - # Keys for name/release in the lsb-release file. - LSB_NAME_KEY = 'DISTRIB_ID' - LSB_RELEASE_KEY = 'DISTRIB_RELEASE' - - # Distributions for which a fallback is not needed to be more specific. - NO_FALLBACK_NEEDED = ['ubuntu', 'linuxmint'] - - def Parse(self): - name = None - release = None - major = 0 - minor = 0 - complete = False - - # Hacky key=value parser. - for line in self.contents.splitlines(): - line = line.strip() - if '=' not in line: - continue - - key, value = line.split('=', 1) - key = key.strip() - value = value.strip() - - if key == self.LSB_NAME_KEY: - name = value - elif key == self.LSB_RELEASE_KEY: - release = value - - # If the LSB file was not malformed and contained all we need, we are almost - # done... - complete = all([name, release]) - - # ... however, check for systems for which lsb-release is NOT enough data. - if complete: - complete = name.lower() in self.NO_FALLBACK_NEEDED - - # Check that we have a valid release number. - if complete: - if '.' not in release: - complete = False - else: - release_parts = release.split('.', 1) - major, minor = [int(x.strip()) for x in release_parts] - - return complete, ParsedRelease(name, major, minor) - - -class ReleaseFileParseHandler(ReleaseParseHandler): - """Parse 'release' files (eg, oracle-release, redhat-release).""" - - RH_RE = re.compile(r'release (\d[\d]*)\.(\d[\d]*)') - - def __init__(self, name): - super().__init__(None) - - self.name = name - - # TODO(hanuszczak): But... why? ¯\_(ツ)_/¯ - def __call__(self, contents): - """Small hack to let instances act as if they are bare classes.""" - self.contents = contents - return self - - def Parse(self): - major = 0 - minor = 0 - complete = False - data = self.contents.strip() - - if self.name in ['RedHat', 'OracleLinux', 'OEL', 'Rocky']: - check = self.RH_RE.search(data) - if check is not None: - major = int(check.group(1)) - minor = int(check.group(2)) - complete = True - else: - complete = False - - return complete, ParsedRelease(self.name, major, minor) - - -class LinuxReleaseParser(parsers.MultiFileParser[rdf_protodict.Dict]): - """Parser for Linux distribution information.""" - - output_types = [rdf_protodict.Dict] - - # TODO: The parser has to be invoked explicitly, we should not - # relly on magic parsing anymore. - supported_artifacts = [] - - # Multiple files exist to define a Linux distribution, some of which are more - # accurate than others under certain circumstances. We assign a weight and - # allow handling to fall through to the next file to get the most-specific - # distribution. - WEIGHTS = ( - # Top priority: systems with lsb-release. - WeightedReleaseFile(0, '/etc/lsb-release', LsbReleaseParseHandler), - # Oracle Linux (formerly OEL). - WeightedReleaseFile( - 10, '/etc/oracle-release', ReleaseFileParseHandler('OracleLinux') - ), - # OEL. 
- WeightedReleaseFile( - 11, '/etc/enterprise-release', ReleaseFileParseHandler('OEL') - ), - # Rocky. - WeightedReleaseFile( - 12, '/etc/rocky-release', ReleaseFileParseHandler('Rocky') - ), - # RHEL-based. - WeightedReleaseFile( - 20, '/etc/redhat-release', ReleaseFileParseHandler('RedHat') - ), - # Debian-based. - WeightedReleaseFile( - 20, '/etc/debian_version', ReleaseFileParseHandler('Debian') - ), - # TODO(user): These weights are pointless - we can remove - # them while preserving functionality. ReleaseFileParseHandler should - # be deleted and replaced with a function. - ) - - def _Combine(self, pathspecs, file_objects): - result = {} - for pathspec, file_object in zip(pathspecs, file_objects): - path = pathspec.path - file_object.seek(0) - contents = utils.ReadFileBytesAsUnicode(file_object) - result[path] = contents - return result - - def ParseFiles( - self, - knowledge_base: rdf_client.KnowledgeBase, - pathspecs: Iterable[rdf_paths.PathSpec], - filedescs: Iterable[IO[bytes]], - ) -> Iterator[rdf_protodict.Dict]: - del knowledge_base # Unused. - - # Collate files into path: contents dictionary. - found_files = self._Combine(pathspecs, filedescs) - - # Determine collected files and apply weighting. - weights = [w for w in self.WEIGHTS if w.path in found_files] - weights = sorted(weights, key=lambda x: x.weight) - - for _, path, handler in weights: - contents = found_files[path] - obj = handler(contents) - - complete, result = obj.Parse() - if result is None: - continue - elif complete: - yield rdf_protodict.Dict({ - 'os_release': result.release, - 'os_major_version': result.major, - 'os_minor_version': result.minor, - }) - return - - # Amazon AMIs place release info in /etc/system-release. - # https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/amazon-linux-ami-basics.html - system_release = found_files.get('/etc/system-release', None) - if system_release and 'Amazon Linux' in system_release: - match_object = ReleaseFileParseHandler.RH_RE.search(system_release) - if match_object and match_object.lastindex > 1: - yield rdf_protodict.Dict({ - 'os_release': 'AmazonLinuxAMI', - 'os_major_version': int(match_object.group(1)), - 'os_minor_version': int(match_object.group(2)), - }) - return - - # Fall back to /etc/os-release. - results_dict = self._ParseOSReleaseFile(found_files) - if results_dict is not None: - yield results_dict - return - - # No successful parse. - yield rdf_anomaly.Anomaly( - type='PARSER_ANOMALY', symptom='Unable to determine distribution.' - ) - - def _ParseOSReleaseFile(self, matches_dict): - # The spec for the os-release file is given at - # https://www.freedesktop.org/software/systemd/man/os-release.html. - try: - os_release_contents = matches_dict['/etc/os-release'] - except KeyError: - return None - os_release_name = None - os_major_version = None - os_minor_version = None - for entry in os_release_contents.splitlines(): - entry_parts = entry.split('=', 1) - if len(entry_parts) != 2: - continue - key = entry_parts[0].strip() - # Remove whitespace and quotes from the value (leading and trailing). - value = entry_parts[1].strip('\t \'"') - if key == _SYSTEMD_OS_RELEASE_NAME: - os_release_name = value - elif key == _SYSTEMD_OS_RELEASE_VERSION: - match_object = re.search(r'(?P\d+)\.?(?P\d+)?', value) - if match_object is not None: - os_major_version = int(match_object.group('major')) - minor_match = match_object.group('minor') - # Some platforms (e.g. 
Google's Container-Optimized OS) do not have - # multi-part version numbers so we use a default minor version of - # zero. - os_minor_version = 0 if minor_match is None else int(minor_match) - if ( - os_release_name - and os_major_version is not None - and os_minor_version is not None - ): - return rdf_protodict.Dict({ - 'os_release': os_release_name, - 'os_major_version': os_major_version, - 'os_minor_version': os_minor_version, - }) - return None diff --git a/grr/core/grr_response_core/lib/parsers/linux_release_parser_test.py b/grr/core/grr_response_core/lib/parsers/linux_release_parser_test.py deleted file mode 100644 index bfc49e624..000000000 --- a/grr/core/grr_response_core/lib/parsers/linux_release_parser_test.py +++ /dev/null @@ -1,230 +0,0 @@ -#!/usr/bin/env python -"""Unit test for the linux distribution parser.""" - -import io -import os - -from absl import app - -from grr_response_core.lib.parsers import linux_release_parser -from grr_response_core.lib.rdfvalues import anomaly as rdf_anomaly -from grr_response_core.lib.rdfvalues import paths as rdf_paths -from grr_response_core.lib.rdfvalues import protodict as rdf_protodict -from grr.test_lib import test_lib - - -class LinuxReleaseParserTest(test_lib.GRRBaseTest): - """Test parsing of linux distribution collection.""" - - def setUp(self): - super().setUp() - self.parser_test_dir = os.path.join(self.base_path, "parser_test") - - def testMalformedLsbReleaseFile(self): - path = os.path.join(self.parser_test_dir, "lsb-release-bad") - with io.open(path, "r") as f: - data = f.read() - parser = linux_release_parser.LsbReleaseParseHandler(data) - - complete, result = parser.Parse() - - self.assertFalse(complete) - self.assertTupleEqual((None, 0, 0), result) - - def testGoodLsbReleaseFile(self): - path = os.path.join(self.parser_test_dir, "lsb-release") - with io.open(path, "r") as f: - data = f.read() - parser = linux_release_parser.LsbReleaseParseHandler(data) - - complete, result = parser.Parse() - - self.assertTrue(complete) - self.assertTupleEqual(("Ubuntu", 14, 4), result) - - def testFallbackLsbReleaseFile(self): - path = os.path.join(self.parser_test_dir, "lsb-release-notubuntu") - with io.open(path, "r") as f: - data = f.read() - parser = linux_release_parser.LsbReleaseParseHandler(data) - - complete, result = parser.Parse() - - self.assertFalse(complete) - self.assertTupleEqual(("NotUbuntu", 0, 0), result) - - def testReleaseFileRedHatish(self): - path = os.path.join(self.parser_test_dir, "oracle-release") - with io.open(path, "r") as f: - data = f.read() - parser = linux_release_parser.ReleaseFileParseHandler("OracleLinux") - parser(data) - - complete, result = parser.Parse() - - self.assertTrue(complete) - self.assertTupleEqual(("OracleLinux", 6, 5), result) - - def testMalformedReleaseFileRedHatish(self): - path = os.path.join(self.parser_test_dir, "oracle-release-bad") - with io.open(path, "r") as f: - data = f.read() - parser = linux_release_parser.ReleaseFileParseHandler("OracleLinux") - parser(data) - - complete, result = parser.Parse() - - self.assertFalse(complete) - self.assertTupleEqual(("OracleLinux", 0, 0), result) - - def _CreateTestData(self, testdata): - """Create 'stats' and 'file_objects' lists for passing to ParseMultiple.""" - pathspecs = [] - files = [] - for filepath, localfile in testdata: - files.append(open(localfile, "rb")) - - p = rdf_paths.PathSpec(path=filepath) - pathspecs.append(p) - - return pathspecs, files - - def testEndToEndUbuntu(self): - parser = linux_release_parser.LinuxReleaseParser() 
- - testdata = [ - ("/etc/lsb-release", os.path.join(self.parser_test_dir, "lsb-release")), - ] - pathspecs, files = self._CreateTestData(testdata) - - result = list(parser.ParseFiles(None, pathspecs, files)).pop() - - self.assertIsInstance(result, rdf_protodict.Dict) - self.assertEqual("Ubuntu", result["os_release"]) - self.assertEqual(14, result["os_major_version"]) - self.assertEqual(4, result["os_minor_version"]) - - def testEndToEndOracleLinux(self): - parser = linux_release_parser.LinuxReleaseParser() - - testdata = [ - ( - "/etc/lsb-release", - os.path.join(self.parser_test_dir, "lsb-release-notubuntu"), - ), - ( - "/etc/redhat-release", - os.path.join(self.parser_test_dir, "redhat-release"), - ), - ( - "/etc/oracle-release", - os.path.join(self.parser_test_dir, "oracle-release"), - ), - ] - pathspecs, files = self._CreateTestData(testdata) - - result = list(parser.ParseFiles(None, pathspecs, files)).pop() - - self.assertIsInstance(result, rdf_protodict.Dict) - self.assertEqual("OracleLinux", result["os_release"]) - self.assertEqual(6, result["os_major_version"]) - self.assertEqual(5, result["os_minor_version"]) - - def testEndToEndRockyLinux(self): - parser = linux_release_parser.LinuxReleaseParser() - - testdata = [ - ( - "/etc/lsb-release", - os.path.join(self.parser_test_dir, "lsb-release-notubuntu"), - ), - ( - "/etc/redhat-release", - os.path.join(self.parser_test_dir, "redhat-release"), - ), - ( - "/etc/rocky-release", - os.path.join(self.parser_test_dir, "rocky-release"), - ), - ] - pathspecs, files = self._CreateTestData(testdata) - - result = list(parser.ParseFiles(None, pathspecs, files)).pop() - - self.assertIsInstance(result, rdf_protodict.Dict) - self.assertEqual("Rocky", result["os_release"]) - self.assertEqual(8, result["os_major_version"]) - self.assertEqual(8, result["os_minor_version"]) - - def testEndToEndAmazon(self): - parser = linux_release_parser.LinuxReleaseParser() - test_data = [ - ( - "/etc/system-release", - os.path.join(self.parser_test_dir, "amazon-system-release"), - ), - ] - pathspecs, file_objects = self._CreateTestData(test_data) - actual_result = list(parser.ParseFiles(None, pathspecs, file_objects)) - expected_result = [ - rdf_protodict.Dict({ - "os_release": "AmazonLinuxAMI", - "os_major_version": 2018, - "os_minor_version": 3, - }) - ] - self.assertCountEqual(actual_result, expected_result) - - def testEndToEndCoreOS(self): - parser = linux_release_parser.LinuxReleaseParser() - test_data = [ - ( - "/etc/os-release", - os.path.join(self.parser_test_dir, "coreos-os-release"), - ), - ] - pathspecs, file_objects = self._CreateTestData(test_data) - actual_result = list(parser.ParseFiles(None, pathspecs, file_objects)) - expected_result = [ - rdf_protodict.Dict({ - "os_release": "Container Linux by CoreOS", - "os_major_version": 2023, - "os_minor_version": 4, - }) - ] - self.assertCountEqual(actual_result, expected_result) - - def testEndToEndGoogleCOS(self): - parser = linux_release_parser.LinuxReleaseParser() - test_data = [ - ( - "/etc/os-release", - os.path.join(self.parser_test_dir, "google-cos-os-release"), - ), - ] - pathspecs, file_objects = self._CreateTestData(test_data) - actual_result = list(parser.ParseFiles(None, pathspecs, file_objects)) - expected_result = [ - rdf_protodict.Dict({ - "os_release": "Container-Optimized OS", - "os_major_version": 69, - "os_minor_version": 0, - }) - ] - self.assertCountEqual(actual_result, expected_result) - - def testAnomaly(self): - parser = linux_release_parser.LinuxReleaseParser() - - result = 
list(parser.ParseFiles(None, [], [])) - - self.assertLen(result, 1) - self.assertIsInstance(result[0], rdf_anomaly.Anomaly) - - -def main(args): - test_lib.main(args) - - -if __name__ == "__main__": - app.run(main) diff --git a/grr/proto/grr_response_proto/api/flow.proto b/grr/proto/grr_response_proto/api/flow.proto index 70a890e79..805f6c6b5 100644 --- a/grr/proto/grr_response_proto/api/flow.proto +++ b/grr/proto/grr_response_proto/api/flow.proto @@ -221,87 +221,6 @@ message ApiListFlowResultsResult { }]; } -// Arguments for the API method that parses results of the artifact collection -// flow on the fly. -message ApiListParsedFlowResultsArgs { - // An identifier of the client of the flow to parse the responses for. - optional string client_id = 1 [(sem_type) = { type: "ApiClientId" }]; - - // An identifier of the flow to parse the responses for. - optional string flow_id = 2 [(sem_type) = { type: "ApiFlowId" }]; - - // An offset of the flow results to parse. - optional uint64 offset = 3; - - // Number of flow results to parse. - // - // Note that the number of parsed responses can be different as a single flow - // result can be parsed into zero or more items. - optional uint64 count = 4; -} - -// Results of the API method that parses results of the artifact collection flow -// on the fly. -message ApiListParsedFlowResultsResult { - // A collection of parsed flow results. - // - // Note that a single original flow response can result in multiple parsed - // flow responses. - repeated ApiFlowResult items = 1; - - // A collection of parse errors. - // - // Note that having some values here does not necessarily mean that no items - // are returned: parsing one response can lead to an error, but parsing other - // one can succeed. - repeated string errors = 2; -} - -// Arguments for the API method that retrieves descriptors of all parsers that -// are applicable to the results of the specified artifact collector flow. -message ApiListFlowApplicableParsersArgs { - // An identifier of the client of the flow to list the parsers for. - optional string client_id = 1 [(sem_type) = { type: "ApiClientId" }]; - - // An identifier of the flow to list the parsers for. - optional string flow_id = 2 [(sem_type) = { type: "ApiFlowId" }]; -} - -// Results for the API method that retrieves descriptors of all parsers that are -// applicable to the results of the specified artifact collector flow. -message ApiListFlowApplicableParsersResult { - // Descriptors of all applicable parsers. - // - // Note that if the flow parsed its results already during its execution (i.e. - // it is not a flow with on-demand parsing) the list of parsers is going to be - // empty. - repeated ApiParserDescriptor parsers = 1; -} - -// Describes a single parser registered within the system that processes results -// of the artifact collector flow (if applicable). -message ApiParserDescriptor { - // Enumeration of all parser types. - enum Type { - // An unknown parser. - UNKNOWN = 0; - // A parser that is able to parse a single response. - SINGLE_RESPONSE = 1; - // A parser that parses all responses at once. - MULTI_RESPONSE = 2; - // A parser that is able to parse contents of a single collected file. - SINGLE_FILE = 3; - // A parser that parses contents of all collected files at once. - MULTI_FILE = 4; - } - - // A name under which the parser is registered in the system. - optional string name = 1; - - // A type of the parser. 
- optional Type type = 2; -} - message ApiListFlowLogsArgs { optional string client_id = 1 [(sem_type) = { type: "ApiClientId", description: "Client id." }]; diff --git a/grr/proto/grr_response_proto/deprecated.proto b/grr/proto/grr_response_proto/deprecated.proto index e67f2a4c1..d210cbf53 100644 --- a/grr/proto/grr_response_proto/deprecated.proto +++ b/grr/proto/grr_response_proto/deprecated.proto @@ -10,6 +10,7 @@ syntax = "proto2"; import "google/protobuf/any.proto"; import "grr_response_proto/anomaly.proto"; import "grr_response_proto/api/client.proto"; +import "grr_response_proto/api/flow.proto"; import "grr_response_proto/artifact.proto"; import "grr_response_proto/jobs.proto"; import "grr_response_proto/knowledge_base.proto"; @@ -1352,3 +1353,84 @@ message ApiGetDecodedFileArgs { // TODO(hanuszczak): Add support for decoding file content as observed at // specific timestamp. } + +// Arguments for the API method that retrieves descriptors of all parsers that +// are applicable to the results of the specified artifact collector flow. +message ApiListFlowApplicableParsersArgs { + // An identifier of the client of the flow to list the parsers for. + optional string client_id = 1 [(sem_type) = { type: "ApiClientId" }]; + + // An identifier of the flow to list the parsers for. + optional string flow_id = 2 [(sem_type) = { type: "ApiFlowId" }]; +} + +// Results for the API method that retrieves descriptors of all parsers that are +// applicable to the results of the specified artifact collector flow. +message ApiListFlowApplicableParsersResult { + // Descriptors of all applicable parsers. + // + // Note that if the flow parsed its results already during its execution (i.e. + // it is not a flow with on-demand parsing) the list of parsers is going to be + // empty. + repeated ApiParserDescriptor parsers = 1; +} + +// Describes a single parser registered within the system that processes results +// of the artifact collector flow (if applicable). +message ApiParserDescriptor { + // Enumeration of all parser types. + enum Type { + // An unknown parser. + UNKNOWN = 0; + // A parser that is able to parse a single response. + SINGLE_RESPONSE = 1; + // A parser that parses all responses at once. + MULTI_RESPONSE = 2; + // A parser that is able to parse contents of a single collected file. + SINGLE_FILE = 3; + // A parser that parses contents of all collected files at once. + MULTI_FILE = 4; + } + + // A name under which the parser is registered in the system. + optional string name = 1; + + // A type of the parser. + optional Type type = 2; +} + +// Arguments for the API method that parses results of the artifact collection +// flow on the fly. +message ApiListParsedFlowResultsArgs { + // An identifier of the client of the flow to parse the responses for. + optional string client_id = 1 [(sem_type) = { type: "ApiClientId" }]; + + // An identifier of the flow to parse the responses for. + optional string flow_id = 2 [(sem_type) = { type: "ApiFlowId" }]; + + // An offset of the flow results to parse. + optional uint64 offset = 3; + + // Number of flow results to parse. + // + // Note that the number of parsed responses can be different as a single flow + // result can be parsed into zero or more items. + optional uint64 count = 4; +} + +// Results of the API method that parses results of the artifact collection flow +// on the fly. +message ApiListParsedFlowResultsResult { + // A collection of parsed flow results. 
+ // + // Note that a single original flow response can result in multiple parsed + // flow responses. + repeated ApiFlowResult items = 1; + + // A collection of parse errors. + // + // Note that having some values here does not necessarily mean that no items + // are returned: parsing one response can lead to an error, but parsing other + // one can succeed. + repeated string errors = 2; +} diff --git a/grr/server/grr_response_server/fleetspeak_cps.py b/grr/server/grr_response_server/fleetspeak_cps.py index feb1b8e6c..0294528e9 100644 --- a/grr/server/grr_response_server/fleetspeak_cps.py +++ b/grr/server/grr_response_server/fleetspeak_cps.py @@ -20,7 +20,7 @@ class Subscriber: def __init__(self) -> None: for var in ( "Server.fleetspeak_cps_project", - "Server.cleetspeak_cps_subscription", + "Server.fleetspeak_cps_subscription", ): if not config.CONFIG[var]: raise ConfigError(f"Missing config value for {var}") diff --git a/grr/server/grr_response_server/flow.py b/grr/server/grr_response_server/flow.py index 206257b61..f85b0b2ab 100644 --- a/grr/server/grr_response_server/flow.py +++ b/grr/server/grr_response_server/flow.py @@ -23,6 +23,7 @@ import traceback from typing import Optional, Sequence +from google.protobuf import any_pb2 from grr_response_core.lib import rdfvalue from grr_response_core.lib import registry from grr_response_core.lib import type_info @@ -34,7 +35,6 @@ from grr_response_server.rdfvalues import flow_objects as rdf_flow_objects from grr_response_server.rdfvalues import flow_runner as rdf_flow_runner from grr_response_server.rdfvalues import mig_flow_objects -from grr_response_server.rdfvalues import mig_flow_runner GRR_FLOW_INVALID_FLOW_COUNT = metrics.Counter("grr_flow_invalid_flow_count") @@ -353,8 +353,13 @@ def StartFlow(client_id=None, return rdf_flow.flow_id -def ScheduleFlow(client_id: str, creator: str, flow_name, flow_args, - runner_args) -> rdf_flow_objects.ScheduledFlow: +def ScheduleFlow( + client_id: str, + creator: str, + flow_name: str, + flow_args: any_pb2.Any, + runner_args: flows_pb2.FlowRunnerArgs, +) -> flows_pb2.ScheduledFlow: """Schedules a Flow on the client, to be started upon approval grant.""" scheduled_flow = flows_pb2.ScheduledFlow() scheduled_flow.client_id = client_id @@ -362,15 +367,12 @@ def ScheduleFlow(client_id: str, creator: str, flow_name, flow_args, scheduled_flow.scheduled_flow_id = RandomFlowId() scheduled_flow.flow_name = flow_name # TODO: Stop relying on `AsPrimitiveProto`. 
- scheduled_flow.flow_args.Pack(flow_args.AsPrimitiveProto()) - scheduled_flow.runner_args.CopyFrom( - mig_flow_runner.ToProtoFlowRunnerArgs(runner_args) - ) + scheduled_flow.flow_args.CopyFrom(flow_args) + scheduled_flow.runner_args.CopyFrom(runner_args) scheduled_flow.create_time = int(rdfvalue.RDFDatetime.Now()) data_store.REL_DB.WriteScheduledFlow(scheduled_flow) - - return mig_flow_objects.ToRDFScheduledFlow(scheduled_flow) + return scheduled_flow def UnscheduleFlow(client_id: str, creator: str, @@ -383,13 +385,8 @@ def UnscheduleFlow(client_id: str, creator: str, def ListScheduledFlows( client_id: str, creator: str) -> Sequence[rdf_flow_objects.ScheduledFlow]: """Lists all scheduled flows of a user on a client.""" - return list( - map( - mig_flow_objects.ToRDFScheduledFlow, - data_store.REL_DB.ListScheduledFlows( - client_id=client_id, creator=creator - ), - ) + return data_store.REL_DB.ListScheduledFlows( + client_id=client_id, creator=creator ) @@ -417,6 +414,7 @@ def StartScheduledFlows(client_id: str, creator: str) -> None: scheduled_flows = ListScheduledFlows(client_id, creator) for sf in scheduled_flows: try: + sf = mig_flow_objects.ToRDFScheduledFlow(sf) flow_id = _StartScheduledFlow(sf) logging.info("Started Flow %s/%s from ScheduledFlow %s", client_id, flow_id, sf.scheduled_flow_id) diff --git a/grr/server/grr_response_server/flow_test.py b/grr/server/grr_response_server/flow_test.py index 9f4bac92f..a4c2d67ff 100644 --- a/grr/server/grr_response_server/flow_test.py +++ b/grr/server/grr_response_server/flow_test.py @@ -2,11 +2,14 @@ """Tests for flows.""" import random +from typing import Optional from unittest import mock from absl import app from absl.testing import absltest +from google.protobuf import any_pb2 +from google.protobuf import message as message_pb2 from grr_response_client import actions from grr_response_core.lib import rdfvalue from grr_response_core.lib import type_info @@ -25,7 +28,6 @@ from grr_response_server.flows import file from grr_response_server.flows.general import file_finder from grr_response_server.rdfvalues import flow_objects as rdf_flow_objects -from grr_response_server.rdfvalues import flow_runner as rdf_flow_runner from grr_response_server.rdfvalues import mig_flow_objects from grr_response_server.rdfvalues import output_plugin as rdf_output_plugin from grr.test_lib import acl_test_lib @@ -607,19 +609,35 @@ def SetupUser(self, username="u0"): data_store.REL_DB.WriteGRRUser(username) return username - def ScheduleFlow(self, **kwargs): - merged_kwargs = { - "flow_name": file.CollectFilesByKnownPath.__name__, - "flow_args": rdf_file_finder.CollectFilesByKnownPathArgs( - paths=["/foo{}".format(random.randint(0, 1000))] - ), - "runner_args": rdf_flow_runner.FlowRunnerArgs( - cpu_limit=random.randint(0, 60) - ), - **kwargs, - } + def ScheduleFlow( + self, + client_id: str, + creator: str, + flow_name: Optional[str] = None, + flow_args: Optional[message_pb2.Message] = None, + runner_args: Optional[flows_pb2.FlowRunnerArgs] = None, + ) -> flows_pb2.ScheduledFlow: + if flow_name is None: + flow_name = file.CollectFilesByKnownPath.__name__ + + if not flow_args: + flow_args = flows_pb2.CollectFilesByKnownPathArgs( + paths=["/foo{}".format(random.randint(0, 1000))] + ) + + if not runner_args: + runner_args = flows_pb2.FlowRunnerArgs(cpu_limit=random.randint(0, 60)) + + any_flow_args = any_pb2.Any() + any_flow_args.Pack(flow_args) - return flow.ScheduleFlow(**merged_kwargs) + return flow.ScheduleFlow( + client_id=client_id, + creator=creator, + 
flow_name=flow_name, + flow_args=any_flow_args, + runner_args=runner_args, + ) def testScheduleFlowCreatesMultipleScheduledFlows(self): client_id0 = self.SetupClient(0) @@ -657,8 +675,8 @@ def testStartScheduledFlowsCreatesFlow(self): client_id=client_id, creator=username, flow_name=file.CollectFilesByKnownPath.__name__, - flow_args=rdf_file_finder.CollectFilesByKnownPathArgs(paths=["/foo"]), - runner_args=rdf_flow_runner.FlowRunnerArgs(cpu_limit=60), + flow_args=flows_pb2.CollectFilesByKnownPathArgs(paths=["/foo"]), + runner_args=flows_pb2.FlowRunnerArgs(cpu_limit=60), ) flow.StartScheduledFlows(client_id, username) @@ -725,8 +743,8 @@ def testStartScheduledFlowsHandlesErrorInFlowConstructor(self): client_id=client_id, creator=username, flow_name=file.CollectFilesByKnownPath.__name__, - flow_args=rdf_file_finder.CollectFilesByKnownPathArgs(paths=["/foo"]), - runner_args=rdf_flow_runner.FlowRunnerArgs(cpu_limit=60), + flow_args=flows_pb2.CollectFilesByKnownPathArgs(paths=["/foo"]), + runner_args=flows_pb2.FlowRunnerArgs(cpu_limit=60), ) with mock.patch.object( @@ -750,8 +768,8 @@ def testStartScheduledFlowsHandlesErrorInFlowArgsValidation(self): client_id=client_id, creator=username, flow_name=file.CollectFilesByKnownPath.__name__, - flow_args=rdf_file_finder.CollectFilesByKnownPathArgs(paths=["/foo"]), - runner_args=rdf_flow_runner.FlowRunnerArgs(cpu_limit=60), + flow_args=flows_pb2.CollectFilesByKnownPathArgs(paths=["/foo"]), + runner_args=flows_pb2.FlowRunnerArgs(cpu_limit=60), ) with mock.patch.object( @@ -775,16 +793,16 @@ def testStartScheduledFlowsContinuesNextOnFailure(self): client_id=client_id, creator=username, flow_name=file.CollectFilesByKnownPath.__name__, - flow_args=rdf_file_finder.CollectFilesByKnownPathArgs(paths=["/foo"]), - runner_args=rdf_flow_runner.FlowRunnerArgs(cpu_limit=60), + flow_args=flows_pb2.CollectFilesByKnownPathArgs(paths=["/foo"]), + runner_args=flows_pb2.FlowRunnerArgs(cpu_limit=60), ) self.ScheduleFlow( client_id=client_id, creator=username, flow_name=file.CollectFilesByKnownPath.__name__, - flow_args=rdf_file_finder.CollectFilesByKnownPathArgs(paths=["/foo"]), - runner_args=rdf_flow_runner.FlowRunnerArgs(cpu_limit=60), + flow_args=flows_pb2.CollectFilesByKnownPathArgs(paths=["/foo"]), + runner_args=flows_pb2.FlowRunnerArgs(cpu_limit=60), ) with mock.patch.object( diff --git a/grr/server/grr_response_server/flows/general/registry_init.py b/grr/server/grr_response_server/flows/general/registry_init.py index f21bc2b3e..0a9cc9d86 100644 --- a/grr/server/grr_response_server/flows/general/registry_init.py +++ b/grr/server/grr_response_server/flows/general/registry_init.py @@ -22,6 +22,7 @@ from grr_response_server.flows.general import read_low_level from grr_response_server.flows.general import registry from grr_response_server.flows.general import registry_finder +from grr_response_server.flows.general import software from grr_response_server.flows.general import timeline from grr_response_server.flows.general import transfer from grr_response_server.flows.general import webhistory diff --git a/grr/server/grr_response_server/flows/general/software.py b/grr/server/grr_response_server/flows/general/software.py new file mode 100644 index 000000000..7c764d820 --- /dev/null +++ b/grr/server/grr_response_server/flows/general/software.py @@ -0,0 +1,399 @@ +#!/usr/bin/env python +"""Flows for collection information about installed software.""" +import datetime +import plistlib +import re + +from grr_response_core.lib.rdfvalues import client_action as 
rdf_client_action +from grr_response_core.lib.rdfvalues import file_finder as rdf_file_finder +from grr_response_core.lib.rdfvalues import mig_client +from grr_response_core.lib.rdfvalues import mig_file_finder +from grr_response_core.lib.rdfvalues import protodict as rdf_protodict +from grr_response_proto import flows_pb2 +from grr_response_proto import jobs_pb2 +from grr_response_proto import sysinfo_pb2 +from grr_response_server import data_store +from grr_response_server import file_store +from grr_response_server import flow_base +from grr_response_server import flow_responses +from grr_response_server import server_stubs +from grr_response_server.models import blobs + + +class CollectInstalledSoftware(flow_base.FlowBase): + """Flow that collects information about software installed on an endpoint.""" + + category = "/Collectors/" + behaviours = flow_base.BEHAVIOUR_DEBUG + + # TODO: Add `result_types` declaration once we migrate away from + # the artifact collector in this flow and the types are known. + + def Start(self) -> None: + if self.client_os == "Linux": + dpkg_args = rdf_client_action.ExecuteRequest() + dpkg_args.cmd = "/usr/bin/dpkg" + dpkg_args.args.append("--list") + + self.CallClient( + server_stubs.ExecuteCommand, + dpkg_args, + next_state=self._ProcessDpkgResults.__name__, + ) + + rpm_args = rdf_client_action.ExecuteRequest() + rpm_args.cmd = "/bin/rpm" + + # TODO: Remove branching once updated agent is rolled out to + # a reasonable portion of the fleet. + if self.client_version <= 3473: + rpm_args.args.append("-qa") + else: + rpm_args.args.append("--query") + rpm_args.args.append("--all") + rpm_args.args.append("--queryformat") + # pylint: disable=line-too-long + # pyformat: disable + rpm_args.args.append("%{NAME}|%{EPOCH}|%{VERSION}|%{RELEASE}|%{ARCH}|%{INSTALLTIME}|%{VENDOR}|%{SOURCERPM}\n") + # pylint: enable=line-too-long + # pyformat: enable + + self.CallClient( + server_stubs.ExecuteCommand, + rpm_args, + next_state=self._ProcessRpmResults.__name__, + ) + + if self.client_os == "Windows": + win32_product_args = rdf_client_action.WMIRequest() + win32_product_args.query = """ + SELECT Name, Vendor, Description, InstallDate, InstallDate2, Version + FROM Win32_Product + """.strip() + + self.CallClient( + server_stubs.WmiQuery, + win32_product_args, + next_state=self._ProcessWin32ProductResults.__name__, + ) + + win32_quick_fix_engineering_args = rdf_client_action.WMIRequest() + # TODO: Query only columns that we explicitly care about. + # + # So far the artifact used wildcard and so for the time being we simply + # follow it but we should have explicit list of columns that we care about + # here instead. 
+ win32_quick_fix_engineering_args.query = """ + SELECT * + FROM Win32_QuickFixEngineering + """.strip() + + self.CallClient( + server_stubs.WmiQuery, + win32_quick_fix_engineering_args, + next_state=self._ProcessWin32QuickFixEngineeringResults.__name__, + ) + + if self.client_os == "Darwin": + ff_args = flows_pb2.FileFinderArgs() + ff_args.pathtype = jobs_pb2.PathSpec.PathType.OS + ff_args.paths.append("/Library/Receipts/InstallHistory.plist") + ff_args.action.action_type = flows_pb2.FileFinderAction.Action.DOWNLOAD + + self.CallClient( + server_stubs.FileFinderOS, + mig_file_finder.ToRDFFileFinderArgs(ff_args), + next_state=self._ProcessInstallHistoryPlist.__name__, + ) + + def _ProcessDpkgResults( + self, + responses: flow_responses.Responses[rdf_client_action.ExecuteResponse], + ) -> None: + if not responses.success: + self.Log("Failed to collect Debian package list: %s", responses.status) + return + + result = sysinfo_pb2.SoftwarePackages() + + if len(responses) != 1: + raise flow_base.FlowError( + f"Unexpected number of responses: {len(responses)}", + ) + + response = list(responses)[0] + + if response.exit_status != 0: + self.Log( + "dpkg quit abnormally (status: %s, stdout: %s, stderr: %s)", + response.exit_status, + response.stdout, + response.stderr, + ) + return + + stdout = response.stdout.decode("utf-8", "backslashreplace") + lines = iter(stdout.splitlines()) + + # Output starts with column descriptors and the actual list of packages + # starts after the separator indicated by `+++-`. Thus, we iterate until + # we hit this header and continue parsing from there. + + for line in lines: + if line.startswith("+++-"): + break + + for line in lines: + # Just in case the output contains any trailing newlines or blanks, we + # strip each line and skip those that are empty. + line = line.strip() + if not line: + continue + + try: + [status, name, version, arch, description] = line.split(None, 4) + except ValueError: + self.Log("Invalid dpkg package description format: %r", line) + continue + + package = result.packages.add() + package.name = name + package.version = version + package.architecture = arch + package.description = description + + # Status indicator is desired state in first char, current state in the + # second char and error in the third (or empty if installed correctly). + if status[1:2] == "i": + package.install_state = sysinfo_pb2.SoftwarePackage.INSTALLED + + if result.packages: + self.SendReply(mig_client.ToRDFSoftwarePackages(result)) + + def _ProcessRpmResults( + self, + responses: flow_responses.Responses[rdf_client_action.ExecuteResponse], + ) -> None: + if not responses.success: + self.Log("Failed to collect RPM package list: %s", responses.status) + return + + result = sysinfo_pb2.SoftwarePackages() + + if len(responses) != 1: + raise flow_base.FlowError( + f"Unexpected number of responses: {len(responses)}", + ) + + response = list(responses)[0] + + if response.exit_status != 0: + self.Log( + "RPM quit abnormally (status: %s, stdout: %s, stderr: %s)", + response.exit_status, + response.stdout, + response.stderr, + ) + return + + stdout = response.stdout.decode("utf-8", "backslashreplace") + + for line in stdout.splitlines(): + # Just in case the output contains any trailing newlines or blanks, we + # strip each line and skip those that are empty. + line = line.strip() + if not line: + continue + + # TODO: Remove branching once updated agent is rolled out to + # a reasonable portion of the fleet. 
+ if self.client_version <= 3473: + if (match := _RPM_PACKAGE_REGEX.match(line)) is None: + self.Log("Invalid RPM package description format: %r", line) + continue + + package = result.packages.add() + package.install_state = sysinfo_pb2.SoftwarePackage.INSTALLED + package.name = match["name"] + package.version = match["version"] + package.architecture = match["arch"] + else: + try: + [ + name, + epoch, + version, + release, + arch, + install_time, + vendor, + source_rpm, + ] = line.split("|") + except ValueError: + self.Log("Invalid RPM package description format: %r", line) + continue + + try: + install_date = datetime.datetime.fromtimestamp(int(install_time)) + except ValueError: + self.Log("Invalid RPM package installation time: %s", install_time) + continue + + package = result.packages.add() + package.install_state = sysinfo_pb2.SoftwarePackage.INSTALLED + package.name = name + package.version = f"{version}-{release}" + package.installed_on = int(install_date.timestamp()) + + if arch != "(none)": + package.architecture = arch + + if epoch != "(none)": + try: + package.epoch = int(epoch) + except ValueError: + self.Log("Invalid RPM package epoch: %s", epoch) + + if vendor != "(none)": + package.publisher = vendor + + if source_rpm != "(none)": + package.source_rpm = source_rpm + + if result.packages: + self.SendReply(mig_client.ToRDFSoftwarePackages(result)) + + def _ProcessWin32ProductResults( + self, + responses: flow_responses.Responses[rdf_protodict.Dict], + ): + if not responses.success: + self.Log("Failed to collect `Win32_Product`: %s", responses.status) + return + + result = sysinfo_pb2.SoftwarePackages() + + for response in responses: + package = result.packages.add() + package.install_state = sysinfo_pb2.SoftwarePackage.INSTALLED + + if name := response.get("Name"): + package.name = name + + if description := response.get("Description"): + package.description = description + + if version := response.get("Version"): + package.version = version + + if vendor := response.get("Vendor"): + package.publisher = vendor + + if install_date := response.get("InstallDate"): + try: + install_date = datetime.datetime.strptime(install_date, "%Y%m%d") + package.installed_on = int(install_date.timestamp() * 1_000_000) + except ValueError: + self.Log("Invalid product installation date: %s", install_date) + + if result.packages: + self.SendReply(mig_client.ToRDFSoftwarePackages(result)) + + def _ProcessWin32QuickFixEngineeringResults( + self, + responses: flow_responses.Responses[rdf_protodict.Dict], + ): + if not responses.success: + status = responses.status + self.Log("Failed to collect `Win32_QuickFixEngineering`: %s", status) + return + + result = sysinfo_pb2.SoftwarePackages() + + for response in responses: + package = result.packages.add() + + if hot_fix_id := response.get("HotFixID"): + package.name = hot_fix_id + + if caption := response.get("Caption"): + package.description = caption + + if description := response.get("Description"): + # We use both WMI "description" and "caption" as source for the output + # description. If both of them are available, we concatenate the two. 
+ if package.description: + package.description = f"{package.description}\n\n{description}" + else: + package.description = description + + if installed_by := response.get("InstalledBy"): + package.installed_by = installed_by + + if installed_on := response.get("InstalledOn"): + try: + install_date = datetime.datetime.strptime(installed_on, "%m/%d/%Y") + package.installed_on = int(install_date.timestamp() * 1_000_000) + except ValueError: + self.Log("Invalid hotfix installation date: %s", installed_on) + + if result.packages: + self.SendReply(mig_client.ToRDFSoftwarePackages(result)) + + def _ProcessInstallHistoryPlist( + self, + responses: flow_responses.Responses[rdf_file_finder.FileFinderResult], + ) -> None: + if not responses.success: + message = f"Failed to collect install history plist: {responses.status}" + raise flow_base.FlowError(message) + + if len(responses) != 1: + message = f"Unexpected number of flow responses: {len(responses)}" + raise flow_base.FlowError(message) + + response = mig_file_finder.ToProtoFileFinderResult(list(responses)[0]) + + blob_ids = [ + blobs.BlobID(chunk.digest) for chunk in response.transferred_file.chunks + ] + blobs_by_id = data_store.BLOBS.ReadAndWaitForBlobs( + blob_ids, + timeout=file_store.BLOBS_READ_TIMEOUT, + ) + + content = b"".join(blobs_by_id[blob_id] for blob_id in blob_ids) + try: + plist = plistlib.loads(content, fmt=plistlib.FMT_XML) # pytype: disable=wrong-arg-types + except plistlib.InvalidFileException as error: + message = f"Failed to parse install history plist: {error}" + raise flow_base.FlowError(message) from error + + if not isinstance(plist, list): + message = f"Unexpected install history plist type: {type(plist)}" + raise flow_base.FlowError(message) + + result = sysinfo_pb2.SoftwarePackages() + + for item in plist: + package = result.packages.add() + + if display_name := item.get("displayName"): + package.name = display_name + + if display_version := item.get("displayVersion"): + package.version = display_version + + if package_identifiers := item.get("packageIdentifiers"): + package.description = ",".join(package_identifiers) + + if date := item.get("date"): + package.installed_on = int(date.timestamp() * 1_000_000) + + if result.packages: + self.SendReply(mig_client.ToRDFSoftwarePackages(result)) + + +_RPM_PACKAGE_REGEX = re.compile( + r"^(?P.*)-(?P.*-\d+\.\w+)\.(?P\w+)$" +) diff --git a/grr/server/grr_response_server/flows/general/software_test.py b/grr/server/grr_response_server/flows/general/software_test.py new file mode 100644 index 000000000..b1ac42cd9 --- /dev/null +++ b/grr/server/grr_response_server/flows/general/software_test.py @@ -0,0 +1,690 @@ +#!/usr/bin/env python +import datetime +import hashlib +from typing import Iterator + +from absl.testing import absltest + +from grr_response_client import actions +from grr_response_core.lib import rdfvalue +from grr_response_core.lib.rdfvalues import client_action as rdf_client_action +from grr_response_core.lib.rdfvalues import file_finder as rdf_file_finder +from grr_response_core.lib.rdfvalues import mig_client_action +from grr_response_core.lib.rdfvalues import mig_file_finder +from grr_response_core.lib.rdfvalues import mig_protodict +from grr_response_core.lib.rdfvalues import protodict as rdf_protodict +from grr_response_proto import flows_pb2 +from grr_response_proto import jobs_pb2 +from grr_response_proto import objects_pb2 +from grr_response_server import data_store +from grr_response_server.databases import db as abstract_db +from 
grr_response_server.databases import db_test_utils +from grr_response_server.flows.general import software +from grr_response_server.models import protodicts +from grr.test_lib import action_mocks +from grr.test_lib import flow_test_lib +from grr.test_lib import testing_startup + + +class CollectInstalledSoftwareTest(flow_test_lib.FlowTestsBaseclass): + + @classmethod + def setUpClass(cls): + super().setUpClass() + testing_startup.TestInit() + + def testLinuxDebian(self) -> None: + assert data_store.REL_DB is not None + db: abstract_db.Database = data_store.REL_DB + + creator = db_test_utils.InitializeUser(db) + client_id = db_test_utils.InitializeClient(db) + + snapshot = objects_pb2.ClientSnapshot() + snapshot.client_id = client_id + snapshot.knowledge_base.os = "Linux" + db.WriteClientSnapshot(snapshot) + + flow_id = flow_test_lib.StartAndRunFlow( + software.CollectInstalledSoftware, + action_mocks.ExecuteCommandActionMock( + cmd="/usr/bin/dpkg", + args=["--list"], + exit_status=0, + stdout="""\ +Desired=Unknown/Install/Remove/Purge/Hold +| Status=Not/Inst/Conf-files/Unpacked/halF-conf/Half-inst/trig-aWait/Trig-pend +|/ Err?=(none)/Reinst-required (Status,Err: uppercase=bad) +||/ Name Version Architecture Description ++++-=======-===============-============-======================================================= +ii 7zip 23.01+dfsg-7 amd64 7-Zip file archiver with a high compression ratio +ii acl 2.3.1-6 amd64 access control list - utilities +ii adduser 3.137 all add and remove users and groups +ii bash 5.2.21-2 amd64 GNU Bourne Again SHell +ii sudo 1.9.15p5-2 amd64 Provide limited super user privileges to specific users +ii xorg 1:7.7+23+build1 amd64 X.Org X Window System +""".encode("utf-8"), + ), + client_id=client_id, + creator=creator, + ) + + results = flow_test_lib.GetFlowResults(client_id, flow_id) + + self.assertLen(results, 1) + + packages_by_name = { + package.name: package for package in results[0].packages + } + + self.assertLen(packages_by_name, 6) + + self.assertEqual(packages_by_name["7zip"].version, "23.01+dfsg-7") + self.assertEqual(packages_by_name["7zip"].architecture, "amd64") + self.assertNotEmpty(packages_by_name["7zip"].description) + + self.assertEqual(packages_by_name["acl"].version, "2.3.1-6") + self.assertEqual(packages_by_name["acl"].architecture, "amd64") + self.assertNotEmpty(packages_by_name["acl"].description) + + self.assertEqual(packages_by_name["adduser"].version, "3.137") + self.assertEqual(packages_by_name["adduser"].architecture, "all") + self.assertNotEmpty(packages_by_name["adduser"].description) + + self.assertEqual(packages_by_name["bash"].version, "5.2.21-2") + self.assertEqual(packages_by_name["bash"].architecture, "amd64") + self.assertNotEmpty(packages_by_name["bash"].description) + + self.assertEqual(packages_by_name["sudo"].version, "1.9.15p5-2") + self.assertEqual(packages_by_name["sudo"].architecture, "amd64") + self.assertNotEmpty(packages_by_name["sudo"].description) + + self.assertEqual(packages_by_name["xorg"].version, "1:7.7+23+build1") + self.assertEqual(packages_by_name["xorg"].architecture, "amd64") + self.assertNotEmpty(packages_by_name["xorg"].description) + + def testLinuxFedoraPre3473(self) -> None: + assert data_store.REL_DB is not None + db: abstract_db.Database = data_store.REL_DB + + creator = db_test_utils.InitializeUser(db) + client_id = db_test_utils.InitializeClient(db) + + snapshot = objects_pb2.ClientSnapshot() + snapshot.client_id = client_id + snapshot.knowledge_base.os = "Linux" + 
snapshot.startup_info.client_info.client_version = 3473 + db.WriteClientSnapshot(snapshot) + + flow_id = flow_test_lib.StartAndRunFlow( + software.CollectInstalledSoftware, + action_mocks.ExecuteCommandActionMock( + cmd="/bin/rpm", + args=["-qa"], + exit_status=0, + stdout="""\
+bash-5.2.21-1.fc39.x86_64
+ca-certificates-2023.2.60_v7.0.306-2.fc39.noarch
+elfutils-default-yama-scope-0.190-1.fc39.noarch
+grep-3.11-3.fc39.x86_64
+gzip-1.12-6.fc39.x86_64
+python3-3.12.0-1.fc39.x86_64
+yum-4.18.1-2.fc39.noarch
+rpm-4.19.0-1.fc39.x86_64
+""".encode("utf-8"), + ), + client_id=client_id, + creator=creator, + ) + + results = flow_test_lib.GetFlowResults(client_id, flow_id) + + self.assertLen(results, 1) + + packages_by_name = { + package.name: package for package in results[0].packages + } + + self.assertLen(packages_by_name, 8) + + # TODO: Update version checks once we know what the proper way + # forward for reporting it is. + + bash = packages_by_name["bash"] + self.assertEqual(bash.version, "5.2.21-1.fc39") + self.assertEqual(bash.architecture, "x86_64") + + ca_certificates = packages_by_name["ca-certificates"] + self.assertEqual(ca_certificates.version, "2023.2.60_v7.0.306-2.fc39") + self.assertEqual(ca_certificates.architecture, "noarch") + + elfutils = packages_by_name["elfutils-default-yama-scope"] + self.assertEqual(elfutils.version, "0.190-1.fc39") + self.assertEqual(elfutils.architecture, "noarch") + + grep = packages_by_name["grep"] + self.assertEqual(grep.version, "3.11-3.fc39") + self.assertEqual(grep.architecture, "x86_64") + + gzip = packages_by_name["gzip"] + self.assertEqual(gzip.version, "1.12-6.fc39") + self.assertEqual(gzip.architecture, "x86_64") + + python3 = packages_by_name["python3"] + self.assertEqual(python3.version, "3.12.0-1.fc39") + self.assertEqual(python3.architecture, "x86_64") + + yum = packages_by_name["yum"] + self.assertEqual(yum.version, "4.18.1-2.fc39") + self.assertEqual(yum.architecture, "noarch") + + rpm = packages_by_name["rpm"] + self.assertEqual(rpm.version, "4.19.0-1.fc39") + self.assertEqual(rpm.architecture, "x86_64") + + def testLinuxFedoraPost3473(self) -> None: + assert data_store.REL_DB is not None + db: abstract_db.Database = data_store.REL_DB + + creator = db_test_utils.InitializeUser(db) + client_id = db_test_utils.InitializeClient(db) + + snapshot = objects_pb2.ClientSnapshot() + snapshot.client_id = client_id + snapshot.knowledge_base.os = "Linux" + snapshot.startup_info.client_info.client_version = 3474 + db.WriteClientSnapshot(snapshot) + + flow_id = flow_test_lib.StartAndRunFlow( + software.CollectInstalledSoftware, + action_mocks.ExecuteCommandActionMock( + cmd="/bin/rpm", + exit_status=0, + stdout="""\
+bash|(none)|5.2.26|1.fc39|x86_64|1711525869|Fedora Project|bash-5.2.26-1.fc39.src.rpm
+ca-certificates|(none)|2023.2.60_v7.0.306|2.fc39|noarch|1711525871|Fedora Project|ca-certificates-2023.2.60_v7.0.306-2.fc39.src.rpm
+elfutils-default-yama-scope|(none)|0.191|2.fc39|noarch|1711525870|Fedora Project|elfutils-0.191-2.fc39.src.rpm
+grep|(none)|3.11|3.fc39|x86_64|1711525871|Fedora Project|grep-3.11-3.fc39.src.rpm
+gzip|(none)|1.12|6.fc39|x86_64|1711525875|Fedora Project|gzip-1.12-6.fc39.src.rpm
+python3|(none)|3.12.2|2.fc39|x86_64|1711525873|Fedora Project|python3.12-3.12.2-2.fc39.src.rpm
+yum|(none)|4.19.0|1.fc39|noarch|1711525875|Fedora Project|dnf-4.19.0-1.fc39.src.rpm
+rpm|(none)|4.19.1.1|1.fc39|x86_64|1711525876|Fedora Project|rpm-4.19.1.1-1.fc39.src.rpm
+vim-minimal|2|9.1.181|1.fc39|x86_64|1711525876|Fedora Project|vim-9.1.181-1.fc39.src.rpm
+gpg-pubkey|(none)|18b8e74c|62f2920f|(none)|1711525880|(none)|(none)
+""".encode("utf-8"), + ), + client_id=client_id, + creator=creator, + ) + + results = flow_test_lib.GetFlowResults(client_id, flow_id) + + self.assertLen(results, 1) + + packages_by_name = { + package.name: package for package in results[0].packages + } + + self.assertLen(packages_by_name, 10) + + # TODO: Update version checks once we know what the proper way + # forward for reporting it is. + + bash = packages_by_name["bash"] + self.assertEqual(bash.version, "5.2.26-1.fc39") + self.assertEqual(bash.architecture, "x86_64") + self.assertEqual(bash.installed_on, 1711525869) + self.assertEqual(bash.publisher, "Fedora Project") + self.assertEqual(bash.source_rpm, "bash-5.2.26-1.fc39.src.rpm") + + ca_certificates = packages_by_name["ca-certificates"] + self.assertEqual(ca_certificates.version, "2023.2.60_v7.0.306-2.fc39") + self.assertEqual(ca_certificates.architecture, "noarch") + self.assertEqual(ca_certificates.installed_on, 1711525871) + self.assertEqual(ca_certificates.publisher, "Fedora Project") + + elfutils = packages_by_name["elfutils-default-yama-scope"] + self.assertEqual(elfutils.version, "0.191-2.fc39") + self.assertEqual(elfutils.architecture, "noarch") + self.assertEqual(elfutils.installed_on, 1711525870) + self.assertEqual(elfutils.publisher, "Fedora Project") + self.assertEqual(elfutils.source_rpm, "elfutils-0.191-2.fc39.src.rpm") + + grep = packages_by_name["grep"] + self.assertEqual(grep.version, "3.11-3.fc39") + self.assertEqual(grep.architecture, "x86_64") + self.assertEqual(grep.installed_on, 1711525871) + self.assertEqual(grep.publisher, "Fedora Project") + self.assertEqual(grep.source_rpm, "grep-3.11-3.fc39.src.rpm") + + gzip = packages_by_name["gzip"] + self.assertEqual(gzip.version, "1.12-6.fc39") + self.assertEqual(gzip.architecture, "x86_64") + self.assertEqual(gzip.installed_on, 1711525875) + self.assertEqual(gzip.publisher, "Fedora Project") + self.assertEqual(gzip.source_rpm, "gzip-1.12-6.fc39.src.rpm") + + python3 = packages_by_name["python3"] + self.assertEqual(python3.version, "3.12.2-2.fc39") + self.assertEqual(python3.architecture, "x86_64") + self.assertEqual(python3.installed_on, 1711525873) + self.assertEqual(python3.publisher, "Fedora Project") + self.assertEqual(python3.source_rpm, "python3.12-3.12.2-2.fc39.src.rpm") + + yum = packages_by_name["yum"] + self.assertEqual(yum.version, "4.19.0-1.fc39") + self.assertEqual(yum.architecture, "noarch") + self.assertEqual(yum.installed_on, 1711525875) + self.assertEqual(yum.publisher, "Fedora Project") + self.assertEqual(yum.source_rpm, "dnf-4.19.0-1.fc39.src.rpm") + + rpm = packages_by_name["rpm"] + self.assertEqual(rpm.version, "4.19.1.1-1.fc39") + self.assertEqual(rpm.architecture, "x86_64") + self.assertEqual(rpm.installed_on, 1711525876) + self.assertEqual(rpm.publisher, "Fedora Project") + self.assertEqual(rpm.source_rpm, "rpm-4.19.1.1-1.fc39.src.rpm") + + vim_minimal = packages_by_name["vim-minimal"] + self.assertEqual(vim_minimal.version, "9.1.181-1.fc39") + self.assertEqual(vim_minimal.architecture, "x86_64") + self.assertEqual(vim_minimal.installed_on, 1711525876) + self.assertEqual(vim_minimal.publisher, "Fedora Project") + self.assertEqual(vim_minimal.epoch, 2) + self.assertEqual(vim_minimal.source_rpm, "vim-9.1.181-1.fc39.src.rpm") + + gpg_pubkey = packages_by_name["gpg-pubkey"] + self.assertEqual(gpg_pubkey.version, "18b8e74c-62f2920f") + self.assertEqual(gpg_pubkey.installed_on, 1711525880) +
self.assertFalse(gpg_pubkey.HasField("epoch")) + self.assertFalse(gpg_pubkey.HasField("architecture")) + self.assertFalse(gpg_pubkey.HasField("vendor")) + self.assertFalse(gpg_pubkey.HasField("source_rpm")) + + def testWindows(self) -> None: + assert data_store.REL_DB is not None + db: abstract_db.Database = data_store.REL_DB + + creator = db_test_utils.InitializeUser(db) + client_id = db_test_utils.InitializeClient(db) + + snapshot = objects_pb2.ClientSnapshot() + snapshot.client_id = client_id + snapshot.knowledge_base.os = "Windows" + db.WriteClientSnapshot(snapshot) + + class ActionMock(action_mocks.ActionMock): + + def WmiQuery( + self, + args: rdf_client_action.WMIRequest, + ) -> Iterator[rdf_protodict.Dict]: + args = mig_client_action.ToProtoWMIRequest(args) + + if not args.query.upper().startswith("SELECT "): + raise RuntimeError("Non-`SELECT` WMI query") + + if "Win32_Product" in args.query: + for result in [ + { + "Name": "Rust 1.76 (MSVC 64-bit)", + "InstallDate": "20231229", + "Version": "1.75.0.0", + "Vendor": "The Rust Project Developers", + "Description": "Rust 1.75 (MSVC 64-bit)", + }, + { + "Name": "Python 3.11.3 Core Interpreter (64-bit)", + "InstallDate": "20230523", + "Version": "3.11.3150.0", + "Vendor": "Python Software Foundation", + "Description": "Python 3.11.3 Core Interpreter (64-bit)", + }, + { + "Name": "Google Chrome", + "InstallDate": "20230920", + "Version": "122.0.6261.128", + "Vendor": "Google LLC", + "Description": "Google Chrome", + }, + { + "Name": "7-Zip 22.01 (x64 edition)", + "InstallDate": "20230320", + "Version": "22.01.00.0", + "Vendor": "Igor Pavlov", + "Description": "7-Zip 22.01 (x64 edition)", + }, + { + "Name": "AMD Settings", + "InstallDate": "20230320", + "Version": "2022.1025.1410.1936", + "Vendor": "Advanced Micro Devices, Inc.", + "Description": "AMD Settings", + }, + ]: + yield mig_protodict.ToRDFDict(protodicts.Dict(result)) + elif "Win32_QuickFixEngineering" in args.query: + for result in [ + { + "HotFixID": "KB5033909", + "InstalledOn": "1/10/2024", + "InstalledBy": "NT AUTHORITY\\SYSTEM", + "Caption": "http://support.microsoft.com/?kbid=5033909", + "Description": "Update", + }, + { + "HotFixID": "KB4577586", + "InstalledOn": "2/22/2023", + "InstalledBy": "", + "Caption": "https://support.microsoft.com/help/4577586", + "Description": "Update", + }, + { + "HotFixID": "KB5012170", + "InstalledOn": "2/22/2023", + "InstalledBy": "", + "Caption": "https://support.microsoft.com/help/5012170", + "Description": "Security Update", + }, + { + "HotFixID": "KB5035845", + "InstalledOn": "3/13/2024", + "InstalledBy": "NT AUTHORITY\\SYSTEM", + "Caption": "https://support.microsoft.com/help/5035845", + "Description": "Security Update", + }, + { + "HotFixID": "KB5034224", + "InstalledOn": "2/14/2024", + "InstalledBy": "NT AUTHORITY\\SYSTEM", + "Caption": "", + "Description": "Update", + }, + ]: + yield mig_protodict.ToRDFDict(protodicts.Dict(result)) + else: + raise RuntimeError(f"Unexpected WMI query: {args.query!r}") + + flow_id = flow_test_lib.StartAndRunFlow( + software.CollectInstalledSoftware, + ActionMock(), + client_id=client_id, + creator=creator, + ) + + results = flow_test_lib.GetFlowResults(client_id, flow_id) + + self.assertLen(results, 2) + + packages_by_name = { + package.name: package for package in results[0].packages + } + + self.assertLen(packages_by_name, 5) + + rust = packages_by_name["Rust 1.76 (MSVC 64-bit)"] + self.assertEqual(rust.version, "1.75.0.0") + self.assertEqual(rust.publisher, "The Rust Project Developers") + 
self.assertEqual( + rust.installed_on / 1_000_000, + datetime.datetime(year=2023, month=12, day=29).timestamp(), + ) + + python = packages_by_name["Python 3.11.3 Core Interpreter (64-bit)"] + self.assertEqual(python.version, "3.11.3150.0") + self.assertEqual(python.publisher, "Python Software Foundation") + self.assertEqual( + python.installed_on / 1_000_000, + datetime.datetime(year=2023, month=5, day=23).timestamp(), + ) + + chrome = packages_by_name["Google Chrome"] + self.assertEqual(chrome.version, "122.0.6261.128") + self.assertEqual(chrome.publisher, "Google LLC") + self.assertEqual( + chrome.installed_on / 1_000_000, + datetime.datetime(year=2023, month=9, day=20).timestamp(), + ) + + zip7 = packages_by_name["7-Zip 22.01 (x64 edition)"] + self.assertEqual(zip7.version, "22.01.00.0") + self.assertEqual(zip7.publisher, "Igor Pavlov") + self.assertEqual( + zip7.installed_on / 1_000_000, + datetime.datetime(year=2023, month=3, day=20).timestamp(), + ) + + amd = packages_by_name["AMD Settings"] + self.assertEqual(amd.version, "2022.1025.1410.1936") + self.assertEqual(amd.publisher, "Advanced Micro Devices, Inc.") + self.assertEqual( + amd.installed_on / 1_000_000, + datetime.datetime(year=2023, month=3, day=20).timestamp(), + ) + + packages_by_name = { + package.name: package for package in results[1].packages + } + + self.assertLen(packages_by_name, 5) + + self.assertEqual( + packages_by_name["KB5033909"].installed_by, + "NT AUTHORITY\\SYSTEM", + ) + self.assertEqual( + packages_by_name["KB5033909"].installed_on / 1_000_000, + datetime.datetime(year=2024, month=1, day=10).timestamp(), + ) + self.assertIn( + "http://support.microsoft.com/?kbid=5033909", + packages_by_name["KB5033909"].description, + ) + self.assertIn( + "Update", + packages_by_name["KB5033909"].description, + ) + + self.assertEqual( + packages_by_name["KB4577586"].installed_on / 1_000_000, + datetime.datetime(year=2023, month=2, day=22).timestamp(), + ) + self.assertIn( + "https://support.microsoft.com/help/4577586", + packages_by_name["KB4577586"].description, + ) + self.assertIn( + "Update", + packages_by_name["KB4577586"].description, + ) + + self.assertEqual( + packages_by_name["KB5012170"].installed_on / 1_000_000, + datetime.datetime(year=2023, month=2, day=22).timestamp(), + ) + self.assertIn( + "https://support.microsoft.com/help/5012170", + packages_by_name["KB5012170"].description, + ) + self.assertIn( + "Security Update", + packages_by_name["KB5012170"].description, + ) + + self.assertEqual( + packages_by_name["KB5035845"].installed_by, + "NT AUTHORITY\\SYSTEM", + ) + self.assertEqual( + packages_by_name["KB5035845"].installed_on / 1_000_000, + datetime.datetime(year=2024, month=3, day=13).timestamp(), + ) + self.assertIn( + "https://support.microsoft.com/help/5035845", + packages_by_name["KB5035845"].description, + ) + self.assertIn( + "Security Update", + packages_by_name["KB5035845"].description, + ) + + self.assertEqual( + packages_by_name["KB5034224"].installed_by, + "NT AUTHORITY\\SYSTEM", + ) + self.assertIn( + "Update", + packages_by_name["KB5034224"].description, + ) + + def testMacos(self) -> None: + assert data_store.REL_DB is not None + db: abstract_db.Database = data_store.REL_DB + + creator = db_test_utils.InitializeUser(db) + client_id = db_test_utils.InitializeClient(db) + + snapshot = objects_pb2.ClientSnapshot() + snapshot.client_id = client_id + snapshot.knowledge_base.os = "Darwin" + db.WriteClientSnapshot(snapshot) + + class FakeFileFinderOS(actions.ActionPlugin): + + in_rdfvalue =
rdf_file_finder.FileFinderArgs + out_rdfvalues = [rdf_file_finder.FileFinderResult] + + def Run(self, args: rdf_file_finder.FileFinderArgs) -> None: + args = mig_file_finder.ToProtoFileFinderArgs(args) + + if args.pathtype != jobs_pb2.PathSpec.PathType.OS: + raise RuntimeError(f"Unexpected path type: {args.pathtype}") + + if list(args.paths) != ["/Library/Receipts/InstallHistory.plist"]: + raise RuntimeError(f"Unexpected paths: {args.paths}") + + blob = jobs_pb2.DataBlob() + blob.data = """\
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<array>
+  <dict>
+    <key>date</key>
+    <date>2023-07-17T08:45:50Z</date>
+    <key>displayName</key>
+    <string>macOS 13.4.1</string>
+    <key>displayVersion</key>
+    <string>13.4.1</string>
+    <key>processName</key>
+    <string>softwareupdated</string>
+  </dict>
+  <dict>
+    <key>date</key>
+    <date>2023-07-17T08:56:15Z</date>
+    <key>displayName</key>
+    <string>grrd</string>
+    <key>displayVersion</key>
+    <string></string>
+    <key>packageIdentifiers</key>
+    <array>
+      <string>com.google.corp.grrd</string>
+    </array>
+    <key>processName</key>
+    <string>installer</string>
+  </dict>
+  <dict>
+    <key>date</key>
+    <date>2024-05-08T13:13:12Z</date>
+    <key>displayName</key>
+    <string>osquery</string>
+    <key>displayVersion</key>
+    <string></string>
+    <key>packageIdentifiers</key>
+    <array>
+      <string>io.osquery.agent</string>
+    </array>
+    <key>processName</key>
+    <string>installer</string>
+  </dict>
+</array>
+</plist>
+""".encode("utf-8") + + self.SendReply( + mig_protodict.ToRDFDataBlob(blob), + session_id=rdfvalue.SessionID(flow_name="TransferStore"), + ) + + result = flows_pb2.FileFinderResult() + result.transferred_file.chunk_size = len(blob.data) + + stat_entry = result.stat_entry + stat_entry.st_mode = 0o0644 + stat_entry.st_size = len(blob.data) + stat_entry.pathspec.pathtype = jobs_pb2.PathSpec.PathType.OS + stat_entry.pathspec.path = "/Library/Receipts/InstallHistory.plist" + + chunk = result.transferred_file.chunks.add() + chunk.offset = 0 + chunk.length = len(blob.data) + chunk.digest = hashlib.sha256(blob.data).digest() + + self.SendReply( + mig_file_finder.ToRDFFileFinderResult(result), + ) + + flow_id = flow_test_lib.StartAndRunFlow( + software.CollectInstalledSoftware, + action_mocks.ActionMock.With({ + "FileFinderOS": FakeFileFinderOS, + }), + client_id=client_id, + creator=creator, + ) + + results = flow_test_lib.GetFlowResults(client_id, flow_id) + self.assertLen(results, 1) + + packages_by_name = { + package.name: package for package in results[0].packages + } + + self.assertLen(packages_by_name, 3) + + self.assertEqual( + packages_by_name["macOS 13.4.1"].version, + "13.4.1", + ) + self.assertEqual( + packages_by_name["macOS 13.4.1"].installed_on / 1_000_000, + datetime.datetime( + year=2023, month=7, day=17, hour=8, minute=45, second=50 + ).timestamp(), + ) + + self.assertEqual( + packages_by_name["grrd"].description, + "com.google.corp.grrd", + ) + self.assertEqual( + packages_by_name["grrd"].installed_on / 1_000_000, + datetime.datetime( + year=2023, month=7, day=17, hour=8, minute=56, second=15 + ).timestamp(), + ) + + self.assertEqual( + packages_by_name["osquery"].description, + "io.osquery.agent", + ) + self.assertEqual( + packages_by_name["osquery"].installed_on / 1_000_000, + datetime.datetime( + year=2024, month=5, day=8, hour=13, minute=13, second=12 + ).timestamp(), + ) + + +if __name__ == "__main__": + absltest.main() diff --git a/grr/server/grr_response_server/gui/api_call_robot_router.py b/grr/server/grr_response_server/gui/api_call_robot_router.py index ca1015d4a..ed9a314b5 100644 --- a/grr/server/grr_response_server/gui/api_call_robot_router.py +++ b/grr/server/grr_response_server/gui/api_call_robot_router.py @@ -3,7 +3,9 @@ from typing import Optional +from google.protobuf import any_pb2 from grr_response_core.lib import rdfvalue +from grr_response_core.lib.rdfvalues import mig_file_finder from grr_response_core.lib.rdfvalues import paths as rdf_paths from grr_response_core.lib.rdfvalues import structs as rdf_structs from grr_response_proto import
api_call_router_pb2 @@ -86,14 +88,24 @@ class ApiRobotCreateFlowHandler(api_call_handler_base.ApiCallHandler): args_type = api_flow.ApiCreateFlowArgs result_type = api_flow.ApiFlow + proto_args_type = flow_pb2.ApiCreateFlowArgs + proto_result_type = flow_pb2.ApiFlow - def __init__(self, override_flow_name=None, override_flow_args=None): + def __init__( + self, + override_flow_name: Optional[str] = None, + override_flow_args: Optional[any_pb2.Any] = None, + ) -> None: super().__init__() self.override_flow_name = override_flow_name self.override_flow_args = override_flow_args - def Handle(self, args, context=None): + def Handle( + self, + args: flow_pb2.ApiCreateFlowArgs, + context: Optional[api_call_context.ApiCallContext] = None, + ) -> flow_pb2.ApiFlow: if not args.client_id: raise RuntimeError("Client id has to be specified.") @@ -103,9 +115,13 @@ def Handle(self, args, context=None): delegate = api_flow.ApiCreateFlowHandler() # Note that runner_args are dropped. From all the arguments we use only # the flow name and the arguments. - delegate_args = api_flow.ApiCreateFlowArgs(client_id=args.client_id) + delegate_args = flow_pb2.ApiCreateFlowArgs(client_id=args.client_id) delegate_args.flow.name = self.override_flow_name or args.flow.name - delegate_args.flow.args = self.override_flow_args or args.flow.args + if self.override_flow_args: + delegate_args.flow.args.CopyFrom(self.override_flow_args) + else: + delegate_args.flow.args.CopyFrom(args.flow.args) + return delegate.Handle(delegate_args, context=context) @@ -286,14 +302,24 @@ def _FixFileFinderArgs(self, source_args): return new_args - def CreateFlow(self, args, context=None): + def CreateFlow( + self, + args: api_flow.ApiCreateFlowArgs, + context: Optional[api_call_context.ApiCallContext] = None, + ) -> ApiRobotCreateFlowHandler: if not args.client_id: raise ValueError("client_id must be provided") if args.flow.name in self.allowed_file_finder_flow_names: self._CheckFileFinderArgs(args.flow.args) override_flow_name = self.effective_file_finder_flow_name - override_flow_args = self._FixFileFinderArgs(args.flow.args) + if file_finder_args := self._FixFileFinderArgs(args.flow.args): + override_flow_args = any_pb2.Any() + override_flow_args.Pack( + mig_file_finder.ToProtoFileFinderArgs(file_finder_args) + ) + else: + override_flow_args = None throttler = self._GetFileFinderThrottler() elif args.flow.name in self.allowed_artifact_collector_flow_names: self._CheckArtifactCollectorFlowArgs(args.flow.args) diff --git a/grr/server/grr_response_server/gui/api_call_robot_router_test.py b/grr/server/grr_response_server/gui/api_call_robot_router_test.py index efabb6b21..9db557174 100644 --- a/grr/server/grr_response_server/gui/api_call_robot_router_test.py +++ b/grr/server/grr_response_server/gui/api_call_robot_router_test.py @@ -1,10 +1,16 @@ #!/usr/bin/env python """Tests for ApiCallRobotRouter.""" +from typing import Optional + from absl import app +from google.protobuf import any_pb2 +from google.protobuf import message from grr_response_core.lib.rdfvalues import artifacts as rdf_artifacts from grr_response_core.lib.rdfvalues import file_finder as rdf_file_finder +from grr_response_proto import flows_pb2 +from grr_response_proto.api import flow_pb2 from grr_response_server import access_control from grr_response_server import flow_base from grr_response_server.flows.general import collectors @@ -36,35 +42,40 @@ def setUp(self): def testPassesFlowArgsThroughIfNoOverridesSpecified(self): h = rr.ApiRobotCreateFlowHandler() - args =
api_flow.ApiCreateFlowArgs(client_id=self.client_id) + args = flow_pb2.ApiCreateFlowArgs(client_id=self.client_id) args.flow.name = file_finder.FileFinder.__name__ - args.flow.args = rdf_file_finder.FileFinderArgs(paths=["foo"]) + args.flow.args.Pack(flows_pb2.FileFinderArgs(paths=["foo"])) f = h.Handle(args=args, context=self.context) - self.assertEqual(f.args.paths, ["foo"]) + flow_args = flows_pb2.FileFinderArgs() + f.args.Unpack(flow_args) + self.assertEqual(flow_args.paths, ["foo"]) def testOverridesFlowNameIfOverrideArgIsSpecified(self): h = rr.ApiRobotCreateFlowHandler( override_flow_name=AnotherFileFinder.__name__ ) # pylint: disable=undefined-variable - args = api_flow.ApiCreateFlowArgs(client_id=self.client_id) + args = flow_pb2.ApiCreateFlowArgs(client_id=self.client_id) args.flow.name = file_finder.FileFinder.__name__ - args.flow.args = rdf_file_finder.FileFinderArgs(paths=["foo"]) + args.flow.args.Pack(flows_pb2.FileFinderArgs(paths=["foo"])) f = h.Handle(args=args, context=self.context) self.assertEqual(f.name, AnotherFileFinder.__name__) # pylint: disable=undefined-variable def testOverridesFlowArgsThroughIfOverridesSpecified(self): - override_flow_args = rdf_file_finder.FileFinderArgs(paths=["bar"]) + override_flow_args = any_pb2.Any() + override_flow_args.Pack(flows_pb2.FileFinderArgs(paths=["bar"])) h = rr.ApiRobotCreateFlowHandler(override_flow_args=override_flow_args) - args = api_flow.ApiCreateFlowArgs(client_id=self.client_id) + args = flow_pb2.ApiCreateFlowArgs(client_id=self.client_id) args.flow.name = file_finder.FileFinder.__name__ - args.flow.args = rdf_file_finder.FileFinderArgs(paths=["foo"]) + args.flow.args.Pack(flows_pb2.FileFinderArgs(paths=["foo"])) f = h.Handle(args=args, context=self.context) - self.assertEqual(f.args.paths, ["bar"]) + flow_args = flows_pb2.FileFinderArgs() + f.args.Unpack(flow_args) + self.assertEqual(flow_args.paths, ["bar"]) class ApiCallRobotRouterTest(acl_test_lib.AclTestMixin, test_lib.GRRBaseTest): @@ -234,7 +245,9 @@ def testFileFinderHashMaxFileSizeCanBeOverridden(self): context=self.context, ) - ha = handler.override_flow_args.action.hash + override_flow_args = flows_pb2.FileFinderArgs() + handler.override_flow_args.Unpack(override_flow_args) + ha = override_flow_args.action.hash self.assertEqual(ha.oversized_file_policy, ha.OversizedFilePolicy.SKIP) self.assertEqual(ha.max_size, 42) @@ -266,9 +279,14 @@ def testFileFinderDownloadMaxFileSizeCanBeOverridden(self): context=self.context, ) - da = handler.override_flow_args.action.download - self.assertEqual(da.oversized_file_policy, da.OversizedFilePolicy.SKIP) - self.assertEqual(da.max_size, 42) + override_flow_args = flows_pb2.FileFinderArgs() + handler.override_flow_args.Unpack(override_flow_args) + override_da = override_flow_args.action.download + self.assertEqual( + override_da.oversized_file_policy, + flows_pb2.FileFinderHashActionOptions.OversizedFilePolicy.SKIP, + ) + self.assertEqual(override_da.max_size, 42) def testArtifactCollectorWorksWhenEnabledAndArgumentsAreCorrect(self): router = None @@ -399,15 +417,24 @@ def testOnlyFileFinderAndArtifactCollectorFlowsAreAllowed(self): context=self.context, ) - def _CreateFlowWithRobotId(self, flow_name=None, flow_args=None): + def _CreateFlowWithRobotId( + self, + flow_name: Optional[str] = None, + flow_args: Optional[message.Message] = None, + ): flow_name = flow_name or file_finder.FileFinder.__name__ handler = rr.ApiRobotCreateFlowHandler() + + api_flow_args = flow_pb2.ApiCreateFlowArgs() + api_flow_args.client_id = 
self.client_id + if flow_name: + api_flow_args.flow.name = flow_name + if flow_args: + api_flow_args.flow.args.Pack(flow_args) + flow_result = handler.Handle( - api_flow.ApiCreateFlowArgs( - client_id=self.client_id, - flow=api_flow.ApiFlow(name=flow_name, args=flow_args), - ), + args=api_flow_args, context=self.context, ) return flow_result.flow_id @@ -583,9 +610,7 @@ def testGetFlowFilesArchiveReturnsNonLimitedHandlerForArtifactsWhenNeeded( flow_id = self._CreateFlowWithRobotId( flow_name=AnotherArtifactCollector.__name__, # pylint: disable=undefined-variable - flow_args=rdf_artifacts.ArtifactCollectorFlowArgs( - artifact_list=["Foo"] - ), + flow_args=flows_pb2.ArtifactCollectorFlowArgs(artifact_list=["Foo"]), ) handler = router.GetFlowFilesArchive( api_flow.ApiGetFlowFilesArchiveArgs( diff --git a/grr/server/grr_response_server/gui/api_call_router.py b/grr/server/grr_response_server/gui/api_call_router.py index 93785c619..a422013da 100644 --- a/grr/server/grr_response_server/gui/api_call_router.py +++ b/grr/server/grr_response_server/gui/api_call_router.py @@ -659,30 +659,6 @@ def ListFlowResults(self, args, context=None): raise NotImplementedError() - @Category("Flows") - @ArgsType(api_flow.ApiListParsedFlowResultsArgs) - @ResultType(api_flow.ApiListParsedFlowResultsResult) - @Http("GET", "/api/clients/<client_id>/flows/<flow_id>/results/parsed") - def ListParsedFlowResults( - self, - args: api_flow.ApiListParsedFlowResultsArgs, - context: Optional[api_call_context.ApiCallContext] = None, - ) -> api_flow.ApiListParsedFlowResultsHandler: - """Lists parsed results of the artifact collect flow.""" - raise NotImplementedError() - - @Category("Flows") - @ArgsType(api_flow.ApiListFlowApplicableParsersArgs) - @ResultType(api_flow.ApiListFlowApplicableParsersResult) - @Http("GET", "/api/clients/<client_id>/flows/<flow_id>/results/parsers") - def ListFlowApplicableParsers( - self, - args: api_flow.ApiListFlowApplicableParsersArgs, - context: Optional[api_call_context.ApiCallContext] = None, - ) -> api_flow.ApiListFlowApplicableParsersHandler: - """Lists parsers that are applicable to results of the specified flow.""" - raise NotImplementedError() - @Category("Flows") @ArgsType(api_flow.ApiGetExportedFlowResultsArgs) @ResultBinaryStream() diff --git a/grr/server/grr_response_server/gui/api_call_router_with_approval_checks.py b/grr/server/grr_response_server/gui/api_call_router_with_approval_checks.py index 651746e5a..429573ef1 100644 --- a/grr/server/grr_response_server/gui/api_call_router_with_approval_checks.py +++ b/grr/server/grr_response_server/gui/api_call_router_with_approval_checks.py @@ -467,22 +467,6 @@ def ListFlowResults(self, args, context=None): return self.delegate.ListFlowResults(args, context=context) - def ListParsedFlowResults( - self, - args: api_flow.ApiListParsedFlowResultsArgs, - context: Optional[api_call_context.ApiCallContext] = None, - ) -> api_flow.ApiListParsedFlowResultsHandler: - self._CheckFlowOrClientAccess(args, context) - return self.delegate.ListParsedFlowResults(args, context=context) - - def ListFlowApplicableParsers( - self, - args: api_flow.ApiListFlowApplicableParsersArgs, - context: Optional[api_call_context.ApiCallContext] = None, - ) -> api_flow.ApiListFlowApplicableParsersHandler: - self.access_checker.CheckClientAccess(context, args.client_id) - return self.delegate.ListFlowApplicableParsers(args, context=context) - def GetExportedFlowResults(self, args, context=None): self._CheckFlowOrClientAccess(args, context) diff --git
a/grr/server/grr_response_server/gui/api_call_router_with_approval_checks_test.py b/grr/server/grr_response_server/gui/api_call_router_with_approval_checks_test.py index 9a987771c..e5aee2763 100644 --- a/grr/server/grr_response_server/gui/api_call_router_with_approval_checks_test.py +++ b/grr/server/grr_response_server/gui/api_call_router_with_approval_checks_test.py @@ -337,7 +337,6 @@ def testGetOsqueryResultsChecksClientAccessIfNotPartOfHunt(self): "CreateFlow", "CancelFlow", "ListFlowRequests", - "ListFlowApplicableParsers", "ListFlowOutputPlugins", "ListFlowOutputPluginLogs", "ListFlowOutputPluginErrors", @@ -391,7 +390,6 @@ def testClientFlowsMethodsAreAccessChecked(self): ACCESS_CHECKED_METHODS.extend([ "GetFlow", "ListFlowResults", - "ListParsedFlowResults", "GetExportedFlowResults", "GetFlowResultsExportCommand", "GetFlowFilesArchive", @@ -408,12 +406,6 @@ def testHuntFlowExceptionsRaisesRaisesIfFlowIsNotFound(self): with self.assertRaises(api_call_handler_base.ResourceNotFoundError): self.router.ListFlowResults(args, context=self.context) - args = api_flow.ApiListParsedFlowResultsArgs( - client_id=self.client_id, flow_id="12345678" - ) - with self.assertRaises(api_call_handler_base.ResourceNotFoundError): - self.router.ListParsedFlowResults(args, context=self.context) - args = api_flow.ApiGetExportedFlowResultsArgs( client_id=self.client_id, flow_id="12345678" ) @@ -449,13 +441,6 @@ def testHuntFlowExceptionsGrantsAccessIfPartOfHunt(self): ) self.CheckMethodIsNotAccessChecked(self.router.ListFlowResults, args=args) - args = api_flow.ApiListParsedFlowResultsArgs( - client_id=self.client_id, flow_id=flow_id - ) - self.CheckMethodIsNotAccessChecked( - self.router.ListParsedFlowResults, args=args - ) - args = api_flow.ApiGetExportedFlowResultsArgs( client_id=self.client_id, flow_id=flow_id ) @@ -497,15 +482,6 @@ def testHuntFlowExceptionsChecksClientAccessIfNotPartOfHunt(self): args=args, ) - args = api_flow.ApiListParsedFlowResultsArgs( - client_id=self.client_id, flow_id=flow_id - ) - self.CheckMethodIsAccessChecked( - self.router.ListParsedFlowResults, - "CheckClientAccess", - args=args, - ) - args = api_flow.ApiGetExportedFlowResultsArgs( client_id=self.client_id, flow_id=flow_id ) diff --git a/grr/server/grr_response_server/gui/api_call_router_without_checks.py b/grr/server/grr_response_server/gui/api_call_router_without_checks.py index 60bfb0b40..c8c10c177 100644 --- a/grr/server/grr_response_server/gui/api_call_router_without_checks.py +++ b/grr/server/grr_response_server/gui/api_call_router_without_checks.py @@ -184,20 +184,6 @@ def ListFlowRequests(self, args, context=None): def ListFlowResults(self, args, context=None): return api_flow.ApiListFlowResultsHandler() - def ListParsedFlowResults( - self, - args: api_flow.ApiListParsedFlowResultsArgs, - context: Optional[api_call_context.ApiCallContext] = None, - ) -> api_flow.ApiListParsedFlowResultsHandler: - return api_flow.ApiListParsedFlowResultsHandler() - - def ListFlowApplicableParsers( - self, - args: api_flow.ApiListFlowApplicableParsersArgs, - context: Optional[api_call_context.ApiCallContext] = None, - ) -> api_flow.ApiListFlowApplicableParsersHandler: - return api_flow.ApiListFlowApplicableParsersHandler() - def GetExportedFlowResults(self, args, context=None): return api_flow.ApiGetExportedFlowResultsHandler() diff --git a/grr/server/grr_response_server/gui/api_integration_tests/flow_test.py b/grr/server/grr_response_server/gui/api_integration_tests/flow_test.py index 348961172..909412a9d 100644 --- 
a/grr/server/grr_response_server/gui/api_integration_tests/flow_test.py +++ b/grr/server/grr_response_server/gui/api_integration_tests/flow_test.py @@ -4,24 +4,18 @@ import io import threading import time -from typing import Iterable import zipfile from absl import app from grr_api_client import errors as grr_api_errors from grr_response_core.lib import rdfvalue -from grr_response_core.lib.parsers import abstract as parser -from grr_response_core.lib.rdfvalues import artifacts as rdf_artifacts from grr_response_core.lib.rdfvalues import client as rdf_client -from grr_response_core.lib.rdfvalues import client_action as rdf_client_action from grr_response_core.lib.rdfvalues import client_fs as rdf_client_fs from grr_response_core.lib.rdfvalues import paths as rdf_paths -from grr_response_proto.api import flow_pb2 from grr_response_server import data_store from grr_response_server import flow_base from grr_response_server.databases import db -from grr_response_server.flows.general import collectors from grr_response_server.flows.general import processes from grr_response_server.gui import api_integration_test_lib from grr_response_server.rdfvalues import flow_objects as rdf_flow_objects @@ -29,7 +23,6 @@ from grr_response_server.rdfvalues import objects as rdf_objects from grr.test_lib import action_mocks from grr.test_lib import flow_test_lib -from grr.test_lib import parser_test_lib from grr.test_lib import test_lib from grr.test_lib import vfs_test_lib @@ -170,108 +163,6 @@ def testListResultsForListProcessesFlow(self): self.assertLen(results, 1) self.assertEqual(process.AsPrimitiveProto(), results[0].payload) - def testListParsedFlowResults(self): - client_id = self.SetupClient(0) - flow_id = "4815162342ABCDEF" - - flow = rdf_flow_objects.Flow() - flow.client_id = client_id - flow.flow_id = flow_id - flow.flow_class_name = collectors.ArtifactCollectorFlow.__name__ - flow.args = rdf_artifacts.ArtifactCollectorFlowArgs(apply_parsers=False) - flow.persistent_data = {"knowledge_base": rdf_client.KnowledgeBase()} - data_store.REL_DB.WriteFlowObject(mig_flow_objects.ToProtoFlow(flow)) - - result = rdf_flow_objects.FlowResult() - result.client_id = client_id - result.flow_id = flow_id - result.tag = "artifact:Echo" - - response = rdf_client_action.ExecuteResponse() - response.stderr = "Lorem ipsum.".encode("utf-8") - - result.payload = response - data_store.REL_DB.WriteFlowResults( - [mig_flow_objects.ToProtoFlowResult(result)] - ) - - response = rdf_client_action.ExecuteResponse() - response.stderr = "Dolor sit amet.".encode("utf-8") - - result.payload = response - data_store.REL_DB.WriteFlowResults( - [mig_flow_objects.ToProtoFlowResult(result)] - ) - - class StderrToStdoutParser( - parser.SingleResponseParser[rdf_client_action.ExecuteResponse] - ): - - supported_artifacts = ["Echo"] - - def ParseResponse( - self, - knowledge_base: rdf_client.KnowledgeBase, - response: rdf_client_action.ExecuteResponse, - ) -> Iterable[rdf_client_action.ExecuteResponse]: - del knowledge_base # Unused. 
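-        # This test-only parser copies each response's stderr into stdout, so -        # the parsed results are easy to tell apart from the raw responses -        # stored above.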
- - if not isinstance(response, rdf_client_action.ExecuteResponse): - raise TypeError(f"Unexpected response type: {type(response)}") - - parsed_response = rdf_client_action.ExecuteResponse() - parsed_response.stdout = response.stderr - - return [parsed_response] - - with parser_test_lib._ParserContext("StderrToStdout", StderrToStdoutParser): - results = self.api.Client(client_id).Flow(flow_id).ListParsedResults() - - stdouts = [result.payload.stdout.decode("utf-8") for result in results] - self.assertLen(stdouts, 2) - self.assertEqual(stdouts[0], "Lorem ipsum.") - self.assertEqual(stdouts[1], "Dolor sit amet.") - - def testListFlowApplicableParsers(self): - client_id = self.SetupClient(0) - flow_id = "4815162342ABCDEF" - - flow = rdf_flow_objects.Flow() - flow.client_id = client_id - flow.flow_id = flow_id - flow.flow_class_name = collectors.ArtifactCollectorFlow.__name__ - flow.args = rdf_artifacts.ArtifactCollectorFlowArgs(apply_parsers=False) - data_store.REL_DB.WriteFlowObject(mig_flow_objects.ToProtoFlow(flow)) - - result = rdf_flow_objects.FlowResult() - result.client_id = client_id - result.flow_id = flow_id - result.tag = "artifact:Fake" - result.payload = rdf_client_action.ExecuteResponse(stderr=b"foobar") - data_store.REL_DB.WriteFlowResults( - [mig_flow_objects.ToProtoFlowResult(result)] - ) - - class FakeParser(parser.SingleResponseParser[None]): - - supported_artifacts = ["Fake"] - - def ParseResponse( - self, - knowledge_base: rdf_client.KnowledgeBase, - response: rdfvalue.RDFValue, - ) -> Iterable[None]: - raise NotImplementedError() - - with parser_test_lib._ParserContext("Fake", FakeParser): - results = self.api.Client(client_id).Flow(flow_id).ListApplicableParsers() - - self.assertLen(results.parsers, 1) - - result = results.parsers[0] - self.assertEqual(result.name, "Fake") - self.assertEqual(result.type, flow_pb2.ApiParserDescriptor.SINGLE_RESPONSE) - def testWaitUntilDoneReturnsWhenFlowCompletes(self): client_id = self.SetupClient(0) diff --git a/grr/server/grr_response_server/gui/api_plugins/flow.py b/grr/server/grr_response_server/gui/api_plugins/flow.py index 0e03e3daa..9d21bffaf 100644 --- a/grr/server/grr_response_server/gui/api_plugins/flow.py +++ b/grr/server/grr_response_server/gui/api_plugins/flow.py @@ -5,22 +5,21 @@ import itertools import logging import re -from typing import Any, Callable, DefaultDict, Dict, Iterable, Iterator, List, Mapping, Optional, Sequence, Tuple, Type +from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, Sequence, Tuple, Type from grr_response_core import config -from grr_response_core.lib import parsers from grr_response_core.lib import rdfvalue from grr_response_core.lib import registry from grr_response_core.lib import utils from grr_response_core.lib.rdfvalues import flows as rdf_flows from grr_response_core.lib.rdfvalues import mig_protodict +from grr_response_core.lib.rdfvalues import mig_structs from grr_response_core.lib.rdfvalues import paths as rdf_paths from grr_response_core.lib.rdfvalues import structs as rdf_structs from grr_response_proto import flows_pb2 from grr_response_proto import output_plugin_pb2 from grr_response_proto.api import flow_pb2 from grr_response_server import access_control -from grr_response_server import artifact from grr_response_server import data_store from grr_response_server import data_store_utils from grr_response_server import flow @@ -29,7 +28,6 @@ from grr_response_server import notification from grr_response_server import output_plugin from 
grr_response_server.databases import db -from grr_response_server.flows.general import collectors from grr_response_server.gui import api_call_context from grr_response_server.gui import api_call_handler_base from grr_response_server.gui import api_call_handler_utils @@ -41,6 +39,7 @@ from grr_response_server.rdfvalues import flow_objects as rdf_flow_objects from grr_response_server.rdfvalues import flow_runner as rdf_flow_runner from grr_response_server.rdfvalues import mig_flow_objects +from grr_response_server.rdfvalues import mig_flow_runner from grr_response_server.rdfvalues import objects as rdf_objects @@ -313,6 +312,25 @@ def GetFlowDocumentation(flow_cls: Type[flow_base.FlowBase]) -> str: return flow_descriptor +def InitApiScheduledFlowFromScheduledFlow( + scheduled_flow: flows_pb2.ScheduledFlow, +) -> flow_pb2.ApiScheduledFlow: + """Creates an ApiScheduledFlow from a ScheduledFlow.""" + api_scheduled_flow = flow_pb2.ApiScheduledFlow() + protobuf_utils.CopyAttr( + scheduled_flow, api_scheduled_flow, "scheduled_flow_id" + ) + protobuf_utils.CopyAttr(scheduled_flow, api_scheduled_flow, "client_id") + protobuf_utils.CopyAttr(scheduled_flow, api_scheduled_flow, "creator") + protobuf_utils.CopyAttr(scheduled_flow, api_scheduled_flow, "flow_name") + protobuf_utils.CopyAttr(scheduled_flow, api_scheduled_flow, "create_time") + if scheduled_flow.HasField("flow_args"): + api_scheduled_flow.flow_args.CopyFrom(scheduled_flow.flow_args) + if scheduled_flow.HasField("runner_args"): + api_scheduled_flow.runner_args.CopyFrom(scheduled_flow.runner_args) + return api_scheduled_flow + + def _GetFlowClass( api_flow: flow_pb2.ApiFlow, ) -> Optional[Type[flow_base.FlowBase]]: @@ -599,187 +617,6 @@ def Handle(self, args, context=None): ) -class ApiListParsedFlowResultsArgs(rdf_structs.RDFProtoStruct): - """An RDF wrapper for the arguments of the method for parsing flow results.""" - - protobuf = flow_pb2.ApiListParsedFlowResultsArgs - rdf_deps = [ - client.ApiClientId, - ApiFlowId, - ] - - -class ApiListParsedFlowResultsResult(rdf_structs.RDFProtoStruct): - """An RDF wrapper for the results of the method for parsing flow results.""" - - protobuf = flow_pb2.ApiListParsedFlowResultsResult - rdf_deps = [ - ApiFlowResult, - ] - - -class ApiListParsedFlowResultsHandler(api_call_handler_base.ApiCallHandler): - """An API handler for the method for on-demand parsing of flow results.""" - - args_type = ApiListParsedFlowResultsArgs - result_type = ApiListParsedFlowResultsResult - - def Handle( - self, - args: ApiListParsedFlowResultsArgs, - context: Optional[api_call_context.ApiCallContext] = None, - ) -> ApiListParsedFlowResultsResult: - client_id = str(args.client_id) - flow_id = str(args.flow_id) - - flow_obj = data_store.REL_DB.ReadFlowObject(client_id, flow_id) - flow_obj = mig_flow_objects.ToRDFFlow(flow_obj) - if flow_obj.flow_class_name != collectors.ArtifactCollectorFlow.__name__: - message = "Not an artifact-collector flow: {}" - raise ValueError(message.format(flow_obj.flow_class_name)) - - if flow_obj.args.apply_parsers: - message = "Flow already parsed its results" - raise ValueError(message) - - flow_results = data_store.REL_DB.ReadFlowResults( - client_id=client_id, - flow_id=flow_id, - offset=args.offset, - count=args.count, - ) - flow_results = [mig_flow_objects.ToRDFFlowResult(r) for r in flow_results] - flow_results_by_artifact = _GroupFlowResultsByArtifact(flow_results) - - # We determine results collection timestamp as the maximum of timestamps of - # individual flow results. 
We cannot use just the flow timestamp for this, - # because flows can be modified, affecting the timestamp. We also don't want - # to use flow start time, because it can be too "early" to do parsing. - if flow_results: - flow_results_timestamp = max([_.timestamp for _ in flow_results]) - else: - flow_results_timestamp = None - - knowledge_base = flow_obj.persistent_data["knowledge_base"] - - result = ApiListParsedFlowResultsResult() - - for artifact_name, flow_results in flow_results_by_artifact.items(): - factory = parsers.ArtifactParserFactory(artifact_name) - - applicator = artifact.ParserApplicator( - factory, - client_id=client_id, - knowledge_base=knowledge_base, - timestamp=flow_results_timestamp, - ) - applicator.Apply([flow_result.payload for flow_result in flow_results]) - - for response in applicator.Responses(): - item = ApiFlowResult() - item.payload_type = response.__class__.__name__ - item.payload = response - item.tag = f"artifact:{artifact_name}" - result.items.Append(item) - - result.errors.Extend(map(str, applicator.Errors())) - - return result - - -class ApiParserDescriptor(rdf_structs.RDFProtoStruct): - """An RDF wrapper for parser descriptor protobuf.""" - - protobuf = flow_pb2.ApiParserDescriptor - rdf_deps = [] - - -class ApiListFlowApplicableParsersArgs(rdf_structs.RDFProtoStruct): - """An RDF wrapper for arguments of the method listing applicable parsers.""" - - protobuf = flow_pb2.ApiListFlowApplicableParsersArgs - rdf_deps = [ - client.ApiClientId, - ApiFlowId, - ] - - -class ApiListFlowApplicableParsersResult(rdf_structs.RDFProtoStruct): - """An RDF wrapper for result of the method listing applicable parsers.""" - - protobuf = flow_pb2.ApiListFlowApplicableParsersResult - rdf_deps = [ - ApiParserDescriptor, - ] - - -class ApiListFlowApplicableParsersHandler(api_call_handler_base.ApiCallHandler): - """An API handler for the method for listing applicable parsers.""" - - args_type = ApiListFlowApplicableParsersArgs - result_type = ApiListFlowApplicableParsersResult - - _FLOW_RESULTS_BATCH_SIZE = 5000 - - def Handle( - self, - args: ApiListFlowApplicableParsersArgs, - context: Optional[api_call_context.ApiCallContext] = None, - ) -> ApiListFlowApplicableParsersResult: - client_id = str(args.client_id) - flow_id = str(args.flow_id) - - flow_obj = data_store.REL_DB.ReadFlowObject(client_id, flow_id) - flow_obj = mig_flow_objects.ToRDFFlow(flow_obj) - if flow_obj.flow_class_name != collectors.ArtifactCollectorFlow.__name__: - message = "Not an artifact-collector flow: {}" - raise ValueError(message.format(flow_obj.flow_class_name)) - - if flow_obj.args.apply_parsers: - # The parsers were already applied, there is nothing applicable anymore.
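- # (Hence the empty result below rather than an error.)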
- return ApiListFlowApplicableParsersResult() - - parser_names_by_type: DefaultDict[int, List[str]] = collections.defaultdict( - list - ) - - flow_results_offset = 0 - while True: - flow_results = data_store.REL_DB.ReadFlowResults( - client_id=client_id, - flow_id=flow_id, - offset=flow_results_offset, - count=self._FLOW_RESULTS_BATCH_SIZE, - ) - flow_results = [mig_flow_objects.ToRDFFlowResult(r) for r in flow_results] - flow_results_offset += self._FLOW_RESULTS_BATCH_SIZE - - if not flow_results: - break - - for artifact_name in _GroupFlowResultsByArtifact(flow_results): - factory = parsers.ArtifactParserFactory(artifact_name) - parser_names_by_type[ApiParserDescriptor.Type.SINGLE_RESPONSE].extend( - factory.SingleResponseParserNames() - ) - parser_names_by_type[ApiParserDescriptor.Type.MULTI_RESPONSE].extend( - factory.MultiResponseParserNames() - ) - parser_names_by_type[ApiParserDescriptor.Type.SINGLE_FILE].extend( - factory.SingleFileParserNames() - ) - parser_names_by_type[ApiParserDescriptor.Type.MULTI_FILE].extend( - factory.MultiFileParserNames() - ) - - result = ApiListFlowApplicableParsersResult() - for tpe, parser_names in parser_names_by_type.items(): - for parser_name in parser_names: - result.parsers.append(ApiParserDescriptor(type=tpe, name=parser_name)) - - return result - - class ApiListFlowLogsArgs(rdf_structs.RDFProtoStruct): protobuf = flow_pb2.ApiListFlowLogsArgs rdf_deps = [ @@ -1421,13 +1258,16 @@ class ApiCreateFlowArgs(rdf_structs.RDFProtoStruct): def _SanitizeApiCreateFlowArgs( - args: ApiCreateFlowArgs, -) -> Tuple[Type[flow_base.FlowBase], rdf_flow_runner.FlowRunnerArgs]: + args: flow_pb2.ApiCreateFlowArgs, +) -> Tuple[Type[flow_base.FlowBase], flows_pb2.FlowRunnerArgs]: """Validates and sanitizes args for flow scheduling and starting.""" + if not args.client_id: raise ValueError("client_id must be provided") - runner_args = args.flow.runner_args.Copy() + runner_args = flows_pb2.FlowRunnerArgs() + runner_args.CopyFrom(args.flow.runner_args) + flow_name = args.flow.name if not flow_name: flow_name = runner_args.flow_name @@ -1440,15 +1280,15 @@ def _SanitizeApiCreateFlowArgs( # # TODO(user): Refactor the code to remove the HIDDEN label from # FlowRunnerArgs.output_plugins. 
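# The loop below reimplements the old ClearFieldsWithLabel call directly on # the proto: every HIDDEN-labelled runner-args field except output_plugins # gets cleared. Caveat (assumption): stock protobuf exposes # FieldDescriptor.label only as a numeric optional/repeated/required marker, # so the comparison against "HIDDEN" relies on GRR-specific semantic field # annotations.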
- runner_args.ClearFieldsWithLabel( - rdf_structs.SemanticDescriptor.Labels.HIDDEN, exceptions="output_plugins" - ) + for field_name, descriptor in runner_args.DESCRIPTOR.fields_by_name.items(): + if field_name == "output_plugins": + continue + if descriptor.label == "HIDDEN": + runner_args.ClearField(field_name) - if args.original_flow: - runner_args.original_flow = rdf_objects.FlowReference( - flow_id=str(args.original_flow.flow_id), - client_id=str(args.original_flow.client_id), - ) + if args.HasField("original_flow"): + runner_args.original_flow.flow_id = args.original_flow.flow_id + runner_args.original_flow.client_id = args.original_flow.client_id flow_cls = registry.FlowRegistry.FlowClassByName(flow_name) return flow_cls, runner_args @@ -1459,8 +1299,15 @@ class ApiCreateFlowHandler(api_call_handler_base.ApiCallHandler): args_type = ApiCreateFlowArgs result_type = ApiFlow + proto_args_type = flow_pb2.ApiCreateFlowArgs + proto_result_type = flow_pb2.ApiFlow - def Handle(self, args, context=None): + def Handle( + self, + args: flow_pb2.ApiCreateFlowArgs, + context: Optional[api_call_context.ApiCallContext] = None, + ) -> flow_pb2.ApiFlow: + assert context is not None flow_cls, runner_args = _SanitizeApiCreateFlowArgs(args) cpu_limit = None @@ -1470,21 +1317,22 @@ def Handle(self, args, context=None): if runner_args.HasField("network_bytes_limit"): network_bytes_limit = runner_args.network_bytes_limit + rdf_runner_args = mig_flow_runner.ToRDFFlowRunnerArgs(runner_args) + rdf_flow_args = mig_structs.ToRDFAnyValue(args.flow.args) flow_id = flow.StartFlow( - client_id=str(args.client_id), + client_id=args.client_id, cpu_limit=cpu_limit, creator=context.username, - flow_args=args.flow.args, + flow_args=rdf_flow_args.Unpack(flow_cls.args_type), flow_cls=flow_cls, network_bytes_limit=network_bytes_limit, - original_flow=runner_args.original_flow, - output_plugins=runner_args.output_plugins, + original_flow=rdf_runner_args.original_flow, + output_plugins=rdf_runner_args.output_plugins, ) flow_obj = data_store.REL_DB.ReadFlowObject(str(args.client_id), flow_id) res = InitApiFlowFromFlowObject(flow_obj) - res = ToRDFApiFlow(res) - res.context = None + res.ClearField("context") return res @@ -1672,42 +1520,34 @@ class ApiScheduledFlow(rdf_structs.RDFProtoStruct): rdfvalue.RDFDatetime, ] - @classmethod - def FromScheduledFlow(cls, scheduled_flow): - return cls( - scheduled_flow_id=scheduled_flow.scheduled_flow_id, - client_id=scheduled_flow.client_id, - creator=scheduled_flow.creator, - flow_name=scheduled_flow.flow_name, - flow_args=scheduled_flow.flow_args, - runner_args=scheduled_flow.runner_args, - create_time=scheduled_flow.create_time, - ) - class ApiScheduleFlowHandler(api_call_handler_base.ApiCallHandler): """Schedules a flow on a client, to be started upon approval grant.""" args_type = ApiCreateFlowArgs result_type = ApiScheduledFlow + proto_args_type = flow_pb2.ApiCreateFlowArgs + proto_result_type = flow_pb2.ApiScheduledFlow - def Handle(self, args, context=None): - flow_cls, runner_args = _SanitizeApiCreateFlowArgs(args) + def Handle( + self, + args: flow_pb2.ApiCreateFlowArgs, + context: Optional[api_call_context.ApiCallContext] = None, + ) -> flow_pb2.ApiScheduledFlow: + assert context is not None - args.flow.args.Validate() + flow_cls, runner_args = _SanitizeApiCreateFlowArgs(args) # TODO: Handle the case where the requesting user already has # approval to start the flow on the client.
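# Note: flow.ScheduleFlow only persists the flow specification; per the # class docstring above, the scheduled flow is started once approval for # the client is granted.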
- scheduled_flow = flow.ScheduleFlow( - client_id=str(args.client_id), + client_id=args.client_id, creator=context.username, flow_name=flow_cls.__name__, flow_args=args.flow.args, runner_args=runner_args, ) - - return ApiScheduledFlow.FromScheduledFlow(scheduled_flow) + return InitApiScheduledFlowFromScheduledFlow(scheduled_flow) class ApiListScheduledFlowsArgs(rdf_structs.RDFProtoStruct): @@ -1729,15 +1569,21 @@ class ApiListScheduledFlowsHandler(api_call_handler_base.ApiCallHandler): args_type = ApiListScheduledFlowsArgs result_type = ApiListScheduledFlowsResult + proto_args_type = flow_pb2.ApiListScheduledFlowsArgs + proto_result_type = flow_pb2.ApiListScheduledFlowsResult - def Handle(self, args, context=None): + def Handle( + self, + args: flow_pb2.ApiListScheduledFlowsArgs, + context: Optional[api_call_context.ApiCallContext] = None, + ) -> flow_pb2.ApiListScheduledFlowsResult: results = flow.ListScheduledFlows( client_id=args.client_id, creator=args.creator ) results = sorted(results, key=lambda sf: sf.create_time) - results = [ApiScheduledFlow.FromScheduledFlow(sf) for sf in results] + results = [InitApiScheduledFlowFromScheduledFlow(sf) for sf in results] - return ApiListScheduledFlowsResult(scheduled_flows=results) + return flow_pb2.ApiListScheduledFlowsResult(scheduled_flows=results) class ApiUnscheduleFlowArgs(rdf_structs.RDFProtoStruct): @@ -1759,47 +1605,27 @@ class ApiUnscheduleFlowHandler(api_call_handler_base.ApiCallHandler): args_type = ApiUnscheduleFlowArgs result_type = ApiUnscheduleFlowResult + proto_args_type = flow_pb2.ApiUnscheduleFlowArgs + proto_result_type = flow_pb2.ApiUnscheduleFlowResult + + def Handle( + self, + args: flow_pb2.ApiUnscheduleFlowArgs, + context: Optional[api_call_context.ApiCallContext] = None, + ) -> flow_pb2.ApiUnscheduleFlowResult: + assert context is not None - def Handle(self, args, context=None): flow.UnscheduleFlow( client_id=args.client_id, creator=context.username, scheduled_flow_id=args.scheduled_flow_id, ) - return ApiUnscheduleFlowResult() + return flow_pb2.ApiUnscheduleFlowResult() _TAG_ARTIFACT_NAME = re.compile(r"artifact:(?P<name>\w+)") -def _GroupFlowResultsByArtifact( - results: Iterable[rdf_flow_objects.FlowResult], -) -> Mapping[str, rdf_flow_objects.FlowResult]: - """Groups flow results by the artifact that caused their collection. - - Note that flow results that did not originate from any artifact are going to - be ignored and will not be included in the output. - - Args: - results: Flow results to group. - - Returns: - A dictionary mapping artifact names to flow result sequences. - """ - flow_results_by_artifact = {} - - for result in results: - artifact_name_match = _TAG_ARTIFACT_NAME.match(result.tag) - if artifact_name_match is None: - continue - - artifact_name = artifact_name_match["name"] - artifact_results = flow_results_by_artifact.setdefault(artifact_name, []) - artifact_results.append(result) - - return flow_results_by_artifact - - # Copy of the migration function in mig_flow to avoid circular import.
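# The round trip below goes through the serialized wire format, which stays # lossless as long as the proto and RDF definitions of ApiFlow agree on # field numbers.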
def ToRDFApiFlow(proto: flow_pb2.ApiFlow) -> ApiFlow: return ApiFlow.FromSerializedBytes(proto.SerializeToString()) diff --git a/grr/server/grr_response_server/gui/api_plugins/flow_test.py b/grr/server/grr_response_server/gui/api_plugins/flow_test.py index a88cc96b3..1536ff267 100644 --- a/grr/server/grr_response_server/gui/api_plugins/flow_test.py +++ b/grr/server/grr_response_server/gui/api_plugins/flow_test.py @@ -5,7 +5,6 @@ import os import random import tarfile -from typing import IO, Iterable, Iterator from unittest import mock import zipfile @@ -15,28 +14,19 @@ from grr_response_core.lib import rdfvalue from grr_response_core.lib import utils -from grr_response_core.lib.parsers import abstract as abstract_parser -from grr_response_core.lib.rdfvalues import artifacts as rdf_artifacts -from grr_response_core.lib.rdfvalues import client as rdf_client -from grr_response_core.lib.rdfvalues import client_action as rdf_client_action from grr_response_core.lib.rdfvalues import file_finder as rdf_file_finder from grr_response_core.lib.rdfvalues import paths as rdf_paths from grr_response_core.lib.rdfvalues import test_base as rdf_test_base -from grr_response_core.lib.util import precondition from grr_response_core.lib.util import temp from grr_response_proto import flows_pb2 from grr_response_proto import objects_pb2 from grr_response_proto.api import flow_pb2 -from grr_response_server import artifact_registry from grr_response_server import data_store -from grr_response_server import file_store from grr_response_server import flow from grr_response_server import flow_base -from grr_response_server import flow_responses from grr_response_server.databases import db as abstract_db from grr_response_server.databases import db_test_utils from grr_response_server.flows import file -from grr_response_server.flows.general import collectors from grr_response_server.flows.general import file_finder from grr_response_server.flows.general import processes from grr_response_server.gui import api_call_context @@ -46,14 +36,10 @@ from grr_response_server.gui.api_plugins import mig_flow from grr_response_server.output_plugins import test_plugins from grr_response_server.rdfvalues import flow_objects as rdf_flow_objects -from grr_response_server.rdfvalues import flow_runner as rdf_flow_runner -from grr_response_server.rdfvalues import mig_flow_objects from grr.test_lib import action_mocks from grr.test_lib import db_test_lib -from grr.test_lib import fleetspeak_test_lib from grr.test_lib import flow_test_lib from grr.test_lib import hunt_test_lib -from grr.test_lib import parser_test_lib from grr.test_lib import test_lib @@ -549,560 +535,6 @@ def testReturnsNothingWhenFilteringByNonExistingTag(self): self.assertEmpty(result.items) -class ApiListFlowApplicableParsersHandler(absltest.TestCase): - - class FakeSingleResponseParser(abstract_parser.SingleResponseParser[None]): - - supported_artifacts = ["Fake"] - - def ParseResponse( - self, - knowledge_base: rdf_client.KnowledgeBase, - response: rdfvalue.RDFValue, - ) -> Iterator[None]: - raise NotImplementedError() - - class FakeMultiResponseParser(abstract_parser.MultiResponseParser[None]): - - supported_artifacts = ["Fake"] - - def ParseResponses( - self, - knowledge_base: rdf_client.KnowledgeBase, - responses: Iterable[rdfvalue.RDFValue], - ) -> Iterator[None]: - raise NotImplementedError() - - class FakeSingleFileParser(abstract_parser.SingleFileParser[None]): - - supported_artifacts = ["Fake"] - - def ParseFile( - self, - knowledge_base: 
rdf_client.KnowledgeBase, - pathspec: rdf_paths.PathSpec, - filedesc: IO[bytes], - ) -> Iterator[None]: - raise NotImplementedError() - - class FakeMultiFileParser(abstract_parser.MultiFileParser[None]): - - supported_artifacts = ["Fake"] - - def ParseFiles( - self, - knowledge_base: rdf_client.KnowledgeBase, - pathspecs: Iterable[rdf_paths.PathSpec], - filedescs: Iterable[IO[bytes]], - ) -> Iterator[None]: - raise NotImplementedError() - - def setUp(self): - super().setUp() - self.handler = flow_plugin.ApiListFlowApplicableParsersHandler() - - @db_test_lib.WithDatabase - def testIncorrectFlowType(self, db: abstract_db.Database) -> None: - client_id = db_test_utils.InitializeClient(db) - flow_id = "4815162342ABCDEF" - - flow_obj = rdf_flow_objects.Flow() - flow_obj.client_id = client_id - flow_obj.flow_id = flow_id - flow_obj.flow_class_name = "NotArtifactCollector" - db.WriteFlowObject(mig_flow_objects.ToProtoFlow(flow_obj)) - - args = flow_plugin.ApiListFlowApplicableParsersArgs() - args.client_id = client_id - args.flow_id = flow_id - - with self.assertRaisesRegex(ValueError, "Not an artifact-collector flow"): - self.handler.Handle(args) - - @parser_test_lib.WithParser("FakeSingleResponse", FakeSingleResponseParser) - @parser_test_lib.WithParser("FakeMultiResponse", FakeMultiResponseParser) - @parser_test_lib.WithParser("FakeSingleFile", FakeSingleFileParser) - @parser_test_lib.WithParser("FakeMultiFile", FakeMultiFileParser) - @db_test_lib.WithDatabase - def testAlreadyAppliedParsers(self, db: abstract_db.Database) -> None: - client_id = db_test_utils.InitializeClient(db) - flow_id = "4815162342ABCDEF" - - flow_obj = rdf_flow_objects.Flow() - flow_obj.client_id = client_id - flow_obj.flow_id = flow_id - flow_obj.flow_class_name = collectors.ArtifactCollectorFlow.__name__ - flow_obj.args = rdf_artifacts.ArtifactCollectorFlowArgs(apply_parsers=True) - db.WriteFlowObject(mig_flow_objects.ToProtoFlow(flow_obj)) - - flow_result = rdf_flow_objects.FlowResult() - flow_result.client_id = client_id - flow_result.flow_id = flow_id - flow_result.tag = "artifact:Fake" - db.WriteFlowResults([mig_flow_objects.ToProtoFlowResult(flow_result)]) - - args = flow_plugin.ApiListFlowApplicableParsersArgs() - args.client_id = client_id - args.flow_id = flow_id - - result = self.handler.Handle(args) - self.assertEmpty(result.parsers) - - @parser_test_lib.WithParser("FakeSingleResponse", FakeSingleResponseParser) - @parser_test_lib.WithParser("FakeMultiResponse", FakeMultiResponseParser) - @parser_test_lib.WithParser("FakeSingleFile", FakeSingleFileParser) - @parser_test_lib.WithParser("FakeMultiFile", FakeMultiFileParser) - @db_test_lib.WithDatabase - def testNotAppliedParsers(self, db: abstract_db.Database) -> None: - client_id = db_test_utils.InitializeClient(db) - flow_id = "4815162342ABCDEF" - - flow_obj = rdf_flow_objects.Flow() - flow_obj.client_id = client_id - flow_obj.flow_id = flow_id - flow_obj.flow_class_name = collectors.ArtifactCollectorFlow.__name__ - flow_obj.args = rdf_artifacts.ArtifactCollectorFlowArgs(apply_parsers=False) - db.WriteFlowObject(mig_flow_objects.ToProtoFlow(flow_obj)) - - flow_result = rdf_flow_objects.FlowResult() - flow_result.client_id = client_id - flow_result.flow_id = flow_id - flow_result.tag = "artifact:Fake" - flow_result.payload = rdfvalue.RDFString("foobar") - db.WriteFlowResults([mig_flow_objects.ToProtoFlowResult(flow_result)]) - - args = flow_plugin.ApiListFlowApplicableParsersArgs() - args.client_id = client_id - args.flow_id = flow_id - - result = 
self.handler.Handle(args) - self.assertCountEqual( - result.parsers, - [ - flow_plugin.ApiParserDescriptor( - type=flow_plugin.ApiParserDescriptor.Type.SINGLE_RESPONSE, - name="FakeSingleResponse", - ), - flow_plugin.ApiParserDescriptor( - type=flow_plugin.ApiParserDescriptor.Type.MULTI_RESPONSE, - name="FakeMultiResponse", - ), - flow_plugin.ApiParserDescriptor( - type=flow_plugin.ApiParserDescriptor.Type.SINGLE_FILE, - name="FakeSingleFile", - ), - flow_plugin.ApiParserDescriptor( - type=flow_plugin.ApiParserDescriptor.Type.MULTI_FILE, - name="FakeMultiFile", - ), - ], - ) - - -class ApiListParsedFlowResultsHandlerTest(absltest.TestCase): - - ECHO1337_ARTIFACT_SOURCE = rdf_artifacts.ArtifactSource( - type=rdf_artifacts.ArtifactSource.SourceType.COMMAND, - attributes={ - "cmd": "/bin/echo", - "args": ["1337"], - }, - ) - - ECHO1337_ARTIFACT = rdf_artifacts.Artifact( - name="FakeArtifact", - doc="Lorem ipsum.", - sources=[ECHO1337_ARTIFACT_SOURCE], - ) - - class FakeExecuteCommand(action_mocks.ActionMock): - - def ExecuteCommand( - self, - args: rdf_client_action.ExecuteRequest, - ) -> Iterable[rdf_client_action.ExecuteResponse]: - if args.cmd != "/bin/echo": - raise ValueError(f"Unsupported command: {args.cmd}") - - stdout = " ".join(args.args).encode("utf-8") - return [rdf_client_action.ExecuteResponse(stdout=stdout)] - - def setUp(self): - super().setUp() - self.handler = flow_plugin.ApiListParsedFlowResultsHandler() - - @db_test_lib.WithDatabase - def testValidatesFlowName(self, db: abstract_db.Database): - context = _CreateContext(db) - - class FakeFlow(flow_base.FlowBase): - - def Start(self): - self.CallState("End") - - def End(self, responses: flow_responses.Responses) -> None: - del responses # Unused. - - client_id = db_test_utils.InitializeClient(db) - flow_id = flow_test_lib.TestFlowHelper( - FakeFlow.__name__, client_id=client_id, creator=context.username - ) - - flow_test_lib.FinishAllFlowsOnClient(client_id) - - args = flow_plugin.ApiListParsedFlowResultsArgs() - args.client_id = client_id - args.flow_id = flow_id - - with self.assertRaisesRegex(ValueError, "artifact-collector"): - self.handler.Handle(args, context=context) - - @db_test_lib.WithDatabase - @fleetspeak_test_lib.WithFleetspeakConnector - def testValidatesParsersWereNotApplied(self, db: abstract_db.Database, _): - context = _CreateContext(db) - - client_id = db_test_utils.InitializeClient(db) - - with mock.patch.object( - artifact_registry, "REGISTRY", artifact_registry.ArtifactRegistry() - ) as registry: - registry.RegisterArtifact(self.ECHO1337_ARTIFACT) - - flow_args = rdf_artifacts.ArtifactCollectorFlowArgs() - flow_args.artifact_list = [self.ECHO1337_ARTIFACT.name] - flow_args.apply_parsers = True - - flow_id = flow_test_lib.TestFlowHelper( - collectors.ArtifactCollectorFlow.__name__, - self.FakeExecuteCommand(), - client_id=client_id, - args=flow_args, - creator=context.username, - ) - - flow_test_lib.FinishAllFlowsOnClient(client_id) - - args = flow_plugin.ApiListParsedFlowResultsArgs() - args.client_id = client_id - args.flow_id = flow_id - - with self.assertRaisesRegex(ValueError, "already parsed"): - self.handler.Handle(args, context=context) - - @db_test_lib.WithDatabase - @fleetspeak_test_lib.WithFleetspeakConnector - def testParsesArtifactCollectionResults(self, db: abstract_db.Database, _): - context = _CreateContext(db) - - with mock.patch.object( - artifact_registry, "REGISTRY", artifact_registry.ArtifactRegistry() - ) as registry: - registry.RegisterArtifact(self.ECHO1337_ARTIFACT) - - flow_args 
= rdf_artifacts.ArtifactCollectorFlowArgs() - flow_args.artifact_list = [self.ECHO1337_ARTIFACT.name] - flow_args.apply_parsers = False - - client_id = db_test_utils.InitializeClient(db) - flow_id = flow_test_lib.TestFlowHelper( - collectors.ArtifactCollectorFlow.__name__, - self.FakeExecuteCommand(), - client_id=client_id, - args=flow_args, - creator=context.username, - ) - - flow_test_lib.FinishAllFlowsOnClient(client_id) - - class FakeParser( - abstract_parser.SingleResponseParser[rdf_client_action.ExecuteResponse], - ): - - supported_artifacts = [self.ECHO1337_ARTIFACT.name] - - def ParseResponse( - self, - knowledge_base: rdf_client.KnowledgeBase, - response: rdf_client_action.ExecuteResponse, - ) -> Iterable[rdf_client_action.ExecuteResponse]: - precondition.AssertType(response, rdf_client_action.ExecuteResponse) - - parsed_response = rdf_client_action.ExecuteResponse() - parsed_response.stdout = response.stdout - parsed_response.stderr = b"4815162342" - return [parsed_response] - - with parser_test_lib._ParserContext("Fake", FakeParser): - args = flow_plugin.ApiListParsedFlowResultsArgs( - client_id=client_id, flow_id=flow_id, offset=0, count=1024 - ) - - result = self.handler.Handle(args, context=context) - - self.assertEmpty(result.errors) - self.assertLen(result.items, 1) - - response = result.items[0].payload - self.assertIsInstance(response, rdf_client_action.ExecuteResponse) - self.assertEqual(response.stdout, b"1337") - self.assertEqual(response.stderr, b"4815162342") - - @db_test_lib.WithDatabase - @fleetspeak_test_lib.WithFleetspeakConnector - def testReportsArtifactCollectionErrors(self, db: abstract_db.Database, _): - context = _CreateContext(db) - - with mock.patch.object( - artifact_registry, "REGISTRY", artifact_registry.ArtifactRegistry() - ) as registry: - registry.RegisterArtifact(self.ECHO1337_ARTIFACT) - - flow_args = rdf_artifacts.ArtifactCollectorFlowArgs() - flow_args.artifact_list = [self.ECHO1337_ARTIFACT.name] - flow_args.apply_parsers = False - - client_id = db_test_utils.InitializeClient(db) - flow_id = flow_test_lib.TestFlowHelper( - collectors.ArtifactCollectorFlow.__name__, - self.FakeExecuteCommand(), - client_id=client_id, - args=flow_args, - creator=context.username, - ) - - flow_test_lib.FinishAllFlowsOnClient(client_id) - - class FakeParser( - abstract_parser.SingleResponseParser[rdf_client_action.ExecuteResponse], - ): - - supported_artifacts = [self.ECHO1337_ARTIFACT.name] - - def ParseResponse( - self, - knowledge_base: rdf_client.KnowledgeBase, - response: rdf_client_action.ExecuteResponse, - ) -> Iterable[rdf_client_action.ExecuteResponse]: - del knowledge_base, response # Unused. - raise abstract_parser.ParseError("Lorem ipsum.") - - with parser_test_lib._ParserContext("Fake", FakeParser): - args = flow_plugin.ApiListParsedFlowResultsArgs( - client_id=client_id, flow_id=flow_id, offset=0, count=1024 - ) - - result = self.handler.Handle(args, context=context) - - self.assertEmpty(result.items) - self.assertLen(result.errors, 1) - self.assertEqual(result.errors[0], "Lorem ipsum.") - - @db_test_lib.WithDatabase - @fleetspeak_test_lib.WithFleetspeakConnector - def testUsesKnowledgebaseFromFlow(self, db: abstract_db.Database, _): - context = _CreateContext(db) - - client_id = db_test_utils.InitializeClient(db) - - # This is the snapshot that is visible to the flow and should be used for - # parsing results. 
- snapshot = objects_pb2.ClientSnapshot() - snapshot.client_id = client_id - snapshot.knowledge_base.os = "redox" - db.WriteClientSnapshot(snapshot) - - with mock.patch.object( - artifact_registry, "REGISTRY", artifact_registry.ArtifactRegistry() - ) as registry: - registry.RegisterArtifact(self.ECHO1337_ARTIFACT) - - flow_args = rdf_artifacts.ArtifactCollectorFlowArgs() - flow_args.artifact_list = [self.ECHO1337_ARTIFACT.name] - flow_args.apply_parsers = False - - flow_id = flow_test_lib.TestFlowHelper( - collectors.ArtifactCollectorFlow.__name__, - self.FakeExecuteCommand(), - client_id=client_id, - args=flow_args, - creator=context.username, - ) - - class FakeParser( - abstract_parser.SingleResponseParser[rdf_client_action.ExecuteResponse], - ): - - supported_artifacts = [self.ECHO1337_ARTIFACT.name] - - def ParseResponse( - self, - knowledge_base: rdf_client.KnowledgeBase, - response: rdf_client_action.ExecuteResponse, - ) -> Iterable[rdf_client_action.ExecuteResponse]: - precondition.AssertType(response, rdf_client_action.ExecuteResponse) - - parsed_response = rdf_client_action.ExecuteResponse() - parsed_response.stdout = response.stdout - parsed_response.stderr = knowledge_base.os.encode("utf-8") - return [parsed_response] - - # This is a snapshot written to the database after the responses were - # collected, so this should not be used for parsing. - snapshot = objects_pb2.ClientSnapshot() - snapshot.client_id = client_id - snapshot.knowledge_base.os = "linux" - db.WriteClientSnapshot(snapshot) - - with parser_test_lib._ParserContext("Fake", FakeParser): - args = flow_plugin.ApiListParsedFlowResultsArgs( - client_id=client_id, flow_id=flow_id, offset=0, count=1024 - ) - - result = self.handler.Handle(args, context=context) - - self.assertEmpty(result.errors) - self.assertLen(result.items, 1) - - response = result.items[0].payload - self.assertIsInstance(response, rdf_client_action.ExecuteResponse) - self.assertEqual(response.stdout, b"1337") - self.assertEqual(response.stderr.decode("utf-8"), "redox") - - @db_test_lib.WithDatabase - @fleetspeak_test_lib.WithFleetspeakConnector - def testUsesCollectionTimeFiles(self, db: abstract_db.Database, _): - context = _CreateContext(db) - client_id = db_test_utils.InitializeClient(db) - - snapshot = objects_pb2.ClientSnapshot() - snapshot.client_id = client_id - snapshot.knowledge_base.os = "redox" - db.WriteClientSnapshot(snapshot) - - with temp.AutoTempFilePath() as temp_filepath: - fake_artifact_source = rdf_artifacts.ArtifactSource( - type=rdf_artifacts.ArtifactSource.SourceType.FILE, - attributes={ - "paths": [temp_filepath], - }, - ) - - fake_artifact = rdf_artifacts.Artifact( - name="FakeArtifact", - doc="Lorem ipsum.", - sources=[fake_artifact_source], - ) - - flow_args = rdf_artifacts.ArtifactCollectorFlowArgs() - flow_args.artifact_list = [fake_artifact.name] - flow_args.apply_parsers = False - - with io.open(temp_filepath, mode="wb") as temp_filedesc: - temp_filedesc.write(b"OLD") - - with mock.patch.object( - artifact_registry, "REGISTRY", artifact_registry.ArtifactRegistry() - ) as registry: - registry.RegisterArtifact(fake_artifact) - - # First, we run the artifact collector to collect the old file and save - # the flow id to parse the results later. 
- flow_id = flow_test_lib.TestFlowHelper( - collectors.ArtifactCollectorFlow.__name__, - action_mocks.FileFinderClientMock(), - client_id=client_id, - args=flow_args, - creator=context.username, - ) - - flow_test_lib.FinishAllFlowsOnClient(client_id) - - with io.open(temp_filepath, mode="wb") as temp_filedesc: - temp_filedesc.write(b"NEW") - - with mock.patch.object( - artifact_registry, "REGISTRY", artifact_registry.ArtifactRegistry() - ) as registry: - registry.RegisterArtifact(fake_artifact) - - # Now, we run the artifact collector again to collect the new file to - # update to this version on the server. The parsing should be performed - # against the previous flow. - flow_test_lib.TestFlowHelper( - collectors.ArtifactCollectorFlow.__name__, - action_mocks.FileFinderClientMock(), - client_id=client_id, - args=flow_args, - creator=context.username, - ) - - flow_test_lib.FinishAllFlowsOnClient(client_id) - - class FakeFileParser(abstract_parser.SingleFileParser[rdfvalue.RDFBytes]): - - supported_artifacts = [fake_artifact.name] - - def ParseFile( - self, - knowledge_base: rdf_client.KnowledgeBase, - pathspec: rdf_paths.PathSpec, - filedesc: file_store.BlobStream, - ) -> Iterable[rdfvalue.RDFBytes]: - del knowledge_base, pathspec # Unused. - return [rdfvalue.RDFBytes(filedesc.Read())] - - with parser_test_lib._ParserContext("FakeFile", FakeFileParser): - args = flow_plugin.ApiListParsedFlowResultsArgs( - client_id=client_id, flow_id=flow_id, offset=0, count=1024 - ) - - result = self.handler.Handle(args, context=context) - - self.assertEmpty(result.errors) - self.assertLen(result.items, 1) - - response = result.items[0].payload - self.assertEqual(response, b"OLD") - - @db_test_lib.WithDatabase - @fleetspeak_test_lib.WithFleetspeakConnector - def testEmptyResults(self, db: abstract_db.Database, _): - context = _CreateContext(db) - client_id = db_test_utils.InitializeClient(db) - - fake_artifact = rdf_artifacts.Artifact( - name="FakeArtifact", doc="Lorem ipsum.", sources=[] - ) - - with mock.patch.object( - artifact_registry, "REGISTRY", artifact_registry.ArtifactRegistry() - ) as registry: - registry.RegisterArtifact(fake_artifact) - - flow_args = rdf_artifacts.ArtifactCollectorFlowArgs() - flow_args.artifact_list = [fake_artifact.name] - flow_args.apply_parsers = False - - flow_id = flow_test_lib.TestFlowHelper( - collectors.ArtifactCollectorFlow.__name__, - self.FakeExecuteCommand(), - client_id=client_id, - args=flow_args, - creator=context.username, - ) - - flow_test_lib.FinishAllFlowsOnClient(client_id) - - args = flow_plugin.ApiListParsedFlowResultsArgs( - client_id=client_id, flow_id=flow_id, offset=0, count=1024 - ) - - result = self.handler.Handle(args, context=context) - self.assertEmpty(result.errors) - self.assertEmpty(result.items) - - def _CreateContext(db: abstract_db.Database) -> api_call_context.ApiCallContext: username = "".join(random.choice("abcdef") for _ in range(8)) db.WriteGRRUser(username) @@ -1147,21 +579,20 @@ def testScheduleFlow(self, db: abstract_db.Database): client_id = db_test_utils.InitializeClient(db) handler = flow_plugin.ApiScheduleFlowHandler() - args = flow_plugin.ApiCreateFlowArgs( - client_id=client_id, - flow=flow_plugin.ApiFlow( - name=file.CollectFilesByKnownPath.__name__, - args=rdf_file_finder.CollectFilesByKnownPathArgs(paths=["/foo"]), - runner_args=rdf_flow_runner.FlowRunnerArgs(cpu_limit=60), - ), - ) - + args = flow_pb2.ApiCreateFlowArgs() + args.client_id = client_id + args.flow.name = file.CollectFilesByKnownPath.__name__ + 
args.flow.args.Pack(flows_pb2.CollectFilesByKnownPathArgs(paths=["/foo"])) + args.flow.runner_args.CopyFrom(flows_pb2.FlowRunnerArgs(cpu_limit=60)) sf = handler.Handle(args, context=context) + self.assertEqual(sf.client_id, client_id) self.assertEqual(sf.creator, context.username) self.assertNotEmpty(sf.scheduled_flow_id) self.assertEqual(sf.flow_name, file.CollectFilesByKnownPath.__name__) - self.assertEqual(sf.flow_args.paths, ["/foo"]) + flow_args = flows_pb2.CollectFilesByKnownPathArgs() + sf.flow_args.Unpack(flow_args) + self.assertEqual(flow_args.paths, ["/foo"]) self.assertEqual(sf.runner_args.cpu_limit, 60) @db_test_lib.WithDatabase @@ -1171,53 +602,34 @@ def testListScheduledFlows(self, db: abstract_db.Database): client_id2 = db_test_utils.InitializeClient(db) handler = flow_plugin.ApiScheduleFlowHandler() - sf1 = handler.Handle( - flow_plugin.ApiCreateFlowArgs( - client_id=client_id1, - flow=flow_plugin.ApiFlow( - name=file.CollectFilesByKnownPath.__name__, - args=rdf_file_finder.CollectFilesByKnownPathArgs( - paths=["/foo"] - ), - runner_args=rdf_flow_runner.FlowRunnerArgs(cpu_limit=60), - ), - ), - context=context, - ) - sf2 = handler.Handle( - flow_plugin.ApiCreateFlowArgs( - client_id=client_id1, - flow=flow_plugin.ApiFlow( - name=file.CollectFilesByKnownPath.__name__, - args=rdf_file_finder.CollectFilesByKnownPathArgs( - paths=["/foo"] - ), - runner_args=rdf_flow_runner.FlowRunnerArgs(cpu_limit=60), - ), - ), - context=context, - ) - handler.Handle( - flow_plugin.ApiCreateFlowArgs( - client_id=client_id2, - flow=flow_plugin.ApiFlow( - name=file.CollectFilesByKnownPath.__name__, - args=rdf_file_finder.CollectFilesByKnownPathArgs( - paths=["/foo"] - ), - runner_args=rdf_flow_runner.FlowRunnerArgs(cpu_limit=60), - ), - ), - context=context, - ) + args = flow_pb2.ApiCreateFlowArgs() + args.client_id = client_id1 + args.flow.name = file.CollectFilesByKnownPath.__name__ + args.flow.args.Pack(flows_pb2.CollectFilesByKnownPathArgs(paths=["/foo"])) + args.flow.runner_args.CopyFrom(flows_pb2.FlowRunnerArgs(cpu_limit=60)) + sf1 = handler.Handle(args, context=context) + + args = flow_pb2.ApiCreateFlowArgs() + args.client_id = client_id1 + args.flow.name = file.CollectFilesByKnownPath.__name__ + args.flow.args.Pack(flows_pb2.CollectFilesByKnownPathArgs(paths=["/foo"])) + args.flow.runner_args.CopyFrom(flows_pb2.FlowRunnerArgs(cpu_limit=60)) + sf2 = handler.Handle(args, context=context) + + args = flow_pb2.ApiCreateFlowArgs() + args.client_id = client_id2 + args.flow.name = file.CollectFilesByKnownPath.__name__ + args.flow.args.Pack(flows_pb2.CollectFilesByKnownPathArgs(paths=["/foo"])) + args.flow.runner_args.CopyFrom(flows_pb2.FlowRunnerArgs(cpu_limit=60)) + handler.Handle(args, context=context) handler = flow_plugin.ApiListScheduledFlowsHandler() - args = flow_plugin.ApiListScheduledFlowsArgs( + args = flow_pb2.ApiListScheduledFlowsArgs( client_id=client_id1, creator=context.username ) results = handler.Handle(args, context=context) - self.assertEqual(results.scheduled_flows, [sf1, sf2]) + self.assertCountEqual(results.scheduled_flows, [sf1, sf2]) @db_test_lib.WithDatabase def testUnscheduleFlowRemovesScheduledFlow(self, db: abstract_db.Database): @@ -1225,46 +637,33 @@ def testUnscheduleFlowRemovesScheduledFlow(self, db: abstract_db.Database): client_id = db_test_utils.InitializeClient(db) handler = flow_plugin.ApiScheduleFlowHandler() - sf1 = handler.Handle( - flow_plugin.ApiCreateFlowArgs( - client_id=client_id, - flow=flow_plugin.ApiFlow( - 
name=file.CollectFilesByKnownPath.__name__, - args=rdf_file_finder.CollectFilesByKnownPathArgs( - paths=["/foo"] - ), - runner_args=rdf_flow_runner.FlowRunnerArgs(cpu_limit=60), - ), - ), - context=context, - ) - sf2 = handler.Handle( - flow_plugin.ApiCreateFlowArgs( - client_id=client_id, - flow=flow_plugin.ApiFlow( - name=file.CollectFilesByKnownPath.__name__, - args=rdf_file_finder.CollectFilesByKnownPathArgs( - paths=["/foo"] - ), - runner_args=rdf_flow_runner.FlowRunnerArgs(cpu_limit=60), - ), - ), - context=context, - ) + args = flow_pb2.ApiCreateFlowArgs() + args.client_id = client_id + args.flow.name = file.CollectFilesByKnownPath.__name__ + args.flow.args.Pack(flows_pb2.CollectFilesByKnownPathArgs(paths=["/foo"])) + args.flow.runner_args.CopyFrom(flows_pb2.FlowRunnerArgs(cpu_limit=60)) + sf1 = handler.Handle(args, context=context) + + args = flow_pb2.ApiCreateFlowArgs() + args.client_id = client_id + args.flow.name = file.CollectFilesByKnownPath.__name__ + args.flow.args.Pack(flows_pb2.CollectFilesByKnownPathArgs(paths=["/foo"])) + args.flow.runner_args.CopyFrom(flows_pb2.FlowRunnerArgs(cpu_limit=60)) + sf2 = handler.Handle(args, context=context) handler = flow_plugin.ApiUnscheduleFlowHandler() - args = flow_plugin.ApiUnscheduleFlowArgs( + args = flow_pb2.ApiUnscheduleFlowArgs( client_id=client_id, scheduled_flow_id=sf1.scheduled_flow_id ) handler.Handle(args, context=context) handler = flow_plugin.ApiListScheduledFlowsHandler() - args = flow_plugin.ApiListScheduledFlowsArgs( + args = flow_pb2.ApiListScheduledFlowsArgs( client_id=client_id, creator=context.username ) results = handler.Handle(args, context=context) - self.assertEqual(results.scheduled_flows, [sf2]) + self.assertCountEqual(results.scheduled_flows, [sf2]) def main(argv): diff --git a/grr/server/grr_response_server/gui/api_plugins/mig_flow.py b/grr/server/grr_response_server/gui/api_plugins/mig_flow.py index c2a045416..43543e0f7 100644 --- a/grr/server/grr_response_server/gui/api_plugins/mig_flow.py +++ b/grr/server/grr_response_server/gui/api_plugins/mig_flow.py @@ -125,74 +125,6 @@ def ToRDFApiListFlowResultsResult( ) -def ToProtoApiListParsedFlowResultsArgs( - rdf: flow.ApiListParsedFlowResultsArgs, -) -> flow_pb2.ApiListParsedFlowResultsArgs: - return rdf.AsPrimitiveProto() - - -def ToRDFApiListParsedFlowResultsArgs( - proto: flow_pb2.ApiListParsedFlowResultsArgs, -) -> flow.ApiListParsedFlowResultsArgs: - return flow.ApiListParsedFlowResultsArgs.FromSerializedBytes( - proto.SerializeToString() - ) - - -def ToProtoApiListParsedFlowResultsResult( - rdf: flow.ApiListParsedFlowResultsResult, -) -> flow_pb2.ApiListParsedFlowResultsResult: - return rdf.AsPrimitiveProto() - - -def ToRDFApiListParsedFlowResultsResult( - proto: flow_pb2.ApiListParsedFlowResultsResult, -) -> flow.ApiListParsedFlowResultsResult: - return flow.ApiListParsedFlowResultsResult.FromSerializedBytes( - proto.SerializeToString() - ) - - -def ToProtoApiParserDescriptor( - rdf: flow.ApiParserDescriptor, -) -> flow_pb2.ApiParserDescriptor: - return rdf.AsPrimitiveProto() - - -def ToRDFApiParserDescriptor( - proto: flow_pb2.ApiParserDescriptor, -) -> flow.ApiParserDescriptor: - return flow.ApiParserDescriptor.FromSerializedBytes(proto.SerializeToString()) - - -def ToProtoApiListFlowApplicableParsersArgs( - rdf: flow.ApiListFlowApplicableParsersArgs, -) -> flow_pb2.ApiListFlowApplicableParsersArgs: - return rdf.AsPrimitiveProto() - - -def ToRDFApiListFlowApplicableParsersArgs( - proto: flow_pb2.ApiListFlowApplicableParsersArgs, -) -> 
flow.ApiListFlowApplicableParsersArgs: - return flow.ApiListFlowApplicableParsersArgs.FromSerializedBytes( - proto.SerializeToString() - ) - - -def ToProtoApiListFlowApplicableParsersResult( - rdf: flow.ApiListFlowApplicableParsersResult, -) -> flow_pb2.ApiListFlowApplicableParsersResult: - return rdf.AsPrimitiveProto() - - -def ToRDFApiListFlowApplicableParsersResult( - proto: flow_pb2.ApiListFlowApplicableParsersResult, -) -> flow.ApiListFlowApplicableParsersResult: - return flow.ApiListFlowApplicableParsersResult.FromSerializedBytes( - proto.SerializeToString() - ) - - def ToProtoApiListFlowLogsArgs( rdf: flow.ApiListFlowLogsArgs, ) -> flow_pb2.ApiListFlowLogsArgs: diff --git a/grr/server/grr_response_server/gui/api_plugins/user_test.py b/grr/server/grr_response_server/gui/api_plugins/user_test.py index 4b105fd55..108192994 100644 --- a/grr/server/grr_response_server/gui/api_plugins/user_test.py +++ b/grr/server/grr_response_server/gui/api_plugins/user_test.py @@ -5,11 +5,12 @@ from absl import app +from google.protobuf import any_pb2 from grr_response_core import config from grr_response_core.lib import rdfvalue from grr_response_core.lib.rdfvalues import client as rdf_client -from grr_response_core.lib.rdfvalues import file_finder as rdf_file_finder from grr_response_core.lib.rdfvalues import flows as rdf_flows +from grr_response_proto import flows_pb2 from grr_response_proto import user_pb2 from grr_response_server import access_control from grr_response_server import cronjobs @@ -25,7 +26,6 @@ from grr_response_server.gui import approval_checks from grr_response_server.gui.api_plugins import user as user_plugin from grr_response_server.rdfvalues import cronjobs as rdf_cronjobs -from grr_response_server.rdfvalues import flow_runner as rdf_flow_runner from grr_response_server.rdfvalues import mig_objects from grr_response_server.rdfvalues import objects as rdf_objects from grr.test_lib import acl_test_lib @@ -399,12 +399,14 @@ def testStartsScheduledFlowsIfGrantedApprovalIsValid(self): ) def testErrorDuringStartFlowDoesNotBubbleUpToApprovalApiCall(self): + any_flow_args = any_pb2.Any() + any_flow_args.Pack(flows_pb2.CollectFilesByKnownPathArgs(paths=["/foo"])) flow.ScheduleFlow( client_id=self.client_id, creator=self.context.username, flow_name=file.CollectFilesByKnownPath.__name__, - flow_args=rdf_file_finder.CollectFilesByKnownPathArgs(paths=["/foo"]), - runner_args=rdf_flow_runner.FlowRunnerArgs(), + flow_args=any_flow_args, + runner_args=flows_pb2.FlowRunnerArgs(), ) with mock.patch.object( diff --git a/grr/server/grr_response_server/gui/selenium_tests/v2/approval_page_test.py b/grr/server/grr_response_server/gui/selenium_tests/v2/approval_page_test.py index 03796703c..8878d4235 100644 --- a/grr/server/grr_response_server/gui/selenium_tests/v2/approval_page_test.py +++ b/grr/server/grr_response_server/gui/selenium_tests/v2/approval_page_test.py @@ -1,11 +1,11 @@ #!/usr/bin/env python from absl import app -from grr_response_core.lib.rdfvalues import file_finder as rdf_file_finder +from google.protobuf import any_pb2 +from grr_response_proto import flows_pb2 from grr_response_server import flow from grr_response_server.flows import file from grr_response_server.gui import gui_test_lib -from grr_response_server.rdfvalues import flow_runner as rdf_flow_runner from grr.test_lib import test_lib @@ -62,12 +62,14 @@ def testScheduledFlowsAreShown(self): self.CreateUser("requestrick") self.CreateUser("approveannie") + any_flow_args = any_pb2.Any() + 
any_flow_args.Pack(flows_pb2.CollectFilesByKnownPathArgs(paths=["/foo"])) flow.ScheduleFlow( client_id=client_id, creator="requestrick", flow_name=file.CollectFilesByKnownPath.__name__, - flow_args=rdf_file_finder.CollectFilesByKnownPathArgs(paths=["/foo"]), - runner_args=rdf_flow_runner.FlowRunnerArgs(), + flow_args=any_flow_args, + runner_args=flows_pb2.FlowRunnerArgs(), ) approval_id = self.RequestClientApproval( diff --git a/grr/server/grr_response_server/gui/selenium_tests/v2/flow_test.py b/grr/server/grr_response_server/gui/selenium_tests/v2/flow_test.py index d89c04e4c..00ffe2403 100644 --- a/grr/server/grr_response_server/gui/selenium_tests/v2/flow_test.py +++ b/grr/server/grr_response_server/gui/selenium_tests/v2/flow_test.py @@ -5,7 +5,10 @@ from grr_response_core import config from grr_response_core.lib import rdfvalue +from grr_response_proto import flows_pb2 from grr_response_proto import objects_pb2 +from grr_response_proto import timeline_pb2 +from grr_response_proto.api import flow_pb2 from grr_response_server import artifact_registry from grr_response_server import data_store from grr_response_server import maintenance_utils @@ -36,7 +39,7 @@ def _ListFlows(client_id: str, creator: str): def _ListScheduledFlows(client_id: str, creator: str): handler = api_flow.ApiListScheduledFlowsHandler() return handler.Handle( - api_flow.ApiListScheduledFlowsArgs(client_id=client_id, creator=creator), + flow_pb2.ApiListScheduledFlowsArgs(client_id=client_id, creator=creator), context=api_call_context.ApiCallContext(username=creator), ).scheduled_flows @@ -117,7 +120,9 @@ def GetFirstScheduledFlow(): self.assertEqual(scheduled_flow.client_id, self.client_id) self.assertEqual(scheduled_flow.creator, self.test_username) self.assertEqual(scheduled_flow.flow_name, 'CollectMultipleFiles') - self.assertEqual(scheduled_flow.flow_args.path_expressions, ['/foo/test']) + flow_args = flows_pb2.CollectMultipleFilesArgs() + scheduled_flow.flow_args.Unpack(flow_args) + self.assertEqual(flow_args.path_expressions, ['/foo/test']) self.assertFalse(scheduled_flow.error) def testApprovalGrantStartsScheduledFlow(self): @@ -239,7 +244,9 @@ def GetFirstScheduledFlow(): scheduled_flow = self.WaitUntil(GetFirstScheduledFlow) self.assertEqual(scheduled_flow.flow_name, timeline.TimelineFlow.__name__) - self.assertEqual(scheduled_flow.flow_args.root, b'/foo/test') + flow_args = timeline_pb2.TimelineArgs() + scheduled_flow.flow_args.Unpack(flow_args) + self.assertEqual(flow_args.root, b'/foo/test') def testCollectMultipleFilesFlow(self): self.Open(f'/v2/clients/{self.client_id}') @@ -322,7 +329,8 @@ def GetFirstScheduledFlow(): return scheduled_flows[0] if len(scheduled_flows) == 1 else None scheduled_flow = self.WaitUntil(GetFirstScheduledFlow) - args = scheduled_flow.flow_args + args = flows_pb2.CollectMultipleFilesArgs() + scheduled_flow.flow_args.Unpack(args) self.assertEqual( scheduled_flow.flow_name, file.CollectMultipleFiles.__name__ @@ -332,27 +340,27 @@ def GetFirstScheduledFlow(): ) self.assertEqual( args.modification_time.min_last_modified_time, - rdfvalue.RDFDatetime.FromHumanReadable('2000-01-01 11:00:00'), + int(rdfvalue.RDFDatetime.FromHumanReadable('2000-01-01 11:00:00')), ) self.assertEqual( args.modification_time.max_last_modified_time, - rdfvalue.RDFDatetime.FromHumanReadable('2000-01-02 22:00:00'), + int(rdfvalue.RDFDatetime.FromHumanReadable('2000-01-02 22:00:00')), ) self.assertEqual( args.access_time.min_last_access_time, - rdfvalue.RDFDatetime.FromHumanReadable('2000-02-01 11:00:00'), + 
int(rdfvalue.RDFDatetime.FromHumanReadable('2000-02-01 11:00:00')), ) self.assertEqual( args.access_time.max_last_access_time, - rdfvalue.RDFDatetime.FromHumanReadable('2000-02-02 22:00:00'), + int(rdfvalue.RDFDatetime.FromHumanReadable('2000-02-02 22:00:00')), ) self.assertEqual( args.inode_change_time.min_last_inode_change_time, - rdfvalue.RDFDatetime.FromHumanReadable('2000-03-01 11:00:00'), + int(rdfvalue.RDFDatetime.FromHumanReadable('2000-03-01 11:00:00')), ) self.assertEqual( args.inode_change_time.max_last_inode_change_time, - rdfvalue.RDFDatetime.FromHumanReadable('2000-03-02 22:00:00'), + int(rdfvalue.RDFDatetime.FromHumanReadable('2000-03-02 22:00:00')), ) self.assertEqual(args.size.min_file_size, 1024) self.assertEqual(args.size.max_file_size, 2048) @@ -400,9 +408,9 @@ def GetFirstScheduledFlow(): self.assertEqual( scheduled_flow.flow_name, collectors.ArtifactCollectorFlow.__name__ ) - self.assertEqual( - scheduled_flow.flow_args.artifact_list, ['FakeFileArtifact'] - ) + flow_args = flows_pb2.ArtifactCollectorFlowArgs() + scheduled_flow.flow_args.Unpack(flow_args) + self.assertEqual(flow_args.artifact_list, ['FakeFileArtifact']) def testScheduleArtifactCollectorFlowWithDefaultArtifacts(self): artifact_registry.REGISTRY.AddDefaultSources() @@ -438,9 +446,9 @@ def GetFirstScheduledFlow(): self.assertEqual( scheduled_flow.flow_name, collectors.ArtifactCollectorFlow.__name__ ) - self.assertEqual( - scheduled_flow.flow_args.artifact_list, ['LinuxHardwareInfo'] - ) + flow_args = flows_pb2.ArtifactCollectorFlowArgs() + scheduled_flow.flow_args.Unpack(flow_args) + self.assertEqual(flow_args.artifact_list, ['LinuxHardwareInfo']) def _SetUpAdminUser(self): data_store.REL_DB.WriteGRRUser( @@ -485,11 +493,13 @@ def GetFirstScheduledFlow(): self.assertEqual( scheduled_flow.flow_name, administrative.LaunchBinary.__name__ ) + flow_args = flows_pb2.LaunchBinaryArgs() + scheduled_flow.flow_args.Unpack(flow_args) self.assertEqual( - scheduled_flow.flow_args.binary, + flow_args.binary, 'aff4:/config/executables/windows/test.exe', ) - self.assertEqual(scheduled_flow.flow_args.command_line, '--foo --bar') + self.assertEqual(flow_args.command_line, '--foo --bar') def testScheduleLaunchExecutePythonHackFlow(self): self._SetUpAdminUser() @@ -532,8 +542,12 @@ def GetFirstScheduledFlow(): self.assertEqual( scheduled_flow.flow_name, administrative.ExecutePythonHack.__name__ ) - self.assertEqual(scheduled_flow.flow_args.hack_name, 'windows/test.py') - self.assertEqual(scheduled_flow.flow_args.py_args['fookey'], 'foovalue') + flow_args = flows_pb2.ExecutePythonHackArgs() + scheduled_flow.flow_args.Unpack(flow_args) + self.assertEqual(flow_args.hack_name, 'windows/test.py') + self.assertLen(flow_args.py_args.dat, 1) + self.assertEqual(flow_args.py_args.dat[0].k.string, 'fookey') + self.assertEqual(flow_args.py_args.dat[0].v.string, 'foovalue') def testDumpProcessMemoryFlow(self): self.Open(f'/v2/clients/{self.client_id}') @@ -562,8 +576,10 @@ def GetFirstScheduledFlow(): self.assertEqual( scheduled_flow.flow_name, memory.DumpProcessMemory.__name__ ) - self.assertEqual(scheduled_flow.flow_args.process_regex, 'python\\d') - self.assertTrue(scheduled_flow.flow_args.skip_shared_regions) + flow_args = flows_pb2.YaraProcessDumpArgs() + scheduled_flow.flow_args.Unpack(flow_args) + self.assertEqual(flow_args.process_regex, 'python\\d') + self.assertTrue(flow_args.skip_shared_regions) def testYaraProcessScanFlow(self): self.Open(f'/v2/clients/{self.client_id}') @@ -591,8 +607,10 @@ def GetFirstScheduledFlow(): 
scheduled_flow = self.WaitUntil(GetFirstScheduledFlow) self.assertEqual(scheduled_flow.flow_name, memory.YaraProcessScan.__name__) - self.assertEqual(scheduled_flow.flow_args.process_regex, 'python\\d') - self.assertTrue(scheduled_flow.flow_args.skip_shared_regions) + flow_args = flows_pb2.YaraProcessScanRequest() + scheduled_flow.flow_args.Unpack(flow_args) + self.assertEqual(flow_args.process_regex, 'python\\d') + self.assertTrue(flow_args.skip_shared_regions) class FlowCreationTestWithApprovalsDisabled(gui_test_lib.GRRSeleniumTest): diff --git a/grr/server/grr_response_server/gui/ui/components/hunt/new_hunt/clients_form/clients_form.ng.html b/grr/server/grr_response_server/gui/ui/components/hunt/new_hunt/clients_form/clients_form.ng.html index 4ad67392c..de87382cf 100644 --- a/grr/server/grr_response_server/gui/ui/components/hunt/new_hunt/clients_form/clients_form.ng.html +++ b/grr/server/grr_response_server/gui/ui/components/hunt/new_hunt/clients_form/clients_form.ng.html @@ -89,14 +89,20 @@
       [hunk body garbled in extraction; recoverable content: the "{{ title }}"
       heading is unchanged, the per-OS "{{ os.key }}" checkbox markup is
       replaced with checkboxes bound to the new per-OS form controls, and a
       "rule-error" element is added with the message "No clients will match,
       select at least one OS.", shown when no operating system is selected.]
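Note on the template change above: the error row is driven by a group-level
validator (`atLeastOneOS`, added to `clients_form.ts` further down in this
patch), not by per-checkbox validators. Below is a minimal standalone sketch of
the same pattern; `atLeastOneSelected` and the `osGroup` control names are
hypothetical stand-ins, and unlike the patch's validator (which returns `{}`
when valid, a value Angular normalizes to `null` when composing an array of
validators) the sketch returns `null` directly.

import {
  AbstractControl,
  FormControl,
  FormGroup,
  ValidationErrors,
} from '@angular/forms';

// Group-level check: the group is valid as soon as any of its checkbox
// controls holds `true`.
function atLeastOneSelected(
  control: AbstractControl,
): ValidationErrors | null {
  const group = control as FormGroup;
  const anySelected = Object.values(group.controls).some(
    (c) => c.value === true,
  );
  return anySelected ? null : {'nothingSelected': true};
}

// Hypothetical OS checkbox group mirroring the clients form.
const osGroup = new FormGroup(
  {
    'Windows': new FormControl(false, {nonNullable: true}),
    'Darwin': new FormControl(false, {nonNullable: true}),
    'Linux': new FormControl(false, {nonNullable: true}),
  },
  {validators: [atLeastOneSelected]},
);

console.log(osGroup.hasError('nothingSelected')); // true: nothing ticked yet.
osGroup.controls['Linux'].setValue(true);
console.log(osGroup.hasError('nothingSelected')); // false: rule can match.

The template then only needs to key the "rule-error" block off the group's
validity, which is what the reconstructed hunk above does.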
diff --git a/grr/server/grr_response_server/gui/ui/components/hunt/new_hunt/clients_form/clients_form.scss b/grr/server/grr_response_server/gui/ui/components/hunt/new_hunt/clients_form/clients_form.scss index 443890229..ebc0e30a2 100644 --- a/grr/server/grr_response_server/gui/ui/components/hunt/new_hunt/clients_form/clients_form.scss +++ b/grr/server/grr_response_server/gui/ui/components/hunt/new_hunt/clients_form/clients_form.scss @@ -94,6 +94,10 @@ width: 70%; } } + + .rule-error { + font-size: small; + } } .rule-title { diff --git a/grr/server/grr_response_server/gui/ui/components/hunt/new_hunt/clients_form/clients_form.ts b/grr/server/grr_response_server/gui/ui/components/hunt/new_hunt/clients_form/clients_form.ts index ef26a86ee..8f7d86d3a 100644 --- a/grr/server/grr_response_server/gui/ui/components/hunt/new_hunt/clients_form/clients_form.ts +++ b/grr/server/grr_response_server/gui/ui/components/hunt/new_hunt/clients_form/clients_form.ts @@ -5,7 +5,14 @@ import { HostBinding, HostListener, } from '@angular/core'; -import {FormArray, FormBuilder, FormControl, FormGroup} from '@angular/forms'; +import { + AbstractControl, + FormArray, + FormBuilder, + FormControl, + FormGroup, + ValidationErrors, +} from '@angular/forms'; import { ForemanClientRule, @@ -37,6 +44,22 @@ const OS_DEFAULTS = { 'Linux': false, } as const; +function atLeastOneOS(control: AbstractControl): ValidationErrors | null { + const group = control as FormGroup; + let hasTrueValue = false; + Object.keys(group.controls).forEach((key) => { + if (group.controls[key].value) { + hasTrueValue = true; + } + }); + + if (hasTrueValue) { + return {}; + } + + return {'nothingSelected': true}; +} + /** * Provides the forms for new hunt configuration. */ @@ -239,11 +262,17 @@ export class ClientsForm { [osName]: new FormControl(osValue, {nonNullable: true}), })), ); - return this.fb.group({ + const osFormGroup = this.fb.group({ 'type': [ForemanClientRuleType.OS], 'name': [name], - 'options': this.fb.group(operatingSystemsControls), + 'options': this.fb.group( + operatingSystemsControls as {[key: string]: FormControl}, + { + 'validators': [atLeastOneOS], + }, + ), }); + return osFormGroup; } integerOperator( diff --git a/grr/server/grr_response_server/gui/ui/components/hunt/new_hunt/clients_form/clients_form_test.ts b/grr/server/grr_response_server/gui/ui/components/hunt/new_hunt/clients_form/clients_form_test.ts index 39d0d3be8..e9fa78c19 100644 --- a/grr/server/grr_response_server/gui/ui/components/hunt/new_hunt/clients_form/clients_form_test.ts +++ b/grr/server/grr_response_server/gui/ui/components/hunt/new_hunt/clients_form/clients_form_test.ts @@ -147,6 +147,48 @@ describe('clients form test', () => { expect(text).toContain('Operating System'); }); + it('os rule validator shows warning to user', async () => { + const fixture = TestBed.createComponent(ClientsForm); + fixture.detectChanges(); + + await setCheckboxValue(fixture, '[id=condition_0_windows]', true); + + expect(await getCheckboxValue(fixture, '[id=condition_0_windows]')).toBe( + true, + ); + expect(await getCheckboxValue(fixture, '[id=condition_0_linux]')).toBe( + false, + ); + expect(await getCheckboxValue(fixture, '[id=condition_0_darwin]')).toBe( + false, + ); + expect( + fixture.debugElement.query(By.css('[name=condition_0] .rule-error')), + ).toBeNull(); + + // Add a second rule with no OS selected and show error + const loader = TestbedHarnessEnvironment.loader(fixture); + const menu = await loader.getHarness(MatMenuHarness); + await selectMenuOptionAt(menu, 
2); + + expect(await getCheckboxValue(fixture, '[id=condition_1_windows]')).toBe( + false, + ); + expect(await getCheckboxValue(fixture, '[id=condition_1_linux]')).toBe( + false, + ); + expect(await getCheckboxValue(fixture, '[id=condition_1_darwin]')).toBe( + false, + ); + expect( + fixture.debugElement.query(By.css('[name=condition_1] .rule-error')), + ).toBeTruthy(); + expect( + fixture.debugElement.query(By.css('[name=condition_1] .rule-error')) + .nativeElement.textContent, + ).toContain('No clients will match'); + }); + it('deletes the form when clicking cancel', () => { const fixture = TestBed.createComponent(ClientsForm); fixture.detectChanges(); diff --git a/grr/server/grr_response_server/gui/ui/lib/api/api_interfaces.ts b/grr/server/grr_response_server/gui/ui/lib/api/api_interfaces.ts index f54bc0099..c7c5b0589 100644 --- a/grr/server/grr_response_server/gui/ui/lib/api/api_interfaces.ts +++ b/grr/server/grr_response_server/gui/ui/lib/api/api_interfaces.ts @@ -1336,17 +1336,6 @@ export declare interface ApiListFilesResult { readonly items?: readonly ApiFile[]; } -/** ApiListFlowApplicableParsersArgs proto mapping. */ -export declare interface ApiListFlowApplicableParsersArgs { - readonly clientId?: string; - readonly flowId?: string; -} - -/** ApiListFlowApplicableParsersResult proto mapping. */ -export declare interface ApiListFlowApplicableParsersResult { - readonly parsers?: readonly ApiParserDescriptor[]; -} - /** ApiListFlowDescriptorsResult proto mapping. */ export declare interface ApiListFlowDescriptorsResult { readonly items?: readonly ApiFlowDescriptor[]; @@ -1627,20 +1616,6 @@ export declare interface ApiListOutputPluginDescriptorsResult { readonly items?: readonly ApiOutputPluginDescriptor[]; } -/** ApiListParsedFlowResultsArgs proto mapping. */ -export declare interface ApiListParsedFlowResultsArgs { - readonly clientId?: string; - readonly flowId?: string; - readonly offset?: ProtoUint64; - readonly count?: ProtoUint64; -} - -/** ApiListParsedFlowResultsResult proto mapping. */ -export declare interface ApiListParsedFlowResultsResult { - readonly items?: readonly ApiFlowResult[]; - readonly errors?: readonly string[]; -} - /** ApiListPendingUserNotificationsArgs proto mapping. */ export declare interface ApiListPendingUserNotificationsArgs { readonly timestamp?: RDFDatetime; @@ -1821,21 +1796,6 @@ export enum ApiOutputPluginDescriptorPluginType { LIVE = 'LIVE', } -/** ApiParserDescriptor proto mapping. */ -export declare interface ApiParserDescriptor { - readonly name?: string; - readonly type?: ApiParserDescriptorType; -} - -/** ApiParserDescriptor.Type proto mapping. */ -export enum ApiParserDescriptorType { - UNKNOWN = 'UNKNOWN', - SINGLE_RESPONSE = 'SINGLE_RESPONSE', - MULTI_RESPONSE = 'MULTI_RESPONSE', - SINGLE_FILE = 'SINGLE_FILE', - MULTI_FILE = 'MULTI_FILE', -} - /** ApiRDFAllowedEnumValueDescriptor proto mapping. */ export declare interface ApiRDFAllowedEnumValueDescriptor { readonly name?: string; diff --git a/version.ini b/version.ini index 290e886f4..c76e8804e 100644 --- a/version.ini +++ b/version.ini @@ -3,7 +3,7 @@ major = 3 minor = 4 revision = 7 -release = 3 +release = 4 packageversion = %(major)s.%(minor)s.%(revision)spost%(release)s packagedepends = %(packageversion)s
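One pattern runs through the test migrations above: flow arguments that used to
be typed RDF values (e.g. rdf_file_finder.CollectFilesByKnownPathArgs) now
travel as a packed google.protobuf.Any, so writers Pack() the concrete message
and readers must Unpack() it into the concrete args type before asserting on
fields; timestamps likewise compare as raw microsecond integers, hence the
int(rdfvalue.RDFDatetime.FromHumanReadable(...)) conversions. A minimal sketch
of the Any round trip, assuming the GRR protos are importable as in the diffs
above:

from google.protobuf import any_pb2

from grr_response_proto import flows_pb2

# Writer side: pack the concrete args message into the Any field, as the
# migrated ApiScheduleFlowHandler tests do for ApiCreateFlowArgs.flow.args.
args = flows_pb2.CollectFilesByKnownPathArgs(paths=["/foo"])
packed = any_pb2.Any()
packed.Pack(args)

# Reader side: check the payload type, then unpack into a fresh message
# before asserting on fields. Unpack() returns False on a type mismatch
# rather than raising.
unpacked = flows_pb2.CollectFilesByKnownPathArgs()
assert packed.Is(unpacked.DESCRIPTOR)
assert packed.Unpack(unpacked)
assert list(unpacked.paths) == ["/foo"]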