diff --git a/pyhindsight/analysis.py b/pyhindsight/analysis.py
index 0fa624d..d51985e 100644
--- a/pyhindsight/analysis.py
+++ b/pyhindsight/analysis.py
@@ -917,7 +917,7 @@ def generate_excel(self, output_object):
                 s.write(row_number, 11, item.file_size, black_value_format)
                 s.write(row_number, 12, item.magic_results, black_value_format)
 
-            elif item.row_type.startswith("local storage"):
+            elif item.row_type.startswith(("local storage", "session storage")):
                 s.write_string(row_number, 0, item.row_type, black_type_format)
                 s.write_string(row_number, 1, item.origin, black_url_format)
                 s.write_string(row_number, 2, item.key, black_field_format)
diff --git a/pyhindsight/browsers/chrome.py b/pyhindsight/browsers/chrome.py
index 0e8acfa..c8eb9bf 100644
--- a/pyhindsight/browsers/chrome.py
+++ b/pyhindsight/browsers/chrome.py
@@ -13,6 +13,8 @@
 import puremagic
 import urllib
 import base64
+
+import pyhindsight.lib.ccl_chrome_indexeddb.ccl_blink_value_deserializer
 from pyhindsight.browsers.webbrowser import WebBrowser
 from pyhindsight import utils
 
@@ -945,6 +947,41 @@ def get_local_storage(self, path, dir_name):
         log.info(f' - Parsed {len(results)} items from {len(filtered_listing)} files')
         self.parsed_storage.extend(results)
 
+    def get_session_storage(self, path, dir_name):
+        results = []
+
+        # Grab file list of 'Session Storage' directory
+        ss_path = os.path.join(path, dir_name)
+        log.info('Session Storage:')
+        log.info(f' - Reading from {ss_path}')
+
+        session_storage_listing = os.listdir(ss_path)
+        log.debug(f' - {len(session_storage_listing)} files in Session Storage directory')
+
+        # Session Storage parsing is thanks to Alex Caithness of CCL Forensics; ccl_chrome_indexeddb
+        # is bundled with Hindsight with his consent (and our thanks!). The logic below is adapted
+        # from his Chromium_dump_session_storage.py script.
+        import pathlib
+        from pyhindsight.lib.ccl_chrome_indexeddb import ccl_chromium_sessionstorage
+
+        ss_ldb_records = ccl_chromium_sessionstorage.SessionStoreDb(pathlib.Path(ss_path))
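+        # ss_ldb_records groups values by host: each host maps to one or more keys, and each
+        # key to one or more recovered values; every value is recorded below as a 'Live' item
+        # along with its LevelDB sequence number.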
+        for origin in ss_ldb_records.iter_hosts():
+            origin_kvs = ss_ldb_records.get_all_for_host(origin)
+            for key, values in origin_kvs.items():
+                for value in values:
+                    results.append(Chrome.SessionStorageItem(
+                        self.profile_path, origin, key, value.value, value.leveldb_sequence_number, 'Live', ss_path))
+
+        # Some records don't have an associated host for some unknown reason; still include them.
+        for key, value in ss_ldb_records.iter_orphans():
+            results.append(Chrome.SessionStorageItem(
+                self.profile_path, '', key, value.value, value.leveldb_sequence_number, 'Live', ss_path))
+
+        ss_ldb_records.close()
+        self.artifacts_counts['Session Storage'] = len(results)
+        log.info(f' - Parsed {len(results)} Session Storage items')
+        self.parsed_storage.extend(results)
+
     def get_extensions(self, path, dir_name):
         results = []
         log.info('Extensions:')
@@ -2327,6 +2364,13 @@ def process(self):
                     self.artifacts_display['Local Storage'],
                     self.artifacts_counts.get('Local Storage', '0')))
 
+            if 'Session Storage' in input_listing:
+                self.get_session_storage(self.profile_path, 'Session Storage')
+                self.artifacts_display['Session Storage'] = 'Session Storage records'
+                print(self.format_processing_output(
+                    self.artifacts_display['Session Storage'],
+                    self.artifacts_counts.get('Session Storage', '0')))
+
             if 'Extensions' in input_listing:
                 self.get_extensions(self.profile_path, 'Extensions')
                 self.artifacts_display['Extensions'] = 'Extensions'
diff --git a/pyhindsight/browsers/webbrowser.py b/pyhindsight/browsers/webbrowser.py
index b29aaa4..e5a54f6 100644
--- a/pyhindsight/browsers/webbrowser.py
+++ b/pyhindsight/browsers/webbrowser.py
@@ -349,6 +349,29 @@ def __init__(self, profile, origin, key, value, seq, state, source_path, last_mo
             self.source_path = source_path
             self.last_modified = last_modified
 
+    class SessionStorageItem(StorageItem):
+        def __init__(self, profile, origin, key, value, seq, state, source_path):
+            """
+
+            :param profile: The path to the browser profile this item is part of.
+            :param origin: The web origin this SessionStorage item belongs to.
+            :param key: The key of the SessionStorage item.
+            :param value: The value of the SessionStorage item (rendered in UTF-16).
+            :param seq: The sequence number of the key.
+            :param state: The state of the record (live or deleted).
+            :param source_path: The path to the source of the record.
+            """
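+            # Note: unlike LocalStorageItem there is no last_modified here; Session Storage
+            # LevelDB records don't appear to carry per-item timestamps.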
+            super(WebBrowser.SessionStorageItem, self).__init__(
+                'session storage', profile=profile, origin=origin, key=key, value=value, seq=seq, state=state,
+                source_path=source_path)
+            self.profile = profile
+            self.origin = origin
+            self.key = key
+            self.value = value
+            self.seq = seq
+            self.state = state
+            self.source_path = source_path
+
     class FileSystemItem(StorageItem):
         def __init__(self, profile, origin, key, value, seq, state, source_path, last_modified=None,
                      file_exists=None, file_size=None, magic_results=None):
diff --git a/pyhindsight/lib/ccl_chrome_indexeddb/ccl_blink_value_deserializer.py b/pyhindsight/lib/ccl_chrome_indexeddb/ccl_blink_value_deserializer.py
index 44ce669..ca1aa8a 100644
--- a/pyhindsight/lib/ccl_chrome_indexeddb/ccl_blink_value_deserializer.py
+++ b/pyhindsight/lib/ccl_chrome_indexeddb/ccl_blink_value_deserializer.py
@@ -25,7 +25,7 @@
 import typing
 from dataclasses import dataclass
 
-import ccl_v8_value_deserializer
+from pyhindsight.lib.ccl_chrome_indexeddb import ccl_v8_value_deserializer
 
 
 # See: https://chromium.googlesource.com/chromium/src/third_party/+/master/blink/renderer/bindings/core/v8/serialization
@@ -101,7 +101,7 @@ class Constants:
    #                         height:uint32_t, pixelDataLength:uint32_t,
    #                         data:byte[pixelDataLength]
    #                      -> ImageBitmap (ref)
-    tag_kImageBitmapTransferTag = "G"  # index:uint32_t -> ImageBitmap. For ImageBitmap transfer
+    tag_kImageBitmapTransferTag = b"G"  # index:uint32_t -> ImageBitmap. For ImageBitmap transfer
     tag_kOffscreenCanvasTransferTag = b"H"  # index, width, height, id,
    #                                          filter_quality::uint32_t ->
    #                                          OffscreenCanvas. For OffscreenCanvas
diff --git a/pyhindsight/lib/ccl_chrome_indexeddb/ccl_chromium_indexeddb.py b/pyhindsight/lib/ccl_chrome_indexeddb/ccl_chromium_indexeddb.py
index cbf9e5d..de59c0c 100644
--- a/pyhindsight/lib/ccl_chrome_indexeddb/ccl_chromium_indexeddb.py
+++ b/pyhindsight/lib/ccl_chrome_indexeddb/ccl_chromium_indexeddb.py
@@ -1,5 +1,5 @@
 """
-Copyright 2020, CCL Forensics
+Copyright 2020-2021, CCL Forensics
 
 Permission is hereby granted, free of charge, to any person obtaining a copy of
 this software and associated documentation files (the "Software"), to deal in
@@ -20,6 +20,7 @@
 SOFTWARE.
 """
 
+import sys
 import struct
 import os
 import pathlib
@@ -34,7 +35,7 @@
 import ccl_v8_value_deserializer
 import ccl_blink_value_deserializer
 
-__version__ = "0.2"
+__version__ = "0.6"
 __description__ = "Module for reading Chromium IndexedDB LevelDB databases."
 __contact__ = "Alex Caithness"
 
@@ -135,12 +136,23 @@ def __init__(self, buffer: bytes):
         else:
             raise ValueError()  # Shouldn't happen
 
+        # trim the raw_key in case this is an inner key:
+        self.raw_key = self.raw_key[0: self._raw_length]
+
     def __repr__(self):
         return f"<IdbKey {self.value}>"
 
     def __str__(self):
         return self.__repr__()
 
+    def __eq__(self, other):
+        if not isinstance(other, IdbKey):
+            raise NotImplementedError()
+        return self.raw_key == other.raw_key
+
+    def __ne__(self, other):
+        return not self == other
+
 
 class IndexedDBExternalObjectType(enum.IntEnum):
     # see: https://github.com/chromium/chromium/blob/master/content/browser/indexed_db/indexed_db_external_object.h
@@ -278,12 +290,16 @@ def get_meta(self, db_id: int, obj_store_id: int, meta_type: ObjectStoreMetadata
 
 
 class IndexedDbRecord:
-    def __init__(self, owner: "IndexedDb", db_id: int, obj_store_id: int, key: IdbKey, value: typing.Any):
+    def __init__(
+            self, owner: "IndexedDb", db_id: int, obj_store_id: int, key: IdbKey,
+            value: typing.Any, is_live: bool, ldb_seq_no: int):
         self.owner = owner
         self.db_id = db_id
         self.obj_store_id = obj_store_id
         self.key = key
         self.value = value
+        self.is_live = is_live
+        self.sequence_number = ldb_seq_no
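+        # is_live reflects the state of the underlying LevelDB record; sequence_number is the
+        # LevelDB write sequence number, which can be used to order versions of the same key.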
 
     def resolve_blob_index(self, blob_index: ccl_blink_value_deserializer.BlobIndex) -> IndexedDBExternalObject:
         """Resolve a ccl_blink_value_deserializer.BlobIndex to its IndexedDBExternalObject
@@ -372,7 +388,9 @@ def _get_raw_database_metadata(self, live_only=True):
                 if record.key.startswith(prefix) and record.state == ccl_leveldb.KeyState.Live:
                     # we only want live keys and the newest version thereof (highest seq)
                     meta_type = record.key[len(prefix)]
-                    db_meta[(db_id.dbid_no, meta_type)] = record
+                    old_version = db_meta.get((db_id.dbid_no, meta_type))
+                    if old_version is None or old_version.seq < record.seq:
+                        db_meta[(db_id.dbid_no, meta_type)] = record
 
         return db_meta
 
@@ -403,7 +421,7 @@ def _get_raw_object_store_metadata(self, live_only=True):
 
     def iterate_records(
             self, db_id: int, store_id: int, *,
-            live_only=True, bad_deserializer_data_handler: typing.Callable[[IdbKey, bytes], typing.Any] = None):
+            live_only=False, bad_deserializer_data_handler: typing.Callable[[IdbKey, bytes], typing.Any] = None):
 
         if db_id > 0x7f or store_id > 0x7f:
             raise NotImplementedError("there could be this many dbs or object stores, but I don't support it yet")
@@ -423,7 +441,11 @@ def iterate_records(
             blink_type_tag = record.value[val_idx]
             if blink_type_tag != 0xff:
                 # TODO: probably don't want to fail hard here long term...
-                raise ValueError("Blink type tag not present")
+                if bad_deserializer_data_handler is not None:
+                    bad_deserializer_data_handler(key, record.value)
+                    continue
+                else:
+                    raise ValueError("Blink type tag not present")
             val_idx += 1
 
             blink_version, varint_raw = _le_varint_from_bytes(record.value[val_idx:])
@@ -437,9 +459,11 @@ def iterate_records(
                 value = deserializer.read()
             except Exception:
                 if bad_deserializer_data_handler is not None:
-                    bad_deserializer_data_handler(key, record.value[val_idx:])
+                    bad_deserializer_data_handler(key, record.value)
+                    continue
                 raise
-            yield IndexedDbRecord(self, db_id, store_id, key, value)
+            yield IndexedDbRecord(self, db_id, store_id, key, value,
+                                  record.state == ccl_leveldb.KeyState.Live, record.seq)
 
     def get_blob_info(self, db_id: int, store_id: int, raw_key: bytes, file_index: int) -> IndexedDBExternalObject:
         if db_id > 0x7f or store_id > 0x7f:
@@ -501,14 +525,31 @@ def name(self) -> str:
         return self._raw_db.get_object_store_metadata(
             self._dbid_no, self._obj_store_id, ObjectStoreMetadataType.StoreName)
 
+    @staticmethod
+    def _log_error(key: IdbKey, data: bytes):
+        sys.stderr.write(f"ERROR decoding key: {key}\n")
+
     def get_blob(self, raw_key: bytes, file_index: int) -> typing.BinaryIO:
         return self._raw_db.get_blob(self._dbid_no, self.object_store_id, raw_key, file_index)
 
     # def __iter__(self):
     #     yield from self._raw_db.iterate_records(self._dbid_no, self._obj_store_id)
 
-    def iterate_records(self):
-        yield from self._raw_db.iterate_records(self._dbid_no, self._obj_store_id)
+    def iterate_records(
+            self, *, live_only=False, errors_to_stdout=False,
+            bad_deserializer_data_handler: typing.Callable[[IdbKey, bytes], typing.Any] = None):
+
+        def _handler(key, record):
+            if bad_deserializer_data_handler is not None:
+                bad_deserializer_data_handler(key, record)
+            if errors_to_stdout:
+                WrappedObjectStore._log_error(key, record)
+
+        handler = _handler if errors_to_stdout or bad_deserializer_data_handler is not None else None
+
+        yield from self._raw_db.iterate_records(
+            self._dbid_no, self._obj_store_id, live_only=live_only,
+            bad_deserializer_data_handler=handler)
 
     def __repr__(self):
         return f"<WrappedObjectStore: object_store_id={self.object_store_id}; name={self.name}>"
@@ -526,7 +567,8 @@ def __init__(self, raw_db: IndexedDb, dbid: DatabaseId):
         self._obj_store_names = tuple(names)
         # pre-compile object store wrappers as there's little overhead
         self._obj_stores = tuple(
-            WrappedObjectStore(self._raw_db, self.db_number, i) for i in range(1, self.object_store_count + 1))
+            WrappedObjectStore(
+                self._raw_db, self.db_number, i) for i in range(1, self.object_store_count + 1))
 
     @property
     def name(self) -> str:
@@ -568,7 +610,7 @@ def __len__(self):
     def __contains__(self, item):
         return item in self._obj_store_names
 
-    def __getitem__(self, item) -> "WrappedObjectStore":
+    def __getitem__(self, item) -> WrappedObjectStore:
         if isinstance(item, int):
             return self.get_object_store_by_id(item)
         elif isinstance(item, str):
@@ -623,7 +665,7 @@ def __contains__(self, item):
         else:
             raise TypeError("keys must be provided as a tuple of (name, origin) or a str (if only single origin) or int")
 
-    def __getitem__(self, item: typing.Union[int, str, typing.Tuple[str, str]]) -> "WrappedDatabase":
+    def __getitem__(self, item: typing.Union[int, str, typing.Tuple[str, str]]) -> WrappedDatabase:
         if isinstance(item, int):
             if item in self._db_number_lookup:
                 return self._db_number_lookup[item]
diff --git a/pyhindsight/lib/ccl_chrome_indexeddb/ccl_chromium_sessionstorage.py b/pyhindsight/lib/ccl_chrome_indexeddb/ccl_chromium_sessionstorage.py
new file mode 100644
index 0000000..8bc6982
--- /dev/null
+++ b/pyhindsight/lib/ccl_chrome_indexeddb/ccl_chromium_sessionstorage.py
@@ -0,0 +1,218 @@
+"""
+Copyright 2021, CCL Forensics
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+"""
+
+import sys
+import pathlib
+import typing
+import dataclasses
+from types import MappingProxyType
+
+from . import ccl_leveldb
+
+__version__ = "0.1"
+__description__ = "Module for reading the Chromium leveldb sessionstorage format"
+__contact__ = "Alex Caithness"
+
+# See: https://source.chromium.org/chromium/chromium/src/+/main:components/services/storage/dom_storage/session_storage_metadata.cc
+# et al
+
+_NAMESPACE_PREFIX = b"namespace-"
+_MAP_ID_PREFIX = b"map-"
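+
+# The parsing below assumes two kinds of records in the Session Storage LevelDB database:
+#   namespace-<guid>-<host> -> <map id>   (one record per tab/session namespace per host)
+#   map-<map id>-<key>      -> <value, stored UTF-16-LE encoded>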
+
+log = None
+
+
+@dataclasses.dataclass(frozen=True)
+class SessionStoreValue:
+    value: str
+    guid: typing.Optional[str]
+    leveldb_sequence_number: int
+
+
+class SessionStoreDb:
+    # todo: get all grouped by namespace by host?
+    # todo: get all grouped by namespace by host.key?
+    # todo: consider refactoring to only getting metadata on first pass and everything else on demand?
+    def __init__(self, in_dir: pathlib.Path):
+        if not in_dir.is_dir():
+            raise IOError("Input directory is not a directory")
+
+        self._ldb = ccl_leveldb.RawLevelDb(in_dir)
+
+        # If performance is a concern we should refactor this, but slow and steady for now
+
+        # First collect the namespace (session/tab guid + host) and map-ids together
+        self._map_id_to_host = {}  # map_id: (guid, host)
+        self._deleted_keys = set()
+
+        for rec in self._ldb.iterate_records_raw():
+            if rec.user_key.startswith(_NAMESPACE_PREFIX):
+                if rec.user_key == _NAMESPACE_PREFIX:
+                    continue  # bogus entry near the top usually
+                try:
+                    key = rec.user_key.decode("utf-8")
+                except UnicodeDecodeError:
+                    print(f"Invalid namespace key: {rec.user_key}")
+                    continue
+
+                split_key = key.split("-", 2)
+                if len(split_key) != 3:
+                    print(f"Invalid namespace key: {key}")
+                    continue
+
+                _, guid, host = split_key
+
+                if not host:
+                    continue  # TODO investigate why this happens
+
+                # normalize host to lower just in case
+                host = host.lower()
+                guid_host_pair = guid, host
+
+                if rec.state == ccl_leveldb.KeyState.Deleted:
+                    self._deleted_keys.add(guid_host_pair)
+                else:
+                    try:
+                        map_id = rec.value.decode("utf-8")
+                    except UnicodeDecodeError:
+                        print(f"Invalid namespace value: {key}")
+                        continue
+
+                    if not map_id:
+                        continue  # TODO: investigate why this happens/do we want to keep the host around somewhere?
+
+                    #if map_id in self._map_id_to_host_guid and self._map_id_to_host_guid[map_id] != guid_host_pair:
+                    if map_id in self._map_id_to_host and self._map_id_to_host[map_id] != host:
+                        print("Map ID Collision!")
+                        print(f"map_id: {map_id}")
+                        print(f"Old host: {self._map_id_to_host[map_id]}")
+                        print(f"New host: {guid_host_pair}")
+                        raise ValueError("map_id collision")
+                    else:
+                        self._map_id_to_host[map_id] = host
+
+        # freeze stuff
+        self._map_id_to_host = MappingProxyType(self._map_id_to_host)
+        self._deleted_keys = frozenset(self._deleted_keys)
+
+        self._host_lookup = {}  # {host: {ss_key: [SessionStoreValue, ...]}}
+        self._orphans = []  # list of tuples of key, value where we can't get the host
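+        # Second pass: resolve each map-<map id>-<key> record to its host via the mapping
+        # built above; values whose map id has no known host are kept as orphans.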
+        for rec in self._ldb.iterate_records_raw():
+            if rec.user_key.startswith(_MAP_ID_PREFIX):
+                try:
+                    key = rec.user_key.decode("utf-8")
+                except UnicodeDecodeError:
+                    print(f"Invalid map id key: {rec.user_key}")
+                    continue
+
+                if rec.state == ccl_leveldb.KeyState.Deleted:
+                    continue  # TODO: do we want to keep the key around because the presence is important?
+
+                split_key = key.split("-", 2)
+                if len(split_key) != 3:
+                    print(f"Invalid map id key: {key}")
+                    continue
+
+                _, map_id, ss_key = split_key
+
+                if not split_key:
+                    # TODO what does it mean when there is no key here?
+                    # The value will also be a single number (encoded utf-8)
+                    continue
+
+                try:
+                    value = rec.value.decode("UTF-16-LE")
+                except UnicodeDecodeError:
+                    # print(f"Error decoding value for {key}")
+                    # print(f"Raw Value: {rec.value}")
+                    continue
+
+                #guid_host_pair = self._map_id_to_host_guid.get(map_id)
+                host = self._map_id_to_host.get(map_id)
+                #if not guid_host_pair:
+                if not host:
+                    self._orphans.append((ss_key, SessionStoreValue(value, None, rec.seq)))
+                else:
+                    #guid, host = guid_host_pair
+                    self._host_lookup.setdefault(host, {})
+                    self._host_lookup[host].setdefault(ss_key, [])
+                    self._host_lookup[host][ss_key].append(SessionStoreValue(value, None, rec.seq))
+
+    def __contains__(self, item: typing.Union[str, typing.Tuple[str, str]]) -> bool:
+        """if item is a str, returns true if that host is present
+        if item is a tuple of (str, str), returns True if that host and key pair are present"""
+        if isinstance(item, str):
+            return item in self._host_lookup
+        elif isinstance(item, tuple) and len(item) == 2:
+            host, key = item
+            return host in self._host_lookup and key in self._host_lookup[host]
+        else:
+            raise TypeError("item must be a string or a tuple of (str, str)")
+
+    def iter_hosts(self) -> typing.Iterable[str]:
+        yield from self._host_lookup.keys()
+
+    def get_all_for_host(self, host):
+        if host not in self:
+            return {}
+        result_raw = dict(self._host_lookup[host])
+        for ss_key in result_raw:
+            result_raw[ss_key] = tuple(result_raw[ss_key])
+        return result_raw
+
+    def get_session_storage_key(self, host, key):
+        if (host, key) not in self:
+            return tuple()
+        return tuple(self._host_lookup[host][key])
+
+    def iter_orphans(self):
+        yield from self._orphans
+
+    def __getitem__(self, item: typing.Union[str, typing.Tuple[str, str]]):
+        if item not in self:
+            raise KeyError(item)
+
+        if isinstance(item, str):
+            return self.get_all_for_host(item)
+        elif isinstance(item, tuple) and len(item) == 2:
+            return self.get_session_storage_key(*item)
+        else:
+            raise TypeError("item must be a string or a tuple of (str, str)")
+
+    def __iter__(self):
+        """iterates the hosts present"""
+        return self.iter_hosts()
+
+    def close(self):
+        self._ldb.close()
+
+
+def main(args):
+    ldb_in_dir = pathlib.Path(args[0])
+    ssdb = SessionStoreDb(ldb_in_dir)
+
+    print("Hosts in db:")
+    for host in ssdb:
+        print(host)
+
+
+if __name__ == '__main__':
+    main(sys.argv[1:])
diff --git a/pyhindsight/lib/ccl_chrome_indexeddb/ccl_leveldb.py b/pyhindsight/lib/ccl_chrome_indexeddb/ccl_leveldb.py
index 54e9200..e6c5e3b 100644
--- a/pyhindsight/lib/ccl_chrome_indexeddb/ccl_leveldb.py
+++ b/pyhindsight/lib/ccl_chrome_indexeddb/ccl_leveldb.py
@@ -1,5 +1,5 @@
 """
-Copyright 2020, CCL Forensics
+Copyright 2020-2021, CCL Forensics
 
 Permission is hereby granted, free of charge, to any person obtaining a copy of
 this software and associated documentation files (the "Software"), to deal in
@@ -33,7 +33,7 @@
 
 from pyhindsight.lib.ccl_chrome_indexeddb import ccl_simplesnappy
 
-__version__ = "0.2"
+__version__ = "0.4"
 __description__ = "A module for reading LevelDB databases"
 __contact__ = "Alex Caithness"
 
@@ -128,6 +128,17 @@ class Record:
     offset: int
     was_compressed: bool
 
+    @property
+    def user_key(self):
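+        # .ldb table files store LevelDB's "internal key": the user key followed by an 8-byte
+        # trailer packing the sequence number and record type, which is stripped off here;
+        # records read from .log files already hold the bare user key.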
print("Hosts in db:") + for host in ssdb: + print(host) + + +if __name__ == '__main__': + main(sys.argv[1:]) diff --git a/pyhindsight/lib/ccl_chrome_indexeddb/ccl_leveldb.py b/pyhindsight/lib/ccl_chrome_indexeddb/ccl_leveldb.py index 54e9200..e6c5e3b 100644 --- a/pyhindsight/lib/ccl_chrome_indexeddb/ccl_leveldb.py +++ b/pyhindsight/lib/ccl_chrome_indexeddb/ccl_leveldb.py @@ -1,5 +1,5 @@ """ -Copyright 2020, CCL Forensics +Copyright 2020-2021, CCL Forensics Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in @@ -33,7 +33,7 @@ from pyhindsight.lib.ccl_chrome_indexeddb import ccl_simplesnappy -__version__ = "0.2" +__version__ = "0.4" __description__ = "A module for reading LevelDB databases" __contact__ = "Alex Caithness" @@ -128,6 +128,17 @@ class Record: offset: int was_compressed: bool + @property + def user_key(self): + if self.file_type == FileType.Ldb: + if len(self.key) < 8: + return self.key + else: + return self.key[0:-8] + else: + return self.key + + @classmethod def ldb_record(cls, key: bytes, value: bytes, origin_file: os.PathLike, offset: int, was_compressed: bool): @@ -188,7 +199,7 @@ def __iter__(self) -> typing.Iterable[RawBlockEntry]: class LdbFile: - """A leveldb table (.ldb) file.""" + """A leveldb table (.ldb or .sst) file.""" BLOCK_TRAILER_SIZE = 5 FOOTER_SIZE = 48 MAGIC = 0xdb4775248b80fb57 @@ -526,7 +537,7 @@ def close(self): class RawLevelDb: - DATA_FILE_PATTERN = r"[0-9]{6}\.(ldb|log)" + DATA_FILE_PATTERN = r"[0-9]{6}\.(ldb|log|sst)" def __init__(self, in_dir: os.PathLike): @@ -540,7 +551,7 @@ def __init__(self, in_dir: os.PathLike): if file.is_file() and re.match(RawLevelDb.DATA_FILE_PATTERN, file.name): if file.suffix.lower() == ".log": self._files.append(LogFile(file)) - elif file.suffix.lower() == ".ldb": + elif file.suffix.lower() == ".ldb" or file.suffix.lower() == ".sst": self._files.append(LdbFile(file)) if file.is_file() and re.match(ManifestFile.MANIFEST_FILENAME_PATTERN, file.name): manifest_no = int(re.match(ManifestFile.MANIFEST_FILENAME_PATTERN, file.name).group(1), 16) diff --git a/pyhindsight/lib/ccl_chrome_indexeddb/ccl_v8_value_deserializer.py b/pyhindsight/lib/ccl_chrome_indexeddb/ccl_v8_value_deserializer.py index 442bb61..d80750d 100644 --- a/pyhindsight/lib/ccl_chrome_indexeddb/ccl_v8_value_deserializer.py +++ b/pyhindsight/lib/ccl_chrome_indexeddb/ccl_v8_value_deserializer.py @@ -73,6 +73,12 @@ def __eq__(self, other): return True return False + def __repr__(self): + return "" + + def __str__(self): + return "" + class Constants: # Constants diff --git a/pyhindsight/plugins/unfurl_interpretation.py b/pyhindsight/plugins/unfurl_interpretation.py index 98b5026..d5bdc5f 100644 --- a/pyhindsight/plugins/unfurl_interpretation.py +++ b/pyhindsight/plugins/unfurl_interpretation.py @@ -20,7 +20,7 @@ # Config friendlyName = "Unfurl" description = "Run storage values through Unfurl" -artifactTypes = ["local storage"] # Artifacts that this plugin processes +artifactTypes = ["local storage", "session storage"] # Artifacts that this plugin processes remoteLookups = 1 # if this plugin will query online sources/databases browser = "Chrome" # browsers that the plugin applies to browserVersion = 1 # browser versions that the plugin applies to