Skip to content

Commit

Permalink
Merge pull request #198 from obsidianforensics/api-cli-split
Browse files Browse the repository at this point in the history
Split out `cli` so it can be used without installing the web_app deps
  • Loading branch information
obsidianforensics authored Jun 29, 2024
2 parents 137cdcb + 3e2b3cb commit bf64641
Show file tree
Hide file tree
Showing 5 changed files with 97 additions and 80 deletions.
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ optional-dependencies.lookups = { file = ["requirements-lookups.txt"] }
optional-dependencies.all = { file = ["requirements-all.txt"] }

[project.scripts]
unfurl = "unfurl.app:cli"
unfurl = "unfurl.cli:command_line_interface"
unfurl_app = "unfurl.app:web_app"

[project.urls]
Expand Down
2 changes: 1 addition & 1 deletion unfurl/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
# limitations under the License.

__author__ = "Ryan Benson"
__version__ = "20240625"
__version__ = "20240626"
__email__ = "ryan@dfir.blog"

import logging
Expand Down
77 changes: 1 addition & 76 deletions unfurl/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,11 +14,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse

import configparser
import csv
import os
from unfurl import core
from flask import Flask, render_template, request, redirect, url_for
from flask_cors import CORS
from flask_restx import Api, Namespace, Resource
Expand Down Expand Up @@ -136,75 +133,3 @@ def web_app(host='localhost', port='5000', debug='True', remote_lookups=False):
unfurl_host=host,
unfurl_port=port,
remote_lookups=remote_lookups)


def cli():
    """Console entry point: unfurl a URL (or each line of a file of URLs)
    and emit each result as a text tree or JSON, either to stdout or to a
    CSV file when --output is given."""
    arg_parser = argparse.ArgumentParser(
        description='unfurl takes a URL and expands ("unfurls") it into a directed graph, extracting every '
                    'bit of information from the URL and exposing the obscured.')
    arg_parser.add_argument(
        'what_to_unfurl',
        help='what to unfurl. typically this is a URL, but it also supports integers (timestamps), '
             'encoded protobufs, and more. if this is instead a file path, unfurl will open '
             'that file and process each line in it as a URL.')
    arg_parser.add_argument(
        '-d', '--detailed', help='show more detailed explanations.', action='store_true')
    arg_parser.add_argument(
        '-f', '--filter', help='only output lines that match this filter.')
    arg_parser.add_argument(
        '-l', '--lookups', help='allow remote lookups to enhance results.', action='store_true')
    arg_parser.add_argument(
        '-o', '--output',
        help='file to save output (as CSV) to. if omitted, output is sent to '
             'stdout (typically this means displayed in the console).')
    arg_parser.add_argument(
        '-t', '--type', help='Type of output to produce', choices=['tree', 'json'], default='tree'
    )
    arg_parser.add_argument(
        '-v', '-V', '--version', action='version', version=f'unfurl v{core.unfurl.__version__}')
    opts = arg_parser.parse_args()

    # A path to an existing file means "one URL per line"; anything else is
    # treated as a single item to unfurl.
    if os.path.isfile(opts.what_to_unfurl):
        with open(opts.what_to_unfurl, errors='ignore') as source_file:
            work_items = [input_line.rstrip() for input_line in source_file]
    else:
        work_items = [opts.what_to_unfurl]

    if opts.output:
        # newline='' is the csv-module convention to avoid doubled line
        # endings on Windows.
        with open(opts.output, 'w', newline='', encoding='utf-8') as out_file:
            writer = csv.writer(out_file, quoting=csv.QUOTE_ALL)
            writer.writerow(['url', 'unfurled'])

            for current_item in work_items:
                # A fresh Unfurl per item so graph state never leaks between runs.
                instance = core.Unfurl(remote_lookups=opts.lookups)
                instance.add_to_queue(
                    data_type='url', key=None,
                    value=current_item)
                instance.parse_queue()
                if opts.type == 'json':
                    rendered = instance.generate_full_json()
                else:
                    rendered = instance.generate_text_tree(
                        detailed=opts.detailed,
                        output_filter=opts.filter)
                writer.writerow([current_item, rendered])
    else:
        for current_item in work_items:
            instance = core.Unfurl(remote_lookups=opts.lookups)
            instance.add_to_queue(
                data_type='url', key=None,
                value=current_item)
            instance.parse_queue()

            if opts.type == 'json':
                print(instance.generate_full_json())
            else:
                print(instance.generate_text_tree(
                    detailed=opts.detailed, output_filter=opts.filter))
            # Blank line separates consecutive results on stdout.
            # NOTE(review): placement reconstructed from the pasted diff's
            # statement order (indentation was stripped) — confirm against repo.
            print()
92 changes: 92 additions & 0 deletions unfurl/cli.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,92 @@
#!/usr/bin/env python3

# Copyright 2024 Ryan Benson
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import csv
import os
from unfurl import core


def _build_arg_parser():
    """Build and return the argparse parser for the unfurl CLI."""
    parser = argparse.ArgumentParser(
        description='unfurl takes a URL and expands ("unfurls") it into a directed graph, extracting every '
                    'bit of information from the URL and exposing the obscured.')
    parser.add_argument(
        'what_to_unfurl',
        help='what to unfurl. typically this is a URL, but it also supports integers (timestamps), '
             'encoded protobufs, and more. if this is instead a file path, unfurl will open '
             'that file and process each line in it as a URL.')
    parser.add_argument(
        '-d', '--detailed', help='show more detailed explanations.', action='store_true')
    parser.add_argument(
        '-f', '--filter', help='only output lines that match this filter.')
    parser.add_argument(
        '-l', '--lookups', help='allow remote lookups to enhance results.', action='store_true')
    parser.add_argument(
        '-o', '--output',
        help='file to save output (as CSV) to. if omitted, output is sent to '
             'stdout (typically this means displayed in the console).')
    parser.add_argument(
        '-t', '--type', help='Type of output to produce', choices=['tree', 'json'], default='tree'
    )
    parser.add_argument(
        '-v', '-V', '--version', action='version', version=f'unfurl v{core.unfurl.__version__}')
    return parser


def _collect_items(what_to_unfurl):
    """Return the list of items to process.

    If ``what_to_unfurl`` is a path to an existing file, each line of that
    file is one item; otherwise the argument itself is the single item.
    """
    if os.path.isfile(what_to_unfurl):
        with open(what_to_unfurl, errors='ignore') as f:
            return [input_url.rstrip() for input_url in f]
    return [what_to_unfurl]


def _unfurl_item(item, remote_lookups):
    """Run one item through a fresh Unfurl instance and return the instance.

    A new instance per item keeps graph state from leaking between inputs.
    """
    unfurl_instance = core.Unfurl(remote_lookups=remote_lookups)
    unfurl_instance.add_to_queue(
        data_type='url', key=None,
        value=item)
    unfurl_instance.parse_queue()
    return unfurl_instance


def _render(unfurl_instance, args):
    """Render a parsed Unfurl instance per ``--type``: JSON or text tree."""
    if args.type == 'json':
        return unfurl_instance.generate_full_json()
    return unfurl_instance.generate_text_tree(
        detailed=args.detailed, output_filter=args.filter)


def command_line_interface():
    """Entry point for the ``unfurl`` console script.

    Parses command-line arguments, unfurls each input item, and writes the
    results either to a CSV file (when ``--output`` is given) or to stdout.
    """
    args = _build_arg_parser().parse_args()
    items_to_unfurl = _collect_items(args.what_to_unfurl)

    if args.output:
        # newline='' is the csv-module convention to avoid doubled line
        # endings on Windows.
        with open(args.output, 'w', newline='', encoding='utf-8') as csv_file:
            csv_writer = csv.writer(csv_file, quoting=csv.QUOTE_ALL)
            csv_writer.writerow(['url', 'unfurled'])

            for item in items_to_unfurl:
                csv_writer.writerow(
                    [item, _render(_unfurl_item(item, args.lookups), args)])
    else:
        for item in items_to_unfurl:
            print(_render(_unfurl_item(item, args.lookups), args))
            # Blank line separates consecutive results on stdout.
            # NOTE(review): placement reconstructed from the pasted diff's
            # statement order (indentation was stripped) — confirm against repo.
            print()
4 changes: 2 additions & 2 deletions unfurl/scripts/unfurl_cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from unfurl.app import cli
from unfurl.cli import command_line_interface

cli()
command_line_interface()

0 comments on commit bf64641

Please sign in to comment.