info-disclosure.py
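# Queries the Wayback Machine CDX API for archived URLs on a target domain that
# point at potentially sensitive file types (backups, configs, keys, database
# dumps, ...), deduplicates them with the external `uro` tool, prints a
# per-extension summary table, and saves the results to info_disclosed_urls.txt.
#
# Third-party requirements: requests, fake-useragent, pwntools, tabulate,
# termcolor, and the `uro` command available on PATH.
#
# Example usage (the --size limit in MB is optional):
#   python3 info-disclosure.py --domain example.com --size 50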
import argparse
import requests
import re
import subprocess
from fake_useragent import UserAgent
import urllib3
from pwn import log
from tabulate import tabulate
from termcolor import colored
urllib3.disable_warnings()
def count_extensions(url_list, extensions):
    """Count how many URLs end with (or carry a query string after) each extension."""
    counts = {ext: 0 for ext in extensions}
    for url in url_list:
        for ext in extensions:
            if url.endswith(f".{ext}") or f".{ext}?" in url:
                counts[ext] += 1
    return counts

class CustomArgumentParser(argparse.ArgumentParser):
    """Argument parser that prints a friendlier message when arguments are invalid."""
    def error(self, message):
        self.print_usage()
        print("\nPlease provide a domain using the --domain argument.\nUse --help for more information.")
        self.exit(2)

def parse_args():
    parser = CustomArgumentParser(description="Download files until a certain size limit is reached.")
    parser.add_argument('--domain', type=str, help="Domain to analyze (e.g., example.com)")
    parser.add_argument('--size', type=int, help="Size to download in MB")
    args = parser.parse_args()
    if not args.domain:
        parser.error("Missing required argument: --domain")
    return args

def main():
    args = parse_args()
    domain = args.domain
    size_limit_mb = args.size if args.size else None
    # File extensions that commonly expose sensitive data (backups, configs, keys, dumps, ...).
    extensions_of_interest = [
        "xls", "xml", "xlsx", "json", "pdf", "sql", "doc", "docx", "pptx", "txt", "zip", "tar",
        "gz", "tgz", "bak", "7z", "rar", "log", "cache", "secret", "db", "backup", "yml", "config",
        "csv", "yaml", "md", "md5", "exe", "dll", "bin", "ini", "bat", "sh", "deb", "rpm", "iso",
        "img", "apk", "msi", "dmg", "tmp", "crt", "pem", "key", "pub", "asc"
    ]
    extensions_pattern = "|".join(extensions_of_interest)
    # Query the Wayback Machine CDX API for archived URLs of the target domain
    # whose original URL matches one of the extensions of interest.
    wburl = f"https://web.archive.org/cdx/search/cdx?url=*.{domain}/*&collapse=urlkey&output=text&fl=original&filter=original:.*\\.({extensions_pattern})"
    print(f"Fetching URLs for *.{domain}* with specified extensions...")
    response_logger = log.progress("Downloading URLs")
    size_logger = log.progress("Data downloaded")
    try:
        ua = UserAgent()
        headers = {"user-agent": ua.chrome}
        response = requests.get(wburl, headers=headers, stream=True, verify=False)
        if response.status_code != 200:
            response_logger.failure(f"Failed to fetch URLs. HTTP Status Code: {response.status_code}")
            return
        raw_urls = []
        total_size = 0
        size_limit_bytes = size_limit_mb * 1024 * 1024 if size_limit_mb else None
        # Stream the response in 1 KB chunks and stop once the optional size limit is exceeded.
        for chunk in response.iter_content(chunk_size=1024):
            total_size += len(chunk)
            if size_limit_bytes and total_size > size_limit_bytes:
                break
            size_logger.status(f"{total_size} bytes downloaded / {size_limit_bytes if size_limit_bytes else 'N/A'} bytes")
            raw_urls.append(chunk.decode("utf-8", errors="ignore"))
        raw_urls = "".join(raw_urls).splitlines()
        response_logger.success("URLs fetched successfully")
        size_logger.success(f"Total downloaded: {total_size} bytes")
        if size_limit_mb and total_size >= size_limit_bytes:
            size_logger.success(f"Download limit of {size_limit_mb} MB reached.")
    except Exception as e:
        response_logger.failure(f"Error occurred: {e}")
        return
print("Running uro to deduplicate and clean URLs...")
url_logger = log.progress("Processing URLs with uro")
try:
process = subprocess.Popen(
["uro"],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True
)
deduplicated_urls, errors = process.communicate(input="\n".join(raw_urls))
if process.returncode != 0:
url_logger.failure(f"uro command failed with error: {errors}")
return
deduplicated_urls = deduplicated_urls.splitlines()
url_logger.success(f"Processed {len(deduplicated_urls)} unique URLs")
except Exception as e:
url_logger.failure(f"Error running uro: {e}")
return
print(f"Total unique URLs fetched: {len(deduplicated_urls)}")
extension_counts = count_extensions(deduplicated_urls, extensions_of_interest)
table_data = []
for ext, count in extension_counts.items():
if count == 0:
count_colored = colored(count, 'red')
else:
count_colored = colored(count, 'green')
table_data.append([ext, count_colored])
print(tabulate(table_data, headers=["Extension", "Occurrences"], tablefmt="pretty"))
output_file = "info_disclosed_urls.txt"
with open(output_file, "w") as f:
f.write("\n".join(deduplicated_urls))
print(f"Filtered URLs saved to {output_file}.")
if __name__ == "__main__":
    main()