forked from eveem-org/panoramix
-
Notifications
You must be signed in to change notification settings - Fork 1
/
bulk_decompile.py
109 lines (71 loc) · 2.63 KB
/
bulk_decompile.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
'''
A helper script that takes all the binary code from cache_code,
and decompiles all of it, saving results into cache_pan
It uses multi-processing.
This is useful when testing new changes to the decompiler. Run the decompilation
on a few hundred contracts and look for any serious bugs / weird results.
I also use it to decompile all the contracts with a new Eveem release.
Just fetch all the bytecodes into cache_code (e.g. from BigQuery), and then run
it through all of them. The strongest AWS instance can handle ~100 processes,
and after ~24h it should have all the bytecodes decompiled.
It would be rather easy to optimise this with some kind of a database and
bytecode deduplication, but it would make the code more complex and dependency-ridden.
'''
import json
from subprocess import call
from queue import Queue
import sys
import threading
import time
import logging
import os
from various import addr_list, random_addresses
# Thread-aware log format so interleaved worker output is attributable.
logging.basicConfig(level=logging.DEBUG,
                    format='(%(threadName)-9s) %(message)s',)

stuff = []
path = 'cache_code/'

'''
uncomment to decompile all contracts in cache_code

for dname in os.listdir(path):
    if not os.path.isdir(path+dname):
        continue

    for fname in os.listdir(path+dname):
        addr = fname[:-4]
        full_fname = path+dname+'/'+fname
        if os.stat(full_fname).st_size > 0:
            stuff.append(addr)
'''

stuff = random_addresses  # or addr_list for more complex examples

print('binaries found:', len(stuff))

# The script needs three positional arguments -- start_loc, end_loc and
# num_threads (sys.argv[1..3]); argv[0] is the script name, so a valid
# invocation has len(sys.argv) >= 4. The original check used `< 3`,
# which let a two-argument call through and crashed later on sys.argv[3].
if len(sys.argv) < 4:
    print("bulk_decompile start_loc end_loc num_threads [--force]")
    exit()
def queued(q):
    """Worker loop: repeatedly take a comma-joined batch of addresses
    from *q* and hand it to panoramix.py in a subprocess.

    A literal 'die' item is the shutdown sentinel: log it and stop.
    """
    while True:
        batch = q.get()
        if batch == 'die':
            logging.debug('end of queue')
            return
        logging.debug('addr: %s' % batch)
        call(['python3.8', 'panoramix.py', batch])  # , '--upload'])
stuff = sorted(stuff)

if __name__ == '__main__':
    queue = Queue()
    threads = []

    # Spawn the worker pool; each thread runs `queued` until it pulls
    # the 'die' sentinel.
    for i in range(int(sys.argv[3])):
        t = threading.Thread(target=queued, name='thread_'+str(i), args=[queue])
        t.start()
        threads.append(t)

    mini_queue = []

    for addr in stuff[int(sys.argv[1]):int(sys.argv[2])]:
        # Skip addresses that already have a decompiled .pan file,
        # unless --force is given.
        if '--force' not in sys.argv and os.path.isfile('cache_pan/'+addr[:5]+'/'+addr+'.pan'):
            print('skipping '+addr)
            continue

        mini_queue.append(addr)

        # Batch addresses into one comma-joined work item to amortise
        # the per-subprocess startup cost.
        if len(mini_queue) > 10:
            queue.put(','.join(mini_queue))
            mini_queue = []

    # Bug fix: flush the final partial batch only when non-empty --
    # the original unconditionally put ''.join of an empty list, making
    # one worker invoke panoramix on an empty address string.
    if mini_queue:
        queue.put(','.join(mini_queue))

    # One sentinel per worker so every thread terminates.
    for i in range(int(sys.argv[3])):
        queue.put('die')

    print('waiting for threads..')

    # Bug fix: actually wait for the workers. The original printed the
    # message but never joined, so main could exit before work finished.
    for t in threads:
        t.join()