|
# Example commands: (you can either give the path to a directory containing all the .log files, or a single .log file name)
| 2 | +# python reorg_analyzer.py /path/to/logs_directory |
| 3 | +# or |
| 4 | +# python reorg_analyzer.py bsc.log |
| 5 | +# or |
| 6 | +# python reorg_analyzer.py bsc.log.2024-10-3* |
| 7 | +import re |
| 8 | +import os |
| 9 | +import argparse |
| 10 | +from collections import defaultdict |
| 11 | +import glob |
| 12 | + |
def parse_logs(file_paths):
    """Scan BSC node log files for imported-block and chain-reorg events.

    Args:
        file_paths: Iterable of paths to log files to read.

    Returns:
        Tuple ``(block_info, reorgs)`` where ``block_info`` maps block hash ->
        ``{'number': int, 'miner': str}`` (last import wins on duplicate
        hashes) and ``reorgs`` is a list of dicts with the parsed fields of
        each "Chain reorg detected" line, in file order.
    """
    # Matches "Imported new chain segment" lines: number, hash, miner.
    re_import = re.compile(
        r't=.* lvl=info msg="Imported new chain segment" number=(\d+) '
        r'hash=([0-9a-fx]+) miner=([0-9a-zA-Zx]+)'
    )
    # Matches "Chain reorg detected" lines: number, hash, drop/add counts and hashes.
    re_reorg = re.compile(
        r't=.* lvl=info msg="Chain reorg detected" number=(\d+) hash=([0-9a-fx]+) '
        r'drop=(\d+) dropfrom=([0-9a-fx]+) add=(\d+) addfrom=([0-9a-fx]+)'
    )

    block_info = {}  # block hash -> {'number': ..., 'miner': ...}
    reorgs = []      # parsed reorg events, in the order encountered

    for log_file_path in file_paths:
        try:
            with open(log_file_path, 'r', encoding='utf-8') as f:
                for line in f:
                    # Check for imported block lines.
                    match_import = re_import.search(line)
                    if match_import:
                        block_info[match_import.group(2)] = {
                            'number': int(match_import.group(1)),
                            'miner': match_import.group(3),
                        }
                        continue

                    # Check for reorg lines.
                    match_reorg = re_reorg.search(line)
                    if match_reorg:
                        reorgs.append({
                            'number': int(match_reorg.group(1)),
                            'hash': match_reorg.group(2),
                            'drop_count': int(match_reorg.group(3)),
                            'drop_from_hash': match_reorg.group(4),
                            'add_count': int(match_reorg.group(5)),
                            'add_from_hash': match_reorg.group(6),
                        })
        # Only I/O and decoding failures are expected here; a broad
        # `except Exception` would also silently relabel genuine parser
        # bugs as "file errors", hiding them from the user.
        except (OSError, UnicodeDecodeError) as e:
            print(f"Error reading file {log_file_path}: {e}")

    return block_info, reorgs
| 65 | + |
def analyze_reorgs(block_info, reorgs):
    """Join reorg events with miner info and tally reorgs per validator.

    Args:
        block_info: Mapping of block hash -> ``{'number', 'miner'}``.
        reorgs: List of parsed reorg event dicts (see ``parse_logs``).

    Returns:
        Tuple ``(results, validator_reorgs)``: per-event detail dicts, and a
        mapping of validator address -> ``{'count': int, 'blocks': [int]}``.
    """
    results = []
    validator_reorgs = defaultdict(lambda: {'count': 0, 'blocks': []})

    def miner_of(block_hash):
        # Falls back to 'Unknown' when the block was never seen imported.
        return block_info.get(block_hash, {}).get('miner', 'Unknown')

    for event in reorgs:
        dropped_hash = event['drop_from_hash']
        added_hash = event['add_from_hash']
        dropped_miner = miner_of(dropped_hash)
        added_miner = miner_of(added_hash)

        results.append({
            'reorg_at_block': event['number'],
            'dropped_block_hash': dropped_hash,
            'added_block_hash': added_hash,
            'dropped_miner': dropped_miner,
            'added_miner': added_miner,
            'responsible_validator': added_miner,
        })

        # The miner of the winning (added) branch is credited with the reorg.
        tally = validator_reorgs[added_miner]
        tally['count'] += 1
        tally['blocks'].append(event['number'])

    return results, validator_reorgs
| 96 | + |
def get_log_files(paths):
    """Expand CLI path arguments (globs, files, directories) into file paths.

    Args:
        paths: List of file paths, directory paths, or glob patterns.

    Returns:
        List of paths to regular files to process.

    Raises:
        SystemExit: with status 1 when no files could be resolved at all.
    """
    log_files = []
    for path in paths:
        # Expand shell-style wildcards; a literal existing path matches itself.
        expanded_paths = glob.glob(path)
        if not expanded_paths:
            print(f"No files matched the pattern: {path}")
            continue
        for expanded_path in expanded_paths:
            if os.path.isfile(expanded_path):
                log_files.append(expanded_path)
            elif os.path.isdir(expanded_path):
                # Take every regular file directly inside the directory
                # (no recursion into subdirectories).
                log_files.extend(
                    os.path.join(expanded_path, f)
                    for f in os.listdir(expanded_path)
                    if os.path.isfile(os.path.join(expanded_path, f))
                )
            else:
                print(f"Invalid path: {expanded_path}")
    if not log_files:
        print("No log files to process.")
        # `exit()` is an interactive helper injected by the `site` module and
        # is not guaranteed to exist (e.g. under `python -S`); raising
        # SystemExit directly is the equivalent, always-available form.
        raise SystemExit(1)
    return log_files
| 122 | + |
def main():
    """CLI entry point: resolve log files, analyze them, print a reorg report."""
    arg_parser = argparse.ArgumentParser(description='Analyze BSC node logs for reorgs.')
    arg_parser.add_argument('paths', nargs='+', help='Path(s) to log files, directories, or patterns.')
    cli_args = arg_parser.parse_args()

    files = get_log_files(cli_args.paths)

    print("Processing the following files:")
    for file_path in files:
        print(f" - {file_path}")

    block_info, reorgs = parse_logs(files)
    details, per_validator = analyze_reorgs(block_info, reorgs)

    # One section per reorg event, in log order.
    for entry in details:
        print(f"Reorg detected at block number {entry['reorg_at_block']}:")
        print(f" Dropped block hash: {entry['dropped_block_hash']}")
        print(f" Dropped miner: {entry['dropped_miner']}")
        print(f" Added block hash: {entry['added_block_hash']}")
        print(f" Added miner: {entry['added_miner']}")
        print(f" Validator responsible for reorg: {entry['responsible_validator']}")
        print('-' * 60)

    # Summary table, validators with the most reorgs first.
    print("\nAggregated Validators Responsible for Reorgs:\n")
    print(f"{'Validator Address':<46} {'Number of Reorgs':<16} {'Block Numbers'}")
    print('-' * 90)
    ranked = sorted(per_validator.items(), key=lambda item: item[1]['count'], reverse=True)
    for validator, data in ranked:
        block_numbers = ', '.join(str(n) for n in data['blocks'])
        print(f"{validator:<46} {data['count']:<16} {block_numbers}")
| 154 | + |
# Run the analyzer only when executed as a script (not when imported).
if __name__ == '__main__':
    main()
| 157 | + |
| 158 | +# Example Output: |
| 159 | +# Reorg detected at block number 43989479: |
| 160 | +# Dropped block hash: 0x8f97c466adc41449f98a51efd6c9b0ee480373a0d87d23fe0cbc78bcedb32f34 |
| 161 | +# Dropped miner: 0xB4647b856CB9C3856d559C885Bed8B43e0846a48 |
| 162 | +# Added block hash: 0x057f65b6852d269b61766387fecfbeed5b360fb3ffc8d80a73d674c3ad3237cc |
| 163 | +# Added miner: 0x58567F7A51a58708C8B40ec592A38bA64C0697Df |
| 164 | +# Validator responsible for reorg: 0x58567F7A51a58708C8B40ec592A38bA64C0697Df |
| 165 | +# ------------------------------------------------------------ |
| 166 | +# ... (additional reorg details) |
| 167 | +# ------------------------------------------------------------ |
| 168 | + |
| 169 | +# Aggregated Validators Responsible for Reorgs: |
| 170 | + |
| 171 | +# Validator Address Number of Reorgs Block Numbers |
| 172 | +# ------------------------------------------------------------------------------------------ |
| 173 | +# 0x4e5acf9684652BEa56F2f01b7101a225Ee33d23g 13 43962513, 43966037, 43971672, ... |
| 174 | +# 0x58567F7A51a58708C8B40ec592A38bA64C0697Df 9 43989479, 43996288, 43998896, ... |
| 175 | +# 0x7E1FdF03Eb3aC35BF0256694D7fBe6B6d7b3E0c9 4 43990167, 43977391, 43912043, ... |
| 176 | +# ... (additional validators) |
0 commit comments