import xmltodict
import xml.etree.ElementTree as ET
import glob
import os
import re
from config import *
from utils import *
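# The star imports above are expected to provide SERVER_DIR (config) and
# sanitize_zone_name (utils), both of which are used below.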
############################
# Entity IDs
############################
# Gather mob-list*.xml into a giant table of { id, name }.
# Go through and check that id and name still match what we have in SQL.
# If not, do a search/replace to try and update them
def index_of_first(lst, pred):
    for i, v in enumerate(lst):
        if pred(v):
            return i
    return None
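# e.g. index_of_first([3, 7, 11], lambda v: v > 5) -> 1; returns None when nothing matches.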

def entity_ids():
    # Collect zone information
    areas = {}
    tree = ET.parse('res/area-names.xml')
    areas_xml = xmltodict.parse(ET.tostring(tree.getroot(), encoding='unicode'))
    for area in areas_xml['thing-list']['thing']:
        try:
            index = int(area['field'][0]['#text'])
            name = area['field'][1]['#text']
            name = sanitize_zone_name(name)
            # Make folders
            if not os.path.exists('out/scripts/zones/' + name):
                os.makedirs('out/scripts/zones/' + name)
        except:
            pass
        areas[index] = name
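
    # areas now maps zone index -> sanitized zone name.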
    # Extract Client NPC id information (build extracted_npc_data dict)
    extracted_npc_data = {}
    handled_zones = []
    zone_ids = []
    dialog_table_list = glob.glob('res/mob-list-*.xml')
    for item in dialog_table_list:
        with open(item, 'r', encoding='utf-8') as file:
            # Sanitize name (basename handles both / and \ path separators)
            zone_num_str = os.path.basename(item).replace('mob-list-', '')
            zone_num_str = zone_num_str.replace('.xml', '')

            # TODO: Handle zone 50-2
            if zone_num_str == "50-2":
                #print("SKIPPING AHT URGHAN WHITEGATE PART 2!")
                continue

            zone_num = int(zone_num_str)
            zone_name = areas[zone_num]

            # Skip obviously invalid or missing zones
            if zone_name.lower() == "unknown" or zone_name.lower() == "none":
                #print(f"Skipping zone name: unknown or none ({zone_num})")
                continue

            # Skip if there is any wrap-around or strangeness from missing zones
            if zone_name in handled_zones:
                #print(f"Skipping wrapped-around zone: {zone_num}")
                continue

            extracted_npc_data[zone_num] = []

            # Parse as XML
            data = file.read()
            zone_tree = ET.fromstring(data)

            # Prepare filenames
            raw_output_filename = 'out/scripts/zones/' + zone_name + "/NPC_IDs.txt"
            with open(raw_output_filename, 'w+') as out_file:
                for entry in zone_tree:
                    # Index: The number
                    index = str(entry[0].text.strip())
                    name = str(entry[1].text.strip())
                    extracted_npc_data[zone_num].append((index, name))
                    out_file.write(f"{index} {name}\n")

            # Mark as done
            handled_zones.append(zone_name)
            zone_ids.append(zone_num)
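
    # extracted_npc_data now maps zone number -> [(index, name), ...] in client
    # order; zone_ids lists every zone that produced usable data.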
    # Extract Server Data (build server_zone_data dict)
    server_zone_data = {}
    for zone_id in zone_ids:  # range(230, 231):
        server_zone_data[zone_id] = []

        # Start looking at target file
        server_filename = SERVER_DIR + "/sql/npc_list.sql"
        sql_data = ""
        with open(server_filename, 'r') as server_file:
            sql_data = server_file.read()

        # Regex out all the entries between the current zone header and the next one
        matches = re.findall(fr"(?<=\(Zone {zone_id})(.*?)(?=Zone [0-9]+\))", sql_data, re.DOTALL)
        if len(matches) == 0:
            print(f"Unable to match on regex for Zone {zone_id}\n")
            continue

        section = matches[0]
        for index, line in enumerate(section.split("\n")):
            # TODO: Handle startswith "-- NC: "
            # Ignore comments
            if line.startswith("-- "):
                continue
            # Ignore all non-insert statements
            if "INSERT INTO" not in line:
                continue
            # Extract out the npcid and the polutils_name
            line_data = line.split("(")
            line_data = line_data[1].split(",")
            server_id = line_data[0].replace("\'", "")
            server_name = line_data[2].replace("\'", "")
            server_zone_data[zone_id].append((server_id, server_name))
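
        # server_zone_data[zone_id] now holds (npcid, polutils_name) string pairs;
        # a row shaped like the NOT_CAPTURED example further down would yield
        # ('17461595', ' ').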
        # Start main shifting logic using zone_id, server_zone_data, and extracted_npc_data
        zone_name = areas[zone_id]
        server_data = server_zone_data[zone_id]
        extracted_client_data = extracted_npc_data[zone_id]

        # Bail on empty zones
        if len(server_data) == 0 or len(extracted_client_data) == 0:
            continue

        # Client index 0 is always: (0, none), so get rid of that
        extracted_client_data.pop(0)

        # Server Entries
        first_server_entry = server_data[0]
        last_server_entry = server_data[-1]

        # Client Entries
        first_client_entry = extracted_client_data[0]
        last_client_entry = extracted_client_data[-1]

        print(zone_id, zone_name)

        # Track how many times we've seen duplicates of the same name, so
        # we know how many we need to skip when searching forwards
        name_skip_counts = {}
        client_idx = 0
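
        # Walk the server rows in order, re-seeking the client list whenever the
        # names at the current positions stop lining up.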
        for server_idx, server_entry in enumerate(server_data):
            client_entry = extracted_client_data[client_idx]

            server_entry_id = server_entry[0]
            server_entry_name = server_entry[1]
            if server_entry_name == " ":
                server_entry_name = ""

            client_entry_id = client_entry[0]
            client_entry_name = client_entry[1]

            skip_count = name_skip_counts.get(server_entry_name, None)
            if skip_count is not None:
                name_skip_counts[server_entry_name] = skip_count + 1
                skip_count = name_skip_counts[server_entry_name]
            else:
                name_skip_counts[server_entry_name] = 0
                skip_count = 0
            while server_entry_name != client_entry_name:
                # Seek from the _beginning_ of the client data, skipping
                # n amount of names we've already seen (this accounts
                # for backwards shifts!)
                skip_current = 0
                found = False
                for client_idx, client_entry in enumerate(extracted_client_data):
                    client_entry_id = client_entry[0]
                    client_entry_name = client_entry[1]
                    skip_target = name_skip_counts[server_entry_name]
                    skip_diff = skip_target - skip_current
                    if server_entry_name == client_entry_name and skip_diff == 0:
                        # Assumed completion (the original left `pass` here): this is
                        # the occurrence we want, so stop seeking.
                        found = True
                        break
                    if server_entry_name == client_entry_name:
                        # Assumed completion: this occurrence was already consumed by
                        # an earlier server entry; count it and keep scanning.
                        skip_current += 1
                if not found:
                    # Assumed completion: no matching client entry for this server
                    # name; bail out rather than loop forever.
                    break

                # Handle new entries
                #-- NC: INSERT INTO `npc_list` VALUES (17461595,'NOT_CAPTURED',' ',0,0.000,0.000,0.000,0,50,50,0,0,0,0,0,0x0000320000000000000000000000000000000000,0,NULL,0);

    # Collect list of shifts
    # Apply shifts to npc_list.sql (a rough sketch of this step follows below)
    # Hunt for those shifts in the server codebase and find/replace
    # - nm_spawn_points.sql
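

# The shift-collection and shift-application steps above are still TODO in this file.
# The helper below is an illustrative sketch only, not part of the original script:
# it assumes a "shift" is an (old_id, new_id) pair of npc ids, and the name
# `apply_id_shifts` is invented here. The same plain find/replace could later be
# pointed at nm_spawn_points.sql as well.
def apply_id_shifts(sql_path, shifts):
    """Sketch: rewrite an SQL file, replacing each old npc id with its new id."""
    with open(sql_path, 'r') as sql_file:
        sql_data = sql_file.read()
    for old_id, new_id in shifts:
        # Only touch the id immediately after the opening parenthesis of a VALUES
        # tuple so that other numeric columns are left alone.
        sql_data = sql_data.replace(f"({old_id},", f"({new_id},")
    with open(sql_path, 'w') as sql_file:
        sql_file.write(sql_data)
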
# Enable to test just this file
#entity_ids()