
Lots of clean up left, but its been like 3 days bruhh
Lilaa3 committed Jan 14, 2024
1 parent 0b29320 commit 4fa7e9b
Showing 9 changed files with 7,962 additions and 702 deletions.
6 changes: 3 additions & 3 deletions fast64_internal/oot/cutscene/properties.py
@@ -204,13 +204,13 @@ class OOTCSListProperty(PropertyGroup):

    def draw_props(self, layout: UILayout, listIndex: int, objName: str, collectionType: str):
        box = layout.box().column()
-        enumName = getEnumName(ootEnumCSListType, self.listType)
+        enum_name = getEnumName(ootEnumCSListType, self.listType)

        # Draw current command tab
        box.prop(
            self,
            "expandTab",
-            text=enumName,
+            text=enum_name,
            icon="TRIA_DOWN" if self.expandTab else "TRIA_RIGHT",
        )

@@ -274,7 +274,7 @@ def draw_props(self, layout: UILayout, listIndex: int, objName: str, collectionT
            # ``p`` type:
            # OOTCSTextProperty | OOTCSLightSettingsProperty | OOTCSTimeProperty |
            # OOTCSSeqProperty | OOTCSMiscProperty | OOTCSRumbleProperty
-            p.draw_props(box, self, listIndex, i, objName, collectionType, enumName.removesuffix(" List"))
+            p.draw_props(box, self, listIndex, i, objName, collectionType, enum_name.removesuffix(" List"))

        if len(data) == 0:
            box.label(text="No items in " + getEnumName(ootEnumCSListType, self.listType))
295 changes: 295 additions & 0 deletions fast64_internal/sm64/animation/c_parser.py
@@ -0,0 +1,295 @@
from dataclasses import dataclass, field
import re
from typing import List, Union

from ...utility import PluginError


@dataclass
class IfDefMacro:
    type: str = ""
    value: str = ""


@dataclass
class ParsedValue:
    value: Union[str, int, float]
    if_def: IfDefMacro

    def set_or_add(self, value):
        if isinstance(self.value, list):
            self.value.append(value)
        else:
            self.value = value


@dataclass
class MacroCall(ParsedValue):
    name: str = ""


@dataclass
class DesignatedValue(ParsedValue):
    name: str = ""


@dataclass
class Include:
    path: str = ""


@dataclass
class Initialization(ParsedValue):
    keywords: List[str] = field(default_factory=list)
    is_extern: bool = False
    is_static: bool = False
    is_const: bool = False
    is_enum: bool = False
    is_struct: bool = False
    pointer_depth: int = 0
    name: str = ""  # identifier of the declaration, filled in by CParser.read_keywords()

    origin_path: str = ""

    def array_to_struct_dict(self, struct_definition: list[str]):
        if not isinstance(self.value, ParsedValue) or not isinstance(self.value.value, list):
            raise PluginError("Assumed struct is not a list.")
        struct_dict = {}
        for i, element in enumerate(self.value.value):
            if isinstance(element, DesignatedValue):
                struct_dict[element.name] = element.value
            else:
                struct_dict[struct_definition[i]] = element.value
        return struct_dict
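
    # Example (illustrative): for a braced initializer such as
    #     { .flags = ANIM_FLAG_NOLOOP, 189 }
    # array_to_struct_dict(["flags", "animYTransDivisor"]) keys the designated entry
    # by its ".flags" name and the positional entry by its index in struct_definition.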


delimiters = (
    "->",
    ">>",
    "<<",
    "/*",
    "\n",
    "=",
    ";",
    "{",
    "}",
    "(",
    ")",
    "[",
    "]",
    ",",
    "&",
    "^",
    "#",
    ":",
    '"',
    "'",
    "|",
    "\\",
    "/",
    "%",
    "*",
    ".",
    "+",
    "-",
    ">",
    "<",
)

delimiters_pattern = "|".join(map(re.escape, delimiters))

token_pattern = re.compile(
    r"""
    (?:                               # Non-capturing group for alternatives
        [^{delimiters_pattern}\s"']+  # Match characters that are not delimiters or whitespace or quotes
        |
        "[^"]*"                       # Match double-quoted strings
        |
        '[^']*'                       # Match single-quoted strings
    )
    |
    [{delimiters_pattern}]            # Match any of the delimiters
    """.format(
        delimiters_pattern=re.escape(delimiters_pattern)
    ),
    re.VERBOSE,
)

comment_pattern = re.compile(r"/\*.*?\*/|//.*?$", re.DOTALL | re.MULTILINE)
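
# Example (illustrative): tokenizing the line
#     static const s16 foo[] = { 0x10, 2 };
# yields roughly
#     ["static", "const", "s16", "foo", "[", "]", "=", "{", "0x10", ",", "2", "}", ";"]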


class CParser:
    def get_tabs(self):
        return "\t" * len(self.stack)

    def handle_accumulated_tokens(self):
        # Turn the tokens gathered since the last delimiter into a ParsedValue
        # (quoted string, hex int, number or raw text) and attach it to the value on top of the stack.
        if not self.accumulated_tokens:
            return
        joined = " ".join(self.accumulated_tokens)
        joined_stripped = joined.replace(" ", "").replace("\n", "").replace("\t", "")
        if not joined_stripped:
            return

        if joined_stripped.startswith(('"', "'")):
            value = joined
        elif joined_stripped.startswith("0x"):
            value = int(joined_stripped, 16)
        else:
            try:
                value = float(joined_stripped)
                if value.is_integer():
                    value = int(value)
            except ValueError:
                value = joined_stripped

        self.stack[-1].set_or_add(ParsedValue(value, self.if_defs.copy()))
        if isinstance(self.stack[-1], DesignatedValue):
            self.stack.pop()

        self.accumulated_tokens.clear()

    def read_macro(self, prev_token: str, cur_token: str):
        # Accumulate preprocessor tokens until an unescaped newline, then handle the directive.
        if cur_token == "\n" and not prev_token == "\\":
            macro_type = self.accumulated_macro_tokens[0]
            if macro_type == "include":
                if self.stack:
                    self.stack[-1].set_or_add(Include(self.accumulated_macro_tokens[1]))
            elif macro_type in {"ifdef", "if", "ifndef"}:
                self.if_defs.append(IfDefMacro(macro_type, " ".join(self.accumulated_macro_tokens[1:])))
            elif macro_type in {"elif", "else"}:
                self.if_defs.pop()
                self.if_defs.append(IfDefMacro(macro_type, " ".join(self.accumulated_macro_tokens[1:])))
            elif macro_type == "endif":
                self.if_defs.pop()
            else:
                raise PluginError(f"Unimplemented macro. {macro_type}")

            self.accumulated_macro_tokens.clear()
            self.reading_macro = False
            return

        self.accumulated_macro_tokens.append(cur_token)
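
    # Illustrative: between an "#ifdef VERSION_EU" and its "#endif", every parsed value
    # carries a copy of the active if_defs list in its if_def field.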

    def read_values(self, prev_token: str, cur_token: str):
        # Parse initializer values: designated initializers, macro calls and nested braces,
        # using self.stack to track the current nesting level.
        if cur_token == "=":
            designated_value = DesignatedValue(
                None, self.if_defs.copy(), "".join(self.accumulated_tokens).strip().replace(".", "", 1)
            )
            self.accumulated_tokens.clear()

            self.stack[-1].set_or_add(designated_value)
            self.stack.append(designated_value)
        elif cur_token == "(":
            macro = MacroCall([], self.if_defs.copy(), "".join(self.accumulated_tokens).strip())
            self.accumulated_tokens.clear()
            self.stack[-1].set_or_add(macro)
            self.stack.append(macro)
        elif cur_token == "{":
            self.handle_accumulated_tokens()

            array = ParsedValue([], self.if_defs.copy())
            self.stack[-1].set_or_add(array)
            self.stack.append(array)
        elif cur_token in {"}", ")"} or (cur_token == ";" and not self.reading_function):
            self.handle_accumulated_tokens()

            self.stack.pop()
            if len(self.stack) == 1 and self.reading_function:
                # Exiting stack because of function
                self.stack.pop()
            if len(self.stack) == 0:
                self.reading_function = False
                self.reading_keywords = True
                self.cur_initializer = Initialization(None, IfDefMacro())
            elif isinstance(self.stack[-1], DesignatedValue):
                self.stack.pop()
        elif cur_token == ";" or cur_token == ",":
            self.handle_accumulated_tokens()
        else:
            self.accumulated_tokens.append(cur_token)

    def read_keywords(self, prev_token: str, cur_token: str):
        # Parse declaration keywords/qualifiers up to the "=", "{" or ";" that starts the value.
        if self.reading_array_size:
            if cur_token == "]":
                self.reading_array_size = False
            return
        else:
            if cur_token == "[":
                self.reading_array_size = True
                return

        add_token = False
        if cur_token == "static":
            self.cur_initializer.is_static = True
        elif cur_token == "const":
            self.cur_initializer.is_const = True
        elif cur_token == "extern":
            self.cur_initializer.is_extern = True
        elif cur_token == "enum":
            self.cur_initializer.is_enum = True
        elif cur_token == "struct":
            self.cur_initializer.is_struct = True
        elif cur_token == "*":
            self.cur_initializer.pointer_depth += 1
        else:
            add_token = True

        if not add_token:
            return

        if cur_token in {"=", "{", ";"}:
            self.values.append(self.cur_initializer)
            if prev_token == ")" and cur_token == "{":
                self.reading_function = True

            self.stack.append(self.cur_initializer)

            self.cur_initializer.name = self.cur_initializer.keywords[-1]
            self.values_by_name[self.cur_initializer.name] = self.cur_initializer
            self.cur_initializer.origin_path = self.origin_path
            self.cur_initializer.if_def = self.if_defs.copy()
            self.reading_keywords = False

        elif cur_token != "\n":
            self.cur_initializer.keywords.append(cur_token)

    def read_c_text(self, text: str, origin_path: str = ""):
        # Tokenize the given C text (with comments stripped) and parse its top-level
        # declarations into self.values / self.values_by_name.
        self.cur_initializer = Initialization(None, IfDefMacro())
        self.reading_array_size = False
        self.reading_keywords = True
        self.reading_function = False  # Used for stack stuff, functions are not supported
        self.reading_macro = False

        self.stack: list[ParsedValue] = []
        self.accumulated_tokens: list[str] = []
        self.accumulated_macro_tokens: list[str] = []
        self.if_defs: list[IfDefMacro] = []

        self.origin_path = origin_path

        tokens = re.findall(token_pattern, re.sub(comment_pattern, "", text))

        prev_token = ""
        for i, cur_token in enumerate(tokens):
            prev_token = tokens[i - 1] if i > 0 else ""
            # next_token = tokens[i + 1]
            if cur_token == "#":
                self.reading_macro = True
                continue
            if self.reading_macro:
                self.read_macro(prev_token, cur_token)
                continue

            if self.reading_keywords:
                self.read_keywords(prev_token, cur_token)
                if cur_token == "=":
                    continue  # HACK!!!
            if not self.reading_keywords:
                self.read_values(prev_token, cur_token)

    def __init__(self) -> None:
        self.values: list[Initialization] = []
        self.values_by_name: dict[str, Initialization] = {}
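
A minimal usage sketch of the new parser (illustrative; assumes fast64 is importable, e.g. from inside Blender, and the C snippet and path below are made up):

from fast64_internal.sm64.animation.c_parser import CParser

parser = CParser()
parser.read_c_text(
    "static const struct Animation example_anim = { .flags = 1, 189 };\n",
    origin_path="assets/anims/example.inc.c",
)

decl = parser.values_by_name["example_anim"]
print(decl.keywords)                  # ['Animation', 'example_anim']
print(decl.is_static, decl.is_const)  # True True
struct_dict = decl.array_to_struct_dict(["flags", "animYTransDivisor"])
print(struct_dict.keys())             # dict_keys(['flags', 'animYTransDivisor'])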