
Commit

BugFix: Agent Python did not account for variable scope.
Closes #1125
Robadob authored and ptheywood committed Dec 15, 2023
1 parent 24b167f commit 79525da
Showing 2 changed files with 27 additions and 5 deletions.
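
In outline: the generator's `_locals` bookkeeping changes from a flat list of names to a dict mapping each local to the indentation depth at which it was declared, and `leave()` now purges entries declared deeper than the current indent, so a name reused in a sibling block is treated as a fresh declaration. A minimal sketch of that scheme follows; it is not the FLAME GPU 2 CodeGenerator itself, and the class and method names ScopeTracker, enter and declare are hypothetical.

# Minimal sketch of the scope-tracking scheme introduced by this commit.
class ScopeTracker:
    def __init__(self):
        self._indent = 0
        self._locals = {"pyflamegpu": 0}  # name -> indent depth where declared

    def enter(self):
        self._indent += 1

    def leave(self):
        self._indent -= 1
        # Purge _locals of out-of-scope variables (mirrors the change to leave())
        d_key = [key for key, val in self._locals.items() if val > self._indent]
        for key in d_key:
            del self._locals[key]

    def declare(self, name):
        is_new = name not in self._locals  # a new local gets 'auto' in the C++ output
        self._locals[name] = self._indent
        return is_new


tracker = ScopeTracker()
tracker.enter()
print(tracker.declare("foo"))  # True: first loop body declares 'auto foo'
tracker.leave()                # leaving the loop purges 'foo'
tracker.enter()
print(tracker.declare("foo"))  # True again: the second loop re-declares 'auto foo'
tracker.leave()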
14 changes: 9 additions & 5 deletions swig/python/codegen/codegen.py
@@ -118,7 +118,7 @@ def __init__(self, tree, file = sys.stdout):
         self.future_imports = []
         self._indent = 0
         # dict of locals used to determine if variable already exists in assignments
-        self._locals = ["pyflamegpu"]
+        self._locals = {"pyflamegpu": 0}
         self._device_functions = []
         self._message_iterator_var = None # default
         self._input_message_var = 'message_in' # default
@@ -189,6 +189,10 @@ def leave(self):
         "Decrease the indentation level and Print '}'"
         self._indent -= 1
         self.fill("}")
+        # Purge _locals of out of scope variables
+        d_key = [key for key, val in self._locals.items() if val > self._indent]
+        for key in d_key:
+            del self._locals[key]
 
     def dispatch(self, tree):
         "Dispatcher function, dispatching tree type T to method _T."
@@ -252,7 +256,7 @@ def dispatchFGPUFunctionArgs(self, tree):
         Type hinting is required to translate a type into a FLAME GPU Message type implementation
         """
         # reset the locals variable stack
-        self._locals = ["pyflamegpu"]
+        self._locals = {"pyflamegpu": 0}
         if len(tree.args.args) != 2:
             self.RaiseError(tree, "Expected two FLAME GPU function arguments (input message and output message)")
         # input message
@@ -308,7 +312,7 @@ def dispatchFGPUDeviceFunctionArgs(self, tree):
         Handles arguments for a FLAME GPU device function. Arguments must use type hinting to be translated to cpp.
         """
         # reset the locals variable stack
-        self._locals = ["pyflamegpu"]
+        self._locals = {"pyflamegpu": 0}
         # input message
         first = True
         annotation = None
@@ -322,7 +326,7 @@ def dispatchFGPUDeviceFunctionArgs(self, tree):
             self.dispatchType(arg.annotation)
             self.write(f" {arg.arg}")
             # add arg to local variable stack
-            self._locals.append(arg.arg)
+            self._locals[arg.arg] = self._indent
             first = False
 
     def dispatchMessageIteratorCall(self, tree):
@@ -732,7 +736,7 @@ def _Assign(self, t):
             if self._indent == 0:
                 self.write("constexpr ")
             self.write("auto ")
-            self._locals.append(t.targets[0].id)
+            self._locals[t.targets[0].id] = self._indent
         self.dispatch(t.targets[0])
         self.write(" = ")
         self.dispatch(t.value)
18 changes: 18 additions & 0 deletions tests/python/codegen/test_codegen.py
@@ -138,6 +138,21 @@ class Foo: pass
 a += 3;
 """
 
+py_var_scope = """\
+for i in range(10):
+    foo = pyflamegpu.getVariableInt("foo")
+for i in range(10):
+    foo = pyflamegpu.getVariableInt("foo")
+"""
+cpp_var_scope = """\
+for (int i=0;i<10;i++){
+    auto foo = FLAMEGPU->getVariable<int>("foo");
+}
+for (int i=0;i<10;i++){
+    auto foo = FLAMEGPU->getVariable<int>("foo");
+}
+"""
+
 py_with_simple = """\
 with f():
     suite1
@@ -824,6 +839,9 @@ def test_variable_annotation(self):
 
     def test_variable_existing(self):
         self._checkExpected(py_var_existing, cpp_var_existing)
+
+    def test_variable_scope(self):
+        self._checkExpected(py_var_scope, cpp_var_scope)
 
 
     def test_with(self):
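For reference, the new test pins down the behaviour the scope purge enables. Before this change, `foo` presumably remained in `_locals` after the first loop ended, so the second assignment would have been emitted without `auto`, producing C++ that assigns to a variable no longer in scope. A hedged reconstruction of that pre-fix output, in the style of the test file (this string is not taken from the repository):

# Presumed pre-fix translation of py_var_scope; a reconstruction, not actual
# repository content. The second assignment loses its 'auto' because 'foo'
# was still tracked as an existing local.
cpp_var_scope_pre_fix = """\
for (int i=0;i<10;i++){
    auto foo = FLAMEGPU->getVariable<int>("foo");
}
for (int i=0;i<10;i++){
    foo = FLAMEGPU->getVariable<int>("foo");
}
"""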
