Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[Feature] flatten_keys and unflatten_keys as context managers #908

Merged
merged 1 commit into from
Jul 22, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion tensordict/_td.py
Original file line number Diff line number Diff line change
Expand Up @@ -2952,7 +2952,7 @@ def keys(
leaves_only: bool = False,
is_leaf: Callable[[Type], bool] | None = None,
) -> _TensorDictKeysView:
if not include_nested and not leaves_only:
if not include_nested and not leaves_only and is_leaf is None:
return _StringKeys(self._tensordict.keys())
else:
return self._nested_keys(
Expand Down
14 changes: 14 additions & 0 deletions tensordict/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -7931,6 +7931,18 @@ def __exit__(self, exc_type, exc_val, exc_tb):
return out.update(self.transpose(dim0, dim1), inplace=False)
else:
return out.update_(self.transpose(dim0, dim1))
elif last_op == type(self).flatten_keys.__name__:
sep = args[0] if args else "."
if not out.is_locked:
return out.update(self.unflatten_keys(sep), inplace=False)
else:
return out.update_(self.unflatten_keys(sep))
elif last_op == type(self).unflatten_keys.__name__:
sep = args[0] if args else "."
if not out.is_locked:
return out.update(self.flatten_keys(sep), inplace=False)
else:
return out.update_(self.flatten_keys(sep))
elif last_op == type(self).flatten.__name__:
if len(args) == 2:
dim0, dim1 = args
Expand Down Expand Up @@ -8623,6 +8635,7 @@ def contiguous(self) -> T:
...

@cache # noqa: B019
@_as_context_manager()
def flatten_keys(
self,
separator: str = ".",
Expand Down Expand Up @@ -8778,6 +8791,7 @@ def _flatten_keys_inplace(self, separator, is_leaf):
return self

@cache # noqa: B019
@_as_context_manager()
def unflatten_keys(self, separator: str = ".", inplace: bool = False) -> T:
"""Converts a flat tensordict into a nested one, recursively.

Expand Down
3 changes: 3 additions & 0 deletions tensordict/persistent.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@
)
from tensordict.memmap import MemoryMappedTensor
from tensordict.utils import (
_as_context_manager,
_CloudpickleWrapper,
_KEY_ERROR,
_LOCK_ERROR,
Expand Down Expand Up @@ -943,13 +944,15 @@ def _exclude(
"Create a regular tensordict first using the `to_tensordict` method."
)

@_as_context_manager()
def flatten_keys(self, separator: str = ".", inplace: bool = False) -> T:
if inplace:
raise ValueError(
"Cannot call flatten_keys in_place with a PersistentTensorDict."
)
return self.to_tensordict().flatten_keys(separator=separator)

@_as_context_manager()
def unflatten_keys(self, separator: str = ".", inplace: bool = False) -> T:
if inplace:
raise ValueError(
Expand Down
15 changes: 15 additions & 0 deletions test/test_tensordict.py
Original file line number Diff line number Diff line change
Expand Up @@ -3514,6 +3514,21 @@ def test_flatten_keys(self, td_name, device, inplace, separator):
else:
assert td_flatten is not td

    def test_flatten_keys_decorator(self, td_name, device):
        # Checks that flatten_keys/unflatten_keys act as context managers:
        # mutations made inside the context are written back to the parent
        # tensordict when the context exits.
        td = getattr(self, td_name)(device)
        with td.flatten_keys(",") as tdflat:
            # In the flattened view, nested-key iteration and plain key
            # iteration must agree (there are no nested keys left).
            assert set(tdflat.keys(True, True, is_leaf=_is_leaf_nontensor)) == set(
                tdflat.keys(is_leaf=_is_leaf_nontensor)
            )
            with tdflat.unflatten_keys(",") as td_orig:
                # Round-tripping flatten -> unflatten recovers the original.
                assert (td_orig == td).all()
                if not td.is_locked:
                    td_orig["new", "data"] = torch.zeros(td_orig.shape)
            # Exiting the inner context propagates the new entry to the
            # flattened view under the separator-joined key.
            if not td.is_locked:
                assert (tdflat["new,data"] == 0).all()
        # Exiting the outer context propagates the entry back to `td`
        # as a nested key.
        if not td.is_locked:
            assert (td["new", "data"] == 0).all()

def test_flatten_unflatten(self, td_name, device):
td = getattr(self, td_name)(device)
shape = td.shape[:3]
Expand Down
Loading