
Commit

amend
vmoens committed Nov 24, 2023
1 parent c5b4bda commit ed5b511
Showing 1 changed file with 7 additions and 3 deletions.
tutorials/sphinx-tutorials/rb_tutorial.py (10 changes: 7 additions & 3 deletions)
@@ -46,6 +46,7 @@
 # replay buffer is a straightforward process, as shown in the following
 # example:
 #
+import tempfile
 
 from torchrl.data import ReplayBuffer
 
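For context, a minimal runnable sketch of the populating pattern described by the comment in this hunk; the capacity value and the integer payload are illustrative, not taken from the tutorial:

from torchrl.data import ReplayBuffer

size = 100  # illustrative capacity, not the tutorial's value
buffer = ReplayBuffer()  # default list-based storage
buffer.extend(list(range(size)))  # add many elements in one call
print(f"The buffer has {len(buffer)} elements")
sample = buffer.sample(batch_size=8)  # draw a random batch of 8 elements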
@@ -175,8 +176,9 @@
 ######################################################################
 # We can also customize the storage location on disk:
 #
+tempdir = tempfile.TemporaryDirectory()
 buffer_lazymemmap = ReplayBuffer(
-    storage=LazyMemmapStorage(size, scratch_dir="/tmp/memmap/")
+    storage=LazyMemmapStorage(size, scratch_dir=tempdir)
 )
 buffer_lazymemmap.extend(data)
 print(f"The buffer has {len(buffer_lazymemmap)} elements")
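A self-contained sketch of the pattern this hunk introduces, a temporary scratch directory instead of a hard-coded /tmp path. The TensorDict payload is illustrative, and the directory's .name attribute is used here to pass an explicit path string:

import tempfile

import torch
from tensordict import TensorDict
from torchrl.data import LazyMemmapStorage, ReplayBuffer

size = 100  # illustrative capacity
data = TensorDict({"obs": torch.randn(size, 4)}, batch_size=[size])  # illustrative content

tempdir = tempfile.TemporaryDirectory()  # scratch space, removed on cleanup
buffer_lazymemmap = ReplayBuffer(
    storage=LazyMemmapStorage(size, scratch_dir=tempdir.name)
)
buffer_lazymemmap.extend(data)
print(f"The buffer has {len(buffer_lazymemmap)} elements")
tempdir.cleanup()  # delete the memory-mapped files once the buffer is no longer needed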
@@ -207,8 +209,9 @@
 
 from torchrl.data import TensorDictReplayBuffer
 
+tempdir = tempfile.TemporaryDirectory()
 buffer_lazymemmap = TensorDictReplayBuffer(
-    storage=LazyMemmapStorage(size, scratch_dir="/tmp/memmap/"), batch_size=12
+    storage=LazyMemmapStorage(size, scratch_dir=tempdir), batch_size=12
 )
 buffer_lazymemmap.extend(data)
 print(f"The buffer has {len(buffer_lazymemmap)} elements")
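A hedged sketch of what the batch_size=12 passed at construction buys: sample() can then be called with no argument. The payload below is illustrative:

import tempfile

import torch
from tensordict import TensorDict
from torchrl.data import LazyMemmapStorage, TensorDictReplayBuffer

size = 100  # illustrative capacity
data = TensorDict({"a": torch.randn(size, 3)}, batch_size=[size])  # illustrative content

tempdir = tempfile.TemporaryDirectory()
buffer_lazymemmap = TensorDictReplayBuffer(
    storage=LazyMemmapStorage(size, scratch_dir=tempdir.name), batch_size=12
)
buffer_lazymemmap.extend(data)
sample = buffer_lazymemmap.sample()  # no batch size needed at call time
print(sample.batch_size)  # expected: torch.Size([12])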
@@ -248,8 +251,9 @@ class MyData:
     batch_size=[1000],
 )
 
+tempdir = tempfile.TemporaryDirectory()
 buffer_lazymemmap = TensorDictReplayBuffer(
-    storage=LazyMemmapStorage(size, scratch_dir="/tmp/memmap/"), batch_size=12
+    storage=LazyMemmapStorage(size, scratch_dir=tempdir), batch_size=12
 )
 buffer_lazymemmap.extend(data)
 print(f"The buffer has {len(buffer_lazymemmap)} elements")
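For readers landing on this hunk without the surrounding file: a hedged sketch of the tensorclass-backed variant it touches. The field names and shapes are illustrative placeholders, not the tutorial's exact definition of MyData:

import tempfile

import torch
from tensordict import tensorclass
from torchrl.data import LazyMemmapStorage, TensorDictReplayBuffer


@tensorclass
class MyData:
    images: torch.Tensor  # illustrative field
    labels: torch.Tensor  # illustrative field


size = 1000  # illustrative capacity matching the batch_size=[1000] context line
data = MyData(
    images=torch.randint(255, (size, 64, 64, 3)),
    labels=torch.randint(100, (size,)),
    batch_size=[size],
)

tempdir = tempfile.TemporaryDirectory()
buffer_lazymemmap = TensorDictReplayBuffer(
    storage=LazyMemmapStorage(size, scratch_dir=tempdir.name), batch_size=12
)
buffer_lazymemmap.extend(data)
sample = buffer_lazymemmap.sample()
print(type(sample))  # samples come back as MyData instances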
