Revert "Set all_reduce_token to None when exiting" (#6321)
vanbasten23 committed Jan 19, 2024
1 parent dce624c commit ad669d5
Showing 4 changed files with 1 addition and 29 deletions.
1 change: 0 additions & 1 deletion test/run_tests.sh
@@ -234,7 +234,6 @@ function run_mp_op_tests {
   run_test "$CDIR/test_mp_save.py"
   run_test "$CDIR/test_mp_mesh_reduce.py"
   run_test "$CDIR/test_mp_sync_batch_norm.py"
-  run_test "$CDIR/test_mp_early_exit.py"
   run_pt_xla_debug "$CDIR/debug_tool/test_mp_pt_xla_debug.py"
   run_xla_backend_mp "$CDIR/test_torch_distributed_all_gather_xla_backend.py"
   run_xla_backend_mp "$CDIR/test_torch_distributed_all_reduce_xla_backend.py"
26 changes: 0 additions & 26 deletions test/test_mp_early_exit.py

This file was deleted.

1 change: 1 addition & 0 deletions test/test_zero1.py
@@ -13,6 +13,7 @@
 class XlaZeRO1Test(TestCase):

   @unittest.skipIf(xr.device_type() == 'TPU', "Crash on TPU")
+  @unittest.skipIf(xr.device_type() == 'CUDA', "Crash on CUDA")
   def test_zero1(self):
     device = xm.xla_device()

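The added decorator stacks a second device-type guard on the same test, so it is now skipped on both TPU and CUDA; `unittest.skipIf` evaluates its condition when the class body is defined. A minimal, self-contained sketch of the same pattern, using a hypothetical `detect_accelerator()` helper in place of `xr.device_type()` (i.e. `torch_xla.runtime.device_type()`):

```python
import unittest


def detect_accelerator() -> str:
    # Hypothetical stand-in for torch_xla.runtime.device_type();
    # in torch_xla this would report the configured XLA device type.
    return "CPU"


class ZeRO1Sketch(unittest.TestCase):

    # Stacked skipIf decorators: the test runs only when neither
    # condition is true, mirroring the guards in test_zero1.py.
    @unittest.skipIf(detect_accelerator() == "TPU", "Crash on TPU")
    @unittest.skipIf(detect_accelerator() == "CUDA", "Crash on CUDA")
    def test_zero1(self):
        self.assertEqual(1 + 1, 2)


if __name__ == "__main__":
    unittest.main()
```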
2 changes: 0 additions & 2 deletions torch_xla/__init__.py
@@ -148,8 +148,6 @@ def _setup_tpu_vm_library_path() -> bool:


 def _prepare_to_exit():
-  device = _XLAC._xla_get_default_device()
-  _XLAC._set_all_reduce_token(device, None)
   _XLAC._prepare_to_exit()
   if int(os.environ.get('PT_XLA_DEBUG', '0')):
     _summarize_fn_tracker()
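After the revert, `_prepare_to_exit` no longer resets the all-reduce token; it only calls the native shutdown hook and, when `PT_XLA_DEBUG` is set, the function-tracker summary. A rough, runnable sketch of that exit-hook pattern with placeholder functions standing in for the `_XLAC` bindings (the `atexit` registration is an assumption about how the hook is wired up, not something shown in this diff):

```python
import atexit
import os


def _native_prepare_to_exit():
    # Placeholder for _XLAC._prepare_to_exit(), the native shutdown hook.
    print("releasing runtime resources")


def _summarize_fn_tracker():
    # Placeholder for the PT_XLA_DEBUG function-tracker summary.
    print("fn tracker summary")


def _prepare_to_exit():
    _native_prepare_to_exit()
    # Emit the debug summary only when PT_XLA_DEBUG is set to a non-zero value.
    if int(os.environ.get('PT_XLA_DEBUG', '0')):
        _summarize_fn_tracker()


# Run the hook automatically at interpreter shutdown.
atexit.register(_prepare_to_exit)
```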
