From 23d1272bf50c20c899f9eeb94e2f5df4f4e432a4 Mon Sep 17 00:00:00 2001
From: Jiewen Tan
Date: Mon, 11 Dec 2023 22:12:28 +0000
Subject: [PATCH] nit

---
 test/spmd/test_xla_sharding.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/test/spmd/test_xla_sharding.py b/test/spmd/test_xla_sharding.py
index 66f15fc0662d..e6d49e27d7ef 100644
--- a/test/spmd/test_xla_sharding.py
+++ b/test/spmd/test_xla_sharding.py
@@ -1052,7 +1052,6 @@ def test_backward_optimization_barrier(self):
     # The first layer won't have gradients in the hook. Not sure why.
     xs.xla_sharding.apply_backward_optimization_barrier(model.fc2)
 
-    # optimizer.zero_grad()
     x = torch.randn(2, 128).to(xm.xla_device())
     y = model(x)
     loss = y.sum()
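
For context, the hunk sits inside test_backward_optimization_barrier, which
exercises applying a backward optimization barrier to one layer under SPMD.
Below is a minimal sketch of that flow; everything outside the hunk's own
lines (the SimpleLinear model and optimizer setup, the trailing
backward/step calls, and the import paths) is an assumption for
illustration, not taken from this patch.

    import torch
    import torch.optim as optim
    import torch_xla.core.xla_model as xm
    # Assumed import path for the `xs` alias used in the test file.
    import torch_xla.distributed.spmd as xs

    # Assumed setup: a small model exposing fc1/fc2, as `model.fc2` in the
    # hunk implies, plus an optimizer whose zero_grad() call the patch drops.
    model = SimpleLinear().to(xm.xla_device())
    optimizer = optim.SGD(model.parameters(), lr=0.1)

    # From the hunk: wrap fc2's backward in an optimization barrier, which
    # is intended to keep XLA from fusing computation across that boundary.
    # The first layer won't have gradients in the hook. Not sure why.
    xs.xla_sharding.apply_backward_optimization_barrier(model.fc2)

    x = torch.randn(2, 128).to(xm.xla_device())
    y = model(x)
    loss = y.sum()
    loss.backward()   # assumed continuation of the test
    optimizer.step()
    xm.mark_step()    # materialize the lazy XLA graph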