Commit

add ut
MRXLT committed Aug 23, 2020
1 parent 5d42420 commit 9094782
Showing 4 changed files with 937 additions and 50 deletions.
26 changes: 19 additions & 7 deletions python/paddle/fluid/tests/unittests/test_adam_op.py
@@ -454,7 +454,6 @@ def test_adam_op_with_state_dict(self):

        adam = paddle.optimizer.Adam(0.001, parameters=emb.parameters())
        state_dict = adam.state_dict()
        adam.set_state_dict(state_dict)

        #learning_rate is Decay
@@ -463,7 +462,7 @@ def test_adam_op_with_state_dict(self):
            learning_rate=learning_rate,
            weight_decay=fluid.regularizer.L2Decay(0.001),
            parameters=emb.parameters())
        lr = adam.get_lr()
        state_dict = adam.state_dict()
        adam.set_state_dict(state_dict)

@@ -474,14 +473,23 @@ def test_adam_op_with_state_dict(self):
        adam = paddle.optimizer.Adam(
            learning_rate=learning_rate, parameters=emb.parameters())

        state_dict = adam.state_dict()
        adam.set_state_dict(state_dict)

        params = adam.get_opti_var_name_list()
        assert (params is not None)

    def test_adam_with_grad_clip(self):
        paddle.disable_static()
        value = np.arange(26).reshape(2, 13).astype("float32")
        a = fluid.dygraph.to_variable(value)
        linear = fluid.Linear(13, 5, dtype="float32")
        clip = fluid.clip.GradientClipByGlobalNorm(clip_norm=1.0)
        adam = paddle.optimizer.Adam(
            0.1, parameters=linear.parameters(), grad_clip=clip)
        out = linear(a)
        out.backward()
        adam.step()
        adam.clear_gradients()

    def test_adam_op_with_set_lr(self):
        import paddle
        paddle.disable_static()
        linear = paddle.nn.Linear(10, 10)
        adam = paddle.optimizer.Adam(0.1, parameters=linear.parameters())
@@ -496,6 +504,10 @@ def test_adam_op_with_set_lr(self):
        cur_lr = adam.get_lr()
        assert (np.float32(lr) == cur_lr)

        with self.assertRaises(TypeError):
            lr = int(1)
            adam.set_lr(lr)


if __name__ == "__main__":
    unittest.main()
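
The new tests above exercise three optimizer behaviors: round-tripping state through state_dict()/set_state_dict(), gradient clipping configured on the constructor via grad_clip, and changing the learning rate at runtime with set_lr(). As a rough sketch of how these pieces fit together in dygraph mode (not part of the commit; assembled only from the calls the tests themselves make):

import numpy as np
import paddle
import paddle.fluid as fluid

paddle.disable_static()  # run eagerly, as the tests do

# A small layer whose gradients will be clipped by global norm.
linear = fluid.Linear(13, 5, dtype="float32")
clip = fluid.clip.GradientClipByGlobalNorm(clip_norm=1.0)
adam = paddle.optimizer.Adam(
    0.1, parameters=linear.parameters(), grad_clip=clip)

# One forward/backward/update cycle; the optimizer applies the clip
# when it computes the parameter update.
x = fluid.dygraph.to_variable(
    np.arange(26).reshape(2, 13).astype("float32"))
out = linear(x)
out.backward()
adam.step()
adam.clear_gradients()

# Save and restore optimizer state, then adjust the learning rate.
adam.set_state_dict(adam.state_dict())
adam.set_lr(0.05)  # a float is accepted; set_lr(int(1)) raises TypeError
assert np.float32(0.05) == adam.get_lr()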
2 changes: 1 addition & 1 deletion python/paddle/fluid/tests/unittests/test_adamw_op.py
@@ -44,7 +44,7 @@ def test_adamw_op_coverage(self):
            parameters=linear.parameters(),
            apply_decay_param_fun=lambda name: True,
            weight_decay=0.01)
-        assert (adam.__str__ is not None)
+        assert (adam.__str__() is not None)

    def test_adamw_op(self):
        place = fluid.CPUPlace()
(The remaining lines of this file, and the other two changed files, did not load and are not shown.)
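
The one-line change in this file fixes an assertion that could never fail: adam.__str__ is a bound method object and is therefore always non-None, so the old test said nothing about the optimizer's string form; calling the method actually exercises it. A minimal sketch of the corrected check (the learning_rate value is illustrative, since the diff does not show it):

import paddle

paddle.disable_static()
linear = paddle.nn.Linear(10, 10)
adam = paddle.optimizer.AdamW(
    learning_rate=0.01,  # illustrative; not shown in the diff
    parameters=linear.parameters(),
    apply_decay_param_fun=lambda name: True,  # apply decay to every parameter
    weight_decay=0.01)

# __str__() must be called; the bare attribute is always truthy.
assert adam.__str__() is not None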

1 comment on commit 9094782

@paddle-bot-old


Congratulations! Your pull request passed all required CI checks. You can ask reviewer(s) to approve and merge. 🎉
