Add unit test #1

Merged
3 commits merged on May 23, 2017
Changes from 1 commit
22 changes: 22 additions & 0 deletions python/mxnet/test_utils.py
@@ -1018,3 +1018,25 @@ def set_env_var(key, val, default_val=""):
    prev_val = os.environ.get(key, default_val)
    os.environ[key] = val
    return prev_val

def same_array(array1, array2):
    """Check whether two NDArrays share the same memory block.

    Parameters
    ----------
    array1 : NDArray
        First NDArray to be checked
    array2 : NDArray
        Second NDArray to be checked

    Returns
    -------
    bool
        Whether the two NDArrays share the same memory
    """
    array1[:] += 1
    if not np.array_equal(array1.asnumpy(), array2.asnumpy()):
Owner: Use test_utils.same(array1.asnumpy(), array2.asnumpy()) to keep the unit test interface consistent.

        return False
Owner: Change the value of array1 back before return False.

    array1[:] -= 1
    return np.array_equal(array1.asnumpy(), array2.asnumpy())
Owner: Use test_utils.same
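For reference, a minimal sketch of what same_array could look like once both review suggestions above are applied is shown below. This is only an illustration, not the committed code; it assumes test_utils.same(a, b) performs an exact elementwise comparison of two numpy arrays (same is defined earlier in this module).

```python
def same_array(array1, array2):
    """Check whether two NDArrays share the same memory block."""
    array1[:] += 1  # perturb array1; a shared buffer would show the change in array2 too
    if not same(array1.asnumpy(), array2.asnumpy()):
        array1[:] -= 1  # restore array1 before returning, per the review comment
        return False
    array1[:] -= 1  # undo the perturbation
    return same(array1.asnumpy(), array2.asnumpy())
```

As a quick sanity check, same_array(a, a) should return True while same_array(a, a.copy()) should return False for a = mx.nd.ones((2, 2)), since copy() allocates a new buffer.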

75 changes: 75 additions & 0 deletions tests/python/unittest/test_module.py
@@ -2,6 +2,7 @@
import mxnet.ndarray as nd
import numpy as np
from functools import reduce
from mxnet.module.executor_group import DataParallelExecutorGroup

def test_module_dtype():
    dtype = np.float16
@@ -254,6 +255,79 @@ def mean_abs(x):
                break
    assert(mon_result_counts == [2, 2, 1, 6, 6, 4])

def test_executor_group():
    def test_exec_group_create(origin_exec_grp, shared_exec_grp,
Owner: In this test, group1 is shared_group for group2/3.

                               shared_arg_names, extra_input=[], extra_arg=[]):
        # Test shared data arrays
        for i in range(len(origin_exec_grp.execs)):
            for data_name, array in origin_exec_grp.shared_data_arrays[i].items():
                assert data_name in shared_exec_grp.shared_data_arrays[i], "Shared data not in exec group."
                assert mx.test_utils.same_array(array, shared_exec_grp.shared_data_arrays[i][data_name]),\
                    "Data not sharing memory."
            for input_name in extra_input:
                assert input_name in shared_exec_grp.execs[i].arg_dict,\
                    "Extra input not in shared executor group."

        # Test shared argument arrays and gradient arrays
        for i in range(len(origin_exec_grp.execs)):
            exec1 = origin_exec_grp.execs[i]
            exec2 = shared_exec_grp.execs[i]
            for arg_name in shared_arg_names:
                assert arg_name in exec2.arg_dict, "Shared argument not in exec group."
                assert mx.test_utils.same_array(exec1.arg_dict[arg_name], exec2.arg_dict[arg_name]),\
                    "Argument not sharing memory."
            for arg_name in extra_arg:
                assert arg_name in exec2.arg_dict, "Extra argument not in shared executor group."
            for arg_name, grad in origin_exec_grp.grad_req.items():
                assert grad == shared_exec_grp.grad_req[arg_name], "Gradient requirements inconsistent"
            for arg_name in shared_arg_names:
                assert arg_name in exec2.grad_dict, "Shared argument gradient not in exec group."
                assert mx.test_utils.same_array(exec1.grad_dict[arg_name], exec2.grad_dict[arg_name]),\
                    "Argument gradient not sharing memory."

    contexts = [mx.cpu(0), mx.cpu(1)]
    workload = [1] * len(contexts)
    batch_size = 16
    num_hidden = 4
    data_shapes1 = [('data1', (batch_size, 10))]
    data_shapes2 = [('data1', (batch_size, 10)), ('data2', (batch_size, 10))]
    label_shapes = [('softmax_label', (batch_size,))]

    data1 = mx.sym.Variable('data1')
    data2 = mx.sym.Variable('data2')
    fc1 = mx.sym.FullyConnected(data=data1, name='fc1', num_hidden=num_hidden)
    mlp1 = mx.sym.SoftmaxOutput(data=fc1, name='softmax')
    fc1 = mx.sym.FullyConnected(data=data1 + data2, name='fc1', num_hidden=num_hidden)
    fc2 = mx.sym.FullyConnected(data=fc1, name='fc2', num_hidden=num_hidden)
    mlp2 = mx.sym.SoftmaxOutput(data=fc2, name='softmax')

    arg_names = mlp1.list_arguments()
    input_names = [name[0] for name in data_shapes1] + [name[0] for name in label_shapes]
    shared_arg_names = [name for name in arg_names if name not in input_names]
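    # With mlp1 defined as above, list_arguments() should yield
    # ['data1', 'fc1_weight', 'fc1_bias', 'softmax_label'], so shared_arg_names
    # works out to ['fc1_weight', 'fc1_bias'], i.e. only the learnable parameters.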

    exec_group1 = DataParallelExecutorGroup(symbol=mlp1, contexts=contexts,
                                            workload=workload, data_shapes=data_shapes1,
                                            label_shapes=label_shapes, param_names=shared_arg_names,
                                            for_training=True, inputs_need_grad=False)

    # Test two executor groups with the same symbol sharing memory
    exec_group2 = DataParallelExecutorGroup(symbol=mlp1, contexts=contexts,
                                            workload=workload, data_shapes=data_shapes1,
                                            label_shapes=label_shapes, param_names=shared_arg_names,
                                            for_training=True, inputs_need_grad=False,
                                            shared_group=exec_group1)
    test_exec_group_create(exec_group1, exec_group2, shared_arg_names)

    # Test two executor groups with different symbols sharing memory
    exec_group3 = DataParallelExecutorGroup(symbol=mlp2, contexts=contexts,
                                            workload=workload, data_shapes=data_shapes2,
                                            label_shapes=label_shapes, param_names=shared_arg_names,
                                            for_training=True, inputs_need_grad=False,
                                            shared_group=exec_group1)
    extra_input = ['data2']
    extra_arg = ['fc2_weight', 'fc2_bias']
    test_exec_group_create(exec_group1, exec_group3, shared_arg_names, extra_input, extra_arg)
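The shared_group mechanism exercised by this test is the same one the user-facing Module API exposes through the shared_module argument of bind (and that bucketing relies on to reuse memory across executors). A rough sketch of that usage, assuming the standard mx.mod.Module API of this MXNet generation, might look like:

```python
import mxnet as mx

# Sketch only: the user-facing counterpart of DataParallelExecutorGroup's shared_group.
data = mx.sym.Variable('data')
net = mx.sym.FullyConnected(data=data, name='fc1', num_hidden=4)
net = mx.sym.SoftmaxOutput(data=net, name='softmax')

mod1 = mx.mod.Module(net)
mod1.bind(data_shapes=[('data', (16, 10))], label_shapes=[('softmax_label', (16,))])
mod1.init_params()

# Binding mod2 with shared_module=mod1 reuses mod1's parameter, gradient and data
# arrays instead of allocating new ones; internally this is forwarded to the
# executor group as shared_group, which is what the test above verifies.
mod2 = mx.mod.Module(net)
mod2.bind(data_shapes=[('data', (16, 10))], label_shapes=[('softmax_label', (16,))],
          shared_module=mod1)
```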

if __name__ == '__main__':
    test_module_dtype()
    test_module_input_grads()
@@ -263,3 +337,4 @@ def mean_abs(x):
    test_module_layout()
    test_module_switch_bucket()
    test_monitor()
    test_executor_group()