This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

Commit 75532b6

Switched to using sets for O(1) op membership checks
connorgoggins committed Feb 13, 2020
1 parent 7d01559 commit 75532b6
Showing 1 changed file with 16 additions and 36 deletions.
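The change is mechanical and repeated throughout the file: every operator collection used only for membership tests becomes a set literal. Python lists answer "x in xs" by scanning elements (O(n) on average), while sets resolve it with a hash lookup (O(1) on average). A minimal sketch of the before/after semantics, reusing op names from the diff below; the snippet is illustrative, not part of the commit:

# Before: list membership scans every element until a match, O(n) average.
ops_4d_list = ['depth_to_space', 'space_to_depth']

# After: set membership is a hash lookup, O(1) average.
ops_4d_set = {'depth_to_space', 'space_to_depth'}

op_name = 'space_to_depth'
# Both forms give the same answer; only the lookup cost differs.
assert (op_name in ops_4d_list) == (op_name in ops_4d_set)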
benchmark/opperf/utils/op_registry_utils.py (52 changes: 16 additions & 36 deletions)
@@ -113,14 +113,14 @@ def prepare_op_inputs(op, arg_params):
     inputs = []
 
     # 4d tensor is needed only by following two ops
-    ops_4d = ['depth_to_space', 'space_to_depth']
+    ops_4d = {'depth_to_space', 'space_to_depth'}
 
     # 3d tensor is needed by following ops
-    ops_3d = ['CTCLoss', 'ctc_loss']
+    ops_3d = {'CTCLoss', 'ctc_loss'}
 
     # For ops with args that need to change shape/value for different ops
-    custom_data = ['Activation', 'LeakyReLU', 'Softmax', 'BilinearSampler', 'GridGenerator',
-                   'sample_multinomial', 'linalg_maketrian', 'squeeze', 'fill_element_0index']
+    custom_data = {'Activation', 'LeakyReLU', 'Softmax', 'BilinearSampler', 'GridGenerator',
+                   'sample_multinomial', 'linalg_maketrian', 'squeeze', 'fill_element_0index'}
 
     # Prepare op to default input mapping
     arg_values = {}

@@ -257,7 +257,7 @@ def get_all_random_sampling_operators():
     {"operator_name": {"has_backward", "nd_op_handle", "params"}}
     """
     # Additional Random Sampling ops which do not start with "random_" or "sample_"
-    additional_random_sampling_ops = ['GridGenerator', 'BilinearSampler']
+    additional_random_sampling_ops = {'GridGenerator', 'BilinearSampler'}
 
     # Get all mxnet operators
     mx_operators = _get_all_mxnet_operators()

@@ -277,10 +277,10 @@ def get_all_linalg_operators():
     -------
     {"operator_name": {"has_backward", "nd_op_handle", "params"}}
     """
-    other_linalg_ops = ['moments']
+    other_linalg_ops = {'moments'}
 
     # Already tested linalg_potrf independently
-    independently_tested = ['linalg_potrf']
+    independently_tested = {'linalg_potrf'}
 
     # Get all mxnet operators
     mx_operators = _get_all_mxnet_operators()

@@ -319,7 +319,7 @@ def get_all_nn_activation_operators():
     -------
     {"operator_name": {"has_backward", "nd_op_handle", "params"}}
     """
-    nn_activation_ops = ['Softmax', 'SoftmaxActivation', 'softmin', 'Activation', 'LeakyReLU', 'hard_sigmoid', 'softmax', 'log_softmax']
+    nn_activation_ops = {'Softmax', 'SoftmaxActivation', 'softmin', 'Activation', 'LeakyReLU', 'hard_sigmoid', 'softmax', 'log_softmax'}
 
     # Get all mxnet operators
     mx_operators = _get_all_mxnet_operators()

@@ -339,9 +339,9 @@ def get_all_optimizer_operators():
     -------
     {"operator_name": {"has_backward", "nd_op_handle", "params"}}
     """
-    optimizer_ops = ['mp_sgd_update', 'signum_update', 'rmspropalex_update', 'ftml_update', 'rmsprop_update',
+    optimizer_ops = {'mp_sgd_update', 'signum_update', 'rmspropalex_update', 'ftml_update', 'rmsprop_update',
                      'sgd_mom_update', 'signsgd_update', 'mp_sgd_mom_update', 'ftrl_update', 'sgd_update',
-                     'adam_update']
+                     'adam_update'}
 
     # Get all mxnet operators
     mx_operators = _get_all_mxnet_operators()

@@ -360,7 +360,7 @@ def get_all_sorting_searching_operators():
     -------
     {"operator_name": {"has_backward", "nd_op_handle", "params"}}
     """
-    sort_search_ops = ['sort', 'argsort', 'argmax', 'argmin', 'topk']
+    sort_search_ops = {'sort', 'argsort', 'argmax', 'argmin', 'topk'}
 
     # Get all mxnet operators
     mx_operators = _get_all_mxnet_operators()

@@ -380,7 +380,7 @@ def get_all_rearrange_operators():
     -------
     {"operator_name": {"has_backward", "nd_op_handle", "params"}}
     """
-    rearrange_ops = ['transpose','swapaxes','flip','depth_to_space','space_to_depth']
+    rearrange_ops = {'transpose','swapaxes','flip','depth_to_space','space_to_depth'}
 
     # Get all mxnet operators
    mx_operators = _get_all_mxnet_operators()

@@ -400,7 +400,7 @@ def get_remaining_miscellaneous_operators():
     -------
     {"operator_name": {"has_backward", "nd_op_handle", "params"}}
     """
-    misc_ops = ['squeeze', 'all_finite', 'clip', 'multi_lars', 'SequenceReverse', 'SequenceLast', 'SequenceMask', 'cast_storage', 'cumsum', 'fill_element_0index']
+    misc_ops = {'squeeze', 'all_finite', 'clip', 'multi_lars', 'SequenceReverse', 'SequenceLast', 'SequenceMask', 'cast_storage', 'cumsum', 'fill_element_0index'}
 
     # Get all mxnet operators
     mx_operators = _get_all_mxnet_operators()

@@ -424,8 +424,8 @@ def get_all_indexing_routines():
     -------
     {"operator_name": {"has_backward", "nd_op_handle", "params"}}
     """
-    indexing_routines = ['slice', 'slice_axis', 'slice_like', 'take', 'one_hot',
-                         'where', 'ravel_multi_index', 'gather_nd', 'pick']
+    indexing_routines = {'slice', 'slice_axis', 'slice_like', 'take', 'one_hot',
+                         'where', 'ravel_multi_index', 'gather_nd', 'pick'}
 
     # Get all mxnet operators
     mx_operators = _get_all_mxnet_operators()

@@ -445,7 +445,7 @@ def get_all_loss_operators():
     -------
     {"operator_name": {"has_backward", "nd_op_handle", "params"}}
     """
-    loss_ops = ['smooth_l1', 'CTCLoss', 'ctc_loss', 'MakeLoss', 'softmax_cross_entropy']
+    loss_ops = {'smooth_l1', 'CTCLoss', 'ctc_loss', 'MakeLoss', 'softmax_cross_entropy'}
 
     # Get all mxnet operators
     mx_operators = _get_all_mxnet_operators()

@@ -458,26 +458,6 @@ def get_all_loss_operators():
     return loss_mx_operators
 
 
-def get_remaining_miscellaneous_operators():
-    """Gets remaining Miscellaneous operators registered with MXNet not covered by individual tests.
-    Returns
-    -------
-    {"operator_name": {"has_backward", "nd_op_handle", "params"}}
-    """
-    misc_ops = ['squeeze', 'all_finite', 'clip', 'multi_lars', 'SequenceReverse', 'SequenceLast', 'SequenceMask', 'cast_storage', 'cumsum', 'fill_element_0index']
-
-    # Get all mxnet operators
-    mx_operators = _get_all_mxnet_operators()
-
-    # Filter for Miscellaneous operators
-    misc_mx_operators = {}
-    for op_name, _ in mx_operators.items():
-        if op_name in misc_ops:
-            misc_mx_operators[op_name] = mx_operators[op_name]
-    return misc_mx_operators
-
-
 def get_operators_with_no_benchmark(operators_with_benchmark):
     """Gets all MXNet operators with no benchmark.
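For these small collections the gain per lookup is tiny, but the checks run inside loops over every registered MXNet operator (as in the deleted duplicate above), so the costs add up. A hedged measurement sketch using the standard-library timeit module, not taken from the repository; it probes an absent name so the list scan hits its worst case:

import timeit

ops_list = ['sort', 'argsort', 'argmax', 'argmin', 'topk']
ops_set = set(ops_list)

# The absent key forces the list to scan all five entries;
# the set answers with a single hash lookup either way.
print(timeit.timeit("'missing_op' in ops_list", globals=globals()))
print(timeit.timeit("'missing_op' in ops_set", globals=globals()))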
