-
Notifications
You must be signed in to change notification settings - Fork 5.6k
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
"add sequence conv layer" #5117
Changes from 4 commits
8d349d9
6e2d084
6c530d4
a0bcf08
192e514
22625a4
f0b9a31
715e95f
6079bb9
eb5a899
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -5,7 +5,7 @@ | |
|
||
__all__ = [ | ||
'fc', 'data', 'cross_entropy', 'conv2d', 'pool2d', 'embedding', 'concat', | ||
'StaticRNN' | ||
'StaticRNN', 'cast' | ||
] | ||
|
||
|
||
|
@@ -61,6 +61,7 @@ def fc(input, | |
def embedding(input, | ||
size, | ||
data_type='float32', | ||
is_sparse=False, | ||
param_attr=None, | ||
program=None, | ||
init_program=None): | ||
|
@@ -72,7 +73,8 @@ def embedding(input, | |
type='lookup_table', | ||
inputs={'Ids': input, | ||
'W': w}, | ||
outputs={'Out': tmp}) | ||
outputs={'Out': tmp}, | ||
attrs={'is_sparse': is_sparse}) | ||
return tmp | ||
|
||
|
||
|
@@ -161,6 +163,18 @@ def func(**kwargs): | |
_create_op_func_('dropout') | ||
|
||
|
||
def cast(x, data_type, program=None):
    """Cast the input variable to a new data type.

    Args:
        x: Input variable to be converted.
        data_type: Target data type of the output variable.
        program: Optional program the cast op is appended to.

    Returns:
        A temporary variable holding ``x`` converted to ``data_type``.
    """
    helper = LayerHelper('cast', **locals())
    result = helper.create_tmp_variable(dtype=data_type)
    # The op needs both source and destination types as attributes.
    cast_attrs = {
        'in_data_type': x.data_type,
        'out_data_type': result.data_type,
    }
    helper.append_op(
        type='cast',
        inputs={'X': [x]},
        outputs={'Out': [result]},
        attrs=cast_attrs)
    return result
|
||
|
||
def concat(input, axis, program=None, init_program=None): | ||
helper = LayerHelper('concat', **locals()) | ||
if not isinstance(input, list) and not isinstance(input, tuple): | ||
|
@@ -204,6 +218,47 @@ def square_error_cost(input, label, **kwargs): | |
return square_out | ||
|
||
|
||
def conv1d(input,
           num_filters,
           name=None,
           filter_size=3,
           act=None,
           stride=1,
           padding=None,
           bias_attr=None,
           param_attr=None,
           program=None,
           init_program=None):
    """Sequence convolution layer (wraps the ``sequence_conv`` op).

    Convolves the input sequence with ``num_filters`` learned filters of
    width ``filter_size``, adds a bias, and applies the activation ``act``.

    Args:
        input: Input sequence variable (LoD tensor — TODO confirm with op docs).
        num_filters: Number of convolution filters.
        name: Optional layer name.
        filter_size: Context length of each filter (default 3).
        act: Optional activation applied after the bias add.
        stride: Context stride of the convolution (default 1).
        padding: Unused here; see FIXME below.
        bias_attr: Attributes of the bias parameter.
        param_attr: Attributes of the filter parameter.
        program: Optional program the ops are appended to.
        init_program: Optional init program for parameter creation.

    Returns:
        The activated output variable of the sequence convolution.
    """
    # FIXME(dzh): want to unify the argument of python layer
    # function. So we ignore some unnecessary attributes,
    # such as padding_trainable, context_start.

    helper = LayerHelper('sequence_conv', **locals())
    dtype = helper.input_dtype()

    filter_shape = [num_filters, filter_size]
    filter = helper.create_parameter(
        attr=helper.param_attr, shape=filter_shape, dtype=dtype)
    pre_bias = helper.create_tmp_variable(dtype)

    # Fix: removed `input.set_lod(lod)` — `lod` was never defined in this
    # scope, so the call raised NameError on every invocation.
    helper.append_op(
        type='sequence_conv',
        inputs={
            # Fix: op inputs are lists of variables, consistent with `cast`.
            'X': [input],
            'Filter': filter,
        },
        outputs={"Out": pre_bias},
        attrs={
            'context_stride': stride,
            'context_start': 0,
            'context_length': filter_size
        })

    pre_act = helper.append_bias_op(pre_bias)
    return helper.append_activation(pre_act)
|
||
|
||
def conv2d(input, | ||
num_filters, | ||
name=None, | ||
|
@@ -256,6 +311,35 @@ def conv2d(input, | |
return helper.append_activation(pre_act) | ||
|
||
|
||
def pool1d(input,
           pool_size,
           pool_type,
           pool_stride=1,
           pool_padding=0,
           global_pooling=False,
           program=None,
           init_program=None):
    """Sequence pooling layer (wraps the ``sequence_pool`` op).

    Pools the input sequence using the given strategy.

    Args:
        input: Input sequence variable.
        pool_size: Unused here; see FIXME below.
        pool_type: Pooling strategy; one of "max", "avg", "sqrt",
            "last", "first".
        pool_stride: Unused here; see FIXME below.
        pool_padding: Unused here; see FIXME below.
        global_pooling: Unused here; see FIXME below.
        program: Optional program the op is appended to.
        init_program: Optional init program.

    Returns:
        The pooled output variable.

    Raises:
        ValueError: If ``pool_type`` is not a recognized strategy.
    """
    # FIXME(dzh): want to unify the argument of python layer
    # function. So we ignore some unnecessary attributes.

    ENUM_POOL_TYPE = ["max", "avg", "sqrt", "last", "first"]

    if pool_type not in ENUM_POOL_TYPE:
        # Fix: the format args were passed as extra ValueError arguments,
        # so the '%s' placeholders were never interpolated. Format the
        # message explicitly instead.
        raise ValueError("Unknown pool_type: '%s'. It can only be %s." %
                         (str(pool_type), " ".join(ENUM_POOL_TYPE)))

    helper = LayerHelper('sequence_pool', **locals())
    dtype = helper.input_dtype()
    pool_out = helper.create_tmp_variable(dtype)

    helper.append_op(
        type="sequence_pool",
        inputs={"X": input},
        outputs={"Out": pool_out},
        attrs={"strategy": pool_type})

    return pool_out
|
||
|
||
def pool2d(input, | ||
pool_size, | ||
pool_type, | ||
|
@@ -275,7 +359,7 @@ def pool2d(input, | |
if isinstance(pool_padding, int): | ||
pool_padding = [pool_padding, pool_padding] | ||
|
||
helper = LayerHelper('conv2d', **locals()) | ||
helper = LayerHelper('pool2d', **locals()) | ||
dtype = helper.input_dtype() | ||
pool_out = helper.create_tmp_variable(dtype) | ||
|
||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Maybe
sequence_conv
is a better name?There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Rename
conv1d
tosequence_conv
after discuss with @Canpio offline, since this function's behavior is different withconv2d
,conv3d
, fixed.