From ddde829a1ccf99cecd194fc27e008d49945e921a Mon Sep 17 00:00:00 2001
From: dzhwinter
Date: Tue, 31 Oct 2017 10:11:35 -0700
Subject: [PATCH] Fix/sequence pool (#5229)

* "modify layers.py"

* "fix pool interface"

* "add export type to layers"

* "fix based on comment"
---
 python/paddle/v2/framework/layers.py | 75 +++++++++++++++-------------
 python/paddle/v2/framework/nets.py   |  9 +---
 2 files changed, 43 insertions(+), 41 deletions(-)

diff --git a/python/paddle/v2/framework/layers.py b/python/paddle/v2/framework/layers.py
index 6451d11e2b686..5fdad52f2124d 100644
--- a/python/paddle/v2/framework/layers.py
+++ b/python/paddle/v2/framework/layers.py
@@ -5,7 +5,8 @@
 __all__ = [
     'fc', 'data', 'cross_entropy', 'conv2d', 'pool2d', 'embedding', 'concat',
-    'StaticRNN', 'cast', 'sequence_conv', 'sequence_pool', 'accuracy'
+    'StaticRNN', 'cast', 'sequence_conv', 'sequence_pool', 'sums', 'cos_sim',
+    'batch_norm', 'accuracy'
 ]
 
 
@@ -165,18 +166,6 @@ def func(**kwargs):
 _create_op_func_('reshape')
 
 
-def cast(x, data_type, program=None):
-    helper = LayerHelper('cast', **locals())
-    out = helper.create_tmp_variable(dtype=data_type)
-    helper.append_op(
-        type='cast',
-        inputs={'X': [x]},
-        outputs={'Out': [out]},
-        attrs={'in_data_type': x.data_type,
-               'out_data_type': out.data_type})
-    return out
-
-
 def cast(x, data_type, program=None):
     helper = LayerHelper('cast', **locals())
     out = helper.create_tmp_variable(dtype=data_type)
@@ -191,9 +180,7 @@ def cast(x, data_type, program=None):
 
 def concat(input, axis, program=None, init_program=None):
     helper = LayerHelper('concat', **locals())
-    if not isinstance(input, list) and not isinstance(input, tuple):
-        input = [input]
-    out = helper.create_tmp_variable(dtype=input[0].data_type)
+    out = helper.create_tmp_variable(dtype=helper.input_dtype())
     helper.append_op(
         type='concat',
         inputs={'X': input},
@@ -202,6 +189,28 @@ def concat(input, axis, program=None, init_program=None):
     return out
 
 
+def sums(input, program=None, init_program=None):
+    helper = LayerHelper('sum', **locals())
+    out = helper.create_tmp_variable(dtype=helper.input_dtype())
+    helper.append_op(type='sum', inputs={'X': [input]}, outputs={'Out': out})
+    return out
+
+
+def cos_sim(X, Y, program=None, init_program=None):
+    helper = LayerHelper('cos_sim', **locals())
+    out = helper.create_tmp_variable(dtype=helper.input_dtype("X"))
+    xnorm = helper.create_tmp_variable(dtype=helper.input_dtype("X"))
+    ynorm = helper.create_tmp_variable(dtype=helper.input_dtype("X"))
+    helper.append_op(
+        type='cos_sim',
+        inputs={'X': [X],
+                'Y': [Y]},
+        outputs={'Out': [out],
+                 'XNorm': [xnorm],
+                 'YNorm': [ynorm]})
+    return out, xnorm, ynorm
+
+
 def cross_entropy(input, label, **kwargs):
     helper = LayerHelper('cross_entropy', **kwargs)
     out = helper.create_tmp_variable(dtype=input.data_type)
@@ -254,9 +263,7 @@ def accuracy(input, label, k=1, **kwargs):
 
 def sequence_conv(input,
                   num_filters,
-                  name=None,
                   filter_size=3,
-                  act=None,
                   stride=1,
                   padding=None,
                   bias_attr=None,
@@ -270,7 +277,7 @@ def sequence_conv(input,
 
     helper = LayerHelper('sequence_conv', **locals())
     dtype = helper.input_dtype()
-    filter_shape = [num_filters, filter_size]
+    filter_shape = [filter_size * input.shape[1], num_filters]
     filter = helper.create_parameter(
         attr=helper.param_attr, shape=filter_shape, dtype=dtype)
     pre_bias = helper.create_tmp_variable(dtype)
@@ -279,7 +286,7 @@ def sequence_conv(input,
         type='sequence_conv',
         inputs={
             'X': [input],
-            'Filter': filter,
+            'Filter': [filter],
         },
         outputs={"Out": pre_bias},
         attrs={
@@ -287,7 +294,6 @@ def sequence_conv(input,
             'context_start': 0,
             'context_length': filter_size
         })
-
     pre_act = helper.append_bias_op(pre_bias)
     return helper.append_activation(pre_act)
 
@@ -344,31 +350,32 @@ def conv2d(input,
     return helper.append_activation(pre_act)
 
 
-def sequence_pool(input,
-                  pool_size,
-                  pool_type,
-                  pool_stride=1,
-                  pool_padding=0,
-                  global_pooling=False,
-                  program=None,
-                  init_program=None):
+def sequence_pool(input, pool_type, program=None, init_program=None):
     # FIXME(dzh) : want to unify the argument of python layer
     # function. So we ignore some unnecessary attributes
 
-    ENUM_POOL_TYPE = set(["max", "avg", "sqrt", "last", "first"])
-    if pool_type not in ENUM_POOL_TYPE:
+    ENUM_POOL_TYPE = dict({
+        "AVERAGE": 0,
+        "SUM": 1,
+        "SQRT": 2,
+        "MAX": 3,
+        "LAST": 4,
+        "FIRST": 5
+    })
+    if pool_type.upper() not in ENUM_POOL_TYPE:
         raise ValueError("Unknown pool_type: '%s'. It can only be %s.",
-                         str(pool_type), " ".join(ENUM_POOL_TYPE))
+                         str(pool_type), " ".join(ENUM_POOL_TYPE.keys()))
 
     helper = LayerHelper('sequence_pool', **locals())
     dtype = helper.input_dtype()
     pool_out = helper.create_tmp_variable(dtype)
 
+    # FIXME(dzh): strategy
     helper.append_op(
         type="sequence_pool",
         inputs={"X": [input]},
-        outputs={"Out": pool_out},
-        attrs={"strategy": pool_type})
+        outputs={"Out": [pool_out]},
+        attrs={"strategy": ENUM_POOL_TYPE[pool_type.upper()]})
 
     return pool_out
 
diff --git a/python/paddle/v2/framework/nets.py b/python/paddle/v2/framework/nets.py
index a9998073e164a..8191b5ef44de8 100644
--- a/python/paddle/v2/framework/nets.py
+++ b/python/paddle/v2/framework/nets.py
@@ -101,24 +101,19 @@ def __extend_list__(obj):
 def sequence_conv_pool(input,
                        num_filters,
                        filter_size,
-                       pool_size,
-                       pool_stride,
-                       act,
+                       pool_type="max",
                        program=None,
                        init_program=None):
     conv_out = layers.sequence_conv(
         input=input,
         num_filters=num_filters,
         filter_size=filter_size,
-        act=act,
         program=program,
         init_program=init_program)
 
     pool_out = layers.sequence_pool(
         input=conv_out,
-        pool_size=pool_size,
-        pool_type='max',
-        pool_stride=pool_stride,
+        pool_type=pool_type,
         program=program,
         init_program=init_program)
     return pool_out
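
Below is a minimal usage sketch (not part of the patch) showing how the reworked interface would be called after this change. It assumes the paddle.v2.framework import paths on this branch; the variable names, vocabulary size, and embedding width are illustrative only.

import paddle.v2.framework.layers as layers
import paddle.v2.framework.nets as nets

# A sequence (LoD) input of word ids; the name, shape, and data_type
# here are assumptions for illustration.
words = layers.data(name='words', shape=[1], data_type='int64')
emb = layers.embedding(input=words, size=[10000, 32])

# sequence_pool now selects the strategy by name, one of "average",
# "sum", "sqrt", "max", "last", "first" (case-insensitive, per the
# pool_type.upper() lookup above), instead of taking pool_size and
# pool_stride; sequence_conv_pool simply forwards pool_type.
conv_pool = nets.sequence_conv_pool(
    input=emb, num_filters=64, filter_size=3, pool_type="max")

# sequence_pool can also be applied directly to any sequence tensor:
last_step = layers.sequence_pool(input=emb, pool_type="last")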