Fix/sequence pool (PaddlePaddle#5229)
* "modify layers.py"

* "fix pool interface"

* "add export type to layers"

* "fix based on comment"
dzhwinter authored and zchen0211 committed Oct 31, 2017
1 parent 2113d6e commit ddde829
Showing 2 changed files with 43 additions and 41 deletions.

python/paddle/v2/framework/layers.py: 41 additions & 34 deletions
@@ -5,7 +5,8 @@
 
 __all__ = [
     'fc', 'data', 'cross_entropy', 'conv2d', 'pool2d', 'embedding', 'concat',
-    'StaticRNN', 'cast', 'sequence_conv', 'sequence_pool', 'accuracy'
+    'StaticRNN', 'cast', 'sequence_conv', 'sequence_pool', 'sums', 'cos_sim',
+    'batch_norm', 'accuracy'
 ]
 
 
@@ -165,18 +166,6 @@ def func(**kwargs):
 _create_op_func_('reshape')
 
 
-def cast(x, data_type, program=None):
-    helper = LayerHelper('cast', **locals())
-    out = helper.create_tmp_variable(dtype=data_type)
-    helper.append_op(
-        type='cast',
-        inputs={'X': [x]},
-        outputs={'Out': [out]},
-        attrs={'in_data_type': x.data_type,
-               'out_data_type': out.data_type})
-    return out
-
-
 def cast(x, data_type, program=None):
     helper = LayerHelper('cast', **locals())
     out = helper.create_tmp_variable(dtype=data_type)
@@ -191,9 +180,7 @@ def cast(x, data_type, program=None):
 
 def concat(input, axis, program=None, init_program=None):
     helper = LayerHelper('concat', **locals())
-    if not isinstance(input, list) and not isinstance(input, tuple):
-        input = [input]
-    out = helper.create_tmp_variable(dtype=input[0].data_type)
+    out = helper.create_tmp_variable(dtype=helper.input_dtype())
     helper.append_op(
         type='concat',
         inputs={'X': input},
@@ -202,6 +189,28 @@ def concat(input, axis, program=None, init_program=None):
     return out
 
 
+def sums(input, program=None, init_program=None):
+    helper = LayerHelper('sum', **locals())
+    out = helper.create_tmp_variable(dtype=helper.input_dtype())
+    helper.append_op(type='sum', inputs={'X': [input]}, outputs={'Out': out})
+    return out
+
+
+def cos_sim(X, Y, program=None, init_program=None):
+    helper = LayerHelper('cos_sim', **locals())
+    out = helper.create_tmp_variable(dtype=helper.input_dtype("X"))
+    xnorm = helper.create_tmp_variable(dtype=helper.input_dtype("X"))
+    ynorm = helper.create_tmp_variable(dtype=helper.input_dtype("X"))
+    helper.append_op(
+        type='cos_sim',
+        inputs={'X': [X],
+                'Y': [Y]},
+        outputs={'Out': [out],
+                 'XNorm': [xnorm],
+                 'YNorm': [ynorm]})
+    return out, xnorm, ynorm
+
+
 def cross_entropy(input, label, **kwargs):
     helper = LayerHelper('cross_entropy', **kwargs)
     out = helper.create_tmp_variable(dtype=input.data_type)
@@ -254,9 +263,7 @@ def accuracy(input, label, k=1, **kwargs):
 
 def sequence_conv(input,
                   num_filters,
-                  name=None,
                   filter_size=3,
-                  act=None,
                   stride=1,
                   padding=None,
                   bias_attr=None,
@@ -270,7 +277,7 @@ def sequence_conv(input,
     helper = LayerHelper('sequence_conv', **locals())
     dtype = helper.input_dtype()
 
-    filter_shape = [num_filters, filter_size]
+    filter_shape = [filter_size * input.shape[1], num_filters]
     filter = helper.create_parameter(
         attr=helper.param_attr, shape=filter_shape, dtype=dtype)
     pre_bias = helper.create_tmp_variable(dtype)
@@ -279,15 +286,14 @@ def sequence_conv(input,
         type='sequence_conv',
         inputs={
             'X': [input],
-            'Filter': filter,
+            'Filter': [filter],
         },
         outputs={"Out": pre_bias},
         attrs={
             'context_stride': stride,
             'context_start': 0,
             'context_length': filter_size
         })
-
     pre_act = helper.append_bias_op(pre_bias)
     return helper.append_activation(pre_act)
 
@@ -344,31 +350,32 @@ def conv2d(input,
     return helper.append_activation(pre_act)
 
 
-def sequence_pool(input,
-                  pool_size,
-                  pool_type,
-                  pool_stride=1,
-                  pool_padding=0,
-                  global_pooling=False,
-                  program=None,
-                  init_program=None):
+def sequence_pool(input, pool_type, program=None, init_program=None):
     # FIXME(dzh) : want to unify the argument of python layer
     # function. So we ignore some unecessary attributes
 
-    ENUM_POOL_TYPE = set(["max", "avg", "sqrt", "last", "first"])
-    if pool_type not in ENUM_POOL_TYPE:
+    ENUM_POOL_TYPE = dict({
+        "AVERAGE": 0,
+        "SUM": 1,
+        "SQRT": 2,
+        "MAX": 3,
+        "LAST": 4,
+        "FIRST": 5
+    })
+    if pool_type.upper() not in ENUM_POOL_TYPE:
         raise ValueError("Unknown pool_type: '%s'. It can only be %s.",
-                         str(pool_type), " ".join(ENUM_POOL_TYPE))
+                         str(pool_type), " ".join(ENUM_POOL_TYPE.keys()))
 
     helper = LayerHelper('sequence_pool', **locals())
     dtype = helper.input_dtype()
     pool_out = helper.create_tmp_variable(dtype)
 
-    # FIXME(dzh): strategy
     helper.append_op(
         type="sequence_pool",
         inputs={"X": [input]},
-        outputs={"Out": pool_out},
-        attrs={"strategy": pool_type})
+        outputs={"Out": [pool_out]},
+        attrs={"strategy": ENUM_POOL_TYPE[pool_type.upper()]})
 
     return pool_out

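The reworked sequence_pool interface is the core of this fix: the layer now takes a plain string and translates it into the integer strategy attribute that the C++ operator expects. A minimal usage sketch, assuming x, a, and b are sequence (LoD) variables produced by earlier layers (these names are illustrative, not part of the commit):

    # 'max' is upper-cased and looked up in ENUM_POOL_TYPE, so the op is
    # created with attrs={'strategy': 3}; 'average', 'sum', etc. work likewise.
    pooled = layers.sequence_pool(input=x, pool_type='max')

    # The newly exported cos_sim layer returns the similarity together
    # with the norms of both operands.
    sim, xnorm, ynorm = layers.cos_sim(X=a, Y=b)
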
python/paddle/v2/framework/nets.py: 2 additions & 7 deletions
@@ -101,24 +101,19 @@ def __extend_list__(obj):
 def sequence_conv_pool(input,
                        num_filters,
                        filter_size,
-                       pool_size,
-                       pool_stride,
-                       act,
+                       pool_type="max",
                        program=None,
                        init_program=None):
     conv_out = layers.sequence_conv(
         input=input,
         num_filters=num_filters,
         filter_size=filter_size,
-        act=act,
         program=program,
         init_program=init_program)
 
     pool_out = layers.sequence_pool(
         input=conv_out,
-        pool_size=pool_size,
-        pool_type='max',
-        pool_stride=pool_stride,
+        pool_type=pool_type,
         program=program,
         init_program=init_program)
     return pool_out
