
Fix/sequence pool #5229

Merged (6 commits), Oct 31, 2017

python/paddle/v2/framework/layers.py (75 changes: 41 additions & 34 deletions)

@@ -5,7 +5,8 @@
 
 __all__ = [
     'fc', 'data', 'cross_entropy', 'conv2d', 'pool2d', 'embedding', 'concat',
-    'StaticRNN', 'cast', 'sequence_conv', 'sequence_pool', 'accuracy'
+    'StaticRNN', 'cast', 'sequence_conv', 'sequence_pool', 'sums', 'cos_sim',
+    'batch_norm', 'accuracy'
 ]
 
 
@@ -165,18 +166,6 @@ def func(**kwargs):
 _create_op_func_('reshape')
 
 
-def cast(x, data_type, program=None):
-    helper = LayerHelper('cast', **locals())
-    out = helper.create_tmp_variable(dtype=data_type)
-    helper.append_op(
-        type='cast',
-        inputs={'X': [x]},
-        outputs={'Out': [out]},
-        attrs={'in_data_type': x.data_type,
-               'out_data_type': out.data_type})
-    return out
-
-
 def cast(x, data_type, program=None):
     helper = LayerHelper('cast', **locals())
     out = helper.create_tmp_variable(dtype=data_type)
@@ -191,9 +180,7 @@ def cast(x, data_type, program=None):
 
 def concat(input, axis, program=None, init_program=None):
     helper = LayerHelper('concat', **locals())
-    if not isinstance(input, list) and not isinstance(input, tuple):
-        input = [input]
-    out = helper.create_tmp_variable(dtype=input[0].data_type)
+    out = helper.create_tmp_variable(dtype=helper.input_dtype())
     helper.append_op(
         type='concat',
         inputs={'X': input},
@@ -202,6 +189,28 @@ def concat(input, axis, program=None, init_program=None):
     return out
 
 
+def sums(input, program=None, init_program=None):
+    helper = LayerHelper('sum', **locals())
+    out = helper.create_tmp_variable(dtype=helper.input_dtype())
+    helper.append_op(type='sum', inputs={'X': [input]}, outputs={'Out': out})
+    return out
+
+
+def cos_sim(X, Y, program=None, init_program=None):
+    helper = LayerHelper('cos_sim', **locals())
+    out = helper.create_tmp_variable(dtype=helper.input_dtype("X"))
+    xnorm = helper.create_tmp_variable(dtype=helper.input_dtype("X"))
+    ynorm = helper.create_tmp_variable(dtype=helper.input_dtype("X"))
+    helper.append_op(
+        type='cos_sim',
+        inputs={'X': [X],
+                'Y': [Y]},
+        outputs={'Out': [out],
+                 'XNorm': [xnorm],
+                 'YNorm': [ynorm]})
+    return out, xnorm, ynorm
+
+
 def cross_entropy(input, label, **kwargs):
     helper = LayerHelper('cross_entropy', **kwargs)
     out = helper.create_tmp_variable(dtype=input.data_type)
@@ -251,9 +260,7 @@ def accuracy(input, label, k=1, **kwargs):
 
 def sequence_conv(input,
                   num_filters,
-                  name=None,
                   filter_size=3,
-                  act=None,
                   stride=1,
                   padding=None,
                   bias_attr=None,
@@ -267,7 +274,7 @@ def sequence_conv(input,
     helper = LayerHelper('sequence_conv', **locals())
     dtype = helper.input_dtype()
 
-    filter_shape = [num_filters, filter_size]
+    filter_shape = [filter_size * input.shape[1], num_filters]
     filter = helper.create_parameter(
         attr=helper.param_attr, shape=filter_shape, dtype=dtype)
     pre_bias = helper.create_tmp_variable(dtype)
@@ -276,15 +283,14 @@ def sequence_conv(input,
         type='sequence_conv',
         inputs={
             'X': [input],
-            'Filter': filter,
+            'Filter': [filter],
         },
         outputs={"Out": pre_bias},
         attrs={
             'context_stride': stride,
             'context_start': 0,
             'context_length': filter_size
         })
-
     pre_act = helper.append_bias_op(pre_bias)
     return helper.append_activation(pre_act)
 
@@ -341,31 +347,32 @@ def conv2d(input,
     return helper.append_activation(pre_act)
 
 
-def sequence_pool(input,
-                  pool_size,
-                  pool_type,
-                  pool_stride=1,
-                  pool_padding=0,
-                  global_pooling=False,
-                  program=None,
-                  init_program=None):
+def sequence_pool(input, pool_type, program=None, init_program=None):
     # FIXME(dzh) : want to unify the argument of python layer
     # function. So we ignore some unecessary attributes
 
-    ENUM_POOL_TYPE = set(["max", "avg", "sqrt", "last", "first"])
-    if pool_type not in ENUM_POOL_TYPE:
+    ENUM_POOL_TYPE = dict({
+        "AVERAGE": 0,
+        "SUM": 1,
+        "SQRT": 2,
+        "MAX": 3,
+        "LAST": 4,
+        "FIRST": 5
+    })
+    if pool_type.upper() not in ENUM_POOL_TYPE:
         raise ValueError("Unknown pool_type: '%s'. It can only be %s.",
-                         str(pool_type), " ".join(ENUM_POOL_TYPE))
+                         str(pool_type), " ".join(ENUM_POOL_TYPE.keys()))
 
     helper = LayerHelper('sequence_pool', **locals())
     dtype = helper.input_dtype()
     pool_out = helper.create_tmp_variable(dtype)
 
     # FIXME(dzh): strategy
     helper.append_op(
         type="sequence_pool",
         inputs={"X": [input]},
-        outputs={"Out": pool_out},
-        attrs={"strategy": pool_type})
+        outputs={"Out": [pool_out]},
+        attrs={"strategy": ENUM_POOL_TYPE[pool_type.upper()]})

Collaborator:

What's this change for?

Contributor Author:

"max" and "sqrt" collide with built-in Python names; I cannot bear our keyword overloading anymore...

JiayiFeng (Collaborator), Oct 31, 2017:

I am wondering why ENUM_POOL_TYPE has become a map.

dzhwinter (Contributor Author), Oct 31, 2017:

Because the "strategy" attribute needs a fixed enum index, not a string:

    AddAttr<int>(
        "strategy",
        "(int, default AVERAGE) the pooling strategy of SequencePoolOp.")
        .SetDefault(AVERAGE)
        .InEnum({AVERAGE, SUM, SQRT, MAX, LAST, FIRST});

This attribute will definitely be replaced in another PR, along with the sequence_conv attributes and so on. All that code will be rewritten once I finish chapter 5 of the book.


     return pool_out

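To make the mapping in the thread above concrete, here is a minimal standalone sketch (the dict values are copied from the diff; resolve_strategy is a hypothetical helper for illustration, not the actual layer code) of how the user-facing string is resolved to the integer "strategy" attribute the C++ operator expects:

    # Values mirror the ENUM_POOL_TYPE dict introduced in this PR.
    ENUM_POOL_TYPE = {
        "AVERAGE": 0,
        "SUM": 1,
        "SQRT": 2,
        "MAX": 3,
        "LAST": 4,
        "FIRST": 5
    }

    def resolve_strategy(pool_type):
        # Case-insensitive lookup, mirroring pool_type.upper() in the layer.
        key = pool_type.upper()
        if key not in ENUM_POOL_TYPE:
            raise ValueError("Unknown pool_type: '%s'. It can only be %s." %
                             (pool_type, " ".join(ENUM_POOL_TYPE)))
        return ENUM_POOL_TYPE[key]

    print(resolve_strategy("max"))  # 3, the index passed as the op attribute
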
python/paddle/v2/framework/nets.py (9 changes: 2 additions & 7 deletions)

@@ -101,24 +101,19 @@ def __extend_list__(obj):
 def sequence_conv_pool(input,
                        num_filters,
                        filter_size,
-                       pool_size,
-                       pool_stride,
-                       act,
+                       pool_type="max",
                        program=None,
                        init_program=None):
     conv_out = layers.sequence_conv(
         input=input,
         num_filters=num_filters,
         filter_size=filter_size,
-        act=act,
         program=program,
         init_program=init_program)
 
     pool_out = layers.sequence_pool(
         input=conv_out,
-        pool_size=pool_size,
-        pool_type='max',
-        pool_stride=pool_stride,
+        pool_type=pool_type,
         program=program,
         init_program=init_program)
     return pool_out