Remove main_program and startup_program as parameters of layer functions #6655

Merged: 7 commits, Dec 19, 2017
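In short: layer functions used to take main_program / startup_program keyword arguments; after this PR they pick the target programs up from the ambient defaults, which callers redirect with program_guard. A minimal sketch of the two calling styles (a hedged illustration, not repository code; it assumes program_guard is importable from the framework module, as the diff below imports it, and uses fill_constant only as an example):

    import paddle.v2.fluid.layers as layers
    from paddle.v2.fluid.framework import Program, program_guard

    prog = Program()

    # Old style (removed by this PR): thread the target program through
    # every layer call as a keyword argument.
    #
    #     layers.fill_constant(
    #         shape=[1], dtype='float32', value=0.0, main_program=prog)

    # New style: program_guard installs `prog` as the default program for
    # the duration of the block, so layer calls need no program argument.
    with program_guard(main_program=prog):
        layers.fill_constant(shape=[1], dtype='float32', value=0.0)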
python/paddle/v2/fluid/evaluator.py (75 changes: 30 additions & 45 deletions)
@@ -1,7 +1,7 @@
 import numpy as np

 import layers
-from framework import Program, unique_name, Variable
+from framework import Program, unique_name, Variable, program_guard
 from layer_helper import LayerHelper

 __all__ = ['Accuracy', 'ChunkEvaluator']
@@ -49,15 +49,12 @@ def reset(self, executor, reset_program=None):
         if reset_program is None:
             reset_program = Program()

-        for var in self.states:
-            assert isinstance(var, Variable)
-            g_var = _clone_var_(reset_program.current_block(), var)
-            layers.fill_constant(
-                shape=g_var.shape,
-                value=0.0,
-                dtype=g_var.dtype,
-                out=g_var,
-                main_program=reset_program)
+        with program_guard(main_program=reset_program):
+            for var in self.states:
+                assert isinstance(var, Variable)
+                g_var = _clone_var_(reset_program.current_block(), var)
+                layers.fill_constant(
+                    shape=g_var.shape, value=0.0, dtype=g_var.dtype, out=g_var)

         executor.run(reset_program)

@@ -104,33 +101,27 @@ def __init__(self, input, label, k=1, **kwargs):
         self.total = self.create_state(dtype='int64', shape=[1], suffix='total')
         self.correct = self.create_state(
             dtype='int64', shape=[1], suffix='correct')
-        kwargs = {'main_program': main_program}
         total = self.helper.create_tmp_variable(dtype='int')
         correct = self.helper.create_tmp_variable(dtype='int')
         acc = layers.accuracy(
-            input=input,
-            label=label,
-            k=k,
-            total=total,
-            correct=correct,
-            **kwargs)
-        total = layers.cast(x=total, dtype='int64', **kwargs)
-        correct = layers.cast(x=correct, dtype='int64', **kwargs)
-        layers.sums(input=[self.total, total], out=self.total, **kwargs)
-        layers.sums(input=[self.correct, correct], out=self.correct, **kwargs)
+            input=input, label=label, k=k, total=total, correct=correct)
+        total = layers.cast(x=total, dtype='int64')
+        correct = layers.cast(x=correct, dtype='int64')
+        layers.sums(input=[self.total, total], out=self.total)
+        layers.sums(input=[self.correct, correct], out=self.correct)

         self.metrics.append(acc)

     def eval(self, executor, eval_program=None):
         if eval_program is None:
             eval_program = Program()
         block = eval_program.current_block()
-        kwargs = {'main_program': eval_program}
-        total = _clone_var_(block, self.total)
-        correct = _clone_var_(block, self.correct)
-        total = layers.cast(total, dtype='float32', **kwargs)
-        correct = layers.cast(correct, dtype='float32', **kwargs)
-        out = layers.elementwise_div(x=correct, y=total, **kwargs)
+        with program_guard(main_program=eval_program):
+            total = _clone_var_(block, self.total)
+            correct = _clone_var_(block, self.correct)
+            total = layers.cast(total, dtype='float32')
+            correct = layers.cast(correct, dtype='float32')
+            out = layers.elementwise_div(x=correct, y=total)
         return np.array(executor.run(eval_program, fetch_list=[out])[0])


@@ -141,14 +132,14 @@ class ChunkEvaluator(Evaluator):
     numbers.
     """

-    def __init__(self,
-                 input,
-                 label,
-                 chunk_scheme,
-                 num_chunk_types,
-                 excluded_chunk_types=None,
-                 **kwargs):
-        super(ChunkEvaluator, self).__init__("chunk_eval", **kwargs)
+    def __init__(
+            self,
+            input,
+            label,
+            chunk_scheme,
+            num_chunk_types,
+            excluded_chunk_types=None, ):
+        super(ChunkEvaluator, self).__init__("chunk_eval")
         main_program = self.helper.main_program
         if main_program.current_block().idx != 0:
             raise ValueError("You can only invoke Evaluator in root block")
@@ -159,34 +150,28 @@ def __init__(self,
             dtype='int64', shape=[1], suffix='num_label_chunks')
         self.num_correct_chunks = self.create_state(
             dtype='int64', shape=[1], suffix='num_correct_chunks')
-        kwargs = {'main_program': main_program}
         precision, recall, f1_score, num_infer_chunks, num_label_chunks, num_correct_chunks = layers.chunk_eval(
             input=input,
             label=label,
             chunk_scheme=chunk_scheme,
             num_chunk_types=num_chunk_types,
-            excluded_chunk_types=excluded_chunk_types,
-            **kwargs)
+            excluded_chunk_types=excluded_chunk_types, )
@JiayiFeng (Collaborator) commented on Dec 18, 2017:

    The line 153:

        kwargs = {'main_program': main_program}

    should also be removed. I cannot comment on an unchanged line, so I leave it here.

         layers.sums(
             input=[self.num_infer_chunks, num_infer_chunks],
-            out=self.num_infer_chunks,
-            **kwargs)
+            out=self.num_infer_chunks)
         layers.sums(
             input=[self.num_label_chunks, num_label_chunks],
-            out=self.num_label_chunks,
-            **kwargs)
+            out=self.num_label_chunks)
         layers.sums(
             input=[self.num_correct_chunks, num_correct_chunks],
-            out=self.num_correct_chunks,
-            **kwargs)
+            out=self.num_correct_chunks)

         self.metrics.extend([precision, recall, f1_score])

     def eval(self, executor, eval_program=None):
         if eval_program is None:
             eval_program = Program()
         block = eval_program.current_block()
-        kwargs = {'main_program': eval_program}
         num_infer_chunks, num_label_chunks, num_correct_chunks = executor.run(
             eval_program,
             fetch_list=[_clone_var_(block, state) for state in self.states])
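All of the above relies on program_guard redirecting what default_main_program() returns. fluid's real implementation lives in framework.py and also handles startup_program; the stripped-down sketch below shows only the switching mechanism and is an assumption-laden stand-in, not the actual code:

    import contextlib

    # Stand-in for fluid's module-level default program.
    _main_program = None


    def default_main_program():
        return _main_program


    @contextlib.contextmanager
    def program_guard(main_program):
        # Install the given program as the default, then restore the
        # previous one on exit, even if the body raises.
        global _main_program
        previous, _main_program = _main_program, main_program
        try:
            yield
        finally:
            _main_program = previous

Since entry and exit are paired, guards nest safely, which is why Evaluator.reset and Evaluator.eval above can build scratch programs without disturbing the caller's default.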
python/paddle/v2/fluid/layer_helper.py (19 changes: 2 additions & 17 deletions)
@@ -21,19 +21,11 @@ def name(self):

     @property
     def main_program(self):
-        prog = self.kwargs.get('main_program', None)
-        if prog is None:
-            return default_main_program()
-        else:
-            return prog
+        return default_main_program()
Collaborator commented:

    As we can change the default program, I think it is improper to keep calling it "default".

Author (Collaborator) replied:

    Maybe just main_program() is cool. It could be done in a follow-up PR.

     @property
     def startup_program(self):
-        prog = self.kwargs.get('startup_program', None)
-        if prog is None:
-            return default_startup_program()
-        else:
-            return prog
+        return default_startup_program()

     def append_op(self, *args, **kwargs):
         return self.main_program.current_block().append_op(*args, **kwargs)
@@ -151,13 +143,6 @@ def set_variable_initializer(self, var, initializer):
             persistable=True,
             initializer=initializer)

-    @property
-    def to_kwargs(self):
-        return {
-            'main_program': self.main_program,
-            'startup_program': self.startup_program
-        }
-
     def append_bias_op(self, input_var, dim_start=1, dim_end=None):
         """
         Append bias operator and return its output. If the user does not set
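To make the layer_helper change concrete, here is a self-contained sketch contrasting the two behaviors of the main_program property (default_main_program is stubbed, and the class names are illustrative, not from the repository):

    def default_main_program():
        # Stub standing in for fluid.framework.default_main_program().
        return '<default program>'


    class LayerHelperBefore(object):
        def __init__(self, **kwargs):
            self.kwargs = kwargs

        @property
        def main_program(self):
            # Old behavior: an explicitly passed program wins over the default.
            prog = self.kwargs.get('main_program', None)
            return default_main_program() if prog is None else prog


    class LayerHelperAfter(object):
        def __init__(self, **kwargs):
            self.kwargs = kwargs

        @property
        def main_program(self):
            # New behavior: always the default, which callers redirect with
            # program_guard instead of passing a program around.
            return default_main_program()


    print(LayerHelperBefore(main_program='<explicit>').main_program)  # '<explicit>'
    print(LayerHelperAfter(main_program='<explicit>').main_program)   # '<default program>'

With the kwargs lookup gone, the to_kwargs property that forwarded both programs from helper to helper has nothing left to forward, which is why this hunk deletes it outright.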