Commit: remove flag

kangguangli committed Apr 7, 2023
1 parent 5ba29f7 commit 67e0cfc
Showing 6 changed files with 85 additions and 187 deletions.
3 changes: 0 additions & 3 deletions paddle/fluid/framework/new_executor/interpretercore.cc
@@ -50,9 +50,6 @@ PADDLE_DEFINE_EXPORTED_bool(new_executor_use_local_scope,
                             true,
                             "Use local_scope in new executor(especially used "
                             "in UT), can turn off for better performance");
-PADDLE_DEFINE_EXPORTED_bool(control_flow_use_new_executor,
-                            true,
-                            "Use new executor in control flow op");
 
 DECLARE_bool(check_nan_inf);
 DECLARE_bool(benchmark);
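
For context: PADDLE_DEFINE_EXPORTED_bool wraps a gflags-style boolean flag. One translation unit defines the global FLAGS_control_flow_use_new_executor; any other file pulls it in with DECLARE_bool and branches on it at runtime. Below is a minimal standalone sketch of that define/declare/branch pattern, using plain gflags and a hypothetical flag name rather than Paddle's wrapper macros:

// flag_pattern.cc - a minimal sketch of the define/declare/branch pattern
// this commit removes. Built directly on gflags; "my_feature_flag" is a
// hypothetical name, not a real Paddle flag.
#include <gflags/gflags.h>

#include <iostream>

// Defining translation unit: creates the global bool FLAGS_my_feature_flag.
DEFINE_bool(my_feature_flag, true, "toggle the experimental code path");

// Any other translation unit would instead write:
//   DECLARE_bool(my_feature_flag);

int main(int argc, char* argv[]) {
  gflags::ParseCommandLineFlags(&argc, &argv, /*remove_flags=*/true);
  if (FLAGS_my_feature_flag) {
    std::cout << "new code path\n";     // the path this commit keeps
  } else {
    std::cout << "legacy code path\n";  // the path this commit deletes
  }
  return 0;
}

Once every `if (FLAGS_control_flow_use_new_executor)` branch in the operators below is collapsed to the new-executor path, the flag has no readers left, which is why both this definition and the DECLARE_bool in the header can be dropped.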
1 change: 0 additions & 1 deletion paddle/fluid/framework/new_executor/interpretercore.h
@@ -34,7 +34,6 @@
 #include "paddle/fluid/platform/device_event.h"
 
 DECLARE_bool(new_executor_use_local_scope);
-DECLARE_bool(control_flow_use_new_executor);
 
 namespace paddle {
 namespace framework {
123 changes: 43 additions & 80 deletions paddle/fluid/operators/controlflow/conditional_block_op.cc
@@ -95,48 +95,29 @@ class ConditionalBlockOp : public ConditionalOp {
       auto &skip_vars =
           Attr<std::vector<std::string>>(ConditionalOp::kSkipEagerDeletionVars);
 
-      if (FLAGS_control_flow_use_new_executor) {
-        LOG_FIRST_N(INFO, 1)
-            << "[ControlFlow][ConditionalBlock] New Executor is Running.";
-        if (!core_ || !platform::is_same_place(core_->GetPlace(), dev_place)) {
-          VLOG(10) << "[interpreterCore cache]" << core_.get();
-          VLOG_IF(10, core_)
-              << platform::is_same_place(core_->GetPlace(), dev_place);
-
-          framework::interpreter::ExecutionConfig execution_config;
-          execution_config.create_local_scope = false;
-          execution_config.used_for_control_flow_op = true;
-          execution_config.skip_gc_vars =
-              std::set<std::string>(skip_vars.begin(), skip_vars.end());
-
-          core_.reset(new InterpreterCore(
-              dev_place, *block, &cur_scope, execution_config));
-          VLOG(10) << "[interpreterCore cache]"
-                   << "new created:" << core_;
-        } else {
-          BuildScopeForControlFlowOp(*core_, *block, &cur_scope);
-          core_->reset_scope(&cur_scope);
-        }
-
-        core_->Run({}, false);
-
+      LOG_FIRST_N(INFO, 1)
+          << "[ControlFlow][ConditionalBlock] New Executor is Running.";
+      if (!core_ || !platform::is_same_place(core_->GetPlace(), dev_place)) {
+        VLOG(10) << "[interpreterCore cache]" << core_.get();
+        VLOG_IF(10, core_) << platform::is_same_place(core_->GetPlace(),
+                                                      dev_place);
+
+        framework::interpreter::ExecutionConfig execution_config;
+        execution_config.create_local_scope = false;
+        execution_config.used_for_control_flow_op = true;
+        execution_config.skip_gc_vars =
+            std::set<std::string>(skip_vars.begin(), skip_vars.end());
+
+        core_.reset(new InterpreterCore(
+            dev_place, *block, &cur_scope, execution_config));
+        VLOG(10) << "[interpreterCore cache]"
+                 << "new created:" << core_;
       } else {
-        if (!exec_ || !platform::is_same_place(exec_->GetPlace(), dev_place)) {
-          auto &pdesc = *block->Program();
-          exec_.reset(new Executor(dev_place));
-          if (FLAGS_use_mkldnn) exec_->EnableMKLDNN(pdesc);
-          ctx_ = exec_->Prepare(pdesc, block->ID(), skip_vars, false);
-#ifdef PADDLE_WITH_MKLDNN
-          platform::AttachPointerHashToMKLDNNKey(exec_.get(), dev_place);
-          platform::RegisterModelLayout(ctx_->ops_, dev_place);
-#endif
-        }
-        exec_->RunPreparedContext(ctx_.get(),
-                                  &cur_scope,
-                                  /* create_local_scope */ false,
-                                  /* create_vars */ true,
-                                  /* keep_kids */ true);
+        BuildScopeForControlFlowOp(*core_, *block, &cur_scope);
+        core_->reset_scope(&cur_scope);
       }
+
+      core_->Run({}, false);
     }
   }

@@ -208,47 +189,28 @@ class ConditionalBlockGradOp : public ConditionalOp {
       VLOG(3) << "Conditional Grad block.idx = " << block->ID()
               << ", scope = " << &cur_scope;
 
-      if (FLAGS_control_flow_use_new_executor) {
-        LOG_FIRST_N(INFO, 1)
-            << "[ControlFlow][ConditionalGradBlock] New Executor is Running.";
-        if (!core_ || !platform::is_same_place(core_->GetPlace(), dev_place)) {
-          VLOG(10) << "[interpreterCore cache]" << core_.get();
-          VLOG_IF(10, core_)
-              << platform::is_same_place(core_->GetPlace(), dev_place);
-
-          framework::interpreter::ExecutionConfig execution_config;
-          execution_config.create_local_scope = false;
-          execution_config.used_for_control_flow_op = true;
-          execution_config.skip_gc_vars =
-              std::set<std::string>(inside_grads.begin(), inside_grads.end());
-
-          core_.reset(new InterpreterCore(
-              dev_place, *block, &cur_scope, execution_config));
-          VLOG(10) << "[interpreterCore cache]"
-                   << "new created:" << core_;
-        } else {
-          BuildScopeForControlFlowOp(*core_, *block, &cur_scope);
-          core_->reset_scope(&cur_scope);
-        }
-        core_->Run({}, false);
-
+      LOG_FIRST_N(INFO, 1)
+          << "[ControlFlow][ConditionalGradBlock] New Executor is Running.";
+      if (!core_ || !platform::is_same_place(core_->GetPlace(), dev_place)) {
+        VLOG(10) << "[interpreterCore cache]" << core_.get();
+        VLOG_IF(10, core_) << platform::is_same_place(core_->GetPlace(),
+                                                      dev_place);
+
+        framework::interpreter::ExecutionConfig execution_config;
+        execution_config.create_local_scope = false;
+        execution_config.used_for_control_flow_op = true;
+        execution_config.skip_gc_vars =
+            std::set<std::string>(inside_grads.begin(), inside_grads.end());
+
+        core_.reset(new InterpreterCore(
+            dev_place, *block, &cur_scope, execution_config));
+        VLOG(10) << "[interpreterCore cache]"
+                 << "new created:" << core_;
      } else {
-        if (!exec_ || !platform::is_same_place(exec_->GetPlace(), dev_place)) {
-          auto &pdesc = *block->Program();
-          exec_.reset(new Executor(dev_place));
-          if (FLAGS_use_mkldnn) exec_->EnableMKLDNN(pdesc);
-          ctx_ = exec_->Prepare(pdesc, block->ID(), inside_grads, false);
-#ifdef PADDLE_WITH_MKLDNN
-          platform::AttachPointerHashToMKLDNNKey(exec_.get(), dev_place);
-          platform::RegisterModelLayout(ctx_->ops_, dev_place);
-#endif
-        }
-        exec_->RunPreparedContext(ctx_.get(),
-                                  &cur_scope,
-                                  /* create_local_scope */ false,
-                                  /* create_vars */ true,
-                                  /* keep_kids */ true);
+        BuildScopeForControlFlowOp(*core_, *block, &cur_scope);
+        core_->reset_scope(&cur_scope);
      }
+      core_->Run({}, false);
 
       AssignLocalGradientToParentScope(
           dev_place, cur_scope, scope, inside_grads, outside_grads, inputs);
@@ -398,7 +360,8 @@ struct FilterNoGradInput<framework::OpDesc> {
                             std::vector<std::string> *vec) {
     auto f = [desc](const std::string &name) -> std::string {
       if (name == framework::kEmptyVarName) {
-        // don't drop empty var name, you can use Input(name, true) to drop it.
+        // don't drop empty var name, you can use Input(name, true) to drop
+        // it.
         return framework::kEmptyVarName;
       }
       auto var_desc =
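
Both ConditionalBlockOp hunks above reduce to the same shape: the op caches an InterpreterCore keyed on the device place, rebuilds it only when there is no cached core or the place changed, and otherwise just repoints the cached core at the current scope before running. A condensed, self-contained sketch of that create-or-reuse pattern (Interpreter, Scope, and the string place are simplified stand-ins, not Paddle's real classes):

// cached_interpreter.cc - condensed sketch of the create-or-reuse pattern
// used by ConditionalBlockOp above. Interpreter, Scope, and the string
// "place" are stand-ins for Paddle's InterpreterCore, framework::Scope,
// and platform::Place.
#include <iostream>
#include <memory>
#include <string>

struct Scope {};

struct Interpreter {
  std::string place;  // device the interpreter was built for
  explicit Interpreter(std::string p) : place(std::move(p)) {}
  void ResetScope(Scope* /*s*/) { /* repoint cached program at new scope */ }
  void Run() { std::cout << "run on " << place << "\n"; }
};

class ConditionalBlock {
 public:
  void Compute(const std::string& dev_place, Scope* cur_scope) {
    // Rebuild only when there is no cached core or the place changed;
    // otherwise reuse it and just swap in the current scope.
    if (!core_ || core_->place != dev_place) {
      core_ = std::make_unique<Interpreter>(dev_place);
    } else {
      core_->ResetScope(cur_scope);
    }
    core_->Run();
  }

 private:
  std::unique_ptr<Interpreter> core_;  // cached across Compute() calls
};

int main() {
  ConditionalBlock op;
  Scope scope;
  op.Compute("cpu", &scope);  // first call: builds the interpreter
  op.Compute("cpu", &scope);  // same place: reuses the cached core
  op.Compute("gpu", &scope);  // place changed: rebuilds
  return 0;
}

The point of the cache is to pay the program-preparation cost once per place rather than on every invocation of the op; only the cheap scope swap and run stay on the hot path.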
119 changes: 42 additions & 77 deletions paddle/fluid/operators/controlflow/while_op.cc
@@ -199,26 +199,18 @@ class WhileOp : public framework::OperatorBase {
       }
     }
 
-    if (FLAGS_control_flow_use_new_executor) {
-      LOG_FIRST_N(INFO, 1) << "[ControlFlow][WhileOp] New Executor is Running.";
-      if (!core_ || !platform::is_same_place(core_->GetPlace(), dev_place)) {
-        framework::Scope placeholder;  // Don't care if it's valid, just for
-                                       // initialize InterpreterCore
-        framework::interpreter::ExecutionConfig execution_config;
-        execution_config.create_local_scope = false;
-        execution_config.used_for_control_flow_op = true;
-        execution_config.skip_gc_vars =
-            std::set<std::string>(skip_vars.begin(), skip_vars.end());
-
-        core_.reset(new framework::InterpreterCore(
-            dev_place, *block, &placeholder, execution_config));
-      }
-    } else {
-      if (!executor_ ||
-          !platform::is_same_place(executor_->GetPlace(), dev_place)) {
-        executor_.reset(new framework::Executor(dev_place));
-        ctx_ = executor_->Prepare(*program, block->ID(), skip_vars);
-      }
+    LOG_FIRST_N(INFO, 1) << "[ControlFlow][WhileOp] New Executor is Running.";
+    if (!core_ || !platform::is_same_place(core_->GetPlace(), dev_place)) {
+      framework::Scope placeholder;  // Don't care if it's valid, just for
+                                     // initialize InterpreterCore
+      framework::interpreter::ExecutionConfig execution_config;
+      execution_config.create_local_scope = false;
+      execution_config.used_for_control_flow_op = true;
+      execution_config.skip_gc_vars =
+          std::set<std::string>(skip_vars.begin(), skip_vars.end());
+
+      core_.reset(new framework::InterpreterCore(
+          dev_place, *block, &placeholder, execution_config));
     }
 
     if (!is_test) {
@@ -244,22 +236,17 @@ class WhileOp : public framework::OperatorBase {
           }
         }
       }
-      if (FLAGS_control_flow_use_new_executor) {
-        BuildScopeForControlFlowOp(*core_, *block, &current_scope);
-        core_->reset_scope(&current_scope);
-        core_->Run({}, false);
-
-        // restore inputs place
-        for (const auto &n : input_var_original_places) {
-          const std::string &in_name = n.first;
-          const phi::Place &original_place = n.second;
-          // input vars exist in `scope` not `current_scope`
-          TransferVariablePlace(&scope, in_name, original_place, dev_ctx);
-        }
-
-      } else {
-        executor_->RunPreparedContext(
-            ctx_.get(), &current_scope, false, true, true);
+      BuildScopeForControlFlowOp(*core_, *block, &current_scope);
+      core_->reset_scope(&current_scope);
+      core_->Run({}, false);
+
+      // restore inputs place
+      for (const auto &n : input_var_original_places) {
+        const std::string &in_name = n.first;
+        const phi::Place &original_place = n.second;
+        // input vars exist in `scope` not `current_scope`
+        TransferVariablePlace(&scope, in_name, original_place, dev_ctx);
      }
 
      for (auto &var_rename : rename_vars) {
@@ -273,12 +260,8 @@ class WhileOp : public framework::OperatorBase {
     } else {
       auto &current_scope = scope.NewScope();
 
-      if (FLAGS_control_flow_use_new_executor) {
-        BuildScopeForControlFlowOp(*core_, *block, &current_scope);
-        core_->reset_scope(&current_scope);
-      } else {
-        executor_->CreateVariables(*program, &current_scope, block->ID());
-      }
+      BuildScopeForControlFlowOp(*core_, *block, &current_scope);
+      core_->reset_scope(&current_scope);
 
       while (cond_data) {
         for (auto &name : current_scope.LocalVarNames()) {
@@ -295,12 +278,7 @@ class WhileOp : public framework::OperatorBase {
           }
         }
 
-        if (FLAGS_control_flow_use_new_executor) {
-          core_->Run({}, false);
-        } else {
-          executor_->RunPreparedContext(
-              ctx_.get(), &current_scope, false, false, false);
-        }
+        core_->Run({}, false);
 
         cond_data = GetCondData(
             scope.FindVar(Input(kCondition))->Get<phi::DenseTensor>());
@@ -391,28 +369,20 @@ class WhileGradOp : public framework::OperatorBase {
                           outside_og_names.size(),
                           inside_og_names.size()));
 
-    if (FLAGS_control_flow_use_new_executor) {
-      LOG_FIRST_N(INFO, 1)
-          << "[ControlFlow][WhileGradOp] New Executor is Running.";
-      if (!core_ || !platform::is_same_place(core_->GetPlace(), dev_place)) {
-        std::set<std::string> skip_gc_vars(skip_vars.begin(), skip_vars.end());
-        framework::Scope placeholder;  // Don't care if it's valid, just for
-                                       // initialize InterpreterCore
-        framework::interpreter::ExecutionConfig execution_config;
-        execution_config.create_local_scope = false;
-        execution_config.used_for_control_flow_op = true;
-        execution_config.skip_gc_vars =
-            std::set<std::string>(skip_vars.begin(), skip_vars.end());
-
-        core_.reset(new framework::InterpreterCore(
-            dev_place, *block, &placeholder, execution_config));
-      }
-    } else {
-      if (!executor_ ||
-          !platform::is_same_place(executor_->GetPlace(), dev_place)) {
-        executor_.reset(new framework::Executor(dev_place));
-        ctx_ = executor_->Prepare(*program, block->ID(), skip_vars);
-      }
+    LOG_FIRST_N(INFO, 1)
+        << "[ControlFlow][WhileGradOp] New Executor is Running.";
+    if (!core_ || !platform::is_same_place(core_->GetPlace(), dev_place)) {
+      std::set<std::string> skip_gc_vars(skip_vars.begin(), skip_vars.end());
+      framework::Scope placeholder;  // Don't care if it's valid, just for
+                                     // initialize InterpreterCore
+      framework::interpreter::ExecutionConfig execution_config;
+      execution_config.create_local_scope = false;
+      execution_config.used_for_control_flow_op = true;
+      execution_config.skip_gc_vars =
+          std::set<std::string>(skip_vars.begin(), skip_vars.end());
+
+      core_.reset(new framework::InterpreterCore(
+          dev_place, *block, &placeholder, execution_config));
     }
 
     for (auto cur_scope_iter = step_scopes->rbegin();
@@ -504,14 +474,9 @@ class WhileGradOp : public framework::OperatorBase {
         }
       }
 
-      if (FLAGS_control_flow_use_new_executor) {
-        BuildScopeForControlFlowOp(*core_, *block, *cur_scope_iter);
-        core_->reset_scope(*cur_scope_iter);
-        core_->Run({}, false);
-      } else {
-        executor_->RunPreparedContext(
-            ctx_.get(), *cur_scope_iter, false, true, true);
-      }
+      BuildScopeForControlFlowOp(*core_, *block, *cur_scope_iter);
+      core_->reset_scope(*cur_scope_iter);
+      core_->Run({}, false);
 
       // The Outputs(kXGRAD) contains the names of the gradient of parameters
       // and inputs.
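
What remains of WhileOp is a plain condition-driven loop: run the cached body program once per iteration, then re-read the condition variable the body may have updated, with some per-iteration cleanup of loop-local state (the body of that cleanup loop is truncated in the hunks above). A rough stand-in model of that control flow, with a std::map playing the scope and an explicit skip set; the erase-everything-not-in-the-skip-set cleanup is a simplification for illustration, not Paddle's exact retention policy:

// while_loop_sketch.cc - simplified model of the WhileOp run loop above:
// each iteration clears loop-local scratch state, runs the cached body
// once, and re-reads the condition the body may have updated. The map
// "scope" is a stand-in, not paddle::framework::Scope.
#include <iostream>
#include <map>
#include <set>
#include <string>

using Scope = std::map<std::string, int>;

int main() {
  Scope scope{{"cond", 1}, {"i", 0}};
  const std::set<std::string> skip_vars{"cond", "i"};  // kept across steps

  // Stand-in for core_->Run({}, false): the body increments i, leaves a
  // scratch variable behind, and clears cond after three iterations.
  auto run_body = [](Scope* s) {
    (*s)["i"] += 1;
    (*s)["tmp"] = (*s)["i"] * 2;           // scratch, erased next step
    if ((*s)["i"] >= 3) (*s)["cond"] = 0;  // condition read back below
  };

  while (scope["cond"] != 0) {
    // Simplified cleanup: drop locals outside the skip set so stale
    // scratch state cannot leak from one iteration into the next.
    for (auto it = scope.begin(); it != scope.end();) {
      if (skip_vars.count(it->first)) {
        ++it;
      } else {
        it = scope.erase(it);
      }
    }
    run_body(&scope);  // core_->Run({}, false);
    std::cout << "step done, i=" << scope["i"] << "\n";
  }
  return 0;
}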
20 changes: 0 additions & 20 deletions python/paddle/fluid/executor.py
@@ -493,26 +493,6 @@ def _to_str(var):
     return _to_str(var)
 
 
-def _is_dy2st_enable_standalone_executor():
-    return framework._dy2st_enable_standalone_executor_ in [
-        1,
-        '1',
-        True,
-        'True',
-        'true',
-    ]
-
-
-def _is_cuda_graph_enable_standalone_executor():
-    return framework._cuda_graph_enable_standalone_executor_ in [
-        1,
-        '1',
-        True,
-        'True',
-        'true',
-    ]
-
-
 def _prepare_fleet_executor():
     from ..distributed.fleet.proto import fleet_executor_desc_pb2
 
6 changes: 0 additions & 6 deletions python/paddle/fluid/framework.py
@@ -117,12 +117,6 @@ def __setattr__(self, name, val):
 _current_cuda_graph_mode = None
 _global_flags_ = core.globals()
 
-_dy2st_enable_standalone_executor_ = os.environ.get(
-    'FLAGS_DY2ST_USE_STANDALONE_EXECUTOR', 1
-)
-_cuda_graph_enable_standalone_executor_ = os.environ.get(
-    'FLAGS_CUDA_GRAPH_USE_STANDALONE_EXECUTOR', 0
-)
 
 # special_op_attrs, extra_op_attrs are prepared for printing warnings
 # when turning on FLAGS_print_extra_attrs
