[DoubleGrad PR #6] Fixed issues with TensorWrapper::recover() interfa…
jim19930609 authored Apr 2, 2022
1 parent f482613 commit a5e00bb
Showing 3 changed files with 9 additions and 8 deletions.
@@ -1249,9 +1249,9 @@ def GenerateNodeDefinition(self, grad_node_creation_str):

            is_optional = (name in self.optional_inputs)
            if is_optional:
-                tensor_wrapper_recover_str = f"{indent}auto {transformed_tensor_name} = egr::EagerUtils::RecoverOptionalTensorWrapper(&this->{tensor_wrapper_name}, nullptr);"
+                tensor_wrapper_recover_str = f"{indent}auto {transformed_tensor_name} = egr::EagerUtils::RecoverOptionalTensorWrapper(&this->{tensor_wrapper_name}, this->shared_from_this());"
            else:
-                tensor_wrapper_recover_str = f"{indent}auto {transformed_tensor_name} = egr::EagerUtils::RecoverTensorWrapper(&this->{tensor_wrapper_name}, nullptr);"
+                tensor_wrapper_recover_str = f"{indent}auto {transformed_tensor_name} = egr::EagerUtils::RecoverTensorWrapper(&this->{tensor_wrapper_name}, this->shared_from_this());"
            grad_api_args[grad_api_position] = transformed_tensor_name
            get_grad_in_args_list.append(tensor_wrapper_recover_str)

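With this codegen change, every generated grad node now forwards itself, rather than nullptr, as the grad-node argument when it recovers a wrapped tensor, so the recovered tensor can carry a backward edge to the node that produced it. Below is a minimal, self-contained sketch of that call-site difference using toy stand-in types; it is not Paddle's real EagerUtils API, and the behavior is simplified (the real recover() only attaches the edge when the wrapper was not fully reserved).

// Toy stand-ins for illustration only; not Paddle's real types or API.
#include <iostream>
#include <memory>
#include <string>

struct GradNode {};

struct Tensor {
  std::string name;
  std::shared_ptr<GradNode> grad_node;  // backward edge, if any
};

struct TensorWrapper {
  Tensor wrapped;
};

// Plays the role of egr::EagerUtils::RecoverTensorWrapper: the second
// argument is the grad node the recovered tensor should point back to.
Tensor RecoverTensorWrapper(TensorWrapper* tw,
                            const std::shared_ptr<GradNode>& grad_node) {
  Tensor recovered = tw->wrapped;
  recovered.grad_node = grad_node;  // simplified: always attach the edge
  return recovered;
}

int main() {
  TensorWrapper tw{Tensor{"x", nullptr}};
  auto node = std::make_shared<GradNode>();

  Tensor old_style = RecoverTensorWrapper(&tw, nullptr);  // old generated call shape
  Tensor new_style = RecoverTensorWrapper(&tw, node);     // new generated call shape

  std::cout << "old: " << (old_style.grad_node ? "edge" : "no edge") << "\n"
            << "new: " << (new_style.grad_node ? "edge" : "no edge") << "\n";
}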
paddle/fluid/eager/grad_node_info.h (2 changes: 1 addition & 1 deletion)
@@ -87,7 +87,7 @@ class GradSlotMeta {
  std::shared_ptr<phi::DenseTensorMeta> meta_ = nullptr;
};

-class GradNodeBase {
+class GradNodeBase : public std::enable_shared_from_this<GradNodeBase> {
 public:
  GradNodeBase() { VLOG(6) << "Construct GradNodeBase"; }
  GradNodeBase(size_t bwd_in_slot_num, size_t bwd_out_slot_num);
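The this->shared_from_this() call emitted by the generator only works because GradNodeBase now derives from std::enable_shared_from_this; it also requires the node to already be owned by a std::shared_ptr at the time of the call, otherwise shared_from_this() throws std::bad_weak_ptr (since C++17). A small standalone sketch of those two rules, in plain standard C++ and unrelated to Paddle's actual ownership scheme:

#include <iostream>
#include <memory>

// Mirrors the GradNodeBase change: deriving from enable_shared_from_this lets
// a member function hand out a shared_ptr that shares ownership with the
// shared_ptr already managing *this.
class Node : public std::enable_shared_from_this<Node> {
 public:
  std::shared_ptr<Node> Self() { return shared_from_this(); }
};

int main() {
  auto owned = std::make_shared<Node>();
  std::shared_ptr<Node> self = owned->Self();
  std::cout << (self == owned) << "\n";    // 1: same object, same control block
  std::cout << owned.use_count() << "\n";  // 2: `owned` and `self`

  // Pitfall: an object not managed by any shared_ptr cannot use it.
  Node stack_node;
  try {
    stack_node.Self();
  } catch (const std::bad_weak_ptr&) {
    std::cout << "bad_weak_ptr: node is not owned by a shared_ptr\n";
  }
}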
paddle/fluid/eager/tensor_wrapper.h (11 changes: 6 additions & 5 deletions)
@@ -95,18 +95,19 @@ class TensorWrapper {
    }

    check_inplace_version();

    // if it's full_reserved just return the full copy of tensor
-    if (full_reserved_) {
-      return intermidiate_tensor_;
-    } else {
+    paddle::experimental::Tensor recovered_tensor = intermidiate_tensor_;
+    if (!full_reserved_) {
      std::shared_ptr<GradNodeBase> new_grad_node = grad_node;
      auto p_ab_autograd_meta =
          std::make_shared<AutogradMeta>(Edge(new_grad_node, out_rank_info_));
-      intermidiate_tensor_.set_autograd_meta(
+      recovered_tensor.set_autograd_meta(
          std::static_pointer_cast<paddle::experimental::AbstractAutogradMeta>(
              p_ab_autograd_meta));
-      return intermidiate_tensor_;
    }
+
+    return recovered_tensor;
  }

  void check_inplace_version() {
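The recover() rewrite switches to a copy-then-modify pattern: the stored intermidiate_tensor_ is copied into a local recovered_tensor, the new autograd meta is attached to the copy only when full_reserved_ is false, and the copy is returned, leaving the wrapper's cached tensor untouched. A simplified sketch of that pattern with toy types; the Meta, Tensor, and Wrapper classes below are hypothetical stand-ins for AutogradMeta, paddle::experimental::Tensor, and TensorWrapper, not the real classes.

#include <cassert>
#include <memory>
#include <string>
#include <utility>

// Toy stand-ins for illustration only.
struct Meta {
  std::string edge;  // plays the role of the Edge held by AutogradMeta
};

struct Tensor {
  std::string name;
  std::shared_ptr<Meta> autograd_meta;
};

class Wrapper {
 public:
  Wrapper(Tensor t, bool full_reserved)
      : intermidiate_tensor_(std::move(t)), full_reserved_(full_reserved) {}

  // Copy-then-modify: the stored member is never mutated by recover().
  Tensor recover(const std::string& grad_node_name) {
    Tensor recovered_tensor = intermidiate_tensor_;
    if (!full_reserved_) {
      recovered_tensor.autograd_meta = std::make_shared<Meta>(Meta{grad_node_name});
    }
    return recovered_tensor;
  }

  const Tensor& stored() const { return intermidiate_tensor_; }

 private:
  Tensor intermidiate_tensor_;  // spelling kept to match the Paddle member
  bool full_reserved_;
};

int main() {
  Wrapper w(Tensor{"x", nullptr}, /*full_reserved=*/false);
  Tensor r = w.recover("toy_grad_node");
  assert(r.autograd_meta != nullptr);           // the copy got the new meta
  assert(w.stored().autograd_meta == nullptr);  // the cached tensor is untouched
}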
