Skip to content

Commit

Permalink
fix recompute memory leak (PaddlePaddle#63420) (PaddlePaddle#63441)
Browse files Browse the repository at this point in the history
* fix recompute memory leak

* polish code
  • Loading branch information
deepllz authored Apr 15, 2024
1 parent e416e07 commit 5eda9ba
Show file tree
Hide file tree
Showing 2 changed files with 23 additions and 33 deletions.
27 changes: 0 additions & 27 deletions paddle/fluid/eager/tensor_wrapper.h
Original file line number Diff line number Diff line change
Expand Up @@ -106,33 +106,6 @@ class TensorWrapper {
}
}

#ifndef PADDLE_NO_PYTHON
TensorWrapper(const TensorWrapper& other) {
no_need_buffer_ = other.no_need_buffer_;
intermidiate_tensor_ = other.intermidiate_tensor_;
weak_grad_node_ = other.weak_grad_node_;
inplace_version_snapshot_ = other.inplace_version_snapshot_;
packed_value_ = other.packed_value_;
unpack_hook_ = other.unpack_hook_;
if (packed_value_) {
packed_value_->inc_ref();
}
}

// Copy assignment: defaulted for the same reason as the copy constructor.
//
// The removed hand-written version leaked in two ways: it called
// packed_value_->inc_ref() with no matching dec_ref anywhere, and it
// overwrote the previously held packed_value_ without releasing the extra
// reference taken when *this* wrapper was last assigned/copied.
// Self-assignment made it worse (yet another unreleased inc_ref on the
// same object). The implicit member-wise assignment shares ownership of
// the packed value through its holder type, whose destructor releases the
// single Python reference it owns, so no manual ref-counting is required.
TensorWrapper& operator=(const TensorWrapper& other) = default;
#endif

paddle::Tensor recover() {
VLOG(6) << "Recover tensor: " << intermidiate_tensor_.name()
<< " for wrapper";
Expand Down
29 changes: 23 additions & 6 deletions paddle/fluid/pybind/eager_utils.cc
Original file line number Diff line number Diff line change
Expand Up @@ -1669,6 +1669,7 @@ PyObjectHolder::PyObjectHolder(PyObject* ptr) { ptr_ = ptr; }

PyObjectHolder::~PyObjectHolder() {
  // Destroying the holder may touch the Python interpreter state, so take
  // the GIL for the duration of the release.
  ::pybind11::gil_scoped_acquire gil;
  // ptr_ is the one Python reference this holder owns; drop it here.
  // (Null-safe: equivalent to Py_XDECREF.)
  if (ptr_ != nullptr) {
    Py_DECREF(ptr_);
  }
}

Expand Down Expand Up @@ -1703,7 +1704,10 @@ std::shared_ptr<egr::PyObjectHolderBase> PackHook::operator()(
bool grad_tmp = egr::Controller::Instance().HasGrad();
egr::Controller::Instance().SetHasGrad(false);
::pybind11::gil_scoped_acquire gil;
auto args = PyTuple_New(1);
PyObject* args = PyTuple_New(1);
PADDLE_ENFORCE_NOT_NULL(args,
paddle::platform::errors::External(
pybind11::detail::error_string().c_str()));
PyTuple_SET_ITEM(args, 0, paddle::pybind::ToPyObject(tensor));
PyObject* ret = PyObject_Call(hook_, args, nullptr);
PADDLE_ENFORCE_NOT_NULL(ret,
Expand All @@ -1718,7 +1722,10 @@ void* PackHook::operator()(void* py_tensor) {
bool grad_tmp = egr::Controller::Instance().HasGrad();
egr::Controller::Instance().SetHasGrad(false);
::pybind11::gil_scoped_acquire gil;
auto args = PyTuple_New(1);
PyObject* args = PyTuple_New(1);
PADDLE_ENFORCE_NOT_NULL(args,
paddle::platform::errors::External(
pybind11::detail::error_string().c_str()));
Py_INCREF(reinterpret_cast<PyObject*>(py_tensor));
PyTuple_SET_ITEM(args, 0, reinterpret_cast<PyObject*>(py_tensor));
PyObject* ret = PyObject_Call(hook_, args, nullptr);
Expand All @@ -1742,13 +1749,20 @@ paddle::Tensor UnPackHook::operator()(
bool grad_tmp = egr::Controller::Instance().HasGrad();
egr::Controller::Instance().SetHasGrad(false);
::pybind11::gil_scoped_acquire gil;
auto args = PyTuple_New(1);
Py_INCREF(reinterpret_cast<PyObject*>(packed_value->get()));
PyTuple_SET_ITEM(args, 0, reinterpret_cast<PyObject*>(packed_value->get()));
PyObject* args = PyTuple_New(1);
PADDLE_ENFORCE_NOT_NULL(args,
paddle::platform::errors::External(
pybind11::detail::error_string().c_str()));
PyObject* py_packed_value = reinterpret_cast<PyObject*>(packed_value->get());
Py_INCREF(py_packed_value);
PyTuple_SET_ITEM(args, 0, py_packed_value);
PyObject* ret = PyObject_Call(hook_, args, nullptr);
PADDLE_ENFORCE_NOT_NULL(ret,
paddle::platform::errors::External(
pybind11::detail::error_string().c_str()));
// NOTE(deepllz): tupledealloc will cause the reference count of the objects
// in it to be decremented by one, so no need to call
// Py_XDECREF(py_packed_value)
Py_XDECREF(args);
egr::Controller::Instance().SetHasGrad(grad_tmp);

Expand All @@ -1767,7 +1781,10 @@ void* UnPackHook::operator()(void* packed_value, void* other) {
bool grad_tmp = egr::Controller::Instance().HasGrad();
egr::Controller::Instance().SetHasGrad(false);
::pybind11::gil_scoped_acquire gil;
auto args = PyTuple_New(1);
PyObject* args = PyTuple_New(1);
PADDLE_ENFORCE_NOT_NULL(args,
paddle::platform::errors::External(
pybind11::detail::error_string().c_str()));
Py_INCREF(reinterpret_cast<PyObject*>(packed_value));
PyTuple_SET_ITEM(args, 0, reinterpret_cast<PyObject*>(packed_value));
PyObject* ret = PyObject_Call(hook_, args, nullptr);
Expand Down

0 comments on commit 5eda9ba

Please sign in to comment.