diff --git a/paddle/fluid/pybind/eager_method.cc b/paddle/fluid/pybind/eager_method.cc
index 9fee5d11d40ed..36d2e3e3d6c5b 100644
--- a/paddle/fluid/pybind/eager_method.cc
+++ b/paddle/fluid/pybind/eager_method.cc
@@ -569,10 +569,18 @@ static PyObject* tensor_method__is_initialized(TensorObject* self,
 static PyObject* tensor_method__is_dense_tensor_hold_allocation(
     TensorObject* self, PyObject* args, PyObject* kwargs) {
   EAGER_TRY
-  auto dense_tensor =
-      std::dynamic_pointer_cast<phi::DenseTensor>(self->tensor.impl());
-  if (dense_tensor) {
-    return ToPyObject(dense_tensor->IsInitialized());
+  if (!self->tensor.defined()) {
+    return ToPyObject(false);
+  }
+  if (self->tensor.is_dense_tensor()) {
+    return ToPyObject(
+        std::dynamic_pointer_cast<phi::DenseTensor>(self->tensor.impl())
+            ->IsInitialized());
+  } else if (self->tensor.is_dist_tensor()) {
+    return ToPyObject(
+        static_cast<phi::distributed::DistTensor*>(self->tensor.impl().get())
+            ->value()
+            .IsInitialized());
   } else {
     return ToPyObject(false);
   }
diff --git a/python/paddle/tensor/to_string.py b/python/paddle/tensor/to_string.py
index 97b8268fb6fe5..dfe2346a2fb1a 100644
--- a/python/paddle/tensor/to_string.py
+++ b/python/paddle/tensor/to_string.py
@@ -363,17 +363,30 @@ def dist_tensor_to_string(tensor, prefix='Tensor'):
     if tensor.dtype == core.VarDesc.VarType.BF16:
         dtype = 'bfloat16'
 
-    _template = "{prefix}(shape={shape}, dtype={dtype}, place={place}, stop_gradient={stop_gradient}, dist_attr={dist_attr},\n{indent}{data})"
-    return _template.format(
-        prefix=prefix,
-        shape=tensor.shape,
-        dtype=dtype,
-        place=tensor._place_str,
-        stop_gradient=tensor.stop_gradient,
-        dist_attr=tensor.dist_attr,
-        indent=' ' * indent,
-        data=None,
-    )
+    if not tensor._is_dense_tensor_hold_allocation():
+        _template = "{prefix}(shape={shape}, dtype={dtype}, place={place}, stop_gradient={stop_gradient}, dist_attr={dist_attr}, GlobalDenseTensor Not initialized)"
+        return _template.format(
+            prefix=prefix,
+            shape=tensor.shape,
+            dtype=dtype,
+            place=tensor._place_str,
+            stop_gradient=tensor.stop_gradient,
+            dist_attr=tensor.dist_attr,
+        )
+    else:
+        indent = len(prefix) + 1
+        data = _format_dense_tensor(tensor, indent)
+        _template = "{prefix}(shape={shape}, dtype={dtype}, place={place}, stop_gradient={stop_gradient}, dist_attr={dist_attr}, GlobalDenseTensor=\n{indent}{data})"
+        return _template.format(
+            prefix=prefix,
+            shape=tensor.shape,
+            dtype=dtype,
+            place=tensor._place_str,
+            stop_gradient=tensor.stop_gradient,
+            dist_attr=tensor.dist_attr,
+            indent=' ' * indent,
+            data=data,
+        )
 
 
 def tensor_to_string(tensor, prefix='Tensor'):