Revert "Revert "[AIR] Deprecations for 2.3"" #31867

Merged: 1 commit, Jan 23, 2023
38 changes: 1 addition & 37 deletions python/ray/air/checkpoint.py
@@ -27,7 +27,7 @@
     upload_to_uri,
 )
 from ray.air.constants import PREPROCESSOR_KEY, CHECKPOINT_ID_ATTR
-from ray.util.annotations import Deprecated, DeveloperAPI, PublicAPI
+from ray.util.annotations import DeveloperAPI, PublicAPI

 if TYPE_CHECKING:
     from ray.data.preprocessor import Preprocessor
@@ -415,42 +415,6 @@ def to_dict(self) -> dict:
             checkpoint_data[PREPROCESSOR_KEY] = self._override_preprocessor
         return checkpoint_data

-    @classmethod
-    @Deprecated(
-        message="To restore a checkpoint from a remote object ref, call "
-        "`ray.get(obj_ref)` instead."
-    )
-    def from_object_ref(cls, obj_ref: ray.ObjectRef) -> "Checkpoint":
-        """Create checkpoint object from object reference.
-
-        Args:
-            obj_ref: ObjectRef pointing to checkpoint data.
-
-        Returns:
-            Checkpoint: checkpoint object.
-        """
-        raise DeprecationWarning(
-            "`from_object_ref` is deprecated and will be removed in a future Ray "
-            "version. To restore a Checkpoint from a remote object ref, call "
-            "`ray.get(obj_ref)` instead.",
-        )
-
-    @Deprecated(
-        message="To store the checkpoint in the Ray object store, call `ray.put(ckpt)` "
-        "instead of `ckpt.to_object_ref()`."
-    )
-    def to_object_ref(self) -> ray.ObjectRef:
-        """Return checkpoint data as object reference.
-
-        Returns:
-            ray.ObjectRef: ObjectRef pointing to checkpoint data.
-        """
-        raise DeprecationWarning(
-            "`to_object_ref` is deprecated and will be removed in a future Ray "
-            "version. To store the checkpoint in the Ray object store, call "
-            "`ray.put(ckpt)` instead of `ckpt.to_object_ref()`.",
-        )
-
     @classmethod
     def from_directory(cls, path: Union[str, os.PathLike]) -> "Checkpoint":
         """Create checkpoint object from directory.
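For code that still used these helpers, the deprecation messages above already name the replacements. A minimal migration sketch (the dict-backed checkpoint is just for illustration):

```python
import ray
from ray.air.checkpoint import Checkpoint

ray.init()
ckpt = Checkpoint.from_dict({"step": 0})

# Before (removed): obj_ref = ckpt.to_object_ref()
obj_ref = ray.put(ckpt)

# Before (removed): restored = Checkpoint.from_object_ref(obj_ref)
restored = ray.get(obj_ref)
assert restored.to_dict()["step"] == 0
```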
12 changes: 4 additions & 8 deletions python/ray/data/preprocessors/batch_mapper.py
@@ -80,23 +80,19 @@ def __init__(
                 Union[np.ndarray, Dict[str, np.ndarray]],
             ],
         ],
-        batch_format: Optional[BatchFormat] = None,
+        batch_format: Optional[BatchFormat],
         batch_size: Optional[Union[int, Literal["default"]]] = "default",
         # TODO: Make batch_format required from user
         # TODO: Introduce a "zero_copy" format
         # TODO: We should reach consistency of args between BatchMapper and map_batches.
     ):
-        if not batch_format:
-            raise DeprecationWarning(
-                "batch_format is a required argument for BatchMapper from Ray 2.1."
-                "You must specify either 'pandas' or 'numpy' batch format."
-            )
-
         if batch_format not in [
             BatchFormat.PANDAS,
             BatchFormat.NUMPY,
         ]:
-            raise ValueError("BatchMapper only supports pandas and numpy batch format.")
+            raise ValueError(
+                "BatchMapper only supports 'pandas' or 'numpy' batch format."
+            )

         self.batch_format = batch_format
         self.batch_size = batch_size
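With the deprecation shim gone, `batch_format` is now required in practice: omitting it raises a `TypeError` from the signature, and passing `None` falls through to the membership check and raises `ValueError`. A minimal usage sketch (the `add_one` function is illustrative):

```python
import pandas as pd
from ray.data.preprocessors import BatchMapper

def add_one(batch: pd.DataFrame) -> pd.DataFrame:
    batch["value"] += 1
    return batch

# batch_format must be specified explicitly: "pandas" or "numpy".
preprocessor = BatchMapper(add_one, batch_format="pandas")
```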
16 changes: 0 additions & 16 deletions python/ray/train/__init__.py
@@ -1,29 +1,13 @@
 from ray._private.usage import usage_lib
 from ray.train.backend import BackendConfig
 from ray.train.constants import TRAIN_DATASET_KEY
-from ray.train.train_loop_utils import (
-    get_dataset_shard,
-    load_checkpoint,
-    local_rank,
-    report,
-    save_checkpoint,
-    world_rank,
-    world_size,
-)
 from ray.train.trainer import TrainingIterator


 usage_lib.record_library_usage("train")

 __all__ = [
     "BackendConfig",
-    "get_dataset_shard",
-    "load_checkpoint",
-    "local_rank",
-    "report",
-    "save_checkpoint",
     "TrainingIterator",
-    "world_rank",
-    "world_size",
     "TRAIN_DATASET_KEY",
 ]
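The loop utilities removed here were deprecated in favor of the AIR session API. A rough sketch of the corresponding calls inside a training loop, assuming a Ray 2.3-era install and the names exposed by `ray.air.session`:

```python
from ray.air import session

def train_loop_per_worker():
    # Before: ray.train.world_rank(), ray.train.get_dataset_shard(),
    # ray.train.report(...)
    rank = session.get_world_rank()
    shard = session.get_dataset_shard("train")
    session.report({"rank": rank})
```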
10 changes: 0 additions & 10 deletions python/ray/train/tests/test_torch_trainer.py
@@ -378,16 +378,6 @@ def __getstate__(self):
     assert results.checkpoint


-def test_torch_prepare_model_deprecated():
-    model = torch.nn.Linear(1, 1)
-
-    with pytest.raises(DeprecationWarning):
-        train.torch.prepare_model(model, wrap_ddp=True)
-
-    with pytest.raises(DeprecationWarning):
-        train.torch.prepare_model(model, ddp_kwargs={"x": "y"})
-
-
 if __name__ == "__main__":
     import sys

15 changes: 0 additions & 15 deletions python/ray/train/torch/train_loop_utils.py
@@ -53,9 +53,6 @@ def prepare_model(
     move_to_device: bool = True,
     parallel_strategy: Optional[str] = "ddp",
     parallel_strategy_kwargs: Optional[Dict[str, Any]] = None,
-    # Deprecated args.
-    wrap_ddp: bool = False,
-    ddp_kwargs: Optional[Dict[str, Any]] = None,
 ) -> torch.nn.Module:
     """Prepares the model for distributed execution.

@@ -76,18 +73,6 @@
or "fsdp", respectively.
"""

if wrap_ddp:
raise DeprecationWarning(
"The `wrap_ddp` argument is deprecated as of Ray 2.1. Use the "
"`parallel_strategy` argument instead."
)

if ddp_kwargs:
raise DeprecationWarning(
"The `ddp_kwargs` argument is deprecated as of Ray 2.1. Use the "
"`parallel_strategy_kwargs` arg instead."
)

if parallel_strategy == "fsdp" and FullyShardedDataParallel is None:
raise ImportError(
"FullyShardedDataParallel requires torch>=1.11.0. "
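With the arguments gone from the signature, passing `wrap_ddp` or `ddp_kwargs` now fails with a `TypeError` rather than the old `DeprecationWarning`, which is why the test above was removed. A migration sketch using the replacement arguments; `find_unused_parameters` is just an illustrative DDP kwarg, and `prepare_model` must be called from inside a training worker:

```python
import torch
from ray import train

def train_loop_per_worker():
    model = torch.nn.Linear(1, 1)
    # Before (removed): train.torch.prepare_model(model, wrap_ddp=True,
    #                                             ddp_kwargs={...})
    model = train.torch.prepare_model(
        model,
        parallel_strategy="ddp",
        parallel_strategy_kwargs={"find_unused_parameters": True},
    )
    return model
```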
233 changes: 0 additions & 233 deletions python/ray/train/train_loop_utils.py

This file was deleted.