[Bugfix] Offline mode fix (vllm-project#8376)
Signed-off-by: Joe Runde <Joseph.Runde@ibm.com>
Signed-off-by: Amit Garg <mitgarg17495@gmail.com>
joerunde authored and garg-amit committed Oct 28, 2024
1 parent 7932801 commit 60379ec
Showing 1 changed file with 16 additions and 1 deletion.
17 changes: 16 additions & 1 deletion vllm/transformers_utils/config.py
@@ -72,7 +72,22 @@ def file_or_path_exists(model: Union[str, Path], config_name, revision,
    if Path(model).exists():
        return (Path(model) / config_name).is_file()

-    return file_exists(model, config_name, revision=revision, token=token)
+    # Offline mode support: Check if config file is cached already
+    cached_filepath = try_to_load_from_cache(repo_id=model,
+                                             filename=config_name,
+                                             revision=revision)
+    if isinstance(cached_filepath, str):
+        # The config file exists in cache- we can continue trying to load
+        return True
+
+    # NB: file_exists will only check for the existence of the config file on
+    # hf_hub. This will fail in offline mode.
+    try:
+        return file_exists(model, config_name, revision=revision, token=token)
+    except huggingface_hub.errors.OfflineModeIsEnabled:
+        # Don't raise in offline mode, all we know is that we don't have this
+        # file cached.
+        return False


def get_config(
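For context, the two huggingface_hub behaviors this hunk relies on can be exercised directly. The snippet below is an illustrative sketch, not part of the commit: the repo id and config filename are placeholder assumptions, and HF_HUB_OFFLINE is set in the environment to simulate offline mode. try_to_load_from_cache only inspects the local cache and returns a path string when the file is already cached, while file_exists queries the Hub and raises OfflineModeIsEnabled instead of reaching the network when offline.

# Illustrative sketch (not part of the commit): demonstrates the two
# huggingface_hub behaviors the patched helper depends on.
import os

# Assumption for this demo: simulate offline mode via the environment
# variable before huggingface_hub is imported.
os.environ["HF_HUB_OFFLINE"] = "1"

from huggingface_hub import file_exists, try_to_load_from_cache
from huggingface_hub.errors import OfflineModeIsEnabled

repo_id = "facebook/opt-125m"  # placeholder model, not taken from the commit
config_name = "config.json"

# Returns a str path when the file is already present in the local HF cache;
# otherwise None (or a "known missing" sentinel). This is a local-only check.
cached = try_to_load_from_cache(repo_id=repo_id, filename=config_name)
print("cached config path:", cached if isinstance(cached, str) else None)

# In offline mode the Hub lookup inside file_exists raises
# OfflineModeIsEnabled; the commit catches this and returns False.
try:
    print("config.json on the Hub:", file_exists(repo_id, config_name))
except OfflineModeIsEnabled:
    print("offline: cannot query the Hub, treating the file as unavailable")

With this change, file_or_path_exists can report True for a model whose config file is already in the local cache even with no network access, and quietly returns False (rather than raising) when the file is neither cached nor reachable.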
