fix: prevent offline freeze by fixing stats retry and forwarding local_files_only (#5016)

Fixes #2393.

- `_utils.py`: `has_internet()` now treats truthy values of `HF_HUB_OFFLINE` (`"1"`, `"true"`, `"yes"`, `"on"`, case-insensitive) as offline, in addition to the existing `TRANSFORMERS_OFFLINE` check.
- `_utils.py`: replace the uncontrolled `except Exception: stats_check()` retry (which had no time limit and could freeze in Kaggle's offline mode) with a logged skip.
- `loader.py`: forward `local_files_only` from kwargs into all `AutoConfig.from_pretrained` and `PeftConfig.from_pretrained` probes in `FastLanguageModel.from_pretrained` and `FastModel.from_pretrained`, including the PEFT base-model reload paths.
This commit is contained in:
David Solanas Sanz 2026-04-15 13:51:31 +02:00 committed by GitHub
parent f9ef639dde
commit 1fcb2502cf
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
2 changed files with 17 additions and 2 deletions

View file

@ -1350,6 +1350,11 @@ import socket
def has_internet(host = "8.8.8.8", port = 53, timeout = 3):
if os.environ.get("TRANSFORMERS_OFFLINE", "0") == "1":
return False
OFFLINE_TRUE = {"1", "true", "yes", "on"}
if os.environ.get("HF_HUB_OFFLINE", "").strip().lower() in OFFLINE_TRUE:
return False
try:
socket.setdefaulttimeout(timeout)
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
@ -1468,8 +1473,8 @@ def _get_statistics(statistics = None, force_download = True):
"```"
)
except Exception:
# Try no time limit check
stats_check()
logger.debug("Unsloth: stats_check failed with an exception.")
# Don't retry without a time limit — would freeze offline
def get_statistics(local_files_only = False):

View file

@ -439,12 +439,15 @@ class FastLanguageModel(FastLlamaModel):
peft_error = None
model_config = None
peft_config = None
local_files_only = kwargs.get("local_files_only", False)
try:
model_config = AutoConfig.from_pretrained(
model_name,
token = token,
revision = revision,
trust_remote_code = trust_remote_code,
local_files_only = local_files_only,
)
is_model = True
except ImportError:
@ -470,6 +473,7 @@ class FastLanguageModel(FastLlamaModel):
token = token,
revision = revision,
trust_remote_code = trust_remote_code,
local_files_only = local_files_only,
)
is_peft = True
except ImportError:
@ -566,6 +570,7 @@ class FastLanguageModel(FastLlamaModel):
model_name,
token = token,
trust_remote_code = trust_remote_code,
local_files_only = local_files_only,
)
if not was_disabled:
@ -1049,12 +1054,15 @@ class FastModel(FastBaseModel):
peft_error = None
model_config = None
peft_config = None
local_files_only = kwargs.get("local_files_only", False)
try:
model_config = AutoConfig.from_pretrained(
model_name,
token = token,
revision = revision,
trust_remote_code = trust_remote_code,
local_files_only = local_files_only,
)
is_model = True
except ImportError:
@ -1080,6 +1088,7 @@ class FastModel(FastBaseModel):
token = token,
revision = revision,
trust_remote_code = trust_remote_code,
local_files_only = local_files_only,
)
is_peft = True
except ImportError:
@ -1330,6 +1339,7 @@ class FastModel(FastBaseModel):
model_name,
token = token,
trust_remote_code = trust_remote_code,
local_files_only = local_files_only,
)
if not was_disabled: