Skip to content

Commit ff2464e

Browse files
committed
Throw when pretrained weights not available and pretrained=True (principle of least surprise).
1 parent 8ce9a2c commit ff2464e

File tree

1 file changed

+3
-4
lines changed

timm/models/_builder.py

Lines changed: 3 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -152,8 +152,7 @@ def load_pretrained(
152152
"""
153153
pretrained_cfg = pretrained_cfg or getattr(model, 'pretrained_cfg', None)
154154
if not pretrained_cfg:
155-
_logger.warning("Invalid pretrained config, cannot load weights.")
156-
return
155+
raise RuntimeError("Invalid pretrained config, cannot load weights. Use `pretrained=False` for random init.")
157156

158157
load_from, pretrained_loc = _resolve_pretrained_source(pretrained_cfg)
159158
if load_from == 'state_dict':
@@ -186,8 +185,8 @@ def load_pretrained(
186185
else:
187186
state_dict = load_state_dict_from_hf(pretrained_loc)
188187
else:
189-
_logger.warning("No pretrained weights exist or were found for this model. Using random initialization.")
190-
return
188+
model_name = pretrained_cfg.get('architecture', 'this model')
189+
raise RuntimeError(f"No pretrained weights exist for {model_name}. Use `pretrained=False` for random init.")
191190

192191
if filter_fn is not None:
193192
try:

0 commit comments

Comments (0)