We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent e4cdd82 · commit 59f1996 (Copy full SHA for 59f1996)
torchtune/models/clip/_position_embeddings.py
@@ -282,7 +282,7 @@ def _resize_local_position_embedding(
282
283
return local_pos_embed
284
285
- # TODO: Switchswitch to public method after 2.5 is stable
+ # TODO: Switch to public method after 2.5 is stable
286
@staticmethod
287
def _resize_global_position_embedding(
288
global_pos_embed: torch.Tensor,
@@ -468,7 +468,7 @@ def __init__(
468
# Register load hook to interpolate positional embeddings
469
self._register_load_state_dict_pre_hook(self._load_state_dict_hook)
470
471
472
@torch.no_grad()
473
def _load_state_dict_hook(
474
self,
0 commit comments