We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent e69fd47 commit e4cdd82Copy full SHA for e4cdd82
torchtune/models/clip/_position_embeddings.py
@@ -282,6 +282,7 @@ def _resize_local_position_embedding(
282
283
return local_pos_embed
284
285
+ # TODO: Switch to public method after 2.5 is stable
286
@staticmethod
287
def _resize_global_position_embedding(
288
global_pos_embed: torch.Tensor,
@@ -467,6 +468,7 @@ def __init__(
467
468
# Register load hook to interpolate positional embeddings
469
self._register_load_state_dict_pre_hook(self._load_state_dict_hook)
470
471
472
@torch.no_grad()
473
def _load_state_dict_hook(
474
self,
0 commit comments