from_pretrained has correct return type (i.e. HookedSAETransformer.from_pretrained returns HookedSAETransformer) #743

Merged
19 changes: 16 additions & 3 deletions transformer_lens/HookedTransformer.py
@@ -10,7 +10,18 @@
 """
 import logging
 import os
-from typing import Dict, List, NamedTuple, Optional, Tuple, Union, cast, overload
+from typing import (
+    Dict,
+    List,
+    NamedTuple,
+    Optional,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+    cast,
+    overload,
+)
 
 import einops
 import numpy as np
@@ -67,6 +78,8 @@
     "bf16": torch.bfloat16,
 }
 
+T = TypeVar("T", bound="HookedTransformer")
+
 
 class Output(NamedTuple):
     """Output Named Tuple.
@@ -1053,7 +1066,7 @@ def move_model_modules_to_device(self):
 
     @classmethod
     def from_pretrained(
-        cls,
+        cls: Type[T],
         model_name: str,
         fold_ln: bool = True,
         center_writing_weights: bool = True,
@@ -1072,7 +1085,7 @@ def from_pretrained(
         dtype="float32",
         first_n_layers: Optional[int] = None,
         **from_pretrained_kwargs,
-    ) -> "HookedTransformer":
+    ) -> T:
         """Load in a Pretrained Model.
 
         Load in pretrained model weights to the HookedTransformer format and optionally to do some
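
For illustration, here is a minimal, self-contained sketch of the typing pattern this PR applies (class bodies and the loader are simplified stand-ins, not the library's actual implementation). Because cls is annotated as Type[T] with T bound to HookedTransformer, a type checker resolves T to whichever subclass from_pretrained is called on:

from typing import Type, TypeVar

T = TypeVar("T", bound="HookedTransformer")


class HookedTransformer:
    @classmethod
    def from_pretrained(cls: Type[T], model_name: str) -> T:
        # Simplified stand-in for the real loader, which builds a config
        # and loads pretrained weights before constructing the model.
        return cls()


class HookedSAETransformer(HookedTransformer):
    pass


model = HookedSAETransformer.from_pretrained("gpt2")
# Type checkers now infer `model: HookedSAETransformer` rather than the
# previously hard-coded `HookedTransformer` return annotation.

As a side note, this TypeVar-on-cls pattern predates typing.Self; on Python 3.11+ (or with typing_extensions), annotating the return type as Self expresses the same intent more concisely.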