Apply isort and black reformatting
Signed-off-by: akoumpa <akoumpa@users.noreply.github.com>
akoumpa committed Jun 26, 2024
1 parent 72e7775 commit f658854
Showing 1 changed file with 15 additions and 4 deletions.
@@ -298,7 +298,7 @@ def encode(self, x):
 
 
 class BERTTokenizer(AbstractEncoder):
-    """ Uses a pretrained BERT tokenizer by huggingface. Vocab size: 30522 (?)"""
+    """Uses a pretrained BERT tokenizer by huggingface. Vocab size: 30522 (?)"""
 
     def __init__(self, device="cuda", vq_interface=True, max_length=77):
         super().__init__()
@@ -530,7 +530,10 @@ def __init__(
         print(f"Downloading clip with", arch, version, cache_dir)
         self.device = device
         model, _, _ = open_clip.create_model_and_transforms(
-            arch, device=torch.device("cpu"), pretrained=version, cache_dir=cache_dir,
+            arch,
+            device=torch.device("cpu"),
+            pretrained=version,
+            cache_dir=cache_dir,
         )
         del model.visual
         self.model = model
@@ -669,7 +672,11 @@ def build_tokenizer(self, cfg):
             legacy=legacy,
         )
 
-        _, self.text_transform = get_preprocess_fns(cfg, self.tokenizer, is_train=False,)
+        _, self.text_transform = get_preprocess_fns(
+            cfg,
+            self.tokenizer,
+            is_train=False,
+        )
         self.max_length = cfg.text.get("max_position_embeddings")
 
     def load_model(self, cfg, state_dict):
@@ -764,7 +771,11 @@ def __init__(
         super().__init__()
         assert layer in self.LAYERS
         self.projection_dim = 1280
-        model, _, _ = open_clip.create_model_and_transforms(arch, device=torch.device("cpu"), pretrained=version,)
+        model, _, _ = open_clip.create_model_and_transforms(
+            arch,
+            device=torch.device("cpu"),
+            pretrained=version,
+        )
         del model.visual
         self.model = model
 
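For context, the one-argument-per-line splits in this diff follow from black's default 88-character line limit together with its "magic trailing comma" rule: a trailing comma already present inside a call tells black to place every argument on its own line and keep the trailing comma. The following is a minimal sketch reproducing the change in the last hunk (not part of this commit); it assumes black is installed and uses black.format_str, which is an internal helper rather than a stable public API.

import black

# The pre-commit form of the call, as a source string. The names inside it
# (open_clip, torch, arch, version) are only parsed, never executed.
src = (
    "model, _, _ = open_clip.create_model_and_transforms("
    'arch, device=torch.device("cpu"), pretrained=version,)\n'
)

# black explodes the call because of the trailing comma before ")".
print(black.format_str(src, mode=black.Mode()))
# Expected output (matches the "+" lines of the last hunk, modulo indentation):
# model, _, _ = open_clip.create_model_and_transforms(
#     arch,
#     device=torch.device("cpu"),
#     pretrained=version,
# )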
