tokenize.TokenError: ('unterminated string literal (detected at line 1122)', (1122, 1)) #170

Open
returnaaa opened this issue Nov 4, 2024 · 0 comments

Traceback (most recent call last):
  File "/root/autodl-tmp/EVA/EVA-CLIP/rei/test.py", line 2, in <module>
    from eva_clip import create_model_and_transforms, get_tokenizer
  File "/root/autodl-tmp/EVA/EVA-CLIP/rei/eva_clip/__init__.py", line 2, in <module>
    from .factory import create_model, create_model_and_transforms, create_model_from_pretrained, get_tokenizer
  File "/root/autodl-tmp/EVA/EVA-CLIP/rei/eva_clip/factory.py", line 12, in <module>
    from .model import CLIP, CustomCLIP, convert_weights_to_lp, convert_to_custom_text_state_dict,
  File "/root/autodl-tmp/EVA/EVA-CLIP/rei/eva_clip/model.py", line 21, in <module>
    from .eva_vit_model import EVAVisionTransformer
  File "/root/autodl-tmp/EVA/EVA-CLIP/rei/eva_clip/eva_vit_model.py", line 15, in <module>
    from .transformer import PatchDropout
  File "/root/autodl-tmp/EVA/EVA-CLIP/rei/eva_clip/transformer.py", line 31, in <module>
    import xformers.ops as xops
  File "/root/miniconda3/lib/python3.12/site-packages/xformers/ops/__init__.py", line 8, in <module>
    from .fmha import (
  File "/root/miniconda3/lib/python3.12/site-packages/xformers/ops/fmha/__init__.py", line 10, in <module>
    from . import (
  File "/root/miniconda3/lib/python3.12/site-packages/xformers/ops/fmha/triton_splitk.py", line 548, in <module>
    _get_splitk_kernel(num_groups)
  File "/root/miniconda3/lib/python3.12/site-packages/xformers/ops/fmha/triton_splitk.py", line 503, in _get_splitk_kernel
    _fwd_kernel_splitK_unrolled = unroll_varargs(_fwd_kernel_splitK, N=num_groups)
                                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/root/miniconda3/lib/python3.12/site-packages/xformers/triton/vararg_kernel.py", line 166, in unroll_varargs
    jitted_fn = triton.jit(fn)
                ^^^^^^^^^^^^^^
  File "/root/miniconda3/lib/python3.12/site-packages/triton/runtime/jit.py", line 882, in jit
    return decorator(fn)
           ^^^^^^^^^^^^^
  File "/root/miniconda3/lib/python3.12/site-packages/triton/runtime/jit.py", line 871, in decorator
    return JITFunction(
           ^^^^^^^^^^^^
  File "/root/miniconda3/lib/python3.12/site-packages/triton/runtime/jit.py", line 704, in __init__
    self.starting_line_number = inspect.getsourcelines(fn)[1]
                                ^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/root/miniconda3/lib/python3.12/inspect.py", line 1270, in getsourcelines
    return getblock(lines[lnum:]), lnum + 1
           ^^^^^^^^^^^^^^^^^^^^^^
  File "/root/miniconda3/lib/python3.12/inspect.py", line 1237, in getblock
    for _token in tokens:
  File "/root/miniconda3/lib/python3.12/tokenize.py", line 582, in _generate_tokens_from_c_tokenizer
    raise TokenError(msg, (e.lineno, e.offset)) from None
tokenize.TokenError: ('unterminated string literal (detected at line 1122)', (1122, 1))
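
My read of the traceback (not a confirmed diagnosis): merely importing xformers.ops runs triton_splitk.py at module level, which builds a kernel from dynamically generated source via unroll_varargs and hands it to triton.jit; JITFunction.__init__ then calls inspect.getsourcelines on that generated function, and the Python 3.12 tokenizer fails on the synthetic source with the TokenError above. If that is right, this is a triton/xformers-vs-Python-3.12 compatibility problem rather than an EVA-CLIP bug, and upgrading triton and xformers, or running under Python 3.11, may avoid it.

A possible stopgap around line 31 of rei/eva_clip/transformer.py, sketched below: the TokenError is not an ImportError, so a bare `except ImportError` would not catch it. Whether eva_clip then runs correctly with xops = None (i.e. with memory-efficient attention disabled) is an assumption on my part, not something the maintainers have confirmed.

# Stopgap sketch for rei/eva_clip/transformer.py (line 31 in the traceback).
# Catch the TokenError explicitly in addition to ImportError; falling back
# to xops = None assumes the plain-attention code path works without xformers.
import tokenize

try:
    import xformers.ops as xops
except (ImportError, tokenize.TokenError):
    xops = None  # plain-attention fallback; keep xattn disabled in this case

For reference, the test.py that triggers this is essentially the usage example from the EVA-CLIP README: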

import torch
from eva_clip import create_model_and_transforms, get_tokenizer
from PIL import Image

model_name = "EVA02-CLIP-B-16"
pretrained = "eva_clip" # or "/path/to/EVA02_CLIP_B_psz16_s8B.pt"

image_path = "./cat.jpg"
caption = ["a diagram", "a dog", "a cat"]

device = "cuda" if torch.cuda.is_available() else "cpu"
model, _, preprocess = create_model_and_transforms(model_name, pretrained, force_custom_clip=True)
tokenizer = get_tokenizer(model_name)
model = model.to(device)

image = preprocess(Image.open(image_path)).unsqueeze(0).to(device)
text = tokenizer(["a diagram", "a dog", "a cat"]).to(device)

with torch.no_grad(), torch.cuda.amp.autocast():
    image_features = model.encode_image(image)
    text_features = model.encode_text(text)
    image_features /= image_features.norm(dim=-1, keepdim=True)
    text_features /= text_features.norm(dim=-1, keepdim=True)

    text_probs = (100.0 * image_features @ text_features.T).softmax(dim=-1)

print("Label probs:", text_probs)  # prints: [[0.8275, 0.1372, 0.0352]]
