@@ -2,7 +2,7 @@ import os
 import torch
 from transformers import AutoModel, AutoConfig, CLIPProcessor
 
-MODEL_NAME = "EVA-CLIP-8B"
+MODEL_NAME = "BAAI/EVA-CLIP-8B"
 DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
 DTYPE = torch.float16 if DEVICE == "cuda" else torch.float32
 MAX_BATCH = int(os.getenv("MAX_BATCH", "32"))
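
For context, a minimal sketch (not part of this patch) of how these constants would typically be consumed downstream. It assumes the imports and constants from the hunk above are in scope; the trust_remote_code flag and the processor checkpoint are assumptions, not taken from this change.

    # Sketch only: assumes MODEL_NAME, DEVICE, DTYPE and the imports from the hunk above.
    model = AutoModel.from_pretrained(
        MODEL_NAME,              # fully qualified Hub id, i.e. "BAAI/EVA-CLIP-8B"
        torch_dtype=DTYPE,
        trust_remote_code=True,  # assumption: the Hub repo ships custom modeling code
    ).to(DEVICE).eval()

    # Assumption: inputs are preprocessed with a stock CLIP processor checkpoint.
    processor = CLIPProcessor.from_pretrained("openai/clip-vit-large-patch14")

Using the fully qualified "BAAI/EVA-CLIP-8B" repo id matters here because from_pretrained resolves bare names only against the local cache or an implicit namespace, so the unqualified "EVA-CLIP-8B" would fail to download from the Hub.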