@@ -3,7 +3,7 @@ import torch
 
 from transformers import AutoModel, AutoConfig, AutoTokenizer
 
-MODEL_NAME = "BAAI/EVA-CLIP-8B-plus"
+MODEL_NAME = "BAAI/EVA-CLIP-8B-448"
 DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
 DTYPE = torch.float16 if DEVICE == "cuda" else torch.float32
 MAX_BATCH = int(os.getenv("MAX_BATCH", "32"))