|
@@ -1,11 +1,11 @@
|
|
|
import os
|
|
|
import torch
|
|
|
|
|
|
-from transformers import AutoModel, AutoConfig, CLIPImageProcessor, AutoTokenizer
|
|
|
+from transformers import AutoModel, AutoConfig, AutoTokenizer
|
|
|
|
|
|
MODEL_NAME = "BAAI/EVA-CLIP-8B"
|
|
|
DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
|
|
|
-DTYPE = torch.float16 if DEVICE == "cuda" else torch.float32
|
|
|
+DTYPE = torch.float16 if DEVICE == "cuda" else torch.float32
|
|
|
MAX_BATCH = int(os.getenv("MAX_BATCH", "32"))
|
|
|
|
|
|
print(f"[model_config] Loading {MODEL_NAME} on {DEVICE} dtype={DTYPE} ...")
|