Update clip to fp16
app.py CHANGED
@@ -77,7 +77,7 @@ def get_config():
 print("cuda available:",torch.cuda.is_available())
 print("cuda device count:",torch.cuda.device_count())
 print("cuda device name:",torch.cuda.get_device_name(0))
-
+print(os.system("nvidia-smi"))
 print(os.system("nvcc --version"))
 
 empty_context = np.load("assets/contexts/empty_context.npy")
@@ -86,7 +86,7 @@ config = get_config()
 device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
 print(device)
 # Load open_clip and vq model
-prompt_model,_,_ = open_clip.create_model_and_transforms('ViT-bigG-14', 'laion2b_s39b_b160k')
+prompt_model,_,_ = open_clip.create_model_and_transforms('ViT-bigG-14', 'laion2b_s39b_b160k',precision='fp16')
 prompt_model = prompt_model.to(device)
 prompt_model.eval()
 tokenizer = open_clip.get_tokenizer('ViT-bigG-14')
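For context, a minimal sketch of how the fp16-loaded CLIP text encoder set up in this change can be exercised, assuming a CUDA GPU is available; the prompt string, the normalization step, and the final print are illustrative assumptions and are not part of app.py:

import torch
import open_clip

device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')

# Load the CLIP encoder in half precision, as in the diff above.
# precision='fp16' makes open_clip cast the model weights to torch.float16,
# roughly halving GPU memory compared with the default fp32 weights.
prompt_model, _, _ = open_clip.create_model_and_transforms(
    'ViT-bigG-14', 'laion2b_s39b_b160k', precision='fp16')
prompt_model = prompt_model.to(device)
prompt_model.eval()
tokenizer = open_clip.get_tokenizer('ViT-bigG-14')

# Hypothetical usage (not in app.py): encode a text prompt with the fp16 model.
with torch.no_grad():
    tokens = tokenizer(["a photo of a cat"]).to(device)
    text_features = prompt_model.encode_text(tokens)  # float16 on CUDA
    text_features = text_features / text_features.norm(dim=-1, keepdim=True)

print(text_features.dtype, text_features.shape)

Loading ViT-bigG-14 with precision='fp16' roughly halves its GPU memory footprint relative to fp32, and the added os.system("nvidia-smi") call prints driver and memory information to the Space logs alongside the existing nvcc --version check, which helps confirm the model actually fits on the available GPU.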