Spaces: Running on Zero
gokaygokay committed: Update app.py

app.py CHANGED
@@ -23,8 +23,19 @@ import requests
 from RealESRGAN import RealESRGAN
 
 
-import
-
+from unittest.mock import patch
+from typing import Union
+from transformers.dynamic_module_utils import get_imports
+
+
+def fixed_get_imports(filename):
+    """Work around for https://huggingface.co/microsoft/phi-1_5/discussions/72."""
+    if not str(filename).endswith("/modeling_florence2.py"):
+        return get_imports(filename)
+    imports = get_imports(filename)
+    imports.remove("flash_attn")
+    return imports
+
 
 def download_file(url, folder_path, filename):
     if not os.path.exists(folder_path):
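Context for this hunk: Florence-2's remote modeling_florence2.py lists flash_attn among its dynamic imports, so loading it with trust_remote_code=True fails on machines where flash-attn is not installed; fixed_get_imports filters that single entry out for that single file. A minimal sketch of how the helper is meant to be applied, assuming (as the commented-out line in the second hunk suggests) that the patch should wrap the model-loading calls; the placeholder body is an assumption, not part of the commit:

from unittest.mock import patch

from transformers.dynamic_module_utils import get_imports


def fixed_get_imports(filename):
    """Return the dynamic-import list, minus flash_attn for modeling_florence2.py."""
    if not str(filename).endswith("/modeling_florence2.py"):
        return get_imports(filename)
    imports = get_imports(filename)
    imports.remove("flash_attn")
    return imports


# While the patch is active, transformers' dynamic-module loader resolves
# imports through the filtered helper instead of the real get_imports.
# This module's own get_imports binding is untouched, so there is no recursion.
with patch("transformers.dynamic_module_utils.get_imports", fixed_get_imports):
    ...  # perform the trust_remote_code=True from_pretrained calls here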
@@ -106,8 +117,10 @@ DEFAULT_POSITIVE_SUFFIX = "(masterpiece) very_aesthetic hi_res absurd_res supera
 DEFAULT_NEGATIVE_PREFIX = "Score_1 score_2 score _3 text low_res"
 DEFAULT_NEGATIVE_SUFFIX = "Nsfw oversaturated crappy_art low_quality blurry bad_anatomy extra_digits fewer_digits simple_background very_displeasing watermark signature"
 
-# Initialize Florence model
 device = "cuda" if torch.cuda.is_available() else "cpu"
+
+#def load_models():
+    #with patch("transformers.dynamic_module_utils.get_imports", fixed_get_imports):
 florence_model = AutoModelForCausalLM.from_pretrained('microsoft/Florence-2-base', trust_remote_code=True).to(device).eval()
 florence_processor = AutoProcessor.from_pretrained('microsoft/Florence-2-base', trust_remote_code=True)
 
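Note that in this hunk the workaround is not yet wired in: the def load_models(): and with patch(...) lines stay commented out, and the two from_pretrained calls still run at import time without the patch. If the scaffold were enabled, one plausible completion looks like the sketch below. This is a hypothetical reading (the commit contains no load_models body beyond the commented with-statement) and it assumes fixed_get_imports from the first hunk, device as defined above, and the existing torch/transformers imports are in scope:

def load_models():
    # Hypothetical completion of the commented-out scaffold; the committed code
    # keeps loading Florence-2 eagerly, outside any patch context.
    with patch("transformers.dynamic_module_utils.get_imports", fixed_get_imports):
        model = AutoModelForCausalLM.from_pretrained(
            'microsoft/Florence-2-base', trust_remote_code=True
        ).to(device).eval()
        processor = AutoProcessor.from_pretrained(
            'microsoft/Florence-2-base', trust_remote_code=True
        )
    return model, processor


florence_model, florence_processor = load_models()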