wjbmattingly committed on
Commit
6690643
1 Parent(s): 3f06e75

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -0
app.py CHANGED
@@ -9,10 +9,20 @@ import numpy as np
9
  import requests
10
  from io import BytesIO
11
  from unittest.mock import patch
 
12
 
13
  subprocess.run('pip install flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
14
 
15
  model_dir = 'medieval-data/florence2-medieval-bbox-line-detection'
 
 
 
 
 
 
 
 
 
16
  with patch("transformers.dynamic_module_utils.get_imports", fixed_get_imports):
17
  # Load the configuration
18
  config = AutoConfig.from_pretrained(model_dir, trust_remote_code=True)
 
9
  import requests
10
  from io import BytesIO
11
  from unittest.mock import patch
12
+ from transformers.dynamic_module_utils import get_imports
13
 
14
  subprocess.run('pip install flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
15
 
16
  model_dir = 'medieval-data/florence2-medieval-bbox-line-detection'
17
+
18
def fixed_get_imports(filename: str | os.PathLike) -> list[str]:
    """Return a module's imports, dropping ``flash_attn`` for Florence-2.

    Workaround for https://huggingface.co/microsoft/phi-1_5/discussions/72:
    the remote ``modeling_florence2.py`` unconditionally imports
    ``flash_attn``, which may be unavailable in this environment, so it is
    stripped from the import list before transformers tries to resolve it.

    Args:
        filename: Path of the remote-code module being inspected.

    Returns:
        The import list produced by
        ``transformers.dynamic_module_utils.get_imports``, with
        ``"flash_attn"`` removed when *filename* is ``modeling_florence2.py``.
    """
    imports = get_imports(filename)
    # Only Florence-2's modeling file needs the patch; all other modules
    # keep their import list untouched.
    if str(filename).endswith("/modeling_florence2.py"):
        # Guard the removal: list.remove raises ValueError if the item is
        # absent (e.g. a future modeling file that no longer hard-imports
        # flash_attn), which would break model loading entirely.
        if "flash_attn" in imports:
            imports.remove("flash_attn")
    return imports
25
+
26
  with patch("transformers.dynamic_module_utils.get_imports", fixed_get_imports):
27
  # Load the configuration
28
  config = AutoConfig.from_pretrained(model_dir, trust_remote_code=True)