Running on L4
Update app.py
app.py CHANGED

@@ -1,6 +1,6 @@
+import torch
 import gradio as gr
 from transformers import AutoProcessor, AutoModelForCausalLM
-import spaces
 
 import requests
 import copy
@@ -12,7 +12,6 @@ import matplotlib.patches as patches
 
 import random
 import numpy as np
-import torch
 import gc
 
 DESCRIPTION = "# [Florence-2 Demo](https://huggingface.co/microsoft/Florence-2-large)"
@@ -30,7 +29,6 @@ model_id='microsoft/Florence-2-large'
 model = AutoModelForCausalLM.from_pretrained('microsoft/Florence-2-large', trust_remote_code=True).to("cuda").eval()
 processor = AutoProcessor.from_pretrained('microsoft/Florence-2-large', trust_remote_code=True)
 
-@spaces.GPU
 def run_example(task_prompt, image, text_input=None, model_id='microsoft/Florence-2-large'):
     if text_input is None:
         prompt = task_prompt
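The change itself is small: `import torch` moves to the top of the file, while `import spaces` and the `@spaces.GPU` decorator on `run_example` are removed. A plausible reading, given that the Space reports running on a dedicated L4 GPU, is that ZeroGPU's on-demand GPU attachment is no longer needed, so the model can simply stay resident on CUDA. The sketch below illustrates that pattern; it is not the Space's exact code: the body of `run_example` is reconstructed from the standard Florence-2 usage on the model card, and only its signature and first lines appear in the diff.

# Minimal sketch, assuming the app follows the standard Florence-2 recipe.
# The ZeroGPU pattern this commit removes looked like:
#
#   import spaces
#
#   @spaces.GPU                      # attach a GPU only while this call runs
#   def run_example(...): ...
#
# On dedicated hardware the decorator is unnecessary, so the model can stay on CUDA:

import torch
import gradio as gr
from transformers import AutoProcessor, AutoModelForCausalLM

model = AutoModelForCausalLM.from_pretrained(
    'microsoft/Florence-2-large', trust_remote_code=True
).to("cuda").eval()
processor = AutoProcessor.from_pretrained(
    'microsoft/Florence-2-large', trust_remote_code=True
)

def run_example(task_prompt, image, text_input=None, model_id='microsoft/Florence-2-large'):
    # Body reconstructed from the Florence-2 model card, not from this diff.
    # `image` is assumed to be a PIL.Image; `model_id` is kept only to match the signature.
    prompt = task_prompt if text_input is None else task_prompt + text_input
    inputs = processor(text=prompt, images=image, return_tensors="pt").to("cuda")
    generated_ids = model.generate(
        input_ids=inputs["input_ids"],
        pixel_values=inputs["pixel_values"],
        max_new_tokens=1024,
        num_beams=3,
    )
    generated_text = processor.batch_decode(generated_ids, skip_special_tokens=False)[0]
    return processor.post_process_generation(
        generated_text, task=task_prompt, image_size=(image.width, image.height)
    )

The trade-off of dropping the decorator is the usual one: keeping the model permanently on CUDA avoids per-call allocation latency, but the GPU memory is held for the lifetime of the process rather than released between requests.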