Spaces:
Runtime error
Runtime error
Jyotiyadav
committed on
Update app.py
Browse files
app.py
CHANGED
@@ -5,15 +5,16 @@ import torchvision
|
|
5 |
from transformers import pipeline
|
6 |
auth_token = os.environ.get("HUGGING_FACE_HUB_TOKEN")
|
7 |
|
8 |
-
# Initialize the pipeline
|
9 |
-
pipe = pipeline(
|
10 |
-
"text-generation",
|
11 |
-
model="Jyotiyadav/mistral_7B_NER",
|
12 |
-
torch_dtype=torch.bfloat16,
|
13 |
-
device_map="auto")
|
14 |
-
|
15 |
# Function to generate output based on input
|
16 |
-
def generate_output(input_text):
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
17 |
# Prompt for extracting information
|
18 |
prompt = f'''
|
19 |
Your task is to extract the information corresponding to the provided labels from the below given email.
|
@@ -52,16 +53,17 @@ def generate_output(input_text):
|
|
52 |
result = pipe(
|
53 |
f"<s>[INST] {prompt} [/INST]",
|
54 |
do_sample=True,
|
55 |
-
max_new_tokens=
|
56 |
-
temperature=
|
57 |
-
top_k=
|
58 |
-
top_p=
|
59 |
num_return_sequences=1,
|
60 |
)
|
61 |
|
62 |
# Return the generated text
|
63 |
return result[0]['generated_text']
|
64 |
|
|
|
65 |
examples = [
|
66 |
'''
|
67 |
COTIZACION FLETE MARITIMO OC 4500325343 Buongiorno ;
|
@@ -89,13 +91,26 @@ Del. Alvaro Obregon ;
|
|
89 |
|
90 |
|
91 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
92 |
|
93 |
# Create a Gradio interface
|
94 |
-
iface = gr.Interface(
|
95 |
-
|
96 |
-
|
97 |
-
|
98 |
-
|
99 |
-
|
100 |
-
|
101 |
-
|
|
|
|
|
|
|
|
|
|
5 |
from transformers import pipeline
|
6 |
auth_token = os.environ.get("HUGGING_FACE_HUB_TOKEN")
|
7 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
8 |
# Function to generate output based on input
|
9 |
+
def generate_output(input_text, max_new_tokens, temperature, top_k, top_p, model):
|
10 |
+
# Initialize the pipeline
|
11 |
+
pipe = pipeline(
|
12 |
+
"text-generation",
|
13 |
+
model=model,
|
14 |
+
torch_dtype=torch.bfloat16,
|
15 |
+
device_map="auto"
|
16 |
+
)
|
17 |
+
|
18 |
# Prompt for extracting information
|
19 |
prompt = f'''
|
20 |
Your task is to extract the information corresponding to the provided labels from the below given email.
|
|
|
53 |
result = pipe(
|
54 |
f"<s>[INST] {prompt} [/INST]",
|
55 |
do_sample=True,
|
56 |
+
max_new_tokens=max_new_tokens,
|
57 |
+
temperature=temperature,
|
58 |
+
top_k=top_k,
|
59 |
+
top_p=top_p,
|
60 |
num_return_sequences=1,
|
61 |
)
|
62 |
|
63 |
# Return the generated text
|
64 |
return result[0]['generated_text']
|
65 |
|
66 |
+
|
67 |
examples = [
|
68 |
'''
|
69 |
COTIZACION FLETE MARITIMO OC 4500325343 Buongiorno ;
|
|
|
91 |
|
92 |
|
93 |
|
# Gradio input components for generate_output (text, max_new_tokens,
# temperature, top_k, top_p, model — order must match the function signature).
# NOTE(review): the `gr.inputs.*` namespace was removed in Gradio 3.x, so the
# original `gr.inputs.Textbox(...)` calls raise AttributeError at import time —
# this is the most likely cause of the Space's "Runtime error". The top-level
# components are the supported replacements, and the `default=` kwarg was
# renamed `value=`.
inputs = [
    gr.Textbox(label="Input Text"),
    gr.Number(label="Max New Tokens", value=32000),
    gr.Slider(label="Temperature", minimum=0.0, maximum=1.0, value=0.1, step=0.01),
    gr.Number(label="Top K", value=0),
    gr.Number(label="Top P", value=0),
    gr.Textbox(label="Model", value="Jyotiyadav/mistral_7B_NER"),
]
|
103 |
|
104 |
# Build the Gradio interface around generate_output.
# NOTE(review): `debug` is a parameter of `Interface.launch()`, not of
# `Interface.__init__()`; passing it to the constructor raises TypeError on
# current Gradio releases. Moved it to the launch() call below.
iface = gr.Interface(
    fn=generate_output,
    inputs=inputs,
    outputs="text",
    # examples=examples,  # disabled in the original commit; left as-is
    title="Information Extraction with Mistral-7B",
    description="Generate Information Extraction with OpenLLM.",
)

# Launch the interface (debug=True surfaces tracebacks in the Space logs).
iface.launch(debug=True)
|