Upload app.py
app.py CHANGED
@@ -1,6 +1,7 @@
 import gradio as gr
 import whisper
 from langchain_openai import ChatOpenAI
+from langchain_openai import AzureChatOpenAI
 from utils import RefineDataSummarizer
 from utils import (
     prompt_template,
@@ -47,8 +48,8 @@ def time_stamped_text(transcript_result):
 
 
 def transcript(file_dir, model_type, time_stamp):
-    model_dir = os.path.join('models', model_type)
-
+    # model_dir = os.path.join('models', model_type)
+    model_dir = "E:\\Whisper\\" + model_type
     model = whisper.load_model(model_dir)
     result = model.transcribe(file_dir, language='English', task='transcribe')
 
@@ -69,13 +70,25 @@ def upload_file(file_paths):
     return file_paths
 
 
-def summary(text, chunk_num, chunk_overlap, user_api, llm_type, prompt, refine_prompt):
-    if user_api == "Not Provided":
-        api_key = os.getenv("openai_api")
-    else:
-        api_key = user_api
-    api_key = api_key.strip()
-    llm = ChatOpenAI(temperature=1, openai_api_key=api_key, model_name=llm_type)
+def summary(text, chunk_num, chunk_overlap, llm_type, prompt, refine_prompt):
+    # if user_api == "Not Provided":
+    #     api_key = os.getenv("openai_api")
+    # deployment_name = llm_type
+    # else:
+    #     api_key = user_api
+    # api_key = api_key.strip()
+    # llm = ChatOpenAI(temperature=1, openai_api_key=api_key, model_name=llm_type)
+
+    os.environ["AZURE_OPENAI_API_KEY"] = os.getenv("azure_api")
+    os.environ["AZURE_OPENAI_ENDPOINT"] = os.getenv("azure_endpoint")
+    openai_api_version = os.getenv("azure_api_version")
+    deployment_name = llm_type
+
+    llm = AzureChatOpenAI(
+        openai_api_version=openai_api_version,
+        azure_deployment=deployment_name
+    )
+
     rds = RefineDataSummarizer(llm=llm, prompt_template=prompt, refine_template=refine_prompt)
     result = rds.get_summarization(text, chunk_num=chunk_num, chunk_overlap=chunk_overlap)
     text = result["output_text"]
@@ -125,12 +138,20 @@ with gr.Blocks() as demo:
         )
 
         with gr.Accordion(open=False, label=["llm settings"]):
-            user_api = gr.Textbox(placeholder="If Empty, Use Default Key", label="Your API Key", value="Not Provided")
+            # user_api = gr.Textbox(placeholder="If Empty, Use Default Key", label="Your API Key", value="Not Provided")
+            # llm_type = gr.Dropdown(
+            #     [
+            #         "gpt-3.5-turbo",
+            #         "gpt-3.5-turbo-16k",
+            #         "gpt-4-1106-preview"
+            #     ], label="LLM Type", value="gpt-4-1106-preview")
             llm_type = gr.Dropdown(
                 [
-                    "gpt-3.5-turbo",
-                    "gpt-3.5-turbo-16k",
-                    "gpt-4-1106-preview"
+                    "gpt-4-32k",
+                    "gpt-4",
+                    "gpt-4-1106-preview",
+                    "gpt-35-turbo",
+                    "gpt-35-turbo-16k"
                 ], label="LLM Type", value="gpt-4-1106-preview")
             SunmmaryButton = gr.Button("Summary", variant="primary")
             summary_text = gr.Textbox(placeholder="Summary Result", label="Summary")
@@ -153,7 +174,7 @@ with gr.Blocks() as demo:
             transcript_text,
             chunk_num,
             chunk_overlap,
-            user_api,
+            #user_api,
             llm_type,
             prompt,
             refine_prompt
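
For context on the change above, here is a minimal sketch of the new Azure-backed summarization path. It assumes the Space defines the secrets azure_api, azure_endpoint, and azure_api_version (the names read via os.getenv in app.py) and that the dropdown value doubles as the Azure deployment name; the deployment name and transcript text below are placeholders, and the utils.py import shown in comments is an assumption based on the RefineDataSummarizer usage in the diff, not a confirmed API.

import os

from langchain_openai import AzureChatOpenAI

# Map the Space secrets onto the environment variables AzureChatOpenAI reads.
os.environ["AZURE_OPENAI_API_KEY"] = os.getenv("azure_api", "")
os.environ["AZURE_OPENAI_ENDPOINT"] = os.getenv("azure_endpoint", "")

# The selected dropdown value is used directly as the Azure deployment name.
deployment_name = "gpt-4-1106-preview"  # placeholder; use your own deployment

llm = AzureChatOpenAI(
    openai_api_version=os.getenv("azure_api_version"),
    azure_deployment=deployment_name,
)

# Refine-style summarization as in summary(); RefineDataSummarizer and the
# templates are assumed to come from this repo's utils.py.
# from utils import RefineDataSummarizer, prompt_template, refine_template
# rds = RefineDataSummarizer(llm=llm, prompt_template=prompt_template,
#                            refine_template=refine_template)
# result = rds.get_summarization("transcript text here", chunk_num=4, chunk_overlap=50)
# print(result["output_text"])

This mirrors the committed summary() body: the user-supplied OpenAI key path is commented out, and the key, endpoint, and API version now come only from Space secrets.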