# PerfectGPT / app.py
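"""PerfectGPT Gradio Space.

Two-stage flow: a BART prompt-generation model expands a short role
description (e.g. "Chef") into a full system prompt, then Zephyr-7B answers
the user's question under that system prompt and streams the reply.
"""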
from threading import Thread

import torch
import gradio as gr
from transformers import (
    AutoTokenizer,
    AutoModelForSeq2SeqLM,
    TextIteratorStreamer,
    pipeline,
    set_seed,
)
# Prompt-generation model: expands a short role description (e.g. "Chef")
# into a full ChatGPT-style system prompt.
tokenizer = AutoTokenizer.from_pretrained(
    "Kaludi/chatgpt-gpt4-prompts-bart-large-cnn-samsum")
model = AutoModelForSeq2SeqLM.from_pretrained(
    "Kaludi/chatgpt-gpt4-prompts-bart-large-cnn-samsum", from_tf=True)
# Chat model: Zephyr-7B answers the user prompt under the generated system prompt.
pipe = pipeline("text-generation", model="HuggingFaceH4/zephyr-7b-alpha",
                torch_dtype=torch.bfloat16, device_map="auto")

fixed_temperature = 0.7  # fixed sampling temperature for generate() (assumed value)
def generate(inputuno, inputdos, max_new_tokens=3556, top_p=0.95, repetition_penalty=1.0):
    top_p = float(top_p)
    prompt = inputuno
    promptdos = inputdos

    set_seed(42)  # make sampling reproducible

    # Expand the short role description (e.g. "Chef") into a full system
    # prompt with the BART prompt-generation model.
    batch = tokenizer(prompt, return_tensors="pt")
    generated_ids = model.generate(batch["input_ids"])
    new_prompt = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)[0]

    generate_kwargs = dict(
        temperature=fixed_temperature,
        max_new_tokens=max_new_tokens,
        top_p=top_p,
        repetition_penalty=repetition_penalty,
        do_sample=True,
    )

    messages = [
        {"role": "system", "content": str(new_prompt)},
        {"role": "user", "content": str(promptdos)},
    ]

    # Render the chat template to a plain prompt string, then stream tokens
    # from the Zephyr pipeline as they are generated.
    final_prompt = pipe.tokenizer.apply_chat_template(
        messages, tokenize=False, add_generation_prompt=True)
    streamer = TextIteratorStreamer(
        pipe.tokenizer, skip_prompt=True, skip_special_tokens=True)
    thread = Thread(
        target=pipe,
        args=(final_prompt,),
        kwargs=dict(streamer=streamer, return_full_text=False, **generate_kwargs),
    )
    thread.start()

    output = ""
    for new_text in streamer:
        output += new_text
        yield output
    thread.join()
    return output
def generatePrompt(inputuno, inputdos):
    prompt = inputuno
    promptdos = inputdos

    # Expand the role description into a full system prompt with the BART model.
    batch = tokenizer(prompt, return_tensors="pt")
    generated_ids = model.generate(batch["input_ids"])
    output = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)
    new_prompt = output[0]

    messages = [
        {"role": "system", "content": str(new_prompt)},
        {"role": "user", "content": str(promptdos)},
    ]

    # https://huggingface.co./docs/transformers/main/en/chat_templating
    final_prompt = pipe.tokenizer.apply_chat_template(
        messages, tokenize=False, add_generation_prompt=True)
    # return_full_text=False strips the prompt from the pipeline output;
    # max_new_tokens=512 is an assumed cap on the reply length.
    outputs = pipe(final_prompt, do_sample=True, max_new_tokens=512,
                   return_full_text=False)
    return outputs[0]["generated_text"]
#
# Interface
input_prompt = gr.Textbox(label="Actua como: ", value="Chef")
input_promptdos = gr.Textbox(label="Prompt: ", value="Recipe for ham croquettes")
output_component = gr.Textbox(label="Output: ")
examples = [["photographer"], ["developer"], ["teacher"], [
"human resources staff"], ["recipe for ham croquettes"]]
description = ""
PerfectGPT = gr.Interface(generate, inputs=[input_prompt, input_promptdos], outputs=output_component, examples=examples, title="๐Ÿ—ฟ PerfectGPT v1 ๐Ÿ—ฟ", description=description)
PerfectGPT.launch()
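# To run outside the Space, the assumed dependencies are:
#   pip install torch transformers accelerate gradio
# then start the app with: python app.py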