#
# Simple Gradio text-generation example (bk-sandbox/app.py) for Hugging Face Spaces.
#
import os

import gradio as gr
import spaces
import torch
from transformers import pipeline
token = os.getenv("HUGGINGFACE_API_TOKEN")

# Models tried during development; only the last assignment takes effect.
# model = "meta-llama/Meta-Llama-3-8B-Instruct"
# model = "instructlab/granite-7b-lab"
# model = "ibm/granite-7b-base"
model = "ibm-granite/granite-3b-code-instruct"
print(f"Loading model {model}")

# device_map="auto" places the model on the available accelerator,
# so an explicit pipe.to("cuda") is not needed.
pipe = pipeline(
    "text-generation",
    model,
    torch_dtype=torch.bfloat16,
    device_map="auto",
    token=token,
)
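# @spaces.GPU requests a GPU for the duration of the call when the app runs on
# a Hugging Face Spaces ZeroGPU instance; outside of Spaces it has no effect.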
@spaces.GPU
def generate(prompt):
    """Run the prompt through the pipeline and return the generated text."""
    # The text-generation pipeline returns a list of dicts shaped like
    # [{"generated_text": "<prompt + completion>"}].
    response = pipe(prompt, max_new_tokens=512)
    print("Response received!")
    return response[0]["generated_text"]
input_textbox = gr.Textbox(
    label="Prompt",
    info="Ask me something.",
    lines=3,
    value="# Write a python function to read a csv file using pandas and print rows 20 through 25.",
)
gr.Interface(
    fn=generate,
    inputs=input_textbox,
    outputs=gr.Text(),
    title=model,
).launch()
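# To run locally (a sketch, assuming the gradio, spaces, torch, and transformers
# packages are installed and HUGGINGFACE_API_TOKEN is set in the environment):
#   python app.py
# then open the URL Gradio prints (http://127.0.0.1:7860 by default).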