import torch
from transformers import AutoTokenizer, AutoModelForCausalLM
import streamlit as st

# Load the model and tokenizer once and cache them, so Streamlit does not
# reload the weights every time the script reruns on a widget interaction
model_name = "Richieburundi/Ariginalmodel"

@st.cache_resource
def load_model():
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForCausalLM.from_pretrained(model_name)
    return tokenizer, model

tokenizer, model = load_model()

def generate_text(input_text, max_length=50):
    # max_length counts the prompt tokens plus the newly generated tokens
    inputs = tokenizer(input_text, return_tensors="pt")
    # Beam search (5 beams) combined with top-k/top-p sampling
    outputs = model.generate(
        **inputs,
        max_length=max_length,
        num_return_sequences=1,
        do_sample=True,
        top_k=50,
        top_p=0.95,
        num_beams=5,
    )
    # Decode the single returned sequence back into a string
    return tokenizer.decode(outputs[0], skip_special_tokens=True)

st.title("AI Text Generation")
st.write("Enter some text, and the AI will generate a response.")

input_text = st.text_area("Input Text", height=200)

if st.button("Generate Text"):
    try:
        generated_text = generate_text(input_text)
        st.write("Generated Text:")
        st.write(generated_text)
    except Exception as e:
        st.error(f"Error generating text: {e}")