# MultiLLM / app.py
# (Hugging Face Space file header — author: selvamaniandiappan,
#  commit fc29257 "Update app.py", 2.06 kB; converted to a comment so the file is valid Python.)
import streamlit as st
import os
from transformers import pipeline
import requests
# Hugging Face Inference API token, read from the "hugkey" env var (Space secret).
# NOTE(review): if unset this is None and the Authorization header below is malformed.
API_TOKEN = os.getenv("hugkey")
# Free-form user input; presumably non-English text to translate — TODO confirm intended source languages.
text = st.text_input("Enter text in your language")
# Previously tried translation endpoints, kept for reference:
#tran_API_URL = "https://api-inference.huggingface.co/models/Helsinki-NLP/opus-mt-mul-en"
#tran_API_URL = "https://api-inference.huggingface.co/models/google-t5/t5-small"
# Current hosted endpoints: M2M100 for translation, GPT-2 for text generation.
tran_API_URL = "https://api-inference.huggingface.co/models/facebook/m2m100_418M"
generate_API_URL = "https://api-inference.huggingface.co/models/openai-community/gpt2"
headers = {"Authorization": f"Bearer {API_TOKEN}"}
# Local-pipeline alternative (unused — the app calls the hosted Inference API instead):
# translator = pipeline("translation", model="Helsinki-NLP/opus-mt-mul-en") #loading Helsinki's Multilingual Model which translates to English
# text_generator = pipeline("text-generation", model="gpt2")
def Translate_query(input_text, forced_bos_token_id=128000, *, timeout=30):
    """Translate ``input_text`` via the hosted M2M100 Inference API.

    Args:
        input_text: Raw text to translate (passed through unchanged; the
            original redundant f-string wrapper was removed).
        forced_bos_token_id: Token id forcing the decoder's target language.
            Defaults to 128000 as in the original code —
            NOTE(review): verify this id maps to the intended target
            language for facebook/m2m100_418M.
        timeout: Seconds to wait for the API before raising
            ``requests.exceptions.Timeout``. Without a timeout,
            ``requests.post`` can hang the Streamlit UI indefinitely.

    Returns:
        The decoded JSON response: presumably a list of translations on
        success, or an error dict (e.g. while the model is loading).
    """
    payload = {
        "inputs": input_text,
        "parameters": {"forced_bos_token_id": forced_bos_token_id},
    }
    response = requests.post(tran_API_URL, headers=headers, json=payload, timeout=timeout)
    return response.json()
def Generate_query(payload, *, timeout=30):
    """POST ``payload`` to the hosted GPT-2 text-generation Inference API.

    Args:
        payload: JSON-serializable request body, e.g. ``{"inputs": "prompt"}``.
        timeout: Seconds to wait before raising
            ``requests.exceptions.Timeout`` — added so a slow/stuck API
            call cannot hang the Streamlit UI forever.

    Returns:
        The decoded JSON response: presumably a list of generations on
        success, or an error dict (e.g. while the model is loading).
    """
    response = requests.post(generate_API_URL, headers=headers, json=payload, timeout=timeout)
    return response.json()
# Main action: translate the user's text and display the raw API response.
if st.button("Generate"):
    if not text:
        # Guard clause: don't fire an API call with empty input.
        # (This mirrors the author's commented-out intent in earlier revisions.)
        st.error("Enter a text", icon="🚨")
    else:
        translated_output = Translate_query(text)
        # Show the raw JSON so API errors (e.g. "model loading") are visible too;
        # on success this looks like [{"translation_text": ...}].
        st.write(translated_output)
        # TODO(review): the GPT-2 generation step (Generate_query on the
        # translated text) is still disabled pending a stable translation output.