import streamlit as st
from models.toxicity_MODEL import text2toxicity
st.title('Toxicity Detection')
st.write('This tool classifies text as toxic or non-toxic using RuBERT.')
user_input = st.text_area("Enter text to classify", "Type your text here...")
if st.button('Classify'):
    # Score the input with the RuBERT-based classifier
    toxicity_score = text2toxicity(user_input)
    st.write('Toxicity score:', toxicity_score)

    # Optional: interpret the score for the user
    if toxicity_score > 0.5:
        st.write("This text is likely to be considered toxic.")
    else:
        st.write("This text is likely to be considered non-toxic.")
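
The `text2toxicity` helper is imported from `models/toxicity_MODEL.py`, which is not shown here. Below is a minimal sketch of what that module might contain, assuming the `cointegrated/rubert-tiny-toxicity` checkpoint from Hugging Face (whose model card defines a function with the same name); the checkpoint, label order, and aggregation formula are assumptions, not something this file confirms.

# models/toxicity_MODEL.py -- hypothetical contents, for illustration only
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# Assumption: a small RuBERT-based multi-label toxicity classifier.
MODEL_CHECKPOINT = 'cointegrated/rubert-tiny-toxicity'
tokenizer = AutoTokenizer.from_pretrained(MODEL_CHECKPOINT)
model = AutoModelForSequenceClassification.from_pretrained(MODEL_CHECKPOINT)
model.eval()

def text2toxicity(text: str) -> float:
    """Return a single toxicity probability in [0, 1] for the given text."""
    with torch.no_grad():
        inputs = tokenizer(text, return_tensors='pt', truncation=True, padding=True)
        # Multi-label head: sigmoid per label
        # (assumed order: non-toxic, insult, obscenity, threat, dangerous)
        proba = torch.sigmoid(model(**inputs).logits)[0].numpy()
    # Aggregate per-label probabilities into one score:
    # 1 - P(non-toxic) * P(not dangerous), as in that checkpoint's model card
    return float(1 - proba[0] * (1 - proba[-1]))

The app above compares the returned value against 0.5, so the helper is sketched to return a single aggregated float rather than the full per-label probability vector.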