import streamlit as st

from models.toxicity_MODEL import text2toxicity
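
# For reference, a minimal sketch of what models/toxicity_MODEL.py could contain.
# It follows the `text2toxicity` helper from the cointegrated/rubert-tiny-toxicity
# model card on the Hugging Face Hub; the checkpoint name and the aggregation
# formula are assumptions about this repo, not confirmed details:
#
#     import torch
#     from transformers import AutoTokenizer, AutoModelForSequenceClassification
#
#     model_checkpoint = 'cointegrated/rubert-tiny-toxicity'
#     tokenizer = AutoTokenizer.from_pretrained(model_checkpoint)
#     model = AutoModelForSequenceClassification.from_pretrained(model_checkpoint)
#
#     def text2toxicity(text):
#         """Return an aggregated toxicity probability in [0, 1] for `text`."""
#         with torch.no_grad():
#             inputs = tokenizer(text, return_tensors='pt',
#                                truncation=True, padding=True).to(model.device)
#             # Multi-label head: sigmoid probability per label
#             # (non-toxic, insult, obscenity, threat, dangerous)
#             proba = torch.sigmoid(model(**inputs).logits).cpu().numpy()[0]
#         # Toxic if the text is not non-toxic, or if it is dangerous
#         return 1 - proba[0] * (1 - proba[-1])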
st.title('Toxicity Detection')
st.write('This tool classifies text as toxic or non-toxic using RuBERT.')
# Use a placeholder instead of a default value, so the hint text is never classified
user_input = st.text_area('Enter text to classify', placeholder='Type your text here...')
if st.button('Classify'):
    if not user_input.strip():
        st.warning('Please enter some text to classify.')
    else:
        toxicity_score = text2toxicity(user_input)
        st.write('Toxicity score:', toxicity_score)
        # Interpret the score for the user, using 0.5 as the decision threshold
        if toxicity_score > 0.5:
            st.write('This text is likely to be considered toxic.')
        else:
            st.write('This text is likely to be considered non-toxic.')