import streamlit as st

from app_models.toxicity_MODEL import text2toxicity
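
# text2toxicity is assumed here to take a string and return an aggregated
# toxicity probability in [0, 1]. A minimal sketch of such a function,
# assuming the cointegrated/rubert-tiny-toxicity RuBERT checkpoint (not
# confirmed by this file), could look like:
#
#     import torch
#     from transformers import AutoTokenizer, AutoModelForSequenceClassification
#
#     tokenizer = AutoTokenizer.from_pretrained('cointegrated/rubert-tiny-toxicity')
#     model = AutoModelForSequenceClassification.from_pretrained('cointegrated/rubert-tiny-toxicity')
#
#     def text2toxicity(text: str) -> float:
#         with torch.no_grad():
#             inputs = tokenizer(text, return_tensors='pt', truncation=True)
#             proba = torch.sigmoid(model(**inputs).logits)[0].cpu().numpy()
#         # Aggregate the multi-label output into a single toxicity score.
#         return float(1 - proba[0] * (1 - proba[-1]))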


def run():
    st.title('Toxicity Detection')
    st.write('This tool classifies text as toxic or non-toxic using RuBERT.')

    # Use a placeholder rather than a pre-filled value, so the hint text is
    # never accidentally sent to the classifier.
    user_input = st.text_area('Enter text to classify', placeholder='Type your text here...')

    if st.button('Classify'):
        # Guard against classifying empty input.
        if not user_input.strip():
            st.warning('Please enter some text to classify.')
            return

        toxicity_score = text2toxicity(user_input)
        st.write(f'Toxicity score: {toxicity_score:.3f}')

        # Treat scores above 0.5 as toxic; the threshold can be tuned.
        if toxicity_score > 0.5:
            st.write('This text is likely to be considered toxic.')
        else:
            st.write('This text is likely to be considered non-toxic.')
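
# Allows running this page directly (streamlit run <this file>); in a
# multipage app, run() would typically be called by the app's router instead.
if __name__ == '__main__':
    run()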