import streamlit as st
from transformers import AutoModelForQuestionAnswering, AutoTokenizer, pipeline

# Replace with your Hugging Face model repository path
model_repo_path = 'waqasali1707/Extractive-QA-Model'

# Load the model and tokenizer once and cache them so Streamlit does not
# reload them on every rerun of the script
@st.cache_resource
def load_qa_pipeline(repo_path):
    model = AutoModelForQuestionAnswering.from_pretrained(repo_path)
    tokenizer = AutoTokenizer.from_pretrained(repo_path)
    return pipeline("question-answering", model=model, tokenizer=tokenizer)

# Initialize the question-answering pipeline
question_answerer = load_qa_pipeline(model_repo_path)
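# Note: the "question-answering" pipeline returns a dict with keys such as
# 'answer', 'score', 'start', and 'end'; only 'answer' is displayed below.
# Illustrative call (hypothetical inputs and output):
#   question_answerer(question="Who wrote it?", context="It was written by Ada.")
#   # -> {'score': ..., 'start': ..., 'end': ..., 'answer': 'Ada'}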

# Streamlit app layout
st.title("Question Answering App")

# Define session state keys
if 'context' not in st.session_state:
    st.session_state.context = ""
if 'question' not in st.session_state:
    st.session_state.question = ""

# User input
text_input = st.text_area("Enter the Context first", value=st.session_state.context, height=180)

# Update session state with the new context
if st.button("Next"):
    if text_input:
        st.session_state.context = text_input
        st.session_state.question = ""  # Clear the previous question
    else:
        st.warning("Please enter some context to move to next part.")

# Show question input if context is available
if st.session_state.context:
    question_input = st.text_area("Enter the Question", value=st.session_state.question, height=100)
    
    if st.button("Answer"):
        if question_input:
            st.session_state.question = question_input
            with st.spinner("Generating Answer..."):
                try:
                    answer = question_answerer(question=question_input, context=st.session_state.context)
                    # Display the answer
                    st.subheader("Answer")
                    st.write(answer.get('answer', 'No answer found'))
                except Exception as e:
                    st.error(f"Error during Question Answering: {e}")
        else:
            st.warning("Please enter some Question to get Answer.")