import streamlit as st
from langchain_chroma import Chroma
from langchain_core.prompts import ChatPromptTemplate
from langchain_google_genai import GoogleGenerativeAI
from langchain_huggingface import HuggingFaceEmbeddings

# Configuration
GOOGLE_API_KEY = "YOUR_GOOGLE_API_KEY"  # Replace with your API key
CHROMA_DB_DIR = "./chroma_db_"  # Directory for ChromaDB
MODEL_NAME = "flax-sentence-embeddings/all_datasets_v4_MiniLM-L6"

# Initialize Embeddings and Models
embeddings_model = HuggingFaceEmbeddings(model_name=MODEL_NAME)
db = Chroma(
    collection_name="vector_database",
    embedding_function=embeddings_model,
    persist_directory=CHROMA_DB_DIR,
)
genai_model = GoogleGenerativeAI(api_key=GOOGLE_API_KEY, model="gemini-1.5-flash")

# Streamlit UI
st.set_page_config(
    page_title="Pega CDH Intelligent Assistant",
    page_icon="🤖",
    layout="wide",
    initial_sidebar_state="expanded",
)

# Sidebar Navigation
st.sidebar.image(
    "https://www.pega.com/sites/all/themes/pega/images/logo.svg",
    use_column_width=True,
)
st.sidebar.title("Pega CDH Assistant 🤝")
st.sidebar.markdown("Navigate to the features you need:")
menu = st.sidebar.radio(
    "Menu",
    ["Ask an Intelligent Question", "Explore Knowledge Hub", "Give Feedback"],
    index=0,
)

# Style Enhancements (custom CSS can be injected here)
st.markdown("""
""", unsafe_allow_html=True)

# Main Interface
if menu == "Ask an Intelligent Question":
    st.title("Pega CDH Intelligent Question Assistant")
    st.subheader("Get precise answers powered by Pega Customer Decision Hub.")
    st.image(
        "https://via.placeholder.com/800x200?text=Empower+Your+Decisions+with+Pega+AI",
        use_column_width=True,
    )

    # Input Section
    query = st.text_input(
        "What's your question today?",
        placeholder="Type your question to unlock insights from Pega's knowledge base...",
    )

    if query:
        with st.spinner("Analyzing your question and fetching an answer..."):
            # Retrieve the most relevant chunks from the vector store
            docs_chroma = db.similarity_search_with_score(query, k=4)
            context_text = "\n\n".join([doc.page_content for doc, _score in docs_chroma])

            # Generate Answer
            PROMPT_TEMPLATE = """
            Answer the question based only on the following context:
            {context}
            Answer the question based on the above context: {question}.
            """
            prompt_template = ChatPromptTemplate.from_template(PROMPT_TEMPLATE)
            prompt = prompt_template.format(context=context_text, question=query)
            response_text = genai_model.invoke(prompt)

            # Chat-Like Output
            st.subheader("Your Conversation")
            st.markdown(f"**You:** {query}")
            st.markdown(f"**Pega Assistant:** {response_text}")

elif menu == "Explore Knowledge Hub":
    st.title("Pega Knowledge Hub 📚")
    st.subheader("Search through the rich repository of Pega's insights.")
    st.write("Use the filters below to explore relevant topics.")
    # Add advanced filters and browsing options for knowledge hub

elif menu == "Give Feedback":
    st.title("Your Feedback Matters 💡")
    st.subheader("Help us improve by sharing your experience.")
    feedback = st.text_area(
        "Share your suggestions or comments below:",
        placeholder="Write your feedback here...",
    )
    if st.button("Submit Feedback"):
        st.success("Thank you for your feedback! 🙏")

# Footer
st.sidebar.markdown("---")
st.sidebar.info("**Pega Customer Decision Hub Assistant v1.0** | Powered by Google Gemini and Pega Technologies.")
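
Note that the app only queries ./chroma_db_; it does not build the index. Below is a minimal ingestion sketch, assuming the source material lives in a hypothetical docs/ folder of .txt files and that langchain-community and langchain-text-splitters are installed; run it once as a separate script (e.g. ingest.py) before starting the app, and adjust the loader and chunking to your own corpus.

# ingest.py -- one-time indexing sketch (run separately from the Streamlit app)
from langchain_chroma import Chroma
from langchain_community.document_loaders import DirectoryLoader, TextLoader
from langchain_huggingface import HuggingFaceEmbeddings
from langchain_text_splitters import RecursiveCharacterTextSplitter

# Same embedding model, collection name, and directory as the app above.
embeddings_model = HuggingFaceEmbeddings(
    model_name="flax-sentence-embeddings/all_datasets_v4_MiniLM-L6"
)

# Load the raw documents and split them into overlapping chunks.
loader = DirectoryLoader("docs/", glob="**/*.txt", loader_cls=TextLoader)
chunks = RecursiveCharacterTextSplitter(
    chunk_size=1000, chunk_overlap=100
).split_documents(loader.load())

# Embed the chunks and persist them where the app expects to find them.
Chroma.from_documents(
    documents=chunks,
    embedding=embeddings_model,
    collection_name="vector_database",
    persist_directory="./chroma_db_",
)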