import streamlit as st
import openai
import requests


# Function to call ChatGPT (OpenAI). gpt-3.5-turbo is a chat model, so it must be
# called through the ChatCompletion endpoint (pre-1.0 openai SDK), not Completion.
def query_chatgpt(prompt):
    openai.api_key = "YOUR_OPENAI_API_KEY"  # Add your OpenAI API key; never hard-code real keys in source
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": prompt}],
        max_tokens=150,
    )
    return response.choices[0].message["content"].strip()


# Function to query the Gemini API (Google's Generative Language API)
def query_gemini(prompt):
    api_key = "YOUR_GEMINI_API_KEY"  # Replace with your Gemini API key; never hard-code real keys in source
    url = "https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-flash:generateContent"
    headers = {"Content-Type": "application/json"}
    data = {"contents": [{"parts": [{"text": prompt}]}]}

    # Make the API request
    response = requests.post(url, headers=headers, params={"key": api_key}, json=data)

    # On success, extract the generated text from the nested response structure:
    # candidates -> content -> parts -> text
    if response.status_code == 200:
        try:
            return response.json()["candidates"][0]["content"]["parts"][0]["text"]
        except (KeyError, IndexError):
            return "No content returned"
    else:
        return f"Error: {response.status_code}, {response.text}"


# Streamlit layout
st.title("Compare AI Models: ChatGPT and Gemini")

prompt = st.text_area("Enter your query:")

if st.button("ChatGPT"):
    if prompt:
        chatgpt_response = query_chatgpt(prompt)
        st.subheader("ChatGPT Response:")
        st.write(chatgpt_response)
    else:
        st.warning("Please enter a prompt.")

if st.button("Gemini"):
    if prompt:
        gemini_response = query_gemini(prompt)
        st.subheader("Gemini Response:")
        st.write(gemini_response)
    else:
        st.warning("Please enter a prompt.")
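
# How to run (a minimal sketch; the filename app.py is an assumption, not part of the original):
#
#   pip install streamlit requests "openai<1.0"
#   streamlit run app.py
#
# Note: openai.ChatCompletion.create above assumes the pre-1.0 openai Python SDK;
# with openai>=1.0 the equivalent call is client.chat.completions.create on an
# openai.OpenAI() client instance.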