import streamlit as st
from openai import OpenAI
import os
from dotenv import load_dotenv

# Load environment variables from a .env file, then read the API key
load_dotenv()
api_key = os.getenv("OPENAI_API_KEY")
client = OpenAI(api_key=api_key)
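# A minimal sketch of the expected .env file (assumed to sit next to this script),
# which load_dotenv() reads before the key lookup above:
#
#   OPENAI_API_KEY=sk-...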
def query_llm(prompt):
    """Query OpenAI's GPT model."""
    try:
        response = client.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=[
                {"role": "system", "content": "You are an expert in MOF material science."},
                {"role": "user", "content": prompt},
            ],
        )
        return response.choices[0].message.content.strip()
    except Exception as e:
        st.error(f"Error querying LLM: {e}")
        return None
st.title("MOF Prediction & Functionalization")

st.header("MOF Description")
mof_description = st.text_area("Describe the MOF (e.g., metal, pore size, functional groups)", height=150)  # Adjust height as needed

task = st.selectbox(
    "Select a task:",
    ("Predict Properties", "Functionalization Suggestions", "Synthesis Plan"),
)
if task == "Predict Properties":
    if st.button("Predict Properties"):
        st.text("Processing...")
        prompt = f"Predict the CO₂ adsorption capacity and selectivity of a MOF with the following description:\n\n{mof_description}"
        prediction = query_llm(prompt)
        if prediction:
            st.subheader("Predicted MOF Properties")
            st.write(prediction)
elif task == "Functionalization Suggestions": | |
st.header("Functionalization Details") | |
if st.button("Get Functionalization Suggestions"): | |
st.text("Processing...") | |
prompt = f"Suggest functionalization strategies for a MOF with the following description to enhance CO₂ capture:\n\n{mof_description}" | |
func_suggestions = query_llm(prompt) | |
if func_suggestions: | |
st.subheader("Suggested Functionalization Strategies") | |
st.write(func_suggestions) | |
elif task == "Synthesis Plan": | |
st.header("Synthesis Details") | |
if st.button("Generate Synthesis Plan"): | |
st.text("Processing...") | |
prompt = f"Provide a synthesis plan for a MOF with the following description:\n\n{mof_description}" | |
synthesis_plan = query_llm(prompt) | |
if synthesis_plan: | |
st.subheader("AI-Generated Synthesis Plan") | |
st.write(synthesis_plan) | |
st.info("Combine with DFT and ML models for high-throughput screening.")
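# --- Running the app: a minimal sketch (the filename "app.py" is an assumption) ---
# Install the dependencies implied by the imports above:
#   pip install streamlit openai python-dotenv
# Then launch the Streamlit interface locally with:
#   streamlit run app.py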