Create app.py
app.py
ADDED
@@ -0,0 +1,64 @@
import streamlit as st
from openai import OpenAI
import os
from dotenv import load_dotenv

# Load OPENAI_API_KEY from a local .env file, if one is present
load_dotenv()
api_key = os.getenv("OPENAI_API_KEY")
client = OpenAI(api_key=api_key)


def query_llm(prompt):
    """Query OpenAI's GPT model."""
    try:
        response = client.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=[
                {"role": "system", "content": "You are an expert in MOF material science."},
                {"role": "user", "content": prompt},
            ],
        )
        return response.choices[0].message.content.strip()
    except Exception as e:
        st.error(f"Error querying LLM: {e}")
        return None


st.title("MOF Prediction & Functionalization")

# User describes the MOF and picks one of three LLM-backed tasks
st.header("MOF Description")
mof_description = st.text_area("Describe the MOF (e.g., metal, pore size, functional groups)", height=150)  # Adjust height as needed

task = st.selectbox(
    "Select a task:",
    ("Predict Properties", "Functionalization Suggestions", "Synthesis Plan"),
)

if task == "Predict Properties":
    if st.button("Predict Properties"):
        st.text("Processing...")
        prompt = f"Predict the CO₂ adsorption capacity and selectivity of a MOF with the following description:\n\n{mof_description}"
        prediction = query_llm(prompt)
        if prediction:
            st.subheader("Predicted MOF Properties")
            st.write(prediction)

elif task == "Functionalization Suggestions":
    st.header("Functionalization Details")
    if st.button("Get Functionalization Suggestions"):
        st.text("Processing...")
        prompt = f"Suggest functionalization strategies for a MOF with the following description to enhance CO₂ capture:\n\n{mof_description}"
        func_suggestions = query_llm(prompt)
        if func_suggestions:
            st.subheader("Suggested Functionalization Strategies")
            st.write(func_suggestions)

elif task == "Synthesis Plan":
    st.header("Synthesis Details")
    if st.button("Generate Synthesis Plan"):
        st.text("Processing...")
        prompt = f"Provide a synthesis plan for a MOF with the following description:\n\n{mof_description}"
        synthesis_plan = query_llm(prompt)
        if synthesis_plan:
            st.subheader("AI-Generated Synthesis Plan")
            st.write(synthesis_plan)

st.info("Combine with DFT and ML models for high-throughput screening.")
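A minimal sketch of the companion requirements.txt this Space would need (an assumption, not part of this commit; the package names are simply inferred from the imports in app.py):

streamlit
openai
python-dotenv

The app reads OPENAI_API_KEY via os.getenv, so the key can be supplied either through a .env file alongside app.py or as a Space secret of the same name; locally, the app starts with `streamlit run app.py`.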