File size: 3,933 Bytes
66ff3e9
 
8658e51
b944409
 
991d86a
2a864c8
66ff3e9
991d86a
 
 
308b14b
b1216f7
275029a
b1216f7
618ae34
b1216f7
2a23908
 
 
 
 
c1a4bd7
 
 
 
4702f20
 
56d35aa
67cfc8c
 
991d86a
 
33d5d58
305a1a4
7a77207
 
 
 
305a1a4
c482a7c
4cb67db
 
 
 
 
 
 
7a77207
 
4cb67db
 
 
 
 
 
bdc4589
4cb67db
 
4394f5b
4cb67db
 
c482a7c
4cb67db
c482a7c
4cb67db
 
33d5d58
4cb67db
33d5d58
 
7a77207
 
 
 
 
33d5d58
7a77207
 
b3c9596
dbe50a2
 
3d0edff
dbe50a2
 
 
 
 
3d0edff
dbe50a2
 
 
 
 
 
 
c482a7c
 
 
 
 
 
 
 
 
3d0edff
 
c482a7c
 
dbe50a2
 
c482a7c
1ce6b5d
b3c9596
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
import gradio as gr
import requests
import json
import os

# Retrieve the OpenRouter API Key from the Space secrets
# NOTE(review): if the secret is missing, API_KEY is None and every request
# will fail with 401 — consider failing fast here. TODO confirm secret name.
API_KEY: str | None = os.getenv("OpenRouter_API_KEY")

# Define available models for selection
# Entries are OpenRouter model slugs in "provider/model" form; ":free"
# suffixed entries are the zero-cost variants.
MODEL_OPTIONS: list[str] = [
    "openai/gpt-4o-mini-2024-07-18",
    "openai/chatgpt-4o-latest",
    "anthropic/claude-3.5-sonnet",
    "anthropic/claude-3.5-haiku",
    "google/gemini-pro-1.5",
    "google/gemini-flash-1.5",
    "cohere/command-r-plus",
    "meta-llama/llama-3.1-405b-instruct",
    "nvidia/llama-3.1-nemotron-70b-instruct",
    "qwen/qwen-2.5-7b-instruct",
    "mistralai/mistral-large-2411",
    "microsoft/phi-3-medium-128k-instruct",
    "meta-llama/llama-3.1-405b-instruct:free",
    "nousresearch/hermes-3-llama-3.1-405b:free",
    "mistralai/mistral-7b-instruct:free",
    "microsoft/phi-3-medium-128k-instruct:free",
    "liquid/lfm-40b:free",
    # NOTE(review): the four entries below do not look like valid OpenRouter
    # "provider/model" slugs (one even contains a space) — presumably they
    # will be rejected by the API; verify against the OpenRouter model list.
    "claude-3.5-haiku",
    "nikunj/claude-3.5-haiku",
    "DeepInfra/claude-3.5-haiku",
    "Amazon Bedrock/claude-3.5-haiku"
]

def generate_text(input_text, selected_model, history):
    """Send the conversation (plus the new user turn) to OpenRouter.

    Parameters
    ----------
    input_text : str
        The user's new message.
    selected_model : str
        OpenRouter model slug chosen in the dropdown.
    history : list[dict] | None
        Prior turns as ``{"role": ..., "content": ...}`` dicts; ``None``
        on the first call (Gradio's initial State value).

    Returns
    -------
    tuple[str, str, list[dict]]
        (latest model response or error text,
         formatted transcript for display,
         updated history carried back into gr.State).
    """
    if history is None:
        history = []  # First turn: start a fresh conversation.

    # Record the user's turn so the model sees the full conversation.
    history.append({"role": "user", "content": input_text})

    try:
        response = requests.post(
            url="https://openrouter.ai/api/v1/chat/completions",
            headers={
                "Authorization": f"Bearer {API_KEY}",
                "Content-Type": "application/json"
            },
            # `json=` serializes the body for us (same bytes as json.dumps).
            json={
                "model": selected_model,  # Use the selected model
                "messages": history,  # Send the entire conversation history
                "top_p": 1,
                "temperature": 1,
                "frequency_penalty": 0,
                "presence_penalty": 0,
                "repetition_penalty": 1,
                "top_k": 0,
                "max_tokens": 8000
            },
            # Without a timeout a stalled connection would hang the UI forever.
            timeout=120,
        )
    except requests.RequestException as exc:
        # Network-level failure (DNS, refused connection, timeout, ...).
        return f"Error: request failed ({exc})", _format_history(history), history

    # HTTP-level failure: surface status and body, keep output shapes
    # consistent (the second slot is always the formatted transcript).
    if response.status_code != 200:
        return (
            f"Error: {response.status_code}, {response.text}",
            _format_history(history),
            history,
        )

    # Parse the assistant's reply; tolerate missing keys in the payload.
    try:
        response_json = response.json()
        generated_response = response_json.get("choices", [{}])[0].get("message", {}).get("content", "No content returned.")
    except ValueError:
        # Covers json.JSONDecodeError and requests' own JSONDecodeError.
        generated_response = "Error: Unable to parse response."

    # Record the assistant's turn for the next round trip.
    history.append({"role": "assistant", "content": generated_response})

    return generated_response, _format_history(history), history


def _format_history(history):
    """Render the message list as a 'Role: content' transcript for display."""
    return "\n\n".join(
        f"{msg['role'].capitalize()}: {msg['content']}" for msg in history
    )


# Inject custom CSS directly into the Gradio interface for scrollbars.
# Fixed-height boxes with overflow:auto so long responses / transcripts
# scroll instead of stretching the page; class names are attached to the
# output Textboxes below via elem_classes.
css = """
    .output-textbox {
        height: 150px;
        overflow: auto;
        border: 1px solid #ddd;
        padding: 10px;
    }
    .history-textbox {
        height: 300px;
        overflow: auto;
        border: 1px solid #ddd;
        padding: 10px;
    }
"""

# Assemble the Gradio UI: a prompt box plus model picker in; the model's
# reply, a running transcript, and the hidden conversation state out.
prompt_box = gr.Textbox(lines=2, label="Input Text", placeholder="Enter your query here")
model_picker = gr.Dropdown(choices=MODEL_OPTIONS, label="Select Model", value=MODEL_OPTIONS[0])
response_box = gr.Textbox(
    label="Response",
    placeholder="Response will be shown here",
    elem_classes=["output-textbox"],
)
transcript_box = gr.Textbox(
    label="History",
    placeholder="Interaction history will be shown here",
    lines=10,
    interactive=False,
    elem_classes=["history-textbox"],
)

iface = gr.Interface(
    fn=generate_text,
    # gr.State carries the conversation history into the handler...
    inputs=[prompt_box, model_picker, gr.State()],
    # ...and the updated history comes back out after each turn.
    outputs=[response_box, transcript_box, gr.State()],
    title="Chat with OpenRouter Models",
    css=css,  # scrollbar styling for the two output boxes
)

iface.launch()