ziixh committed on
Commit
67a4038
·
verified ·
1 Parent(s): 2c52598

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +9 -8
app.py CHANGED
@@ -4,8 +4,8 @@ import requests
4
  import gradio as gr
5
  import torch
6
 
7
- # Load the Hugging Face model and tokenizer (only once)
8
- model_name = "distilgpt2" # Smaller and faster model
9
  tokenizer = AutoTokenizer.from_pretrained(model_name)
10
  model = AutoModelForCausalLM.from_pretrained(model_name)
11
 
@@ -31,9 +31,9 @@ def generate_smart_contract(language, requirements):
31
  # Create a prompt for the model
32
  prompt = f"Generate a {language} smart contract with the following requirements: {requirements}"
33
 
34
- # Use the Hugging Face model to generate code
35
  inputs = tokenizer(prompt, return_tensors="pt").to("cuda" if torch.cuda.is_available() else "cpu")
36
- outputs = model.generate(**inputs, max_length=150) # Reduced max_length
37
  generated_code = tokenizer.decode(outputs[0], skip_special_tokens=True)
38
 
39
  # Enhance the code using Groq API
@@ -104,9 +104,7 @@ h1 {
104
  text-align: center;
105
  font-size: 2.5em;
106
  margin-bottom: 20px;
107
- background: linear-gradient(135deg, #6a11cb 0%, #2575fc 100%);
108
- -webkit-background-clip: text;
109
- -webkit-text-fill-color: transparent;
110
  transform-style: preserve-3d;
111
  transform: translateZ(30px);
112
  }
@@ -132,10 +130,13 @@ h1 {
132
  def generate_contract(language, requirements):
133
  return generate_smart_contract(language, requirements)
134
 
 
 
 
135
  interface = gr.Interface(
136
  fn=generate_contract,
137
  inputs=[
138
- gr.Textbox(label="Programming Language", placeholder="e.g., Solidity"),
139
  gr.Textbox(label="Requirements", placeholder="e.g., ERC20 token with minting functionality")
140
  ],
141
  outputs=gr.Textbox(label="Generated Smart Contract"),
 
4
  import gradio as gr
5
  import torch
6
 
7
+ # Load the CodeParrot model and tokenizer (only once)
8
+ model_name = "codeparrot/codeparrot-small" # CodeParrot model
9
  tokenizer = AutoTokenizer.from_pretrained(model_name)
10
  model = AutoModelForCausalLM.from_pretrained(model_name)
11
 
 
31
  # Create a prompt for the model
32
  prompt = f"Generate a {language} smart contract with the following requirements: {requirements}"
33
 
34
+ # Use the CodeParrot model to generate code
35
  inputs = tokenizer(prompt, return_tensors="pt").to("cuda" if torch.cuda.is_available() else "cpu")
36
+ outputs = model.generate(**inputs, max_length=300) # Increased max_length for better results
37
  generated_code = tokenizer.decode(outputs[0], skip_special_tokens=True)
38
 
39
  # Enhance the code using Groq API
 
104
  text-align: center;
105
  font-size: 2.5em;
106
  margin-bottom: 20px;
107
+ color: white; /* White title color */
 
 
108
  transform-style: preserve-3d;
109
  transform: translateZ(30px);
110
  }
 
130
  def generate_contract(language, requirements):
131
  return generate_smart_contract(language, requirements)
132
 
133
+ # Dropdown options for programming languages
134
+ languages = ["Solidity", "Vyper", "Rust", "JavaScript", "Python"]
135
+
136
  interface = gr.Interface(
137
  fn=generate_contract,
138
  inputs=[
139
+ gr.Dropdown(label="Programming Language", choices=languages, value="Solidity"), # Dropdown menu
140
  gr.Textbox(label="Requirements", placeholder="e.g., ERC20 token with minting functionality")
141
  ],
142
  outputs=gr.Textbox(label="Generated Smart Contract"),