NeoPy committed (verified) · Commit 2936823 · 1 Parent(s): 840e123

Update app.py

Files changed (1):
  1. app.py +2 -2
app.py CHANGED
@@ -150,7 +150,7 @@ Have a conversation with an AI using your reference voice!
     global chat_model_state, chat_tokenizer_state
     if chat_model_state is None:
         gr.Info("Loading chat model...")
-        model_name = "deepseek-ai/DeepSeek-V3"
+        model_name = "google/gemma-2-2b-it"
         chat_model_state = AutoModelForCausalLM.from_pretrained(
             model_name, device_map="auto"
         )
@@ -161,7 +161,7 @@ Have a conversation with an AI using your reference voice!
     load_chat_model_btn.click(load_chat_model, outputs=[load_chat_model_btn, chat_interface_container])
 else:
     chat_interface_container = gr.Column()
-    model_name = "deepseek-ai/DeepSeek-V3"
+    model_name = "google/gemma-2-2b-it"
     chat_model_state = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype="auto", device_map="auto", trust_remote_code=True)
     chat_tokenizer_state = AutoTokenizer.from_pretrained(model_name)
 
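For context, a minimal self-contained sketch of what the updated loading path amounts to. It reuses the same transformers calls and arguments shown in the changed lines of app.py; the chat-template generation step at the end is purely illustrative and is not part of this commit.

# Minimal sketch of loading the new chat model; mirrors the from_pretrained
# arguments in the updated app.py lines.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "google/gemma-2-2b-it"  # new default introduced by this commit

chat_model_state = AutoModelForCausalLM.from_pretrained(
    model_name, torch_dtype="auto", device_map="auto", trust_remote_code=True
)
chat_tokenizer_state = AutoTokenizer.from_pretrained(model_name)

# Illustrative single chat turn (not in the diff): format the prompt with the
# tokenizer's chat template and generate a short reply.
messages = [{"role": "user", "content": "Hello! Can you hear me?"}]
input_ids = chat_tokenizer_state.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(chat_model_state.device)
output_ids = chat_model_state.generate(input_ids, max_new_tokens=64)
reply = chat_tokenizer_state.decode(
    output_ids[0][input_ids.shape[-1]:], skip_special_tokens=True
)
print(reply)

Note that access to google/gemma-2-2b-it on the Hub may require accepting the model license and authenticating with a token.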