GenAICoder committed (verified)
Commit 7c5299e · 1 Parent(s): bc79dca

Update app.py

Files changed (1)
  1. app.py +2 -1
app.py CHANGED
@@ -87,7 +87,8 @@ def get_conversational_chain(retriever):
     #llm= pipeline("text-generation", model="nvidia/Llama3-ChatQA-1.5-8B")
     #repo_id='meta-llama/Meta-Llama-3-70B'
     #repo_id = 'mistralai/Mixtral-8x7B-Instruct-v0.1'
-    repo_id= 'nvidia/Llama3-ChatQA-1.5-8B'
+    #repo_id= 'nvidia/Llama3-ChatQA-1.5-8B'
+    repo_id= 'google/gemma-1.1-2b-it'
     llm = HuggingFaceEndpoint(repo_id=repo_id, temperature=0.3,token = access_token)
     #tokenizer = AutoTokenizer.from_pretrained("google/gemma-1.1-2b-it")
     #llm = AutoModelForCausalLM.from_pretrained("google/gemma-1.1-2b-it")
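For context, the changed lines live inside get_conversational_chain(retriever) in app.py and only swap which hosted model the HuggingFaceEndpoint points at. Below is a minimal, standalone sketch of that swap; it is not the full app.py. The langchain_huggingface import path, the access_token placeholder, and the final invoke() smoke test are assumptions added for illustration, while the constructor call (including the token= keyword) is copied from the diff above.

# Minimal sketch of the model swap made in this commit, not the full app.py.
# Assumptions: HuggingFaceEndpoint comes from langchain_huggingface (app.py may
# use an older LangChain import path) and access_token is a valid HF token that
# the real app reads elsewhere.
from langchain_huggingface import HuggingFaceEndpoint

access_token = "hf_..."  # placeholder; app.py supplies its own token

# Model id before this commit (left commented out in app.py):
# repo_id = 'nvidia/Llama3-ChatQA-1.5-8B'
# Model id selected by this commit:
repo_id = 'google/gemma-1.1-2b-it'

# Same constructor call as in app.py: hosted inference endpoint, temperature 0.3.
llm = HuggingFaceEndpoint(repo_id=repo_id, temperature=0.3, token=access_token)

# Hypothetical smoke test: call the endpoint directly, outside the retrieval chain.
print(llm.invoke("Summarize the retrieved context in one sentence."))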