mamgain93 committed
Commit 97d2add · 1 Parent(s): 650ae28
Files changed (1)
  1. app.py +7 -18
app.py CHANGED
@@ -1,18 +1,5 @@
  import os
- import subprocess
-
- def install(package):
-     subprocess.check_call([os.sys.executable, "-m", "pip", "install", package])
-
- # Install required packages
- install("transformers==4.41.0")
- install("langchain==0.0.175")
- install("huggingface_hub==0.14.1")
- install("streamlit==1.17.0")
-
-
- from transformers import pipeline
- # from langchain_huggingface import HuggingFaceEndpoint
+ from langchain_huggingface import HuggingFaceEndpoint
  import streamlit as st
  from langchain_core.prompts import PromptTemplate
  from langchain_core.output_parsers import StrOutputParser
@@ -31,10 +18,12 @@ def get_llm_hf_inference(model_id=model_id, max_new_tokens=128, temperature=0.1)
      Returns:
      - llm (HuggingFaceEndpoint): The language model for HuggingFace inference.
      """
-     llm = pipeline('text-generation',
-                    model=model_id,
-                    tokenizer=model_id,
-                    use_auth_token=os.getenv("HF_TOKEN"))
+     llm = HuggingFaceEndpoint(
+         repo_id=model_id,
+         max_new_tokens=max_new_tokens,
+         temperature=temperature,
+         token=os.getenv("HF_TOKEN")
+     )
      return llm

  # Configure the Streamlit app
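
For context, app.py keeps its PromptTemplate and StrOutputParser imports, so the endpoint returned by get_llm_hf_inference() is presumably composed into a LangChain chain elsewhere in the file. The sketch below shows one way that wiring could look after this commit; the model_id value, the prompt text, and passing the token via huggingfacehub_api_token are illustrative assumptions, not part of the commit.

# Illustrative sketch only: mirrors the helper from the diff and shows how it
# could be chained with the PromptTemplate/StrOutputParser imports in app.py.
import os

from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import PromptTemplate
from langchain_huggingface import HuggingFaceEndpoint

model_id = "HuggingFaceH4/zephyr-7b-beta"  # placeholder repo id (assumption)

def get_llm_hf_inference(model_id=model_id, max_new_tokens=128, temperature=0.1):
    # As in the commit: a hosted inference endpoint instead of a locally
    # loaded transformers pipeline; the token comes from the HF_TOKEN secret.
    return HuggingFaceEndpoint(
        repo_id=model_id,
        max_new_tokens=max_new_tokens,
        temperature=temperature,
        huggingfacehub_api_token=os.getenv("HF_TOKEN"),
    )

# Hypothetical chain: prompt -> remote model -> plain-string output.
prompt = PromptTemplate.from_template(
    "You are a helpful assistant.\nUser: {user_input}\nAssistant:"
)
chain = prompt | get_llm_hf_inference() | StrOutputParser()

if __name__ == "__main__":
    print(chain.invoke({"user_input": "Say hello in one sentence."}))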