mamgain93 committed on
Commit
0daa5e6
·
1 Parent(s): 54d3af7
Files changed (1) hide show
  1. app.py +6 -7
app.py CHANGED
@@ -1,5 +1,6 @@
1
  import os
2
- from langchain_huggingface import HuggingFaceEndpoint
 
3
  import streamlit as st
4
  from langchain_core.prompts import PromptTemplate
5
  from langchain_core.output_parsers import StrOutputParser
@@ -18,12 +19,10 @@ def get_llm_hf_inference(model_id=model_id, max_new_tokens=128, temperature=0.1)
18
  Returns:
19
  - llm (HuggingFaceEndpoint): The language model for HuggingFace inference.
20
  """
21
- llm = HuggingFaceEndpoint(
22
- repo_id=model_id,
23
- max_new_tokens=max_new_tokens,
24
- temperature=temperature,
25
- token = os.getenv("HF_TOKEN")
26
- )
27
  return llm
28
 
29
  # Configure the Streamlit app
 
1
  import os
2
+ from transformers import pipeline
3
+ # from langchain_huggingface import HuggingFaceEndpoint
4
  import streamlit as st
5
  from langchain_core.prompts import PromptTemplate
6
  from langchain_core.output_parsers import StrOutputParser
 
19
  Returns:
20
  - llm (HuggingFaceEndpoint): The language model for HuggingFace inference.
21
  """
22
+ llm = pipeline('text-generation',
23
+ model=model_id,
24
+ tokenizer=model_id,
25
+ use_auth_token=os.getenv("HF_TOKEN"))
 
 
26
  return llm
27
 
28
  # Configure the Streamlit app