"""FastAPI service that summarizes the contents of a user-supplied URL.

POST /api/home  — deployment health check (echoes success message).
POST /api/chat  — body {"user_question": "<url>"}; loads the page with
                  WebBaseLoader and returns a StarChat "stuff" summary.
"""
import os
import sys

import requests
from dotenv import load_dotenv
from fastapi import FastAPI, HTTPException, Request
from huggingface_hub import InferenceClient
from langchain import HuggingFaceHub
from langchain.chains.summarize import load_summarize_chain
from langchain.document_loaders import WebBaseLoader

load_dotenv()

# Credentials and model selection come from the environment (.env).
hf_token = os.environ.get('HUGGINGFACEHUB_API_TOKEN')
repo_id = os.environ.get('repo_id')

# StarChat-tuned generation parameters; eos_token_id 49155 is the
# StarChat end-of-sequence token.
llm = HuggingFaceHub(
    repo_id=repo_id,
    huggingfacehub_api_token=hf_token,
    model_kwargs={
        "min_length": 512,
        "max_new_tokens": 1024,
        "do_sample": True,
        "temperature": 0.01,
        "top_k": 50,
        "top_p": 0.95,
        "eos_token_id": 49155,
    },
)

# "stuff" chain: concatenates all loaded documents into one prompt.
chain = load_summarize_chain(llm, chain_type="stuff")

app = FastAPI()


# BUG FIX: this endpoint was also registered at '/api/chat', which made
# the real chat() endpoint below unreachable (FastAPI matches routes in
# registration order). It now lives at its own path.
@app.post('/api/home')
async def home_api(request: Request):
    """Deployment health check: echo the received question and confirm liveness."""
    data = await request.json()
    user_query = data.get('user_question')
    print(user_query)
    return {"Message": "FastAPI Home API Deploy Success on HF"}


@app.post('/api/chat')
async def chat(request: Request):
    """Summarize the web page at the URL given in ``user_question``.

    Returns ``{"response": <summary>}`` on success.
    Raises HTTPException 400 for an empty/missing question or an
    unloadable / unparsable URL.
    """
    data = await request.json()
    # .get() instead of ['user_question'] so a malformed body yields a
    # clean 400 below rather than an unhandled KeyError (HTTP 500).
    user_query = data.get('user_question', '')
    print(user_query)

    if not user_query.strip():
        raise HTTPException(status_code=400, detail="Invalid user question")

    try:
        loader = WebBaseLoader(user_query)
        print(user_query)
        docs = loader.load()
        result = chain.run(docs)
        print("AI Summarization: " + result)
        return {'response': result}
    except Exception as e:
        err_msg = "Wrong URL or URL not parsable."
        # Log the underlying cause before mapping to a client error.
        print(err_msg, repr(e), file=sys.stderr)
        raise HTTPException(status_code=400, detail=err_msg) from e