# LiteraLingo_Dev / utility.py: helper functions
import os
import re
import time
import urllib.request

import gradio as gr
import huggingface_hub
import requests
import torch
import transformers
from transformers import (
    AutoModelForCausalLM,
    AutoTokenizer,
    T5ForConditionalGeneration,
    T5Tokenizer,
)

import globals

def fetch_model(url, filename):
    """Download a model file from `url` to `filename`, skipping the download if the file already exists."""
    if not os.path.isfile(filename):
        urllib.request.urlretrieve(url, filename)
        print("File downloaded successfully.")
    else:
        print("File already exists.")
def api_query(API_URL, headers, payload):
    """POST `payload` as JSON to `API_URL` and return the decoded JSON response."""
    response = requests.post(API_URL, headers=headers, json=payload)
    return response.json()
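
# Example usage (a sketch assuming the Hugging Face Inference API; the model URL,
# token variable, and payload shape are illustrative, not taken from this repo):
# API_URL = "https://api-inference.huggingface.co/models/google/flan-t5-base"
# headers = {"Authorization": f"Bearer {HF_TOKEN}"}
# result = api_query(API_URL, headers, {"inputs": "Explain: break a leg"})
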
def post_process(model_output, input):
    """Strip the echoed prompt from `model_output` and return the cleaned answer."""
    # `input` is the original prompt text (note: it shadows the built-in of the same name).
    start_pos = model_output.find(input)
    if start_pos != -1:
        # The model echoed the prompt; keep only the text that follows it.
        answer = model_output[start_pos + len(input):].strip()
    else:
        answer = model_output
        print("Prompt not found in the model output; returning it unchanged.")
    # str.replace returns a new string, so the result must be reassigned.
    answer = answer.replace("\n", "")
    return answer
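
# Example usage (a sketch; the strings are illustrative):
# raw = "Explain the idiom 'break a leg'. Literal meaning: to wish someone good luck."
# post_process(raw, "Explain the idiom 'break a leg'.")
# -> "Literal meaning: to wish someone good luck."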