# Textwizai / main.py
# Source: Hugging Face repo (author: Erfan11, commit b58127a, 835 bytes).
import os
import tensorflow as tf
from dotenv import load_dotenv
from transformers import BertTokenizerFast
# Load environment variables from a local .env file (e.g. MODEL_PATH)
# into os.environ so os.getenv can read them below.
load_dotenv()
def load_model(model_path):
    """Load a saved Keras/TensorFlow model from *model_path*.

    Args:
        model_path: Path to a SavedModel directory or an .h5/.keras file.

    Returns:
        The deserialized ``tf.keras`` model.
    """
    # NOTE(review): the previous comment mentioned ``from_tf=True`` — that is
    # an argument of transformers' ``from_pretrained``, not of
    # ``tf.keras.models.load_model``, and it was never passed here anyway.
    return tf.keras.models.load_model(model_path)
def load_tokenizer(model_path):
    """Return a ``BertTokenizerFast`` restored from *model_path*."""
    return BertTokenizerFast.from_pretrained(model_path)
def predict(text, model, tokenizer):
    """Tokenize *text* as TensorFlow tensors and run it through *model*.

    Returns the model's raw outputs unchanged.
    """
    encoded = tokenizer(text, return_tensors="tf")
    return model(encoded)
def main():
    """Load the model and tokenizer from MODEL_PATH and print a sample prediction."""
    model_path = os.getenv('MODEL_PATH')
    # Fail fast with a clear message instead of letting a None path surface
    # as a cryptic error inside TensorFlow / transformers.
    if not model_path:
        raise RuntimeError("MODEL_PATH environment variable is not set")
    model = load_model(model_path)
    tokenizer = load_tokenizer(model_path)
    # Example usage
    text = "Sample input text"
    result = predict(text, model, tokenizer)
    print(result)


if __name__ == "__main__":
    main()