import os
import tensorflow as tf
from dotenv import load_dotenv
from transformers import BertTokenizerFast
# Load environment variables
load_dotenv()
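# MODEL_PATH is expected to be defined in a .env file next to this script.
# Example .env contents (the path below is only an assumed placeholder):
#   MODEL_PATH=./saved_model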
def load_model(model_path):
    # Load the saved TensorFlow/Keras model from disk
    model = tf.keras.models.load_model(model_path)
    return model

def load_tokenizer(model_path):
    tokenizer = BertTokenizerFast.from_pretrained(model_path)
    return tokenizer

def predict(text, model, tokenizer):
    inputs = tokenizer(text, return_tensors="tf")
    # Pass the tokenized inputs to the model as a plain dict of named tensors
    outputs = model(dict(inputs))
    return outputs

def main():
    model_path = os.getenv('MODEL_PATH')
    if not model_path:
        raise ValueError("MODEL_PATH environment variable is not set")
    model = load_model(model_path)
    tokenizer = load_tokenizer(model_path)

    # Example usage
    text = "Sample input text"
    result = predict(text, model, tokenizer)
    print(result)

if __name__ == "__main__":
    main()
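# Usage sketch (assumptions: a .env file defines MODEL_PATH, and that directory
# contains both the saved Keras model and the tokenizer files; the file name
# below is hypothetical, as the script is not named in the source):
#   python app.py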