# NOTE(review): the three lines below were scraping residue from a Hugging Face
# Space status banner ("Spaces: / Sleeping / Sleeping") — kept as a comment so
# the file parses.
import os

import tensorflow as tf
from dotenv import load_dotenv
from transformers import BertTokenizerFast

# Load environment variables from a local .env file (e.g. MODEL_PATH) so
# os.getenv picks them up below.
load_dotenv()
def load_model(model_path):
    """Load and return a saved Keras model from *model_path*.

    The original comment claimed `from_tf=True` was used; the code actually
    calls plain `tf.keras.models.load_model`, which accepts a SavedModel
    directory or an .h5/.keras file path — TODO confirm the stored format.
    """
    model = tf.keras.models.load_model(model_path)
    return model
def load_tokenizer(model_path):
    """Return a BertTokenizerFast loaded from *model_path* (a local directory
    or, presumably, a Hub model ID — verify against the caller)."""
    return BertTokenizerFast.from_pretrained(model_path)
def predict(text, model, tokenizer):
    """Tokenize *text* with *tokenizer* (as TensorFlow tensors) and feed the
    encoding straight through *model*, returning the raw model outputs."""
    encoded = tokenizer(text, return_tensors="tf")
    return model(encoded)
def main():
    """Load the model and tokenizer, run one sample prediction, and print it.

    BUG FIX: the original called os.getenv('Erfan11/Neuracraft'), looking up an
    environment variable literally named "Erfan11/Neuracraft" — almost certainly
    unset, so model_path was None and both loaders failed. Read the path from
    the MODEL_PATH env var instead, falling back to the original model ID.
    """
    model_path = os.getenv('MODEL_PATH', 'Erfan11/Neuracraft')
    model = load_model(model_path)
    tokenizer = load_tokenizer(model_path)
    # Example usage
    text = "Sample input text"
    result = predict(text, model, tokenizer)
    print(result)
# Standard script entry guard: run main() only when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    main()