import os

from dotenv import load_dotenv
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Load environment variables (expects MODEL_PATH in a .env file)
load_dotenv()


def load_model(model_path):
    """Load the fine-tuned classification model and its tokenizer."""
    model = AutoModelForSequenceClassification.from_pretrained(model_path)
    tokenizer = AutoTokenizer.from_pretrained(model_path)
    return model, tokenizer


def predict(text, model, tokenizer):
    """Tokenize the input text and return the raw model outputs (logits)."""
    inputs = tokenizer(text, return_tensors="pt")
    with torch.no_grad():  # inference only, no gradient tracking needed
        outputs = model(**inputs)
    return outputs


def main():
    model_path = os.getenv("MODEL_PATH")
    model, tokenizer = load_model(model_path)

    # Example usage
    text = "Sample input text"
    result = predict(text, model, tokenizer)
    print(result)


if __name__ == "__main__":
    main()
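
The predict helper above returns the raw model output object rather than a readable result. Below is a minimal sketch of turning its logits into probabilities and a label, assuming model and tokenizer were loaded with load_model as above; the label names come from the checkpoint's config and may just be LABEL_0, LABEL_1, ... if none were set:

import torch
import torch.nn.functional as F


def predict_label(text, model, tokenizer):
    inputs = tokenizer(text, return_tensors="pt")
    with torch.no_grad():
        logits = model(**inputs).logits
    probs = F.softmax(logits, dim=-1)            # convert raw logits to class probabilities
    class_id = int(torch.argmax(probs, dim=-1))  # index of the highest-scoring class
    # id2label is taken from the model config; fall back to the numeric index if unset
    label = model.config.id2label.get(class_id, str(class_id))
    return label, probs.squeeze().tolist()

For example, predict_label("Sample input text", model, tokenizer) returns a (label, probabilities) pair instead of the raw output printed by main().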
If the checkpoint was saved with TensorFlow weights, it can still be loaded into a PyTorch model by passing from_tf=True:

import os

from transformers import BertForSequenceClassification

# Load the TensorFlow checkpoint into a PyTorch model using from_tf=True
model = BertForSequenceClassification.from_pretrained(
    "Erfan11/Neuracraft",
    from_tf=True,
    use_auth_token=os.getenv("HF_TOKEN"),  # Hugging Face access token, read from the environment instead of hard-coded
)

# Additional code to run your app can go here (for example, a Streamlit or Gradio interface)
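
As a sketch of that last step, the snippet below wires the model loaded above into a small Gradio interface; the classify function, the use of the Erfan11/Neuracraft tokenizer, and the HF_TOKEN variable are illustrative assumptions rather than part of the original app:

import os

import gradio as gr
import torch
from transformers import BertTokenizer

# assumes `model` was loaded as shown above and the tokenizer is published in the same repo
tokenizer = BertTokenizer.from_pretrained(
    "Erfan11/Neuracraft",
    use_auth_token=os.getenv("HF_TOKEN"),
)


def classify(text):
    inputs = tokenizer(text, return_tensors="pt")
    with torch.no_grad():
        logits = model(**inputs).logits
    probs = torch.softmax(logits, dim=-1).squeeze()
    # map each label name from the model config to its probability for gr.Label
    return {model.config.id2label[i]: float(p) for i, p in enumerate(probs)}


demo = gr.Interface(
    fn=classify,
    inputs=gr.Textbox(lines=3, label="Input text"),
    outputs=gr.Label(num_top_classes=3),
    title="Neuracraft text classifier",
)
demo.launch()

Running the script then serves the interface locally (or inside the Space) on Gradio's default port.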