from flask import Flask, request, jsonify
from transformers import AutoTokenizer

app = Flask(__name__)

# Load the tokenizer for the pretrained model
tokenizer = AutoTokenizer.from_pretrained("verbalyze/Text2Text_Conversation_Pretrained_V2__model")


@app.route('/tokenize', methods=['POST'])
def tokenize():
    # Parse the JSON body defensively; fall back to an empty dict if the
    # request has no JSON payload or a non-JSON Content-Type.
    data = request.get_json(silent=True) or {}
    prompt = data.get("prompt", "")
    if not prompt:
        return jsonify({"error": "No prompt provided"}), 400

    # Tokenize the prompt (return_tensors='pt' requires PyTorch to be installed)
    input_ids = tokenizer(prompt, return_tensors='pt').input_ids.tolist()
    return jsonify({"input_ids": input_ids})


if __name__ == '__main__':
    app.run(host='0.0.0.0', port=5001)
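
# Example client usage (a sketch, not part of the service itself): it assumes the
# server above is already running on localhost:5001 and that the `requests`
# package is installed. The prompt text is illustrative only.
#
#   import requests
#
#   resp = requests.post(
#       "http://localhost:5001/tokenize",
#       json={"prompt": "Hello, how are you?"},
#       timeout=10,
#   )
#   resp.raise_for_status()
#   print(resp.json()["input_ids"])  # nested list of token ids from the tokenizer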