import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline, BitsAndBytesConfig


def load_model():
    model_name = 'SantiagoMJ/Lama-3-8b-RETIE-SER-V2-30'
    tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)

    if torch.cuda.is_available():
        # On GPU, load the model quantized to 4-bit (NF4) to reduce memory usage
        bnb_config = BitsAndBytesConfig(
            load_in_4bit=True,
            bnb_4bit_quant_type="nf4",
            bnb_4bit_compute_dtype=torch.float16,
            bnb_4bit_use_double_quant=False,
        )
        model = AutoModelForCausalLM.from_pretrained(
            model_name,
            quantization_config=bnb_config,
            device_map='auto'
        )
    else:
        # On CPU, load the model without quantization
        model = AutoModelForCausalLM.from_pretrained(
            model_name,
            device_map='auto'
        )
    return model, tokenizer


# Load the model once at startup and build the text-generation pipeline used by
# generate_response (the generation parameters below are assumed defaults; adjust as needed)
model, tokenizer = load_model()
pipe = pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
    max_new_tokens=512,
    do_sample=True,
    temperature=0.7,
)


def generate_response(message, history):
    prompt = f"[INST] {message} [/INST]"
    result = pipe(prompt)
    # Strip the echoed prompt so only the model's answer is returned
    response = result[0]['generated_text'].replace(prompt, "").strip()
    return response


# Define custom CSS styles
css = """
#chat-container {
    border-radius: 10px;
    background-color: #ffffff;
    padding: 20px;
    box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
}
#header {
    background: linear-gradient(135deg, #f8f9fa 0%, #e9ecef 100%);
    padding: 20px;
    border-radius: 10px;
    margin-bottom: 20px;
    border: 1px solid #e0e0e0;
}
#title {
    text-align: center;
    margin-bottom: 5px;
}
#subtitle {
    text-align: center;
    color: #666;
    font-size: 0.9em;
}
#input-container {
    background-color: white;
    border-radius: 8px;
    padding: 15px;
    margin-top: 10px;
    border: 1px solid #e0e0e0;
}
.message-box {
    height: 500px !important;
    overflow-y: auto;
    padding: 20px;
    background-color: white;
    border-radius: 8px;
    margin-bottom: 15px;
    border: 1px solid #e0e0e0;
}
.bot-message {
    background-color: #f8f9fa !important;
    border: 1px solid #e9ecef;
}
.user-message {
    background-color: #f8f9fa !important;
    border: 1px solid #e9ecef;
}
.custom-button {
    border: 1px solid #e0e0e0 !important;
    background-color: white !important;
    color: #666 !important;
    transition: all 0.3s ease !important;
}
.custom-button:hover {
    background-color: #f8f9fa !important;
    border-color: #666 !important;
}
"""

with gr.Blocks(css=css) as demo:
    with gr.Column(elem_id="chat-container"):
        # Header
        with gr.Column(elem_id="header"):
            gr.Markdown("""

                <h1 id="title">NPC - RETIE<br>DE SERINGTEC</h1>
                <p id="subtitle">Asistente Virtual Especializado en Normatividad Eléctrica</p>

""") # Chat Interface chat_interface = gr.Chatbot( [], elem_id="chatbox", height=500, bubble_full_width=False, avatar_images=("👤", "🤖"), show_label=False, container=True ) # Status indicator status = gr.Markdown("*Sistema listo para responder consultas*") # Input Container with gr.Column(elem_id="input-container"): with gr.Row(): txt = gr.Textbox( show_label=False, placeholder="Escribe tu pregunta sobre normatividad eléctrica aquí...", container=False, scale=7 ) submit_btn = gr.Button("Enviar 📤", scale=1, variant="primary") with gr.Row(): clear_btn = gr.Button("Limpiar Chat 🗑️", size="sm", elem_classes="custom-button") example_btn = gr.Button("Ver Ejemplo 💡", size="sm", elem_classes="custom-button") # Instructions with gr.Accordion("ℹ️ Guía de Uso", open=False): gr.Markdown(""" ### Cómo usar este asistente: 1. **Escribe tu pregunta** relacionada con normatividad eléctrica 2. **Envía tu consulta** usando el botón 'Enviar' o presionando Enter 3. **Espera la respuesta** del asistente 4. **Revisa el historial** de la conversación en la ventana superior ### Tipos de consultas recomendadas: - Preguntas sobre el RETIE - Dudas sobre instalaciones eléctricas - Consultas sobre normatividad - Requerimientos técnicos """) def user(user_message, history): return "", history + [[user_message, None]] def bot(history): status.value = "*🤔 Procesando tu consulta...*" user_message = history[-1][0] bot_response = generate_response(user_message, history) history[-1][1] = bot_response status.value = "*✅ Sistema listo para responder consultas*" return history def clear_history(): return None def show_example(): return "¿Cuáles son los requisitos principales para la declaración de cumplimiento de una instalación eléctrica?" # Event handlers txt.submit(user, [txt, chat_interface], [txt, chat_interface], queue=False).then( bot, chat_interface, chat_interface ) submit_btn.click(user, [txt, chat_interface], [txt, chat_interface], queue=False).then( bot, chat_interface, chat_interface ) clear_btn.click(clear_history, None, chat_interface) example_btn.click(show_example, None, txt) # Lanzamos la interfaz demo.launch(share=True)