AIdeaText committed (verified)
Commit: fa70157
Parent: 525418e

Update modules/semantic/semantic_interface.py

Files changed (1):
  1. modules/semantic/semantic_interface.py +119 -167
modules/semantic/semantic_interface.py CHANGED
@@ -28,137 +28,95 @@ from ..database.semantic_export import export_user_interactions
  def display_semantic_interface(lang_code, nlp_models, semantic_t):
      """
      Interface for semantic analysis
      """
-     # Keep the current page
-     st.session_state.page = 'semantic'
-
-     # Initialize state flags if they do not exist
-     if 'semantic_has_file' not in st.session_state:
-         st.session_state.semantic_has_file = False
-     if 'semantic_result' not in st.session_state:
-         st.session_state.semantic_result = None
-
-     # Styles for the buttons
-     st.markdown("""
-         <style>
-         .stButton button {
-             width: 100%;
-             height: 38px;
-         }
-         </style>
-     """, unsafe_allow_html=True)
-
-     # Main container for the controls
-     with st.container():
-         col_upload, col_analyze, col_export, col_new = st.columns([4,2,2,2])
-
-         # Column for file upload
-         with col_upload:
-             uploaded_file = st.file_uploader(
-                 semantic_t.get('file_uploader', 'Upload text file'),
-                 type=['txt'],
-                 key="semantic_file_uploader"
-             )
-             # Update state when a file is uploaded
-             if uploaded_file is not None:
-                 st.session_state.semantic_has_file = True
-                 if 'file_content' not in st.session_state or st.session_state.file_content != uploaded_file:
-                     st.session_state.file_content = uploaded_file
-                     st.session_state.semantic_result = None  # Reset the result if the file changes
-             else:
-                 st.session_state.semantic_has_file = False
-
-         # Column for the analyze button
-         with col_analyze:
-             analyze_disabled = not st.session_state.semantic_has_file
-             analyze_button = st.button(
-                 semantic_t.get('semantic_analyze_button', 'Analyze'),
-                 disabled=analyze_disabled,
-                 key="semantic_analysis_button",
-                 use_container_width=True
-             )
-
-         # Column for the export button
-         with col_export:
-             export_disabled = st.session_state.semantic_result is None
-             export_button = st.button(
-                 semantic_t.get('semantic_export_button', 'Export'),
-                 disabled=export_disabled,
-                 key="semantic_export_button",
-                 use_container_width=True
-             )
-
-         # Column for the new-analysis button
-         with col_new:
-             new_disabled = not st.session_state.semantic_has_file
-             new_button = st.button(
-                 semantic_t.get('semantic_new_button', 'New'),
-                 disabled=new_disabled,
-                 key="semantic_new_button",
-                 use_container_width=True
-             )
-
-     st.markdown("---")
-
-     # Run the analysis
-     if analyze_button and uploaded_file is not None:
-         try:
-             text_content = uploaded_file.getvalue().decode('utf-8')
-
-             with st.spinner(semantic_t.get('processing', 'Processing...')):
-                 analysis_result = perform_semantic_analysis(
-                     text_content,
-                     nlp_models[lang_code],
-                     lang_code
-                 )
-
-                 if analysis_result['success']:
-                     st.session_state.semantic_result = analysis_result
-
-                     # Save to the database
-                     if store_student_semantic_result(
-                         st.session_state.username,
-                         text_content,
-                         analysis_result
-                     ):
-                         st.success(semantic_t.get('success_message', 'Analysis saved successfully'))
-                         # Show results
-                         display_semantic_results(analysis_result, lang_code, semantic_t)
-                     else:
-                         st.error(semantic_t.get('error_message', 'Error saving analysis'))
-                 else:
-                     st.error(analysis_result['message'])
-
-         except Exception as e:
-             st.error(f"Error: {str(e)}")
-
-     # Handle export
-     if export_button and st.session_state.semantic_result is not None:
-         try:
-             pdf_buffer = export_user_interactions(st.session_state.username, 'semantic')
-             st.download_button(
-                 label=semantic_t.get('download_pdf', 'Download PDF'),
-                 data=pdf_buffer,
-                 file_name="semantic_analysis.pdf",
-                 mime="application/pdf",
-                 key="semantic_download_button"
-             )
-         except Exception as e:
-             st.error(f"Error exporting: {str(e)}")
-
-     # Handle a new analysis
-     if new_button:
-         st.session_state.semantic_result = None
-         st.session_state.semantic_has_file = False
-         st.session_state.file_content = None
-         st.rerun()
-
-     # Show results or the initial message
-     if st.session_state.semantic_result is not None:
-         display_semantic_results(st.session_state.semantic_result, lang_code, semantic_t)
-     elif not st.session_state.semantic_has_file:
-         st.info(semantic_t.get('initial_message', 'Upload a file to begin analysis'))

  def display_semantic_results(result, lang_code, semantic_t):
      """
@@ -170,43 +128,37 @@ def display_semantic_results(result, lang_code, semantic_t):

      analysis = result['analysis']

-     # Create tabs for the results
-     tab1, tab2 = st.tabs([
-         semantic_t.get('concepts_tab', 'Key Concepts Analysis'),
-         semantic_t.get('entities_tab', 'Entities Analysis')
-     ])
-
-     # Tab 1: Key concepts
-     with tab1:
-         col1, col2 = st.columns(2)
-
-         # Column 1: Concept list
-         with col1:
-             st.subheader(semantic_t.get('key_concepts', 'Key Concepts'))
-             concept_text = "\n".join([
-                 f"• {concept} ({frequency:.2f})"
-                 for concept, frequency in analysis['key_concepts']
-             ])
-             st.markdown(concept_text)
-
-         # Column 2: Concept graph
-         with col2:
-             st.subheader(semantic_t.get('concept_graph', 'Concepts Graph'))
-             st.image(analysis['concept_graph'])
-
-     # Tab 2: Entities
-     with tab2:
-         col1, col2 = st.columns(2)
-
-         # Column 1: Entity list
-         with col1:
-             st.subheader(semantic_t.get('identified_entities', 'Identified Entities'))
-             if 'entities' in analysis:
-                 for entity_type, entities in analysis['entities'].items():
-                     st.markdown(f"**{entity_type}**")
-                     st.markdown("• " + "\n• ".join(entities))
-
-         # Column 2: Entity graph
-         with col2:
-             st.subheader(semantic_t.get('entity_graph', 'Entities Graph'))
-             st.image(analysis['entity_graph'])
 
  def display_semantic_interface(lang_code, nlp_models, semantic_t):
      """
      Interface for semantic analysis
+     Args:
+         lang_code: Current language code
+         nlp_models: Loaded spaCy models
+         semantic_t: Dictionary of semantic translations
      """
+     try:
+         # Initialize the input state
+         input_key = f"semantic_input_{lang_code}"
+         if input_key not in st.session_state:
+             st.session_state[input_key] = ""
+
+         if 'semantic_analysis_counter' not in st.session_state:
+             st.session_state.semantic_analysis_counter = 0
+
+         # Text input field with a unique key
+         text_input = st.text_area(
+             semantic_t.get('text_input_label', 'Enter text to analyze'),
+             height=150,
+             placeholder=semantic_t.get('text_input_placeholder', 'Enter your text here...'),
+             value=st.session_state[input_key],
+             key=f"semantic_text_area_{st.session_state.semantic_analysis_counter}"
+         )
+
+         # File upload option with a unique key
+         uploaded_file = st.file_uploader(
+             semantic_t.get('file_uploader', 'Or upload a text file'),
+             type=['txt'],
+             key=f"semantic_file_uploader_{st.session_state.semantic_analysis_counter}"
+         )
+
+         # Analyze button with a unique key
+         analyze_button = st.button(
+             semantic_t.get('analyze_button', 'Analyze text'),
+             key=f"semantic_analyze_button_{st.session_state.semantic_analysis_counter}"
+         )
+
+         if analyze_button:
+             if text_input or uploaded_file is not None:
+                 try:
+                     with st.spinner(semantic_t.get('processing', 'Processing...')):
+                         text_content = uploaded_file.getvalue().decode('utf-8') if uploaded_file else text_input
+
+                         analysis_result = process_semantic_input(
+                             text_content,
+                             lang_code,
+                             nlp_models,
+                             semantic_t
+                         )
+
+                         if analysis_result['success']:
+                             st.session_state.semantic_result = analysis_result
+                             st.session_state.semantic_analysis_counter += 1
+
+                             # Save to the database before showing results
+                             if store_student_semantic_result(
+                                 st.session_state.username,
+                                 text_content,
+                                 analysis_result['analysis']
+                             ):
+                                 st.success(semantic_t.get('success_message', 'Analysis saved successfully'))
+                                 # Show results
+                                 display_semantic_results(
+                                     analysis_result,
+                                     lang_code,
+                                     semantic_t
+                                 )
+                             else:
+                                 st.error(semantic_t.get('error_message', 'Error saving analysis'))
+                         else:
+                             st.error(analysis_result['message'])
+                 except Exception as e:
+                     logger.error(f"Error in semantic analysis: {str(e)}")
+                     st.error(semantic_t.get('error_processing', f'Error processing text: {str(e)}'))
+             else:
+                 st.warning(semantic_t.get('warning_message', 'Please enter text or upload a file'))
+
+         # Show previous results
+         elif 'semantic_result' in st.session_state and st.session_state.semantic_result is not None:
+             display_semantic_results(
+                 st.session_state.semantic_result,
+                 lang_code,
+                 semantic_t
+             )
+         else:
+             st.info(semantic_t.get('initial_message', 'Enter text to begin analysis'))
+
+     except Exception as e:
+         logger.error(f"General error in the semantic interface: {str(e)}")
+         st.error("An error occurred. Please try again.")

  def display_semantic_results(result, lang_code, semantic_t):
      """

      analysis = result['analysis']

+     # Show key concepts
+     with st.expander(semantic_t.get('key_concepts', 'Key Concepts'), expanded=True):
+         concept_text = " | ".join([
+             f"{concept} ({frequency:.2f})"
+             for concept, frequency in analysis['key_concepts']
+         ])
+         st.write(concept_text)
+
+     # Show the conceptual relations graph
+     with st.expander(semantic_t.get('conceptual_relations', 'Conceptual Relations'), expanded=True):
+         st.image(analysis['concept_graph'])
+
+     # Show the entity relations graph
+     with st.expander(semantic_t.get('entity_relations', 'Entity Relations'), expanded=True):
+         st.image(analysis['entity_graph'])
+
+     # Show identified entities
+     if 'entities' in analysis:
+         with st.expander(semantic_t.get('identified_entities', 'Identified Entities'), expanded=True):
+             for entity_type, entities in analysis['entities'].items():
+                 st.subheader(entity_type)
+                 st.write(", ".join(entities))
+
+     # Export button
+     if st.button(semantic_t.get('export_button', 'Export Analysis'),
+                  key=f"semantic_export_{st.session_state.semantic_analysis_counter}"):
+         pdf_buffer = export_user_interactions(st.session_state.username, 'semantic')
+         st.download_button(
+             label=semantic_t.get('download_pdf', 'Download PDF'),
+             data=pdf_buffer,
+             file_name="semantic_analysis.pdf",
+             mime="application/pdf",
+             key=f"semantic_download_{st.session_state.semantic_analysis_counter}"
+         )
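
A minimal usage sketch of the updated interface, assuming spaCy models are preloaded per language and semantic_t is a plain dict of translations; the model name and literal values below are illustrative, not taken from this repository.

import streamlit as st
import spacy

from modules.semantic.semantic_interface import display_semantic_interface

# Illustrative setup only: the real app builds these elsewhere.
nlp_models = {'en': spacy.load('en_core_web_sm')}     # assumed model name
semantic_t = {'analyze_button': 'Analyze text'}        # missing keys fall back to defaults via .get()
st.session_state.setdefault('username', 'demo_user')   # required by the store/export calls

display_semantic_interface('en', nlp_models, semantic_t)

Because every widget key embeds semantic_analysis_counter, incrementing the counter after a successful analysis gives the text area, uploader, and buttons fresh keys on the next rerun, which effectively resets them without the explicit st.rerun() the previous version relied on.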