CHANGE: Enable debug

This commit is contained in:
2025-12-21 13:45:47 +01:00
parent c25ec806a5
commit 7320798c29
5 changed files with 260 additions and 44 deletions

8
src/.env Normal file
View File

@@ -0,0 +1,8 @@
FLASK_DEBUG=true
FLASK_PORT=5000
FLASK_HOST=0.0.0.0
DB_HOST=10.10.5.32
DB_PORT=5432
DB_NAME=postgres
DB_USER=postgres
DB_PASSWORD=tfmuocdfcarvajal
# SECURITY NOTE (review): this commit adds plaintext database credentials to version control.
# Rotate this password, add src/.env to .gitignore, and commit a sanitized .env.example instead.

View File

@@ -1,6 +1,8 @@
from flask import Flask, jsonify, request from flask import Flask, jsonify, request
import threading import threading
import os
from datetime import datetime from datetime import datetime
from dotenv import load_dotenv
from core.model_loader import ModelManager from core.model_loader import ModelManager
from core.predictor import PredictionHandler from core.predictor import PredictionHandler
@@ -9,24 +11,37 @@ from config.models_config import ModelConfig
app = Flask(__name__) app = Flask(__name__)
# Configuración desde variables de entorno
load_dotenv()
DEBUG_MODE = os.getenv('FLASK_DEBUG', 'false').lower() in ('true', '1', 't')
PORT = int(os.getenv('FLASK_PORT', '5000'))
HOST = os.getenv('FLASK_HOST', '0.0.0.0')
# Inicializar componentes # Inicializar componentes
model_config = ModelConfig() model_config = ModelConfig()
model_manager = ModelManager(model_config) model_manager = ModelManager(model_config, debug_mode=DEBUG_MODE)
prediction_handler = PredictionHandler(model_manager, model_config) prediction_handler = PredictionHandler(model_manager, model_config)
model_trainer = ModelTrainer(model_manager, model_config) model_trainer = ModelTrainer(model_manager, model_config)
# Comprobar que las funciones estan disponibles en el código # Función de debug
@app.before_first_request def debug_check_functions():
def check_functions(): """Verificar que todas las funciones existen (solo en modo debug)"""
"""Verificar que todas las funciones existen""" if DEBUG_MODE:
import importlib
from utils.dynamic_loader import execute_function from utils.dynamic_loader import execute_function
print("=" * 50)
print("DEBUG: Verificando funciones de modelos")
print("=" * 50)
for model_type in model_config.get_all_model_types(): for model_type in model_config.get_all_model_types():
config = model_config.get_model_config(model_type) config = model_config.get_model_config(model_type)
print(f"Verificando modelo: {model_type}") print(f"\nModelo: {model_type}")
print(f" Descripción: {config.get('description', 'Sin descripción')}")
print(f" Módulo: {config.get('module')}") print(f" Módulo: {config.get('module')}")
print(f" Función entrenamiento: {config.get('train_function')}") print(f" Función entrenamiento: {config.get('train_function')}")
print(f" Función datos: {config.get('data_function')}") print(f" Función datos: {config.get('data_function')}")
print(f" Parámetros requeridos: {config.get('required_params', [])}")
# Verificar que la función de entrenamiento existe # Verificar que la función de entrenamiento existe
try: try:
@@ -35,12 +50,27 @@ def check_functions():
print(f" ✓ Función de entrenamiento encontrada") print(f" ✓ Función de entrenamiento encontrada")
else: else:
print(f" ✗ Función de entrenamiento NO encontrada") print(f" ✗ Función de entrenamiento NO encontrada")
# Listar funciones disponibles en el módulo
available_funcs = [f for f in dir(module) if not f.startswith('_')]
print(f" Funciones disponibles: {', '.join(available_funcs[:10])}")
if len(available_funcs) > 10:
print(f" ... y {len(available_funcs) - 10} más")
except ImportError as e:
print(f" ✗ Error importando módulo: {str(e)}")
except Exception as e: except Exception as e:
print(f" ✗ Error: {str(e)}") print(f" ✗ Error: {str(e)}")
print("\n" + "=" * 50)
print(f"DEBUG: Inicializando {len(model_config.get_all_model_types())} modelos")
print("=" * 50)
# Inicializar modelos al arrancar # Inicializar modelos al arrancar
model_manager.init_models() model_manager.init_models()
# Ejecutar chequeo de debug si está activado
debug_check_functions()
@app.route("/train", methods=["POST"]) @app.route("/train", methods=["POST"])
def train(): def train():
"""Entrenar modelos""" """Entrenar modelos"""
@@ -78,6 +108,11 @@ def register_new_model():
try: try:
new_config = model_config.register_model_type(data) new_config = model_config.register_model_type(data)
if DEBUG_MODE:
print(f"DEBUG: Nuevo modelo registrado - {data['type']}")
print(f" Configuración: {new_config}")
return jsonify({ return jsonify({
"status": "model type registered", "status": "model type registered",
"model_type": data["type"], "model_type": data["type"],
@@ -103,7 +138,8 @@ def list_models():
"loaded": model is not None, "loaded": model is not None,
"required_params": model_config_info.get("required_params", []), "required_params": model_config_info.get("required_params", []),
"module": model_config_info.get("module"), "module": model_config_info.get("module"),
"train_function": model_config_info.get("train_function") "train_function": model_config_info.get("train_function"),
"output_type": model_config_info.get("output_type", "unknown")
} }
# Añadir información del archivo si existe # Añadir información del archivo si existe
@@ -125,11 +161,74 @@ def health():
loaded_models = model_manager.get_loaded_models_status() loaded_models = model_manager.get_loaded_models_status()
all_models = model_config.get_all_model_types() all_models = model_config.get_all_model_types()
return jsonify({ response = {
"status": "healthy" if all(loaded_models.values()) else "partial", "status": "healthy" if all(loaded_models.values()) else "partial",
"models_loaded": loaded_models, "models_loaded": loaded_models,
"total_configured": len(all_models), "total_configured": len(all_models),
"loaded_count": sum(1 for v in loaded_models.values() if v) "loaded_count": sum(1 for v in loaded_models.values() if v),
"debug_mode": DEBUG_MODE
}
if DEBUG_MODE:
response["environment"] = {
"host": HOST,
"port": PORT,
"python_version": os.getenv("PYTHON_VERSION", "unknown")
}
return jsonify(response)
@app.route("/debug", methods=["GET"])
def debug_info():
"""Endpoint de información de debug (solo disponible en modo debug)"""
if not DEBUG_MODE:
return jsonify({"error": "Debug mode is disabled"}), 403
import sys
import importlib
# Información del sistema
system_info = {
"python_version": sys.version,
"platform": sys.platform,
"working_directory": os.getcwd(),
"environment_variables": {
k: v for k, v in os.environ.items()
if k.startswith(('FLASK_', 'DB_', 'PYTHON'))
}
}
# Información de módulos cargados
loaded_modules = {}
for model_type in model_config.get_all_model_types():
config = model_config.get_model_config(model_type)
module_name = config.get('module')
if module_name in sys.modules:
module = sys.modules[module_name]
loaded_modules[module_name] = {
"file": getattr(module, '__file__', 'unknown'),
"functions": [f for f in dir(module) if not f.startswith('_')]
}
# Información de memoria de modelos
models_memory = {}
for model_type, model in model_manager.models_cache.items():
if model is not None:
try:
models_memory[model_type] = {
"type": type(model).__name__,
"attributes": [attr for attr in dir(model) if not attr.startswith('_')]
}
except:
models_memory[model_type] = {"type": "unknown"}
return jsonify({
"system": system_info,
"loaded_modules": loaded_modules,
"models_in_memory": models_memory,
"debug_mode": DEBUG_MODE,
"timestamp": datetime.now().isoformat()
}) })
@app.route("/", methods=["GET"]) @app.route("/", methods=["GET"])
@@ -171,12 +270,31 @@ def index():
} }
] ]
if DEBUG_MODE:
endpoints.append({
"path": "/debug",
"method": "GET",
"description": "Información de debug",
"note": "Solo disponible en modo debug"
})
return jsonify({ return jsonify({
"service": "Model Prediction API", "service": "Model Prediction API",
"version": "1.0.0", "version": "1.0.0",
"debug_mode": DEBUG_MODE,
"endpoints": endpoints, "endpoints": endpoints,
"available_models": model_config.get_all_model_types() "available_models": model_config.get_all_model_types()
}) })
if __name__ == "__main__": if __name__ == "__main__":
app.run(host="0.0.0.0", port=5000, debug=True) print(f"🚀 Iniciando servidor en {HOST}:{PORT}")
print(f"🔧 Modo debug: {'ACTIVADO' if DEBUG_MODE else 'DESACTIVADO'}")
print(f"📊 Modelos configurados: {len(model_config.get_all_model_types())}")
if DEBUG_MODE:
print("\n📋 Variables de entorno relevantes:")
for key, value in os.environ.items():
if key.startswith(('FLASK_', 'DB_')):
print(f" {key}: {value}")
app.run(host=HOST, port=PORT, debug=DEBUG_MODE)

View File

@@ -2,6 +2,7 @@ import joblib
import os import os
from datetime import date from datetime import date
from typing import Dict, Any, Optional from typing import Dict, Any, Optional
import sys
from config.models_config import ModelConfig from config.models_config import ModelConfig
from utils.dynamic_loader import execute_function from utils.dynamic_loader import execute_function
@@ -10,42 +11,70 @@ from model_registry import register, load_meta
class ModelManager: class ModelManager:
"""Gestión de carga y cache de modelos""" """Gestión de carga y cache de modelos"""
def __init__(self, model_config: ModelConfig): def __init__(self, model_config: ModelConfig, debug_mode: bool = False):
self.model_config = model_config self.model_config = model_config
self.debug_mode = debug_mode
self.models_cache: Dict[str, Any] = {} self.models_cache: Dict[str, Any] = {}
self.model_dir = "models" self.model_dir = "models"
def init_models(self): def init_models(self):
"""Inicializar todos los modelos configurados""" """Inicializar todos los modelos configurados"""
print(f"\n📦 Inicializando {len(self.model_config.get_all_model_types())} modelos...")
for model_type in self.model_config.get_all_model_types(): for model_type in self.model_config.get_all_model_types():
try: try:
self.models_cache[model_type] = self._load_or_train_model(model_type) self.models_cache[model_type] = self._load_or_train_model(model_type)
print(f"✓ Modelo '{model_type}' cargado correctamente") print(f" ✓ Modelo '{model_type}' cargado correctamente")
except Exception as e: except Exception as e:
print(f"✗ Error cargando modelo '{model_type}': {str(e)}") print(f" ✗ Error cargando modelo '{model_type}': {str(e)}")
if self.debug_mode:
import traceback
traceback.print_exc()
self.models_cache[model_type] = None self.models_cache[model_type] = None
loaded_count = sum(1 for v in self.models_cache.values() if v is not None)
print(f"\n{loaded_count}/{len(self.models_cache)} modelos inicializados")
def _load_or_train_model(self, model_type: str): def _load_or_train_model(self, model_type: str):
"""Cargar o entrenar un modelo dinámicamente""" """Cargar o entrenar un modelo dinámicamente"""
config = self.model_config.get_model_config(model_type) config = self.model_config.get_model_config(model_type)
if self.debug_mode:
print(f" [DEBUG] Procesando modelo: {model_type}")
print(f" [DEBUG] Configuración: {config}")
# Intentar cargar el modelo existente # Intentar cargar el modelo existente
model = self._load_model_from_disk(model_type) model = self._load_model_from_disk(model_type)
if model is not None: if model is not None:
if self.debug_mode:
print(f" [DEBUG] Modelo cargado desde disco: {model_type}")
return model return model
if self.debug_mode:
print(f" [DEBUG] Modelo no encontrado en disco, entrenando nuevo: {model_type}")
# Si no existe, entrenar nuevo modelo # Si no existe, entrenar nuevo modelo
print(f"Modelo {model_type} no se encontro, creación del modelo...") print(f" 🔄 Modelo {model_type} no encontrado, entrenando nuevo...")
# Obtener datos # Obtener datos
df = self._load_data_for_model(model_type, config) df = self._load_data_for_model(model_type, config)
if df.empty:
raise ValueError(f"No se pudieron cargar datos para el modelo {model_type}")
if self.debug_mode:
print(f" [DEBUG] Datos cargados: {len(df)} filas, {len(df.columns)} columnas")
print(f" [DEBUG] Columnas: {list(df.columns)}")
# Entrenar modelo # Entrenar modelo
model = self._train_model(model_type, config, df) model = self._train_model(model_type, config, df)
# Guardar modelo # Guardar modelo
filename = self._save_model(model_type, model, len(df)) filename = self._save_model(model_type, model, len(df))
if self.debug_mode:
print(f"  [DEBUG] Modelo guardado como: {filename}")
return model return model
def _load_model_from_disk(self, model_type: str): def _load_model_from_disk(self, model_type: str):
@@ -54,34 +83,81 @@ class ModelManager:
file = meta.get("current", {}).get(model_type) file = meta.get("current", {}).get(model_type)
if not file: if not file:
if self.debug_mode:
print(f" [DEBUG] No hay metadatos para el modelo: {model_type}")
return None return None
model_path = os.path.join(self.model_dir, file) model_path = os.path.join(self.model_dir, file)
if not os.path.exists(model_path): if not os.path.exists(model_path):
if self.debug_mode:
print(f" [DEBUG] Archivo no encontrado: {model_path}")
return None return None
if self.debug_mode:
print(f" [DEBUG] Cargando modelo desde: {model_path}")
file_size = os.path.getsize(model_path)
print(f" [DEBUG] Tamaño del archivo: {file_size / 1024:.2f} KB")
return joblib.load(model_path) return joblib.load(model_path)
def _load_data_for_model(self, model_type: str, config: Dict[str, Any]): def _load_data_for_model(self, model_type: str, config: Dict[str, Any]):
"""Cargar datos para un modelo específico""" """Cargar datos para un modelo específico"""
from db import fetch_data, fetch_data_legacy
data_function = config.get("data_function") data_function = config.get("data_function")
if self.debug_mode:
print(f" [DEBUG] Función de datos: {data_function}")
try:
if data_function == "fetch_data": if data_function == "fetch_data":
return fetch_data() from db import fetch_data
df = fetch_data()
elif data_function == "fetch_data_legacy": elif data_function == "fetch_data_legacy":
return fetch_data_legacy() from db import fetch_data_legacy
df = fetch_data_legacy()
else: else:
# Ejecutar función personalizada # Ejecutar función personalizada
module_name = config.get("module", "models_train") module_name = config.get("module", "models_train")
return execute_function(module_name, data_function) if self.debug_mode:
print(f" [DEBUG] Ejecutando función personalizada: {module_name}.{data_function}")
df = execute_function(module_name, data_function)
if self.debug_mode and not df.empty:
print(f" [DEBUG] Datos cargados exitosamente")
print(f" [DEBUG] Forma de los datos: {df.shape}")
print(f" [DEBUG] Primeras filas:\n{df.head()}")
return df
except Exception as e:
if self.debug_mode:
import traceback
print(f" [DEBUG] Error cargando datos:")
traceback.print_exc()
raise ValueError(f"Error cargando datos para {model_type}: {str(e)}")
def _train_model(self, model_type: str, config: Dict[str, Any], df): def _train_model(self, model_type: str, config: Dict[str, Any], df):
"""Entrenar un modelo""" """Entrenar un modelo"""
module_name = config.get("module", "models_train") module_name = config.get("module", "models_train")
train_function = config.get("train_function") train_function = config.get("train_function")
return execute_function(module_name, train_function, df)
if self.debug_mode:
print(f" [DEBUG] Entrenando con: {module_name}.{train_function}")
try:
model = execute_function(module_name, train_function, df)
if self.debug_mode:
print(f" [DEBUG] Modelo entrenado exitosamente")
print(f" [DEBUG] Tipo de modelo: {type(model).__name__}")
return model
except Exception as e:
if self.debug_mode:
import traceback
print(f" [DEBUG] Error entrenando modelo:")
traceback.print_exc()
raise ValueError(f"Error entrenando modelo {model_type}: {str(e)}")
def _save_model(self, model_type: str, model, rows: int) -> str: def _save_model(self, model_type: str, model, rows: int) -> str:
"""Guardar modelo en disco""" """Guardar modelo en disco"""
@@ -89,11 +165,20 @@ class ModelManager:
filename = f"{model_type}_xgb_{today}.joblib" filename = f"{model_type}_xgb_{today}.joblib"
os.makedirs(self.model_dir, exist_ok=True) os.makedirs(self.model_dir, exist_ok=True)
joblib.dump(model, os.path.join(self.model_dir, filename)) model_path = os.path.join(self.model_dir, filename)
if self.debug_mode:
print(f" [DEBUG] Guardando modelo en: {model_path}")
joblib.dump(model, model_path)
# Registrar en metadata # Registrar en metadata
register(model_type, filename, rows) register(model_type, filename, rows)
if self.debug_mode:
file_size = os.path.getsize(model_path)
print(f" [DEBUG] Modelo guardado: {file_size / 1024:.2f} KB")
return filename return filename
def get_model(self, model_type: str): def get_model(self, model_type: str):
@@ -103,6 +188,9 @@ class ModelManager:
def reload_model(self, model_type: str): def reload_model(self, model_type: str):
"""Recargar un modelo específico""" """Recargar un modelo específico"""
try: try:
if self.debug_mode:
print(f" [DEBUG] Recargando modelo: {model_type}")
self.models_cache[model_type] = self._load_or_train_model(model_type) self.models_cache[model_type] = self._load_or_train_model(model_type)
return True return True
except Exception as e: except Exception as e:

View File

@@ -20,7 +20,8 @@ dependencies = [
'pandas', 'pandas',
'xgboost', 'xgboost',
'scikit-learn', 'scikit-learn',
'joblib' 'joblib',
'python-dotenv'
] ]
[tool.setuptools] [tool.setuptools]

View File

@@ -5,3 +5,4 @@ scikit-learn
joblib joblib
SQLAlchemy SQLAlchemy
psycopg2-binary psycopg2-binary
python-dotenv