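"""Flask entry point for the Model Prediction API.

Exposes HTTP endpoints to train models, serve predictions, register new
model types dynamically, and report health and debug information.
"""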
from flask import Flask, jsonify, request
import threading
import os
from datetime import datetime
from dotenv import load_dotenv

from core.model_loader import ModelManager
from core.predictor import PredictionHandler
from core.model_trainer import ModelTrainer
from config.models_config import ModelConfig

app = Flask(__name__)

# Configuration from environment variables
load_dotenv()
DEBUG_MODE = os.getenv('FLASK_DEBUG', 'false').lower() in ('true', '1', 't')
PORT = int(os.getenv('FLASK_PORT', '5000'))
HOST = os.getenv('FLASK_HOST', '0.0.0.0')

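# Example .env for local development (values are illustrative; the variable
# names match the os.getenv() lookups above):
#   FLASK_DEBUG=true
#   FLASK_PORT=5000
#   FLASK_HOST=0.0.0.0
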
# Initialize components
model_config = ModelConfig()
model_manager = ModelManager(model_config, debug_mode=DEBUG_MODE)
prediction_handler = PredictionHandler(model_manager, model_config)
model_trainer = ModelTrainer(model_manager, model_config)

# Debug helper
def debug_check_functions():
    """Verify that every configured model function exists (debug mode only)."""
    if DEBUG_MODE:
        import importlib
        from utils.dynamic_loader import execute_function

        print("=" * 50)
        print("DEBUG: Checking model functions")
        print("=" * 50)

        for model_type in model_config.get_all_model_types():
            config = model_config.get_model_config(model_type)
            print(f"\nModel: {model_type}")
            print(f"  Description: {config.get('description', 'No description')}")
            print(f"  Module: {config.get('module')}")
            print(f"  Training function: {config.get('train_function')}")
            print(f"  Data function: {config.get('data_function')}")
            print(f"  Required parameters: {config.get('required_params', [])}")

            # Verify that the training function exists
            try:
                module = importlib.import_module(config.get('module'))
                if hasattr(module, config.get('train_function')):
                    print("  ✓ Training function found")
                else:
                    print("  ✗ Training function NOT found")

                    # List the functions available in the module
                    available_funcs = [f for f in dir(module) if not f.startswith('_')]
                    print(f"  Available functions: {', '.join(available_funcs[:10])}")
                    if len(available_funcs) > 10:
                        print(f"  ... and {len(available_funcs) - 10} more")
            except ImportError as e:
                print(f"  ✗ Error importing module: {str(e)}")
            except Exception as e:
                print(f"  ✗ Error: {str(e)}")

        print("\n" + "=" * 50)
        print(f"DEBUG: Initializing {len(model_config.get_all_model_types())} models")
        print("=" * 50)

# Initialize models at startup
model_manager.init_models()

# Run the debug check if it is enabled
debug_check_functions()

@app.route("/train", methods=["POST"])
|
|
def train():
|
|
"""Entrenar modelos"""
|
|
model_type = request.args.get('model_type')
|
|
|
|
# Validar que el tipo de modelo existe
|
|
if model_type and not model_config.model_exists(model_type):
|
|
return jsonify({
|
|
"error": f"Model type '{model_type}' not found",
|
|
"available_models": model_config.get_all_model_types()
|
|
}), 400
|
|
|
|
threading.Thread(target=model_trainer.background_train, args=(model_type,)).start()
|
|
|
|
return jsonify({
|
|
"status": "training started",
|
|
"model_type": model_type or "all",
|
|
"timestamp": datetime.now().isoformat()
|
|
})
|
|
|
|
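# Illustrative request against the /train route above (assumes the default
# port configured earlier; the model_type value is hypothetical):
#   curl -X POST "http://localhost:5000/train?model_type=demand_forecast"
# Training runs in a background thread, so the endpoint responds immediately.
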
@app.route("/predict", methods=["GET"])
|
|
def predict():
|
|
"""Endpoint único para predicciones"""
|
|
return prediction_handler.handle_predict_request(request.args)
|
|
|
|
@app.route("/demand", methods=["GET"])
|
|
def demand():
|
|
"""Endpoint para obtener todas las predicciones"""
|
|
return prediction_handler.handle_demand_request(request.args)
|
|
|
|
@app.route("/models/register", methods=["POST"])
|
|
def register_new_model():
|
|
"""Registrar un nuevo tipo de modelo dinámicamente"""
|
|
data = request.get_json()
|
|
|
|
try:
|
|
new_config = model_config.register_model_type(data)
|
|
|
|
if DEBUG_MODE:
|
|
print(f"DEBUG: Nuevo modelo registrado - {data['type']}")
|
|
print(f" Configuración: {new_config}")
|
|
|
|
return jsonify({
|
|
"status": "model type registered",
|
|
"model_type": data["type"],
|
|
"config": new_config
|
|
})
|
|
except ValueError as e:
|
|
return jsonify({"error": str(e)}), 400
|
|
except Exception as e:
|
|
return jsonify({"error": f"Registration failed: {str(e)}"}), 500
|
|
|
|
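# Illustrative JSON payload for the /models/register route above. Only the
# "type" key is read directly by this handler; the remaining keys are
# assumptions based on config fields used elsewhere in this file, and the
# actual schema is defined by ModelConfig.register_model_type:
#   {"type": "my_model", "module": "models.my_model",
#    "train_function": "train_my_model", "required_params": ["horizon"]}
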
@app.route("/models", methods=["GET"])
|
|
def list_models():
|
|
"""Listar todos los modelos disponibles"""
|
|
models_info = []
|
|
|
|
for model_type in model_config.get_all_model_types():
|
|
model = model_manager.get_model(model_type)
|
|
model_config_info = model_config.get_model_config(model_type)
|
|
|
|
model_info = {
|
|
"type": model_type,
|
|
"description": model_config_info.get("description", ""),
|
|
"loaded": model is not None,
|
|
"required_params": model_config_info.get("required_params", []),
|
|
"module": model_config_info.get("module"),
|
|
"train_function": model_config_info.get("train_function"),
|
|
"output_type": model_config_info.get("output_type", "unknown")
|
|
}
|
|
|
|
# Añadir información del archivo si existe
|
|
from model_registry import load_meta
|
|
meta = load_meta()
|
|
if model_type in meta.get("current", {}):
|
|
model_info["file"] = meta["current"][model_type]
|
|
|
|
models_info.append(model_info)
|
|
|
|
return jsonify({
|
|
"available_models": models_info,
|
|
"total": len(models_info)
|
|
})
|
|
|
|
@app.route("/health", methods=["GET"])
|
|
def health():
|
|
"""Endpoint de salud"""
|
|
loaded_models = model_manager.get_loaded_models_status()
|
|
all_models = model_config.get_all_model_types()
|
|
|
|
response = {
|
|
"status": "healthy" if all(loaded_models.values()) else "partial",
|
|
"models_loaded": loaded_models,
|
|
"total_configured": len(all_models),
|
|
"loaded_count": sum(1 for v in loaded_models.values() if v),
|
|
"debug_mode": DEBUG_MODE
|
|
}
|
|
|
|
if DEBUG_MODE:
|
|
response["environment"] = {
|
|
"host": HOST,
|
|
"port": PORT,
|
|
"python_version": os.getenv("PYTHON_VERSION", "unknown")
|
|
}
|
|
|
|
return jsonify(response)
|
|
|
|
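# Illustrative /health response shape (values depend on runtime state; the
# model name and counts shown are made up):
#   {"status": "partial", "models_loaded": {"demand_forecast": true},
#    "total_configured": 3, "loaded_count": 1, "debug_mode": false}
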
@app.route("/debug", methods=["GET"])
|
|
def debug_info():
|
|
"""Endpoint de información de debug (solo disponible en modo debug)"""
|
|
if not DEBUG_MODE:
|
|
return jsonify({"error": "Debug mode is disabled"}), 403
|
|
|
|
import sys
|
|
import importlib
|
|
|
|
# Información del sistema
|
|
system_info = {
|
|
"python_version": sys.version,
|
|
"platform": sys.platform,
|
|
"working_directory": os.getcwd(),
|
|
"environment_variables": {
|
|
k: v for k, v in os.environ.items()
|
|
if k.startswith(('FLASK_', 'DB_', 'PYTHON'))
|
|
}
|
|
}
|
|
|
|
# Información de módulos cargados
|
|
loaded_modules = {}
|
|
for model_type in model_config.get_all_model_types():
|
|
config = model_config.get_model_config(model_type)
|
|
module_name = config.get('module')
|
|
|
|
if module_name in sys.modules:
|
|
module = sys.modules[module_name]
|
|
loaded_modules[module_name] = {
|
|
"file": getattr(module, '__file__', 'unknown'),
|
|
"functions": [f for f in dir(module) if not f.startswith('_')]
|
|
}
|
|
|
|
# Información de memoria de modelos
|
|
models_memory = {}
|
|
for model_type, model in model_manager.models_cache.items():
|
|
if model is not None:
|
|
try:
|
|
models_memory[model_type] = {
|
|
"type": type(model).__name__,
|
|
"attributes": [attr for attr in dir(model) if not attr.startswith('_')]
|
|
}
|
|
except:
|
|
models_memory[model_type] = {"type": "unknown"}
|
|
|
|
return jsonify({
|
|
"system": system_info,
|
|
"loaded_modules": loaded_modules,
|
|
"models_in_memory": models_memory,
|
|
"debug_mode": DEBUG_MODE,
|
|
"timestamp": datetime.now().isoformat()
|
|
})
|
|
|
|
@app.route("/", methods=["GET"])
|
|
def index():
|
|
"""Página principal con documentación"""
|
|
endpoints = [
|
|
{
|
|
"path": "/predict",
|
|
"method": "GET",
|
|
"description": "Realizar predicción",
|
|
"parameters": "model_type (required), otros según modelo"
|
|
},
|
|
{
|
|
"path": "/demand",
|
|
"method": "GET",
|
|
"description": "Obtener predicciones masivas",
|
|
"parameters": "model_type (required), limit (opcional)"
|
|
},
|
|
{
|
|
"path": "/train",
|
|
"method": "POST",
|
|
"description": "Entrenar modelos",
|
|
"parameters": "model_type (opcional)"
|
|
},
|
|
{
|
|
"path": "/models",
|
|
"method": "GET",
|
|
"description": "Listar modelos disponibles"
|
|
},
|
|
{
|
|
"path": "/models/register",
|
|
"method": "POST",
|
|
"description": "Registrar nuevo tipo de modelo"
|
|
},
|
|
{
|
|
"path": "/health",
|
|
"method": "GET",
|
|
"description": "Estado del sistema"
|
|
}
|
|
]
|
|
|
|
if DEBUG_MODE:
|
|
endpoints.append({
|
|
"path": "/debug",
|
|
"method": "GET",
|
|
"description": "Información de debug",
|
|
"note": "Solo disponible en modo debug"
|
|
})
|
|
|
|
return jsonify({
|
|
"service": "Model Prediction API",
|
|
"version": "1.0.0",
|
|
"debug_mode": DEBUG_MODE,
|
|
"endpoints": endpoints,
|
|
"available_models": model_config.get_all_model_types()
|
|
})
|
|
|
|
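# Illustrative local startup (the module filename is an assumption, not part
# of this file):
#   FLASK_DEBUG=true FLASK_PORT=5000 python app.py
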
if __name__ == "__main__":
    print(f"🚀 Starting server on {HOST}:{PORT}")
    print(f"🔧 Debug mode: {'ENABLED' if DEBUG_MODE else 'DISABLED'}")
    print(f"📊 Configured models: {len(model_config.get_all_model_types())}")

    if DEBUG_MODE:
        print("\n📋 Relevant environment variables:")
        for key, value in os.environ.items():
            if key.startswith(('FLASK_', 'DB_')):
                print(f"  {key}: {value}")

    app.run(host=HOST, port=PORT, debug=DEBUG_MODE)