Spaces:
Running
Running
| # app/main.py | |
| # Lojiz Platform + Aida AI - Graph-Based Architecture (v1 Primary) | |
| from fastapi import FastAPI, Request | |
| from fastapi.middleware.cors import CORSMiddleware | |
| from fastapi.responses import JSONResponse | |
| from fastapi.exceptions import RequestValidationError | |
| from contextlib import asynccontextmanager | |
| import logging | |
| import os | |
| logging.basicConfig(level=logging.INFO) | |
| logger = logging.getLogger(__name__) | |
| # CORE IMPORTS | |
| try: | |
| from app.config import settings | |
| from app.database import connect_db, disconnect_db, ensure_indexes as ensure_auth_indexes | |
| from app.routes import auth | |
| except ImportError as e: | |
| logger.error(f"Core import error: {e}") | |
| raise | |
| try: | |
| from app.core.exceptions import AuthException | |
| except ImportError: | |
| AuthException = Exception | |
| # ============================================================ | |
| # AI IMPORTS - GRAPH-BASED ARCHITECTURE | |
| # ============================================================ | |
| try: | |
| from app.ai.routes.chat import router as ai_chat_router | |
| from app.ai.config import ( | |
| validate_ai_startup, | |
| check_redis_health, | |
| check_qdrant_health, | |
| redis_client, | |
| qdrant_client, | |
| ) | |
| from app.ai.memory.redis_context_memory import get_memory_manager | |
| from app.ml.models.ml_listing_extractor import get_ml_extractor | |
| logger.info("β Graph-based AI architecture loaded") | |
| except ImportError as e: | |
| logger.error(f"AI import error: {e}") | |
| raise | |
| from app.models.listing import ensure_listing_indexes | |
# ENVIRONMENT
# Deployment stage; defaults to "development" when ENVIRONMENT is unset.
environment = os.getenv("ENVIRONMENT", "development")
# Production flag gates the strict CORS origin allow-list configured below.
is_production = environment == "production"
| # LIFESPAN | |
# LIFESPAN
@asynccontextmanager  # FIX: FastAPI's `lifespan=` expects an async context manager,
                      # not a bare async generator; the import existed but was unused.
async def lifespan(app: FastAPI):
    """Application lifespan - startup and shutdown.

    Startup order:
      1. MongoDB (required - any failure aborts startup by re-raising).
      2. Redis, Qdrant, AI validation, ML extractor, memory manager
         (all optional - failures are logged as warnings and ignored).

    Shutdown clears ML caches, disconnects MongoDB and closes Redis,
    treating every failure as best-effort.
    """
    logger.info("=" * 70)
    logger.info("π Starting Lojiz Platform + Aida AI")
    logger.info(" Architecture: Graph-Based (State Machine + Validation)")
    logger.info(" Primary Endpoint: /ai/v1 (Graph-Based)")
    logger.info(" Fallback Endpoint: /ai/v2 (Legacy)")
    logger.info("=" * 70)

    # STARTUP — MongoDB is mandatory: without it neither auth nor listings work.
    try:
        logger.info("Connecting to MongoDB...")
        await connect_db()
        await ensure_auth_indexes()
        await ensure_listing_indexes()
        logger.info("β MongoDB connected and indexed")
    except Exception as e:
        logger.critical(f"β MongoDB connection failed - aborting startup: {e}")
        raise

    # Redis is optional: context memory degrades gracefully without it.
    try:
        logger.info("Connecting to Redis...")
        if redis_client:
            await redis_client.ping()
            logger.info("β Redis connected")
        else:
            logger.warning("β οΈ Redis not available (optional)")
    except Exception as e:
        logger.warning(f"β οΈ Redis connection failed (continuing without): {e}")

    # Qdrant is optional: vector search is skipped when unavailable.
    try:
        logger.info("Connecting to Qdrant...")
        if qdrant_client:
            await qdrant_client.get_collections()
            logger.info("β Qdrant connected")
        else:
            logger.warning("β οΈ Qdrant not available (optional)")
    except Exception as e:
        logger.warning(f"β οΈ Qdrant connection failed (continuing without): {e}")

    # Best-effort AI component validation; a failed check does not block startup.
    try:
        logger.info("Validating AI components...")
        await validate_ai_startup()
        logger.info("β AI components validated")
    except Exception as e:
        logger.warning(f"β οΈ AI validation warning: {e}")

    # Warm the ML extractor singleton so the first request doesn't pay init cost.
    try:
        logger.info("Initializing ML Extractor...")
        get_ml_extractor()
        logger.info("β ML Extractor ready")
    except Exception as e:
        logger.warning(f"β οΈ ML Extractor initialization warning: {e}")

    # Warm the conversation memory manager singleton.
    try:
        logger.info("Initializing Memory Manager...")
        get_memory_manager()
        logger.info("β Memory Manager ready")
    except Exception as e:
        logger.warning(f"β οΈ Memory Manager initialization warning: {e}")

    logger.info("=" * 70)
    logger.info("β APPLICATION READY - Graph-Based Architecture Active!")
    logger.info("=" * 70)

    yield  # application serves requests while suspended here

    # SHUTDOWN — everything below is best-effort cleanup.
    logger.info("=" * 70)
    logger.info("π Shutting down Lojiz Platform + Aida AI")
    logger.info("=" * 70)
    try:
        try:
            ml = get_ml_extractor()
            ml.currency_mgr.clear_cache()
            logger.info("β ML caches cleared")
        except Exception:  # narrowed from bare `except:` (was swallowing SystemExit too)
            pass
        # disconnect_db is already imported at module top; no re-import needed.
        await disconnect_db()
        logger.info("β MongoDB disconnected")
        if redis_client:
            await redis_client.close()
            logger.info("β Redis closed")
        logger.info("β Shutdown complete")
    except Exception as e:
        logger.warning(f"β οΈ Shutdown warning: {e}")
# FASTAPI SETUP
app = FastAPI(
    title="Lojiz Platform + Aida AI",
    description="Real-estate platform with conversational AI assistant (Graph-Based Architecture)",
    version="2.0.0",
    lifespan=lifespan,  # startup/shutdown sequence defined above
)

# CORS
# Production restricts origins to the public lojiz domains; every other
# environment allows the usual local dev-server ports.
cors_origins = [
    "https://lojiz.onrender.com",
    "https://lojiz.com",
    "https://www.lojiz.com",
] if is_production else [
    "http://localhost",
    "http://localhost:3000",
    "http://localhost:5173",
    "http://localhost:8080",  # Test UI
    "http://127.0.0.1",
    "http://127.0.0.1:3000",
    "http://127.0.0.1:5173",
    "http://127.0.0.1:8080",  # Test UI
]

app.add_middleware(
    CORSMiddleware,
    allow_origins=cors_origins,
    allow_credentials=True,  # credentialed requests require the explicit origin list above
    allow_methods=["*"],
    allow_headers=["*"],
    expose_headers=["*"],
    max_age=600,  # browsers may cache preflight results for 10 minutes
)
# EXCEPTION HANDLERS
@app.exception_handler(RequestValidationError)  # FIX: handler was defined but never
                                                # registered, so it was dead code
async def validation_exception_handler(request: Request, exc: RequestValidationError):
    """Convert FastAPI request-validation failures into the API's uniform envelope.

    Returns a 400 JSON body with ``success``/``message``/``error_code`` plus a
    per-field ``errors`` list. The first element of each error's ``loc``
    (e.g. "body" or "query") is dropped so clients see only the field path.
    """
    logger.error(f"Validation error: {exc}")
    errors = [
        {
            "field": ".".join(str(loc) for loc in error["loc"][1:]),
            "message": error["msg"],
        }
        for error in exc.errors()
    ]
    return JSONResponse(
        status_code=400,
        content={
            "success": False,
            "message": "Validation error. Please check your input.",
            "error_code": "VALIDATION_ERROR",
            "errors": errors,
        },
    )
async def auth_exception_handler(request: Request, exc: AuthException):  # type: ignore
    """Translate domain AuthException instances into the uniform JSON envelope.

    Uses the exception's own ``status_code``/``message``/``error_code`` and
    attaches ``data`` only when present.
    """
    logger.warning(f"Auth error [{exc.error_code}]: {exc.message}")
    response = {"success": False, "message": exc.message, "error_code": exc.error_code}
    if exc.data:
        response["data"] = exc.data
    return JSONResponse(status_code=exc.status_code, content=response)


# FIX: the handler was never registered. Register only when the real
# AuthException class was imported — under the fallback (AuthException is
# Exception, see the guarded import at the top of the file) this would become
# a catch-all handler and crash on the missing .message/.error_code attributes.
if AuthException is not Exception:
    app.add_exception_handler(AuthException, auth_exception_handler)
# ROUTERS
logger.info("=" * 70)
logger.info("Registering routers...")
logger.info("=" * 70)

# Authentication
app.include_router(auth.router, prefix="/api/auth", tags=["Authentication"])

# ============================================================
# AI CHAT ROUTES (SINGLE GRAPH-BASED ENDPOINT)
# ============================================================
# Consolidated to single endpoint: /ai/ask (Graph-Based)
try:
    # NOTE(review): ai_chat_router is already imported at module top, where an
    # ImportError is fatal (re-raised) — so this defensive re-import cannot
    # actually fail here; confirm whether the try/except is still wanted.
    from app.ai.routes.chat import router as ai_chat_router
    app.include_router(
        ai_chat_router,
        prefix="/ai",
        tags=["AIDA AI Chat (Graph-Based)"]
    )
    logger.info("β AIDA AI Chat registered at /ai/ask (Graph-Based)")
except ImportError as e:
    logger.error(f"β AI Chat import error: {e}")

# ============================================================
# LISTING ROUTERS
# ============================================================
from app.routes.listing import router as listing_router
from app.routes.user_public import router as user_public_router
from app.routes.websocket_listings import router as ws_router

app.include_router(listing_router, prefix="/api/listings", tags=["Listings"])
app.include_router(user_public_router, prefix="/api/users", tags=["Users"])
# WebSocket router is mounted without a prefix; it defines its own paths.
app.include_router(ws_router, tags=["WebSocket"])

logger.info("=" * 70)
logger.info("β All routers registered successfully")
logger.info("=" * 70)
# ENDPOINTS
@app.get("/health")  # FIX: route decorator was missing — root() advertises /health
async def health_check():
    """Health check endpoint.

    Pings each optional backend and reports per-component status. Always
    responds 200; consumers inspect the "status"/"components" fields.
    MongoDB is reported as "connected" unconditionally because startup
    aborts when its connection fails (see lifespan).
    """
    try:
        redis_ok = False
        if redis_client:
            try:
                await redis_client.ping()
                redis_ok = True
            except Exception:  # narrowed from bare `except:`
                redis_ok = False

        qdrant_ok = False
        if qdrant_client:
            try:
                await qdrant_client.get_collections()
                qdrant_ok = True
            except Exception:  # narrowed from bare `except:`
                qdrant_ok = False

        try:
            ml_ok = get_ml_extractor() is not None
        except Exception:
            ml_ok = False

        return {
            "status": "healthy",
            "service": "Lojiz Platform + Aida AI",
            "version": "2.0.0",
            "architecture": "Graph-Based (State Machine + Validation)",
            "environment": environment,
            "ai_endpoints": {
                "primary": "/ai/v1 (Graph-Based)",
                "fallback": "/ai/v2 (Legacy)",
            },
            "components": {
                "mongodb": "connected",
                "redis": "connected" if redis_ok else "disconnected",
                "qdrant": "connected" if qdrant_ok else "disconnected",
                "ml": "ready" if ml_ok else "not ready",
            }
        }
    except Exception as e:
        logger.error(f"Health check failed: {e}")
        return {
            "status": "unhealthy",
            "error": str(e),
        }
@app.get("/")  # FIX: route decorator was missing — without it this was never served
async def root():
    """Root endpoint - API information.

    Returns static service metadata plus pointers to /docs, /health and the
    AI chat endpoints.
    """
    return {
        "message": "Welcome to Lojiz Platform + Aida AI",
        "docs": "/docs",
        "health": "/health",
        "environment": environment,
        "version": "2.0.0",
        "architecture": "Graph-Based (State Machine + Validation)",
        "description": "Real-estate platform with conversational AI assistant (Aida)",
        "ai_chat": {
            "primary": "/ai/v1/ask (Graph-Based - 95% reliable)",
            "fallback": "/ai/v2/ask (Legacy - for emergency use)",
        },
    }
@app.options("/{full_path:path}")  # FIX: catch-all decorator was missing, so this
                                   # preflight fallback was never invoked
async def options_handler(full_path: str):
    """Handle CORS preflight requests.

    Catch-all OPTIONS route that returns an empty 200 instead of a 405;
    the actual CORS headers are attached by CORSMiddleware.
    """
    return JSONResponse(status_code=200, content={})
| # RUN | |
| # To run this application: | |
| # Development: uvicorn app.main:app --reload | |
| # Production: gunicorn -w 4 -k uvicorn.workers.UvicornWorker app.main:app | |
| # HF Spaces: python app.py |