"""FastAPI application entry point for the 3D Model Search API.

Wires up the routers and manages the lifecycle of the shared
classification model via the lifespan context manager.
"""

import logging
from contextlib import asynccontextmanager

from fastapi import FastAPI

from myapp.config.model_config import TRAINED_CLASSIFICATION_MODEL
from myapp.routers import (
    buildings_router,
    buildings_search_router,
    prototypes_router,
)
from training.helpers.load_best_model import load_best_model

logger = logging.getLogger(__name__)


@asynccontextmanager
async def lifespan(app: FastAPI):
    """Load the classification model on startup and release it on shutdown.

    Startup is best-effort: if loading fails, the error is logged (with
    traceback) and the API still starts — endpoints that need the model
    must cope with `TRAINED_CLASSIFICATION_MODEL` being empty.
    """
    logger.info("Starting up: Loading Neural Network Model...")
    try:
        (
            model,
            prototype_tensor,
            class_ids,
            feature_extractor,
            id_to_tag,
            tag_to_id,
        ) = load_best_model()
        # Switch to inference mode *before* publishing the model so no
        # request can ever see it in training mode.
        model.eval()
        TRAINED_CLASSIFICATION_MODEL.update(
            model=model,
            prototype_tensor=prototype_tensor,
            class_ids=class_ids,
            feature_extractor=feature_extractor,
            id_to_tag=id_to_tag,
            tag_to_id=tag_to_id,
        )
        logger.info("Model loaded.")
    except Exception:
        # Deliberate best-effort: keep the app bootable even without a
        # model; logger.exception preserves the full traceback.
        logger.exception("Failed to load model")
    yield
    logger.info("Shutting down: Releasing resources.")
    # Drop all references so the model and tensors can be garbage-collected.
    TRAINED_CLASSIFICATION_MODEL.clear()


app = FastAPI(title="3D Model Search API", version="1.0.0", lifespan=lifespan)

app.include_router(buildings_router.router)
app.include_router(buildings_search_router.router)
app.include_router(prototypes_router.router)


@app.get("/", response_description="API is running")
async def root():
    """Root endpoint — lightweight health check."""
    return {"message": "API is running"}