Commit 37d2311
Parent: 91d001e
- main.py +42 -0
- poetry.lock +0 -0
- pyproject.toml +26 -0
main.py
ADDED
@@ -0,0 +1,42 @@
+from contextlib import asynccontextmanager
+from fastapi import FastAPI
+from myapp.routers import buildings_router
+from myapp.routers import buildings_search_router
+from myapp.routers import prototypes_router
+from myapp.training.utils.load_best_model import load_best_model
+from myapp.config.model_config import TRAINED_CLASSIFICATION_MODEL
+
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+    print("Starting up: Loading Neural Network Model...")
+    try:
+        model, prototype_tensor, class_ids, feature_extractor, id_to_tag, tag_to_id = load_best_model()
+        TRAINED_CLASSIFICATION_MODEL["model"] = model
+        TRAINED_CLASSIFICATION_MODEL["prototype_tensor"] = prototype_tensor
+        TRAINED_CLASSIFICATION_MODEL["class_ids"] = class_ids
+        TRAINED_CLASSIFICATION_MODEL["feature_extractor"] = feature_extractor
+        TRAINED_CLASSIFICATION_MODEL["id_to_tag"] = id_to_tag
+        TRAINED_CLASSIFICATION_MODEL["tag_to_id"] = tag_to_id
+        TRAINED_CLASSIFICATION_MODEL["model"].eval()
+        print("Model loaded.")
+    except Exception as e:
+        print(f"Failed to load model: {e}")
+
+    yield
+
+    print("Shutting down: Releasing resources.")
+    TRAINED_CLASSIFICATION_MODEL.clear()
+
+app = FastAPI(title="3D Model Search API", version="1.0.0", lifespan=lifespan)
+
+app.include_router(buildings_router.router)
+app.include_router(buildings_search_router.router)
+app.include_router(prototypes_router.router)
+
+@app.get("/", response_description="API is running")
+async def root():
+    """
+    Root endpoint
+    """
+    return {"message": "API is running"}
+
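For context, here is a minimal sketch of how the startup hook above is typically consumed. The contents of myapp/config/model_config.py and the health endpoint shown are assumptions, not part of this commit; the only thing main.py requires is that TRAINED_CLASSIFICATION_MODEL be a module-level dict that lifespan() fills at startup and clears at shutdown.

# myapp/config/model_config.py (assumed): a plain module-level dict used as a
# process-wide registry for the loaded model and its metadata.
TRAINED_CLASSIFICATION_MODEL: dict = {}

# Hypothetical router showing how a request handler might read that registry.
# The router and the /health/model path are illustrative, not from this commit.
from fastapi import APIRouter, HTTPException

router = APIRouter()

@router.get("/health/model")
async def model_health():
    # If load_best_model() failed during startup, the registry stays empty,
    # so report 503 instead of hitting a KeyError inside inference code.
    if "model" not in TRAINED_CLASSIFICATION_MODEL:
        raise HTTPException(status_code=503, detail="Model not loaded")
    return {"loaded_keys": sorted(TRAINED_CLASSIFICATION_MODEL.keys())}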
poetry.lock
ADDED
The diff for this file is too large to render.
pyproject.toml
ADDED
@@ -0,0 +1,26 @@
+[project]
+name = "myapp"
+version = "0.1.0"
+description = ""
+authors = [
+    {name = "Your Name",email = "[email protected]"}
+]
+requires-python = ">=3.12,<3.14"
+dependencies = [
+    "fastapi (>=0.116.1,<0.117.0)",
+    "uvicorn (>=0.35.0,<0.36.0)",
+    "pydantic (>=2.11.9,<3.0.0)",
+    "supabase (>=2.18.1,<3.0.0)",
+    "b2sdk (>=2.10.0,<3.0.0)",
+    "python-multipart (>=0.0.20,<0.0.21)",
+    "torch (>=2.8.0,<3.0.0)",
+    "pinecone (>=7.3.0,<8.0.0)",
+    "pillow (>=11.3.0,<12.0.0)",
+    "transformers (>=4.56.2,<5.0.0)",
+    "pandas (>=2.3.3,<3.0.0)",
+]
+
+
+[build-system]
+requires = ["poetry-core>=2.0.0,<3.0.0"]
+build-backend = "poetry.core.masonry.api"