Commit a7b41f5
Parent(s): f39c031

This view is limited to 50 files because it contains too many changes.
- .gitignore +174 -0
- .gitignore:Zone.Identifier +0 -0
- Dockerfile +39 -0
- Dockerfile.hugging-face +43 -0
- Dockerfile.hugging-face:Zone.Identifier +0 -0
- Dockerfile:Zone.Identifier +0 -0
- __init__.py +0 -0
- __init__.py:Zone.Identifier +0 -0
- database/buckets/building_client.py +30 -0
- database/buckets/building_client.py:Zone.Identifier +0 -0
- database/buckets/client.py +42 -0
- database/buckets/client.py:Zone.Identifier +0 -0
- database/buckets/config.py +20 -0
- database/buckets/config.py:Zone.Identifier +0 -0
- database/metadata/building_client.py +50 -0
- database/metadata/building_client.py:Zone.Identifier +0 -0
- database/metadata/client.py +51 -0
- database/metadata/client.py:Zone.Identifier +0 -0
- database/metadata/config.py +15 -0
- database/metadata/config.py:Zone.Identifier +0 -0
- database/vectors/building_client.py +41 -0
- database/vectors/building_client.py:Zone.Identifier +0 -0
- database/vectors/client.py +15 -0
- database/vectors/client.py:Zone.Identifier +0 -0
- database/vectors/config.py +12 -0
- database/vectors/config.py:Zone.Identifier +0 -0
- logging/log.py +73 -0
- logging/log.py:Zone.Identifier +0 -0
- main.py +16 -0
- main.py:Zone.Identifier +0 -0
- models/Building.py +48 -0
- models/Building.py:Zone.Identifier +0 -0
- poetry.lock +0 -0
- poetry.lock:Zone.Identifier +0 -0
- pyproject.toml +29 -0
- pyproject.toml:Zone.Identifier +0 -0
- requirements.txt +2 -0
- routers/buildings_router.py +155 -0
- routers/buildings_router.py:Zone.Identifier +0 -0
- routers/buildings_search_router.py +44 -0
- routers/buildings_search_router.py:Zone.Identifier +0 -0
- search/models/config.py +4 -0
- search/models/config.py:Zone.Identifier +0 -0
- search/models/depth_model.py +7 -0
- search/models/depth_model.py:Zone.Identifier +0 -0
- search/models/edges_model.py +28 -0
- search/models/edges_model.py:Zone.Identifier +0 -0
- search/models/image_model.py +4 -0
- search/models/image_model.py:Zone.Identifier +0 -0
- search/models/segmentation_model.py +8 -0
.gitignore
ADDED
@@ -0,0 +1,174 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# UV
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
#uv.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
.pdm.toml
.pdm-python
.pdm-build/

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/

# Ruff stuff:
.ruff_cache/

# PyPI configuration file
.pypirc
.gitignore:Zone.Identifier
ADDED
Binary file (25 Bytes)
Dockerfile
ADDED
@@ -0,0 +1,39 @@
FROM python:3.12-slim

# Set environment variables
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1

# Install system dependencies
RUN apt-get update && \
    apt-get install -y --no-install-recommends curl build-essential \
    libglib2.0-0 \
    libsm6 \
    libxrender1 \
    libxext6 \
    && rm -rf /var/lib/apt/lists/*

# Install Poetry
RUN curl -sSL https://install.python-poetry.org | python3 -

# Add Poetry to PATH
ENV PATH="/root/.local/bin:$PATH"
ENV PYTHONPATH=/app

# Set working directory
WORKDIR /app/myapp

# Copy only pyproject.toml and poetry.lock first (caching dependencies)
COPY pyproject.toml poetry.lock* ./

# Install dependencies
RUN poetry install --no-root --only main

# Copy the full project
COPY . .

# Expose port
EXPOSE 8000

# Command to run FastAPI
CMD ["poetry", "run", "uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
Dockerfile.hugging-face
ADDED
@@ -0,0 +1,43 @@
FROM python:3.12-slim

ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1

# Install system dependencies
RUN apt-get update && \
    apt-get install -y --no-install-recommends curl build-essential \
    libglib2.0-0 \
    libsm6 \
    libxrender1 \
    libxext6 \
    && rm -rf /var/lib/apt/lists/*

RUN curl -sSL https://install.python-poetry.org | python3 -

# Add Poetry to PATH
ENV PATH="/root/.local/bin:$PATH"
ENV PYTHONPATH=/app

WORKDIR /app/myapp

COPY pyproject.toml poetry.lock* ./

# Install dependencies
RUN poetry install --no-root --only main

# Copy the full project
COPY . .

EXPOSE 7860

ENV SUPABASE_URL=""
ENV SUPABASE_KEY=""
ENV B2_KEY_ID=""
ENV B2_APP_KEY=""
ENV B2_BUCKET_NAME=""
ENV PINECONE_API_KEY=""
ENV PINECONE_REGION=""

CMD ["poetry", "run", "uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860", "--reload"]
Dockerfile.hugging-face:Zone.Identifier
ADDED
Binary file (25 Bytes)
Dockerfile:Zone.Identifier
ADDED
Binary file (25 Bytes)
__init__.py
ADDED
File without changes
__init__.py:Zone.Identifier
ADDED
Binary file (25 Bytes)
database/buckets/building_client.py
ADDED
@@ -0,0 +1,30 @@
from b2sdk.v2 import B2Api
from fastapi import HTTPException, UploadFile
from myapp.database.buckets.client import BucketClient
from myapp.database.buckets.config import BUILDING_BUCKET_NAME, b2_api
from myapp.logging.log import log_call

class BuildingBucketClient(BucketClient):
    def __init__(self, b2_api: B2Api):
        super().__init__(b2_api, BUILDING_BUCKET_NAME)

    @log_call("Uploaded model file '{remote_file_name}' to building bucket")
    async def upload_model(self, file_contents: bytes, remote_file_name: str):
        try:
            super().upload_model(file_contents, remote_file_name)
        except Exception as e:
            raise HTTPException(status_code=500, detail=f"Failed to upload model file: {str(e)}")

    @log_call("Deleted file '{remote_file_name}' from building bucket")
    async def delete_file(self, remote_file_name: str):
        super().delete_file(remote_file_name)

    async def get_all_file_names(self, path: str = "") -> list[str]:
        try:
            file_names = [file_info.file_name for file_info, _ in self.bucket.ls(path, recursive=True)]
            return file_names
        except Exception as e:
            raise HTTPException(status_code=500, detail=f"Failed to list files in bucket: {str(e)}")


building_bucket_client = BuildingBucketClient(b2_api)
database/buckets/building_client.py:Zone.Identifier
ADDED
Binary file (25 Bytes)
database/buckets/client.py
ADDED
@@ -0,0 +1,42 @@
import io
from b2sdk.v2 import B2Api
from fastapi import HTTPException

from myapp.utils.constants import ResultStatus

class BucketClient:
    def __init__(self, b2_api: B2Api, bucket_name: str):
        self.bucket = b2_api.get_bucket_by_name(bucket_name)

    def upload_model(self, file_bytes: bytes, remote_file_name: str):
        try:
            self.bucket.get_file_info_by_name(remote_file_name)
            return  # File already exists, skip upload
        except Exception:
            pass
        self.bucket.upload_bytes(file_bytes, remote_file_name)

    def get_file_by_url(self, image_url: str):
        downloaded_file = io.BytesIO()

        self.bucket.download_file_by_name(image_url).save(downloaded_file)
        downloaded_file.seek(0)

        return downloaded_file.read()

    def delete_file(self, remote_file_name: str):
        try:
            file_version = self.bucket.get_file_info_by_name(remote_file_name)
            if not file_version:
                raise HTTPException(status_code=404, detail=f"File '{remote_file_name}' not found in bucket.")
            self.bucket.delete_file_version(file_version.id_, remote_file_name)
        except Exception as e:
            raise HTTPException(status_code=500, detail=f"Failed to delete file '{remote_file_name}': {str(e)}")

    async def delete_folder(self, folder_path: str):
        try:
            files_to_delete = [file_info for file_info, _ in self.bucket.ls(folder_path, recursive=True)]
            for file_info in files_to_delete:
                self.bucket.delete_file_version(file_info.id_, file_info.file_name)
        except Exception as e:
            raise HTTPException(status_code=500, detail=f"Failed to delete folder '{folder_path}': {str(e)}")
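
Note: a minimal usage sketch of BucketClient, not part of the commit; it assumes valid B2 credentials and a bucket named by B2_BUCKET_NAME, and the file name used here is hypothetical.

# Hypothetical usage sketch: exercises BucketClient against an authorized B2Api.
from myapp.database.buckets.config import b2_api, BUILDING_BUCKET_NAME
from myapp.database.buckets.client import BucketClient

client = BucketClient(b2_api, BUILDING_BUCKET_NAME)
client.upload_model(b"model bytes", "models/demo/render.png")  # no-op if the name already exists
data = client.get_file_by_url("models/demo/render.png")        # returns the raw bytes
client.delete_file("models/demo/render.png")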
database/buckets/client.py:Zone.Identifier
ADDED
Binary file (25 Bytes)
database/buckets/config.py
ADDED
@@ -0,0 +1,20 @@
from enum import Enum
import os
from b2sdk.v2 import InMemoryAccountInfo, B2Api

BUILDING_BUCKET_NAME = os.environ.get("B2_BUCKET_NAME")
BUCKET_STORAGE_PREFIX = "https://f002.backblazeb2.com/file/"
BUILDING_BUCKET_URL_PREFIX = BUCKET_STORAGE_PREFIX + BUILDING_BUCKET_NAME + "/"

class BuildingBucketFolders(str, Enum):
    MODELS = "models/"
    IMAGES = "images/"


b2_info = InMemoryAccountInfo()
b2_api = B2Api(b2_info)
b2_api.authorize_account(
    "production",
    os.environ.get("B2_KEY_ID"),
    os.environ.get("B2_APP_KEY")
)
database/buckets/config.py:Zone.Identifier
ADDED
Binary file (25 Bytes)
database/metadata/building_client.py
ADDED
@@ -0,0 +1,50 @@
from supabase import Client
from myapp.database.metadata.client import DatabaseSQLClient
from myapp.database.metadata.config import Tables, database_client
from myapp.models.Building import BuildingColumns, BuildingCreate, BuildingUpdateMetadata
from myapp.logging.log import log_call


class BuildingSQLClient(DatabaseSQLClient):
    def __init__(self, database_connection: Client):
        super().__init__(Tables.BUILDINGS.value, database_connection)

    @log_call("Inserted building '{building.name}' into metadata database")
    def insert(self, building: BuildingCreate):
        response = self.table_request().select(BuildingColumns.ID.value).eq(BuildingColumns.NAME.value, building.name).execute()

        point_wkt = f"SRID=4326;POINT({building.location.longitude} {building.location.latitude})" if building.location else None
        data = {
            BuildingColumns.NAME.value: building.name,
            BuildingColumns.MODEL_URL.value: building.model_url,
            BuildingColumns.LOCATION.value: point_wkt
        }

        if response.data:
            building_id = response.data[0][BuildingColumns.ID.value]
            return super().update(building_id, data, BuildingColumns.ID.value)
        else:
            return super().insert(data)

    def update(self, building: BuildingUpdateMetadata):
        point_wkt = f"SRID=4326;POINT({building.location.longitude} {building.location.latitude})" if building.location else None
        data = {
            BuildingColumns.LOCATION.value: point_wkt
        }
        return super().update(building.name, data, property_name=BuildingColumns.NAME.value)

    @log_call("Deleted building '{building_name}' from metadata database")
    def delete(self, building_name: str):
        return super().delete(building_name, property_name=BuildingColumns.NAME.value)

    def record_exists(self, building_name: str) -> bool:
        return super().record_exists(building_name, property_name=BuildingColumns.NAME.value)

    def get_all_buildings_names(self) -> list[str]:
        response = self.table_request().select(BuildingColumns.NAME.value).execute()
        if response.data is None:
            return []
        return [record[BuildingColumns.NAME.value] for record in response.data]


building_metadata_client = BuildingSQLClient(database_connection=database_client)
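
Note: a minimal usage sketch of the insert-or-update flow, not part of the commit; the building name, coordinates, and folder path are hypothetical.

# Hypothetical usage sketch: insert() upserts by building name.
from myapp.database.metadata.building_client import building_metadata_client
from myapp.models.Building import BuildingCreate, Location

row = building_metadata_client.insert(BuildingCreate(
    name="Town Hall",
    location=Location(latitude=52.2297, longitude=21.0122),
    model_url="models/Town Hall/",
))
# The location column is written as EWKT, e.g. "SRID=4326;POINT(21.0122 52.2297)"
# (longitude first), and read back as GeoJSON by the base client.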
database/metadata/building_client.py:Zone.Identifier
ADDED
Binary file (25 Bytes)
database/metadata/client.py
ADDED
@@ -0,0 +1,51 @@
from fastapi import HTTPException
from postgrest import SyncRequestBuilder
from supabase import Client

from myapp.models.Building import BuildingColumns, Location

class DatabaseSQLClient:
    def __init__(self, table: str, database_connection: Client):
        self.table = table
        self.client = database_connection

    def insert(self, data: dict):
        response = self.client.table(self.table).insert(data).execute()

        if not response.data:
            raise HTTPException(status_code=500, detail=f"Supabase insert failed: {response.data}")

        inserted_row = response.data[0]
        inserted_row[BuildingColumns.LOCATION.value] = _geojson_to_location(inserted_row[BuildingColumns.LOCATION.value])

        return inserted_row

    def update(self, property_value, data: dict, property_name: str = BuildingColumns.ID.value):
        response = self.client.table(self.table).update(data).eq(property_name, property_value).execute()

        if not response.data:
            raise HTTPException(status_code=500, detail=f"Supabase update failed: {response.data}")

        updated_row = response.data[0]
        updated_row[BuildingColumns.LOCATION.value] = _geojson_to_location(updated_row[BuildingColumns.LOCATION.value])

        return updated_row

    def delete(self, property_value, property_name: str = BuildingColumns.ID.value):
        return self.client.table(self.table).delete().eq(property_name, property_value).execute()

    def record_exists(self, property_value, property_name: str = BuildingColumns.ID.value) -> bool:
        response = self.client.table(self.table).select(BuildingColumns.ID.value).eq(property_name, property_value).execute()
        return bool(response.data)

    def table_request(self) -> SyncRequestBuilder:
        return self.client.table(self.table)


def _geojson_to_location(location_geojson: dict) -> Location:
    try:
        lon, lat = location_geojson["coordinates"]
    except (KeyError, ValueError, TypeError):
        raise ValueError("Invalid GeoJSON format for Point")

    return Location(latitude=lat, longitude=lon)
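
Note: a short illustration of the GeoJSON helper, not part of the commit; the coordinates are hypothetical. PostGIS returns a Point as {"type": "Point", "coordinates": [lon, lat]}, and the helper swaps the pair into the model's latitude/longitude fields.

# Hypothetical usage sketch of the module-private helper.
from myapp.database.metadata.client import _geojson_to_location

loc = _geojson_to_location({"type": "Point", "coordinates": [21.0122, 52.2297]})
assert (loc.latitude, loc.longitude) == (52.2297, 21.0122)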
database/metadata/client.py:Zone.Identifier
ADDED
Binary file (25 Bytes)
database/metadata/config.py
ADDED
@@ -0,0 +1,15 @@
import os
from enum import Enum
from supabase import create_client, Client

url: str = os.environ.get("SUPABASE_URL")
key: str = os.environ.get("SUPABASE_KEY")
database_client: Client = create_client(url, key)


class Tables(str, Enum):
    BUILDINGS = "buildings"


class DatabaseQueries(str, Enum):
    GET_NEARBY_PLACES = "get_nearby_places"
database/metadata/config.py:Zone.Identifier
ADDED
Binary file (25 Bytes)
database/vectors/building_client.py
ADDED
@@ -0,0 +1,41 @@
import hashlib
from pinecone import Pinecone, QueryResponse
from myapp.database.vectors.client import DatabaseVectorClient
from myapp.database.vectors.config import VectorIndexes, pinecone_client
from myapp.logging.log import log_call
from myapp.models.Building import BuildingColumns, BuildingVector


class BuildingVectorClient(DatabaseVectorClient):
    def __init__(self, database_connection: Pinecone):
        super().__init__(database_connection, VectorIndexes.BUILDINGS.value)

    def upsert_vectors(self, vectors: list[BuildingVector]):
        building_vectors = []
        for vector in vectors:
            vector_id = self._hash_image_url(vector.img_bytes)
            building_vectors.append({
                BuildingColumns.ID.value: vector_id,
                "values": vector.values,
                "metadata": {BuildingColumns.MODEL_URL.value: vector.model_url, "render_url": vector.render_url}
            })
        return super().upsert_vectors(building_vectors)

    @log_call("Searched top {top_k} vectors in building vector index")
    def search(self, vector: list[float], top_k: int = 5, include_metadata: bool = True, filter: dict = None) -> QueryResponse:
        return super().search(vector, top_k, include_metadata, filter)

    @log_call("Deleted vectors with url: {model_url} from building vector index")
    def delete_by_url(self, model_url: str):
        """
        Deletes all vectors whose metadata.model_url matches the given URL.
        """
        self.index.delete(filter={"model_url": {"$eq": model_url}})

    def _hash_image_url(self, img_bytes: bytes) -> str:
        """Generate SHA-256 hash of image content for a stable ID."""
        return hashlib.sha256(img_bytes).hexdigest()


building_vector_client = BuildingVectorClient(database_connection=pinecone_client)
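
Note: a minimal upsert sketch, not part of the commit; the embedding dimension (512) and the paths are assumptions, and the vector values must match the dimension of the Pinecone "models-renderings" index.

# Hypothetical usage sketch: the vector ID is the SHA-256 of the image bytes,
# so re-upserting the same render is idempotent.
from myapp.database.vectors.building_client import building_vector_client
from myapp.models.Building import BuildingVector

building_vector_client.upsert_vectors([BuildingVector(
    values=[0.0] * 512,                 # embedding; length is an assumption
    img_bytes=b"...raw png bytes...",
    model_url="models/Town Hall/",
    render_url="models/Town Hall/front.png",
)])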
database/vectors/building_client.py:Zone.Identifier
ADDED
Binary file (25 Bytes)
database/vectors/client.py
ADDED
@@ -0,0 +1,15 @@
from pinecone import Pinecone, QueryResponse


class DatabaseVectorClient:
    def __init__(self, pinecone_client: Pinecone, index_name: str):
        self.index_name = index_name
        self.index = pinecone_client.Index(index_name)

    def upsert_vectors(self, vectors: list[dict]):
        self.index.upsert(vectors)

    def search(self, vector: list[float], top_k: int = 5, include_metadata: bool = True, filter: dict = None) -> QueryResponse:
        return self.index.query(vector=vector, top_k=top_k, include_metadata=include_metadata, filter=filter)
database/vectors/client.py:Zone.Identifier
ADDED
Binary file (25 Bytes)
database/vectors/config.py
ADDED
@@ -0,0 +1,12 @@
from enum import StrEnum
import os
from pinecone import Pinecone


PINECONE_API_KEY = os.getenv("PINECONE_API_KEY")
PINECONE_REGION = os.getenv("PINECONE_REGION")

pinecone_client = Pinecone(api_key=PINECONE_API_KEY)

class VectorIndexes(StrEnum):
    BUILDINGS = "models-renderings"
database/vectors/config.py:Zone.Identifier
ADDED
Binary file (25 Bytes)
logging/log.py
ADDED
@@ -0,0 +1,73 @@
import logging
import json
from functools import wraps
from inspect import signature

# Configure logging
logging.basicConfig(level=logging.INFO, format="%(message)s")

def log_call(message: str, log_return: bool = False):
    """
    Decorator for string-based logging with argument placeholders.
    Example: @log_call("Processing user {user_id}", log_return=True)
    """
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            sig = signature(func)
            bound_args = sig.bind(*args, **kwargs)
            bound_args.apply_defaults()

            # Pre-call log
            try:
                log_msg = message.format(**bound_args.arguments)
            except Exception as e:
                log_msg = f"{message} | (formatting failed: {e})"
            logging.info(log_msg)

            try:
                result = func(*args, **kwargs)
                if log_return:
                    logging.info("Return value: %r", result)
                return result
            except Exception as e:
                logging.exception("Exception in %s: %s", func.__name__, e)
                raise
        return wrapper
    return decorator


def log_call_json(log_return: bool = False):
    """
    Decorator for JSON-based logging.
    Captures function name, args, return value, and errors.
    """
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            sig = signature(func)
            bound_args = sig.bind(*args, **kwargs)
            bound_args.apply_defaults()

            log_entry = {
                "function": func.__name__,
                "args": dict(bound_args.arguments),
            }

            try:
                result = func(*args, **kwargs)
                if log_return:
                    log_entry["result"] = result
                log_entry["status"] = "success"
                logging.info(json.dumps(log_entry, default=str))
                return result
            except Exception as e:
                log_entry["status"] = "error"
                log_entry["error"] = {
                    "type": type(e).__name__,
                    "message": str(e),
                }
                logging.error(json.dumps(log_entry, default=str))
                raise
        return wrapper
    return decorator
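
Note: a short usage sketch of the @log_call decorator, not part of the commit; it mirrors the docstring example. Placeholders in the message are filled from the bound call arguments before the function runs.

# Hypothetical usage sketch.
from myapp.logging.log import log_call

@log_call("Processing user {user_id}", log_return=True)
def get_user(user_id: int):
    return {"id": user_id}

get_user(42)
# INFO output: "Processing user 42", then "Return value: {'id': 42}"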
logging/log.py:Zone.Identifier
ADDED
Binary file (25 Bytes)
main.py
ADDED
@@ -0,0 +1,16 @@
from fastapi import FastAPI
from myapp.routers import buildings_router
from myapp.routers import buildings_search_router

app = FastAPI(title="3D Model Search API", version="1.0.0")

app.include_router(buildings_router.router)
app.include_router(buildings_search_router.router)

@app.get("/", response_description="API is running")
async def root():
    """
    Root endpoint
    """
    return {"message": "API is running"}
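
Note: a minimal smoke test of the root endpoint, not part of the commit; it assumes FastAPI's TestClient (which requires httpx) and that the app module is importable as in the Docker image's "main:app".

# Hypothetical smoke test.
from fastapi.testclient import TestClient
from main import app  # import path assumed, matching uvicorn's "main:app"

client = TestClient(app)
assert client.get("/").json() == {"message": "API is running"}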
main.py:Zone.Identifier
ADDED
Binary file (25 Bytes)
models/Building.py
ADDED
@@ -0,0 +1,48 @@
from typing import List, Optional
from enum import Enum
from datetime import datetime
from pydantic import BaseModel, Field

class Location(BaseModel):
    latitude: float = Field(...)
    longitude: float = Field(...)


class BuildingCreate(BaseModel):
    """
    Model of 3D Building
    """
    name: str = Field(...)
    location: Location = Field(...)
    model_url: Optional[str] = Field(None)


class Building(BaseModel):
    id: str
    name: str
    model_url: str
    location: Location
    created_at: datetime


class BuildingUpdateMetadata(BaseModel):
    name: str = Field(...)
    location: Location = Field(...)

class BuildingSearch(BaseModel):
    id: str
    model_url: str

class BuildingVector(BaseModel):
    values: List[float]
    img_bytes: bytes
    model_url: str
    render_url: str


class BuildingColumns(Enum):
    ID = "id"
    NAME = "name"
    MODEL_URL = "model_url"
    LOCATION = "location"
    CREATED_AT = "created_at"
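
Note: a short parsing sketch, not part of the commit; the payload values are hypothetical. This is how the add_building route deserializes the JSON string it receives as a multipart form field.

# Hypothetical usage sketch (Pydantic v2).
from myapp.models.Building import BuildingCreate

payload = '{"name": "Town Hall", "location": {"latitude": 52.2297, "longitude": 21.0122}}'
building = BuildingCreate.model_validate_json(payload)  # model_url defaults to None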
models/Building.py:Zone.Identifier
ADDED
Binary file (25 Bytes)
poetry.lock
ADDED
The diff for this file is too large to render.
poetry.lock:Zone.Identifier
ADDED
Binary file (25 Bytes)
pyproject.toml
ADDED
@@ -0,0 +1,29 @@
[project]
name = "myapp"
version = "0.1.0"
description = ""
authors = [
    {name = "Your Name", email = "[email protected]"}
]
requires-python = ">=3.12,<3.14"
dependencies = [
    "fastapi (>=0.116.1,<0.117.0)",
    "uvicorn (>=0.35.0,<0.36.0)",
    "pydantic (>=2.11.9,<3.0.0)",
    "supabase (>=2.18.1,<3.0.0)",
    "b2sdk (>=2.10.0,<3.0.0)",
    "python-multipart (>=0.0.20,<0.0.21)",
    "torch (>=2.8.0,<3.0.0)",
    "pinecone (>=7.3.0,<8.0.0)",
    "pillow (>=11.3.0,<12.0.0)",
    "clip @ git+https://github.com/openai/CLIP.git",
    "redis (>=6.4.0,<7.0.0)",
    "transformers (>=4.56.2,<5.0.0)",
    "onnxruntime (>=1.23.0,<2.0.0)",
    "opencv-python-headless (>=4.12.0.88,<5.0.0.0)",
]


[build-system]
requires = ["poetry-core>=2.0.0,<3.0.0"]
build-backend = "poetry.core.masonry.api"
pyproject.toml:Zone.Identifier
ADDED
Binary file (25 Bytes)
requirements.txt
ADDED
@@ -0,0 +1,2 @@
fastapi
uvicorn[standard]
routers/buildings_router.py
ADDED
@@ -0,0 +1,155 @@
from typing import List
from fastapi import APIRouter, Body, File, Form, HTTPException, UploadFile, status
from myapp.database.metadata.building_client import building_metadata_client
from myapp.database.buckets.building_client import building_bucket_client
from myapp.database.vectors.building_client import building_vector_client
from myapp.models.Building import Building, BuildingCreate, BuildingUpdateMetadata, BuildingVector, BuildingColumns
from myapp.database.buckets.config import BuildingBucketFolders
from myapp.search.preprocess.embed_models import get_image_embeddings, process_building
from myapp.utils.image_utils import get_image_stream


router = APIRouter(
    prefix="/buildings",
    tags=["buildings"]
)


@router.post(
    "/add/",
    response_description="Building has been successfully added!",
    status_code=status.HTTP_200_OK,
    response_model=Building,
)
async def add_building(
    building = Form(...),
    model_renders_files: List[UploadFile] = File(None),
):
    building: BuildingCreate = BuildingCreate.model_validate_json(building)

    if not model_renders_files or len(model_renders_files) < 4:
        raise HTTPException(status_code=400, detail="At least 4 model renders must be provided.")

    # Upload model files to Bucket Storage
    renders_vectors: List[BuildingVector] = []
    file_folder_name = f"{BuildingBucketFolders.MODELS.value}{building.name}/"
    print("Processing files...", building.name)
    for model_render_file in model_renders_files:
        remote_file_name: str = file_folder_name + model_render_file.filename
        img_bytes = await model_render_file.read()
        # await building_bucket_client.upload_model(img_bytes, remote_file_name)
        # image = get_image_stream(img_bytes)
        img_embeddings = process_building(img_bytes)
        renders_vectors.append(BuildingVector(
            values=img_embeddings,
            img_bytes=img_bytes,
            model_url=file_folder_name,
            render_url=remote_file_name
        ))

    # Upsert model renders vectors into Vector DB
    print("Uploading vectors...")
    for batch_start in range(0, len(renders_vectors), 25):
        batch = renders_vectors[batch_start:batch_start + 25]
        if batch:
            building_vector_client.upsert_vectors(batch)

    # Insert building metadata into database
    building.model_url = file_folder_name
    building = building_metadata_client.insert(building)
    if not building:
        raise HTTPException(status_code=500, detail="Failed to insert building model.")

    return building


@router.delete(
    "/{building_name}/",
    response_description="Building has been successfully deleted!",
    status_code=status.HTTP_200_OK,
)
async def delete_building(building_name: str):
    """
    Delete building by name
    """
    if not building_metadata_client.record_exists(building_name):
        raise HTTPException(status_code=404, detail=f"Building with name '{building_name}' not found.")

    response = building_metadata_client.table_request().select(BuildingColumns.MODEL_URL.value).eq(BuildingColumns.NAME.value, building_name).execute()
    model_url = response.data[0].get(BuildingColumns.MODEL_URL.value, []) if response.data else []

    # Delete renders from Bucket Storage
    # try:
    #     await building_bucket_client.delete_folder(model_url)
    # except Exception as e:
    #     raise HTTPException(status_code=500, detail=f"Failed to delete building files from bucket: {str(e)}")

    # Delete vectors from Vector DB
    building_vector_client.delete_by_url(model_url)

    # Delete building metadata from database
    response = building_metadata_client.delete(building_name)
    if not response.data:
        raise HTTPException(status_code=500, detail="Failed to delete building.")

    return {"message": f"Building with name '{building_name}' has been deleted along with its model files."}


@router.get(
    "/",
    response_description="List all building names",
    status_code=status.HTTP_200_OK,
    response_model=list[str],
)
async def get_all_buildings_names() -> list[str]:
    """
    Return the list of building names stored in the database.
    """
    # Note: the metadata client method is synchronous, so it is not awaited.
    return building_metadata_client.get_all_buildings_names()


@router.post(
    "/update_metadata/",
    response_description="Building metadata has been successfully updated!",
    status_code=status.HTTP_200_OK,
)
async def update_building_metadata(building: BuildingUpdateMetadata = Body(...)):
    """
    Update the stored metadata (location) of an existing building
    """
    building = building_metadata_client.update(building)
    return building


# @router.post(
#     "/add_model/",
#     response_description="3D Model has been successfully added!",
#     status_code=status.HTTP_200_OK,
# )
# async def add_building_model(
#     building_name: str = Form(...),
#     model_file: UploadFile = File(...),
# ):
#     """
#     Upload 3D model file to existing building
#     """
#     response = building_metadata_client.table_request().select(BuildingColumns.MODELS_URLS.value).eq(BuildingColumns.NAME.value, building_name).execute()
#     if not response.data:
#         raise HTTPException(status_code=404, detail=f"Building with name '{building_name}' not found.")

#     current_models_urls: list[str] = response.data[0].get(BuildingColumns.MODELS_URLS.value, [])
#     file_folder_name = f"{BuildingBucketFolders.MODELS.value}{building_name}/"
#     remote_file_name = file_folder_name + {model_file.filename}
#     new_model_url = BUILDING_BUCKET_URL_PREFIX + remote_file_name

#     if new_model_url in current_models_urls:
#         raise HTTPException(status_code=400, detail=f"Model file already exists in the building's {BuildingColumns.MODELS_URLS.value}.")

#     # Upload the model file to Bucket Storage
#     building_bucket_client.upload_model(model_file, remote_file_name)

#     # Update the building's models_urls in the database
#     updated_urls = current_models_urls + [file_folder_name]
#     update_response = building_metadata_client.table_request().update({BuildingColumns.MODELS_URLS.value: updated_urls}).eq(BuildingColumns.NAME.value, building_name).execute()
#     return update_response
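
Note: a hypothetical client-side sketch for the /buildings/add/ endpoint, not part of the commit; the host, port, file names, and payload values are assumptions. The endpoint expects the building as a JSON string form field plus at least four render files.

# Hypothetical client sketch using httpx.
import httpx, json

files = [("model_renders_files", (f"render{i}.png", open(f"render{i}.png", "rb"), "image/png"))
         for i in range(4)]
data = {"building": json.dumps({"name": "Town Hall",
                                "location": {"latitude": 52.2297, "longitude": 21.0122}})}
r = httpx.post("http://localhost:8000/buildings/add/", data=data, files=files, timeout=None)
print(r.status_code, r.json())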
routers/buildings_router.py:Zone.Identifier
ADDED
Binary file (25 Bytes)
routers/buildings_search_router.py
ADDED
@@ -0,0 +1,44 @@
from typing import List
from fastapi import APIRouter, File, Form, HTTPException, UploadFile, status
from fastapi.responses import Response
from myapp.database.vectors.building_client import building_vector_client
from myapp.database.buckets.building_client import building_bucket_client
from myapp.models.Building import BuildingColumns, BuildingSearch, Location
from myapp.search.search_utils import get_buildings_in_proximity, search_best_match


router = APIRouter(
    prefix="/buildings_search",
    tags=["buildings_search"]
)


@router.post(
    "/find/",
    response_description="Get building",
    status_code=status.HTTP_200_OK,
)
async def find_historical_models(location = Form(...), building_image: UploadFile = File(...)):
    """
    Search for the place matching the photo
    """
    lat, lon = map(float, location.split(","))
    location = Location(latitude=lat, longitude=lon)

    nearby_buildings: List[BuildingSearch] = get_buildings_in_proximity(location.latitude, location.longitude)
    models_to_search = [building[BuildingColumns.MODEL_URL.value] for building in nearby_buildings]

    # if not models_to_search:
    #     raise HTTPException(status_code=404, detail="No nearby buildings found.")

    img_bytes = await building_image.read()
    result_buildings = search_best_match(img_bytes, models_to_search, building_vector_client)
    building_found_render_urls = [match["metadata"]["render_url"] for match in result_buildings if "metadata" in match and "render_url" in match["metadata"]]

    if not building_found_render_urls:
        raise HTTPException(status_code=404, detail="No matching renders found.")

    image_url = building_found_render_urls[0]
    image_file = building_bucket_client.get_file_by_url(image_url)

    return Response(content=image_file, media_type="image/png")
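
Note: a hypothetical client-side sketch for the /buildings_search/find/ endpoint, not part of the commit; host, port, and file names are assumptions. The location is sent as a "lat,lon" string and the response body is the best-matching render as PNG bytes.

# Hypothetical client sketch using httpx.
import httpx

r = httpx.post(
    "http://localhost:8000/buildings_search/find/",
    data={"location": "52.2297,21.0122"},
    files={"building_image": ("photo.jpg", open("photo.jpg", "rb"), "image/jpeg")},
    timeout=None,
)
open("match.png", "wb").write(r.content)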
routers/buildings_search_router.py:Zone.Identifier
ADDED
Binary file (25 Bytes)
search/models/config.py
ADDED
@@ -0,0 +1,4 @@
import torch


device = "cuda" if torch.cuda.is_available() else "cpu"
search/models/config.py:Zone.Identifier
ADDED
Binary file (25 Bytes)
search/models/depth_model.py
ADDED
@@ -0,0 +1,7 @@
from transformers import DPTImageProcessor, DPTForDepthEstimation
from myapp.search.models.config import device


depth_model_name = "Intel/dpt-hybrid-midas"
depth_processor = DPTImageProcessor.from_pretrained(depth_model_name)
depth_model = DPTForDepthEstimation.from_pretrained(depth_model_name).to(device)
search/models/depth_model.py:Zone.Identifier
ADDED
Binary file (25 Bytes)
search/models/edges_model.py
ADDED
@@ -0,0 +1,28 @@
import torch
import torch.nn as nn
import torchvision.models as models
import torchvision.transforms as transforms


class ResNet512(nn.Module):
    def __init__(self):
        super(ResNet512, self).__init__()
        resnet = models.resnet18(weights=models.ResNet18_Weights.DEFAULT)
        self.features = nn.Sequential(*list(resnet.children())[:-1])
        self.fc = nn.Linear(resnet.fc.in_features, 512)

    def forward(self, x):
        x = self.features(x)
        x = torch.flatten(x, 1)
        x = self.fc(x)
        return x

edges_model = ResNet512()
edges_model.eval()


edges_transform = transforms.Compose([
    transforms.Resize((224, 224)),
    transforms.ToTensor(),
    transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
])
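
Note: a minimal embedding sketch, not part of the commit; the image file name is hypothetical. The transform resizes to 224x224 and normalizes with ImageNet statistics, and the model maps the pooled ResNet-18 features to a 512-dimensional vector.

# Hypothetical usage sketch: embed one RGB image.
import torch
from PIL import Image

img = Image.open("render.png").convert("RGB")
with torch.no_grad():
    emb = edges_model(edges_transform(img).unsqueeze(0))  # shape: (1, 512)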
search/models/edges_model.py:Zone.Identifier
ADDED
Binary file (25 Bytes)
search/models/image_model.py
ADDED
@@ -0,0 +1,4 @@
import clip
from myapp.search.models.config import device

image_model, image_processor = clip.load("ViT-B/32", device=device)
search/models/image_model.py:Zone.Identifier
ADDED
Binary file (25 Bytes)
search/models/segmentation_model.py
ADDED
@@ -0,0 +1,8 @@
from transformers import AutoImageProcessor, AutoModelForSemanticSegmentation
from myapp.search.models.config import device


seg_model_name = "nvidia/segformer-b0-finetuned-ade-512-512"
seg_processor = AutoImageProcessor.from_pretrained(seg_model_name, use_fast=True)
seg_model = AutoModelForSemanticSegmentation.from_pretrained(seg_model_name).to(device)
seg_model.config.output_hidden_states = True