s3 storage
- Dockerfile +1 -3
- docker-compose.yml +25 -1
- frontend/src/pages/AnalyticsPage/AnalyticsPage.tsx +0 -1
- frontend/src/pages/MapDetailsPage/MapDetailPage.tsx +0 -9
- frontend/src/pages/UploadPage/UploadPage.tsx +0 -2
- py_backend/alembic/env.py +17 -21
- py_backend/alembic/versions/b8fc40bfe3c7_initial_schema_seed.py +0 -3
- py_backend/app/database.py +4 -8
- py_backend/app/images.py +1 -1
- py_backend/app/main.py +64 -39
- py_backend/app/routers/caption.py +3 -10
- py_backend/app/routers/images.py +7 -17
- py_backend/app/routers/upload.py +0 -6
- py_backend/app/services/vlm_service.py +0 -2
- py_backend/app/storage.py +7 -47
- py_backend/tests/test_explore_page.py +0 -21
- py_backend/tests/test_openai_integration.py +0 -6
- py_backend/tests/test_upload_flow.py +1 -18
- start-local.bat +39 -0
- start-local.sh +50 -0
Dockerfile
CHANGED
@@ -28,13 +28,11 @@
 # Copy built frontend into the image (served by FastAPI)
 COPY --from=fe /fe/dist /app/static
 
-# Data dirs & sensible defaults
+# Data dirs & sensible defaults
 RUN mkdir -p /data/uploads && chmod -R 777 /data
-ENV STORAGE_PROVIDER=local
 ENV STORAGE_DIR=/data/uploads
 ENV HF_HOME=/data/.cache/huggingface
 
-# Spaces provides PORT; default to 7860 locally
 ENV PORT=7860
 EXPOSE 7860
 
docker-compose.yml
CHANGED
@@ -9,7 +9,7 @@ services:
       POSTGRES_PASSWORD: promptaid
       POSTGRES_DB: promptaid
     ports:
-      - "5433:5432"
+      - "5434:5432" # Changed from 5433 to avoid conflict
     volumes:
       - pgdata:/var/lib/postgresql/data
 
@@ -31,6 +31,7 @@ services:
     environment:
       MINIO_ROOT_USER: promptaid
       MINIO_ROOT_PASSWORD: promptaid
+      MINIO_DEFAULT_BUCKETS: promptaid
     ports:
       - "9000:9000"
      - "9001:9001"
@@ -38,6 +39,29 @@ services:
       - minio_data:/data
     depends_on:
       - postgres
+    healthcheck:
+      test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
+      interval: 30s
+      timeout: 20s
+      retries: 3
+
+  app:
+    build: .
+    ports:
+      - "8000:8000"
+    env_file:
+      - .env
+    environment:
+      - DATABASE_URL=postgresql://promptaid:promptaid@postgres:5432/promptaid
+      - S3_ENDPOINT=http://minio:9000 # Override for container networking
+    depends_on:
+      postgres:
+        condition: service_healthy
+      minio:
+        condition: service_healthy
+    volumes:
+      - ./py_backend:/app
+      - /app/__pycache__
 
 volumes:
   pgdata:
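As a quick manual check of the new healthcheck target, a minimal sketch that probes the same MinIO liveness endpoint from the host; it assumes the "9000:9000" port mapping above and the third-party requests package.

# Probe the same endpoint the compose healthcheck curls inside the container.
import requests

def minio_is_live(base_url: str = "http://localhost:9000") -> bool:
    """Return True if MinIO answers its liveness probe with HTTP 200."""
    try:
        resp = requests.get(f"{base_url}/minio/health/live", timeout=5)
        return resp.status_code == 200
    except requests.RequestException:
        return False

if __name__ == "__main__":
    print("MinIO live:", minio_is_live())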
frontend/src/pages/AnalyticsPage/AnalyticsPage.tsx
CHANGED
@@ -184,7 +184,6 @@ export default function AnalyticsPage() {
       setTypesLookup(types);
       setRegionsLookup(regions);
     } catch {
-      // Silently handle errors for lookup data
     }
   }, []);
 
frontend/src/pages/MapDetailsPage/MapDetailPage.tsx
CHANGED
@@ -46,12 +46,10 @@ export default function MapDetailPage() {
   const [regions, setRegions] = useState<{r_code: string, label: string}[]>([]);
   const [countries, setCountries] = useState<{c_code: string, label: string, r_code: string}[]>([]);
 
-  // Carousel state
   const [hasPrevious, setHasPrevious] = useState(false);
   const [hasNext, setHasNext] = useState(false);
   const [isNavigating, setIsNavigating] = useState(false);
 
-  // Search and filter state
   const [search, setSearch] = useState('');
   const [srcFilter, setSrcFilter] = useState('');
   const [catFilter, setCatFilter] = useState('');
@@ -75,7 +73,6 @@ export default function MapDetailPage() {
       const data = await response.json();
       setMap(data);
 
-      // Check for previous/next items
       await checkNavigationAvailability(id);
     } catch (err: unknown) {
       setError(err instanceof Error ? err.message : 'Unknown error occurred');
@@ -97,7 +94,6 @@ export default function MapDetailPage() {
 
   const checkNavigationAvailability = async (currentId: string) => {
     try {
-      // Fetch all image IDs to determine navigation
       const response = await fetch('/api/images');
       if (response.ok) {
         const images = await response.json();
@@ -123,10 +119,8 @@ export default function MapDetailPage() {
 
     let targetIndex: number;
     if (direction === 'previous') {
-      // Wrap around to the last item if at the beginning
       targetIndex = currentIndex === 0 ? images.length - 1 : currentIndex - 1;
     } else {
-      // Wrap around to the first item if at the end
       targetIndex = currentIndex === images.length - 1 ? 0 : currentIndex + 1;
     }
 
@@ -155,18 +149,15 @@ export default function MapDetailPage() {
 
   const [isGenerating, setIsGenerating] = useState(false);
 
-  // Filter the current map based on search and filter criteria
   const filteredMap = useMemo(() => {
     if (!map) return null;
 
-    // Check if map matches search criteria
     const matchesSearch = !search ||
       map.title?.toLowerCase().includes(search.toLowerCase()) ||
      map.generated?.toLowerCase().includes(search.toLowerCase()) ||
       map.source?.toLowerCase().includes(search.toLowerCase()) ||
       map.event_type?.toLowerCase().includes(search.toLowerCase());
 
-    // Check if map matches filter criteria
     const matchesSource = !srcFilter || map.source === srcFilter;
     const matchesCategory = !catFilter || map.event_type === catFilter;
     const matchesRegion = !regionFilter ||
frontend/src/pages/UploadPage/UploadPage.tsx
CHANGED
@@ -344,7 +344,6 @@ export default function UploadPage() {
     if (!imageUrl) return;
     setIsLoading(true);
     try {
-      // 1) Create a NEW image from server-side URL fetch
       const res = await fetch('/api/contribute/from-url', {
         method: 'POST',
         headers: { 'Content-Type': 'application/json' },
@@ -463,7 +462,6 @@ export default function UploadPage() {
         throw new Error((json.error as string) || `Delete failed with status ${res.status}`);
       }
 
-      // If this was a contribution, navigate to explore page
      if (searchParams.get('isContribution') === 'true') {
         navigate('/explore');
       } else {
py_backend/alembic/env.py
CHANGED
@@ -2,10 +2,8 @@ import os
 import sys
 from dotenv import load_dotenv
 
-# Load local .env when running migrations locally
 load_dotenv()
 
-# Allow "from app import ..." imports
 sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
 
 from alembic import context
@@ -13,14 +11,14 @@ from sqlalchemy import create_engine, pool
 try:
     from app.models import Base
     target_metadata = Base.metadata
-    print(…)
+    print("Successfully imported models from app.models")
 except ImportError as e:
-    print(f"…")
-    print(f"…")
-    print(f"…")
+    print(f"Could not import app.models: {e}")
+    print(f"Current working directory: {os.getcwd()}")
+    print(f"Python path: {sys.path}")
     from sqlalchemy import MetaData
     target_metadata = MetaData()
-    print(…)
+    print("Using fallback metadata - migrations may not work properly")
 
 config = context.config
 target_metadata = Base.metadata
@@ -35,17 +33,15 @@ def _get_db_url() -> str:
     if not url:
         raise RuntimeError("Set ALEMBIC_DATABASE_URL or DATABASE_URL for Alembic migrations.")
 
-    print(f"…")
+    print(f"Alembic database URL: {url}")
 
-    # Clean the URL if it starts with 'psql ' (common in some environments)
     if url.startswith("psql '") and url.endswith("'"):
-        url = url[6:-1]
-        print(f"…")
+        url = url[6:-1]
+        print(f"Cleaned URL: {url}")
 
-    # Only add sslmode=require for remote connections, not localhost
     if "sslmode=" not in url and "localhost" not in url and "127.0.0.1" not in url:
         url = f"{url}{'&' if '?' in url else '?'}sslmode=require"
-        print(f"…")
+        print(f"Added sslmode: {url}")
 
     return url
 
@@ -69,13 +65,13 @@ def run_migrations_online() -> None:
     """Run migrations in 'online' mode."""
     try:
         url = _get_db_url()
-        print(f"…")
+        print(f"Creating engine with URL: {url}")
 
         connectable = create_engine(url, poolclass=pool.NullPool, future=True)
-        print(…)
+        print("Engine created successfully")
 
         with connectable.connect() as connection:
-            print(…)
+            print("Database connection established")
             context.configure(
                 connection=connection,
                 target_metadata=target_metadata,
@@ -83,14 +79,14 @@ def run_migrations_online() -> None:
                 compare_server_default=True,
             )
             with context.begin_transaction():
-                print(…)
+                print("Running migrations...")
                 context.run_migrations()
-                print(…)
+                print("Migrations completed successfully")
     except Exception as e:
-        print(f"…")
-        print(f"…")
+        print(f"Migration failed: {e}")
+        print(f"Error type: {type(e).__name__}")
         import traceback
-        print(f"…")
+        print(f"Full traceback: {traceback.format_exc()}")
         raise
py_backend/alembic/versions/b8fc40bfe3c7_initial_schema_seed.py
CHANGED
@@ -42,7 +42,6 @@ def _guess_region(alpha2: str) -> str:
 def upgrade():
     op.execute('CREATE EXTENSION IF NOT EXISTS pgcrypto;')
 
-    # Drop any old tables if they exist (idempotent for initial setup)
     op.execute("DROP TABLE IF EXISTS captions CASCADE;")
     op.execute("DROP TABLE IF EXISTS image_countries CASCADE;")
     op.execute("DROP TABLE IF EXISTS images CASCADE;")
@@ -205,7 +204,6 @@ def upgrade():
     )
     op.execute("INSERT INTO countries (c_code,label,r_code) VALUES ('XX','Not Applicable','OTHER')")
 
-    # ---- Images table now includes the single caption/interpretation fields ----
     op.create_table(
         'images',
         sa.Column('image_id', postgresql.UUID(as_uuid=True),
@@ -220,7 +218,6 @@ def upgrade():
         sa.Column('created_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('NOW()'), nullable=False),
         sa.Column('captured_at', sa.TIMESTAMP(timezone=True), nullable=True),
 
-        # --- merged caption fields ---
         sa.Column('title', sa.String(), nullable=True),
         sa.Column('prompt', sa.String(), nullable=True),
         sa.Column('model', sa.String(), sa.ForeignKey('models.m_code'), nullable=True),
py_backend/app/database.py
CHANGED
@@ -2,29 +2,25 @@ import os
 import logging
 from sqlalchemy import create_engine
 from sqlalchemy.orm import sessionmaker, declarative_base
-
+
 
 from .config import settings
 
 raw_db_url = settings.DATABASE_URL
 
-# Clean the URL if it starts with 'psql ' (common in some environments)
 if raw_db_url.startswith("psql '") and raw_db_url.endswith("'"):
-    raw_db_url = raw_db_url[6:-1]
+    raw_db_url = raw_db_url[6:-1]
 
-# Only add sslmode=require for remote connections, not localhost
 if "sslmode=" not in raw_db_url and "localhost" not in raw_db_url and "127.0.0.1" not in raw_db_url:
     raw_db_url = f"{raw_db_url}{'&' if '?' in raw_db_url else '?'}sslmode=require"
 
-
-print(f"database url: {raw_db_url}")
+print(f"database url: {raw_db_url}")
 
 engine = create_engine(
     raw_db_url,
     pool_pre_ping=True,
     pool_recycle=300,
-
-    # echo=True,
+
     future=True,
 )
 
py_backend/app/images.py
CHANGED
@@ -1,4 +1,4 @@
-
+
 from pydantic import BaseModel, AnyHttpUrl, Field
 from typing import List, Optional
 
py_backend/app/main.py
CHANGED
@@ -1,4 +1,4 @@
-
+
 import os
 import subprocess
 from datetime import datetime
@@ -15,7 +15,7 @@ app = FastAPI(title="PromptAid Vision")
 
 app.add_middleware(
     CORSMiddleware,
-    allow_origins=["http://localhost:3000","http://localhost:5173"],
+    allow_origins=["http://localhost:3000","http://localhost:5173","http://localhost:8000"],
     allow_origin_regex=r"https://.*\.hf\.space$",
     allow_credentials=False,
     allow_methods=["*"],
@@ -40,15 +40,20 @@ def root():
     <p>OK</p>
     <p><a href="/app/">Open UI</a> • <a href="/docs">API Docs</a></p>"""
 
-
-
+if os.path.exists("/app"):
+    STATIC_DIR = "/app/static"
+else:
+    STATIC_DIR = "../frontend/dist"
+
+print(f"Looking for static files in: {STATIC_DIR}")
 
 if os.path.isdir(STATIC_DIR):
-    print(f"…")
+    print(f"Static directory found: {STATIC_DIR}")
     app.mount("/app", StaticFiles(directory=STATIC_DIR, html=True), name="static")
 else:
-    print(f"…")
-    print(f"…")
+    print(f"Static directory NOT found: {STATIC_DIR}")
+    print(f"Current directory contents: {os.listdir(os.path.dirname(__file__))}")
+    print(f"Parent directory contents: {os.listdir(os.path.dirname(os.path.dirname(__file__)))}")
 
 @app.get("/app/{full_path:path}", include_in_schema=False)
 def spa_fallback(full_path: str):
@@ -90,7 +95,7 @@ async def debug_storage():
     if storage_exists:
         try:
             for root, dirs, filenames in os.walk(storage_dir):
-                for filename in filenames[:10]:
+                for filename in filenames[:10]:
                     rel_path = os.path.relpath(os.path.join(root, filename), storage_dir)
                     files.append(rel_path)
         except Exception as e:
@@ -121,42 +126,42 @@ async def serve_upload(file_path: str):
 def run_migrations():
     """Run database migrations on startup"""
     try:
-        print(…)
+        print("Running database migrations...")
 
-        …
-        print(…)
+        current_dir = os.getcwd()
+        print(f"Current working directory: {current_dir}")
+
+        print("Checking container environment...")
         try:
             result = subprocess.run(["which", "alembic"], capture_output=True, text=True)
-            print(f"…")
+            print(f"Alembic location: {result.stdout.strip() if result.stdout else 'Not found'}")
         except Exception as e:
-            print(f"…")
+            print(f"Could not check alembic location: {e}")
 
-        …
-        print(f"… Checking if /app exists: {os.path.exists('/app')}")
+        print(f"Checking if /app exists: {os.path.exists('/app')}")
         if os.path.exists('/app'):
-            print(f"…")
+            print(f"Contents of /app: {os.listdir('/app')}")
 
-        # Find where alembic.ini is located
         alembic_paths = [
+            "alembic.ini",
+            "../alembic.ini",
+            "py_backend/alembic.ini",
             "/app/alembic.ini",
-            "/app/py_backend/alembic.ini",
-            "/app/app/alembic.ini"
         ]
 
         alembic_dir = None
         for path in alembic_paths:
             if os.path.exists(path):
                 alembic_dir = os.path.dirname(path)
-                print(f"…")
+                print(f"Found alembic.ini at: {path}")
                 break
 
         if not alembic_dir:
-            print(…)
-            alembic_dir = …
+            print("Could not find alembic.ini - using current directory")
+            alembic_dir = current_dir
 
-        # Try to run alembic
         try:
-            print(f"…")
+            print(f"Running alembic upgrade head from: {alembic_dir}")
             result = subprocess.run(
                 ["alembic", "upgrade", "head"],
                 cwd=alembic_dir,
@@ -164,32 +169,52 @@ def run_migrations():
                 text=True,
                 timeout=60
             )
-            print(f"…")
-            print(f"…")
-            print(f"…")
+            print(f"Alembic return code: {result.returncode}")
+            print(f"Alembic stdout: {result.stdout}")
+            print(f"Alembic stderr: {result.stderr}")
 
             if result.returncode == 0:
-                print(…)
+                print("Database migrations completed successfully")
            else:
-                print(f"…")
-                print(…)
+                print(f"Database migrations failed")
+                print("Trying fallback: create tables directly...")
                 try:
                     from app.database import engine
                     from app.models import Base
                     Base.metadata.create_all(bind=engine)
-                    print(…)
+                    print("Tables created directly via SQLAlchemy")
                 except Exception as fallback_error:
-                    print(f"…")
+                    print(f"Fallback also failed: {fallback_error}")
         except Exception as e:
-            print(f"…")
+            print(f"Error running alembic: {e}")
 
     except Exception as e:
-        print(f"…")
+        print(f"Could not run migrations: {e}")
+
+
+def ensure_storage_ready():
+    """Ensure MinIO storage is ready before starting the app"""
+    print(f"Storage provider from settings: '{settings.STORAGE_PROVIDER}'")
+    print(f"S3 endpoint from settings: '{settings.S3_ENDPOINT}'")
+    print(f"S3 bucket from settings: '{settings.S3_BUCKET}'")
+
+    if settings.STORAGE_PROVIDER == "s3":
+        try:
+            print("Checking MinIO storage connection...")
+            from app.storage import _ensure_bucket
+            _ensure_bucket()
+            print("MinIO storage ready")
+        except Exception as e:
+            print(f"Warning: MinIO storage not ready: {e}")
+            print("Storage operations may fail until MinIO is available")
+    else:
+        print("Using local storage - no external dependencies")
 
-# Run migrations on startup
 run_migrations()
 
-
-
-print(…)
-print(…)
+ensure_storage_ready()
+
+print("PromptAid Vision API server ready")
+print("Endpoints: /api/images, /api/captions, /api/metadata, /api/models")
+print(f"Environment: {settings.ENVIRONMENT}")
+print("CORS: localhost + *.hf.space")
py_backend/app/routers/caption.py
CHANGED
@@ -80,28 +80,23 @@ async def create_caption(
         raise HTTPException(404, "image not found")
 
     try:
-        # Try to get image bytes using storage functions
         if hasattr(storage, 's3') and settings.STORAGE_PROVIDER != "local":
-            # S3/MinIO path
             response = storage.s3.get_object(
                 Bucket=settings.S3_BUCKET,
                 Key=img.file_key,
             )
             img_bytes = response["Body"].read()
         else:
-            # Local storage path - read file directly
             import os
             file_path = os.path.join(settings.STORAGE_DIR, img.file_key)
             with open(file_path, 'rb') as f:
                 img_bytes = f.read()
     except Exception as e:
         print(f"Error reading image file: {e}")
-        # Fallback: try to get via URL
         try:
             url = storage.generate_presigned_url(img.file_key)
             if url.startswith('/'):
-
-                url = f"http://localhost:7860{url}"
+                url = f"http://localhost:8000{url}"
             import requests
             resp = requests.get(url)
             resp.raise_for_status()
@@ -144,9 +139,8 @@ async def create_caption(
     from .upload import convert_image_to_dict
     try:
         url = storage.generate_presigned_url(c.file_key, expires_in=3600)
-        # For local storage, ensure we have a full URL
         if url.startswith('/') and settings.STORAGE_PROVIDER == "local":
-            url = f"http://localhost:…"
+            url = f"http://localhost:8000{url}"
     except Exception:
         url = f"/api/images/{c.image_id}/file"
 
@@ -173,9 +167,8 @@ def get_caption(
     from .upload import convert_image_to_dict
     try:
         url = storage.generate_presigned_url(caption.file_key, expires_in=3600)
-        # For local storage, ensure we have a full URL
         if url.startswith('/') and settings.STORAGE_PROVIDER == "local":
-            url = f"http://localhost:…"
+            url = f"http://localhost:8000{url}"
     except Exception:
         url = f"/api/images/{caption.image_id}/file"
 
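The localhost fix-up now appears three times in this router. A sketch of how it could be factored into a single helper; the function name and import paths are assumptions, not part of this commit:

# Hypothetical helper consolidating the repeated presign-then-prefix logic.
from app import storage
from app.config import settings  # import path assumed

def absolute_file_url(file_key: str, expires_in: int = 3600) -> str:
    """Presign a URL; for local storage, prefix the relative /uploads path."""
    url = storage.generate_presigned_url(file_key, expires_in=expires_in)
    if url.startswith('/') and settings.STORAGE_PROVIDER == "local":
        url = f"http://localhost:8000{url}"
    return url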
py_backend/app/routers/images.py
CHANGED
@@ -21,37 +21,34 @@ def get_db():
 
 @router.post("/from-url", response_model=CreateImageFromUrlOut)
 async def create_image_from_url(payload: CreateImageFromUrlIn, db: Session = Depends(get_db)):
-    print(f"DEBUG: Received payload: {payload}")
     try:
-        if …
-        …
-        …
+        if '/api/images/' in payload.url and '/file' in payload.url:
+            url_parts = payload.url.split('/api/images/')
+            if len(url_parts) > 1:
+                image_id = url_parts[1].split('/file')[0]
+            else:
+                raise HTTPException(status_code=400, detail="Invalid image URL format")
         else:
             raise HTTPException(status_code=400, detail="Invalid image URL format")
 
         existing_image = db.query(Images).filter(Images.image_id == image_id).first()
         if not existing_image:
             raise HTTPException(status_code=404, detail="Source image not found")
-        print(f"DEBUG: Found existing image: {existing_image.image_id}")
 
         try:
-            # Try to get image content using storage functions
             if hasattr(storage, 's3') and settings.STORAGE_PROVIDER != "local":
-                # S3/MinIO path
                 response = storage.s3.get_object(
                     Bucket=settings.S3_BUCKET,
                     Key=existing_image.file_key,
                 )
                 data = response["Body"].read()
             else:
-                # Local storage path - read file directly
                 import os
                 file_path = os.path.join(settings.STORAGE_DIR, existing_image.file_key)
                 with open(file_path, 'rb') as f:
                     data = f.read()
 
         content_type = "image/jpeg"
-        print(f"DEBUG: Downloaded image data: {len(data)} bytes")
     except Exception as e:
         raise HTTPException(status_code=400, detail=f"Failed to fetch image from storage: {e}")
 
@@ -61,10 +58,8 @@ async def create_image_from_url(payload: CreateImageFromUrlIn, db: Session = Dep
         ext = mimetypes.guess_extension(content_type) or ".jpg"
         key = upload_bytes(data, filename=f"contributed{ext}", content_type=content_type)
         image_url = get_object_url(key, expires_in=86400)
-        print(f"DEBUG: Uploaded new image with key: {key}")
 
         sha = hashlib.sha256(data).hexdigest()
-        print(f"DEBUG: Creating new Images object...")
 
         img = Images(
             file_key=key,
@@ -85,22 +80,17 @@ async def create_image_from_url(payload: CreateImageFromUrlIn, db: Session = Dep
         usability=50,
         starred=False
         )
-        print(f"DEBUG: Images object created: {img}")
         db.add(img)
-        db.flush()
-        print(f"DEBUG: New image_id: {img.image_id}")
+        db.flush()
 
         for c in payload.countries:
             db.execute(image_countries.insert().values(image_id=img.image_id, c_code=c))
 
         db.commit()
-        print(f"DEBUG: Database commit successful")
 
         result = CreateImageFromUrlOut(image_id=str(img.image_id), image_url=image_url)
-        print(f"DEBUG: Returning result: {result}")
         return result
 
     except Exception as e:
-        print(f"DEBUG: Exception occurred: {type(e).__name__}: {str(e)}")
         db.rollback()
         raise HTTPException(status_code=500, detail=f"Failed to create image: {str(e)}")
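The new URL-parsing branch in create_image_from_url reduces to a small pure function. A sketch with a hypothetical URL to show the expected behaviour:

# Mirrors the split('/api/images/') / split('/file') logic added above.
def extract_image_id(url: str) -> str | None:
    """Pull the image id out of a .../api/images/<id>/file URL, else None."""
    if '/api/images/' in url and '/file' in url:
        url_parts = url.split('/api/images/')
        if len(url_parts) > 1:
            return url_parts[1].split('/file')[0]
    return None

assert extract_image_id("http://localhost:8000/api/images/123e4567/file") == "123e4567"
assert extract_image_id("http://example.com/other") is None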
py_backend/app/routers/upload.py
CHANGED
@@ -141,16 +141,13 @@ async def copy_image_for_contribution(
         raise HTTPException(404, "Source image not found")
 
     try:
-        # Try to get image content using storage functions
         if hasattr(storage, 's3') and settings.STORAGE_PROVIDER != "local":
-            # S3/MinIO path
             response = storage.s3.get_object(
                 Bucket=settings.S3_BUCKET,
                 Key=source_img.file_key,
             )
             image_content = response["Body"].read()
         else:
-            # Local storage path - read file directly
             import os
             file_path = os.path.join(settings.STORAGE_DIR, source_img.file_key)
             with open(file_path, 'rb') as f:
@@ -197,14 +194,11 @@ async def get_image_file(image_id: str, db: Session = Depends(get_db)):
     print(f"β Found image: {img.image_id}, file_key: {img.file_key}")
 
     try:
-        # Try to get image content using storage functions
         if hasattr(storage, 's3') and settings.STORAGE_PROVIDER != "local":
-            # S3/MinIO path
             print(f"π Using S3 storage")
             response = storage.s3.get_object(Bucket=settings.S3_BUCKET, Key=img.file_key)
             content = response['Body'].read()
         else:
-            # Local storage path - read file directly
             print(f"π Using local storage")
             import os
             file_path = os.path.join(settings.STORAGE_DIR, img.file_key)
py_backend/app/services/vlm_service.py
CHANGED
@@ -65,14 +65,12 @@ class VLMServiceManager:
     async def generate_caption(self, image_bytes: bytes, prompt: str, model_name: str | None = None) -> dict:
         """Generate caption using the specified model or fallback to available service."""
 
-        # Find appropriate service
         service = None
         if model_name:
             service = self.services.get(model_name)
             if not service:
                 print(f"Model '{model_name}' not found, using fallback")
 
-        # Fallback to first available service
         if not service and self.services:
             service = next(iter(self.services.values()))
             print(f"Using fallback service: {service.model_name}")
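For context, a hypothetical call into generate_caption showing the fallback path when the requested model is not registered; the vlm_manager instance name and the model code are assumptions for illustration:

# Assumed module-level manager instance; an unknown model_name triggers the
# printed notice and falls back to the first available service.
import asyncio
from app.services.vlm_service import vlm_manager  # name assumed

async def demo() -> None:
    with open("map.png", "rb") as f:
        image_bytes = f.read()
    result = await vlm_manager.generate_caption(image_bytes, "Describe this map", model_name="missing-model")
    print(result)

asyncio.run(demo())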
py_backend/app/storage.py
CHANGED
@@ -6,7 +6,6 @@ from typing import BinaryIO, Optional
 
 from .config import settings
 
-# Initialize s3 client based on storage provider
 if settings.STORAGE_PROVIDER != "local":
     import boto3
     import botocore
@@ -19,7 +18,6 @@ if settings.STORAGE_PROVIDER != "local":
         region_name=getattr(settings, "S3_REGION", None),
     )
 else:
-    # Create a dummy s3 object for local storage to avoid AttributeError
     class DummyS3Client:
         def __init__(self):
             pass
@@ -32,38 +30,26 @@ else:
 
 s3 = DummyS3Client()
 
-# Optional settings you can add to your config:
-# - S3_PUBLIC_URL_BASE: str | None (e.g. "https://cdn.example.com" or bucket website endpoint)
-# - S3_PUBLIC_READ: bool (True if the bucket/objects are world-readable)
 
 def _ensure_bucket() -> None:
-    """
-    Make sure the bucket exists. Safe to call on every upload.
-    """
+    """Ensure bucket exists. Safe to call on every upload."""
     if settings.STORAGE_PROVIDER == "local":
-        # For local storage, ensure the storage directory exists
         os.makedirs(settings.STORAGE_DIR, exist_ok=True)
         return
 
     try:
         s3.head_bucket(Bucket=settings.S3_BUCKET)
     except botocore.exceptions.ClientError as e:
-        # Create bucket. Some providers need LocationConstraint; MinIO typically doesn't.
         create_kwargs = {"Bucket": settings.S3_BUCKET}
         region = getattr(settings, "S3_REGION", None)
-        # For AWS S3 outside us-east-1 you must pass LocationConstraint.
         if region and (settings.S3_ENDPOINT is None or "amazonaws.com" in str(settings.S3_ENDPOINT).lower()):
             create_kwargs["CreateBucketConfiguration"] = {"LocationConstraint": region}
         s3.create_bucket(**create_kwargs)
 
 
 def get_object_url(key: str, *, expires_in: int = 3600) -> str:
-    """
-    Return a browser-usable URL for the object.
-    If S3_PUBLIC_URL_BASE is set, return a public URL. Otherwise, return a presigned URL.
-    """
+    """Return browser-usable URL for object."""
     if settings.STORAGE_PROVIDER == "local":
-        # For local storage, return a relative path that can be served by FastAPI
         return f"/uploads/{key}"
 
     public_base = getattr(settings, "S3_PUBLIC_URL_BASE", None)
@@ -73,11 +59,8 @@ def get_object_url(key: str, *, expires_in: int = 3600) -> str:
 
 
 def generate_presigned_url(key: str, expires_in: int = 3600) -> str:
-    """
-    Returns a presigned URL for GETting the object.
-    """
+    """Generate presigned URL for GETting object."""
     if settings.STORAGE_PROVIDER == "local":
-        # For local storage, return a direct URL
         return f"/uploads/{key}"
 
     return s3.generate_presigned_url(
@@ -94,10 +77,7 @@ def upload_fileobj(
     content_type: Optional[str] = None,
     cache_control: Optional[str] = "public, max-age=31536000, immutable",
 ) -> str:
-    """
-    Upload a file-like object to the configured storage.
-    Returns the object key (not the URL).
-    """
+    """Upload file-like object to configured storage. Returns object key."""
     if settings.STORAGE_PROVIDER == "local":
         return _upload_local(fileobj, filename, content_type)
     else:
@@ -109,18 +89,14 @@ def _upload_local(
     content_type: Optional[str] = None,
 ) -> str:
     """Upload to local filesystem"""
-    # Ensure storage directory exists
     os.makedirs(settings.STORAGE_DIR, exist_ok=True)
 
-    # Build a namespaced key
     safe_name = filename or "upload.bin"
     key = f"maps/{uuid4()}_{safe_name}"
     filepath = os.path.join(settings.STORAGE_DIR, key)
 
-    # Ensure directory exists
     os.makedirs(os.path.dirname(filepath), exist_ok=True)
 
-    # Write file
     with open(filepath, 'wb') as f:
         fileobj.seek(0)
         f.write(fileobj.read())
@@ -136,14 +112,11 @@ def _upload_s3(
     """Upload to S3/MinIO"""
     _ensure_bucket()
 
-    # Build a namespaced key; keep original filename tail if helpful
     safe_name = filename or "upload.bin"
     key = f"maps/{uuid4()}_{safe_name}"
 
-    # Guess content type if not provided
     ct = content_type or (mimetypes.guess_type(safe_name)[0] or "application/octet-stream")
 
-    # Make sure we read from the start
     try:
         fileobj.seek(0)
     except Exception:
@@ -153,7 +126,6 @@ def _upload_s3(
     if cache_control:
         extra_args["CacheControl"] = cache_control
     if getattr(settings, "S3_PUBLIC_READ", False):
-        # Only set this if your bucket policy allows public read
         extra_args["ACL"] = "public-read"
 
     s3.upload_fileobj(fileobj, settings.S3_BUCKET, key, ExtraArgs=extra_args)
@@ -167,9 +139,7 @@ def upload_bytes(
     content_type: Optional[str] = None,
     cache_control: Optional[str] = "public, max-age=31536000, immutable",
 ) -> str:
-    """
-    Convenience helper to upload raw bytes (e.g., after server-side URL download).
-    """
+    """Upload raw bytes. Returns object key."""
     buf = io.BytesIO(data)
     return upload_fileobj(buf, filename, content_type=content_type, cache_control=cache_control)
 
@@ -180,11 +150,7 @@ def copy_object(
     new_filename: Optional[str] = None,
     cache_control: Optional[str] = "public, max-age=31536000, immutable",
 ) -> str:
-    """
-    Server-side copy within the same bucket (no download/upload round-trip).
-    Useful for 'duplicate' endpoints if you already know the source key.
-    Returns the NEW object key.
-    """
+    """Server-side copy within same bucket. Returns new object key."""
     if settings.STORAGE_PROVIDER == "local":
         return _copy_local(src_key, new_filename)
     else:
@@ -200,10 +166,8 @@ def _copy_local(src_key: str, new_filename: Optional[str] = None) -> str:
     dest_key = f"maps/{uuid4()}_{tail}"
     dest_path = os.path.join(settings.STORAGE_DIR, dest_key)
 
-    # Ensure directory exists
     os.makedirs(os.path.dirname(dest_path), exist_ok=True)
 
-    # Copy file
     with open(src_path, 'rb') as src, open(dest_path, 'wb') as dest:
         dest.write(src.read())
 
@@ -231,9 +195,7 @@ def _copy_s3(src_key: str, new_filename: Optional[str] = None, cache_control: Op
 
 
 def delete_object(key: str) -> None:
-    """
-    Delete an object (best-effort).
-    """
+    """Delete object (best-effort)."""
     if settings.STORAGE_PROVIDER == "local":
         _delete_local(key)
     else:
@@ -246,7 +208,6 @@ def _delete_local(key: str) -> None:
     if os.path.exists(file_path):
         os.remove(file_path)
     except (OSError, FileNotFoundError):
-        # Swallow to keep deletes idempotent for callers
         pass
 
 def _delete_s3(key: str) -> None:
@@ -254,5 +215,4 @@ def _delete_s3(key: str) -> None:
     try:
         s3.delete_object(Bucket=settings.S3_BUCKET, Key=key)
     except botocore.exceptions.ClientError:
-        # Swallow to keep deletes idempotent for callers
         pass
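Taken together, the helpers kept in this module compose the way the images router uses them. A minimal end-to-end sketch with a placeholder payload, working for both the local and S3/MinIO providers:

# Upload raw bytes, mint a browser-usable URL, then delete (best-effort).
from app.storage import upload_bytes, get_object_url, delete_object

data = b"example image bytes"  # placeholder payload
key = upload_bytes(data, filename="example.jpg", content_type="image/jpeg")
url = get_object_url(key, expires_in=3600)  # "/uploads/..." locally, presigned URL on S3
print(key, url)
delete_object(key)  # swallows missing-object errors, per the module above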
py_backend/tests/test_explore_page.py
CHANGED
@@ -6,7 +6,6 @@ import requests
|
|
6 |
import sys
|
7 |
import os
|
8 |
|
9 |
-
# Add the parent directory to the path
|
10 |
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
|
11 |
|
12 |
from app.database import SessionLocal
|
@@ -18,7 +17,6 @@ def test_explore_page_endpoints():
|
|
18 |
|
19 |
base_url = "http://localhost:8000/api"
|
20 |
|
21 |
-
# Test metadata endpoints
|
22 |
endpoints = [
|
23 |
"/sources",
|
24 |
"/types",
|
@@ -52,7 +50,6 @@ def test_images_list_endpoint():
|
|
52 |
data = response.json()
|
53 |
print(f" + {len(data)} images returned")
|
54 |
|
55 |
-
# Check structure of first image
|
56 |
if data:
|
57 |
first_image = data[0]
|
58 |
required_fields = ['image_id', 'file_key', 'source', 'type', 'image_url']
|
@@ -71,7 +68,6 @@ def test_image_detail_endpoint():
|
|
71 |
"""Test the image detail endpoint"""
|
72 |
print("\n=== Testing Image Detail Endpoint ===")
|
73 |
|
74 |
-
# First get a list of images
|
75 |
try:
|
76 |
response = requests.get("http://localhost:8000/api/images/")
|
77 |
if response.status_code == 200:
|
@@ -79,7 +75,6 @@ def test_image_detail_endpoint():
|
|
79 |
if images:
|
80 |
image_id = images[0]['image_id']
|
81 |
|
82 |
-
# Test getting specific image
|
83 |
detail_response = requests.get(f"http://localhost:8000/api/images/{image_id}")
|
84 |
print(f"GET /api/images/{image_id}: {detail_response.status_code}")
|
85 |
|
@@ -103,7 +98,6 @@ def test_image_file_endpoint():
|
|
103 |
"""Test the image file serving endpoint"""
|
104 |
print("\n=== Testing Image File Endpoint ===")
|
105 |
|
106 |
-
# First get a list of images
|
107 |
try:
|
108 |
response = requests.get("http://localhost:8000/api/images/")
|
109 |
if response.status_code == 200:
|
@@ -111,7 +105,6 @@ def test_image_file_endpoint():
|
|
111 |
if images:
|
112 |
image_id = images[0]['image_id']
|
113 |
|
114 |
-
# Test getting image file
|
115 |
file_response = requests.get(f"http://localhost:8000/api/images/{image_id}/file")
|
116 |
print(f"GET /api/images/{image_id}/file: {file_response.status_code}")
|
117 |
|
@@ -133,27 +126,22 @@ def test_filtering_functionality():
|
|
133 |
"""Test the filtering functionality"""
|
134 |
print("\n=== Testing Filtering Functionality ===")
|
135 |
|
136 |
-
# Test filtering by source
|
137 |
try:
|
138 |
response = requests.get("http://localhost:8000/api/images/")
|
139 |
if response.status_code == 200:
|
140 |
images = response.json()
|
141 |
if images:
|
142 |
-
# Get unique sources
|
143 |
sources = list(set(img['source'] for img in images))
|
144 |
print(f" + Available sources: {sources}")
|
145 |
|
146 |
-
# Test filtering by first source
|
147 |
if sources:
|
148 |
first_source = sources[0]
|
149 |
filtered_images = [img for img in images if img['source'] == first_source]
|
150 |
print(f" + Filtered by source '{first_source}': {len(filtered_images)} images")
|
151 |
|
152 |
-
# Get unique types
|
153 |
types = list(set(img['type'] for img in images))
|
154 |
print(f" + Available types: {types}")
|
155 |
|
156 |
-
# Test filtering by first type
|
157 |
if types:
|
158 |
first_type = types[0]
|
159 |
filtered_images = [img for img in images if img['type'] == first_type]
|
@@ -172,15 +160,12 @@ def test_database_consistency():
|
|
172 |
|
173 |
db = SessionLocal()
|
174 |
try:
|
175 |
-
# Check if images exist
|
176 |
images = db.query(models.Images).all()
|
177 |
print(f" + Total images in database: {len(images)}")
|
178 |
|
179 |
-
# Check if images have caption data
|
180 |
images_with_captions = db.query(models.Images).filter(models.Images.title.isnot(None)).all()
|
181 |
print(f" + Images with caption data: {len(images_with_captions)}")
|
182 |
|
183 |
-
# Check metadata tables
|
184 |
sources = db.query(models.Source).all()
|
185 |
print(f" + Total sources: {len(sources)}")
|
186 |
|
@@ -190,7 +175,6 @@ def test_database_consistency():
|
|
190 |
countries = db.query(models.Country).all()
|
191 |
print(f" + Total countries: {len(countries)}")
|
192 |
|
193 |
-
# Check relationships
|
194 |
images_with_countries = db.query(models.Images).join(models.Images.countries).all()
|
195 |
print(f" + Images with countries: {len(images_with_countries)}")
|
196 |
|
@@ -205,7 +189,6 @@ def create_test_data():
|
|
205 |
|
206 |
db = SessionLocal()
|
207 |
try:
|
208 |
-
# Create a test image
|
209 |
test_content = b"test image data for explore page"
|
210 |
key = storage.upload_fileobj(io.BytesIO(test_content), "explore_test.jpg")
|
211 |
|
@@ -221,7 +204,6 @@ def create_test_data():
|
|
221 |
)
|
222 |
print(f" + Created test image: {img.image_id}")
|
223 |
|
224 |
-
# Create a test caption
|
225 |
caption = crud.create_caption(
|
226 |
db=db,
|
227 |
image_id=img.image_id,
|
@@ -259,10 +241,8 @@ def cleanup_test_data(image_id):
|
|
259 |
if __name__ == "__main__":
|
260 |
print("Starting Explore Page Tests...")
|
261 |
|
262 |
-
# Create test data
|
263 |
test_image_id = create_test_data()
|
264 |
|
265 |
-
# Run tests
|
266 |
test_explore_page_endpoints()
|
267 |
test_images_list_endpoint()
|
268 |
test_image_detail_endpoint()
|
@@ -270,7 +250,6 @@ if __name__ == "__main__":
|
|
270 |
test_filtering_functionality()
|
271 |
test_database_consistency()
|
272 |
|
273 |
-
# Clean up
|
274 |
if test_image_id:
|
275 |
cleanup_test_data(test_image_id)
|
276 |
|
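The hunks above only strip inline comments; the filtering behaviour under test is unchanged. Reduced to its core, the source/type filter the test exercises is a sketch like this (endpoint and field names are taken from the hunks; everything else is illustrative):

import requests

# List endpoint used throughout these tests.
images = requests.get("http://localhost:8000/api/images/").json()

# Derive the distinct filter values, then filter client-side, mirroring the test.
sources = sorted({img["source"] for img in images})
if sources:
    first_source = sources[0]
    subset = [img for img in images if img["source"] == first_source]
    print(f"{len(subset)} images for source {first_source!r}")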
py_backend/tests/test_openai_integration.py
CHANGED
@@ -9,7 +9,6 @@ import os
 from pathlib import Path
 import sys
 
-# Add the app directory to the Python path
 sys.path.append(os.path.join(os.path.dirname(__file__), 'app'))
 
 from app.services.gpt4v_service import GPT4VService
@@ -20,7 +19,6 @@ async def test_gpt4v_service():
     print("🧪 OpenAI GPT-4 Vision Integration Test")
     print("=" * 60)
 
-    # Check API key
     if not settings.OPENAI_API_KEY:
         print("❌ OPENAI_API_KEY environment variable not set!")
         print("Please set your OpenAI API key in .env:")
@@ -29,7 +27,6 @@ async def test_gpt4v_service():
 
     print(f"✅ OpenAI API Key found: {settings.OPENAI_API_KEY[:10]}...")
 
-    # Find test image
     test_image_path = Path("tests/test.jpg")
     if not test_image_path.exists():
         print(f"❌ Test image not found at: {test_image_path}")
@@ -38,7 +35,6 @@ async def test_gpt4v_service():
 
     print(f"✅ Test image found: {test_image_path}")
 
-    # Initialize GPT4V service
     try:
         gpt4v_service = GPT4VService(settings.OPENAI_API_KEY)
         print(f"✅ GPT4V service initialized: {gpt4v_service.model_name}")
@@ -46,13 +42,11 @@ async def test_gpt4v_service():
         print(f"❌ Failed to initialize GPT4V service: {e}")
         return
 
-    # Read test image
     with open(test_image_path, 'rb') as f:
         image_bytes = f.read()
 
     print(f"📸 Image size: {len(image_bytes)} bytes")
 
-    # Test caption generation
     prompt = "Analyze this crisis map and provide a detailed description of the emergency situation, affected areas, and key information shown in the map."
 
     print(f"\n🎯 Testing with prompt: {prompt}")
py_backend/tests/test_upload_flow.py
CHANGED
@@ -4,7 +4,6 @@
 import sys
 import os
 
-# Add the parent directory to the path so we can import app modules
 sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
 
 import requests
@@ -19,11 +18,9 @@ def test_database_connection():
     try:
         print("Testing database connection...")
 
-        # Test basic query
         sources = db.query(models.Source).all()
         print(f"Found {len(sources)} sources in database")
 
-        # Test image creation
         test_img = models.Images(
             file_key="test_key",
             sha256="test_sha",
@@ -43,7 +40,6 @@ def test_database_connection():
         db.commit()
         print(f"Created test image with ID: {test_img.image_id}")
 
-        # Clean up
         db.delete(test_img)
         db.commit()
         print("Test completed successfully - database is working")
@@ -60,7 +56,6 @@ def test_crud_functions():
     try:
         print("Testing CRUD functions...")
 
-        # Test create_image
         img = crud.create_image(
             db=db,
             src="OTHER",
@@ -73,7 +68,6 @@ def test_crud_functions():
         )
         print(f"CRUD create_image successful: {img.image_id}")
 
-        # Test create_caption
         caption = crud.create_caption(
             db=db,
             image_id=img.image_id,
@@ -85,7 +79,6 @@ def test_crud_functions():
         )
         print(f"CRUD create_caption successful for image: {caption.image_id}")
 
-        # Clean up
         db.delete(img)
         db.commit()
         print("CRUD test completed successfully")
@@ -100,11 +93,9 @@ def test_complete_upload_flow():
     """Test the complete upload flow: upload → create caption → submit caption"""
     print("=== Testing Complete Upload Flow ===")
 
-    # Create test image data
     test_content = b"test image data for upload flow"
     test_filename = "test_upload.jpg"
 
-    # Step 1: Upload image via API
     print("1. Uploading image via API...")
     files = {'file': (test_filename, io.BytesIO(test_content), 'image/jpeg')}
     data = {
@@ -124,7 +115,6 @@ def test_complete_upload_flow():
     image_id = upload_result['image_id']
     print(f"Upload successful! Image ID: {image_id}")
 
-    # Step 2: Create caption via API
     print("2. Creating caption via API...")
     caption_data = {
         'title': 'Test Caption',
@@ -139,10 +129,9 @@ def test_complete_upload_flow():
 
     if caption_response.status_code == 200:
         caption_result = caption_response.json()
-        caption_id = caption_result['image_id']
+        caption_id = caption_result['image_id']
         print(f"Caption created successfully! Caption ID: {caption_id}")
 
-        # Step 3: Submit caption via API
         print("3. Submitting caption via API...")
         submit_data = {
             'title': 'Test Caption',
@@ -161,7 +150,6 @@ def test_complete_upload_flow():
     if submit_response.status_code == 200:
         print("Caption submitted successfully!")
 
-        # Verify in database
         print("4. Verifying in database...")
         db = SessionLocal()
         try:
@@ -191,11 +179,9 @@ def test_deletion_logic():
     """Test the deletion logic for images"""
     print("=== Testing Deletion Logic ===")
 
-    # Create test image data
     test_content = b"test image data for deletion test"
     test_filename = "test_deletion.jpg"
 
-    # Step 1: Upload image via API
     print("1. Uploading image via API...")
     files = {'file': (test_filename, io.BytesIO(test_content), 'image/jpeg')}
     data = {
@@ -215,7 +201,6 @@ def test_deletion_logic():
     image_id = upload_result['image_id']
     print(f"Upload successful! Image ID: {image_id}")
 
-    # Step 2: Create caption via API
     print("2. Creating caption via API...")
     caption_data = {
         'title': 'Test Caption for Deletion',
@@ -231,7 +216,6 @@ def test_deletion_logic():
     if caption_response.status_code == 200:
         print("Caption created successfully!")
 
-        # Step 3: Test image deletion
         print("3. Testing image deletion...")
         delete_response = requests.delete(
             f'http://localhost:8080/api/images/{image_id}'
@@ -241,7 +225,6 @@ def test_deletion_logic():
     if delete_response.status_code == 200:
         print("Image deleted successfully!")
 
-        # Verify image is completely removed
         print("4. Verifying image deletion...")
         db = SessionLocal()
         try:
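A recurring shape in these test files is one short-lived session per check. Distilled, it looks like this sketch (the close() in the finally branch is assumed, since those lines fall outside the visible hunks):

from app.database import SessionLocal
from app import models

db = SessionLocal()
try:
    # Query, assert, and print, as the tests above do.
    total = len(db.query(models.Images).all())
    print(f"Total images in database: {total}")
finally:
    db.close()  # assumed cleanup; not shown in the hunks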
start-local.bat
ADDED
@@ -0,0 +1,39 @@
+@echo off
+echo 🚀 Starting PromptAid Vision Local Development Environment...
+echo.
+
+echo 1. Starting MinIO and PostgreSQL...
+docker-compose up -d postgres minio
+
+echo.
+echo 2. Waiting for services to be ready...
+timeout /t 15 /nobreak >nul
+
+echo.
+echo 3. Starting Backend on port 8000...
+cd py_backend
+call .venv\Scripts\activate
+start "Backend" cmd /k "uvicorn app.main:app --reload --host 0.0.0.0 --port 8000"
+
+echo.
+echo ✅ All Services Started:
+echo    PostgreSQL: localhost:5434
+echo    MinIO: localhost:9000
+echo    MinIO Console: http://localhost:9001
+echo    Backend: Port 8000
+echo.
+echo 🌐 URLs:
+echo    Your App: http://localhost:8000/app/
+echo    API Docs: http://localhost:8000/docs
+echo    Health Check: http://localhost:8000/health
+echo    MinIO Console: http://localhost:9001 (promptaid/promptaid)
+echo.
+echo 🎯 Main App URL: http://localhost:8000/app/
+echo.
+echo Press any key to stop all services...
+pause >nul
+
+echo.
+echo 🛑 Stopping all services...
+docker-compose down
+echo ✅ Services stopped.
start-local.sh
ADDED
@@ -0,0 +1,50 @@
+#!/bin/bash
+echo "🚀 Starting PromptAid Vision Local Development Environment..."
+echo ""
+
+echo "1. Starting MinIO and PostgreSQL..."
+docker-compose up -d postgres minio
+
+echo ""
+echo "2. Waiting for services to be ready..."
+sleep 15
+
+echo ""
+echo "3. Starting Backend on port 8000..."
+cd py_backend
+source .venv/bin/activate
+uvicorn app.main:app --reload --host 0.0.0.0 --port 8000 &
+BACKEND_PID=$!
+
+echo ""
+echo "✅ All Services Started:"
+echo "  PostgreSQL: localhost:5434"
+echo "  MinIO: localhost:9000"
+echo "  MinIO Console: http://localhost:9001"
+echo "  Backend PID: $BACKEND_PID (Port 8000)"
+echo ""
+echo "🌐 URLs:"
+echo "  Your App: http://localhost:8000/app/"
+echo "  API Docs: http://localhost:8000/docs"
+echo "  Health Check: http://localhost:8000/health"
+echo "  MinIO Console: http://localhost:9001 (promptaid/promptaid)"
+echo ""
+echo "🎯 Main App URL: http://localhost:8000/app/"
+echo ""
+echo "Press Ctrl+C to stop all services"
+
+# Function to cleanup on exit
+cleanup() {
+    echo ""
+    echo "🛑 Stopping all services..."
+    docker-compose down
+    kill $BACKEND_PID 2>/dev/null
+    echo "✅ All services stopped"
+    exit 0
+}
+
+# Set trap to cleanup on script exit
+trap cleanup INT TERM EXIT
+
+# Wait for both processes
+wait
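Both start scripts wait a flat 15 seconds before launching the backend. A more deterministic alternative (a sketch, not part of this commit) would poll the same MinIO liveness endpoint the compose healthcheck hits:

import time
import requests

def wait_for_minio(url="http://localhost:9000/minio/health/live", timeout=60):
    """Poll MinIO's liveness endpoint until it answers 200 or the deadline passes."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        try:
            if requests.get(url, timeout=2).status_code == 200:
                return True
        except requests.RequestException:
            pass  # not up yet; retry until the deadline
        time.sleep(1)
    return False

if not wait_for_minio():
    raise SystemExit("MinIO did not become healthy in time")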