model 400 fix
docker-compose.prod.yml
CHANGED
@@ -2,7 +2,7 @@ version: "3.8"
 
 services:
   backend:
-    build:
+    build: .
     ports:
       - "8080:8080"
     environment:
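Setting build: . points the backend's build context at the directory containing the compose file, which is expected to hold a Dockerfile; the bare build: key left the context unspecified. The backend service block now reads (environment section omitted):

services:
  backend:
    build: .
    ports:
      - "8080:8080"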
py_backend/app/services/huggingface_service.py
CHANGED
@@ -251,7 +251,6 @@ class ProvidersGenericVLMService(HuggingFaceService):
         # Use a human-friendly stable name that your UI/DB will reference
         self.model_name = public_name or model_id.replace("/", "_").upper()
         self.model_type = ModelType.CUSTOM
-
 class ProvidersGenericVLMService(HuggingFaceService):
     """
     Generic wrapper so you can register ANY Providers VLM by model_id from config.
@@ -263,3 +262,4 @@ class ProvidersGenericVLMService(HuggingFaceService):
         # Use a human-friendly stable name that your UI/DB will reference
         self.model_name = public_name or model_id.replace("/", "_").upper()
         self.model_type = ModelType.CUSTOM
+
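When no public_name is passed, the registered name is derived from the model id; a quick illustration with a hypothetical model id (not one referenced by this commit):

model_id = "Qwen/Qwen2-VL-7B-Instruct"           # hypothetical example id
model_name = model_id.replace("/", "_").upper()  # -> "QWEN_QWEN2-VL-7B-INSTRUCT"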
py_backend/app/services/schema_validator.py
CHANGED
@@ -139,28 +139,38 @@ class SchemaValidator:
             Tuple of (cleaned_data, is_valid, error_message)
         """
         try:
-
-
-            if "content" in raw_data:
+            if "raw_response" in raw_data:
+                ai_data = raw_data["raw_response"]
+
+                if "response" in ai_data:
+                    content = ai_data["response"]
+                    if isinstance(content, str):
+                        try:
+                            data = json.loads(content)
+                        except json.JSONDecodeError:
+                            data = {"analysis": content, "metadata": {}}
+                    else:
+                        data = content
+                elif "analysis" in ai_data and "metadata" in ai_data:
+                    data = ai_data
+                else:
+                    data = ai_data
+            elif "content" in raw_data:
                 content = raw_data["content"]
                 if isinstance(content, str):
-                    # Try to parse JSON from string content
                     try:
                         parsed_content = json.loads(content)
                         data = parsed_content
                     except json.JSONDecodeError:
-                        # If it's not JSON, treat as analysis
                         data = {"analysis": content, "metadata": {}}
                 else:
                     data = content
             else:
                 data = raw_data
 
-            # Validate the data
             is_valid, error_msg = self.validate_data_by_type(data, image_type)
 
             if is_valid:
-                # Clean the data (remove any extra fields, normalize)
                 cleaned_data = self._clean_data(data, image_type)
                 return cleaned_data, True, None
             else: