@@ -18,7 +18,8 @@
     EmbeddingResponse,
     EmbeddingBatchCreateRequest,
     EmbeddingBatchCreateResponse,
-    VectorStoreListResponse
+    VectorStoreListResponse,
+    ContentChunk
 )
 from config import settings
 from embedding_service import embedding_service
@@ -288,17 +289,26 @@ async def search_vector_store(
             # Cosine distance ranges from 0 (identical) to 2 (opposite)
             similarity_score = max(0, 1 - (row['distance'] / 2))
 
+            # Extract filename from metadata or use a default
+            metadata = row[fields.metadata_field] or {}
+            filename = metadata.get('filename', 'document.txt')
+
+            content_chunks = [ContentChunk(type="text", text=row[fields.content_field])]
+
             result = SearchResult(
-                id=row[fields.id_field],
-                content=row[fields.content_field],
+                file_id=row[fields.id_field],
+                filename=filename,
                 score=similarity_score,
-                metadata=row[fields.metadata_field] if request.return_metadata else None
+                attributes=metadata if request.return_metadata else None,
+                content=content_chunks
             )
             search_results.append(result)
 
         return VectorStoreSearchResponse(
+            search_query=request.query,
             data=search_results,
-            usage={"total_tokens": len(search_results)}
+            has_more=False,  # TODO: Implement pagination
+            next_page=None
         )
 
     except HTTPException:
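For context, a minimal sketch of the response models this change builds against, written as Pydantic models. The real definitions live in the project's models module and are not part of this diff, so the field types and defaults below are assumptions inferred from the constructor calls above:

from typing import Any, Dict, List, Literal, Optional

from pydantic import BaseModel


class ContentChunk(BaseModel):
    # Each search hit carries its text as a list of typed content chunks.
    type: Literal["text"] = "text"
    text: str


class SearchResult(BaseModel):
    # A single hit, keyed by the file it came from rather than a chunk id.
    file_id: str
    filename: str
    score: float
    attributes: Optional[Dict[str, Any]] = None
    content: List[ContentChunk]


class VectorStoreSearchResponse(BaseModel):
    # Echoes the query and adds pagination fields in place of the old usage dict.
    search_query: str
    data: List[SearchResult]
    has_more: bool = False
    next_page: Optional[str] = None

Under these assumed shapes, the endpoint returns a paginated search page (search_query, has_more, next_page) instead of the previous usage counter.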