# core/services/search.py

import asyncio
import json
import logging
import os
import time

import httpx

# Set up proper logging
logger = logging.getLogger("search")
logger.setLevel(logging.INFO)  # Change to INFO to see more details
# Configuration for the search service
SEARCH_ENABLED = os.environ.get("SEARCH_ENABLED", "true").lower() in ["true", "1", "yes"]
TXTAI_SERVICE_URL = os.environ.get("TXTAI_SERVICE_URL", "http://search-txtai.web.1:8000")
MAX_BATCH_SIZE = int(os.environ.get("SEARCH_MAX_BATCH_SIZE", "25"))
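
# Example (hypothetical values, not defaults of this module): pointing the
# service at a locally running txtai container might look like
#
#   export SEARCH_ENABLED=true
#   export TXTAI_SERVICE_URL=http://localhost:8000
#   export SEARCH_MAX_BATCH_SIZE=25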


class SearchService:
    def __init__(self):
        logger.info(f"Initializing search service with URL: {TXTAI_SERVICE_URL}")
        self.available = SEARCH_ENABLED
        # Use different timeout settings for indexing and search requests
        self.client = httpx.AsyncClient(timeout=30.0, base_url=TXTAI_SERVICE_URL)
        self.index_client = httpx.AsyncClient(timeout=120.0, base_url=TXTAI_SERVICE_URL)
        if not self.available:
            logger.info("Search disabled (SEARCH_ENABLED = False)")

    async def info(self):
        """Return information about search service"""
        if not self.available:
            return {"status": "disabled"}
        try:
            response = await self.client.get("/info")
            response.raise_for_status()
            result = response.json()
            logger.info(f"Search service info: {result}")
            return result
        except Exception as e:
            logger.error(f"Failed to get search info: {e}")
            return {"status": "error", "message": str(e)}

    def is_ready(self):
        """Check if service is available"""
        return self.available

    def index(self, shout):
        """Index a single document"""
        if not self.available:
            return
        logger.info(f"Indexing post {shout.id}")
        # Start in background to not block (requires a running event loop)
        asyncio.create_task(self.perform_index(shout))

    async def perform_index(self, shout):
        """Actually perform the indexing operation"""
        if not self.available:
            return
        try:
            # Combine all text fields; media may be a dict or a JSON string,
            # so cast it to str before joining
            text = " ".join(filter(None, [
                shout.title or "",
                shout.subtitle or "",
                shout.lead or "",
                shout.body or "",
                str(shout.media) if shout.media else ""
            ]))
            if not text.strip():
                logger.warning(f"No text content to index for shout {shout.id}")
                return
            logger.info(f"Indexing document: ID={shout.id}, Text length={len(text)}")
            # Send to txtai service
            response = await self.client.post(
                "/index",
                json={"id": str(shout.id), "text": text}
            )
            response.raise_for_status()
            result = response.json()
            logger.info(f"Post {shout.id} successfully indexed: {result}")
        except Exception as e:
            logger.error(f"Indexing error for shout {shout.id}: {e}")

    async def bulk_index(self, shouts):
        """Index multiple documents at once"""
        if not self.available or not shouts:
            logger.warning(
                f"Bulk indexing skipped: available={self.available}, "
                f"shouts_count={len(shouts) if shouts else 0}"
            )
            return
        start_time = time.time()
        logger.info(f"Starting bulk indexing of {len(shouts)} documents")
        batch_size = MAX_BATCH_SIZE
        total_indexed = 0
        total_skipped = 0
        for i in range(0, len(shouts), batch_size):
            batch = shouts[i:i + batch_size]
            logger.info(
                f"Processing batch {i // batch_size + 1} of {(len(shouts) - 1) // batch_size + 1}, "
                f"size {len(batch)}"
            )
            documents = []
            for shout in batch:
                try:
                    text_fields = []
                    for field_name in ["title", "subtitle", "lead", "body"]:
                        field_value = getattr(shout, field_name, None)
                        if field_value and isinstance(field_value, str) and field_value.strip():
                            text_fields.append(field_value.strip())
                    # Media may be stored as a JSON string or a dict
                    media = getattr(shout, "media", None)
                    if media:
                        if isinstance(media, str):
                            try:
                                media_json = json.loads(media)
                                if isinstance(media_json, dict):
                                    if "title" in media_json:
                                        text_fields.append(media_json["title"])
                                    if "body" in media_json:
                                        text_fields.append(media_json["body"])
                            except json.JSONDecodeError:
                                text_fields.append(media)
                        elif isinstance(media, dict):
                            if "title" in media:
                                text_fields.append(media["title"])
                            if "body" in media:
                                text_fields.append(media["body"])
                    text = " ".join(text_fields)
                    if not text.strip():
                        logger.debug(f"Skipping shout {shout.id}: no text content")
                        total_skipped += 1
                        continue
                    documents.append({"id": str(shout.id), "text": text})
                    total_indexed += 1
                except Exception as e:
                    logger.error(f"Error processing shout {getattr(shout, 'id', 'unknown')} for indexing: {e}")
                    total_skipped += 1
            if not documents:
                logger.warning(f"No valid documents in batch {i // batch_size + 1}")
                continue
            try:
                sample = documents[0]
                logger.info(f"Sample document: id={sample['id']}, text_length={len(sample['text'])}")
                logger.info(f"Sending batch of {len(documents)} documents to search service")
                response = await self.index_client.post(
                    "/bulk-index",
                    json={"documents": documents}
                )

                # Error handling: a 422 means the payload failed validation;
                # log details and continue with the next batch instead of failing completely
                if response.status_code == 422:
                    error_detail = response.json()
                    logger.error(f"Validation error from search service: {error_detail}")
                    # Try to identify problematic documents
                    for doc in documents:
                        if len(doc["text"]) > 10000:  # Adjust threshold as needed
                            logger.warning(f"Document {doc['id']} has very long text: {len(doc['text'])} chars")
                    continue
                response.raise_for_status()
                result = response.json()
                logger.info(f"Batch {i // batch_size + 1} indexed successfully: {result}")
            except Exception as e:
                logger.error(f"Bulk indexing error for batch {i // batch_size + 1}: {e}")
        elapsed = time.time() - start_time
        logger.info(f"Bulk indexing completed in {elapsed:.2f}s: {total_indexed} indexed, {total_skipped} skipped")

    async def search(self, text, limit, offset):
        """Search documents"""
        if not self.available:
            logger.warning("Search not available")
            return []
        if not isinstance(text, str) or not text.strip():
            logger.warning(f"Invalid search text: {text}")
            return []
        logger.info(f"Sending search request: text='{text}', limit={limit}, offset={offset}")
        try:
            response = await self.client.post(
                "/search",
                json={"text": text, "limit": limit, "offset": offset}
            )
            response.raise_for_status()
            logger.info(f"Raw search response: {response.text}")
            result = response.json()
            logger.info(f"Parsed search response: {result}")
            formatted_results = result.get("results", [])
            logger.info(f"Search for '{text}' returned {len(formatted_results)} results")
            if formatted_results:
                logger.info(f"Sample result: {formatted_results[0]}")
            else:
                logger.warning(f"No results found for '{text}'")
            return formatted_results
        except Exception as e:
            logger.error(f"Search error for '{text}': {e}", exc_info=True)
            return []


# Create the search service singleton
search_service = SearchService()


# API-compatible function to perform a search
async def search_text(text: str, limit: int = 50, offset: int = 0):
    payload = []
    if search_service.available:
        payload = await search_service.search(text, limit, offset)
    return payload
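
# Usage sketch (hypothetical resolver; the handler name and argument shape are
# assumptions, not part of this module):
#
#     async def resolve_load_shouts_search(_, info, text: str, limit: int = 50, offset: int = 0):
#         return await search_text(text, limit, offset)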


async def initialize_search_index(shouts_data):
    """Initialize search index with existing data during application startup"""
    if SEARCH_ENABLED:
        if not shouts_data:
            logger.warning("No shouts data provided for search indexing")
            return
        logger.info(f"Initializing search index with {len(shouts_data)} documents")
        info = await search_service.info()
        if info.get("status") in ["error", "unavailable", "disabled"]:
            logger.error(f"Cannot initialize search index: {info}")
            return
        await search_service.bulk_index(shouts_data)
        try:
            test_query = "test"
            logger.info(f"Verifying search index with query: '{test_query}'")
            test_results = await search_text(test_query, 5)
            if test_results:
                logger.info(f"Search verification successful: found {len(test_results)} results")
            else:
                logger.warning("Search verification returned no results. Index may be empty or not working.")
        except Exception as e:
            logger.error(f"Error verifying search index: {e}")
    else:
        logger.info("Search indexing skipped (SEARCH_ENABLED=False)")