Compare commits

...

2 Commits

Author SHA1 Message Date
Stepan Vladovskiy
a1a61a6731 feat: follow same logic as search shouts for authors. Store them to Redis cache + pagination
2025-05-02 18:17:05 -03:00
Stepan Vladovskiy
8d6ad2c84f refactor(author.py): remove verbose logging at resolver level 2025-05-02 18:04:10 -03:00
2 changed files with 25 additions and 17 deletions

author.py

@@ -314,22 +314,16 @@ async def load_authors_search(_, info, text: str, limit: int = 10, offset: int =
         list: List of authors matching the search criteria
     """
 
-    logger.info(f"Executing load_authors_search for text: '{text}', limit: {limit}, offset: {offset}")
-
     # Get author IDs from search engine (already sorted by relevance)
     search_results = await search_service.search_authors(text, limit, offset)
 
     if not search_results:
-        logger.info(f"No authors found in search for '{text}'")
         return []
 
     author_ids = [result.get("id") for result in search_results if result.get("id")]
     if not author_ids:
-        logger.warning(f"Search for '{text}' returned results but no valid IDs.")
         return []
 
-    logger.info(f"Search returned {len(author_ids)} author IDs: {author_ids}")
-
     # Fetch full author objects from DB
     with local_session() as session:
         # Simple query to get authors by IDs - no need for stats here
@@ -337,7 +331,6 @@ async def load_authors_search(_, info, text: str, limit: int = 10, offset: int =
         db_authors = session.execute(authors_query).scalars().all()
 
         if not db_authors:
-            logger.warning(f"No authors found in DB for IDs: {author_ids}")
             return []
 
         # Create a dictionary for quick lookup
@@ -346,7 +339,6 @@ async def load_authors_search(_, info, text: str, limit: int = 10, offset: int =
     # Keep the order from search results (maintains the relevance sorting)
     ordered_authors = [authors_dict[author_id] for author_id in author_ids if author_id in authors_dict]
 
-    logger.info(f"Returning {len(ordered_authors)} authors matching search order.")
     return ordered_authors
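
The final step above is what preserves the search engine's relevance ranking: the DB returns rows in arbitrary order, so the resolver rebuilds the ranked list from a lookup dict. A minimal standalone sketch of that pattern (plain dicts stand in for ORM Author rows; the data is illustrative only):

    # Sketch: fetch-by-IDs, then restore the search engine's order.
    # Plain dicts stand in for Author rows; IDs are illustrative.
    search_order = [42, 7, 19]                     # IDs as ranked by relevance
    db_rows = [{"id": 7}, {"id": 19}, {"id": 42}]  # DB returns arbitrary order

    authors_dict = {row["id"]: row for row in db_rows}
    ordered = [authors_dict[i] for i in search_order if i in authors_dict]

    assert [a["id"] for a in ordered] == [42, 7, 19]  # ranking preserved

The second changed file, below, is where the Redis caching and pagination from the feat commit land: SearchService.search_authors.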


@@ -748,28 +748,44 @@ class SearchService:
         cache_key = f"author:{text}"
 
-        # Try cache first if enabled
+        # Check if we can serve from cache
         if SEARCH_CACHE_ENABLED:
-            if await self.cache.has_query(cache_key):
-                return await self.cache.get(cache_key, limit, offset)
+            has_cache = await self.cache.has_query(cache_key)
+            if has_cache:
+                cached_results = await self.cache.get(cache_key, limit, offset)
+                if cached_results is not None:
+                    return cached_results
 
+        # Not in cache or cache disabled, perform new search
         try:
+            search_limit = limit
+
+            if SEARCH_CACHE_ENABLED:
+                search_limit = SEARCH_PREFETCH_SIZE
+            else:
+                search_limit = limit
+
             logger.info(
-                f"Searching authors for: '{text}' (limit={limit}, offset={offset})"
+                f"Searching authors for: '{text}' (limit={limit}, offset={offset}, search_limit={search_limit})"
             )
             response = await self.client.post(
-                "/search-author", json={"text": text, "limit": limit + offset}
+                "/search-author", json={"text": text, "limit": search_limit}
             )
             response.raise_for_status()
             result = response.json()
             author_results = result.get("results", [])
 
-            # Store in cache if enabled
-            if SEARCH_CACHE_ENABLED:
-                await self.cache.store(cache_key, author_results)
+            # Filter out any invalid results if necessary
+            valid_results = [r for r in author_results if r.get("id", "").isdigit()]
+            if len(valid_results) != len(author_results):
+                author_results = valid_results
 
-            # Apply offset/limit
+            if SEARCH_CACHE_ENABLED:
+                # Store the full prefetch batch, then page it
+                await self.cache.store(cache_key, author_results)
+                return await self.cache.get(cache_key, limit, offset)
+
             return author_results[offset : offset + limit]
 
         except Exception as e:
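
The compare view does not include the cache wrapper both hunks call into, only its contract: has_query(key), store(key, results), and get(key, limit, offset). A minimal sketch of what a Redis-backed prefetch-and-page cache along those lines could look like, assuming redis.asyncio and JSON-serialized result lists (the class body, TTL, and serialization below are assumptions, not the project's actual code):

    import json

    from redis.asyncio import Redis


    class SearchCache:
        """Hypothetical sketch: store one full prefetched result batch,
        then serve limit/offset pages from Redis. Not the project's real class."""

        def __init__(self, redis: Redis, ttl: int = 300):
            self.redis = redis
            self.ttl = ttl  # assumed TTL; the real value is not shown in the diff

        async def has_query(self, key: str) -> bool:
            # EXISTS returns the number of keys found
            return await self.redis.exists(key) > 0

        async def store(self, key: str, results: list) -> None:
            # Store the whole prefetch batch (e.g. SEARCH_PREFETCH_SIZE items)
            await self.redis.set(key, json.dumps(results), ex=self.ttl)

        async def get(self, key: str, limit: int, offset: int):
            raw = await self.redis.get(key)
            if raw is None:
                return None  # lets the caller fall through to a fresh search
            results = json.loads(raw)
            return results[offset : offset + limit]

Prefetching SEARCH_PREFETCH_SIZE results once and paging out of Redis is what lets the new code drop the old limit + offset request: later pages of the same query become cache reads instead of repeated calls to the search engine.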