From d14f0c2f951e9533341d3ff3ca6980f6526fc2e0 Mon Sep 17 00:00:00 2001
From: Untone
Date: Tue, 19 Dec 2023 15:28:55 +0300
Subject: [PATCH] search-result-schema-fix-3

---
 services/search.py | 28 ++++++++++++++--------------
 1 file changed, 14 insertions(+), 14 deletions(-)

diff --git a/services/search.py b/services/search.py
index 9b901edf..a9166aa1 100644
--- a/services/search.py
+++ b/services/search.py
@@ -16,25 +16,25 @@ class SearchService:
     async def init(session):
         async with SearchService.lock:
             logging.info("[services.search] Initializing SearchService")
-            SearchService.cache = {}
 
     @staticmethod
-    async def search(text, limit: int = 50, offset: int = 0) -> List[Shout]:
-        cached = await redis.execute("GET", text)
-        if not cached:
-            async with SearchService.lock:
-                # Use aiohttp to send a request to ElasticSearch
-                async with aiohttp.ClientSession() as session:
-                    search_url = f"https://search.discours.io/search?q={text}"
-                    try:
+    async def search(text: str, limit: int = 50, offset: int = 0) -> List[Shout]:
+        try:
+            payload = await redis.execute("GET", text)
+
+            if not payload:
+                async with SearchService.lock:
+                    # Use aiohttp to send a request to ElasticSearch
+                    async with aiohttp.ClientSession() as session:
+                        search_url = f"https://search.discours.io/search?q={text}"
                         async with session.get(search_url) as response:
                             if response.status == 200:
                                 payload = await response.json()
                                 await redis.execute("SET", text, json.dumps(payload))  # use redis as cache
-                                return payload[offset : offset + limit]
                             else:
                                 logging.error(f"[services.search] response: {response.status} {await response.text()}")
-                    except Exception as e:
-                        logging.error(f"[services.search] error: {e}")
-        else:
-            return json.loads(cached)[offset : offset + limit]
+        except Exception as e:
+            logging.error(f"[services.search] Error during search: {e}")
+            payload = []
+
+        return payload[offset : offset + limit]
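
Usage sketch of the revised SearchService.search flow (not part of the patch; it assumes the redis client used by the service is already connected elsewhere in the application, and the query string and variable names are illustrative only):

    import asyncio

    from services.search import SearchService

    async def demo():
        # First call with a given text: the redis GET misses, so the method queries
        # https://search.discours.io/search?q=... and caches the JSON payload under that text.
        first_page = await SearchService.search("culture", limit=10, offset=0)
        # A repeated call with the same text is answered from the redis cache
        # and sliced by offset/limit instead of hitting the search backend again.
        second_page = await SearchService.search("culture", limit=10, offset=10)
        return first_page, second_page

    asyncio.run(demo())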