core/services/cache.py

96 lines
3.4 KiB
Python
Raw Normal View History

2024-06-11 14:51:34 +00:00
import asyncio
2024-02-27 12:40:53 +00:00
import json
2024-05-30 16:16:50 +00:00
from typing import List
2024-08-06 16:45:42 +00:00
from sqlalchemy import select
2024-05-30 16:42:38 +00:00
2024-08-06 16:45:42 +00:00
from orm.author import Author
from orm.topic import Topic
2024-05-06 21:06:31 +00:00
from services.db import local_session
2024-03-06 18:57:04 +00:00
from services.encoders import CustomJSONEncoder
2024-04-08 07:38:58 +00:00
from services.rediscache import redis
2024-02-25 13:43:04 +00:00
2024-02-27 12:40:53 +00:00
# Fallback follow lists used when an author has no cached follows yet.
# Every author implicitly follows the default "discours" community.
DEFAULT_FOLLOWS = dict(
    topics=[],
    authors=[],
    communities=[{"id": 1, "name": "Дискурс", "slug": "discours", "pic": ""}],
)
2024-02-25 13:43:04 +00:00
2024-08-06 16:45:42 +00:00
async def cache_multiple_items(items, cache_function):
    """Run cache_function concurrently for every item and wait for all of them."""
    tasks = [cache_function(item) for item in items]
    await asyncio.gather(*tasks)
2024-05-20 22:40:57 +00:00
async def cache_topic(topic: dict):
    """Cache a single topic dict in Redis under its id and slug keys."""
    # Awaiting the helper directly is equivalent to wrapping one item in
    # cache_multiple_items/asyncio.gather, without the gather overhead.
    await _cache_topic_helper(topic)
async def _cache_topic_helper(topic):
    """Serialize a topic dict once and store it under both its id and slug keys."""
    serialized = json.dumps(topic, cls=CustomJSONEncoder)
    keys = (
        f"topic:id:{topic.get('id')}",
        f"topic:slug:{topic.get('slug')}",
    )
    for key in keys:
        await redis.set(key, serialized)
2024-05-20 22:40:57 +00:00
2024-04-09 08:17:32 +00:00
async def cache_author(author: dict):
    """Cache an author dict in Redis under both its user id and author id keys.

    Args:
        author: author payload; expected to carry "id" and optionally "user".
    """
    author_id = author.get("id")
    # `.get("user", "")` only covers a *missing* key — a present key with value
    # None would make `.strip()` raise AttributeError. `or ""` covers both.
    user_id = (author.get("user") or "").strip()
    payload = json.dumps(author, cls=CustomJSONEncoder)
    # user -> id mapping stores just the id; the full payload lives under the id key.
    await redis.set(f"author:user:{user_id}", author_id)
    await redis.set(f"author:id:{author_id}", payload)
2024-03-12 12:50:57 +00:00
2024-02-27 12:40:53 +00:00
2024-05-30 09:49:46 +00:00
async def cache_follows(follower_id: int, entity_type: str, entity_id: int, is_insert=True):
    """Add or remove an entity in a follower's cached follow list and refresh stats.

    Args:
        follower_id: id of the following author.
        entity_type: singular entity kind (e.g. "topic", "author"); pluralized in keys.
        entity_id: id of the entity being (un)followed.
        is_insert: True to add the follow, False to remove it.
    """
    redis_key = f"author:follows-{entity_type}s:{follower_id}"
    follows = await redis.get(redis_key)
    follows = json.loads(follows) if follows else []
    if is_insert:
        # Plain statement instead of the conditional-expression side-effect idiom.
        if entity_id not in follows:
            follows.append(entity_id)
    else:
        follows = [eid for eid in follows if eid != entity_id]
    payload = json.dumps(follows, cls=CustomJSONEncoder)
    await redis.set(redis_key, payload)
    # Keep the cached author's follow counters in sync with the new list.
    follower = await redis.get(f"author:id:{follower_id}")
    if follower:
        follower = json.loads(follower)
        # setdefault guards against cached author payloads without a "stat" section.
        follower.setdefault("stat", {})[f"{entity_type}s"] = len(follows)
        await cache_author(follower)
2024-02-29 21:51:49 +00:00
2024-06-05 14:45:55 +00:00
async def get_cached_topic_by_slug(slug: str, get_with_stat):
    """Return a topic by slug, preferring the Redis cache over the database.

    Args:
        slug: topic slug to look up.
        get_with_stat: awaitable helper that resolves the executed query,
            returning either a Topic or a row/sequence wrapping one.

    Returns:
        The topic as a dict (cache hit), a Topic ORM object (db hit), or None.
    """
    cached_result = await redis.get(f"topic:slug:{slug}")
    if cached_result:
        return json.loads(cached_result)

    with local_session() as session:
        topic_query = select(Topic).filter(Topic.slug == slug)
        result = await get_with_stat(session.execute(topic_query))
        # get_with_stat may hand back the Topic itself or a row/sequence; guard
        # against an empty result instead of raising IndexError/TypeError.
        topic = result if isinstance(result, Topic) else (result[0] if result else None)
        if topic:
            await cache_topic(topic.dict())
        return topic
# Example of aggregated retrieval and caching of author info
async def get_cached_authors_by_ids(author_ids: List[int]) -> List[dict]:
    """Fetch authors by id from Redis, backfilling cache misses from the database.

    Args:
        author_ids: author ids to resolve.

    Returns:
        List of author dicts; any ids missing from Redis are loaded from the
        database and cached for subsequent calls.
    """
    cache_keys = [f"author:id:{author_id}" for author_id in author_ids]
    authors_data = await asyncio.gather(*(redis.get(key) for key in cache_keys))
    authors = [json.loads(author) for author in authors_data if author]
    # Backfill the entries that were missing from the cache
    missing_ids = [author_ids[i] for i, data in enumerate(authors_data) if not data]
    if missing_ids:
        with local_session() as session:
            query = select(Author).where(Author.id.in_(missing_ids))
            # local_session yields a synchronous session (used with a plain
            # `with`, and executed without await elsewhere in this module), so
            # its execute() result is not awaitable — do not await it.
            results = session.execute(query)
            authors_to_cache = [result.dict() for result in results.scalars()]
            await cache_multiple_items(authors_to_cache, cache_author)
            authors.extend(authors_to_cache)

    return authors
2024-05-30 18:29:25 +00:00
2024-05-30 17:22:10 +00:00
2024-08-06 16:45:42 +00:00
# Remaining functions follow with analogous optimizations