import asyncio
import json
from typing import List

from sqlalchemy import select

from orm.author import Author
from orm.topic import Topic
from services.db import local_session
from services.encoders import CustomJSONEncoder
from services.rediscache import redis
DEFAULT_FOLLOWS = {
    "topics": [],
    "authors": [],
    "communities": [{"id": 1, "name": "Дискурс", "slug": "discours", "pic": ""}],
}


# Cache topic data
async def cache_topic(topic: dict):
    payload = json.dumps(topic, cls=CustomJSONEncoder)
    # Cache under both id and slug simultaneously for fast lookup
    await asyncio.gather(
        redis.set(f"topic:id:{topic['id']}", payload),
        redis.set(f"topic:slug:{topic['slug']}", payload),
    )
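
# Usage sketch (hypothetical payload; assumes the redis client from
# services.rediscache is already connected when the coroutine runs):
#
#     await cache_topic({"id": 5, "slug": "culture", "title": "Culture"})
#     cached = await get_cached_topic_by_slug("culture")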


# Cache author data
async def cache_author(author: dict):
    payload = json.dumps(author, cls=CustomJSONEncoder)
    # Cache the author both by user and by id
    await asyncio.gather(
        redis.set(f"author:user:{author['user'].strip()}", str(author["id"])),
        redis.set(f"author:id:{author['id']}", payload),
    )
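
# Usage sketch (hypothetical author dict; this module itself only relies on
# the "user" and "id" keys being present):
#
#     await cache_author({"id": 1, "user": "oauth|123", "slug": "alice"})
#     alice = await get_cached_author(1)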


# Cache follow/unfollow data
async def cache_follows(follower_id: int, entity_type: str, entity_id: int, is_insert=True):
    key = f"author:follows-{entity_type}s:{follower_id}"
    follows_str = await redis.get(key)
    follows = json.loads(follows_str) if follows_str else []
    if is_insert:
        if entity_id not in follows:
            follows.append(entity_id)
    else:
        follows = [eid for eid in follows if eid != entity_id]
    await redis.set(key, json.dumps(follows, cls=CustomJSONEncoder))
    # update_follower_stat is a coroutine, so it must be awaited
    await update_follower_stat(follower_id, entity_type, len(follows))
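
# Usage sketch (hypothetical ids): author 1 follows topic 5, then unfollows.
#
#     await cache_follows(1, "topic", 5)                   # follow
#     await cache_follows(1, "topic", 5, is_insert=False)  # unfollow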


# Update the follower's stats
async def update_follower_stat(follower_id, entity_type, count):
    follower_key = f"author:id:{follower_id}"
    follower_str = await redis.get(follower_key)
    follower = json.loads(follower_str) if follower_str else None
    if follower:
        # Note: this replaces the whole "stat" dict with the single counter
        follower["stat"] = {f"{entity_type}s": count}
        await cache_author(follower)


# Get an author from the cache
async def get_cached_author(author_id: int):
    author_key = f"author:id:{author_id}"
    result = await redis.get(author_key)
    if result:
        return json.loads(result)
    # Load from the database if not found in the cache
    with local_session() as session:
        author = session.execute(select(Author).where(Author.id == author_id)).scalar_one_or_none()
        if author:
            await cache_author(author.dict())
            return author.dict()
    return None
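
# Read-path note: a cache hit returns the stored dict immediately, a miss is
# filled from the database and cached, and an unknown id yields None, e.g.
#
#     missing = await get_cached_author(10**9)  # hypothetical unknown id -> None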


# Get a topic by slug from the cache
async def get_cached_topic_by_slug(slug: str):
    topic_key = f"topic:slug:{slug}"
    result = await redis.get(topic_key)
    if result:
        return json.loads(result)
    # Load from the database if not found in the cache
    with local_session() as session:
        topic = session.execute(select(Topic).where(Topic.slug == slug)).scalar_one_or_none()
        if topic:
            await cache_topic(topic.dict())
            return topic.dict()
    return None


# Get a list of authors by id from the cache
async def get_cached_authors_by_ids(author_ids: List[int]) -> List[dict]:
    # Fetch all authors' data concurrently
    keys = [f"author:id:{author_id}" for author_id in author_ids]
    results = await asyncio.gather(*(redis.get(key) for key in keys))
    authors = [json.loads(result) if result else None for result in results]
    # Load missing authors from the database and cache them
    missing_indices = [index for index, author in enumerate(authors) if author is None]
    if missing_indices:
        missing_ids = [author_ids[index] for index in missing_indices]
        with local_session() as session:
            query = select(Author).where(Author.id.in_(missing_ids))
            missing_authors = session.execute(query).scalars().all()
            await asyncio.gather(*(cache_author(author.dict()) for author in missing_authors))
            # Put the freshly loaded authors back into their original slots
            # instead of discarding the cached hits; ids absent from the
            # database remain None
            fetched = {author.id: author.dict() for author in missing_authors}
            for index in missing_indices:
                authors[index] = fetched.get(author_ids[index])
    return authors
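

# Minimal smoke-test sketch, assuming the redis client in services.rediscache
# is usable right after import (if the project requires an explicit connect
# step, that call is project-specific and must run first):
if __name__ == "__main__":

    async def _demo():
        await cache_topic({"id": 1, "slug": "demo", "title": "Demo"})
        print(await get_cached_topic_by_slug("demo"))

    asyncio.run(_demo())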