inbox/resolvers/load.py

149 lines
6.0 KiB
Python
Raw Normal View History

2023-10-14 12:59:43 +00:00
import asyncio
2023-10-03 14:15:17 +00:00
import json
2023-10-13 16:45:30 +00:00
from typing import Any, Dict, List, Optional, Union
2023-10-14 12:59:43 +00:00
2023-12-17 17:13:17 +00:00
from models.chat import ChatPayload, Message
from resolvers.chats import create_chat
2023-10-14 12:59:43 +00:00
from services.auth import login_required
2023-12-23 06:11:04 +00:00
from services.core import CacheStorage
2023-10-14 14:55:51 +00:00
from services.rediscache import redis
2023-10-04 21:43:07 +00:00
from services.schema import query
2023-10-14 14:55:51 +00:00
2024-01-24 07:36:50 +00:00
import logging
logger = logging.getLogger("[resolvers.load] ")
logger.setLevel(logging.DEBUG)
2023-10-14 14:55:51 +00:00
2023-11-28 09:05:39 +00:00
async def get_unread_counter(chat_id: str, member_id: int) -> int:
    """Return how many unread messages *member_id* has in chat *chat_id*.

    Reads the length of the per-member unread list in Redis; any
    non-integer reply (nil, error string) counts as zero unread.
    """
    counter = await redis.execute("LLEN", f"chats/{chat_id}/unread/{member_id}")
    return counter if isinstance(counter, int) else 0
2023-10-14 12:59:43 +00:00
2023-10-13 00:16:54 +00:00
2023-10-03 14:15:17 +00:00
# NOTE: not an API handler
async def load_messages(
    chat_id: str, limit: int = 5, offset: int = 0, ids: Optional[List[int]] = None
):
    """Load up to :limit messages of :chat_id starting at :offset.

    Optionally merges the explicit message :ids into the page.  Messages
    referenced via ``reply_to`` that are not part of the page are fetched
    recursively so reply threads can be rendered.  Returns a list of
    decoded message dicts; an empty list on any failure.
    """
    logger.info("load_messages")
    messages = []
    try:
        message_ids = [] + (ids or [])
        if limit:
            # FIX: LRANGE's stop index is inclusive, so the range must be
            # offset..offset+limit-1 to return exactly :limit ids
            # (offset..offset+limit returned limit+1).
            mids = await redis.execute("LRANGE", f"chats/{chat_id}/message_ids", offset, offset + limit - 1)
            if isinstance(mids, list):
                message_ids.extend(mids)
        if message_ids:
            message_keys = [f"chats/{chat_id}/messages/{mid}" for mid in message_ids]
            messages = await redis.execute("MGET", *message_keys)
            if isinstance(messages, list):
                messages = [json.loads(m) if isinstance(m, str) else m for m in messages]
            # collect replied-to messages that are missing from this page
            replies = []
            for m in messages:
                if m:
                    reply_to = m.get("reply_to")
                    if reply_to:
                        reply_to = int(reply_to)
                        if reply_to not in message_ids:
                            replies.append(reply_to)
            if replies:
                # FIX: the recursive call passed (offset, limit) swapped against
                # the signature; here we only need the explicit reply ids, so
                # disable the page fetch (limit=0) — this also guarantees the
                # recursion terminates.
                more_messages = await load_messages(chat_id, 0, 0, replies)
                if isinstance(more_messages, list):
                    messages.extend(more_messages)
    except Exception:
        # log the full stack trace instead of printing to stdout
        logger.exception("load_messages failed")
    return messages
2023-10-14 06:38:12 +00:00
2023-11-28 08:33:50 +00:00
@query.field("load_chats")
@login_required
async def load_chats(_, info, limit: int = 50, offset: int = 0) -> Dict[str, Union[List[Dict[str, Any]], None]]:
    """load :limit chats of current user with :offset

    Each chat payload is enriched with its last 5 messages, the caller's
    unread counter and resolved member profiles (with online status).
    If the author has no chats yet, a chat with the service member
    (id=2, "discours") is created first.  Returns {"chats": [...], "error": None}.
    """
    logger.info("load_chats")
    author_id = info.context["author_id"]
    chats = []
    try:
        if author_id:
            # FIX: logging methods take a %-style format string; passing extra
            # message parts positionally ("got author", author_id) makes the
            # logging machinery raise an internal formatting error.
            logger.debug("got author: %s", author_id)
            cids = await redis.execute("SMEMBERS", f"chats_by_author/{author_id}")
            logger.debug("got cids: %s", cids)
            members_online = (await redis.execute("SMEMBERS", "authors-online")) or []  # to show online status
            logger.debug("members online: %s", members_online)
            if isinstance(cids, set):
                # TODO: add sort by chat.created_at with in-memory caching chats service
                cids = list(cids)[offset : (offset + limit)]
                # NOTE(review): this lock is created per call and only acquired
                # sequentially below — it guards nothing shared; candidate for removal
                lock = asyncio.Lock()
                if len(cids) == 0:
                    logger.debug(f"no chats for user with id={author_id}")
                    r = await create_chat(None, info, members=[2])  # member with id = 2 is discours
                    # NOTE(review): the debug line reads r["chat_id"] while the append
                    # reads r["chat"]["id"] — confirm create_chat's payload shape
                    logger.debug("created chat: %s", r["chat_id"])
                    cids.append(r["chat"]["id"])
                logger.debug("getting data for %d user's chats", len(cids))
                for cid in cids:
                    async with lock:
                        chat_str = await redis.execute("GET", f"chats/{cid}")
                        if isinstance(chat_str, str):
                            logger.debug("redis GET by %s: %s", cid, chat_str)
                            c: ChatPayload = json.loads(chat_str)
                            c["messages"] = (await load_messages(cid, 5, 0)) or []
                            c["unread"] = await get_unread_counter(cid, author_id)
                            # replace raw member ids with cached author profiles
                            member_ids = c["members"].copy()
                            c["members"] = []
                            for member_id in member_ids:
                                a = CacheStorage.authors_by_id.get(str(member_id))
                                if a:
                                    a["online"] = a.get("id") in members_online
                                    c["members"].append(a)
                                else:
                                    logger.error("cant find author by id %s", member_id)
                            chats.append(c)
                        else:
                            logger.error("cant find chat by id %s", cid)
    except Exception:
        # log with full traceback instead of printing to stdout
        logger.exception("load_chats failed")
    return {"chats": chats, "error": None}
2023-11-28 08:33:50 +00:00
@query.field("load_messages_by")
@login_required
async def load_messages_by(_, info, by, limit: int = 10, offset: int = 0):
    """load :limit messages of :chat_id with :offset

    ``by["chat"]`` must name a chat the current author is a member of;
    matching messages are returned sorted by their ``created_at``
    timestamp.  On any failure a generic error payload is returned.
    """
    logger.info("load_messages_by")
    author_id = info.context["author_id"]
    author_chats = await redis.execute("SMEMBERS", f"chats_by_author/{author_id}")
    try:
        if isinstance(author_chats, set):
            author_chats = list(author_chats)
            by_chat = by.get("chat")
            if by_chat in author_chats:
                chat = await redis.execute("GET", f"chats/{by_chat}")
                if not chat:
                    return {"messages": [], "error": "chat not exist"}
                # everyone's messages in filtered chat
                messages = await load_messages(by_chat, limit, offset)
                if isinstance(messages, list):
                    # drop null/deleted entries and anything without a timestamp
                    # before sorting (they cannot be ordered)
                    timestamped = [m for m in messages if m and m.get("created_at")]
                    return {
                        "messages": sorted(
                            timestamped,
                            key=lambda m: m.get("created_at"),
                        ),
                        "error": None,
                    }
    except Exception:
        # log with full traceback instead of printing to stdout
        logger.exception("load_messages_by failed")
    return {"error": "Cannot get messages of this chat"}