import time
from datetime import UTC, datetime
from typing import Any

import orjson
from graphql import GraphQLResolveInfo
from sqlalchemy import and_, select
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import aliased
from sqlalchemy.sql import not_
from orm.author import Author
from orm.notification import (
    Notification,
    NotificationAction,
    NotificationEntity,
    NotificationSeen,
)
from orm.shout import Shout
from services.auth import login_required
from storage.db import local_session
from storage.schema import mutation, query
from utils.logger import root_logger as logger


def query_notifications(author_id: int, after: int = 0) -> tuple[int, int, list[tuple[Notification, bool]]]:
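    """Fetch notifications with per-viewer seen flags for the given author.

    Returns a (total, unread, notifications) tuple, where notifications is a list
    of (Notification, seen) pairs.
    """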
    notification_seen_alias = aliased(NotificationSeen)
    # Join through the alias so the labeled "seen" column and the join condition
    # refer to the same entity
    q = select(Notification, notification_seen_alias.viewer.label("seen")).outerjoin(
        notification_seen_alias,
        and_(
            notification_seen_alias.viewer == author_id,
            notification_seen_alias.notification == Notification.id,
        ),
    )
    if after:
        # Convert the Unix timestamp to an aware datetime for PostgreSQL compatibility
        after_datetime = datetime.fromtimestamp(after, tz=UTC)
        q = q.where(Notification.created_at > after_datetime)

    with local_session() as session:
        # Count total and unread CREATE notifications under the same time filter
        conditions = [Notification.action == NotificationAction.CREATE.value]
        if after:
            conditions.append(Notification.created_at > after_datetime)

        total = session.query(Notification).where(and_(*conditions)).count()

        unread_conditions = [*conditions, not_(Notification.seen)]
        unread = session.query(Notification).where(and_(*unread_conditions)).count()

        notifications_result = session.execute(q)
        notifications = [(n, seen) for n, seen in notifications_result]

    return total, unread, notifications


def group_notification(
    thread: str,
    authors: list[Any] | None = None,
    shout: Any | None = None,
    reactions: list[Any] | None = None,
    entity: str = "follower",
    action: str = "follow",
) -> dict:
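    """Build a single notification-group dict for one thread.

    A minimal usage sketch (values are illustrative only):

        group = group_notification(
            "shout-42",
            authors=[{"id": 1, "name": "Some Author"}],
            entity="follower",
            action="follow",
        )
    """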
    reactions = reactions or []
    authors = authors or []
    return {
        "thread": thread,
        "authors": authors,
        "updated_at": int(time.time()),
        "shout": shout,
        "reactions": reactions,
        "entity": entity,
        "action": action,
    }

def get_notifications_grouped(author_id: int, after: int = 0, limit: int = 10, offset: int = 0) -> list[dict]:
    """
    Retrieve grouped notifications for a given author.

    Args:
        author_id (int): The ID of the author for whom notifications are retrieved.
        after (int, optional): If provided, only notifications created after this Unix timestamp are considered.
        limit (int, optional): The maximum number of groups to retrieve.
        offset (int, optional): The number of groups to skip before collecting results.

    Returns:
        list[dict]: Notification groups, one per thread.

    This function queries the database for the specified author's notifications, applies the optional filters,
    and groups the results by thread ID. Each group describes the notifications within one thread.

    Group structure:
        {
            "thread": str,         # Thread ID, e.g. "shout-{id}" or "shout-{id}::{reply_id}".
            "entity": str,         # Type of entity (e.g. "reaction", "shout", "follower").
            "action": str,         # Action that produced the notification (e.g. "create", "follow").
            "updated_at": int,     # Timestamp of the latest update in the thread.
            "shout": dict | None,  # The shout involved, if any.
            "reactions": list,     # Reactions within the thread.
            "authors": list,       # Authors involved in the thread.
        }
    """
    _total, _unread, notifications = query_notifications(author_id, after)
    groups_by_thread = {}
    groups_amount = 0

    for notification, _seen in notifications:
        if (groups_amount + offset) >= limit:
            break

        payload = orjson.loads(str(notification.payload))

        if str(notification.entity) == NotificationEntity.SHOUT.value:
            shout = payload
            shout_id = shout.get("id")
            # Use a distinct name so the author_id parameter is not shadowed
            shout_author_id = shout.get("created_by")
            thread_id = f"shout-{shout_id}"

            with local_session() as session:
                author = session.query(Author).where(Author.id == shout_author_id).first()
                shout = session.query(Shout).where(Shout.id == shout_id).first()
                if author and shout:
                    author_dict = author.dict()
                    shout_dict = shout.dict()
                    group = group_notification(
                        thread_id,
                        shout=shout_dict,
                        authors=[author_dict],
                        action=str(notification.action),
                        entity=str(notification.entity),
                    )
                    groups_by_thread[thread_id] = group
                    groups_amount += 1
        elif str(notification.entity) == NotificationEntity.REACTION.value:
            reaction = payload
            if not isinstance(reaction, dict):
                msg = "reaction data is not consistent"
                raise ValueError(msg)
            shout_id = reaction.get("shout")
            reaction_author_id = reaction.get("created_by", 0)
            if shout_id and reaction_author_id:
                with local_session() as session:
                    author = session.query(Author).where(Author.id == reaction_author_id).first()
                    shout = session.query(Shout).where(Shout.id == shout_id).first()
                    if shout and author:
                        author_dict = author.dict()
                        shout_dict = shout.dict()
                        reply_id = reaction.get("reply_to")
                        thread_id = f"shout-{shout_id}"
                        if reply_id and reaction.get("kind", "").lower() == "comment":
                            thread_id = f"shout-{shout_id}::{reply_id}"

                        existing_group = groups_by_thread.get(thread_id)
                        if existing_group:
                            existing_group["seen"] = False
                            # Append the author dict so the authors list stays homogeneous
                            existing_group["authors"].append(author_dict)
                            existing_group["reactions"] = existing_group["reactions"] or []
                            existing_group["reactions"].append(reaction)
                            groups_by_thread[thread_id] = existing_group
                        else:
                            group = group_notification(
                                thread_id,
                                authors=[author_dict],
                                shout=shout_dict,
                                reactions=[reaction],
                                entity=str(notification.entity),
                                action=str(notification.action),
                            )
                            groups_by_thread[thread_id] = group
                            groups_amount += 1
        elif str(notification.entity) == "follower":
            thread_id = "followers"
            # payload is already deserialized above; only parse again if it is still a string
            follower = payload if isinstance(payload, dict) else orjson.loads(payload)
            existing_group = groups_by_thread.get(thread_id)
            if existing_group:
                if str(notification.action) == "follow":
                    existing_group["authors"].append(follower)
                elif str(notification.action) == "unfollow":
                    follower_id = follower.get("id")
                    for author in existing_group["authors"]:
                        if isinstance(author, dict) and author.get("id") == follower_id:
                            existing_group["authors"].remove(author)
                            break
            else:
                group = group_notification(
                    thread_id,
                    authors=[follower],
                    entity=str(notification.entity),
                    action=str(notification.action),
                )
                groups_amount += 1
                existing_group = group
                groups_by_thread[thread_id] = existing_group

    return list(groups_by_thread.values())


@query.field("load_notifications")
@login_required
async def load_notifications(_: None, info: GraphQLResolveInfo, after: int, limit: int = 50, offset: int = 0) -> dict:
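    """Resolver returning the author's grouped notifications plus total and unread counts."""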
    author_dict = info.context.get("author") or {}
    author_id = author_dict.get("id")
    error = None
    total = 0
    unread = 0
    notifications = []
    try:
        if author_id:
            # Forward offset so the resolver's pagination arguments take effect
            groups_list = get_notifications_grouped(author_id, after, limit, offset)
            notifications = sorted(groups_list, key=lambda group: group.get("updated_at", 0), reverse=True)

            # Count the actual number of grouped notifications
            total = len(notifications)
            unread = sum(1 for n in notifications if not n.get("seen", False))
    except Exception as e:
        error = str(e)
        logger.error(e)
    return {
        "notifications": notifications,
        "total": total,
        "unread": unread,
        "error": error,
    }


@mutation.field("notification_mark_seen")
@login_required
async def notification_mark_seen(_: None, info: GraphQLResolveInfo, notification_id: int) -> dict:
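    """Mark a single notification as seen by the current viewer."""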
    author_id = info.context.get("author", {}).get("id")
    if author_id:
        with local_session() as session:
            try:
                ns = NotificationSeen(notification=notification_id, viewer=author_id)
                session.add(ns)
                session.commit()
            except SQLAlchemyError as e:
                session.rollback()
                logger.error(f"seen mutation failed: {e}")
                return {"error": "cant mark as read"}
    return {"error": None}


@mutation.field("notifications_seen_after")
@login_required
async def notifications_seen_after(_: None, info: GraphQLResolveInfo, after: int) -> dict:
    """Mark all notifications created after the given timestamp as seen."""
    error = None
    try:
        author_id = info.context.get("author", {}).get("id")
        if author_id:
            with local_session() as session:
                # Convert the Unix timestamp to datetime for PostgreSQL compatibility
                after_datetime = datetime.fromtimestamp(after, tz=UTC) if after else None
                if after_datetime:
                    nnn = session.query(Notification).where(Notification.created_at > after_datetime).all()
                else:
                    nnn = session.query(Notification).all()
                for notification in nnn:
                    # NotificationSeen records the viewer, as in the other mutations
                    ns = NotificationSeen(notification=notification.id, viewer=author_id)
                    session.add(ns)
                session.commit()
    except Exception as e:
        logger.error(e)
        error = "cant mark as read"
    return {"error": error}


@mutation.field("notifications_seen_thread")
@login_required
async def notifications_seen_thread(_: None, info: GraphQLResolveInfo, thread: str, after: int) -> dict:
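    """Mark all notifications in the given thread as seen by the current viewer."""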
    error = None
    author_id = info.context.get("author", {}).get("id")
    if author_id:
        with local_session() as session:
            # Convert the Unix timestamp to datetime for PostgreSQL compatibility
            after_datetime = datetime.fromtimestamp(after, tz=UTC) if after else None

            # Handle the different thread types: followers, new shouts, and reactions
            if thread == "followers":
                # Mark follower notifications as seen
                query_conditions = [
                    Notification.entity == NotificationEntity.FOLLOWER.value,
                ]
                if after_datetime:
                    query_conditions.append(Notification.created_at > after_datetime)

                follower_notifications = session.query(Notification).where(and_(*query_conditions)).all()
                for n in follower_notifications:
                    try:
                        ns = NotificationSeen(notification=n.id, viewer=author_id)
                        session.add(ns)
                    except Exception as e:
                        logger.warning(f"Failed to mark follower notification as seen: {e}")
                session.commit()
                return {"error": None}
            # Handle shout and reaction notifications; thread ids are built in
            # get_notifications_grouped as "shout-{shout_id}" or "shout-{shout_id}::{reply_id}"
            thread_parts = thread.removeprefix("shout-").split("::")
            shout_id = thread_parts[0]
            if not shout_id:
                return {"error": "Invalid thread format"}
            reply_to_id = thread_parts[1] if len(thread_parts) > 1 else None

            # Query for new shout notifications in this thread
            shout_query_conditions = [
                Notification.entity == NotificationEntity.SHOUT.value,
                Notification.action == NotificationAction.CREATE.value,
            ]
            if after_datetime:
                shout_query_conditions.append(Notification.created_at > after_datetime)

            shout_notifications = session.query(Notification).where(and_(*shout_query_conditions)).all()

            # Mark relevant shout notifications as seen
            for n in shout_notifications:
                payload = orjson.loads(str(n.payload))
                if str(payload.get("id")) == shout_id:
                    try:
                        ns = NotificationSeen(notification=n.id, viewer=author_id)
                        session.add(ns)
                    except Exception as e:
                        logger.warning(f"Failed to mark shout notification as seen: {e}")

            # Query for created and removed reaction notifications, applying the same
            # optional time filter to both
            reaction_conditions = [Notification.entity == NotificationEntity.REACTION.value]
            if after_datetime:
                reaction_conditions.append(Notification.created_at > after_datetime)

            new_reaction_notifications = (
                session.query(Notification)
                .where(and_(Notification.action == NotificationAction.CREATE.value, *reaction_conditions))
                .all()
            )
            removed_reaction_notifications = (
                session.query(Notification)
                .where(and_(Notification.action == NotificationAction.DELETE.value, *reaction_conditions))
                .all()
            )

            # Skip reactions that were subsequently deleted
            exclude = set()
            for nr in removed_reaction_notifications:
                reaction = orjson.loads(str(nr.payload))
                reaction_id = reaction.get("id")
                exclude.add(reaction_id)
            for n in new_reaction_notifications:
                reaction = orjson.loads(str(n.payload))
                reaction_id = reaction.get("id")
                # Compare as strings: shout_id and reply_to_id were parsed out of the thread string
                if (
                    reaction_id not in exclude
                    and str(reaction.get("shout")) == shout_id
                    and str(reaction.get("reply_to")) == str(reply_to_id)
                ):
                    try:
                        ns = NotificationSeen(notification=n.id, viewer=author_id)
                        session.add(ns)
                        session.commit()
                    except Exception as e:
                        logger.warning(e)
                        session.rollback()
    else:
        error = "You are not logged in"
    return {"error": error}