upgrade schema, resolvers, panel added
@@ -14,7 +14,7 @@ from cache.cache import (
     get_cached_follower_topics,
     invalidate_cache_by_prefix,
 )
-from orm.author import Author
+from auth.orm import Author
 from resolvers.stat import get_with_stat
 from services.auth import login_required
 from services.db import local_session
@@ -70,7 +70,9 @@ async def get_authors_with_stats(limit=50, offset=0, by: Optional[str] = None):

 # Функция для получения авторов из БД
 async def fetch_authors_with_stats():
-logger.debug(f"Выполняем запрос на получение авторов со статистикой: limit={limit}, offset={offset}, by={by}")
+logger.debug(
+    f"Выполняем запрос на получение авторов со статистикой: limit={limit}, offset={offset}, by={by}"
+)

 with local_session() as session:
 # Базовый запрос для получения авторов
@@ -80,7 +82,7 @@ async def get_authors_with_stats(limit=50, offset=0, by: Optional[str] = None):
 if by:
 if isinstance(by, dict):
 # Обработка словаря параметров сортировки
-from sqlalchemy import asc, desc
+from sqlalchemy import desc

 for field, direction in by.items():
 column = getattr(Author, field, None)
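Every file touched by this commit swaps `from orm.author import ...` for `from auth.orm import ...`. Below is a minimal sketch of a temporary re-export shim that could keep stale `orm.author` imports resolving during such a move; it is hypothetical, not part of this commit, and only assumes the models now live in `auth.orm` as the hunks show:

    # orm/author.py -- hypothetical compatibility shim, not in this commit
    import warnings

    # re-export the models from their new home so old imports keep resolving
    from auth.orm import Author, AuthorBookmark, AuthorFollower, AuthorRating

    warnings.warn(
        "orm.author is deprecated, import from auth.orm instead",
        DeprecationWarning,
        stacklevel=2,
    )

    __all__ = ["Author", "AuthorBookmark", "AuthorFollower", "AuthorRating"]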
@@ -3,7 +3,7 @@ from operator import and_
 from graphql import GraphQLError
 from sqlalchemy import delete, insert

-from orm.author import AuthorBookmark
+from auth.orm import AuthorBookmark
 from orm.shout import Shout
 from resolvers.feed import apply_options
 from resolvers.reader import get_shouts_with_links, query_with_stat
@@ -72,7 +72,9 @@ def toggle_bookmark_shout(_, info, slug: str) -> CommonResult:

 if existing_bookmark:
 db.execute(
-delete(AuthorBookmark).where(AuthorBookmark.author == author_id, AuthorBookmark.shout == shout.id)
+delete(AuthorBookmark).where(
+    AuthorBookmark.author == author_id, AuthorBookmark.shout == shout.id
+)
 )
 result = False
 else:
@@ -1,4 +1,4 @@
-from orm.author import Author
+from auth.orm import Author
 from orm.invite import Invite, InviteStatus
 from orm.shout import Shout
 from services.auth import login_required
@@ -1,4 +1,4 @@
-from orm.author import Author
+from auth.orm import Author
 from orm.community import Community, CommunityFollower
 from services.db import local_session
 from services.schema import mutation, query
@@ -74,9 +74,9 @@ async def update_community(_, info, community_data):
 if slug:
 with local_session() as session:
 try:
-session.query(Community).where(Community.created_by == author_id, Community.slug == slug).update(
-    community_data
-)
+session.query(Community).where(
+    Community.created_by == author_id, Community.slug == slug
+).update(community_data)
 session.commit()
 except Exception as e:
 return {"ok": False, "error": str(e)}
@@ -90,7 +90,9 @@ async def delete_community(_, info, slug: str):
 author_id = author_dict.get("id")
 with local_session() as session:
 try:
-session.query(Community).where(Community.slug == slug, Community.created_by == author_id).delete()
+session.query(Community).where(
+    Community.slug == slug, Community.created_by == author_id
+).delete()
 session.commit()
 return {"ok": True}
 except Exception as e:
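The two community hunks only re-wrap a bulk `Query.where(...).update(...)` / `.delete()` call. A self-contained sketch of that pattern follows, using a stand-in `Community` model, an in-memory SQLite engine, and an explicit `synchronize_session` argument; all three are assumptions for the sketch, not code from this repository:

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()


    class Community(Base):  # stand-in model, for this sketch only
        __tablename__ = "community"
        id = Column(Integer, primary_key=True)
        slug = Column(String, unique=True)
        created_by = Column(Integer)


    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        session.add(Community(slug="demo", created_by=1))
        session.commit()
        # bulk UPDATE scoped to owner and slug, as in update_community
        session.query(Community).where(
            Community.created_by == 1, Community.slug == "demo"
        ).update({"slug": "demo-renamed"}, synchronize_session="fetch")
        # bulk DELETE scoped the same way, as in delete_community
        session.query(Community).where(
            Community.slug == "demo-renamed", Community.created_by == 1
        ).delete(synchronize_session="fetch")
        session.commit()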
@@ -1,26 +1,22 @@
 import time

-import trafilatura
 from sqlalchemy.orm import joinedload

 from cache.cache import (
     cache_author,
     cache_by_id,
     cache_topic,
     invalidate_shout_related_cache,
     invalidate_shouts_cache,
 )
-from orm.author import Author
+from auth.orm import Author
 from orm.draft import Draft, DraftAuthor, DraftTopic
 from orm.shout import Shout, ShoutAuthor, ShoutTopic
 from orm.topic import Topic
 from services.auth import login_required
 from services.db import local_session
 from services.notify import notify_shout
 from services.schema import mutation, query
 from services.search import search_service
-from utils.html_wrapper import wrap_html_fragment
+from utils.extract_text import extract_text
 from utils.logger import root_logger as logger


 def create_shout_from_draft(session, draft, author_id):
 """
 Создаёт новый объект публикации (Shout) на основе черновика.
@@ -62,11 +58,11 @@ def create_shout_from_draft(session, draft, author_id):
 draft=draft.id,
 deleted_at=None,
 )

 # Инициализируем пустые массивы для связей
 shout.topics = []
 shout.authors = []

 return shout
@@ -75,10 +71,10 @@ def create_shout_from_draft(session, draft, author_id):
 async def load_drafts(_, info):
 """
 Загружает все черновики, доступные текущему пользователю.

 Предварительно загружает связанные объекты (topics, authors, publication),
 чтобы избежать ошибок с отсоединенными объектами при сериализации.

 Returns:
     dict: Список черновиков или сообщение об ошибке
 """
@@ -97,12 +93,12 @@ async def load_drafts(_, info):
 .options(
     joinedload(Draft.topics),
     joinedload(Draft.authors),
-    joinedload(Draft.publication) # Загружаем связанную публикацию
+    joinedload(Draft.publication), # Загружаем связанную публикацию
 )
 .filter(Draft.authors.any(Author.id == author_id))
 )
 drafts = drafts_query.all()

 # Преобразуем объекты в словари, пока они в контексте сессии
 drafts_data = []
 for draft in drafts:
@@ -110,19 +106,19 @@ async def load_drafts(_, info):
 # Всегда возвращаем массив для topics, даже если он пустой
 draft_dict["topics"] = [topic.dict() for topic in (draft.topics or [])]
 draft_dict["authors"] = [author.dict() for author in (draft.authors or [])]

 # Добавляем информацию о публикации, если она есть
 if draft.publication:
 draft_dict["publication"] = {
     "id": draft.publication.id,
     "slug": draft.publication.slug,
-    "published_at": draft.publication.published_at
+    "published_at": draft.publication.published_at,
 }
 else:
-draft_dict["publication"] = None
+draft_dict["publication"] = None

 drafts_data.append(draft_dict)

 return {"drafts": drafts_data}
 except Exception as e:
 logger.error(f"Failed to load drafts: {e}", exc_info=True)
@@ -180,27 +176,27 @@ async def create_draft(_, info, draft_input):
 # Remove id from input if present since it's auto-generated
 if "id" in draft_input:
 del draft_input["id"]

 # Добавляем текущее время создания и ID автора
 draft_input["created_at"] = int(time.time())
 draft_input["created_by"] = author_id
 draft = Draft(**draft_input)
 session.add(draft)
 session.flush()

 # Добавляем создателя как автора
 da = DraftAuthor(shout=draft.id, author=author_id)
 session.add(da)

 session.commit()
 return {"draft": draft}
 except Exception as e:
 logger.error(f"Failed to create draft: {e}", exc_info=True)
 return {"error": f"Failed to create draft: {str(e)}"}


 def generate_teaser(body, limit=300):
-body_html = wrap_html_fragment(body)
-body_text = trafilatura.extract(body_html, include_comments=False, include_tables=False)
+body_text = extract_text(body)
 body_teaser = ". ".join(body_text[:limit].split(". ")[:-1])
 return body_teaser
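The draft and editor hunks replace the `wrap_html_fragment` + `trafilatura.extract` pair with a single `extract_text` call from `utils.extract_text`, whose body is not shown in this diff. A plausible sketch of such a helper, assuming it simply folds the two old steps together (the fragment wrapper below is likewise a stand-in, not the project's real `utils.html_wrapper`):

    # utils/extract_text.py -- hypothetical sketch, the real helper is not in this diff
    import trafilatura


    def wrap_html_fragment(fragment: str) -> str:
        # minimal stand-in: give trafilatura a complete document to work with
        return f"<html><body>{fragment or ''}</body></html>"


    def extract_text(html_fragment: str) -> str:
        # return plain text for an HTML fragment, or "" when nothing is extractable
        if not html_fragment or not html_fragment.strip():
            return ""
        extracted = trafilatura.extract(
            wrap_html_fragment(html_fragment),
            include_comments=False,
            include_tables=False,
        )
        return extracted or ""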
@@ -246,9 +242,20 @@ async def update_draft(_, info, draft_id: int, draft_input):
 # Фильтруем входные данные, оставляя только разрешенные поля
 allowed_fields = {
-    "layout", "author_ids", "topic_ids", "main_topic_id",
-    "media", "lead", "subtitle", "lang", "seo", "body",
-    "title", "slug", "cover", "cover_caption"
+    "layout",
+    "author_ids",
+    "topic_ids",
+    "main_topic_id",
+    "media",
+    "lead",
+    "subtitle",
+    "lang",
+    "seo",
+    "body",
+    "title",
+    "slug",
+    "cover",
+    "cover_caption",
 }
 filtered_input = {k: v for k, v in draft_input.items() if k in allowed_fields}
@@ -277,9 +284,9 @@ async def update_draft(_, info, draft_id: int, draft_input):
 # Добавляем новые связи
 for tid in topic_ids:
 dt = DraftTopic(
     shout=draft_id,
     topic=tid,
-    main=(tid == main_topic_id) if main_topic_id else False
+    main=(tid == main_topic_id) if main_topic_id else False,
 )
 session.add(dt)
@@ -287,13 +294,10 @@ async def update_draft(_, info, draft_id: int, draft_input):
 if "seo" not in filtered_input and not draft.seo:
 body_src = filtered_input.get("body", draft.body)
 lead_src = filtered_input.get("lead", draft.lead)
-body_html = wrap_html_fragment(body_src)
-lead_html = wrap_html_fragment(lead_src)

 try:
-body_text = trafilatura.extract(body_html, include_comments=False, include_tables=False) if body_src else None
-lead_text = trafilatura.extract(lead_html, include_comments=False, include_tables=False) if lead_src else None
+body_text = extract_text(body_src) if body_src else None
+lead_text = extract_text(lead_src) if lead_src else None
 body_teaser = generate_teaser(body_text, 300) if body_text else ""
 filtered_input["seo"] = lead_text if lead_text else body_teaser
 except Exception as e:
@@ -308,14 +312,14 @@ async def update_draft(_, info, draft_id: int, draft_input):
 draft.updated_by = author_id

 session.commit()

 # Преобразуем объект в словарь для ответа
 draft_dict = draft.dict()
 draft_dict["topics"] = [topic.dict() for topic in draft.topics]
 draft_dict["authors"] = [author.dict() for author in draft.authors]
 # Добавляем объект автора в updated_by
 draft_dict["updated_by"] = author_dict

 return {"draft": draft_dict}

 except Exception as e:
@@ -343,13 +347,13 @@ async def delete_draft(_, info, draft_id: int):
 def validate_html_content(html_content: str) -> tuple[bool, str]:
 """
 Проверяет валидность HTML контента через trafilatura.

 Args:
     html_content: HTML строка для проверки

 Returns:
     tuple[bool, str]: (валидность, сообщение об ошибке)

 Example:
     >>> is_valid, error = validate_html_content("<p>Valid HTML</p>")
     >>> is_valid
@@ -364,13 +368,10 @@ def validate_html_content(html_content: str) -> tuple[bool, str]:
 """
 if not html_content or not html_content.strip():
 return False, "Content is empty"

 try:
-html_content = wrap_html_fragment(html_content)
-extracted = trafilatura.extract(html_content)
-if not extracted:
-    return False, "Invalid HTML structure or empty content"
-return True, ""
+extracted = extract_text(html_content)
+return bool(extracted), extracted or ""
 except Exception as e:
 logger.error(f"HTML validation error: {e}", exc_info=True)
 return False, f"Invalid HTML content: {str(e)}"
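Note that the rewritten happy path returns the extracted text as the second tuple element, while the failure paths still return a message, so the docstring's `(валидность, сообщение об ошибке)` contract only holds for failures. A small usage sketch of the behaviour as it reads from this hunk (an assumption, not taken from tests in the commit):

    # assumes validate_html_content as defined above is importable
    is_valid, payload = validate_html_content("<p>Some valid HTML paragraph.</p>")
    if is_valid:
        print("extracted text:", payload)  # payload carries the text, not an error

    is_valid, payload = validate_html_content("   ")
    print(is_valid, payload)  # False, "Content is empty"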
@@ -381,10 +382,10 @@ def validate_html_content(html_content: str) -> tuple[bool, str]:
 async def publish_draft(_, info, draft_id: int):
 """
 Публикует черновик, создавая новый Shout или обновляя существующий.

 Args:
     draft_id (int): ID черновика для публикации

 Returns:
     dict: Результат публикации с shout или сообщением об ошибке
 """
@@ -400,11 +401,7 @@ async def publish_draft(_, info, draft_id: int):
 # Загружаем черновик со всеми связями
 draft = (
 session.query(Draft)
-.options(
-    joinedload(Draft.topics),
-    joinedload(Draft.authors),
-    joinedload(Draft.publication)
-)
+.options(joinedload(Draft.topics), joinedload(Draft.authors), joinedload(Draft.publication))
 .filter(Draft.id == draft_id)
 .first()
 )
@@ -421,7 +418,17 @@ async def publish_draft(_, info, draft_id: int):
 if draft.publication:
 shout = draft.publication
 # Обновляем существующую публикацию
-for field in ["body", "title", "subtitle", "lead", "cover", "cover_caption", "media", "lang", "seo"]:
+for field in [
+    "body",
+    "title",
+    "subtitle",
+    "lead",
+    "cover",
+    "cover_caption",
+    "media",
+    "lang",
+    "seo",
+]:
 if hasattr(draft, field):
 setattr(shout, field, getattr(draft, field))
 shout.updated_at = int(time.time())
@@ -440,16 +447,14 @@ async def publish_draft(_, info, draft_id: int):
 session.query(ShoutTopic).filter(ShoutTopic.shout == shout.id).delete()

 # Добавляем авторов
-for author in (draft.authors or []):
+for author in draft.authors or []:
 sa = ShoutAuthor(shout=shout.id, author=author.id)
 session.add(sa)

 # Добавляем темы
-for topic in (draft.topics or []):
+for topic in draft.topics or []:
 st = ShoutTopic(
-    topic=topic.id,
-    shout=shout.id,
-    main=topic.main if hasattr(topic, "main") else False
+    topic=topic.id, shout=shout.id, main=topic.main if hasattr(topic, "main") else False
 )
 session.add(st)
@@ -1,7 +1,6 @@
 import time

 import orjson
-import trafilatura
 from sqlalchemy import and_, desc, select
 from sqlalchemy.orm import joinedload, selectinload
 from sqlalchemy.sql.functions import coalesce
@@ -12,7 +11,7 @@ from cache.cache import (
     invalidate_shout_related_cache,
     invalidate_shouts_cache,
 )
-from orm.author import Author
+from auth.orm import Author
 from orm.draft import Draft
 from orm.shout import Shout, ShoutAuthor, ShoutTopic
 from orm.topic import Topic
@@ -23,7 +22,7 @@ from services.db import local_session
 from services.notify import notify_shout
 from services.schema import mutation, query
 from services.search import search_service
-from utils.html_wrapper import wrap_html_fragment
+from utils.extract_text import extract_text
 from utils.logger import root_logger as logger
@@ -181,11 +180,11 @@ async def create_shout(_, info, inp):
 # Создаем публикацию без topics
 body = inp.get("body", "")
 lead = inp.get("lead", "")
-body_html = wrap_html_fragment(body)
-lead_html = wrap_html_fragment(lead)
-body_text = trafilatura.extract(body_html)
-lead_text = trafilatura.extract(lead_html)
-seo = inp.get("seo", lead_text.strip() or body_text.strip()[:300].split(". ")[:-1].join(". "))
+body_text = extract_text(body)
+lead_text = extract_text(lead)
+seo = inp.get(
+    "seo", lead_text.strip() or body_text.strip()[:300].split(". ")[:-1].join(". ")
+)
 new_shout = Shout(
     slug=slug,
     body=body,
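Both the old and the new form of the `seo` fallback above call `.join(". ")` on a list slice, and Python lists have no `join` method, so that default raises `AttributeError` whenever the lead text is empty and the expression gets evaluated; `generate_teaser` in the draft resolver spells the same idea the other way round. A sketch of that presumable intent (an assumption, not code from the commit):

    def seo_fallback(lead_text: str, body_text: str, limit: int = 300) -> str:
        # prefer the lead; otherwise cut the body at the last full sentence within the limit
        lead = (lead_text or "").strip()
        if lead:
            return lead
        teaser_source = (body_text or "").strip()[:limit]
        return ". ".join(teaser_source.split(". ")[:-1])


    print(seo_fallback("", "First sentence. Second sentence. Third."))
    # -> "First sentence. Second sentence"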
@@ -282,7 +281,9 @@ def patch_main_topic(session, main_topic_slug, shout):
 with session.begin():
 # Получаем текущий главный топик
 old_main = (
-session.query(ShoutTopic).filter(and_(ShoutTopic.shout == shout.id, ShoutTopic.main.is_(True))).first()
+session.query(ShoutTopic)
+.filter(and_(ShoutTopic.shout == shout.id, ShoutTopic.main.is_(True)))
+.first()
 )
 if old_main:
 logger.info(f"Found current main topic: {old_main.topic.slug}")
@@ -316,7 +317,9 @@ def patch_main_topic(session, main_topic_slug, shout):
 session.flush()
 logger.info(f"Main topic updated for shout#{shout.id}")
 else:
-logger.warning(f"No changes needed for main topic (old={old_main is not None}, new={new_main is not None})")
+logger.warning(
+    f"No changes needed for main topic (old={old_main is not None}, new={new_main is not None})"
+)


 def patch_topics(session, shout, topics_input):
@@ -417,7 +420,9 @@ async def update_shout(_, info, shout_id: int, shout_input=None, publish=False):
 logger.info(f"Processing update for shout#{shout_id} by author #{author_id}")
 shout_by_id = (
 session.query(Shout)
-.options(joinedload(Shout.topics).joinedload(ShoutTopic.topic), joinedload(Shout.authors))
+.options(
+    joinedload(Shout.topics).joinedload(ShoutTopic.topic), joinedload(Shout.authors)
+)
 .filter(Shout.id == shout_id)
 .first()
 )
@@ -446,7 +451,10 @@ async def update_shout(_, info, shout_id: int, shout_input=None, publish=False):
 shout_input["slug"] = slug
 logger.info(f"shout#{shout_id} slug patched")

-if filter(lambda x: x.id == author_id, [x for x in shout_by_id.authors]) or "editor" in roles:
+if (
+    filter(lambda x: x.id == author_id, [x for x in shout_by_id.authors])
+    or "editor" in roles
+):
 logger.info(f"Author #{author_id} has permission to edit shout#{shout_id}")

 # topics patch
@@ -560,7 +568,9 @@ async def update_shout(_, info, shout_id: int, shout_input=None, publish=False):
 # Получаем полные данные шаута со связями
 shout_with_relations = (
 session.query(Shout)
-.options(joinedload(Shout.topics).joinedload(ShoutTopic.topic), joinedload(Shout.authors))
+.options(
+    joinedload(Shout.topics).joinedload(ShoutTopic.topic), joinedload(Shout.authors)
+)
 .filter(Shout.id == shout_id)
 .first()
 )
@@ -648,19 +658,17 @@ async def delete_shout(_, info, shout_id: int):
 def get_main_topic(topics):
 """Get the main topic from a list of ShoutTopic objects."""
 logger.info(f"Starting get_main_topic with {len(topics) if topics else 0} topics")
-logger.debug(
-    f"Topics data: {[(t.slug, getattr(t, 'main', False)) for t in topics] if topics else []}"
-)
+logger.debug(f"Topics data: {[(t.slug, getattr(t, 'main', False)) for t in topics] if topics else []}")

 if not topics:
 logger.warning("No topics provided to get_main_topic")
 return {"id": 0, "title": "no topic", "slug": "notopic", "is_main": True}

 # Проверяем, является ли topics списком объектов ShoutTopic или Topic
-if hasattr(topics[0], 'topic') and topics[0].topic:
+if hasattr(topics[0], "topic") and topics[0].topic:
 # Для ShoutTopic объектов (старый формат)
 # Find first main topic in original order
-main_topic_rel = next((st for st in topics if getattr(st, 'main', False)), None)
+main_topic_rel = next((st for st in topics if getattr(st, "main", False)), None)
 logger.debug(
     f"Found main topic relation: {main_topic_rel.topic.slug if main_topic_rel and main_topic_rel.topic else None}"
 )
@@ -701,6 +709,7 @@ def get_main_topic(topics):
 logger.warning("No valid topics found, returning default")
 return {"slug": "notopic", "title": "no topic", "id": 0, "is_main": True}

+
 @mutation.field("unpublish_shout")
 @login_required
 async def unpublish_shout(_, info, shout_id: int):
@@ -727,31 +736,25 @@ async def unpublish_shout(_, info, shout_id: int):
 # Загружаем Shout со всеми связями для правильного формирования ответа
 shout = (
 session.query(Shout)
-.options(
-    joinedload(Shout.authors),
-    selectinload(Shout.topics)
-)
+.options(joinedload(Shout.authors), selectinload(Shout.topics))
 .filter(Shout.id == shout_id)
 .first()
 )

 if not shout:
-logger.warning(f"Shout not found for unpublish: ID {shout_id}")
-return {"error": "Shout not found"}
+logger.warning(f"Shout not found for unpublish: ID {shout_id}")
+return {"error": "Shout not found"}

 # Если у публикации есть связанный черновик, загружаем его с relationships
 if shout.draft:
 # Отдельно загружаем черновик с его связями
 draft = (
 session.query(Draft)
-.options(
-    selectinload(Draft.authors),
-    selectinload(Draft.topics)
-)
+.options(selectinload(Draft.authors), selectinload(Draft.topics))
 .filter(Draft.id == shout.draft)
 .first()
 )

 # Связываем черновик с публикацией вручную для доступа через API
 if draft:
 shout.draft_obj = draft
@@ -768,38 +771,32 @@ async def unpublish_shout(_, info, shout_id: int):
 # Снимаем с публикации (устанавливаем published_at в None)
 shout.published_at = None
 session.commit()

 # Формируем полноценный словарь для ответа
 shout_dict = shout.dict()

 # Добавляем связанные данные
 shout_dict["topics"] = (
-[
-    {"id": topic.id, "slug": topic.slug, "title": topic.title}
-    for topic in shout.topics
-]
+[{"id": topic.id, "slug": topic.slug, "title": topic.title} for topic in shout.topics]
 if shout.topics
 else []
 )

-# Добавляем main_topic
+# Добавляем main_topic
 shout_dict["main_topic"] = get_main_topic(shout.topics)

 # Добавляем авторов
 shout_dict["authors"] = (
-[
-    {"id": author.id, "name": author.name, "slug": author.slug}
-    for author in shout.authors
-]
+[{"id": author.id, "name": author.name, "slug": author.slug} for author in shout.authors]
 if shout.authors
 else []
 )

 # Важно! Обновляем поле publication, отражая состояние "снят с публикации"
 shout_dict["publication"] = {
     "id": shout_id_for_publication,
     "slug": shout_slug,
-    "published_at": None # Ключевое изменение - устанавливаем published_at в None
+    "published_at": None, # Ключевое изменение - устанавливаем published_at в None
 }

 # Инвалидация кэша
@@ -810,17 +807,17 @@ async def unpublish_shout(_, info, shout_id: int):
 "random_top", # случайные топовые
 "unrated", # неоцененные
 ]
-await invalidate_shout_related_cache(shout, author_id)
+await invalidate_shout_related_cache(shout, author_id)
 await invalidate_shouts_cache(cache_keys)
 logger.info(f"Cache invalidated after unpublishing shout {shout_id}")
 except Exception as cache_err:
-logger.error(f"Failed to invalidate cache for unpublish shout {shout_id}: {cache_err}")
+logger.error(f"Failed to invalidate cache for unpublish shout {shout_id}: {cache_err}")

-except Exception as e:
+except Exception as e:
 session.rollback()
-logger.error(f"Failed to unpublish shout {shout_id}: {e}", exc_info=True)
+logger.error(f"Failed to unpublish shout {shout_id}: {e}", exc_info=True)
 return {"error": f"Failed to unpublish shout: {str(e)}"}

 # Возвращаем сформированный словарь вместо объекта
 logger.info(f"Shout {shout_id} unpublished successfully by author {author_id}")
-return {"shout": shout_dict}
+return {"shout": shout_dict}
@@ -2,7 +2,7 @@ from typing import List

 from sqlalchemy import and_, select

-from orm.author import Author, AuthorFollower
+from auth.orm import Author, AuthorFollower
 from orm.shout import Shout, ShoutAuthor, ShoutReactionsFollower, ShoutTopic
 from orm.topic import Topic, TopicFollower
 from resolvers.reader import (
@@ -71,7 +71,9 @@ def shouts_by_follower(info, follower_id: int, options):
 q = query_with_stat(info)
 reader_followed_authors = select(AuthorFollower.author).where(AuthorFollower.follower == follower_id)
 reader_followed_topics = select(TopicFollower.topic).where(TopicFollower.follower == follower_id)
-reader_followed_shouts = select(ShoutReactionsFollower.shout).where(ShoutReactionsFollower.follower == follower_id)
+reader_followed_shouts = select(ShoutReactionsFollower.shout).where(
+    ShoutReactionsFollower.follower == follower_id
+)
 followed_subquery = (
     select(Shout.id)
     .join(ShoutAuthor, ShoutAuthor.shout == Shout.id)
@@ -140,7 +142,9 @@ async def load_shouts_authored_by(_, info, slug: str, options) -> List[Shout]:
 q = (
     query_with_stat(info)
     if has_field(info, "stat")
-    else select(Shout).filter(and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None)))
+    else select(Shout).filter(
+        and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None))
+    )
 )
 q = q.filter(Shout.authors.any(id=author_id))
 q, limit, offset = apply_options(q, options, author_id)
@@ -169,7 +173,9 @@ async def load_shouts_with_topic(_, info, slug: str, options) -> List[Shout]:
 q = (
     query_with_stat(info)
     if has_field(info, "stat")
-    else select(Shout).filter(and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None)))
+    else select(Shout).filter(
+        and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None))
+    )
 )
 q = q.filter(Shout.topics.any(id=topic_id))
 q, limit, offset = apply_options(q, options)
@@ -10,7 +10,7 @@ from cache.cache import (
     get_cached_follower_authors,
     get_cached_follower_topics,
 )
-from orm.author import Author, AuthorFollower
+from auth.orm import Author, AuthorFollower
 from orm.community import Community, CommunityFollower
 from orm.reaction import Reaction
 from orm.shout import Shout, ShoutReactionsFollower
@@ -71,11 +71,16 @@ async def follow(_, info, what, slug="", entity_id=0):
 with local_session() as session:
 existing_sub = (
 session.query(follower_class)
-.filter(follower_class.follower == follower_id, getattr(follower_class, entity_type) == entity_id)
+.filter(
+    follower_class.follower == follower_id,
+    getattr(follower_class, entity_type) == entity_id,
+)
 .first()
 )
 if existing_sub:
-logger.info(f"Пользователь {follower_id} уже подписан на {what.lower()} с ID {entity_id}")
+logger.info(
+    f"Пользователь {follower_id} уже подписан на {what.lower()} с ID {entity_id}"
+)
 else:
 logger.debug("Добавление новой записи в базу данных")
 sub = follower_class(follower=follower_id, **{entity_type: entity_id})
@@ -7,7 +7,7 @@ from sqlalchemy.exc import SQLAlchemyError
 from sqlalchemy.orm import aliased
 from sqlalchemy.sql import not_

-from orm.author import Author
+from auth.orm import Author
 from orm.notification import (
     Notification,
     NotificationAction,
@@ -66,7 +66,9 @@ def query_notifications(author_id: int, after: int = 0) -> Tuple[int, int, List[
 return total, unread, notifications


-def group_notification(thread, authors=None, shout=None, reactions=None, entity="follower", action="follow"):
+def group_notification(
+    thread, authors=None, shout=None, reactions=None, entity="follower", action="follow"
+):
 reactions = reactions or []
 authors = authors or []
 return {
@@ -14,7 +14,11 @@ def handle_proposing(kind: ReactionKind, reply_to: int, shout_id: int):
 session.query(Reaction).filter(Reaction.id == reply_to, Reaction.shout == shout_id).first()
 )

-if replied_reaction and replied_reaction.kind is ReactionKind.PROPOSE.value and replied_reaction.quote:
+if (
+    replied_reaction
+    and replied_reaction.kind is ReactionKind.PROPOSE.value
+    and replied_reaction.quote
+):
 # patch all the proposals' quotes
 proposals = (
     session.query(Reaction)
@@ -1,7 +1,7 @@
 from sqlalchemy import and_, case, func, select, true
 from sqlalchemy.orm import aliased

-from orm.author import Author, AuthorRating
+from auth.orm import Author, AuthorRating
 from orm.reaction import Reaction, ReactionKind
 from orm.shout import Shout
 from services.auth import login_required
@@ -187,7 +187,9 @@ def count_author_shouts_rating(session, author_id) -> int:

 def get_author_rating_old(session, author: Author):
 likes_count = (
-session.query(AuthorRating).filter(and_(AuthorRating.author == author.id, AuthorRating.plus.is_(True))).count()
+session.query(AuthorRating)
+.filter(and_(AuthorRating.author == author.id, AuthorRating.plus.is_(True)))
+.count()
 )
 dislikes_count = (
     session.query(AuthorRating)
@@ -3,7 +3,7 @@ import time
 from sqlalchemy import and_, asc, case, desc, func, select
 from sqlalchemy.orm import aliased

-from orm.author import Author
+from auth.orm import Author
 from orm.rating import PROPOSAL_REACTIONS, RATING_REACTIONS, is_negative, is_positive
 from orm.reaction import Reaction, ReactionKind
 from orm.shout import Shout, ShoutAuthor
@@ -334,7 +334,9 @@ async def create_reaction(_, info, reaction):
 with local_session() as session:
 authors = session.query(ShoutAuthor.author).filter(ShoutAuthor.shout == shout_id).scalar()
 is_author = (
-bool(list(filter(lambda x: x == int(author_id), authors))) if isinstance(authors, list) else False
+bool(list(filter(lambda x: x == int(author_id), authors)))
+if isinstance(authors, list)
+else False
 )
 reaction_input["created_by"] = author_id
 kind = reaction_input.get("kind")
@@ -487,7 +489,7 @@ def apply_reaction_filters(by, q):
 shout_slug = by.get("shout")
 if shout_slug:
 q = q.filter(Shout.slug == shout_slug)

 shout_id = by.get("shout_id")
 if shout_id:
 q = q.filter(Shout.id == shout_id)
@@ -4,7 +4,7 @@ from sqlalchemy import and_, nulls_last, text
 from sqlalchemy.orm import aliased
 from sqlalchemy.sql.expression import asc, case, desc, func, select

-from orm.author import Author
+from auth.orm import Author
 from orm.reaction import Reaction, ReactionKind
 from orm.shout import Shout, ShoutAuthor, ShoutTopic
 from orm.topic import Topic
@@ -93,7 +93,14 @@ def query_with_stat(info):
 q = q.join(main_topic, main_topic.id == main_topic_join.topic)
 q = q.add_columns(
 json_builder(
-    "id", main_topic.id, "title", main_topic.title, "slug", main_topic.slug, "is_main", main_topic_join.main
+    "id",
+    main_topic.id,
+    "title",
+    main_topic.title,
+    "slug",
+    main_topic.slug,
+    "is_main",
+    main_topic_join.main,
 ).label("main_topic")
 )
@@ -131,7 +138,9 @@ def query_with_stat(info):
 select(
     ShoutTopic.shout,
     json_array_builder(
-        json_builder("id", Topic.id, "title", Topic.title, "slug", Topic.slug, "is_main", ShoutTopic.main)
+        json_builder(
+            "id", Topic.id, "title", Topic.title, "slug", Topic.slug, "is_main", ShoutTopic.main
+        )
     ).label("topics"),
 )
 .outerjoin(Topic, ShoutTopic.topic == Topic.id)
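`json_builder` and `json_array_builder` are helpers imported elsewhere in this module and are not shown in the diff. A plausible reading (an assumption, not the project's actual implementation) is a thin wrapper over PostgreSQL's JSON construction and aggregation functions, roughly:

    from sqlalchemy import func


    def json_builder(*args):
        # alternating key/value arguments -> a JSON object expression,
        # e.g. json_builder("id", Topic.id, "slug", Topic.slug)
        return func.json_build_object(*args)


    def json_array_builder(expr):
        # aggregate per-row JSON objects into one JSON array per group
        return func.json_agg(expr)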
@@ -239,7 +248,9 @@ def get_shouts_with_links(info, q, limit=20, offset=0):
 if hasattr(row, "main_topic"):
 # logger.debug(f"Raw main_topic for shout#{shout_id}: {row.main_topic}")
 main_topic = (
-orjson.loads(row.main_topic) if isinstance(row.main_topic, str) else row.main_topic
+orjson.loads(row.main_topic)
+if isinstance(row.main_topic, str)
+else row.main_topic
 )
 # logger.debug(f"Parsed main_topic for shout#{shout_id}: {main_topic}")
@@ -253,7 +264,12 @@ def get_shouts_with_links(info, q, limit=20, offset=0):
 }
 elif not main_topic:
 logger.warning(f"No main_topic and no topics found for shout#{shout_id}")
-main_topic = {"id": 0, "title": "no topic", "slug": "notopic", "is_main": True}
+main_topic = {
+    "id": 0,
+    "title": "no topic",
+    "slug": "notopic",
+    "is_main": True,
+}
 shout_dict["main_topic"] = main_topic
 # logger.debug(f"Final main_topic for shout#{shout_id}: {main_topic}")
@@ -270,7 +286,9 @@ def get_shouts_with_links(info, q, limit=20, offset=0):
 media_data = orjson.loads(media_data)
 except orjson.JSONDecodeError:
 media_data = []
-shout_dict["media"] = [media_data] if isinstance(media_data, dict) else media_data
+shout_dict["media"] = (
+    [media_data] if isinstance(media_data, dict) else media_data
+)

 shouts.append(shout_dict)
@@ -358,7 +376,9 @@ def apply_sorting(q, options):
 """
 order_str = options.get("order_by")
 if order_str in ["rating", "comments_count", "last_commented_at"]:
-query_order_by = desc(text(order_str)) if options.get("order_by_desc", True) else asc(text(order_str))
+query_order_by = (
+    desc(text(order_str)) if options.get("order_by_desc", True) else asc(text(order_str))
+)
 q = q.distinct(text(order_str), Shout.id).order_by( # DISTINCT ON включает поле сортировки
     nulls_last(query_order_by), Shout.id
 )
@@ -442,7 +462,8 @@ async def load_shouts_unrated(_, info, options):
 select(Reaction.shout)
 .where(
     and_(
-        Reaction.deleted_at.is_(None), Reaction.kind.in_([ReactionKind.LIKE.value, ReactionKind.DISLIKE.value])
+        Reaction.deleted_at.is_(None),
+        Reaction.kind.in_([ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]),
     )
 )
 .group_by(Reaction.shout)
@@ -453,11 +474,15 @@ async def load_shouts_unrated(_, info, options):
 q = select(Shout).where(and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None)))
 q = q.join(Author, Author.id == Shout.created_by)
 q = q.add_columns(
-json_builder("id", Author.id, "name", Author.name, "slug", Author.slug, "pic", Author.pic).label("main_author")
+json_builder("id", Author.id, "name", Author.name, "slug", Author.slug, "pic", Author.pic).label(
+    "main_author"
+)
 )
 q = q.join(ShoutTopic, and_(ShoutTopic.shout == Shout.id, ShoutTopic.main.is_(True)))
 q = q.join(Topic, Topic.id == ShoutTopic.topic)
-q = q.add_columns(json_builder("id", Topic.id, "title", Topic.title, "slug", Topic.slug).label("main_topic"))
+q = q.add_columns(
+    json_builder("id", Topic.id, "title", Topic.title, "slug", Topic.slug).label("main_topic")
+)
 q = q.where(Shout.id.not_in(rated_shouts))
 q = q.order_by(func.random())
@@ -4,7 +4,7 @@ from sqlalchemy import and_, distinct, func, join, select
 from sqlalchemy.orm import aliased

 from cache.cache import cache_author
-from orm.author import Author, AuthorFollower
+from auth.orm import Author, AuthorFollower
 from orm.reaction import Reaction, ReactionKind
 from orm.shout import Shout, ShoutAuthor, ShoutTopic
 from orm.topic import Topic, TopicFollower
@@ -177,7 +177,9 @@ def get_topic_comments_stat(topic_id: int) -> int:
 .subquery()
 )
 # Запрос для суммирования количества комментариев по теме
-q = select(func.coalesce(func.sum(sub_comments.c.comments_count), 0)).filter(ShoutTopic.topic == topic_id)
+q = select(func.coalesce(func.sum(sub_comments.c.comments_count), 0)).filter(
+    ShoutTopic.topic == topic_id
+)
 q = q.outerjoin(sub_comments, ShoutTopic.shout == sub_comments.c.shout_id)
 with local_session() as session:
 result = session.execute(q).first()
@@ -237,7 +239,9 @@ def get_author_followers_stat(author_id: int) -> int:
 :return: Количество уникальных подписчиков автора.
 """
 aliased_followers = aliased(AuthorFollower)
-q = select(func.count(distinct(aliased_followers.follower))).filter(aliased_followers.author == author_id)
+q = select(func.count(distinct(aliased_followers.follower))).filter(
+    aliased_followers.author == author_id
+)
 with local_session() as session:
 result = session.execute(q).first()
 return result[0] if result else 0
@@ -282,14 +286,16 @@ def get_with_stat(q):
 q = add_author_stat_columns(q) if is_author else add_topic_stat_columns(q)

 # Выполняем запрос
-result = session.execute(q)
+result = session.execute(q).unique()
 for cols in result:
 entity = cols[0]
 stat = dict()
 stat["shouts"] = cols[1] # Статистика по публикациям
 stat["followers"] = cols[2] # Статистика по подписчикам
 if is_author:
-stat["authors"] = get_author_authors_stat(entity.id) # Статистика по подпискам на авторов
+stat["authors"] = get_author_authors_stat(
+    entity.id
+) # Статистика по подпискам на авторов
 stat["comments"] = get_author_comments_stat(entity.id) # Статистика по комментариям
 else:
 stat["authors"] = get_topic_authors_stat(entity.id) # Статистика по авторам темы
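The one behavioural change in this hunk is `session.execute(q).unique()`. A short sketch of why that matters, under the assumption that the statement carries joined collections: SQLAlchemy 1.4/2.x refuses to iterate ORM rows produced by joined eager loads against collections unless `Result.unique()` is applied first, and the call also de-duplicates the parent entities:

    from sqlalchemy.orm import Session


    def iter_with_stat(session: Session, q):
        # q is assumed to be a select() over Author or Topic with stat columns appended
        for cols in session.execute(q).unique():
            entity = cols[0]
            yield entity, {"shouts": cols[1], "followers": cols[2]}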
@@ -8,7 +8,7 @@ from cache.cache import (
     get_cached_topic_followers,
     invalidate_cache_by_prefix,
 )
-from orm.author import Author
+from auth.orm import Author
 from orm.topic import Topic
 from orm.reaction import ReactionKind
 from resolvers.stat import get_with_stat