Merge branch 'dev' into feat/sv-searching-txtai
All checks were successful
Deploy on push / deploy (push) Successful in 6s
This commit is contained in: commit 83d61ca76d
```diff
@@ -1,3 +1,10 @@
+#### [0.4.18] - 2025-04-10
+- Fixed unique constraint violation for empty slug values:
+  - Modified `update_draft` resolver to handle empty slug values
+  - Modified `create_draft` resolver to prevent empty slug values
+  - Added validation to prevent inserting or updating drafts with empty slug
+  - Fixed database error "duplicate key value violates unique constraint draft_slug_key"
+
 #### [0.4.17] - 2025-03-26
 - Fixed `'Reaction' object is not subscriptable` error in hierarchical comments:
   - Modified `get_reactions_with_stat()` to convert Reaction objects to dictionaries
```
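A note on the 0.4.17 entry: the `'Reaction' object is not subscriptable` error comes from treating ORM instances like dicts, and the fix converts them before use. Below is a minimal sketch of that kind of conversion, assuming a SQLAlchemy-mapped Reaction model; the helper name and usage are illustrative, not the project's exact code.

```python
# Hypothetical sketch of converting ORM Reaction rows to plain dicts so that
# downstream code can safely use reaction["id"], reaction["reply_to"], etc.
from sqlalchemy import inspect


def reaction_to_dict(reaction) -> dict:
    """Map every mapped column of a Reaction instance to a dict entry."""
    return {attr.key: getattr(reaction, attr.key) for attr in inspect(reaction).mapper.column_attrs}


# Inside get_reactions_with_stat() the query results would then be returned as:
# return [reaction_to_dict(r) for r in reactions]
```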
cache/cache.py (vendored, 5 changed lines)
```diff
@@ -545,8 +545,9 @@ async def get_cached_data(key: str) -> Optional[Any]:
     try:
         cached_data = await redis.execute("GET", key)
         if cached_data:
-            logger.debug(f"Data retrieved from cache for key {key}")
-            return orjson.loads(cached_data)
+            loaded = orjson.loads(cached_data)
+            logger.debug(f"Data retrieved from cache for key {key}: {len(loaded)}")
+            return loaded
         return None
     except Exception as e:
         logger.error(f"Error retrieving data from cache: {e}")
```
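The change above parses the cached payload once so a hit can be logged together with its size before it is returned. A hedged sketch of how the read path is typically used; the cache key and the `fetch_topics_from_db` callable are illustrative assumptions, and writing the fresh value back is left to the project's own setter:

```python
# Illustrative read-through around get_cached_data; key name and fallback
# are assumptions, not code from this commit.
from cache.cache import get_cached_data


async def cached_topics(fetch_topics_from_db):
    key = "topics:all"  # assumed key naming convention
    cached = await get_cached_data(key)  # returns the orjson-decoded value or None
    if cached is not None:
        return cached
    # Cache miss: fall back to the database; the write-back through the
    # project's cache setter is omitted here.
    return await fetch_topics_from_db()
```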
```diff
@@ -104,6 +104,11 @@ async def create_draft(_, info, draft_input):
 
     if "title" not in draft_input or not draft_input["title"]:
         draft_input["title"] = ""  # Empty string instead of NULL
 
+    # Check the slug: it must either be non-empty or not be passed at all
+    if "slug" in draft_input and (draft_input["slug"] is None or draft_input["slug"] == ""):
+        # When creating a draft, drop the empty slug from the input data
+        del draft_input["slug"]
+
     try:
         with local_session() as session:
```
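Both resolvers now enforce the same rule: an empty or None slug must never reach the INSERT/UPDATE, otherwise two drafts with `slug = ''` collide on the `draft_slug_key` unique constraint. A small sketch of how that check could be factored into a shared helper (the helper is hypothetical, not part of this commit):

```python
# Hypothetical shared helper for create_draft / update_draft: drop an empty
# slug so the column keeps its default (create) or previous (update) value.
def strip_empty_slug(draft_input: dict) -> dict:
    if "slug" in draft_input and not draft_input["slug"]:
        # An empty string would collide with other empty slugs on the
        # draft_slug_key unique constraint, so remove the key entirely.
        del draft_input["slug"]
    return draft_input
```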
```diff
@@ -142,6 +147,15 @@ async def update_draft(_, info, draft_id: int, draft_input):
     if not user_id or not author_id:
         return {"error": "Author ID are required"}
 
+    # Check the slug: it must either be non-empty or not be passed at all
+    if "slug" in draft_input and (draft_input["slug"] is None or draft_input["slug"] == ""):
+        # If the slug is empty, either drop it from the input data or generate a temporary unique one
+        # Option 1: simply remove the key from the input data so the old value is kept
+        del draft_input["slug"]
+        # Option 2 (if it must be updated): generate a temporary unique slug
+        # import uuid
+        # draft_input["slug"] = f"draft-{uuid.uuid4().hex[:8]}"
+
     with local_session() as session:
         draft = session.query(Draft).filter(Draft.id == draft_id).first()
         if not draft:
```
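The commented-out "Option 2" would keep the slug update but substitute a temporary unique value instead of dropping the key. A sketch of what that variant could look like if it were ever enabled (illustrative only; the commit ships Option 1):

```python
import uuid


def temporary_slug(prefix: str = "draft") -> str:
    # Eight hex characters of a UUID4 make collisions on the draft_slug_key
    # unique constraint practically impossible for short-lived drafts.
    return f"{prefix}-{uuid.uuid4().hex[:8]}"


# In update_draft this would replace the `del draft_input["slug"]` branch:
# draft_input["slug"] = temporary_slug()
```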
```diff
@@ -6,7 +6,7 @@ from cache.cache import (
     get_cached_topic_authors,
     get_cached_topic_by_slug,
     get_cached_topic_followers,
-    invalidate_cache_by_prefix,
+    invalidate_cache_by_prefix
 )
 from orm.author import Author
 from orm.topic import Topic
```
```diff
@@ -126,6 +126,29 @@ async def get_topics_with_stats(limit=100, offset=0, community_id=None, by=None)
             GROUP BY topic
         """
         followers_stats = {row[0]: row[1] for row in session.execute(text(followers_stats_query))}
 
+        # Query for author statistics for the selected topics
+        authors_stats_query = f"""
+            SELECT st.topic, COUNT(DISTINCT sa.author) as authors_count
+            FROM shout_topic st
+            JOIN shout s ON st.shout = s.id AND s.deleted_at IS NULL AND s.published_at IS NOT NULL
+            JOIN shout_author sa ON sa.shout = s.id
+            WHERE st.topic IN ({",".join(map(str, topic_ids))})
+            GROUP BY st.topic
+        """
+        authors_stats = {row[0]: row[1] for row in session.execute(text(authors_stats_query))}
+
+        # Query for comment statistics for the selected topics
+        comments_stats_query = f"""
+            SELECT st.topic, COUNT(DISTINCT r.id) as comments_count
+            FROM shout_topic st
+            JOIN shout s ON st.shout = s.id AND s.deleted_at IS NULL AND s.published_at IS NOT NULL
+            JOIN reaction r ON r.shout = s.id
+            WHERE st.topic IN ({",".join(map(str, topic_ids))})
+            GROUP BY st.topic
+        """
+        comments_stats = {row[0]: row[1] for row in session.execute(text(comments_stats_query))}
+
+
         # Build the result with the statistics added
         result = []
```
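The two new queries interpolate `topic_ids` into the SQL with an f-string, which works here because the ids are integers taken from the database, but the same statement can keep static SQL by using an expanding bind parameter. A hedged sketch of that alternative with SQLAlchemy's `bindparam(..., expanding=True)` (not what the commit does):

```python
from sqlalchemy import bindparam, text

# Same authors query, but with the IN list bound as a parameter instead of
# being formatted into the SQL string.
authors_stats_query = text("""
    SELECT st.topic, COUNT(DISTINCT sa.author) AS authors_count
    FROM shout_topic st
    JOIN shout s ON st.shout = s.id
        AND s.deleted_at IS NULL AND s.published_at IS NOT NULL
    JOIN shout_author sa ON sa.shout = s.id
    WHERE st.topic IN :topic_ids
    GROUP BY st.topic
""").bindparams(bindparam("topic_ids", expanding=True))

authors_stats = {
    row[0]: row[1]
    for row in session.execute(authors_stats_query, {"topic_ids": list(topic_ids)})
}
```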
```diff
@@ -134,6 +157,8 @@ async def get_topics_with_stats(limit=100, offset=0, community_id=None, by=None)
             topic_dict["stat"] = {
                 "shouts": shouts_stats.get(topic.id, 0),
                 "followers": followers_stats.get(topic.id, 0),
+                "authors": authors_stats.get(topic.id, 0),
+                "comments": comments_stats.get(topic.id, 0)
             }
             result.append(topic_dict)
```
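With the two added keys, every topic in `result` now carries a four-field `stat` block. An illustrative example of one entry as a client would receive it (field names besides `stat` and all values are made up):

```python
# Illustrative shape of a single element of `result` after this change.
topic_entry = {
    "id": 12,
    "slug": "culture",
    "title": "Culture",
    "stat": {
        "shouts": 42,      # published shouts in the topic
        "followers": 17,   # topic followers
        "authors": 9,      # distinct authors (added in this commit)
        "comments": 120,   # distinct reactions counted as comments (added in this commit)
    },
}
```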