fixed-coales
Checks failed: Deploy on push / deploy (push), failing after 10s

Untone 2024-10-15 11:12:09 +03:00
parent 76aeddbde2
commit bf33cdc95c
6 changed files with 47 additions and 48 deletions


@@ -192,7 +192,8 @@ def add_author_rating_columns(q, group_list):
                     (shout_reaction.kind == ReactionKind.DISLIKE.value, -1),
                     else_=0,
                 )
-            )
+            ),
+            0,
         ).label("shouts_rating"),
     )
     .select_from(shout_reaction)
@@ -226,7 +227,8 @@ def add_author_rating_columns(q, group_list):
                     (reaction_2.kind == ReactionKind.DISLIKE.value, -1),
                     else_=0,
                 )
-            )
+            ),
+            0,
         ).label("comments_rating"),
     )
     .select_from(reaction_2)
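Both hunks above, and the comments_stat hunk in the next file, apply the same fix: func.coalesce() was being called with a single argument, so an aggregate that evaluates to NULL stayed NULL (a one-argument COALESCE is a no-op at best). Below is a minimal, self-contained sketch of the behaviour the added 0 default addresses. It uses simplified stand-in Author/Reaction models and a correlated scalar subquery for brevity, not the project's real schema or its grouped joins:

from sqlalchemy import Column, ForeignKey, Integer, String, case, create_engine, func, select
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Author(Base):
    # simplified stand-in, not the project's real Author model
    __tablename__ = "author"
    id = Column(Integer, primary_key=True)
    name = Column(String)


class Reaction(Base):
    # simplified stand-in, not the project's real Reaction model
    __tablename__ = "reaction"
    id = Column(Integer, primary_key=True)
    created_by = Column(Integer, ForeignKey("author.id"))
    kind = Column(String)  # "LIKE" / "DISLIKE" here; the project uses ReactionKind values


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all(
        [
            Author(id=1, name="has reactions"),
            Author(id=2, name="no reactions"),
            Reaction(created_by=1, kind="LIKE"),
        ]
    )
    session.commit()

    # Per-author rating as a correlated scalar subquery: SUM over zero rows is NULL.
    rating_sq = (
        select(
            func.sum(
                case(
                    (Reaction.kind == "LIKE", 1),
                    (Reaction.kind == "DISLIKE", -1),
                    else_=0,
                )
            )
        )
        .where(Reaction.created_by == Author.id)
        .scalar_subquery()
    )

    without_default = select(Author.id, rating_sq.label("rating")).order_by(Author.id)
    with_default = select(Author.id, func.coalesce(rating_sq, 0).label("rating")).order_by(Author.id)

    print(session.execute(without_default).all())  # [(1, 1), (2, None)]
    print(session.execute(with_default).all())     # [(1, 1), (2, 0)]

The second query is the post-commit shape: coalesce(aggregate, 0) turns the no-rows case into a 0 rating instead of None.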


@@ -52,9 +52,9 @@ def add_reaction_stat_columns(q):
             ),
         ).add_columns(
             # Count unique comments
-            func.count(aliased_reaction.id)
-            .filter(aliased_reaction.kind == ReactionKind.COMMENT.value)
-            .label("comments_stat"),
+            func.coalesce(
+                func.count(aliased_reaction.id).filter(aliased_reaction.kind == ReactionKind.COMMENT.value), 0
+            ).label("comments_stat"),
             # Calculate rating as the difference between likes and dislikes
             func.sum(
                 case(


@@ -173,7 +173,7 @@ def get_topic_comments_stat(topic_id: int) -> int:
     sub_comments = (
         select(
             Shout.id.label("shout_id"),
-            func.coalesce(func.count(Reaction.id)).label("comments_count"),
+            func.coalesce(func.count(Reaction.id), 0).label("comments_count"),
         )
         .join(ShoutTopic, ShoutTopic.shout == Shout.id)
         .join(Topic, ShoutTopic.topic == Topic.id)
@@ -257,17 +257,10 @@ def get_author_followers_stat(author_id: int) -> int:
     return result[0] if result else 0


-def get_author_comments_stat(author_id: int) -> int:
-    """
-    Получает количество комментариев, оставленных указанным автором.
-
-    :param author_id: Идентификатор автора.
-    :return: Количество комментариев, оставленных автором.
-    """
-    # Подзапрос для получения количества комментариев, оставленных автором
-    sub_comments = (
-        select(Author.id, func.coalesce(func.count(Reaction.id)).label("comments_count"))
-        .select_from(Author)  # явно указываем левый элемент join'а
+def get_author_comments_stat(author_id):
+    q = (
+        select(func.coalesce(func.count(Reaction.id), 0).label("comments_count"))
+        .select_from(Author)
         .outerjoin(
             Reaction,
             and_(
@@ -276,13 +269,13 @@ def get_author_comments_stat(author_id: int) -> int:
                 Reaction.deleted_at.is_(None),
             ),
         )
+        .where(Author.id == author_id)
         .group_by(Author.id)
-        .subquery()
     )

-    q = select(sub_comments.c.comments_count).filter(sub_comments.c.id == author_id)
-
     with local_session() as session:
         result = session.execute(q).first()
-        return result[0] if result else 0
+        return result.comments_count if result else 0


 def get_with_stat(q):
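The rewrite above drops the intermediate subquery (which aggregated over all authors and was then filtered by positional column) in favour of filtering first and reading the result by label. A sketch of the post-commit shape, written against the stand-in Author/Reaction models from the previous sketch; the session is passed in purely for illustration (the real function opens one via local_session()), and the real query additionally filters by ReactionKind.COMMENT.value and Reaction.deleted_at:

from sqlalchemy import and_, func, select
from sqlalchemy.orm import Session


def get_author_comments_stat(session: Session, author_id: int) -> int:
    # Hypothetical, simplified variant: real code also checks kind == ReactionKind.COMMENT.value
    # and deleted_at.is_(None) in the join condition.
    q = (
        select(func.coalesce(func.count(Reaction.id), 0).label("comments_count"))
        .select_from(Author)
        .outerjoin(Reaction, and_(Reaction.created_by == Author.id, Reaction.kind == "COMMENT"))
        .where(Author.id == author_id)
        .group_by(Author.id)
    )
    result = session.execute(q).first()
    # With the outer join, an author with zero comments still yields one row whose count is 0.
    return result.comments_count if result else 0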


@@ -39,7 +39,7 @@ def create_table_if_not_exists(engine, table):
         table.__table__.create(engine)
         logger.info(f"Table '{table.__tablename__}' created.")
     else:
-        logger.info(f"Table '{table.__tablename__}' already exists.")
+        logger.info(f"Table '{table.__tablename__}' ok.")


 # noinspection PyUnusedLocal


@@ -108,7 +108,7 @@ class SearchService:
                 logger.error(f"Ошибка подключения к OpenSearch: {exc}")
                 self.client = None
         else:
-            logger.warning("Задайте переменные среды для подключения к серверу поиска")
+            logger.warning("env var ELASTIC_HOST is not set")

     async def info(self):
         if isinstance(self.client, OpenSearch):


@@ -55,40 +55,46 @@ class ViewedStorage:
                 # Запуск фоновой задачи
                 _task = asyncio.create_task(self.worker())
             else:
-                logger.warning(" * Пожалуйста, добавьте ключевой файл Google Analytics")
+                logger.warning(" * please, add Google Analytics credentials file")
                 self.disabled = True

     @staticmethod
     def load_precounted_views():
         """Загрузка предварительно подсчитанных просмотров из файла JSON"""
         self = ViewedStorage
+        viewfile_path = VIEWS_FILEPATH
+        if not os.path.exists(viewfile_path):
+            viewfile_path = os.path.join(os.path.curdir, "views.json")
+            if not os.path.exists(viewfile_path):
+                logger.warning(" * views.json not found")
+                return
+
+        logger.info(f" * loading views from {viewfile_path}")
         try:
-            if os.path.exists(VIEWS_FILEPATH):
-                start_date_int = os.path.getmtime(VIEWS_FILEPATH)
-                start_date_str = datetime.fromtimestamp(start_date_int).strftime("%Y-%m-%d")
-                self.start_date = start_date_str
-                now_date = datetime.now().strftime("%Y-%m-%d")
-
-                if now_date == self.start_date:
-                    logger.info(" * Данные актуализованы!")
-                else:
-                    logger.warn(f" * Файл просмотров {VIEWS_FILEPATH} устарел: {self.start_date}")
-
-                with open(VIEWS_FILEPATH, "r") as file:
-                    precounted_views = json.load(file)
-                    self.views_by_shout.update(precounted_views)
-                    logger.info(f" * {len(precounted_views)} публикаций с просмотрами успешно загружены.")
-            else:
-                logger.warning(" * Файл просмотров не найден.")
+            start_date_int = os.path.getmtime(viewfile_path)
+            start_date_str = datetime.fromtimestamp(start_date_int).strftime("%Y-%m-%d")
+            self.start_date = start_date_str
+            now_date = datetime.now().strftime("%Y-%m-%d")
+
+            if now_date == self.start_date:
+                logger.info(" * views data is up to date!")
+            else:
+                logger.warn(f" * {viewfile_path} is too old: {self.start_date}")
+
+            with open(viewfile_path, "r") as file:
+                precounted_views = json.load(file)
+                self.views_by_shout.update(precounted_views)
+                logger.info(f" * {len(precounted_views)} shouts with views was loaded.")
         except Exception as e:
-            logger.error(f"Ошибка загрузки предварительно подсчитанных просмотров: {e}")
+            logger.error(f"precounted views loading error: {e}")

     # noinspection PyTypeChecker
     @staticmethod
     async def update_pages():
         """Запрос всех страниц от Google Analytics, отсортированных по количеству просмотров"""
         self = ViewedStorage
-        logger.info("Обновление данных просмотров от Google Analytics ---")
+        logger.info("views update from Google Analytics ---")
         if not self.disabled:
             try:
                 start = time.time()
@@ -122,10 +128,10 @@ class ViewedStorage:
                             # Запись путей страниц для логирования
                             slugs.add(slug)
-                    logger.info(f"Собрано страниц: {len(slugs)} ")
+                    logger.info(f"collected pages: {len(slugs)} ")
                 end = time.time()
-                logger.info("Обновление страниц заняло %fs " % (end - start))
+                logger.info("views update time: %fs " % (end - start))
             except Exception as error:
                 logger.error(error)
                 self.disabled = True
@@ -189,17 +195,15 @@ class ViewedStorage:
                     except Exception as exc:
                         failed += 1
                         logger.debug(exc)
-                        logger.info(" - Обновление не удалось #%d, ожидание 10 секунд" % failed)
+                        logger.info(" - update failed #%d, wait 10 secs" % failed)
                         if failed > 3:
-                            logger.info(" - Больше не пытаемся обновить")
+                            logger.info(" - views update failed, not trying anymore")
                             break
                 if failed == 0:
                     when = datetime.now(timezone.utc) + timedelta(seconds=self.period)
                     t = format(when.astimezone().isoformat())
-                    logger.info(
-                        " ⎩ Следующее обновление: %s" % (t.split("T")[0] + " " + t.split("T")[1].split(".")[0])
-                    )
+                    logger.info(" ⎩ next update: %s" % (t.split("T")[0] + " " + t.split("T")[1].split(".")[0]))
                     await asyncio.sleep(self.period)
                 else:
                     await asyncio.sleep(10)
-                    logger.info(" - Попытка снова обновить данные")
+                    logger.info(" - try to update views again")