author-shouts-counter-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m49s

This commit is contained in:
Untone 2024-01-30 11:58:17 +03:00
parent e4d7284681
commit e28f03d7db
4 changed files with 15 additions and 23 deletions

View File

@@ -27,7 +27,7 @@ logger.setLevel(logging.DEBUG)
def add_author_stat_columns(q): def add_author_stat_columns(q):
shout_author_aliased = aliased(ShoutAuthor) shout_author_aliased = aliased(ShoutAuthor)
q = q.outerjoin(shout_author_aliased, shout_author_aliased.author == Author.id).add_columns( q = q.outerjoin(shout_author_aliased).add_columns(
func.count(distinct(shout_author_aliased.shout)).label('shouts_stat') func.count(distinct(shout_author_aliased.shout)).label('shouts_stat')
) )
@@ -51,9 +51,9 @@ async def get_authors_from_query(q):
for [author, shouts_stat, followers_stat, followings_stat] in session.execute(q): for [author, shouts_stat, followers_stat, followings_stat] in session.execute(q):
author.stat = { author.stat = {
'shouts': shouts_stat, 'shouts': shouts_stat,
'viewed': await ViewedStorage.get_author(author.slug),
'followers': followers_stat, 'followers': followers_stat,
'followings': followings_stat, 'followings': followings_stat,
'viewed': await ViewedStorage.get_author(author.slug),
} }
authors.append(author) authors.append(author)
return authors return authors

View File

@@ -286,7 +286,9 @@ async def load_shouts_feed(_, info, options):
# print(q.compile(compile_kwargs={"literal_binds": True})) # print(q.compile(compile_kwargs={"literal_binds": True}))
for [shout, reacted_stat, commented_stat, likes_stat, dislikes_stat, _last_comment] in session.execute(q).unique(): for [shout, reacted_stat, commented_stat, likes_stat, dislikes_stat, _last_comment] in session.execute(
q
).unique():
main_topic = ( main_topic = (
session.query(Topic.slug) session.query(Topic.slug)
.join( .join(
@@ -306,7 +308,7 @@ async def load_shouts_feed(_, info, options):
'viewed': await ViewedStorage.get_shout(shout.slug), 'viewed': await ViewedStorage.get_shout(shout.slug),
'reacted': reacted_stat, 'reacted': reacted_stat,
'commented': commented_stat, 'commented': commented_stat,
'rating': likes_stat - dislikes_stat 'rating': likes_stat - dislikes_stat,
} }
shouts.append(shout) shouts.append(shout)
@@ -383,7 +385,7 @@ async def get_shouts_from_query(q, author_id=None):
'reacted': reacted_stat, 'reacted': reacted_stat,
'commented': commented_stat, 'commented': commented_stat,
'rating': int(likes_stat or 0) - int(dislikes_stat or 0), 'rating': int(likes_stat or 0) - int(dislikes_stat or 0),
'last_comment': last_comment 'last_comment': last_comment,
} }
return shouts return shouts

View File

@@ -48,9 +48,9 @@ async def check_auth(req) -> str | None:
} }
gql = { gql = {
'query': f'query {operation}($params: ValidateJWTTokenInput!) {{' + 'query': f'query {operation}($params: ValidateJWTTokenInput!) {{'
f'{query_name}(params: $params) {{ is_valid claims }} ' + + f'{query_name}(params: $params) {{ is_valid claims }} '
'}', + '}',
'variables': variables, 'variables': variables,
'operationName': operation, 'operationName': operation,
} }

View File

@@ -13,16 +13,12 @@ os_logger.setLevel(logging.INFO)
logger = logging.getLogger('\t[services.search]\t') logger = logging.getLogger('\t[services.search]\t')
logger.setLevel(logging.DEBUG) logger.setLevel(logging.DEBUG)
ELASTIC_HOST = ( ELASTIC_HOST = os.environ.get('ELASTIC_HOST', '').replace('https://', '')
os.environ.get('ELASTIC_HOST', '').replace('https://', '')
)
ELASTIC_USER = os.environ.get('ELASTIC_USER', '') ELASTIC_USER = os.environ.get('ELASTIC_USER', '')
ELASTIC_PASSWORD = os.environ.get('ELASTIC_PASSWORD', '') ELASTIC_PASSWORD = os.environ.get('ELASTIC_PASSWORD', '')
ELASTIC_PORT = os.environ.get('ELASTIC_PORT', 9200) ELASTIC_PORT = os.environ.get('ELASTIC_PORT', 9200)
ELASTIC_AUTH = f'{ELASTIC_USER}:{ELASTIC_PASSWORD}' if ELASTIC_USER else '' ELASTIC_AUTH = f'{ELASTIC_USER}:{ELASTIC_PASSWORD}' if ELASTIC_USER else ''
ELASTIC_URL = os.environ.get( ELASTIC_URL = os.environ.get('ELASTIC_URL', f'https://{ELASTIC_AUTH}@{ELASTIC_HOST}:{ELASTIC_PORT}')
'ELASTIC_URL', f'https://{ELASTIC_AUTH}@{ELASTIC_HOST}:{ELASTIC_PORT}'
)
REDIS_TTL = 86400 # 1 day in seconds REDIS_TTL = 86400 # 1 day in seconds
@@ -114,9 +110,7 @@ class SearchService:
if self.lock.acquire(blocking=False): if self.lock.acquire(blocking=False):
try: try:
logger.debug(f' Создаём новый индекс: {self.index_name} ') logger.debug(f' Создаём новый индекс: {self.index_name} ')
self.client.indices.create( self.client.indices.create(index=self.index_name, body=index_settings)
index=self.index_name, body=index_settings
)
self.client.indices.close(index=self.index_name) self.client.indices.close(index=self.index_name)
self.client.indices.open(index=self.index_name) self.client.indices.open(index=self.index_name)
finally: finally:
@@ -127,9 +121,7 @@ class SearchService:
def put_mapping(self): def put_mapping(self):
if self.client: if self.client:
logger.debug(f' Разметка индекации {self.index_name}') logger.debug(f' Разметка индекации {self.index_name}')
self.client.indices.put_mapping( self.client.indices.put_mapping(index=self.index_name, body=expected_mapping)
index=self.index_name, body=expected_mapping
)
def check_index(self): def check_index(self):
if self.client: if self.client:
@@ -164,9 +156,7 @@ class SearchService:
'query': {'match': {'_all': text}}, 'query': {'match': {'_all': text}},
} }
if self.client: if self.client:
search_response = self.client.search( search_response = self.client.search(index=self.index_name, body=search_body, size=limit, from_=offset)
index=self.index_name, body=search_body, size=limit, from_=offset
)
hits = search_response['hits']['hits'] hits = search_response['hits']['hits']
results = [ results = [