granian+precommit
@@ -1,90 +1,95 @@
from functools import wraps
import logging
from functools import wraps

from aiohttp import ClientSession
from starlette.exceptions import HTTPException

from settings import AUTH_URL, AUTH_SECRET
from settings import AUTH_SECRET, AUTH_URL


logging.basicConfig()
logger = logging.getLogger("\t[services.auth]\t")
logger = logging.getLogger('\t[services.auth]\t')
logger.setLevel(logging.DEBUG)


async def request_data(gql, headers = { "Content-Type": "application/json" }):
async def request_data(gql, headers=None):
if headers is None:
headers = {'Content-Type': 'application/json'}
try:
# Asynchronous HTTP request to the authentication server
async with ClientSession() as session:
async with session.post(AUTH_URL, json=gql, headers=headers) as response:
if response.status == 200:
data = await response.json()
errors = data.get("errors")
errors = data.get('errors')
if errors:
logger.error(f"[services.auth] errors: {errors}")
logger.error(f'[services.auth] errors: {errors}')
else:
return data
except Exception as e:
# Handling and logging exceptions during authentication check
logger.error(f"[services.auth] request_data error: {e}")
logger.error(f'[services.auth] request_data error: {e}')
return None



async def check_auth(req) -> str | None:
token = req.headers.get("Authorization")
user_id = ""
token = req.headers.get('Authorization')
user_id = ''
if token:
# Logging the authentication token
logger.error(f"[services.auth] checking auth token: {token}")
query_name = "validate_jwt_token"
operation = "ValidateToken"
logger.error(f'[services.auth] checking auth token: {token}')
query_name = 'validate_jwt_token'
operation = 'ValidateToken'
variables = {
"params": {
"token_type": "access_token",
"token": token,
'params': {
'token_type': 'access_token',
'token': token,
}
}

gql = {
"query": f"query {operation}($params: ValidateJWTTokenInput!) {{ {query_name}(params: $params) {{ is_valid claims }} }}",
"variables": variables,
"operationName": operation,
'query': f'query {operation}($params: ValidateJWTTokenInput!) {{ {query_name}(params: $params) {{ is_valid claims }} }}',
'variables': variables,
'operationName': operation,
}
data = await request_data(gql)
if data:
user_id = data.get("data", {}).get(query_name, {}).get("claims", {}).get("sub")
user_id = data.get('data', {}).get(query_name, {}).get('claims', {}).get('sub')
return user_id

if not user_id:
raise HTTPException(status_code=401, detail="Unauthorized")
raise HTTPException(status_code=401, detail='Unauthorized')


async def add_user_role(user_id):
logger.info(f"[services.auth] add author role for user_id: {user_id}")
query_name = "_update_user"
operation = "UpdateUserRoles"
headers = {"Content-Type": "application/json", "x-authorizer-admin-secret": AUTH_SECRET}
variables = {"params": {"roles": "author, reader", "id": user_id}}
logger.info(f'[services.auth] add author role for user_id: {user_id}')
query_name = '_update_user'
operation = 'UpdateUserRoles'
headers = {
'Content-Type': 'application/json',
'x-authorizer-admin-secret': AUTH_SECRET,
}
variables = {'params': {'roles': 'author, reader', 'id': user_id}}
gql = {
"query": f"mutation {operation}($params: UpdateUserInput!) {{ {query_name}(params: $params) {{ id roles }} }}",
"variables": variables,
"operationName": operation,
'query': f'mutation {operation}($params: UpdateUserInput!) {{ {query_name}(params: $params) {{ id roles }} }}',
'variables': variables,
'operationName': operation,
}
data = await request_data(gql, headers)
if data:
user_id = data.get("data", {}).get(query_name, {}).get("id")
user_id = data.get('data', {}).get(query_name, {}).get('id')
return user_id


def login_required(f):
@wraps(f)
async def decorated_function(*args, **kwargs):
info = args[1]
context = info.context
req = context.get("request")
req = context.get('request')
user_id = await check_auth(req)
if user_id:
context["user_id"] = user_id.strip()
context['user_id'] = user_id.strip()
return await f(*args, **kwargs)

return decorated_function
@@ -96,7 +101,7 @@ def auth_request(f):
req = args[0]
user_id = await check_auth(req)
if user_id:
req["user_id"] = user_id.strip()
req['user_id'] = user_id.strip()
return await f(*args, **kwargs)

return decorated_function
@@ -1,47 +1,48 @@
import logging
import math
import time
import logging

# from contextlib import contextmanager
from typing import Any, Callable, Dict, TypeVar

# from psycopg2.errors import UniqueViolation
from sqlalchemy import Column, Integer, create_engine, event
from sqlalchemy.engine import Engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session
from sqlalchemy.sql.schema import Table
from sqlalchemy.engine import Engine

from settings import DB_URL


logging.basicConfig()
logger = logging.getLogger("\t [sqlalchemy.profiler]\t")
logger = logging.getLogger('\t [sqlalchemy.profiler]\t')
logger.setLevel(logging.DEBUG)


@event.listens_for(Engine, "before_cursor_execute")
@event.listens_for(Engine, 'before_cursor_execute')
def before_cursor_execute(conn, cursor, statement, parameters, context, executemany):
conn.info.setdefault("query_start_time", []).append(time.time())
conn.info.setdefault('query_start_time', []).append(time.time())
# logger.debug(f" {statement}")


@event.listens_for(Engine, "after_cursor_execute")
@event.listens_for(Engine, 'after_cursor_execute')
def after_cursor_execute(conn, cursor, statement, parameters, context, executemany):
total = time.time() - conn.info["query_start_time"].pop(-1)
total = time.time() - conn.info['query_start_time'].pop(-1)
total = math.floor(total * 10000) / 10000
if total > 35:
print(f"\n{statement}\n----------------- Finished in {total} s ")
print(f'\n{statement}\n----------------- Finished in {total} s ')


engine = create_engine(DB_URL, echo=False, pool_size=10, max_overflow=20)

T = TypeVar("T")
T = TypeVar('T')

REGISTRY: Dict[str, type] = {}


# @contextmanager
def local_session(src=""):
def local_session(src=''):
return Session(bind=engine, expire_on_commit=False)

# try:
@@ -69,7 +70,7 @@ class Base(declarative_base()):
__init__: Callable
__allow_unmapped__ = True
__abstract__ = True
__table_args__ = {"extend_existing": True}
__table_args__ = {'extend_existing': True}

id = Column(Integer, primary_key=True)
@@ -78,12 +79,12 @@ class Base(declarative_base()):
def dict(self) -> Dict[str, Any]:
column_names = self.__table__.columns.keys()
if "_sa_instance_state" in column_names:
column_names.remove("_sa_instance_state")
if '_sa_instance_state' in column_names:
column_names.remove('_sa_instance_state')
try:
return {c: getattr(self, c) for c in column_names}
except Exception as e:
print(f"[services.db] Error dict: {e}")
print(f'[services.db] Error dict: {e}')
return {}

def update(self, values: Dict[str, Any]) -> None:
@@ -19,7 +19,7 @@ class Following:
class FollowingManager:
lock = asyncio.Lock()
followers_by_kind = {}
data = {"author": [], "topic": [], "shout": [], "community": []}
data = {'author': [], 'topic': [], 'shout': [], 'community': []}

@staticmethod
async def register(kind, uid):
@@ -41,7 +41,7 @@ class FollowingManager:
async with FollowingManager.lock:
entities = FollowingManager.followers_by_kind.get(kind, [])
for entity in entities[:]: # Use a copy to iterate
if payload.shout["created_by"] == entity.uid:
if payload.shout['created_by'] == entity.uid:
entity.queue.put_nowait(payload)
except Exception as e:
print(Exception(e))
@@ -3,43 +3,43 @@ import json
from services.rediscache import redis


async def notify_reaction(reaction, action: str = "create"):
channel_name = "reaction"
data = {"payload": reaction, "action": action}
async def notify_reaction(reaction, action: str = 'create'):
channel_name = 'reaction'
data = {'payload': reaction, 'action': action}
try:
await redis.publish(channel_name, json.dumps(data))
except Exception as e:
print(f"[services.notify] Failed to publish to channel {channel_name}: {e}")
print(f'[services.notify] Failed to publish to channel {channel_name}: {e}')


async def notify_shout(shout, action: str = "create"):
channel_name = "shout"
data = {"payload": shout, "action": action}
async def notify_shout(shout, action: str = 'create'):
channel_name = 'shout'
data = {'payload': shout, 'action': action}
try:
await redis.publish(channel_name, json.dumps(data))
except Exception as e:
print(f"[services.notify] Failed to publish to channel {channel_name}: {e}")
print(f'[services.notify] Failed to publish to channel {channel_name}: {e}')


async def notify_follower(follower: dict, author_id: int, action: str = "follow"):
channel_name = f"follower:{author_id}"
async def notify_follower(follower: dict, author_id: int, action: str = 'follow'):
channel_name = f'follower:{author_id}'
try:
# Simplify dictionary before publishing
simplified_follower = {k: follower[k] for k in ["id", "name", "slug", "pic"]}
simplified_follower = {k: follower[k] for k in ['id', 'name', 'slug', 'pic']}

data = {"payload": simplified_follower, "action": action}
data = {'payload': simplified_follower, 'action': action}

# Convert data to JSON string
json_data = json.dumps(data)

# Ensure the data is not empty before publishing
if not json_data:
raise ValueError("Empty data to publish.")
raise ValueError('Empty data to publish.')

# Use the 'await' keyword when publishing
await redis.publish(channel_name, json_data)

except Exception as e:
# Log the error and re-raise it
print(f"[services.notify] Failed to publish to channel {channel_name}: {e}")
print(f'[services.notify] Failed to publish to channel {channel_name}: {e}')
raise
@@ -1,9 +1,11 @@
import logging

import redis.asyncio as aredis

from settings import REDIS_URL
import logging

logger = logging.getLogger("[services.redis] ")

logger = logging.getLogger('[services.redis] ')
logger.setLevel(logging.DEBUG)
@@ -23,7 +25,7 @@ class RedisCache:
async def execute(self, command, *args, **kwargs):
if self._client:
try:
logger.debug(f"{command} {args} {kwargs}")
logger.debug(f'{command} {args} {kwargs}')
r = await self._client.execute_command(command, *args, **kwargs)
logger.debug(type(r))
logger.debug(r)
@@ -51,6 +53,7 @@ class RedisCache:
return
await self._client.publish(channel, data)


redis = RedisCache()

__all__ = ["redis"]
__all__ = ['redis']
@@ -1,5 +1,6 @@
from ariadne import MutationType, QueryType # , ScalarType


# datetime_scalar = ScalarType("DateTime")
query = QueryType()
mutation = MutationType()
@@ -15,28 +15,28 @@ class SearchService:
@staticmethod
async def init(session):
async with SearchService.lock:
logging.info("[services.search] Initializing SearchService")
logging.info('[services.search] Initializing SearchService')

@staticmethod
async def search(text: str, limit: int = 50, offset: int = 0) -> List[Shout]:
payload = []
try:
# TODO: add ttl for redis cached search results
cached = await redis.execute("GET", text)
cached = await redis.execute('GET', text)
if not cached:
async with SearchService.lock:
# Use aiohttp to send a request to ElasticSearch
async with aiohttp.ClientSession() as session:
search_url = f"https://search.discours.io/search?q={text}"
search_url = f'https://search.discours.io/search?q={text}'
async with session.get(search_url) as response:
if response.status == 200:
payload = await response.json()
await redis.execute("SET", text, json.dumps(payload)) # use redis as cache
await redis.execute('SET', text, json.dumps(payload)) # use redis as cache
else:
logging.error(f"[services.search] response: {response.status} {await response.text()}")
logging.error(f'[services.search] response: {response.status} {await response.text()}')
elif isinstance(cached, str):
payload = json.loads(cached)
except Exception as e:
logging.error(f"[services.search] Error during search: {e}")
logging.error(f'[services.search] Error during search: {e}')

return payload[offset : offset + limit]
@@ -1,9 +1,10 @@
from services.rediscache import redis
import json

from services.rediscache import redis


async def get_unread_counter(chat_id: str, author_id: int) -> int:
r = await redis.execute("LLEN", f"chats/{chat_id}/unread/{author_id}")
r = await redis.execute('LLEN', f'chats/{chat_id}/unread/{author_id}')
if isinstance(r, str):
return int(r)
elif isinstance(r, int):
@@ -11,8 +12,9 @@ async def get_unread_counter(chat_id: str, author_id: int) -> int:
else:
return 0


async def get_total_unread_counter(author_id: int) -> int:
chats_set = await redis.execute("SMEMBERS", f"chats_by_author/{author_id}")
chats_set = await redis.execute('SMEMBERS', f'chats_by_author/{author_id}')
s = 0
if isinstance(chats_set, str):
chats_set = json.loads(chats_set)
@@ -1,40 +1,39 @@
import os
from typing import Dict, List
import logging
import time
import json
import asyncio
import json
import logging
import os
import time
from datetime import datetime, timedelta, timezone
from os import environ
from typing import Dict

# ga
from apiclient.discovery import build
from google.oauth2.service_account import Credentials
import pandas as pd

from orm.author import Author
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic
from services.db import local_session


# Настройка журналирования
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger("\t[services.viewed]\t")
logger = logging.getLogger('\t[services.viewed]\t')
logger.setLevel(logging.DEBUG)

# Пути к ключевым файлам и идентификатор представления в Google Analytics
GOOGLE_KEYFILE_PATH = os.environ.get("GOOGLE_KEYFILE_PATH", '/dump/google-service.json')
GOOGLE_GA_VIEW_ID = os.environ.get("GOOGLE_GA_VIEW_ID", "")
gaBaseUrl = "https://analyticsreporting.googleapis.com/v4"
GOOGLE_KEYFILE_PATH = os.environ.get('GOOGLE_KEYFILE_PATH', '/dump/google-service.json')
GOOGLE_GA_VIEW_ID = os.environ.get('GOOGLE_GA_VIEW_ID', '')
# GOOGLE_ANALYTICS_API = 'https://analyticsreporting.googleapis.com/v4'
GOOGLE_ANALYTICS_SCOPES = ['https://www.googleapis.com/auth/analytics.readonly']


# Функция для создания объекта службы Analytics Reporting API V4
def get_service():
SCOPES = ['https://www.googleapis.com/auth/analytics.readonly']
credentials = Credentials.from_service_account_file(GOOGLE_KEYFILE_PATH, scopes=SCOPES)
credentials = Credentials.from_service_account_file(GOOGLE_KEYFILE_PATH, scopes=GOOGLE_ANALYTICS_SCOPES)
service = build(serviceName='analyticsreporting', version='v4', credentials=credentials)
return service


class ViewedStorage:
lock = asyncio.Lock()
views_by_shout = {}
@@ -45,7 +44,7 @@ class ViewedStorage:
analytics_client = None
auth_result = None
disabled = False
date_range = ""
date_range = ''

@staticmethod
async def init():
@@ -54,13 +53,13 @@ class ViewedStorage:
async with self.lock:
if os.path.exists(GOOGLE_KEYFILE_PATH):
self.analytics_client = get_service()
logger.info(f" * Постоянная авторизация в Google Analytics {self.analytics_client}")
logger.info(f' * Постоянная авторизация в Google Analytics {self.analytics_client}')

# Загрузка предварительно подсчитанных просмотров из файла JSON
self.load_precounted_views()

# Установка диапазона дат на основе времени создания файла views.json
views_json_path = "/dump/views.json"
views_json_path = '/dump/views.json'
creation_time = datetime.fromtimestamp(os.path.getctime(views_json_path))
end_date = datetime.now(timezone.utc).strftime('%Y-%m-%d')
start_date = creation_time.strftime('%Y-%m-%d')
@@ -69,7 +68,7 @@ class ViewedStorage:
views_stat_task = asyncio.create_task(self.worker())
logger.info(views_stat_task)
else:
logger.info(" * Пожалуйста, добавьте ключевой файл Google Analytics")
logger.info(' * Пожалуйста, добавьте ключевой файл Google Analytics')
self.disabled = True

@staticmethod
@@ -77,33 +76,43 @@ class ViewedStorage:
"""Загрузка предварительно подсчитанных просмотров из файла JSON"""
self = ViewedStorage
try:
with open("/dump/views.json", "r") as file:
with open('/dump/views.json', 'r') as file:
precounted_views = json.load(file)
self.views_by_shout.update(precounted_views)
logger.info(f" * {len(precounted_views)} предварительно подсчитанных просмотров shouts успешно загружены.")
logger.info(
f' * {len(precounted_views)} предварительно подсчитанных просмотров shouts успешно загружены.'
)
except Exception as e:
logger.error(f"Ошибка загрузки предварительно подсчитанных просмотров: {e}")
logger.error(f'Ошибка загрузки предварительно подсчитанных просмотров: {e}')

@staticmethod
async def update_pages():
"""Запрос всех страниц от Google Analytics, отсортированных по количеству просмотров"""
self = ViewedStorage
if not self.disabled and GOOGLE_GA_VIEW_ID:
logger.info(" ⎧ Обновление данных просмотров от Google Analytics ---")
logger.info(' ⎧ Обновление данных просмотров от Google Analytics ---')
try:
start = time.time()
async with self.lock:
if self.analytics_client:
data = self.analytics_client.reports().batchGet(body={
'reportRequests': [{
'viewId': GOOGLE_GA_VIEW_ID,
'dateRanges': self.date_range,
'metrics': [{'expression': 'ga:pageviews'}],
'dimensions': [{'name': 'ga:pagePath'}],
}]
}).execute()
data = (
self.analytics_client.reports()
.batchGet(
body={
'reportRequests': [
{
'viewId': GOOGLE_GA_VIEW_ID,
'dateRanges': self.date_range,
'metrics': [{'expression': 'ga:pageviews'}],
'dimensions': [{'name': 'ga:pagePath'}],
}
]
}
)
.execute()
)
if isinstance(data, dict):
slugs = set([])
slugs = set()
reports = data.get('reports', [])
if reports and isinstance(reports, list):
rows = list(reports[0].get('data', {}).get('rows', []))
@@ -113,7 +122,7 @@ class ViewedStorage:
dimensions = row.get('dimensions', [])
if isinstance(dimensions, list) and dimensions:
page_path = dimensions[0]
slug = page_path.split("discours.io/")[-1]
slug = page_path.split('discours.io/')[-1]
views_count = int(row['metrics'][0]['values'][0])

# Обновление данных в хранилище
@@ -124,15 +133,15 @@ class ViewedStorage:
# Запись путей страниц для логирования
slugs.add(slug)

logger.info(f" ⎪ Собрано страниц: {len(slugs)} ")
logger.info(f' ⎪ Собрано страниц: {len(slugs)} ')

end = time.time()
logger.info(" ⎪ Обновление страниц заняло %fs " % (end - start))
logger.info(' ⎪ Обновление страниц заняло %fs ' % (end - start))

except Exception:
import traceback
traceback.print_exc()

traceback.print_exc()

@staticmethod
async def get_shout(shout_slug) -> int:
@@ -178,10 +187,14 @@ class ViewedStorage:
dictionary[key] = list(set(dictionary.get(key, []) + [value]))

# Обновление тем и авторов с использованием вспомогательной функции
for [_shout_topic, topic] in session.query(ShoutTopic, Topic).join(Topic).join(Shout).where(Shout.slug == shout_slug).all():
for [_shout_topic, topic] in (
session.query(ShoutTopic, Topic).join(Topic).join(Shout).where(Shout.slug == shout_slug).all()
):
update_groups(self.shouts_by_topic, topic.slug, shout_slug)

for [_shout_topic, author] in session.query(ShoutAuthor, Author).join(Author).join(Shout).where(Shout.slug == shout_slug).all():
for [_shout_topic, author] in (
session.query(ShoutAuthor, Author).join(Author).join(Shout).where(Shout.slug == shout_slug).all()
):
update_groups(self.shouts_by_author, author.slug, shout_slug)

@staticmethod
@@ -194,20 +207,20 @@ class ViewedStorage:
while True:
try:
logger.info(" - Обновление записей...")
logger.info(' - Обновление записей...')
await self.update_pages()
failed = 0
except Exception:
failed += 1
logger.info(" - Обновление не удалось #%d, ожидание 10 секунд" % failed)
logger.info(' - Обновление не удалось #%d, ожидание 10 секунд' % failed)
if failed > 3:
logger.info(" - Больше не пытаемся обновить")
logger.info(' - Больше не пытаемся обновить')
break
if failed == 0:
when = datetime.now(timezone.utc) + timedelta(seconds=self.period)
t = format(when.astimezone().isoformat())
logger.info(" ⎩ Следующее обновление: %s" % (t.split("T")[0] + " " + t.split("T")[1].split(".")[0]))
logger.info(' ⎩ Следующее обновление: %s' % (t.split('T')[0] + ' ' + t.split('T')[1].split('.')[0]))
await asyncio.sleep(self.period)
else:
await asyncio.sleep(10)
logger.info(" - Попытка снова обновить данные")
logger.info(' - Попытка снова обновить данные')