Commit 90260534eb (parent f160ab4d26)
@@ -5,12 +5,12 @@
 import time
 from typing import Any, Dict
 
+from sqlalchemy.orm import exc
 from starlette.authentication import UnauthenticatedUser
 from starlette.datastructures import Headers
 from starlette.requests import Request
 from starlette.responses import JSONResponse, Response
 from starlette.types import ASGIApp, Receive, Scope, Send
-from sqlalchemy.orm import exc
 
 from auth.credentials import AuthCredentials
 from auth.orm import Author
@@ -18,6 +18,8 @@ from auth.sessions import SessionManager
 from services.db import local_session
 from settings import (
     ADMIN_EMAILS as ADMIN_EMAILS_LIST,
+)
+from settings import (
     SESSION_COOKIE_HTTPONLY,
     SESSION_COOKIE_MAX_AGE,
     SESSION_COOKIE_NAME,
@@ -33,7 +35,9 @@ ADMIN_EMAILS = ADMIN_EMAILS_LIST.split(",")
 class AuthenticatedUser:
     """Аутентифицированный пользователь"""
 
-    def __init__(self, user_id: str, username: str = "", roles: list = None, permissions: dict = None, token: str = None):
+    def __init__(
+        self, user_id: str, username: str = "", roles: list = None, permissions: dict = None, token: str = None
+    ):
         self.user_id = user_id
         self.username = username
         self.roles = roles or []
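The `AuthenticatedUser` change above is purely a line wrap; the `None`-sentinel defaults still guard against sharing one mutable list or dict between instances. A minimal sketch of that pattern, where the `Optional` annotations and the `permissions`/`token` assignments are illustrative assumptions rather than lines from this commit:

    from typing import Optional


    class AuthenticatedUserSketch:
        """Sketch of the constructor's default handling (not the repository class)."""

        def __init__(
            self,
            user_id: str,
            username: str = "",
            roles: Optional[list] = None,
            permissions: Optional[dict] = None,
            token: Optional[str] = None,
        ):
            self.user_id = user_id
            self.username = username
            # A None sentinel plus `or []` / `or {}` avoids the shared-mutable-default pitfall.
            self.roles = roles or []
            self.permissions = permissions or {}  # assumed; this line is not shown in the hunk
            self.token = token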
@@ -1,18 +1,18 @@
 import time
-import orjson
 from secrets import token_urlsafe
 
+import orjson
 from authlib.integrations.starlette_client import OAuth
 from authlib.oauth2.rfc7636 import create_s256_code_challenge
 from starlette.responses import JSONResponse, RedirectResponse
 
 from auth.orm import Author
 from auth.tokenstorage import TokenStorage
+from resolvers.auth import generate_unique_slug
 from services.db import local_session
 from services.redis import redis
 from settings import FRONTEND_URL, OAUTH_CLIENTS
 from utils.logger import root_logger as logger
-from resolvers.auth import generate_unique_slug
 
 oauth = OAuth()
 
@@ -113,7 +113,7 @@ async def oauth_login(request):
         "code_verifier": code_verifier,
         "provider": provider,
         "redirect_uri": redirect_uri,
-        "created_at": int(time.time())
+        "created_at": int(time.time()),
     }
     await store_oauth_state(state, oauth_data)
 
@@ -223,6 +223,7 @@ async def store_oauth_state(state: str, data: dict) -> None:
     key = f"oauth_state:{state}"
     await redis.execute("SETEX", key, OAUTH_STATE_TTL, orjson.dumps(data))
 
+
 async def get_oauth_state(state: str) -> dict:
     """Получает и удаляет OAuth состояние из Redis (one-time use)"""
     key = f"oauth_state:{state}"
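Only the first lines of `get_oauth_state` appear in the hunk above; its docstring says the state is read once and then deleted. A minimal sketch of that one-time read, assuming the same `redis.execute` wrapper and `orjson` encoding used by `store_oauth_state` (the GET/DEL body is an assumption, not part of the diff):

    import orjson
    from services.redis import redis  # same async wrapper used by store_oauth_state above


    async def get_oauth_state_sketch(state: str) -> dict | None:
        """One-time read: fetch the stored state, then delete it (body assumed)."""
        key = f"oauth_state:{state}"
        data = await redis.execute("GET", key)
        if not data:
            return None
        await redis.execute("DEL", key)  # consume the state so the same value cannot be replayed
        return orjson.loads(data)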
cache/cache.py (vendored, 10 changed lines)
@@ -117,7 +117,9 @@ async def get_cached_author(author_id: int, get_with_stat):
     if result:
         logger.debug(f"[get_cached_author] Найдены данные в кэше, размер: {len(result)} байт")
         cached_data = orjson.loads(result)
-        logger.debug(f"[get_cached_author] Кэшированные данные имеют ключи: {list(cached_data.keys()) if cached_data else 'None'}")
+        logger.debug(
+            f"[get_cached_author] Кэшированные данные имеют ключи: {list(cached_data.keys()) if cached_data else 'None'}"
+        )
         return cached_data
 
     logger.debug(f"[get_cached_author] Данные не найдены в кэше, загрузка из БД")
@@ -132,8 +134,10 @@ async def get_cached_author(author_id: int, get_with_stat):
     logger.debug(f"[get_cached_author] Получен автор из БД: {type(author)}, id: {getattr(author, 'id', 'N/A')}")
 
     # Используем безопасный вызов dict() для Author
-    author_dict = author.dict() if hasattr(author, 'dict') else author.__dict__
-    logger.debug(f"[get_cached_author] Сериализованные данные автора: {list(author_dict.keys()) if author_dict else 'None'}")
+    author_dict = author.dict() if hasattr(author, "dict") else author.__dict__
+    logger.debug(
+        f"[get_cached_author] Сериализованные данные автора: {list(author_dict.keys()) if author_dict else 'None'}"
+    )
 
     await cache_author(author_dict)
     logger.debug(f"[get_cached_author] Автор кэширован")
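Besides wrapping the long `logger.debug` calls, the interesting line is the guarded serialization of the author object. A small standalone sketch of that fallback; `AuthorLike` is a hypothetical stand-in, not a class from the repository:

    from typing import Any


    def to_dict(obj: Any) -> dict:
        # Prefer an explicit dict() method when the model provides one; otherwise fall back
        # to the instance __dict__ (which, for SQLAlchemy models, also carries internal state).
        return obj.dict() if hasattr(obj, "dict") else dict(obj.__dict__)


    class AuthorLike:
        def __init__(self) -> None:
            self.id = 1
            self.slug = "demo"


    print(to_dict(AuthorLike()))  # {'id': 1, 'slug': 'demo'}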
main.py (2 changed lines)
@@ -15,6 +15,7 @@ from starlette.staticfiles import StaticFiles
 
 from auth.handler import EnhancedGraphQLHTTPHandler
 from auth.middleware import AuthMiddleware, auth_middleware
+from auth.oauth import oauth_callback, oauth_login
 from cache.precache import precache_data
 from cache.revalidator import revalidation_manager
 from services.exception import ExceptionHandlerMiddleware
@@ -24,7 +25,6 @@ from services.search import check_search_service, initialize_search_index_backgr
 from services.viewed import ViewedStorage
 from settings import DEV_SERVER_PID_FILE_NAME
 from utils.logger import root_logger as logger
-from auth.oauth import oauth_login, oauth_callback
 
 DEVMODE = os.getenv("DOKKU_APP_TYPE", "false").lower() == "false"
 DIST_DIR = join(os.path.dirname(__file__), "dist")  # Директория для собранных файлов
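main.py now imports `oauth_callback` and `oauth_login` next to the other auth imports instead of at the bottom of the block. How the handlers are mounted is not part of this diff; a hedged sketch of one plausible Starlette wiring, with the URL paths being assumptions:

    from starlette.applications import Starlette
    from starlette.routing import Route

    from auth.oauth import oauth_callback, oauth_login

    # Hypothetical route table; the real paths and app construction are not shown in this commit.
    app = Starlette(
        routes=[
            Route("/oauth/{provider}", oauth_login),
            Route("/oauth/{provider}/callback", oauth_callback),
        ]
    )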
@@ -290,7 +290,7 @@ def get_with_stat(q):
             stat["followers"] = cols[2]  # Статистика по подписчикам
         if is_author:
             # Дополнительная проверка типа entity.id
-            if not hasattr(entity, 'id'):
+            if not hasattr(entity, "id"):
                 logger.error(f"Entity does not have id attribute: {entity}")
                 continue
             entity_id = entity.id
@@ -302,7 +302,7 @@ def get_with_stat(q):
             stat["comments"] = get_author_comments_stat(entity_id)  # Статистика по комментариям
         else:
             # Дополнительная проверка типа entity.id для тем
-            if not hasattr(entity, 'id'):
+            if not hasattr(entity, "id"):
                 logger.error(f"Entity does not have id attribute: {entity}")
                 continue
             entity_id = entity.id
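Both hunks only normalize quote style inside the `hasattr` guard. For reference, a compact, self-contained sketch of the guard-and-skip pattern itself; `rows` and the logger setup here are stand-ins, not code from the repository:

    import logging

    logger = logging.getLogger(__name__)


    def collect_ids(rows: list) -> list[int]:
        ids = []
        for entity in rows:
            if not hasattr(entity, "id"):
                # Skip malformed rows instead of raising, mirroring the continue above.
                logger.error(f"Entity does not have id attribute: {entity}")
                continue
            ids.append(entity.id)
        return ids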