search-fix, devstart-fix, cache-fix, logs-less
@@ -2,6 +2,11 @@
 
 All notable changes to this project are documented in this file.
 
+## [0.9.1] - 2025-07-31
+
+- fixed `dev.py`
+- fixed search startup
+- minor logging improvements
+
 ## [0.9.0] - 2025-07-31
 
 ## Migration to SQLAlchemy 2 types
cache/cache.py
@@ -37,7 +37,6 @@ from sqlalchemy import and_, join, select
 from auth.orm import Author, AuthorFollower
 from orm.shout import Shout, ShoutAuthor, ShoutTopic
 from orm.topic import Topic, TopicFollower
-from resolvers.stat import get_with_stat
 from services.db import local_session
 from services.redis import redis
 from utils.encoders import fast_json_dumps
@@ -119,7 +118,7 @@ async def update_follower_stat(follower_id: int, entity_type: str, count: int) -
 
 
 # Get author from cache
-async def get_cached_author(author_id: int, get_with_stat) -> dict | None:
+async def get_cached_author(author_id: int, get_with_stat=None) -> dict | None:
     logger.debug(f"[get_cached_author] Starting execution for author_id: {author_id}")
 
     author_key = f"author:id:{author_id}"
@@ -137,6 +136,9 @@ async def get_cached_author(author_id: int, get_with_stat) -> dict | None:
     logger.debug("[get_cached_author] Data not found in cache, loading from DB")
 
     # Load from database if not found in cache
+    if get_with_stat is None:
+        from resolvers.stat import get_with_stat
+
     q = select(Author).where(Author.id == author_id)
     authors = get_with_stat(q)
     logger.debug(f"[get_cached_author] DB query result: {len(authors) if authors else 0} records")
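The recurring edit in this file is an injectable-dependency pattern: the parameter defaults to `None` and the real `get_with_stat` is imported inside the function on demand, presumably to break the module-level import cycle between `cache/cache.py` and `resolvers/stat.py` now that the top-level import is gone. A minimal self-contained sketch of the same pattern, with the stdlib `hashlib` standing in for the real resolver:

```python
def checksum(data: bytes, hasher=None) -> str:
    if hasher is None:
        # Deferred import: resolved on first call rather than at module load,
        # mirroring how cache.py now imports resolvers.stat.get_with_stat.
        import hashlib

        hasher = hashlib.sha256
    return hasher(data).hexdigest()


print(checksum(b"hello"))  # uses the default; a test could inject a stub hasher
```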
@@ -188,12 +190,15 @@ async def get_cached_topic(topic_id: int) -> dict | None:
 
 
 # Get topic by slug from cache
-async def get_cached_topic_by_slug(slug: str, get_with_stat) -> dict | None:
+async def get_cached_topic_by_slug(slug: str, get_with_stat=None) -> dict | None:
     topic_key = f"topic:slug:{slug}"
     result = await redis.execute("GET", topic_key)
     if result:
         return orjson.loads(result)
     # Load from database if not found in cache
+    if get_with_stat is None:
+        from resolvers.stat import get_with_stat
+
     topic_query = select(Topic).where(Topic.slug == slug)
     topics = get_with_stat(topic_query)
     if topics:
@@ -337,7 +342,7 @@ async def get_cached_follower_topics(author_id: int):
 
 
 # Get author by author_id from cache
-async def get_cached_author_by_id(author_id: int, get_with_stat):
+async def get_cached_author_by_id(author_id: int, get_with_stat=None):
     """
     Retrieve author information by author_id, checking the cache first, then the database.
 
@@ -354,6 +359,9 @@ async def get_cached_author_by_id(author_id: int, get_with_stat):
         return orjson.loads(cached_author_data)
 
     # If data is not found in cache, query the database
+    if get_with_stat is None:
+        from resolvers.stat import get_with_stat
+
     author_query = select(Author).where(Author.id == author_id)
     authors = get_with_stat(author_query)
     if authors:
@@ -521,7 +529,7 @@ async def get_cached_entity(entity_type: str, entity_id: int, get_method, cache_
     return None
 
 
-async def cache_by_id(entity, entity_id: int, cache_method):
+async def cache_by_id(entity, entity_id: int, cache_method, get_with_stat=None):
     """
     Caches an entity by ID using the specified caching method
 
@@ -531,6 +539,9 @@ async def cache_by_id(entity, entity_id: int, cache_method):
         cache_method: caching function
     """
 
+    if get_with_stat is None:
+        from resolvers.stat import get_with_stat
+
     caching_query = select(entity).where(entity.id == entity_id)
     result = get_with_stat(caching_query)
     if not result or not result[0]:
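Because every `get_with_stat` parameter now defaults to `None`, call sites can simply drop the argument, and tests can inject a stub instead of touching `resolvers.stat` or the database. A hypothetical call-site sketch (the import path and view names are assumptions, not part of the diff):

```python
# Hypothetical usage of the updated helpers from cache/cache.py above.
from cache.cache import get_cached_author, get_cached_topic_by_slug


async def profile_view(author_id: int) -> dict | None:
    # No second argument needed any more; on a cache miss the function
    # lazily imports the real resolvers.stat.get_with_stat itself.
    return await get_cached_author(author_id)


async def topic_view(slug: str) -> dict | None:
    return await get_cached_topic_by_slug(slug)
```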
dev.py
@@ -76,7 +76,7 @@ def generate_certificates(domain="localhost", cert_file="localhost.pem", key_fil
     return None, None
 
 
-def run_server(host="localhost", port=8000, use_https=False, workers=1, domain="localhost") -> None:
+def run_server(host="127.0.0.1", port=8000, use_https=False, workers=1, domain="localhost") -> None:
     """
     Starts the Granian server with HTTPS support when needed
 
@@ -136,7 +136,7 @@ if __name__ == "__main__":
     parser.add_argument("--workers", type=int, default=1, help="Number of worker processes")
     parser.add_argument("--domain", type=str, default="localhost", help="Domain for the certificate")
     parser.add_argument("--port", type=int, default=8000, help="Port to run the server on")
-    parser.add_argument("--host", type=str, default="localhost", help="Host to run the server on")
+    parser.add_argument("--host", type=str, default="127.0.0.1", help="Host to run the server on")
 
     args = parser.parse_args()
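A likely motivation for switching the default from `localhost` to `127.0.0.1` (the commit message only says "devstart-fix"): on dual-stack systems `localhost` can resolve to the IPv6 loopback `::1` first, so a client and server that pick different address families fail to connect; the literal address pins IPv4. A quick way to see what your own system resolves:

```python
# On many machines this lists ('::1', ...) before ('127.0.0.1', ...),
# which is exactly the ambiguity the new 127.0.0.1 default sidesteps.
import socket

for family, _type, _proto, _canon, sockaddr in socket.getaddrinfo("localhost", 8000):
    print(family.name, sockaddr)
```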
@@ -36,7 +36,7 @@ class BaseModel(DeclarativeBase):
         """
         column_names = filter(lambda x: x not in FILTERED_FIELDS, self.__table__.columns.keys())
         data: builtins.dict[str, Any] = {}
-        logger.debug(f"Converting object to dictionary {'with access' if access else 'without access'}")
+        # logger.debug(f"Converting object to dictionary {'with access' if access else 'without access'}")
         try:
             for column_name in column_names:
                 try:
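Commenting the line out fits the commit's "logs-less" goal. Worth noting as a general `logging` fact (not something this diff changes): an f-string log pays the formatting cost on every call even when DEBUG is off, whereas `%`-style arguments are only formatted if the record is actually emitted:

```python
import logging

logger = logging.getLogger("db")
obj = {"id": 1}

# Formats the dict eagerly, even if DEBUG is disabled:
logger.debug(f"Converting object {obj}")
# Defers formatting until the logger decides to emit the record:
logger.debug("Converting object %s", obj)
```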
@@ -126,23 +126,34 @@ def get_json_builder() -> tuple[Any, Any, Any]:
 json_builder, json_array_builder, json_cast = get_json_builder()
 
 
-def create_table_if_not_exists(connection_or_engine: Connection | Engine, model_cls: Type[DeclarativeBase]) -> None:
+def create_table_if_not_exists(
+    connection_or_engine_or_session: Connection | Engine | Session, model_cls: Type[DeclarativeBase]
+) -> None:
     """Creates table for the given model if it doesn't exist"""
-    # If an Engine is passed, get a connection from it
-    connection = connection_or_engine.connect() if isinstance(connection_or_engine, Engine) else connection_or_engine
+    # Handle different input types
+    if isinstance(connection_or_engine_or_session, Session):
+        # Use session's bind
+        connection = connection_or_engine_or_session.get_bind()
+        should_close = False
+    elif isinstance(connection_or_engine_or_session, Engine):
+        # Get a connection from engine
+        connection = connection_or_engine_or_session.connect()
+        should_close = True
+    else:
+        # Already a connection
+        connection = connection_or_engine_or_session
+        should_close = False
 
     try:
         inspector = inspect(connection)
         if not inspector.has_table(model_cls.__tablename__):
-            # Use SQLAlchemy's built-in table creation instead of manual SQL generation
-            from sqlalchemy.schema import CreateTable
-
-            create_stmt = CreateTable(model_cls.__table__)  # type: ignore[arg-type]
-            connection.execute(create_stmt)
+            model_cls.__table__.create(bind=connection, checkfirst=False)
             logger.info(f"Created table: {model_cls.__tablename__}")
     finally:
-        # If we created a connection from an Engine, close it
-        if isinstance(connection_or_engine, Engine):
+        # Close connection only if we created it
+        if should_close:
             connection.close()
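With the widened signature, callers holding only a `Session` no longer need to unwrap it themselves. A minimal sketch against an in-memory SQLite engine (the demo model is illustrative, and it assumes the `create_table_if_not_exists` from the hunk above is in scope):

```python
# Illustrative model and engine; create_table_if_not_exists is the helper
# defined in the diff above (assumed importable from its module).
from sqlalchemy import Integer, create_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class Demo(Base):
    __tablename__ = "demo"
    id: Mapped[int] = mapped_column(Integer, primary_key=True)


engine = create_engine("sqlite:///:memory:")

with Session(engine) as session:
    # Session branch: the helper uses session.get_bind() and leaves
    # connection lifetime management to the session.
    create_table_if_not_exists(session, Demo)

# Engine branch: the helper opens and closes its own connection.
create_table_if_not_exists(engine, Demo)
```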
@@ -189,10 +189,22 @@ class SearchCache:
 class SearchService:
     def __init__(self) -> None:
         logger.info(f"Initializing search service with URL: {TXTAI_SERVICE_URL}")
-        self.available = SEARCH_ENABLED
+
+        # Validate the URL
+        if not TXTAI_SERVICE_URL or not TXTAI_SERVICE_URL.startswith(("http://", "https://")):
+            self.available = False
+            logger.info("Search disabled (invalid TXTAI_SERVICE_URL)")
+        else:
+            self.available = SEARCH_ENABLED
 
         # Use different timeout settings for indexing and search requests
-        self.client = AsyncClient(timeout=30.0, base_url=TXTAI_SERVICE_URL)
-        self.index_client = AsyncClient(timeout=120.0, base_url=TXTAI_SERVICE_URL)
+        if self.available:
+            self.client = AsyncClient(timeout=30.0, base_url=TXTAI_SERVICE_URL)
+            self.index_client = AsyncClient(timeout=120.0, base_url=TXTAI_SERVICE_URL)
+        else:
+            self.client = None
+            self.index_client = None
 
         # Initialize search cache
         self.cache = SearchCache() if SEARCH_CACHE_ENABLED else None
@@ -205,7 +217,7 @@ class SearchService:
 
     async def info(self) -> dict:
         """Return information about search service"""
-        if not self.available:
+        if not self.available or not self.client:
             return {"status": "disabled"}
         try:
            response: Response = await self.client.get("/info")
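The two hunks together implement a graceful degraded mode: an invalid `TXTAI_SERVICE_URL` now disables search at construction time instead of producing clients pointed at a bad base URL, and `info()` guards on both the flag and the client. A self-contained sketch of the pattern (names mirror the diff; `AsyncClient` here appears to be httpx, which this sketch assumes):

```python
# Minimal sketch of the disabled-service pattern from this commit.
import asyncio

from httpx import AsyncClient


class MiniSearch:
    def __init__(self, base_url: str | None) -> None:
        ok = bool(base_url) and base_url.startswith(("http://", "https://"))
        self.available = ok
        # No client objects are built at all when the URL is unusable.
        self.client = AsyncClient(base_url=base_url, timeout=30.0) if ok else None

    async def info(self) -> dict:
        # Double guard, as in the diff: defensive against either field
        # being cleared independently of the other.
        if not self.available or not self.client:
            return {"status": "disabled"}
        response = await self.client.get("/info")
        return response.json()


print(asyncio.run(MiniSearch("not-a-url").info()))  # -> {'status': 'disabled'}
```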