Compare commits

4 Commits: 2bebfbd4df ... 82870a4e47

Author | SHA1 | Date
---|---|---
 | 82870a4e47 |
 | 80b909d801 |
 | 1ada0a02f9 |
 | 44aef147b5 |
```diff
@@ -35,7 +35,7 @@ jobs:
           ssh_private_key: ${{ secrets.SSH_PRIVATE_KEY }}

       - name: Push to dokku for staging branch
-        if: github.ref == 'refs/heads/staging'
+        if: github.ref == 'refs/heads/feat/sv-search-v4'
         uses: dokku/github-action@master
         with:
           branch: 'dev'
```
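Only the guard changes in this hunk: the dokku push step now fires on pushes to `feat/sv-search-v4` instead of `staging`, while still deploying to the `dev` app branch. The compare view does not show the workflow's filename, but the `jobs:` context and `dokku/github-action@master` identify it as the deploy workflow. The step is still named "Push to dokku for staging branch", so the name and its condition now disagree; this reads like a temporary switch to get the feature branch deployed for testing.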
cache/precache.py (vendored, 12 changed lines)
```diff
@@ -77,11 +77,15 @@ async def precache_topics_followers(topic_id: int, session):

 async def precache_data():
     logger.info("precaching...")
+    logger.debug("Entering precache_data")
     try:
         key = "authorizer_env"
+        logger.debug(f"Fetching existing hash for key '{key}' from Redis")
         # cache reset
         value = await redis.execute("HGETALL", key)
+        logger.debug(f"Fetched value for '{key}': {value}")
         await redis.execute("FLUSHDB")
+        logger.debug("Redis database flushed")
         logger.info("redis: FLUSHDB")

         # convert the dict into a flat argument list for HSET
```
```diff
@@ -97,21 +101,27 @@ async def precache_data():
             await redis.execute("HSET", key, *value)
         logger.info(f"redis hash '{key}' was restored")

+    logger.info("Beginning topic precache phase")
     with local_session() as session:
         # topics
         q = select(Topic).where(Topic.community == 1)
         topics = get_with_stat(q)
+        logger.info(f"Found {len(topics)} topics to precache")
         for topic in topics:
             topic_dict = topic.dict() if hasattr(topic, "dict") else topic
+            logger.debug(f"Precaching topic id={topic_dict.get('id')}")
             await cache_topic(topic_dict)
+            logger.debug(f"Cached topic id={topic_dict.get('id')}")
             await asyncio.gather(
                 precache_topics_followers(topic_dict["id"], session),
                 precache_topics_authors(topic_dict["id"], session),
             )
+            logger.debug(f"Finished precaching followers and authors for topic id={topic_dict.get('id')}")
         logger.info(f"{len(topics)} topics and their followings precached")

         # authors
         authors = get_with_stat(select(Author).where(Author.user.is_not(None)))
+        logger.info(f"Found {len(authors)} authors to precache")
         logger.info(f"{len(authors)} authors found in database")
         for author in authors:
             if isinstance(author, Author):
```
```diff
@@ -119,10 +129,12 @@ async def precache_data():
                 author_id = profile.get("id")
                 user_id = profile.get("user", "").strip()
                 if author_id and user_id:
+                    logger.debug(f"Precaching author id={author_id}")
                     await cache_author(profile)
                     await asyncio.gather(
                         precache_authors_followers(author_id, session), precache_authors_follows(author_id, session)
                     )
+                    logger.debug(f"Finished precaching followers and follows for author id={author_id}")
                 else:
                     logger.error(f"fail caching {author}")
         logger.info(f"{len(authors)} authors and their followings precached")
```
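The `cache/precache.py` hunks only add log lines, but the first one walks past a pattern worth making explicit: `precache_data()` snapshots the `authorizer_env` hash, wipes the entire Redis database with FLUSHDB, then writes the hash back. Below is a minimal self-contained sketch of that snapshot/flush/restore step, written against the stock `redis.asyncio` client rather than the project's `redis.execute` wrapper (an assumption made so the example runs on its own):

```python
import asyncio

from redis.asyncio import Redis


async def reset_cache_preserving(r: Redis, key: str = "authorizer_env") -> None:
    value = await r.hgetall(key)  # snapshot the hash before the wipe
    await r.flushdb()             # drop every key in the current DB
    if value:
        # write the snapshot back; mapping= restores all fields in one HSET
        await r.hset(key, mapping=value)


async def main() -> None:
    r = Redis()
    await r.hset("authorizer_env", mapping={"CLIENT_ID": "abc"})
    await r.set("stale_entry", "1")
    await reset_cache_preserving(r)
    print(await r.hgetall("authorizer_env"))  # survives the flush
    print(await r.exists("stale_entry"))      # 0: flushed
    await r.aclose()


asyncio.run(main())
```

Everything written between the snapshot and the restore is lost by design: the rest of the function rebuilds the topic and author caches from the database immediately afterwards, and the new debug lines bracket exactly these steps.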
main.py (15 changed lines)
```diff
@@ -17,7 +17,6 @@ from cache.revalidator import revalidation_manager
 from services.exception import ExceptionHandlerMiddleware
 from services.redis import redis
 from services.schema import create_all_tables, resolvers
-#from services.search import search_service
 from services.search import search_service, initialize_search_index
 from services.viewed import ViewedStorage
 from services.webhook import WebhookEndpoint, create_webhook_endpoint
```
```diff
@@ -43,6 +42,15 @@ async def check_search_service():
     else:
         print(f"[INFO] Search service is available: {info}")


+# Helper to run precache with timeout and catch errors
+async def precache_with_timeout():
+    try:
+        await asyncio.wait_for(precache_data(), timeout=60)
+    except asyncio.TimeoutError:
+        print("[precache] Precache timed out after 60 seconds")
+    except Exception as e:
+        print(f"[precache] Error during precache: {e}")
+
 # indexing DB data
 # async def indexing():
```
```diff
@@ -53,9 +61,12 @@ async def lifespan(_app):
     try:
         print("[lifespan] Starting application initialization")
         create_all_tables()
+
+        # schedule precaching in background with timeout and error handling
+        asyncio.create_task(precache_with_timeout())
+
         await asyncio.gather(
             redis.connect(),
-            precache_data(),
             ViewedStorage.init(),
             create_webhook_endpoint(),
             check_search_service(),
```
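Taken together, the `main.py` hunks move `precache_data()` out of the startup `asyncio.gather(...)` and into a fire-and-forget background task capped at 60 seconds, so a slow or hung precache can no longer stall application startup. A self-contained sketch of the same pattern follows; `slow_precache` and the short demo timeout are stand-ins, not the project's code:

```python
import asyncio


async def slow_precache() -> None:
    # stand-in for precache_data(); deliberately overruns the budget
    await asyncio.sleep(5)


async def precache_with_timeout(timeout: float = 0.1) -> None:
    try:
        # wait_for cancels the wrapped coroutine once the deadline passes
        await asyncio.wait_for(slow_precache(), timeout=timeout)
    except asyncio.TimeoutError:
        print(f"[precache] timed out after {timeout}s")
    except Exception as e:
        print(f"[precache] error: {e}")


async def lifespan() -> None:
    # keep a reference: the event loop holds tasks only weakly, and an
    # unreferenced task can be garbage-collected before it completes
    task = asyncio.create_task(precache_with_timeout())
    print("[lifespan] startup continues without waiting for precache")
    await task  # awaited here only so the demo exits cleanly


asyncio.run(lifespan())
```

Two caveats follow from the asyncio semantics: a timed-out `wait_for` cancels `precache_data()` midway, which after the FLUSHDB above can leave the cache emptier than before; and the diff drops the result of `asyncio.create_task` on the floor, which the asyncio docs warn against since an unreferenced task may disappear mid-execution.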