Compare commits
4 Commits
2bebfbd4df...82870a4e47
Commits in this compare (author and date columns were lost in extraction):

- 82870a4e47
- 80b909d801
- 1ada0a02f9
- 44aef147b5
```diff
@@ -35,7 +35,7 @@ jobs:
           ssh_private_key: ${{ secrets.SSH_PRIVATE_KEY }}
 
       - name: Push to dokku for staging branch
-        if: github.ref == 'refs/heads/staging'
+        if: github.ref == 'refs/heads/feat/sv-search-v4'
         uses: dokku/github-action@master
         with:
           branch: 'dev'
```
cache/precache.py (vendored, 12 changes)
```diff
@@ -77,11 +77,15 @@ async def precache_topics_followers(topic_id: int, session):
 
 async def precache_data():
     logger.info("precaching...")
+    logger.debug("Entering precache_data")
     try:
         key = "authorizer_env"
+        logger.debug(f"Fetching existing hash for key '{key}' from Redis")
         # cache reset
         value = await redis.execute("HGETALL", key)
+        logger.debug(f"Fetched value for '{key}': {value}")
         await redis.execute("FLUSHDB")
+        logger.debug("Redis database flushed")
         logger.info("redis: FLUSHDB")
 
         # Convert the dict into a list of arguments for HSET
```
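The conversion that last comment describes is elided at the hunk boundary. As a hypothetical sketch of what flattening a dict into HSET arguments typically looks like (names invented for illustration; the file's real code is not shown here):

```python
# Hypothetical illustration of the dict -> HSET-args flattening the
# comment above describes; not the repository's actual code.
value = {"AUTHORIZER_URL": "https://auth.example.org", "ENV": "staging"}

args: list[str] = []
for field, field_value in value.items():
    args.extend([field, field_value])

print(args)
# ['AUTHORIZER_URL', 'https://auth.example.org', 'ENV', 'staging']
# await redis.execute("HSET", key, *args) would then write all pairs at once.
```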
```diff
@@ -97,21 +101,27 @@ async def precache_data():
         await redis.execute("HSET", key, *value)
         logger.info(f"redis hash '{key}' was restored")
 
+        logger.info("Beginning topic precache phase")
         with local_session() as session:
             # topics
             q = select(Topic).where(Topic.community == 1)
             topics = get_with_stat(q)
+            logger.info(f"Found {len(topics)} topics to precache")
             for topic in topics:
                 topic_dict = topic.dict() if hasattr(topic, "dict") else topic
+                logger.debug(f"Precaching topic id={topic_dict.get('id')}")
                 await cache_topic(topic_dict)
+                logger.debug(f"Cached topic id={topic_dict.get('id')}")
                 await asyncio.gather(
                     precache_topics_followers(topic_dict["id"], session),
                     precache_topics_authors(topic_dict["id"], session),
                 )
+                logger.debug(f"Finished precaching followers and authors for topic id={topic_dict.get('id')}")
             logger.info(f"{len(topics)} topics and their followings precached")
 
             # authors
             authors = get_with_stat(select(Author).where(Author.user.is_not(None)))
+            logger.info(f"Found {len(authors)} authors to precache")
             logger.info(f"{len(authors)} authors found in database")
             for author in authors:
                 if isinstance(author, Author):
```
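A note on the `asyncio.gather` calls above: with the default `return_exceptions=False`, the first exception raised by either precache job propagates straight to the caller, while the sibling awaitable keeps running. If per-job error isolation were wanted, `return_exceptions=True` collects failures instead. A runnable illustration with toy coroutines, not project code:

```python
import asyncio

async def ok():
    await asyncio.sleep(0.1)
    return "done"

async def boom():
    raise RuntimeError("fail")

async def main():
    # Collect errors per job instead of failing fast on the first one.
    results = await asyncio.gather(ok(), boom(), return_exceptions=True)
    print(results)  # ['done', RuntimeError('fail')]

asyncio.run(main())
```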
```diff
@@ -119,10 +129,12 @@ async def precache_data():
             author_id = profile.get("id")
             user_id = profile.get("user", "").strip()
             if author_id and user_id:
+                logger.debug(f"Precaching author id={author_id}")
                 await cache_author(profile)
                 await asyncio.gather(
                     precache_authors_followers(author_id, session), precache_authors_follows(author_id, session)
                 )
+                logger.debug(f"Finished precaching followers and follows for author id={author_id}")
             else:
                 logger.error(f"fail caching {author}")
         logger.info(f"{len(authors)} authors and their followings precached")
```
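One assumption behind the new `logger.debug(...)` lines: they are only emitted when the effective log level is DEBUG or lower. A minimal configuration sketch under that assumption (the project's real logging setup is outside this diff):

```python
import logging

# Assumed setup for making the new debug messages visible; the
# repository's actual logging configuration is not shown in this diff.
logging.basicConfig(
    level=logging.DEBUG,
    format="%(asctime)s %(name)s %(levelname)s %(message)s",
)
logger = logging.getLogger("precache")
logger.debug("Entering precache_data")  # emitted only at DEBUG level or lower
```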
main.py (15 changes)
```diff
@@ -17,7 +17,6 @@ from cache.revalidator import revalidation_manager
 from services.exception import ExceptionHandlerMiddleware
 from services.redis import redis
 from services.schema import create_all_tables, resolvers
-#from services.search import search_service
 from services.search import search_service, initialize_search_index
 from services.viewed import ViewedStorage
 from services.webhook import WebhookEndpoint, create_webhook_endpoint
```
```diff
@@ -43,6 +42,15 @@ async def check_search_service():
     else:
         print(f"[INFO] Search service is available: {info}")
 
+# Helper to run precache with timeout and catch errors
+async def precache_with_timeout():
+    try:
+        await asyncio.wait_for(precache_data(), timeout=60)
+    except asyncio.TimeoutError:
+        print("[precache] Precache timed out after 60 seconds")
+    except Exception as e:
+        print(f"[precache] Error during precache: {e}")
+
 
 # indexing DB data
 # async def indexing():
```
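Worth flagging for review: `asyncio.wait_for` cancels the awaited coroutine when the timeout fires, so a timed-out precache is aborted mid-run rather than left to finish in the background. A self-contained demonstration with a stand-in coroutine:

```python
import asyncio

async def slow_precache():
    try:
        await asyncio.sleep(120)  # stand-in for a long-running precache_data()
    except asyncio.CancelledError:
        print("inner coroutine was cancelled by wait_for")
        raise

async def main():
    try:
        await asyncio.wait_for(slow_precache(), timeout=0.1)
    except asyncio.TimeoutError:
        print("[precache] Precache timed out")

asyncio.run(main())
```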
```diff
@@ -53,9 +61,12 @@ async def lifespan(_app):
     try:
         print("[lifespan] Starting application initialization")
         create_all_tables()
+
+        # schedule precaching in background with timeout and error handling
+        asyncio.create_task(precache_with_timeout())
+
         await asyncio.gather(
             redis.connect(),
-            precache_data(),
             ViewedStorage.init(),
             create_webhook_endpoint(),
             check_search_service(),
```
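A small review note on `asyncio.create_task(precache_with_timeout())`: the returned task is discarded, and the asyncio documentation recommends holding a strong reference so a background task cannot be garbage-collected mid-run. A common pattern, sketched here with hypothetical names:

```python
import asyncio

background_tasks: set[asyncio.Task] = set()

def spawn_background(coro) -> asyncio.Task:
    # Keep a strong reference for the task's lifetime, then drop it on
    # completion, as the asyncio docs recommend.
    task = asyncio.create_task(coro)
    background_tasks.add(task)
    task.add_done_callback(background_tasks.discard)
    return task

# Usage inside lifespan(): spawn_background(precache_with_timeout())
```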