import asyncio
import os
import sys
from contextlib import asynccontextmanager
from datetime import datetime
from importlib import import_module
from os.path import exists

from ariadne import load_schema_from_path, make_executable_schema
from ariadne.asgi import GraphQL
from starlette.applications import Starlette
from starlette.middleware.cors import CORSMiddleware
from starlette.requests import Request
from starlette.responses import JSONResponse, Response
from starlette.routing import Route

from cache.precache import precache_data
from cache.revalidator import revalidation_manager
from services.exception import ExceptionHandlerMiddleware
from services.redis import redis
from services.schema import create_all_tables, resolvers
from services.search import search_service, initialize_search_index
from services.viewed import ViewedStorage
from services.webhook import WebhookEndpoint, create_webhook_endpoint
from settings import DEV_SERVER_PID_FILE_NAME, MODE

# Importing the resolvers package registers all resolver functions,
# which are then bound to the SDL files loaded from schema/
import_module("resolvers")
schema = make_executable_schema(load_schema_from_path("schema/"), resolvers)
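
# For reference, resolvers are plain Ariadne bindables; a minimal hypothetical
# example (not part of this module) looks like:
#
#   from ariadne import QueryType
#
#   query = QueryType()
#
#   @query.field("hello")
#   async def resolve_hello(_, info):
#       return "Hello!"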


async def start():
    if MODE == "development":
        if not exists(DEV_SERVER_PID_FILE_NAME):
            # pid file management: record the dev server's PID on first start
            with open(DEV_SERVER_PID_FILE_NAME, "w", encoding="utf-8") as f:
                f.write(str(os.getpid()))
    print(f"[main] process started in {MODE} mode")


# Disable search service if it's causing problems
DISABLE_SEARCH = os.environ.get("DISABLE_SEARCH", "false").lower() in ["true", "1", "yes"]
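# Usage sketch (assumed invocation; any of "true", "1" or "yes" works):
#   DISABLE_SEARCH=1 <your usual server command>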


async def check_search_service():
    """Check if search service is available and log result"""
    if DISABLE_SEARCH:
        print("[INFO] Search service checks disabled via environment variable")
        return {"status": "disabled", "message": "Search disabled via environment variable"}

    try:
        info_task = search_service.info()
        info = await asyncio.wait_for(info_task, timeout=10.0)  # 10 second timeout

        if info.get("status") in ["error", "unavailable"]:
            print(f"[WARNING] Search service unavailable: {info.get('message', 'unknown reason')}")
        else:
            print(f"[INFO] Search service is available: {info}")
        return info
    except asyncio.TimeoutError:
        print("[WARNING] Search service check timed out after 10 seconds")
        return {"status": "timeout", "message": "Search service check timed out"}
    except Exception as e:
        print(f"[WARNING] Error checking search service: {str(e)}")
        print("[INFO] Continuing startup with search service in degraded mode")
        return {"status": "error", "message": str(e)}
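

# Note: check_search_service() never raises; every outcome is reduced to a status
# dict (e.g. {"status": "timeout", ...}) so lifespan() below can log it and
# continue starting up.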


# indexing DB data (superseded by initialize_search_index_background below)
# async def indexing():
#     from services.db import fetch_all_shouts
#
#     all_shouts = await fetch_all_shouts()
#     await initialize_search_index(all_shouts)


@asynccontextmanager
async def lifespan(_app):
    try:
        print("[lifespan] Starting application initialization")
        create_all_tables()

        # Run each initialization step separately to identify where it's hanging
        try:
            print("[lifespan] Connecting to Redis...")
            await redis.connect()
            print("[lifespan] Redis connected successfully")
        except Exception as e:
            print(f"[lifespan] Error connecting to Redis: {e}")

        try:
            print("[lifespan] Starting precache operation...")
            # Add a timeout to precache_data to ensure it doesn't hang
            try:
                precache_task = precache_data()
                await asyncio.wait_for(precache_task, timeout=60)  # 1 minute timeout
                print("[lifespan] Precache completed successfully")
            except asyncio.TimeoutError:
                print("[lifespan] WARNING: Precache operation timed out after 60 seconds")
                print("[lifespan] Continuing server startup despite precache timeout")
        except Exception as e:
            import traceback

            print(f"[lifespan] Error during precache: {e}")
            print(f"[lifespan] Precache error traceback: {traceback.format_exc()}")

        try:
            print("[lifespan] Initializing ViewedStorage...")
            await ViewedStorage.init()
            print("[lifespan] ViewedStorage initialized successfully")
        except Exception as e:
            print(f"[lifespan] Error initializing ViewedStorage: {e}")

        try:
            print("[lifespan] Creating webhook endpoint...")
            await create_webhook_endpoint()
            print("[lifespan] Webhook endpoint created successfully")
        except Exception as e:
            print(f"[lifespan] Error creating webhook endpoint: {e}")

        try:
            print("[lifespan] Checking search service...")
            await check_search_service()
            print("[lifespan] Search service check completed")
        except Exception as e:
            print(f"[lifespan] Error checking search service: {e}")

        try:
            print("[lifespan] Starting app...")
            await start()
            print("[lifespan] App started successfully")
        except Exception as e:
            print(f"[lifespan] Error starting app: {e}")

        try:
            print("[lifespan] Starting revalidation manager...")
            await revalidation_manager.start()
            print("[lifespan] Revalidation manager started successfully")
        except Exception as e:
            print(f"[lifespan] Error starting revalidation manager: {e}")

        print("[lifespan] Basic initialization complete")

        # Verify the server is ready to accept connections
        import socket

        def is_port_in_use(port):
            """Return True if something is already bound to the port."""
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            try:
                s.bind(("0.0.0.0", port))
                s.close()
                return False  # bind succeeded, so the port is free
            except OSError:
                return True  # bind failed, so the port is already in use

        # Check if the port is in use (i.e. the server socket is already bound)
        port = int(os.environ.get("PORT", 8000))
        if is_port_in_use(port):
            print(f"[lifespan] ✅ Server port {port} is active, ready to accept connections")
        else:
            print(f"[lifespan] ⚠️ Warning: Server port {port} is not bound yet!")

        # Add a delay before starting the intensive search indexing
        print("[lifespan] Waiting for system stabilization before search indexing...")
        await asyncio.sleep(10)  # 10-second delay to let the system stabilize

        # Start search indexing as a background task with lower priority
        try:
            print("[lifespan] Creating search indexing background task...")
            if DISABLE_SEARCH:
                print("[lifespan] Search indexing skipped - search is disabled via environment variable")
            else:
                # Fire-and-forget: keep a reference so the task isn't garbage-collected,
                # but don't await it; it runs in the background
                search_task = asyncio.create_task(initialize_search_index_background())  # noqa: F841
                print("[lifespan] Search indexing task scheduled successfully")
        except Exception as e:
            print(f"[lifespan] Error scheduling search indexing task: {e}")

        print("[lifespan] Full server startup completed successfully")
        yield
    except Exception as e:
        import traceback

        print(f"[lifespan] Critical error in lifespan function: {e}")
        print(f"[lifespan] Traceback: {traceback.format_exc()}")
        yield  # Still yield to allow clean shutdown
    finally:
        print("[lifespan] Shutting down application services")
        tasks = [redis.disconnect(), ViewedStorage.stop(), revalidation_manager.stop()]
        await asyncio.gather(*tasks, return_exceptions=True)
        print("[lifespan] Shutdown complete")
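

# Everything above `yield` runs at startup and the `finally` block at shutdown;
# Starlette drives this through the `lifespan=` argument passed to the app below.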


# Initialize search index in the background
async def initialize_search_index_background():
    """Run search indexing as a background task with low priority"""
    try:
        print("[search] Starting background search indexing process")
        from services.db import fetch_all_shouts

        try:
            print("[search] About to fetch all shouts for indexing")

            # Get total count first (optional)
            all_shouts = await fetch_all_shouts()
            total_count = len(all_shouts) if all_shouts else 0
            print(f"[search] Fetched {total_count} shouts for background indexing")

            # Skip indexing if no shouts found
            if not all_shouts or total_count == 0:
                print("[search] No shouts to index, skipping indexing process")
                return

            # Start the indexing process with the fetched shouts
            print("[search] Beginning background search index initialization...")

            # Add a timeout to the indexing operation
            try:
                index_task = initialize_search_index(all_shouts)
                await asyncio.wait_for(index_task, timeout=300)  # 5-minute timeout
                print("[search] Background search index initialization complete")
            except asyncio.TimeoutError:
                print("[search] Background indexing timed out after 5 minutes")
                return

            # Perform a test search to verify indexing worked
            try:
                print("[search] Running test search to verify index...")
                from services.search import search_text

                search_task = search_text("test", 3)
                test_results = await asyncio.wait_for(search_task, timeout=30)  # 30-second timeout
                print(f"[search] Test search complete with {len(test_results)} results")
            except asyncio.TimeoutError:
                print("[search] Test search timed out after 30 seconds")
            except Exception as test_error:
                print(f"[search] Test search error: {str(test_error)}")
        except Exception as inner_error:
            import traceback

            print(f"[search] Error in search indexing process: {str(inner_error)}")
            print(f"[search] Inner traceback: {traceback.format_exc()}")

        print("[search] Search initialization process completed (with or without errors)")
    except Exception as e:
        import traceback

        print(f"[search] Outer error in background search indexing: {str(e)}")
        print(f"[search] Outer traceback: {traceback.format_exc()}")


# Create the GraphQL app instance
graphql_app = GraphQL(schema, debug=True)


# Wrap the GraphQL handler for better error handling
async def graphql_handler(request: Request):
    if request.method not in ["GET", "POST"]:
        return JSONResponse({"error": "Method Not Allowed"}, status_code=405)

    try:
        result = await graphql_app.handle_request(request)
        if isinstance(result, Response):
            return result
        return JSONResponse(result)
    except asyncio.CancelledError:
        # Client disconnected before the response was ready
        return JSONResponse({"error": "Request cancelled"}, status_code=499)
    except Exception as e:
        print(f"GraphQL error: {str(e)}")
        return JSONResponse({"error": str(e)}, status_code=500)
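

# Example request against graphql_handler (assuming the default port 8000;
# `{ __typename }` is valid against any GraphQL schema):
#   curl -X POST http://localhost:8000/ \
#        -H "Content-Type: application/json" \
#        -d '{"query": "{ __typename }"}'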


# Register the routes on the Starlette app
app = Starlette(
    routes=[
        Route("/", graphql_handler, methods=["GET", "POST"]),
        Route("/new-author", WebhookEndpoint),
        # Health check endpoint
        Route(
            "/health",
            lambda request: JSONResponse({"status": "healthy", "time": datetime.now().isoformat()}),
            methods=["GET"],
        ),
        # Debug endpoint to get server status
        Route(
            "/debug",
            lambda request: JSONResponse(
                {
                    "status": "running",
                    "search_disabled": DISABLE_SEARCH,
                    "mode": MODE,
                    "time": datetime.now().isoformat(),
                }
            ),
            methods=["GET"],
        ),
    ],
    lifespan=lifespan,
    debug=True,
)
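
# Quick smoke checks once the server is up (assuming the default port 8000):
#   curl http://localhost:8000/health
#   curl http://localhost:8000/debug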


# Global handler for uncaught exceptions
async def handle_exception(request, exc):
    import traceback

    print(f"Global exception handler caught: {str(exc)}")
    traceback.print_exc()
    return JSONResponse(
        status_code=500,
        content={"detail": "Internal Server Error", "message": str(exc)},
    )


# Register the handler for any uncaught Exception
app.add_exception_handler(Exception, handle_exception)

app.add_middleware(ExceptionHandlerMiddleware)

if "dev" in sys.argv:
    # Dev-only CORS for the local frontend
    app.add_middleware(
        CORSMiddleware,
        allow_origins=["https://localhost:3000"],
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )
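

if __name__ == "__main__":
    # Minimal local-run sketch; an assumption, since the repo's real entrypoint
    # may use a different ASGI server. Requires uvicorn to be installed.
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=int(os.environ.get("PORT", 8000)))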