This commit is contained in:
parent 3242758817
commit 8133d0030f

Dockerfile (31 lines changed)
@@ -1,12 +1,25 @@
FROM python:slim
FROM python:alpine

# Update package lists and install necessary dependencies
RUN apk update && \
    apk add --no-cache build-base icu-data-full curl python3-dev musl-dev && \
    curl -sSL https://install.python-poetry.org | python

# Set working directory
WORKDIR /app

# Copy only the pyproject.toml file initially
COPY pyproject.toml /app/

# Install poetry and dependencies
RUN pip install poetry && \
    poetry config virtualenvs.create false && \
    poetry install --no-root --only main

# Copy the rest of the files
COPY . /app

RUN apt-get update && apt-get install -y git gcc curl postgresql && \
    curl -sSL https://install.python-poetry.org | python - && \
    echo "export PATH=$PATH:/root/.local/bin" >> ~/.bashrc && \
    . ~/.bashrc && \
    poetry config virtualenvs.create false && \
    poetry install --no-dev

CMD ["python", "main.py"]
# Expose the port
EXPOSE 8000

CMD ["python", "server.py"]

@@ -14,7 +14,7 @@
- STORJ_END_POINT
- STORJ_BUCKET_NAME
- CDN_DOMAIN

- AUTH_URL

### Local development

@@ -23,7 +23,7 @@ mkdir .venv
python3.12 -m venv .venv
poetry env use .venv/bin/python3.12
poetry update
poetry run python main.py
poetry run python server.py
```
### Core integration

auth.py (91 lines changed)

@@ -1,41 +1,72 @@
from functools import wraps
from starlette.responses import JSONResponse
import aiohttp

AUTH_URL = 'https://auth.discours.io'
import aiohttp
from starlette.responses import JSONResponse

from logger import root_logger as logger
from settings import AUTH_URL


async def request_data(gql, headers=None):
    if headers is None:
        headers = {"Content-Type": "application/json"}
    try:
        async with aiohttp.ClientSession() as session:
            async with session.post(AUTH_URL, json=gql, headers=headers) as response:
                if response.status == 200:
                    data = await response.json()
                    errors = data.get("errors")
                    if errors:
                        logger.error(f"HTTP Errors: {errors}")
                    else:
                        return data
    except Exception as e:
        # Handling and logging exceptions during authentication check
        import traceback

        logger.error(f"request_data error: {e}")
        logger.error(traceback.format_exc())
    return None


async def check_auth(req):
    token = req.headers.get("Authorization")
    headers = {"Authorization": token, "Content-Type": "application/json"}
    user_id = ""
    if token:
        # Logging the authentication token
        logger.debug(f"{token}")
        query_name = "validate_jwt_token"
        operation = "ValidateToken"
        variables = {"params": {"token_type": "access_token", "token": token}}

        print(f"[services.auth] checking auth token: {token}")
        gql = {
            "query": f"query {operation}($params: ValidateJWTTokenInput!) {{"
            + f"{query_name}(params: $params) {{ is_valid claims }} "
            + "}",
            "variables": variables,
            "operationName": operation,
        }
        data = await request_data(gql)
        if data:
            logger.debug(data)
            user_data = data.get("data", {}).get(query_name, {}).get("claims", {})
            user_id = user_data.get("sub", "")
    return user_id

    gql = {
        "query": "query GetUserId { session { user { id } } }",
        "operationName": "GetUserId",
        "variables": None,
    }

    async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=30.0)) as session:
        async with session.post(AUTH_URL, headers=headers, json=gql) as response:
            print(f"[services.auth] {AUTH_URL} response: {response.status}")
            if response.status != 200:
                return False, None
            r = await response.json()
            if r:
                user_id = r.get("data", {}).get("session", {}).get("user", {}).get("id", None)
                is_authenticated = user_id is not None
                return is_authenticated, user_id
            return False, None


def login_required(f):
    @wraps(f)
    async def decorated_function(request, *args, **kwargs):
        is_authenticated, user_id = await check_auth(request)
        if not is_authenticated:
            return JSONResponse({'error': 'Unauthorized'}, status_code=401)
    """
    A decorator that requires user authentication before accessing a route.
    """

    @wraps(f)
    async def decorated_function(req, *args, **kwargs):
        user_id = await check_auth(req)
        if user_id:
            logger.info(f" got {user_id}")
            req.state.user_id = user_id.strip()
            return await f(req, *args, **kwargs)
        else:
            return JSONResponse({"detail": "Not authorized"}, status_code=401)

        # Make user_id available to the route handler, if needed
        request.state.user_id = user_id
        return await f(request, *args, **kwargs)
    return decorated_function
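
For reference, a minimal usage sketch of the new decorator, assuming it behaves as in the auth.py shown above; the whoami handler and /whoami route are illustrative and not part of this commit:

from starlette.applications import Starlette
from starlette.requests import Request
from starlette.responses import JSONResponse
from starlette.routing import Route

from auth import login_required


@login_required
async def whoami(req: Request):
    # check_auth() stored the validated user id on the request state
    return JSONResponse({"user_id": req.state.user_id})


app = Starlette(routes=[Route("/whoami", whoami, methods=["GET"])])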

logger.py (new file, 80 lines)

@@ -0,0 +1,80 @@
import logging

import colorlog

# Define the color scheme
color_scheme = {
    "DEBUG": "cyan",
    "INFO": "green",
    "WARNING": "yellow",
    "ERROR": "red",
    "CRITICAL": "red,bg_white",
    "DEFAULT": "white",
}

# Define secondary log colors
secondary_colors = {
    "log_name": {"DEBUG": "blue"},
    "asctime": {"DEBUG": "cyan"},
    "process": {"DEBUG": "purple"},
    "module": {"DEBUG": "cyan,bg_blue"},
    "funcName": {"DEBUG": "light_white,bg_blue"},
}

# Define the log format string
fmt_string = "%(log_color)s%(levelname)s: %(log_color)s[%(module)s.%(funcName)s]%(reset)s %(white)s%(message)s"

# Define formatting configuration
fmt_config = {
    "log_colors": color_scheme,
    "secondary_log_colors": secondary_colors,
    "style": "%",
    "reset": True,
}


class MultilineColoredFormatter(colorlog.ColoredFormatter):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.log_colors = kwargs.pop("log_colors", {})
        self.secondary_log_colors = kwargs.pop("secondary_log_colors", {})

    def format(self, record):
        message = record.getMessage()
        if "\n" in message:
            lines = message.split("\n")
            first_line = lines[0]
            record.message = first_line
            formatted_first_line = super().format(record)
            formatted_lines = [formatted_first_line]
            for line in lines[1:]:
                formatted_lines.append(line)
            return "\n".join(formatted_lines)
        else:
            return super().format(record)


# Create a MultilineColoredFormatter object for colorized logging
formatter = MultilineColoredFormatter(fmt_string, **fmt_config)

# Create a stream handler for logging output
stream = logging.StreamHandler()
stream.setFormatter(formatter)

# Set up the root logger with the same formatting
root_logger = logging.getLogger()
root_logger.setLevel(logging.DEBUG)
root_logger.addHandler(stream)

ignore_logs = [
    "_trace",
    "httpx",
    "_client",
    "_trace.atrace",
    "aiohttp",
    "_client",
    "._make_request",
]
for lgr in ignore_logs:
    loggr = logging.getLogger(lgr)
    loggr.setLevel(logging.INFO)
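
A short usage sketch, assuming logger.py above is importable: the first line of a multi-line message is colorized and the remaining lines are passed through unchanged by MultilineColoredFormatter.

from logger import root_logger as logger

logger.debug("single-line message")
logger.error("upload failed\ntraceback line 1\ntraceback line 2")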

main.py (53 lines changed)

@@ -7,35 +7,43 @@ from starlette.applications import Starlette
from starlette.responses import JSONResponse
from starlette.routing import Route
from starlette.requests import Request
from auth import check_auth
from auth import login_required

from settings import (
    PORT,
    STORJ_ACCESS_KEY,
    STORJ_SECRET_KEY,
    CDN_DOMAIN,
    STORJ_BUCKET_NAME,
    STORJ_END_POINT,
)

# Logging configuration
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
logging.basicConfig(
    level=logging.DEBUG, format="%(asctime)s - %(levelname)s - %(message)s"
)

STORJ_ACCESS_KEY = os.environ.get('STORJ_ACCESS_KEY')
STORJ_SECRET_KEY = os.environ.get('STORJ_SECRET_KEY')
STORJ_END_POINT = os.environ.get('STORJ_END_POINT')
STORJ_BUCKET_NAME = os.environ.get('STORJ_BUCKET_NAME')
CDN_DOMAIN = os.environ.get('CDN_DOMAIN')

@check_auth
@login_required
async def upload_handler(request: Request):
    logging.debug("Received upload request")
    form = await request.form()
    file = form.get('file')
    file = form.get("file")

    if file is None:
        logging.error("No file uploaded")
        return JSONResponse({'error': 'No file uploaded'}, status_code=400)
        return JSONResponse({"error": "No file uploaded"}, status_code=400)

    file_name, file_extension = os.path.splitext(file.filename)
    key = str(uuid.uuid4()) + file_extension
    logging.debug(f"Generated file key: {key}")

    async with aioboto3.client('s3',
        aws_access_key_id=STORJ_ACCESS_KEY,
        aws_secret_access_key=STORJ_SECRET_KEY,
        endpoint_url=STORJ_END_POINT) as s3:
    async with aioboto3.client(
        "s3",
        aws_access_key_id=STORJ_ACCESS_KEY,
        aws_secret_access_key=STORJ_SECRET_KEY,
        endpoint_url=STORJ_END_POINT,
    ) as s3:
        with tempfile.NamedTemporaryFile() as tmp_file:
            while True:
                chunk = await file.read(8192)

@@ -49,25 +57,28 @@ async def upload_handler(request: Request):
                Filename=tmp_file.name,
                Bucket=STORJ_BUCKET_NAME,
                Key=key,
                ExtraArgs={"ContentType": file.content_type}
                ExtraArgs={"ContentType": file.content_type},
            )
            logging.debug("File upload completed")

            url = f'http://{CDN_DOMAIN}/{key}'
            url = f"http://{CDN_DOMAIN}/{key}"
            logging.info(f"File uploaded successfully: {url}")
            return JSONResponse({'url': url, 'originalFilename': file.filename})
            return JSONResponse({"url": url, "originalFilename": file.filename})


async def home(request: Request):
    logging.debug("Home route called")
    return JSONResponse({'message': 'Hello World!'})
    return JSONResponse({"message": "Hello World!"})


routes = [
    Route('/test', home, methods=['GET']),
    Route('/', upload_handler, methods=['POST'])
    Route("/test", home, methods=["GET"]),
    Route("/", upload_handler, methods=["POST"]),
]

app = Starlette(debug=True, routes=routes)

if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host='0.0.0.0', port=8080)

    uvicorn.run(app, host="0.0.0.0", port=PORT)
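
A hedged client-side sketch, not part of the commit: uploading a file to the service with aiohttp. The host, port and token value are assumptions; the Authorization header is what check_auth() reads.

import asyncio

import aiohttp


async def upload(path: str, token: str, url: str = "http://localhost:8000/"):
    # multipart form with a single "file" field, as read by upload_handler()
    form = aiohttp.FormData()
    form.add_field("file", open(path, "rb"), filename=path)
    async with aiohttp.ClientSession() as session:
        async with session.post(url, data=form, headers={"Authorization": token}) as resp:
            print(resp.status, await resp.json())


if __name__ == "__main__":
    asyncio.run(upload("example.jpg", "<access_token>"))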

@@ -1,6 +1,6 @@
[tool.poetry]
name = "discoursio-migrator"
version = "0.2.6"
version = "0.3.0"
description = ""
authors = ["discoursio devteam"]
license = "MIT"

@@ -8,40 +8,27 @@ readme = "README.md"

[tool.poetry.dependencies]
python = "^3.12"
aiohttp = "^3.9.1"
uvicorn = "^0.24.0.post1"
starlette = "^0.33.0"
aioboto3 = "^9.0.0"
python-multipart = "^0.0.5"

[tool.poetry.dev-dependencies]
black = "^23.10.1"
colorlog = "^6.8.2"
granian = "^1.3.1"
aiohttp = "^3.9.5"

[tool.poetry.group.dev.dependencies]
setuptools = "^69.0.2"
ruff = "^0.3.5"
isort = "^5.13.2"

[build-system]
requires = ["poetry-core"]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

[tool.black]
line-length = 120
target-version = ['py312']
include = '\.pyi?$'
exclude = '''
(
  /(
    \.eggs
    | \.git
    | \.hg
    | \.mypy_cache
    | \.tox
    | \.venv
    | _build
    | buck-out
    | build
    | dist
  )/
  | foo.py
)
'''
[tool.pyright]
venvPath = "."
venv = ".venv"

[tool.isort]
multi_line_output = 3
include_trailing_comma = true
force_grid_wrap = 0
line_length = 120
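
A small sketch, not part of the commit: report which of the packages named in the dependency hunks above resolve in the current environment.

from importlib.metadata import PackageNotFoundError, version

for pkg in ("aiohttp", "colorlog", "granian", "setuptools", "ruff", "isort"):
    try:
        print(f"{pkg}=={version(pkg)}")
    except PackageNotFoundError:
        print(f"{pkg} is not installed")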

server.py (new file, 26 lines)

@@ -0,0 +1,26 @@
from granian.constants import Interfaces
from granian.server import Granian
import subprocess
from logger import root_logger as logger
from settings import PORT


def is_docker_container_running(name):
    cmd = ["docker", "ps", "-f", f"name={name}"]
    output = subprocess.run(cmd, capture_output=True, text=True).stdout
    logger.info(output)
    return name in output


if __name__ == "__main__":
    logger.info("started")

    granian_instance = Granian(
        "main:app",
        address="0.0.0.0",  # noqa S104
        port=PORT,
        threads=4,
        websockets=False,
        interface=Interfaces.ASGI,
    )
    granian_instance.serve()
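
Illustrative only, not in the commit: how the is_docker_container_running helper might be used to gate startup on another container; the container name is an assumption.

from server import is_docker_container_running

if is_docker_container_running("postgres"):
    print("postgres container is up")
else:
    print("postgres container is not running")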

settings.py (new file, 9 lines)

@@ -0,0 +1,9 @@
from os import environ

PORT = 8000
AUTH_URL = environ.get("AUTH_URL") or ""
STORJ_ACCESS_KEY = environ.get("STORJ_ACCESS_KEY")
STORJ_SECRET_KEY = environ.get("STORJ_SECRET_KEY")
STORJ_END_POINT = environ.get("STORJ_END_POINT")
STORJ_BUCKET_NAME = environ.get("STORJ_BUCKET_NAME")
CDN_DOMAIN = environ.get("CDN_DOMAIN")
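
A minimal startup-check sketch, assuming the settings module above; the set of required names mirrors the README list and is otherwise an assumption.

import settings

required = ("STORJ_ACCESS_KEY", "STORJ_SECRET_KEY", "STORJ_END_POINT", "STORJ_BUCKET_NAME", "CDN_DOMAIN", "AUTH_URL")
missing = [name for name in required if not getattr(settings, name)]
if missing:
    raise RuntimeError(f"Missing environment variables: {', '.join(missing)}")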