Compare commits: 03c2d286de...main

30 commits:

fb6e03c1a2
46c3345f45
1156a32a88
d848af524f
c9f88c36cd
0ad44a944e
fbd0e03a33
076828f003
4f6c459532
11524c17ea
168f845772
657146cdca
86111bc9f5
a8018a0b2f
9d8bd629ab
1eddf9cc0b
6415f86286
5d1c4f0084
1dce947db6
4d9551a93c
e6471280d5
3e062b4346
5b1a93c781
c30001547a
025019b544
a862a11c91
f3d86daea7
296716397e
22c42839c1
4fd90e305f
```diff
@@ -1 +0,0 @@
-# Add directories or file patterns to ignore during indexing (e.g. foo/ or *.csv)
```
```diff
@@ -1,5 +1,9 @@
-name: 'Deploy on push'
-on: [push]
+name: 'Deploy to discoursio-api'
+on:
+  push:
+    branches:
+      - main
+
 jobs:
   deploy:
     runs-on: ubuntu-latest
@@ -17,19 +21,11 @@ jobs:
         id: branch_name
         run: echo "::set-output name=branch::$(echo ${GITHUB_REF##*/})"

-      - name: Push to dokku for main branch
-        if: github.ref == 'refs/heads/main'
+      - name: Push to dokku
         uses: dokku/github-action@master
         with:
           branch: 'main'
           git_remote_url: 'ssh://dokku@v2.discours.io:22/discoursio-api'
           ssh_private_key: ${{ secrets.SSH_PRIVATE_KEY }}
-
-      - name: Push to dokku for dev branch
-        if: github.ref == 'refs/heads/dev'
-        uses: dokku/github-action@master
-        with:
-          branch: 'dev'
-          force: true
-          git_remote_url: 'ssh://dokku@v2.discours.io:22/core'
-          ssh_private_key: ${{ secrets.SSH_PRIVATE_KEY }}
```
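Editorial note: the `::set-output` workflow command kept in the `branch_name` step above has since been deprecated by GitHub Actions. A minimal equivalent writes to the `$GITHUB_OUTPUT` file instead (step name here is illustrative; the original diff only shows the `id`):

```yaml
- name: Get branch name
  id: branch_name
  # Modern replacement for the deprecated ::set-output command
  run: echo "branch=${GITHUB_REF##*/}" >> "$GITHUB_OUTPUT"
```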
.github/workflows/checks.yml (vendored, new file, 16 lines)

```diff
@@ -0,0 +1,16 @@
+name: Checks
+on: [pull_request]
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    name: Checks
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions/setup-python@v2
+        with:
+          python-version: 3.10.6
+      - run: pip install --upgrade pip
+      - run: pip install -r requirements.txt
+      - run: pip install -r requirements-dev.txt
+      - run: ./checks.sh
```
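One YAML footnote on the `setup-python` step: a bare `3.10` would be parsed as the float 3.1, so minor-only versions need quoting. The full `3.10.6` used above happens to parse as a string, but the quoted form is the safe pattern:

```yaml
- uses: actions/setup-python@v2
  with:
    python-version: "3.10"  # quoted; unquoted 3.10 is read by YAML as 3.1
```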
.github/workflows/deploy.yml (vendored, 4 changed lines)

```diff
@@ -17,11 +17,11 @@ jobs:

       - uses: webfactory/ssh-agent@v0.8.0
         with:
-          ssh-private-key: ${{ github.action.secrets.SSH_PRIVATE_KEY }}
+          ssh-private-key: ${{ secrets.SSH_PRIVATE_KEY }}

       - name: Push to dokku
         env:
-          HOST_KEY: ${{ github.action.secrets.HOST_KEY }}
+          HOST_KEY: ${{ secrets.HOST_KEY }}
         run: |
           echo $HOST_KEY > ~/.ssh/known_hosts
           git remote add dokku dokku@v2.discours.io:discoursio-api
```
.gitignore (vendored, 19 changed lines; the compare view does not preserve which of these entries were added or removed)

```
@@ -147,16 +147,11 @@ migration/content/**/*.md
*.csv
dev-server.pid
backups/
poetry.lock
.ruff_cache
.jj
.zed

dokku_config

*.db
*.sqlite3
views.json
*.pem
*.key
*.crt
.venv
poetry.lock
.devcontainer/devcontainer.json
localhost-key.pem
.gitignore
discoursio.db
localhost.pem
```
```
@@ -1,18 +1,44 @@
exclude: |
  (?x)(
  ^tests/unit_tests/resource|
  _grpc.py|
  _pb2.py
  )

default_language_version:
  python: python3.10

repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.5.0
    hooks:
      - id: check-yaml
      - id: check-added-large-files
      - id: check-case-conflict
      - id: check-docstring-first
      - id: check-json
      - id: check-merge-conflict
      - id: check-toml
      - id: check-yaml
      - id: end-of-file-fixer
      - id: trailing-whitespace
      - id: check-added-large-files
      - id: detect-private-key
      - id: check-ast
      - id: check-merge-conflict
      - id: requirements-txt-fixer

  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.4.7
  - repo: https://github.com/timothycrosley/isort
    rev: 5.12.0
    hooks:
      - id: ruff
        args: [--fix]
      - id: isort

  - repo: https://github.com/ambv/black
    rev: 23.10.1
    hooks:
      - id: black

  - repo: https://github.com/PyCQA/flake8
    rev: 6.1.0
    hooks:
      - id: flake8

  # - repo: https://github.com/python/mypy
  #   rev: v1.6.1
  #   hooks:
  #     - id: mypy
```
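Either version of this config is exercised the same way once committed; a quick sketch:

```shell
pre-commit install          # register the configured hooks as a git pre-commit hook
pre-commit run --all-files  # run every hook once against the whole tree
```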
CHANGELOG.md (removed, 226 lines)

```markdown
@@ -1,226 +0,0 @@
#### [0.4.7]
- `get_my_rates_shouts` resolver added with:
  - `shout_id` and `my_rate` fields in response
  - filters by `Reaction.deleted_at.is_(None)`
  - filters by `Reaction.kind.in_([ReactionKind.LIKE.value, ReactionKind.DISLIKE.value])`
  - filters by `Reaction.reply_to.is_(None)`
  - uses `local_session()` context manager
  - returns empty list on errors
- SQLAlchemy syntax updated:
  - `select()` statement fixed for newer versions
  - `Reaction` model direct selection instead of labeled columns
  - proper row access with `row[0].shout` and `row[0].kind`
- GraphQL resolver fixes:
  - added root parameter `_` to match schema
  - proper async/await handling with `@login_required`
  - error logging added via `logger.error()`

#### [0.4.6]
- login_accepted decorator added
- `docs` added
- optimized and unified `load_shouts_*` resolvers with `LoadShoutsOptions`
- `load_shouts_bookmarked` resolver fixed
- resolvers updates:
  - new resolvers group `feed`
  - `load_shouts_authored_by` resolver added
  - `load_shouts_with_topic` resolver added
  - `load_shouts_followed` removed
  - `load_shouts_random_topic` removed
  - `get_topics_random` removed
- model updates:
  - `ShoutsOrderBy` enum added
  - `Shout.main_topic` from `ShoutTopic.main` as `Topic` type output
  - `Shout.created_by` as `Author` type output

#### [0.4.5]
- `bookmark_shout` mutation resolver added
- `load_shouts_bookmarked` resolver added
- `get_communities_by_author` resolver added
- `get_communities_all` resolver fixed
- `Community` stats in orm
- `Community` CUDL resolvers added
- `Reaction` filter by `Reaction.kind`s
- `ReactionSort` enum added
- `CommunityFollowerRole` enum added
- `InviteStatus` enum added
- `Topic.parents` ids added
- `get_shout` resolver accepts slug or shout_id

#### [0.4.4]
- `followers_stat` removed for shout
- sqlite3 support added
- `rating_stat` and `commented_stat` fixes

#### [0.4.3]
- cache reimplemented
- load shouts queries unified
- `followers_stat` removed from shout

#### [0.4.2]
- reactions load resolvers separated for ratings (no stats) and comments
- reactions stats improved
- `load_comment_ratings` separate resolver

#### [0.4.1]
- follow/unfollow logic updated and unified with cache

#### [0.4.0]
- chore: version migrator synced
- feat: precache_data on start
- fix: store id list for following cache data
- fix: shouts stat filter out deleted

#### [0.3.5]
- cache isolated to services
- topics followers and authors cached
- redis stores lists of ids

#### [0.3.4]
- `load_authors_by` from cache

#### [0.3.3]
- feat: sentry integration enabled with glitchtip
- fix: reindex on update shout
- packages upgrade, isort
- separated stats queries for author and topic
- fix: feed featured filter
- fts search removed

#### [0.3.2]
- redis cache for what author follows
- redis cache for followers
- graphql add query: get topic followers

#### [0.3.1]
- enabling sentry
- long query log report added
- editor fixes
- authors links cannot be updated by `update_shout` anymore

#### [0.3.0]
- `Shout.featured_at` timestamp of the frontpage featuring event
- added proposal accepting logic
- schema modulized
- Shout.visibility removed

#### [0.2.22]
- added precommit hook
- fmt
- granian asgi

#### [0.2.21]
- fix: rating logic
- fix: `load_top_random_shouts`
- resolvers: `add_stat_*` refactored
- services: use google analytics
- services: minor fixes search

#### [0.2.20]
- services: ackee removed
- services: following manager fixed
- services: import views.json

#### [0.2.19]
- fix: adding `author` role
- fix: stripping `user_id` in auth connector

#### [0.2.18]
- schema: added `Shout.seo` string field
- resolvers: added `/new-author` webhook resolver
- resolvers: added reader.load_shouts_top_random
- resolvers: added reader.load_shouts_unrated
- resolvers: community follower id property name is `.author`
- resolvers: `get_authors_all` and `load_authors_by`
- services: auth connector upgraded

#### [0.2.17]
- schema: enum types workaround, `ReactionKind`, `InviteStatus`, `ShoutVisibility`
- schema: `Shout.created_by`, `Shout.updated_by`
- schema: `Shout.authors` can be empty
- resolvers: optimized `reacted_shouts_updates` query

#### [0.2.16]
- resolvers: collab inviting logic
- resolvers: queries and mutations revision and renaming
- resolvers: `delete_topic(slug)` implemented
- resolvers: added `get_shout_followers`
- resolvers: `load_shouts_by` filters implemented
- orm: invite entity
- schema: `Reaction.range` -> `Reaction.quote`
- filters: `time_ago` -> `after`
- httpx -> aiohttp

#### [0.2.15]
- schema: `Shout.created_by` removed
- schema: `Shout.mainTopic` removed
- services: cached elasticsearch connector
- services: auth is using `user_id` from authorizer
- resolvers: `notify_*` usage fixes
- resolvers: `getAuthor` now accepts slug, `user_id` or `author_id`
- resolvers: login_required usage fixes

#### [0.2.14]
- schema: some fixes from migrator
- schema: `.days` -> `.time_ago`
- schema: `excludeLayout` + `layout` in filters -> `layouts`
- services: db access simpler, no contextmanager
- services: removed Base.create() method
- services: rediscache updated
- resolvers: get_reacted_shouts_updates as followedReactions query

#### [0.2.13]
- services: db context manager
- services: `ViewedStorage` fixes
- services: views are not stored in core db anymore
- schema: snake case in model fields names
- schema: no DateTime scalar
- resolvers: `get_my_feed` comments filter reactions body.is_not('')
- resolvers: `get_my_feed` query fix
- resolvers: `LoadReactionsBy.days` -> `LoadReactionsBy.time_ago`
- resolvers: `LoadShoutsBy.days` -> `LoadShoutsBy.time_ago`

#### [0.2.12]
- `Author.userpic` -> `Author.pic`
- `CommunityFollower.role` is string now
- `Author.user` is string now

#### [0.2.11]
- redis interface updated
- `viewed` interface updated
- `presence` interface updated
- notify on create, update, delete for reaction and shout
- notify on follow / unfollow author
- use pyproject
- devmode fixed

#### [0.2.10]
- community resolvers connected

#### [0.2.9]
- starlette is back, aiohttp removed
- aioredis replaced with aredis

#### [0.2.8]
- refactored

#### [0.2.7]
- `loadFollowedReactions` now with `login_required`
- notifier service api draft
- added `shout` visibility kind in schema
- community isolated from author in orm

#### [0.2.6]
- redis connection pool
- auth context fixes
- communities orm, resolvers, schema

#### [0.2.5]
- restructured
- all users have their profiles as authors in core
- `gittask`, `inbox` and `auth` logic removed
- `settings` moved to base and now smaller
- new outside auth schema
- removed `gittask`, `auth`, `inbox`, `migration`
```
Dockerfile (30 changed lines)

```diff
@@ -1,25 +1,11 @@
-FROM python:3.12-alpine
-
-# Update package lists and install necessary dependencies
-RUN apk update && \
-    apk add --no-cache build-base icu-data-full curl python3-dev musl-dev && \
-    curl -sSL https://install.python-poetry.org | python
-
-# Set working directory
+FROM python:3.11-slim
 WORKDIR /app
-
-# Copy only the pyproject.toml file initially
-COPY pyproject.toml /app/
+EXPOSE 8080
+ADD nginx.conf.sigil ./
+COPY requirements.txt .
+RUN apt update && apt install -y git gcc curl postgresql
+RUN pip install -r requirements.txt
+COPY . .
-
-# Install poetry and dependencies
-RUN pip install poetry && \
-    poetry config virtualenvs.create false && \
-    poetry install --no-root --only main
-
-# Copy the rest of the files
-COPY . /app
-
-# Expose the port
-EXPOSE 8000
-
-CMD ["python", "server.py"]
+CMD python server.py
```
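A side effect of this Dockerfile change worth noting: the removed `CMD ["python", "server.py"]` was exec form, while the replacement `CMD python server.py` is shell form, which wraps the process in `/bin/sh` so it may not receive SIGTERM directly on `docker stop`. The exec form is generally preferred:

```dockerfile
# Exec form: python runs as PID 1 and receives stop signals directly
CMD ["python", "server.py"]
```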
README.md (122 changed lines; the two versions are interleaved as shown by the compare view)

````
@@ -1,95 +1,47 @@
# GraphQL API Backend

Backend service providing GraphQL API for content management system with reactions, ratings and comments.

## Core Features

### Shouts (Posts)
- CRUD operations via GraphQL mutations
- Rich filtering and sorting options
- Support for multiple authors and topics
- Rating system with likes/dislikes
- Comments and nested replies
- Bookmarks and following

### Reactions System
- `ReactionKind` types: LIKE, DISLIKE, COMMENT
- Rating calculation for shouts and comments
- User-specific reaction tracking
- Reaction stats and aggregations
- Nested comments support

### Authors & Topics
- Author profiles with stats
- Topic categorization and hierarchy
- Following system for authors/topics
- Activity tracking and stats
- Community features

## Tech Stack

- **[Python](https://www.python.org/)** 3.12+
- **GraphQL** with [Ariadne](https://ariadnegraphql.org/)
- **[SQLAlchemy](https://docs.sqlalchemy.org/en/20/orm/)**
- **[PostgreSQL](https://www.postgresql.org/)/[SQLite](https://www.sqlite.org/)** support
- **[Starlette](https://www.starlette.io/)** for ASGI server
- **[Redis](https://redis.io/)** for caching

## Development

### Setup
# discoursio-api


Start API server with `dev` keyword added and `mkcert` installed:
- sqlalchemy
- redis
- ariadne
- starlette
- uvicorn

```shell
mkdir .venv
python3.12 -m venv .venv
poetry env use .venv/bin/python3.12
poetry update

mkcert -install
mkcert localhost
poetry run server.py dev
on osx
```
brew install redis nginx postgres
brew services start redis
```

### Useful Commands

```shell
# Linting and import sorting
poetry run ruff check . --fix --select I

# Code formatting
poetry run ruff format . --line-length=120

# Run tests
poetry run pytest

# Type checking
poetry run mypy .
on debian/ubuntu
```
apt install redis nginx
```

### Code Style
# Local development

We use:
- Ruff for linting and import sorting
- Line length: 120 characters
- Python type hints
- Docstrings for public methods
Install deps first

### GraphQL Development

Test queries in GraphQL Playground at `http://localhost:8000`:

```graphql
# Example query
query GetShout($slug: String) {
  get_shout(slug: $slug) {
    id
    title
    main_author {
      name
    }
  }
}
```
pip install -r requirements.txt
pip install -r requirements-dev.txt
pre-commit install
```

Create database from backup
```
./restdb.sh
```

Start local server
```
python3 server.py dev
```

# How to do an authorized request

Put the header 'Authorization' with token from signIn query or registerUser mutation.

# How to debug Ackee

Set ACKEE_TOKEN var
````
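As a sketch of the authorized-request instruction above (the endpoint and query shape are assumptions drawn from the README's own examples, and `<TOKEN>` stands in for a value returned by the signIn query):

```shell
curl http://localhost:8000 \
  -H 'Content-Type: application/json' \
  -H 'Authorization: <TOKEN>' \
  -d '{"query": "query { get_shout(slug: \"example\") { id title } }"}'
```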
```diff
@@ -1,6 +0,0 @@
-import os
-import sys
-
-# Get the path to the project root directory
-root_path = os.path.abspath(os.path.dirname(__file__))
-sys.path.append(root_path)
```
alembic.ini (new file, 110 lines)

```ini
@@ -0,0 +1,110 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = alembic

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os  # Use os.pathsep. Default configuration used for new projects.

# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url = %(DB_URL)


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
```
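Editorial note: the `sqlalchemy.url = %(DB_URL)` line above leans on configparser interpolation, which is conventionally written `%(DB_URL)s`. A common, more robust alternative is to set the URL programmatically in `alembic/env.py`, which in this project already imports `DB_URL` (a sketch, not this repo's actual env.py):

```python
# alembic/env.py (sketch): inject the database URL at runtime
from alembic import context
from settings import DB_URL

config = context.config
config.set_main_option("sqlalchemy.url", DB_URL)
```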
alembic/README (new file, 3 lines)

```
@@ -0,0 +1,3 @@
Generic single-database configuration.

https://alembic.sqlalchemy.org/en/latest/tutorial.html
```
```diff
@@ -3,7 +3,7 @@ from logging.config import fileConfig
 from sqlalchemy import engine_from_config, pool

 from alembic import context
-from services.db import Base
+from base.orm import Base
 from settings import DB_URL

 # this is the Alembic Config object, which provides
```
alembic/script.py.mako (new file, 26 lines)

```
@@ -0,0 +1,26 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
```
alembic/versions/fe943b098418_init_alembic.py (new file, 26 lines)

```python
@@ -0,0 +1,26 @@
"""init alembic

Revision ID: fe943b098418
Revises:
Create Date: 2023-08-19 01:37:57.031933

"""
from typing import Sequence, Union

# import sqlalchemy as sa

# from alembic import op

# revision identifiers, used by Alembic.
revision: str = "fe943b098418"
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    pass


def downgrade() -> None:
    pass
```
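With the ini file, mako template, and this empty base revision in place, the standard Alembic workflow applies:

```shell
alembic revision --autogenerate -m "describe the change"  # generate a migration from model diffs
alembic upgrade head                                      # apply pending migrations
```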
app.json (removed, 15 lines)

```json
@@ -1,15 +0,0 @@
{
  "healthchecks": {
    "web": [
      {
        "type": "startup",
        "name": "web check",
        "description": "Checking if the app responds to the GET /",
        "path": "/",
        "attempts": 3,
        "warn": true,
        "initialDelay": 1
      }
    ]
  }
}
```
```diff
@@ -7,22 +7,26 @@ from starlette.authentication import AuthenticationBackend
 from starlette.requests import HTTPConnection

 from auth.credentials import AuthCredentials, AuthUser
-from auth.exceptions import OperationNotAllowed
 from auth.tokenstorage import SessionToken
-from auth.usermodel import Role, User
-from services.db import local_session
+from base.exceptions import OperationNotAllowed
+from base.orm import local_session
+from orm.user import Role, User
 from settings import SESSION_TOKEN_HEADER


 class JWTAuthenticate(AuthenticationBackend):
-    async def authenticate(self, request: HTTPConnection) -> Optional[Tuple[AuthCredentials, AuthUser]]:
+    async def authenticate(
+        self, request: HTTPConnection
+    ) -> Optional[Tuple[AuthCredentials, AuthUser]]:
         if SESSION_TOKEN_HEADER not in request.headers:
             return AuthCredentials(scopes={}), AuthUser(user_id=None, username="")

         token = request.headers.get(SESSION_TOKEN_HEADER)
         if not token:
             print("[auth.authenticate] no token in header %s" % SESSION_TOKEN_HEADER)
-            return AuthCredentials(scopes={}, error_message=str("no token")), AuthUser(user_id=None, username="")
+            return AuthCredentials(scopes={}, error_message=str("no token")), AuthUser(
+                user_id=None, username=""
+            )

         if len(token.split(".")) > 1:
             payload = await SessionToken.verify(token)
@@ -48,14 +52,20 @@ class JWTAuthenticate(AuthenticationBackend):
         except exc.NoResultFound:
             pass

-        return AuthCredentials(scopes={}, error_message=str("Invalid token")), AuthUser(user_id=None, username="")
+        return AuthCredentials(scopes={}, error_message=str("Invalid token")), AuthUser(
+            user_id=None, username=""
+        )


 def login_required(func):
     @wraps(func)
     async def wrap(parent, info: GraphQLResolveInfo, *args, **kwargs):
         # debug only
         # print('[auth.authenticate] login required for %r with info %r' % (func, info))
         auth: AuthCredentials = info.context["request"].auth
         # print(auth)
         if not auth or not auth.logged_in:
             # raise Unauthorized(auth.error_message or "Please login")
             return {"error": "Please login first"}
         return await func(parent, info, *args, **kwargs)

@@ -65,7 +75,9 @@ def login_required(func):
 def permission_required(resource, operation, func):
     @wraps(func)
     async def wrap(parent, info: GraphQLResolveInfo, *args, **kwargs):
-        print("[auth.authenticate] permission_required for %r with info %r" % (func, info))  # debug only
+        print(
+            "[auth.authenticate] permission_required for %r with info %r" % (func, info)
+        )  # debug only
         auth: AuthCredentials = info.context["request"].auth
         if not auth.logged_in:
             raise OperationNotAllowed(auth.error_message or "Please login")
@@ -75,22 +87,3 @@ def permission_required(resource, operation, func):
         return await func(parent, info, *args, **kwargs)

     return wrap
-
-
-def login_accepted(func):
-    @wraps(func)
-    async def wrap(parent, info: GraphQLResolveInfo, *args, **kwargs):
-        auth: AuthCredentials = info.context["request"].auth
-
-        # If authorized, add the author's data to the context
-        if auth and auth.logged_in:
-            info.context["author"] = auth.author
-            info.context["user_id"] = auth.author.get("id")
-        else:
-            # Clear the author's data from the context when authorization is absent
-            info.context["author"] = None
-            info.context["user_id"] = None
-
-        return await func(parent, info, *args, **kwargs)
-
-    return wrap
```
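For context, `login_required` is applied on top of a resolver binding; a minimal sketch (the field name and the returned payload are hypothetical, and the attribute access is illustrative rather than this project's actual schema):

```python
# Sketch: guarding an Ariadne resolver with the decorator above
from base.resolvers import query  # QueryType instance from base/resolvers.py

@query.field("myProfile")  # hypothetical field name
@login_required
async def my_profile(_, info):
    auth = info.context["request"].auth  # populated by JWTAuthenticate
    return {"error": None}
```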
```
@@ -1,13 +1,14 @@
from binascii import hexlify
from hashlib import sha256

# from base.exceptions import InvalidPassword, InvalidToken
from base.orm import local_session
from jwt import DecodeError, ExpiredSignatureError
from passlib.hash import bcrypt

from auth.jwtcodec import JWTCodec
from auth.tokenstorage import TokenStorage

# from base.exceptions import InvalidPassword, InvalidToken
from base.orm import local_session
from orm import User

@@ -32,8 +33,8 @@ class Password:
    Verify that password hash is equal to specified hash. Hash format:

    $2a$10$Ro0CUfOqk6cXEKf3dyaM7OhSCvnwM9s4wIX9JeLapehKK5YdLxKcm
    \__/\/ \____________________/\_____________________________/ # noqa: W605
    |  |          Salt                       Hash
    __ __ ____________________________________________________ # noqa: W605
    |  |  |  Salt (22)              | Hash
    |  Cost
    Version
```
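Since this module already uses passlib's `bcrypt`, verifying a password against a hash of the format diagrammed above is one call (the hash value below is the docstring's own example):

```python
from passlib.hash import bcrypt

stored = "$2a$10$Ro0CUfOqk6cXEKf3dyaM7OhSCvnwM9s4wIX9JeLapehKK5YdLxKcm"
print(bcrypt.verify("my-password", stored))  # True only for the original password
```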
```diff
@@ -1,23 +1,15 @@
 from datetime import datetime, timezone

 import jwt
 from pydantic import BaseModel
-from auth.exceptions import ExpiredToken, InvalidToken
+from base.exceptions import ExpiredToken, InvalidToken
 from settings import JWT_ALGORITHM, JWT_SECRET_KEY
-
-
-class TokenPayload(BaseModel):
-    user_id: str
-    username: str
-    exp: datetime
-    iat: datetime
-    iss: str
+from validations.auth import AuthInput, TokenPayload


 class JWTCodec:
     @staticmethod
-    def encode(user, exp: datetime) -> str:
+    def encode(user: AuthInput, exp: datetime) -> str:
         payload = {
             "user_id": user.id,
             "username": user.email or user.phone,
@@ -31,7 +23,7 @@ class JWTCodec:
             print("[auth.jwtcodec] JWT encode error %r" % e)

     @staticmethod
-    def decode(token: str, verify_exp: bool = True):
+    def decode(token: str, verify_exp: bool = True) -> TokenPayload:
         r = None
         payload = None
         try:
```
```
@@ -1,10 +1,9 @@
from datetime import datetime, timedelta, timezone

from base.redis import redis
from validations.auth import AuthInput

from auth.jwtcodec import JWTCodec
from base.redis import redis
from settings import ONETIME_TOKEN_LIFE_SPAN, SESSION_TOKEN_LIFE_SPAN
from validations.auth import AuthInput


async def save(token_key, life_span, auto_delete=True):
```
```python
@@ -1,110 +0,0 @@
import time

from sqlalchemy import JSON, Boolean, Column, DateTime, ForeignKey, Integer, String, func
from sqlalchemy.orm import relationship

from services.db import Base


class Permission(Base):
    __tablename__ = "permission"

    id = Column(String, primary_key=True, unique=True, nullable=False, default=None)
    resource = Column(String, nullable=False)
    operation = Column(String, nullable=False)


class Role(Base):
    __tablename__ = "role"

    id = Column(String, primary_key=True, unique=True, nullable=False, default=None)
    name = Column(String, nullable=False)
    permissions = relationship(Permission)


class AuthorizerUser(Base):
    __tablename__ = "authorizer_users"

    id = Column(String, primary_key=True, unique=True, nullable=False, default=None)
    key = Column(String)
    email = Column(String, unique=True)
    email_verified_at = Column(Integer)
    family_name = Column(String)
    gender = Column(String)
    given_name = Column(String)
    is_multi_factor_auth_enabled = Column(Boolean)
    middle_name = Column(String)
    nickname = Column(String)
    password = Column(String)
    phone_number = Column(String, unique=True)
    phone_number_verified_at = Column(Integer)
    # preferred_username = Column(String, nullable=False)
    picture = Column(String)
    revoked_timestamp = Column(Integer)
    roles = Column(String, default="author,reader")
    signup_methods = Column(String, default="magic_link_login")
    created_at = Column(Integer, default=lambda: int(time.time()))
    updated_at = Column(Integer, default=lambda: int(time.time()))


class UserRating(Base):
    __tablename__ = "user_rating"

    id = None
    rater: Column = Column(ForeignKey("user.id"), primary_key=True, index=True)
    user: Column = Column(ForeignKey("user.id"), primary_key=True, index=True)
    value: Column = Column(Integer)

    @staticmethod
    def init_table():
        pass


class UserRole(Base):
    __tablename__ = "user_role"

    id = None
    user = Column(ForeignKey("user.id"), primary_key=True, index=True)
    role = Column(ForeignKey("role.id"), primary_key=True, index=True)


class User(Base):
    __tablename__ = "user"
    default_user = None

    email = Column(String, unique=True, nullable=False, comment="Email")
    username = Column(String, nullable=False, comment="Login")
    password = Column(String, nullable=True, comment="Password")
    bio = Column(String, nullable=True, comment="Bio")  # status description
    about = Column(String, nullable=True, comment="About")  # long and formatted
    userpic = Column(String, nullable=True, comment="Userpic")
    name = Column(String, nullable=True, comment="Display name")
    slug = Column(String, unique=True, comment="User's slug")
    links = Column(JSON, nullable=True, comment="Links")
    oauth = Column(String, nullable=True)
    oid = Column(String, nullable=True)

    muted = Column(Boolean, default=False)
    confirmed = Column(Boolean, default=False)

    created_at = Column(DateTime(timezone=True), nullable=False, server_default=func.now(), comment="Created at")
    updated_at = Column(DateTime(timezone=True), nullable=False, server_default=func.now(), comment="Updated at")
    last_seen = Column(DateTime(timezone=True), nullable=False, server_default=func.now(), comment="Was online at")
    deleted_at = Column(DateTime(timezone=True), nullable=True, comment="Deleted at")

    ratings = relationship(UserRating, foreign_keys=UserRating.user)
    roles = relationship(lambda: Role, secondary=UserRole.__tablename__)

    def get_permission(self):
        scope = {}
        for role in self.roles:
            for p in role.permissions:
                if p.resource not in scope:
                    scope[p.resource] = set()
                scope[p.resource].add(p.operation)
        print(scope)
        return scope


# if __name__ == "__main__":
#     print(User.get_permission(user_id=1))
```
base/orm.py (new file, 57 lines)

```python
@@ -0,0 +1,57 @@
from typing import Any, Callable, Dict, Generic, TypeVar

from sqlalchemy import Column, Integer, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session
from sqlalchemy.sql.schema import Table

from settings import DB_URL

engine = create_engine(DB_URL, echo=False, pool_size=10, max_overflow=20)

T = TypeVar("T")

REGISTRY: Dict[str, type] = {}


def local_session():
    return Session(bind=engine, expire_on_commit=False)


DeclarativeBase = declarative_base()  # type: Any


class Base(DeclarativeBase):
    __table__: Table
    __tablename__: str
    __new__: Callable
    __init__: Callable
    __allow_unmapped__ = True
    __abstract__ = True
    __table_args__ = {"extend_existing": True}

    id = Column(Integer, primary_key=True)

    def __init_subclass__(cls, **kwargs):
        REGISTRY[cls.__name__] = cls

    @classmethod
    def create(cls: Generic[T], **kwargs) -> Generic[T]:
        instance = cls(**kwargs)
        return instance.save()

    def save(self) -> Generic[T]:
        with local_session() as session:
            session.add(self)
            session.commit()
        return self

    def update(self, input):
        column_names = self.__table__.columns.keys()
        for name, value in input.items():
            if name in column_names:
                setattr(self, name, value)

    def dict(self) -> Dict[str, Any]:
        column_names = self.__table__.columns.keys()
        return {c: getattr(self, c) for c in column_names}
```
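A usage sketch for the helpers above, assuming some mapped model subclassing this `Base` (the model name `Author` is illustrative): `local_session()` returns a plain `Session` usable as a context manager, and `.dict()` serializes a row to its column values.

```python
with local_session() as session:
    author = session.query(Author).first()  # Author is a hypothetical Base subclass
    if author:
        print(author.dict())  # {"id": ..., "slug": ..., ...}
```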
```diff
@@ -1,41 +1,32 @@
-import logging
-
-from redis.asyncio import Redis
+import redis.asyncio as aredis

 from settings import REDIS_URL

-# Set redis logging level to suppress DEBUG messages
-logger = logging.getLogger("redis")
-logger.setLevel(logging.WARNING)
+import logging
+
+logger = logging.getLogger("[services.redis] ")
+logger.setLevel(logging.DEBUG)


-class RedisService:
+class RedisCache:
     def __init__(self, uri=REDIS_URL):
         self._uri: str = uri
         self.pubsub_channels = []
         self._client = None

     async def connect(self):
         if self._uri:
-            self._client = await Redis.from_url(self._uri, decode_responses=True)
-            logger.info("Redis connection was established.")
+            self._client = aredis.Redis.from_url(self._uri, decode_responses=True)

     async def disconnect(self):
-        if isinstance(self._client, Redis):
+        if self._client:
             await self._client.close()
-            logger.info("Redis connection was closed.")

     async def execute(self, command, *args, **kwargs):
         if self._client:
             try:
-                logger.debug(f"{command}")  # {args[0]}") # {args} {kwargs}")
-                for arg in args:
-                    if isinstance(arg, dict):
-                        if arg.get("_sa_instance_state"):
-                            del arg["_sa_instance_state"]
+                logger.debug(f"{command} {args} {kwargs}")
                 r = await self._client.execute_command(command, *args, **kwargs)
-                # logger.debug(type(r))
-                # logger.debug(r)
+                logger.debug(type(r))
+                logger.debug(r)
                 return r
             except Exception as e:
                 logger.error(e)
@@ -60,22 +51,12 @@ class RedisService:
             return
         await self._client.publish(channel, data)

-    async def set(self, key, data, ex=None):
-        # Prepare the command arguments
-        args = [key, data]
+    async def mget(self, *keys):
+        return await self.execute('MGET', *keys)

-        # If an expiration time is provided, add it to the arguments
-        if ex is not None:
-            args.append("EX")
-            args.append(ex)
+    async def lrange(self, key, start, stop):
+        return await self.execute('LRANGE', key, start, stop)

-        # Execute the command with the provided arguments
-        await self.execute("set", *args)
-
-    async def get(self, key):
-        return await self.execute("get", key)
-

-redis = RedisService()
+redis = RedisCache()

 __all__ = ["redis"]
```
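Both variants expose the same module-level singleton and a generic `execute` passthrough; a minimal sketch of using it:

```python
import asyncio

async def main():
    await redis.connect()
    await redis.execute("SET", "greeting", "hello")
    print(await redis.execute("GET", "greeting"))  # "hello" (decode_responses=True)
    await redis.disconnect()

asyncio.run(main())
```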
base/resolvers.py (new file, 13 lines)

```python
@@ -0,0 +1,13 @@
from ariadne import MutationType, QueryType, ScalarType

datetime_scalar = ScalarType("DateTime")


@datetime_scalar.serializer
def serialize_datetime(value):
    return value.isoformat()


query = QueryType()
mutation = MutationType()
resolvers = [query, mutation, datetime_scalar]
```
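This `resolvers` list is the set of Ariadne bindables other modules attach fields to; a sketch of wiring it into an executable schema (the schema path is an assumption, this project may load its type definitions differently):

```python
from ariadne import load_schema_from_path, make_executable_schema

type_defs = load_schema_from_path("schema.graphql")  # hypothetical path
schema = make_executable_schema(type_defs, *resolvers)
```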
cache/cache.py (vendored, removed, 331 lines)

```python
@@ -1,331 +0,0 @@
import asyncio
import json
from typing import List

from sqlalchemy import and_, join, select

from orm.author import Author, AuthorFollower
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic, TopicFollower
from services.db import local_session
from services.redis import redis
from utils.encoders import CustomJSONEncoder
from utils.logger import root_logger as logger

DEFAULT_FOLLOWS = {
    "topics": [],
    "authors": [],
    "shouts": [],
    "communities": [{"id": 1, "name": "Дискурс", "slug": "discours", "pic": ""}],
}


# Cache topic data
async def cache_topic(topic: dict):
    payload = json.dumps(topic, cls=CustomJSONEncoder)
    # Cache by id and slug for quick access
    await asyncio.gather(
        redis.execute("SET", f"topic:id:{topic['id']}", payload),
        redis.execute("SET", f"topic:slug:{topic['slug']}", payload),
    )


# Cache author data
async def cache_author(author: dict):
    payload = json.dumps(author, cls=CustomJSONEncoder)
    # Cache author data by user and id
    await asyncio.gather(
        redis.execute("SET", f"author:user:{author['user'].strip()}", str(author["id"])),
        redis.execute("SET", f"author:id:{author['id']}", payload),
    )


# Cache follows data
async def cache_follows(follower_id: int, entity_type: str, entity_id: int, is_insert=True):
    key = f"author:follows-{entity_type}s:{follower_id}"
    follows_str = await redis.execute("get", key)
    follows = json.loads(follows_str) if follows_str else DEFAULT_FOLLOWS[entity_type]
    if is_insert:
        if entity_id not in follows:
            follows.append(entity_id)
    else:
        follows = [eid for eid in follows if eid != entity_id]
    await redis.execute("set", key, json.dumps(follows, cls=CustomJSONEncoder))
    await update_follower_stat(follower_id, entity_type, len(follows))


# Update follower statistics
async def update_follower_stat(follower_id, entity_type, count):
    follower_key = f"author:id:{follower_id}"
    follower_str = await redis.execute("get", follower_key)
    follower = json.loads(follower_str) if follower_str else None
    if follower:
        follower["stat"] = {f"{entity_type}s": count}
        await cache_author(follower)


# Get author from cache
async def get_cached_author(author_id: int, get_with_stat):
    author_key = f"author:id:{author_id}"
    result = await redis.execute("get", author_key)
    if result:
        return json.loads(result)
    # Load from database if not found in cache
    q = select(Author).where(Author.id == author_id)
    authors = get_with_stat(q)
    if authors:
        author = authors[0]
        await cache_author(author.dict())
        return author.dict()
    return None


# Function to get cached topic
async def get_cached_topic(topic_id: int):
    """
    Fetch topic data from cache or database by id.

    Args:
        topic_id (int): The identifier for the topic.

    Returns:
        dict: Topic data or None if not found.
    """
    topic_key = f"topic:id:{topic_id}"
    cached_topic = await redis.execute("get", topic_key)
    if cached_topic:
        return json.loads(cached_topic)

    # If not in cache, fetch from the database
    with local_session() as session:
        topic = session.execute(select(Topic).where(Topic.id == topic_id)).scalar_one_or_none()
        if topic:
            topic_dict = topic.dict()
            await redis.execute("set", topic_key, json.dumps(topic_dict, cls=CustomJSONEncoder))
            return topic_dict

    return None


# Get topic by slug from cache
async def get_cached_topic_by_slug(slug: str, get_with_stat):
    topic_key = f"topic:slug:{slug}"
    result = await redis.execute("get", topic_key)
    if result:
        return json.loads(result)
    # Load from database if not found in cache
    topic_query = select(Topic).where(Topic.slug == slug)
    topics = get_with_stat(topic_query)
    if topics:
        topic_dict = topics[0].dict()
        await cache_topic(topic_dict)
        return topic_dict
    return None


# Get list of authors by ID from cache
async def get_cached_authors_by_ids(author_ids: List[int]) -> List[dict]:
    # Fetch all author data concurrently
    keys = [f"author:id:{author_id}" for author_id in author_ids]
    results = await asyncio.gather(*(redis.execute("get", key) for key in keys))
    authors = [json.loads(result) if result else None for result in results]
    # Load missing authors from database and cache
    missing_indices = [index for index, author in enumerate(authors) if author is None]
    if missing_indices:
        missing_ids = [author_ids[index] for index in missing_indices]
        with local_session() as session:
            query = select(Author).where(Author.id.in_(missing_ids))
            missing_authors = session.execute(query).scalars().all()
            await asyncio.gather(*(cache_author(author.dict()) for author in missing_authors))
            for index, author in zip(missing_indices, missing_authors):
                authors[index] = author.dict()
    return authors


async def get_cached_topic_followers(topic_id: int):
    """
    Get a topic's followers by ID, using the Redis cache.
    If the data is missing from the cache, fetch it from the database and cache it.

    :param topic_id: The identifier of the topic whose followers should be fetched.
    :return: A list of the topic's followers; each item is a dict with the author's ID and name.
    """
    try:
        # Try to read the data from the cache
        cached = await redis.get(f"topic:followers:{topic_id}")
        if cached:
            followers_ids = json.loads(cached)
            logger.debug(f"Cached {len(followers_ids)} followers for topic #{topic_id}")
            followers = await get_cached_authors_by_ids(followers_ids)
            return followers

        # If the data is not in the cache, load it from the database
        async with local_session() as session:
            result = await session.execute(
                session.query(Author.id)
                .join(TopicFollower, TopicFollower.follower == Author.id)
                .filter(TopicFollower.topic == topic_id)
            )
            followers_ids = [f[0] for f in result.scalars().all()]

            # Cache the results
            await redis.set(f"topic:followers:{topic_id}", json.dumps(followers_ids))

            # Fetch detailed follower info by their IDs
            followers = await get_cached_authors_by_ids(followers_ids)
            logger.debug(followers)
            return followers
    except Exception as e:
        logger.error(f"Error fetching followers for topic #{topic_id}: {str(e)}")
        return []


# Get cached author followers
async def get_cached_author_followers(author_id: int):
    # Check cache for data
    cached = await redis.execute("get", f"author:followers:{author_id}")
    if cached:
        followers_ids = json.loads(cached)
        followers = await get_cached_authors_by_ids(followers_ids)
        logger.debug(f"Cached followers for author #{author_id}: {len(followers)}")
        return followers

    # Query database if cache is empty
    with local_session() as session:
        followers_ids = [
            f[0]
            for f in session.query(Author.id)
            .join(AuthorFollower, AuthorFollower.follower == Author.id)
            .filter(AuthorFollower.author == author_id, Author.id != author_id)
            .all()
        ]
        await redis.execute("SET", f"author:followers:{author_id}", json.dumps(followers_ids))
        followers = await get_cached_authors_by_ids(followers_ids)
        return followers


# Get cached follower authors
async def get_cached_follower_authors(author_id: int):
    # Attempt to retrieve authors from cache
    cached = await redis.execute("get", f"author:follows-authors:{author_id}")
    if cached:
        authors_ids = json.loads(cached)
    else:
        # Query authors from database
        with local_session() as session:
            authors_ids = [
                a[0]
                for a in session.execute(
                    select(Author.id)
                    .select_from(join(Author, AuthorFollower, Author.id == AuthorFollower.author))
                    .where(AuthorFollower.follower == author_id)
                ).all()
            ]
            await redis.execute("SET", f"author:follows-authors:{author_id}", json.dumps(authors_ids))

    authors = await get_cached_authors_by_ids(authors_ids)
    return authors


# Get cached follower topics
async def get_cached_follower_topics(author_id: int):
    # Attempt to retrieve topics from cache
    cached = await redis.execute("get", f"author:follows-topics:{author_id}")
    if cached:
        topics_ids = json.loads(cached)
    else:
        # Load topics from database and cache them
        with local_session() as session:
            topics_ids = [
                t[0]
                for t in session.query(Topic.id)
                .join(TopicFollower, TopicFollower.topic == Topic.id)
                .where(TopicFollower.follower == author_id)
                .all()
            ]
            await redis.execute("SET", f"author:follows-topics:{author_id}", json.dumps(topics_ids))

    topics = []
    for topic_id in topics_ids:
        topic_str = await redis.execute("get", f"topic:id:{topic_id}")
        if topic_str:
            topic = json.loads(topic_str)
            if topic and topic not in topics:
                topics.append(topic)

    logger.debug(f"Cached topics for author#{author_id}: {len(topics)}")
    return topics


# Get author by user ID from cache
async def get_cached_author_by_user_id(user_id: str, get_with_stat):
    """
    Retrieve author information by user_id, checking the cache first, then the database.

    Args:
        user_id (str): The user identifier for which to retrieve the author.

    Returns:
        dict: Dictionary with author data or None if not found.
    """
    # Attempt to find author ID by user_id in Redis cache
    author_id = await redis.execute("get", f"author:user:{user_id.strip()}")
    if author_id:
        # If ID is found, get full author data by ID
        author_data = await redis.execute("get", f"author:id:{author_id}")
        if author_data:
            return json.loads(author_data)

    # If data is not found in cache, query the database
    author_query = select(Author).where(Author.user == user_id)
    authors = get_with_stat(author_query)
    if authors:
        # Cache the retrieved author data
        author = authors[0]
        author_dict = author.dict()
        await asyncio.gather(
            redis.execute("SET", f"author:user:{user_id.strip()}", str(author.id)),
            redis.execute("SET", f"author:id:{author.id}", json.dumps(author_dict)),
        )
        return author_dict

    # Return None if author is not found
    return None


# Get cached topic authors
async def get_cached_topic_authors(topic_id: int):
    """
    Retrieve a list of authors for a given topic, using cache or database.

    Args:
        topic_id (int): The identifier of the topic for which to retrieve authors.

    Returns:
        List[dict]: A list of dictionaries containing author data.
    """
    # Attempt to get a list of author IDs from cache
    rkey = f"topic:authors:{topic_id}"
    cached_authors_ids = await redis.execute("get", rkey)
    if cached_authors_ids:
        authors_ids = json.loads(cached_authors_ids)
    else:
        # If cache is empty, get data from the database
        with local_session() as session:
            query = (
                select(ShoutAuthor.author)
                .select_from(join(ShoutTopic, Shout, ShoutTopic.shout == Shout.id))
                .join(ShoutAuthor, ShoutAuthor.shout == Shout.id)
                .where(and_(ShoutTopic.topic == topic_id, Shout.published_at.is_not(None), Shout.deleted_at.is_(None)))
            )
            authors_ids = [author_id for (author_id,) in session.execute(query).all()]
            # Cache the retrieved author IDs
            await redis.execute("set", rkey, json.dumps(authors_ids))

    # Retrieve full author details from cached IDs
    if authors_ids:
        authors = await get_cached_authors_by_ids(authors_ids)
        logger.debug(f"Topic#{topic_id} authors fetched and cached: {len(authors)} authors found.")
        return authors

    return []
```
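The read path throughout this module is the same cache-aside pattern: check Redis, fall back to the database, then populate Redis. A usage sketch for one of the getters above:

```python
import asyncio

# Reads topic #1 from Redis if present; otherwise loads it from the
# database and caches the serialized dict for the next call.
topic = asyncio.run(get_cached_topic(1))
print(topic["slug"] if topic else "topic not found")
```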
cache/memorycache.py (vendored, removed, 11 lines)

```python
@@ -1,11 +0,0 @@
from dogpile.cache import make_region

from settings import REDIS_URL

# Create a cache region with a TTL
cache_region = make_region()
cache_region.configure(
    "dogpile.cache.redis",
    arguments={"url": f"{REDIS_URL}/1"},
    expiration_time=3600,  # Cache expiration time in seconds
)
```
cache/precache.py (vendored, removed, 129 lines)

```python
@@ -1,129 +0,0 @@
import asyncio
import json

from sqlalchemy import and_, join, select

from cache.cache import cache_author, cache_topic
from orm.author import Author, AuthorFollower
from orm.shout import Shout, ShoutAuthor, ShoutReactionsFollower, ShoutTopic
from orm.topic import Topic, TopicFollower
from resolvers.stat import get_with_stat
from services.db import local_session
from services.redis import redis
from utils.encoders import CustomJSONEncoder
from utils.logger import root_logger as logger


# Precache an author's followers
async def precache_authors_followers(author_id, session):
    authors_followers = set()
    followers_query = select(AuthorFollower.follower).where(AuthorFollower.author == author_id)
    result = session.execute(followers_query)
    authors_followers.update(row[0] for row in result if row[0])

    followers_payload = json.dumps(list(authors_followers), cls=CustomJSONEncoder)
    await redis.execute("SET", f"author:followers:{author_id}", followers_payload)


# Precache an author's follows
async def precache_authors_follows(author_id, session):
    follows_topics_query = select(TopicFollower.topic).where(TopicFollower.follower == author_id)
    follows_authors_query = select(AuthorFollower.author).where(AuthorFollower.follower == author_id)
    follows_shouts_query = select(ShoutReactionsFollower.shout).where(ShoutReactionsFollower.follower == author_id)

    follows_topics = {row[0] for row in session.execute(follows_topics_query) if row[0]}
    follows_authors = {row[0] for row in session.execute(follows_authors_query) if row[0]}
    follows_shouts = {row[0] for row in session.execute(follows_shouts_query) if row[0]}

    topics_payload = json.dumps(list(follows_topics), cls=CustomJSONEncoder)
    authors_payload = json.dumps(list(follows_authors), cls=CustomJSONEncoder)
    shouts_payload = json.dumps(list(follows_shouts), cls=CustomJSONEncoder)

    await asyncio.gather(
        redis.execute("SET", f"author:follows-topics:{author_id}", topics_payload),
        redis.execute("SET", f"author:follows-authors:{author_id}", authors_payload),
        redis.execute("SET", f"author:follows-shouts:{author_id}", shouts_payload),
    )


# Precache a topic's authors
async def precache_topics_authors(topic_id: int, session):
    topic_authors_query = (
        select(ShoutAuthor.author)
        .select_from(join(ShoutTopic, Shout, ShoutTopic.shout == Shout.id))
        .join(ShoutAuthor, ShoutAuthor.shout == Shout.id)
        .filter(
            and_(
                ShoutTopic.topic == topic_id,
                Shout.published_at.is_not(None),
                Shout.deleted_at.is_(None),
            )
        )
    )
    topic_authors = {row[0] for row in session.execute(topic_authors_query) if row[0]}

    authors_payload = json.dumps(list(topic_authors), cls=CustomJSONEncoder)
    await redis.execute("SET", f"topic:authors:{topic_id}", authors_payload)


# Precache a topic's followers
async def precache_topics_followers(topic_id: int, session):
    followers_query = select(TopicFollower.follower).where(TopicFollower.topic == topic_id)
    topic_followers = {row[0] for row in session.execute(followers_query) if row[0]}

    followers_payload = json.dumps(list(topic_followers), cls=CustomJSONEncoder)
    await redis.execute("SET", f"topic:followers:{topic_id}", followers_payload)


async def precache_data():
    logger.info("precaching...")
    try:
        key = "authorizer_env"
        # cache reset
        value = await redis.execute("HGETALL", key)
        await redis.execute("FLUSHDB")
        logger.info("redis: FLUSHDB")

        # Flatten the dict into an argument list for HSET
        if value:
            flattened = []
            for field, val in value.items():
                flattened.extend([field, val])

            await redis.execute("HSET", key, *flattened)
            logger.info(f"redis hash '{key}' was restored")

        with local_session() as session:
            # topics
            q = select(Topic).where(Topic.community == 1)
            topics = get_with_stat(q)
            for topic in topics:
                topic_dict = topic.dict() if hasattr(topic, "dict") else topic
                await cache_topic(topic_dict)
                await asyncio.gather(
                    precache_topics_followers(topic_dict["id"], session),
                    precache_topics_authors(topic_dict["id"], session),
                )
            logger.info(f"{len(topics)} topics and their followings precached")

            # authors
            authors = get_with_stat(select(Author).where(Author.user.is_not(None)))
            logger.info(f"{len(authors)} authors found in database")
            for author in authors:
                if isinstance(author, Author):
                    profile = author.dict()
                    author_id = profile.get("id")
                    user_id = profile.get("user", "").strip()
                    if author_id and user_id:
                        await cache_author(profile)
                        await asyncio.gather(
                            precache_authors_followers(author_id, session), precache_authors_follows(author_id, session)
                        )
                else:
                    logger.error(f"fail caching {author}")
            logger.info(f"{len(authors)} authors and their followings precached")
    except Exception as exc:
        import traceback

        traceback.print_exc()
        logger.error(f"Error in precache_data: {exc}")
```
63
cache/revalidator.py
vendored
@@ -1,63 +0,0 @@
import asyncio

from cache.cache import cache_author, cache_topic, get_cached_author, get_cached_topic
from resolvers.stat import get_with_stat
from utils.logger import root_logger as logger


class CacheRevalidationManager:
    def __init__(self, interval=60):
        """Initialize the manager with the given check interval (in seconds)."""
        self.interval = interval
        self.items_to_revalidate = {"authors": set(), "topics": set(), "shouts": set(), "reactions": set()}
        self.lock = asyncio.Lock()
        self.running = True

    async def start(self):
        """Start the background worker for cache revalidation."""
        self.task = asyncio.create_task(self.revalidate_cache())

    async def revalidate_cache(self):
        """Loop forever, checking and revalidating the cache every self.interval seconds."""
        try:
            while self.running:
                await asyncio.sleep(self.interval)
                await self.process_revalidation()
        except asyncio.CancelledError:
            logger.info("Revalidation worker was stopped.")
        except Exception as e:
            logger.error(f"An error occurred in the revalidation worker: {e}")

    async def process_revalidation(self):
        """Refresh the cache for all entities marked for revalidation."""
        async with self.lock:
            # Revalidate the author cache
            for author_id in self.items_to_revalidate["authors"]:
                author = await get_cached_author(author_id, get_with_stat)
                if author:
                    await cache_author(author)
            self.items_to_revalidate["authors"].clear()

            # Revalidate the topic cache
            for topic_id in self.items_to_revalidate["topics"]:
                topic = await get_cached_topic(topic_id)
                if topic:
                    await cache_topic(topic)
            self.items_to_revalidate["topics"].clear()

    def mark_for_revalidation(self, entity_id, entity_type):
        """Mark an entity for revalidation."""
        self.items_to_revalidate[entity_type].add(entity_id)

    async def stop(self):
        """Stop the background worker."""
        self.running = False
        if hasattr(self, "task"):
            self.task.cancel()
            try:
                await self.task
            except asyncio.CancelledError:
                pass


revalidation_manager = CacheRevalidationManager(interval=300)  # Revalidate every 5 minutes
69
cache/triggers.py
vendored
@@ -1,69 +0,0 @@
from sqlalchemy import event

from cache.revalidator import revalidation_manager
from orm.author import Author, AuthorFollower
from orm.reaction import Reaction
from orm.shout import Shout, ShoutAuthor, ShoutReactionsFollower
from orm.topic import Topic, TopicFollower
from utils.logger import root_logger as logger


def mark_for_revalidation(entity, *args):
    """Mark an entity for revalidation."""
    entity_type = (
        "authors"
        if isinstance(entity, Author)
        else "topics"
        if isinstance(entity, Topic)
        else "reactions"
        if isinstance(entity, Reaction)
        else "shouts"
        if isinstance(entity, Shout)
        else None
    )
    if entity_type:
        revalidation_manager.mark_for_revalidation(entity.id, entity_type)


def after_follower_handler(mapper, connection, target, is_delete=False):
    """Handle insertion, update or deletion of a follower row."""
    entity_type = None
    if isinstance(target, AuthorFollower):
        entity_type = "authors"
    elif isinstance(target, TopicFollower):
        entity_type = "topics"
    elif isinstance(target, ShoutReactionsFollower):
        entity_type = "shouts"

    if entity_type:
        revalidation_manager.mark_for_revalidation(
            target.author if entity_type == "authors" else target.topic, entity_type
        )
        if not is_delete:
            revalidation_manager.mark_for_revalidation(target.follower, "authors")


def events_register():
    """Register event handlers for all entities."""
    event.listen(ShoutAuthor, "after_insert", mark_for_revalidation)
    event.listen(ShoutAuthor, "after_update", mark_for_revalidation)
    event.listen(ShoutAuthor, "after_delete", mark_for_revalidation)

    event.listen(AuthorFollower, "after_insert", after_follower_handler)
    event.listen(AuthorFollower, "after_update", after_follower_handler)
    event.listen(AuthorFollower, "after_delete", lambda *args: after_follower_handler(*args, is_delete=True))

    event.listen(TopicFollower, "after_insert", after_follower_handler)
    event.listen(TopicFollower, "after_update", after_follower_handler)
    event.listen(TopicFollower, "after_delete", lambda *args: after_follower_handler(*args, is_delete=True))

    event.listen(ShoutReactionsFollower, "after_insert", after_follower_handler)
    event.listen(ShoutReactionsFollower, "after_update", after_follower_handler)
    event.listen(ShoutReactionsFollower, "after_delete", lambda *args: after_follower_handler(*args, is_delete=True))

    event.listen(Reaction, "after_update", mark_for_revalidation)
    event.listen(Author, "after_update", mark_for_revalidation)
    event.listen(Topic, "after_update", mark_for_revalidation)
    event.listen(Shout, "after_update", mark_for_revalidation)

    logger.info("Event handlers registered successfully.")
10
checks.sh
Executable file
@@ -0,0 +1,10 @@
#!/usr/bin/env bash

echo "> isort"
isort .
echo "> black"
black .
echo "> flake8"
flake8 .
# echo "> mypy"
# mypy .
@@ -1,35 +0,0 @@
## Publication views

- Google Analytics integration for tracking publication views
- Counts unique users and total views
- Statistics are refreshed automatically when publication data is requested

## Multi-domain authorization

- Authorization is supported for several domains:
  - *.dscrs.site (including testing.dscrs.site)
  - localhost[:port]
  - testingdiscoursio-git-*-discoursio.vercel.app
  - *.discours.io
- The authorization server is detected automatically (see the sketch below)
- Correct CORS handling for all supported domains
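For illustration, an origin check covering the domains above might look like the following sketch; the exact pattern the service uses is not shown in this diff, so the regex is an assumption:

```python
# Hypothetical origin matcher for the domains listed above;
# not the project's actual implementation.
import re

ALLOWED_ORIGIN = re.compile(
    r"^https?://("
    r"([\w-]+\.)?dscrs\.site"
    r"|localhost(:\d+)?"
    r"|testingdiscoursio-git-[\w-]+-discoursio\.vercel\.app"
    r"|([\w-]+\.)?discours\.io"
    r")$"
)

assert ALLOWED_ORIGIN.match("https://testing.dscrs.site")
assert ALLOWED_ORIGIN.match("http://localhost:3000")
```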
## Webhooks

- A webhook for the user.login event is registered automatically
- Duplicate webhooks are prevented from being created
- Stale webhooks are cleaned up automatically
- Webhook authorization via WEBHOOK_SECRET
- Error handling for all webhook operations
- The endpoint is determined dynamically from the environment

## CORS Configuration

- Supported domains:
  - *.dscrs.site (including testing.dscrs.site, core.dscrs.site)
  - *.discours.io (including testing.discours.io)
  - localhost (including ports)
- Supported methods: GET, POST, OPTIONS
- Credentials support is enabled
- Allowed headers: Authorization, Content-Type, X-Requested-With, DNT, Cache-Control
- Preflight responses are cached for 20 days (1728000 seconds)
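A minimal sketch of how these settings map onto Starlette's CORSMiddleware (Starlette is the framework used in main.py); the origin regex is an assumption rather than the project's actual pattern:

```python
# Minimal sketch, assuming Starlette's CORSMiddleware carries these settings.
from starlette.applications import Starlette
from starlette.middleware.cors import CORSMiddleware

app = Starlette()
app.add_middleware(
    CORSMiddleware,
    # illustrative regex covering the domain families listed above
    allow_origin_regex=r"https?://(.*\.dscrs\.site|.*\.discours\.io|localhost(:\d+)?)",
    allow_credentials=True,
    allow_methods=["GET", "POST", "OPTIONS"],
    allow_headers=["Authorization", "Content-Type", "X-Requested-With", "DNT", "Cache-Control"],
    max_age=1728000,  # cache preflight responses for 20 days
)
```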
@@ -1,94 +0,0 @@
# Following System

## Overview
System supports following different entity types:
- Authors
- Topics
- Communities
- Shouts (Posts)

## GraphQL API

### Mutations

#### follow
Follow an entity (author/topic/community/shout).

**Parameters:**
- `what: String!` - Entity type (`AUTHOR`, `TOPIC`, `COMMUNITY`, `SHOUT`)
- `slug: String` - Entity slug
- `entity_id: Int` - Optional entity ID

**Returns:**
```typescript
{
  authors?: Author[]        // For AUTHOR type
  topics?: Topic[]          // For TOPIC type
  communities?: Community[] // For COMMUNITY type
  shouts?: Shout[]          // For SHOUT type
  error?: String            // Error message if any
}
```

#### unfollow
Unfollow an entity.

**Parameters:** Same as `follow`

**Returns:** Same as `follow`
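A minimal sketch of calling these mutations over plain HTTP; the endpoint URL and the Authorization header format are assumptions, not taken from this document:

```python
# Minimal sketch: POST the follow mutation to the GraphQL endpoint.
# The URL and the auth header format are assumptions.
import requests

FOLLOW = """
mutation Follow($what: String!, $slug: String) {
  follow(what: $what, slug: $slug) {
    error
    authors { id slug }
  }
}
"""

def follow_author(token: str, slug: str) -> dict:
    resp = requests.post(
        "https://example.com/",  # assumed GraphQL endpoint
        json={"query": FOLLOW, "variables": {"what": "AUTHOR", "slug": slug}},
        headers={"Authorization": token},
        timeout=10,
    )
    resp.raise_for_status()
    return resp.json()["data"]["follow"]
```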
### Queries

#### get_shout_followers
Get list of users who reacted to a shout.

**Parameters:**
- `slug: String` - Shout slug
- `shout_id: Int` - Optional shout ID

**Returns:**
```typescript
Author[] // List of authors who reacted
```
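Under the same transport assumptions, the query side is symmetrical; the Author field selection here is an assumption:

```python
# Sketch of the followers query; the selected Author fields are assumptions.
GET_SHOUT_FOLLOWERS = """
query ShoutFollowers($slug: String) {
  get_shout_followers(slug: $slug) { id slug name }
}
"""
```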
## Caching System

### Supported Entity Types
- Authors: `cache_author`, `get_cached_follower_authors`
- Topics: `cache_topic`, `get_cached_follower_topics`
- Communities: No cache
- Shouts: No cache

### Cache Flow
1. On follow/unfollow:
   - Update entity in cache
   - Update follower's following list
2. Cache is updated before notifications

## Notifications

- Sent when author is followed/unfollowed
- Contains:
  - Follower info
  - Author ID
  - Action type ("follow"/"unfollow")

## Error Handling

- Unauthorized access check
- Entity existence validation
- Duplicate follow prevention
- Full error logging
- Transaction safety with `local_session()`

## Database Schema

### Follower Tables
- `AuthorFollower`
- `TopicFollower`
- `CommunityFollower`
- `ShoutReactionsFollower`

Each table contains (see the sketch below):
- `follower` - ID of following user
- `{entity_type}` - ID of followed entity
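A hedged SQLAlchemy sketch of that follower-table shape; the table name, column types, and everything beyond the `follower`/followed-entity pair are assumptions, not the project's actual ORM definitions:

```python
# Hypothetical follower table; names and types are illustrative only.
from sqlalchemy import Column, ForeignKey, Integer
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class AuthorFollowerExample(Base):
    __tablename__ = "author_follower_example"
    id = Column(Integer, primary_key=True)
    follower = Column(ForeignKey("author.id"))  # ID of the following user
    author = Column(ForeignKey("author.id"))    # ID of the followed entity
```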
@@ -1,80 +0,0 @@
# Publication Loading System

## Implementation Details

### Base query
- Automatically loads the main author
- Adds the publication's main topic
- Supports a flexible filtering system
- Optimizes queries based on the requested fields

### Statistics
- Like/dislike counts
- Number of comments
- Date of the last reaction
- Statistics are loaded only when the `stat` field is requested

### Performance optimizations
- Lazy loading of related data
- Results cached for 5 minutes
- Batched loading of authors and topics
- Subqueries for complex selections

## Feed Types

### Random top posts (load_shouts_random_top)
**Advantages:**
- Diverse content
- Fast selection from a cache of top posts
- Configurable pool size for sampling

**Limitations:**
- Refreshed once every 5 minutes
- Maximum pool size: 100 posts
- Only likes/dislikes are counted (no comments)

### Unrated posts (load_shouts_unrated)
**Advantages:**
- Helps surface new content
- Evens out the distribution of ratings
- Random output order

**Limitations:**
- Only posts with fewer than 3 reactions
- Comments are not counted
- No sorting by rating

### Bookmarks (load_shouts_bookmarked)
**Advantages:**
- Personalized selection
- Fast access to saved posts
- Supports all filters

**Limitations:**
- Requires authorization
- Limit on the number of bookmarks
- Caching disabled

## Key Points

### Pagination
- Default page size: 10
- Maximum size: 100
- Cursor pagination supported (see the sketch below)
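As an illustration of these defaults, a paginated feed request might be issued as follows; the resolver name and options shape are assumptions based on the description above:

```python
# Hypothetical paginated feed query; 10 is the documented default page size,
# 100 the documented maximum.
LOAD_SHOUTS = """
query LoadShouts($options: LoadShoutsOptions) {
  load_shouts_by(options: $options) { id slug title }
}
"""
variables = {"options": {"limit": 10, "offset": 0}}
```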
### Caching
- TTL: 5 minutes
- Invalidated when a post changes
- Separate cache per sort type

### Sorting
- By rating (likes minus dislikes)
- By number of comments
- By date of the last reaction
- By publication date (default)

### Security
- Access rights checks
- Deleted content is filtered out
- Protection against SQL injection
- Input validation
@@ -1,82 +0,0 @@
# Rating System

## GraphQL Resolvers

### Queries

#### get_my_rates_shouts
Get user's reactions (LIKE/DISLIKE) for specified posts.

**Parameters:**
- `shouts: [Int!]!` - array of shout IDs

**Returns:**
```typescript
[{
  shout_id: Int
  my_rate: ReactionKind // LIKE or DISLIKE
}]
```

#### get_my_rates_comments
Get user's reactions (LIKE/DISLIKE) for specified comments.

**Parameters:**
- `comments: [Int!]!` - array of comment IDs

**Returns:**
```typescript
[{
  comment_id: Int
  my_rate: ReactionKind // LIKE or DISLIKE
}]
```

### Mutations

#### rate_author
Rate another author (karma system).

**Parameters:**
- `rated_slug: String!` - author's slug
- `value: Int!` - rating value (positive/negative)
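For example, upvoting an author by slug could be sent like this; the returned field selection is an assumption:

```python
# Hypothetical rate_author call; a positive value raises karma,
# a negative one lowers it. The { error } selection is an assumption.
RATE_AUTHOR = """
mutation RateAuthor($rated_slug: String!, $value: Int!) {
  rate_author(rated_slug: $rated_slug, value: $value) { error }
}
"""
variables = {"rated_slug": "some-author", "value": 1}
```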
## Rating Calculation

### Author Rating Components

#### Shouts Rating
- Calculated from LIKE/DISLIKE reactions on author's posts
- Each LIKE: +1
- Each DISLIKE: -1
- Excludes deleted reactions
- Excludes comment reactions (see the counting sketch below)
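The counting rule reduces to a simple sum. A toy illustration, not the project's SQL implementation:

```python
# Toy version of the LIKE/DISLIKE counting rule described above.
def net_rating(reaction_kinds: list[str]) -> int:
    return sum(
        1 if kind == "LIKE" else -1
        for kind in reaction_kinds
        if kind in ("LIKE", "DISLIKE")
    )

assert net_rating(["LIKE", "LIKE", "DISLIKE"]) == 1
```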
#### Comments Rating
- Calculated from LIKE/DISLIKE reactions on author's comments
- Each LIKE: +1
- Each DISLIKE: -1
- Only counts reactions to COMMENT type reactions
- Excludes deleted reactions

#### Legacy Karma
- Based on direct author ratings via `rate_author` mutation
- Stored in `AuthorRating` table
- Each positive rating: +1
- Each negative rating: -1

### Helper Functions

- `count_author_comments_rating()` - Calculate comment rating
- `count_author_shouts_rating()` - Calculate posts rating
- `get_author_rating_old()` - Get legacy karma rating
- `get_author_rating_shouts()` - Get posts rating (optimized)
- `get_author_rating_comments()` - Get comments rating (optimized)
- `add_author_rating_columns()` - Add rating columns to author query

## Notes

- All ratings exclude deleted content
- Reactions are unique per user/content
- Rating calculations are optimized with SQLAlchemy
- System supports both direct author rating and content-based rating
1
generate_gql_types.sh
Executable file
@@ -0,0 +1 @@
python -m gql_schema_codegen -p ./schema.graphql -t ./schema_types.py
183
main.py
@@ -1,135 +1,94 @@
import asyncio
import os
import sys
from importlib import import_module
from os.path import exists

from ariadne import load_schema_from_path, make_executable_schema
from ariadne.asgi import GraphQL
from starlette.applications import Starlette
from starlette.middleware.cors import CORSMiddleware
from starlette.requests import Request
from starlette.responses import JSONResponse, Response
from starlette.middleware import Middleware
from starlette.middleware.authentication import AuthenticationMiddleware
from starlette.middleware.sessions import SessionMiddleware
from starlette.routing import Route

from cache.precache import precache_data
from cache.revalidator import revalidation_manager
from orm import (
    # collection,
    # invite,
    author,
    community,
    notification,
    reaction,
    shout,
    topic,
)
from services.db import create_table_if_not_exists, engine
from services.exception import ExceptionHandlerMiddleware
from services.redis import redis
from services.schema import resolvers
from services.search import search_service
from services.viewed import ViewedStorage
from services.webhook import WebhookEndpoint, create_webhook_endpoint
from settings import DEV_SERVER_PID_FILE_NAME, MODE
from auth.authenticate import JWTAuthenticate
from auth.oauth import oauth_authorize, oauth_login
from base.redis import redis
from base.resolvers import resolvers
from orm import init_tables
from resolvers.upload import upload_handler
from services.main import storages_init
from services.notifications.notification_service import notification_service
from services.notifications.sse import sse_subscribe_handler
from services.stat.viewed import ViewedStorage

# from services.zine.gittask import GitTask
from settings import DEV_SERVER_PID_FILE_NAME, SENTRY_DSN, SESSION_SECRET_KEY

import_module("resolvers")
schema = make_executable_schema(load_schema_from_path("schema/"), resolvers)
schema = make_executable_schema(load_schema_from_path("schema.graphql"), resolvers)

middleware = [
    Middleware(AuthenticationMiddleware, backend=JWTAuthenticate()),
    Middleware(SessionMiddleware, secret_key=SESSION_SECRET_KEY),
]


async def start():
    if MODE == "development":
        if not exists(DEV_SERVER_PID_FILE_NAME):
            # pid file management
async def start_up():
    init_tables()
    await redis.connect()
    await storages_init()
    views_stat_task = asyncio.create_task(ViewedStorage().worker())
    print(views_stat_task)
    # git_task = asyncio.create_task(GitTask.git_task_worker())
    # print(git_task)
    notification_service_task = asyncio.create_task(notification_service.worker())
    print(notification_service_task)

    try:
        import sentry_sdk

        sentry_sdk.init(SENTRY_DSN)
    except Exception as e:
        print("[sentry] init error")
        print(e)


async def dev_start_up():
    if exists(DEV_SERVER_PID_FILE_NAME):
        await redis.connect()
        return
    else:
        with open(DEV_SERVER_PID_FILE_NAME, "w", encoding="utf-8") as f:
            f.write(str(os.getpid()))
            print(f"[main] process started in {MODE} mode")

    await start_up()


def create_all_tables():
    for model in [
        # user.User,
        author.Author,
        author.AuthorFollower,
        community.Community,
        community.CommunityFollower,
        shout.Shout,
        shout.ShoutAuthor,
        author.AuthorBookmark,
        topic.Topic,
        topic.TopicFollower,
        shout.ShoutTopic,
        reaction.Reaction,
        shout.ShoutReactionsFollower,
        author.AuthorRating,
        notification.Notification,
        notification.NotificationSeen,
        # collection.Collection, collection.ShoutCollection,
        # invite.Invite
    ]:
        create_table_if_not_exists(engine, model)
async def shutdown():
    await redis.disconnect()


async def create_all_tables_async():
    # Wrap the synchronous function in an async call
    await asyncio.to_thread(create_all_tables)


async def lifespan(app):
    try:
        await asyncio.gather(
            create_all_tables_async(),
            redis.connect(),
            precache_data(),
            ViewedStorage.init(),
            create_webhook_endpoint(),
            search_service.info(),
            start(),
            revalidation_manager.start(),
        )
        yield
    finally:
        tasks = [redis.disconnect(), ViewedStorage.stop(), revalidation_manager.stop()]
        await asyncio.gather(*tasks, return_exceptions=True)


# Create the GraphQL instance
graphql_app = GraphQL(schema, debug=True)


# Wrap the GraphQL handler for better error handling
async def graphql_handler(request: Request):
    if request.method not in ["GET", "POST"]:
        return JSONResponse({"error": "Method Not Allowed"}, status_code=405)

    try:
        result = await graphql_app.handle_request(request)
        if isinstance(result, Response):
            return result
        return JSONResponse(result)
    except asyncio.CancelledError:
        return JSONResponse({"error": "Request cancelled"}, status_code=499)
    except Exception as e:
        print(f"GraphQL error: {str(e)}")
        return JSONResponse({"error": str(e)}, status_code=500)


# Update the route in Starlette
app = Starlette(
    routes = [
        Route("/", graphql_handler, methods=["GET", "POST"]),
        Route("/new-author", WebhookEndpoint),
    ],
    lifespan=lifespan,
    debug=True,
)
    Route("/oauth/{provider}", endpoint=oauth_login),
    Route("/oauth-authorize", endpoint=oauth_authorize),
    Route("/upload", endpoint=upload_handler, methods=["POST"]),
    Route("/subscribe/{user_id}", endpoint=sse_subscribe_handler),
]

app.add_middleware(ExceptionHandlerMiddleware)
if "dev" in sys.argv:
    app.add_middleware(
        CORSMiddleware,
        allow_origins=["https://localhost:3000"],
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
app = Starlette(
    on_startup=[start_up],
    on_shutdown=[shutdown],
    middleware=middleware,
    routes=routes,
)
app.mount("/", GraphQL(schema))

dev_app = Starlette(
    debug=True,
    on_startup=[dev_start_up],
    on_shutdown=[shutdown],
    middleware=middleware,
    routes=routes,
)
dev_app.mount("/", GraphQL(schema, debug=True))
18
migrate.sh
Normal file
@@ -0,0 +1,18 @@
database_name="discoursio"

echo "DATABASE MIGRATION STARTED"

echo "Dropping database $database_name"
dropdb $database_name --force
if [ $? -ne 0 ]; then { echo "Failed to drop database, aborting." ; exit 1; } fi
echo "Database $database_name dropped"

echo "Creating database $database_name"
createdb $database_name
if [ $? -ne 0 ]; then { echo "Failed to create database, aborting." ; exit 1; } fi
echo "Database $database_name successfully created"

echo "Start migration"
python3 server.py migrate
if [ $? -ne 0 ]; then { echo "Migration failed, aborting." ; exit 1; } fi
echo 'Done!'
279
migration/__init__.py
Normal file
@@ -0,0 +1,279 @@
""" cmd managed migration """
import asyncio
import gc
import json
import sys
from datetime import datetime, timezone

import bs4

from migration.export import export_mdx
from migration.tables.comments import migrate as migrateComment
from migration.tables.comments import migrate_2stage as migrateComment_2stage
from migration.tables.content_items import get_shout_slug
from migration.tables.content_items import migrate as migrateShout

# from migration.tables.remarks import migrate as migrateRemark
from migration.tables.topics import migrate as migrateTopic
from migration.tables.users import migrate as migrateUser
from migration.tables.users import migrate_2stage as migrateUser_2stage
from migration.tables.users import post_migrate as users_post_migrate
from orm import init_tables
from orm.reaction import Reaction

TODAY = datetime.strftime(datetime.now(tz=timezone.utc), "%Y%m%d")
OLD_DATE = "2016-03-05 22:22:00.350000"


async def users_handle(storage):
    """migrating users first"""
    counter = 0
    id_map = {}
    print("[migration] migrating %d users" % (len(storage["users"]["data"])))
    for entry in storage["users"]["data"]:
        oid = entry["_id"]
        user = migrateUser(entry)
        storage["users"]["by_oid"][oid] = user  # full
        del user["password"]
        del user["emailConfirmed"]
        del user["username"]
        del user["email"]
        storage["users"]["by_slug"][user["slug"]] = user  # public
        id_map[user["oid"]] = user["slug"]
        counter += 1
    ce = 0
    for entry in storage["users"]["data"]:
        ce += migrateUser_2stage(entry, id_map)
    users_post_migrate()


async def topics_handle(storage):
    """topics from categories and tags"""
    counter = 0
    for t in storage["topics"]["tags"] + storage["topics"]["cats"]:
        if t["slug"] in storage["replacements"]:
            t["slug"] = storage["replacements"][t["slug"]]
            topic = migrateTopic(t)
            storage["topics"]["by_oid"][t["_id"]] = topic
            storage["topics"]["by_slug"][t["slug"]] = topic
            counter += 1
        else:
            print("[migration] topic " + t["slug"] + " ignored")
    for oldslug, newslug in storage["replacements"].items():
        if oldslug != newslug and oldslug in storage["topics"]["by_slug"]:
            oid = storage["topics"]["by_slug"][oldslug]["_id"]
            del storage["topics"]["by_slug"][oldslug]
            storage["topics"]["by_oid"][oid] = storage["topics"]["by_slug"][newslug]
    print("[migration] " + str(counter) + " topics migrated")
    print("[migration] " + str(len(storage["topics"]["by_oid"].values())) + " topics by oid")
    print("[migration] " + str(len(storage["topics"]["by_slug"].values())) + " topics by slug")


async def shouts_handle(storage, args):
    """migrating content items one by one"""
    counter = 0
    discours_author = 0
    anonymous_author = 0
    pub_counter = 0
    ignored = 0
    topics_dataset_bodies = []
    topics_dataset_tlist = []
    for entry in storage["shouts"]["data"]:
        gc.collect()
        # slug
        slug = get_shout_slug(entry)

        # single slug mode
        if "-" in args and slug not in args:
            continue

        # migrate
        shout_dict = await migrateShout(entry, storage)
        if shout_dict:
            storage["shouts"]["by_oid"][entry["_id"]] = shout_dict
            storage["shouts"]["by_slug"][shout_dict["slug"]] = shout_dict
            # shouts.topics
            if not shout_dict["topics"]:
                print("[migration] no topics!")

            # with author
            author = shout_dict["authors"][0]
            if author["slug"] == "discours":
                discours_author += 1
            if author["slug"] == "anonymous":
                anonymous_author += 1
            # print('[migration] ' + shout['slug'] + ' with author ' + author)

            if entry.get("published"):
                if "mdx" in args:
                    export_mdx(shout_dict)
                pub_counter += 1

            # print main counter
            counter += 1
            print(
                "[migration] shouts_handle %d: %s @%s"
                % ((counter + 1), shout_dict["slug"], author["slug"])
            )

            b = bs4.BeautifulSoup(shout_dict["body"], "html.parser")
            texts = [shout_dict["title"].lower().replace(r"[^а-яА-Яa-zA-Z]", "")]
            texts = texts + b.findAll(text=True)
            topics_dataset_bodies.append(" ".join([x.strip().lower() for x in texts]))
            topics_dataset_tlist.append(shout_dict["topics"])
        else:
            ignored += 1

    # np.savetxt('topics_dataset.csv', (topics_dataset_bodies, topics_dataset_tlist), delimiter=',
    # ', fmt='%s')

    print("[migration] " + str(counter) + " content items were migrated")
    print("[migration] " + str(pub_counter) + " have been published")
    print("[migration] " + str(discours_author) + " authored by @discours")
    print("[migration] " + str(anonymous_author) + " authored by @anonymous")


# async def remarks_handle(storage):
#     print("[migration] comments")
#     c = 0
#     for entry_remark in storage["remarks"]["data"]:
#         remark = await migrateRemark(entry_remark, storage)
#         c += 1
#     print("[migration] " + str(c) + " remarks migrated")


async def comments_handle(storage):
    print("[migration] comments")
    id_map = {}
    ignored_counter = 0
    missed_shouts = {}
    for oldcomment in storage["reactions"]["data"]:
        if not oldcomment.get("deleted"):
            reaction = await migrateComment(oldcomment, storage)
            if isinstance(reaction, str):
                missed_shouts[reaction] = oldcomment
            elif isinstance(reaction, Reaction):
                reaction = reaction.dict()
                rid = reaction["id"]
                oid = reaction["oid"]
                id_map[oid] = rid
            else:
                ignored_counter += 1

    for reaction in storage["reactions"]["data"]:
        migrateComment_2stage(reaction, id_map)
    print("[migration] " + str(len(id_map)) + " comments migrated")
    print("[migration] " + str(ignored_counter) + " comments ignored")
    print("[migration] " + str(len(missed_shouts.keys())) + " commented shouts missed")
    missed_counter = 0
    for missed in missed_shouts.values():
        missed_counter += len(missed)
    print("[migration] " + str(missed_counter) + " comments dropped")


async def all_handle(storage, args):
    print("[migration] handle everything")
    await users_handle(storage)
    await topics_handle(storage)
    print("[migration] users and topics are migrated")
    await shouts_handle(storage, args)
    # print("[migration] remarks...")
    # await remarks_handle(storage)
    print("[migration] migrating comments")
    await comments_handle(storage)
    # export_email_subscriptions()
    print("[migration] done!")


def data_load():
    storage = {
        "content_items": {
            "by_oid": {},
            "by_slug": {},
        },
        "shouts": {"by_oid": {}, "by_slug": {}, "data": []},
        "reactions": {"by_oid": {}, "by_slug": {}, "by_content": {}, "data": []},
        "topics": {
            "by_oid": {},
            "by_slug": {},
            "cats": [],
            "tags": [],
        },
        "remarks": {"data": []},
        "users": {"by_oid": {}, "by_slug": {}, "data": []},
        "replacements": json.loads(open("migration/tables/replacements.json").read()),
    }
    try:
        users_data = json.loads(open("migration/data/users.json").read())
        print("[migration.load] " + str(len(users_data)) + " users ")
        tags_data = json.loads(open("migration/data/tags.json").read())
        storage["topics"]["tags"] = tags_data
        print("[migration.load] " + str(len(tags_data)) + " tags ")
        cats_data = json.loads(open("migration/data/content_item_categories.json").read())
        storage["topics"]["cats"] = cats_data
        print("[migration.load] " + str(len(cats_data)) + " cats ")
        comments_data = json.loads(open("migration/data/comments.json").read())
        storage["reactions"]["data"] = comments_data
        print("[migration.load] " + str(len(comments_data)) + " comments ")
        content_data = json.loads(open("migration/data/content_items.json").read())
        storage["shouts"]["data"] = content_data
        print("[migration.load] " + str(len(content_data)) + " content items ")

        remarks_data = json.loads(open("migration/data/remarks.json").read())
        storage["remarks"]["data"] = remarks_data
        print("[migration.load] " + str(len(remarks_data)) + " remarks data ")

        # fill out storage
        for x in users_data:
            storage["users"]["by_oid"][x["_id"]] = x
            # storage['users']['by_slug'][x['slug']] = x
        # no user.slug yet
        print("[migration.load] " + str(len(storage["users"]["by_oid"].keys())) + " users by oid")
        for x in tags_data:
            storage["topics"]["by_oid"][x["_id"]] = x
            storage["topics"]["by_slug"][x["slug"]] = x
        for x in cats_data:
            storage["topics"]["by_oid"][x["_id"]] = x
            storage["topics"]["by_slug"][x["slug"]] = x
        print(
            "[migration.load] " + str(len(storage["topics"]["by_slug"].keys())) + " topics by slug"
        )
        for item in content_data:
            slug = get_shout_slug(item)
            storage["content_items"]["by_slug"][slug] = item
            storage["content_items"]["by_oid"][item["_id"]] = item
        print("[migration.load] " + str(len(content_data)) + " content items")
        for x in comments_data:
            storage["reactions"]["by_oid"][x["_id"]] = x
            cid = x["contentItem"]
            storage["reactions"]["by_content"][cid] = x
            ci = storage["content_items"]["by_oid"].get(cid, {})
            if "slug" in ci:
                storage["reactions"]["by_slug"][ci["slug"]] = x
        print(
            "[migration.load] "
            + str(len(storage["reactions"]["by_content"].keys()))
            + " with comments"
        )
        storage["users"]["data"] = users_data
        storage["topics"]["tags"] = tags_data
        storage["topics"]["cats"] = cats_data
        storage["shouts"]["data"] = content_data
        storage["reactions"]["data"] = comments_data
    except Exception as e:
        raise e
    return storage


async def handling_migration():
    init_tables()
    await all_handle(data_load(), sys.argv)


def process():
    loop = asyncio.get_event_loop()
    loop.run_until_complete(handling_migration())


if __name__ == "__main__":
    process()
33
migration/bson2json.py
Normal file
@@ -0,0 +1,33 @@
import gc
import json
import os

import bson

from .utils import DateTimeEncoder


def json_tables():
    print("[migration] unpack dump/discours/*.bson to migration/data/*.json")
    data = {
        "content_items": [],
        "content_item_categories": [],
        "tags": [],
        "email_subscriptions": [],
        "users": [],
        "comments": [],
        "remarks": [],
    }
    for table in data.keys():
        print("[migration] bson2json for " + table)
        gc.collect()
        lc = []
        bs = open("dump/discours/" + table + ".bson", "rb").read()
        base = 0
        while base < len(bs):
            base, d = bson.decode_document(bs, base)
            lc.append(d)
        data[table] = lc
        open(os.getcwd() + "/migration/data/" + table + ".json", "w").write(
            json.dumps(lc, cls=DateTimeEncoder)
        )
0
migration/data/.gitkeep
Normal file
137
migration/export.py
Normal file
@@ -0,0 +1,137 @@
import json
import os
from datetime import datetime, timezone

import frontmatter

from .extract import extract_html, extract_media
from .utils import DateTimeEncoder

OLD_DATE = "2016-03-05 22:22:00.350000"
EXPORT_DEST = "../discoursio-web/data/"
parentDir = "/".join(os.getcwd().split("/")[:-1])
contentDir = parentDir + "/discoursio-web/content/"
ts = datetime.now(tz=timezone.utc)


def get_metadata(r):
    authors = []
    for a in r["authors"]:
        authors.append(
            {  # a short version for public listings
                "slug": a.slug or "discours",
                "name": a.name or "Дискурс",
                "userpic": a.userpic or "https://discours.io/static/img/discours.png",
            }
        )
    metadata = {}
    metadata["title"] = r.get("title", "").replace("{", "(").replace("}", ")")
    metadata["authors"] = authors
    metadata["createdAt"] = r.get("createdAt", ts)
    metadata["layout"] = r["layout"]
    metadata["topics"] = [topic for topic in r["topics"]]
    metadata["topics"].sort()
    if r.get("cover", False):
        metadata["cover"] = r.get("cover")
    return metadata


def export_mdx(r):
    # print('[export] mdx %s' % r['slug'])
    content = ""
    metadata = get_metadata(r)
    content = frontmatter.dumps(frontmatter.Post(r["body"], **metadata))
    ext = "mdx"
    filepath = contentDir + r["slug"]
    bc = bytes(content, "utf-8").decode("utf-8", "ignore")
    open(filepath + "." + ext, "w").write(bc)


def export_body(shout, storage):
    entry = storage["content_items"]["by_oid"][shout["oid"]]
    if entry:
        body = extract_html(entry)
        media = extract_media(entry)
        shout["body"] = body  # prepare_html_body(entry)  # prepare_md_body(entry)
        shout["media"] = media
        export_mdx(shout)
        print("[export] html for %s" % shout["slug"])
        open(contentDir + shout["slug"] + ".html", "w").write(body)
    else:
        raise Exception("no content_items entry found")


def export_slug(slug, storage):
    shout = storage["shouts"]["by_slug"][slug]
    shout = storage["shouts"]["by_slug"].get(slug)
    assert shout, "[export] no shout found by slug: %s " % slug
    author = shout["authors"][0]
    assert author, "[export] no author error"
    export_body(shout, storage)


def export_email_subscriptions():
    email_subscriptions_data = json.loads(open("migration/data/email_subscriptions.json").read())
    for data in email_subscriptions_data:
        # TODO: migrate to mailgun list manually
        # migrate_email_subscription(data)
        pass
    print("[migration] " + str(len(email_subscriptions_data)) + " email subscriptions exported")


def export_shouts(storage):
    # update what was just migrated or load json again
    if len(storage["users"]["by_slugs"].keys()) == 0:
        storage["users"]["by_slugs"] = json.loads(open(EXPORT_DEST + "authors.json").read())
        print("[migration] " + str(len(storage["users"]["by_slugs"].keys())) + " exported authors ")
    if len(storage["shouts"]["by_slugs"].keys()) == 0:
        storage["shouts"]["by_slugs"] = json.loads(open(EXPORT_DEST + "articles.json").read())
        print(
            "[migration] " + str(len(storage["shouts"]["by_slugs"].keys())) + " exported articles "
        )
    for slug in storage["shouts"]["by_slugs"].keys():
        export_slug(slug, storage)


def export_json(export_articles={}, export_authors={}, export_topics={}, export_comments={}):
    open(EXPORT_DEST + "authors.json", "w").write(
        json.dumps(
            export_authors,
            cls=DateTimeEncoder,
            indent=4,
            sort_keys=True,
            ensure_ascii=False,
        )
    )
    print("[migration] " + str(len(export_authors.items())) + " authors exported")
    open(EXPORT_DEST + "topics.json", "w").write(
        json.dumps(
            export_topics,
            cls=DateTimeEncoder,
            indent=4,
            sort_keys=True,
            ensure_ascii=False,
        )
    )
    print("[migration] " + str(len(export_topics.keys())) + " topics exported")

    open(EXPORT_DEST + "articles.json", "w").write(
        json.dumps(
            export_articles,
            cls=DateTimeEncoder,
            indent=4,
            sort_keys=True,
            ensure_ascii=False,
        )
    )
    print("[migration] " + str(len(export_articles.items())) + " articles exported")
    open(EXPORT_DEST + "comments.json", "w").write(
        json.dumps(
            export_comments,
            cls=DateTimeEncoder,
            indent=4,
            sort_keys=True,
            ensure_ascii=False,
        )
    )
    print("[migration] " + str(len(export_comments.items())) + " exported articles with comments")
276
migration/extract.py
Normal file
@@ -0,0 +1,276 @@
import os
import re

from bs4 import BeautifulSoup

TOOLTIP_REGEX = r"(\/\/\/(.+)\/\/\/)"
contentDir = os.path.join(
    os.path.dirname(os.path.realpath(__file__)), "..", "..", "discoursio-web", "content"
)

cdn = "https://images.discours.io"


def replace_tooltips(body):
    # change if you prefer regexp
    newbody = body
    matches = list(re.finditer(TOOLTIP_REGEX, body, re.IGNORECASE | re.MULTILINE))[1:]
    for match in matches:
        newbody = body.replace(
            match.group(1), '<Tooltip text="' + match.group(2) + '" />'
        )  # NOTE: doesn't work
    if len(matches) > 0:
        print("[extract] found %d tooltips" % len(matches))
    return newbody


# def extract_footnotes(body, shout_dict):
#     parts = body.split("&&&")
#     lll = len(parts)
#     newparts = list(parts)
#     placed = False
#     if lll & 1:
#         if lll > 1:
#             i = 1
#             print("[extract] found %d footnotes in body" % (lll - 1))
#             for part in parts[1:]:
#                 if i & 1:
#                     placed = True
#                     if 'a class="footnote-url" href=' in part:
#                         print("[extract] footnote: " + part)
#                         fn = 'a class="footnote-url" href="'
#                         exxtracted_link = part.split(fn, 1)[1].split('"', 1)[0]
#                         extracted_body = part.split(fn, 1)[1].split(">", 1)[1].split("</a>", 1)[0]
#                         print("[extract] footnote link: " + extracted_link)
#                         with local_session() as session:
#                             Reaction.create(
#                                 {
#                                     "shout": shout_dict["id"],
#                                     "kind": ReactionKind.FOOTNOTE,
#                                     "body": extracted_body,
#                                     "range": str(body.index(fn + link) - len("<"))
#                                     + ":"
#                                     + str(body.index(extracted_body) + len("</a>")),
#                                 }
#                             )
#                         newparts[i] = "<a href='#'>ℹ️</a>"
#                     else:
#                         newparts[i] = part
#                 i += 1
#     return ("".join(newparts), placed)


# def place_tooltips(body):
#     parts = body.split("&&&")
#     lll = len(parts)
#     newparts = list(parts)
#     placed = False
#     if lll & 1:
#         if lll > 1:
#             i = 1
#             print("[extract] found %d tooltips" % (lll - 1))
#             for part in parts[1:]:
#                 if i & 1:
#                     placed = True
#                     if 'a class="footnote-url" href=' in part:
#                         print("[extract] footnote: " + part)
#                         fn = 'a class="footnote-url" href="'
#                         link = part.split(fn, 1)[1].split('"', 1)[0]
#                         extracted_part = part.split(fn, 1)[0] + " " + part.split("/", 1)[-1]
#                         newparts[i] = (
#                             "<Tooltip"
#                             + (' link="' + link + '" ' if link else "")
#                             + ">"
#                             + extracted_part
#                             + "</Tooltip>"
#                         )
#                     else:
#                         newparts[i] = "<Tooltip>%s</Tooltip>" % part
#                     # print('[extract] ' + newparts[i])
#                 else:
#                     # print('[extract] ' + part[:10] + '..')
#                     newparts[i] = part
#                 i += 1
#     return ("".join(newparts), placed)


IMG_REGEX = (
    r"\!\[(.*?)\]\((data\:image\/(png|jpeg|jpg);base64\,((?:[A-Za-z\d+\/]{4})*(?:[A-Za-z\d+\/]{3}="
)
IMG_REGEX += r"|[A-Za-z\d+\/]{2}==)))\)"

parentDir = "/".join(os.getcwd().split("/")[:-1])
public = parentDir + "/discoursio-web/public"
cache = {}


# def reextract_images(body, oid):
#     # change if you prefer regexp
#     matches = list(re.finditer(IMG_REGEX, body, re.IGNORECASE | re.MULTILINE))[1:]
#     i = 0
#     for match in matches:
#         print("[extract] image " + match.group(1))
#         ext = match.group(3)
#         name = oid + str(i)
#         link = public + "/upload/image-" + name + "." + ext
#         img = match.group(4)
#         title = match.group(1)  # NOTE: this is not the title
#         if img not in cache:
#             content = base64.b64decode(img + "==")
#             print(str(len(img)) + " image bytes been written")
#             open("../" + link, "wb").write(content)
#             cache[img] = name
#             i += 1
#         else:
#             print("[extract] image cached " + cache[img])
#         body.replace(
#             str(match), ""
#         )  # WARNING: this does not work
#     return body


IMAGES = {
    "data:image/png": "png",
    "data:image/jpg": "jpg",
    "data:image/jpeg": "jpg",
}

b64 = ";base64,"

di = "data:image"


def extract_media(entry):
    """normalized media extraction method"""
    # media [ { title pic url body } ]}
    kind = entry.get("type")
    if not kind:
        print(entry)
        raise Exception("shout no layout")
    media = []
    for m in entry.get("media") or []:
        # title
        title = m.get("title", "").replace("\n", " ").replace(" ", " ")
        artist = m.get("performer") or m.get("artist")
        if artist:
            title = artist + " - " + title

        # pic
        url = m.get("fileUrl") or m.get("url", "")
        pic = ""
        if m.get("thumborId"):
            pic = cdn + "/unsafe/" + m["thumborId"]

        # url
        if not url:
            if kind == "Image":
                url = pic
            elif "youtubeId" in m:
                url = "https://youtube.com/?watch=" + m["youtubeId"]
            elif "vimeoId" in m:
                url = "https://vimeo.com/" + m["vimeoId"]
        # body
        body = m.get("body") or m.get("literatureBody") or ""
        media.append({"url": url, "pic": pic, "title": title, "body": body})
    return media


def prepare_html_body(entry):
    # body modifications
    body = ""
    kind = entry.get("type")
    addon = ""
    if kind == "Video":
        addon = ""
        for m in entry.get("media") or []:
            if "youtubeId" in m:
                addon += '<iframe width="420" height="345" src="http://www.youtube.com/embed/'
                addon += m["youtubeId"]
                addon += '?autoplay=1" frameborder="0" allowfullscreen></iframe>\n'
            elif "vimeoId" in m:
                addon += '<iframe src="https://player.vimeo.com/video/'
                addon += m["vimeoId"]
                addon += ' width="420" height="345" frameborder="0" allow="autoplay; fullscreen"'
                addon += " allowfullscreen></iframe>"
            else:
                print("[extract] media is not supported")
                print(m)
        body += addon

    elif kind == "Music":
        addon = ""
        for m in entry.get("media") or []:
            artist = m.get("performer")
            trackname = ""
            if artist:
                trackname += artist + " - "
            if "title" in m:
                trackname += m.get("title", "")
            addon += "<figure><figcaption>"
            addon += trackname
            addon += '</figcaption><audio controls src="'
            addon += m.get("fileUrl", "")
            addon += '"></audio></figure>'
        body += addon

    body = extract_html(entry)
    # if body_orig: body += extract_md(html2text(body_orig), entry['_id'])
    return body


def cleanup_html(body: str) -> str:
    new_body = body
    regex_remove = [
        r"style=\"width:\s*\d+px;height:\s*\d+px;\"",
        r"style=\"width:\s*\d+px;\"",
        r"style=\"color: #000000;\"",
        r"style=\"float: none;\"",
        r"style=\"background: white;\"",
        r"class=\"Apple-interchange-newline\"",
        r"class=\"MsoNormalCxSpMiddle\"",
        r"class=\"MsoNormal\"",
        r"lang=\"EN-US\"",
        r"id=\"docs-internal-guid-[\w-]+\"",
        r"<p>\s*</p>",
        r"<span></span>",
        r"<i>\s*</i>",
        r"<b>\s*</b>",
        r"<h1>\s*</h1>",
        r"<h2>\s*</h2>",
        r"<h3>\s*</h3>",
        r"<h4>\s*</h4>",
        r"<div>\s*</div>",
    ]
    regex_replace = {r"<br>\s*</p>": "</p>"}
    changed = True
    while changed:
        # we need several iterations to clean nested tags this way
        changed = False
        new_body_iteration = new_body
        for regex in regex_remove:
            new_body = re.sub(regex, "", new_body)
        for regex, replace in regex_replace.items():
            new_body = re.sub(regex, replace, new_body)
        if new_body_iteration != new_body:
            changed = True
    return new_body


def extract_html(entry, shout_id=None, cleanup=False):
    body_orig = (entry.get("body") or "").replace(r"\(", "(").replace(r"\)", ")")
    if cleanup:
        # we do that before bs parsing to catch the invalid html
        body_clean = cleanup_html(body_orig)
        if body_clean != body_orig:
            print(f"[migration] html cleaned for slug {entry.get('slug', None)}")
            body_orig = body_clean
    # if shout_id:
    #     extract_footnotes(body_orig, shout_id)
    body_html = str(BeautifulSoup(body_orig, features="html.parser"))
    if cleanup:
        # we do that after bs parsing because it can add dummy tags
        body_clean_html = cleanup_html(body_html)
        if body_clean_html != body_html:
            print(f"[migration] html cleaned after bs4 for slug {entry.get('slug', None)}")
            body_html = body_clean_html
    return body_html
1023
migration/html2text/__init__.py
Normal file
File diff suppressed because it is too large
3
migration/html2text/__main__.py
Normal file
@@ -0,0 +1,3 @@
from .cli import main

main()
318
migration/html2text/cli.py
Normal file
@@ -0,0 +1,318 @@
import argparse
import sys

from . import HTML2Text, __version__, config


# noinspection DuplicatedCode
def main() -> None:
    baseurl = ""

    class bcolors:
        HEADER = "\033[95m"
        OKBLUE = "\033[94m"
        OKGREEN = "\033[92m"
        WARNING = "\033[93m"
        FAIL = "\033[91m"
        ENDC = "\033[0m"
        BOLD = "\033[1m"
        UNDERLINE = "\033[4m"

    p = argparse.ArgumentParser()
    p.add_argument(
        "--default-image-alt",
        dest="default_image_alt",
        default=config.DEFAULT_IMAGE_ALT,
        help="The default alt string for images with missing ones",
    )
    p.add_argument(
        "--pad-tables",
        dest="pad_tables",
        action="store_true",
        default=config.PAD_TABLES,
        help="pad the cells to equal column width in tables",
    )
    p.add_argument(
        "--no-wrap-links",
        dest="wrap_links",
        action="store_false",
        default=config.WRAP_LINKS,
        help="don't wrap links during conversion",
    )
    p.add_argument(
        "--wrap-list-items",
        dest="wrap_list_items",
        action="store_true",
        default=config.WRAP_LIST_ITEMS,
        help="wrap list items during conversion",
    )
    p.add_argument(
        "--wrap-tables",
        dest="wrap_tables",
        action="store_true",
        default=config.WRAP_TABLES,
        help="wrap tables",
    )
    p.add_argument(
        "--ignore-emphasis",
        dest="ignore_emphasis",
        action="store_true",
        default=config.IGNORE_EMPHASIS,
        help="don't include any formatting for emphasis",
    )
    p.add_argument(
        "--reference-links",
        dest="inline_links",
        action="store_false",
        default=config.INLINE_LINKS,
        help="use reference style links instead of inline links",
    )
    p.add_argument(
        "--ignore-links",
        dest="ignore_links",
        action="store_true",
        default=config.IGNORE_ANCHORS,
        help="don't include any formatting for links",
    )
    p.add_argument(
        "--ignore-mailto-links",
        action="store_true",
        dest="ignore_mailto_links",
        default=config.IGNORE_MAILTO_LINKS,
        help="don't include mailto: links",
    )
    p.add_argument(
        "--protect-links",
        dest="protect_links",
        action="store_true",
        default=config.PROTECT_LINKS,
        help="protect links from line breaks surrounding them with angle brackets",
    )
    p.add_argument(
        "--ignore-images",
        dest="ignore_images",
        action="store_true",
        default=config.IGNORE_IMAGES,
        help="don't include any formatting for images",
    )
    p.add_argument(
        "--images-as-html",
        dest="images_as_html",
        action="store_true",
        default=config.IMAGES_AS_HTML,
        help=(
            "Always write image tags as raw html; preserves `height`, `width` and "
            "`alt` if possible."
        ),
    )
    p.add_argument(
        "--images-to-alt",
        dest="images_to_alt",
        action="store_true",
        default=config.IMAGES_TO_ALT,
        help="Discard image data, only keep alt text",
    )
    p.add_argument(
        "--images-with-size",
        dest="images_with_size",
        action="store_true",
        default=config.IMAGES_WITH_SIZE,
        help=("Write image tags with height and width attrs as raw html to retain " "dimensions"),
    )
    p.add_argument(
        "-g",
        "--google-doc",
        action="store_true",
        dest="google_doc",
        default=False,
        help="convert an html-exported Google Document",
    )
    p.add_argument(
        "-d",
        "--dash-unordered-list",
        action="store_true",
        dest="ul_style_dash",
        default=False,
        help="use a dash rather than a star for unordered list items",
    )
    p.add_argument(
        "-e",
        "--asterisk-emphasis",
        action="store_true",
        dest="em_style_asterisk",
        default=False,
        help="use an asterisk rather than an underscore for emphasized text",
    )
    p.add_argument(
        "-b",
        "--body-width",
        dest="body_width",
        type=int,
        default=config.BODY_WIDTH,
        help="number of characters per output line, 0 for no wrap",
    )
    p.add_argument(
        "-i",
        "--google-list-indent",
        dest="list_indent",
        type=int,
        default=config.GOOGLE_LIST_INDENT,
        help="number of pixels Google indents nested lists",
    )
    p.add_argument(
        "-s",
        "--hide-strikethrough",
        action="store_true",
        dest="hide_strikethrough",
        default=False,
        help="hide strike-through text. only relevant when -g is " "specified as well",
    )
    p.add_argument(
        "--escape-all",
        action="store_true",
        dest="escape_snob",
        default=False,
        help=(
            "Escape all special characters. Output is less readable, but avoids "
            "corner case formatting issues."
        ),
    )
    p.add_argument(
        "--bypass-tables",
        action="store_true",
        dest="bypass_tables",
        default=config.BYPASS_TABLES,
        help="Format tables in HTML rather than Markdown syntax.",
    )
    p.add_argument(
        "--ignore-tables",
        action="store_true",
        dest="ignore_tables",
        default=config.IGNORE_TABLES,
        help="Ignore table-related tags (table, th, td, tr) " "while keeping rows.",
    )
    p.add_argument(
        "--single-line-break",
        action="store_true",
        dest="single_line_break",
        default=config.SINGLE_LINE_BREAK,
        help=(
            "Use a single line break after a block element rather than two line "
            "breaks. NOTE: Requires --body-width=0"
        ),
    )
    p.add_argument(
        "--unicode-snob",
        action="store_true",
        dest="unicode_snob",
        default=config.UNICODE_SNOB,
        help="Use unicode throughout document",
    )
    p.add_argument(
        "--no-automatic-links",
        action="store_false",
        dest="use_automatic_links",
        default=config.USE_AUTOMATIC_LINKS,
        help="Do not use automatic links wherever applicable",
    )
    p.add_argument(
        "--no-skip-internal-links",
        action="store_false",
        dest="skip_internal_links",
        default=config.SKIP_INTERNAL_LINKS,
        help="Do not skip internal links",
    )
    p.add_argument(
        "--links-after-para",
        action="store_true",
        dest="links_each_paragraph",
        default=config.LINKS_EACH_PARAGRAPH,
        help="Put links after each paragraph instead of document",
    )
    p.add_argument(
        "--mark-code",
        action="store_true",
        dest="mark_code",
        default=config.MARK_CODE,
        help="Mark program code blocks with [code]...[/code]",
    )
    p.add_argument(
        "--decode-errors",
        dest="decode_errors",
        default=config.DECODE_ERRORS,
        help=(
            "What to do in case of decode errors.'ignore', 'strict' and 'replace' are "
            "acceptable values"
        ),
    )
    p.add_argument(
        "--open-quote",
        dest="open_quote",
        default=config.OPEN_QUOTE,
        help="The character used to open quotes",
    )
    p.add_argument(
        "--close-quote",
        dest="close_quote",
        default=config.CLOSE_QUOTE,
        help="The character used to close quotes",
    )
    p.add_argument("--version", action="version", version=".".join(map(str, __version__)))
    p.add_argument("filename", nargs="?")
    p.add_argument("encoding", nargs="?", default="utf-8")
    args = p.parse_args()

    if args.filename and args.filename != "-":
        with open(args.filename, "rb") as fp:
            data = fp.read()
    else:
        data = sys.stdin.buffer.read()

    try:
        html = data.decode(args.encoding, args.decode_errors)
    except UnicodeDecodeError as err:
        warning = bcolors.WARNING + "Warning:" + bcolors.ENDC
        warning += " Use the " + bcolors.OKGREEN
        warning += "--decode-errors=ignore" + bcolors.ENDC + " flag."
        print(warning)
        raise err

    h = HTML2Text(baseurl=baseurl)
    # handle options
    if args.ul_style_dash:
        h.ul_item_mark = "-"
    if args.em_style_asterisk:
        h.emphasis_mark = "*"
        h.strong_mark = "__"

    h.body_width = args.body_width
    h.google_list_indent = args.list_indent
    h.ignore_emphasis = args.ignore_emphasis
    h.ignore_links = args.ignore_links
    h.ignore_mailto_links = args.ignore_mailto_links
|
||||
h.protect_links = args.protect_links
|
||||
h.ignore_images = args.ignore_images
|
||||
h.images_as_html = args.images_as_html
|
||||
h.images_to_alt = args.images_to_alt
|
||||
h.images_with_size = args.images_with_size
|
||||
h.google_doc = args.google_doc
|
||||
h.hide_strikethrough = args.hide_strikethrough
|
||||
h.escape_snob = args.escape_snob
|
||||
h.bypass_tables = args.bypass_tables
|
||||
h.ignore_tables = args.ignore_tables
|
||||
h.single_line_break = args.single_line_break
|
||||
h.inline_links = args.inline_links
|
||||
h.unicode_snob = args.unicode_snob
|
||||
h.use_automatic_links = args.use_automatic_links
|
||||
h.skip_internal_links = args.skip_internal_links
|
||||
h.links_each_paragraph = args.links_each_paragraph
|
||||
h.mark_code = args.mark_code
|
||||
h.wrap_links = args.wrap_links
|
||||
h.wrap_list_items = args.wrap_list_items
|
||||
h.wrap_tables = args.wrap_tables
|
||||
h.pad_tables = args.pad_tables
|
||||
h.default_image_alt = args.default_image_alt
|
||||
h.open_quote = args.open_quote
|
||||
h.close_quote = args.close_quote
|
||||
|
||||
sys.stdout.write(h.handle(html))
|
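For reference, the same conversion driven from Python instead of the CLI — a minimal sketch, assuming the HTML2Text class and handle() method used above are exported by this package (the sample HTML is illustrative):

    from migration.html2text import HTML2Text

    h = HTML2Text(baseurl="")
    h.body_width = 0  # mirrors config.BODY_WIDTH = 0, i.e. no wrapping
    h.ignore_links = False
    print(h.handle("<p>Hello, <a href='https://example.com'>world</a></p>"))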
164
migration/html2text/config.py
Normal file
@@ -0,0 +1,164 @@
import re

# Use Unicode characters instead of their ascii pseudo-replacements
UNICODE_SNOB = True

# Marker to use for marking tables for padding post processing
TABLE_MARKER_FOR_PAD = "special_marker_for_table_padding"
# Escape all special characters. Output is less readable, but avoids
# corner case formatting issues.
ESCAPE_SNOB = True

# Put the links after each paragraph instead of at the end.
LINKS_EACH_PARAGRAPH = False

# Wrap long lines at position. 0 for no wrapping.
BODY_WIDTH = 0

# Don't show internal links (href="#local-anchor") -- corresponding link
# targets won't be visible in the plain text file anyway.
SKIP_INTERNAL_LINKS = False

# Use inline, rather than reference, formatting for images and links
INLINE_LINKS = True

# Protect links from line breaks surrounding them with angle brackets (in
# addition to their square brackets)
PROTECT_LINKS = True
WRAP_LINKS = True

# Wrap list items.
WRAP_LIST_ITEMS = False

# Wrap tables
WRAP_TABLES = False

# Number of pixels Google indents nested lists
GOOGLE_LIST_INDENT = 36

# Values Google and others may use to indicate bold text
BOLD_TEXT_STYLE_VALUES = ("bold", "700", "800", "900")

IGNORE_ANCHORS = False
IGNORE_MAILTO_LINKS = False
IGNORE_IMAGES = False
IMAGES_AS_HTML = False
IMAGES_TO_ALT = False
IMAGES_WITH_SIZE = False
IGNORE_EMPHASIS = False
MARK_CODE = True
DECODE_ERRORS = "strict"
DEFAULT_IMAGE_ALT = ""
PAD_TABLES = True

# Convert links with same href and text to <href> format
# if they are absolute links
USE_AUTOMATIC_LINKS = True

# For checking space-only lines on line 771
RE_SPACE = re.compile(r"\s\+")

RE_ORDERED_LIST_MATCHER = re.compile(r"\d+\.\s")
RE_UNORDERED_LIST_MATCHER = re.compile(r"[-\*\+]\s")
RE_MD_CHARS_MATCHER = re.compile(r"([\\\[\]\(\)])")
RE_MD_CHARS_MATCHER_ALL = re.compile(r"([`\*_{}\[\]\(\)#!])")

# to find links in the text
RE_LINK = re.compile(r"(\[.*?\] ?\(.*?\))|(\[.*?\]:.*?)")

# to find table separators
RE_TABLE = re.compile(r" \| ")

RE_MD_DOT_MATCHER = re.compile(
    r"""
    ^           # start of line
    (\s*\d+)    # optional whitespace and a number
    (\.)        # dot
    (?=\s)      # lookahead assert whitespace
    """,
    re.MULTILINE | re.VERBOSE,
)
RE_MD_PLUS_MATCHER = re.compile(
    r"""
    ^
    (\s*)
    (\+)
    (?=\s)
    """,
    flags=re.MULTILINE | re.VERBOSE,
)
RE_MD_DASH_MATCHER = re.compile(
    r"""
    ^
    (\s*)
    (-)
    (?=\s|\-)  # followed by whitespace (bullet list, or spaced out hr)
               # or another dash (header or hr)
    """,
    flags=re.MULTILINE | re.VERBOSE,
)
RE_SLASH_CHARS = r"\`*_{}[]()#+-.!"
RE_MD_BACKSLASH_MATCHER = re.compile(
    r"""
    (\\)      # match one slash
    (?=[%s])  # followed by a char that requires escaping
    """
    % re.escape(RE_SLASH_CHARS),
    flags=re.VERBOSE,
)

UNIFIABLE = {
    "rsquo": "'",
    "lsquo": "'",
    "rdquo": '"',
    "ldquo": '"',
    "copy": "(C)",
    "mdash": "--",
    "nbsp": " ",
    "rarr": "->",
    "larr": "<-",
    "middot": "*",
    "ndash": "-",
    "oelig": "oe",
    "aelig": "ae",
    "agrave": "a",
    "aacute": "a",
    "acirc": "a",
    "atilde": "a",
    "auml": "a",
    "aring": "a",
    "egrave": "e",
    "eacute": "e",
    "ecirc": "e",
    "euml": "e",
    "igrave": "i",
    "iacute": "i",
    "icirc": "i",
    "iuml": "i",
    "ograve": "o",
    "oacute": "o",
    "ocirc": "o",
    "otilde": "o",
    "ouml": "o",
    "ugrave": "u",
    "uacute": "u",
    "ucirc": "u",
    "uuml": "u",
    "lrm": "",
    "rlm": "",
}

# Format tables in HTML rather than Markdown syntax
BYPASS_TABLES = False
# Ignore table-related tags (table, th, td, tr) while keeping rows
IGNORE_TABLES = False


# Use a single line break after a block element rather than two line breaks.
# NOTE: Requires body width setting to be 0.
SINGLE_LINE_BREAK = False


# Use double quotation marks when converting the <q> tag.
OPEN_QUOTE = '"'
CLOSE_QUOTE = '"'
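To make the Markdown-escaping regexes above concrete, a standalone sketch (it re-declares RE_MD_DOT_MATCHER locally so it runs without the package on the path):

    import re

    RE_MD_DOT_MATCHER = re.compile(
        r"""
        ^           # start of line
        (\s*\d+)    # optional whitespace and a number
        (\.)        # dot
        (?=\s)      # lookahead assert whitespace
        """,
        re.MULTILINE | re.VERBOSE,
    )

    # "1. not a list" would otherwise render as an ordered list in Markdown,
    # so the dot gets backslash-escaped, exactly as escape_md_section() does
    # in utils.py below.
    print(RE_MD_DOT_MATCHER.sub(r"\1\\\2", "1. not a list"))  # 1\. not a list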
18
migration/html2text/elements.py
Normal file
@@ -0,0 +1,18 @@
from typing import Dict, Optional


class AnchorElement:
    __slots__ = ["attrs", "count", "outcount"]

    def __init__(self, attrs: Dict[str, Optional[str]], count: int, outcount: int):
        self.attrs = attrs
        self.count = count
        self.outcount = outcount


class ListElement:
    __slots__ = ["name", "num"]

    def __init__(self, name: str, num: int):
        self.name = name
        self.num = num
0
migration/html2text/py.typed
Normal file
3
migration/html2text/typing.py
Normal file
@@ -0,0 +1,3 @@
class OutCallback:
    def __call__(self, s: str) -> None:
        ...
282
migration/html2text/utils.py
Normal file
@@ -0,0 +1,282 @@
import html.entities
from typing import Dict, List, Optional

from . import config

unifiable_n = {
    html.entities.name2codepoint[k]: v for k, v in config.UNIFIABLE.items() if k != "nbsp"
}


def hn(tag: str) -> int:
    if tag[0] == "h" and len(tag) == 2:
        n = tag[1]
        if "0" < n <= "9":
            return int(n)
    return 0


def dumb_property_dict(style: str) -> Dict[str, str]:
    """
    :returns: A hash of css attributes
    """
    return {
        x.strip().lower(): y.strip().lower()
        for x, y in [z.split(":", 1) for z in style.split(";") if ":" in z]
    }


def dumb_css_parser(data: str) -> Dict[str, Dict[str, str]]:
    """
    :type data: str

    :returns: A hash of css selectors, each of which contains a hash of
    css attributes.
    :rtype: dict
    """
    # remove @import sentences
    data += ";"
    importIndex = data.find("@import")
    while importIndex != -1:
        data = data[0:importIndex] + data[data.find(";", importIndex) + 1 :]
        importIndex = data.find("@import")

    # parse the css. reverted from dictionary comprehension in order to
    # support older pythons
    pairs = [x.split("{") for x in data.split("}") if "{" in x.strip()]
    try:
        elements = {a.strip(): dumb_property_dict(b) for a, b in pairs}
    except ValueError:
        elements = {}  # not that important

    return elements


def element_style(
    attrs: Dict[str, Optional[str]],
    style_def: Dict[str, Dict[str, str]],
    parent_style: Dict[str, str],
) -> Dict[str, str]:
    """
    :type attrs: dict
    :type style_def: dict
    :type parent_style: dict

    :returns: A hash of the 'final' style attributes of the element
    :rtype: dict
    """
    style = parent_style.copy()
    attrs_class = attrs.get("class")
    if attrs_class:
        for css_class in attrs_class.split():
            css_style = style_def.get("." + css_class, {})
            style.update(css_style)
    attrs_style = attrs.get("style")
    if attrs_style:
        immediate_style = dumb_property_dict(attrs_style)
        style.update(immediate_style)

    return style


def google_list_style(style: Dict[str, str]) -> str:
    """
    Finds out whether this is an ordered or unordered list

    :type style: dict

    :rtype: str
    """
    if "list-style-type" in style:
        list_style = style["list-style-type"]
        if list_style in ["disc", "circle", "square", "none"]:
            return "ul"

    return "ol"


def google_has_height(style: Dict[str, str]) -> bool:
    """
    Check if the style of the element has the 'height' attribute
    explicitly defined

    :type style: dict

    :rtype: bool
    """
    return "height" in style


def google_text_emphasis(style: Dict[str, str]) -> List[str]:
    """
    :type style: dict

    :returns: A list of all emphasis modifiers of the element
    :rtype: list
    """
    emphasis = []
    if "text-decoration" in style:
        emphasis.append(style["text-decoration"])
    if "font-style" in style:
        emphasis.append(style["font-style"])
    if "font-weight" in style:
        emphasis.append(style["font-weight"])

    return emphasis


def google_fixed_width_font(style: Dict[str, str]) -> bool:
    """
    Check if the css of the current element defines a fixed width font

    :type style: dict

    :rtype: bool
    """
    font_family = ""
    if "font-family" in style:
        font_family = style["font-family"]
    return "courier new" == font_family or "consolas" == font_family


def list_numbering_start(attrs: Dict[str, Optional[str]]) -> int:
    """
    Extract numbering from list element attributes

    :type attrs: dict

    :rtype: int
    """
    attrs_start = attrs.get("start")
    if attrs_start:
        try:
            return int(attrs_start) - 1
        except ValueError:
            pass

    return 0


def skipwrap(para: str, wrap_links: bool, wrap_list_items: bool, wrap_tables: bool) -> bool:
    # If it appears to contain a link
    # don't wrap
    if not wrap_links and config.RE_LINK.search(para):
        return True
    # If the text begins with four spaces or one tab, it's a code block;
    # don't wrap
    if para[0:4] == "    " or para[0] == "\t":
        return True

    # If the text begins with only two "--", possibly preceded by
    # whitespace, that's an emdash; so wrap.
    stripped = para.lstrip()
    if stripped[0:2] == "--" and len(stripped) > 2 and stripped[2] != "-":
        return False

    # I'm not sure what this is for; I thought it was to detect lists,
    # but there's a <br>-inside-<span> case in one of the tests that
    # also depends upon it.
    if stripped[0:1] in ("-", "*") and not stripped[0:2] == "**":
        return not wrap_list_items

    # If text contains a pipe character it is likely a table
    if not wrap_tables and config.RE_TABLE.search(para):
        return True

    # If the text begins with a single -, *, or +, followed by a space,
    # or an integer, followed by a ., followed by a space (in either
    # case optionally proceeded by whitespace), it's a list; don't wrap.
    return bool(
        config.RE_ORDERED_LIST_MATCHER.match(stripped)
        or config.RE_UNORDERED_LIST_MATCHER.match(stripped)
    )


def escape_md(text: str) -> str:
    """
    Escapes markdown-sensitive characters within other markdown
    constructs.
    """
    return config.RE_MD_CHARS_MATCHER.sub(r"\\\1", text)


def escape_md_section(text: str, snob: bool = False) -> str:
    """
    Escapes markdown-sensitive characters across whole document sections.
    """
    text = config.RE_MD_BACKSLASH_MATCHER.sub(r"\\\1", text)

    if snob:
        text = config.RE_MD_CHARS_MATCHER_ALL.sub(r"\\\1", text)

    text = config.RE_MD_DOT_MATCHER.sub(r"\1\\\2", text)
    text = config.RE_MD_PLUS_MATCHER.sub(r"\1\\\2", text)
    text = config.RE_MD_DASH_MATCHER.sub(r"\1\\\2", text)

    return text


def reformat_table(lines: List[str], right_margin: int) -> List[str]:
    """
    Given the lines of a table,
    pads the cells and returns the new lines
    """
    # find the maximum width of the columns
    max_width = [len(x.rstrip()) + right_margin for x in lines[0].split("|")]
    max_cols = len(max_width)
    for line in lines:
        cols = [x.rstrip() for x in line.split("|")]
        num_cols = len(cols)

        # don't drop any data if colspan attributes result in unequal lengths
        if num_cols < max_cols:
            cols += [""] * (max_cols - num_cols)
        elif max_cols < num_cols:
            max_width += [len(x) + right_margin for x in cols[-(num_cols - max_cols) :]]
            max_cols = num_cols

        max_width = [max(len(x) + right_margin, old_len) for x, old_len in zip(cols, max_width)]

    # reformat
    new_lines = []
    for line in lines:
        cols = [x.rstrip() for x in line.split("|")]
        if set(line.strip()) == set("-|"):
            filler = "-"
            new_cols = [
                x.rstrip() + (filler * (M - len(x.rstrip()))) for x, M in zip(cols, max_width)
            ]
            new_lines.append("|-" + "|".join(new_cols) + "|")
        else:
            filler = " "
            new_cols = [
                x.rstrip() + (filler * (M - len(x.rstrip()))) for x, M in zip(cols, max_width)
            ]
            new_lines.append("| " + "|".join(new_cols) + "|")
    return new_lines


def pad_tables_in_text(text: str, right_margin: int = 1) -> str:
    """
    Provide padding for tables in the text
    """
    lines = text.split("\n")
    table_buffer = []  # type: List[str]
    table_started = False
    new_lines = []
    for line in lines:
        # Toggle table started
        if config.TABLE_MARKER_FOR_PAD in line:
            table_started = not table_started
            if not table_started:
                table = reformat_table(table_buffer, right_margin)
                new_lines.extend(table)
                table_buffer = []
                new_lines.append("")
            continue
        # Process lines
        if table_started:
            table_buffer.append(line)
        else:
            new_lines.append(line)
    return "\n".join(new_lines)
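A quick sketch of how the table-padding helpers above fit together, assuming the package layout in this diff is importable from the repo root (the toy table is illustrative):

    from migration.html2text import config
    from migration.html2text.utils import pad_tables_in_text

    # The converter brackets each table with TABLE_MARKER_FOR_PAD lines;
    # pad_tables_in_text() strips the markers and pads every cell.
    text = "\n".join(
        [
            config.TABLE_MARKER_FOR_PAD,
            "a|bb",
            "-|-",
            "1|2",
            config.TABLE_MARKER_FOR_PAD,
        ]
    )
    print(pad_tables_in_text(text))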
196
migration/tables/comments.py
Normal file
@@ -0,0 +1,196 @@
from datetime import datetime, timezone

from dateutil.parser import parse as date_parse

from base.orm import local_session
from migration.html2text import html2text
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutReactionsFollower
from orm.topic import TopicFollower
from orm.user import User

ts = datetime.now(tz=timezone.utc)


def auto_followers(session, topics, reaction_dict):
    # creating shout's reactions following for reaction author
    following1 = (
        session.query(ShoutReactionsFollower)
        .where(ShoutReactionsFollower.follower == reaction_dict["createdBy"])
        .filter(ShoutReactionsFollower.shout == reaction_dict["shout"])
        .first()
    )
    if not following1:
        following1 = ShoutReactionsFollower.create(
            follower=reaction_dict["createdBy"], shout=reaction_dict["shout"], auto=True
        )
        session.add(following1)
    # creating topics followings for reaction author
    for t in topics:
        tf = (
            session.query(TopicFollower)
            .where(TopicFollower.follower == reaction_dict["createdBy"])
            .filter(TopicFollower.topic == t["id"])
            .first()
        )
        if not tf:
            topic_following = TopicFollower.create(
                follower=reaction_dict["createdBy"], topic=t["id"], auto=True
            )
            session.add(topic_following)


def migrate_ratings(session, entry, reaction_dict):
    for comment_rating_old in entry.get("ratings", []):
        rater = session.query(User).filter(User.oid == comment_rating_old["createdBy"]).first()
        re_reaction_dict = {
            "shout": reaction_dict["shout"],
            "replyTo": reaction_dict["id"],
            "kind": ReactionKind.LIKE if comment_rating_old["value"] > 0 else ReactionKind.DISLIKE,
            "createdBy": rater.id if rater else 1,
        }
        cts = comment_rating_old.get("createdAt")
        if cts:
            re_reaction_dict["createdAt"] = date_parse(cts)
        try:
            # creating reaction from old rating
            rr = Reaction.create(**re_reaction_dict)
            following2 = (
                session.query(ShoutReactionsFollower)
                .where(ShoutReactionsFollower.follower == re_reaction_dict["createdBy"])
                .filter(ShoutReactionsFollower.shout == rr.shout)
                .first()
            )
            if not following2:
                following2 = ShoutReactionsFollower.create(
                    follower=re_reaction_dict["createdBy"], shout=rr.shout, auto=True
                )
                session.add(following2)
            session.add(rr)

        except Exception as e:
            print("[migration] comment rating error: %r" % re_reaction_dict)
            raise e
    session.commit()


async def migrate(entry, storage):
    """
    {
        "_id": "hdtwS8fSyFLxXCgSC",
        "body": "<p>",
        "contentItem": "mnK8KsJHPRi8DrybQ",
        "createdBy": "bMFPuyNg6qAD2mhXe",
        "thread": "01/",
        "createdAt": "2016-04-19 04:33:53+00:00",
        "ratings": [
            { "createdBy": "AqmRukvRiExNpAe8C", "value": 1 },
            { "createdBy": "YdE76Wth3yqymKEu5", "value": 1 }
        ],
        "rating": 2,
        "updatedAt": "2020-05-27 19:22:57.091000+00:00",
        "updatedBy": "0"
    }
    ->
    type Reaction {
        id: Int!
        shout: Shout!
        createdAt: DateTime!
        createdBy: User!
        updatedAt: DateTime
        deletedAt: DateTime
        deletedBy: User
        range: String # full / 0:2340
        kind: ReactionKind!
        body: String
        replyTo: Reaction
        stat: Stat
        old_id: String
        old_thread: String
    }
    """
    old_ts = entry.get("createdAt")
    reaction_dict = {
        "createdAt": (ts if not old_ts else date_parse(old_ts)),
        "body": html2text(entry.get("body", "")),
        "oid": entry["_id"],
    }
    shout_oid = entry.get("contentItem")
    if shout_oid not in storage["shouts"]["by_oid"]:
        if len(storage["shouts"]["by_oid"]) > 0:
            return shout_oid
        else:
            print("[migration] no shouts migrated yet")
            raise Exception
    else:
        stage = "started"
        reaction = None
        with local_session() as session:
            author = session.query(User).filter(User.oid == entry["createdBy"]).first()
            old_shout = storage["shouts"]["by_oid"].get(shout_oid)
            if not old_shout:
                raise Exception("no old shout in storage")
            else:
                stage = "author and old id found"
                try:
                    shout = session.query(Shout).where(Shout.slug == old_shout["slug"]).one()
                    if shout:
                        reaction_dict["shout"] = shout.id
                        reaction_dict["createdBy"] = author.id if author else 1
                        reaction_dict["kind"] = ReactionKind.COMMENT

                        # creating reaction from old comment
                        reaction = Reaction.create(**reaction_dict)
                        session.add(reaction)
                        # session.commit()
                        stage = "new reaction committed"
                        reaction_dict = reaction.dict()
                        topics = [t.dict() for t in shout.topics]
                        auto_followers(session, topics, reaction_dict)

                        migrate_ratings(session, entry, reaction_dict)

                        return reaction
                except Exception as e:
                    print(e)
                    print(reaction)
                    raise Exception(stage)


def migrate_2stage(old_comment, idmap):
    if old_comment.get("body"):
        # the old export stores the source id under either "oid" or "_id"
        new_id = idmap.get(old_comment.get("oid")) or idmap.get(old_comment.get("_id"))
        if new_id:
            new_replyto_id = None
            old_replyto_id = old_comment.get("replyTo")
            if old_replyto_id:
                new_replyto_id = int(idmap.get(old_replyto_id, "0"))
            with local_session() as session:
                comment = session.query(Reaction).where(Reaction.id == new_id).first()
                try:
                    if new_replyto_id:
                        new_reply = (
                            session.query(Reaction).where(Reaction.id == new_replyto_id).first()
                        )
                        if not new_reply:
                            print(new_replyto_id)
                            raise Exception("cannot find reply by id!")
                        comment.replyTo = new_reply.id
                        session.add(comment)
                    srf = (
                        session.query(ShoutReactionsFollower)
                        .where(ShoutReactionsFollower.shout == comment.shout)
                        .filter(ShoutReactionsFollower.follower == comment.createdBy)
                        .first()
                    )
                    if not srf:
                        srf = ShoutReactionsFollower.create(
                            shout=comment.shout, follower=comment.createdBy, auto=True
                        )
                        session.add(srf)
                    session.commit()
                except Exception:
                    raise Exception("cannot find a comment by oldid")
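The two-stage id remapping that migrate_2stage() performs can be sketched with plain dicts, no database (all ids here are illustrative):

    # stage 1 builds idmap: old Mongo-style oid -> new integer id (as str)
    idmap = {"hdtwS8fSyFLxXCgSC": "101", "AqmRukvRiExNpAe8C": "102"}

    old_comment = {
        "body": "<p>hi</p>",
        "oid": "hdtwS8fSyFLxXCgSC",
        "replyTo": "AqmRukvRiExNpAe8C",
    }
    # the old export stores the source id under either "oid" or "_id"
    new_id = idmap.get(old_comment.get("oid")) or idmap.get(old_comment.get("_id"))
    new_replyto_id = int(idmap.get(old_comment.get("replyTo"), "0"))
    print(new_id, new_replyto_id)  # 101 102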
399
migration/tables/content_items.py
Normal file
@@ -0,0 +1,399 @@
import json
import re
from datetime import datetime, timezone

from dateutil.parser import parse as date_parse
from sqlalchemy.exc import IntegrityError
from transliterate import translit

from base.orm import local_session
from migration.extract import extract_html, extract_media
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutReactionsFollower, ShoutTopic
from orm.topic import Topic, TopicFollower
from orm.user import User
from services.stat.viewed import ViewedStorage

OLD_DATE = "2016-03-05 22:22:00.350000"
ts = datetime.now(tz=timezone.utc)
type2layout = {
    "Article": "article",
    "Literature": "literature",
    "Music": "music",
    "Video": "video",
    "Image": "image",
}

anondict = {"slug": "anonymous", "id": 1, "name": "Аноним"}
discours = {"slug": "discours", "id": 2, "name": "Дискурс"}


def get_shout_slug(entry):
    slug = entry.get("slug", "")
    if not slug:
        for friend in entry.get("friendlySlugs", []):
            slug = friend.get("slug", "")
            if slug:
                break
    slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
    return slug


def create_author_from_app(app):
    user = None
    userdata = None
    slug = ""
    # check if email is used
    if app["email"]:
        with local_session() as session:
            user = session.query(User).where(User.email == app["email"]).first()
            if not user:
                # print('[migration] app %r' % app)
                name = app.get("name")
                if name:
                    slug = translit(name, "ru", reversed=True).lower()
                    slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
                    print("[migration] created slug %s" % slug)
                # check if slug is used
                if slug:
                    user = session.query(User).where(User.slug == slug).first()

                # get slug from email
                if user:
                    slug = app["email"].split("@")[0]
                    user = session.query(User).where(User.slug == slug).first()
                    # one more try
                    if user:
                        slug += "-author"
                        user = session.query(User).where(User.slug == slug).first()

                # create user with application data
                if not user:
                    userdata = {
                        "username": app["email"],
                        "email": app["email"],
                        "name": app.get("name", ""),
                        "emailConfirmed": False,
                        "slug": slug,
                        "createdAt": ts,
                        "lastSeen": ts,
                    }
                    # print('[migration] userdata %r' % userdata)
                    user = User.create(**userdata)
                    session.add(user)
                    session.commit()
                    userdata["id"] = user.id

            userdata = user.dict()
        return userdata
    else:
        raise Exception("app is not ok", app)


async def create_shout(shout_dict):
    s = Shout.create(**shout_dict)
    author = s.authors[0]
    with local_session() as session:
        srf = (
            session.query(ShoutReactionsFollower)
            .where(ShoutReactionsFollower.shout == s.id)
            .filter(ShoutReactionsFollower.follower == author.id)
            .first()
        )
        if not srf:
            srf = ShoutReactionsFollower.create(shout=s.id, follower=author.id, auto=True)
            session.add(srf)
        session.commit()
    return s


async def get_user(entry, storage):
    app = entry.get("application")
    userdata = None
    user_oid = None
    if app:
        userdata = create_author_from_app(app)
    else:
        user_oid = entry.get("createdBy")
        if user_oid == "0":
            userdata = discours
        elif user_oid:
            userdata = storage["users"]["by_oid"].get(user_oid)
        if not userdata:
            print("no userdata by oid, anonymous")
            userdata = anondict
            print(app)
    # cleanup slug
    if userdata:
        slug = userdata.get("slug", "")
        if slug:
            slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
            userdata["slug"] = slug
    else:
        userdata = anondict

    user = await process_user(userdata, storage, user_oid)
    return user, user_oid


async def migrate(entry, storage):
    author, user_oid = await get_user(entry, storage)
    r = {
        "layout": type2layout[entry["type"]],
        "title": entry["title"],
        "authors": [
            author,
        ],
        "slug": get_shout_slug(entry),
        "cover": (
            "https://images.discours.io/unsafe/" + entry["thumborId"]
            if entry.get("thumborId")
            else entry.get("image", {}).get("url")
        ),
        "visibility": "public" if entry.get("published") else "community",
        "publishedAt": date_parse(entry.get("publishedAt")) if entry.get("published") else None,
        "deletedAt": date_parse(entry.get("deletedAt")) if entry.get("deletedAt") else None,
        "createdAt": date_parse(entry.get("createdAt", OLD_DATE)),
        "updatedAt": date_parse(entry["updatedAt"]) if "updatedAt" in entry else ts,
        "createdBy": author.id,
        "topics": await add_topics_follower(entry, storage, author),
        "body": extract_html(entry, cleanup=True),
    }

    # main topic patch
    r["mainTopic"] = r["topics"][0]

    # published author auto-confirm
    if entry.get("published"):
        with local_session() as session:
            # update user.emailConfirmed if published
            author.emailConfirmed = True
            session.add(author)
            session.commit()

    # media
    media = extract_media(entry)
    r["media"] = json.dumps(media, ensure_ascii=True) if media else None

    # ----------------------------------- copy
    shout_dict = r.copy()
    del shout_dict["topics"]

    try:
        # save shout to db
        shout_dict["oid"] = entry.get("_id", "")
        shout = await create_shout(shout_dict)
    except IntegrityError as e:
        print("[migration] create_shout integrity error", e)
        shout = await resolve_create_shout(shout_dict)
    except Exception as e:
        raise Exception(e)

    # update data
    shout_dict = shout.dict()
    shout_dict["authors"] = [
        author.dict(),
    ]

    # shout topics aftermath
    shout_dict["topics"] = await topics_aftermath(r, storage)

    # content_item ratings to reactions
    await content_ratings_to_reactions(entry, shout_dict["slug"])

    # shout views
    await ViewedStorage.increment(
        shout_dict["slug"], amount=entry.get("views", 1), viewer="old-discours"
    )
    # del shout_dict['ratings']

    storage["shouts"]["by_oid"][entry["_id"]] = shout_dict
    storage["shouts"]["by_slug"][shout_dict["slug"]] = shout_dict
    return shout_dict


async def add_topics_follower(entry, storage, user):
    topics = set()
    category = entry.get("category")
    topics_by_oid = storage["topics"]["by_oid"]
    oids = [
        category,
    ] + entry.get("tags", [])
    for toid in oids:
        tslug = topics_by_oid.get(toid, {}).get("slug")
        if tslug:
            topics.add(tslug)
    ttt = list(topics)
    # add author as TopicFollower
    with local_session() as session:
        for tpcslug in topics:
            try:
                tpc = session.query(Topic).where(Topic.slug == tpcslug).first()
                if tpc:
                    tf = (
                        session.query(TopicFollower)
                        .where(TopicFollower.follower == user.id)
                        .filter(TopicFollower.topic == tpc.id)
                        .first()
                    )
                    if not tf:
                        tf = TopicFollower.create(topic=tpc.id, follower=user.id, auto=True)
                        session.add(tf)
                        session.commit()
            except IntegrityError:
                print("[migration.shout] hidden by topic " + tpc.slug)
    # main topic
    maintopic = storage["replacements"].get(topics_by_oid.get(category, {}).get("slug"))
    if maintopic in ttt:
        ttt.remove(maintopic)
    ttt.insert(0, maintopic)
    return ttt


async def process_user(userdata, storage, oid):
    with local_session() as session:
        uid = userdata.get("id")  # anonymous as fallback
        if not uid:
            print(userdata)
            print("has no id field, set it @anonymous")
            userdata = anondict
            uid = 1
        user = session.query(User).filter(User.id == uid).first()
        if not user:
            try:
                slug = userdata["slug"].lower().strip()
                slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
                userdata["slug"] = slug
                user = User.create(**userdata)
                session.add(user)
                session.commit()
            except IntegrityError:
                print(f"[migration] could not create user with slug {userdata['slug']}")
                print("[migration] from userdata")
                print(userdata)
                raise Exception("[migration] cannot create user in content_items.get_user()")
        if user.id == 946:
            print("[migration] ***************** ALPINA")
        if user.id == 2:
            print("[migration] +++++++++++++++++ DISCOURS")
        userdata["id"] = user.id
        userdata["createdAt"] = user.createdAt
        storage["users"]["by_slug"][userdata["slug"]] = userdata
        storage["users"]["by_oid"][oid] = userdata
        if not user:
            raise Exception("could not get a user")
        return user


async def resolve_create_shout(shout_dict):
    with local_session() as session:
        s = session.query(Shout).filter(Shout.slug == shout_dict["slug"]).first()
        bump = False
        if s:
            if s.createdAt != shout_dict["createdAt"]:
                # create new with different slug
                shout_dict["slug"] += "-" + shout_dict["layout"]
                try:
                    await create_shout(shout_dict)
                except IntegrityError as e:
                    print(e)
                    bump = True
            else:
                # update old
                for key in shout_dict:
                    if key in s.__dict__:
                        if s.__dict__[key] != shout_dict[key]:
                            print("[migration] shout already exists, but differs in %s" % key)
                            bump = True
                    else:
                        print("[migration] shout already exists, but lacks %s" % key)
                        bump = True
                if bump:
                    s.update(shout_dict)
        else:
            print("[migration] something went wrong with shout: \n%r" % shout_dict)
            raise Exception("")
        session.commit()
    return s


async def topics_aftermath(entry, storage):
    r = []
    for tpc in filter(lambda x: bool(x), entry["topics"]):
        oldslug = tpc
        newslug = storage["replacements"].get(oldslug, oldslug)

        if newslug:
            with local_session() as session:
                shout = session.query(Shout).where(Shout.slug == entry["slug"]).first()
                new_topic = session.query(Topic).where(Topic.slug == newslug).first()

                shout_topic_old = (
                    session.query(ShoutTopic)
                    .join(Shout)
                    .join(Topic)
                    .filter(Shout.slug == entry["slug"])
                    .filter(Topic.slug == oldslug)
                    .first()
                )
                if shout_topic_old:
                    shout_topic_old.update({"topic": new_topic.id})
                else:
                    shout_topic_new = (
                        session.query(ShoutTopic)
                        .join(Shout)
                        .join(Topic)
                        .filter(Shout.slug == entry["slug"])
                        .filter(Topic.slug == newslug)
                        .first()
                    )
                    if not shout_topic_new:
                        try:
                            ShoutTopic.create(**{"shout": shout.id, "topic": new_topic.id})
                        except Exception:
                            print("[migration] shout topic error: " + newslug)
                session.commit()
            if newslug not in r:
                r.append(newslug)
        else:
            print("[migration] ignored topic slug: \n%r" % tpc)
            # raise Exception
    return r


async def content_ratings_to_reactions(entry, slug):
    try:
        with local_session() as session:
            for content_rating in entry.get("ratings", []):
                rater = (
                    session.query(User).filter(User.oid == content_rating["createdBy"]).first()
                ) or User.default_user
                shout = session.query(Shout).where(Shout.slug == slug).first()
                cts = content_rating.get("createdAt")
                reaction_dict = {
                    "createdAt": date_parse(cts) if cts else None,
                    "kind": ReactionKind.LIKE
                    if content_rating["value"] > 0
                    else ReactionKind.DISLIKE,
                    "createdBy": rater.id,
                    "shout": shout.id,
                }
                reaction = (
                    session.query(Reaction)
                    .filter(Reaction.shout == reaction_dict["shout"])
                    .filter(Reaction.createdBy == reaction_dict["createdBy"])
                    .filter(Reaction.kind == reaction_dict["kind"])
                    .first()
                )
                if reaction:
                    k = ReactionKind.AGREE if content_rating["value"] > 0 else ReactionKind.DISAGREE
                    reaction_dict["kind"] = k
                    reaction.update(reaction_dict)
                    session.add(reaction)
                else:
                    rea = Reaction.create(**reaction_dict)
                    session.add(rea)
                # shout_dict['ratings'].append(reaction_dict)

            session.commit()
    except Exception:
        print("[migration] content_item.ratings error: \n%r" % content_rating)
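get_shout_slug() above is a pure function and easy to check in isolation; a runnable copy (the entry dict is illustrative, only "slug" and "friendlySlugs" are read):

    import re

    def get_shout_slug(entry):
        # same logic as in migration/tables/content_items.py above
        slug = entry.get("slug", "")
        if not slug:
            for friend in entry.get("friendlySlugs", []):
                slug = friend.get("slug", "")
                if slug:
                    break
        slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
        return slug

    print(get_shout_slug({"slug": "", "friendlySlugs": [{"slug": "my cool post!"}]}))
    # -> my-cool-post-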
35
migration/tables/remarks.py
Normal file
@@ -0,0 +1,35 @@
# from base.orm import local_session

# from migration.extract import extract_md
# from migration.html2text import html2text
# from orm.reaction import Reaction, ReactionKind


# def migrate(entry, storage):
#     post_oid = entry["contentItem"]
#     print(post_oid)
#     shout_dict = storage["shouts"]["by_oid"].get(post_oid)
#     if shout_dict:
#         print(shout_dict["body"])
#         remark = {
#             "shout": shout_dict["id"],
#             "body": extract_md(html2text(entry["body"]), shout_dict),
#             "kind": ReactionKind.REMARK,
#         }
#
#         if entry.get("textBefore"):
#             remark["range"] = (
#                 str(shout_dict["body"].index(entry["textBefore"] or ""))
#                 + ":"
#                 + str(
#                     shout_dict["body"].index(entry["textAfter"] or "")
#                     + len(entry["textAfter"] or "")
#                 )
#             )
#
#         with local_session() as session:
#             rmrk = Reaction.create(**remark)
#             session.commit()
#             del rmrk["_sa_instance_state"]
#             return rmrk
#     return
828
migration/tables/replacements.json
Normal file
@@ -0,0 +1,828 @@
{
  "207": "207",
  "1990-e": "90s",
  "2000-e": "2000s",
  "90-e": "90s",
  "Georgia": "georgia",
  "Japan": "japan",
  "Sweden": "sweden",
  "abstraktsiya": "abstract",
  "absurdism": "absurdism",
  "acclimatization": "acclimatisation",
  "activism": "activism",
  "adolf-gitler": "adolf-hitler",
  "afrika": "africa",
  "agata-kristi": "agatha-christie",
  "agressivnoe-povedenie": "agression",
  "agressiya": "agression",
  "aktsii": "actions",
  "aktsionizm": "actionism",
  "alber-kamyu": "albert-kamus",
  "albomy": "albums",
  "aleksandr-griboedov": "aleksander-griboedov",
  "aleksandr-pushkin": "aleksander-pushkin",
  "aleksandr-solzhenitsyn": "aleksander-solzhenitsyn",
  "aleksandr-vvedenskiy": "aleksander-vvedensky",
  "aleksey-navalnyy": "alexey-navalny",
  "alfavit": "alphabet",
  "alkogol": "alcohol",
  "alternativa": "alternative",
  "alternative": "alternative",
  "alternativnaya-istoriya": "alternative-history",
  "amerika": "america",
  "anarhizm": "anarchism",
  "anatoliy-mariengof": "anatoly-mariengof",
  "ancient-russia": "ancient-russia",
  "andegraund": "underground",
  "andrey-platonov": "andrey-platonov",
  "andrey-rodionov": "andrey-rodionov",
  "andrey-tarkovskiy": "andrey-tarkovsky",
  "angliyskie-istorii": "english-stories",
  "angliyskiy-yazyk": "english-langugae",
  "ango": "ango",
  "animation": "animation",
  "animatsiya": "animation",
  "anime": "anime",
  "anri-volohonskiy": "anri-volohonsky",
  "antifashizm": "anti-faschism",
  "antiquity": "antiquity",
  "antiutopiya": "dystopia",
  "anton-dolin": "anton-dolin",
  "antropology": "antropology",
  "antropotsen": "antropocenus",
  "architecture": "architecture",
  "arheologiya": "archeology",
  "arhetipy": "archetypes",
  "arhiv": "archive",
  "aristokraty": "aristocracy",
  "aristotel": "aristotle",
  "arktika": "arctic",
  "armiya": "army",
  "armiya-1": "army",
  "art": "art",
  "art-is": "art-is",
  "artists": "artists",
  "ateizm": "atheism",
  "audio-poetry": "audio-poetry",
  "audiopoeziya": "audio-poetry",
  "audiospektakl": "audio-spectacles",
  "auktsyon": "auktsyon",
  "avangard": "avantgarde",
  "avtofikshn": "autofiction",
  "avtorskaya-pesnya": "bardsongs",
  "azbuka-immigratsii": "immigration-basics",
  "aziatskiy-kinematograf": "asian-cinema",
  "b-movie": "b-movie",
  "bannye-chteniya": "sauna-reading",
  "bardsongs": "bardsongs",
  "bdsm": "bdsm",
  "beecake": "beecake",
  "belarus": "belarus",
  "belgiya": "belgium",
  "bertold-breht": "berttold-brecht",
  "bezumie": "madness",
  "biography": "biography",
  "biologiya": "biology",
  "bipolyarnoe-rasstroystvo": "bipolar-disorder",
  "bitniki": "beatnics",
  "biznes": "business",
  "blizhniy-vostok": "middle-east",
  "blizost": "closeness",
  "blocked-in-russia": "blocked-in-russia",
  "blokada": "blockade",
  "bob-dilan": "bob-dylan",
  "bog": "god",
  "bol": "pain",
  "bolotnoe-delo": "bolotnaya-case",
  "books": "books",
  "boris-eltsin": "boris-eltsin",
  "boris-godunov": "boris-godunov",
  "boris-grebenschikov": "boris-grebenschikov",
  "boris-nemtsov": "boris-nemtsov",
  "boris-pasternak": "boris-pasternak",
  "brak": "marriage",
  "bret-iston-ellis": "bret-iston-ellis",
  "buddizm": "buddhism",
  "bullying": "bullying",
  "bunt": "riot",
  "burning-man": "burning-man",
  "bytie": "being",
  "byurokratiya": "bureaucracy",
  "capitalism": "capitalism",
  "censored-in-russia": "censored-in-russia",
  "ch-rno-beloe": "black-and-white",
  "ch-rnyy-yumor": "black-humour",
  "chapters": "chapters",
  "charity": "charity",
  "chayldfri": "childfree",
  "chechenskaya-voyna": "chechen-war",
  "chechnya": "chechnya",
  "chelovek": "male",
  "chernobyl": "chernobyl",
  "chernyy-yumor": "black-humour",
  "children": "children",
  "china": "china",
  "chinovniki": "bureaucracy",
  "chukotka": "chukotka",
  "chuma": "plague",
  "church": "church",
  "cinema": "cinema",
  "city": "city",
  "civil-position": "civil-position",
  "clips": "clips",
  "collage": "collage",
  "comics": "comics",
  "conspiracy-theory": "conspiracy-theory",
  "contemporary-art": "contemporary-art",
  "contemporary-poetry": "poetry",
  "contemporary-prose": "prose",
  "coronavirus": "coronavirus",
  "corruption": "corruption",
  "creative-writing-school": "creative-writing-school",
  "crime": "crime",
  "criticism": "criticism",
  "critiques": "reviews",
  "culture": "culture",
  "dadaizm": "dadaism",
  "daniel-defo": "daniel-defoe",
  "daniil-harms": "daniil-kharms",
  "dante-aligeri": "dante-alighieri",
  "darkveyv": "darkwave",
  "death": "death",
  "debaty": "debats",
  "delo-seti": "seti-case",
  "democracy": "democracy",
  "demografiya": "demographics",
  "demonstrations": "demonstrations",
  "depression": "depression",
  "derevnya": "village",
  "derrida": "derrida",
  "design": "design",
  "detskie-doma": "orphanages",
  "detstvo": "childhood",
  "devid-linch": "david-linch",
  "devyanostye": "90s",
  "dialog": "dialogue",
  "digital": "digital",
  "digital-art": "digital-art",
  "dinozavry": "dinosaurs",
  "directing": "directing",
  "diskurs": "discours",
  "diskurs-1": "discourse",
  "diskurs-analiz": "discourse-analytics",
  "dissidenty": "dissidents",
  "diy": "diy",
  "dmitriy-donskoy": "dmitriy-donskoy",
  "dmitriy-prigov": "dmitriy-prigov",
  "dnevnik-1": "dairy",
  "dnevniki": "dairies",
  "documentary": "documentary",
  "dokumentalnaya-poema": "documentary-poem",
  "dokumentalnaya-poeziya": "documentary-poetry",
  "dokumenty": "doсuments",
  "domashnee-nasilie": "home-terror",
  "donald-tramp": "donald-trump",
  "donbass": "donbass",
  "donbass-diary": "donbass-diary",
  "donorstvo": "donation",
  "dozhd": "rain",
  "drama": "drama",
  "dramaturgy": "dramaturgy",
  "drawing": "drawing",
  "drevo-zhizni": "tree-of-life",
  "drugs": "drugs",
  "duh": "spirit",
  "dzhaz": "jazz",
  "dzhek-keruak": "jack-keruak",
  "dzhim-morrison": "jim-morrison",
  "dzhordzh-romero": "george-romero",
  "dzhordzho-agamben": "giorgio-agamben",
  "ecology": "ecology",
  "economics": "economics",
  "eda": "food",
  "editorial-statements": "editorial-statements",
  "eduard-limonov": "eduard-limonov",
  "education": "education",
  "egor-letov": "egor-letov",
  "ekspat": "expat",
  "eksperiment": "experiments",
  "eksperimentalnaya-muzyka": "experimental-music",
  "ekspressionizm": "expressionism",
  "ekstremizm": "extremism",
  "ekzistentsializm-1": "existentialism",
  "ekzistentsiya": "existence",
  "elections": "elections",
  "electronic": "electronics",
  "electronics": "electronics",
  "elena-glinskaya": "elena-glinskaya",
  "elena-guro": "elena-guro",
  "elizaveta-mnatsakanova": "elizaveta-mnatsakanova",
  "embient": "ambient",
  "emigration": "emigration",
  "emil-dyurkgeym": "emile-durkheim",
  "emotsii": "emotions",
  "empiric": "empiric",
  "epidemiya": "pandemic",
  "erich-von-neff": "erich-von-neff",
  "erotika": "erotics",
  "essay": "essay",
  "estetika": "aestetics",
  "etika": "ethics",
  "etno": "ethno",
  "etnos": "ethnics",
  "everyday-life": "everyday-life",
  "evgeniy-onegin": "eugene-onegin",
  "evolyutsiya": "evolution",
  "exhibitions": "exhibitions",
  "experience": "experiences",
  "experimental": "experimental",
  "experimental-music": "experimental-music",
  "explanation": "explanation",
  "faktcheking": "fact-checking",
  "falsifikatsii": "falsifications",
  "family": "family",
  "fanfiki": "fan-fiction",
  "fantastika": "sci-fi",
  "fatalizm": "fatalism",
  "fedor-dostoevskiy": "fedor-dostoevsky",
  "fedor-ioannovich": "fedor-ioannovich",
  "feleton": "feuilleton",
  "feminism": "feminism",
  "fenomenologiya": "phenomenology",
  "fentezi": "fantasy",
  "festival": "festival",
  "festival-territoriya": "festival-territory",
  "folk": "folk",
  "folklor": "folklore",
  "fotoreportazh": "photoreports",
  "france": "france",
  "frants-kafka": "franz-kafka",
  "frederik-begbeder": "frederick-begbeder",
  "freedom": "freedom",
  "friendship": "friendship",
  "fsb": "fsb",
  "futbol": "footbool",
  "future": "future",
  "futuristy": "futurists",
  "futurizm": "futurism",
  "galereya": "gallery",
  "galereya-anna-nova": "gallery-anna-nova",
  "gdr": "gdr",
  "gender": "gender",
  "gendernyy-diskurs": "gender",
  "gennadiy-aygi": "gennadiy-aygi",
  "gerhard-rihter": "gerhard-rihter",
  "germaniya": "germany",
  "germenevtika": "hermeneutics",
  "geroi": "heroes",
  "girls": "girls",
  "gkchp": "gkchp",
  "glitch": "glitch",
  "globalizatsiya": "globalisation",
  "gollivud": "hollywood",
  "gonzo": "gonzo",
  "gore-ot-uma": "woe-from-wit",
  "graffiti": "graffiti",
  "graficheskaya-novella": "graphic-novell",
  "graphics": "graphics",
  "gravyura": "engraving",
  "grazhdanskaya-oborona": "grazhdanskaya-oborona",
  "gretsiya": "greece",
  "griby": "mushrooms",
  "gruziya-2": "georgia",
  "gulag": "gulag",
  "han-batyy": "khan-batyy",
  "hayku": "haiku",
  "health": "health",
  "himiya": "chemistry",
  "hip-hop": "hip-hop",
  "history": "history",
  "history-of-russia": "history-of-russia",
  "holokost": "holocaust",
  "horeografiya": "choreography",
  "horror": "horror",
  "hospis": "hospice",
  "hristianstvo": "christianity",
  "humans": "humans",
  "humour": "humour",
  "ideologiya": "ideology",
  "idm": "idm",
  "igil": "isis",
  "igor-pomerantsev": "igor-pomerantsev",
  "igra": "game",
  "igra-prestolov": "game-of-throne",
  "igry": "games",
  "iisus-hristos": "jesus-christ",
  "illness": "illness",
  "illustration-history": "illustration-history",
  "illustrations": "illustrations",
  "imazhinizm": "imagism",
  "immanuil-kant": "immanuel-kant",
  "impressionizm": "impressionism",
  "improvizatsiya": "improvisation",
  "indi": "indie",
  "individualizm": "individualism",
  "infografika": "infographics",
  "informatsiya": "information",
  "ingmar-bergman": "ingmar-bergman",
  "inklyuziya": "inclusion",
  "installyatsiya": "installation",
  "internet": "internet",
  "interview": "interview",
  "invalidnost": "disability",
  "investigations": "investigations",
  "iosif-brodskiy": "joseph-brodsky",
  "iosif-stalin": "joseph-stalin",
  "iskusstvennyy-intellekt": "artificial-intelligence",
  "islam": "islam",
  "istoriya-moskvy": "moscow-history",
  "istoriya-nauki": "history-of-sceince",
  "istoriya-o-medsestre": "nurse-story",
  "istoriya-teatra": "theatre-history",
  "italiya": "italy",
  "italyanskiy-yazyk": "italian-language",
  "iudaika": "judaica",
  "ivan-groznyy": "ivan-grozny",
  "ivan-iii-gorbatyy": "ivan-iii-gorbaty",
  "ivan-kalita": "ivan-kalita",
  "ivan-krylov": "ivan-krylov",
  "izobreteniya": "inventions",
  "izrail-1": "israel",
  "jazz": "jazz",
  "john-lennon": "john-lennon",
  "journalism": "journalism",
  "justice": "justice",
  "k-pop": "k-pop",
  "kalligrafiya": "calligraphy",
  "karikatura": "caricatures",
  "kartochki-rubinshteyna": "rubinstein-cards",
  "katrin-nenasheva": "katrin-nenasheva",
  "kavarga": "kavarga",
  "kavkaz": "caucasus",
  "kazan": "kazan",
  "kiberbezopasnost": "cybersecurity",
  "kinoklub": "cinema-club",
  "kinokritika": "film-criticism",
  "kirill-serebrennikov": "kirill-serebrennikov",
  "kladbische": "cemetery",
  "klassika": "classic",
  "kollektivnoe-bessoznatelnoe": "сollective-unconscious",
  "komediya": "comedy",
  "kommunikatsii": "communications",
  "kommunizm": "communism",
  "kommuny": "communes",
  "kompyuternye-igry": "computer-games",
  "konets-vesny": "end-of-spring",
  "konservatizm": "conservatism",
  "kontrkultura": "counter-culture",
  "kontseptualizm": "conceptualism",
  "korotkometrazhka": "cinema-shorts",
  "kosmos": "cosmos",
  "kraudfanding": "crowdfunding",
  "kriptovalyuty": "cryptocurrencies",
  "krizis": "crisis",
  "krov": "blood",
  "krym": "crimea",
  "kulturologiya": "culturology",
  "kulty": "cults",
  "kurdistan": "kurdistan",
  "kurt-kobeyn": "kurt-cobain",
  "kurt-vonnegut": "kurt-vonnegut",
  "kvir": "queer",
  "laboratoriya": "lab",
  "language": "languages",
  "lars-fon-trier": "lars-fon-trier",
  "laws": "laws",
  "lectures": "lectures",
  "leto": "summer",
  "lev-tolstoy": "leo-tolstoy",
  "lgbt": "lgbt",
  "liberalizm": "liberalism",
  "libertarianstvo": "libertarianism",
  "life": "life",
  "likbez": "likbez",
  "lingvistika": "linguistics",
  "lirika": "lirics",
  "literary-studies": "literary-studies",
  "literature": "literature",
  "literaturnyykaver": "literature-cover",
  "lo-fi": "lo-fi",
  "lomonosov": "lomonosov",
  "love": "love",
  "luzha-goluboy-krovi": "luzha-goluboy-krovi",
  "lyudvig-vitgenshteyn": "ludwig-wittgenstein",
  "lzhedmitriy": "false-dmitry",
  "lzhenauka": "pseudoscience",
  "magiya": "magic",
  "maks-veber": "max-weber",
  "manifests": "manifests",
  "manipulyatsii-soznaniem": "mind-manipulation",
  "marina-abramovich": "marina-abramovich",
  "marketing": "marketing",
  "marksizm": "marxism",
  "marsel-dyushan": "marchel-duchamp",
  "marsel-prust": "marcel-proust",
  "martin-haydegger": "martin-hidegger",
  "matematika": "maths",
  "mayakovskiy": "vladimir-mayakovsky",
  "media": "media",
  "medicine": "medicine",
  "memuary": "memoirs",
  "menedzhment": "management",
  "menty": "police",
  "merab-mamardashvili": "merab-mamardashvili",
  "mest": "revenge",
  "metamodernizm": "metamodern",
  "metavselennaya": "metaverse",
  "metro": "metro",
  "mifologiya": "mythology",
  "mify": "myth",
  "mihael-haneke": "michael-haneke",
  "mihail-baryshnikov": "mihail-baryshnikov",
  "mihail-bulgakov": "mihail-bulgakov",
  "mikrotonalnaya-muzyka": "mikrotone-muzyka",
  "minimalizm": "minimalism",
  "minkult-privet": "minkult-privet",
  "mir": "world",
  "mirovozzrenie": "mindsets",
  "mishel-fuko": "michel-foucault",
  "mistika": "mystics",
  "mitropolit-makariy": "mitropolit-makariy",
  "mlm": "mlm",
  "mobilizatsiya": "mobilisation",
  "moda": "fashion",
  "modernizm": "modernism",
  "mokyumentari": "mockumentary",
  "molodezh": "youth",
  "moloko-plus": "moloko-plus",
  "money": "money",
  "monologs": "monologues",
  "monstratsiya": "monstration",
  "moralnaya-otvetstvennost": "moral-responsibility",
  "more": "sea",
  "moscow": "moscow",
  "moshennichestvo": "frauds",
  "moskovskiy-romanticheskiy-kontseptualizm": "moscow-romantic-conceptualism",
  "moskovskoe-delo": "moscow-case",
  "movies": "movies",
  "mozg": "brain",
  "multiplikatsiya": "animation",
  "music": "music",
  "musulmanstvo": "islam",
  "muzei": "museum",
  "muzey": "museum",
  "muzhchiny": "man",
  "myshlenie": "thinking",
  "nagornyy-karabah": "nagorno-karabakh",
  "nasilie-1": "violence",
  "natsionalizm": "nationalism",
  "natsionalnaya-ideya": "national-idea",
  "natsizm": "nazism",
  "natyurmort": "nature-morte",
  "nauchpop": "pop-science",
  "nbp": "nbp",
  "nenavist": "hate",
  "neofitsialnaya-literatura": "unofficial-literature",
  "neoklassika": "neoclassic",
  "neprozrachnye-smysly": "hidden-meanings",
  "neravenstvo": "inequality",
  "net-voyne": "no-war",
  "new-year": "new-year",
  "neyronauka": "neuro-science",
  "neyroseti": "neural-networks",
  "niu-vshe": "hse",
  "nizhniy-novgorod": "nizhny-novgorod",
  "nko": "nonprofits",
  "nlo": "ufo",
  "nobelevskaya-premiya": "nobel-prize",
  "noize-mc": "noize-mc",
  "nonkonformizm": "nonconformism",
  "notforall": "notforall",
  "novaya-drama": "new-drama",
  "novosti": "news",
  "noyz": "noise",
  "nuar": "noir",
  "oberiu": "oberiu",
  "ocherk": "etudes",
  "ochevidnyy-nuar": "ochevidnyy-nuar",
  "odinochestvo": "loneliness",
  "odna-kniga-odna-istoriya": "one-book-one-story",
  "okrainy": "outskirts",
  "omon": "swat",
  "opinions": "opinions",
  "oppozitsiya": "opposition",
  "orhan-pamuk": "orhan-pamuk",
  "ornitologiya": "ornitology",
  "osen": "autumn",
  "osip-mandelshtam": "osip-mandelshtam",
  "oskar-uayld": "oscar-wilde",
  "osoznanie": "awareness",
  "otnosheniya": "relationship",
  "pablo-pikasso": "pablo-picasso",
  "painting": "painting",
  "paintings": "painting",
  "pamyat": "memory",
  "pandemiya": "pandemic",
  "parizh": "paris",
  "patriotizm": "patriotism",
  "patsifizm": "pacifism",
  "paul-tselan": "paul-tselan",
  "per-burd": "pierre-bourdieu",
  "perezhivaniya": "worries",
  "performance": "performance",
  "peyzazh": "landscape",
  "philology": "philology",
  "philosophy": "philosophy",
  "photo": "photography",
  "photography": "photography",
  "photoprojects": "photoprojects",
  "plakaty": "posters",
  "plastilin": "plasticine",
  "plays": "plays",
  "podrostki": "teenagers",
  "poema": "poem",
  "poems": "poems",
  "poeticheskaya-proza": "poetic-prose",
  "poetry": "poetry",
  "poetry-of-squares": "poetry-of-squares",
  "poetry-slam": "poetry-slam",
  "pokoy": "peace",
  "police": "police",
  "politicheskoe-fentezi": "political-fantasy",
  "politics": "politics",
  "politzaklyuchennye": "political-prisoners",
  "polsha": "poland",
  "pomosch": "help",
  "pop-art": "pop-art",
  "pop-culture": "pop-culture",
  "populyarnaya-psihologiya": "popular-psychology",
  "pornografiya": "pornography",
  "portret": "portrait",
|
||||
"poslovitsy": "proverbs",
|
||||
"post-pank": "post-punk",
|
||||
"post-rok": "post-rock",
|
||||
"postmodernism": "postmodernism",
|
||||
"povest": "novells",
|
||||
"povsednevnost": "everyday-life",
|
||||
"power": "power",
|
||||
"pravo": "right",
|
||||
"pravoslavie": "orthodox",
|
||||
"pravozaschitniki": "human-rights-activism",
|
||||
"prazdnik": "holidays",
|
||||
"predatelstvo": "betrayal",
|
||||
"predprinimatelstvo": "entrepreneurship",
|
||||
"premera": "premier",
|
||||
"premiya-oskar": "oscar-prize",
|
||||
"pribaltika-1": "baltic",
|
||||
"priroda": "nature",
|
||||
"prison": "prison",
|
||||
"pritcha": "parable",
|
||||
"privatnost": "privacy",
|
||||
"progress": "progress",
|
||||
"projects": "projects",
|
||||
"prokrastinatsiya": "procrastination",
|
||||
"propaganda": "propaganda",
|
||||
"proschenie": "forgiveness",
|
||||
"prose": "prose",
|
||||
"proshloe": "past",
|
||||
"prostitutsiya": "prostitution",
|
||||
"prosveschenie": "enlightenment",
|
||||
"protests": "protests",
|
||||
"psalmy": "psalms",
|
||||
"psihoanaliz": "psychoanalysis",
|
||||
"psihodeliki": "psychodelics",
|
||||
"pskov": "pskov",
|
||||
"psychiatry": "psychiatry",
|
||||
"psychology": "psychology",
|
||||
"ptitsy": "birds",
|
||||
"punk": "punk",
|
||||
"r-b": "rnb",
|
||||
"rasizm": "racism",
|
||||
"realizm": "realism",
|
||||
"redaktura": "editing",
|
||||
"refleksiya": "reflection",
|
||||
"reggi": "reggae",
|
||||
"religion": "religion",
|
||||
"rene-zhirar": "rene-girard",
|
||||
"renesanss": "renessance",
|
||||
"renovatsiya": "renovation",
|
||||
"rep": "rap",
|
||||
"reportage": "reportage",
|
||||
"reportazh-1": "reportage",
|
||||
"repressions": "repressions",
|
||||
"research": "research",
|
||||
"retroveyv": "retrowave",
|
||||
"review": "review",
|
||||
"revolution": "revolution",
|
||||
"rezo-gabriadze": "rezo-gabriadze",
|
||||
"risunki": "painting",
|
||||
"roboty": "robots",
|
||||
"rock": "rock",
|
||||
"roditeli": "parents",
|
||||
"romantizm": "romantism",
|
||||
"romany": "novell",
|
||||
"ronald-reygan": "ronald-reygan",
|
||||
"roskomnadzor": "roskomnadzor",
|
||||
"rossiyskoe-kino": "russian-cinema",
|
||||
"rouling": "rowling",
|
||||
"rozhava": "rojava",
|
||||
"rpts": "rpts",
|
||||
"rus-na-grani-sryva": "rus-na-grani-sryva",
|
||||
"russia": "russia",
|
||||
"russian-language": "russian-language",
|
||||
"russian-literature": "russian-literature",
|
||||
"russkaya-toska": "russian-toska",
|
||||
"russkiy-mir": "russkiy-mir",
|
||||
"salo": "lard",
|
||||
"salvador-dali": "salvador-dali",
|
||||
"samoidentifikatsiya": "self-identity",
|
||||
"samoopredelenie": "self-definition",
|
||||
"sankt-peterburg": "saint-petersburg",
|
||||
"sasha-skochilenko": "sasha-skochilenko",
|
||||
"satira": "satiric",
|
||||
"saund-art": "sound-art",
|
||||
"schaste": "happiness",
|
||||
"school": "school",
|
||||
"science": "science",
|
||||
"sculpture": "sculpture",
|
||||
"second-world-war": "second-world-war",
|
||||
"sekond-hend": "second-hand",
|
||||
"seksprosvet": "sex-education",
|
||||
"seksualizirovannoe-nasilie": "sexualized-violence",
|
||||
"seksualnoe-nasilie": "sexualized-violence",
|
||||
"sekty": "sects",
|
||||
"semi": "semi",
|
||||
"semiotics": "semiotics",
|
||||
"serbiya": "serbia",
|
||||
"sergey-bodrov-mladshiy": "sergey-bodrov-junior",
|
||||
"sergey-solov-v": "sergey-solovyov",
|
||||
"serialy": "series",
|
||||
"sever": "north",
|
||||
"severnaya-koreya": "north-korea",
|
||||
"sex": "sex",
|
||||
"shotlandiya": "scotland",
|
||||
"shugeyz": "shoegaze",
|
||||
"siloviki": "siloviki",
|
||||
"simeon-bekbulatovich": "simeon-bekbulatovich",
|
||||
"simvolizm": "simbolism",
|
||||
"siriya": "siria",
|
||||
"skulptura": "sculpture",
|
||||
"slavoy-zhizhek": "slavoj-zizek",
|
||||
"smert-1": "death",
|
||||
"smysl": "meaning",
|
||||
"sny": "dreams",
|
||||
"sobytiya": "events",
|
||||
"social": "society",
|
||||
"society": "society",
|
||||
"sociology": "sociology",
|
||||
"sofya-paleolog": "sofya-paleolog",
|
||||
"sofya-vitovtovna": "sofya-vitovtovna",
|
||||
"soobschestva": "communities",
|
||||
"soprotivlenie": "resistence",
|
||||
"sotsializm": "socialism",
|
||||
"sotsialnaya-filosofiya": "social-philosophy",
|
||||
"sotsiologiya-1": "sociology",
|
||||
"sotsseti": "social-networks",
|
||||
"sotvorenie-tretego-rima": "third-rome",
|
||||
"sovremennost": "modernity",
|
||||
"spaces": "spaces",
|
||||
"spektakl": "spectacles",
|
||||
"spetseffekty": "special-fx",
|
||||
"spetsoperatsiya": "special-operation",
|
||||
"spetssluzhby": "special-services",
|
||||
"sport": "sport",
|
||||
"srednevekove": "middle-age",
|
||||
"state": "state",
|
||||
"statistika": "statistics",
|
||||
"stendap": "stand-up",
|
||||
"stihi": "poetry",
|
||||
"stoitsizm": "stoicism",
|
||||
"stories": "stories",
|
||||
"stoyanie-na-ugre": "stoyanie-na-ugre",
|
||||
"strah": "fear",
|
||||
"street-art": "street-art",
|
||||
"stsenarii": "scenarios",
|
||||
"sud": "court",
|
||||
"summary": "summary",
|
||||
"supergeroi": "superheroes",
|
||||
"svetlana-aleksievich": "svetlana-aleksievich",
|
||||
"svobodu-ivanu-golunovu": "free-ivan-golunov",
|
||||
"syurrealizm": "surrealism",
|
||||
"tales": "tales",
|
||||
"tanets": "dance",
|
||||
"tataro-mongolskoe-igo": "mongol-tatar-yoke",
|
||||
"tatuirovki": "tattoo",
|
||||
"technology": "technology",
|
||||
"televidenie": "television",
|
||||
"telo": "body",
|
||||
"telo-kak-iskusstvo": "body-as-art",
|
||||
"terrorizm": "terrorism",
|
||||
"tests": "tests",
|
||||
"text": "texts",
|
||||
"the-beatles": "the-beatles",
|
||||
"theater": "theater",
|
||||
"theory": "theory",
|
||||
"tokio": "tokio",
|
||||
"torture": "torture",
|
||||
"totalitarizm": "totalitarism",
|
||||
"traditions": "traditions",
|
||||
"tragicomedy": "tragicomedy",
|
||||
"transgendernost": "transgender",
|
||||
"translation": "translation",
|
||||
"transport": "transport",
|
||||
"travel": "travel",
|
||||
"travma": "trauma",
|
||||
"trendy": "trends",
|
||||
"tretiy-reyh": "third-reich",
|
||||
"triller": "thriller",
|
||||
"tsar": "central-african-republic",
|
||||
"tsar-edip": "oedipus",
|
||||
"tsarevich-dmitriy": "tsarevich-dmitry",
|
||||
"tsennosti": "values",
|
||||
"tsenzura": "censorship",
|
||||
"tseremonii": "ceremonies",
|
||||
"turizm": "tourism",
|
||||
"tvorchestvo": "creativity",
|
||||
"ugnetennyy-zhilischnyy-klass": "oppressed-housing-class",
|
||||
"uilyam-shekspir": "william-shakespeare",
|
||||
"ukraina-2": "ukraine",
|
||||
"ukraine": "ukraine",
|
||||
"university": "university",
|
||||
"urban-studies": "urban-studies",
|
||||
"uroki-literatury": "literature-lessons",
|
||||
"usa": "usa",
|
||||
"ussr": "ussr",
|
||||
"utopiya": "utopia",
|
||||
"utrata": "loss",
|
||||
"valter-benyamin": "valter-benyamin",
|
||||
"varlam-shalamov": "varlam-shalamov",
|
||||
"vasiliy-ii-temnyy": "basil-ii-temnyy",
|
||||
"vasiliy-iii": "basil-iii",
|
||||
"vdnh": "vdnh",
|
||||
"vechnost": "ethernety",
|
||||
"velikobritaniya": "great-britain",
|
||||
"velimir-hlebnikov": "velimir-hlebnikov",
|
||||
"velkom-tu-greyt-britn": "welcome-to-great-britain",
|
||||
"venedikt-erofeev": "venedikt-erofeev",
|
||||
"venetsiya": "veneece",
|
||||
"vengriya": "hungary",
|
||||
"verlibry": "free-verse",
|
||||
"veschi": "things",
|
||||
"vessels": "vessels",
|
||||
"veterany": "veterans",
|
||||
"video": "video",
|
||||
"videoart": "videoart",
|
||||
"videoklip": "clips",
|
||||
"videopoeziya": "video-poetry",
|
||||
"viktor-astafev": "viktor-astafev",
|
||||
"viktor-pelevin": "viktor-pelevin",
|
||||
"vilgelm-rayh": "wilhelm-reich",
|
||||
"vinzavod": "vinzavod",
|
||||
"violence": "violence",
|
||||
"visual-culture": "visual-culture",
|
||||
"vizualnaya-poeziya": "visual-poetry",
|
||||
"vladimir-lenin": "vladimir-lenin",
|
||||
"vladimir-mayakovskiy": "vladimir-mayakovsky",
|
||||
"vladimir-nabokov": "vladimir-nabokov",
|
||||
"vladimir-putin": "vladimir-putin",
|
||||
"vladimir-sorokin": "vladimir-sorokin",
|
||||
"vladimir-voynovich": "vladimir-voynovich",
|
||||
"vnutrenniy-opyt": "inner-expirience",
|
||||
"volga": "volga",
|
||||
"volontery": "volonteurs",
|
||||
"vong-karvay": "wong-karwai",
|
||||
"vospominaniya": "memories",
|
||||
"vostok": "east",
|
||||
"voyna-na-ukraine": "war-in-ukraine",
|
||||
"voyna-v-ukraine": "war-in-ukraine",
|
||||
"vremya": "time",
|
||||
"vudi-allen": "woody-allen",
|
||||
"vynuzhdennye-otnosheniya": "forced-relationship",
|
||||
"war": "war",
|
||||
"war-in-ukraine-images": "war-in-ukrahine-images",
|
||||
"women": "women",
|
||||
"work": "work",
|
||||
"writers": "writers",
|
||||
"xx-century": "xx-century",
|
||||
"yakob-yordans": "yakob-yordans",
|
||||
"yan-vermeer": "yan-vermeer",
|
||||
"yanka-dyagileva": "yanka-dyagileva",
|
||||
"yaponskaya-literatura": "japan-literature",
|
||||
"yazychestvo": "paganism",
|
||||
"youth": "youth",
|
||||
"yozef-rot": "yozef-rot",
|
||||
"yurgen-habermas": "jorgen-habermas",
|
||||
"za-liniey-mannergeyma": "behind-mannerheim-line",
|
||||
"zabota": "care",
|
||||
"zahar-prilepin": "zahar-prilepin",
|
||||
"zakonodatelstvo": "laws",
|
||||
"zakony-mira": "world-laws",
|
||||
"zametki": "notes",
|
||||
"zhelanie": "wish",
|
||||
"zhivotnye": "animals",
|
||||
"zhoze-saramago": "jose-saramago",
|
||||
"zigmund-freyd": "sigmund-freud",
|
||||
"zolotaya-orda": "golden-horde",
|
||||
"zombi": "zombie",
|
||||
"zombi-simpsony": "zombie-simpsons"
|
||||
}
|
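
The table above is the slug replacement map used when migrating old content. A minimal sketch of applying it; the file path and the fall-through behavior are assumptions rather than anything shown in this diff:

import json

# Path is an assumption; only the mapping's contents appear above.
with open("migration/tables/replacements.json") as f:
    REPLACEMENTS = json.load(f)


def normalize_slug(old_slug: str) -> str:
    # Fall back to the original slug when no replacement is defined.
    return REPLACEMENTS.get(old_slug, old_slug)


assert normalize_slug("kavkaz") == "caucasus"
assert normalize_slug("some-unmapped-slug") == "some-unmapped-slug"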

31 migration/tables/topics.py Normal file
@@ -0,0 +1,31 @@
from base.orm import local_session
from migration.html2text import html2text
from orm import Topic


def migrate(entry):
    body_orig = entry.get("description", "").replace("&nbsp;", " ")
    topic_dict = {
        "slug": entry["slug"],
        "oid": entry["_id"],
        "title": entry["title"].replace("&nbsp;", " "),
        "body": html2text(body_orig),
    }

    with local_session() as session:
        slug = topic_dict["slug"]
        topic = session.query(Topic).filter(Topic.slug == slug).first() or Topic.create(
            **topic_dict
        )
        if not topic:
            raise Exception("no topic!")
        if topic:
            if len(topic.title) > len(topic_dict["title"]):
                Topic.update(topic, {"title": topic_dict["title"]})
            if len(topic.body) < len(topic_dict["body"]):
                Topic.update(topic, {"body": topic_dict["body"]})
            session.commit()
        # print(topic.__dict__)
        rt = topic.__dict__.copy()
        del rt["_sa_instance_state"]
        return rt
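
A minimal usage sketch for migrate() above; the entry is a hypothetical MongoDB export record carrying only the fields the function reads:

entry = {
    "_id": "55f4a2a8e2a7d6001f000001",  # made-up ObjectId string
    "slug": "kino",
    "title": "Кино",
    "description": "<p>Тексты о кино</p>",
}

topic = migrate(entry)  # upserts the Topic and returns it as a plain dict
print(topic["slug"], topic["title"])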

156 migration/tables/users.py Normal file
@@ -0,0 +1,156 @@
import re

from bs4 import BeautifulSoup
from dateutil.parser import parse
from sqlalchemy.exc import IntegrityError

from base.orm import local_session
from orm.user import AuthorFollower, User, UserRating


def migrate(entry):  # noqa: C901
    if "subscribedTo" in entry:
        del entry["subscribedTo"]
    email = entry["emails"][0]["address"]
    user_dict = {
        "oid": entry["_id"],
        "roles": [],
        "ratings": [],
        "username": email,
        "email": email,
        "createdAt": parse(entry["createdAt"]),
        "emailConfirmed": ("@discours.io" in email) or bool(entry["emails"][0]["verified"]),
        "muted": False,  # amnesty
        "links": [],
        "name": "anonymous",
        "password": entry["services"]["password"].get("bcrypt"),
    }

    if "updatedAt" in entry:
        user_dict["updatedAt"] = parse(entry["updatedAt"])
    if "wasOnlineAt" in entry:
        user_dict["lastSeen"] = parse(entry["wasOnlineAt"])
    if entry.get("profile"):
        # slug
        slug = entry["profile"].get("path").lower()
        slug = re.sub("[^0-9a-zA-Z]+", "-", slug).strip()
        user_dict["slug"] = slug
        bio = (
            (entry.get("profile", {"bio": ""}).get("bio") or "")
            .replace(r"\(", "(")
            .replace(r"\)", ")")
        )
        bio_text = BeautifulSoup(bio, features="lxml").text

        if len(bio_text) > 120:
            user_dict["about"] = bio_text
        else:
            user_dict["bio"] = bio_text

        # userpic
        try:
            user_dict["userpic"] = (
                "https://images.discours.io/unsafe/" + entry["profile"]["thumborId"]
            )
        except KeyError:
            try:
                user_dict["userpic"] = entry["profile"]["image"]["url"]
            except KeyError:
                user_dict["userpic"] = ""

        # name
        fn = entry["profile"].get("firstName", "")
        ln = entry["profile"].get("lastName", "")
        name = fn if fn else ""
        name = (name + " " + ln) if ln else name
        if not name:
            name = slug if slug else "anonymous"
        name = entry["profile"]["path"].lower().strip().replace(" ", "-") if len(name) < 2 else name
        user_dict["name"] = name

        # links
        fb = entry["profile"].get("facebook", False)
        if fb:
            user_dict["links"].append(fb)
        vk = entry["profile"].get("vkontakte", False)
        if vk:
            user_dict["links"].append(vk)
        tr = entry["profile"].get("twitter", False)
        if tr:
            user_dict["links"].append(tr)
        ws = entry["profile"].get("website", False)
        if ws:
            user_dict["links"].append(ws)

    # some checks
    if not user_dict["slug"] and len(user_dict["links"]) > 0:
        user_dict["slug"] = user_dict["links"][0].split("/")[-1]

    user_dict["slug"] = user_dict.get("slug", user_dict["email"].split("@")[0])
    oid = user_dict["oid"]
    user_dict["slug"] = user_dict["slug"].lower().strip().replace(" ", "-")
    try:
        user = User.create(**user_dict.copy())
    except IntegrityError:
        print("[migration] cannot create user " + user_dict["slug"])
        with local_session() as session:
            old_user = session.query(User).filter(User.slug == user_dict["slug"]).first()
            old_user.oid = oid
            old_user.password = user_dict["password"]
            session.commit()
            user = old_user
    if not user:
        print("[migration] ERROR: cannot find user " + user_dict["slug"])
        raise Exception
    user_dict["id"] = user.id
    return user_dict


def post_migrate():
    old_discours_dict = {
        "slug": "old-discours",
        "username": "old-discours",
        "email": "old@discours.io",
        "name": "Просмотры на старой версии сайта",
    }

    with local_session() as session:
        old_discours_user = User.create(**old_discours_dict)
        session.add(old_discours_user)
        session.commit()


def migrate_2stage(entry, id_map):
    ce = 0
    for rating_entry in entry.get("ratings", []):
        rater_oid = rating_entry["createdBy"]
        rater_slug = id_map.get(rater_oid)
        if not rater_slug:
            ce += 1
            # print(rating_entry)
            continue
        oid = entry["_id"]
        author_slug = id_map.get(oid)

        with local_session() as session:
            try:
                rater = session.query(User).where(User.slug == rater_slug).one()
                user = session.query(User).where(User.slug == author_slug).one()

                user_rating_dict = {
                    "value": rating_entry["value"],
                    "rater": rater.id,
                    "user": user.id,
                }

                user_rating = UserRating.create(**user_rating_dict)
                if user_rating_dict["value"] > 0:
                    af = AuthorFollower.create(author=user.id, follower=rater.id, auto=True)
                    session.add(af)
                session.add(user_rating)
                session.commit()
            except IntegrityError:
                print("[migration] cannot rate " + author_slug + "`s by " + rater_slug)
            except Exception as e:
                print(e)
    return ce
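
A sketch of driving both passes above together; users_data, a parsed MongoDB export, and the loop shape are assumptions:

# First pass: create users and remember oid -> slug for rating lookups.
id_map = {}
for entry in users_data:
    user = migrate(entry)
    id_map[user["oid"]] = user["slug"]

# Second pass: replay ratings once every rater resolves to a slug.
skipped = 0
for entry in users_data:
    skipped += migrate_2stage(entry, id_map)
print("[migration] %d ratings skipped (unknown rater)" % skipped)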

10 migration/utils.py Normal file
@@ -0,0 +1,10 @@
from datetime import datetime
from json import JSONEncoder


class DateTimeEncoder(JSONEncoder):
    def default(self, z):
        if isinstance(z, datetime):
            return str(z)
        else:
            return super().default(z)
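
The encoder above plugs into json.dumps through the cls argument; a small sketch with a made-up payload:

import json
from datetime import datetime

from migration.utils import DateTimeEncoder

payload = {"slug": "genesis-block", "createdAt": datetime.utcnow()}
# datetime values go through str(); everything else serializes as usual
print(json.dumps(payload, cls=DateTimeEncoder, ensure_ascii=False))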

@@ -1,19 +1,9 @@
{{ $proxy_settings := "proxy_http_version 1.1; proxy_set_header Upgrade $http_upgrade; proxy_set_header Connection $http_connection; proxy_set_header Host $http_host; proxy_set_header X-Request-Start $msec;" }}
{{ $gzip_settings := "gzip on; gzip_min_length 1100; gzip_buffers 4 32k; gzip_types text/css text/javascript text/xml text/plain text/x-component application/javascript application/x-javascript application/json application/xml application/rss+xml font/truetype application/x-font-ttf font/opentype application/vnd.ms-fontobject image/svg+xml; gzip_vary on; gzip_comp_level 6;" }}

{{ $cors_headers_options := "if ($request_method = 'OPTIONS') { add_header 'Access-Control-Allow-Origin' '$allow_origin' always; add_header 'Access-Control-Allow-Methods' 'POST, GET, OPTIONS'; add_header 'Access-Control-Allow-Headers' 'Content-Type, Authorization' always; add_header 'Access-Control-Allow-Credentials' 'true' always; add_header 'Access-Control-Max-Age' 1728000; add_header 'Content-Type' 'text/plain; charset=utf-8'; add_header 'Content-Length' 0; return 204; }" }}
{{ $cors_headers_post := "if ($request_method = 'POST') { add_header 'Access-Control-Allow-Origin' '$allow_origin' always; add_header 'Access-Control-Allow-Methods' 'POST, GET, OPTIONS' always; add_header 'Access-Control-Allow-Headers' 'Content-Type, Authorization' always; add_header 'Access-Control-Allow-Credentials' 'true' always; }" }}
{{ $cors_headers_get := "if ($request_method = 'GET') { add_header 'Access-Control-Allow-Origin' '$allow_origin' always; add_header 'Access-Control-Allow-Methods' 'POST, GET, OPTIONS' always; add_header 'Access-Control-Allow-Headers' 'Content-Type, Authorization' always; add_header 'Access-Control-Allow-Credentials' 'true' always; }" }}

map $http_origin $allow_origin {
    ~^https?:\/\/((.*\.)?localhost(:\d+)?|discoursio-webapp(-(.*))?\.vercel\.app|(.*\.)?discours\.io|(.*\.)?dscrs\.site)$ $http_origin;
    default "";
}

proxy_cache_path /var/cache/nginx levels=1:2 keys_zone=my_cache:10m max_size=1g
    inactive=60m use_temp_path=off;
limit_conn_zone $binary_remote_addr zone=addr:10m;
limit_req_zone $binary_remote_addr zone=req_zone:10m rate=20r/s;
{{ $cors_headers_options := "if ($request_method = 'OPTIONS') { add_header 'Access-Control-Allow-Origin' '$allow_origin' always; add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS'; add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization'; add_header 'Access-Control-Allow-Credentials' 'true'; add_header 'Access-Control-Max-Age' 1728000; add_header 'Content-Type' 'text/plain; charset=utf-8'; add_header 'Content-Length' 0; return 204; }" }}
{{ $cors_headers_post := "if ($request_method = 'POST') { add_header 'Access-Control-Allow-Origin' '$allow_origin' always; add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS' always; add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization' always; add_header 'Access-Control-Expose-Headers' 'Content-Length,Content-Range' always; add_header 'Access-Control-Allow-Credentials' 'true' always; }" }}
{{ $cors_headers_get := "if ($request_method = 'GET') { add_header 'Access-Control-Allow-Origin' '$allow_origin' always; add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS' always; add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization' always; add_header 'Access-Control-Expose-Headers' 'Content-Length,Content-Range' always; add_header 'Access-Control-Allow-Credentials' 'true' always; }" }}

{{ range $port_map := .PROXY_PORT_MAP | split " " }}
{{ $port_map_list := $port_map | split ":" }}
@@ -28,7 +18,6 @@ server {
    server_name {{ $.NOSSL_SERVER_NAME }};
    access_log /var/log/nginx/{{ $.APP }}-access.log;
    error_log /var/log/nginx/{{ $.APP }}-error.log;
    client_max_body_size 100M;

{{ else if eq $scheme "https" }}
    listen [::]:{{ $listen_port }} ssl http2;
@@ -42,10 +31,6 @@ server {
    ssl_prefer_server_ciphers off;

    keepalive_timeout 70;
    keepalive_requests 500;
    proxy_read_timeout 3600;
    limit_conn addr 10000;
    client_max_body_size 100M;
{{ end }}


@@ -56,44 +41,13 @@ server {
        {{ $cors_headers_options }}
        {{ $cors_headers_post }}
        {{ $cors_headers_get }}

        proxy_cache my_cache;
        proxy_cache_revalidate on;
        proxy_cache_min_uses 2;
        proxy_cache_use_stale error timeout updating http_500 http_502 http_503 http_504;
        proxy_cache_background_update on;
        proxy_cache_lock on;

        # Connections and request limits increase (bad for DDos)
        limit_conn addr 10000;
        limit_req zone=req_zone burst=10 nodelay;
    }

    # Custom location block for /upload
    # location /upload {
    #     proxy_pass http://uploader-8080/;
    #     {{ $proxy_settings }}
    #     {{ $gzip_settings }}
    #     {{ $cors_headers_options }}
    #     {{ $cors_headers_post }}
    #     {{ $cors_headers_get }}
    # }

    location ~* \.(jpg|jpeg|png|gif|ico|css|js)$ {
        expires 30d; # This means that the client can cache these resources for 30 days.
        add_header Cache-Control "public, no-transform";
    }

    location ~* \.(mp3)$ {
        if ($request_method = 'GET') {
            add_header 'Access-Control-Allow-Origin' $allow_origin always;
            add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS' always;
            add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization' always;
            add_header 'Access-Control-Expose-Headers' 'Content-Length,Content-Range' always;
            add_header 'Access-Control-Allow-Credentials' 'true' always;
        }
    }


    error_page 400 401 402 403 405 406 407 408 409 410 411 412 413 414 415 416 417 418 420 422 423 424 426 428 429 431 444 449 450 451 /400-error.html;
    location /400-error.html {
@@ -119,6 +73,7 @@ server {
        internal;
    }

    # include /home/dokku/gateway/nginx.conf.d/*.conf;
    include {{ $.DOKKU_ROOT }}/{{ $.APP }}/nginx.conf.d/*.conf;
}
{{ end }}

36 orm/__init__.py Normal file
@@ -0,0 +1,36 @@
from base.orm import Base, engine
from orm.community import Community
from orm.notification import Notification
from orm.rbac import Operation, Permission, Resource, Role
from orm.reaction import Reaction
from orm.shout import Shout
from orm.topic import Topic, TopicFollower
from orm.user import User, UserRating


def init_tables():
    Base.metadata.create_all(engine)
    Operation.init_table()
    Resource.init_table()
    User.init_table()
    Community.init_table()
    Role.init_table()
    UserRating.init_table()
    Shout.init_table()
    print("[orm] tables initialized")


__all__ = [
    "User",
    "Role",
    "Operation",
    "Permission",
    "Community",
    "Shout",
    "Topic",
    "TopicFollower",
    "Notification",
    "Reaction",
    "UserRating",
    "init_tables",
]
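
init_tables() bundles schema creation with the seed rows inserted by the init_table() methods; a sketch of calling it from a one-off setup script (the script itself is assumed, not part of this diff):

from orm import init_tables

if __name__ == "__main__":
    init_tables()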

@@ -1,55 +0,0 @@
import time

from sqlalchemy import JSON, Boolean, Column, ForeignKey, Integer, String

from services.db import Base

# from sqlalchemy_utils import TSVectorType


class AuthorRating(Base):
    __tablename__ = "author_rating"

    id = None  # type: ignore
    rater = Column(ForeignKey("author.id"), primary_key=True)
    author = Column(ForeignKey("author.id"), primary_key=True)
    plus = Column(Boolean)


class AuthorFollower(Base):
    __tablename__ = "author_follower"

    id = None  # type: ignore
    follower = Column(ForeignKey("author.id"), primary_key=True)
    author = Column(ForeignKey("author.id"), primary_key=True)
    created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
    auto = Column(Boolean, nullable=False, default=False)


class AuthorBookmark(Base):
    __tablename__ = "author_bookmark"

    id = None  # type: ignore
    author = Column(ForeignKey("author.id"), primary_key=True)
    shout = Column(ForeignKey("shout.id"), primary_key=True)


class Author(Base):
    __tablename__ = "author"

    user = Column(String)  # unbounded link with authorizer's User type

    name = Column(String, nullable=True, comment="Display name")
    slug = Column(String, unique=True, comment="Author's slug")
    bio = Column(String, nullable=True, comment="Bio")  # status description
    about = Column(String, nullable=True, comment="About")  # long and formatted
    pic = Column(String, nullable=True, comment="Picture")
    links = Column(JSON, nullable=True, comment="Links")
    created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
    last_seen = Column(Integer, nullable=False, default=lambda: int(time.time()))
    updated_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
    deleted_at = Column(Integer, nullable=True, comment="Deleted at")

    # search_vector = Column(
    #     TSVectorType("name", "slug", "bio", "about", regconfig="pg_catalog.russian")
    # )

@@ -1,14 +1,12 @@
import time
from sqlalchemy import Column, DateTime, ForeignKey, String, func

from sqlalchemy import Column, ForeignKey, Integer, String

from services.db import Base
from base.orm import Base


class ShoutCollection(Base):
    __tablename__ = "shout_collection"

    id = None  # type: ignore
    id = None
    shout = Column(ForeignKey("shout.id"), primary_key=True)
    collection = Column(ForeignKey("collection.id"), primary_key=True)

@@ -20,6 +18,6 @@ class Collection(Base):
    title = Column(String, nullable=False, comment="Title")
    body = Column(String, nullable=True, comment="Body")
    pic = Column(String, nullable=True, comment="Picture")
    created_at = Column(Integer, default=lambda: int(time.time()))
    created_by = Column(ForeignKey("author.id"), comment="Created By")
    published_at = Column(Integer, default=lambda: int(time.time()))
    createdAt = Column(DateTime(timezone=True), server_default=func.now(), comment="Created At")
    createdBy = Column(ForeignKey("user.id"), comment="Created By")
    publishedAt = Column(DateTime(timezone=True), server_default=func.now(), comment="Published At")

116 orm/community.py
@@ -1,106 +1,38 @@
import enum
import time
from sqlalchemy import Column, DateTime, ForeignKey, String, func

from sqlalchemy import Column, ForeignKey, Integer, String, Text, distinct, func
from sqlalchemy.ext.hybrid import hybrid_property

from orm.author import Author
from services.db import Base


class CommunityRole(enum.Enum):
    READER = "reader"  # can read and comment
    AUTHOR = "author"  # + can vote and invite collaborators
    ARTIST = "artist"  # + can be credited as featured artist
    EXPERT = "expert"  # + can add proof or disproof to shouts, can manage topics
    EDITOR = "editor"  # + can manage topics, comments and community settings

    @classmethod
    def as_string_array(cls, roles):
        return [role.value for role in roles]
from base.orm import Base, local_session


class CommunityFollower(Base):
    __tablename__ = "community_author"
    __tablename__ = "community_followers"

    author = Column(ForeignKey("author.id"), primary_key=True)
    community = Column(ForeignKey("community.id"), primary_key=True)
    joined_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
    roles = Column(Text, nullable=True, comment="Roles (comma-separated)")

    def set_roles(self, roles):
        self.roles = CommunityRole.as_string_array(roles)

    def get_roles(self):
        return [CommunityRole(role) for role in self.roles]
    id = None
    follower: Column = Column(ForeignKey("user.id"), primary_key=True)
    community: Column = Column(ForeignKey("community.id"), primary_key=True)
    joinedAt = Column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), comment="Created at"
    )
    # role = Column(ForeignKey(Role.id), nullable=False, comment="Role for member")


class Community(Base):
    __tablename__ = "community"

    name = Column(String, nullable=False)
    slug = Column(String, nullable=False, unique=True)
    name = Column(String, nullable=False, comment="Name")
    slug = Column(String, nullable=False, unique=True, comment="Slug")
    desc = Column(String, nullable=False, default="")
    pic = Column(String, nullable=False, default="")
    created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
    created_by = Column(ForeignKey("author.id"), nullable=False)

    @hybrid_property
    def stat(self):
        return CommunityStats(self)

    @property
    def role_list(self):
        return self.roles.split(",") if self.roles else []

    @role_list.setter
    def role_list(self, value):
        self.roles = ",".join(value) if value else None


class CommunityStats:
    def __init__(self, community):
        self.community = community

    @property
    def shouts(self):
        from orm.shout import Shout

        return self.community.session.query(func.count(Shout.id)).filter(Shout.community == self.community.id).scalar()

    @property
    def followers(self):
        return (
            self.community.session.query(func.count(CommunityFollower.author))
            .filter(CommunityFollower.community == self.community.id)
            .scalar()
    createdAt = Column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), comment="Created at"
    )

    @property
    def authors(self):
        from orm.shout import Shout

        # author has a shout with community id and its featured_at is not null
        return (
            self.community.session.query(func.count(distinct(Author.id)))
            .join(Shout)
            .filter(Shout.community == self.community.id, Shout.featured_at.is_not(None), Author.id.in_(Shout.authors))
            .scalar()
        )


class CommunityAuthor(Base):
    __tablename__ = "community_author"

    id = Column(Integer, primary_key=True)
    community_id = Column(Integer, ForeignKey("community.id"))
    author_id = Column(Integer, ForeignKey("author.id"))
    roles = Column(Text, nullable=True, comment="Roles (comma-separated)")

    @property
    def role_list(self):
        return self.roles.split(",") if self.roles else []

    @role_list.setter
    def role_list(self, value):
        self.roles = ",".join(value) if value else None
    @staticmethod
    def init_table():
        with local_session() as session:
            d = session.query(Community).filter(Community.slug == "discours").first()
            if not d:
                d = Community.create(name="Дискурс", slug="discours")
                session.add(d)
                session.commit()
            Community.default_community = d
            print("[orm] default community id: %s" % d.id)
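
Both classes above expose the comma-separated roles column through a role_list property; a tiny in-memory round-trip (nothing is persisted):

from orm.community import CommunityAuthor

ca = CommunityAuthor(community_id=1, author_id=1)
ca.role_list = ["reader", "author"]
print(ca.roles)      # "reader,author", the value stored in the Text column
print(ca.role_list)  # ["reader", "author"]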

@@ -1,35 +0,0 @@
import enum

from sqlalchemy import Column, ForeignKey, String
from sqlalchemy.orm import relationship

from services.db import Base


class InviteStatus(enum.Enum):
    PENDING = "PENDING"
    ACCEPTED = "ACCEPTED"
    REJECTED = "REJECTED"

    @classmethod
    def from_string(cls, value):
        return cls(value)


class Invite(Base):
    __tablename__ = "invite"

    inviter_id = Column(ForeignKey("author.id"), primary_key=True)
    author_id = Column(ForeignKey("author.id"), primary_key=True)
    shout_id = Column(ForeignKey("shout.id"), primary_key=True)
    status = Column(String, default=InviteStatus.PENDING.value)

    inviter = relationship("Author", foreign_keys=[inviter_id])
    author = relationship("Author", foreign_keys=[author_id])
    shout = relationship("Shout")

    def set_status(self, status: InviteStatus):
        self.status = status.value

    def get_status(self) -> InviteStatus:
        return InviteStatus.from_string(self.status)
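
The removed Invite model round-trips its status through the enum helpers; a minimal sketch (the orm.invite module path is assumed from the orm/ layout):

from orm.invite import Invite, InviteStatus

inv = Invite(inviter_id=1, author_id=2, shout_id=3)
inv.set_status(InviteStatus.ACCEPTED)
print(inv.status)        # "ACCEPTED", stored as a plain string
print(inv.get_status())  # InviteStatus.ACCEPTED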

@@ -1,63 +1,26 @@
import enum
import time
from enum import Enum as Enumeration

from sqlalchemy import JSON, Column, ForeignKey, Integer, String
from sqlalchemy.orm import relationship
from sqlalchemy import Boolean, Column, DateTime, Enum, ForeignKey, Integer, func
from sqlalchemy.dialects.postgresql import JSONB

from orm.author import Author
from services.db import Base
from base.orm import Base


class NotificationEntity(enum.Enum):
    REACTION = "reaction"
    SHOUT = "shout"
    FOLLOWER = "follower"
    COMMUNITY = "community"

    @classmethod
    def from_string(cls, value):
        return cls(value)


class NotificationAction(enum.Enum):
    CREATE = "create"
    UPDATE = "update"
    DELETE = "delete"
    SEEN = "seen"
    FOLLOW = "follow"
    UNFOLLOW = "unfollow"

    @classmethod
    def from_string(cls, value):
        return cls(value)


class NotificationSeen(Base):
    __tablename__ = "notification_seen"

    viewer = Column(ForeignKey("author.id"), primary_key=True)
    notification = Column(ForeignKey("notification.id"), primary_key=True)
class NotificationType(Enumeration):
    NEW_COMMENT = 1
    NEW_REPLY = 2


class Notification(Base):
    __tablename__ = "notification"

    id = Column(Integer, primary_key=True, autoincrement=True)
    created_at = Column(Integer, server_default=str(int(time.time())))
    entity = Column(String, nullable=False)
    action = Column(String, nullable=False)
    payload = Column(JSON, nullable=True)

    seen = relationship(Author, secondary="notification_seen")

    def set_entity(self, entity: NotificationEntity):
        self.entity = entity.value

    def get_entity(self) -> NotificationEntity:
        return NotificationEntity.from_string(self.entity)

    def set_action(self, action: NotificationAction):
        self.action = action.value

    def get_action(self) -> NotificationAction:
        return NotificationAction.from_string(self.action)
    shout: Column = Column(ForeignKey("shout.id"), index=True)
    reaction: Column = Column(ForeignKey("reaction.id"), index=True)
    user: Column = Column(ForeignKey("user.id"), index=True)
    createdAt = Column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), index=True
    )
    seen = Column(Boolean, nullable=False, default=False, index=True)
    type = Column(Enum(NotificationType), nullable=False)
    data = Column(JSONB, nullable=True)
    occurrences = Column(Integer, default=1)

@@ -1,30 +0,0 @@
from orm.reaction import ReactionKind

PROPOSAL_REACTIONS = [
    ReactionKind.ACCEPT.value,
    ReactionKind.REJECT.value,
    ReactionKind.AGREE.value,
    ReactionKind.DISAGREE.value,
    ReactionKind.ASK.value,
    ReactionKind.PROPOSE.value,
]

PROOF_REACTIONS = [ReactionKind.PROOF.value, ReactionKind.DISPROOF.value]

RATING_REACTIONS = [ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]


def is_negative(x):
    return x in [
        ReactionKind.DISLIKE.value,
        ReactionKind.DISPROOF.value,
        ReactionKind.REJECT.value,
    ]


def is_positive(x):
    return x in [
        ReactionKind.ACCEPT.value,
        ReactionKind.LIKE.value,
        ReactionKind.PROOF.value,
    ]
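
An illustrative check of the deleted helpers, assuming the module is still importable; the results hold for either ReactionKind definition, since both compare by .value:

from orm.reaction import ReactionKind

print(is_positive(ReactionKind.LIKE.value))     # True
print(is_negative(ReactionKind.REJECT.value))   # True
print(is_positive(ReactionKind.COMMENT.value))  # False: comments carry no rating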

178 orm/rbac.py Normal file
@@ -0,0 +1,178 @@
import warnings

from sqlalchemy import Column, ForeignKey, String, TypeDecorator, UniqueConstraint
from sqlalchemy.orm import relationship

from base.orm import REGISTRY, Base, local_session

# Role Based Access Control #


class ClassType(TypeDecorator):
    impl = String

    @property
    def python_type(self):
        return NotImplemented

    def process_literal_param(self, value, dialect):
        return NotImplemented

    def process_bind_param(self, value, dialect):
        return value.__name__ if isinstance(value, type) else str(value)

    def process_result_value(self, value, dialect):
        class_ = REGISTRY.get(value)
        if class_ is None:
            warnings.warn(f"Can't find class <{value}>,find it yourself!", stacklevel=2)
        return class_


class Role(Base):
    __tablename__ = "role"

    name = Column(String, nullable=False, comment="Role Name")
    desc = Column(String, nullable=True, comment="Role Description")
    community = Column(
        ForeignKey("community.id", ondelete="CASCADE"),
        nullable=False,
        comment="Community",
    )
    permissions = relationship(lambda: Permission)

    @staticmethod
    def init_table():
        with local_session() as session:
            r = session.query(Role).filter(Role.name == "author").first()
            if r:
                Role.default_role = r
                return

            r1 = Role.create(
                name="author",
                desc="Role for an author",
                community=1,
            )

            session.add(r1)

            Role.default_role = r1

            r2 = Role.create(
                name="reader",
                desc="Role for a reader",
                community=1,
            )

            session.add(r2)

            r3 = Role.create(
                name="expert",
                desc="Role for an expert",
                community=1,
            )

            session.add(r3)

            r4 = Role.create(
                name="editor",
                desc="Role for an editor",
                community=1,
            )

            session.add(r4)


class Operation(Base):
    __tablename__ = "operation"
    name = Column(String, nullable=False, unique=True, comment="Operation Name")

    @staticmethod
    def init_table():
        with local_session() as session:
            for name in ["create", "update", "delete", "load"]:
                """
                * everyone can:
                    - load shouts
                    - load topics
                    - load reactions
                    - create an account to become a READER
                * readers can:
                    - update and delete their account
                    - load chats
                    - load messages
                    - create reaction of some shout's author allowed kinds
                    - create shout to become an AUTHOR
                * authors can:
                    - update and delete their shout
                    - invite other authors to edit shout and chat
                    - manage allowed reactions for their shout
                * pros can:
                    - create/update/delete their community
                    - create/update/delete topics for their community

                """
                op = session.query(Operation).filter(Operation.name == name).first()
                if not op:
                    op = Operation.create(name=name)
                    session.add(op)
                    session.commit()


class Resource(Base):
    __tablename__ = "resource"
    resourceClass = Column(String, nullable=False, unique=True, comment="Resource class")
    name = Column(String, nullable=False, unique=True, comment="Resource name")
    # TODO: community = Column(ForeignKey())

    @staticmethod
    def init_table():
        with local_session() as session:
            for res in [
                "shout",
                "topic",
                "reaction",
                "chat",
                "message",
                "invite",
                "community",
                "user",
            ]:
                r = session.query(Resource).filter(Resource.name == res).first()
                if not r:
                    r = Resource.create(name=res, resourceClass=res)
                    session.add(r)
                    session.commit()


class Permission(Base):
    __tablename__ = "permission"
    __table_args__ = (
        UniqueConstraint("role", "operation", "resource"),
        {"extend_existing": True},
    )

    role: Column = Column(ForeignKey("role.id", ondelete="CASCADE"), nullable=False, comment="Role")
    operation: Column = Column(
        ForeignKey("operation.id", ondelete="CASCADE"),
        nullable=False,
        comment="Operation",
    )
    resource: Column = Column(
        ForeignKey("resource.id", ondelete="CASCADE"),
        nullable=False,
        comment="Resource",
    )


# if __name__ == "__main__":
#     Base.metadata.create_all(engine)
#     ops = [
#         Permission(role=1, operation=1, resource=1),
#         Permission(role=1, operation=2, resource=1),
#         Permission(role=1, operation=3, resource=1),
#         Permission(role=1, operation=4, resource=1),
#         Permission(role=2, operation=4, resource=1),
#     ]
#     global_session.add_all(ops)
#     global_session.commit()
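
A hedged seeding sketch built on the RBAC tables above: grant the author role the create operation on the shout resource. It assumes Base.create() behaves as in the init_table() methods and that init_tables() has already seeded the rows being looked up:

from base.orm import local_session
from orm.rbac import Operation, Permission, Resource, Role

with local_session() as session:
    role = session.query(Role).filter(Role.name == "author").one()
    op = session.query(Operation).filter(Operation.name == "create").one()
    res = session.query(Resource).filter(Resource.name == "shout").one()
    session.add(Permission.create(role=role.id, operation=op.id, resource=res.id))
    session.commit()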

@@ -1,45 +1,47 @@
import time
from enum import Enum as Enumeration

from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy import Column, DateTime, Enum, ForeignKey, String, func

from services.db import Base
from base.orm import Base


class ReactionKind(Enumeration):
    AGREE = 1  # +1
    DISAGREE = 2  # -1
    PROOF = 3  # +1
    DISPROOF = 4  # -1
    ASK = 5  # +0
    PROPOSE = 6  # +0
    QUOTE = 7  # +0 bookmark
    COMMENT = 8  # +0
    ACCEPT = 9  # +1
    REJECT = 0  # -1
    LIKE = 11  # +1
    DISLIKE = 12  # -1
    REMARK = 13  # 0
    FOOTNOTE = 14  # 0
    # TYPE = <reaction index> # rating diff

    # editor mode
    AGREE = "AGREE"  # +1
    DISAGREE = "DISAGREE"  # -1
    ASK = "ASK"  # +0
    PROPOSE = "PROPOSE"  # +0
    ACCEPT = "ACCEPT"  # +1
    REJECT = "REJECT"  # -1

    # expert mode
    PROOF = "PROOF"  # +1
    DISPROOF = "DISPROOF"  # -1

    # public feed
    QUOTE = "QUOTE"  # +0 TODO: use to bookmark in collection
    COMMENT = "COMMENT"  # +0
    LIKE = "LIKE"  # +1
    DISLIKE = "DISLIKE"  # -1


class Reaction(Base):
    __tablename__ = "reaction"

    body = Column(String, default="", comment="Reaction Body")
    created_at = Column(Integer, nullable=False, default=lambda: int(time.time()), index=True)
    updated_at = Column(Integer, nullable=True, comment="Updated at", index=True)
    deleted_at = Column(Integer, nullable=True, comment="Deleted at", index=True)
    deleted_by = Column(ForeignKey("author.id"), nullable=True)
    reply_to = Column(ForeignKey("reaction.id"), nullable=True)
    quote = Column(String, nullable=True, comment="Original quoted text")
    shout = Column(ForeignKey("shout.id"), nullable=False, index=True)
    created_by = Column(ForeignKey("author.id"), nullable=False)
    kind = Column(String, nullable=False, index=True)

    oid = Column(String)
    body = Column(String, nullable=True, comment="Reaction Body")
    createdAt = Column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), comment="Created at"
    )
    createdBy: Column = Column(ForeignKey("user.id"), nullable=False, index=True, comment="Sender")
    updatedAt = Column(DateTime(timezone=True), nullable=True, comment="Updated at")
    updatedBy: Column = Column(
        ForeignKey("user.id"), nullable=True, index=True, comment="Last Editor"
    )
    deletedAt = Column(DateTime(timezone=True), nullable=True, comment="Deleted at")
    deletedBy: Column = Column(
        ForeignKey("user.id"), nullable=True, index=True, comment="Deleted by"
    )
    shout: Column = Column(ForeignKey("shout.id"), nullable=False, index=True)
    replyTo: Column = Column(
        ForeignKey("reaction.id"), nullable=True, comment="Reply to reaction ID"
    )
    range = Column(String, nullable=True, comment="Range in format <start index>:<end>")
    kind = Column(Enum(ReactionKind), nullable=False, comment="Reaction kind")
    oid = Column(String, nullable=True, comment="Old ID")
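
The hunk above trades integer reaction codes for string-valued enum members, so the stored kind becomes self-describing; a tiny illustration (nothing is persisted):

from orm.reaction import ReactionKind

kind = ReactionKind.LIKE
print(kind.value)                        # "LIKE" with string members, 11 with integers
print(ReactionKind(kind.value) is kind)  # True: values round-trip to members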

98 orm/shout.py
@@ -1,74 +1,98 @@
import time
from sqlalchemy import (
    JSON,
    Boolean,
    Column,
    DateTime,
    ForeignKey,
    Integer,
    String,
    func,
)
from sqlalchemy.orm import column_property, relationship

from sqlalchemy import JSON, Boolean, Column, ForeignKey, Integer, String
from sqlalchemy.orm import relationship

from orm.author import Author
from base.orm import Base, local_session
from orm.reaction import Reaction
from orm.topic import Topic
from services.db import Base
from orm.user import User


class ShoutTopic(Base):
    __tablename__ = "shout_topic"

    id = None  # type: ignore
    shout = Column(ForeignKey("shout.id"), primary_key=True, index=True)
    topic = Column(ForeignKey("topic.id"), primary_key=True, index=True)
    main = Column(Boolean, nullable=True)
    id = None
    shout: Column = Column(ForeignKey("shout.id"), primary_key=True, index=True)
    topic: Column = Column(ForeignKey("topic.id"), primary_key=True, index=True)


class ShoutReactionsFollower(Base):
    __tablename__ = "shout_reactions_followers"

    id = None  # type: ignore
    follower = Column(ForeignKey("author.id"), primary_key=True, index=True)
    shout = Column(ForeignKey("shout.id"), primary_key=True, index=True)
    id = None
    follower: Column = Column(ForeignKey("user.id"), primary_key=True, index=True)
    shout: Column = Column(ForeignKey("shout.id"), primary_key=True, index=True)
    auto = Column(Boolean, nullable=False, default=False)
    created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
    deleted_at = Column(Integer, nullable=True)
    createdAt = Column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), comment="Created at"
    )
    deletedAt = Column(DateTime(timezone=True), nullable=True)


class ShoutAuthor(Base):
    __tablename__ = "shout_author"

    id = None  # type: ignore
    shout = Column(ForeignKey("shout.id"), primary_key=True, index=True)
    author = Column(ForeignKey("author.id"), primary_key=True, index=True)
    caption = Column(String, nullable=True, default="")
    id = None
    shout: Column = Column(ForeignKey("shout.id"), primary_key=True, index=True)
    user: Column = Column(ForeignKey("user.id"), primary_key=True, index=True)
    caption: Column = Column(String, nullable=True, default="")


class Shout(Base):
    __tablename__ = "shout"

    created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
    updated_at = Column(Integer, nullable=True, index=True)
    published_at = Column(Integer, nullable=True, index=True)
    featured_at = Column(Integer, nullable=True, index=True)
    deleted_at = Column(Integer, nullable=True, index=True)
    # timestamps
    createdAt = Column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), comment="Created at"
    )
    updatedAt = Column(DateTime(timezone=True), nullable=True, comment="Updated at")
    publishedAt = Column(DateTime(timezone=True), nullable=True)
    deletedAt = Column(DateTime(timezone=True), nullable=True)

    created_by = Column(ForeignKey("author.id"), nullable=False)
    updated_by = Column(ForeignKey("author.id"), nullable=True)
    deleted_by = Column(ForeignKey("author.id"), nullable=True)
    community = Column(ForeignKey("community.id"), nullable=False)
    createdBy: Column = Column(ForeignKey("user.id"), comment="Created By")
    deletedBy: Column = Column(ForeignKey("user.id"), nullable=True)

    body = Column(String, nullable=False, comment="Body")
    slug = Column(String, unique=True)
    cover = Column(String, nullable=True, comment="Cover image url")
    cover_caption = Column(String, nullable=True, comment="Cover image alt caption")
    lead = Column(String, nullable=True)
    description = Column(String, nullable=True)
    title = Column(String, nullable=False)
    body = Column(String, nullable=False, comment="Body")
    title = Column(String, nullable=True)
    subtitle = Column(String, nullable=True)
    layout = Column(String, nullable=False, default="article")
    layout = Column(String, nullable=True)
    media = Column(JSON, nullable=True)
    authors = relationship(lambda: User, secondary=ShoutAuthor.__tablename__)
    topics = relationship(lambda: Topic, secondary=ShoutTopic.__tablename__)

    authors = relationship(Author, secondary="shout_author")
    topics = relationship(Topic, secondary="shout_topic")
    reactions = relationship(Reaction)
    # views from the old Discours website
    viewsOld = Column(Integer, default=0)
    # views from Ackee tracker on the new Discours website
    viewsAckee = Column(Integer, default=0)
    views = column_property(viewsOld + viewsAckee)
    reactions = relationship(lambda: Reaction)

    # TODO: these field should be used or modified
    community: Column = Column(ForeignKey("community.id"), default=1)
    lang = Column(String, nullable=False, default="ru", comment="Language")
    version_of = Column(ForeignKey("shout.id"), nullable=True)
    mainTopic: Column = Column(ForeignKey("topic.slug"), nullable=True)
    visibility = Column(String, nullable=True)  # owner authors community public
    versionOf: Column = Column(ForeignKey("shout.id"), nullable=True)
    oid = Column(String, nullable=True)

    seo = Column(String, nullable=True)  # JSON

    @staticmethod
    def init_table():
        with local_session() as session:
            s = session.query(Shout).first()
            if not s:
                entry = {"slug": "genesis-block", "body": "", "title": "Ничего", "lang": "ru"}
                s = Shout.create(**entry)
                session.add(s)
                session.commit()
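
A short read-path sketch against the model above, assuming init_table() has seeded the genesis-block shout:

from base.orm import local_session
from orm.shout import Shout

with local_session() as session:
    shout = session.query(Shout).filter(Shout.slug == "genesis-block").first()
    if shout:
        print(shout.title, [t.slug for t in shout.topics])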

20 orm/topic.py
@@ -1,17 +1,17 @@
import time
from sqlalchemy import Boolean, Column, DateTime, ForeignKey, String, func

from sqlalchemy import JSON, Boolean, Column, ForeignKey, Integer, String

from services.db import Base
from base.orm import Base


class TopicFollower(Base):
    __tablename__ = "topic_followers"

    id = None  # type: ignore
    follower = Column(Integer, ForeignKey("author.id"), primary_key=True)
    topic = Column(Integer, ForeignKey("topic.id"), primary_key=True)
    created_at = Column(Integer, nullable=False, default=int(time.time()))
    id = None
    follower: Column = Column(ForeignKey("user.id"), primary_key=True, index=True)
    topic: Column = Column(ForeignKey("topic.id"), primary_key=True, index=True)
    createdAt = Column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), comment="Created at"
    )
    auto = Column(Boolean, nullable=False, default=False)


@@ -22,7 +22,5 @@ class Topic(Base):
    title = Column(String, nullable=False, comment="Title")
    body = Column(String, nullable=True, comment="Body")
    pic = Column(String, nullable=True, comment="Picture")
    community = Column(ForeignKey("community.id"), default=1)
    community: Column = Column(ForeignKey("community.id"), default=1, comment="Community")
    oid = Column(String, nullable=True, comment="Old ID")

    parent_ids = Column(JSON, nullable=True, comment="Parent Topic IDs")
105 orm/user.py Normal file
@@ -0,0 +1,105 @@
from sqlalchemy import JSON as JSONType
from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String, func
from sqlalchemy.orm import relationship

from base.orm import Base, local_session
from orm.rbac import Role


class UserRating(Base):
    __tablename__ = "user_rating"

    id = None
    rater: Column = Column(ForeignKey("user.id"), primary_key=True, index=True)
    user: Column = Column(ForeignKey("user.id"), primary_key=True, index=True)
    value: Column = Column(Integer)

    @staticmethod
    def init_table():
        pass


class UserRole(Base):
    __tablename__ = "user_role"

    id = None
    user = Column(ForeignKey("user.id"), primary_key=True, index=True)
    role = Column(ForeignKey("role.id"), primary_key=True, index=True)


class AuthorFollower(Base):
    __tablename__ = "author_follower"

    id = None
    follower: Column = Column(ForeignKey("user.id"), primary_key=True, index=True)
    author: Column = Column(ForeignKey("user.id"), primary_key=True, index=True)
    createdAt = Column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), comment="Created at"
    )
    auto = Column(Boolean, nullable=False, default=False)


class User(Base):
    __tablename__ = "user"
    default_user = None

    email = Column(String, unique=True, nullable=False, comment="Email")
    username = Column(String, nullable=False, comment="Login")
    password = Column(String, nullable=True, comment="Password")
    bio = Column(String, nullable=True, comment="Bio")  # status description
    about = Column(String, nullable=True, comment="About")  # long and formatted
    userpic = Column(String, nullable=True, comment="Userpic")
    name = Column(String, nullable=True, comment="Display name")
    slug = Column(String, unique=True, comment="User's slug")
    muted = Column(Boolean, default=False)
    emailConfirmed = Column(Boolean, default=False)
    createdAt = Column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), comment="Created at"
    )
    lastSeen = Column(
        DateTime(timezone=True), nullable=False, server_default=func.now(), comment="Was online at"
    )
    deletedAt = Column(DateTime(timezone=True), nullable=True, comment="Deleted at")
    links = Column(JSONType, nullable=True, comment="Links")
    oauth = Column(String, nullable=True)
    ratings = relationship(UserRating, foreign_keys=UserRating.user)
    roles = relationship(lambda: Role, secondary=UserRole.__tablename__)
    oid = Column(String, nullable=True)

    @staticmethod
    def init_table():
        with local_session() as session:
            default = session.query(User).filter(User.slug == "anonymous").first()
            if not default:
                default_dict = {
                    "email": "noreply@discours.io",
                    "username": "noreply@discours.io",
                    "name": "Аноним",
                    "slug": "anonymous",
                }
                default = User.create(**default_dict)
                session.add(default)
                discours_dict = {
                    "email": "welcome@discours.io",
                    "username": "welcome@discours.io",
                    "name": "Дискурс",
                    "slug": "discours",
                }
                discours = User.create(**discours_dict)
                session.add(discours)
                session.commit()
            User.default_user = default

    def get_permission(self):
        scope = {}
        for role in self.roles:
            for p in role.permissions:
                if p.resource not in scope:
                    scope[p.resource] = set()
                scope[p.resource].add(p.operation)
        print(scope)
        return scope


# if __name__ == "__main__":
#     print(User.get_permission(user_id=1))
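The scope that get_permission builds above maps each resource to the set of operations the user's roles allow. A tiny self-contained illustration of that shape, with made-up resource and operation names:

# hypothetical output shape of User.get_permission(), for illustration only
scope = {
    "shout": {"create", "update"},
    "topic": {"follow"},
}


def can(scope: dict, resource: str, operation: str) -> bool:
    # permission check over the resource -> operations mapping
    return operation in scope.get(resource, set())


assert can(scope, "shout", "update")
assert not can(scope, "topic", "delete")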
1802 poetry.lock generated Normal file
File diff suppressed because it is too large.
@@ -1,51 +0,0 @@
[tool.poetry]
name = "core"
version = "0.4.7"
description = "core module for discours.io"
authors = ["discoursio devteam"]
license = "MIT"
readme = "README.md"

[tool.poetry.dependencies]
python = "^3.12"
SQLAlchemy = "^2.0.29"
psycopg2-binary = "^2.9.9"
redis = {extras = ["hiredis"], version = "^5.0.1"}
sentry-sdk = {version = "^1.44.1", extras = ["starlette", "ariadne", "sqlalchemy"]}
starlette = "^0.39.2"
gql = "^3.5.0"
ariadne = "^0.23.0"
pre-commit = "^3.7.0"
granian = "^1.4.1"
google-analytics-data = "^0.18.7"
opensearch-py = "^2.6.0"
httpx = "^0.27.0"
dogpile-cache = "^1.3.1"
colorlog = "^6.8.2"
fakeredis = "^2.25.1"
pydantic = "^2.9.2"
jwt = "^1.3.1"
authlib = "^1.3.2"

[tool.poetry.group.dev.dependencies]
ruff = "^0.4.7"
isort = "^5.13.2"
pydantic = "^2.9.2"

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

[tool.pyright]
venvPath = "."
venv = ".venv"

[tool.isort]
multi_line_output = 3
include_trailing_comma = true
force_grid_wrap = 0
line_length = 120

[tool.ruff]
line-length = 120
8 requirements-dev.txt Executable file
@@ -0,0 +1,8 @@
black==23.10.1
flake8==6.1.0
gql_schema_codegen==1.0.1
isort==5.12.0
mypy==1.6.1
pre-commit==3.5.0
pymongo-stubs==0.2.0
sqlalchemy-stubs==0.4
37 requirements.txt Normal file
@@ -0,0 +1,37 @@
aiohttp==3.8.6
alembic==1.11.3
ariadne>=0.17.0
asyncio~=3.4.3
authlib==1.2.1
bcrypt>=4.0.0
beautifulsoup4~=4.11.1
boto3~=1.28.2
botocore~=1.31.2
bson~=0.5.10
DateTime~=4.7
gql~=3.4.0
graphql-core>=3.0.3
httpx>=0.23.0
itsdangerous
lxml
Mako==1.2.4
MarkupSafe==2.1.3
nltk~=3.8.1
passlib~=1.7.4
psycopg2-binary
pydantic>=1.10.2
pyjwt>=2.6.0
pymystem3~=0.2.0
python-dateutil~=2.8.2
python-frontmatter~=1.0.0
python-multipart~=0.0.6
PyYAML>=5.4
requests~=2.28.1
sentry-sdk>=1.14.0
sqlalchemy>=1.4.41
sse-starlette==1.6.5
starlette~=0.23.1
transliterate~=1.10.2
uvicorn>=0.18.3

redis
55 resetdb.sh Executable file
@@ -0,0 +1,55 @@
database_name="discoursio"
remote_backup_dir="/var/backups/mongodb"
user="root"
host="v2.discours.io"
server="$user@$host"
dump_dir="./dump"
local_backup_filename="discours-backup.bson.gz.tar"

echo "DATABASE RESET STARTED"
echo "server: $server"
echo "remote backup directory: $remote_backup_dir"

echo "Searching for last backup file..."
last_backup_filename=$(ssh $server "ls -t $remote_backup_dir | head -1")
if [ $? -ne 0 ]; then { echo "Failed to get last backup filename, aborting." ; exit 1; } fi
echo "Last backup file found: $last_backup_filename"

echo "Downloading..."
scp $server:$remote_backup_dir/"$last_backup_filename" "$local_backup_filename"
if [ $? -ne 0 ]; then { echo "Failed to download backup file, aborting." ; exit 1; } fi
echo "Backup file $local_backup_filename downloaded successfully"

echo "Creating dump directory: $dump_dir"
mkdir -p "$dump_dir"
if [ $? -ne 0 ]; then { echo "Failed to create dump directory, aborting." ; exit 1; } fi
echo "$dump_dir directory created"

echo "Unpacking backup file $local_backup_filename to $dump_dir"
tar -xzf "$local_backup_filename" --directory "$dump_dir" --strip-components 1
if [ $? -ne 0 ]; then { echo "Failed to unpack backup, aborting." ; exit 1; } fi
echo "Backup file $local_backup_filename successfully unpacked to $dump_dir"

echo "Removing backup file $local_backup_filename"
rm "$local_backup_filename"
if [ $? -ne 0 ]; then { echo "Failed to remove backup file, aborting." ; exit 1; } fi
echo "Backup file removed"

echo "Dropping database $database_name"
dropdb $database_name --force
if [ $? -ne 0 ]; then { echo "Failed to drop database, aborting." ; exit 1; } fi
echo "Database $database_name dropped"

echo "Creating database $database_name"
createdb $database_name
if [ $? -ne 0 ]; then { echo "Failed to create database, aborting." ; exit 1; } fi
echo "Database $database_name successfully created"

echo "BSON -> JSON"
python3 server.py bson
if [ $? -ne 0 ]; then { echo "BSON -> JSON failed, aborting." ; exit 1; } fi

echo "Start migration"
python3 server.py migrate
if [ $? -ne 0 ]; then { echo "Migration failed, aborting." ; exit 1; } fi
echo 'Done!'
@@ -1,116 +1,46 @@
from cache.triggers import events_register
from resolvers.author import (  # search_authors,
    get_author,
    get_author_followers,
    get_author_follows,
    get_author_follows_authors,
    get_author_follows_topics,
    get_author_id,
# flake8: noqa

from resolvers.auth import (
    auth_send_link,
    confirm_email,
    get_current_user,
    is_email_used,
    login,
    register_by_email,
    sign_out,
)
from resolvers.create.editor import create_shout, delete_shout, update_shout
from resolvers.inbox.chats import create_chat, delete_chat, update_chat
from resolvers.inbox.load import load_chats, load_messages_by, load_recipients
from resolvers.inbox.messages import (
    create_message,
    delete_message,
    mark_as_read,
    update_message,
)
from resolvers.inbox.search import search_recipients
from resolvers.notifications import load_notifications
from resolvers.zine.following import follow, unfollow
from resolvers.zine.load import load_shout, load_shouts_by
from resolvers.zine.profile import (
    get_authors_all,
    load_authors_by,
    update_author,
    rate_user,
    update_profile,
)
from resolvers.community import get_communities_all, get_community
from resolvers.editor import create_shout, delete_shout, update_shout
from resolvers.feed import (
    load_shouts_coauthored,
    load_shouts_discussed,
    load_shouts_feed,
    load_shouts_followed_by,
)
from resolvers.follower import follow, get_shout_followers, unfollow
from resolvers.notifier import (
    load_notifications,
    notification_mark_seen,
    notifications_seen_after,
    notifications_seen_thread,
)
from resolvers.rating import get_my_rates_comments, get_my_rates_shouts, rate_author
from resolvers.reaction import (
from resolvers.zine.reactions import (
    create_reaction,
    delete_reaction,
    load_comment_ratings,
    load_reactions_by,
    load_shout_comments,
    load_shout_ratings,
    reactions_follow,
    reactions_unfollow,
    update_reaction,
)
from resolvers.reader import (
    get_shout,
    load_shouts_by,
    load_shouts_random_top,
    load_shouts_search,
    load_shouts_unrated,
)
from resolvers.topic import (
from resolvers.zine.topics import (
    get_topic,
    get_topic_authors,
    get_topic_followers,
    get_topics_all,
    get_topics_by_author,
    get_topics_by_community,
    topic_follow,
    topic_unfollow,
    topics_all,
    topics_by_author,
    topics_by_community,
)

events_register()

__all__ = [
    # author
    "get_author",
    "get_author_id",
    "get_author_followers",
    "get_author_follows",
    "get_author_follows_topics",
    "get_author_follows_authors",
    "get_authors_all",
    "load_authors_by",
    "update_author",
    ## "search_authors",
    # community
    "get_community",
    "get_communities_all",
    # topic
    "get_topic",
    "get_topics_all",
    "get_topics_by_community",
    "get_topics_by_author",
    "get_topic_followers",
    "get_topic_authors",
    # reader
    "get_shout",
    "load_shouts_by",
    "load_shouts_random_top",
    "load_shouts_search",
    "load_shouts_unrated",
    # feed
    "load_shouts_feed",
    "load_shouts_coauthored",
    "load_shouts_discussed",
    "load_shouts_with_topic",
    "load_shouts_followed_by",
    "load_shouts_authored_by",
    # follower
    "follow",
    "unfollow",
    "get_shout_followers",
    # editor
    "create_shout",
    "update_shout",
    "delete_shout",
    # reaction
    "create_reaction",
    "update_reaction",
    "delete_reaction",
    "load_reactions_by",
    "load_shout_comments",
    "load_shout_ratings",
    "load_comment_ratings",
    # notifier
    "load_notifications",
    "notifications_seen_thread",
    "notifications_seen_after",
    "notification_mark_seen",
    # rating
    "rate_author",
    "get_my_rates_comments",
    "get_my_rates_shouts",
]
@@ -5,17 +5,18 @@ from datetime import datetime, timezone
from urllib.parse import quote_plus

from graphql.type import GraphQLResolveInfo
from transliterate import translit

from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from auth.email import send_auth_email
from auth.exceptions import InvalidPassword, InvalidToken, ObjectNotExist, Unauthorized
from auth.identity import Identity, Password
from auth.jwtcodec import JWTCodec
from auth.tokenstorage import TokenStorage
from base.exceptions import InvalidPassword, InvalidToken, ObjectNotExist, Unauthorized
from base.orm import local_session
from base.resolvers import mutation, query
from orm import Role, User
from services.db import local_session
from services.schema import mutation, query
from settings import SESSION_TOKEN_HEADER
@@ -65,50 +66,9 @@ def create_user(user_dict):
    return user


def replace_translit(src):
    ruchars = "абвгдеёжзийклмнопрстуфхцчшщъыьэюя."
    enchars = [
        "a", "b", "v", "g", "d", "e", "yo", "zh", "z", "i", "y", "k", "l",
        "m", "n", "o", "p", "r", "s", "t", "u", "f", "h", "c", "ch", "sh",
        "sch", "", "y", "'", "e", "yu", "ya", "-",
    ]
    return src.translate(str.maketrans(ruchars, enchars))


def generate_unique_slug(src):
    print("[resolvers.auth] generating slug from: " + src)
    slug = replace_translit(src.lower())
    slug = translit(src, "ru", reversed=True).replace(".", "-").lower()
    slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
    if slug != src:
        print("[resolvers.auth] translited name: " + slug)
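The hunk cuts off inside generate_unique_slug. A minimal sketch of how such a helper typically finishes, reusing the local_session and User imports visible elsewhere in this diff; the uniqueness loop is an assumption for illustration, not the repository's confirmed tail of this function:

import re

from transliterate import translit

from base.orm import local_session
from orm import User


def generate_unique_slug(src):
    # transliterate Cyrillic, then keep only URL-safe characters
    slug = translit(src, "ru", reversed=True).replace(".", "-").lower()
    slug = re.sub("[^0-9a-zA-Z]+", "-", slug).strip("-")
    candidate = slug
    c = 1
    with local_session() as session:
        # append -2, -3, ... until no user owns the slug (hypothetical dedup loop)
        while session.query(User).filter(User.slug == candidate).first():
            c += 1
            candidate = f"{slug}-{c}"
    return candidate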
@@ -1,249 +0,0 @@
import asyncio
import time

from sqlalchemy import desc, select, text

from cache.cache import (
    cache_author,
    get_cached_author,
    get_cached_author_by_user_id,
    get_cached_author_followers,
    get_cached_follower_authors,
    get_cached_follower_topics,
)
from orm.author import Author
from orm.shout import ShoutAuthor, ShoutTopic
from orm.topic import Topic
from resolvers.stat import get_with_stat
from services.auth import login_required
from services.db import local_session
from services.schema import mutation, query
from utils.logger import root_logger as logger


@mutation.field("update_author")
@login_required
async def update_author(_, info, profile):
    user_id = info.context.get("user_id")
    if not user_id:
        return {"error": "unauthorized", "author": None}
    try:
        with local_session() as session:
            author = session.query(Author).where(Author.user == user_id).first()
            if author:
                Author.update(author, profile)
                session.add(author)
                session.commit()
                author_query = select(Author).where(Author.user == user_id)
                result = get_with_stat(author_query)
                if result:
                    author_with_stat = result[0]
                    if isinstance(author_with_stat, Author):
                        author_dict = author_with_stat.dict()
                        # await cache_author(author_dict)
                        asyncio.create_task(cache_author(author_dict))
        return {"error": None, "author": author}
    except Exception as exc:
        import traceback

        logger.error(traceback.format_exc())
        return {"error": exc, "author": None}


@query.field("get_authors_all")
def get_authors_all(_, _info):
    with local_session() as session:
        authors = session.query(Author).all()
        return authors


@query.field("get_author")
async def get_author(_, _info, slug="", author_id=0):
    author_dict = None
    try:
        author_id = get_author_id_from(slug=slug, user="", author_id=author_id)
        if not author_id:
            raise ValueError("cant find")
        author_dict = await get_cached_author(int(author_id), get_with_stat)

        if not author_dict or not author_dict.get("stat"):
            # update stat from db
            author_query = select(Author).filter(Author.id == author_id)
            result = get_with_stat(author_query)
            if result:
                author_with_stat = result[0]
                if isinstance(author_with_stat, Author):
                    author_dict = author_with_stat.dict()
                    # await cache_author(author_dict)
                    asyncio.create_task(cache_author(author_dict))
    except ValueError:
        pass
    except Exception as exc:
        import traceback

        logger.error(f"{exc}:\n{traceback.format_exc()}")
    return author_dict


@query.field("get_author_id")
async def get_author_id(_, _info, user: str):
    user_id = user.strip()
    logger.info(f"getting author id for {user_id}")
    author = None
    try:
        author = await get_cached_author_by_user_id(user_id, get_with_stat)
        if author:
            return author

        author_query = select(Author).filter(Author.user == user_id)
        result = get_with_stat(author_query)
        if result:
            author_with_stat = result[0]
            if isinstance(author_with_stat, Author):
                author_dict = author_with_stat.dict()
                # await cache_author(author_dict)
                asyncio.create_task(cache_author(author_dict))
                return author_with_stat
    except Exception as exc:
        import traceback

        traceback.print_exc()
        logger.error(exc)


@query.field("load_authors_by")
async def load_authors_by(_, _info, by, limit, offset):
    logger.debug(f"loading authors by {by}")
    authors_query = select(Author)

    if by.get("slug"):
        authors_query = authors_query.filter(Author.slug.ilike(f"%{by['slug']}%"))
    elif by.get("name"):
        authors_query = authors_query.filter(Author.name.ilike(f"%{by['name']}%"))
    elif by.get("topic"):
        authors_query = (
            authors_query.join(ShoutAuthor)  # first join: ShoutAuthor
            .join(ShoutTopic, ShoutAuthor.shout == ShoutTopic.shout)
            .join(Topic, ShoutTopic.topic == Topic.id)
            .filter(Topic.slug == str(by["topic"]))
        )

    if by.get("last_seen"):  # in unix time
        before = int(time.time()) - by["last_seen"]
        authors_query = authors_query.filter(Author.last_seen > before)
    elif by.get("created_at"):  # in unix time
        before = int(time.time()) - by["created_at"]
        authors_query = authors_query.filter(Author.created_at > before)

    authors_query = authors_query.limit(limit).offset(offset)

    with local_session() as session:
        authors_nostat = session.execute(authors_query).all()
        authors = []
        for a in authors_nostat:
            if isinstance(a, Author):
                author_dict = await get_cached_author(a.id, get_with_stat)
                if author_dict and isinstance(author_dict.get("shouts"), int):
                    authors.append(author_dict)

    # order
    order = by.get("order")
    if order in ["shouts", "followers"]:
        authors_query = authors_query.order_by(desc(text(f"{order}_stat")))

    # group by
    authors = get_with_stat(authors_query)
    return authors or []


def get_author_id_from(slug="", user=None, author_id=None):
    if not slug and not user and not author_id:
        raise ValueError("One of slug, user, or author_id must be provided")

    author_query = select(Author.id)
    if user:
        author_query = author_query.filter(Author.user == user)
    elif slug:
        author_query = author_query.filter(Author.slug == slug)
    elif author_id:
        author_query = author_query.filter(Author.id == author_id)

    with local_session() as session:
        author_id_result = session.execute(author_query).first()
        author_id = author_id_result[0] if author_id_result else None

    if not author_id:
        raise ValueError("Author not found")

    return author_id


@query.field("get_author_follows")
async def get_author_follows(_, _info, slug="", user=None, author_id=0):
    try:
        author_id = get_author_id_from(slug, user, author_id)

        if bool(author_id):
            logger.debug(f"getting {author_id} follows authors")
            authors = await get_cached_follower_authors(author_id)
            topics = await get_cached_follower_topics(author_id)
            return {
                "topics": topics,
                "authors": authors,
                "communities": [{"id": 1, "name": "Дискурс", "slug": "discours", "pic": ""}],
            }
    except Exception:
        import traceback

        traceback.print_exc()
        return {"error": "Author not found"}


@query.field("get_author_follows_topics")
async def get_author_follows_topics(_, _info, slug="", user=None, author_id=None):
    try:
        follower_id = get_author_id_from(slug, user, author_id)
        topics = await get_cached_follower_topics(follower_id)
        return topics
    except Exception:
        import traceback

        traceback.print_exc()


@query.field("get_author_follows_authors")
async def get_author_follows_authors(_, _info, slug="", user=None, author_id=None):
    try:
        follower_id = get_author_id_from(slug, user, author_id)
        return await get_cached_follower_authors(follower_id)
    except Exception:
        import traceback

        traceback.print_exc()


def create_author(user_id: str, slug: str, name: str = ""):
    with local_session() as session:
        try:
            author = None
            if user_id:
                author = session.query(Author).filter(Author.user == user_id).first()
            elif slug:
                author = session.query(Author).filter(Author.slug == slug).first()
            if not author:
                new_author = Author(user=user_id, slug=slug, name=name)
                session.add(new_author)
                session.commit()
                logger.info(f"author created by webhook {new_author.dict()}")
        except Exception as exc:
            logger.debug(exc)


@query.field("get_author_followers")
async def get_author_followers(_, _info, slug: str = "", user: str = "", author_id: int = 0):
    logger.debug(f"getting followers for @{slug}")
    author_id = get_author_id_from(slug=slug, user=user, author_id=author_id)
    followers = []
    if author_id:
        followers = await get_cached_author_followers(author_id)
    return followers
@@ -1,83 +0,0 @@
from operator import and_

from graphql import GraphQLError
from sqlalchemy import delete, insert

from orm.author import AuthorBookmark
from orm.shout import Shout
from resolvers.feed import apply_options
from resolvers.reader import get_shouts_with_links, query_with_stat
from services.auth import login_required
from services.common_result import CommonResult
from services.db import local_session
from services.schema import mutation, query


@query.field("load_shouts_bookmarked")
@login_required
def load_shouts_bookmarked(_, info, options):
    """
    Load bookmarked shouts for the authenticated user.

    Args:
        limit (int): Maximum number of shouts to return.
        offset (int): Number of shouts to skip.

    Returns:
        list: List of bookmarked shouts.
    """
    author_dict = info.context.get("author", {})
    author_id = author_dict.get("id")
    if not author_id:
        raise GraphQLError("User not authenticated")

    q = query_with_stat(info)
    q = q.join(AuthorBookmark)
    q = q.filter(
        and_(
            Shout.id == AuthorBookmark.shout,
            AuthorBookmark.author == author_id,
        )
    )
    q, limit, offset = apply_options(q, options, author_id)
    return get_shouts_with_links(info, q, limit, offset)


@mutation.field("toggle_bookmark_shout")
def toggle_bookmark_shout(_, info, slug: str) -> CommonResult:
    """
    Toggle bookmark status for a specific shout.

    Args:
        slug (str): Unique identifier of the shout.

    Returns:
        CommonResult: Result of the operation with bookmark status.
    """
    author_dict = info.context.get("author", {})
    author_id = author_dict.get("id")
    if not author_id:
        raise GraphQLError("User not authenticated")

    with local_session() as db:
        shout = db.query(Shout).filter(Shout.slug == slug).first()
        if not shout:
            raise GraphQLError("Shout not found")

        existing_bookmark = (
            db.query(AuthorBookmark)
            .filter(AuthorBookmark.author == author_id, AuthorBookmark.shout == shout.id)
            .first()
        )

        if existing_bookmark:
            db.execute(
                delete(AuthorBookmark).where(AuthorBookmark.author == author_id, AuthorBookmark.shout == shout.id)
            )
            result = False
        else:
            db.execute(insert(AuthorBookmark).values(author=author_id, shout=shout.id))
            result = True

        db.commit()
        return result
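A minimal client-side sketch of calling the toggle_bookmark_shout mutation above, using the gql library that appears in this project's dependencies; the endpoint URL and auth header are assumptions for illustration only:

from gql import Client, gql
from gql.transport.requests import RequestsHTTPTransport

# hypothetical endpoint and session token, for illustration only
transport = RequestsHTTPTransport(
    url="https://v2.discours.io/graphql",
    headers={"Authorization": "Bearer <token>"},
)
client = Client(transport=transport)

result = client.execute(gql('mutation { toggle_bookmark_shout(slug: "my-shout") }'))
print(result)  # expected shape: {"toggle_bookmark_shout": true or false}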
@@ -1,147 +0,0 @@
from orm.author import Author
from orm.invite import Invite, InviteStatus
from orm.shout import Shout
from services.auth import login_required
from services.db import local_session
from services.schema import mutation


@mutation.field("accept_invite")
@login_required
async def accept_invite(_, info, invite_id: int):
    info.context["user_id"]
    author_dict = info.context["author"]
    author_id = author_dict.get("id")
    if author_id:
        author_id = int(author_id)
        # Check if the user exists
        with local_session() as session:
            # Check if the invite exists
            invite = session.query(Invite).filter(Invite.id == invite_id).first()
            if invite and invite.author_id is author_id and invite.status is InviteStatus.PENDING.value:
                # Add the user to the shout authors
                shout = session.query(Shout).filter(Shout.id == invite.shout_id).first()
                if shout:
                    if author_id not in shout.authors:
                        author = session.query(Author).filter(Author.id == author_id).first()
                        if author:
                            shout.authors.append(author)
                            session.add(shout)
                            session.delete(invite)
                            session.commit()
                    return {"success": True, "message": "Invite accepted"}
                else:
                    return {"error": "Shout not found"}
            else:
                return {"error": "Invalid invite or already accepted/rejected"}
    else:
        return {"error": "Unauthorized"}


@mutation.field("reject_invite")
@login_required
async def reject_invite(_, info, invite_id: int):
    info.context["user_id"]
    author_dict = info.context["author"]
    author_id = author_dict.get("id")

    if author_id:
        # Check if the user exists
        with local_session() as session:
            author_id = int(author_id)
            # Check if the invite exists
            invite = session.query(Invite).filter(Invite.id == invite_id).first()
            if invite and invite.author_id is author_id and invite.status is InviteStatus.PENDING.value:
                # Delete the invite
                session.delete(invite)
                session.commit()
                return {"success": True, "message": "Invite rejected"}
            else:
                return {"error": "Invalid invite or already accepted/rejected"}
    return {"error": "User not found"}


@mutation.field("create_invite")
@login_required
async def create_invite(_, info, slug: str = "", author_id: int = 0):
    user_id = info.context["user_id"]
    author_dict = info.context["author"]
    author_id = author_dict.get("id")
    if author_id:
        # Check if the inviter is the owner of the shout
        with local_session() as session:
            shout = session.query(Shout).filter(Shout.slug == slug).first()
            inviter = session.query(Author).filter(Author.user == user_id).first()
            if inviter and shout and shout.authors and inviter.id is shout.created_by:
                # Check if an invite already exists
                existing_invite = (
                    session.query(Invite)
                    .filter(
                        Invite.inviter_id == inviter.id,
                        Invite.author_id == author_id,
                        Invite.shout_id == shout.id,
                        Invite.status == InviteStatus.PENDING.value,
                    )
                    .first()
                )
                if existing_invite:
                    return {"error": "Invite already sent"}

                # Create a new invite
                new_invite = Invite(
                    inviter_id=user_id,
                    author_id=author_id,
                    shout_id=shout.id,
                    status=InviteStatus.PENDING.value,
                )
                session.add(new_invite)
                session.commit()

                return {"error": None, "invite": new_invite}
            else:
                return {"error": "Invalid author"}
    else:
        return {"error": "Access denied"}


@mutation.field("remove_author")
@login_required
async def remove_author(_, info, slug: str = "", author_id: int = 0):
    user_id = info.context["user_id"]
    with local_session() as session:
        author = session.query(Author).filter(Author.user == user_id).first()
        if author:
            shout = session.query(Shout).filter(Shout.slug == slug).first()
            # NOTE: owner should be first in a list
            if shout and author.id is shout.created_by:
                shout.authors = [author for author in shout.authors if author.id != author_id]
                session.commit()
                return {}
    return {"error": "Access denied"}


@mutation.field("remove_invite")
@login_required
async def remove_invite(_, info, invite_id: int):
    info.context["user_id"]

    author_dict = info.context["author"]
    author_id = author_dict.get("id")
    if isinstance(author_id, int):
        # Check if the user exists
        with local_session() as session:
            # Check if the invite exists
            invite = session.query(Invite).filter(Invite.id == invite_id).first()
            if isinstance(invite, Invite):
                shout = session.query(Shout).filter(Shout.id == invite.shout_id).first()
                if shout and shout.deleted_at is None and invite:
                    if invite.inviter_id is author_id or author_id == shout.created_by:
                        if invite.status is InviteStatus.PENDING.value:
                            # Delete the invite
                            session.delete(invite)
                            session.commit()
                            return {}
            else:
                return {"error": "Invalid invite or already accepted/rejected"}
    else:
        return {"error": "Author not found"}
@@ -1,97 +0,0 @@
from orm.author import Author
from orm.community import Community, CommunityFollower
from services.db import local_session
from services.schema import mutation, query


@query.field("get_communities_all")
async def get_communities_all(_, _info):
    return local_session().query(Community).all()


@query.field("get_community")
async def get_community(_, _info, slug: str):
    q = local_session().query(Community).where(Community.slug == slug)
    return q.first()


@query.field("get_communities_by_author")
async def get_communities_by_author(_, _info, slug="", user="", author_id=0):
    with local_session() as session:
        q = session.query(Community).join(CommunityFollower)
        if slug:
            author_id = session.query(Author).where(Author.slug == slug).first().id
            q = q.where(CommunityFollower.author == author_id)
        if user:
            author_id = session.query(Author).where(Author.user == user).first().id
            q = q.where(CommunityFollower.author == author_id)
        if author_id:
            q = q.where(CommunityFollower.author == author_id)
        return q.all()
    return []


@mutation.field("join_community")
async def join_community(_, info, slug: str):
    author_dict = info.context.get("author", {})
    author_id = author_dict.get("id")
    with local_session() as session:
        community = session.query(Community).where(Community.slug == slug).first()
        if not community:
            return {"ok": False, "error": "Community not found"}
        session.add(CommunityFollower(community=community.id, author=author_id))
        session.commit()
        return {"ok": True}


@mutation.field("leave_community")
async def leave_community(_, info, slug: str):
    author_dict = info.context.get("author", {})
    author_id = author_dict.get("id")
    with local_session() as session:
        session.query(CommunityFollower).where(
            CommunityFollower.author == author_id, CommunityFollower.community == slug
        ).delete()
        session.commit()
        return {"ok": True}


@mutation.field("create_community")
async def create_community(_, info, community_data):
    author_dict = info.context.get("author", {})
    author_id = author_dict.get("id")
    with local_session() as session:
        session.add(Community(author=author_id, **community_data))
        session.commit()
        return {"ok": True}


@mutation.field("update_community")
async def update_community(_, info, community_data):
    author_dict = info.context.get("author", {})
    author_id = author_dict.get("id")
    slug = community_data.get("slug")
    if slug:
        with local_session() as session:
            try:
                session.query(Community).where(Community.created_by == author_id, Community.slug == slug).update(
                    community_data
                )
                session.commit()
            except Exception as e:
                return {"ok": False, "error": str(e)}
        return {"ok": True}
    return {"ok": False, "error": "Please, set community slug in input"}


@mutation.field("delete_community")
async def delete_community(_, info, slug: str):
    author_dict = info.context.get("author", {})
    author_id = author_dict.get("id")
    with local_session() as session:
        try:
            session.query(Community).where(Community.slug == slug, Community.created_by == author_id).delete()
            session.commit()
            return {"ok": True}
        except Exception as e:
            return {"ok": False, "error": str(e)}
179 resolvers/create/editor.py Normal file
@@ -0,0 +1,179 @@
from datetime import datetime, timezone

from sqlalchemy import and_
from sqlalchemy.orm import joinedload

from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.orm import local_session
from base.resolvers import mutation
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic
from resolvers.zine.reactions import reactions_follow, reactions_unfollow


@mutation.field("createShout")
@login_required
async def create_shout(_, info, inp):
    auth: AuthCredentials = info.context["request"].auth

    with local_session() as session:
        topics = session.query(Topic).filter(Topic.slug.in_(inp.get("topics", []))).all()

        new_shout = Shout.create(
            **{
                "title": inp.get("title"),
                "subtitle": inp.get("subtitle"),
                "lead": inp.get("lead"),
                "description": inp.get("description"),
                "body": inp.get("body", ""),
                "layout": inp.get("layout"),
                "authors": inp.get("authors", []),
                "slug": inp.get("slug"),
                "mainTopic": inp.get("mainTopic"),
                "visibility": "owner",
                "createdBy": auth.user_id,
            }
        )

        for topic in topics:
            t = ShoutTopic.create(topic=topic.id, shout=new_shout.id)
            session.add(t)

        # NOTE: shout made by one first author
        sa = ShoutAuthor.create(shout=new_shout.id, user=auth.user_id)
        session.add(sa)

        session.add(new_shout)

        reactions_follow(auth.user_id, new_shout.id, True)

        session.commit()

        # TODO
        # GitTask(inp, user.username, user.email, "new shout %s" % new_shout.slug)

        if new_shout.slug is None:
            new_shout.slug = f"draft-{new_shout.id}"
            session.commit()

        return {"shout": new_shout}


@mutation.field("updateShout")
@login_required
async def update_shout(_, info, shout_id, shout_input=None, publish=False):  # noqa: C901
    auth: AuthCredentials = info.context["request"].auth

    with local_session() as session:
        shout = (
            session.query(Shout)
            .options(
                joinedload(Shout.authors),
                joinedload(Shout.topics),
            )
            .filter(Shout.id == shout_id)
            .first()
        )

        if not shout:
            return {"error": "shout not found"}

        if shout.createdBy != auth.user_id:
            return {"error": "access denied"}

        updated = False

        if shout_input is not None:
            topics_input = shout_input["topics"]
            del shout_input["topics"]

            new_topics_to_link = []
            new_topics = [topic_input for topic_input in topics_input if topic_input["id"] < 0]

            for new_topic in new_topics:
                del new_topic["id"]
                created_new_topic = Topic.create(**new_topic)
                session.add(created_new_topic)
                new_topics_to_link.append(created_new_topic)

            if len(new_topics) > 0:
                session.commit()

            for new_topic_to_link in new_topics_to_link:
                created_unlinked_topic = ShoutTopic.create(
                    shout=shout.id, topic=new_topic_to_link.id
                )
                session.add(created_unlinked_topic)

            existing_topics_input = [
                topic_input for topic_input in topics_input if topic_input.get("id", 0) > 0
            ]
            existing_topic_to_link_ids = [
                existing_topic_input["id"]
                for existing_topic_input in existing_topics_input
                if existing_topic_input["id"] not in [topic.id for topic in shout.topics]
            ]

            for existing_topic_to_link_id in existing_topic_to_link_ids:
                created_unlinked_topic = ShoutTopic.create(
                    shout=shout.id, topic=existing_topic_to_link_id
                )
                session.add(created_unlinked_topic)

            topic_to_unlink_ids = [
                topic.id
                for topic in shout.topics
                if topic.id not in [topic_input["id"] for topic_input in existing_topics_input]
            ]

            shout_topics_to_remove = session.query(ShoutTopic).filter(
                and_(ShoutTopic.shout == shout.id, ShoutTopic.topic.in_(topic_to_unlink_ids))
            )

            for shout_topic_to_remove in shout_topics_to_remove:
                session.delete(shout_topic_to_remove)

            shout_input["mainTopic"] = shout_input["mainTopic"]["slug"]

            if shout_input["mainTopic"] == "":
                del shout_input["mainTopic"]

            shout.update(shout_input)
            updated = True

        if publish and shout.visibility == "owner":
            shout.visibility = "community"
            shout.publishedAt = datetime.now(tz=timezone.utc)
            updated = True

        if updated:
            shout.updatedAt = datetime.now(tz=timezone.utc)

        session.commit()
        # GitTask(inp, user.username, user.email, "update shout %s" % slug)

        return {"shout": shout}


@mutation.field("deleteShout")
@login_required
async def delete_shout(_, info, shout_id):
    auth: AuthCredentials = info.context["request"].auth

    with local_session() as session:
        shout = session.query(Shout).filter(Shout.id == shout_id).first()

        if not shout:
            return {"error": "invalid shout id"}

        if auth.user_id != shout.createdBy:
            return {"error": "access denied"}

        for author_id in shout.authors:
            reactions_unfollow(author_id, shout_id)

        shout.deletedAt = datetime.now(tz=timezone.utc)
        session.commit()

        return {}
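A small sketch of an input payload the createShout mutation above would accept, with made-up values matching the fields it reads via inp.get(...):

# hypothetical createShout input, values are illustrative only
inp = {
    "title": "On distributed zines",
    "subtitle": "",
    "lead": "",
    "description": "",
    "body": "<p>...</p>",
    "layout": "article",
    "authors": [],
    "slug": "on-distributed-zines",
    "mainTopic": "society",
    "topics": ["society", "tech"],
}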
@@ -1,337 +0,0 @@
import time

from sqlalchemy import and_, desc, select
from sqlalchemy.orm import joinedload
from sqlalchemy.sql.functions import coalesce

from cache.cache import cache_author, cache_topic
from orm.author import Author
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic
from resolvers.follower import follow, unfollow
from resolvers.stat import get_with_stat
from services.auth import login_required
from services.db import local_session
from services.notify import notify_shout
from services.schema import mutation, query
from services.search import search_service
from utils.logger import root_logger as logger


async def cache_by_id(entity, entity_id: int, cache_method):
    caching_query = select(entity).filter(entity.id == entity_id)
    result = get_with_stat(caching_query)
    if not result or not result[0]:
        logger.warning(f"{entity.__name__} with id {entity_id} not found")
        return
    x = result[0]
    d = x.dict()  # convert object to dictionary
    cache_method(d)
    return d


@query.field("get_my_shout")
@login_required
async def get_my_shout(_, info, shout_id: int):
    # logger.debug(info)
    user_id = info.context.get("user_id", "")
    author_dict = info.context.get("author", {})
    author_id = author_dict.get("id")
    roles = info.context.get("roles", [])
    shout = None
    if not user_id or not author_id:
        return {"error": "unauthorized", "shout": None}
    with local_session() as session:
        shout = (
            session.query(Shout)
            .filter(Shout.id == shout_id)
            .options(joinedload(Shout.authors), joinedload(Shout.topics))
            .filter(Shout.deleted_at.is_(None))
            .first()
        )
        if not shout:
            return {"error": "no shout found", "shout": None}

        logger.debug(f"got {len(shout.authors)} shout authors, created by {shout.created_by}")
        is_editor = "editor" in roles
        logger.debug(f"viewer is{'' if is_editor else ' not'} editor")
        is_creator = author_id == shout.created_by
        logger.debug(f"viewer is{'' if is_creator else ' not'} creator")
        is_author = bool(list(filter(lambda x: x.id == int(author_id), [x for x in shout.authors])))
        logger.debug(f"viewer is{'' if is_author else ' not'} author")
        can_edit = is_editor or is_author or is_creator

        if not can_edit:
            return {"error": "forbidden", "shout": None}

        logger.debug("got shout editor with data")
        return {"error": None, "shout": shout}


@query.field("get_shouts_drafts")
@login_required
async def get_shouts_drafts(_, info):
    # user_id = info.context.get("user_id")
    author_dict = info.context.get("author")
    if not author_dict:
        return {"error": "author profile was not found"}
    author_id = author_dict.get("id")
    shouts = []
    with local_session() as session:
        if author_id:
            q = (
                select(Shout)
                .options(joinedload(Shout.authors), joinedload(Shout.topics))
                .filter(and_(Shout.deleted_at.is_(None), Shout.created_by == int(author_id)))
                .filter(Shout.published_at.is_(None))
                .order_by(desc(coalesce(Shout.updated_at, Shout.created_at)))
                .group_by(Shout.id)
            )
            shouts = [shout for [shout] in session.execute(q).unique()]
    return {"shouts": shouts}


@mutation.field("create_shout")
@login_required
async def create_shout(_, info, inp):
    user_id = info.context.get("user_id")
    author_dict = info.context.get("author")
    if not author_dict:
        return {"error": "author profile was not found"}
    author_id = author_dict.get("id")
    if user_id and author_id:
        with local_session() as session:
            author_id = int(author_id)
            current_time = int(time.time())
            slug = inp.get("slug") or f"draft-{current_time}"
            shout_dict = {
                "title": inp.get("title", ""),
                "subtitle": inp.get("subtitle", ""),
                "lead": inp.get("lead", ""),
                "description": inp.get("description", ""),
                "body": inp.get("body", ""),
                "layout": inp.get("layout", "article"),
                "created_by": author_id,
                "authors": [],
                "slug": slug,
                "topics": inp.get("topics", []),
                "published_at": None,
                "community": 1,
                "created_at": current_time,  # Set created_at as Unix timestamp
            }
            same_slug_shout = session.query(Shout).filter(Shout.slug == shout_dict.get("slug")).first()
            c = 1
            while same_slug_shout is not None:
                same_slug_shout = session.query(Shout).filter(Shout.slug == shout_dict.get("slug")).first()
                c += 1
                shout_dict["slug"] += f"-{c}"
            new_shout = Shout(**shout_dict)
            session.add(new_shout)
            session.commit()

            # NOTE: requesting new shout back
            shout = session.query(Shout).where(Shout.slug == slug).first()
            if shout:
                # check whether the record already exists
                existing_sa = session.query(ShoutAuthor).filter_by(shout=shout.id, author=author_id).first()
                if not existing_sa:
                    sa = ShoutAuthor(shout=shout.id, author=author_id)
                    session.add(sa)

                topics = session.query(Topic).filter(Topic.slug.in_(inp.get("topics", []))).all()
                for topic in topics:
                    existing_st = session.query(ShoutTopic).filter_by(shout=shout.id, author=topic.id).first()
                    if not existing_st:
                        t = ShoutTopic(topic=topic.id, shout=shout.id)
                        session.add(t)

                session.commit()

                follow(None, info, "shout", shout.slug)

                # notifier
                # await notify_shout(shout_dict, 'create')

            return {"shout": shout}

    return {"error": "cant create shout" if user_id else "unauthorized"}


def patch_main_topic(session, main_topic, shout):
    with session.begin():
        shout = session.query(Shout).options(joinedload(Shout.topics)).filter(Shout.id == shout.id).first()
        if not shout:
            return
        old_main_topic = (
            session.query(ShoutTopic).filter(and_(ShoutTopic.shout == shout.id, ShoutTopic.main.is_(True))).first()
        )

        main_topic = session.query(Topic).filter(Topic.slug == main_topic).first()

        if main_topic:
            new_main_topic = (
                session.query(ShoutTopic)
                .filter(and_(ShoutTopic.shout == shout.id, ShoutTopic.topic == main_topic.id))
                .first()
            )

            if old_main_topic and new_main_topic and old_main_topic is not new_main_topic:
                ShoutTopic.update(old_main_topic, {"main": False})
                session.add(old_main_topic)

                ShoutTopic.update(new_main_topic, {"main": True})
                session.add(new_main_topic)


def patch_topics(session, shout, topics_input):
    new_topics_to_link = [Topic(**new_topic) for new_topic in topics_input if new_topic["id"] < 0]
    if new_topics_to_link:
        session.add_all(new_topics_to_link)
        session.commit()

    for new_topic_to_link in new_topics_to_link:
        created_unlinked_topic = ShoutTopic(shout=shout.id, topic=new_topic_to_link.id)
        session.add(created_unlinked_topic)

    existing_topics_input = [topic_input for topic_input in topics_input if topic_input.get("id", 0) > 0]
    existing_topic_to_link_ids = [
        existing_topic_input["id"]
        for existing_topic_input in existing_topics_input
        if existing_topic_input["id"] not in [topic.id for topic in shout.topics]
    ]

    for existing_topic_to_link_id in existing_topic_to_link_ids:
        created_unlinked_topic = ShoutTopic(shout=shout.id, topic=existing_topic_to_link_id)
        session.add(created_unlinked_topic)

    topic_to_unlink_ids = [
        topic.id
        for topic in shout.topics
        if topic.id not in [topic_input["id"] for topic_input in existing_topics_input]
    ]

    session.query(ShoutTopic).filter(
        and_(ShoutTopic.shout == shout.id, ShoutTopic.topic.in_(topic_to_unlink_ids))
    ).delete(synchronize_session=False)


@mutation.field("update_shout")
@login_required
async def update_shout(_, info, shout_id: int, shout_input=None, publish=False):
    user_id = info.context.get("user_id")
    roles = info.context.get("roles", [])
    author_dict = info.context.get("author")
    if not author_dict:
        return {"error": "author profile was not found"}
    author_id = author_dict.get("id")
    shout_input = shout_input or {}
    current_time = int(time.time())
    shout_id = shout_id or shout_input.get("id", shout_id)
    slug = shout_input.get("slug")
    if not user_id:
        return {"error": "unauthorized"}
    try:
        with local_session() as session:
            if author_id:
                logger.info(f"author for shout#{shout_id} detected author #{author_id}")
                shout_by_id = session.query(Shout).filter(Shout.id == shout_id).first()

                if not shout_by_id:
                    logger.error(f"shout#{shout_id} not found")
                    return {"error": "shout not found"}
                logger.info(f"shout#{shout_id} found")

                if slug != shout_by_id.slug:
                    same_slug_shout = session.query(Shout).filter(Shout.slug == slug).first()
                    c = 1
                    while same_slug_shout is not None:
                        c += 1
                        slug = f"{slug}-{c}"
                        same_slug_shout = session.query(Shout).filter(Shout.slug == slug).first()
                    shout_input["slug"] = slug
                    logger.info(f"shout#{shout_id} slug patched")

                if filter(lambda x: x.id == author_id, [x for x in shout_by_id.authors]) or "editor" in roles:
                    logger.info(f"shout#{shout_id} is author or editor")
                    # topics patch
                    topics_input = shout_input.get("topics")
                    if topics_input:
                        logger.info(f"topics_input: {topics_input}")
                        patch_topics(session, shout_by_id, topics_input)
                        del shout_input["topics"]
                        for tpc in topics_input:
                            await cache_by_id(Topic, tpc["id"], cache_topic)

                    # main topic
                    main_topic = shout_input.get("main_topic")
                    if main_topic:
                        patch_main_topic(session, main_topic, shout_by_id)

                    shout_input["updated_at"] = current_time
                    shout_input["published_at"] = current_time if publish else None
                    Shout.update(shout_by_id, shout_input)
                    session.add(shout_by_id)
                    session.commit()

                    shout_dict = shout_by_id.dict()

                    if not publish:
                        await notify_shout(shout_dict, "update")
                    else:
                        await notify_shout(shout_dict, "published")
                        # search service indexing
                        search_service.index(shout_by_id)
                        for a in shout_by_id.authors:
                            await cache_by_id(Author, a.id, cache_author)
                    logger.info(f"shout#{shout_id} updated")
                    return {"shout": shout_dict, "error": None}
                else:
                    logger.warning(f"shout#{shout_id} is not author or editor")
                    return {"error": "access denied", "shout": None}

    except Exception as exc:
        import traceback

        traceback.print_exc()
        logger.error(exc)
        logger.error(f" cannot update with data: {shout_input}")

    return {"error": "cant update shout"}


@mutation.field("delete_shout")
@login_required
async def delete_shout(_, info, shout_id: int):
    user_id = info.context.get("user_id")
    roles = info.context.get("roles", [])
    author_dict = info.context.get("author")
    if not author_dict:
        return {"error": "author profile was not found"}
    author_id = author_dict.get("id")
    if user_id and author_id:
        author_id = int(author_id)
        with local_session() as session:
            shout = session.query(Shout).filter(Shout.id == shout_id).first()
            if not isinstance(shout, Shout):
                return {"error": "invalid shout id"}
            shout_dict = shout.dict()
            # NOTE: only owner and editor can mark the shout as deleted
            if shout_dict["created_by"] == author_id or "editor" in roles:
                shout_dict["deleted_at"] = int(time.time())
                Shout.update(shout, shout_dict)
                session.add(shout)
                session.commit()

                for author in shout.authors:
                    await cache_by_id(Author, author.id, cache_author)
                    info.context["author"] = author.dict()
                    info.context["user_id"] = author.user
                    unfollow(None, info, "shout", shout.slug)

                for topic in shout.topics:
                    await cache_by_id(Topic, topic.id, cache_topic)

                await notify_shout(shout_dict, "delete")
                return {"error": None}
            else:
                return {"error": "access denied"}
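The slug collision loop in the removed create_shout above re-queries the same slug before appending the suffix, so it can spin without converging. A minimal corrected sketch of the usual pattern, assuming the same Shout model and session as in this diff (not repository code):

from orm.shout import Shout


def deduplicate_slug(session, base_slug: str) -> str:
    # append -2, -3, ... until the slug is free (illustrative sketch only)
    slug = base_slug
    c = 1
    while session.query(Shout).filter(Shout.slug == slug).first() is not None:
        c += 1
        slug = f"{base_slug}-{c}"
    return slug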
@@ -1,175 +0,0 @@
from typing import List

from sqlalchemy import and_, select

from orm.author import Author, AuthorFollower
from orm.shout import Shout, ShoutAuthor, ShoutReactionsFollower, ShoutTopic
from orm.topic import Topic, TopicFollower
from resolvers.reader import apply_options, get_shouts_with_links, has_field, query_with_stat
from services.auth import login_required
from services.db import local_session
from services.schema import query
from utils.logger import root_logger as logger


@query.field("load_shouts_coauthored")
@login_required
async def load_shouts_coauthored(_, info, options):
    """
    Load shouts co-authored with the current user.

    :param info: GraphQL context info.
    :param options: Filtering and sorting options.
    :return: List of co-authored shouts.
    """
    author_id = info.context.get("author", {}).get("id")
    if not author_id:
        return []
    q = query_with_stat(info)
    q = q.filter(Shout.authors.any(id=author_id))
    q, limit, offset = apply_options(q, options)
    return get_shouts_with_links(info, q, limit, offset=offset)


@query.field("load_shouts_discussed")
@login_required
async def load_shouts_discussed(_, info, options):
    """
    Load shouts the current user has discussed (commented on).

    :param info: GraphQL context info.
    :param options: Filtering and sorting options.
    :return: List of shouts discussed by the user.
    """
    author_id = info.context.get("author", {}).get("id")
    if not author_id:
        return []
    q = query_with_stat(info)
    # guard against a missing "filters" key
    options.setdefault("filters", {})["commented"] = True
    q, limit, offset = apply_options(q, options, author_id)
    return get_shouts_with_links(info, q, limit, offset=offset)


def shouts_by_follower(info, follower_id: int, options):
    """
    Load shouts the author is subscribed to:

    - by authors
    - by topics
    - by reactions

    :param info: GraphQL context info.
    :param follower_id: Author ID.
    :param options: Filtering and sorting options.
    :return: List of shouts.
    """
    q = query_with_stat(info)
    reader_followed_authors = select(AuthorFollower.author).where(AuthorFollower.follower == follower_id)
    reader_followed_topics = select(TopicFollower.topic).where(TopicFollower.follower == follower_id)
    reader_followed_shouts = select(ShoutReactionsFollower.shout).where(ShoutReactionsFollower.follower == follower_id)
    followed_subquery = (
        select(Shout.id)
        .join(ShoutAuthor, ShoutAuthor.shout == Shout.id)
        .join(ShoutTopic, ShoutTopic.shout == Shout.id)
        .where(
            ShoutAuthor.author.in_(reader_followed_authors)
            | ShoutTopic.topic.in_(reader_followed_topics)
            | Shout.id.in_(reader_followed_shouts)
        )
        .scalar_subquery()
    )
    q = q.filter(Shout.id.in_(followed_subquery))
    q, limit, offset = apply_options(q, options)
    shouts = get_shouts_with_links(info, q, limit, offset=offset)
    return shouts


@query.field("load_shouts_followed_by")
async def load_shouts_followed_by(_, info, slug: str, options) -> List[Shout]:
    """
    Load shouts followed by the author identified by slug.

    :param info: GraphQL context info.
    :param slug: Author slug.
    :param options: Filtering and sorting options.
    :return: List of shouts.
    """
    with local_session() as session:
        author = session.query(Author).filter(Author.slug == slug).first()
        if author:
            follower_id = author.dict()["id"]
            shouts = shouts_by_follower(info, follower_id, options)
            return shouts
    return []


@query.field("load_shouts_feed")
@login_required
async def load_shouts_feed(_, info, options) -> List[Shout]:
    """
    Load shouts the authorized user is subscribed to.

    :param info: GraphQL context info.
    :param options: Filtering and sorting options.
    :return: List of shouts.
    """
    author_id = info.context.get("author", {}).get("id")
    return shouts_by_follower(info, author_id, options) if author_id else []


@query.field("load_shouts_authored_by")
async def load_shouts_authored_by(_, info, slug: str, options) -> List[Shout]:
    """
    Load shouts written by the author identified by slug.

    :param info: GraphQL context info.
    :param slug: Author slug.
    :param options: Filtering and sorting options.
    :return: List of shouts.
    """
    with local_session() as session:
        author = session.query(Author).filter(Author.slug == slug).first()
        if author:
            try:
                author_id: int = author.dict()["id"]
                q = (
                    query_with_stat(info)
                    if has_field(info, "stat")
                    else select(Shout).filter(and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None)))
                )
                q = q.filter(Shout.authors.any(id=author_id))
                q, limit, offset = apply_options(q, options, author_id)
                shouts = get_shouts_with_links(info, q, limit, offset=offset)
                return shouts
            except Exception as error:
                logger.debug(error)
    return []


@query.field("load_shouts_with_topic")
async def load_shouts_with_topic(_, info, slug: str, options) -> List[Shout]:
    """
    Load shouts related to the topic identified by slug.

    :param info: GraphQL context info.
    :param slug: Topic slug.
    :param options: Filtering and sorting options.
    :return: List of shouts.
    """
    with local_session() as session:
        topic = session.query(Topic).filter(Topic.slug == slug).first()
        if topic:
            try:
                topic_id: int = topic.dict()["id"]
                q = (
                    query_with_stat(info)
                    if has_field(info, "stat")
                    else select(Shout).filter(and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None)))
                )
                q = q.filter(Shout.topics.any(id=topic_id))
                q, limit, offset = apply_options(q, options)
                shouts = get_shouts_with_links(info, q, limit, offset=offset)
                return shouts
            except Exception as error:
                logger.debug(error)
    return []
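A minimal client-side sketch of calling one of these resolvers; the endpoint URL, token placeholder, and the LoadShoutsOptions type name are assumptions inferred from the resolver signatures, not a confirmed schema:

import requests

QUERY = """
query LoadFeed($options: LoadShoutsOptions) {
  load_shouts_feed(options: $options) { id slug title }
}
"""

resp = requests.post(
    "https://example.org/graphql",  # hypothetical endpoint
    json={"query": QUERY, "variables": {"options": {"limit": 10, "offset": 0}}},
    headers={"Authorization": "Bearer <token>"},  # load_shouts_feed is login_required
)
print(resp.json())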
@@ -1,222 +0,0 @@
from typing import List

from graphql import GraphQLError
from sqlalchemy import select
from sqlalchemy.sql import and_

from cache.cache import (
    cache_author,
    cache_topic,
    get_cached_follower_authors,
    get_cached_follower_topics,
)
from orm.author import Author, AuthorFollower
from orm.community import Community, CommunityFollower
from orm.reaction import Reaction
from orm.shout import Shout, ShoutReactionsFollower
from orm.topic import Topic, TopicFollower
from resolvers.stat import get_with_stat
from services.auth import login_required
from services.db import local_session
from services.notify import notify_follower
from services.schema import mutation, query
from utils.logger import root_logger as logger


@mutation.field("follow")
@login_required
async def follow(_, info, what, slug="", entity_id=0):
    logger.debug("Entering 'follow'")
    user_id = info.context.get("user_id")
    follower_dict = info.context.get("author")
    logger.debug(f"follower: {follower_dict}")

    if not user_id or not follower_dict:
        return GraphQLError("unauthorized")

    follower_id = follower_dict.get("id")
    logger.debug(f"follower_id: {follower_id}")

    entity_classes = {
        "AUTHOR": (Author, AuthorFollower, get_cached_follower_authors, cache_author),
        "TOPIC": (Topic, TopicFollower, get_cached_follower_topics, cache_topic),
        "COMMUNITY": (Community, CommunityFollower, None, None),  # no cache methods for communities
        "SHOUT": (Shout, ShoutReactionsFollower, None, None),  # no cache methods for shouts
    }

    if what not in entity_classes:
        logger.error(f"invalid follow type: {what}")
        return {"error": "invalid follow type"}

    entity_class, follower_class, get_cached_follows_method, cache_method = entity_classes[what]
    entity_type = what.lower()
    entity_dict = None
    # initialize before the try block so the final return never hits an unbound name
    follows = None

    try:
        logger.debug("Fetching the entity from the database")
        with local_session() as session:
            entity_query = select(entity_class).filter(entity_class.slug == slug)
            entities = get_with_stat(entity_query)
            [entity] = entities
            if not entity:
                logger.warning(f"{what.lower()} not found by slug: {slug}")
                return {"error": f"{what.lower()} not found"}
            if not entity_id and entity:
                entity_id = entity.id
            entity_dict = entity.dict()
            logger.debug(f"entity_id: {entity_id}, entity_dict: {entity_dict}")

        if entity_id:
            logger.debug("Checking for an existing subscription")
            with local_session() as session:
                existing_sub = (
                    session.query(follower_class)
                    .filter(follower_class.follower == follower_id, getattr(follower_class, entity_type) == entity_id)
                    .first()
                )
                if existing_sub:
                    logger.info(f"User {follower_id} already follows {what.lower()} with ID {entity_id}")
                else:
                    logger.debug("Adding a new record to the database")
                    sub = follower_class(follower=follower_id, **{entity_type: entity_id})
                    logger.debug(f"Created subscription object: {sub}")
                    session.add(sub)
                    session.commit()
                    logger.info(f"User {follower_id} followed {what.lower()} with ID {entity_id}")

            if cache_method:
                logger.debug("Updating the cache")
                await cache_method(entity_dict)
            if get_cached_follows_method:
                logger.debug("Fetching follows from the cache")
                existing_follows = await get_cached_follows_method(follower_id)
                follows = [*existing_follows, entity_dict] if not existing_sub else existing_follows
                logger.debug("Follows list updated")

            if what == "AUTHOR" and not existing_sub:
                logger.debug("Notifying the author about the new follower")
                await notify_follower(follower=follower_dict, author_id=entity_id, action="follow")

    except Exception as exc:
        logger.exception("Error in 'follow'")
        return {"error": str(exc)}

    return {f"{what.lower()}s": follows}


@mutation.field("unfollow")
@login_required
async def unfollow(_, info, what, slug="", entity_id=0):
    logger.debug("Entering 'unfollow'")
    user_id = info.context.get("user_id")
    follower_dict = info.context.get("author")
    logger.debug(f"follower: {follower_dict}")

    if not user_id or not follower_dict:
        logger.warning("Unauthorized unfollow attempt")
        return {"error": "unauthorized"}

    follower_id = follower_dict.get("id")
    logger.debug(f"follower_id: {follower_id}")

    entity_classes = {
        "AUTHOR": (Author, AuthorFollower, get_cached_follower_authors, cache_author),
        "TOPIC": (Topic, TopicFollower, get_cached_follower_topics, cache_topic),
        "COMMUNITY": (Community, CommunityFollower, None, None),  # no cache methods for communities
        "SHOUT": (Shout, ShoutReactionsFollower, None, None),  # no cache methods for shouts
    }

    if what not in entity_classes:
        logger.error(f"invalid unfollow type: {what}")
        return {"error": "invalid unfollow type"}

    entity_class, follower_class, get_cached_follows_method, cache_method = entity_classes[what]
    entity_type = what.lower()
    follows = []
    error = None

    try:
        logger.debug("Fetching the entity from the database")
        with local_session() as session:
            entity = session.query(entity_class).filter(entity_class.slug == slug).first()
            logger.debug(f"Fetched entity: {entity}")
            if not entity:
                logger.warning(f"{what.lower()} not found by slug: {slug}")
                return {"error": f"{what.lower()} not found"}
            if entity and not entity_id:
                entity_id = entity.id
                logger.debug(f"entity_id: {entity_id}")

            sub = (
                session.query(follower_class)
                .filter(
                    and_(
                        getattr(follower_class, "follower") == follower_id,
                        getattr(follower_class, entity_type) == entity_id,
                    )
                )
                .first()
            )
            logger.debug(f"Subscription to delete: {sub}")
            if sub:
                session.delete(sub)
                session.commit()
                logger.info(f"User {follower_id} unfollowed {what.lower()} with ID {entity_id}")

                if cache_method:
                    logger.debug("Updating the cache after unfollow")
                    await cache_method(entity.dict())
                if get_cached_follows_method:
                    logger.debug("Fetching follows from the cache")
                    existing_follows = await get_cached_follows_method(follower_id)
                    # materialize into a list; a lazy filter object cannot be serialized
                    follows = [x for x in existing_follows if x["id"] != entity_id]
                    logger.debug("Follows list updated")

                if what == "AUTHOR":
                    logger.debug("Notifying the author about the unfollow")
                    await notify_follower(follower=follower_dict, author_id=entity_id, action="unfollow")
            else:
                return {"error": "following was not found", f"{entity_type}s": follows}

    except Exception as exc:
        logger.exception("Error in 'unfollow'")
        import traceback

        traceback.print_exc()
        return {"error": str(exc)}

    # logger.debug(f"'unfollow' finished successfully with: {entity_type}s={follows}, error={error}")
    return {f"{entity_type}s": follows, "error": error}


@query.field("get_shout_followers")
def get_shout_followers(_, _info, slug: str = "", shout_id: int | None = None) -> List[Author]:
    logger.debug("Entering 'get_shout_followers'")
    followers = []
    try:
        with local_session() as session:
            shout = None
            if slug:
                shout = session.query(Shout).filter(Shout.slug == slug).first()
                logger.debug(f"Shout found by slug: {slug} -> {shout}")
            elif shout_id:
                shout = session.query(Shout).filter(Shout.id == shout_id).first()
                logger.debug(f"Shout found by ID: {shout_id} -> {shout}")

            if shout:
                reactions = session.query(Reaction).filter(Reaction.shout == shout.id).all()
                logger.debug(f"Reactions fetched for shout ID {shout.id}: {reactions}")
                for r in reactions:
                    followers.append(r.created_by)
                    logger.debug(f"Added follower: {r.created_by}")

    except Exception as _exc:
        import traceback

        traceback.print_exc()
        logger.exception("Error in 'get_shout_followers'")
        return []

    # logger.debug(f"'get_shout_followers' finished with {len(followers)} followers")
    return followers
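A test-style sketch of driving the follow/unfollow pair directly; FakeInfo and the author payload are hypothetical fixtures, and it assumes login_required is satisfied by the context alone:

import asyncio

class FakeInfo:
    context = {"user_id": "u1", "author": {"id": 1, "slug": "tester"}}

async def demo():
    res = await follow(None, FakeInfo(), "TOPIC", slug="culture")
    print(res)  # expected shape: {"topics": [...]} or {"error": ...}
    res = await unfollow(None, FakeInfo(), "TOPIC", slug="culture")
    print(res)

asyncio.run(demo())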
113 resolvers/inbox/chats.py Normal file
@@ -0,0 +1,113 @@
import json
import uuid
from datetime import datetime, timezone

from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.redis import redis
from base.resolvers import mutation
from validations.inbox import Chat


@mutation.field("updateChat")
@login_required
async def update_chat(_, info, chat_new: Chat):
    """
    Update a chat.
    Requires info["request"].user.slug to be in chat["admins"].

    :param info: GraphQLInfo with request
    :param chat_new: dict with chat data
    :return: Result { error chat }
    """
    auth: AuthCredentials = info.context["request"].auth
    chat_id = chat_new["id"]
    chat = await redis.execute("GET", f"chats/{chat_id}")
    if not chat:
        return {"error": "chat not exist"}
    chat = dict(json.loads(chat))

    # TODO
    if auth.user_id in chat["admins"]:
        chat.update(
            {
                "title": chat_new.get("title", chat["title"]),
                "description": chat_new.get("description", chat["description"]),
                "updatedAt": int(datetime.now(tz=timezone.utc).timestamp()),
                "admins": chat_new.get("admins", chat.get("admins") or []),
                "users": chat_new.get("users", chat["users"]),
            }
        )
        # chat is a dict here, so index it by key instead of attribute access
        await redis.execute("SET", f"chats/{chat_id}", json.dumps(chat))
        await redis.execute("COMMIT")

    return {"error": None, "chat": chat}


@mutation.field("createChat")
@login_required
async def create_chat(_, info, title="", members=None):
    # avoid a mutable default argument
    members = members or []
    auth: AuthCredentials = info.context["request"].auth
    chat = {}
    print("create_chat members: %r" % members)
    if auth.user_id not in members:
        members.append(int(auth.user_id))

    # reuse a chat created before if one exists
    if len(members) == 2 and title == "":
        chat = None
        print(members)
        chatset1 = await redis.execute("SMEMBERS", f"chats_by_user/{members[0]}")
        if not chatset1:
            chatset1 = set([])
        print(chatset1)
        chatset2 = await redis.execute("SMEMBERS", f"chats_by_user/{members[1]}")
        if not chatset2:
            chatset2 = set([])
        print(chatset2)
        chatset = chatset1.intersection(chatset2)
        print(chatset)
        for c in chatset:
            chat = await redis.execute("GET", f"chats/{c.decode('utf-8')}")
            if chat:
                chat = json.loads(chat)
                if chat["title"] == "":
                    print("[inbox] createChat found old chat")
                    print(chat)
                    break
        if chat:
            return {"chat": chat, "error": "existed"}

    chat_id = str(uuid.uuid4())
    chat = {
        "id": chat_id,
        "users": members,
        "title": title,
        "createdBy": auth.user_id,
        "createdAt": int(datetime.now(tz=timezone.utc).timestamp()),
        "updatedAt": int(datetime.now(tz=timezone.utc).timestamp()),
        "admins": members if (len(members) == 2 and title == "") else [],
    }

    for m in members:
        await redis.execute("SADD", f"chats_by_user/{m}", chat_id)
    await redis.execute("SET", f"chats/{chat_id}", json.dumps(chat))
    await redis.execute("SET", f"chats/{chat_id}/next_message_id", str(0))
    await redis.execute("COMMIT")
    return {"error": None, "chat": chat}


@mutation.field("deleteChat")
@login_required
async def delete_chat(_, info, chat_id: str):
    auth: AuthCredentials = info.context["request"].auth

    # keep the key scheme consistent: no leading slash
    chat = await redis.execute("GET", f"chats/{chat_id}")
    if chat:
        chat = dict(json.loads(chat))
        if auth.user_id in chat["admins"]:
            await redis.execute("DEL", f"chats/{chat_id}")
            await redis.execute("SREM", "chats_by_user/" + str(auth.user_id), chat_id)
            await redis.execute("COMMIT")
    else:
        return {"error": "chat not exist"}
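Taken together, chat storage is a handful of Redis keys: chats/{id} (a JSON blob), chats_by_user/{user_id} (a set of chat ids), and chats/{id}/next_message_id (a counter). A sketch of reading one chat back (key names from the code above, the chat ID is made up):

import asyncio
import json

from base.redis import redis  # same client the resolvers use

async def peek_chat(chat_id: str):
    raw = await redis.execute("GET", f"chats/{chat_id}")
    if raw:
        chat = json.loads(raw)
        next_id = await redis.execute("GET", f"chats/{chat_id}/next_message_id")
        print(chat["title"], chat["users"], next_id)

asyncio.run(peek_chat("00000000-0000-0000-0000-000000000000"))  # hypothetical ID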
138 resolvers/inbox/load.py Normal file
@@ -0,0 +1,138 @@
import json

from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.orm import local_session
from base.redis import redis
from base.resolvers import query
from orm.user import User
from resolvers.zine.profile import followed_authors

from .unread import get_unread_counter

# from datetime import datetime, timedelta, timezone


async def load_messages(chat_id: str, limit: int = 5, offset: int = 0, ids=None):
    """load :limit messages for :chat_id with :offset"""
    messages = []
    # avoid a mutable default argument
    message_ids = list(ids or [])
    try:
        if limit:
            mids = await redis.lrange(f"chats/{chat_id}/message_ids", offset, offset + limit)
            mids = [mid.decode("utf-8") for mid in mids]
            message_ids += mids
    except Exception as e:
        print(e)
    if message_ids:
        message_keys = [f"chats/{chat_id}/messages/{mid}" for mid in message_ids]
        messages = await redis.mget(*message_keys)
        messages = [json.loads(msg.decode("utf-8")) for msg in messages]
        replies = []
        for m in messages:
            rt = m.get("replyTo")
            if rt:
                rt = int(rt)
                if rt not in message_ids:
                    replies.append(rt)
        if replies:
            messages += await load_messages(chat_id, limit=0, ids=replies)
    return messages


@query.field("loadChats")
@login_required
async def load_chats(_, info, limit: int = 50, offset: int = 0):
    """load :limit chats of current user with :offset"""
    auth: AuthCredentials = info.context["request"].auth

    cids = await redis.execute("SMEMBERS", "chats_by_user/" + str(auth.user_id))
    if cids:
        cids = list(cids)[offset : offset + limit]
    if not cids:
        print("[inbox.load] no chats were found")
        cids = []
    onliners = await redis.execute("SMEMBERS", "users-online")
    if not onliners:
        onliners = []
    chats = []
    for cid in cids:
        cid = cid.decode("utf-8")
        c = await redis.execute("GET", "chats/" + cid)
        if c:
            c = dict(json.loads(c))
            c["messages"] = await load_messages(cid, 5, 0)
            c["unread"] = await get_unread_counter(cid, auth.user_id)
            with local_session() as session:
                c["members"] = []
                for uid in c["users"]:
                    a = session.query(User).where(User.id == uid).first()
                    if a:
                        c["members"].append(
                            {
                                "id": a.id,
                                "slug": a.slug,
                                "userpic": a.userpic,
                                "name": a.name,
                                "lastSeen": a.lastSeen,
                                "online": a.id in onliners,
                            }
                        )
            chats.append(c)
    return {"chats": chats, "error": None}


@query.field("loadMessagesBy")
@login_required
async def load_messages_by(_, info, by, limit: int = 10, offset: int = 0):
    """load :limit messages of :chat_id with :offset"""

    auth: AuthCredentials = info.context["request"].auth
    userchats = await redis.execute("SMEMBERS", "chats_by_user/" + str(auth.user_id))
    userchats = [c.decode("utf-8") for c in userchats]
    # print('[inbox] userchats: %r' % userchats)
    if userchats:
        # print('[inbox] loading messages by...')
        messages = []
        by_chat = by.get("chat")
        if by_chat in userchats:
            chat = await redis.execute("GET", f"chats/{by_chat}")
            # print(chat)
            if not chat:
                return {"messages": [], "error": "chat not exist"}
            # everyone's messages in filtered chat
            messages = await load_messages(by_chat, limit, offset)
            return {"messages": sorted(list(messages), key=lambda m: m["createdAt"]), "error": None}
    else:
        return {"error": "Cannot access messages of this chat"}


@query.field("loadRecipients")
async def load_recipients(_, info, limit=50, offset=0):
    chat_users = []
    auth: AuthCredentials = info.context["request"].auth
    onliners = await redis.execute("SMEMBERS", "users-online")
    if not onliners:
        onliners = []
    try:
        chat_users += await followed_authors(auth.user_id)
        limit = limit - len(chat_users)
    except Exception:
        pass
    with local_session() as session:
        chat_users += session.query(User).where(User.emailConfirmed).limit(limit).offset(offset)
        members = []
        for a in chat_users:
            members.append(
                {
                    "id": a.id,
                    "slug": a.slug,
                    "userpic": a.userpic,
                    "name": a.name,
                    "lastSeen": a.lastSeen,
                    "online": a.id in onliners,
                }
            )
    return {"members": members, "error": None}
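Because createMessage (below) LPUSHes ids onto chats/{id}/message_ids, LRANGE with an offset pages from newest to oldest, which is what load_messages relies on. A quick sketch of that ordering (the key follows the scheme above, values are made up):

import asyncio

from base.redis import redis

async def demo_order():
    key = "chats/demo/message_ids"  # hypothetical chat ID
    for i in range(3):
        await redis.execute("LPUSH", key, str(i))
    print(await redis.lrange(key, 0, 5))  # newest first: [b"2", b"1", b"0"]

asyncio.run(demo_order())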
129 resolvers/inbox/messages.py Normal file
@@ -0,0 +1,129 @@
import json
from datetime import datetime, timezone

from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.redis import redis
from base.resolvers import mutation
from services.following import FollowingManager, FollowingResult


@mutation.field("createMessage")
@login_required
async def create_message(_, info, chat: str, body: str, replyTo=None):
    """create message with :body for :chat_id replying to :replyTo optionally"""
    auth: AuthCredentials = info.context["request"].auth

    chat = await redis.execute("GET", f"chats/{chat}")
    if not chat:
        return {"error": "chat not exist"}
    else:
        chat_dict = dict(json.loads(chat))
        message_id = await redis.execute("GET", f"chats/{chat_dict['id']}/next_message_id")
        message_id = int(message_id)
        new_message = {
            "chatId": chat_dict["id"],
            "id": message_id,
            "author": auth.user_id,
            "body": body,
            "createdAt": int(datetime.now(tz=timezone.utc).timestamp()),
        }
        if replyTo:
            new_message["replyTo"] = replyTo
        chat_dict["updatedAt"] = new_message["createdAt"]
        # persist the updated dict, not the stale JSON string fetched above
        await redis.execute("SET", f"chats/{chat_dict['id']}", json.dumps(chat_dict))
        print(f"[inbox] creating message {new_message}")
        await redis.execute(
            "SET", f"chats/{chat_dict['id']}/messages/{message_id}", json.dumps(new_message)
        )
        await redis.execute("LPUSH", f"chats/{chat_dict['id']}/message_ids", str(message_id))
        await redis.execute("SET", f"chats/{chat_dict['id']}/next_message_id", str(message_id + 1))

        users = chat_dict["users"]
        for user_slug in users:
            await redis.execute(
                "LPUSH", f"chats/{chat_dict['id']}/unread/{user_slug}", str(message_id)
            )

        result = FollowingResult("NEW", "chat", new_message)
        await FollowingManager.push("chat", result)

        return {"message": new_message, "error": None}


@mutation.field("updateMessage")
@login_required
async def update_message(_, info, chat_id: str, message_id: int, body: str):
    auth: AuthCredentials = info.context["request"].auth

    chat = await redis.execute("GET", f"chats/{chat_id}")
    if not chat:
        return {"error": "chat not exist"}

    message = await redis.execute("GET", f"chats/{chat_id}/messages/{message_id}")
    if not message:
        return {"error": "message not exist"}

    message = json.loads(message)
    if message["author"] != auth.user_id:
        return {"error": "access denied"}

    message["body"] = body
    message["updatedAt"] = int(datetime.now(tz=timezone.utc).timestamp())

    await redis.execute("SET", f"chats/{chat_id}/messages/{message_id}", json.dumps(message))

    result = FollowingResult("UPDATED", "chat", message)
    await FollowingManager.push("chat", result)

    return {"message": message, "error": None}


@mutation.field("deleteMessage")
@login_required
async def delete_message(_, info, chat_id: str, message_id: int):
    auth: AuthCredentials = info.context["request"].auth

    chat = await redis.execute("GET", f"chats/{chat_id}")
    if not chat:
        return {"error": "chat not exist"}
    chat = json.loads(chat)

    message = await redis.execute("GET", f"chats/{chat_id}/messages/{str(message_id)}")
    if not message:
        return {"error": "message not exist"}
    message = json.loads(message)
    if message["author"] != auth.user_id:
        return {"error": "access denied"}

    await redis.execute("LREM", f"chats/{chat_id}/message_ids", 0, str(message_id))
    await redis.execute("DEL", f"chats/{chat_id}/messages/{str(message_id)}")

    users = chat["users"]
    for user_id in users:
        await redis.execute("LREM", f"chats/{chat_id}/unread/{user_id}", 0, str(message_id))

    result = FollowingResult("DELETED", "chat", message)
    # keep the same call shape as the other push() calls above
    await FollowingManager.push("chat", result)

    return {}


@mutation.field("markAsRead")
@login_required
async def mark_as_read(_, info, chat_id: str, messages: list[int]):
    auth: AuthCredentials = info.context["request"].auth

    chat = await redis.execute("GET", f"chats/{chat_id}")
    if not chat:
        return {"error": "chat not exist"}

    chat = json.loads(chat)
    users = set(chat["users"])
    if auth.user_id not in users:
        return {"error": "access denied"}

    for message_id in messages:
        await redis.execute("LREM", f"chats/{chat_id}/unread/{auth.user_id}", 0, str(message_id))

    return {"error": None}
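The unread counter is simply the length of chats/{id}/unread/{user}: createMessage LPUSHes into it, markAsRead LREMs from it, and get_unread_counter (below) reads LLEN. A compressed sketch of that lifecycle (the ids are made up):

import asyncio

from base.redis import redis

async def demo_unread():
    key = "chats/demo/unread/42"  # hypothetical chat and user ids
    await redis.execute("LPUSH", key, "7")    # createMessage
    print(await redis.execute("LLEN", key))   # get_unread_counter -> 1
    await redis.execute("LREM", key, 0, "7")  # markAsRead
    print(await redis.execute("LLEN", key))   # -> 0

asyncio.run(demo_unread())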
96 resolvers/inbox/search.py Normal file
@@ -0,0 +1,96 @@
import json
from datetime import datetime, timedelta, timezone

from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.orm import local_session
from base.redis import redis
from base.resolvers import query
from orm.user import AuthorFollower, User
from resolvers.inbox.load import load_messages


@query.field("searchRecipients")
@login_required
async def search_recipients(_, info, query: str, limit: int = 50, offset: int = 0):
    result = []
    # TODO: maybe redis scan?
    auth: AuthCredentials = info.context["request"].auth
    # chats_by_user/* is a set, so read it with SMEMBERS (and no leading slash in the key)
    talk_before = await redis.execute("SMEMBERS", f"chats_by_user/{auth.user_id}")
    if talk_before:
        talk_before = list(talk_before)[offset : offset + limit]
        for chat_id in talk_before:
            members = await redis.execute("GET", f"chats/{chat_id}/users")
            if members:
                members = list(json.loads(members))
                for member in members:
                    if member.startswith(query):
                        if member not in result:
                            result.append(member)

    more_amount = limit - len(result)

    with local_session() as session:
        # followings
        result += (
            session.query(AuthorFollower.author)
            .join(User, User.id == AuthorFollower.follower)
            .where(User.slug.startswith(query))
            .offset(offset + len(result))
            .limit(more_amount)
        )

        more_amount = limit
        # followers
        result += (
            session.query(AuthorFollower.follower)
            .join(User, User.id == AuthorFollower.author)
            .where(User.slug.startswith(query))
            .offset(offset + len(result))
            .limit(more_amount)
        )
    return {"members": list(result), "error": None}


@query.field("searchMessages")
@login_required
async def search_user_chats(_, info, by, limit: int = 50, offset: int = 0):
    # resolver signature: take the current user from the request context
    auth: AuthCredentials = info.context["request"].auth
    cids = set()
    # set.union() returns a new set; update() mutates in place
    cids.update(await redis.execute("SMEMBERS", f"chats_by_user/{auth.user_id}"))
    messages = []

    by_author = by.get("author")
    if by_author:
        # all chats the author participates in
        cids.update(await redis.execute("SMEMBERS", f"chats_by_user/{by_author}"))
        for c in cids:
            c = c.decode("utf-8")
            messages += await load_messages(c, limit, offset)
        # keep only the author's messages
        messages = [m for m in messages if m["author"] == by_author]

    body_like = by.get("body")
    if body_like:
        if not messages:
            # search in all messages in all user's chats
            for c in cids:
                # FIXME: use redis scan here
                c = c.decode("utf-8")
                messages += await load_messages(c, limit, offset)
        # filter by body substring
        messages = [m for m in messages if body_like in m["body"]]

    days = by.get("days")
    if days:
        # compare timestamps, not a datetime minus an int
        after = int((datetime.now(tz=timezone.utc) - timedelta(days=days)).timestamp())
        messages = [m for m in messages if int(m["createdAt"]) > after]
    return {"messages": messages, "error": None}
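The by argument is a loose filter dict; from the branches above it understands three keys, which can be combined (values here are made-up examples):

by = {"author": 42}                              # only this author's messages
by = {"body": "hello"}                           # messages containing a substring
by = {"author": 42, "body": "hello", "days": 7}  # both, within the last week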
10 resolvers/inbox/unread.py Normal file
@@ -0,0 +1,10 @@
from base.redis import redis


async def get_unread_counter(chat_id: str, user_id: int):
    try:
        # chat_id is already a str here, so no .decode() is needed
        unread = await redis.execute("LLEN", f"chats/{chat_id}/unread/{user_id}")
        return unread or 0
    except Exception:
        return 0
89 resolvers/notifications.py Normal file
@@ -0,0 +1,89 @@
from sqlalchemy import and_, desc, select, update

from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.orm import local_session
from base.resolvers import mutation, query
from orm import Notification


@query.field("loadNotifications")
@login_required
async def load_notifications(_, info, params=None):
    if params is None:
        params = {}

    auth: AuthCredentials = info.context["request"].auth
    user_id = auth.user_id

    limit = params.get("limit", 50)
    offset = params.get("offset", 0)

    q = (
        select(Notification)
        .where(Notification.user == user_id)
        .order_by(desc(Notification.createdAt))
        .limit(limit)
        .offset(offset)
    )

    notifications = []
    with local_session() as session:
        total_count = session.query(Notification).where(Notification.user == user_id).count()

        total_unread_count = (
            session.query(Notification)
            .where(and_(Notification.user == user_id, Notification.seen == False))  # noqa: E712
            .count()
        )

        for [notification] in session.execute(q):
            notification.type = notification.type.name
            notifications.append(notification)

    return {
        "notifications": notifications,
        "totalCount": total_count,
        "totalUnreadCount": total_unread_count,
    }


@mutation.field("markNotificationAsRead")
@login_required
async def mark_notification_as_read(_, info, notification_id: int):
    auth: AuthCredentials = info.context["request"].auth
    user_id = auth.user_id

    with local_session() as session:
        notification = (
            session.query(Notification)
            .where(and_(Notification.id == notification_id, Notification.user == user_id))
            .one()
        )
        notification.seen = True
        session.commit()

    return {}


@mutation.field("markAllNotificationsAsRead")
@login_required
async def mark_all_notifications_as_read(_, info):
    auth: AuthCredentials = info.context["request"].auth
    user_id = auth.user_id

    statement = (
        update(Notification)
        .where(and_(Notification.user == user_id, Notification.seen == False))  # noqa: E712
        .values(seen=True)
    )

    with local_session() as session:
        try:
            session.execute(statement)
            session.commit()
        except Exception as e:
            session.rollback()
            print(f"[mark_all_notifications_as_read] error: {str(e)}")

    return {}
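A test-style sketch of calling loadNotifications directly with a stubbed request context; the fixtures are hypothetical and it assumes login_required is satisfied by the stub:

import asyncio
from types import SimpleNamespace

fake_info = SimpleNamespace(context={"request": SimpleNamespace(auth=SimpleNamespace(user_id=1))})

async def demo():
    page = await load_notifications(None, fake_info, {"limit": 20, "offset": 0})
    print(page["totalCount"], page["totalUnreadCount"])

asyncio.run(demo())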
@@ -1,316 +0,0 @@
import json
import time
from typing import List, Tuple

from sqlalchemy import and_, select
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import aliased
from sqlalchemy.sql import not_

from orm.author import Author
from orm.notification import (
    Notification,
    NotificationAction,
    NotificationEntity,
    NotificationSeen,
)
from orm.shout import Shout
from services.auth import login_required
from services.db import local_session
from services.schema import mutation, query
from utils.logger import root_logger as logger


def query_notifications(author_id: int, after: int = 0) -> Tuple[int, int, List[Tuple[Notification, bool]]]:
    notification_seen_alias = aliased(NotificationSeen)
    q = select(Notification, notification_seen_alias.viewer.label("seen")).outerjoin(
        NotificationSeen,
        and_(
            NotificationSeen.viewer == author_id,
            NotificationSeen.notification == Notification.id,
        ),
    )
    if after:
        q = q.filter(Notification.created_at > after)
    q = q.group_by(NotificationSeen.notification, Notification.created_at)

    with local_session() as session:
        total = (
            session.query(Notification)
            .filter(
                and_(
                    Notification.action == NotificationAction.CREATE.value,
                    Notification.created_at > after,
                )
            )
            .count()
        )

        unread = (
            session.query(Notification)
            .filter(
                and_(
                    Notification.action == NotificationAction.CREATE.value,
                    Notification.created_at > after,
                    not_(Notification.seen),
                )
            )
            .count()
        )

        notifications_result = session.execute(q)
        notifications = []
        for n, seen in notifications_result:
            notifications.append((n, seen))

    return total, unread, notifications


def group_notification(thread, authors=None, shout=None, reactions=None, entity="follower", action="follow"):
    reactions = reactions or []
    authors = authors or []
    return {
        "thread": thread,
        "authors": authors,
        "updated_at": int(time.time()),
        "shout": shout,
        "reactions": reactions,
        "entity": entity,
        "action": action,
    }


def get_notifications_grouped(author_id: int, after: int = 0, limit: int = 10, offset: int = 0):
    """
    Retrieves notifications for a given author.

    Args:
        author_id (int): The ID of the author for whom notifications are retrieved.
        after (int, optional): If provided, only notifications created after this timestamp are considered.
        limit (int, optional): The maximum number of groups to retrieve.
        offset (int, optional): Pagination offset.

    Returns:
        Dict[str, NotificationGroup], int, int: A dictionary where keys are thread IDs
        and values are NotificationGroup objects, plus unread and total amounts.

    This function queries the database to retrieve notifications for the specified author, considering optional filters.
    The result is a dictionary where each key is a thread ID, and the corresponding value is a NotificationGroup
    containing information about the notifications within that thread.

    NotificationGroup structure:
    {
        entity: str,  # Type of entity (e.g., 'reaction', 'shout', 'follower').
        updated_at: int,  # Timestamp of the latest update in the thread.
        shout: Optional[NotificationShout]
        reactions: List[int],  # List of reaction ids within the thread.
        authors: List[NotificationAuthor],  # List of authors involved in the thread.
    }
    """
    total, unread, notifications = query_notifications(author_id, after)
    groups_by_thread = {}
    groups_amount = 0

    for notification, seen in notifications:
        if (groups_amount + offset) >= limit:
            break

        payload = json.loads(str(notification.payload))

        if str(notification.entity) == NotificationEntity.SHOUT.value:
            shout = payload
            shout_id = shout.get("id")
            author_id = shout.get("created_by")
            thread_id = f"shout-{shout_id}"
            with local_session() as session:
                author = session.query(Author).filter(Author.id == author_id).first()
                shout = session.query(Shout).filter(Shout.id == shout_id).first()
                if author and shout:
                    author = author.dict()
                    shout = shout.dict()
                    group = group_notification(
                        thread_id,
                        shout=shout,
                        authors=[author],
                        action=str(notification.action),
                        entity=str(notification.entity),
                    )
                    groups_by_thread[thread_id] = group
                    groups_amount += 1

        elif str(notification.entity) == NotificationEntity.REACTION.value:
            reaction = payload
            if not isinstance(reaction, dict):
                raise ValueError("reaction data is not consistent")
            shout_id = reaction.get("shout")
            author_id = reaction.get("created_by", 0)
            if shout_id and author_id:
                with local_session() as session:
                    author = session.query(Author).filter(Author.id == author_id).first()
                    shout = session.query(Shout).filter(Shout.id == shout_id).first()
                    if shout and author:
                        author = author.dict()
                        shout = shout.dict()
                        reply_id = reaction.get("reply_to")
                        thread_id = f"shout-{shout_id}"
                        if reply_id and reaction.get("kind", "").lower() == "comment":
                            thread_id += f"{reply_id}"
                        existing_group = groups_by_thread.get(thread_id)
                        if existing_group:
                            existing_group["seen"] = False
                            existing_group["authors"].append(author_id)
                            existing_group["reactions"] = existing_group["reactions"] or []
                            existing_group["reactions"].append(reaction)
                            groups_by_thread[thread_id] = existing_group
                        else:
                            group = group_notification(
                                thread_id,
                                authors=[author],
                                shout=shout,
                                reactions=[reaction],
                                entity=str(notification.entity),
                                action=str(notification.action),
                            )
                            if group:
                                groups_by_thread[thread_id] = group
                                groups_amount += 1

        elif str(notification.entity) == "follower":
            thread_id = "followers"
            # payload was already JSON-decoded above
            follower = payload
            group = groups_by_thread.get(thread_id)
            if group:
                if str(notification.action) == "follow":
                    group["authors"].append(follower)
                elif str(notification.action) == "unfollow":
                    follower_id = follower.get("id")
                    for author in group["authors"]:
                        if author.get("id") == follower_id:
                            group["authors"].remove(author)
                            break
            else:
                group = group_notification(
                    thread_id,
                    authors=[follower],
                    entity=str(notification.entity),
                    action=str(notification.action),
                )
                groups_amount += 1
            groups_by_thread[thread_id] = group
    return groups_by_thread, unread, total


@query.field("load_notifications")
@login_required
async def load_notifications(_, info, after: int, limit: int = 50, offset=0):
    author_dict = info.context.get("author")
    author_id = author_dict.get("id")
    error = None
    total = 0
    unread = 0
    notifications = []
    try:
        if author_id:
            groups, unread, total = get_notifications_grouped(author_id, after, limit)
            # groups are plain dicts, so sort by key lookup
            notifications = sorted(groups.values(), key=lambda group: group["updated_at"], reverse=True)
    except Exception as e:
        error = e
        logger.error(e)
    return {
        "notifications": notifications,
        "total": total,
        "unread": unread,
        "error": error,
    }


@mutation.field("notification_mark_seen")
@login_required
async def notification_mark_seen(_, info, notification_id: int):
    author_id = info.context.get("author", {}).get("id")
    if author_id:
        with local_session() as session:
            try:
                ns = NotificationSeen(notification=notification_id, viewer=author_id)
                session.add(ns)
                session.commit()
            except SQLAlchemyError as e:
                session.rollback()
                logger.error(f"seen mutation failed: {e}")
                return {"error": "cant mark as read"}
    return {"error": None}


@mutation.field("notifications_seen_after")
@login_required
async def notifications_seen_after(_, info, after: int):
    # TODO: use latest loaded notification_id as input offset parameter
    error = None
    try:
        author_id = info.context.get("author", {}).get("id")
        if author_id:
            with local_session() as session:
                nnn = session.query(Notification).filter(and_(Notification.created_at > after)).all()
                for n in nnn:
                    try:
                        ns = NotificationSeen(notification=n.id, viewer=author_id)
                        session.add(ns)
                        session.commit()
                    except SQLAlchemyError:
                        session.rollback()
    except Exception as e:
        print(e)
        error = "cant mark as read"
    return {"error": error}


@mutation.field("notifications_seen_thread")
@login_required
async def notifications_seen_thread(_, info, thread: str, after: int):
    error = None
    author_id = info.context.get("author", {}).get("id")
    if author_id:
        [shout_id, reply_to_id] = thread.split(":")
        with local_session() as session:
            # TODO: handle new follower and new shout notifications
            new_reaction_notifications = (
                session.query(Notification)
                .filter(
                    Notification.action == "create",
                    Notification.entity == "reaction",
                    Notification.created_at > after,
                )
                .all()
            )
            removed_reaction_notifications = (
                session.query(Notification)
                .filter(
                    Notification.action == "delete",
                    Notification.entity == "reaction",
                    Notification.created_at > after,
                )
                .all()
            )
            exclude = set()
            for nr in removed_reaction_notifications:
                reaction = json.loads(str(nr.payload))
                reaction_id = reaction.get("id")
                exclude.add(reaction_id)
            for n in new_reaction_notifications:
                reaction = json.loads(str(n.payload))
                reaction_id = reaction.get("id")
                if (
                    reaction_id not in exclude
                    # the thread parts are strings, while payload values may be ints
                    and str(reaction.get("shout")) == shout_id
                    and str(reaction.get("reply_to")) == reply_to_id
                ):
                    try:
                        ns = NotificationSeen(notification=n.id, viewer=author_id)
                        session.add(ns)
                        session.commit()
                    except Exception as e:
                        logger.warning(e)
                        session.rollback()
    else:
        error = "You are not logged in"
    return {"error": error}
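To make the grouping concrete: reactions on the same shout (and comment thread) collapse into one group keyed by thread id, and later reactions are appended rather than re-grouped. A sketch with made-up values, shape taken from group_notification above:

group = group_notification(
    "shout-12",
    authors=[{"id": 1, "slug": "alice"}],
    shout={"id": 12, "slug": "essay"},
    reactions=[{"id": 101, "kind": "COMMENT"}],
    entity="reaction",
    action="create",
)
# a second reaction in the same thread mutates the existing group:
group["authors"].append({"id": 2, "slug": "bob"})
group["reactions"].append({"id": 102, "kind": "COMMENT"})
print(group["thread"], len(group["reactions"]))  # shout-12 2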
@@ -1,49 +0,0 @@
from sqlalchemy import and_

from orm.rating import is_negative, is_positive
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout
from services.db import local_session
from utils.diff import apply_diff, get_diff


def handle_proposing(kind: ReactionKind, reply_to: int, shout_id: int):
    with local_session() as session:
        if is_positive(kind):
            replied_reaction = (
                session.query(Reaction).filter(Reaction.id == reply_to, Reaction.shout == shout_id).first()
            )

            if replied_reaction and replied_reaction.kind is ReactionKind.PROPOSE.value and replied_reaction.quote:
                # patch all the proposals' quotes
                proposals = (
                    session.query(Reaction)
                    .filter(
                        and_(
                            Reaction.shout == shout_id,
                            Reaction.kind == ReactionKind.PROPOSE.value,
                        )
                    )
                    .all()
                )

                # patch shout's body (pass a dict, not a set literal)
                shout = session.query(Shout).filter(Shout.id == shout_id).first()
                body = replied_reaction.quote
                Shout.update(shout, {"body": body})
                session.add(shout)
                session.commit()

                # the reaction carries a quote -> refresh every proposal
                # for the corresponding Shout
                for proposal in proposals:
                    if proposal.quote:
                        proposal_diff = get_diff(shout.body, proposal.quote)
                        proposal_dict = proposal.dict()
                        proposal_dict["quote"] = apply_diff(replied_reaction.quote, proposal_diff)
                        Reaction.update(proposal, proposal_dict)
                        session.add(proposal)

        if is_negative(kind):
            # TODO: rejection logic
            pass
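The proposal patching above leans on utils.diff; assuming get_diff/apply_diff form a diff-then-reapply round trip (their semantics are inferred from usage here, not from utils/diff.py itself), the intended flow is roughly:

from utils.diff import apply_diff, get_diff

old_body = "original paragraph"                  # hypothetical shout body
proposal_quote = "original paragraph, improved"  # hypothetical proposal quote

diff = get_diff(old_body, proposal_quote)          # how the quote differs from the body
new_quote = apply_diff("accepted new body", diff)  # re-anchor the quote on the accepted body
print(new_quote)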
@@ -1,327 +0,0 @@
|
||||
from sqlalchemy import and_, case, func, select, true
|
||||
from sqlalchemy.orm import aliased
|
||||
|
||||
from orm.author import Author, AuthorRating
|
||||
from orm.reaction import Reaction, ReactionKind
|
||||
from orm.shout import Shout
|
||||
from services.auth import login_required
|
||||
from services.db import local_session
|
||||
from services.schema import mutation, query
|
||||
from utils.logger import root_logger as logger
|
||||
|
||||
|
||||
@query.field("get_my_rates_comments")
|
||||
@login_required
|
||||
async def get_my_rates_comments(_, info, comments: list[int]) -> list[dict]:
|
||||
"""
|
||||
Получение реакций пользователя на комментарии
|
||||
"""
|
||||
author_dict = info.context.get("author") if info.context else None
|
||||
author_id = author_dict.get("id") if author_dict else None
|
||||
if not author_id:
|
||||
return {"error": "Author not found"}
|
||||
|
||||
# Подзапрос для реакций текущего пользователя
|
||||
rated_query = (
|
||||
select(Reaction.id.label("comment_id"), Reaction.kind.label("my_rate"))
|
||||
.where(
|
||||
and_(
|
||||
Reaction.reply_to.in_(comments),
|
||||
Reaction.created_by == author_id,
|
||||
Reaction.deleted_at.is_(None),
|
||||
Reaction.kind.in_([ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]),
|
||||
)
|
||||
)
|
||||
.order_by(Reaction.shout, Reaction.created_at.desc())
|
||||
.distinct(Reaction.shout)
|
||||
)
|
||||
with local_session() as session:
|
||||
comments_result = session.execute(rated_query).all()
|
||||
return [{"comment_id": row.comment_id, "my_rate": row.my_rate} for row in comments_result]
|
||||
|
||||
|
||||
@query.field("get_my_rates_shouts")
|
||||
@login_required
|
||||
async def get_my_rates_shouts(_, info, shouts):
|
||||
"""
|
||||
Получение реакций пользователя на публикации
|
||||
"""
|
||||
author_dict = info.context.get("author") if info.context else None
|
||||
author_id = author_dict.get("id") if author_dict else None
|
||||
|
||||
if not author_id:
|
||||
return []
|
||||
|
||||
with local_session() as session:
|
||||
try:
|
||||
stmt = (
|
||||
select(Reaction)
|
||||
.where(
|
||||
and_(
|
||||
Reaction.shout.in_(shouts),
|
||||
Reaction.reply_to.is_(None),
|
||||
Reaction.created_by == author_id,
|
||||
Reaction.deleted_at.is_(None),
|
||||
Reaction.kind.in_([ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]),
|
||||
)
|
||||
)
|
||||
.order_by(Reaction.shout, Reaction.created_at.desc())
|
||||
.distinct(Reaction.shout)
|
||||
)
|
||||
|
||||
result = session.execute(stmt).all()
|
||||
|
||||
return [
|
||||
{
|
||||
"shout_id": row[0].shout, # Получаем shout_id из объекта Reaction
|
||||
"my_rate": row[0].kind, # Получаем kind (my_rate) из объекта Reaction
|
||||
}
|
||||
for row in result
|
||||
]
|
||||
except Exception as e:
|
||||
logger.error(f"Error in get_my_rates_shouts: {e}")
|
||||
return []
|
||||
|
||||
|
||||
@mutation.field("rate_author")
|
||||
@login_required
|
||||
async def rate_author(_, info, rated_slug, value):
|
||||
info.context["user_id"]
|
||||
rater_id = info.context.get("author", {}).get("id")
|
||||
with local_session() as session:
|
||||
rater_id = int(rater_id)
|
||||
rated_author = session.query(Author).filter(Author.slug == rated_slug).first()
|
||||
if rater_id and rated_author:
|
||||
rating: AuthorRating = (
|
||||
session.query(AuthorRating)
|
||||
.filter(
|
||||
and_(
|
||||
AuthorRating.rater == rater_id,
|
||||
AuthorRating.author == rated_author.id,
|
||||
)
|
||||
)
|
||||
.first()
|
||||
)
|
||||
if rating:
|
||||
rating.plus = value > 0
|
||||
session.add(rating)
|
||||
session.commit()
|
||||
return {}
|
||||
else:
|
||||
try:
|
||||
rating = AuthorRating(rater=rater_id, author=rated_author.id, plus=value > 0)
|
||||
session.add(rating)
|
||||
session.commit()
|
||||
except Exception as err:
|
||||
return {"error": err}
|
||||
return {}
|
||||
|
||||
|
||||
def count_author_comments_rating(session, author_id) -> int:
|
||||
replied_alias = aliased(Reaction)
|
||||
replies_likes = (
|
||||
session.query(replied_alias)
|
||||
.join(Reaction, replied_alias.id == Reaction.reply_to)
|
||||
.where(
|
||||
and_(
|
||||
replied_alias.created_by == author_id,
|
||||
replied_alias.kind == ReactionKind.COMMENT.value,
|
||||
)
|
||||
)
|
||||
.filter(replied_alias.kind == ReactionKind.LIKE.value)
|
||||
.count()
|
||||
) or 0
|
||||
replies_dislikes = (
|
||||
session.query(replied_alias)
|
||||
.join(Reaction, replied_alias.id == Reaction.reply_to)
|
||||
.where(
|
||||
and_(
|
||||
replied_alias.created_by == author_id,
|
||||
replied_alias.kind == ReactionKind.COMMENT.value,
|
||||
)
|
||||
)
|
||||
.filter(replied_alias.kind == ReactionKind.DISLIKE.value)
|
||||
.count()
|
||||
) or 0
|
||||
|
||||
return replies_likes - replies_dislikes
|
||||
|
||||
|
||||
def count_author_shouts_rating(session, author_id) -> int:
|
||||
shouts_likes = (
|
||||
session.query(Reaction, Shout)
|
||||
.join(Shout, Shout.id == Reaction.shout)
|
||||
.filter(
|
||||
and_(
|
||||
Shout.authors.any(id=author_id),
|
||||
Reaction.kind == ReactionKind.LIKE.value,
|
||||
)
|
||||
)
|
||||
.count()
|
||||
or 0
|
||||
)
|
||||
shouts_dislikes = (
|
||||
session.query(Reaction, Shout)
|
||||
.join(Shout, Shout.id == Reaction.shout)
|
||||
.filter(
|
||||
and_(
|
||||
Shout.authors.any(id=author_id),
|
||||
Reaction.kind == ReactionKind.DISLIKE.value,
|
||||
)
|
||||
)
|
||||
.count()
|
||||
or 0
|
||||
)
|
||||
return shouts_likes - shouts_dislikes
|
||||
|
||||
|
||||
def get_author_rating_old(session, author: Author):
|
||||
likes_count = (
|
||||
        session.query(AuthorRating).filter(and_(AuthorRating.author == author.id, AuthorRating.plus.is_(True))).count()
    )
    dislikes_count = (
        session.query(AuthorRating)
        .filter(and_(AuthorRating.author == author.id, AuthorRating.plus.is_not(True)))
        .count()
    )
    return likes_count - dislikes_count


def get_author_rating_shouts(session, author: Author) -> int:
    q = (
        select(
            func.coalesce(
                func.sum(
                    case(
                        (Reaction.kind == ReactionKind.LIKE.value, 1),
                        (Reaction.kind == ReactionKind.DISLIKE.value, -1),
                        else_=0,
                    )
                ),
                0,
            ).label("shouts_rating")
        )
        .select_from(Reaction)
        .outerjoin(Shout, Shout.authors.any(id=author.id))
        .outerjoin(
            Reaction,
            and_(
                Reaction.reply_to.is_(None),
                Reaction.shout == Shout.id,
                Reaction.deleted_at.is_(None),
            ),
        )
    )
    result = session.execute(q).scalar()
    return result


def get_author_rating_comments(session, author: Author) -> int:
    replied_comment = aliased(Reaction)
    q = (
        select(
            func.coalesce(
                func.sum(
                    case(
                        (Reaction.kind == ReactionKind.LIKE.value, 1),
                        (Reaction.kind == ReactionKind.DISLIKE.value, -1),
                        else_=0,
                    )
                ),
                0,
            ).label("comments_rating")
        )
        .select_from(Reaction)
        .outerjoin(
            Reaction,
            and_(
                replied_comment.kind == ReactionKind.COMMENT.value,
                replied_comment.created_by == author.id,
                Reaction.kind.in_([ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]),
                Reaction.reply_to == replied_comment.id,
                Reaction.deleted_at.is_(None),
            ),
        )
    )
    result = session.execute(q).scalar()
    return result


def add_author_rating_columns(q, group_list):
    # NOTE: method is not used

    # old karma
    q = q.outerjoin(AuthorRating, AuthorRating.author == Author.id)
    q = q.add_columns(func.sum(case((AuthorRating.plus == true(), 1), else_=-1)).label("rating"))

    # by shouts rating
    shout_reaction = aliased(Reaction)
    shouts_rating_subq = (
        select(
            Author.id,
            func.coalesce(
                func.sum(
                    case(
                        (shout_reaction.kind == ReactionKind.LIKE.value, 1),
                        (shout_reaction.kind == ReactionKind.DISLIKE.value, -1),
                        else_=0,
                    )
                ),
                0,
            ).label("shouts_rating"),
        )
        .select_from(shout_reaction)
        .outerjoin(Shout, Shout.authors.any(id=Author.id))
        .outerjoin(
            shout_reaction,
            and_(
                shout_reaction.reply_to.is_(None),
                shout_reaction.shout == Shout.id,
                shout_reaction.deleted_at.is_(None),
            ),
        )
        .group_by(Author.id)
        .subquery()
    )

    q = q.outerjoin(shouts_rating_subq, Author.id == shouts_rating_subq.c.id)
    q = q.add_columns(shouts_rating_subq.c.shouts_rating)
    group_list = [shouts_rating_subq.c.shouts_rating]

    # by comments
    replied_comment = aliased(Reaction)
    reaction_2 = aliased(Reaction)
    comments_subq = (
        select(
            Author.id,
            func.coalesce(
                func.sum(
                    case(
                        (reaction_2.kind == ReactionKind.LIKE.value, 1),
                        (reaction_2.kind == ReactionKind.DISLIKE.value, -1),
                        else_=0,
                    )
                ),
                0,
            ).label("comments_rating"),
        )
        .select_from(reaction_2)
        .outerjoin(
            replied_comment,
            and_(
                replied_comment.kind == ReactionKind.COMMENT.value,
                replied_comment.created_by == Author.id,
                reaction_2.kind.in_([ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]),
                reaction_2.reply_to == replied_comment.id,
                reaction_2.deleted_at.is_(None),
            ),
        )
        .group_by(Author.id)
        .subquery()
    )

    q = q.outerjoin(comments_subq, Author.id == comments_subq.c.id)
    q = q.add_columns(comments_subq.c.comments_rating)
    group_list.extend([comments_subq.c.comments_rating])

    return q, group_list
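
# --- Illustrative sketch, not part of the original module ---
# A hedged example of how the three components above could combine into one
# author rating. The name `get_author_rating_old` for the truncated
# likes/dislikes function above and the flat (unweighted) summation are
# assumptions, not something this file prescribes.
def get_author_rating_total(session, author: Author) -> int:
    karma = get_author_rating_old(session, author)  # hypothetical name of the truncated function
    shouts_rating = get_author_rating_shouts(session, author) or 0
    comments_rating = get_author_rating_comments(session, author) or 0
    return karma + shouts_rating + comments_rating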
@@ -1,587 +0,0 @@
import time

from sqlalchemy import and_, asc, case, desc, func, select
from sqlalchemy.orm import aliased

from orm.author import Author
from orm.rating import PROPOSAL_REACTIONS, RATING_REACTIONS, is_negative, is_positive
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor
from resolvers.follower import follow
from resolvers.proposals import handle_proposing
from resolvers.stat import update_author_stat
from services.auth import add_user_role, login_required
from services.db import local_session
from services.notify import notify_reaction
from services.schema import mutation, query
from utils.logger import root_logger as logger


def query_reactions():
    """
    Base query for fetching reactions with associated authors and shouts.

    :return: Base query.
    """
    return (
        select(
            Reaction,
            Author,
            Shout,
        )
        .select_from(Reaction)
        .join(Author, Reaction.created_by == Author.id)
        .join(Shout, Reaction.shout == Shout.id)
    )


def add_reaction_stat_columns(q):
    """
    Add statistical columns to a reaction query.

    :param q: SQL query for reactions.
    :return: Query with added statistics columns.
    """
    aliased_reaction = aliased(Reaction)
    # Join reactions and add statistical columns
    q = q.outerjoin(
        aliased_reaction,
        and_(
            aliased_reaction.reply_to == Reaction.id,
            aliased_reaction.deleted_at.is_(None),
        ),
    ).add_columns(
        # Count unique comments
        func.coalesce(
            func.count(aliased_reaction.id).filter(aliased_reaction.kind == ReactionKind.COMMENT.value), 0
        ).label("comments_stat"),
        # Calculate rating as the difference between likes and dislikes
        func.sum(
            case(
                (aliased_reaction.kind == ReactionKind.LIKE.value, 1),
                (aliased_reaction.kind == ReactionKind.DISLIKE.value, -1),
                else_=0,
            )
        ).label("rating_stat"),
    )
    return q
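
# Usage sketch: the exact pattern the resolvers below follow. Build the base
# query, attach the stat columns, then group so the aggregates are well-formed.
q = add_reaction_stat_columns(query_reactions())
q = q.group_by(Reaction.id, Author.id, Shout.id)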
def get_reactions_with_stat(q, limit, offset):
    """
    Execute the reaction query and retrieve reactions with statistics.

    :param q: Query with reactions and statistics.
    :param limit: Number of reactions to load.
    :param offset: Pagination offset.
    :return: List of reactions.
    """
    q = q.limit(limit).offset(offset)
    reactions = []

    with local_session() as session:
        result_rows = session.execute(q)
        for reaction, author, shout, commented_stat, rating_stat in result_rows:
            # Skip reactions with a missing shout or author
            if not shout or not author:
                logger.error(f"Skipped reaction with missing shout or author: {reaction.dict()}")
                continue

            reaction.created_by = author.dict()
            reaction.shout = shout.dict()
            reaction.stat = {"rating": rating_stat, "comments": commented_stat}
            reactions.append(reaction)

    return reactions
def is_featured_author(session, author_id) -> bool:
    """
    Check if an author has at least one featured article.

    :param session: Database session.
    :param author_id: Author ID.
    :return: True if the author has a featured article, else False.
    """
    return session.query(
        session.query(Shout).where(Shout.authors.any(id=author_id)).filter(Shout.featured_at.is_not(None)).exists()
    ).scalar()
def check_to_feature(session, approver_id, reaction) -> bool:
    """
    Make a shout featured if it receives more than 4 votes.

    :param session: Database session.
    :param approver_id: Approver author ID.
    :param reaction: Reaction object.
    :return: True if the shout should be featured, else False.
    """
    if not reaction.reply_to and is_positive(reaction.kind):
        approvers = {approver_id}
        # Count the number of approvers
        reacted_readers = (
            session.query(Reaction.created_by)
            .filter(Reaction.shout == reaction.shout, is_positive(Reaction.kind), Reaction.deleted_at.is_(None))
            .distinct()
        )

        for (reader_id,) in reacted_readers:  # each row is a one-column tuple
            if is_featured_author(session, reader_id):
                approvers.add(reader_id)
        return len(approvers) > 4
    return False
def check_to_unfeature(session, rejecter_id, reaction) -> bool:
    """
    Unfeature a shout if at least 20% of its rating reactions are negative.

    :param session: Database session.
    :param rejecter_id: Rejecter author ID.
    :param reaction: Reaction object.
    :return: True if the shout should be unfeatured, else False.
    """
    if not reaction.reply_to and is_negative(reaction.kind):
        total_reactions = (
            session.query(Reaction)
            .filter(
                Reaction.shout == reaction.shout, Reaction.kind.in_(RATING_REACTIONS), Reaction.deleted_at.is_(None)
            )
            .count()
        )

        negative_reactions = (
            session.query(Reaction)
            .filter(Reaction.shout == reaction.shout, is_negative(Reaction.kind), Reaction.deleted_at.is_(None))
            .count()
        )

        return total_reactions > 0 and (negative_reactions / total_reactions) >= 0.2
    return False
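
# Worked example of the 20% rule above (illustrative numbers): out of 10
# non-deleted rating reactions on a shout, 2 negative ones are enough,
# since 2 / 10 = 0.2 meets the threshold.
total_reactions = 10
negative_reactions = 2
assert total_reactions > 0 and (negative_reactions / total_reactions) >= 0.2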
async def set_featured(session, shout_id):
    """
    Feature a shout and update the author's role.

    :param session: Database session.
    :param shout_id: Shout ID.
    """
    s = session.query(Shout).filter(Shout.id == shout_id).first()
    if s:
        current_time = int(time.time())
        s.featured_at = current_time
        session.commit()
        author = session.query(Author).filter(Author.id == s.created_by).first()
        if author:
            await add_user_role(str(author.user))
        session.add(s)
        session.commit()


def set_unfeatured(session, shout_id):
    """
    Unfeature a shout.

    :param session: Database session.
    :param shout_id: Shout ID.
    """
    session.query(Shout).filter(Shout.id == shout_id).update({"featured_at": None})
    session.commit()
async def _create_reaction(session, shout_id: int, is_author: bool, author_id: int, reaction) -> dict:
    """
    Create a new reaction and perform related actions such as updating counters and notification.

    :param session: Database session.
    :param shout_id: Shout ID.
    :param is_author: Whether the reacting user is one of the shout's authors.
    :param author_id: Author ID.
    :param reaction: Dictionary with reaction data.
    :return: Dictionary with created reaction data.
    """
    r = Reaction(**reaction)
    session.add(r)
    session.commit()
    rdict = r.dict()

    # Update author stat for comments
    if r.kind == ReactionKind.COMMENT.value:
        update_author_stat(author_id)

    # Handle proposal
    if r.reply_to and r.kind in PROPOSAL_REACTIONS and is_author:
        handle_proposing(r.kind, r.reply_to, shout_id)

    # Handle rating
    if r.kind in RATING_REACTIONS:
        if check_to_unfeature(session, author_id, r):
            set_unfeatured(session, shout_id)
        elif check_to_feature(session, author_id, r):
            await set_featured(session, shout_id)

    # Notify creation
    await notify_reaction(rdict, "create")

    return rdict
def prepare_new_rating(reaction: dict, shout_id: int, session, author_id: int):
    """
    Check whether this author may rate the shout.

    :param reaction: Dictionary with reaction data.
    :param shout_id: Shout ID.
    :param session: Database session.
    :param author_id: Author ID.
    :return: Dictionary with an error, or None.
    """
    kind = reaction.get("kind")
    opposite_kind = ReactionKind.DISLIKE.value if is_positive(kind) else ReactionKind.LIKE.value

    existing_ratings = (
        session.query(Reaction)
        .filter(
            Reaction.shout == shout_id,
            Reaction.created_by == author_id,
            Reaction.kind.in_(RATING_REACTIONS),
            Reaction.deleted_at.is_(None),
        )
        .all()
    )

    for r in existing_ratings:
        if r.kind == kind:
            return {"error": "You can't rate the same thing twice"}
        if r.kind == opposite_kind:
            return {"error": "Remove opposite vote first"}
    if shout_id in [r.shout for r in existing_ratings]:
        return {"error": "You can't rate your own thing"}

    return
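
# Usage sketch for the guard above (ids are illustrative): a repeated LIKE
# from the same author on the same shout is rejected before any write.
with local_session() as session:
    error = prepare_new_rating({"kind": ReactionKind.LIKE.value}, shout_id=1, session=session, author_id=1)
    if error:
        print(error["error"])  # e.g. "You can't rate the same thing twice"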
@mutation.field("create_reaction")
|
||||
@login_required
|
||||
async def create_reaction(_, info, reaction):
|
||||
"""
|
||||
Create a new reaction through a GraphQL request.
|
||||
|
||||
:param info: GraphQL context info.
|
||||
:param reaction: Dictionary with reaction data.
|
||||
:return: Dictionary with created reaction data or error.
|
||||
"""
|
||||
reaction_input = reaction
|
||||
author_dict = info.context.get("author", {})
|
||||
author_id = author_dict.get("id")
|
||||
shout_id = int(reaction_input.get("shout", "0"))
|
||||
|
||||
logger.debug(f"Creating reaction with data: {reaction_input}")
|
||||
logger.debug(f"Author ID: {author_id}, Shout ID: {shout_id}")
|
||||
|
||||
if not shout_id or not author_id:
|
||||
return {"error": "Shout ID and author ID are required to create a reaction."}
|
||||
|
||||
try:
|
||||
with local_session() as session:
|
||||
authors = session.query(ShoutAuthor.author).filter(ShoutAuthor.shout == shout_id).scalar()
|
||||
is_author = (
|
||||
bool(list(filter(lambda x: x == int(author_id), authors))) if isinstance(authors, list) else False
|
||||
)
|
||||
reaction_input["created_by"] = author_id
|
||||
kind = reaction_input.get("kind")
|
||||
|
||||
# handle ratings
|
||||
if kind in RATING_REACTIONS:
|
||||
logger.debug(f"creating rating reaction: {kind}")
|
||||
error_result = prepare_new_rating(reaction_input, shout_id, session, author_id)
|
||||
if error_result:
|
||||
logger.error(f"Rating preparation error: {error_result}")
|
||||
return error_result
|
||||
|
||||
# handle all reactions
|
||||
rdict = await _create_reaction(session, shout_id, is_author, author_id, reaction_input)
|
||||
logger.debug(f"Created reaction result: {rdict}")
|
||||
|
||||
# follow if liked
|
||||
if kind == ReactionKind.LIKE.value:
|
||||
try:
|
||||
follow(None, info, "shout", shout_id=shout_id)
|
||||
except Exception:
|
||||
pass
|
||||
shout = session.query(Shout).filter(Shout.id == shout_id).first()
|
||||
if not shout:
|
||||
return {"error": "Shout not found"}
|
||||
rdict["shout"] = shout.dict()
|
||||
rdict["created_by"] = author_dict
|
||||
return {"reaction": rdict}
|
||||
except Exception as e:
|
||||
import traceback
|
||||
|
||||
traceback.print_exc()
|
||||
logger.error(f"{type(e).__name__}: {e}")
|
||||
return {"error": "Cannot create reaction."}
|
||||
|
||||
|
||||
@mutation.field("update_reaction")
|
||||
@login_required
|
||||
async def update_reaction(_, info, reaction):
|
||||
"""
|
||||
Update an existing reaction through a GraphQL request.
|
||||
|
||||
:param info: GraphQL context info.
|
||||
:param reaction: Dictionary with reaction data.
|
||||
:return: Dictionary with updated reaction data or error.
|
||||
"""
|
||||
user_id = info.context.get("user_id")
|
||||
roles = info.context.get("roles")
|
||||
rid = reaction.get("id")
|
||||
|
||||
if not rid or not user_id or not roles:
|
||||
return {"error": "Invalid input data"}
|
||||
|
||||
del reaction["id"]
|
||||
|
||||
with local_session() as session:
|
||||
try:
|
||||
reaction_query = query_reactions().filter(Reaction.id == rid)
|
||||
reaction_query = add_reaction_stat_columns(reaction_query)
|
||||
reaction_query = reaction_query.group_by(Reaction.id, Author.id, Shout.id)
|
||||
|
||||
result = session.execute(reaction_query).unique().first()
|
||||
if result:
|
||||
r, author, shout, commented_stat, rating_stat = result
|
||||
if not r or not author:
|
||||
return {"error": "Invalid reaction ID or unauthorized"}
|
||||
|
||||
if r.created_by != author.id and "editor" not in roles:
|
||||
return {"error": "Access denied"}
|
||||
|
||||
# Update reaction
|
||||
r.body = reaction.get("body", r.body)
|
||||
r.updated_at = int(time.time())
|
||||
Reaction.update(r, reaction)
|
||||
session.add(r)
|
||||
session.commit()
|
||||
|
||||
r.stat = {
|
||||
"commented": commented_stat,
|
||||
"rating": rating_stat,
|
||||
}
|
||||
|
||||
await notify_reaction(r.dict(), "update")
|
||||
|
||||
return {"reaction": r}
|
||||
except Exception as e:
|
||||
logger.error(f"{type(e).__name__}: {e}")
|
||||
return {"error": "Cannot update reaction"}
|
||||
|
||||
|
||||
@mutation.field("delete_reaction")
|
||||
@login_required
|
||||
async def delete_reaction(_, info, reaction_id: int):
|
||||
"""
|
||||
Delete an existing reaction through a GraphQL request.
|
||||
|
||||
:param info: GraphQL context info.
|
||||
:param reaction_id: Reaction ID to delete.
|
||||
:return: Dictionary with deleted reaction data or error.
|
||||
"""
|
||||
user_id = info.context.get("user_id")
|
||||
author_id = info.context.get("author", {}).get("id")
|
||||
roles = info.context.get("roles", [])
|
||||
|
||||
if not user_id:
|
||||
return {"error": "Unauthorized"}
|
||||
|
||||
with local_session() as session:
|
||||
try:
|
||||
author = session.query(Author).filter(Author.user == user_id).one()
|
||||
r = session.query(Reaction).filter(Reaction.id == reaction_id).one()
|
||||
|
||||
if r.created_by != author_id and "editor" not in roles:
|
||||
return {"error": "Access denied"}
|
||||
|
||||
logger.debug(f"{user_id} user removing his #{reaction_id} reaction")
|
||||
reaction_dict = r.dict()
|
||||
session.delete(r)
|
||||
session.commit()
|
||||
|
||||
# Update author stat
|
||||
if r.kind == ReactionKind.COMMENT.value:
|
||||
update_author_stat(author.id)
|
||||
|
||||
await notify_reaction(reaction_dict, "delete")
|
||||
|
||||
return {"error": None, "reaction": reaction_dict}
|
||||
except Exception as e:
|
||||
logger.error(f"{type(e).__name__}: {e}")
|
||||
return {"error": "Cannot delete reaction"}
|
||||
|
||||
|
||||
def apply_reaction_filters(by, q):
    """
    Apply filters to a reaction query.

    :param by: Dictionary with filter parameters.
    :param q: SQL query.
    :return: Query with applied filters.
    """
    shout_slug = by.get("shout")
    if shout_slug:
        q = q.filter(Shout.slug == shout_slug)

    shouts = by.get("shouts")
    if shouts:
        q = q.filter(Shout.slug.in_(shouts))

    created_by = by.get("created_by")
    if created_by:
        q = q.filter(Author.id == created_by)

    author_slug = by.get("author")
    if author_slug:
        q = q.filter(Author.slug == author_slug)

    topic = by.get("topic")
    if isinstance(topic, int):
        q = q.filter(Shout.topics.any(id=topic))

    kinds = by.get("kinds")
    if isinstance(kinds, list):
        q = q.filter(Reaction.kind.in_(kinds))

    if by.get("reply_to"):
        q = q.filter(Reaction.reply_to == by.get("reply_to"))

    by_search = by.get("search", "")
    if len(by_search) > 2:
        q = q.filter(Reaction.body.ilike(f"%{by_search}%"))

    after = by.get("after")
    if isinstance(after, int):
        q = q.filter(Reaction.created_at > after)

    return q
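
# Example `by` payload for apply_reaction_filters (slug values and the
# timestamp are illustrative): comments by one author on one shout,
# created after a moment in time, whose body matches a search string.
by = {
    "shout": "my-shout-slug",
    "author": "some-author-slug",
    "kinds": [ReactionKind.COMMENT.value],
    "search": "typo",  # applied only when longer than 2 characters
    "after": 1700000000,
}
q = apply_reaction_filters(by, query_reactions())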
@query.field("load_reactions_by")
|
||||
async def load_reactions_by(_, _info, by, limit=50, offset=0):
|
||||
"""
|
||||
Load reactions based on specified parameters.
|
||||
|
||||
:param info: GraphQL context info.
|
||||
:param by: Filter parameters.
|
||||
:param limit: Number of reactions to load.
|
||||
:param offset: Pagination offset.
|
||||
:return: List of reactions.
|
||||
"""
|
||||
q = query_reactions()
|
||||
|
||||
# Add statistics and apply filters
|
||||
q = add_reaction_stat_columns(q)
|
||||
q = apply_reaction_filters(by, q)
|
||||
q = q.where(Reaction.deleted_at.is_(None))
|
||||
|
||||
# Group and sort
|
||||
q = q.group_by(Reaction.id, Author.id, Shout.id)
|
||||
order_stat = by.get("sort", "").lower()
|
||||
order_by_stmt = desc(Reaction.created_at)
|
||||
if order_stat == "oldest":
|
||||
order_by_stmt = asc(Reaction.created_at)
|
||||
elif order_stat.endswith("like"):
|
||||
order_by_stmt = desc("rating_stat")
|
||||
q = q.order_by(order_by_stmt)
|
||||
|
||||
# Retrieve and return reactions
|
||||
return get_reactions_with_stat(q, limit, offset)
|
||||
|
||||
|
||||
@query.field("load_shout_ratings")
|
||||
async def load_shout_ratings(_, info, shout: int, limit=100, offset=0):
|
||||
"""
|
||||
Load ratings for a specified shout with pagination.
|
||||
|
||||
:param info: GraphQL context info.
|
||||
:param shout: Shout ID.
|
||||
:param limit: Number of reactions to load.
|
||||
:param offset: Pagination offset.
|
||||
:return: List of reactions.
|
||||
"""
|
||||
q = query_reactions()
|
||||
|
||||
# Filter, group, sort, limit, offset
|
||||
q = q.filter(
|
||||
and_(
|
||||
Reaction.deleted_at.is_(None),
|
||||
Reaction.shout == shout,
|
||||
Reaction.kind.in_(RATING_REACTIONS),
|
||||
)
|
||||
)
|
||||
q = q.group_by(Reaction.id, Author.id, Shout.id)
|
||||
q = q.order_by(desc(Reaction.created_at))
|
||||
|
||||
# Retrieve and return reactions
|
||||
return get_reactions_with_stat(q, limit, offset)
|
||||
|
||||
|
||||
@query.field("load_shout_comments")
|
||||
async def load_shout_comments(_, info, shout: int, limit=50, offset=0):
|
||||
"""
|
||||
Load comments for a specified shout with pagination and statistics.
|
||||
|
||||
:param info: GraphQL context info.
|
||||
:param shout: Shout ID.
|
||||
:param limit: Number of comments to load.
|
||||
:param offset: Pagination offset.
|
||||
:return: List of reactions.
|
||||
"""
|
||||
q = query_reactions()
|
||||
|
||||
q = add_reaction_stat_columns(q)
|
||||
|
||||
# Filter, group, sort, limit, offset
|
||||
q = q.filter(
|
||||
and_(
|
||||
Reaction.deleted_at.is_(None),
|
||||
Reaction.shout == shout,
|
||||
Reaction.body.is_not(None),
|
||||
)
|
||||
)
|
||||
q = q.group_by(Reaction.id, Author.id, Shout.id)
|
||||
q = q.order_by(desc(Reaction.created_at))
|
||||
|
||||
# Retrieve and return reactions
|
||||
return get_reactions_with_stat(q, limit, offset)
|
||||
|
||||
|
||||
@query.field("load_comment_ratings")
|
||||
async def load_comment_ratings(_, info, comment: int, limit=50, offset=0):
|
||||
"""
|
||||
Load ratings for a specified comment with pagination and statistics.
|
||||
|
||||
:param info: GraphQL context info.
|
||||
:param comment: Comment ID.
|
||||
:param limit: Number of ratings to load.
|
||||
:param offset: Pagination offset.
|
||||
:return: List of reactions.
|
||||
"""
|
||||
q = query_reactions()
|
||||
|
||||
q = add_reaction_stat_columns(q)
|
||||
|
||||
# Filter, group, sort, limit, offset
|
||||
q = q.filter(
|
||||
and_(
|
||||
Reaction.deleted_at.is_(None),
|
||||
Reaction.reply_to == comment,
|
||||
Reaction.kind == ReactionKind.COMMENT.value,
|
||||
)
|
||||
)
|
||||
q = q.group_by(Reaction.id, Author.id, Shout.id)
|
||||
q = q.order_by(desc(Reaction.created_at))
|
||||
|
||||
# Retrieve and return reactions
|
||||
return get_reactions_with_stat(q, limit, offset)
|
@@ -1,475 +0,0 @@
import json
import time

from graphql import GraphQLResolveInfo
from sqlalchemy import nulls_last, text
from sqlalchemy.orm import aliased
from sqlalchemy.sql.expression import and_, asc, case, desc, func, select

from orm.author import Author
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic
from services.auth import login_accepted
from services.db import json_array_builder, json_builder, local_session
from services.schema import query
from services.search import search_text
from services.viewed import ViewedStorage
from utils.logger import root_logger as logger


def apply_options(q, options, reactions_created_by=0):
    """
    Apply filtering and sorting options,
    optionally keeping only shouts that have reactions/comments from the given author.

    :param q: Base query.
    :param options: Filtering and sorting options.
    :param reactions_created_by: Author ID.
    :return: Query with options applied.
    """
    filters = options.get("filters")
    if isinstance(filters, dict):
        q = apply_filters(q, filters)
    if reactions_created_by:
        q = q.join(Reaction, Reaction.shout == Shout.id)
        q = q.filter(Reaction.created_by == reactions_created_by)
        if filters and "commented" in filters:  # guard against a missing filters dict
            q = q.filter(Reaction.body.is_not(None))
    q = apply_sorting(q, options)
    limit = options.get("limit", 10)
    offset = options.get("offset", 0)
    return q, limit, offset
def has_field(info, fieldname: str) -> bool:
    """
    Check whether the field :fieldname: was requested in the GraphQL query.

    :param info: GraphQL context info.
    :param fieldname: Name of the field to look for.
    :return: True if the field was requested, False otherwise.
    """
    field_node = info.field_nodes[0]
    for selection in field_node.selection_set.selections:
        if hasattr(selection, "name") and selection.name.value == fieldname:
            return True
    return False
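
# Minimal mock demonstrating has_field (illustration only; real resolvers
# receive `info` from GraphQL): the client asked for { id stat }, so "stat"
# is detected and "topics" is not.
from types import SimpleNamespace

def _sel(name):
    return SimpleNamespace(name=SimpleNamespace(value=name))

mock_info = SimpleNamespace(
    field_nodes=[SimpleNamespace(selection_set=SimpleNamespace(selections=[_sel("id"), _sel("stat")]))]
)
assert has_field(mock_info, "stat") is True
assert has_field(mock_info, "topics") is False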
def query_with_stat(info):
    """
    :param info: GraphQL context info - used to get the authorized user's id
    :return: Query with stat subqueries attached.

    Adds the statistics subqueries.
    """
    q = (
        select(Shout)
        .where(and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None)))
        .join(Author, Author.id == Shout.created_by)
    )

    # Main author
    main_author = aliased(Author)
    q = q.join(main_author, main_author.id == Shout.created_by)
    q = q.add_columns(
        json_builder(
            "id",
            main_author.id,
            "name",
            main_author.name,
            "slug",
            main_author.slug,
            "pic",
            main_author.pic,
            "created_at",
            main_author.created_at,
        ).label("main_author")
    )

    if has_field(info, "main_topic"):
        main_topic_join = aliased(ShoutTopic)
        main_topic = aliased(Topic)
        q = q.join(main_topic_join, and_(main_topic_join.shout == Shout.id, main_topic_join.main.is_(True)))
        q = q.join(main_topic, main_topic.id == main_topic_join.topic)
        q = q.add_columns(
            json_builder(
                "id", main_topic.id, "title", main_topic.title, "slug", main_topic.slug, "is_main", main_topic_join.main
            ).label("main_topic")
        )

    if has_field(info, "topics"):
        topics_subquery = (
            select(
                ShoutTopic.shout,
                json_array_builder(
                    json_builder("id", Topic.id, "title", Topic.title, "slug", Topic.slug, "is_main", ShoutTopic.main)
                ).label("topics"),
            )
            .outerjoin(Topic, ShoutTopic.topic == Topic.id)
            .where(ShoutTopic.shout == Shout.id)
            .group_by(ShoutTopic.shout)
            .subquery()
        )
        q = q.outerjoin(topics_subquery, topics_subquery.c.shout == Shout.id)
        q = q.add_columns(topics_subquery.c.topics)

    if has_field(info, "authors"):
        authors_subquery = (
            select(
                ShoutAuthor.shout,
                json_array_builder(
                    json_builder(
                        "id",
                        Author.id,
                        "name",
                        Author.name,
                        "slug",
                        Author.slug,
                        "pic",
                        Author.pic,
                        "caption",
                        ShoutAuthor.caption,
                        "created_at",
                        Author.created_at,
                    )
                ).label("authors"),
            )
            .outerjoin(Author, ShoutAuthor.author == Author.id)
            .where(ShoutAuthor.shout == Shout.id)
            .group_by(ShoutAuthor.shout)
            .subquery()
        )
        q = q.outerjoin(authors_subquery, authors_subquery.c.shout == Shout.id)
        q = q.add_columns(authors_subquery.c.authors)

    if has_field(info, "stat"):
        # Subquery for reaction statistics
        stats_subquery = (
            select(
                Reaction.shout,
                func.count(func.distinct(Reaction.id))
                .filter(Reaction.kind == ReactionKind.COMMENT.value)
                .label("comments_count"),
                func.sum(
                    case(
                        (Reaction.kind == ReactionKind.LIKE.value, 1),
                        (Reaction.kind == ReactionKind.DISLIKE.value, -1),
                        else_=0,
                    )
                )
                .filter(Reaction.reply_to.is_(None))
                .label("rating"),
                func.max(Reaction.created_at)
                .filter(Reaction.kind == ReactionKind.COMMENT.value)
                .label("last_commented_at"),
            )
            .where(Reaction.deleted_at.is_(None))
            .group_by(Reaction.shout)
            .subquery()
        )
        q = q.outerjoin(stats_subquery, stats_subquery.c.shout == Shout.id)
        q = q.add_columns(
            json_builder(
                "comments_count",
                func.coalesce(stats_subquery.c.comments_count, 0),
                "rating",
                func.coalesce(stats_subquery.c.rating, 0),
                "last_commented_at",
                func.coalesce(stats_subquery.c.last_commented_at, 0),
            ).label("stat")
        )

    return q
def get_shouts_with_links(info, q, limit=20, offset=0):
    """
    Fetch shouts with pagination applied.
    """
    shouts = []
    try:
        logger.info(f"Starting get_shouts_with_links with limit={limit}, offset={offset}")
        q = q.limit(limit).offset(offset)

        with local_session() as session:
            logger.info("Executing the main query")
            t1 = time.time()
            shouts_result = session.execute(q).all()
            logger.info(f"Query executed, got {len(shouts_result)} results in {time.time() - t1:.3f} seconds")

            if not shouts_result:
                logger.warning("No results found")
                return []

            for idx, row in enumerate(shouts_result):
                try:
                    shout = None
                    if hasattr(row, "Shout"):
                        shout = row.Shout
                    if shout:
                        shout_id = int(f"{shout.id}")
                        shout_dict = shout.dict()

                        if has_field(info, "created_by") and shout_dict.get("created_by"):
                            main_author_id = shout_dict.get("created_by")
                            a = session.query(Author).filter(Author.id == main_author_id).first()
                            shout_dict["created_by"] = {
                                "id": main_author_id,
                                "name": a.name,
                                "slug": a.slug,
                                "pic": a.pic,
                            }

                        if hasattr(row, "stat"):
                            stat = {}
                            if isinstance(row.stat, str):
                                stat = json.loads(row.stat)
                            elif isinstance(row.stat, dict):
                                stat = row.stat
                            else:
                                logger.warning(f"Row {idx} - unknown stat type: {type(row.stat)}")
                            viewed = ViewedStorage.get_shout(shout_id=shout_id) or 0
                            shout_dict["stat"] = {**stat, "viewed": viewed, "commented": stat.get("comments_count", 0)}

                        if has_field(info, "main_topic") and hasattr(row, "main_topic"):
                            shout_dict["main_topic"] = (
                                json.loads(row.main_topic) if isinstance(row.main_topic, str) else row.main_topic
                            )
                        if has_field(info, "authors") and hasattr(row, "authors"):
                            shout_dict["authors"] = (
                                json.loads(row.authors) if isinstance(row.authors, str) else row.authors
                            )
                        if has_field(info, "topics") and hasattr(row, "topics"):
                            shout_dict["topics"] = json.loads(row.topics) if isinstance(row.topics, str) else row.topics

                        shouts.append(shout_dict)

                except Exception as row_error:
                    logger.error(f"Error while processing row {idx}: {row_error}", exc_info=True)
                    continue
    except Exception as e:
        logger.error(f"Fatal error in get_shouts_with_links: {e}", exc_info=True)
        raise
    # returning from here (instead of a `finally` block) lets the re-raise
    # above actually propagate instead of being swallowed
    return shouts
def apply_filters(q, filters):
    """
    Apply common filters to a query.

    :param q: Base query.
    :param filters: Dictionary of filters.
    :return: Query with filters applied.
    """
    if isinstance(filters, dict):
        if "featured" in filters:
            featured_filter = filters.get("featured")
            if featured_filter:
                q = q.filter(Shout.featured_at.is_not(None))
            else:
                q = q.filter(Shout.featured_at.is_(None))
        by_layouts = filters.get("layouts")
        if by_layouts and isinstance(by_layouts, list):
            q = q.filter(Shout.layout.in_(by_layouts))
        by_author = filters.get("author")
        if by_author:
            q = q.filter(Shout.authors.any(slug=by_author))
        by_topic = filters.get("topic")
        if by_topic:
            q = q.filter(Shout.topics.any(slug=by_topic))
        by_after = filters.get("after")
        if by_after:
            ts = int(by_after)
            q = q.filter(Shout.created_at > ts)

    return q
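
# Example filter payload for apply_filters (the slug, layout name and
# timestamp are illustrative values): featured long-reads on one topic,
# created after a given moment.
filters = {"featured": True, "layouts": ["longread"], "topic": "culture", "after": 1700000000}
q = apply_filters(select(Shout), filters)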
@query.field("get_shout")
|
||||
@login_accepted
|
||||
async def get_shout(_, info: GraphQLResolveInfo, slug="", shout_id=0):
|
||||
"""
|
||||
Получение публикации по slug или id.
|
||||
|
||||
:param _: Корневой объект запроса (не используется)
|
||||
:param info: Информация о контексте GraphQL
|
||||
:param slug: Уникальный идентификатор публикации
|
||||
:param shout_id: ID публикации
|
||||
:return: Данные публикации с включенной статистикой
|
||||
"""
|
||||
try:
|
||||
# Получаем базовый запрос с подзапросами статистики
|
||||
q = query_with_stat(info)
|
||||
|
||||
# Применяем фильтр по slug или id
|
||||
if slug:
|
||||
q = q.where(Shout.slug == slug)
|
||||
elif shout_id:
|
||||
q = q.where(Shout.id == shout_id)
|
||||
else:
|
||||
return None
|
||||
|
||||
# Получаем результат через get_shouts_with_stats с limit=1
|
||||
shouts = get_shouts_with_links(info, q, limit=1)
|
||||
|
||||
# Возвращаем первую (и единственную) публикацию, если она найдена
|
||||
return shouts[0] if shouts else None
|
||||
|
||||
except Exception as exc:
|
||||
logger.error(f"Error in get_shout: {exc}", exc_info=True)
|
||||
return None
|
||||
|
||||
|
||||
def apply_sorting(q, options):
    """
    Apply sorting while keeping the result order stable.
    """
    order_str = options.get("order_by")
    if order_str in ["rating", "comments_count", "last_commented_at"]:
        query_order_by = desc(text(order_str)) if options.get("order_by_desc", True) else asc(text(order_str))
        q = q.distinct(text(order_str), Shout.id).order_by(  # DISTINCT ON must include the sort field
            nulls_last(query_order_by), Shout.id
        )
    else:
        q = q.distinct(Shout.published_at, Shout.id).order_by(Shout.published_at.desc(), Shout.id)

    return q
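
# Example options for apply_sorting: highest-rated first. PostgreSQL's
# DISTINCT ON requires the sort expression to lead the ORDER BY, which is
# why the function puts text(order_str) into both distinct() and order_by().
options = {"order_by": "rating", "order_by_desc": True}
q = apply_sorting(query_with_stat(info), options)  # `info` comes from the resolver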
@query.field("load_shouts_by")
|
||||
async def load_shouts_by(_, info: GraphQLResolveInfo, options):
|
||||
"""
|
||||
Загрузка публикаций с фильтрацией, сортировкой и пагинацией.
|
||||
|
||||
:param _: Корневой объект запроса (не используется)
|
||||
:param info: Информация о контексте GraphQL
|
||||
:param options: Опции фильтрации и сортировки.
|
||||
:return: Список публикаций, удовлетворяющих критериям.
|
||||
"""
|
||||
# Базовый запрос: используем специальный запрос с статистикой
|
||||
q = query_with_stat(info)
|
||||
q, limit, offset = apply_options(q, options)
|
||||
|
||||
# Передача сформированного запроса в метод получения публикаций с учетом сортировки и пагинации
|
||||
return get_shouts_with_links(info, q, limit, offset)
|
||||
|
||||
|
||||
@query.field("load_shouts_search")
|
||||
async def load_shouts_search(_, info, text, options):
|
||||
"""
|
||||
Поиск публикаций по тексту.
|
||||
|
||||
:param _: Корневой объект запроса (не используется)
|
||||
:param info: Информация о контексте GraphQL
|
||||
:param text: Строка поиска.
|
||||
:param options: Опции фильтрации и сортировки.
|
||||
:return: Список публикаций, найденных по тексту.
|
||||
"""
|
||||
limit = options.get("limit", 10)
|
||||
offset = options.get("offset", 0)
|
||||
if isinstance(text, str) and len(text) > 2:
|
||||
results = await search_text(text, limit, offset)
|
||||
scores = {}
|
||||
hits_ids = []
|
||||
for sr in results:
|
||||
shout_id = sr.get("id")
|
||||
if shout_id:
|
||||
shout_id = str(shout_id)
|
||||
scores[shout_id] = sr.get("score")
|
||||
hits_ids.append(shout_id)
|
||||
|
||||
q = (
|
||||
query_with_stat(info)
|
||||
if has_field(info, "stat")
|
||||
else select(Shout).filter(and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None)))
|
||||
)
|
||||
q = q.filter(Shout.id.in_(hits_ids))
|
||||
q = apply_filters(q, options)
|
||||
q = apply_sorting(q, options)
|
||||
shouts = get_shouts_with_links(info, q, limit, offset)
|
||||
for shout in shouts:
|
||||
shout.score = scores[f"{shout.id}"]
|
||||
shouts.sort(key=lambda x: x.score, reverse=True)
|
||||
return shouts
|
||||
return []
|
||||
|
||||
|
||||
@query.field("load_shouts_unrated")
|
||||
async def load_shouts_unrated(_, info, options):
|
||||
"""
|
||||
Загрузка публикаций с менее чем 3 реакциями типа LIKE/DISLIKE
|
||||
|
||||
:param _: Корневой объект запроса (не используется)
|
||||
:param info: Информация о контексте GraphQL
|
||||
:param options: Опции фильтрации и сортировки.
|
||||
:return: Список публикаций.
|
||||
"""
|
||||
rated_shouts = (
|
||||
select(Reaction.shout)
|
||||
.where(
|
||||
and_(
|
||||
Reaction.deleted_at.is_(None), Reaction.kind.in_([ReactionKind.LIKE.value, ReactionKind.DISLIKE.value])
|
||||
)
|
||||
)
|
||||
.group_by(Reaction.shout)
|
||||
.having(func.count("*") >= 3)
|
||||
.scalar_subquery()
|
||||
)
|
||||
|
||||
q = select(Shout).where(and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None)))
|
||||
q = q.join(Author, Author.id == Shout.created_by)
|
||||
q = q.add_columns(
|
||||
json_builder("id", Author.id, "name", Author.name, "slug", Author.slug, "pic", Author.pic).label("main_author")
|
||||
)
|
||||
q = q.join(ShoutTopic, and_(ShoutTopic.shout == Shout.id, ShoutTopic.main.is_(True)))
|
||||
q = q.join(Topic, Topic.id == ShoutTopic.topic)
|
||||
q = q.add_columns(json_builder("id", Topic.id, "title", Topic.title, "slug", Topic.slug).label("main_topic"))
|
||||
q = q.where(Shout.id.not_in(rated_shouts))
|
||||
q = q.order_by(func.random())
|
||||
|
||||
limit = options.get("limit", 5)
|
||||
offset = options.get("offset", 0)
|
||||
return get_shouts_with_links(info, q, limit, offset)
|
||||
|
||||
|
||||
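
# Roughly equivalent SQL for the `rated_shouts` subquery above (assuming the
# enum values render as the strings 'LIKE' and 'DISLIKE'):
#   SELECT shout FROM reaction
#   WHERE deleted_at IS NULL AND kind IN ('LIKE', 'DISLIKE')
#   GROUP BY shout
#   HAVING count(*) >= 3
# Everything outside that set counts as "unrated" and is served in random order.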
@query.field("load_shouts_random_top")
|
||||
async def load_shouts_random_top(_, info, options):
|
||||
"""
|
||||
Загрузка случайных публикаций, упорядоченных по топовым реакциям.
|
||||
|
||||
:param _info: Информация о контексте GraphQL.
|
||||
:param options: Опции фильтрации и сортировки.
|
||||
:return: Список случайных публикаций.
|
||||
"""
|
||||
aliased_reaction = aliased(Reaction)
|
||||
|
||||
subquery = select(Shout.id).outerjoin(aliased_reaction).where(Shout.deleted_at.is_(None))
|
||||
|
||||
filters = options.get("filters")
|
||||
if isinstance(filters, dict):
|
||||
subquery = apply_filters(subquery, filters)
|
||||
|
||||
subquery = subquery.group_by(Shout.id).order_by(
|
||||
desc(
|
||||
func.sum(
|
||||
case(
|
||||
# не учитывать реакции на комментарии
|
||||
(aliased_reaction.reply_to.is_not(None), 0),
|
||||
(aliased_reaction.kind == ReactionKind.LIKE.value, 1),
|
||||
(aliased_reaction.kind == ReactionKind.DISLIKE.value, -1),
|
||||
else_=0,
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
random_limit = options.get("random_limit", 100)
|
||||
subquery = subquery.limit(random_limit)
|
||||
q = query_with_stat(info)
|
||||
q = q.filter(Shout.id.in_(subquery))
|
||||
q = q.order_by(func.random())
|
||||
limit = options.get("limit", 10)
|
||||
return get_shouts_with_links(info, q, limit)
|
@@ -1,366 +0,0 @@
import asyncio

from sqlalchemy import and_, distinct, func, join, select
from sqlalchemy.orm import aliased

from cache.cache import cache_author
from orm.author import Author, AuthorFollower
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic, TopicFollower
from services.db import local_session
from utils.logger import root_logger as logger


def add_topic_stat_columns(q):
    """
    Add statistics columns to a topic query.

    :param q: SQL query for topics.
    :return: Query with statistics columns added.
    """
    # Aliases to avoid name conflicts
    aliased_shout = aliased(ShoutTopic)

    # Fresh query object for topics
    new_q = select(Topic)

    # Apply the necessary filters and add the statistics columns
    new_q = (
        new_q.join(
            aliased_shout,
            aliased_shout.topic == Topic.id,
        )
        .join(
            Shout,
            and_(
                aliased_shout.shout == Shout.id,
                Shout.deleted_at.is_(None),
            ),
        )
        .add_columns(
            func.count(distinct(aliased_shout.shout)).label("shouts_stat")
        )  # Count of unique shouts per topic
    )

    aliased_follower = aliased(TopicFollower)

    # Add the topic's follower count
    new_q = new_q.outerjoin(aliased_follower, aliased_follower.topic == Topic.id).add_columns(
        func.count(distinct(aliased_follower.follower)).label("followers_stat")
    )

    # Group by topic id
    new_q = new_q.group_by(Topic.id)

    return new_q
def add_author_stat_columns(q):
    """
    Add statistics columns to an author query.

    :param q: SQL query for authors.
    :return: Query with statistics columns added.
    """
    # Table aliases to avoid name conflicts
    aliased_shout_author = aliased(ShoutAuthor)
    aliased_shout = aliased(Shout)
    aliased_author_follower = aliased(AuthorFollower)

    # Apply filters and add the statistics columns
    q = (
        q.select_from(Author)
        .join(
            aliased_shout_author,
            aliased_shout_author.author == Author.id,
        )
        .join(
            aliased_shout,
            and_(
                aliased_shout.id == aliased_shout_author.shout,
                aliased_shout.deleted_at.is_(None),
            ),
        )
        .add_columns(
            func.count(distinct(aliased_shout.id)).label("shouts_stat")
        )  # Count of the author's unique shouts
    )

    # Add the author's follower count
    q = q.outerjoin(aliased_author_follower, aliased_author_follower.author == Author.id).add_columns(
        func.count(distinct(aliased_author_follower.follower)).label("followers_stat")
    )

    # Group by author id
    q = q.group_by(Author.id)

    return q
def get_topic_shouts_stat(topic_id: int) -> int:
    """
    Get the number of shouts for a given topic.

    :param topic_id: Topic ID.
    :return: Number of unique published shouts for the topic.
    """
    q = (
        select(func.count(distinct(ShoutTopic.shout)))
        .select_from(join(ShoutTopic, Shout, ShoutTopic.shout == Shout.id))
        .filter(
            and_(
                ShoutTopic.topic == topic_id,
                Shout.published_at.is_not(None),
                Shout.deleted_at.is_(None),
            )
        )
    )
    # Execute the query and fetch the result
    with local_session() as session:
        result = session.execute(q).first()
    return result[0] if result else 0
def get_topic_authors_stat(topic_id: int) -> int:
    """
    Get the number of unique authors for a given topic.

    :param topic_id: Topic ID.
    :return: Number of unique authors associated with the topic.
    """
    count_query = (
        select(func.count(distinct(ShoutAuthor.author)))
        .select_from(join(ShoutTopic, Shout, ShoutTopic.shout == Shout.id))
        .join(ShoutAuthor, ShoutAuthor.shout == Shout.id)
        .filter(
            and_(
                ShoutTopic.topic == topic_id,
                Shout.published_at.is_not(None),
                Shout.deleted_at.is_(None),
            )
        )
    )

    # Execute the query and fetch the result
    with local_session() as session:
        result = session.execute(count_query).first()
    return result[0] if result else 0
def get_topic_followers_stat(topic_id: int) -> int:
    """
    Get the number of followers for a given topic.

    :param topic_id: Topic ID.
    :return: Number of unique followers of the topic.
    """
    aliased_followers = aliased(TopicFollower)
    q = select(func.count(distinct(aliased_followers.follower))).filter(aliased_followers.topic == topic_id)
    with local_session() as session:
        result = session.execute(q).first()
    return result[0] if result else 0
def get_topic_comments_stat(topic_id: int) -> int:
    """
    Get the number of comments across all shouts in a given topic.

    :param topic_id: Topic ID.
    :return: Total number of comments on the topic's shouts.
    """
    # Subquery: comment count per shout
    sub_comments = (
        select(
            Shout.id.label("shout_id"),
            func.coalesce(func.count(Reaction.id), 0).label("comments_count"),
        )
        .join(ShoutTopic, ShoutTopic.shout == Shout.id)
        .join(Topic, ShoutTopic.topic == Topic.id)
        .outerjoin(
            Reaction,
            and_(
                Reaction.shout == Shout.id,
                Reaction.kind == ReactionKind.COMMENT.value,
                Reaction.deleted_at.is_(None),
            ),
        )
        .group_by(Shout.id)
        .subquery()
    )
    # Outer query: sum the comment counts over the topic
    q = select(func.coalesce(func.sum(sub_comments.c.comments_count), 0)).filter(ShoutTopic.topic == topic_id)
    q = q.outerjoin(sub_comments, ShoutTopic.shout == sub_comments.c.shout_id)
    with local_session() as session:
        result = session.execute(q).first()
    return result[0] if result else 0
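
# Shape of the computation above, in miniature (illustrative numbers):
# per-shout comment counts come from the subquery, then the outer query sums
# them over the topic, so shouts with zero comments contribute 0 instead of
# dropping out of the join.
per_shout_comments = {"shout-1": 4, "shout-2": 0, "shout-3": 1}
assert sum(per_shout_comments.values()) == 5  # what the outer SELECT returns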
def get_author_shouts_stat(author_id: int) -> int:
    """
    Get the number of shouts for a given author.

    :param author_id: Author ID.
    :return: Number of the author's unique published shouts.
    """
    aliased_shout_author = aliased(ShoutAuthor)
    aliased_shout = aliased(Shout)

    q = (
        select(func.count(distinct(aliased_shout.id)))
        .select_from(aliased_shout)
        .join(aliased_shout_author, aliased_shout.id == aliased_shout_author.shout)
        .filter(
            and_(
                aliased_shout_author.author == author_id,
                aliased_shout.published_at.is_not(None),
            )
        )
    )

    with local_session() as session:
        result = session.execute(q).first()

    return result[0] if result else 0
def get_author_authors_stat(author_id: int) -> int:
    """
    Get the number of authors a given author follows.

    :param author_id: Author ID.
    :return: Number of unique authors the author is subscribed to.
    """
    aliased_authors = aliased(AuthorFollower)
    q = select(func.count(distinct(aliased_authors.author))).filter(
        and_(
            aliased_authors.follower == author_id,
            aliased_authors.author != author_id,
        )
    )
    with local_session() as session:
        result = session.execute(q).first()
    return result[0] if result else 0
def get_author_followers_stat(author_id: int) -> int:
    """
    Get the number of followers for a given author.

    :param author_id: Author ID.
    :return: Number of the author's unique followers.
    """
    aliased_followers = aliased(AuthorFollower)
    q = select(func.count(distinct(aliased_followers.follower))).filter(aliased_followers.author == author_id)
    with local_session() as session:
        result = session.execute(q).first()
    return result[0] if result else 0
def get_author_comments_stat(author_id):
    q = (
        select(func.coalesce(func.count(Reaction.id), 0).label("comments_count"))
        .select_from(Author)
        .outerjoin(
            Reaction,
            and_(
                Reaction.created_by == Author.id,
                Reaction.kind == ReactionKind.COMMENT.value,
                Reaction.deleted_at.is_(None),
            ),
        )
        .where(Author.id == author_id)
        .group_by(Author.id)
    )

    with local_session() as session:
        result = session.execute(q).first()
    return result.comments_count if result else 0
def get_with_stat(q):
    """
    Execute a query with statistics attached.

    :param q: SQL query to execute.
    :return: List of entities with statistics attached.
    """
    records = []
    try:
        with local_session() as session:
            # Decide whether this is an author query
            author_prefixes = ("select author", "select * from author")
            is_author = f"{q}".lower().startswith(author_prefixes)

            # Add the statistics columns to the query
            q = add_author_stat_columns(q) if is_author else add_topic_stat_columns(q)

            # Execute the query
            result = session.execute(q)
            for cols in result:
                entity = cols[0]
                stat = dict()
                stat["shouts"] = cols[1]  # Shout statistics
                stat["followers"] = cols[2]  # Follower statistics
                if is_author:
                    stat["authors"] = get_author_authors_stat(entity.id)  # Subscriptions to other authors
                    stat["comments"] = get_author_comments_stat(entity.id)  # Comment statistics
                else:
                    stat["authors"] = get_topic_authors_stat(entity.id)  # Topic author statistics
                entity.stat = stat
                records.append(entity)
    except Exception as exc:
        import traceback

        logger.debug(q)
        traceback.print_exc()
        logger.error(exc, exc_info=True)
    return records
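
# Usage sketch (the slug is illustrative): get_with_stat sniffs the rendered
# SQL to decide author-vs-topic, attaches the aggregate columns, and hangs a
# `stat` dict off every returned entity.
authors = get_with_stat(select(Author).where(Author.slug == "discours"))
if authors:
    print(authors[0].stat)  # {"shouts": ..., "followers": ..., "authors": ..., "comments": ...}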
def author_follows_authors(author_id: int):
    """
    Get the list of authors a given author follows.

    :param author_id: Author ID.
    :return: List of authors with statistics attached.
    """
    af = aliased(AuthorFollower, name="af")
    author_follows_authors_query = (
        select(Author).select_from(join(Author, af, Author.id == af.author)).where(af.follower == author_id)
    )
    return get_with_stat(author_follows_authors_query)
def author_follows_topics(author_id: int):
    """
    Get the list of topics a given author follows.

    :param author_id: Author ID.
    :return: List of topics with statistics attached.
    """
    author_follows_topics_query = (
        select(Topic)
        .select_from(join(Topic, TopicFollower, Topic.id == TopicFollower.topic))
        .where(TopicFollower.follower == author_id)
    )
    return get_with_stat(author_follows_topics_query)
def update_author_stat(author_id: int):
    """
    Recompute the statistics for a given author and store them in the cache.

    :param author_id: Author ID.
    """
    author_query = select(Author).where(Author.id == author_id)
    try:
        result = get_with_stat(author_query)
        if result:
            author_with_stat = result[0]
            if isinstance(author_with_stat, Author):
                author_dict = author_with_stat.dict()
                # Cache the author data asynchronously
                asyncio.create_task(cache_author(author_dict))
    except Exception as exc:
        logger.error(exc, exc_info=True)