2021-08-20 09:27:19 +00:00
|
|
|
from dateutil.parser import parse
|
|
|
|
from os.path import abspath
|
2021-08-20 15:10:15 +00:00
|
|
|
import frontmatter
|
2021-08-20 09:27:19 +00:00
|
|
|
import json
|
2021-10-12 19:38:12 +00:00
|
|
|
from orm import Shout, Comment, Topic, ShoutRating, User #, TODO: CommentRating
|
2021-08-23 08:44:46 +00:00
|
|
|
from bs4 import BeautifulSoup
|
|
|
|
from migration.html2text import html2text
|
2021-10-12 19:38:12 +00:00
|
|
|
from migration.tables.comments import migrate as migrateComment
|
2021-08-25 21:20:53 +00:00
|
|
|
from transliterate import translit
|
|
|
|
from datetime import datetime
|
|
|
|
from sqlalchemy.exc import IntegrityError
|
|
|
|
from orm.base import local_session
|
2021-08-20 09:27:19 +00:00
|
|
|
|
|
|
|
# Legacy data dumps used by the migration, loaded once at import time.
# FIX: use context managers + json.load — the original open(...).read()
# calls never closed their file handles.
with open(abspath('migration/data/users.dict.json')) as f:
    users_dict = json.load(f)
print(str(len(users_dict)) + ' users loaded')

with open(abspath('migration/data/topics.dict.json')) as f:
    topics_dict = json.load(f)  # old_id keyed
print(str(len(topics_dict)) + ' topics loaded')

with open(abspath('migration/data/comments.json')) as f:
    comments_data = json.load(f)
print(str(len(comments_data)) + ' comments loaded')

# Group comments by the content item (post) they belong to.
comments_by_post = {}
for comment in comments_data:
    p = comment['contentItem']
    comments_by_post.setdefault(p, []).append(comment)

# Fallback author record used when the original creator cannot be resolved.
users_dict['0'] = {
    'id': 9999999,
    'slug': 'discours',
    'name': 'Дискурс',
    'userpic': 'https://discours.io/images/logo-mini.svg',
    'createdAt': '2016-03-05 22:22:00.350000'
}

# Migration run timestamp; used as default for missing created/published dates.
ts = datetime.now()

# Maps legacy content-item types to the new Shout layout names.
type2layout = {
    'Article': 'article',
    'Literature': 'prose',
    'Music': 'music',
    'Video': 'video',
    'Image': 'image'
}
|
|
|
|
|
2021-10-08 04:42:59 +00:00
|
|
|
|
2021-10-08 09:58:19 +00:00
|
|
|
def get_metadata(r):
    """Build the frontmatter metadata dict for a migrated shout record *r*.

    Copies title/authors/createdAt (defaulting to the module-level ``ts``),
    the required layout and topics, and the cover image when present.
    """
    metadata = {
        'title': r.get('title'),
        'authors': r.get('authors'),
        'createdAt': r.get('createdAt', ts),
        'layout': r['layout'],
        'topics': r['topics'],
    }
    cover = r.get('cover', False)
    if cover:
        metadata['cover'] = cover
    return metadata
|
|
|
|
|
2021-10-08 04:42:59 +00:00
|
|
|
def migrate(entry):
    '''
    Migrate one legacy content item into a Shout.

    Builds the shout dict from *entry*, resolves or creates the author,
    writes a frontmatter markdown file for published entries, creates the
    Shout row (with its topics) in the database, and returns the migrated
    dict with 'id' and 'old_id' set. Raises on a missing slug or on
    database errors.

    Target schema:

    type Shout {
        slug: String!
        author: Int!
        body: String!
        createdAt: DateTime!
        updatedAt: DateTime!
        deletedAt: DateTime
        deletedBy: Int
        rating: Int
        ratigns: [Rating]
        published: Bool!
        publishedAt: DateTime # if there is no published field - it is not published
        replyTo: String # another shout
        tags: [String] # actual values
        topics: [String] # topic-slugs, order has matter
        title: String
        versionOf: String
        visibleForRoles: [String] # role ids are strings
        visibleForUsers: [Int]
        views: Int
    }
    '''
    content = ''
    r = {
        'layout': type2layout[entry['type']],
        'title': entry['title'],
        'community': 0,
        'authors': [],
        'topics': [],
        'published': entry.get('published', False),
        'views': entry.get('views', 0),
        'rating': entry.get('rating', 0),
        'ratings': [],
        'comments': [],
        'createdAt': entry.get('createdAt', '2016-03-05 22:22:00.350000')
    }
    r['slug'] = entry.get('slug', '')
    body_orig = entry.get('body', '')
    # Fall back to the legacy friendlySlugs structure when no direct slug.
    if not r['slug'] and entry.get('friendlySlugs') is not None:
        r['slug'] = entry['friendlySlugs']['slug'][0]['slug']
        if r['slug'] is None:
            r['slug'] = entry['friendlySlugs'][0]['slug']
    if not r['slug']:
        print('NO SLUG ERROR')
        # print(entry)
        # FIX: bare `raise Exception` gave no diagnostics.
        raise Exception('entry ' + str(entry.get('_id')) + ' has no slug')
    try:
        r['topics'].append(topics_dict[entry['category']]['slug'])
    except Exception:
        # Unknown/missing category — log and continue without a topic.
        print(entry['category'])
    if entry.get('image') is not None:
        r['cover'] = entry['image']['url']
    # thumborId takes precedence over the raw image url when both exist.
    if entry.get('thumborId') is not None:
        r['cover'] = 'https://assets.discours.io/unsafe/1600x/' + entry['thumborId']
    if entry.get('updatedAt') is not None:
        r['updatedAt'] = parse(entry['updatedAt'])

    # --- per-type body extraction ---
    if entry.get('type') == 'Literature':
        media = entry.get('media', '')
        # print(media[0]['literatureBody'])
        if isinstance(media, list):
            body_orig = media[0].get('literatureBody', '')
            if body_orig == '':
                print('EMPTY BODY!')
            else:
                body_html = str(BeautifulSoup(
                    body_orig, features="html.parser"))
                r['body'] = html2text(body_html)
        else:
            print(r['slug'] + ': literature has no media')
    elif entry.get('type') == 'Video':
        m = entry['media'][0]
        yt = m.get('youtubeId', '')
        vm = m.get('vimeoId', '')
        video_url = 'https://www.youtube.com/watch?v=' + yt if yt else '#'
        if video_url == '#':
            video_url = 'https://vimeo.com/' + vm if vm else '#'
        if video_url == '#':
            print(entry.get('media', 'NO MEDIA!'))
            # raise Exception
        r['body'] = '<ShoutVideo src=\"' + video_url + \
            '\" />' + html2text(m.get('body', ''))  # FIXME
    elif entry.get('type') == 'Music':
        r['body'] = '<ShoutMusic media={\"' + \
            json.dumps(entry['media']) + '\"} />'  # FIXME

    # Default: convert the raw HTML body for types not handled above.
    if r.get('body') is None:
        body_orig = entry.get('body', '')
        body_html = str(BeautifulSoup(body_orig, features="html.parser"))
        r['body'] = html2text(body_html)
    body = r.get('body', '')
    r['old_id'] = entry.get('_id')

    # --- resolve author: known user, application applicant, or the
    # fallback 'discours' service account ---
    user = None
    try:
        userdata = users_dict.get(entry['createdBy'], users_dict['0'])
        slug = userdata['slug']
        name = userdata['name']
        userpic = userdata['userpic']
    except KeyError:
        app = entry.get('application')
        if app is not None:
            authordata = {
                'username': app['email'],
                'email': app['email'],
                'name': app['name'],
                'bio': app.get('bio', ''),
                'emailConfirmed': False,
                'slug': translit(app['name'], 'ru', reversed=True).replace(' ', '-').lower(),
                'createdAt': ts,
                'wasOnlineAt': ts
            }
            try:
                user = User.create(**authordata)
            except IntegrityError:
                with local_session() as session:
                    user = session.query(User).filter(
                        User.email == authordata['email']).first()
                    if user is None:
                        user = session.query(User).filter(
                            User.slug == authordata['slug']).first()
                # FIX: User is an ORM model instance, not a dict — attribute
                # access, not subscripting (matches user.id usage below).
                # NOTE(review): user may still be None if neither query
                # matched — TODO confirm upstream data guarantees a hit.
                slug = user.slug
                name = user.name
                userpic = user.userpic
        else:
            # no application, no author!
            slug = 'discours'
            name = 'Дискурс'
            userpic = 'https://discours.io/images/logo-mini.svg'
    with local_session() as session:
        user = session.query(User).filter(User.slug == slug).first()
    r['authors'].append({
        'id': user.id,
        'slug': slug,
        'name': name,
        'userpic': userpic
    })

    r['layout'] = type2layout[entry['type']]

    metadata = get_metadata(r)
    content = frontmatter.dumps(frontmatter.Post(body, **metadata))

    if entry['published']:
        ext = 'md'
        # FIX: close the file handle (was a bare open(...).write(...)).
        with open('migration/content/' +
                  r['layout'] + '/' + r['slug'] + '.' + ext, 'w') as f:
            f.write(content)
    try:
        shout_dict = r.copy()
        shout_dict['authors'] = [user, ]
        if entry.get('createdAt') is not None:
            shout_dict['createdAt'] = parse(r.get('createdAt'))
        else:
            shout_dict['createdAt'] = ts
        if entry.get('published'):
            if entry.get('publishedAt') is not None:
                shout_dict['publishedAt'] = parse(entry.get('publishedAt'))
            else:
                shout_dict['publishedAt'] = ts
        del shout_dict['published']

        try:
            topic_slugs = shout_dict['topics']
            del shout_dict['topics']  # FIXME: AttributeError: 'str' object has no attribute '_sa_instance_state'
            del shout_dict['views']  # FIXME: TypeError: 'views' is an invalid keyword argument for Shout
            del shout_dict['rating']  # FIXME: TypeError: 'rating' is an invalid keyword argument for Shout
            del shout_dict['ratings']
            s = Shout.create(**shout_dict)
            r['id'] = s.id

            if len(entry.get('ratings', [])) > 0:
                # TODO: adding shout ratings
                '''
                shout_dict['ratings'] = []
                for shout_rating_old in entry['ratings']:
                    shout_rating = ShoutRating.create(
                        rater_id = users_dict[shout_rating_old['createdBy']]['id'],
                        shout_id = s.id,
                        value = shout_rating_old['value']
                    )
                    shout.ratings.append(shout_rating.id)
                '''
            # adding topics to created shout
            # FIX: original referenced undefined names `topic` (read before
            # any assignment) and `shout` (the created object is `s`), which
            # raised NameError; create each known topic and attach it to s.
            for topic_slug in topic_slugs:
                topic_dict = topics_dict.get(topic_slug)
                if topic_dict:
                    topic = Topic.create(**topic_dict)
                    s.topics = [topic, ]
                    s.save()
        except Exception as e:
            r['error'] = 'db error'
            # pass
            raise e
    except Exception as e:
        if not r['body']:
            r['body'] = 'body moved'
        raise e
    return r
|