This commit is contained in:
tonyrewin 2023-01-08 01:38:47 +03:00
parent dda9201c96
commit ecdbf7c89d
8 changed files with 110 additions and 12551 deletions

6
.flake8 Normal file
View File

@@ -0,0 +1,6 @@
[flake8]
# E203: whitespace before ':' (conflicts with black), W504: line break after
# binary operator, W191: tab indentation, W503: line break before binary operator
ignore = E203,W504,W191,W503
# orm/rbac.py is excluded from linting entirely
exclude = .git,__pycache__,orm/rbac.py
max-complexity = 12
max-line-length = 108
indent-string = ' '

79
api/upload.py Normal file
View File

@@ -0,0 +1,79 @@
from flask import Flask, request
from werkzeug.utils import secure_filename
import boto3
from botocore.exceptions import ClientError, WaiterError
import tempfile
import os
app = Flask(__name__)

# One shared boto3 session; the resource and the low-level client both talk to
# the Storj S3-compatible gateway configured via environment variables.
session = boto3.Session()
storj_resource = session.resource('s3')
storj_client = boto3.client(
    's3',
    aws_access_key_id=os.environ['STORJ_ACCESS_KEY'],
    aws_secret_access_key=os.environ['STORJ_SECRET_KEY'],
    endpoint_url=os.environ['STORJ_END_POINT'],
)
def upload_storj(filecontent, filename, bucket_name):
    """Upload *filecontent* under key *filename* into *bucket_name* on Storj.

    Returns the uploaded object's ContentLength (truthy int) on verified
    success, or False when any step failed.
    """
    success = False
    try:
        bucket = storj_resource.Bucket(bucket_name)
    except ClientError:
        bucket = None
    try:
        # If the key already exists, remember its current ETag so we can wait
        # until the contents actually change after the upload.
        head = storj_client.head_object(Bucket=bucket_name, Key=filename)
    except ClientError:
        etag = ''
    else:
        etag = head['ETag'].strip('"')
    try:
        s3_obj = bucket.Object(filename)
    except (ClientError, AttributeError):
        # AttributeError covers bucket being None after the lookup above failed
        s3_obj = None
    try:
        # Upload into the requested bucket (was hard-coded to 'discours.io',
        # silently ignoring the bucket_name parameter).
        storj_client.upload_fileobj(
            Fileobj=filecontent,
            Bucket=bucket_name,
            Key=filename
        )
    except (ClientError, AttributeError):
        pass
    else:
        # Guard: s3_obj may be None; calling wait_until_exists on it would
        # raise an uncaught AttributeError.
        if s3_obj is not None:
            try:
                s3_obj.wait_until_exists(IfNoneMatch=etag)
            except WaiterError:
                pass
            else:
                head = storj_client.head_object(Bucket=bucket_name, Key=filename)
                success = head['ContentLength']
    return success
@app.route('/upload', methods=['POST'])
def upload():
    """Accept a multipart 'file' field and forward it to the Storj bucket.

    Returns the uploaded size on success, or a 400 response when no file
    was supplied (the previous bare ``return None`` made Flask raise a 500).
    """
    # .get avoids the implicit 400 KeyError that request.files['file'] raises
    img = request.files.get('file')
    if not img:
        return 'No file provided', 400
    filename = secure_filename(img.filename)
    # Save to a temp dir first so a clean binary file handle can be streamed
    # to S3; the directory (and file) are removed when the block exits.
    with tempfile.TemporaryDirectory() as temp_dir:
        temp_path = os.path.join(temp_dir, filename)
        img.save(temp_path)
        with open(temp_path, 'rb') as filecontent:
            return upload_storj(filecontent, filename, 'discours.io')
# Run Flask's development server only when executed directly (not when
# imported by a WSGI host such as Vercel).
if __name__ == "__main__":
    app.run()

View File

@@ -1,89 +0,0 @@
import { Writable } from 'stream'
import formidable from 'formidable'
import { S3Client } from '@aws-sdk/client-s3'
import { Upload } from '@aws-sdk/lib-storage'
// Disable Next.js's built-in body parser so formidable can consume the raw
// multipart stream itself.
export const config = {
  api: {
    bodyParser: false
  }
}

const BUCKET_NAME = process.env.S3_BUCKET || 'discours-io'

// S3 client configured from the environment; region falls back to eu-west-1.
const s3 = new S3Client({
  region: process.env.S3_REGION || 'eu-west-1',
  credentials: {
    accessKeyId: process.env.S3_ACCESS_KEY,
    secretAccessKey: process.env.S3_SECRET_KEY
  }
})

// formidable parsing limits: single file only, max 10 MB, up to 7 fields.
const formidableConfig = {
  keepExtensions: true,
  maxFileSize: 10_000_000,
  maxFieldsSize: 10_000_000,
  maxFields: 7,
  allowEmptyFiles: false,
  multiples: false
}
// Promise wrapper around formidable's callback-style parse API.
const formidablePromise = async (req, opts) => {
  return new Promise((resolve, reject) => {
    const form = formidable(opts)
    form.parse(req, (parseError, fields, files) => {
      if (parseError) {
        reject(parseError)
        return
      }
      resolve({ fields, files })
    })
  })
}
// Writable sink that appends every chunk to `acc`, keeping the upload
// entirely in memory instead of on disk.
const fileConsumer = (acc) => {
  return new Writable({
    write(chunk, _enc, next) {
      acc.push(chunk)
      next()
    }
  })
}
async function handler(req, res) {
if (req.method === 'POST') {
try {
const chunks = []
const { fields, files }: any = await formidablePromise(req, {
...formidableConfig,
// consume this, otherwise formidable tries to save the file to disk
fileWriteStreamHandler: () => fileConsumer(chunks)
})
const data = Buffer.concat(chunks)
if (!data) {
throw Error('data is empty')
} else {
console.debug(data)
}
const params = {
Bucket: process.env.S3_BUCKET || 'discours-io',
Key: fields.name + '.' + fields.ext,
Body: data,
ACL: 'public-read',
'Content-Type': fields.type
}
const upload = new Upload({ params, client: s3 })
await upload.done()
// console.log(upload)
const { singleUploadResult: result }: any = upload
return res.status(200).json(result.Location)
} catch (error) {
console.error(error)
}
}
return res.status(405).end()
}
export default handler

View File

@@ -33,10 +33,6 @@
"vercel-build": "astro build"
},
"dependencies": {
"@aws-sdk/abort-controller": "^3.226.0",
"@aws-sdk/client-s3": "^3.216.0",
"@aws-sdk/lib-storage": "^3.235.0",
"formidable": "^2.1.1",
"mailgun.js": "^8.0.2"
},
"devDependencies": {
@@ -85,7 +81,7 @@
"eslint-plugin-sonarjs": "^0.16.0",
"eslint-plugin-unicorn": "^45.0.0",
"graphql": "^16.6.0",
"graphql-sse": "^1.3.1",
"graphql-sse": "^1.3.2",
"graphql-tag": "^2.12.6",
"graphql-ws": "^5.11.2",
"hast-util-select": "^5.0.2",

File diff suppressed because it is too large Load Diff

2
requirements.txt Normal file
View File

@@ -0,0 +1,2 @@
Flask==2.2.2
boto3

View File

@@ -1,6 +1,6 @@
{
"functions": {
"api/upload.ts": {
"api/upload.py": {
"memory": 3008,
"maxDuration": 30
},

11323
yarn.lock

File diff suppressed because it is too large Load Diff