use actix_web::error::ErrorInternalServerError;
use aws_config::BehaviorVersion;
use aws_sdk_s3::{config::Credentials, Client as S3Client};
use redis::{aio::MultiplexedConnection, AsyncCommands, Client as RedisClient};
use std::collections::HashMap;
use std::{env, time::Duration};
use tokio::time::interval;

use crate::s3_utils::check_file_exists;

#[derive(Clone)]
pub struct AppState {
    pub redis: MultiplexedConnection,
    pub s3_client: S3Client,
    pub s3_bucket: String,
    pub aws_client: S3Client,
    pub aws_bucket: String,
}

const FILE_LIST_CACHE_KEY: &str = "s3_file_list_cache"; // Redis key for the cached S3 file list
const PATH_MAPPING_KEY: &str = "path_mapping"; // Redis key for the filename-to-path mapping
const CHECK_INTERVAL_SECONDS: u64 = 60 * 60; // File list refresh interval: 1 hour
const WEEK_SECONDS: u64 = 604800; // TTL for user quota keys: 1 week
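
// Rough sketch of the Redis layout these keys describe (the hash fields and the user id
// shown here are illustrative placeholders, not values produced by this module):
//
//   HSET s3_file_list_cache "<filename.ext>" "<object key in Storj S3>"
//   HSET path_mapping       "<filename.ext>" "<object key in AWS S3>"
//   SETEX quota:<user_id> 604800 <uploaded bytes>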

impl AppState {
    /// Initializes a new application state.
    pub async fn new() -> Self {
        // Read the Redis configuration
        let redis_url = env::var("REDIS_URL").expect("REDIS_URL must be set");
        let redis_client = RedisClient::open(redis_url).expect("Invalid Redis URL");
        let redis_connection = redis_client
            .get_multiplexed_async_connection()
            .await
            .unwrap();

        // Read the S3 (Storj) configuration
        let s3_access_key = env::var("STORJ_ACCESS_KEY").expect("STORJ_ACCESS_KEY must be set");
        let s3_secret_key = env::var("STORJ_SECRET_KEY").expect("STORJ_SECRET_KEY must be set");
        let s3_endpoint = env::var("STORJ_END_POINT")
            .unwrap_or_else(|_| "https://gateway.storjshare.io".to_string());
        let s3_bucket = env::var("STORJ_BUCKET_NAME").unwrap_or_else(|_| "discours-io".to_string());

        // Read the AWS S3 configuration
        let aws_access_key = env::var("AWS_ACCESS_KEY").expect("AWS_ACCESS_KEY must be set");
        let aws_secret_key = env::var("AWS_SECRET_KEY").expect("AWS_SECRET_KEY must be set");
        let aws_endpoint =
            env::var("AWS_END_POINT").unwrap_or_else(|_| "https://s3.amazonaws.com".to_string());
        let aws_bucket = env::var("AWS_BUCKET_NAME").unwrap_or_else(|_| "discours-io".to_string());

        // Configure the S3 client for Storj
        let storj_config = aws_config::defaults(BehaviorVersion::latest())
            .region("eu-west-1")
            .endpoint_url(s3_endpoint)
            .credentials_provider(Credentials::new(
                s3_access_key,
                s3_secret_key,
                None,
                None,
                "rust-s3-client",
            ))
            .load()
            .await;

        let s3_client = S3Client::new(&storj_config);

        // Configure the S3 client for AWS
        let aws_config = aws_config::defaults(BehaviorVersion::latest())
            .region("eu-west-1")
            .endpoint_url(aws_endpoint)
            .credentials_provider(Credentials::new(
                aws_access_key,
                aws_secret_key,
                None,
                None,
                "rust-aws-client",
            ))
            .load()
            .await;

        let aws_client = S3Client::new(&aws_config);

        let app_state = AppState {
            redis: redis_connection,
            s3_client,
            s3_bucket,
            aws_client,
            aws_bucket,
        };

        // Cache the S3 file list on application startup
        app_state.cache_file_list().await;

        app_state
    }
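
    // The constructor above reads its configuration from the environment; a hedged sketch
    // of the variables it expects (values shown are placeholders or the code's defaults):
    //
    //   REDIS_URL=redis://127.0.0.1:6379
    //   STORJ_ACCESS_KEY=...        STORJ_SECRET_KEY=...
    //   STORJ_END_POINT=https://gateway.storjshare.io
    //   STORJ_BUCKET_NAME=discours-io
    //   AWS_ACCESS_KEY=...          AWS_SECRET_KEY=...
    //   AWS_END_POINT=https://s3.amazonaws.com
    //   AWS_BUCKET_NAME=discours-io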

    /// Caches the list of files from Storj S3 in Redis.
    pub async fn cache_file_list(&self) {
        let mut redis = self.redis.clone();

        // Request the file list from Storj S3
        let list_objects_v2 = self.s3_client.list_objects_v2();
        let list_response = list_objects_v2
            .bucket(&self.s3_bucket)
            .send()
            .await
            .expect("Failed to list files from S3");

        if let Some(objects) = list_response.contents {
            // Build a file list keyed by "name.ext" so duplicate keys collapse to one entry
            let mut file_list = HashMap::new();
            for object in objects.iter() {
                if let Some(key) = &object.key {
                    // Split the object key on dots: the part before the first dot is the
                    // file name, the next segment is treated as the extension
                    let parts: Vec<&str> = key.split('.').collect();
                    let filename = parts.first().unwrap_or(&"");
                    let ext = parts.get(1).unwrap_or(&"");
                    if ext.contains('/') {
                        // The dot belonged to a directory name, not an extension; skip it
                        continue;
                    }
                    let filename_with_extension = format!("{}.{}", filename, ext);
                    file_list.insert(filename_with_extension, key.clone());
                }
            }

            // Store the file list in Redis, using HSET for each file
            for (filename, path) in file_list {
                let _: () = redis
                    .hset(FILE_LIST_CACHE_KEY, filename, path)
                    .await
                    .expect("Failed to cache file in Redis");
            }
        }
    }

    /// Returns the cached file list from Redis.
    pub async fn get_cached_file_list(&self) -> Vec<String> {
        let mut redis = self.redis.clone();

        // Try to read the cached list from Redis
        let cached_list: HashMap<String, String> =
            redis.hgetall(FILE_LIST_CACHE_KEY).await.unwrap_or_default();

        // Turn the HashMap into a Vec<String> of its values (the file paths)
        cached_list.into_values().collect()
    }

    /// Periodically refreshes the cached file list from Storj S3.
    pub async fn refresh_file_list_periodically(&self) {
        let mut interval = interval(Duration::from_secs(CHECK_INTERVAL_SECONDS));
        loop {
            interval.tick().await;
            self.cache_file_list().await;
        }
    }

    /// Saves the mapping from the old AWS S3 path to the new path in Storj S3.
    async fn save_path_by_filename_with_extension(
        &self,
        filename_with_extension: &str,
        path: &str,
    ) -> Result<(), actix_web::Error> {
        let mut redis = self.redis.clone();
        // Store the mapping as a hash field: file name -> path
        redis
            .hset::<_, &str, &str, ()>(PATH_MAPPING_KEY, filename_with_extension, path)
            .await
            .map_err(|_| ErrorInternalServerError("Failed to save path mapping in Redis"))?;
        Ok(())
    }

    /// Resolves a storage path from a file key (file name) stored in Redis.
    pub async fn get_path(&self, file_key: &str) -> Result<Option<String>, actix_web::Error> {
        let mut redis = self.redis.clone();
        let new_path: Option<String> = redis
            .hget(PATH_MAPPING_KEY, file_key)
            .await
            .map_err(|_| ErrorInternalServerError("Failed to get path mapping from Redis"))?;
        Ok(new_path)
    }
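
    // A hedged sketch of how a download handler might use `get_path`; the handler name,
    // extractor types, route and redirect behavior are illustrative assumptions, not part
    // of this module:
    //
    //   async fn serve_file(state: web::Data<AppState>, key: web::Path<String>) -> HttpResponse {
    //       match state.get_path(&key).await {
    //           Ok(Some(path)) => HttpResponse::Found()
    //               .insert_header(("Location", format!("/files/{}", path)))
    //               .finish(),
    //           Ok(None) => HttpResponse::NotFound().finish(),
    //           Err(e) => HttpResponse::InternalServerError().body(e.to_string()),
    //       }
    //   }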

    /// Updates the file list from Amazon S3: for AWS objects missing in Storj S3,
    /// stores a path mapping in Redis.
    pub async fn update_filelist_from_aws(&self) {
        // Request the object list from AWS S3
        let list_objects_v2 = self.aws_client.list_objects_v2();

        match list_objects_v2.bucket(&self.aws_bucket).send().await {
            Ok(list_response) => {
                // Walk through the file list
                if let Some(objects) = list_response.contents {
                    for object in objects {
                        if let Some(key) = object.key {
                            // Extract the file name and extension from the object key
                            let parts: Vec<&str> = key.split('.').collect();
                            let filename = parts.first().unwrap_or(&"");
                            let ext = parts.get(1).unwrap_or(&"");
                            if ext.contains('/') {
                                continue;
                            }
                            let filename_with_extension = format!("{}.{}", filename, ext);

                            if filename.is_empty() {
                                eprintln!("[ERROR] empty filename: {}", key);
                            } else {
                                // Check whether the file already exists in Storj S3
                                match check_file_exists(&self.s3_client, &self.s3_bucket, &key)
                                    .await
                                {
                                    Ok(false) => {
                                        // Save the path mapping
                                        if let Err(e) = self
                                            .save_path_by_filename_with_extension(
                                                &filename_with_extension,
                                                &key,
                                            )
                                            .await
                                        {
                                            eprintln!("[ERROR] save {}: {:?}", key, e);
                                        } else {
                                            println!("{}", key);
                                        }
                                    }
                                    Ok(true) => {
                                        println!(
                                            "Already exists in Storj: {}",
                                            filename_with_extension
                                        );
                                    }
                                    Err(e) => {
                                        eprintln!(
                                            "[ERROR] check {}: {:?}",
                                            filename_with_extension, e
                                        );
                                    }
                                }
                            }
                        }
                    }
                } else {
                    println!("AWS S3 file list is empty.");
                }
            }
            Err(e) => {
                eprintln!("[ERROR] get AWS S3 file list: {:?}", e);
            }
        }
    }

    /// Creates or returns the user's current quota value.
    pub async fn get_or_create_quota(&self, user_id: &str) -> Result<u64, actix_web::Error> {
        let mut redis = self.redis.clone();
        let quota_key = format!("quota:{}", user_id);

        // Try to read the quota from Redis
        let quota: u64 = redis.get(&quota_key).await.unwrap_or(0);

        if quota == 0 {
            // If no quota was found, initialize it to 0 bytes with a one-week TTL
            redis
                .set_ex::<&str, u64, ()>(&quota_key, 0, WEEK_SECONDS)
                .await
                .map_err(|_| {
                    ErrorInternalServerError("Failed to set initial user quota in Redis")
                })?;

            Ok(0) // Return 0 as the initial quota
        } else {
            Ok(quota)
        }
    }

    /// Increments the user's quota by the given number of uploaded bytes.
    pub async fn increment_uploaded_bytes(
        &self,
        user_id: &str,
        bytes: u64,
    ) -> Result<u64, actix_web::Error> {
        let mut redis = self.redis.clone();
        let quota_key = format!("quota:{}", user_id);

        // Check whether the key already exists in Redis
        let exists: bool = redis.exists::<_, bool>(&quota_key).await.map_err(|_| {
            ErrorInternalServerError("Failed to check if user quota exists in Redis")
        })?;

        // If the key does not exist, create it with the initial value and set the TTL
        if !exists {
            redis
                .set_ex::<_, u64, ()>(&quota_key, bytes, WEEK_SECONDS)
                .await
                .map_err(|_| {
                    ErrorInternalServerError("Failed to set initial user quota in Redis")
                })?;
            return Ok(bytes);
        }

        // If the key exists, increment its value by the given number of bytes
        let new_quota: u64 = redis
            .incr::<_, u64, u64>(&quota_key, bytes)
            .await
            .map_err(|_| ErrorInternalServerError("Failed to increment user quota in Redis"))?;

        Ok(new_quota)
    }
}
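
// A minimal usage sketch of the quota methods in an upload path. Everything specific here
// is an assumption for illustration: the 2 GiB weekly limit, the `user_id`/`body`
// parameters and the surrounding handler are not defined by this module.
#[allow(dead_code)]
async fn example_enforce_quota(
    state: &AppState,
    user_id: &str,
    body: &[u8],
) -> Result<(), actix_web::Error> {
    const MAX_WEEKLY_BYTES: u64 = 2 * 1024 * 1024 * 1024; // assumed limit, not from this module

    // Read (or lazily create) the amount the user has already uploaded this week.
    let used = state.get_or_create_quota(user_id).await?;
    if used + body.len() as u64 > MAX_WEEKLY_BYTES {
        return Err(actix_web::error::ErrorPayloadTooLarge("Weekly upload quota exceeded"));
    }

    // ... upload `body` to Storj here ...

    // Record the successful upload against the user's weekly quota.
    state.increment_uploaded_bytes(user_id, body.len() as u64).await?;
    Ok(())
}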
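
// A hedged sketch of how this state could be wired into an actix-web server; the port,
// binding address and background-task setup are assumptions, not part of this module.
#[allow(dead_code)]
async fn example_run_server() -> std::io::Result<()> {
    use actix_web::{web, App, HttpServer};

    // Connect to Redis and both S3 endpoints, and warm the file-list cache.
    let state = AppState::new().await;

    // Refresh the cached file list once an hour in the background.
    let bg_state = state.clone();
    actix_web::rt::spawn(async move { bg_state.refresh_file_list_periodically().await });

    // Share one AppState (cheaply cloneable) with every worker.
    HttpServer::new(move || App::new().app_data(web::Data::new(state.clone())))
        .bind(("0.0.0.0", 8080))?
        .run()
        .await
}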