From f4d1c724dd8563b99f15a82ad1a10380f0126f44 Mon Sep 17 00:00:00 2001
From: Kould <kould2333@gmail.com>
Date: Mon, 25 Dec 2023 11:02:27 +0800
Subject: [PATCH] support S3 for file storage (#15)

* feat: support S3 for file storage

* doc: fmt env template
---
 .env                |  3 --
 .env.template       |  8 ++++-
 Cargo.toml          |  1 +
 src/api/doc_info.rs | 79 +++++++++++++++++++++++----------------------
 src/errors.rs       | 58 ++++++++++++++++++---------------
 src/main.rs         | 57 +++++++++++++++++++++-----------
 6 files changed, 118 insertions(+), 88 deletions(-)
 delete mode 100644 .env

diff --git a/.env b/.env
deleted file mode 100644
index a033ac2..0000000
--- a/.env
+++ /dev/null
@@ -1,3 +0,0 @@
-HOST=127.0.0.1
-PORT=8000
-DATABASE_URL="postgresql://infiniflow:infiniflow@localhost/docgpt"
\ No newline at end of file
diff --git a/.env.template b/.env.template
index a033ac2..681361c 100644
--- a/.env.template
+++ b/.env.template
@@ -1,3 +1,9 @@
+# Database
 HOST=127.0.0.1
 PORT=8000
-DATABASE_URL="postgresql://infiniflow:infiniflow@localhost/docgpt"
\ No newline at end of file
+DATABASE_URL="postgresql://infiniflow:infiniflow@localhost/docgpt"
+
+# S3 Storage
+S3_BASE_URL="https://play.min.io"
+S3_ACCESS_KEY="Q3AM3UQ867SPQQA43P2F"
+S3_SECRET_KEY="zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG"
\ No newline at end of file
diff --git a/Cargo.toml b/Cargo.toml
index aed22e1..883d2fa 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -23,6 +23,7 @@ dotenvy = "0.15.7"
 listenfd = "1.0.1"
 chrono = "0.4.31"
 migration = { path = "./migration" }
+minio = "0.1.0"
 futures-util = "0.3.29"
 actix-multipart-extract = "0.1.5"
 
diff --git a/src/api/doc_info.rs b/src/api/doc_info.rs
index dc66a40..84886d4 100644
--- a/src/api/doc_info.rs
+++ b/src/api/doc_info.rs
@@ -1,8 +1,9 @@
 use std::collections::HashMap;
 use std::io::Write;
-use actix_multipart_extract::{ File, Multipart, MultipartForm };
-use actix_web::{ HttpResponse, post, web };
-use chrono::{ Utc, FixedOffset };
+use actix_multipart_extract::{File, Multipart, MultipartForm};
+use actix_web::{get, HttpResponse, post, web};
+use chrono::{Utc, FixedOffset};
+use minio::s3::args::{BucketExistsArgs, MakeBucketArgs, UploadObjectArgs};
 use sea_orm::DbConn;
 use crate::api::JsonResponse;
 use crate::AppState;
@@ -11,6 +12,9 @@ use crate::errors::AppError;
 use crate::service::doc_info::{ Mutation, Query };
 use serde::Deserialize;
 
+const BUCKET_NAME: &'static str = "docgpt-upload";
+
+
 fn now() -> chrono::DateTime<FixedOffset> {
     Utc::now().with_timezone(&FixedOffset::east_opt(3600 * 8).unwrap())
 }
@@ -69,57 +73,54 @@ async fn upload(
     data: web::Data<AppState>
 ) -> Result<HttpResponse, AppError> {
     let uid = payload.uid;
-    async fn add_number_to_filename(
-        file_name: String,
-        conn: &DbConn,
-        uid: i64,
-        parent_id: i64
-    ) -> String {
+    let file_name = payload.file_field.name.as_str();
+    async fn add_number_to_filename(file_name: &str, conn: &DbConn, uid: i64, parent_id: i64) -> String {
         let mut i = 0;
         let mut new_file_name = file_name.to_string();
         let arr: Vec<&str> = file_name.split(".").collect();
-        let suffix = String::from(arr[arr.len() - 1]);
-        let preffix = arr[..arr.len() - 1].join(".");
-        let mut docs = Query::find_doc_infos_by_name(
-            conn,
-            uid,
-            &new_file_name,
-            Some(parent_id)
-        ).await.unwrap();
-        while docs.len() > 0 {
+        let suffix = String::from(arr[arr.len() - 1]);
+        let preffix = arr[..arr.len() - 1].join(".");
+        let mut docs = Query::find_doc_infos_by_name(conn, uid, &new_file_name, Some(parent_id)).await.unwrap();
+        while docs.len() > 0 {
             i += 1;
             new_file_name = format!("{}_{}.{}", preffix, i, suffix);
-            docs = Query::find_doc_infos_by_name(
-                conn,
-                uid,
-                &new_file_name,
-                Some(parent_id)
-            ).await.unwrap();
+            docs = Query::find_doc_infos_by_name(conn, uid, &new_file_name, Some(parent_id)).await.unwrap();
         }
         new_file_name
     }
-    let fnm = add_number_to_filename(
-        payload.file_field.name.clone(),
-        &data.conn,
-        uid,
-        payload.did
-    ).await;
-
-    std::fs::create_dir_all(format!("./upload/{}/", uid));
-    let filepath = format!("./upload/{}/{}-{}", payload.uid, payload.did, fnm.clone());
-    let mut f = std::fs::File::create(&filepath)?;
-    f.write(&payload.file_field.bytes)?;
+    let fnm = add_number_to_filename(file_name, &data.conn, uid, payload.did).await;
+
+    let s3_client = &data.s3_client;
+    let buckets_exists = s3_client
+        .bucket_exists(&BucketExistsArgs::new(BUCKET_NAME)?)
+        .await?;
+    if !buckets_exists {
+        s3_client
+            .make_bucket(&MakeBucketArgs::new(BUCKET_NAME)?)
+            .await?;
+    }
+    s3_client
+        .upload_object(
+            &mut UploadObjectArgs::new(
+                BUCKET_NAME,
+                fnm.as_str(),
+                format!("/{}/{}-{}", payload.uid, payload.did, fnm).as_str()
+            )?
+        )
+        .await?;
+
+    let location = format!("/{}/{}", BUCKET_NAME, fnm);
     let doc = Mutation::create_doc_info(&data.conn, Model {
-        did: Default::default(),
-        uid: uid,
+        did: Default::default(),
+        uid: uid,
         doc_name: fnm,
         size: payload.file_field.bytes.len() as i64,
-        location: filepath,
+        location,
         r#type: "doc".to_string(),
         created_at: now(),
         updated_at: now(),
-        is_deleted: Default::default(),
+        is_deleted: Default::default(),
     }).await?;
 
     let _ = Mutation::place_doc(&data.conn, payload.did, doc.did.unwrap()).await?;
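
For reference, a minimal standalone sketch of the bucket-check-and-upload sequence the `upload` handler above relies on. It uses only the minio 0.1.0 calls that appear in this patch (`bucket_exists`, `make_bucket`, `upload_object`); the helper name is illustrative, and the third argument to `UploadObjectArgs::new` is taken here to be the path of the local file to upload, matching how the handler constructs it:

    use minio::s3::args::{BucketExistsArgs, MakeBucketArgs, UploadObjectArgs};
    use minio::s3::client::Client;
    use minio::s3::error::Error;

    // Illustrative helper: make sure the bucket exists, then upload a local
    // file under the given object name, mirroring the handler above.
    async fn upload_file(s3: &Client, bucket: &str, object: &str, local_path: &str) -> Result<(), Error> {
        if !s3.bucket_exists(&BucketExistsArgs::new(bucket)?).await? {
            s3.make_bucket(&MakeBucketArgs::new(bucket)?).await?;
        }
        s3.upload_object(&mut UploadObjectArgs::new(bucket, object, local_path)?).await?;
        Ok(())
    }
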
diff --git a/src/errors.rs b/src/errors.rs
index 6690cfc..f19e58c 100644
--- a/src/errors.rs
+++ b/src/errors.rs
@@ -1,17 +1,25 @@
-use actix_web::{ HttpResponse, ResponseError };
+use actix_web::{HttpResponse, ResponseError};
 use thiserror::Error;
 
 #[derive(Debug, Error)]
 pub(crate) enum AppError {
-    #[error("`{0}`")] User(#[from] UserError),
+    #[error("`{0}`")]
+    User(#[from] UserError),
 
-    #[error("`{0}`")] Json(#[from] serde_json::Error),
+    #[error("`{0}`")]
+    Json(#[from] serde_json::Error),
 
-    #[error("`{0}`")] Actix(#[from] actix_web::Error),
+    #[error("`{0}`")]
+    Actix(#[from] actix_web::Error),
 
-    #[error("`{0}`")] Db(#[from] sea_orm::DbErr),
+    #[error("`{0}`")]
+    Db(#[from] sea_orm::DbErr),
 
-    #[error("`{0}`")] Std(#[from] std::io::Error),
+    #[error("`{0}`")]
+    MinioS3(#[from] minio::s3::error::Error),
+
+    #[error("`{0}`")]
+    Std(#[from] std::io::Error),
 }
 
 #[derive(Debug, Error)]
@@ -28,7 +36,8 @@ pub(crate) enum UserError {
     #[error("`password` field of `User` cannot contain whitespaces!")]
     PasswordInvalidCharacter,
 
-    #[error("Could not find any `User` for id: `{0}`!")] NotFound(i64),
+    #[error("Could not find any `User` for id: `{0}`!")]
+    NotFound(i64),
 
     #[error("Failed to login user!")]
     LoginFailed,
@@ -46,26 +55,23 @@ impl ResponseError for AppError {
     fn status_code(&self) -> actix_web::http::StatusCode {
         match self {
-            AppError::User(user_error) =>
-                match user_error {
-                    UserError::EmptyUsername => actix_web::http::StatusCode::UNPROCESSABLE_ENTITY,
-                    UserError::UsernameInvalidCharacter => {
-                        actix_web::http::StatusCode::UNPROCESSABLE_ENTITY
-                    }
-                    UserError::EmptyPassword => actix_web::http::StatusCode::UNPROCESSABLE_ENTITY,
-                    UserError::PasswordInvalidCharacter => {
-                        actix_web::http::StatusCode::UNPROCESSABLE_ENTITY
-                    }
-                    UserError::NotFound(_) => actix_web::http::StatusCode::NOT_FOUND,
-                    UserError::NotLoggedIn => actix_web::http::StatusCode::UNAUTHORIZED,
-                    UserError::Empty => actix_web::http::StatusCode::NOT_FOUND,
-                    UserError::LoginFailed => actix_web::http::StatusCode::NOT_FOUND,
-                    UserError::InvalidToken => actix_web::http::StatusCode::UNAUTHORIZED,
+            AppError::User(user_error) => match user_error {
+                UserError::EmptyUsername => actix_web::http::StatusCode::UNPROCESSABLE_ENTITY,
+                UserError::UsernameInvalidCharacter => {
+                    actix_web::http::StatusCode::UNPROCESSABLE_ENTITY
+                }
+                UserError::EmptyPassword => actix_web::http::StatusCode::UNPROCESSABLE_ENTITY,
+                UserError::PasswordInvalidCharacter => {
+                    actix_web::http::StatusCode::UNPROCESSABLE_ENTITY
                 }
-            AppError::Json(_) => actix_web::http::StatusCode::INTERNAL_SERVER_ERROR,
+                UserError::NotFound(_) => actix_web::http::StatusCode::NOT_FOUND,
+                UserError::NotLoggedIn => actix_web::http::StatusCode::UNAUTHORIZED,
+                UserError::Empty => actix_web::http::StatusCode::NOT_FOUND,
+                UserError::LoginFailed => actix_web::http::StatusCode::NOT_FOUND,
+                UserError::InvalidToken => actix_web::http::StatusCode::UNAUTHORIZED,
+            },
             AppError::Actix(fail) => fail.as_response_error().status_code(),
-            AppError::Db(_) => actix_web::http::StatusCode::INTERNAL_SERVER_ERROR,
-            AppError::Std(_) => actix_web::http::StatusCode::INTERNAL_SERVER_ERROR,
+            _ => actix_web::http::StatusCode::INTERNAL_SERVER_ERROR,
         }
     }
 
@@ -74,4 +80,4 @@ impl ResponseError for AppError {
         let response = HttpResponse::build(status_code).body(self.to_string());
         response
     }
-}
+}
\ No newline at end of file
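
With the new `MinioS3` variant, thiserror's `#[from]` generates `From<minio::s3::error::Error> for AppError`, so S3 failures can be bubbled up with `?` from any function that returns `AppError` (the `upload` handler above does exactly this). A hypothetical helper, shown only to illustrate the conversion:

    use minio::s3::args::BucketExistsArgs;
    use minio::s3::client::Client;
    use crate::errors::AppError;

    // Illustrative only: the `?` compiles because minio::s3::error::Error
    // now converts into AppError::MinioS3.
    async fn bucket_ready(s3: &Client, bucket: &str) -> Result<bool, AppError> {
        Ok(s3.bucket_exists(&BucketExistsArgs::new(bucket)?).await?)
    }
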
diff --git a/src/main.rs b/src/main.rs
index 36e6472..f99c4c3 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -5,29 +5,33 @@ mod errors;
 
 use std::env;
 use actix_files::Files;
-use actix_identity::{ CookieIdentityPolicy, IdentityService, RequestIdentity };
+use actix_identity::{CookieIdentityPolicy, IdentityService, RequestIdentity};
 use actix_session::CookieSession;
-use actix_web::{ web, App, HttpServer, middleware, Error };
+use actix_web::{web, App, HttpServer, middleware, Error};
 use actix_web::cookie::time::Duration;
 use actix_web::dev::ServiceRequest;
 use actix_web::error::ErrorUnauthorized;
 use actix_web_httpauth::extractors::bearer::BearerAuth;
 use listenfd::ListenFd;
-use sea_orm::{ Database, DatabaseConnection };
-use migration::{ Migrator, MigratorTrait };
-use crate::errors::UserError;
+use minio::s3::client::Client;
+use minio::s3::creds::StaticProvider;
+use minio::s3::http::BaseUrl;
+use sea_orm::{Database, DatabaseConnection};
+use migration::{Migrator, MigratorTrait};
+use crate::errors::{AppError, UserError};
 
 #[derive(Debug, Clone)]
 struct AppState {
     conn: DatabaseConnection,
+    s3_client: Client,
 }
 
 pub(crate) async fn validator(
     req: ServiceRequest,
-    credentials: BearerAuth
+    credentials: BearerAuth,
 ) -> Result<ServiceRequest, Error> {
     if let Some(token) = req.get_identity() {
-        println!("{}, {}", credentials.token(), token);
+        println!("{}, {}", credentials.token(), token);
         (credentials.token() == token)
             .then(|| req)
             .ok_or(ErrorUnauthorized(UserError::InvalidToken))
@@ -37,7 +41,7 @@ pub(crate) async fn validator(
 }
 
 #[actix_web::main]
-async fn main() -> std::io::Result<()> {
+async fn main() -> Result<(), AppError> {
     std::env::set_var("RUST_LOG", "debug");
     tracing_subscriber::fmt::init();
 
@@ -48,12 +52,29 @@ async fn main() -> std::io::Result<()> {
     let port = env::var("PORT").expect("PORT is not set in .env file");
     let server_url = format!("{host}:{port}");
 
+    let s3_base_url = env::var("S3_BASE_URL").expect("S3_BASE_URL is not set in .env file");
+    let s3_access_key = env::var("S3_ACCESS_KEY").expect("S3_ACCESS_KEY is not set in .env file");
+    let s3_secret_key = env::var("S3_SECRET_KEY").expect("S3_SECRET_KEY is not set in .env file");
+
     // establish connection to database and apply migrations
     // -> create post table if not exists
     let conn = Database::connect(&db_url).await.unwrap();
     Migrator::up(&conn, None).await.unwrap();
 
-    let state = AppState { conn };
+    let static_provider = StaticProvider::new(
+        s3_access_key.as_str(),
+        s3_secret_key.as_str(),
+        None,
+    );
+
+    let s3_client = Client::new(
+        s3_base_url.parse::<BaseUrl>()?,
+        Some(Box::new(static_provider)),
+        None,
+        None,
+    )?;
+
+    let state = AppState { conn, s3_client };
 
     // create server and try to serve over socket if possible
     let mut listenfd = ListenFd::from_env();
@@ -61,20 +82,18 @@ async fn main() -> std::io::Result<()> {
         App::new()
             .service(Files::new("/static", "./static"))
             .app_data(web::Data::new(state.clone()))
-            .wrap(
-                IdentityService::new(
-                    CookieIdentityPolicy::new(&[0; 32])
-                        .name("auth-cookie")
-                        .login_deadline(Duration::seconds(120))
-                        .secure(false)
-                )
-            )
+            .wrap(IdentityService::new(
+                CookieIdentityPolicy::new(&[0; 32])
+                    .name("auth-cookie")
+                    .login_deadline(Duration::seconds(120))
+                    .secure(false),
+            ))
             .wrap(
                 CookieSession::signed(&[0; 32])
                     .name("session-cookie")
                     .secure(false)
                     // WARNING(alex): This uses the `time` crate, not `std::time`!
-                    .expires_in_time(Duration::seconds(60))
+                    .expires_in_time(Duration::seconds(60)),
             )
             .wrap(middleware::Logger::default())
            .configure(init)
@@ -118,4 +137,4 @@ fn init(cfg: &mut web::ServiceConfig) {
     cfg.service(api::user_info::login);
     cfg.service(api::user_info::register);
     cfg.service(api::user_info::setting);
-}
+}
\ No newline at end of file
-- 
GitLab