Mirror of https://github.com/actix/examples

migrate s3 example to aws-sdk-s3

Author: Rob Ede
Date: 2022-08-01 01:17:59 +01:00
parent 0149e64c7a
commit 4840cfdb68
8 changed files with 538 additions and 292 deletions

forms/multipart-s3/.env.example

@@ -1,4 +1,4 @@
AWS_REGION=
AWS_ACCESS_KEY_ID=
AWS_SECRET_ACCESS_KEY=
AWS_S3_BUCKET_NAME=
AWS_REGION=

forms/multipart-s3/Cargo.toml

@@ -1,18 +1,20 @@
[package]
name = "multipart-s3"
name = "multipart-s3-example"
version = "1.0.0"
edition = "2021"
[dependencies]
actix-web = "4"
actix-multipart = "0.4"
actix-web = "4"
actix-web-lab = "0.16"
dotenv = "0.15.0"
aws-config = "0.46"
aws-sdk-s3 = "0.16"
dotenv = "0.15"
env_logger = "0.9"
futures-util = { version = "0.3.17", default-features = false, features = ["std"] }
log = "0.4"
rusoto_core = "0.48"
rusoto_s3 = "0.48"
sanitize-filename = "0.4"
serde = { version = "1.0.104", features = ["derive"] }
serde_json = "1.0"
serde = { version = "1", features = ["derive"] }
serde_json = "1"

forms/multipart-s3/README.md

@@ -1,30 +1,21 @@
# Multipart + AWS S3
Upload a file in multipart form to aws s3 (https://github.com/rusoto/rusoto).
Upload a file in multipart form to AWS S3 using [AWS S3 SDK](https://crates.io/crates/aws-sdk-s3).
Also receives additional multipart form data as JSON and deserializes it into a struct.
# Usage
```
```sh
cd forms/multipart-s3
```
1. copy .env.example .env
1. edit .env AWS_ACCESS_KEY_ID=you_key
1. edit .env AWS_SECRET_ACCESS_KEY=you_key
1. edit .env AWS_S3_BUCKET_NAME=you_key
1. edit .env AWS_ACCESS_KEY_ID=your_key
1. edit .env AWS_SECRET_ACCESS_KEY=your_key
1. edit .env AWS_S3_BUCKET_NAME=your_bucket_name
# Running Server
```
cd forms/multipart-s3
cargo run (or ``cargo watch -x run``)
```sh
cargo run
```
http://localhost:8080
# Using Other Regions
<!-- - https://www.rusoto.org/regions.html -->
- https://docs.rs/rusoto_core/0.42.0/rusoto_core/enum.Region.html
<http://localhost:8080>
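
For reference, the upload endpoint expects the same two multipart parts that index.html sends: a `data` field carrying the JSON metadata and a file part. A hedged client-side sketch, not part of this example, assuming the reqwest crate with its `multipart` feature and a local `./example.png`:

```rust
// Hypothetical client for the upload endpoint; file path and values are placeholders.
// Assumes: reqwest = { version = "0.11", features = ["multipart"] } and tokio.
use reqwest::multipart::{Form, Part};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // JSON metadata goes in the "data" field, mirroring what index.html appends.
    let form = Form::new()
        .text("data", r#"{"text":"test_text","number":123}"#)
        .part(
            "file",
            Part::bytes(std::fs::read("./example.png")?).file_name("example.png"),
        );

    let resp = reqwest::Client::new()
        .post("http://localhost:8080/")
        .multipart(form)
        .send()
        .await?;

    println!("status: {}", resp.status());
    Ok(())
}
```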

forms/multipart-s3/src/index.html

@@ -0,0 +1,35 @@
<html>
    <head><title>Upload Test</title></head>
    <body>
        <form target="/" method="post" enctype="multipart/form-data" id="myForm" >
            <input type="text" id="text" name="text" value="test_text"/>
            <input type="number" id="number" name="number" value="123123"/>
            <input type="button" value="Submit" onclick="myFunction()"></button>
        </form>
        <input type="file" multiple name="file" id="myFile"/>
    </body>
    <script>
        function myFunction(){
            var myForm = document.getElementById('myForm');
            var myFile = document.getElementById('myFile');

            let formData = new FormData();
            const obj = {
                text: document.getElementById('text').value,
                number: Number(document.getElementById('number').value)
            };
            const json = JSON.stringify(obj);

            console.log(obj);
            console.log(json);

            formData.append("data", json);
            formData.append("myFile", myFile.files[0]);

            var request = new XMLHttpRequest();
            request.open("POST", "");
            request.send(formData);
        }
    </script>
</html>

forms/multipart-s3/src/main.rs

@@ -1,12 +1,18 @@
use std::{borrow::BorrowMut, env};
use std::fs;
use actix_multipart::Multipart;
use actix_web::{middleware::Logger, web, App, Error, HttpResponse, HttpServer};
use actix_web::{middleware::Logger, web, App, Error, HttpResponse, HttpServer, Responder};
use actix_web_lab::respond::Html;
use aws_config::meta::region::RegionProviderChain;
use dotenv::dotenv;
use serde::{Deserialize, Serialize};
mod utils;
use self::utils::upload::{save_file as upload_save_file, split_payload, UploadFile};
use self::utils::{
s3::Client,
upload::{save_file as upload_save_file, split_payload, UploadFile},
};
#[derive(Deserialize, Serialize, Debug)]
pub struct InpAdd {
@@ -14,60 +20,31 @@ pub struct InpAdd {
pub number: i32,
}
async fn save_file(mut payload: Multipart) -> Result<HttpResponse, Error> {
let pl = split_payload(payload.borrow_mut()).await;
async fn save_file(
s3_client: web::Data<Client>,
mut payload: Multipart,
) -> Result<HttpResponse, Error> {
let pl = split_payload(&mut payload).await;
println!("bytes={:#?}", pl.0);
let inp_info: InpAdd = serde_json::from_slice(&pl.0).unwrap();
println!("converter_struct={:#?}", inp_info);
println!("tmpfiles={:#?}", pl.1);
//make key
// make key
let s3_upload_key = format!("projects/{}/", "posts_id");
//create tmp file and upload s3 and remove tmp file
let upload_files: Vec<UploadFile> = upload_save_file(pl.1, s3_upload_key).await.unwrap();
// create tmp file and upload s3 and remove tmp file
let upload_files: Vec<UploadFile> = upload_save_file(&s3_client, pl.1, &s3_upload_key)
.await
.unwrap();
println!("upload_files={:#?}", upload_files);
Ok(HttpResponse::Ok().into())
}
async fn index() -> HttpResponse {
let html = r#"<html>
<head><title>Upload Test</title></head>
<body>
<form target="/" method="post" enctype="multipart/form-data" id="myForm" >
<input type="text" id="text" name="text" value="test_text"/>
<input type="number" id="number" name="number" value="123123"/>
<input type="button" value="Submit" onclick="myFunction()"></button>
</form>
<input type="file" multiple name="file" id="myFile"/>
</body>
<script>
function myFunction(){
var myForm = document.getElementById('myForm');
var myFile = document.getElementById('myFile');
let formData = new FormData();
const obj = {
text: document.getElementById('text').value,
number: Number(document.getElementById('number').value)
};
const json = JSON.stringify(obj);
console.log(obj);
console.log(json);
formData.append("data", json);
formData.append("myFile", myFile.files[0]);
var request = new XMLHttpRequest();
request.open("POST", "");
request.send(formData);
}
</script>
</html>"#;
HttpResponse::Ok()
.content_type("text/html; charset=utf-8")
.body(html)
async fn index() -> impl Responder {
Html(include_str!("./index.html").to_owned())
}
#[actix_web::main]
@@ -75,21 +52,20 @@ async fn main() -> std::io::Result<()> {
dotenv().ok();
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
let aws_access_key_id = env::var("AWS_ACCESS_KEY_ID").expect("AWS_ACCESS_KEY_ID must be set");
let aws_secret_access_key =
env::var("AWS_SECRET_ACCESS_KEY").expect("AWS_SECRET_ACCESS_KEY must be set");
let aws_s3_bucket_name =
env::var("AWS_S3_BUCKET_NAME").expect("AWS_S3_BUCKET_NAME must be set");
log::info!("creating temporary upload directory");
log::info!("aws_access_key_id: {aws_access_key_id}");
log::info!("aws_secret_access_key: {aws_secret_access_key}");
log::info!("aws_s3_bucket_name: {aws_s3_bucket_name}");
fs::create_dir_all("./tmp").unwrap();
std::fs::create_dir_all("./tmp").unwrap();
log::info!("configuring S3 client");
let aws_region = RegionProviderChain::default_provider().or_else("us-east-1");
let aws_config = aws_config::from_env().region(aws_region).load().await;
let s3_client = Client::new(&aws_config);
log::info!("using AWS region: {}", aws_config.region().unwrap());
log::info!("starting HTTP server at http://localhost:8080");
HttpServer::new(|| {
HttpServer::new(move || {
App::new()
.service(
web::resource("/")
@@ -97,7 +73,9 @@ async fn main() -> std::io::Result<()> {
.route(web::post().to(save_file)),
)
.wrap(Logger::default())
.app_data(web::Data::new(s3_client.clone()))
})
.workers(2)
.bind(("127.0.0.1", 8080))?
.run()
.await
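
The startup changes above are spread across three hunks; taken together, the migration boils down to resolving a region with a fallback, loading the shared AWS config from the environment, constructing the client once, and `move`-ing it into the server factory so each worker can clone it into app data. A minimal self-contained sketch of that wiring, using `aws_sdk_s3::Client` directly rather than the example's `utils::s3::Client` wrapper, with a placeholder route:

```rust
// Condensed sketch of the new bootstrap; uses aws_sdk_s3::Client directly
// (the example wraps it in utils::s3::Client together with the bucket name).
use actix_web::{middleware::Logger, web, App, HttpResponse, HttpServer};
use aws_config::meta::region::RegionProviderChain;
use aws_sdk_s3::Client;

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    // Resolve the region from the default provider chain, falling back to us-east-1.
    let region = RegionProviderChain::default_provider().or_else("us-east-1");
    let config = aws_config::from_env().region(region).load().await;
    let s3 = Client::new(&config);

    // `move` lets every worker factory call clone the same client into its app data.
    HttpServer::new(move || {
        App::new()
            .app_data(web::Data::new(s3.clone()))
            .route("/", web::get().to(|| async { HttpResponse::Ok().body("ok") }))
            .wrap(Logger::default())
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```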

forms/multipart-s3/src/utils/s3.rs

@@ -1,64 +1,57 @@
use std::io::Read as _;
use std::{env, fs, io::Read as _};
use rusoto_core::Region;
use rusoto_s3::{DeleteObjectRequest, PutObjectRequest, S3Client, S3};
use aws_config::SdkConfig as AwsConfig;
use aws_sdk_s3::{types::ByteStream, Client as S3Client};
#[derive(Debug, Clone)]
pub struct Client {
#[allow(dead_code)]
region: Region,
s3: S3Client,
bucket_name: String,
}
impl Client {
// construct S3 testing client
pub fn new() -> Client {
let region = Region::default();
pub fn new(config: &AwsConfig) -> Client {
Client {
region: region.to_owned(),
s3: S3Client::new(region),
bucket_name: std::env::var("AWS_S3_BUCKET_NAME").unwrap(),
s3: S3Client::new(config),
bucket_name: env::var("AWS_S3_BUCKET_NAME").unwrap(),
}
}
pub fn url(&self, key: &str) -> String {
format!(
"https://{}.s3.{}.amazonaws.com/{key}",
std::env::var("AWS_S3_BUCKET_NAME").unwrap(),
std::env::var("AWS_REGION").unwrap(),
env::var("AWS_S3_BUCKET_NAME").unwrap(),
env::var("AWS_REGION").unwrap(),
)
}
pub async fn put_object(&self, localfilepath: &str, key: &str) -> String {
let mut file = std::fs::File::open(localfilepath).unwrap();
let mut contents: Vec<u8> = Vec::new();
let _ = file.read_to_end(&mut contents);
let put_request = PutObjectRequest {
bucket: self.bucket_name.to_owned(),
key: key.to_owned(),
body: Some(contents.into()),
..Default::default()
};
pub async fn put_object(&self, local_path: &str, key: &str) -> String {
let mut file = fs::File::open(local_path).unwrap();
let mut contents =
Vec::with_capacity(file.metadata().map(|md| md.len()).unwrap_or(1024) as usize);
file.read_to_end(&mut contents).unwrap();
let _res = self
.s3
.put_object(put_request)
.put_object()
.bucket(&self.bucket_name)
.key(key)
.body(ByteStream::from(contents))
.send()
.await
.expect("Failed to put test object");
self.url(key)
}
pub async fn delete_object(&self, key: String) {
let delete_object_req = DeleteObjectRequest {
bucket: self.bucket_name.to_owned(),
key: key.to_owned(),
..Default::default()
};
let _res = self
.s3
.delete_object(delete_object_req)
pub async fn delete_object(&self, key: &str) {
self.s3
.delete_object()
.bucket(&self.bucket_name)
.key(key)
.send()
.await
.expect("Couldn't delete object");
}
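
With the rusoto request structs removed, every call goes through the SDK's fluent builders, and the wrapper's public surface stays at `url`, `put_object`, and `delete_object`. A rough usage sketch of the wrapper (the file path and key are placeholders; `AWS_S3_BUCKET_NAME` and `AWS_REGION` must be set):

```rust
// Hypothetical caller of the wrapper above; path and key are made up for illustration.
use crate::utils::s3::Client;

async fn roundtrip(config: &aws_config::SdkConfig) {
    let client = Client::new(config);

    // Upload a local temp file and get back the object's URL.
    let url = client
        .put_object("./tmp/hello.txt", "projects/posts_id/hello.txt")
        .await;
    println!("uploaded to {url}");

    // Remove the object again.
    client.delete_object("projects/posts_id/hello.txt").await;
}
```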

forms/multipart-s3/src/utils/upload.rs

@@ -1,4 +1,4 @@
use std::{convert::From, io::Write};
use std::{convert::From, fs, io::Write};
use actix_multipart::{Field, Multipart};
use actix_web::{web, web::Bytes, Error};
@@ -7,7 +7,7 @@ use serde::{Deserialize, Serialize};
use crate::utils::s3::Client;
#[derive(Deserialize, Serialize, Debug, Clone)]
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct UploadFile {
pub filename: String,
pub key: String,
@@ -36,6 +36,7 @@ pub struct Tmpfile {
pub s3_key: String,
pub s3_url: String,
}
impl Tmpfile {
fn new(filename: &str) -> Tmpfile {
Tmpfile {
@@ -46,25 +47,25 @@ impl Tmpfile {
}
}
async fn s3_upload_and_tmp_remove(&mut self, s3_upload_key: String) {
self.s3_upload(s3_upload_key).await;
async fn s3_upload_and_tmp_remove(&mut self, client: &Client, s3_upload_key: &str) {
self.s3_upload(client, s3_upload_key).await;
self.tmp_remove();
}
async fn s3_upload(&mut self, s3_upload_key: String) {
async fn s3_upload(&mut self, client: &Client, s3_upload_key: &str) {
let key = format!("{s3_upload_key}{}", &self.name);
self.s3_key = key.clone();
let url: String = Client::new().put_object(&self.tmp_path, &key.clone()).await;
let url = client.put_object(&self.tmp_path, &key.clone()).await;
self.s3_url = url;
}
fn tmp_remove(&self) {
std::fs::remove_file(&self.tmp_path).unwrap();
fs::remove_file(&self.tmp_path).unwrap();
}
}
pub async fn split_payload(payload: &mut Multipart) -> (Bytes, Vec<Tmpfile>) {
let mut tmp_files: Vec<Tmpfile> = Vec::new();
let mut tmp_files = vec![];
let mut data = Bytes::new();
while let Some(item) = payload.next().await {
@@ -80,7 +81,7 @@ pub async fn split_payload(payload: &mut Multipart) -> (Bytes, Vec<Tmpfile>) {
Some(filename) => {
let tmp_file = Tmpfile::new(&sanitize_filename::sanitize(&filename));
let tmp_path = tmp_file.tmp_path.clone();
let mut f = web::block(move || std::fs::File::create(&tmp_path))
let mut f = web::block(move || fs::File::create(&tmp_path))
.await
.unwrap()
.unwrap();
@@ -103,24 +104,27 @@ pub async fn save_file(
}
pub async fn save_file(
client: &Client,
tmp_files: Vec<Tmpfile>,
s3_upload_key: String,
s3_upload_key: &str,
) -> Result<Vec<UploadFile>, Error> {
let mut arr: Vec<UploadFile> = Vec::with_capacity(tmp_files.len());
for item in tmp_files {
let mut tmp_file: Tmpfile = item.clone();
tmp_file
.s3_upload_and_tmp_remove(s3_upload_key.clone())
.s3_upload_and_tmp_remove(client, s3_upload_key)
.await;
arr.push(UploadFile::from(tmp_file));
}
Ok(arr)
}
#[allow(unused)]
pub async fn delete_object(list: Vec<String>) {
for key in list {
Client::new().delete_object(key).await;
pub async fn delete_object(client: &Client, keys: Vec<&str>) {
for key in keys {
client.delete_object(key).await;
}
}
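
The `delete_object` helper remains unused (hence `#[allow(unused)]`), but its new signature follows the same pattern as `save_file`: the shared client is passed in rather than constructed per call. A hedged sketch of a possible caller, with a made-up handler and placeholder keys:

```rust
// Hypothetical cleanup handler, not part of this example; keys are placeholders.
use actix_web::{web, HttpResponse};

use crate::utils::{s3::Client, upload::delete_object};

async fn cleanup(s3_client: web::Data<Client>) -> HttpResponse {
    // The shared client comes from app data, mirroring the save_file handler above.
    delete_object(
        &s3_client,
        vec!["projects/posts_id/a.png", "projects/posts_id/b.png"],
    )
    .await;

    HttpResponse::NoContent().finish()
}
```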