Mirror of https://github.com/actix/examples (synced 2025-06-28 09:50:36 +02:00)

Restructure folders (#411)
Committed by GitHub. Parent: 9db98162b2. Commit: c3407627d0.

database_interactions/basic/Cargo.toml (new file, 17 lines)

[package]
name = "async_db"
version = "2.0.0"
authors = ["Darin Gordon <dkcdkg@gmail.com>"]
edition = "2018"

[dependencies]
actix-web = "3"
env_logger = "0.8"
failure = "0.1.7"
futures = "0.3.1"
num_cpus = "1.13"
r2d2 = "0.8.2"
r2d2_sqlite = "0.14"
rusqlite = "0.21"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"

database_interactions/basic/README.md (new file, 47 lines)

Getting started using databases with Actix web, asynchronously.

## Usage

### init database sqlite

From the root directory of this project:

```bash
bash db/setup_db.sh
```

This creates a sqlite database, weather.db, in the root.

### server

```bash
# if ubuntu : sudo apt-get install libsqlite3-dev
# if fedora : sudo dnf install libsqlite3x-devel
cargo run  # or: cargo watch -x run
# Started http server: 127.0.0.1:8080
```

### web client

[http://127.0.0.1:8080/asyncio_weather](http://127.0.0.1:8080/asyncio_weather)

[http://127.0.0.1:8080/parallel_weather](http://127.0.0.1:8080/parallel_weather)
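
For a quick check from the command line, the two endpoints can also be hit with curl. This is just a sketch and assumes the server above is already running on 127.0.0.1:8080; each underlying query sleeps for 2 seconds (see `db.rs`), so the sequential handler takes noticeably longer than the parallel one.

```bash
# four queries executed one after another
curl http://127.0.0.1:8080/asyncio_weather

# the same four queries executed concurrently, so the response arrives sooner
curl http://127.0.0.1:8080/parallel_weather
```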

### sqlite client

```bash
# if ubuntu : sudo apt-get install sqlite3
# if fedora : sudo dnf install sqlite3x
sqlite3 weather.db
sqlite> .tables
sqlite> select * from nyc_weather;
```

## Dependencies

On Ubuntu 19.10:

```
sudo apt install libsqlite3-dev
```

database_interactions/basic/db/GHCND_documentation.pdf (new executable file, binary, not shown)

database_interactions/basic/db/README.md (new file, 5 lines)

This directory includes weather information obtained from NOAA for NYC Central Park: https://www.ncdc.noaa.gov/cdo-web/

# Setup Instructions

Set up a sqlite3 database by executing the setup_db.sh file: `bash setup_db.sh`

database_interactions/basic/db/db.sql (new executable file, 28 lines)

CREATE TABLE nyc_weather(
    STATION TEXT,
    NAME TEXT,
    DATE TEXT,
    ACMH DOUBLE,
    AWND DOUBLE,
    FMTM DOUBLE,
    PGTM DOUBLE,
    PRCP DOUBLE,
    PSUN DOUBLE,
    SNOW DOUBLE,
    SNWD DOUBLE,
    TAVG DOUBLE,
    TMAX DOUBLE,
    TMIN DOUBLE,
    TSUN DOUBLE,
    WDF1 DOUBLE,
    WDF2 DOUBLE,
    WDF5 DOUBLE,
    WDFG DOUBLE,
    WDFM DOUBLE,
    WSF1 DOUBLE,
    WSF2 DOUBLE,
    WSF5 DOUBLE,
    WSFG DOUBLE,
    WSFM DOUBLE
);

database_interactions/basic/db/nyc_centralpark_weather.csv (new executable file, 13882 lines; diff suppressed because it is too large)

database_interactions/basic/db/setup_db.sh (new executable file, 4 lines)

#!/usr/bin/env bash
cd $(dirname "$0")
sqlite3 ../weather.db < db.sql
sqlite3 -csv ../weather.db ".import nyc_centralpark_weather.csv nyc_weather"

database_interactions/basic/src/db.rs (new file, 122 lines)

use actix_web::{web, Error as AWError};
use failure::Error;
use futures::{Future, TryFutureExt};
use rusqlite::{Statement, NO_PARAMS};
use serde::{Deserialize, Serialize};
use std::{thread::sleep, time::Duration};

pub type Pool = r2d2::Pool<r2d2_sqlite::SqliteConnectionManager>;
pub type Connection = r2d2::PooledConnection<r2d2_sqlite::SqliteConnectionManager>;
type WeatherAggResult = Result<Vec<WeatherAgg>, rusqlite::Error>;

#[derive(Debug, Serialize, Deserialize)]
pub enum WeatherAgg {
    AnnualAgg { year: i32, total: f64 },
    MonthAgg { year: i32, month: i32, total: f64 },
}

pub enum Queries {
    GetTopTenHottestYears,
    GetTopTenColdestYears,
    GetTopTenHottestMonths,
    GetTopTenColdestMonths,
}

pub fn execute(
    pool: &Pool,
    query: Queries,
) -> impl Future<Output = Result<Vec<WeatherAgg>, AWError>> {
    let pool = pool.clone();
    web::block(move || {
        // simulate an expensive query, see comments at top of main.rs
        sleep(Duration::from_secs(2));

        let result = match query {
            Queries::GetTopTenHottestYears => get_hottest_years(pool.get()?),
            Queries::GetTopTenColdestYears => get_coldest_years(pool.get()?),
            Queries::GetTopTenHottestMonths => get_hottest_months(pool.get()?),
            Queries::GetTopTenColdestMonths => get_coldest_months(pool.get()?),
        };
        result.map_err(Error::from)
    })
    .map_err(AWError::from)
}

fn get_hottest_years(conn: Connection) -> WeatherAggResult {
    let stmt = conn.prepare(
        "
        SELECT cast(strftime('%Y', date) as int) as theyear,
            sum(tmax) as total
        FROM nyc_weather
        WHERE tmax <> 'TMAX'
        GROUP BY theyear
        ORDER BY total DESC LIMIT 10",
    )?;

    get_rows_as_annual_agg(stmt)
}

fn get_coldest_years(conn: Connection) -> WeatherAggResult {
    let stmt = conn.prepare(
        "
        SELECT cast(strftime('%Y', date) as int) as theyear,
            sum(tmax) as total
        FROM nyc_weather
        WHERE tmax <> 'TMAX'
        GROUP BY theyear
        ORDER BY total ASC LIMIT 10",
    )?;

    get_rows_as_annual_agg(stmt)
}

fn get_rows_as_annual_agg(mut statement: Statement) -> WeatherAggResult {
    statement
        .query_map(NO_PARAMS, |row| {
            Ok(WeatherAgg::AnnualAgg {
                year: row.get(0)?,
                total: row.get(1)?,
            })
        })
        .and_then(Iterator::collect)
}

fn get_hottest_months(conn: Connection) -> WeatherAggResult {
    let stmt = conn.prepare(
        "SELECT cast(strftime('%Y', date) as int) as theyear,
            cast(strftime('%m', date) as int) as themonth,
            sum(tmax) as total
        FROM nyc_weather
        WHERE tmax <> 'TMAX'
        GROUP BY theyear, themonth
        ORDER BY total DESC LIMIT 10",
    )?;

    get_rows_as_month_agg(stmt)
}

fn get_coldest_months(conn: Connection) -> WeatherAggResult {
    let stmt = conn.prepare(
        "SELECT cast(strftime('%Y', date) as int) as theyear,
            cast(strftime('%m', date) as int) as themonth,
            sum(tmax) as total
        FROM nyc_weather
        WHERE tmax <> 'TMAX'
        GROUP BY theyear, themonth
        ORDER BY total ASC LIMIT 10",
    )?;

    get_rows_as_month_agg(stmt)
}

fn get_rows_as_month_agg(mut statement: Statement) -> WeatherAggResult {
    statement
        .query_map(NO_PARAMS, |row| {
            Ok(WeatherAgg::MonthAgg {
                year: row.get(0)?,
                month: row.get(1)?,
                total: row.get(2)?,
            })
        })
        .and_then(Iterator::collect)
}

database_interactions/basic/src/main.rs (new file, 77 lines)

/* Actix-Web Asynchronous Database Example

This project illustrates expensive and blocking database requests that run
in a thread pool using `web::block`, with two examples:

    1. An asynchronous handler that executes 4 queries in *sequential order*,
       collecting the results and returning them as a single serialized json object

    2. An asynchronous handler that executes 4 queries in *parallel*,
       collecting the results and returning them as a single serialized json object

    Note: The use of sleep(Duration::from_secs(2)); in db.rs is to make the performance
    improvement with parallelism more obvious.
*/
use std::io;

use actix_web::{middleware, web, App, Error as AWError, HttpResponse, HttpServer};
use futures::future::join_all;
use r2d2_sqlite::{self, SqliteConnectionManager};

mod db;
use db::{Pool, Queries};

/// Version 1: Calls 4 queries in sequential order, as an asynchronous handler
#[allow(clippy::eval_order_dependence)] // likely a clippy false positive
async fn asyncio_weather(db: web::Data<Pool>) -> Result<HttpResponse, AWError> {
    let result = vec![
        db::execute(&db, Queries::GetTopTenHottestYears).await?,
        db::execute(&db, Queries::GetTopTenColdestYears).await?,
        db::execute(&db, Queries::GetTopTenHottestMonths).await?,
        db::execute(&db, Queries::GetTopTenColdestMonths).await?,
    ];

    Ok(HttpResponse::Ok().json(result))
}

/// Version 2: Calls 4 queries in parallel, as an asynchronous handler
/// Returning Error types turn into None values in the response
async fn parallel_weather(db: web::Data<Pool>) -> Result<HttpResponse, AWError> {
    let fut_result = vec![
        Box::pin(db::execute(&db, Queries::GetTopTenHottestYears)),
        Box::pin(db::execute(&db, Queries::GetTopTenColdestYears)),
        Box::pin(db::execute(&db, Queries::GetTopTenHottestMonths)),
        Box::pin(db::execute(&db, Queries::GetTopTenColdestMonths)),
    ];
    let result: Result<Vec<_>, _> = join_all(fut_result).await.into_iter().collect();

    Ok(HttpResponse::Ok().json(result.map_err(AWError::from)?))
}

#[actix_web::main]
async fn main() -> io::Result<()> {
    std::env::set_var("RUST_LOG", "actix_web=info");
    env_logger::init();

    // Start N db executor actors (N = number of cores avail)
    let manager = SqliteConnectionManager::file("weather.db");
    let pool = Pool::new(manager).unwrap();

    // Start http server
    HttpServer::new(move || {
        App::new()
            // store db pool as Data object
            .data(pool.clone())
            .wrap(middleware::Logger::default())
            .service(
                web::resource("/asyncio_weather").route(web::get().to(asyncio_weather)),
            )
            .service(
                web::resource("/parallel_weather")
                    .route(web::get().to(parallel_weather)),
            )
    })
    .bind("127.0.0.1:8080")?
    .run()
    .await
}

database_interactions/diesel/.env (new file, 1 line)

DATABASE_URL=test.db

database_interactions/diesel/.gitignore (vendored, new file, 1 line)

test.db

database_interactions/diesel/Cargo.toml (new file, 20 lines)

[package]
name = "diesel-example"
version = "1.0.0"
authors = [
    "Nikolay Kim <fafhrd91@gmail.com>",
    "Rob Ede <robjtede@icloud.com>",
]
edition = "2018"

[dependencies]
actix-web = "3"
diesel = { version = "^1.1.0", features = ["sqlite", "r2d2"] }
dotenv = "0.15"
env_logger = "0.8"
failure = "0.1.8"
futures = "0.3.1"
r2d2 = "0.8"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
uuid = { version = "0.8", features = ["serde", "v4"] }

database_interactions/diesel/README.md (new file, 109 lines)

# diesel

Basic integration of [Diesel](https://diesel.rs/) using SQLite for Actix web.

## Usage

### Install SQLite

```sh
# on OpenSUSE
sudo zypper install sqlite3-devel libsqlite3-0 sqlite3

# on Ubuntu
sudo apt-get install libsqlite3-dev sqlite3

# on Fedora
sudo dnf install libsqlite3x-devel sqlite3x

# on macOS (using homebrew)
brew install sqlite3
```

### Initialize SQLite Database

```sh
cd database_interactions/diesel
cargo install diesel_cli --no-default-features --features sqlite

echo "DATABASE_URL=test.db" > .env
diesel migration run
```

There will now be a database file at `./test.db`.

### Running Server

```sh
cd database_interactions/diesel
cargo run  # or: cargo watch -x run

# Started http server: 127.0.0.1:8080
```

### Available Routes

#### `POST /user`

Inserts a new user into the SQLite DB.

Provide a JSON payload with a name. E.g.:

```json
{ "name": "bill" }
```

On success, a response like the following is returned:

```json
{
    "id": "9e46baba-a001-4bb3-b4cf-4b3e5bab5e97",
    "name": "bill"
}
```

<details>
<summary>Client Examples</summary>

Using [HTTPie](https://httpie.org/):

```sh
http POST localhost:8080/user name=bill
```

Using cURL:

```sh
curl -S -X POST --header "Content-Type: application/json" --data '{"name":"bill"}' http://localhost:8080/user
```

</details>

#### `GET /user/{user_uid}`

Gets a user from the DB using its UID (returned from the insert request or taken from the DB directly). Returns a 404 when no user exists with that UID.

<details>
<summary>Client Examples</summary>

Using [HTTPie](https://httpie.org/):

```sh
http localhost:8080/user/9e46baba-a001-4bb3-b4cf-4b3e5bab5e97
```

Using cURL:

```sh
curl -S http://localhost:8080/user/9e46baba-a001-4bb3-b4cf-4b3e5bab5e97
```

</details>

### Explore The SQLite DB

```sh
sqlite3 test.db
```

```
sqlite> .tables
sqlite> SELECT * FROM users;
```

## Using Other Databases

You can find a complete example of Diesel + PostgreSQL at: [https://github.com/TechEmpower/FrameworkBenchmarks/tree/master/frameworks/Rust/actix](https://github.com/TechEmpower/FrameworkBenchmarks/tree/master/frameworks/Rust/actix)

@@ -0,0 +1 @@
DROP TABLE users

@@ -0,0 +1,4 @@
CREATE TABLE users (
    id VARCHAR NOT NULL PRIMARY KEY,
    name VARCHAR NOT NULL
)

database_interactions/diesel/src/actions.rs (new file, 40 lines)

use diesel::prelude::*;
use uuid::Uuid;

use crate::models;

/// Run query using Diesel to find user by uid and return it.
pub fn find_user_by_uid(
    uid: Uuid,
    conn: &SqliteConnection,
) -> Result<Option<models::User>, diesel::result::Error> {
    use crate::schema::users::dsl::*;

    let user = users
        .filter(id.eq(uid.to_string()))
        .first::<models::User>(conn)
        .optional()?;

    Ok(user)
}

/// Run query using Diesel to insert a new database row and return the result.
pub fn insert_new_user(
    // prevent collision with `name` column imported inside the function
    nm: &str,
    conn: &SqliteConnection,
) -> Result<models::User, diesel::result::Error> {
    // It is common when using Diesel with Actix web to import schema-related
    // modules inside a function's scope (rather than the normal module's scope)
    // to prevent import collisions and namespace pollution.
    use crate::schema::users::dsl::*;

    let new_user = models::User {
        id: Uuid::new_v4().to_string(),
        name: nm.to_owned(),
    };

    diesel::insert_into(users).values(&new_user).execute(conn)?;

    Ok(new_user)
}

database_interactions/diesel/src/main.rs (new file, 94 lines)

//! Actix web Diesel integration example
//!
//! Diesel does not support tokio, so we have to run it in separate threads using the web::block
//! function which offloads blocking code (like Diesel's) in order to not block the server's thread.

#[macro_use]
extern crate diesel;

use actix_web::{get, middleware, post, web, App, Error, HttpResponse, HttpServer};
use diesel::prelude::*;
use diesel::r2d2::{self, ConnectionManager};
use uuid::Uuid;

mod actions;
mod models;
mod schema;

type DbPool = r2d2::Pool<ConnectionManager<SqliteConnection>>;

/// Finds user by UID.
#[get("/user/{user_id}")]
async fn get_user(
    pool: web::Data<DbPool>,
    user_uid: web::Path<Uuid>,
) -> Result<HttpResponse, Error> {
    let user_uid = user_uid.into_inner();
    let conn = pool.get().expect("couldn't get db connection from pool");

    // use web::block to offload blocking Diesel code without blocking server thread
    let user = web::block(move || actions::find_user_by_uid(user_uid, &conn))
        .await
        .map_err(|e| {
            eprintln!("{}", e);
            HttpResponse::InternalServerError().finish()
        })?;

    if let Some(user) = user {
        Ok(HttpResponse::Ok().json(user))
    } else {
        let res = HttpResponse::NotFound()
            .body(format!("No user found with uid: {}", user_uid));
        Ok(res)
    }
}

/// Inserts new user with name defined in form.
#[post("/user")]
async fn add_user(
    pool: web::Data<DbPool>,
    form: web::Json<models::NewUser>,
) -> Result<HttpResponse, Error> {
    let conn = pool.get().expect("couldn't get db connection from pool");

    // use web::block to offload blocking Diesel code without blocking server thread
    let user = web::block(move || actions::insert_new_user(&form.name, &conn))
        .await
        .map_err(|e| {
            eprintln!("{}", e);
            HttpResponse::InternalServerError().finish()
        })?;

    Ok(HttpResponse::Ok().json(user))
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    std::env::set_var("RUST_LOG", "actix_web=info");
    env_logger::init();
    dotenv::dotenv().ok();

    // set up database connection pool
    let connspec = std::env::var("DATABASE_URL").expect("DATABASE_URL");
    let manager = ConnectionManager::<SqliteConnection>::new(connspec);
    let pool = r2d2::Pool::builder()
        .build(manager)
        .expect("Failed to create pool.");

    let bind = "127.0.0.1:8080";

    println!("Starting server at: {}", &bind);

    // Start HTTP server
    HttpServer::new(move || {
        App::new()
            // set up DB pool to be used with web::Data<Pool> extractor
            .data(pool.clone())
            .wrap(middleware::Logger::default())
            .service(get_user)
            .service(add_user)
    })
    .bind(&bind)?
    .run()
    .await
}

database_interactions/diesel/src/models.rs (new file, 14 lines)

use serde::{Deserialize, Serialize};

use crate::schema::users;

#[derive(Debug, Clone, Serialize, Queryable, Insertable)]
pub struct User {
    pub id: String,
    pub name: String,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NewUser {
    pub name: String,
}

database_interactions/diesel/src/schema.rs (new file, 6 lines)

table! {
    users (id) {
        id -> Text,
        name -> Text,
    }
}

database_interactions/pg/.gitignore (vendored, new file, 3 lines)

/target
**/*.rs.bk
.env

database_interactions/pg/Cargo.toml (new file, 16 lines)

[package]
name = "async_pg"
version = "0.1.0"
authors = ["dowwie <dkcdkg@gmail.com>"]
edition = "2018"

[dependencies]
actix-web = "3"
config = "0.10.1"
deadpool-postgres = "0.5.0"
derive_more = "0.99.2"
dotenv = "0.15.0"
serde = { version = "1.0.104", features = ["derive"] }
tokio-pg-mapper = "0.1"
tokio-pg-mapper-derive = "0.1"
tokio-postgres = "0.5.1"

database_interactions/pg/README.md (new file, 72 lines)

# async_pg example

## This example illustrates

- `tokio_postgres`
- use of `tokio_pg_mapper` for postgres data mapping
- `deadpool_postgres` for connection pooling
- `dotenv` + `config` for configuration

## Instructions

1. Create database user

   ```shell
   createuser -P test_user
   ```

   Enter a password of your choice. The following instructions assume you
   used `testing` as the password.

   This step is **optional** and you can also use an existing database user
   for that. Just make sure to replace `test_user` with the database user
   of your choice in the following steps and change the `.env` file
   containing the configuration accordingly.

2. Create database

   ```shell
   createdb -O test_user testing_db
   ```

3. Initialize database

   ```shell
   psql -f sql/schema.sql testing_db
   ```

   This step can be repeated and clears the database as it drops and
   recreates the schema `testing` which is used within the database.

4. Create `.env` file:

   ```ini
   SERVER_ADDR=127.0.0.1:8080
   PG.USER=test_user
   PG.PASSWORD=testing
   PG.HOST=127.0.0.1
   PG.PORT=5432
   PG.DBNAME=testing_db
   PG.POOL.MAX_SIZE=16
   ```

5. Run the server:

   ```shell
   cargo run
   ```

6. Using a different terminal, send an HTTP POST request to the running server:

   ```shell
   echo '{"email": "ferris@thecrab.com", "first_name": "ferris", "last_name": "crab", "username": "ferreal"}' | http -f --json --print h POST http://127.0.0.1:8080/users
   ```

   **...or using curl...**

   ```shell
   curl -d '{"email": "ferris@thecrab.com", "first_name": "ferris", "last_name": "crab", "username": "ferreal"}' -H 'Content-Type: application/json' http://127.0.0.1:8080/users
   ```

   A unique constraint exists for username, so sending this request twice
   will return an internal server error (HTTP 500).

database_interactions/pg/sql/add_user.sql (new file, 3 lines)

INSERT INTO testing.users(email, first_name, last_name, username)
VALUES ($1, $2, $3, $4)
RETURNING $table_fields;

database_interactions/pg/sql/get_users.sql (new file, 1 line)

SELECT $table_fields FROM testing.users;

database_interactions/pg/sql/schema.sql (new file, 11 lines)

DROP SCHEMA IF EXISTS testing CASCADE;
CREATE SCHEMA testing;

CREATE TABLE testing.users (
    id BIGSERIAL PRIMARY KEY,
    email VARCHAR(200) NOT NULL,
    first_name VARCHAR(200) NOT NULL,
    last_name VARCHAR(200) NOT NULL,
    username VARCHAR(50) UNIQUE NOT NULL,
    UNIQUE (username)
);

database_interactions/pg/src/main.rs (new file, 131 lines)

mod config {
    pub use ::config::ConfigError;
    use serde::Deserialize;

    #[derive(Deserialize)]
    pub struct Config {
        pub server_addr: String,
        pub pg: deadpool_postgres::Config,
    }

    impl Config {
        pub fn from_env() -> Result<Self, ConfigError> {
            let mut cfg = ::config::Config::new();
            cfg.merge(::config::Environment::new())?;
            cfg.try_into()
        }
    }
}

mod models {
    use serde::{Deserialize, Serialize};
    use tokio_pg_mapper_derive::PostgresMapper;

    #[derive(Deserialize, PostgresMapper, Serialize)]
    #[pg_mapper(table = "users")] // singular 'user' is a keyword..
    pub struct User {
        pub email: String,
        pub first_name: String,
        pub last_name: String,
        pub username: String,
    }
}

mod errors {
    use actix_web::{HttpResponse, ResponseError};
    use deadpool_postgres::PoolError;
    use derive_more::{Display, From};
    use tokio_pg_mapper::Error as PGMError;
    use tokio_postgres::error::Error as PGError;

    #[derive(Display, From, Debug)]
    pub enum MyError {
        NotFound,
        PGError(PGError),
        PGMError(PGMError),
        PoolError(PoolError),
    }
    impl std::error::Error for MyError {}

    impl ResponseError for MyError {
        fn error_response(&self) -> HttpResponse {
            match *self {
                MyError::NotFound => HttpResponse::NotFound().finish(),
                MyError::PoolError(ref err) => {
                    HttpResponse::InternalServerError().body(err.to_string())
                }
                _ => HttpResponse::InternalServerError().finish(),
            }
        }
    }
}

mod db {
    use crate::{errors::MyError, models::User};
    use deadpool_postgres::Client;
    use tokio_pg_mapper::FromTokioPostgresRow;

    pub async fn add_user(client: &Client, user_info: User) -> Result<User, MyError> {
        let _stmt = include_str!("../sql/add_user.sql");
        let _stmt = _stmt.replace("$table_fields", &User::sql_table_fields());
        let stmt = client.prepare(&_stmt).await.unwrap();

        client
            .query(
                &stmt,
                &[
                    &user_info.email,
                    &user_info.first_name,
                    &user_info.last_name,
                    &user_info.username,
                ],
            )
            .await?
            .iter()
            .map(|row| User::from_row_ref(row).unwrap())
            .collect::<Vec<User>>()
            .pop()
            .ok_or(MyError::NotFound) // more applicable for SELECTs
    }
}

mod handlers {
    use crate::{db, errors::MyError, models::User};
    use actix_web::{web, Error, HttpResponse};
    use deadpool_postgres::{Client, Pool};

    pub async fn add_user(
        user: web::Json<User>,
        db_pool: web::Data<Pool>,
    ) -> Result<HttpResponse, Error> {
        let user_info: User = user.into_inner();

        let client: Client = db_pool.get().await.map_err(MyError::PoolError)?;

        let new_user = db::add_user(&client, user_info).await?;

        Ok(HttpResponse::Ok().json(new_user))
    }
}

use actix_web::{web, App, HttpServer};
use dotenv::dotenv;
use handlers::add_user;
use tokio_postgres::NoTls;

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    dotenv().ok();

    let config = crate::config::Config::from_env().unwrap();
    let pool = config.pg.create_pool(NoTls).unwrap();

    let server = HttpServer::new(move || {
        App::new()
            .data(pool.clone())
            .service(web::resource("/users").route(web::post().to(add_user)))
    })
    .bind(config.server_addr.clone())?
    .run();
    println!("Server running at http://{}/", config.server_addr);

    server.await
}

database_interactions/r2d2/Cargo.toml (new file, 15 lines)

[package]
name = "r2d2-example"
version = "1.0.0"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
edition = "2018"

[dependencies]
actix-web = "3"

env_logger = "0.8"
uuid = { version = "0.8", features = ["v4"] }

r2d2 = "0.8"
r2d2_sqlite = "0.14"
rusqlite = "0.21"

database_interactions/r2d2/src/main.rs (new file, 53 lines)

//! Actix web r2d2 example
use std::io;

use actix_web::{middleware, web, App, Error, HttpResponse, HttpServer};
use r2d2::Pool;
use r2d2_sqlite::SqliteConnectionManager;

/// Async request handler. The db pool is stored in application state.
async fn index(
    path: web::Path<String>,
    db: web::Data<Pool<SqliteConnectionManager>>,
) -> Result<HttpResponse, Error> {
    // execute sync code in threadpool
    let res = web::block(move || {
        let conn = db.get().unwrap();

        let uuid = format!("{}", uuid::Uuid::new_v4());
        conn.execute(
            "INSERT INTO users (id, name) VALUES ($1, $2)",
            &[&uuid, &path.into_inner()],
        )
        .unwrap();

        conn.query_row("SELECT name FROM users WHERE id=$1", &[&uuid], |row| {
            row.get::<_, String>(0)
        })
    })
    .await
    .map(|user| HttpResponse::Ok().json(user))
    .map_err(|_| HttpResponse::InternalServerError())?;
    Ok(res)
}

#[actix_web::main]
async fn main() -> io::Result<()> {
    std::env::set_var("RUST_LOG", "actix_web=debug");
    env_logger::init();

    // r2d2 pool
    let manager = SqliteConnectionManager::file("test.db");
    let pool = r2d2::Pool::new(manager).unwrap();

    // start http server
    HttpServer::new(move || {
        App::new()
            .data(pool.clone()) // <- store db pool in app state
            .wrap(middleware::Logger::default())
            .route("/{name}", web::get().to(index))
    })
    .bind("127.0.0.1:8080")?
    .run()
    .await
}

database_interactions/redis/Cargo.toml (new file, 14 lines)

[package]
name = "actix_redis"
version = "1.0.0"
authors = ["dowwie <dkcdkg@gmail.com>"]
edition = "2018"

[dependencies]
actix = "0.10"
actix-web = "3"
actix-redis = "0.9"
futures = "0.3"
redis-async = "0.6.3"
serde = { version = "1", features = ["derive"] }
env_logger = "0.8"

database_interactions/redis/README.md (new file, 15 lines)

This project illustrates how to send multiple cache requests to redis in bulk, asynchronously.
This asyncio approach resembles traditional redis pipelining. Details about how this
is so can be read at https://github.com/benashford/redis-async-rs/issues/19#issuecomment-412208018

To test the demo, POST a json object containing three strings to the /stuff endpoint:

    {"one": "first entry",
     "two": "second entry",
     "three": "third entry" }

These three entries will be cached to redis, keyed accordingly.

To delete them, issue an HTTP DELETE request to the /stuff endpoint.
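
As a rough command-line sketch (assuming the server from this example is running, it binds 0.0.0.0:8080, and a local redis is listening on 127.0.0.1:6379), the endpoint can be exercised with curl and the keys checked directly with redis-cli:

```bash
# cache the three values (POST /stuff)
curl -X POST -H 'Content-Type: application/json' \
     -d '{"one": "first entry", "two": "second entry", "three": "third entry"}' \
     http://127.0.0.1:8080/stuff

# optionally confirm one of the keys landed in redis
redis-cli GET mydomain:one

# remove the cached values again (DELETE /stuff)
curl -X DELETE http://127.0.0.1:8080/stuff
```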

database_interactions/redis/src/main.rs (new file, 94 lines)

use actix::prelude::*;
use actix_redis::{Command, RedisActor};
use actix_web::{middleware, web, App, Error as AWError, HttpResponse, HttpServer};
use futures::future::join_all;
use redis_async::{resp::RespValue, resp_array};
use serde::Deserialize;

#[derive(Deserialize)]
pub struct CacheInfo {
    one: String,
    two: String,
    three: String,
}

async fn cache_stuff(
    info: web::Json<CacheInfo>,
    redis: web::Data<Addr<RedisActor>>,
) -> Result<HttpResponse, AWError> {
    let info = info.into_inner();

    let one = redis.send(Command(resp_array!["SET", "mydomain:one", info.one]));
    let two = redis.send(Command(resp_array!["SET", "mydomain:two", info.two]));
    let three = redis.send(Command(resp_array!["SET", "mydomain:three", info.three]));

    // Creates a future which represents a collection of the results of the futures
    // given. The returned future will drive execution for all of its underlying futures,
    // collecting the results into a destination `Vec<RespValue>` in the same order as they
    // were provided. If any future returns an error then all other futures will be
    // canceled and an error will be returned immediately. If all futures complete
    // successfully, however, then the returned future will succeed with a `Vec` of
    // all the successful results.
    let res: Vec<Result<RespValue, AWError>> =
        join_all(vec![one, two, three].into_iter())
            .await
            .into_iter()
            .map(|item| {
                item.map_err(AWError::from)
                    .and_then(|res| res.map_err(AWError::from))
            })
            .collect();

    // successful operations return "OK", so confirm that all returned as so
    if !res.iter().all(|res| match res {
        Ok(RespValue::SimpleString(x)) if x == "OK" => true,
        _ => false,
    }) {
        Ok(HttpResponse::InternalServerError().finish())
    } else {
        Ok(HttpResponse::Ok().body("successfully cached values"))
    }
}

async fn del_stuff(redis: web::Data<Addr<RedisActor>>) -> Result<HttpResponse, AWError> {
    let res = redis
        .send(Command(resp_array![
            "DEL",
            "mydomain:one",
            "mydomain:two",
            "mydomain:three"
        ]))
        .await?;

    match res {
        Ok(RespValue::Integer(x)) if x == 3 => {
            Ok(HttpResponse::Ok().body("successfully deleted values"))
        }
        _ => {
            println!("---->{:?}", res);
            Ok(HttpResponse::InternalServerError().finish())
        }
    }
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    std::env::set_var("RUST_LOG", "actix_web=trace,actix_redis=trace");
    env_logger::init();

    HttpServer::new(|| {
        let redis_addr = RedisActor::start("127.0.0.1:6379");

        App::new()
            .data(redis_addr)
            .wrap(middleware::Logger::default())
            .service(
                web::resource("/stuff")
                    .route(web::post().to(cache_stuff))
                    .route(web::delete().to(del_stuff)),
            )
    })
    .bind("0.0.0.0:8080")?
    .run()
    .await
}

database_interactions/simple-auth-server/Cargo.toml (new file, 25 lines)

[package]
name = "simple-auth-server"
version = "2.0.0"
authors = ["mygnu <tech@hgill.io>"]
edition = "2018"
workspace = "../.."

[dependencies]
actix-web = "3"
actix-identity = "0.3"

chrono = { version = "0.4.6", features = ["serde"] }
derive_more = "0.99.0"
diesel = { version = "1.4.5", features = ["postgres", "uuidv07", "r2d2", "chrono"] }
dotenv = "0.15"
env_logger = "0.8"
futures = "0.3.1"
r2d2 = "0.8"
rust-argon2 = "0.8"
lazy_static = "1.4.0"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
sparkpost = "0.5.2"
uuid = { version = "0.8", features = ["serde", "v4"] }
time = "0.2"

database_interactions/simple-auth-server/README.md (new file, 38 lines)

## Auth Web Microservice with Rust using Actix-Web 1.0

##### The flow of events looks like this:

- Register with an email address ➡ receive an 📨 with a link to verify
- Follow the link ➡ register with the same email and a password
- Login with email and password ➡ get verified and receive an auth cookie
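
A rough curl walkthrough of that flow, assuming the server in this example is running on 127.0.0.1:3000 with SparkPost configured; the email address, password, and invitation id below are made-up placeholders (the real id arrives in the invitation email):

```sh
# 1. request an invitation email
curl -X POST -H 'Content-Type: application/json' \
     -d '{"email":"name@example.com"}' http://127.0.0.1:3000/api/invitation

# 2. register using the invitation id from the emailed link (placeholder UUID below)
curl -X POST -H 'Content-Type: application/json' \
     -d '{"password":"hunter2"}' \
     http://127.0.0.1:3000/api/register/00000000-0000-0000-0000-000000000000

# 3. log in and store the auth cookie
curl -c cookies.txt -X POST -H 'Content-Type: application/json' \
     -d '{"email":"name@example.com","password":"hunter2"}' \
     http://127.0.0.1:3000/api/auth

# 4. call an authenticated route with that cookie
curl -b cookies.txt http://127.0.0.1:3000/api/auth
```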

##### Crates Used

- [actix-web](https://crates.io/crates/actix-web) // Actix web is a simple, pragmatic and extremely fast web framework for Rust.
- [rust-argon2](https://crates.io/crates/rust-argon2) // crate for hashing passwords using the cryptographically-secure Argon2 hashing algorithm.
- [chrono](https://crates.io/crates/chrono) // Date and time library for Rust.
- [diesel](https://crates.io/crates/diesel) // A safe, extensible ORM and Query Builder for PostgreSQL, SQLite, and MySQL.
- [dotenv](https://crates.io/crates/dotenv) // A dotenv implementation for Rust.
- [derive_more](https://crates.io/crates/derive_more) // Convenience macros to derive traits easily.
- [env_logger](https://crates.io/crates/env_logger) // A logging implementation for log which is configured via an environment variable.
- [futures](https://crates.io/crates/futures) // An implementation of futures and streams featuring zero allocations, composability, and iterator-like interfaces.
- [lazy_static](https://docs.rs/lazy_static) // A macro for declaring lazily evaluated statics.
- [r2d2](https://crates.io/crates/r2d2) // A generic connection pool.
- [serde](https://crates.io/crates/serde) // A generic serialization/deserialization framework.
- [serde_json](https://crates.io/crates/serde_json) // A JSON serialization file format.
- [serde_derive](https://crates.io/crates/serde_derive) // Macros 1.1 implementation of #[derive(Serialize, Deserialize)].
- [sparkpost](https://crates.io/crates/sparkpost) // Rust bindings for sparkpost email api v1.
- [uuid](https://crates.io/crates/uuid) // A library to generate and parse UUIDs.

Read the full tutorial series on [gill.net.in](https://gill.net.in)

- [Auth Web Microservice with Rust using Actix Web v2 - Complete Tutorial](https://gill.net.in/posts/auth-microservice-rust-actix-web1.0-diesel-complete-tutorial/)

## Dependencies

On Ubuntu 19.10:

```
sudo apt install libclang-dev libpq-dev
```

database_interactions/simple-auth-server/diesel.toml (new file, 5 lines)

# For documentation on how to configure this file,
# see diesel.rs/guides/configuring-diesel-cli

[print_schema]
file = "src/schema.rs"

@@ -0,0 +1,6 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.

DROP FUNCTION IF EXISTS diesel_manage_updated_at(_tbl regclass);
DROP FUNCTION IF EXISTS diesel_set_updated_at();

@@ -0,0 +1,36 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.

-- Sets up a trigger for the given table to automatically set a column called
-- `updated_at` whenever the row is modified (unless `updated_at` was included
-- in the modified columns)
--
-- # Example
--
-- ```sql
-- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW());
--
-- SELECT diesel_manage_updated_at('users');
-- ```
CREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$
BEGIN
    EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON %s
                    FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl);
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$
BEGIN
    IF (
        NEW IS DISTINCT FROM OLD AND
        NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at
    ) THEN
        NEW.updated_at := current_timestamp;
    END IF;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

@@ -0,0 +1,2 @@
-- This file should undo anything in `up.sql`
DROP TABLE users;

@@ -0,0 +1,6 @@
-- Your SQL goes here
CREATE TABLE users (
    email VARCHAR(100) NOT NULL UNIQUE PRIMARY KEY,
    hash VARCHAR(122) NOT NULL, --argon hash
    created_at TIMESTAMP NOT NULL
);

@@ -0,0 +1,2 @@
-- This file should undo anything in `up.sql`
DROP TABLE invitations;

@@ -0,0 +1,6 @@
-- Your SQL goes here
CREATE TABLE invitations (
    id UUID NOT NULL UNIQUE PRIMARY KEY,
    email VARCHAR(100) NOT NULL,
    expires_at TIMESTAMP NOT NULL
);

database_interactions/simple-auth-server/src/auth_handler.rs (new file, 86 lines)

use actix_identity::Identity;
use actix_web::{
    dev::Payload, error::BlockingError, web, Error, FromRequest, HttpRequest,
    HttpResponse,
};
use diesel::prelude::*;
use diesel::PgConnection;
use futures::future::{err, ok, Ready};
use serde::Deserialize;

use crate::errors::ServiceError;
use crate::models::{Pool, SlimUser, User};
use crate::utils::verify;

#[derive(Debug, Deserialize)]
pub struct AuthData {
    pub email: String,
    pub password: String,
}

// we need the same data
// simple aliasing makes the intentions clear and it's more readable
pub type LoggedUser = SlimUser;

impl FromRequest for LoggedUser {
    type Config = ();
    type Error = Error;
    type Future = Ready<Result<LoggedUser, Error>>;

    fn from_request(req: &HttpRequest, pl: &mut Payload) -> Self::Future {
        if let Ok(identity) = Identity::from_request(req, pl).into_inner() {
            if let Some(user_json) = identity.identity() {
                if let Ok(user) = serde_json::from_str(&user_json) {
                    return ok(user);
                }
            }
        }
        err(ServiceError::Unauthorized.into())
    }
}

pub async fn logout(id: Identity) -> HttpResponse {
    id.forget();
    HttpResponse::Ok().finish()
}

pub async fn login(
    auth_data: web::Json<AuthData>,
    id: Identity,
    pool: web::Data<Pool>,
) -> Result<HttpResponse, ServiceError> {
    let res = web::block(move || query(auth_data.into_inner(), pool)).await;

    match res {
        Ok(user) => {
            let user_string = serde_json::to_string(&user).unwrap();
            id.remember(user_string);
            Ok(HttpResponse::Ok().finish())
        }
        Err(err) => match err {
            BlockingError::Error(service_error) => Err(service_error),
            BlockingError::Canceled => Err(ServiceError::InternalServerError),
        },
    }
}

pub async fn get_me(logged_user: LoggedUser) -> HttpResponse {
    HttpResponse::Ok().json(logged_user)
}

/// Diesel query
fn query(auth_data: AuthData, pool: web::Data<Pool>) -> Result<SlimUser, ServiceError> {
    use crate::schema::users::dsl::{email, users};
    let conn: &PgConnection = &pool.get().unwrap();
    let mut items = users
        .filter(email.eq(&auth_data.email))
        .load::<User>(conn)?;

    if let Some(user) = items.pop() {
        if let Ok(matching) = verify(&user.hash, &auth_data.password) {
            if matching {
                return Ok(user.into());
            }
        }
    }
    Err(ServiceError::Unauthorized)
}

@@ -0,0 +1,70 @@
// email_service.rs
use crate::errors::ServiceError;
use crate::models::Invitation;
use sparkpost::transmission::{
    EmailAddress, Message, Options, Recipient, Transmission, TransmissionResponse,
};

lazy_static::lazy_static! {
    static ref API_KEY: String = std::env::var("SPARKPOST_API_KEY").expect("SPARKPOST_API_KEY must be set");
}

pub fn send_invitation(invitation: &Invitation) -> Result<(), ServiceError> {
    let tm = Transmission::new_eu(API_KEY.as_str());
    let sending_email = std::env::var("SENDING_EMAIL_ADDRESS")
        .expect("SENDING_EMAIL_ADDRESS must be set");
    // new email message with sender name and email
    let mut email = Message::new(EmailAddress::new(sending_email, "Let's Organise"));

    let options = Options {
        open_tracking: false,
        click_tracking: false,
        transactional: true,
        sandbox: false,
        inline_css: false,
        start_time: None,
    };

    // recipient from the invitation email
    let recipient: Recipient = invitation.email.as_str().into();

    let email_body = format!(
        "Please click on the link below to complete registration. <br/>
        <a href=\"http://localhost:3000/register.html?id={}&email={}\">
        http://localhost:3030/register</a> <br>
        your Invitation expires on <strong>{}</strong>",
        invitation.id,
        invitation.email,
        invitation
            .expires_at
            .format("%I:%M %p %A, %-d %B, %C%y")
            .to_string()
    );

    // complete the email message with details
    email
        .add_recipient(recipient)
        .options(options)
        .subject("You have been invited to join Simple-Auth-Server Rust")
        .html(email_body);

    let result = tm.send(&email);

    // Note that we only print out the error response from email api
    match result {
        Ok(res) => match res {
            TransmissionResponse::ApiResponse(api_res) => {
                println!("API Response: \n {:#?}", api_res);
                Ok(())
            }
            TransmissionResponse::ApiError(errors) => {
                println!("Response Errors: \n {:#?}", &errors);
                Err(ServiceError::InternalServerError)
            }
        },
        Err(error) => {
            println!("Send Email Error: \n {:#?}", error);
            Err(ServiceError::InternalServerError)
        }
    }
}

database_interactions/simple-auth-server/src/errors.rs (new file, 59 lines)

use actix_web::{error::ResponseError, HttpResponse};
use derive_more::Display;
use diesel::result::{DatabaseErrorKind, Error as DBError};
use std::convert::From;
use uuid::Error as ParseError;

#[derive(Debug, Display)]
pub enum ServiceError {
    #[display(fmt = "Internal Server Error")]
    InternalServerError,

    #[display(fmt = "BadRequest: {}", _0)]
    BadRequest(String),

    #[display(fmt = "Unauthorized")]
    Unauthorized,
}

// impl ResponseError trait allows to convert our errors into http responses with appropriate data
impl ResponseError for ServiceError {
    fn error_response(&self) -> HttpResponse {
        match self {
            ServiceError::InternalServerError => HttpResponse::InternalServerError()
                .json("Internal Server Error, Please try later"),
            ServiceError::BadRequest(ref message) => {
                HttpResponse::BadRequest().json(message)
            }
            ServiceError::Unauthorized => {
                HttpResponse::Unauthorized().json("Unauthorized")
            }
        }
    }
}

// we can return early in our handlers if UUID provided by the user is not valid
// and provide a custom message
impl From<ParseError> for ServiceError {
    fn from(_: ParseError) -> ServiceError {
        ServiceError::BadRequest("Invalid UUID".into())
    }
}

impl From<DBError> for ServiceError {
    fn from(error: DBError) -> ServiceError {
        // Right now we just care about UniqueViolation from diesel
        // But this would be helpful to easily map errors as our app grows
        match error {
            DBError::DatabaseError(kind, info) => {
                if let DatabaseErrorKind::UniqueViolation = kind {
                    let message =
                        info.details().unwrap_or_else(|| info.message()).to_string();
                    return ServiceError::BadRequest(message);
                }
                ServiceError::InternalServerError
            }
            _ => ServiceError::InternalServerError,
        }
    }
}

@@ -0,0 +1,55 @@
use actix_web::{error::BlockingError, web, HttpResponse};
use diesel::{prelude::*, PgConnection};
use serde::Deserialize;

use crate::email_service::send_invitation;
use crate::errors::ServiceError;
use crate::models::{Invitation, Pool};

#[derive(Deserialize)]
pub struct InvitationData {
    pub email: String,
}

pub async fn post_invitation(
    invitation_data: web::Json<InvitationData>,
    pool: web::Data<Pool>,
) -> Result<HttpResponse, ServiceError> {
    // run diesel blocking code
    let res =
        web::block(move || create_invitation(invitation_data.into_inner().email, pool))
            .await;

    match res {
        Ok(_) => Ok(HttpResponse::Ok().finish()),
        Err(err) => match err {
            BlockingError::Error(service_error) => Err(service_error),
            BlockingError::Canceled => Err(ServiceError::InternalServerError),
        },
    }
}

fn create_invitation(
    eml: String,
    pool: web::Data<Pool>,
) -> Result<(), crate::errors::ServiceError> {
    let invitation = dbg!(query(eml, pool)?);
    send_invitation(&invitation)
}

/// Diesel query
fn query(
    eml: String,
    pool: web::Data<Pool>,
) -> Result<Invitation, crate::errors::ServiceError> {
    use crate::schema::invitations::dsl::invitations;

    let new_invitation: Invitation = eml.into();
    let conn: &PgConnection = &pool.get().unwrap();

    let inserted_invitation = diesel::insert_into(invitations)
        .values(&new_invitation)
        .get_result(conn)?;

    Ok(inserted_invitation)
}

database_interactions/simple-auth-server/src/main.rs (new file, 74 lines)

#[macro_use]
extern crate diesel;

use actix_identity::{CookieIdentityPolicy, IdentityService};
use actix_web::{middleware, web, App, HttpServer};
use diesel::prelude::*;
use diesel::r2d2::{self, ConnectionManager};
use time::Duration;

mod auth_handler;
mod email_service;
mod errors;
mod invitation_handler;
mod models;
mod register_handler;
mod schema;
mod utils;

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    dotenv::dotenv().ok();
    std::env::set_var(
        "RUST_LOG",
        "simple-auth-server=debug,actix_web=info,actix_server=info",
    );
    env_logger::init();
    let database_url = std::env::var("DATABASE_URL").expect("DATABASE_URL must be set");

    // create db connection pool
    let manager = ConnectionManager::<PgConnection>::new(database_url);
    let pool: models::Pool = r2d2::Pool::builder()
        .build(manager)
        .expect("Failed to create pool.");
    let domain: String =
        std::env::var("DOMAIN").unwrap_or_else(|_| "localhost".to_string());

    // Start http server
    HttpServer::new(move || {
        App::new()
            .data(pool.clone())
            // enable logger
            .wrap(middleware::Logger::default())
            .wrap(IdentityService::new(
                CookieIdentityPolicy::new(utils::SECRET_KEY.as_bytes())
                    .name("auth")
                    .path("/")
                    .domain(domain.as_str())
                    .max_age_time(Duration::days(1))
                    .secure(false), // this can only be true if you have https
            ))
            .data(web::JsonConfig::default().limit(4096))
            // everything under '/api/' route
            .service(
                web::scope("/api")
                    .service(
                        web::resource("/invitation")
                            .route(web::post().to(invitation_handler::post_invitation)),
                    )
                    .service(
                        web::resource("/register/{invitation_id}")
                            .route(web::post().to(register_handler::register_user)),
                    )
                    .service(
                        web::resource("/auth")
                            .route(web::post().to(auth_handler::login))
                            .route(web::delete().to(auth_handler::logout))
                            .route(web::get().to(auth_handler::get_me)),
                    ),
            )
    })
    .bind("127.0.0.1:3000")?
    .run()
    .await
}

database_interactions/simple-auth-server/src/models.rs (new file, 57 lines)

use super::schema::*;
use diesel::{r2d2::ConnectionManager, PgConnection};
use serde::{Deserialize, Serialize};

// type alias to use in multiple places
pub type Pool = r2d2::Pool<ConnectionManager<PgConnection>>;

#[derive(Debug, Serialize, Deserialize, Queryable, Insertable)]
#[table_name = "users"]
pub struct User {
    pub email: String,
    pub hash: String,
    pub created_at: chrono::NaiveDateTime,
}

impl User {
    pub fn from_details<S: Into<String>, T: Into<String>>(email: S, pwd: T) -> Self {
        User {
            email: email.into(),
            hash: pwd.into(),
            created_at: chrono::Local::now().naive_local(),
        }
    }
}

#[derive(Debug, Serialize, Deserialize, Queryable, Insertable)]
#[table_name = "invitations"]
pub struct Invitation {
    pub id: uuid::Uuid,
    pub email: String,
    pub expires_at: chrono::NaiveDateTime,
}

// any type that implements Into<String> can be used to create Invitation
impl<T> From<T> for Invitation
where
    T: Into<String>,
{
    fn from(email: T) -> Self {
        Invitation {
            id: uuid::Uuid::new_v4(),
            email: email.into(),
            expires_at: chrono::Local::now().naive_local() + chrono::Duration::hours(24),
        }
    }
}

#[derive(Debug, Serialize, Deserialize)]
pub struct SlimUser {
    pub email: String,
}

impl From<User> for SlimUser {
    fn from(user: User) -> Self {
        SlimUser { email: user.email }
    }
}

@@ -0,0 +1,67 @@
use actix_web::{error::BlockingError, web, HttpResponse};
use diesel::prelude::*;
use serde::Deserialize;

use crate::errors::ServiceError;
use crate::models::{Invitation, Pool, SlimUser, User};
use crate::utils::hash_password;
// UserData is used to extract data from a post request by the client
#[derive(Debug, Deserialize)]
pub struct UserData {
    pub password: String,
}

pub async fn register_user(
    invitation_id: web::Path<String>,
    user_data: web::Json<UserData>,
    pool: web::Data<Pool>,
) -> Result<HttpResponse, ServiceError> {
    let res = web::block(move || {
        query(
            invitation_id.into_inner(),
            user_data.into_inner().password,
            pool,
        )
    })
    .await;

    match res {
        Ok(user) => Ok(HttpResponse::Ok().json(&user)),
        Err(err) => match err {
            BlockingError::Error(service_error) => Err(service_error),
            BlockingError::Canceled => Err(ServiceError::InternalServerError),
        },
    }
}

fn query(
    invitation_id: String,
    password: String,
    pool: web::Data<Pool>,
) -> Result<SlimUser, crate::errors::ServiceError> {
    use crate::schema::invitations::dsl::{id, invitations};
    use crate::schema::users::dsl::users;
    let invitation_id = uuid::Uuid::parse_str(&invitation_id)?;

    let conn: &PgConnection = &pool.get().unwrap();
    invitations
        .filter(id.eq(invitation_id))
        .load::<Invitation>(conn)
        .map_err(|_db_error| ServiceError::BadRequest("Invalid Invitation".into()))
        .and_then(|mut result| {
            if let Some(invitation) = result.pop() {
                // if invitation is not expired
                if invitation.expires_at > chrono::Local::now().naive_local() {
                    // try hashing the password, else return the error that will be converted to ServiceError
                    let password: String = hash_password(&password)?;
                    dbg!(&password);
                    let user = User::from_details(invitation.email, password);
                    let inserted_user: User =
                        diesel::insert_into(users).values(&user).get_result(conn)?;
                    dbg!(&inserted_user);
                    return Ok(inserted_user.into());
                }
            }
            Err(ServiceError::BadRequest("Invalid Invitation".into()))
        })
}
17
database_interactions/simple-auth-server/src/schema.rs
Normal file
@ -0,0 +1,17 @@
table! {
    invitations (id) {
        id -> Uuid,
        email -> Varchar,
        expires_at -> Timestamp,
    }
}

table! {
    users (email) {
        email -> Varchar,
        hash -> Varchar,
        created_at -> Timestamp,
    }
}

allow_tables_to_appear_in_same_query!(invitations, users,);
28
database_interactions/simple-auth-server/src/utils.rs
Normal file
@ -0,0 +1,28 @@
use crate::errors::ServiceError;
use argon2::{self, Config};

lazy_static::lazy_static! {
    pub static ref SECRET_KEY: String = std::env::var("SECRET_KEY").unwrap_or_else(|_| "0123".repeat(8));
}

const SALT: &'static [u8] = b"supersecuresalt";

// WARNING THIS IS ONLY FOR DEMO PLEASE DO MORE RESEARCH FOR PRODUCTION USE
pub fn hash_password(password: &str) -> Result<String, ServiceError> {
    let config = Config {
        secret: SECRET_KEY.as_bytes(),
        ..Default::default()
    };
    argon2::hash_encoded(password.as_bytes(), &SALT, &config).map_err(|err| {
        dbg!(err);
        ServiceError::InternalServerError
    })
}

pub fn verify(hash: &str, password: &str) -> Result<bool, ServiceError> {
    argon2::verify_encoded_ext(hash, password.as_bytes(), SECRET_KEY.as_bytes(), &[])
        .map_err(|err| {
            dbg!(err);
            ServiceError::Unauthorized
        })
}
31
database_interactions/simple-auth-server/static/index.html
Normal file
@ -0,0 +1,31 @@
<!DOCTYPE html>
<html>
  <head>
    <meta charset="utf-8" />
    <meta http-equiv="X-UA-Compatible" content="IE=edge" />
    <title>Actix Web - Auth App</title>
    <meta name="viewport" content="width=device-width, initial-scale=1" />
    <link rel="stylesheet" type="text/css" media="screen" href="main.css" />
    <script src="main.js"></script>
  </head>
  <body>
    <div class="login">
      <h1>Email Invitation</h1>

      <p>Please enter your email to receive an invitation</p>
      <input class="field" type="text" placeholder="email" id="email" /> <br />
      <input class="btn" type="submit" value="Send Email" onclick="sendVerificationEmail()" />
    </div>
  </body>
</html>
<script>
  function sendVerificationEmail() {
    let email = document.querySelector('#email');

    post('api/invitation', { email: email.value }).then(data => {
      alert('Please check your email.');
      email.value = '';
      console.error(data);
    });
  }
</script>
37
database_interactions/simple-auth-server/static/main.css
Normal file
@ -0,0 +1,37 @@
/* CSSTerm.com Easy CSS login form */

.login {
  width:600px;
  margin:auto;
  border:1px #CCC solid;
  padding:0px 30px;
  background-color: #3b6caf;
  color:#FFF;
}

.field {
  background: #1e4f8a;
  border:1px #03306b solid;
  padding:10px;
  margin:5px 25px;
  width:215px;
  color:#FFF;
}

.login h1, p, .chbox, .btn {
  margin-left:25px;
  color:#fff;
}

.btn {
  background-color: #00CCFF;
  border:1px #03306b solid;
  padding:10px 30px;
  font-weight:bold;
  margin:25px 25px;
  cursor: pointer;
}

.forgot {
  color:#fff;
}
19
database_interactions/simple-auth-server/static/main.js
Normal file
@ -0,0 +1,19 @@
function post(url = ``, data = {}) {
  // Default options are marked with *
  return fetch(url, {
    method: 'POST', // *GET, POST, PUT, DELETE, etc.
    mode: 'cors', // no-cors, cors, *same-origin
    cache: 'no-cache', // *default, no-cache, reload, force-cache, only-if-cached
    headers: {
      'Content-Type': 'application/json; charset=utf-8',
    },
    redirect: 'follow', // manual, *follow, error
    referrer: 'no-referrer', // no-referrer, *client
    body: JSON.stringify(data), // body data type must match "Content-Type" header
  }).then(response => response.json()); // parses response to JSON
}

// window.addEventListener('load', function() {
//   console.log('All assets are loaded');
//   console.log(getUrlVars());
// });
@ -0,0 +1,44 @@
<!DOCTYPE html>
<html>
  <head>
    <meta charset="utf-8" />
    <meta http-equiv="X-UA-Compatible" content="IE=edge" />
    <title>Actix Web - Auth App</title>
    <meta name="viewport" content="width=device-width, initial-scale=1" />
    <link rel="stylesheet" type="text/css" media="screen" href="main.css" />
    <script src="main.js"></script>
  </head>
  <body>
    <div class="login">
      <h1>Register Account</h1>

      <p>Please enter your email and new password</p>
      <input class="field" type="text" placeholder="email" id="email" />
      <input class="field" type="password" placeholder="Password" id="password" />
      <input class="btn" type="submit" value="Register" onclick="register()" />
    </div>
  </body>
</html>
<script>
  function getUrlVars() {
    var vars = {};
    var parts = window.location.href.replace(/[?&]+([^=&]+)=([^&]*)/gi, function(m, key, value) {
      vars[key] = value;
    });
    return vars;
  }
  function register() {
    let password = document.querySelector('#password');
    let invitation_id = getUrlVars().id;

    post('api/register/' + invitation_id, { password: password.value }).then(data => {
      password.value = '';
      console.error(data);
    });
  }
  window.addEventListener('load', function() {
    let email = document.querySelector('#email');
    email.value = getUrlVars().email;
    console.log(getUrlVars());
  });
</script>
4
database_interactions/sqlx_todo/.env.example
Normal file
@ -0,0 +1,4 @@
HOST=127.0.0.1
PORT=5000
DATABASE_URL="postgres://user:pass@192.168.33.11/actix_sqlx_todo"
RUST_LOG=sqlx_todo=info,actix=info
2
database_interactions/sqlx_todo/.gitignore
vendored
Normal file
@ -0,0 +1,2 @@
/target
.env
19
database_interactions/sqlx_todo/Cargo.toml
Normal file
@ -0,0 +1,19 @@
[package]
name = "sqlx_todo"
version = "0.1.0"
authors = ["Milan Zivkovic <zivkovic.milan@gmail.com>"]
edition = "2018"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
actix-web = "3"
listenfd = "0.3.3"
serde = "1.0.106"
serde_json = "1.0.51"
sqlx = { version = "0.3", features = [ "postgres" ] }
dotenv = "0.15.0"
env_logger = "0.7.1"
log = "0.4.8"
anyhow = "1.0.28"
futures = "0.3.13"
33
database_interactions/sqlx_todo/README.md
Normal file
@ -0,0 +1,33 @@
# actix-sqlx-todo

Example Todo application using Actix-web and [SQLx](https://github.com/launchbadge/sqlx) with PostgreSQL

# Usage

## Prerequisites

* Rust
* PostgreSQL

## Change into the project sub-directory

All instructions assume you have changed into this folder:

```bash
cd examples/sqlx_todo
```

## Set up the database

* Create a new database using `schema.sql` (see the sketch below)
* Copy `.env.example` into `.env` and adjust `DATABASE_URL` to match your PostgreSQL address, username and password
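A minimal sketch of that setup, assuming a local PostgreSQL server with the `psql`/`createdb` client tools and the `actix_sqlx_todo` database name from `.env.example` (adjust names and credentials to your environment):

```bash
# create the database and load the todos table from schema.sql
createdb actix_sqlx_todo
psql -d actix_sqlx_todo -f schema.sql

# copy the example environment file and edit DATABASE_URL
cp .env.example .env
```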

## Run the application

To run the application execute:

```bash
cargo run
```

By default, the application will be available at `http://localhost:5000`. If you wish to change the address or port, you can do so in the `.env` file.
5
database_interactions/sqlx_todo/schema.sql
Normal file
@ -0,0 +1,5 @@
CREATE TABLE IF NOT EXISTS todos (
    id SERIAL PRIMARY KEY,
    description TEXT NOT NULL,
    done BOOLEAN NOT NULL DEFAULT FALSE
);
60
database_interactions/sqlx_todo/src/main.rs
Normal file
@ -0,0 +1,60 @@
#[macro_use]
extern crate log;

use actix_web::{web, App, HttpResponse, HttpServer, Responder};
use anyhow::Result;
use dotenv::dotenv;
use listenfd::ListenFd;
use sqlx::PgPool;
use std::env;

// import todo module (routes and model)
mod todo;

// default / handler
async fn index() -> impl Responder {
    HttpResponse::Ok().body(r#"
Welcome to Actix-web with SQLx Todos example.
Available routes:
GET /todos -> list of all todos
POST /todo -> create new todo, example: { "description": "learn actix and sqlx", "done": false }
GET /todo/{id} -> show one todo with requested id
PUT /todo/{id} -> update todo with requested id, example: { "description": "learn actix and sqlx", "done": true }
DELETE /todo/{id} -> delete todo with requested id
    "#
    )
}

#[actix_web::main]
async fn main() -> Result<()> {
    dotenv().ok();
    env_logger::init();

    // this will enable us to keep application running during recompile: systemfd --no-pid -s http::5000 -- cargo watch -x run
    let mut listenfd = ListenFd::from_env();

    let database_url =
        env::var("DATABASE_URL").expect("DATABASE_URL is not set in .env file");
    let db_pool = PgPool::new(&database_url).await?;

    let mut server = HttpServer::new(move || {
        App::new()
            .data(db_pool.clone()) // pass database pool to application so we can access it inside handlers
            .route("/", web::get().to(index))
            .configure(todo::init) // init todo routes
    });

    server = match listenfd.take_tcp_listener(0)? {
        Some(listener) => server.listen(listener)?,
        None => {
            let host = env::var("HOST").expect("HOST is not set in .env file");
            let port = env::var("PORT").expect("PORT is not set in .env file");
            server.bind(format!("{}:{}", host, port))?
        }
    };

    info!("Starting server");
    server.run().await?;

    Ok(())
}
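The `ListenFd` wiring above is what makes the recompile workflow from the comment possible: `systemfd` holds the listening socket open while `cargo watch` rebuilds the binary. A hedged sketch of that workflow, assuming neither helper tool is installed yet (both are separate crates installed through cargo):

```bash
# one-time install of the helper tools (assumption: not already present)
cargo install systemfd cargo-watch

# keep the socket open across recompiles while developing
systemfd --no-pid -s http::5000 -- cargo watch -x run
```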
5
database_interactions/sqlx_todo/src/todo/mod.rs
Normal file
@ -0,0 +1,5 @@
mod model;
mod routes;

pub use model::*;
pub use routes::init;
128
database_interactions/sqlx_todo/src/todo/model.rs
Normal file
@ -0,0 +1,128 @@
use actix_web::{Error, HttpRequest, HttpResponse, Responder};
use anyhow::Result;
use futures::future::{ready, Ready};
use serde::{Deserialize, Serialize};
use sqlx::postgres::PgRow;
use sqlx::{FromRow, PgPool, Row};

// this struct will be used to receive user input
#[derive(Serialize, Deserialize)]
pub struct TodoRequest {
    pub description: String,
    pub done: bool,
}

// this struct will be used to represent a database record
#[derive(Serialize, FromRow)]
pub struct Todo {
    pub id: i32,
    pub description: String,
    pub done: bool,
}

// implementation of Actix Responder for Todo struct so we can return Todo from an action handler
impl Responder for Todo {
    type Error = Error;
    type Future = Ready<Result<HttpResponse, Error>>;

    fn respond_to(self, _req: &HttpRequest) -> Self::Future {
        let body = serde_json::to_string(&self).unwrap();
        // create response and set content type
        ready(Ok(HttpResponse::Ok()
            .content_type("application/json")
            .body(body)))
    }
}

// Implementation for Todo struct, functions for read/write/update and delete todo from database
impl Todo {
    pub async fn find_all(pool: &PgPool) -> Result<Vec<Todo>> {
        let mut todos = vec![];
        let recs = sqlx::query!(
            r#"
                SELECT id, description, done
                FROM todos
                ORDER BY id
            "#
        )
        .fetch_all(pool)
        .await?;

        for rec in recs {
            todos.push(Todo {
                id: rec.id,
                description: rec.description,
                done: rec.done,
            });
        }

        Ok(todos)
    }

    pub async fn find_by_id(id: i32, pool: &PgPool) -> Result<Todo> {
        let rec = sqlx::query!(
            r#"
                SELECT * FROM todos WHERE id = $1
            "#,
            id
        )
        .fetch_one(&*pool)
        .await?;

        Ok(Todo {
            id: rec.id,
            description: rec.description,
            done: rec.done,
        })
    }

    pub async fn create(todo: TodoRequest, pool: &PgPool) -> Result<Todo> {
        let mut tx = pool.begin().await?;
        let todo = sqlx::query("INSERT INTO todos (description, done) VALUES ($1, $2) RETURNING id, description, done")
            .bind(&todo.description)
            .bind(todo.done)
            .map(|row: PgRow| {
                Todo {
                    id: row.get(0),
                    description: row.get(1),
                    done: row.get(2)
                }
            })
            .fetch_one(&mut tx)
            .await?;

        tx.commit().await?;
        Ok(todo)
    }

    pub async fn update(id: i32, todo: TodoRequest, pool: &PgPool) -> Result<Todo> {
        let mut tx = pool.begin().await.unwrap();
        let todo = sqlx::query("UPDATE todos SET description = $1, done = $2 WHERE id = $3 RETURNING id, description, done")
            .bind(&todo.description)
            .bind(todo.done)
            .bind(id)
            .map(|row: PgRow| {
                Todo {
                    id: row.get(0),
                    description: row.get(1),
                    done: row.get(2)
                }
            })
            .fetch_one(&mut tx)
            .await?;

        tx.commit().await.unwrap();
        Ok(todo)
    }

    pub async fn delete(id: i32, pool: &PgPool) -> Result<u64> {
        let mut tx = pool.begin().await?;
        let deleted = sqlx::query("DELETE FROM todos WHERE id = $1")
            .bind(id)
            .execute(&mut tx)
            .await?;

        tx.commit().await?;
        Ok(deleted)
    }
}
73
database_interactions/sqlx_todo/src/todo/routes.rs
Normal file
@ -0,0 +1,73 @@
use crate::todo::{Todo, TodoRequest};
use actix_web::{delete, get, post, put, web, HttpResponse, Responder};
use sqlx::PgPool;

#[get("/todos")]
async fn find_all(db_pool: web::Data<PgPool>) -> impl Responder {
    let result = Todo::find_all(db_pool.get_ref()).await;
    match result {
        Ok(todos) => HttpResponse::Ok().json(todos),
        _ => HttpResponse::BadRequest()
            .body("Error trying to read all todos from database"),
    }
}

#[get("/todo/{id}")]
async fn find(id: web::Path<i32>, db_pool: web::Data<PgPool>) -> impl Responder {
    let result = Todo::find_by_id(id.into_inner(), db_pool.get_ref()).await;
    match result {
        Ok(todo) => HttpResponse::Ok().json(todo),
        _ => HttpResponse::BadRequest().body("Todo not found"),
    }
}

#[post("/todo")]
async fn create(
    todo: web::Json<TodoRequest>,
    db_pool: web::Data<PgPool>,
) -> impl Responder {
    let result = Todo::create(todo.into_inner(), db_pool.get_ref()).await;
    match result {
        Ok(todo) => HttpResponse::Ok().json(todo),
        _ => HttpResponse::BadRequest().body("Error trying to create new todo"),
    }
}

#[put("/todo/{id}")]
async fn update(
    id: web::Path<i32>,
    todo: web::Json<TodoRequest>,
    db_pool: web::Data<PgPool>,
) -> impl Responder {
    let result =
        Todo::update(id.into_inner(), todo.into_inner(), db_pool.get_ref()).await;
    match result {
        Ok(todo) => HttpResponse::Ok().json(todo),
        _ => HttpResponse::BadRequest().body("Todo not found"),
    }
}

#[delete("/todo/{id}")]
async fn delete(id: web::Path<i32>, db_pool: web::Data<PgPool>) -> impl Responder {
    let result = Todo::delete(id.into_inner(), db_pool.get_ref()).await;
    match result {
        Ok(rows) => {
            if rows > 0 {
                HttpResponse::Ok()
                    .body(format!("Successfully deleted {} record(s)", rows))
            } else {
                HttpResponse::BadRequest().body("Todo not found")
            }
        }
        _ => HttpResponse::BadRequest().body("Todo not found"),
    }
}

// function that will be called on new Application to configure routes for this module
pub fn init(cfg: &mut web::ServiceConfig) {
    cfg.service(find_all);
    cfg.service(find);
    cfg.service(create);
    cfg.service(update);
    cfg.service(delete);
}
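Putting the handlers above together with the route listing in `main.rs`, a quick way to try the API from the command line (a sketch: host and port come from `.env.example`, and the todo id is a placeholder for whatever the create call returns):

```bash
# create a todo, then list, update and delete it (id 1 is a placeholder)
curl -X POST http://127.0.0.1:5000/todo \
     -H 'Content-Type: application/json' \
     -d '{"description": "learn actix and sqlx", "done": false}'

curl http://127.0.0.1:5000/todos

curl -X PUT http://127.0.0.1:5000/todo/1 \
     -H 'Content-Type: application/json' \
     -d '{"description": "learn actix and sqlx", "done": true}'

curl -X DELETE http://127.0.0.1:5000/todo/1
```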