Compare commits
27 Commits: v0.1.0-dev ... v0.3.2

SHA1
70a547ca94
89c89a5524
a58b0cb40b
f601442f0e
3424d31151
332e9d3378
2229d98ab6
7f5f1bae2f
d7c3443022
6bdc893147
4b8430e114
238fb15e6d
5b0592f51d
9be38542c1
7e189e84d8
79f6ebdc09
68a9998572
b6787de66b
4d3415acf2
c9873d95d7
f105de7c80
9b77a8dd78
88f45645b3
dda88ce0a0
5893710431
0a678228dd
bfc14c96a7

2  .env.sample  Normal file
@@ -0,0 +1,2 @@
DATABASE_URL=postgres://username:password@localhost/database_name
SECRET_KEY=refero34o8rfhfjn983thf39fhc943rf923n3h
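
The two variables above are consumed at runtime through dotenvy and std::env, as seen in src/lib.rs and src/token_stuff/mod.rs later in this diff. A minimal sketch of that pattern (standalone program, not part of the crate; variable names taken from the sample file):

    use std::env;

    fn main() {
        // Load .env into the process environment; .ok() ignores a missing file.
        dotenvy::dotenv().ok();

        let db_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set in .env");
        let secret = env::var("SECRET_KEY").expect("SECRET_KEY must be set in .env");

        // Avoid printing secrets themselves; lengths are enough to confirm they loaded.
        println!("db url length: {}, secret length: {}", db_url.len(), secret.len());
    }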

@@ -19,7 +19,7 @@ jobs:
      - name: Install Rust
        uses: actions-rs/toolchain@v1
        with:
-          toolchain: 1.85.0
+          toolchain: 1.86.0
          components: cargo

      - name: Extract Version from Cargo.toml

@@ -18,33 +18,73 @@ jobs:
      - uses: actions/checkout@v4
      - uses: actions-rust-lang/setup-rust-toolchain@v1
        with:
-          toolchain: 1.85.0
+          toolchain: 1.86.0
      - run: |
          mkdir -p ~/.ssh
-          echo "${{ secrets.MYREPO_TOKEN }}" > ~/.ssh/gitlab_deploy_key
-          chmod 600 ~/.ssh/gitlab_deploy_key
-          ssh-keyscan git.kundeng.us ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBLl/OZiKVDxwnyvMxa+rjKvDpKqTxH1GWuGuDPLmENGQMbTVulajZWr9x8Q1cotoJiHZkt7DA5vczcjB/4lwgWA= >> ~/.ssh/known_hosts
+          echo "${{ secrets.MYREPO_TOKEN }}" > ~/.ssh/icarus_models_deploy_key
+          chmod 600 ~/.ssh/icarus_models_deploy_key
+          ssh-keyscan ${{ vars.MYHOST }} >> ~/.ssh/known_hosts

          eval $(ssh-agent -s)
-          ssh-add -v ~/.ssh/gitlab_deploy_key
+          ssh-add -v ~/.ssh/icarus_models_deploy_key

          cargo check

  test:
    name: Test Suite
    runs-on: ubuntu-24.04
+    # --- Add database service definition ---
+    services:
+      postgres:
+        image: postgres:17.4 # Or pin to a more specific version like 14.9
+        env:
+          # Use secrets for DB init, with fallbacks for flexibility
+          POSTGRES_USER: ${{ secrets.DB_TEST_USER || 'testuser' }}
+          POSTGRES_PASSWORD: ${{ secrets.DB_TEST_PASSWORD || 'testpassword' }}
+          POSTGRES_DB: ${{ secrets.DB_TEST_NAME || 'testdb' }}
+        # Options to wait until the database is ready
+        options: >-
+          --health-cmd pg_isready
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5

    steps:
      - uses: actions/checkout@v4
      - uses: actions-rust-lang/setup-rust-toolchain@v1
        with:
-          toolchain: 1.85.0
-      - run: |
+          toolchain: 1.86.0
+      # --- Add this step for explicit verification ---
+      - name: Verify Docker Environment
+        run: |
+          echo "Runner User Info:"
+          id
+          echo "Checking Docker Version:"
+          docker --version
+          echo "Checking Docker Daemon Status (info):"
+          docker info
+          echo "Checking Docker Daemon Status (ps):"
+          docker ps -a
+          echo "Docker environment check complete."
+        # NOTE: Do NOT use continue-on-error here.
+        # If Docker isn't working as expected, the job SHOULD fail here.
+      - name: Run tests
+        env:
+          # Define DATABASE_URL for tests to use
+          DATABASE_URL: postgresql://${{ secrets.DB_TEST_USER || 'testuser' }}:${{ secrets.DB_TEST_PASSWORD || 'testpassword' }}@postgres:5432/${{ secrets.DB_TEST_NAME || 'testdb' }}
+          RUST_LOG: info # Optional: configure test log level
+          SECRET_KEY: ${{ secrets.TOKEN_SECRET_KEY }}
+          # Make SSH agent available if tests fetch private dependencies
+          SSH_AUTH_SOCK: ${{ env.SSH_AUTH_SOCK }}
+        run: |
          mkdir -p ~/.ssh
-          echo "${{ secrets.MYREPO_TOKEN }}" > ~/.ssh/gitlab_deploy_key
-          chmod 600 ~/.ssh/gitlab_deploy_key
-          ssh-keyscan git.kundeng.us ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBLl/OZiKVDxwnyvMxa+rjKvDpKqTxH1GWuGuDPLmENGQMbTVulajZWr9x8Q1cotoJiHZkt7DA5vczcjB/4lwgWA= >> ~/.ssh/known_hosts
+          echo "${{ secrets.MYREPO_TOKEN }}" > ~/.ssh/icarus_models_deploy_key
+          chmod 600 ~/.ssh/icarus_models_deploy_key
+          ssh-keyscan ${{ vars.MYHOST }} >> ~/.ssh/known_hosts

          eval $(ssh-agent -s)
-          ssh-add -v ~/.ssh/gitlab_deploy_key
+          ssh-add -v ~/.ssh/icarus_models_deploy_key

          cargo test

  fmt:
@@ -54,16 +94,16 @@ jobs:
      - uses: actions/checkout@v4
      - uses: actions-rust-lang/setup-rust-toolchain@v1
        with:
-          toolchain: 1.85.0
+          toolchain: 1.86.0
      - run: rustup component add rustfmt
      - run: |
          mkdir -p ~/.ssh
-          echo "${{ secrets.MYREPO_TOKEN }}" > ~/.ssh/gitlab_deploy_key
-          chmod 600 ~/.ssh/gitlab_deploy_key
-          ssh-keyscan git.kundeng.us ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBLl/OZiKVDxwnyvMxa+rjKvDpKqTxH1GWuGuDPLmENGQMbTVulajZWr9x8Q1cotoJiHZkt7DA5vczcjB/4lwgWA= >> ~/.ssh/known_hosts
+          echo "${{ secrets.MYREPO_TOKEN }}" > ~/.ssh/icarus_models_deploy_key
+          chmod 600 ~/.ssh/icarus_models_deploy_key
+          ssh-keyscan ${{ vars.MYHOST }} >> ~/.ssh/known_hosts

          eval $(ssh-agent -s)
-          ssh-add -v ~/.ssh/gitlab_deploy_key
+          ssh-add -v ~/.ssh/icarus_models_deploy_key
          cargo fmt --all -- --check

  clippy:
@@ -73,16 +113,16 @@ jobs:
      - uses: actions/checkout@v4
      - uses: actions-rust-lang/setup-rust-toolchain@v1
        with:
-          toolchain: 1.85.0
+          toolchain: 1.86.0
      - run: rustup component add clippy
      - run: |
          mkdir -p ~/.ssh
-          echo "${{ secrets.MYREPO_TOKEN }}" > ~/.ssh/gitlab_deploy_key
-          chmod 600 ~/.ssh/gitlab_deploy_key
-          ssh-keyscan git.kundeng.us ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBLl/OZiKVDxwnyvMxa+rjKvDpKqTxH1GWuGuDPLmENGQMbTVulajZWr9x8Q1cotoJiHZkt7DA5vczcjB/4lwgWA= >> ~/.ssh/known_hosts
+          echo "${{ secrets.MYREPO_TOKEN }}" > ~/.ssh/icarus_models_deploy_key
+          chmod 600 ~/.ssh/icarus_models_deploy_key
+          ssh-keyscan ${{ vars.MYHOST }} >> ~/.ssh/known_hosts

          eval $(ssh-agent -s)
-          ssh-add -v ~/.ssh/gitlab_deploy_key
+          ssh-add -v ~/.ssh/icarus_models_deploy_key
          cargo clippy -- -D warnings

  build:
@@ -92,14 +132,13 @@
      - uses: actions/checkout@v4
      - uses: actions-rust-lang/setup-rust-toolchain@v1
        with:
-          toolchain: 1.85.0
+          toolchain: 1.86.0
      - run: |
          mkdir -p ~/.ssh
-          echo "${{ secrets.MYREPO_TOKEN }}" > ~/.ssh/gitlab_deploy_key
-          chmod 600 ~/.ssh/gitlab_deploy_key
-          ssh-keyscan git.kundeng.us ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBLl/OZiKVDxwnyvMxa+rjKvDpKqTxH1GWuGuDPLmENGQMbTVulajZWr9x8Q1cotoJiHZkt7DA5vczcjB/4lwgWA= >> ~/.ssh/known_hosts
+          echo "${{ secrets.MYREPO_TOKEN }}" > ~/.ssh/icarus_models_deploy_key
+          chmod 600 ~/.ssh/icarus_models_deploy_key
+          ssh-keyscan ${{ vars.MYHOST }} >> ~/.ssh/known_hosts

          eval $(ssh-agent -s)
-          ssh-add -v ~/.ssh/gitlab_deploy_key
+          ssh-add -v ~/.ssh/icarus_models_deploy_key
          cargo build --release

1  .gitignore  vendored
@@ -1,2 +1,3 @@
/target
Cargo.lock
+.env

21  Cargo.toml
@@ -1,12 +1,27 @@
[package]
name = "icarus_auth"
-version = "0.1.0"
+version = "0.3.2"
edition = "2024"
+rust-version = "1.86"

[dependencies]
axum = { version = "0.8.3" }
serde = { version = "1.0.218", features = ["derive"] }
serde_json = { version = "1.0.139" }
tokio = { version = "1.44.1", features = ["rt-multi-thread"] }
-tracing-subscriber = "0.3.19"
-icarus-models = { git = "ssh://git@git.kundeng.us/phoenix/icarus-models.git", tag = "v0.1.14" }
+tracing-subscriber = { version = "0.3.19" }
+tower = { version = "0.5.2" }
+hyper = { version = "1.6.0" }
+sqlx = { version = "0.8.3", features = ["postgres", "runtime-tokio-native-tls", "time", "uuid"] }
+dotenvy = { version = "0.15.7" }
+uuid = { version = "1.16.0", features = ["v4", "serde"] }
+argon2 = { version = "0.5.3", features = ["std"] } # Use the latest 0.5.x version
+rand = { version = "0.9" }
+time = { version = "0.3.41", features = ["macros", "serde"] }
+josekit = { version = "0.10.1" }
+icarus_models = { git = "ssh://git@git.kundeng.us/phoenix/icarus_models.git", tag = "v0.4.1" }
+
+[dev-dependencies]
+http-body-util = { version = "0.1.3" }
+url = { version = "2.5" }
+once_cell = { version = "1.19" } # Useful for lazy initialization in tests/app setup

22  migrations/20250402221858_init_migrate.sql  Normal file
@@ -0,0 +1,22 @@
-- Add migration script here
CREATE EXTENSION IF NOT EXISTS pgcrypto;

CREATE TABLE IF NOT EXISTS "user" (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    username TEXT NOT NULL,
    password TEXT NOT NULL,
    email TEXT NOT NULL,
    phone TEXT NOT NULL,
    firstname TEXT NOT NULL,
    lastname TEXT NOT NULL,
    email_verified BOOL NOT NULL,
    date_created TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    status TEXT NOT NULL,
    last_login TIMESTAMPTZ NULL DEFAULT NOW(),
    salt_id UUID NOT NULL
);

CREATE TABLE IF NOT EXISTS "salt" (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    salt TEXT NOT NULL
);

25  run_migrations.txt  Normal file
@@ -0,0 +1,25 @@
# Make sure role has CREATEDB
ALTER ROLE username_that_needs_permission CREATEDB;

# Install migrations
cargo install sqlx-cli

# Make sure to populate DATABASE_URL with correct value.
# By default, the DATABASE_URL found in .env file will be used
export DATABASE_URL="postgres://icarus_op_test:password@localhost/icarus_auth_test"

# init
sqlx migrate add init_migration
sqlx migrate run

# Create
sqlx database create

# Drop
sqlx database drop

# setup
sqlx database setup

# Reset
sqlx database reset
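
Besides the manual sqlx-cli workflow above, the same ./migrations directory is also applied at startup via the sqlx::migrate! macro (see src/lib.rs later in this diff). A minimal sketch of that embedded approach, assuming DATABASE_URL is already exported and illustrative pool settings:

    use sqlx::postgres::PgPoolOptions;

    #[tokio::main]
    async fn main() -> Result<(), sqlx::Error> {
        let url = std::env::var("DATABASE_URL").expect("DATABASE_URL must be set");

        // Connection count here is illustrative; the crate itself uses MAXCONN = 5.
        let pool = PgPoolOptions::new().max_connections(5).connect(&url).await?;

        // Migrations are embedded at compile time from ./migrations and applied idempotently.
        sqlx::migrate!("./migrations")
            .run(&pool)
            .await
            .expect("Failed to run migrations");
        Ok(())
    }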

37  src/callers/common.rs  Normal file
@@ -0,0 +1,37 @@
pub mod response {
    use serde::{Deserialize, Serialize};

    #[derive(Deserialize, Serialize)]
    pub struct TestResult {
        pub message: String,
    }
}

pub mod endpoint {
    use super::*;
    use axum::{Extension, Json, http::StatusCode};

    // basic handler that responds with a static string
    pub async fn root() -> &'static str {
        "Hello, World!"
    }

    pub async fn db_ping(
        Extension(pool): Extension<sqlx::PgPool>,
    ) -> (StatusCode, Json<response::TestResult>) {
        match sqlx::query("SELECT 1").execute(&pool).await {
            Ok(_) => {
                let tr = response::TestResult {
                    message: String::from("This works"),
                };
                (StatusCode::OK, Json(tr))
            }
            Err(e) => (
                StatusCode::BAD_REQUEST,
                Json(response::TestResult {
                    message: e.to_string(),
                }),
            ),
        }
    }
}

79  src/callers/login.rs  Normal file
@@ -0,0 +1,79 @@
pub mod request {
    use serde::{Deserialize, Serialize};

    #[derive(Default, Deserialize, Serialize)]
    pub struct Request {
        pub username: String,
        pub password: String,
    }
}

pub mod response {
    use serde::{Deserialize, Serialize};

    #[derive(Default, Deserialize, Serialize)]
    pub struct Response {
        pub message: String,
        pub data: Vec<icarus_models::login_result::LoginResult>,
    }
}

pub mod endpoint {
    use axum::{Json, http::StatusCode};

    use crate::hashing;
    use crate::repo;
    use crate::token_stuff;

    use super::request;
    use super::response;

    async fn not_found(message: &str) -> (StatusCode, Json<response::Response>) {
        (
            StatusCode::NOT_FOUND,
            Json(response::Response {
                message: String::from(message),
                data: Vec::new(),
            }),
        )
    }

    pub async fn login(
        axum::Extension(pool): axum::Extension<sqlx::PgPool>,
        Json(payload): Json<request::Request>,
    ) -> (StatusCode, Json<response::Response>) {
        // Check if user exists
        match repo::user::get(&pool, &payload.username).await {
            Ok(user) => {
                if hashing::verify_password(&payload.password, user.password.clone()).unwrap() {
                    // Create token
                    let key = token_stuff::get_key().unwrap();
                    let (token_literal, duration) = token_stuff::create_token(&key).unwrap();

                    if token_stuff::verify_token(&key, &token_literal) {
                        (
                            StatusCode::OK,
                            Json(response::Response {
                                message: String::from("Successful"),
                                data: vec![icarus_models::login_result::LoginResult {
                                    id: user.id,
                                    username: user.username,
                                    token: token_literal,
                                    token_type: String::from(token_stuff::TOKENTYPE),
                                    expiration: duration,
                                }],
                            }),
                        )
                    } else {
                        return not_found("Could not verify password").await;
                    }
                } else {
                    return not_found("Error Hashing").await;
                }
            }
            Err(err) => {
                return not_found(&err.to_string()).await;
            }
        }
    }
}

10  src/callers/mod.rs  Normal file
@@ -0,0 +1,10 @@
pub mod common;
pub mod login;
pub mod register;

pub mod endpoints {
    pub const ROOT: &str = "/";
    pub const REGISTER: &str = "/api/v2/register";
    pub const DBTEST: &str = "/api/v2/test/db";
    pub const LOGIN: &str = "/api/v2/login";
}

105  src/callers/register.rs  Normal file
@@ -0,0 +1,105 @@
use axum::{Json, http::StatusCode};

use crate::hashing;
use crate::repo;

pub mod request {
    use serde::{Deserialize, Serialize};

    #[derive(Default, Deserialize, Serialize)]
    pub struct Request {
        #[serde(skip_serializing_if = "String::is_empty")]
        pub username: String,
        #[serde(skip_serializing_if = "String::is_empty")]
        pub password: String,
        #[serde(skip_serializing_if = "String::is_empty")]
        pub email: String,
        #[serde(skip_serializing_if = "String::is_empty")]
        pub phone: String,
        #[serde(skip_serializing_if = "String::is_empty")]
        pub firstname: String,
        #[serde(skip_serializing_if = "String::is_empty")]
        pub lastname: String,
    }
}

pub mod response {
    use serde::{Deserialize, Serialize};

    #[derive(Deserialize, Serialize)]
    pub struct Response {
        pub message: String,
        pub data: Vec<icarus_models::user::User>,
    }
}

pub async fn register_user(
    axum::Extension(pool): axum::Extension<sqlx::PgPool>,
    Json(payload): Json<request::Request>,
) -> (StatusCode, Json<response::Response>) {
    let mut user = icarus_models::user::User {
        id: uuid::Uuid::nil(),
        username: payload.username.clone(),
        password: payload.password.clone(),
        email: payload.email.clone(),
        phone: payload.phone.clone(),
        firstname: payload.firstname.clone(),
        lastname: payload.lastname.clone(),
        status: String::from("Active"),
        email_verified: true,
        date_created: Some(time::OffsetDateTime::now_utc()),
        last_login: None,
        salt_id: uuid::Uuid::nil(),
    };

    match repo::user::exists(&pool, &user.username).await {
        Ok(res) => {
            if res {
                (
                    StatusCode::NOT_FOUND,
                    Json(response::Response {
                        message: String::from("Error"),
                        data: vec![user],
                    }),
                )
            } else {
                let salt_string = hashing::generate_salt().unwrap();
                let mut salt = icarus_models::user::salt::Salt::default();
                let generated_salt = salt_string;
                salt.salt = generated_salt.to_string();
                salt.id = repo::salt::insert(&pool, &salt).await.unwrap();
                user.salt_id = salt.id;
                let hashed_password =
                    hashing::hash_password(&user.password, &generated_salt).unwrap();
                user.password = hashed_password;

                match repo::user::insert(&pool, &user).await {
                    Ok(id) => {
                        user.id = id;
                        (
                            StatusCode::CREATED,
                            Json(response::Response {
                                message: String::from("User created"),
                                data: vec![user],
                            }),
                        )
                    }
                    Err(err) => (
                        StatusCode::BAD_REQUEST,
                        Json(response::Response {
                            message: err.to_string(),
                            data: vec![user],
                        }),
                    ),
                }
            }
        }
        Err(err) => (
            StatusCode::BAD_REQUEST,
            Json(response::Response {
                message: err.to_string(),
                data: vec![user],
            }),
        ),
    }
}
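
For reference, register_user above deserializes a JSON body into request::Request and is routed at POST /api/v2/register (see src/callers/mod.rs). A hedged sketch of building such a payload with serde_json; field names come from the struct, values are the illustrative ones used by the integration tests:

    use serde_json::json;

    fn main() {
        // Illustrative values only; send this as the body of POST /api/v2/register.
        let payload = json!({
            "username": "somethingsss",
            "password": "Raindown!",
            "email": "dev@null.com",
            "phone": "1234567890",
            "firstname": "Bob",
            "lastname": "Smith",
        });
        println!("{}", payload);
    }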

10  src/config/mod.rs  Normal file
@@ -0,0 +1,10 @@
pub fn get_full() -> String {
    get_address() + ":" + &get_port()
}
fn get_address() -> String {
    String::from("0.0.0.0")
}

fn get_port() -> String {
    String::from("3000")
}

101  src/hashing/mod.rs  Normal file
@@ -0,0 +1,101 @@
use argon2::{
    Argon2, // The Argon2 algorithm struct
    PasswordVerifier,
    password_hash::{
        PasswordHasher,
        SaltString,
        rand_core::OsRng, // Secure random number generator
    },
};

pub fn generate_salt() -> Result<SaltString, argon2::Error> {
    // Generate a random salt
    // SaltString::generate uses OsRng internally for cryptographic security
    Ok(SaltString::generate(&mut OsRng))
}

pub fn get_salt(s: &str) -> Result<SaltString, argon2::password_hash::Error> {
    SaltString::from_b64(s)
}

pub fn hash_password(
    password: &String,
    salt: &SaltString,
) -> Result<String, argon2::password_hash::Error> {
    let password_bytes = password.as_bytes();

    // Create an Argon2 instance with default parameters (recommended)
    // You could customize parameters here if needed, but defaults are strong
    let argon2 = Argon2::default();

    // Hash the password with the salt
    // The output is a PasswordHash string format that includes algorithm, version,
    // parameters, salt, and the hash itself.
    Ok(argon2.hash_password(password_bytes, salt)?.to_string())
}

pub fn verify_password(
    password_attempt: &String,
    stored_hash: String,
) -> Result<bool, argon2::password_hash::Error> {
    let password_bytes = password_attempt.as_bytes();

    // Parse the stored hash string
    // This extracts the salt, parameters, and hash digest
    let parsed_hash = argon2::PasswordHash::new(stored_hash.as_str())?;

    // Create an Argon2 instance (it will use the parameters from the parsed hash)
    // Verify the password against the parsed hash
    // This automatically uses the correct salt and parameters embedded in `parsed_hash`
    match Argon2::default().verify_password(password_bytes, &parsed_hash) {
        Ok(()) => Ok(true),                                       // Passwords match
        Err(argon2::password_hash::Error::Password) => Ok(false), // Passwords don't match
        Err(e) => Err(e), // Some other error occurred (e.g., invalid hash format)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_hash_password() {
        let some_password = String::from("somethingrandom");
        match hash_password(&some_password, &generate_salt().unwrap()) {
            Ok(p) => match verify_password(&some_password, p.clone()) {
                Ok(res) => {
                    assert_eq!(res, true);
                }
                Err(err) => {
                    assert!(false, "Error: {:?}", err.to_string());
                }
            },
            Err(eerr) => {
                assert!(false, "Error: {:?}", eerr.to_string());
            }
        }
    }

    #[test]
    fn test_wrong_password() {
        let some_password = String::from("somethingrandom");
        match hash_password(&some_password, &generate_salt().unwrap()) {
            Ok(p) => {
                match verify_password(&some_password, p.clone()) {
                    Ok(res) => {
                        assert_eq!(res, true, "Passwords are not verified");
                    }
                    Err(err) => {
                        assert!(false, "Error: {:?}", err.to_string());
                    }
                }
                let wrong_password = String::from("Differentanotherlevel");
                let result = verify_password(&wrong_password, p.clone()).unwrap();
                assert_eq!(false, result, "Passwords should not match");
            }
            Err(err) => {
                assert!(false, "Error: {:?}", err.to_string());
            }
        }
    }
}
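
A minimal sketch of how these helpers compose at registration time, mirroring the flow in src/callers/register.rs but with the database inserts omitted (assumes the icarus_auth crate from this diff is available):

    use icarus_auth::hashing;

    fn main() {
        let password = String::from("Raindown!");

        // Generate a random salt, hash the password with it, then verify an attempt
        // against the stored PHC-format hash string.
        let salt = hashing::generate_salt().unwrap();
        let stored_hash = hashing::hash_password(&password, &salt).unwrap();
        assert!(hashing::verify_password(&password, stored_hash).unwrap());
    }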

51  src/lib.rs  Normal file
@@ -0,0 +1,51 @@
pub mod callers;
pub mod config;
pub mod hashing;
pub mod repo;
pub mod token_stuff;

pub mod keys {
    pub const DBURL: &str = "DATABASE_URL";

    pub mod error {
        pub const ERROR: &str = "DATABASE_URL must be set in .env";
    }
}

mod connection_settings {
    pub const MAXCONN: u32 = 5;
}

pub mod db {

    use sqlx::postgres::PgPoolOptions;
    use std::env;

    use crate::{connection_settings, keys};

    pub async fn create_pool() -> Result<sqlx::PgPool, sqlx::Error> {
        let database_url = get_db_url().await;
        println!("Database url: {:?}", database_url);

        PgPoolOptions::new()
            .max_connections(connection_settings::MAXCONN)
            .connect(&database_url)
            .await
    }

    async fn get_db_url() -> String {
        #[cfg(debug_assertions)] // Example: Only load .env in debug builds
        dotenvy::dotenv().ok();

        env::var(keys::DBURL).expect(keys::error::ERROR)
    }

    pub async fn migrations(pool: &sqlx::PgPool) {
        // Run migrations using the sqlx::migrate! macro
        // Assumes your migrations are in a ./migrations folder relative to Cargo.toml
        sqlx::migrate!("./migrations")
            .run(pool)
            .await
            .expect("Failed to run migrations");
    }
}

362  src/main.rs
@@ -1,41 +1,353 @@
-use axum::{
-    // Json,
-    Router,
-    // http::StatusCode,
-    routing::get,
-    // routing::{get, post},
-};
-// use serde::{Deserialize, Serialize};
+use icarus_auth::callers;
+use icarus_auth::config;

 #[tokio::main]
 async fn main() {
     // initialize tracing
     tracing_subscriber::fmt::init();

-    // build our application with a route
-    let app = Router::new()
-        // `GET /` goes to `root`
-        .route("/", get(root));
-    // `POST /users` goes to `create_user`
-    // .route("/users", post(create_user));
+    let app = init::app().await;

     // run our app with hyper, listening globally on port 3000
-    let listener = tokio::net::TcpListener::bind(get_full()).await.unwrap();
+    let url = config::get_full();
+    let listener = tokio::net::TcpListener::bind(url).await.unwrap();
     axum::serve(listener, app).await.unwrap();
 }

-fn get_full() -> String {
-    get_address() + ":" + &get_port()
-}
-fn get_address() -> String {
-    String::from("0.0.0.0")
-}
-
-fn get_port() -> String {
-    String::from("3000")
-}
-
-// basic handler that responds with a static string
-async fn root() -> &'static str {
-    "Hello, World!"
+mod init {
+    use axum::{
+        Router,
+        routing::{get, post},
+    };
+
+    use crate::callers;
+
+    pub async fn routes() -> Router {
+        // build our application with a route
+        Router::new()
+            .route(
+                callers::endpoints::DBTEST,
+                get(callers::common::endpoint::db_ping),
+            )
+            .route(
+                callers::endpoints::ROOT,
+                get(callers::common::endpoint::root),
+            )
+            .route(
+                callers::endpoints::REGISTER,
+                post(callers::register::register_user),
+            )
+            .route(
+                callers::endpoints::LOGIN,
+                post(callers::login::endpoint::login),
+            )
+    }
+
+    pub async fn app() -> Router {
+        let pool = icarus_auth::db::create_pool()
+            .await
+            .expect("Failed to create pool");
+
+        icarus_auth::db::migrations(&pool).await;
+
+        routes().await.layer(axum::Extension(pool))
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    use axum::{
+        body::Body,
+        http::{Request, StatusCode},
+    };
+    use http_body_util::BodyExt;
+    use serde_json::json;
+    use tower::ServiceExt; // for `call`, `oneshot`, and `ready`
+
+    mod db_mgr {
+        use std::str::FromStr;
+
+        use icarus_auth::keys;
+
+        pub const LIMIT: usize = 6;
+
+        pub async fn get_pool() -> Result<sqlx::PgPool, sqlx::Error> {
+            let tm_db_url = std::env::var(keys::DBURL).expect("DATABASE_URL must be present");
+            let tm_options = sqlx::postgres::PgConnectOptions::from_str(&tm_db_url).unwrap();
+            sqlx::PgPool::connect_with(tm_options).await
+        }
+
+        pub async fn generate_db_name() -> String {
+            let db_name =
+                get_database_name().unwrap() + &"_" + &uuid::Uuid::new_v4().to_string()[..LIMIT];
+            db_name
+        }
+
+        pub async fn connect_to_db(db_name: &str) -> Result<sqlx::PgPool, sqlx::Error> {
+            let db_url = std::env::var(keys::DBURL).expect("DATABASE_URL must be set for tests");
+            let options = sqlx::postgres::PgConnectOptions::from_str(&db_url)?.database(db_name);
+            sqlx::PgPool::connect_with(options).await
+        }
+
+        pub async fn create_database(
+            template_pool: &sqlx::PgPool,
+            db_name: &str,
+        ) -> Result<(), sqlx::Error> {
+            let create_query = format!("CREATE DATABASE {}", db_name);
+            match sqlx::query(&create_query).execute(template_pool).await {
+                Ok(_) => Ok(()),
+                Err(e) => Err(e),
+            }
+        }
+
+        // Function to drop a database
+        pub async fn drop_database(
+            template_pool: &sqlx::PgPool,
+            db_name: &str,
+        ) -> Result<(), sqlx::Error> {
+            let drop_query = format!("DROP DATABASE IF EXISTS {} WITH (FORCE)", db_name);
+            sqlx::query(&drop_query).execute(template_pool).await?;
+            Ok(())
+        }
+
+        pub fn get_database_name() -> Result<String, Box<dyn std::error::Error>> {
+            dotenvy::dotenv().ok(); // Load .env file if it exists
+
+            match std::env::var(keys::DBURL) {
+                Ok(database_url) => {
+                    let parsed_url = url::Url::parse(&database_url)?;
+                    if parsed_url.scheme() == "postgres" || parsed_url.scheme() == "postgresql" {
+                        match parsed_url
+                            .path_segments()
+                            .and_then(|segments| segments.last().map(|s| s.to_string()))
+                        {
+                            Some(sss) => Ok(sss),
+                            None => Err("Error parsing".into()),
+                        }
+                    } else {
+                        // Handle other database types if needed
+                        Err("Error parsing".into())
+                    }
+                }
+                Err(_) => {
+                    // DATABASE_URL environment variable not found
+                    Err("Error parsing".into())
+                }
+            }
+        }
+    }
+
+    fn get_test_register_request() -> icarus_auth::callers::register::request::Request {
+        icarus_auth::callers::register::request::Request {
+            username: String::from("somethingsss"),
+            password: String::from("Raindown!"),
+            email: String::from("dev@null.com"),
+            phone: String::from("1234567890"),
+            firstname: String::from("Bob"),
+            lastname: String::from("Smith"),
+        }
+    }
+
+    fn get_test_register_payload(
+        usr: &icarus_auth::callers::register::request::Request,
+    ) -> serde_json::Value {
+        json!({
+            "username": &usr.username,
+            "password": &usr.password,
+            "email": &usr.email,
+            "phone": &usr.phone,
+            "firstname": &usr.firstname,
+            "lastname": &usr.lastname,
+        })
+    }
+
+    #[tokio::test]
+    async fn test_hello_world() {
+        let app = init::app().await;
+
+        // `Router` implements `tower::Service<Request<Body>>` so we can
+        // call it like any tower service, no need to run an HTTP server.
+        let response = app
+            .oneshot(
+                Request::builder()
+                    .uri(callers::endpoints::ROOT)
+                    .body(Body::empty())
+                    .unwrap(),
+            )
+            .await
+            .unwrap();
+
+        assert_eq!(response.status(), StatusCode::OK);
+
+        let body = response.into_body().collect().await.unwrap().to_bytes();
+        assert_eq!(&body[..], b"Hello, World!");
+    }
+
+    #[tokio::test]
+    async fn test_register_user() {
+        let tm_pool = db_mgr::get_pool().await.unwrap();
+
+        let db_name = db_mgr::generate_db_name().await;
+
+        match db_mgr::create_database(&tm_pool, &db_name).await {
+            Ok(_) => {
+                println!("Success");
+            }
+            Err(e) => {
+                assert!(false, "Error: {:?}", e.to_string());
+            }
+        }
+
+        let pool = db_mgr::connect_to_db(&db_name).await.unwrap();
+
+        icarus_auth::db::migrations(&pool).await;
+
+        let app = init::routes().await.layer(axum::Extension(pool));
+
+        let usr = get_test_register_request();
+        let payload = get_test_register_payload(&usr);
+
+        let response = app
+            .oneshot(
+                Request::builder()
+                    .method(axum::http::Method::POST)
+                    .uri(callers::endpoints::REGISTER)
+                    .header(axum::http::header::CONTENT_TYPE, "application/json")
+                    .body(Body::from(payload.to_string()))
+                    .unwrap(),
+            )
+            .await;
+
+        match response {
+            Ok(resp) => {
+                assert_eq!(
+                    resp.status(),
+                    StatusCode::CREATED,
+                    "Message: {:?} {:?}",
+                    resp,
+                    usr.username
+                );
+                let body = axum::body::to_bytes(resp.into_body(), usize::MAX)
+                    .await
+                    .unwrap();
+                let parsed_body: callers::register::response::Response =
+                    serde_json::from_slice(&body).unwrap();
+                let returned_usr = &parsed_body.data[0];
+
+                assert_eq!(false, returned_usr.id.is_nil(), "Id is not populated");
+
+                assert_eq!(
+                    usr.username, returned_usr.username,
+                    "Usernames do not match"
+                );
+                assert!(returned_usr.date_created.is_some(), "Date Created is empty");
+            }
+            Err(err) => {
+                assert!(false, "Error: {:?}", err.to_string());
+            }
+        };
+
+        let _ = db_mgr::drop_database(&tm_pool, &db_name).await;
+    }
+
+    #[tokio::test]
+    async fn test_login_user() {
+        let tm_pool = db_mgr::get_pool().await.unwrap();
+
+        let db_name = db_mgr::generate_db_name().await;
+
+        match db_mgr::create_database(&tm_pool, &db_name).await {
+            Ok(_) => {
+                println!("Success");
+            }
+            Err(e) => {
+                assert!(false, "Error: {:?}", e.to_string());
+            }
+        }
+
+        let pool = db_mgr::connect_to_db(&db_name).await.unwrap();
+
+        icarus_auth::db::migrations(&pool).await;
+
+        let app = init::routes().await.layer(axum::Extension(pool));
+
+        let usr = get_test_register_request();
+        let payload = get_test_register_payload(&usr);
+
+        let response = app
+            .clone()
+            .oneshot(
+                Request::builder()
+                    .method(axum::http::Method::POST)
+                    .uri(callers::endpoints::REGISTER)
+                    .header(axum::http::header::CONTENT_TYPE, "application/json")
+                    .body(Body::from(payload.to_string()))
+                    .unwrap(),
+            )
+            .await;
+
+        match response {
+            Ok(resp) => {
+                assert_eq!(
+                    resp.status(),
+                    StatusCode::CREATED,
+                    "Message: {:?} {:?}",
+                    resp,
+                    usr.username
+                );
+                let body = axum::body::to_bytes(resp.into_body(), usize::MAX)
+                    .await
+                    .unwrap();
+                let parsed_body: callers::register::response::Response =
+                    serde_json::from_slice(&body).unwrap();
+                let returned_usr = &parsed_body.data[0];
+
+                assert_eq!(false, returned_usr.id.is_nil(), "Id is not populated");
+
+                assert_eq!(
+                    usr.username, returned_usr.username,
+                    "Usernames do not match"
+                );
+                assert!(returned_usr.date_created.is_some(), "Date Created is empty");
+
+                let login_payload = json!({
+                    "username": &usr.username,
+                    "password": &usr.password,
+                });
+
+                match app
+                    .oneshot(
+                        Request::builder()
+                            .method(axum::http::Method::POST)
+                            .uri(callers::endpoints::LOGIN)
+                            .header(axum::http::header::CONTENT_TYPE, "application/json")
+                            .body(Body::from(login_payload.to_string()))
+                            .unwrap(),
+                    )
+                    .await
+                {
+                    Ok(resp) => {
+                        assert_eq!(StatusCode::OK, resp.status(), "Status is not right");
+                        let body = axum::body::to_bytes(resp.into_body(), usize::MAX)
+                            .await
+                            .unwrap();
+                        let parsed_body: callers::login::response::Response =
+                            serde_json::from_slice(&body).unwrap();
+                        let login_result = &parsed_body.data[0];
+                        assert!(!login_result.id.is_nil(), "Id is nil");
+                    }
+                    Err(err) => {
+                        assert!(false, "Error: {:?}", err.to_string());
+                    }
+                }
+            }
+            Err(err) => {
+                assert!(false, "Error: {:?}", err.to_string());
+            }
+        };
+
+        let _ = db_mgr::drop_database(&tm_pool, &db_name).await;
+    }
 }

164  src/repo/mod.rs  Normal file
@@ -0,0 +1,164 @@
pub mod user {
    use sqlx::Row;

    #[derive(Debug, serde::Serialize, sqlx::FromRow)]
    pub struct InsertedData {
        pub id: uuid::Uuid,
        pub date_created: Option<time::OffsetDateTime>,
    }

    pub async fn get(
        pool: &sqlx::PgPool,
        username: &String,
    ) -> Result<icarus_models::user::User, sqlx::Error> {
        let result = sqlx::query(
            r#"
            SELECT * FROM "user" WHERE username = $1
            "#,
        )
        .bind(username)
        .fetch_optional(pool)
        .await;

        match result {
            Ok(r) => match r {
                Some(r) => Ok(icarus_models::user::User {
                    id: r.try_get("id")?,
                    username: r.try_get("username")?,
                    password: r.try_get("password")?,
                    email: r.try_get("email")?,
                    email_verified: r.try_get("email_verified")?,
                    phone: r.try_get("phone")?,
                    salt_id: r.try_get("salt_id")?,
                    firstname: r.try_get("firstname")?,
                    lastname: r.try_get("lastname")?,
                    date_created: r.try_get("date_created")?,
                    last_login: r.try_get("last_login")?,
                    status: r.try_get("status")?,
                }),
                None => Err(sqlx::Error::RowNotFound),
            },
            Err(e) => Err(e),
        }
    }

    pub async fn exists(pool: &sqlx::PgPool, username: &String) -> Result<bool, sqlx::Error> {
        let result = sqlx::query(
            r#"
            SELECT 1 FROM "user" WHERE username = $1
            "#,
        )
        .bind(username)
        .fetch_optional(pool)
        .await;

        match result {
            Ok(r) => Ok(r.is_some()),
            Err(e) => Err(e),
        }
    }

    pub async fn insert(
        pool: &sqlx::PgPool,
        user: &icarus_models::user::User,
    ) -> Result<uuid::Uuid, sqlx::Error> {
        let row = sqlx::query(
            r#"
            INSERT INTO "user" (username, password, email, phone, firstname, lastname, email_verified, status, salt_id)
            VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
            RETURNING id, date_created;
            "#)
        .bind(&user.username)
        .bind(&user.password)
        .bind(&user.email)
        .bind(&user.phone)
        .bind(&user.firstname)
        .bind(&user.lastname)
        .bind(user.email_verified)
        .bind(&user.status)
        .bind(user.salt_id)
        .fetch_one(pool)
        .await
        .map_err(|e| {
            eprintln!("Error inserting item: {}", e);
            e
        })?;

        let result = InsertedData {
            id: row.try_get("id").map_err(|_e| sqlx::Error::RowNotFound)?,
            date_created: row
                .try_get("date_created")
                .map_err(|_e| sqlx::Error::RowNotFound)?,
        };

        if !result.id.is_nil() {
            Ok(result.id)
        } else {
            Err(sqlx::Error::RowNotFound)
        }
    }
}

pub mod salt {
    use sqlx::Row;

    #[derive(Debug, serde::Serialize, sqlx::FromRow)]
    pub struct InsertedData {
        pub id: uuid::Uuid,
    }

    pub async fn get(
        pool: &sqlx::PgPool,
        id: &uuid::Uuid,
    ) -> Result<icarus_models::user::salt::Salt, sqlx::Error> {
        let result = sqlx::query(
            r#"
            SELECT * FROM "salt" WHERE id = $1
            "#,
        )
        .bind(id)
        .fetch_optional(pool)
        .await;

        match result {
            Ok(r) => match r {
                Some(r) => Ok(icarus_models::user::salt::Salt {
                    id: r.try_get("id")?,
                    salt: r.try_get("salt")?,
                }),
                None => Err(sqlx::Error::RowNotFound),
            },
            Err(e) => Err(e),
        }
    }

    pub async fn insert(
        pool: &sqlx::PgPool,
        salt: &icarus_models::user::salt::Salt,
    ) -> Result<uuid::Uuid, sqlx::Error> {
        let row = sqlx::query(
            r#"
            INSERT INTO "salt" (salt)
            VALUES ($1)
            RETURNING id;
            "#,
        )
        .bind(&salt.salt)
        .fetch_one(pool)
        .await
        .map_err(|e| {
            eprintln!("Error inserting item: {}", e);
            e
        })?;

        let result = InsertedData {
            id: row.try_get("id").map_err(|_e| sqlx::Error::RowNotFound)?,
        };

        if !result.id.is_nil() {
            Ok(result.id)
        } else {
            Err(sqlx::Error::RowNotFound)
        }
    }
}

96  src/token_stuff/mod.rs  Normal file
@@ -0,0 +1,96 @@
use josekit::{
    self,
    jws::{JwsHeader, alg::hmac::HmacJwsAlgorithm::Hs256},
    jwt::{self, JwtPayload},
};

use time;

pub const TOKENTYPE: &str = "JWT";
pub const KEY_ENV: &str = "SECRET_KEY";
pub const MESSAGE: &str = "Something random";
pub const ISSUER: &str = "icarus_auth";
pub const AUDIENCE: &str = "icarus";

pub fn get_key() -> Result<String, dotenvy::Error> {
    dotenvy::dotenv().ok();
    let key = std::env::var(KEY_ENV).expect("SECRET_KEY_NOT_FOUND");
    Ok(key)
}

pub fn get_issued() -> time::Result<time::OffsetDateTime> {
    Ok(time::OffsetDateTime::now_utc())
}

pub fn get_expiration(issued: &time::OffsetDateTime) -> Result<time::OffsetDateTime, time::Error> {
    let duration_expire = time::Duration::hours(4);
    Ok(*issued + duration_expire)
}

mod util {
    pub fn time_to_std_time(
        provided_time: &time::OffsetDateTime,
    ) -> Result<std::time::SystemTime, std::time::SystemTimeError> {
        let converted = std::time::SystemTime::from(*provided_time);
        Ok(converted)
    }
}

pub fn create_token(provided_key: &String) -> Result<(String, i64), josekit::JoseError> {
    let mut header = JwsHeader::new();
    header.set_token_type(TOKENTYPE);

    let mut payload = JwtPayload::new();
    payload.set_subject(MESSAGE);
    payload.set_issuer(ISSUER);
    payload.set_audience(vec![AUDIENCE]);
    match get_issued() {
        Ok(issued) => {
            let expire = get_expiration(&issued).unwrap();
            payload.set_issued_at(&util::time_to_std_time(&issued).unwrap());
            payload.set_expires_at(&util::time_to_std_time(&expire).unwrap());

            let key: String = if provided_key.is_empty() {
                get_key().unwrap()
            } else {
                provided_key.to_owned()
            };

            let signer = Hs256.signer_from_bytes(key.as_bytes()).unwrap();
            Ok((
                josekit::jwt::encode_with_signer(&payload, &header, &signer).unwrap(),
                (expire - time::OffsetDateTime::UNIX_EPOCH).whole_seconds(),
            ))
        }
        Err(e) => Err(josekit::JoseError::InvalidClaim(e.into())),
    }
}

pub fn verify_token(key: &String, token: &String) -> bool {
    let ver = Hs256.verifier_from_bytes(key.as_bytes()).unwrap();
    let (payload, _header) = jwt::decode_with_verifier(token, &ver).unwrap();
    match payload.subject() {
        Some(_sub) => true,
        None => false,
    }
}

#[cfg(test)]
mod tests {

    use super::*;

    #[test]
    fn test_tokenize() {
        let special_key = get_key().unwrap();
        match create_token(&special_key) {
            Ok((token, _duration)) => {
                let result = verify_token(&special_key, &token);
                assert!(result, "Token not verified");
            }
            Err(err) => {
                assert!(false, "Error: {:?}", err.to_string());
            }
        };
    }
}
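
A short sketch of the token roundtrip these helpers provide, as exercised by the login handler in src/callers/login.rs; it assumes the icarus_auth crate from this diff and a SECRET_KEY available via .env or the environment:

    use icarus_auth::token_stuff;

    fn main() {
        // get_key() loads SECRET_KEY via dotenvy/env; the token is an HS256-signed JWT.
        let key = token_stuff::get_key().unwrap();
        let (token, expiration) = token_stuff::create_token(&key).unwrap();
        assert!(token_stuff::verify_token(&key, &token));
        println!("token expires at (unix seconds): {}", expiration);
    }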