Compare commits


5 Commits

Author SHA1 Message Date
d7c3443022 dynamic_db (#17)
All checks were successful
Release Tagging / release (push) Successful in 39s
Rust Build / Check (push) Successful in 41s
Rust Build / Test Suite (push) Successful in 48s
Rust Build / Rustfmt (push) Successful in 28s
Rust Build / Clippy (push) Successful in 48s
Rust Build / build (push) Successful in 1m13s
Rust Build / Check (pull_request) Successful in 57s
Rust Build / Test Suite (pull_request) Successful in 48s
Rust Build / Rustfmt (pull_request) Successful in 26s
Rust Build / Clippy (pull_request) Successful in 46s
Rust Build / build (pull_request) Successful in 1m12s
Reviewed-on: #17
Co-authored-by: phoenix <kundeng94@gmail.com>
Co-committed-by: phoenix <kundeng94@gmail.com>
2025-04-05 01:30:35 +00:00
6bdc893147 Version bump (#13)
Some checks failed
Release Tagging / release (push) Failing after 34s
Rust Build / Check (pull_request) Successful in 50s
Rust Build / Test Suite (pull_request) Successful in 56s
Rust Build / Rustfmt (pull_request) Successful in 28s
Rust Build / Clippy (pull_request) Successful in 48s
Rust Build / build (pull_request) Successful in 1m14s
Rust Build / Check (push) Successful in 45s
Rust Build / Test Suite (push) Successful in 1m10s
Rust Build / Rustfmt (push) Successful in 30s
Rust Build / Clippy (push) Successful in 50s
Rust Build / build (push) Successful in 1m11s
Reviewed-on: #13
2025-04-03 16:50:09 +00:00
4b8430e114 Updated ssh key (#14)
Some checks failed
Release Tagging / release (push) Has been cancelled
Rust Build / Check (push) Has been cancelled
Rust Build / Test Suite (push) Has been cancelled
Rust Build / Rustfmt (push) Has been cancelled
Rust Build / Clippy (push) Has been cancelled
Rust Build / build (push) Has been cancelled
Rust Build / Check (pull_request) Successful in 45s
Rust Build / Test Suite (pull_request) Successful in 53s
Rust Build / Rustfmt (pull_request) Successful in 25s
Rust Build / Clippy (pull_request) Successful in 46s
Rust Build / build (pull_request) Successful in 1m9s
Reviewed-on: #14
2025-04-03 16:49:45 +00:00
KD 238fb15e6d Updated ssh key
All checks were successful
Rust Build / Check (pull_request) Successful in 45s
Rust Build / Test Suite (pull_request) Successful in 1m0s
Rust Build / Rustfmt (pull_request) Successful in 27s
Rust Build / Clippy (pull_request) Successful in 45s
Rust Build / build (pull_request) Successful in 1m18s
2025-04-03 12:40:29 -04:00
KD 9be38542c1 Version bump
All checks were successful
Rust Build / Check (pull_request) Successful in 44s
Rust Build / Test Suite (pull_request) Successful in 50s
Rust Build / Rustfmt (pull_request) Successful in 25s
Rust Build / Clippy (pull_request) Successful in 48s
Rust Build / build (pull_request) Successful in 1m17s
2025-04-03 12:27:48 -04:00
11 changed files with 383 additions and 107 deletions


@@ -1,2 +1 @@
DATABASE_URL=postgres://username:password@localhost/database_name
TEST_DATABASE_URL=postgres://username:password@localhost/database_name_test


@@ -21,12 +21,13 @@ jobs:
toolchain: 1.85.0
- run: |
mkdir -p ~/.ssh
echo "${{ secrets.MYREPO_TOKEN }}" > ~/.ssh/gitlab_deploy_key
chmod 600 ~/.ssh/gitlab_deploy_key
echo "${{ secrets.MYREPO_TOKEN }}" > ~/.ssh/icarus_models_deploy_key
chmod 600 ~/.ssh/icarus_models_deploy_key
ssh-keyscan ${{ vars.MYHOST }} >> ~/.ssh/known_hosts
eval $(ssh-agent -s)
ssh-add -v ~/.ssh/gitlab_deploy_key
ssh-add -v ~/.ssh/icarus_models_deploy_key
cargo check
test:
@@ -68,40 +69,22 @@ jobs:
# NOTE: Do NOT use continue-on-error here.
# If Docker isn't working as expected, the job SHOULD fail here.
# --- Optional but Recommended: Database Migrations Step ---
- name: Run Database Migrations
env:
# Define TEST_DATABASE_URL using service details and secrets
TEST_DATABASE_URL: postgresql://${{ secrets.DB_TEST_USER || 'testuser' }}:${{ secrets.DB_TEST_PASSWORD || 'testpassword' }}@postgres:5432/${{ secrets.DB_TEST_NAME || 'testdb' }}
# Make SSH agent available if migrations fetch private dependencies
SSH_AUTH_SOCK: ${{ env.SSH_AUTH_SOCK }}
run: |
echo "Running database migrations..."
# ===> IMPORTANT: Replace placeholder below with your actual migration command <===
# Example: Install and run sqlx-cli
# cargo install sqlx-cli --no-default-features --features native-tls,postgres
# sqlx database setup --database-url $TEST_DATABASE_URL
# Example: Install and run diesel_cli
# cargo install diesel_cli --no-default-features --features postgres
# diesel migration run --database-url $TEST_DATABASE_URL
# echo "[Placeholder] Your migration command goes here."
# ===> End of Placeholder <===
- name: Run tests
env:
# Define TEST_DATABASE_URL for tests to use
TEST_DATABASE_URL: postgresql://${{ secrets.DB_TEST_USER || 'testuser' }}:${{ secrets.DB_TEST_PASSWORD || 'testpassword' }}@postgres:5432/${{ secrets.DB_TEST_NAME || 'testdb' }}
# Define DATABASE_URL for tests to use
DATABASE_URL: postgresql://${{ secrets.DB_TEST_USER || 'testuser' }}:${{ secrets.DB_TEST_PASSWORD || 'testpassword' }}@postgres:5432/${{ secrets.DB_TEST_NAME || 'testdb' }}
RUST_LOG: info # Optional: configure test log level
# Make SSH agent available if tests fetch private dependencies
SSH_AUTH_SOCK: ${{ env.SSH_AUTH_SOCK }}
run: |
mkdir -p ~/.ssh
echo "${{ secrets.MYREPO_TOKEN }}" > ~/.ssh/gitlab_deploy_key
chmod 600 ~/.ssh/gitlab_deploy_key
echo "${{ secrets.MYREPO_TOKEN }}" > ~/.ssh/icarus_models_deploy_key
chmod 600 ~/.ssh/icarus_models_deploy_key
ssh-keyscan ${{ vars.MYHOST }} >> ~/.ssh/known_hosts
eval $(ssh-agent -s)
ssh-add -v ~/.ssh/gitlab_deploy_key
ssh-add -v ~/.ssh/icarus_models_deploy_key
cargo test
fmt:
@@ -115,12 +98,12 @@ jobs:
- run: rustup component add rustfmt
- run: |
mkdir -p ~/.ssh
echo "${{ secrets.MYREPO_TOKEN }}" > ~/.ssh/gitlab_deploy_key
chmod 600 ~/.ssh/gitlab_deploy_key
echo "${{ secrets.MYREPO_TOKEN }}" > ~/.ssh/icarus_models_deploy_key
chmod 600 ~/.ssh/icarus_models_deploy_key
ssh-keyscan ${{ vars.MYHOST }} >> ~/.ssh/known_hosts
eval $(ssh-agent -s)
ssh-add -v ~/.ssh/gitlab_deploy_key
ssh-add -v ~/.ssh/icarus_models_deploy_key
cargo fmt --all -- --check
clippy:
@@ -134,12 +117,12 @@ jobs:
- run: rustup component add clippy
- run: |
mkdir -p ~/.ssh
echo "${{ secrets.MYREPO_TOKEN }}" > ~/.ssh/gitlab_deploy_key
chmod 600 ~/.ssh/gitlab_deploy_key
echo "${{ secrets.MYREPO_TOKEN }}" > ~/.ssh/icarus_models_deploy_key
chmod 600 ~/.ssh/icarus_models_deploy_key
ssh-keyscan ${{ vars.MYHOST }} >> ~/.ssh/known_hosts
eval $(ssh-agent -s)
ssh-add -v ~/.ssh/gitlab_deploy_key
ssh-add -v ~/.ssh/icarus_models_deploy_key
cargo clippy -- -D warnings
build:
@@ -152,11 +135,10 @@ jobs:
toolchain: 1.85.0
- run: |
mkdir -p ~/.ssh
echo "${{ secrets.MYREPO_TOKEN }}" > ~/.ssh/gitlab_deploy_key
chmod 600 ~/.ssh/gitlab_deploy_key
echo "${{ secrets.MYREPO_TOKEN }}" > ~/.ssh/icarus_models_deploy_key
chmod 600 ~/.ssh/icarus_models_deploy_key
ssh-keyscan ${{ vars.MYHOST }} >> ~/.ssh/known_hosts
eval $(ssh-agent -s)
ssh-add -v ~/.ssh/gitlab_deploy_key
ssh-add -v ~/.ssh/icarus_models_deploy_key
cargo build --release


@@ -1,6 +1,6 @@
[package]
name = "icarus_auth"
version = "0.1.0"
version = "0.1.1"
edition = "2024"
[dependencies]
@@ -11,11 +11,15 @@ tokio = { version = "1.44.1", features = ["rt-multi-thread"] }
tracing-subscriber = { version = "0.3.19" }
tower = { version = "0.5.2" }
hyper = { version = "1.6.0" }
sqlx = { version = "0.8.3", features = ["postgres", "runtime-tokio-native-tls"] }
sqlx = { version = "0.8.3", features = ["postgres", "runtime-tokio-native-tls", "uuid"] }
dotenvy = { version = "0.15.7" }
icarus_models = { git = "ssh://git@git.kundeng.us/phoenix/icarus_models.git", tag = "v0.2.0" }
uuid = { version = "1.16.0", features = ["v4", "serde"] }
argon2 = { version = "0.5.3", features = ["std"] } # Use the latest 0.5.x version
rand = { version = "0.9" }
icarus_models = { git = "ssh://git@git.kundeng.us/phoenix/icarus_models.git", tag = "v0.3.0" }
[dev-dependencies]
http-body-util = "0.1.3"
http-body-util = { version = "0.1.3" }
url = { version = "2.5" }
reqwest = { version = "0.12.5", features = ["json"] } # For making HTTP requests in tests
once_cell = "1.19" # Useful for lazy initialization in tests/app setup
once_cell = { version = "1.19" } # Useful for lazy initialization in tests/app setup


@@ -1 +1,9 @@
-- Add migration script here
CREATE EXTENSION IF NOT EXISTS pgcrypto;
CREATE TABLE IF NOT EXISTS "user" (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
username TEXT NOT NULL,
password TEXT NOT NULL,
date_created TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
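As a hedged aside (not part of this diff): rows created by this migration can be read back with sqlx's FromRow derive. The PR's own models::common::User does not derive FromRow, so the struct below is purely illustrative; the "uuid" feature added to sqlx in Cargo.toml below is what makes the UUID column mapping work.

#[derive(Debug, sqlx::FromRow)]
struct UserRow {
    // Columns from the "user" table above; date_created is omitted because
    // the chrono feature is not enabled in the Cargo.toml shown in this diff.
    id: uuid::Uuid,
    username: String,
    password: String,
}

async fn fetch_by_username(
    pool: &sqlx::PgPool,
    name: &str,
) -> Result<Option<UserRow>, sqlx::Error> {
    sqlx::query_as::<_, UserRow>(
        r#"SELECT id, username, password FROM "user" WHERE username = $1"#,
    )
    .bind(name)
    .fetch_optional(pool)
    .await
}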


@@ -1,3 +1,25 @@
# Make sure role has CREATEDB
ALTER ROLE username_that_needs_permission CREATEDB;
# Install migrations
cargo install sqlx-cli
# Make sure to populate DATABASE_URL with the correct value.
# By default, the DATABASE_URL found in the .env file will be used
export DATABASE_URL="postgres://icarus_op_test:password@localhost/icarus_auth_test"
# init
sqlx migrate add init_migration
sqlx migrate run
# Create
sqlx database create
# Drop
sqlx database drop
# setup
sqlx database setup
# Reset
sqlx database reset
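These CLI steps have a programmatic counterpart: the crate embeds the same ./migrations directory with sqlx::migrate!, which main.rs below runs at startup. A minimal standalone sketch, assuming DATABASE_URL is reachable and the migrations directory sits next to Cargo.toml:

// Hedged sketch mirroring the CLI steps above in code: load .env with
// dotenvy, connect, and apply the embedded ./migrations (the same approach
// db::migrations in main.rs takes further down).
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    dotenvy::dotenv().ok();
    let url = std::env::var("DATABASE_URL")?;
    let pool = sqlx::PgPool::connect(&url).await?;
    sqlx::migrate!("./migrations").run(&pool).await?;
    Ok(())
}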


@@ -1,12 +1,47 @@
use axum::{Json, http::StatusCode};
use crate::models;
use crate::repo;
mod response {
use serde::{Deserialize, Serialize};
use crate::models;
#[derive(Deserialize, Serialize)]
pub struct Response {
pub message: String,
pub data: models::common::User,
}
}
pub async fn register_user(
axum::Extension(pool): axum::Extension<sqlx::PgPool>,
Json(payload): Json<models::common::CreateUser>,
) -> (StatusCode, Json<models::common::User>) {
let user = models::common::User {
) -> (StatusCode, Json<response::Response>) {
let mut user = models::common::User {
id: uuid::Uuid::nil(),
username: payload.username.clone(),
password: payload.password.clone(),
};
(StatusCode::CREATED, Json(user))
match repo::user::insert(&pool, &user).await {
Ok(id) => {
user.id = id;
(
StatusCode::CREATED,
Json(response::Response {
message: String::from("User inserted"),
data: user,
}),
)
}
Err(err) => (
StatusCode::BAD_REQUEST,
Json(response::Response {
message: err.to_string(),
data: user,
}),
),
}
}
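For reference, a hedged sketch of the JSON envelope a successful registration now returns: the message/data wrapper comes from the response module above, the UUID is whatever the insert's RETURNING id produced (the value below is illustrative), and note that in this version the plaintext password is echoed back unchanged.

fn example_success_body() -> serde_json::Value {
    // Illustrative only; field values are made up.
    serde_json::json!({
        "message": "User inserted",
        "data": {
            "id": "6f2b4c58-0d8e-4b3a-9a57-2f6a1c2d3e4f",
            "username": "somethingsss",
            "password": "Raindown!"
        }
    })
}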

src/hashing/mod.rs (new file, 73 lines added)

@@ -0,0 +1,73 @@
use argon2::{
Argon2, // The Argon2 algorithm struct
PasswordVerifier,
password_hash::{
PasswordHasher,
SaltString,
rand_core::OsRng, // Secure random number generator
},
};
pub fn hash_password(password: &String) -> Result<String, argon2::password_hash::Error> {
let password_bytes = password.as_bytes();
// Generate a random salt
// SaltString::generate uses OsRng internally for cryptographic security
let salt = SaltString::generate(&mut OsRng);
// Create an Argon2 instance with default parameters (recommended)
// You could customize parameters here if needed, but defaults are strong
let argon2 = Argon2::default();
// Hash the password with the salt
// The output is a PasswordHash string format that includes algorithm, version,
// parameters, salt, and the hash itself.
let password_hash = argon2.hash_password(password_bytes, &salt)?.to_string();
Ok(password_hash)
}
pub fn verify_password(
password_attempt: &String,
stored_hash: String,
) -> Result<bool, argon2::password_hash::Error> {
let password_bytes = password_attempt.as_bytes();
// Parse the stored hash string
// This extracts the salt, parameters, and hash digest
let parsed_hash = argon2::PasswordHash::new(stored_hash.as_str())?;
// Create an Argon2 instance (it will use the parameters from the parsed hash)
let argon2 = Argon2::default();
// Verify the password against the parsed hash
// This automatically uses the correct salt and parameters embedded in `parsed_hash`
match argon2.verify_password(password_bytes, &parsed_hash) {
Ok(()) => Ok(true), // Passwords match
Err(argon2::password_hash::Error::Password) => Ok(false), // Passwords don't match
Err(e) => Err(e), // Some other error occurred (e.g., invalid hash format)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_hash_password() {
let some_password = String::from("somethingrandom");
match hash_password(&some_password) {
Ok(p) => match verify_password(&some_password, p.clone()) {
Ok(res) => {
assert_eq!(res, true);
}
Err(err) => {
assert!(false, "Error: {:?}", err.to_string());
}
},
Err(eerr) => {
assert!(false, "Error: {:?}", eerr.to_string());
}
}
}
}
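Nothing in this diff calls these functions yet; register_user above still stores the plaintext password. A purely illustrative sketch of how the two halves could be wired together, kept to the module's current signatures:

// Hedged sketch, not present in this PR: hash the incoming password before
// it reaches repo::user::insert, and later verify a login attempt against
// the stored hash read back from the database.
fn store_and_check(plain: &String, attempt: &String) -> Result<bool, argon2::password_hash::Error> {
    let stored_hash = hash_password(plain)?; // what register_user could persist
    verify_password(attempt, stored_hash)    // what a login handler could check
}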


@@ -1,20 +1,15 @@
pub mod callers;
pub mod config;
pub mod hashing;
pub mod models;
pub mod repo;
mod keys {
pub mod keys {
pub const DBURL: &str = "DATABASE_URL";
pub mod error {
pub const ERROR: &str = "DATABASE_URL must be set in .env";
}
pub mod test {
pub const DBURL: &str = "TEST_DATABASE_URL";
pub mod error {
pub const ERROR: &str = "TEST_DATABASE_URL must be set in .env";
}
}
}
mod connection_settings {
@@ -42,10 +37,6 @@ pub mod db_pool {
#[cfg(debug_assertions)] // Example: Only load .env in debug builds
dotenvy::dotenv().ok();
if cfg!(debug_assertions) {
env::var(keys::test::DBURL).expect(keys::test::error::ERROR)
} else {
env::var(keys::DBURL).expect(keys::error::ERROR)
}
}
}


@@ -1,19 +1,12 @@
use axum::{
Router,
routing::{get, post},
};
// use std::net::SocketAddr;
use icarus_auth::callers;
use icarus_auth::config;
// use sqlx::Postgres;
#[tokio::main]
async fn main() {
// initialize tracing
tracing_subscriber::fmt::init();
let app = app().await;
let app = init::app().await;
// run our app with hyper, listening globally on port 3000
let url = config::get_full();
@@ -21,7 +14,27 @@ async fn main() {
axum::serve(listener, app).await.unwrap();
}
async fn routes() -> Router {
mod db {
pub async fn migrations(pool: &sqlx::PgPool) {
// Run migrations using the sqlx::migrate! macro
// Assumes your migrations are in a ./migrations folder relative to Cargo.toml
sqlx::migrate!("./migrations")
.run(pool)
.await
.expect("Failed to run migrations on testcontainer DB");
}
}
mod init {
use axum::{
Router,
routing::{get, post},
};
use crate::callers;
use crate::db;
pub async fn routes() -> Router {
// build our application with a route
Router::new()
.route(callers::endpoints::DBTEST, get(callers::common::db_ping))
@@ -30,34 +43,114 @@ async fn routes() -> Router {
callers::endpoints::REGISTER,
post(callers::register::register_user),
)
}
}
async fn app() -> Router {
pub async fn app() -> Router {
let pool = icarus_auth::db_pool::create_pool()
.await
.expect("Failed to create pool");
db::migrations(&pool).await;
routes().await.layer(axum::Extension(pool))
}
}
#[cfg(test)]
mod tests {
use super::*;
use axum::{
body::Body,
// extract::connect_info::MockConnectInfo,
http::{Request, StatusCode},
};
use http_body_util::BodyExt;
// use http_body_util::BodyExt; // for `collect`
// use serde_json::{Value, json};
// use tokio::net::TcpListener;
// use tower::{Service, ServiceExt}; // for `call`, `oneshot`, and `ready`
use serde::{Deserialize, Serialize};
use serde_json::json;
use tower::ServiceExt; // for `call`, `oneshot`, and `ready`
#[derive(Deserialize, Serialize)]
struct Response {
pub message: String,
pub data: icarus_auth::models::common::User,
}
mod db_mgr {
use std::str::FromStr;
use icarus_auth::keys;
pub const LIMIT: usize = 6;
pub async fn get_pool() -> Result<sqlx::PgPool, sqlx::Error> {
let tm_db_url = std::env::var(keys::DBURL).expect("DATABASE_URL must be present");
let tm_options = sqlx::postgres::PgConnectOptions::from_str(&tm_db_url).unwrap();
sqlx::PgPool::connect_with(tm_options).await
}
pub async fn generate_db_name() -> String {
let db_name =
get_database_name().unwrap() + &"_" + &uuid::Uuid::new_v4().to_string()[..LIMIT];
db_name
}
pub async fn connect_to_db(db_name: &str) -> Result<sqlx::PgPool, sqlx::Error> {
let db_url = std::env::var(keys::DBURL).expect("DATABASE_URL must be set for tests");
let options = sqlx::postgres::PgConnectOptions::from_str(&db_url)?.database(db_name);
sqlx::PgPool::connect_with(options).await
}
pub async fn create_database(
template_pool: &sqlx::PgPool,
db_name: &str,
) -> Result<(), sqlx::Error> {
let create_query = format!("CREATE DATABASE {}", db_name);
match sqlx::query(&create_query).execute(template_pool).await {
Ok(_) => Ok(()),
Err(e) => Err(e),
}
}
// Function to drop a database
pub async fn drop_database(
template_pool: &sqlx::PgPool,
db_name: &str,
) -> Result<(), sqlx::Error> {
let drop_query = format!("DROP DATABASE IF EXISTS {} WITH (FORCE)", db_name);
sqlx::query(&drop_query).execute(template_pool).await?;
Ok(())
}
pub fn get_database_name() -> Result<String, Box<dyn std::error::Error>> {
dotenvy::dotenv().ok(); // Load .env file if it exists
match std::env::var(keys::DBURL) {
Ok(database_url) => {
let parsed_url = url::Url::parse(&database_url)?;
if parsed_url.scheme() == "postgres" || parsed_url.scheme() == "postgresql" {
match parsed_url
.path_segments()
.and_then(|segments| segments.last().map(|s| s.to_string()))
{
Some(sss) => Ok(sss),
None => Err("Error parsing".into()),
}
} else {
// Handle other database types if needed
Err("Error parsing".into())
}
}
Err(_) => {
// DATABASE_URL environment variable not found
Err("Error parsing".into())
}
}
}
}
#[tokio::test]
async fn hello_world() {
let app = app().await;
async fn test_hello_world() {
let app = init::app().await;
// `Router` implements `tower::Service<Request<Body>>` so we can
// call it like any tower service, no need to run an HTTP server.
@@ -73,24 +166,70 @@ mod tests {
assert_eq!(response.status(), StatusCode::OK);
/*
match response.into_body().collect().await {
Ok(o) => {
let parsed: String = match String::from_utf8(o.to_bytes()) {
Ok(s) => s,
Err(err) => {
String::new()
}
};
}
Err(err) => {
assert!(false,
"Error: {:?}", err.to_string());
}
}
*/
let body = response.into_body().collect().await.unwrap().to_bytes();
assert_eq!(&body[..], b"Hello, World!");
}
#[tokio::test]
async fn test_register_user() {
let tm_pool = db_mgr::get_pool().await.unwrap();
let db_name = db_mgr::generate_db_name().await;
match db_mgr::create_database(&tm_pool, &db_name).await {
Ok(_) => {
println!("Success");
}
Err(e) => {
assert!(false, "Error: {:?}", e.to_string());
}
}
let pool = db_mgr::connect_to_db(&db_name).await.unwrap();
db::migrations(&pool).await;
let app = init::routes().await.layer(axum::Extension(pool));
let usr = icarus_auth::models::common::CreateUser {
username: String::from("somethingsss"),
password: String::from("Raindown!"),
};
let payload = json!({
"username": &usr.username,
"password": &usr.password,
});
let response = app
.oneshot(
Request::builder()
.method(axum::http::Method::POST)
.uri(callers::endpoints::REGISTER)
.header(axum::http::header::CONTENT_TYPE, "application/json")
.body(Body::from(payload.to_string()))
.unwrap(),
)
.await;
match response {
Ok(resp) => {
assert_eq!(resp.status(), StatusCode::CREATED, "Message: {:?}", resp);
let body = axum::body::to_bytes(resp.into_body(), usize::MAX)
.await
.unwrap();
let parsed_body: Response = serde_json::from_slice(&body).unwrap();
assert_eq!(
usr.username, parsed_body.data.username,
"Usernames do not match"
);
}
Err(err) => {
assert!(false, "Error: {:?}", err.to_string());
}
};
let _ = db_mgr::drop_database(&tm_pool, &db_name).await;
}
}


@@ -1,11 +1,14 @@
use serde::{Deserialize, Serialize};
#[derive(Deserialize)]
#[derive(Deserialize, Serialize)]
pub struct CreateUser {
pub username: String,
pub password: String,
}
#[derive(Serialize)]
#[derive(Deserialize, Serialize)]
pub struct User {
pub id: uuid::Uuid,
pub username: String,
pub password: String,
}

src/repo/mod.rs (new file, 20 lines added)

@@ -0,0 +1,20 @@
pub mod user {
use crate::models;
pub async fn insert(
pool: &sqlx::PgPool,
user: &models::common::User,
) -> Result<uuid::Uuid, sqlx::Error> {
let insert_sql = "INSERT INTO \"user\" (username, password) VALUES ($1, $2) RETURNING id";
match sqlx::query_scalar(insert_sql)
.bind(&user.username) // Bind the input message securely
.bind(&user.password)
.fetch_one(pool) // Execute and expect exactly ONE row with ONE column back
.await
{
Ok(o) => Ok(o),
Err(err) => Err(err), // _ => uuid::Uuid::nil(),
}
}
}
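A short, hedged usage sketch of the new helper: the pool would come from icarus_auth::db_pool::create_pool or one of the per-test databases the db_mgr module above sets up, and the names here are illustrative.

async fn insert_demo(pool: &sqlx::PgPool) -> Result<(), sqlx::Error> {
    let user = icarus_auth::models::common::User {
        id: uuid::Uuid::nil(), // ignored: the database generates the id
        username: String::from("demo"),
        password: String::from("not-a-real-password"),
    };
    let id = icarus_auth::repo::user::insert(pool, &user).await?;
    println!("inserted user {id}");
    Ok(())
}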