Compare commits
26 Commits: v0.3.0-dev ... v0.4.1-dev
SHA1
5967ed5b13
be4d1109a7
4353414c69
c176d0fcf3
c8b8d470dc
bcd0e607ef
70de6b862f
8c902b9d61
480a428e8b
02697b2fd9
d4faa7976e
ed77cab700
2c30abb5c6
1817ab01d6
31be156be3
fc6b66f2e6
6dec9942cc
a855db9ecc
17af1a00c0
50e735e1a9
f6cf968f86
70a547ca94
89c89a5524
f601442f0e
2229d98ab6
88f45645b3
.dockerignore.yaml (new file, 21 lines)
@@ -0,0 +1,21 @@
+# Ignore build artifacts
+target/
+pkg/
+
+# Ignore git directory
+.git/
+.gitea/
+
+# Ignore environment files (configure via docker-compose instead)
+.env*
+
+# Ignore IDE/editor specific files
+.idea/
+.vscode/
+
+# Ignore OS specific files
+*.DS_Store
+
+# Add any other files/directories you don't need in the image
+# e.g., logs/, tmp/
.env.docker.sample (new file, 6 lines)
@@ -0,0 +1,6 @@
+SECRET_KEY=refero34o8rfhfjn983thf39fhc943rf923n3h
+POSTGRES_AUTH_USER=icarus_op
+POSTGRES_AUTH_PASSWORD=password
+POSTGRES_AUTH_DB=icarus_auth_db
+POSTGRES_AUTH_HOST=auth_db
+DATABASE_URL=postgresql://${POSTGRES_AUTH_USER}:${POSTGRES_AUTH_PASSWORD}@${POSTGRES_AUTH_HOST}:5432/${POSTGRES_AUTH_DB}
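The `${VAR}` placeholders in `DATABASE_URL` are expanded by docker-compose (or any dotenv-style loader that supports interpolation), not by the application itself. As a quick sanity check, the sample values above compose like this (plain Rust sketch, not project code):

```rust
fn main() {
    // Sample values from .env.docker.sample above.
    let (user, password, host, db) = ("icarus_op", "password", "auth_db", "icarus_auth_db");

    // Same shape as the DATABASE_URL template once ${...} interpolation has run.
    let database_url = format!("postgresql://{user}:{password}@{host}:5432/{db}");
    assert_eq!(
        database_url,
        "postgresql://icarus_op:password@auth_db:5432/icarus_auth_db"
    );
}
```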
@@ -1,2 +1,6 @@
-DATABASE_URL=postgres://username:password@localhost/database_name
 SECRET_KEY=refero34o8rfhfjn983thf39fhc943rf923n3h
+POSTGRES_AUTH_USER=icarus_op_test
+POSTGRES_AUTH_PASSWORD=password
+POSTGRES_AUTH_DB=icarus_auth_test_db
+POSTGRES_AUTH_HOST=localhost
+DATABASE_URL=postgresql://${POSTGRES_AUTH_USER}:${POSTGRES_AUTH_PASSWORD}@${POSTGRES_AUTH_HOST}:5432/${POSTGRES_AUTH_DB}
@@ -4,8 +4,6 @@ on:
   push:
     branches:
       - devel
-    tags:
-      - 'v*' # Trigger on tags matching v*
 
 jobs:
   release:
@@ -19,7 +17,7 @@ jobs:
       - name: Install Rust
         uses: actions-rs/toolchain@v1
         with:
-          toolchain: 1.86.0
+          toolchain: 1.88.0
           components: cargo
 
       - name: Extract Version from Cargo.toml
@@ -51,7 +49,3 @@ jobs:
           release_name: Release ${{ steps.version.outputs.project_tag_release }}
           body: |
            Release of version ${{ steps.version.outputs.project_tag_release }}
-          # draft: false
-          # prerelease: ${{ startsWith(github.ref, 'v') == false }} # prerelease if not a valid release tag
-
-
@@ -18,7 +18,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions-rust-lang/setup-rust-toolchain@v1
         with:
-          toolchain: 1.86.0
+          toolchain: 1.88.0
       - run: |
           mkdir -p ~/.ssh
           echo "${{ secrets.MYREPO_TOKEN }}" > ~/.ssh/icarus_models_deploy_key
@@ -36,7 +36,7 @@ jobs:
     # --- Add database service definition ---
     services:
       postgres:
-        image: postgres:17.4 # Or pin to a more specific version like 14.9
+        image: postgres:17.5
        env:
           # Use secrets for DB init, with fallbacks for flexibility
           POSTGRES_USER: ${{ secrets.DB_TEST_USER || 'testuser' }}
@@ -53,7 +53,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions-rust-lang/setup-rust-toolchain@v1
         with:
-          toolchain: 1.86.0
+          toolchain: 1.88.0
       # --- Add this step for explicit verification ---
       - name: Verify Docker Environment
         run: |
@@ -94,7 +94,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions-rust-lang/setup-rust-toolchain@v1
         with:
-          toolchain: 1.86.0
+          toolchain: 1.88.0
       - run: rustup component add rustfmt
       - run: |
           mkdir -p ~/.ssh
@@ -113,7 +113,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions-rust-lang/setup-rust-toolchain@v1
         with:
-          toolchain: 1.86.0
+          toolchain: 1.88.0
       - run: rustup component add clippy
       - run: |
           mkdir -p ~/.ssh
@@ -132,7 +132,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions-rust-lang/setup-rust-toolchain@v1
         with:
-          toolchain: 1.86.0
+          toolchain: 1.88.0
       - run: |
           mkdir -p ~/.ssh
           echo "${{ secrets.MYREPO_TOKEN }}" > ~/.ssh/icarus_models_deploy_key
.gitignore (vendored)
@@ -1,3 +1,4 @@
 /target
-Cargo.lock
 .env
+.env.local
+.env.docker
Cargo.lock (generated, new file, 2611 lines)
File diff suppressed because it is too large.
Cargo.toml
@@ -1,27 +1,27 @@
 [package]
 name = "icarus_auth"
-version = "0.3.0"
+version = "0.4.1"
 edition = "2024"
-rust-version = "1.86"
+rust-version = "1.88"
 
 [dependencies]
-axum = { version = "0.8.3" }
+axum = { version = "0.8.4" }
-serde = { version = "1.0.218", features = ["derive"] }
+serde = { version = "1.0.219", features = ["derive"] }
-serde_json = { version = "1.0.139" }
+serde_json = { version = "1.0.140" }
-tokio = { version = "1.44.1", features = ["rt-multi-thread"] }
+tokio = { version = "1.45.1", features = ["rt-multi-thread"] }
 tracing-subscriber = { version = "0.3.19" }
 tower = { version = "0.5.2" }
 hyper = { version = "1.6.0" }
-sqlx = { version = "0.8.3", features = ["postgres", "runtime-tokio-native-tls", "time", "uuid"] }
+sqlx = { version = "0.8.6", features = ["postgres", "runtime-tokio-native-tls", "time", "uuid"] }
-dotenvy = { version = "0.15.7" }
-uuid = { version = "1.16.0", features = ["v4", "serde"] }
+uuid = { version = "1.17.0", features = ["v4", "serde"] }
 argon2 = { version = "0.5.3", features = ["std"] } # Use the latest 0.5.x version
-rand = { version = "0.9" }
+rand = { version = "0.9.1" }
 time = { version = "0.3.41", features = ["macros", "serde"] }
-josekit = { version = "0.10.1" }
+josekit = { version = "0.10.3" }
-icarus_models = { git = "ssh://git@git.kundeng.us/phoenix/icarus_models.git", tag = "v0.4.1" }
+icarus_models = { git = "ssh://git@git.kundeng.us/phoenix/icarus_models.git", tag = "v0.5.4-devel-1e95822b5a-111" }
+icarus_envy = { git = "ssh://git@git.kundeng.us/phoenix/icarus_envy.git", tag = "v0.3.0-devel-d73fba9899-006" }
 
 [dev-dependencies]
 http-body-util = { version = "0.1.3" }
-url = { version = "2.5" }
+url = { version = "2.5.4" }
-once_cell = { version = "1.19" } # Useful for lazy initialization in tests/app setup
+once_cell = { version = "1.21.3" } # Useful for lazy initialization in tests/app setup
Dockerfile (new file, 71 lines)
@@ -0,0 +1,71 @@
+# Stage 1: Build the application
+# Use a specific Rust version for reproducibility. Choose one that matches your development environment.
+# Using slim variant for smaller base image
+FROM rust:1.88 as builder
+
+# Set the working directory inside the container
+WORKDIR /usr/src/app
+
+# Install build dependencies if needed (e.g., for certain crates like sqlx with native TLS)
+# RUN apt-get update && apt-get install -y pkg-config libssl-dev
+
+# Install build dependencies if needed (e.g., git for cloning)
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    pkg-config libssl3 \
+    ca-certificates \
+    openssh-client git \
+    && rm -rf /var/lib/apt/lists/*
+
+# << --- ADD HOST KEY HERE --- >>
+# Replace 'yourgithost.com' with the actual hostname (e.g., github.com)
+RUN mkdir -p -m 0700 ~/.ssh && \
+    ssh-keyscan git.kundeng.us >> ~/.ssh/known_hosts
+
+# Copy Cargo manifests
+COPY Cargo.toml Cargo.lock ./
+
+# Build *only* dependencies to leverage Docker cache
+# This dummy build caches dependencies as a separate layer
+RUN --mount=type=ssh mkdir src && \
+    echo "fn main() {println!(\"if you see this, the build broke\")}" > src/main.rs && \
+    cargo build --release --quiet && \
+    rm -rf src target/release/deps/icarus_auth* # Clean up dummy build artifacts (replace icarus_auth)
+
+# Copy the actual source code
+COPY src ./src
+# If you have other directories like `templates` or `static`, copy them too
+COPY .env ./.env
+COPY migrations ./migrations
+
+# << --- SSH MOUNT ADDED HERE --- >>
+# Build *only* dependencies to leverage Docker cache
+# This dummy build caches dependencies as a separate layer
+# Mount the SSH agent socket for this command
+RUN --mount=type=ssh \
+    cargo build --release --quiet
+
+# Stage 2: Create the final, smaller runtime image
+# Use a minimal base image like debian-slim or even distroless for security/size
+FROM ubuntu:24.04
+
+# Install runtime dependencies if needed (e.g., SSL certificates)
+RUN apt-get update && apt-get install -y ca-certificates libssl-dev libssl3 && rm -rf /var/lib/apt/lists/*
+
+# Set the working directory
+WORKDIR /usr/local/bin
+
+# Copy the compiled binary from the builder stage
+# Replace 'icarus_auth' with the actual name of your binary (usually the crate name)
+COPY --from=builder /usr/src/app/target/release/icarus_auth .
+
+# Copy other necessary files like .env (if used for runtime config) or static assets
+# It's generally better to configure via environment variables in Docker though
+COPY --from=builder /usr/src/app/.env .
+COPY --from=builder /usr/src/app/migrations ./migrations
+
+# Expose the port your Axum app listens on (e.g., 3000 or 8000)
+EXPOSE 3000
+
+# Set the command to run your application
+# Ensure this matches the binary name copied above
+CMD ["./icarus_auth"]
README.md (new file, 26 lines)
@@ -0,0 +1,26 @@
+
+
+# Getting Started
+Copy the `.env.sample` file to `.env` and ensure that the variables are populated. This project
+can be used with regular hosting or with docker. For the sake of getting up to speed quickly,
+Docker will be covered. Make sure docker is running and your ssh identity has been loaded.
+
+Build image
+```
+docker compose build
+```
+
+Start images
+```
+docker compose up -d --force-recreate
+```
+
+Bring it down
+```
+docker compose down -v
+```
+
+Pruning
+```
+docker system prune -a
+```
docker-compose.yaml (new file, 45 lines)
@@ -0,0 +1,45 @@
+version: '3.8' # Use a recent version
+
+services:
+  # Your Rust Application Service
+  auth_api:
+    build: # Tells docker-compose to build the Dockerfile in the current directory
+      context: .
+      ssh: ["default"] # Uses host's SSH agent
+    container_name: icarus_auth # Optional: Give the container a specific name
+    ports:
+      # Map host port 8000 to container port 3000 (adjust as needed)
+      - "8000:3000"
+    env_file:
+      - .env
+    depends_on:
+      auth_db:
+        condition: service_healthy # Wait for the DB to be healthy before starting the app
+    restart: unless-stopped # Optional: Restart policy
+
+  # PostgreSQL Database Service
+  auth_db:
+    image: postgres:17.5-alpine # Use an official Postgres image (Alpine variant is smaller)
+    container_name: icarus_auth_db # Optional: Give the container a specific name
+    environment:
+      # These MUST match the user, password, and database name in the DATABASE_URL above
+      POSTGRES_USER: ${POSTGRES_AUTH_USER:-icarus_op}
+      POSTGRES_PASSWORD: ${POSTGRES_AUTH_PASSWORD:-password}
+      POSTGRES_DB: ${POSTGRES_AUTH_DB:-icarus_auth_db}
+    volumes:
+      # Persist database data using a named volume
+      - postgres_data:/var/lib/postgresql/data
+    ports: []
+    healthcheck:
+      # Checks if Postgres is ready to accept connections
+      test: ["CMD-SHELL", "pg_isready -U $$POSTGRES_USER -d $$POSTGRES_DB"]
+      interval: 10s
+      timeout: 5s
+      retries: 5
+      start_period: 10s
+    restart: always # Optional: Restart policy
+
+# Define the named volume for data persistence
+volumes:
+  postgres_data:
+    driver: local # Use the default local driver
@@ -1,3 +1,5 @@
+TODO: At some point, move this somewhere that is appropriate
+
 # Make sure role has CREATEDB
 ALTER ROLE username_that_needs_permission CREATEDB;
 
@@ -1,30 +1,37 @@
-use axum::{Extension, Json, http::StatusCode};
-
-use serde::{Deserialize, Serialize};
-
-#[derive(Deserialize, Serialize)]
-pub struct TestResult {
-    message: String,
-}
-
-// basic handler that responds with a static string
-pub async fn root() -> &'static str {
-    "Hello, World!"
-}
-
-pub async fn db_ping(Extension(pool): Extension<sqlx::PgPool>) -> (StatusCode, Json<TestResult>) {
-    match sqlx::query("SELECT 1").execute(&pool).await {
-        Ok(_) => {
-            let tr = TestResult {
-                message: String::from("This works"),
-            };
-            (StatusCode::OK, Json(tr))
-        }
-        Err(e) => (
-            StatusCode::BAD_REQUEST,
-            Json(TestResult {
-                message: e.to_string(),
-            }),
-        ),
-    }
-}
+pub mod response {
+    use serde::{Deserialize, Serialize};
+
+    #[derive(Deserialize, Serialize)]
+    pub struct TestResult {
+        pub message: String,
+    }
+}
+
+pub mod endpoint {
+    use super::*;
+    use axum::{Extension, Json, http::StatusCode};
+
+    // basic handler that responds with a static string
+    pub async fn root() -> &'static str {
+        "Hello, World!"
+    }
+
+    pub async fn db_ping(
+        Extension(pool): Extension<sqlx::PgPool>,
+    ) -> (StatusCode, Json<response::TestResult>) {
+        match sqlx::query("SELECT 1").execute(&pool).await {
+            Ok(_) => {
+                let tr = response::TestResult {
+                    message: String::from("This works"),
+                };
+                (StatusCode::OK, Json(tr))
+            }
+            Err(e) => (
+                StatusCode::BAD_REQUEST,
+                Json(response::TestResult {
+                    message: e.to_string(),
+                }),
+            ),
+        }
+    }
+}
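With the handlers now grouped under `endpoint`, a single route can be exercised without a running server via `tower::ServiceExt::oneshot` (`tower` and `http-body-util` are already listed in Cargo.toml). A minimal sketch using a stand-in handler rather than the crate's real module path:

```rust
use axum::{Router, body::Body, http::{Request, StatusCode}, routing::get};
use tower::ServiceExt; // provides `oneshot`

// Stand-in for `endpoint::root` from the diff above.
async fn root() -> &'static str {
    "Hello, World!"
}

#[test]
fn root_returns_ok() {
    // Mirror the project's pattern of building a runtime by hand in tests.
    let rt = tokio::runtime::Runtime::new().unwrap();
    rt.block_on(async {
        let app = Router::new().route("/", get(root));
        let response = app
            .oneshot(Request::builder().uri("/").body(Body::empty()).unwrap())
            .await
            .unwrap();
        assert_eq!(response.status(), StatusCode::OK);
    });
}
```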
@@ -45,45 +45,33 @@ pub mod endpoint {
         // Check if user exists
         match repo::user::get(&pool, &payload.username).await {
             Ok(user) => {
-                let salt = repo::salt::get(&pool, &user.salt_id).await.unwrap();
-                let salt_str = hashing::get_salt(&salt.salt).unwrap();
-                let unhashed_password = payload.password;
-
-                // Check if password is correct
-                match hashing::hash_password(&unhashed_password, &salt_str) {
-                    Ok(hash_password) => {
-                        if hashing::verify_password(&unhashed_password, hash_password.clone())
-                            .unwrap()
-                        {
-                            // Create token
-                            let key = token_stuff::get_key().unwrap();
-                            let (token_literal, duration) =
-                                token_stuff::create_token(&key).unwrap();
-
-                            if token_stuff::verify_token(&key, &token_literal) {
-                                (
-                                    StatusCode::OK,
-                                    Json(response::Response {
-                                        message: String::from("Successful"),
-                                        data: vec![icarus_models::login_result::LoginResult {
-                                            id: user.id,
-                                            username: user.username,
-                                            token: token_literal,
-                                            token_type: String::from(token_stuff::TOKENTYPE),
-                                            expiration: duration,
-                                        }],
-                                    }),
-                                )
-                            } else {
-                                return not_found("Could not verify password").await;
-                            }
-                        } else {
-                            return not_found("Error Hashing").await;
-                        }
-                    }
-                    Err(err) => {
-                        return not_found(&err.to_string()).await;
-                    }
-                }
+                if hashing::verify_password(&payload.password, user.password.clone()).unwrap() {
+                    // Create token
+                    let key = icarus_envy::environment::get_secret_key().await;
+                    let (token_literal, duration) = token_stuff::create_token(&key).unwrap();
+
+                    if token_stuff::verify_token(&key, &token_literal) {
+                        let current_time = time::OffsetDateTime::now_utc();
+                        let _ = repo::user::update_last_login(&pool, &user, &current_time).await;
+
+                        (
+                            StatusCode::OK,
+                            Json(response::Response {
+                                message: String::from("Successful"),
+                                data: vec![icarus_models::login_result::LoginResult {
+                                    id: user.id,
+                                    username: user.username.clone(),
+                                    token: token_literal,
+                                    token_type: String::from(icarus_models::token::TOKEN_TYPE),
+                                    expiration: duration,
+                                }],
+                            }),
+                        )
+                    } else {
+                        return not_found("Could not verify password").await;
+                    }
+                } else {
+                    return not_found("Error Hashing").await;
+                }
             }
             Err(err) => {
@@ -11,8 +11,7 @@ use argon2::{
 pub fn generate_salt() -> Result<SaltString, argon2::Error> {
     // Generate a random salt
     // SaltString::generate uses OsRng internally for cryptographic security
-    let salt = SaltString::generate(&mut OsRng);
-    Ok(salt)
+    Ok(SaltString::generate(&mut OsRng))
 }
 
 pub fn get_salt(s: &str) -> Result<SaltString, argon2::password_hash::Error> {
@@ -32,9 +31,7 @@ pub fn hash_password(
     // Hash the password with the salt
     // The output is a PasswordHash string format that includes algorithm, version,
     // parameters, salt, and the hash itself.
-    let password_hash = argon2.hash_password(password_bytes, salt)?.to_string();
-
-    Ok(password_hash)
+    Ok(argon2.hash_password(password_bytes, salt)?.to_string())
 }
 
 pub fn verify_password(
@@ -48,11 +45,9 @@ pub fn verify_password(
     let parsed_hash = argon2::PasswordHash::new(stored_hash.as_str())?;
 
     // Create an Argon2 instance (it will use the parameters from the parsed hash)
-    let argon2 = Argon2::default();
-
     // Verify the password against the parsed hash
     // This automatically uses the correct salt and parameters embedded in `parsed_hash`
-    match argon2.verify_password(password_bytes, &parsed_hash) {
+    match Argon2::default().verify_password(password_bytes, &parsed_hash) {
         Ok(()) => Ok(true), // Passwords match
         Err(argon2::password_hash::Error::Password) => Ok(false), // Passwords don't match
         Err(e) => Err(e), // Some other error occurred (e.g., invalid hash format)
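The helpers above wrap the standard `argon2` PHC-string workflow: hash with a generated salt, then verify against the parsed hash. A self-contained round trip, as a sketch rather than code from this repository:

```rust
use argon2::{
    Argon2,
    password_hash::{PasswordHash, PasswordHasher, PasswordVerifier, SaltString, rand_core::OsRng},
};

fn main() -> Result<(), argon2::password_hash::Error> {
    let argon2 = Argon2::default();

    // Hash with a freshly generated salt; the PHC string embeds algorithm, params, and salt.
    let salt = SaltString::generate(&mut OsRng);
    let hash = argon2.hash_password(b"somethingrandom", &salt)?.to_string();

    // Verify by parsing the stored PHC string; the embedded salt and params are reused.
    let parsed = PasswordHash::new(&hash)?;
    assert!(argon2.verify_password(b"somethingrandom", &parsed).is_ok());
    assert!(argon2.verify_password(b"wrongpassword", &parsed).is_err());
    Ok(())
}
```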
@@ -66,8 +61,7 @@ mod tests {
     #[test]
     fn test_hash_password() {
         let some_password = String::from("somethingrandom");
-        let salt = generate_salt().unwrap();
-        match hash_password(&some_password, &salt) {
+        match hash_password(&some_password, &generate_salt().unwrap()) {
             Ok(p) => match verify_password(&some_password, p.clone()) {
                 Ok(res) => {
                     assert_eq!(res, true);
@@ -81,4 +75,27 @@ mod tests {
             }
         }
     }
+
+    #[test]
+    fn test_wrong_password() {
+        let some_password = String::from("somethingrandom");
+        match hash_password(&some_password, &generate_salt().unwrap()) {
+            Ok(p) => {
+                match verify_password(&some_password, p.clone()) {
+                    Ok(res) => {
+                        assert_eq!(res, true, "Passwords are not verified");
+                    }
+                    Err(err) => {
+                        assert!(false, "Error: {:?}", err.to_string());
+                    }
+                }
+                let wrong_password = String::from("Differentanotherlevel");
+                let result = verify_password(&wrong_password, p.clone()).unwrap();
+                assert_eq!(false, result, "Passwords should not match");
+            }
+            Err(err) => {
+                assert!(false, "Error: {:?}", err.to_string());
+            }
+        }
+    }
 }
src/lib.rs
@@ -4,14 +4,6 @@ pub mod hashing;
 pub mod repo;
 pub mod token_stuff;
 
-pub mod keys {
-    pub const DBURL: &str = "DATABASE_URL";
-
-    pub mod error {
-        pub const ERROR: &str = "DATABASE_URL must be set in .env";
-    }
-}
-
 mod connection_settings {
     pub const MAXCONN: u32 = 5;
 }
@@ -19,13 +11,12 @@ mod connection_settings {
 pub mod db {
 
     use sqlx::postgres::PgPoolOptions;
-    use std::env;
 
-    use crate::{connection_settings, keys};
+    use crate::connection_settings;
 
     pub async fn create_pool() -> Result<sqlx::PgPool, sqlx::Error> {
-        let database_url = get_db_url().await;
-        println!("Database url: {:?}", database_url);
+        let database_url = icarus_envy::environment::get_db_url().await;
+        println!("Database url: {database_url}");
 
         PgPoolOptions::new()
             .max_connections(connection_settings::MAXCONN)
@@ -33,13 +24,6 @@ pub mod db {
             .await
     }
 
-    async fn get_db_url() -> String {
-        #[cfg(debug_assertions)] // Example: Only load .env in debug builds
-        dotenvy::dotenv().ok();
-
-        env::var(keys::DBURL).expect(keys::error::ERROR)
-    }
-
     pub async fn migrations(pool: &sqlx::PgPool) {
         // Run migrations using the sqlx::migrate! macro
         // Assumes your migrations are in a ./migrations folder relative to Cargo.toml
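With `get_db_url` gone, startup reduces to: resolve `DATABASE_URL` through `icarus_envy`, build the pool, run migrations. A rough sketch of that call order (the binary-side wiring is assumed, not shown in this diff):

```rust
// Sketch only: assumes the `db` module above is exposed by the `icarus_auth` crate.
fn main() -> Result<(), sqlx::Error> {
    let rt = tokio::runtime::Runtime::new().unwrap();
    rt.block_on(async {
        let pool = icarus_auth::db::create_pool().await?; // reads DATABASE_URL via icarus_envy
        icarus_auth::db::migrations(&pool).await;         // applies ./migrations
        // ... hand the pool to the router via Extension and start serving
        Ok(())
    })
}
```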
src/main.rs
@@ -25,8 +25,14 @@ mod init {
     pub async fn routes() -> Router {
         // build our application with a route
         Router::new()
-            .route(callers::endpoints::DBTEST, get(callers::common::db_ping))
-            .route(callers::endpoints::ROOT, get(callers::common::root))
+            .route(
+                callers::endpoints::DBTEST,
+                get(callers::common::endpoint::db_ping),
+            )
+            .route(
+                callers::endpoints::ROOT,
+                get(callers::common::endpoint::root),
+            )
             .route(
                 callers::endpoints::REGISTER,
                 post(callers::register::register_user),
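For reference, a `Router` built by `routes()` would typically be served along these lines in axum 0.8; the address, port, and pool wiring here are illustrative assumptions (the Dockerfile above does expose port 3000), not code from the repository:

```rust
use axum::{Extension, Router};

// Sketch only: attach the PgPool as an Extension (as `db_ping` expects) and serve.
async fn serve(app: Router, pool: sqlx::PgPool) -> std::io::Result<()> {
    let app = app.layer(Extension(pool));
    let listener = tokio::net::TcpListener::bind("0.0.0.0:3000").await?;
    axum::serve(listener, app).await
}
```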
@@ -63,24 +69,23 @@ mod tests {
     mod db_mgr {
         use std::str::FromStr;
 
-        use icarus_auth::keys;
-
         pub const LIMIT: usize = 6;
 
         pub async fn get_pool() -> Result<sqlx::PgPool, sqlx::Error> {
-            let tm_db_url = std::env::var(keys::DBURL).expect("DATABASE_URL must be present");
+            let tm_db_url = icarus_envy::environment::get_db_url().await;
             let tm_options = sqlx::postgres::PgConnectOptions::from_str(&tm_db_url).unwrap();
             sqlx::PgPool::connect_with(tm_options).await
         }
 
         pub async fn generate_db_name() -> String {
-            let db_name =
-                get_database_name().unwrap() + &"_" + &uuid::Uuid::new_v4().to_string()[..LIMIT];
+            let db_name = get_database_name().await.unwrap()
+                + &"_"
+                + &uuid::Uuid::new_v4().to_string()[..LIMIT];
             db_name
         }
 
         pub async fn connect_to_db(db_name: &str) -> Result<sqlx::PgPool, sqlx::Error> {
-            let db_url = std::env::var(keys::DBURL).expect("DATABASE_URL must be set for tests");
+            let db_url = icarus_envy::environment::get_db_url().await;
             let options = sqlx::postgres::PgConnectOptions::from_str(&db_url)?.database(db_name);
             sqlx::PgPool::connect_with(options).await
         }
@@ -106,29 +111,21 @@ mod tests {
             Ok(())
         }
 
-        pub fn get_database_name() -> Result<String, Box<dyn std::error::Error>> {
-            dotenvy::dotenv().ok(); // Load .env file if it exists
-
-            match std::env::var(keys::DBURL) {
-                Ok(database_url) => {
-                    let parsed_url = url::Url::parse(&database_url)?;
-                    if parsed_url.scheme() == "postgres" || parsed_url.scheme() == "postgresql" {
-                        match parsed_url
-                            .path_segments()
-                            .and_then(|segments| segments.last().map(|s| s.to_string()))
-                        {
-                            Some(sss) => Ok(sss),
-                            None => Err("Error parsing".into()),
-                        }
-                    } else {
-                        // Handle other database types if needed
-                        Err("Error parsing".into())
-                    }
-                }
-                Err(_) => {
-                    // DATABASE_URL environment variable not found
-                    Err("Error parsing".into())
-                }
-            }
-        }
+        pub async fn get_database_name() -> Result<String, Box<dyn std::error::Error>> {
+            let database_url = icarus_envy::environment::get_db_url().await;
+
+            let parsed_url = url::Url::parse(&database_url)?;
+            if parsed_url.scheme() == "postgres" || parsed_url.scheme() == "postgresql" {
+                match parsed_url
+                    .path_segments()
+                    .and_then(|segments| segments.last().map(|s| s.to_string()))
+                {
+                    Some(sss) => Ok(sss),
+                    None => Err("Error parsing".into()),
+                }
+            } else {
+                // Handle other database types if needed
+                Err("Error parsing".into())
+            }
+        }
     }
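The database-name extraction above is plain URL parsing with the `url` crate; an equivalent stand-alone sketch with a hypothetical helper:

```rust
use url::Url;

// Pull the database name out of a postgres connection URL, mirroring
// `get_database_name` above. Returns None for non-postgres schemes.
fn db_name(database_url: &str) -> Option<String> {
    let parsed = Url::parse(database_url).ok()?;
    if parsed.scheme() == "postgres" || parsed.scheme() == "postgresql" {
        parsed.path_segments()?.last().map(|s| s.to_string())
    } else {
        None
    }
}

fn main() {
    let url = "postgresql://icarus_op:password@auth_db:5432/icarus_auth_db";
    assert_eq!(db_name(url).as_deref(), Some("icarus_auth_db"));
}
```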
@@ -42,6 +42,39 @@ pub mod user {
         }
     }
 
+    pub async fn update_last_login(
+        pool: &sqlx::PgPool,
+        user: &icarus_models::user::User,
+        time: &time::OffsetDateTime,
+    ) -> Result<time::OffsetDateTime, sqlx::Error> {
+        let result = sqlx::query(
+            r#"
+            UPDATE "user" SET last_login = $1 WHERE id = $2 RETURNING last_login
+            "#,
+        )
+        .bind(time)
+        .bind(user.id)
+        .fetch_optional(pool)
+        .await
+        .map_err(|e| {
+            eprintln!("Error updating time: {e}");
+            e
+        });
+
+        match result {
+            Ok(row) => match row {
+                Some(r) => {
+                    let last_login: time::OffsetDateTime = r
+                        .try_get("last_login")
+                        .map_err(|_e| sqlx::Error::RowNotFound)?;
+                    Ok(last_login)
+                }
+                None => Err(sqlx::Error::RowNotFound),
+            },
+            Err(err) => Err(err),
+        }
+    }
+
     pub async fn exists(pool: &sqlx::PgPool, username: &String) -> Result<bool, sqlx::Error> {
         let result = sqlx::query(
             r#"
@@ -80,7 +113,7 @@ pub mod user {
         .fetch_one(pool)
         .await
         .map_err(|e| {
-            eprintln!("Error inserting item: {}", e);
+            eprintln!("Error inserting item: {e}");
             e
         })?;
 
@@ -147,7 +180,7 @@ pub mod salt {
         .fetch_one(pool)
         .await
         .map_err(|e| {
-            eprintln!("Error inserting item: {}", e);
+            eprintln!("Error inserting item: {e}");
             e
         })?;
 
@@ -1,23 +1,16 @@
 use josekit::{
     self,
-    jws::{JwsHeader, alg::hmac::HmacJwsAlgorithm::Hs256},
-    jwt::{self, JwtPayload},
+    jws::alg::hmac::HmacJwsAlgorithm::Hs256,
+    jwt::{self},
 };
 
 use time;
 
-pub const TOKENTYPE: &str = "JWT";
 pub const KEY_ENV: &str = "SECRET_KEY";
 pub const MESSAGE: &str = "Something random";
 pub const ISSUER: &str = "icarus_auth";
 pub const AUDIENCE: &str = "icarus";
 
-pub fn get_key() -> Result<String, dotenvy::Error> {
-    dotenvy::dotenv().ok();
-    let key = std::env::var(KEY_ENV).expect("SECRET_KEY_NOT_FOUND");
-    Ok(key)
-}
-
 pub fn get_issued() -> time::Result<time::OffsetDateTime> {
     Ok(time::OffsetDateTime::now_utc())
 }
@@ -27,43 +20,13 @@ pub fn get_expiration(issued: &time::OffsetDateTime) -> Result<time::OffsetDateT
     Ok(*issued + duration_expire)
 }
 
-mod util {
-    pub fn time_to_std_time(
-        provided_time: &time::OffsetDateTime,
-    ) -> Result<std::time::SystemTime, std::time::SystemTimeError> {
-        let converted = std::time::SystemTime::from(*provided_time);
-        Ok(converted)
-    }
-}
-
 pub fn create_token(provided_key: &String) -> Result<(String, i64), josekit::JoseError> {
-    let mut header = JwsHeader::new();
-    header.set_token_type(TOKENTYPE);
-
-    let mut payload = JwtPayload::new();
-    payload.set_subject(MESSAGE);
-    payload.set_issuer(ISSUER);
-    payload.set_audience(vec![AUDIENCE]);
-    match get_issued() {
-        Ok(issued) => {
-            let expire = get_expiration(&issued).unwrap();
-            payload.set_issued_at(&util::time_to_std_time(&issued).unwrap());
-            payload.set_expires_at(&util::time_to_std_time(&expire).unwrap());
-
-            let key: String = if provided_key.is_empty() {
-                get_key().unwrap()
-            } else {
-                provided_key.to_owned()
-            };
-
-            let signer = Hs256.signer_from_bytes(key.as_bytes()).unwrap();
-            Ok((
-                josekit::jwt::encode_with_signer(&payload, &header, &signer).unwrap(),
-                (expire - time::OffsetDateTime::UNIX_EPOCH).whole_seconds(),
-            ))
-        }
-        Err(e) => Err(josekit::JoseError::InvalidClaim(e.into())),
-    }
+    let resource = icarus_models::token::TokenResource {
+        message: String::from(MESSAGE),
+        issuer: String::from(ISSUER),
+        audiences: vec![String::from(AUDIENCE)],
+    };
+    icarus_models::token::create_token(provided_key, &resource, time::Duration::hours(4))
 }
 
 pub fn verify_token(key: &String, token: &String) -> bool {
@@ -77,12 +40,12 @@ pub fn verify_token(key: &String, token: &String) -> bool {
 
 #[cfg(test)]
 mod tests {
 
     use super::*;
 
     #[test]
     fn test_tokenize() {
-        let special_key = get_key().unwrap();
+        let rt = tokio::runtime::Runtime::new().unwrap();
+        let special_key = rt.block_on(icarus_envy::environment::get_secret_key());
         match create_token(&special_key) {
             Ok((token, _duration)) => {
                 let result = verify_token(&special_key, &token);