Compare commits

...

5 Commits

Author SHA1 Message Date
d20d31d5f8 Added .env to gitignore 2025-01-31 14:41:25 +01:00
26f5195069 Finalized member migration 2025-01-31 14:37:48 +01:00
9cf9e5752f Started on member migration 2025-01-31 13:36:09 +01:00
169e89cefb Added basic axum server 2025-01-15 23:20:17 +01:00
fe219d51e1 Added devenv and nuxt client 2025-01-13 22:29:52 +01:00
44 changed files with 10050 additions and 0 deletions

3
.envrc Normal file
View File

@ -0,0 +1,3 @@
source_url "https://raw.githubusercontent.com/cachix/devenv/82c0147677e510b247d8b9165c54f73d32dfd899/direnvrc" "sha256-7u4iDd1nZpxL4tCzmPG0dQgC5V+/44Ba+tHkPob1v2k="
use devenv

12
.gitignore vendored Normal file
View File

@ -0,0 +1,12 @@
# Devenv
.devenv*
devenv.local.nix
# direnv
.direnv
# pre-commit
.pre-commit-config.yaml
# Other
.env

24
client/.gitignore vendored Normal file
View File

@ -0,0 +1,24 @@
# Nuxt dev/build outputs
.output
.data
.nuxt
.nitro
.cache
dist
# Node dependencies
node_modules
# Logs
logs
*.log
# Misc
.DS_Store
.fleet
.idea
# Local env files
.env
.env.*
!.env.example

75
client/README.md Normal file
View File

@ -0,0 +1,75 @@
# Nuxt Minimal Starter
Look at the [Nuxt documentation](https://nuxt.com/docs/getting-started/introduction) to learn more.
## Setup
Make sure to install dependencies:
```bash
# npm
npm install
# pnpm
pnpm install
# yarn
yarn install
# bun
bun install
```
## Development Server
Start the development server on `http://localhost:3000`:
```bash
# npm
npm run dev
# pnpm
pnpm dev
# yarn
yarn dev
# bun
bun run dev
```
## Production
Build the application for production:
```bash
# npm
npm run build
# pnpm
pnpm build
# yarn
yarn build
# bun
bun run build
```
Locally preview production build:
```bash
# npm
npm run preview
# pnpm
pnpm preview
# yarn
yarn preview
# bun
bun run preview
```
Check out the [deployment documentation](https://nuxt.com/docs/getting-started/deployment) for more information.

6
client/app.vue Normal file
View File

@ -0,0 +1,6 @@
<template>
<div>
<NuxtRouteAnnouncer />
<NuxtWelcome />
</div>
</template>

5
client/nuxt.config.ts Normal file
View File

@ -0,0 +1,5 @@
// https://nuxt.com/docs/api/configuration/nuxt-config
export default defineNuxtConfig({
compatibilityDate: '2024-11-01',
devtools: { enabled: true }
})

18
client/package.json Normal file
View File

@ -0,0 +1,18 @@
{
"name": "nuxt-app",
"private": true,
"type": "module",
"scripts": {
"build": "nuxt build",
"dev": "nuxt dev",
"generate": "nuxt generate",
"preview": "nuxt preview",
"postinstall": "nuxt prepare"
},
"dependencies": {
"nuxt": "^3.15.1",
"vue": "latest",
"vue-router": "latest"
},
"packageManager": "pnpm@9.15.4+sha512.b2dc20e2fc72b3e18848459b37359a32064663e5627a51e4c74b2c29dd8e8e0491483c3abb40789cfd578bf362fb6ba8261b05f0387d76792ed6e23ea3b1b6a0"
}

6413
client/pnpm-lock.yaml generated Normal file

File diff suppressed because it is too large Load Diff

BIN
client/public/favicon.ico Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.2 KiB

1
client/public/robots.txt Normal file
View File

@ -0,0 +1 @@

View File

@ -0,0 +1,3 @@
{
"extends": "../.nuxt/tsconfig.server.json"
}

4
client/tsconfig.json Normal file
View File

@ -0,0 +1,4 @@
{
// https://nuxt.com/docs/guide/concepts/typescript
"extends": "./.nuxt/tsconfig.json"
}

100
devenv.lock Normal file
View File

@ -0,0 +1,100 @@
{
"nodes": {
"devenv": {
"locked": {
"dir": "src/modules",
"lastModified": 1736426010,
"owner": "cachix",
"repo": "devenv",
"rev": "1c384bc4be3ee571511fbbc6fdc94fe47d60f6cf",
"type": "github"
},
"original": {
"dir": "src/modules",
"owner": "cachix",
"repo": "devenv",
"type": "github"
}
},
"flake-compat": {
"flake": false,
"locked": {
"lastModified": 1733328505,
"owner": "edolstra",
"repo": "flake-compat",
"rev": "ff81ac966bb2cae68946d5ed5fc4994f96d0ffec",
"type": "github"
},
"original": {
"owner": "edolstra",
"repo": "flake-compat",
"type": "github"
}
},
"gitignore": {
"inputs": {
"nixpkgs": [
"pre-commit-hooks",
"nixpkgs"
]
},
"locked": {
"lastModified": 1709087332,
"owner": "hercules-ci",
"repo": "gitignore.nix",
"rev": "637db329424fd7e46cf4185293b9cc8c88c95394",
"type": "github"
},
"original": {
"owner": "hercules-ci",
"repo": "gitignore.nix",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1733477122,
"owner": "cachix",
"repo": "devenv-nixpkgs",
"rev": "7bd9e84d0452f6d2e63b6e6da29fe73fac951857",
"type": "github"
},
"original": {
"owner": "cachix",
"ref": "rolling",
"repo": "devenv-nixpkgs",
"type": "github"
}
},
"pre-commit-hooks": {
"inputs": {
"flake-compat": "flake-compat",
"gitignore": "gitignore",
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1735882644,
"owner": "cachix",
"repo": "pre-commit-hooks.nix",
"rev": "a5a961387e75ae44cc20f0a57ae463da5e959656",
"type": "github"
},
"original": {
"owner": "cachix",
"repo": "pre-commit-hooks.nix",
"type": "github"
}
},
"root": {
"inputs": {
"devenv": "devenv",
"nixpkgs": "nixpkgs",
"pre-commit-hooks": "pre-commit-hooks"
}
}
},
"root": "root",
"version": 7
}

26
devenv.nix Normal file
View File

@ -0,0 +1,26 @@
# Development environment definition for devenv (https://devenv.sh).
{ pkgs, ... }:
{
  # Native build-time dependencies.
  # NOTE(review): openssl is presumably needed by the Rust server's TLS/sqlx
  # stack — confirm before removing.
  packages = with pkgs; [
    openssl
  ];
  # Toolchains for the Rust server and the Nuxt client.
  languages.rust.enable = true;
  languages.javascript = {
    enable = true;
    pnpm.enable = true;
  };
  # Local Postgres instance; credentials match server/.env's DATABASE_URL.
  services.postgres = {
    enable = true;
    listen_addresses = "127.0.0.1";
    initialDatabases = [
      {
        name = "wrbapp";
        pass = "password";
        user = "wrbapp";
      }
    ];
  };
}

15
devenv.yaml Normal file
View File

@ -0,0 +1,15 @@
# yaml-language-server: $schema=https://devenv.sh/devenv.schema.json
inputs:
nixpkgs:
url: github:cachix/devenv-nixpkgs/rolling
# If you're using non-OSS software, you can set allowUnfree to true.
# allowUnfree: true
# If you're willing to use a package that's vulnerable
# permittedInsecurePackages:
# - "openssl-1.1.1w"
# If you have more than one devenv you can merge them
#imports:
# - ./backend

2
server/.env Normal file
View File

@ -0,0 +1,2 @@
DATABASE_URL="postgres://wrbapp:password@localhost/wrbapp"
API_TOKEN="SuperSecretToken"

2
server/.gitignore vendored Normal file
View File

@ -0,0 +1,2 @@
target
.env

2571
server/Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

29
server/Cargo.toml Normal file
View File

@ -0,0 +1,29 @@
[package]
name = "wrbapp_server"
version = "0.1.0"
edition = "2021"
[dependencies]
# Primary crates
axum = { version = "0.8", features = [ "macros", "json" ] }
axum-extra = { version = "0.10.0", features = [ "typed-header" ] }
tokio = { version = "1.43", features = [ "rt-multi-thread", "macros" ] }
sqlx = { version = "0.8", features = [ "runtime-tokio", "postgres" ] }
# Secondary crates
csv = { version = "1.3" }
serde = "1.0"
dotenvy = "0.15.7"
validator = { version = "0.19.0", features = [ "derive" ] }
# Tertiary crates
tracing = "0.1"
tracing-subscriber = "0.3"
chrono = "0.4.39"
uuid = "1.12.0"
serde_json = "1.0.137"
rand = "0.9"
thiserror = { version = "2.0" }
itertools = "0.14"

5
server/build.rs Normal file
View File

@ -0,0 +1,5 @@
// generated by `sqlx migrate build-script`
// Build script: makes Cargo rebuild the crate whenever the `migrations/`
// directory changes, so the `sqlx::migrate!()` macro always embeds the
// latest migration files.
fn main() {
    // trigger recompilation when a new migration is added
    println!("cargo:rerun-if-changed=migrations");
}

View File

@ -0,0 +1,10 @@
-- Members table: one row per club member imported from the administration CSV.
create table "members" (
    -- 7-character relation code from the source system (also validated in Rust).
    id varchar(7) primary key,
    first_name text not null,
    full_name text not null,
    -- Secret token a member uses to register; generated server-side on insert.
    registration_token text unique not null,
    -- Optional diploma name.
    diploma text,
    -- Hour identifiers parsed from the import.
    hours text[] not null,
    groups text[] not null
);

48
server/src/auth.rs Normal file
View File

@ -0,0 +1,48 @@
use std::collections::HashSet;
use axum::{extract::FromRequestParts, http::request::Parts, RequestPartsExt};
use axum_extra::{
headers::{authorization::Bearer, Authorization},
typed_header::TypedHeaderRejectionReason,
TypedHeader,
};
use bearer::verify_bearer;
pub use error::AuthError;
mod bearer;
mod error;
/// Set of permission names granted to the current request
/// (currently only "root", granted for a valid API token).
#[derive(Debug)]
pub struct Permissions<'a>(pub HashSet<&'a str>);
// Middleware for getting permissions
impl<S> FromRequestParts<S> for Permissions<'_>
where
S: Send + Sync,
{
type Rejection = crate::Error;
async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self, Self::Rejection> {
// First check if the request has a beaerer token to authenticate
match parts.extract::<TypedHeader<Authorization<Bearer>>>().await {
Ok(bearer) => {
verify_bearer(bearer.token().to_string()).map_err(|_| AuthError::InvalidToken)?;
let permissions = Permissions {
0: HashSet::from(["root"]),
};
return Ok(permissions);
}
Err(err) => match err.reason() {
TypedHeaderRejectionReason::Missing => (),
TypedHeaderRejectionReason::Error(_err) => {
return Err(AuthError::InvalidToken.into())
}
_ => return Err(AuthError::Unexpected.into()),
},
};
Err(AuthError::Unexpected.into())
}
}

View File

@ -0,0 +1,8 @@
/// Checks `token` against the `API_TOKEN` environment variable.
///
/// Returns `Err(())` when the variable is unset or the token does not match.
pub fn verify_bearer(token: String) -> Result<(), ()> {
    let env_api_token = dotenvy::var("API_TOKEN").map_err(|_| ())?;
    // NOTE(review): `==` on strings is not constant-time; consider a
    // constant-time comparison if timing attacks are a concern.
    if env_api_token == token {
        Ok(())
    } else {
        Err(())
    }
}

20
server/src/auth/error.rs Normal file
View File

@ -0,0 +1,20 @@
use std::fmt::Display;
/// Errors produced while authenticating a request.
#[derive(Debug)]
pub enum AuthError {
    /// Caller authenticated but lacks the required permission.
    // NOTE(review): variant name is missing an "i" ("NoPermissions");
    // renaming would touch every call site, so it is only flagged here.
    NoPermssions,
    /// A bearer token was supplied but did not match the configured API token.
    InvalidToken,
    /// Any other auth failure (e.g. missing Authorization header).
    Unexpected,
}
impl Display for AuthError {
    /// Human-readable message for each variant (also surfaced through the
    /// `{0}` placeholder in `crate::Error`'s `#[error]` attribute).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Write the literals directly instead of routing them through "{}".
        match self {
            Self::NoPermssions => write!(f, "No permissions"),
            Self::InvalidToken => write!(f, "Invalid token"),
            Self::Unexpected => write!(f, "Unexpected error"),
        }
    }
}

impl std::error::Error for AuthError {}

5
server/src/database.rs Normal file
View File

@ -0,0 +1,5 @@
mod postgres;
pub use postgres::apply_migrations;
pub use postgres::connect;
pub mod model;

View File

@ -0,0 +1,5 @@
pub mod member;
pub mod session;
pub mod user;
pub use member::Member;

View File

@ -0,0 +1,90 @@
use rand::distr::{Alphanumeric, SampleString};
use sqlx::{PgPool, Postgres, QueryBuilder};
use validator::Validate;
/// Database-row representation of a member (`members` table).
#[derive(Debug, Validate)]
pub struct Member {
    // Primary key; fixed-width 7-character relation code (matches varchar(7)).
    #[validate(length(equal = 7))]
    pub id: String,
    pub first_name: String,
    pub full_name: String,
    // Secret registration token; a fresh one is generated in `insert_many`.
    // NOTE(review): the migration declares this column NOT NULL while the
    // field is Option — confirm how query_as! maps this.
    pub registration_token: Option<String>,
    // Optional diploma name.
    pub diploma: Option<String>,
    // Hour identifiers parsed from the CSV import.
    pub hours: Vec<String>,
    pub groups: Vec<String>,
}
impl Member {
pub async fn get_many(transaction: &PgPool, members: Vec<Self>) -> Result<(), sqlx::Error> {
Ok(())
}
pub async fn get_all(pool: &PgPool) -> Result<Vec<Self>, sqlx::Error> {
let members = sqlx::query_as!(Member, "SELECT * FROM members;",)
.fetch_all(pool)
.await?;
Ok(members)
}
pub async fn insert_many(
transaction: &mut sqlx::Transaction<'_, Postgres>,
members: Vec<Self>,
) -> Result<(), sqlx::Error> {
if members.len() == 0 {
return Ok(());
}
let mut query_builder = QueryBuilder::new(
"INSERT INTO members(id, first_name, full_name, registration_token, diploma, hours, groups) "
);
query_builder.push_values(members.into_iter(), |mut b, member| {
let registration_token = Alphanumeric.sample_string(&mut rand::rng(), 16);
b.push_bind(member.id);
b.push_bind(member.first_name);
b.push_bind(member.full_name);
b.push_bind(registration_token);
b.push_bind(member.diploma);
b.push_bind(member.hours);
b.push_bind(member.groups);
});
let query = query_builder.build();
query.execute(&mut **transaction).await?;
Ok(())
}
pub async fn update_many(
transaction: &mut sqlx::Transaction<'_, Postgres>,
members: Vec<Self>,
) -> Result<(), sqlx::Error> {
if members.len() == 0 {
return Ok(());
}
for member in members {
sqlx::query!("UPDATE ONLY members SET first_name = $1, full_name = $2, diploma = $3, hours = $4, groups = $5 WHERE id = $6", member.first_name, member.full_name, member.diploma, &member.hours, &member.groups, member.id).execute(&mut **transaction).await?;
}
Ok(())
}
pub async fn remove_many(
transaction: &mut sqlx::Transaction<'_, Postgres>,
member_ids: &Vec<String>,
) -> Result<(), sqlx::Error> {
sqlx::query!(
"
DELETE FROM members WHERE id = ANY($1)
",
member_ids
)
.execute(&mut **transaction)
.await?;
Ok(())
}
}

View File

@ -0,0 +1,6 @@
/// A login session record.
// NOTE(review): not referenced by any visible query yet — expect dead_code
// warnings until the session feature is wired up.
struct Session {
    id: u32,
    // Owning user's id.
    user_id: u32,
    // Opaque token presented by the client.
    token: String,
    // Expiry timestamp; naive (no timezone) — presumably UTC, TODO confirm.
    expires: chrono::NaiveDateTime,
}

View File

@ -0,0 +1,8 @@
/// Database-row representation of an application user.
// NOTE(review): not referenced by any visible query yet.
#[derive(validator::Validate)]
struct User {
    pub id: uuid::Uuid,
    // Validated as a syntactically correct e-mail address.
    #[validate(email)]
    pub email: String,
    // NOTE(review): storage format (hash vs plaintext) is not visible here —
    // ensure this is a password hash before persisting.
    pub password: String,
    // Whether the user has administrative rights.
    pub admin: bool,
}

View File

@ -0,0 +1,37 @@
use sqlx::{
migrate::MigrateDatabase, postgres::PgPoolOptions, Connection, PgConnection, PgPool, Postgres,
};
/// Builds a Postgres connection pool from the `DATABASE_URL` environment
/// variable (max 5 connections).
///
/// # Panics
/// Panics when `DATABASE_URL` is not set.
pub async fn connect() -> Result<PgPool, sqlx::Error> {
    tracing::info!("Initializing database connection");
    let url = dotenvy::var("DATABASE_URL").expect("`DATABASE_URL` environment variable not set");
    PgPoolOptions::new()
        .max_connections(5)
        .connect(&url)
        .await
}
/// Ensures the target database exists, then applies all pending embedded
/// sqlx migrations.
///
/// # Panics
/// Panics when `DATABASE_URL` is not set.
pub async fn apply_migrations() -> Result<(), sqlx::Error> {
    let uri = dotenvy::var("DATABASE_URL").expect("`DATABASE_URL` environment variable not set");
    let uri = uri.as_str();
    if !Postgres::database_exists(uri).await? {
        tracing::info!("Creating database...");
        Postgres::create_database(uri).await?;
    }
    tracing::info!("Applying migrations...");
    let mut conn = PgConnection::connect(uri).await?;
    // Propagate migration failures instead of panicking; `sqlx::Error`
    // implements `From<MigrateError>`, so `?` converts it.
    sqlx::migrate!().run(&mut conn).await?;
    Ok(())
}

18
server/src/lib.rs Normal file
View File

@ -0,0 +1,18 @@
use routes::member::migrate::MigrationStore;
use sqlx::{Pool, Postgres};
use std::sync::Arc;
use tokio::sync::Mutex;
pub mod auth;
pub mod database;
pub mod model;
pub mod routes;
pub mod util;
pub use util::error::Error;
/// Shared application state handed to every axum handler via `with_state`.
#[derive(Clone)]
pub struct AppState {
    // Postgres connection pool.
    pub pool: Pool<Postgres>,
    // Pending member-migration diffs awaiting confirmation. Uses a tokio
    // Mutex because handlers hold the guard across `.await` points.
    pub migration_store: Arc<Mutex<MigrationStore>>,
}

52
server/src/main.rs Normal file
View File

@ -0,0 +1,52 @@
use std::sync::Arc;
use axum::Router;
use tokio::{net::TcpListener, sync::Mutex};
use tracing::Level;
use tracing_subscriber::FmtSubscriber;
use wrbapp_server::routes::member::migrate::MigrationStore;
use wrbapp_server::routes::routes;
use wrbapp_server::{database, AppState};
#[tokio::main]
async fn main() {
    // Load `.env` if present; absence is fine (e.g. in production).
    dotenvy::dotenv().ok();

    // Initialize logging
    let subscriber = FmtSubscriber::builder()
        .with_max_level(Level::INFO)
        .finish();
    tracing::subscriber::set_global_default(subscriber)
        .expect("Error while initializing tracing subscriber");

    // Initialize database connection
    database::apply_migrations()
        .await
        .expect("Database migrations failed");
    let pool = database::connect()
        .await
        .expect("Database connection failed");

    let migration_store = Arc::new(Mutex::new(MigrationStore::default()));
    let app_state = AppState {
        pool,
        migration_store,
    };

    // Serve app. The bind address can be overridden via `LISTEN_ADDR`;
    // the default matches the previously hard-coded value.
    let addr = dotenvy::var("LISTEN_ADDR").unwrap_or_else(|_| String::from("127.0.0.1:3000"));
    let app = Router::new().merge(routes()).with_state(app_state);
    let listener = TcpListener::bind(&addr)
        .await
        .expect("Error while initializing listener");
    tracing::info!("Listening on {}", listener.local_addr().unwrap());
    axum::serve(listener, app)
        .await
        .expect("Error while serving axum application");
}

6
server/src/model.rs Normal file
View File

@ -0,0 +1,6 @@
pub mod member;
pub mod session;
pub mod user;
pub use member::Member;
pub use user::User;

View File

@ -0,0 +1,46 @@
/// A member's name as exposed by the API.
#[derive(Debug, Clone, serde::Serialize)]
pub struct Name {
    // Given name ("Roepnaam" in the import).
    pub first: String,
    // Full display name.
    pub full: String,
}
/// API-facing member model; converted to/from the database model
/// (`crate::database::model::Member`) below.
#[derive(Debug, Clone, serde::Serialize)]
pub struct Member {
    // 7-character relation code (primary key in the database).
    pub id: String,
    pub name: Name,
    // Secret registration token; only present when loaded from the database.
    pub registration_token: Option<String>,
    pub diploma: Option<String>,
    pub hours: Vec<String>,
    pub groups: Vec<String>,
}
use crate::database::model::Member as DbMember;
impl From<DbMember> for Member {
fn from(value: DbMember) -> Self {
Member {
id: value.id,
name: Name {
first: value.first_name,
full: value.full_name,
},
registration_token: value.registration_token,
diploma: value.diploma,
hours: value.hours,
groups: value.groups,
}
}
}
impl From<Member> for DbMember {
fn from(value: Member) -> Self {
DbMember {
id: value.id,
first_name: value.name.first,
full_name: value.name.full,
registration_token: None,
diploma: value.diploma,
hours: value.hours,
groups: value.groups,
}
}
}

View File

5
server/src/model/user.rs Normal file
View File

@ -0,0 +1,5 @@
/// API-facing user model (no password field, unlike the database model).
pub struct User {
    pub id: uuid::Uuid,
    pub email: String,
    // Whether the user has administrative rights.
    pub admin: bool,
}

30
server/src/routes.rs Normal file
View File

@ -0,0 +1,30 @@
use axum::{
extract::State,
http::StatusCode,
routing::{get, post},
Router,
};
use member::migrate::{migrate_confirm, migrate_request};
use crate::{auth::Permissions, AppState};
pub mod auth;
pub mod member;
pub mod user;
/// Builds the application's route table. The shared `AppState` is injected
/// later via `Router::with_state` in `main`.
pub fn routes() -> Router<AppState> {
    Router::new()
        .route("/", get(root))
        // .route("/member/:id", get())
        .route("/members/migrate_request", post(migrate_request))
        .route("/members/migrate_confirm", post(migrate_confirm))
}
/// Root endpoint: logs the caller's resolved permissions and returns a
/// plain greeting. Mainly useful as a smoke test for the auth extractor.
async fn root(
    // State is currently unused; the underscore silences the warning while
    // keeping the extractor (and thus the handler signature) intact.
    State(_state): State<AppState>,
    permissions: Permissions<'_>,
) -> Result<String, (StatusCode, String)> {
    tracing::info!("{:?}", permissions);
    Ok("Hello world".to_string())
}

View File

@ -0,0 +1 @@

View File

@ -0,0 +1 @@
pub mod migrate;

View File

@ -0,0 +1,277 @@
use std::collections::{HashMap, HashSet};
use axum::{extract::State, Json};
use itertools::Itertools;
use sqlx::PgPool;
use crate::{
auth::{AuthError, Permissions},
database::model::Member as DbMember,
model::{member::Name, Member},
util::convert_vec,
AppState,
};
/// POST handler: parses the uploaded members CSV, diffs it against the
/// current member table, and stores the pending diff under a fresh id
/// until it is confirmed via `migrate_confirm`.
pub async fn migrate_request<'a>(
    State(state): State<AppState>,
    permissions: Permissions<'a>,
    body: String,
) -> Result<Json<MigrationResponse>, crate::Error> {
    if !permissions.0.contains("root") {
        return Err(AuthError::NoPermssions.into());
    }
    tracing::info!("Migration is requested");

    // Convert the input CSV to a vector of members
    let incoming: Vec<Member> = Row::from_csv_many(&body)?
        .into_iter()
        .map(Into::into)
        .collect();
    let existing = convert_vec(DbMember::get_all(&state.pool).await?);
    let diff = generate_diff(incoming, existing);

    // Park the diff in the store; the returned id is the confirmation ticket.
    let ticket = state.migration_store.lock().await.insert(diff.clone());
    Ok(Json(MigrationResponse::from((ticket, diff))))
}
pub async fn migrate_confirm<'a>(
State(state): State<AppState>,
permissions: Permissions<'a>,
body: String,
) -> Result<(), crate::Error> {
if !permissions.0.contains("root") {
return Err(AuthError::NoPermssions.into());
}
tracing::info!("Migration is confirmed");
// TODO: Implement better error naming
let count = match body.trim().parse::<u32>() {
Ok(c) => c,
Err(_) => return Err(crate::Error::NotFound),
};
let mut store = state.migration_store.lock().await;
let members_diff = match store.remove(&count) {
Some(m) => m,
None => return Err(crate::Error::NotFound),
};
let inserted_len = members_diff.insert.len();
let update_len = members_diff.update.len();
let remove_len = members_diff.remove.len();
migrate_transaction(&state.pool, members_diff).await?;
tracing::info!(
"Migration is successfully executed. Inserted: {}, updated: {}, removed: {}",
inserted_len,
update_len,
remove_len
);
Ok(())
}
async fn migrate_transaction(pool: &PgPool, members_diff: MembersDiff) -> Result<(), sqlx::Error> {
let mut transaction = pool.begin().await?;
DbMember::insert_many(&mut transaction, convert_vec(members_diff.insert)).await?;
DbMember::update_many(&mut transaction, convert_vec(members_diff.update)).await?;
let members_remove_ids: Vec<String> = members_diff.remove.into_iter().map(|m| m.id).collect();
DbMember::remove_many(&mut transaction, &members_remove_ids).await?;
transaction.commit().await?;
Ok(())
}
// Create a row for the csv file
/// One record of the uploaded members CSV (semicolon-delimited, Dutch
/// column headers from the club administration export).
#[derive(Debug, serde::Deserialize, Clone)]
struct Row {
    // Relation code; becomes the member id.
    #[serde(rename = "Relatiecode")]
    id: String,
    // Given name.
    #[serde(rename = "Roepnaam")]
    first_name: String,
    // #[serde(rename = "Tussenvoegsel(s)")]
    // middle_name: String,
    // #[serde(rename = "Achternaam")]
    // last_name: String,
    // NOTE(review): email is parsed but never used downstream — confirm.
    #[serde(rename = "E-mail")]
    email: String,
    // Raw hours string; split apart by `hours_parsed`.
    #[serde(rename = "Verenigingssporten")]
    hours: String,
    #[serde(rename = "Diploma dropdown 1")]
    diploma: Option<String>,
}
/// Result of diffing an uploaded member list against the database:
/// what to insert, what to update, and what to remove.
#[derive(Debug, Clone)]
pub struct MembersDiff {
    insert: Vec<Member>,
    update: Vec<Member>,
    remove: Vec<Member>,
}
/// JSON response for a migration request: the confirmation id plus a
/// summary (`(id, name)` pairs) of each pending change.
#[derive(serde::Serialize)]
pub struct MigrationResponse {
    // Confirmation id to POST back to `migrate_confirm`.
    count: u32,
    insert: Vec<(String, Name)>,
    update: Vec<(String, Name)>,
    remove: Vec<(String, Name)>,
}
/// In-memory store of pending migration diffs, keyed by an incrementing id.
// NOTE(review): state is process-local and lost on restart — presumably
// acceptable for this admin workflow; confirm.
pub struct MigrationStore {
    pub store: HashMap<u32, MembersDiff>,
    // Last id handed out; incremented before use, so ids start at 1.
    pub count: u32,
}
impl Default for MigrationStore {
fn default() -> Self {
Self {
count: 0,
store: HashMap::new(),
}
}
}
impl Row {
    /// Parses the semicolon-delimited CSV body into rows, failing on the
    /// first malformed record.
    fn from_csv_many(input: &str) -> Result<Vec<Self>, csv::Error> {
        csv::ReaderBuilder::new()
            .delimiter(b';')
            .from_reader(input.as_bytes())
            .deserialize()
            .collect()
    }

    /// Splits the raw hours field (groups separated by ", ", parts within a
    /// group by " - "), drops the literal "Groep" tokens, and deduplicates
    /// while preserving first-seen order.
    fn hours_parsed(&self) -> Vec<String> {
        self.hours
            .split(", ")
            .flat_map(|group| group.split(" - "))
            .filter(|part| *part != "Groep")
            .map(str::to_string)
            .unique()
            .collect()
    }
}
// Implement `From` rather than `Into` (the idiomatic direction); the blanket
// impl still gives callers `row.into()`.
impl From<Row> for Name {
    fn from(value: Row) -> Self {
        Name {
            first: value.first_name,
            // Placeholder (typo included) kept byte-identical until the full
            // name columns are wired up — changing it would change stored data.
            full: "Temporarely full name".to_string(),
        }
    }
}
// Implement `From` rather than `Into`; `row.into()` keeps working via the
// blanket impl. Also avoids the redundant post-clone field clones.
impl From<Row> for Member {
    fn from(value: Row) -> Self {
        // Derive the hours before the row is (partially) moved.
        let hours = value.hours_parsed();
        // Name conversion consumes a Row, so clone once for it.
        let name: Name = value.clone().into();
        Member {
            id: value.id,
            name,
            // Tokens are only assigned by the database layer on insert.
            registration_token: None,
            diploma: value.diploma,
            hours,
            groups: Vec::new(),
        }
    }
}
impl From<(u32, MembersDiff)> for MigrationResponse {
fn from(value: (u32, MembersDiff)) -> Self {
let members_insert: Vec<(String, Name)> =
value.1.insert.into_iter().map(|m| (m.id, m.name)).collect();
let members_update: Vec<(String, Name)> =
value.1.update.into_iter().map(|m| (m.id, m.name)).collect();
let members_remove: Vec<(String, Name)> =
value.1.remove.into_iter().map(|m| (m.id, m.name)).collect();
Self {
count: value.0,
insert: members_insert,
update: members_update,
remove: members_remove,
}
}
}
impl MigrationStore {
    /// Stores a diff under a fresh id and returns that id.
    fn insert(&mut self, members_diff: MembersDiff) -> u32 {
        self.count += 1;
        self.store.insert(self.count, members_diff);
        self.count
    }

    /// Looks up a pending diff without consuming it.
    // NOTE(review): currently unused by the visible handlers.
    fn get(&self, id: &u32) -> Option<&MembersDiff> {
        self.store.get(id)
    }

    /// Removes and returns a pending diff, if present.
    fn remove(&mut self, id: &u32) -> Option<MembersDiff> {
        self.store.remove(id)
    }
}
fn generate_diff(members_new: Vec<Member>, members_old: Vec<Member>) -> MembersDiff {
let members_old_map: HashMap<String, Member> = members_old
.iter()
.map(|m| (m.id.clone(), m.clone()))
.collect();
let members_new_map: HashMap<String, Member> = members_new
.iter()
.map(|m| (m.id.clone(), m.clone()))
.collect();
let mut members_insert: Vec<Member> = Vec::new();
let mut members_update: Vec<Member> = Vec::new();
let mut members_remove: Vec<Member> = Vec::new();
for old_member in members_old {
if let Some(new_member) = members_new_map.get(&old_member.id) {
members_update.push(Member {
id: old_member.id,
name: new_member.name.clone(),
registration_token: old_member.registration_token,
diploma: new_member.diploma.clone(),
hours: new_member.hours.clone(),
groups: old_member.groups,
})
} else {
members_remove.push(old_member);
}
}
for new_member in members_new {
if !members_old_map.contains_key(&new_member.id) {
members_insert.push(new_member);
}
}
MembersDiff {
insert: members_insert,
update: members_update,
remove: members_remove,
}
}

View File

@ -0,0 +1 @@

4
server/src/util.rs Normal file
View File

@ -0,0 +1,4 @@
pub mod error;
mod helpers;
pub use helpers::convert_vec;

52
server/src/util/error.rs Normal file
View File

@ -0,0 +1,52 @@
use crate::auth::AuthError;
use axum::{
http::StatusCode,
response::{IntoResponse, Response},
Json,
};
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("CSV error: {0}")]
Csv(#[from] csv::Error),
#[error("Auth error: {0}")]
Auth(#[from] crate::auth::AuthError),
#[error("Database error: {0}")]
Database(#[from] sqlx::Error),
#[error("Resource not found")]
NotFound,
}
impl IntoResponse for Error {
    /// Maps each error variant to an HTTP status plus a JSON
    /// `{"error": message}` body, logging the full error server-side.
    fn into_response(self) -> Response {
        tracing::error!("Error... {:?}", self);
        let (status, error_message) = match self {
            Error::Auth(AuthError::NoPermssions) => {
                (StatusCode::UNAUTHORIZED, String::from("No permissions"))
            }
            Error::Auth(AuthError::InvalidToken) => {
                (StatusCode::BAD_REQUEST, String::from("Invalid token"))
            }
            Error::Auth(AuthError::Unexpected) => (
                StatusCode::INTERNAL_SERVER_ERROR,
                // Fixed typo in the user-facing message ("occured").
                String::from("Unexpected error occurred"),
            ),
            Error::Csv(err) => (StatusCode::BAD_REQUEST, err.to_string()),
            // Fixed: a missing resource is 404 NOT_FOUND, not 400.
            Error::NotFound => (
                StatusCode::NOT_FOUND,
                String::from("Could not find resource"),
            ),
            // NOTE(review): this echoes the raw database error to the client,
            // which can leak schema details — consider a generic message.
            Error::Database(err) => (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()),
        };
        let body = Json(serde_json::json!({
            "error": error_message
        }));
        (status, body).into_response()
    }
}

View File

@ -0,0 +1,6 @@
/// Converts a `Vec<T>` into a `Vec<U>` element-wise via `U: From<T>`.
pub fn convert_vec<T, U>(vec: Vec<T>) -> Vec<U>
where
    U: From<T>,
{
    let mut converted = Vec::with_capacity(vec.len());
    converted.extend(vec.into_iter().map(Into::into));
    converted
}