Add basic employee hour balance report

This commit is contained in:
Simon Goller 2024-06-23 18:12:54 +02:00
parent 0eb885216a
commit d4adcb182f
31 changed files with 2155 additions and 5 deletions

108
dao_impl/src/extra_hours.rs Normal file
View file

@ -0,0 +1,108 @@
use std::sync::Arc;
use crate::ResultDbErrorExt;
use async_trait::async_trait;
use dao::{
extra_hours::{ExtraHoursCategoryEntity, ExtraHoursDao, ExtraHoursEntity},
DaoError,
};
use sqlx::query_as;
use time::{format_description::well_known::Iso8601, PrimitiveDateTime};
use uuid::Uuid;
/// Raw row shape of the `extra_hours` table as stored in SQLite.
struct ExtraHoursDb {
    // UUID stored as a 16-byte BLOB.
    id: Vec<u8>,
    // UUID stored as a 16-byte BLOB.
    sales_person_id: Vec<u8>,
    // SQLite REAL; narrowed to f32 in the entity conversion.
    amount: f64,
    // One of "ExtraWork" | "Vacation" | "SickLeave" | "Holiday" (see TryFrom below).
    category: String,
    // NULL maps to an empty description in the entity.
    description: Option<String>,
    // ISO-8601 text timestamp.
    date_time: String,
    // Soft-delete timestamp (ISO-8601 text); NULL while the row is live.
    deleted: Option<String>,
}
impl TryFrom<&ExtraHoursDb> for ExtraHoursEntity {
    type Error = DaoError;

    /// Converts a raw `extra_hours` row into the domain entity.
    ///
    /// # Errors
    /// Returns `DaoError` on malformed UUID bytes, an unknown category
    /// string, or an unparsable ISO-8601 timestamp. (The original code
    /// `unwrap`ped the UUID and timestamp conversions, panicking on a
    /// corrupt row; the sibling DAO impls in this crate propagate these
    /// with `?`, so this now does the same.)
    fn try_from(extra_hours: &ExtraHoursDb) -> Result<Self, DaoError> {
        let category = match extra_hours.category.as_str() {
            "ExtraWork" => ExtraHoursCategoryEntity::ExtraWork,
            "Vacation" => ExtraHoursCategoryEntity::Vacation,
            "SickLeave" => ExtraHoursCategoryEntity::SickLeave,
            "Holiday" => ExtraHoursCategoryEntity::Holiday,
            // `value` alone suffices; `value @ _` is redundant.
            value => return Err(DaoError::EnumValueNotFound(value.into())),
        };
        Ok(Self {
            id: Uuid::from_slice(extra_hours.id.as_ref())?,
            sales_person_id: Uuid::from_slice(extra_hours.sales_person_id.as_ref())?,
            amount: extra_hours.amount as f32,
            category,
            // NULL description maps to ""; `as_deref` avoids the intermediate
            // String allocation the original `clone().unwrap_or_else(..)` made.
            description: extra_hours.description.as_deref().unwrap_or_default().into(),
            date_time: PrimitiveDateTime::parse(
                extra_hours.date_time.as_str(),
                &Iso8601::DATE_TIME,
            )?,
            deleted: extra_hours
                .deleted
                .as_ref()
                .map(|deleted| PrimitiveDateTime::parse(deleted, &Iso8601::DATE_TIME))
                .transpose()?,
        })
    }
}
/// SQLite-backed implementation of `ExtraHoursDao`.
pub struct ExtraHoursDaoImpl {
    // Shared connection pool; `Arc` lets several DAOs reuse one pool.
    pub pool: Arc<sqlx::SqlitePool>,
}
impl ExtraHoursDaoImpl {
pub fn new(pool: Arc<sqlx::SqlitePool>) -> Self {
Self { pool }
}
}
#[async_trait]
impl ExtraHoursDao for ExtraHoursDaoImpl {
    /// Loads all extra-hours rows for one sales person within `year`,
    /// up to a cut-off point.
    ///
    /// NOTE(review): the SQL compares `strftime('%m', date_time)` — the
    /// MONTH (01..12) — against the `until_week` parameter, which by its
    /// name (and by the sibling shiftplan report, which filters on
    /// `booking.calendar_week`) is a calendar WEEK (1..53). This looks
    /// like a month/week mix-up: weeks 13..53 effectively disable the
    /// filter, weeks 1..12 silently filter by month. Confirm the intended
    /// semantics and whether a week-based expression was meant instead.
    async fn find_by_sales_person_id_and_year(
        &self,
        sales_person_id: Uuid,
        year: u32,
        until_week: u8,
    ) -> Result<Arc<[ExtraHoursEntity]>, crate::DaoError> {
        // The UUID is stored as a 16-byte BLOB.
        let id_vec = sales_person_id.as_bytes().to_vec();
        Ok(query_as!(
            ExtraHoursDb,
            "SELECT id, sales_person_id, amount, category, description, date_time, deleted FROM extra_hours WHERE sales_person_id = ? AND strftime('%Y', date_time) = ? AND strftime('%m', date_time) <= ?",
            id_vec,
            year,
            until_week,
        ).fetch_all(self.pool.as_ref())
        .await
        .map_db_error()?
        .iter()
        // Row -> entity conversion is fallible; the first failure aborts the collect.
        .map(ExtraHoursEntity::try_from)
        .collect::<Result<Arc<[_]>, _>>()?
        .into())
    }

    /// Not yet implemented — this commit only needs the read path.
    async fn create(
        &self,
        _entity: &ExtraHoursEntity,
        _process: &str,
    ) -> Result<(), crate::DaoError> {
        unimplemented!()
    }

    /// Not yet implemented — this commit only needs the read path.
    async fn update(
        &self,
        _entity: &ExtraHoursEntity,
        _process: &str,
    ) -> Result<(), crate::DaoError> {
        unimplemented!()
    }

    /// Not yet implemented — this commit only needs the read path.
    async fn delete(&self, _id: Uuid, _process: &str) -> Result<(), crate::DaoError> {
        unimplemented!()
    }
}

View file

@ -5,8 +5,11 @@ use dao::{DaoError, PrivilegeEntity};
use sqlx::{query, query_as, SqlitePool};
pub mod booking;
pub mod extra_hours;
pub mod sales_person;
pub mod shiftplan_report;
pub mod slot;
pub mod working_hours;
pub trait ResultDbErrorExt<T, E> {
fn map_db_error(self) -> Result<T, DaoError>;

View file

@ -0,0 +1,123 @@
use std::sync::Arc;
use crate::ResultDbErrorExt;
use async_trait::async_trait;
use dao::{
shiftplan_report::{ShiftplanQuickOverviewEntity, ShiftplanReportDao, ShiftplanReportEntity},
slot::DayOfWeek,
DaoError,
};
use sqlx::query_as;
use uuid::Uuid;
/// One aggregated shiftplan row: summed hours for a sales person on a
/// given year / calendar week / day-of-week (see the GROUP BY in the query).
pub struct ShiftplanReportDb {
    // UUID stored as a 16-byte BLOB.
    pub sales_person_id: Vec<u8>,
    // SUM(..) result; NULL-able in SQL, mapped to 0.0 in the conversion.
    pub hours: Option<f64>,
    pub year: i64,
    pub calendar_week: i64,
    pub day_of_week: i64,
}
impl TryFrom<&ShiftplanReportDb> for ShiftplanReportEntity {
    type Error = DaoError;

    /// Maps one aggregated report row onto the domain entity, failing on
    /// a malformed UUID or an out-of-range day-of-week value.
    fn try_from(entity: &ShiftplanReportDb) -> Result<Self, DaoError> {
        let sales_person_id = Uuid::from_slice(entity.sales_person_id.as_ref())?;
        let day_number = entity.day_of_week as u8;
        let day_of_week =
            DayOfWeek::from_number(day_number).ok_or(DaoError::InvalidDayOfWeek(day_number))?;
        Ok(Self {
            sales_person_id,
            // NULL sum defaults to zero hours.
            hours: entity.hours.unwrap_or(0.0) as f32,
            year: entity.year as u32,
            calendar_week: entity.calendar_week as u8,
            day_of_week,
        })
    }
}
/// Aggregated hours per sales person and year (quick-overview query).
pub struct ShiftplanQuickOverviewDb {
    // UUID stored as a 16-byte BLOB.
    pub sales_person_id: Vec<u8>,
    // SUM(..) result; NULL-able in SQL, mapped to 0.0 in the conversion.
    pub hours: Option<f64>,
    pub year: i64,
}
impl From<&ShiftplanQuickOverviewDb> for ShiftplanQuickOverviewEntity {
    /// Converts an aggregated overview row into the domain entity.
    ///
    /// # Panics
    /// Panics if `sales_person_id` is not exactly 16 bytes. The column is
    /// written by this crate as a 16-byte UUID BLOB, so a failure means a
    /// corrupt database; the `expect` message makes that diagnosable.
    /// (The sibling `ShiftplanReportEntity` conversion uses `TryFrom` + `?`;
    /// switching this one to `TryFrom` would break the `From`-based caller,
    /// so the interface is kept and only the bare `unwrap` is improved.)
    fn from(entity: &ShiftplanQuickOverviewDb) -> Self {
        Self {
            sales_person_id: Uuid::from_slice(entity.sales_person_id.as_ref())
                .expect("sales_person_id column must hold a 16-byte UUID"),
            // NULL sum defaults to zero hours.
            hours: entity.hours.unwrap_or(0.0) as f32,
            year: entity.year as u32,
        }
    }
}
/// SQLite-backed implementation of `ShiftplanReportDao`.
pub struct ShiftplanReportDaoImpl {
    // Shared connection pool; `Arc` lets several DAOs reuse one pool.
    pub pool: Arc<sqlx::SqlitePool>,
}
impl ShiftplanReportDaoImpl {
pub fn new(pool: Arc<sqlx::SqlitePool>) -> Self {
Self { pool }
}
}
#[async_trait]
impl ShiftplanReportDao for ShiftplanReportDaoImpl {
    /// Sums booked hours per (year, calendar week, day of week) for one
    /// sales person, up to and including `until_week`.
    ///
    /// Fix: the hour computation now divides the minutes of `time_from`
    /// by 60.0 as well. Previously only `time_to`'s minutes were scaled,
    /// so a slot starting at a non-zero minute (e.g. 09:30-12:00) was
    /// overcounted by `start_minutes * 59/60` hours.
    async fn extract_shiftplan_report(
        &self,
        sales_person_id: Uuid,
        year: u32,
        until_week: u8,
    ) -> Result<Arc<[ShiftplanReportEntity]>, DaoError> {
        // The UUID is stored as a 16-byte BLOB.
        let sales_person_id_vec = sales_person_id.as_bytes().to_vec();
        Ok(query_as!(
            ShiftplanReportDb,
            r#"
            SELECT
                sales_person.id as sales_person_id,
                sum((STRFTIME('%H', slot.time_to) + STRFTIME('%M', slot.time_to) / 60.0) - (STRFTIME('%H', slot.time_from) + STRFTIME('%M', slot.time_from) / 60.0)) as hours,
                booking.calendar_week, booking.year, slot.day_of_week
            FROM slot
            INNER JOIN booking ON (booking.slot_id = slot.id AND booking.deleted IS NULL)
            INNER JOIN sales_person ON booking.sales_person_id = sales_person.id
            WHERE sales_person.id = ?
                AND booking.year = ?
                AND booking.calendar_week <= ?
            GROUP BY year, calendar_week, day_of_week
            "#,
            sales_person_id_vec,
            year,
            until_week
        )
        .fetch_all(self.pool.as_ref())
        .await
        .map_db_error()?
        .iter()
        // Fallible row -> entity conversion; the first failure aborts the collect.
        .map(ShiftplanReportEntity::try_from)
        .collect::<Result<Arc<[_]>, _>>()?)
    }

    /// Sums booked hours per sales person over a whole year, up to and
    /// including `until_week` (same minute-scaling fix as above).
    async fn extract_quick_shiftplan_report(
        &self,
        year: u32,
        until_week: u8,
    ) -> Result<Arc<[ShiftplanQuickOverviewEntity]>, DaoError> {
        Ok(query_as!(
            ShiftplanQuickOverviewDb,
            r#"
            SELECT
                sales_person.id as sales_person_id,
                sum((STRFTIME('%H', slot.time_to) + STRFTIME('%M', slot.time_to) / 60.0) - (STRFTIME('%H', slot.time_from) + STRFTIME('%M', slot.time_from) / 60.0)) as hours,
                booking.year
            FROM slot
            INNER JOIN booking ON (booking.slot_id = slot.id AND booking.deleted IS NULL)
            INNER JOIN sales_person ON booking.sales_person_id = sales_person.id
            WHERE booking.year = ?
                AND booking.calendar_week <= ?
            GROUP BY sales_person_id, year
            "#,
            year,
            until_week
        )
        .fetch_all(self.pool.as_ref())
        .await
        .map_db_error()?
        .iter()
        // Infallible conversion (panics only on a corrupt UUID BLOB).
        .map(ShiftplanQuickOverviewEntity::from)
        .collect::<Arc<[_]>>())
    }
}

View file

@ -0,0 +1,183 @@
use std::sync::Arc;
use crate::ResultDbErrorExt;
use async_trait::async_trait;
use dao::{
working_hours::{WorkingHoursDao, WorkingHoursEntity},
DaoError,
};
use sqlx::{query, query_as};
use time::{format_description::well_known::Iso8601, PrimitiveDateTime};
use uuid::Uuid;
/// Raw row shape of the `working_hours` table as stored in SQLite.
pub struct WorkingHoursDb {
    // UUID stored as a 16-byte BLOB.
    pub id: Vec<u8>,
    // UUID stored as a 16-byte BLOB.
    pub sales_person_id: Vec<u8>,
    // SQLite REAL; narrowed to f32 in the entity conversion.
    pub expected_hours: f64,
    // Validity range of this agreement as (calendar week, year) pairs.
    pub from_calendar_week: i64,
    pub from_year: i64,
    pub to_calendar_week: i64,
    pub to_year: i64,
    // ISO-8601 text timestamp.
    pub created: String,
    // Soft-delete timestamp (ISO-8601 text); NULL while the row is live.
    pub deleted: Option<String>,
    // Version UUID as a 16-byte BLOB, mapped to the entity's `version` field.
    update_version: Vec<u8>,
}
impl TryFrom<&WorkingHoursDb> for WorkingHoursEntity {
    type Error = DaoError;

    /// Converts a raw `working_hours` row into the domain entity.
    ///
    /// # Errors
    /// Returns `DaoError` on malformed UUID bytes or unparsable ISO-8601
    /// timestamps. (The original propagated every conversion with `?`
    /// except `sales_person_id`, which it `unwrap`ped — that lone panic
    /// path is now a `?` like the rest.)
    fn try_from(working_hours: &WorkingHoursDb) -> Result<Self, DaoError> {
        Ok(Self {
            id: Uuid::from_slice(working_hours.id.as_ref())?,
            sales_person_id: Uuid::from_slice(working_hours.sales_person_id.as_ref())?,
            expected_hours: working_hours.expected_hours as f32,
            from_calendar_week: working_hours.from_calendar_week as u8,
            from_year: working_hours.from_year as u32,
            to_calendar_week: working_hours.to_calendar_week as u8,
            to_year: working_hours.to_year as u32,
            created: PrimitiveDateTime::parse(working_hours.created.as_str(), &Iso8601::DATE_TIME)?,
            deleted: working_hours
                .deleted
                .as_ref()
                .map(|deleted| PrimitiveDateTime::parse(deleted, &Iso8601::DATE_TIME))
                .transpose()?,
            version: Uuid::from_slice(&working_hours.update_version)?,
        })
    }
}
/// SQLite-backed implementation of `WorkingHoursDao`.
pub struct WorkingHoursDaoImpl {
    // Shared connection pool; `Arc` lets several DAOs reuse one pool.
    pub pool: Arc<sqlx::SqlitePool>,
}
impl WorkingHoursDaoImpl {
pub fn new(pool: Arc<sqlx::SqlitePool>) -> Self {
Self { pool }
}
}
#[async_trait]
impl WorkingHoursDao for WorkingHoursDaoImpl {
    /// Returns every working-hours record, including soft-deleted rows;
    /// filtering on `deleted` is left to the caller.
    async fn all(&self) -> Result<Arc<[WorkingHoursEntity]>, DaoError> {
        query_as!(
            WorkingHoursDb,
            r#"
            SELECT
                id,
                sales_person_id,
                expected_hours,
                from_calendar_week,
                from_year,
                to_calendar_week,
                to_year,
                created,
                deleted,
                update_version
            FROM
                working_hours
            "#
        )
        .fetch_all(self.pool.as_ref())
        .await
        .map_db_error()?
        .iter()
        // Fallible row -> entity conversion; the first failure aborts the collect.
        .map(WorkingHoursEntity::try_from)
        .collect::<Result<_, _>>()
    }

    /// Returns all working-hours records belonging to one sales person.
    async fn find_by_sales_person_id(
        &self,
        sales_person_id: Uuid,
    ) -> Result<Arc<[WorkingHoursEntity]>, DaoError> {
        // The UUID is stored as a 16-byte BLOB.
        let id_vec = sales_person_id.as_bytes().to_vec();
        query_as!(
            WorkingHoursDb,
            r#"
            SELECT
                id,
                sales_person_id,
                expected_hours,
                from_calendar_week,
                from_year,
                to_calendar_week,
                to_year,
                created,
                deleted,
                update_version
            FROM
                working_hours
            WHERE
                sales_person_id = ?
            "#,
            id_vec
        )
        .fetch_all(self.pool.as_ref())
        .await
        .map_db_error()?
        .iter()
        .map(WorkingHoursEntity::try_from)
        .collect::<Result<_, _>>()
    }

    /// Inserts a new working-hours record, recording `process` for auditing.
    ///
    /// Fix: `update_version` now receives `entity.version`. The original
    /// wrote `entity.id` into both the `id` and `update_version` columns,
    /// so the stored version never matched the entity's `version` field
    /// that `TryFrom` reads back out.
    async fn create(&self, entity: &WorkingHoursEntity, process: &str) -> Result<(), DaoError> {
        let id = entity.id.as_bytes().to_vec();
        let sales_person_id = entity.sales_person_id.as_bytes().to_vec();
        let expected_hours = entity.expected_hours as f64;
        let from_calendar_week = entity.from_calendar_week as i64;
        let from_year = entity.from_year as i64;
        let to_calendar_week = entity.to_calendar_week as i64;
        let to_year = entity.to_year as i64;
        // Timestamps are stored as ISO-8601 text, matching the parsing in TryFrom.
        let created = entity.created.format(&Iso8601::DATE_TIME)?;
        let version = entity.version.as_bytes().to_vec();
        query!(
            r#"
            INSERT INTO working_hours (
                id,
                sales_person_id,
                expected_hours,
                from_calendar_week,
                from_year,
                to_calendar_week,
                to_year,
                created,
                update_process,
                update_version
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            "#,
            id,
            sales_person_id,
            expected_hours,
            from_calendar_week,
            from_year,
            to_calendar_week,
            to_year,
            created,
            process,
            version,
        )
        .execute(self.pool.as_ref())
        .await
        .map_db_error()?;
        Ok(())
    }

    /// Updates the soft-delete marker and audit process of a record.
    ///
    /// Fix: `deleted` is serialized with `Iso8601::DATE_TIME`, the same
    /// format every read path parses. The original used `to_string()`
    /// (the `Display` impl), whose layout differs from ISO-8601, so
    /// `PrimitiveDateTime::parse(.., &Iso8601::DATE_TIME)` in `TryFrom`
    /// could not read the value back.
    ///
    /// NOTE(review): unlike `create`, this statement does not touch
    /// `update_version` — confirm whether the version is meant to rotate
    /// on update (optimistic locking).
    async fn update(&self, entity: &WorkingHoursEntity, process: &str) -> Result<(), DaoError> {
        let id = entity.id.as_bytes().to_vec();
        let deleted = entity
            .deleted
            .as_ref()
            .map(|deleted| deleted.format(&Iso8601::DATE_TIME))
            .transpose()?;
        query!(
            r#"
            UPDATE working_hours SET
                deleted = ?,
                update_process = ?
            WHERE
                id = ?
            "#,
            deleted,
            process,
            id
        )
        .execute(self.pool.as_ref())
        .await
        .map_db_error()?;
        Ok(())
    }
}