Add quarterlies management system with complete CRUD operations
- New quarterly_orders table with validation and constraints
- Full CRUD API endpoints for quarterly orders management
- Import functionality for bulk quarterly data from JSON
- Python scripts for data migration and database import
- Consistent validation for quarterly types and amounts
- Follows established DRY/KISS architectural patterns
parent adacf443e5
commit e206ce3332

import_quarterlies.py (new file, 44 lines)
@@ -0,0 +1,44 @@
#!/usr/bin/env python3
import json
import requests
import sys


def import_quarterlies():
    # Read the quarterly data
    with open('/home/rockvilleav/Nextcloud/quarterlies_orders_autosave.json', 'r') as f:
        data = json.load(f)

    # Transform the data to match our API format
    import_data = {
        "orders": []
    }

    for order in data["orders"]:
        import_data["orders"].append({
            "name": order["name"],
            "type": order["type"],
            "amount": order["amount"]
        })

    # Print the data that will be imported
    print(f"Importing {len(import_data['orders'])} quarterly orders...")
    print("\nFirst few orders:")
    for i, order in enumerate(import_data["orders"][:5]):
        print(f"  {i+1}. {order['name']} - {order['type']} (x{order['amount']})")

    if len(import_data["orders"]) > 5:
        print(f"  ... and {len(import_data['orders']) - 5} more")

    # Save the transformed data for manual import via API
    with open('/opt/rtsda/church-api/quarterly_import_data.json', 'w') as f:
        json.dump(import_data, f, indent=2)

    print(f"\nTransformed data saved to: /opt/rtsda/church-api/quarterly_import_data.json")
    print("You can now use this file to import via the API endpoint:")
    print("curl -X POST http://localhost:3002/api/admin/quarterlies/import \\")
    print("  -H 'Content-Type: application/json' \\")
    print("  -H 'Authorization: Bearer YOUR_JWT_TOKEN' \\")
    print("  -d @quarterly_import_data.json")


if __name__ == "__main__":
    import_quarterlies()
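The script above stops at printing a curl command. For reference, a minimal sketch of making the same request directly with the requests library it already imports could look like the following; the base URL and JWT token are placeholders taken from the printed example, not values defined in this commit:

#!/usr/bin/env python3
# Sketch only: POST the file written by import_quarterlies.py to the import
# endpoint it prints. URL and token are placeholders.
import json
import requests

API_URL = "http://localhost:3002/api/admin/quarterlies/import"  # from the printed curl example
JWT_TOKEN = "YOUR_JWT_TOKEN"  # placeholder; use a real admin JWT

with open('/opt/rtsda/church-api/quarterly_import_data.json', 'r') as f:
    payload = json.load(f)

resp = requests.post(
    API_URL,
    json=payload,
    headers={"Authorization": f"Bearer {JWT_TOKEN}"},
)
resp.raise_for_status()
# The response envelope should carry the ImportResult counts under "data"
# (imported, skipped, errors), per the handler added in this commit.
print(resp.json())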
import_to_db.py (new file, 50 lines)
@@ -0,0 +1,50 @@
#!/usr/bin/env python3
import json
import psycopg2
import uuid
from datetime import datetime


def import_to_database():
    # Read the transformed JSON data
    with open('/opt/rtsda/church-api/quarterly_import_data.json', 'r') as f:
        data = json.load(f)

    # Connect to the database
    conn = psycopg2.connect("postgresql://rtsda_user:SaviourofMyLife!!@localhost:5432/church_db")
    cursor = conn.cursor()

    print(f"Importing {len(data['orders'])} quarterly orders to database...")

    imported = 0
    errors = 0

    for order in data["orders"]:
        try:
            # Generate a new UUID for each order
            order_id = str(uuid.uuid4())

            # Insert the order
            cursor.execute("""
                INSERT INTO quarterly_orders (id, name, quarterly_type, amount)
                VALUES (%s, %s, %s, %s)
            """, (order_id, order["name"], order["type"], order["amount"]))

            imported += 1
            if imported % 10 == 0:
                print(f"  Imported {imported} orders...")

        except Exception as e:
            print(f"  Error importing '{order['name']}': {e}")
            errors += 1

    # Commit all changes
    conn.commit()
    cursor.close()
    conn.close()

    print(f"\nImport completed!")
    print(f"  Successfully imported: {imported} orders")
    print(f"  Errors: {errors}")


if __name__ == "__main__":
    import_to_database()
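A small sanity-check sketch after running this script, assuming the same database and quarterly_orders table as above, might count what actually landed per type:

#!/usr/bin/env python3
# Sketch only: summarize imported rows by quarterly_type.
import psycopg2

DSN = "postgresql://rtsda_user:SaviourofMyLife!!@localhost:5432/church_db"  # same DSN as import_to_db.py

conn = psycopg2.connect(DSN)
cursor = conn.cursor()
cursor.execute(
    "SELECT quarterly_type, COUNT(*), SUM(amount) "
    "FROM quarterly_orders GROUP BY quarterly_type ORDER BY quarterly_type"
)
for quarterly_type, orders, total in cursor.fetchall():
    print(f"{quarterly_type}: {orders} orders, {total} quarterlies")
cursor.close()
conn.close()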
migrations/20250906000001_create_quarterly_orders_table.sql (new file, 29 lines)
@@ -0,0 +1,29 @@
-- Create quarterly_orders table for managing Sabbath School quarterly orders
CREATE TABLE quarterly_orders (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    name VARCHAR(255) NOT NULL,
    quarterly_type VARCHAR(50) NOT NULL CHECK (quarterly_type IN ('Regular', 'Teachers', 'Large Print', 'EG White Notes')),
    amount INTEGER NOT NULL CHECK (amount > 0),
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

-- Create index on name for faster searching
CREATE INDEX idx_quarterly_orders_name ON quarterly_orders(name);

-- Create index on quarterly_type for filtering
CREATE INDEX idx_quarterly_orders_type ON quarterly_orders(quarterly_type);

-- Create trigger to automatically update updated_at column
CREATE OR REPLACE FUNCTION update_quarterly_orders_updated_at()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = NOW();
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

CREATE TRIGGER trigger_quarterly_orders_updated_at
    BEFORE UPDATE ON quarterly_orders
    FOR EACH ROW
    EXECUTE FUNCTION update_quarterly_orders_updated_at();
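As an illustration of the constraints (not part of the commit), a hypothetical psycopg2 snippet that tries to insert rows violating the two CHECK clauses would be rejected roughly like this; names and values here are made up for the demonstration:

#!/usr/bin/env python3
# Sketch only: both inserts should fail the table's CHECK constraints.
import psycopg2

conn = psycopg2.connect("postgresql://rtsda_user:SaviourofMyLife!!@localhost:5432/church_db")
conn.autocommit = True  # keep each failed INSERT in its own transaction
cursor = conn.cursor()

for name, quarterly_type, amount in [
    ("Test Person", "Braille", 1),   # not in the allowed quarterly_type list
    ("Test Person", "Regular", 0),   # violates amount > 0
]:
    try:
        cursor.execute(
            "INSERT INTO quarterly_orders (name, quarterly_type, amount) VALUES (%s, %s, %s)",
            (name, quarterly_type, amount),
        )
    except psycopg2.errors.CheckViolation as e:
        print(f"Rejected as expected by constraint: {e.diag.constraint_name}")

cursor.close()
conn.close()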
quarterly_import_data.json (new file, 264 lines)
@@ -0,0 +1,264 @@
{
  "orders": [
    { "name": "Clyvia Blackmore", "type": "Regular", "amount": 1 },
    { "name": "Tara Bradford", "type": "Regular", "amount": 1 },
    { "name": "Brown Family", "type": "Teachers", "amount": 1 },
    { "name": "Carleen Brown", "type": "Regular", "amount": 1 },
    { "name": "Brown Family", "type": "Regular", "amount": 1 },
    { "name": "Fostena Byron", "type": "Regular", "amount": 1 },
    { "name": "Louise Candee", "type": "Regular", "amount": 1 },
    { "name": "Louise Candee", "type": "Teachers", "amount": 1 },
    { "name": "Louise Candee", "type": "EG White Notes", "amount": 1 },
    { "name": "Edwardo Carcache", "type": "Regular", "amount": 1 },
    { "name": "Orville Castillo", "type": "Teachers", "amount": 1 },
    { "name": "Karen Castillo", "type": "Regular", "amount": 1 },
    { "name": "Karen Castillo", "type": "EG White Notes", "amount": 1 },
    { "name": "Nadine Cross", "type": "Regular", "amount": 3 },
    { "name": "Carlos Domenech", "type": "Regular", "amount": 1 },
    { "name": "Flick Family", "type": "Regular", "amount": 2 },
    { "name": "Claire Garrett", "type": "Regular", "amount": 1 },
    { "name": "Samantha Gayle", "type": "Regular", "amount": 1 },
    { "name": "Maria Gonzales", "type": "Large Print", "amount": 1 },
    { "name": "Greeter Station", "type": "Large Print", "amount": 1 },
    { "name": "Hamilton Family", "type": "Large Print", "amount": 2 },
    { "name": "Chris Isaacs", "type": "Regular", "amount": 1 },
    { "name": "Jaffat Family", "type": "Regular", "amount": 2 },
    { "name": "Angie Johnson", "type": "Regular", "amount": 1 },
    { "name": "Denova Jones", "type": "Regular", "amount": 1 },
    { "name": "Linda Lahart", "type": "Large Print", "amount": 1 },
    { "name": "James Lee", "type": "Large Print", "amount": 1 },
    { "name": "Patricia Lemone", "type": "Regular", "amount": 1 },
    { "name": "Lera Little", "type": "Large Print", "amount": 1 },
    { "name": "Michelle Maitland", "type": "Regular", "amount": 1 },
    { "name": "Malcolm Matthews", "type": "Large Print", "amount": 1 },
    { "name": "Kalee Mead", "type": "Regular", "amount": 2 },
    { "name": "Denise Millany", "type": "Regular", "amount": 1 },
    { "name": "Sarah Olchanowski", "type": "Regular", "amount": 1 },
    { "name": "John Oliver", "type": "Regular", "amount": 1 },
    { "name": "Pinnock Family", "type": "Regular", "amount": 1 },
    { "name": "Deborah Queen", "type": "Large Print", "amount": 1 },
    { "name": "Rich Raider", "type": "Regular", "amount": 1 },
    { "name": "Rawlins Family", "type": "Teachers", "amount": 1 },
    { "name": "Marsha Reid", "type": "Regular", "amount": 1 },
    { "name": "Joe Roberts", "type": "Regular", "amount": 1 },
    { "name": "Rosa Family", "type": "Regular", "amount": 1 },
    { "name": "La Sala Roy", "type": "Regular", "amount": 1 },
    { "name": "Dan Sedgewick", "type": "Regular", "amount": 1 },
    { "name": "Dan Sedgewick", "type": "EG White Notes", "amount": 1 },
    { "name": "Mildred Stephens", "type": "Regular", "amount": 1 },
    { "name": "Jheanell Thomas", "type": "Regular", "amount": 1 },
    { "name": "Wayne Tino", "type": "Teachers", "amount": 1 },
    { "name": "Jerry Travers", "type": "Regular", "amount": 1 },
    { "name": "Deborah Troesch", "type": "Large Print", "amount": 1 },
    { "name": "Frank Varricchio", "type": "Teachers", "amount": 1 },
    { "name": "Glenton Young", "type": "Regular", "amount": 1 }
  ]
}
@@ -15,3 +15,4 @@ pub mod smart_streaming;
 pub mod v2;
 pub mod backup;
 pub mod hymnal;
+pub mod quarterlies;
src/handlers/quarterlies.rs (new file, 126 lines)
@@ -0,0 +1,126 @@
use axum::{extract::{Path, State}, Json};
use uuid::Uuid;
use serde::{Deserialize, Serialize};

use crate::{
    error::{Result, ApiError},
    models::{QuarterlyOrder, ApiResponse, CreateQuarterlyOrderRequest, UpdateQuarterlyOrderRequest},
    services::QuarterliesService,
    utils::response::{success_response, success_with_message},
    AppState,
};

pub async fn list(
    State(state): State<AppState>,
) -> Result<Json<ApiResponse<Vec<QuarterlyOrder>>>> {
    let quarterlies = QuarterliesService::list_all(&state.pool).await?;

    Ok(success_response(quarterlies))
}

pub async fn get(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<ApiResponse<QuarterlyOrder>>> {
    let quarterly = QuarterliesService::get_by_id(&state.pool, &id).await?;

    match quarterly {
        Some(q) => Ok(success_response(q)),
        None => Err(ApiError::NotFound("Quarterly order not found".to_string())),
    }
}

pub async fn create(
    State(state): State<AppState>,
    Json(req): Json<CreateQuarterlyOrderRequest>,
) -> Result<Json<ApiResponse<QuarterlyOrder>>> {
    let quarterly = QuarterliesService::create(&state.pool, req).await?;

    Ok(success_with_message(quarterly, "Quarterly order created successfully"))
}

pub async fn update(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
    Json(req): Json<UpdateQuarterlyOrderRequest>,
) -> Result<Json<ApiResponse<QuarterlyOrder>>> {
    let quarterly = QuarterliesService::update(&state.pool, &id, req).await?;

    match quarterly {
        Some(q) => Ok(success_with_message(q, "Quarterly order updated successfully")),
        None => Err(ApiError::NotFound("Quarterly order not found".to_string())),
    }
}

pub async fn delete(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<ApiResponse<bool>>> {
    let deleted = QuarterliesService::delete(&state.pool, &id).await?;

    if deleted {
        Ok(success_with_message(true, "Quarterly order deleted successfully"))
    } else {
        Err(ApiError::NotFound("Quarterly order not found".to_string()))
    }
}

// Import structure for JSON import functionality
#[derive(Debug, Deserialize)]
pub struct ImportQuarterliesRequest {
    pub orders: Vec<ImportOrderItem>,
}

#[derive(Debug, Deserialize)]
pub struct ImportOrderItem {
    pub name: String,
    #[serde(rename = "type")]
    pub quarterly_type: String,
    pub amount: i32,
}

#[derive(Debug, Serialize)]
pub struct ImportResult {
    pub imported: usize,
    pub skipped: usize,
    pub errors: Vec<String>,
}

impl crate::utils::sanitize::SanitizeOutput for ImportResult {
    fn sanitize_output(mut self) -> Self {
        use crate::utils::sanitize::sanitize_string;
        self.errors = self.errors.into_iter().map(sanitize_string).collect();
        self
    }
}

pub async fn import_from_json(
    State(state): State<AppState>,
    Json(req): Json<ImportQuarterliesRequest>,
) -> Result<Json<ApiResponse<ImportResult>>> {
    let mut imported = 0;
    let mut skipped = 0;
    let mut errors = Vec::new();

    for order in req.orders {
        match QuarterliesService::create(&state.pool, CreateQuarterlyOrderRequest {
            name: order.name.clone(),
            quarterly_type: order.quarterly_type,
            amount: order.amount,
        }).await {
            Ok(_) => imported += 1,
            Err(e) => {
                errors.push(format!("Failed to import '{}': {:?}", order.name, e));
                skipped += 1;
            }
        }
    }

    let result = ImportResult {
        imported,
        skipped,
        errors,
    };

    Ok(success_with_message(result, &format!("Import completed: {} imported, {} skipped", imported, skipped)))
}
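A hypothetical client-side walk through these handlers, assuming the admin routes are mounted under /api/admin (as the curl example in import_quarterlies.py suggests) and that the ApiResponse envelope is {success, data, message} as used elsewhere in this commit; URL and token are placeholders:

#!/usr/bin/env python3
# Sketch only: create, read, update, and delete one quarterly order over HTTP.
import requests

BASE = "http://localhost:3002"
HEADERS = {"Authorization": "Bearer YOUR_JWT_TOKEN"}  # placeholder admin JWT

# Create (admin); note the create endpoint uses "quarterly_type", not "type".
created = requests.post(
    f"{BASE}/api/admin/quarterlies",
    json={"name": "Test Person", "quarterly_type": "Regular", "amount": 2},
    headers=HEADERS,
).json()
order_id = created["data"]["id"]  # assumes the {success, data, message} envelope

# Read (public).
print(requests.get(f"{BASE}/api/quarterlies/{order_id}").json())

# Update (admin): partial update, only the amount.
requests.put(
    f"{BASE}/api/admin/quarterlies/{order_id}",
    json={"amount": 3},
    headers=HEADERS,
)

# Delete (admin).
requests.delete(f"{BASE}/api/admin/quarterlies/{order_id}", headers=HEADERS)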
@@ -1,6 +1,6 @@
 use axum::{extract::{Path, Query, State}, response::Json};
 use crate::error::Result;
-use crate::models::{ApiResponse, ScheduleData, ConferenceData, DateQuery};
+use crate::models::{ApiResponse, ConferenceData, DateQuery};
 use crate::services::{ScheduleService, CreateScheduleRequest};
 use crate::utils::response::{success_response, success_with_message, success_message_only};
 use crate::AppState;
@@ -8,11 +8,35 @@ use crate::AppState;
 pub async fn get_schedule(
     State(state): State<AppState>,
     Query(params): Query<DateQuery>,
-) -> Result<Json<ApiResponse<ScheduleData>>> {
-    let date_str = params.date.unwrap_or_else(|| "2025-06-14".to_string());
-    let schedule_data = ScheduleService::get_schedule_data_v1(&state.pool, &date_str).await?;
-
-    Ok(success_response(schedule_data))
+) -> Result<Json<serde_json::Value>> {
+    match params.date {
+        Some(date_str) => {
+            // Return single schedule for specific date
+            let schedule_data = ScheduleService::get_schedule_data_v1(&state.pool, &date_str).await?;
+            Ok(Json(serde_json::json!({
+                "success": true,
+                "data": schedule_data,
+                "message": null
+            })))
+        },
+        None => {
+            // Return all schedules when no date specified
+            let schedules = ScheduleService::list_schedules_v1(&state.pool).await?;
+            let mut schedule_data_list = Vec::new();
+
+            for schedule in schedules {
+                let date_str = schedule.date.format("%Y-%m-%d").to_string();
+                let schedule_data = ScheduleService::get_schedule_data_v1(&state.pool, &date_str).await?;
+                schedule_data_list.push(schedule_data);
+            }
+
+            Ok(Json(serde_json::json!({
+                "success": true,
+                "data": schedule_data_list,
+                "message": null
+            })))
+        }
+    }
 }
 
 pub async fn get_conference_data(
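Illustrative only: with this change, GET /api/schedule returns one schedule when a date is supplied and all schedules when it is not; the base URL is a placeholder:

#!/usr/bin/env python3
# Sketch only: exercise both branches of the reworked get_schedule handler.
import requests

BASE = "http://localhost:3002"

one = requests.get(f"{BASE}/api/schedule", params={"date": "2025-06-14"}).json()
print(one["data"])               # a single ScheduleData object for that date

all_of_them = requests.get(f"{BASE}/api/schedule").json()
print(len(all_of_them["data"]))  # a list of ScheduleData, one per stored schedule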
@@ -96,6 +96,10 @@ async fn main() -> Result<()> {
         .route("/members", get(handlers::members::list))
         .route("/members", post(handlers::members::create))
         .route("/members/:id", delete(handlers::members::delete))
+        .route("/quarterlies", post(handlers::quarterlies::create))
+        .route("/quarterlies/:id", put(handlers::quarterlies::update))
+        .route("/quarterlies/:id", delete(handlers::quarterlies::delete))
+        .route("/quarterlies/import", post(handlers::quarterlies::import_from_json))
         .route("/backup/create", post(handlers::backup::create_backup))
         .route("/backup/list", get(handlers::backup::list_backups))
         .route("/backup/cleanup", post(handlers::backup::cleanup_backups))
@@ -125,6 +129,8 @@ let app = Router::new()
         .route("/api/schedule", get(handlers::schedule::get_schedule))
         .route("/api/conference-data", get(handlers::schedule::get_conference_data))
         .route("/api/members/active", get(handlers::members::list_active))
+        .route("/api/quarterlies", get(handlers::quarterlies::list))
+        .route("/api/quarterlies/:id", get(handlers::quarterlies::get))
         // Hymnal API endpoints
         .route("/api/hymnals", get(handlers::hymnal::list_hymnals))
         .route("/api/hymnals/:id", get(handlers::hymnal::get_hymnal))
@@ -246,6 +246,7 @@ pub struct Personnel {
     pub offering: String,
     pub special_music: String,
     pub speaker: String,
+    pub deacons: String,
 }
 
 #[derive(Debug, Serialize, Deserialize)]
@@ -849,3 +850,36 @@ impl SanitizeOutput for SearchResult {
         self
     }
 }
+
+// Quarterly Models
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct QuarterlyOrder {
+    pub id: Uuid,
+    pub name: String,
+    pub quarterly_type: String,
+    pub amount: i32,
+    pub created_at: Option<DateTime<Utc>>,
+    pub updated_at: Option<DateTime<Utc>>,
+}
+
+#[derive(Debug, Deserialize)]
+pub struct CreateQuarterlyOrderRequest {
+    pub name: String,
+    pub quarterly_type: String,
+    pub amount: i32,
+}
+
+#[derive(Debug, Deserialize)]
+pub struct UpdateQuarterlyOrderRequest {
+    pub name: Option<String>,
+    pub quarterly_type: Option<String>,
+    pub amount: Option<i32>,
+}
+
+impl SanitizeOutput for QuarterlyOrder {
+    fn sanitize_output(mut self) -> Self {
+        self.name = sanitize_string(self.name);
+        self.quarterly_type = sanitize_string(self.quarterly_type);
+        self
+    }
+}
@@ -14,6 +14,7 @@ pub mod backup_scheduler;
 pub mod hymnal;
 pub mod hymnal_search;
 pub mod members;
+pub mod quarterlies;
 
 pub use events_v1::EventsV1Service;
 pub use events_v2::EventsV2Service;
@@ -30,4 +31,5 @@ pub use thumbnail_generator::ThumbnailGenerator;
 pub use backup_scheduler::BackupScheduler;
 pub use hymnal::HymnalService;
 pub use hymnal_search::HymnalSearchService;
 pub use members::MemberService;
+pub use quarterlies::QuarterliesService;
src/services/quarterlies.rs (new file, 66 lines)
@@ -0,0 +1,66 @@
use sqlx::PgPool;
use uuid::Uuid;
use crate::{error::Result, models::{QuarterlyOrder, CreateQuarterlyOrderRequest, UpdateQuarterlyOrderRequest}, sql};

pub struct QuarterliesService;

impl QuarterliesService {
    /// List all quarterly orders
    pub async fn list_all(pool: &PgPool) -> Result<Vec<QuarterlyOrder>> {
        sql::quarterlies::list_all(pool).await
    }

    /// Get quarterly order by ID
    pub async fn get_by_id(pool: &PgPool, id: &Uuid) -> Result<Option<QuarterlyOrder>> {
        sql::quarterlies::get_by_id(pool, id).await
    }

    /// Create new quarterly order with validation
    pub async fn create(pool: &PgPool, req: CreateQuarterlyOrderRequest) -> Result<QuarterlyOrder> {
        // Validate quarterly type
        let valid_types = vec!["Regular", "Teachers", "Large Print", "EG White Notes"];
        if !valid_types.contains(&req.quarterly_type.as_str()) {
            return Err(crate::error::ApiError::BadRequest(
                format!("Invalid quarterly type. Must be one of: {}", valid_types.join(", "))
            ));
        }

        // Validate amount is positive
        if req.amount <= 0 {
            return Err(crate::error::ApiError::BadRequest(
                "Amount must be greater than 0".to_string()
            ));
        }

        sql::quarterlies::create(pool, req).await
    }

    /// Update quarterly order
    pub async fn update(pool: &PgPool, id: &Uuid, req: UpdateQuarterlyOrderRequest) -> Result<Option<QuarterlyOrder>> {
        // Validate quarterly type if provided
        if let Some(ref quarterly_type) = req.quarterly_type {
            let valid_types = vec!["Regular", "Teachers", "Large Print", "EG White Notes"];
            if !valid_types.contains(&quarterly_type.as_str()) {
                return Err(crate::error::ApiError::BadRequest(
                    format!("Invalid quarterly type. Must be one of: {}", valid_types.join(", "))
                ));
            }
        }

        // Validate amount is positive if provided
        if let Some(amount) = req.amount {
            if amount <= 0 {
                return Err(crate::error::ApiError::BadRequest(
                    "Amount must be greater than 0".to_string()
                ));
            }
        }

        sql::quarterlies::update(pool, id, req).await
    }

    /// Delete quarterly order by ID
    pub async fn delete(pool: &PgPool, id: &Uuid) -> Result<bool> {
        sql::quarterlies::delete(pool, id).await
    }
}
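Because the import handler funnels every row through QuarterliesService::create, rows failing this validation are reported and skipped rather than aborting the batch. A hypothetical request mixing valid and invalid rows would come back roughly as sketched below; URL and token are placeholders:

#!/usr/bin/env python3
# Sketch only: import a batch where some rows fail the service-level validation.
import requests

payload = {"orders": [
    {"name": "Valid Person", "type": "Regular", "amount": 1},
    {"name": "Bad Type", "type": "Braille", "amount": 1},    # invalid quarterly type
    {"name": "Bad Amount", "type": "Regular", "amount": 0},  # amount must be > 0
]}

resp = requests.post(
    "http://localhost:3002/api/admin/quarterlies/import",
    json=payload,
    headers={"Authorization": "Bearer YOUR_JWT_TOKEN"},
).json()
# Expected ImportResult under "data": imported=1, skipped=2, two error messages.
print(resp["data"])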
@@ -46,6 +46,7 @@ impl ScheduleService {
                 offering: s.offering.unwrap_or_default(),
                 special_music: s.special_music.unwrap_or_default(),
                 speaker: s.sermon_speaker.unwrap_or_default(),
+                deacons: s.deacons.unwrap_or_default(),
             }
         } else {
             // Return empty data if no schedule found
@@ -58,6 +59,7 @@ impl ScheduleService {
                 offering: String::new(),
                 special_music: String::new(),
                 speaker: String::new(),
+                deacons: String::new(),
             }
         };
 
@@ -9,5 +9,6 @@ pub mod events;
 pub mod hymnal;
 pub mod media;
 pub mod members;
+pub mod quarterlies;
 pub mod schedule;
 pub mod users;
src/sql/quarterlies.rs (new file, 61 lines)
@@ -0,0 +1,61 @@
use sqlx::PgPool;
use uuid::Uuid;
use crate::{error::Result, models::{QuarterlyOrder, CreateQuarterlyOrderRequest, UpdateQuarterlyOrderRequest}};

pub async fn list_all(pool: &PgPool) -> Result<Vec<QuarterlyOrder>> {
    let quarterlies = sqlx::query_as::<_, QuarterlyOrder>(
        "SELECT id, name, quarterly_type, amount, created_at, updated_at FROM quarterly_orders ORDER BY name ASC"
    )
    .fetch_all(pool)
    .await?;

    Ok(quarterlies)
}

pub async fn get_by_id(pool: &PgPool, id: &Uuid) -> Result<Option<QuarterlyOrder>> {
    let quarterly = sqlx::query_as::<_, QuarterlyOrder>(
        "SELECT id, name, quarterly_type, amount, created_at, updated_at FROM quarterly_orders WHERE id = $1"
    )
    .bind(id)
    .fetch_optional(pool)
    .await?;

    Ok(quarterly)
}

pub async fn create(pool: &PgPool, req: CreateQuarterlyOrderRequest) -> Result<QuarterlyOrder> {
    let quarterly = sqlx::query_as::<_, QuarterlyOrder>(
        "INSERT INTO quarterly_orders (id, name, quarterly_type, amount) VALUES ($1, $2, $3, $4) RETURNING id, name, quarterly_type, amount, created_at, updated_at"
    )
    .bind(Uuid::new_v4())
    .bind(req.name)
    .bind(req.quarterly_type)
    .bind(req.amount)
    .fetch_one(pool)
    .await?;

    Ok(quarterly)
}

pub async fn update(pool: &PgPool, id: &Uuid, req: UpdateQuarterlyOrderRequest) -> Result<Option<QuarterlyOrder>> {
    let quarterly = sqlx::query_as::<_, QuarterlyOrder>(
        "UPDATE quarterly_orders SET name = COALESCE($2, name), quarterly_type = COALESCE($3, quarterly_type), amount = COALESCE($4, amount), updated_at = NOW() WHERE id = $1 RETURNING id, name, quarterly_type, amount, created_at, updated_at"
    )
    .bind(id)
    .bind(req.name)
    .bind(req.quarterly_type)
    .bind(req.amount)
    .fetch_optional(pool)
    .await?;

    Ok(quarterly)
}

pub async fn delete(pool: &PgPool, id: &Uuid) -> Result<bool> {
    let result = sqlx::query("DELETE FROM quarterly_orders WHERE id = $1")
        .bind(id)
        .execute(pool)
        .await?;

    Ok(result.rows_affected() > 0)
}