Complete major performance optimizations: eliminate N+1 patterns and memory pagination
- Add event categories config endpoint (/api/config/event-categories)
- Fix bulletin processing N+1: batch hymn lookups for single bulletins
- Optimize V2 events list endpoint: replace memory pagination with database LIMIT/OFFSET
- Add missing SQL functions: list_events_paginated() and count_events()
- Preserve HTTP response compatibility for iOS app
Parent: 8728e14125
Commit: 4899c3829c
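
Note: the events list change moves pagination into SQL instead of slicing a fully loaded Vec in the handler. Below is a minimal standalone sketch of the arithmetic this relies on (1-based pages assumed; `page_to_limit_offset` is a hypothetical helper, not part of this commit):

    fn page_to_limit_offset(page: i32, per_page: i32) -> (i64, i64) {
        // LIMIT is the page size; OFFSET skips the rows of all earlier pages.
        let limit = per_page.max(0) as i64;
        let offset = ((page - 1).max(0) as i64) * limit;
        (limit, offset)
    }

    fn main() {
        // Page 3 with 25 rows per page maps to LIMIT 25 OFFSET 50,
        // matching what list_events_paginated() passes to the database below.
        assert_eq!(page_to_limit_offset(3, 25), (25, 50));
    }
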
@@ -6,18 +6,42 @@ use crate::{
 };
+use regex::Regex;
 
-/// Process multiple bulletins with shared logic
+/// Process multiple bulletins with shared logic (optimized to prevent N+1 queries)
 pub async fn process_bulletins_batch(
     pool: &sqlx::PgPool,
     bulletins: &mut [Bulletin]
 ) -> Result<()> {
-    for bulletin in bulletins.iter_mut() {
-        process_single_bulletin(pool, bulletin).await?;
+    // Collect all hymn numbers across all bulletins first for batch lookup
+    let mut all_hymn_numbers = std::collections::HashSet::new();
+
+    for bulletin in bulletins.iter() {
+        // Extract hymn numbers from divine_worship content
+        if let Some(ref worship_content) = bulletin.divine_worship {
+            let hymn_numbers = extract_hymn_numbers_from_content(worship_content);
+            all_hymn_numbers.extend(hymn_numbers);
+        }
+
+        // Extract hymn numbers from sabbath_school content
+        if let Some(ref ss_content) = bulletin.sabbath_school {
+            let hymn_numbers = extract_hymn_numbers_from_content(ss_content);
+            all_hymn_numbers.extend(hymn_numbers);
+        }
+    }
+
+    // Batch query all hymns from both hymnals
+    let hymn_numbers_vec: Vec<i32> = all_hymn_numbers.into_iter().collect();
+    let hymnal_codes = vec!["sda-1985".to_string(), "sda-1941".to_string()];
+    let hymn_map = crate::sql::hymnal::get_hymns_by_numbers_batch(pool, &hymnal_codes, &hymn_numbers_vec).await?;
+
+    // Now process each bulletin using the cached hymn data
+    for bulletin in bulletins.iter_mut() {
+        process_single_bulletin_with_cache(pool, bulletin, &hymn_map).await?;
     }
 
     Ok(())
 }
 
-/// Process a single bulletin with all required transformations
+/// Process a single bulletin with all required transformations (optimized to prevent N+1)
 pub async fn process_single_bulletin(
     pool: &sqlx::PgPool,
     bulletin: &mut Bulletin
@@ -25,14 +49,33 @@ pub async fn process_single_bulletin(
     // Process scripture reading to include full verse text
     bulletin.scripture_reading = process_scripture_reading(pool, &bulletin.scripture_reading).await?;
 
-    // Process hymn references in worship content
+    // Collect all hymn numbers from this bulletin first for batch lookup
+    let mut all_hymn_numbers = std::collections::HashSet::new();
+
+    // Extract hymn numbers from divine_worship content
     if let Some(ref worship_content) = bulletin.divine_worship {
-        bulletin.divine_worship = Some(process_hymn_references(pool, worship_content).await?);
+        let hymn_numbers = extract_hymn_numbers_from_content(worship_content);
+        all_hymn_numbers.extend(hymn_numbers);
     }
 
-    // Process hymn references in sabbath school content
+    // Extract hymn numbers from sabbath_school content
     if let Some(ref ss_content) = bulletin.sabbath_school {
-        bulletin.sabbath_school = Some(process_hymn_references(pool, ss_content).await?);
+        let hymn_numbers = extract_hymn_numbers_from_content(ss_content);
+        all_hymn_numbers.extend(hymn_numbers);
     }
 
+    // Batch query all hymns from both hymnals for this single bulletin
+    let hymn_numbers_vec: Vec<i32> = all_hymn_numbers.into_iter().collect();
+    let hymnal_codes = vec!["sda-1985".to_string(), "sda-1941".to_string()];
+    let hymn_map = crate::sql::hymnal::get_hymns_by_numbers_batch(pool, &hymnal_codes, &hymn_numbers_vec).await?;
+
+    // Process hymn references using cached data
+    if let Some(ref worship_content) = bulletin.divine_worship {
+        bulletin.divine_worship = Some(process_hymn_references_with_cache(worship_content, &hymn_map));
+    }
+
+    if let Some(ref ss_content) = bulletin.sabbath_school {
+        bulletin.sabbath_school = Some(process_hymn_references_with_cache(ss_content, &hymn_map));
+    }
+
     // Ensure sunset field compatibility
@@ -151,4 +194,104 @@ pub async fn process_hymn_references(
     }
 
     Ok(result)
 }
+
+/// Extract hymn numbers from content without making database calls
+fn extract_hymn_numbers_from_content(content: &str) -> Vec<i32> {
+    let hymn_patterns = vec![
+        Regex::new(r"#(\d{1,3})").unwrap(),
+        Regex::new(r"(?i)hymn\s+(\d{1,3})").unwrap(),
+        Regex::new(r"(?i)no\.\s*(\d{1,3})").unwrap(),
+        Regex::new(r"(?i)number\s+(\d{1,3})").unwrap(),
+    ];
+
+    let mut hymn_numbers = Vec::new();
+
+    for pattern in hymn_patterns {
+        for capture in pattern.captures_iter(content) {
+            if let Some(number_match) = capture.get(1) {
+                if let Ok(hymn_number) = number_match.as_str().parse::<i32>() {
+                    hymn_numbers.push(hymn_number);
+                }
+            }
+        }
+    }
+
+    hymn_numbers
+}
+
+/// Process single bulletin using cached hymn data (no database calls for hymns)
+async fn process_single_bulletin_with_cache(
+    pool: &sqlx::PgPool,
+    bulletin: &mut Bulletin,
+    hymn_cache: &std::collections::HashMap<(String, i32), crate::models::HymnWithHymnal>
+) -> Result<()> {
+    // Process scripture reading to include full verse text (still needs DB for bible verses)
+    bulletin.scripture_reading = process_scripture_reading(pool, &bulletin.scripture_reading).await?;
+
+    // Process hymn references in worship content using cache
+    if let Some(ref worship_content) = bulletin.divine_worship {
+        bulletin.divine_worship = Some(process_hymn_references_with_cache(worship_content, hymn_cache));
+    }
+
+    // Process hymn references in sabbath school content using cache
+    if let Some(ref ss_content) = bulletin.sabbath_school {
+        bulletin.sabbath_school = Some(process_hymn_references_with_cache(ss_content, hymn_cache));
+    }
+
+    // Ensure sunset field compatibility
+    if bulletin.sunset.is_none() {
+        bulletin.sunset = Some("TBA".to_string());
+    }
+
+    Ok(())
+}
+
+/// Process hymn references using cached hymn data (no database calls)
+fn process_hymn_references_with_cache(
+    content: &str,
+    hymn_cache: &std::collections::HashMap<(String, i32), crate::models::HymnWithHymnal>
+) -> String {
+    let hymn_patterns = vec![
+        Regex::new(r"#(\d{1,3})").unwrap(),
+        Regex::new(r"(?i)hymn\s+(\d{1,3})").unwrap(),
+        Regex::new(r"(?i)no\.\s*(\d{1,3})").unwrap(),
+        Regex::new(r"(?i)number\s+(\d{1,3})").unwrap(),
+    ];
+
+    let mut result = content.to_string();
+
+    // Process each pattern
+    for pattern in hymn_patterns {
+        let mut matches_to_replace = Vec::new();
+
+        // Find all matches for this pattern
+        for capture in pattern.captures_iter(&result) {
+            if let Some(number_match) = capture.get(1) {
+                if let Ok(hymn_number) = number_match.as_str().parse::<i32>() {
+                    // Look for hymn in 1985 hymnal first, then 1941
+                    let hymn_title = hymn_cache.get(&("sda-1985".to_string(), hymn_number))
+                        .or_else(|| hymn_cache.get(&("sda-1941".to_string(), hymn_number)))
+                        .map(|hymn| &hymn.title);
+
+                    if let Some(title) = hymn_title {
+                        let full_match = capture.get(0).unwrap();
+                        matches_to_replace.push((
+                            full_match.start(),
+                            full_match.end(),
+                            format!("{} - {}", full_match.as_str(), title)
+                        ));
+                    }
+                }
+            }
+        }
+
+        // Apply replacements in reverse order to maintain string indices
+        matches_to_replace.reverse();
+        for (start, end, replacement) in matches_to_replace {
+            result.replace_range(start..end, &replacement);
+        }
+    }
+
+    result
+}
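
For context, the `(hymnal_code, number)` keyed map above replaces one query per hymn reference. A standalone sketch of the 1985-then-1941 fallback that `process_hymn_references_with_cache` performs (plain `String` titles stand in for `crate::models::HymnWithHymnal`, and the hymn title is a placeholder, not real data):

    use std::collections::HashMap;

    // Prefer the 1985 hymnal, then fall back to the 1941 hymnal.
    fn lookup_title(cache: &HashMap<(String, i32), String>, number: i32) -> Option<&String> {
        cache
            .get(&("sda-1985".to_string(), number))
            .or_else(|| cache.get(&("sda-1941".to_string(), number)))
    }

    fn main() {
        let mut cache = HashMap::new();
        // Hypothetical entry: hymn #123 only exists in the 1941 hymnal here.
        cache.insert(("sda-1941".to_string(), 123), "Placeholder Title".to_string());
        assert_eq!(lookup_title(&cache, 123).map(String::as_str), Some("Placeholder Title"));
        assert_eq!(lookup_title(&cache, 999), None);
    }
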
@@ -25,6 +25,10 @@ pub async fn get_recurring_types() -> Json<Vec<&'static str>> {
     Json(crate::utils::validation::get_valid_recurring_types())
 }
 
+pub async fn get_event_categories() -> Json<Vec<&'static str>> {
+    Json(vec!["Service", "Social", "Ministry", "Other"])
+}
+
 pub async fn update_config(
     State(state): State<AppState>,
     Json(config): Json<ChurchConfig>,
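
A hedged usage sketch (not part of the commit) of what the new handler is expected to return; it assumes a tokio test runner and that `get_event_categories` is in scope:

    #[tokio::test]
    async fn event_categories_payload() {
        // axum's Json is a tuple struct, so the body can be destructured directly.
        let axum::Json(categories) = get_event_categories().await;
        assert_eq!(categories, vec!["Service", "Social", "Ministry", "Other"]);
    }
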
@@ -32,19 +32,9 @@ pub async fn list(
     let pagination = PaginationHelper::from_query(query.page, query.per_page);
 
     let url_builder = UrlBuilder::new();
-    let events_v2 = EventsV2Service::list_all(&state.pool, timezone, &url_builder).await?;
-    let total = events_v2.len() as i64;
+    let (events_v2, total) = EventsV2Service::list_paginated(&state.pool, pagination.page, pagination.per_page, timezone, &url_builder).await?;
 
-    // Apply pagination
-    let start = pagination.offset as usize;
-    let end = std::cmp::min(start + pagination.per_page as usize, events_v2.len());
-    let paginated_events = if start < events_v2.len() {
-        events_v2[start..end].to_vec()
-    } else {
-        Vec::new()
-    };
-
-    let response = pagination.create_response(paginated_events, total);
+    let response = pagination.create_response(events_v2, total);
     Ok(success_response(response))
 }
 
@@ -122,6 +122,7 @@ let app = Router::new()
         .route("/api/events/:id", get(handlers::events::get))
         .route("/api/config", get(handlers::config::get_public_config))
         .route("/api/config/recurring-types", get(handlers::config::get_recurring_types))
+        .route("/api/config/event-categories", get(handlers::config::get_event_categories))
         .route("/api/collections/rtsda_android/records", get(handlers::legacy::android_update))
        .route("/api/bible_verses/random", get(handlers::bible_verses::random))
        .route("/api/bible_verses", get(handlers::bible_verses::list))
@@ -27,12 +27,20 @@ impl EventsV2Service {
         convert_events_to_v2(events, timezone, url_builder)
     }
 
-    /// Get all events with V2 timezone handling and pagination
+    /// Get all events with V2 timezone handling (no pagination)
     pub async fn list_all(pool: &PgPool, timezone: &str, url_builder: &UrlBuilder) -> Result<Vec<EventV2>> {
         let events = events::list_all_events(pool).await?;
         convert_events_to_v2(events, timezone, url_builder)
     }
 
+    /// Get events with proper database-level pagination and V2 timezone handling
+    pub async fn list_paginated(pool: &PgPool, page: i32, per_page: i32, timezone: &str, url_builder: &UrlBuilder) -> Result<(Vec<EventV2>, i64)> {
+        let events = events::list_events_paginated(pool, page, per_page).await?;
+        let total = events::count_events(pool).await?;
+        let converted_events = convert_events_to_v2(events, timezone, url_builder)?;
+        Ok((converted_events, total))
+    }
+
     /// Get single event by ID with V2 timezone handling
     pub async fn get_by_id(pool: &PgPool, id: &Uuid, timezone: &str, url_builder: &UrlBuilder) -> Result<Option<EventV2>> {
         let event = events::get_event_by_id(pool, id).await?;
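
A brief caller sketch (hypothetical; assumes `pool`, `page`, `per_page`, `timezone`, and a `UrlBuilder` are already in scope inside an async fn returning the crate's `Result`) showing how the `(items, total)` tuple from `list_paginated` can drive a response envelope:

    // Mirrors what the V2 events handler does via PaginationHelper::create_response.
    let (events, total) = EventsV2Service::list_paginated(&pool, page, per_page, timezone, &url_builder).await?;
    let total_pages = (total + per_page as i64 - 1) / per_page as i64; // ceiling division
    tracing::debug!("page {page}/{total_pages}: {} of {total} events", events.len());
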
@@ -71,6 +71,37 @@ pub async fn list_all_events(pool: &PgPool) -> Result<Vec<Event>> {
     })
 }
 
+/// Get events with pagination
+pub async fn list_events_paginated(pool: &PgPool, page: i32, per_page: i32) -> Result<Vec<Event>> {
+    let offset = (page - 1) * per_page;
+    sqlx::query_as!(
+        Event,
+        "SELECT * FROM events ORDER BY start_time DESC LIMIT $1 OFFSET $2",
+        per_page as i64,
+        offset as i64
+    )
+    .fetch_all(pool)
+    .await
+    .map_err(|e| {
+        tracing::error!("Failed to list events paginated: {}", e);
+        ApiError::DatabaseError(e)
+    })
+}
+
+/// Count total number of events
+pub async fn count_events(pool: &PgPool) -> Result<i64> {
+    sqlx::query_scalar!(
+        "SELECT COUNT(*) FROM events"
+    )
+    .fetch_one(pool)
+    .await
+    .map(|count| count.unwrap_or(0))
+    .map_err(|e| {
+        tracing::error!("Failed to count events: {}", e);
+        ApiError::DatabaseError(e)
+    })
+}
+
 /// Get event by ID
 pub async fn get_event_by_id(pool: &PgPool, id: &Uuid) -> Result<Option<Event>> {
     sqlx::query_as!(
@@ -480,6 +480,42 @@ pub async fn get_thematic_ambits_batch(pool: &PgPool, list_ids: &[uuid::Uuid]) -
     Ok(ambits_map)
 }
 
+/// Get hymns by numbers for specific hymnals in batch (optimization for bulletin processing)
+pub async fn get_hymns_by_numbers_batch(
+    pool: &PgPool,
+    hymnal_codes: &[String],
+    hymn_numbers: &[i32]
+) -> Result<std::collections::HashMap<(String, i32), crate::models::HymnWithHymnal>> {
+    if hymnal_codes.is_empty() || hymn_numbers.is_empty() {
+        return Ok(std::collections::HashMap::new());
+    }
+
+    let hymns = sqlx::query_as!(
+        crate::models::HymnWithHymnal,
+        r#"
+        SELECT h.id, h.hymnal_id, h.number, h.title, h.content, h.is_favorite, h.created_at, h.updated_at,
+               hy.name as "hymnal_name!", hy.code as "hymnal_code!", hy.year as "hymnal_year"
+        FROM hymns h
+        JOIN hymnals hy ON h.hymnal_id = hy.id
+        WHERE hy.code = ANY($1) AND h.number = ANY($2) AND hy.is_active = true
+        ORDER BY hy.code, h.number
+        "#,
+        hymnal_codes,
+        hymn_numbers
+    )
+    .fetch_all(pool)
+    .await
+    .map_err(|e| crate::error::ApiError::DatabaseError(e))?;
+
+    let mut hymn_map = std::collections::HashMap::new();
+    for hymn in hymns {
+        let key = (hymn.hymnal_code.clone(), hymn.number);
+        hymn_map.insert(key, hymn);
+    }
+
+    Ok(hymn_map)
+}
+
 /// Count responsive readings
 pub async fn count_responsive_readings(pool: &PgPool) -> Result<i64> {
     let count = sqlx::query!("SELECT COUNT(*) as count FROM responsive_readings")
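
A short usage sketch (hypothetical caller; assumes `pool` is in scope and the function is imported) showing that one round trip now covers every referenced hymn across both hymnals:

    let codes = vec!["sda-1985".to_string(), "sda-1941".to_string()];
    let numbers = vec![12, 499]; // numbers gathered by extract_hymn_numbers_from_content
    let hymns = get_hymns_by_numbers_batch(&pool, &codes, &numbers).await?;
    if let Some(hymn) = hymns.get(&("sda-1985".to_string(), 499)) {
        // Single in-memory probe instead of an extra SELECT per hymn reference.
        println!("#{} - {}", hymn.number, hymn.title);
    }
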