Rust library to generate static websites

feat: incremental builds — track asset-to-route dependencies and rebuild only routes affected by changed files in dev mode

+1020 -155
+9 -8
Cargo.lock
··· 1670 1670 ] 1671 1671 1672 1672 [[package]] 1673 + name = "fixtures-incremental-build" 1674 + version = "0.1.0" 1675 + dependencies = [ 1676 + "maud", 1677 + "maudit", 1678 + ] 1679 + 1680 + [[package]] 1673 1681 name = "fixtures-prefetch-prerender" 1674 1682 version = "0.1.0" 1675 1683 dependencies = [ ··· 1958 1966 version = "0.5.0" 1959 1967 source = "registry+https://github.com/rust-lang/crates.io-index" 1960 1968 checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" 1961 - 1962 - [[package]] 1963 - name = "hot-reload-optimization" 1964 - version = "0.1.0" 1965 - dependencies = [ 1966 - "codspeed-divan-compat", 1967 - "maudit", 1968 - ] 1969 1969 1970 1970 [[package]] 1971 1971 name = "http" ··· 2600 2600 "rayon", 2601 2601 "rustc-hash", 2602 2602 "serde", 2603 + "serde_json", 2603 2604 "serde_yaml", 2604 2605 "slug", 2605 2606 "syntect",
+26 -3
crates/maudit-cli/src/dev.rs
··· 49 49 .collect::<Vec<_>>(); 50 50 51 51 let mut debouncer = new_debouncer( 52 - std::time::Duration::from_millis(100), 52 + std::time::Duration::from_millis(200), // Longer debounce to better batch rapid file changes 53 53 None, 54 54 move |result: DebounceEventResult| { 55 55 tx.blocking_send(result).unwrap_or(()); ··· 164 164 } 165 165 } else { 166 166 // Normal rebuild - check if we need full recompilation or just rerun 167 - let changed_paths: Vec<PathBuf> = events.iter() 167 + let mut changed_paths: Vec<PathBuf> = events.iter() 168 168 .flat_map(|e| e.paths.iter().cloned()) 169 + .filter(|p| { 170 + // Only keep files with known asset extensions 171 + if let Some(ext) = p.extension() { 172 + let ext_str = ext.to_string_lossy().to_lowercase(); 173 + matches!(ext_str.as_str(), 174 + "rs" | "toml" | "css" | "js" | "ts" | "jsx" | "tsx" | 175 + "html" | "md" | "txt" | "json" | "yaml" | "yml" | 176 + "png" | "jpg" | "jpeg" | "gif" | "svg" | "webp" | "ico" | 177 + "woff" | "woff2" | "ttf" | "eot" | "otf") 178 + } else { 179 + false 180 + } 181 + }) 169 182 .collect(); 183 + 184 + // Deduplicate paths 185 + changed_paths.sort(); 186 + changed_paths.dedup(); 187 + 188 + if changed_paths.is_empty() { 189 + // No file changes, only directory changes - skip rebuild 190 + continue; 191 + } 170 192 171 193 let needs_recompile = build_manager_watcher.needs_recompile(&changed_paths).await; 172 194 ··· 188 210 // Just rerun the binary without recompiling 189 211 info!(name: "watch", "Non-dependency files changed, rerunning binary..."); 190 212 let build_manager_clone = build_manager_watcher.clone(); 213 + let changed_paths_clone = changed_paths.clone(); 191 214 tokio::spawn(async move { 192 - match build_manager_clone.rerun_binary().await { 215 + match build_manager_clone.rerun_binary(&changed_paths_clone).await { 193 216 Ok(_) => { 194 217 // Rerun completed (success or failure already logged) 195 218 }
+21 -4
crates/maudit-cli/src/dev/build.rs
··· 78 78 } 79 79 80 80 /// Rerun the binary without recompiling 81 - pub async fn rerun_binary(&self) -> Result<bool, Box<dyn std::error::Error>> { 81 + pub async fn rerun_binary( 82 + &self, 83 + changed_paths: &[PathBuf], 84 + ) -> Result<bool, Box<dyn std::error::Error>> { 82 85 let binary_path = self.binary_path.read().await; 83 86 84 87 let Some(path) = binary_path.as_ref() else { ··· 91 94 return self.start_build().await; 92 95 } 93 96 97 + // Log that we're doing an incremental build 98 + info!(name: "build", "Incremental build: {} files changed", changed_paths.len()); 99 + debug!(name: "build", "Changed files: {:?}", changed_paths); 94 100 info!(name: "build", "Rerunning binary without recompilation..."); 95 101 96 102 // Notify that build is starting (even though we're just rerunning) ··· 104 110 105 111 let build_start_time = Instant::now(); 106 112 113 + // Serialize changed paths to JSON for the binary 114 + let changed_files_json = serde_json::to_string(changed_paths)?; 115 + 107 116 let child = Command::new(path) 108 - .envs([("MAUDIT_DEV", "true"), ("MAUDIT_QUIET", "true")]) 117 + .envs([ 118 + ("MAUDIT_DEV", "true"), 119 + ("MAUDIT_CHANGED_FILES", changed_files_json.as_str()), 120 + ]) 109 121 .stdout(std::process::Stdio::piped()) 110 122 .stderr(std::process::Stdio::piped()) 111 123 .spawn()?; ··· 118 130 format_elapsed_time(duration, &FormatElapsedTimeOptions::default_dev()); 119 131 120 132 if output.status.success() { 133 + let stderr = String::from_utf8_lossy(&output.stderr).to_string(); 134 + let stdout = String::from_utf8_lossy(&output.stdout).to_string(); 135 + println!("{}", stdout); 136 + if !stderr.is_empty() { 137 + println!("{}", stderr); 138 + } 121 139 info!(name: "build", "Binary rerun finished {}", formatted_elapsed_time); 122 140 update_status( 123 141 &self.websocket_tx, ··· 186 204 ]) 187 205 .envs([ 188 206 ("MAUDIT_DEV", "true"), 189 - ("MAUDIT_QUIET", "true"), 190 207 ("CARGO_TERM_COLOR", "always"), 191 208 ]) 192 209 
.stdout(std::process::Stdio::piped()) ··· 197 214 let mut stdout = child.stdout.take().unwrap(); 198 215 let mut stderr = child.stderr.take().unwrap(); 199 216 217 + let build_start_time = Instant::now(); 200 218 let websocket_tx = self.websocket_tx.clone(); 201 219 let current_status = self.current_status.clone(); 202 220 let dep_tracker_clone = self.dep_tracker.clone(); 203 221 let binary_path_clone = self.binary_path.clone(); 204 222 let target_dir_clone = self.target_dir.clone(); 205 223 let binary_name_clone = self.binary_name.clone(); 206 - let build_start_time = Instant::now(); 207 224 208 225 // Create a channel to get the build result back 209 226 let (result_tx, mut result_rx) = tokio::sync::mpsc::channel::<bool>(1);
+6 -2
crates/maudit-cli/src/logging.rs
··· 2 2 use std::{fmt, time::Duration}; 3 3 use tracing::{Event, Subscriber}; 4 4 use tracing_subscriber::{ 5 - fmt::{FmtContext, FormatEvent, FormatFields, format}, 5 + fmt::{format, FmtContext, FormatEvent, FormatFields}, 6 6 layer::SubscriberExt, 7 7 registry::LookupSpan, 8 8 util::SubscriberInitExt, ··· 135 135 tracing_subscriber::registry() 136 136 .with( 137 137 tracing_subscriber::EnvFilter::try_from_default_env().unwrap_or_else(|_| { 138 - format!("{}=info,tower_http=info", env!("CARGO_CRATE_NAME")).into() 138 + format!( 139 + "{}=info,maudit=info,tower_http=info", 140 + env!("CARGO_CRATE_NAME") 141 + ) 142 + .into() 139 143 }), 140 144 ) 141 145 .with(tracing_formatter)
+1
crates/maudit/Cargo.toml
··· 24 24 # TODO: Allow making those optional 25 25 rolldown = { package = "brk_rolldown", version = "0.8.0" } 26 26 serde = { workspace = true } 27 + serde_json = "1.0" 27 28 serde_yaml = "0.9.34" 28 29 pulldown-cmark = "0.13.0" 29 30 tokio = { version = "1", features = ["macros", "rt-multi-thread"] }
+347 -133
crates/maudit/src/build.rs
··· 14 14 self, HashAssetType, HashConfig, PrefetchPlugin, RouteAssets, Script, TailwindPlugin, 15 15 calculate_hash, image_cache::ImageCache, prefetch, 16 16 }, 17 - build::{images::process_image, options::PrefetchStrategy}, 17 + build::{images::process_image, options::PrefetchStrategy, state::{BuildState, RouteIdentifier}}, 18 18 content::ContentSources, 19 19 is_dev, 20 20 logging::print_title, ··· 36 36 pub mod images; 37 37 pub mod metadata; 38 38 pub mod options; 39 + pub mod state; 40 + 41 + /// Helper to check if a route should be rebuilt during incremental builds 42 + fn should_rebuild_route( 43 + route_id: &RouteIdentifier, 44 + routes_to_rebuild: &Option<FxHashSet<RouteIdentifier>>, 45 + ) -> bool { 46 + let result = match routes_to_rebuild { 47 + Some(set) => set.contains(route_id), 48 + None => true, // Full build 49 + }; 50 + 51 + if !result { 52 + trace!(target: "build", "Skipping route {:?} (not in rebuild set)", route_id); 53 + } 54 + 55 + result 56 + } 57 + 58 + /// Helper to track all assets used by a route 59 + fn track_route_assets( 60 + build_state: &mut BuildState, 61 + route_id: &RouteIdentifier, 62 + route_assets: &RouteAssets, 63 + ) { 64 + // Track images 65 + for image in &route_assets.images { 66 + if let Ok(canonical) = image.path().canonicalize() { 67 + build_state.track_asset(canonical, route_id.clone()); 68 + } 69 + } 70 + 71 + // Track scripts 72 + for script in &route_assets.scripts { 73 + if let Ok(canonical) = script.path().canonicalize() { 74 + build_state.track_asset(canonical, route_id.clone()); 75 + } 76 + } 77 + 78 + // Track styles 79 + for style in &route_assets.styles { 80 + if let Ok(canonical) = style.path().canonicalize() { 81 + build_state.track_asset(canonical, route_id.clone()); 82 + } 83 + } 84 + } 39 85 40 86 pub fn execute_build( 41 87 routes: &[&dyn FullRoute], 42 88 content_sources: &mut ContentSources, 43 89 options: &BuildOptions, 90 + changed_files: Option<&[PathBuf]>, 44 91 async_runtime: 
&tokio::runtime::Runtime, 45 92 ) -> Result<BuildOutput, Box<dyn std::error::Error>> { 46 - async_runtime.block_on(async { build(routes, content_sources, options).await }) 93 + async_runtime.block_on(async { build(routes, content_sources, options, changed_files).await }) 47 94 } 48 95 49 96 pub async fn build( 50 97 routes: &[&dyn FullRoute], 51 98 content_sources: &mut ContentSources, 52 99 options: &BuildOptions, 100 + changed_files: Option<&[PathBuf]>, 53 101 ) -> Result<BuildOutput, Box<dyn std::error::Error>> { 54 102 let build_start = Instant::now(); 55 103 let mut build_metadata = BuildOutput::new(build_start); ··· 57 105 // Create a directory for the output 58 106 trace!(target: "build", "Setting up required directories..."); 59 107 60 - let clean_up_handle = if options.clean_output_dir { 108 + // Determine build cache directory 109 + let build_cache_dir = options.assets.image_cache_dir.parent() 110 + .unwrap_or(Path::new("target/maudit_cache")) 111 + .to_path_buf(); 112 + 113 + // Load build state for incremental builds 114 + let mut build_state = if is_dev() { 115 + BuildState::load(&build_cache_dir).unwrap_or_else(|e| { 116 + debug!(target: "build", "Failed to load build state: {}", e); 117 + BuildState::new() 118 + }) 119 + } else { 120 + BuildState::new() 121 + }; 122 + 123 + // Determine if this is an incremental build 124 + let is_incremental = is_dev() && changed_files.is_some() && !build_state.asset_to_routes.is_empty(); 125 + 126 + let routes_to_rebuild = if is_incremental { 127 + let changed = changed_files.unwrap(); 128 + info!(target: "build", "Incremental build: {} files changed", changed.len()); 129 + info!(target: "build", "Changed files: {:?}", changed); 130 + 131 + info!(target: "build", "Build state has {} asset mappings", build_state.asset_to_routes.len()); 132 + 133 + let affected = build_state.get_affected_routes(changed); 134 + info!(target: "build", "Rebuilding {} affected routes", affected.len()); 135 + info!(target: "build", 
"Affected routes: {:?}", affected); 136 + 137 + Some(affected) 138 + } else { 139 + if changed_files.is_some() { 140 + info!(target: "build", "Full build (first run after recompilation)"); 141 + } 142 + // Full build - clear old state 143 + build_state.clear(); 144 + None 145 + }; 146 + 147 + // Check if we should rebundle during incremental builds 148 + // Only rebundle if a changed file is in the bundler inputs 149 + let should_rebundle = if is_incremental && !build_state.bundler_inputs.is_empty() { 150 + let changed = changed_files.unwrap(); 151 + let should = changed.iter().any(|changed_file| { 152 + build_state.bundler_inputs.iter().any(|bundler_input| { 153 + // Check if the changed file matches any bundler input 154 + // Canonicalize both paths for comparison 155 + if let (Ok(changed_canonical), Ok(bundler_canonical)) = ( 156 + changed_file.canonicalize(), 157 + PathBuf::from(bundler_input).canonicalize() 158 + ) { 159 + changed_canonical == bundler_canonical 160 + } else { 161 + false 162 + } 163 + }) 164 + }); 165 + 166 + if should { 167 + info!(target: "build", "Rebundling needed: changed file matches bundler input"); 168 + } else { 169 + info!(target: "build", "Skipping bundler: no changed files match bundler inputs"); 170 + } 171 + 172 + should 173 + } else { 174 + // Not incremental or no previous bundler inputs 175 + false 176 + }; 177 + 178 + let clean_up_handle = if options.clean_output_dir && !is_incremental { 61 179 let old_dist_tmp_dir = { 62 180 let duration = SystemTime::now().duration_since(UNIX_EPOCH)?; 63 181 let num = (duration.as_secs() + duration.subsec_nanos() as u64) % 100000; ··· 183 301 184 302 // Static base route 185 303 if base_params.is_empty() { 186 - let mut route_assets = RouteAssets::with_default_assets( 187 - &route_assets_options, 188 - Some(image_cache.clone()), 189 - default_scripts.clone(), 190 - vec![], 191 - ); 304 + let route_id = RouteIdentifier::base(base_path.clone(), None); 305 + 306 + // Check if we need to 
rebuild this route 307 + if should_rebuild_route(&route_id, &routes_to_rebuild) { 308 + let mut route_assets = RouteAssets::with_default_assets( 309 + &route_assets_options, 310 + Some(image_cache.clone()), 311 + default_scripts.clone(), 312 + vec![], 313 + ); 192 314 193 - let params = PageParams::default(); 194 - let url = cached_route.url(&params); 315 + let params = PageParams::default(); 316 + let url = cached_route.url(&params); 195 317 196 - let result = route.build(&mut PageContext::from_static_route( 197 - content_sources, 198 - &mut route_assets, 199 - &url, 200 - &options.base_url, 201 - None, 202 - ))?; 318 + let result = route.build(&mut PageContext::from_static_route( 319 + content_sources, 320 + &mut route_assets, 321 + &url, 322 + &options.base_url, 323 + None, 324 + ))?; 203 325 204 - let file_path = cached_route.file_path(&params, &options.output_dir); 326 + let file_path = cached_route.file_path(&params, &options.output_dir); 205 327 206 - write_route_file(&result, &file_path)?; 328 + write_route_file(&result, &file_path)?; 207 329 208 - info!(target: "pages", "{} -> {} {}", url, file_path.to_string_lossy().dimmed(), format_elapsed_time(route_start.elapsed(), &route_format_options)); 330 + info!(target: "pages", "{} -> {} {}", url, file_path.to_string_lossy().dimmed(), format_elapsed_time(route_start.elapsed(), &route_format_options)); 209 331 210 - build_pages_images.extend(route_assets.images); 211 - build_pages_scripts.extend(route_assets.scripts); 212 - build_pages_styles.extend(route_assets.styles); 332 + // Track assets for this route 333 + track_route_assets(&mut build_state, &route_id, &route_assets); 213 334 214 - build_metadata.add_page( 215 - base_path.clone(), 216 - file_path.to_string_lossy().to_string(), 217 - None, 218 - ); 335 + build_pages_images.extend(route_assets.images); 336 + build_pages_scripts.extend(route_assets.scripts); 337 + build_pages_styles.extend(route_assets.styles); 219 338 220 - add_sitemap_entry( 221 - &mut 
sitemap_entries, 222 - normalized_base_url, 223 - &url, 224 - base_path, 225 - &route.sitemap_metadata(), 226 - &options.sitemap, 227 - ); 339 + build_metadata.add_page( 340 + base_path.clone(), 341 + file_path.to_string_lossy().to_string(), 342 + None, 343 + ); 228 344 229 - page_count += 1; 345 + add_sitemap_entry( 346 + &mut sitemap_entries, 347 + normalized_base_url, 348 + &url, 349 + base_path, 350 + &route.sitemap_metadata(), 351 + &options.sitemap, 352 + ); 353 + 354 + page_count += 1; 355 + } else { 356 + trace!(target: "build", "Skipping unchanged route: {}", base_path); 357 + } 230 358 } else { 231 359 // Dynamic base route 232 360 let mut route_assets = RouteAssets::with_default_assets( ··· 250 378 251 379 // Build all pages for this route 252 380 for page in pages { 253 - let page_start = Instant::now(); 254 - let url = cached_route.url(&page.0); 255 - let file_path = cached_route.file_path(&page.0, &options.output_dir); 381 + let route_id = RouteIdentifier::base( 382 + base_path.clone(), 383 + Some(page.0.0.clone()), 384 + ); 385 + 386 + // Check if we need to rebuild this specific page 387 + if should_rebuild_route(&route_id, &routes_to_rebuild) { 388 + let page_start = Instant::now(); 389 + let url = cached_route.url(&page.0); 390 + let file_path = cached_route.file_path(&page.0, &options.output_dir); 256 391 257 - let content = route.build(&mut PageContext::from_dynamic_route( 258 - &page, 259 - content_sources, 260 - &mut route_assets, 261 - &url, 262 - &options.base_url, 263 - None, 264 - ))?; 392 + let content = route.build(&mut PageContext::from_dynamic_route( 393 + &page, 394 + content_sources, 395 + &mut route_assets, 396 + &url, 397 + &options.base_url, 398 + None, 399 + ))?; 265 400 266 - write_route_file(&content, &file_path)?; 401 + write_route_file(&content, &file_path)?; 267 402 268 - info!(target: "pages", "├─ {} {}", file_path.to_string_lossy().dimmed(), format_elapsed_time(page_start.elapsed(), &route_format_options)); 403 + 
info!(target: "pages", "├─ {} {}", file_path.to_string_lossy().dimmed(), format_elapsed_time(page_start.elapsed(), &route_format_options)); 269 404 270 - build_metadata.add_page( 271 - base_path.clone(), 272 - file_path.to_string_lossy().to_string(), 273 - Some(page.0.0.clone()), 274 - ); 405 + // Track assets for this page 406 + track_route_assets(&mut build_state, &route_id, &route_assets); 275 407 276 - add_sitemap_entry( 277 - &mut sitemap_entries, 278 - normalized_base_url, 279 - &url, 280 - base_path, 281 - &route.sitemap_metadata(), 282 - &options.sitemap, 283 - ); 408 + build_metadata.add_page( 409 + base_path.clone(), 410 + file_path.to_string_lossy().to_string(), 411 + Some(page.0.0.clone()), 412 + ); 284 413 285 - page_count += 1; 414 + add_sitemap_entry( 415 + &mut sitemap_entries, 416 + normalized_base_url, 417 + &url, 418 + base_path, 419 + &route.sitemap_metadata(), 420 + &options.sitemap, 421 + ); 422 + 423 + page_count += 1; 424 + } else { 425 + trace!(target: "build", "Skipping unchanged page: {} with params {:?}", base_path, page.0.0); 426 + } 286 427 } 287 428 } 288 429 ··· 299 440 300 441 if variant_params.is_empty() { 301 442 // Static variant 302 - let mut route_assets = RouteAssets::with_default_assets( 303 - &route_assets_options, 304 - Some(image_cache.clone()), 305 - default_scripts.clone(), 306 - vec![], 443 + let route_id = RouteIdentifier::variant( 444 + variant_id.clone(), 445 + variant_path.clone(), 446 + None, 307 447 ); 448 + 449 + // Check if we need to rebuild this variant 450 + if should_rebuild_route(&route_id, &routes_to_rebuild) { 451 + let mut route_assets = RouteAssets::with_default_assets( 452 + &route_assets_options, 453 + Some(image_cache.clone()), 454 + default_scripts.clone(), 455 + vec![], 456 + ); 308 457 309 - let params = PageParams::default(); 310 - let url = cached_route.variant_url(&params, &variant_id)?; 311 - let file_path = 312 - cached_route.variant_file_path(&params, &options.output_dir, &variant_id)?; 458 
+ let params = PageParams::default(); 459 + let url = cached_route.variant_url(&params, &variant_id)?; 460 + let file_path = 461 + cached_route.variant_file_path(&params, &options.output_dir, &variant_id)?; 313 462 314 - let result = route.build(&mut PageContext::from_static_route( 315 - content_sources, 316 - &mut route_assets, 317 - &url, 318 - &options.base_url, 319 - Some(variant_id.clone()), 320 - ))?; 463 + let result = route.build(&mut PageContext::from_static_route( 464 + content_sources, 465 + &mut route_assets, 466 + &url, 467 + &options.base_url, 468 + Some(variant_id.clone()), 469 + ))?; 321 470 322 - write_route_file(&result, &file_path)?; 471 + write_route_file(&result, &file_path)?; 323 472 324 - info!(target: "pages", "├─ {} {}", file_path.to_string_lossy().dimmed(), format_elapsed_time(variant_start.elapsed(), &route_format_options)); 473 + info!(target: "pages", "├─ {} {}", file_path.to_string_lossy().dimmed(), format_elapsed_time(variant_start.elapsed(), &route_format_options)); 325 474 326 - build_pages_images.extend(route_assets.images); 327 - build_pages_scripts.extend(route_assets.scripts); 328 - build_pages_styles.extend(route_assets.styles); 475 + // Track assets for this variant 476 + track_route_assets(&mut build_state, &route_id, &route_assets); 329 477 330 - build_metadata.add_page( 331 - variant_path.clone(), 332 - file_path.to_string_lossy().to_string(), 333 - None, 334 - ); 478 + build_pages_images.extend(route_assets.images); 479 + build_pages_scripts.extend(route_assets.scripts); 480 + build_pages_styles.extend(route_assets.styles); 335 481 336 - add_sitemap_entry( 337 - &mut sitemap_entries, 338 - normalized_base_url, 339 - &url, 340 - &variant_path, 341 - &route.sitemap_metadata(), 342 - &options.sitemap, 343 - ); 482 + build_metadata.add_page( 483 + variant_path.clone(), 484 + file_path.to_string_lossy().to_string(), 485 + None, 486 + ); 487 + 488 + add_sitemap_entry( 489 + &mut sitemap_entries, 490 + normalized_base_url, 491 + 
&url, 492 + &variant_path, 493 + &route.sitemap_metadata(), 494 + &options.sitemap, 495 + ); 344 496 345 - page_count += 1; 497 + page_count += 1; 498 + } else { 499 + trace!(target: "build", "Skipping unchanged variant: {}", variant_path); 500 + } 346 501 } else { 347 502 // Dynamic variant 348 503 let mut route_assets = RouteAssets::with_default_assets( ··· 365 520 366 521 // Build all pages for this variant group 367 522 for page in pages { 368 - let variant_page_start = Instant::now(); 369 - let url = cached_route.variant_url(&page.0, &variant_id)?; 370 - let file_path = cached_route.variant_file_path( 371 - &page.0, 372 - &options.output_dir, 373 - &variant_id, 374 - )?; 523 + let route_id = RouteIdentifier::variant( 524 + variant_id.clone(), 525 + variant_path.clone(), 526 + Some(page.0.0.clone()), 527 + ); 528 + 529 + // Check if we need to rebuild this specific variant page 530 + if should_rebuild_route(&route_id, &routes_to_rebuild) { 531 + let variant_page_start = Instant::now(); 532 + let url = cached_route.variant_url(&page.0, &variant_id)?; 533 + let file_path = cached_route.variant_file_path( 534 + &page.0, 535 + &options.output_dir, 536 + &variant_id, 537 + )?; 375 538 376 - let content = route.build(&mut PageContext::from_dynamic_route( 377 - &page, 378 - content_sources, 379 - &mut route_assets, 380 - &url, 381 - &options.base_url, 382 - Some(variant_id.clone()), 383 - ))?; 539 + let content = route.build(&mut PageContext::from_dynamic_route( 540 + &page, 541 + content_sources, 542 + &mut route_assets, 543 + &url, 544 + &options.base_url, 545 + Some(variant_id.clone()), 546 + ))?; 384 547 385 - write_route_file(&content, &file_path)?; 548 + write_route_file(&content, &file_path)?; 386 549 387 - info!(target: "pages", "│ ├─ {} {}", file_path.to_string_lossy().dimmed(), format_elapsed_time(variant_page_start.elapsed(), &route_format_options)); 550 + info!(target: "pages", "│ ├─ {} {}", file_path.to_string_lossy().dimmed(), 
format_elapsed_time(variant_page_start.elapsed(), &route_format_options)); 388 551 389 - build_metadata.add_page( 390 - variant_path.clone(), 391 - file_path.to_string_lossy().to_string(), 392 - Some(page.0.0.clone()), 393 - ); 552 + // Track assets for this variant page 553 + track_route_assets(&mut build_state, &route_id, &route_assets); 394 554 395 - add_sitemap_entry( 396 - &mut sitemap_entries, 397 - normalized_base_url, 398 - &url, 399 - &variant_path, 400 - &route.sitemap_metadata(), 401 - &options.sitemap, 402 - ); 555 + build_metadata.add_page( 556 + variant_path.clone(), 557 + file_path.to_string_lossy().to_string(), 558 + Some(page.0.0.clone()), 559 + ); 403 560 404 - page_count += 1; 561 + add_sitemap_entry( 562 + &mut sitemap_entries, 563 + normalized_base_url, 564 + &url, 565 + &variant_path, 566 + &route.sitemap_metadata(), 567 + &options.sitemap, 568 + ); 569 + 570 + page_count += 1; 571 + } else { 572 + trace!(target: "build", "Skipping unchanged variant page: {} with params {:?}", variant_path, page.0.0); 573 + } 405 574 } 406 575 } 407 576 ··· 421 590 fs::create_dir_all(&route_assets_options.output_assets_dir)?; 422 591 } 423 592 424 - if !build_pages_styles.is_empty() || !build_pages_scripts.is_empty() { 593 + if !build_pages_styles.is_empty() || !build_pages_scripts.is_empty() || (is_incremental && should_rebundle) { 425 594 let assets_start = Instant::now(); 426 595 print_title("generating assets"); 427 596 ··· 439 608 }) 440 609 .collect::<Vec<InputItem>>(); 441 610 442 - let bundler_inputs = build_pages_scripts 611 + let mut bundler_inputs = build_pages_scripts 443 612 .iter() 444 613 .map(|script| InputItem { 445 614 import: script.path().to_string_lossy().to_string(), ··· 454 623 .chain(css_inputs.into_iter()) 455 624 .collect::<Vec<InputItem>>(); 456 625 626 + // During incremental builds, merge with previous bundler inputs 627 + // to ensure we bundle all assets, not just from rebuilt routes 628 + if is_incremental && 
!build_state.bundler_inputs.is_empty() { 629 + debug!(target: "bundling", "Merging with {} previous bundler inputs", build_state.bundler_inputs.len()); 630 + 631 + let current_imports: FxHashSet<String> = bundler_inputs 632 + .iter() 633 + .map(|input| input.import.clone()) 634 + .collect(); 635 + 636 + // Add previous inputs that aren't in the current set 637 + for prev_input in &build_state.bundler_inputs { 638 + if !current_imports.contains(prev_input) { 639 + bundler_inputs.push(InputItem { 640 + import: prev_input.clone(), 641 + name: Some( 642 + PathBuf::from(prev_input) 643 + .file_stem() 644 + .unwrap_or_default() 645 + .to_string_lossy() 646 + .to_string(), 647 + ), 648 + }); 649 + } 650 + } 651 + } 652 + 457 653 debug!( 458 654 target: "bundling", 459 655 "Bundler inputs: {:?}", ··· 462 658 .map(|input| input.import.clone()) 463 659 .collect::<Vec<String>>() 464 660 ); 661 + 662 + // Store bundler inputs in build state for next incremental build 663 + if is_dev() { 664 + build_state.bundler_inputs = bundler_inputs 665 + .iter() 666 + .map(|input| input.import.clone()) 667 + .collect(); 668 + } 465 669 466 670 if !bundler_inputs.is_empty() { 467 671 let mut module_types_hashmap = FxHashMap::default(); ··· 598 802 info!(target: "SKIP_FORMAT", "{}", ""); 599 803 info!(target: "build", "{}", format!("Build completed in {}", format_elapsed_time(build_start.elapsed(), &section_format_options)).bold()); 600 804 805 + // Save build state for next incremental build 806 + if is_dev() { 807 + if let Err(e) = build_state.save(&build_cache_dir) { 808 + warn!(target: "build", "Failed to save build state: {}", e); 809 + } else { 810 + debug!(target: "build", "Build state saved to {}", build_cache_dir.join("build_state.json").display()); 811 + } 812 + } 813 + 601 814 if let Some(clean_up_handle) = clean_up_handle { 602 815 clean_up_handle.await?; 603 816 } ··· 680 893 fs::create_dir_all(parent_dir)? 
681 894 } 682 895 896 + trace!(target: "build", "Writing HTML file: {}", file_path.display()); 683 897 fs::write(file_path, content)?; 684 898 685 899 Ok(())
+134
crates/maudit/src/build/state.rs
··· 1 + use rustc_hash::{FxHashMap, FxHashSet}; 2 + use serde::{Deserialize, Serialize}; 3 + use std::fs; 4 + use std::path::{Path, PathBuf}; 5 + 6 + /// Identifies a specific route or variant for incremental rebuilds 7 + #[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)] 8 + pub enum RouteIdentifier { 9 + /// A base route with optional page parameters 10 + /// Params are stored as a sorted Vec for hashing purposes 11 + Base { 12 + route_path: String, 13 + params: Option<Vec<(String, Option<String>)>>, 14 + }, 15 + /// A variant route with optional page parameters 16 + /// Params are stored as a sorted Vec for hashing purposes 17 + Variant { 18 + variant_id: String, 19 + variant_path: String, 20 + params: Option<Vec<(String, Option<String>)>>, 21 + }, 22 + } 23 + 24 + impl RouteIdentifier { 25 + pub fn base(route_path: String, params: Option<FxHashMap<String, Option<String>>>) -> Self { 26 + Self::Base { 27 + route_path, 28 + params: params.map(|p| { 29 + let mut sorted: Vec<_> = p.into_iter().collect(); 30 + sorted.sort_by(|a, b| a.0.cmp(&b.0)); 31 + sorted 32 + }), 33 + } 34 + } 35 + 36 + pub fn variant( 37 + variant_id: String, 38 + variant_path: String, 39 + params: Option<FxHashMap<String, Option<String>>>, 40 + ) -> Self { 41 + Self::Variant { 42 + variant_id, 43 + variant_path, 44 + params: params.map(|p| { 45 + let mut sorted: Vec<_> = p.into_iter().collect(); 46 + sorted.sort_by(|a, b| a.0.cmp(&b.0)); 47 + sorted 48 + }), 49 + } 50 + } 51 + } 52 + 53 + /// Tracks build state for incremental builds 54 + #[derive(Debug, Default, Serialize, Deserialize)] 55 + pub struct BuildState { 56 + /// Maps asset paths to routes that use them 57 + /// Key: canonicalized asset path 58 + /// Value: set of routes using this asset 59 + pub asset_to_routes: FxHashMap<PathBuf, FxHashSet<RouteIdentifier>>, 60 + 61 + /// Stores all bundler input paths from the last build 62 + /// This needs to be preserved to ensure consistent bundling 63 + pub bundler_inputs: 
Vec<String>, 64 + } 65 + 66 + impl BuildState { 67 + pub fn new() -> Self { 68 + Self::default() 69 + } 70 + 71 + /// Load build state from disk cache 72 + pub fn load(cache_dir: &Path) -> Result<Self, Box<dyn std::error::Error>> { 73 + let state_path = cache_dir.join("build_state.json"); 74 + 75 + if !state_path.exists() { 76 + return Ok(Self::new()); 77 + } 78 + 79 + let content = fs::read_to_string(&state_path)?; 80 + let state: BuildState = serde_json::from_str(&content)?; 81 + Ok(state) 82 + } 83 + 84 + /// Save build state to disk cache 85 + pub fn save(&self, cache_dir: &Path) -> Result<(), Box<dyn std::error::Error>> { 86 + fs::create_dir_all(cache_dir)?; 87 + let state_path = cache_dir.join("build_state.json"); 88 + let content = serde_json::to_string_pretty(self)?; 89 + fs::write(state_path, content)?; 90 + Ok(()) 91 + } 92 + 93 + /// Add an asset->route mapping 94 + pub fn track_asset(&mut self, asset_path: PathBuf, route_id: RouteIdentifier) { 95 + self.asset_to_routes 96 + .entry(asset_path) 97 + .or_default() 98 + .insert(route_id); 99 + } 100 + 101 + /// Get all routes affected by changes to specific files 102 + pub fn get_affected_routes(&self, changed_files: &[PathBuf]) -> FxHashSet<RouteIdentifier> { 103 + let mut affected_routes = FxHashSet::default(); 104 + 105 + for changed_file in changed_files { 106 + // Try exact match first 107 + if let Some(routes) = self.asset_to_routes.get(changed_file) { 108 + affected_routes.extend(routes.iter().cloned()); 109 + } 110 + 111 + // Try canonicalized path match 112 + if let Ok(canonical) = changed_file.canonicalize() { 113 + if let Some(routes) = self.asset_to_routes.get(&canonical) { 114 + affected_routes.extend(routes.iter().cloned()); 115 + } 116 + } 117 + 118 + // Also check if any tracked asset has this file as a prefix (for directories) 119 + for (asset_path, routes) in &self.asset_to_routes { 120 + if asset_path.starts_with(changed_file) { 121 + affected_routes.extend(routes.iter().cloned()); 122 + 
} 123 + } 124 + } 125 + 126 + affected_routes 127 + } 128 + 129 + /// Clear all tracked data (for full rebuild) 130 + pub fn clear(&mut self) { 131 + self.asset_to_routes.clear(); 132 + self.bundler_inputs.clear(); 133 + } 134 + }
+14 -2
crates/maudit/src/lib.rs
··· 54 54 // Internal modules 55 55 mod logging; 56 56 57 - use std::env; 57 + use std::{env, path::PathBuf}; 58 58 59 59 use build::execute_build; 60 60 use content::ContentSources; ··· 212 212 .enable_all() 213 213 .build()?; 214 214 215 - execute_build(routes, &mut content_sources, &options, &async_runtime) 215 + // Check for changed files from environment variable (set by CLI in dev mode) 216 + let changed_files = env::var("MAUDIT_CHANGED_FILES") 217 + .ok() 218 + .and_then(|s| serde_json::from_str::<Vec<String>>(&s).ok()) 219 + .map(|paths| paths.into_iter().map(PathBuf::from).collect::<Vec<_>>()); 220 + 221 + execute_build( 222 + routes, 223 + &mut content_sources, 224 + &options, 225 + changed_files.as_deref(), 226 + &async_runtime, 227 + ) 216 228 }
+6 -2
crates/maudit/src/logging.rs
··· 29 29 30 30 let _ = Builder::from_env(logging_env) 31 31 .format(|buf, record| { 32 - if std::env::args().any(|arg| arg == "--quiet") || std::env::var("MAUDIT_QUIET").is_ok() 33 - { 32 + if std::env::args().any(|arg| arg == "--quiet") { 33 + return Ok(()); 34 + } 35 + 36 + // In quiet mode, only show build target logs (for debugging incremental builds) 37 + if std::env::var("MAUDIT_QUIET").is_ok() && record.target() != "build" { 34 38 return Ok(()); 35 39 } 36 40
+9
e2e/fixtures/incremental-build/Cargo.toml
··· 1 + [package] 2 + name = "fixtures-incremental-build" 3 + version = "0.1.0" 4 + edition = "2024" 5 + publish = false 6 + 7 + [dependencies] 8 + maudit.workspace = true 9 + maud.workspace = true
+2
e2e/fixtures/incremental-build/src/assets/about.js
// About script
// Referenced by the /about route (src/pages/about.rs adds it via
// ctx.assets.add_script) so the incremental-build e2e suite can verify
// that editing this file rebuilds only the about page.
console.log('About script loaded');
+20
e2e/fixtures/incremental-build/src/assets/blog.css
/* Blog styles — pulled in only by the /blog route (src/pages/blog.rs).
   The incremental-build e2e tests append/restore comments in this file to
   trigger rebuilds, so its exact content is otherwise irrelevant. */
.blog-post {
  margin: 20px;
}
e2e/fixtures/incremental-build/src/assets/logo.png

This is a binary file and will not be displayed.

+2
e2e/fixtures/incremental-build/src/assets/main.js
// Main script
// Referenced by the index route (src/pages/index.rs adds it via
// ctx.assets.add_script); part of the incremental-build fixture's asset graph.
console.log('Main script loaded');
+13
e2e/fixtures/incremental-build/src/assets/styles.css
/* Main styles — pulled in only by the index route (src/pages/index.rs).
   The incremental-build e2e tests append/restore comments in this file to
   trigger rebuilds, so its exact content is otherwise irrelevant. */
body {
  font-family: sans-serif;
}
e2e/fixtures/incremental-build/src/assets/team.png

This is a binary file and will not be displayed.

+11
e2e/fixtures/incremental-build/src/main.rs
··· 1 + use maudit::{content_sources, coronate, routes, BuildOptions, BuildOutput}; 2 + 3 + mod pages; 4 + 5 + fn main() -> Result<BuildOutput, Box<dyn std::error::Error>> { 6 + coronate( 7 + routes![pages::index::Index, pages::about::About, pages::blog::Blog], 8 + content_sources![], 9 + BuildOptions::default(), 10 + ) 11 + }
+24
e2e/fixtures/incremental-build/src/pages/about.rs
··· 1 + use maud::{html, Markup}; 2 + use maudit::route::prelude::*; 3 + 4 + #[route("/about")] 5 + pub struct About; 6 + 7 + impl Route for About { 8 + fn render(&self, ctx: &mut PageContext) -> impl Into<RenderResult> { 9 + let _image = ctx.assets.add_image("src/assets/team.png"); 10 + let _script = ctx.assets.add_script("src/assets/about.js"); 11 + 12 + html! { 13 + html { 14 + head { 15 + title { "About Page" } 16 + } 17 + body { 18 + h1 id="title" { "About Us" } 19 + p id="content" { "Learn more about us" } 20 + } 21 + } 22 + } 23 + } 24 + }
+23
e2e/fixtures/incremental-build/src/pages/blog.rs
··· 1 + use maud::{html, Markup}; 2 + use maudit::route::prelude::*; 3 + 4 + #[route("/blog")] 5 + pub struct Blog; 6 + 7 + impl Route for Blog { 8 + fn render(&self, ctx: &mut PageContext) -> impl Into<RenderResult> { 9 + let _style = ctx.assets.add_style("src/assets/blog.css"); 10 + 11 + html! { 12 + html { 13 + head { 14 + title { "Blog Page" } 15 + } 16 + body { 17 + h1 id="title" { "Blog" } 18 + p id="content" { "Read our latest posts" } 19 + } 20 + } 21 + } 22 + } 23 + }
+25
e2e/fixtures/incremental-build/src/pages/index.rs
··· 1 + use maud::{html, Markup}; 2 + use maudit::route::prelude::*; 3 + 4 + #[route("/")] 5 + pub struct Index; 6 + 7 + impl Route for Index { 8 + fn render(&self, ctx: &mut PageContext) -> impl Into<RenderResult> { 9 + let _image = ctx.assets.add_image("src/assets/logo.png"); 10 + let _script = ctx.assets.add_script("src/assets/main.js"); 11 + let _style = ctx.assets.add_style("src/assets/styles.css"); 12 + 13 + html! { 14 + html { 15 + head { 16 + title { "Home Page" } 17 + } 18 + body { 19 + h1 id="title" { "Home Page" } 20 + p id="content" { "Welcome to the home page" } 21 + } 22 + } 23 + } 24 + } 25 + }
+3
e2e/fixtures/incremental-build/src/pages/mod.rs
··· 1 + pub mod about; 2 + pub mod blog; 3 + pub mod index;
+297
e2e/tests/incremental-build.spec.ts
··· 1 + import { expect } from "@playwright/test"; 2 + import { createTestWithFixture } from "./test-utils"; 3 + import { readFileSync, writeFileSync, statSync } from "node:fs"; 4 + import { resolve, dirname } from "node:path"; 5 + import { fileURLToPath } from "node:url"; 6 + 7 + const __filename = fileURLToPath(import.meta.url); 8 + const __dirname = dirname(__filename); 9 + 10 + // Create test instance with incremental-build fixture 11 + const test = createTestWithFixture("incremental-build"); 12 + 13 + test.describe.configure({ mode: "serial" }); 14 + 15 + /** 16 + * Wait for dev server to complete a build/rerun by polling logs 17 + */ 18 + async function waitForBuildComplete(devServer: any, timeoutMs = 20000): Promise<string[]> { 19 + const startTime = Date.now(); 20 + 21 + while (Date.now() - startTime < timeoutMs) { 22 + const logs = devServer.getLogs(100); 23 + const logsText = logs.join("\n").toLowerCase(); 24 + 25 + // Look for completion messages 26 + if (logsText.includes("finished") || 27 + logsText.includes("rerun finished") || 28 + logsText.includes("build finished")) { 29 + return logs; 30 + } 31 + 32 + // Wait 100ms before checking again 33 + await new Promise(resolve => setTimeout(resolve, 100)); 34 + } 35 + 36 + throw new Error(`Build did not complete within ${timeoutMs}ms`); 37 + } 38 + 39 + test.describe("Incremental Build", () => { 40 + // Increase timeout for these tests since they involve compilation 41 + test.setTimeout(60000); 42 + 43 + const fixturePath = resolve(__dirname, "..", "fixtures", "incremental-build"); 44 + const stylesPath = resolve(fixturePath, "src", "assets", "styles.css"); 45 + const blogStylesPath = resolve(fixturePath, "src", "assets", "blog.css"); 46 + const mainScriptPath = resolve(fixturePath, "src", "assets", "main.js"); 47 + const aboutScriptPath = resolve(fixturePath, "src", "assets", "about.js"); 48 + const logoPath = resolve(fixturePath, "src", "assets", "logo.png"); 49 + const teamPath = resolve(fixturePath, 
"src", "assets", "team.png"); 50 + 51 + const indexHtmlPath = resolve(fixturePath, "dist", "index.html"); 52 + const aboutHtmlPath = resolve(fixturePath, "dist", "about", "index.html"); 53 + const blogHtmlPath = resolve(fixturePath, "dist", "blog", "index.html"); 54 + 55 + let originalStylesContent: string; 56 + let originalBlogStylesContent: string; 57 + let originalMainScriptContent: string; 58 + let originalAboutScriptContent: string; 59 + let originalLogoContent: Buffer; 60 + let originalTeamContent: Buffer; 61 + 62 + test.beforeAll(async () => { 63 + // Save original content 64 + originalStylesContent = readFileSync(stylesPath, "utf-8"); 65 + originalBlogStylesContent = readFileSync(blogStylesPath, "utf-8"); 66 + originalMainScriptContent = readFileSync(mainScriptPath, "utf-8"); 67 + originalAboutScriptContent = readFileSync(aboutScriptPath, "utf-8"); 68 + originalLogoContent = readFileSync(logoPath); 69 + originalTeamContent = readFileSync(teamPath); 70 + 71 + // Ensure files are in original state 72 + writeFileSync(stylesPath, originalStylesContent, "utf-8"); 73 + writeFileSync(blogStylesPath, originalBlogStylesContent, "utf-8"); 74 + writeFileSync(mainScriptPath, originalMainScriptContent, "utf-8"); 75 + writeFileSync(aboutScriptPath, originalAboutScriptContent, "utf-8"); 76 + writeFileSync(logoPath, originalLogoContent); 77 + writeFileSync(teamPath, originalTeamContent); 78 + }); 79 + 80 + test.afterEach(async ({ devServer }) => { 81 + // Restore original content after each test 82 + writeFileSync(stylesPath, originalStylesContent, "utf-8"); 83 + writeFileSync(blogStylesPath, originalBlogStylesContent, "utf-8"); 84 + writeFileSync(mainScriptPath, originalMainScriptContent, "utf-8"); 85 + writeFileSync(aboutScriptPath, originalAboutScriptContent, "utf-8"); 86 + writeFileSync(logoPath, originalLogoContent); 87 + writeFileSync(teamPath, originalTeamContent); 88 + 89 + // Wait for build if devServer is available 90 + if (devServer) { 91 + try { 92 + 
devServer.clearLogs(); 93 + await waitForBuildComplete(devServer); 94 + } catch (error) { 95 + console.warn("Failed to wait for build completion in afterEach:", error); 96 + } 97 + } 98 + }); 99 + 100 + test.afterAll(async () => { 101 + // Restore original content 102 + writeFileSync(stylesPath, originalStylesContent, "utf-8"); 103 + writeFileSync(blogStylesPath, originalBlogStylesContent, "utf-8"); 104 + writeFileSync(mainScriptPath, originalMainScriptContent, "utf-8"); 105 + writeFileSync(aboutScriptPath, originalAboutScriptContent, "utf-8"); 106 + writeFileSync(logoPath, originalLogoContent); 107 + writeFileSync(teamPath, originalTeamContent); 108 + }); 109 + 110 + test("should perform full build on first run after recompilation", async ({ devServer }) => { 111 + // Clear logs to track what happens after initial startup 112 + devServer.clearLogs(); 113 + 114 + // Modify a file to trigger a rebuild 115 + writeFileSync(stylesPath, originalStylesContent + "\n/* comment */", "utf-8"); 116 + 117 + // Wait for rebuild 118 + const logs = await waitForBuildComplete(devServer, 20000); 119 + const logsText = logs.join("\n").toLowerCase(); 120 + 121 + // After the first change post-startup, we should see an incremental build message 122 + expect(logsText).toContain("incremental build"); 123 + }); 124 + 125 + test("should only rebuild affected route when CSS changes", async ({ devServer }) => { 126 + // First, do a change to ensure we have build state 127 + writeFileSync(stylesPath, originalStylesContent + "\n/* setup */", "utf-8"); 128 + await waitForBuildComplete(devServer); 129 + 130 + // Get modification times before change 131 + const indexMtimeBefore = statSync(indexHtmlPath).mtimeMs; 132 + const aboutMtimeBefore = statSync(aboutHtmlPath).mtimeMs; 133 + const blogMtimeBefore = statSync(blogHtmlPath).mtimeMs; 134 + 135 + // Wait longer to ensure timestamps differ and debouncer completes 136 + await new Promise(resolve => setTimeout(resolve, 500)); 137 + 138 + // Clear 
logs 139 + devServer.clearLogs(); 140 + 141 + // Change blog.css (only used by /blog route) 142 + writeFileSync(blogStylesPath, originalBlogStylesContent + "\n/* modified */", "utf-8"); 143 + 144 + // Wait for rebuild 145 + const logs = await waitForBuildComplete(devServer, 20000); 146 + const logsText = logs.join("\n").toLowerCase(); 147 + 148 + // Should be incremental build 149 + expect(logsText).toContain("incremental build"); 150 + 151 + // Get modification times after change 152 + const indexMtimeAfter = statSync(indexHtmlPath).mtimeMs; 153 + const aboutMtimeAfter = statSync(aboutHtmlPath).mtimeMs; 154 + const blogMtimeAfter = statSync(blogHtmlPath).mtimeMs; 155 + 156 + // Index and About should NOT be rebuilt (same mtime) 157 + expect(indexMtimeAfter).toBe(indexMtimeBefore); 158 + expect(aboutMtimeAfter).toBe(aboutMtimeBefore); 159 + 160 + // Blog should be rebuilt (different mtime) 161 + expect(blogMtimeAfter).toBeGreaterThan(blogMtimeBefore); 162 + }); 163 + 164 + test("should rebuild multiple routes when shared asset changes", async ({ devServer }) => { 165 + // First, do a change to ensure we have build state 166 + writeFileSync(stylesPath, originalStylesContent + "\n/* setup */", "utf-8"); 167 + await waitForBuildComplete(devServer); 168 + 169 + // Get modification times before change 170 + const indexMtimeBefore = statSync(indexHtmlPath).mtimeMs; 171 + const aboutMtimeBefore = statSync(aboutHtmlPath).mtimeMs; 172 + const blogMtimeBefore = statSync(blogHtmlPath).mtimeMs; 173 + 174 + // Wait longer to ensure timestamps differ and debouncer completes 175 + await new Promise(resolve => setTimeout(resolve, 500)); 176 + 177 + // Clear logs 178 + devServer.clearLogs(); 179 + 180 + // Change styles.css (used by /index route) 181 + writeFileSync(stylesPath, originalStylesContent + "\n/* modified */", "utf-8"); 182 + 183 + // Wait for rebuild 184 + const logs = await waitForBuildComplete(devServer, 20000); 185 + const logsText = logs.join("\n").toLowerCase(); 
186 + 187 + // Should be incremental build 188 + expect(logsText).toContain("incremental build"); 189 + 190 + // Get modification times after change 191 + const indexMtimeAfter = statSync(indexHtmlPath).mtimeMs; 192 + const aboutMtimeAfter = statSync(aboutHtmlPath).mtimeMs; 193 + const blogMtimeAfter = statSync(blogHtmlPath).mtimeMs; 194 + 195 + // Index should be rebuilt (uses styles.css) 196 + expect(indexMtimeAfter).toBeGreaterThan(indexMtimeBefore); 197 + 198 + // About and Blog should NOT be rebuilt 199 + expect(aboutMtimeAfter).toBe(aboutMtimeBefore); 200 + expect(blogMtimeAfter).toBe(blogMtimeBefore); 201 + }); 202 + 203 + test("should rebuild affected route when script changes", async ({ devServer }) => { 204 + // First, do a change to ensure we have build state 205 + writeFileSync(mainScriptPath, originalMainScriptContent + "\n// setup", "utf-8"); 206 + await waitForBuildComplete(devServer); 207 + 208 + // Get modification times before change 209 + const indexMtimeBefore = statSync(indexHtmlPath).mtimeMs; 210 + const aboutMtimeBefore = statSync(aboutHtmlPath).mtimeMs; 211 + 212 + // Wait longer to ensure timestamps differ and debouncer completes 213 + await new Promise(resolve => setTimeout(resolve, 500)); 214 + 215 + // Clear logs 216 + devServer.clearLogs(); 217 + 218 + // Change about.js (only used by /about route) 219 + writeFileSync(aboutScriptPath, originalAboutScriptContent + "\n// modified", "utf-8"); 220 + 221 + // Wait for rebuild 222 + const logs = await waitForBuildComplete(devServer, 20000); 223 + const logsText = logs.join("\n").toLowerCase(); 224 + 225 + // Should be incremental build 226 + expect(logsText).toContain("incremental build"); 227 + 228 + // Get modification times after change 229 + const indexMtimeAfter = statSync(indexHtmlPath).mtimeMs; 230 + const aboutMtimeAfter = statSync(aboutHtmlPath).mtimeMs; 231 + 232 + // Index should NOT be rebuilt 233 + expect(indexMtimeAfter).toBe(indexMtimeBefore); 234 + 235 + // About should be 
rebuilt 236 + expect(aboutMtimeAfter).toBeGreaterThan(aboutMtimeBefore); 237 + }); 238 + 239 + test("should rebuild affected route when image changes", async ({ devServer }) => { 240 + // First, do a change to ensure we have build state 241 + writeFileSync(stylesPath, originalStylesContent + "\n/* setup */", "utf-8"); 242 + await waitForBuildComplete(devServer); 243 + 244 + // Get modification times before change 245 + const indexMtimeBefore = statSync(indexHtmlPath).mtimeMs; 246 + const aboutMtimeBefore = statSync(aboutHtmlPath).mtimeMs; 247 + 248 + // Wait longer to ensure timestamps differ and debouncer completes 249 + await new Promise(resolve => setTimeout(resolve, 500)); 250 + 251 + // Clear logs 252 + devServer.clearLogs(); 253 + 254 + // "Change" team.png (used by /about route) 255 + // We'll just write it again with same content but new mtime 256 + writeFileSync(teamPath, originalTeamContent); 257 + 258 + // Wait for rebuild 259 + const logs = await waitForBuildComplete(devServer, 20000); 260 + const logsText = logs.join("\n").toLowerCase(); 261 + 262 + // Should be incremental build 263 + expect(logsText).toContain("incremental build"); 264 + 265 + // Get modification times after change 266 + const indexMtimeAfter = statSync(indexHtmlPath).mtimeMs; 267 + const aboutMtimeAfter = statSync(aboutHtmlPath).mtimeMs; 268 + 269 + // Index should NOT be rebuilt 270 + expect(indexMtimeAfter).toBe(indexMtimeBefore); 271 + 272 + // About should be rebuilt 273 + expect(aboutMtimeAfter).toBeGreaterThan(aboutMtimeBefore); 274 + }); 275 + 276 + test("should preserve bundler inputs across incremental builds", async ({ devServer }) => { 277 + // First, do a change to ensure we have build state 278 + writeFileSync(stylesPath, originalStylesContent + "\n/* setup */", "utf-8"); 279 + await waitForBuildComplete(devServer); 280 + 281 + // Clear logs 282 + devServer.clearLogs(); 283 + 284 + // Change only blog.css (blog route only) 285 + writeFileSync(blogStylesPath, 
originalBlogStylesContent + "\n/* modified */", "utf-8"); 286 + 287 + // Wait for rebuild 288 + const logs = await waitForBuildComplete(devServer, 20000); 289 + const logsText = logs.join("\n"); 290 + 291 + // Check that logs mention merging with previous bundler inputs 292 + // This ensures that even though only blog route was rebuilt, 293 + // all assets from the previous build are still bundled 294 + expect(logsText).toContain("Merging with"); 295 + expect(logsText).toContain("previous bundler inputs"); 296 + }); 297 + });
+8 -1
examples/basics/src/main.rs
use maudit::{BuildOptions, BuildOutput, content_sources, coronate, routes};

// Route modules of the basics example; each submodule defines one route
// struct which is re-exported for use in the `routes!` macro below.
mod routes {
    mod another;
    mod index;
    pub use another::Another;
    pub use index::Index;
}

pub use routes::Another;
pub use routes::Index;

/// Builds the basics example site with the `Index` and `Another` routes,
/// no content sources, and default build options.
fn main() -> Result<BuildOutput, Box<dyn std::error::Error>> {
    coronate(
        routes![Index, Another],
        content_sources![],
        BuildOptions::default(),
    )
}
+14
examples/basics/src/routes/another.rs
··· 1 + use crate::layout::layout; 2 + use maud::html; 3 + use maudit::route::prelude::*; 4 + 5 + #[route("/another")] 6 + pub struct Another; 7 + 8 + impl Route for Another { 9 + fn render(&self, _ctx: &mut PageContext) -> impl Into<RenderResult> { 10 + Ok(layout(html! { 11 + h1 { "Hello World2" } 12 + })) 13 + } 14 + }
+3
website/assets/walrus.svg
··· 1 + <?xml version="1.0" encoding="utf-8"?> 2 + <!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools --> 3 + <svg width="800px" height="800px" viewBox="0 0 1024 1024" class="icon" version="1.1" xmlns="http://www.w3.org/2000/svg"><path d="M450.72 418.17c-42.29-21.86-144.5-220-171.65-198.22s-40.59 114.28 0.29 171.31 132 97 153.52 129.58 18.45 57.07 13.36 63.2S262.49 462 217.66 485.53s-28.41 84.69 17.56 132.54S427 651.39 455.57 672.76s32.72 55 20.49 55-145.88-32.38-192.77-24.15-68.25 39.89 0.12 73.42 180.26 8.87 199.28 28.21 6.8 28.54-7.47 29.58-110.14-4.91-143.78 0.24 6.21 56.07 23.57 69.3 80.59 19.24 98.94 16.15 36.67-26.58 51-20.48 3.14 45.88 8.25 53 46.92 9.1 53-0.09-10.26-37.71-0.09-51 32.65 11.16 66.28-1.13 109-70.55 111-104.2-132.52 27.76-167.19 26.8c-24.48-4-34.71-21.36-19.43-30.56s228.33-55.45 244.57-96.27 4-34.68-21.47-34.63S605.6 724.45 590.26 700 791 610 813.3 555.9s29.37-119.36-0.22-127.47-147.62 137.92-194.54 130.86-1.06-21.41 19.29-48 132.36-120.51 133.32-154.16 10.08-67.32-27.65-71.33-129.27 135.84-149.69 123.63 52.89-78.61 64-143.89S632.09 133 611.7 137.14s-19.37 4.11-19.34 22.47 10.33 79.52-1.85 114.21-13.14 60.18-23.35 54.08-10.27-43.83-4.2-73.41 23.3-92.83 13.07-112.19S545.27 48.53 467.8 68s-72.25 89.86-65 136.75 27.67 83.57 45.09 128.41 21.71 94.77 2.83 85.01z" fill="#5AB286" /></svg>
+2
website/src/routes/index.rs
··· 23 23 24 24 impl Route for Index { 25 25 fn render(&self, ctx: &mut PageContext) -> impl Into<RenderResult> { 26 + let walrus = ctx.assets.add_image("assets/walrus.svg").unwrap(); 26 27 let features = [ 27 28 ("Performant", "Generate a site with thousands of pages in less than a second using minimal resources."), 28 29 ("Content", "Bring your content to life with built-in support for Markdown, custom components, syntax highlighting, and more."), ··· 43 44 div.w-full { 44 45 div."lg:container".mx-auto.relative { 45 46 div."px-4"."sm:py-8"."sm:mx-6"."sm:my-26"."my-14"."mb-20".flex.flex-col.justify-center.items-center.text-center { 47 + (walrus.render("A walrus")) 46 48 h2."sm:text-6xl"."text-5xl"."sm:w-[22ch]"."xl:w-[30ch]"."mb-2"."leading-[1.15]" { 47 49 "Lo, " span.text-brand-red { "the still scrolls of the web"} ", unchanging and steadfast, at last!" 48 50 }