diff --git a/AGENTS.md b/AGENTS.md index 6fc5748..6f94b84 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -92,6 +92,18 @@ When using the `gh` CLI to view issues, PRs, or other GitHub objects: - **ALWAYS** use the patch editing mechanism provided by the agent - Shell text tools may be used for **read‑only analysis only** +### Rust Error Handling + +- Do not introduce `Box`, `Box`, or `anyhow::Error` as fallible return types in production `src/` code, public doctests, examples, or benchmarks that demonstrate user-facing workflows +- Prefer `CdtResult` and narrow `CdtError` variants with structured context for distinct I/O, serialization, validation, backend, checkpoint, or output failure modes +- `&dyn Error` is acceptable for `std::error::Error::source`, tests that verify standard error trait behavior, and lint fixtures that intentionally exercise forbidden generic-error patterns +- Detailed error-type guidance lives in `docs/dev/rust.md` + +### Rust Import Hygiene + +- Keep production module preambles free of test-only imports; place `#[cfg(test)]` imports inside the relevant `tests` module instead +- Detailed import guidance lives in `docs/dev/rust.md` + ### Public API Preludes - Keep `prelude::*` small and focused on common quick-start workflows. @@ -145,7 +157,7 @@ just ci Refer to `docs/dev/commands.md` for full details. -When adding or renaming Cargo examples, update `just validate-examples` markers as needed so CI keeps validating the user-facing example contracts. +When adding or renaming Cargo examples, update `just examples-validate` markers as needed so CI keeps validating the user-facing example contracts. For tooling-alignment work, update `docs/dev/tooling-alignment.md` with the comparison and rationale before adding or changing config, workflow, or repository-rule files. 
diff --git a/Cargo.lock b/Cargo.lock index 3eca2bb..eeae458 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -184,6 +184,7 @@ dependencies = [ "proptest", "rand 0.10.1", "serde", + "serde_json", "slotmap", "thiserror", ] @@ -687,6 +688,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8953e926feb16a077a9032d75603fc6d9cb00355354bca4981440177e3d7203e" dependencies = [ "rand 0.10.1", + "serde", ] [[package]] @@ -967,6 +969,7 @@ dependencies = [ "chacha20", "getrandom 0.4.2", "rand_core 0.10.0", + "serde", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 4aa26cc..d8f2a07 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -35,14 +35,15 @@ harness = false [dependencies] clap = { version = "4.6.1", features = [ "derive" ] } delaunay = "0.7.6" -markov-chain-monte-carlo = "0.3.0" +markov-chain-monte-carlo = { version = "0.3.0", features = [ "serde" ] } dirs = "6.0.0" env_logger = "0.11.10" float-ord = "0.3.2" log = "0.4.29" num-traits = "0.2.19" -rand = "0.10.1" +rand = { version = "0.10.1", features = [ "serde" ] } serde = { version = "1.0.228", features = [ "derive" ] } +serde_json = "1.0.145" thiserror = "2.0.18" [dev-dependencies] diff --git a/README.md b/README.md index be598b6..b0d3df2 100644 --- a/README.md +++ b/README.md @@ -20,6 +20,8 @@ The library leverages high-performance [Delaunay triangulation] backends and pro - [x] Regge action calculation with configurable coupling constants - [x] Alexander/Pachner-style local move proposals with causal constraints - [x] Volume-profile, Hausdorff-dimension, and spectral-dimension observables for CDT analysis +- [x] CSV/JSON simulation output for external analysis workflows +- [x] Resumable serde-backed CDT/MCMC checkpoints for durable chain continuation - [x] Focused public preludes for simulation, triangulation, geometry, action, and observables - [x] Command-line interface, examples, Criterion benchmarks, and CI-aligned validation tooling - [x] Cross-platform compatibility: Linux, macOS, 
Windows diff --git a/docs/code_organization.md b/docs/code_organization.md index df0e0ab..1495984 100644 --- a/docs/code_organization.md +++ b/docs/code_organization.md @@ -55,7 +55,8 @@ causal-triangulations/ │ │ └── performance_test.sh │ ├── basic_cdt.rs │ ├── find_good_seeds.rs -│ └── observables.rs +│ ├── observables.rs +│ └── output_and_checkpoint.rs ├── proptest-regressions/ │ └── cdt/ │ └── triangulation.txt diff --git a/docs/dev/commands.md b/docs/dev/commands.md index 34eaed7..e1e976d 100644 --- a/docs/dev/commands.md +++ b/docs/dev/commands.md @@ -146,7 +146,7 @@ Validate with: ```bash just examples -just validate-examples +just examples-validate ``` Examples must: @@ -155,7 +155,7 @@ Examples must: - run successfully - demonstrate correct API usage -`just validate-examples` additionally checks stable output markers for user-facing Cargo examples. Keep those markers semantic rather than exact numeric values so simulation output can evolve without making the example contract brittle. +`just examples-validate` additionally checks stable output markers for user-facing Cargo examples. Keep those markers semantic rather than exact numeric values so simulation output can evolve without making the example contract brittle. When adding or renaming a Cargo example, update `scripts/run_all_examples.sh` `validate_example_output()` with stable semantic output markers, or intentionally document why success-only validation is sufficient for that example. 
@@ -291,7 +291,7 @@ just test-python # pytest | Run all tests | `just test-all` | | Run Python tests | `just test-python` | | Run examples | `just examples` | -| Validate examples | `just validate-examples` | +| Validate examples | `just examples-validate` | | Run full CI | `just ci` | | Pre-commit check | `just commit-check` | @@ -302,7 +302,7 @@ just test-python # pytest | Changed files | Command | | ------------- | -------------------------------------- | | `tests/` | `just test-integration` (or `just ci`) | -| `examples/` | `just validate-examples` | +| `examples/` | `just examples-validate` | | `benches/` | `just bench-compile` | | `src/` | `just test` | | `scripts/` | `just test-python` | diff --git a/docs/dev/rust.md b/docs/dev/rust.md index d57354d..254f3fc 100644 --- a/docs/dev/rust.md +++ b/docs/dev/rust.md @@ -145,6 +145,8 @@ Keep error layers orthogonal. Invalid input or handles, unsupported operations, Public error enums must be `#[non_exhaustive]` so new variants remain additive. +Do not use `Box`, `Box`, or `anyhow::Error` as fallible return types in production `src/` code, public doctests, examples, or benchmarks that demonstrate user-facing workflows. Prefer the crate's typed `CdtResult` and add a narrow `CdtError` variant when a distinct I/O, serialization, validation, backend, or checkpoint failure mode is otherwise only representable as a generic error. `&dyn Error` is acceptable for implementing `std::error::Error::source`, for tests that explicitly verify the standard error trait implementation, and for lint fixtures that exercise the forbidden pattern. + Example: ```rust @@ -164,6 +166,8 @@ Always import types at the top of the module rather than using fully‑qualified Group imports from the same module into a single `use` statement with braces. +Do not put test-only imports in the production module preamble. 
Move `#[cfg(test)]` imports into the relevant `tests` module so normal builds do not carry test-only style noise at the top of implementation files. + If a test module already has `use super::*;`, do not re‑import items that are already brought into scope by the parent module's imports. --- diff --git a/docs/dev/testing.md b/docs/dev/testing.md index f38174e..c535335 100644 --- a/docs/dev/testing.md +++ b/docs/dev/testing.md @@ -199,12 +199,12 @@ Before proposing patches agents should run: just ci ``` -The `ci` recipe runs `check bench-compile test-all validate-examples`, which enforces: +The `ci` recipe runs `check bench-compile test-all examples-validate`, which enforces: - **check** (via `lint`): formatting, clippy, documentation builds, markdown, spelling, config validation (JSON, TOML, YAML, GitHub Actions) - **bench-compile**: benchmarks compile without warnings under `-D warnings` - **test-all**: unit tests, doc tests, integration tests, and Python tests (pytest) -- **validate-examples**: all Cargo examples run successfully and user-facing examples emit stable output markers +- **examples-validate**: all Cargo examples run successfully and user-facing examples emit stable output markers --- diff --git a/docs/dev/tooling-alignment.md b/docs/dev/tooling-alignment.md index dbae255..11a894d 100644 --- a/docs/dev/tooling-alignment.md +++ b/docs/dev/tooling-alignment.md @@ -46,7 +46,7 @@ The useful updates ported from MCMC are: - the MCMC-style `cliff.toml` template and `just changelog-unreleased ` flow, adapted to keep CDT's changelog archive step and avoid temporary local release tags; - a Semgrep rule that rejects `NaN` and infinity defaults after failed floating-point conversions, with a regression fixture under `tests/semgrep/`. - production-only Rust Semgrep rules that reject bare `unwrap()` and explicit `panic!` in non-test `src/` code while preserving idiomatic fail-fast usage in tests, doctests, examples, and benchmark setup. 
-- a `validate-examples` recipe that runs Cargo examples and verifies stable output markers for the user-facing example contracts. +- an `examples-validate` recipe that runs Cargo examples and verifies stable output markers for the user-facing example contracts. ## Deferred Updates diff --git a/examples/output_and_checkpoint.rs b/examples/output_and_checkpoint.rs new file mode 100644 index 0000000..966c911 --- /dev/null +++ b/examples/output_and_checkpoint.rs @@ -0,0 +1,109 @@ +#![forbid(unsafe_code)] + +//! Example: writing simulation output files and round-tripping a CDT checkpoint. +//! +//! This example runs a short CDT simulation, writes the configured CSV and JSON +//! outputs, and serializes the final triangulation as a serde checkpoint. + +use causal_triangulations::prelude::simulation::*; +use serde_json::{Value, from_str, to_string}; +use std::env; +use std::fs; +use std::path::Path; +use std::process; + +fn main() -> CdtResult<()> { + let output_dir = + env::temp_dir().join(format!("causal-triangulations-output-{}", process::id())); + let csv_path = output_dir.join("measurements.csv"); + let json_path = output_dir.join("summary.json"); + + let config = CdtConfig { + simulate: true, + steps: 4, + thermalization_steps: 0, + measurement_frequency: 1, + seed: Some(13), + output_csv: Some(csv_path.clone()), + output_json: Some(json_path.clone()), + ..CdtConfig::new(12, 3) + }; + + let results = run_simulation(&config)?; + + let csv = read_output(&csv_path, "CSV")?; + let summary_json = read_output(&json_path, "JSON")?; + let summary: Value = from_str(&summary_json).map_err(|err| CdtError::OutputReadFailed { + path: json_path.display().to_string(), + format: "JSON".to_string(), + detail: err.to_string(), + })?; + assert!(csv.starts_with("step,action,vertices,edges,triangles,accepted,delta_action\n")); + assert_eq!(summary["config"]["vertices"], config.vertices); + assert_eq!( + summary["final_triangulation"]["time_slices"], + config.timeslices + ); + + let 
checkpoint = to_string(&results.triangulation).map_err(|err| { + CdtError::CheckpointSerializationFailed { + operation: "serialize".to_string(), + target: "final triangulation".to_string(), + detail: err.to_string(), + } + })?; + let restored: CdtTriangulation2D = + from_str(&checkpoint).map_err(|err| CdtError::CheckpointSerializationFailed { + operation: "deserialize".to_string(), + target: "final triangulation".to_string(), + detail: err.to_string(), + })?; + restored.validate_topology()?; + restored.validate_foliation()?; + restored.validate_causality()?; + restored.validate_cell_classification()?; + + let mcmc_checkpoint = MetropolisAlgorithm::new( + MetropolisConfig::new(1.0, 2, 0, 1).with_seed(13), + ActionConfig::default(), + ) + .run_to_checkpoint(CdtTriangulation2D::from_cdt_strip(4, 3)?)?; + let checkpoint_json = + to_string(&mcmc_checkpoint).map_err(|err| CdtError::CheckpointSerializationFailed { + operation: "serialize".to_string(), + target: "MCMC state".to_string(), + detail: err.to_string(), + })?; + let restored_checkpoint: CdtMcmcCheckpoint = + from_str(&checkpoint_json).map_err(|err| CdtError::CheckpointSerializationFailed { + operation: "deserialize".to_string(), + target: "MCMC state".to_string(), + detail: err.to_string(), + })?; + let resumed = MetropolisAlgorithm::new( + MetropolisConfig::new(1.0, 2, 0, 1).with_seed(999), + ActionConfig::default(), + ) + .resume_from_checkpoint(restored_checkpoint)?; + + println!("CSV output rows: {}", csv.lines().count().saturating_sub(1)); + println!( + "JSON summary measurements: {}", + summary["measurements"].as_array().map_or(0, Vec::len) + ); + println!("Checkpoint roundtrip vertices: {}", restored.vertex_count()); + println!("Resumed MCMC checkpoint steps: {}", resumed.steps.len()); + println!("Output and checkpoint example completed successfully!"); + + let _ = fs::remove_dir_all(output_dir); + Ok(()) +} + +/// Read an example output file and preserve the path and format in typed errors. 
+fn read_output(path: &Path, format: &'static str) -> CdtResult { + fs::read_to_string(path).map_err(|err| CdtError::OutputReadFailed { + path: path.display().to_string(), + format: format.to_string(), + detail: err.to_string(), + }) +} diff --git a/justfile b/justfile index 34e387a..c565db6 100644 --- a/justfile +++ b/justfile @@ -152,7 +152,7 @@ check-fast: # CI simulation: comprehensive validation (matches .github/workflows/ci.yml) # Runs: checks + all tests (Rust + Python) + validated examples + bench compile -ci: check bench-compile test-all validate-examples +ci: check bench-compile test-all examples-validate @echo "🎯 CI checks complete!" # CI with performance baseline @@ -200,7 +200,7 @@ doc-check: examples: ./scripts/run_all_examples.sh -validate-examples: +examples-validate: ./scripts/run_all_examples.sh --validate # Fix (mutating): apply formatters/auto-fixes @@ -232,7 +232,7 @@ help-workflows: @echo " just test-cli # CLI integration tests only" @echo " just test-examples # Compile all examples as tests" @echo " just examples # Run all example scripts" - @echo " just validate-examples # Run examples and validate stable output markers" + @echo " just examples-validate # Run examples and validate stable output markers" @echo " just coverage # Generate coverage report (HTML)" @echo " just coverage-ci # Generate coverage for CI (XML)" @echo "" diff --git a/scripts/run_all_examples.sh b/scripts/run_all_examples.sh index 5eacd33..4e19070 100755 --- a/scripts/run_all_examples.sh +++ b/scripts/run_all_examples.sh @@ -171,6 +171,12 @@ validate_example_output() { require_marker "$example" "$output" "Final Hausdorff-dimension estimate" require_marker "$example" "$output" "Final spectral-dimension estimate" ;; + output_and_checkpoint) + require_marker "$example" "$output" "CSV output rows" + require_marker "$example" "$output" "JSON summary measurements" + require_marker "$example" "$output" "Resumed MCMC checkpoint steps" + require_marker "$example" "$output" 
"Output and checkpoint example completed successfully" + ;; find_good_seeds) require_marker "$example" "$output" "SEED VALIDATION" require_marker "$example" "$output" "ADDITIONAL SEED TESTING" diff --git a/src/cdt/action.rs b/src/cdt/action.rs index 2337315..dee6659 100644 --- a/src/cdt/action.rs +++ b/src/cdt/action.rs @@ -6,6 +6,7 @@ //! which is based on the Regge calculus formulation of general relativity. use crate::errors::{CdtError, CdtResult}; +use serde::{Deserialize, Serialize}; /// Calculates the 2D Regge Action for a given triangulation. /// @@ -57,7 +58,7 @@ pub fn compute_regge_action( } /// Configuration for CDT action parameters. -#[derive(Debug, Clone)] +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct ActionConfig { /// Coupling constant for vertices (κ₀) pub coupling_0: f64, diff --git a/src/cdt/ergodic_moves.rs b/src/cdt/ergodic_moves.rs index 2205a0b..e79b598 100644 --- a/src/cdt/ergodic_moves.rs +++ b/src/cdt/ergodic_moves.rs @@ -13,10 +13,11 @@ use crate::errors::CdtError; use crate::geometry::CdtTriangulation2D; use crate::geometry::backends::delaunay::{DelaunayFaceHandle, DelaunayVertexHandle}; use crate::geometry::traits::{EdgeAdjacentFaces, TriangulationQuery}; -use rand::{RngExt, SeedableRng, rngs::StdRng}; +use rand::{RngExt, SeedableRng, rngs::Xoshiro256PlusPlus}; +use serde::{Deserialize, Serialize}; /// Types of ergodic moves available in 2D CDT. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] pub enum MoveType { /// (2,2) move: Flip edge between two triangles Move22, @@ -44,7 +45,7 @@ pub enum MoveResult { } /// Statistics tracking for ergodic moves. 
-#[derive(Debug, Clone, Default)] +#[derive(Debug, Clone, Default, Serialize, Deserialize)] pub struct MoveStatistics { /// Number of (2,2) moves attempted pub moves_22_attempted: u64, @@ -183,6 +184,46 @@ impl MoveStatistics { total_accepted / total_attempted } } + + /// Returns the total number of attempted moves across all move types. + /// + /// # Examples + /// + /// ``` + /// use causal_triangulations::prelude::moves::{MoveStatistics, MoveType}; + /// + /// let mut stats = MoveStatistics::new(); + /// stats.record_attempt(MoveType::Move22); + /// stats.record_attempt(MoveType::Move13Add); + /// assert_eq!(stats.total_attempted(), 2); + /// ``` + #[must_use] + pub const fn total_attempted(&self) -> u64 { + self.moves_22_attempted + + self.moves_13_attempted + + self.moves_31_attempted + + self.edge_flips_attempted + } + + /// Returns the total number of accepted moves across all move types. + /// + /// # Examples + /// + /// ``` + /// use causal_triangulations::prelude::moves::{MoveStatistics, MoveType}; + /// + /// let mut stats = MoveStatistics::new(); + /// stats.record_success(MoveType::Move22); + /// stats.record_success(MoveType::EdgeFlip); + /// assert_eq!(stats.total_accepted(), 2); + /// ``` + #[must_use] + pub const fn total_accepted(&self) -> u64 { + self.moves_22_accepted + + self.moves_13_accepted + + self.moves_31_accepted + + self.edge_flips_accepted + } } /// Converts an accumulated move counter to a finite value for rate reporting. @@ -195,11 +236,12 @@ const fn count_to_f64(count: u64) -> f64 { } /// Ergodic move system for CDT triangulations. 
+#[derive(Clone, Serialize, Deserialize)] pub struct ErgodicsSystem { /// Move statistics pub stats: MoveStatistics, /// Random number generator - rng: StdRng, + rng: Xoshiro256PlusPlus, } #[derive(Debug, Clone, Copy)] @@ -242,7 +284,7 @@ impl ErgodicsSystem { pub fn with_seed(seed: u64) -> Self { Self { stats: MoveStatistics::new(), - rng: StdRng::seed_from_u64(seed), + rng: Xoshiro256PlusPlus::seed_from_u64(seed), } } @@ -620,7 +662,7 @@ impl Default for ErgodicsSystem { } /// Selects a random candidate index without borrowing the candidate list. -fn pick(rng: &mut StdRng, len: usize) -> Option { +fn pick(rng: &mut Xoshiro256PlusPlus, len: usize) -> Option { if len == 0 { return None; } diff --git a/src/cdt/foliation.rs b/src/cdt/foliation.rs index 1060b18..35de8b4 100644 --- a/src/cdt/foliation.rs +++ b/src/cdt/foliation.rs @@ -12,10 +12,12 @@ //! CGAL's `vertex->info()` used in CDT-plusplus. The `Foliation` struct //! tracks only aggregate bookkeeping (per-slice counts and total slices). +use serde::de::Error as DeError; +use serde::{Deserialize, Deserializer, Serialize}; use std::{error::Error, fmt}; /// Classification of an edge -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] pub enum EdgeType { /// Both endpoints share the same time slice. Spacelike, @@ -30,7 +32,7 @@ pub enum EdgeType { /// In a valid foliated triangulation every triangle spans exactly two /// adjacent time slices. The type is determined by how many vertices /// sit on the lower vs. upper slice. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] pub enum CellType { /// **(2,1)** — two vertices at time *t*, one at *t + 1*. /// The spacelike base is in the lower slice. 
@@ -147,6 +149,8 @@ pub fn classify_cell(t0: Option, t1: Option, t2: Option) -> Optio #[derive(Debug, Clone, PartialEq, Eq)] #[non_exhaustive] pub enum FoliationError { + /// No time slices were supplied for foliation bookkeeping. + EmptyFoliation, /// `slice_sizes` length does not match `num_slices`. SliceSizeMismatch { /// Actual length of the `slice_sizes` vector. @@ -254,6 +258,7 @@ pub enum FoliationError { impl fmt::Display for FoliationError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { + Self::EmptyFoliation => write!(f, "foliation must contain at least one time slice"), Self::SliceSizeMismatch { slice_sizes_len, num_slices, @@ -334,8 +339,10 @@ impl Error for FoliationError {} /// /// Time labels are stored on vertices directly (as vertex data in the /// Delaunay triangulation). This struct tracks only the per-slice vertex -/// counts and the total number of slices. -#[derive(Debug, Clone)] +/// counts and the total number of slices. Deserialization validates the same +/// invariants as [`Self::from_slice_sizes`], so checkpoint data cannot create +/// empty or mismatched slice bookkeeping. +#[derive(Debug, Clone, Serialize)] pub struct Foliation { /// Number of vertices per time slice (`slice_sizes[t]`). slice_sizes: Vec, @@ -343,28 +350,51 @@ pub struct Foliation { num_slices: u32, } +#[derive(Deserialize)] +struct SerializedFoliation { + slice_sizes: Vec, + num_slices: u32, +} + +impl<'de> Deserialize<'de> for Foliation { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let serialized = SerializedFoliation::deserialize(deserializer)?; + Self::from_slice_sizes(serialized.slice_sizes, serialized.num_slices) + .map_err(D::Error::custom) + } +} + impl Foliation { /// Creates a new foliation from pre-computed per-slice vertex counts. /// /// # Errors /// - /// Returns error if `slice_sizes.len() != num_slices` or if any slice is - /// empty. 
+ /// Returns error if there are no slices, if `slice_sizes.len() != num_slices`, + /// or if any slice is empty. /// /// # Examples /// /// ``` - /// use causal_triangulations::Foliation; + /// use causal_triangulations::{Foliation, FoliationError}; /// /// let foliation = Foliation::from_slice_sizes(vec![3, 4], 2) /// .expect("both slices are non-empty"); /// assert_eq!(foliation.num_slices(), 2); /// assert_eq!(foliation.labeled_vertex_count(), 7); + /// + /// let err = Foliation::from_slice_sizes(vec![], 0).expect_err("zero slices are invalid"); + /// assert_eq!(err, FoliationError::EmptyFoliation); /// ``` pub fn from_slice_sizes( slice_sizes: Vec, num_slices: u32, ) -> Result { + if slice_sizes.is_empty() { + return Err(FoliationError::EmptyFoliation); + } if slice_sizes.len() != num_slices as usize { return Err(FoliationError::SliceSizeMismatch { slice_sizes_len: slice_sizes.len(), @@ -429,6 +459,7 @@ impl Foliation { #[cfg(test)] mod tests { use super::*; + use serde_json::from_str; #[test] fn test_edge_type_equality() { @@ -444,6 +475,13 @@ mod tests { assert_eq!(err, FoliationError::EmptySlice { slice: 0 }); } + #[test] + fn test_foliation_zero_slices_rejected() { + let result = Foliation::from_slice_sizes(vec![], 0); + let err = result.expect_err("zero-slice foliations should be rejected"); + assert_eq!(err, FoliationError::EmptyFoliation); + } + #[test] fn test_foliation_populated() { let fol = Foliation::from_slice_sizes(vec![3, 3], 2).expect("valid foliation"); @@ -467,6 +505,25 @@ mod tests { ); } + #[test] + fn foliation_deserialization_rejects_invalid_bookkeeping() { + let zero_slice_err = from_str::(r#"{"slice_sizes":[],"num_slices":0}"#) + .expect_err("zero slices should fail deserialization"); + assert!( + zero_slice_err + .to_string() + .contains("at least one time slice") + ); + + let empty_err = from_str::(r#"{"slice_sizes":[0],"num_slices":1}"#) + .expect_err("empty slices should fail deserialization"); + 
assert!(empty_err.to_string().contains("empty")); + + let mismatch_err = from_str::(r#"{"slice_sizes":[3,3],"num_slices":3}"#) + .expect_err("mismatched slice counts should fail deserialization"); + assert!(mismatch_err.to_string().contains("slice_sizes length")); + } + #[test] fn test_classify_edge_spacelike() { assert_eq!(classify_edge(Some(0), Some(0)), Some(EdgeType::Spacelike)); @@ -592,6 +649,15 @@ mod tests { ); } + #[test] + fn test_foliation_error_empty_foliation_display() { + let err = FoliationError::EmptyFoliation; + assert_eq!( + err.to_string(), + "foliation must contain at least one time slice" + ); + } + #[test] fn test_foliation_error_missing_vertex_label_display() { let err = FoliationError::MissingVertexLabel { vertex: 4 }; diff --git a/src/cdt/metropolis.rs b/src/cdt/metropolis.rs index 8b67dfe..8d331d5 100644 --- a/src/cdt/metropolis.rs +++ b/src/cdt/metropolis.rs @@ -18,12 +18,13 @@ use crate::config::validate_schedule; use crate::errors::{CdtError, CdtResult}; use crate::geometry::CdtTriangulation2D; use crate::util::saturating_usize_to_u32; -use markov_chain_monte_carlo::{DelayedProposal, Target}; -use rand::{Rng, RngExt, SeedableRng, rngs::StdRng}; +use markov_chain_monte_carlo::{Chain, ChainCheckpoint, DelayedProposal, Target}; +use rand::{Rng, RngExt, SeedableRng, rngs::Xoshiro256PlusPlus}; +use serde::{Deserialize, Serialize}; use std::error::Error; use std::fmt; use std::hint::cold_path; -use std::time::Instant; +use std::time::{Duration, Instant}; const ACCEPTED_MOVE_RETRIES: usize = 8; @@ -35,7 +36,7 @@ struct SimplexCounts { } /// Configuration for the Metropolis-Hastings algorithm. -#[derive(Debug, Clone)] +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct MetropolisConfig { /// Temperature parameter (1/β) pub temperature: f64, @@ -172,7 +173,7 @@ fn validate_temperature(temperature: f64) -> CdtResult<()> { } /// Result of a Monte Carlo step. 
-#[derive(Debug, Clone)] +#[derive(Debug, Clone, Serialize, Deserialize)] pub struct MonteCarloStep { /// Step number pub step: u32, @@ -643,6 +644,157 @@ impl DelayedProposal for CdtProposal { // Metropolis algorithm // --------------------------------------------------------------------------- +/// Resumable checkpoint for a CDT Metropolis-Hastings run. +/// +/// The embedded [`ChainCheckpoint`] stores the current triangulation and +/// accepted/rejected chain counters using the shared MCMC crate's portable +/// checkpoint type. CDT adds the domain-specific runtime state needed for +/// scientific continuation: action/config metadata, accumulated telemetry, +/// both RNG streams, and the ergodic move system. +/// +/// Resuming a serialized checkpoint continues from the stored chain counters +/// and RNG streams. The triangulation is restored through its invariant-checked +/// serde representation, so callers should rely on CDT observables and +/// validation contracts rather than byte-for-byte backend identity. 
+/// +/// # Examples +/// +/// ``` +/// use causal_triangulations::prelude::simulation::{ +/// ActionConfig, CdtResult, CdtTriangulation, MetropolisAlgorithm, MetropolisConfig, +/// }; +/// +/// fn main() -> CdtResult<()> { +/// let tri = CdtTriangulation::from_cdt_strip(4, 3)?; +/// let algorithm = MetropolisAlgorithm::new( +/// MetropolisConfig::new(1.0, 2, 0, 1).with_seed(13), +/// ActionConfig::default(), +/// ); +/// let checkpoint = algorithm.run_to_checkpoint(tri)?; +/// +/// assert_eq!(checkpoint.current_step(), 2); +/// assert_eq!(checkpoint.steps().len(), 2); +/// assert_eq!(checkpoint.chain().total_steps(), 2); +/// assert_eq!(checkpoint.config().steps, 2); +/// assert_eq!(checkpoint.action_config(), &ActionConfig::default()); +/// assert!(checkpoint.current_action().is_finite()); +/// assert_eq!(checkpoint.move_stats().total_attempted(), 2); +/// assert_eq!(checkpoint.measurements().len(), 3); +/// Ok(()) +/// } +/// ``` +#[derive(Clone, Serialize, Deserialize)] +pub struct CdtMcmcCheckpoint { + chain: ChainCheckpoint, + config: MetropolisConfig, + action_config: ActionConfig, + current_step: u32, + current_action: f64, + move_stats: MoveStatistics, + steps: Vec, + measurements: Vec, + elapsed_time: Duration, + acceptance_rng: Xoshiro256PlusPlus, + ergodics: ErgodicsSystem, +} + +impl CdtMcmcCheckpoint { + /// Returns the generic MCMC chain checkpoint. + pub const fn chain(&self) -> &ChainCheckpoint { + &self.chain + } + + /// Returns the Metropolis configuration used when the checkpoint was made. + #[must_use] + pub const fn config(&self) -> &MetropolisConfig { + &self.config + } + + /// Returns the action configuration used when the checkpoint was made. + #[must_use] + pub const fn action_config(&self) -> &ActionConfig { + &self.action_config + } + + /// Returns the last completed Monte Carlo step. + #[must_use] + pub const fn current_step(&self) -> u32 { + self.current_step + } + + /// Returns the action of the checkpointed triangulation. 
+ #[must_use] + pub const fn current_action(&self) -> f64 { + self.current_action + } + + /// Returns accumulated move statistics through the checkpoint step. + #[must_use] + pub const fn move_stats(&self) -> &MoveStatistics { + &self.move_stats + } + + /// Returns accumulated step telemetry through the checkpoint step. + #[must_use] + pub fn steps(&self) -> &[MonteCarloStep] { + &self.steps + } + + /// Returns accumulated measurements through the checkpoint step. + #[must_use] + pub fn measurements(&self) -> &[Measurement] { + &self.measurements + } + + /// Converts the checkpoint into a complete simulation result snapshot. + /// + /// # Examples + /// + /// ``` + /// use causal_triangulations::prelude::simulation::{ + /// ActionConfig, CdtResult, CdtTriangulation, MetropolisAlgorithm, MetropolisConfig, + /// }; + /// + /// fn main() -> CdtResult<()> { + /// let tri = CdtTriangulation::from_cdt_strip(4, 3)?; + /// let checkpoint = MetropolisAlgorithm::new( + /// MetropolisConfig::new(1.0, 2, 0, 1).with_seed(13), + /// ActionConfig::default(), + /// ) + /// .run_to_checkpoint(tri)?; + /// + /// let results = checkpoint.into_results(); + /// assert_eq!(results.steps.len(), 2); + /// Ok(()) + /// } + /// ``` + #[must_use] + pub fn into_results(self) -> SimulationResultsBackend { + let (triangulation, _, _) = self.chain.into_parts(); + SimulationResultsBackend { + config: self.config, + action_config: self.action_config, + move_stats: self.move_stats, + steps: self.steps, + measurements: self.measurements, + elapsed_time: self.elapsed_time, + triangulation, + } + } +} + +struct MetropolisRunState { + triangulation: CdtTriangulation2D, + current_step: u32, + current_action: f64, + acceptance_rng: Xoshiro256PlusPlus, + ergodics: ErgodicsSystem, + move_stats: MoveStatistics, + steps: Vec, + measurements: Vec, + elapsed_time: Duration, +} + /// Metropolis-Hastings algorithm implementation for CDT. /// /// Accepts or rejects proposed CDT move types before applying them. 
@@ -685,11 +837,13 @@ impl MetropolisAlgorithm { /// # Errors /// /// Returns [`CdtError::InvalidSimulationConfiguration`] if the Metropolis - /// configuration is invalid, [`CdtError::MetropolisMoveApplicationFailed`] - /// if an accepted move causes a hard backend mutation failure, or a - /// validation error for unrecoverable triangulation failures. Accepted move - /// types that cannot find a realizable local site after bounded retries are - /// recorded as rejected proposals. + /// configuration is invalid, [`CdtError::InvalidConfiguration`] if the + /// action configuration is invalid, + /// [`CdtError::MetropolisMoveApplicationFailed`] if an accepted move causes + /// a hard backend mutation failure, or a validation error for + /// unrecoverable triangulation failures. Accepted move types that cannot + /// find a realizable local site after bounded retries are recorded as + /// rejected proposals. /// /// # Examples /// @@ -706,122 +860,638 @@ impl MetropolisAlgorithm { /// Ok(()) /// } /// ``` - pub fn run( + pub fn run(&self, triangulation: CdtTriangulation2D) -> CdtResult { + Ok(self.run_to_checkpoint(triangulation)?.into_results()) + } + + /// Run the simulation and return both the final results and checkpoint. + /// + /// The checkpoint can be serialized and later resumed with + /// [`Self::resume_from_checkpoint`] without losing the CDT RNG streams. + /// + /// # Errors + /// + /// Returns [`CdtError::InvalidSimulationConfiguration`] if the Metropolis + /// configuration is invalid, [`CdtError::InvalidConfiguration`] if the + /// action configuration is invalid, + /// [`CdtError::MetropolisMoveApplicationFailed`] if an accepted move causes + /// a hard backend mutation failure, or a validation error for + /// unrecoverable triangulation failures. 
+ /// + /// # Examples + /// + /// ``` + /// use causal_triangulations::prelude::simulation::{ + /// ActionConfig, CdtResult, CdtTriangulation, MetropolisAlgorithm, MetropolisConfig, + /// }; + /// + /// fn main() -> CdtResult<()> { + /// let tri = CdtTriangulation::from_cdt_strip(4, 3)?; + /// let algorithm = MetropolisAlgorithm::new( + /// MetropolisConfig::new(1.0, 2, 0, 1).with_seed(13), + /// ActionConfig::default(), + /// ); + /// let (results, checkpoint) = algorithm.run_with_checkpoint(tri)?; + /// + /// assert_eq!(results.steps.len(), checkpoint.steps().len()); + /// assert_eq!(checkpoint.current_step(), 2); + /// Ok(()) + /// } + /// ``` + pub fn run_with_checkpoint( &self, - mut triangulation: CdtTriangulation2D, - ) -> CdtResult { - // Validate configuration to fail fast before any work + triangulation: CdtTriangulation2D, + ) -> CdtResult<(SimulationResultsBackend, CdtMcmcCheckpoint)> { + let checkpoint = self.run_to_checkpoint(triangulation)?; + let results = checkpoint.clone().into_results(); + Ok((results, checkpoint)) + } + + /// Run the simulation and return a resumable checkpoint. + /// + /// The checkpoint embeds the current triangulation in the MCMC crate's + /// [`ChainCheckpoint`] and stores CDT-specific proposal state, telemetry, + /// and RNG streams beside it. + /// + /// # Errors + /// + /// Returns [`CdtError::InvalidSimulationConfiguration`] if the Metropolis + /// configuration is invalid, [`CdtError::InvalidConfiguration`] if the + /// action configuration is invalid, + /// [`CdtError::MetropolisMoveApplicationFailed`] if an accepted move causes + /// a hard backend mutation failure, or a validation error for + /// unrecoverable triangulation failures. 
+ /// + /// # Examples + /// + /// ``` + /// use causal_triangulations::prelude::simulation::{ + /// ActionConfig, CdtResult, CdtTriangulation, MetropolisAlgorithm, MetropolisConfig, + /// }; + /// + /// fn main() -> CdtResult<()> { + /// let tri = CdtTriangulation::from_cdt_strip(4, 3)?; + /// let checkpoint = MetropolisAlgorithm::new( + /// MetropolisConfig::new(1.0, 2, 0, 1).with_seed(13), + /// ActionConfig::default(), + /// ) + /// .run_to_checkpoint(tri)?; + /// + /// assert_eq!(checkpoint.current_step(), 2); + /// Ok(()) + /// } + /// ``` + pub fn run_to_checkpoint( + &self, + triangulation: CdtTriangulation2D, + ) -> CdtResult { self.config.validate()?; self.action_config.validate()?; - let mut rng = simulation_rng(self.config.seed); - let mut moves = self.config.seed.map_or_else(ErgodicsSystem::new, |seed| { - ErgodicsSystem::with_seed(seed.wrapping_add(0x9E37_79B9_7F4A_7C15)) - }); - let start = Instant::now(); - let mut move_stats = MoveStatistics::new(); - let mut steps = Vec::with_capacity(usize::try_from(self.config.steps).unwrap_or(0)); - let mut measurements = Vec::new(); + let mut state = self.initial_state(triangulation); + self.run_steps(&mut state, self.config.steps)?; + state.into_checkpoint(self.config.clone(), self.action_config.clone()) + } - let mut current_action = action_for(&self.action_config, &triangulation); - measurements.push(measurement_for(0, current_action, &triangulation)); + /// Continue a checkpoint for this algorithm's configured step count. + /// + /// `self.config.steps` is interpreted as an additional number of steps. The + /// returned [`SimulationResultsBackend`] is cumulative: it includes the + /// checkpointed prefix and the resumed suffix. 
+ /// + /// # Errors + /// + /// Returns [`CdtError::InvalidSimulationConfiguration`] if the resumed + /// Metropolis configuration is invalid, [`CdtError::InvalidConfiguration`] + /// if the action configuration is invalid, or + /// [`CdtError::CheckpointResumeFailed`] if the checkpoint is incompatible + /// with this algorithm or internally inconsistent. Returns + /// [`CdtError::MetropolisMoveApplicationFailed`] or validation errors for + /// failures during resumed sampling. + /// + /// # Examples + /// + /// ``` + /// use causal_triangulations::prelude::simulation::{ + /// ActionConfig, CdtResult, CdtTriangulation, MetropolisAlgorithm, MetropolisConfig, + /// }; + /// + /// fn main() -> CdtResult<()> { + /// let tri = CdtTriangulation::from_cdt_strip(4, 3)?; + /// let action = ActionConfig::default(); + /// let prefix = MetropolisAlgorithm::new( + /// MetropolisConfig::new(1.0, 2, 0, 1).with_seed(13), + /// action.clone(), + /// ); + /// let checkpoint = prefix.run_to_checkpoint(tri)?; + /// + /// let resumed = MetropolisAlgorithm::new( + /// MetropolisConfig::new(1.0, 2, 0, 1).with_seed(999), + /// action, + /// ) + /// .resume_from_checkpoint(checkpoint)?; + /// + /// assert_eq!(resumed.steps.len(), 4); + /// assert_eq!(resumed.config.steps, 4); + /// Ok(()) + /// } + /// ``` + pub fn resume_from_checkpoint( + &self, + checkpoint: CdtMcmcCheckpoint, + ) -> CdtResult { + self.config.validate()?; + self.action_config.validate()?; + validate_resume_compatible(self, &checkpoint)?; + + let mut result_config = checkpoint.config.clone(); + result_config.steps = checkpoint + .current_step + .checked_add(self.config.steps) + .ok_or_else(|| { + checkpoint_resume_failed( + "step count overflow", + "resumed step count exceeds u32::MAX", + ) + })?; + + let mut state = MetropolisRunState::from_checkpoint(checkpoint)?; + self.run_steps(&mut state, self.config.steps)?; + state + .into_checkpoint(result_config, self.action_config.clone()) + 
.map(CdtMcmcCheckpoint::into_results) + } + + fn initial_state(&self, mut triangulation: CdtTriangulation2D) -> MetropolisRunState { + let current_action = action_for(&self.action_config, &triangulation); + let measurements = vec![measurement_for(0, current_action, &triangulation)]; triangulation.record_event(SimulationEvent::MeasurementTaken { step: 0, action: current_action, }); - for step in 1..=self.config.steps { - let move_type = moves.select_random_move(); - move_stats.record_attempt(move_type); - triangulation.record_event(SimulationEvent::MoveAttempted { - move_type: format!("{move_type:?}"), - step: step.into(), - }); + MetropolisRunState { + triangulation, + current_step: 0, + current_action, + acceptance_rng: simulation_rng(self.config.seed), + ergodics: self.config.seed.map_or_else(ErgodicsSystem::new, |seed| { + ErgodicsSystem::with_seed(seed.wrapping_add(0x9E37_79B9_7F4A_7C15)) + }), + move_stats: MoveStatistics::new(), + steps: Vec::new(), + measurements, + elapsed_time: Duration::ZERO, + } + } - let action_before = current_action; - let delta_action = proposed_delta_action( - &self.action_config, - simplex_counts(&triangulation), - move_type, - ); + fn run_steps(&self, state: &mut MetropolisRunState, additional_steps: u32) -> CdtResult<()> { + let start = Instant::now(); + for _ in 0..additional_steps { + let step = state.current_step.checked_add(1).ok_or_else(|| { + checkpoint_resume_failed( + "step count overflow", + "resumed step count exceeds u32::MAX", + ) + })?; + run_one_step(self, state, step)?; + state.current_step = step; + } + state.elapsed_time += start.elapsed(); + Ok(()) + } +} + +impl MetropolisRunState { + /// Restores the mutable simulation state from a validated checkpoint. + /// + /// The generic MCMC checkpoint rechecks target compatibility, then CDT + /// recomputes the action so serialized telemetry cannot silently diverge + /// from the invariant-checked triangulation payload. 
+ fn from_checkpoint(checkpoint: CdtMcmcCheckpoint) -> CdtResult { + validate_checkpoint_counters(&checkpoint)?; + let target = CdtTarget::new( + checkpoint.action_config.clone(), + checkpoint.config.temperature, + ) + .map_err(|err| { + checkpoint_resume_failed("checkpoint target configuration", err.to_string()) + })?; + let chain = Chain::from_checkpoint(checkpoint.chain, &target) + .map_err(|err| checkpoint_resume_failed("mcmc chain restore", err.to_string()))?; + let triangulation = chain.into_state(); + let actual_action = action_for(&checkpoint.action_config, &triangulation); + if !actions_match(actual_action, checkpoint.current_action) { + return Err(checkpoint_resume_failed( + "action mismatch", + format!( + "checkpoint action mismatch: stored {}, recomputed {}", + checkpoint.current_action, actual_action + ), + )); + } + + Ok(Self { + triangulation, + current_step: checkpoint.current_step, + current_action: checkpoint.current_action, + acceptance_rng: checkpoint.acceptance_rng, + ergodics: checkpoint.ergodics, + move_stats: checkpoint.move_stats, + steps: checkpoint.steps, + measurements: checkpoint.measurements, + elapsed_time: checkpoint.elapsed_time, + }) + } - let mut accepted = false; - let mut action_after = None; - if let Some(delta) = delta_action - && metropolis_accept(delta, self.config.temperature, &mut rng) - { - match apply_accepted_move( - &mut triangulation, - &mut moves, - &self.action_config, + /// Converts mutable run state into a resumable CDT/MCMC checkpoint. + /// + /// The conversion rebuilds the generic chain counters from CDT move + /// statistics so serialized checkpoints keep both accounting systems in + /// lockstep. 
+ fn into_checkpoint( + self, + config: MetropolisConfig, + action_config: ActionConfig, + ) -> CdtResult { + let (accepted, rejected) = chain_counters(&self.move_stats)?; + Ok(CdtMcmcCheckpoint { + chain: ChainCheckpoint::new(self.triangulation, accepted, rejected), + config, + action_config, + current_step: self.current_step, + current_action: self.current_action, + move_stats: self.move_stats, + steps: self.steps, + measurements: self.measurements, + elapsed_time: self.elapsed_time, + acceptance_rng: self.acceptance_rng, + ergodics: self.ergodics, + }) + } +} + +/// Executes one additional Metropolis step against an initialized run state. +/// +/// Fresh and resumed simulations use this shared path so checkpoint +/// continuation cannot drift from ordinary sampling behavior. +fn run_one_step( + algorithm: &MetropolisAlgorithm, + state: &mut MetropolisRunState, + step: u32, +) -> CdtResult<()> { + let move_type = state.ergodics.select_random_move(); + state.move_stats.record_attempt(move_type); + state + .triangulation + .record_event(SimulationEvent::MoveAttempted { + move_type: format!("{move_type:?}"), + step: step.into(), + }); + + let action_before = state.current_action; + let delta_action = proposed_delta_action( + &algorithm.action_config, + simplex_counts(&state.triangulation), + move_type, + ); + + let mut accepted = false; + let mut action_after = None; + if let Some(delta) = delta_action + && metropolis_accept( + delta, + algorithm.config.temperature, + &mut state.acceptance_rng, + ) + { + match apply_accepted_move( + &mut state.triangulation, + &mut state.ergodics, + &algorithm.action_config, + move_type, + action_before, + ) { + Ok(AcceptedMoveResult::Applied { + action_after: applied_action, + }) => { + accepted = true; + action_after = Some(applied_action); + state.current_action = applied_action; + state.move_stats.record_success(move_type); + state + .triangulation + .record_event(SimulationEvent::MoveAccepted { + move_type: 
format!("{move_type:?}"), + step: step.into(), + action_change: applied_action - action_before, + }); + } + Ok(AcceptedMoveResult::NoApplicableSite { .. }) => { + // A move type can be Metropolis-accepted even when bounded + // random local-site selection finds no realizable site. That + // is an ordinary proposal rejection, not a fatal simulation error. + } + Err(err) => { + return Err(accepted_move_error( + step, move_type, - action_before, - ) { - Ok(AcceptedMoveResult::Applied { - action_after: applied_action, - }) => { - accepted = true; - action_after = Some(applied_action); - current_action = applied_action; - move_stats.record_success(move_type); - triangulation.record_event(SimulationEvent::MoveAccepted { - move_type: format!("{move_type:?}"), - step: step.into(), - action_change: applied_action - action_before, - }); - } - Ok(AcceptedMoveResult::NoApplicableSite { .. }) => { - // A move type can be Metropolis-accepted even when bounded - // random local-site selection finds no realizable site. That - // is an ordinary proposal rejection, not a fatal simulation error. 
- } - Err(err) => { - return Err(accepted_move_error( - step, - move_type, - err.attempt, - err.source.to_string(), - )); - } - } + err.attempt, + err.source.to_string(), + )); } + } + } - steps.push(MonteCarloStep { - step, - move_type, - accepted, - action_before, - action_after, - delta_action, + state.steps.push(MonteCarloStep { + step, + move_type, + accepted, + action_before, + action_after, + delta_action, + }); + + if step.is_multiple_of(algorithm.config.measurement_frequency) { + state.measurements.push(measurement_for( + step, + state.current_action, + &state.triangulation, + )); + state + .triangulation + .record_event(SimulationEvent::MeasurementTaken { + step: step.into(), + action: state.current_action, }); + } - if step.is_multiple_of(self.config.measurement_frequency) { - measurements.push(measurement_for(step, current_action, &triangulation)); - triangulation.record_event(SimulationEvent::MeasurementTaken { - step: step.into(), - action: current_action, - }); + Ok(()) +} + +/// Builds a structured checkpoint-resume error. +fn checkpoint_resume_failed(reason: &'static str, detail: impl Into) -> CdtError { + CdtError::CheckpointResumeFailed { + reason: reason.to_string(), + detail: detail.into(), + } +} + +/// Verifies that a checkpoint can be resumed by the requested algorithm. +/// +/// Resume accepts a different fresh seed because serialized checkpoints carry +/// their own RNG streams, but rejects physics and sampling schedule changes +/// that would make the cumulative chain scientifically ambiguous. 
+fn validate_resume_compatible( + algorithm: &MetropolisAlgorithm, + checkpoint: &CdtMcmcCheckpoint, +) -> CdtResult<()> { + if algorithm.action_config != checkpoint.action_config { + return Err(checkpoint_resume_failed( + "incompatible action configuration", + "action configuration differs from checkpoint", + )); + } + if algorithm.config.temperature.to_bits() != checkpoint.config.temperature.to_bits() { + return Err(checkpoint_resume_failed( + "incompatible temperature", + "temperature differs from checkpoint", + )); + } + if algorithm.config.thermalization_steps != checkpoint.config.thermalization_steps { + return Err(checkpoint_resume_failed( + "incompatible thermalization schedule", + "thermalization schedule differs from checkpoint", + )); + } + if algorithm.config.measurement_frequency != checkpoint.config.measurement_frequency { + return Err(checkpoint_resume_failed( + "incompatible measurement frequency", + "measurement frequency differs from checkpoint", + )); + } + validate_checkpoint_counters(checkpoint) +} + +/// Checks that serialized chain counters and CDT telemetry agree. +/// +/// The generic checkpoint, move statistics, current step, and step telemetry +/// are redundant by design; this catches tampered or partially written +/// checkpoint payloads before any resumed sampling occurs. 
+fn validate_checkpoint_counters(checkpoint: &CdtMcmcCheckpoint) -> CdtResult<()> { + checkpoint + .config + .validate() + .map_err(|err| checkpoint_resume_failed("checkpoint configuration", err.to_string()))?; + checkpoint.action_config.validate().map_err(|err| { + checkpoint_resume_failed("checkpoint action configuration", err.to_string()) + })?; + + let (accepted, rejected) = chain_counters(&checkpoint.move_stats)?; + if checkpoint.chain.accepted() != accepted || checkpoint.chain.rejected() != rejected { + return Err(checkpoint_resume_failed( + "chain counter mismatch", + "chain counters do not match move statistics", + )); + } + if checkpoint.chain.total_steps() + != usize::try_from(checkpoint.current_step).unwrap_or(usize::MAX) + { + return Err(checkpoint_resume_failed( + "chain step mismatch", + "chain step count does not match checkpoint step", + )); + } + if checkpoint.steps.len() != checkpoint.chain.total_steps() { + return Err(checkpoint_resume_failed( + "step telemetry mismatch", + "step telemetry length does not match chain step count", + )); + } + validate_checkpoint_steps(checkpoint)?; + validate_checkpoint_measurements(checkpoint)?; + Ok(()) +} + +/// Checks that serialized per-step telemetry forms the exact prefix being resumed. 
+fn validate_checkpoint_steps(checkpoint: &CdtMcmcCheckpoint) -> CdtResult<()> { + let accepted_steps = checkpoint.steps.iter().filter(|step| step.accepted).count(); + if accepted_steps != checkpoint.chain.accepted() { + return Err(checkpoint_resume_failed( + "step telemetry mismatch", + format!( + "accepted step count mismatch: got {}, expected {}", + accepted_steps, + checkpoint.chain.accepted() + ), + )); + } + + for (index, step) in checkpoint.steps.iter().enumerate() { + let expected_step = u32::try_from(index + 1).map_err(|_| { + checkpoint_resume_failed( + "step telemetry overflow", + "step telemetry index exceeds u32::MAX", + ) + })?; + if step.step != expected_step { + return Err(checkpoint_resume_failed( + "step telemetry mismatch", + format!( + "step telemetry must be sequential: got step {}, expected {}", + step.step, expected_step + ), + )); + } + if !step.action_before.is_finite() { + return Err(checkpoint_resume_failed( + "step telemetry mismatch", + format!("step {} has non-finite action_before", step.step), + )); + } + if let Some(delta_action) = step.delta_action + && !delta_action.is_finite() + { + return Err(checkpoint_resume_failed( + "step telemetry mismatch", + format!("step {} has non-finite delta_action", step.step), + )); + } + if step.accepted && step.delta_action.is_none() { + return Err(checkpoint_resume_failed( + "step telemetry mismatch", + format!("accepted step {} is missing delta_action", step.step), + )); + } + match (step.accepted, step.action_after) { + (true, Some(action_after)) if action_after.is_finite() => { + if let Some(delta_action) = step.delta_action + && !actions_match(action_after, step.action_before + delta_action) + { + return Err(checkpoint_resume_failed( + "step telemetry mismatch", + format!( + "step {} action_after does not match delta_action", + step.step + ), + )); + } + } + (true, Some(_)) => { + return Err(checkpoint_resume_failed( + "step telemetry mismatch", + format!("step {} has non-finite action_after", 
step.step), + )); + } + (true, None) => { + return Err(checkpoint_resume_failed( + "step telemetry mismatch", + format!("accepted step {} is missing action_after", step.step), + )); + } + (false, Some(_)) => { + return Err(checkpoint_resume_failed( + "step telemetry mismatch", + format!("rejected step {} unexpectedly has action_after", step.step), + )); } + (false, None) => {} } + } + Ok(()) +} - Ok(SimulationResultsBackend { - config: self.config.clone(), - action_config: self.action_config.clone(), - move_stats, - steps, - measurements, - elapsed_time: start.elapsed(), - triangulation, - }) +/// Checks that serialized measurements match the configured sampling schedule. +fn validate_checkpoint_measurements(checkpoint: &CdtMcmcCheckpoint) -> CdtResult<()> { + let expected_measurements = usize::try_from( + u64::from(checkpoint.current_step) / u64::from(checkpoint.config.measurement_frequency) + 1, + ) + .map_err(|_| { + checkpoint_resume_failed( + "measurement telemetry overflow", + "scheduled measurement count exceeds usize::MAX", + ) + })?; + if checkpoint.measurements.len() != expected_measurements { + return Err(checkpoint_resume_failed( + "measurement telemetry mismatch", + format!( + "scheduled measurement count mismatch: got {}, expected {}", + checkpoint.measurements.len(), + expected_measurements + ), + )); } + + for (index, measurement) in checkpoint.measurements.iter().enumerate() { + let expected_step = u64::try_from(index) + .ok() + .and_then(|index| index.checked_mul(u64::from(checkpoint.config.measurement_frequency))) + .and_then(|step| u32::try_from(step).ok()) + .ok_or_else(|| { + checkpoint_resume_failed( + "measurement telemetry overflow", + "scheduled measurement step exceeds u32::MAX", + ) + })?; + if measurement.step != expected_step { + return Err(checkpoint_resume_failed( + "measurement telemetry mismatch", + format!( + "measurement telemetry must follow the sampling schedule: got step {}, expected {}", + measurement.step, expected_step + ), 
+ )); + } + if !measurement.action.is_finite() { + return Err(checkpoint_resume_failed( + "measurement telemetry mismatch", + format!( + "measurement at step {} has non-finite action", + measurement.step + ), + )); + } + } + Ok(()) +} + +/// Converts CDT move statistics into generic MCMC chain counters. +/// +/// Accepted and rejected counts are derived from proposal accounting, with +/// overflow and impossible accepted-above-attempted states reported as +/// checkpoint resume errors instead of panicking. +fn chain_counters(move_stats: &MoveStatistics) -> CdtResult<(usize, usize)> { + let attempted = move_stats.total_attempted(); + let accepted = move_stats.total_accepted(); + let rejected = attempted.checked_sub(accepted).ok_or_else(|| { + checkpoint_resume_failed( + "move statistics invariant", + "accepted move count exceeds attempted move count", + ) + })?; + Ok(( + usize::try_from(accepted).map_err(|_| { + checkpoint_resume_failed( + "counter conversion overflow", + "accepted move count exceeds usize::MAX", + ) + })?, + usize::try_from(rejected).map_err(|_| { + checkpoint_resume_failed( + "counter conversion overflow", + "rejected move count exceeds usize::MAX", + ) + })?, + )) } /// Builds the RNG used only for Metropolis acceptance draws. /// /// This keeps acceptance randomness separate from move-site selection, so seeded /// simulations are reproducible while unseeded simulations still draw fresh entropy. -fn simulation_rng(seed: Option) -> StdRng { - seed.map_or_else(rand::make_rng, StdRng::seed_from_u64) +fn simulation_rng(seed: Option) -> Xoshiro256PlusPlus { + seed.map_or_else(rand::make_rng, Xoshiro256PlusPlus::seed_from_u64) } /// Reads simplex counts through the CDT wrapper for action and measurement code. @@ -895,10 +1565,19 @@ fn proposed_delta_action( /// /// Factoring this out keeps the probability rule isolated from move selection /// and makes deterministic unit tests possible with a seeded RNG. 
-fn metropolis_accept(delta_action: f64, temperature: f64, rng: &mut StdRng) -> bool { +fn metropolis_accept(delta_action: f64, temperature: f64, rng: &mut R) -> bool { delta_action <= 0.0 || rng.random::() < (-delta_action / temperature).exp() } +/// Compares action values with a scale-aware tolerance for checkpoint validation. +fn actions_match(left: f64, right: f64) -> bool { + if !(left.is_finite() && right.is_finite()) { + return false; + } + let scale = left.abs().max(right.abs()).max(1.0); + (left - right).abs() <= f64::EPSILON * scale * 8.0 +} + /// Applies an already-accepted move, rolling back and retrying failed sites. /// /// Retry exhaustion means the move type did not bind to a realizable local site @@ -955,7 +1634,7 @@ fn apply_accepted_move( } debug_assert!( - (action_for(action_config, triangulation) - action_before).abs() < f64::EPSILON, + actions_match(action_for(action_config, triangulation), action_before), "failed accepted move retries must leave the triangulation rolled back" ); Ok(AcceptedMoveResult::NoApplicableSite { last_rejection }) @@ -1004,6 +1683,8 @@ mod tests { use crate::geometry::traits::TriangulationQuery; use approx::assert_relative_eq; use markov_chain_monte_carlo::Chain; + use rand::rngs::StdRng; + use serde_json::{from_str, to_string, to_value}; fn assert_optional_relative_eq(left: Option, right: Option) { match (left, right) { @@ -1013,6 +1694,32 @@ mod tests { } } + fn short_checkpoint() -> CdtMcmcCheckpoint { + let triangulation = + CdtTriangulation::from_cdt_strip(4, 3).expect("explicit strip should build"); + MetropolisAlgorithm::new( + MetropolisConfig::new(1.0, 2, 0, 1).with_seed(13), + ActionConfig::default(), + ) + .run_to_checkpoint(triangulation) + .expect("short prefix run should checkpoint") + } + + fn assert_checkpoint_resume_failed( + result: CdtResult, + expected_reason: &str, + expected_detail: &str, + ) { + let Err(CdtError::CheckpointResumeFailed { reason, detail }) = result else { + panic!("expected 
checkpoint resume failure"); + }; + assert_eq!(reason, expected_reason); + assert!( + detail.contains(expected_detail), + "expected detail to contain {expected_detail:?}, got {detail:?}" + ); + } + #[test] fn test_metropolis_config() { let config = MetropolisConfig::new(2.0, 500, 50, 5); @@ -1118,6 +1825,250 @@ mod tests { })); } + #[test] + fn serialized_checkpoint_resumes_from_stored_rng_state() { + let triangulation = + CdtTriangulation::from_cdt_strip(4, 3).expect("explicit strip should build"); + let action_config = ActionConfig::default(); + let prefix = MetropolisAlgorithm::new( + MetropolisConfig::new(1.0, 4, 0, 1).with_seed(13), + action_config.clone(), + ); + let checkpoint = prefix + .run_to_checkpoint(triangulation) + .expect("prefix run should checkpoint"); + let checkpoint_json = to_string(&checkpoint).expect("checkpoint should serialize"); + let checkpoint: CdtMcmcCheckpoint = + from_str(&checkpoint_json).expect("checkpoint should deserialize"); + let alternate_checkpoint: CdtMcmcCheckpoint = + from_str(&checkpoint_json).expect("checkpoint should deserialize again"); + let first_resume_algorithm = MetropolisAlgorithm::new( + MetropolisConfig::new(1.0, 6, 0, 1).with_seed(999), + action_config.clone(), + ); + let first_resumed = first_resume_algorithm + .resume_from_checkpoint(checkpoint) + .expect("resume should complete"); + let second_resume_algorithm = MetropolisAlgorithm::new( + MetropolisConfig::new(1.0, 6, 0, 1).with_seed(123), + action_config, + ); + let second_resumed = second_resume_algorithm + .resume_from_checkpoint(alternate_checkpoint) + .expect("resume should ignore fresh seed and use checkpoint RNG state"); + + assert_eq!(first_resumed.config.steps, 10); + assert_eq!(first_resumed.steps.len(), 10); + assert_eq!(first_resumed.steps[4].step, 5); + first_resumed + .triangulation + .validate_topology() + .expect("resumed triangulation should preserve topology"); + first_resumed + .triangulation + .validate_foliation() + .expect("resumed 
triangulation should preserve foliation"); + first_resumed + .triangulation + .validate_causality() + .expect("resumed triangulation should preserve causality"); + first_resumed + .triangulation + .validate_cell_classification() + .expect("resumed triangulation should preserve cell classification"); + assert_eq!( + to_value(&first_resumed.steps).expect("steps should serialize"), + to_value(&second_resumed.steps).expect("steps should serialize") + ); + assert_eq!( + to_value(&first_resumed.measurements).expect("measurements should serialize"), + to_value(&second_resumed.measurements).expect("measurements should serialize") + ); + assert_eq!( + to_value(&first_resumed.move_stats).expect("stats should serialize"), + to_value(&second_resumed.move_stats).expect("stats should serialize") + ); + assert_eq!( + first_resumed.triangulation.vertex_count(), + second_resumed.triangulation.vertex_count() + ); + assert_eq!( + first_resumed.triangulation.edge_count(), + second_resumed.triangulation.edge_count() + ); + assert_eq!( + first_resumed.triangulation.face_count(), + second_resumed.triangulation.face_count() + ); + assert_eq!( + first_resumed.triangulation.slice_sizes(), + second_resumed.triangulation.slice_sizes() + ); + } + + #[test] + fn resume_rejects_incompatible_action_config() { + let checkpoint = short_checkpoint(); + let algorithm = MetropolisAlgorithm::new( + MetropolisConfig::new(1.0, 2, 0, 1).with_seed(999), + ActionConfig::new(2.0, 1.0, 0.1), + ); + + assert_checkpoint_resume_failed( + algorithm.resume_from_checkpoint(checkpoint), + "incompatible action configuration", + "action configuration", + ); + } + + #[test] + fn resume_rejects_incompatible_sampling_schedule() { + let checkpoint = short_checkpoint(); + let algorithm = MetropolisAlgorithm::new( + MetropolisConfig::new(1.0, 2, 0, 2).with_seed(999), + ActionConfig::default(), + ); + + assert_checkpoint_resume_failed( + algorithm.resume_from_checkpoint(checkpoint), + "incompatible measurement frequency", + 
"measurement frequency", + ); + } + + #[test] + fn resume_rejects_inconsistent_checkpoint_counters() { + let mut checkpoint = short_checkpoint(); + checkpoint.move_stats.record_attempt(MoveType::Move22); + let algorithm = MetropolisAlgorithm::new( + MetropolisConfig::new(1.0, 2, 0, 1).with_seed(999), + ActionConfig::default(), + ); + + assert_checkpoint_resume_failed( + algorithm.resume_from_checkpoint(checkpoint), + "chain counter mismatch", + "chain counters", + ); + } + + #[test] + fn resume_rejects_inconsistent_step_telemetry() { + let mut checkpoint = short_checkpoint(); + checkpoint.steps.pop(); + let algorithm = MetropolisAlgorithm::new( + MetropolisConfig::new(1.0, 2, 0, 1).with_seed(999), + ActionConfig::default(), + ); + + assert_checkpoint_resume_failed( + algorithm.resume_from_checkpoint(checkpoint), + "step telemetry mismatch", + "step telemetry length", + ); + } + + #[test] + fn resume_rejects_nonsequential_step_telemetry() { + let mut checkpoint = short_checkpoint(); + checkpoint.steps[0].step = 2; + let algorithm = MetropolisAlgorithm::new( + MetropolisConfig::new(1.0, 2, 0, 1).with_seed(999), + ActionConfig::default(), + ); + + assert_checkpoint_resume_failed( + algorithm.resume_from_checkpoint(checkpoint), + "step telemetry mismatch", + "step telemetry must be sequential", + ); + } + + #[test] + fn resume_rejects_step_acceptance_counter_mismatch() { + let mut checkpoint = short_checkpoint(); + if let Some(step) = checkpoint.steps.iter_mut().find(|step| step.accepted) { + step.accepted = false; + step.action_after = None; + } else { + let step = &mut checkpoint.steps[0]; + step.accepted = true; + step.delta_action = Some(0.0); + step.action_after = Some(step.action_before); + } + let algorithm = MetropolisAlgorithm::new( + MetropolisConfig::new(1.0, 2, 0, 1).with_seed(999), + ActionConfig::default(), + ); + + assert_checkpoint_resume_failed( + algorithm.resume_from_checkpoint(checkpoint), + "step telemetry mismatch", + "accepted step count 
mismatch", + ); + } + + #[test] + fn resume_rejects_missing_scheduled_measurement() { + let mut checkpoint = short_checkpoint(); + checkpoint.measurements.pop(); + let algorithm = MetropolisAlgorithm::new( + MetropolisConfig::new(1.0, 2, 0, 1).with_seed(999), + ActionConfig::default(), + ); + + assert_checkpoint_resume_failed( + algorithm.resume_from_checkpoint(checkpoint), + "measurement telemetry mismatch", + "scheduled measurement count mismatch", + ); + } + + #[test] + fn resume_rejects_checkpoint_action_mismatch() { + let mut checkpoint = short_checkpoint(); + checkpoint.current_action += 1.0; + let algorithm = MetropolisAlgorithm::new( + MetropolisConfig::new(1.0, 2, 0, 1).with_seed(999), + ActionConfig::default(), + ); + + assert_checkpoint_resume_failed( + algorithm.resume_from_checkpoint(checkpoint), + "action mismatch", + "checkpoint action mismatch", + ); + } + + #[test] + fn checkpoint_restore_maps_invalid_checkpoint_config_to_resume_failure() { + let mut checkpoint = short_checkpoint(); + checkpoint.config.temperature = f64::NAN; + let Err(CdtError::CheckpointResumeFailed { reason, detail }) = + MetropolisRunState::from_checkpoint(checkpoint) + else { + panic!("expected checkpoint configuration failure"); + }; + + assert_eq!(reason, "checkpoint configuration"); + assert!(detail.contains("temperature")); + } + + #[test] + fn chain_counters_rejects_accepted_above_attempted() { + let stats = MoveStatistics { + moves_22_accepted: 1, + ..MoveStatistics::new() + }; + let Err(CdtError::CheckpointResumeFailed { reason, detail }) = chain_counters(&stats) + else { + panic!("expected impossible move statistics to fail"); + }; + + assert_eq!(reason, "move statistics invariant"); + assert!(detail.contains("accepted move count exceeds attempted move count")); + } + #[test] fn explicit_cdt_volume_profiles_count_time_slabs() { let strip = CdtTriangulation::from_cdt_strip(4, 3).expect("create explicit strip"); diff --git a/src/cdt/results.rs b/src/cdt/results.rs index 
b373305..5d4a85c 100644 --- a/src/cdt/results.rs +++ b/src/cdt/results.rs @@ -10,8 +10,17 @@ use crate::cdt::action::ActionConfig; use crate::cdt::ergodic_moves::MoveStatistics; use crate::cdt::metropolis::{MetropolisConfig, MonteCarloStep}; use crate::cdt::observables::{estimate_hausdorff_dimension, estimate_spectral_dimension}; +use crate::config::{CdtConfig, CdtTopology}; +use crate::errors::{CdtError, CdtResult}; use crate::geometry::CdtTriangulation2D; use num_traits::cast::NumCast; +use serde::{Deserialize, Serialize}; +use serde_json::to_writer_pretty; +use std::collections::HashMap; +use std::fmt::Display; +use std::fs::{File, create_dir_all}; +use std::io::{BufWriter, Write}; +use std::path::Path; use std::time::Duration; /// Measurement data collected during simulation. @@ -19,7 +28,7 @@ use std::time::Duration; /// Use [`Self::new`] and builder-style methods such as /// [`Self::with_volume_profile`] rather than struct literals outside this /// crate; additional measurement fields may be added over time. -#[derive(Debug, Clone)] +#[derive(Debug, Clone, Serialize, Deserialize)] #[non_exhaustive] pub struct Measurement { /// Monte Carlo step when measurement was taken @@ -95,7 +104,12 @@ impl Measurement { /// Values are produced by [`MetropolisAlgorithm::run`](crate::cdt::metropolis::MetropolisAlgorithm::run) /// and include raw Monte Carlo steps, recorded measurements, final geometry, /// and convenience methods for common post-simulation summaries. -#[derive(Debug)] +/// +/// Serde serialization preserves the complete result object, including the +/// final triangulation checkpoint. Use [`Self::write_summary_json`] when you +/// want an analysis-friendly JSON report with configuration, aggregate +/// statistics, step telemetry, and measurements. 
+#[derive(Debug, Serialize, Deserialize)] pub struct SimulationResultsBackend { /// Configuration used for the simulation pub config: MetropolisConfig, @@ -113,6 +127,38 @@ pub struct SimulationResultsBackend { pub triangulation: CdtTriangulation2D, } +#[derive(Serialize)] +struct SimulationSummary<'a> { + config: &'a CdtConfig, + metropolis_config: &'a MetropolisConfig, + action_config: &'a ActionConfig, + move_stats: &'a MoveStatistics, + aggregate: AggregateSummary, + final_triangulation: TriangulationSummary, + steps: &'a [MonteCarloStep], + measurements: &'a [Measurement], +} + +#[derive(Serialize)] +struct AggregateSummary { + acceptance_rate: f64, + average_action: f64, + elapsed_time_ms: u128, + measurement_count: usize, + step_count: usize, + average_volume_profile: Vec<f64>, + volume_fluctuations: Vec<f64>, +} + +#[derive(Serialize)] +struct TriangulationSummary { + vertices: usize, + edges: usize, + triangles: usize, + time_slices: u32, + topology: CdtTopology, +} + impl SimulationResultsBackend { /// Calculates the acceptance rate for the simulation. /// @@ -251,8 +297,7 @@ impl SimulationResultsBackend { let mut sums = vec![0.0; profile_len]; for measurement in &measurements { for (index, &volume) in measurement.volume_profile.iter().enumerate() { - let volume: f64 = volume.into(); - sums[index] += volume; + sums[index] += f64::from(volume); } } @@ -315,10 +360,7 @@ impl SimulationResultsBackend { let volume = measurement .volume_profile .get(index) - .map_or(0.0, |&volume| { - let volume: f64 = volume.into(); - volume - }); + .map_or(0.0, |&volume| f64::from(volume)); let delta = volume - mean; variances[index] += delta * delta; } @@ -444,10 +486,182 @@ impl SimulationResultsBackend { /// ``` #[must_use] pub fn equilibrium_measurements(&self) -> Vec<&Measurement> { + self.equilibrium_measurements_iter().collect() + } + + /// Iterates over measurements after thermalization without allocating. 
+ fn equilibrium_measurements_iter(&self) -> impl Iterator<Item = &Measurement> { self.measurements .iter() - .filter(|m| m.step >= self.config.thermalization_steps) - .collect() + .filter(|measurement| measurement.step >= self.config.thermalization_steps) + } + + /// Writes one CSV row per recorded measurement. + /// + /// The CSV includes scalar measurement values plus accepted/delta-action + /// telemetry from the Monte Carlo step with the same step number when such a + /// step exists. Initial measurements at step 0 leave those telemetry columns + /// blank. + /// + /// # Errors + /// + /// Returns [`CdtError::OutputWriteFailed`] if the file or a parent directory + /// cannot be created, or if writing the CSV fails. + /// + /// # Examples + /// + /// ```no_run + /// use causal_triangulations::prelude::simulation::*; + /// + /// fn main() -> CdtResult<()> { + /// let tri = CdtTriangulation::from_cdt_strip(4, 3)?; + /// let results = MetropolisAlgorithm::new( + /// MetropolisConfig::new(1.0, 2, 1, 1), + /// ActionConfig::default(), + /// ) + /// .run(tri)?; + /// results.write_measurements_csv("measurements.csv")?; + /// Ok(()) + /// } + /// ``` + pub fn write_measurements_csv(&self, path: impl AsRef<Path>) -> CdtResult<()> { + let path = path.as_ref(); + ensure_parent_directory(path, "csv")?; + let file = File::create(path).map_err(|err| output_error(path, "csv", err))?; + let mut writer = BufWriter::new(file); + writeln!( + writer, + "step,action,vertices,edges,triangles,accepted,delta_action" + ) + .map_err(|err| output_error(path, "csv", err))?; + + let steps_by_number: HashMap<_, _> = + self.steps.iter().map(|step| (step.step, step)).collect(); + for measurement in &self.measurements { + let step = steps_by_number.get(&measurement.step).copied(); + let accepted = step.map_or(String::new(), |step| step.accepted.to_string()); + let delta_action = step + .and_then(|step| step.delta_action) + .map_or_else(String::new, |delta| delta.to_string()); + writeln!( + writer, + 
"{},{},{},{},{},{},{}", + measurement.step, + measurement.action, + measurement.vertices, + measurement.edges, + measurement.triangles, + accepted, + delta_action, + ) + .map_err(|err| output_error(path, "csv", err))?; + } + + writer.flush().map_err(|err| output_error(path, "csv", err)) + } + + /// Writes a JSON summary for external analysis and run bookkeeping. + /// + /// The summary stores the top-level CLI/configuration parameters, action and + /// Metropolis configuration, aggregate statistics, final triangulation counts, + /// Monte Carlo step telemetry, and all measurements. The aggregate + /// `average_action` is computed from [`Self::equilibrium_measurements`] so + /// it excludes the initial snapshot and thermalization window. + /// + /// # Errors + /// + /// Returns [`CdtError::OutputWriteFailed`] if the file or a parent directory + /// cannot be created, or if JSON serialization fails. + /// + /// # Examples + /// + /// ```no_run + /// use causal_triangulations::prelude::simulation::*; + /// + /// fn main() -> CdtResult<()> { + /// let config = CdtConfig { + /// simulate: true, + /// steps: 2, + /// thermalization_steps: 1, + /// measurement_frequency: 1, + /// ..CdtConfig::new(12, 3) + /// }; + /// let results = causal_triangulations::run_simulation(&config)?; + /// results.write_summary_json(&config, "summary.json")?; + /// Ok(()) + /// } + /// ``` + pub fn write_summary_json(&self, config: &CdtConfig, path: impl AsRef<Path>) -> CdtResult<()> { + let path = path.as_ref(); + ensure_parent_directory(path, "json")?; + let file = File::create(path).map_err(|err| output_error(path, "json", err))?; + let mut writer = BufWriter::new(file); + let summary = SimulationSummary { + config, + metropolis_config: &self.config, + action_config: &self.action_config, + move_stats: &self.move_stats, + aggregate: AggregateSummary { + acceptance_rate: self.acceptance_rate(), + average_action: mean_measurement_action(self.equilibrium_measurements_iter()), + elapsed_time_ms: 
self.elapsed_time.as_millis(), + measurement_count: self.measurements.len(), + step_count: self.steps.len(), + average_volume_profile: self.average_volume_profile(), + volume_fluctuations: self.volume_fluctuations(), + }, + final_triangulation: TriangulationSummary { + vertices: self.triangulation.vertex_count(), + edges: self.triangulation.edge_count(), + triangles: self.triangulation.face_count(), + time_slices: self.triangulation.time_slices(), + topology: self.triangulation.metadata().topology, + }, + steps: &self.steps, + measurements: &self.measurements, + }; + + to_writer_pretty(&mut writer, &summary).map_err(|err| output_error(path, "json", err))?; + writeln!(writer).map_err(|err| output_error(path, "json", err))?; + writer + .flush() + .map_err(|err| output_error(path, "json", err)) + } +} + +/// Returns the mean action across a measurement stream. +fn mean_measurement_action<'a>(measurements: impl IntoIterator<Item = &'a Measurement>) -> f64 { + let mut sum = 0.0; + let mut count = 0_usize; + for measurement in measurements { + sum += measurement.action; + count += 1; + } + + if count == 0 { + return 0.0; + } + + let count = NumCast::from(count).unwrap_or(1.0); + sum / count +} + +/// Creates a parent directory for configured output paths when needed. +fn ensure_parent_directory(path: &Path, format: &'static str) -> CdtResult<()> { + if let Some(parent) = path.parent() + && !parent.as_os_str().is_empty() + { + create_dir_all(parent).map_err(|err| output_error(path, format, err))?; + } + Ok(()) +} + +/// Builds a typed output error without exposing I/O dependencies in public APIs. 
+fn output_error(path: &Path, format: &'static str, err: impl Display) -> CdtError { + CdtError::OutputWriteFailed { + path: path.display().to_string(), + format: format.to_string(), + detail: err.to_string(), } } @@ -457,6 +671,12 @@ mod tests { use crate::cdt::ergodic_moves::MoveType; use crate::cdt::triangulation::CdtTriangulation; use approx::assert_relative_eq; + use serde_json::{Value, from_str}; + use std::env; + use std::fs; + use std::path::PathBuf; + use std::process; + use std::thread; /// Builds a result container around deterministic geometry for summary-method tests. fn results_with( @@ -495,6 +715,31 @@ mod tests { } } + /// Returns a unique temporary path for output-writer tests. + fn temp_output_path(name: &str) -> PathBuf { + let thread_name = safe_thread_name(); + env::temp_dir().join(format!( + "causal-triangulations-{name}-{}-{}", + process::id(), + thread_name + )) + } + + /// Returns the current test thread name with path separators and + /// reserved characters removed. + fn safe_thread_name() -> String { + thread::current() + .name() + .unwrap_or("test") + .chars() + .map(|ch| match ch { + '<' | '>' | ':' | '"' | '/' | '\\' | '|' | '?' 
| '*' => '_', + ch if ch.is_control() => '_', + ch => ch, + }) + .collect() + } + #[test] fn measurement_builders_preserve_scalar_counts_and_profile() { let measurement = Measurement::new(7, -3.5, 12, 26, 12).with_volume_profile(vec![6, 6, 0]); @@ -507,6 +752,120 @@ mod tests { assert_eq!(measurement.volume_profile, vec![6, 6, 0]); } + #[test] + fn writes_measurements_csv_with_matching_step_telemetry() { + let triangulation = + CdtTriangulation::from_cdt_strip(4, 3).expect("explicit strip should build"); + let results = results_with( + MetropolisConfig::new(1.0, 2, 1, 1), + vec![MonteCarloStep { + step: 1, + move_type: MoveType::Move22, + accepted: true, + action_before: 3.0, + action_after: Some(2.5), + delta_action: Some(-0.5), + }], + vec![ + Measurement::new(0, 3.0, 12, 26, 12), + Measurement::new(1, 2.5, 12, 26, 12), + ], + triangulation, + ); + let path = temp_output_path("measurements.csv"); + + results + .write_measurements_csv(&path) + .expect("CSV output should write"); + let csv = fs::read_to_string(&path).expect("CSV output should be readable"); + fs::remove_file(&path).expect("temporary CSV should be removable"); + + assert_eq!( + csv, + "step,action,vertices,edges,triangles,accepted,delta_action\n\ + 0,3,12,26,12,,\n\ + 1,2.5,12,26,12,true,-0.5\n" + ); + } + + #[test] + fn writes_summary_json_with_config_and_aggregates() { + let triangulation = + CdtTriangulation::from_cdt_strip(4, 3).expect("explicit strip should build"); + let results = results_with( + MetropolisConfig::new(1.0, 1, 0, 1), + vec![MonteCarloStep { + step: 1, + move_type: MoveType::Move22, + accepted: true, + action_before: 3.0, + action_after: Some(2.5), + delta_action: Some(-0.5), + }], + vec![Measurement::new(1, 2.5, 12, 26, 12)], + triangulation, + ); + let config = CdtConfig { + steps: 1, + thermalization_steps: 0, + measurement_frequency: 1, + simulate: true, + ..CdtConfig::new(12, 3) + }; + let path = temp_output_path("summary.json"); + + results + .write_summary_json(&config, 
&path) + .expect("JSON output should write"); + let json = fs::read_to_string(&path).expect("JSON output should be readable"); + fs::remove_file(&path).expect("temporary JSON should be removable"); + let parsed: Value = from_str(&json).expect("summary should be valid JSON"); + + assert_eq!(parsed["config"]["vertices"], 12); + assert_eq!(parsed["aggregate"]["measurement_count"], 1); + assert_eq!(parsed["aggregate"]["step_count"], 1); + assert_eq!(parsed["final_triangulation"]["time_slices"], 3); + assert_eq!(parsed["measurements"][0]["step"], 1); + } + + #[test] + fn summary_json_average_action_uses_equilibrium_measurements() { + let triangulation = + CdtTriangulation::from_cdt_strip(4, 3).expect("explicit strip should build"); + let results = results_with( + MetropolisConfig::new(1.0, 2, 1, 1), + vec![], + vec![ + Measurement::new(0, 100.0, 12, 26, 12), + Measurement::new(1, 4.0, 12, 26, 12), + Measurement::new(2, 6.0, 12, 26, 12), + ], + triangulation, + ); + let config = CdtConfig { + steps: 2, + thermalization_steps: 1, + measurement_frequency: 1, + simulate: true, + ..CdtConfig::new(12, 3) + }; + let path = temp_output_path("equilibrium-summary.json"); + + results + .write_summary_json(&config, &path) + .expect("JSON output should write"); + let json = fs::read_to_string(&path).expect("JSON output should be readable"); + fs::remove_file(&path).expect("temporary JSON should be removable"); + let parsed: Value = from_str(&json).expect("summary should be valid JSON"); + + assert_relative_eq!( + parsed["aggregate"]["average_action"] + .as_f64() + .expect("average action should be numeric"), + 5.0 + ); + } + #[test] fn summaries_use_post_thermalization_measurements() { let config = MetropolisConfig::new(1.0, 20, 10, 5); diff --git a/src/cdt/triangulation.rs b/src/cdt/triangulation.rs index 9ea7e9e..cc49d10 100644 --- a/src/cdt/triangulation.rs +++ b/src/cdt/triangulation.rs @@ -8,11 +8,10 @@ use crate::cdt::foliation::Foliation; use crate::config::CdtTopology; use 
crate::errors::{CdtError, CdtResult}; -#[cfg(test)] use crate::geometry::DelaunayBackend2D; -#[cfg(test)] -use crate::geometry::generators::build_delaunay2_with_data; use crate::geometry::traits::TriangulationQuery; +use serde::de::Error as DeError; +use serde::{Deserialize, Deserializer, Serialize, Serializer}; use std::time::Instant; mod builders; @@ -66,7 +65,7 @@ struct CachedValue { } /// Events in simulation history -#[derive(Debug, Clone)] +#[derive(Debug, Clone, Serialize, Deserialize)] pub enum SimulationEvent { /// Triangulation was created Created { @@ -100,6 +99,101 @@ pub enum SimulationEvent { }, } +#[derive(Serialize)] +struct SerializedCdtTriangulation<'a> { + geometry: &'a DelaunayBackend2D, + metadata: SerializedCdtMetadata<'a>, + foliation: &'a Option<Foliation>, +} + +#[derive(Serialize)] +struct SerializedCdtMetadata<'a> { + time_slices: u32, + dimension: u8, + topology: CdtTopology, + modification_count: u64, + simulation_history: &'a [SimulationEvent], +} + +#[derive(Deserialize)] +struct DeserializedCdtTriangulation { + geometry: DelaunayBackend2D, + metadata: DeserializedCdtMetadata, + foliation: Option<Foliation>, +} + +#[derive(Deserialize)] +struct DeserializedCdtMetadata { + time_slices: u32, + dimension: u8, + topology: CdtTopology, + modification_count: u64, + simulation_history: Vec<SimulationEvent>, +} + +impl Serialize for CdtTriangulation { + fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> + where + S: Serializer, + { + SerializedCdtTriangulation { + geometry: &self.geometry, + metadata: SerializedCdtMetadata { + time_slices: self.metadata.time_slices, + dimension: self.metadata.dimension, + topology: self.metadata.topology, + modification_count: self.metadata.modification_count, + simulation_history: &self.metadata.simulation_history, + }, + foliation: &self.foliation, + } + .serialize(serializer) + } +} + +impl<'de> Deserialize<'de> for CdtTriangulation { + fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> + where + D: Deserializer<'de>, + { + let serialized = 
DeserializedCdtTriangulation::deserialize(deserializer)?; + let now = Instant::now(); + let foliation_synced_at_modification = serialized + .foliation + .as_ref() + .map(|_| serialized.metadata.modification_count); + let tri = Self { + geometry: serialized.geometry, + metadata: CdtMetadata { + time_slices: serialized.metadata.time_slices, + dimension: serialized.metadata.dimension, + topology: serialized.metadata.topology, + creation_time: now, + last_modified: now, + modification_count: serialized.metadata.modification_count, + simulation_history: serialized.metadata.simulation_history, + }, + cache: GeometryCache::default(), + foliation: serialized.foliation, + foliation_synced_at_modification, + }; + + tri.validate_checkpoint_invariants() + .map_err(DeError::custom)?; + Ok(tri) + } +} + +impl CdtTriangulation { + fn validate_checkpoint_invariants(&self) -> CdtResult<()> { + self.validate_topology()?; + self.validate_foliation()?; + self.validate_causality()?; + self.validate_cell_classification()?; + Ok(()) + } +} + impl CdtTriangulation { /// Fallible constructor for a CDT triangulation with open boundary topology. 
/// @@ -617,6 +711,10 @@ impl CdtTriangulation { #[cfg(test)] mod tests { use super::*; + use crate::cdt::action::ActionConfig; + use crate::cdt::metropolis::{MetropolisAlgorithm, MetropolisConfig}; + use crate::geometry::generators::build_delaunay2_with_data; + use serde_json::{from_str, to_string}; use std::thread; use std::time::{Duration, Instant}; @@ -1286,6 +1384,93 @@ mod tests { && expected == "≥ 3" )); } + + #[test] + fn strip_checkpoint_roundtrip_preserves_foliation_and_classification() { + let triangulation = + CdtTriangulation::from_cdt_strip(4, 3).expect("explicit CDT strip should build"); + + let json = to_string(&triangulation).expect("checkpoint should serialize"); + let restored: CdtTriangulation = + from_str(&json).expect("checkpoint should deserialize"); + + restored + .validate_checkpoint_invariants() + .expect("restored strip should validate checkpoint invariants"); + assert_eq!(restored.slice_sizes(), triangulation.slice_sizes()); + assert_eq!(restored.metadata().topology, CdtTopology::OpenBoundary); + assert_eq!( + restored + .geometry() + .faces() + .filter(|face| restored.cell_type(face).is_some()) + .count(), + restored.face_count(), + "all strip cells should keep Up/Down classification" + ); + } + + #[test] + fn toroidal_checkpoint_roundtrip_preserves_topology_and_labels() { + let triangulation = + CdtTriangulation::from_toroidal_cdt(4, 3).expect("explicit torus should build"); + let labels_before: Vec<_> = triangulation + .geometry() + .vertices() + .map(|vertex| triangulation.time_label(&vertex)) + .collect(); + + let json = to_string(&triangulation).expect("checkpoint should serialize"); + let restored: CdtTriangulation = + from_str(&json).expect("checkpoint should deserialize"); + let labels_after: Vec<_> = restored + .geometry() + .vertices() + .map(|vertex| restored.time_label(&vertex)) + .collect(); + + restored + .validate_checkpoint_invariants() + .expect("restored torus should validate checkpoint invariants"); + 
assert_eq!(restored.metadata().topology, CdtTopology::Toroidal); + assert_eq!(restored.geometry().periodic_domain(), Some([1.0, 1.0])); + assert_eq!(restored.slice_sizes(), triangulation.slice_sizes()); + assert_eq!(labels_after, labels_before); + } + + #[test] + fn mcmc_checkpoint_roundtrip_preserves_history_and_invariants() { + let triangulation = + CdtTriangulation::from_cdt_strip(4, 3).expect("explicit CDT strip should build"); + let algorithm = MetropolisAlgorithm::new( + MetropolisConfig::new(1.0, 4, 0, 1).with_seed(13), + ActionConfig::default(), + ); + let results = algorithm + .run(triangulation) + .expect("short MCMC run should complete"); + + let json = to_string(&results.triangulation).expect("checkpoint should serialize"); + let restored: CdtTriangulation = + from_str(&json).expect("checkpoint should deserialize"); + + restored + .validate_checkpoint_invariants() + .expect("restored MCMC checkpoint should validate invariants"); + assert_eq!( + restored.metadata().simulation_history.len(), + results.triangulation.metadata().simulation_history.len() + ); + assert_eq!( + restored.metadata().modification_count, + results.triangulation.metadata().modification_count + ); + assert_eq!( + restored.slice_sizes(), + results.triangulation.slice_sizes(), + "MCMC checkpoint should preserve foliation bookkeeping" + ); + } } #[cfg(test)] diff --git a/src/config.rs b/src/config.rs index 396396c..a406bf9 100644 --- a/src/config.rs +++ b/src/config.rs @@ -14,6 +14,7 @@ use crate::cdt::metropolis::MetropolisConfig; use crate::errors::{CdtError, CdtResult}; use clap::{Parser, ValueEnum}; use dirs::home_dir; +use serde::{Deserialize, Serialize}; use std::fmt::Display; use std::path::{Component, Path, PathBuf}; @@ -23,7 +24,12 @@ use std::path::{Component, Path, PathBuf}; /// - [`OpenBoundary`](Self::OpenBoundary) — open-boundary generation; topology /// validation accepts disk-like χ = 1 and sphere-like χ = 2 configurations /// - [`Toroidal`](Self::Toroidal) — periodic in 
both space and time (S¹×S¹, χ = 0) -#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, ValueEnum)] +/// +/// Serde uses the same kebab-case vocabulary as the CLI (`open-boundary`, +/// `toroidal`) so saved JSON configuration can round-trip through command-line +/// overrides. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, ValueEnum, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub enum CdtTopology { /// Finite strip with open boundaries (Euler characteristic χ = 1 for /// disk-like or χ = 2 for sphere-like configurations). @@ -43,7 +49,7 @@ pub enum CdtTopology { /// This combines all configuration options for the CDT simulation, /// including triangulation generation, action calculation, and /// Metropolis algorithm parameters. -#[derive(Parser, Debug, Clone)] +#[derive(Parser, Debug, Clone, Serialize, Deserialize)] #[command(author, version, about, long_about = None)] pub struct CdtConfig { /// Dimensionality of the triangulation @@ -97,10 +103,28 @@ pub struct CdtConfig { /// Topology and boundary conditions for triangulation generation #[arg(long, value_enum, default_value_t = CdtTopology::default())] pub topology: CdtTopology, + + /// Write per-measurement simulation data to a CSV file. + /// + /// Relative paths are resolved from the current working directory with + /// [`CdtConfig::resolve_path`]. Parent directories are created when output + /// is written. [`crate::run_simulation`] rejects configurations where CSV + /// and JSON output paths resolve to the same file. + #[arg(long, value_name = "PATH")] + pub output_csv: Option<PathBuf>, + + /// Write simulation metadata and aggregate summary data to a JSON file. + /// + /// Relative paths are resolved from the current working directory with + /// [`CdtConfig::resolve_path`]. Parent directories are created when output + /// is written. [`crate::run_simulation`] rejects configurations where CSV + /// and JSON output paths resolve to the same file. 
+ #[arg(long, value_name = "PATH")] + pub output_json: Option<PathBuf>, } /// Controls how dimension overrides are applied when merging configuration. -#[derive(Debug, Clone, Copy, PartialEq, Eq)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] pub enum DimensionOverride { /// Replace the dimension with the supplied value. Value(u8), @@ -112,7 +136,7 @@ /// /// Each field is optional, allowing callers to override only the configuration entries /// that need changing while leaving the rest untouched. -#[derive(Debug, Default, Clone, Copy)] +#[derive(Debug, Default, Clone, Serialize, Deserialize)] pub struct CdtConfigOverrides { /// Optional override for the triangulation dimension. pub dimension: Option<DimensionOverride>, @@ -144,6 +168,18 @@ pub struct CdtConfigOverrides { pub seed: Option<Option<u64>>, /// Optional override for the topology. pub topology: Option<CdtTopology>, + /// Optional override for CSV output path. + #[expect( + clippy::option_option, + reason = "None=no override, Some(None)=clear output path, Some(Some(v))=set output path" + )] + pub output_csv: Option<Option<PathBuf>>, + /// Optional override for JSON output path. 
+ #[expect( + clippy::option_option, + reason = "None=no override, Some(None)=clear output path, Some(Some(v))=set output path" + )] + pub output_json: Option<Option<PathBuf>>, } impl CdtConfig { @@ -231,6 +267,14 @@ impl CdtConfig { merged.topology = topology; } + + if let Some(output_csv) = &overrides.output_csv { + merged.output_csv.clone_from(output_csv); + } + + if let Some(output_json) = &overrides.output_json { + merged.output_json.clone_from(output_json); + } + merged } @@ -463,6 +507,8 @@ simulate: false, seed: None, topology: CdtTopology::OpenBoundary, + output_csv: None, + output_json: None, } } @@ -657,6 +703,8 @@ impl TestConfig { simulate: false, seed: None, topology: CdtTopology::OpenBoundary, + output_csv: None, + output_json: None, } } @@ -686,6 +734,8 @@ simulate: false, seed: None, topology: CdtTopology::OpenBoundary, + output_csv: None, + output_json: None, } } @@ -715,6 +765,8 @@ simulate: false, seed: None, topology: CdtTopology::OpenBoundary, + output_csv: None, + output_json: None, } } } @@ -733,6 +785,19 @@ mod tests { assert_eq!(config.timeslices, 3); assert_eq!(config.dimension(), 2); assert!(!config.simulate); + assert_eq!(config.output_csv, None); + assert_eq!(config.output_json, None); + } + + #[test] + fn topology_serializes_with_cli_vocabulary() { + let json = + serde_json::to_string(&CdtTopology::OpenBoundary).expect("topology should serialize"); + assert_eq!(json, "\"open-boundary\""); + + let topology: CdtTopology = + serde_json::from_str("\"toroidal\"").expect("topology should deserialize"); + assert_eq!(topology, CdtTopology::Toroidal); } #[test] @@ -1201,6 +1266,8 @@ cosmological_constant: Some(0.25), seed: Some(Some(99)), topology: Some(CdtTopology::Toroidal), + output_csv: Some(Some(PathBuf::from("measurements.csv"))), + output_json: Some(Some(PathBuf::from("summary.json"))), ..CdtConfigOverrides::default() }; @@ -1215,6 +1282,8 @@ 
assert_relative_eq!(merged.cosmological_constant, 0.25); assert_eq!(merged.seed, Some(99)); assert_eq!(merged.topology, CdtTopology::Toroidal); + assert_eq!(merged.output_csv, Some(PathBuf::from("measurements.csv"))); + assert_eq!(merged.output_json, Some(PathBuf::from("summary.json"))); } #[test] diff --git a/src/errors.rs b/src/errors.rs index c97709c..cc7307a 100644 --- a/src/errors.rs +++ b/src/errors.rs @@ -178,6 +178,60 @@ pub enum CdtError { /// MCMC framework error (e.g. NaN in log-probability) #[error("MCMC error: {0}")] Mcmc(String), + /// Writing CSV/JSON simulation output failed. + #[error("Failed to write {format} output to {path}: {detail}")] + OutputWriteFailed { + /// Target output path. + path: String, + /// Output format being written. + format: String, + /// Lower-level I/O or serialization error. + detail: String, + }, + /// Resolving a configured output path failed before writing began. + #[error("Failed to resolve output path from base {base_path}: {detail}")] + OutputPathResolutionFailed { + /// Base path used for resolving configured output paths. + base_path: String, + /// Lower-level path resolution error. + detail: String, + }, + /// Configured CSV and JSON output paths resolve to the same file. + #[error("CSV output path {csv_path} and JSON output path {json_path} resolve to the same file")] + OutputPathConflict { + /// Resolved CSV output path. + csv_path: String, + /// Resolved JSON output path. + json_path: String, + }, + /// Reading or decoding CSV/JSON simulation output failed. + #[error("Failed to read {format} output from {path}: {detail}")] + OutputReadFailed { + /// Source output path. + path: String, + /// Output format being read. + format: String, + /// Lower-level I/O or decoding error. + detail: String, + }, + /// Serializing or deserializing a CDT or MCMC checkpoint failed. + #[error("Failed to {operation} {target} checkpoint: {detail}")] + CheckpointSerializationFailed { + /// Checkpoint operation being attempted. 
+ operation: String, + /// Human-readable checkpoint target, such as "final triangulation". + target: String, + /// Lower-level serialization error. + detail: String, + }, + /// Restoring or continuing an MCMC checkpoint failed before sampling resumed. + #[error("Failed to resume MCMC checkpoint [{reason}]: {detail}")] + CheckpointResumeFailed { + /// Structured reason category for the resume failure. + reason: String, + /// Human-readable reason resume could not proceed. + detail: String, + }, } /// Keeps causality error formatting centralized so open and toroidal distances stay consistent. @@ -459,6 +513,137 @@ mod tests { ); } + #[test] + fn test_output_write_failed_error() { + let error = CdtError::OutputWriteFailed { + path: "measurements.csv".to_string(), + format: "CSV".to_string(), + detail: "permission denied".to_string(), + }; + let CdtError::OutputWriteFailed { + path, + format, + detail, + } = &error + else { + panic!("expected OutputWriteFailed variant"); + }; + assert_eq!(path, "measurements.csv"); + assert_eq!(format, "CSV"); + assert_eq!(detail, "permission denied"); + let display = format!("{error}"); + assert_eq!( + display, + "Failed to write CSV output to measurements.csv: permission denied" + ); + } + + #[test] + fn test_output_path_resolution_failed_error() { + let error = CdtError::OutputPathResolutionFailed { + base_path: ".".to_string(), + detail: "No such file or directory".to_string(), + }; + let CdtError::OutputPathResolutionFailed { base_path, detail } = &error else { + panic!("expected OutputPathResolutionFailed variant"); + }; + assert_eq!(base_path, "."); + assert_eq!(detail, "No such file or directory"); + let display = format!("{error}"); + assert_eq!( + display, + "Failed to resolve output path from base .: No such file or directory" + ); + } + + #[test] + fn test_output_path_conflict_error() { + let error = CdtError::OutputPathConflict { + csv_path: "output/results".to_string(), + json_path: "output/results".to_string(), + }; + 
let CdtError::OutputPathConflict { + csv_path, + json_path, + } = &error + else { + panic!("expected OutputPathConflict variant"); + }; + assert_eq!(csv_path, "output/results"); + assert_eq!(json_path, "output/results"); + assert_eq!( + format!("{error}"), + "CSV output path output/results and JSON output path output/results resolve to the same file" + ); + } + + #[test] + fn test_output_read_failed_error() { + let error = CdtError::OutputReadFailed { + path: "summary.json".to_string(), + format: "JSON".to_string(), + detail: "expected value at line 1 column 1".to_string(), + }; + let CdtError::OutputReadFailed { + path, + format, + detail, + } = &error + else { + panic!("expected OutputReadFailed variant"); + }; + assert_eq!(path, "summary.json"); + assert_eq!(format, "JSON"); + assert_eq!(detail, "expected value at line 1 column 1"); + let display = format!("{error}"); + assert_eq!( + display, + "Failed to read JSON output from summary.json: expected value at line 1 column 1" + ); + } + + #[test] + fn test_checkpoint_serialization_failed_error() { + let error = CdtError::CheckpointSerializationFailed { + operation: "deserialize".to_string(), + target: "final triangulation".to_string(), + detail: "missing field `geometry`".to_string(), + }; + let CdtError::CheckpointSerializationFailed { + operation, + target, + detail, + } = &error + else { + panic!("expected CheckpointSerializationFailed variant"); + }; + assert_eq!(operation, "deserialize"); + assert_eq!(target, "final triangulation"); + assert_eq!(detail, "missing field `geometry`"); + let display = format!("{error}"); + assert_eq!( + display, + "Failed to deserialize final triangulation checkpoint: missing field `geometry`" + ); + } + + #[test] + fn test_checkpoint_resume_failed_error() { + let error = CdtError::CheckpointResumeFailed { + reason: "incompatible temperature".to_string(), + detail: "temperature differs from checkpoint".to_string(), + }; + let CdtError::CheckpointResumeFailed { reason, detail } = 
&error else { + panic!("expected CheckpointResumeFailed variant"); + }; + assert_eq!(reason, "incompatible temperature"); + assert_eq!(detail, "temperature differs from checkpoint"); + assert_eq!( + format!("{error}"), + "Failed to resume MCMC checkpoint [incompatible temperature]: temperature differs from checkpoint" + ); + } + #[test] fn test_error_equality() { let error1 = CdtError::InvalidConfiguration { diff --git a/src/geometry/backends/delaunay.rs b/src/geometry/backends/delaunay.rs index dba908c..39f309e 100644 --- a/src/geometry/backends/delaunay.rs +++ b/src/geometry/backends/delaunay.rs @@ -16,15 +16,18 @@ use crate::geometry::traits::{ use delaunay::core::DataType; use delaunay::core::edge::EdgeKey; use delaunay::core::facet::FacetHandle; -use delaunay::core::tds::{CellKey, VertexKey}; +use delaunay::core::tds::{CellKey, Tds, VertexKey}; use delaunay::core::vertex::Vertex; use delaunay::geometry::kernel::AdaptiveKernel; use delaunay::geometry::point::Point; use delaunay::geometry::traits::coordinate::Coordinate; +use delaunay::prelude::TopologyGuarantee; use delaunay::prelude::VertexBuilder; use delaunay::prelude::triangulation::flips::BistellarFlips; -use delaunay::topology::traits::{GlobalTopology, TopologyKind}; +use delaunay::topology::traits::{GlobalTopology, TopologyKind, ToroidalConstructionMode}; use delaunay::triangulation::DelaunayTriangulation; +use serde::de::Error as DeError; +use serde::{Deserialize, Deserializer, Serialize, Serializer}; use std::collections::HashMap; type DelaunayKernel = AdaptiveKernel; @@ -40,6 +43,15 @@ type RawVertex = Vertex; /// are backed by the upstream Delaunay edit API where possible. `move_vertex()`, `clear()`, /// and `reserve_capacity()` are not yet implemented and return /// [`DelaunayError::NotImplemented`]. +/// +/// # Serialization +/// +/// Serde checkpoints store the upstream triangulation data structure plus its +/// global topology and topology-guarantee metadata. 
Deserialization rebuilds +/// transient backend caches, including the interior-facet lookup used for local +/// 2D edge queries. Toroidal topology checkpoints must contain finite, +/// strictly positive periods; invalid domains are rejected during +/// deserialization before a backend can observe them. #[derive(Debug, Clone)] pub struct DelaunayBackend { /// The underlying Delaunay triangulation from the delaunay crate @@ -48,6 +60,163 @@ pub struct DelaunayBackend, } +#[derive(Serialize, Deserialize)] +#[serde(bound( + serialize = "Tds: Serialize", + deserialize = "Tds: Deserialize<'de>" +))] +struct SerializedDelaunayBackend { + tds: Tds, + global_topology: SerializableGlobalTopology, + topology_guarantee: SerializableTopologyGuarantee, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +enum SerializableGlobalTopology { + Euclidean, + Toroidal { + domain: Vec, + mode: SerializableToroidalConstructionMode, + }, + Spherical, + Hyperbolic, +} + +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +enum SerializableToroidalConstructionMode { + Canonicalized, + PeriodicImagePoint, + Explicit, +} + +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +enum SerializableTopologyGuarantee { + Pseudomanifold, + PLManifold, + PLManifoldStrict, +} + +impl From> for SerializableGlobalTopology { + fn from(topology: GlobalTopology) -> Self { + match topology { + GlobalTopology::Euclidean => Self::Euclidean, + GlobalTopology::Toroidal { domain, mode } => Self::Toroidal { + domain: domain.to_vec(), + mode: mode.into(), + }, + GlobalTopology::Spherical => Self::Spherical, + GlobalTopology::Hyperbolic => Self::Hyperbolic, + } + } +} + +impl SerializableGlobalTopology { + fn into_global_topology(self) -> Result, E> { + match self { + Self::Euclidean => Ok(GlobalTopology::Euclidean), + Self::Toroidal { domain, mode } => { + let actual = domain.len(); + let domain: [f64; D] = domain.try_into().map_err(|_| { + E::custom(format!( + "toroidal domain length mismatch: got {actual}, 
expected {D}" + )) + })?; + for (index, period) in domain.iter().copied().enumerate() { + if !period.is_finite() || period <= 0.0 { + return Err(E::custom(format!( + "invalid toroidal period at index {index}: {period}" + ))); + } + } + Ok(GlobalTopology::Toroidal { + domain, + mode: mode.into(), + }) + } + Self::Spherical => Ok(GlobalTopology::Spherical), + Self::Hyperbolic => Ok(GlobalTopology::Hyperbolic), + } + } +} + +impl From for SerializableToroidalConstructionMode { + fn from(mode: ToroidalConstructionMode) -> Self { + match mode { + ToroidalConstructionMode::Canonicalized => Self::Canonicalized, + ToroidalConstructionMode::PeriodicImagePoint => Self::PeriodicImagePoint, + ToroidalConstructionMode::Explicit => Self::Explicit, + } + } +} + +impl From for ToroidalConstructionMode { + fn from(mode: SerializableToroidalConstructionMode) -> Self { + match mode { + SerializableToroidalConstructionMode::Canonicalized => Self::Canonicalized, + SerializableToroidalConstructionMode::PeriodicImagePoint => Self::PeriodicImagePoint, + SerializableToroidalConstructionMode::Explicit => Self::Explicit, + } + } +} + +impl From for SerializableTopologyGuarantee { + fn from(guarantee: TopologyGuarantee) -> Self { + match guarantee { + TopologyGuarantee::Pseudomanifold => Self::Pseudomanifold, + TopologyGuarantee::PLManifold => Self::PLManifold, + TopologyGuarantee::PLManifoldStrict => Self::PLManifoldStrict, + } + } +} + +impl From for TopologyGuarantee { + fn from(guarantee: SerializableTopologyGuarantee) -> Self { + match guarantee { + SerializableTopologyGuarantee::Pseudomanifold => Self::Pseudomanifold, + SerializableTopologyGuarantee::PLManifold => Self::PLManifold, + SerializableTopologyGuarantee::PLManifoldStrict => Self::PLManifoldStrict, + } + } +} + +impl Serialize + for DelaunayBackend +where + Tds: Serialize, +{ + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + SerializedDelaunayBackend { + tds: self.dt.tds().clone(), + global_topology: 
self.dt.global_topology().into(), + topology_guarantee: self.dt.topology_guarantee().into(), + } + .serialize(serializer) + } +} + +impl<'de, VertexData: DataType, CellData: DataType, const D: usize> Deserialize<'de> + for DelaunayBackend +where + Tds: Deserialize<'de>, +{ + fn deserialize(deserializer: DE) -> Result + where + DE: Deserializer<'de>, + { + let serialized = SerializedDelaunayBackend::deserialize(deserializer)?; + let mut dt = DelaunayTriangulation::from_tds_with_topology_guarantee( + serialized.tds, + AdaptiveKernel::new(), + serialized.topology_guarantee.into(), + ); + dt.set_global_topology(serialized.global_topology.into_global_topology()?); + Ok(Self::from_triangulation(dt)) + } +} + /// Opaque handle for vertices in Delaunay backend #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct DelaunayVertexHandle { @@ -1023,6 +1192,22 @@ mod tests { use super::*; + #[test] + fn toroidal_topology_deserialization_rejects_invalid_periods() { + for period in [f64::NAN, f64::INFINITY, 0.0, -1.0] { + let topology = SerializableGlobalTopology::Toroidal { + domain: vec![period, 1.0], + mode: SerializableToroidalConstructionMode::Explicit, + }; + + let error = topology + .into_global_topology::<2, serde::de::value::Error>() + .expect_err("invalid toroidal period should fail deserialization"); + + assert!(error.to_string().contains("invalid toroidal period")); + } + } + #[test] fn test_delaunay_mutation_error_messages_preserve_context() { let insertion = DelaunayError::InsertionFailed { diff --git a/src/lib.rs b/src/lib.rs index acb6302..c300cbe 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -19,12 +19,36 @@ //! - Metropolis-Hastings sampling over foliation-aware 2D ergodic moves //! - Volume-profile, Hausdorff-dimension, and spectral-dimension observables //! for CDT analysis +//! - CSV/JSON simulation output and resumable serde-backed CDT/MCMC checkpoints //! //! The crate root re-exports the most common construction, simulation, //! 
observable, and error types. Focused preludes under [`prelude`] provide //! smaller import surfaces for documentation, examples, integration tests, and //! benchmarks. //! +//! # Checkpointing +//! +//! CDT triangulations backed by [`geometry::DelaunayBackend2D`] serialize their +//! stable geometry, metadata, foliation, and simulation history while rebuilding +//! transient caches and timestamps on load. +//! +//! ``` +//! use causal_triangulations::prelude::triangulation::*; +//! use serde_json::{from_str, to_string}; +//! +//! fn main() -> CdtResult<()> { +//! let tri = CdtTriangulation::from_cdt_strip(4, 3)?; +//! let json = to_string(&tri).expect("serialize checkpoint"); +//! let restored: CdtTriangulation2D = from_str(&json).expect("deserialize checkpoint"); +//! restored.validate_topology()?; +//! restored.validate_foliation()?; +//! restored.validate_causality()?; +//! restored.validate_cell_classification()?; +//! assert_eq!(restored.slice_sizes(), &[4, 4, 4]); +//! Ok(()) +//! } +//! ``` +//! //! # Example //! //! 
``` @@ -108,9 +132,9 @@ pub mod cdt { // Re-exports for convenience pub use cdt::action::{ActionConfig, compute_regge_action}; pub use cdt::ergodic_moves::{ErgodicsSystem, MoveResult, MoveStatistics, MoveType}; -pub use cdt::foliation::{CellType, EdgeType, Foliation}; +pub use cdt::foliation::{CellType, EdgeType, Foliation, FoliationError}; pub use cdt::metropolis::{ - CdtProposal, CdtProposalError, CdtProposalInfo, CdtProposalPlan, CdtTarget, + CdtMcmcCheckpoint, CdtProposal, CdtProposalError, CdtProposalInfo, CdtProposalPlan, CdtTarget, MetropolisAlgorithm, MetropolisConfig, MonteCarloStep, }; pub use cdt::observables::{estimate_hausdorff_dimension, estimate_spectral_dimension}; @@ -119,6 +143,7 @@ pub use config::{CdtConfig, CdtTopology, TestConfig}; pub use errors::{CdtError, CdtResult}; use crate::util::saturating_usize_to_u32; +use std::env; use std::time::Duration; // Trait-based triangulation (recommended) @@ -152,6 +177,7 @@ pub mod prelude { // Action and simulation setup pub use crate::cdt::action::ActionConfig; pub use crate::cdt::metropolis::{MetropolisAlgorithm, MetropolisConfig}; + pub use crate::run_simulation; // Configuration and errors pub use crate::config::{CdtConfig, CdtTopology}; @@ -235,20 +261,22 @@ pub mod prelude { /// Focused exports for running CDT simulations. /// - /// This prelude includes the Metropolis runner, delayed proposal adapter, - /// telemetry structs, and typed proposal errors needed by MCMC workflows. + /// This prelude includes [`run_simulation`], the Metropolis runner, delayed + /// proposal adapter, telemetry structs, result containers, and typed + /// proposal errors needed by MCMC workflows. /// Observable estimators live in [`crate::prelude::observables`]. 
pub mod simulation { - pub use crate::CdtTriangulation; pub use crate::cdt::action::{ActionConfig, compute_regge_action}; pub use crate::cdt::ergodic_moves::MoveType; pub use crate::cdt::metropolis::{ - CdtProposal, CdtProposalError, CdtProposalInfo, CdtProposalPlan, CdtTarget, - MetropolisAlgorithm, MetropolisConfig, MonteCarloStep, + CdtMcmcCheckpoint, CdtProposal, CdtProposalError, CdtProposalInfo, CdtProposalPlan, + CdtTarget, MetropolisAlgorithm, MetropolisConfig, MonteCarloStep, }; pub use crate::cdt::results::{Measurement, SimulationResultsBackend}; pub use crate::config::{CdtConfig, CdtTopology}; pub use crate::errors::{CdtError, CdtResult}; + pub use crate::geometry::CdtTriangulation2D; + pub use crate::{CdtTriangulation, run_simulation}; } /// Focused exports for CDT observables and post-simulation analysis. @@ -358,6 +386,12 @@ pub mod prelude { /// a simulation dimension other than 2. /// Returns triangulation generation, topology, foliation, or Metropolis errors /// from the selected construction and simulation path. +/// If [`CdtConfig::output_csv`] or [`CdtConfig::output_json`] is set, returns +/// [`CdtError::OutputPathResolutionFailed`] if the current working directory +/// cannot be resolved. Returns [`CdtError::OutputPathConflict`] if CSV and JSON +/// outputs resolve to the same file. Returns [`CdtError::OutputWriteFailed`] if +/// the configured output file, parent directory creation, or JSON serialization +/// fails. 
/// /// # Examples /// @@ -422,7 +456,7 @@ pub fn run_simulation(config: &CdtConfig) -> CdtResult triangulation.face_count() ); - if config.simulate { + let results = if config.simulate { // Run full CDT simulation with MCMC backend let metropolis_config = config.to_metropolis_config(); let action_config = config.to_action_config(); @@ -437,7 +471,7 @@ pub fn run_simulation(config: &CdtConfig) -> CdtResult ); log::info!(" Average action: {:.3}", results.average_action()); - Ok(results) + results } else { // Just return basic simulation results with the triangulation let vertices = saturating_usize_to_u32(triangulation.vertex_count()); @@ -447,7 +481,7 @@ pub fn run_simulation(config: &CdtConfig) -> CdtResult .to_action_config() .calculate_action(vertices, edges, triangles); - Ok(SimulationResultsBackend { + SimulationResultsBackend { config: config.to_metropolis_config(), action_config: config.to_action_config(), move_stats: MoveStatistics::new(), @@ -462,14 +496,68 @@ pub fn run_simulation(config: &CdtConfig) -> CdtResult }], elapsed_time: Duration::from_millis(0), triangulation, - }) + } + }; + + write_configured_outputs(config, &results)?; + Ok(results) +} + +/// Writes configured result outputs after a run completes. 
+fn write_configured_outputs( + config: &CdtConfig, + results: &SimulationResultsBackend, +) -> CdtResult<()> { + if config.output_csv.is_none() && config.output_json.is_none() { + return Ok(()); } + + let base_dir = env::current_dir().map_err(|err| CdtError::OutputPathResolutionFailed { + base_path: ".".to_string(), + detail: err.to_string(), + })?; + + let resolved_csv = config + .output_csv + .as_ref() + .map(|path| CdtConfig::resolve_path(&base_dir, path)); + let resolved_json = config + .output_json + .as_ref() + .map(|path| CdtConfig::resolve_path(&base_dir, path)); + + if let (Some(csv_path), Some(json_path)) = (&resolved_csv, &resolved_json) + && csv_path == json_path + { + return Err(CdtError::OutputPathConflict { + csv_path: csv_path.display().to_string(), + json_path: json_path.display().to_string(), + }); + } + + if let Some(resolved) = resolved_csv { + results.write_measurements_csv(&resolved)?; + log::info!("Wrote measurement CSV to {}", resolved.display()); + } + + if let Some(resolved) = resolved_json { + results.write_summary_json(config, &resolved)?; + log::info!("Wrote simulation JSON summary to {}", resolved.display()); + } + + Ok(()) } #[cfg(test)] mod tests { use super::*; use approx::assert_relative_eq; + use serde_json::{Value, from_str}; + use std::env; + use std::fs; + use std::path::PathBuf; + use std::process; + use std::thread; fn create_test_config() -> CdtConfig { CdtConfig { @@ -486,9 +574,35 @@ mod tests { simulate: false, seed: Some(42), topology: CdtTopology::OpenBoundary, + output_csv: None, + output_json: None, } } + fn temp_output_path(name: &str) -> PathBuf { + let thread_name = safe_thread_name(); + env::temp_dir().join(format!( + "causal-triangulations-run-{name}-{}-{}", + process::id(), + thread_name + )) + } + + /// Returns the current test thread name with path separators and + /// reserved characters removed. 
+ fn safe_thread_name() -> String { + thread::current() + .name() + .unwrap_or("test") + .chars() + .map(|ch| match ch { + '<' | '>' | ':' | '"' | '/' | '\\' | '|' | '?' | '*' => '_', + ch if ch.is_control() => '_', + ch => ch, + }) + .collect() + } + #[test] fn test_run_simulation() { let config = create_test_config(); @@ -521,6 +635,51 @@ mod tests { assert!(results.triangulation.face_count() > 0); } + #[test] + fn run_simulation_writes_configured_outputs() { + let csv_path = temp_output_path("measurements.csv"); + let json_path = temp_output_path("summary.json"); + let mut config = create_test_config(); + config.output_csv = Some(csv_path.clone()); + config.output_json = Some(json_path.clone()); + + run_simulation(&config).expect("configured outputs should write"); + + let csv = fs::read_to_string(&csv_path).expect("CSV output should be readable"); + let json = fs::read_to_string(&json_path).expect("JSON output should be readable"); + fs::remove_file(&csv_path).expect("temporary CSV output should be removable"); + fs::remove_file(&json_path).expect("temporary JSON output should be removable"); + let parsed: Value = from_str(&json).expect("JSON output should parse"); + + assert!(csv.starts_with("step,action,vertices,edges,triangles,accepted,delta_action\n")); + assert_eq!(parsed["config"]["vertices"], config.vertices); + assert_eq!( + parsed["final_triangulation"]["time_slices"], + config.timeslices + ); + } + + #[test] + fn run_simulation_rejects_overlapping_output_paths() { + let path = temp_output_path("shared-output"); + let mut config = create_test_config(); + config.output_csv = Some(path.clone()); + config.output_json = Some(path.clone()); + + let error = run_simulation(&config).expect_err("overlapping outputs should fail"); + + let CdtError::OutputPathConflict { + csv_path, + json_path, + } = error + else { + panic!("expected output path conflict error"); + }; + assert_eq!(csv_path, path.display().to_string()); + assert_eq!(json_path, 
path.display().to_string()); + assert!(!path.exists()); + } + #[test] fn test_config_validation_invalid_measurement_frequency() { let mut config = create_test_config(); @@ -670,6 +829,8 @@ mod tests { simulate: false, seed: None, topology: CdtTopology::Toroidal, + output_csv: None, + output_json: None, }; let results = run_simulation(&config).expect("toroidal simulation should run"); diff --git a/tests/cli.rs b/tests/cli.rs index 887b9f4..eeb379c 100644 --- a/tests/cli.rs +++ b/tests/cli.rs @@ -7,7 +7,36 @@ use assert_cmd::prelude::*; use predicates::prelude::*; -use std::process::Command; +use serde_json::{Value, from_str}; +use std::env; +use std::fs; +use std::path::PathBuf; +use std::process::{self, Command}; +use std::thread; + +fn temp_output_dir(name: &str) -> PathBuf { + let thread_name = safe_thread_name(); + env::temp_dir().join(format!( + "causal-triangulations-cli-{name}-{}-{}", + process::id(), + thread_name + )) +} + +/// Returns the current test thread name with path separators and +/// reserved characters removed. +fn safe_thread_name() -> String { + thread::current() + .name() + .unwrap_or("test") + .chars() + .map(|ch| match ch { + '<' | '>' | ':' | '"' | '/' | '\\' | '|' | '?' 
| '*' => '_', + ch if ch.is_control() => '_', + ch => ch, + }) + .collect() +} #[test] fn exit_success() { @@ -121,6 +150,36 @@ fn cdt_cli_runs_simulation_with_real_moves() { cmd.assert().success(); } +#[test] +fn cdt_cli_writes_configured_outputs() { + let output_dir = temp_output_dir("outputs"); + let csv_path = output_dir.join("measurements.csv"); + let json_path = output_dir.join("summary.json"); + let mut cmd = Command::new(assert_cmd::cargo::cargo_bin!("cdt")); + + cmd.arg("--vertices").arg("12"); + cmd.arg("--timeslices").arg("3"); + cmd.arg("--steps").arg("4"); + cmd.arg("--thermalization-steps").arg("0"); + cmd.arg("--measurement-frequency").arg("1"); + cmd.arg("--seed").arg("13"); + cmd.arg("--simulate"); + cmd.arg("--output-csv").arg(&csv_path); + cmd.arg("--output-json").arg(&json_path); + cmd.env("RUST_LOG", "error"); + + cmd.assert().success(); + + let csv = fs::read_to_string(&csv_path).expect("CSV output should be readable"); + let json = fs::read_to_string(&json_path).expect("JSON output should be readable"); + let parsed: Value = from_str(&json).expect("summary should parse"); + fs::remove_dir_all(&output_dir).expect("temporary output directory should be removable"); + + assert!(csv.starts_with("step,action,vertices,edges,triangles,accepted,delta_action\n")); + assert_eq!(parsed["config"]["vertices"], 12); + assert_eq!(parsed["final_triangulation"]["time_slices"], 3); +} + #[test] fn cdt_cli_rejects_missing_post_thermalization_measurement() { let mut cmd = Command::new(assert_cmd::cargo::cargo_bin!("cdt")); diff --git a/tests/integration_tests.rs b/tests/integration_tests.rs index ebbdc44..e503a54 100644 --- a/tests/integration_tests.rs +++ b/tests/integration_tests.rs @@ -65,6 +65,9 @@ mod integration_tests { #[test] fn test_toroidal_metropolis_preserves_topology_after_many_accepted_moves() { + const STEPS: u32 = 200; + const MIN_ACCEPTED_MOVES: usize = 50; + let triangulation = CdtTriangulation::from_toroidal_cdt(8, 6).expect("build toroidal 
CDT"); assert_eq!(triangulation.metadata().topology, CdtTopology::Toroidal); assert_eq!(triangulation.geometry().euler_characteristic(), 0); @@ -75,7 +78,7 @@ mod integration_tests { .validate_foliation() .expect("initial toroidal foliation is valid"); - let config = MetropolisConfig::new(1.0, 200, 0, 10).with_seed(105); + let config = MetropolisConfig::new(1.0, STEPS, 0, 10).with_seed(105); let algorithm = MetropolisAlgorithm::new(config, ActionConfig::default()); let results = algorithm .run(triangulation) @@ -83,8 +86,8 @@ mod integration_tests { let accepted_moves = results.steps.iter().filter(|step| step.accepted).count(); assert!( - accepted_moves >= 100, - "expected at least 100 accepted toroidal moves, got {accepted_moves}" + accepted_moves >= MIN_ACCEPTED_MOVES, + "expected at least {MIN_ACCEPTED_MOVES} accepted toroidal moves, got {accepted_moves}" ); assert!( results.acceptance_rate() > 0.0,