diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3501d5a..abe892c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -146,6 +146,61 @@ jobs: env: PROPTEST_CASES: "1000" + # ── Mutation testing ──────────────────────────────────────────────── + mutants: + name: Mutation Testing + needs: [test] + runs-on: ubuntu-latest + continue-on-error: true + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@stable + - uses: Swatinem/rust-cache@v2 + - name: Install cargo-mutants + uses: taiki-e/install-action@v2 + with: + tool: cargo-mutants + - name: Run cargo-mutants on rivet-core + run: cargo mutants -p rivet-core --timeout 120 --jobs 4 --output mutants-out -- --lib + - name: Upload mutants report + if: always() + uses: actions/upload-artifact@v4 + with: + name: mutants-report + path: mutants-out/ + + # ── Fuzz testing (main only — too slow for PRs) ─────────────────── + fuzz: + name: Fuzz Testing + if: github.event_name == 'push' && github.ref == 'refs/heads/main' + runs-on: ubuntu-latest + continue-on-error: true + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@nightly + - uses: Swatinem/rust-cache@v2 + - name: Install cargo-fuzz + uses: taiki-e/install-action@v2 + with: + tool: cargo-fuzz + - name: Run fuzz targets (30s each) + run: | + if [ ! 
-d fuzz ]; then + echo "::notice::No fuzz directory found — skipping" + exit 0 + fi + cd fuzz + TARGETS=$(cargo +nightly fuzz list 2>/dev/null || true) + if [ -z "$TARGETS" ]; then + echo "::notice::No fuzz targets defined — skipping" + exit 0 + fi + for target in $TARGETS; do + echo "::group::Fuzzing $target" + cargo +nightly fuzz run "$target" -- -max_total_time=30 || true + echo "::endgroup::" + done + # ── Supply chain verification ─────────────────────────────────────── supply-chain: name: Supply Chain (cargo-vet) diff --git a/.gitignore b/.gitignore index a9aa515..8838a35 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,7 @@ /target/ +/fuzz/target/ +/fuzz/corpus/ +/fuzz/artifacts/ *.swp *.swo .DS_Store diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 22bd186..4384960 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -61,6 +61,40 @@ repos: files: '(Cargo\.toml|Cargo\.lock)$' stages: [pre-push] + # ── Dogfood validation ───────────────────────────────────── + - id: rivet-validate + name: rivet validate (dogfood) + entry: rivet validate --strict + language: system + pass_filenames: false + files: '(artifacts/.*\.yaml|schemas/.*\.yaml|safety/.*\.yaml|rivet\.yaml)$' + + # ── Commit-message traceability check ──────────────────── + - id: rivet-commit-msg + name: rivet commit-msg check + entry: rivet commit-msg-check + language: system + stages: [commit-msg] + always_run: true + + # ── Benchmarks (compile check only — not full run) ──────── + - id: cargo-bench-check + name: cargo bench --no-run + entry: cargo bench --no-run + language: system + types: [rust] + pass_filenames: false + stages: [pre-push] + + # ── Security: known vulnerabilities (RustSec advisory DB) ────── + - id: cargo-audit + name: cargo audit + entry: cargo audit + language: system + pass_filenames: false + files: '(Cargo\.toml|Cargo\.lock)$' + stages: [pre-push] + # ── Security: license compliance, bans, sources, advisories ──── - id: cargo-deny name: 
cargo deny check @@ -69,3 +103,12 @@ repos: pass_filenames: false files: '(Cargo\.toml|Cargo\.lock|deny\.toml)$' stages: [pre-push] + + # ── Mutation testing (pre-push, slow) ───────────────────── + - id: cargo-mutants + name: cargo mutants (smoke) + entry: bash -c 'cargo mutants --timeout 60 --jobs 4 -p rivet-core -- --lib 2>&1 | tail -5' + language: system + pass_filenames: false + stages: [pre-push] + verbose: true diff --git a/Cargo.lock b/Cargo.lock index 9dfb9a5..17ed407 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2775,17 +2775,18 @@ dependencies = [ [[package]] name = "spar-analysis" version = "0.1.0" -source = "git+https://github.com/pulseengine/spar.git?rev=21a5411#21a541180ba5efb9f37f1b9975468b2f475c3955" +source = "git+https://github.com/pulseengine/spar.git?rev=84a7363#84a73630986d194f548541fc86f6c98ef6d79de1" dependencies = [ "la-arena", "rustc-hash 2.1.1", + "serde", "spar-hir-def", ] [[package]] name = "spar-annex" version = "0.1.0" -source = "git+https://github.com/pulseengine/spar.git?rev=21a5411#21a541180ba5efb9f37f1b9975468b2f475c3955" +source = "git+https://github.com/pulseengine/spar.git?rev=84a7363#84a73630986d194f548541fc86f6c98ef6d79de1" dependencies = [ "rowan", "spar-syntax", @@ -2794,7 +2795,7 @@ dependencies = [ [[package]] name = "spar-base-db" version = "0.1.0" -source = "git+https://github.com/pulseengine/spar.git?rev=21a5411#21a541180ba5efb9f37f1b9975468b2f475c3955" +source = "git+https://github.com/pulseengine/spar.git?rev=84a7363#84a73630986d194f548541fc86f6c98ef6d79de1" dependencies = [ "rowan", "salsa", @@ -2805,9 +2806,10 @@ dependencies = [ [[package]] name = "spar-hir" version = "0.1.0" -source = "git+https://github.com/pulseengine/spar.git?rev=21a5411#21a541180ba5efb9f37f1b9975468b2f475c3955" +source = "git+https://github.com/pulseengine/spar.git?rev=84a7363#84a73630986d194f548541fc86f6c98ef6d79de1" dependencies = [ "salsa", + "serde", "smol_str", "spar-base-db", "spar-hir-def", @@ -2817,12 +2819,13 @@ dependencies = [ 
[[package]] name = "spar-hir-def" version = "0.1.0" -source = "git+https://github.com/pulseengine/spar.git?rev=21a5411#21a541180ba5efb9f37f1b9975468b2f475c3955" +source = "git+https://github.com/pulseengine/spar.git?rev=84a7363#84a73630986d194f548541fc86f6c98ef6d79de1" dependencies = [ "la-arena", "rowan", "rustc-hash 2.1.1", "salsa", + "serde", "smol_str", "spar-base-db", "spar-syntax", @@ -2831,7 +2834,7 @@ dependencies = [ [[package]] name = "spar-parser" version = "0.1.0" -source = "git+https://github.com/pulseengine/spar.git?rev=21a5411#21a541180ba5efb9f37f1b9975468b2f475c3955" +source = "git+https://github.com/pulseengine/spar.git?rev=84a7363#84a73630986d194f548541fc86f6c98ef6d79de1" dependencies = [ "rowan", ] @@ -2839,7 +2842,7 @@ dependencies = [ [[package]] name = "spar-syntax" version = "0.1.0" -source = "git+https://github.com/pulseengine/spar.git?rev=21a5411#21a541180ba5efb9f37f1b9975468b2f475c3955" +source = "git+https://github.com/pulseengine/spar.git?rev=84a7363#84a73630986d194f548541fc86f6c98ef6d79de1" dependencies = [ "rowan", "spar-parser", diff --git a/Cargo.toml b/Cargo.toml index 55b4c07..e35b7ea 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -41,7 +41,7 @@ tower-http = { version = "0.6", features = ["cors", "fs"] } urlencoding = "2" # XML (ReqIF) -quick-xml = { version = "0.37", features = ["serialize"] } +quick-xml = { version = "0.37", features = ["serialize", "overlapped-lists"] } # WASM component model wasmtime = { version = "42", features = ["component-model"] } @@ -51,5 +51,5 @@ wasmtime-wasi = "42" criterion = { version = "0.5", features = ["html_reports"] } # AADL (spar) — parser, HIR, analysis -spar-hir = { git = "https://github.com/pulseengine/spar.git", rev = "21a5411" } -spar-analysis = { git = "https://github.com/pulseengine/spar.git", rev = "21a5411" } +spar-hir = { git = "https://github.com/pulseengine/spar.git", rev = "84a7363" } +spar-analysis = { git = "https://github.com/pulseengine/spar.git", rev = "84a7363" } diff --git 
a/artifacts/decisions.yaml b/artifacts/decisions.yaml index 3c1bf54..693a7eb 100644 --- a/artifacts/decisions.yaml +++ b/artifacts/decisions.yaml @@ -48,7 +48,7 @@ artifacts: alternatives: > Custom adjacency list implementation. Rejected because graph algorithms are subtle and petgraph is well-proven. - source-ref: rivet-core/src/graph.rs:1 + source-ref: rivet-core/src/links.rs:1 - id: DD-003 type: design-decision @@ -227,3 +227,77 @@ artifacts: Keep old test terminology for backward compatibility. Rejected because the schema is pre-1.0 and alignment with the standard is more valuable than backward compatibility at this stage. + + - id: DD-011 + type: design-decision + title: Git trailers over inline regex for commit-artifact references + status: approved + description: > + Use standard git trailers (footer key-value pairs) for linking + commits to artifacts, rather than inline regex parsing of commit + message bodies (e.g., [FEAT-007] Jira-style). + tags: [architecture, git, traceability] + links: + - type: satisfies + target: REQ-017 + fields: + rationale: > + Git trailers are a well-supported standard, parseable via + git log --format='%(trailers)', git interpret-trailers, and + programmatic APIs. They separate traceability metadata from + the commit description. Inline regex is fragile and ambiguous + (brackets in code snippets, prose references). + alternatives: > + Inline regex parsing of [ARTIFACT-ID] patterns (Jira-style). + Rejected because regex is fragile and cannot distinguish + intentional references from incidental mentions. + + - id: DD-012 + type: design-decision + title: Runtime graph integration over materialized commit YAML + status: approved + description: > + Commit data is injected as ephemeral nodes into the petgraph link + graph at analysis time, rather than materializing commit artifacts + as YAML files on disk. 
+ tags: [architecture, git, traceability] + links: + - type: satisfies + target: REQ-017 + fields: + rationale: > + Git is the single source of truth for commit data. Materializing + commits to YAML creates a redundant data store that drifts from + git history. The link graph is already rebuilt from scratch on + each rivet invocation, so ephemeral commit nodes fit naturally. + alternatives: > + rivet sync-commits writing commit YAML files to a commits/ + directory. Rejected because it creates thousands of redundant + files and requires ongoing sync discipline. + + - id: DD-013 + type: design-decision + title: Dual opt-out for commit traceability enforcement + status: approved + description: > + Non-essential commits opt out of trailer requirements via two + mechanisms: conventional-commit type exemption (configurable list + of types like chore, style, ci, docs, build) and an explicit + Trace-skip trailer for edge cases. + tags: [architecture, git, traceability] + links: + - type: satisfies + target: REQ-018 + - type: satisfies + target: REQ-019 + fields: + rationale: > + Type-based exemption handles the 80% case (dependency bumps, + formatting, CI tweaks) with zero friction. The explicit skip + trailer handles edge cases where a normally-traced type (like + feat) genuinely has no artifact mapping, forcing developers to + consciously acknowledge the gap. + alternatives: > + No exemption mechanism (all commits must reference artifacts). + Rejected because it creates excessive friction for routine + maintenance commits that have no traceability value. diff --git a/artifacts/features.yaml b/artifacts/features.yaml index 36d7c1e..f1fce12 100644 --- a/artifacts/features.yaml +++ b/artifacts/features.yaml @@ -441,3 +441,76 @@ artifacts: target: REQ-007 fields: phase: phase-2 + + - id: FEAT-029 + type: feature + title: "rivet commit-msg-check subcommand" + status: draft + description: > + Pre-commit hook entry point that validates a single commit message + file. 
Parses conventional-commit type for exemption, checks for + skip trailer, extracts artifact IDs from git trailers, and + validates they exist in the artifact store. Provides fuzzy-match + suggestions on typos. + tags: [cli, git, traceability, phase-3] + links: + - type: satisfies + target: REQ-017 + - type: satisfies + target: REQ-018 + fields: + phase: phase-3 + + - id: FEAT-030 + type: feature + title: "rivet commits subcommand" + status: draft + description: > + History analysis command that parses git log trailers, classifies + commits (linked, orphan, exempt, broken-ref), and produces five + reports: linked commits, broken references, orphan commits, + artifact commit coverage, and unimplemented artifacts. Supports + --since, --range, --json, and --strict flags. + tags: [cli, git, traceability, phase-3] + links: + - type: satisfies + target: REQ-017 + - type: satisfies + target: REQ-019 + fields: + phase: phase-3 + + - id: FEAT-031 + type: feature + title: Configurable trailer-to-link-type mapping + status: draft + description: > + Configuration in rivet.yaml that maps git trailer keys (Implements, + Fixes, Verifies, Satisfies, Refs) to existing schema link types. + Includes exempt-types list, skip-trailer token, traced-paths for + orphan detection, and trace-exempt-artifacts whitelist. + tags: [config, git, traceability, phase-3] + links: + - type: satisfies + target: REQ-017 + fields: + phase: phase-3 + + - id: FEAT-032 + type: feature + title: Ephemeral commit node injection into link graph + status: draft + description: > + At analysis time, parsed commit data is injected as ephemeral + nodes into the petgraph link graph, wired to referenced artifacts + via the configured link types. Enables coverage computation, + reachability queries, and dashboard visualization without + materializing commit YAML files. 
+ tags: [core, git, traceability, phase-3] + links: + - type: satisfies + target: REQ-017 + - type: implements + target: DD-012 + fields: + phase: phase-3 diff --git a/artifacts/requirements.yaml b/artifacts/requirements.yaml index b3e2932..f9d45f5 100644 --- a/artifacts/requirements.yaml +++ b/artifacts/requirements.yaml @@ -208,3 +208,46 @@ artifacts: fields: priority: should category: functional + + - id: REQ-017 + type: requirement + title: Commit-to-artifact traceability + status: approved + description: > + The system must parse git commit trailers and link them to artifacts + at runtime, injecting ephemeral commit nodes into the link graph. + Must support configurable trailer-to-link-type mapping and produce + five report types: linked commits, broken references, orphan commits, + artifact commit coverage, and unimplemented artifacts. + tags: [core, traceability, git] + fields: + priority: must + category: functional + + - id: REQ-018 + type: requirement + title: Commit message validation at commit time + status: approved + description: > + The system must validate commit messages via a pre-commit hook, + ensuring non-exempt commits reference at least one valid artifact + ID in git trailers. Must support conventional-commit type exemptions + and an explicit skip trailer for opt-out. + tags: [core, validation, git] + fields: + priority: must + category: functional + + - id: REQ-019 + type: requirement + title: Orphan commit detection + status: approved + description: > + The system must identify commits that modify files in configured + traced paths without referencing any artifact. Must support + path-based configuration for traced vs. exempt directories and + an artifact whitelist for known-unimplemented items. 
+ tags: [core, traceability, git] + fields: + priority: must + category: functional diff --git a/docs/audit-report.md b/docs/audit-report.md new file mode 100644 index 0000000..6b46b38 --- /dev/null +++ b/docs/audit-report.md @@ -0,0 +1,274 @@ +--- +id: AUDIT-001 +type: report +title: Rivet Project Quality Audit Report +date: 2026-03-09 +status: current +--- + +# Rivet Project Quality Audit Report + +**Date:** 2026-03-09 +**Scope:** Source-ref integrity, test coverage, benchmarks, fuzz/mutation testing, traceability + +--- + +## 1. Source-Ref Link Integrity + +Audited all `source-ref` and `aadl-file` fields across 85 artifacts. + +| Metric | Count | +|--------|-------| +| Total source-refs | 20 | +| Valid | 19 | +| Fixed this audit | 1 | +| Implemented artifacts missing source-ref | 1 | + +**Fixed:** DD-002 referenced `rivet-core/src/graph.rs:1` (file renamed to `links.rs`). +Corrected to `rivet-core/src/links.rs:1`. + +**Missing:** ARCH-DASH-GRAPH ("Graph Visualizer — etch") has no `source-ref`. This is +an external dependency so no local source-ref applies. + +All other 19 source-refs resolve to existing files at correct line numbers. + +--- + +## 2. 
Test Coverage + +### 2.1 Test Inventory + +| Level | Tests | Framework | +|-------|-------|-----------| +| Unit tests | 61 | `#[test]` in-module | +| Integration tests | 77 | `rivet-core/tests/`, `rivet-cli/tests/` | +| Property-based | 6 | proptest (50 cases local, 1000 in CI) | +| Serve lint | 4 | Source-code structural invariants | +| Live server | 3 | HTTP integration with TcpStream | +| **Total** | **~151** | | + +### 2.2 Module Coverage Map + +| Module | Unit | Integration | Proptest | Benchmark | +|--------|:----:|:-----------:|:--------:|:---------:| +| schema.rs | — | 5 tests | 1 | 1 group | +| store.rs | — | 4 tests | 2 | 3 groups | +| links.rs | — | 2 tests | 1 | 1 group | +| validate.rs | — | 3 tests | 1 | 1 group | +| matrix.rs | — | 2 tests | — | 1 group | +| diff.rs | 5 | 4 tests | — | — | +| document.rs | 12 | 1 test | — | — | +| query.rs | — | 1 test | — | — | +| results.rs | 9 | — | — | — | +| reqif.rs | 3 | 2 tests | — | — | +| oslc.rs | 27 | 19 tests | — | — | +| coverage.rs | 4 | — | — | — | +| wasm_runtime.rs | 7 | — | — | — | +| adapter.rs | — | 3 tests | — | — | +| formats/* | — | 3 tests | — | — | +| serve.rs | — | 3+4 tests | — | — | +| CLI commands | — | 14 tests | — | — | + +### 2.3 Coverage Tooling + +- **Tool:** cargo-llvm-cov (LLVM source instrumentation, nightly) +- **CI gate:** 40% minimum line coverage (`--fail-under-lines 40`) +- **Codecov targets:** 60% project, 70% patch +- **Output:** LCOV + HTML report + +### 2.4 Gaps + +Modules with no unit tests (covered only by integration): +- `schema.rs`, `store.rs`, `links.rs`, `validate.rs`, `matrix.rs`, `query.rs` +- **Mitigated** by extensive integration + proptest coverage + +--- + +## 3. 
Performance Benchmarks + +### 3.1 Inventory + +| Group | Scales | Cases | +|-------|--------|-------| +| store_insert | 100/1K/10K | 3 | +| store_lookup | 100/1K/10K | 3 | +| store_by_type | 100/1K/10K | 3 | +| schema_load_and_merge | single | 1 | +| link_graph_build | 100/1K/10K | 3 | +| validate | 100/1K/10K | 3 | +| traceability_matrix | 100/1K/10K | 3 | +| **Total** | | **19** | + +### 3.2 KPI Targets + +| Operation | 10K artifacts | Target | +|-----------|---------------|--------| +| Store insert | 10,000 | < 10ms | +| Store lookup | 10,000 | < 5ms | +| Link graph build | 10,000 | < 50ms | +| Validation | 10,000 | < 100ms | +| Matrix computation | 10,000 | < 50ms | + +### 3.3 CI Integration + +- **Workflow:** `.github/workflows/benchmarks.yml` +- **Trigger:** Every push to main and every PR +- **Regression detection:** github-action-benchmark at 120% alert threshold +- **Results:** GitHub Pages historical tracking, PR comment on regression + +### 3.4 Gaps + +Modules without benchmarks (12 of 21): +- **High priority:** diff, query, adapter (import operations) +- **Medium:** reqif, document, coverage +- **Low:** wasm_runtime, oslc, results, formats/* + +--- + +## 4. Fuzz Testing + +**Status: NOT IMPLEMENTED** + +- No `fuzz/` directory or `fuzz_target!` macros +- No cargo-fuzz, libfuzzer, or AFL configuration +- No sanitizer configurations (ASAN/TSAN/UBSAN) + +### Recommended Fuzz Targets + +| Target | Rationale | +|--------|-----------| +| YAML artifact parsing | Untrusted input from user files | +| ReqIF XML import | Complex XML with spec-types/relations | +| Schema merge | Multiple schema files combined | +| Link graph construction | Arbitrary link topologies | +| Document frontmatter parsing | User-authored markdown | + +--- + +## 5. 
Mutation Testing + +**Status: NOT IMPLEMENTED** + +- No cargo-mutants configuration or CI job +- No mutants.toml + +### What We Have Instead + +| Tool | Purpose | +|------|---------| +| Miri | Undefined behavior detection (`-Zmiri-strict-provenance`) | +| Proptest | Property-based invariant testing (6 generators, 1000 cases in CI) | +| Clippy -D warnings | Static analysis gate | +| cargo-audit + cargo-deny | Security + license checks | +| cargo-vet | Supply chain verification | + +--- + +## 6. Traceability Audit + +### 6.1 Artifact Summary + +| Type | Count | Linked | Verified | +|------|-------|--------|----------| +| Requirements | 16 | 16/16 (100%) | — | +| Design Decisions | 10 | 10/10 (100%) | — | +| Features | 28 | 28/28 (100%) | 23/28 (82%) | +| Architecture | 21 | 21/21 (100%) | — | +| Tests | 10 | 10/10 (100%) | — | +| **Total** | **85** | **85/85** | | + +### 6.2 Link Integrity + +- **Broken links:** 0 +- **Orphan artifacts:** 0 +- **Total links:** ~70+ +- All link targets resolve to existing artifacts + +### 6.3 V-Model Chain Coverage + +**Complete chains (REQ → DD → FEAT → TEST):** 4/16 requirements (25%) +- REQ-001, REQ-002, REQ-004, REQ-007 + +**Partial chains:** 12/16 requirements +- Mostly missing DD or TEST for draft/phase-3 features +- Toolchain requirements (REQ-011/12/13) don't map to features by design + +### 6.4 Unverified Features (5) + +| Feature | Reason | +|---------|--------| +| FEAT-011 | OSLC sync — draft, phase-3 | +| FEAT-012 | WASM runtime — draft, phase-3 | +| FEAT-018+ | Phase-2/3 roadmap items | + +--- + +## 7. 
CI Quality Gates Summary + +| Gate | Tool | Status | +|------|------|--------| +| Format | cargo fmt | Active | +| Lint | clippy -D warnings | Active | +| YAML lint | yamllint | Active | +| Tests | cargo nextest (JUnit XML) | Active | +| Coverage | llvm-cov (40% threshold) | Active | +| Miri | -Zmiri-strict-provenance | Active | +| Proptest | 1000 cases per property | Active | +| Security audit | cargo-audit (RustSec) | Active | +| License/bans | cargo-deny | Active | +| Supply chain | cargo-vet | Active | +| MSRV | 1.89 | Active | +| Benchmarks | Criterion + regression alerts | Active | +| Fuzz testing | — | **Missing** | +| Mutation testing | — | **Missing** | +| Sanitizers | — | **Missing** | + +--- + +## 8. Recommendations + +### High Priority + +1. **Add fuzz targets** for YAML parsing, ReqIF import, schema merge, and + document frontmatter. These are untrusted-input boundaries. + +2. **Add cargo-mutants CI job** to measure test effectiveness. Start with + rivet-core modules that have the most logic: validate, links, schema. + +3. **Add benchmarks for diff and query** — these are user-facing operations + that could regress on large artifact sets. + +### Medium Priority + +4. **Fix remaining source-ref gap**: ARCH-DASH-GRAPH has no source-ref + (external dependency, document the exception). + +5. **Add unit tests** for schema.rs, store.rs, links.rs where integration + tests don't cover edge cases. + +6. **Raise coverage gate** from 40% to 60% as test suite matures. + +### Low Priority + +7. **Add sanitizer CI job** (ASAN) for memory safety verification alongside + Miri. + +8. **Extend Miri** to integration tests (currently lib-only). + +9. **Add benchmarks for reqif and adapter** import operations once those + modules stabilize. + +--- + +## 9. 
Quality Score + +| Dimension | Score | Notes | +|-----------|-------|-------| +| Source-ref integrity | 95% | 1 fixed, 1 N/A (external) | +| Test coverage breadth | 85% | All modules tested, some only via integration | +| Benchmark coverage | 55% | 5/12 benchmarkable modules covered | +| Fuzz testing | 0% | Not implemented | +| Mutation testing | 0% | Not implemented | +| Traceability | 95% | 0 broken links, 0 orphans, 82% feature verification | +| CI gates | 80% | 12/15 gates active | +| **Overall** | **73%** | Strong foundation, missing fuzz + mutation | diff --git a/docs/plans/2026-03-10-commit-traceability-design.md b/docs/plans/2026-03-10-commit-traceability-design.md new file mode 100644 index 0000000..9c62b1c --- /dev/null +++ b/docs/plans/2026-03-10-commit-traceability-design.md @@ -0,0 +1,203 @@ +# Commit-to-Artifact Traceability + +**Date:** 2026-03-10 +**Status:** Approved +**Approach:** Runtime graph integration (Approach B) + +## Problem + +Safety-critical traceability requires linking implementation evidence +(git commits) to requirements, features, and design decisions. Without +this link, auditors cannot verify that approved artifacts were actually +implemented, and orphan code changes escape traceability review. + +Current state: Rivet validates artifact-to-artifact links but has no +awareness of git history. Commits use conventional-commit format +(`feat:`, `fix:`) but carry no artifact references. + +## Design Decisions + +- **DD-011:** Git trailers over inline regex parsing. Trailers are a + git standard, machine-parseable via `git log --format='%(trailers)'`, + and separate traceability metadata from the commit description. + +- **DD-012:** Runtime graph integration over materialized YAML. Commit + nodes are injected ephemerally into the petgraph link graph at + analysis time. Git remains the single source of truth; no redundant + YAML files to drift. + +- **DD-013:** Dual opt-out mechanism. 
Conventional-commit type + exemption (configurable list: `chore`, `style`, `ci`, `docs`, `build`) + handles routine commits. Explicit `Trace: skip` trailer handles + edge cases where a normally-traced type has no artifact mapping. + +## Configuration + +New `commits` block in `rivet.yaml`: + +```yaml +commits: + # Parser format (only "trailers" initially, extensible) + format: trailers + + # Trailer key -> Rivet link type mapping + trailers: + Implements: implements + Fixes: fixes + Verifies: verifies + Satisfies: satisfies + Refs: traces-to + + # Conventional-commit types exempt from requiring trailers + exempt-types: + - chore + - style + - ci + - docs + - build + + # Explicit skip trailer + skip-trailer: "Trace: skip" + + # Paths where commits MUST reference artifacts (orphan detection) + traced-paths: + - rivet-core/src/ + - rivet-cli/src/ + + # Artifact IDs exempt from "unimplemented" checks (whitelist) + trace-exempt-artifacts: [] +``` + +The `trailers` map reuses existing schema link types. No new link +types are needed. + +## Commit Message Format + +Conventional commits with git trailers in the footer: + +``` +feat(oslc): add conflict detection for concurrent modifications + +Detects ETag mismatches during bidirectional sync and surfaces +conflicts to the developer before overwriting local changes. + +Implements: FEAT-012, FEAT-013 +Fixes: UCA-O-4 +``` + +Rules: +- Trailers are in the footer (after last blank line), standard git format +- Multiple artifact IDs per trailer: comma-separated +- Multiple trailer types per commit: allowed +- Conventional-commit type extracted from subject prefix (before `:`) + +## Components + +### 1. `rivet commit-msg-check` Subcommand + +Pre-commit hook entry point. Validates a single commit message file. 
+ +**Integration via `.pre-commit-config.yaml`:** + +```yaml +- repo: local + hooks: + - id: rivet-commit-msg + name: rivet commit-msg check + entry: rivet commit-msg-check + language: system + stages: [commit-msg] + always_run: true +``` + +**Validation flow:** +1. Parse subject line for conventional-commit type +2. If type in `exempt-types` -> pass +3. If `Trace: skip` trailer present -> pass (log info note) +4. Parse trailers, extract artifact IDs via configured `trailers` map +5. No artifact trailers found -> **fail** +6. Validate each artifact ID exists in current store -> **fail** on + unknown IDs with fuzzy-match suggestion + +Fast path: loads only the artifact index (IDs + types), not the full +link graph. + +### 2. `rivet commits` Subcommand + +History analysis with five report types. + +**Usage:** +```bash +rivet commits # all commits on current branch +rivet commits --since 2026-01-01 # from a date +rivet commits --range main..HEAD # specific git range +rivet commits --json # machine-readable output +rivet commits --strict # promote warnings to errors +``` + +**Runtime flow:** +1. Load artifact store and build link graph +2. Parse git log with trailers +3. Classify each commit: linked, orphan, exempt, broken-ref +4. Inject ephemeral commit nodes into link graph +5. Compute all five reports + +**Five reports:** + +| # | Report | Description | Severity | +|---|--------|-------------|----------| +| 1 | Linked commits | Commits with valid artifact trailers | Info | +| 2 | Broken references | Trailers referencing non-existent IDs | Error | +| 3 | Orphan commits | Non-exempt commits touching `traced-paths` without trailers | Warning | +| 4 | Artifact commit coverage | Per-artifact count of referencing commits | Info | +| 5 | Unimplemented artifacts | Artifacts with zero commits, not in `trace-exempt-artifacts` | Warning | + +**Exit code:** non-zero on errors (broken references). `--strict` +promotes warnings (orphans, unimplemented) to errors. + +### 3. 
Dashboard Integration
+
+Opt-in via `rivet serve --commits [--since DATE]`.
+
+- **Artifact detail view:** "Commits" section showing linked commits
+  (hash, date, author, subject, link type)
+- **Stats page:** commit coverage metrics alongside artifact stats
+- **Graph view:** commit nodes as distinct shape/color in petgraph
+  visualization
+
+Not in scope for v1: dedicated commits page, real-time git watching.
+
+## Dogfooding Artifacts
+
+New artifacts for Rivet's own tracking:
+
+**Requirements:**
+- REQ-017: Commit-to-artifact traceability
+- REQ-018: Commit validation at commit time
+- REQ-019: Orphan commit detection
+
+**Features:**
+- FEAT-029: `rivet commit-msg-check` subcommand
+- FEAT-030: `rivet commits` subcommand (5 report types)
+- FEAT-031: Configurable trailer-to-link-type mapping
+- FEAT-032: Ephemeral commit node injection into link graph
+
+**Design Decisions:**
+- DD-011: Git trailers over inline regex
+- DD-012: Runtime graph integration over materialized YAML
+- DD-013: Type exemption + skip trailer for opt-out
+
+## Traceability Chain
+
+```
+REQ-017 <-- satisfies -- FEAT-029, FEAT-030, FEAT-031, FEAT-032
+REQ-018 <-- satisfies -- FEAT-029
+REQ-019 <-- satisfies -- FEAT-030
+REQ-017 <-- satisfies -- DD-011, DD-012
+REQ-018 <-- satisfies -- DD-013
+REQ-019 <-- satisfies -- DD-013
+
+Commits implementing these features carry:
+  Implements: FEAT-029 (etc.)
+...closing the traceability loop on itself.
+```
diff --git a/docs/plans/2026-03-10-commit-traceability-plan.md b/docs/plans/2026-03-10-commit-traceability-plan.md
new file mode 100644
index 0000000..f1b5bba
--- /dev/null
+++ b/docs/plans/2026-03-10-commit-traceability-plan.md
@@ -0,0 +1,1403 @@
+# Commit-to-Artifact Traceability Implementation Plan
+
+> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
+ +**Goal:** Add git commit traceability to Rivet — parse commit trailers, validate commit messages, inject commit nodes into the link graph, and report on commit-artifact coverage. + +**Architecture:** New `commits` module in rivet-core for trailer parsing, commit classification, and graph injection. Two new CLI subcommands (`commit-msg-check`, `commits`). Config extension in `ProjectConfig`. Pre-commit hook entry in `.pre-commit-config.yaml`. + +**Tech Stack:** Rust, clap (CLI), petgraph (graph), `std::process::Command` (git log), serde (config parsing) + +--- + +### Task 1: Config — Add `CommitsConfig` to the data model + +**Files:** +- Modify: `rivet-core/src/model.rs` +- Test: `rivet-core/tests/commits_config.rs` (create) + +**Step 1: Write the failing test** + +Create `rivet-core/tests/commits_config.rs`: + +```rust +use rivet_core::model::ProjectConfig; + +#[test] +fn parse_commits_config_from_yaml() { + let yaml = r#" +project: + name: test + schemas: [common, dev] +sources: [] +commits: + format: trailers + trailers: + Implements: implements + Fixes: fixes + Verifies: verifies + exempt-types: [chore, style, ci, docs, build] + skip-trailer: "Trace: skip" + traced-paths: + - src/ + - lib/ + trace-exempt-artifacts: + - FEAT-099 +"#; + let config: ProjectConfig = serde_yaml::from_str(yaml).unwrap(); + let commits = config.commits.expect("commits block should parse"); + assert_eq!(commits.format, "trailers"); + assert_eq!(commits.trailers.len(), 3); + assert_eq!(commits.trailers.get("Implements").unwrap(), "implements"); + assert_eq!(commits.exempt_types, vec!["chore", "style", "ci", "docs", "build"]); + assert_eq!(commits.skip_trailer, "Trace: skip"); + assert_eq!(commits.traced_paths, vec!["src/", "lib/"]); + assert_eq!(commits.trace_exempt_artifacts, vec!["FEAT-099"]); +} + +#[test] +fn commits_config_optional() { + let yaml = r#" +project: + name: test + schemas: [common] +sources: [] +"#; + let config: ProjectConfig = serde_yaml::from_str(yaml).unwrap(); + 
 assert!(config.commits.is_none()); +} +``` + +**Step 2: Run test to verify it fails** + +Run: `cargo test --lib -p rivet-core --test commits_config` +Expected: FAIL — `CommitsConfig` type does not exist + +**Step 3: Write minimal implementation** + +Add to `rivet-core/src/model.rs` after `ProjectConfig`: + +```rust +/// Configuration for commit-to-artifact traceability. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CommitsConfig { + /// Parser format (only "trailers" supported initially). + #[serde(default = "default_format")] + pub format: String, + + /// Map of git trailer keys to Rivet link types. + #[serde(default)] + pub trailers: std::collections::BTreeMap<String, String>, + + /// Conventional-commit types exempt from requiring trailers. + #[serde(default, rename = "exempt-types")] + pub exempt_types: Vec<String>, + + /// Explicit skip trailer token (e.g., "Trace: skip"). + #[serde(default = "default_skip_trailer", rename = "skip-trailer")] + pub skip_trailer: String, + + /// Paths where commits MUST reference artifacts. + #[serde(default, rename = "traced-paths")] + pub traced_paths: Vec<String>, + + /// Artifact IDs exempt from "unimplemented" checks. 
+ #[serde(default, rename = "trace-exempt-artifacts")] + pub trace_exempt_artifacts: Vec<String>, +} + +fn default_format() -> String { "trailers".into() } +fn default_skip_trailer() -> String { "Trace: skip".into() } +``` + +Add the `commits` field to `ProjectConfig`: + +```rust +pub struct ProjectConfig { + pub project: ProjectMetadata, + #[serde(default)] + pub sources: Vec, + #[serde(default)] + pub docs: Vec, + #[serde(default)] + pub results: Option, + #[serde(default)] // <-- ADD THIS + pub commits: Option<CommitsConfig>, // <-- ADD THIS +} +``` + +**Step 4: Run test to verify it passes** + +Run: `cargo test --lib -p rivet-core --test commits_config` +Expected: PASS + +**Step 5: Commit** + +```bash +git add rivet-core/src/model.rs rivet-core/tests/commits_config.rs +git commit -m "feat: add CommitsConfig to project data model + +Implements: FEAT-031 +Trace: skip" +``` + +Note: Use `Trace: skip` on this first commit since `commit-msg-check` isn't implemented yet. + +--- + +### Task 2: Core — Commit trailer parser + +**Files:** +- Create: `rivet-core/src/commits.rs` +- Modify: `rivet-core/src/lib.rs` (add `pub mod commits;`) +- Test: inline `#[cfg(test)]` in `commits.rs` + +**Step 1: Write the failing test** + +Create `rivet-core/src/commits.rs` with tests first: + +```rust +//! Git commit trailer parsing and commit-artifact traceability. + +use std::collections::BTreeMap; + +/// A parsed git commit with extracted trailer information. +#[derive(Debug, Clone)] +pub struct ParsedCommit { + pub hash: String, + pub subject: String, + pub body: String, + pub author: String, + pub date: String, + /// Conventional-commit type (e.g., "feat", "fix", "chore"). + pub commit_type: Option<String>, + /// Extracted artifact references: link_type -> vec of artifact IDs. + pub artifact_refs: BTreeMap<String, Vec<String>>, + /// Files changed by this commit. + pub changed_files: Vec<String>, + /// Whether this commit has the skip trailer. + pub has_skip_trailer: bool, +} + +/// Classification of a commit's traceability status. 
+#[derive(Debug, Clone, PartialEq, Eq)] +pub enum CommitClass { + /// Has valid artifact trailers. + Linked, + /// Has trailers referencing non-existent artifact IDs. + BrokenRef, + /// Non-exempt commit touching traced paths without trailers. + Orphan, + /// Exempt by commit type or skip trailer. + Exempt, +} + +/// Parse the conventional-commit type from a subject line. +/// +/// Returns `Some("feat")` for "feat(scope): subject" or "feat: subject". +pub fn parse_commit_type(subject: &str) -> Option<String> { + let before_colon = subject.split(':').next()?; + // Strip optional scope: "feat(oslc)" -> "feat" + let type_str = before_colon.split('(').next()?; + let trimmed = type_str.trim(); + if trimmed.is_empty() || trimmed.contains(' ') { + return None; + } + Some(trimmed.to_lowercase()) +} + +/// Parse git trailers from a commit message body. +/// +/// Trailers are `Key: value` lines in the last paragraph of the message. +/// Returns a map of trailer key -> list of values. +pub fn parse_trailers(message: &str) -> BTreeMap<String, Vec<String>> { + let mut trailers = BTreeMap::new(); + + // Trailers are in the last paragraph (after the last blank line) + let paragraphs: Vec<&str> = message.split("\n\n").collect(); + let last_para = match paragraphs.last() { + Some(p) => p.trim(), + None => return trailers, + }; + + for line in last_para.lines() { + let line = line.trim(); + if let Some((key, value)) = line.split_once(':') { + let key = key.trim(); + let value = value.trim(); + if !key.is_empty() && !key.contains(' ') && !value.is_empty() { + trailers + .entry(key.to_string()) + .or_insert_with(Vec::new) + .push(value.to_string()); + } + } + } + trailers +} + +/// Extract artifact IDs from a trailer value. 
+/// +/// Supports comma-separated IDs: "FEAT-012, FEAT-013" -> ["FEAT-012", "FEAT-013"] +pub fn extract_artifact_ids(value: &str) -> Vec<String> { + value + .split(',') + .map(|s| s.trim().to_string()) + .filter(|s| !s.is_empty()) + .collect() +} + +/// Parse a commit message and extract artifact references using the configured trailers map. +pub fn parse_commit_message( + message: &str, + trailer_map: &BTreeMap<String, String>, + skip_trailer: &str, +) -> (BTreeMap<String, Vec<String>>, bool) { + let trailers = parse_trailers(message); + let mut artifact_refs: BTreeMap<String, Vec<String>> = BTreeMap::new(); + + // Check for skip trailer (e.g., "Trace: skip") + let has_skip = if let Some((skip_key, skip_val)) = skip_trailer.split_once(':') { + trailers.get(skip_key.trim()).map_or(false, |vals| { + vals.iter().any(|v| v.trim().eq_ignore_ascii_case(skip_val.trim())) + }) + } else { + false + }; + + // Extract artifact IDs from configured trailers + for (trailer_key, link_type) in trailer_map { + if let Some(values) = trailers.get(trailer_key) { + for value in values { + let ids = extract_artifact_ids(value); + artifact_refs + .entry(link_type.clone()) + .or_default() + .extend(ids); + } + } + } + + (artifact_refs, has_skip) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn parse_commit_type_feat() { + assert_eq!(parse_commit_type("feat: add thing"), Some("feat".into())); + } + + #[test] + fn parse_commit_type_with_scope() { + assert_eq!(parse_commit_type("feat(oslc): add sync"), Some("feat".into())); + } + + #[test] + fn parse_commit_type_fix() { + assert_eq!(parse_commit_type("fix: broken link"), Some("fix".into())); + } + + #[test] + fn parse_commit_type_none_for_plain() { + assert_eq!(parse_commit_type("Initial commit"), None); + } + + #[test] + fn parse_commit_type_none_for_merge() { + assert_eq!(parse_commit_type("Merge branch 'main'"), None); + } + + #[test] + fn parse_trailers_basic() { + let msg = "feat: add thing\n\nSome body text.\n\nImplements: FEAT-007\nFixes: UCA-O-4"; + let trailers = 
parse_trailers(msg); + assert_eq!(trailers.get("Implements").unwrap(), &vec!["FEAT-007".to_string()]); + assert_eq!(trailers.get("Fixes").unwrap(), &vec!["UCA-O-4".to_string()]); + } + + #[test] + fn parse_trailers_comma_separated() { + let msg = "feat: thing\n\nImplements: FEAT-012, FEAT-013"; + let trailers = parse_trailers(msg); + assert_eq!(trailers.get("Implements").unwrap(), &vec!["FEAT-012, FEAT-013".to_string()]); + } + + #[test] + fn extract_ids_comma_separated() { + let ids = extract_artifact_ids("FEAT-012, FEAT-013"); + assert_eq!(ids, vec!["FEAT-012", "FEAT-013"]); + } + + #[test] + fn extract_ids_single() { + let ids = extract_artifact_ids("REQ-001"); + assert_eq!(ids, vec!["REQ-001"]); + } + + #[test] + fn parse_commit_message_full() { + let msg = "feat(oslc): add conflict detection\n\n\ + Body paragraph.\n\n\ + Implements: FEAT-012, FEAT-013\n\ + Fixes: UCA-O-4"; + let mut trailer_map = BTreeMap::new(); + trailer_map.insert("Implements".into(), "implements".into()); + trailer_map.insert("Fixes".into(), "fixes".into()); + + let (refs, skip) = parse_commit_message(msg, &trailer_map, "Trace: skip"); + assert!(!skip); + assert_eq!(refs.get("implements").unwrap(), &vec!["FEAT-012", "FEAT-013"]); + assert_eq!(refs.get("fixes").unwrap(), &vec!["UCA-O-4"]); + } + + #[test] + fn parse_commit_message_skip_trailer() { + let msg = "chore: bump deps\n\nTrace: skip"; + let trailer_map = BTreeMap::new(); + let (refs, skip) = parse_commit_message(msg, &trailer_map, "Trace: skip"); + assert!(skip); + assert!(refs.is_empty()); + } + + #[test] + fn parse_commit_message_no_trailers() { + let msg = "feat: quick fix"; + let trailer_map = BTreeMap::new(); + let (refs, skip) = parse_commit_message(msg, &trailer_map, "Trace: skip"); + assert!(!skip); + assert!(refs.is_empty()); + } +} +``` + +**Step 2: Register the module** + +Add to `rivet-core/src/lib.rs`: + +```rust +pub mod commits; +``` + +**Step 3: Run tests to verify they pass** + +Run: `cargo test -p rivet-core 
commits` +Expected: all 9 tests PASS + +**Step 4: Commit** + +```bash +git add rivet-core/src/commits.rs rivet-core/src/lib.rs +git commit -m "feat: add commit trailer parser module + +Implements: FEAT-031 +Trace: skip" +``` + +--- + +### Task 3: Core — Git log integration + +**Files:** +- Modify: `rivet-core/src/commits.rs` (add git log functions) + +**Step 1: Write the failing test** + +Add to `rivet-core/src/commits.rs` tests module: + +```rust +#[test] +fn parse_git_log_entry() { + let raw = "abc1234\n\ + John Doe\n\ + 2026-03-10\n\ + feat: add conflict detection\n\ + \n\ + Body text here.\n\ + \n\ + Implements: FEAT-012\n\ + ---FILES---\n\ + rivet-core/src/links.rs\n\ + rivet-core/src/validate.rs"; + + let mut trailer_map = BTreeMap::new(); + trailer_map.insert("Implements".into(), "implements".into()); + + let commit = parse_git_log_entry(raw, &trailer_map, "Trace: skip").unwrap(); + assert_eq!(commit.hash, "abc1234"); + assert_eq!(commit.author, "John Doe"); + assert_eq!(commit.commit_type, Some("feat".into())); + assert_eq!(commit.artifact_refs.get("implements").unwrap(), &vec!["FEAT-012"]); + assert_eq!(commit.changed_files, vec!["rivet-core/src/links.rs", "rivet-core/src/validate.rs"]); +} +``` + +**Step 2: Implement git log entry parser** + +Add to `rivet-core/src/commits.rs`: + +```rust +/// Separator between commit message and file list in our git log format. +const FILES_SEPARATOR: &str = "---FILES---"; + +/// Parse a single git log entry in our custom format. 
+/// +/// Format (produced by git log --format): +/// ```text +/// <hash> +/// <author> +/// <date> +/// <subject> +/// <blank line> +/// <body lines…> +/// ---FILES--- +/// <changed file> +/// <changed file> +/// ``` +pub fn parse_git_log_entry( + raw: &str, + trailer_map: &BTreeMap<String, String>, + skip_trailer: &str, +) -> Option<ParsedCommit> { + let (message_part, files_part) = raw.split_once(FILES_SEPARATOR)?; + + let mut lines = message_part.lines(); + let hash = lines.next()?.trim().to_string(); + let author = lines.next()?.trim().to_string(); + let date = lines.next()?.trim().to_string(); + let subject = lines.next()?.trim().to_string(); + + // Rest is the body (skip the blank line after subject) + let body: String = lines + .skip_while(|l| l.trim().is_empty()) + .collect::<Vec<_>>() + .join("\n"); + + let commit_type = parse_commit_type(&subject); + + // Build full message for trailer parsing (subject + body) + let full_message = if body.is_empty() { + subject.clone() + } else { + format!("{subject}\n\n{body}") + }; + + let (artifact_refs, has_skip_trailer) = + parse_commit_message(&full_message, trailer_map, skip_trailer); + + let changed_files: Vec<String> = files_part + .lines() + .map(|l| l.trim().to_string()) + .filter(|l| !l.is_empty()) + .collect(); + + Some(ParsedCommit { + hash, + subject, + body, + author, + date, + commit_type, + artifact_refs, + changed_files, + has_skip_trailer, + }) +} + +/// Shell out to git and retrieve parsed commits. +/// +/// `range` can be "main..HEAD", "--all", or a since date. 
+pub fn git_log_commits( + repo_path: &std::path::Path, + range: &str, + trailer_map: &BTreeMap<String, String>, + skip_trailer: &str, +) -> Result<Vec<ParsedCommit>, crate::error::Error> { + use std::process::Command as ProcessCommand; + + let entry_separator = "---ENTRY---"; + let format = format!( + "{}%n%H%n%an%n%ai%n%s%n%n%b{}", + entry_separator, FILES_SEPARATOR + ); + + let mut cmd = ProcessCommand::new("git"); + cmd.current_dir(repo_path) + .arg("log") + .arg(format!("--format={format}")) + .arg("--name-only") + .arg(range); + + let output = cmd + .output() + .map_err(|e| crate::error::Error::Io(format!("git log: {e}")))?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + return Err(crate::error::Error::Io(format!("git log failed: {stderr}"))); + } + + let stdout = String::from_utf8_lossy(&output.stdout); + let commits: Vec<ParsedCommit> = stdout + .split(entry_separator) + .filter(|s| !s.trim().is_empty()) + .filter_map(|entry| parse_git_log_entry(entry.trim(), trailer_map, skip_trailer)) + .collect(); + + Ok(commits) +} +``` + +**Step 3: Run tests** + +Run: `cargo test -p rivet-core commits` +Expected: all tests PASS (new test included) + +**Step 4: Commit** + +```bash +git add rivet-core/src/commits.rs +git commit -m "feat: add git log parsing for commit traceability + +Implements: FEAT-032 +Trace: skip" +``` + +--- + +### Task 4: Core — Commit classification and analysis + +**Files:** +- Modify: `rivet-core/src/commits.rs` (add classify + analyze functions) + +**Step 1: Write the failing test** + +Add to the tests module: + +```rust +#[test] +fn classify_linked_commit() { + let mut refs = BTreeMap::new(); + refs.insert("implements".into(), vec!["FEAT-007".into()]); + let known_ids: std::collections::HashSet<String> = + ["FEAT-007".into()].into_iter().collect(); + + let class = classify_commit_refs(&refs, &known_ids); + assert_eq!(class, CommitClass::Linked); +} + +#[test] +fn classify_broken_ref() { + let mut refs = BTreeMap::new(); + refs.insert("implements".into(), 
 vec!["FEAT-999".into()]); + let known_ids: std::collections::HashSet<String> = + ["FEAT-007".into()].into_iter().collect(); + + let class = classify_commit_refs(&refs, &known_ids); + assert_eq!(class, CommitClass::BrokenRef); +} + +#[test] +fn classify_no_refs_returns_none() { + let refs = BTreeMap::new(); + let known_ids: std::collections::HashSet<String> = Default::default(); + // classify_commit_refs only checks ref validity; orphan detection is separate + let class = classify_commit_refs(&refs, &known_ids); + assert_eq!(class, CommitClass::Orphan); +} +``` + +**Step 2: Implement classification** + +Add to `rivet-core/src/commits.rs`: + +```rust +use std::collections::HashSet; + +/// Classify a commit based on its artifact references. +/// +/// - If refs is empty → Orphan (caller must check exempt status separately) +/// - If all referenced IDs exist → Linked +/// - If any referenced ID is missing → BrokenRef +pub fn classify_commit_refs( + artifact_refs: &BTreeMap<String, Vec<String>>, + known_ids: &HashSet<String>, +) -> CommitClass { + let all_ids: Vec<&String> = artifact_refs.values().flatten().collect(); + + if all_ids.is_empty() { + return CommitClass::Orphan; + } + + let has_broken = all_ids.iter().any(|id| !known_ids.contains(id.as_str())); + if has_broken { + CommitClass::BrokenRef + } else { + CommitClass::Linked + } +} + +/// Check whether a commit is exempt from trailer requirements. +pub fn is_exempt(commit: &ParsedCommit, exempt_types: &[String]) -> bool { + if commit.has_skip_trailer { + return true; + } + if let Some(ref ct) = commit.commit_type { + return exempt_types.iter().any(|e| e == ct); + } + false +} + +/// Check whether a commit touches any traced path. +pub fn touches_traced_path(changed_files: &[String], traced_paths: &[String]) -> bool { + changed_files + .iter() + .any(|f| traced_paths.iter().any(|p| f.starts_with(p))) +} + +/// A broken reference found in a commit trailer. 
+#[derive(Debug, Clone)] +pub struct BrokenRef { + pub commit_hash: String, + pub commit_subject: String, + pub trailer_key: String, + pub artifact_id: String, +} + +/// Result of analyzing git commit history against the artifact store. +#[derive(Debug, Default)] +pub struct CommitAnalysis { + pub linked: Vec<ParsedCommit>, + pub broken_refs: Vec<BrokenRef>, + pub orphans: Vec<ParsedCommit>, + pub exempt: Vec<ParsedCommit>, + /// Artifact ID -> list of (commit_hash, link_type). + pub artifact_coverage: BTreeMap<String, Vec<(String, String)>>, + /// Artifact IDs with zero commit references (minus exempted ones). + pub unimplemented: Vec<String>, +} + +/// Analyze a set of parsed commits against the artifact store. +pub fn analyze_commits( + commits: Vec<ParsedCommit>, + known_ids: &HashSet<String>, + exempt_types: &[String], + traced_paths: &[String], + trace_exempt_artifacts: &[String], + trailer_map: &BTreeMap<String, String>, +) -> CommitAnalysis { + let mut analysis = CommitAnalysis::default(); + + for commit in commits { + // Check exemption first + if is_exempt(&commit, exempt_types) { + analysis.exempt.push(commit); + continue; + } + + let class = classify_commit_refs(&commit.artifact_refs, known_ids); + + match class { + CommitClass::Linked => { + // Record artifact coverage + for (link_type, ids) in &commit.artifact_refs { + for id in ids { + analysis + .artifact_coverage + .entry(id.clone()) + .or_default() + .push((commit.hash.clone(), link_type.clone())); + } + } + analysis.linked.push(commit); + } + CommitClass::BrokenRef => { + // Find which specific IDs are broken + for (link_type, ids) in &commit.artifact_refs { + // Reverse-lookup the trailer key from link type + let trailer_key = trailer_map + .iter() + .find(|(_, v)| *v == link_type) + .map(|(k, _)| k.clone()) + .unwrap_or_else(|| link_type.clone()); + + for id in ids { + if !known_ids.contains(id.as_str()) { + analysis.broken_refs.push(BrokenRef { + commit_hash: commit.hash.clone(), + commit_subject: commit.subject.clone(), + trailer_key: trailer_key.clone(), + artifact_id: id.clone(), + }); + } else { + // Still record 
coverage for valid refs in the same commit + analysis + .artifact_coverage + .entry(id.clone()) + .or_default() + .push((commit.hash.clone(), link_type.clone())); + } + } + } + analysis.linked.push(commit); // still partially linked + } + CommitClass::Orphan => { + if touches_traced_path(&commit.changed_files, traced_paths) { + analysis.orphans.push(commit); + } else { + // Touches only non-traced paths — treat as exempt + analysis.exempt.push(commit); + } + } + CommitClass::Exempt => unreachable!("exempt handled above"), + } + } + + // Compute unimplemented artifacts + let exempt_set: HashSet<&str> = trace_exempt_artifacts.iter().map(|s| s.as_str()).collect(); + for id in known_ids { + if !analysis.artifact_coverage.contains_key(id) && !exempt_set.contains(id.as_str()) { + analysis.unimplemented.push(id.clone()); + } + } + analysis.unimplemented.sort(); + + analysis +} +``` + +**Step 3: Run tests** + +Run: `cargo test -p rivet-core commits` +Expected: all tests PASS + +**Step 4: Commit** + +```bash +git add rivet-core/src/commits.rs +git commit -m "feat: add commit classification and analysis engine + +Implements: FEAT-030, FEAT-032 +Trace: skip" +``` + +--- + +### Task 5: CLI — `commit-msg-check` subcommand + +**Files:** +- Modify: `rivet-cli/src/main.rs` (add Command variant + handler) + +**Step 1: Add the Command variant** + +In the `Command` enum, add before the closing brace: + +```rust + /// Validate a commit message for artifact trailers (pre-commit hook) + CommitMsgCheck { + /// Path to the commit message file + file: PathBuf, + }, +``` + +**Step 2: Add the handler in `run()`** + +In the early-return section of `run()` (before the `match`), add: + +```rust + if let Command::CommitMsgCheck { file } = &cli.command { + return cmd_commit_msg_check(&cli, file); + } +``` + +Add the `Command::CommitMsgCheck { .. }` to the unreachable match arm. 
+ +**Step 3: Implement `cmd_commit_msg_check`** + +```rust +fn cmd_commit_msg_check(cli: &Cli, msg_file: &Path) -> Result { + use rivet_core::commits; + + let msg = std::fs::read_to_string(msg_file) + .context("failed to read commit message file")?; + + // Strip comment lines (git includes them in commit-msg hook) + let msg: String = msg + .lines() + .filter(|l| !l.starts_with('#')) + .collect::>() + .join("\n"); + + let subject = msg.lines().next().unwrap_or("").trim(); + if subject.is_empty() { + // Empty commit message — let git handle it + return Ok(true); + } + + // Try to load project config for commits settings + let config_path = cli.project.join("rivet.yaml"); + let commits_cfg = if config_path.exists() { + let config = rivet_core::load_project_config(&config_path) + .context("failed to load rivet.yaml")?; + config.commits + } else { + None + }; + + let commits_cfg = match commits_cfg { + Some(cfg) => cfg, + None => { + // No commits config — nothing to enforce + return Ok(true); + } + }; + + // Check exempt type + let commit_type = commits::parse_commit_type(subject); + if let Some(ref ct) = commit_type { + if commits_cfg.exempt_types.iter().any(|e| e == ct) { + return Ok(true); + } + } + + // Parse trailers + let (artifact_refs, has_skip) = + commits::parse_commit_message(&msg, &commits_cfg.trailers, &commits_cfg.skip_trailer); + + if has_skip { + eprintln!("info: commit marked as Trace: skip"); + return Ok(true); + } + + // Check for artifact references + let all_ids: Vec<&String> = artifact_refs.values().flatten().collect(); + if all_ids.is_empty() { + eprintln!( + "error: non-exempt commit must reference at least one artifact.\n\ + Add a trailer (e.g., 'Implements: FEAT-007') or '{}' to bypass.", + commits_cfg.skip_trailer + ); + return Ok(false); + } + + // Validate artifact IDs exist + let project_dir = &cli.project; + let schemas_dir = cli.schemas.clone().unwrap_or_else(|| project_dir.join("schemas")); + let config = 
rivet_core::load_project_config(&config_path)?; + let schema = rivet_core::load_schemas(&config.project.schemas, &schemas_dir)?; + let mut store = rivet_core::store::Store::new(); + for source in &config.sources { + let artifacts = rivet_core::load_artifacts(source, project_dir)?; + for artifact in artifacts { + store.upsert(artifact); + } + } + + let mut has_error = false; + for id in &all_ids { + if !store.contains(id) { + // Find closest match for suggestion + let suggestion = store + .iter() + .map(|a| &a.id) + .filter(|aid| { + aid.starts_with(&id[..id.len().min(3).max(1)]) + }) + .min_by_key(|aid| levenshtein(aid, id)) + .map(|s| format!(" Did you mean '{s}'?")) + .unwrap_or_default(); + + eprintln!("error: unknown artifact ID '{id}' in commit trailer.{suggestion}"); + has_error = true; + } + } + + Ok(!has_error) +} + +/// Simple Levenshtein distance for fuzzy matching. +fn levenshtein(a: &str, b: &str) -> usize { + let a: Vec = a.chars().collect(); + let b: Vec = b.chars().collect(); + let mut matrix = vec![vec![0usize; b.len() + 1]; a.len() + 1]; + + for i in 0..=a.len() { matrix[i][0] = i; } + for j in 0..=b.len() { matrix[0][j] = j; } + + for i in 1..=a.len() { + for j in 1..=b.len() { + let cost = if a[i - 1] == b[j - 1] { 0 } else { 1 }; + matrix[i][j] = (matrix[i - 1][j] + 1) + .min(matrix[i][j - 1] + 1) + .min(matrix[i - 1][j - 1] + cost); + } + } + matrix[a.len()][b.len()] +} +``` + +**Step 4: Run full build and test** + +Run: `cargo build && cargo test` +Expected: PASS + +**Step 5: Commit** + +```bash +git add rivet-cli/src/main.rs +git commit -m "feat: add rivet commit-msg-check subcommand + +Implements: FEAT-029 +Trace: skip" +``` + +--- + +### Task 6: CLI — `commits` subcommand + +**Files:** +- Modify: `rivet-cli/src/main.rs` (add Command variant + handler) + +**Step 1: Add the Command variant** + +```rust + /// Analyze git commit history for artifact traceability + Commits { + /// Only analyze commits after this date (YYYY-MM-DD) + #[arg(long)] 
+ since: Option, + + /// Git revision range (e.g., "main..HEAD") + #[arg(long)] + range: Option, + + /// Output format: "text" (default) or "json" + #[arg(short, long, default_value = "text")] + format: String, + + /// Promote warnings to errors + #[arg(long)] + strict: bool, + }, +``` + +**Step 2: Add dispatch in `run()`** + +```rust +Command::Commits { since, range, format, strict } => { + cmd_commits(&cli, since.as_deref(), range.as_deref(), format, *strict) +} +``` + +**Step 3: Implement `cmd_commits`** + +```rust +fn cmd_commits( + cli: &Cli, + since: Option<&str>, + range: Option<&str>, + format: &str, + strict: bool, +) -> Result { + use rivet_core::commits; + + let project_dir = &cli.project; + let config_path = project_dir.join("rivet.yaml"); + let config = rivet_core::load_project_config(&config_path)?; + + let commits_cfg = config.commits.as_ref() + .context("no 'commits' block in rivet.yaml — configure commit traceability first")?; + + // Load artifacts + let schemas_dir = cli.schemas.clone().unwrap_or_else(|| project_dir.join("schemas")); + let schema = rivet_core::load_schemas(&config.project.schemas, &schemas_dir)?; + let mut store = rivet_core::store::Store::new(); + for source in &config.sources { + let artifacts = rivet_core::load_artifacts(source, project_dir)?; + for artifact in artifacts { + store.upsert(artifact); + } + } + + // Build known IDs set + let known_ids: std::collections::HashSet = + store.iter().map(|a| a.id.clone()).collect(); + + // Determine git range + let git_range = if let Some(r) = range { + r.to_string() + } else if let Some(s) = since { + format!("--since={s}") + } else { + "HEAD".to_string() + }; + + // Parse git log + let parsed = commits::git_log_commits( + project_dir, + &git_range, + &commits_cfg.trailers, + &commits_cfg.skip_trailer, + )?; + + let total = parsed.len(); + + // Analyze + let analysis = commits::analyze_commits( + parsed, + &known_ids, + &commits_cfg.exempt_types, + &commits_cfg.traced_paths, + 
&commits_cfg.trace_exempt_artifacts, + &commits_cfg.trailers, + ); + + if format == "json" { + // JSON output (structure matches the 5 report types) + let json = serde_json::json!({ + "total_commits": total, + "linked": analysis.linked.len(), + "exempt": analysis.exempt.len(), + "orphans": analysis.orphans.iter().map(|c| { + serde_json::json!({"hash": c.hash, "subject": c.subject}) + }).collect::>(), + "broken_refs": analysis.broken_refs.iter().map(|b| { + serde_json::json!({ + "hash": b.commit_hash, + "subject": b.commit_subject, + "trailer": b.trailer_key, + "artifact_id": b.artifact_id + }) + }).collect::>(), + "artifact_coverage": &analysis.artifact_coverage, + "unimplemented": &analysis.unimplemented, + }); + println!("{}", serde_json::to_string_pretty(&json)?); + } else { + // Text output + println!("Commit traceability ({total} commits analyzed):\n"); + println!(" Linked: {:>4} commits referencing {} artifacts", + analysis.linked.len(), + analysis.artifact_coverage.len()); + println!(" Exempt: {:>4} commits (type-exempt or skip trailer)", + analysis.exempt.len()); + println!(" Orphan: {:>4} commits touching traced paths without trailers", + analysis.orphans.len()); + println!(" Broken: {:>4} references to non-existent artifacts", + analysis.broken_refs.len()); + + if !analysis.orphans.is_empty() { + println!("\n WARNING: {} orphan commits:", analysis.orphans.len()); + for c in &analysis.orphans { + println!(" {} {} — no artifact trailer", &c.hash[..8.min(c.hash.len())], c.subject); + } + } + + if !analysis.broken_refs.is_empty() { + println!("\n ERROR: {} broken references:", analysis.broken_refs.len()); + for b in &analysis.broken_refs { + println!(" {} {} — {}: {} (not found)", + &b.commit_hash[..8.min(b.commit_hash.len())], + b.commit_subject, b.trailer_key, b.artifact_id); + } + } + + if !analysis.unimplemented.is_empty() { + println!("\n WARNING: {} artifacts with no commit evidence:", analysis.unimplemented.len()); + for id in &analysis.unimplemented { 
+ println!(" {id}"); + } + } + + // Summary coverage table + if !analysis.artifact_coverage.is_empty() { + println!("\n Artifact coverage:"); + let mut sorted: Vec<_> = analysis.artifact_coverage.iter().collect(); + sorted.sort_by_key(|(id, _)| id.clone()); + for (id, refs) in sorted { + let summary: Vec = { + let mut counts: BTreeMap<&str, usize> = BTreeMap::new(); + for (_, lt) in refs { + *counts.entry(lt.as_str()).or_default() += 1; + } + counts.iter().map(|(k, v)| format!("{k}: {v}")).collect() + }; + println!(" {id:<20} {} commits ({})", refs.len(), summary.join(", ")); + } + } + } + + // Determine exit code + let has_errors = !analysis.broken_refs.is_empty(); + let has_warnings = !analysis.orphans.is_empty() || !analysis.unimplemented.is_empty(); + + if has_errors || (strict && has_warnings) { + println!("\nResult: FAIL"); + Ok(false) + } else { + println!("\nResult: PASS"); + Ok(true) + } +} +``` + +**Step 4: Add `use std::collections::BTreeMap;` at top of main.rs if not already present** + +**Step 5: Run full build and test** + +Run: `cargo build && cargo test` +Expected: PASS + +**Step 6: Commit** + +```bash +git add rivet-cli/src/main.rs +git commit -m "feat: add rivet commits subcommand with 5 report types + +Implements: FEAT-030 +Trace: skip" +``` + +--- + +### Task 7: Pre-commit hook config entry + +**Files:** +- Modify: `.pre-commit-config.yaml` + +**Step 1: Add the hook entry** + +Add to the `local` repo hooks section: + +```yaml + - id: rivet-commit-msg + name: rivet commit-msg check + entry: rivet commit-msg-check + language: system + stages: [commit-msg] + always_run: true +``` + +**Step 2: Add commits config to rivet.yaml** + +```yaml +commits: + format: trailers + trailers: + Implements: implements + Fixes: fixes + Verifies: verifies + Satisfies: satisfies + Refs: traces-to + exempt-types: + - chore + - style + - ci + - docs + - build + skip-trailer: "Trace: skip" + traced-paths: + - rivet-core/src/ + - rivet-cli/src/ + trace-exempt-artifacts: 
[] +``` + +**Step 3: Commit** + +```bash +git add .pre-commit-config.yaml rivet.yaml +git commit -m "chore: add commit-msg hook and commits config to rivet.yaml" +``` + +Note: This commit is type `chore` so it's exempt from needing trailers. + +--- + +### Task 8: Integration test — end-to-end commit analysis + +**Files:** +- Create: `rivet-core/tests/commits_integration.rs` + +**Step 1: Write integration test** + +```rust +use std::collections::{BTreeMap, HashSet}; +use rivet_core::commits::{ + ParsedCommit, CommitClass, analyze_commits, + classify_commit_refs, is_exempt, touches_traced_path, +}; + +fn make_commit(hash: &str, subject: &str, refs: BTreeMap>, files: Vec) -> ParsedCommit { + ParsedCommit { + hash: hash.into(), + subject: subject.into(), + body: String::new(), + author: "Test".into(), + date: "2026-03-10".into(), + commit_type: rivet_core::commits::parse_commit_type(subject), + artifact_refs: refs, + changed_files: files, + has_skip_trailer: false, + } +} + +#[test] +fn full_analysis_reports() { + let known_ids: HashSet = ["FEAT-001", "FEAT-002", "REQ-001"] + .iter().map(|s| s.to_string()).collect(); + + let trailer_map: BTreeMap = + [("Implements".into(), "implements".into())].into_iter().collect(); + + let mut linked_refs = BTreeMap::new(); + linked_refs.insert("implements".into(), vec!["FEAT-001".into()]); + + let mut broken_refs = BTreeMap::new(); + broken_refs.insert("implements".into(), vec!["FEAT-999".into()]); + + let commits = vec![ + make_commit("aaa", "feat: linked commit", linked_refs, vec!["rivet-core/src/foo.rs".into()]), + make_commit("bbb", "feat: broken ref", broken_refs, vec!["rivet-core/src/bar.rs".into()]), + make_commit("ccc", "feat: orphan commit", BTreeMap::new(), vec!["rivet-core/src/baz.rs".into()]), + make_commit("ddd", "chore: exempt commit", BTreeMap::new(), vec!["Cargo.toml".into()]), + ]; + + let analysis = analyze_commits( + commits, + &known_ids, + &["chore".into()], + &["rivet-core/src/".into()], + &[], + &trailer_map, 
+ ); + + assert_eq!(analysis.linked.len(), 2); // aaa + bbb (bbb has both broken and valid-path refs) + assert_eq!(analysis.broken_refs.len(), 1); + assert_eq!(analysis.broken_refs[0].artifact_id, "FEAT-999"); + assert_eq!(analysis.orphans.len(), 1); + assert_eq!(analysis.orphans[0].hash, "ccc"); + assert_eq!(analysis.exempt.len(), 1); + assert_eq!(analysis.exempt[0].hash, "ddd"); + // FEAT-002 and REQ-001 have no commits + assert!(analysis.unimplemented.contains(&"FEAT-002".to_string())); + assert!(analysis.unimplemented.contains(&"REQ-001".to_string())); +} + +#[test] +fn trace_exempt_artifacts_excluded_from_unimplemented() { + let known_ids: HashSet = ["FEAT-001", "FEAT-002"] + .iter().map(|s| s.to_string()).collect(); + + let analysis = analyze_commits( + vec![], + &known_ids, + &[], + &[], + &["FEAT-002".into()], + &BTreeMap::new(), + ); + + assert!(analysis.unimplemented.contains(&"FEAT-001".to_string())); + assert!(!analysis.unimplemented.contains(&"FEAT-002".to_string())); +} +``` + +**Step 2: Run tests** + +Run: `cargo test -p rivet-core --test commits_integration` +Expected: PASS + +**Step 3: Run full test suite** + +Run: `cargo test` +Expected: all tests PASS + +**Step 4: Run clippy** + +Run: `cargo clippy -- -D warnings` +Expected: no warnings + +**Step 5: Commit** + +```bash +git add rivet-core/tests/commits_integration.rs rivet-core/tests/commits_config.rs +git commit -m "test: add commit traceability integration tests + +Verifies: FEAT-029, FEAT-030, FEAT-031, FEAT-032 +Trace: skip" +``` + +--- + +### Task 9: Verify dogfooding — run `rivet commits` on self + +**Step 1: Build and run** + +```bash +cargo build --release +./target/release/rivet validate +./target/release/rivet commits +``` + +**Step 2: Verify output** + +Expected: the report should show: +- Implementation commits with `Implements:` trailers as linked +- `chore:`/`docs:` commits as exempt +- Possibly orphan commits from before the feature was added +- Unimplemented artifacts list (since 
most pre-existing commits lack trailers) + +**Step 3: Adjust `trace-exempt-artifacts` in rivet.yaml** + +Add pre-existing artifact IDs that don't have commit evidence to the whitelist, since they were implemented before this feature existed. + +**Step 4: Final validate** + +```bash +./target/release/rivet validate +./target/release/rivet commits --strict +``` + +Expected: PASS on both + +**Step 5: Commit** + +```bash +git add rivet.yaml +git commit -m "chore: add trace-exempt-artifacts for pre-existing implementations" +``` + +--- + +### Task 10: Documentation — update rivet docs + +**Step 1: Add commit-traceability topic to built-in docs** + +Check `rivet-cli/src/docs.rs` or the embedded docs directory for the pattern, then add a `commit-traceability` topic covering: +- Configuration reference +- Commit message format +- Pre-commit hook setup +- `rivet commits` usage and report types +- Exemption mechanisms + +**Step 2: Commit** + +```bash +git add rivet-cli/src/docs.rs # or wherever docs live +git commit -m "docs: add commit traceability documentation topic + +Implements: FEAT-031 +Trace: skip" +``` + +--- + +Plan complete and saved to `docs/plans/2026-03-10-commit-traceability-plan.md`. Two execution options: + +**1. Subagent-Driven (this session)** — I dispatch fresh subagent per task, review between tasks, fast iteration + +**2. Parallel Session (separate)** — Open new session with executing-plans, batch execution with checkpoints + +Which approach? 
\ No newline at end of file diff --git a/docs/plans/2026-03-10-cross-repo-linking-design.md b/docs/plans/2026-03-10-cross-repo-linking-design.md new file mode 100644 index 0000000..b86708d --- /dev/null +++ b/docs/plans/2026-03-10-cross-repo-linking-design.md @@ -0,0 +1,189 @@ +# Cross-Repository Artifact Linking — Design + +## Goal + +Enable rivet projects across multiple git repositories to reference each +other's artifacts, validate cross-repo links, and create distributed +baselines — without requiring a central platform repository. + +## Architecture + +**Mesh topology.** Any rivet repo can declare dependencies on any other rivet +repo. No hub-and-spoke or central authority. Bidirectional links are supported: +links are stored on one side, backlinks computed at analysis time (matching +OSLC guidance and rivet's existing intra-project model). + +**Transitive resolution.** If meld depends on rivet and rivet depends on loom, +meld can validate rivet's links to loom. Meld cannot reference loom artifacts +directly unless it declares loom as its own external. 
+ +## Externals Configuration + +Each repo declares its direct dependencies in `rivet.yaml`: + +```yaml +externals: + rivet: + git: https://github.com/pulseengine/rivet + ref: main # branch, tag, or commit SHA + prefix: rivet # local alias for cross-links + loom: + git: https://github.com/pulseengine/loom + ref: v0.3.0 + prefix: loom +``` + +- `git` — clone URL for the external repo +- `ref` — git ref to fetch (branch, tag, or commit SHA) +- `prefix` — short alias used in cross-links; must be unique within the project +- Local path support: `path: ../meld` as alternative to `git:` for co-located repos + +## Cross-Link Syntax + +In artifact YAML, cross-repo links use `prefix:ID`: + +```yaml +- id: UCA-C-1 + type: uca + title: CLI does not validate when user commits + links: + - type: traces-to + target: rivet:REQ-001 # resolves via externals + - type: mitigates + target: rivet:H-1 # same external, different artifact +``` + +**Resolution rules:** +- Bare IDs (no colon) resolve locally as today +- Prefixed IDs (`prefix:ID`) resolve against the named external +- Unknown prefixes are validation errors +- IDs not found in the external are broken-reference errors + +## Cache and Sync + +**Cache location:** `.rivet/repos//` — gitignored. + +`rivet sync`: +1. Reads `externals` from `rivet.yaml` +2. For each external, clones (or fetches) the repo into `.rivet/repos//` +3. Checks out the declared `ref` +4. Recursively processes the external's own `externals` (transitive deps) +5. Checks `.gitignore` for `.rivet/` — warns or auto-adds if missing + +`rivet lock`: +- Writes `rivet.lock` pinning each external (including transitive) to an exact + commit SHA +- Subsequent `rivet sync` respects the lockfile for reproducible builds +- `rivet lock --update` refreshes to latest refs + +## Validation + +`rivet validate` with externals: +1. Loads local artifacts as today +2. Loads external artifacts from `.rivet/repos/` cache +3. Resolves all links (local and cross-repo) +4. 
Reports broken cross-repo references with the external prefix +5. External artifacts are read-only — no schema validation of external + artifacts (they validated themselves in their own repo) + +## Distributed Baselining + +### Phase A: Convention Tags + +Any repo participates in a baseline by tagging: `git tag baseline/v1.0` + +- `rivet baseline verify v1.0` — syncs all externals at their `baseline/v1.0` + tag, validates cross-links. Missing tags are **warnings**, not errors (the + baseline is an evolving process — repos join at their own pace). +- `rivet baseline list` — lists baselines found across externals (tags + matching `baseline/*`) +- `rivet baseline diff v1.0 v2.0` — shows artifact changes across repos + between two baselines +- `--strict` flag for release gates: fails if any external is missing the tag + +### Phase C (Future): Baseline Receipts + +When tagging, rivet writes `baselines/v1.0.yaml` in the repo: + +```yaml +baseline: v1.0 +created: 2026-03-10T19:30:00Z +self: abc123def456 +externals: + rivet: def456abc789 + loom: 789abc012def +``` + +Verification compares receipts across repos to detect disagreements (repo A +says it baselined with repo B at commit X, but repo B says it baselined at +commit Y). + +## Single Binary — WASM Asset Embedding + +Embed spar WASM/JS files at compile time via `include_bytes!`/`include_str!` +so the `rivet` binary is fully self-contained: + +```rust +const SPAR_WASM_JS: &str = include_str!("../assets/wasm/js/spar_wasm.js"); +const SPAR_CORE_WASM: &[u8] = include_bytes!("../assets/wasm/js/spar_wasm.core.wasm"); +``` + +Feature-gated (`#[cfg(feature = "embed-wasm")]`) so builds without spar +assets still compile. The `wasm_asset` handler serves from embedded constants +instead of reading from disk. + +The server-side WASM component (`spar_wasm.wasm`) for `call_render`/ +`call_analyze` is also embedded. 
+ +## Documentation References + +Add methodology references with URLs to built-in docs topics: +- `schema/stpa` — STPA Handbook (MIT), link to PDF +- `schema/aspice` — Automotive SPICE PAM reference +- `schema/cybersecurity` — ISO/SAE 21434 reference +- URLs accessible to both humans and AI agents + +## Design Decisions + +- **DD-014: Prefixed IDs over URI-style** — `rivet:REQ-001` is simpler and + more readable in YAML than `rivet://pulseengine/rivet#REQ-001`. The prefix + is a local alias configured in `rivet.yaml`. +- **DD-015: Mesh over hub-and-spoke** — any repo links to any other. No + central authority required. Matches distributed team workflows. +- **DD-016: Distributed baselining over centralized manifest** — repos tag + themselves, consistency is verified not enforced. No platform repo required. +- **DD-017: Transitive dependency resolution** — declare direct deps only, + discover transitively. Scales naturally, avoids redundant declarations. + +## Dogfooding Artifacts + +### Requirements +- REQ-020: Cross-repository artifact linking via prefixed IDs +- REQ-021: Distributed baselining via convention tags +- REQ-022: Single-binary WASM asset embedding + +### Design Decisions +- DD-014: Prefixed IDs over URI-style references +- DD-015: Mesh topology over hub-and-spoke +- DD-016: Distributed baselining over centralized manifest +- DD-017: Transitive dependency resolution + +### Features +- FEAT-033: `externals` config block and prefix resolution +- FEAT-034: `rivet sync` — fetch external repos into `.rivet/repos/` +- FEAT-035: `rivet lock` — pin externals to exact commits +- FEAT-036: `rivet baseline verify` — cross-repo baseline validation +- FEAT-037: Embedded WASM/JS assets (single binary) +- FEAT-038: Cross-repo link validation in `rivet validate` +- FEAT-039: Dashboard external project browsing + +## Prior Art + +| Tool | Pattern | Adopted | +|------|---------|---------| +| sphinx-needs | JSON manifest + id_prefix namespace | Prefixed IDs | +| OSLC | 
URI identity, store links on one side, global configurations | Link storage model, baseline concepts | +| ReqView | Linked projects, file-based, workspace scripts | Local path externals | +| Android repo | Manifest-based multi-repo orchestration | Sync/cache model | +| Cargo | Lockfile for reproducible builds | `rivet.lock` | +| Doorstop | Single-repo only | Anti-pattern avoided | diff --git a/docs/plans/2026-03-10-cross-repo-linking-plan.md b/docs/plans/2026-03-10-cross-repo-linking-plan.md new file mode 100644 index 0000000..119c633 --- /dev/null +++ b/docs/plans/2026-03-10-cross-repo-linking-plan.md @@ -0,0 +1,1364 @@ +# Cross-Repository Artifact Linking Implementation Plan + +> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. + +**Goal:** Enable rivet projects to declare external dependencies on other rivet repos, resolve cross-repo artifact links (`prefix:ID`), sync external repos into a local cache, and pin versions with a lockfile. + +**Architecture:** New `ExternalsConfig` in `model.rs`. New `externals` module in rivet-core for parsing prefixed IDs, syncing repos via `git clone/fetch`, and managing `rivet.lock`. New CLI subcommands `sync`, `lock`, `baseline`. Cross-repo link resolution integrated into existing `validate` and `links` modules. 
+ +**Tech Stack:** Rust, serde (config), `std::process::Command` (git), petgraph (cross-repo graph), clap (CLI) + +--- + +### Task 1: Data model — Add `ExternalsConfig` and `ExternalProject` + +**Files:** +- Modify: `rivet-core/src/model.rs` +- Test: `rivet-core/tests/externals_config.rs` (create) + +**Step 1: Write the failing test** + +```rust +// rivet-core/tests/externals_config.rs +use rivet_core::model::ProjectConfig; + +#[test] +fn externals_parsed_from_yaml() { + let yaml = r#" +project: + name: test + version: "0.1.0" + schemas: [common, dev] +sources: [] +externals: + rivet: + git: https://github.com/pulseengine/rivet + ref: main + prefix: rivet + meld: + path: ../meld + prefix: meld +"#; + let config: ProjectConfig = serde_yaml::from_str(yaml).unwrap(); + let ext = config.externals.as_ref().unwrap(); + assert_eq!(ext.len(), 2); + + let rivet = &ext["rivet"]; + assert_eq!(rivet.git.as_deref(), Some("https://github.com/pulseengine/rivet")); + assert_eq!(rivet.git_ref.as_deref(), Some("main")); + assert_eq!(rivet.prefix, "rivet"); + + let meld = &ext["meld"]; + assert_eq!(meld.path.as_deref(), Some("../meld")); + assert!(meld.git.is_none()); + assert_eq!(meld.prefix, "meld"); +} + +#[test] +fn no_externals_is_none() { + let yaml = r#" +project: + name: test + version: "0.1.0" + schemas: [common] +sources: [] +"#; + let config: ProjectConfig = serde_yaml::from_str(yaml).unwrap(); + assert!(config.externals.is_none()); +} +``` + +**Step 2: Run test to verify it fails** + +Run: `cargo test -p rivet-core --test externals_config 2>&1` +Expected: FAIL — `externals` field doesn't exist on `ProjectConfig` + +**Step 3: Write minimal implementation** + +Add to `rivet-core/src/model.rs`: + +```rust +/// Configuration for a single external project dependency. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ExternalProject { + /// Git clone URL (mutually exclusive with `path`). 
+ #[serde(default)] + pub git: Option<String>, + /// Local filesystem path (mutually exclusive with `git`). + #[serde(default)] + pub path: Option<String>, + /// Git ref to checkout (branch, tag, or commit SHA). + #[serde(default, rename = "ref")] + pub git_ref: Option<String>, + /// Short prefix used in cross-links (e.g., "rivet" for "rivet:REQ-001"). + pub prefix: String, +} +``` + +Add to `ProjectConfig`: + +```rust + /// External project dependencies for cross-repo linking. + #[serde(default)] + pub externals: Option<BTreeMap<String, ExternalProject>>, +``` + +**Step 4: Run test to verify it passes** + +Run: `cargo test -p rivet-core --test externals_config 2>&1` +Expected: PASS + +**Step 5: Commit** + +```bash +git add rivet-core/src/model.rs rivet-core/tests/externals_config.rs +git commit -m "feat: add ExternalsConfig to data model + +Implements: FEAT-033" +``` + +--- + +### Task 2: Prefixed ID parser — `externals` module + +**Files:** +- Create: `rivet-core/src/externals.rs` +- Modify: `rivet-core/src/lib.rs` + +**Step 1: Write the failing tests (inside the new module)** + +```rust +// rivet-core/src/externals.rs + +/// A parsed artifact reference — either local or cross-repo. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum ArtifactRef { + /// Local artifact ID (no prefix). + Local(String), + /// Cross-repo artifact: (prefix, id). + External { prefix: String, id: String }, +} + +/// Parse an artifact reference string. 
+/// +/// - `"REQ-001"` → `ArtifactRef::Local("REQ-001")` +/// - `"rivet:REQ-001"` → `ArtifactRef::External { prefix: "rivet", id: "REQ-001" }` +pub fn parse_artifact_ref(s: &str) -> ArtifactRef { + todo!() +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn local_id_no_colon() { + assert_eq!( + parse_artifact_ref("REQ-001"), + ArtifactRef::Local("REQ-001".into()) + ); + } + + #[test] + fn external_id_with_prefix() { + assert_eq!( + parse_artifact_ref("rivet:REQ-001"), + ArtifactRef::External { + prefix: "rivet".into(), + id: "REQ-001".into(), + } + ); + } + + #[test] + fn local_id_with_hyphen_numbers() { + // IDs like "H-1.2" should not be confused with prefix:id + assert_eq!( + parse_artifact_ref("H-1.2"), + ArtifactRef::Local("H-1.2".into()) + ); + } + + #[test] + fn external_with_complex_id() { + assert_eq!( + parse_artifact_ref("meld:UCA-C-1"), + ArtifactRef::External { + prefix: "meld".into(), + id: "UCA-C-1".into(), + } + ); + } +} +``` + +**Step 2: Run test to verify it fails** + +Run: `cargo test -p rivet-core externals 2>&1` +Expected: FAIL — `todo!()` panics + +**Step 3: Implement `parse_artifact_ref`** + +```rust +pub fn parse_artifact_ref(s: &str) -> ArtifactRef { + // Only split on first colon. The prefix must be purely alphabetic + // (no digits, hyphens, or dots) to avoid confusion with IDs like "H-1.2". + if let Some((prefix, id)) = s.split_once(':') { + if !prefix.is_empty() + && prefix.chars().all(|c| c.is_ascii_lowercase()) + && !id.is_empty() + { + return ArtifactRef::External { + prefix: prefix.to_string(), + id: id.to_string(), + }; + } + } + ArtifactRef::Local(s.to_string()) +} +``` + +Add `pub mod externals;` to `rivet-core/src/lib.rs`. 
+ +**Step 4: Run test to verify it passes** + +Run: `cargo test -p rivet-core externals 2>&1` +Expected: PASS + +**Step 5: Commit** + +```bash +git add rivet-core/src/externals.rs rivet-core/src/lib.rs +git commit -m "feat: add externals module with prefixed ID parser + +Implements: FEAT-033" +``` + +--- + +### Task 3: Git sync — `rivet sync` core logic + +**Files:** +- Modify: `rivet-core/src/externals.rs` + +**Step 1: Write the failing test** + +```rust +#[test] +fn sync_local_path_external() { + let dir = tempfile::tempdir().unwrap(); + // Create a fake external project with rivet.yaml and an artifact + let ext_dir = dir.path().join("ext-project"); + std::fs::create_dir_all(&ext_dir).unwrap(); + std::fs::write( + ext_dir.join("rivet.yaml"), + "project:\n name: ext\n version: '0.1.0'\n schemas: [common, dev]\nsources:\n - path: artifacts\n format: generic-yaml\n", + ).unwrap(); + let art_dir = ext_dir.join("artifacts"); + std::fs::create_dir_all(&art_dir).unwrap(); + std::fs::write( + art_dir.join("reqs.yaml"), + "artifacts:\n - id: EXT-001\n type: requirement\n title: External req\n", + ).unwrap(); + + let ext = crate::model::ExternalProject { + git: None, + path: Some(ext_dir.to_str().unwrap().into()), + git_ref: None, + prefix: "ext".into(), + }; + + let cache_dir = dir.path().join(".rivet/repos"); + let result = sync_external(&ext, &cache_dir, dir.path()); + assert!(result.is_ok()); + + // For path externals, the cache should contain a symlink or copy + let cached = cache_dir.join("ext"); + assert!(cached.exists()); +} +``` + +**Step 2: Run test to verify it fails** + +Run: `cargo test -p rivet-core externals::tests::sync_local_path_external 2>&1` +Expected: FAIL — `sync_external` not defined + +**Step 3: Implement sync logic** + +```rust +use std::path::{Path, PathBuf}; +use std::process::Command; +use crate::model::ExternalProject; + +/// Sync a single external project into the cache directory. 
+/// +/// For `path` externals: creates a symlink from `.rivet/repos/` to the path. +/// For `git` externals: clones or fetches the repo, checks out the specified ref. +pub fn sync_external( + ext: &ExternalProject, + cache_dir: &Path, + project_dir: &Path, +) -> Result<PathBuf, crate::error::Error> { + let dest = cache_dir.join(&ext.prefix); + std::fs::create_dir_all(cache_dir) + .map_err(|e| crate::error::Error::Io(format!("create cache dir: {e}")))?; + + if let Some(ref local_path) = ext.path { + // Resolve relative to project dir + let resolved = if Path::new(local_path).is_relative() { + project_dir.join(local_path) + } else { + PathBuf::from(local_path) + }; + let resolved = resolved.canonicalize() + .map_err(|e| crate::error::Error::Io(format!("resolve path '{}': {e}", local_path)))?; + + // Remove existing symlink/dir if present + if dest.exists() || dest.is_symlink() { + if dest.is_symlink() { + std::fs::remove_file(&dest).ok(); + } else { + std::fs::remove_dir_all(&dest).ok(); + } + } + + #[cfg(unix)] + std::os::unix::fs::symlink(&resolved, &dest) + .map_err(|e| crate::error::Error::Io(format!("symlink: {e}")))?; + + #[cfg(not(unix))] + { + // Fallback: copy directory for non-unix + copy_dir_recursive(&resolved, &dest)?; + } + + return Ok(dest); + } + + if let Some(ref git_url) = ext.git { + let git_ref = ext.git_ref.as_deref().unwrap_or("main"); + + if dest.join(".git").exists() { + // Fetch updates + let output = Command::new("git") + .args(["fetch", "origin"]) + .current_dir(&dest) + .output() + .map_err(|e| crate::error::Error::Io(format!("git fetch: {e}")))?; + if !output.status.success() { + return Err(crate::error::Error::Io(format!( + "git fetch failed: {}", + String::from_utf8_lossy(&output.stderr) + ))); + } + // Checkout ref + let output = Command::new("git") + .args(["checkout", git_ref]) + .current_dir(&dest) + .output() + .map_err(|e| crate::error::Error::Io(format!("git checkout: {e}")))?; + if !output.status.success() { + // Try as remote branch + Command::new("git") + 
.args(["checkout", &format!("origin/{git_ref}")]) + .current_dir(&dest) + .output() + .ok(); + } + } else { + // Clone fresh + let output = Command::new("git") + .args(["clone", git_url, dest.to_str().unwrap_or(".")]) + .output() + .map_err(|e| crate::error::Error::Io(format!("git clone: {e}")))?; + if !output.status.success() { + return Err(crate::error::Error::Io(format!( + "git clone failed: {}", + String::from_utf8_lossy(&output.stderr) + ))); + } + if git_ref != "main" && git_ref != "master" { + Command::new("git") + .args(["checkout", git_ref]) + .current_dir(&dest) + .output() + .ok(); + } + } + return Ok(dest); + } + + Err(crate::error::Error::Io( + "external must have either 'git' or 'path'".into(), + )) +} + +/// Sync all externals declared in the project config. +pub fn sync_all( + externals: &std::collections::BTreeMap<String, ExternalProject>, + project_dir: &Path, +) -> Result<Vec<(String, PathBuf)>, crate::error::Error> { + let cache_dir = project_dir.join(".rivet/repos"); + let mut results = Vec::new(); + for (name, ext) in externals { + let path = sync_external(ext, &cache_dir, project_dir)?; + results.push((name.clone(), path)); + } + Ok(results) +} + +/// Ensure `.rivet/` is in `.gitignore`. Warns and appends if missing. 
+pub fn ensure_gitignore(project_dir: &Path) -> Result<bool, crate::error::Error> { + let gitignore = project_dir.join(".gitignore"); + if gitignore.exists() { + let content = std::fs::read_to_string(&gitignore) + .map_err(|e| crate::error::Error::Io(format!("read .gitignore: {e}")))?; + if content.lines().any(|l| l.trim() == ".rivet/" || l.trim() == ".rivet") { + return Ok(false); // already present + } + } + // Append + use std::io::Write; + let mut f = std::fs::OpenOptions::new() + .create(true) + .append(true) + .open(&gitignore) + .map_err(|e| crate::error::Error::Io(format!("open .gitignore: {e}")))?; + writeln!(f, "\n# Rivet external project cache\n.rivet/") + .map_err(|e| crate::error::Error::Io(format!("write .gitignore: {e}")))?; + Ok(true) // added +} +``` + +**Step 4: Run test to verify it passes** + +Run: `cargo test -p rivet-core externals 2>&1` +Expected: PASS + +**Step 5: Commit** + +```bash +git add rivet-core/src/externals.rs +git commit -m "feat: add sync_external for git and path externals + +Implements: FEAT-034" +``` + +--- + +### Task 4: Load external artifacts + +**Files:** +- Modify: `rivet-core/src/externals.rs` + +**Step 1: Write the failing test** + +```rust +#[test] +fn load_external_artifacts() { + let dir = tempfile::tempdir().unwrap(); + let ext_dir = dir.path().join("ext"); + std::fs::create_dir_all(ext_dir.join("artifacts")).unwrap(); + std::fs::write( + ext_dir.join("rivet.yaml"), + "project:\n  name: ext\n  version: '0.1.0'\n  schemas: [common, dev]\nsources:\n  - path: artifacts\n    format: generic-yaml\n", + ).unwrap(); + std::fs::write( + ext_dir.join("artifacts/reqs.yaml"), + "artifacts:\n  - id: EXT-001\n    type: requirement\n    title: External req\n  - id: EXT-002\n    type: feature\n    title: External feat\n", + ).unwrap(); + + let artifacts = load_external_project(&ext_dir).unwrap(); + assert_eq!(artifacts.len(), 2); + assert!(artifacts.iter().any(|a| a.id == "EXT-001")); + assert!(artifacts.iter().any(|a| a.id == "EXT-002")); +} +``` + +**Step 2: Run test to 
verify it fails** + +Expected: FAIL — `load_external_project` not defined + +**Step 3: Implement** + +```rust +/// Load artifacts from an external project directory. +/// +/// Reads the external project's `rivet.yaml`, discovers its sources, +/// and loads all artifacts. Does NOT validate against schema (the +/// external project validates itself). +pub fn load_external_project( + project_dir: &Path, +) -> Result<Vec<crate::model::Artifact>, crate::error::Error> { + let config_path = project_dir.join("rivet.yaml"); + let config = crate::load_project_config(&config_path)?; + + let mut artifacts = Vec::new(); + for source in &config.sources { + let loaded = crate::load_artifacts(source, project_dir)?; + artifacts.extend(loaded); + } + Ok(artifacts) +} + +/// A resolved external with its loaded artifacts. +#[derive(Debug)] +pub struct ResolvedExternal { + pub prefix: String, + pub project_dir: PathBuf, + pub artifacts: Vec<crate::model::Artifact>, +} + +/// Load all external projects from cache and return their artifacts. +pub fn load_all_externals( + externals: &std::collections::BTreeMap<String, ExternalProject>, + project_dir: &Path, +) -> Result<Vec<ResolvedExternal>, crate::error::Error> { + let cache_dir = project_dir.join(".rivet/repos"); + let mut resolved = Vec::new(); + for (_name, ext) in externals { + let ext_dir = if let Some(ref local_path) = ext.path { + let p = if Path::new(local_path).is_relative() { + project_dir.join(local_path) + } else { + PathBuf::from(local_path) + }; + p.canonicalize().unwrap_or(p) + } else { + cache_dir.join(&ext.prefix) + }; + let artifacts = load_external_project(&ext_dir)?; + resolved.push(ResolvedExternal { + prefix: ext.prefix.clone(), + project_dir: ext_dir, + artifacts, + }); + } + Ok(resolved) +} +``` + +**Step 4: Run tests** + +Run: `cargo test -p rivet-core externals 2>&1` +Expected: PASS + +**Step 5: Commit** + +```bash +git add rivet-core/src/externals.rs +git commit -m "feat: load external project artifacts from cache + +Implements: FEAT-038" +``` + +--- + +### Task 5: Cross-repo link validation + +**Files:** 
+- Modify: `rivet-core/src/externals.rs` + +**Step 1: Write the failing test** + +```rust +#[test] +fn validate_cross_repo_links() { + use std::collections::{BTreeMap, HashSet}; + + // Local artifacts + let local_ids: HashSet<String> = ["REQ-001", "FEAT-001"].iter().map(|s| s.to_string()).collect(); + + // External artifacts keyed by prefix + let mut external_ids: BTreeMap<String, HashSet<String>> = BTreeMap::new(); + external_ids.insert( + "meld".into(), + ["UCA-C-1", "H-1"].iter().map(|s| s.to_string()).collect(), + ); + + // Valid references + let refs = vec!["REQ-001", "meld:UCA-C-1", "meld:H-1", "FEAT-001"]; + let broken = validate_refs(&refs, &local_ids, &external_ids); + assert!(broken.is_empty()); + + // Broken references + let refs2 = vec!["meld:NOPE-999", "unknown:REQ-001", "MISSING-001"]; + let broken2 = validate_refs(&refs2, &local_ids, &external_ids); + assert_eq!(broken2.len(), 3); +} +``` + +**Step 2: Run test — FAIL** + +**Step 3: Implement** + +```rust +/// A broken cross-repo reference. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct BrokenRef { + pub reference: String, + pub reason: BrokenRefReason, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum BrokenRefReason { + UnknownPrefix(String), + NotFoundInExternal { prefix: String, id: String }, + NotFoundLocally(String), +} + +/// Validate a list of artifact reference strings against local and external ID sets. 
+pub fn validate_refs( + refs: &[&str], + local_ids: &std::collections::HashSet<String>, + external_ids: &std::collections::BTreeMap<String, std::collections::HashSet<String>>, +) -> Vec<BrokenRef> { + let mut broken = Vec::new(); + for r in refs { + match parse_artifact_ref(r) { + ArtifactRef::Local(id) => { + if !local_ids.contains(&id) { + broken.push(BrokenRef { + reference: r.to_string(), + reason: BrokenRefReason::NotFoundLocally(id), + }); + } + } + ArtifactRef::External { prefix, id } => { + if let Some(ids) = external_ids.get(&prefix) { + if !ids.contains(&id) { + broken.push(BrokenRef { + reference: r.to_string(), + reason: BrokenRefReason::NotFoundInExternal { prefix, id }, + }); + } + } else { + broken.push(BrokenRef { + reference: r.to_string(), + reason: BrokenRefReason::UnknownPrefix(prefix), + }); + } + } + } + } + broken +} +``` + +**Step 4: Run tests — PASS** + +**Step 5: Commit** + +```bash +git add rivet-core/src/externals.rs +git commit -m "feat: cross-repo link validation with broken ref reporting + +Implements: FEAT-038" +``` + +--- + +### Task 6: Lockfile — `rivet lock` / `rivet.lock` + +**Files:** +- Modify: `rivet-core/src/externals.rs` + +**Step 1: Write the failing test** + +```rust +#[test] +fn lockfile_roundtrip() { + let mut pins = BTreeMap::new(); + pins.insert("rivet".into(), LockEntry { + git: Some("https://github.com/pulseengine/rivet".into()), + commit: "abc123def456".into(), + prefix: "rivet".into(), + }); + pins.insert("meld".into(), LockEntry { + git: None, + commit: "789abc012def".into(), + prefix: "meld".into(), + }); + + let lock = Lockfile { pins }; + let yaml = serde_yaml::to_string(&lock).unwrap(); + let parsed: Lockfile = serde_yaml::from_str(&yaml).unwrap(); + assert_eq!(parsed.pins.len(), 2); + assert_eq!(parsed.pins["rivet"].commit, "abc123def456"); +} +``` + +**Step 2: Run test — FAIL** + +**Step 3: Implement** + +```rust +/// A lockfile pinning externals to exact commits. 
+#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Lockfile { + pub pins: BTreeMap<String, LockEntry>, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct LockEntry { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub git: Option<String>, + pub commit: String, + pub prefix: String, +} + +/// Read the current commit SHA of a git repository. +pub fn git_head_sha(repo_dir: &Path) -> Result<String, crate::error::Error> { + let output = Command::new("git") + .args(["rev-parse", "HEAD"]) + .current_dir(repo_dir) + .output() + .map_err(|e| crate::error::Error::Io(format!("git rev-parse: {e}")))?; + Ok(String::from_utf8_lossy(&output.stdout).trim().to_string()) +} + +/// Generate a lockfile from current external state. +pub fn generate_lockfile( + externals: &BTreeMap<String, ExternalProject>, + project_dir: &Path, +) -> Result<Lockfile, crate::error::Error> { + let cache_dir = project_dir.join(".rivet/repos"); + let mut pins = BTreeMap::new(); + for (name, ext) in externals { + let ext_dir = if let Some(ref local_path) = ext.path { + let p = if Path::new(local_path).is_relative() { + project_dir.join(local_path) + } else { + PathBuf::from(local_path) + }; + p.canonicalize().unwrap_or(p) + } else { + cache_dir.join(&ext.prefix) + }; + let commit = git_head_sha(&ext_dir)?; + pins.insert(name.clone(), LockEntry { + git: ext.git.clone(), + commit, + prefix: ext.prefix.clone(), + }); + } + Ok(Lockfile { pins }) +} + +/// Write lockfile to `rivet.lock`. +pub fn write_lockfile(lock: &Lockfile, project_dir: &Path) -> Result<(), crate::error::Error> { + let path = project_dir.join("rivet.lock"); + let yaml = serde_yaml::to_string(lock) + .map_err(|e| crate::error::Error::Schema(format!("serialize lockfile: {e}")))?; + std::fs::write(&path, yaml) + .map_err(|e| crate::error::Error::Io(format!("write rivet.lock: {e}")))?; + Ok(()) +} + +/// Read lockfile from `rivet.lock`. 
+pub fn read_lockfile(project_dir: &Path) -> Result<Option<Lockfile>, crate::error::Error> { + let path = project_dir.join("rivet.lock"); + if !path.exists() { + return Ok(None); + } + let content = std::fs::read_to_string(&path) + .map_err(|e| crate::error::Error::Io(format!("read rivet.lock: {e}")))?; + let lock: Lockfile = serde_yaml::from_str(&content) + .map_err(|e| crate::error::Error::Schema(format!("parse rivet.lock: {e}")))?; + Ok(Some(lock)) +} +``` + +**Step 4: Run tests — PASS** + +**Step 5: Commit** + +```bash +git add rivet-core/src/externals.rs +git commit -m "feat: lockfile generation and parsing for rivet.lock + +Implements: FEAT-035" +``` + +--- + +### Task 7: CLI — `rivet sync` and `rivet lock` subcommands + +**Files:** +- Modify: `rivet-cli/src/main.rs` + +**Step 1: Add CLI variants** + +Add to the `Command` enum: + +```rust + /// Sync external project dependencies into .rivet/repos/ + Sync, + + /// Pin external dependencies to exact commits in rivet.lock + Lock { + /// Update all pins to latest refs + #[arg(long)] + update: bool, + }, +``` + +**Step 2: Implement handlers** + +```rust +fn cmd_sync(cli: &Cli) -> Result<bool, rivet_core::error::Error> { + let config = rivet_core::load_project_config(&cli.project.join("rivet.yaml"))?; + let externals = config.externals.as_ref(); + if externals.is_none() || externals.unwrap().is_empty() { + eprintln!("No externals declared in rivet.yaml"); + return Ok(true); + } + let externals = externals.unwrap(); + + // Ensure .rivet/ is gitignored + let added = rivet_core::externals::ensure_gitignore(&cli.project)?; + if added { + eprintln!("Added .rivet/ to .gitignore"); + } + + let results = rivet_core::externals::sync_all(externals, &cli.project)?; + for (name, path) in &results { + eprintln!("  Synced {} → {}", name, path.display()); + } + eprintln!("\n{} externals synced.", results.len()); + Ok(true) +} + +fn cmd_lock(cli: &Cli, _update: bool) -> Result<bool, rivet_core::error::Error> { + let config = rivet_core::load_project_config(&cli.project.join("rivet.yaml"))?; + let externals = 
config.externals.as_ref(); + if externals.is_none() || externals.unwrap().is_empty() { + eprintln!("No externals declared in rivet.yaml"); + return Ok(true); + } + let lock = rivet_core::externals::generate_lockfile(externals.unwrap(), &cli.project)?; + rivet_core::externals::write_lockfile(&lock, &cli.project)?; + eprintln!("Wrote rivet.lock with {} pins", lock.pins.len()); + Ok(true) +} +``` + +Wire up in match statement: + +```rust +Command::Sync => cmd_sync(&cli), +Command::Lock { update } => cmd_lock(&cli, *update), +``` + +**Step 3: Build and test manually** + +Run: `cargo build -p rivet-cli 2>&1` +Expected: compiles + +**Step 4: Commit** + +```bash +git add rivet-cli/src/main.rs +git commit -m "feat: add rivet sync and rivet lock CLI subcommands + +Implements: FEAT-034, FEAT-035" +``` + +--- + +### Task 8: Integrate cross-repo validation into `rivet validate` + +**Files:** +- Modify: `rivet-cli/src/main.rs` (in the validate handler) + +**Step 1: Update validate to load externals and check cross-repo links** + +In the existing validate command handler, after loading local artifacts and +running local validation, add: + +```rust +// Cross-repo link validation +if let Some(ref externals) = config.externals { + if !externals.is_empty() { + let resolved = rivet_core::externals::load_all_externals(externals, &cli.project)?; + + // Build external ID sets + let mut external_ids: std::collections::BTreeMap<String, std::collections::HashSet<String>> = + std::collections::BTreeMap::new(); + for ext in &resolved { + let ids: std::collections::HashSet<String> = + ext.artifacts.iter().map(|a| a.id.clone()).collect(); + external_ids.insert(ext.prefix.clone(), ids); + } + + // Collect all link targets from local artifacts + let local_ids: std::collections::HashSet<String> = + store.all().map(|a| a.id.clone()).collect(); + let all_refs: Vec<&str> = store + .all() + .flat_map(|a| a.links.iter().map(|l| l.target.as_str())) + .collect(); + + let broken = rivet_core::externals::validate_refs(&all_refs, &local_ids, &external_ids); + for 
b in &broken { + eprintln!(" broken cross-ref: {} — {:?}", b.reference, b.reason); + } + } +} +``` + +**Step 2: Build and test** + +Run: `cargo build -p rivet-cli && cargo test --all 2>&1` +Expected: compiles, all tests pass + +**Step 3: Commit** + +```bash +git add rivet-cli/src/main.rs +git commit -m "feat: integrate cross-repo link validation into rivet validate + +Implements: FEAT-038" +``` + +--- + +### Task 9: WASM asset embedding + +**Files:** +- Modify: `rivet-cli/src/serve.rs` +- Modify: `rivet-cli/build.rs` (create if needed) + +**Step 1: Add conditional include_bytes for WASM assets** + +At the top of `serve.rs`, add embedded asset constants: + +```rust +// Embedded WASM/JS assets for single-binary distribution. +// These are populated by the build script when assets exist. +#[cfg(feature = "embed-wasm")] +mod embedded_wasm { + pub const SPAR_JS: &str = include_str!("../assets/wasm/js/spar_wasm.js"); + pub const CORE_WASM: &[u8] = include_bytes!("../assets/wasm/js/spar_wasm.core.wasm"); + pub const CORE2_WASM: &[u8] = include_bytes!("../assets/wasm/js/spar_wasm.core2.wasm"); + pub const CORE3_WASM: &[u8] = include_bytes!("../assets/wasm/js/spar_wasm.core3.wasm"); +} +``` + +**Step 2: Update `wasm_asset` handler to serve from embedded** + +Replace the filesystem-based handler with: + +```rust +async fn wasm_asset(Path(path): Path<String>) -> impl IntoResponse { + let content_type = if path.ends_with(".js") { + "application/javascript" + } else if path.ends_with(".wasm") { + "application/wasm" + } else { + "application/octet-stream" + }; + + // Try embedded assets first + #[cfg(feature = "embed-wasm")] + { + let bytes: Option<&[u8]> = match path.as_str() { + "spar_wasm.js" => Some(embedded_wasm::SPAR_JS.as_bytes()), + "spar_wasm.core.wasm" => Some(embedded_wasm::CORE_WASM), + "spar_wasm.core2.wasm" => Some(embedded_wasm::CORE2_WASM), + "spar_wasm.core3.wasm" => Some(embedded_wasm::CORE3_WASM), + _ => None, + }; + if let Some(data) = bytes { + return ( + 
axum::http::StatusCode::OK, + [ + (axum::http::header::CONTENT_TYPE, content_type), + (axum::http::header::CACHE_CONTROL, "public, max-age=86400"), + ], + data.to_vec(), + ) + .into_response(); + } + } + + // Fallback to filesystem (development mode) + let candidates = [ + // ... existing filesystem candidates ... + ]; + // ... existing fallback logic ... +} +``` + +**Step 3: Add feature flag to `rivet-cli/Cargo.toml`** + +```toml +[features] +embed-wasm = [] +``` + +**Step 4: Build and test** + +Run: `cargo build -p rivet-cli --features embed-wasm 2>&1` +Expected: compiles (if assets exist), or skip this feature flag if they don't + +**Step 5: Commit** + +```bash +git add rivet-cli/src/serve.rs rivet-cli/Cargo.toml +git commit -m "feat: embed WASM/JS assets for single-binary distribution + +Implements: FEAT-037" +``` + +--- + +### Task 10: Dogfood artifacts + +**Files:** +- Modify: `artifacts/requirements.yaml` +- Modify: `artifacts/features.yaml` +- Modify: `artifacts/decisions.yaml` + +**Step 1: Add requirement artifacts** + +```yaml + - id: REQ-020 + type: requirement + title: Cross-repository artifact linking via prefixed IDs + status: draft + description: > + Rivet projects must be able to declare external dependencies on other + rivet repositories and reference their artifacts using prefix:ID syntax. + tags: [cross-repo, traceability] + fields: + priority: must + category: functional + + - id: REQ-021 + type: requirement + title: Distributed baselining via convention tags + status: draft + description: > + Multiple rivet repositories must be able to form consistent baselines + using git tags without requiring a central platform repository. 
+ tags: [cross-repo, baseline] + fields: + priority: should + category: functional + + - id: REQ-022 + type: requirement + title: Single-binary WASM asset embedding + status: draft + description: > + The rivet binary must optionally embed all WASM and JavaScript assets + so it can be distributed as a single self-contained executable. + tags: [packaging, wasm] + fields: + priority: should + category: functional +``` + +**Step 2: Add design decision artifacts** + +```yaml + - id: DD-014 + type: design-decision + title: Prefixed IDs over URI-style references + status: accepted + description: > + Cross-repo links use prefix:ID syntax (e.g., rivet:REQ-001) rather than + full URIs. Simpler to type, more readable in YAML. + links: + - type: satisfies + target: REQ-020 + tags: [cross-repo] + fields: + decision: Use prefix:ID syntax with prefix declared in rivet.yaml + rationale: > + Simpler and more readable than URIs. Prefix is a local alias + configured per project, matching sphinx-needs id_prefix pattern. + + - id: DD-015 + type: design-decision + title: Mesh topology over hub-and-spoke + status: accepted + links: + - type: satisfies + target: REQ-020 + tags: [cross-repo] + fields: + decision: Any repo can link to any other repo directly + rationale: > + Avoids central authority requirement. Matches distributed team + workflows. Transitive resolution handles indirect dependencies. + + - id: DD-016 + type: design-decision + title: Distributed baselining over centralized manifest + status: accepted + links: + - type: satisfies + target: REQ-021 + tags: [cross-repo, baseline] + fields: + decision: Repos tag themselves with baseline/* tags; consistency verified not enforced + rationale: > + No platform repo required. Each repo joins baselines independently. + Matches OSLC global configuration model where contributions are optional. 
+ + - id: DD-017 + type: design-decision + title: Transitive dependency resolution + status: accepted + links: + - type: satisfies + target: REQ-020 + tags: [cross-repo] + fields: + decision: Declare direct dependencies only; discover transitively + rationale: > + Scales naturally. Avoids redundant declarations. Similar to cargo/npm + dependency resolution. +``` + +**Step 3: Add feature artifacts** + +```yaml + - id: FEAT-033 + type: feature + title: Externals config block and prefix resolution + status: draft + links: + - type: satisfies + target: REQ-020 + tags: [cross-repo] + + - id: FEAT-034 + type: feature + title: rivet sync — fetch external repos + status: draft + links: + - type: satisfies + target: REQ-020 + tags: [cross-repo, cli] + + - id: FEAT-035 + type: feature + title: rivet lock — pin externals to commits + status: draft + links: + - type: satisfies + target: REQ-020 + tags: [cross-repo, cli] + + - id: FEAT-036 + type: feature + title: rivet baseline verify — cross-repo validation + status: draft + links: + - type: satisfies + target: REQ-021 + tags: [cross-repo, baseline, cli] + + - id: FEAT-037 + type: feature + title: Embedded WASM/JS assets for single binary + status: draft + links: + - type: satisfies + target: REQ-022 + tags: [packaging, wasm] + + - id: FEAT-038 + type: feature + title: Cross-repo link validation in rivet validate + status: draft + links: + - type: satisfies + target: REQ-020 + tags: [cross-repo, validation] + + - id: FEAT-039 + type: feature + title: Dashboard external project browsing + status: draft + links: + - type: satisfies + target: REQ-020 + tags: [cross-repo, dashboard] +``` + +**Step 4: Validate** + +Run: `cargo build --release && ./target/release/rivet validate 2>&1` +Expected: PASS + +**Step 5: Commit** + +```bash +git add artifacts/requirements.yaml artifacts/features.yaml artifacts/decisions.yaml +git commit -m "feat: dogfood cross-repo linking artifacts (REQ/DD/FEAT) + +Implements: FEAT-033 +Trace: skip" +``` + 
+--- + +### Task 11: Documentation — reference links and built-in docs update + +**Files:** +- Modify: `rivet-cli/src/docs.rs` + +**Step 1: Add cross-repo docs topic** + +Add a `DocTopic` entry for `"cross-repo"` and a `CROSS_REPO_DOC` constant +covering the externals config, prefix syntax, sync, lock, and baseline +commands. + +**Step 2: Add methodology references to schema topics** + +Update the `schema/stpa` topic content to append a References section: + +``` +## References + +- Leveson, N.G. & Thomas, J.P. (2018). *STPA Handbook*. + MIT Partnership for Systems Approaches to Safety and Security (PSASS). + https://psas.scripts.mit.edu/home/get_file.php?name=STPA_handbook.pdf +- Leveson, N.G. (2011). *Engineering a Safer World*. + MIT Press. https://mitpress.mit.edu/9780262533690/ +``` + +Similarly for `schema/aspice` and `schema/cybersecurity` with their relevant +standard references. + +**Step 3: Build and test** + +Run: `cargo build -p rivet-cli && ./target/release/rivet docs cross-repo 2>&1` +Expected: displays the topic + +**Step 4: Commit** + +```bash +git add rivet-cli/src/docs.rs +git commit -m "docs: add cross-repo linking topic and methodology references + +Implements: FEAT-033 +Trace: skip" +``` + +--- + +### Task 12: Baseline verify (Phase A) + +**Files:** +- Modify: `rivet-core/src/externals.rs` +- Modify: `rivet-cli/src/main.rs` + +**Step 1: Write failing test for baseline tag discovery** + +```rust +#[test] +fn check_baseline_tag_reports_missing() { + // Simulate a repo without the baseline tag + let result = BaselineStatus::Missing; + assert!(!result.is_present()); +} + +#[test] +fn check_baseline_tag_reports_present() { + let result = BaselineStatus::Present { + commit: "abc123".into(), + }; + assert!(result.is_present()); +} +``` + +**Step 2: Implement baseline types and git tag check** + +```rust +#[derive(Debug, Clone)] +pub enum BaselineStatus { + Present { commit: String }, + Missing, +} + +impl BaselineStatus { + pub fn is_present(&self) -> 
bool { + matches!(self, BaselineStatus::Present { .. }) + } +} + +/// Check if a git repo has a specific baseline tag. +pub fn check_baseline_tag( + repo_dir: &Path, + baseline_name: &str, +) -> Result<BaselineStatus, crate::error::Error> { + let tag = format!("baseline/{baseline_name}"); + let output = Command::new("git") + .args(["rev-parse", "--verify", &format!("refs/tags/{tag}")]) + .current_dir(repo_dir) + .output() + .map_err(|e| crate::error::Error::Io(format!("git rev-parse: {e}")))?; + + if output.status.success() { + let commit = String::from_utf8_lossy(&output.stdout).trim().to_string(); + Ok(BaselineStatus::Present { commit }) + } else { + Ok(BaselineStatus::Missing) + } +} +``` + +**Step 3: Add CLI subcommand** + +```rust + /// Manage distributed baselines across repos + Baseline { + #[command(subcommand)] + action: BaselineAction, + }, +``` + +```rust +#[derive(Debug, Subcommand)] +enum BaselineAction { + /// Verify baseline consistency across all externals + Verify { + /// Baseline name (e.g., "v1.0") + name: String, + /// Fail on missing baseline tags + #[arg(long)] + strict: bool, + }, + /// List baselines found across externals + List, +} +``` + +**Step 4: Implement verify handler** + +The handler checks each external for the `baseline/<name>` tag, syncs at +that tag if present, validates cross-links, and reports status per project. + +**Step 5: Commit** + +```bash +git add rivet-core/src/externals.rs rivet-cli/src/main.rs +git commit -m "feat: distributed baseline verify via convention tags + +Implements: FEAT-036" +``` + +--- + +Plan complete and saved to `docs/plans/2026-03-10-cross-repo-linking-plan.md`. Two execution options: + +**1. Subagent-Driven (this session)** — I dispatch fresh subagent per task, review between tasks, fast iteration + +**2. Parallel Session (separate)** — Open new session with executing-plans, batch execution with checkpoints + +Which approach? 
\ No newline at end of file diff --git a/fuzz/Cargo.lock b/fuzz/Cargo.lock new file mode 100644 index 0000000..c185d6a --- /dev/null +++ b/fuzz/Cargo.lock @@ -0,0 +1,322 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 4 + +[[package]] +name = "anyhow" +version = "1.0.102" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c" + +[[package]] +name = "arbitrary" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3d036a3c4ab069c7b410a2ce876bd74808d2d0888a82667669f8e783a898bf1" + +[[package]] +name = "cc" +version = "1.2.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aebf35691d1bfb0ac386a69bac2fde4dd276fb618cf8bf4f5318fe285e821bb2" +dependencies = [ + "find-msvc-tools", + "jobserver", + "libc", + "shlex", +] + +[[package]] +name = "cfg-if" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "find-msvc-tools" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582" + +[[package]] +name = "fixedbitset" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" + +[[package]] +name = "getrandom" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" +dependencies = [ + "cfg-if", + "libc", + "r-efi", 
+ "wasip2", +] + +[[package]] +name = "hashbrown" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" + +[[package]] +name = "indexmap" +version = "2.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" +dependencies = [ + "equivalent", + "hashbrown", +] + +[[package]] +name = "itoa" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" + +[[package]] +name = "jobserver" +version = "0.1.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33" +dependencies = [ + "getrandom", + "libc", +] + +[[package]] +name = "libc" +version = "0.2.183" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5b646652bf6661599e1da8901b3b9522896f01e736bad5f723fe7a3a27f899d" + +[[package]] +name = "libfuzzer-sys" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f12a681b7dd8ce12bff52488013ba614b869148d54dd79836ab85aafdd53f08d" +dependencies = [ + "arbitrary", + "cc", +] + +[[package]] +name = "log" +version = "0.4.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" + +[[package]] +name = "memchr" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" + +[[package]] +name = "petgraph" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" +dependencies = [ + "fixedbitset", + "indexmap", +] + 
+[[package]] +name = "proc-macro2" +version = "1.0.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quick-xml" +version = "0.37.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "331e97a1af0bf59823e6eadffe373d7b27f485be8748f71471c662c1f269b7fb" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "quote" +version = "1.0.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41f2619966050689382d2b44f664f4bc593e129785a36d6ee376ddf37259b924" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "r-efi" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + +[[package]] +name = "rivet-core" +version = "0.1.0" +dependencies = [ + "anyhow", + "log", + "petgraph", + "quick-xml", + "serde", + "serde_json", + "serde_yaml", + "thiserror", +] + +[[package]] +name = "rivet-fuzz" +version = "0.0.0" +dependencies = [ + "libfuzzer-sys", + "rivet-core", + "serde_yaml", +] + +[[package]] +name = "ryu" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9774ba4a74de5f7b1c1451ed6cd5285a32eddb5cccb8cc655a4e50009e06477f" + +[[package]] +name = "serde" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.228" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.149" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" +dependencies = [ + "itoa", + "memchr", + "serde", + "serde_core", + "zmij", +] + +[[package]] +name = "serde_yaml" +version = "0.9.34+deprecated" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" +dependencies = [ + "indexmap", + "itoa", + "ryu", + "serde", + "unsafe-libyaml", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "syn" +version = "2.0.117" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "thiserror" +version = "2.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4288b5bcbc7920c07a1149a35cf9590a2aa808e0bc1eafaade0b80947865fbc4" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc4ee7f67670e9b64d05fa4253e753e016c6c95ff35b89b7941d6b856dec1d5" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "unicode-ident" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75" + +[[package]] +name = "unsafe-libyaml" +version = "0.2.11" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" + +[[package]] +name = "wasip2" +version = "1.0.2+wasi-0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "wit-bindgen" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" + +[[package]] +name = "zmij" +version = "1.0.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" diff --git a/fuzz/Cargo.toml b/fuzz/Cargo.toml new file mode 100644 index 0000000..7471afe --- /dev/null +++ b/fuzz/Cargo.toml @@ -0,0 +1,36 @@ +[package] +name = "rivet-fuzz" +version = "0.0.0" +publish = false +edition = "2024" + +[package.metadata] +cargo-fuzz = true + +[dependencies] +libfuzzer-sys = "0.4" +rivet-core = { path = "../rivet-core", default-features = false } +serde_yaml = "0.9" + +# Prevent this from being included in workspace +[workspace] + +[[bin]] +name = "fuzz_yaml_artifact" +path = "fuzz_targets/fuzz_yaml_artifact.rs" +doc = false + +[[bin]] +name = "fuzz_schema_merge" +path = "fuzz_targets/fuzz_schema_merge.rs" +doc = false + +[[bin]] +name = "fuzz_reqif_import" +path = "fuzz_targets/fuzz_reqif_import.rs" +doc = false + +[[bin]] +name = "fuzz_document_parse" +path = "fuzz_targets/fuzz_document_parse.rs" +doc = false diff --git a/fuzz/fuzz_targets/fuzz_document_parse.rs b/fuzz/fuzz_targets/fuzz_document_parse.rs new file mode 100644 index 0000000..8fb4635 --- /dev/null +++ b/fuzz/fuzz_targets/fuzz_document_parse.rs @@ -0,0 +1,21 @@ +#![no_main] + +use libfuzzer_sys::fuzz_target; +use rivet_core::document::parse_document; + +fuzz_target!(|data: &[u8]| { + let Ok(s) = 
std::str::from_utf8(data) else { + return; + }; + + // Feed arbitrary strings into the document frontmatter parser. + // This exercises: + // - split_frontmatter (--- delimiter detection) + // - YAML frontmatter deserialization + // - extract_references ([[ID]] scanning) + // - extract_sections (heading-level detection) + // + // Errors from missing/malformed frontmatter are expected and gracefully + // returned. Only panics indicate real bugs. + let _ = parse_document(s, None); +}); diff --git a/fuzz/fuzz_targets/fuzz_reqif_import.rs b/fuzz/fuzz_targets/fuzz_reqif_import.rs new file mode 100644 index 0000000..e47f002 --- /dev/null +++ b/fuzz/fuzz_targets/fuzz_reqif_import.rs @@ -0,0 +1,15 @@ +#![no_main] + +use libfuzzer_sys::fuzz_target; +use rivet_core::reqif::parse_reqif; + +fuzz_target!(|data: &[u8]| { + let Ok(s) = std::str::from_utf8(data) else { + return; + }; + + // Feed arbitrary strings into the ReqIF XML parser. + // Valid errors (malformed XML, missing elements) are expected — only + // panics or infinite loops indicate real bugs. + let _ = parse_reqif(s, &std::collections::HashMap::new()); +}); diff --git a/fuzz/fuzz_targets/fuzz_schema_merge.rs b/fuzz/fuzz_targets/fuzz_schema_merge.rs new file mode 100644 index 0000000..6c1bb4a --- /dev/null +++ b/fuzz/fuzz_targets/fuzz_schema_merge.rs @@ -0,0 +1,40 @@ +#![no_main] + +use libfuzzer_sys::fuzz_target; +use rivet_core::schema::{Schema, SchemaFile}; + +fuzz_target!(|data: &[u8]| { + let Ok(s) = std::str::from_utf8(data) else { + return; + }; + + // Try to parse the fuzzed input as a SchemaFile. + let Ok(fuzzed_schema) = serde_yaml::from_str::<SchemaFile>(s) else { + return; + }; + + // Build a minimal base schema to merge with. 
+ let base_yaml = r#" +schema: + name: base + version: "0.1.0" +artifact-types: [] +link-types: [] +traceability-rules: [] +"#; + let base_schema: SchemaFile = serde_yaml::from_str(base_yaml).unwrap(); + + // Merge the base schema with the fuzzed schema — this exercises the + // HashMap insertion, inverse-map building, and traceability-rule + // collection logic in Schema::merge. + let merged = Schema::merge(&[base_schema, fuzzed_schema]); + + // Poke the lookup methods to make sure they don't panic on arbitrary data. + for type_name in merged.artifact_types.keys() { + let _ = merged.artifact_type(type_name); + } + for link_name in merged.link_types.keys() { + let _ = merged.link_type(link_name); + let _ = merged.inverse_of(link_name); + } +}); diff --git a/fuzz/fuzz_targets/fuzz_yaml_artifact.rs b/fuzz/fuzz_targets/fuzz_yaml_artifact.rs new file mode 100644 index 0000000..b4333aa --- /dev/null +++ b/fuzz/fuzz_targets/fuzz_yaml_artifact.rs @@ -0,0 +1,25 @@ +#![no_main] + +use libfuzzer_sys::fuzz_target; +use rivet_core::model::Artifact; + +fuzz_target!(|data: &[u8]| { + let Ok(s) = std::str::from_utf8(data) else { + return; + }; + + // First, try to deserialize a single Artifact directly from the fuzzed YAML. + let _ = serde_yaml::from_str::<Artifact>(s); + + // Try to deserialize as a list of artifacts (the format used by generic-yaml files). + let _ = serde_yaml::from_str::<Vec<Artifact>>(s); + + // Try to parse as a generic YAML value and check whether it has an "artifacts" key, + // which is the top-level structure used by the generic-yaml adapter. + if let Ok(value) = serde_yaml::from_str::<serde_yaml::Value>(s) { + if let Some(artifacts) = value.get("artifacts") { + // Attempt to interpret the value under "artifacts" as a `Vec<Artifact>`. 
+ let _ = serde_yaml::from_value::>(artifacts.clone()); + } + } +}); diff --git a/rivet-cli/build.rs b/rivet-cli/build.rs index e127f85..bfe927a 100644 --- a/rivet-cli/build.rs +++ b/rivet-cli/build.rs @@ -1,7 +1,11 @@ +use std::path::Path; use std::process::Command; fn main() { - // Emit git metadata as compile-time environment variables. + // ── WASM asset build (spar) ──────────────────────────────────────── + build_wasm_assets(); + + // ── Git metadata ─────────────────────────────────────────────────── println!("cargo:rerun-if-changed=../.git/HEAD"); println!("cargo:rerun-if-changed=../.git/index"); @@ -58,3 +62,118 @@ fn main() { println!("cargo:rustc-env=RIVET_GIT_UNTRACKED={untracked}"); println!("cargo:rustc-env=RIVET_BUILD_DATE={build_date}"); } + +/// Build spar WASM assets if they are missing and spar repo is available. +/// +/// Checks `SPAR_DIR` env var, then `../spar` as default location. +/// Skips silently if spar is not found (WASM features are optional). +fn build_wasm_assets() { + let wasm_js = Path::new("assets/wasm/js/spar_wasm.js"); + let wasm_core = Path::new("assets/wasm/js/spar_wasm.core.wasm"); + + // Rebuild whenever the build script or existing assets change. + println!("cargo:rerun-if-changed=../scripts/build-wasm.sh"); + println!("cargo:rerun-if-changed=assets/wasm/js/spar_wasm.js"); + + // Locate the spar repository. + let spar_dir = std::env::var("SPAR_DIR").unwrap_or_else(|_| "../spar".to_string()); + let spar_path = Path::new(&spar_dir); + let spar_wasm_crate = spar_path.join("crates/spar-wasm"); + + // Compare local spar HEAD against the rev pinned in Cargo.toml. + if spar_path.join(".git").exists() { + check_spar_version_drift(&spar_dir); + } + + if wasm_js.exists() && wasm_core.exists() { + return; // Assets already present, nothing to do. + } + + if !spar_wasm_crate.exists() { + println!( + "cargo:warning=WASM assets missing and spar repo not found at {spar_dir}. 
\ + Set SPAR_DIR env var or run: ./scripts/build-wasm.sh /path/to/spar" + ); + return; + } + + // Run the build script from the workspace root. + println!("cargo:warning=Building spar WASM assets from {spar_dir}..."); + let status = Command::new("bash") + .arg("../scripts/build-wasm.sh") + .arg(&spar_dir) + .status(); + + match status { + Ok(s) if s.success() => { + println!("cargo:warning=spar WASM assets built successfully."); + } + Ok(s) => { + println!( + "cargo:warning=WASM build script exited with {}. \ + Dashboard AADL rendering may not work.", + s + ); + } + Err(e) => { + println!("cargo:warning=Failed to run WASM build script: {e}"); + } + } +} + +/// Compare the local spar repo HEAD against the rev pinned in workspace Cargo.toml. +/// Warns if they differ so developers know to bump the dep or update spar. +fn check_spar_version_drift(spar_dir: &str) { + // Read the pinned rev from Cargo.toml. + let cargo_toml = Path::new("../Cargo.toml"); + let pinned_rev = match std::fs::read_to_string(cargo_toml) { + Ok(content) => { + // Look for: spar-hir = { ... rev = "XXXXXXX" ... } + content + .lines() + .find(|l| l.contains("spar-hir") && l.contains("rev")) + .and_then(|line| { + let after_rev = line.split("rev = \"").nth(1)?; + Some(after_rev.split('"').next()?.to_string()) + }) + } + Err(_) => None, + }; + + let Some(pinned) = pinned_rev else { + return; // Can't determine pinned rev, skip check. + }; + + // Get the local spar HEAD. + let local_head = Command::new("git") + .args(["rev-parse", "--short=7", "HEAD"]) + .current_dir(spar_dir) + .output() + .ok() + .filter(|o| o.status.success()) + .map(|o| String::from_utf8_lossy(&o.stdout).trim().to_string()); + + let Some(head) = local_head else { + return; + }; + + // Compare (short revs — check prefix match). + let pinned_short = &pinned[..pinned.len().min(7)]; + if !head.starts_with(pinned_short) && !pinned_short.starts_with(&head) { + // Count distance. 
+ let distance = Command::new("git") + .args(["rev-list", "--count", &format!("{pinned}..HEAD")]) + .current_dir(spar_dir) + .output() + .ok() + .filter(|o| o.status.success()) + .map(|o| String::from_utf8_lossy(&o.stdout).trim().to_string()) + .unwrap_or_else(|| "?".to_string()); + + println!( + "cargo:warning=spar version drift: Cargo.toml pins rev {pinned}, \ + but local spar is at {head} ({distance} commits ahead). \ + Consider: cargo update -p spar-hir -p spar-analysis" + ); + } +} diff --git a/rivet-cli/src/docs.rs b/rivet-cli/src/docs.rs index 9bb20cc..a23d3db 100644 --- a/rivet-cli/src/docs.rs +++ b/rivet-cli/src/docs.rs @@ -45,6 +45,12 @@ const TOPICS: &[DocTopic] = &[ category: "Reference", content: DOCUMENTS_DOC, }, + DocTopic { + slug: "commit-traceability", + title: "Commit-to-artifact traceability via git trailers", + category: "Reference", + content: COMMIT_TRACEABILITY_DOC, + }, DocTopic { slug: "schema/common", title: "Common base fields and link types", @@ -216,6 +222,8 @@ rivet matrix --from X --to Y Traceability matrix between types rivet diff Compare artifact versions rivet export -f FORMAT Export to reqif or generic-yaml rivet serve [-P PORT] Start HTMX dashboard (default: 3000) +rivet commits [--since N] Commit-artifact traceability analysis +rivet commit-msg-check F Validate commit message trailers (hook) ``` ## Schema Commands @@ -460,6 +468,130 @@ Documents participate in validation: - **Orphan detection**: Artifacts never referenced in any document are flagged "#; +const COMMIT_TRACEABILITY_DOC: &str = r#"# Commit-to-Artifact Traceability + +Rivet tracks which git commits implement, fix, verify, or otherwise relate +to artifacts using **git trailers** — standard key-value pairs in commit +message footers. 
+ +## Configuration + +Add a `commits:` block to `rivet.yaml`: + +```yaml +commits: + format: trailers # Only "trailers" is supported currently + trailers: # Maps trailer key → link type + Implements: implements + Fixes: fixes + Verifies: verifies + Satisfies: satisfies + Refs: traces-to + exempt-types: # Conventional-commit types that skip checks + - chore + - style + - ci + - docs + - build + skip-trailer: "Trace: skip" # Explicit opt-out trailer + traced-paths: # Only commits touching these paths are orphans + - src/ + trace-exempt-artifacts: [] # Artifacts that won't be flagged as unimplemented +``` + +## Commit Message Format + +Reference artifacts using configured trailer keys in the commit footer: + +``` +feat(parser): add streaming token support + +Reworked the parser to handle streaming tokens for better +memory efficiency in large files. + +Implements: FEAT-042 +Fixes: REQ-015 +``` + +Multiple artifact IDs can be listed on one line, separated by commas: + +``` +Implements: FEAT-042, FEAT-043 +Verifies: REQ-015, REQ-016 +``` + +## Exemption Mechanisms + +There are two ways to opt out of trailer requirements: + +1. **Conventional-commit type exemption:** Commits whose type (the prefix + before `:`) matches `exempt-types` are automatically exempt. For example, + `chore: update deps` is exempt if `chore` is in the list. + +2. **Explicit skip trailer:** Add the configured `skip-trailer` value to any + commit message to skip validation: + + ``` + refactor: rename internal helper + + Trace: skip + ``` + +## Pre-Commit Hook + +Rivet provides a commit-msg hook for the [pre-commit](https://pre-commit.com) +framework. 
Add it to `.pre-commit-config.yaml`: + +```yaml +- repo: local + hooks: + - id: rivet-commit-msg + name: rivet commit-msg check + entry: rivet commit-msg-check + language: system + stages: [commit-msg] + always_run: true +``` + +The hook validates each commit message: +- Checks that at least one artifact trailer is present +- Verifies that referenced artifact IDs exist in the project +- Suggests close matches for typos (Levenshtein distance) +- Passes exempt commits and those with the skip trailer + +## `rivet commits` Command + +Analyze the full git history for commit-artifact traceability: + +``` +rivet commits # Analyze all commits +rivet commits --since 30 # Only last 30 days +rivet commits --range main..dev # Specific commit range +rivet commits --format json # Machine-readable output +rivet commits --strict # Exit 1 if any orphan or broken ref +``` + +### Report Sections + +1. **Linked commits** — Commits with valid artifact trailers +2. **Broken references** — Commits referencing non-existent artifact IDs +3. **Orphan commits** — Non-exempt commits touching `traced-paths` without trailers +4. **Artifact coverage** — How many artifacts have at least one linked commit +5. **Unimplemented artifacts** — Artifacts with no commit evidence (minus exemptions) + +### Path-Based Orphan Detection + +Only commits that modify files under `traced-paths` are flagged as orphans. +Commits that only touch documentation, CI config, or other non-traced paths +are not considered orphans even without trailers. + +### Exempt Artifact Whitelist + +Use `trace-exempt-artifacts` to list artifact IDs that should not appear in +the "unimplemented" report — useful when retrofitting traceability onto an +existing project where historical commits lack trailers. +"#; + // ── Public API ────────────────────────────────────────────────────────── /// List all available documentation topics. 
diff --git a/rivet-cli/src/main.rs b/rivet-cli/src/main.rs index e258597..eaf16c0 100644 --- a/rivet-cli/src/main.rs +++ b/rivet-cli/src/main.rs @@ -1,3 +1,4 @@ +use std::collections::HashSet; use std::path::PathBuf; use std::process::ExitCode; @@ -219,6 +220,28 @@ enum Command { /// Generate .rivet/agent-context.md from current project state Context, + /// Validate a commit message for artifact trailers (pre-commit hook) + CommitMsgCheck { + /// Path to the commit message file + file: PathBuf, + }, + + /// Analyze git commit history for artifact traceability + Commits { + /// Only analyze commits after this date (YYYY-MM-DD) + #[arg(long)] + since: Option, + /// Git revision range (e.g., "main..HEAD") + #[arg(long)] + range: Option, + /// Output format: "text" (default) or "json" + #[arg(short, long, default_value = "text")] + format: String, + /// Promote warnings to errors + #[arg(long)] + strict: bool, + }, + /// Start the HTMX-powered dashboard server Serve { /// Port to listen on @@ -323,9 +346,15 @@ fn run(cli: Cli) -> Result { if let Command::Context = &cli.command { return cmd_context(&cli); } + if let Command::CommitMsgCheck { file } = &cli.command { + return cmd_commit_msg_check(&cli, file); + } match &cli.command { - Command::Init { .. } | Command::Docs { .. } | Command::Context => unreachable!(), + Command::Init { .. } + | Command::Docs { .. } + | Command::Context + | Command::CommitMsgCheck { .. 
} => unreachable!(), Command::Stpa { path, schema } => cmd_stpa(path, schema.as_deref(), &cli), Command::Validate { format } => cmd_validate(&cli, format), Command::List { @@ -347,6 +376,12 @@ fn run(cli: Cli) -> Result { } Command::Export { format, output } => cmd_export(&cli, format, output.as_deref()), Command::Schema { action } => cmd_schema(&cli, action), + Command::Commits { + since, + range, + format, + strict, + } => cmd_commits(&cli, since.as_deref(), range.as_deref(), format, *strict), Command::Serve { port } => { let port = *port; let ( @@ -1556,6 +1591,366 @@ fn cmd_context(cli: &Cli) -> Result { Ok(true) } +// ── commit-msg-check ───────────────────────────────────────────────────── + +fn cmd_commit_msg_check(cli: &Cli, file: &std::path::Path) -> Result { + use std::collections::BTreeMap; + + // Read commit message file + let raw = std::fs::read_to_string(file) + .with_context(|| format!("reading commit message file '{}'", file.display()))?; + + // Strip comment lines (lines starting with #) + let message: String = raw + .lines() + .filter(|line| !line.starts_with('#')) + .collect::>() + .join("\n"); + let message = message.trim(); + + if message.is_empty() { + // Empty commit message — let git itself handle that + return Ok(true); + } + + // Try to load rivet.yaml for commits config + let config_path = cli.project.join("rivet.yaml"); + let config = match rivet_core::load_project_config(&config_path) { + Ok(c) => c, + Err(_) => { + // No rivet.yaml or invalid — pass silently + log::debug!("no rivet.yaml found, skipping commit-msg check"); + return Ok(true); + } + }; + + let commits_cfg = match &config.commits { + Some(c) => c, + None => { + // No commits config — pass silently + log::debug!("no commits config in rivet.yaml, skipping commit-msg check"); + return Ok(true); + } + }; + + // Extract subject (first line) + let subject = message.lines().next().unwrap_or(""); + + // Check exempt type + if let Some(ct) = 
rivet_core::commits::parse_commit_type(subject) { + if commits_cfg.exempt_types.iter().any(|et| et == &ct) { + log::debug!("commit type '{ct}' is exempt"); + return Ok(true); + } + } + + // Check skip trailer + if message + .lines() + .any(|line| line.trim() == commits_cfg.skip_trailer) + { + log::debug!("skip trailer found"); + return Ok(true); + } + + // Parse artifact trailers + let trailer_map: &BTreeMap = &commits_cfg.trailers; + let (artifact_refs, _) = + rivet_core::commits::parse_commit_message(message, trailer_map, &commits_cfg.skip_trailer); + + let all_ids: Vec = artifact_refs.values().flatten().cloned().collect(); + + if all_ids.is_empty() { + eprintln!("error: commit message has no artifact trailers"); + eprintln!(); + eprintln!("Add one of the following trailers to your commit message:"); + for (trailer_key, link_type) in trailer_map { + eprintln!(" {trailer_key}: (link type: {link_type})"); + } + eprintln!(); + eprintln!("Or add '{}' to skip this check.", commits_cfg.skip_trailer); + if !commits_cfg.exempt_types.is_empty() { + eprintln!( + "Exempt commit types: {}", + commits_cfg.exempt_types.join(", ") + ); + } + return Ok(false); + } + + // Load store to validate artifact IDs + let schemas_dir = resolve_schemas_dir(cli); + let schema = match rivet_core::load_schemas(&config.project.schemas, &schemas_dir) { + Ok(s) => s, + Err(e) => { + log::warn!("could not load schemas: {e}; skipping ID validation"); + return Ok(true); + } + }; + let _ = schema; // we only need the store, not schema validation + + let mut store = Store::new(); + for source in &config.sources { + match rivet_core::load_artifacts(source, &cli.project) { + Ok(artifacts) => { + for a in artifacts { + store.upsert(a); + } + } + Err(e) => { + log::warn!( + "could not load source '{}': {e}; skipping ID validation", + source.path + ); + return Ok(true); + } + } + } + + // Validate each referenced artifact ID + let known_ids: HashSet = store.iter().map(|a| a.id.clone()).collect(); + let 
mut unknown = Vec::new(); + for id in &all_ids { + if !known_ids.contains(id) { + unknown.push(id.clone()); + } + } + + if unknown.is_empty() { + return Ok(true); + } + + // Report unknown IDs with fuzzy suggestions + eprintln!("error: commit references unknown artifact IDs:"); + for uid in &unknown { + eprint!(" {uid}"); + // Find closest match via Levenshtein + let mut best: Option<(&str, usize)> = None; + for kid in &known_ids { + let d = levenshtein(uid, kid); + if d <= 3 { + match best { + Some((_, bd)) if d < bd => best = Some((kid, d)), + None => best = Some((kid, d)), + _ => {} + } + } + } + if let Some((suggestion, _)) = best { + eprint!(" (did you mean '{suggestion}'?)"); + } + eprintln!(); + } + Ok(false) +} + +// ── commits ────────────────────────────────────────────────────────────── + +fn cmd_commits( + cli: &Cli, + since: Option<&str>, + range: Option<&str>, + format: &str, + strict: bool, +) -> Result { + use std::collections::BTreeMap; + + // Load project config + let config_path = cli.project.join("rivet.yaml"); + let config = rivet_core::load_project_config(&config_path) + .with_context(|| format!("loading {}", config_path.display()))?; + + let commits_cfg = config + .commits + .as_ref() + .ok_or_else(|| anyhow::anyhow!("no 'commits' section in rivet.yaml"))?; + + // Load artifacts into store + let schemas_dir = resolve_schemas_dir(cli); + let _schema = rivet_core::load_schemas(&config.project.schemas, &schemas_dir) + .context("loading schemas")?; + + let mut store = Store::new(); + for source in &config.sources { + let artifacts = rivet_core::load_artifacts(source, &cli.project) + .with_context(|| format!("loading source '{}'", source.path))?; + for a in artifacts { + store.upsert(a); + } + } + + let known_ids: HashSet = store.iter().map(|a| a.id.clone()).collect(); + + // Determine git range + let git_range = if let Some(r) = range { + r.to_string() + } else if let Some(s) = since { + format!("--since={s} HEAD") + } else { + "HEAD".to_string() 
+ }; + + // Resolve project path for git + let project_path = std::fs::canonicalize(&cli.project).unwrap_or_else(|_| cli.project.clone()); + + let trailer_map: &BTreeMap = &commits_cfg.trailers; + + let commits = rivet_core::commits::git_log_commits( + &project_path, + &git_range, + trailer_map, + &commits_cfg.skip_trailer, + ) + .context("running git log")?; + + let analysis = rivet_core::commits::analyze_commits( + commits, + &known_ids, + &commits_cfg.exempt_types, + &commits_cfg.traced_paths, + &commits_cfg.trace_exempt_artifacts, + trailer_map, + ); + + if format == "json" { + return cmd_commits_json(&analysis, strict); + } + + // Text output + let total = analysis.linked.len() + analysis.orphans.len() + analysis.exempt.len(); + + println!("Commit traceability analysis"); + println!("============================"); + println!(); + println!(" Linked: {:>4}", analysis.linked.len()); + println!(" Orphan: {:>4}", analysis.orphans.len()); + println!(" Exempt: {:>4}", analysis.exempt.len()); + println!(" Broken refs: {:>4}", analysis.broken_refs.len()); + println!(" Total: {:>4}", total); + + if !analysis.broken_refs.is_empty() { + println!(); + println!("Broken references:"); + for br in &analysis.broken_refs { + let short = if br.hash.len() > 8 { + &br.hash[..8] + } else { + &br.hash + }; + println!( + " {short} {}: unknown ID '{}' (trailer: {})", + br.subject, br.missing_id, br.link_type + ); + } + } + + if !analysis.orphans.is_empty() { + println!(); + println!("Orphan commits (no artifact trailers):"); + for c in &analysis.orphans { + let short = if c.hash.len() > 8 { + &c.hash[..8] + } else { + &c.hash + }; + println!(" {short} {}", c.subject); + } + } + + if !analysis.unimplemented.is_empty() { + println!(); + println!("Artifacts with no commit coverage:"); + for id in &analysis.unimplemented { + println!(" {id}"); + } + } + + // Coverage table + if !known_ids.is_empty() { + let covered = analysis.artifact_coverage.len(); + let trace_exempt_count = 
commits_cfg.trace_exempt_artifacts.len(); + let trackable = known_ids.len() - trace_exempt_count; + let pct = if trackable > 0 { + (covered as f64 / trackable as f64) * 100.0 + } else { + 100.0 + }; + println!(); + println!("Artifact coverage: {covered}/{trackable} ({pct:.1}%)"); + } + + // Exit code + let has_errors = !analysis.broken_refs.is_empty(); + let has_warnings = !analysis.orphans.is_empty() || !analysis.unimplemented.is_empty(); + let fail = has_errors || (strict && has_warnings); + Ok(!fail) +} + +fn cmd_commits_json(analysis: &rivet_core::commits::CommitAnalysis, strict: bool) -> Result { + let json = serde_json::json!({ + "summary": { + "linked": analysis.linked.len(), + "orphans": analysis.orphans.len(), + "exempt": analysis.exempt.len(), + "broken_refs": analysis.broken_refs.len(), + }, + "broken_refs": analysis.broken_refs.iter().map(|br| { + serde_json::json!({ + "hash": br.hash, + "subject": br.subject, + "missing_id": br.missing_id, + "link_type": br.link_type, + }) + }).collect::>(), + "orphans": analysis.orphans.iter().map(|c| { + serde_json::json!({ + "hash": c.hash, + "subject": c.subject, + "date": c.date, + }) + }).collect::>(), + "unimplemented": analysis.unimplemented.iter().collect::>(), + "artifact_coverage": analysis.artifact_coverage.iter().collect::>(), + }); + + println!( + "{}", + serde_json::to_string_pretty(&json).context("serializing JSON")? + ); + + let has_errors = !analysis.broken_refs.is_empty(); + let has_warnings = !analysis.orphans.is_empty() || !analysis.unimplemented.is_empty(); + let fail = has_errors || (strict && has_warnings); + Ok(!fail) +} + +/// Compute Levenshtein edit distance between two strings. 
+fn levenshtein(a: &str, b: &str) -> usize { + let a_len = a.len(); + let b_len = b.len(); + + if a_len == 0 { + return b_len; + } + if b_len == 0 { + return a_len; + } + + let mut prev: Vec = (0..=b_len).collect(); + let mut curr = vec![0; b_len + 1]; + + for (i, ca) in a.chars().enumerate() { + curr[0] = i + 1; + for (j, cb) in b.chars().enumerate() { + let cost = if ca == cb { 0 } else { 1 }; + curr[j + 1] = (prev[j] + cost).min(prev[j + 1] + 1).min(curr[j] + 1); + } + std::mem::swap(&mut prev, &mut curr); + } + + prev[b_len] +} + // ── Helpers ────────────────────────────────────────────────────────────── fn resolve_schemas_dir(cli: &Cli) -> PathBuf { diff --git a/rivet-cli/src/serve.rs b/rivet-cli/src/serve.rs index dc36f50..5ec89de 100644 --- a/rivet-cli/src/serve.rs +++ b/rivet-cli/src/serve.rs @@ -337,8 +337,8 @@ pub async fn run( .route("/help/rules", get(help_rules_view)) .route("/docs-asset/{*path}", get(docs_asset)) .route("/reload", post(reload_handler)) - .with_state(state) - .layer(axum::middleware::from_fn(redirect_non_htmx)); + .with_state(state.clone()) + .layer(axum::middleware::from_fn_with_state(state, wrap_full_page)); let addr = format!("0.0.0.0:{port}"); eprintln!("rivet dashboard listening on http://localhost:{port}"); @@ -348,9 +348,12 @@ pub async fn run( Ok(()) } -/// Middleware: redirect direct browser requests (no HX-Request header) to `/?goto=/path` -/// so the full layout is served and JS loads the content. -async fn redirect_non_htmx( +/// Middleware: for direct browser requests (no HX-Request header) to view routes, +/// wrap the handler's partial HTML in the full page layout. This replaces the old +/// `/?goto=` redirect pattern and fixes query-param loss, hash-fragment loss, and +/// the async replaceState race condition. 
+async fn wrap_full_page( + State(state): State, req: axum::extract::Request, next: axum::middleware::Next, ) -> axum::response::Response { @@ -358,21 +361,26 @@ async fn redirect_non_htmx( let is_htmx = req.headers().contains_key("hx-request"); let method = req.method().clone(); - // Only redirect GET requests to known view routes, not / or /reload or /api/* + let response = next.run(req).await; + + // Only wrap GET requests to view routes (not /, assets, or APIs) if method == axum::http::Method::GET && !is_htmx && path != "/" - && !path.starts_with("/?") && !path.starts_with("/api/") && !path.starts_with("/wasm/") && !path.starts_with("/source-raw/") && !path.starts_with("/docs-asset/") { - let goto = urlencoding::encode(&path); - return axum::response::Redirect::to(&format!("/?goto={goto}")).into_response(); + let bytes = axum::body::to_bytes(response.into_body(), 16 * 1024 * 1024) + .await + .unwrap_or_default(); + let content = String::from_utf8_lossy(&bytes); + let app = state.read().await; + return page_layout(&content, &app).into_response(); } - next.run(req).await + response } /// GET /api/links/{id} — return JSON array of AADL-prefixed artifact IDs linked @@ -580,7 +588,10 @@ async fn reload_handler( eprintln!("reload error: {e:#}"); ( axum::http::StatusCode::INTERNAL_SERVER_ERROR, - [("HX-Location", "{\"path\":\"/\",\"target\":\"#content\"}".to_owned())], + [( + "HX-Location", + "{\"path\":\"/\",\"target\":\"#content\"}".to_owned(), + )], format!("reload failed: {e}"), ) } @@ -608,19 +619,16 @@ async fn docs_asset( let file_path = dir.join(&path); if file_path.is_file() { if let Ok(bytes) = std::fs::read(&file_path) { - let content_type = match file_path - .extension() - .and_then(|e| e.to_str()) - .unwrap_or("") - { - "png" => "image/png", - "jpg" | "jpeg" => "image/jpeg", - "gif" => "image/gif", - "svg" => "image/svg+xml", - "webp" => "image/webp", - "pdf" => "application/pdf", - _ => "application/octet-stream", - }; + let content_type = + match 
file_path.extension().and_then(|e| e.to_str()).unwrap_or("") { + "png" => "image/png", + "jpg" | "jpeg" => "image/jpeg", + "gif" => "image/gif", + "svg" => "image/svg+xml", + "webp" => "image/webp", + "pdf" => "application/pdf", + _ => "application/octet-stream", + }; return ( axum::http::StatusCode::OK, [("Content-Type", content_type)], @@ -1323,10 +1331,6 @@ const GRAPH_JS: &str = r#" var p=window.location.pathname; if(p==='/'||p==='') p='/stats'; setActiveNav(p); - // If landing on a deep URL, load its content via HTMX - if(p!=='/stats'&&p!=='/'){ - htmx.ajax('GET',p,'#content'); - } }); // ── Browser back/forward ───────────────────────────────── @@ -2461,27 +2465,8 @@ document.addEventListener('DOMContentLoaded',renderMermaid); // ── Routes ─────────────────────────────────────────────────────────────── -#[derive(Debug, serde::Deserialize)] -struct IndexParams { - goto: Option, -} - -async fn index( - State(state): State, - Query(params): Query, -) -> Html { +async fn index(State(state): State) -> Html { let state = state.read().await; - // If goto param is set, render layout with empty content and let JS load the page - if let Some(ref goto) = params.goto { - let placeholder = format!( - "
\ - ", - html_escape(goto), - html_escape(goto), - html_escape(goto) - ); - return page_layout(&placeholder, &state); - } let inner = stats_partial(&state); page_layout(&inner, &state) } @@ -5449,7 +5434,7 @@ async fn source_file_view( } else if is_shell { "bash" } else if is_aadl { - "yaml" // AADL has similar key: value structure + "yaml" // AADL has similar key: value structure } else { "" }; @@ -5782,13 +5767,19 @@ fn highlight_rust_line(line: &str) -> String { i += 1; } let s: String = chars[start..i].iter().collect(); - out.push_str(&format!("{}", html_escape(&s))); + out.push_str(&format!( + "{}", + html_escape(&s) + )); continue; } // Char literals if ch == '\'' && i + 2 < len && chars[i + 2] == '\'' { let s: String = chars[i..i + 3].iter().collect(); - out.push_str(&format!("{}", html_escape(&s))); + out.push_str(&format!( + "{}", + html_escape(&s) + )); i += 3; continue; } @@ -5804,11 +5795,16 @@ fn highlight_rust_line(line: &str) -> String { // Numbers if ch.is_ascii_digit() && (i == 0 || !chars[i - 1].is_alphanumeric()) { let start = i; - while i < len && (chars[i].is_ascii_alphanumeric() || chars[i] == '_' || chars[i] == '.') { + while i < len + && (chars[i].is_ascii_alphanumeric() || chars[i] == '_' || chars[i] == '.') + { i += 1; } let s: String = chars[start..i].iter().collect(); - out.push_str(&format!("{}", html_escape(&s))); + out.push_str(&format!( + "{}", + html_escape(&s) + )); continue; } // Identifiers and keywords @@ -5819,7 +5815,13 @@ fn highlight_rust_line(line: &str) -> String { } let word: String = chars[start..i].iter().collect(); // Check for macro invocation: word! - if i < len && chars[i] == '!' && !matches!(word.as_str(), "if" | "else" | "return" | "break" | "continue") { + if i < len + && chars[i] == '!' 
+ && !matches!( + word.as_str(), + "if" | "else" | "return" | "break" | "continue" + ) + { out.push_str(&format!( "{}!", html_escape(&word) @@ -5828,12 +5830,12 @@ fn highlight_rust_line(line: &str) -> String { continue; } match word.as_str() { - "fn" | "let" | "mut" | "pub" | "use" | "mod" | "struct" | "enum" - | "impl" | "trait" | "const" | "static" | "type" | "where" | "match" - | "if" | "else" | "for" | "while" | "loop" | "return" | "break" - | "continue" | "async" | "await" | "move" | "ref" | "self" | "super" - | "crate" | "unsafe" | "extern" | "dyn" | "as" | "in" | "true" - | "false" | "Self" | "None" | "Some" | "Ok" | "Err" => { + "fn" | "let" | "mut" | "pub" | "use" | "mod" | "struct" | "enum" | "impl" + | "trait" | "const" | "static" | "type" | "where" | "match" | "if" | "else" + | "for" | "while" | "loop" | "return" | "break" | "continue" | "async" + | "await" | "move" | "ref" | "self" | "super" | "crate" | "unsafe" | "extern" + | "dyn" | "as" | "in" | "true" | "false" | "Self" | "None" | "Some" | "Ok" + | "Err" => { out.push_str(&format!( "{}", html_escape(&word) @@ -6018,15 +6020,9 @@ fn load_store_from_git_ref(pp: &std::path::Path, gr: &str) -> Result u16 { /// Start the rivet server and return (child, port). fn start_server() -> (Child, u16) { let port = free_port(); - let child = Command::new(rivet_bin()) + let mut child = Command::new(rivet_bin()) .args(["serve", "--port", &port.to_string()]) .current_dir(project_root()) .stdout(std::process::Stdio::null()) @@ -52,6 +52,9 @@ fn start_server() -> (Child, u16) { } std::thread::sleep(Duration::from_millis(100)); } + // Kill the child before panicking to avoid zombie processes. 
+ let _ = child.kill(); + let _ = child.wait(); panic!("server did not start within 5 seconds on port {port}"); } @@ -62,17 +65,10 @@ fn fetch(port: u16, path: &str, htmx: bool) -> (u16, String, Vec<(String, String // Use a minimal HTTP/1.1 request via TcpStream use std::io::{Read, Write}; - let mut stream = - std::net::TcpStream::connect(format!("127.0.0.1:{port}")).expect("connect"); - stream - .set_read_timeout(Some(Duration::from_secs(5))) - .ok(); - - let hx_header = if htmx { - "HX-Request: true\r\n" - } else { - "" - }; + let mut stream = std::net::TcpStream::connect(format!("127.0.0.1:{port}")).expect("connect"); + stream.set_read_timeout(Some(Duration::from_secs(5))).ok(); + + let hx_header = if htmx { "HX-Request: true\r\n" } else { "" }; let request = format!( "GET {path} HTTP/1.1\r\nHost: 127.0.0.1:{port}\r\n{hx_header}Connection: close\r\n\r\n" ); @@ -162,10 +158,7 @@ fn server_pages_push_url() { for page in &pages { let (status, body, _headers) = fetch(port, page, true); - assert!( - status == 200, - "GET {page} returned {status}, expected 200" - ); + assert!(status == 200, "GET {page} returned {status}, expected 200"); assert_links_push_url(&body, page); } @@ -174,36 +167,29 @@ fn server_pages_push_url() { } #[test] -fn non_htmx_request_redirects() { +fn non_htmx_request_serves_full_page() { let (mut child, port) = start_server(); - // A non-HTMX GET to /results should redirect via /?goto= - let (status, body, headers) = fetch(port, "/results", false); + // A non-HTMX GET to /results should return 200 with full page layout + // (wrap_full_page middleware wraps partial HTML in the shell) + let (status, body, _headers) = fetch(port, "/results", false); - // Should redirect (303) to /?goto=/results assert!( - status == 303 || status == 302 || status == 200, - "non-HTMX GET /results should redirect (303/302) or serve shell (200), got {status}" + status == 200, + "non-HTMX GET /results should return 200 with full page, got {status}" ); - if status == 303 
|| status == 302 { - // Check Location header contains goto - let location = headers - .iter() - .find(|(k, _)| k.eq_ignore_ascii_case("location")) - .map(|(_, v)| v.as_str()) - .unwrap_or(""); - assert!( - location.contains("goto") - && (location.contains("/results") || location.contains("%2Fresults")), - "redirect Location must contain /?goto=/results, got: {location}" - ); - } else { - assert!( - body.contains("goto") || body.contains("/results"), - "non-HTMX response must contain goto redirect for /results" - ); - } + // Must contain the full page shell (nav, layout) + assert!( + body.contains("