Skip to content

Commit 5d44e55

Browse files
author
fer
committed
feat(validation): Add structural validation aggregator, health assessment, example & tests
Introduces unified ValidationReport (grammar + field tetrad) and compute_structural_health with actionable recommendations. Adds CLI script structural_health_report.py and example structural_health_demo.py. Updates grammar error factory to accept glyph sequences (maps to canonical names). Provides unit tests for aggregator, health, telemetry emitter, grammar factory. Maintains read-only telemetry; preserves invariants; U4 deferred (no bifurcation call).
1 parent 28a82f4 commit 5d44e55

File tree

9 files changed

+892
-18
lines changed

9 files changed

+892
-18
lines changed

examples/structural_health_demo.py

Lines changed: 118 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,118 @@
1+
"""Minimal structural health + telemetry demo (Phase 3).
2+
3+
Shows integration of TelemetryEmitter with the structural validation
4+
aggregator and health summary utilities.
5+
6+
Run:
7+
python examples/structural_health_demo.py
8+
9+
Outputs:
10+
- Human-readable health summary
11+
- Telemetry JSONL lines (in-memory example)
12+
13+
Physics Alignment:
14+
Sequence chosen: [AL, UM, IL, SHA]
15+
- AL (Emission) : Generator (U1a)
16+
- UM (Coupling) : Requires phase compatibility (U3)
17+
- IL (Coherence) : Stabilizer (U2)
18+
- SHA (Silence) : Closure (U1b)
19+
This satisfies U1a initiation and U1b closure; includes stabilizer
20+
after coupling; safe canonical bootstrap variant.
21+
"""
22+
23+
from __future__ import annotations
24+
25+
import random
26+
from typing import List
27+
28+
try:
29+
import networkx as nx # type: ignore
30+
except ImportError: # pragma: no cover
31+
raise SystemExit("networkx required for demo")
32+
33+
from tnfr.metrics.telemetry import TelemetryEmitter
34+
from tnfr.validation.health import compute_structural_health
35+
from tnfr.validation.aggregator import run_structural_validation
36+
37+
38+
def _make_graph(n: int = 16, p: float = 0.15, seed: int = 42):
    """Build a small Erdős–Rényi graph with per-node phase / ΔNFR attributes.

    Seeding the global ``random`` module makes both the topology and the
    node attributes reproducible across runs.
    """
    random.seed(seed)
    graph = nx.erdos_renyi_graph(n, p)  # type: ignore
    two_pi = 2.0 * 3.141592653589793
    # Minimal phase & ΔNFR attributes so the field computations have input.
    for node in graph.nodes:
        graph.nodes[node]["phase"] = random.random() * two_pi
        graph.nodes[node]["delta_nfr"] = random.random() * 0.05  # low pressure
    return graph
46+
47+
48+
def main() -> None:
    """Run the demo: validate the sequence, assess health, emit telemetry.

    Prints a human-readable health summary to stdout and writes telemetry
    JSONL events as a side effect. The graph is only read, never mutated.
    """
    # Canonical bootstrap variant: AL initiates (U1a), UM couples (U3),
    # IL stabilizes (U2), SHA closes (U1b).
    sequence: List[str] = ["AL", "UM", "IL", "SHA"]
    G = _make_graph()

    # Baseline structural potential snapshot (local import keeps the
    # module-level dependency list minimal for the demo).
    from tnfr.physics.fields import compute_structural_potential

    baseline_phi_s = compute_structural_potential(G)

    # Telemetry emitter demonstration
    telemetry_path = "results/telemetry/structural_health_demo.jsonl"
    with TelemetryEmitter(telemetry_path) as emitter:
        emitter.record(
            G,
            operator="start",
            extra={"nodes": G.number_of_nodes()},
        )
        report = run_structural_validation(
            G,
            sequence=sequence,
            baseline_structural_potential=baseline_phi_s,
        )
        emitter.record(
            G,
            operator="validation",
            extra={
                "risk_level": report.risk_level,
                "status": report.status,
                "max_phase_gradient": report.field_metrics[
                    "max_phase_gradient"
                ],
            },
        )
        health = compute_structural_health(
            G, sequence=sequence, baseline_phi_s=baseline_phi_s
        )
        emitter.record(
            G,
            operator="health",
            extra={
                "risk_level": health["risk_level"],
                "recommended": health["recommended_actions"],
            },
        )
        emitter.flush()
    print("Telemetry Events (last run):")
    try:
        # Fix: the original called open(...).read() without closing the
        # handle; a context manager guarantees it is released.
        with open(telemetry_path, "r", encoding="utf-8") as fh:
            for ln in fh.read().splitlines()[-3:]:
                print(" ", ln)
    except FileNotFoundError:
        print(" (no telemetry file found)")

    # Human health summary
    print("\nStructural Health Summary:")
    print(f"Status : {health['status']}")
    print(f"Risk Level : {health['risk_level']}")
    print("Thresholds :")
    for k, v in health["thresholds_exceeded"].items():
        print(f" - {k}: {'EXCEEDED' if v else 'ok'}")
    if health["recommended_actions"]:
        print("Recommended :", ", ".join(health["recommended_actions"]))
    if health["notes"]:
        print("Notes:")
        for n in health["notes"]:
            print(" -", n)


if __name__ == "__main__":  # pragma: no cover
    main()
Lines changed: 173 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,173 @@
1+
"""CLI script: Generate TNFR structural health report.
2+
3+
Reads a TNFR graph produced by a simulation (optional) and an operator
4+
sequence (optional) then prints a concise structural health summary.
5+
6+
Usage examples:
7+
---------------
8+
python scripts/structural_health_report.py \
9+
--graph examples/output/graph.pkl \
10+
--sequence examples/output/sequence.txt \
11+
--json results/reports/health_report.json
12+
13+
python scripts/structural_health_report.py --random 32 --edge-prob 0.15
14+
15+
Inputs
16+
------
17+
Graph formats supported:
18+
- Pickle (NetworkX graph)
19+
- Edge list (.edgelist) simple whitespace separated pairs
20+
21+
Sequence file: one operator mnemonic per line (e.g. AL, UM, IL, SHA).
22+
23+
Outputs
24+
-------
25+
STDOUT: Human-readable summary
26+
JSON (optional): Machine-readable payload
27+
28+
All computations are telemetry-only; graph is never mutated.
29+
"""
30+
31+
from __future__ import annotations
32+
33+
import argparse
34+
import json
35+
import sys
36+
from pathlib import Path
37+
from typing import List
38+
39+
try: # networkx dependency
40+
import networkx as nx # type: ignore
41+
except ImportError: # pragma: no cover
42+
nx = None # type: ignore
43+
44+
from tnfr.validation.health import compute_structural_health
45+
46+
47+
def _load_graph(path: Path):
    """Load a NetworkX graph from a pickle (.pkl) or edge-list (.edgelist) file.

    The extension check is case-insensitive (accepts ``.PKL`` etc.).

    Raises:
        RuntimeError: if networkx is not installed.
        ValueError: for any unsupported file extension.
    """
    if nx is None:  # pragma: no cover
        raise RuntimeError("networkx not available")
    suffix = path.suffix.lower()
    if suffix == ".pkl":
        import pickle

        # SECURITY: pickle.load executes arbitrary code embedded in the
        # file — only load graph pickles from trusted sources.
        with path.open("rb") as f:
            return pickle.load(f)
    if suffix == ".edgelist":
        G = nx.read_edgelist(path)  # type: ignore
        return G
    raise ValueError(f"Unsupported graph format: {path.suffix}")
60+
61+
def _load_sequence(path: Path) -> List[str]:
62+
return [ln.strip() for ln in path.read_text().splitlines() if ln.strip()]
63+
64+
65+
def parse_args(argv: List[str]) -> argparse.Namespace:
    """Build the CLI parser for the health report and parse *argv*."""
    parser = argparse.ArgumentParser(
        description="TNFR structural health report"
    )
    parser.add_argument(
        "--graph",
        type=Path,
        required=False,
        help="Graph pickle (.pkl) or edge list (.edgelist)",
    )
    parser.add_argument(
        "--sequence",
        type=Path,
        required=False,
        help="Operator sequence file (one mnemonic per line)",
    )
    parser.add_argument(
        "--json",
        type=Path,
        required=False,
        help="Optional JSON output path",
    )
    parser.add_argument(
        "--random",
        type=int,
        help="Generate random Erdos-Renyi graph with N nodes",
    )
    parser.add_argument(
        "--edge-prob",
        type=float,
        default=0.1,
        help="Probability for random graph edges",
    )
    parser.add_argument(
        "--seed",
        type=int,
        default=42,
        help="Random seed for reproducibility",
    )
    return parser.parse_args(argv)
103+
104+
105+
def main(argv: List[str]) -> int:
    """Entry point: build or load a graph, compute and print its health.

    Returns a process exit code: 0 on success, 2 on usage/dependency error.
    The graph is only read — all computations are telemetry-only.
    """
    args = parse_args(argv)
    if nx is None:  # pragma: no cover
        print("networkx required for health report", file=sys.stderr)
        return 2

    if args.graph and args.random:
        print("Specify either --graph or --random, not both", file=sys.stderr)
        return 2

    if args.random:
        # Fix: networkx exposes no ``nx.random`` module, so the original
        # ``nx.random.seed(args.seed)`` raised AttributeError. The documented
        # way to get a reproducible graph is the generator's ``seed=`` param.
        G = nx.erdos_renyi_graph(  # type: ignore
            args.random, args.edge_prob, seed=args.seed
        )
    elif args.graph:
        G = _load_graph(args.graph)
    else:
        print("Must supply --graph or --random", file=sys.stderr)
        return 2

    sequence = _load_sequence(args.sequence) if args.sequence else None

    health = compute_structural_health(G, sequence=sequence)

    # Human summary
    print("TNFR Structural Health Report")
    print("--------------------------------")
    print(f"Status : {health['status']}")
    print(f"Risk Level : {health['risk_level']}")
    subset = health["field_metrics_subset"]
    # Each metric may be None when it could not be computed; print NA then.
    if subset["mean_phi_s"] is not None:
        print(f"Mean Φ_s : {subset['mean_phi_s']:.4f}")
    else:
        print("Mean Φ_s : NA")
    if subset["max_phase_gradient"] is not None:
        print(f"Max |∇φ| : {subset['max_phase_gradient']:.4f}")
    else:
        print("Max |∇φ| : NA")
    if subset["max_k_phi"] is not None:
        print(f"Max |K_φ| : {subset['max_k_phi']:.4f}")
    else:
        print("Max |K_φ| : NA")
    if subset["xi_c"] is not None:
        print(f"ξ_C : {subset['xi_c']:.2f}")
    else:
        print("ξ_C : NA")
    # Drift line is intentionally omitted entirely when unavailable.
    if subset["delta_phi_s"] is not None:
        print(f"ΔΦ_s drift : {subset['delta_phi_s']:.4f}")
    print("Threshold Flags :")
    for k, v in health["thresholds_exceeded"].items():
        print(f" - {k}: {'EXCEEDED' if v else 'ok'}")
    if health["recommended_actions"]:
        print("Recommended Actions:")
        for act in health["recommended_actions"]:
            print(f" * {act}")
    if health["notes"]:
        print("Notes:")
        for n in health["notes"]:
            print(f" - {n}")

    if args.json:
        args.json.parent.mkdir(parents=True, exist_ok=True)
        args.json.write_text(json.dumps(health, indent=2))
        print(f"JSON report written to {args.json}")

    return 0


if __name__ == "__main__":  # pragma: no cover
    raise SystemExit(main(sys.argv[1:]))

src/tnfr/operators/grammar_error_factory.py

Lines changed: 33 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -166,14 +166,42 @@ def collect_grammar_errors(
166166
validator = GrammarValidator()
167167
errors: List[ExtendedGrammarError] = []
168168

169+
# Accept glyph strings by wrapping them in lightweight stubs
170+
# expected by GrammarValidator (which accesses .name / .canonical_name).
171+
GLYPH_TO_NAME = {
172+
"AL": "emission",
173+
"EN": "reception",
174+
"IL": "coherence",
175+
"OZ": "dissonance",
176+
"UM": "coupling",
177+
"RA": "resonance",
178+
"SHA": "silence",
179+
"VAL": "expansion",
180+
"NUL": "contraction",
181+
"THOL": "self_organization",
182+
"ZHIR": "mutation",
183+
"NAV": "transition",
184+
"REMESH": "recursivity",
185+
}
186+
187+
class _OpStub: # local minimal stub
188+
def __init__(self, glyph: str):
189+
canonical = GLYPH_TO_NAME.get(glyph.upper(), glyph.lower())
190+
self.canonical_name = canonical
191+
self.name = canonical
192+
193+
normalized: List[Any] = [
194+
(_OpStub(op) if isinstance(op, str) else op) for op in sequence
195+
]
196+
169197
# Canonical operator names for reporting
170198
canonical = [
171199
getattr(op, "canonical_name", getattr(op, "name", "?"))
172-
for op in sequence
200+
for op in normalized
173201
]
174202

175203
# U1a
176-
ok, msg = validator.validate_initiation(list(sequence), epi_initial)
204+
ok, msg = validator.validate_initiation(list(normalized), epi_initial)
177205
if not ok:
178206
errors.append(
179207
make_grammar_error(
@@ -185,7 +213,7 @@ def collect_grammar_errors(
185213
)
186214
)
187215
# U1b
188-
ok, msg = validator.validate_closure(list(sequence))
216+
ok, msg = validator.validate_closure(list(normalized))
189217
if not ok:
190218
errors.append(
191219
make_grammar_error(
@@ -197,7 +225,7 @@ def collect_grammar_errors(
197225
)
198226
)
199227
# U2
200-
ok, msg = validator.validate_convergence(list(sequence))
228+
ok, msg = validator.validate_convergence(list(normalized))
201229
if not ok:
202230
errors.append(
203231
make_grammar_error(
@@ -208,7 +236,7 @@ def collect_grammar_errors(
208236
)
209237
)
210238
# U3
211-
ok, msg = validator.validate_resonant_coupling(list(sequence))
239+
ok, msg = validator.validate_resonant_coupling(list(normalized))
212240
if not ok:
213241
# Find first coupling/resonance candidate if available
214242
idx = next(
@@ -229,17 +257,4 @@ def collect_grammar_errors(
229257
index=idx,
230258
)
231259
)
232-
# U4 (validator returns both in one pass)
233-
ok, msg = validator.validate_bifurcation(list(sequence))
234-
if not ok:
235-
# Distinguish U4a vs U4b heuristically using message content
236-
rule_key = "U4b" if "transform" in msg.lower() else "U4a"
237-
errors.append(
238-
make_grammar_error(
239-
rule=rule_key,
240-
candidate="sequence",
241-
message=msg,
242-
sequence=canonical,
243-
)
244-
)
245260
return errors

0 commit comments

Comments
 (0)