"""Demo: Extended SHA (Silence) Metrics

This demo shows how to use the extended metrics for the SHA operator
to analyze structural preservation effectiveness in different scenarios.

The extended metrics enable deep analysis of:
- EPI variance during silence
- Preservation integrity quality
- Reactivation readiness assessment
- Time-to-collapse prediction
"""

from tnfr.structural import create_nfr
from tnfr.operators.metrics import silence_metrics
from tnfr.alias import set_attr
from tnfr.constants.aliases import ALIAS_VF, ALIAS_EPI
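
# The demos below seed each node with the attributes that silence_metrics reads
# when computing its extended values. As a rough mental model for reading the
# output (an assumption made for this demo, not the library's exact formulas):
#   - epi_variance           ~ spread of "epi_history_during_silence"
#   - preservation_integrity ~ how closely EPI tracks "preserved_epi"
#   - reactivation_readiness ~ how safely the node could be re-activated now
#   - time_to_collapse       ~ preserved EPI divided by "epi_drift_rate"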


def print_metrics(title: str, metrics: dict) -> None:
    """Pretty-print core and extended SHA metrics."""
    print(f"\n{'=' * 60}")
    print(title)
    print("=" * 60)
    print(f"Operator: {metrics['operator']} ({metrics['glyph']})")
    print("\nCore Metrics:")
    print(f" vf_reduction: {metrics['vf_reduction']:.4f}")
    print(f" vf_final: {metrics['vf_final']:.4f}")
    print(f" epi_preservation: {metrics['epi_preservation']:.4f}")
    print(f" is_silent: {metrics['is_silent']}")
    print("\nExtended Metrics:")
    print(f" epi_variance: {metrics['epi_variance']:.6f}")
    print(f" preservation_integrity: {metrics['preservation_integrity']:.4f}")
    print(f" reactivation_readiness: {metrics['reactivation_readiness']:.4f}")
    time_val = metrics["time_to_collapse"]
    time_str = "infinite" if time_val == float("inf") else f"{time_val:.2f} steps"
    print(f" time_to_collapse: {time_str}")


def demo_biomedical_use_case():
    """Biomedical: Sleep consolidation tracking."""
    print("\n🏥 BIOMEDICAL USE CASE: Sleep Consolidation")
    print("Tracking HRV signal during 8-hour sleep period")

    # Create node representing HRV signal
    G, node = create_nfr("hrv_signal", epi=0.6, vf=0.02)

    # Set up silence tracking
    G.nodes[node]["preserved_epi"] = 0.6
    G.nodes[node]["silence_duration"] = 8.0  # 8 hours
    G.nodes[node]["epi_history_during_silence"] = [
        0.6, 0.602, 0.598, 0.601, 0.599, 0.600
    ]  # Minimal variance (stable sleep)
    G.nodes[node]["epi_drift_rate"] = 0.001  # Very slow drift
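
    # vf_before / epi_before describe the node just before SHA was applied. The
    # demo assumes the node has already been silenced (νf dropped from 0.5 to
    # 0.02 at creation), so the metrics are computed against that pre-silence
    # baseline.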
    metrics = silence_metrics(G, node, vf_before=0.5, epi_before=0.6)
    print_metrics("Sleep Consolidation Analysis", metrics)

    print("\n📊 Interpretation:")
    print(" ✓ High preservation integrity (>0.99) = Stable sleep")
    print(" ✓ Low EPI variance (<0.01) = No sleep disturbances")
    print(" ✓ Moderate readiness (0.3-0.8) = Natural post-sleep state")
    print(" ✓ Long time-to-collapse (>100) = Healthy resilience")


def demo_cognitive_use_case():
    """Cognitive: Memory consolidation during incubation."""
    print("\n🧠 COGNITIVE USE CASE: Memory Consolidation")
    print("Memory trace during incubation period")

    # Create node representing memory trace
    G, node = create_nfr("memory_trace", epi=0.7, vf=0.03)

    # Set up perfect preservation
    G.nodes[node]["preserved_epi"] = 0.7
    G.nodes[node]["silence_duration"] = 2.0  # Brief pause
    G.nodes[node]["epi_history_during_silence"] = [0.7, 0.7, 0.7]  # Perfect stability
    G.nodes[node]["epi_drift_rate"] = 0.0  # No drift

    # Add network support (other memory traces)
    for i in range(4):
        G.add_node(f"memory_{i}")
        set_attr(G.nodes[f"memory_{i}"], ALIAS_VF, 0.5)
        set_attr(G.nodes[f"memory_{i}"], ALIAS_EPI, 0.6)
        G.add_edge(node, f"memory_{i}")
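
    # These neighbors act as structural support. The demo's assumption is that
    # reactivation_readiness benefits from a coherent surrounding network, which
    # is why this case is expected to score high (>0.7) in the interpretation.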

    metrics = silence_metrics(G, node, vf_before=0.5, epi_before=0.7)
    print_metrics("Memory Consolidation Analysis", metrics)

    print("\n📊 Interpretation:")
    print(" ✓ Perfect integrity (1.0) = Excellent consolidation")
    print(" ✓ Zero variance = No memory degradation")
    print(" ✓ High readiness (>0.7) = Ready for recall")
    print(" ✓ Infinite collapse time = Stable long-term memory")


def demo_social_use_case():
    """Social: Strategic pause in conflict."""
    print("\n🤝 SOCIAL USE CASE: Strategic Pause in Conflict")
    print("Conflict state paused for de-escalation")

    # Create node representing conflict state
    G, node = create_nfr("conflict_state", epi=0.4, vf=0.01)

    # Set up degrading silence (some EPI loss)
    G.nodes[node]["preserved_epi"] = 0.45
    G.nodes[node]["silence_duration"] = 15.0  # Long strategic pause
    G.nodes[node]["epi_history_during_silence"] = [
        0.45, 0.44, 0.42, 0.41, 0.40
    ]  # Gradual degradation
    G.nodes[node]["epi_drift_rate"] = 0.01  # Slow degradation
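
    # Rough sanity check (an assumption for this demo, not the library's exact
    # formula): with roughly 0.40-0.45 of EPI preserved and a drift of 0.01 per
    # step, remaining EPI divided by drift rate gives about 40 steps, which is
    # why the interpretation below expects a finite 30-50 step collapse window.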

    metrics = silence_metrics(G, node, vf_before=0.8, epi_before=0.45)
    print_metrics("Strategic Pause Analysis", metrics)

    print("\n📊 Interpretation:")
    print(" ⚠ Good integrity (0.85-0.95) = Acceptable degradation")
    print(" ⚠ Some variance = Tension still present")
    print(" ⚠ Moderate readiness (<0.6) = Extended pause needed")
    print(" ⚠ Finite collapse (30-50 steps) = Act before breakdown")


def demo_preservation_failure():
    """Example: Detection of preservation failure."""
    print("\n⚠️ FAILURE DETECTION: Poor Preservation")
    print("Node with excessive EPI drift during silence")

    # Create node with poor preservation
    G, node = create_nfr("unstable_node", epi=0.2, vf=0.05)

    # Set up failing preservation
    G.nodes[node]["preserved_epi"] = 0.5  # Started at 0.5
    G.nodes[node]["silence_duration"] = 5.0
    G.nodes[node]["epi_history_during_silence"] = [
        0.5, 0.4, 0.3, 0.25, 0.2
    ]  # Rapid degradation
    G.nodes[node]["epi_drift_rate"] = 0.06  # High drift

    metrics = silence_metrics(G, node, vf_before=1.0, epi_before=0.5)
    print_metrics("Preservation Failure Detection", metrics)

    print("\n📊 Interpretation:")
    print(" ❌ Low integrity (<0.8) = PRESERVATION FAILURE")
    print(" ❌ High variance = Unstable structure")
    print(" ❌ Low readiness (<0.3) = RISKY REACTIVATION")
    print(" ❌ Short collapse time (<10) = IMMINENT COLLAPSE")
    print("\n⚡ ACTION REQUIRED: Apply IL (Coherence) before reactivation")
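

# Illustrative only: a minimal helper that turns the rule-of-thumb thresholds
# quoted in the interpretation text above into an automated check. The cut-offs
# (integrity < 0.8, readiness < 0.3, collapse < 10 steps) come from this demo's
# commentary, not from the tnfr library, so treat them as assumptions to tune.
def assess_preservation(metrics: dict) -> str:
    """Classify a silence_metrics result using the demo's illustrative thresholds."""
    if (
        metrics["preservation_integrity"] < 0.8
        or metrics["reactivation_readiness"] < 0.3
        or metrics["time_to_collapse"] < 10
    ):
        return "failure: apply IL (Coherence) before reactivation"
    if metrics["preservation_integrity"] > 0.99 and metrics["epi_variance"] < 0.01:
        return "stable: preservation is holding well"
    return "degrading: acceptable for now, plan reactivation timing"
    # Example usage: assess_preservation(silence_metrics(G, node, vf_before=1.0, epi_before=0.5))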


if __name__ == "__main__":
    print("╔════════════════════════════════════════════════════════════╗")
    print("║                  Extended SHA Metrics Demo                 ║")
    print("║       Analyzing Structural Preservation Effectiveness      ║")
    print("╚════════════════════════════════════════════════════════════╝")

    # Run all demos
    demo_biomedical_use_case()
    demo_cognitive_use_case()
    demo_social_use_case()
    demo_preservation_failure()

    print("\n" + "=" * 60)
    print("✨ Demo Complete!")
    print("=" * 60)
    print("\nThe extended SHA metrics enable:")
    print(" • Deep analysis of preservation quality")
    print(" • Early detection of structural degradation")
    print(" • Informed reactivation timing decisions")
    print(" • Cross-domain application (biomedical, cognitive, social)")