-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathshared_var2
More file actions
100 lines (85 loc) Β· 3.27 KB
/
shared_var2
File metadata and controls
100 lines (85 loc) Β· 3.27 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
# analysis/tester.py
# FULL INFINITY MACHINE TESTER β Runs all diagnostics
# Author: poopoomanu | Date: November 02, 2025
import numpy as np
import pandas as pd
from statsmodels.tsa.vector_ar.var_model import VAR
from scipy.stats import kstest, entropy
from scipy.signal import welch
import os
# Startup banner. (The original scrape garbled the em-dash to "β" via a
# UTF-8/ISO-8859-7 mojibake; restored here.)
print("INFINITY MACHINE TESTER — STARTING")
print("=" * 60)

# === CONFIG ===
DATA_DIR = "../data"  # expected location of per-environment CSV files
ENVIRONMENTS = ["S25", "S28", "S32", "ICARE_0284", "pendulum"]
LAG = 2  # VAR lag order used for every fit below
np.random.seed(42)  # reproducible synthetic data and noise draws
# === LOAD DATA (synthetic proxy if missing) ===
def load_env(env):
    """Return the first 3 columns of ``<DATA_DIR>/<env>.csv`` as a DataFrame.

    If the CSV is missing, fall back to a synthetic 3-channel series
    (1000 samples of a 0.1 Hz sine plus Gaussian noise, with small
    per-channel noise on top) so the pipeline can still run end-to-end.
    """
    path = f"{DATA_DIR}/{env}.csv"
    if not os.path.exists(path):
        # Mojibake fix: the original message's "β" was a garbled em-dash.
        print(f"[!] {path} not found — generating synthetic...")
        t = np.linspace(0, 100, 1000)
        # Shared base signal; each column adds independent small noise.
        signal = np.sin(2*np.pi*0.1*t) + 0.5*np.random.randn(len(t))
        return pd.DataFrame({f"X{i}": signal + 0.1*np.random.randn(len(t)) for i in range(3)})
    return pd.read_csv(path).iloc[:, :3]
data = {env: load_env(env) for env in ENVIRONMENTS}
# Mojibake fix: "[β]" in the original was a garbled "[✓]".
print(f"[✓] Loaded {len(ENVIRONMENTS)} environments")

# === FIT SHARED MODEL ===
# NOTE(review): concatenating environments end-to-end means the first LAG
# rows after each boundary mix samples from two different environments —
# acceptable for this rough test, but worth confirming upstream.
all_data = pd.concat(data.values(), ignore_index=True)
shared_model = VAR(all_data)
shared_res = shared_model.fit(maxlags=LAG)
shared_ll = shared_res.llf
# Total free parameters of the single pooled fit: params is a
# (1 + LAG*neqs) x neqs coefficient matrix, so count every entry.
# (The original used len(params) * len(ENVIRONMENTS), which both counts
# only coefficient rows and multiplies by the number of environments,
# over-penalizing a model that is fitted exactly once.)
shared_k = shared_res.params.size
shared_n = len(all_data)
# Raw-scale BIC: -2 log-likelihood + ln(n) * parameter count.
shared_bic = -2 * shared_ll + np.log(shared_n) * shared_k
shared_pred = shared_res.fittedvalues
# One-step-ahead in-sample residuals; the first LAG rows have no prediction.
shared_resid = all_data.values[LAG:] - shared_pred.values
# === FIT LOCAL MODELS ===
# One independent VAR per environment. Local BICs are computed with the
# SAME raw-scale formula used for the shared model (-2*llf + ln(n)*k) so
# the ΔBIC comparison below is apples-to-apples. (The original summed
# statsmodels' res.bic, which is normalized per observation and lives on
# a completely different scale from the raw shared_bic.)
local_bics = []
local_nlls = []
for env, df in data.items():
    model = VAR(df)
    res = model.fit(maxlags=LAG)
    local_bics.append(-2 * res.llf + np.log(len(df)) * res.params.size)
    local_nlls.append(-res.llf / len(df))  # per-sample negative log-likelihood
local_bic = sum(local_bics)       # total BIC across all environments
local_nll = np.mean(local_nlls)   # mean per-sample NLL across environments
# === DIAGNOSTICS ===
# 1. KS-sum: two-sample Kolmogorov–Smirnov statistic between each residual
#    column and a matched column of standard-normal noise, summed.
noise = np.random.randn(*shared_resid.shape)
ks_stats = []
for col in range(shared_resid.shape[1]):
    ks_stats.append(kstest(shared_resid[:, col], noise[:, col]).statistic)
ks_sum = sum(ks_stats)

# 2. Recognition Index (RI): one minus the mean-square ratio of residuals
#    to unit-variance noise (approaches 1 as residuals shrink).
mean_sq_resid = np.mean(shared_resid ** 2)
mean_sq_noise = np.mean(noise ** 2)
RI = 1 - mean_sq_resid / mean_sq_noise

# 3. Chaos Index (CI): Shannon entropy of 50-bin histograms; residual
#    entropy is placed on a scale between noise entropy and data entropy.
def _hist_entropy(values):
    # entropy() renormalizes its input, so density weights are fine;
    # the 1e-10 offset guards against log(0) on empty bins.
    return entropy(np.histogram(values, bins=50, density=True)[0] + 1e-10)

h_eps = _hist_entropy(shared_resid.flatten())
h_eta = _hist_entropy(noise.flatten())
h_X = _hist_entropy(all_data.values.flatten())
CI = (h_eps - h_eta) / (h_X - h_eta)
# 4. Time-Reversal Symmetry (TRS): refit the VAR on the time-reversed
#    series and compare per-sample negative log-likelihoods; TRS -> 1
#    when the forward and reversed fits are equally good.
fwd_nll = -shared_ll / shared_n
reversed_frame = all_data.iloc[::-1].reset_index(drop=True)
reversed_fit = VAR(reversed_frame).fit(maxlags=LAG)
rev_nll = -reversed_fit.llf / shared_n
TRS = 1 - abs(fwd_nll - rev_nll) / max(fwd_nll, rev_nll)
# === OUTPUT ===
# Mojibake fixes throughout: the scrape's ISO-8859-7 decode turned
# "Δ" into "Ξ", "≥" into "β₯", "≤" into "β€", "→"/"—" into bare "β".
print("\nCORE RESULTS")
print(f"Shared BIC: {shared_bic:.2f}")
print(f"Local BIC: {local_bic:.2f}")
print(f"ΔBIC: {local_bic - shared_bic:.2f}")
print(f"NLL (Shared): {fwd_nll:.3f} | NLL (Local): {local_nll:.3f}")
print(f"KS-sum: {ks_sum:.3f}")
print("\nRECOGNITION DIAGNOSTICS")
print(f"RI = {RI:.3f} → ≥ 0.8 = Self-recognition")
print(f"CI = {CI:.3f} → ≤ 0.2 = Low chaos")
print(f"TRS = {TRS:.3f} → ≥ 0.9 = Time-symmetric")
print("\nDECISION RULE")
if RI >= 0.8 and CI <= 0.2 and TRS >= 0.9:
    print("MACHINE LAW CONFIRMED: Reality is a literal, computable engine.")
else:
    print("Further testing required.")
print("\n" + "=" * 60)
print("TEST COMPLETE — T IS ALIVE")