LordXido committed on
Commit
baab0a8
·
verified ·
1 Parent(s): eb17519

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +243 -164
app.py CHANGED
@@ -3,19 +3,17 @@ import requests
3
  import time
4
  import json
5
  import hashlib
6
- import random
 
7
  from typing import Dict, Any, List
8
 
9
  # ======================================================
10
- # GLOBAL LEDGER (in-memory for HF; can persist later)
11
  # ======================================================
12
 
13
  LEDGER: List[Dict[str, Any]] = []
14
  CHAT_MEMORY: List[str] = []
15
-
16
- # ======================================================
17
- # PUBLIC MACRO DATA (World Bank)
18
- # ======================================================
19
 
20
  WORLD_BANK_BASE = "https://api.worldbank.org/v2"
21
  INDICATORS = {
@@ -24,252 +22,333 @@ INDICATORS = {
24
  "POPULATION": "SP.POP.TOTL",
25
  }
26
 
27
- def fetch_indicator(indicator, country="WLD", year="2022"):
 
 
 
 
 
 
 
 
 
 
 
 
28
  try:
29
- url = (
30
- f"{WORLD_BANK_BASE}/country/{country}/indicator/"
31
- f"{indicator}?format=json&per_page=1&date={year}"
32
- )
33
- r = requests.get(url, timeout=8)
34
- return r.json()[1][0]["value"]
 
35
  except Exception:
36
  return None
37
 
38
- def fetch_macro_anchor(country="WLD"):
 
 
 
 
 
 
 
39
  return {
40
  "country": country,
41
- "gdp": fetch_indicator(INDICATORS["GDP"], country),
42
- "inflation": fetch_indicator(INDICATORS["INFLATION"], country),
43
- "population": fetch_indicator(INDICATORS["POPULATION"], country),
44
  }
45
 
46
  # ======================================================
47
- # BIT + METADATA LAYER
48
  # ======================================================
49
 
50
  def canonical_bytes(obj: Any) -> bytes:
51
- return json.dumps(obj, sort_keys=True, separators=(",", ":")).encode()
52
 
53
- def bit_stats(payload: Dict[str, Any]) -> Dict[str, Any]:
54
  b = canonical_bytes(payload)
 
55
  return {
56
  "bytes": len(b),
57
  "bits": len(b) * 8,
58
- "symbol_diversity": len(set(b)),
59
- "entropy_proxy": round(len(set(b)) / max(len(b), 1), 6)
60
  }
61
 
62
- def hash_payload(payload: Dict[str, Any], prev_hash: str) -> str:
63
- b = canonical_bytes({"payload": payload, "prev_hash": prev_hash})
64
- return hashlib.sha256(b).hexdigest()
65
-
66
- def reliability_score(source: str, latency_s: float) -> float:
67
- # simple, explainable scoring model (can be replaced with learned model)
68
- base = 0.85 if source == "world_bank" else 0.65
69
- penalty = min(0.25, latency_s / 20.0)
70
- return round(max(0.1, base - penalty), 3)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
71
 
72
  # ======================================================
73
- # SIGNAL GENERATORS (global “bit rails”)
74
  # ======================================================
75
 
76
- def commodity_signal(commodity: str, anchor: float, macro: Dict[str, Any]) -> Dict[str, Any]:
77
- gdp_scale = (macro["gdp"] / 1e14) if macro.get("gdp") else 1.0
78
  supply = anchor * gdp_scale
79
- demand = supply * 0.95
80
- price_index = round((supply / 10.0) * gdp_scale, 4)
 
 
 
81
  return {
82
  "type": "commodity",
83
  "commodity": commodity,
84
  "supply": round(supply, 4),
85
  "demand": round(demand, 4),
86
- "price_index": price_index,
87
- "currency_flow": round(demand * price_index, 4),
88
  }
89
 
90
- def logistics_signal(econ: Dict[str, Any]) -> Dict[str, Any]:
91
  friction = abs(econ["supply"] - econ["demand"]) / max(econ["supply"], 1e-9)
92
- return {
93
- "type": "logistics",
94
- "throughput": round(econ["supply"] * (1 - friction), 4),
95
- "friction": round(friction, 6),
96
- }
97
 
98
- def energy_signal(econ: Dict[str, Any]) -> Dict[str, Any]:
99
  return {
100
  "type": "energy",
101
- "energy_cost_index": round(econ["price_index"] * 0.4, 4),
102
- "dependency": "high" if econ["commodity"].lower() in ["oil", "gas"] else "moderate",
103
  }
104
 
105
- def sentiment_signal(seed_key: str) -> Dict[str, Any]:
106
- random.seed(sum(ord(c) for c in seed_key))
107
- confidence = random.uniform(0.6, 0.9)
 
108
  return {
109
  "type": "sentiment",
110
- "market_confidence": round(confidence, 6),
111
- "risk_bias": "neutral" if confidence > 0.7 else "risk_off",
112
- }
113
-
114
- def derived_features(econ, logi, energy, sent, lag_days: int) -> Dict[str, Any]:
115
- projected_price = econ["price_index"] * (
116
- 1 + (1 - sent["market_confidence"]) * 0.08 - logi["friction"] * 0.1
117
- )
118
- return {
119
- "type": "features",
120
- "lag_days": lag_days,
121
- "projected_price": round(projected_price, 6),
122
- "volatility_proxy": round(0.015 * lag_days, 6),
123
  }
124
 
125
  # ======================================================
126
- # LEDGER APPEND (global bit+metadata record)
127
  # ======================================================
128
 
129
- def append_record(payload: Dict[str, Any], meta: Dict[str, Any]) -> Dict[str, Any]:
130
- prev = LEDGER[-1]["hash"] if LEDGER else "GENESIS"
131
- h = hash_payload(payload, prev)
 
 
 
 
 
 
 
 
132
  record = {
133
- "hash": h,
134
- "prev_hash": prev,
135
  "payload": payload,
136
- "metadata": meta,
137
- "bit_stats": bit_stats(payload),
138
  "ts": time.time(),
139
  }
 
140
  LEDGER.append(record)
141
  return record
142
 
143
  def run_tick(commodity: str, anchor: float, country: str, lag_days: int, use_live: bool):
144
  t0 = time.time()
145
-
146
- macro = fetch_macro_anchor(country) if use_live else {"country": country, "gdp": None, "inflation": None, "population": None}
147
  latency = time.time() - t0
148
-
149
- # 1) macro record
150
  meta_macro = {
151
- "source": "world_bank" if use_live else "synthetic",
152
  "country": country,
153
  "latency_s": round(latency, 4),
154
- "reliability": reliability_score("world_bank" if use_live else "synthetic", latency),
155
- "schema": "macro.v1",
156
  }
157
- append_record({"type": "macro", **macro}, meta_macro)
158
-
159
- # 2) derived signals
 
 
 
 
 
 
 
 
 
160
  econ = commodity_signal(commodity, anchor, macro)
161
  logi = logistics_signal(econ)
162
  ener = energy_signal(econ)
163
- sent = sentiment_signal(commodity + country)
164
- feat = derived_features(econ, logi, ener, sent, lag_days)
165
-
 
 
 
 
 
 
 
166
  for payload, schema in [
167
  (econ, "commodity.v1"),
168
  (logi, "logistics.v1"),
169
  (ener, "energy.v1"),
170
  (sent, "sentiment.v1"),
171
- (feat, "features.v1"),
172
  ]:
 
 
 
 
 
173
  meta = {
174
  "source": "derived",
175
- "country": country,
176
- "latency_s": 0.0,
177
- "reliability": 0.9,
178
  "schema": schema,
 
179
  }
180
- append_record(payload, meta)
181
-
 
 
 
182
  return {
183
- "status": "tick_ok",
184
- "ledger_len": len(LEDGER),
185
- "tip_hash": LEDGER[-1]["hash"],
186
- "latest_macro": macro,
187
- "latest_features": feat,
188
- }, LEDGER[-1]["hash"]
189
-
190
- def query_ledger(n: int):
191
- n = max(1, min(int(n), 200))
192
- return LEDGER[-n:]
193
 
194
  # ======================================================
195
- # CHAT INTERFACE (queries the ledger)
196
  # ======================================================
197
 
198
- def jarvis_chat(msg, history):
199
- CHAT_MEMORY.append(msg)
200
- m = msg.lower().strip()
201
-
202
- if "latest" in m or "tip" in m:
203
- if not LEDGER:
204
- return "Ledger is empty. Run a tick first."
205
- r = LEDGER[-1]
206
- return json.dumps({"tip_hash": r["hash"], "payload": r["payload"], "metadata": r["metadata"]}, indent=2)
207
-
208
- if "ledger" in m or "records" in m:
209
- return f"Ledger records: {len(LEDGER)}. Ask 'latest' or 'show last 10'."
210
-
211
- if "show last" in m:
212
- try:
213
- k = int(m.split("show last")[-1].strip())
214
- except Exception:
215
- k = 10
216
- data = query_ledger(k)
217
- return json.dumps(data, indent=2)
218
-
219
- if "proof" in m or "hash" in m:
220
- if not LEDGER:
221
- return "No proof yet. Run a tick to generate chained hashes."
222
- return f"Tip hash: {LEDGER[-1]['hash']} (prev: {LEDGER[-1]['prev_hash']})"
223
-
224
- if "what is this" in m:
225
- return (
226
- "This is GVBDMS v1: a Global Virtual Bit + Metadata Ledger. "
227
- "It ingests public anchors, derives signals, stores payload+metadata, "
228
- "and chains records with hashes for tamper-evident provenance."
229
- )
230
-
231
- return "Ask: 'latest', 'proof', 'show last 10', or run a tick to ingest data."
 
 
 
232
 
233
  # ======================================================
234
- # GRADIO UI (HF SAFE)
235
  # ======================================================
236
 
237
- with gr.Blocks() as app:
238
- gr.Markdown("# 🌐 CodexFlow Global Virtual Bit + Metadata System (GVBDMS v1)")
239
- gr.Markdown("A byte-verifiable, provenance-tracked ledger for global economic signals.")
 
240
 
 
 
 
 
 
 
 
 
 
 
241
  with gr.Row():
242
  with gr.Column(scale=2):
243
- commodity = gr.Dropdown(["Gold","Oil","Gas","Wheat","Copper"], value="Gold", label="Commodity")
244
- anchor = gr.Number(value=950, label="Physical Anchor")
245
- country = gr.Textbox(value="WLD", label="Country Code (WLD/USA/CHN/ZAF...)")
246
- lag = gr.Slider(1, 365, value=7, step=1, label="Reporting Lag (days)")
247
- live = gr.Checkbox(value=True, label="Use Live World Bank Anchors")
248
-
249
- run_btn = gr.Button("Run Tick (Ingest + Derive + Ledger Append)")
250
-
251
- tick_out = gr.JSON(label="Tick Result")
252
- tip_hash = gr.Textbox(label="Tip Hash")
253
-
 
 
 
 
 
 
 
 
 
254
  run_btn.click(
255
  fn=run_tick,
256
- inputs=[commodity, anchor, country, lag, live],
257
- outputs=[tick_out, tip_hash],
258
  )
259
-
260
- n = gr.Slider(1, 200, value=20, step=1, label="Query last N records")
261
- q_btn = gr.Button("Query Ledger")
262
- ledger_out = gr.JSON(label="Ledger Records (last N)")
263
-
264
- q_btn.click(fn=query_ledger, inputs=[n], outputs=[ledger_out])
265
-
266
  with gr.Column(scale=1):
267
- gr.Markdown("## 🧠 Jarvis X Ledger Interface")
268
- gr.ChatInterface(fn=jarvis_chat, height=420)
 
 
 
 
 
269
 
270
  gr.Markdown(
271
- "_GVBDMS stores public/derived signals with metadata & hash chaining. "
272
- "It is not a trading system, bank interface, or contract enforcer._"
 
 
 
 
273
  )
274
 
275
- app.launch()
 
 
3
  import time
4
  import json
5
  import hashlib
6
+ from collections import deque
7
+ import numpy as np
8
  from typing import Dict, Any, List
9
 
10
  # ======================================================
11
+ # CONFIG & GLOBAL STATE
12
  # ======================================================
13
 
14
  LEDGER: List[Dict[str, Any]] = []
15
  CHAT_MEMORY: List[str] = []
16
+ OMEGA_MEMORY: deque = deque(maxlen=16) # Causal smoothing buffer (Ω influence)
 
 
 
17
 
18
  WORLD_BANK_BASE = "https://api.worldbank.org/v2"
19
  INDICATORS = {
 
22
  "POPULATION": "SP.POP.TOTL",
23
  }
24
 
25
+ # Simple semantic intent anchor (toy version of Θ projection target)
26
+ INTENT_ANCHOR = {
27
+ "stability": 0.92,
28
+ "transparency": 0.88,
29
+ "realism": 0.85
30
+ }
31
+ COHERENCE_THRESHOLD = 0.65 # Below this → refusal/quarantine
32
+
33
+ # ======================================================
34
+ # WORLD BANK MACRO FETCH
35
+ # ======================================================
36
+
37
+ def fetch_indicator(indicator: str, country: str = "WLD", year: str = "2023") -> float | None:
38
  try:
39
+ url = f"{WORLD_BANK_BASE}/country/{country}/indicator/{indicator}?format=json&per_page=1&date={year}"
40
+ r = requests.get(url, timeout=7)
41
+ r.raise_for_status()
42
+ data = r.json()
43
+ if len(data) > 1 and data[1]:
44
+ return float(data[1][0].get("value", np.nan))
45
+ return None
46
  except Exception:
47
  return None
48
 
49
+ def fetch_macro_anchor(country: str = "WLD", use_live: bool = True) -> Dict[str, Any]:
50
+ if not use_live:
51
+ return {"country": country, "gdp": None, "inflation": None, "population": None}
52
+
53
+ gdp = fetch_indicator(INDICATORS["GDP"], country)
54
+ inflation = fetch_indicator(INDICATORS["INFLATION"], country)
55
+ population = fetch_indicator(INDICATORS["POPULATION"], country)
56
+
57
  return {
58
  "country": country,
59
+ "gdp": gdp if gdp is not None else np.nan,
60
+ "inflation": inflation if inflation is not None else np.nan,
61
+ "population": population if population is not None else np.nan,
62
  }
63
 
64
  # ======================================================
65
+ # BIT + HASH + COHERENCE LAYER
66
  # ======================================================
67
 
68
  def canonical_bytes(obj: Any) -> bytes:
69
+ return json.dumps(obj, sort_keys=True, separators=(",", ":")).encode('utf-8')
70
 
71
+ def compute_bit_stats(payload: Dict) -> Dict:
72
  b = canonical_bytes(payload)
73
+ unique = len(set(b))
74
  return {
75
  "bytes": len(b),
76
  "bits": len(b) * 8,
77
+ "symbol_diversity": unique,
78
+ "entropy_proxy": round(unique / max(len(b), 1), 6)
79
  }
80
 
81
+ def hash_record(payload: Dict, prev_hash: str) -> str:
82
+ data = {"payload": payload, "prev_hash": prev_hash}
83
+ return hashlib.sha256(canonical_bytes(data)).hexdigest()
84
+
85
+ def compute_coherence_score(values_dict: Dict[str, float]) -> float:
86
+ """Very simple toy coherence metric (closer to intent anchor = higher score)"""
87
+ if not values_dict:
88
+ return 0.0
89
+
90
+ scores = []
91
+ for k, target in INTENT_ANCHOR.items():
92
+ if k in values_dict and not np.isnan(values_dict[k]):
93
+ val = values_dict[k]
94
+ norm_diff = min(1.0, abs(val - target) / max(abs(target), 0.01))
95
+ scores.append(1.0 - norm_diff)
96
+
97
+ return round(np.mean(scores) if scores else 0.5, 4)
98
+
99
+ def omega_smooth(key: str, new_value: float) -> float:
100
+ """Simple EMA style smoothing (early Ω influence)"""
101
+ if not OMEGA_MEMORY:
102
+ OMEGA_MEMORY.append({key: new_value})
103
+ return new_value
104
+
105
+ prev = OMEGA_MEMORY[-1].get(key, new_value)
106
+ alpha = 0.3
107
+ smoothed = alpha * new_value + (1 - alpha) * prev
108
+ OMEGA_MEMORY.append({key: smoothed})
109
+ return round(smoothed, 6)
110
 
111
  # ======================================================
112
+ # SIGNAL GENERATORS (now with smoothing)
113
  # ======================================================
114
 
115
+ def commodity_signal(commodity: str, anchor: float, macro: Dict) -> Dict:
116
+ gdp_scale = macro.get("gdp", 1e14) / 1e14 if macro.get("gdp") else 1.0
117
  supply = anchor * gdp_scale
118
+ demand = supply * 0.94
119
+ price_index = round((supply / 12.0) * gdp_scale, 4)
120
+
121
+ smoothed_price = omega_smooth("price_index", price_index)
122
+
123
  return {
124
  "type": "commodity",
125
  "commodity": commodity,
126
  "supply": round(supply, 4),
127
  "demand": round(demand, 4),
128
+ "price_index": smoothed_price,
129
+ "currency_flow": round(demand * smoothed_price, 4),
130
  }
131
 
132
+ def logistics_signal(econ: Dict) -> Dict:
133
  friction = abs(econ["supply"] - econ["demand"]) / max(econ["supply"], 1e-9)
134
+ return {"type": "logistics", "friction": round(friction, 6)}
 
 
 
 
135
 
136
+ def energy_signal(econ: Dict) -> Dict:
137
  return {
138
  "type": "energy",
139
+ "energy_cost_index": round(econ["price_index"] * 0.42, 4),
140
+ "dependency": "high" if "oil" in econ["commodity"].lower() or "gas" in econ["commodity"].lower() else "moderate"
141
  }
142
 
143
+ def sentiment_signal(seed: str) -> Dict:
144
+ import random
145
+ random.seed(sum(ord(c) for c in seed + str(time.time())[:8]))
146
+ conf = random.uniform(0.62, 0.91)
147
  return {
148
  "type": "sentiment",
149
+ "market_confidence": round(omega_smooth("confidence", conf), 6)
 
 
 
 
 
 
 
 
 
 
 
 
150
  }
151
 
152
  # ======================================================
153
+ # LEDGER OPERATIONS
154
  # ======================================================
155
 
156
+ def append_to_ledger(payload: Dict, meta: Dict, coherence: float) -> Dict | None:
157
+ if coherence < COHERENCE_THRESHOLD:
158
+ return {
159
+ "status": "refused",
160
+ "reason": f"Coherence too low: {coherence:.4f} < {COHERENCE_THRESHOLD}",
161
+ "payload": payload
162
+ }
163
+
164
+ prev_hash = LEDGER[-1]["hash"] if LEDGER else "GENESIS"
165
+ record_hash = hash_record(payload, prev_hash)
166
+
167
  record = {
168
+ "hash": record_hash,
169
+ "prev_hash": prev_hash,
170
  "payload": payload,
171
+ "metadata": {**meta, "coherence_score": coherence},
172
+ "bit_stats": compute_bit_stats(payload),
173
  "ts": time.time(),
174
  }
175
+
176
  LEDGER.append(record)
177
  return record
178
 
179
  def run_tick(commodity: str, anchor: float, country: str, lag_days: int, use_live: bool):
180
  t0 = time.time()
181
+
182
+ macro = fetch_macro_anchor(country, use_live)
183
  latency = time.time() - t0
184
+
 
185
  meta_macro = {
186
+ "source": "world_bank" if use_live and macro["gdp"] is not None else "synthetic",
187
  "country": country,
188
  "latency_s": round(latency, 4),
189
+ "schema": "macro.v1"
 
190
  }
191
+
192
+ # Macro coherence (very basic)
193
+ macro_coherence = compute_coherence_score({
194
+ "stability": 1.0 - abs(macro.get("inflation", 0))/10,
195
+ "realism": 1.0 if macro.get("gdp") is not None else 0.4
196
+ })
197
+
198
+ macro_record = append_to_ledger({"type": "macro", **macro}, meta_macro, macro_coherence)
199
+ if macro_record and "status" in macro_record and macro_record["status"] == "refused":
200
+ return {"status": "macro_refused", "detail": macro_record}, None
201
+
202
+ # Derived signals
203
  econ = commodity_signal(commodity, anchor, macro)
204
  logi = logistics_signal(econ)
205
  ener = energy_signal(econ)
206
+ sent = sentiment_signal(commodity + country + str(int(time.time())))
207
+
208
+ feat = {
209
+ "type": "features",
210
+ "lag_days": lag_days,
211
+ "projected_price": round(econ["price_index"] * (1 + (1 - sent["market_confidence"]) * 0.07), 6),
212
+ "volatility_proxy": round(0.012 * lag_days, 6)
213
+ }
214
+
215
+ results = []
216
  for payload, schema in [
217
  (econ, "commodity.v1"),
218
  (logi, "logistics.v1"),
219
  (ener, "energy.v1"),
220
  (sent, "sentiment.v1"),
221
+ (feat, "features.v1")
222
  ]:
223
+ coherence = compute_coherence_score({
224
+ "stability": 1.0 - abs(logi["friction"]),
225
+ "transparency": sent["market_confidence"]
226
+ })
227
+
228
  meta = {
229
  "source": "derived",
 
 
 
230
  "schema": schema,
231
+ "coherence_score": coherence
232
  }
233
+ rec = append_to_ledger(payload, meta, coherence)
234
+ if rec:
235
+ results.append(rec)
236
+
237
+ tip = LEDGER[-1] if LEDGER else None
238
  return {
239
+ "status": "tick_complete",
240
+ "ledger_length": len(LEDGER),
241
+ "tip_hash": tip["hash"] if tip else None,
242
+ "latest_coherence": tip["metadata"].get("coherence_score") if tip else None,
243
+ "records_added": len(results)
244
+ }, tip["hash"] if tip else None
 
 
 
 
245
 
246
  # ======================================================
247
+ # CHAT INTERFACE - Jarvis X (enhanced awareness)
248
  # ======================================================
249
 
250
+ def jarvis_x_chat(message: str, history: List):
251
+ CHAT_MEMORY.append(message)
252
+ m = message.lower().strip()
253
+
254
+ if not LEDGER:
255
+ return "The ledger is still in genesis state. Please run a tick first.", history
256
+
257
+ tip = LEDGER[-1]
258
+
259
+ if any(k in m for k in ["latest", "tip", "current"]):
260
+ return json.dumps({
261
+ "tip_hash": tip["hash"],
262
+ "prev_hash": tip["prev_hash"],
263
+ "coherence": tip["metadata"].get("coherence_score", "N/A"),
264
+ "type": tip["payload"].get("type"),
265
+ "timestamp": time.ctime(tip["ts"])
266
+ }, indent=2), history
267
+
268
+ if "ledger" in m or "size" in m:
269
+ return f"Current ledger contains {len(LEDGER)} records. Average coherence: {np.mean([r['metadata'].get('coherence_score',0) for r in LEDGER]):.4f}", history
270
+
271
+ if "coherence" in m or "health" in m:
272
+ coherences = [r["metadata"].get("coherence_score", 0) for r in LEDGER[-12:]]
273
+ return f"Recent coherence trend (last {len(coherences)}): {coherences}\nAverage: {np.mean(coherences):.4f}", history
274
+
275
+ if "refused" in m or "rejected" in m:
276
+ refused = [r for r in LEDGER if r["metadata"].get("coherence_score", 1) < COHERENCE_THRESHOLD]
277
+ return f"{len(refused)} records were refused due to low coherence.", history
278
+
279
+ return (
280
+ "Jarvis X online. Available commands:\n"
281
+ "• latest / tip\n"
282
+ "• ledger / size\n"
283
+ " coherence / health\n"
284
+ "• refused / rejected\n\n"
285
+ "The manifold is permeating... speak your intent."
286
+ ), history
287
 
288
  # ======================================================
289
+ # GRADIO INTERFACE
290
  # ======================================================
291
 
292
+ css = """
293
+ .gradio-container {font-family: 'Segoe UI', system-ui;}
294
+ .header {text-align: center; padding: 1rem; background: linear-gradient(90deg, #1e3a8a, #3b82f6);}
295
+ """
296
 
297
+ with gr.Blocks(css=css) as app:
298
+ gr.Markdown(
299
+ """
300
+ # 🌌 CodexFlow • IRE Permeation Engine
301
+ **Global Virtual Bit + Metadata System with Early Semantic Coherence Enforcement**
302
+ _January 11, 2026 — Beyond SOTA prototype_
303
+ """,
304
+ elem_classes="header"
305
+ )
306
+
307
  with gr.Row():
308
  with gr.Column(scale=2):
309
+ gr.Markdown("### Data Ingestion & Tick Controls")
310
+
311
+ with gr.Row():
312
+ commodity = gr.Dropdown(
313
+ choices=["Gold", "Oil", "Gas", "Wheat", "Copper", "Lithium"],
314
+ value="Gold",
315
+ label="Commodity"
316
+ )
317
+ anchor = gr.Number(value=1200, label="Physical Anchor (tons/price unit)")
318
+
319
+ with gr.Row():
320
+ country = gr.Textbox(value="WLD", label="Country Code (WLD = World)")
321
+ live_data = gr.Checkbox(value=True, label="Use Live World Bank Data")
322
+ lag_days = gr.Slider(1, 180, value=7, step=1, label="Lag (days)")
323
+
324
+ run_btn = gr.Button("Run Tick → Permeate Ledger", variant="primary")
325
+
326
+ tick_result = gr.JSON(label="Tick Result")
327
+ current_tip = gr.Textbox(label="Current Tip Hash", interactive=False)
328
+
329
  run_btn.click(
330
  fn=run_tick,
331
+ inputs=[commodity, anchor, country, lag_days, live_data],
332
+ outputs=[tick_result, current_tip]
333
  )
334
+
 
 
 
 
 
 
335
  with gr.Column(scale=1):
336
+ gr.Markdown("### Jarvis X Manifold Interface")
337
+ chat = gr.ChatInterface(
338
+ fn=jarvis_x_chat,
339
+ chatbot=gr.Chatbot(height=380),
340
+ title="Jarvis X Resonance",
341
+ description="Query the manifold state • coherence • provenance"
342
+ )
343
 
344
  gr.Markdown(
345
+ """
346
+ ---
347
+ **Status**: Early IRE permeation • Λ constraint stub • Ω memory smoothing
348
+ **Coherence threshold**: 0.65 • Records refused below this level are quarantined
349
+ *Not financial advice — research & provenance-first prototype*
350
+ """
351
  )
352
 
353
+ if __name__ == "__main__":
354
+ app.launch()