v0.5.5: node token meters in frontend
- Per-node context fill bars (input/output/memorizer/sensor)
- Color-coded: green <50%, amber 50-80%, red >80%
- Sensor meter shows tick count + latest deltas
- Token info in trace context events

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
parent
ab661775ef
commit
5c7aece397
7
agent.py
7
agent.py
@ -211,6 +211,7 @@ class Node:
|
||||
def __init__(self, send_hud):
    """Set up HUD plumbing and token-meter state for this node.

    Args:
        send_hud: async callable used to push HUD event dicts to the frontend.
    """
    # Token-meter bookkeeping surfaced to the frontend context meters.
    self.last_context_tokens = 0
    self.context_fill_pct = 0
    # Async callable to emit hud events to frontend.
    self.send_hud = send_hud
async def hud(self, event: str, **data):
    """Emit a HUD event for this node to the frontend.

    Merges the node name and event label with any extra payload fields,
    then forwards the combined dict through the injected send_hud coroutine.
    Keys in ``data`` override the defaults, matching dict-literal spread order.
    """
    payload = {"node": self.name, "event": event}
    payload.update(data)
    await self.send_hud(payload)
@ -403,7 +404,7 @@ ONE sentence. No content, no response — just your perception of what came thro
|
||||
messages.append(msg)
|
||||
messages = self.trim_context(messages)
|
||||
|
||||
await self.hud("context", messages=messages)
|
||||
await self.hud("context", messages=messages, tokens=self.last_context_tokens, max_tokens=self.max_context_tokens, fill_pct=self.context_fill_pct)
|
||||
instruction = await llm_call(self.model, messages)
|
||||
log.info(f"[input] → command: {instruction}")
|
||||
await self.hud("perceived", instruction=instruction)
|
||||
@ -435,7 +436,7 @@ Be natural. Be concise. If the user asks you to do something, do it — don't de
|
||||
messages.append({"role": "system", "content": f"Input perception: {command.instruction}"})
|
||||
messages = self.trim_context(messages)
|
||||
|
||||
await self.hud("context", messages=messages)
|
||||
await self.hud("context", messages=messages, tokens=self.last_context_tokens, max_tokens=self.max_context_tokens, fill_pct=self.context_fill_pct)
|
||||
|
||||
# Stream response
|
||||
client, resp = await llm_call(self.model, messages, stream=True)
|
||||
@ -526,7 +527,7 @@ Output ONLY valid JSON. No explanation, no markdown fences."""
|
||||
messages.append({"role": "user", "content": "Update the shared state based on this conversation. Output JSON only."})
|
||||
messages = self.trim_context(messages)
|
||||
|
||||
await self.hud("context", messages=messages)
|
||||
await self.hud("context", messages=messages, tokens=self.last_context_tokens, max_tokens=self.max_context_tokens, fill_pct=self.context_fill_pct)
|
||||
|
||||
raw = await llm_call(self.model, messages)
|
||||
log.info(f"[memorizer] raw: {raw[:200]}")
|
||||
|
||||
@ -138,9 +138,14 @@ function handleHud(data) {
|
||||
const event = data.event || '';
|
||||
|
||||
if (event === 'context') {
|
||||
// Expandable: show message count, click to see full context
|
||||
// Update node meter
|
||||
if (data.tokens !== undefined) {
|
||||
updateMeter(node, data.tokens, data.max_tokens, data.fill_pct);
|
||||
}
|
||||
// Expandable: show message count + token info
|
||||
const count = (data.messages || []).length;
|
||||
const summary = count + ' msgs: ' + (data.messages || []).map(m =>
|
||||
const tokenInfo = data.tokens ? ` [${data.tokens}/${data.max_tokens}t ${data.fill_pct}%]` : '';
|
||||
const summary = count + ' msgs' + tokenInfo + ': ' + (data.messages || []).map(m =>
|
||||
m.role[0].toUpperCase() + ':' + truncate(m.content, 30)
|
||||
).join(' | ');
|
||||
const detail = (data.messages || []).map((m, i) =>
|
||||
@ -148,6 +153,9 @@ function handleHud(data) {
|
||||
).join('\n');
|
||||
addTrace(node, 'context', summary, 'context', detail);
|
||||
|
||||
} else if (event === 'perceived') {
|
||||
addTrace(node, 'perceived', data.instruction, 'instruction');
|
||||
|
||||
} else if (event === 'decided') {
|
||||
addTrace(node, 'decided', data.instruction, 'instruction');
|
||||
|
||||
@ -171,6 +179,24 @@ function handleHud(data) {
|
||||
} else if (event === 'done') {
|
||||
addTrace(node, 'done', '');
|
||||
|
||||
} else if (event === 'tick') {
|
||||
// Update sensor meter with tick count
|
||||
const meter = document.getElementById('meter-sensor');
|
||||
if (meter) {
|
||||
const text = meter.querySelector('.nm-text');
|
||||
const deltas = Object.entries(data.deltas || {}).map(([k,v]) => k + '=' + v).join(' ');
|
||||
text.textContent = 'tick #' + (data.tick || 0) + (deltas ? ' | ' + deltas : '');
|
||||
}
|
||||
if (data.deltas && Object.keys(data.deltas).length) {
|
||||
const deltas = Object.entries(data.deltas).map(([k,v]) => k + '=' + truncate(String(v), 30)).join(' ');
|
||||
addTrace(node, 'tick #' + data.tick, deltas);
|
||||
}
|
||||
|
||||
} else if (event === 'started' || event === 'stopped') {
|
||||
const meter = document.getElementById('meter-sensor');
|
||||
if (meter) meter.querySelector('.nm-text').textContent = event;
|
||||
addTrace(node, event, '');
|
||||
|
||||
} else {
|
||||
// Generic fallback
|
||||
const detail = JSON.stringify(data, null, 2);
|
||||
@ -203,6 +229,16 @@ function addTrace(node, event, text, cls, detail) {
|
||||
scroll(traceEl);
|
||||
}
|
||||
|
||||
// Refresh one node's context-fill meter: bar width, traffic-light color, readout.
// Silently ignores nodes that have no meter element in the DOM.
function updateMeter(node, tokens, maxTokens, fillPct) {
    const el = document.getElementById('meter-' + node);
    if (!el) return;
    // Traffic-light thresholds: >80% red, >50% amber, otherwise green.
    let color = '#22c55e';
    if (fillPct > 80) {
        color = '#ef4444';
    } else if (fillPct > 50) {
        color = '#f59e0b';
    }
    const bar = el.querySelector('.nm-fill');
    bar.style.width = fillPct + '%';
    bar.style.backgroundColor = color;
    el.querySelector('.nm-text').textContent =
        tokens + ' / ' + maxTokens + 't (' + fillPct + '%)';
}
// Pin an element's viewport to its bottom edge (auto-scroll on new entries).
function scroll(el) {
    el.scrollTop = el.scrollHeight;
}

// HTML-escape a string by round-tripping it through a detached span.
function esc(s) {
    const span = document.createElement('span');
    span.textContent = s;
    return span.innerHTML;
}

// Clamp a string to n characters, appending an ellipsis when cut.
function truncate(s, n) {
    if (s.length <= n) return s;
    return s.slice(0, n) + '\u2026';
}
|
||||
@ -13,6 +13,13 @@
|
||||
<div id="status">disconnected</div>
|
||||
</div>
|
||||
|
||||
<!-- Per-node token meters: label + fill bar + numeric readout.
     Children of .node-meter are flex items, so inter-tag whitespace is inert. -->
<div id="node-metrics">
  <div class="node-meter" id="meter-input">
    <span class="nm-label">input</span>
    <div class="nm-bar"><div class="nm-fill"></div></div>
    <span class="nm-text">—</span>
  </div>
  <div class="node-meter" id="meter-output">
    <span class="nm-label">output</span>
    <div class="nm-bar"><div class="nm-fill"></div></div>
    <span class="nm-text">—</span>
  </div>
  <div class="node-meter" id="meter-memorizer">
    <span class="nm-label">memorizer</span>
    <div class="nm-bar"><div class="nm-fill"></div></div>
    <span class="nm-text">—</span>
  </div>
  <div class="node-meter" id="meter-sensor">
    <span class="nm-label">sensor</span>
    <div class="nm-bar"><div class="nm-fill"></div></div>
    <span class="nm-text">—</span>
  </div>
</div>
|
||||
<div id="main">
|
||||
<div class="panel chat-panel">
|
||||
<div class="panel-header chat-h">Chat</div>
|
||||
|
||||
@ -6,6 +6,18 @@ body { font-family: system-ui, sans-serif; background: #0a0a0a; color: #e0e0e0;
|
||||
#top-bar h1 { font-size: 0.85rem; font-weight: 600; color: #888; }
|
||||
#status { font-size: 0.75rem; color: #666; }
|
||||
|
||||
/* Node metrics bar: one horizontal strip of per-node token meters. */
#node-metrics {
  display: flex;
  gap: 1px;
  padding: 0;
  background: #111;
  border-bottom: 1px solid #222;
}
.node-meter {
  flex: 1;
  display: flex;
  align-items: center;
  gap: 0.4rem;
  padding: 0.25rem 0.6rem;
  background: #0a0a0a;
}
.nm-label {
  font-size: 0.65rem;
  font-weight: 700;
  text-transform: uppercase;
  letter-spacing: 0.03em;
  min-width: 4.5rem;
}
/* Per-node accent colors match the trace panel palette. */
#meter-input .nm-label { color: #f59e0b; }
#meter-output .nm-label { color: #34d399; }
#meter-memorizer .nm-label { color: #c084fc; }
#meter-sensor .nm-label { color: #60a5fa; }
.nm-bar {
  flex: 1;
  height: 6px;
  background: #1a1a1a;
  border-radius: 3px;
  overflow: hidden;
}
/* Width/color are driven by updateMeter(); animate both transitions. */
.nm-fill {
  height: 100%;
  width: 0%;
  border-radius: 3px;
  transition: width 0.3s, background-color 0.3s;
  background: #333;
}
.nm-text {
  font-size: 0.6rem;
  color: #555;
  min-width: 5rem;
  text-align: right;
  font-family: monospace;
}
||||
/* Two-column layout: chat 1/3 | trace 2/3 */
|
||||
#main { flex: 1; display: grid; grid-template-columns: 1fr 2fr; gap: 1px; background: #222; overflow: hidden; min-height: 0; }
|
||||
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user