From 7262545b39fec634e980bf3394696240f34a30c6 Mon Sep 17 00:00:00 2001
From: Huso
Date: Tue, 2 Dec 2025 12:08:44 +0000
Subject: [PATCH] fix: use console.error for debug messages to prevent MCP
 JSON parse errors
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

The MCP protocol requires stdout to contain only valid JSON-RPC
messages. Debug messages like `[DB] Using...` written to stdout cause
Claude Desktop to fail with "Unexpected token" errors when parsing the
MCP response.

Changed all console.log statements with debug prefixes to console.error
so they go to stderr instead of stdout, preventing JSON stream
pollution.

Affected files:
- backend/src/ai/mcp.ts
- backend/src/core/db.ts
- backend/src/core/migrate.ts
- backend/src/core/models.ts
- backend/src/core/telemetry.ts
- backend/src/memory/decay.ts
- backend/src/memory/embed.ts
- backend/src/memory/reflect.ts
- backend/src/migrate.ts
- backend/src/ops/dynamics.ts
- backend/src/ops/ingest.ts
- backend/src/server/index.ts
- backend/src/server/middleware/auth.ts
- backend/src/temporal_graph/store.ts

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude
---
 backend/src/ai/mcp.ts                 |  4 ++--
 backend/src/core/db.ts                | 10 +++++-----
 backend/src/core/migrate.ts           |  2 +-
 backend/src/core/models.ts            |  2 +-
 backend/src/core/telemetry.ts         |  2 +-
 backend/src/memory/decay.ts           |  4 ++--
 backend/src/memory/embed.ts           |  8 ++++----
 backend/src/memory/reflect.ts         | 14 +++++++-------
 backend/src/migrate.ts                | 12 ++++++------
 backend/src/ops/dynamics.ts           |  2 +-
 backend/src/ops/ingest.ts             | 22 +++++++++++-----------
 backend/src/server/index.ts           | 26 +++++++++++++-------------
 backend/src/server/middleware/auth.ts |  2 +-
 backend/src/temporal_graph/store.ts   |  8 ++++----
 14 files changed, 59 insertions(+), 59 deletions(-)

diff --git a/backend/src/ai/mcp.ts b/backend/src/ai/mcp.ts
index 74ca59e..2546537 100644
--- a/backend/src/ai/mcp.ts
+++ b/backend/src/ai/mcp.ts
@@ -419,7 +419,7 @@ export const mcp = (app: any) => {
   const srv_ready = srv
     .connect(trans)
     .then(() => {
-      console.log("[MCP] Server started and transport connected");
+      console.error("[MCP] Server started and transport connected");
     })
     .catch((error) => {
       console.error("[MCP] Failed to initialize transport:", error);
@@ -434,7 +434,7 @@
       send_err(res, -32600, "Request body must be a JSON object");
       return;
     }
-    console.log("[MCP] Incoming request:", JSON.stringify(pay));
+    console.error("[MCP] Incoming request:", JSON.stringify(pay));
     set_hdrs(res);
     await trans.handleRequest(req, res, pay);
   } catch (error) {
diff --git a/backend/src/core/db.ts b/backend/src/core/db.ts
index 113a65d..4a81b50 100644
--- a/backend/src/core/db.ts
+++ b/backend/src/core/db.ts
@@ -141,7 +141,7 @@ if (is_pg) {
   const admin = pool("postgres");
   try {
     await admin.query(`CREATE DATABASE ${db_name}`);
-    console.log(`[DB] Created ${db_name}`);
+    console.error(`[DB] Created ${db_name}`);
   } catch (e: any) {
     if (e.code !== "42P04") throw e;
   } finally {
@@ -201,11 +201,11 @@
   // Initialize VectorStore
   if (env.vector_backend === "valkey") {
     vector_store = new ValkeyVectorStore();
-    console.log("[DB] Using Valkey VectorStore");
+    console.error("[DB] Using Valkey VectorStore");
   } else {
     const vt = process.env.OM_VECTOR_TABLE || "openmemory_vectors";
     vector_store = new PostgresVectorStore({ run_async, get_async, all_async }, v.replace(/"/g, ""));
-    console.log(`[DB] Using Postgres VectorStore with table: ${v}`);
+    console.error(`[DB] Using Postgres VectorStore with table: ${v}`);
   }
 };
 init().catch((err) => {
@@ -538,11 +538,11 @@
 
   if (env.vector_backend === "valkey") {
     vector_store = new ValkeyVectorStore();
-    console.log("[DB] Using Valkey VectorStore");
+    console.error("[DB] Using Valkey VectorStore");
   } else {
     const vt = process.env.OM_VECTOR_TABLE || "vectors";
     vector_store = new PostgresVectorStore({ run_async, get_async, all_async }, vt);
-    console.log(`[DB] Using SQLite VectorStore with table: ${vt}`);
+    console.error(`[DB] Using SQLite VectorStore with table: ${vt}`);
   }
 
 transaction = {
diff --git a/backend/src/core/migrate.ts b/backend/src/core/migrate.ts
index 97c6bb9..d5fcb63 100644
--- a/backend/src/core/migrate.ts
+++ b/backend/src/core/migrate.ts
@@ -4,7 +4,7 @@ import { Pool } from "pg";
 
 const is_pg = env.metadata_backend === "postgres";
 
-const log = (msg: string) => console.log(`[MIGRATE] ${msg}`);
+const log = (msg: string) => console.error(`[MIGRATE] ${msg}`);
 
 interface Migration {
   version: string;
diff --git a/backend/src/core/models.ts b/backend/src/core/models.ts
index 055bf7d..f852688 100644
--- a/backend/src/core/models.ts
+++ b/backend/src/core/models.ts
@@ -15,7 +15,7 @@ export const load_models = (): model_cfg => {
   try {
     const yml = readFileSync(p, "utf-8");
     cfg = parse_yaml(yml);
-    console.log(
+    console.error(
       `[MODELS] Loaded models.yml (${Object.keys(cfg).length} sectors)`,
     );
     return cfg;
diff --git a/backend/src/core/telemetry.ts b/backend/src/core/telemetry.ts
index f4f92f9..f40e98e 100644
--- a/backend/src/core/telemetry.ts
+++ b/backend/src/core/telemetry.ts
@@ -38,7 +38,7 @@ export const sendTelemetry = async () => {
     if (!res.ok) {
       console.warn(``)
     } else {
-      console.log(`[telemetry] sent`)
+      console.error(`[telemetry] sent`)
     }
   } catch {
     // silently ignore telemetry errors
diff --git a/backend/src/memory/decay.ts b/backend/src/memory/decay.ts
index 0fb9e6f..82ee769 100644
--- a/backend/src/memory/decay.ts
+++ b/backend/src/memory/decay.ts
@@ -218,12 +218,12 @@ const top_keywords = (t: string, k = 5): string[] => {
 
 export const apply_decay = async () => {
   if (active_q > 0) {
-    console.log(`[decay] skipped - ${active_q} active queries`);
+    console.error(`[decay] skipped - ${active_q} active queries`);
     return;
   }
   const now_ts = Date.now();
   if (now_ts - last_decay < cooldown) {
-    console.log(
+    console.error(
       `[decay] skipped - cooldown active (${((cooldown - (now_ts - last_decay)) / 1000).toFixed(0)}s remaining)`,
     );
     return;
diff --git a/backend/src/memory/embed.ts b/backend/src/memory/embed.ts
index d5abf74..a056b95 100644
--- a/backend/src/memory/embed.ts
+++ b/backend/src/memory/embed.ts
@@ -123,7 +123,7 @@ async function get_sem_emb(t: string, s: string): Promise<number[]> {
     try {
       const result = await embed_with_provider(provider, t, s);
       if (i > 0) {
-        console.log(
+        console.error(
           `[EMBED] Fallback to ${provider} succeeded for sector: ${s}`,
         );
       }
@@ -174,7 +174,7 @@ async function emb_batch_with_fallback(
       }
     }
     if (i > 0) {
-      console.log(
+      console.error(
         `[EMBED] Fallback to ${provider} succeeded for batch`,
       );
     }
@@ -522,7 +522,7 @@ export async function embedMultiSector(
     simp &&
     (env.emb_kind === "gemini" || env.emb_kind === "openai")
   ) {
-    console.log(
+    console.error(
       `[EMBED] Simple mode (1 batch for ${secs.length} sectors)`,
     );
     const tb: Record = {};
@@ -533,7 +533,7 @@
       r.push({ sector: s, vector: v, dim: v.length }),
     );
   } else {
-    console.log(`[EMBED] Advanced mode (${secs.length} calls)`);
+    console.error(`[EMBED] Advanced mode (${secs.length} calls)`);
     const par = env.adv_embed_parallel && env.emb_kind !== "gemini";
     if (par) {
       const p = secs.map(async (s) => {
diff --git a/backend/src/memory/reflect.ts b/backend/src/memory/reflect.ts
index af49f41..4066b3d 100644
--- a/backend/src/memory/reflect.ts
+++ b/backend/src/memory/reflect.ts
@@ -108,18 +108,18 @@ const boost = async (ids: string[]) => {
 };
 
 export const run_reflection = async () => {
-  console.log("[REFLECT] Starting reflection job...");
+  console.error("[REFLECT] Starting reflection job...");
   const min = env.reflect_min || 20;
   const mems = await q.all_mem.all(100, 0);
-  console.log(
+  console.error(
     `[REFLECT] Fetched ${mems.length} memories (min required: ${min})`,
   );
   if (mems.length < min) {
-    console.log("[REFLECT] Not enough memories, skipping");
+    console.error("[REFLECT] Not enough memories, skipping");
     return { created: 0, reason: "low" };
   }
   const cls = cluster(mems);
-  console.log(`[REFLECT] Clustered into ${cls.length} groups`);
+  console.error(`[REFLECT] Clustered into ${cls.length} groups`);
   let n = 0;
   for (const c of cls) {
     const txt = summ(c);
@@ -131,7 +131,7 @@
       freq: c.n,
       at: new Date().toISOString(),
     };
-    console.log(
+    console.error(
       `[REFLECT] Creating reflection: ${c.n} memories, salience=${s.toFixed(3)}, sector=${c.mem[0].primary_sector}`,
     );
     await add_hsg_memory(txt, j(["reflect:auto"]), meta);
@@ -140,7 +140,7 @@
     n++;
   }
   if (n > 0) await log_maint_op("reflect", n);
-  console.log(`[REFLECT] Job complete: created ${n} reflections`);
+  console.error(`[REFLECT] Job complete: created ${n} reflections`);
   return { created: n, clusters: cls.length };
 };
 
@@ -153,7 +153,7 @@
     () => run_reflection().catch((e) => console.error("[REFLECT]", e)),
     int,
   );
-  console.log(`[REFLECT] Started: every ${env.reflect_interval || 10}m`);
+  console.error(`[REFLECT] Started: every ${env.reflect_interval || 10}m`);
 };
 
 export const stop_reflection = () => {
diff --git a/backend/src/migrate.ts b/backend/src/migrate.ts
index 14bda56..64233e4 100644
--- a/backend/src/migrate.ts
+++ b/backend/src/migrate.ts
@@ -48,7 +48,7 @@ async function get_existing_indexes(): Promise<Set<string>> {
 }
 
 async function run_migrations() {
-  console.log("[MIGRATE] Starting automatic migration...");
+  console.error("[MIGRATE] Starting automatic migration...");
   const existing_tables = await get_existing_tables();
   const existing_indexes = await get_existing_indexes();
 
@@ -58,7 +58,7 @@
 
   for (const [table_name, schema] of Object.entries(SCHEMA_DEFINITIONS)) {
     if (!existing_tables.has(table_name)) {
-      console.log(`[MIGRATE] Creating table: ${table_name}`);
+      console.error(`[MIGRATE] Creating table: ${table_name}`);
       const statements = schema.split(";").filter((s) => s.trim());
       for (const stmt of statements) {
         if (stmt.trim()) {
@@ -73,19 +73,19 @@
     const match = index_sql.match(/create index if not exists (\w+)/);
     const index_name = match ? match[1] : null;
     if (index_name && !existing_indexes.has(index_name)) {
-      console.log(`[MIGRATE] Creating index: ${index_name}`);
+      console.error(`[MIGRATE] Creating index: ${index_name}`);
       await run_async(index_sql);
       created_indexes++;
     }
   }
 
-  console.log(
+  console.error(
     `[MIGRATE] Migration complete: ${created_tables} tables, ${created_indexes} indexes created`,
   );
 
   const final_tables = await get_existing_tables();
-  console.log(`[MIGRATE] Total tables: ${final_tables.size}`);
-  console.log(`[MIGRATE] Tables: ${Array.from(final_tables).join(", ")}`);
+  console.error(`[MIGRATE] Total tables: ${final_tables.size}`);
+  console.error(`[MIGRATE] Tables: ${Array.from(final_tables).join(", ")}`);
 }
 
 run_migrations().catch((err) => {
diff --git a/backend/src/ops/dynamics.ts b/backend/src/ops/dynamics.ts
index 6be2c9f..815b753 100644
--- a/backend/src/ops/dynamics.ts
+++ b/backend/src/ops/dynamics.ts
@@ -166,7 +166,7 @@ export async function applyDualPhaseDecayToAllMemories(): Promise<void> {
     );
   });
   await Promise.all(ops);
-  console.log(`[DECAY] Applied to ${mems.length} memories`);
+  console.error(`[DECAY] Applied to ${mems.length} memories`);
 }
 
 export async function buildAssociativeWaypointGraphFromMemories(): Promise<
diff --git a/backend/src/ops/ingest.ts b/backend/src/ops/ingest.ts
index f665395..6c759aa 100644
--- a/backend/src/ops/ingest.ts
+++ b/backend/src/ops/ingest.ts
@@ -110,7 +110,7 @@ const link = async (
   try {
     await q.ins_waypoint.run(rid, cid, user_id || "anonymous", 1.0, ts, ts);
     await transaction.commit();
-    console.log(
+    console.error(
       `[INGEST] Linked: ${rid.slice(0, 8)} -> ${cid.slice(0, 8)} (section ${idx})`,
     );
   } catch (e) {
@@ -155,15 +155,15 @@
   }
 
   const secs = split(text, sz);
-  console.log(`[INGEST] Document: ${exMeta.estimated_tokens} tokens`);
-  console.log(`[INGEST] Splitting into ${secs.length} sections`);
+  console.error(`[INGEST] Document: ${exMeta.estimated_tokens} tokens`);
+  console.error(`[INGEST] Splitting into ${secs.length} sections`);
 
   let rid: string;
   const cids: string[] = [];
 
   try {
     rid = await mkRoot(text, ex, meta, user_id);
-    console.log(`[INGEST] Root memory created: ${rid}`);
+    console.error(`[INGEST] Root memory created: ${rid}`);
     for (let i = 0; i < secs.length; i++) {
       try {
         const cid = await mkChild(
@@ -176,7 +176,7 @@
         );
         cids.push(cid);
         await link(rid, cid, i, user_id);
-        console.log(
+        console.error(
           `[INGEST] Section ${i + 1}/${secs.length} processed: ${cid}`,
         );
       } catch (e) {
@@ -187,7 +187,7 @@
         throw e;
       }
     }
-    console.log(
+    console.error(
      `[INGEST] Completed: ${cids.length} sections linked to ${rid}`,
    );
    return {
@@ -237,15 +237,15 @@
   }
 
   const secs = split(ex.text, sz);
-  console.log(`[INGEST] URL: ${ex.metadata.estimated_tokens} tokens`);
-  console.log(`[INGEST] Splitting into ${secs.length} sections`);
+  console.error(`[INGEST] URL: ${ex.metadata.estimated_tokens} tokens`);
+  console.error(`[INGEST] Splitting into ${secs.length} sections`);
 
   let rid: string;
   const cids: string[] = [];
 
   try {
     rid = await mkRoot(ex.text, ex, { ...meta, source_url: url }, user_id);
-    console.log(`[INGEST] Root memory for URL: ${rid}`);
+    console.error(`[INGEST] Root memory for URL: ${rid}`);
     for (let i = 0; i < secs.length; i++) {
       try {
         const cid = await mkChild(
@@ -258,7 +258,7 @@
         );
         cids.push(cid);
         await link(rid, cid, i, user_id);
-        console.log(
+        console.error(
           `[INGEST] URL section ${i + 1}/${secs.length} processed: ${cid}`,
         );
       } catch (e) {
@@ -269,7 +269,7 @@
         throw e;
       }
     }
-    console.log(
+    console.error(
       `[INGEST] URL completed: ${cids.length} sections linked to ${rid}`,
     );
     return {
diff --git a/backend/src/server/index.ts b/backend/src/server/index.ts
index e6c8b79..dfaaca7 100644
--- a/backend/src/server/index.ts
+++ b/backend/src/server/index.ts
@@ -23,10 +23,10 @@ const ASC = `  ____  __  __
 
 const app = server({ max_payload_size: env.max_payload_size });
 
-console.log(ASC);
-console.log(`[CONFIG] Vector Dimension: ${env.vec_dim}`);
-console.log(`[CONFIG] Cache Segments: ${env.cache_segments}`);
-console.log(`[CONFIG] Max Active Queries: ${env.max_active}`);
+console.error(ASC);
+console.error(`[CONFIG] Vector Dimension: ${env.vec_dim}`);
+console.error(`[CONFIG] Cache Segments: ${env.cache_segments}`);
+console.error(`[CONFIG] Max Active Queries: ${env.max_active}`);
 
 // Warn about configuration mismatch that causes embedding incompatibility
 if (env.emb_kind !== "synthetic" && (tier === "hybrid" || tier === "fast")) {
@@ -67,19 +67,19 @@ routes(app);
 mcp(app);
 
 if (env.mode === "langgraph") {
-  console.log("[MODE] LangGraph integration enabled");
+  console.error("[MODE] LangGraph integration enabled");
 }
 
 const decayIntervalMs = env.decay_interval_minutes * 60 * 1000;
-console.log(
+console.error(
   `[DECAY] Interval: ${env.decay_interval_minutes} minutes (${decayIntervalMs / 1000}s)`,
 );
 
 setInterval(async () => {
-  console.log("[DECAY] Running HSG decay process...");
+  console.error("[DECAY] Running HSG decay process...");
   try {
     const result = await run_decay_process();
-    console.log(
+    console.error(
       `[DECAY] Completed: ${result.decayed}/${result.processed} memories updated`,
     );
   } catch (error) {
@@ -88,10 +88,10 @@
 }, decayIntervalMs);
 setInterval(
   async () => {
-    console.log("[PRUNE] Pruning weak waypoints...");
+    console.error("[PRUNE] Pruning weak waypoints...");
     try {
       const pruned = await prune_weak_waypoints();
-      console.log(`[PRUNE] Completed: ${pruned} waypoints removed`);
+      console.error(`[PRUNE] Completed: ${pruned} waypoints removed`);
     } catch (error) {
       console.error("[PRUNE] Failed:", error);
     }
@@ -100,7 +100,7 @@
   },
 );
 run_decay_process()
   .then((result: any) => {
-    console.log(
+    console.error(
       `[INIT] Initial decay: ${result.decayed}/${result.processed} memories updated`,
     );
   })
@@ -109,9 +109,9 @@
 start_reflection();
 start_user_summary_reflection();
 
-console.log(`[SERVER] Starting on port ${env.port}`);
+console.error(`[SERVER] Starting on port ${env.port}`);
 app.listen(env.port, () => {
-  console.log(`[SERVER] Running on http://localhost:${env.port}`);
+  console.error(`[SERVER] Running on http://localhost:${env.port}`);
   sendTelemetry().catch(() => {
     // ignore telemetry failures
   });
diff --git a/backend/src/server/middleware/auth.ts b/backend/src/server/middleware/auth.ts
index 484056b..ad875d6 100644
--- a/backend/src/server/middleware/auth.ts
+++ b/backend/src/server/middleware/auth.ts
@@ -118,7 +118,7 @@ export function authenticate_api_request(req: any, res: any, next: any) {
 export function log_authenticated_request(req: any, res: any, next: any) {
   const key = extract_api_key(req);
   if (key)
-    console.log(
+    console.error(
       `[AUTH] ${req.method} ${req.path} [${crypto.createHash("sha256").update(key).digest("hex").slice(0, 8)}...]`,
     );
   next();
diff --git a/backend/src/temporal_graph/store.ts b/backend/src/temporal_graph/store.ts
index f2ab3f3..ffaf844 100644
--- a/backend/src/temporal_graph/store.ts
+++ b/backend/src/temporal_graph/store.ts
@@ -87,13 +87,13 @@ export const insert_edge = async (
 
     VALUES (?, ?, ?, ?, ?, NULL, ?, ?)
   `, [id, source_id, target_id, relation_type, valid_from_ts, weight, metadata ? JSON.stringify(metadata) : null])
-  console.log(`[TEMPORAL] Created edge: ${source_id} --[${relation_type}]--> ${target_id}`)
+  console.error(`[TEMPORAL] Created edge: ${source_id} --[${relation_type}]--> ${target_id}`)
   return id
 }
 
 export const invalidate_edge = async (id: string, valid_to: Date = new Date()): Promise<void> => {
   await run_async(`UPDATE temporal_edges SET valid_to = ? WHERE id = ?`, [valid_to.getTime(), id])
-  console.log(`[TEMPORAL] Invalidated edge ${id}`)
+  console.error(`[TEMPORAL] Invalidated edge ${id}`)
 }
 
 export const batch_insert_facts = async (facts: Array<{
@@ -120,7 +120,7 @@
       ids.push(id)
     }
     await run_async('COMMIT')
-    console.log(`[TEMPORAL] Batch inserted ${ids.length} facts`)
+    console.error(`[TEMPORAL] Batch inserted ${ids.length} facts`)
   } catch (error) {
     await run_async('ROLLBACK')
     throw error
@@ -141,7 +141,7 @@ export const apply_confidence_decay = async (decay_rate: number = 0.01): Promise<number> => {
   const result = await get_async(`SELECT changes() as changes`) as any
   const changes = result?.changes || 0
 
-  console.log(`[TEMPORAL] Applied confidence decay to ${changes} facts`)
+  console.error(`[TEMPORAL] Applied confidence decay to ${changes} facts`)
   return changes
 }
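
Note (illustrative, not part of the patch): the fix enforces a single
convention -- JSON-RPC frames on stdout, diagnostics on stderr. A minimal
TypeScript sketch of that convention follows; the `debug` helper and its
prefix format are hypothetical, not an API of this repository.

// Illustrative sketch only. Routing all diagnostics through a
// stderr-bound helper means new call sites cannot reintroduce
// stdout pollution by accident.
const debug = (prefix: string, ...args: unknown[]): void => {
  // console.error writes to stderr, so stdout stays reserved for the
  // JSON-RPC messages that MCP clients such as Claude Desktop parse.
  console.error(`[${prefix}]`, ...args);
};

// Emits "[DB] Using Valkey VectorStore" on stderr; stdout stays clean.
debug("DB", "Using Valkey VectorStore");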