diff --git a/bot/package.json b/bot/package.json
index 9f42fc6..2294704 100644
--- a/bot/package.json
+++ b/bot/package.json
@@ -7,7 +7,8 @@
   "scripts": {
     "build": "tsc",
     "start": "node dist/index.js",
-    "dev": "tsx src/index.ts"
+    "dev": "tsx src/index.ts",
+    "assistant": "tsx src/test-assistant.ts"
   },
   "dependencies": {
     "@mariozechner/pi-ai": "^0.51.2",
diff --git a/bot/src/llm.ts b/bot/src/llm.ts
index 7c32dc3..6b34e51 100644
--- a/bot/src/llm.ts
+++ b/bot/src/llm.ts
@@ -25,6 +25,16 @@
 import { getNipInfo, searchNips } from "./data/nips.js";
 
 const PROVIDER = process.env.LLM_PROVIDER || "anthropic";
 const MODEL_ID = process.env.LLM_MODEL || "claude-3-5-haiku-20241022";
+// API key from environment
+const API_KEY = process.env.ANTHROPIC_API_KEY;
+
+if (!API_KEY) {
+  console.error("ERROR: ANTHROPIC_API_KEY environment variable is not set.");
+  console.error("Please set it before running the bot:");
+  console.error("  export ANTHROPIC_API_KEY=sk-ant-...");
+  process.exit(1);
+}
+
 // Use default model for simplicity (typed correctly)
 const model = getModel("anthropic", "claude-3-5-haiku-20241022");
@@ -247,7 +257,14 @@ export async function processMessage(userMessage: string): Promise<string> {
   while (iterations < maxIterations) {
     iterations++;
 
-    const response = await complete(model, context);
+    const response = await complete(model, context, { apiKey: API_KEY });
+
+    // Debug: log full response if DEBUG is set
+    if (process.env.DEBUG) {
+      console.error(`DEBUG: Iteration ${iterations}`);
+      console.error("DEBUG: response =", JSON.stringify(response, null, 2));
+    }
+
     context.messages.push(response);
 
     // Check for tool calls
@@ -257,6 +274,16 @@ export async function processMessage(userMessage: string): Promise<string> {
       // No tool calls, extract text response
       const textBlocks = response.content.filter((b) => b.type === "text");
       const textContent = textBlocks.map((b) => (b as any).text).join("\n");
+
+      // Debug: log response structure if empty
+      if (!textContent && process.env.DEBUG) {
+        console.error("DEBUG: Empty text response");
+        console.error(
+          "DEBUG: response.content =",
+          JSON.stringify(response.content, null, 2),
+        );
+      }
+
       return (
         textContent ||
         "I couldn't generate a response. Please try rephrasing your question."
diff --git a/bot/src/test-assistant.ts b/bot/src/test-assistant.ts
new file mode 100644
index 0000000..5c2712b
--- /dev/null
+++ b/bot/src/test-assistant.ts
@@ -0,0 +1,40 @@
+/**
+ * Test script for the Grimoire REQ Assistant
+ *
+ * Usage: npm run assistant "your question here"
+ */
+
+import { processMessage } from "./llm.js";
+
+async function main() {
+  const question = process.argv.slice(2).join(" ");
+
+  if (!question) {
+    console.error('Usage: npm run assistant "your question here"');
+    console.error("");
+    console.error("Examples:");
+    console.error(
+      '  npm run assistant "how do I see what my contacts are zapping"',
+    );
+    console.error('  npm run assistant "find all articles about bitcoin"');
+    console.error('  npm run assistant "what kind is used for reactions"');
+    process.exit(1);
+  }
+
+  console.log("Question:", question);
+  console.log("");
+  console.log("Processing...");
+  console.log("");
+
+  try {
+    const response = await processMessage(question);
+    console.log("Response:");
+    console.log("=========");
+    console.log(response);
+  } catch (error) {
+    console.error("Error:", error);
+    process.exit(1);
+  }
+}
+
+main();