diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000000..779f99a12b5 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,12 @@ +root = true + +[*] +indent_style = space +indent_size = 4 +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true + +[*.md] +trim_trailing_whitespace = false diff --git a/.env.example b/.env.example index 91d354b80eb..622bf27af1b 100644 --- a/.env.example +++ b/.env.example @@ -1,22 +1,21 @@ # Discord Configuration DISCORD_APPLICATION_ID= -DISCORD_API_TOKEN= # Bot token -DISCORD_VOICE_CHANNEL_ID= # The ID of the voice channel the bot should join (optional) +DISCORD_API_TOKEN= # Bot token +DISCORD_VOICE_CHANNEL_ID= # The ID of the voice channel the bot should join (optional) # AI Model API Keys -OPENAI_API_KEY= # OpenAI API key, starting with sk- +OPENAI_API_KEY= # OpenAI API key, starting with sk- ETERNALAI_URL= ETERNALAI_API_KEY= - -GROK_API_KEY= # GROK API Key -GROQ_API_KEY= # Starts with gsk_ +GROK_API_KEY= # GROK API Key +GROQ_API_KEY= # Starts with gsk_ OPENROUTER_API_KEY= -GOOGLE_GENERATIVE_AI_API_KEY= # Gemini API key +GOOGLE_GENERATIVE_AI_API_KEY= # Gemini API key # Speech Synthesis -ELEVENLABS_XI_API_KEY= # API key from elevenlabs +ELEVENLABS_XI_API_KEY= # API key from elevenlabs # ElevenLabs Settings ELEVENLABS_MODEL_ID=eleven_multilingual_v2 @@ -30,53 +29,53 @@ ELEVENLABS_OUTPUT_FORMAT=pcm_16000 # Twitter/X Configuration TWITTER_DRY_RUN=false -TWITTER_USERNAME= # Account username -TWITTER_PASSWORD= # Account password -TWITTER_EMAIL= # Account email +TWITTER_USERNAME= # Account username +TWITTER_PASSWORD= # Account password +TWITTER_EMAIL= # Account email TWITTER_2FA_SECRET= -TWITTER_COOKIES= # Account cookies +TWITTER_COOKIES= # Account cookies X_SERVER_URL= XAI_API_KEY= XAI_MODEL= # Post Interval Settings (in minutes) -POST_INTERVAL_MIN= # Default: 90 -POST_INTERVAL_MAX= # Default: 180 +POST_INTERVAL_MIN= # Default: 90 +POST_INTERVAL_MAX= # Default: 180 POST_IMMEDIATELY= # Feature Flags -IMAGE_GEN= # Set to TRUE to enable image generation -USE_OPENAI_EMBEDDING= # Set to TRUE for OpenAI, leave blank for local +IMAGE_GEN= # Set to TRUE to enable image generation +USE_OPENAI_EMBEDDING= # Set to TRUE for OpenAI/1536, leave blank for local +USE_OLLAMA_EMBEDDING= # Set to TRUE for OLLAMA/1024, leave blank for local # OpenRouter Models -OPENROUTER_MODEL= # Default: uses hermes 70b/405b +OPENROUTER_MODEL= # Default: uses hermes 70b/405b SMALL_OPENROUTER_MODEL= MEDIUM_OPENROUTER_MODEL= LARGE_OPENROUTER_MODEL= - # REDPILL Configuration -# https://docs.red-pill.ai/get-started/supported-models -REDPILL_API_KEY= # REDPILL API Key +# https://docs.red-pill.ai/get-started/supported-models +REDPILL_API_KEY= # REDPILL API Key REDPILL_MODEL= -SMALL_REDPILL_MODEL= # Default: gpt-4o-mini -MEDIUM_REDPILL_MODEL= # Default: gpt-4o -LARGE_REDPILL_MODEL= # Default: gpt-4o +SMALL_REDPILL_MODEL= # Default: gpt-4o-mini +MEDIUM_REDPILL_MODEL= # Default: gpt-4o +LARGE_REDPILL_MODEL= # Default: gpt-4o # Ollama Configuration -OLLAMA_SERVER_URL= # Default: localhost:11434 +OLLAMA_SERVER_URL= # Default: localhost:11434 OLLAMA_MODEL= -OLLAMA_EMBEDDING_MODEL= # Default: mxbai-embed-large -SMALL_OLLAMA_MODEL= # Default: llama3.2 -MEDIUM_OLLAMA_MODEL= # Default: hermes3 -LARGE_OLLAMA_MODEL= # Default: hermes3:70b +OLLAMA_EMBEDDING_MODEL= # Default: mxbai-embed-large +SMALL_OLLAMA_MODEL= # Default: llama3.2 +MEDIUM_OLLAMA_MODEL= # Default: hermes3 +LARGE_OLLAMA_MODEL= # Default: hermes3:70b #LlamaLocal Configuration 
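# Quick reference for the embedding flags above (illustrative, not new
# settings): USE_OPENAI_EMBEDDING=TRUE selects 1536-dimensional OpenAI
# vectors, USE_OLLAMA_EMBEDDING=TRUE selects 1024-dimensional
# mxbai-embed-large vectors, and leaving both blank falls back to the local
# 384-dimensional BGE model. Only one of the two flags should be set at a
# time, since the database schema sizes its vector columns to match.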
-LLAMALOCAL_PATH= # Default: "" which is the current directory in plugin-node/dist/ which gets destroyed and recreated on every build +LLAMALOCAL_PATH= # Default: "" which is the current directory in plugin-node/dist/ which gets destroyed and recreated on every build # API Keys -ANTHROPIC_API_KEY= # For Claude -HEURIST_API_KEY= # Get from https://heurist.ai/dev-access +ANTHROPIC_API_KEY= # For Claude +HEURIST_API_KEY= # Get from https://heurist.ai/dev-access # Heurist Models SMALL_HEURIST_LANGUAGE_MODEL= @@ -122,7 +121,6 @@ STARKNET_RPC_URL= # Intiface Configuration INTIFACE_WEBSOCKET_URL=ws://localhost:12345 - # Farcaster FARCASTER_HUB_URL= FARCASTER_FID= @@ -130,10 +128,10 @@ FARCASTER_PRIVATE_KEY= # Coinbase COINBASE_COMMERCE_KEY= # from coinbase developer portal -COINBASE_API_KEY= # from coinbase developer portal -COINBASE_PRIVATE_KEY= # from coinbase developer portal +COINBASE_API_KEY= # from coinbase developer portal +COINBASE_PRIVATE_KEY= # from coinbase developer portal # if not configured it will be generated and written to runtime.character.settings.secrets.COINBASE_GENERATED_WALLET_ID and runtime.character.settings.secrets.COINBASE_GENERATED_WALLET_HEX_SEED -COINBASE_GENERATED_WALLET_ID= # not your address but the wallet id from generating a wallet through the plugin +COINBASE_GENERATED_WALLET_ID= # not your address but the wallet id from generating a wallet through the plugin COINBASE_GENERATED_WALLET_HEX_SEED= # not your address but the wallet hex seed from generating a wallet through the plugin and calling export # Conflux Configuration @@ -149,7 +147,6 @@ ZEROG_EVM_RPC= ZEROG_PRIVATE_KEY= ZEROG_FLOW_ADDRESS= - # Coinbase COINBASE_COMMERCE_KEY= COINBASE_API_KEY= @@ -170,11 +167,11 @@ FAL_API_KEY= FAL_AI_LORA_PATH= # WhatsApp Cloud API Configuration -WHATSAPP_ACCESS_TOKEN= # Permanent access token from Facebook Developer Console -WHATSAPP_PHONE_NUMBER_ID= # Phone number ID from WhatsApp Business API -WHATSAPP_BUSINESS_ACCOUNT_ID= # Business Account ID from Facebook Business Manager -WHATSAPP_WEBHOOK_VERIFY_TOKEN= # Custom string for webhook verification -WHATSAPP_API_VERSION=v17.0 # WhatsApp API version (default: v17.0) +WHATSAPP_ACCESS_TOKEN= # Permanent access token from Facebook Developer Console +WHATSAPP_PHONE_NUMBER_ID= # Phone number ID from WhatsApp Business API +WHATSAPP_BUSINESS_ACCOUNT_ID= # Business Account ID from Facebook Business Manager +WHATSAPP_WEBHOOK_VERIFY_TOKEN= # Custom string for webhook verification +WHATSAPP_API_VERSION=v17.0 # WhatsApp API version (default: v17.0) # ICP INTERNET_COMPUTER_PRIVATE_KEY= diff --git a/.eslintrc.json b/.eslintrc.json new file mode 100644 index 00000000000..e795935eb73 --- /dev/null +++ b/.eslintrc.json @@ -0,0 +1,47 @@ +{ + "plugins": [ + "@stylistic" + ], + "extends": [ + "next/core-web-vitals", + "plugin:@stylistic/recommended-extends" + ], + "rules": { + "@stylistic/indent": [ + "error", + 4, + { + "SwitchCase": 1 + } + ], + "@stylistic/no-tabs": "error", + "@stylistic/member-delimiter-style": [ + "error", + { + "multiline": { + "delimiter": "semi", + "requireLast": true + }, + "singleline": { + "delimiter": "semi", + "requireLast": false + } + } + ], + "@stylistic/eol-last": [ + "error", + "always" + ], + "@stylistic/multiline-ternary": "off", + "@stylistic/semi": [ + "error", + "always" + ], + "@stylistic/quotes": "off", + "@stylistic/comma-dangle": "off", + "@stylistic/brace-style": [ + "error", + "1tbs" + ] + } +} \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 
100644 index 00000000000..941b22ad04b --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,45 @@ +{ + "editor.codeActionsOnSave": { + "source.fixAll.eslint": "explicit" + }, + "editor.formatOnSave": true, + "editor.defaultFormatter": "esbenp.prettier-vscode", + "editor.insertSpaces": true, + "editor.detectIndentation": false, + "editor.tabSize": 4, + "eslint.format.enable": true, + "javascript.format.enable": false, + "typescript.format.enable": false, + "files.trimTrailingWhitespace": true, + "editor.trimAutoWhitespace": true, + "[properties]": { + "editor.defaultFormatter": "foxundermoon.shell-format" + }, + "[typescript]": { + "editor.defaultFormatter": "esbenp.prettier-vscode" + }, + "[typescriptreact]": { + "editor.defaultFormatter": "dbaeumer.vscode-eslint" + }, + "[javascriptreact]": { + "editor.defaultFormatter": "dbaeumer.vscode-eslint" + }, + "[jsonc]": { + "editor.defaultFormatter": "vscode.json-language-features" + }, + "[javascript]": { + "editor.defaultFormatter": "dbaeumer.vscode-eslint" + }, + "[scss]": { + "editor.defaultFormatter": "vscode.css-language-features" + }, + "[dotenv]": { + "editor.defaultFormatter": "foxundermoon.shell-format" + }, + "files.associations": { + "*.css": "tailwindcss" + }, + "[shellscript]": { + "editor.defaultFormatter": "foxundermoon.shell-format" + } +} \ No newline at end of file diff --git a/agent/package.json b/agent/package.json index 152fc91dc70..0f7b18729d6 100644 --- a/agent/package.json +++ b/agent/package.json @@ -4,13 +4,22 @@ "main": "src/index.ts", "type": "module", "scripts": { - "start": "node --loader ts-node/esm src/index.ts", - "dev": "node --loader ts-node/esm src/index.ts", + "start": "node --enable-source-maps --loader ts-node/esm src/index.ts", + "dev": "tsc && nodemon --watch src --watch ../core/dist --ext ts,json --exec 'node --enable-source-maps --loader ts-node/esm' src/index.ts", + "dev:debug": "tsc && nodemon --watch src --watch ../core/dist --ext ts,json --exec 'verbose=true node --enable-source-maps --loader ts-node/esm' src/index.ts", "check-types": "tsc --noEmit", "start:service:all": "pm2 start pnpm --name=\"all\" --restart-delay=3000 --max-restarts=10 -- run start:all", "stop:service:all": "pm2 stop all", "start:all": "node --loader ts-node/esm src/index.ts --characters=\"../characters/eliza.json\",\"../characters/degenspartan.json\",\"../characters/ruby.json\",\"../characters/pmairca.json\"" }, + "nodemonConfig": { + "watch": [ + "src", + "../core/dist" + ], + "ext": "ts,json", + "exec": "node --enable-source-maps --loader ts-node/esm src/index.ts" + }, "dependencies": { "@ai16z/adapter-postgres": "workspace:*", "@ai16z/adapter-sqlite": "workspace:*", @@ -39,4 +48,4 @@ "ts-node": "10.9.2", "tsup": "8.3.5" } -} +} \ No newline at end of file diff --git a/agent/src/index.ts b/agent/src/index.ts index 7103e187519..ab96f408dc9 100644 --- a/agent/src/index.ts +++ b/agent/src/index.ts @@ -90,6 +90,24 @@ export async function loadCharacters( .map((filePath) => filePath.trim()); const loadedCharacters = []; + // Add logging here + elizaLogger.info("Character loading details:", { + characterPaths, + cwd: process.cwd(), + dirname: __dirname, + fullPath: path.resolve( + process.cwd(), + "characters/8bitoracle.laozi.character.json" + ), + exists: fs.existsSync( + path.resolve( + process.cwd(), + "characters/8bitoracle.laozi.character.json" + ) + ), + dirContents: fs.readdirSync(process.cwd()), + }); + if (characterPaths?.length > 0) { for (const characterPath of characterPaths) { let content = null; @@ -99,7 +117,13 @@ 
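The hunk below widens the character-file search to more fallback locations. A minimal sketch of the same idea factored into a helper (`resolveCharacterPath` is hypothetical and not part of this diff; it mirrors the candidate list the hunk builds inline):

```ts
import fs from "node:fs";
import path from "node:path";

// Try each candidate location in order and return the first that exists.
function resolveCharacterPath(
    characterPath: string,
    scriptDir: string
): string | null {
    const candidates = [
        characterPath, // exact path as specified
        path.resolve(process.cwd(), characterPath), // relative to cwd
        path.resolve(process.cwd(), "agent", characterPath), // agent subdir
        path.resolve(scriptDir, characterPath), // relative to current script
        path.resolve(scriptDir, "characters", path.basename(characterPath)),
        path.resolve(scriptDir, "../characters", path.basename(characterPath)),
    ];
    return candidates.find((p) => fs.existsSync(p)) ?? null;
}
```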
export async function loadCharacters( const pathsToTry = [ characterPath, // exact path as specified path.resolve(process.cwd(), characterPath), // relative to cwd + path.resolve(process.cwd(), "agent", characterPath), // Add this path.resolve(__dirname, characterPath), // relative to current script + path.resolve( + __dirname, + "characters", + path.basename(characterPath) + ), // relative to agent/characters path.resolve( __dirname, "../characters", @@ -112,6 +136,14 @@ export async function loadCharacters( ), // relative to project root characters dir ]; + elizaLogger.info( + "Trying paths:", + pathsToTry.map((p) => ({ + path: p, + exists: fs.existsSync(p), + })) + ); + for (const tryPath of pathsToTry) { content = tryLoadFile(tryPath); if (content !== null) { @@ -231,18 +263,30 @@ export function getTokenForProvider( ); case ModelProviderName.FAL: return ( - character.settings?.secrets?.FAL_API_KEY || - settings.FAL_API_KEY + character.settings?.secrets?.FAL_API_KEY || settings.FAL_API_KEY ); } } function initializeDatabase(dataDir: string) { if (process.env.POSTGRES_URL) { + elizaLogger.info("Initializing PostgreSQL connection..."); const db = new PostgresDatabaseAdapter({ connectionString: process.env.POSTGRES_URL, parseInputs: true, }); + + // Test the connection + db.init() + .then(() => { + elizaLogger.success( + "Successfully connected to PostgreSQL database" + ); + }) + .catch((error) => { + elizaLogger.error("Failed to connect to PostgreSQL:", error); + }); + return db; } else { const filePath = @@ -340,8 +384,8 @@ export function createAgent( ? coinbaseCommercePlugin : null, getSecret(character, "FAL_API_KEY") || - getSecret(character, "OPENAI_API_KEY") || - getSecret(character, "HEURIST_API_KEY") + getSecret(character, "OPENAI_API_KEY") || + getSecret(character, "HEURIST_API_KEY") ? 
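/*
 * Aside on initializeDatabase above (a sketch under assumptions, not part
 * of this diff): db.init() is fired and forgotten, so a failed connection
 * only surfaces in the log while the adapter is returned anyway. A caller
 * that must not start without a verified connection could await it instead:
 *
 *     const db = new PostgresDatabaseAdapter({
 *         connectionString: process.env.POSTGRES_URL,
 *         parseInputs: true,
 *     });
 *     await db.init(); // throws here if PostgreSQL is unreachable
 */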
imageGenerationPlugin : null, ...(getSecret(character, "COINBASE_API_KEY") && @@ -476,7 +520,9 @@ async function handleUserInput(input, agentId) { ); const data = await response.json(); - data.forEach((message) => elizaLogger.log(`${"Agent"}: ${message.text}`)); + data.forEach((message) => + elizaLogger.log(`${"Agent"}: ${message.text}`) + ); } catch (error) { console.error("Error fetching response:", error); } diff --git a/package.json b/package.json index 9ba52f9c337..776ccad93ea 100644 --- a/package.json +++ b/package.json @@ -9,6 +9,7 @@ "stop:service:all": "pnpm --filter \"@ai16z/agent\" stop:service:all", "start": "pnpm --filter \"@ai16z/agent\" start --isRoot", "start:client": "pnpm --dir client start --isRoot", + "start:debug": "cross-env NODE_ENV=development VERBOSE=true DEBUG=eliza:* pnpm --filter \"@ai16z/agent\" start --isRoot", "dev": "bash ./scripts/dev.sh", "lint": "bash ./scripts/lint.sh", "prettier-check": "npx prettier --check .", @@ -28,7 +29,9 @@ "@typescript-eslint/eslint-plugin": "^8.16.0", "@typescript-eslint/parser": "^8.16.0", "concurrently": "9.1.0", + "cross-env": "^7.0.3", "eslint": "^9.15.0", + "eslint-config-prettier": "9.1.0", "husky": "9.1.7", "lerna": "8.1.5", "only-allow": "1.2.1", @@ -50,10 +53,13 @@ "dependencies": { "@0glabs/0g-ts-sdk": "^0.2.1", "@coinbase/coinbase-sdk": "^0.10.0", + "@discordjs/opus": "^0.9.0", "amqplib": "0.10.5", "csv-parse": "^5.6.0", + "node-opus": "^0.3.3", "ollama-ai-provider": "^0.16.1", "optional": "^0.1.4", + "opusscript": "^0.1.1", "pnpm": "^9.14.3", "sharp": "^0.33.5", "tslog": "^4.9.3" @@ -62,4 +68,4 @@ "workspaces": [ "packages/*" ] -} +} \ No newline at end of file diff --git a/packages/adapter-postgres/schema.sql b/packages/adapter-postgres/schema.sql index e1122136c12..34493265267 100644 --- a/packages/adapter-postgres/schema.sql +++ b/packages/adapter-postgres/schema.sql @@ -14,6 +14,22 @@ CREATE EXTENSION IF NOT EXISTS vector; CREATE EXTENSION IF NOT EXISTS fuzzystrmatch; +-- Create a function to determine vector dimension +CREATE OR REPLACE FUNCTION get_embedding_dimension() +RETURNS INTEGER AS $$ +BEGIN + -- Check for OpenAI first + IF current_setting('app.use_openai_embedding', TRUE) = 'true' THEN + RETURN 1536; -- OpenAI dimension + -- Then check for Ollama + ELSIF current_setting('app.use_ollama_embedding', TRUE) = 'true' THEN + RETURN 1024; -- Ollama mxbai-embed-large dimension + ELSE + RETURN 384; -- BGE/Other embedding dimension + END IF; +END; +$$ LANGUAGE plpgsql; + BEGIN; CREATE TABLE IF NOT EXISTS accounts ( @@ -36,7 +52,7 @@ CREATE TABLE IF NOT EXISTS memories ( "type" TEXT NOT NULL, "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, "content" JSONB NOT NULL, - "embedding" vector(1536), + "embedding" vector(get_embedding_dimension()), -- Dynamic vector size "userId" UUID REFERENCES accounts("id"), "agentId" UUID REFERENCES accounts("id"), "roomId" UUID REFERENCES rooms("id"), @@ -97,9 +113,9 @@ CREATE TABLE IF NOT EXISTS relationships ( CREATE TABLE IF NOT EXISTS cache ( "key" TEXT NOT NULL, "agentId" TEXT NOT NULL, - "value" JSONB DEFAULT '{}'::jsonb, + "value" JSONB DEFAULT '{}'::jsonb, "createdAt" TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - "expiresAt" TIMESTAMP, + "expiresAt" TIMESTAMP, PRIMARY KEY ("key", "agentId") ); diff --git a/packages/adapter-postgres/src/index.ts b/packages/adapter-postgres/src/index.ts index ddd89458f9a..9611e7f6e3b 100644 --- a/packages/adapter-postgres/src/index.ts +++ b/packages/adapter-postgres/src/index.ts @@ -1,6 +1,11 @@ import { v4 } from "uuid"; -import pg, { - type 
Pool, + +// Import the entire module as default +import pg from "pg"; +const { Pool } = pg; +type Pool = pg.Pool; + +import { QueryConfig, QueryConfigValues, QueryResult, @@ -18,6 +23,7 @@ import { Participant, DatabaseAdapter, elizaLogger, + getEmbeddingConfig, } from "@ai16z/eliza"; import fs from "fs"; import { fileURLToPath } from "url"; @@ -33,6 +39,11 @@ export class PostgresDatabaseAdapter implements IDatabaseCacheAdapter { private pool: Pool; + private readonly maxRetries: number = 3; + private readonly baseDelay: number = 1000; // 1 second + private readonly maxDelay: number = 10000; // 10 seconds + private readonly jitterMax: number = 1000; // 1 second + private readonly connectionTimeout: number = 5000; // 5 seconds constructor(connectionConfig: any) { super(); @@ -40,7 +51,7 @@ export class PostgresDatabaseAdapter const defaultConfig = { max: 20, idleTimeoutMillis: 30000, - connectionTimeoutMillis: 2000, + connectionTimeoutMillis: this.connectionTimeout, }; this.pool = new pg.Pool({ @@ -48,42 +59,103 @@ export class PostgresDatabaseAdapter ...connectionConfig, // Allow overriding defaults }); - this.pool.on("error", async (err) => { - elizaLogger.error("Unexpected error on idle client", err); + this.pool.on("error", (err) => { + elizaLogger.error("Unexpected pool error", err); + this.handlePoolError(err); + }); + + this.setupPoolErrorHandling(); + this.testConnection(); + } - // Attempt to reconnect with exponential backoff - let retryCount = 0; - const maxRetries = 5; - const baseDelay = 1000; // Start with 1 second delay + private setupPoolErrorHandling() { + process.on("SIGINT", async () => { + await this.cleanup(); + process.exit(0); + }); - while (retryCount < maxRetries) { - try { - const delay = baseDelay * Math.pow(2, retryCount); - elizaLogger.warn( - `Attempting to reconnect in ${delay}ms...` + process.on("SIGTERM", async () => { + await this.cleanup(); + process.exit(0); + }); + } + + private async withRetry(operation: () => Promise): Promise { + let lastError: Error = new Error("Unknown error"); // Initialize with default + + for (let attempt = 1; attempt <= this.maxRetries; attempt++) { + try { + return await operation(); + } catch (error) { + lastError = error as Error; + + if (attempt < this.maxRetries) { + // Calculate delay with exponential backoff + const backoffDelay = Math.min( + this.baseDelay * Math.pow(2, attempt - 1), + this.maxDelay ); - await new Promise((resolve) => setTimeout(resolve, delay)); - // Create new pool with same config - this.pool = new pg.Pool(this.pool.options); - await this.testConnection(); + // Add jitter to prevent thundering herd + const jitter = Math.random() * this.jitterMax; + const delay = backoffDelay + jitter; - elizaLogger.success("Successfully reconnected to database"); - return; - } catch (error) { - retryCount++; - elizaLogger.error( - `Reconnection attempt ${retryCount} failed:`, - error + elizaLogger.warn( + `Database operation failed (attempt ${attempt}/${this.maxRetries}):`, + { + error: + error instanceof Error + ? error.message + : String(error), + nextRetryIn: `${(delay / 1000).toFixed(1)}s`, + } ); + + await new Promise((resolve) => setTimeout(resolve, delay)); + } else { + elizaLogger.error("Max retry attempts reached:", { + error: + error instanceof Error + ? error.message + : String(error), + totalAttempts: attempt, + }); + throw error instanceof Error + ? 
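/*
 * Worked example of the backoff schedule implemented above, using the
 * values declared on this class (baseDelay = 1000 ms, maxDelay = 10000 ms,
 * jitterMax = 1000 ms, maxRetries = 3). A persistently failing operation
 * waits:
 *
 *     after attempt 1 -> min(1000 * 2^0, 10000) + jitter = 1000-2000 ms
 *     after attempt 2 -> min(1000 * 2^1, 10000) + jitter = 2000-3000 ms
 *     after attempt 3 -> no wait; the last error is thrown
 *
 * Typical call site: return this.withRetry(() => this.pool.query(...));
 * the random jitter spreads simultaneous retries so reconnecting clients
 * do not stampede the database at the same instant.
 */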
error + : new Error(String(error)); } } + } - elizaLogger.error( - `Failed to reconnect after ${maxRetries} attempts` - ); - throw new Error("Database connection lost and unable to reconnect"); + throw lastError; + } + + private async handlePoolError(error: Error) { + elizaLogger.error("Pool error occurred, attempting to reconnect", { + error: error.message, }); + + try { + // Close existing pool + await this.pool.end(); + + // Create new pool + this.pool = new Pool({ + ...this.pool.options, + connectionTimeoutMillis: this.connectionTimeout, + }); + + await this.testConnection(); + elizaLogger.success("Pool reconnection successful"); + } catch (reconnectError) { + elizaLogger.error("Failed to reconnect pool", { + error: + reconnectError instanceof Error + ? reconnectError.message + : String(reconnectError), + }); + throw reconnectError; + } } async query( @@ -137,61 +209,78 @@ export class PostgresDatabaseAdapter } } - async getRoom(roomId: UUID): Promise { - const { rows } = await this.query( - "SELECT id FROM rooms WHERE id = $1", - [roomId] - ); + async cleanup(): Promise { + try { + await this.pool.end(); + elizaLogger.info("Database pool closed"); + } catch (error) { + elizaLogger.error("Error closing database pool:", error); + } + } - return rows.length > 0 ? (rows[0].id as UUID) : null; + async getRoom(roomId: UUID): Promise { + return this.withRetry(async () => { + const { rows } = await this.pool.query( + "SELECT id FROM rooms WHERE id = $1", + [roomId] + ); + return rows.length > 0 ? (rows[0].id as UUID) : null; + }); } async getParticipantsForAccount(userId: UUID): Promise { - const { rows } = await this.query( - `SELECT id, "userId", "roomId", "last_message_read" - FROM participants - WHERE "userId" = $1`, - [userId] - ); - return rows as Participant[]; + return this.withRetry(async () => { + const { rows } = await this.pool.query( + `SELECT id, "userId", "roomId", "last_message_read" + FROM participants + WHERE "userId" = $1`, + [userId] + ); + return rows as Participant[]; + }); } async getParticipantUserState( roomId: UUID, userId: UUID ): Promise<"FOLLOWED" | "MUTED" | null> { - const { rows } = await this.query( - `SELECT "userState" FROM participants WHERE "roomId" = $1 AND "userId" = $2`, - [roomId, userId] - ); - return rows.length > 0 ? rows[0].userState : null; + return this.withRetry(async () => { + const { rows } = await this.pool.query( + `SELECT "userState" FROM participants WHERE "roomId" = $1 AND "userId" = $2`, + [roomId, userId] + ); + return rows.length > 0 ? rows[0].userState : null; + }); } async getMemoriesByRoomIds(params: { - agentId: UUID; roomIds: UUID[]; + agentId?: UUID; tableName: string; }): Promise { - if (params.roomIds.length === 0) return []; - const placeholders = params.roomIds - .map((_, i) => `$${i + 3}`) - .join(", "); - - const query = `SELECT * FROM memories WHERE type = $1 AND "agentId" = $2 AND "roomId" IN (${placeholders})`; - const queryParams = [ - params.tableName, - params.agentId, - ...params.roomIds, - ]; - - const { rows } = await this.query(query, queryParams); - return rows.map((row) => ({ - ...row, - content: - typeof row.content === "string" - ? 
JSON.parse(row.content) - : row.content, - })); + return this.withRetry(async () => { + if (params.roomIds.length === 0) return []; + const placeholders = params.roomIds + .map((_, i) => `$${i + 2}`) + .join(", "); + + let query = `SELECT * FROM memories WHERE type = $1 AND "roomId" IN (${placeholders})`; + let queryParams = [params.tableName, ...params.roomIds]; + + if (params.agentId) { + query += ` AND "agentId" = $${params.roomIds.length + 2}`; + queryParams = [...queryParams, params.agentId]; + } + + const { rows } = await this.pool.query(query, queryParams); + return rows.map((row) => ({ + ...row, + content: + typeof row.content === "string" + ? JSON.parse(row.content) + : row.content, + })); + }); } async setParticipantUserState( @@ -199,125 +288,187 @@ export class PostgresDatabaseAdapter userId: UUID, state: "FOLLOWED" | "MUTED" | null ): Promise { - await this.query( - `UPDATE participants SET "userState" = $1 WHERE "roomId" = $2 AND "userId" = $3`, - [state, roomId, userId] - ); + return this.withRetry(async () => { + await this.pool.query( + `UPDATE participants SET "userState" = $1 WHERE "roomId" = $2 AND "userId" = $3`, + [state, roomId, userId] + ); + }); } async getParticipantsForRoom(roomId: UUID): Promise { - const { rows } = await this.query( - 'SELECT "userId" FROM participants WHERE "roomId" = $1', - [roomId] - ); - return rows.map((row) => row.userId); + return this.withRetry(async () => { + const { rows } = await this.pool.query( + 'SELECT "userId" FROM participants WHERE "roomId" = $1', + [roomId] + ); + return rows.map((row) => row.userId); + }); } async getAccountById(userId: UUID): Promise { - const { rows } = await this.query( - "SELECT * FROM accounts WHERE id = $1", - [userId] - ); - if (rows.length === 0) return null; - - const account = rows[0]; - elizaLogger.debug("account", account); - return { - ...account, - details: - typeof account.details === "string" - ? JSON.parse(account.details) - : account.details, - }; + return this.withRetry(async () => { + const { rows } = await this.pool.query( + "SELECT * FROM accounts WHERE id = $1", + [userId] + ); + if (rows.length === 0) { + elizaLogger.debug("Account not found:", { userId }); + return null; + } + + const account = rows[0]; + // elizaLogger.debug("Account retrieved:", { + // userId, + // hasDetails: !!account.details, + // }); + + return { + ...account, + details: + typeof account.details === "string" + ? JSON.parse(account.details) + : account.details, + }; + }); } async createAccount(account: Account): Promise { - try { - await this.query( - `INSERT INTO accounts (id, name, username, email, "avatarUrl", details) - VALUES ($1, $2, $3, $4, $5, $6)`, - [ - account.id ?? v4(), - account.name, - account.username || "", - account.email || "", - account.avatarUrl || "", - JSON.stringify(account.details), - ] - ); - - return true; - } catch (error) { - elizaLogger.log("Error creating account", error); - return false; - } + return this.withRetry(async () => { + try { + const accountId = account.id ?? v4(); + await this.pool.query( + `INSERT INTO accounts (id, name, username, email, "avatarUrl", details) + VALUES ($1, $2, $3, $4, $5, $6)`, + [ + accountId, + account.name, + account.username || "", + account.email || "", + account.avatarUrl || "", + JSON.stringify(account.details), + ] + ); + elizaLogger.debug("Account created successfully:", { + accountId, + }); + return true; + } catch (error) { + elizaLogger.error("Error creating account:", { + error: + error instanceof Error ? 
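/*
 * Note on the placeholder arithmetic in getMemoriesByRoomIds above: with
 * the agentId filter now optional, roomIds start at $2 (after $1 =
 * tableName), so for roomIds = [a, b, c] the generated query reads
 *
 *     SELECT * FROM memories WHERE type = $1 AND "roomId" IN ($2, $3, $4)
 *
 * and, when an agentId is supplied, `AND "agentId" = $5` is appended with
 * the id pushed onto the same values array.
 */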
error.message : String(error), + accountId: account.id, + name: account.name, // Only log non-sensitive fields + }); + return false; // Return false instead of throwing to maintain existing behavior + } + }); } async getActorById(params: { roomId: UUID }): Promise { - const { rows } = await this.query( - `SELECT a.id, a.name, a.username, a.details - FROM participants p - LEFT JOIN accounts a ON p."userId" = a.id - WHERE p."roomId" = $1`, - [params.roomId] - ); - return rows.map((row) => ({ - ...row, - details: - typeof row.details === "string" - ? JSON.parse(row.details) - : row.details, - })); + return this.withRetry(async () => { + const { rows } = await this.pool.query( + `SELECT a.id, a.name, a.username, a.details + FROM participants p + LEFT JOIN accounts a ON p."userId" = a.id + WHERE p."roomId" = $1`, + [params.roomId] + ); + + elizaLogger.debug("Retrieved actors:", { + roomId: params.roomId, + actorCount: rows.length, + }); + + return rows.map((row) => { + try { + return { + ...row, + details: + typeof row.details === "string" + ? JSON.parse(row.details) + : row.details, + }; + } catch (error) { + elizaLogger.warn("Failed to parse actor details:", { + actorId: row.id, + error: + error instanceof Error + ? error.message + : String(error), + }); + return { + ...row, + details: {}, // Provide default empty details on parse error + }; + } + }); + }).catch((error) => { + elizaLogger.error("Failed to get actors:", { + roomId: params.roomId, + error: error.message, + }); + throw error; // Re-throw to let caller handle database errors + }); } async getMemoryById(id: UUID): Promise { - const { rows } = await this.query( - "SELECT * FROM memories WHERE id = $1", - [id] - ); - if (rows.length === 0) return null; - - return { - ...rows[0], - content: - typeof rows[0].content === "string" - ? JSON.parse(rows[0].content) - : rows[0].content, - }; + return this.withRetry(async () => { + const { rows } = await this.pool.query( + "SELECT * FROM memories WHERE id = $1", + [id] + ); + if (rows.length === 0) return null; + + return { + ...rows[0], + content: + typeof rows[0].content === "string" + ? JSON.parse(rows[0].content) + : rows[0].content, + }; + }); } async createMemory(memory: Memory, tableName: string): Promise { - let isUnique = true; - if (memory.embedding) { - const similarMemories = await this.searchMemoriesByEmbedding( - memory.embedding, - { + return this.withRetry(async () => { + elizaLogger.debug("PostgresAdapter createMemory:", { + memoryId: memory.id, + embeddingLength: memory.embedding?.length, + contentLength: memory.content?.text?.length, + }); + + let isUnique = true; + if (memory.embedding) { + const similarMemories = await this.searchMemoriesByEmbedding( + memory.embedding, + { + tableName, + roomId: memory.roomId, + match_threshold: 0.95, + count: 1, + } + ); + isUnique = similarMemories.length === 0; + } + + await this.pool.query( + `INSERT INTO memories ( + id, type, content, embedding, "userId", "roomId", "agentId", "unique", "createdAt" + ) VALUES ($1, $2, $3, $4, $5::uuid, $6::uuid, $7::uuid, $8, to_timestamp($9/1000.0))`, + [ + memory.id ?? v4(), tableName, - agentId: memory.agentId, - roomId: memory.roomId, - match_threshold: 0.95, - count: 1, - } + JSON.stringify(memory.content), + memory.embedding ? `[${memory.embedding.join(",")}]` : null, + memory.userId, + memory.roomId, + memory.agentId, + memory.unique ?? 
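/*
 * The `unique` flag above: unless the caller sets memory.unique
 * explicitly, a memory is marked unique only when no existing memory in
 * the same room scores >= 0.95 similarity against its embedding (the
 * searchMemoriesByEmbedding probe with count: 1 earlier in this method).
 * Near-duplicates are therefore still stored, just flagged as non-unique
 * rather than dropped.
 */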
isUnique, + Date.now(), + ] ); - isUnique = similarMemories.length === 0; - } - - await this.query( - `INSERT INTO memories ( - id, type, content, embedding, "userId", "roomId", "agentId", "unique", "createdAt" - ) VALUES ($1, $2, $3, $4, $5::uuid, $6::uuid, $7::uuid, $8, to_timestamp($9/1000.0))`, - [ - memory.id ?? v4(), - tableName, - JSON.stringify(memory.content), - memory.embedding ? `[${memory.embedding.join(",")}]` : null, - memory.userId, - memory.roomId, - memory.agentId, - memory.unique ?? isUnique, - Date.now(), - ] - ); + }); } async searchMemories(params: { @@ -344,48 +495,81 @@ export class PostgresDatabaseAdapter count?: number; unique?: boolean; tableName: string; - agentId: UUID; + agentId?: UUID; start?: number; end?: number; }): Promise { + // Parameter validation if (!params.tableName) throw new Error("tableName is required"); if (!params.roomId) throw new Error("roomId is required"); - let sql = `SELECT * FROM memories WHERE type = $1 AND "agentId" = $2 AND "roomId" = $3`; - const values: any[] = [params.tableName, params.agentId, params.roomId]; - let paramCount = 3; // Updated to start at 3 since we already have 3 parameters - - if (params.start) { - paramCount++; - sql += ` AND "createdAt" >= to_timestamp($${paramCount})`; - values.push(params.start / 1000); - } - if (params.end) { - paramCount++; - sql += ` AND "createdAt" <= to_timestamp($${paramCount})`; - values.push(params.end / 1000); - } + return this.withRetry(async () => { + // Build query + let sql = `SELECT * FROM memories WHERE type = $1 AND "roomId" = $2`; + const values: any[] = [params.tableName, params.roomId]; + let paramCount = 2; + + // Add time range filters + if (params.start) { + paramCount++; + sql += ` AND "createdAt" >= to_timestamp($${paramCount})`; + values.push(params.start / 1000); + } - if (params.unique) { - sql += ` AND "unique" = true`; - } + if (params.end) { + paramCount++; + sql += ` AND "createdAt" <= to_timestamp($${paramCount})`; + values.push(params.end / 1000); + } - sql += ' ORDER BY "createdAt" DESC'; + // Add other filters + if (params.unique) { + sql += ` AND "unique" = true`; + } - if (params.count && typeof params.count === "number") { - paramCount++; - sql += ` LIMIT $${paramCount}::integer`; // Cast to integer - values.push(params.count); - } + if (params.agentId) { + paramCount++; + sql += ` AND "agentId" = $${paramCount}`; + values.push(params.agentId); + } - const { rows } = await this.query(sql, values); - return rows.map((row) => ({ - ...row, - content: - typeof row.content === "string" - ? JSON.parse(row.content) - : row.content, - })); + // Add ordering and limit + sql += ' ORDER BY "createdAt" DESC'; + + if (params.count) { + paramCount++; + sql += ` LIMIT $${paramCount}`; + values.push(params.count); + } + + elizaLogger.debug("Fetching memories:", { + roomId: params.roomId, + tableName: params.tableName, + unique: params.unique, + agentId: params.agentId, + timeRange: + params.start || params.end + ? { + start: params.start + ? new Date(params.start).toISOString() + : undefined, + end: params.end + ? new Date(params.end).toISOString() + : undefined, + } + : undefined, + limit: params.count, + }); + + const { rows } = await this.pool.query(sql, values); + return rows.map((row) => ({ + ...row, + content: + typeof row.content === "string" + ? 
JSON.parse(row.content)
+                        : row.content,
+            }));
+        });
    }

    async getGoals(params: {
@@ -394,111 +578,299 @@ export class PostgresDatabaseAdapter
        onlyInProgress?: boolean;
        count?: number;
    }): Promise<Goal[]> {
-        let sql = `SELECT * FROM goals WHERE "roomId" = $1`;
-        const values: any[] = [params.roomId];
-        let paramCount = 1;
-
-        if (params.userId) {
-            paramCount++;
-            sql += ` AND "userId" = $${paramCount}`;
-            values.push(params.userId);
-        }
+        return this.withRetry(async () => {
+            let sql = `SELECT * FROM goals WHERE "roomId" = $1`;
+            const values: any[] = [params.roomId];
+            let paramCount = 1;
+
+            if (params.userId) {
+                paramCount++;
+                sql += ` AND "userId" = $${paramCount}`;
+                values.push(params.userId);
+            }

-        if (params.onlyInProgress) {
-            sql += " AND status = 'IN_PROGRESS'";
-        }
+            if (params.onlyInProgress) {
+                sql += " AND status = 'IN_PROGRESS'";
+            }

-        if (params.count) {
-            paramCount++;
-            sql += ` LIMIT $${paramCount}`;
-            values.push(params.count);
-        }
+            if (params.count) {
+                paramCount++;
+                sql += ` LIMIT $${paramCount}`;
+                values.push(params.count);
+            }

-        const { rows } = await this.query(sql, values);
-        return rows.map((row) => ({
-            ...row,
-            objectives:
-                typeof row.objectives === "string"
-                    ? JSON.parse(row.objectives)
-                    : row.objectives,
-        }));
+            const { rows } = await this.pool.query(sql, values);
+            return rows.map((row) => ({
+                ...row,
+                objectives:
+                    typeof row.objectives === "string"
+                        ? JSON.parse(row.objectives)
+                        : row.objectives,
+            }));
+        });
    }

    async updateGoal(goal: Goal): Promise<void> {
-        await this.query(
-            `UPDATE goals SET name = $1, status = $2, objectives = $3 WHERE id = $4`,
-            [goal.name, goal.status, JSON.stringify(goal.objectives), goal.id]
-        );
+        return this.withRetry(async () => {
+            try {
+                await this.pool.query(
+                    `UPDATE goals SET name = $1, status = $2, objectives = $3 WHERE id = $4`,
+                    [
+                        goal.name,
+                        goal.status,
+                        JSON.stringify(goal.objectives),
+                        goal.id,
+                    ]
+                );
+            } catch (error) {
+                elizaLogger.error("Failed to update goal:", {
+                    goalId: goal.id,
+                    error:
+                        error instanceof Error ? error.message : String(error),
+                    status: goal.status,
+                });
+                throw error;
+            }
+        });
    }

    async createGoal(goal: Goal): Promise<void> {
-        await this.query(
-            `INSERT INTO goals (id, "roomId", "userId", name, status, objectives)
-            VALUES ($1, $2, $3, $4, $5, $6)`,
-            [
-                goal.id ?? v4(),
-                goal.roomId,
-                goal.userId,
-                goal.name,
-                goal.status,
-                JSON.stringify(goal.objectives),
-            ]
-        );
+        return this.withRetry(async () => {
+            await this.pool.query(
+                `INSERT INTO goals (id, "roomId", "userId", name, status, objectives)
+                VALUES ($1, $2, $3, $4, $5, $6)`,
+                [
+                    goal.id ?? v4(),
+                    goal.roomId,
+                    goal.userId,
+                    goal.name,
+                    goal.status,
+                    JSON.stringify(goal.objectives),
+                ]
+            );
+        });
    }

    async removeGoal(goalId: UUID): Promise<void> {
-        await this.query("DELETE FROM goals WHERE id = $1", [goalId]);
+        if (!goalId) throw new Error("Goal ID is required");
+
+        return this.withRetry(async () => {
+            try {
+                const result = await this.pool.query(
+                    "DELETE FROM goals WHERE id = $1 RETURNING id",
+                    [goalId]
+                );
+
+                elizaLogger.debug("Goal removal attempt:", {
+                    goalId,
+                    removed: (result?.rowCount ?? 0) > 0,
+                });
+            } catch (error) {
+                elizaLogger.error("Failed to remove goal:", {
+                    goalId,
+                    error:
+                        error instanceof Error ? error.message : String(error),
+                });
+                throw error;
+            }
+        });
    }

    async createRoom(roomId?: UUID): Promise<UUID> {
-        const newRoomId = roomId || v4();
-        await this.query("INSERT INTO rooms (id) VALUES ($1)", [newRoomId]);
-        return newRoomId as UUID;
+        return this.withRetry(async () => {
+            const newRoomId = roomId || v4();
+            await this.pool.query("INSERT INTO rooms (id) VALUES ($1)", [
+                newRoomId,
+            ]);
+            return newRoomId as UUID;
+        });
    }

    async removeRoom(roomId: UUID): Promise<void> {
-        await this.query("DELETE FROM rooms WHERE id = $1", [roomId]);
+        if (!roomId) throw new Error("Room ID is required");
+
+        return this.withRetry(async () => {
+            const client = await this.pool.connect();
+            try {
+                await client.query("BEGIN");
+
+                // First check if room exists
+                const checkResult = await client.query(
+                    "SELECT id FROM rooms WHERE id = $1",
+                    [roomId]
+                );
+
+                if (checkResult.rowCount === 0) {
+                    elizaLogger.warn("No room found to remove:", { roomId });
+                    throw new Error(`Room not found: ${roomId}`);
+                }
+
+                // Remove related data first (if not using CASCADE)
+                await client.query('DELETE FROM memories WHERE "roomId" = $1', [
+                    roomId,
+                ]);
+                await client.query(
+                    'DELETE FROM participants WHERE "roomId" = $1',
+                    [roomId]
+                );
+                await client.query('DELETE FROM goals WHERE "roomId" = $1', [
+                    roomId,
+                ]);
+
+                // Finally remove the room
+                const result = await client.query(
+                    "DELETE FROM rooms WHERE id = $1 RETURNING id",
+                    [roomId]
+                );
+
+                await client.query("COMMIT");
+
+                elizaLogger.debug(
+                    "Room and related data removed successfully:",
+                    {
+                        roomId,
+                        removed: (result?.rowCount ?? 0) > 0,
+                    }
+                );
+            } catch (error) {
+                await client.query("ROLLBACK");
+                elizaLogger.error("Failed to remove room:", {
+                    roomId,
+                    error:
+                        error instanceof Error ? error.message : String(error),
+                });
+                throw error;
+            } finally {
+                client.release();
+            }
+        });
    }

    async createRelationship(params: {
        userA: UUID;
        userB: UUID;
    }): Promise<boolean> {
+        // Input validation
        if (!params.userA || !params.userB) {
            throw new Error("userA and userB are required");
        }

-        try {
-            await this.query(
-                `INSERT INTO relationships (id, "userA", "userB", "userId")
-                VALUES ($1, $2, $3, $4)`,
-                [v4(), params.userA, params.userB, params.userA]
-            );
-            return true;
-        } catch (error) {
-            elizaLogger.log("Error creating relationship", error);
-            return false;
-        }
+        return this.withRetry(async () => {
+            try {
+                const relationshipId = v4();
+                const result = await this.pool.query(
+                    `INSERT INTO relationships (id, "userA", "userB", "userId")
+                    VALUES ($1, $2, $3, $4)
+                    RETURNING id`,
+                    [relationshipId, params.userA, params.userB, params.userA]
+                );
+
+                elizaLogger.debug("Relationship created successfully:", {
+                    relationshipId,
+                    userA: params.userA,
+                    userB: params.userB,
+                });
+
+                return true;
+            } catch (error) {
+                // Check for unique constraint violation or other specific errors
+                if ((error as { code?: string }).code === "23505") {
+                    // Unique violation
+                    elizaLogger.warn("Relationship already exists:", {
+                        userA: params.userA,
+                        userB: params.userB,
+                        error:
+                            error instanceof Error
+                                ? error.message
+                                : String(error),
+                    });
+                } else {
+                    elizaLogger.error("Failed to create relationship:", {
+                        userA: params.userA,
+                        userB: params.userB,
+                        error:
+                            error instanceof Error
+                                ?
error.message + : String(error), + }); + } + return false; + } + }); } async getRelationship(params: { userA: UUID; userB: UUID; }): Promise { - const { rows } = await this.query( - `SELECT * FROM relationships - WHERE ("userA" = $1 AND "userB" = $2) OR ("userA" = $2 AND "userB" = $1)`, - [params.userA, params.userB] - ); - return rows.length > 0 ? rows[0] : null; + if (!params.userA || !params.userB) { + throw new Error("userA and userB are required"); + } + + return this.withRetry(async () => { + try { + const { rows } = await this.pool.query( + `SELECT * FROM relationships + WHERE ("userA" = $1 AND "userB" = $2) + OR ("userA" = $2 AND "userB" = $1)`, + [params.userA, params.userB] + ); + + if (rows.length > 0) { + elizaLogger.debug("Relationship found:", { + relationshipId: rows[0].id, + userA: params.userA, + userB: params.userB, + }); + return rows[0]; + } + + elizaLogger.debug("No relationship found between users:", { + userA: params.userA, + userB: params.userB, + }); + return null; + } catch (error) { + elizaLogger.error("Error fetching relationship:", { + userA: params.userA, + userB: params.userB, + error: + error instanceof Error ? error.message : String(error), + }); + throw error; + } + }); } async getRelationships(params: { userId: UUID }): Promise { - const { rows } = await this.query( - `SELECT * FROM relationships WHERE "userA" = $1 OR "userB" = $1`, - [params.userId] - ); - return rows; + if (!params.userId) { + throw new Error("userId is required"); + } + + return this.withRetry(async () => { + try { + const { rows } = await this.pool.query( + `SELECT * FROM relationships + WHERE "userA" = $1 OR "userB" = $1 + ORDER BY "createdAt" DESC`, // Add ordering if you have this field + [params.userId] + ); + + elizaLogger.debug("Retrieved relationships:", { + userId: params.userId, + count: rows.length, + }); + + return rows; + } catch (error) { + elizaLogger.error("Failed to fetch relationships:", { + userId: params.userId, + error: + error instanceof Error ? 
error.message : String(error), + }); + throw error; + } + }); } async getCachedEmbeddings(opts: { @@ -509,42 +881,104 @@ export class PostgresDatabaseAdapter query_field_sub_name: string; query_match_count: number; }): Promise<{ embedding: number[]; levenshtein_score: number }[]> { - // Get the JSON field content as text first - const sql = ` - WITH content_text AS ( - SELECT + // Input validation + if (!opts.query_table_name) + throw new Error("query_table_name is required"); + if (!opts.query_input) throw new Error("query_input is required"); + if (!opts.query_field_name) + throw new Error("query_field_name is required"); + if (!opts.query_field_sub_name) + throw new Error("query_field_sub_name is required"); + if (opts.query_match_count <= 0) + throw new Error("query_match_count must be positive"); + + return this.withRetry(async () => { + try { + elizaLogger.debug("Fetching cached embeddings:", { + tableName: opts.query_table_name, + fieldName: opts.query_field_name, + subFieldName: opts.query_field_sub_name, + matchCount: opts.query_match_count, + inputLength: opts.query_input.length, + }); + + const sql = ` + WITH content_text AS ( + SELECT + embedding, + COALESCE( + content->$2->>$3, + '' + ) as content_text + FROM memories + WHERE type = $4 + AND content->$2->>$3 IS NOT NULL + ) + SELECT embedding, - COALESCE( - content->$2->>$3, - '' - ) as content_text - FROM memories - WHERE type = $4 - AND content->$2->>$3 IS NOT NULL - ) - SELECT - embedding, - levenshtein( + levenshtein( + $1, + content_text + ) as levenshtein_score + FROM content_text + WHERE levenshtein( $1, content_text - ) as levenshtein_score - FROM content_text - ORDER BY levenshtein_score - LIMIT $5 - `; - - const { rows } = await this.query(sql, [ - opts.query_input, - opts.query_field_name, - opts.query_field_sub_name, - opts.query_table_name, - opts.query_match_count, - ]); - - return rows.map((row) => ({ - embedding: row.embedding, - levenshtein_score: row.levenshtein_score, - })); + ) <= $6 -- Add threshold check + ORDER BY levenshtein_score + LIMIT $5 + `; + + const { rows } = await this.pool.query(sql, [ + opts.query_input, + opts.query_field_name, + opts.query_field_sub_name, + opts.query_table_name, + opts.query_match_count, + opts.query_threshold, + ]); + + elizaLogger.debug("Retrieved cached embeddings:", { + count: rows.length, + tableName: opts.query_table_name, + matchCount: opts.query_match_count, + }); + + return rows + .map( + ( + row + ): { + embedding: number[]; + levenshtein_score: number; + } | null => { + if (!Array.isArray(row.embedding)) return null; + return { + embedding: row.embedding, + levenshtein_score: Number( + row.levenshtein_score + ), + }; + } + ) + .filter( + ( + row + ): row is { + embedding: number[]; + levenshtein_score: number; + } => row !== null + ); + } catch (error) { + elizaLogger.error("Error in getCachedEmbeddings:", { + error: + error instanceof Error ? 
error.message : String(error), + tableName: opts.query_table_name, + fieldName: opts.query_field_name, + }); + throw error; + } + }); } async log(params: { @@ -553,11 +987,54 @@ export class PostgresDatabaseAdapter roomId: UUID; type: string; }): Promise { - await this.query( - `INSERT INTO logs (body, "userId", "roomId", type) - VALUES ($1, $2, $3, $4)`, - [params.body, params.userId, params.roomId, params.type] - ); + // Input validation + if (!params.userId) throw new Error("userId is required"); + if (!params.roomId) throw new Error("roomId is required"); + if (!params.type) throw new Error("type is required"); + if (!params.body || typeof params.body !== "object") { + throw new Error("body must be a valid object"); + } + + return this.withRetry(async () => { + try { + const logId = v4(); // Generate ID for tracking + await this.pool.query( + `INSERT INTO logs ( + id, + body, + "userId", + "roomId", + type, + "createdAt" + ) VALUES ($1, $2, $3, $4, $5, NOW()) + RETURNING id`, + [ + logId, + JSON.stringify(params.body), // Ensure body is stringified + params.userId, + params.roomId, + params.type, + ] + ); + + elizaLogger.debug("Log entry created:", { + logId, + type: params.type, + roomId: params.roomId, + userId: params.userId, + bodyKeys: Object.keys(params.body), + }); + } catch (error) { + elizaLogger.error("Failed to create log entry:", { + error: + error instanceof Error ? error.message : String(error), + type: params.type, + roomId: params.roomId, + userId: params.userId, + }); + throw error; + } + }); } async searchMemoriesByEmbedding( @@ -571,138 +1048,156 @@ export class PostgresDatabaseAdapter tableName: string; } ): Promise { - const vectorStr = `[${embedding.join(",")}]`; + return this.withRetry(async () => { + elizaLogger.debug("Incoming vector:", { + length: embedding.length, + sample: embedding.slice(0, 5), + isArray: Array.isArray(embedding), + allNumbers: embedding.every((n) => typeof n === "number"), + }); + + // Validate embedding dimension + if (embedding.length !== getEmbeddingConfig().dimensions) { + throw new Error( + `Invalid embedding dimension: expected ${getEmbeddingConfig().dimensions}, got ${embedding.length}` + ); + } + + // Ensure vector is properly formatted + const cleanVector = embedding.map((n) => { + if (!Number.isFinite(n)) return 0; + // Limit precision to avoid floating point issues + return Number(n.toFixed(6)); + }); + + // Format for Postgres pgvector + const vectorStr = `[${cleanVector.join(",")}]`; - let sql = ` + elizaLogger.debug("Vector debug:", { + originalLength: embedding.length, + cleanLength: cleanVector.length, + sampleStr: vectorStr.slice(0, 100), + }); + + let sql = ` SELECT *, - 1 - (embedding <-> $1::vector) as similarity + 1 - (embedding <-> $1::vector(${getEmbeddingConfig().dimensions})) as similarity FROM memories WHERE type = $2 `; - const values: any[] = [vectorStr, params.tableName]; - let paramCount = 2; + const values: any[] = [vectorStr, params.tableName]; - if (params.unique) { - sql += ` AND "unique" = true`; - } + // Log the query for debugging + elizaLogger.debug("Query debug:", { + sql: sql.slice(0, 200), + paramTypes: values.map((v) => typeof v), + vectorStrLength: vectorStr.length, + }); - if (params.agentId) { - paramCount++; - sql += ` AND "agentId" = $${paramCount}`; - values.push(params.agentId); - } + let paramCount = 2; - if (params.roomId) { - paramCount++; - sql += ` AND "roomId" = $${paramCount}::uuid`; - values.push(params.roomId); - } + if (params.unique) { + sql += ` AND "unique" = true`; + } - if 
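/*
 * A caution on the similarity expression used in this query (an
 * observation, not a behavior change): pgvector's `<->` operator is
 * Euclidean (L2) distance, while `<=>` is cosine distance. The expression
 * `1 - (embedding <-> $1::vector)` therefore only behaves like cosine
 * similarity when the stored vectors are L2-normalized, since for unit
 * vectors a and b, |a - b|^2 = 2 - 2*cos(a, b). With unnormalized
 * embeddings, `1 - (embedding <=> $1::vector)` would be the cosine
 * similarity instead.
 */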
(params.match_threshold) { - paramCount++; - sql += ` AND 1 - (embedding <-> $1::vector) >= $${paramCount}`; - values.push(params.match_threshold); - } + if (params.agentId) { + paramCount++; + sql += ` AND "agentId" = $${paramCount}`; + values.push(params.agentId); + } - sql += ` ORDER BY embedding <-> $1::vector`; + if (params.roomId) { + paramCount++; + sql += ` AND "roomId" = $${paramCount}::uuid`; + values.push(params.roomId); + } - if (params.count) { - paramCount++; - sql += ` LIMIT $${paramCount}`; - values.push(params.count); - } + if (params.match_threshold) { + paramCount++; + sql += ` AND 1 - (embedding <-> $1::vector) >= $${paramCount}`; + values.push(params.match_threshold); + } - const { rows } = await this.query(sql, values); + sql += ` ORDER BY embedding <-> $1::vector`; - return rows.map((row) => ({ - ...row, - content: - typeof row.content === "string" - ? JSON.parse(row.content) - : row.content, - similarity: row.similarity, - })); + if (params.count) { + paramCount++; + sql += ` LIMIT $${paramCount}`; + values.push(params.count); + } + + const { rows } = await this.pool.query(sql, values); + return rows.map((row) => ({ + ...row, + content: + typeof row.content === "string" + ? JSON.parse(row.content) + : row.content, + similarity: row.similarity, + })); + }); } async addParticipant(userId: UUID, roomId: UUID): Promise { - try { - const existingParticipant = await this.query( - // Check if the participant already exists - `SELECT * FROM participants WHERE "userId" = $1 AND "roomId" = $2`, - [userId, roomId] - ); - - if (existingParticipant.rows.length > 0) { - elizaLogger.error( - `Participant with userId ${userId} already exists in room ${roomId}.` + return this.withRetry(async () => { + try { + await this.pool.query( + `INSERT INTO participants (id, "userId", "roomId") + VALUES ($1, $2, $3)`, + [v4(), userId, roomId] ); - return true; // Exit early if the participant already exists - } - - // Proceed to add the participant if they do not exist - await this.query( - `INSERT INTO participants (id, "userId", "roomId") - VALUES ($1, $2, $3)`, - [v4(), userId, roomId] - ); - return true; - } catch (error) { - if (error instanceof DatabaseError) { - elizaLogger.error("Error adding participant", error); - // This is to prevent duplicate participant error in case of a race condition - // Handle unique constraint violation error (code 23505) - if (error.code === "23505") { - elizaLogger.warn( - `Participant with userId ${userId} already exists in room ${roomId}.` - ); // Optionally, you can log this or handle it differently - return true; - } else { - // Handle other errors - elizaLogger.error("Error adding participant:", error); - return false; - } + return true; + } catch (error) { + console.log("Error adding participant", error); + return false; } - - return false; - } + }); } async removeParticipant(userId: UUID, roomId: UUID): Promise { - try { - await this.query( - `DELETE FROM participants WHERE "userId" = $1 AND "roomId" = $2`, - [userId, roomId] - ); - return true; - } catch (error) { - elizaLogger.log("Error removing participant", error); - return false; - } + return this.withRetry(async () => { + try { + await this.pool.query( + `DELETE FROM participants WHERE "userId" = $1 AND "roomId" = $2`, + [userId, roomId] + ); + return true; + } catch (error) { + console.log("Error removing participant", error); + return false; + } + }); } async updateGoalStatus(params: { goalId: UUID; status: GoalStatus; }): Promise { - await this.query("UPDATE goals SET status = $1 WHERE id 
= $2", [ - params.status, - params.goalId, - ]); + return this.withRetry(async () => { + await this.pool.query( + "UPDATE goals SET status = $1 WHERE id = $2", + [params.status, params.goalId] + ); + }); } async removeMemory(memoryId: UUID, tableName: string): Promise { - await this.query("DELETE FROM memories WHERE type = $1 AND id = $2", [ - tableName, - memoryId, - ]); + return this.withRetry(async () => { + await this.pool.query( + "DELETE FROM memories WHERE type = $1 AND id = $2", + [tableName, memoryId] + ); + }); } async removeAllMemories(roomId: UUID, tableName: string): Promise { - await this.query( - `DELETE FROM memories WHERE type = $1 AND "roomId" = $2`, - [tableName, roomId] - ); + return this.withRetry(async () => { + await this.pool.query( + `DELETE FROM memories WHERE type = $1 AND "roomId" = $2`, + [tableName, roomId] + ); + }); } async countMemories( @@ -712,75 +1207,133 @@ export class PostgresDatabaseAdapter ): Promise { if (!tableName) throw new Error("tableName is required"); - let sql = `SELECT COUNT(*) as count FROM memories WHERE type = $1 AND "roomId" = $2`; - if (unique) { - sql += ` AND "unique" = true`; - } + return this.withRetry(async () => { + let sql = `SELECT COUNT(*) as count FROM memories WHERE type = $1 AND "roomId" = $2`; + if (unique) { + sql += ` AND "unique" = true`; + } - const { rows } = await this.query(sql, [tableName, roomId]); - return parseInt(rows[0].count); + const { rows } = await this.pool.query(sql, [tableName, roomId]); + return parseInt(rows[0].count); + }); } async removeAllGoals(roomId: UUID): Promise { - await this.query(`DELETE FROM goals WHERE "roomId" = $1`, [roomId]); + return this.withRetry(async () => { + await this.pool.query(`DELETE FROM goals WHERE "roomId" = $1`, [ + roomId, + ]); + }); } async getRoomsForParticipant(userId: UUID): Promise { - const { rows } = await this.query( - `SELECT "roomId" FROM participants WHERE "userId" = $1`, - [userId] - ); - return rows.map((row) => row.roomId); + return this.withRetry(async () => { + const { rows } = await this.pool.query( + `SELECT "roomId" FROM participants WHERE "userId" = $1`, + [userId] + ); + return rows.map((row) => row.roomId); + }); } async getRoomsForParticipants(userIds: UUID[]): Promise { - const placeholders = userIds.map((_, i) => `$${i + 1}`).join(", "); - const { rows } = await this.query( - `SELECT DISTINCT "roomId" FROM participants WHERE "userId" IN (${placeholders})`, - userIds - ); - return rows.map((row) => row.roomId); + return this.withRetry(async () => { + const placeholders = userIds.map((_, i) => `$${i + 1}`).join(", "); + const { rows } = await this.pool.query( + `SELECT DISTINCT "roomId" FROM participants WHERE "userId" IN (${placeholders})`, + userIds + ); + return rows.map((row) => row.roomId); + }); } async getActorDetails(params: { roomId: string }): Promise { - const sql = ` - SELECT - a.id, - a.name, - a.username, - COALESCE(a.details::jsonb, '{}'::jsonb) as details - FROM participants p - LEFT JOIN accounts a ON p."userId" = a.id - WHERE p."roomId" = $1 - `; - - try { - const result = await this.query(sql, [params.roomId]); - return result.rows.map((row) => ({ - ...row, - details: row.details, // PostgreSQL automatically handles JSON parsing - })); - } catch (error) { - elizaLogger.error("Error fetching actor details:", error); - throw new Error("Failed to fetch actor details"); + if (!params.roomId) { + throw new Error("roomId is required"); } + + return this.withRetry(async () => { + try { + const sql = ` + SELECT + a.id, + a.name, + 
a.username, + a."avatarUrl", + COALESCE(a.details::jsonb, '{}'::jsonb) as details + FROM participants p + LEFT JOIN accounts a ON p."userId" = a.id + WHERE p."roomId" = $1 + ORDER BY a.name + `; + + const result = await this.pool.query(sql, [ + params.roomId, + ]); + + elizaLogger.debug("Retrieved actor details:", { + roomId: params.roomId, + actorCount: result.rows.length, + }); + + return result.rows.map((row) => { + try { + return { + ...row, + details: + typeof row.details === "string" + ? JSON.parse(row.details) + : row.details, + }; + } catch (parseError) { + elizaLogger.warn("Failed to parse actor details:", { + actorId: row.id, + error: + parseError instanceof Error + ? parseError.message + : String(parseError), + }); + return { + ...row, + details: {}, // Fallback to empty object if parsing fails + }; + } + }); + } catch (error) { + elizaLogger.error("Failed to fetch actor details:", { + roomId: params.roomId, + error: + error instanceof Error ? error.message : String(error), + }); + throw new Error( + `Failed to fetch actor details: ${error instanceof Error ? error.message : String(error)}` + ); + } + }); } async getCache(params: { key: string; agentId: UUID; }): Promise { - try { - const sql = `SELECT "value"::TEXT FROM cache WHERE "key" = $1 AND "agentId" = $2`; - const { rows } = await this.query<{ value: string }>(sql, [ - params.key, - params.agentId, - ]); - return rows[0]?.value ?? undefined; - } catch (error) { - elizaLogger.log("Error fetching cache", error); - return undefined; - } + return this.withRetry(async () => { + try { + const sql = `SELECT "value"::TEXT FROM cache WHERE "key" = $1 AND "agentId" = $2`; + const { rows } = await this.query<{ value: string }>(sql, [ + params.key, + params.agentId, + ]); + return rows[0]?.value ?? undefined; + } catch (error) { + elizaLogger.error("Error fetching cache", { + error: + error instanceof Error ? error.message : String(error), + key: params.key, + agentId: params.agentId, + }); + return undefined; + } + }); } async setCache(params: { @@ -788,33 +1341,81 @@ export class PostgresDatabaseAdapter agentId: UUID; value: string; }): Promise { - try { - await this.query( - `INSERT INTO cache ("key", "agentId", "value", "createdAt") VALUES ($1, $2, $3, CURRENT_TIMESTAMP) - ON CONFLICT ("key", "agentId") - DO UPDATE SET "value" = EXCLUDED.value, "createdAt" = CURRENT_TIMESTAMP`, - [params.key, params.agentId, params.value] - ); - return true; - } catch (error) { - elizaLogger.log("Error setting cache", error); - return false; - } + return this.withRetry(async () => { + try { + const client = await this.pool.connect(); + try { + await client.query("BEGIN"); + await client.query( + `INSERT INTO cache ("key", "agentId", "value", "createdAt") + VALUES ($1, $2, $3, CURRENT_TIMESTAMP) + ON CONFLICT ("key", "agentId") + DO UPDATE SET "value" = EXCLUDED.value, "createdAt" = CURRENT_TIMESTAMP`, + [params.key, params.agentId, params.value] + ); + await client.query("COMMIT"); + return true; + } catch (error) { + await client.query("ROLLBACK"); + elizaLogger.error("Error setting cache", { + error: + error instanceof Error + ? 
error.message + : String(error), + key: params.key, + agentId: params.agentId, + }); + return false; + } finally { + client.release(); + } + } catch (error) { + elizaLogger.error( + "Database connection error in setCache", + error + ); + return false; + } + }); } async deleteCache(params: { key: string; agentId: UUID; }): Promise { - try { - await this.query( - `DELETE FROM cache WHERE "key" = $1 AND "agentId" = $2`, - [params.key, params.agentId] - ); - return true; - } catch { - return false; - } + return this.withRetry(async () => { + try { + const client = await this.pool.connect(); + try { + await client.query("BEGIN"); + await client.query( + `DELETE FROM cache WHERE "key" = $1 AND "agentId" = $2`, + [params.key, params.agentId] + ); + await client.query("COMMIT"); + return true; + } catch (error) { + await client.query("ROLLBACK"); + elizaLogger.error("Error deleting cache", { + error: + error instanceof Error + ? error.message + : String(error), + key: params.key, + agentId: params.agentId, + }); + return false; + } finally { + client.release(); + } + } catch (error) { + elizaLogger.error( + "Database connection error in deleteCache", + error + ); + return false; + } + }); } } diff --git a/packages/adapter-supabase/schema.sql b/packages/adapter-supabase/schema.sql index b2e77227c2f..69771f5793c 100644 --- a/packages/adapter-supabase/schema.sql +++ b/packages/adapter-supabase/schema.sql @@ -30,7 +30,8 @@ CREATE TABLE rooms ( "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP ); -CREATE TABLE memories ( +-- Create tables for both vector sizes +CREATE TABLE memories_1536 ( "id" UUID PRIMARY KEY, "type" TEXT NOT NULL, "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, @@ -45,6 +46,45 @@ CREATE TABLE memories ( CONSTRAINT fk_agent FOREIGN KEY ("agentId") REFERENCES accounts("id") ON DELETE CASCADE ); +CREATE TABLE memories_1024 ( + "id" UUID PRIMARY KEY, + "type" TEXT NOT NULL, + "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, + "content" JSONB NOT NULL, + "embedding" vector(1024), -- Ollama mxbai-embed-large + "userId" UUID REFERENCES accounts("id"), + "agentId" UUID REFERENCES accounts("id"), + "roomId" UUID REFERENCES rooms("id"), + "unique" BOOLEAN DEFAULT true NOT NULL, + CONSTRAINT fk_room FOREIGN KEY ("roomId") REFERENCES rooms("id") ON DELETE CASCADE, + CONSTRAINT fk_user FOREIGN KEY ("userId") REFERENCES accounts("id") ON DELETE CASCADE, + CONSTRAINT fk_agent FOREIGN KEY ("agentId") REFERENCES accounts("id") ON DELETE CASCADE +); + +CREATE TABLE memories_384 ( + "id" UUID PRIMARY KEY, + "type" TEXT NOT NULL, + "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, + "content" JSONB NOT NULL, + "embedding" vector(384), + "userId" UUID REFERENCES accounts("id"), + "agentId" UUID REFERENCES accounts("id"), + "roomId" UUID REFERENCES rooms("id"), + "unique" BOOLEAN DEFAULT true NOT NULL, + CONSTRAINT fk_room FOREIGN KEY ("roomId") REFERENCES rooms("id") ON DELETE CASCADE, + CONSTRAINT fk_user FOREIGN KEY ("userId") REFERENCES accounts("id") ON DELETE CASCADE, + CONSTRAINT fk_agent FOREIGN KEY ("agentId") REFERENCES accounts("id") ON DELETE CASCADE +); + +-- Update view to include Ollama table +CREATE VIEW memories AS + SELECT * FROM memories_1536 + UNION ALL + SELECT * FROM memories_1024 + UNION ALL + SELECT * FROM memories_384; + + CREATE TABLE goals ( "id" UUID PRIMARY KEY, "createdAt" TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP, @@ -93,9 +133,13 @@ CREATE TABLE relationships ( CONSTRAINT fk_user FOREIGN KEY ("userId") REFERENCES accounts("id") ON DELETE CASCADE ); --- Indexes -CREATE 
INDEX idx_memories_embedding ON memories USING hnsw ("embedding" vector_cosine_ops); -CREATE INDEX idx_memories_type_room ON memories("type", "roomId"); +-- Add index for Ollama table +CREATE INDEX idx_memories_1024_embedding ON memories_1024 USING hnsw ("embedding" vector_cosine_ops); +CREATE INDEX idx_memories_1024_type_room ON memories_1024("type", "roomId"); +CREATE INDEX idx_memories_1536_embedding ON memories_1536 USING hnsw ("embedding" vector_cosine_ops); +CREATE INDEX idx_memories_384_embedding ON memories_384 USING hnsw ("embedding" vector_cosine_ops); +CREATE INDEX idx_memories_1536_type_room ON memories_1536("type", "roomId"); +CREATE INDEX idx_memories_384_type_room ON memories_384("type", "roomId"); CREATE INDEX idx_participants_user ON participants("userId"); CREATE INDEX idx_participants_room ON participants("roomId"); CREATE INDEX idx_relationships_users ON relationships("userA", "userB"); diff --git a/packages/core/src/config.ts b/packages/core/src/config.ts new file mode 100644 index 00000000000..5a1e7948d56 --- /dev/null +++ b/packages/core/src/config.ts @@ -0,0 +1,9 @@ +import dotenv from "dotenv"; +import path from "path"; +import { fileURLToPath } from "url"; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +// Load environment variables from root .env file +dotenv.config({ path: path.resolve(__dirname, "../../../.env") }); diff --git a/packages/core/src/embedding.ts b/packages/core/src/embedding.ts index 4082ddd8501..b06a11cf39e 100644 --- a/packages/core/src/embedding.ts +++ b/packages/core/src/embedding.ts @@ -1,19 +1,43 @@ import path from "node:path"; - -import { trimTokens } from "./generation.ts"; -import elizaLogger from "./logger.ts"; import { models } from "./models.ts"; +import { IAgentRuntime, ModelProviderName, ModelClass } from "./types.ts"; +// import fs from "fs"; +import { trimTokens } from "./generation.ts"; import settings from "./settings.ts"; -import { IAgentRuntime, ModelClass, ModelProviderName } from "./types.ts"; - +import elizaLogger from "./logger.ts"; +import { EmbeddingModel } from "fastembed"; interface EmbeddingOptions { model: string; endpoint: string; apiKey?: string; length?: number; isOllama?: boolean; + dimensions?: number; + provider?: string; } +// Add the embedding configuration +export const getEmbeddingConfig = () => ({ + dimensions: + settings.USE_OPENAI_EMBEDDING?.toLowerCase() === "true" + ? 1536 // OpenAI + : settings.USE_OLLAMA_EMBEDDING?.toLowerCase() === "true" + ? 1024 // Ollama mxbai-embed-large + : 384, // BGE + model: + settings.USE_OPENAI_EMBEDDING?.toLowerCase() === "true" + ? "text-embedding-3-small" + : settings.USE_OLLAMA_EMBEDDING?.toLowerCase() === "true" + ? settings.OLLAMA_EMBEDDING_MODEL || "mxbai-embed-large" + : EmbeddingModel.BGESmallENV15, + provider: + settings.USE_OPENAI_EMBEDDING?.toLowerCase() === "true" + ? "OpenAI" + : settings.USE_OLLAMA_EMBEDDING?.toLowerCase() === "true" + ? 
"Ollama" + : "BGE", +}); + async function getRemoteEmbedding( input: string, options: EmbeddingOptions @@ -37,9 +61,12 @@ async function getRemoteEmbedding( : {}), }, body: JSON.stringify({ - input: trimTokens(input, 8000, "gpt-4o-mini"), + input, model: options.model, - length: options.length || 384, + dimensions: + options.dimensions || + options.length || + getEmbeddingConfig().dimensions, // Prefer dimensions, fallback to length }), }; @@ -84,152 +111,239 @@ export function getEmbeddingType(runtime: IAgentRuntime): "local" | "remote" { } export function getEmbeddingZeroVector(runtime: IAgentRuntime): number[] { - const embeddingDimension = - getEmbeddingType(runtime) === "local" ? 384 : 1536; + let embeddingDimension = 384; // Default BGE dimension + + if (settings.USE_OPENAI_EMBEDDING?.toLowerCase() === "true") { + embeddingDimension = 1536; // OpenAI dimension + } else if (settings.USE_OLLAMA_EMBEDDING?.toLowerCase() === "true") { + embeddingDimension = 1024; // Ollama mxbai-embed-large dimension + } + return Array(embeddingDimension).fill(0); } /** - * Send a message to the OpenAI API for embedding. - * @param input The input to be embedded. - * @returns The embedding of the input. - */ -/** - * Generate embeddings for input text using configured model provider - * @param runtime The agent runtime containing model configuration - * @param input The text to generate embeddings for - * @returns Array of embedding numbers + * Gets embeddings from a remote API endpoint. Falls back to local BGE/384 + * + * @param {string} input - The text to generate embeddings for + * @param {EmbeddingOptions} options - Configuration options including: + * - model: The model name to use + * - endpoint: Base API endpoint URL + * - apiKey: Optional API key for authentication + * - isOllama: Whether this is an Ollama endpoint + * - dimensions: Desired embedding dimensions + * @param {IAgentRuntime} runtime - The agent runtime context + * @returns {Promise} Array of embedding values + * @throws {Error} If the API request fails */ + export async function embed(runtime: IAgentRuntime, input: string) { - // Get model provider configuration - const modelProvider = models[runtime.character.modelProvider]; - - // Determine which embedding model to use: - // 1. OpenAI if USE_OPENAI_EMBEDDING is true - // 2. Provider's own embedding model if available - // 3. Fallback to OpenAI embedding model - const embeddingModel = settings.USE_OPENAI_EMBEDDING - ? "text-embedding-3-small" - : runtime.character.modelProvider === ModelProviderName.OLLAMA - ? 
settings.OLLAMA_EMBEDDING_MODEL || "mxbai-embed-large" - : modelProvider.model?.[ModelClass.EMBEDDING] || - models[ModelProviderName.OPENAI].model[ModelClass.EMBEDDING]; - - if (!embeddingModel) { - throw new Error("No embedding model configured"); - } + elizaLogger.debug("Embedding request:", { + modelProvider: runtime.character.modelProvider, + useOpenAI: process.env.USE_OPENAI_EMBEDDING, + input: input?.slice(0, 50) + "...", + inputType: typeof input, + inputLength: input?.length, + isString: typeof input === "string", + isEmpty: !input, + }); - if (getEmbeddingType(runtime) === "local") { - return await getLocalEmbedding(input); + // Validate input + if (!input || typeof input !== "string" || input.trim().length === 0) { + elizaLogger.warn("Invalid embedding input:", { + input, + type: typeof input, + length: input?.length, + }); + return []; // Return empty embedding array } - // Try to get cached embedding first + // Check cache first const cachedEmbedding = await retrieveCachedEmbedding(runtime, input); - if (cachedEmbedding) { - return cachedEmbedding; + if (cachedEmbedding) return cachedEmbedding; + + const config = getEmbeddingConfig(); + const isNode = typeof process !== "undefined" && process.versions?.node; + + // Determine which embedding path to use + if (config.provider === "OpenAI") { + return await getRemoteEmbedding(input, { + model: config.model, + endpoint: "https://api.openai.com/v1", + apiKey: settings.OPENAI_API_KEY, + dimensions: config.dimensions, + }); + } + + if (config.provider === "Ollama") { + return await getRemoteEmbedding(input, { + model: config.model, + endpoint: + runtime.character.modelEndpointOverride || + models[ModelProviderName.OLLAMA].endpoint, + isOllama: true, + dimensions: config.dimensions, + }); + } + + // BGE - try local first if in Node + if (isNode) { + try { + return await getLocalEmbedding(input); + } catch (error) { + elizaLogger.warn( + "Local embedding failed, falling back to remote", + error + ); + } } - // Generate new embedding remotely + // Fallback to remote override return await getRemoteEmbedding(input, { - model: embeddingModel, - // Use OpenAI endpoint if specified, otherwise use provider endpoint - endpoint: settings.USE_OPENAI_EMBEDDING - ? "https://api.openai.com/v1" - : runtime.character.modelEndpointOverride || modelProvider.endpoint, - // Use OpenAI API key if specified, otherwise use runtime token - apiKey: settings.USE_OPENAI_EMBEDDING - ? 
settings.OPENAI_API_KEY - : runtime.token, - // Special handling for Ollama provider - isOllama: - runtime.character.modelProvider === ModelProviderName.OLLAMA && - !settings.USE_OPENAI_EMBEDDING, + model: config.model, + endpoint: + runtime.character.modelEndpointOverride || + models[runtime.character.modelProvider].endpoint, + apiKey: runtime.token, + dimensions: config.dimensions, }); -} -async function getLocalEmbedding(input: string): Promise { - // Check if we're in Node.js environment - const isNode = - typeof process !== "undefined" && - process.versions != null && - process.versions.node != null; + async function getLocalEmbedding(input: string): Promise { + elizaLogger.debug("DEBUG - Inside getLocalEmbedding function"); - if (!isNode) { - elizaLogger.warn( - "Local embedding not supported in browser, falling back to remote embedding" - ); - throw new Error("Local embedding not supported in browser"); - } + // Check if we're in Node.js environment + const isNode = + typeof process !== "undefined" && + process.versions != null && + process.versions.node != null; - try { - // Try to dynamically import all required Node.js modules - const moduleImports = await Promise.all([ - import("fs"), - import("url"), - // Wrap fastembed import in a try-catch to prevent build errors for non-Node.js environments. - (async () => { - try { - return await import("fastembed"); - // eslint-disable-next-line - } catch (_error) { - elizaLogger.error("Failed to load fastembed."); - throw new Error( - "fastembed import failed, falling back to remote embedding" - ); - } - })(), - ]); + if (isNode) { + const fs = await import("fs"); + const { FlagEmbedding } = await import("fastembed"); + const { fileURLToPath } = await import("url"); - const [fs, { fileURLToPath }, fastEmbed] = moduleImports; - const { FlagEmbedding } = fastEmbed; + function getRootPath() { + const __filename = fileURLToPath(import.meta.url); + const __dirname = path.dirname(__filename); - function getRootPath() { - const __filename = fileURLToPath(import.meta.url); - const __dirname = path.dirname(__filename); + const rootPath = path.resolve(__dirname, ".."); + if (rootPath.includes("/eliza/")) { + return rootPath.split("/eliza/")[0] + "/eliza/"; + } - const rootPath = path.resolve(__dirname, ".."); - if (rootPath.includes("/eliza/")) { - return rootPath.split("/eliza/")[0] + "/eliza/"; + return path.resolve(__dirname, ".."); } - return path.resolve(__dirname, ".."); - } + const cacheDir = getRootPath() + "/cache/"; - const cacheDir = getRootPath() + "/cache/"; + if (!fs.existsSync(cacheDir)) { + fs.mkdirSync(cacheDir, { recursive: true }); + } - if (!fs.existsSync(cacheDir)) { - fs.mkdirSync(cacheDir, { recursive: true }); - } + elizaLogger.debug("Initializing BGE embedding model..."); + + const embeddingModel = await FlagEmbedding.init({ + cacheDir: cacheDir, + model: EmbeddingModel.BGESmallENV15, + // BGE-small-en-v1.5 specific settings + maxLength: 512, // BGE's context window + }); + + elizaLogger.debug("Generating embedding for input:", { + inputLength: input.length, + inputPreview: input.slice(0, 100) + "...", + }); + + // Let fastembed handle tokenization internally + const embedding = await embeddingModel.queryEmbed(input); + + // Debug the raw embedding + elizaLogger.debug("Raw embedding from BGE:", { + type: typeof embedding, + isArray: Array.isArray(embedding), + dimensions: Array.isArray(embedding) + ? embedding.length + : "not an array", + sample: Array.isArray(embedding) + ? 
embedding.slice(0, 5) + : embedding, + }); + + // Process the embedding into the correct format + let finalEmbedding: number[]; + + if ( + ArrayBuffer.isView(embedding) && + embedding.constructor === Float32Array + ) { + // Direct Float32Array result + finalEmbedding = Array.from(embedding); + } else if ( + Array.isArray(embedding) && + ArrayBuffer.isView(embedding[0]) && + embedding[0].constructor === Float32Array + ) { + // Nested Float32Array result + finalEmbedding = Array.from(embedding[0]); + } else if (Array.isArray(embedding)) { + // Direct array result + finalEmbedding = embedding; + } else { + throw new Error( + `Unexpected embedding format: ${typeof embedding}` + ); + } - const embeddingModel = await FlagEmbedding.init({ - cacheDir: cacheDir, - }); + elizaLogger.debug("Processed embedding:", { + length: finalEmbedding.length, + sample: finalEmbedding.slice(0, 5), + allNumbers: finalEmbedding.every((n) => typeof n === "number"), + }); + + // Ensure all values are proper numbers + finalEmbedding = finalEmbedding.map((n) => Number(n)); + + // Validate the final embedding + if ( + !Array.isArray(finalEmbedding) || + finalEmbedding[0] === undefined + ) { + throw new Error( + "Invalid embedding format: must be an array starting with a number" + ); + } - const trimmedInput = trimTokens(input, 8000, "gpt-4o-mini"); - const embedding = await embeddingModel.queryEmbed(trimmedInput); - return embedding; - // eslint-disable-next-line - } catch (_error) { - elizaLogger.warn( - "Local embedding not supported in browser, falling back to remote embedding." - ); - throw new Error("Local embedding not supported in browser"); - } -} + // Validate embedding dimensions (should be 384 for BGE-small) + if (finalEmbedding.length !== 384) { + elizaLogger.warn( + `Unexpected embedding dimension: ${finalEmbedding.length} (expected 384)` + ); + } -export async function retrieveCachedEmbedding( - runtime: IAgentRuntime, - input: string -) { - if (!input) { - elizaLogger.log("No input to retrieve cached embedding for"); - return null; + return finalEmbedding; + } else { + // Browser implementation - fallback to remote embedding + elizaLogger.warn( + "Local embedding not supported in browser, falling back to remote embedding" + ); + throw new Error("Local embedding not supported in browser"); + } } - const similaritySearchResult = - await runtime.messageManager.getCachedEmbeddings(input); - if (similaritySearchResult.length > 0) { - return similaritySearchResult[0].embedding; + async function retrieveCachedEmbedding( + runtime: IAgentRuntime, + input: string + ) { + if (!input) { + elizaLogger.log("No input to retrieve cached embedding for"); + return null; + } + + const similaritySearchResult = + await runtime.messageManager.getCachedEmbeddings(input); + if (similaritySearchResult.length > 0) { + return similaritySearchResult[0].embedding; + } + return null; } - return null; } diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts index b204add28a6..b9a78da8485 100644 --- a/packages/core/src/index.ts +++ b/packages/core/src/index.ts @@ -1,3 +1,5 @@ +import "./config.ts"; // Add this line first + export * from "./actions.ts"; export * from "./context.ts"; export * from "./database.ts"; @@ -21,4 +23,4 @@ export * from "./uuid.ts"; export * from "./enviroment.ts"; export * from "./cache.ts"; export { default as knowledge } from "./knowledge.ts"; -export * from "./utils.ts"; \ No newline at end of file +export * from "./utils.ts"; diff --git a/packages/core/src/knowledge.ts 
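Taken together, the flag precedence introduced above (OpenAI first, then Ollama, then the local BGE default) has to agree with the fixed-size memories_1536 / memories_1024 / memories_384 tables added to the Supabase schema earlier in this diff. A compact sketch of that mapping, purely for reference (the `chooseEmbedding` helper and its shape are illustrative, not part of the patch):

```ts
// Sketch: how the embedding env flags resolve to a provider, a dimension,
// and the matching pgvector table. Helper name and types are assumed.
type EmbeddingProvider = "OpenAI" | "Ollama" | "BGE";

interface EmbeddingChoice {
    provider: EmbeddingProvider;
    dimensions: 1536 | 1024 | 384;
    table: string; // memories_* table from the Supabase schema
}

function chooseEmbedding(env: NodeJS.ProcessEnv): EmbeddingChoice {
    if (env.USE_OPENAI_EMBEDDING?.toLowerCase() === "true") {
        return { provider: "OpenAI", dimensions: 1536, table: "memories_1536" };
    }
    if (env.USE_OLLAMA_EMBEDDING?.toLowerCase() === "true") {
        return { provider: "Ollama", dimensions: 1024, table: "memories_1024" };
    }
    return { provider: "BGE", dimensions: 384, table: "memories_384" }; // local default
}

// chooseEmbedding(process.env) with neither flag set
// -> { provider: "BGE", dimensions: 384, table: "memories_384" }
```

Note that getEmbeddingZeroVector above applies the same precedence, which is what keeps zero vectors insertable into whichever memories_* table is active.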
b/packages/core/src/knowledge.ts
index c29cf48f31e..9277b90e0b5 100644
--- a/packages/core/src/knowledge.ts
+++ b/packages/core/src/knowledge.ts
@@ -9,8 +9,29 @@ async function get(
     runtime: AgentRuntime,
     message: Memory
 ): Promise<string[]> {
+    // Add validation for message
+    if (!message?.content?.text) {
+        elizaLogger.warn("Invalid message for knowledge query:", {
+            message,
+            content: message?.content,
+            text: message?.content?.text,
+        });
+        return [];
+    }
+
     const processed = preprocess(message.content.text);
-    elizaLogger.log(`Querying knowledge for: ${processed}`);
+    elizaLogger.debug("Knowledge query:", {
+        original: message.content.text,
+        processed,
+        length: processed?.length,
+    });
+
+    // Validate processed text
+    if (!processed || processed.trim().length === 0) {
+        elizaLogger.warn("Empty processed text for knowledge query");
+        return [];
+    }
+
     const embedding = await embed(runtime, processed);
     const fragments = await runtime.knowledgeManager.searchMemoriesByEmbedding(
         embedding,
@@ -82,6 +103,16 @@ async function set(
 }
 
 export function preprocess(content: string): string {
+    elizaLogger.debug("Preprocessing text:", {
+        input: content,
+        length: content?.length,
+    });
+
+    if (!content || typeof content !== "string") {
+        elizaLogger.warn("Invalid input for preprocessing");
+        return "";
+    }
+
     return (
         content
             // Remove code blocks and their content
diff --git a/packages/core/src/logger.ts b/packages/core/src/logger.ts
index a2bd21971e3..978f23e628b 100644
--- a/packages/core/src/logger.ts
+++ b/packages/core/src/logger.ts
@@ -9,7 +9,16 @@ class ElizaLogger {
             process.versions.node != null;
 
         // Set verbose based on environment
-        this.verbose = this.isNode ? settings.VERBOSE === "true" : false;
+        this.verbose = this.isNode ? process.env.VERBOSE === "true" : false;
+
+        // Add initialization logging (guard process.env access so this
+        // cannot throw in browsers, where `process` is undefined)
+        console.log(`[ElizaLogger] Initializing with:
+            isNode: ${this.isNode}
+            verbose: ${this.verbose}
+            VERBOSE env: ${this.isNode ? process.env.VERBOSE : "n/a"}
+            NODE_ENV: ${this.isNode ? process.env.NODE_ENV : "n/a"}
+        `);
     }
 
     private isNode: boolean;
@@ -212,7 +220,13 @@ class ElizaLogger {
     }
 
     debug(...strings) {
-        if (!this.verbose) return;
+        if (!this.verbose) {
+            console.log(
+                "[ElizaLogger] Debug message suppressed (verbose=false):",
+                ...strings
+            );
+            return;
+        }
         this.#logWithStyle(strings, {
             fg: "magenta",
             bg: "",
diff --git a/packages/core/src/models.ts b/packages/core/src/models.ts
index 1ab8749e188..562ccb1e559 100644
--- a/packages/core/src/models.ts
+++ b/packages/core/src/models.ts
@@ -45,14 +45,14 @@ export const models: Models = {
         settings: {
             stop: [],
             maxInputTokens: 200000,
-            maxOutputTokens: 8192,
+            maxOutputTokens: 4096,
             frequency_penalty: 0.4,
             presence_penalty: 0.4,
             temperature: 0.7,
         },
         endpoint: "https://api.anthropic.com/v1",
         model: {
-            [ModelClass.SMALL]: "claude-3-5-haiku-20241022",
+            [ModelClass.SMALL]: "claude-3-haiku-20240307",
             [ModelClass.MEDIUM]: "claude-3-5-sonnet-20241022",
             [ModelClass.LARGE]: "claude-3-5-sonnet-20241022",
         },
@@ -272,7 +272,7 @@ export const models: Models = {
         settings: {
             stop: [],
             maxInputTokens: 128000,
-            maxOutputTokens: 8192,
+            maxOutputTokens: 8192,
             frequency_penalty: 0.5,
             presence_penalty: 0.5,
             temperature: 0.8,
@@ -293,11 +293,11 @@ export const models: Models = {
             temperature: 0.7,
         },
         imageSettings: {
-            steps: 28,
+            steps: 28,
         },
         endpoint: "https://api.fal.ai/v1",
         model: {
-            [ModelClass.SMALL]: "", // FAL doesn't provide text models
+            [ModelClass.SMALL]: "", // FAL doesn't provide text models
             [ModelClass.MEDIUM]: "",
             [ModelClass.LARGE]: "",
             [ModelClass.EMBEDDING]: "",
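Every database call in the Postgres adapter hunks earlier in this diff is now funneled through `this.withRetry(...)`. The helper itself is added in a part of the adapter patch not shown here; as a rough sketch of what such a wrapper typically looks like (the signature, retry count, and backoff values below are assumptions for illustration, not the PR's exact code):

```ts
// Illustrative retry wrapper in the spirit of the adapter's withRetry usage.
// All names and defaults here are assumed, not taken from the diff.
async function withRetry<T>(
    operation: () => Promise<T>,
    maxRetries = 3,
    baseDelayMs = 500
): Promise<T> {
    let lastError: unknown;
    for (let attempt = 1; attempt <= maxRetries; attempt++) {
        try {
            return await operation();
        } catch (error) {
            lastError = error;
            if (attempt < maxRetries) {
                // Exponential backoff: 500ms, 1s, 2s, ...
                const delayMs = baseDelayMs * 2 ** (attempt - 1);
                await new Promise((resolve) => setTimeout(resolve, delayMs));
            }
        }
    }
    throw lastError;
}
```

Centralizing retries this way keeps each adapter method small and makes transient connection errors recoverable without duplicating backoff logic in every query.

diff --git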
a/packages/core/src/runtime.ts b/packages/core/src/runtime.ts index 50d7c37f893..36fde717eee 100644 --- a/packages/core/src/runtime.ts +++ b/packages/core/src/runtime.ts @@ -296,12 +296,15 @@ export class AgentRuntime implements IAgentRuntime { this.serverUrl = opts.serverUrl ?? this.serverUrl; elizaLogger.info("Setting model provider..."); - elizaLogger.info( - "- Character model provider:", - this.character.modelProvider - ); - elizaLogger.info("- Opts model provider:", opts.modelProvider); - elizaLogger.info("- Current model provider:", this.modelProvider); + elizaLogger.info("Model Provider Selection:", { + characterModelProvider: this.character.modelProvider, + optsModelProvider: opts.modelProvider, + currentModelProvider: this.modelProvider, + finalSelection: + this.character.modelProvider ?? + opts.modelProvider ?? + this.modelProvider, + }); this.modelProvider = this.character.modelProvider ?? @@ -309,11 +312,13 @@ export class AgentRuntime implements IAgentRuntime { this.modelProvider; this.imageModelProvider = - this.character.imageModelProvider ?? - this.modelProvider; + this.character.imageModelProvider ?? this.modelProvider; elizaLogger.info("Selected model provider:", this.modelProvider); - elizaLogger.info("Selected image model provider:", this.imageModelProvider); + elizaLogger.info( + "Selected image model provider:", + this.imageModelProvider + ); // Validate model provider if (!Object.values(ModelProviderName).includes(this.modelProvider)) { @@ -825,7 +830,7 @@ export class AgentRuntime implements IAgentRuntime { .map( (attachment) => `ID: ${attachment.id} -Name: ${attachment.title} +Name: ${attachment.title} URL: ${attachment.url} Type: ${attachment.source} Description: ${attachment.description} @@ -1227,7 +1232,7 @@ Text: ${attachment.text} (attachment) => `ID: ${attachment.id} Name: ${attachment.title} -URL: ${attachment.url} +URL: ${attachment.url} Type: ${attachment.source} Description: ${attachment.description} Text: ${attachment.text} diff --git a/packages/core/src/settings.ts b/packages/core/src/settings.ts index 92bdc049a5e..29fd4f4752f 100644 --- a/packages/core/src/settings.ts +++ b/packages/core/src/settings.ts @@ -1,6 +1,22 @@ import { config } from "dotenv"; import fs from "fs"; import path from "path"; +import elizaLogger from "./logger.ts"; + +elizaLogger.info("Loading embedding settings:", { + USE_OPENAI_EMBEDDING: process.env.USE_OPENAI_EMBEDDING, + USE_OLLAMA_EMBEDDING: process.env.USE_OLLAMA_EMBEDDING, + OLLAMA_EMBEDDING_MODEL: + process.env.OLLAMA_EMBEDDING_MODEL || "mxbai-embed-large", +}); + +// Add this logging block +elizaLogger.info("Loading character settings:", { + CHARACTER_PATH: process.env.CHARACTER_PATH, + ARGV: process.argv, + CHARACTER_ARG: process.argv.find((arg) => arg.startsWith("--character=")), + CWD: process.cwd(), +}); interface Settings { [key: string]: string | undefined; @@ -108,4 +124,14 @@ export function hasEnvVariable(key: string): boolean { // Initialize settings based on environment export const settings = isBrowser() ? 
environmentSettings : loadEnvConfig();
+
+elizaLogger.info("Parsed settings:", {
+    USE_OPENAI_EMBEDDING: settings.USE_OPENAI_EMBEDDING,
+    USE_OPENAI_EMBEDDING_TYPE: typeof settings.USE_OPENAI_EMBEDDING,
+    USE_OLLAMA_EMBEDDING: settings.USE_OLLAMA_EMBEDDING,
+    USE_OLLAMA_EMBEDDING_TYPE: typeof settings.USE_OLLAMA_EMBEDDING,
+    OLLAMA_EMBEDDING_MODEL:
+        settings.OLLAMA_EMBEDDING_MODEL || "mxbai-embed-large",
+});
+
 export default settings;
diff --git a/packages/core/src/types.ts b/packages/core/src/types.ts
index 5f9324cb7d6..1edd49de65b 100644
--- a/packages/core/src/types.ts
+++ b/packages/core/src/types.ts
@@ -221,7 +221,7 @@ export enum ModelProviderName {
     OLLAMA = "ollama",
     HEURIST = "heurist",
     GALADRIEL = "galadriel",
-    FAL = "falai"
+    FAL = "falai",
 }
 
 /**
diff --git a/packages/core/tsconfig.build.json b/packages/core/tsconfig.build.json
new file mode 100644
index 00000000000..e56b5e155c1
--- /dev/null
+++ b/packages/core/tsconfig.build.json
@@ -0,0 +1,8 @@
+{
+    "extends": "./tsconfig.json",
+    "compilerOptions": {
+        "sourceMap": true,
+        "inlineSources": true,
+        "sourceRoot": "/"
+    }
+}
diff --git a/packages/plugin-node/src/services/transcription.ts b/packages/plugin-node/src/services/transcription.ts
index 763982e4a16..43bbf373607 100644
--- a/packages/plugin-node/src/services/transcription.ts
+++ b/packages/plugin-node/src/services/transcription.ts
@@ -289,7 +289,7 @@ export class TranscriptionService
                 translateToEnglish: false,
                 wordTimestamps: false,
                 timestamps_length: 60,
-                splitOnWord: true,
+                // splitOnWord: true,
             },
         });
 
diff --git a/prettier.config.cjs b/prettier.config.cjs
index c882598e1ce..6384caaceab 100644
--- a/prettier.config.cjs
+++ b/prettier.config.cjs
@@ -7,4 +7,7 @@ module.exports = {
    trailingComma: "es5",
    bracketSpacing: true,
    printWidth: 80,
+    useTabs: false, // enforce spaces; indent width comes from tabWidth above
+    // Prettier has no "indent" or "trimTrailingWhitespace" options;
+    // trailing-whitespace trimming is handled by .editorconfig instead.
};
diff --git a/scripts/build.sh b/scripts/build.sh
index 48c965e7377..05502052ad4 100644
--- a/scripts/build.sh
+++ b/scripts/build.sh
@@ -4,7 +4,7 @@ REQUIRED_NODE_VERSION=22
 CURRENT_NODE_VERSION=$(node -v | cut -d'.' -f1 | sed 's/v//')
 
-if (( CURRENT_NODE_VERSION < REQUIRED_NODE_VERSION )); then
+if ((CURRENT_NODE_VERSION < REQUIRED_NODE_VERSION)); then
     echo "Error: Node.js version must be $REQUIRED_NODE_VERSION or higher. Current version is $CURRENT_NODE_VERSION."
     exit 1
 fi
 
@@ -25,7 +25,7 @@ PACKAGES=(
     "adapter-sqlite"
     "adapter-sqljs"
     "adapter-supabase"
-    "plugin-buttplug"
+    # "plugin-buttplug"
     "plugin-node"
     "plugin-trustdb"
     "plugin-solana"
@@ -69,52 +69,53 @@ for package in "${PACKAGES[@]}"; do
         echo "No package.json found in $package, skipping..."
fi - cd - > /dev/null || exit + cd - >/dev/null || exit done - # Download the latest intiface-engine release from GitHub based on OS (linux/macos/win) # Determine OS type -OS="" -case "$(uname -s)" in - Linux*) OS="linux";; - Darwin*) OS="macos";; - MINGW*|MSYS*|CYGWIN*) OS="win";; - *) echo "Unsupported OS"; exit 1;; -esac - -echo -e "\033[1mDownloading intiface-engine for $OS...\033[0m" - -# Get latest release info from GitHub API -LATEST_RELEASE=$(curl -s https://api.github.com/repos/intiface/intiface-engine/releases/latest) -DOWNLOAD_URL=$(echo "$LATEST_RELEASE" | grep -o "https://.*intiface-engine-$OS-x64-Release\.zip" | head -n 1) - -if [ -z "$DOWNLOAD_URL" ]; then - echo -e "\033[1;31mCould not find download URL for $OS\033[0m" - exit 1 -fi - -# Download and extract into packages/plugin-buttplug/intiface-engine -if curl -L "$DOWNLOAD_URL" -o "packages/plugin-buttplug/intiface-engine.zip"; then - echo -e "\033[1;32mSuccessfully downloaded intiface-engine\033[0m" - - # Clean previous installation if exists - rm -rf packages/plugin-buttplug/intiface-engine - - # Extract - unzip -q packages/plugin-buttplug/intiface-engine.zip -d packages/plugin-buttplug/intiface-engine - rm packages/plugin-buttplug/intiface-engine.zip - - # Make binary executable on Unix-like systems - if [ "$OS" != "win" ]; then - chmod +x packages/plugin-buttplug/intiface-engine/intiface-engine - fi - - echo -e "\033[1;32mSuccessfully set up intiface-engine\033[0m" -else - echo -e "\033[1;31mFailed to download intiface-engine\033[0m" - exit 1 -fi - +# OS="" +# case "$(uname -s)" in +# Linux*) OS="linux" ;; +# Darwin*) OS="macos" ;; +# MINGW* | MSYS* | CYGWIN*) OS="win" ;; +# *) +# echo "Unsupported OS" +# exit 1 +# ;; +# esac + +# echo -e "\033[1mDownloading intiface-engine for $OS...\033[0m" + +# # Get latest release info from GitHub API +# LATEST_RELEASE=$(curl -s https://api.github.com/repos/intiface/intiface-engine/releases/latest) +# DOWNLOAD_URL=$(echo "$LATEST_RELEASE" | grep -o "https://.*intiface-engine-$OS-x64-Release\.zip" | head -n 1) + +# if [ -z "$DOWNLOAD_URL" ]; then +# echo -e "\033[1;31mCould not find download URL for $OS\033[0m" +# exit 1 +# fi + +# # Download and extract into packages/plugin-buttplug/intiface-engine +# if curl -L "$DOWNLOAD_URL" -o "packages/plugin-buttplug/intiface-engine.zip"; then +# echo -e "\033[1;32mSuccessfully downloaded intiface-engine\033[0m" + +# # Clean previous installation if exists +# rm -rf packages/plugin-buttplug/intiface-engine + +# # Extract +# unzip -q packages/plugin-buttplug/intiface-engine.zip -d packages/plugin-buttplug/intiface-engine +# rm packages/plugin-buttplug/intiface-engine.zip + +# # Make binary executable on Unix-like systems +# if [ "$OS" != "win" ]; then +# chmod +x packages/plugin-buttplug/intiface-engine/intiface-engine +# fi + +# echo -e "\033[1;32mSuccessfully set up intiface-engine\033[0m" +# else +# echo -e "\033[1;31mFailed to download intiface-engine\033[0m" +# exit 1 +# fi echo -e "\033[1mBuild process completed.😎\033[0m"
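One last observation on the adapter changes above: setCache and deleteCache both hand-roll the same connect / BEGIN / COMMIT / ROLLBACK / release sequence. That pattern factors cleanly into a helper; a minimal sketch using pg (the `withTransaction` name and signature are assumptions for illustration, not an API from this diff):

```ts
import { Pool, PoolClient } from "pg";

// Illustrative transaction helper mirroring the BEGIN/COMMIT/ROLLBACK/release
// sequence used by setCache and deleteCache above.
async function withTransaction<T>(
    pool: Pool,
    fn: (client: PoolClient) => Promise<T>
): Promise<T> {
    const client = await pool.connect();
    try {
        await client.query("BEGIN");
        const result = await fn(client);
        await client.query("COMMIT");
        return result;
    } catch (error) {
        await client.query("ROLLBACK"); // undo any partial writes
        throw error;
    } finally {
        client.release(); // always return the client to the pool
    }
}
```

With such a helper, each cache method reduces to a single `await withTransaction(this.pool, (client) => client.query(...))`, which makes it much harder to leak a pooled client on an error path.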