I'm getting an error when I try to call the AI Agent node from the chat message node.
AI Agent Node Input
[
  {
    "sessionId": "test-dc14c161-6ae6-44be-b37f-50e9dfc6b7bd",
    "action": "sendMessage",
    "chatInput": "testing"
  }
]
AI Agent Node Error Output
{
  "errorMessage": "Cannot read properties of undefined (reading 'filter')",
  "errorDetails": {},
  "n8nDetails": {
    "n8nVersion": "1.61.0 (Self Hosted)",
    "binaryDataMode": "default",
    "stackTrace": [
      "TypeError: Cannot read properties of undefined (reading 'filter')",
      " at convertAMessagesToOllama (/usr/local/lib/node_modules/n8n/node_modules/@langchain/ollama/dist/utils.cjs:34:41)",
      " at /usr/local/lib/node_modules/n8n/node_modules/@langchain/ollama/dist/utils.cjs:138:20",
      " at Array.flatMap (<anonymous>)",
      " at convertToOllamaMessages (/usr/local/lib/node_modules/n8n/node_modules/@langchain/ollama/dist/utils.cjs:133:21)",
      " at ChatOllama._streamResponseChunks (/usr/local/lib/node_modules/n8n/node_modules/@langchain/ollama/dist/chat_models.cjs:727:71)",
      " at _streamResponseChunks.next (<anonymous>)",
      " at ChatOllama._generate (/usr/local/lib/node_modules/n8n/node_modules/@langchain/ollama/dist/chat_models.cjs:686:26)",
      " at /usr/local/lib/node_modules/n8n/node_modules/@langchain/core/dist/language_models/chat_models.cjs:186:96",
      " at Array.map (<anonymous>)",
      " at ChatOllama._generateUncached (/usr/local/lib/node_modules/n8n/node_modules/@langchain/core/dist/language_models/chat_models.cjs:186:67)",
      " at processTicksAndRejections (node:internal/process/task_queues:95:5)",
      " at LLMChain._call (/usr/local/lib/node_modules/n8n/node_modules/langchain/dist/chains/llm_chain.cjs:162:37)",
      " at LLMChain.invoke (/usr/local/lib/node_modules/n8n/node_modules/langchain/dist/chains/base.cjs:58:28)",
      " at LLMChain.predict (/usr/local/lib/node_modules/n8n/node_modules/langchain/dist/chains/llm_chain.cjs:188:24)",
      " at ChatConversationalAgent._plan (/usr/local/lib/node_modules/n8n/node_modules/langchain/dist/agents/agent.cjs:476:24)",
      " at AgentExecutor._call (/usr/local/lib/node_modules/n8n/node_modules/langchain/dist/agents/executor.cjs:432:26)",
      " at AgentExecutor.invoke (/usr/local/lib/node_modules/n8n/node_modules/langchain/dist/chains/base.cjs:58:28)",
      " at Object.conversationalAgentExecute (/usr/local/lib/node_modules/n8n/node_modules/@n8n/n8n-nodes-langchain/dist/nodes/agents/Agent/agents/ConversationalAgent/execute.js:71:28)",
      " at Object.execute (/usr/local/lib/node_modules/n8n/node_modules/@n8n/n8n-nodes-langchain/dist/nodes/agents/Agent/Agent.node.js:349:20)",
      " at Workflow.runNode (/usr/local/lib/node_modules/n8n/node_modules/n8n-workflow/dist/Workflow.js:722:19)",
      " at /usr/local/lib/node_modules/n8n/node_modules/n8n-core/dist/WorkflowExecute.js:711:51",
      " at /usr/local/lib/node_modules/n8n/node_modules/n8n-core/dist/WorkflowExecute.js:1141:20"
    ]
  }
}
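From the stack trace, the crash happens inside `convertAMessagesToOllama` in `@langchain/ollama`, where something calls `.filter` on a value that is `undefined`. The sketch below is only my guess at the failure pattern, not the actual library code: the property name `tool_calls` and both helper functions are hypothetical, assuming the converter filters an optional message property that a plain text chat message simply doesn't have.

```typescript
// Hypothetical reproduction of the failure pattern in the stack trace.
// Assumption: the Ollama converter filters an optional message property
// (something like tool_calls) that plain text chat messages don't carry.
type ChatMessage = {
  role: string;
  content: string;
  tool_calls?: { name: string }[]; // optional: missing on a plain "testing" message
};

// Unguarded version: mirrors the failing line. Calling .filter on the undefined
// property throws "Cannot read properties of undefined (reading 'filter')".
function toOllamaMessage(msg: ChatMessage) {
  const calls = msg.tool_calls!.filter((c) => c.name.length > 0); // TypeError at runtime when tool_calls is undefined
  return { role: msg.role, content: msg.content, calls };
}

// Guarded version: defaults the missing array so the same input converts cleanly.
function toOllamaMessageSafe(msg: ChatMessage) {
  const calls = (msg.tool_calls ?? []).filter((c) => c.name.length > 0);
  return { role: msg.role, content: msg.content, calls };
}

// The chat trigger payload only carries text, so tool_calls is undefined:
console.log(toOllamaMessageSafe({ role: "user", content: "testing" })); // works
// toOllamaMessage({ role: "user", content: "testing" });               // would throw the TypeError above
```

If that guess is right, the fix would have to land in `@langchain/ollama` (or in how the agent builds the message list) rather than in my workflow, but I'd appreciate confirmation or a known workaround.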
Information on your n8n setup
- n8n version: 1.61.0
- Database (default: SQLite): Postgres
- n8n EXECUTIONS_PROCESS setting (default: own, main):
- Running n8n via (Docker, npm, n8n cloud, desktop app): Docker
- Operating system: Windows 11