Loop stops execution after first element

Hey! I have a strange problem with my n8n setup. In general it pulls data via HTTP, splits it with a Function node into a list of items, and then for each item it should:

  • query db with item
  • create new record in DB if no record exists

The output from my Function node shows 10 items, and that's OK,
but then after the Postgres "Execute Query" and the IF node I get only one result.
How do I properly iterate over each item and not only the first?

More details

{
  "name": "(Exp) Import WI data from airtable to postgres",
  "nodes": [
    {
      "parameters": {},
      "name": "Start",
      "type": "n8n-nodes-base.start",
      "typeVersion": 1,
      "position": [
        250,
        300
      ]
    },
    {
      "parameters": {
        "url": "https://api.airtable.com/v0/...",
        "options": {},
        "headerParametersUi": {
          "parameter": [
            {
              "name": "Authorization",
              "value": "..."
            }
          ]
        },
        "queryParametersUi": {
          "parameter": [
            {
              "name": "maxRecords",
              "value": "10"
            },
            {
              "name": "sort[0][field]",
              "value": "Date"
            },
            {
              "name": "sort[0][direction]",
              "value": "asc"
            },
            {
              "name": "filterByFormula",
              "value": "='Date>\"{{$json.max}}\"'"
            }
          ]
        }
      },
      "name": "List survey HTTP",
      "type": "n8n-nodes-base.httpRequest",
      "typeVersion": 1,
      "position": [
        820,
        300
      ]
    },
    {
      "parameters": {
        "functionCode": "return items[0].json.records.map(function cb(x) {\n  return { \"json\": x }\n})"
      },
      "name": "parse json",
      "type": "n8n-nodes-base.function",
      "typeVersion": 1,
      "position": [
        1000,
        300
      ]
    },
    {
      "parameters": {
        "functionCode": "// Code here will run once per input item.\n// More info and help: https://docs.n8n.io/nodes/n8n-nodes-base.functionItem\n//console.log(item);\nitem = $node[\"parse json\"].json\nnew_item = {\n  \"created_at\": item.fields[\"Date\"],\n  \"email_sent_at\": item.fields[\"Date\"],\n  \"external_id\": item.id,\n  \"email\": (item.fields[\"index\"] || \"\").replace(/\\s/g, \"\"),\n  \"name\": (item.fields[\"Имя\"] || \"Нетинфо\").replace(/\\s/g, \"\"),\n  \"phone\": (item.fields[\"Телефон\"] || \"\").replace(/[^\\d]+/g, \"\"),\n  \"total\": item.fields[\"Результат общий\"],\n  \"food\": item.fields[\"Результат питание\"],\n  \"stress\": item.fields[\"Результат стресс\"],\n  \"sleep\": item.fields[\"Результат сон\"],\n  \"energy\": item.fields[\"Результат энергия\"],\n  \"activity\": item.fields[\"Результат активность\"],\n  \"attention\": item.fields[\"Результат внимание\"],\n  \"source\": item.fields[\"Откуда узнали 2\"] || \"\",\n  \"utm_source\": item.fields[\"utmSource\"] || \"\",\n  \"utm_medium\": item.fields[\"utmMedium\"] || \"\",\n  \"utm_content\": item.fields[\"utmContent\"] || \"\",\n  \"utm_campaign\": item.fields[\"Рекламная компания\"] || \"\",\n  \"json_result\": item.fields[\"SurveyResultJSON\"] || \"{}\",\n};\n\n//return `INSERT INTO wi_results (${Object.keys(new_item).sort().join(',')}) VALUES (${Object.keys(new_item).sort().map((x) => \"'\" + new_item[x] + \"'\").join(',')}) ON CONFLICT (external_id) DO NOTHING RETURNING id`;\n\nreturn new_item;"
      },
      "name": "Set fields",
      "type": "n8n-nodes-base.functionItem",
      "typeVersion": 1,
      "position": [
        1530,
        490
      ]
    },
    {
      "parameters": {
        "operation": "executeQuery",
        "query": "select max(created_at) from wi_results",
        "additionalFields": {}
      },
      "name": "max created_at",
      "type": "n8n-nodes-base.postgres",
      "typeVersion": 1,
      "position": [
        430,
        300
      ],
      "credentials": {
        "postgres": {
          "id": "6",
          "name": "wi_production pgBouncer"
        }
      }
    },
    {
      "parameters": {
        "functionCode": "// Code here will run once per input item.\n// More info and help: https://docs.n8n.io/nodes/n8n-nodes-base.functionItem\n\n// Add a new field called 'myNewField' to the JSON of the item\nif (!!!item.max) {\n  item.max = Date('December 17, 1995 03:24:00')\n}\n\n\nreturn item;"
      },
      "name": "set old max created_at",
      "type": "n8n-nodes-base.functionItem",
      "typeVersion": 1,
      "position": [
        630,
        300
      ]
    },
    {
      "parameters": {
        "operation": "executeQuery",
        "query": "=select * from wi_results where external_id = '{{$json[\"id\"]}}' limit 1",
        "additionalFields": {}
      },
      "name": "Find result by external_id",
      "type": "n8n-nodes-base.postgres",
      "typeVersion": 1,
      "position": [
        1160,
        300
      ],
      "alwaysOutputData": true,
      "credentials": {
        "postgres": {
          "id": "6",
          "name": "wi_production pgBouncer"
        }
      }
    },
    {
      "parameters": {
        "conditions": {
          "boolean": [
            {
              "value1": "={{ !!$json[\"id\"] > 0}}",
              "value2": true
            }
          ]
        }
      },
      "name": "Exists?",
      "type": "n8n-nodes-base.if",
      "typeVersion": 1,
      "position": [
        1330,
        300
      ],
      "alwaysOutputData": true
    },
    {
      "parameters": {},
      "name": "NoOp",
      "type": "n8n-nodes-base.noOp",
      "typeVersion": 1,
      "position": [
        1550,
        190
      ],
      "alwaysOutputData": true
    },
    {
      "parameters": {
        "table": "wi_results",
        "columns": "={{Object.keys($json).join(',')}}",
        "additionalFields": {}
      },
      "name": "Insert record",
      "type": "n8n-nodes-base.postgres",
      "typeVersion": 1,
      "position": [
        1730,
        490
      ],
      "credentials": {
        "postgres": {
          "id": "6",
          "name": "wi_production pgBouncer"
        }
      }
    }
  ],
  "connections": {
    "Start": {
      "main": [
        [
          {
            "node": "max created_at",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "List survey HTTP": {
      "main": [
        [
          {
            "node": "parse json",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "parse json": {
      "main": [
        [
          {
            "node": "Find result by external_id",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "max created_at": {
      "main": [
        [
          {
            "node": "set old max created_at",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "set old max created_at": {
      "main": [
        [
          {
            "node": "List survey HTTP",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Find result by external_id": {
      "main": [
        [
          {
            "node": "Exists?",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Exists?": {
      "main": [
        [
          {
            "node": "NoOp",
            "type": "main",
            "index": 0
          }
        ],
        [
          {
            "node": "Set fields",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Set fields": {
      "main": [
        [
          {
            "node": "Insert record",
            "type": "main",
            "index": 0
          }
        ]
      ]
    }
  },
  "active": false,
  "settings": {},
  "id": 1
}

Using the Split In Batches node with 1 item per batch and defining a loop back to it solved this.

1 Like

I think that if you use the "Execute Once: independently" mode in the Execute Query node, it will return as many items as the inputs you provided. You can try that out. That way, you might not need the Split In Batches node.

1 Like

Did that fix the issue?

No, only adding split in batches with loop helped.

1 Like