From ba8724876f782966c5d34d782178a4b5d2692c77 Mon Sep 17 00:00:00 2001
From: Thomas Bonnin <233326+TBonnin@users.noreply.github.com>
Date: Tue, 19 Mar 2024 15:54:11 -0400
Subject: [PATCH] fix: deleting lots of records (track_deletes=true)

Deleting records (track_deletes=true) currently involves copying records
from the snapshot table back into the records table. If there are a lot of
records to copy, the query fails because the number of bind parameters goes
beyond what Postgres supports, which I believe is 65k.

Fix this by inserting the records in chunks.
---
 .../lib/services/sync/data/delete.service.ts  | 21 ++++++++++++++-------
 1 file changed, 14 insertions(+), 7 deletions(-)

diff --git a/packages/shared/lib/services/sync/data/delete.service.ts b/packages/shared/lib/services/sync/data/delete.service.ts
index 598b5c757f..7014784247 100644
--- a/packages/shared/lib/services/sync/data/delete.service.ts
+++ b/packages/shared/lib/services/sync/data/delete.service.ts
@@ -46,13 +46,20 @@ export const getDeletedKeys = async (dbTable: string, uniqueKey: string, nangoCo
         };
     });
 
-    await schema()
-        .from(RECORDS_TABLE)
-        .where({
-            nango_connection_id: nangoConnectionId,
-            model
-        })
-        .insert(deletedResults);
+    // insert the deleted records back into the main table in batches of 1000
+    // to avoid exceeding Postgres's limit on bind parameters per query
+    await db.knex.transaction(async (trx) => {
+        const chunkSize = 1000;
+        for (let i = 0; i < deletedResults.length; i += chunkSize) {
+            await trx
+                .from(RECORDS_TABLE)
+                .where({
+                    nango_connection_id: nangoConnectionId,
+                    model
+                })
+                .insert(deletedResults.slice(i, i + chunkSize));
+        }
+    });
 
     return results.map((result: DataRecord) => result.external_id);
 };
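
Note (illustration only, not part of the patch): below is a minimal, self-contained sketch
of the same chunked-insert pattern, assuming a generic knex/Postgres setup. The table name,
row shape, and DATABASE_URL environment variable are hypothetical, not taken from the Nango
codebase.

// Sketch: chunked inserts inside one knex transaction, so no single INSERT
// exceeds Postgres's bind parameter limit. All names here are hypothetical.
import { knex } from 'knex';

const db = knex({ client: 'pg', connection: process.env['DATABASE_URL'] });

async function insertInChunks(table: string, rows: Record<string, unknown>[], chunkSize = 1000): Promise<void> {
    await db.transaction(async (trx) => {
        for (let i = 0; i < rows.length; i += chunkSize) {
            // each statement carries at most (chunkSize * number of columns) bind parameters
            await trx(table).insert(rows.slice(i, i + chunkSize));
        }
    });
}

// usage (hypothetical table and rows):
// await insertInChunks('some_records_table', largeArrayOfRows);

knex also ships a batchInsert utility that chunks rows in a similar way; wrapping the chunks
in an explicit transaction, as the patch does, keeps the insert all-or-nothing.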