From 3620feee2726d7c9c89600e6e9587d9129b4c051 Mon Sep 17 00:00:00 2001
From: Flavien David
Date: Thu, 28 Nov 2024 11:20:10 +0100
Subject: [PATCH 01/22] Add migration to update primary/foreign keys from int
 to BigInt.

---
 front/migrations/db/migrate_int_to_bigint.sql | 333 ++++++++++++++++++
 1 file changed, 333 insertions(+)
 create mode 100644 front/migrations/db/migrate_int_to_bigint.sql

diff --git a/front/migrations/db/migrate_int_to_bigint.sql b/front/migrations/db/migrate_int_to_bigint.sql
new file mode 100644
index 000000000000..68ee42fdef11
--- /dev/null
+++ b/front/migrations/db/migrate_int_to_bigint.sql
@@ -0,0 +1,333 @@
+-- Migration script to convert INT primary keys and foreign keys to BIGINT
+-- This script handles:
+-- 1. Finding all referencing tables
+-- 2. Adding new BIGINT columns to all affected tables
+-- 3. Setting up synchronization triggers
+-- 4. Backfilling data in batches
+--
+-- Usage: psql -v table_name=your_table -v batch_size=10000 -f migrate_int_to_bigint.sql
+----------------------------------------------------------------------------
+
+-- Enable notice output and stop on error
+\set ON_ERROR_STOP on
+\set VERBOSITY verbose
+
+-- Function to create the sync trigger function (local to the table, its name does not need to be unique)
+CREATE OR REPLACE FUNCTION create_bigint_sync_function(p_table_name text)
+RETURNS void AS $$
+BEGIN
+  EXECUTE format($func$
+    CREATE OR REPLACE FUNCTION %I_bigint_sync_trigger()
+    RETURNS TRIGGER AS $BODY$
+    BEGIN
+      NEW.new_id := NEW.id;
+      RETURN NEW;
+    END;
+    $BODY$ LANGUAGE plpgsql;
+    $func$,
+    p_table_name -- For function name
+  );
+END;
+$$ LANGUAGE plpgsql;
+
+-- Function to create the sync trigger
+CREATE OR REPLACE FUNCTION create_bigint_sync_trigger(p_table_name text)
+RETURNS void AS $$
+BEGIN
+  RAISE NOTICE 'Setting up triggers for table: %', p_table_name;
+
+  -- First drop if exists (will show notice)
+  EXECUTE format('DROP TRIGGER IF EXISTS bigint_sync ON %I', p_table_name);
+
+  -- Then create new trigger
+  EXECUTE format($trig$
+    CREATE TRIGGER bigint_sync
+    BEFORE INSERT OR UPDATE ON %I
+    FOR EACH ROW
+    EXECUTE FUNCTION %I_bigint_sync_trigger();
+    $trig$,
+    p_table_name, -- For ON table
+    p_table_name  -- For function name
+  );
+
+  RAISE NOTICE 'Completed trigger setup for table: %', p_table_name;
+END;
+$$ LANGUAGE plpgsql;
+
+-- Function to create the foreign key sync function
+CREATE OR REPLACE FUNCTION create_fk_sync_function(
+  p_table_name text,
+  p_fk_column text
+) RETURNS void AS $$
+BEGIN
+  EXECUTE format($func$
+    CREATE OR REPLACE FUNCTION %I_fk_sync_trigger()
+    RETURNS TRIGGER AS $TRIG$
+    BEGIN
+      NEW.%I := NEW.%I;
+      RETURN NEW;
+    END;
+    $TRIG$ LANGUAGE plpgsql;
+    $func$,
+    p_table_name,                  -- For function name
+    format('%s_new', p_fk_column), -- For new column
+    p_fk_column                    -- For old column
+  );
+END;
+$$ LANGUAGE plpgsql;
+
+-- Function to create the foreign key sync trigger
+CREATE OR REPLACE FUNCTION create_fk_sync_trigger(
+  p_table_name text
+) RETURNS void AS $$
+BEGIN
+  RAISE NOTICE 'Setting up FK triggers for table: %', p_table_name;
+
+  -- First drop if exists (will show notice)
+  EXECUTE format('DROP TRIGGER IF EXISTS fk_sync ON %I', p_table_name);
+
+  -- Then create new trigger (the template has two placeholders)
+  EXECUTE format($trig$
+    CREATE TRIGGER fk_sync
+    BEFORE INSERT OR UPDATE ON %I
+    FOR EACH ROW
+    EXECUTE FUNCTION %I_fk_sync_trigger();
+    $trig$,
+    p_table_name, -- For ON table
+    p_table_name  -- For function name
+  );
+
+  RAISE NOTICE 'Completed FK trigger setup for table: %', p_table_name;
+END;
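+  -- Illustrative expansion (a sketch; "messages" and "userId" are assumed
+  -- example names, not part of this migration): for a referencing table
+  -- "messages" with FK column "userId", the two helpers above generate
+  --   CREATE OR REPLACE FUNCTION messages_fk_sync_trigger() ...
+  --     NEW."userId_new" := NEW."userId"; ...
+  --   CREATE TRIGGER fk_sync BEFORE INSERT OR UPDATE ON messages
+  --   FOR EACH ROW EXECUTE FUNCTION messages_fk_sync_trigger();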
+$$ LANGUAGE plpgsql; + +-- Batching procedure - handles its own transactions +CREATE OR REPLACE PROCEDURE backfill_column_in_batches( + p_table_name text, -- Table to update + p_source_column text, -- Column to copy from + p_target_column text, -- Column to copy to + p_batch_size int -- Batch size +) AS $$ +DECLARE + v_max_id INTEGER; + v_current_id INTEGER := 0; +BEGIN + -- Get max id + EXECUTE format('SELECT COALESCE(MAX(id), 0) FROM %I', p_table_name) INTO v_max_id; + + RAISE NOTICE 'Starting backfill of %.% into %.%', + p_table_name, p_source_column, p_table_name, p_target_column; + + -- Process in batches + WHILE v_current_id <= v_max_id LOOP + -- Update batch + EXECUTE format( + 'UPDATE %I SET %I = %I + WHERE id > $1 AND id <= $2 + AND %I IS NULL', -- Only processes unprocessed rows + p_table_name, + p_target_column, + p_source_column, + p_target_column + ) USING v_current_id, v_current_id + p_batch_size; + + v_current_id := v_current_id + p_batch_size; + RAISE NOTICE 'Processed % up to id % of %', + p_table_name, v_current_id, v_max_id; + COMMIT; -- Each batch in its own transaction + END LOOP; +END; +$$ LANGUAGE plpgsql; + +-- Function to verify backfill progress +CREATE OR REPLACE FUNCTION check_migration_progress( + p_table_name text, + p_temp_table_name text +) RETURNS TABLE ( + table_name text, + total_rows bigint, + migrated_rows bigint, + progress_percentage numeric +) AS $$ +DECLARE + r RECORD; +BEGIN + -- Check main table + RETURN QUERY + EXECUTE format( + 'SELECT %L::text as table_name, + COUNT(*)::bigint as total_rows, + COUNT(new_id)::bigint as migrated_rows, + ROUND((COUNT(new_id)::numeric / COUNT(*)::numeric * 100), 2) as progress_percentage + FROM %I', + p_table_name, p_table_name + ); + + -- Check each referencing table + FOR r IN EXECUTE format('SELECT table_name, foreign_key_column FROM %s', p_temp_table_name) LOOP + RETURN QUERY + EXECUTE format( + 'SELECT %L::text as table_name, + COUNT(*)::bigint as total_rows, + COUNT(%I)::bigint as migrated_rows, + ROUND((COUNT(%I)::numeric / COUNT(*)::numeric * 100), 2) as progress_percentage + FROM %I', + r.table_name, -- table_name + format('%s_new', r.foreign_key_column), -- new column count + format('%s_new', r.foreign_key_column), -- new column ratio + r.table_name -- table name + ); + END LOOP; +END; +$$ LANGUAGE plpgsql; + + +-- 1. 
Setup Phase (DDL operations, auto-committing) +CREATE OR REPLACE FUNCTION setup_bigint_migration( + p_table_name text +) RETURNS text AS $$ +DECLARE + r RECORD; + v_temp_table_name text; + v_table_exists boolean; +BEGIN + -- Validate parameter + IF p_table_name IS NULL THEN + RAISE EXCEPTION 'Table name must be provided'; + END IF; + + -- First verify the table exists + EXECUTE format(' + SELECT EXISTS ( + SELECT 1 FROM information_schema.tables + WHERE table_name = %L + )', p_table_name) INTO v_table_exists; + + IF NOT v_table_exists THEN + RAISE EXCEPTION 'Table % does not exist', p_table_name; + END IF; + + -- Set temp table name + v_temp_table_name := format('%I_referencing_tables', p_table_name); + RAISE NOTICE 'Starting setup for table: %', p_table_name; + + -- Find and store all referencing tables + -- This query identifies all tables that have foreign keys pointing to our target table + -- We store them in a temporary table for easier processing + EXECUTE format(' + CREATE TEMP TABLE %s AS + SELECT DISTINCT + tc.table_schema, + tc.table_name, + kcu.column_name as foreign_key_column, + tc.constraint_name + FROM + information_schema.table_constraints AS tc + JOIN information_schema.key_column_usage AS kcu + ON tc.constraint_name = kcu.constraint_name + JOIN information_schema.constraint_column_usage AS ccu + ON ccu.constraint_name = tc.constraint_name + WHERE + tc.constraint_type = ''FOREIGN KEY'' + AND ccu.table_name = %L + AND ccu.column_name = ''id''', + v_temp_table_name, p_table_name + ); + + EXECUTE format('SELECT COUNT(*) FROM %s', v_temp_table_name) INTO r; + RAISE NOTICE 'Found % referencing tables', r; + + -- Add new columns + -- Add BIGINT column to main table + EXECUTE format('ALTER TABLE %I ADD COLUMN IF NOT EXISTS new_id BIGINT', p_table_name); + RAISE NOTICE 'Created new column in %', p_table_name; + + -- Add columns to referencing tables + FOR r IN EXECUTE format('SELECT * FROM %s', v_temp_table_name) LOOP + RAISE NOTICE 'Adding new column to %', r.table_name; + + EXECUTE format( + 'ALTER TABLE %I ADD COLUMN IF NOT EXISTS %I BIGINT', + r.table_name, + format('%s_new', r.foreign_key_column) + ); + END LOOP; + + -- Setup triggers + -- Setup trigger on main table to keep new_id in sync with id + PERFORM create_bigint_sync_function(p_table_name); + PERFORM create_bigint_sync_trigger(p_table_name); + + FOR r IN EXECUTE format('SELECT * FROM %s', v_temp_table_name) LOOP + RAISE NOTICE 'Creating trigger for %', r.table_name; + + -- Create FK sync function and trigger + PERFORM create_fk_sync_function(r.table_name, r.foreign_key_column); + PERFORM create_fk_sync_trigger(r.table_name); + END LOOP; + + -- Return the temp table name + RETURN v_temp_table_name; +END; +$$ LANGUAGE plpgsql; + + +-- 2. 
Backfill Phase (as a procedure to handle transactions) +CREATE OR REPLACE PROCEDURE backfill_bigint_migration( + p_table_name text, + p_temp_table_name text, + p_batch_size int +) AS $$ +DECLARE + r RECORD; +BEGIN + -- Validate parameters + IF p_batch_size IS NULL THEN + RAISE EXCEPTION 'Batch size must be provided'; + END IF; + + -- Backfill main table + CALL backfill_column_in_batches( + p_table_name, -- table name + 'id', -- source column + 'new_id', -- target column + p_batch_size -- batch size + ); + + -- Backfill referencing tables + FOR r IN EXECUTE format('SELECT * FROM %s', p_temp_table_name) LOOP + -- Process each referencing table in batches + CALL backfill_column_in_batches( + r.table_name, -- table name + r.foreign_key_column, -- source column + format('%s_new', r.foreign_key_column), -- target column + p_batch_size -- batch size + ); + END LOOP; +END; +$$ LANGUAGE plpgsql; + + +-- We don't use function here because we want to handle transactions. +-- The function rollbacks the entire transaction if an error occurs. + +-- 1. Setup (returns temp table name) +\echo 'Step 1: Setting up migration for table' :table_name +SELECT setup_bigint_migration(:'table_name') AS temp_table_name \gset + +-- Now we can use :temp_table_name in subsequent commands +\echo 'Using temp table:' :temp_table_name + +-- 2. Backfill (handles its own transactions) +\echo 'Step 2: Starting backfill' +CALL backfill_bigint_migration(:'table_name', :'temp_table_name', :batch_size); + +-- 3. Verify results +SELECT * FROM check_migration_progress(:'table_name', :'temp_table_name'); + +-- 3. Finalize +-- SELECT finalize_bigint_migration('users', 'users_referencing_tables'); + +-- Cleanup +DROP TABLE IF EXISTS :temp_table_name; From 5b9bf4293a95c1a9e4bbcb07a0bb1e99f53458c2 Mon Sep 17 00:00:00 2001 From: Flavien David Date: Fri, 29 Nov 2024 09:29:46 +0100 Subject: [PATCH 02/22] Better structure steps in script --- front/migrations/db/migrate_int_to_bigint.sql | 201 ++++++++++++------ 1 file changed, 135 insertions(+), 66 deletions(-) diff --git a/front/migrations/db/migrate_int_to_bigint.sql b/front/migrations/db/migrate_int_to_bigint.sql index 68ee42fdef11..fa9d828b3c56 100644 --- a/front/migrations/db/migrate_int_to_bigint.sql +++ b/front/migrations/db/migrate_int_to_bigint.sql @@ -143,7 +143,7 @@ $$ LANGUAGE plpgsql; -- Function to verify backfill progress CREATE OR REPLACE FUNCTION check_migration_progress( p_table_name text, - p_temp_table_name text + p_refs jsonb ) RETURNS TABLE ( table_name text, total_rows bigint, @@ -165,7 +165,8 @@ BEGIN ); -- Check each referencing table - FOR r IN EXECUTE format('SELECT table_name, foreign_key_column FROM %s', p_temp_table_name) LOOP + FOR r IN SELECT * FROM jsonb_array_elements(p_refs) AS t(ref) + LOOP RETURN QUERY EXECUTE format( 'SELECT %L::text as table_name, @@ -173,24 +174,24 @@ BEGIN COUNT(%I)::bigint as migrated_rows, ROUND((COUNT(%I)::numeric / COUNT(*)::numeric * 100), 2) as progress_percentage FROM %I', - r.table_name, -- table_name - format('%s_new', r.foreign_key_column), -- new column count - format('%s_new', r.foreign_key_column), -- new column ratio - r.table_name -- table name + r.ref->>'table_name', -- table_name + format('%s_new', r.ref->>'foreign_key_column'), -- new column count + format('%s_new', r.ref->>'foreign_key_column'), -- new column ratio + r.ref->>'table_name' -- table name ); END LOOP; END; $$ LANGUAGE plpgsql; - -- 1. 
Setup Phase (DDL operations, auto-committing) CREATE OR REPLACE FUNCTION setup_bigint_migration( p_table_name text -) RETURNS text AS $$ +) RETURNS jsonb AS $$ DECLARE r RECORD; - v_temp_table_name text; + v_referencing_tables jsonb; v_table_exists boolean; + v_count int; BEGIN -- Validate parameter IF p_table_name IS NULL THEN @@ -208,35 +209,43 @@ BEGIN RAISE EXCEPTION 'Table % does not exist', p_table_name; END IF; - -- Set temp table name - v_temp_table_name := format('%I_referencing_tables', p_table_name); RAISE NOTICE 'Starting setup for table: %', p_table_name; -- Find and store all referencing tables -- This query identifies all tables that have foreign keys pointing to our target table -- We store them in a temporary table for easier processing + -- Get referencing tables info EXECUTE format(' - CREATE TEMP TABLE %s AS - SELECT DISTINCT - tc.table_schema, - tc.table_name, - kcu.column_name as foreign_key_column, - tc.constraint_name - FROM - information_schema.table_constraints AS tc - JOIN information_schema.key_column_usage AS kcu - ON tc.constraint_name = kcu.constraint_name - JOIN information_schema.constraint_column_usage AS ccu - ON ccu.constraint_name = tc.constraint_name - WHERE - tc.constraint_type = ''FOREIGN KEY'' - AND ccu.table_name = %L - AND ccu.column_name = ''id''', - v_temp_table_name, p_table_name - ); - - EXECUTE format('SELECT COUNT(*) FROM %s', v_temp_table_name) INTO r; - RAISE NOTICE 'Found % referencing tables', r; + SELECT jsonb_agg( + jsonb_build_object( + ''schema'', table_schema, + ''table_name'', table_name, + ''foreign_key_column'', foreign_key_column, + ''constraint_name'', constraint_name + ) + ) + FROM ( + SELECT DISTINCT + tc.table_schema, + tc.table_name, + kcu.column_name as foreign_key_column, + tc.constraint_name + FROM + information_schema.table_constraints AS tc + JOIN information_schema.key_column_usage AS kcu + ON tc.constraint_name = kcu.constraint_name + JOIN information_schema.constraint_column_usage AS ccu + ON ccu.constraint_name = tc.constraint_name + WHERE + tc.constraint_type = ''FOREIGN KEY'' + AND ccu.table_name = %L + AND ccu.column_name = ''id'' + ) t', + p_table_name + ) INTO v_referencing_tables; + + SELECT jsonb_array_length(COALESCE(v_referencing_tables, '[]'::jsonb)) INTO v_count; + RAISE NOTICE 'Found % referencing tables', v_count; -- Add new columns -- Add BIGINT column to main table @@ -244,13 +253,14 @@ BEGIN RAISE NOTICE 'Created new column in %', p_table_name; -- Add columns to referencing tables - FOR r IN EXECUTE format('SELECT * FROM %s', v_temp_table_name) LOOP - RAISE NOTICE 'Adding new column to %', r.table_name; + FOR r IN SELECT * FROM jsonb_array_elements(v_referencing_tables) AS t(ref) + LOOP + RAISE NOTICE 'Adding new column to %', r.ref->>'table_name'; EXECUTE format( 'ALTER TABLE %I ADD COLUMN IF NOT EXISTS %I BIGINT', - r.table_name, - format('%s_new', r.foreign_key_column) + r.ref->>'table_name', + format('%s_new', r.ref->>'foreign_key_column') ); END LOOP; @@ -259,16 +269,17 @@ BEGIN PERFORM create_bigint_sync_function(p_table_name); PERFORM create_bigint_sync_trigger(p_table_name); - FOR r IN EXECUTE format('SELECT * FROM %s', v_temp_table_name) LOOP - RAISE NOTICE 'Creating trigger for %', r.table_name; + FOR r IN SELECT * FROM jsonb_array_elements(v_referencing_tables) AS t(ref) + LOOP + RAISE NOTICE 'Creating trigger for %', r.ref->>'table_name'; -- Create FK sync function and trigger - PERFORM create_fk_sync_function(r.table_name, r.foreign_key_column); - PERFORM 
create_fk_sync_trigger(r.table_name); + PERFORM create_fk_sync_function(r.ref->>'table_name', r.ref->>'foreign_key_column'); + PERFORM create_fk_sync_trigger(r.ref->>'table_name'); END LOOP; - -- Return the temp table name - RETURN v_temp_table_name; + -- Return the referencing tables info + RETURN v_referencing_tables; END; $$ LANGUAGE plpgsql; @@ -276,7 +287,7 @@ $$ LANGUAGE plpgsql; -- 2. Backfill Phase (as a procedure to handle transactions) CREATE OR REPLACE PROCEDURE backfill_bigint_migration( p_table_name text, - p_temp_table_name text, + p_refs jsonb, p_batch_size int ) AS $$ DECLARE @@ -296,13 +307,14 @@ BEGIN ); -- Backfill referencing tables - FOR r IN EXECUTE format('SELECT * FROM %s', p_temp_table_name) LOOP + FOR r IN SELECT * FROM jsonb_array_elements(p_refs) AS t(ref) + LOOP -- Process each referencing table in batches CALL backfill_column_in_batches( - r.table_name, -- table name - r.foreign_key_column, -- source column - format('%s_new', r.foreign_key_column), -- target column - p_batch_size -- batch size + r.ref->>'table_name', -- table name + r.ref->>'foreign_key_column', -- source column + format('%s_new', r.ref->>'foreign_key_column'), -- target column + p_batch_size -- batch size ); END LOOP; END; @@ -312,22 +324,79 @@ $$ LANGUAGE plpgsql; -- We don't use function here because we want to handle transactions. -- The function rollbacks the entire transaction if an error occurs. --- 1. Setup (returns temp table name) -\echo 'Step 1: Setting up migration for table' :table_name -SELECT setup_bigint_migration(:'table_name') AS temp_table_name \gset - --- Now we can use :temp_table_name in subsequent commands -\echo 'Using temp table:' :temp_table_name +-- Validate the inputs. +\if :{?step} +\else + \echo 'Error: step parameter is required' + \quit +\endif + +\if :{?table_name} +\else + \echo 'Error: table_name parameter is required' + \quit +\endif + +\if :{?batch_size} +\else + \echo 'Error: batch_size parameter is required' + \quit +\endif + +-- Main execution flow based on step + +-- Create status table if it doesn't exist +CREATE TABLE IF NOT EXISTS bigint_migration_status ( + table_name text PRIMARY KEY, + setup_completed timestamptz, + backfill_completed timestamptz, + cutover_completed timestamptz, + referencing_tables jsonb -- Store referencing tables info as JSON +); --- 2. Backfill (handles its own transactions) -\echo 'Step 2: Starting backfill' -CALL backfill_bigint_migration(:'table_name', :'temp_table_name', :batch_size); - --- 3. Verify results -SELECT * FROM check_migration_progress(:'table_name', :'temp_table_name'); - --- 3. Finalize --- SELECT finalize_bigint_migration('users', 'users_referencing_tables'); - --- Cleanup -DROP TABLE IF EXISTS :temp_table_name; +-- 1. Setup (returns temp table name) +\set step_is_setup `echo :step | grep -q '^setup$' && echo 1 || echo 0` +\if :step_is_setup + -- Run setup and store result + INSERT INTO bigint_migration_status (table_name, referencing_tables, setup_completed) + VALUES ( + :'table_name', + (SELECT setup_bigint_migration(:'table_name')), + NOW() + ) + ON CONFLICT (table_name) DO UPDATE + SET referencing_tables = EXCLUDED.referencing_tables, + setup_completed = NOW(); + + \echo 'Setup completed. You can now run the backfill step.' 
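+
+  -- Invocation sketch: combining the usage line at the top of this file with
+  -- the new step variable, the setup step would be run as
+  --   psql -v step=setup -v table_name=your_table -v batch_size=10000 \
+  --     -f migrate_int_to_bigint.sql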
+\endif + +-- Backfill Phase +\set step_is_backfill `echo :step | grep -q '^backfill$' && echo 1 || echo 0` +\if :step_is_backfill + \echo 'Step 2: Starting backfill' + + -- Get temp table name directly in the current connection + \gset + SELECT + referencing_tables::text as refs + FROM bigint_migration_status + WHERE table_name = :'table_name' \gset + + \if :{?refs} + -- Run backfill + CALL backfill_bigint_migration(:'table_name', :'refs'::jsonb, :batch_size); + + -- Update status + UPDATE bigint_migration_status + SET backfill_completed = NOW() + WHERE table_name = :'table_name'; + + -- Show progress + SELECT * FROM check_migration_progress(:'table_name', :'refs'::jsonb); + \echo 'Backfill completed. You can now run the cutover step.' + \else + \echo 'Error: Setup step must be completed first' + \quit + \endif +\endif \ No newline at end of file From 6be0af171810fa40a165a12c1f3993bfd61e7ef3 Mon Sep 17 00:00:00 2001 From: Flavien David Date: Fri, 29 Nov 2024 18:09:32 +0100 Subject: [PATCH 03/22] Tmp --- .../20241129_migrate_int_to_bigint.ts | 1238 +++++++++++++++++ front/package-lock.json | 513 ++++++- front/package.json | 4 +- 3 files changed, 1747 insertions(+), 8 deletions(-) create mode 100644 front/migrations/20241129_migrate_int_to_bigint.ts diff --git a/front/migrations/20241129_migrate_int_to_bigint.ts b/front/migrations/20241129_migrate_int_to_bigint.ts new file mode 100644 index 000000000000..0f8856cd958d --- /dev/null +++ b/front/migrations/20241129_migrate_int_to_bigint.ts @@ -0,0 +1,1238 @@ +import type { PoolClient } from "pg"; +import { Pool } from "pg"; + +import logger from "@app/logger/logger"; +import { makeScript } from "@app/scripts/helpers"; +import chalk from "chalk"; + +// Types. +interface MigrationConfig { + tableName: string; + schemaName?: string; + batchSize: number; + timeoutSeconds?: number; + dryRun: boolean; +} + +interface CompositeIndex { + indexName: string; + columns: string[]; + isUnique: boolean; +} + +interface ReferencingTable { + schema: string; + tableName: string; + foreignKeyColumn: string; + constraintName: string; +} + +interface MigrationProgress { + tableName: string; + totalRows: number; + migratedRows: number; + progressPercentage: number; +} + +interface MainTableSwitchConfig { + currentColumn: string; // e.g., 'id' + newColumn: string; // e.g., 'new_id' + legacyColumn: string; // e.g., 'id_legacy' + sequenceName: string; // e.g., 'table_id_seq' + indexName: string; // e.g., 'table_pkey_bigint' + triggerName: string; // e.g., 'bigint_sync' + triggerFunction: string; // e.g., 'table_bigint_sync_trigger' +} + +interface ReferencingColumnSwitchConfig { + currentColumn: string; // e.g., "groupId" + newColumn: string; // e.g., "groupId_new" + legacyColumn: string; // e.g., "groupId_legacy" + constraintName: string; // e.g., "group_memberships_groupId_fkey" +} + +const MigrationSteps = [ + "setup", + "backfill", + "pre-cutover", + "cutover", + "rollback", +] as const; +type MigrationStepType = (typeof MigrationSteps)[number]; + +const MAIN_TABLE_NEW_COLUMN = "new_id"; +const MAIN_TABLE_LEGACY_COLUMN = "id_legacy"; + +const makeNewForeignKeyColumn = (column: string) => `${column}_new`; + +const makeNewIndexName = ( + indexName: string, + { isPrimary }: { isPrimary: boolean } +) => `${indexName}_${isPrimary ? "pkey" : "fk"}_bigint`; + +const makeTriggerName = ({ isLegacy }: { isLegacy: boolean }) => + isLegacy ? 
"legacy_sync" : "bigint_sync"; + +const makeTriggerFunctionName = ( + tableName: string, + { isLegacy }: { isLegacy: boolean } +) => `${tableName}_${isLegacy ? "legacy_sync_trigger" : "bigint_sync_trigger"}`; + +/** + * Converts Postgres array string to JS array + * Example: '{userId,groupId}' -> ['userId', 'groupId'] + */ +function parsePostgresArray(arrayString: string): string[] { + return arrayString + .replace(/^\{|\}$/g, "") + .split(",") + .map((s) => s.trim()); +} + +// Errors. + +class MigrationError extends Error { + constructor( + message: string, + public step: string, + public cause?: Error + ) { + super(message); + this.name = "MigrationError"; + } +} + +// Migration. + +class IntToBigIntMigration { + private pool: Pool; + private config: Required; + + constructor(connectionString: string, config: MigrationConfig) { + this.pool = new Pool({ connectionString }); + this.config = { + schemaName: "public", + timeoutSeconds: 3600, + ...config, + }; + } + + async execute(step: MigrationStepType): Promise { + try { + switch (step) { + case "setup": + await this.setup(); + break; + + case "backfill": + await this.backfill(); + break; + + case "pre-cutover": + await this.prepareCutover(); + break; + + case "cutover": + await this.cutover(); + break; + + case "rollback": + await this.rollback(); + break; + + default: + throw new Error(`Unknown step: ${step}`); + } + } catch (error) { + throw new MigrationError( + `Migration failed during ${step}`, + step, + error instanceof Error ? error : undefined + ); + } + } + + // Setup Phase + private async setup(): Promise { + const client = await this.pool.connect(); + try { + // Verify table we want to migrate exists + const tableExists = await this.verifyTableExists(client); + if (!tableExists) { + throw new Error(`Table ${this.config.tableName} does not exist`); + } + + // Find referencing tables + const referencingTables = await this.findReferencingTables(client); + console.log(`Found ${referencingTables.length} referencing tables`); + + // Add new columns + await this.addNewColumns(client, referencingTables); + + // Setup triggers + await this.setupTriggers(client, referencingTables); + } finally { + client.release(); + } + } + + // Backfill Phase + private async backfill(): Promise { + const client = await this.pool.connect(); + try { + // Find referencing tables + const referencingTables = await this.findReferencingTables(client); + console.log(`Found ${referencingTables.length} referencing tables`); + + // Backfill main table + await this.backfillTable( + client, + this.config.tableName, + "id", + MAIN_TABLE_NEW_COLUMN + ); + + // Backfill referencing tables + for (const ref of referencingTables) { + await this.backfillTable( + client, + ref.tableName, + ref.foreignKeyColumn, + makeNewForeignKeyColumn(ref.foreignKeyColumn) + ); + } + + // Show progress + await this.showProgress(client, referencingTables); + } finally { + client.release(); + } + } + + // Pre-cutover phase + + private async prepareCutover(): Promise { + const client = await this.pool.connect(); + + try { + // Find referencing tables + const referencingTables = await this.findReferencingTables(client); + console.log(`Found ${referencingTables.length} referencing tables`); + + // Verify backfill is complete + const progress = await this.checkProgress(client, referencingTables); + if (!this.isMigrationComplete(progress) && !this.config.dryRun) { + throw new Error("Cannot prepare cutove - backfill is not complete"); + } + + // Create indexes and constraints now that data is 
ready + console.log( + chalk.yellow(`[Pre-Cutover] Creating indexes and constraints`) + ); + + // Create indexes concurrently. + await this.createIndexes(client, referencingTables); + + console.log( + chalk.green( + `[Pre-Cutover] Successfully created all indexes and constraints` + ) + ); + } finally { + client.release(); + } + } + + // Cutover Phase + private async cutover(): Promise { + const client = await this.pool.connect(); + + try { + // Find referencing tables + const referencingTables = await this.findReferencingTables(client); + console.log(`Found ${referencingTables.length} referencing tables`); + + // Use one global transaction for all table switches. + await this.executeSql(client, "BEGIN"); + + // Switch referencing table first as we can't switch main table with active FK constraints. + for (const ref of referencingTables) { + await this.switchReferencingTable(client, ref, { + currentColumn: ref.foreignKeyColumn, + newColumn: makeNewForeignKeyColumn(ref.foreignKeyColumn), + legacyColumn: `${ref.foreignKeyColumn}_legacy`, + constraintName: ref.constraintName, + }); + } + + // Switch main table. + await this.switchMainTable(client, { + currentColumn: "id", + newColumn: MAIN_TABLE_NEW_COLUMN, + legacyColumn: MAIN_TABLE_LEGACY_COLUMN, + sequenceName: `${this.config.tableName}_id_seq`, + indexName: makeNewIndexName(this.config.tableName, { isPrimary: true }), + triggerName: makeTriggerName({ isLegacy: false }), + triggerFunction: makeTriggerFunctionName(this.config.tableName, { + isLegacy: false, + }), + }); + + // Setup legacy sync triggers for the main table and referencing tables. + await this.setupTriggers(client, referencingTables, { isLegacy: true }); + + await this.executeSql(client, "COMMIT"); + console.log( + chalk.green(`[Switch] Successfully completed all table switches`) + ); + } catch (error) { + await this.executeSql(client, "ROLLBACK"); + throw new Error( + `Failed to switch columns: ${error instanceof Error ? error.message : "Unknown error"}` + ); + } + } + + // Rollback Phase + + private async rollback(): Promise { + const client = await this.pool.connect(); + + try { + // Find referencing tables + const referencingTables = await this.findReferencingTables(client); + console.log(`Found ${referencingTables.length} referencing tables`); + + // Use one global transaction for all table switches. + await this.executeSql(client, "BEGIN"); + + // Switch referencing table first as we can't switch main table with active FK constraints. + for (const ref of referencingTables) { + console.log(">> currentColon", ref.foreignKeyColumn); + console.log(">> constraintName", ref.constraintName); + + await this.switchReferencingTable( + client, + ref, + { + currentColumn: ref.foreignKeyColumn, + newColumn: `${ref.foreignKeyColumn}_temp`, + legacyColumn: `${ref.foreignKeyColumn}_legacy`, + constraintName: ref.constraintName, + }, + { isRollback: true } + ); + } + + // Switch main table. 
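+      // A sketch of what the isRollback branch of switchMainTable below does:
+      // convert the sequence back to integer, set the bigint "id" aside,
+      // rename the preserved "id_legacy" column back to "id", re-attach the
+      // original primary key, and drop the bigint sync trigger/function.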
+ await this.switchMainTable( + client, + { + currentColumn: "id", + newColumn: MAIN_TABLE_NEW_COLUMN, + legacyColumn: MAIN_TABLE_LEGACY_COLUMN, + sequenceName: `${this.config.tableName}_id_seq`, + indexName: makeNewIndexName(this.config.tableName, { + isPrimary: true, + }), + triggerName: makeTriggerName({ isLegacy: false }), + triggerFunction: makeTriggerFunctionName(this.config.tableName, { + isLegacy: false, + }), + }, + { isRollback: true } + ); + + await this.executeSql(client, "COMMIT"); + console.log( + chalk.green(`[Switch] Successfully completed all table switches`) + ); + } catch (error) { + await this.executeSql(client, "ROLLBACK"); + throw new Error( + `Failed to switch columns: ${error instanceof Error ? error.message : "Unknown error"}` + ); + } + } + + // Helper methods + + private async verifyTableExists(client: PoolClient): Promise { + const { rows } = await this.executeSql<{ exists: boolean }>( + client, + ` + SELECT EXISTS ( + SELECT 1 FROM information_schema.tables + WHERE table_schema = $1 AND table_name = $2 + ) + `, + [this.config.schemaName, this.config.tableName] + ); + + return rows[0].exists; + } + + private async findReferencingTables( + client: PoolClient + ): Promise { + const { rows } = await this.executeSql( + client, + ` + SELECT DISTINCT + tc.table_schema as schema, + tc.table_name as "tableName", + kcu.column_name as "foreignKeyColumn", + tc.constraint_name as "constraintName" + FROM information_schema.table_constraints tc + JOIN information_schema.key_column_usage kcu + ON tc.constraint_name = kcu.constraint_name + JOIN information_schema.constraint_column_usage ccu + ON ccu.constraint_name = tc.constraint_name + WHERE tc.constraint_type = 'FOREIGN KEY' + AND ccu.table_name = $1 + AND ccu.column_name = 'id' + `, + [this.config.tableName] + ); + + return rows; + } + + private async addNewColumns( + client: PoolClient, + referencingTables: ReferencingTable[] + ): Promise { + // Add MAIN_TABLE_NEW_COLUMN column to main table + await this.executeSql( + client, + ` + ALTER TABLE ${this.config.tableName} + ADD COLUMN IF NOT EXISTS "${MAIN_TABLE_NEW_COLUMN}" BIGINT + ` + ); + console.log( + `Added ${MAIN_TABLE_NEW_COLUMN} column to ${this.config.tableName}` + ); + + // Add new columns to referencing tables + for (const ref of referencingTables) { + await this.executeSql( + client, + // We need double quote to escape the camel case column name + ` + ALTER TABLE ${ref.tableName} + ADD COLUMN IF NOT EXISTS "${makeNewForeignKeyColumn(ref.foreignKeyColumn)}" BIGINT + ` + ); + console.log( + `Added ${makeNewForeignKeyColumn(ref.foreignKeyColumn)} column to ${ref.tableName}` + ); + } + } + + private async setupTriggers( + client: PoolClient, + referencingTables: ReferencingTable[], + opts: { isLegacy?: boolean } = {} + ): Promise { + await this.createPKSyncTrigger(client, this.config.tableName, opts); + for (const ref of referencingTables) { + await this.createFKSyncTrigger(client, ref, opts); + } + } + + private async createPKSyncTrigger( + client: PoolClient, + tableName: string, + { isLegacy = false }: { isLegacy?: boolean } = {} + ): Promise { + const triggerName = makeTriggerName({ isLegacy }); + const sourceCol = "id"; + const targetCol = isLegacy + ? 
MAIN_TABLE_LEGACY_COLUMN + : MAIN_TABLE_NEW_COLUMN; + + await this.executeSql( + client, + `DROP TRIGGER IF EXISTS ${triggerName} ON ${tableName}` + ); + + await this.executeSql( + client, + ` + CREATE OR REPLACE FUNCTION ${tableName}_${triggerName}_trigger() + RETURNS TRIGGER AS $$ + BEGIN + NEW.${targetCol} := NEW.${sourceCol}; + RETURN NEW; + END; + $$ LANGUAGE plpgsql + ` + ); + + await this.executeSql( + client, + ` + CREATE TRIGGER ${triggerName} + BEFORE INSERT OR UPDATE ON ${tableName} + FOR EACH ROW + EXECUTE FUNCTION ${tableName}_${triggerName}_trigger() + ` + ); + + console.log(`Created ${triggerName} trigger on ${tableName}`); + } + + private async createFKSyncTrigger( + client: PoolClient, + ref: ReferencingTable, + { isLegacy = false }: { isLegacy?: boolean } = {} + ): Promise { + const triggerName = isLegacy ? "legacy_fk_sync" : "fk_sync"; + const sourceCol = ref.foreignKeyColumn; + const targetCol = isLegacy + ? `${ref.foreignKeyColumn}_legacy` + : makeNewForeignKeyColumn(ref.foreignKeyColumn); + + console.log( + chalk.blue( + `[Trigger] Creating ${chalk.bold(triggerName)} for ${chalk.bold(ref.tableName)}` + ) + ); + + await this.executeSql( + client, + `DROP TRIGGER IF EXISTS ${triggerName} ON ${ref.tableName}` + ); + + console.log(chalk.dim(`• Dropped existing trigger if present`)); + + await this.executeSql( + client, + ` + CREATE OR REPLACE FUNCTION ${ref.tableName}_${triggerName}_trigger() + RETURNS TRIGGER AS $$ + BEGIN + NEW."${targetCol}" := NEW."${sourceCol}"; + RETURN NEW; + END; + $$ LANGUAGE plpgsql + ` + ); + + console.log(chalk.dim(`• Created trigger function`)); + + await this.executeSql( + client, + ` + CREATE TRIGGER ${triggerName} + BEFORE INSERT OR UPDATE ON ${ref.tableName} + FOR EACH ROW + EXECUTE FUNCTION ${ref.tableName}_${triggerName}_trigger() + ` + ); + console.log(chalk.dim(`• Created trigger`)); + + console.log( + chalk.green( + `[Trigger] Completed ${chalk.bold(triggerName)} setup for ${chalk.bold(ref.tableName)}` + ) + ); + } + + private async backfillTable( + client: PoolClient, + tableName: string, + sourceColumn: string, + targetColumn: string + ): Promise { + let currentId = 0; + + const { rows } = await this.executeSql<{ max_id: number }>( + client, + ` + SELECT COALESCE(MAX(id), 0) AS max_id FROM ${tableName} + ` + ); + + const maxId = rows[0].max_id; + + while (currentId <= maxId) { + await this.executeSql( + client, + ` + UPDATE ${tableName} + SET "${targetColumn}" = "${sourceColumn}" + WHERE id > $1 AND id <= $2 + AND "${targetColumn}" IS NULL + `, + [currentId, currentId + this.config.batchSize] + ); + + currentId = currentId + this.config.batchSize; + + console.log( + chalk.blue( + `[Progress] ${chalk.bold(tableName)}: ${currentId}/${maxId} (${Math.round((currentId / maxId) * 100)}%) - batch: ${this.config.batchSize}` + ) + ); + } + } + + private async findCompositeIndexes( + client: PoolClient, + tableName: string, + columnName: string + ): Promise { + const { rows } = await this.executeSql<{ + index_name: string; + column_names: string; + is_unique: boolean; + }>( + client, + ` + SELECT + i.relname as index_name, + ix.indisunique as is_unique, + array_agg(a.attname ORDER BY k.ordering) as column_names + FROM pg_class t + JOIN pg_index ix ON t.oid = ix.indrelid + JOIN pg_class i ON ix.indexrelid = i.oid + JOIN pg_attribute a ON t.oid = a.attrelid + JOIN LATERAL unnest(ix.indkey) WITH ORDINALITY AS k(attnum, ordering) + ON a.attnum = k.attnum + WHERE t.relname = $1 + AND t.relkind = 'r' + AND ix.indisprimary = false + GROUP BY 
i.relname, ix.indisunique
+      HAVING
+        array_length(array_agg(a.attname), 1) > 1
+        AND EXISTS (
+          SELECT 1
+          FROM unnest(array_agg(a.attname)) as cols
+          WHERE cols = $2
+        )
+      `,
+      [tableName, columnName]
+    );
+
+    console.log(
+      chalk.blue(`[Composite Indexes] Found ${chalk.bold(rows.length)}:`)
+    );
+    rows.forEach((r) =>
+      console.log(
+        chalk.magenta(
+          `  • ${chalk.bold(r.index_name)} (${r.column_names})${r.is_unique ? " UNIQUE" : ""}`
+        )
+      )
+    );
+
+    return rows.map((row) => ({
+      indexName: row.index_name,
+      columns: parsePostgresArray(row.column_names),
+      isUnique: row.is_unique,
+    }));
+  }
+
+  private async createCompositeIndexes(
+    client: PoolClient,
+    tableName: string,
+    oldColumn: string,
+    newColumn: string
+  ): Promise<void> {
+    const compositeIndexes = await this.findCompositeIndexes(
+      client,
+      tableName,
+      oldColumn
+    );
+
+    for (const index of compositeIndexes) {
+      // Create new index name
+      const newIndexName = `${index.indexName}_bigint`;
+
+      // Replace old column with new column in the columns array
+      // (quote both so camelCase column names survive).
+      const newColumns = index.columns.map((col) =>
+        col === oldColumn ? `"${newColumn}"` : `"${col}"`
+      );
+
+      await this.executeSql(
+        client,
+        `
+        CREATE ${index.isUnique ? "UNIQUE" : ""} INDEX CONCURRENTLY
+        IF NOT EXISTS "${newIndexName}"
+        ON ${tableName}(${newColumns.join(", ")});
+        `
+      );
+
+      await this.waitForIndex(client, newIndexName);
+    }
+  }
+
+  private async createIndexes(
+    client: PoolClient,
+    referencingTables: ReferencingTable[]
+  ): Promise<void> {
+    console.log(
+      chalk.yellow(
+        `[Indexes] Creating for table: ${chalk.bold(this.config.tableName)}`
+      )
+    );
+
+    // Create PK index
+    await this.createAndWaitForIndex(
+      client,
+      this.config.tableName,
+      MAIN_TABLE_NEW_COLUMN,
+      makeNewIndexName(this.config.tableName, { isPrimary: true }),
+      { isPrimary: true }
+    );
+
+    // Create composite indexes for the main table
+    await this.createCompositeIndexes(
+      client,
+      this.config.tableName,
+      "id", // Original column
+      MAIN_TABLE_NEW_COLUMN
+    );
+
+    // Create FK indexes and their composite indexes
+    for (const ref of referencingTables) {
+      console.log(
+        chalk.yellow(
+          `[Indexes] Creating for table: ${chalk.bold(ref.tableName)}`
+        )
+      );
+
+      const newFKColumn = makeNewForeignKeyColumn(ref.foreignKeyColumn);
+
+      // Create the basic FK index
+      await this.createAndWaitForIndex(
+        client,
+        ref.tableName,
+        newFKColumn,
+        makeNewIndexName(`${ref.tableName}_${ref.foreignKeyColumn}`, {
+          isPrimary: false,
+        }),
+        { isPrimary: false }
+      );
+
+      // Create foreign key constraint.
+      await this.createForeignKeyConstraints(client, ref);
+
+      // Create composite indexes for this referencing table
+      await this.createCompositeIndexes(
+        client,
+        ref.tableName,
+        ref.foreignKeyColumn,
+        newFKColumn
+      );
+    }
+  }
+
+  private async createAndWaitForIndex(
+    client: PoolClient,
+    tableName: string,
+    columnName: string,
+    indexName: string,
+    { isPrimary }: { isPrimary: boolean }
+  ): Promise<void> {
+    // Only primary key indexes are unique.
+    await this.executeSql(
+      client,
+      `
+      CREATE ${isPrimary ? "UNIQUE" : ""} INDEX CONCURRENTLY
+      IF NOT EXISTS "${indexName}" ON ${tableName}("${columnName}");
+      `
+    );
+
+    await this.waitForIndex(client, indexName);
+
+    if (isPrimary) {
+      const constraintName = `${tableName}_not_null_bigint`;
+
+      try {
+        // We can't use `IF NOT EXISTS` for constraints with CHECK, so we need to catch the error.
+        await this.executeSql(
+          client,
+          `
+          ALTER TABLE ${tableName}
+          ADD CONSTRAINT ${constraintName}
+          CHECK ("${columnName}" IS NOT NULL) NOT VALID;
+          `
+        );
+      } catch (error) {
+        if (
+          error instanceof Error &&
+          !error.message.includes("already exists")
+        ) {
+          throw error;
+        }
+      }
+
+      await this.executeSql(
+        client,
+        `
+        ALTER TABLE ${tableName} VALIDATE CONSTRAINT ${constraintName};
+        `
+      );
+    }
+  }
+
+  private async waitForIndex(
+    client: PoolClient,
+    indexName: string
+  ): Promise<void> {
+    const startTime = Date.now();
+
+    if (this.config.dryRun) {
+      console.log(
+        chalk.yellow(
+          `[Index] Would wait for ${chalk.bold(indexName)} in dry-run mode`
+        )
+      );
+      return;
+    }
+
+    console.log(
+      chalk.blue(`[Index] Waiting for ${chalk.bold(indexName)} to be ready`)
+    );
+
+    while (true) {
+      const { rows } = await this.executeSql<{
+        indisready: boolean;
+        indisvalid: boolean;
+      }>(
+        client,
+        `
+        SELECT indisready, indisvalid
+        FROM pg_class c
+        JOIN pg_index i ON i.indexrelid = c.oid
+        WHERE c.relname = $1
+        `,
+        [indexName]
+      );
+
+      if (!rows[0]) {
+        console.log(
+          chalk.dim(
+            `[Index] Waiting for ${chalk.bold(indexName)} to appear (${Math.floor((Date.now() - startTime) / 1000)}s)`
+          )
+        );
+        await new Promise((resolve) => setTimeout(resolve, 10000));
+        continue;
+      }
+
+      const { indisready, indisvalid } = rows[0];
+      if (indisvalid && !indisready) {
+        console.log(
+          chalk.red(`[Index] Creation failed for ${chalk.bold(indexName)}`)
+        );
+        throw new Error(`Index ${indexName} creation failed`);
+      }
+      if (indisready && indisvalid) {
+        const duration = Math.floor((Date.now() - startTime) / 1000);
+        console.log(
+          chalk.green(
+            `[Index] ${chalk.bold(indexName)} ready after ${duration}s`
+          )
+        );
+
+        return;
+      }
+
+      if (Date.now() - startTime > this.config.timeoutSeconds * 1000) {
+        console.log(
+          chalk.red(
+            `[Index] Timeout waiting for ${chalk.bold(indexName)} after ${this.config.timeoutSeconds}s`
+          )
+        );
+        throw new Error(`Index ${indexName} creation timed out`);
+      }
+
+      await new Promise((resolve) => setTimeout(resolve, 1000));
+    }
+  }
+
+  private async createForeignKeyConstraints(
+    client: PoolClient,
+    ref: ReferencingTable
+  ): Promise<void> {
+    const newConstraintName = `${ref.constraintName}_bigint`;
+    const newColumn = makeNewForeignKeyColumn(ref.foreignKeyColumn);
+
+    console.log(
+      chalk.yellow(
+        `[Setup] Creating FK constraint for ${chalk.bold(ref.tableName)}."${newColumn}"`
+      )
+    );
+
+    // Create the new FK constraint without validation (NOT VALID) to keep the lock short.
+    // /!\ We restrict the update and delete actions to RESTRICT.
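+    // Shape of the generated DDL, as a sketch using the illustrative names
+    // from the config interfaces above ("group_memberships"/"groupId"; the
+    // referenced table "groups" is likewise assumed):
+    //   ALTER TABLE group_memberships
+    //     ADD CONSTRAINT group_memberships_groupId_fkey_bigint
+    //     FOREIGN KEY ("groupId_new") REFERENCES groups(new_id)
+    //     ON UPDATE RESTRICT ON DELETE RESTRICT
+    //     NOT VALID;
+    // NOT VALID keeps the ALTER TABLE lock brief; the VALIDATE CONSTRAINT that
+    // follows scans existing rows without blocking concurrent writes.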
+ try { + await this.executeSql( + client, + ` + ALTER TABLE ${ref.tableName} + ADD CONSTRAINT ${newConstraintName} + FOREIGN KEY ("${newColumn}") + REFERENCES ${this.config.tableName}(${MAIN_TABLE_NEW_COLUMN}) + ON UPDATE RESTRICT + ON DELETE RESTRICT + NOT VALID; + ` + ); + } catch (error) { + if (error instanceof Error && !error.message.includes("already exists")) { + throw error; + } + } + + // Validate the constraint + await this.executeSql( + client, + ` + ALTER TABLE ${ref.tableName} + VALIDATE CONSTRAINT ${newConstraintName}; + ` + ); + + console.log( + chalk.green( + `[Setup] Created and validated FK constraint ${chalk.bold(newConstraintName)}` + ) + ); + } + + // Swap columns + + private async switchMainTable( + client: PoolClient, + config: MainTableSwitchConfig, + { isRollback }: { isRollback: boolean } = { isRollback: false } + ): Promise { + const { + currentColumn, + newColumn, + legacyColumn, + sequenceName, + indexName, + triggerName, + triggerFunction, + } = config; + + console.log( + chalk.yellow( + `[Switch] Processing main table: ${chalk.bold(this.config.tableName)}` + ) + ); + + await this.executeSql( + client, + `LOCK TABLE ${this.config.tableName} IN EXCLUSIVE MODE` + ); + + if (isRollback) { + await this.executeSql( + client, + ` + -- Convert sequence back to integer + ALTER SEQUENCE ${sequenceName} AS integer; + + -- Drop current PK constraint + ALTER TABLE ${this.config.tableName} + DROP CONSTRAINT ${this.config.tableName}_pkey; + + -- Rename current column to temp + ALTER TABLE ${this.config.tableName} + RENAME COLUMN "${currentColumn}" TO "${currentColumn}_temp"; + + -- Restore from legacy + ALTER TABLE ${this.config.tableName} + RENAME COLUMN "${legacyColumn}" TO "${currentColumn}"; + + -- Add back original PK + ALTER TABLE ${this.config.tableName} + ADD CONSTRAINT ${this.config.tableName}_pkey + PRIMARY KEY (${currentColumn}); + + -- Set sequence ownership back to original column + ALTER SEQUENCE ${sequenceName} + OWNED BY ${this.config.tableName}."${currentColumn}"; + + -- Clean up temp column + ALTER TABLE ${this.config.tableName} + DROP COLUMN ${currentColumn}_temp; + + DROP TRIGGER IF EXISTS ${triggerName} ON ${this.config.tableName}; + DROP FUNCTION IF EXISTS ${triggerFunction}(); + ` + ); + } else { + await this.executeSql( + client, + ` + -- First convert sequence to bigint (can be done before any column changes) + ALTER SEQUENCE ${sequenceName} AS bigint; + + -- Drop current PK constraint + ALTER TABLE ${this.config.tableName} + DROP CONSTRAINT ${this.config.tableName}_pkey; + + -- Rename old column to legacy + ALTER TABLE ${this.config.tableName} + RENAME COLUMN ${currentColumn} TO "${legacyColumn}"; + + -- Rename new column to final name + ALTER TABLE ${this.config.tableName} + RENAME COLUMN "${newColumn}" TO "${currentColumn}"; + + -- Add new PK using the prepared unique index + ALTER TABLE ${this.config.tableName} + ADD CONSTRAINT ${this.config.tableName}_pkey + PRIMARY KEY USING INDEX ${indexName}; + + -- Set sequence ownership (this also sets the default and removes old defaults) + ALTER SEQUENCE ${sequenceName} + OWNED BY ${this.config.tableName}."${currentColumn}"; + + -- Drop old trigger and function + DROP TRIGGER IF EXISTS ${triggerName} ON ${this.config.tableName}; + DROP FUNCTION IF EXISTS ${triggerFunction}(); + ` + ); + } + + console.log(chalk.green(`[Switch] Main table processed successfully`)); + } + + private async switchReferencingTable( + client: PoolClient, + ref: ReferencingTable, + config: ReferencingColumnSwitchConfig, + { 
isRollback }: { isRollback: boolean } = { isRollback: false } + ): Promise { + const { currentColumn, newColumn, legacyColumn, constraintName } = config; + + console.log( + chalk.yellow( + `[Switch] Processing referencing table: ${chalk.bold(ref.tableName)}` + ) + ); + + await this.executeSql( + client, + `LOCK TABLE ${ref.tableName} IN EXCLUSIVE MODE` + ); + + if (isRollback) { + await this.executeSql( + client, + ` + -- Drop current FK constraint (bigint version) + ALTER TABLE ${ref.tableName} + DROP CONSTRAINT "${constraintName}"; + + -- Rename current bigint column to temp + ALTER TABLE ${ref.tableName} + RENAME COLUMN "${currentColumn}" TO "${newColumn}"; + + -- Restore original column from legacy + ALTER TABLE ${ref.tableName} + RENAME COLUMN "${legacyColumn}" TO "${currentColumn}"; + + -- Add back original FK constraint + ALTER TABLE ${ref.tableName} + ADD CONSTRAINT "${constraintName.replace("_bigint", "")}" + FOREIGN KEY ("${currentColumn}") + REFERENCES ${this.config.tableName}("${MAIN_TABLE_LEGACY_COLUMN}"); + + -- Clean up the bigint column + ALTER TABLE ${ref.tableName} + DROP COLUMN "${newColumn}"; + + -- Drop legacy sync trigger and function + DROP TRIGGER IF EXISTS ${makeTriggerName({ isLegacy: true })} ON ${ref.tableName}; + DROP FUNCTION IF EXISTS ${makeTriggerFunctionName(ref.tableName, { isLegacy: true })}; + ` + ); + } else { + await this.executeSql( + client, + ` + -- Drop old FK constraint (this has to happen before we can rename) + ALTER TABLE ${ref.tableName} + DROP CONSTRAINT "${constraintName}"; + + -- Rename old column to legacy + ALTER TABLE ${ref.tableName} + RENAME COLUMN "${currentColumn}" TO "${legacyColumn}"; + + -- Rename new column to final name + ALTER TABLE ${ref.tableName} + RENAME COLUMN "${newColumn}" TO "${currentColumn}"; + + -- Drop old trigger and function + DROP TRIGGER IF EXISTS fk_sync ON ${ref.tableName}; + DROP FUNCTION IF EXISTS ${ref.tableName}_fk_sync_trigger(); + ` + ); + } + } + + private async checkProgress( + client: PoolClient, + referencingTables: ReferencingTable[] + ): Promise { + const progress: MigrationProgress[] = []; + + // Check main table + const mainProgress = await this.checkTableProgress( + client, + this.config.tableName, + MAIN_TABLE_NEW_COLUMN + ); + progress.push(mainProgress); + + // Check referencing tables + for (const ref of referencingTables) { + const refProgress = await this.checkTableProgress( + client, + ref.tableName, + makeNewForeignKeyColumn(ref.foreignKeyColumn) + ); + progress.push(refProgress); + } + + return progress; + } + + private async checkTableProgress( + client: PoolClient, + tableName: string, + targetColumn: string + ): Promise { + const { rows } = await this.executeSql<{ + total_rows: number; + migrated_rows: number; + progress_percentage: string; + }>( + client, + ` + SELECT + COUNT(*)::bigint as total_rows, + COUNT(${targetColumn})::bigint as migrated_rows, + ROUND((COUNT(${targetColumn})::numeric / NULLIF(COUNT(*)::numeric, 0) * 100), 2) as progress_percentage + FROM ${tableName} + ` + ); + + return { + tableName, + totalRows: rows[0].total_rows, + migratedRows: rows[0].migrated_rows, + progressPercentage: parseInt(rows[0].progress_percentage, 10), + }; + } + + private isMigrationComplete(progress: MigrationProgress[]): boolean { + return progress.every((p) => p.progressPercentage === 100); + } + + private async showProgress( + client: PoolClient, + referencingTables: ReferencingTable[] + ): Promise { + const progress = await this.checkProgress(client, referencingTables); + + 
console.log(chalk.blue("\n[Migration Progress]")); + for (const p of progress) { + const status = + p.progressPercentage === 100 ? chalk.green("✓") : chalk.yellow("~"); + + console.log( + ` ${status} ${chalk.bold(p.tableName.padEnd(20))} ${p.migratedRows}/${p.totalRows} rows (${chalk.bold(p.progressPercentage)}%)` + ); + } + } + + private async executeSql( + client: PoolClient, + sql: string, + params?: any[] + ): Promise<{ rows: T[] }> { + if (this.config.dryRun) { + let formattedSql = sql; + if (params) { + params.forEach((param, index) => { + const placeholder = `$${index + 1}`; + formattedSql = formattedSql.replace(placeholder, param); + }); + } + + console.info("[DRY RUN] Would execute:"); + console.info(formattedSql); + + // Even in dry run, we run SELECT queries to get results. + if (sql.trim().toLowerCase().startsWith("select")) { + const { rows } = await client.query(sql, params); + return { rows } as { rows: T[] }; + } + + return { rows: [] } as { rows: T[] }; + } + + const { rows } = await client.query(sql, params); + return { rows } as { rows: T[] }; + } +} + +makeScript( + { + database: { + type: "string", + describe: "Database connection string", + demandOption: true, + alias: "d", + }, + table: { + type: "string", + describe: "Table to migrate", + demandOption: true, + alias: "t", + }, + step: { + type: "string", + describe: `Migration step (${MigrationSteps.join("|")})`, + demandOption: true, + alias: "s", + choices: MigrationSteps, + }, + batchSize: { + type: "number", + describe: "Batch size for backfill", + default: 10000, + alias: "b", + }, + schema: { + type: "string", + describe: "Schema name", + default: "public", + }, + timeout: { + type: "number", + describe: "Timeout in seconds", + default: 3600, + }, + }, + async ({ database, table, step, batchSize, schema, timeout, execute }) => { + const migration = new IntToBigIntMigration(database, { + tableName: table, + schemaName: schema, + batchSize, + timeoutSeconds: timeout, + dryRun: !execute, + }); + + await migration.execute(step as MigrationStepType); + } +); diff --git a/front/package-lock.json b/front/package-lock.json index c876e93bff8e..b4de55ff616c 100644 --- a/front/package-lock.json +++ b/front/package-lock.json @@ -143,6 +143,7 @@ "@types/node": "^20.12.12", "@types/pdfjs-dist": "^2.10.378", "@types/pegjs": "^0.10.3", + "@types/pg": "^8.11.10", "@types/react": "^18.3.11", "@types/react-dom": "^18.3.0", "@types/react-katex": "^3.0.4", @@ -155,6 +156,7 @@ "@typescript-eslint/eslint-plugin": "^7.9.0", "@typescript-eslint/parser": "^7.9.0", "autoprefixer": "^10.4.12", + "chalk": "^5.3.0", "concurrently": "^9.0.1", "danger": "^11.3.1", "eslint": "^8.56.0", @@ -10619,6 +10621,21 @@ "sprintf-js": "~1.0.2" } }, + "node_modules/@apidevtools/swagger-cli/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, "node_modules/@apidevtools/swagger-cli/node_modules/cliui": { "version": "6.0.0", "license": "ISC", @@ -12777,6 +12794,22 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, + "node_modules/@jest/console/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": 
"sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, "node_modules/@jest/core": { "version": "29.7.0", "dev": true, @@ -12823,6 +12856,22 @@ } } }, + "node_modules/@jest/core/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, "node_modules/@jest/environment": { "version": "29.7.0", "dev": true, @@ -12932,6 +12981,22 @@ } } }, + "node_modules/@jest/reporters/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, "node_modules/@jest/schemas": { "version": "29.6.3", "dev": true, @@ -13009,6 +13074,22 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, + "node_modules/@jest/transform/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, "node_modules/@jest/transform/node_modules/convert-source-map": { "version": "2.0.0", "dev": true, @@ -13030,6 +13111,22 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, + "node_modules/@jest/types/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, "node_modules/@jridgewell/gen-mapping": { "version": "0.3.5", "license": "MIT", @@ -18563,6 +18660,74 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/pg": { + "version": "8.11.10", + "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.11.10.tgz", + "integrity": "sha512-LczQUW4dbOQzsH2RQ5qoeJ6qJPdrcM/DcMLoqWQkMLMsq83J5lAX3LXjdkWdpscFy67JSOWDnh7Ny/sPFykmkg==", + "dev": true, + "dependencies": { + "@types/node": "*", + "pg-protocol": "*", + "pg-types": "^4.0.1" + } + }, + "node_modules/@types/pg/node_modules/pg-types": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-4.0.2.tgz", + "integrity": "sha512-cRL3JpS3lKMGsKaWndugWQoLOCoP+Cic8oseVcbr0qhPzYD5DWXK+RZ9LY9wxRf7RQia4SCwQlXk0q6FCPrVng==", + "dev": true, + "dependencies": { + "pg-int8": "1.0.1", + "pg-numeric": "1.0.2", + "postgres-array": "~3.0.1", + 
"postgres-bytea": "~3.0.0", + "postgres-date": "~2.1.0", + "postgres-interval": "^3.0.0", + "postgres-range": "^1.1.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@types/pg/node_modules/postgres-array": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-3.0.2.tgz", + "integrity": "sha512-6faShkdFugNQCLwucjPcY5ARoW1SlbnrZjmGl0IrrqewpvxvhSLHimCVzqeuULCbG0fQv7Dtk1yDbG3xv7Veog==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/@types/pg/node_modules/postgres-bytea": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-3.0.0.tgz", + "integrity": "sha512-CNd4jim9RFPkObHSjVHlVrxoVQXz7quwNFpz7RY1okNNme49+sVyiTvTRobiLV548Hx/hb1BG+iE7h9493WzFw==", + "dev": true, + "dependencies": { + "obuf": "~1.1.2" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/@types/pg/node_modules/postgres-date": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-2.1.0.tgz", + "integrity": "sha512-K7Juri8gtgXVcDfZttFKVmhglp7epKb1K4pgrkLxehjqkrgPhfG6OO8LHLkfaqkbpjNRnra018XwAr1yQFWGcA==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/@types/pg/node_modules/postgres-interval": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-3.0.0.tgz", + "integrity": "sha512-BSNDnbyZCXSxgA+1f5UU2GmwhoI0aU5yMxRGO8CdFEcY2BQF9xm/7MqKnYoM1nJDk8nONNWDk9WeSmePFhQdlw==", + "dev": true, + "engines": { + "node": ">=12" + } + }, "node_modules/@types/prismjs": { "version": "1.26.1", "license": "MIT" @@ -19968,6 +20133,22 @@ "@babel/core": "^7.8.0" } }, + "node_modules/babel-jest/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, "node_modules/babel-plugin-istanbul": { "version": "6.1.1", "dev": true, @@ -20455,14 +20636,12 @@ } }, "node_modules/chalk": { - "version": "4.1.2", - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz", + "integrity": "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==", + "dev": true, "engines": { - "node": ">=10" + "node": "^12.17.0 || ^14.13 || >=16.0.0" }, "funding": { "url": "https://github.com/chalk/chalk?sponsor=1" @@ -20804,6 +20983,34 @@ "url": "https://github.com/open-cli-tools/concurrently?sponsor=1" } }, + "node_modules/concurrently/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/concurrently/node_modules/chalk/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": 
"sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/concurrently/node_modules/supports-color": { "version": "8.1.1", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", @@ -20911,6 +21118,22 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, + "node_modules/create-jest/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, "node_modules/crelt": { "version": "1.0.6", "license": "MIT" @@ -22850,6 +23073,22 @@ "url": "https://github.com/sponsors/epoberezkin" } }, + "node_modules/eslint/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, "node_modules/eslint/node_modules/eslint-scope": { "version": "7.2.2", "dev": true, @@ -26027,6 +26266,22 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, + "node_modules/jest-circus/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, "node_modules/jest-cli": { "version": "29.7.0", "dev": true, @@ -26059,6 +26314,22 @@ } } }, + "node_modules/jest-cli/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, "node_modules/jest-config": { "version": "29.7.0", "dev": true, @@ -26103,6 +26374,22 @@ } } }, + "node_modules/jest-config/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, "node_modules/jest-diff": { "version": "29.7.0", "dev": true, @@ -26117,6 +26404,22 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, + "node_modules/jest-diff/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": 
"sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, "node_modules/jest-docblock": { "version": "29.7.0", "license": "MIT", @@ -26142,6 +26445,22 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, + "node_modules/jest-each/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, "node_modules/jest-environment-node": { "version": "29.7.0", "dev": true, @@ -26216,6 +26535,22 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, + "node_modules/jest-matcher-utils/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, "node_modules/jest-message-util": { "version": "29.7.0", "dev": true, @@ -26235,6 +26570,22 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, + "node_modules/jest-message-util/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, "node_modules/jest-mock": { "version": "29.7.0", "dev": true, @@ -26303,6 +26654,22 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, + "node_modules/jest-resolve/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, "node_modules/jest-runner": { "version": "29.7.0", "dev": true, @@ -26334,6 +26701,22 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, + "node_modules/jest-runner/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, "node_modules/jest-runtime": { "version": "29.7.0", "dev": true, @@ -26366,6 +26749,22 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, + "node_modules/jest-runtime/node_modules/chalk": { + "version": 
"4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, "node_modules/jest-snapshot": { "version": "29.7.0", "dev": true, @@ -26396,6 +26795,22 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, + "node_modules/jest-snapshot/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, "node_modules/jest-util": { "version": "29.7.0", "dev": true, @@ -26412,6 +26827,22 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, + "node_modules/jest-util/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, "node_modules/jest-validate": { "version": "29.7.0", "dev": true, @@ -26439,6 +26870,22 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/jest-validate/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, "node_modules/jest-watcher": { "version": "29.7.0", "dev": true, @@ -26457,6 +26904,22 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, + "node_modules/jest-watcher/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, "node_modules/jest-worker": { "version": "29.7.0", "dev": true, @@ -28905,6 +29368,12 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/obuf": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/obuf/-/obuf-1.1.2.tgz", + "integrity": "sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==", + "dev": true + }, "node_modules/oidc-token-hash": { "version": "5.0.3", "license": "MIT", @@ -29436,6 +29905,15 @@ "node": ">=4.0.0" } }, + "node_modules/pg-numeric": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/pg-numeric/-/pg-numeric-1.0.2.tgz", + "integrity": 
"sha512-BM/Thnrw5jm2kKLE5uJkXqqExRUY/toLHda65XgFTBTFYZyopbKjBe29Ii3RbkvlsMoFwD+tHeGaCjjv0gHlyw==", + "dev": true, + "engines": { + "node": ">=4" + } + }, "node_modules/pg-pool": { "version": "3.6.1", "license": "MIT", @@ -29862,6 +30340,12 @@ "node": ">=0.10.0" } }, + "node_modules/postgres-range": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/postgres-range/-/postgres-range-1.1.4.tgz", + "integrity": "sha512-i/hbxIE9803Alj/6ytL7UHQxRvZkI9O4Sy+J3HGc4F4oo/2eQAjTSNJ0bfxyse3bH0nuVesCk+3IRLaMtG3H6w==", + "dev": true + }, "node_modules/pprof-format": { "version": "2.1.0", "license": "MIT" @@ -33249,6 +33733,21 @@ "node": ">=10.13.0" } }, + "node_modules/tsconfig-paths-webpack-plugin/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, "node_modules/tsconfig-paths-webpack-plugin/node_modules/strip-bom": { "version": "3.0.0", "license": "MIT", diff --git a/front/package.json b/front/package.json index 01932cc1c6ea..404c38fe835b 100644 --- a/front/package.json +++ b/front/package.json @@ -79,9 +79,9 @@ "jszip": "^3.10.1", "jwks-rsa": "^3.1.0", "lodash": "^4.17.21", + "lru-memoizer": "^2.2.0", "lucide-react": "^0.363.0", "luxon": "^3.4.4", - "lru-memoizer": "^2.2.0", "marked": "^14.1.3", "mermaid": "^10.9.0", "minimist": "^1.2.8", @@ -156,6 +156,7 @@ "@types/node": "^20.12.12", "@types/pdfjs-dist": "^2.10.378", "@types/pegjs": "^0.10.3", + "@types/pg": "^8.11.10", "@types/react": "^18.3.11", "@types/react-dom": "^18.3.0", "@types/react-katex": "^3.0.4", @@ -168,6 +169,7 @@ "@typescript-eslint/eslint-plugin": "^7.9.0", "@typescript-eslint/parser": "^7.9.0", "autoprefixer": "^10.4.12", + "chalk": "^5.3.0", "concurrently": "^9.0.1", "danger": "^11.3.1", "eslint": "^8.56.0", From fcd920cc097170c811df3b59a5c602a24fcb50fd Mon Sep 17 00:00:00 2001 From: Flavien David Date: Mon, 2 Dec 2024 17:34:24 +0100 Subject: [PATCH 04/22] Add cutover/rollback migration --- connectors/src/resources/storage/index.ts | 3 +- front/lib/resources/storage/index.ts | 15 + .../20241129_migrate_int_to_bigint.ts | 661 ++++++++++++------ 3 files changed, 448 insertions(+), 231 deletions(-) diff --git a/connectors/src/resources/storage/index.ts b/connectors/src/resources/storage/index.ts index 3d31a16107b8..d87dedecb494 100644 --- a/connectors/src/resources/storage/index.ts +++ b/connectors/src/resources/storage/index.ts @@ -1,6 +1,5 @@ -import assert from "node:assert"; - import { isDevelopment } from "@dust-tt/types"; +import assert from "assert"; import types, { builtins } from "pg-types"; import { Sequelize } from "sequelize"; diff --git a/front/lib/resources/storage/index.ts b/front/lib/resources/storage/index.ts index b755346a72e2..3973b50cb5fe 100644 --- a/front/lib/resources/storage/index.ts +++ b/front/lib/resources/storage/index.ts @@ -1,4 +1,6 @@ import { isDevelopment } from "@dust-tt/types"; +import assert from "assert"; +import types, { builtins } from "pg-types"; import { Sequelize } from "sequelize"; import { dbConfig } from "@app/lib/resources/storage/config"; @@ -8,6 +10,19 @@ const acquireAttempts = new WeakMap(); const { DB_LOGGING_ENABLED = false } = process.env; +// Parse PostgreSQL BIGINT (INT8) values into JavaScript numbers, 
but only if they +// fall within JavaScript's safe integer range (-(2^53 - 1) to 2^53 - 1). This +// prevents silent precision loss when handling large integers from the database. +// Throws an assertion error if a BIGINT value exceeds JavaScript's safe integer +// limits. +types.setTypeParser(builtins.INT8, function (val) { + assert( + Number.isSafeInteger(Number(val)), + `Found a value stored as a BIGINT that is not a safe integer: ${val}` + ); + return Number(val); +}); + function sequelizeLogger(message: string) { console.log(message.replace("Executing (default): ", "")); } diff --git a/front/migrations/20241129_migrate_int_to_bigint.ts b/front/migrations/20241129_migrate_int_to_bigint.ts index 0f8856cd958d..07ea88f07b4a 100644 --- a/front/migrations/20241129_migrate_int_to_bigint.ts +++ b/front/migrations/20241129_migrate_int_to_bigint.ts @@ -1,9 +1,9 @@ +import chalk from "chalk"; import type { PoolClient } from "pg"; import { Pool } from "pg"; import logger from "@app/logger/logger"; import { makeScript } from "@app/scripts/helpers"; -import chalk from "chalk"; // Types. interface MigrationConfig { @@ -40,8 +40,6 @@ interface MainTableSwitchConfig { legacyColumn: string; // e.g., 'id_legacy' sequenceName: string; // e.g., 'table_id_seq' indexName: string; // e.g., 'table_pkey_bigint' - triggerName: string; // e.g., 'bigint_sync' - triggerFunction: string; // e.g., 'table_bigint_sync_trigger' } interface ReferencingColumnSwitchConfig { @@ -60,23 +58,58 @@ const MigrationSteps = [ ] as const; type MigrationStepType = (typeof MigrationSteps)[number]; -const MAIN_TABLE_NEW_COLUMN = "new_id"; -const MAIN_TABLE_LEGACY_COLUMN = "id_legacy"; - -const makeNewForeignKeyColumn = (column: string) => `${column}_new`; - -const makeNewIndexName = ( - indexName: string, - { isPrimary }: { isPrimary: boolean } -) => `${indexName}_${isPrimary ? "pkey" : "fk"}_bigint`; - -const makeTriggerName = ({ isLegacy }: { isLegacy: boolean }) => - isLegacy ? "legacy_sync" : "bigint_sync"; - -const makeTriggerFunctionName = ( - tableName: string, - { isLegacy }: { isLegacy: boolean } -) => `${tableName}_${isLegacy ? 
"legacy_sync_trigger" : "bigint_sync_trigger"}`; +const MAIN_ID_COLUMN = "id"; + +export const COLUMN_TYPE = { + INT: "int", + BIGINT: "bigint", +} as const; + +export const SYNC_DIRECTION = { + TO_BIGINT: "to_bigint", + TO_LEGACY: "to_legacy", +} as const; + +// Make these type-safe using TypeScript literal types +type ColumnType = (typeof COLUMN_TYPE)[keyof typeof COLUMN_TYPE]; +type SyncDirection = (typeof SYNC_DIRECTION)[keyof typeof SYNC_DIRECTION]; + +// naming.ts +export const createColumnName = { + new: (baseColumn: string) => `${baseColumn}_new` as const, + legacy: (baseColumn: string) => `${baseColumn}_legacy` as const, +}; + +export const createIndexName = { + primary: (tableName: string, column: ColumnType) => + `${tableName}_pkey_${column}` as const, + foreign: (tableName: string, columnName: string, column: ColumnType) => + `${tableName}_${columnName}_fk_${column}` as const, +}; + +export const createTriggerNames = { + // For primary key (main table) + pk: (tableName: string, direction: SyncDirection) => ({ + trigger: `${tableName}_pk_sync_${direction}` as const, + function: `${tableName}_pk_sync_${direction}_trigger` as const, + }), + + // For foreign key (referencing tables) + fk: (tableName: string, direction: SyncDirection) => ({ + trigger: `${tableName}_fk_sync_${direction}` as const, + function: `${tableName}_fk_sync_${direction}_trigger` as const, + }), +}; + +export const createConstraintName = { + notNull: (tableName: string, type: ColumnType) => + `${tableName}_not_null_${type}` as const, + foreignKey: (tableName: string, columnName: string, type: ColumnType) => + `${tableName}_${columnName}_fkey_${type}` as const, +}; + +export const createSequenceName = (tableName: string) => + `${tableName}_id_seq` as const; /** * Converts Postgres array string to JS array @@ -164,13 +197,16 @@ class IntToBigIntMigration { // Find referencing tables const referencingTables = await this.findReferencingTables(client); - console.log(`Found ${referencingTables.length} referencing tables`); // Add new columns await this.addNewColumns(client, referencingTables); // Setup triggers - await this.setupTriggers(client, referencingTables); + await this.setupTriggers( + client, + referencingTables, + SYNC_DIRECTION.TO_BIGINT + ); } finally { client.release(); } @@ -182,24 +218,21 @@ class IntToBigIntMigration { try { // Find referencing tables const referencingTables = await this.findReferencingTables(client); - console.log(`Found ${referencingTables.length} referencing tables`); // Backfill main table - await this.backfillTable( - client, - this.config.tableName, - "id", - MAIN_TABLE_NEW_COLUMN - ); + await this.backfillTable(client, { + tableName: this.config.tableName, + sourceColumn: MAIN_ID_COLUMN, + targetColumn: createColumnName.new(MAIN_ID_COLUMN), + }); // Backfill referencing tables for (const ref of referencingTables) { - await this.backfillTable( - client, - ref.tableName, - ref.foreignKeyColumn, - makeNewForeignKeyColumn(ref.foreignKeyColumn) - ); + await this.backfillTable(client, { + tableName: ref.tableName, + sourceColumn: ref.foreignKeyColumn, + targetColumn: createColumnName.new(ref.foreignKeyColumn), + }); } // Show progress @@ -217,7 +250,6 @@ class IntToBigIntMigration { try { // Find referencing tables const referencingTables = await this.findReferencingTables(client); - console.log(`Found ${referencingTables.length} referencing tables`); // Verify backfill is complete const progress = await this.checkProgress(client, referencingTables); @@ -243,14 +275,30 @@ class 
IntToBigIntMigration { } } + private async dropTriggers( + client: PoolClient, + { tableName, isPrimaryKey }: { tableName: string; isPrimaryKey: boolean }, + direction: SyncDirection + ): Promise { + const triggerInfo = isPrimaryKey + ? createTriggerNames.pk(tableName, direction) + : createTriggerNames.fk(tableName, direction); + + await this.executeSql( + client, + ` + DROP TRIGGER IF EXISTS ${triggerInfo.trigger} ON ${tableName}; + DROP FUNCTION IF EXISTS ${triggerInfo.function}; + ` + ); + } + // Cutover Phase private async cutover(): Promise { const client = await this.pool.connect(); - try { // Find referencing tables const referencingTables = await this.findReferencingTables(client); - console.log(`Found ${referencingTables.length} referencing tables`); // Use one global transaction for all table switches. await this.executeSql(client, "BEGIN"); @@ -259,27 +307,30 @@ class IntToBigIntMigration { for (const ref of referencingTables) { await this.switchReferencingTable(client, ref, { currentColumn: ref.foreignKeyColumn, - newColumn: makeNewForeignKeyColumn(ref.foreignKeyColumn), - legacyColumn: `${ref.foreignKeyColumn}_legacy`, + newColumn: createColumnName.new(ref.foreignKeyColumn), + legacyColumn: createColumnName.legacy(ref.foreignKeyColumn), constraintName: ref.constraintName, }); } // Switch main table. await this.switchMainTable(client, { - currentColumn: "id", - newColumn: MAIN_TABLE_NEW_COLUMN, - legacyColumn: MAIN_TABLE_LEGACY_COLUMN, + currentColumn: MAIN_ID_COLUMN, + newColumn: createColumnName.new(MAIN_ID_COLUMN), + legacyColumn: createColumnName.legacy(MAIN_ID_COLUMN), sequenceName: `${this.config.tableName}_id_seq`, - indexName: makeNewIndexName(this.config.tableName, { isPrimary: true }), - triggerName: makeTriggerName({ isLegacy: false }), - triggerFunction: makeTriggerFunctionName(this.config.tableName, { - isLegacy: false, - }), + indexName: createIndexName.primary( + this.config.tableName, + COLUMN_TYPE.BIGINT + ), }); - // Setup legacy sync triggers for the main table and referencing tables. - await this.setupTriggers(client, referencingTables, { isLegacy: true }); + // Setup legacy sync triggers to sync id to the legacy columns. + await this.setupTriggers( + client, + referencingTables, + SYNC_DIRECTION.TO_LEGACY + ); await this.executeSql(client, "COMMIT"); console.log( @@ -301,23 +352,33 @@ class IntToBigIntMigration { try { // Find referencing tables const referencingTables = await this.findReferencingTables(client); - console.log(`Found ${referencingTables.length} referencing tables`); // Use one global transaction for all table switches. await this.executeSql(client, "BEGIN"); // Switch referencing table first as we can't switch main table with active FK constraints. for (const ref of referencingTables) { - console.log(">> currentColon", ref.foreignKeyColumn); - console.log(">> constraintName", ref.constraintName); + // Create former FK indexes before renaming. + console.log( + chalk.yellow( + `[Indexes] Creating constraints for table: ${chalk.bold(ref.tableName)}` + ) + ); + + // Create foreign key constraint. 
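+        // (A sketch of what this call is expected to emit for the legacy
+        // direction, given the naming helpers above; the FK is added NOT VALID
+        // first to keep the lock short, then validated in a second pass:
+        //
+        //   ALTER TABLE <ref_table>
+        //   ADD CONSTRAINT "<ref_table>_<fk_column>_fkey_int"
+        //   FOREIGN KEY ("<fk_column>_legacy")
+        //   REFERENCES <main_table>("id_legacy")
+        //   ON UPDATE RESTRICT ON DELETE RESTRICT
+        //   NOT VALID;
+        //
+        // The placeholder table and column names are illustrative only.)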
+ await this.createForeignKeyConstraints( + client, + ref, + SYNC_DIRECTION.TO_LEGACY + ); await this.switchReferencingTable( client, ref, { currentColumn: ref.foreignKeyColumn, - newColumn: `${ref.foreignKeyColumn}_temp`, - legacyColumn: `${ref.foreignKeyColumn}_legacy`, + newColumn: createColumnName.new(ref.foreignKeyColumn), + legacyColumn: createColumnName.legacy(ref.foreignKeyColumn), constraintName: ref.constraintName, }, { isRollback: true } @@ -328,21 +389,26 @@ class IntToBigIntMigration { await this.switchMainTable( client, { - currentColumn: "id", - newColumn: MAIN_TABLE_NEW_COLUMN, - legacyColumn: MAIN_TABLE_LEGACY_COLUMN, + currentColumn: MAIN_ID_COLUMN, + newColumn: createColumnName.new(MAIN_ID_COLUMN), + legacyColumn: createColumnName.legacy(MAIN_ID_COLUMN), sequenceName: `${this.config.tableName}_id_seq`, - indexName: makeNewIndexName(this.config.tableName, { - isPrimary: true, - }), - triggerName: makeTriggerName({ isLegacy: false }), - triggerFunction: makeTriggerFunctionName(this.config.tableName, { - isLegacy: false, - }), + indexName: createIndexName.primary( + this.config.tableName, + // We are rolling back to int + COLUMN_TYPE.INT + ), }, { isRollback: true } ); + // Setup sync triggers to sync id to the new columns. + await this.setupTriggers( + client, + referencingTables, + SYNC_DIRECTION.TO_BIGINT + ); + await this.executeSql(client, "COMMIT"); console.log( chalk.green(`[Switch] Successfully completed all table switches`) @@ -395,6 +461,18 @@ class IntToBigIntMigration { [this.config.tableName] ); + console.log( + chalk.magenta( + `[Referencing Tables] Found ${rows.length} referencing tables for table ${this.config.tableName}` + ) + ); + + rows.forEach((row) => + console.log( + chalk.dim(`• ${row.schema}.${row.tableName} (${row.foreignKeyColumn})`) + ) + ); + return rows; } @@ -402,97 +480,136 @@ class IntToBigIntMigration { client: PoolClient, referencingTables: ReferencingTable[] ): Promise { - // Add MAIN_TABLE_NEW_COLUMN column to main table + const newColumn = createColumnName.new(MAIN_ID_COLUMN); + + console.log(chalk.blue("[Columns] Adding new BigInt columns")); + + console.log( + chalk.yellow(`- Main table (${chalk.bold(this.config.tableName)}):`) + ); + + // Add new ID BigInt column to main table await this.executeSql( client, ` ALTER TABLE ${this.config.tableName} - ADD COLUMN IF NOT EXISTS "${MAIN_TABLE_NEW_COLUMN}" BIGINT + ADD COLUMN IF NOT EXISTS "${newColumn}" BIGINT ` ); + + console.log(chalk.dim(`✓ Added column "${newColumn}"`)); + console.log( - `Added ${MAIN_TABLE_NEW_COLUMN} column to ${this.config.tableName}` + chalk.yellow( + `- Referencing tables (${chalk.bold(referencingTables.length)}):` + ) ); // Add new columns to referencing tables for (const ref of referencingTables) { + const newRefColumn = createColumnName.new(ref.foreignKeyColumn); + await this.executeSql( client, // We need double quote to escape the camel case column name ` ALTER TABLE ${ref.tableName} - ADD COLUMN IF NOT EXISTS "${makeNewForeignKeyColumn(ref.foreignKeyColumn)}" BIGINT + ADD COLUMN IF NOT EXISTS "${newRefColumn}" BIGINT ` ); - console.log( - `Added ${makeNewForeignKeyColumn(ref.foreignKeyColumn)} column to ${ref.tableName}` - ); + + console.log(chalk.dim(`✓ ${ref.tableName} Added "${newRefColumn}"`)); } + + console.log(chalk.green(`[Columns] Successfully added all BigInt columns`)); } private async setupTriggers( client: PoolClient, referencingTables: ReferencingTable[], - opts: { isLegacy?: boolean } = {} + direction: SyncDirection ): Promise { - await 
this.createPKSyncTrigger(client, this.config.tableName, opts); + // Create PK sync trigger for the main table + await this.createPKSyncTrigger(client, this.config.tableName, direction); + + // Create FK sync triggers for the referencing tables for (const ref of referencingTables) { - await this.createFKSyncTrigger(client, ref, opts); + await this.createFKSyncTrigger(client, ref, direction); } } private async createPKSyncTrigger( client: PoolClient, tableName: string, - { isLegacy = false }: { isLegacy?: boolean } = {} + direction: SyncDirection ): Promise { - const triggerName = makeTriggerName({ isLegacy }); - const sourceCol = "id"; - const targetCol = isLegacy - ? MAIN_TABLE_LEGACY_COLUMN - : MAIN_TABLE_NEW_COLUMN; + const { trigger: triggerName, function: functionName } = + createTriggerNames.pk(tableName, direction); + + const sourceCol = MAIN_ID_COLUMN; + const targetCol = + direction === SYNC_DIRECTION.TO_LEGACY + ? createColumnName.legacy(sourceCol) + : createColumnName.new(sourceCol); + + console.log( + chalk.blue( + `[Trigger] Creating ${chalk.bold(triggerName)} for ${chalk.bold(tableName)}` + ) + ); await this.executeSql( client, `DROP TRIGGER IF EXISTS ${triggerName} ON ${tableName}` ); + console.log(chalk.dim(`- Dropped existing trigger if present`)); + await this.executeSql( client, ` - CREATE OR REPLACE FUNCTION ${tableName}_${triggerName}_trigger() + CREATE OR REPLACE FUNCTION ${functionName}() RETURNS TRIGGER AS $$ BEGIN - NEW.${targetCol} := NEW.${sourceCol}; + NEW."${targetCol}" := NEW."${sourceCol}"; RETURN NEW; END; $$ LANGUAGE plpgsql ` ); + console.log(chalk.dim(`- Created trigger function`)); + await this.executeSql( client, ` CREATE TRIGGER ${triggerName} BEFORE INSERT OR UPDATE ON ${tableName} FOR EACH ROW - EXECUTE FUNCTION ${tableName}_${triggerName}_trigger() + EXECUTE FUNCTION ${functionName}() ` ); - console.log(`Created ${triggerName} trigger on ${tableName}`); + console.log( + chalk.green( + `[Trigger] Completed ${chalk.bold(triggerName)} setup for ${chalk.bold(tableName)}` + ) + ); } private async createFKSyncTrigger( client: PoolClient, ref: ReferencingTable, - { isLegacy = false }: { isLegacy?: boolean } = {} + direction: SyncDirection ): Promise { - const triggerName = isLegacy ? "legacy_fk_sync" : "fk_sync"; + const { trigger: triggerName, function: functionName } = + createTriggerNames.fk(ref.tableName, direction); + const sourceCol = ref.foreignKeyColumn; - const targetCol = isLegacy - ? `${ref.foreignKeyColumn}_legacy` - : makeNewForeignKeyColumn(ref.foreignKeyColumn); + const targetCol = + direction === SYNC_DIRECTION.TO_LEGACY + ? 
createColumnName.legacy(ref.foreignKeyColumn) + : createColumnName.new(ref.foreignKeyColumn); console.log( chalk.blue( @@ -505,12 +622,12 @@ class IntToBigIntMigration { `DROP TRIGGER IF EXISTS ${triggerName} ON ${ref.tableName}` ); - console.log(chalk.dim(`• Dropped existing trigger if present`)); + console.log(chalk.dim(`- Dropped existing trigger if present`)); await this.executeSql( client, ` - CREATE OR REPLACE FUNCTION ${ref.tableName}_${triggerName}_trigger() + CREATE OR REPLACE FUNCTION ${functionName}() RETURNS TRIGGER AS $$ BEGIN NEW."${targetCol}" := NEW."${sourceCol}"; @@ -520,7 +637,7 @@ class IntToBigIntMigration { ` ); - console.log(chalk.dim(`• Created trigger function`)); + console.log(chalk.dim(`- Created trigger function`)); await this.executeSql( client, @@ -528,7 +645,7 @@ class IntToBigIntMigration { CREATE TRIGGER ${triggerName} BEFORE INSERT OR UPDATE ON ${ref.tableName} FOR EACH ROW - EXECUTE FUNCTION ${ref.tableName}_${triggerName}_trigger() + EXECUTE FUNCTION ${functionName}() ` ); console.log(chalk.dim(`• Created trigger`)); @@ -542,9 +659,11 @@ class IntToBigIntMigration { private async backfillTable( client: PoolClient, - tableName: string, - sourceColumn: string, - targetColumn: string + { + tableName, + sourceColumn, + targetColumn, + }: { tableName: string; sourceColumn: string; targetColumn: string } ): Promise { let currentId = 0; @@ -617,7 +736,9 @@ class IntToBigIntMigration { ); console.log( - chalk.blue(`[Composite Indexes] Found ${chalk.bold(rows.length)}:`) + chalk.blue( + `[Composite Indexes] Found ${chalk.bold(rows.length)} for ${tableName}:` + ) ); rows.forEach((r) => console.log( @@ -636,9 +757,11 @@ class IntToBigIntMigration { private async createCompositeIndexes( client: PoolClient, - tableName: string, - oldColumn: string, - newColumn: string + { + tableName, + oldColumn, + newColumn, + }: { tableName: string; oldColumn: string; newColumn: string } ): Promise { const compositeIndexes = await this.findCompositeIndexes( client, @@ -652,7 +775,7 @@ class IntToBigIntMigration { // Replace old column with new column in the columns array const newColumns = index.columns.map((col) => - col === oldColumn ? newColumn : `"${col}"` + col === oldColumn ? `"${newColumn}"` : `"${col}"` ); await this.executeSql( @@ -678,22 +801,39 @@ class IntToBigIntMigration { ) ); - // Create PK index - await this.createAndWaitForIndex( - client, - this.config.tableName, - MAIN_TABLE_NEW_COLUMN, - makeNewIndexName(this.config.tableName, { isPrimary: true }), - { isPrimary: true } - ); + const newMainColumn = createColumnName.new(MAIN_ID_COLUMN); + + // Create an index for the existing PK. In case we need to rollback. + // If we need to rollback, we will need to use this index to create the PK constraint from. + await this.createAndWaitForIndex(client, { + tableName: this.config.tableName, + columnName: MAIN_ID_COLUMN, + columnType: COLUMN_TYPE.INT, + indexName: createIndexName.primary( + this.config.tableName, + COLUMN_TYPE.INT + ), + isPrimary: true, + }); + + // Create PK index for the new column. 
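+    // (Like the int index above, this is expected to come down to a concurrent
+    // unique build; a sketch, assuming createAndWaitForIndex wraps CREATE INDEX:
+    //
+    //   CREATE UNIQUE INDEX CONCURRENTLY IF NOT EXISTS "<table>_pkey_bigint"
+    //   ON "<table>" ("id_new");
+    //
+    // Having the unique index ready lets cutover attach the PK via
+    // PRIMARY KEY USING INDEX instead of a long, lock-holding build.)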
+ await this.createAndWaitForIndex(client, { + tableName: this.config.tableName, + columnName: newMainColumn, + columnType: COLUMN_TYPE.BIGINT, + indexName: createIndexName.primary( + this.config.tableName, + COLUMN_TYPE.BIGINT + ), + isPrimary: true, + }); // Create composite indexes for the main table - await this.createCompositeIndexes( - client, - this.config.tableName, - "id", // Original column - MAIN_TABLE_NEW_COLUMN - ); + await this.createCompositeIndexes(client, { + tableName: this.config.tableName, + oldColumn: MAIN_ID_COLUMN, + newColumn: newMainColumn, + }); // Create FK indexes and their composite indexes for (const ref of referencingTables) { @@ -703,38 +843,52 @@ class IntToBigIntMigration { ) ); - const newFKColumn = makeNewForeignKeyColumn(ref.foreignKeyColumn); + const newFKColumn = createColumnName.new(ref.foreignKeyColumn); // Create the basic FK index - await this.createAndWaitForIndex( - client, - ref.tableName, - newFKColumn, - makeNewIndexName(`${ref.tableName}_${ref.foreignKeyColumn}`, { - isPrimary: false, - }), - { isPrimary: false } - ); + await this.createAndWaitForIndex(client, { + tableName: ref.tableName, + columnName: newFKColumn, + columnType: COLUMN_TYPE.BIGINT, + indexName: createIndexName.foreign( + ref.tableName, + ref.foreignKeyColumn, + COLUMN_TYPE.BIGINT + ), + isPrimary: false, + }); // Create foreign key constraint. - await this.createForeignKeyConstraints(client, ref); - - // Create composite indexes for this referencing table - await this.createCompositeIndexes( + await this.createForeignKeyConstraints( client, - ref.tableName, - ref.foreignKeyColumn, - newFKColumn + ref, + SYNC_DIRECTION.TO_BIGINT ); + + // Create composite indexes for this referencing table + await this.createCompositeIndexes(client, { + tableName: ref.tableName, + oldColumn: ref.foreignKeyColumn, + newColumn: newFKColumn, + }); } } private async createAndWaitForIndex( client: PoolClient, - tableName: string, - columnName: string, - indexName: string, - { isPrimary }: { isPrimary: boolean } + { + columnName, + columnType, + indexName, + isPrimary, + tableName, + }: { + columnName: string; + columnType: ColumnType; + indexName: string; + isPrimary: boolean; + tableName: string; + } ): Promise { // Only primary key indexes are unique. await this.executeSql( @@ -748,15 +902,18 @@ class IntToBigIntMigration { await this.waitForIndex(client, indexName); if (isPrimary) { - const contraintName = `${tableName}_not_null_bigint`; + const constraintName = createConstraintName.notNull( + tableName, + columnType + ); try { - // We can't use `IF NOT EXISTS` for constraints with CHECL, so we need to catch the error. + // We can't use `IF NOT EXISTS` for constraints, so we need to catch the error. 
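+        // (Two-step pattern: ADD CONSTRAINT ... NOT VALID takes only a brief
+        // lock and skips the full-table scan; the VALIDATE CONSTRAINT call
+        // further down then scans the table without blocking writes:
+        //
+        //   ALTER TABLE t ADD CONSTRAINT c CHECK (col IS NOT NULL) NOT VALID;
+        //   ALTER TABLE t VALIDATE CONSTRAINT c;
+        // )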
        await this.executeSql(
          client,
          `
          ALTER TABLE ${tableName}
-          ADD CONSTRAINT ${contraintName}
+          ADD CONSTRAINT "${constraintName}"
          CHECK ("${columnName}" IS NOT NULL) NOT VALID;
        `
        );
@@ -772,7 +929,7 @@ class IntToBigIntMigration {
       await this.executeSql(
         client,
         `
-        ALTER TABLE ${tableName} VALIDATE CONSTRAINT ${contraintName};
+        ALTER TABLE ${tableName} VALIDATE CONSTRAINT ${constraintName};
        `
      );
    }
@@ -855,27 +1012,44 @@ class IntToBigIntMigration {
 
   private async createForeignKeyConstraints(
     client: PoolClient,
-    ref: ReferencingTable
+    ref: ReferencingTable,
+    direction: SyncDirection
   ): Promise<void> {
-    const newConstraintName = `${ref.constraintName}_bigint`;
-    const newColumn = makeNewForeignKeyColumn(ref.foreignKeyColumn);
+    const isForward = direction === SYNC_DIRECTION.TO_BIGINT;
+
+    const newConstraintName = createConstraintName.foreignKey(
+      ref.tableName,
+      ref.foreignKeyColumn,
+      isForward ? COLUMN_TYPE.BIGINT : COLUMN_TYPE.INT
+    );
+
+    // For forward (TO_BIGINT): the new column references the new PK.
+    // For backward (TO_LEGACY): the legacy column references the legacy PK.
+    const sourceColumn = isForward
+      ? createColumnName.new(ref.foreignKeyColumn)
+      : createColumnName.legacy(ref.foreignKeyColumn);
+
+    const targetColumn = isForward
+      ? createColumnName.new(MAIN_ID_COLUMN)
+      : createColumnName.legacy(MAIN_ID_COLUMN);
 
     console.log(
       chalk.yellow(
-        `[Setup] Creating FK constraint for ${chalk.bold(ref.tableName)}."${newColumn}"`
+        `[Setup] Creating FK constraint for ${chalk.bold(ref.tableName)}."${sourceColumn}"`
      )
    );
 
     // Create the new FK constraint as NOT VALID first, validated below.
     // /!\ We restrict the update and delete actions to RESTRICT.
+    // Even with rollback, we won't roll back to the previous constraint.
     try {
       await this.executeSql(
         client,
         `
        ALTER TABLE ${ref.tableName}
-        ADD CONSTRAINT ${newConstraintName}
-        FOREIGN KEY ("${newColumn}")
-        REFERENCES ${this.config.tableName}(${MAIN_TABLE_NEW_COLUMN})
+        ADD CONSTRAINT "${newConstraintName}"
+        FOREIGN KEY ("${sourceColumn}")
+        REFERENCES ${this.config.tableName}(${targetColumn})
        ON UPDATE RESTRICT
        ON DELETE RESTRICT
        NOT VALID;
@@ -892,7 +1066,7 @@ class IntToBigIntMigration {
       client,
       `
       ALTER TABLE ${ref.tableName}
-      VALIDATE CONSTRAINT ${newConstraintName};
+      VALIDATE CONSTRAINT "${newConstraintName}";
      `
    );
 
@@ -910,15 +1084,8 @@ class IntToBigIntMigration {
   private async switchMainTable(
     client: PoolClient,
     config: MainTableSwitchConfig,
     { isRollback }: { isRollback: boolean } = { isRollback: false }
   ): Promise<void> {
-    const {
-      currentColumn,
-      newColumn,
-      legacyColumn,
-      sequenceName,
-      indexName,
-      triggerName,
-      triggerFunction,
-    } = config;
+    const { currentColumn, newColumn, legacyColumn, sequenceName, indexName } =
+      config;
 
     console.log(
       chalk.yellow(
@@ -932,42 +1099,65 @@ class IntToBigIntMigration {
     );
 
     if (isRollback) {
+      // First remove the to_legacy triggers on the main table.
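+      // (These sync triggers were installed at cutover and still assign
+      // NEW."id_legacy" := NEW."id"; once the renames below run, "id_legacy"
+      // no longer exists, so they are dropped inside the same transaction
+      // before any rename.)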
+      await this.dropTriggers(
+        client,
+        {
+          tableName: this.config.tableName,
+          isPrimaryKey: true,
+        },
+        SYNC_DIRECTION.TO_LEGACY
+      );
+
       await this.executeSql(
         client,
         `
-        -- Convert sequence back to integer
-        ALTER SEQUENCE ${sequenceName} AS integer;
-
-        -- Drop current PK constraint
-        ALTER TABLE ${this.config.tableName}
-        DROP CONSTRAINT ${this.config.tableName}_pkey;
-
-        -- Rename current column to temp
-        ALTER TABLE ${this.config.tableName}
-        RENAME COLUMN "${currentColumn}" TO "${currentColumn}_temp";
-
-        -- Restore from legacy
-        ALTER TABLE ${this.config.tableName}
-        RENAME COLUMN "${legacyColumn}" TO "${currentColumn}";
-
-        -- Add back original PK
-        ALTER TABLE ${this.config.tableName}
-        ADD CONSTRAINT ${this.config.tableName}_pkey
-        PRIMARY KEY (${currentColumn});
-
-        -- Set sequence ownership back to original column
-        ALTER SEQUENCE ${sequenceName}
-        OWNED BY ${this.config.tableName}."${currentColumn}";
-
-        -- Clean up temp column
-        ALTER TABLE ${this.config.tableName}
-        DROP COLUMN ${currentColumn}_temp;
-
-        DROP TRIGGER IF EXISTS ${triggerName} ON ${this.config.tableName};
-        DROP FUNCTION IF EXISTS ${triggerFunction}();
+          -- Drop current PK constraint
+          ALTER TABLE ${this.config.tableName}
+          DROP CONSTRAINT ${this.config.tableName}_pkey;
+
+          -- Rename current column back to new
+          ALTER TABLE ${this.config.tableName}
+          RENAME COLUMN "${currentColumn}" TO "${newColumn}";
+
+          -- Rename legacy column back to original
+          ALTER TABLE ${this.config.tableName}
+          RENAME COLUMN "${legacyColumn}" TO "${currentColumn}";
+
+          -- Add back original PK using the int index
+          ALTER TABLE ${this.config.tableName}
+          ADD CONSTRAINT "${this.config.tableName}_pkey"
+          PRIMARY KEY USING INDEX "${indexName}";
+
+          -- Convert sequence back to integer
+          ALTER SEQUENCE ${sequenceName} AS integer;
+
+          -- Set sequence ownership back to original column
+          ALTER SEQUENCE ${sequenceName}
+          OWNED BY ${this.config.tableName}."${currentColumn}";
+
+          -- Remove default from new column
+          ALTER TABLE ${this.config.tableName}
+          ALTER COLUMN "${newColumn}"
+          DROP DEFAULT;
+
+          -- Set default on column
+          ALTER TABLE ${this.config.tableName}
+          ALTER COLUMN "${currentColumn}"
+          SET DEFAULT nextval('"${sequenceName}"'::regclass);
        `
      );
    } else {
+      // First remove the to_bigint triggers on the main table.
+      await this.dropTriggers(
+        client,
+        {
+          tableName: this.config.tableName,
+          isPrimaryKey: true,
+        },
+        SYNC_DIRECTION.TO_BIGINT
+      );
+
       await this.executeSql(
         client,
         `
@@ -988,16 +1178,22 @@ class IntToBigIntMigration {
 
         -- Add new PK using the prepared unique index
        ALTER TABLE ${this.config.tableName}
-        ADD CONSTRAINT ${this.config.tableName}_pkey
-        PRIMARY KEY USING INDEX ${indexName};
+        ADD CONSTRAINT "${this.config.tableName}_pkey"
+        PRIMARY KEY USING INDEX "${indexName}";
 
-        -- Set sequence ownership (this also sets the default and removes old defaults)
+        -- Set sequence ownership
        ALTER SEQUENCE ${sequenceName}
        OWNED BY ${this.config.tableName}."${currentColumn}";
 
-        -- Drop old trigger and function
-        DROP TRIGGER IF EXISTS ${triggerName} ON ${this.config.tableName};
-        DROP FUNCTION IF EXISTS ${triggerFunction}();
+        -- Remove default from legacy column
+        ALTER TABLE ${this.config.tableName}
+        ALTER COLUMN "${legacyColumn}"
+        DROP DEFAULT;
+
+        -- Set default on column
+        ALTER TABLE ${this.config.tableName}
+        ALTER COLUMN "${currentColumn}"
+        SET DEFAULT nextval('"${sequenceName}"'::regclass);
      `
    );
  }
@@ -1025,41 +1221,48 @@ class IntToBigIntMigration {
     );
 
     if (isRollback) {
+      // First remove the to_legacy triggers on the referencing table.
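+      // (Same reasoning as the main table: the to_legacy fk trigger still
+      // writes to "<fk_column>_legacy", which is renamed back below, so it
+      // must be dropped before the renames.)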
+ await this.dropTriggers( + client, + { + tableName: ref.tableName, + isPrimaryKey: false, + }, + SYNC_DIRECTION.TO_LEGACY + ); + await this.executeSql( client, ` - -- Drop current FK constraint (bigint version) - ALTER TABLE ${ref.tableName} - DROP CONSTRAINT "${constraintName}"; - - -- Rename current bigint column to temp - ALTER TABLE ${ref.tableName} - RENAME COLUMN "${currentColumn}" TO "${newColumn}"; - - -- Restore original column from legacy - ALTER TABLE ${ref.tableName} - RENAME COLUMN "${legacyColumn}" TO "${currentColumn}"; - - -- Add back original FK constraint - ALTER TABLE ${ref.tableName} - ADD CONSTRAINT "${constraintName.replace("_bigint", "")}" - FOREIGN KEY ("${currentColumn}") - REFERENCES ${this.config.tableName}("${MAIN_TABLE_LEGACY_COLUMN}"); + -- Drop new FK constraint (this has to happen before we can drop the PK constraint) + ALTER TABLE ${ref.tableName} + DROP CONSTRAINT "${constraintName}"; - -- Clean up the bigint column - ALTER TABLE ${ref.tableName} - DROP COLUMN "${newColumn}"; + ALTER TABLE ${ref.tableName} + RENAME COLUMN "${currentColumn}" TO "${newColumn}"; - -- Drop legacy sync trigger and function - DROP TRIGGER IF EXISTS ${makeTriggerName({ isLegacy: true })} ON ${ref.tableName}; - DROP FUNCTION IF EXISTS ${makeTriggerFunctionName(ref.tableName, { isLegacy: true })}; + -- Rename old column to original name + ALTER TABLE ${ref.tableName} + RENAME COLUMN "${legacyColumn}" TO "${currentColumn}"; ` ); + + // TODO: Add back the old FK constraint. } else { + // First remove the to_bigint triggers on the referencing table. + await this.dropTriggers( + client, + { + tableName: ref.tableName, + isPrimaryKey: false, + }, + SYNC_DIRECTION.TO_BIGINT + ); + await this.executeSql( client, ` - -- Drop old FK constraint (this has to happen before we can rename) + -- Drop old FK constraint (this has to happen before we can drop the PK constraint) ALTER TABLE ${ref.tableName} DROP CONSTRAINT "${constraintName}"; @@ -1070,10 +1273,6 @@ class IntToBigIntMigration { -- Rename new column to final name ALTER TABLE ${ref.tableName} RENAME COLUMN "${newColumn}" TO "${currentColumn}"; - - -- Drop old trigger and function - DROP TRIGGER IF EXISTS fk_sync ON ${ref.tableName}; - DROP FUNCTION IF EXISTS ${ref.tableName}_fk_sync_trigger(); ` ); } @@ -1089,7 +1288,7 @@ class IntToBigIntMigration { const mainProgress = await this.checkTableProgress( client, this.config.tableName, - MAIN_TABLE_NEW_COLUMN + createColumnName.new(MAIN_ID_COLUMN) ); progress.push(mainProgress); @@ -1098,7 +1297,7 @@ class IntToBigIntMigration { const refProgress = await this.checkTableProgress( client, ref.tableName, - makeNewForeignKeyColumn(ref.foreignKeyColumn) + createColumnName.new(ref.foreignKeyColumn) ); progress.push(refProgress); } @@ -1120,17 +1319,21 @@ class IntToBigIntMigration { ` SELECT COUNT(*)::bigint as total_rows, - COUNT(${targetColumn})::bigint as migrated_rows, - ROUND((COUNT(${targetColumn})::numeric / NULLIF(COUNT(*)::numeric, 0) * 100), 2) as progress_percentage + COUNT("${targetColumn}")::bigint as migrated_rows, + ROUND((COUNT("${targetColumn}")::numeric / NULLIF(COUNT(*)::numeric, 0) * 100), 2) as progress_percentage FROM ${tableName} ` ); + const { total_rows, migrated_rows, progress_percentage } = rows[0]; + return { tableName, - totalRows: rows[0].total_rows, - migratedRows: rows[0].migrated_rows, - progressPercentage: parseInt(rows[0].progress_percentage, 10), + totalRows: total_rows, + migratedRows: migrated_rows, + progressPercentage: progress_percentage + ? 
parseInt(progress_percentage, 10) + : 100, }; } From 7994ff160c0e757eab3d2cfabff8fbbc8270e49d Mon Sep 17 00:00:00 2001 From: Flavien David Date: Mon, 2 Dec 2024 17:48:27 +0100 Subject: [PATCH 05/22] =?UTF-8?q?=E2=9C=A8?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../20241129_migrate_int_to_bigint.ts | 3 +- front/migrations/db/migrate_int_to_bigint.sql | 402 ------------------ 2 files changed, 1 insertion(+), 404 deletions(-) delete mode 100644 front/migrations/db/migrate_int_to_bigint.sql diff --git a/front/migrations/20241129_migrate_int_to_bigint.ts b/front/migrations/20241129_migrate_int_to_bigint.ts index 07ea88f07b4a..52420860406d 100644 --- a/front/migrations/20241129_migrate_int_to_bigint.ts +++ b/front/migrations/20241129_migrate_int_to_bigint.ts @@ -372,6 +372,7 @@ class IntToBigIntMigration { SYNC_DIRECTION.TO_LEGACY ); + // Then rename. await this.switchReferencingTable( client, ref, @@ -1246,8 +1247,6 @@ class IntToBigIntMigration { RENAME COLUMN "${legacyColumn}" TO "${currentColumn}"; ` ); - - // TODO: Add back the old FK constraint. } else { // First remove the to_bigint triggers on the referencing table. await this.dropTriggers( diff --git a/front/migrations/db/migrate_int_to_bigint.sql b/front/migrations/db/migrate_int_to_bigint.sql deleted file mode 100644 index fa9d828b3c56..000000000000 --- a/front/migrations/db/migrate_int_to_bigint.sql +++ /dev/null @@ -1,402 +0,0 @@ --- Migration script to convert INT primary keys and foreign keys to BIGINT --- This script handles: --- 1. Finding all referencing tables --- 2. Adding new BIGINT columns to all affected tables --- 3. Setting up synchronization triggers --- 4. Backfilling data in batches --- --- Usage: psql -v table_name=your_table -v batch_size=10000 -f migrate_int_to_bigint.sql ----------------------------------------------------------------------------- - --- Enable notice output and stop on error -\set ON_ERROR_STOP on -\set VERBOSITY verbose - --- Function to create the sync trigger (local to the table, name does not need to be unique) -CREATE OR REPLACE FUNCTION create_bigint_sync_function(p_table_name text) -RETURNS void AS $$ -BEGIN - EXECUTE format($func$ - CREATE OR REPLACE FUNCTION %I_bigint_sync_trigger() - RETURNS TRIGGER AS $BODY$ - BEGIN - NEW.new_id := NEW.id; - RETURN NEW; - END; - $BODY$ LANGUAGE plpgsql; - $func$, - p_table_name -- For function name - ); -END; -$$ LANGUAGE plpgsql; - --- Function to create the sync trigger -CREATE OR REPLACE FUNCTION create_bigint_sync_trigger(p_table_name text) -RETURNS void AS $$ -BEGIN - RAISE NOTICE 'Setting up triggers for table: %', p_table_name; - - -- First drop if exists (will show notice) - EXECUTE format('DROP TRIGGER IF EXISTS bigint_sync ON %I', p_table_name); - - -- Then create new trigger - EXECUTE format($trig$ - CREATE TRIGGER bigint_sync - BEFORE INSERT OR UPDATE ON %I - FOR EACH ROW - EXECUTE FUNCTION %I_bigint_sync_trigger(); - $trig$, - p_table_name, -- For ON table - p_table_name -- For function name - ); - - RAISE NOTICE 'Completed trigger setup for table: %', p_table_name; -END; -$$ LANGUAGE plpgsql; - --- Function to create the foreign key sync function -CREATE OR REPLACE FUNCTION create_fk_sync_function( - p_table_name text, - p_fk_column text -) RETURNS void AS $$ -BEGIN - EXECUTE format($func$ - CREATE OR REPLACE FUNCTION %I_fk_sync_trigger() - RETURNS TRIGGER AS $TRIG$ - BEGIN - NEW.%I := NEW.%I; - RETURN NEW; - END; - $TRIG$ LANGUAGE plpgsql; - $func$, - p_table_name, -- For function name 
- format('%s_new', p_fk_column), -- For new column - p_fk_column -- For old column - ); -END; -$$ LANGUAGE plpgsql; - --- Function to create the foreign key sync trigger -CREATE OR REPLACE FUNCTION create_fk_sync_trigger( - p_table_name text -) RETURNS void AS $$ -BEGIN - RAISE NOTICE 'Setting up FK triggers for table: %', p_table_name; - - -- First drop if exists (will show notice) - EXECUTE format('DROP TRIGGER IF EXISTS fk_sync ON %I', p_table_name); - - -- Then create new trigger - EXECUTE format($trig$ - CREATE TRIGGER fk_sync - BEFORE INSERT OR UPDATE ON %I - FOR EACH ROW - EXECUTE FUNCTION %I_fk_sync_trigger(); - $trig$, - p_table_name, -- For ON table - p_table_name, -- For ON table again - p_table_name -- For function name - ); - - RAISE NOTICE 'Completed FK trigger setup for table: %', p_table_name; -END; -$$ LANGUAGE plpgsql; - --- Batching procedure - handles its own transactions -CREATE OR REPLACE PROCEDURE backfill_column_in_batches( - p_table_name text, -- Table to update - p_source_column text, -- Column to copy from - p_target_column text, -- Column to copy to - p_batch_size int -- Batch size -) AS $$ -DECLARE - v_max_id INTEGER; - v_current_id INTEGER := 0; -BEGIN - -- Get max id - EXECUTE format('SELECT COALESCE(MAX(id), 0) FROM %I', p_table_name) INTO v_max_id; - - RAISE NOTICE 'Starting backfill of %.% into %.%', - p_table_name, p_source_column, p_table_name, p_target_column; - - -- Process in batches - WHILE v_current_id <= v_max_id LOOP - -- Update batch - EXECUTE format( - 'UPDATE %I SET %I = %I - WHERE id > $1 AND id <= $2 - AND %I IS NULL', -- Only processes unprocessed rows - p_table_name, - p_target_column, - p_source_column, - p_target_column - ) USING v_current_id, v_current_id + p_batch_size; - - v_current_id := v_current_id + p_batch_size; - RAISE NOTICE 'Processed % up to id % of %', - p_table_name, v_current_id, v_max_id; - COMMIT; -- Each batch in its own transaction - END LOOP; -END; -$$ LANGUAGE plpgsql; - --- Function to verify backfill progress -CREATE OR REPLACE FUNCTION check_migration_progress( - p_table_name text, - p_refs jsonb -) RETURNS TABLE ( - table_name text, - total_rows bigint, - migrated_rows bigint, - progress_percentage numeric -) AS $$ -DECLARE - r RECORD; -BEGIN - -- Check main table - RETURN QUERY - EXECUTE format( - 'SELECT %L::text as table_name, - COUNT(*)::bigint as total_rows, - COUNT(new_id)::bigint as migrated_rows, - ROUND((COUNT(new_id)::numeric / COUNT(*)::numeric * 100), 2) as progress_percentage - FROM %I', - p_table_name, p_table_name - ); - - -- Check each referencing table - FOR r IN SELECT * FROM jsonb_array_elements(p_refs) AS t(ref) - LOOP - RETURN QUERY - EXECUTE format( - 'SELECT %L::text as table_name, - COUNT(*)::bigint as total_rows, - COUNT(%I)::bigint as migrated_rows, - ROUND((COUNT(%I)::numeric / COUNT(*)::numeric * 100), 2) as progress_percentage - FROM %I', - r.ref->>'table_name', -- table_name - format('%s_new', r.ref->>'foreign_key_column'), -- new column count - format('%s_new', r.ref->>'foreign_key_column'), -- new column ratio - r.ref->>'table_name' -- table name - ); - END LOOP; -END; -$$ LANGUAGE plpgsql; - --- 1. 
Setup Phase (DDL operations, auto-committing) -CREATE OR REPLACE FUNCTION setup_bigint_migration( - p_table_name text -) RETURNS jsonb AS $$ -DECLARE - r RECORD; - v_referencing_tables jsonb; - v_table_exists boolean; - v_count int; -BEGIN - -- Validate parameter - IF p_table_name IS NULL THEN - RAISE EXCEPTION 'Table name must be provided'; - END IF; - - -- First verify the table exists - EXECUTE format(' - SELECT EXISTS ( - SELECT 1 FROM information_schema.tables - WHERE table_name = %L - )', p_table_name) INTO v_table_exists; - - IF NOT v_table_exists THEN - RAISE EXCEPTION 'Table % does not exist', p_table_name; - END IF; - - RAISE NOTICE 'Starting setup for table: %', p_table_name; - - -- Find and store all referencing tables - -- This query identifies all tables that have foreign keys pointing to our target table - -- We store them in a temporary table for easier processing - -- Get referencing tables info - EXECUTE format(' - SELECT jsonb_agg( - jsonb_build_object( - ''schema'', table_schema, - ''table_name'', table_name, - ''foreign_key_column'', foreign_key_column, - ''constraint_name'', constraint_name - ) - ) - FROM ( - SELECT DISTINCT - tc.table_schema, - tc.table_name, - kcu.column_name as foreign_key_column, - tc.constraint_name - FROM - information_schema.table_constraints AS tc - JOIN information_schema.key_column_usage AS kcu - ON tc.constraint_name = kcu.constraint_name - JOIN information_schema.constraint_column_usage AS ccu - ON ccu.constraint_name = tc.constraint_name - WHERE - tc.constraint_type = ''FOREIGN KEY'' - AND ccu.table_name = %L - AND ccu.column_name = ''id'' - ) t', - p_table_name - ) INTO v_referencing_tables; - - SELECT jsonb_array_length(COALESCE(v_referencing_tables, '[]'::jsonb)) INTO v_count; - RAISE NOTICE 'Found % referencing tables', v_count; - - -- Add new columns - -- Add BIGINT column to main table - EXECUTE format('ALTER TABLE %I ADD COLUMN IF NOT EXISTS new_id BIGINT', p_table_name); - RAISE NOTICE 'Created new column in %', p_table_name; - - -- Add columns to referencing tables - FOR r IN SELECT * FROM jsonb_array_elements(v_referencing_tables) AS t(ref) - LOOP - RAISE NOTICE 'Adding new column to %', r.ref->>'table_name'; - - EXECUTE format( - 'ALTER TABLE %I ADD COLUMN IF NOT EXISTS %I BIGINT', - r.ref->>'table_name', - format('%s_new', r.ref->>'foreign_key_column') - ); - END LOOP; - - -- Setup triggers - -- Setup trigger on main table to keep new_id in sync with id - PERFORM create_bigint_sync_function(p_table_name); - PERFORM create_bigint_sync_trigger(p_table_name); - - FOR r IN SELECT * FROM jsonb_array_elements(v_referencing_tables) AS t(ref) - LOOP - RAISE NOTICE 'Creating trigger for %', r.ref->>'table_name'; - - -- Create FK sync function and trigger - PERFORM create_fk_sync_function(r.ref->>'table_name', r.ref->>'foreign_key_column'); - PERFORM create_fk_sync_trigger(r.ref->>'table_name'); - END LOOP; - - -- Return the referencing tables info - RETURN v_referencing_tables; -END; -$$ LANGUAGE plpgsql; - - --- 2. 
Backfill Phase (as a procedure to handle transactions) -CREATE OR REPLACE PROCEDURE backfill_bigint_migration( - p_table_name text, - p_refs jsonb, - p_batch_size int -) AS $$ -DECLARE - r RECORD; -BEGIN - -- Validate parameters - IF p_batch_size IS NULL THEN - RAISE EXCEPTION 'Batch size must be provided'; - END IF; - - -- Backfill main table - CALL backfill_column_in_batches( - p_table_name, -- table name - 'id', -- source column - 'new_id', -- target column - p_batch_size -- batch size - ); - - -- Backfill referencing tables - FOR r IN SELECT * FROM jsonb_array_elements(p_refs) AS t(ref) - LOOP - -- Process each referencing table in batches - CALL backfill_column_in_batches( - r.ref->>'table_name', -- table name - r.ref->>'foreign_key_column', -- source column - format('%s_new', r.ref->>'foreign_key_column'), -- target column - p_batch_size -- batch size - ); - END LOOP; -END; -$$ LANGUAGE plpgsql; - - --- We don't use function here because we want to handle transactions. --- The function rollbacks the entire transaction if an error occurs. - --- Validate the inputs. -\if :{?step} -\else - \echo 'Error: step parameter is required' - \quit -\endif - -\if :{?table_name} -\else - \echo 'Error: table_name parameter is required' - \quit -\endif - -\if :{?batch_size} -\else - \echo 'Error: batch_size parameter is required' - \quit -\endif - --- Main execution flow based on step - --- Create status table if it doesn't exist -CREATE TABLE IF NOT EXISTS bigint_migration_status ( - table_name text PRIMARY KEY, - setup_completed timestamptz, - backfill_completed timestamptz, - cutover_completed timestamptz, - referencing_tables jsonb -- Store referencing tables info as JSON -); - --- 1. Setup (returns temp table name) -\set step_is_setup `echo :step | grep -q '^setup$' && echo 1 || echo 0` -\if :step_is_setup - -- Run setup and store result - INSERT INTO bigint_migration_status (table_name, referencing_tables, setup_completed) - VALUES ( - :'table_name', - (SELECT setup_bigint_migration(:'table_name')), - NOW() - ) - ON CONFLICT (table_name) DO UPDATE - SET referencing_tables = EXCLUDED.referencing_tables, - setup_completed = NOW(); - - \echo 'Setup completed. You can now run the backfill step.' -\endif - --- Backfill Phase -\set step_is_backfill `echo :step | grep -q '^backfill$' && echo 1 || echo 0` -\if :step_is_backfill - \echo 'Step 2: Starting backfill' - - -- Get temp table name directly in the current connection - \gset - SELECT - referencing_tables::text as refs - FROM bigint_migration_status - WHERE table_name = :'table_name' \gset - - \if :{?refs} - -- Run backfill - CALL backfill_bigint_migration(:'table_name', :'refs'::jsonb, :batch_size); - - -- Update status - UPDATE bigint_migration_status - SET backfill_completed = NOW() - WHERE table_name = :'table_name'; - - -- Show progress - SELECT * FROM check_migration_progress(:'table_name', :'refs'::jsonb); - \echo 'Backfill completed. You can now run the cutover step.' 
- \else - \echo 'Error: Setup step must be completed first' - \quit - \endif -\endif \ No newline at end of file From 201ca81de886e6aee95130b357938f677b9fc905 Mon Sep 17 00:00:00 2001 From: Flavien David Date: Mon, 2 Dec 2024 18:05:41 +0100 Subject: [PATCH 06/22] =?UTF-8?q?=E2=9C=82=EF=B8=8F?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- front/migrations/20241129_migrate_int_to_bigint.ts | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/front/migrations/20241129_migrate_int_to_bigint.ts b/front/migrations/20241129_migrate_int_to_bigint.ts index 52420860406d..309ff1278c4a 100644 --- a/front/migrations/20241129_migrate_int_to_bigint.ts +++ b/front/migrations/20241129_migrate_int_to_bigint.ts @@ -70,11 +70,9 @@ export const SYNC_DIRECTION = { TO_LEGACY: "to_legacy", } as const; -// Make these type-safe using TypeScript literal types type ColumnType = (typeof COLUMN_TYPE)[keyof typeof COLUMN_TYPE]; type SyncDirection = (typeof SYNC_DIRECTION)[keyof typeof SYNC_DIRECTION]; -// naming.ts export const createColumnName = { new: (baseColumn: string) => `${baseColumn}_new` as const, legacy: (baseColumn: string) => `${baseColumn}_legacy` as const, @@ -470,7 +468,7 @@ class IntToBigIntMigration { rows.forEach((row) => console.log( - chalk.dim(`• ${row.schema}.${row.tableName} (${row.foreignKeyColumn})`) + chalk.dim(`- ${row.schema}.${row.tableName} (${row.foreignKeyColumn})`) ) ); @@ -649,7 +647,7 @@ class IntToBigIntMigration { EXECUTE FUNCTION ${functionName}() ` ); - console.log(chalk.dim(`• Created trigger`)); + console.log(chalk.dim(`- Created trigger`)); console.log( chalk.green( @@ -744,7 +742,7 @@ class IntToBigIntMigration { rows.forEach((r) => console.log( chalk.magenta( - ` • ${chalk.bold(r.index_name)} (${r.column_names})${r.is_unique ? " UNIQUE" : ""}` + ` - ${chalk.bold(r.index_name)} (${r.column_names})${r.is_unique ? " UNIQUE" : ""}` ) ) ); From 88825e9386bc2c0b032a076e2d69f7e8b9df383b Mon Sep 17 00:00:00 2001 From: Flavien David Date: Tue, 3 Dec 2024 12:18:59 +0100 Subject: [PATCH 07/22] Add clean up step --- .../20241129_migrate_int_to_bigint.ts | 568 ++++++++++++++++-- 1 file changed, 526 insertions(+), 42 deletions(-) diff --git a/front/migrations/20241129_migrate_int_to_bigint.ts b/front/migrations/20241129_migrate_int_to_bigint.ts index 309ff1278c4a..4bee56089689 100644 --- a/front/migrations/20241129_migrate_int_to_bigint.ts +++ b/front/migrations/20241129_migrate_int_to_bigint.ts @@ -1,8 +1,8 @@ +import { assertNever } from "@dust-tt/types"; import chalk from "chalk"; import type { PoolClient } from "pg"; import { Pool } from "pg"; -import logger from "@app/logger/logger"; import { makeScript } from "@app/scripts/helpers"; // Types. 
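Note: the TypeScript script drives the same dual-write scheme as the SQL helpers deleted above. A BEFORE INSERT OR UPDATE trigger mirrors the INT column into its BIGINT shadow while the backfill catches up, so new writes stay consistent throughout. A minimal sketch of what the setup step installs, assuming a hypothetical table "workspaces" and the script's "id_new" naming (the function and trigger names below are illustrative, not the generated ones):

CREATE OR REPLACE FUNCTION workspaces_pk_sync_to_new()
RETURNS TRIGGER AS $$
BEGIN
  -- Mirror the INT primary key into the BIGINT shadow column on every write.
  NEW.id_new := NEW.id;
  RETURN NEW;
END;
$$ LANGUAGE plpgsql;

CREATE TRIGGER workspaces_pk_sync_to_new
BEFORE INSERT OR UPDATE ON workspaces
FOR EACH ROW
EXECUTE FUNCTION workspaces_pk_sync_to_new();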
@@ -55,6 +55,7 @@ const MigrationSteps = [
 "pre-cutover",
 "cutover",
 "rollback",
+ "cleanup",
] as const;

type MigrationStepType = (typeof MigrationSteps)[number];
@@ -102,6 +103,8 @@ export const createTriggerNames = {
export const createConstraintName = {
 notNull: (tableName: string, type: ColumnType) =>
 `${tableName}_not_null_${type}` as const,
+ notNullForColumn: (tableName: string, columnName: string) =>
+ `${tableName}_not_null_${columnName}` as const,
 foreignKey: (tableName: string, columnName: string, type: ColumnType) =>
 `${tableName}_${columnName}_fkey_${type}` as const,
};
@@ -171,8 +174,12 @@ class IntToBigIntMigration {
 await this.rollback();
 break;

+ case "cleanup":
+ await this.cleanup();
+ break;
+
 default:
- throw new Error(`Unknown step: ${step}`);
+ assertNever(step);
 }
 } catch (error) {
 throw new MigrationError(
@@ -184,6 +191,7 @@ class IntToBigIntMigration {
 }

 // Setup Phase
+
 private async setup(): Promise<void> {
 const client = await this.pool.connect();
 try {
@@ -211,6 +219,7 @@ class IntToBigIntMigration {
 }

 // Backfill Phase
+
 private async backfill(): Promise<void> {
 const client = await this.pool.connect();
 try {
@@ -260,9 +269,24 @@ class IntToBigIntMigration {
 chalk.yellow(`[Pre-Cutover] Creating indexes and constraints`)
 );

- // Create indexes concurrently.
+ // Create indexes concurrently
 await this.createIndexes(client, referencingTables);

+ // Set NOT NULL constraints on the new columns
+ for (const ref of referencingTables) {
+ const isNotNull = await this.isColumnNotNull(client, {
+ tableName: ref.tableName,
+ columnName: ref.foreignKeyColumn,
+ });
+
+ if (isNotNull) {
+ await this.setNotNullConstraint(client, {
+ tableName: ref.tableName,
+ columnName: createColumnName.new(ref.foreignKeyColumn),
+ });
+ }
+ }
+
 console.log(
 chalk.green(
 `[Pre-Cutover] Successfully created all indexes and constraints`
@@ -273,25 +297,8 @@ class IntToBigIntMigration {
 }
 }

- private async dropTriggers(
- client: PoolClient,
- { tableName, isPrimaryKey }: { tableName: string; isPrimaryKey: boolean },
- direction: SyncDirection
- ): Promise<void> {
- const triggerInfo = isPrimaryKey
- ? createTriggerNames.pk(tableName, direction)
- : createTriggerNames.fk(tableName, direction);
-
- await this.executeSql(
- client,
- `
- DROP TRIGGER IF EXISTS ${triggerInfo.trigger} ON ${tableName};
- DROP FUNCTION IF EXISTS ${triggerInfo.function};
- `
- );
- }
-
 // Cutover Phase
+
 private async cutover(): Promise<void> {
 const client = await this.pool.connect();
 try {
@@ -420,6 +427,96 @@ class IntToBigIntMigration {
 }
 }

+ // Clean up Phase
+
+ // We can't use a transaction for cleanup: CREATE INDEX CONCURRENTLY cannot run inside a transaction block.
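+ // For illustration, with a hypothetical table "workspaces":
+ //   BEGIN;
+ //   CREATE INDEX CONCURRENTLY workspaces_tmp_idx ON workspaces (id_new);
+ //   -- ERROR:  CREATE INDEX CONCURRENTLY cannot run inside a transaction block
+ // Every statement in this phase therefore runs in its own implicit transaction,
+ // which is also why each step below uses IF EXISTS / IF NOT EXISTS to stay re-runnable.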
+ private async cleanup(): Promise<void> {
+ const client = await this.pool.connect();
+ try {
+ // Find referencing tables
+ const referencingTables = await this.findReferencingTables(client);
+
+ // Drop triggers on the main table that were replicating to legacy columns
+ await this.dropTriggers(
+ client,
+ {
+ tableName: this.config.tableName,
+ isPrimaryKey: true,
+ },
+ SYNC_DIRECTION.TO_LEGACY
+ );
+
+ // Drop the rollback index that was created
+ const rollbackIndexName = createIndexName.primary(
+ this.config.tableName,
+ COLUMN_TYPE.INT
+ );
+ await this.executeSql(
+ client,
+ `DROP INDEX IF EXISTS "${rollbackIndexName}"`
+ );
+
+ // Drop the legacy column (this also drops the indexes)
+ const legacyColumn = createColumnName.legacy(MAIN_ID_COLUMN);
+ await this.dropColumn(client, {
+ tableName: this.config.tableName,
+ columnName: legacyColumn,
+ });
+
+ // Recreate the indexes without the `_bigint` suffix
+ await this.cleanupBigintIndexes(client, {
+ columnName: MAIN_ID_COLUMN,
+ tableName: this.config.tableName,
+ });
+
+ // Clean up the primary key constraint check.
+ const constraintName = createConstraintName.notNull(
+ this.config.tableName,
+ COLUMN_TYPE.BIGINT
+ );
+
+ await this.executeSql(
+ client,
+ `
+ ALTER TABLE ${this.config.tableName}
+ DROP CONSTRAINT IF EXISTS "${constraintName}";
+ `
+ );
+
+ for (const ref of referencingTables) {
+ // Drop triggers on referencing tables that were replicating to legacy columns
+ await this.dropTriggers(
+ client,
+ {
+ tableName: ref.tableName,
+ isPrimaryKey: false,
+ },
+ SYNC_DIRECTION.TO_LEGACY
+ );
+
+ // Drop the legacy column (this also drops the indexes)
+ const legacyColumn = createColumnName.legacy(ref.foreignKeyColumn);
+ await this.dropColumn(client, {
+ tableName: ref.tableName,
+ columnName: legacyColumn,
+ });
+
+ // Recreate the indexes without the `_bigint` suffix
+ await this.cleanupBigintIndexes(client, {
+ columnName: ref.foreignKeyColumn,
+ tableName: ref.tableName,
+ });
+
+ // Recreate the foreign key constraints without the `_bigint` suffix
+ await this.cleanupForeignKeyConstraints(client, ref);
+ }
+
+ console.log(chalk.green("[Cleanup] Successfully cleaned up all tables"));
+ } finally {
+ client.release();
+ }
+ }
+
 // Helper methods

 private async verifyTableExists(client: PoolClient): Promise<void> {
@@ -475,6 +572,154 @@ class IntToBigIntMigration {
 return rows;
 }

+ // Columns helpers.
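+ //
+ // These helpers read the PostgreSQL catalogs directly. For instance, the
+ // nullability probe below boils down to (hypothetical names):
+ //   SELECT is_nullable FROM information_schema.columns
+ //   WHERE table_name = 'messages' AND column_name = 'workspaceId';
+ //   -- 'NO' means the column is NOT NULL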
+
+ private async isColumnNotNull(
+ client: PoolClient,
+ {
+ tableName,
+ columnName,
+ }: {
+ tableName: string;
+ columnName: string;
+ }
+ ): Promise<boolean> {
+ const result = await this.executeSql<{ is_nullable: string }>(
+ client,
+ `
+ SELECT is_nullable
+ FROM information_schema.columns
+ WHERE table_name = $1
+ AND column_name = $2;
+ `,
+ [tableName, columnName]
+ );
+
+ return result.rows[0]?.is_nullable === "NO";
+ }
+
+ private async shouldUseCheckConstraint(
+ client: PoolClient,
+ tableName: string
+ ): Promise<boolean> {
+ // Get table size in bytes
+ const result = await this.executeSql<{ size_bytes: string }>(
+ client,
+ `
+ SELECT pg_table_size($1) as size_bytes
+ FROM pg_stat_user_tables
+ WHERE relname = $2
+ `,
+ [tableName, tableName]
+ );
+
+ const sizeBytes = parseInt(result.rows[0]?.size_bytes || "0", 10);
+ const sizeGB = sizeBytes / (1024 * 1024 * 1024);
+
+ console.log(
+ chalk.yellow(
+ `[Migration] Table ${chalk.bold(tableName)} size: ${chalk.bold(
+ `${Math.round(sizeGB * 100) / 100}GB`
+ )}`
+ )
+ );
+
+ // >= 1GB: Use CHECK constraint only
+ // < 1GB: Case by case
+ const LARGE_TABLE_THRESHOLD_GB = 1;
+ const useCheckConstraint = sizeGB >= LARGE_TABLE_THRESHOLD_GB;
+
+ console.log(
+ chalk.yellow(
+ `[Migration] Using ${chalk.bold(
+ useCheckConstraint ? "CHECK constraint" : "SET NOT NULL"
+ )} approach for table ${chalk.bold(tableName)}`
+ )
+ );
+
+ return useCheckConstraint;
+ }
+
+ private async setNotNullConstraint(
+ client: PoolClient,
+ {
+ tableName,
+ columnName,
+ }: {
+ tableName: string;
+ columnName: string;
+ }
+ ): Promise<void> {
+ try {
+ // PostgreSQL does not leverage the CHECK CONSTRAINT when altering the table to SET NOT NULL:
+ // that path does a full table scan while holding an ACCESS EXCLUSIVE lock.
+ // For tables bigger than 1GB we therefore add a CHECK constraint temporarily
+ // and clean it up later during off hours.
+ const useCheckConstraint = await this.shouldUseCheckConstraint(
+ client,
+ tableName
+ );
+ if (useCheckConstraint) {
+ const constraintName = createConstraintName.notNullForColumn(
+ tableName,
+ columnName
+ );
+
+ console.log(
+ chalk.yellow(
+ `[Constraints] Adding NOT NULL constraint for ${chalk.bold(
+ `${tableName}.${columnName}`
+ )}`
+ )
+ );
+
+ // Add the NOT NULL constraint without validation
+ await this.executeSql(
+ client,
+ `
+ ALTER TABLE ${tableName}
+ ADD CONSTRAINT "${constraintName}"
+ CHECK ("${columnName}" IS NOT NULL) NOT VALID;
+ `
+ );
+
+ // Validate the constraint
+ console.log(
+ chalk.yellow(
+ `[Constraints] Validating NOT NULL constraint for ${chalk.bold(
+ `${tableName}.${columnName}`
+ )}`
+ )
+ );
+
+ await this.executeSql(
+ client,
+ `
+ ALTER TABLE ${tableName}
+ VALIDATE CONSTRAINT "${constraintName}";
+ `
+ );
+ } else {
+ // /!\ This performs a full table scan and locks the table.
+ // ONLY PERFORM THIS ON SMALL TABLES.
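+ // For comparison, the staged path above amounts to (hypothetical names):
+ //   ALTER TABLE messages ADD CONSTRAINT "..." CHECK ("workspaceId_new" IS NOT NULL) NOT VALID;  -- brief lock, no scan
+ //   ALTER TABLE messages VALIDATE CONSTRAINT "...";  -- scans, but does not block writes
+ // whereas the plain SET NOT NULL below scans the whole table under an ACCESS EXCLUSIVE lock.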
+ await this.executeSql( + client, + ` + ALTER TABLE ${tableName} + ALTER COLUMN "${columnName}" SET NOT NULL; + ` + ); + } + } catch (error) { + console.error( + chalk.red( + `[ERROR] Failed to set NOT NULL constraint on ${tableName}.${columnName}` + ) + ); + throw error; + } + } + private async addNewColumns( client: PoolClient, referencingTables: ReferencingTable[] @@ -523,6 +768,18 @@ class IntToBigIntMigration { console.log(chalk.green(`[Columns] Successfully added all BigInt columns`)); } + private async dropColumn( + client: PoolClient, + { tableName, columnName }: { tableName: string; columnName: string } + ): Promise { + await this.executeSql( + client, + `ALTER TABLE ${tableName} DROP COLUMN IF EXISTS "${columnName}"` + ); + } + + // Triggers helpers. + private async setupTriggers( client: PoolClient, referencingTables: ReferencingTable[], @@ -697,6 +954,54 @@ class IntToBigIntMigration { } } + // Indexes helpers. + + private async findForeignKeyIndexes( + client: PoolClient, + referencingTables: ReferencingTable[] + ): Promise< + Array<{ + tableName: string; + columnName: string; + indexName: string; + }> + > { + // Build the list of (table_name, column_name) pairs for the WHERE clause + const conditions = referencingTables + .map((ref, i) => `($${i * 2 + 1}::text, $${i * 2 + 2}::text)`) + .join(", "); + + const params = referencingTables.flatMap((ref) => [ + ref.tableName, + ref.foreignKeyColumn, + ]); + + const result = await this.executeSql<{ + tableName: string; + columnName: string; + indexName: string; + }>( + client, + ` + SELECT DISTINCT + t.relname AS "tableName", + a.attname AS "columnName", + i.relname AS "indexName" + FROM pg_class t + JOIN pg_index ix ON t.oid = ix.indrelid + JOIN pg_class i ON ix.indexrelid = i.oid + JOIN pg_attribute a ON t.oid = a.attrelid + WHERE (t.relname, a.attname) IN (${conditions}) + AND array_position(ix.indkey, a.attnum) = 0 -- Only when it's the first column + AND t.relkind = 'r' + AND ix.indisprimary = false; + `, + params + ); + + return result.rows; + } + private async findCompositeIndexes( client: PoolClient, tableName: string, @@ -757,10 +1062,16 @@ class IntToBigIntMigration { private async createCompositeIndexes( client: PoolClient, { - tableName, - oldColumn, newColumn, - }: { tableName: string; oldColumn: string; newColumn: string } + oldColumn, + tableName, + transformIndexName, + }: { + newColumn: string; + oldColumn: string; + tableName: string; + transformIndexName: (indexName: string) => string; + } ): Promise { const compositeIndexes = await this.findCompositeIndexes( client, @@ -769,8 +1080,7 @@ class IntToBigIntMigration { ); for (const index of compositeIndexes) { - // Create new index name - const newIndexName = `${index.indexName}_bigint`; + const newIndexName = transformIndexName(index.indexName); // Replace old column with new column in the columns array const newColumns = index.columns.map((col) => @@ -790,6 +1100,43 @@ class IntToBigIntMigration { } } + private async cleanupBigintIndexes( + client: PoolClient, + { tableName, columnName }: { tableName: string; columnName: string } + ): Promise { + console.log(chalk.yellow(`[Cleanup] Processing indexes for ${tableName}`)); + + // Composite indexes. 
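+ //
+ // This is a rename-only pass: old and new column are the same here, so for a
+ // hypothetical suffixed index this emits e.g.
+ //   CREATE UNIQUE INDEX CONCURRENTLY IF NOT EXISTS "messages_workspaceId_name"
+ //   ON messages ("workspaceId", "name");
+ // and the "_bigint"-suffixed copy is then dropped concurrently below.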
+ + // Create clean indexes (without the _bigint suffix) + await this.createCompositeIndexes(client, { + tableName, + oldColumn: columnName, // current bigint column + newColumn: columnName, // same column (we're just removing suffix) + transformIndexName: (indexName) => indexName.replace("_bigint", ""), // remove _bigint suffix + }); + + // Find and drop old _bigint indexes + const allIndexes = await this.findCompositeIndexes( + client, + tableName, + columnName + ); + + for (const index of allIndexes) { + if (index.indexName.endsWith("_bigint")) { + await this.executeSql( + client, + `DROP INDEX CONCURRENTLY IF EXISTS "${index.indexName}";` + ); + + console.log( + chalk.green(`[Cleanup] Dropped index ${chalk.bold(index.indexName)}`) + ); + } + } + } + private async createIndexes( client: PoolClient, referencingTables: ReferencingTable[] @@ -827,13 +1174,23 @@ class IntToBigIntMigration { isPrimary: true, }); + // Transform index name to add _bigint suffix + const transformIndexName = (indexName: string) => `${indexName}_bigint`; + // Create composite indexes for the main table await this.createCompositeIndexes(client, { tableName: this.config.tableName, oldColumn: MAIN_ID_COLUMN, newColumn: newMainColumn, + transformIndexName, }); + // Find existing FK indexes for all referencing tables at once + const fkIndexes = await this.findForeignKeyIndexes( + client, + referencingTables + ); + // Create FK indexes and their composite indexes for (const ref of referencingTables) { console.log( @@ -844,18 +1201,33 @@ class IntToBigIntMigration { const newFKColumn = createColumnName.new(ref.foreignKeyColumn); - // Create the basic FK index - await this.createAndWaitForIndex(client, { - tableName: ref.tableName, - columnName: newFKColumn, - columnType: COLUMN_TYPE.BIGINT, - indexName: createIndexName.foreign( - ref.tableName, - ref.foreignKeyColumn, - COLUMN_TYPE.BIGINT - ), - isPrimary: false, - }); + // Check if this table/column has a dedicated FK index + const hasFKIndex = fkIndexes.some( + (idx) => + idx.tableName === ref.tableName && + idx.columnName === ref.foreignKeyColumn + ); + + // Only create FK index if one existed before + if (hasFKIndex) { + console.log( + chalk.yellow( + `[Indexes] Creating FK index for ${chalk.bold(ref.tableName)}` + ) + ); + + await this.createAndWaitForIndex(client, { + tableName: ref.tableName, + columnName: newFKColumn, + columnType: COLUMN_TYPE.BIGINT, + indexName: createIndexName.foreign( + ref.tableName, + ref.foreignKeyColumn, + COLUMN_TYPE.BIGINT + ), + isPrimary: false, + }); + } // Create foreign key constraint. await this.createForeignKeyConstraints( @@ -869,6 +1241,7 @@ class IntToBigIntMigration { tableName: ref.tableName, oldColumn: ref.foreignKeyColumn, newColumn: newFKColumn, + transformIndexName, }); } } @@ -1009,6 +1382,99 @@ class IntToBigIntMigration { } } + private async cleanupForeignKeyConstraints( + client: PoolClient, + ref: ReferencingTable + ) { + // Foreign key index. 
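+ //
+ // This mirrors the pre-cutover logic: the FK index loses its "_bigint" suffix,
+ // and the constraint is re-created online with the usual two-step pattern
+ // (hypothetical names):
+ //   ALTER TABLE messages ADD CONSTRAINT "messages_workspaceId_fkey"
+ //     FOREIGN KEY ("workspaceId") REFERENCES workspaces (id) NOT VALID;  -- fast, skips existing rows
+ //   ALTER TABLE messages VALIDATE CONSTRAINT "messages_workspaceId_fkey";  -- checks rows without blocking writes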
+ const fkIndexName = createIndexName.foreign( + ref.tableName, + ref.foreignKeyColumn, + COLUMN_TYPE.BIGINT + ); + const newFkIndexName = fkIndexName.replace("_bigint", ""); + + // Find existing FK indexes for all referencing tables at once + const fkIndexes = await this.findForeignKeyIndexes(client, [ref]); + + // Check if this table/column has a dedicated FK index + const hasFKIndex = fkIndexes.some( + (idx) => + idx.tableName === ref.tableName && + idx.columnName === ref.foreignKeyColumn + ); + + // Create new FK index without _bigint suffix + if (hasFKIndex) { + await this.createAndWaitForIndex(client, { + tableName: ref.tableName, + columnName: ref.foreignKeyColumn, + columnType: COLUMN_TYPE.BIGINT, + indexName: newFkIndexName, + isPrimary: false, + }); + + // Drop old FK index with _bigint suffix + await this.executeSql( + client, + `DROP INDEX CONCURRENTLY IF EXISTS "${fkIndexName}";` + ); + } + + // For cleanup, we want to rename from _bigint to clean name + const constraintName = createConstraintName.foreignKey( + ref.tableName, + ref.foreignKeyColumn, + COLUMN_TYPE.BIGINT + ); + + const newConstraintName = constraintName.replace("_bigint", ""); + + console.log( + chalk.yellow( + `[Cleanup] Creating clean FK constraint for ${chalk.bold(ref.tableName)}` + ) + ); + + // Create new FK without _bigint suffix + try { + await this.executeSql( + client, + ` + ALTER TABLE ${ref.tableName} + ADD CONSTRAINT "${newConstraintName}" + FOREIGN KEY ("${ref.foreignKeyColumn}") + REFERENCES ${this.config.tableName}(${MAIN_ID_COLUMN}) + ON UPDATE RESTRICT + ON DELETE RESTRICT + NOT VALID; + ` + ); + } catch (error) { + if (error instanceof Error && !error.message.includes("already exists")) { + throw error; + } + } + + await this.executeSql( + client, + `ALTER TABLE ${ref.tableName} VALIDATE CONSTRAINT "${newConstraintName}";` + ); + + // Drop old FK + await this.executeSql( + client, + `ALTER TABLE ${ref.tableName} DROP CONSTRAINT IF EXISTS "${constraintName}";` + ); + + console.log( + chalk.green( + `[Cleanup] Renamed FK constraint ${chalk.bold(constraintName)} to ${chalk.bold(newConstraintName)}` + ) + ); + return; + } + private async createForeignKeyConstraints( client: PoolClient, ref: ReferencingTable, @@ -1078,6 +1544,24 @@ class IntToBigIntMigration { // Swap columns + private async dropTriggers( + client: PoolClient, + { tableName, isPrimaryKey }: { tableName: string; isPrimaryKey: boolean }, + direction: SyncDirection + ): Promise { + const triggerInfo = isPrimaryKey + ? 
createTriggerNames.pk(tableName, direction) + : createTriggerNames.fk(tableName, direction); + + await this.executeSql( + client, + ` + DROP TRIGGER IF EXISTS ${triggerInfo.trigger} ON ${tableName}; + DROP FUNCTION IF EXISTS ${triggerInfo.function}; + ` + ); + } + private async switchMainTable( client: PoolClient, config: MainTableSwitchConfig, @@ -1136,12 +1620,12 @@ class IntToBigIntMigration { OWNED BY ${this.config.tableName}."${currentColumn}"; -- Remove default from new column - ALTER TABLE groups + ALTER TABLE "${this.config.tableName}" ALTER COLUMN "${newColumn}" DROP DEFAULT; -- Set default on column - ALTER TABLE groups + ALTER TABLE "${this.config.tableName}" ALTER COLUMN "${currentColumn}" SET DEFAULT nextval('"${sequenceName}"'::regclass); ` @@ -1185,12 +1669,12 @@ class IntToBigIntMigration { OWNED BY ${this.config.tableName}."${currentColumn}"; -- Remove default from legacy column - ALTER TABLE groups + ALTER TABLE "${this.config.tableName}" ALTER COLUMN "${legacyColumn}" DROP DEFAULT; -- Set default on column - ALTER TABLE groups + ALTER TABLE "${this.config.tableName}" ALTER COLUMN "${currentColumn}" SET DEFAULT nextval('"${sequenceName}"'::regclass); ` From 3e1e089201e64044aa74a6a10dec18ab884e7686 Mon Sep 17 00:00:00 2001 From: Flavien David Date: Tue, 3 Dec 2024 12:21:28 +0100 Subject: [PATCH 08/22] Address comments from review --- front/migrations/20241129_migrate_int_to_bigint.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/front/migrations/20241129_migrate_int_to_bigint.ts b/front/migrations/20241129_migrate_int_to_bigint.ts index 4bee56089689..dff9f403c846 100644 --- a/front/migrations/20241129_migrate_int_to_bigint.ts +++ b/front/migrations/20241129_migrate_int_to_bigint.ts @@ -323,7 +323,7 @@ class IntToBigIntMigration { currentColumn: MAIN_ID_COLUMN, newColumn: createColumnName.new(MAIN_ID_COLUMN), legacyColumn: createColumnName.legacy(MAIN_ID_COLUMN), - sequenceName: `${this.config.tableName}_id_seq`, + sequenceName: createSequenceName(this.config.tableName), indexName: createIndexName.primary( this.config.tableName, COLUMN_TYPE.BIGINT @@ -398,7 +398,7 @@ class IntToBigIntMigration { currentColumn: MAIN_ID_COLUMN, newColumn: createColumnName.new(MAIN_ID_COLUMN), legacyColumn: createColumnName.legacy(MAIN_ID_COLUMN), - sequenceName: `${this.config.tableName}_id_seq`, + sequenceName: createSequenceName(this.config.tableName), indexName: createIndexName.primary( this.config.tableName, // We are rolling back to int From 1457dc9826c457192e973ffad380db88d1245add Mon Sep 17 00:00:00 2001 From: Flavien David Date: Tue, 3 Dec 2024 13:57:34 +0100 Subject: [PATCH 09/22] Fix non nullable progress --- .../20241129_migrate_int_to_bigint.ts | 147 ++++++++++++++---- 1 file changed, 117 insertions(+), 30 deletions(-) diff --git a/front/migrations/20241129_migrate_int_to_bigint.ts b/front/migrations/20241129_migrate_int_to_bigint.ts index dff9f403c846..c4a3661b870b 100644 --- a/front/migrations/20241129_migrate_int_to_bigint.ts +++ b/front/migrations/20241129_migrate_int_to_bigint.ts @@ -1,4 +1,5 @@ import { assertNever } from "@dust-tt/types"; +import assert from "assert"; import chalk from "chalk"; import type { PoolClient } from "pg"; import { Pool } from "pg"; @@ -36,7 +37,7 @@ interface MigrationProgress { interface MainTableSwitchConfig { currentColumn: string; // e.g., 'id' - newColumn: string; // e.g., 'new_id' + newColumn: string; // e.g., 'id_new' legacyColumn: string; // e.g., 'id_legacy' sequenceName: string; // e.g., 'table_id_seq' indexName: 
string; // e.g., 'table_pkey_bigint' @@ -183,7 +184,7 @@ class IntToBigIntMigration { } } catch (error) { throw new MigrationError( - `Migration failed during ${step}`, + `Migration failed during ${step} on table ${this.config.tableName}`, step, error instanceof Error ? error : undefined ); @@ -1766,20 +1767,20 @@ class IntToBigIntMigration { const progress: MigrationProgress[] = []; // Check main table - const mainProgress = await this.checkTableProgress( - client, - this.config.tableName, - createColumnName.new(MAIN_ID_COLUMN) - ); + const mainProgress = await this.checkTableProgress(client, { + tableName: this.config.tableName, + sourceColumn: MAIN_ID_COLUMN, + targetColumn: createColumnName.new(MAIN_ID_COLUMN), + }); progress.push(mainProgress); // Check referencing tables for (const ref of referencingTables) { - const refProgress = await this.checkTableProgress( - client, - ref.tableName, - createColumnName.new(ref.foreignKeyColumn) - ); + const refProgress = await this.checkTableProgress(client, { + tableName: ref.tableName, + sourceColumn: ref.foreignKeyColumn, + targetColumn: createColumnName.new(ref.foreignKeyColumn), + }); progress.push(refProgress); } @@ -1788,30 +1789,45 @@ class IntToBigIntMigration { private async checkTableProgress( client: PoolClient, - tableName: string, - targetColumn: string + { + tableName, + sourceColumn, + targetColumn, + }: { tableName: string; sourceColumn: string; targetColumn: string } ): Promise { const { rows } = await this.executeSql<{ - total_rows: number; - migrated_rows: number; + source_count: number; + target_count: number; progress_percentage: string; }>( client, ` SELECT - COUNT(*)::bigint as total_rows, - COUNT("${targetColumn}")::bigint as migrated_rows, - ROUND((COUNT("${targetColumn}")::numeric / NULLIF(COUNT(*)::numeric, 0) * 100), 2) as progress_percentage + COUNT("${sourceColumn}")::bigint as source_count, + COUNT("${targetColumn}")::bigint as target_count, + CASE + WHEN COUNT("${sourceColumn}") = 0 THEN 100 + ELSE ROUND( + (COUNT("${targetColumn}")::numeric / + NULLIF(COUNT("${sourceColumn}")::numeric, 0) + * 100 + ), 2 + ) + END as progress_percentage FROM ${tableName} ` ); - const { total_rows, migrated_rows, progress_percentage } = rows[0]; + const { source_count, target_count, progress_percentage } = rows[0]; + + console.log(chalk.dim(`[Progress Details] ${tableName}:`)); + console.log(chalk.dim(`- Non-NULL in ${sourceColumn}: ${source_count}`)); + console.log(chalk.dim(`- Non-NULL in ${targetColumn}: ${target_count}`)); return { tableName, - totalRows: total_rows, - migratedRows: migrated_rows, + totalRows: source_count, + migratedRows: target_count, progressPercentage: progress_percentage ? 
parseInt(progress_percentage, 10) : 100, @@ -1870,6 +1886,24 @@ class IntToBigIntMigration { } } +async function getAllTables(connectionString: string): Promise { + const pool = new Pool({ connectionString }); + + const client = await pool.connect(); + try { + const { rows } = await client.query(` + SELECT table_name + FROM information_schema.tables + WHERE table_schema = 'public' + AND table_type = 'BASE TABLE' + `); + + return rows.map((r) => r.table_name); + } finally { + client.release(); + } +} + makeScript( { database: { @@ -1909,14 +1943,67 @@ makeScript( }, }, async ({ database, table, step, batchSize, schema, timeout, execute }) => { - const migration = new IntToBigIntMigration(database, { - tableName: table, - schemaName: schema, - batchSize, - timeoutSeconds: timeout, - dryRun: !execute, - }); + if (table === "all") { + assert( + process.env.NODE_ENV === "development", + "Only allowed in development" + ); - await migration.execute(step as MigrationStepType); + console.log( + chalk.red("About to run migration on all tables in development") + ); + + const tables = await getAllTables(database); + console.log(chalk.yellow(`Found ${tables.length} tables`)); + tables.forEach((t) => console.log(chalk.yellow(`- ${t}`))); + + for (const t of tables) { + if (["vaults", "groups"].includes(t)) { + console.log("Skipping table: ", t); + } + + console.log( + chalk.blue(`\n[Migration] Starting migration for table: ${t}`) + ); + const migration = new IntToBigIntMigration(database, { + tableName: t, + schemaName: schema, + batchSize, + timeoutSeconds: timeout, + dryRun: !execute, + }); + + // Run all steps for this table + for (const step of MigrationSteps) { + console.log(chalk.blue(`\n[${t}] Executing step: ${step}`)); + + try { + await migration.execute(step); + } catch (error) { + console.error( + chalk.red(`Failed during ${step} for table ${t}:`), + error + ); + throw error; // Stop the entire migration if any step fails + } + } + + console.log( + chalk.green(`\n[Migration] Completed migration for table: ${t}`) + ); + + await migration.execute(step as MigrationStepType); + } + } else { + const migration = new IntToBigIntMigration(database, { + tableName: table, + schemaName: schema, + batchSize, + timeoutSeconds: timeout, + dryRun: !execute, + }); + + await migration.execute(step as MigrationStepType); + } } ); From 3405d613ca744981df3565ecc3068afc39f2711a Mon Sep 17 00:00:00 2001 From: Flavien David Date: Tue, 3 Dec 2024 14:14:13 +0100 Subject: [PATCH 10/22] Handle whereClause on composite index --- .../20241129_migrate_int_to_bigint.ts | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/front/migrations/20241129_migrate_int_to_bigint.ts b/front/migrations/20241129_migrate_int_to_bigint.ts index c4a3661b870b..9de23583ce47 100644 --- a/front/migrations/20241129_migrate_int_to_bigint.ts +++ b/front/migrations/20241129_migrate_int_to_bigint.ts @@ -19,6 +19,7 @@ interface CompositeIndex { indexName: string; columns: string[]; isUnique: boolean; + whereClause: string; } interface ReferencingTable { @@ -967,6 +968,10 @@ class IntToBigIntMigration { indexName: string; }> > { + if (referencingTables.length === 0) { + return []; + } + // Build the list of (table_name, column_name) pairs for the WHERE clause const conditions = referencingTables .map((ref, i) => `($${i * 2 + 1}::text, $${i * 2 + 2}::text)`) @@ -1012,13 +1017,15 @@ class IntToBigIntMigration { index_name: string; column_names: string; is_unique: boolean; + where_clause: string; }>( client, ` SELECT 
i.relname as index_name, ix.indisunique as is_unique, - array_agg(a.attname ORDER BY k.ordering) as column_names + array_agg(a.attname ORDER BY k.ordering) as column_names, + pg_get_expr(ix.indpred, ix.indrelid) as where_clause FROM pg_class t JOIN pg_index ix ON t.oid = ix.indrelid JOIN pg_class i ON ix.indexrelid = i.oid @@ -1028,7 +1035,7 @@ class IntToBigIntMigration { WHERE t.relname = $1 AND t.relkind = 'r' AND ix.indisprimary = false - GROUP BY i.relname, ix.indisunique + GROUP BY i.relname, ix.indisunique, ix.indpred, ix.indrelid HAVING array_length(array_agg(a.attname), 1) > 1 AND EXISTS ( @@ -1048,7 +1055,7 @@ class IntToBigIntMigration { rows.forEach((r) => console.log( chalk.magenta( - ` - ${chalk.bold(r.index_name)} (${r.column_names})${r.is_unique ? " UNIQUE" : ""}` + ` - ${chalk.bold(r.index_name)} (${r.column_names})${r.is_unique ? " UNIQUE" : ""}${r.where_clause ? ` WHERE ${r.where_clause}` : ""}` ) ) ); @@ -1057,6 +1064,7 @@ class IntToBigIntMigration { indexName: row.index_name, columns: parsePostgresArray(row.column_names), isUnique: row.is_unique, + whereClause: row.where_clause, })); } @@ -1093,7 +1101,8 @@ class IntToBigIntMigration { ` CREATE ${index.isUnique ? "UNIQUE" : ""} INDEX CONCURRENTLY IF NOT EXISTS "${newIndexName}" - ON ${tableName}(${newColumns.join(", ")}); + ON ${tableName}(${newColumns.join(", ")}) + ${index.whereClause ? `WHERE ${index.whereClause}` : ""}; ` ); @@ -1958,7 +1967,7 @@ makeScript( tables.forEach((t) => console.log(chalk.yellow(`- ${t}`))); for (const t of tables) { - if (["vaults", "groups"].includes(t)) { + if (["vaults", "groups", "users"].includes(t)) { console.log("Skipping table: ", t); } From bf9ec877531069120e8e1e7666edf08cfc81d6f7 Mon Sep 17 00:00:00 2001 From: Flavien David Date: Tue, 3 Dec 2024 14:21:10 +0100 Subject: [PATCH 11/22] =?UTF-8?q?=E2=9C=A8?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- front/migrations/20241129_migrate_int_to_bigint.ts | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/front/migrations/20241129_migrate_int_to_bigint.ts b/front/migrations/20241129_migrate_int_to_bigint.ts index 9de23583ce47..174e35caf151 100644 --- a/front/migrations/20241129_migrate_int_to_bigint.ts +++ b/front/migrations/20241129_migrate_int_to_bigint.ts @@ -1967,13 +1967,10 @@ makeScript( tables.forEach((t) => console.log(chalk.yellow(`- ${t}`))); for (const t of tables) { - if (["vaults", "groups", "users"].includes(t)) { - console.log("Skipping table: ", t); - } - console.log( chalk.blue(`\n[Migration] Starting migration for table: ${t}`) ); + const migration = new IntToBigIntMigration(database, { tableName: t, schemaName: schema, @@ -1984,6 +1981,11 @@ makeScript( // Run all steps for this table for (const step of MigrationSteps) { + if (step === "rollback") { + // Skip rollback for all tables + continue; + } + console.log(chalk.blue(`\n[${t}] Executing step: ${step}`)); try { From 2a9ae0f3c918d6dafc441e71d49d494787f3194f Mon Sep 17 00:00:00 2001 From: Flavien David Date: Tue, 3 Dec 2024 14:31:16 +0100 Subject: [PATCH 12/22] =?UTF-8?q?=E2=9C=82=EF=B8=8F?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- front/migrations/20241129_migrate_int_to_bigint.ts | 2 -- 1 file changed, 2 deletions(-) diff --git a/front/migrations/20241129_migrate_int_to_bigint.ts b/front/migrations/20241129_migrate_int_to_bigint.ts index 174e35caf151..d724918e82e4 100644 --- a/front/migrations/20241129_migrate_int_to_bigint.ts +++ 
b/front/migrations/20241129_migrate_int_to_bigint.ts @@ -2002,8 +2002,6 @@ makeScript( console.log( chalk.green(`\n[Migration] Completed migration for table: ${t}`) ); - - await migration.execute(step as MigrationStepType); } } else { const migration = new IntToBigIntMigration(database, { From 1c49f23f8cbd2ecabaea24440b339a9dd6e5d450 Mon Sep 17 00:00:00 2001 From: Flavien David Date: Tue, 3 Dec 2024 14:36:35 +0100 Subject: [PATCH 13/22] Release --- front/migrations/20241129_migrate_int_to_bigint.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/front/migrations/20241129_migrate_int_to_bigint.ts b/front/migrations/20241129_migrate_int_to_bigint.ts index d724918e82e4..b20d836b176a 100644 --- a/front/migrations/20241129_migrate_int_to_bigint.ts +++ b/front/migrations/20241129_migrate_int_to_bigint.ts @@ -348,6 +348,8 @@ class IntToBigIntMigration { throw new Error( `Failed to switch columns: ${error instanceof Error ? error.message : "Unknown error"}` ); + } finally { + client.release(); } } From dba224f6244bc47cf4bad9d7be6d24406e68752e Mon Sep 17 00:00:00 2001 From: JulesBelveze Date: Tue, 3 Dec 2024 15:38:48 +0100 Subject: [PATCH 14/22] [front/migrations] - feature: enhance foreign key references with actions for update and delete - Added fields to track delete and update actions on foreign key constraints in migration script - The migration script now includes additional SQL JOIN to fetch action information from the pg_constraint table --- front/migrations/20241129_migrate_int_to_bigint.ts | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/front/migrations/20241129_migrate_int_to_bigint.ts b/front/migrations/20241129_migrate_int_to_bigint.ts index b20d836b176a..37db40227bc4 100644 --- a/front/migrations/20241129_migrate_int_to_bigint.ts +++ b/front/migrations/20241129_migrate_int_to_bigint.ts @@ -27,6 +27,8 @@ interface ReferencingTable { tableName: string; foreignKeyColumn: string; constraintName: string; + deleteAction?: string; + updateAction?: string; } interface MigrationProgress { @@ -548,12 +550,16 @@ class IntToBigIntMigration { tc.table_schema as schema, tc.table_name as "tableName", kcu.column_name as "foreignKeyColumn", - tc.constraint_name as "constraintName" + tc.constraint_name as "constraintName", + pc.confdeltype as "deleteAction", + pc.confupdtype as "updateAction" FROM information_schema.table_constraints tc JOIN information_schema.key_column_usage kcu ON tc.constraint_name = kcu.constraint_name JOIN information_schema.constraint_column_usage ccu ON ccu.constraint_name = tc.constraint_name + JOIN pg_constraint pc + ON tc.constraint_name = pc.conname WHERE tc.constraint_type = 'FOREIGN KEY' AND ccu.table_name = $1 AND ccu.column_name = 'id' From 3fce4d53d9397fa8a3c2707665f7a778da8cec37 Mon Sep 17 00:00:00 2001 From: JulesBelveze Date: Tue, 3 Dec 2024 15:43:04 +0100 Subject: [PATCH 15/22] [front/migrations] - refactor: update foreign key actions in bigint migration - Change foreign key `ON UPDATE` and `ON DELETE` actions to dynamic based on reference settings - Set actions to `SET NULL` when reference actions are marked with "r", otherwise keep as `RESTRICT` --- front/migrations/20241129_migrate_int_to_bigint.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/front/migrations/20241129_migrate_int_to_bigint.ts b/front/migrations/20241129_migrate_int_to_bigint.ts index 37db40227bc4..aceeb950f763 100644 --- a/front/migrations/20241129_migrate_int_to_bigint.ts +++ b/front/migrations/20241129_migrate_int_to_bigint.ts @@ 
-1463,8 +1463,8 @@ class IntToBigIntMigration { ADD CONSTRAINT "${newConstraintName}" FOREIGN KEY ("${ref.foreignKeyColumn}") REFERENCES ${this.config.tableName}(${MAIN_ID_COLUMN}) - ON UPDATE RESTRICT - ON DELETE RESTRICT + ON UPDATE ${ref.updateAction === "r" ? "SET NULL" : "RESTRICT"} + ON DELETE ${ref.deleteAction === "r" ? "SET NULL" : "RESTRICT"} NOT VALID; ` ); @@ -1533,8 +1533,8 @@ class IntToBigIntMigration { ADD CONSTRAINT "${newConstraintName}" FOREIGN KEY ("${sourceColumn}") REFERENCES ${this.config.tableName}(${targetColumn}) - ON UPDATE RESTRICT - ON DELETE RESTRICT + ON UPDATE ${ref.updateAction === "r" ? "SET NULL" : "RESTRICT"} + ON DELETE ${ref.deleteAction === "r" ? "SET NULL" : "RESTRICT"} NOT VALID; ` ); From 26b67e04a948deac5e753b8cd2a93f0a5ab61fbd Mon Sep 17 00:00:00 2001 From: JulesBelveze Date: Tue, 3 Dec 2024 15:48:12 +0100 Subject: [PATCH 16/22] [front] - fix: correct ON UPDATE/DELETE actions in foreign key constraints - Fixed an issue where the update and delete actions for foreign key constraints were incorrectly set to RESTRICT when they should be SET NULL - Updated the conditional logic to match 'n' values to SET NULL action instead of 'r' in the migration script --- front/migrations/20241129_migrate_int_to_bigint.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/front/migrations/20241129_migrate_int_to_bigint.ts b/front/migrations/20241129_migrate_int_to_bigint.ts index aceeb950f763..afb55eeaccd5 100644 --- a/front/migrations/20241129_migrate_int_to_bigint.ts +++ b/front/migrations/20241129_migrate_int_to_bigint.ts @@ -1463,8 +1463,8 @@ class IntToBigIntMigration { ADD CONSTRAINT "${newConstraintName}" FOREIGN KEY ("${ref.foreignKeyColumn}") REFERENCES ${this.config.tableName}(${MAIN_ID_COLUMN}) - ON UPDATE ${ref.updateAction === "r" ? "SET NULL" : "RESTRICT"} - ON DELETE ${ref.deleteAction === "r" ? "SET NULL" : "RESTRICT"} + ON UPDATE ${ref.updateAction === "n" ? "SET NULL" : "RESTRICT"} + ON DELETE ${ref.deleteAction === "n" ? "SET NULL" : "RESTRICT"} NOT VALID; ` ); @@ -1533,8 +1533,8 @@ class IntToBigIntMigration { ADD CONSTRAINT "${newConstraintName}" FOREIGN KEY ("${sourceColumn}") REFERENCES ${this.config.tableName}(${targetColumn}) - ON UPDATE ${ref.updateAction === "r" ? "SET NULL" : "RESTRICT"} - ON DELETE ${ref.deleteAction === "r" ? "SET NULL" : "RESTRICT"} + ON UPDATE ${ref.updateAction === "n" ? "SET NULL" : "RESTRICT"} + ON DELETE ${ref.deleteAction === "n" ? 
"SET NULL" : "RESTRICT"} NOT VALID; ` ); From e7dac697566febec07e941d412783898a4e74963 Mon Sep 17 00:00:00 2001 From: Flavien David Date: Tue, 3 Dec 2024 16:31:42 +0100 Subject: [PATCH 17/22] Add support for minId --- front/migrations/20241129_migrate_int_to_bigint.ts | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/front/migrations/20241129_migrate_int_to_bigint.ts b/front/migrations/20241129_migrate_int_to_bigint.ts index afb55eeaccd5..d93bf36136fd 100644 --- a/front/migrations/20241129_migrate_int_to_bigint.ts +++ b/front/migrations/20241129_migrate_int_to_bigint.ts @@ -931,16 +931,23 @@ class IntToBigIntMigration { targetColumn, }: { tableName: string; sourceColumn: string; targetColumn: string } ): Promise { - let currentId = 0; + const { rows: min_id_rows } = await this.executeSql<{ min_id: number }>( + client, + ` + SELECT COALESCE(MIN(id), 0) AS min_id FROM ${tableName} + ` + ); + + let currentId = min_id_rows[0].min_id; - const { rows } = await this.executeSql<{ max_id: number }>( + const { rows: max_id_rows } = await this.executeSql<{ max_id: number }>( client, ` SELECT COALESCE(MAX(id), 0) AS max_id FROM ${tableName} ` ); - const maxId = rows[0].max_id; + const maxId = max_id_rows[0].max_id; while (currentId <= maxId) { await this.executeSql( From d3edffb7b89bcbbc4fb59bd21da423480ded2850 Mon Sep 17 00:00:00 2001 From: Flavien David Date: Tue, 3 Dec 2024 16:36:24 +0100 Subject: [PATCH 18/22] Account for delta --- front/migrations/20241129_migrate_int_to_bigint.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/front/migrations/20241129_migrate_int_to_bigint.ts b/front/migrations/20241129_migrate_int_to_bigint.ts index d93bf36136fd..eb4aa706d82b 100644 --- a/front/migrations/20241129_migrate_int_to_bigint.ts +++ b/front/migrations/20241129_migrate_int_to_bigint.ts @@ -938,7 +938,7 @@ class IntToBigIntMigration { ` ); - let currentId = min_id_rows[0].min_id; + let currentId = min_id_rows[0].min_id > 0 ? min_id_rows[0].min_id - 1 : 0; const { rows: max_id_rows } = await this.executeSql<{ max_id: number }>( client, From f0c2b0918fca9a288c5d5bf22fcf8090b3ac284b Mon Sep 17 00:00:00 2001 From: Flavien David Date: Tue, 3 Dec 2024 18:03:36 +0100 Subject: [PATCH 19/22] Assert index name length --- front/migrations/20241129_migrate_int_to_bigint.ts | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/front/migrations/20241129_migrate_int_to_bigint.ts b/front/migrations/20241129_migrate_int_to_bigint.ts index eb4aa706d82b..788868785c28 100644 --- a/front/migrations/20241129_migrate_int_to_bigint.ts +++ b/front/migrations/20241129_migrate_int_to_bigint.ts @@ -457,6 +457,11 @@ class IntToBigIntMigration { this.config.tableName, COLUMN_TYPE.INT ); + assert( + rollbackIndexName.length < 63, + `Index name too long: ${rollbackIndexName}` + ); + await this.executeSql( client, `DROP INDEX IF EXISTS "${rollbackIndexName}"` @@ -558,7 +563,7 @@ class IntToBigIntMigration { ON tc.constraint_name = kcu.constraint_name JOIN information_schema.constraint_column_usage ccu ON ccu.constraint_name = tc.constraint_name - JOIN pg_constraint pc + JOIN pg_constraint pc ON tc.constraint_name = pc.conname WHERE tc.constraint_type = 'FOREIGN KEY' AND ccu.table_name = $1 @@ -1287,6 +1292,8 @@ class IntToBigIntMigration { tableName: string; } ): Promise { + assert(indexName.length < 63, `Index name too long: ${indexName}`); + // Only primary key indexes are unique. 
await this.executeSql( client, @@ -1417,6 +1424,8 @@ class IntToBigIntMigration { ref.foreignKeyColumn, COLUMN_TYPE.BIGINT ); + assert(fkIndexName.length < 63, `Index name too long: ${fkIndexName}`); + const newFkIndexName = fkIndexName.replace("_bigint", ""); // Find existing FK indexes for all referencing tables at once From 35926876ab30c5617a423af15a9a1030d78637ee Mon Sep 17 00:00:00 2001 From: Flavien David Date: Tue, 3 Dec 2024 18:42:37 +0100 Subject: [PATCH 20/22] Shorten index name --- front/migrations/20241129_migrate_int_to_bigint.ts | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/front/migrations/20241129_migrate_int_to_bigint.ts b/front/migrations/20241129_migrate_int_to_bigint.ts index 788868785c28..5a9c6ae0d10f 100644 --- a/front/migrations/20241129_migrate_int_to_bigint.ts +++ b/front/migrations/20241129_migrate_int_to_bigint.ts @@ -86,8 +86,18 @@ export const createColumnName = { export const createIndexName = { primary: (tableName: string, column: ColumnType) => `${tableName}_pkey_${column}` as const, - foreign: (tableName: string, columnName: string, column: ColumnType) => - `${tableName}_${columnName}_fk_${column}` as const, + foreign: (tableName: string, columnName: string, column: ColumnType) => { + // If table name is too long, create an abbreviation + const shortTableName = + tableName.length > 20 + ? tableName + .split("_") + .map((word) => word[0]) + .join("") // e.g., 'ncrce' + : tableName; + + return `${shortTableName}_${columnName}_fk_${column}` as const; + }, }; export const createTriggerNames = { From 999c75dc5dac5ed40f04f95d5504ea0d54a8a79d Mon Sep 17 00:00:00 2001 From: Flavien David Date: Tue, 3 Dec 2024 18:46:34 +0100 Subject: [PATCH 21/22] Shorten table name --- .../20241129_migrate_int_to_bigint.ts | 29 +++++++++---------- 1 file changed, 14 insertions(+), 15 deletions(-) diff --git a/front/migrations/20241129_migrate_int_to_bigint.ts b/front/migrations/20241129_migrate_int_to_bigint.ts index 5a9c6ae0d10f..168ba5bbb9e2 100644 --- a/front/migrations/20241129_migrate_int_to_bigint.ts +++ b/front/migrations/20241129_migrate_int_to_bigint.ts @@ -83,21 +83,20 @@ export const createColumnName = { legacy: (baseColumn: string) => `${baseColumn}_legacy` as const, }; +// If table name is too long, create an abbreviation +const shortenTableName = (tableName: string) => + tableName.length > 35 + ? tableName + .split("_") + .map((word) => word[0]) + .join("") // e.g., 'ncrce' + : tableName; + export const createIndexName = { primary: (tableName: string, column: ColumnType) => `${tableName}_pkey_${column}` as const, - foreign: (tableName: string, columnName: string, column: ColumnType) => { - // If table name is too long, create an abbreviation - const shortTableName = - tableName.length > 20 - ? 
tableName - .split("_") - .map((word) => word[0]) - .join("") // e.g., 'ncrce' - : tableName; - - return `${shortTableName}_${columnName}_fk_${column}` as const; - }, + foreign: (tableName: string, columnName: string, column: ColumnType) => + `${shortenTableName(tableName)}_${columnName}_fk_${column}` as const, }; export const createTriggerNames = { @@ -116,11 +115,11 @@ export const createTriggerNames = { export const createConstraintName = { notNull: (tableName: string, type: ColumnType) => - `${tableName}_not_null_${type}` as const, + `${shortenTableName(tableName)}_not_null_${type}` as const, notNullForColumn: (tableName: string, columnName: string) => - `${tableName}_not_null_${columnName}` as const, + `${shortenTableName(tableName)}_not_null_${columnName}` as const, foreignKey: (tableName: string, columnName: string, type: ColumnType) => - `${tableName}_${columnName}_fkey_${type}` as const, + `${shortenTableName(tableName)}_${columnName}_fkey_${type}` as const, }; export const createSequenceName = (tableName: string) => From 85d754eb55af06d029e20108416f71ee5981fe0f Mon Sep 17 00:00:00 2001 From: Flavien David Date: Thu, 5 Dec 2024 18:13:51 +0100 Subject: [PATCH 22/22] Only assert in production --- .../20241129_migrate_int_to_bigint.ts | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/front/migrations/20241129_migrate_int_to_bigint.ts b/front/migrations/20241129_migrate_int_to_bigint.ts index 168ba5bbb9e2..6f25ecbd2aa1 100644 --- a/front/migrations/20241129_migrate_int_to_bigint.ts +++ b/front/migrations/20241129_migrate_int_to_bigint.ts @@ -83,6 +83,12 @@ export const createColumnName = { legacy: (baseColumn: string) => `${baseColumn}_legacy` as const, }; +function assertInProduction(value: unknown, message?: string): asserts value { + if (process.env.NODE_ENV !== "development") { + assert(value, message); + } +} + // If table name is too long, create an abbreviation const shortenTableName = (tableName: string) => tableName.length > 35 @@ -466,7 +472,7 @@ class IntToBigIntMigration { this.config.tableName, COLUMN_TYPE.INT ); - assert( + assertInProduction( rollbackIndexName.length < 63, `Index name too long: ${rollbackIndexName}` ); @@ -1301,7 +1307,10 @@ class IntToBigIntMigration { tableName: string; } ): Promise { - assert(indexName.length < 63, `Index name too long: ${indexName}`); + assertInProduction( + indexName.length < 63, + `Index name too long: ${indexName}` + ); // Only primary key indexes are unique. await this.executeSql( @@ -1433,7 +1442,10 @@ class IntToBigIntMigration { ref.foreignKeyColumn, COLUMN_TYPE.BIGINT ); - assert(fkIndexName.length < 63, `Index name too long: ${fkIndexName}`); + assertInProduction( + fkIndexName.length < 63, + `Index name too long: ${fkIndexName}` + ); const newFkIndexName = fkIndexName.replace("_bigint", "");
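A closing note on the identifier-length assertions introduced in the last four patches: PostgreSQL truncates identifiers to 63 bytes (NAMEDATALEN - 1) with a NOTICE instead of raising an error. For example (hypothetical name):

CREATE INDEX a_very_long_index_name_that_keeps_going_and_clearly_exceeds_the_limit_bigint
ON workspaces (id_new);
-- NOTICE:  identifier "a_very_long_index_name_that_keeps_going_and_clearly_exceeds_the_limit_bigint"
--          will be truncated to "a_very_long_index_name_that_keeps_going_and_clearly_exceeds_the"

Silent truncation is dangerous here because it can eat the very "_bigint" suffix that the cleanup step later strips with .replace("_bigint", ""), so asserting on name length (and abbreviating long table names) is safer than relying on the NOTICE.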