chore: add database migration rollback system (#12289)
Co-authored-by: MrChaker <mr.chakerw@gmail.com>
This commit is contained in:
138
.agents/skills/db-migration/SKILL.md
Normal file
138
.agents/skills/db-migration/SKILL.md
Normal file
@@ -0,0 +1,138 @@
|
||||
---
|
||||
name: db-migration
|
||||
description: Creates TypeORM database migrations for the Activepieces server. Use when the user asks to add a column, create a table, add an index, or make any schema change to the database.
|
||||
---
|
||||
|
||||
# Activepieces DB Migration
|
||||
|
||||
Create TypeORM database migrations for schema changes in the Activepieces server API.
|
||||
|
||||
## Workflow
|
||||
|
||||
### Step 1: DETERMINE THE CHANGE
|
||||
|
||||
Before generating, identify:
|
||||
- Which table(s) are affected
|
||||
- What SQL is needed (`ADD COLUMN`, `CREATE TABLE`, `CREATE INDEX`, etc.)
|
||||
- Whether the migration is **breaking** (drops columns/tables, transforms data irreversibly — cannot be rolled back safely)
|
||||
- The current release version (check root `package.json` → `version`)
|
||||
|
||||
### Step 2: UPDATE THE ENTITY
|
||||
|
||||
Update the TypeORM entity file in `packages/server/api/src/app/` to reflect the new schema. This ensures the generation command can diff against the current state.
|
||||
|
||||
Array columns always use this pattern:
|
||||
```ts
|
||||
columnName: {
|
||||
type: String,
|
||||
array: true,
|
||||
nullable: false,
|
||||
}
|
||||
```
|
||||
|
||||
### Step 3: CREATE THE MIGRATION FILE
|
||||
|
||||
```ts
|
||||
import { QueryRunner } from 'typeorm'
|
||||
import { Migration } from '../../migration' // ← must import from ../..
|
||||
|
||||
export class AddMyColumn1234567890 implements Migration {
|
||||
name = 'AddMyColumn1234567890'
|
||||
breaking = false
|
||||
release = '0.78.0' // ← match the upcoming release version from root package.json
|
||||
|
||||
public async up(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(`ALTER TABLE "project" ADD COLUMN "description" text`)
|
||||
}
|
||||
|
||||
public async down(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(`ALTER TABLE "project" DROP COLUMN "description"`)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Required fields:**
|
||||
- `breaking = false` — set to `true` only if rolling back is destructive
|
||||
- `release = '<version>'` — the upcoming release version from root `package.json`
|
||||
- `down()` — must reverse `up()` (required)
|
||||
|
||||
CI fails if any of these are missing.
|
||||
|
||||
### Step 4: REGISTER THE MIGRATION
|
||||
|
||||
Open `packages/server/api/src/app/database/postgres-connection.ts` and add the new migration class to the `getMigrations()` array (at the end, in chronological order):
|
||||
|
||||
```ts
|
||||
import { AddMyColumn1234567890 } from './migration/postgres/1234567890-AddMyColumn'
|
||||
|
||||
// Inside getMigrations():
|
||||
return [
|
||||
// ... existing migrations ...
|
||||
AddMyColumn1234567890,
|
||||
]
|
||||
```
|
||||
|
||||
|
||||
## Note:
|
||||
Always try to create non-breaking migrations if possible to allow safe rollbacks
|
||||
|
||||
Zero errors are required before the task is complete.
|
||||
|
||||
---
|
||||
|
||||
## PGlite Compatibility
|
||||
|
||||
PGlite does **not** support `CONCURRENTLY` (it is a single-connection embedded database). When creating or dropping indexes with `CONCURRENTLY`, add a PGlite check:
|
||||
|
||||
```ts
|
||||
import { QueryRunner } from 'typeorm'
|
||||
import { Migration } from '../../migration'
|
||||
import { system } from '../../../helper/system/system'
|
||||
import { AppSystemProp } from '../../../helper/system/system-props'
|
||||
import { DatabaseType } from '../../database-type'
|
||||
|
||||
const isPGlite = system.get(AppSystemProp.DB_TYPE) === DatabaseType.PGLITE
|
||||
|
||||
export class AddMyIndex1234567890 implements Migration {
|
||||
name = 'AddMyIndex1234567890'
|
||||
breaking = false
|
||||
release = '0.78.0'
|
||||
transaction = false // Required when using CONCURRENTLY
|
||||
|
||||
public async up(queryRunner: QueryRunner): Promise<void> {
|
||||
if (isPGlite) {
|
||||
await queryRunner.query(`CREATE INDEX "idx_name" ON "table" ("column")`)
|
||||
} else {
|
||||
await queryRunner.query(`CREATE INDEX CONCURRENTLY "idx_name" ON "table" ("column")`)
|
||||
}
|
||||
}
|
||||
|
||||
public async down(queryRunner: QueryRunner): Promise<void> {
|
||||
if (isPGlite) {
|
||||
await queryRunner.query(`DROP INDEX "idx_name"`)
|
||||
} else {
|
||||
await queryRunner.query(`DROP INDEX CONCURRENTLY "idx_name"`)
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Set `transaction = false` whenever using `CONCURRENTLY` — PostgreSQL requires it.
|
||||
|
||||
---
|
||||
|
||||
## Quick Reference
|
||||
|
||||
| Field | Value |
|
||||
|---|---|
|
||||
| Migration files | `packages/server/api/src/app/database/migration/postgres/` |
|
||||
| Registration | `packages/server/api/src/app/database/postgres-connection.ts` |
|
||||
| `Migration` import | `import { Migration } from '../../migration'` |
|
||||
|
||||
|
||||
## Critical Reminders
|
||||
|
||||
1. **Never use `MigrationInterface`** — always use `Migration` from `../../migration`
|
||||
2. **`breaking`, `release`, and `down()` are mandatory** — CI will reject the migration without them
|
||||
3. **Register in `postgres-connection.ts`** — migration won't run without this
|
||||
4. **PGlite + CONCURRENTLY** — always guard with `isPGlite` and set `transaction = false`
|
||||
5
.github/workflows/ci.yml
vendored
5
.github/workflows/ci.yml
vendored
@@ -93,7 +93,7 @@ jobs:
|
||||
if: steps.check-framework-common.outputs.framework_or_common_changed == 'true'
|
||||
run: npx turbo run build --filter='@activepieces/piece-*'
|
||||
|
||||
- name: Run all tests and migration check in parallel
|
||||
- name: Run all tests and migration checks in parallel
|
||||
run: |
|
||||
set -euo pipefail
|
||||
pids=()
|
||||
@@ -104,6 +104,9 @@ jobs:
|
||||
npx turbo run test-ce test-ee test-cloud check-migrations --filter=api &
|
||||
pids+=($!)
|
||||
|
||||
npx tsx tools/scripts/check-migration-rollback.ts &
|
||||
pids+=($!)
|
||||
|
||||
status=0
|
||||
for pid in "${pids[@]}"; do
|
||||
if ! wait "$pid"; then
|
||||
|
||||
@@ -180,7 +180,8 @@
|
||||
"install/guides/setup-opentelemetry",
|
||||
"install/guides/setup-s3",
|
||||
"install/guides/separate-workers",
|
||||
"install/guides/setup-app-webhooks"
|
||||
"install/guides/setup-app-webhooks",
|
||||
"install/guides/rollback"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -48,10 +48,38 @@ PGlite is a lightweight PostgreSQL implementation that runs embedded, so migrati
|
||||
Replace `<MIGRATION_NAME>` with a descriptive name for your migration.
|
||||
</Step>
|
||||
|
||||
<Step title="Review Migration File">
|
||||
<Step title="Update Migration File">
|
||||
The command will generate a new migration file in `packages/server/api/src/app/database/migration/postgres/`.
|
||||
|
||||
Review the generated file and register it in `postgres-connection.ts`.
|
||||
|
||||
The generated file uses `MigrationInterface` — you need to update it:
|
||||
|
||||
1. Change `implements MigrationInterface` to `implements Migration`
|
||||
2. Update the import from `typeorm` to import `Migration` from `../../migration`
|
||||
3. Add `breaking = false` (or `true` if the migration drops columns/tables or transforms data irreversibly)
|
||||
4. Add `release = '<version>'` matching the upcoming release version (check `package.json` in the repo root)
|
||||
5. Implement the `down()` method with queries that reverse the `up()` changes (unless `breaking = true`)
|
||||
6. Register it in `postgres-connection.ts`
|
||||
|
||||
```typescript
|
||||
import { QueryRunner } from 'typeorm'
|
||||
import { Migration } from '../../migration'
|
||||
|
||||
export class AddMyColumn1234567890 implements Migration {
|
||||
name = 'AddMyColumn1234567890'
|
||||
breaking = false
|
||||
release = '0.78.0'
|
||||
|
||||
public async up(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(`ALTER TABLE "project" ADD COLUMN "description" text`)
|
||||
}
|
||||
|
||||
public async down(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(`ALTER TABLE "project" DROP COLUMN "description"`)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
CI will fail if `breaking`, `release`, or `down()` are missing on new migrations.
|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
@@ -60,15 +88,19 @@ PGlite is a lightweight PostgreSQL implementation that runs embedded, so migrati
|
||||
While PGlite is mostly PostgreSQL-compatible, some features are not supported. When using features like `CONCURRENTLY` for index operations, you need to conditionally handle PGlite:
|
||||
|
||||
```typescript
|
||||
import { AppSystemProp } from '@activepieces/server-utils'
|
||||
import { MigrationInterface, QueryRunner } from 'typeorm'
|
||||
import { DatabaseType, system } from '../../../helper/system/system'
|
||||
import { QueryRunner } from 'typeorm'
|
||||
import { Migration } from '../../migration'
|
||||
import { system } from '../../../helper/system/system'
|
||||
import { AppSystemProp } from '../../../helper/system/system-props'
|
||||
import { DatabaseType } from '../../database-type'
|
||||
|
||||
const databaseType = system.get(AppSystemProp.DB_TYPE)
|
||||
const isPGlite = databaseType === DatabaseType.PGLITE
|
||||
|
||||
export class AddMyIndex1234567890 implements MigrationInterface {
|
||||
export class AddMyIndex1234567890 implements Migration {
|
||||
name = 'AddMyIndex1234567890'
|
||||
breaking = false
|
||||
release = '0.78.0'
|
||||
transaction = false // Required when using CONCURRENTLY
|
||||
|
||||
public async up(queryRunner: QueryRunner): Promise<void> {
|
||||
|
||||
118
docs/install/guides/rollback.mdx
Normal file
118
docs/install/guides/rollback.mdx
Normal file
@@ -0,0 +1,118 @@
|
||||
---
|
||||
title: "Rollback Guide"
|
||||
description: "How to rollback Activepieces to a previous version"
|
||||
---
|
||||
|
||||
## Overview
|
||||
|
||||
Activepieces ships a rollback command that reverses database migrations when you need to downgrade to a previous version. Most releases are fully rollback-safe — the release notes will let you know if one isn't.
|
||||
|
||||
## Backups
|
||||
|
||||
For most upgrades you won't need a backup, but if you want to be extra safe:
|
||||
|
||||
### PostgreSQL
|
||||
|
||||
```bash
|
||||
pg_dump -Fc $DATABASE_URL > backup-pre-upgrade.dump
|
||||
```
|
||||
|
||||
### PGlite
|
||||
|
||||
Copy the `pglite` folder inside your configured `AP_CONFIG_PATH`:
|
||||
|
||||
```bash
|
||||
cp -r /path/to/config/pglite /path/to/config/pglite-backup
|
||||
```
|
||||
|
||||
## Release Notes
|
||||
|
||||
Releases that include non-reversible database changes will have a note at the bottom of the release notes mentioning which migrations are affected. If you don't see a note, the release is rollback-safe.
|
||||
|
||||
## Rolling Back
|
||||
|
||||
The rollback command runs against the **current (newer) image** since it has the migration reversal logic. After rolling back the database, you swap to the older image.
|
||||
|
||||
### Step 1: Stop Activepieces
|
||||
|
||||
```bash
|
||||
docker compose down
|
||||
```
|
||||
|
||||
### Step 2: Run the rollback command
|
||||
|
||||
Replace `0.78.0` with your current version and `0.77.0` with the version you want to go back to:
|
||||
|
||||
```bash
|
||||
docker run --rm --env-file .env --entrypoint npm \
|
||||
activepieces/activepieces:0.78.0 \
|
||||
run rollback -- --to 0.77.0
|
||||
```
|
||||
|
||||
If the release has breaking migrations, the command will ask you to confirm with `--force`:
|
||||
|
||||
```bash
|
||||
docker run --rm --env-file .env --entrypoint npm \
|
||||
activepieces/activepieces:0.78.0 \
|
||||
run rollback -- --to 0.77.0 --force
|
||||
```
|
||||
|
||||
### Step 3: Switch to the older image
|
||||
|
||||
Update your `docker-compose.yml`:
|
||||
|
||||
```yaml
|
||||
services:
|
||||
activepieces:
|
||||
image: activepieces/activepieces:0.77.0
|
||||
```
|
||||
|
||||
### Step 4: Start Activepieces
|
||||
|
||||
```bash
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
## Restoring From Backup
|
||||
|
||||
If you took a backup and prefer to restore from it:
|
||||
|
||||
```bash
|
||||
dropdb activepieces
|
||||
createdb activepieces
|
||||
pg_restore -d activepieces backup-pre-upgrade.dump
|
||||
```
|
||||
|
||||
Then switch your `docker-compose.yml` to the previous image version and start Activepieces.
|
||||
|
||||
## For Contributors
|
||||
|
||||
Every new database migration must:
|
||||
|
||||
1. **Implement `Migration`** instead of `MigrationInterface`
|
||||
2. **Set `breaking`** to `true` (destructive changes) or `false` (additive only)
|
||||
3. **Set `release`** to the target release version (e.g., `'0.78.0'`)
|
||||
4. **Implement `down()`** with working rollback queries
|
||||
|
||||
Example:
|
||||
|
||||
```typescript
|
||||
import { Migration } from '../../migration'
|
||||
import { QueryRunner } from 'typeorm'
|
||||
|
||||
export class AddNewColumn1710000000000 implements Migration {
|
||||
name = 'AddNewColumn1710000000000'
|
||||
breaking = false
|
||||
release = '0.78.0'
|
||||
|
||||
public async up(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(`ALTER TABLE "project" ADD COLUMN "description" text`)
|
||||
}
|
||||
|
||||
public async down(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query(`ALTER TABLE "project" DROP COLUMN "description"`)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
These requirements are enforced by CI — PRs with migrations that don't meet them will fail checks.
|
||||
@@ -17,6 +17,7 @@
|
||||
"test:e2e": "npx playwright test --config=packages/tests-e2e/playwright.config.ts",
|
||||
"db-migration": "npx turbo run db-migration --filter=api --",
|
||||
"check-migrations": "npx turbo run check-migrations --filter=api",
|
||||
"rollback": "npm run rollback --workspace=packages/server/api --",
|
||||
"lint-core": "turbo run lint --filter=@activepieces/shared --filter=@activepieces/engine --filter=api --filter=web --filter=@activepieces/server-utils --filter=worker --filter=ee-embed-sdk",
|
||||
"lint-pieces": "turbo run lint --filter='@activepieces/piece-*'",
|
||||
"lint-affected": "turbo run lint --affected",
|
||||
|
||||
@@ -150,6 +150,7 @@
|
||||
"check-migrations": "echo '🔍 Checking for schema changes...' && export $(cat .env.tests | xargs) && export AP_DEV_PIECES='' && export AP_ENVIRONMENT=dev && AP_EDITION=ce && ts-node --transpile-only -r tsconfig-paths/register -P tsconfig.app.json node_modules/typeorm/cli.js migration:run -d src/app/database/migration-data-source.ts > /dev/null 2>&1 && echo 'Checking for schema drift...' && ts-node --transpile-only -r tsconfig-paths/register -P tsconfig.app.json node_modules/typeorm/cli.js migration:generate -p -d src/app/database/migration-data-source.ts src/app/database/migration/postgres/check --dryrun --check || (echo '❌ ERROR: Schema changes detected without migration!' && exit 1) && echo '✅ No missing migrations detected'",
|
||||
"db": "ts-node --transpile-only -r tsconfig-paths/register -P tsconfig.app.json node_modules/typeorm/cli.js",
|
||||
"db-migration": "export $(cat .env.tests | xargs) && export AP_DEV_PIECES='' && export AP_ENVIRONMENT=dev && export AP_EDITION=ce && ts-node --transpile-only -r tsconfig-paths/register -P tsconfig.app.json node_modules/typeorm/cli.js migration:generate -p -d src/app/database/migration-data-source.ts",
|
||||
"test:docker": "UID=\"$(id -u)\" GID=\"$(id -g)\" docker compose --profile full -f docker-compose.test.yml up --exit-code-from app --attach app"
|
||||
"test:docker": "UID=\"$(id -u)\" GID=\"$(id -g)\" docker compose --profile full -f docker-compose.test.yml up --exit-code-from app --attach app",
|
||||
"rollback": "node dist/src/rollback.js"
|
||||
}
|
||||
}
|
||||
|
||||
7
packages/server/api/src/app/database/migration.ts
Normal file
7
packages/server/api/src/app/database/migration.ts
Normal file
@@ -0,0 +1,7 @@
|
||||
import { MigrationInterface } from 'typeorm'
|
||||
|
||||
/**
 * Activepieces migration contract: TypeORM's MigrationInterface extended
 * with rollback metadata consumed by the rollback tooling and CI checks.
 */
export type Migration = {
    // Migration class name; matched against rows in the "migrations" table.
    name?: string
    // true when reverting is destructive (possible data loss); rollback then requires --force.
    breaking?: boolean
    // Release version (semver, e.g. '0.78.0') this migration ships in.
    release?: string
} & MigrationInterface
|
||||
@@ -1,7 +1,7 @@
|
||||
import { TlsOptions } from 'node:tls'
|
||||
import 'pg'
|
||||
import { isNil, spreadIfDefined } from '@activepieces/shared'
|
||||
import { DataSource, MigrationInterface } from 'typeorm'
|
||||
import { DataSource } from 'typeorm'
|
||||
import { MakeStripeSubscriptionNullable1685053959806 } from '../ee/database/migrations/postgres/1685053959806-MakeStripeSubscriptionNullable'
|
||||
import { AddTemplates1685538145476 } from '../ee/database/migrations/postgres/1685538145476-addTemplates'
|
||||
import { ChangeToJsonToKeepKeysOrder1685991260335 } from '../ee/database/migrations/postgres/1685991260335-ChangeToJsonToPeserveKeys'
|
||||
@@ -25,6 +25,7 @@ import { AddMetadataFieldToFlowTemplates1744780800000 } from '../ee/database/mig
|
||||
import { system } from '../helper/system/system'
|
||||
import { AppSystemProp } from '../helper/system/system-props'
|
||||
import { commonProperties } from './database-connection'
|
||||
import { Migration } from './migration'
|
||||
import { AddPieceTypeAndPackageTypeToFlowVersion1696245170061 } from './migration/common/1696245170061-add-piece-type-and-package-type-to-flow-version'
|
||||
import { AddPieceTypeAndPackageTypeToFlowTemplate1696245170062 } from './migration/common/1696245170062-add-piece-type-and-package-type-to-flow-template'
|
||||
import { StoreCodeInsideFlow1697969398200 } from './migration/common/1697969398200-store-code-inside-flow'
|
||||
@@ -362,7 +363,7 @@ const getSslConfig = (): boolean | TlsOptions => {
|
||||
return false
|
||||
}
|
||||
|
||||
export const getMigrations = (): (new () => MigrationInterface)[] => {
|
||||
export const getMigrations = (): (new () => Migration)[] => {
|
||||
const migrations = [
|
||||
FlowAndFileProjectId1674788714498,
|
||||
initializeSchema1676238396411,
|
||||
@@ -772,6 +773,6 @@ export const createPostgresDataSource = (): DataSource => {
|
||||
type MigrationConfig = {
|
||||
migrationsRun?: boolean
|
||||
migrationsTransactionMode?: 'all' | 'none' | 'each'
|
||||
migrations?: (new () => MigrationInterface)[]
|
||||
migrations?: (new () => Migration)[]
|
||||
synchronize: false
|
||||
}
|
||||
|
||||
94
packages/server/api/src/app/database/rollback-migrations.ts
Normal file
94
packages/server/api/src/app/database/rollback-migrations.ts
Normal file
@@ -0,0 +1,94 @@
|
||||
/* eslint-disable no-console */
|
||||
import semver from 'semver'
|
||||
import { DataSource } from 'typeorm'
|
||||
import { Migration } from './migration'
|
||||
import { getMigrations } from './postgres-connection'
|
||||
|
||||
/**
 * Reverts all registered migrations whose `release` is newer than
 * `targetVersion`, newest first.
 *
 * Verifies the database's executed-migration history matches the expected
 * candidates before reverting anything, and refuses to revert migrations
 * marked `breaking` unless `force` is true (exits the process with code 1).
 */
export async function rollbackToVersion(params: {
    dataSource: DataSource
    targetVersion: string
    force: boolean
}): Promise<void> {
    const { dataSource, targetVersion, force } = params

    if (!semver.valid(targetVersion)) {
        throw new Error(`Invalid semver version: ${targetVersion}`)
    }

    const migrationClasses = getMigrations()
    // Candidates come back in rollback (reverse-chronological) order.
    const candidates = identifyCandidates(migrationClasses, targetVersion)
    if (candidates.length === 0) {
        console.log(`No migrations found to rollback for versions after ${targetVersion}`)
        return
    }

    console.log(`Found ${candidates.length} migration(s) to rollback:`)
    for (const m of candidates) {
        console.log(` - ${m.name ?? 'unknown'} (release: ${m.release}, breaking: ${m.breaking ?? false})`)
    }

    // Abort early if the DB's migration history doesn't match the candidate list;
    // undoLastMigration() below blindly pops the latest executed migration.
    await verifyDatabaseState(dataSource, candidates)

    const breakingMigrations = candidates.filter((m) => m.breaking === true)
    if (breakingMigrations.length > 0 && !force) {
        console.error('\nThe following migrations are marked as breaking:')
        for (const m of breakingMigrations) {
            console.error(` - ${m.name ?? 'unknown'}`)
        }
        console.error('\nRolling back breaking migrations may cause data loss.')
        console.error('Use --force to proceed anyway.')
        // CLI context: exit rather than throw so the user sees a clean message.
        process.exit(1)
    }

    // Revert one at a time; the earlier order check guarantees each
    // undoLastMigration() call pops the migration we expect.
    for (let i = 0; i < candidates.length; i++) {
        const migration = candidates[i]
        console.log(`\nReverting (${i + 1}/${candidates.length}): ${migration.name ?? 'unknown'}...`)
        await dataSource.undoLastMigration()
        console.log(' Reverted successfully.')
    }

    console.log(`\nRollback complete. Reverted ${candidates.length} migration(s).`)
}
|
||||
|
||||
export function identifyCandidates(
|
||||
migrationClasses: (new () => Migration)[],
|
||||
targetVersion: string,
|
||||
): Migration[] {
|
||||
const instances = migrationClasses.map((MigrationClass) => new MigrationClass())
|
||||
|
||||
const candidates = instances.filter((m) => {
|
||||
if (!m.release) {
|
||||
return false
|
||||
}
|
||||
return semver.gt(m.release, targetVersion)
|
||||
})
|
||||
|
||||
return candidates.reverse()
|
||||
}
|
||||
|
||||
export async function verifyDatabaseState(
|
||||
dataSource: DataSource,
|
||||
candidates: Migration[],
|
||||
): Promise<void> {
|
||||
const executedMigrations = await dataSource.query(
|
||||
'SELECT "name" FROM "migrations" ORDER BY "id" DESC LIMIT $1',
|
||||
[candidates.length],
|
||||
)
|
||||
|
||||
const executedNames: string[] = executedMigrations.map(
|
||||
(row: { name: string }) => row.name,
|
||||
)
|
||||
|
||||
for (let i = 0; i < candidates.length; i++) {
|
||||
const candidateName = candidates[i].name
|
||||
const executedName = executedNames[i]
|
||||
|
||||
if (candidateName !== executedName) {
|
||||
throw new Error(
|
||||
`Migration order mismatch: expected "${candidateName}" at position ${i + 1} from the top, `
|
||||
+ `but found "${executedName}" in the database. `
|
||||
+ 'The database state does not match the expected migration history. Aborting rollback.',
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
55
packages/server/api/src/rollback.ts
Normal file
55
packages/server/api/src/rollback.ts
Normal file
@@ -0,0 +1,55 @@
|
||||
/* eslint-disable no-console */
|
||||
import { databaseConnection } from './app/database/database-connection'
|
||||
import { rollbackToVersion } from './app/database/rollback-migrations'
|
||||
|
||||
function setupTimeZone(): void {
|
||||
process.env.TZ = 'UTC'
|
||||
}
|
||||
|
||||
function parseArgs(): { targetVersion: string, force: boolean } {
|
||||
const args = process.argv.slice(2)
|
||||
let targetVersion: string | undefined
|
||||
let force = false
|
||||
|
||||
for (let i = 0; i < args.length; i++) {
|
||||
if (args[i] === '--to' && args[i + 1]) {
|
||||
targetVersion = args[i + 1]
|
||||
i++
|
||||
}
|
||||
else if (args[i] === '--force') {
|
||||
force = true
|
||||
}
|
||||
}
|
||||
|
||||
if (!targetVersion) {
|
||||
console.error('Usage: npm run rollback -- --to <version> [--force]')
|
||||
console.error('Example: npm run rollback -- --to 0.77.0')
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
return { targetVersion, force }
|
||||
}
|
||||
|
||||
/**
 * Entry point for the rollback CLI: parses arguments, opens the database
 * connection, delegates to rollbackToVersion, then tears the connection down.
 */
async function main(): Promise<void> {
    setupTimeZone()

    const { targetVersion, force } = parseArgs()

    console.log('Initializing database connection...')
    const dataSource = databaseConnection()
    await dataSource.initialize()

    console.log(`Rolling back migrations to version ${targetVersion}...`)
    await rollbackToVersion({
        dataSource,
        targetVersion,
        force,
    })

    // Close the connection so the process can exit cleanly.
    await dataSource.destroy()
}

// Surface any failure with context and a non-zero exit code for scripts/CI.
main().catch((e) => {
    console.error('Rollback failed:', e)
    process.exit(1)
})
|
||||
@@ -0,0 +1,244 @@
|
||||
import { QueryRunner } from 'typeorm'
|
||||
import { databaseConnection, resetDatabaseConnection } from '../../../../src/app/database/database-connection'
|
||||
import { initializeDatabase } from '../../../../src/app/database'
|
||||
import { Migration } from '../../../../src/app/database/migration'
|
||||
import { identifyCandidates, verifyDatabaseState } from '../../../../src/app/database/rollback-migrations'
|
||||
|
||||
const TEST_TABLE = 'rollback_test_table'
|
||||
|
||||
// Fixture migration: creates the throwaway test table. The far-future
// release '99.0.0' keeps it clear of any real migration's version.
class TestMigrationSafe1999000000001 implements Migration {
    name = 'TestMigrationSafe1999000000001'
    breaking = false
    release = '99.0.0'

    public async up(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(`CREATE TABLE "${TEST_TABLE}" ("id" varchar PRIMARY KEY, "value" varchar)`)
    }

    public async down(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(`DROP TABLE IF EXISTS "${TEST_TABLE}"`)
    }
}
|
||||
|
||||
// Fixture migration: adds the reversible "extra" column on top of the
// table created by TestMigrationSafe1999000000001.
class TestMigrationSafe1999000000002 implements Migration {
    name = 'TestMigrationSafe1999000000002'
    breaking = false
    release = '99.0.0'

    public async up(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(`ALTER TABLE "${TEST_TABLE}" ADD COLUMN "extra" varchar`)
    }

    public async down(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(`ALTER TABLE "${TEST_TABLE}" DROP COLUMN "extra"`)
    }
}
|
||||
|
||||
// Fixture migration flagged `breaking = true` (and a newer release, '99.1.0')
// to exercise the --force confirmation path in the rollback flow.
class TestMigrationBreaking1999000000003 implements Migration {
    name = 'TestMigrationBreaking1999000000003'
    breaking = true
    release = '99.1.0'

    public async up(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(`ALTER TABLE "${TEST_TABLE}" ADD COLUMN "breaking_col" varchar`)
    }

    public async down(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(`ALTER TABLE "${TEST_TABLE}" DROP COLUMN "breaking_col"`)
    }
}
|
||||
|
||||
async function applyTestMigration(ds: ReturnType<typeof databaseConnection>, migration: Migration): Promise<void> {
|
||||
const queryRunner = ds.createQueryRunner()
|
||||
await queryRunner.connect()
|
||||
await migration.up(queryRunner)
|
||||
await queryRunner.release()
|
||||
await ds.query(
|
||||
`INSERT INTO "migrations" ("timestamp", "name") VALUES ($1, $2)`,
|
||||
[Date.now(), migration.name],
|
||||
)
|
||||
}
|
||||
|
||||
async function revertTestMigration(ds: ReturnType<typeof databaseConnection>, migration: Migration): Promise<void> {
|
||||
const queryRunner = ds.createQueryRunner()
|
||||
await queryRunner.connect()
|
||||
await migration.down(queryRunner)
|
||||
await queryRunner.release()
|
||||
await ds.query(`DELETE FROM "migrations" WHERE "name" = $1`, [migration.name])
|
||||
}
|
||||
|
||||
// Best-effort cleanup: drops the test table and removes fixture rows from the
// "migrations" table. Errors are ignored so cleanup never masks a test failure.
async function cleanupTestState(ds: ReturnType<typeof databaseConnection>): Promise<void> {
    await ds.query(`DROP TABLE IF EXISTS "${TEST_TABLE}"`).catch(() => { /* ignore */ })
    await ds.query(`DELETE FROM "migrations" WHERE "name" LIKE 'TestMigration%'`).catch(() => { /* ignore */ })
}
|
||||
|
||||
// Returns the column names of `tableName` in definition order, read from
// information_schema (empty array when the table does not exist).
async function getTableColumns(ds: ReturnType<typeof databaseConnection>, tableName: string): Promise<string[]> {
    const columns = await ds.query(
        `SELECT column_name FROM information_schema.columns WHERE table_name = $1 ORDER BY ordinal_position`,
        [tableName],
    )
    return columns.map((c: { column_name: string }) => c.column_name)
}
|
||||
|
||||
// True when a table named `tableName` exists, per information_schema.
async function tableExists(ds: ReturnType<typeof databaseConnection>, tableName: string): Promise<boolean> {
    const result = await ds.query(
        `SELECT table_name FROM information_schema.tables WHERE table_name = $1`,
        [tableName],
    )
    return result.length > 0
}
|
||||
|
||||
// Integration suite: runs the real migration stack against a live database,
// then exercises candidate selection, state verification, and the full
// up/down lifecycle using the fixture migrations above.
describe('Rollback Integration', () => {
    let ds: ReturnType<typeof databaseConnection>

    beforeAll(async () => {
        // Fresh connection with all real migrations applied before fixtures run.
        resetDatabaseConnection()
        await initializeDatabase({ runMigrations: true })
        ds = databaseConnection()
    })

    afterAll(async () => {
        await cleanupTestState(ds)
        await ds.destroy()
    })

    // Each test starts from a clean slate: no test table, no fixture rows.
    afterEach(async () => {
        await cleanupTestState(ds)
    })

    describe('identifyCandidates', () => {
        it('should return migrations with release greater than target version in reverse order', () => {
            const migrations: (new () => Migration)[] = [
                TestMigrationSafe1999000000001,
                TestMigrationSafe1999000000002,
                TestMigrationBreaking1999000000003,
            ]

            const candidates = identifyCandidates(migrations, '98.0.0')

            // Rollback order: newest registration first.
            expect(candidates).toHaveLength(3)
            expect(candidates[0].name).toBe('TestMigrationBreaking1999000000003')
            expect(candidates[1].name).toBe('TestMigrationSafe1999000000002')
            expect(candidates[2].name).toBe('TestMigrationSafe1999000000001')
        })

        it('should filter by version correctly', () => {
            const migrations: (new () => Migration)[] = [
                TestMigrationSafe1999000000001,
                TestMigrationSafe1999000000002,
                TestMigrationBreaking1999000000003,
            ]

            // Target equals the safe fixtures' release, so only the strictly
            // newer breaking fixture (99.1.0) qualifies.
            const candidates = identifyCandidates(migrations, '99.0.0')

            expect(candidates).toHaveLength(1)
            expect(candidates[0].name).toBe('TestMigrationBreaking1999000000003')
            expect(candidates[0].breaking).toBe(true)
        })

        it('should return empty when no migrations are newer than target', () => {
            const candidates = identifyCandidates([TestMigrationSafe1999000000001], '99.0.0')
            expect(candidates).toHaveLength(0)
        })

        it('should skip migrations without a release field', () => {
            const NoReleaseMigration = class implements Migration {
                name = 'NoRelease'
                async up(): Promise<void> { /* noop */ }
                async down(): Promise<void> { /* noop */ }
            }

            const candidates = identifyCandidates(
                [NoReleaseMigration, TestMigrationSafe1999000000001],
                '98.0.0',
            )

            expect(candidates).toHaveLength(1)
            expect(candidates[0].name).toBe('TestMigrationSafe1999000000001')
        })
    })

    describe('verifyDatabaseState', () => {
        it('should pass when database matches candidates in order', async () => {
            await applyTestMigration(ds, new TestMigrationSafe1999000000001())
            await applyTestMigration(ds, new TestMigrationSafe1999000000002())

            // Candidates are given newest-first, matching ORDER BY "id" DESC.
            const candidates = [
                new TestMigrationSafe1999000000002(),
                new TestMigrationSafe1999000000001(),
            ]

            await expect(verifyDatabaseState(ds, candidates)).resolves.toBeUndefined()
        })

        it('should throw when database order does not match', async () => {
            await applyTestMigration(ds, new TestMigrationSafe1999000000001())
            await applyTestMigration(ds, new TestMigrationSafe1999000000002())

            const wrongOrder = [
                new TestMigrationSafe1999000000001(),
                new TestMigrationSafe1999000000002(),
            ]

            await expect(verifyDatabaseState(ds, wrongOrder)).rejects.toThrow('Migration order mismatch')
        })
    })

    describe('migration up/down lifecycle', () => {
        it('should apply and revert two safe migrations', async () => {
            const m1 = new TestMigrationSafe1999000000001()
            const m2 = new TestMigrationSafe1999000000002()

            // Apply forward
            await applyTestMigration(ds, m1)
            await applyTestMigration(ds, m2)

            expect(await tableExists(ds, TEST_TABLE)).toBe(true)
            expect(await getTableColumns(ds, TEST_TABLE)).toEqual(
                expect.arrayContaining(['id', 'value', 'extra']),
            )

            // Revert m2 — drops "extra" column
            await revertTestMigration(ds, m2)
            expect(await tableExists(ds, TEST_TABLE)).toBe(true)
            expect(await getTableColumns(ds, TEST_TABLE)).not.toContain('extra')

            // Revert m1 — drops table
            await revertTestMigration(ds, m1)
            expect(await tableExists(ds, TEST_TABLE)).toBe(false)

            // Migration records cleaned up
            const remaining = await ds.query(
                `SELECT "name" FROM "migrations" WHERE "name" LIKE 'TestMigration%'`,
            )
            expect(remaining).toHaveLength(0)
        })

        it('should apply and revert three migrations including a breaking one', async () => {
            const m1 = new TestMigrationSafe1999000000001()
            const m2 = new TestMigrationSafe1999000000002()
            const m3 = new TestMigrationBreaking1999000000003()

            await applyTestMigration(ds, m1)
            await applyTestMigration(ds, m2)
            await applyTestMigration(ds, m3)

            expect(await getTableColumns(ds, TEST_TABLE)).toEqual(
                expect.arrayContaining(['id', 'value', 'extra', 'breaking_col']),
            )

            // Revert m3 (breaking)
            await revertTestMigration(ds, m3)
            expect(await getTableColumns(ds, TEST_TABLE)).not.toContain('breaking_col')
            expect(await getTableColumns(ds, TEST_TABLE)).toContain('extra')

            // Revert m2
            await revertTestMigration(ds, m2)
            expect(await getTableColumns(ds, TEST_TABLE)).not.toContain('extra')

            // Revert m1
            await revertTestMigration(ds, m1)
            expect(await tableExists(ds, TEST_TABLE)).toBe(false)
        })
    })
})
|
||||
@@ -0,0 +1,126 @@
|
||||
import { MigrationInterface, QueryRunner } from 'typeorm'
|
||||
import { Migration } from '../../../../src/app/database/migration'
|
||||
import { identifyCandidates, verifyDatabaseState } from '../../../../src/app/database/rollback-migrations'
|
||||
|
||||
function createMockMigration(overrides: Partial<Migration> & { name: string }): new () => Migration {
|
||||
return class implements Migration {
|
||||
name = overrides.name
|
||||
breaking = overrides.breaking
|
||||
release = overrides.release
|
||||
async up(_queryRunner: QueryRunner): Promise<void> { /* noop */ }
|
||||
async down(_queryRunner: QueryRunner): Promise<void> { /* noop */ }
|
||||
}
|
||||
}
|
||||
|
||||
function createMockDataSource(executedMigrationNames: string[]): { query: ReturnType<typeof vi.fn> } {
|
||||
return {
|
||||
query: vi.fn().mockResolvedValue(
|
||||
executedMigrationNames.map((name) => ({ name })),
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
describe('identifyCandidates', () => {
|
||||
it('should return migrations with release greater than target version', () => {
|
||||
const migrations = [
|
||||
createMockMigration({ name: 'OldMigration', release: '0.76.0', breaking: false }),
|
||||
createMockMigration({ name: 'CurrentMigration', release: '0.77.0', breaking: false }),
|
||||
createMockMigration({ name: 'NewMigration', release: '0.78.0', breaking: false }),
|
||||
]
|
||||
|
||||
const candidates = identifyCandidates(migrations, '0.77.0')
|
||||
|
||||
expect(candidates).toHaveLength(1)
|
||||
expect(candidates[0].name).toBe('NewMigration')
|
||||
})
|
||||
|
||||
it('should return multiple migrations in reverse order', () => {
|
||||
const migrations = [
|
||||
createMockMigration({ name: 'M1', release: '0.76.0', breaking: false }),
|
||||
createMockMigration({ name: 'M2', release: '0.78.0', breaking: false }),
|
||||
createMockMigration({ name: 'M3', release: '0.78.0', breaking: true }),
|
||||
]
|
||||
|
||||
const candidates = identifyCandidates(migrations, '0.77.0')
|
||||
|
||||
expect(candidates).toHaveLength(2)
|
||||
expect(candidates[0].name).toBe('M3')
|
||||
expect(candidates[1].name).toBe('M2')
|
||||
})
|
||||
|
||||
it('should skip migrations without a release field', () => {
|
||||
const migrations = [
|
||||
createMockMigration({ name: 'NoRelease' }),
|
||||
createMockMigration({ name: 'HasRelease', release: '0.78.0', breaking: false }),
|
||||
]
|
||||
|
||||
const candidates = identifyCandidates(migrations, '0.77.0')
|
||||
|
||||
expect(candidates).toHaveLength(1)
|
||||
expect(candidates[0].name).toBe('HasRelease')
|
||||
})
|
||||
|
||||
it('should return empty array when no migrations match', () => {
|
||||
const migrations = [
|
||||
createMockMigration({ name: 'M1', release: '0.76.0', breaking: false }),
|
||||
createMockMigration({ name: 'M2', release: '0.77.0', breaking: false }),
|
||||
]
|
||||
|
||||
const candidates = identifyCandidates(migrations, '0.77.0')
|
||||
|
||||
expect(candidates).toHaveLength(0)
|
||||
})
|
||||
|
||||
it('should return empty array when all migrations lack release', () => {
|
||||
const migrations = [
|
||||
createMockMigration({ name: 'M1' }),
|
||||
createMockMigration({ name: 'M2' }),
|
||||
]
|
||||
|
||||
const candidates = identifyCandidates(migrations, '0.77.0')
|
||||
|
||||
expect(candidates).toHaveLength(0)
|
||||
})
|
||||
})
|
||||
|
||||
describe('verifyDatabaseState', () => {
|
||||
it('should pass when DB matches candidates', async () => {
|
||||
const candidates: Migration[] = [
|
||||
{ name: 'M3', up: vi.fn(), down: vi.fn() },
|
||||
{ name: 'M2', up: vi.fn(), down: vi.fn() },
|
||||
]
|
||||
const mockDataSource = createMockDataSource(['M3', 'M2'])
|
||||
|
||||
await expect(
|
||||
verifyDatabaseState(mockDataSource as unknown as Parameters<typeof verifyDatabaseState>[0], candidates),
|
||||
).resolves.toBeUndefined()
|
||||
|
||||
expect(mockDataSource.query).toHaveBeenCalledWith(
|
||||
'SELECT "name" FROM "migrations" ORDER BY "id" DESC LIMIT $1',
|
||||
[2],
|
||||
)
|
||||
})
|
||||
|
||||
it('should throw when DB order does not match candidates', async () => {
|
||||
const candidates: Migration[] = [
|
||||
{ name: 'M3', up: vi.fn(), down: vi.fn() },
|
||||
{ name: 'M2', up: vi.fn(), down: vi.fn() },
|
||||
]
|
||||
const mockDataSource = createMockDataSource(['M2', 'M3'])
|
||||
|
||||
await expect(
|
||||
verifyDatabaseState(mockDataSource as unknown as Parameters<typeof verifyDatabaseState>[0], candidates),
|
||||
).rejects.toThrow('Migration order mismatch')
|
||||
})
|
||||
|
||||
it('should throw when DB has different migration names', async () => {
|
||||
const candidates: Migration[] = [
|
||||
{ name: 'M3', up: vi.fn(), down: vi.fn() },
|
||||
]
|
||||
const mockDataSource = createMockDataSource(['SomethingElse'])
|
||||
|
||||
await expect(
|
||||
verifyDatabaseState(mockDataSource as unknown as Parameters<typeof verifyDatabaseState>[0], candidates),
|
||||
).rejects.toThrow('Migration order mismatch')
|
||||
})
|
||||
})
|
||||
100
tools/scripts/check-migration-rollback.ts
Normal file
100
tools/scripts/check-migration-rollback.ts
Normal file
@@ -0,0 +1,100 @@
|
||||
import { execSync } from 'child_process'
|
||||
import semver from 'semver'
|
||||
import { Migration } from '../../packages/server/api/src/app/database/migration'
|
||||
|
||||
// Directories scanned for newly added migration files:
// community postgres + common migrations, and enterprise-edition postgres migrations.
const MIGRATION_DIRS = [
    'packages/server/api/src/app/database/migration/postgres',
    'packages/server/api/src/app/database/migration/common',
    'packages/server/api/src/app/ee/database/migrations/postgres',
]
|
||||
|
||||
function getChangedMigrationFiles(): string[] {
|
||||
const baseBranch = process.env.GITHUB_BASE_REF ?? 'main'
|
||||
const diffOutput = execSync(
|
||||
`git diff --name-only --diff-filter=A origin/${baseBranch}...HEAD`,
|
||||
{ encoding: 'utf-8' },
|
||||
).trim()
|
||||
|
||||
if (!diffOutput) {
|
||||
return []
|
||||
}
|
||||
|
||||
return diffOutput
|
||||
.split('\n')
|
||||
.filter((file) =>
|
||||
MIGRATION_DIRS.some((dir) => file.startsWith(dir)) && file.endsWith('.ts'),
|
||||
)
|
||||
}
|
||||
|
||||
async function checkMigrationFile(filePath: string): Promise<string[]> {
|
||||
const errors: string[] = []
|
||||
|
||||
const mod = await import(`../../${filePath}`)
|
||||
const MigrationClass = Object.values(mod).find(
|
||||
(v): v is new () => Migration => typeof v === 'function' && v.prototype?.up,
|
||||
)
|
||||
|
||||
if (!MigrationClass) {
|
||||
errors.push('No exported migration class found')
|
||||
return errors
|
||||
}
|
||||
|
||||
const instance = new MigrationClass()
|
||||
|
||||
if (instance.breaking === undefined) {
|
||||
errors.push('Missing "breaking" property (must be set to true or false)')
|
||||
}
|
||||
|
||||
if (!instance.release || !semver.valid(instance.release)) {
|
||||
errors.push("Missing or invalid \"release\" property (must be valid semver, e.g. release = '0.78.0')")
|
||||
}
|
||||
|
||||
if (instance.breaking !== true) {
|
||||
if (typeof instance.down !== 'function') {
|
||||
errors.push('Missing down() method (required for non-breaking migrations)')
|
||||
}
|
||||
}
|
||||
|
||||
return errors
|
||||
}
|
||||
|
||||
async function main(): Promise<void> {
|
||||
const changedFiles = getChangedMigrationFiles()
|
||||
|
||||
if (changedFiles.length === 0) {
|
||||
console.log('No new migration files detected.')
|
||||
process.exit(0)
|
||||
}
|
||||
|
||||
console.log(`Checking ${changedFiles.length} new migration file(s)...\n`)
|
||||
|
||||
let hasErrors = false
|
||||
|
||||
for (const file of changedFiles) {
|
||||
const errors = await checkMigrationFile(file)
|
||||
if (errors.length > 0) {
|
||||
hasErrors = true
|
||||
console.error(`❌ ${file}:`)
|
||||
for (const error of errors) {
|
||||
console.error(` - ${error}`)
|
||||
}
|
||||
console.error()
|
||||
}
|
||||
else {
|
||||
console.log(`✅ ${file}`)
|
||||
}
|
||||
}
|
||||
|
||||
if (hasErrors) {
|
||||
console.error('\nMigration rollback checks failed. See errors above.')
|
||||
console.error('All new migrations must:')
|
||||
console.error(' 1. Set breaking = true or breaking = false')
|
||||
console.error(" 2. Set release = '<semver>' (e.g. '0.78.0')")
|
||||
console.error(' 3. Have a down() method (unless breaking = true)')
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
console.log('\n✅ All migration rollback checks passed.')
|
||||
}
|
||||
|
||||
main()
|
||||
44
tools/scripts/check-release-migrations.ts
Normal file
44
tools/scripts/check-release-migrations.ts
Normal file
@@ -0,0 +1,44 @@
|
||||
import * as fs from 'fs'
|
||||
import { getMigrations } from '../../packages/server/api/src/app/database/postgres-connection'
|
||||
import { Migration } from '../../packages/server/api/src/app/database/migration'
|
||||
|
||||
function findMigrationsForRelease(releaseVersion: string): Migration[] {
|
||||
const migrationClasses = getMigrations()
|
||||
const instances = migrationClasses.map((M) => new M())
|
||||
return instances.filter((m) => m.release === releaseVersion)
|
||||
}
|
||||
|
||||
function main(): void {
|
||||
const releaseVersion = process.argv[2]
|
||||
if (!releaseVersion) {
|
||||
console.error('Usage: check-release-migrations.ts <version>')
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
const migrations = findMigrationsForRelease(releaseVersion)
|
||||
const breakingMigrations = migrations.filter((m) => m.breaking === true)
|
||||
|
||||
const outputFile = process.env.GITHUB_OUTPUT
|
||||
if (outputFile) {
|
||||
fs.appendFileSync(outputFile, `migration_count=${migrations.length}\n`)
|
||||
fs.appendFileSync(outputFile, `has_breaking=${breakingMigrations.length > 0}\n`)
|
||||
fs.appendFileSync(outputFile, `breaking_names=${breakingMigrations.map((m) => m.name ?? 'unknown').join(', ')}\n`)
|
||||
}
|
||||
|
||||
if (migrations.length === 0) {
|
||||
console.log(`No migrations tagged for release ${releaseVersion}.`)
|
||||
return
|
||||
}
|
||||
|
||||
console.log(`Found ${migrations.length} migration(s) for release ${releaseVersion}:`)
|
||||
for (const m of migrations) {
|
||||
const label = m.breaking === true ? 'BREAKING' : 'safe'
|
||||
console.log(` ${label}: ${m.name ?? 'unknown'}`)
|
||||
}
|
||||
|
||||
if (breakingMigrations.length > 0) {
|
||||
console.log(`\n${breakingMigrations.length} breaking migration(s) detected.`)
|
||||
}
|
||||
}
|
||||
|
||||
main()
|
||||
Reference in New Issue
Block a user