diff --git a/README.md b/README.md index 3be1b2241..6acb4d37c 100644 --- a/README.md +++ b/README.md @@ -103,11 +103,17 @@ This project requires a Redis-compatible key-value store. You'll need to: ``` #### c. Database Setup -1. Apply the database migrations manually: - - Navigate to the `/migrations` folder in the project - - Execute each SQL migration file in sequential order against your Supabase database - - You can run these migrations using the Supabase SQL Editor or a PostgreSQL client - - Make sure to apply migrations in the correct order based on their timestamp prefixes +Apply the database migrations: +```bash +# Using Bun (recommended) +source .env.local && bun run scripts:apply-migrations + +# Using npm +source .env.local && npm run scripts:apply-migrations +``` +This runs all SQL files in `/migrations` in timestamp order. Already-applied migrations are automatically skipped. + +Alternatively, you can apply migrations manually using the Supabase SQL Editor or a PostgreSQL client — just execute each file in `/migrations` in sequential order. #### d. Supabase Storage Setup 1. 
Go to Storage > Buckets diff --git a/package.json b/package.json index 8d093d598..b0dcc7af7 100644 --- a/package.json +++ b/package.json @@ -26,6 +26,7 @@ "scripts:check-e2e-env": "bun scripts/check-e2e-env.ts", "scripts:check-all-env": "bun scripts:check-app-env && bun scripts:check-e2e-env", "scripts:create-migration": "bun scripts/create-migration.ts", + "scripts:apply-migrations": "bun scripts/apply-migrations.ts", "<<<<<<< Development": "", "shad": "bunx shadcn@canary", "test:dev:traffic": "vitest run src/__test__/development/traffic.test.ts", diff --git a/scripts/apply-migrations.ts b/scripts/apply-migrations.ts new file mode 100644 index 000000000..ec5d20665 --- /dev/null +++ b/scripts/apply-migrations.ts @@ -0,0 +1,48 @@ +import postgres from 'postgres' +import { readdirSync, readFileSync } from 'fs' +import { join } from 'path' + +const connectionString = process.env.POSTGRES_URL_NON_POOLING + +if (!connectionString) { + console.error('❌ POSTGRES_URL_NON_POOLING is not set') + process.exit(1) +} + +const sql = postgres(connectionString) + +async function applyMigrations() { + const migrationsDir = join(process.cwd(), 'migrations') + const files = readdirSync(migrationsDir) + .filter((f) => f.endsWith('.sql')) + .sort() + + console.log(`Found ${files.length} migration(s) to apply:\n`) + + for (const file of files) { + const filePath = join(migrationsDir, file) + const content = readFileSync(filePath, 'utf-8') + + console.log(`⏳ Applying ${file}...`) + try { + await sql.unsafe(content) + console.log(`✅ Applied ${file}`) + } catch (err: any) { + if ( + err.message.includes('already exists') || + err.message.includes('duplicate') + ) { + console.log(`⏭️ Skipped ${file} (already applied)`) + } else { + console.error(`❌ Failed on ${file}: ${err.message}`) + await sql.end() + process.exit(1) + } + } + } + + console.log('\n🎉 All migrations applied successfully!') + await sql.end() +} + +applyMigrations()