-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathmigrate.js
114 lines (101 loc) · 3.38 KB
/
migrate.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
#!/usr/bin/env node
// Logger bootstrap: prefer the optional `debug` package (namespace
// "migrate") when it is installed; otherwise fall back to console.log
// so the script still reports progress without the dependency.
let debug
try {
  const createDebug = require('debug')
  debug = createDebug('migrate')
} catch (e) {
  debug = console.log
}
// Best-effort environment loading: populate process.env from a `.env`
// file when `dotenv-safe` is installed; a missing package or file is
// not fatal.
try {
debug('try to use the `.env` file via `dotenv-safe`')
require('dotenv-safe').config()
} catch (e) {
debug('package `dotenv-safe` or file `.env` not found')
// NOTE(review): this also downgrades the logger to console.log even when
// the `debug` package loaded successfully above — looks like a copy-paste
// from the previous catch block; confirm this is intentional.
debug = console.log
}
const fs = require('fs')
const path = require('path')
// Main migration runner. Connects to Postgres (via the project's own
// `sql.js` when present, otherwise `process.env.DATABASE_URL`), ensures
// the bookkeeping `migrations` table and the `updated_at` trigger
// machinery exist, then applies every unprocessed `*.js` / `*.sql` file
// from ./migrations in numeric filename order, each inside its own
// transaction, recording every successful file in `migrations`.
;(async () => {
  debug('starting migrations')
  let client, sql
  const filePath = path.join(process.cwd(), 'sql.js')
  if (fs.existsSync(filePath)) {
    debug('use the projects `sql.js` to connect to the database')
    const init = await require(filePath)()
    client = init.client
    sql = init.sql
  } else {
    debug('use default `process.env.DATABASE_URL` to connect to the database')
    const { Client } = require('pg')
    client = new Client({ connectionString: process.env.DATABASE_URL })
    // FIX: connect() returns a promise — await it so connection failures
    // surface here instead of becoming a floating rejection.
    await client.connect()
    sql = require('../')({ client })
  }
  try {
    debug('create columns helper for "id", "created_at" and "updated_at"')
    const columns = {
      id: sql`${sql.column('id')} SERIAL NOT NULL PRIMARY KEY`,
      created_at: sql`${sql.column('created_at')} TIMESTAMPTZ NOT NULL DEFAULT NOW()`,
      updated_at: sql`${sql.column('updated_at')} TIMESTAMPTZ NOT NULL DEFAULT NOW()`
    }
    debug('create trigger function and trigger assign helper for "updated_at"')
    await sql.query(sql`
      CREATE OR REPLACE FUNCTION trigger_set_timestamp()
      RETURNS TRIGGER AS $$
      BEGIN
        NEW.updated_at = NOW();
        RETURN NEW;
      END;
      $$ LANGUAGE plpgsql;
    `)
    // Helper handed to *.js migrations: (re)installs the BEFORE UPDATE
    // trigger that keeps `updated_at` current on the given table.
    const updatedAt = table => sql`
      DROP TRIGGER IF EXISTS ${sql.identifier(`set_timestamp_${table}`)}
      ON ${sql.table(table)};
      CREATE TRIGGER ${sql.identifier(`set_timestamp_${table}`)}
      BEFORE UPDATE ON ${sql.table(table)}
      FOR EACH ROW
      EXECUTE PROCEDURE trigger_set_timestamp();
    `
    debug('create table "migrations" if not exists')
    await sql.query(sql`
      CREATE TABLE IF NOT EXISTS "migrations" (
        ${columns.id},
        ${columns.created_at},
        ${columns.updated_at},
        "file" VARCHAR(255) UNIQUE
      )
    `)
    await sql.query(updatedAt('migrations'))
    debug('select already processed migrations:')
    const processed = (await sql.any('migrations', ['file']))
      .map(({ file }) => file)
    debug('%o', processed)
    debug('read directory, filter and sort migrations:')
    const directory = path.join(process.cwd(), 'migrations')
    // FIX: Set membership is O(1); `processed.includes` inside the filter
    // was O(n) per file. Only unrecorded .js/.sql files are kept, ordered
    // by their numeric filename prefix (e.g. "001-init.sql").
    const done = new Set(processed)
    const migrations = fs.readdirSync(directory)
      .filter(file => (file.endsWith('.js') || file.endsWith('.sql')) && !done.has(file))
      .sort((fileA, fileB) => parseInt(fileA, 10) - parseInt(fileB, 10))
    debug('%o', migrations)
    if (migrations.length === 0) {
      debug('no unprocessed migrations found, database already up to date')
    } else {
      for (const file of migrations) {
        debug('process migration file: "%s"', file)
        try {
          // The migration and its bookkeeping row commit atomically:
          // a failing migration leaves no partial state behind.
          await sql.transaction(async () => {
            if (file.endsWith('.js')) {
              await require(path.join(directory, file))(sql, { columns, updatedAt })
            }
            if (file.endsWith('.sql')) {
              await client.query(fs.readFileSync(path.join(directory, file)).toString())
            }
            await sql.insert('migrations', { file })
          })
          debug('file successfully processed')
        } catch (e) {
          debug('error: "%s"', e.message)
          throw e
        }
      }
      debug('all migrations successfully processed')
    }
  } finally {
    // FIX: close the connection even when a migration throws, so the
    // process can exit instead of hanging on an open pg socket.
    await client.end()
  }
})().catch(e => {
  // FIX: the IIFE's promise was floating; report the failure and exit
  // non-zero instead of dying with an unhandled rejection.
  console.error(e)
  process.exitCode = 1
})