package com.jdblabs.dbmigrate

import groovy.sql.Sql

import ch.qos.logback.classic.Level
import ch.qos.logback.classic.Logger as LBLogger
import com.jdbernard.util.AnsiEscapeCodeSequence as ANSI
import com.zaxxer.hikari.HikariConfig
import com.zaxxer.hikari.HikariDataSource
import java.io.FilenameFilter
import java.text.SimpleDateFormat
import org.docopt.Docopt
import org.slf4j.Logger
import org.slf4j.LoggerFactory

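/**
 * DbMigrate is a small, SQL-based database schema migration tool. It creates
 * paired, timestamped "-up.sql" and "-down.sql" scripts in a migrations
 * directory, records which migrations have been applied in a migrations
 * table, and applies or rolls back pending scripts inside a transaction.
 *
 * Connection settings are read from a Java properties file (by default
 * "database.properties") and handed to HikariCP. A hypothetical PostgreSQL
 * configuration might look like:
 *
 * <pre>
 * migrations.dir=migrations
 * dataSourceClassName=org.postgresql.ds.PGSimpleDataSource
 * dataSource.user=dbuser
 * dataSource.password=secret
 * dataSource.databaseName=my_db
 * </pre>
 */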
public class DbMigrate {

    public static final VERSION = "0.2.3"

    public static final def DOC = """\
db-migrate.groovy v${VERSION}

Usage:
  db_migrate [options] create <migration-name>
  db_migrate [options] up [<count>]
  db_migrate [options] down [<count>]
  db_migrate [options] init <schema-name>
  db_migrate (-V | --version)
  db_migrate (-h | --help)

Options:

  -c --config <config-file>   Use the given configuration file (defaults to
                              "database.properties").
  -q --quiet                  Suppress log information.
  -v --verbose                Print detailed log information.
  --very-verbose              Print very detailed log information.
  -V --version                Print the tool's version information.
  -h --help                   Print this usage information.
"""

    private static sdf = new SimpleDateFormat('yyyyMMddHHmmss')
    private static Logger LOGGER = LoggerFactory.getLogger(DbMigrate)

    Sql sql
    File migrationsDir

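    /**
     * CLI entry point: parses the docopt options, adjusts log levels, loads
     * the configuration file, locates the migrations directory, and
     * dispatches to the requested command.
     */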
    public static void main(String[] args) {

        // Parse arguments
        def opts = new Docopt(DOC).withVersion("db-migrate.groovy v$VERSION").parse(args)

        if (opts['--version']) {
            println "db-migrate.groovy v$VERSION"
            System.exit(0) }

        if (opts['--help']) { println DOC; System.exit(0) }

        // Setup logging & output levels
        Logger clilog = LoggerFactory.getLogger("db-migrate.cli")

        if (opts['--quiet']) {
            ((LBLogger) LOGGER).level = Level.ERROR
            ((LBLogger) LoggerFactory.getLogger(LBLogger.ROOT_LOGGER_NAME))
                .level = Level.ERROR }

        if (opts['--verbose']) {
            ((LBLogger) LOGGER).level = Level.DEBUG
            ((LBLogger) LoggerFactory.getLogger(LBLogger.ROOT_LOGGER_NAME))
                .level = Level.INFO }

        if (opts['--very-verbose']) {
            ((LBLogger) LOGGER).level = Level.TRACE
            ((LBLogger) LoggerFactory.getLogger(LBLogger.ROOT_LOGGER_NAME))
                .level = Level.DEBUG }

        // Load the configuration file
        def givenCfg = new Properties()
        File cfgFile

        if (opts["--config"]) cfgFile = new File(opts["--config"])

        if (!cfgFile || !cfgFile.exists() || !cfgFile.isFile())
            cfgFile = new File("database.properties")

        if (!cfgFile.exists() || !cfgFile.isFile()) {
            clilog.warn("Config file '{}' does not exist or is not a regular file.",
                cfgFile.canonicalPath) }

        if (cfgFile.exists() && cfgFile.isFile()) {
            try { cfgFile.withInputStream { givenCfg.load(it) } }
            catch (Exception e) {
                clilog.error("Could not read configuration file.", e)
                givenCfg.clear() } }

        // Check for migrations directory
        File migrationsDir = new File(givenCfg["migrations.dir"] ?: 'migrations')
        if (!migrationsDir.exists() || !migrationsDir.isDirectory()) {
            clilog.error("'{}' does not exist or is not a directory.",
                migrationsDir.canonicalPath)
            System.exit(1) }

        // Instantiate the DbMigrate instance
        DbMigrate dbmigrate = new DbMigrate(migrationsDir: migrationsDir)

        // If we've only been asked to create a new migration, we don't need to
        // set up the DB connection.
        if (opts['create']) {
            try {
                List<File> files = dbmigrate.createMigration(opts['<migration-name>'])
                clilog.info("Created new migration files:\n\t${files.name.join('\n\t')}")
                return }
            catch (Exception e) {
                clilog.error('Unable to create migration scripts.', e)
                System.exit(1) } }

        // Create the datasource.
        Properties dsProps = new Properties()
        dsProps.putAll(givenCfg.findAll { it.key != 'migrations.dir' })

        HikariDataSource hds = new HikariDataSource(new HikariConfig(dsProps))

        dbmigrate.sql = new Sql(hds)

        // Execute the appropriate command (docopt yields <count> as a String,
        // so coerce it to an Integer before dispatching).
        if (opts['up']) dbmigrate.up(opts['<count>'] ? opts['<count>'] as Integer : null)
        else if (opts['down']) dbmigrate.down(opts['<count>'] ? opts['<count>'] as Integer : 1) }

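    /**
     * Create a new pair of stub migration scripts named
     * "<timestamp>-<name>-up.sql" and "<timestamp>-<name>-down.sql" in the
     * migrations directory, returning the up and down files.
     */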
    public List<File> createMigration(String migrationName) {
        String timestamp = sdf.format(new Date())

        File upFile = new File(migrationsDir, "$timestamp-$migrationName-up.sql")
        File downFile = new File(migrationsDir, "$timestamp-$migrationName-down.sql")

        upFile.text = "-- UP script for $migrationName ($timestamp)"
        downFile.text = "-- DOWN script for $migrationName ($timestamp)"

        return [upFile, downFile] }

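    /** Create the migrations bookkeeping table if it does not already exist. */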
    public def createMigrationsTable() {
        LOGGER.trace('Checking for the existence of the migrations table and ' +
            'creating it if it does not exist.')
        sql.execute('''
            CREATE TABLE IF NOT EXISTS migrations (
                id SERIAL PRIMARY KEY,
                name VARCHAR NOT NULL,
                run_at TIMESTAMP NOT NULL DEFAULT NOW())''') }

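    /**
     * Compare the migrations recorded in the database against the migration
     * scripts present on disk. Returns a map with three lists: "run"
     * (recorded in the database), "notRun" (on disk but not yet applied), and
     * "missing" (not applied but older than an already-applied migration,
     * i.e. a gap in the migration history).
     */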
    public def diffMigrations() {
        def results = [notRun: [], missing: []]

        LOGGER.trace('Diffing migrations...')
        results.run = sql.rows('SELECT name FROM migrations ORDER BY name')
            .collect { it.name }.sort()

        SortedSet<String> available = new TreeSet<>()
        available.addAll(migrationsDir
            .listFiles({ d, n -> n ==~ /.+-(up|down).sql$/ } as FilenameFilter)
            .collect { f -> f.name.replaceAll(/-(up|down).sql$/, '') })

        available.each { migrationName ->
            if (!results.run.contains(migrationName))
                results.notRun << migrationName

            // If we've already seen some migrations that have not been run but
            // this one has been run, that means we have a gap and are missing
            // migrations.
            else if (results.notRun.size() > 0) {
                results.missing += results.notRun
                results.notRun = [] } }

        LOGGER.trace('Migrations diff:\n\trun: {}\n\tnot run: {}\n\tmissing: {}',
            results.run, results.notRun, results.missing)

        return results }

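    /**
     * Apply pending migrations, oldest first (all of them, or at most "count"
     * if given), recording each in the migrations table. Throws if the diff
     * reports missing migrations. Returns the names of the migrations run.
     */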
    public List<String> up(Integer count = null) {
        createMigrationsTable()
        def diff = diffMigrations()

        if (diff.missing) {
            LOGGER.error('Missing migrations:\n\t{}', diff.missing)
            throw new Exception('Database is in an inconsistent state.') }

        LOGGER.debug('Migrating up.')
        List<String> toRun

        if (!count || count >= diff.notRun.size()) toRun = diff.notRun
        else toRun = diff.notRun[0..<count]

        LOGGER.debug('{} migrations to run.', toRun.size())
        LOGGER.trace('Migrations: {}.', toRun)

        return runMigrations(toRun, true) }

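    /**
     * Roll back the most recently applied migrations ("count" of them, 1 by
     * default), removing each from the migrations table. Returns the names of
     * the migrations rolled back.
     */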
    public List<String> down(Integer count = 1) {
        createMigrationsTable()
        def diff = diffMigrations()

        if (diff.missing) {
            LOGGER.error('Missing migrations:\n\t{}', diff.missing)
            throw new Exception('Database is in an inconsistent state.') }

        LOGGER.debug('Migrating down.')

        List<String> toRun = count < diff.run.size() ?
            diff.run.reverse()[0..<count] : diff.run.reverse()

        LOGGER.debug('{} migrations to run.', toRun.size())
        LOGGER.trace('Migrations: {}.', toRun)

        return runMigrations(toRun, false) }

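    /**
     * Run the given migrations in order within a single transaction,
     * executing the matching "-up.sql" or "-down.sql" script for each and
     * updating the migrations table. The transaction is rolled back if any
     * script fails. Returns the names of the migrations that were executed.
     */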
    private List<String> runMigrations(List<String> toRun, boolean up = true) {
        List<String> migrationsRun = []

        try {
            LOGGER.trace("Beginning transaction.")
            sql.execute('BEGIN')

            toRun.each { migrationName ->
                LOGGER.info(migrationName)
                File migrationFile = new File(migrationsDir,
                    "$migrationName-${up ? 'up' : 'down'}.sql")

                if (!migrationFile.exists() || !migrationFile.isFile())
                    throw new FileNotFoundException(migrationFile.canonicalPath +
                        " does not exist or is not a regular file.")

                runFile(migrationFile)

                if (up) sql.execute(
                    'INSERT INTO migrations (name) VALUES (?)', migrationName)
                else sql.execute(
                    'DELETE FROM migrations WHERE name = ?', migrationName)

                migrationsRun << migrationName }

            sql.execute('COMMIT')
            LOGGER.info('Went {} {} migrations.',
                up ? 'up' : 'down', migrationsRun.size()) }
        catch (Exception e) {
            LOGGER.error('Migration failed; rolling back.', e)
            sql.execute('ROLLBACK') }

        return migrationsRun }

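    /**
     * Execute a single migration script: split the file contents into
     * statements on semicolons, trim comments and whitespace, drop empty
     * statements, and execute each remaining statement.
     */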
    public void runFile(File file) {
        LOGGER.trace('Raw statements:\n\n{}\n', file.text.split(/;/).join('\n'))

        List<String> statements = file.text.split(/;/)
            .collect { it.replaceAll(/--.*$/, '').trim() }
            .findAll { it.length() > 0 }

        LOGGER.trace('Statements:\n\n{}\n', statements.join('\n'))

        statements.each {
            LOGGER.trace('Executing SQL: {}', it)
            sql.execute(it) } }
}