16 Commits
0.2.0 ... 0.2.8

Author SHA1 Message Date
6837e5448b Update for Nim 1.4.x+ 2021-07-03 22:00:14 -05:00
daf3a8dad0 Rename migrationsDir to sqlDir. 2020-09-01 16:30:50 -05:00
4e771345ea Add more info to the version string. 2018-07-01 10:36:13 -05:00
e26b6eb01c Fix argument parsing bug. 2018-06-01 01:23:23 -05:00
ad1540ca20 Add timestamp to migrations.run_at. 2017-02-11 20:19:39 -06:00
e35a5177ef Add env configuration, fix bug in SQL parsing for Nim db_migrate.
* The Nim binary now recognizes the following environment variables and allows
  them to override configured values:

  - `DATABASE_DRIVER`: overrides the `driver` config value, selects which
    kind of database we expect to connect to.
  - `MIGRATIONS_DIR`: overrides the `sqlDir` config value, sets the path to
    the directory containing the migration SQL files.
  - `DATABASE_URL`: overrides the `connectionString` config value, supplies
    connection parameters to the database.
  - `DBM_LOG_LEVEL`: overrides the `logLevel` config value, sets the logging
    level of db_migrate.
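
  The precedence is: environment variable first, then the value from the config
  file, then the built-in default. A minimal Nim sketch of that lookup order
  (the `resolveSetting` helper below is hypothetical and not part of
  db_migrate):

  ```nim
  import json, os

  # Hypothetical helper: prefer the environment variable, fall back to the
  # config file key, and finally to a built-in default.
  proc resolveSetting(cfg: JsonNode; envName, cfgKey, fallback: string): string =
    if existsEnv(envName): getEnv(envName)
    elif cfg.hasKey(cfgKey): cfg[cfgKey].getStr
    else: fallback
  ```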

* Fixes a bug in the parsing of migration files. Previously the file was split
  on `;` characters and chunks that started with `--` were ignored as comments.
  This was wrong in the common case where a block starts with a comment but
  then contains SQL further on; such a block was being ignored entirely. The
  new behavior is to consider each line and build up queries that way.
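
  A simplified sketch of the line-oriented splitting (the `splitStatements`
  name is hypothetical, and this version assumes a `;` only appears at the end
  of a line, whereas the real `readStatements` also handles a statement ending
  mid-line):

  ```nim
  import strutils

  # Accumulate non-comment lines into the current statement and emit it when
  # the terminating ';' is reached.
  proc splitStatements(sqlText: string): seq[string] =
    result = @[]
    var stmt = ""
    for line in sqlText.splitLines:
      let l = line.strip
      if l.len == 0 or l.startsWith("--"): continue  # skip blanks and comments
      stmt &= "\n" & line
      if l.endsWith(";"):
        result.add(stmt.strip)
        stmt = ""
    if stmt.strip.len > 0: result.add(stmt.strip)
  ```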

* Fixes a logging bug when parsing the configuration. If there was an error
  that prevented the configuration from loading, this in turn prevented logging
  from being set up correctly, and so the error was not logged. Now errors that
  may occur before logging is set up are hard-coded to be logged to STDERR.

* Changes the logic for creating the `migrations` table to check whether the
  table already exists before performing the query that creates it. This avoids
  continually printing messages about skipping the creation when the table
  already exists (which is the normal case). This change is PostgreSQL-specific,
  but of course the entire tool is currently PostgreSQL-specific.
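
  The check is a count against `information_schema.tables`; roughly (a sketch
  of the approach with an illustrative proc name, not the exact code in this
  commit):

  ```nim
  import db_postgres, strutils

  # Only issue CREATE TABLE when the migrations table is absent, so the normal
  # "table already exists" case produces no log noise.
  proc migrationsTableMissing(conn: DbConn): bool =
    conn.getValue(sql"""
      SELECT COUNT(*) FROM information_schema.tables
      WHERE table_name = 'migrations';""").strip == "0"
  ```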
2017-02-11 20:00:37 -06:00
9f38da0043 Fixed typo in groovy docopt. 2017-02-09 11:22:43 -06:00
c933d6ac2b Separated statement running logic in Groovy implementation to allow reuse. 2016-05-17 21:35:57 -05:00
b49b648358 Added JDB Labs Maven repo. 2016-04-21 07:29:13 -05:00
37c1c01ffc Added maven build plugin. 2016-04-17 17:40:44 -05:00
6bf2b86592 Groovy impl: added verbosity levels. Tested and fixed create/up/down. 2016-04-11 21:08:16 -05:00
9b3e7b4d26 Incrementing version for Nim and Groovy implementations. 2016-04-11 19:49:29 -05:00
5b9de9b10f Added initial, untested groovy implementation. 2016-04-11 17:16:30 -05:00
109a667fd5 Added Gradle build file. 2016-04-10 21:02:09 -05:00
462da00dd3 Restructured to accommodate being more than just a Nim implementation. 2016-04-10 21:01:53 -05:00
7242a13e51 Added better logging when database connection fails. 2016-02-10 03:25:14 -06:00
8 changed files with 398 additions and 31 deletions

1
.gitignore vendored

@@ -2,3 +2,4 @@ nimcache/
*.sw?
build/
db_migrate
.gradle/


@@ -1,4 +1,36 @@
Nim DB Migrate
==============
# DB Migrate
Small tool to manage database migrations in Nim.
Small tool(s) to manage database migrations in various languages.
## Usage
```
Usage:
db_migrate [options] create <migration-name>
db_migrate [options] up [<count>]
db_migrate [options] down [<count>]
db_migrate [options] init <schema-name>
db_migrate (-V | --version)
db_migrate (-h | --help)
Options:
-c --config <config-file> Use the given configuration file (defaults to
"database.properties").
-q --quiet Suppress log information.
-v --verbose Print detailed log information.
--very-verbose Print very detailed log information.
-V --version Print the tool's version information.
-h --help Print this usage information.
```
## Database Config Format
The database config is formatted as JSON. The following keys are supported by
all of the implementations:
* `sqlDir` -- Directory to store SQL files.
The following keys are supported by the Nim implementation:
* `connectionString` --

28
build.gradle Normal file

@@ -0,0 +1,28 @@
apply plugin: 'groovy'
apply plugin: 'application'
apply plugin: 'maven'
group = 'com.jdblabs'
version = '0.2.5'
mainClassName = 'com.jdblabs.dbmigrate.DbMigrate'
repositories {
mavenLocal()
mavenCentral()
maven { url "http://mvn.jdb-labs.com/repo" }
}
dependencies {
compile localGroovy()
compile 'ch.qos.logback:logback-classic:1.1.3'
compile 'ch.qos.logback:logback-core:1.1.3'
compile 'com.jdbernard:jdb-util:4.4'
compile 'com.offbytwo:docopt:0.6.0.20150202'
compile 'com.zaxxer:HikariCP:2.4.3'
testCompile 'junit:junit:4.12'
runtime 'com.h2database:h2:1.4.185'
runtime 'org.postgresql:postgresql:9.4.1207.jre7'
}


@@ -1,12 +1,12 @@
# Package
bin = @["db_migrate"]
version = "0.2.0"
version = "0.2.8"
author = "Jonathan Bernard"
description = "Simple tool to handle database migrations."
license = "BSD"
srcDir = "src/main/nim"
# Dependencies
requires: @["nim >= 0.13.0", "docopt >= 0.1.0"]
requires: @["nim >= 1.4.0", "docopt >= 0.1.0"]

1
settings.gradle Normal file

@@ -0,0 +1 @@
rootProject.name = "db-migrate.groovy"


@@ -0,0 +1,254 @@
package com.jdblabs.dbmigrate
import groovy.sql.Sql
import ch.qos.logback.classic.Level
import ch.qos.logback.classic.Logger as LBLogger
import com.jdbernard.util.AnsiEscapeCodeSequence as ANSI
import com.zaxxer.hikari.HikariConfig
import com.zaxxer.hikari.HikariDataSource
import java.io.FilenameFilter
import java.text.SimpleDateFormat
import org.docopt.Docopt
import org.slf4j.Logger
import org.slf4j.LoggerFactory
public class DbMigrate {
public static final VERSION = "0.2.5"
public static final def DOC = """\
db-migrate.groovy v${VERSION}
Usage:
db_migrate [options] create <migration-name>
db_migrate [options] up [<count>]
db_migrate [options] down [<count>]
db_migrate [options] init <schema-name>
db_migrate (-V | --version)
db_migrate (-h | --help)
Options:
-c --config <config-file> Use the given configuration file (defaults to
"database.properties").
-q --quiet Suppress log information.
-v --verbose Print detailed log information.
--very-verbose Print very detailed log information.
-V --version Print the tool's version information.
-h --help Print this usage information.
"""
private static sdf = new SimpleDateFormat('yyyyMMddHHmmss')
private static Logger LOGGER = LoggerFactory.getLogger(DbMigrate)
Sql sql
File sqlDir
public static void main(String[] args) {
// Parse arguments
def opts = new Docopt(DOC).withVersion("db-migrate.groovy v$VERSION").parse(args)
if (opts['--version']) {
println "db-migrate.groovy v$VERSION"
System.exit(0) }
if (opts['--help']) { println DOC; System.exit(0) }
// TODO: Setup logging & output levels
Logger clilog = LoggerFactory.getLogger("db-migrate.cli")
if (opts['--quiet']) {
((LBLogger) LOGGER).level = Level.ERROR
((LBLogger) LoggerFactory.getLogger(LBLogger.ROOT_LOGGER_NAME))
.level = Level.ERROR }
if (opts['--verbose']) {
((LBLogger) LOGGER).level = Level.DEBUG
((LBLogger) LoggerFactory.getLogger(LBLogger.ROOT_LOGGER_NAME))
.level = Level.INFO }
if (opts['--very-verbose']) {
((LBLogger) LOGGER).level = Level.TRACE
((LBLogger) LoggerFactory.getLogger(LBLogger.ROOT_LOGGER_NAME))
.level = Level.DEBUG }
// Load the configuration file
def givenCfg = new Properties()
File cfgFile
if (opts["--config"]) cfgFile = new File(opts["--config"])
if (!cfgFile || !cfgFile.exists() || !cfgFile.isFile())
cfgFile = new File("database.properties")
if (!cfgFile.exists() || !cfgFile.isFile()) {
clilog.warn("Config file '{}' does not exist or is not a regular file.",
cfgFile.canonicalPath) }
if (cfgFile.exists() && cfgFile.isFile()) {
try { cfgFile.withInputStream { givenCfg.load(it) } }
catch (Exception e) {
clilog.error("Could not read configuration file.", e)
givenCfg.clear() } }
// Check for migrations directory
File sqlDir = new File(givenCfg["sqlDir"] ?: 'migrations')
if (!sqlDir.exists() || !sqlDir.isDirectory()) {
clilog.error("'{}' does not exist or is not a directory.",
sqlDir.canonicalPath)
System.exit(1) }
// Instantiate the DbMigrate instance
DbMigrate dbmigrate = new DbMigrate(sqlDir: sqlDir)
// If we've only been asked to create a new migration, we don't need to
// setup the DB connection.
if (opts['create']) {
try {
List<File> files = dbmigrate.createMigration(opts['<migration-name>'])
clilog.info("Created new migration files:\n\t${files.name.join('\n\t')}")
return }
catch (Exception e) {
clilog.error('Unable to create migration scripts.', e)
System.exit(1) } }
// Create the datasource.
Properties dsProps = new Properties()
dsProps.putAll(givenCfg.findAll { it.key != 'sqlDir' })
HikariDataSource hds = new HikariDataSource(new HikariConfig(dsProps))
dbmigrate.sql = new Sql(hds)
// Execute the appropriate command.
if (opts['up']) dbmigrate.up(opts['<count>'])
else if (opts['down']) dbmigrate.down(opts['<count>'] ?: 1) }
public List<File> createMigration(String migrationName) {
String timestamp = sdf.format(new Date())
File upFile = new File(sqlDir, "$timestamp-$migrationName-up.sql")
File downFile = new File(sqlDir, "$timestamp-$migrationName-down.sql")
upFile.text = "-- UP script for $migrationName ($timestamp)"
downFile.text = "-- DOWN script for $migrationName ($timestamp)"
return [upFile, downFile] }
public def createMigrationsTable() {
LOGGER.trace('Checking for the existence of the migrations table and ' +
'creating it if it does not exist.')
sql.execute('''
CREATE TABLE IF NOT EXISTS migrations (
id SERIAL PRIMARY KEY,
name VARCHAR NOT NULL,
run_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW())''') }
public def diffMigrations() {
def results = [notRun: [], missing: []]
LOGGER.trace('Diffing migrations...')
results.run = sql.rows('SELECT name FROM migrations ORDER BY name')
.collect { it.name }.sort()
SortedSet<String> available = new TreeSet<>()
available.addAll(sqlDir
.listFiles({ d, n -> n ==~ /.+-(up|down).sql$/ } as FilenameFilter)
.collect { f -> f.name.replaceAll(/-(up|down).sql$/, '') })
available.each { migrationName ->
if (!results.run.contains(migrationName))
results.notRun << migrationName
// If we've already seen some migrations that have not been run but this
// one has been run, that means we have a gap and are missing migrations.
else if (results.notRun.size() > 0) {
results.missing += results.notRun
results.notRun = [] } }
LOGGER.trace('Migrations diff:\n\trun: {}\n\tnot run: {}\n\tmissing: {}',
results.run, results.notRun, results.missing)
return results }
public List<String> up(Integer count = null) {
createMigrationsTable()
def diff = diffMigrations()
if (diff.missing) {
LOGGER.error('Missing migrations:\n\t{}', diff.missing)
throw new Exception('Database is in an inconsistent state.') }
LOGGER.debug('Migrating up.')
List<String> toRun
if (!count || count >= diff.notRun.size()) toRun = diff.notRun
else toRun = diff.notRun[0..<count]
LOGGER.debug('{} migrations to run.', toRun.size())
LOGGER.trace('Migrations: {}.', toRun)
return runMigrations(toRun, true) }
public List<String> down(Integer count = 1) {
createMigrationsTable()
def diff = diffMigrations()
if (diff.missing) {
LOGGER.error('Missing migrations:\n\t{}', diff.missing)
throw new Exception('Database is in an inconsistent state.') }
LOGGER.debug('Migrating down.')
List<String> toRun = count < diff.run.size() ?
diff.run.reverse()[0..<count] : diff.run.reverse()
LOGGER.debug('{} migrations to run.', toRun.size())
LOGGER.trace('Migrations: {}.', toRun)
return runMigrations(toRun, false) }
private List<String> runMigrations(List<String> toRun, boolean up = true) {
List<String> migrationsRun = []
try {
LOGGER.trace("Beginning transaction.")
sql.execute('BEGIN')
toRun.each { migrationName ->
LOGGER.info(migrationName)
File migrationFile = new File(sqlDir,
"$migrationName-${up ? 'up' : 'down'}.sql")
if (!migrationFile.exists() || !migrationFile.isFile())
throw new FileNotFoundException(migrationFile.canonicalPath +
"does not exist or is not a regular file.")
runFile(migrationFile)
if (up) sql.execute(
'INSERT INTO migrations (name) VALUES (?)', migrationName)
else sql.execute(
'DELETE FROM migrations WHERE name = ?', migrationName)
migrationsRun << migrationName }
sql.execute('COMMIT')
LOGGER.info('Went {} {} migrations.',
up ? 'up' : 'down', migrationsRun.size()) }
catch (Exception e) { LOGGER.error('Migration failed; rolling back.', e); sql.execute('ROLLBACK') }
return migrationsRun }
public void runFile(File file) {
LOGGER.trace('Raw statements:\n\n{}\n', file.text.split(/;/).join('\n'))
List<String> statements = file.text.split(/;/)
.collect { it.replaceAll(/--.*$/, '').trim() }
.findAll { it.length() > 0 }
LOGGER.trace('Statements:\n\n{}\n', statements.join('\n'))
statements.each {
LOGGER.trace('Executing SQL: {}', it)
sql.execute(it) } }
}


@@ -3,18 +3,24 @@
##
## Simple tool to manage database migrations.
import algorithm, json, times, os, strutils, docopt, db_postgres, sets,
sequtils, logging
import algorithm, db_postgres, docopt, json, logging, os, sequtils, sets,
strutils, times
type
DbMigrateConfig* = tuple[ driver, sqlDir, connectionString: string, logLevel: Level ]
proc createMigrationsTable(conn: DbConn): void =
conn.exec(sql("""
proc ensureMigrationsTableExists(conn: DbConn): void =
let tableCount = conn.getValue(sql"""
SELECT COUNT(*) FROM information_schema.tables
WHERE table_name = 'migrations';""")
if tableCount.strip == "0":
info "Creating the migrations table as it does not already exist."
conn.exec(sql("""
CREATE TABLE IF NOT EXISTS migrations (
id SERIAL PRIMARY KEY,
name VARCHAR NOT NULL,
run_at TIMESTAMP NOT NULL DEFAULT NOW())"""))
run_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW())"""))
proc rollbackWithErr (pgConn: DbConn, errMsg: string): void =
pgConn.exec(sql"ROLLBACK")
@@ -24,20 +30,32 @@ proc loadConfig*(filename: string): DbMigrateConfig =
## Load DbMigrateConfig from a file.
let cfg = json.parseFile(filename)
var logLevel: Level
if cfg.hasKey("logLevel"):
var logLevel: Level = lvlInfo
if existsEnv("DBM_LOG_LEVEL"):
let idx = find(LevelNames, $getEnv("DBM_LOG_LEVEL").toUpper)
logLevel = if idx == -1: lvlInfo else: (Level)(idx)
elif cfg.hasKey("logLevel"):
let idx = find(LevelNames, cfg["logLevel"].getStr.toUpper)
logLevel = if idx == -1: lvlInfo else: (Level)(idx)
return (
driver: if cfg.hasKey("driver"): cfg["driver"].getStr else: "postres",
sqlDir: if cfg.hasKey("sqlDir"): cfg["sqlDir"].getStr else: "migrations",
connectionString: cfg["connectionString"].getStr,
driver:
if existsEnv("DATABASE_DRIVER"): $getEnv("DATABASE_DRIVER")
elif cfg.hasKey("driver"): cfg["driver"].getStr
else: "postres",
sqlDir:
if existsEnv("MIGRATIONS_DIR"): $getEnv("MIGRATIONS_DIR")
elif cfg.hasKey("sqlDir"): cfg["sqlDir"].getStr
else: "migrations",
connectionString:
if existsEnv("DATABASE_URL"): $getEnv("DATABASE_URL")
elif cfg.hasKey("connectionString"): cfg["connectionString"].getStr
else: "",
logLevel: logLevel)
proc createMigration*(config: DbMigrateConfig, migrationName: string): seq[string] =
## Create a new set of database migration files.
let timestamp = getTime().getLocalTime().format("yyyyMMddHHmmss")
let timestamp = now().format("yyyyMMddHHmmss")
let filenamePrefix = timestamp & "-" & migrationName
let upFilename = joinPath(config.sqlDir, filenamePrefix & "-up.sql")
@@ -60,13 +78,13 @@ proc diffMigrations*(pgConn: DbConn, config: DbMigrateConfig):
tuple[ run, notRun, missing: seq[string] ] =
# Query the database to find out what migrations have been run.
var migrationsRun = initSet[string]()
var migrationsRun = initHashSet[string]()
for row in pgConn.fastRows(sql"SELECT * FROM migrations ORDER BY name", @[]):
migrationsRun.incl(row[1])
# Inspect the filesystem to see what migrations are available.
var migrationsAvailable = initSet[string]()
var migrationsAvailable = initHashSet[string]()
for filePath in walkFiles(joinPath(config.sqlDir, "*.sql")):
var migrationName = filePath.extractFilename
migrationName.removeSuffix("-up.sql")
@@ -96,10 +114,21 @@ proc diffMigrations*(pgConn: DbConn, config: DbMigrateConfig):
missing: missingMigrations)
proc readStatements*(filename: string): seq[SqlQuery] =
let migrationSql = filename.readFile
result = migrationSql.split(';').
filter(proc(st: string): bool = st.strip.len > 0 and not st.startsWith("--")).
map(proc(st: string): SqlQuery = sql(st & ";"))
result = @[]
var stmt: string = ""
for line in filename.lines:
let l = line.strip
if l.len == 0 or l.startsWith("--"): continue
let parts = line.split(';')
stmt &= "\n" & parts[0]
if parts.len > 1:
result.add(sql(stmt & ";"))
stmt = parts[1] & "";
if stmt.strip.len > 0: result.add(sql(stmt))
proc up*(pgConn: DbConn, config: DbMigrateConfig, toRun: seq[string]): seq[string] =
var migrationsRun = newSeq[string]()
@@ -118,7 +147,8 @@ proc up*(pgConn: DbConn, config: DbMigrateConfig, toRun: seq[string]): seq[strin
let statements = filename.readStatements
try:
for statement in statements: pgConn.exec(statement)
for statement in statements:
pgConn.exec(statement)
pgConn.exec(sql"INSERT INTO migrations (name) VALUES (?);", migration)
except DbError:
pgConn.rollbackWithErr "Migration '" & migration & "' failed:\n\t" &
@@ -166,12 +196,15 @@ Usage:
db_migrate [options] down [<count>]
db_migrate [options] init <schema-name>
db_migrate (-V | --version)
db_migrate (-h | --help)
Options:
-c --config <config-file> Use the given configuration file (defaults to
"database.json").
-h --help Show this usage information.
-q --quiet Suppress log information.
-v --verbose Print detailed log information.
@@ -182,7 +215,7 @@ Options:
"""
# Parse arguments
let args = docopt(doc, version = "db-migrate 0.2.0")
let args = docopt(doc, version = "db-migrate (Nim) 0.2.8\nhttps://git.jdb-software.com/jdb/db-migrate")
let exitErr = proc(msg: string): void =
fatal("db_migrate: " & msg)
@@ -190,16 +223,17 @@ Options:
# Load configuration file
let configFilename =
if args["--config"]: $args["<config-file>"]
if args["--config"]: $args["--config"]
else: "database.json"
var config: DbMigrateConfig
try:
config = loadConfig(configFilename)
except IOError:
exitErr "Cannot open config file: " & configFilename
writeLine(stderr, "db_migrate: Cannot open config file: " & configFilename)
except:
exitErr "Error parsing config file: " & configFilename & "\L\t" & getCurrentExceptionMsg()
writeLine(stderr, "db_migrate: Error parsing config file: " &
configFilename & "\L\t" & getCurrentExceptionMsg())
logging.addHandler(newConsoleLogger())
@@ -209,7 +243,7 @@ Options:
else: logging.setLogFilter(config.logLevel)
# Check for migrations directory
if not existsDir config.sqlDir:
if not dirExists config.sqlDir:
try:
warn "SQL directory '" & config.sqlDir &
"' does not exist and will be created."
@@ -230,10 +264,10 @@ Options:
let pgConn: DbConn =
try: open("", "", "", config.connectionString)
except DbError:
exitErr "Unable to connect to the database."
exitErr "Unable to connect to the database: " & getCurrentExceptionMsg()
(DbConn)(nil)
pgConn.createMigrationsTable
pgConn.ensureMigrationsTableExists
let (run, notRun, missing) = diffMigrations(pgConn, config)


@@ -0,0 +1,17 @@
import ch.qos.logback.core.*;
import ch.qos.logback.core.encoder.*;
import ch.qos.logback.core.read.*;
import ch.qos.logback.core.rolling.*;
import ch.qos.logback.core.status.*;
import ch.qos.logback.classic.net.*;
import ch.qos.logback.classic.encoder.PatternLayoutEncoder;
appender("STDOUT", ConsoleAppender) {
encoder(PatternLayoutEncoder) {
pattern = "db-migrate.groovy: %level - %msg%n"
}
}
root(WARN, ["STDOUT"])
logger('com.jdblabs.dbmigrate', INFO)