Compare commits
3 Commits
| Author | SHA1 | Date |
| --- | --- | --- |
|  | 6837e5448b |  |
|  | daf3a8dad0 |  |
|  | 4e771345ea |  |
README.md (36 changed lines)
@@ -1,4 +1,36 @@
-DB Migrate
-==========
+# DB Migrate
 
 Small tool(s) to manage database migrations in various languages.
+
+## Usage
+
+```
+Usage:
+  db_migrate [options] create <migration-name>
+  db_migrate [options] up [<count>]
+  db_migrate [options] down [<count>]
+  db_migrate [options] init <schema-name>
+  db_migrate (-V | --version)
+  db_migrate (-h | --help)
+
+Options:
+  -c --config <config-file>   Use the given configuration file (defaults to
+                              "database.properties").
+  -q --quiet                  Suppress log information.
+  -v --verbose                Print detailed log information.
+  --very-verbose              Print very detailed log information.
+  -V --version                Print the tools version information.
+  -h --help                   Print this usage information.
+```
+
+
+## Database Config Format
+
+The database config is formatted as JSON. The following keys are supported by
+all of the implementations:
+
+* `sqlDir` -- Directory to store SQL files.
+
+The following keys are supported by the Nim implementation:
+
+* `connectionString` --
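For illustration, a config file of the kind the new README describes could be read with Nim's `std/json` as sketched below. The `sqlDir` and `connectionString` key names come from the README above; the concrete values and this standalone parsing snippet are only examples, not the tool's own `loadConfig`.

```nim
import json

# Hypothetical database.json contents; key names follow the README above,
# the values are only placeholders.
let raw = """
{
  "sqlDir": "migrations",
  "connectionString": "host=localhost port=5432 dbname=example user=example"
}
"""

let cfg = parseJson(raw)                # std/json parser
echo cfg["sqlDir"].getStr()             # -> migrations
echo cfg["connectionString"].getStr()   # -> host=localhost ...
```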
@@ -1,7 +1,7 @@
 # Package
 
 bin = @["db_migrate"]
-version = "0.2.6"
+version = "0.2.8"
 author = "Jonathan Bernard"
 description = "Simple tool to handle database migrations."
 license = "BSD"
@@ -9,5 +9,4 @@ srcDir = "src/main/nim"
 
 # Dependencies
 
-requires: @["nim >= 0.13.0", "docopt >= 0.1.0"]
-
+requires: @["nim >= 1.4.0", "docopt >= 0.1.0"]
@@ -42,7 +42,7 @@ Options:
   private static Logger LOGGER = LoggerFactory.getLogger(DbMigrate)
 
   Sql sql
-  File migrationsDir
+  File sqlDir
 
   public static void main(String[] args) {
 
@@ -90,14 +90,14 @@ Options:
        givenCfg.clear() } }
 
    // Check for migrations directory
-    File migrationsDir = new File(givenCfg["migrations.dir"] ?: 'migrations')
-    if (!migrationsDir.exists() || !migrationsDir.isDirectory()) {
+    File sqlDir = new File(givenCfg["sqlDir"] ?: 'migrations')
+    if (!sqlDir.exists() || !sqlDir.isDirectory()) {
      clilog.error("'{}' does not exist or is not a directory.",
-        migrationsDir.canonicalPath)
+        sqlDir.canonicalPath)
      System.exit(1) }
 
    // Instantiate the DbMigrate instance
-    DbMigrate dbmigrate = new DbMigrate(migrationsDir: migrationsDir)
+    DbMigrate dbmigrate = new DbMigrate(sqlDir: sqlDir)
 
    // If we've only been asked to create a new migration, we don't need to
    // setup the DB connection.
@@ -112,7 +112,7 @@ Options:
 
    // Create the datasource.
    Properties dsProps = new Properties()
-    dsProps.putAll(givenCfg.findAll { it.key != 'migrations.dir' })
+    dsProps.putAll(givenCfg.findAll { it.key != 'sqlDir' })
 
    HikariDataSource hds = new HikariDataSource(new HikariConfig(dsProps))
 
@@ -125,8 +125,8 @@ Options:
   public List<File> createMigration(String migrationName) {
    String timestamp = sdf.format(new Date())
 
-    File upFile = new File(migrationsDir, "$timestamp-$migrationName-up.sql")
-    File downFile = new File(migrationsDir, "$timestamp-$migrationName-down.sql")
+    File upFile = new File(sqlDir, "$timestamp-$migrationName-up.sql")
+    File downFile = new File(sqlDir, "$timestamp-$migrationName-down.sql")
 
    upFile.text = "-- UP script for $migrationName ($timestamp)"
    downFile.text = "-- DOWN script for $migrationName ($timestamp)"
@@ -140,7 +140,7 @@ Options:
 CREATE TABLE IF NOT EXISTS migrations (
   id SERIAL PRIMARY KEY,
   name VARCHAR NOT NULL,
-  run_at TIMESTAMP NOT NULL DEFAULT NOW())''') }
+  run_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW())''') }
 
   public def diffMigrations() {
    def results = [notRun: [], missing: []]
@@ -150,7 +150,7 @@ CREATE TABLE IF NOT EXISTS migrations (
      .collect { it.name }.sort()
 
    SortedSet<String> available = new TreeSet<>()
-    available.addAll(migrationsDir
+    available.addAll(sqlDir
      .listFiles({ d, n -> n ==~ /.+-(up|down).sql$/ } as FilenameFilter)
      .collect { f -> f.name.replaceAll(/-(up|down).sql$/, '') })
 
@@ -215,7 +215,7 @@ CREATE TABLE IF NOT EXISTS migrations (
 
    toRun.each { migrationName ->
      LOGGER.info(migrationName)
-      File migrationFile = new File(migrationsDir,
+      File migrationFile = new File(sqlDir,
        "$migrationName-${up ? 'up' : 'down'}.sql")
 
      if (!migrationFile.exists() || !migrationFile.isFile())
@@ -3,15 +3,15 @@
 ##
 ## Simple tool to manage database migrations.
 
-import algorithm, json, times, os, strutils, docopt, db_postgres, sets,
-       sequtils, logging
+import algorithm, db_postgres, docopt, json, logging, os, sequtils, sets,
+       strutils, times
 
 type
   DbMigrateConfig* = tuple[ driver, sqlDir, connectionString: string, logLevel: Level ]
 
 proc ensureMigrationsTableExists(conn: DbConn): void =
   let tableCount = conn.getValue(sql"""
-    SELECT COUNT(*) FROM information_schema.tables
+    SELECT COUNT(*) FROM information_schema.tables
     WHERE table_name = 'migrations';""")
 
   if tableCount.strip == "0":
@@ -55,7 +55,7 @@ proc loadConfig*(filename: string): DbMigrateConfig =
 
 proc createMigration*(config: DbMigrateConfig, migrationName: string): seq[string] =
   ## Create a new set of database migration files.
-  let timestamp = getTime().getLocalTime().format("yyyyMMddHHmmss")
+  let timestamp = now().format("yyyyMMddHHmmss")
   let filenamePrefix = timestamp & "-" & migrationName
 
   let upFilename = joinPath(config.sqlDir, filenamePrefix & "-up.sql")
@@ -78,13 +78,13 @@ proc diffMigrations*(pgConn: DbConn, config: DbMigrateConfig):
     tuple[ run, notRun, missing: seq[string] ] =
 
   # Query the database to find out what migrations have been run.
-  var migrationsRun = initSet[string]()
+  var migrationsRun = initHashSet[string]()
 
   for row in pgConn.fastRows(sql"SELECT * FROM migrations ORDER BY name", @[]):
     migrationsRun.incl(row[1])
 
   # Inspect the filesystem to see what migrations are available.
-  var migrationsAvailable = initSet[string]()
+  var migrationsAvailable = initHashSet[string]()
   for filePath in walkFiles(joinPath(config.sqlDir, "*.sql")):
     var migrationName = filePath.extractFilename
     migrationName.removeSuffix("-up.sql")
@@ -196,12 +196,15 @@ Usage:
   db_migrate [options] down [<count>]
+  db_migrate [options] init <schema-name>
   db_migrate (-V | --version)
   db_migrate (-h | --help)
 
 Options:
 
   -c --config <config-file>  Use the given configuration file (defaults to
                              "database.json").
 
   -h --help                  Show this usage information.
 
   -q --quiet                 Suppress log information.
 
   -v --verbose               Print detailed log information.
@@ -212,7 +215,7 @@ Options:
 """
 
   # Parse arguments
-  let args = docopt(doc, version = "db-migrate 0.2.6")
+  let args = docopt(doc, version = "db-migrate (Nim) 0.2.8\nhttps://git.jdb-software.com/jdb/db-migrate")
 
   let exitErr = proc(msg: string): void =
     fatal("db_migrate: " & msg)
@@ -240,7 +243,7 @@ Options:
   else: logging.setLogFilter(config.logLevel)
 
   # Check for migrations directory
-  if not existsDir config.sqlDir:
+  if not dirExists config.sqlDir:
    try:
      warn "SQL directory '" & config.sqlDir &
        "' does not exist and will be created."
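The Nim changes above track standard-library renames, consistent with the bumped `nim >= 1.4.0` requirement: `getTime().getLocalTime()` gives way to `now()`, `initSet` to `initHashSet`, and `existsDir` to `dirExists`. A minimal sketch of the newer spellings, independent of the tool itself (the file and directory names here are only examples):

```nim
import os, sets, times

# Current spellings of the calls swapped in above.
let timestamp = now().format("yyyyMMddHHmmss")   # replaces getTime().getLocalTime().format(...)
echo timestamp

var seen = initHashSet[string]()                 # replaces initSet[string]()
seen.incl(timestamp & "-create-users")
echo seen.len

if not dirExists("migrations"):                  # replaces existsDir
  echo "migrations directory is missing"
```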