Update API to handle the multiple formats of datetime Postgres might use.

This commit is contained in:
Jonathan Bernard 2019-05-18 12:18:35 -05:00
parent 4c60c30b7d
commit 5026e3963c
4 changed files with 22 additions and 7 deletions

5
api/database-prod.json Normal file
View File

@ -0,0 +1,5 @@
{
"driver": "postgres",
"connectionString": "host=localhost port=5999 dbname=personal_measure user=postgres",
"sqlDir": "src/main/sql/migrations"
}

View File

@ -17,4 +17,4 @@ requires @["nim >= 0.19.4", "bcrypt", "docopt >= 0.6.8", "isaac >= 0.1.3",
"jester >= 0.4.1", "jwt", "tempfile", "uuids >= 0.1.10" ] "jester >= 0.4.1", "jwt", "tempfile", "uuids >= 0.1.10" ]
requires "https://git.jdb-labs.com/jdb/nim-cli-utils.git >= 0.6.3" requires "https://git.jdb-labs.com/jdb/nim-cli-utils.git >= 0.6.3"
requires "https://git.jdb-labs.com/jdb/nim-time-utils.git >= 0.4.0" requires "https://git.jdb-labs.com/jdb/nim-time-utils.git >= 0.5.0"

View File

@ -2,7 +2,10 @@ import json, macros, options, sequtils, strutils, times, timeutils, unicode,
uuids uuids
const UNDERSCORE_RUNE = "_".toRunes[0] const UNDERSCORE_RUNE = "_".toRunes[0]
const PG_TIMESTAMP_FORMAT = "yyyy-MM-dd HH:mm:sszz" const PG_TIMESTAMP_FORMATS = [
"yyyy-MM-dd HH:mm:sszz",
"yyyy-MM-dd HH:mm:ss'.'fffzz"
]
type type
MutateClauses* = object MutateClauses* = object
@ -62,6 +65,13 @@ proc dbFormat*[T](item: T): string = return $item
type DbArrayParseState = enum type DbArrayParseState = enum
expectStart, inQuote, inVal, expectEnd expectStart, inQuote, inVal, expectEnd
proc parsePGDatetime*(val: string): DateTime =
  ## Parse a timestamp string as returned by Postgres into a DateTime.
  ##
  ## Postgres emits timestamps in more than one textual form (with or
  ## without fractional seconds), so each candidate pattern in
  ## PG_TIMESTAMP_FORMATS is tried in order and the first successful
  ## parse is returned.
  ##
  ## Raises a ValueError listing the error from every attempted format
  ## when none of them match `val`.
  var errDetails = ""
  for fmt in PG_TIMESTAMP_FORMATS:
    try:
      return val.parse(fmt)
    except CatchableError:
      # Catch only recoverable parse errors: a bare `except:` would also
      # swallow Defects (programmer bugs) and misreport them as an
      # unparseable date.
      errDetails &= "\n" & getCurrentExceptionMsg()
  raise newException(ValueError, "Cannot parse PG date. Tried:" & errDetails)
proc parseDbArray*(val: string): seq[string] = proc parseDbArray*(val: string): seq[string] =
result = newSeq[string]() result = newSeq[string]()
@ -129,7 +139,7 @@ proc createParseStmt*(t, value: NimNode): NimNode =
result = quote do: parseUUID(`value`) result = quote do: parseUUID(`value`)
elif t.getType == DateTime.getType: elif t.getType == DateTime.getType:
result = quote do: `value`.parse(PG_TIMESTAMP_FORMAT) result = quote do: parsePGDatetime(`value`)
elif t.getTypeInst == Option.getType: elif t.getTypeInst == Option.getType:
let innerType = t.getTypeImpl[2][0][0][1] let innerType = t.getTypeImpl[2][0][0][1]

View File

@ -14,8 +14,8 @@ create table "api_tokens" (
id uuid default uuid_generate_v4() primary key, id uuid default uuid_generate_v4() primary key,
user_id uuid not null references users (id) on delete cascade on update cascade, user_id uuid not null references users (id) on delete cascade on update cascade,
name varchar not null, name varchar not null,
created timestamp with time zone not null default current_timestamp, created timestamp(0) with time zone not null default current_timestamp,
expires timestamp with time zone default null, expires timestamp(0) with time zone default null,
hashed_token varchar not null hashed_token varchar not null
); );
@ -33,7 +33,7 @@ create table "measurements" (
id uuid default uuid_generate_v4() primary key, id uuid default uuid_generate_v4() primary key,
measure_id uuid not null references measures (id) on delete cascade on update cascade, measure_id uuid not null references measures (id) on delete cascade on update cascade,
value integer not null, value integer not null,
"timestamp" timestamp with time zone not null default current_timestamp, "timestamp" timestamp(0) with time zone not null default current_timestamp,
ext_data jsonb not null default '{}'::json ext_data jsonb not null default '{}'::json
); );
@ -44,7 +44,7 @@ create table client_log_entries (
"scope" varchar not null, "scope" varchar not null,
message varchar not null, message varchar not null,
stacktrace varchar not null, stacktrace varchar not null,
"timestamp" timestamp with time zone not null default current_timestamp "timestamp" timestamp(0) with time zone not null default current_timestamp
); );
create index client_log_entries_by_level on client_log_entries ("level"); create index client_log_entries_by_level on client_log_entries ("level");