Compare commits

2 Commits

Author | SHA1       | Date
-------|------------|-----
       | a05555ee67 |
       | 454fc8c47a |
@@ -1,6 +1,6 @@
 # Package

-version = "1.0.4"
+version = "2.1.0"
 author = "Jonathan Bernard"
 description = "Lightweight Postgres ORM for Nim."
 license = "GPL-3.0"
@@ -104,26 +104,34 @@
 ##
 ## .. code-block:: Nim
 ##    proc getTodoItem*(db: TodoDB, id: UUID): TodoItem;
-##    proc getAllTodoItems*(db: TodoDB): seq[TodoItem];
+##
 ##    proc createTodoItem*(db: TodoDB, rec: TodoItem): TodoItem;
 ##    proc updateTodoItem*(db: TodoDB, rec: TodoItem): bool;
 ##    proc deleteTodoItem*(db: TodoDB, rec: TodoItem): bool;
 ##    proc deleteTodoItem*(db: TodoDB, id: UUID): bool;
 ##
+##    proc getAllTodoItems*(db: TodoDB,
+##      pagination = none[PaginationParams]()): seq[TodoItem];
+##
 ##    proc findTodoItemsWhere*(db: TodoDB, whereClause: string,
-##      values: varargs[string, dbFormat]): seq[TodoItem];
+##      values: varargs[string, dbFormat], pagination = none[PaginationParams]()
+##    ): seq[TodoItem];
 ##
 ##    proc getTimeEntry*(db: TodoDB, id: UUID): TimeEntry;
-##    proc getAllTimeEntries*(db: TodoDB): seq[TimeEntry];
 ##    proc createTimeEntry*(db: TodoDB, rec: TimeEntry): TimeEntry;
 ##    proc updateTimeEntry*(db: TodoDB, rec: TimeEntry): bool;
 ##    proc deleteTimeEntry*(db: TodoDB, rec: TimeEntry): bool;
 ##    proc deleteTimeEntry*(db: TodoDB, id: UUID): bool;
 ##
-##    proc findTimeEntriesWhere*(db: TodoDB, whereClause: string,
-##      values: varargs[string, dbFormat]): seq[TimeEntry];
+##    proc getAllTimeEntries*(db: TodoDB,
+##      pagination = none[PaginationParams]()): seq[TimeEntry];
 ##
-##    proc findTimeEntriesByTodoItemId(db: TodoDB, todoItemId: UUID): seq[TimeEntry];
+##    proc findTimeEntriesWhere*(db: TodoDB, whereClause: string,
+##      values: varargs[string, dbFormat], pagination = none[PaginationParams]()
+##    ): seq[TimeEntry];
+##
+##    proc findTimeEntriesByTodoItemId(db: TodoDB, todoItemId: UUID,
+##      pagination = none[PaginationParams]()): seq[TimeEntry];
 ##
 ## Object-Relational Modeling
 ## ==========================
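The practical effect of these doc-comment changes is that every generated "get all" and "find" proc now accepts an optional pagination argument and, per the later hunks, returns a PagedRecords[T] rather than a bare seq. A minimal caller-side sketch, assuming a TodoDB pool value `db` and the TodoItem model from the example above (placeholders, not part of this diff):

    import std/options
    import fiber_orm

    # First page of 25 items; the orderBy column is illustrative.
    let page = some(PaginationParams(pageSize: 25, offset: 0, orderBy: some("id")))

    let paged = db.getAllTodoItems(pagination = page)
    echo "showing ", paged.records.len, " of ", paged.totalRecords, " items"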
@@ -274,11 +282,12 @@
 ##
 ## .. _pool.DbConnPool: fiber_orm/pool.html#DbConnPool
 ##
-import std/db_postgres, std/macros, std/options, std/sequtils, std/strutils
+import std/[db_common, logging, macros, options, sequtils, strutils]
 import namespaced_logging, uuids

 from std/unicode import capitalize

+import ./fiber_orm/db_common as fiber_db_common
 import ./fiber_orm/pool
 import ./fiber_orm/util

@@ -292,13 +301,24 @@ export
   util.rowToModel,
   util.tableName

-type NotFoundError* = object of CatchableError ##\
+type
+  PaginationParams* = object
+    pageSize*: int
+    offset*: int
+    orderBy*: Option[string]
+
+  PagedRecords*[T] = object
+    pagination*: Option[PaginationParams]
+    records*: seq[T]
+    totalRecords*: int
+
+  NotFoundError* = object of CatchableError ##\
     ## Error type raised when no record matches a given ID

 var logNs {.threadvar.}: LoggingNamespace

 template log(): untyped =
-  if logNs.isNil: logNs = initLoggingNamespace(name = "fiber_orm", level = lvlNotice)
+  if logNs.isNil: logNs = getLoggerForNamespace(namespace = "fiber_orm", level = lvlNotice)
   logNs

 proc newMutateClauses(): MutateClauses =
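The two new objects are plain data carriers: PaginationParams describes the requested page, PagedRecords bundles one page of results with the total match count. A small sketch of how they fit together (values illustrative only, assuming `import fiber_orm` for the types):

    import std/options
    import fiber_orm

    # Ask for the third page of 10 records, ordered by a caller-chosen column.
    let page = PaginationParams(pageSize: 10, offset: 20, orderBy: some("created_at"))

    proc pageCount[T](paged: PagedRecords[T]): int =
      ## Pages implied by the total row count and the requested page size.
      if paged.pagination.isNone: 1
      else: (paged.totalRecords + paged.pagination.get.pageSize - 1) div
            paged.pagination.get.pageSize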
@@ -307,7 +327,7 @@ proc newMutateClauses(): MutateClauses =
     placeholders: @[],
     values: @[])

-proc createRecord*[T](db: DbConn, rec: T): T =
+proc createRecord*[D: DbConnType, T](db: D, rec: T): T =
   ## Create a new record. `rec` is expected to be a `model class`_. The `id`
   ## field is only set if it is non-empty (see `ID Field`_ for details).
   ##
@@ -330,7 +350,7 @@ proc createRecord*[T](db: DbConn, rec: T): T =

   result = rowToModel(T, newRow)

-proc updateRecord*[T](db: DbConn, rec: T): bool =
+proc updateRecord*[D: DbConnType, T](db: D, rec: T): bool =
   ## Update a record by id. `rec` is expected to be a `model class`_.
   var mc = newMutateClauses()
   populateMutateClauses(rec, false, mc)
@@ -346,13 +366,13 @@ proc updateRecord*[T](db: DbConn, rec: T): bool =

   return numRowsUpdated > 0;

-template deleteRecord*(db: DbConn, modelType: type, id: typed): untyped =
+template deleteRecord*[D: DbConnType](db: D, modelType: type, id: typed): untyped =
   ## Delete a record by id.
   let sqlStmt = "DELETE FROM " & tableName(modelType) & " WHERE id = ?"
   log().debug "deleteRecord: [" & sqlStmt & "] id: " & $id
   db.tryExec(sql(sqlStmt), $id)

-proc deleteRecord*[T](db: DbConn, rec: T): bool =
+proc deleteRecord*[D: DbConnType, T](db: D, rec: T): bool =
   ## Delete a record by `id`_.
   ##
   ## .. _id: #model-class-id-field
@@ -360,7 +380,7 @@ proc deleteRecord*[T](db: DbConn, rec: T): bool =
   log().debug "deleteRecord: [" & sqlStmt & "] id: " & $rec.id
   return db.tryExec(sql(sqlStmt), $rec.id)

-template getRecord*(db: DbConn, modelType: type, id: typed): untyped =
+template getRecord*[D: DbConnType](db: D, modelType: type, id: typed): untyped =
   ## Fetch a record by id.
   let sqlStmt =
     "SELECT " & columnNamesForModel(modelType).join(",") &
@@ -375,37 +395,114 @@ template getRecord*(db: DbConn, modelType: type, id: typed): untyped =

   rowToModel(modelType, row)

-template findRecordsWhere*(db: DbConn, modelType: type, whereClause: string, values: varargs[string, dbFormat]): untyped =
+template findRecordsWhere*[D: DbConnType](
+    db: D,
+    modelType: type,
+    whereClause: string,
+    values: varargs[string, dbFormat],
+    page: Option[PaginationParams]): untyped =
   ## Find all records matching a given `WHERE` clause. The number of elements in
   ## the `values` array must match the number of placeholders (`?`) in the
   ## provided `WHERE` clause.
-  let sqlStmt =
+  var fetchStmt =
     "SELECT " & columnNamesForModel(modelType).join(",") &
     " FROM " & tableName(modelType) &
     " WHERE " & whereClause

-  log().debug "findRecordsWhere: [" & sqlStmt & "] values: (" & values.join(", ") & ")"
-  db.getAllRows(sql(sqlStmt), values).mapIt(rowToModel(modelType, it))
+  var countStmt =
+    "SELECT COUNT(*) FROM " & tableName(modelType) &
+    " WHERE " & whereClause
+
+  if page.isSome:
+    let p = page.get
+    if p.orderBy.isSome:
+      fetchStmt &= " ORDER BY " & p.orderBy.get
+    else:
+      fetchStmt &= " ORDER BY id"
+
+    fetchStmt &= " LIMIT " & $p.pageSize &
+      " OFFSET " & $p.offset
+
+  log().debug "findRecordsWhere: [" & fetchStmt & "] values: (" & values.join(", ") & ")"
+  let records = db.getAllRows(sql(fetchStmt), values).mapIt(rowToModel(modelType, it))
+
+  PagedRecords[modelType](
+    pagination: page,
+    records: records,
+    totalRecords:
+      if page.isNone: records.len
+      else: db.getRow(sql(countStmt), values)[0].parseInt)

-template getAllRecords*(db: DbConn, modelType: type): untyped =
+template getAllRecords*[D: DbConnType](
+    db: D,
+    modelType: type,
+    page: Option[PaginationParams]): untyped =
   ## Fetch all records of the given type.
-  let sqlStmt =
+  var fetchStmt =
     "SELECT " & columnNamesForModel(modelType).join(",") &
     " FROM " & tableName(modelType)

-  log().debug "getAllRecords: [" & sqlStmt & "]"
-  db.getAllRows(sql(sqlStmt)).mapIt(rowToModel(modelType, it))
+  var countStmt = "SELECT COUNT(*) FROM " & tableName(modelType)
+
+  if page.isSome:
+    let p = page.get
+    if p.orderBy.isSome:
+      fetchStmt &= " ORDER BY " & p.orderBy.get
+    else:
+      fetchStmt &= " ORDER BY id"
+
+    fetchStmt &= " LIMIT " & $p.pageSize &
+      " OFFSET " & $p.offset
+
+  log().debug "getAllRecords: [" & fetchStmt & "]"
+  let records = db.getAllRows(sql(fetchStmt)).mapIt(rowToModel(modelType, it))
+
+  PagedRecords[modelType](
+    pagination: page,
+    records: records,
+    totalRecords:
+      if page.isNone: records.len
+      else: db.getRow(sql(countStmt))[0].parseInt)

-template findRecordsBy*(db: DbConn, modelType: type, lookups: seq[tuple[field: string, value: string]]): untyped =
+template findRecordsBy*[D: DbConnType](
+    db: D,
+    modelType: type,
+    lookups: seq[tuple[field: string, value: string]],
+    page: Option[PaginationParams]): untyped =
   ## Find all records matching the provided lookup values.
-  let sqlStmt =
-    "SELECT " & columnNamesForModel(modelType).join(",") &
-    " FROM " & tableName(modelType) &
-    " WHERE " & lookups.mapIt(it.field & " = ?").join(" AND ")
+  let whereClause = lookups.mapIt(it.field & " = ?").join(" AND ")
   let values = lookups.mapIt(it.value)

-  log().debug "findRecordsBy: [" & sqlStmt & "] values (" & values.join(", ") & ")"
-  db.getAllRows(sql(sqlStmt), values).mapIt(rowToModel(modelType, it))
+  var fetchStmt =
+    "SELECT " & columnNamesForModel(modelType).join(",") &
+    " FROM " & tableName(modelType) &
+    " WHERE " & whereClause
+
+  var countStmt =
+    "SELECT COUNT(*) FROM " & tableName(modelType) &
+    " WHERE " & whereClause
+
+  if page.isSome:
+    let p = page.get
+    if p.orderBy.isSome:
+      fetchStmt &= " ORDER BY " & p.orderBy.get
+    else:
+      fetchStmt &= " ORDER BY id"
+
+    fetchStmt &= " LIMIT " & $p.pageSize &
+      " OFFSET " & $p.offset
+
+  log().debug "findRecordsBy: [" & fetchStmt & "] values (" & values.join(", ") & ")"
+  let records = db.getAllRows(sql(fetchStmt), values).mapIt(rowToModel(modelType, it))
+
+  PagedRecords[modelType](
+    pagination: page,
+    records: records,
+    totalRecords:
+      if page.isNone: records.len
+      else: db.getRow(sql(countStmt), values)[0].parseInt)

 macro generateProcsForModels*(dbType: type, modelTypes: openarray[type]): untyped =
   ## Generate all standard access procedures for the given model types. For a
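To make the string building above concrete, here is roughly what the new findRecordsWhere path produces for a paged call. Table and column names are illustrative; the exact spacing follows the concatenation order in the template.

    import std/options
    import fiber_orm

    # Requesting the third page of 10, ordered by a caller-chosen column:
    let page = some(PaginationParams(pageSize: 10, offset: 20, orderBy: some("priority")))

    # With that value the template builds two statements (names illustrative):
    #   fetchStmt: SELECT id,owner,summary FROM todo_items
    #              WHERE owner = ? ORDER BY priority LIMIT 10 OFFSET 20
    #   countStmt: SELECT COUNT(*) FROM todo_items WHERE owner = ?
    # The COUNT query only runs when page.isSome; otherwise totalRecords is just
    # the length of the fetched seq.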
@@ -430,6 +527,10 @@ macro generateProcsForModels*(dbType: type, modelTypes: openarray[type]): untyped =
   result = newStmtList()

   for t in modelTypes:
+    if t.getType[1].typeKind == ntyRef:
+      raise newException(ValueError,
+        "fiber_orm model object must be objects, not refs")
+
     let modelName = $(t.getType[1])
     let getName = ident("get" & modelName)
     let getAllName = ident("getAll" & pluralize(modelName))
@@ -442,12 +543,16 @@ macro generateProcsForModels*(dbType: type, modelTypes: openarray[type]): untyped =
       proc `getName`*(db: `dbType`, id: `idType`): `t` =
         db.withConn: result = getRecord(conn, `t`, id)

-      proc `getAllName`*(db: `dbType`): seq[`t`] =
-        db.withConn: result = getAllRecords(conn, `t`)
+      proc `getAllName`*(db: `dbType`, pagination = none[PaginationParams]()): PagedRecords[`t`] =
+        db.withConn: result = getAllRecords(conn, `t`, pagination)

-      proc `findWhereName`*(db: `dbType`, whereClause: string, values: varargs[string, dbFormat]): seq[`t`] =
+      proc `findWhereName`*(
+          db: `dbType`,
+          whereClause: string,
+          values: varargs[string, dbFormat],
+          pagination = none[PaginationParams]()): PagedRecords[`t`] =
         db.withConn:
-          result = findRecordsWhere(conn, `t`, whereClause, values)
+          result = findRecordsWhere(conn, `t`, whereClause, values, pagination)

       proc `createName`*(db: `dbType`, rec: `t`): `t` =
         db.withConn: result = createRecord(conn, rec)
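As a reminder of how these generated procs are obtained, the macro is invoked roughly like this (TodoDB, TodoItem, and TimeEntry are placeholder names taken from the module docs, not part of this diff):

    # Generates getTodoItem, getAllTodoItems, findTodoItemsWhere, createTodoItem,
    # updateTodoItem, deleteTodoItem (plus the TimeEntry equivalents), with the
    # pagination parameter and PagedRecords return types shown in this hunk.
    generateProcsForModels(TodoDB, [TodoItem, TimeEntry])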
@@ -478,7 +583,7 @@ macro generateLookup*(dbType: type, modelType: type, fields: seq[string]): untyped =

   # Create proc skeleton
   result = quote do:
-    proc `procName`*(db: `dbType`): seq[`modelType`] =
+    proc `procName`*(db: `dbType`): PagedRecords[`modelType`] =
       db.withConn: result = findRecordsBy(conn, `modelType`)

   var callParams = quote do: @[]
@@ -496,12 +601,18 @@ macro generateLookup*(dbType: type, modelType: type, fields: seq[string]): untyped =
     # Build up the AST for the inner procedure call
     callParams[1].add(paramTuple)

+  # Add the optional pagination parameters to the generated proc definition
+  result[3].add(newIdentDefs(
+    ident("pagination"), newEmptyNode(),
+    quote do: none[PaginationParams]()))
+
   # Add the call params to the inner procedure call
   # result[6][0][1][0][1] is
   # ProcDef -> [6]: StmtList (body) -> [0]: Call ->
   #   [1]: StmtList (withConn body) -> [0]: Asgn (result =) ->
   #   [1]: Call (inner findRecords invocation)
   result[6][0][1][0][1].add(callParams)
+  result[6][0][1][0][1].add(quote do: pagination)

 macro generateProcsForFieldLookups*(dbType: type, modelsAndFields: openarray[tuple[t: type, fields: seq[string]]]): untyped =
   result = newStmtList()
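Written out by hand, the two added AST operations amount to giving every generated lookup proc a trailing defaulted parameter and forwarding it to the inner findRecordsBy call. For a hypothetical findTodoItemsByOwner lookup (all names here are placeholders), the expanded proc would read approximately:

    proc findTodoItemsByOwner*(db: TodoDB, owner: string,
        pagination = none[PaginationParams]()): PagedRecords[TodoItem] =
      db.withConn:
        result = findRecordsBy(conn, TodoItem, @[(field: "owner", value: owner)], pagination)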
@@ -514,7 +625,7 @@ macro generateProcsForFieldLookups*(dbType: type, modelsAndFields: openarray[tuple[t: type, fields: seq[string]]]): untyped =

     # Create proc skeleton
     let procDefAST = quote do:
-      proc `procName`*(db: `dbType`): seq[`modelType`] =
+      proc `procName`*(db: `dbType`): PagedRecords[`modelType`] =
         db.withConn: result = findRecordsBy(conn, `modelType`)

     var callParams = quote do: @[]
@@ -528,15 +639,22 @@ macro generateProcsForFieldLookups*(dbType: type, modelsAndFields: openarray[tuple[t: type, fields: seq[string]]]): untyped =
       procDefAST[3].add(newIdentDefs(ident(n), ident("string")))
       callParams[1].add(paramTuple)

+    # Add the optional pagination parameters to the generated proc definition
+    procDefAST[3].add(newIdentDefs(
+      ident("pagination"), newEmptyNode(),
+      quote do: none[PaginationParams]()))
+
     procDefAST[6][0][1][0][1].add(callParams)
+    procDefAST[6][0][1][0][1].add(quote do: pagination)

     result.add procDefAST

-proc initPool*(
-    connect: proc(): DbConn,
+proc initPool*[D: DbConnType](
+    connect: proc(): D,
     poolSize = 10,
     hardCap = false,
-    healthCheckQuery = "SELECT 'true' AS alive"): DbConnPool =
+    healthCheckQuery = "SELECT 'true' AS alive"): DbConnPool[D] =

   ## Initialize a new DbConnPool. See the `initDb` procedure in the `Example
   ## Fiber ORM Usage`_ for an example
   ##
@@ -554,13 +672,13 @@ proc initPool*(
   ##
   ## .. _Example Fiber ORM Usage: #basic-usage-example-fiber-orm-usage

-  initDbConnPool(DbConnPoolConfig(
+  initDbConnPool(DbConnPoolConfig[D](
     connect: connect,
     poolSize: poolSize,
     hardCap: hardCap,
     healthCheckQuery: healthCheckQuery))

-template inTransaction*(db: DbConnPool, body: untyped) =
+template inTransaction*[D: DbConnType](db: DbConnPool[D], body: untyped) =
   pool.withConn(db):
     conn.exec(sql"BEGIN TRANSACTION")
     try:
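A short usage sketch for the now-generic pool entry points. The connection string and table name are placeholders; open(...) is the standard std/db_postgres open, and the D type parameter is inferred from the connect closure's return type.

    import std/db_postgres, std/options
    import fiber_orm

    proc initDb(connString: string): DbConnPool[db_postgres.DbConn] =
      initPool(
        connect = proc(): db_postgres.DbConn = open("", "", "", connString),
        poolSize = 20,
        hardCap = false)

    let db = initDb("host=localhost dbname=todo")
    db.inTransaction:
      # `conn` is injected by the underlying withConn template.
      conn.exec(sql"DELETE FROM todo_items WHERE completed")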
src/fiber_orm/db_common.nim (new file, 3 lines)
@@ -0,0 +1,3 @@
+import std/[db_postgres, db_sqlite]
+
+type DbConnType* = db_postgres.DbConn or db_sqlite.DbConn
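DbConnType is a type class, a union over the two stdlib connection types, which is what lets the procs above take [D: DbConnType] and work against either backend. A minimal, self-contained illustration (separate from the library code):

    import std/[db_postgres, db_sqlite]

    type DbConnType = db_postgres.DbConn or db_sqlite.DbConn

    proc backendName[D: DbConnType](db: D): string =
      # Resolved at compile time for each instantiation of the generic proc.
      when D is db_postgres.DbConn: "postgres"
      else: "sqlite"

    let sqliteDb = db_sqlite.open(":memory:", "", "", "")
    echo backendName(sqliteDb)   # -> sqlite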
@@ -4,65 +4,70 @@

 ## Simple database connection pooling implementation compatible with Fiber ORM.

-import std/db_postgres, std/sequtils, std/strutils, std/sugar
+import std/[db_common, logging, sequtils, strutils, sugar]
+
+from std/db_sqlite import getRow
+from std/db_postgres import getRow

 import namespaced_logging
+import ./db_common as fiber_db_common

 type
-  DbConnPoolConfig* = object
-    connect*: () -> DbConn    ## Factory procedure to create a new DBConn
+  DbConnPoolConfig*[D: DbConnType] = object
+    connect*: () -> D         ## Factory procedure to create a new DBConn
     poolSize*: int            ## The pool capacity.

     hardCap*: bool            ## Is the pool capacity a hard cap?
                               ##
-                              ## When `false`, the pool can grow beyond the configured
-                              ## capacity, but will release connections down to the its
-                              ## capacity (no less than `poolSize`).
+                              ## When `false`, the pool can grow beyond the
+                              ## configured capacity, but will release connections
+                              ## down to the its capacity (no less than `poolSize`).
                               ##
                               ## When `true` the pool will not create more than its
-                              ## configured capacity. It a connection is requested, none
-                              ## are free, and the pool is at capacity, this will result
-                              ## in an Error being raised.
+                              ## configured capacity. It a connection is requested,
+                              ## none are free, and the pool is at capacity, this
+                              ## will result in an Error being raised.

     healthCheckQuery*: string ## Should be a simple and fast SQL query that the
                               ## pool can use to test the liveliness of pooled
                               ## connections.

-  PooledDbConn = ref object
-    conn: DbConn
+  PooledDbConn[D: DbConnType] = ref object
+    conn: D
     id: int
     free: bool

-  DbConnPool* = ref object
+  DbConnPool*[D: DbConnType] = ref object
     ## Database connection pool
-    conns: seq[PooledDbConn]
-    cfg: DbConnPoolConfig
+    conns: seq[PooledDbConn[D]]
+    cfg: DbConnPoolConfig[D]
     lastId: int

 var logNs {.threadvar.}: LoggingNamespace

 template log(): untyped =
-  if logNs.isNil: logNs = initLoggingNamespace(name = "fiber_orm/pool", level = lvlNotice)
+  if logNs.isNil: logNs = getLoggerForNamespace(namespace = "fiber_orm/pool", level = lvlNotice)
   logNs

-proc initDbConnPool*(cfg: DbConnPoolConfig): DbConnPool =
+proc initDbConnPool*[D: DbConnType](cfg: DbConnPoolConfig[D]): DbConnPool[D] =
   log().debug("Initializing new pool (size: " & $cfg.poolSize)
-  result = DbConnPool(
+  result = DbConnPool[D](
     conns: @[],
     cfg: cfg)

-proc newConn(pool: DbConnPool): PooledDbConn =
+proc newConn[D: DbConnType](pool: DbConnPool[D]): PooledDbConn[D] =
   log().debug("Creating a new connection to add to the pool.")
   pool.lastId += 1
   let conn = pool.cfg.connect()
-  result = PooledDbConn(
+  result = PooledDbConn[D](
     conn: conn,
     id: pool.lastId,
     free: true)
   pool.conns.add(result)

-proc maintain(pool: DbConnPool): void =
+proc maintain[D: DbConnType](pool: DbConnPool[D]): void =
   log().debug("Maintaining pool. $# connections." % [$pool.conns.len])
-  pool.conns.keepIf(proc (pc: PooledDbConn): bool =
+  pool.conns.keepIf(proc (pc: PooledDbConn[D]): bool =
     if not pc.free: return true

     try:
|
|||||||
"Trimming pool size. Culled $# free connections. $# connections remaining." %
|
"Trimming pool size. Culled $# free connections. $# connections remaining." %
|
||||||
[$toCull.len, $pool.conns.len])
|
[$toCull.len, $pool.conns.len])
|
||||||
|
|
||||||
proc take*(pool: DbConnPool): tuple[id: int, conn: DbConn] =
|
proc take*[D: DbConnType](pool: DbConnPool[D]): tuple[id: int, conn: D] =
|
||||||
## Request a connection from the pool. Returns a DbConn if the pool has free
|
## Request a connection from the pool. Returns a DbConn if the pool has free
|
||||||
## connections, or if it has the capacity to create a new connection. If the
|
## connections, or if it has the capacity to create a new connection. If the
|
||||||
## pool is configured with a hard capacity limit and is out of free
|
## pool is configured with a hard capacity limit and is out of free
|
||||||
@ -113,13 +118,13 @@ proc take*(pool: DbConnPool): tuple[id: int, conn: DbConn] =
|
|||||||
log().debug("Reserve connection $#" % [$reserved.id])
|
log().debug("Reserve connection $#" % [$reserved.id])
|
||||||
return (id: reserved.id, conn: reserved.conn)
|
return (id: reserved.id, conn: reserved.conn)
|
||||||
|
|
||||||
proc release*(pool: DbConnPool, connId: int): void =
|
proc release*[D: DbConnType](pool: DbConnPool[D], connId: int): void =
|
||||||
## Release a connection back to the pool.
|
## Release a connection back to the pool.
|
||||||
log().debug("Reclaiming released connaction $#" % [$connId])
|
log().debug("Reclaiming released connaction $#" % [$connId])
|
||||||
let foundConn = pool.conns.filterIt(it.id == connId)
|
let foundConn = pool.conns.filterIt(it.id == connId)
|
||||||
if foundConn.len > 0: foundConn[0].free = true
|
if foundConn.len > 0: foundConn[0].free = true
|
||||||
|
|
||||||
template withConn*(pool: DbConnPool, stmt: untyped): untyped =
|
template withConn*[D: DbConnType](pool: DbConnPool[D], stmt: untyped): untyped =
|
||||||
## Convenience template to provide a connection from the pool for use in a
|
## Convenience template to provide a connection from the pool for use in a
|
||||||
## statement block, automatically releasing that connnection when done.
|
## statement block, automatically releasing that connnection when done.
|
||||||
##
|
##
|
||||||
|
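take and release are normally not called directly; withConn pairs them automatically. A minimal sketch, assuming `db` is the DbConnPool from the earlier initPool sketch:

    # Borrow a connection for the duration of the block; it is returned to the
    # pool afterwards, even if the body raises.
    db.withConn:
      # `conn` is the injected pooled connection.
      let row = conn.getRow(sql"SELECT COUNT(*) FROM todo_items")
      echo "items: ", row[0]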
@@ -3,10 +3,10 @@
 # Copyright 2019 Jonathan Bernard <jonathan@jdbernard.com>

 ## Utility methods used internally by Fiber ORM.
-import json, macros, options, sequtils, strutils, times, unicode,
-  uuids
+import std/[json, macros, options, sequtils, strutils, times, unicode]
+import uuids

-import nre except toSeq
+import std/nre except toSeq

 type
   MutateClauses* = object
|
|||||||
if not (parseState == inQuote) and curStr.len > 0:
|
if not (parseState == inQuote) and curStr.len > 0:
|
||||||
result.add(curStr)
|
result.add(curStr)
|
||||||
|
|
||||||
proc createParseStmt*(t, value: NimNode): NimNode =
|
func createParseStmt*(t, value: NimNode): NimNode =
|
||||||
## Utility method to create the Nim cod required to parse a value coming from
|
## Utility method to create the Nim cod required to parse a value coming from
|
||||||
## the a database query. This is used by functions like `rowToModel` to parse
|
## the a database query. This is used by functions like `rowToModel` to parse
|
||||||
## the dataabase columns into the Nim object fields.
|
## the dataabase columns into the Nim object fields.
|
||||||
|
|
||||||
#echo "Creating parse statment for ", t.treeRepr
|
|
||||||
if t.typeKind == ntyObject:
|
if t.typeKind == ntyObject:
|
||||||
|
|
||||||
if t.getType == UUID.getType:
|
if t.getTypeInst == Option.getType:
|
||||||
result = quote do: parseUUID(`value`)
|
|
||||||
|
|
||||||
elif t.getType == DateTime.getType:
|
|
||||||
result = quote do: parsePGDatetime(`value`)
|
|
||||||
|
|
||||||
elif t.getTypeInst == Option.getType:
|
|
||||||
var innerType = t.getTypeImpl[2][0] # start at the first RecList
|
var innerType = t.getTypeImpl[2][0] # start at the first RecList
|
||||||
# If the value is a non-pointer type, there is another inner RecList
|
# If the value is a non-pointer type, there is another inner RecList
|
||||||
if innerType.kind == nnkRecList: innerType = innerType[0]
|
if innerType.kind == nnkRecList: innerType = innerType[0]
|
||||||
@ -232,8 +225,28 @@ proc createParseStmt*(t, value: NimNode): NimNode =
|
|||||||
if `value`.len == 0: none[`innerType`]()
|
if `value`.len == 0: none[`innerType`]()
|
||||||
else: some(`parseStmt`)
|
else: some(`parseStmt`)
|
||||||
|
|
||||||
|
elif t.getType == UUID.getType:
|
||||||
|
result = quote do: parseUUID(`value`)
|
||||||
|
|
||||||
|
elif t.getType == DateTime.getType:
|
||||||
|
result = quote do: parsePGDatetime(`value`)
|
||||||
|
|
||||||
else: error "Unknown value object type: " & $t.getTypeInst
|
else: error "Unknown value object type: " & $t.getTypeInst
|
||||||
|
|
||||||
|
elif t.typeKind == ntyGenericInst:
|
||||||
|
|
||||||
|
if t.kind == nnkBracketExpr and
|
||||||
|
t.len > 0 and
|
||||||
|
t[0] == Option.getType:
|
||||||
|
|
||||||
|
var innerType = t.getTypeInst[1]
|
||||||
|
let parseStmt = createParseStmt(innerType, value)
|
||||||
|
result = quote do:
|
||||||
|
if `value`.len == 0: none[`innerType`]()
|
||||||
|
else: some(`parseStmt`)
|
||||||
|
|
||||||
|
else: error "Unknown generic instance type: " & $t.getTypeInst
|
||||||
|
|
||||||
elif t.typeKind == ntyRef:
|
elif t.typeKind == ntyRef:
|
||||||
|
|
||||||
if $t.getTypeInst == "JsonNode":
|
if $t.getTypeInst == "JsonNode":
|
||||||
@ -268,28 +281,72 @@ proc createParseStmt*(t, value: NimNode): NimNode =
|
|||||||
else:
|
else:
|
||||||
error "Unknown value type: " & $t.typeKind
|
error "Unknown value type: " & $t.typeKind
|
||||||
|
|
||||||
|
func fields(t: NimNode): seq[tuple[fieldIdent: NimNode, fieldType: NimNode]] =
|
||||||
|
#[
|
||||||
|
debugEcho "T: " & t.treeRepr
|
||||||
|
debugEcho "T.kind: " & $t.kind
|
||||||
|
debugEcho "T.typeKind: " & $t.typeKind
|
||||||
|
debugEcho "T.GET_TYPE[1]: " & t.getType[1].treeRepr
|
||||||
|
debugEcho "T.GET_TYPE[1].kind: " & $t.getType[1].kind
|
||||||
|
debugEcho "T.GET_TYPE[1].typeKind: " & $t.getType[1].typeKind
|
||||||
|
|
||||||
|
debugEcho "T.GET_TYPE: " & t.getType.treeRepr
|
||||||
|
debugEcho "T.GET_TYPE[1].GET_TYPE: " & t.getType[1].getType.treeRepr
|
||||||
|
]#
|
||||||
|
|
||||||
|
# Get the object type AST, with base object (if present) and record list.
|
||||||
|
var objDefAst: NimNode
|
||||||
|
if t.typeKind == ntyObject: objDefAst = t.getType
|
||||||
|
elif t.typeKind == ntyTypeDesc:
|
||||||
|
# In this case we have a type AST that is like:
|
||||||
|
# BracketExpr
|
||||||
|
# Sym "typeDesc"
|
||||||
|
# Sym "ModelType"
|
||||||
|
objDefAst = t.
|
||||||
|
getType[1]. # get the Sym "ModelType"
|
||||||
|
getType # get the object definition type
|
||||||
|
|
||||||
|
if objDefAst.kind != nnkObjectTy:
|
||||||
|
error ("unable to enumerate the fields for model type '$#', " &
|
||||||
|
"tried to resolve the type of the provided symbol to an object " &
|
||||||
|
"definition (nnkObjectTy) but got a '$#'.\pAST:\p$#") % [
|
||||||
|
$t, $objDefAst.kind, objDefAst.treeRepr ]
|
||||||
|
else:
|
||||||
|
error ("unable to enumerate the fields for model type '$#', " &
|
||||||
|
"expected a symbol with type ntyTypeDesc but got a '$#'.\pAST:\p$#") % [
|
||||||
|
$t, $t.typeKind, t.treeRepr ]
|
||||||
|
|
||||||
|
# At this point objDefAst should look something like:
|
||||||
|
# ObjectTy
|
||||||
|
# Empty
|
||||||
|
# Sym "BaseObject"" | Empty
|
||||||
|
# RecList
|
||||||
|
# Sym "field1"
|
||||||
|
# Sym "field2"
|
||||||
|
# ...
|
||||||
|
|
||||||
|
if objDefAst[1].kind == nnkSym:
|
||||||
|
# We have a base class symbol, let's recurse and try and resolve the fields
|
||||||
|
# for the base class
|
||||||
|
for fieldDef in objDefAst[1].fields: result.add(fieldDef)
|
||||||
|
|
||||||
|
for fieldDef in objDefAst[2].children:
|
||||||
|
# objDefAst[2] is a RecList of
|
||||||
|
# ignore AST nodes that are not field definitions
|
||||||
|
if fieldDef.kind == nnkIdentDefs: result.add((fieldDef[0], fieldDef[1]))
|
||||||
|
elif fieldDef.kind == nnkSym: result.add((fieldDef, fieldDef.getTypeInst))
|
||||||
|
else: error "unknown object field definition AST: $#" % $fieldDef.kind
|
||||||
|
|
||||||
template walkFieldDefs*(t: NimNode, body: untyped) =
|
template walkFieldDefs*(t: NimNode, body: untyped) =
|
||||||
## Iterate over every field of the given Nim object, yielding and defining
|
## Iterate over every field of the given Nim object, yielding and defining
|
||||||
## `fieldIdent` and `fieldType`, the name of the field as a Nim Ident node
|
## `fieldIdent` and `fieldType`, the name of the field as a Nim Ident node
|
||||||
## and the type of the field as a Nim Type node respectively.
|
## and the type of the field as a Nim Type node respectively.
|
||||||
let tTypeImpl = t.getTypeImpl
|
for (fieldIdent {.inject.}, fieldType {.inject.}) in t.fields: body
|
||||||
|
|
||||||
var nodeToItr: NimNode
|
#[ TODO: replace walkFieldDefs with things like this:
|
||||||
if tTypeImpl.typeKind == ntyObject: nodeToItr = tTypeImpl[2]
|
func columnNamesForModel*(modelType: typedesc): seq[string] =
|
||||||
elif tTypeImpl.typeKind == ntyTypeDesc: nodeToItr = tTypeImpl.getType[1].getType[2]
|
modelType.fields.mapIt(identNameToDb($it[0]))
|
||||||
else: error $t & " is not an object or type desc (it's a " & $tTypeImpl.typeKind & ")."
|
]#
|
||||||
|
|
||||||
for fieldDef {.inject.} in nodeToItr.children:
|
|
||||||
# ignore AST nodes that are not field definitions
|
|
||||||
if fieldDef.kind == nnkIdentDefs:
|
|
||||||
let fieldIdent {.inject.} = fieldDef[0]
|
|
||||||
let fieldType {.inject.} = fieldDef[1]
|
|
||||||
body
|
|
||||||
|
|
||||||
elif fieldDef.kind == nnkSym:
|
|
||||||
let fieldIdent {.inject.} = fieldDef
|
|
||||||
let fieldType {.inject.} = fieldDef.getType
|
|
||||||
body
|
|
||||||
|
|
||||||
macro columnNamesForModel*(modelType: typed): seq[string] =
|
macro columnNamesForModel*(modelType: typed): seq[string] =
|
||||||
## Return the column names corresponding to the the fields of the given
|
## Return the column names corresponding to the the fields of the given
|
||||||
@ -317,6 +374,7 @@ macro rowToModel*(modelType: typed, row: seq[string]): untyped =
|
|||||||
createParseStmt(fieldType, itemLookup)))
|
createParseStmt(fieldType, itemLookup)))
|
||||||
idx += 1
|
idx += 1
|
||||||
|
|
||||||
|
#[
|
||||||
macro listFields*(t: typed): untyped =
|
macro listFields*(t: typed): untyped =
|
||||||
var fields: seq[tuple[n: string, t: string]] = @[]
|
var fields: seq[tuple[n: string, t: string]] = @[]
|
||||||
t.walkFieldDefs:
|
t.walkFieldDefs:
|
||||||
@ -324,6 +382,7 @@ macro listFields*(t: typed): untyped =
|
|||||||
else: fields.add((n: $fieldIdent, t: $fieldType))
|
else: fields.add((n: $fieldIdent, t: $fieldType))
|
||||||
|
|
||||||
result = newLit(fields)
|
result = newLit(fields)
|
||||||
|
]#
|
||||||
|
|
||||||
proc typeOfColumn*(modelType: NimNode, colName: string): NimNode =
|
proc typeOfColumn*(modelType: NimNode, colName: string): NimNode =
|
||||||
## Given a model type and a column name, return the Nim type for that column.
|
## Given a model type and a column name, return the Nim type for that column.
|
||||||