39 Commits
0.1.0 ... 0.8.0

Author SHA1 Message Date
b4b125d750 Update package version to 0.8.0 2020-02-16 23:55:25 -06:00
826f0eaa73 web: Add support for decimal measure values. 2020-02-16 23:21:56 -06:00
adddef3188 api: Support for decimal values in measures. 2020-02-16 23:20:02 -06:00
3e2faf9554 Update dependencies. 2020-02-16 21:09:25 -06:00
744ad9211b Use straight lines on simple measure graphs. 2020-02-16 21:09:14 -06:00
35a116abbb Fix insert/update logic of the measurements store. 2020-02-16 21:08:05 -06:00
4cb5b8d814 Add version tag in HTML, fix lint error. 2020-02-09 05:09:49 -06:00
efb86cf6ce Update package version to 0.7.0 2020-02-09 04:25:21 -06:00
c6863293c5 api: Use released version of fiber_orm (not local). 2020-02-09 04:25:09 -06:00
ce582383c3 api: Change root application path to '/v0' instead of '/api'. 2020-02-09 04:25:09 -06:00
31053c1014 Add clean target to Makefile. 2020-02-09 04:25:09 -06:00
f5b891b966 update-version.sh should include changes to package-lock.json. 2020-02-09 04:18:38 -06:00
74b8a42d29 web: Sort data in simple measure graphs. 2020-02-09 04:16:26 -06:00
8af6c65c9b api: Add support for necessary CORS headers. 2020-02-09 04:16:15 -06:00
e14097117f api: Add nginx configuration for OPTIONS CORS support. 2020-02-09 04:15:43 -06:00
c6d8d14a1f Add update-version.sh convenience script. 2020-02-09 04:10:10 -06:00
ff3c1cf04e Clean up logging service. 2020-02-09 03:41:22 -06:00
8ac1cdf476 Fix environment-specific builds. 2020-02-09 03:40:49 -06:00
c28eb7b240 Update operations documentation. 2020-02-09 03:16:03 -06:00
31326d40c8 web: Update dependencies (npm audit fix). 2020-02-09 03:16:03 -06:00
716f09681c Move Terraform state into S3 (using DynamoDB for locking). 2020-02-09 03:16:03 -06:00
ead77534ce api: Extract database common code into its own library (fiber-orm). 2020-02-09 03:15:58 -06:00
c5daa76102 web: Parameterize build process with env-dependent config files. 2020-02-09 00:30:38 -06:00
cfd5463b4d Add CDN cache invalidation to deploy scripts. 2019-09-25 10:48:46 -05:00
5c81d756df api: Refactor so all endpoints are CORS-aware. 2019-09-25 08:15:08 -05:00
cf60793395 api: WIP CORS support 2019-09-24 22:43:23 -05:00
0a8f701c3c Created terraform configuration to manage AWS infrastructure. 2019-09-24 22:40:08 -05:00
a4b798cec4 Bump version. 2019-05-19 18:31:33 -05:00
5f257e9b4a Target dev environment by default for deploys. 2019-05-19 18:30:53 -05:00
7e5827a7a2 api: Add Cache-Control header. 2019-05-19 18:28:38 -05:00
793dbcc611 Add support for parsing Postgres dates that only use one or two millisecond digits. 2019-05-19 18:28:22 -05:00
d37dc77490 Bump version. 2019-05-19 01:29:46 -05:00
ffa7e1a4de Documentation around new deployed environments. 2019-05-19 01:28:22 -05:00
30ced3ecfd Allow targeting different environments with deploy. Add version string on the web App component. 2019-05-19 01:28:02 -05:00
4bc8c00c49 api: Update systemd [Install] so that pmapi starts with the system. 2019-05-19 01:26:22 -05:00
e33ba9707c web: Move the .env.prod to .env.production (where it belongs). 2019-05-19 01:25:57 -05:00
9af4af6c5d Added deployment documentation. 2019-05-18 18:48:12 -05:00
d88689ee31 Add build and deploy targets to top-level Makefile. 2019-05-18 17:45:43 -05:00
1d544dad0b Bump version. 2019-05-18 13:28:02 -05:00
46 changed files with 3665 additions and 3154 deletions

6
.gitignore vendored
View File

@ -26,3 +26,9 @@ yarn-error.log*
*.njsproj *.njsproj
*.sln *.sln
*.sw? *.sw?
# Terraform files
.terraform/
# API Testing Files
api/temp/

View File

@ -1,15 +1,38 @@
VERSION=`git describe --always` VERSION:=$(shell git describe --always)
TARGET_ENV ?= dev
build: build-api build-web build: dist/personal-measure-api.tar.gz dist/personal-measure-web.tar.gz
build-api: clean:
-rm -r dist
-rm api/personal_measure_api
-rm -r web/dist
dist/personal-measure-api.tar.gz:
-mkdir dist -mkdir dist
make -C api personal_measure_api make -C api personal_measure_api
tar czf dist/personal-measure-api-${VERSION}.tar.gz -C api personal_measure_api tar czf dist/personal-measure-api-${VERSION}.tar.gz -C api personal_measure_api
cp dist/personal-measure-api-${VERSION}.tar.gz dist/personal-measure-api.tar.gz
build-web: dist/personal-measure-web.tar.gz:
-mkdir dist -mkdir dist
(cd web && npm run build) TARGET_ENV=$(TARGET_ENV) make -C web build
tar czf dist/personal-measure-web-${VERSION}.tar.gz -C web/dist . tar czf dist/personal-measure-web-${VERSION}.tar.gz -C web/dist .
cp dist/personal-measure-web-${VERSION}.tar.gz dist/personal-measure-web.tar.gz
#deploy-api: build-api deploy-api: dist/personal-measure-api.tar.gz
mkdir -p temp-deploy/personal-measure-api-${VERSION}
tar xzf dist/personal-measure-api-${VERSION}.tar.gz -C temp-deploy/personal-measure-api-${VERSION}
-ssh pmapi@pmapi.jdb-labs.com "sudo systemctl stop personal_measure_api.$(TARGET_ENV).service"
scp temp-deploy/personal-measure-api-${VERSION}/personal_measure_api pmapi@pmapi.jdb-labs.com:/home/pmapi/$(TARGET_ENV)/personal_measure_api
ssh pmapi@pmapi.jdb-labs.com "sudo systemctl start personal_measure_api.$(TARGET_ENV).service"
rm -r temp-deploy
deploy-web: dist/personal-measure-web.tar.gz
mkdir -p temp-deploy/personal-measure-web-${VERSION}
tar xzf dist/personal-measure-web-${VERSION}.tar.gz -C temp-deploy/personal-measure-web-${VERSION}
aws s3 sync temp-deploy/personal-measure-web-${VERSION} s3://pm.jdb-labs.com/$(TARGET_ENV)/webroot
TARGET_ENV=${TARGET_ENV} operations/invalidate-cdn-cache.sh
rm -r temp-deploy
deploy: deploy-api deploy-web

View File

@ -3,5 +3,6 @@
"dbConnString":"host=localhost port=5500 dbname=personal_measure user=postgres password=password", "dbConnString":"host=localhost port=5500 dbname=personal_measure user=postgres password=password",
"debug":true, "debug":true,
"port":8081, "port":8081,
"pwdCost":11 "pwdCost":11,
"knownOrigins": [ "https://curl.localhost" ]
} }

View File

@ -1,5 +1,6 @@
{ {
"debug":false, "debug":false,
"port":80, "port":80,
"pwdCost":11 "pwdCost":11,
"knownOrigins": [ "https://pm.jdb-labs.com" ]
} }

View File

@ -2,7 +2,7 @@
include "src/main/nim/personal_measure_apipkg/version.nim" include "src/main/nim/personal_measure_apipkg/version.nim"
version = PM_API_VERSION version = "0.8.0"
author = "Jonathan Bernard" author = "Jonathan Bernard"
description = "JDB\'s Personal Measures API" description = "JDB\'s Personal Measures API"
license = "MIT" license = "MIT"
@ -14,7 +14,8 @@ skipExt = @["nim"]
# Dependencies # Dependencies
requires @["nim >= 0.19.4", "bcrypt", "docopt >= 0.6.8", "isaac >= 0.1.3", requires @["nim >= 0.19.4", "bcrypt", "docopt >= 0.6.8", "isaac >= 0.1.3",
"jester >= 0.4.1", "jwt", "tempfile", "uuids >= 0.1.10" ] "jester >= 0.4.3", "jwt", "tempfile", "uuids >= 0.1.10" ]
requires "https://git.jdb-labs.com/jdb/nim-cli-utils.git >= 0.6.3" requires "https://git.jdb-labs.com/jdb/nim-cli-utils.git >= 0.6.3"
requires "https://git.jdb-labs.com/jdb/nim-time-utils.git >= 0.5.0" requires "https://git.jdb-labs.com/jdb/nim-time-utils.git >= 0.5.2"
requires "https://git.jdb-labs.com/jdb-labs/fiber-orm-nim.git >= 0.3.0"

View File

@ -34,6 +34,10 @@ proc loadConfig*(args: Table[string, docopt.Value] = initTable[string, docopt.Va
warn "Cannot read configuration file \"" & filePath & "\":\n\t" & warn "Cannot read configuration file \"" & filePath & "\":\n\t" &
getCurrentExceptionMsg() getCurrentExceptionMsg()
let knownOriginsArray =
if json.hasKey("knownOrigins"): json["knownOrigins"]
else: newJArray()
let cfg = CombinedConfig(docopt: args, json: json) let cfg = CombinedConfig(docopt: args, json: json)
result = PMApiConfig( result = PMApiConfig(
@ -41,7 +45,8 @@ proc loadConfig*(args: Table[string, docopt.Value] = initTable[string, docopt.Va
dbConnString: cfg.getVal("db-conn-string"), dbConnString: cfg.getVal("db-conn-string"),
debug: "true".startsWith(cfg.getVal("debug", "false").toLower()), debug: "true".startsWith(cfg.getVal("debug", "false").toLower()),
port: parseInt(cfg.getVal("port", "8080")), port: parseInt(cfg.getVal("port", "8080")),
pwdCost: cast[int8](parseInt(cfg.getVal("pwd-cost", "11")))) pwdCost: cast[int8](parseInt(cfg.getVal("pwd-cost", "11"))),
knownOrigins: toSeq(knownOriginsArray).mapIt(it.getStr))
proc initContext(args: Table[string, docopt.Value]): PMApiContext = proc initContext(args: Table[string, docopt.Value]): PMApiContext =

View File

@ -1,7 +1,8 @@
import asyncdispatch, base64, jester, json, jwt, logging, options, sequtils, import asyncdispatch, base64, jester, json, jwt, logging, options, sequtils,
strutils, times, uuids times, uuids
from unicode import capitalize from unicode import capitalize
import timeutils except `<` import strutils except capitalize
import timeutils
import ./db, ./configuration, ./models, ./service, ./version import ./db, ./configuration, ./models, ./service, ./version
@ -20,7 +21,7 @@ proc newSession*(user: User): Session =
template halt(code: HttpCode, template halt(code: HttpCode,
headers: RawHeaders, headers: RawHeaders,
content: string): typed = content: string) =
## Immediately replies with the specified request. This means any further ## Immediately replies with the specified request. This means any further
## code will not be executed after calling this template in the current ## code will not be executed after calling this template in the current
## route. ## route.
@ -32,21 +33,42 @@ template halt(code: HttpCode,
result.matched = true result.matched = true
break allRoutes break allRoutes
template jsonResp(code: HttpCode, details: string = "", headers: RawHeaders = @{:} ) = template jsonResp(code: HttpCode, body: string = "", headersToSend: RawHeaders = @{:} ) =
let reqOrigin =
if request.headers.hasKey("Origin"): $(request.headers["Origin"])
else: ""
let corsHeaders =
if ctx.cfg.knownOrigins.contains(reqOrigin):
@{
"Access-Control-Allow-Origin": reqOrigin,
"Access-Control-Allow-Credentials": "true",
"Access-Control-Allow-Methods": $(request.reqMethod),
"Access-Control-Allow-Headers": "DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization"
}
else: @{:}
halt( halt(
code, code,
headers & @{"Content-Type": JSON}, headersToSend & corsHeaders & @{
"Content-Type": JSON,
"Cache-Control": "no-cache"
},
body
)
template jsonResp(body: string) = jsonResp(Http200, body)
template statusResp(code: HttpCode, details: string = "", headersToSend: RawHeaders = @{:} ) =
jsonResp(
code,
$(%* { $(%* {
"statusCode": code.int, "statusCode": code.int,
"status": $code, "status": $code,
"details": details "details": details
}) }),
) headersToSend)
template json500Resp(ex: ref Exception, details: string = ""): void =
when not defined(release): debug ex.getStackTrace()
error details & ":\n" & ex.msg
jsonResp(Http500)
# internal JSON parsing utils # internal JSON parsing utils
proc getIfExists(n: JsonNode, key: string): JsonNode = proc getIfExists(n: JsonNode, key: string): JsonNode =
@ -173,10 +195,10 @@ template checkAuth(requiresAdmin = false) =
try: session = extractSession(ctx, request) try: session = extractSession(ctx, request)
except: except:
debug "Auth failed: " & getCurrentExceptionMsg() debug "Auth failed: " & getCurrentExceptionMsg()
jsonResp(Http401, "Unauthorized", @{"WWW-Authenticate": "Bearer"}) statusResp(Http401, "Unauthorized", @{"WWW-Authenticate": "Bearer"})
if requiresAdmin and not session.user.isAdmin: if requiresAdmin and not session.user.isAdmin:
jsonResp(Http401, "Unauthorized", @{"WWW-Authenticate": "Bearer"}) statusResp(Http401, "Unauthorized", @{"WWW-Authenticate": "Bearer"})
proc start*(ctx: PMApiContext): void = proc start*(ctx: PMApiContext): void =
@ -186,12 +208,12 @@ proc start*(ctx: PMApiContext): void =
settings: settings:
port = Port(ctx.cfg.port) port = Port(ctx.cfg.port)
appName = "/api" appName = "/v0"
routes: routes:
get "/version": get "/version":
resp($(%("personal_measure_api v" & PM_API_VERSION)), JSON) jsonResp($(%("personal_measure_api v" & PM_API_VERSION)))
post "/auth-token": post "/auth-token":
@ -200,9 +222,9 @@ proc start*(ctx: PMApiContext): void =
let email = jsonBody.getOrFail("email").getStr let email = jsonBody.getOrFail("email").getStr
let pwd = jsonBody.getOrFail("password").getStr let pwd = jsonBody.getOrFail("password").getStr
let authToken = makeAuthToken(ctx, email, pwd) let authToken = makeAuthToken(ctx, email, pwd)
resp($(%authToken), JSON) jsonResp($(%authToken))
except JsonParsingError: jsonResp(Http400, getCurrentExceptionMsg()) except JsonParsingError: statusResp(Http400, getCurrentExceptionMsg())
except: jsonResp(Http401, getCurrentExceptionMsg()) except: statusResp(Http401, getCurrentExceptionMsg())
post "/change-pwd": post "/change-pwd":
checkAuth() checkAuth()
@ -215,15 +237,15 @@ proc start*(ctx: PMApiContext): void =
let newHash = hashWithSalt(jsonBody.getOrFail("newPassword").getStr, session.user.salt) let newHash = hashWithSalt(jsonBody.getOrFail("newPassword").getStr, session.user.salt)
session.user.hashedPwd = newHash.hash session.user.hashedPwd = newHash.hash
if ctx.db.updateUser(session.user): jsonResp(Http200) if ctx.db.updateUser(session.user): statusResp(Http200)
else: jsonResp(Http500, "unable to change pwd") else: statusResp(Http500, "unable to change pwd")
except JsonParsingError: jsonResp(Http400, getCurrentExceptionMsg()) except JsonParsingError: statusResp(Http400, getCurrentExceptionMsg())
except BadRequestError: jsonResp(Http400, getCurrentExceptionMsg()) except BadRequestError: statusResp(Http400, getCurrentExceptionMsg())
except AuthError: jsonResp(Http401, getCurrentExceptionMsg()) except AuthError: statusResp(Http401, getCurrentExceptionMsg())
except: except:
error "internal error changing password: " & getCurrentExceptionMsg() error "internal error changing password: " & getCurrentExceptionMsg()
jsonResp(Http500) statusResp(Http500)
post "/change-pwd/@userId": post "/change-pwd/@userId":
checkAuth(true) checkAuth(true)
@ -234,22 +256,22 @@ proc start*(ctx: PMApiContext): void =
var user = ctx.db.getUser(parseUUID(@"userId")) var user = ctx.db.getUser(parseUUID(@"userId"))
let newHash = hashWithSalt(jsonBody.getOrFail("newPassword").getStr, user.salt) let newHash = hashWithSalt(jsonBody.getOrFail("newPassword").getStr, user.salt)
user.hashedPwd = newHash.hash user.hashedPwd = newHash.hash
if ctx.db.updateUser(user): jsonResp(Http200) if ctx.db.updateUser(user): statusResp(Http200)
else: jsonResp(Http500, "unable to change pwd") else: statusResp(Http500, "unable to change pwd")
except ValueError: jsonResp(Http400, "invalid UUID") except ValueError: statusResp(Http400, "invalid UUID")
except JsonParsingError: jsonResp(Http400, getCurrentExceptionMsg()) except JsonParsingError: statusResp(Http400, getCurrentExceptionMsg())
except BadRequestError: jsonResp(Http400, getCurrentExceptionMsg()) except BadRequestError: statusResp(Http400, getCurrentExceptionMsg())
except AuthError: jsonResp(Http401, getCurrentExceptionMsg()) except AuthError: statusResp(Http401, getCurrentExceptionMsg())
except NotFoundError: jsonResp(Http404, "no such user") except NotFoundError: statusResp(Http404, "no such user")
except: except:
error "internal error changing password: " & getCurrentExceptionMsg() error "internal error changing password: " & getCurrentExceptionMsg()
jsonResp(Http500) statusResp(Http500)
get "/user": get "/user":
checkAuth() checkAuth()
resp(Http200, $(%session.user), JSON) jsonResp($(%session.user))
put "/user": put "/user":
checkAuth() checkAuth()
@ -262,18 +284,18 @@ proc start*(ctx: PMApiContext): void =
if jsonBody.hasKey("displayName"): if jsonBody.hasKey("displayName"):
updatedUser.displayName = jsonBody["displayName"].getStr() updatedUser.displayName = jsonBody["displayName"].getStr()
jsonResp(Http200, $(%ctx.db.updateUser(updatedUser))) statusResp(Http200, $(%ctx.db.updateUser(updatedUser)))
except JsonParsingError: jsonResp(Http400, getCurrentExceptionMsg()) except JsonParsingError: statusResp(Http400, getCurrentExceptionMsg())
except BadRequestError: jsonResp(Http400, getCurrentExceptionMsg()) except BadRequestError: statusResp(Http400, getCurrentExceptionMsg())
except: except:
error "Could not update user information:\n\t" & getCurrentExceptionMsg() error "Could not update user information:\n\t" & getCurrentExceptionMsg()
jsonResp(Http500) statusResp(Http500)
get "/users": get "/users":
checkAuth(true) checkAuth(true)
resp(Http200, $(%ctx.db.getAllUsers())) jsonResp($(%ctx.db.getAllUsers()))
post "/users": post "/users":
checkAuth(true) checkAuth(true)
@ -290,18 +312,18 @@ proc start*(ctx: PMApiContext): void =
salt: pwdAndSalt.salt, salt: pwdAndSalt.salt,
isAdmin: false) isAdmin: false)
resp($(%ctx.db.createUser(newUser)), JSON) jsonResp($(%ctx.db.createUser(newUser)))
except JsonParsingError: jsonResp(Http400, getCurrentExceptionMsg()) except JsonParsingError: statusResp(Http400, getCurrentExceptionMsg())
except BadRequestError: jsonResp(Http400, getCurrentExceptionMsg()) except BadRequestError: statusResp(Http400, getCurrentExceptionMsg())
except: except:
error "Could not create new user:\n\t" & getCurrentExceptionMsg() error "Could not create new user:\n\t" & getCurrentExceptionMsg()
jsonResp(Http500) statusResp(Http500)
get "/users/@userId": get "/users/@userId":
checkAuth(true) checkAuth(true)
resp(Http200, $(%ctx.db.getUser(parseUUID(@"userId")))) jsonResp($(%ctx.db.getUser(parseUUID(@"userId"))))
delete "/users/@userId": delete "/users/@userId":
checkAuth(true) checkAuth(true)
@ -310,18 +332,18 @@ proc start*(ctx: PMApiContext): void =
try: try:
let userId = parseUUID(@"userId") let userId = parseUUID(@"userId")
user = ctx.db.getUser(userId) user = ctx.db.getUser(userId)
except: jsonResp(Http404) except: statusResp(Http404)
try: try:
if not ctx.db.deleteUser(user): raiseEx "unable to delete user" if not ctx.db.deleteUser(user): raiseEx "unable to delete user"
jsonResp(Http200, "user " & user.email & " deleted") statusResp(Http200, "user " & user.email & " deleted")
except: jsonResp(Http500, getCurrentExceptionMsg()) except: statusResp(Http500, getCurrentExceptionMsg())
get "/api-tokens": get "/api-tokens":
checkAuth() checkAuth()
resp(Http200, $(%ctx.db.findApiTokensByUserId($session.user.id))) jsonResp($(%ctx.db.findApiTokensByUserId($session.user.id)))
post "/api-tokens": post "/api-tokens":
checkAuth() checkAuth()
@ -343,40 +365,40 @@ proc start*(ctx: PMApiContext): void =
let respToken = %newToken let respToken = %newToken
respToken["value"] = %tokenValue respToken["value"] = %tokenValue
resp($respToken, JSON) jsonResp($respToken)
except JsonParsingError: jsonResp(Http400, getCurrentExceptionMsg()) except JsonParsingError: statusResp(Http400, getCurrentExceptionMsg())
except BadRequestError: jsonResp(Http400, getCurrentExceptionMsg()) except BadRequestError: statusResp(Http400, getCurrentExceptionMsg())
except AuthError: jsonResp(Http401, getCurrentExceptionMsg()) except AuthError: statusResp(Http401, getCurrentExceptionMsg())
except: except:
debug getCurrentExceptionMsg() debug getCurrentExceptionMsg()
jsonResp(Http500) statusResp(Http500)
get "/api-tokens/@tokenId": get "/api-tokens/@tokenId":
checkAuth() checkAuth()
try: try:
resp(Http200, $(%ctx.db.getApiToken(parseUUID(@"tokenId")))) jsonResp($(%ctx.db.getApiToken(parseUUID(@"tokenId"))))
except NotFoundError: jsonResp(Http404, getCurrentExceptionMsg()) except NotFoundError: statusResp(Http404, getCurrentExceptionMsg())
except: jsonResp(Http500) except: statusResp(Http500)
delete "/api-tokens/@tokenId": delete "/api-tokens/@tokenId":
checkAuth() checkAuth()
try: try:
let token = ctx.db.getApiToken(parseUUID(@"tokenId")) let token = ctx.db.getApiToken(parseUUID(@"tokenId"))
if ctx.db.deleteApiToken(token): jsonResp(Http200) if ctx.db.deleteApiToken(token): statusResp(Http200)
else: jsonResp(Http500) else: statusResp(Http500)
except NotFoundError: jsonResp(Http404, getCurrentExceptionMsg()) except NotFoundError: statusResp(Http404, getCurrentExceptionMsg())
except: jsonResp(Http500) except: statusResp(Http500)
get "/measures": get "/measures":
checkAuth() checkAuth()
try: resp($(%ctx.db.findMeasuresByUserId($session.user.id)), JSON) try: jsonResp($(%ctx.db.findMeasuresByUserId($session.user.id)))
except: except:
error "unable to retrieve measures for user:\n\t" & getCurrentExceptionMsg() error "unable to retrieve measures for user:\n\t" & getCurrentExceptionMsg()
jsonResp(Http500) statusResp(Http500)
post "/measures": post "/measures":
checkAuth() checkAuth()
@ -406,45 +428,45 @@ proc start*(ctx: PMApiContext): void =
description: jsonBody.getIfExists("description").getStr(""), description: jsonBody.getIfExists("description").getStr(""),
config: config) config: config)
resp($(%ctx.db.createMeasure(newMeasure)), JSON) jsonResp($(%ctx.db.createMeasure(newMeasure)))
except JsonParsingError: jsonResp(Http400, getCurrentExceptionMsg()) except JsonParsingError: statusResp(Http400, getCurrentExceptionMsg())
except BadRequestError: jsonResp(Http400, getCurrentExceptionMsg()) except BadRequestError: statusResp(Http400, getCurrentExceptionMsg())
except: except:
error "unable to create new measure:\n\t" & getCurrentExceptionMsg() error "unable to create new measure:\n\t" & getCurrentExceptionMsg()
jsonResp(Http500) statusResp(Http500)
get "/measures/@slug": get "/measures/@slug":
checkAuth() checkAuth()
try: resp($(%ctx.getMeasureForSlug(session.user.id, @"slug")), JSON) try: jsonResp($(%ctx.getMeasureForSlug(session.user.id, @"slug")))
except NotFoundError: jsonResp(Http404, getCurrentExceptionMsg()) except NotFoundError: statusResp(Http404, getCurrentExceptionMsg())
except: except:
error "unable to look up a measure by id:\n\t" & getCurrentExceptionMsg() error "unable to look up a measure by id:\n\t" & getCurrentExceptionMsg()
jsonResp(Http500) statusResp(Http500)
delete "/measures/@slug": delete "/measures/@slug":
checkAuth() checkAuth()
try: try:
let measure = ctx.getMeasureForSlug(session.user.id, @"slug") let measure = ctx.getMeasureForSlug(session.user.id, @"slug")
if ctx.db.deleteMeasure(measure): jsonResp(Http200) if ctx.db.deleteMeasure(measure): statusResp(Http200)
else: raiseEx "" else: raiseEx ""
except NotFoundError: jsonResp(Http404, getCurrentExceptionMsg()) except NotFoundError: statusResp(Http404, getCurrentExceptionMsg())
except: except:
error "unable to delete a measure:\n\t" & getCurrentExceptionMsg() error "unable to delete a measure:\n\t" & getCurrentExceptionMsg()
jsonResp(Http500) statusResp(Http500)
get "/measure/@slug": get "/measure/@slug":
checkAuth() checkAuth()
try: try:
let measure = ctx.getMeasureForSlug(session.user.id, @"slug") let measure = ctx.getMeasureForSlug(session.user.id, @"slug")
resp($(%ctx.db.findMeasurementsByMeasureId($measure.id)), JSON) jsonResp($(%ctx.db.findMeasurementsByMeasureId($measure.id)))
except NotFoundError: jsonResp(Http404, getCurrentExceptionMsg()) except NotFoundError: statusResp(Http404, getCurrentExceptionMsg())
except: except:
error "unable to list measurements:\n\t" & getCurrentExceptionMsg() error "unable to list measurements:\n\t" & getCurrentExceptionMsg()
jsonResp(Http500) statusResp(Http500)
post "/measure/@slug": post "/measure/@slug":
checkAuth() checkAuth()
@ -455,7 +477,7 @@ proc start*(ctx: PMApiContext): void =
let newMeasurement = Measurement( let newMeasurement = Measurement(
measureId: measure.id, measureId: measure.id,
value: jsonBody.getOrFail("value").getInt, value: jsonBody.getOrFail("value").getFloat,
timestamp: timestamp:
if jsonBody.hasKey("timestamp"): jsonBody["timestamp"].getStr.parseIso8601.utc if jsonBody.hasKey("timestamp"): jsonBody["timestamp"].getStr.parseIso8601.utc
else: getTime().utc, else: getTime().utc,
@ -463,29 +485,29 @@ proc start*(ctx: PMApiContext): void =
if jsonBody.hasKey("extData"): jsonBody["extData"] if jsonBody.hasKey("extData"): jsonBody["extData"]
else: newJObject()) else: newJObject())
resp($(%ctx.db.createMeasurement(newMeasurement)), JSON) jsonResp($(%ctx.db.createMeasurement(newMeasurement)))
except JsonParsingError: jsonResp(Http400, getCurrentExceptionMsg()) except JsonParsingError: statusResp(Http400, getCurrentExceptionMsg())
except BadRequestError: jsonResp(Http400, getCurrentExceptionMsg()) except BadRequestError: statusResp(Http400, getCurrentExceptionMsg())
except NotFoundError: jsonResp(Http404, getCurrentExceptionMsg()) except NotFoundError: statusResp(Http404, getCurrentExceptionMsg())
except: except:
error "unable to add measurement:\n\t" & getCurrentExceptionMsg() error "unable to add measurement:\n\t" & getCurrentExceptionMsg()
jsonResp(Http500) statusResp(Http500)
get "/measure/@slug/@id": get "/measure/@slug/@id":
checkAuth() checkAuth()
try: try:
let measure = ctx.getMeasureForSlug(session.user.id, @"slug") let measure = ctx.getMeasureForSlug(session.user.id, @"slug")
resp($(%ctx.getMeasurementForMeasure(measure.id, parseUUID(@"id"))), JSON) jsonResp($(%ctx.getMeasurementForMeasure(measure.id, parseUUID(@"id"))))
except ValueError: jsonResp(Http400, getCurrentExceptionMsg()) except ValueError: statusResp(Http400, getCurrentExceptionMsg())
except JsonParsingError: jsonResp(Http400, getCurrentExceptionMsg()) except JsonParsingError: statusResp(Http400, getCurrentExceptionMsg())
except BadRequestError: jsonResp(Http400, getCurrentExceptionMsg()) except BadRequestError: statusResp(Http400, getCurrentExceptionMsg())
except NotFoundError: jsonResp(Http404, getCurrentExceptionMsg()) except NotFoundError: statusResp(Http404, getCurrentExceptionMsg())
except: except:
error "unable to retrieve measurement:\n\t" & getCurrentExceptionMsg() error "unable to retrieve measurement:\n\t" & getCurrentExceptionMsg()
jsonResp(Http500) statusResp(Http500)
put "/measure/@slug/@id": put "/measure/@slug/@id":
checkAuth() checkAuth()
@ -494,18 +516,18 @@ proc start*(ctx: PMApiContext): void =
let measure = ctx.getMeasureForSlug(session.user.id, @"slug") let measure = ctx.getMeasureForSlug(session.user.id, @"slug")
var measurement = ctx.getMeasurementForMeasure(measure.id, parseUUID(@"id")) var measurement = ctx.getMeasurementForMeasure(measure.id, parseUUID(@"id"))
let jsonBody = parseJson(request.body) let jsonBody = parseJson(request.body)
if jsonBody.hasKey("value"): measurement.value = jsonBody["value"].getInt if jsonBody.hasKey("value"): measurement.value = jsonBody["value"].getFloat
if jsonBody.hasKey("timestamp"): measurement.timestamp = jsonBody["timestamp"].getStr.parseIso8601 if jsonBody.hasKey("timestamp"): measurement.timestamp = jsonBody["timestamp"].getStr.parseIso8601
if jsonBody.hasKey("extData"): measurement.extData = jsonBody["extData"] if jsonBody.hasKey("extData"): measurement.extData = jsonBody["extData"]
resp($(%ctx.db.updateMeasurement(measurement)), JSON) jsonResp($(%ctx.db.updateMeasurement(measurement)))
except ValueError: jsonResp(Http400, getCurrentExceptionMsg()) except ValueError: statusResp(Http400, getCurrentExceptionMsg())
except JsonParsingError: jsonResp(Http400, getCurrentExceptionMsg()) except JsonParsingError: statusResp(Http400, getCurrentExceptionMsg())
except BadRequestError: jsonResp(Http400, getCurrentExceptionMsg()) except BadRequestError: statusResp(Http400, getCurrentExceptionMsg())
except NotFoundError: jsonResp(Http404, getCurrentExceptionMsg()) except NotFoundError: statusResp(Http404, getCurrentExceptionMsg())
except: except:
error "unable to retrieve measurement:\n\t" & getCurrentExceptionMsg() error "unable to retrieve measurement:\n\t" & getCurrentExceptionMsg()
jsonResp(Http500) statusResp(Http500)
delete "/measure/@slug/@id": delete "/measure/@slug/@id":
checkAuth() checkAuth()
@ -513,16 +535,16 @@ proc start*(ctx: PMApiContext): void =
try: try:
let measure = ctx.getMeasureForSlug(session.user.id, @"slug") let measure = ctx.getMeasureForSlug(session.user.id, @"slug")
let measurement = ctx.getMeasurementForMeasure(measure.id, parseUUID(@"id")) let measurement = ctx.getMeasurementForMeasure(measure.id, parseUUID(@"id"))
if ctx.db.deleteMeasurement(measurement): jsonResp(Http200) if ctx.db.deleteMeasurement(measurement): statusResp(Http200)
else: raiseEx "" else: raiseEx ""
except ValueError: jsonResp(Http400, getCurrentExceptionMsg()) except ValueError: statusResp(Http400, getCurrentExceptionMsg())
except JsonParsingError: jsonResp(Http400, getCurrentExceptionMsg()) except JsonParsingError: statusResp(Http400, getCurrentExceptionMsg())
except BadRequestError: jsonResp(Http400, getCurrentExceptionMsg()) except BadRequestError: statusResp(Http400, getCurrentExceptionMsg())
except NotFoundError: jsonResp(Http404, getCurrentExceptionMsg()) except NotFoundError: statusResp(Http404, getCurrentExceptionMsg())
except: except:
error "unable to delete measurement:\n\t" & getCurrentExceptionMsg() error "unable to delete measurement:\n\t" & getCurrentExceptionMsg()
jsonResp(Http500) statusResp(Http500)
post "/log": post "/log":
checkAuth() checkAuth()
@ -537,9 +559,9 @@ proc start*(ctx: PMApiContext): void =
stacktrace: jsonBody.getIfExists("stacktrace").getStr(""), stacktrace: jsonBody.getIfExists("stacktrace").getStr(""),
timestamp: jsonBody.getOrFail("timestamp").getStr.parseIso8601 timestamp: jsonBody.getOrFail("timestamp").getStr.parseIso8601
) )
resp(Http200, $(%ctx.db.createClientLogEntry(logEntry)), JSON) jsonResp($(%ctx.db.createClientLogEntry(logEntry)))
except BadRequestError: jsonResp(Http400, getCurrentExceptionMsg()) except BadRequestError: statusResp(Http400, getCurrentExceptionMsg())
except: jsonResp(Http500, getCurrentExceptionMsg()) except: statusResp(Http500, getCurrentExceptionMsg())
post "/log/batch": post "/log/batch":
checkAuth() checkAuth()
@ -555,15 +577,15 @@ proc start*(ctx: PMApiContext): void =
stacktrace: it.getIfExists("stacktrace").getStr(""), stacktrace: it.getIfExists("stacktrace").getStr(""),
timestamp: it.getOrFail("timestamp").getStr.parseIso8601 timestamp: it.getOrFail("timestamp").getStr.parseIso8601
)) ))
resp(Http200, $(%respMsgs), JSON) jsonResp($(%respMsgs))
except BadRequestError: jsonResp(Http400, getCurrentExceptionMsg()) except BadRequestError: statusResp(Http400, getCurrentExceptionMsg())
except: jsonResp(Http500, getCurrentExceptionMsg()) except: statusResp(Http500, getCurrentExceptionMsg())
post "/service/debug/stop": post "/service/debug/stop":
if not ctx.cfg.debug: jsonResp(Http404) if not ctx.cfg.debug: statusResp(Http404)
else: else:
let shutdownFut = sleepAsync(100) let shutdownFut = sleepAsync(100)
shutdownFut.callback = proc(): void = complete(stopFuture) shutdownFut.callback = proc(): void = complete(stopFuture)
resp($(%"shutting down"), JSON) jsonResp($(%"shutting down"))
waitFor(stopFuture) waitFor(stopFuture)

View File

@ -9,6 +9,7 @@ type
debug*: bool debug*: bool
port*: int port*: int
pwdCost*: int8 pwdCost*: int8
knownOrigins*: seq[string]
PMApiContext* = object PMApiContext* = object
cfg*: PMApiConfig cfg*: PMApiConfig
@ -23,7 +24,8 @@ proc `%`*(cfg: PMApiConfig): JsonNode =
"dbConnString": cfg.dbConnString, "dbConnString": cfg.dbConnString,
"debug": cfg.debug, "debug": cfg.debug,
"port": cfg.port, "port": cfg.port,
"pwdCost": cfg.pwdCost } "pwdCost": cfg.pwdCost,
"knownOrigins": cfg.knownOrigins }
template raiseEx*(errorType: type, reason: string): void = template raiseEx*(errorType: type, reason: string): void =
raise newException(errorType, reason) raise newException(errorType, reason)

View File

@ -1,10 +1,8 @@
import db_postgres, macros, options, postgres, sequtils, strutils, import db_postgres, fiber_orm, uuids
times, timeutils, unicode, uuids
import ./models import ./models
import ./db_common
export db_common.NotFoundError export fiber_orm.NotFoundError
type type
PMApiDb* = ref object PMApiDb* = ref object
@ -14,18 +12,24 @@ type
proc connect*(connString: string): PMApiDb = proc connect*(connString: string): PMApiDb =
result = PMApiDb(conn: open("", "", "", connString)) result = PMApiDb(conn: open("", "", "", connString))
generateProcsForModels([User, ApiToken, Measure, Measurement, ClientLogEntry]) generateProcsForModels(PMApiDb, [
User,
ApiToken,
Measure,
Measurement,
ClientLogEntry
])
generateLookup(User, @["email"]) generateLookup(PMApiDb, User, @["email"])
generateLookup(ApiToken, @["userId"]) generateLookup(PMApiDb, ApiToken, @["userId"])
generateLookup(ApiToken, @["hashedToken"]) generateLookup(PMApiDb, ApiToken, @["hashedToken"])
generateLookup(Measure, @["userId"]) generateLookup(PMApiDb, Measure, @["userId"])
generateLookup(Measure, @["userId", "id"]) generateLookup(PMApiDb, Measure, @["userId", "id"])
generateLookup(Measure, @["userId", "slug"]) generateLookup(PMApiDb, Measure, @["userId", "slug"])
generateLookup(Measurement, @["measureId"]) generateLookup(PMApiDb, Measurement, @["measureId"])
generateLookup(Measurement, @["measureId", "id"]) generateLookup(PMApiDb, Measurement, @["measureId", "id"])
generateLookup(ClientLogEntry, @["userId"]) generateLookup(PMApiDb, ClientLogEntry, @["userId"])

View File

@ -1,150 +0,0 @@
import db_postgres, macros, options, sequtils, strutils, uuids
from unicode import capitalize
import ./db_util
type NotFoundError* = object of CatchableError
proc newMutateClauses(): MutateClauses =
return MutateClauses(
columns: @[],
placeholders: @[],
values: @[])
proc createRecord*[T](db: DbConn, rec: T): T =
var mc = newMutateClauses()
populateMutateClauses(rec, true, mc)
# Confusingly, getRow allows inserts and updates. We use it to get back the ID
# we want from the row.
let newRow = db.getRow(sql(
"INSERT INTO " & tableName(rec) &
" (" & mc.columns.join(",") & ") " &
" VALUES (" & mc.placeholders.join(",") & ") " &
" RETURNING *"), mc.values)
result = rowToModel(T, newRow)
proc updateRecord*[T](db: DbConn, rec: T): bool =
var mc = newMutateClauses()
populateMutateClauses(rec, false, mc)
let setClause = zip(mc.columns, mc.placeholders).mapIt(it.a & " = " & it.b).join(",")
let numRowsUpdated = db.execAffectedRows(sql(
"UPDATE " & tableName(rec) &
" SET " & setClause &
" WHERE id = ? "), mc.values.concat(@[$rec.id]))
return numRowsUpdated > 0;
template deleteRecord*(db: DbConn, modelType: type, id: typed): untyped =
db.tryExec(sql("DELETE FROM " & tableName(modelType) & " WHERE id = ?"), $id)
proc deleteRecord*[T](db: DbConn, rec: T): bool =
return db.tryExec(sql("DELETE FROM " & tableName(rec) & " WHERE id = ?"), $rec.id)
template getRecord*(db: DbConn, modelType: type, id: typed): untyped =
let row = db.getRow(sql(
"SELECT " & columnNamesForModel(modelType).join(",") &
" FROM " & tableName(modelType) &
" WHERE id = ?"), @[$id])
if row.allIt(it.len == 0):
raise newException(NotFoundError, "no record for id " & $id)
rowToModel(modelType, row)
template findRecordsWhere*(db: DbConn, modelType: type, whereClause: string, values: varargs[string, dbFormat]): untyped =
db.getAllRows(sql(
"SELECT " & columnNamesForModel(modelType).join(",") &
" FROM " & tableName(modelType) &
" WHERE " & whereClause), values)
.mapIt(rowToModel(modelType, it))
template getAllRecords*(db: DbConn, modelType: type): untyped =
db.getAllRows(sql(
"SELECT " & columnNamesForModel(modelType).join(",") &
" FROM " & tableName(modelType)))
.mapIt(rowToModel(modelType, it))
template findRecordsBy*(db: DbConn, modelType: type, lookups: seq[tuple[field: string, value: string]]): untyped =
db.getAllRows(sql(
"SELECT " & columnNamesForModel(modelType).join(",") &
" FROM " & tableName(modelType) &
" WHERE " & lookups.mapIt(it.field & " = ?").join(" AND ")),
lookups.mapIt(it.value))
.mapIt(rowToModel(modelType, it))
macro generateProcsForModels*(modelTypes: openarray[type]): untyped =
result = newStmtList()
for t in modelTypes:
let modelName = $(t.getType[1])
let getName = ident("get" & modelName)
let getAllName = ident("getAll" & modelName & "s")
let findWhereName = ident("find" & modelName & "sWhere")
let createName = ident("create" & modelName)
let updateName = ident("update" & modelName)
let deleteName = ident("delete" & modelName)
let idType = typeOfColumn(t, "id")
result.add quote do:
proc `getName`*(db: PMApiDb, id: `idType`): `t` = getRecord(db.conn, `t`, id)
proc `getAllName`*(db: PMApiDb): seq[`t`] = getAllRecords(db.conn, `t`)
proc `findWhereName`*(db: PMApiDb, whereClause: string, values: varargs[string, dbFormat]): seq[`t`] =
return findRecordsWhere(db.conn, `t`, whereClause, values)
proc `createName`*(db: PMApiDb, rec: `t`): `t` = createRecord(db.conn, rec)
proc `updateName`*(db: PMApiDb, rec: `t`): bool = updateRecord(db.conn, rec)
proc `deleteName`*(db: PMApiDb, rec: `t`): bool = deleteRecord(db.conn, rec)
proc `deleteName`*(db: PMApiDb, id: `idType`): bool = deleteRecord(db.conn, `t`, id)
macro generateLookup*(modelType: type, fields: seq[string]): untyped =
let fieldNames = fields[1].mapIt($it)
let procName = ident("find" & $modelType.getType[1] & "sBy" & fieldNames.mapIt(it.capitalize).join("And"))
# Create proc skeleton
result = quote do:
proc `procName`*(db: PMApiDb): seq[`modelType`] =
return findRecordsBy(db.conn, `modelType`)
var callParams = quote do: @[]
# Add dynamic parameters for the proc definition and inner proc call
for n in fieldNames:
let paramTuple = newNimNode(nnkPar)
paramTuple.add(newColonExpr(ident("field"), newLit(identNameToDb(n))))
paramTuple.add(newColonExpr(ident("value"), ident(n)))
result[3].add(newIdentDefs(ident(n), ident("string")))
callParams[1].add(paramTuple)
result[6][0][0].add(callParams)
macro generateProcsForFieldLookups*(modelsAndFields: openarray[tuple[t: type, fields: seq[string]]]): untyped =
result = newStmtList()
for i in modelsAndFields:
var modelType = i[1][0]
let fieldNames = i[1][1][1].mapIt($it)
let procName = ident("find" & $modelType & "sBy" & fieldNames.mapIt(it.capitalize).join("And"))
# Create proc skeleton
let procDefAST = quote do:
proc `procName`*(db: PMApiDb): seq[`modelType`] =
return findRecordsBy(db.conn, `modelType`)
var callParams = quote do: @[]
# Add dynamic parameters for the proc definition and inner proc call
for n in fieldNames:
let paramTuple = newNimNode(nnkPar)
paramTuple.add(newColonExpr(ident("field"), newLit(n)))
paramTuple.add(newColonExpr(ident("value"), ident(n)))
procDefAST[3].add(newIdentDefs(ident(n), ident("string")))
callParams[1].add(paramTuple)
procDefAST[6][0][0].add(callParams)
result.add procDefAST

View File

@ -1,285 +0,0 @@
import json, macros, options, sequtils, strutils, times, timeutils, unicode,
uuids
const UNDERSCORE_RUNE = "_".toRunes[0]
const PG_TIMESTAMP_FORMATS = [
"yyyy-MM-dd HH:mm:sszz",
"yyyy-MM-dd HH:mm:ss'.'fffzz"
]
type
MutateClauses* = object
columns*: seq[string]
placeholders*: seq[string]
values*: seq[string]
# TODO: more complete implementation
# see https://github.com/blakeembrey/pluralize
proc pluralize(name: string): string =
if name[^2..^1] == "ey": return name[0..^3] & "ies"
if name[^1] == 'y': return name[0..^2] & "ies"
return name & "s"
macro modelName*(model: object): string =
return $model.getTypeInst
macro modelName*(modelType: type): string =
return $modelType.getType[1]
proc identNameToDb*(name: string): string =
let nameInRunes = name.toRunes
var prev: Rune
var resultRunes = newSeq[Rune]()
for cur in nameInRunes:
if resultRunes.len == 0:
resultRunes.add(toLower(cur))
elif isLower(prev) and isUpper(cur):
resultRunes.add(UNDERSCORE_RUNE)
resultRunes.add(toLower(cur))
else: resultRunes.add(toLower(cur))
prev = cur
return $resultRunes
proc dbNameToIdent*(name: string): string =
let parts = name.split("_")
return @[parts[0]].concat(parts[1..^1].mapIt(capitalize(it))).join("")
proc tableName*(modelType: type): string =
return pluralize(modelName(modelType).identNameToDb)
proc tableName*[T](rec: T): string =
return pluralize(modelName(rec).identNameToDb)
proc dbFormat*(s: string): string = return s
proc dbFormat*(dt: DateTime): string = return dt.formatIso8601
proc dbFormat*[T](list: seq[T]): string =
return "{" & list.mapIt(dbFormat(it)).join(",") & "}"
proc dbFormat*[T](item: T): string = return $item
type DbArrayParseState = enum
expectStart, inQuote, inVal, expectEnd
proc parsePGDatetime*(val: string): DateTime =
var errStr = ""
for df in PG_TIMESTAMP_FORMATS:
try: return val.parse(df)
except: errStr &= "\n" & getCurrentExceptionMsg()
raise newException(ValueError, "Cannot parse PG date. Tried:" & errStr)
proc parseDbArray*(val: string): seq[string] =
result = newSeq[string]()
var parseState = DbArrayParseState.expectStart
var curStr = ""
var idx = 1
var sawEscape = false
while idx < val.len - 1:
var curChar = val[idx]
idx += 1
case parseState:
of expectStart:
if curChar == ' ': continue
elif curChar == '"':
parseState = inQuote
continue
else:
parseState = inVal
of expectEnd:
if curChar == ' ': continue
elif curChar == ',':
result.add(curStr)
curStr = ""
parseState = expectStart
continue
of inQuote:
if curChar == '"' and not sawEscape:
parseState = expectEnd
continue
of inVal:
if curChar == '"' and not sawEscape:
raise newException(ValueError, "Invalid DB array value (cannot have '\"' in the middle of an unquoted string).")
elif curChar == ',':
result.add(curStr)
curStr = ""
parseState = expectStart
continue
# if we saw an escaped \", add just the ", otherwise add both
if sawEscape:
if curChar != '"': curStr.add('\\')
curStr.add(curChar)
sawEscape = false
elif curChar == '\\':
sawEscape = true
else: curStr.add(curChar)
if not (parseState == inQuote) and curStr.len > 0:
result.add(curStr)
proc createParseStmt*(t, value: NimNode): NimNode =
#echo "Creating parse statment for ", t.treeRepr
if t.typeKind == ntyObject:
if t.getType == UUID.getType:
result = quote do: parseUUID(`value`)
elif t.getType == DateTime.getType:
result = quote do: parsePGDatetime(`value`)
elif t.getTypeInst == Option.getType:
let innerType = t.getTypeImpl[2][0][0][1]
let parseStmt = createParseStmt(innerType, value)
result = quote do:
if `value`.len == 0: none[`innerType`]()
else: some(`parseStmt`)
else: error "Unknown value object type: " & $t.getTypeInst
elif t.typeKind == ntyRef:
if $t.getTypeInst == "JsonNode":
result = quote do: parseJson(`value`)
else:
error "Unknown ref type: " & $t.getTypeInst
elif t.typeKind == ntySequence:
let innerType = t[1]
let parseStmts = createParseStmt(innerType, ident("it"))
result = quote do: parseDbArray(`value`).mapIt(`parseStmts`)
elif t.typeKind == ntyString:
result = quote do: `value`
elif t.typeKind == ntyInt:
result = quote do: parseInt(`value`)
elif t.typeKind == ntyBool:
result = quote do: "true".startsWith(`value`.toLower)
else:
error "Unknown value type: " & $t.typeKind
template walkFieldDefs*(t: NimNode, body: untyped) =
let tTypeImpl = t.getTypeImpl
var nodeToItr: NimNode
if tTypeImpl.typeKind == ntyObject: nodeToItr = tTypeImpl[2]
elif tTypeImpl.typeKind == ntyTypeDesc: nodeToItr = tTypeImpl.getType[1].getType[2]
else: error $t & " is not an object or type desc (it's a " & $tTypeImpl.typeKind & ")."
for fieldDef {.inject.} in nodeToItr.children:
# ignore AST nodes that are not field definitions
if fieldDef.kind == nnkIdentDefs:
let fieldIdent {.inject.} = fieldDef[0]
let fieldType {.inject.} = fieldDef[1]
body
elif fieldDef.kind == nnkSym:
let fieldIdent {.inject.} = fieldDef
let fieldType {.inject.} = fieldDef.getType
body
macro columnNamesForModel*(modelType: typed): seq[string] =
var columnNames = newSeq[string]()
modelType.walkFieldDefs:
columnNames.add(identNameToDb($fieldIdent))
result = newLit(columnNames)
macro rowToModel*(modelType: typed, row: seq[string]): untyped =
# Create the object constructor AST node
result = newNimNode(nnkObjConstr).add(modelType)
# Create new colon expressions for each of the property initializations
var idx = 0
modelType.walkFieldDefs:
let itemLookup = quote do: `row`[`idx`]
result.add(newColonExpr(
fieldIdent,
createParseStmt(fieldType, itemLookup)))
idx += 1
macro listFields*(t: typed): untyped =
var fields: seq[tuple[n: string, t: string]] = @[]
t.walkFieldDefs:
if fieldDef.kind == nnkSym: fields.add((n: $fieldIdent, t: fieldType.repr))
else: fields.add((n: $fieldIdent, t: $fieldType))
result = newLit(fields)
proc typeOfColumn*(modelType: NimNode, colName: string): NimNode =
modelType.walkFieldDefs:
if $fieldIdent != colName: continue
if fieldType.typeKind == ntyObject:
if fieldType.getType == UUID.getType: return ident("UUID")
elif fieldType.getType == DateTime.getType: return ident("DateTime")
elif fieldType.getType == Option.getType: return ident("Option")
else: error "Unknown column type: " & $fieldType.getTypeInst
else: return fieldType
raise newException(Exception,
"model of type '" & $modelType & "' has no column named '" & colName & "'")
proc isZero(val: int): bool = return val == 0
macro populateMutateClauses*(t: typed, newRecord: bool, mc: var MutateClauses): untyped =
result = newStmtList()
# iterate over all the object's fields
t.walkFieldDefs:
# grab the field, it's string name, and it's type
let fieldName = $fieldIdent
# we do not update the ID, but we do check: if we're creating a new
# record, we should not have an existing ID
if fieldName == "id":
result.add quote do:
if `newRecord` and not `t`.id.isZero:
raise newException(
AssertionError,
"Trying to create a new record, but the record already has an ID (" & $(`t`.id) & ").")
# if we're looking at an optional field, add logic to check for presence
elif fieldType.kind == nnkBracketExpr and
fieldType.len > 0 and
fieldType[0] == Option.getType:
result.add quote do:
`mc`.columns.add(identNameToDb(`fieldName`))
if `t`.`fieldIdent`.isSome:
`mc`.placeholders.add("?")
`mc`.values.add(dbFormat(`t`.`fieldIdent`.get))
else:
`mc`.placeholders.add("NULL")
# otherwise assume we can convert and go ahead.
else:
result.add quote do:
`mc`.columns.add(identNameToDb(`fieldName`))
`mc`.placeholders.add("?")
`mc`.values.add(dbFormat(`t`.`fieldIdent`))

View File

@ -28,7 +28,7 @@ type
Measurement* = object Measurement* = object
id*: UUID id*: UUID
measureId*: UUID measureId*: UUID
value*: int value*: float
timestamp*: DateTime timestamp*: DateTime
extData*: JsonNode extData*: JsonNode

View File

@ -1 +1 @@
const PM_API_VERSION* = "0.1.0" const PM_API_VERSION* = "0.8.0"

View File

@ -0,0 +1,2 @@
-- DOWN script for measure-value-is-numeric (20200216230431)
alter table "measurements" alter column "value" type integer;

View File

@ -0,0 +1,2 @@
-- UP script for measure-value-is-numeric (20200216230431)
alter table "measurements" alter column "value" type numeric;

View File

@ -7,3 +7,6 @@ User=pmapi
WorkingDirectory=/home/pmapi WorkingDirectory=/home/pmapi
ExecStart=/home/pmapi/personal_measure_api ExecStart=/home/pmapi/personal_measure_api
Restart=on-failure Restart=on-failure
[Install]
WantedBy=multi-user.target

View File

@ -1,6 +1,6 @@
#!/bin/bash #!/bin/bash
host="${PM_API_HOST:-localhost:8081}" api_base_url="${PM_API_BASE_URL:-http://localhost:8081}"
if [ $# -eq 1 ]; then if [ $# -eq 1 ]; then
url="$1" url="$1"
method="GET" method="GET"
@ -17,6 +17,8 @@ fi
curl -s -X "$method" \ curl -s -X "$method" \
-H "Content-Type: application/json" \ -H "Content-Type: application/json" \
-H "Authorization: $(cat credential)"\ -H "Authorization: $(cat credential)" \
"http://${host}/api/$url" \ -H "Origin: https://curl.localhost" \
-d "$data" "${api_base_url}/api/$url" \
-d "$data" \
-v

View File

@ -0,0 +1,10 @@
<RoutingRules>
<RoutingRule>
<Condition>
<KeyPrefixEquals>api</KeyPrefixEquals>
</Condition>
<Redirect>
<HostName>https://pmapi.jdbernard.com</HostName>
</Redirect>
</RoutingRule>
</RoutingRules>

65
operations/doc.md Normal file
View File

@ -0,0 +1,65 @@
### Web App Hosting (static HTML)
The web app is deployed to an S3 bucket. Each top-level directory in this bucket
represents a different deployed environment. For example:
s3://pm.jdb-labs.com
├── prod
│   ├── logs
│   │   └── cloudfront
│   └── webroot
│       ├── css
│       ├── img
│       ├── js
│       ├── index.html
│       └── ...
└── dev
    ├── logs
    └── webroot
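Deploys write the built web assets into the targeted environment's `webroot` prefix and then
invalidate the CDN cache. A rough sketch of what the Makefile's `deploy-web` target does for the
dev environment (illustrative only; the real target works from the versioned tarball in `dist/`):

    TARGET_ENV=dev
    aws s3 sync web/dist "s3://pm.jdb-labs.com/${TARGET_ENV}/webroot"
    TARGET_ENV=${TARGET_ENV} operations/invalidate-cdn-cache.sh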
### API Hosting
The API is served from razgriz:
* dev: https://pmapi-dev.jdb-labs.com (:80 -> :8281)
* prod: https://pmapi.jdb-labs.com (:80 -> :8280)
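nginx on razgriz terminates TLS and proxies each hostname to the matching local port (see the
nginx configuration added in this release). A quick smoke test against a deployed environment,
assuming the service is up:

    # dev; the prod check is the same against https://pmapi.jdb-labs.com
    curl https://pmapi-dev.jdb-labs.com/v0/version
    # should return something like: "personal_measure_api v0.8.0"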
#### Server Setup Notes
The home folder of the `pmapi` user, like the S3 bucket, contains one folder
for each environment:
/home/pmapi
├── dev
│   ├── personal_measure_api
│   └── personal_measure_api.config.json
└── prod
    ├── personal_measure_api
    └── personal_measure_api.config.json
As part of the automated deployment process, pmapi.jdb-labs.com has one systemd
service definition for each environment. The sudoers file allows the `pmapi`
user to manage these without a password.
# Allow pmapi to manage the personal_measure_api service
pmapi ALL=NOPASSWD: /bin/systemctl stop personal_measure_api.prod.service
pmapi ALL=NOPASSWD: /bin/systemctl start personal_measure_api.prod.service
pmapi ALL=NOPASSWD: /bin/systemctl stop personal_measure_api.dev.service
pmapi ALL=NOPASSWD: /bin/systemctl start personal_measure_api.dev.service
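Each environment's unit runs the binary deployed to that environment's folder. A sketch of what
`personal_measure_api.dev.service` might look like (the exact unit on the server may differ;
paths follow the layout above):

    [Unit]
    Description=Personal Measure API (dev)

    [Service]
    User=pmapi
    WorkingDirectory=/home/pmapi/dev
    ExecStart=/home/pmapi/dev/personal_measure_api
    Restart=on-failure

    [Install]
    WantedBy=multi-user.target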
### Database
The razgriz-db.jdb-labs.com RDS instance maintains a separate database for each environment:
* dev: `personal_measure_dev`
* prod: `personal_measure`
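The API picks up its database connection from the `dbConnString` entry in its per-environment
config file. For dev it would look something like this (host and database name per the above;
the port, user, and password shown here are placeholders):

    "dbConnString": "host=razgriz-db.jdb-labs.com port=5432 dbname=personal_measure_dev user=pmapi password=********"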
### Routing
CloudFront manages the routing of all of the external-facing URLs.
https://pm.jdb-labs.com (CloudFront)
└── s3://pm.jdb-labs.com/prod/webroot (static HTML)
https://pm-dev.jdb-labs.com (CloudFront)
└── s3://pm.jdb-labs.com/dev/webroot (static HTML)

View File

@ -0,0 +1,28 @@
#!/bin/bash
echo "Looking up CloudFront distribution ID for Personal Measure ${TARGET_ENV} environment."
cloudfront_distribution_id=$(\
aws cloudfront list-distributions \
--query "DistributionList.Items[?starts_with(Comment, 'Personal Measure ${TARGET_ENV}')].Id | [0]" \
| sed -e 's/^"//' -e 's/"$//'
)
if [[ -z "${cloudfront_distribution_id}" ]]; then
>&2 echo "Unable to find CloudFront distribution for domain ${TARGET_ENV}."
exit 3
fi
echo "Found distribution ID ${cloudfront_distribution_id}."
echo "Invalidating the CloudFront cache for ${TARGET_ENV}."
invalidation_id=$(aws cloudfront create-invalidation \
--query 'Invalidation.Id' \
--distribution-id "${cloudfront_distribution_id}" \
--paths '/index.html')
if [[ $? -ne 0 || -z "${invalidation_id}" ]]; then
>&2 echo "Unable to create the CloudFront invalidation."
else
echo "Successfully created invalidation ${invalidation_id}."
fi
echo "Done."

View File

@ -0,0 +1,33 @@
server {
listen 80;
server_name pmapi-dev.jdb-labs.com;
return 301 https://pmapi-dev.jdb-labs.com$request_uri;
}
server {
listen 443;
server_name pmapi-dev.jdb-labs.com;
ssl on;
location / {
if ($request_method = 'OPTIONS') {
add_header 'Access-Control-Allow-Origin' 'https://pm-dev.jdb-labs.com';
add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS';
add_header 'Access-Control-Max-Age' 1728000;
add_header 'Content-Type' 'text/plain; charset=utf-8';
add_header 'Content-Length' 0;
add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization';
return 204;
}
proxy_pass http://localhost:8281;
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
}
}

View File

@ -0,0 +1,33 @@
server {
listen 80;
server_name pmapi.jdb-labs.com;
return 301 https://pmapi.jdb-labs.com$request_uri;
}
server {
listen 443;
server_name pmapi.jdb-labs.com;
ssl on;
location / {
if ($request_method = 'OPTIONS') {
add_header 'Access-Control-Allow-Origin' 'https://pm.jdb-labs.com';
add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS';
add_header 'Access-Control-Max-Age' 1728000;
add_header 'Content-Type' 'text/plain; charset=utf-8';
add_header 'Content-Length' 0;
add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization';
return 204;
}
proxy_pass http://localhost:8280;
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
}
}

View File

@ -0,0 +1,11 @@
### Variables
variable "aws_region" {
description = "https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-regions-availability-zones.html"
default = "us-west-2" # Oregon
}
variable "app_root_url" {
description = "Name of the S3 bucket to store deployed artifacts, logs, etc."
default = "pm.jdb-labs.com"
}

View File

@ -0,0 +1,102 @@
data "aws_iam_policy_document" "bucket_access_policy" {
statement {
actions = [ "s3:GetObject" ]
effect = "Allow"
resources = [ "${var.artifact_bucket.arn}/${var.environment}/webroot/*" ]
principals {
type = "AWS"
identifiers = [ "${aws_cloudfront_origin_access_identity.origin_access_identity.iam_arn}" ]
}
}
statement {
actions = [ "s3:ListBucket" ]
effect = "Allow"
resources = [ "${var.artifact_bucket.arn}" ]
principals {
type = "AWS"
identifiers = [ "${aws_cloudfront_origin_access_identity.origin_access_identity.iam_arn}" ]
}
}
}
output "oai_access_policy" {
value = data.aws_iam_policy_document.bucket_access_policy
}
locals {
env_domain_name = "pm${var.environment == "prod" ? "" : "-${var.environment}"}.jdb-labs.com"
}
resource "aws_cloudfront_origin_access_identity" "origin_access_identity" {
comment = "OAI for Personal Measure {$var.environment} environment."
}
resource "aws_cloudfront_distribution" "s3_distribution" {
origin {
domain_name = "${var.artifact_bucket.bucket_regional_domain_name}"
origin_id = "S3-PersonalMeasure-${var.environment}"
origin_path = "/${var.environment}/webroot"
s3_origin_config {
origin_access_identity = "${aws_cloudfront_origin_access_identity.origin_access_identity.cloudfront_access_identity_path}"
}
}
enabled = true
is_ipv6_enabled = true
comment = "Personal Measure ${var.environment} distribution."
default_root_object = "/index.html"
logging_config {
include_cookies = false
bucket = "${var.artifact_bucket.bucket_domain_name}"
prefix = "${var.environment}/logs/cloudfront"
}
aliases = ["${local.env_domain_name}"]
default_cache_behavior {
allowed_methods = ["GET", "HEAD", "OPTIONS"]
cached_methods = ["GET", "HEAD", "OPTIONS"]
target_origin_id = "S3-PersonalMeasure-${var.environment}"
forwarded_values {
query_string = false
cookies {
forward = "none"
}
}
min_ttl = 0
default_ttl = 60 * 60 * 24 * 365 # cache for a year
max_ttl = 60 * 60 * 24 * 365 # cache for a year
compress = true
viewer_protocol_policy = "redirect-to-https"
}
custom_error_response {
error_code = 404
response_code = 200
response_page_path = "/index.html"
}
price_class = "PriceClass_100" # US and Canada only
restrictions {
geo_restriction {
restriction_type = "none"
}
}
tags = {
Environment = "${var.environment}"
}
viewer_certificate {
acm_certificate_arn = "${var.cloudfront_ssl_certificate_arn}"
ssl_support_method = "sni-only"
}
}

View File

@ -0,0 +1,13 @@
### Variables
variable "environment" {
description = "The short name of this deployed environment. For example: 'dev' or 'prod'. This short name will be used to name resources (CloudFront distributions, etc.)"
}
variable "artifact_bucket" {
description = "The aws_s3_bucket object representing the artifact bucket where deployed artifacts, logs, etc. live."
}
variable "cloudfront_ssl_certificate_arn" {
description = "ARN of the managed SSL certificate to use for this environment."
}

View File

@ -0,0 +1,50 @@
provider "aws" {
region = var.aws_region
}
resource "aws_s3_bucket" "personal_measure" {
bucket = "${var.app_root_url}"
acl = "log-delivery-write"
}
resource "aws_dynamodb_table" "dynamodb_terraform-state-lock" {
name = "terraform-state-lock.${var.app_root_url}"
hash_key = "LockID"
read_capacity = 20
write_capacity = 20
attribute {
name = "LockID"
type = "S"
}
tags = {
Name = "Terraform DynamoDB State Lock Table"
}
}
module "dev_env" {
source = "./deployed_env"
environment = "dev"
artifact_bucket = aws_s3_bucket.personal_measure
cloudfront_ssl_certificate_arn = "arn:aws:acm:us-east-1:063932952339:certificate/48fe3ce0-4700-4eaa-b433-bb634f47934c"
}
module "prod_env" {
source = "./deployed_env"
environment = "prod"
artifact_bucket = aws_s3_bucket.personal_measure
cloudfront_ssl_certificate_arn = "arn:aws:acm:us-east-1:063932952339:certificate/48fe3ce0-4700-4eaa-b433-bb634f47934c"
}
data "aws_iam_policy_document" "cloudfront_access_policy" {
source_json = "${module.dev_env.oai_access_policy.json}"
override_json = "${module.prod_env.oai_access_policy.json}"
}
resource "aws_s3_bucket_policy" "personal_measure" {
bucket = "${aws_s3_bucket.personal_measure.id}"
policy = "${data.aws_iam_policy_document.cloudfront_access_policy.json}"
}

View File

@ -0,0 +1,8 @@
terraform {
backend "s3" {
bucket = "pm.jdb-labs.com"
region = "us-west-2"
key = "terraform.tfstate"
dynamodb_table = "terraform-state-lock.pm.jdb-labs.com"
}
}

63
operations/update-version.sh Executable file
View File

@ -0,0 +1,63 @@
#!/bin/bash
#
# Script to update the version number, commit the changes to the version files,
# and tag the new commit.
set -e
origDir=$(pwd)
rootDir=$(git rev-parse --show-toplevel)
cd "$rootDir"
currentBranch=$(git rev-parse --abbrev-ref HEAD)
if [ "$currentBranch" != "develop" ]; then
printf "You are currently on the '%s' branch. Is this intended (yes/no)? " "$currentBranch"
read -r confirmation
if [ "$confirmation" != "yes" ]; then exit 1; fi
fi
lastVersion=$(jq -r .version web/package.json)
printf "Last version: %s\n" "$lastVersion"
printf "New version: "
read -r newVersion
printf "New version will be \"%s\". Is this correct (yes/no)? " "$newVersion"
read -r confirmation
if [ "$confirmation" != "yes" ]; then
  printf "\n"
  "$origDir/$0"
  exit
fi
printf ">> Updating /web/package.json with \"version\": \"%s\"\n" "$newVersion"
printf "jq \".version = \\\"%s\\\"\" web/package.json > temp.json\n" "$newVersion"
jq ".version = \"${newVersion}\"" web/package.json > temp.json
printf "mv temp.json web/package.json\n"
mv temp.json web/package.json
printf ">> Updating /web/package-lock.json with \"version\": \"%s\"\n" "$newVersion"
printf "jq \".version = \\\"%s\\\"\" web/package-lock.json > temp.json\n" "$newVersion"
jq ".version = \"${newVersion}\"" web/package-lock.json > temp.json
printf "mv temp.json web/package-lock.json\n"
mv temp.json web/package-lock.json
printf ">> Updating /api/src/main/nim/personal_measure_apipkg/version.nim with PM_API_VERSION* = \"%s\"" "$newVersion"
printf "sed -i \"s/%s/%s/\" api/src/main/nim/personal_measure_apipkg/version.nim" "$lastVersion" "$newVersion"
sed -i "s/${lastVersion}/${newVersion}/" api/src/main/nim/personal_measure_apipkg/version.nim
printf ">> Updating /api/personal_measure_api.nimble with version = \"%s\"" "$newVersion"
printf "sed -i \"s/%s/%s/\" api/personal_measure_api.nimble" "$lastVersion" "$newVersion"
sed -i "s/${lastVersion}/${newVersion}/" api/personal_measure_api.nimble
printf ">> Committing new version.\n"
printf "git add web/package.json web/package-lock.json api/src/main/nim/personal_measure_apipkg/version.nim"
git add web/package.json web/package-lock.json api/src/main/nim/personal_measure_apipkg/version.nim api/personal_measure_api.nimble
printf "git commit -m \"Update package version to %s\"\n" "$newVersion"
git commit -m "Update package version to ${newVersion}"
printf ">> Tagging commit.\n"
printf "git tag -m \"Version %s\" \"%s\"\n" "$newVersion" "$newVersion"
git tag -m "Version ${newVersion}" "${newVersion}"
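For reference, a hypothetical run of this script from the repository root; the prompts come from the printf calls above, and the version numbers shown are purely illustrative:

$ ./operations/update-version.sh
Last version: 0.7.0
New version: 0.8.0
New version will be "0.8.0". Is this correct (yes/no)? yes
>> Updating /web/package.json with "version": "0.8.0"
...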

web/.env.development (new file, 4 lines)

@ -0,0 +1,4 @@
NODE_ENV=production
VUE_APP_PM_API_BASE=https://pmapi-dev.jdb-labs.com/v0
VUE_APP_LOG_LEVEL=INFO
VUE_APP_API_LOG_LEVEL=ERROR


@ -1,3 +0,0 @@
VUE_APP_PM_API_BASE=https://personal-measure.jdb-labs.com/api
VUE_APP_LOG_LEVEL=INFO
VUE_APP_API_LOG_LEVEL=ERROR

web/.env.production (new file, 3 lines)

@ -0,0 +1,3 @@
VUE_APP_PM_API_BASE=https://pmapi.jdb-labs.com/v0
VUE_APP_LOG_LEVEL=INFO
VUE_APP_API_LOG_LEVEL=ERROR
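These two files follow Vue CLI's mode-based .env resolution: a build run with --mode development picks up web/.env.development, while --mode production picks up web/.env.production. A minimal sketch of how the package.json scripts below map onto them:

$ npm run build-dev     # vue-cli-service build --mode development -> reads web/.env.development
$ npm run build-prod    # vue-cli-service build --mode production  -> reads web/.env.production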


@ -1,24 +1,5 @@
-API_ADDR='localhost:8080'
-API_LOG_LEVEL='WARN'
-LOG_LEVEL='TRACE'
-build-dev:
-	npm run build-dev
 build:
-	npm run build
+	npm run build-${TARGET_ENV}
 serve:
-	VUE_APP_PM_API_BASE=/api \
-	VUE_APP_API_LOG_LEVEL=${API_LOG_LEVEL} \
-	VUE_APP_LOG_LEVEL=${LOG_LEVEL} \
 	npm run serve
-serve-dev: build-dev
-	(cd dist && npx live-server . --port=8080 --entry-file=index.html --proxy=/api:http://localhost:8081/api --no-browser)
-serve-ssl: build-dev
-	(cd dist && \
-	(local-ssl-proxy --source=8443 --target=8080 & \
-	echo `pwd` && \
-	npx live-server . --port=8080 --entry-file=index.html --proxy=/api:http://localhost:8081/api --no-browser))

web/package-lock.json (generated, 5469 lines changed)

File diff suppressed because it is too large.


@ -1,61 +1,61 @@
 {
   "name": "personal-measure-web",
-  "version": "0.1.0",
+  "version": "0.8.0",
   "private": true,
   "scripts": {
     "serve": "vue-cli-service serve",
-    "build": "vue-cli-service build",
+    "build-prod": "vue-cli-service build --mode production",
     "build-dev": "vue-cli-service build --mode development",
     "lint": "vue-cli-service lint",
     "test:unit": "vue-cli-service test:unit"
   },
   "dependencies": {
-    "@fortawesome/fontawesome-svg-core": "^1.2.15",
+    "@fortawesome/fontawesome-svg-core": "^1.2.27",
-    "@fortawesome/free-solid-svg-icons": "^5.7.2",
+    "@fortawesome/free-solid-svg-icons": "^5.12.1",
-    "@fortawesome/vue-fontawesome": "^0.1.5",
+    "@fortawesome/vue-fontawesome": "^0.1.9",
-    "@types/js-cookie": "^2.2.1",
+    "@types/js-cookie": "^2.2.4",
     "@types/jwt-decode": "^2.2.1",
     "@types/lodash.assign": "^4.2.6",
     "@types/lodash.findindex": "^4.6.6",
-    "@types/lodash.merge": "^4.6.5",
+    "@types/lodash.merge": "^4.6.6",
-    "apexcharts": "^3.6.5",
+    "apexcharts": "^3.15.6",
-    "axios": "^0.18.0",
+    "axios": "^0.18.1",
-    "js-cookie": "^2.2.0",
+    "js-cookie": "^2.2.1",
     "jwt-decode": "^2.2.0",
-    "keen-ui": "^1.1.2",
+    "keen-ui": "^1.2.1",
     "lodash.assign": "^4.2.0",
     "lodash.findindex": "^4.6.0",
     "lodash.keyby": "^4.6.0",
-    "lodash.merge": "^4.6.1",
+    "lodash.merge": "^4.6.2",
     "moment": "^2.24.0",
     "register-service-worker": "^1.5.2",
-    "vue": "^2.6.6",
+    "vue": "^2.6.11",
-    "vue-apexcharts": "^1.3.2",
+    "vue-apexcharts": "^1.5.2",
     "vue-class-component": "^6.0.0",
     "vue-property-decorator": "^7.0.0",
-    "vue-router": "^3.0.1",
+    "vue-router": "^3.1.5",
     "vuejs-smart-table": "0.0.3",
-    "vuex": "^3.0.1",
+    "vuex": "^3.1.2",
-    "vuex-module-decorators": "^0.9.8"
+    "vuex-module-decorators": "^0.9.11"
   },
   "devDependencies": {
     "@types/jest": "^23.1.4",
     "@types/lodash.keyby": "^4.6.6",
-    "@vue/cli-plugin-babel": "^3.4.0",
+    "@vue/cli-plugin-babel": "^3.12.1",
-    "@vue/cli-plugin-pwa": "^3.4.0",
+    "@vue/cli-plugin-pwa": "^3.12.1",
-    "@vue/cli-plugin-typescript": "^3.4.0",
+    "@vue/cli-plugin-typescript": "^3.12.1",
-    "@vue/cli-plugin-unit-jest": "^3.7.0",
+    "@vue/cli-plugin-unit-jest": "^3.12.1",
-    "@vue/cli-service": "^3.5.3",
+    "@vue/cli-service": "^3.12.1",
-    "@vue/test-utils": "^1.0.0-beta.20",
+    "@vue/test-utils": "^1.0.0-beta.31",
     "babel-core": "7.0.0-bridge.0",
-    "lint-staged": "^8.1.0",
+    "lint-staged": "^8.2.1",
     "live-server": "^1.2.1",
-    "node-sass": "^4.12.0",
+    "node-sass": "^4.13.1",
-    "sass-loader": "^7.1.0",
+    "sass-loader": "^7.3.1",
     "ts-jest": "^23.0.0",
-    "typescript": "^3.0.0",
+    "typescript": "^3.7.5",
-    "vue-cli-plugin-webpack-bundle-analyzer": "^1.3.0",
+    "vue-cli-plugin-webpack-bundle-analyzer": "^1.4.0",
-    "vue-template-compiler": "^2.5.21"
+    "vue-template-compiler": "^2.6.11"
   },
   "gitHooks": {
     "pre-commit": "lint-staged"


@ -2,6 +2,7 @@
   <div id="app">
     <NavBar></NavBar>
     <router-view class=main />
+    <span id="personal-measure-version" hidden>{{ version }}</span>
   </div>
 </template>
 <script lang="ts" src="./app.ts"></script>


@ -13,6 +13,7 @@ const logger = logService.getLogger('/app');
 })
 export default class App extends Vue {
+  public version = process.env.PM_VERSION;
   private apiLogAppender!: ApiLogAppender;
   private consoleLogAppender!: ConsoleLogAppender;


@ -12,21 +12,24 @@ export class SimpleDetails extends Vue {
   @Prop() private measure!: Measure<MeasureConfig>;
   @Prop() private measurements!: Array<Measurement<MeasurementMeta>>;
-  private newMeasurement;
+  // private newMeasurement;
   private moment = moment;
   private chartOptions = {
+    markers: { size: 6 },
     noData: { text: 'no data',
               style: { fontSize: '18px' } },
-    stroke: { curve: 'smooth' },
+    stroke: { curve: 'straight' },
     xaxis: { type: 'datetime' }
   };
   private get measurementChartData(): ApexAxisChartSeries {
-    const measurementData = this.measurements || [];
+    const measurementData = this.measurements.slice() || [];
     return [{
       name: this.measure.name,
-      data: measurementData.map((m) => ({ x: m.timestamp.toISOString(), y: m.value }))
+      data: measurementData
+        .sort((a, b) => a.timestamp.getTime() - b.timestamp.getTime())
+        .map((m) => ({ x: m.timestamp.toISOString(), y: m.value }))
     }];
   }


@ -1,5 +1,5 @@
 <template>
-  <div v-if="measure.config.isVisible" class="measure-summary" :data-name="'measure-' + measure.slug">
+  <div v-if="measure.config.isVisible" v-bind:key="measure.slug" class="measure-summary" :data-name="'measure-' + measure.slug">
     <h2><router-link
       :to="'/measures/' + measure.slug">
       {{measure.name}}</router-link></h2>


@ -9,18 +9,20 @@ export class SimpleSummaryGraph extends Vue {
   private chartOptions = {
     chart: { sparkline: { enabled: true } },
     grid: { padding: { top: 20 }},
-    stroke: { curve: 'smooth' },
+    stroke: { curve: 'straight' },
     noData: { text: 'no data',
               style: { fontSize: '18px' } },
     xaxis: { type: 'datetime' }
   };
   private get measurementData(): ApexAxisChartSeries {
-    const measurementData = this.measurements || [];
+    const measurementData = this.measurements.slice() || [];
     return [{
       name: this.measure.name,
-      data: measurementData.map((m) => ({ x: m.timestamp.toISOString(), y: m.value }))
+      data: measurementData
+        .sort((a, b) => a.timestamp.getTime() - b.timestamp.getTime())
+        .map((m) => ({ x: m.timestamp.toISOString(), y: m.value }))
     }];
   }
 }


@ -12,7 +12,7 @@
   </div>
   <div>
     <label for=measurementValue>{{measure.name}}</label>
-    <input required type=number v-model=value.value :disabled=disabled />
+    <input name=measurementValue required type=number step=any v-model.number=value.value :disabled=disabled />
   </div>
 </fieldset>
 </template>


@ -5,7 +5,7 @@ import { Measure, MeasureConfig, MeasureType, Measurement, MeasurementMeta } fro
 export class SimpleEntry extends Vue {
   @Prop() public measure!: Measure<MeasureConfig>;
   @Prop() public value!: Measurement<MeasurementMeta>;
-  @Prop() public disabled: boolean = false;
+  @Prop() public disabled!: boolean;
   private editTimestamp: boolean = false;
   @Watch('value', { immediate: true, deep: true })


@ -1,6 +1,5 @@
 import { LogLevel } from './log-message';
 import Logger from './logger';
-import { default as Axios, AxiosInstance } from 'axios';
 const ROOT_LOGGER_NAME = 'ROOT';
@ -8,7 +7,6 @@ const ROOT_LOGGER_NAME = 'ROOT';
 export class LogService {
   private loggers: { [key: string]: Logger };
-  private http: AxiosInstance = Axios.create();
   public get ROOT_LOGGER() {
     return this.loggers[ROOT_LOGGER_NAME];


@ -32,7 +32,7 @@ export class AuthStoreModule extends VuexModule {
     // this should be guaranteed by the server (redirect HTTP -> HTTPS)
     // but we'll do a sanity check just to make sure.
     if (window.location.protocol === 'https:' ||
-        process.env.NODE_ENV === 'development') { // allow in dev
+        process.env.NODE_ENV === 'development') { // allow http in dev
       localStorage.setItem(SESSION_KEY, authToken);
     }


@ -54,7 +54,7 @@ export class MeasurementStoreModule extends VuexModule {
     const newMeasurements = existing.slice();
     const index = findIndex(existing, { id: measurement.id });
-    if (index > 0) { newMeasurements.push(measurement); }
+    if (index < 0) { newMeasurements.push(measurement); }
     else { newMeasurements[index] = measurement; }
     this.measurements = assign({}, this.measurements, { [measure.id]: newMeasurements });
   }


@ -1,5 +1,7 @@
 .user-account {
+  justify-content: flex-start;
   section {
-    margin-top: 1rem;
+    margin-top: 2rem;
   }
 }


@ -1,7 +1,14 @@
+const merge = require('deepmerge');
+const VERSION = {
+  'process.env': {
+    PM_VERSION: JSON.stringify(require('./package.json').version)
+  }
+};
 module.exports = {
   devServer: {
     proxy: {
-      '/api': { target: 'http://localhost:8081' }
+      '/v0': { target: 'http://localhost:8081' }
     },
     host: 'localhost',
     disableHostCheck: true
@ -17,5 +24,11 @@ module.exports = {
       analyzerMode: 'static',
       openAnalyzer: false
     }
-  }
+  },
+  chainWebpack: config => {
+    config
+      .plugin('define')
+      .tap(args => merge(args, [VERSION]))
+  }
 };