22 Commits
0.3.0 ... 0.7.0

SHA1 Message Date
efb86cf6ce Update package version to 0.7.0 2020-02-09 04:25:21 -06:00
c6863293c5 api: Use released version of fiber_orm (not local). 2020-02-09 04:25:09 -06:00
ce582383c3 api: Change root application path to '/v0' instead of '/api'. 2020-02-09 04:25:09 -06:00
31053c1014 Add clean target to Makefile. 2020-02-09 04:25:09 -06:00
f5b891b966 update-version.sh should include changes to package-lock.json. 2020-02-09 04:18:38 -06:00
74b8a42d29 web: Sort data in simple measure graphs. 2020-02-09 04:16:26 -06:00
8af6c65c9b api: Add support for necessary CORS headers. 2020-02-09 04:16:15 -06:00
e14097117f api: Add nginx configuration for OPTIONS CORS support. 2020-02-09 04:15:43 -06:00
c6d8d14a1f Add update-version.sh convenience script. 2020-02-09 04:10:10 -06:00
ff3c1cf04e Clean up logging service. 2020-02-09 03:41:22 -06:00
8ac1cdf476 Fix environment-specific builds. 2020-02-09 03:40:49 -06:00
c28eb7b240 Update operations documentation. 2020-02-09 03:16:03 -06:00
31326d40c8 web: Update dependencies (npm audit fix). 2020-02-09 03:16:03 -06:00
716f09681c Move Terraform state into S3 (using DynamoDB for locking). 2020-02-09 03:16:03 -06:00
ead77534ce api: Extract database common code into its own library (fiber-orm). 2020-02-09 03:15:58 -06:00
c5daa76102 web: Parameterize build process with env-dependent config files. 2020-02-09 00:30:38 -06:00
cfd5463b4d Add CDN cache invalidation to deploy scripts. 2019-09-25 10:48:46 -05:00
5c81d756df api: Refactor so all endpoints are CORS-aware. 2019-09-25 08:15:08 -05:00
cf60793395 api: WIP CORS support 2019-09-24 22:43:23 -05:00
0a8f701c3c Created terraform configuration to manage AWS infrastructure. 2019-09-24 22:40:08 -05:00
a4b798cec4 Bump version. 2019-05-19 18:31:33 -05:00
5f257e9b4a Target dev environment by default for deploys. 2019-05-19 18:30:53 -05:00
34 changed files with 792 additions and 808 deletions

.gitignore (vendored): 6 changed lines

@@ -26,3 +26,9 @@ yarn-error.log*
*.njsproj
*.sln
*.sw?
# Terraform files
.terraform/
# API Testing Files
api/temp/


@@ -1,8 +1,13 @@
VERSION:=$(shell git describe --always)
TARGET_ENV=prod
TARGET_ENV ?= dev
build: dist/personal-measure-api.tar.gz dist/personal-measure-web.tar.gz
clean:
-rm -r dist
-rm api/personal_measure_api
-rm -r web/dist
dist/personal-measure-api.tar.gz:
-mkdir dist
make -C api personal_measure_api
@@ -11,7 +16,7 @@ dist/personal-measure-api.tar.gz:
dist/personal-measure-web.tar.gz:
-mkdir dist
(cd web && npm run build)
TARGET_ENV=$(TARGET_ENV) make -C web build
tar czf dist/personal-measure-web-${VERSION}.tar.gz -C web/dist .
cp dist/personal-measure-web-${VERSION}.tar.gz dist/personal-measure-web.tar.gz
@@ -27,6 +32,7 @@ deploy-web: dist/personal-measure-web.tar.gz
mkdir -p temp-deploy/personal-measure-web-${VERSION}
tar xzf dist/personal-measure-web-${VERSION}.tar.gz -C temp-deploy/personal-measure-web-${VERSION}
aws s3 sync temp-deploy/personal-measure-web-${VERSION} s3://pm.jdb-labs.com/$(TARGET_ENV)/webroot
TARGET_ENV=${TARGET_ENV} operations/invalidate-cdn-cache.sh
rm -r temp-deploy
deploy: deploy-api deploy-web
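A quick sketch of how these targets are meant to be driven now that the environment is parameterized (the prod invocation is an assumption based on the new TARGET_ENV ?= dev default):

# Build both artifacts and deploy to the default (dev) environment.
make build
make deploy

# Deploy to prod by overriding the TARGET_ENV default.
TARGET_ENV=prod make deploy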


@@ -3,5 +3,6 @@
"dbConnString":"host=localhost port=5500 dbname=personal_measure user=postgres password=password",
"debug":true,
"port":8081,
"pwdCost":11
"pwdCost":11,
"knownOrigins": [ "https://curl.localhost" ]
}


@@ -1,5 +1,6 @@
{
"debug":false,
"port":80,
"pwdCost":11
"pwdCost":11,
"knownOrigins": [ "https://pm.jdb-labs.com" ]
}


@@ -14,7 +14,8 @@ skipExt = @["nim"]
# Dependencies
requires @["nim >= 0.19.4", "bcrypt", "docopt >= 0.6.8", "isaac >= 0.1.3",
"jester >= 0.4.1", "jwt", "tempfile", "uuids >= 0.1.10" ]
"jester >= 0.4.3", "jwt", "tempfile", "uuids >= 0.1.10" ]
requires "https://git.jdb-labs.com/jdb/nim-cli-utils.git >= 0.6.3"
requires "https://git.jdb-labs.com/jdb/nim-time-utils.git >= 0.5.0"
requires "https://git.jdb-labs.com/jdb-labs/fiber-orm-nim.git >= 0.2.0"


@@ -34,6 +34,10 @@ proc loadConfig*(args: Table[string, docopt.Value] = initTable[string, docopt.Va
warn "Cannot read configuration file \"" & filePath & "\":\n\t" &
getCurrentExceptionMsg()
let knownOriginsArray =
if json.hasKey("knownOrigins"): json["knownOrigins"]
else: newJArray()
let cfg = CombinedConfig(docopt: args, json: json)
result = PMApiConfig(
@@ -41,8 +45,9 @@ proc loadConfig*(args: Table[string, docopt.Value] = initTable[string, docopt.Va
dbConnString: cfg.getVal("db-conn-string"),
debug: "true".startsWith(cfg.getVal("debug", "false").toLower()),
port: parseInt(cfg.getVal("port", "8080")),
pwdCost: cast[int8](parseInt(cfg.getVal("pwd-cost", "11"))))
pwdCost: cast[int8](parseInt(cfg.getVal("pwd-cost", "11"))),
knownOrigins: toSeq(knownOriginsArray).mapIt(it.getStr))
proc initContext(args: Table[string, docopt.Value]): PMApiContext =
var cfg: PMApiConfig


@@ -20,7 +20,7 @@ proc newSession*(user: User): Session =
template halt(code: HttpCode,
headers: RawHeaders,
content: string): typed =
content: string) =
## Immediately replies with the specified request. This means any further
## code will not be executed after calling this template in the current
## route.
@@ -32,24 +32,42 @@ template halt(code: HttpCode,
result.matched = true
break allRoutes
template jsonResp(code: HttpCode, details: string = "", headers: RawHeaders = @{:} ) =
template jsonResp(code: HttpCode, body: string = "", headersToSend: RawHeaders = @{:} ) =
let reqOrigin =
if request.headers.hasKey("Origin"): $(request.headers["Origin"])
else: ""
let corsHeaders =
if ctx.cfg.knownOrigins.contains(reqOrigin):
@{
"Access-Control-Allow-Origin": reqOrigin,
"Access-Control-Allow-Credentials": "true",
"Access-Control-Allow-Methods": $(request.reqMethod),
"Access-Control-Allow-Headers": "DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization"
}
else: @{:}
halt(
code,
headers & @{
headersToSend & corsHeaders & @{
"Content-Type": JSON,
"Cache-Control": "no-cache"
},
body
)
template jsonResp(body: string) = jsonResp(Http200, body)
template statusResp(code: HttpCode, details: string = "", headersToSend: RawHeaders = @{:} ) =
jsonResp(
code,
$(%* {
"statusCode": code.int,
"status": $code,
"details": details
})
)
template json500Resp(ex: ref Exception, details: string = ""): void =
when not defined(release): debug ex.getStackTrace()
error details & ":\n" & ex.msg
jsonResp(Http500)
}),
headersToSend)
# internal JSON parsing utils
proc getIfExists(n: JsonNode, key: string): JsonNode =
@@ -176,10 +194,10 @@ template checkAuth(requiresAdmin = false) =
try: session = extractSession(ctx, request)
except:
debug "Auth failed: " & getCurrentExceptionMsg()
jsonResp(Http401, "Unauthorized", @{"WWW-Authenticate": "Bearer"})
statusResp(Http401, "Unauthorized", @{"WWW-Authenticate": "Bearer"})
if requiresAdmin and not session.user.isAdmin:
jsonResp(Http401, "Unauthorized", @{"WWW-Authenticate": "Bearer"})
statusResp(Http401, "Unauthorized", @{"WWW-Authenticate": "Bearer"})
proc start*(ctx: PMApiContext): void =
@@ -189,12 +207,12 @@ proc start*(ctx: PMApiContext): void =
settings:
port = Port(ctx.cfg.port)
appName = "/api"
appName = "/v0"
routes:
get "/version":
resp($(%("personal_measure_api v" & PM_API_VERSION)), JSON)
jsonResp($(%("personal_measure_api v" & PM_API_VERSION)))
post "/auth-token":
@@ -203,9 +221,9 @@ proc start*(ctx: PMApiContext): void =
let email = jsonBody.getOrFail("email").getStr
let pwd = jsonBody.getOrFail("password").getStr
let authToken = makeAuthToken(ctx, email, pwd)
resp($(%authToken), JSON)
except JsonParsingError: jsonResp(Http400, getCurrentExceptionMsg())
except: jsonResp(Http401, getCurrentExceptionMsg())
jsonResp($(%authToken))
except JsonParsingError: statusResp(Http400, getCurrentExceptionMsg())
except: statusResp(Http401, getCurrentExceptionMsg())
post "/change-pwd":
checkAuth()
@@ -218,15 +236,15 @@ proc start*(ctx: PMApiContext): void =
let newHash = hashWithSalt(jsonBody.getOrFail("newPassword").getStr, session.user.salt)
session.user.hashedPwd = newHash.hash
if ctx.db.updateUser(session.user): jsonResp(Http200)
else: jsonResp(Http500, "unable to change pwd")
if ctx.db.updateUser(session.user): statusResp(Http200)
else: statusResp(Http500, "unable to change pwd")
except JsonParsingError: jsonResp(Http400, getCurrentExceptionMsg())
except BadRequestError: jsonResp(Http400, getCurrentExceptionMsg())
except AuthError: jsonResp(Http401, getCurrentExceptionMsg())
except JsonParsingError: statusResp(Http400, getCurrentExceptionMsg())
except BadRequestError: statusResp(Http400, getCurrentExceptionMsg())
except AuthError: statusResp(Http401, getCurrentExceptionMsg())
except:
error "internal error changing password: " & getCurrentExceptionMsg()
jsonResp(Http500)
statusResp(Http500)
post "/change-pwd/@userId":
checkAuth(true)
@@ -237,22 +255,22 @@ proc start*(ctx: PMApiContext): void =
var user = ctx.db.getUser(parseUUID(@"userId"))
let newHash = hashWithSalt(jsonBody.getOrFail("newPassword").getStr, user.salt)
user.hashedPwd = newHash.hash
if ctx.db.updateUser(user): jsonResp(Http200)
else: jsonResp(Http500, "unable to change pwd")
if ctx.db.updateUser(user): statusResp(Http200)
else: statusResp(Http500, "unable to change pwd")
except ValueError: jsonResp(Http400, "invalid UUID")
except JsonParsingError: jsonResp(Http400, getCurrentExceptionMsg())
except BadRequestError: jsonResp(Http400, getCurrentExceptionMsg())
except AuthError: jsonResp(Http401, getCurrentExceptionMsg())
except NotFoundError: jsonResp(Http404, "no such user")
except ValueError: statusResp(Http400, "invalid UUID")
except JsonParsingError: statusResp(Http400, getCurrentExceptionMsg())
except BadRequestError: statusResp(Http400, getCurrentExceptionMsg())
except AuthError: statusResp(Http401, getCurrentExceptionMsg())
except NotFoundError: statusResp(Http404, "no such user")
except:
error "internal error changing password: " & getCurrentExceptionMsg()
jsonResp(Http500)
statusResp(Http500)
get "/user":
checkAuth()
resp(Http200, $(%session.user), JSON)
jsonResp($(%session.user))
put "/user":
checkAuth()
@@ -265,18 +283,18 @@ proc start*(ctx: PMApiContext): void =
if jsonBody.hasKey("displayName"):
updatedUser.displayName = jsonBody["displayName"].getStr()
jsonResp(Http200, $(%ctx.db.updateUser(updatedUser)))
statusResp(Http200, $(%ctx.db.updateUser(updatedUser)))
except JsonParsingError: jsonResp(Http400, getCurrentExceptionMsg())
except BadRequestError: jsonResp(Http400, getCurrentExceptionMsg())
except JsonParsingError: statusResp(Http400, getCurrentExceptionMsg())
except BadRequestError: statusResp(Http400, getCurrentExceptionMsg())
except:
error "Could not update user information:\n\t" & getCurrentExceptionMsg()
jsonResp(Http500)
statusResp(Http500)
get "/users":
checkAuth(true)
resp(Http200, $(%ctx.db.getAllUsers()))
jsonResp($(%ctx.db.getAllUsers()))
post "/users":
checkAuth(true)
@@ -293,18 +311,18 @@ proc start*(ctx: PMApiContext): void =
salt: pwdAndSalt.salt,
isAdmin: false)
resp($(%ctx.db.createUser(newUser)), JSON)
jsonResp($(%ctx.db.createUser(newUser)))
except JsonParsingError: jsonResp(Http400, getCurrentExceptionMsg())
except BadRequestError: jsonResp(Http400, getCurrentExceptionMsg())
except JsonParsingError: statusResp(Http400, getCurrentExceptionMsg())
except BadRequestError: statusResp(Http400, getCurrentExceptionMsg())
except:
error "Could not create new user:\n\t" & getCurrentExceptionMsg()
jsonResp(Http500)
statusResp(Http500)
get "/users/@userId":
checkAuth(true)
resp(Http200, $(%ctx.db.getUser(parseUUID(@"userId"))))
jsonResp($(%ctx.db.getUser(parseUUID(@"userId"))))
delete "/users/@userId":
checkAuth(true)
@@ -313,18 +331,18 @@ proc start*(ctx: PMApiContext): void =
try:
let userId = parseUUID(@"userId")
user = ctx.db.getUser(userId)
except: jsonResp(Http404)
except: statusResp(Http404)
try:
if not ctx.db.deleteUser(user): raiseEx "unable to delete user"
jsonResp(Http200, "user " & user.email & " deleted")
statusResp(Http200, "user " & user.email & " deleted")
except: jsonResp(Http500, getCurrentExceptionMsg())
except: statusResp(Http500, getCurrentExceptionMsg())
get "/api-tokens":
checkAuth()
resp(Http200, $(%ctx.db.findApiTokensByUserId($session.user.id)))
jsonResp($(%ctx.db.findApiTokensByUserId($session.user.id)))
post "/api-tokens":
checkAuth()
@@ -346,40 +364,40 @@ proc start*(ctx: PMApiContext): void =
let respToken = %newToken
respToken["value"] = %tokenValue
resp($respToken, JSON)
jsonResp($respToken)
except JsonParsingError: jsonResp(Http400, getCurrentExceptionMsg())
except BadRequestError: jsonResp(Http400, getCurrentExceptionMsg())
except AuthError: jsonResp(Http401, getCurrentExceptionMsg())
except JsonParsingError: statusResp(Http400, getCurrentExceptionMsg())
except BadRequestError: statusResp(Http400, getCurrentExceptionMsg())
except AuthError: statusResp(Http401, getCurrentExceptionMsg())
except:
debug getCurrentExceptionMsg()
jsonResp(Http500)
statusResp(Http500)
get "/api-tokens/@tokenId":
checkAuth()
try:
resp(Http200, $(%ctx.db.getApiToken(parseUUID(@"tokenId"))))
except NotFoundError: jsonResp(Http404, getCurrentExceptionMsg())
except: jsonResp(Http500)
jsonResp($(%ctx.db.getApiToken(parseUUID(@"tokenId"))))
except NotFoundError: statusResp(Http404, getCurrentExceptionMsg())
except: statusResp(Http500)
delete "/api-tokens/@tokenId":
checkAuth()
try:
let token = ctx.db.getApiToken(parseUUID(@"tokenId"))
if ctx.db.deleteApiToken(token): jsonResp(Http200)
else: jsonResp(Http500)
except NotFoundError: jsonResp(Http404, getCurrentExceptionMsg())
except: jsonResp(Http500)
if ctx.db.deleteApiToken(token): statusResp(Http200)
else: statusResp(Http500)
except NotFoundError: statusResp(Http404, getCurrentExceptionMsg())
except: statusResp(Http500)
get "/measures":
checkAuth()
try: resp($(%ctx.db.findMeasuresByUserId($session.user.id)), JSON)
try: jsonResp($(%ctx.db.findMeasuresByUserId($session.user.id)))
except:
error "unable to retrieve measures for user:\n\t" & getCurrentExceptionMsg()
jsonResp(Http500)
statusResp(Http500)
post "/measures":
checkAuth()
@@ -409,45 +427,45 @@ proc start*(ctx: PMApiContext): void =
description: jsonBody.getIfExists("description").getStr(""),
config: config)
resp($(%ctx.db.createMeasure(newMeasure)), JSON)
jsonResp($(%ctx.db.createMeasure(newMeasure)))
except JsonParsingError: jsonResp(Http400, getCurrentExceptionMsg())
except BadRequestError: jsonResp(Http400, getCurrentExceptionMsg())
except JsonParsingError: statusResp(Http400, getCurrentExceptionMsg())
except BadRequestError: statusResp(Http400, getCurrentExceptionMsg())
except:
error "unable to create new measure:\n\t" & getCurrentExceptionMsg()
jsonResp(Http500)
statusResp(Http500)
get "/measures/@slug":
checkAuth()
try: resp($(%ctx.getMeasureForSlug(session.user.id, @"slug")), JSON)
except NotFoundError: jsonResp(Http404, getCurrentExceptionMsg())
try: jsonResp($(%ctx.getMeasureForSlug(session.user.id, @"slug")))
except NotFoundError: statusResp(Http404, getCurrentExceptionMsg())
except:
error "unable to look up a measure by id:\n\t" & getCurrentExceptionMsg()
jsonResp(Http500)
statusResp(Http500)
delete "/measures/@slug":
checkAuth()
try:
let measure = ctx.getMeasureForSlug(session.user.id, @"slug")
if ctx.db.deleteMeasure(measure): jsonResp(Http200)
if ctx.db.deleteMeasure(measure): statusResp(Http200)
else: raiseEx ""
except NotFoundError: jsonResp(Http404, getCurrentExceptionMsg())
except NotFoundError: statusResp(Http404, getCurrentExceptionMsg())
except:
error "unable to delete a measure:\n\t" & getCurrentExceptionMsg()
jsonResp(Http500)
statusResp(Http500)
get "/measure/@slug":
checkAuth()
try:
let measure = ctx.getMeasureForSlug(session.user.id, @"slug")
resp($(%ctx.db.findMeasurementsByMeasureId($measure.id)), JSON)
except NotFoundError: jsonResp(Http404, getCurrentExceptionMsg())
jsonResp($(%ctx.db.findMeasurementsByMeasureId($measure.id)))
except NotFoundError: statusResp(Http404, getCurrentExceptionMsg())
except:
error "unable to list measurements:\n\t" & getCurrentExceptionMsg()
jsonResp(Http500)
statusResp(Http500)
post "/measure/@slug":
checkAuth()
@@ -466,29 +484,29 @@ proc start*(ctx: PMApiContext): void =
if jsonBody.hasKey("extData"): jsonBody["extData"]
else: newJObject())
resp($(%ctx.db.createMeasurement(newMeasurement)), JSON)
jsonResp($(%ctx.db.createMeasurement(newMeasurement)))
except JsonParsingError: jsonResp(Http400, getCurrentExceptionMsg())
except BadRequestError: jsonResp(Http400, getCurrentExceptionMsg())
except NotFoundError: jsonResp(Http404, getCurrentExceptionMsg())
except JsonParsingError: statusResp(Http400, getCurrentExceptionMsg())
except BadRequestError: statusResp(Http400, getCurrentExceptionMsg())
except NotFoundError: statusResp(Http404, getCurrentExceptionMsg())
except:
error "unable to add measurement:\n\t" & getCurrentExceptionMsg()
jsonResp(Http500)
statusResp(Http500)
get "/measure/@slug/@id":
checkAuth()
try:
let measure = ctx.getMeasureForSlug(session.user.id, @"slug")
resp($(%ctx.getMeasurementForMeasure(measure.id, parseUUID(@"id"))), JSON)
jsonResp($(%ctx.getMeasurementForMeasure(measure.id, parseUUID(@"id"))))
except ValueError: jsonResp(Http400, getCurrentExceptionMsg())
except JsonParsingError: jsonResp(Http400, getCurrentExceptionMsg())
except BadRequestError: jsonResp(Http400, getCurrentExceptionMsg())
except NotFoundError: jsonResp(Http404, getCurrentExceptionMsg())
except ValueError: statusResp(Http400, getCurrentExceptionMsg())
except JsonParsingError: statusResp(Http400, getCurrentExceptionMsg())
except BadRequestError: statusResp(Http400, getCurrentExceptionMsg())
except NotFoundError: statusResp(Http404, getCurrentExceptionMsg())
except:
error "unable to retrieve measurement:\n\t" & getCurrentExceptionMsg()
jsonResp(Http500)
statusResp(Http500)
put "/measure/@slug/@id":
checkAuth()
@@ -500,15 +518,15 @@ proc start*(ctx: PMApiContext): void =
if jsonBody.hasKey("value"): measurement.value = jsonBody["value"].getInt
if jsonBody.hasKey("timestamp"): measurement.timestamp = jsonBody["timestamp"].getStr.parseIso8601
if jsonBody.hasKey("extData"): measurement.extData = jsonBody["extData"]
resp($(%ctx.db.updateMeasurement(measurement)), JSON)
jsonResp($(%ctx.db.updateMeasurement(measurement)))
except ValueError: jsonResp(Http400, getCurrentExceptionMsg())
except JsonParsingError: jsonResp(Http400, getCurrentExceptionMsg())
except BadRequestError: jsonResp(Http400, getCurrentExceptionMsg())
except NotFoundError: jsonResp(Http404, getCurrentExceptionMsg())
except ValueError: statusResp(Http400, getCurrentExceptionMsg())
except JsonParsingError: statusResp(Http400, getCurrentExceptionMsg())
except BadRequestError: statusResp(Http400, getCurrentExceptionMsg())
except NotFoundError: statusResp(Http404, getCurrentExceptionMsg())
except:
error "unable to retrieve measurement:\n\t" & getCurrentExceptionMsg()
jsonResp(Http500)
statusResp(Http500)
delete "/measure/@slug/@id":
checkAuth()
@@ -516,16 +534,16 @@ proc start*(ctx: PMApiContext): void =
try:
let measure = ctx.getMeasureForSlug(session.user.id, @"slug")
let measurement = ctx.getMeasurementForMeasure(measure.id, parseUUID(@"id"))
if ctx.db.deleteMeasurement(measurement): jsonResp(Http200)
if ctx.db.deleteMeasurement(measurement): statusResp(Http200)
else: raiseEx ""
except ValueError: jsonResp(Http400, getCurrentExceptionMsg())
except JsonParsingError: jsonResp(Http400, getCurrentExceptionMsg())
except BadRequestError: jsonResp(Http400, getCurrentExceptionMsg())
except NotFoundError: jsonResp(Http404, getCurrentExceptionMsg())
except ValueError: statusResp(Http400, getCurrentExceptionMsg())
except JsonParsingError: statusResp(Http400, getCurrentExceptionMsg())
except BadRequestError: statusResp(Http400, getCurrentExceptionMsg())
except NotFoundError: statusResp(Http404, getCurrentExceptionMsg())
except:
error "unable to delete measurement:\n\t" & getCurrentExceptionMsg()
jsonResp(Http500)
statusResp(Http500)
post "/log":
checkAuth()
@@ -540,9 +558,9 @@ proc start*(ctx: PMApiContext): void =
stacktrace: jsonBody.getIfExists("stacktrace").getStr(""),
timestamp: jsonBody.getOrFail("timestamp").getStr.parseIso8601
)
resp(Http200, $(%ctx.db.createClientLogEntry(logEntry)), JSON)
except BadRequestError: jsonResp(Http400, getCurrentExceptionMsg())
except: jsonResp(Http500, getCurrentExceptionMsg())
jsonResp($(%ctx.db.createClientLogEntry(logEntry)))
except BadRequestError: statusResp(Http400, getCurrentExceptionMsg())
except: statusResp(Http500, getCurrentExceptionMsg())
post "/log/batch":
checkAuth()
@@ -558,15 +576,15 @@ proc start*(ctx: PMApiContext): void =
stacktrace: it.getIfExists("stacktrace").getStr(""),
timestamp: it.getOrFail("timestamp").getStr.parseIso8601
))
resp(Http200, $(%respMsgs), JSON)
except BadRequestError: jsonResp(Http400, getCurrentExceptionMsg())
except: jsonResp(Http500, getCurrentExceptionMsg())
jsonResp($(%respMsgs))
except BadRequestError: statusResp(Http400, getCurrentExceptionMsg())
except: statusResp(Http500, getCurrentExceptionMsg())
post "/service/debug/stop":
if not ctx.cfg.debug: jsonResp(Http404)
if not ctx.cfg.debug: statusResp(Http404)
else:
let shutdownFut = sleepAsync(100)
shutdownFut.callback = proc(): void = complete(stopFuture)
resp($(%"shutting down"), JSON)
jsonResp($(%"shutting down"))
waitFor(stopFuture)
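The practical effect of the jsonResp/statusResp split is that errors now come back as a JSON status document, with CORS headers attached whenever the caller's Origin is in knownOrigins. A rough way to see this against a local dev instance (a sketch; the response layout shown in the comments is assumed, not copied from a real run):

# Unauthenticated request from a known origin (the dev config allows https://curl.localhost).
curl -i -H 'Origin: https://curl.localhost' http://localhost:8081/v0/user

# Expected shape of the reply (approximate):
#   HTTP/1.1 401 Unauthorized
#   WWW-Authenticate: Bearer
#   Access-Control-Allow-Origin: https://curl.localhost
#   Access-Control-Allow-Credentials: true
#   Content-Type: application/json
#   {"statusCode":401,"status":"401 Unauthorized","details":"Unauthorized"}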


@@ -9,6 +9,7 @@ type
debug*: bool
port*: int
pwdCost*: int8
knownOrigins*: seq[string]
PMApiContext* = object
cfg*: PMApiConfig
@@ -23,7 +24,8 @@ proc `%`*(cfg: PMApiConfig): JsonNode =
"dbConnString": cfg.dbConnString,
"debug": cfg.debug,
"port": cfg.port,
"pwdCost": cfg.pwdCost }
"pwdCost": cfg.pwdCost,
"knownOrigins": cfg.knownOrigins }
template raiseEx*(errorType: type, reason: string): void =
raise newException(errorType, reason)


@@ -1,10 +1,8 @@
import db_postgres, macros, options, postgres, sequtils, strutils,
times, timeutils, unicode, uuids
import db_postgres, fiber_orm, uuids
import ./models
import ./db_common
export db_common.NotFoundError
export fiber_orm.NotFoundError
type
PMApiDb* = ref object
@@ -14,18 +12,24 @@ type
proc connect*(connString: string): PMApiDb =
result = PMApiDb(conn: open("", "", "", connString))
generateProcsForModels([User, ApiToken, Measure, Measurement, ClientLogEntry])
generateProcsForModels(PMApiDb, [
User,
ApiToken,
Measure,
Measurement,
ClientLogEntry
])
generateLookup(User, @["email"])
generateLookup(PMApiDb, User, @["email"])
generateLookup(ApiToken, @["userId"])
generateLookup(ApiToken, @["hashedToken"])
generateLookup(PMApiDb, ApiToken, @["userId"])
generateLookup(PMApiDb, ApiToken, @["hashedToken"])
generateLookup(Measure, @["userId"])
generateLookup(Measure, @["userId", "id"])
generateLookup(Measure, @["userId", "slug"])
generateLookup(PMApiDb, Measure, @["userId"])
generateLookup(PMApiDb, Measure, @["userId", "id"])
generateLookup(PMApiDb, Measure, @["userId", "slug"])
generateLookup(Measurement, @["measureId"])
generateLookup(Measurement, @["measureId", "id"])
generateLookup(PMApiDb, Measurement, @["measureId"])
generateLookup(PMApiDb, Measurement, @["measureId", "id"])
generateLookup(ClientLogEntry, @["userId"])
generateLookup(PMApiDb, ClientLogEntry, @["userId"])


@@ -1,150 +0,0 @@
import db_postgres, macros, options, sequtils, strutils, uuids
from unicode import capitalize
import ./db_util
type NotFoundError* = object of CatchableError
proc newMutateClauses(): MutateClauses =
return MutateClauses(
columns: @[],
placeholders: @[],
values: @[])
proc createRecord*[T](db: DbConn, rec: T): T =
var mc = newMutateClauses()
populateMutateClauses(rec, true, mc)
# Confusingly, getRow allows inserts and updates. We use it to get back the ID
# we want from the row.
let newRow = db.getRow(sql(
"INSERT INTO " & tableName(rec) &
" (" & mc.columns.join(",") & ") " &
" VALUES (" & mc.placeholders.join(",") & ") " &
" RETURNING *"), mc.values)
result = rowToModel(T, newRow)
proc updateRecord*[T](db: DbConn, rec: T): bool =
var mc = newMutateClauses()
populateMutateClauses(rec, false, mc)
let setClause = zip(mc.columns, mc.placeholders).mapIt(it.a & " = " & it.b).join(",")
let numRowsUpdated = db.execAffectedRows(sql(
"UPDATE " & tableName(rec) &
" SET " & setClause &
" WHERE id = ? "), mc.values.concat(@[$rec.id]))
return numRowsUpdated > 0;
template deleteRecord*(db: DbConn, modelType: type, id: typed): untyped =
db.tryExec(sql("DELETE FROM " & tableName(modelType) & " WHERE id = ?"), $id)
proc deleteRecord*[T](db: DbConn, rec: T): bool =
return db.tryExec(sql("DELETE FROM " & tableName(rec) & " WHERE id = ?"), $rec.id)
template getRecord*(db: DbConn, modelType: type, id: typed): untyped =
let row = db.getRow(sql(
"SELECT " & columnNamesForModel(modelType).join(",") &
" FROM " & tableName(modelType) &
" WHERE id = ?"), @[$id])
if row.allIt(it.len == 0):
raise newException(NotFoundError, "no record for id " & $id)
rowToModel(modelType, row)
template findRecordsWhere*(db: DbConn, modelType: type, whereClause: string, values: varargs[string, dbFormat]): untyped =
db.getAllRows(sql(
"SELECT " & columnNamesForModel(modelType).join(",") &
" FROM " & tableName(modelType) &
" WHERE " & whereClause), values)
.mapIt(rowToModel(modelType, it))
template getAllRecords*(db: DbConn, modelType: type): untyped =
db.getAllRows(sql(
"SELECT " & columnNamesForModel(modelType).join(",") &
" FROM " & tableName(modelType)))
.mapIt(rowToModel(modelType, it))
template findRecordsBy*(db: DbConn, modelType: type, lookups: seq[tuple[field: string, value: string]]): untyped =
db.getAllRows(sql(
"SELECT " & columnNamesForModel(modelType).join(",") &
" FROM " & tableName(modelType) &
" WHERE " & lookups.mapIt(it.field & " = ?").join(" AND ")),
lookups.mapIt(it.value))
.mapIt(rowToModel(modelType, it))
macro generateProcsForModels*(modelTypes: openarray[type]): untyped =
result = newStmtList()
for t in modelTypes:
let modelName = $(t.getType[1])
let getName = ident("get" & modelName)
let getAllName = ident("getAll" & modelName & "s")
let findWhereName = ident("find" & modelName & "sWhere")
let createName = ident("create" & modelName)
let updateName = ident("update" & modelName)
let deleteName = ident("delete" & modelName)
let idType = typeOfColumn(t, "id")
result.add quote do:
proc `getName`*(db: PMApiDb, id: `idType`): `t` = getRecord(db.conn, `t`, id)
proc `getAllName`*(db: PMApiDb): seq[`t`] = getAllRecords(db.conn, `t`)
proc `findWhereName`*(db: PMApiDb, whereClause: string, values: varargs[string, dbFormat]): seq[`t`] =
return findRecordsWhere(db.conn, `t`, whereClause, values)
proc `createName`*(db: PMApiDb, rec: `t`): `t` = createRecord(db.conn, rec)
proc `updateName`*(db: PMApiDb, rec: `t`): bool = updateRecord(db.conn, rec)
proc `deleteName`*(db: PMApiDb, rec: `t`): bool = deleteRecord(db.conn, rec)
proc `deleteName`*(db: PMApiDb, id: `idType`): bool = deleteRecord(db.conn, `t`, id)
macro generateLookup*(modelType: type, fields: seq[string]): untyped =
let fieldNames = fields[1].mapIt($it)
let procName = ident("find" & $modelType.getType[1] & "sBy" & fieldNames.mapIt(it.capitalize).join("And"))
# Create proc skeleton
result = quote do:
proc `procName`*(db: PMApiDb): seq[`modelType`] =
return findRecordsBy(db.conn, `modelType`)
var callParams = quote do: @[]
# Add dynamic parameters for the proc definition and inner proc call
for n in fieldNames:
let paramTuple = newNimNode(nnkPar)
paramTuple.add(newColonExpr(ident("field"), newLit(identNameToDb(n))))
paramTuple.add(newColonExpr(ident("value"), ident(n)))
result[3].add(newIdentDefs(ident(n), ident("string")))
callParams[1].add(paramTuple)
result[6][0][0].add(callParams)
macro generateProcsForFieldLookups*(modelsAndFields: openarray[tuple[t: type, fields: seq[string]]]): untyped =
result = newStmtList()
for i in modelsAndFields:
var modelType = i[1][0]
let fieldNames = i[1][1][1].mapIt($it)
let procName = ident("find" & $modelType & "sBy" & fieldNames.mapIt(it.capitalize).join("And"))
# Create proc skeleton
let procDefAST = quote do:
proc `procName`*(db: PMApiDb): seq[`modelType`] =
return findRecordsBy(db.conn, `modelType`)
var callParams = quote do: @[]
# Add dynamic parameters for the proc definition and inner proc call
for n in fieldNames:
let paramTuple = newNimNode(nnkPar)
paramTuple.add(newColonExpr(ident("field"), newLit(n)))
paramTuple.add(newColonExpr(ident("value"), ident(n)))
procDefAST[3].add(newIdentDefs(ident(n), ident("string")))
callParams[1].add(paramTuple)
procDefAST[6][0][0].add(callParams)
result.add procDefAST


@@ -1,287 +0,0 @@
import json, macros, options, sequtils, strutils, times, timeutils, unicode,
uuids
const UNDERSCORE_RUNE = "_".toRunes[0]
const PG_TIMESTAMP_FORMATS = [
"yyyy-MM-dd HH:mm:sszz",
"yyyy-MM-dd HH:mm:ss'.'fzz",
"yyyy-MM-dd HH:mm:ss'.'ffzz",
"yyyy-MM-dd HH:mm:ss'.'fffzz"
]
type
MutateClauses* = object
columns*: seq[string]
placeholders*: seq[string]
values*: seq[string]
# TODO: more complete implementation
# see https://github.com/blakeembrey/pluralize
proc pluralize(name: string): string =
if name[^2..^1] == "ey": return name[0..^3] & "ies"
if name[^1] == 'y': return name[0..^2] & "ies"
return name & "s"
macro modelName*(model: object): string =
return $model.getTypeInst
macro modelName*(modelType: type): string =
return $modelType.getType[1]
proc identNameToDb*(name: string): string =
let nameInRunes = name.toRunes
var prev: Rune
var resultRunes = newSeq[Rune]()
for cur in nameInRunes:
if resultRunes.len == 0:
resultRunes.add(toLower(cur))
elif isLower(prev) and isUpper(cur):
resultRunes.add(UNDERSCORE_RUNE)
resultRunes.add(toLower(cur))
else: resultRunes.add(toLower(cur))
prev = cur
return $resultRunes
proc dbNameToIdent*(name: string): string =
let parts = name.split("_")
return @[parts[0]].concat(parts[1..^1].mapIt(capitalize(it))).join("")
proc tableName*(modelType: type): string =
return pluralize(modelName(modelType).identNameToDb)
proc tableName*[T](rec: T): string =
return pluralize(modelName(rec).identNameToDb)
proc dbFormat*(s: string): string = return s
proc dbFormat*(dt: DateTime): string = return dt.formatIso8601
proc dbFormat*[T](list: seq[T]): string =
return "{" & list.mapIt(dbFormat(it)).join(",") & "}"
proc dbFormat*[T](item: T): string = return $item
type DbArrayParseState = enum
expectStart, inQuote, inVal, expectEnd
proc parsePGDatetime*(val: string): DateTime =
var errStr = ""
for df in PG_TIMESTAMP_FORMATS:
try: return val.parse(df)
except: errStr &= "\n" & getCurrentExceptionMsg()
raise newException(ValueError, "Cannot parse PG date. Tried:" & errStr)
proc parseDbArray*(val: string): seq[string] =
result = newSeq[string]()
var parseState = DbArrayParseState.expectStart
var curStr = ""
var idx = 1
var sawEscape = false
while idx < val.len - 1:
var curChar = val[idx]
idx += 1
case parseState:
of expectStart:
if curChar == ' ': continue
elif curChar == '"':
parseState = inQuote
continue
else:
parseState = inVal
of expectEnd:
if curChar == ' ': continue
elif curChar == ',':
result.add(curStr)
curStr = ""
parseState = expectStart
continue
of inQuote:
if curChar == '"' and not sawEscape:
parseState = expectEnd
continue
of inVal:
if curChar == '"' and not sawEscape:
raise newException(ValueError, "Invalid DB array value (cannot have '\"' in the middle of an unquoted string).")
elif curChar == ',':
result.add(curStr)
curStr = ""
parseState = expectStart
continue
# if we saw an escaped \", add just the ", otherwise add both
if sawEscape:
if curChar != '"': curStr.add('\\')
curStr.add(curChar)
sawEscape = false
elif curChar == '\\':
sawEscape = true
else: curStr.add(curChar)
if not (parseState == inQuote) and curStr.len > 0:
result.add(curStr)
proc createParseStmt*(t, value: NimNode): NimNode =
#echo "Creating parse statment for ", t.treeRepr
if t.typeKind == ntyObject:
if t.getType == UUID.getType:
result = quote do: parseUUID(`value`)
elif t.getType == DateTime.getType:
result = quote do: parsePGDatetime(`value`)
elif t.getTypeInst == Option.getType:
let innerType = t.getTypeImpl[2][0][0][1]
let parseStmt = createParseStmt(innerType, value)
result = quote do:
if `value`.len == 0: none[`innerType`]()
else: some(`parseStmt`)
else: error "Unknown value object type: " & $t.getTypeInst
elif t.typeKind == ntyRef:
if $t.getTypeInst == "JsonNode":
result = quote do: parseJson(`value`)
else:
error "Unknown ref type: " & $t.getTypeInst
elif t.typeKind == ntySequence:
let innerType = t[1]
let parseStmts = createParseStmt(innerType, ident("it"))
result = quote do: parseDbArray(`value`).mapIt(`parseStmts`)
elif t.typeKind == ntyString:
result = quote do: `value`
elif t.typeKind == ntyInt:
result = quote do: parseInt(`value`)
elif t.typeKind == ntyBool:
result = quote do: "true".startsWith(`value`.toLower)
else:
error "Unknown value type: " & $t.typeKind
template walkFieldDefs*(t: NimNode, body: untyped) =
let tTypeImpl = t.getTypeImpl
var nodeToItr: NimNode
if tTypeImpl.typeKind == ntyObject: nodeToItr = tTypeImpl[2]
elif tTypeImpl.typeKind == ntyTypeDesc: nodeToItr = tTypeImpl.getType[1].getType[2]
else: error $t & " is not an object or type desc (it's a " & $tTypeImpl.typeKind & ")."
for fieldDef {.inject.} in nodeToItr.children:
# ignore AST nodes that are not field definitions
if fieldDef.kind == nnkIdentDefs:
let fieldIdent {.inject.} = fieldDef[0]
let fieldType {.inject.} = fieldDef[1]
body
elif fieldDef.kind == nnkSym:
let fieldIdent {.inject.} = fieldDef
let fieldType {.inject.} = fieldDef.getType
body
macro columnNamesForModel*(modelType: typed): seq[string] =
var columnNames = newSeq[string]()
modelType.walkFieldDefs:
columnNames.add(identNameToDb($fieldIdent))
result = newLit(columnNames)
macro rowToModel*(modelType: typed, row: seq[string]): untyped =
# Create the object constructor AST node
result = newNimNode(nnkObjConstr).add(modelType)
# Create new colon expressions for each of the property initializations
var idx = 0
modelType.walkFieldDefs:
let itemLookup = quote do: `row`[`idx`]
result.add(newColonExpr(
fieldIdent,
createParseStmt(fieldType, itemLookup)))
idx += 1
macro listFields*(t: typed): untyped =
var fields: seq[tuple[n: string, t: string]] = @[]
t.walkFieldDefs:
if fieldDef.kind == nnkSym: fields.add((n: $fieldIdent, t: fieldType.repr))
else: fields.add((n: $fieldIdent, t: $fieldType))
result = newLit(fields)
proc typeOfColumn*(modelType: NimNode, colName: string): NimNode =
modelType.walkFieldDefs:
if $fieldIdent != colName: continue
if fieldType.typeKind == ntyObject:
if fieldType.getType == UUID.getType: return ident("UUID")
elif fieldType.getType == DateTime.getType: return ident("DateTime")
elif fieldType.getType == Option.getType: return ident("Option")
else: error "Unknown column type: " & $fieldType.getTypeInst
else: return fieldType
raise newException(Exception,
"model of type '" & $modelType & "' has no column named '" & colName & "'")
proc isZero(val: int): bool = return val == 0
macro populateMutateClauses*(t: typed, newRecord: bool, mc: var MutateClauses): untyped =
result = newStmtList()
# iterate over all the object's fields
t.walkFieldDefs:
# grab the field, it's string name, and it's type
let fieldName = $fieldIdent
# we do not update the ID, but we do check: if we're creating a new
# record, we should not have an existing ID
if fieldName == "id":
result.add quote do:
if `newRecord` and not `t`.id.isZero:
raise newException(
AssertionError,
"Trying to create a new record, but the record already has an ID (" & $(`t`.id) & ").")
# if we're looking at an optional field, add logic to check for presence
elif fieldType.kind == nnkBracketExpr and
fieldType.len > 0 and
fieldType[0] == Option.getType:
result.add quote do:
`mc`.columns.add(identNameToDb(`fieldName`))
if `t`.`fieldIdent`.isSome:
`mc`.placeholders.add("?")
`mc`.values.add(dbFormat(`t`.`fieldIdent`.get))
else:
`mc`.placeholders.add("NULL")
# otherwise assume we can convert and go ahead.
else:
result.add quote do:
`mc`.columns.add(identNameToDb(`fieldName`))
`mc`.placeholders.add("?")
`mc`.values.add(dbFormat(`t`.`fieldIdent`))


@@ -1 +1 @@
const PM_API_VERSION* = "0.3.0"
const PM_API_VERSION* = "0.7.0"


@@ -1,6 +1,6 @@
#!/bin/bash
host="${PM_API_HOST:-localhost:8081}"
api_base_url="${PM_API_BASE_URL:-http://localhost:8081}"
if [ $# -eq 1 ]; then
url="$1"
method="GET"
@@ -17,6 +17,8 @@ fi
curl -s -X "$method" \
-H "Content-Type: application/json" \
-H "Authorization: $(cat credential)"\
"http://${host}/api/$url" \
-d "$data"
-H "Authorization: $(cat credential)" \
-H "Origin: https://curl.localhost" \
"${api_base_url}/api/$url" \
-d "$data" \
-v
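This curl wrapper now always sends an Origin header (https://curl.localhost) so the API's CORS allow-list can be exercised from the command line. A usage sketch, assuming the helper is saved as api.sh (its path is not shown in this diff) and that the credential file it cats sits in the working directory:

# GET against the default local API (http://localhost:8081).
./api.sh version

# Point the same call at the deployed dev API instead.
PM_API_BASE_URL=https://pmapi-dev.jdb-labs.com ./api.sh version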


@@ -46,9 +46,6 @@ user to manage these without a password.
pmapi ALL=NOPASSWD: /bin/systemctl stop personal_measure_api.dev.service
pmapi ALL=NOPASSWD: /bin/systemctl start personal_measure_api.dev.service
two systemd
service definitions, one for
### Database
razgriz-db.jdb-labs.com RDS instance maintains databases for each environment:
@@ -60,17 +57,9 @@ razgriz-db.jdb-labs.com RDS instance maintains databases for each environment:
CloudFront manages the routing of all of the external facing URLs.
https://pm.jdb-labs.com (CloudFront)
├── /api/<path>
│ └── https://pmapi.jdb-labs.com/api/
│ ├── nginx:80 --> nim/jester:8280
│ └── razgriz-db: database personal_measure
└── s3://pm.jdb-labs.com/prod/webroot (static HTML)
https://pm-dev.jdb-labs.com (CloudFront)
├── /api/<path>
│ └── https://pmapi-dev.jdb-labs.com/api/
│ ├── nginx:80 --> nim/jester:8281
│ └── razgriz-db: database personal_measure_dev
└── s3://pm.jdb-labs.com/dev/webroot (static HTML)


@@ -0,0 +1,28 @@
#!/bin/bash
echo "Looking up CloudFront distribution ID for Personal Measure ${TARGET_ENV} environment."
cloudfront_distribution_id=$(\
aws cloudfront list-distributions \
--query "DistributionList.Items[?starts_with(Comment, 'Personal Measure ${TARGET_ENV}')].Id | [0]" \
| sed -e 's/^"//' -e 's/"$//'
)
if [[ -z "${cloudfront_distribution_id}" ]]; then
>&2 echo "Unable to find CloudFront distribution for domain ${TARGET_ENV}."
exit 3
fi
echo "Found distribution ID ${cloudfront_distribution_id}."
echo "Invalidating the CloudFront cache for ${TARGET_ENV}."
invalidation_id=$(aws cloudfront create-invalidation \
--query 'Invalidation.Id' \
--distribution-id "${cloudfront_distribution_id}" \
--paths '/index.html')
if [[ $? -ne 0 || -z "${invalidation_id}" ]]; then
>&2 echo "Unable to create the CloudFront invalidation."
else
echo "Successfully created invalidation ${invalidation_id}."
fi
echo "Done."
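The deploy-web target already invokes this script with TARGET_ENV set (see the Makefile above); it can also be run by hand after an out-of-band upload, assuming AWS credentials are available in the environment:

# Invalidate /index.html on the dev distribution.
TARGET_ENV=dev operations/invalidate-cdn-cache.sh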


@@ -0,0 +1,33 @@
server {
listen 80;
server_name pmapi-dev.jdb-labs.com;
return 301 https://pmapi-dev.jdb-labs.com$request_uri;
}
server {
listen 443;
server_name pmapi-dev.jdb-labs.com;
ssl on;
location / {
if ($request_method = 'OPTIONS') {
add_header 'Access-Control-Allow-Origin' 'https://pm-dev.jdb-labs.com';
add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS';
add_header 'Access-Control-Max-Age' 1728000;
add_header 'Content-Type' 'text/plain; charset=utf-8';
add_header 'Content-Length' 0;
add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization';
return 204;
}
proxy_pass http://localhost:8281;
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
}
}
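A quick way to confirm the OPTIONS short-circuit on the dev proxy (a sketch; assumes DNS and the TLS certificate are already in place):

# Preflight from the allowed dev origin; expect a 204 with the CORS headers above.
curl -i -X OPTIONS \
  -H 'Origin: https://pm-dev.jdb-labs.com' \
  -H 'Access-Control-Request-Method: POST' \
  https://pmapi-dev.jdb-labs.com/v0/auth-token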


@@ -0,0 +1,33 @@
server {
listen 80;
server_name pmapi.jdb-labs.com;
return 301 https://pmapi.jdb-labs.com$request_uri;
}
server {
listen 443;
server_name pmapi.jdb-labs.com;
ssl on;
location / {
if ($request_method = 'OPTIONS') {
add_header 'Access-Control-Allow-Origin' 'https://pm.jdb-labs.com';
add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS';
add_header 'Access-Control-Max-Age' 1728000;
add_header 'Content-Type' 'text/plain; charset=utf-8';
add_header 'Content-Length' 0;
add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization';
return 204;
}
proxy_pass http://localhost:8280;
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
}
}


@@ -0,0 +1,11 @@
### Variables
variable "aws_region" {
description = "https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-regions-availability-zones.html"
default = "us-west-2" # Oregon
}
variable "app_root_url" {
description = "Name of the S3 bucket to store deployed artifacts, logs, etc."
default = "pm.jdb-labs.com"
}


@@ -0,0 +1,102 @@
data "aws_iam_policy_document" "bucket_access_policy" {
statement {
actions = [ "s3:GetObject" ]
effect = "Allow"
resources = [ "${var.artifact_bucket.arn}/${var.environment}/webroot/*" ]
principals {
type = "AWS"
identifiers = [ "${aws_cloudfront_origin_access_identity.origin_access_identity.iam_arn}" ]
}
}
statement {
actions = [ "s3:ListBucket" ]
effect = "Allow"
resources = [ "${var.artifact_bucket.arn}" ]
principals {
type = "AWS"
identifiers = [ "${aws_cloudfront_origin_access_identity.origin_access_identity.iam_arn}" ]
}
}
}
output "oai_access_policy" {
value = data.aws_iam_policy_document.bucket_access_policy
}
locals {
env_domain_name = "pm${var.environment == "prod" ? "" : "-${var.environment}"}.jdb-labs.com"
}
resource "aws_cloudfront_origin_access_identity" "origin_access_identity" {
comment = "OAI for Personal Measure ${var.environment} environment."
}
resource "aws_cloudfront_distribution" "s3_distribution" {
origin {
domain_name = "${var.artifact_bucket.bucket_regional_domain_name}"
origin_id = "S3-PersonalMeasure-${var.environment}"
origin_path = "/${var.environment}/webroot"
s3_origin_config {
origin_access_identity = "${aws_cloudfront_origin_access_identity.origin_access_identity.cloudfront_access_identity_path}"
}
}
enabled = true
is_ipv6_enabled = true
comment = "Personal Measure ${var.environment} distribution."
default_root_object = "/index.html"
logging_config {
include_cookies = false
bucket = "${var.artifact_bucket.bucket_domain_name}"
prefix = "${var.environment}/logs/cloudfront"
}
aliases = ["${local.env_domain_name}"]
default_cache_behavior {
allowed_methods = ["GET", "HEAD", "OPTIONS"]
cached_methods = ["GET", "HEAD", "OPTIONS"]
target_origin_id = "S3-PersonalMeasure-${var.environment}"
forwarded_values {
query_string = false
cookies {
forward = "none"
}
}
min_ttl = 0
default_ttl = 60 * 60 * 24 * 365 # cache for a year
max_ttl = 60 * 60 * 24 * 365 # cache for a year
compress = true
viewer_protocol_policy = "redirect-to-https"
}
custom_error_response {
error_code = 404
response_code = 200
response_page_path = "/index.html"
}
price_class = "PriceClass_100" # US and Canada only
restrictions {
geo_restriction {
restriction_type = "none"
}
}
tags = {
Environment = "${var.environment}"
}
viewer_certificate {
acm_certificate_arn = "${var.cloudfront_ssl_certificate_arn}"
ssl_support_method = "sni-only"
}
}


@@ -0,0 +1,13 @@
### Variables
variable "environment" {
description = "The short name of this deployed environment. For example: 'dev' or 'prod'. This short name will be used to name resources (CloudFront distributions, etc.)"
}
variable "artifact_bucket" {
description = "The aws_s3_bucket object representing the artifact bucket where deployed artifacts, logs, etc. live."
}
variable "cloudfront_ssl_certificate_arn" {
description = "ARN of the managed SSL certificate to use for this environment."
}


@@ -0,0 +1,50 @@
provider "aws" {
region = var.aws_region
}
resource "aws_s3_bucket" "personal_measure" {
bucket = "${var.app_root_url}"
acl = "log-delivery-write"
}
resource "aws_dynamodb_table" "dynamodb_terraform-state-lock" {
name = "terraform-state-lock.${var.app_root_url}"
hash_key = "LockID"
read_capacity = 20
write_capacity = 20
attribute {
name = "LockID"
type = "S"
}
tags = {
Name = "Terraform DynamoDB State Lock Table"
}
}
module "dev_env" {
source = "./deployed_env"
environment = "dev"
artifact_bucket = aws_s3_bucket.personal_measure
cloudfront_ssl_certificate_arn = "arn:aws:acm:us-east-1:063932952339:certificate/48fe3ce0-4700-4eaa-b433-bb634f47934c"
}
module "prod_env" {
source = "./deployed_env"
environment = "prod"
artifact_bucket = aws_s3_bucket.personal_measure
cloudfront_ssl_certificate_arn = "arn:aws:acm:us-east-1:063932952339:certificate/48fe3ce0-4700-4eaa-b433-bb634f47934c"
}
data "aws_iam_policy_document" "cloudfront_access_policy" {
source_json = "${module.dev_env.oai_access_policy.json}"
override_json = "${module.prod_env.oai_access_policy.json}"
}
resource "aws_s3_bucket_policy" "personal_measure" {
bucket = "${aws_s3_bucket.personal_measure.id}"
policy = "${data.aws_iam_policy_document.cloudfront_access_policy.json}"
}


@@ -0,0 +1,8 @@
terraform {
backend "s3" {
bucket = "pm.jdb-labs.com"
region = "us-west-2"
key = "terraform.tfstate"
dynamodb_table = "terraform-state-lock.pm.jdb-labs.com"
}
}
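With the S3 backend and the DynamoDB lock table both declared, the usual Terraform workflow applies (a sketch; run from the directory containing these .tf files, with AWS credentials configured):

# Initialize the S3/DynamoDB backend, review the plan, then apply.
terraform init
terraform plan
terraform apply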

operations/update-version.sh (new executable file): 59 changed lines

@@ -0,0 +1,59 @@
#!/bin/bash
#
# Script to update the version number, commit the changes to the version files,
# and tag the new commit.
set -e
origDir=$(pwd)
rootDir=$(git rev-parse --show-toplevel)
cd "$rootDir"
currentBranch=$(git rev-parse --abbrev-ref HEAD)
if [ "$currentBranch" != "develop" ]; then
printf "You are currently on the '%s' branch. Is this intended (yes/no)? " "$currentBranch"
read -r confirmation
if [ "$confirmation" != "yes" ]; then exit 1; fi
fi
lastVersion=$(jq -r .version web/package.json)
printf "Last version: %s\n" "$lastVersion"
printf "New version: "
read -r newVersion
printf "New version will be \"%s\". Is this correct (yes/no)? " "$newVersion"
read -r confirmation
if [ "$confirmation" != "yes" ]; then
printf "\n"
"$origDir/$0"
exit
fi
printf ">> Updating /web/package.json with \"version\": \"%s\"\n" "$newVersion"
printf "jq \".version = \\\"%s\\\"\" web/package.json > temp.json\n" "$newVersion"
jq ".version = \"${newVersion}\"" web/package.json > temp.json
printf "mv temp.json web/package.json\n"
mv temp.json web/package.json
printf ">> Updating /web/package-lock.json with \"version\": \"%s\"\n" "$newVersion"
printf "jq \".version = \\\"%s\\\"\" web/package-lock.json > temp.json\n" "$newVersion"
jq ".version = \"${newVersion}\"" web/package-lock.json > temp.json
printf "mv temp.json web/package-lock.json\n"
mv temp.json web/package-lock.json
printf ">> Updating /api/src/main/nim/personal_measure_apipkg/version.nim with PM_API_VERSION* = \"%s\"" "$newVersion"
printf "sed -i \"s/%s/%s/\" api/src/main/nim/personal_measure_apipkg/version.nim" "$lastVersion" "$newVersion"
sed -i "s/${lastVersion}/${newVersion}/" api/src/main/nim/personal_measure_apipkg/version.nim
printf ">> Committing new version.\n"
printf "git add web/package.json web/package-lock.json api/src/main/nim/personal_measure_apipkg/version.nim"
git add web/package.json web/package-lock.json api/src/main/nim/personal_measure_apipkg/version.nim
printf "git commit -m \"Update package version to %s\"\n" "$newVersion"
git commit -m "Update package version to ${newVersion}"
printf ">> Tagging commit.\n"
printf "git tag -m \"Version %s\" \"%s\"\n" "$newVersion" "$newVersion"
git tag -m "Version ${newVersion}" "${newVersion}"
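Intended use, based on the steps above: the script can be run from anywhere inside the repository (it cd's to the git root), warns if the current branch is not develop, and prompts for the new version before updating both package files and version.nim, committing, and tagging:

# Interactive; prompts for the new version number.
operations/update-version.sh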

web/.env.development (new file): 4 changed lines

@@ -0,0 +1,4 @@
NODE_ENV=production
VUE_APP_PM_API_BASE=https://pmapi-dev.jdb-labs.com/v0
VUE_APP_LOG_LEVEL=INFO
VUE_APP_API_LOG_LEVEL=ERROR


@@ -1,3 +1,3 @@
VUE_APP_PM_API_BASE=https://pm.jdb-labs.com/api
VUE_APP_PM_API_BASE=https://pmapi.jdb-labs.com/v0
VUE_APP_LOG_LEVEL=INFO
VUE_APP_API_LOG_LEVEL=ERROR


@@ -1,23 +1,5 @@
API_LOG_LEVEL='WARN'
LOG_LEVEL='TRACE'
build-dev:
npm run build-dev
build:
npm run build
npm run build-${TARGET_ENV}
serve:
VUE_APP_PM_API_BASE=/api \
VUE_APP_API_LOG_LEVEL=${API_LOG_LEVEL} \
VUE_APP_LOG_LEVEL=${LOG_LEVEL} \
npm run serve
serve-dev: build-dev
(cd dist && npx live-server . --port=8080 --entry-file=index.html --proxy=/api:http://localhost:8081/api --no-browser)
serve-ssl: build-dev
(cd dist && \
(local-ssl-proxy --source=8443 --target=8080 & \
echo `pwd` && \
npx live-server . --port=8080 --entry-file=index.html --proxy=/api:http://localhost:8081/api --no-browser))
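The web build is now driven by TARGET_ENV, matching the top-level Makefile (a sketch; build-dev appears in this diff, and a matching build-prod npm script is assumed):

# Build the dev bundle.
TARGET_ENV=dev make -C web build

# Build the prod bundle (assumes an npm build-prod script exists).
TARGET_ENV=prod make -C web build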

web/package-lock.json (generated): 453 changed lines

@@ -1,6 +1,6 @@
{
"name": "personal-measure-web",
"version": "0.1.0",
"version": "0.7.0",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
@@ -2167,12 +2167,40 @@
"dev": true
},
"axios": {
"version": "0.18.0",
"resolved": "https://registry.npmjs.org/axios/-/axios-0.18.0.tgz",
"integrity": "sha1-MtU+SFHv3AoRmTts0AB4nXDAUQI=",
"version": "0.18.1",
"resolved": "https://registry.npmjs.org/axios/-/axios-0.18.1.tgz",
"integrity": "sha512-0BfJq4NSfQXd+SkFdrvFbG7addhYSBA2mQwISr46pD6E5iqkWg02RAs8vyTT/j0RTnoYmeXauBuSv1qKwR179g==",
"requires": {
"follow-redirects": "^1.3.0",
"is-buffer": "^1.1.5"
"follow-redirects": "1.5.10",
"is-buffer": "^2.0.2"
},
"dependencies": {
"debug": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz",
"integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==",
"requires": {
"ms": "2.0.0"
}
},
"follow-redirects": {
"version": "1.5.10",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.5.10.tgz",
"integrity": "sha512-0V5l4Cizzvqt5D44aTXbFZz+FtyXV1vrDN6qrelxtfYQKW0KO0W2T/hkE8xvGa/540LkZlkaUjO4ailYTFtHVQ==",
"requires": {
"debug": "=3.1.0"
}
},
"is-buffer": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.4.tgz",
"integrity": "sha512-Kq1rokWXOPXWuaMAqZiJW4XxsmD9zGx9q4aePabbn3qCRGedtH7Cm+zV8WETitMfu1wdh+Rvd6w5egwSngUX2A=="
},
"ms": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
"integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g="
}
}
},
"babel-code-frame": {
@@ -3040,28 +3068,6 @@
"integrity": "sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg=",
"dev": true
},
"cacache": {
"version": "11.3.2",
"resolved": "https://registry.npmjs.org/cacache/-/cacache-11.3.2.tgz",
"integrity": "sha512-E0zP4EPGDOaT2chM08Als91eYnf8Z+eH1awwwVsngUmgppfM5jjJ8l3z5vO5p5w/I3LsiXawb1sW0VY65pQABg==",
"dev": true,
"requires": {
"bluebird": "^3.5.3",
"chownr": "^1.1.1",
"figgy-pudding": "^3.5.1",
"glob": "^7.1.3",
"graceful-fs": "^4.1.15",
"lru-cache": "^5.1.1",
"mississippi": "^3.0.0",
"mkdirp": "^0.5.1",
"move-concurrently": "^1.0.1",
"promise-inflight": "^1.0.1",
"rimraf": "^2.6.2",
"ssri": "^6.0.1",
"unique-filename": "^1.1.1",
"y18n": "^4.0.0"
}
},
"cache-base": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/cache-base/-/cache-base-1.0.1.tgz",
@@ -5724,6 +5730,7 @@
"version": "1.7.0",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.7.0.tgz",
"integrity": "sha512-m/pZQy4Gj287eNy94nivy5wchN3Kp+Q5WgUPNy5lJSZ3sgkVKSYV/ZChMAQVIgx1SqfZ2zBZtPA2YlXIWxxJOQ==",
"dev": true,
"requires": {
"debug": "^3.2.6"
},
@@ -5732,6 +5739,7 @@
"version": "3.2.6",
"resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz",
"integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==",
"dev": true,
"requires": {
"ms": "^2.1.1"
}
@@ -6643,9 +6651,9 @@
}
},
"globule": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/globule/-/globule-1.2.1.tgz",
"integrity": "sha512-g7QtgWF4uYSL5/dn71WxubOrS7JVGCnFPEnoeChJmBnyR9Mw8nGoEwOgJL/RC2Te0WhbsEUCejfH8SZNJ+adYQ==",
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/globule/-/globule-1.3.0.tgz",
"integrity": "sha512-YlD4kdMqRCQHrhVdonet4TdRtv1/sZKepvoxNT4Nrhrp5HI8XFfc8kFlGlBn2myBo80aGp8Eft259mbcUJhgSg==",
"dev": true,
"requires": {
"glob": "~7.1.1",
@@ -6682,9 +6690,9 @@
"dev": true
},
"handlebars": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.1.2.tgz",
"integrity": "sha512-nvfrjqvt9xQ8Z/w0ijewdD/vvWDTOweBUm96NTr66Wfvo1mJenBLwcYmPs3TIBP5ruzYGD7Hx/DaM9RmhroGPw==",
"version": "4.7.3",
"resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.3.tgz",
"integrity": "sha512-SRGwSYuNfx8DwHD/6InAPzD6RgeruWLT+B8e8a7gGs8FWgHzlExpTFMEq2IA6QpAfOClpKHy6+8IqTjeBCu6Kg==",
"dev": true,
"requires": {
"neo-async": "^2.6.0",
@@ -7289,6 +7297,12 @@
"integrity": "sha1-gtwzbSMrkGIXnQWrMpOmYFn9Q10=",
"dev": true
},
"infer-owner": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/infer-owner/-/infer-owner-1.0.4.tgz",
"integrity": "sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A==",
"dev": true
},
"inflight": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
@@ -7406,7 +7420,8 @@
"is-buffer": {
"version": "1.1.6",
"resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz",
"integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w=="
"integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==",
"dev": true
},
"is-callable": {
"version": "1.1.4",
@@ -9506,9 +9521,9 @@
}
},
"lodash": {
"version": "4.17.11",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.11.tgz",
"integrity": "sha512-cQKh8igo5QUhZ7lg38DYWAxMvjSAKG0A8wGSVimP07SIUEK2UO+arSRKbRZWtelMtN5V0Hkwh5ryOto/SshYIg==",
"version": "4.17.15",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz",
"integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==",
"dev": true
},
"lodash._reinterpolate": {
@@ -9528,9 +9543,9 @@
"integrity": "sha1-gteb/zCmfEAF/9XiUVMArZyk168="
},
"lodash.defaultsdeep": {
"version": "4.6.0",
"resolved": "https://registry.npmjs.org/lodash.defaultsdeep/-/lodash.defaultsdeep-4.6.0.tgz",
"integrity": "sha1-vsECT4WxvZbL6kBbI8FK1kQ6b4E=",
"version": "4.6.1",
"resolved": "https://registry.npmjs.org/lodash.defaultsdeep/-/lodash.defaultsdeep-4.6.1.tgz",
"integrity": "sha512-3j8wdDzYuWO3lM3Reg03MuQR957t287Rpcxp1njpEa8oDrikb+FwGdW3n+FELh/A6qib6yPit0j/pv9G/yeAqA==",
"dev": true
},
"lodash.findindex": {
@@ -9568,9 +9583,9 @@
"dev": true
},
"lodash.merge": {
"version": "4.6.1",
"resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.1.tgz",
"integrity": "sha512-AOYza4+Hf5z1/0Hztxpm2/xiPZgi/cjMqdnKTUWTBSKchJlxXXuUSxCCl8rJlf4g6yww/j6mA8nC8Hw/EZWxKQ=="
"version": "4.6.2",
"resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz",
"integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ=="
},
"lodash.sortby": {
"version": "4.7.0",
@@ -9800,15 +9815,6 @@
"integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=",
"dev": true
},
"mem": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/mem/-/mem-1.1.0.tgz",
"integrity": "sha1-Xt1StIXKHZAP5kiVUFOZoN+kX3Y=",
"dev": true,
"requires": {
"mimic-fn": "^1.0.0"
}
},
"memory-fs": {
"version": "0.4.1",
"resolved": "https://registry.npmjs.org/memory-fs/-/memory-fs-0.4.1.tgz",
@@ -10015,9 +10021,9 @@
}
},
"mixin-deep": {
"version": "1.3.1",
"resolved": "https://registry.npmjs.org/mixin-deep/-/mixin-deep-1.3.1.tgz",
"integrity": "sha512-8ZItLHeEgaqEvd5lYBXfm4EZSFCX29Jb9K+lAHhDKzReKBQKj3R+7NOF6tjqYi9t4oI8VUfaWITJQm86wnXGNQ==",
"version": "1.3.2",
"resolved": "https://registry.npmjs.org/mixin-deep/-/mixin-deep-1.3.2.tgz",
"integrity": "sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA==",
"dev": true,
"requires": {
"for-in": "^1.0.2",
@@ -10122,7 +10128,8 @@
"ms": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz",
"integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg=="
"integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==",
"dev": true
},
"multicast-dns": {
"version": "6.2.3",
@@ -10339,9 +10346,9 @@
}
},
"node-sass": {
"version": "4.12.0",
"resolved": "https://registry.npmjs.org/node-sass/-/node-sass-4.12.0.tgz",
"integrity": "sha512-A1Iv4oN+Iel6EPv77/HddXErL2a+gZ4uBeZUy+a8O35CFYTXhgA8MgLCWBtwpGZdCvTvQ9d+bQxX/QC36GDPpQ==",
"version": "4.13.1",
"resolved": "https://registry.npmjs.org/node-sass/-/node-sass-4.13.1.tgz",
"integrity": "sha512-TTWFx+ZhyDx1Biiez2nB0L3YrCZ/8oHagaDalbuBSlqXgUPsdkUSzJsVxeDO9LtPB49+Fh3WQl3slABo6AotNw==",
"dev": true,
"requires": {
"async-foreach": "^0.1.3",
@@ -10351,7 +10358,7 @@
"get-stdin": "^4.0.1",
"glob": "^7.0.3",
"in-publish": "^2.0.0",
"lodash": "^4.17.11",
"lodash": "^4.17.15",
"meow": "^3.7.0",
"mkdirp": "^0.5.1",
"nan": "^2.13.2",
@@ -10409,9 +10416,9 @@
}
},
"nan": {
"version": "2.13.2",
"resolved": "https://registry.npmjs.org/nan/-/nan-2.13.2.tgz",
"integrity": "sha512-TghvYc72wlMGMVMluVo9WRJc0mB8KxxF/gZ4YYFy7V2ZQX9l7rgbPg7vjS9mt6U5HXODVFVI2bOduCzwOMv/lw==",
"version": "2.14.0",
"resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz",
"integrity": "sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg==",
"dev": true
},
"strip-ansi": {
@@ -10785,67 +10792,6 @@
"integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=",
"dev": true
},
"os-locale": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/os-locale/-/os-locale-2.1.0.tgz",
"integrity": "sha512-3sslG3zJbEYcaC4YVAvDorjGxc7tv6KVATnLPZONiljsUncvihe9BQoVCEs0RZ1kmf4Hk9OBqlZfJZWI4GanKA==",
"dev": true,
"requires": {
"execa": "^0.7.0",
"lcid": "^1.0.0",
"mem": "^1.1.0"
},
"dependencies": {
"cross-spawn": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-5.1.0.tgz",
"integrity": "sha1-6L0O/uWPz/b4+UUQoKVUu/ojVEk=",
"dev": true,
"requires": {
"lru-cache": "^4.0.1",
"shebang-command": "^1.2.0",
"which": "^1.2.9"
}
},
"execa": {
"version": "0.7.0",
"resolved": "https://registry.npmjs.org/execa/-/execa-0.7.0.tgz",
"integrity": "sha1-lEvs00zEHuMqY6n68nrVpl/Fl3c=",
"dev": true,
"requires": {
"cross-spawn": "^5.0.1",
"get-stream": "^3.0.0",
"is-stream": "^1.1.0",
"npm-run-path": "^2.0.0",
"p-finally": "^1.0.0",
"signal-exit": "^3.0.0",
"strip-eof": "^1.0.0"
}
},
"get-stream": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz",
"integrity": "sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ=",
"dev": true
},
"lru-cache": {
"version": "4.1.5",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz",
"integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==",
"dev": true,
"requires": {
"pseudomap": "^1.0.2",
"yallist": "^2.1.2"
}
},
"yallist": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz",
"integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=",
"dev": true
}
}
},
"os-tmpdir": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz",
@ -12852,9 +12798,9 @@
"dev": true
},
"set-value": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/set-value/-/set-value-2.0.0.tgz",
"integrity": "sha512-hw0yxk9GT/Hr5yJEYnHNKYXkIA8mVJgd9ditYZCe16ZczcaELYYcfvaXesNACk2O8O0nTiPQcQhGUQj8JLzeeg==",
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/set-value/-/set-value-2.0.1.tgz",
"integrity": "sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw==",
"dev": true,
"requires": {
"extend-shallow": "^2.0.1",
@ -13753,41 +13699,99 @@
"inherits": "2"
}
},
"terser": {
"version": "3.16.1",
"resolved": "https://registry.npmjs.org/terser/-/terser-3.16.1.tgz",
"integrity": "sha512-JDJjgleBROeek2iBcSNzOHLKsB/MdDf+E/BOAJ0Tk9r7p9/fVobfv7LMJ/g/k3v9SXdmjZnIlFd5nfn/Rt0Xow==",
"dev": true,
"requires": {
"commander": "~2.17.1",
"source-map": "~0.6.1",
"source-map-support": "~0.5.9"
},
"dependencies": {
"source-map": {
"version": "0.6.1",
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
"integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==",
"dev": true
}
}
},
"terser-webpack-plugin": {
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-1.2.2.tgz",
"integrity": "sha512-1DMkTk286BzmfylAvLXwpJrI7dWa5BnFmscV/2dCr8+c56egFcbaeFAl7+sujAjdmpLam21XRdhA4oifLyiWWg==",
"version": "1.4.3",
"resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-1.4.3.tgz",
"integrity": "sha512-QMxecFz/gHQwteWwSo5nTc6UaICqN1bMedC5sMtUc7y3Ha3Q8y6ZO0iCR8pq4RJC8Hjf0FEPEHZqcMB/+DFCrA==",
"dev": true,
"requires": {
"cacache": "^11.0.2",
"find-cache-dir": "^2.0.0",
"cacache": "^12.0.2",
"find-cache-dir": "^2.1.0",
"is-wsl": "^1.1.0",
"schema-utils": "^1.0.0",
"serialize-javascript": "^1.4.0",
"serialize-javascript": "^2.1.2",
"source-map": "^0.6.1",
"terser": "^3.16.1",
"webpack-sources": "^1.1.0",
"worker-farm": "^1.5.2"
"terser": "^4.1.2",
"webpack-sources": "^1.4.0",
"worker-farm": "^1.7.0"
},
"dependencies": {
"bluebird": {
"version": "3.7.2",
"resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz",
"integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==",
"dev": true
},
"cacache": {
"version": "12.0.3",
"resolved": "https://registry.npmjs.org/cacache/-/cacache-12.0.3.tgz",
"integrity": "sha512-kqdmfXEGFepesTuROHMs3MpFLWrPkSSpRqOw80RCflZXy/khxaArvFrQ7uJxSUduzAufc6G0g1VUCOZXxWavPw==",
"dev": true,
"requires": {
"bluebird": "^3.5.5",
"chownr": "^1.1.1",
"figgy-pudding": "^3.5.1",
"glob": "^7.1.4",
"graceful-fs": "^4.1.15",
"infer-owner": "^1.0.3",
"lru-cache": "^5.1.1",
"mississippi": "^3.0.0",
"mkdirp": "^0.5.1",
"move-concurrently": "^1.0.1",
"promise-inflight": "^1.0.1",
"rimraf": "^2.6.3",
"ssri": "^6.0.1",
"unique-filename": "^1.1.1",
"y18n": "^4.0.0"
}
},
"commander": {
"version": "2.20.3",
"resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz",
"integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==",
"dev": true
},
"find-cache-dir": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-2.1.0.tgz",
"integrity": "sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ==",
"dev": true,
"requires": {
"commondir": "^1.0.1",
"make-dir": "^2.0.0",
"pkg-dir": "^3.0.0"
}
},
"glob": {
"version": "7.1.6",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz",
"integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==",
"dev": true,
"requires": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^3.0.4",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
}
},
"make-dir": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz",
"integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==",
"dev": true,
"requires": {
"pify": "^4.0.1",
"semver": "^5.6.0"
}
},
"pify": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz",
"integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==",
"dev": true
},
"schema-utils": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-1.0.0.tgz",
@ -13799,11 +13803,57 @@
"ajv-keywords": "^3.1.0"
}
},
"serialize-javascript": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-2.1.2.tgz",
"integrity": "sha512-rs9OggEUF0V4jUSecXazOYsLfu7OGK2qIn3c7IPBiffz32XniEp/TX9Xmc9LQfK2nQ2QKHvZ2oygKUGU0lG4jQ==",
"dev": true
},
"source-map": {
"version": "0.6.1",
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
"integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==",
"dev": true
},
"source-map-support": {
"version": "0.5.16",
"resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.16.tgz",
"integrity": "sha512-efyLRJDr68D9hBBNIPWFjhpFzURh+KJykQwvMyW5UiZzYwoF6l4YMMDIJJEyFWxWCqfyxLzz6tSfUFR+kXXsVQ==",
"dev": true,
"requires": {
"buffer-from": "^1.0.0",
"source-map": "^0.6.0"
}
},
"terser": {
"version": "4.6.3",
"resolved": "https://registry.npmjs.org/terser/-/terser-4.6.3.tgz",
"integrity": "sha512-Lw+ieAXmY69d09IIc/yqeBqXpEQIpDGZqT34ui1QWXIUpR2RjbqEkT8X7Lgex19hslSqcWM5iMN2kM11eMsESQ==",
"dev": true,
"requires": {
"commander": "^2.20.0",
"source-map": "~0.6.1",
"source-map-support": "~0.5.12"
}
},
"webpack-sources": {
"version": "1.4.3",
"resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-1.4.3.tgz",
"integrity": "sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ==",
"dev": true,
"requires": {
"source-list-map": "^2.0.0",
"source-map": "~0.6.1"
}
},
"worker-farm": {
"version": "1.7.0",
"resolved": "https://registry.npmjs.org/worker-farm/-/worker-farm-1.7.0.tgz",
"integrity": "sha512-rvw3QTZc8lAxyVrqcSGVm5yP/IJ2UcB3U0graE3LCFoZ0Yn2x4EoVSqJKdB/T5M+FLcRPjz4TDacRf3OCfNUzw==",
"dev": true,
"requires": {
"errno": "~0.1.7"
}
}
}
},
@ -14321,38 +14371,15 @@
"dev": true
},
"union-value": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/union-value/-/union-value-1.0.0.tgz",
"integrity": "sha1-XHHDTLW61dzr4+oM0IIHulqhrqQ=",
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/union-value/-/union-value-1.0.1.tgz",
"integrity": "sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg==",
"dev": true,
"requires": {
"arr-union": "^3.1.0",
"get-value": "^2.0.6",
"is-extendable": "^0.1.1",
"set-value": "^0.4.3"
},
"dependencies": {
"extend-shallow": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz",
"integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=",
"dev": true,
"requires": {
"is-extendable": "^0.1.0"
}
},
"set-value": {
"version": "0.4.3",
"resolved": "https://registry.npmjs.org/set-value/-/set-value-0.4.3.tgz",
"integrity": "sha1-fbCPnT0i3H945Trzw79GZuzfzPE=",
"dev": true,
"requires": {
"extend-shallow": "^2.0.1",
"is-extendable": "^0.1.1",
"is-plain-object": "^2.0.1",
"to-object-path": "^0.3.0"
}
}
"set-value": "^2.0.1"
}
},
"uniq": {
@ -15451,15 +15478,6 @@
"workbox-build": "^3.6.3"
}
},
"worker-farm": {
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/worker-farm/-/worker-farm-1.6.0.tgz",
"integrity": "sha512-6w+3tHbM87WnSWnENBUvA2pxJPLhQUg5LKwUQHq3r+XPhIM+Gh2R5ycbwPCyuGbNg+lPgdcnQUhuC02kJCvffQ==",
"dev": true,
"requires": {
"errno": "~0.1.7"
}
},
"wrap-ansi": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz",
@ -15558,16 +15576,16 @@
"dev": true
},
"yargs": {
"version": "11.1.0",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-11.1.0.tgz",
"integrity": "sha512-NwW69J42EsCSanF8kyn5upxvjp5ds+t3+udGBeTbFnERA+lF541DDpMawzo4z6W/QrzNM18D+BPMiOBibnFV5A==",
"version": "11.1.1",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-11.1.1.tgz",
"integrity": "sha512-PRU7gJrJaXv3q3yQZ/+/X6KBswZiaQ+zOmdprZcouPYtQgvNU35i+68M4b1ZHLZtYFT5QObFLV+ZkmJYcwKdiw==",
"dev": true,
"requires": {
"cliui": "^4.0.0",
"decamelize": "^1.1.1",
"find-up": "^2.1.0",
"get-caller-file": "^1.0.1",
"os-locale": "^2.0.0",
"os-locale": "^3.1.0",
"require-directory": "^2.1.1",
"require-main-filename": "^1.0.1",
"set-blocking": "^2.0.0",
@ -15586,6 +15604,21 @@
"locate-path": "^2.0.0"
}
},
"invert-kv": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/invert-kv/-/invert-kv-2.0.0.tgz",
"integrity": "sha512-wPVv/y/QQ/Uiirj/vh3oP+1Ww+AWehmi1g5fFWGPF6IpCBCDVrhgHRMvrLfdYcwDh3QJbGXDW4JAuzxElLSqKA==",
"dev": true
},
"lcid": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/lcid/-/lcid-2.0.0.tgz",
"integrity": "sha512-avPEb8P8EGnwXKClwsNUgryVjllcRqtMYa49NTsbQagYuT1DcXnl1915oxWjoyGrXR6zH/Y0Zc96xWsPcoDKeA==",
"dev": true,
"requires": {
"invert-kv": "^2.0.0"
}
},
"locate-path": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz",
@ -15596,6 +15629,34 @@
"path-exists": "^3.0.0"
}
},
"mem": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/mem/-/mem-4.3.0.tgz",
"integrity": "sha512-qX2bG48pTqYRVmDB37rn/6PT7LcR8T7oAX3bf99u1Tt1nzxYfxkgqDwUwolPlXweM0XzBOBFzSx4kfp7KP1s/w==",
"dev": true,
"requires": {
"map-age-cleaner": "^0.1.1",
"mimic-fn": "^2.0.0",
"p-is-promise": "^2.0.0"
}
},
"mimic-fn": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz",
"integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==",
"dev": true
},
"os-locale": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/os-locale/-/os-locale-3.1.0.tgz",
"integrity": "sha512-Z8l3R4wYWM40/52Z+S265okfFj8Kt2cC2MKY+xNi3kFs+XGI7WXu/I309QQQYbRW4ijiZ+yxs9pqEhJh0DqW3Q==",
"dev": true,
"requires": {
"execa": "^1.0.0",
"lcid": "^2.0.0",
"mem": "^4.0.0"
}
},
"p-limit": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz",

View File

@ -1,10 +1,10 @@
{
"name": "personal-measure-web",
"version": "0.3.0",
"version": "0.7.0",
"private": true,
"scripts": {
"serve": "vue-cli-service serve",
"build": "vue-cli-service build --mode production",
"build-prod": "vue-cli-service build --mode production",
"build-dev": "vue-cli-service build --mode development",
"lint": "vue-cli-service lint",
"test:unit": "vue-cli-service test:unit"
@ -19,14 +19,14 @@
"@types/lodash.findindex": "^4.6.6",
"@types/lodash.merge": "^4.6.5",
"apexcharts": "^3.6.5",
"axios": "^0.18.0",
"axios": "^0.18.1",
"js-cookie": "^2.2.0",
"jwt-decode": "^2.2.0",
"keen-ui": "^1.1.2",
"lodash.assign": "^4.2.0",
"lodash.findindex": "^4.6.0",
"lodash.keyby": "^4.6.0",
"lodash.merge": "^4.6.1",
"lodash.merge": "^4.6.2",
"moment": "^2.24.0",
"register-service-worker": "^1.5.2",
"vue": "^2.6.6",
@ -50,7 +50,7 @@
"babel-core": "7.0.0-bridge.0",
"lint-staged": "^8.1.0",
"live-server": "^1.2.1",
"node-sass": "^4.12.0",
"node-sass": "^4.13.1",
"sass-loader": "^7.1.0",
"ts-jest": "^23.0.0",
"typescript": "^3.0.0",

View File

@ -22,11 +22,13 @@ export class SimpleDetails extends Vue {
};
private get measurementChartData(): ApexAxisChartSeries {
const measurementData = this.measurements || [];
const measurementData = this.measurements.slice() || [];
return [{
name: this.measure.name,
data: measurementData.map((m) => ({ x: m.timestamp.toISOString(), y: m.value }))
data: measurementData
.sort((a, b) => a.timestamp.getTime() - b.timestamp.getTime())
.map((m) => ({ x: m.timestamp.toISOString(), y: m.value }))
}];
}

View File

@ -16,11 +16,13 @@ export class SimpleSummaryGraph extends Vue {
};
private get measurementData(): ApexAxisChartSeries {
const measurementData = this.measurements || [];
let measurementData = this.measurements.slice() || [];
return [{
name: this.measure.name,
data: measurementData.map((m) => ({ x: m.timestamp.toISOString(), y: m.value }))
data: measurementData
.sort((a, b) => a.timestamp.getTime() - b.timestamp.getTime())
.map((m) => ({ x: m.timestamp.toISOString(), y: m.value }))
}];
}
}
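
Both graph components apply the same copy-then-sort pattern: Array.prototype.sort mutates its receiver, so sorting a shallow copy keeps the store-managed measurements array untouched while the chart still receives chronologically ordered points. A standalone sketch of the idea, with the Measurement shape inferred from the component code above:

interface Measurement {
  timestamp: Date;
  value: number;
}

// Sort a copy so the caller's (possibly Vuex-managed) array is not mutated,
// then map into the { x, y } points the chart series expects.
function toSeriesData(measurements: Measurement[]): Array<{ x: string; y: number }> {
  return measurements
    .slice() // shallow copy; sort() below works in place
    .sort((a, b) => a.timestamp.getTime() - b.timestamp.getTime())
    .map((m) => ({ x: m.timestamp.toISOString(), y: m.value }));
}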

View File

@ -1,6 +1,5 @@
import { LogLevel } from './log-message';
import Logger from './logger';
import { default as Axios, AxiosInstance } from 'axios';
const ROOT_LOGGER_NAME = 'ROOT';
@ -8,7 +7,6 @@ const ROOT_LOGGER_NAME = 'ROOT';
export class LogService {
private loggers: { [key: string]: Logger };
private http: AxiosInstance = Axios.create();
public get ROOT_LOGGER() {
return this.loggers[ROOT_LOGGER_NAME];

View File

@ -32,7 +32,7 @@ export class AuthStoreModule extends VuexModule {
// this should be guaranteed by the server (redirect HTTP -> HTTPS)
// but we'll do a sanity check just to make sure.
if (window.location.protocol === 'https:' ||
process.env.NODE_ENV === 'development') { // allow in dev
process.env.NODE_ENV === 'development') { // allow http in dev
localStorage.setItem(SESSION_KEY, authToken);
}

View File

@ -8,7 +8,7 @@ const VERSION = {
module.exports = {
devServer: {
proxy: {
'/api': { target: 'http://localhost:8081' }
'/v0': { target: 'http://localhost:8081' }
},
host: 'localhost',
disableHostCheck: true
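
With the application root moved from '/api' to '/v0', webpack-dev-server forwards any same-origin request under that prefix to the local API on port 8081 during development. A hedged usage sketch; the '/version' path is illustrative, not necessarily a real endpoint:

// Illustrative only: during `npm run serve`, a request to '/v0/version' from
// the browser is proxied to http://localhost:8081/v0/version by the devServer
// configuration above.
import Axios from 'axios';

const http = Axios.create({ baseURL: '/v0' });

export async function apiVersion(): Promise<string> {
  const resp = await http.get('/version'); // hypothetical endpoint
  return String(resp.data);
}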