Compare commits
22 Commits
SHA1

deac844d02
80a3ba4621
774d0b446f
ee1147a1a5
186b7d5b29
52eaa63f25
e61fe3b01e
e83e64273b
b2d4df0aac
c827beab5e
0574f0ec6a
ce7d4b60de
2622877db5
c6be698572
07037616ac
b85cf8b367
741124b734
a4e6a4cb81
dcf82d8999
6556a86209
ff7f570ab1
d1f04951e5
TODO.md (17 changed lines)
@@ -1,6 +1,11 @@
* Write a tool to convert JSON Schema into a human-readable format suitable for
  documentation. Should use the description, title, and other fields from the
  JSON spec. Use this for writing the JSON schema docs instead of duplicating
  the description of configuration files between JSON schema and the
  documentation. In other words, use the schemas as the single source of truth
  and generate everything else from that.
TODO

* Orchestration of docker containers for running builds.
* Write API docs.

NICE TO HAVE

* Use/create some json-schema -> nim code generator to auto-generate json
  handling code from schemas.
* Use some json-schema -> docs generator to document the API.
* Support unique UUID prefixes in URLs.
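As a rough illustration of the schema-driven documentation idea in the TODO above (purely a sketch; the proc, schema path, and output path below are hypothetical and not part of this changeset), a minimal JSON Schema to Markdown walk in Nim might look like this:

```nim
# Hypothetical sketch only -- not part of this changeset.
import json, strutils

proc describeSchema(schema: JsonNode, name = "root", depth = 1): string =
  ## Render a JSON Schema object and its properties as Markdown,
  ## using the schema's title and description fields.
  let title =
    if schema.hasKey("title"): schema["title"].getStr(name)
    else: name
  result = repeat('#', depth) & " " & title & "\n\n"
  if schema.hasKey("description"):
    result &= schema["description"].getStr & "\n\n"
  if schema.hasKey("properties"):
    for propName, propSchema in schema["properties"].pairs:
      if propSchema.kind == JObject:
        result &= describeSchema(propSchema, propName, depth + 1)

when isMainModule:
  # Both file paths are placeholders.
  let schema = parseJson(readFile("strawboss.config.schema.json"))
  writeFile("docs/configuration.md", describeSchema(schema))
```

The same traversal could, in principle, feed the json-schema -> nim code generator mentioned under NICE TO HAVE.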
@@ -3,13 +3,12 @@ import cliutils, docopt, os, sequtils, strutils, tempfile, uuids
import strawbosspkg/configuration
import strawbosspkg/core
import strawbosspkg/server

let SB_VER = "0.3.1"
import strawbosspkg/version

proc logProcOutput*(outMsg, errMsg: TaintedString, cmd: string) =
  let prefix = if cmd != nil: cmd & ": " else: ""
  if outMsg != nil: stdout.writeLine prefix & outMsg
  if errMsg != nil: stderr.writeLine prefix & errMsg
  let prefix = if cmd.len > 0: cmd & ": " else: ""
  if outMsg.len > 0: stdout.writeLine prefix & outMsg
  if errMsg.len > 0: stderr.writeLine prefix & errMsg

when isMainModule:

@@ -19,14 +18,14 @@ Usage:
  strawboss run <requestFile> [options]
  strawboss hashpwd <pwd>
  strawboss api-key <username>


Options


  -c --config-file <cfgFile>   Use this config file instead of the default
                               (strawboss.config.json).
"""

  let args = docopt(doc, version = "strawboss v" & SB_VER)
  let args = docopt(doc, version = "strawboss v" & SB_VERSION)

  let cfgFile = if args["--config-file"]: $args["--config-file"]
                else: "strawboss.config.json"
@@ -50,7 +49,7 @@ Options

  try:

    if req.workspaceDir.isNilOrEmpty: req.workspaceDir = mkdtemp()
    if req.workspaceDir.len == 0: req.workspaceDir = mkdtemp()

    let status = core.run(cfg, req, logProcOutput)
    if status.state == BuildState.failed: raiseEx status.details
@ -1,4 +1,5 @@
|
||||
import cliutils, logging, json, os, sequtils, strtabs, strutils, tables, times, uuids
|
||||
import cliutils, logging, json, os, sequtils, strtabs, strutils, tables, times,
|
||||
unicode, uuids
|
||||
|
||||
from langutils import sameContents
|
||||
from typeinfo import toAny
|
||||
@ -7,23 +8,22 @@ from strutils import parseEnum
|
||||
const ISO_TIME_FORMAT = "yyyy-MM-dd'T'HH:mm:sszzz"
|
||||
|
||||
# Types
|
||||
#
|
||||
|
||||
type
|
||||
BuildState* {.pure.} = enum
|
||||
queued, complete, failed, running, setup, rejected
|
||||
complete, failed, queued, rejected, running, setup, stepComplete
|
||||
|
||||
BuildStatus* = object
|
||||
runId*, details*: string
|
||||
runId*, details*, version*: string
|
||||
state*: BuildState
|
||||
|
||||
Step* = object
|
||||
name*, stepCmd*, workingDir*: string
|
||||
containerImage*, name*, stepCmd*, workingDir*: string
|
||||
artifacts*, cmdInput*, depends*, expectedEnv*: seq[string]
|
||||
dontSkip*: bool
|
||||
|
||||
ProjectConfig* = object
|
||||
name*: string
|
||||
versionCmd*: string
|
||||
containerImage*, name*, versionCmd*: string
|
||||
steps*: Table[string, Step]
|
||||
|
||||
ProjectDef* = object
|
||||
@ -33,7 +33,7 @@ type
|
||||
RunRequest* = object
|
||||
runId*: UUID
|
||||
projectName*, stepName*, buildRef*, workspaceDir*: string
|
||||
timestamp*: TimeInfo
|
||||
timestamp*: DateTime
|
||||
forceRebuild*: bool
|
||||
|
||||
Run* = object
|
||||
@ -41,6 +41,10 @@ type
|
||||
request*: RunRequest
|
||||
status*: BuildStatus
|
||||
|
||||
RunLogs* = object
|
||||
runId*: UUID
|
||||
stdout*, stderr*: seq[string]
|
||||
|
||||
User* = object
|
||||
name*: string
|
||||
hashedPwd*: string
|
||||
@ -54,6 +58,7 @@ type
|
||||
debug*: bool
|
||||
logLevel*: Level
|
||||
pathToExe*: string
|
||||
port*: int
|
||||
projects*: seq[ProjectDef]
|
||||
pwdCost*: int8
|
||||
users*: seq[UserRef]
|
||||
@ -79,6 +84,7 @@ proc `==`*(a, b: StrawBossConfig): bool =
|
||||
a.buildDataDir == b.buildDataDir and
|
||||
a.authSecret == b.authSecret and
|
||||
a.pwdCost == b.pwdCost and
|
||||
a.port == b.port and
|
||||
a.maintenancePeriod == b.maintenancePeriod and
|
||||
a.logLevel == b.logLevel and
|
||||
sameContents(a.users, b.users) and
|
||||
@ -114,7 +120,7 @@ proc getOrFail(n: JsonNode, key: string, objName: string = ""): JsonNode =
|
||||
# Configuration parsing code
|
||||
|
||||
proc parseLogLevel*(level: string): Level =
|
||||
let lvlStr = "lvl" & toUpper(level[0]) & level[1..^1]
|
||||
let lvlStr = "lvl" & toUpperAscii(level[0]) & level[1..^1]
|
||||
result = parseEnum[Level](lvlStr)
|
||||
|
||||
proc parseProjectDef*(pJson: JsonNode): ProjectDef =
|
||||
@ -139,10 +145,11 @@ proc parseStrawBossConfig*(jsonCfg: JsonNode): StrawBossConfig =
|
||||
result = StrawBossConfig(
|
||||
buildDataDir: jsonCfg.getIfExists("buildDataDir").getStr("build-data"),
|
||||
authSecret: jsonCfg.getOrFail("authSecret", "strawboss config").getStr,
|
||||
debug: jsonCfg.getIfExists("debug").getBVal(false),
|
||||
pwdCost: int8(jsonCfg.getOrFail("pwdCost", "strawboss config").getNum),
|
||||
debug: jsonCfg.getIfExists("debug").getBool(false),
|
||||
port: int(jsonCfg.getIfExists("port").getInt(8180)),
|
||||
pwdCost: int8(jsonCfg.getOrFail("pwdCost", "strawboss config").getInt),
|
||||
projects: jsonCfg.getIfExists("projects").getElems.mapIt(parseProjectDef(it)),
|
||||
maintenancePeriod: int(jsonCfg.getIfExists("maintenancePeriod").getNum(10000)),
|
||||
maintenancePeriod: int(jsonCfg.getIfExists("maintenancePeriod").getInt(10000)),
|
||||
logLevel: parseLogLevel(jsonCfg.getIfExists("logLevel").getStr("info")),
|
||||
users: users)
|
||||
|
||||
@ -166,14 +173,15 @@ proc loadProjectConfig*(cfgFile: string): ProjectConfig =
|
||||
var steps = initTable[string, Step]()
|
||||
for sName, pJson in jsonCfg.getOrFail("steps", "project configuration").getFields:
|
||||
steps[sName] = Step(
|
||||
name: sName,
|
||||
workingDir: pJson.getIfExists("workingDir").getStr("."),
|
||||
stepCmd: pJson.getIfExists("stepCmd").getStr("NOT GIVEN"),
|
||||
depends: pJson.getIfExists("depends").getElems.mapIt(it.getStr),
|
||||
artifacts: pJson.getIfExists("artifacts").getElems.mapIt(it.getStr),
|
||||
cmdInput: pJson.getIfExists("cmdInput").getElems.mapIt(it.getStr),
|
||||
expectedEnv: pJson.getIfExists("expectedEnv").getElems.mapIt(it.getStr),
|
||||
dontSkip: pJson.getIfExists("dontSkip").getBVal(false))
|
||||
name: sName,
|
||||
workingDir: pJson.getIfExists("workingDir").getStr("."),
|
||||
stepCmd: pJson.getIfExists("stepCmd").getStr("NOT GIVEN"),
|
||||
depends: pJson.getIfExists("depends").getElems.mapIt(it.getStr),
|
||||
artifacts: pJson.getIfExists("artifacts").getElems.mapIt(it.getStr),
|
||||
cmdInput: pJson.getIfExists("cmdInput").getElems.mapIt(it.getStr),
|
||||
expectedEnv: pJson.getIfExists("expectedEnv").getElems.mapIt(it.getStr),
|
||||
containerImage: pJson.getIfExists("containerImage").getStr(""),
|
||||
dontSkip: pJson.getIfExists("dontSkip").getBool(false))
|
||||
|
||||
# cmdInput and stepCmd are related, so we have a conditional defaulting.
|
||||
# Four possibilities:
|
||||
@ -190,6 +198,7 @@ proc loadProjectConfig*(cfgFile: string): ProjectConfig =
|
||||
|
||||
result = ProjectConfig(
|
||||
name: jsonCfg.getOrFail("name", "project configuration").getStr,
|
||||
containerImage: jsonCfg.getIfExists("containerImage").getStr(""),
|
||||
versionCmd: jsonCfg.getIfExists("versionCmd").getStr("git describe --tags --always"),
|
||||
steps: steps)
|
||||
|
||||
@ -213,7 +222,7 @@ proc parseRunRequest*(reqJson: JsonNode): RunRequest =
|
||||
buildRef: reqJson.getOrFail("buildRef", "RunRequest").getStr,
|
||||
workspaceDir: reqJson.getOrFail("workspaceDir", "RunRequest").getStr,
|
||||
timestamp: times.parse(reqJson.getOrFail("timestamp", "RunRequest").getStr, ISO_TIME_FORMAT),
|
||||
forceRebuild: reqJson.getOrFail("forceRebuild", "RunRequest").getBVal)
|
||||
forceRebuild: reqJson.getOrFail("forceRebuild", "RunRequest").getBool)
|
||||
|
||||
proc loadRunRequest*(reqFilePath: string): RunRequest =
|
||||
if not existsFile(reqFilePath):
|
||||
@ -255,6 +264,9 @@ proc `%`*(s: Step): JsonNode =
|
||||
"expectedEnv": s.expectedEnv,
|
||||
"dontSkip": s.dontSkip }
|
||||
|
||||
if s.containerImage.len > 0:
|
||||
result["containerImage"] = %s.containerImage
|
||||
|
||||
proc `%`*(p: ProjectConfig): JsonNode =
|
||||
result = %* {
|
||||
"name": p.name,
|
||||
@ -264,6 +276,9 @@ proc `%`*(p: ProjectConfig): JsonNode =
|
||||
for name, step in p.steps:
|
||||
result["steps"][name] = %step
|
||||
|
||||
if p.containerImage.len > 0:
|
||||
result["containerImage"] = %p.containerImage
|
||||
|
||||
proc `%`*(req: RunRequest): JsonNode =
|
||||
result = %* {
|
||||
"runId": $(req.runId),
|
||||
@ -284,10 +299,11 @@ proc `%`*(cfg: StrawBossConfig): JsonNode =
|
||||
"buildDataDir": cfg.buildDataDir,
|
||||
"authSecret": cfg.authSecret,
|
||||
"debug": cfg.debug,
|
||||
"port": cfg.port,
|
||||
"projects": %cfg.projects,
|
||||
"pwdCost": cfg.pwdCost,
|
||||
"maintenancePeriod": cfg.maintenancePeriod,
|
||||
"logLevel": toLower(($cfg.logLevel)[3]) & ($cfg.logLevel)[4..^1],
|
||||
"logLevel": toLowerAscii(($cfg.logLevel)[3]) & ($cfg.logLevel)[4..^1],
|
||||
"users": %cfg.users }
|
||||
|
||||
proc `%`*(run: Run): JsonNode =
|
||||
@ -296,8 +312,15 @@ proc `%`*(run: Run): JsonNode =
|
||||
"request": %run.request,
|
||||
"status": %run.status }
|
||||
|
||||
proc `%`*(logs: RunLogs): JsonNode =
|
||||
result = %* {
|
||||
"runId": $logs.runId,
|
||||
"stdout": %logs.stdout,
|
||||
"stderr": %logs.stderr }
|
||||
|
||||
proc `$`*(s: BuildStatus): string = result = pretty(%s)
|
||||
proc `$`*(req: RunRequest): string = result = pretty(%req)
|
||||
proc `$`*(pd: ProjectDef): string = result = pretty(%pd)
|
||||
proc `$`*(cfg: StrawBossConfig): string = result = pretty(%cfg)
|
||||
proc `$`*(run: Run): string = result = pretty(%run)
|
||||
proc `$`*(logs: RunLogs): string = result = pretty(%logs)
|
||||
|
@ -1,9 +1,9 @@
|
||||
import cliutils, logging, json, os, osproc, sequtils, streams,
|
||||
strtabs, strutils, tables, times, uuids
|
||||
import cliutils, logging, json, os, ospaths, osproc, sequtils, streams,
|
||||
strtabs, strutils, tables, tempfile, times, uuids
|
||||
|
||||
import ./configuration
|
||||
import nre except toSeq
|
||||
from posix import link
|
||||
from posix import link, realpath
|
||||
from algorithm import sorted
|
||||
|
||||
type
|
||||
@ -27,7 +27,7 @@ type
|
||||
projectName*: string
|
||||
process*: Process
|
||||
|
||||
NotFoundException = object of Exception
|
||||
NotFoundException* = object of Exception
|
||||
|
||||
proc newCopy(w: Workspace): Workspace =
|
||||
var newEnv: StringTableRef = newStringTable()
|
||||
@ -49,20 +49,64 @@ proc newCopy(w: Workspace): Workspace =
|
||||
step: w.step,
|
||||
version: w.version)
|
||||
|
||||
# Logging wrappers around
|
||||
const WKSP_ROOT = "/strawboss/wksp"
|
||||
const ARTIFACTS_ROOT = "/strawboss/artifacts"
|
||||
|
||||
proc execWithOutput(wksp: Workspace, cmd, workingDir: string,
|
||||
args: openarray[string], env: StringTableRef,
|
||||
options: set[ProcessOption] = {poUsePath},
|
||||
msgCB: HandleProcMsgCB = nil):
|
||||
tuple[output: TaintedString, error: TaintedString, exitCode: int]
|
||||
{.tags: [ExecIOEffect, ReadIOEffect, RootEffect] .} =
|
||||
|
||||
# Look for a container image to use
|
||||
let containerImage =
|
||||
if wksp.step.containerImage.len > 0: wksp.step.containerImage
|
||||
else: wksp.project.containerImage
|
||||
|
||||
if containerImage.len == 0:
|
||||
return execWithOutput(cmd, workingDir, args, env, options, msgCB)
|
||||
|
||||
var fullEnv = newStringTable(modeCaseSensitive)
|
||||
for k,v in env: fullEnv[k] = v
|
||||
|
||||
var fullArgs = @["run", "-w", WKSP_ROOT, "-v", wksp.dir & ":" & WKSP_ROOT ]
|
||||
|
||||
if wksp.step.name.len == 0:
|
||||
for depStep in wksp.step.depends:
|
||||
fullArgs.add(["-v", ARTIFACTS_ROOT / depStep])
|
||||
fullEnv[depStep & "_DIR"] = ARTIFACTS_ROOT / depStep
|
||||
|
||||
let envFile = mkstemp().name
|
||||
writeFile(envFile, toSeq(fullEnv.pairs()).mapIt(it[0] & "=" & it[1]).join("\n"))
|
||||
|
||||
fullArgs.add(["--env-file", envFile])
|
||||
fullArgs.add(containerImage)
|
||||
fullArgs.add(cmd)
|
||||
|
||||
echo "Executing docker command: \n\t" & "docker " & $(fullArgs & @args)
|
||||
return execWithOutput("docker", wksp.dir, fullArgs & @args, fullEnv, options, msgCB)
|
||||
|
||||
proc exec(w: Workspace, cmd, workingDir: string, args: openarray[string],
|
||||
env: StringTableRef, options: set[ProcessOption] = {poUsePath},
|
||||
msgCB: HandleProcMsgCB = nil): int
|
||||
{.tags: [ExecIOEffect, ReadIOEffect, RootEffect] .} =
|
||||
|
||||
return execWithOutput(w, cmd, workingDir, args, env, options, msgCB)[2]
|
||||
|
||||
# Utility methods for Workspace activities
|
||||
proc sendStatusMsg(oh: HandleProcMsgCB, status: BuildStatus): void =
|
||||
if not oh.isNil:
|
||||
oh.sendMsg($status.state & ": " & status.details, nil, "strawboss")
|
||||
oh.sendMsg($status.state & ": " & status.details, "", "strawboss")
|
||||
|
||||
proc sendMsg(w: Workspace, msg: TaintedString): void =
|
||||
w.outputHandler.sendMsg(msg, nil, "strawboss")
|
||||
w.outputHandler.sendMsg(msg, "", "strawboss")
|
||||
|
||||
proc sendMsg(w: Workspace, l: Level, msg: TaintedString): void =
|
||||
if l >= w.logLevel: w.sendMsg(msg)
|
||||
|
||||
proc sendErrMsg(w: Workspace, msg: TaintedString): void =
|
||||
w.outputHandler.sendMsg(nil, msg, "strawboss")
|
||||
w.outputHandler.sendMsg("", msg, "strawboss")
|
||||
|
||||
proc sendErrMsg(w: Workspace, l: Level, msg: TaintedString): void =
|
||||
if l >= w.logLevel: w.sendErrMsg(msg)
|
||||
@ -79,30 +123,33 @@ proc publishStatus(wksp: Workspace, state: BuildState, details: string): void =
|
||||
## Update the status for a Workspace and publish this status to the
|
||||
## Workspace's status file and any output message handlers.
|
||||
wksp.status = BuildStatus(
|
||||
runId: $wksp.runRequest.runId, state: state, details: details)
|
||||
runId: $wksp.runRequest.runId,
|
||||
state: state,
|
||||
details: details,
|
||||
version: wksp.version)
|
||||
|
||||
# Write to our run directory, and to our version status
|
||||
writeFile(wksp.buildDataDir & "/runs/" &
|
||||
writeFile(wksp.buildDataDir / "runs" /
|
||||
$wksp.runRequest.runId & ".status.json", $wksp.status)
|
||||
|
||||
# If we have our step we can save status to the step status
|
||||
if not wksp.step.name.isNilOrEmpty():
|
||||
let stepStatusDir = wksp.buildDataDir & "/status/" & wksp.step.name
|
||||
if wksp.step.name.len > 0:
|
||||
let stepStatusDir = wksp.buildDataDir / "status" / wksp.step.name
|
||||
if not existsDir(stepStatusDir): createDir(stepStatusDir)
|
||||
writeFile(stepStatusDir & "/" & wksp.version & ".json", $wksp.status)
|
||||
writeFile(stepStatusDir / wksp.version & ".json", $wksp.status)
|
||||
|
||||
# If we were asked to build a ref that is not the version directly (like
|
||||
# "master" or something), then let's also save our status under that name.
|
||||
# We're probably overwriting a prior status, but that's OK.
|
||||
if wksp.runRequest.buildRef != wksp.version:
|
||||
writeFile(stepStatusDir & "/" & wksp.runRequest.buildRef & ".json",
|
||||
writeFile(stepStatusDir / wksp.runRequest.buildRef & ".json",
|
||||
$wksp.status)
|
||||
|
||||
wksp.outputHandler.sendStatusMsg(wksp.status)
|
||||
|
||||
proc ensureProjectDirsExist(cfg: StrawBossConfig, p: ProjectDef): void =
|
||||
for subdir in ["configurations", "runs", "status", "artifacts"]:
|
||||
let fullPath = cfg.buildDataDir & "/" & p.name & "/" & subdir
|
||||
let fullPath = cfg.buildDataDir / p.name / subdir
|
||||
if not existsDir(fullPath):
|
||||
createDir(fullPath)
|
||||
|
||||
@ -112,9 +159,9 @@ proc getProject*(cfg: StrawBossConfig, projectName: string): ProjectDef =
|
||||
## Get a project definition by name from the service configuration
|
||||
let candidates = cfg.projects.filterIt(it.name == projectName)
|
||||
if candidates.len == 0:
|
||||
raise newException(KeyError, "no project named " & projectName)
|
||||
raise newException(NotFoundException, "no project named " & projectName)
|
||||
elif candidates.len > 1:
|
||||
raise newException(KeyError, "multiple projects named " & projectName)
|
||||
raise newException(NotFoundException, "multiple projects named " & projectName)
|
||||
else: result = candidates[0]
|
||||
|
||||
proc setProject*(cfg: var StrawBossConfig, projectName: string, newDef: ProjectDef): void =
|
||||
@ -136,23 +183,67 @@ proc listVersions*(cfg: StrawBossConfig, projectName: string): seq[string] =
|
||||
ensureProjectDirsExist(cfg, project)
|
||||
|
||||
let versionFiles = filesMatching(
|
||||
cfg.buildDataDir & "/" & project.name & "/configurations/*.json")
|
||||
cfg.buildDataDir / project.name / "configurations/*.json")
|
||||
|
||||
result = versionFiles.map(proc(s: string): string =
|
||||
let slashIdx = s.rfind('/')
|
||||
result = s[(slashIdx + 1)..^6])
|
||||
|
||||
proc getBuildStatus*(cfg: StrawBossConfig,
|
||||
projectName, stepName, buildRef: string): BuildStatus =
|
||||
|
||||
let project = cfg.getProject(projectName)
|
||||
|
||||
let statusFile = cfg.buildDataDir / project.name / "status" /
|
||||
stepName / buildRef & ".json"
|
||||
|
||||
if not existsFile(statusFile):
|
||||
raise newException(NotFoundException,
|
||||
stepName & " has never been built for " & projectName & "@" & buildRef)
|
||||
|
||||
result = loadBuildStatus(statusFile)
|
||||
|
||||
|
||||
proc listArtifacts*(cfg: StrawBossConfig,
|
||||
projectName, stepName, version: string): seq[string] =
|
||||
## List the artifacts that have been built for a step.
|
||||
|
||||
let project = cfg.getProject(projectName)
|
||||
|
||||
ensureProjectDirsExist(cfg, project)
|
||||
|
||||
let buildStatus = cfg.getBuildStatus(projectName, stepName, version)
|
||||
|
||||
if buildStatus.state != BuildState.complete:
|
||||
raise newException(NotFoundException, "step " & stepName &
|
||||
" has never been successfully built for " & projectName & "@" & version)
|
||||
|
||||
result = filesMatching(
|
||||
cfg.buildDataDir / project.name / "artifacts" / stepName / version / "*")
|
||||
.mapIt(it.extractFilename)
|
||||
|
||||
proc getArtifactPath*(cfg: StrawBossConfig,
|
||||
projectName, stepName, version, artifactName: string): string =
|
||||
|
||||
let artifacts = cfg.listArtifacts(projectName, stepName, version)
|
||||
if not artifacts.contains(artifactName):
|
||||
raise newException(NotFoundException, "no artifact named " &
|
||||
artifactName & " exists for step " & stepName & " in project " &
|
||||
projectName & "@" & version)
|
||||
|
||||
result = cfg.buildDataDir / projectName / "artifacts" / stepName / version / artifactName
|
||||
|
||||
proc existsRun*(cfg: StrawBossConfig, projectName, runId: string): bool =
|
||||
existsFile(cfg.buildDataDir & "/" & projectName & "/runs/" & runId & ".request.json")
|
||||
existsFile(cfg.buildDataDir / projectName / "runs" / runId & ".request.json")
|
||||
|
||||
proc getRun*(cfg: StrawBossConfig, projectName, runId: string): Run =
|
||||
let project = cfg.getProject(projectName)
|
||||
let runsPath = cfg.buildDataDir & "/" & project.name & "/runs"
|
||||
let runsPath = cfg.buildDataDir / project.name / "runs"
|
||||
|
||||
try: result = Run(
|
||||
id: parseUUID(runId),
|
||||
request: loadRunRequest(runsPath & "/" & runId & ".request.json"),
|
||||
status: loadBuildStatus(runsPath & "/" & runId & ".status.json"))
|
||||
request: loadRunRequest(runsPath / runId & ".request.json"),
|
||||
status: loadBuildStatus(runsPath / runId & ".status.json"))
|
||||
except: raiseEx "unable to load run information for id " & runId
|
||||
|
||||
proc listRuns*(cfg: StrawBossConfig, projectName: string): seq[Run] =
|
||||
@ -160,29 +251,25 @@ proc listRuns*(cfg: StrawBossConfig, projectName: string): seq[Run] =
|
||||
let project = cfg.getProject(projectName)
|
||||
ensureProjectDirsExist(cfg, project)
|
||||
|
||||
let runsPath = cfg.buildDataDir & "/" & project.name & "/runs"
|
||||
let reqPaths = filesMatching(runsPath & "/*.request.json")
|
||||
let runsPath = cfg.buildDataDir / project.name / "runs"
|
||||
let reqPaths = filesMatching(runsPath / "*.request.json")
|
||||
|
||||
result = reqPaths.map(proc(reqPath: string): Run =
|
||||
let runId = reqPath[(runsPath.len + 1)..^14]
|
||||
result = Run(
|
||||
id: parseUUID(runId),
|
||||
request: loadRunRequest(reqPath),
|
||||
status: loadBuildStatus(runsPath & "/" & runId & ".status.json")))
|
||||
|
||||
proc getBuildStatus*(cfg: StrawBossConfig,
|
||||
projectName, stepName, buildRef: string): BuildStatus =
|
||||
status: loadBuildStatus(runsPath / runId & ".status.json")))
|
||||
|
||||
proc getLogs*(cfg: StrawBossConfig, projectname, runId: string): RunLogs =
|
||||
let project = cfg.getProject(projectName)
|
||||
let runsPath = cfg.buildDataDir / project.name / "runs"
|
||||
|
||||
let statusFile = cfg.buildDataDir & "/" & project.name & "/status/" &
|
||||
stepName & "/" & buildRef & ".json"
|
||||
|
||||
if not existsFile(statusFile):
|
||||
raise newException(NotFoundException,
|
||||
stepName & " has never been built for reference '" & buildRef)
|
||||
|
||||
result = loadBuildStatus(statusFile)
|
||||
try: result = RunLogs(
|
||||
runId: parseUUID(runId),
|
||||
stdout: toSeq(lines(runsPath / runId & ".stdout.log")),
|
||||
stderr: toSeq(lines(runsPath / runId & ".stderr.log")))
|
||||
except: raiseEx "unable to load logs for run " & runId
|
||||
|
||||
proc getProjectConfig*(cfg: StrawBossConfig,
|
||||
projectName, version: string): ProjectConfig =
|
||||
@ -193,10 +280,10 @@ proc getProjectConfig*(cfg: StrawBossConfig,
|
||||
# If they didn't give us a version, let try to figure out what is the latest one.
|
||||
var confFilePath: string
|
||||
|
||||
if version.isNilOrEmpty:
|
||||
if version.len == 0:
|
||||
|
||||
let candidatePaths = filesMatching(
|
||||
cfg.buildDataDir & "/" & project.name & "/configurations/*.json")
|
||||
cfg.buildDataDir / project.name / "configurations/*.json")
|
||||
|
||||
if candidatePaths.len == 0:
|
||||
raise newException(NotFoundException,
|
||||
@ -212,8 +299,7 @@ proc getProjectConfig*(cfg: StrawBossConfig,
|
||||
# If they did, let's try to load that
|
||||
else:
|
||||
confFilePath =
|
||||
cfg.buildDataDir & "/" & project.name & "/configurations/" &
|
||||
version & ".json"
|
||||
cfg.buildDataDir / project.name / "configurations" / version & ".json"
|
||||
|
||||
if not existsFile(confFilePath):
|
||||
raise newException(NotFoundException,
|
||||
@ -228,7 +314,7 @@ proc setupProject(wksp: Workspace) =
|
||||
wksp.sendMsg(lvlDebug, "Setting up project.")
|
||||
|
||||
# Clone the project into the $temp directory
|
||||
let cloneArgs = ["clone", wksp.projectDef.repo, wksp.dir]
|
||||
let cloneArgs = @["clone", wksp.projectDef.repo, wksp.dir]
|
||||
wksp.sendMsg(lvlDebug, "git " & $cloneArgs)
|
||||
|
||||
let cloneResult = exec("git", ".", cloneArgs, wksp.env, {poUsePath},
|
||||
@ -238,7 +324,7 @@ proc setupProject(wksp: Workspace) =
|
||||
raiseEx "unable to clone repo for '" & wksp.projectDef.name & "'"
|
||||
|
||||
# Checkout the requested ref
|
||||
let checkoutArgs = ["checkout", wksp.buildRef]
|
||||
let checkoutArgs = @["checkout", wksp.buildRef]
|
||||
wksp.sendMsg(lvlDebug, "git " & $checkoutArgs)
|
||||
|
||||
let checkoutResult = exec("git", wksp.dir, checkoutArgs,
|
||||
@ -249,7 +335,7 @@ proc setupProject(wksp: Workspace) =
|
||||
" for '" & wksp.projectDef.name & "'"
|
||||
|
||||
# Find the strawboss project configuration
|
||||
let projCfgFile = wksp.dir & "/" & wksp.projectDef.cfgFilePath
|
||||
let projCfgFile = wksp.dir / wksp.projectDef.cfgFilePath
|
||||
wksp.sendMsg(lvlDebug, "Looking for project configuration at '" & projCfgFile & "'")
|
||||
if not existsFile(projCfgFile):
|
||||
raiseEx "Cannot find strawboss project configuration in the project " &
|
||||
@ -283,21 +369,20 @@ proc doStep*(wksp: Workspace, step: Step): BuildStatus =
|
||||
|
||||
wksp.step = step
|
||||
|
||||
let artifactsDir = wksp.buildDataDir & "/artifacts/" &
|
||||
step.name & "/" & wksp.version
|
||||
let artifactsDir = wksp.buildDataDir / "artifacts" / step.name / wksp.version
|
||||
|
||||
if not existsDir(artifactsDir): createDir(artifactsDir)
|
||||
|
||||
# Have we tried to build this before and are we caching the results?
|
||||
let statusFilePath = wksp.buildDataDir & "/status/" & step.name &
|
||||
"/" & wksp.version & ".json"
|
||||
let statusFilePath = wksp.buildDataDir / "status" / step.name /
|
||||
wksp.version & ".json"
|
||||
|
||||
if existsFile(statusFilePath) and not step.dontSkip:
|
||||
let prevStatus = loadBuildStatus(statusFilePath)
|
||||
|
||||
# If we succeeded last time, no need to rebuild
|
||||
if prevStatus.state == BuildState.complete:
|
||||
wksp.publishStatus(BuildState.complete,
|
||||
wksp.publishStatus(BuildState.stepComplete,
|
||||
"Skipping step '" & step.name & "' for version '" & wksp.version &
|
||||
"': already completed.")
|
||||
return wksp.status
|
||||
@ -328,7 +413,7 @@ proc doStep*(wksp: Workspace, step: Step): BuildStatus =
|
||||
# Run that step (may get skipped)
|
||||
let runStatus = doStep(core.newCopy(wksp), depStep)
|
||||
|
||||
if not (runStatus.state == BuildState.complete):
|
||||
if not (runStatus.state == BuildState.stepComplete):
|
||||
raiseEx "dependent step failed: " & depStep.name
|
||||
|
||||
wksp.sendMsg(lvlDebug, "dependent step '" & depStep.name &
|
||||
@ -336,8 +421,8 @@ proc doStep*(wksp: Workspace, step: Step): BuildStatus =
|
||||
|
||||
# Add the artifacts directory for the dependent step to our env so that
|
||||
# further steps can reference it via $<stepname>_DIR
|
||||
wksp.env[depStep.name & "_DIR"] = wksp.buildDataDir & "/artifacts/" &
|
||||
dep & "/" & wksp.version
|
||||
wksp.env[depStep.name & "_DIR"] = wksp.buildDataDir / "artifacts" /
|
||||
dep / wksp.version
|
||||
|
||||
# Run the step command, piping in cmdInput
|
||||
let stepCmd = wksp.resolveEnvVars(step.stepCmd)
|
||||
@ -345,7 +430,7 @@ proc doStep*(wksp: Workspace, step: Step): BuildStatus =
|
||||
else: stepCmd
|
||||
wksp.sendMsg step.name & ": starting stepCmd: " & stepCmd
|
||||
let cmdProc = startProcess(stepCmd,
|
||||
wksp.dir & "/" & step.workingDir, [], wksp.env, {poUsePath, poEvalCommand})
|
||||
wksp.dir / step.workingDir, [], wksp.env, {poUsePath, poEvalCommand})
|
||||
|
||||
let cmdInStream = inputStream(cmdProc)
|
||||
|
||||
@ -353,7 +438,7 @@ proc doStep*(wksp: Workspace, step: Step): BuildStatus =
|
||||
for line in step.cmdInput: cmdInStream.writeLine(wksp.resolveEnvVars(line))
|
||||
cmdInStream.flush()
|
||||
cmdInStream.close()
|
||||
|
||||
|
||||
let cmdResult = waitFor(cmdProc, wksp.outputHandler, cmdName)
|
||||
|
||||
if cmdResult != 0:
|
||||
@ -367,16 +452,16 @@ proc doStep*(wksp: Workspace, step: Step): BuildStatus =
|
||||
let artifactName = artifactPath[(artifactPath.rfind("/")+1)..^1]
|
||||
try:
|
||||
wksp.sendMsg "copy " &
|
||||
wksp.dir & "/" & step.workingDir & "/" & artifactPath & " -> " &
|
||||
artifactsDir & "/" & artifactName
|
||||
wksp.dir / step.workingDir / artifactPath & " -> " &
|
||||
artifactsDir / artifactName
|
||||
|
||||
copyFileWithPermissions(wksp.dir & "/" & step.workingDir & "/" &
|
||||
artifactPath, artifactsDir & "/" & artifactName)
|
||||
copyFileWithPermissions(wksp.dir / step.workingDir / artifactPath,
|
||||
artifactsDir / artifactName)
|
||||
except:
|
||||
raiseEx "step " & step.name & " failed: unable to copy artifact " &
|
||||
artifactPath & ":\n" & getCurrentExceptionMsg()
|
||||
|
||||
wksp.publishStatus(BuildState.complete, "")
|
||||
wksp.publishStatus(BuildState.stepComplete, "step " & step.name & " complete")
|
||||
result = wksp.status
|
||||
|
||||
proc run*(cfg: StrawBossConfig, req: RunRequest,
|
||||
@ -388,7 +473,8 @@ proc run*(cfg: StrawBossConfig, req: RunRequest,
|
||||
result = BuildStatus(
|
||||
runId: $req.runId,
|
||||
state: BuildState.setup,
|
||||
details: "initializing build workspace")
|
||||
details: "initializing build workspace",
|
||||
version: "")
|
||||
outputHandler.sendStatusMsg(result)
|
||||
|
||||
var wksp: Workspace
|
||||
@ -401,8 +487,8 @@ proc run*(cfg: StrawBossConfig, req: RunRequest,
|
||||
ensureProjectDirsExist(cfg, projectDef)
|
||||
|
||||
# Update our run status
|
||||
let runDir = cfg.buildDataDir & "/" & projectDef.name & "/runs"
|
||||
writeFile(runDir & "/" & $req.runId & ".status.json", $result)
|
||||
let runDir = cfg.buildDataDir / projectDef.name / "runs"
|
||||
writeFile(runDir / $req.runId & ".status.json", $result)
|
||||
|
||||
# Read in the existing system environment
|
||||
var env = loadEnv()
|
||||
@ -413,15 +499,15 @@ proc run*(cfg: StrawBossConfig, req: RunRequest,
|
||||
if not existsDir(req.workspaceDir): createDir(req.workspaceDir)
|
||||
|
||||
# Setup our STDOUT and STDERR files
|
||||
let stdoutFile = open(runDir & "/" & $req.runId & ".stdout.log", fmWrite)
|
||||
let stderrFile = open(runDir & "/" & $req.runId & ".stderr.log", fmWrite)
|
||||
let stdoutFile = open(runDir / $req.runId & ".stdout.log", fmWrite)
|
||||
let stderrFile = open(runDir / $req.runId & ".stderr.log", fmWrite)
|
||||
|
||||
let logFilesOH = makeProcMsgHandler(stdoutFile, stderrFile)
|
||||
|
||||
wksp = Workspace(
|
||||
buildDataDir: cfg.buildDataDir & "/" & projectDef.name,
|
||||
buildDataDir: cfg.buildDataDir / projectDef.name,
|
||||
buildRef:
|
||||
if req.buildRef != nil and req.buildRef.len > 0: req.buildRef
|
||||
if req.buildRef.len > 0: req.buildRef
|
||||
else: projectDef.defaultBranch,
|
||||
dir: req.workspaceDir,
|
||||
env: env,
|
||||
@ -433,12 +519,12 @@ proc run*(cfg: StrawBossConfig, req: RunRequest,
|
||||
runRequest: req,
|
||||
status: result,
|
||||
step: Step(),
|
||||
version: nil)
|
||||
version: "")
|
||||
|
||||
except:
|
||||
when not defined(release): echo getCurrentException().getStackTrace()
|
||||
result = BuildStatus(runId: $req.runId, state: BuildState.failed,
|
||||
details: getCurrentExceptionMsg())
|
||||
details: getCurrentExceptionMsg(), version: "")
|
||||
try: outputHandler.sendStatusMsg(result)
|
||||
except: discard ""
|
||||
return
|
||||
@ -452,8 +538,8 @@ proc run*(cfg: StrawBossConfig, req: RunRequest,
|
||||
# Update our cache of project configurations.
|
||||
# TODO: what happens if this fails?
|
||||
copyFileWithPermissions(
|
||||
wksp.dir & "/" & wksp.projectDef.cfgFilePath,
|
||||
wksp.buildDataDir & "/configurations/" & wksp.version & ".json")
|
||||
wksp.dir / wksp.projectDef.cfgFilePath,
|
||||
wksp.buildDataDir / "configurations" / wksp.version & ".json")
|
||||
|
||||
# Find the requested step
|
||||
if not wksp.project.steps.hasKey(req.stepName):
|
||||
@ -462,7 +548,12 @@ proc run*(cfg: StrawBossConfig, req: RunRequest,
|
||||
|
||||
if req.forceRebuild: step.dontSkip = true
|
||||
|
||||
result = doStep(wksp, step)
|
||||
var buildStatus = doStep(wksp, step)
|
||||
if buildStatus.state == BuildState.stepComplete:
|
||||
buildStatus.state = BuildState.complete
|
||||
wksp.publishStatus(buildStatus.state, "all steps complete")
|
||||
|
||||
result = wksp.status
|
||||
|
||||
except:
|
||||
when not defined(release): echo getCurrentException().getStackTrace()
|
||||
@ -471,25 +562,26 @@ proc run*(cfg: StrawBossConfig, req: RunRequest,
|
||||
wksp.publishStatus(BuildState.failed, msg)
|
||||
result = wksp.status
|
||||
except:
|
||||
result = BuildStatus(runId: $req.runId, state: BuildState.failed, details: msg)
|
||||
result = BuildStatus(runId: $req.runId, state: BuildState.failed,
|
||||
details: msg, version: "")
|
||||
try: outputHandler.sendStatusMsg(result)
|
||||
except: discard ""
|
||||
|
||||
finally:
|
||||
if wksp != nil:
|
||||
# Close open files
|
||||
# Close open files
|
||||
for f in wksp.openedFiles:
|
||||
try: close(f)
|
||||
except: discard ""
|
||||
|
||||
proc spawnWorker*(cfg: StrawBossConfig, req: RunRequest):
|
||||
tuple[status: BuildStatus, worker: Worker] =
|
||||
|
||||
|
||||
# Find the project definition (will throw appropriate exceptions)
|
||||
let projectDef = cfg.getProject(req.projectName)
|
||||
let runDir = cfg.buildDataDir & "/" & projectDef.name & "/runs"
|
||||
let reqFile = runDir & "/" & $req.runId & ".request.json"
|
||||
let statusFile = runDir & "/" & $req.runId & ".status.json"
|
||||
let runDir = cfg.buildDataDir / projectDef.name / "runs"
|
||||
let reqFile = runDir / $req.runId & ".request.json"
|
||||
let statusFile = runDir / $req.runId & ".status.json"
|
||||
|
||||
try:
|
||||
# Make sure the build data directories for this project exist.
|
||||
@ -502,7 +594,8 @@ proc spawnWorker*(cfg: StrawBossConfig, req: RunRequest):
|
||||
let queuedStatus = BuildStatus(
|
||||
runId: $req.runId,
|
||||
state: BuildState.queued,
|
||||
details: "request queued for execution")
|
||||
details: "request queued for execution",
|
||||
version: "")
|
||||
writeFile(statusFile, $queuedStatus)
|
||||
|
||||
var args = @["run", reqFile, "-c", cfg.filePath]
|
||||
@ -517,8 +610,9 @@ proc spawnWorker*(cfg: StrawBossConfig, req: RunRequest):
|
||||
|
||||
except:
|
||||
let exMsg = "run request rejected: " & getCurrentExceptionMsg()
|
||||
raiseEx exMsg
|
||||
try:
|
||||
writeFile(statusFile,
|
||||
$(BuildStatus(runId: $req.runId, state: BuildState.rejected, details: exMsg)))
|
||||
$(BuildStatus(runId: $req.runId, state: BuildState.rejected,
|
||||
details: exMsg, version: "")))
|
||||
except: discard ""
|
||||
raiseEx exMsg
|
||||
|
@ -1,7 +1,13 @@
|
||||
import asyncdispatch, bcrypt, cliutils, jester, json, jwt, logging,
|
||||
os, osproc, sequtils, strutils, tempfile, times, unittest, uuids
|
||||
import asyncdispatch, bcrypt, cliutils, jester, json, jwt, logging, md5,
|
||||
options, os, osproc, sequtils, strutils, tempfile, times, unittest, uuids
|
||||
|
||||
import ./configuration, ./core
|
||||
from mimetypes import getMimeType
|
||||
from asyncfile import openAsync, readToStream, close
|
||||
from asyncnet import send
|
||||
from re import re, find
|
||||
from timeutils import trimNanoSec
|
||||
|
||||
import ./configuration, ./core, ./version
|
||||
|
||||
type
|
||||
Session = object
|
||||
@ -14,15 +20,38 @@ const JSON = "application/json"
|
||||
proc newSession*(user: UserRef): Session =
|
||||
result = Session(
|
||||
user: user,
|
||||
issuedAt: getTime(),
|
||||
expires: daysForward(7).toTime())
|
||||
issuedAt: getTime().local.trimNanoSec.toTime,
|
||||
expires: daysForward(7).trimNanoSec.toTime)
|
||||
|
||||
proc makeJsonResp(status: HttpCode, details: string = ""): string =
|
||||
result = $(%* {
|
||||
"statusCode": status.int,
|
||||
"status": $status,
|
||||
"details": details
|
||||
})
|
||||
template halt(code: HttpCode,
|
||||
headers: RawHeaders,
|
||||
content: string): typed =
|
||||
## Immediately replies with the specified request. This means any further
|
||||
## code will not be executed after calling this template in the current
|
||||
## route.
|
||||
bind TCActionSend, newHttpHeaders
|
||||
result[0] = CallbackAction.TCActionSend
|
||||
result[1] = code
|
||||
result[2] = some(headers)
|
||||
result[3] = content
|
||||
result.matched = true
|
||||
break allRoutes
|
||||
|
||||
template jsonResp(code: HttpCode, details: string = "", headers: RawHeaders = @{:} ) =
|
||||
halt(
|
||||
code,
|
||||
headers & @{"Content-Type": JSON},
|
||||
$(%* {
|
||||
"statusCode": code.int,
|
||||
"status": $code,
|
||||
"details": details
|
||||
})
|
||||
)
|
||||
|
||||
template json500Resp(ex: ref Exception, details: string = ""): void =
|
||||
when not defined(release): debug ex.getStackTrace()
|
||||
error details & ":\n" & ex.msg
|
||||
jsonResp(Http500)
|
||||
|
||||
proc toJWT*(cfg: StrawBossConfig, session: Session): string =
|
||||
## Make a JST token for this session.
|
||||
@ -30,8 +59,8 @@ proc toJWT*(cfg: StrawBossConfig, session: Session): string =
|
||||
header: JOSEHeader(alg: HS256, typ: "jwt"),
|
||||
claims: toClaims(%*{
|
||||
"sub": session.user.name,
|
||||
"iat": session.issuedAt.toSeconds().int,
|
||||
"exp": session.expires.toSeconds().int }))
|
||||
"iat": session.issuedAt.toUnix.int,
|
||||
"exp": session.expires.toUnix.int }))
|
||||
|
||||
jwt.sign(cfg.authSecret)
|
||||
result = $jwt
|
||||
@ -46,13 +75,12 @@ proc fromJWT*(cfg: StrawBossConfig, strTok: string): Session =
|
||||
# Find the user record (if authenticated)
|
||||
let username = jwt.claims["sub"].node.str
|
||||
let users = cfg.users.filterIt(it.name == username)
|
||||
debug "username: " & username & "\n\tusers: " & $users.mapIt(it.name) & "\n\tall users: " & cfg.users.mapIt(it.name)
|
||||
if users.len != 1: raiseEx "Could not find session user."
|
||||
|
||||
result = Session(
|
||||
user: users[0],
|
||||
issuedAt: fromSeconds(jwt.claims["iat"].node.num),
|
||||
expires: fromSeconds(jwt.claims["exp"].node.num))
|
||||
issuedAt: fromUnix(jwt.claims["iat"].node.num),
|
||||
expires: fromUnix(jwt.claims["exp"].node.num))
|
||||
|
||||
proc extractSession(cfg: StrawBossConfig, request: Request): Session =
|
||||
## Helper to extract a session from a reqest.
|
||||
@ -80,7 +108,7 @@ proc makeAuthToken*(cfg: StrawBossConfig, uname, pwd: string): string =
|
||||
## Given a username and pwd, validate the combination and generate a JWT
|
||||
## token string.
|
||||
|
||||
if uname == nil or pwd == nil:
|
||||
if uname.len == 0 or pwd.len == 0:
|
||||
raiseEx "fields 'username' and 'password' required"
|
||||
|
||||
# find the user record
|
||||
@ -102,7 +130,7 @@ proc makeApiKey*(cfg: StrawBossConfig, uname: string): string =
|
||||
## function for an administrator to setup a unsupervised account (git access
|
||||
## for example).
|
||||
|
||||
if uname == nil: raiseEx "no username given"
|
||||
if uname.len == 0: raiseEx "no username given"
|
||||
|
||||
# find the user record
|
||||
let users = cfg.users.filterIt(it.name == uname)
|
||||
@ -117,21 +145,15 @@ proc makeApiKey*(cfg: StrawBossConfig, uname: string): string =
|
||||
|
||||
template checkAuth() =
|
||||
## Check this request for authentication and authorization information.
|
||||
## Injects two variables into the running context: the session and authed:
|
||||
## true if the request is authorized, false otherwise. If the request is not
|
||||
## authorized, this template sets up the 401 response correctly. The calling
|
||||
## context needs only to return from the route.
|
||||
## Injects the session into the running context. If the request is not
|
||||
## authorized, this template returns an appropriate 401 response.
|
||||
|
||||
var session {.inject.}: Session
|
||||
var authed {.inject.} = false
|
||||
|
||||
try:
|
||||
session = extractSession(cfg, request)
|
||||
authed = true
|
||||
try: session = extractSession(cfg, request)
|
||||
except:
|
||||
debug "Auth failed: " & getCurrentExceptionMsg()
|
||||
response.data[2]["WWW-Authenticate"] = "Bearer"
|
||||
resp(Http401, makeJsonResp(Http401), JSON)
|
||||
jsonResp(Http401, "Unauthorized", @{"WWW-Authenticate": "Bearer"})
|
||||
|
||||
proc start*(cfg: StrawBossConfig): void =
|
||||
|
||||
@ -139,53 +161,64 @@ proc start*(cfg: StrawBossConfig): void =
|
||||
var workers: seq[Worker] = @[]
|
||||
|
||||
settings:
|
||||
port = Port(8180)
|
||||
port = Port(cfg.port)
|
||||
appName = "/api"
|
||||
|
||||
routes:
|
||||
|
||||
get "/version":
|
||||
resp($(%("strawboss v" & SB_VERSION)), JSON)
|
||||
|
||||
post "/auth-token":
|
||||
var uname, pwd: string
|
||||
try:
|
||||
let jsonBody = parseJson(request.body)
|
||||
uname = jsonBody["username"].getStr
|
||||
pwd = jsonBody["password"].getStr
|
||||
except: resp(Http400, makeJsonResp(Http400), JSON)
|
||||
except: jsonResp(Http400)
|
||||
|
||||
try:
|
||||
let authToken = makeAuthToken(cfg, uname, pwd)
|
||||
resp($(%authToken), JSON)
|
||||
except: resp(Http401, makeJsonResp(Http401, getCurrentExceptionMsg()), JSON)
|
||||
except:
|
||||
jsonResp(Http401, getCurrentExceptionMsg())
|
||||
if ctx.cfg.debug: echo getStackTrace()
|
||||
|
||||
get "/verify-auth":
|
||||
checkAuth(); if not authed: return true
|
||||
checkAuth()
|
||||
|
||||
resp(Http200, $(%*{ "username": session.user.name }), JSON)
|
||||
|
||||
get "/projects":
|
||||
## List project summaries (ProjectDefs only)
|
||||
|
||||
checkAuth(); if not authed: return true
|
||||
checkAuth()
|
||||
|
||||
resp($(%cfg.projects), JSON)
|
||||
|
||||
post "/projects":
|
||||
## Create a new project definition
|
||||
|
||||
checkAuth(); if not authed: return true
|
||||
checkAuth()
|
||||
|
||||
# TODO
|
||||
resp(Http501, makeJsonResp(Http501), JSON)
|
||||
jsonResp(Http501)
|
||||
|
||||
get "/project/@projectName":
|
||||
## Return a project's configuration, as well as it's versions.
|
||||
|
||||
checkAuth(); if not authed: return true
|
||||
checkAuth()
|
||||
|
||||
# Make sure we know about that project
|
||||
var projDef: ProjectDef
|
||||
try: projDef = cfg.getProject(@"projectName")
|
||||
except: resp(Http404, makeJsonResp(Http404, getCurrentExceptionMsg()), JSON)
|
||||
except:
|
||||
try: raise getCurrentException()
|
||||
except NotFoundException:
|
||||
jsonResp(Http404, getCurrentExceptionMsg())
|
||||
except:
|
||||
let msg = "unable to load project definition for project " & @"projectName"
|
||||
json500Resp(getCurrentException(), msg)
|
||||
|
||||
var projConf: ProjectConfig
|
||||
try: projConf = getProjectConfig(cfg, @"projectName", "")
|
||||
@ -194,7 +227,7 @@ proc start*(cfg: StrawBossConfig): void =
|
||||
let respJson = newJObject()
|
||||
respJson["definition"] = %projDef
|
||||
respJson["versions"] = %listVersions(cfg, @"projectName")
|
||||
if not projConf.name.isNil:
|
||||
if projConf.name.len > 0:
|
||||
respJson["latestConfig"] = %projConf
|
||||
|
||||
resp(pretty(respJson), JSON)
|
||||
@ -202,87 +235,191 @@ proc start*(cfg: StrawBossConfig): void =
|
||||
get "/project/@projectName/versions":
|
||||
## Get a list of all versions that we have built
|
||||
|
||||
checkAuth(); if not authed: return true
|
||||
checkAuth()
|
||||
|
||||
try: resp($(%listVersions(cfg, @"projectName")), JSON)
|
||||
except:
|
||||
if getCurrentException() is KeyError:
|
||||
resp(Http404, makeJsonResp(Http404, getCurrentExceptionMsg()), JSON)
|
||||
else:
|
||||
when not defined(release): debug getCurrentException().getStackTrace()
|
||||
error "unable to list versions for project " & @"projectName" &
|
||||
":\n" & getCurrentExceptionMsg()
|
||||
resp(Http500, makeJsonResp(Http500, "internal server error"), JSON)
|
||||
try: raise getCurrentException()
|
||||
except NotFoundException:
|
||||
jsonResp(Http404, getCurrentExceptionMsg())
|
||||
except:
|
||||
let msg = "unable to list versions for project " & @"projectName"
|
||||
json500Resp(getCurrentException(), msg)
|
||||
|
||||
get "/project/@projectName/version/@version?":
|
||||
## Get a detailed project record including step definitions (ProjectConfig).
|
||||
|
||||
checkAuth(); if not authed: return true
|
||||
checkAuth()
|
||||
|
||||
# Make sure we know about that project
|
||||
try: resp($(%getProjectConfig(cfg, @"projectName", @"version")), JSON)
|
||||
except: resp(Http404, makeJsonResp(Http404, getCurrentExceptionMsg()), JSON)
|
||||
except: jsonResp(Http404, getCurrentExceptionMsg())
|
||||
|
||||
get "/project/@projectName/runs":
|
||||
## List all runs
|
||||
|
||||
checkAuth(); if not authed: return true
|
||||
checkAuth()
|
||||
|
||||
try: resp($(%listRuns(cfg, @"projectName")), JSON)
|
||||
except: resp(Http404, makeJsonResp(Http404, getCurrentExceptionMsg()), JSON)
|
||||
except: jsonResp(Http404, getCurrentExceptionMsg())
|
||||
|
||||
get "/project/@projectName/runs/active":
|
||||
## List all currently active runs
|
||||
|
||||
checkAuth(); if not authed: return true
|
||||
checkAuth()
|
||||
|
||||
var details = ""
|
||||
try:
|
||||
let activeRuns = workers
|
||||
.filterIt(it.process.running and it.projectName == @"projectName")
|
||||
.mapIt(cfg.getRun(@"projecName", $it.runId));
|
||||
.mapIt(cfg.getRun(@"projectName", $it.runId));
|
||||
resp($(%activeRuns), JSON)
|
||||
except NotFoundException:
|
||||
jsonResp(Http404, getCurrentExceptionMsg())
|
||||
except:
|
||||
if getCurrentException() is KeyError:
|
||||
resp(Http404, makeJsonResp(Http404, getCurrentExceptionMsg()), JSON)
|
||||
else:
|
||||
when not defined(release): debug getCurrentException().getStackTrace()
|
||||
error "problem loading active runs: " & getCurrentExceptionMsg()
|
||||
resp(Http500, makeJsonResp(Http500, "internal server error"), JSON)
|
||||
json500Resp(getCurrentException(), "problem loading active runs")
|
||||
|
||||
get "/project/@projectName/run/@runId":
|
||||
## Details for a specific run
|
||||
|
||||
checkAuth(); if not authed: return true
|
||||
checkAuth()
|
||||
|
||||
# Make sure we know about that project
|
||||
try: discard cfg.getProject(@"projectName")
|
||||
except: resp(Http404, makeJsonResp(Http404, getCurrentExceptionMsg()), JSON)
|
||||
except: jsonResp(Http404, getCurrentExceptionMsg())
|
||||
|
||||
if not existsRun(cfg, @"projectName", @"runId"):
|
||||
resp(Http404, makeJsonResp(Http404, "no such run for project"), JSON)
|
||||
jsonResp(Http404, "no such run for project")
|
||||
|
||||
try: resp($getRun(cfg, @"projectName", @"runId"), JSON)
|
||||
except: resp(Http500, makeJsonResp(Http500, getCurrentExceptionMsg()), JSON)
|
||||
except:
|
||||
json500Resp(getCurrentException(),
|
||||
"unable to load run details for project " & @"projectName" &
|
||||
" run " & @"runId")
|
||||
|
||||
get "/project/@projectName/run/@runId/logs":
|
||||
## Get logs from a specific run
|
||||
|
||||
checkAuth()
|
||||
|
||||
try: discard cfg.getProject(@"projectName")
|
||||
except:
|
||||
jsonResp(Http404, getCurrentExceptionMsg())
|
||||
|
||||
if not existsRun(cfg, @"projectName", @"runId"):
|
||||
jsonResp(Http404, "no such run for project")
|
||||
|
||||
try: resp($getLogs(cfg, @"projectName", @"runId"))
|
||||
except:
|
||||
json500Resp(getCurrentException(),
|
||||
"unable to load run logs for " & @"projectName" & " run " & @"runId")
|
||||
|
||||
get "/project/@projectName/step/@stepName/artifacts/@version":
|
||||
## Get the list of artifacts that were built for
|
||||
|
||||
checkAuth()
|
||||
|
||||
debug "Matched artifacts list request: " & $(%*{
|
||||
"project": @"projectName",
|
||||
"step": @"stepName",
|
||||
"version": @"version"
|
||||
})
|
||||
|
||||
try: resp($(%listArtifacts(cfg, @"projectName", @"stepName", @"version")), JSON)
|
||||
except:
|
||||
try: raise getCurrentException()
|
||||
except NotFoundException:
|
||||
jsonResp(Http404, getCurrentExceptionMsg())
|
||||
except:
|
||||
json500Resp(getCurrentException(), "unable to list artifacts for " &
|
||||
@"projectName" & ":" & @"stepName" & "@" & @"buildRef")
|
||||
|
||||
get "/project/@projectName/step/@stepName/artifact/@version/@artifactName":
|
||||
## Get a specific artifact that was built.
|
||||
|
||||
checkAuth()
|
||||
|
||||
var artifactPath: string
|
||||
try: artifactPath = getArtifactPath(cfg,
|
||||
@"projectName", @"stepName", @"version", @"artifactName")
|
||||
except:
|
||||
try: raise getCurrentException()
|
||||
except NotFoundException:
|
||||
jsonResp(Http404, getCurrentExceptionMsg())
|
||||
except:
|
||||
json500Resp(getCurrentException(), "unable to check artifact path for " &
|
||||
@"projectName" & ":" & @"stepName" & "@" & @"version")
|
||||
|
||||
enableRawMode
|
||||
|
||||
debug "Preparing: " & artifactPath
|
||||
let fileSize = getFileSize(artifactPath)
|
||||
let mimetype = request.settings.mimes.getMimetype(artifactPath.splitFile.ext[1 .. ^1])
|
||||
if fileSize < 10_000_000: # 10 mb
|
||||
var file = readFile(artifactPath)
|
||||
|
||||
var hashed = getMD5(file)
|
||||
|
||||
# If the user has a cached version of this file and it matches our
|
||||
# version, let them use it
|
||||
if request.headers.hasKey("If-None-Match") and request.headers["If-None-Match"] == hashed:
|
||||
resp(Http304)
|
||||
else:
|
||||
resp(Http200, [
|
||||
("Content-Disposition", "; filename=\"" & @"artifactName" & "\""),
|
||||
("Content-Type", mimetype),
|
||||
("ETag", hashed )], file)
|
||||
else:
|
||||
let headers = @{
|
||||
"Content-Disposition": "; filename=\"" & @"artifactName" & "\"",
|
||||
"Content-Type": mimetype,
|
||||
"Content-Length": $fileSize
|
||||
}
|
||||
request.sendHeaders(Http200, headers)
|
||||
|
||||
var fileStream = newFutureStream[string]("sendStaticIfExists")
|
||||
var file = openAsync(artifactPath, fmRead)
|
||||
# Let `readToStream` write file data into fileStream in the
|
||||
# background.
|
||||
asyncCheck file.readToStream(fileStream)
|
||||
# The `writeFromStream` proc will complete once all the data in the
|
||||
# `bodyStream` has been written to the file.
|
||||
while true:
|
||||
let (hasValue, value) = await fileStream.read()
|
||||
if hasValue: request.send(value)
|
||||
else: break
|
||||
file.close()
|
||||
|
||||
get "/project/@projectName/step/@stepName/status/@buildRef":
|
||||
## Get detailed information about the status of a step (assuming it has been built)
|
||||
|
||||
checkAuth(); if not authed: return true
|
||||
checkAuth()
|
||||
|
||||
try: resp($cfg.getBuildStatus(@"projectName", @"stepName", @"buildRef"), JSON)
|
||||
except: resp(Http404, makeJsonResp(Http404, getCurrentExceptionMsg()), JSON)
|
||||
except:
|
||||
try: raise getCurrentException()
|
||||
except NotFoundException: jsonResp(Http404, getCurrentExceptionMsg())
|
||||
except:
|
||||
json500Resp(getCurrentException(), "unable to load the build state for " &
|
||||
@"projectName" & ":" & @"stepName" & "@" & @"buildRef")
|
||||
|
||||
#get "/project/@projectName/step/@stepName/status/@buildRef.svg":
|
||||
## Get an image representing the status of a build
|
||||
|
||||
## TODO: how do we want to handle auth for this? Unlike
|
||||
#checkAuth(): if not authed: return true
|
||||
|
||||
post "/project/@projectName/step/@stepName/run/@buildRef?":
|
||||
# Kick off a run
|
||||
|
||||
checkAuth(); if not authed: return true
|
||||
checkAuth()
|
||||
|
||||
let runRequest = RunRequest(
|
||||
runId: genUUID(),
|
||||
projectName: @"projectName",
|
||||
stepName: @"stepName",
|
||||
buildRef: if @"buildRef" != "": @"buildRef" else: nil,
|
||||
timestamp: getLocalTime(getTime()),
|
||||
buildRef: if @"buildRef" != "": @"buildRef" else: "",
|
||||
timestamp: getTime().local,
|
||||
forceRebuild: false) # TODO support this with optional query params
|
||||
|
||||
# TODO: instead of immediately spawning a worker, add the request to a
|
||||
@ -296,26 +433,24 @@ proc start*(cfg: StrawBossConfig): void =
|
||||
id: runRequest.runId,
|
||||
request: runRequest,
|
||||
status: status), JSON)
|
||||
except: resp(Http404, makeJsonResp(Http404, getCurrentExceptionMsg()), JSON)
|
||||
|
||||
get "/service/debug/ping":
|
||||
if not cfg.debug: resp(Http404, makeJsonResp(Http404), JSON)
|
||||
else: resp($(%"pong"), JSON)
|
||||
except:
|
||||
try: raise getCurrentException()
|
||||
except NotFoundException: jsonResp(Http404, getCurrentExceptionMsg())
|
||||
except: jsonResp(Http400, getCurrentExceptionMsg())
|
||||
|
||||
post "/service/debug/stop":
|
||||
if not cfg.debug: resp(Http404, makeJsonResp(Http404), JSON)
|
||||
if not cfg.debug: jsonResp(Http404)
|
||||
else:
|
||||
let shutdownFut = sleepAsync(100)
|
||||
shutdownFut.callback = proc(): void = complete(stopFuture)
|
||||
resp($(%"shutting down"), JSON)
|
||||
|
||||
#[
|
||||
|
||||
get re".*":
|
||||
resp(Http404, makeJsonResp(Http404), JSON)
|
||||
jsonResp(Http404, "URL [" & request.path & "] is not present on this server.")
|
||||
|
||||
post re".*":
|
||||
resp(Http404, makeJsonResp(Http404), JSON)
|
||||
]#
|
||||
jsonResp(Http404)
|
||||
|
||||
proc performMaintenance(cfg: StrawBossConfig): void =
|
||||
# Prune workers
|
||||
|
src/main/nim/strawbosspkg/version.nim (new file, 2 lines)
@@ -0,0 +1,2 @@
const SB_VERSION* = "0.5.1"
src/main/systemd/strawboss.service (new file, 9 lines)
@@ -0,0 +1,9 @@
[Unit]
Description=StrawBoss build server.

[Service]
Type=simple
User=strawboss
WorkingDirectory=/home/strawboss
ExecStart=/home/strawboss/strawboss
Restart=on-failure
@@ -1,8 +1,10 @@
{
  "name": "dummy-project",
  "versionCmd": "git describe --all --always",
  "containerImage": "ubuntu",
  "steps": {
    "build": {
      "containerImage": "alpine",
      "depends": ["test"],
      "workingDir": "dir1",
      "stepCmd": "cust-build",
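This fixture sets containerImage both at the project level ("ubuntu") and on the build step ("alpine"); per the lookup added to execWithOutput in core.nim above, the step-level image takes precedence and the project-level image is only a fallback. A distilled, self-contained restatement of that rule (the proc name here is illustrative, not part of the codebase):

```nim
# Distilled from the containerImage lookup added in core.nim: prefer the
# step-level image, fall back to the project-level one; an empty result
# means the step runs natively, without docker.
proc effectiveImage(stepImage, projectImage: string): string =
  if stepImage.len > 0: stepImage
  else: projectImage

assert effectiveImage("alpine", "ubuntu") == "alpine"  # the "build" step above
assert effectiveImage("", "ubuntu") == "ubuntu"        # steps with no image of their own
```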
@@ -6,6 +6,7 @@
    { "name": "bob@builder.com", "hashedPwd": "$2a$11$lVZ9U4optQMhzPh0E9A7Yu6XndXblUF3gCa.zmEvJy4F.4C4718b." },
    { "name": "sam@sousa.com", "hashedPwd": "testvalue" }
  ],
  "port": 8180,
  "pwdCost": 11,
  "projects": [
    { "name": "dummy-project",
@@ -1,4 +1,4 @@
import unittest
import tempfile, times, unittest, untar

from langutils import sameContents

@@ -12,8 +12,8 @@ let TIMEOUT = 2.minutes
suite "strawboss core":

  # Suite setup: extract test project
  let testProjTempDir = mkdir()
  let testProjTarFile = newTarFile("src/test/test-project.tar.gz:)
  let testProjTempDir = mkdtemp()
  let testProjTarFile = newTarFile("src/test/test-project.tar.gz")
  let testProjName = "test-project"
  testProjTarFile.extract(testProjTempDir)
@@ -2,6 +2,7 @@ import cliutils, httpclient, json, os, osproc, sequtils, strutils, tempfile,
  times, unittest, untar, uuids

from langutils import sameContents
from algorithm import sorted

import ../testutil
import ../../../main/nim/strawbosspkg/configuration
@@ -50,7 +51,7 @@ suite "strawboss server":
      @["serve", "-c", tempCfgPath], loadEnv(), {poUsePath})

    # give the server time to spin up
    sleep(100)
    sleep(200)

  teardown:
    discard newAsyncHttpClient().post(apiBase & "/service/debug/stop")
@@ -60,7 +61,7 @@ suite "strawboss server":
    removeFile(tempCfgPath)

    # give the server time to spin down but kill it after that
    sleep(100)
    sleep(200)
    if serverProcess.running: kill(serverProcess)

  test "handle missing project configuration":
@ -135,13 +136,68 @@ suite "strawboss server":
|
||||
let completedRun = http.waitForBuild(apiBase, testProjName, $queuedRun.id)
|
||||
|
||||
# there should be successful status files for both the build and test steps
|
||||
for stepName in ["build", "test"]:
|
||||
let statusFile = tempBuildDataDir & "/" & testProjName & "/status/" & stepName & "/0.2.1.json"
|
||||
for step in [("build", BuildState.stepComplete), ("test", BuildState.complete)]:
|
||||
let statusFile = tempBuildDataDir & "/" & testProjName & "/status/" & step[0] & "/0.2.1.json"
|
||||
check fileExists(statusFile)
|
||||
|
||||
let status = loadBuildStatus(statusFile)
|
||||
check status.state == BuildState.complete
|
||||
check status.state == step[1]
|
||||
|
||||
test "run a build in docker":
|
||||
let http = newAuthenticatedHttpClient(apibase, "bob@builder.com", "password")
|
||||
|
||||
# Run the "build-docker" step
|
||||
var resp = http.post(apiBase & "/project/" & testProjName & "/step/build-docker/run/0.3.0")
|
||||
check resp.status.startsWith("200")
|
||||
|
||||
let queuedRun = parseRun(parseJson(resp.body))
|
||||
check queuedRun.status.state == BuildState.queued
|
||||
|
||||
# Wait for the build to complete
|
||||
let completedRun = http.waitForBuild(apiBase, testProjName, $queuedRun.id)
|
||||
|
||||
# check that the run directory, run request, status, and output logs exist
|
||||
let runsDir = tempBuildDataDir & "/" & testProjName & "/runs"
|
||||
let runId = $completedRun.id
|
||||
check existsDir(runsDir)
|
||||
for suffix in [".request.json", ".status.json", ".stdout.log", ".stderr.log"]:
|
||||
check existsFile(runsDir & "/" & runId & suffix)
|
||||
|
||||
# check that the project directory has been created in the artifacts repo
|
||||
let runArtifactsDir = tempBuildDataDir & "/" & testProjName & "/artifacts/build-docker/0.3.0"
|
||||
check existsDir(runArtifactsDir)
|
||||
|
||||
# check that the build step status file has been created
|
||||
let statusFile = tempBuildDataDir & "/" & testProjName & "/status/build-docker/0.3.0.json"
|
||||
check fileExists(statusFile)
|
||||
|
||||
# check that the status is complete
|
||||
var status = loadBuildStatus(statusFile)
|
||||
check status.state == BuildState.complete
|
||||
|
||||
# check that the artifacts we expect are present
|
||||
let binFile = runArtifactsDir & "/test_project"
|
||||
check existsFile(binFile)
|
||||
|
||||
test "run a multi-step docker-based build":
|
||||
let http = newAuthenticatedHttpClient(apibase, "bob@builder.com", "password")
|
||||
|
||||
# Run the "test" step (depends on "build")
|
||||
var resp = http.post(apiBase & "/project/" & testProjname & "/step/test-docker/run/0.3.0")
|
||||
check resp.status.startsWith("200")
|
||||
|
||||
let queuedRun = parseRun(parseJson(resp.body))
|
||||
let completedRun = http.waitForBuild(apiBase, testProjName, $queuedRun.id)
|
||||
|
||||
# there should be successful status files for both the build and test steps
|
||||
for step in [("build-docker", BuildState.stepComplete), ("test-docker", BuildState.complete)]:
|
||||
let statusFile = tempBuildDataDir & "/" & testProjName & "/status/" & step[0] & "/0.3.0.json"
|
||||
check fileExists(statusFile)
|
||||
|
||||
let status = loadBuildStatus(statusFile)
|
||||
check status.state == step[1]
|
||||
|
||||
# TODO
|
||||
#test "already completed steps should not be rebuilt":
|
||||
# let http = newAuthenticatedHttpClient(apibase, "bob@builder.com", "password")
|
||||
# let runArtifactsDir = tempBuildDataDir & "/" & testProjName & "/artifacts/build/0.2.1"
|
||||
@ -150,9 +206,40 @@ suite "strawboss server":
# Run the "build" step
# Kick off a build that depends on "build" (which was run in the last test)

# TODO
#test "kick off multiple runs and check the list of active runs via the API":
# check false
test "kick off multiple runs and check the list of active runs via the API":
|
||||
let http = newAuthenticatedHttpClient(apiBase, "bob@builder.com", "password")
|
||||
|
||||
# Kick off multiple runs of the "long-running" job
|
||||
let queuedRuns = toSeq((1..3)).map(proc (idx: int): Run =
|
||||
let resp = http.post(apiBase & "/project/" & testProjName & "/step/long-running/run/0.3.1")
|
||||
check resp.status.startsWith("200")
|
||||
|
||||
return parseRun(parseJson(resp.body)))
|
||||
|
||||
# Collect run ids.
|
||||
let runIds = queuedRuns.mapIt($(it.id)).sorted(cmpIgnoreCase)
|
||||
|
||||
# Check on the runs
|
||||
let getActiveResp = http.get(apiBase & "/project/" & testProjName & "/runs/active")
|
||||
check getActiveResp.status.startsWith("200")
|
||||
|
||||
let activeRuns = parseJson(getActiveResp.body).getElems().mapIt(parseRun(it))
|
||||
let activeRunIds = activeRuns.mapIt($(it.id)).sorted(cmpIgnoreCase)
|
||||
|
||||
# Make sure we see all runs in the active state.
|
||||
check runIds == activeRunIds
|
||||
|
||||
let completedRuns = runIds.map(proc (runId: string): Run =
|
||||
return http.waitForBuild(apiBase, testProjName, runId))
|
||||
|
||||
# Make sure all are completed and all are accounted for
|
||||
check completedRuns.allIt(it.status.state == BuildState.complete)
|
||||
check completedRuns.mapIt($(it.id)).sorted(cmpIgnoreCase) == runIds;
|
||||
|
||||
# Check that there are no more active runs
|
||||
let getActiveResp2 = http.get(apiBase & "/project/" & testProjName & "/runs/active")
|
||||
let remainingActiveRuns = parseJson(getActiveResp2.body).getElems().mapIt(parseRun(it))
|
||||
check remainingActiveRuns.len == 0
|
||||
|
||||
# Last-chance catch to kill the server in case some test err'ed and didn't
|
||||
# reach it's teardown handler
|
||||
|
@ -26,7 +26,7 @@ proc waitForBuild*(client: HttpClient, apiBase, projectName, runId: string,
#echo "Checking (" & $curElapsed & " has passed)."

if curElapsed > toFloat(timeout):
raise newException(SystemError, "Timeout exceeded waiting for build.")
raise newException(Exception, "Timeout exceeded waiting for build.")

let resp = client.get(apiBase & "/project/" & projectName & "/run/" & runId)
@ -1,6 +1,7 @@
import json, strtabs, times, tables, unittest, uuids

from langutils import sameContents
from timeutils import trimNanoSec
import ../../../main/nim/strawbosspkg/configuration

suite "load and save configuration objects":
@ -26,7 +27,7 @@ suite "load and save configuration objects":
stepName: "build",
buildRef: "master",
workspaceDir: "/no-real/dir",
timestamp: getLocalTime(getTime()),
timestamp: getTime().local.trimNanoSec,
forceRebuild: true)

let rrStr = $rr1
@ -99,6 +100,7 @@ suite "load and save configuration objects":
check:
pc.name == "dummy-project"
pc.versionCmd == "git describe --all --always"
pc.containerImage == "ubuntu"
pc.steps.len == 2

# Explicitly set properties
@ -106,6 +108,7 @@ suite "load and save configuration objects":
pc.steps["build"].dontSkip == true
pc.steps["build"].stepCmd == "cust-build"
pc.steps["build"].workingDir == "dir1"
pc.steps["build"].containerImage == "alpine"
sameContents(pc.steps["build"].artifacts, @["bin1", "doc1"])
sameContents(pc.steps["build"].depends, @["test"])
sameContents(pc.steps["build"].expectedEnv, @["VAR1"])
@ -116,6 +119,7 @@ suite "load and save configuration objects":
pc.steps["test"].dontSkip == false
pc.steps["test"].stepCmd == "true"
pc.steps["test"].workingDir == "."
pc.steps["test"].containerImage.len == 0
sameContents(pc.steps["test"].artifacts, @[])
sameContents(pc.steps["test"].depends, @[])
sameContents(pc.steps["test"].expectedEnv, @[])
@ -6,6 +6,7 @@ from langutils import sameContents
import ../testutil
import ../../../main/nim/strawbosspkg/configuration
import ../../../main/nim/strawbosspkg/server
import ../../../main/nim/strawbosspkg/version

let apiBase = "http://localhost:8180/api"
let cfgFilePath = "src/test/json/strawboss.config.json"
@ -40,11 +41,11 @@ suite "strawboss server":
let tok = toJWT(cfg, session)
check fromJWT(cfg, tok) == session

test "ping":
let resp = http.get(apiBase & "/service/debug/ping")
test "version":
let resp = http.get(apiBase & "/version")
check:
resp.status.startsWith("200")
resp.body == "\"pong\""
resp.body == "\"strawboss v" & SB_VERSION & "\""

test "fail auth":
let resp = http.post(apiBase & "/auth-token",
@ -1 +1 @@
Subproject commit 127be8f66fcc6d4d223acf56668d42ff9c37bfb0
Subproject commit ab883bd9602a1373347a23c8bee4ed28dd475aec
Binary file not shown.
20
src/util/bash/client.sh
Executable file
@ -0,0 +1,20 @@
#!/bin/bash

host="${STRAWBOSS_HOST:-localhost:8180}"
if [ $# -eq 1 ]; then
url="$1"
method="GET"
data=""
elif [ $# -eq 2 ]; then
method="$1"
url="$2"
data=""
else
method="$1"
url="$2"
data="$3"
fi

curl -X "$method" -H "Authorization: Bearer $(cat token.txt)" "http://${host}/api/$url" -d "$data"
echo ""
#echo "curl -X \"$method\" -H \"Authorization: Bearer $(cat token.txt)\" \"localhost:8180/api/$url\" | jq . "
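The helper above wraps curl: with one argument it issues a GET, with two it takes a method and a path, and with three it also sends a request body; every path is relative to /api/ and the bearer token is read from token.txt. A minimal usage sketch, assuming a strawboss server on localhost:8180, a valid JWT already saved in token.txt, and placeholder project name and build ref (the endpoints mirror the ones exercised by the functional tests above):

# single argument defaults to a GET of /api/version
./src/util/bash/client.sh version
# list active runs for a project (placeholder project name)
./src/util/bash/client.sh GET project/dummy-project/runs/active
# queue a run of a step at a build ref (placeholder step and ref)
./src/util/bash/client.sh POST project/dummy-project/step/build/run/master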
@ -1,7 +1,7 @@
# Package

bin = @["strawboss"]
version = "0.3.1"
version = "0.5.1"
author = "Jonathan Bernard"
description = "My personal continious integration worker."
license = "MIT"
@ -9,15 +9,22 @@ srcDir = "src/main/nim"

# Dependencies

requires @["nim >= 0.16.1", "docopt >= 0.6.5", "isaac >= 0.1.2", "tempfile", "jester", "bcrypt",
"untar", "uuids"]
requires @["nim >= 0.19.0", "docopt >= 0.6.8", "isaac >= 0.1.3", "tempfile", "jester >= 0.4.1", "bcrypt",
"untar", "uuids >= 0.1.10", "jwt"]

requires "https://github.com/yglukhov/nim-jwt"
requires "https://git.jdb-labs.com/jdb/nim-lang-utils.git >= 0.3.0"
requires "https://git.jdb-labs.com/jdb/nim-cli-utils.git >= 0.3.1"
# Hacky to point to a specific hash. But there is some bug building in the
# docker image we use to build the project with the next version. It adds an
# ifdef branch to support libssl 1.1 but for some reason that ifdef is set
# wrong and it tries to build against the 1.1 API even though the image only
# has the 1.0 API. I'm crossing my fingers and hoping that our base image
# supports libssl 1.1 before I need to update this library.
#requires "https://github.com/yglukhov/nim-jwt#549aa1eb13b8ddc0c6861d15cc2cc5b52bcbef01"

requires "https://git.jdb-labs.com/jdb/nim-lang-utils.git >= 0.4.0"
requires "https://git.jdb-labs.com/jdb/nim-cli-utils.git >= 0.6.0"
requires "https://git.jdb-labs.com/jdb/nim-time-utils.git >= 0.4.0"

# Tasks
#
task functest, "Runs the functional test suite.":
exec "nimble build"
exec "nim c -r src/test/nim/run_functional_tests.nim"
@ -37,3 +44,8 @@ task test, "Runs both the unit and functional test suites.":
echo "\nRunning functional tests."
echo "-------------------------"
exec "src/test/nim/run_functional_tests"

task dist, "Creates distributable package.":
exec "nimble build"
mkdir "dist"
exec "cp strawboss strawboss.config.json example.json dist/."
@ -1,12 +1,37 @@
{
"name": "strawboss",
"containerImage": "nimlang/nim:0.19.0",
"steps": {
"build": {
"compile": {
"artifacts": ["strawboss"],
"stepCmd": "nimble build"
},
"test": { "depends": ["unittest", "functest"] },
"functest": { "stepCmd": "nimble functest" },
"unittest": { "stepCmd": "nimble unittest" }
"unittest": {
"depends": ["compile"],
"stepCmd": "/bin/bash",
"cmdInput": [
"cp $compile_DIR/strawboss .",
"nimble install --depsOnly",
"nim c -r src/test/nim/run_unit_tests"
]
},
"functest": {
"depends": ["compile"],
"stepCmd": "/bin/bash",
"cmdInput": [
"cp $compile_DIR/strawboss .",
"nimble install --depsOnly",
"nim c -r src/test/nim/run_functional_tests"
]
},
"build": {
"artifacts": ["strawboss-$VERSION.zip"],
"depends": ["compile", "unittest", "functest"],
"stepCmd": "/bin/bash",
"cmdInput": [
"cp $compile_DIR/strawboss .",
"zip strawboss-$VERSION.zip strawboss strawboss.config.json example.json src/main/systemd/strawboss.service"
]
}
}
}
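As a rough sketch of how one of these steps would be triggered, the requests below queue the "build" step of this project through the API using the client.sh helper added above. They assume a running server on localhost:8180, a valid JWT in token.txt, and a placeholder build ref; judging by the multi-step functional test earlier in this change set, the server records completion of the declared dependencies (compile, unittest, functest) as part of the run.

# queue the "build" step at a placeholder build ref
./src/util/bash/client.sh POST project/strawboss/step/build/run/master
# then watch the queued run until it leaves the active list
./src/util/bash/client.sh GET project/strawboss/runs/active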
11
test-spec.txt
Normal file
@ -0,0 +1,11 @@
Run a build. Look for:
- Run request archived
- Output logs archived with the run request
- Artifacts archived in the build-data directory.
- Configuration for that version archived in configurations directory.
- Status for that version archived in the status directory

Run the build again for the same project and build ref:
- Build should be skipped.
- Run request should be archived.
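A rough manual check for the first half of this spec, assuming the build-data layout exercised by the functional tests above (runs/, status/, and artifacts/ under a per-project directory); the build-data path, project, step, version, and run id below are all placeholders:

buildData=build-data            # placeholder: the server's build data directory
project=dummy-project           # placeholder project name
step=build                      # placeholder step name
version=0.2.1                   # placeholder version/build ref
runId=REPLACE_WITH_RUN_UUID     # placeholder run id returned by the API

# run request, status, and output logs archived with the run
ls "$buildData/$project/runs/$runId".{request.json,status.json,stdout.log,stderr.log}

# status for that step/version archived in the status directory
cat "$buildData/$project/status/$step/$version.json"

# artifacts archived in the build-data directory
ls "$buildData/$project/artifacts/$step/$version/"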