Initial commit: tool to help analyze data from JIRA.

Jonathan Bernard 2021-04-02 13:31:35 -05:00
commit dfd80ccf6a
8 changed files with 331 additions and 0 deletions

26
Makefile Normal file

@@ -0,0 +1,26 @@
PGSQL_CONTAINER_ID=`cat postgres.container.id`

createdb:
	docker run \
		--name postgres-tegra118 \
		-e POSTGRES_PASSWORD=password \
		-p 5500:5432 \
		-d postgres \
		> postgres.container.id
	sleep 5
	PGPASSWORD=password psql -p 5500 -U postgres -h localhost \
		-c 'CREATE DATABASE tegra118;'

startdb:
	docker start $(PGSQL_CONTAINER_ID)

stopdb:
	docker stop $(PGSQL_CONTAINER_ID)

deletedb:
	-docker stop $(PGSQL_CONTAINER_ID)
	docker rm $(PGSQL_CONTAINER_ID)
	rm postgres.container.id

connect:
	PGPASSWORD=password psql -p 5500 -U postgres -h localhost tegra118

93
src/nim/tm_pm.nim Normal file

@@ -0,0 +1,93 @@
import csvtools, docopt, fiber_orm, db_postgres, sequtils, sets, strutils
import ./tm_pmpkg/jira_api

type
  Feature* = object
    id*: int
    name*: string
    epic*: int
    stories*: seq[string]
    defects*: seq[string]
    status*: string
    confidence*: int
    target_release*: string

  TmPmDb* = ref object
    conn: DbConn

func connect(connString: string): TmPmDb =
  result = TmPmDb(conn: open("", "", "", connString))

generateProcsForModels(TmPmDb, [ChangeLog, Feature, Issue])
generateLookup(TmPmDb, ChangeLog, @["historyId"])

when isMainModule:
  let doc = """
Usage:
  tm_pm import-csv <import-file>
  tm_pm api-sync <username> <api-key>
"""

  let args = docopt(doc, version = "0.1.0")
  let db = connect("host=localhost port=5500 dbname=tegra118 user=postgres password=password")

  if args["import-csv"]:
    let rows = toSeq(csvRows(path = $args["<import-file>"]))
    let jiraIssues = rows.map(proc (r: seq[string]): Issue =
      Issue(
        issueType: r[0],
        id: r[1],
        summary: r[2],
        priority: r[3],
        status: r[4],
        epicId: r[5],
        testPhase: r[6],
        assignee: r[7],
        linkedIssueIds: r[8..<r.len].filterIt(not it.isEmptyOrWhitespace)))

    for issue in jiraIssues:
      discard db.createIssue(issue)
      # see if the issue already exists
      # try:
      #   let existingRecord = db.getJiraIssue(issue.id);
      # except NotFoundError:
      #   db.createJiraIssue(issue);

  if args["api-sync"]:
    initJiraClient("https://tegra118.atlassian.net", $args["<username>"], $args["<api-key>"])

    let issuesAndChangelogs = searchIssues(
      "project = \"UUP\" and (labels is empty or labels != \"Design&Reqs\") ORDER BY key ASC",
      includeChangelog = true
    )

    var issuesUpdated = 0
    var issuesCreated = 0
    var changelogsCreated = 0

    stdout.write("\nRetrieved " & $issuesAndChangelogs[0].len & " issues. ")
    for issue in issuesAndChangelogs[0]:
      try:
        discard db.getIssue(issue.id)
        discard db.updateIssue(issue)
        issuesUpdated += 1
      except NotFoundError:
        discard db.createIssue(issue)
        issuesCreated += 1
    stdout.writeLine("Created " & $issuesCreated & " and updated " & $issuesUpdated)

    stdout.write("Retrieved " & $issuesAndChangelogs[1].len & " change logs. ")
    var newHistoryIds: HashSet[string] = initHashSet[string]()
    for changelog in issuesAndChangelogs[1]:
      try:
        if newHistoryIds.contains(changelog.historyId) or
            db.findChangeLogsByHistoryId(changelog.historyId).len == 0:
          newHistoryIds.incl(changelog.historyId)
          discard db.createChangeLog(changelog)
          changelogsCreated += 1
      except NotFoundError: discard
    stdout.writeLine("Recorded " & $changelogsCreated & " we didn't already have.\n")

1
src/nim/tm_pm.nim.cfg Normal file

@@ -0,0 +1 @@
--d:ssl

111
src/nim/tm_pmpkg/jira_api.nim Normal file

@@ -0,0 +1,111 @@
import base64, httpclient, json, sequtils, strutils, times, uri

type
  ChangeLog* = object
    id*: string
    historyId*: string
    issueId*: string
    author*: string
    createdAt*: DateTime
    field*: string
    oldValue*: string
    newValue*: string

  Issue* = object
    id*: string
    issueType*: string
    summary*: string
    epicId*: string
    assignee*: string
    status*: string
    priority*: string
    linkedIssueIds*: seq[string]
    testPhase*: string

let client = newHttpClient()
var API_BASE = ""
const FIELDS = "issuetype,summary,customfield_10014,assignee,status,priority,issuelinks,customfield_10218,changelog"

proc parseIssue(json: JsonNode): (Issue, seq[ChangeLog]) =
  let f = json["fields"]
  return (
    Issue(
      id: json["key"].getStr(),
      issueType: f["issuetype"]["name"].getStr(),
      summary: f["summary"].getStr(),
      epicId: f["customfield_10014"].getStr(),
      assignee:
        if f["assignee"].kind == JNull: "Unassigned"
        else: f["assignee"]["displayName"].getStr(),
      status: f["status"]["name"].getStr(),
      # the REST payload gives priority as an object (or null), not a bare string
      priority:
        if f["priority"].kind == JNull: ""
        else: f["priority"]["name"].getStr(),
      linkedIssueIds: f["issuelinks"].mapIt(
        if it.hasKey("inwardIssue"): it["inwardIssue"]["key"].getStr()
        else: it["outwardIssue"]["key"].getStr()),
      testPhase: f["customfield_10218"].getStr()),
    if json.hasKey("changelog") and json["changelog"]["histories"].getElems().len > 0:
      json["changelog"]["histories"].getElems().map(
        proc (h: JsonNode): seq[ChangeLog] = h["items"].mapIt(
          ChangeLog(
            historyId: h["id"].getStr(),
            issueId: json["key"].getStr(),
            author: h["author"]["displayName"].getStr(),
            # splice Jira's timestamp into a form times.parse can handle
            createdAt: parse(
              h["created"].getStr()[0..17] & h["created"].getStr()[^6..^3],
              "yyyy-MM-dd'T'HH:mm:sszz"),
            field: it["field"].getStr(),
            oldValue: it["fromString"].getStr(),
            newValue: it["toString"].getStr()
          )
        )
      ).foldl(a & b)
    else: @[]
  )

proc initJiraClient*(apiBasePath: string, username: string, apiToken: string) =
  API_BASE = apiBasePath
  client.headers = newHttpHeaders({
    "Content-Type": "application/json",
    "Authorization": "Basic " & encode(username & ":" & apiToken)
  })

proc searchIssues*(jql: string, includeChangelog: bool = false):
    (seq[Issue], seq[ChangeLog]) =

  result = (@[], @[])

  var query = @[
    ("jql", jql),
    ("fields", FIELDS)
  ]

  if includeChangelog: query.add(("expand", "changelog"))

  var resp = client.get(API_BASE & "/rest/api/3/search?" & encodeQuery(query))

  while true:
    if not resp.status.startsWith("2"):
      raise newException(Exception,
        "Received error from API: " & resp.status &
        "\nHeaders: " & $resp.headers &
        "\nBody: " & $resp.body)

    let body = parseJson(resp.body)
    let nextStartAt = body["startAt"].getInt(0) + body["maxResults"].getInt(0)

    echo "Retrieved records " &
      $body["startAt"].getInt() & " to " &
      $(nextStartAt - 1) & " of " &
      $body["total"].getInt() &
      " (" & $body["issues"].getElems().len & " records received)"

    let issuesAndLogs = body["issues"].getElems().mapIt(parseIssue(it))
    if issuesAndLogs.len > 0:
      result[0] &= issuesAndLogs.mapIt(it[0])
      result[1] &= issuesAndLogs.mapIt(it[1]).foldl(a & b)

    # stop once everything has been paged through
    if nextStartAt >= body["total"].getInt(): break

    resp = client.get(
      API_BASE & "/rest/api/3/search?" &
      encodeQuery(query & ("startAt", $nextStartAt)))

34
src/sql/01-schema-up.sql Normal file

@@ -0,0 +1,34 @@
CREATE TABLE issues (
  id varchar primary key,
  issue_type varchar not null,
  summary varchar not null,
  epic_id varchar,
  assignee varchar,
  test_phase varchar,
  status varchar not null,
  priority varchar not null,
  linked_issue_ids varchar[]
);

CREATE TABLE features (
  id serial primary key,
  name varchar not null,
  epicId varchar not null default '',
  stories varchar[] not null default '{}',
  defects varchar[] not null default '{}',
  status varchar default 'todo',
  confidence int not null default 0,
  target_release varchar not null default '',
  notes varchar not null default ''
);

CREATE TABLE change_logs (
  id serial primary key,
  history_id varchar,
  issue_id varchar not null references issues(id),
  author varchar,
  created_at timestamp with time zone,
  field varchar not null,
  old_value varchar,
  new_value varchar
);


@@ -0,0 +1,7 @@
-- Backfill: make linked_issues symmetric by also recording, for each issue,
-- the issues that link to it.
UPDATE jira_issues SET linked_issues = collected.linked_issues FROM (
  SELECT
    a.id,
    array_remove(array_cat(a.linked_issues, array_agg(b.id)), NULL) AS linked_issues
  FROM jira_issues a
    LEFT OUTER JOIN jira_issues b ON b.linked_issues @> ARRAY[a.id]
  GROUP BY a.id
) AS collected
WHERE jira_issues.id = collected.id;

44
src/sql/queries.sql Normal file

@@ -0,0 +1,44 @@
-- Show bugs moved to 'Resolved' with a full accounting of everyone who has
-- touched the issue, most recent issues first.
SELECT
  i.id,
  i.epic_id,
  i.status,
  i.test_phase,
  -- i.summary,
  i.assignee,
  array_agg(DISTINCT c2.author) AS involved,
  c.created_at AS resolved_at
FROM
  issues i JOIN
  change_logs c ON
    i.issue_type = 'Bug' AND
    i.id = c.issue_id AND
    c.field = 'status' AND
    c.new_value = 'Resolved' JOIN
  change_logs c2 ON i.id = c2.issue_id
GROUP BY
  i.id,
  i.epic_id,
  i.status,
  i.test_phase,
  -- i.summary,
  i.assignee,
  resolved_at
ORDER BY resolved_at DESC;

-- Show everyone involved with a specific ticket
SELECT
  i.id,
  i.epic_id,
  i.status,
  i.summary,
  array_agg(DISTINCT c.author) AS involved
FROM
  issues i JOIN
  change_logs c ON i.id = c.issue_id
WHERE i.id IN ('UUP-848')
GROUP BY i.id, i.epic_id, i.status;

SELECT status, count(*) FROM issues WHERE issue_type = 'Bug' GROUP BY status;
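
These queries are written to be run in psql (see the Makefile's connect target). As a rough standalone sketch (not part of this commit), the last query could also be run from Nim with db_postgres, reusing the connection settings from tm_pm.nim:

  # Hypothetical sketch: count bugs by status over the local tegra118 database.
  import db_postgres

  let conn = open("", "", "",
    "host=localhost port=5500 dbname=tegra118 user=postgres password=password")
  for row in conn.fastRows(
      sql"SELECT status, count(*) FROM issues WHERE issue_type = 'Bug' GROUP BY status"):
    echo row[0], ": ", row[1]
  conn.close()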

15
tm_pm.nimble Normal file

@@ -0,0 +1,15 @@
# Package
version = "0.1.0"
author = "Jonathan Bernard"
description = "A new awesome nimble package"
license = "MIT"
srcDir = "src/nim"
bin = @["tm_pm"]
# Dependencies
requires @["nim >= 1.4.0", "docopt", "uuids", "timeutils", "fiber_orm >= 0.3.1"]
#requires "https://git.jdb-software.com/jdb-software/fiber-orm-nim.git"
requires "https://github.com/andreaferretti/csvtools.git"