ACN: Add last changes to JIRA analysis tool code.
parent c2da051878
commit 22af5abd26
9
Makefile
@@ -1,5 +1,8 @@
|
||||
PGSQL_CONTAINER_ID=`cat postgres.container.id`
|
||||
|
||||
updatedb: startdb
|
||||
./jira_analysis api-sync https://tegra118.atlassian.net jonathan.bernard@fiserv.com mS5cT0YntfQ6KYT0OWgb6A10
|
||||
|
||||
createdb:
|
||||
docker run \
|
||||
--name postgres-tegra118 \
|
||||
@@ -24,3 +27,9 @@ deletedb:
|
||||
|
||||
connect:
|
||||
PGPASSWORD=password psql -p 5500 -U postgres -h localhost tegra118
|
||||
|
||||
update-fields:
|
||||
curl -u 'jonathan.bernard@fiserv.com:mS5cT0YntfQ6KYT0OWgb6A10' 'https://tegra118.atlassian.net/rest/api/3/field' | jq . > fields.json
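# fields.json lists every Jira field definition (id and name); handy for mapping
# the customfield_* ids used in the code (e.g. customfield_10014, customfield_10218)
# back to their display names.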
|
||||
|
||||
update-sample:
|
||||
curl -u 'jonathan.bernard@fiserv.com:mS5cT0YntfQ6KYT0OWgb6A10' 'https://tegra118.atlassian.net/rest/api/3/search?jql=project%20%3D%20"UUP"%20and%20(labels%20is%20empty%20or%20labels%20!%3D%20"Design%26Reqs")%20ORDER%20BY%20key%20ASC&fields=summary,assignee,issuetype,customfield_10014,issuelinks,resolutiondate,status,customfield_10218,resolution,fixVersions,versions,customfield_10001&expand=changelog' | jq . > all-issues-filtered-fields.json
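# JQL encoded in the URL above:
#   project = "UUP" and (labels is empty or labels != "Design&Reqs") ORDER BY key ASC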
|
||||
|
BIN
features-2.ods
Normal file
Binary file not shown.
@@ -12,4 +12,3 @@ bin = @["jira_analysis"]
|
||||
|
||||
requires @["nim >= 1.4.0", "docopt", "uuids", "timeutils", "fiber_orm >= 0.3.1"]
|
||||
#requires "https://git.jdb-software.com/jdb-software/fiber-orm-nim.git"
|
||||
requires "https://github.com/andreaferretti/csvtools.git"
|
||||
|
@@ -1,4 +1,4 @@
|
||||
import csvtools, docopt, fiber_orm, db_postgres, sequtils, sets, strutils
|
||||
import db_common, docopt, fiber_orm, db_postgres, sequtils, sets, strutils
|
||||
|
||||
import ./jira_analysispkg/jira_api
|
||||
|
||||
@@ -16,60 +16,43 @@ type
|
||||
TmPmDb* = ref object
|
||||
conn: DbConn
|
||||
|
||||
FeaturesIssue* = object
|
||||
id*: int
|
||||
featureId*: int
|
||||
issueId*: string
|
||||
issueType*: string
|
||||
|
||||
func connect(connString: string): TmPmDb =
|
||||
result = TmPmDb(conn: open("", "", "", connString))
|
||||
|
||||
generateProcsForModels(TmPmDb, [ChangeLog, Feature, Issue])
|
||||
generateProcsForModels(TmPmDb, [ChangeLog, Feature, Issue, FeaturesIssue, LinkedIssue])
|
||||
|
||||
generateLookup(TmPmDb, ChangeLog, @["historyId"])
|
||||
generateLookup(TmPmDb, LinkedIssue, @["fromId", "toId", "linkType"])
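# fiber_orm generates finder procs from these field lists, e.g.
# findChangeLogsByHistoryId and findLinkedIssuesByFromIdAndToIdAndLinkType,
# which the sync code below uses to skip records we already have.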
|
||||
|
||||
when isMainModule:
|
||||
|
||||
let doc = """
|
||||
Usage:
|
||||
jira_analysis import-csv <import-file>
|
||||
jira_analysis api-sync <url-base> <username> <api-key>
|
||||
"""
|
||||
|
||||
let args = docopt(doc, version = "0.2.0")
|
||||
let db = connect("host=localhost port=5500 dbname=tegra118 user=postgres password=password")
|
||||
|
||||
if args["import-csv"]:
|
||||
let rows = toSeq(csvRows(path = $args["<import-file>"]))
|
||||
let jiraIssues = rows.map(proc (r: seq[string]): Issue =
|
||||
Issue(
|
||||
issueType: r[0],
|
||||
id: r[1],
|
||||
summary: r[2],
|
||||
priority: r[3],
|
||||
status: r[4],
|
||||
epicId: r[5],
|
||||
testPhase: r[6],
|
||||
assignee: r[7],
|
||||
linkedIssueIds: r[8..<r.len].filterIt(not it.isEmptyOrWhitespace)
|
||||
))
|
||||
|
||||
for issue in jiraIssues:
|
||||
discard db.createIssue(issue);
|
||||
# see if the issue already exists
|
||||
# try:
|
||||
# let existingRecord = db.getJiraIssue(issue.id);
|
||||
# except NotFoundError:
|
||||
# db.createJiraIssue(issue);
|
||||
|
||||
if args["api-sync"]:
|
||||
initJiraClient($args["<url-base>"], $args["<username>"], $args["<api-key>"])
|
||||
let issuesAndChangelogs = searchIssues(
|
||||
let issuesLogsAndLinks = searchIssues(
|
||||
"project = \"UUP\" and (labels is empty or labels != \"Design&Reqs\") ORDER BY key ASC",
|
||||
includeChangelog = true
|
||||
)
|
||||
|
||||
var issuesUpdated = 0
|
||||
var issuesCreated = 0
|
||||
var changelogsCreated = 0
|
||||
var linksAdded = 0
|
||||
|
||||
stdout.write("\nRetrieved " & $issuesAndChangelogs[0].len & " issues. ")
|
||||
for issue in issuesAndChangelogs[0]:
|
||||
stdout.write("\nRetrieved " & $issuesLogsAndLinks[0].len & " issues. ")
|
||||
for issue in issuesLogsAndLinks[0]:
|
||||
try:
|
||||
discard db.getIssue(issue.id)
|
||||
discard db.updateIssue(issue)
|
||||
@@ -79,15 +62,21 @@ Usage:
|
||||
issuesCreated += 1;
|
||||
stdout.writeLine("Created " & $issuesCreated & " and updated " & $issuesUpdated)
|
||||
|
||||
stdout.write("Retrieved " & $issuesAndChangelogs[1].len & " change logs. ")
|
||||
var newHistoryIds: HashSet[string] = initHashSet[string]()
|
||||
for changelog in issuesAndChangelogs[1]:
|
||||
try:
|
||||
if newHistoryIds.contains(changelog.historyId) or
|
||||
db.findChangeLogsByHistoryId(changelog.historyId).len == 0:
|
||||
newHistoryIds.incl(changelog.historyId)
|
||||
discard db.createChangeLog(changelog)
|
||||
changelogsCreated += 1;
|
||||
except NotFoundError: discard
|
||||
stdout.write("\nFound " & $issuesLogsAndLinks[2].len & " issue links. ")
|
||||
for link in issuesLogsAndLinks[2]:
|
||||
let existingLinks = db.findLinkedIssuesByFromIdAndToIdAndLinkType(
|
||||
link.fromId, link.toId, link.linkType)
|
||||
if existingLinks.len == 0:
|
||||
discard db.createLinkedIssue(link);
|
||||
linksAdded += 1;
|
||||
stdout.writeLine("Recorded " & $linksAdded & " we didn't already have.")
|
||||
|
||||
stdout.writeLine("Recorded " & $changelogsCreated & " we didn't already have.\n")
|
||||
stdout.write("Retrieved " & $issuesLogsAndLinks[1].len & " change logs. ")
|
||||
let knownHistoryIds: HashSet[string] = toHashSet[string](db.getAllChangeLogs().mapIt(it.historyId))
|
||||
let newChangeLogs: seq[ChangeLog] = issuesLogsAndLinks[1].filterIt(not knownHistoryIds.contains(it.historyId))
|
||||
|
||||
for changelog in newChangeLogs:
|
||||
try:
|
||||
discard db.createChangeLog(changelog)
|
||||
except NotFoundError: discard
|
||||
stdout.writeLine("Recorded " & $newChangeLogs.len & " we didn't already have.\n")
|
||||
|
@@ -19,17 +19,26 @@ type
|
||||
assignee*: string
|
||||
status*: string
|
||||
priority*: string
|
||||
linkedIssueIds*: seq[string]
|
||||
affectsVersions*: seq[string]
|
||||
fixVersions*: seq[string]
|
||||
resolution*: string
|
||||
testPhase*: string
|
||||
teams*: seq[string]
|
||||
|
||||
LinkedIssue* = object
|
||||
id*: int
|
||||
toId*: string
|
||||
fromId*: string
|
||||
linkType*: string
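# Every Jira issue link is recorded twice, once per direction, using the link
# type's inward and outward names (see parseIssue below).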
|
||||
|
||||
let client = newHttpClient()
|
||||
var API_BASE = "";
|
||||
const FIELDS = "issuetype,summary,customfield_10014,assignee,status,priority,issuelinks,customfield_10218,changelog"
|
||||
const FIELDS = "issuetype,summary,customfield_10014,assignee,status,priority,issuelinks,customfield_10218,changelog,resolution,versions,fixVersions,customfield_10001"
|
||||
|
||||
proc parseIssue(json: JsonNode): (Issue, seq[ChangeLog]) =
|
||||
proc parseIssue(json: JsonNode): (Issue, seq[ChangeLog], seq[LinkedIssue]) =
|
||||
let f = json["fields"]
|
||||
return (
|
||||
Issue(
|
||||
|
||||
let issue = Issue(
|
||||
id: json["key"].getStr(),
|
||||
issueType: f["issuetype"]["name"].getStr(),
|
||||
summary: f["summary"].getStr(),
|
||||
@@ -38,18 +47,39 @@ proc parseIssue(json: JsonNode): (Issue, seq[ChangeLog]) =
|
||||
if f["assignee"].kind == JNull: "Unassigned"
|
||||
else: f["assignee"]["displayName"].getStr(),
|
||||
status: f["status"]["name"].getStr(),
|
||||
priority: f["priority"].getStr(),
|
||||
linkedIssueIds: f["issuelinks"].mapIt(
|
||||
if it.hasKey("inwardIssue"): it["inwardIssue"]["key"].getStr()
|
||||
else: it["outwardIssue"]["key"].getStr()),
|
||||
testPhase: f["customfield_10218"].getStr()),
|
||||
priority:
|
||||
if f["priority"].kind == JObject: f["priority"]["name"].getStr()
|
||||
else: "",
|
||||
resolution:
|
||||
if f["resolution"].kind == JNull: ""
|
||||
else: f["resolution"]["name"].getStr(),
|
||||
affectsVersions:
|
||||
if f["versions"].getElems().len > 0:
|
||||
f["versions"].getElems().mapIt(it["name"].getStr())
|
||||
else: @[],
|
||||
fixVersions:
|
||||
if f["fixVersions"].getElems().len > 0:
|
||||
f["fixVersions"].getElems().mapIt(it["name"].getStr())
|
||||
else: @[],
|
||||
testPhase:
|
||||
if f["customfield_10218"].kind == JNull: ""
|
||||
else: f["customfield_10218"]["value"].getStr(),
|
||||
teams:
|
||||
if f["customfield_10001"].getElems().len > 0:
|
||||
f["customfield_10001"].getElems.mapIt(it[""].getStr())
|
||||
else : @[]
|
||||
)
|
||||
|
||||
let changelogs =
|
||||
if json.hasKey("changelog") and json["changelog"]["histories"].getElems().len > 0:
|
||||
json["changelog"]["histories"].getElems().map(
|
||||
proc (h: JsonNode): seq[ChangeLog] = h["items"].mapIt(
|
||||
ChangeLog(
|
||||
historyId: h["id"].getStr(),
|
||||
issueId: json["key"].getStr(),
|
||||
author: h["author"]["displayName"].getStr(),
|
||||
author:
|
||||
if h.hasKey("author"): h["author"]["displayName"].getStr()
|
||||
else: "",
|
||||
createdAt: parse(
|
||||
h["created"].getStr()[0..17] & h["created"].getStr()[^6..^3],
|
||||
"yyyy-MM-dd'T'HH:mm:sszz"),
|
||||
@@ -60,7 +90,36 @@ proc parseIssue(json: JsonNode): (Issue, seq[ChangeLog]) =
|
||||
)
|
||||
).foldl(a & b)
|
||||
else: @[]
|
||||
)
|
||||
|
||||
let linkedIssues =
|
||||
if f.hasKey("issuelinks") and f["issuelinks"].getElems().len > 0:
|
||||
f["issuelinks"].mapIt(
|
||||
if it.hasKey("inwardIssue"):
|
||||
@[
|
||||
LinkedIssue(
|
||||
fromId: json["key"].getStr(),
|
||||
toId: it["inwardIssue"]["key"].getStr(),
|
||||
linkType: it["type"]["inward"].getStr()),
|
||||
LinkedIssue(
|
||||
toId: json["key"].getStr(),
|
||||
fromId: it["inwardIssue"]["key"].getStr(),
|
||||
linkType: it["type"]["outward"].getStr())
|
||||
]
|
||||
else:
|
||||
@[
|
||||
LinkedIssue(
|
||||
fromId: json["key"].getStr(),
|
||||
toId: it["outwardIssue"]["key"].getStr(),
|
||||
linkType: it["type"]["outward"].getStr()),
|
||||
LinkedIssue(
|
||||
toId: json["key"].getStr(),
|
||||
fromId: it["outwardIssue"]["key"].getStr(),
|
||||
linkType: it["type"]["inward"].getStr())
|
||||
]
|
||||
).foldl(a & b)
|
||||
else: @[]
|
||||
|
||||
result = (issue, changelogs, linkedIssues)
|
||||
|
||||
proc initJiraClient*(apiBasePath: string, username: string, apiToken: string) =
|
||||
API_BASE = apiBasePath
|
||||
@@ -70,9 +129,9 @@ proc initJiraClient*(apiBasePath: string, username: string, apiToken: string) =
|
||||
})
|
||||
|
||||
proc searchIssues*(jql: string, includeChangelog: bool = false):
|
||||
(seq[Issue], seq[ChangeLog]) =
|
||||
(seq[Issue], seq[ChangeLog], seq[LinkedIssue]) =
|
||||
|
||||
result = (@[], @[])
|
||||
result = (@[], @[], @[])
|
||||
|
||||
var query = @[
|
||||
("jql", jql),
|
||||
@@ -99,10 +158,12 @@ proc searchIssues*(jql: string, includeChangelog: bool = false):
|
||||
$body["total"].getInt() &
|
||||
" (" & $body["issues"].getElems().len & " records received)"
|
||||
|
||||
let issuesAndLogs = body["issues"].getElems().mapIt(parseIssue(it))
|
||||
let issuesLogsAndLinks = body["issues"].getElems().mapIt(parseIssue(it))
|
||||
|
||||
result[0] &= issuesAndLogs.mapIt(it[0])
|
||||
result[1] &= issuesAndLogs.mapIt(it[1]).foldl(a & b)
|
||||
if issuesLogsAndLinks.len > 0:
|
||||
result[0] &= issuesLogsAndLinks.mapIt(it[0])
|
||||
result[1] &= issuesLogsAndLinks.mapIt(it[1]).foldl(a & b)
|
||||
result[2] &= issuesLogsAndLinks.mapIt(it[2]).foldl(a & b)
|
||||
|
||||
if nextStartAt > body["total"].getInt(): break
|
||||
|
||||
|
@@ -7,15 +7,17 @@ CREATE TABLE issues (
|
||||
test_phase varchar,
|
||||
status varchar not null,
|
||||
priority varchar not null,
|
||||
linked_issue_ids varchar[]
|
||||
resolution varchar not null default '',
|
||||
affects_versions varchar[] default '{}',
|
||||
fix_versions varchar[] default '{}',
|
||||
teams varchar[] default '{}'
|
||||
);
|
||||
|
||||
CREATE TABLE features (
|
||||
id serial primary key,
|
||||
category varchar not null,
|
||||
name varchar not null,
|
||||
epicId varchar not null default '',
|
||||
stories varchar[] not null default '{}',
|
||||
defects varchar[] not null default '{}',
|
||||
status varchar default 'todo',
|
||||
confidence int not null default 0,
|
||||
target_release varchar not null default '',
|
||||
@@ -32,3 +34,16 @@ CREATE TABLE change_logs (
|
||||
old_value varchar,
|
||||
new_value varchar
|
||||
);
|
||||
|
||||
CREATE TABLE linked_issues (
|
||||
id serial primary key,
|
||||
to_id varchar,
|
||||
from_id varchar,
|
||||
link_type varchar
|
||||
);
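-- Populated by the api-sync command; it checks (from_id, to_id, link_type)
-- before inserting, so re-running the sync does not create duplicate links.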
|
||||
|
||||
CREATE TABLE features_issues (
|
||||
id serial primary key,
|
||||
feature_id int references features(id),
|
||||
issue_id varchar
|
||||
);
|
||||
|
0
src/sql/02-add-version-info.sql
Normal file
@@ -1,7 +0,0 @@
|
||||
UPDATE jira_issues SET linked_issues = collected.linked_issues from (
|
||||
SELECT a.id, array_remove(array_cat(a.linked_issues, array_agg(b.id)) as linked_issues, NULL) FROM
|
||||
jira_issues a LEFT OUTER JOIN
|
||||
jira_issues b ON b.linked_issues @> ARRAY[a.id]
|
||||
GROUP BY a.id
|
||||
) AS collected
|
||||
WHERE jira_issues.id = collected.id;
|
@@ -1,11 +1,18 @@
|
||||
-- Show bugs moved to 'Resolved' with a full accounting of everyone who has
|
||||
-- Queries helpful in mining project information from Jira.
|
||||
--
|
||||
-- Useful VIM commands:
|
||||
-- :DB g:db = postgres://postgres:password@localhost:5500/tegra118
|
||||
-- :nmap <Leader>G mx?query:<CR>ww*vN:DB g:db<CR>'x
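-- (Roughly: with the cursor inside a block, <Leader>G finds the nearest
-- "query:" tag, visually selects through the matching "end query:" tag, and
-- runs the selection with :DB against g:db.)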
|
||||
|
||||
-- Show bugs moved to 'Done' with a full accounting of everyone who has
|
||||
-- touched the issue, most recent issues first.
|
||||
-- query: DEFECTS_COMPLETED_WITH_PROVENANCE
|
||||
SELECT
|
||||
i.id,
|
||||
i.epic_id,
|
||||
i.status,
|
||||
i.test_phase,
|
||||
-- i.summary,
|
||||
i.summary,
|
||||
i.assignee,
|
||||
array_agg(DISTINCT c2.author) AS involved,
|
||||
c.created_at AS resolved_at
|
||||
@@ -15,19 +22,49 @@ FROM
|
||||
i.issue_type = 'Bug' AND
|
||||
i.id = c.issue_id AND
|
||||
c.field = 'status' AND
|
||||
c.new_value = 'Resolved' JOIN
|
||||
c.new_value = 'Done' JOIN
|
||||
change_logs c2 on i.id = c2.issue_id
|
||||
GROUP BY
|
||||
i.id,
|
||||
i.epic_id,
|
||||
i.status,
|
||||
i.test_phase,
|
||||
-- i.summary,
|
||||
i.summary,
|
||||
i.assignee,
|
||||
resolved_at
|
||||
ORDER BY resolved_at DESC;
|
||||
-- end query: DEFECTS_COMPLETED_WITH_PROVENANCE
|
||||
|
||||
-- Issues moved to "In Testing" within the last 24 hours
|
||||
-- query: ISSUES_IN_TESTING_RECENTLY
|
||||
SELECT
|
||||
i.id,
|
||||
i.epic_id,
|
||||
i.test_phase,
|
||||
i.status,
|
||||
i.summary,
|
||||
array_remove(array_remove(array_agg(distinct c2.author), 'Lisa Schaffer'), 'Alec Mishra') AS involved,
|
||||
c.created_at AS testing_at
|
||||
FROM
|
||||
issues i JOIN
|
||||
change_logs c ON
|
||||
i.issue_type = 'Bug' AND
|
||||
i.id = c.issue_id AND
|
||||
c.field = 'status' AND
|
||||
(c.new_value = 'In Testing' OR c.old_value = 'In Testing') AND
|
||||
c.created_at > (current_timestamp - interval '1 day') JOIN
|
||||
change_logs c2 ON i.id = c2.issue_id
|
||||
GROUP BY
|
||||
i.id,
|
||||
i.epic_id,
|
||||
i.test_phase,
|
||||
i.summary,
|
||||
testing_at
|
||||
ORDER BY testing_at DESC;
|
||||
-- end query: ISSUES_IN_TESTING_RECENTLY
|
||||
|
||||
-- Show everyone involved with a specific ticket
|
||||
-- query: WHO_TOUCHED_IT
|
||||
SELECT
|
||||
i.id,
|
||||
i.epic_id,
|
||||
@@ -37,8 +74,431 @@ SELECT
|
||||
FROM
|
||||
issues i JOIN
|
||||
change_logs c ON i.id = c.issue_id
|
||||
WHERE i.id in ('UUP-848')
|
||||
WHERE i.id in ('UUP-146')
|
||||
GROUP BY i.id, i.epic_id, i.status;
|
||||
-- end query: WHO_TOUCHED_IT
|
||||
|
||||
|
||||
select status, count(*) from issues where issue_type = 'Bug' group by status;
|
||||
-- List all linked issues for an issue
|
||||
-- query: LINKED_ISSUES
|
||||
SELECT
|
||||
l.from_id,
|
||||
l.link_type,
|
||||
i.id,
|
||||
i.summary
|
||||
FROM linked_issues l JOIN
|
||||
issues i ON l.to_id = i.id
|
||||
WHERE l.from_id = 'UUP-441';
|
||||
-- end query: LINKED_ISSUES
|
||||
|
||||
-- Summarize progress by feature
|
||||
-- query: PROGRESS_BY_FEATURE
|
||||
SELECT
|
||||
f.id AS fid,
|
||||
f.epic_id AS "Epic",
|
||||
f.category AS "Feature Category",
|
||||
f.name AS "Feature Name",
|
||||
SUM(CASE WHEN i.issue_type = 'Story' AND i.status = 'Done' THEN 1 ELSE 0 END) AS "Stories Done",
|
||||
SUM(CASE WHEN i.issue_type = 'Bug' AND i.status = 'Done' THEN 1 ELSE 0 END) AS "Defects Closed",
|
||||
SUM(CASE WHEN i.issue_type = 'Story' AND i.status <> 'Done' THEN 1 ELSE 0 END) AS "Str Open",
|
||||
SUM(CASE WHEN i.issue_type = 'Bug' AND i.status <> 'Done' THEN 1 ELSE 0 END) AS "Defects Open",
|
||||
COUNT(i.id) AS "Tot. Issues",
|
||||
f.status AS "Status",
|
||||
f.target_release AS "Rev",
|
||||
f.confidence AS "Conf."
|
||||
FROM features f
|
||||
LEFT JOIN features_issues l ON f.id = l.feature_id
|
||||
LEFT JOIN issues i ON i.id = l.issue_id
|
||||
WHERE
|
||||
i.resolution NOT IN ('Declined', 'Duplicate', 'Won''t Do')
|
||||
GROUP BY f.id
|
||||
ORDER BY f.target_release, f.category, f.id;
|
||||
-- end query: PROGRESS_BY_FEATURE
|
||||
|
||||
-- Find all stories for a feature
|
||||
-- query: FEATURE_STORIES
|
||||
\set feature_id (127)
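-- psql substitutes :feature_id with the value set above, so the WHERE clauses
-- below read "id in (127)"; edit the \set line to inspect other features.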
|
||||
-- select id, epic_id, category, name, status, target_release as rev, confidence
|
||||
-- from features
|
||||
-- where id in :feature_id
|
||||
-- order by id;
|
||||
|
||||
SELECT
|
||||
f.id as fid,
|
||||
f.category,
|
||||
f.name as feature_name,
|
||||
f.status,
|
||||
f.target_release as rev,
|
||||
f.confidence as conf,
|
||||
i.id,
|
||||
-- l.id as link_id,
|
||||
i.issue_type,
|
||||
i.status,
|
||||
i.test_phase,
|
||||
i.summary,
|
||||
i.resolution
|
||||
FROM features f
|
||||
LEFT JOIN features_issues l ON f.id = l.feature_id
|
||||
LEFT JOIN issues i ON
|
||||
--i.status <> 'Done' AND
|
||||
i.id = l.issue_id
|
||||
WHERE
|
||||
f.id in :feature_id AND
|
||||
(i.resolution is null or i.resolution NOT IN ('Declined', 'Duplicate', 'Won''t Do'))
|
||||
ORDER BY f.id, i.issue_type desc, i.status;
|
||||
-- end query: FEATURE_STORIES
|
||||
|
||||
-- Find all issues not linked to a feature
|
||||
-- query: ISSUES_WITHOUT_FEATURE
|
||||
SELECT
|
||||
i.id,
|
||||
i.issue_type,
|
||||
i.epic_id,
|
||||
i.summary,
|
||||
i.status
|
||||
FROM issues i LEFT OUTER JOIN
|
||||
features_issues l ON i.id = l.issue_id
|
||||
WHERE
|
||||
l.id IS NULL AND
|
||||
i.issue_type <> 'Epic' AND
|
||||
i.resolution NOT IN ('Declined', 'Duplicate', 'Won''t Do');
|
||||
-- end query: ISSUES_WITHOUT_FEATURE
|
||||
|
||||
-- Feature Report for Sara
|
||||
SELECT
|
||||
f.id, f.name, f.epic_id, array_remove(array_agg(s.id), NULL) as stories, array_remove(array_agg(d.id), NULL) as defects
|
||||
FROM features f LEFT JOIN features_issues l ON l.feature_id = f.id
|
||||
LEFT JOIN issues s ON l.issue_id = s.id AND s.issue_type in ('Story', 'Task')
|
||||
LEFT JOIN issues d ON l.issue_id = d.id AND d.issue_type = 'Bug'
|
||||
GROUP BY f.id, f.name, f.epic_id ORDER BY f.id;
|
||||
|
||||
|
||||
-- Find issues by keywords in the summary
|
||||
select id, status, summary, resolution
|
||||
from issues
|
||||
where status = 'Done' and LOWER(summary) like '%delink%' order by status desc;
|
||||
|
||||
|
||||
-- Find issues by keyword and link to feature
|
||||
-- query: KEYWORD_FIND_WITH_FEATURES
|
||||
select f.id, f.name, i.id, i.status, i.summary, i.resolution
|
||||
from issues i
|
||||
left join features_issues l on i.id = l.issue_id
|
||||
left join features f on f.id = l.feature_id
|
||||
where i.status = 'Done' and LOWER(i.summary) like '%quick%' order by i.status desc;
|
||||
-- end query: KEYWORD_FIND_WITH_FEATURES
|
||||
|
||||
|
||||
--- WORKING ---
|
||||
select id, epic_id, category, name from features order by id asc;
|
||||
select * from features order by id;
|
||||
|
||||
select * from issues where summary like '%onfigurable%';
|
||||
select * from features_issues;
|
||||
select * from change_logs limit 5;
|
||||
|
||||
select * from features_issues where issue_id = 'UUP-128';
|
||||
|
||||
update features set target_release = '2.1-alpha' where target_release = '1.1-alpha1';
|
||||
select distinct target_release from features;
|
||||
|
||||
-- query: SET_FEATURE_STATUS
|
||||
\set feature_id (109)
|
||||
UPDATE features SET
|
||||
target_release = 'TBD',
|
||||
confidence = 100,
|
||||
status = 'todo'
|
||||
WHERE id in :feature_id;
|
||||
-- end query: SET_FEATURE_STATUS
|
||||
|
||||
update features set
|
||||
target_release = '2.1',
|
||||
notes = 'Depends on Account Adjuster functionality.'
|
||||
where id = 82;
|
||||
|
||||
select * from features_issues where feature_id = 11;
|
||||
|
||||
-- Find issues missing the version based on the feature target_release
|
||||
select
|
||||
f.id, f.name as feature_name, i.id, i.summary, f.target_release, i.fix_versions
|
||||
from features f
|
||||
join features_issues l on f.id = l.feature_id
|
||||
join issues i on i.id = l.issue_id
|
||||
where not i.fix_versions @> ARRAY[f.target_release];
|
||||
|
||||
select * from features where category = 'Miscellaneous';
|
||||
select distinct category from features;
|
||||
insert into features (category, name, target_release, confidence, epic_id) values
|
||||
('Miscellaneous', 'Grid Template Functionality', '2.1', 80, 'UUP-18')
|
||||
returning *;
|
||||
|
||||
update features set epic_id = 'UUP-28' where id = 134;
|
||||
update features set category = 'Account Maintenance' where id = 11;
|
||||
|
||||
('Drop 3.5 Items', 'Auto-adjust Column Widths', '1.1'),
|
||||
('Drop 3.5 Items', 'Proposed Orders - Add Allocation', '1.1'),
|
||||
('Drop 3.5 Items', 'Auto Select Accounts when clicking Maintain', '1.1'),
|
||||
('Drop 3.5 Items', 'Trade Shortcut on TCA Screens.', '1.1'),
|
||||
('Drop 3.5 Items', 'Paging Defaults', '1.1-alpha'),
|
||||
('Drop 3.5 Items', 'More UI Clean Up', '1.1'),
|
||||
('Drop 3.5 Items', 'Proposed Orders - Enhance field validations', '1.1'),
|
||||
('Blotter / Drop 3.5 Items', 'Wash Sale Options', '1.1'),
|
||||
('Drop 3.5 Items', 'Terms of Use file and hyperlink', '1.1'),
|
||||
('Drop 3.5 Items', 'Improve PDF Export', '1.1');
|
||||
|
||||
insert into features_issues (feature_id, issue_id) values
|
||||
(60, 'UUP-74'),
|
||||
(60, 'UUP-78'),
|
||||
(60, 'UUP-30'),
|
||||
(60, 'UUP-101');
|
||||
|
||||
insert into features_issues (feature_id, issue_id) values
|
||||
('83', 'UUP-244'),('83', 'UUP-284'),('83', 'UUP-513'),('83', 'UUP-285'),
|
||||
('84', 'UUP-296'),('84', 'UUP-519'),('84', 'UUP-297'),
|
||||
('85', 'UUP-271'),('85', 'UUP-270'),('85', 'UUP-506'),
|
||||
('86', 'UUP-287'),('86', 'UUP-286'),('86', 'UUP-514'),
|
||||
('87', 'UUP-290'),('87', 'UUP-516'),('87', 'UUP-291'),
|
||||
('88', 'UUP-272'),('88', 'UUP-507'),('88', 'UUP-273'),
|
||||
('89', 'UUP-282'),('89', 'UUP-512'),('89', 'UUP-283'),
|
||||
('90', 'UUP-281'),('90', 'UUP-280'),('90', 'UUP-511'),
|
||||
('91', 'UUP-288'),('91', 'UUP-515'),('91', 'UUP-289'),
|
||||
('92', 'UUP-277'),('92', 'UUP-276'),('92', 'UUP-509'),
|
||||
('93', 'UUP-269'),('93', 'UUP-268'),('93', 'UUP-505'),
|
||||
('94', 'UUP-292'),('94', 'UUP-517'),('94', 'UUP-293'),
|
||||
('95', 'UUP-295'),('95', 'UUP-294'),('95', 'UUP-518'),
|
||||
('96', 'UUP-279'),('96', 'UUP-278'),('96', 'UUP-510'),
|
||||
('97', 'UUP-247'),('97', 'UUP-494'),('97', 'UUP-246'),
|
||||
('98', 'UUP-249'),('98', 'UUP-495'),('98', 'UUP-248'),
|
||||
('99', 'UUP-255'),('99', 'UUP-254'),('99', 'UUP-498'),
|
||||
('100', 'UUP-262'),('100', 'UUP-502'),('100', 'UUP-263'),
|
||||
('101', 'UUP-253'),('101', 'UUP-252'),('101', 'UUP-497'),
|
||||
('102', 'UUP-258'),('102', 'UUP-500'),('102', 'UUP-259'),
|
||||
('103', 'UUP-266'),('103', 'UUP-504'),('103', 'UUP-267'),
|
||||
('104', 'UUP-264'),('104', 'UUP-503'),('104', 'UUP-265'),
|
||||
('105', 'UUP-257'),('105', 'UUP-256'),('105', 'UUP-499'),
|
||||
('106', 'UUP-250'),('106', 'UUP-496'),('106', 'UUP-251');
|
||||
|
||||
insert into features_issues (feature_id, issue_id) values
|
||||
(131, 'UUP-631'),
|
||||
(74, 'UUP-1103'),
|
||||
(4, 'UUP-1114'),
|
||||
(97, 'UUP-80'),
|
||||
(72, 'UUP-465'),
|
||||
(71, 'UUP-1159'),
|
||||
(35, 'UUP-1180'),
|
||||
(70, 'UUP-637'),
|
||||
(45, 'UUP-76'),
|
||||
(37, 'UUP-1175'),
|
||||
(70, 'UUP-1216'),
|
||||
(110, 'UUP-176'),
|
||||
(70, 'UUP-65'),
|
||||
(45, 'UUP-931'),
|
||||
(69, 'UUP-1004'),
|
||||
(45, 'UUP-928'),
|
||||
(115, 'UUP-316'),
|
||||
(49, 'UUP-1142'),
|
||||
(16, 'UUP-383'),
|
||||
(24, 'UUP-400'),
|
||||
(69, 'UUP-1005'),
|
||||
(53, 'UUP-937'),
|
||||
(25, 'UUP-1118'),
|
||||
(109, 'UUP-274'),
|
||||
(32, 'UUP-1178'),
|
||||
(70, 'UUP-431'),
|
||||
(17, 'UUP-912'),
|
||||
(16, 'UUP-687'),
|
||||
(16, 'UUP-417'),
|
||||
(12, 'UUP-1112'),
|
||||
(131, 'UUP-106'),
|
||||
(12, 'UUP-1108'),
|
||||
(47, 'UUP-930'),
|
||||
(24, 'UUP-611'),
|
||||
(70, 'UUP-682'),
|
||||
(57, 'UUP-936'),
|
||||
(35, 'UUP-1137'),
|
||||
(70, 'UUP-1158'),
|
||||
(18, 'UUP-914'),
|
||||
(19, 'UUP-1196'),
|
||||
(12, 'UUP-376'),
|
||||
(134, 'UUP-298'),
|
||||
(69, 'UUP-1217'),
|
||||
(24, 'UUP-1051'),
|
||||
(11, 'UUP-1127'),
|
||||
(16, 'UUP-548'),
|
||||
(70, 'UUP-681'),
|
||||
(73, 'UUP-1176'),
|
||||
(70, 'UUP-955'),
|
||||
(49, 'UUP-802'),
|
||||
(25, 'UUP-1069'),
|
||||
(135, 'UUP-194'),
|
||||
(25, 'UUP-1099'),
|
||||
(37, 'UUP-1227'),
|
||||
(72, 'UUP-1035'),
|
||||
(72, 'UUP-1073'),
|
||||
(25, 'UUP-1068'),
|
||||
(43, 'UUP-845'),
|
||||
(47, 'UUP-1133'),
|
||||
(11, 'UUP-1132'),
|
||||
(45, 'UUP-138'),
|
||||
(37, 'UUP-1177'),
|
||||
(16, 'UUP-1163'),
|
||||
(131, 'UUP-82'),
|
||||
(50, 'UUP-932'),
|
||||
(62, 'UUP-1151'),
|
||||
(126, 'UUP-324'),
|
||||
(4, 'UUP-1181'),
|
||||
(16, 'UUP-1162'),
|
||||
(16, 'UUP-1078'),
|
||||
(41, 'UUP-557'),
|
||||
(97, 'UUP-721'),
|
||||
(11, 'UUP-1131'),
|
||||
(72, 'UUP-1226'),
|
||||
(110, 'UUP-459'),
|
||||
(8, 'UUP-966'),
|
||||
(47, 'UUP-36'),
|
||||
(136, 'UUP-180');
|
||||
|
||||
UUP-1084
|
||||
UUP-1152
|
||||
UUP-1192
|
||||
UUP-120
|
||||
UUP-1055
|
||||
UUP-1136
|
||||
UUP-1223
|
||||
UUP-1134
|
||||
UUP-84
|
||||
UUP-1021
|
||||
UUP-1006
|
||||
UUP-127
|
||||
UUP-924
|
||||
UUP-1167
|
||||
UUP-748
|
||||
UUP-1019
|
||||
UUP-1002
|
||||
UUP-1199
|
||||
UUP-1003
|
||||
UUP-439
|
||||
UUP-108
|
||||
UUP-1170
|
||||
UUP-438
|
||||
UUP-1198
|
||||
UUP-1079
|
||||
UUP-873
|
||||
UUP-212
|
||||
UUP-919
|
||||
UUP-1206
|
||||
UUP-929
|
||||
UUP-775
|
||||
UUP-1173
|
||||
UUP-723
|
||||
UUP-322
|
||||
UUP-1190
|
||||
UUP-871
|
||||
UUP-61
|
||||
UUP-673
|
||||
UUP-338
|
||||
UUP-1074
|
||||
UUP-320
|
||||
UUP-330
|
||||
UUP-1076
|
||||
UUP-1071
|
||||
UUP-875
|
||||
UUP-1085
|
||||
UUP-690
|
||||
UUP-695
|
||||
UUP-1143
|
||||
UUP-566
|
||||
UUP-700
|
||||
UUP-1072
|
||||
UUP-1187
|
||||
UUP-1062
|
||||
UUP-1014
|
||||
UUP-1224
|
||||
UUP-216
|
||||
UUP-1208
|
||||
UUP-951
|
||||
UUP-1119
|
||||
UUP-1202
|
||||
UUP-100
|
||||
UUP-705
|
||||
UUP-1113
|
||||
UUP-314
|
||||
UUP-1225
|
||||
UUP-1222
|
||||
UUP-1066
|
||||
UUP-915
|
||||
UUP-573
|
||||
UUP-1053
|
||||
UUP-876
|
||||
UUP-467
|
||||
UUP-1090
|
||||
UUP-933
|
||||
UUP-909
|
||||
UUP-945
|
||||
UUP-47
|
||||
UUP-874
|
||||
UUP-1148
|
||||
UUP-956
|
||||
UUP-1149
|
||||
UUP-1123
|
||||
UUP-952
|
||||
UUP-441
|
||||
UUP-140
|
||||
UUP-1034
|
||||
UUP-868
|
||||
UUP-81
|
||||
UUP-922
|
||||
UUP-1182
|
||||
UUP-143
|
||||
UUP-1179
|
||||
UUP-1204
|
||||
UUP-1120
|
||||
UUP-139
|
||||
UUP-125
|
||||
UUP-93
|
||||
UUP-107
|
||||
UUP-312
|
||||
UUP-242
|
||||
UUP-170
|
||||
UUP-1080
|
||||
UUP-575
|
||||
UUP-549
|
||||
UUP-1083
|
||||
UUP-946
|
||||
UUP-869
|
||||
UUP-1195
|
||||
UUP-571
|
||||
UUP-1191
|
||||
UUP-410
|
||||
UUP-659
|
||||
UUP-870
|
||||
UUP-1077
|
||||
UUP-1188
|
||||
UUP-940
|
||||
UUP-70
|
||||
UUP-1213
|
||||
UUP-971
|
||||
UUP-925
|
||||
UUP-35
|
||||
UUP-411
|
||||
UUP-432
|
||||
UUP-336
|
||||
UUP-1000
|
||||
UUP-684
|
||||
UUP-592
|
||||
UUP-1157
|
||||
UUP-910
|
||||
UUP-1194
|
||||
UUP-326
|
||||
UUP-926
|
||||
UUP-689
|
||||
UUP-943
|
||||
UUP-950
|
||||
UUP-627
|
||||
UUP-1220
|
||||
UUP-69
|
||||
UUP-606
|
||||
UUP-1036
|
||||
UUP-1212
|
||||
UUP-917
|
||||
UUP-1100
|
||||
|