diff --git a/grafana/dashboards/AzureDevOps.json b/grafana/dashboards/AzureDevOps.json index fd00b601c4b..38e416180fd 100644 --- a/grafana/dashboards/AzureDevOps.json +++ b/grafana/dashboards/AzureDevOps.json @@ -154,7 +154,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "select\n\tcount(distinct pr.id) as pull_request_count\nfrom \n\tpull_requests pr\nwhere\n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id:sqlstring}+'')\n\t\n\n", + "rawSql": "select\n\tcount(distinct pr.id) as pull_request_count\nfrom \n\tpull_requests pr\nwhere\n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id})\n\t\n\n", "refId": "A", "select": [ [ @@ -292,7 +292,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _prs as(\n SELECT\n DATE_ADD(date(created_date), INTERVAL -DAY(date(created_date))+1 DAY) as time,\n count(distinct id) as pr_count\n FROM pull_requests\n WHERE\n base_repo_id in (${repo_id:sqlstring}+'')\n and $__timeFilter(created_date)\n and created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n group by 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n pr_count as \"Pull Request Count\"\nFROM _prs\nORDER BY time\n", + "rawSql": "with _prs as(\n SELECT\n DATE_ADD(date(created_date), INTERVAL -DAY(date(created_date))+1 DAY) as time,\n count(distinct id) as pr_count\n FROM pull_requests\n WHERE\n base_repo_id in (${repo_id})\n and $__timeFilter(created_date)\n and created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n group by 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n pr_count as \"Pull Request Count\"\nFROM _prs\nORDER BY time\n", "refId": "A", "select": [ [ @@ -431,7 +431,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nselect\n author_name,\n\tcount(distinct pr.id) as merged_pull_request_count\nfrom \n\tpull_requests pr\nwhere\n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id:sqlstring}+'')\n\tand pr.status = 'MERGED'\ngroup by 1\norder by 2 desc\nlimit 20\n", + "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nselect\n author_name,\n\tcount(distinct pr.id) as merged_pull_request_count\nfrom \n\tpull_requests pr\nwhere\n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id})\n\tand pr.status = 'MERGED'\ngroup by 1\norder by 2 desc\nlimit 20\n", "refId": "A", "select": [ [ @@ -563,7 +563,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nselect\n count(distinct case when status='CLOSED' then id else null end)/count(distinct case when status in ('MERGED','CLOSED') then id else null end) as ratio\nfrom \n\tpull_requests pr\nwhere\n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id:sqlstring}+'')", + "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. 
You can check out the original status from the field `original_status`\nselect\n count(distinct case when status='CLOSED' then id else null end)/count(distinct case when status in ('MERGED','CLOSED') then id else null end) as ratio\nfrom \n\tpull_requests pr\nwhere\n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id})", "refId": "A", "select": [ [ @@ -678,7 +678,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nSELECT\n DATE_ADD(date(created_date), INTERVAL -DAYOFMONTH(date(created_date))+1 DAY) as time,\n count(distinct case when status = 'OPEN' then id else null end) as \"PR: Open\",\n count(distinct case when status = 'CLOSED' then id else null end) as \"PR: Closed without merging\",\n count(distinct case when status = 'MERGED' then id else null end) as \"PR: Merged\"\nFROM pull_requests\nWHERE\n $__timeFilter(created_date)\n and base_repo_id in (${repo_id:sqlstring}+'')\ngroup by 1\n", + "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nSELECT\n DATE_ADD(date(created_date), INTERVAL -DAYOFMONTH(date(created_date))+1 DAY) as time,\n count(distinct case when status = 'OPEN' then id else null end) as \"PR: Open\",\n count(distinct case when status = 'CLOSED' then id else null end) as \"PR: Closed without merging\",\n count(distinct case when status = 'MERGED' then id else null end) as \"PR: Merged\"\nFROM pull_requests\nWHERE\n $__timeFilter(created_date)\n and base_repo_id in (${repo_id})\ngroup by 1\n", "refId": "A", "select": [ [ @@ -769,7 +769,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nselect\n\tcount(distinct pr.id) as merged_pull_request_count\nfrom \n\tpull_requests pr\nwhere\n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id:sqlstring}+'')\n\tand pr.status = 'CLOSED'", + "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nselect\n\tcount(distinct pr.id) as merged_pull_request_count\nfrom \n\tpull_requests pr\nwhere\n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id})\n\tand pr.status = 'CLOSED'", "refId": "A", "select": [ [ @@ -905,7 +905,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nSELECT\n DATE_ADD(date(created_date), INTERVAL -DAYOFMONTH(date(created_date))+1 DAY) as time,\n count(distinct case when status = 'CLOSED' then id else null end)/count(distinct case when status in ('MERGED', 'CLOSED') then id else null end) as ratio\nFROM pull_requests\nWHERE\n $__timeFilter(created_date)\n and base_repo_id in (${repo_id:sqlstring}+'')\ngroup by 1\n", + "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. 
You can check out the original status from the field `original_status`\nSELECT\n DATE_ADD(date(created_date), INTERVAL -DAYOFMONTH(date(created_date))+1 DAY) as time,\n count(distinct case when status = 'CLOSED' then id else null end)/count(distinct case when status in ('MERGED', 'CLOSED') then id else null end) as ratio\nFROM pull_requests\nWHERE\n $__timeFilter(created_date)\n and base_repo_id in (${repo_id})\ngroup by 1\n", "refId": "A", "select": [ [ @@ -995,7 +995,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "select\n\tavg(TIMESTAMPDIFF(Minute,created_date,merged_date)/1440)\nfrom \n\tpull_requests\nwhere \n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id:sqlstring}+'')\n\tand merged_date is not null\n\n\n", + "rawSql": "select\n\tavg(TIMESTAMPDIFF(Minute,created_date,merged_date)/1440)\nfrom \n\tpull_requests\nwhere \n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id})\n\tand merged_date is not null\n\n\n", "refId": "A", "select": [ [ @@ -1115,7 +1115,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _prs as(\n SELECT\n DATE_ADD(date(created_date), INTERVAL -DAY(date(created_date))+1 DAY) as time,\n avg(TIMESTAMPDIFF(Minute,created_date,merged_date)/1440) as time_to_merge\n FROM pull_requests\n WHERE\n $__timeFilter(created_date)\n and base_repo_id in (${repo_id:sqlstring}+'')\n and created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n group by 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n time_to_merge as \"Time to Merge\"\nFROM _prs\nORDER BY time\n", + "rawSql": "with _prs as(\n SELECT\n DATE_ADD(date(created_date), INTERVAL -DAY(date(created_date))+1 DAY) as time,\n avg(TIMESTAMPDIFF(Minute,created_date,merged_date)/1440) as time_to_merge\n FROM pull_requests\n WHERE\n $__timeFilter(created_date)\n and base_repo_id in (${repo_id})\n and created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n group by 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n time_to_merge as \"Time to Merge\"\nFROM _prs\nORDER BY time\n", "refId": "A", "select": [ [ @@ -1245,7 +1245,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n count(distinct id)\nFROM \n cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and result = 'SUCCESS'\n and id like \"%azure%\"\n and cicd_scope_id in (${repo_id:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)", + "rawSql": "SELECT\n count(distinct id)\nFROM \n cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and result = 'SUCCESS'\n and id like \"%azure%\"\n and cicd_scope_id in (${repo_id})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)", "refId": "A", "select": [ [ @@ -1348,7 +1348,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n 1.0 * count(case when result = 'SUCCESS' then id else null end)/count(distinct id)\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%azure%\"\n and cicd_scope_id in (${repo_id:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 
MONTH)", + "rawSql": "SELECT\n 1.0 * count(case when result = 'SUCCESS' then id else null end)/count(distinct id)\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%azure%\"\n and cicd_scope_id in (${repo_id})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)", "refId": "A", "select": [ [ @@ -1535,7 +1535,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n result,\n count(distinct id) as build_count\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%azure%\"\n and cicd_scope_id in (${repo_id:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\nGROUP BY 1\nORDER BY 2 desc", + "rawSql": "SELECT\n result,\n count(distinct id) as build_count\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%azure%\"\n and cicd_scope_id in (${repo_id})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\nGROUP BY 1\nORDER BY 2 desc", "refId": "A", "select": [ [ @@ -1636,7 +1636,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n avg(duration_sec/60) as duration_in_minutes\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%azure%\"\n and cicd_scope_id in (${repo_id:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)", + "rawSql": "SELECT\n avg(duration_sec/60) as duration_in_minutes\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%azure%\"\n and cicd_scope_id in (${repo_id})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)", "refId": "A", "select": [ [ @@ -1774,7 +1774,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "WITH _builds as(\n SELECT\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\n count(distinct id) as build_count\n FROM cicd_pipelines\n WHERE\n $__timeFilter(finished_date)\n and result = 'SUCCESS'\n and id like \"%azure%\"\n and cicd_scope_id in (${repo_id:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n GROUP BY 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n build_count as \"Pipeline runs Count\"\nFROM _builds\nORDER BY time\n", + "rawSql": "WITH _builds as(\n SELECT\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\n count(distinct id) as build_count\n FROM cicd_pipelines\n WHERE\n $__timeFilter(finished_date)\n and result = 'SUCCESS'\n and id like \"%azure%\"\n and cicd_scope_id in (${repo_id})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n GROUP BY 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n build_count as \"Pipeline 
runs Count\"\nFROM _builds\nORDER BY time\n", "refId": "A", "select": [ [ @@ -1931,7 +1931,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "WITH _build_success_rate as(\r\n SELECT\r\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\r\n result,\r\n id\r\n FROM\r\n cicd_pipelines\r\n WHERE\r\n $__timeFilter(finished_date)\r\n and id like \"%azure%\"\r\n and cicd_scope_id in (${repo_id:sqlstring}+'')\r\n -- the following condition will remove the month with incomplete data\r\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\r\n GROUP BY \r\n time, result, id\r\n)\r\n\r\nSELECT \r\n date_format(time,'%M %Y') as month,\r\n 1.0 * sum(case when result = 'SUCCESS' then 1 else 0 end)/ count(*) as \"Pipeline runs Success Rate\"\r\nFROM _build_success_rate\r\nGROUP BY time\r\nORDER BY time", + "rawSql": "WITH _build_success_rate as(\r\n SELECT\r\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\r\n result,\r\n id\r\n FROM\r\n cicd_pipelines\r\n WHERE\r\n $__timeFilter(finished_date)\r\n and id like \"%azure%\"\r\n and cicd_scope_id in (${repo_id})\r\n -- the following condition will remove the month with incomplete data\r\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\r\n GROUP BY \r\n time, result, id\r\n)\r\n\r\nSELECT \r\n date_format(time,'%M %Y') as month,\r\n 1.0 * sum(case when result = 'SUCCESS' then 1 else 0 end)/ count(*) as \"Pipeline runs Success Rate\"\r\nFROM _build_success_rate\r\nGROUP BY time\r\nORDER BY time", "refId": "A", "select": [ [ @@ -2085,7 +2085,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "WITH _builds as(\n SELECT\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\n avg(duration_sec) as mean_duration_sec\n FROM \n cicd_pipelines\n WHERE\n $__timeFilter(finished_date)\n and id like \"%azure%\"\n and cicd_scope_id in (${repo_id:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n GROUP BY 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n mean_duration_sec/60 as mean_duration_minutes\nFROM _builds\nORDER BY time\n", + "rawSql": "WITH _builds as(\n SELECT\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\n avg(duration_sec) as mean_duration_sec\n FROM \n cicd_pipelines\n WHERE\n $__timeFilter(finished_date)\n and id like \"%azure%\"\n and cicd_scope_id in (${repo_id})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n GROUP BY 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n mean_duration_sec/60 as mean_duration_minutes\nFROM _builds\nORDER BY time\n", "refId": "A", "select": [ [ diff --git a/grafana/dashboards/Bamboo.json b/grafana/dashboards/Bamboo.json index 763e7f3fd89..d34c5e75782 100644 --- a/grafana/dashboards/Bamboo.json +++ b/grafana/dashboards/Bamboo.json @@ -120,7 +120,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n count(distinct id)\nFROM \n cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and result = 'SUCCESS'\n and id like \"%bamboo%\"\n and cicd_scope_id in (${plan_id:sqlstring}+'')\n -- the 
following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)", + "rawSql": "SELECT\n count(distinct id)\nFROM \n cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and result = 'SUCCESS'\n and id like \"%bamboo%\"\n and cicd_scope_id in (${plan_id})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)", "refId": "A", "select": [ [ @@ -223,7 +223,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n 1.0 * count(case when result = 'SUCCESS' then id else null end)/count(distinct id)\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%bamboo%\"\n and cicd_scope_id in (${plan_id:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)", + "rawSql": "SELECT\n 1.0 * count(case when result = 'SUCCESS' then id else null end)/count(distinct id)\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%bamboo%\"\n and cicd_scope_id in (${plan_id})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)", "refId": "A", "select": [ [ @@ -410,7 +410,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n result,\n count(distinct id) as build_count\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%bamboo%\"\n and cicd_scope_id in (${plan_id:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\nGROUP BY 1\nORDER BY 2 desc", + "rawSql": "SELECT\n result,\n count(distinct id) as build_count\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%bamboo%\"\n and cicd_scope_id in (${plan_id})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\nGROUP BY 1\nORDER BY 2 desc", "refId": "A", "select": [ [ @@ -511,7 +511,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n avg(duration_sec/60) as duration_in_minutes\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%bamboo%\"\n and cicd_scope_id in (${plan_id:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)", + "rawSql": "SELECT\n avg(duration_sec/60) as duration_in_minutes\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%bamboo%\"\n and cicd_scope_id in (${plan_id})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)", "refId": "A", "select": [ [ @@ -649,7 +649,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "WITH _builds as(\n SELECT\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\n count(distinct id) as build_count\n 
FROM cicd_pipelines\n WHERE\n $__timeFilter(finished_date)\n and result = 'SUCCESS'\n and id like \"%bamboo%\"\n and cicd_scope_id in (${plan_id:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n GROUP BY 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n build_count as \"Build Count\"\nFROM _builds\nORDER BY time\n", + "rawSql": "WITH _builds as(\n SELECT\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\n count(distinct id) as build_count\n FROM cicd_pipelines\n WHERE\n $__timeFilter(finished_date)\n and result = 'SUCCESS'\n and id like \"%bamboo%\"\n and cicd_scope_id in (${plan_id})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n GROUP BY 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n build_count as \"Build Count\"\nFROM _builds\nORDER BY time\n", "refId": "A", "select": [ [ @@ -806,7 +806,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "WITH _build_success_rate as(\r\n SELECT\r\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\r\n result,\r\n id\r\n FROM\r\n cicd_pipelines\r\n WHERE\r\n $__timeFilter(finished_date)\r\n and id like \"%bamboo%\"\r\n and cicd_scope_id in (${plan_id:sqlstring}+'')\r\n -- the following condition will remove the month with incomplete data\r\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\r\n GROUP BY\r\n time, result, id\r\n)\r\n\r\nSELECT \r\n date_format(time,'%M %Y') as month,\r\n 1.0 * sum(case when result = 'SUCCESS' then 1 else 0 end)/ count(*) as \"Build Success Rate\"\r\nFROM _build_success_rate\r\nGROUP BY time\r\nORDER BY time", + "rawSql": "WITH _build_success_rate as(\r\n SELECT\r\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\r\n result,\r\n id\r\n FROM\r\n cicd_pipelines\r\n WHERE\r\n $__timeFilter(finished_date)\r\n and id like \"%bamboo%\"\r\n and cicd_scope_id in (${plan_id})\r\n -- the following condition will remove the month with incomplete data\r\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\r\n GROUP BY\r\n time, result, id\r\n)\r\n\r\nSELECT \r\n date_format(time,'%M %Y') as month,\r\n 1.0 * sum(case when result = 'SUCCESS' then 1 else 0 end)/ count(*) as \"Build Success Rate\"\r\nFROM _build_success_rate\r\nGROUP BY time\r\nORDER BY time", "refId": "A", "select": [ [ @@ -973,7 +973,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\n count(distinct case when result = 'SUCCESS' then id else null end) as successful_build_count,\n count(distinct case when result != 'SUCCESS' then id else null end) as failed_build_count\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%bamboo%\"\n and cicd_scope_id in (${plan_id:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\nGROUP BY 1", + "rawSql": "SELECT\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as 
time,\n count(distinct case when result = 'SUCCESS' then id else null end) as successful_build_count,\n count(distinct case when result != 'SUCCESS' then id else null end) as failed_build_count\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%bamboo%\"\n and cicd_scope_id in (${plan_id})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\nGROUP BY 1", "refId": "A", "select": [ [ @@ -1109,7 +1109,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "WITH _builds as(\n SELECT\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\n avg(duration_sec) as mean_duration_sec\n FROM \n cicd_pipelines\n WHERE\n $__timeFilter(finished_date)\n and id like \"%bamboo%\"\n and cicd_scope_id in (${plan_id:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n GROUP BY 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n mean_duration_sec/60 as mean_duration_minutes\nFROM _builds\nORDER BY time\n", + "rawSql": "WITH _builds as(\n SELECT\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\n avg(duration_sec) as mean_duration_sec\n FROM \n cicd_pipelines\n WHERE\n $__timeFilter(finished_date)\n and id like \"%bamboo%\"\n and cicd_scope_id in (${plan_id})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n GROUP BY 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n mean_duration_sec/60 as mean_duration_minutes\nFROM _builds\nORDER BY time\n", "refId": "A", "select": [ [ diff --git a/grafana/dashboards/BitBucket.json b/grafana/dashboards/BitBucket.json index aba570cdf3e..775bf092dcc 100644 --- a/grafana/dashboards/BitBucket.json +++ b/grafana/dashboards/BitBucket.json @@ -154,7 +154,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "select\n\tcount(distinct pr.id) as pull_request_count\nfrom \n\tpull_requests pr\nwhere\n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id:sqlstring}+'')\n\t\n\n", + "rawSql": "select\n\tcount(distinct pr.id) as pull_request_count\nfrom \n\tpull_requests pr\nwhere\n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id})\n\t\n\n", "refId": "A", "select": [ [ @@ -292,7 +292,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _prs as(\n SELECT\n DATE_ADD(date(created_date), INTERVAL -DAY(date(created_date))+1 DAY) as time,\n count(distinct id) as pr_count\n FROM pull_requests\n WHERE\n base_repo_id in (${repo_id:sqlstring}+'')\n and $__timeFilter(created_date)\n and created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n group by 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n pr_count as \"Pull Request Count\"\nFROM _prs\nORDER BY time\n", + "rawSql": "with _prs as(\n SELECT\n DATE_ADD(date(created_date), INTERVAL -DAY(date(created_date))+1 DAY) as time,\n count(distinct id) as pr_count\n FROM pull_requests\n WHERE\n base_repo_id in (${repo_id})\n and $__timeFilter(created_date)\n and created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n group by 1\n)\n\nSELECT \n 
date_format(time,'%M %Y') as month,\n pr_count as \"Pull Request Count\"\nFROM _prs\nORDER BY time\n", "refId": "A", "select": [ [ @@ -431,7 +431,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nselect\n author_name,\n\tcount(distinct pr.id) as merged_pull_request_count\nfrom \n\tpull_requests pr\nwhere\n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id:sqlstring}+'')\n\tand pr.status = 'MERGED'\ngroup by 1\norder by 2 desc\nlimit 20\n", + "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nselect\n author_name,\n\tcount(distinct pr.id) as merged_pull_request_count\nfrom \n\tpull_requests pr\nwhere\n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id})\n\tand pr.status = 'MERGED'\ngroup by 1\norder by 2 desc\nlimit 20\n", "refId": "A", "select": [ [ @@ -563,7 +563,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nselect\n count(distinct case when status='CLOSED' then id else null end)/count(distinct case when status in ('MERGED','CLOSED') then id else null end) as ratio\nfrom \n\tpull_requests pr\nwhere\n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id:sqlstring}+'')", + "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nselect\n count(distinct case when status='CLOSED' then id else null end)/count(distinct case when status in ('MERGED','CLOSED') then id else null end) as ratio\nfrom \n\tpull_requests pr\nwhere\n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id})", "refId": "A", "select": [ [ @@ -678,7 +678,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nSELECT\n DATE_ADD(date(created_date), INTERVAL -DAYOFMONTH(date(created_date))+1 DAY) as time,\n count(distinct case when status = 'OPEN' then id else null end) as \"PR: Open\",\n count(distinct case when status = 'CLOSED' then id else null end) as \"PR: Closed without merging\",\n count(distinct case when status = 'MERGED' then id else null end) as \"PR: Merged\"\nFROM pull_requests\nWHERE\n $__timeFilter(created_date)\n and base_repo_id in (${repo_id:sqlstring}+'')\ngroup by 1\n", + "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. 
You can check out the original status from the field `original_status`\nSELECT\n DATE_ADD(date(created_date), INTERVAL -DAYOFMONTH(date(created_date))+1 DAY) as time,\n count(distinct case when status = 'OPEN' then id else null end) as \"PR: Open\",\n count(distinct case when status = 'CLOSED' then id else null end) as \"PR: Closed without merging\",\n count(distinct case when status = 'MERGED' then id else null end) as \"PR: Merged\"\nFROM pull_requests\nWHERE\n $__timeFilter(created_date)\n and base_repo_id in (${repo_id})\ngroup by 1\n", "refId": "A", "select": [ [ @@ -768,7 +768,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nselect\n\tcount(distinct pr.id) as merged_pull_request_count\nfrom \n\tpull_requests pr\nwhere\n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id:sqlstring}+'')\n\tand pr.status = 'CLOSED'", + "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nselect\n\tcount(distinct pr.id) as merged_pull_request_count\nfrom \n\tpull_requests pr\nwhere\n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id})\n\tand pr.status = 'CLOSED'", "refId": "A", "select": [ [ @@ -903,7 +903,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nSELECT\n DATE_ADD(date(created_date), INTERVAL -DAYOFMONTH(date(created_date))+1 DAY) as time,\n count(distinct case when status = 'CLOSED' then id else null end)/count(distinct case when status in ('MERGED', 'CLOSED') then id else null end) as ratio\nFROM pull_requests\nWHERE\n $__timeFilter(created_date)\n and base_repo_id in (${repo_id:sqlstring}+'')\ngroup by 1\n", + "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. 
You can check out the original status from the field `original_status`\nSELECT\n DATE_ADD(date(created_date), INTERVAL -DAYOFMONTH(date(created_date))+1 DAY) as time,\n count(distinct case when status = 'CLOSED' then id else null end)/count(distinct case when status in ('MERGED', 'CLOSED') then id else null end) as ratio\nFROM pull_requests\nWHERE\n $__timeFilter(created_date)\n and base_repo_id in (${repo_id})\ngroup by 1\n", "refId": "A", "select": [ [ @@ -992,7 +992,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "select\n\tavg(TIMESTAMPDIFF(Minute,created_date,merged_date)/1440)\nfrom \n\tpull_requests\nwhere \n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id:sqlstring}+'')\n\tand merged_date is not null\n\n\n", + "rawSql": "select\n\tavg(TIMESTAMPDIFF(Minute,created_date,merged_date)/1440)\nfrom \n\tpull_requests\nwhere \n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id})\n\tand merged_date is not null\n\n\n", "refId": "A", "select": [ [ @@ -1111,7 +1111,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _prs as(\n SELECT\n DATE_ADD(date(created_date), INTERVAL -DAY(date(created_date))+1 DAY) as time,\n avg(TIMESTAMPDIFF(Minute,created_date,merged_date)/1440) as time_to_merge\n FROM pull_requests\n WHERE\n $__timeFilter(created_date)\n and base_repo_id in (${repo_id:sqlstring}+'')\n and created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n group by 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n time_to_merge as \"Time to Merge\"\nFROM _prs\nORDER BY time\n", + "rawSql": "with _prs as(\n SELECT\n DATE_ADD(date(created_date), INTERVAL -DAY(date(created_date))+1 DAY) as time,\n avg(TIMESTAMPDIFF(Minute,created_date,merged_date)/1440) as time_to_merge\n FROM pull_requests\n WHERE\n $__timeFilter(created_date)\n and base_repo_id in (${repo_id})\n and created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n group by 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n time_to_merge as \"Time to Merge\"\nFROM _prs\nORDER BY time\n", "refId": "A", "select": [ [ diff --git a/grafana/dashboards/CircleCI.json b/grafana/dashboards/CircleCI.json index b8d61e1cc8d..90ba846f894 100644 --- a/grafana/dashboards/CircleCI.json +++ b/grafana/dashboards/CircleCI.json @@ -120,7 +120,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n count(distinct id)\nFROM \n cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and result = 'SUCCESS'\n and id like \"%circleci%\"\n and cicd_scope_id in (${full_name:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)", + "rawSql": "SELECT\n count(distinct id)\nFROM \n cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and result = 'SUCCESS'\n and id like \"%circleci%\"\n and cicd_scope_id in (${full_name})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)", "refId": "A", "select": [ [ @@ -223,7 +223,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n 1.0 * count(case when result = 'SUCCESS' then id else null end)/count(distinct id)\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%circleci%\"\n and cicd_scope_id 
in (${full_name:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)", + "rawSql": "SELECT\n 1.0 * count(case when result = 'SUCCESS' then id else null end)/count(distinct id)\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%circleci%\"\n and cicd_scope_id in (${full_name})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)", "refId": "A", "select": [ [ @@ -410,7 +410,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n result,\n count(distinct id) as build_count\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%circleci%\"\n and cicd_scope_id in (${full_name:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\nGROUP BY 1\nORDER BY 2 desc", + "rawSql": "SELECT\n result,\n count(distinct id) as build_count\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%circleci%\"\n and cicd_scope_id in (${full_name})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\nGROUP BY 1\nORDER BY 2 desc", "refId": "A", "select": [ [ @@ -510,7 +510,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n avg(duration_sec/60) as duration_in_minutes\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%circleci%\"\n and cicd_scope_id in (${full_name:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)", + "rawSql": "SELECT\n avg(duration_sec/60) as duration_in_minutes\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%circleci%\"\n and cicd_scope_id in (${full_name})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)", "refId": "A", "select": [ [ @@ -631,7 +631,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "WITH _builds as(\n SELECT\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\n count(distinct id) as build_count\n FROM cicd_pipelines\n WHERE\n $__timeFilter(finished_date)\n and result = 'SUCCESS'\n and id like \"%circleci%\"\n and cicd_scope_id in (${full_name:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n GROUP BY 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n build_count as \"Build Count\"\nFROM _builds\nORDER BY time\n", + "rawSql": "WITH _builds as(\n SELECT\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\n count(distinct id) as build_count\n FROM cicd_pipelines\n WHERE\n $__timeFilter(finished_date)\n and result = 'SUCCESS'\n and id like \"%circleci%\"\n and cicd_scope_id in (${full_name})\n -- the following condition will remove the 
month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n GROUP BY 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n build_count as \"Build Count\"\nFROM _builds\nORDER BY time\n", "refId": "A", "select": [ [ @@ -788,7 +788,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "WITH _build_success_rate as(\r\n SELECT\r\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\r\n result,\r\n id\r\n FROM\r\n cicd_pipelines\r\n WHERE\r\n $__timeFilter(finished_date)\r\n and id like \"%circleci%\"\r\n and cicd_scope_id in (${full_name:sqlstring}+'')\r\n -- the following condition will remove the month with incomplete data\r\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\r\n GROUP BY\r\n time, result, id\r\n)\r\n\r\nSELECT \r\n date_format(time,'%M %Y') as month,\r\n 1.0 * sum(case when result = 'SUCCESS' then 1 else 0 end)/ count(*) as \"Build Success Rate\"\r\nFROM _build_success_rate\r\nGROUP BY 1\r\nORDER BY 1", + "rawSql": "WITH _build_success_rate as(\r\n SELECT\r\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\r\n result,\r\n id\r\n FROM\r\n cicd_pipelines\r\n WHERE\r\n $__timeFilter(finished_date)\r\n and id like \"%circleci%\"\r\n and cicd_scope_id in (${full_name})\r\n -- the following condition will remove the month with incomplete data\r\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\r\n GROUP BY\r\n time, result, id\r\n)\r\n\r\nSELECT \r\n date_format(time,'%M %Y') as month,\r\n 1.0 * sum(case when result = 'SUCCESS' then 1 else 0 end)/ count(*) as \"Build Success Rate\"\r\nFROM _build_success_rate\r\nGROUP BY 1\r\nORDER BY 1", "refId": "A", "select": [ [ @@ -955,7 +955,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\n count(distinct case when result = 'SUCCESS' then id else null end) as successful_build_count,\n count(distinct case when result != 'SUCCESS' then id else null end) as failed_build_count\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%circleci%\"\n and cicd_scope_id in (${full_name:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\nGROUP BY 1", + "rawSql": "SELECT\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\n count(distinct case when result = 'SUCCESS' then id else null end) as successful_build_count,\n count(distinct case when result != 'SUCCESS' then id else null end) as failed_build_count\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%circleci%\"\n and cicd_scope_id in (${full_name})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\nGROUP BY 1", "refId": "A", "select": [ [ @@ -1091,7 +1091,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "WITH _builds as(\n SELECT\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\n avg(duration_sec) as mean_duration_sec\n FROM \n cicd_pipelines\n WHERE\n 
$__timeFilter(finished_date)\n and id like \"%circleci%\"\n and cicd_scope_id in (${full_name:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n GROUP BY 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n mean_duration_sec/60 as mean_duration_minutes\nFROM _builds\nORDER BY time\n", + "rawSql": "WITH _builds as(\n SELECT\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\n avg(duration_sec) as mean_duration_sec\n FROM \n cicd_pipelines\n WHERE\n $__timeFilter(finished_date)\n and id like \"%circleci%\"\n and cicd_scope_id in (${full_name})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n GROUP BY 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n mean_duration_sec/60 as mean_duration_minutes\nFROM _builds\nORDER BY time\n", "refId": "A", "select": [ [ diff --git a/grafana/dashboards/ComponentAndFileLevelMetrics.json b/grafana/dashboards/ComponentAndFileLevelMetrics.json index 81c9d489256..d6ef8d62775 100644 --- a/grafana/dashboards/ComponentAndFileLevelMetrics.json +++ b/grafana/dashboards/ComponentAndFileLevelMetrics.json @@ -136,7 +136,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT file_path,\n count(distinct author_name) AS cnt\nFROM commits\nJOIN commit_files\nJOIN repo_commits rc\n ON commit_files.commit_sha = rc.commit_sha\n AND commit_files.commit_sha = commits.sha\nWHERE repo_id IN (${repo_id:sqlstring}+'')\n AND $__timeFilter(commits.authored_date)\n AND file_path REGEXP '(${selected_path:regex})'\nGROUP BY file_path\nORDER BY cnt DESC limit 10;", + "rawSql": "SELECT file_path,\n count(distinct author_name) AS cnt\nFROM commits\nJOIN commit_files\nJOIN repo_commits rc\n ON commit_files.commit_sha = rc.commit_sha\n AND commit_files.commit_sha = commits.sha\nWHERE repo_id IN (${repo_id})\n AND $__timeFilter(commits.authored_date)\n AND file_path REGEXP '(${selected_path:regex})'\nGROUP BY file_path\nORDER BY cnt DESC limit 10;", "refId": "A", "select": [ [ @@ -237,7 +237,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "SELECT file_path,\n COUNT(DISTINCT author_name) AS author_count,\n MAX(rst) AS file_total_size,\n MAX(rst) / COUNT(DISTINCT author_name) AS rate\nFROM commits\nJOIN (\n SELECT file_path,\n commit_files.commit_sha,\n SUM(additions - deletions) AS rst\n FROM commit_files\n JOIN repo_commits rc ON commit_files.commit_sha = rc.commit_sha\n WHERE repo_id IN (${repo_id:sqlstring}+'')\n GROUP BY file_path, commit_files.commit_sha\n) a ON a.commit_sha = commits.sha\nGROUP BY file_path\nHAVING author_count > 0\nORDER BY rate DESC\n", + "rawSql": "SELECT file_path,\n COUNT(DISTINCT author_name) AS author_count,\n MAX(rst) AS file_total_size,\n MAX(rst) / COUNT(DISTINCT author_name) AS rate\nFROM commits\nJOIN (\n SELECT file_path,\n commit_files.commit_sha,\n SUM(additions - deletions) AS rst\n FROM commit_files\n JOIN repo_commits rc ON commit_files.commit_sha = rc.commit_sha\n WHERE repo_id IN (${repo_id})\n GROUP BY file_path, commit_files.commit_sha\n) a ON a.commit_sha = commits.sha\nGROUP BY file_path\nHAVING author_count > 0\nORDER BY rate DESC\n", "refId": "A", "select": [ [ @@ -349,7 +349,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - 
"rawSql": "SELECT file_path,\n count(distinct sha) AS modified_num\nFROM commits\nJOIN commit_files\nJOIN repo_commits rc\n ON commit_files.commit_sha = rc.commit_sha\n AND sha=commit_files.commit_sha\n AND $__timeFilter(commits.authored_date)\nWHERE repo_id IN (${repo_id:sqlstring}+'') and file_path REGEXP '(${selected_path:regex})'\nAND $__timeFilter(commits.authored_date)\nGROUP BY file_path\nORDER BY modified_num desc\nLIMIT 15;", + "rawSql": "SELECT file_path,\n count(distinct sha) AS modified_num\nFROM commits\nJOIN commit_files\nJOIN repo_commits rc\n ON commit_files.commit_sha = rc.commit_sha\n AND sha=commit_files.commit_sha\n AND $__timeFilter(commits.authored_date)\nWHERE repo_id IN (${repo_id}) and file_path REGEXP '(${selected_path:regex})'\nAND $__timeFilter(commits.authored_date)\nGROUP BY file_path\nORDER BY modified_num desc\nLIMIT 15;", "refId": "A", "select": [ [ @@ -481,7 +481,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "SELECT file_path,\n count(distinct sha) AS cnt\nFROM commits\nJOIN commit_files\nJOIN repo_commits rc\n ON commit_files.commit_sha = rc.commit_sha\n AND sha=commit_files.commit_sha\nWHERE repo_id IN (${repo_id:sqlstring}+'')\n AND $__timeFilter(commits.authored_date)\nGROUP BY file_path\nORDER BY cnt desc\nLIMIT 10;", + "rawSql": "SELECT file_path,\n count(distinct sha) AS cnt\nFROM commits\nJOIN commit_files\nJOIN repo_commits rc\n ON commit_files.commit_sha = rc.commit_sha\n AND sha=commit_files.commit_sha\nWHERE repo_id IN (${repo_id})\n AND $__timeFilter(commits.authored_date)\nGROUP BY file_path\nORDER BY cnt desc\nLIMIT 10;", "refId": "A", "select": [ [ @@ -638,7 +638,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "SELECT author_name,\n count(distinct commit_sha) AS commit_nums\nFROM commits\nJOIN repo_commits\nWHERE commits.sha =repo_commits.commit_sha\n AND repo_commits.repo_id IN (${repo_id:sqlstring}+'')\n AND $__timeFilter(commits.authored_date)\nGROUP BY author_name,author_id\nORDER BY commit_nums desc\nLIMIT 10; ", + "rawSql": "SELECT author_name,\n count(distinct commit_sha) AS commit_nums\nFROM commits\nJOIN repo_commits\nWHERE commits.sha =repo_commits.commit_sha\n AND repo_commits.repo_id IN (${repo_id})\n AND $__timeFilter(commits.authored_date)\nGROUP BY author_name,author_id\nORDER BY commit_nums desc\nLIMIT 10; ", "refId": "A", "select": [ [ @@ -753,7 +753,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "SELECT author_name,\n sum(additions-deletions) AS cnt\nFROM commits\nJOIN repo_commits\nWHERE commits.sha =repo_commits.commit_sha\n AND repo_commits.repo_id IN (${repo_id:sqlstring}+'')\n AND $__timeFilter(commits.authored_date)\nGROUP BY author_name,author_id\nORDER BY cnt DESC \nLIMIT 10;", + "rawSql": "SELECT author_name,\n sum(additions-deletions) AS cnt\nFROM commits\nJOIN repo_commits\nWHERE commits.sha =repo_commits.commit_sha\n AND repo_commits.repo_id IN (${repo_id})\n AND $__timeFilter(commits.authored_date)\nGROUP BY author_name,author_id\nORDER BY cnt DESC \nLIMIT 10;", "refId": "A", "select": [ [ @@ -892,7 +892,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "SELECT\n CASE cast(dayofweek(authored_date) AS char) \n WHEN '1' THEN\n '1.Monday'\n WHEN '2' THEN\n '2.Tuesday'\n WHEN '3' THEN\n '3.Wednesday'\n WHEN '4' THEN\n '4.Thursday'\n WHEN '5' THEN\n '5.Friday'\n WHEN '6' THEN\n '6.Saturday'\n WHEN '7' THEN\n '7.Sunday'\n END AS weekday , count(distinct commit_sha) 
AS commit_nums\nFROM commits\nJOIN repo_commits\nWHERE $__timeFilter(commits.authored_date)\n AND commits.sha =repo_commits.commit_sha\n AND repo_commits.repo_id IN (${repo_id:sqlstring}+'')\nGROUP BY weekday\n", + "rawSql": "SELECT\n CASE cast(dayofweek(authored_date) AS char) \n WHEN '1' THEN\n '1.Monday'\n WHEN '2' THEN\n '2.Tuesday'\n WHEN '3' THEN\n '3.Wednesday'\n WHEN '4' THEN\n '4.Thursday'\n WHEN '5' THEN\n '5.Friday'\n WHEN '6' THEN\n '6.Saturday'\n WHEN '7' THEN\n '7.Sunday'\n END AS weekday , count(distinct commit_sha) AS commit_nums\nFROM commits\nJOIN repo_commits\nWHERE $__timeFilter(commits.authored_date)\n AND commits.sha =repo_commits.commit_sha\n AND repo_commits.repo_id IN (${repo_id})\nGROUP BY weekday\n", "refId": "A", "select": [ [ @@ -1005,7 +1005,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "SELECT\n CASE cast(dayofweek(authored_date) AS char)\n WHEN '1' THEN\n '1.Monday'\n WHEN '2' THEN\n '2.Tuesday'\n WHEN '3' THEN\n '3.Wednesday'\n WHEN '4' THEN\n '4.Thursday'\n WHEN '5' THEN\n '5.Friday'\n WHEN '6' THEN\n '6.Saturday'\n WHEN '7' THEN\n '7.Sunday'\n END AS weekday , sum(additions-deletions) AS changed_nums, sum(additions) AS total_additions, sum(deletions) AS total_deletions\nFROM commits\nJOIN repo_commits\nWHERE commits.sha =repo_commits.commit_sha\n AND repo_commits.repo_id IN (${repo_id:sqlstring}+'')\n AND $__timeFilter(commits.authored_date)\nGROUP BY weekday\nORDER BY weekday", + "rawSql": "SELECT\n CASE cast(dayofweek(authored_date) AS char)\n WHEN '1' THEN\n '1.Monday'\n WHEN '2' THEN\n '2.Tuesday'\n WHEN '3' THEN\n '3.Wednesday'\n WHEN '4' THEN\n '4.Thursday'\n WHEN '5' THEN\n '5.Friday'\n WHEN '6' THEN\n '6.Saturday'\n WHEN '7' THEN\n '7.Sunday'\n END AS weekday , sum(additions-deletions) AS changed_nums, sum(additions) AS total_additions, sum(deletions) AS total_deletions\nFROM commits\nJOIN repo_commits\nWHERE commits.sha =repo_commits.commit_sha\n AND repo_commits.repo_id IN (${repo_id})\n AND $__timeFilter(commits.authored_date)\nGROUP BY weekday\nORDER BY weekday", "refId": "A", "select": [ [ @@ -1142,7 +1142,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "SELECT CASE cast(dayofweek(commits.committed_date) AS char)\n WHEN '1' THEN\n '1.Monday'\n WHEN '2' THEN\n '2.Tuesday'\n WHEN '3' THEN\n '3.Wednesday'\n WHEN '4' THEN\n '4.Thursday'\n WHEN '5' THEN\n '5.Friday'\n WHEN '6' THEN\n '6.Saturday'\n WHEN '7' THEN\n '7.Sunday'\n END AS wd,\n count(*) as lived_lines\nFROM repo_snapshot\n JOIN commits\n ON commits.sha=repo_snapshot.commit_sha\nWHERE repo_snapshot.repo_id IN (${repo_id:sqlstring}+'')\n AND $__timeFilter(commits.committed_date)\nGROUP BY wd\nORDER BY wd ;", + "rawSql": "SELECT CASE cast(dayofweek(commits.committed_date) AS char)\n WHEN '1' THEN\n '1.Monday'\n WHEN '2' THEN\n '2.Tuesday'\n WHEN '3' THEN\n '3.Wednesday'\n WHEN '4' THEN\n '4.Thursday'\n WHEN '5' THEN\n '5.Friday'\n WHEN '6' THEN\n '6.Saturday'\n WHEN '7' THEN\n '7.Sunday'\n END AS wd,\n count(*) as lived_lines\nFROM repo_snapshot\n JOIN commits\n ON commits.sha=repo_snapshot.commit_sha\nWHERE repo_snapshot.repo_id IN (${repo_id})\n AND $__timeFilter(commits.committed_date)\nGROUP BY wd\nORDER BY wd ;", "refId": "A", "select": [ [ @@ -1228,7 +1228,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "SELECT file_path,\n avg(timestampdiff(day,\n commits.committed_date,\n now())) AS line_age\nFROM repo_snapshot\nJOIN commits\n ON repo_snapshot.commit_sha = 
commits.sha\nWHERE repo_snapshot.repo_id IN (${repo_id:sqlstring}+'')\n AND $__timeFilter(commits.committed_date)\nGROUP BY file_path\nORDER BY line_age desc \nLIMIT 20;", + "rawSql": "SELECT file_path,\n avg(timestampdiff(day,\n commits.committed_date,\n now())) AS line_age\nFROM repo_snapshot\nJOIN commits\n ON repo_snapshot.commit_sha = commits.sha\nWHERE repo_snapshot.repo_id IN (${repo_id})\n AND $__timeFilter(commits.committed_date)\nGROUP BY file_path\nORDER BY line_age desc \nLIMIT 20;", "refId": "A", "select": [ [ diff --git a/grafana/dashboards/ContributorExperience.json b/grafana/dashboards/ContributorExperience.json index 498f5174339..5cc9fdc6940 100644 --- a/grafana/dashboards/ContributorExperience.json +++ b/grafana/dashboards/ContributorExperience.json @@ -117,7 +117,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "with issue_comment_list as(\n select\n i.id as issue_id,\n i.url,\n i.title,\n i.created_date as issue_created_date,\n ic.id as comment_id,\n ic.created_date as comment_date,\n ic.body,\n case when ic.id is not null then rank() over (partition by i.id order by ic.created_date asc) else null end as comment_rank\n from\n issues i\n join board_issues bi on i.id = bi.issue_id\n join boards b on bi.board_id = b.id\n left join issue_comments ic on i.id = ic.issue_id\n where\n date(i.created_date) BETWEEN\n curdate() - INTERVAL DAYOFMONTH(curdate())-1 DAY - INTERVAL 1 month and\n curdate() - INTERVAL DAYOFMONTH(curdate()) DAY\n and b.id in (${repo_id:sqlstring}+'')\n)\n\nselect\n avg((TIMESTAMPDIFF(MINUTE, issue_created_date,comment_date))/1440)\nfrom issue_comment_list\nwhere comment_rank = 1", + "rawSql": "with issue_comment_list as(\n select\n i.id as issue_id,\n i.url,\n i.title,\n i.created_date as issue_created_date,\n ic.id as comment_id,\n ic.created_date as comment_date,\n ic.body,\n case when ic.id is not null then rank() over (partition by i.id order by ic.created_date asc) else null end as comment_rank\n from\n issues i\n join board_issues bi on i.id = bi.issue_id\n join boards b on bi.board_id = b.id\n left join issue_comments ic on i.id = ic.issue_id\n where\n date(i.created_date) BETWEEN\n curdate() - INTERVAL DAYOFMONTH(curdate())-1 DAY - INTERVAL 1 month and\n curdate() - INTERVAL DAYOFMONTH(curdate()) DAY\n and b.id in (${repo_id})\n)\n\nselect\n avg((TIMESTAMPDIFF(MINUTE, issue_created_date,comment_date))/1440)\nfrom issue_comment_list\nwhere comment_rank = 1", "refId": "A", "select": [ [ @@ -201,7 +201,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select \n\tAVG(i.lead_time_minutes/1440) issue_lead_time\nfrom \n\tissues i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\nwhere\n date(i.created_date) BETWEEN\n curdate() - INTERVAL DAYOFMONTH(curdate())-1 DAY - INTERVAL 1 month and\n curdate() - INTERVAL DAYOFMONTH(curdate()) DAY\n and i.status = \"DONE\"\n and b.id in (${repo_id:sqlstring}+'')", + "rawSql": "select \n\tAVG(i.lead_time_minutes/1440) issue_lead_time\nfrom \n\tissues i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\nwhere\n date(i.created_date) BETWEEN\n curdate() - INTERVAL DAYOFMONTH(curdate())-1 DAY - INTERVAL 1 month and\n curdate() - INTERVAL DAYOFMONTH(curdate()) DAY\n and i.status = \"DONE\"\n and b.id in (${repo_id})", "refId": "A", "select": [ [ @@ -285,7 +285,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "with issue_comment_list as(\n 
select\n i.id as issue_id,\n i.url,\n i.title,\n i.created_date as issue_created_date,\n ic.id as comment_id,\n ic.created_date as comment_date,\n ic.body,\n case when ic.id is not null then rank() over (partition by i.id order by ic.created_date asc) else null end as comment_rank\n from\n issues i\n join board_issues bi on i.id = bi.issue_id\n join boards b on bi.board_id = b.id\n left join issue_comments ic on i.id = ic.issue_id\n where\n date(i.created_date) BETWEEN\n curdate() - INTERVAL DAYOFMONTH(curdate())-1 DAY - INTERVAL 1 month and\n curdate() - INTERVAL DAYOFMONTH(curdate()) DAY\n and b.id in (${repo_id:sqlstring}+'')\n)\n\nselect\n 100 * sum(case when (TIMESTAMPDIFF(MINUTE, issue_created_date,comment_date))/60 < $iir_sla then 1 else null end) / count(*)\nfrom issue_comment_list\nwhere comment_rank = 1", + "rawSql": "with issue_comment_list as(\n select\n i.id as issue_id,\n i.url,\n i.title,\n i.created_date as issue_created_date,\n ic.id as comment_id,\n ic.created_date as comment_date,\n ic.body,\n case when ic.id is not null then rank() over (partition by i.id order by ic.created_date asc) else null end as comment_rank\n from\n issues i\n join board_issues bi on i.id = bi.issue_id\n join boards b on bi.board_id = b.id\n left join issue_comments ic on i.id = ic.issue_id\n where\n date(i.created_date) BETWEEN\n curdate() - INTERVAL DAYOFMONTH(curdate())-1 DAY - INTERVAL 1 month and\n curdate() - INTERVAL DAYOFMONTH(curdate()) DAY\n and b.id in (${repo_id})\n)\n\nselect\n 100 * sum(case when (TIMESTAMPDIFF(MINUTE, issue_created_date,comment_date))/60 < $iir_sla then 1 else null end) / count(*)\nfrom issue_comment_list\nwhere comment_rank = 1", "refId": "A", "select": [ [ @@ -369,7 +369,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select\n count(distinct i.id)\nfrom\n issues i\n join board_issues bi on i.id = bi.issue_id\n join boards b on bi.board_id = b.id\n join issue_labels il on il.issue_id = i.id\nwhere\n il.label_name = \"$label_gfi\" and\n i.status != 'DONE' and\n b.id in (${repo_id:sqlstring}+'')", + "rawSql": "select\n count(distinct i.id)\nfrom\n issues i\n join board_issues bi on i.id = bi.issue_id\n join boards b on bi.board_id = b.id\n join issue_labels il on il.issue_id = i.id\nwhere\n il.label_name = \"$label_gfi\" and\n i.status != 'DONE' and\n b.id in (${repo_id})", "refId": "A", "select": [ [ @@ -470,7 +470,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "with pr_comment_list as(\n select\n pr.id as issue_id,\n pr.url,\n pr.title,\n pr.created_date as pr_created_date,\n prc.id as comment_id,\n prc.created_date as comment_date,\n prc.account_id,\n case when prc.id is not null then rank() over (partition by pr.id order by prc.created_date asc) else null end as comment_rank\n from\n pull_requests pr\n left join pull_request_comments prc on pr.id = prc.pull_request_id\n where\n date(pr.created_date) BETWEEN\n curdate() - INTERVAL DAYOFMONTH(curdate())-1 DAY - INTERVAL 1 month and\n curdate() - INTERVAL DAYOFMONTH(curdate()) DAY\n and pr.base_repo_id in (${repo_id:sqlstring}+'')\n)\n\nselect\n avg((TIMESTAMPDIFF(MINUTE, pr_created_date, comment_date))/1440)\nfrom pr_comment_list\nwhere comment_rank = 1", + "rawSql": "with pr_comment_list as(\n select\n pr.id as issue_id,\n pr.url,\n pr.title,\n pr.created_date as pr_created_date,\n prc.id as comment_id,\n prc.created_date as comment_date,\n prc.account_id,\n case when prc.id is not null then rank() over (partition by pr.id order by 
prc.created_date asc) else null end as comment_rank\n from\n pull_requests pr\n left join pull_request_comments prc on pr.id = prc.pull_request_id\n where\n date(pr.created_date) BETWEEN\n curdate() - INTERVAL DAYOFMONTH(curdate())-1 DAY - INTERVAL 1 month and\n curdate() - INTERVAL DAYOFMONTH(curdate()) DAY\n and pr.base_repo_id in (${repo_id})\n)\n\nselect\n avg((TIMESTAMPDIFF(MINUTE, pr_created_date, comment_date))/1440)\nfrom pr_comment_list\nwhere comment_rank = 1", "refId": "A", "select": [ [ @@ -554,7 +554,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nselect\n\tavg(TIMESTAMPDIFF(Minute,created_date,closed_date)/1440) as time_to_close\nfrom \n\tpull_requests pr\nwhere \n date(created_date) BETWEEN\n curdate() - INTERVAL DAYOFMONTH(curdate())-1 DAY - INTERVAL 1 month and\n curdate() - INTERVAL DAYOFMONTH(curdate()) DAY\n\tand status in ('CLOSED', 'MERGED')\n\tand pr.base_repo_id in (${repo_id:sqlstring}+'')\n\n\n", + "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nselect\n\tavg(TIMESTAMPDIFF(Minute,created_date,closed_date)/1440) as time_to_close\nfrom \n\tpull_requests pr\nwhere \n date(created_date) BETWEEN\n curdate() - INTERVAL DAYOFMONTH(curdate())-1 DAY - INTERVAL 1 month and\n curdate() - INTERVAL DAYOFMONTH(curdate()) DAY\n\tand status in ('CLOSED', 'MERGED')\n\tand pr.base_repo_id in (${repo_id})\n\n\n", "refId": "A", "select": [ [ @@ -639,7 +639,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nselect\n 100 * sum(case when TIMESTAMPDIFF(Minute, created_date, closed_date) / 1440 < $prrt_sla then 1 else null end) / count(*)\nfrom \n\tpull_requests pr\nwhere \n date(created_date) BETWEEN\n curdate() - INTERVAL DAYOFMONTH(curdate())-1 DAY - INTERVAL 1 month and\n curdate() - INTERVAL DAYOFMONTH(curdate()) DAY\n\tand status in ('CLOSED', 'MERGED')\n\tand pr.base_repo_id in (${repo_id:sqlstring}+'')\n\n\n", + "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nselect\n 100 * sum(case when TIMESTAMPDIFF(Minute, created_date, closed_date) / 1440 < $prrt_sla then 1 else null end) / count(*)\nfrom \n\tpull_requests pr\nwhere \n date(created_date) BETWEEN\n curdate() - INTERVAL DAYOFMONTH(curdate())-1 DAY - INTERVAL 1 month and\n curdate() - INTERVAL DAYOFMONTH(curdate()) DAY\n\tand status in ('CLOSED', 'MERGED')\n\tand pr.base_repo_id in (${repo_id})\n\n\n", "refId": "A", "select": [ [ @@ -747,7 +747,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. 
You can check out the original status from the field `original_status`\nselect\n 100 * count(distinct case when status = 'CLOSED' then id else null end)/count(distinct case when status in ('CLOSED', 'MERGED') then id else null end) as ratio\nfrom \n\tpull_requests pr\nwhere\n date(created_date) BETWEEN\n curdate() - INTERVAL DAYOFMONTH(curdate())-1 DAY - INTERVAL 1 month and\n curdate() - INTERVAL DAYOFMONTH(curdate()) DAY\n and pr.base_repo_id in (${repo_id:sqlstring}+'')", + "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nselect\n 100 * count(distinct case when status = 'CLOSED' then id else null end)/count(distinct case when status in ('CLOSED', 'MERGED') then id else null end) as ratio\nfrom \n\tpull_requests pr\nwhere\n date(created_date) BETWEEN\n curdate() - INTERVAL DAYOFMONTH(curdate())-1 DAY - INTERVAL 1 month and\n curdate() - INTERVAL DAYOFMONTH(curdate()) DAY\n and pr.base_repo_id in (${repo_id})", "refId": "A", "select": [ [ diff --git a/grafana/dashboards/DORA.json b/grafana/dashboards/DORA.json index 725f39074b5..0c77ec165ed 100644 --- a/grafana/dashboards/DORA.json +++ b/grafana/dashboards/DORA.json @@ -205,7 +205,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "-- Metric 1: Deployment Frequency\nwith last_few_calendar_months as(\n-- construct the last few calendar months within the selected time period in the top-right corner\n\tSELECT CAST((SYSDATE()-INTERVAL (H+T+U) DAY) AS date) day\n\tFROM ( SELECT 0 H\n\t\t\tUNION ALL SELECT 100 UNION ALL SELECT 200 UNION ALL SELECT 300\n\t\t) H CROSS JOIN ( SELECT 0 T\n\t\t\tUNION ALL SELECT 10 UNION ALL SELECT 20 UNION ALL SELECT 30\n\t\t\tUNION ALL SELECT 40 UNION ALL SELECT 50 UNION ALL SELECT 60\n\t\t\tUNION ALL SELECT 70 UNION ALL SELECT 80 UNION ALL SELECT 90\n\t\t) T CROSS JOIN ( SELECT 0 U\n\t\t\tUNION ALL SELECT 1 UNION ALL SELECT 2 UNION ALL SELECT 3\n\t\t\tUNION ALL SELECT 4 UNION ALL SELECT 5 UNION ALL SELECT 6\n\t\t\tUNION ALL SELECT 7 UNION ALL SELECT 8 UNION ALL SELECT 9\n\t\t) U\n\tWHERE\n\t\t(SYSDATE()-INTERVAL (H+T+U) DAY) > $__timeFrom()\n),\n\n_production_deployment_days as(\n-- When deploying multiple commits in one pipeline, GitLab and BitBucket may generate more than one deployment. 
However, DevLake consider these deployments as ONE production deployment and use the last one's finished_date as the finished date.\n\tSELECT\n\t\tcdc.cicd_deployment_id as deployment_id,\n\t\tmax(DATE(cdc.finished_date)) as day\n\tFROM cicd_deployment_commits cdc\n\tJOIN project_mapping pm on cdc.cicd_scope_id = pm.row_id and pm.`table` = 'cicd_scopes'\n\tWHERE\n\t\tpm.project_name in (${project:sqlstring}+'')\n\t\tand cdc.result = 'SUCCESS'\n\t\tand cdc.environment = 'PRODUCTION'\n\tGROUP BY 1\n),\n\n_days_weekly_deploy as(\n-- calculate the number of deployment days every week\n\tSELECT\n\t\t\tdate(DATE_ADD(last_few_calendar_months.day, INTERVAL -WEEKDAY(last_few_calendar_months.day) DAY)) as week,\n\t\t\tMAX(if(_production_deployment_days.day is not null, 1, 0)) as weeks_deployed,\n\t\t\tCOUNT(distinct _production_deployment_days.day) as days_deployed\n\tFROM \n\t\tlast_few_calendar_months\n\t\tLEFT JOIN _production_deployment_days ON _production_deployment_days.day = last_few_calendar_months.day\n\tGROUP BY week\n\t),\n\n_days_monthly_deploy as(\n-- calculate the number of deployment days every month\n\tSELECT\n\t\t\tdate(DATE_ADD(last_few_calendar_months.day, INTERVAL -DAY(last_few_calendar_months.day)+1 DAY)) as month,\n\t\t\tMAX(if(_production_deployment_days.day is not null, 1, null)) as months_deployed,\n\t\t COUNT(distinct _production_deployment_days.day) as days_deployed\n\tFROM \n\t\tlast_few_calendar_months\n\t\tLEFT JOIN _production_deployment_days ON _production_deployment_days.day = last_few_calendar_months.day\n\tGROUP BY month\n\t),\n\n_days_six_months_deploy AS (\n SELECT\n month,\n SUM(days_deployed) OVER (\n ORDER BY month\n ROWS BETWEEN 5 PRECEDING AND CURRENT ROW\n ) AS days_deployed_per_six_months,\n COUNT(months_deployed) OVER (\n ORDER BY month\n ROWS BETWEEN 5 PRECEDING AND CURRENT ROW\n ) AS months_deployed_count,\n ROW_NUMBER() OVER (\n PARTITION BY DATE_FORMAT(month, '%Y-%m') DIV 6\n ORDER BY month DESC\n ) AS rn\n FROM _days_monthly_deploy\n),\n\n_median_number_of_deployment_days_per_week_ranks as(\n\tSELECT *, percent_rank() over(order by days_deployed) as ranks\n\tFROM _days_weekly_deploy\n),\n\n_median_number_of_deployment_days_per_week as(\n\tSELECT max(days_deployed) as median_number_of_deployment_days_per_week\n\tFROM _median_number_of_deployment_days_per_week_ranks\n\tWHERE ranks <= 0.5\n),\n\n_median_number_of_deployment_days_per_month_ranks as(\n\tSELECT *, percent_rank() over(order by days_deployed) as ranks\n\tFROM _days_monthly_deploy\n),\n\n_median_number_of_deployment_days_per_month as(\n\tSELECT max(days_deployed) as median_number_of_deployment_days_per_month\n\tFROM _median_number_of_deployment_days_per_month_ranks\n\tWHERE ranks <= 0.5\n),\n\n_days_per_six_months_deploy_by_filter AS (\nSELECT\n month,\n days_deployed_per_six_months,\n months_deployed_count\nFROM _days_six_months_deploy\nWHERE rn%6 = 1\n),\n\n\n_median_number_of_deployment_days_per_six_months_ranks as(\n\tSELECT *, percent_rank() over(order by days_deployed_per_six_months) as ranks\n\tFROM _days_per_six_months_deploy_by_filter\n),\n\n_median_number_of_deployment_days_per_six_months as(\n\tSELECT min(days_deployed_per_six_months) as median_number_of_deployment_days_per_six_months, min(months_deployed_count) as is_collected\n\tFROM _median_number_of_deployment_days_per_six_months_ranks\n\tWHERE ranks >= 0.5\n),\n\n_metric_deployment_frequency as (\n\tSELECT \n\t 'Deployment frequency' as metric, \n\t\tCASE\n\t\t\tWHEN ('$benchmarks') = '2023 report' THEN\n\t\t\t\tCASE 
\n\t\t\t\t\tWHEN median_number_of_deployment_days_per_week >= 7 THEN 'On-demand(elite)'\n\t\t\t\t\tWHEN median_number_of_deployment_days_per_week >= 1 THEN 'Between once per day and once per week(high)'\n\t\t\t\t\tWHEN median_number_of_deployment_days_per_month >= 1 THEN 'Between once per week and once per month(medium)'\n\t\t\t\t\tWHEN median_number_of_deployment_days_per_month < 1 and is_collected != NULL THEN 'Fewer than once per month(low)'\n\t\t\t\t\tELSE \"N/A. Please check if you have collected deployments.\" END\n\t\t\tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\t\tCASE \n\t\t\t\t\tWHEN median_number_of_deployment_days_per_week >= 7 THEN 'On-demand(elite)'\n\t\t\t\t\tWHEN median_number_of_deployment_days_per_month >= 1 THEN 'Between once per day and once per month(high)'\n\t\t\t\t\tWHEN median_number_of_deployment_days_per_six_months >= 1 THEN 'Between once per month and once every 6 months(medium)'\n\t\t\t\t\tWHEN median_number_of_deployment_days_per_six_months < 1 and is_collected != NULL THEN 'Fewer than once per six months(low)'\n\t\t\t\t\tELSE \"N/A. Please check if you have collected deployments.\" END\n\t\t\tELSE 'Invalid Benchmarks'\n\t\tEND AS value\n\tFROM _median_number_of_deployment_days_per_week, _median_number_of_deployment_days_per_month, _median_number_of_deployment_days_per_six_months\n),\n\n-- Metric 2: median lead time for changes\n_pr_stats as (\n-- get the cycle time of PRs deployed by the deployments finished in the selected period\n\tSELECT\n\t\tdistinct pr.id,\n\t\tppm.pr_cycle_time\n\tFROM\n\t\tpull_requests pr \n\t\tjoin project_pr_metrics ppm on ppm.id = pr.id\n\t\tjoin project_mapping pm on pr.base_repo_id = pm.row_id and pm.`table` = 'repos'\n\t\tjoin cicd_deployment_commits cdc on ppm.deployment_commit_id = cdc.id\n\tWHERE\n\t pm.project_name in (${project:sqlstring}+'') \n\t\tand pr.merged_date is not null\n\t\tand ppm.pr_cycle_time is not null\n\t\tand $__timeFilter(cdc.finished_date)\n),\n\n_median_change_lead_time_ranks as(\n\tSELECT *, percent_rank() over(order by pr_cycle_time) as ranks\n\tFROM _pr_stats\n),\n\n_median_change_lead_time as(\n-- use median PR cycle time as the median change lead time\n\tSELECT max(pr_cycle_time) as median_change_lead_time\n\tFROM _median_change_lead_time_ranks\n\tWHERE ranks <= 0.5\n),\n\n_metric_change_lead_time as (\n\tSELECT \n\t\t'Lead time for changes' as metric,\n\t\tCASE\n\t\t\tWHEN ('$benchmarks') = '2023 report' THEN\n\t\t\t\tCASE\n\t\t\t\t\tWHEN median_change_lead_time < 24 * 60 THEN \"Less than one day(elite)\"\n\t\t\t\t\tWHEN median_change_lead_time < 7 * 24 * 60 THEN \"Between one day and one week(high)\"\n\t\t\t\t\tWHEN median_change_lead_time < 30 * 24 * 60 THEN \"Between one week and one month(medium)\"\n\t\t\t\t\tWHEN median_change_lead_time >= 30 * 24 * 60 THEN \"More than one month(low)\"\n\t\t\t\t\tELSE \"N/A. Please check if you have collected deployments/pull_requests.\"\n\t\t\t\t\tEND\n\t\t\tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\t\tCASE\n\t\t\t\t\tWHEN median_change_lead_time < 60 THEN \"Less than one hour(elite)\"\n\t\t\t\t\tWHEN median_change_lead_time < 7 * 24 * 60 THEN \"Less than one week(high)\"\n\t\t\t\t\tWHEN median_change_lead_time < 180 * 24 * 60 THEN \"Between one week and six months(medium)\"\n\t\t\t\t\tWHEN median_change_lead_time >= 180 * 24 * 60 THEN \"More than six months(low)\"\n\t\t\t\t\tELSE \"N/A. 
Please check if you have collected deployments/pull_requests.\"\n\t\t\t\t\tEND\n\t\t\tELSE 'Invalid Benchmarks'\n\t\tEND AS value\nFROM _median_change_lead_time\n),\n\n\n-- Metric 3: Median time to restore service \n_incidents as (\n-- get the incidents created within the selected time period in the top-right corner\n\tSELECT\n\t distinct i.id,\n\t\tcast(lead_time_minutes as signed) as lead_time_minutes\n\tFROM\n\t\tissues i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id and pm.`table` = 'boards'\n\tWHERE\n\t pm.project_name in (${project:sqlstring}+'')\n\t\tand i.type = 'INCIDENT'\n\t\tand $__timeFilter(i.created_date)\n),\n\n_median_mttr_ranks as(\n\tSELECT *, percent_rank() over(order by lead_time_minutes) as ranks\n\tFROM _incidents\n),\n\n_median_mttr as(\n\tSELECT max(lead_time_minutes) as median_time_to_resolve\n\tFROM _median_mttr_ranks\n\tWHERE ranks <= 0.5\n),\n\n\n_metric_mttr as (\n\tSELECT \n\t\t'Time to restore service' as metric,\n\t\tCASE\n\t\t\tWHEN ('$benchmarks') = '2023 report' THEN\n\t\t\t\tCASE\n\t\t\t\t\tWHEN median_time_to_resolve < 60 THEN \"Less than one hour(elite)\"\n\t\t\t\t\tWHEN median_time_to_resolve < 24 * 60 THEN \"Less than one day(high)\"\n\t\t\t\t\tWHEN median_time_to_resolve < 7 * 24 * 60 THEN \"Between one day and one week(medium)\"\n\t\t\t\t\tWHEN median_time_to_resolve >= 7 * 24 * 60 THEN \"More than one week(low)\"\n\t\t\t\t\tELSE \"N/A. Please check if you have collected incidents.\"\n\t\t\t\t\tEND \n\t\t\tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\t\tCASE\n\t\t\t\t\tWHEN median_time_to_resolve < 60 THEN \"Less than one hour(elite)\"\n\t\t\t\t\tWHEN median_time_to_resolve < 24 * 60 THEN \"Less than one day(high)\"\n\t\t\t\t\tWHEN median_time_to_resolve < 7 * 24 * 60 THEN \"Between one day and one week(medium)\"\n\t\t\t\t\tWHEN median_time_to_resolve >= 7 * 24 * 60 THEN \"More than one week(low)\"\n\t\t\t\t\tELSE \"N/A. Please check if you have collected incidents.\"\n\t\t\t\t\tEND\n\t\t\tELSE 'Invalid Benchmarks'\n\t\tEND AS value\n\tFROM \n\t\t_median_mttr\n),\n\n-- Metric 4: change failure rate\n_deployments as (\n-- When deploying multiple commits in one pipeline, GitLab and BitBucket may generate more than one deployment. 
However, DevLake consider these deployments as ONE production deployment and use the last one's finished_date as the finished date.\n\tSELECT\n\t\tcdc.cicd_deployment_id as deployment_id,\n\t\tmax(cdc.finished_date) as deployment_finished_date\n\tFROM \n\t\tcicd_deployment_commits cdc\n\t\tJOIN project_mapping pm on cdc.cicd_scope_id = pm.row_id and pm.`table` = 'cicd_scopes'\n\tWHERE\n\t\tpm.project_name in (${project:sqlstring}+'')\n\t\tand cdc.result = 'SUCCESS'\n\t\tand cdc.environment = 'PRODUCTION'\n\tGROUP BY 1\n\tHAVING $__timeFilter(max(cdc.finished_date))\n),\n\n_failure_caused_by_deployments as (\n-- calculate the number of incidents caused by each deployment\n\tSELECT\n\t\td.deployment_id,\n\t\td.deployment_finished_date,\n\t\tcount(distinct case when i.type = 'INCIDENT' then d.deployment_id else null end) as has_incident\n\tFROM\n\t\t_deployments d\n\t\tleft join project_issue_metrics pim on d.deployment_id = pim.deployment_id\n\t\tleft join issues i on pim.id = i.id\n\tGROUP BY 1,2\n),\n\n_change_failure_rate as (\n\tSELECT \n\t\tcase \n\t\t\twhen count(deployment_id) is null then null\n\t\t\telse sum(has_incident)/count(deployment_id) end as change_failure_rate\n\tFROM\n\t\t_failure_caused_by_deployments\n),\n\n_is_collected_data as(\n\tSELECT\n CASE \n WHEN COUNT(i.id) = 0 AND COUNT(cdc.id) = 0 THEN 'No All'\n WHEN COUNT(i.id) = 0 THEN 'No Incidents' \n WHEN COUNT(cdc.id) = 0 THEN 'No Deployments'\n END AS is_collected\nFROM\n (SELECT 1) AS dummy\nLEFT JOIN\n issues i ON i.type = 'INCIDENT'\nLEFT JOIN\n cicd_deployment_commits cdc ON 1=1\n),\n\n_metric_cfr as (\n\tSELECT\n\t\t'Change failure rate' as metric,\n\t\tCASE\n\t\t\tWHEN ('$benchmarks') = '2023 report' THEN\n\t\t\t\tCASE \n\t\t\t\t WHEN is_collected = \"No All\" THEN \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\t WHEN is_collected = \"No Incidents\" THEN \"N/A. Please check if you have collected incidents.\"\n\t\t\t\t WHEN is_collected = \"No Deployments\" THEN \"N/A. Please check if you have collected deployments.\"\n\t\t\t\t\tWHEN change_failure_rate <= 5 THEN \"0-5%(elite)\"\n\t\t\t\t\tWHEN change_failure_rate <= .10 THEN \"5%-10%(high)\"\n\t\t\t\t\tWHEN change_failure_rate <= .15 THEN \"10%-15%(medium)\"\n\t\t\t\t\tWHEN change_failure_rate > .15 THEN \"> 15%(low)\"\n\t\t\t\t\tELSE \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\t\tEND\n\t\t\tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\t\tCASE \n\t\t\t\t\tWHEN is_collected = \"No All\" THEN \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\t WHEN is_collected = \"No Incidents\" THEN \"N/A. Please check if you have collected incidents.\"\n\t\t\t\t WHEN is_collected = \"No Deployments\" THEN \"N/A. Please check if you have collected deployments.\"\n\t\t\t\t\tWHEN change_failure_rate <= .15 THEN \"0-15%(elite)\"\n\t\t\t\t\tWHEN change_failure_rate <= .20 THEN \"16%-20%(high)\"\n\t\t\t\t\tWHEN change_failure_rate <= .30 THEN \"21%-30%(medium)\"\n\t\t\t\t\tWHEN change_failure_rate > .30 THEN \"> 30%(low)\" \n\t\t\t\t\tELSE \"N/A. 
Please check if you have collected deployments/incidents.\"\n\t\t\t\t\tEND\n\t\t\tELSE 'Invalid Benchmarks'\n\t\tEND AS value\n\tFROM \n\t\t_change_failure_rate, _is_collected_data\n),\n\n_final_results as (\t\n\tSELECT distinct db.id,db.metric,db.low,db.medium,db.high,db.elite,m1.metric as _metric, m1.value FROM dora_benchmarks db\n\tleft join _metric_deployment_frequency m1 on db.metric = m1.metric\n\tWHERE m1.metric is not null and db.benchmarks = ('$benchmarks')\n\t\n\tunion \n\t\n\tSELECT distinct db.id,db.metric,db.low,db.medium,db.high,db.elite,m2.metric as _metric, m2.value FROM dora_benchmarks db\n\tleft join _metric_change_lead_time m2 on db.metric = m2.metric\n\tWHERE m2.metric is not null and db.benchmarks = ('$benchmarks')\n\t\n\tunion \n\t\n\tSELECT distinct db.id,db.metric,db.low,db.medium,db.high,db.elite,m3.metric as _metric, m3.value FROM dora_benchmarks db\n\tleft join _metric_mttr m3 on db.metric = m3.metric\n\tWHERE m3.metric is not null and db.benchmarks = ('$benchmarks')\n\t\n\tunion \n\t\n\tSELECT distinct db.id,db.metric,db.low,db.medium,db.high,db.elite,m4.metric as _metric, m4.value FROM dora_benchmarks db\n\tleft join _metric_cfr m4 on db.metric = m4.metric\n\tWHERE m4.metric is not null and db.benchmarks = ('$benchmarks')\n)\n\n\nSELECT \n\tmetric,\n\tcase when low = value then low else null end as low,\n\tcase when medium = value then medium else null end as medium,\n\tcase when high = value then high else null end as high,\n\tcase when elite = value then elite else null end as elite\nFROM _final_results\nORDER BY id", + "rawSql": "-- Metric 1: Deployment Frequency\nwith last_few_calendar_months as(\n-- construct the last few calendar months within the selected time period in the top-right corner\n\tSELECT CAST((SYSDATE()-INTERVAL (H+T+U) DAY) AS date) day\n\tFROM ( SELECT 0 H\n\t\t\tUNION ALL SELECT 100 UNION ALL SELECT 200 UNION ALL SELECT 300\n\t\t) H CROSS JOIN ( SELECT 0 T\n\t\t\tUNION ALL SELECT 10 UNION ALL SELECT 20 UNION ALL SELECT 30\n\t\t\tUNION ALL SELECT 40 UNION ALL SELECT 50 UNION ALL SELECT 60\n\t\t\tUNION ALL SELECT 70 UNION ALL SELECT 80 UNION ALL SELECT 90\n\t\t) T CROSS JOIN ( SELECT 0 U\n\t\t\tUNION ALL SELECT 1 UNION ALL SELECT 2 UNION ALL SELECT 3\n\t\t\tUNION ALL SELECT 4 UNION ALL SELECT 5 UNION ALL SELECT 6\n\t\t\tUNION ALL SELECT 7 UNION ALL SELECT 8 UNION ALL SELECT 9\n\t\t) U\n\tWHERE\n\t\t(SYSDATE()-INTERVAL (H+T+U) DAY) > $__timeFrom()\n),\n\n_production_deployment_days as(\n-- When deploying multiple commits in one pipeline, GitLab and BitBucket may generate more than one deployment. 
However, DevLake consider these deployments as ONE production deployment and use the last one's finished_date as the finished date.\n\tSELECT\n\t\tcdc.cicd_deployment_id as deployment_id,\n\t\tmax(DATE(cdc.finished_date)) as day\n\tFROM cicd_deployment_commits cdc\n\tJOIN project_mapping pm on cdc.cicd_scope_id = pm.row_id and pm.`table` = 'cicd_scopes'\n\tWHERE\n\t\tpm.project_name in (${project})\n\t\tand cdc.result = 'SUCCESS'\n\t\tand cdc.environment = 'PRODUCTION'\n\tGROUP BY 1\n),\n\n_days_weekly_deploy as(\n-- calculate the number of deployment days every week\n\tSELECT\n\t\t\tdate(DATE_ADD(last_few_calendar_months.day, INTERVAL -WEEKDAY(last_few_calendar_months.day) DAY)) as week,\n\t\t\tMAX(if(_production_deployment_days.day is not null, 1, 0)) as weeks_deployed,\n\t\t\tCOUNT(distinct _production_deployment_days.day) as days_deployed\n\tFROM \n\t\tlast_few_calendar_months\n\t\tLEFT JOIN _production_deployment_days ON _production_deployment_days.day = last_few_calendar_months.day\n\tGROUP BY week\n\t),\n\n_days_monthly_deploy as(\n-- calculate the number of deployment days every month\n\tSELECT\n\t\t\tdate(DATE_ADD(last_few_calendar_months.day, INTERVAL -DAY(last_few_calendar_months.day)+1 DAY)) as month,\n\t\t\tMAX(if(_production_deployment_days.day is not null, 1, null)) as months_deployed,\n\t\t COUNT(distinct _production_deployment_days.day) as days_deployed\n\tFROM \n\t\tlast_few_calendar_months\n\t\tLEFT JOIN _production_deployment_days ON _production_deployment_days.day = last_few_calendar_months.day\n\tGROUP BY month\n\t),\n\n_days_six_months_deploy AS (\n SELECT\n month,\n SUM(days_deployed) OVER (\n ORDER BY month\n ROWS BETWEEN 5 PRECEDING AND CURRENT ROW\n ) AS days_deployed_per_six_months,\n COUNT(months_deployed) OVER (\n ORDER BY month\n ROWS BETWEEN 5 PRECEDING AND CURRENT ROW\n ) AS months_deployed_count,\n ROW_NUMBER() OVER (\n PARTITION BY DATE_FORMAT(month, '%Y-%m') DIV 6\n ORDER BY month DESC\n ) AS rn\n FROM _days_monthly_deploy\n),\n\n_median_number_of_deployment_days_per_week_ranks as(\n\tSELECT *, percent_rank() over(order by days_deployed) as ranks\n\tFROM _days_weekly_deploy\n),\n\n_median_number_of_deployment_days_per_week as(\n\tSELECT max(days_deployed) as median_number_of_deployment_days_per_week\n\tFROM _median_number_of_deployment_days_per_week_ranks\n\tWHERE ranks <= 0.5\n),\n\n_median_number_of_deployment_days_per_month_ranks as(\n\tSELECT *, percent_rank() over(order by days_deployed) as ranks\n\tFROM _days_monthly_deploy\n),\n\n_median_number_of_deployment_days_per_month as(\n\tSELECT max(days_deployed) as median_number_of_deployment_days_per_month\n\tFROM _median_number_of_deployment_days_per_month_ranks\n\tWHERE ranks <= 0.5\n),\n\n_days_per_six_months_deploy_by_filter AS (\nSELECT\n month,\n days_deployed_per_six_months,\n months_deployed_count\nFROM _days_six_months_deploy\nWHERE rn%6 = 1\n),\n\n\n_median_number_of_deployment_days_per_six_months_ranks as(\n\tSELECT *, percent_rank() over(order by days_deployed_per_six_months) as ranks\n\tFROM _days_per_six_months_deploy_by_filter\n),\n\n_median_number_of_deployment_days_per_six_months as(\n\tSELECT min(days_deployed_per_six_months) as median_number_of_deployment_days_per_six_months, min(months_deployed_count) as is_collected\n\tFROM _median_number_of_deployment_days_per_six_months_ranks\n\tWHERE ranks >= 0.5\n),\n\n_metric_deployment_frequency as (\n\tSELECT \n\t 'Deployment frequency' as metric, \n\t\tCASE\n\t\t\tWHEN ('$benchmarks') = '2023 report' THEN\n\t\t\t\tCASE \n\t\t\t\t\tWHEN 
median_number_of_deployment_days_per_week >= 7 THEN 'On-demand(elite)'\n\t\t\t\t\tWHEN median_number_of_deployment_days_per_week >= 1 THEN 'Between once per day and once per week(high)'\n\t\t\t\t\tWHEN median_number_of_deployment_days_per_month >= 1 THEN 'Between once per week and once per month(medium)'\n\t\t\t\t\tWHEN median_number_of_deployment_days_per_month < 1 and is_collected != NULL THEN 'Fewer than once per month(low)'\n\t\t\t\t\tELSE \"N/A. Please check if you have collected deployments.\" END\n\t\t\tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\t\tCASE \n\t\t\t\t\tWHEN median_number_of_deployment_days_per_week >= 7 THEN 'On-demand(elite)'\n\t\t\t\t\tWHEN median_number_of_deployment_days_per_month >= 1 THEN 'Between once per day and once per month(high)'\n\t\t\t\t\tWHEN median_number_of_deployment_days_per_six_months >= 1 THEN 'Between once per month and once every 6 months(medium)'\n\t\t\t\t\tWHEN median_number_of_deployment_days_per_six_months < 1 and is_collected != NULL THEN 'Fewer than once per six months(low)'\n\t\t\t\t\tELSE \"N/A. Please check if you have collected deployments.\" END\n\t\t\tELSE 'Invalid Benchmarks'\n\t\tEND AS value\n\tFROM _median_number_of_deployment_days_per_week, _median_number_of_deployment_days_per_month, _median_number_of_deployment_days_per_six_months\n),\n\n-- Metric 2: median lead time for changes\n_pr_stats as (\n-- get the cycle time of PRs deployed by the deployments finished in the selected period\n\tSELECT\n\t\tdistinct pr.id,\n\t\tppm.pr_cycle_time\n\tFROM\n\t\tpull_requests pr \n\t\tjoin project_pr_metrics ppm on ppm.id = pr.id\n\t\tjoin project_mapping pm on pr.base_repo_id = pm.row_id and pm.`table` = 'repos'\n\t\tjoin cicd_deployment_commits cdc on ppm.deployment_commit_id = cdc.id\n\tWHERE\n\t pm.project_name in (${project}) \n\t\tand pr.merged_date is not null\n\t\tand ppm.pr_cycle_time is not null\n\t\tand $__timeFilter(cdc.finished_date)\n),\n\n_median_change_lead_time_ranks as(\n\tSELECT *, percent_rank() over(order by pr_cycle_time) as ranks\n\tFROM _pr_stats\n),\n\n_median_change_lead_time as(\n-- use median PR cycle time as the median change lead time\n\tSELECT max(pr_cycle_time) as median_change_lead_time\n\tFROM _median_change_lead_time_ranks\n\tWHERE ranks <= 0.5\n),\n\n_metric_change_lead_time as (\n\tSELECT \n\t\t'Lead time for changes' as metric,\n\t\tCASE\n\t\t\tWHEN ('$benchmarks') = '2023 report' THEN\n\t\t\t\tCASE\n\t\t\t\t\tWHEN median_change_lead_time < 24 * 60 THEN \"Less than one day(elite)\"\n\t\t\t\t\tWHEN median_change_lead_time < 7 * 24 * 60 THEN \"Between one day and one week(high)\"\n\t\t\t\t\tWHEN median_change_lead_time < 30 * 24 * 60 THEN \"Between one week and one month(medium)\"\n\t\t\t\t\tWHEN median_change_lead_time >= 30 * 24 * 60 THEN \"More than one month(low)\"\n\t\t\t\t\tELSE \"N/A. Please check if you have collected deployments/pull_requests.\"\n\t\t\t\t\tEND\n\t\t\tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\t\tCASE\n\t\t\t\t\tWHEN median_change_lead_time < 60 THEN \"Less than one hour(elite)\"\n\t\t\t\t\tWHEN median_change_lead_time < 7 * 24 * 60 THEN \"Less than one week(high)\"\n\t\t\t\t\tWHEN median_change_lead_time < 180 * 24 * 60 THEN \"Between one week and six months(medium)\"\n\t\t\t\t\tWHEN median_change_lead_time >= 180 * 24 * 60 THEN \"More than six months(low)\"\n\t\t\t\t\tELSE \"N/A. 
Please check if you have collected deployments/pull_requests.\"\n\t\t\t\t\tEND\n\t\t\tELSE 'Invalid Benchmarks'\n\t\tEND AS value\nFROM _median_change_lead_time\n),\n\n\n-- Metric 3: Median time to restore service \n_incidents as (\n-- get the incidents created within the selected time period in the top-right corner\n\tSELECT\n\t distinct i.id,\n\t\tcast(lead_time_minutes as signed) as lead_time_minutes\n\tFROM\n\t\tissues i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id and pm.`table` = 'boards'\n\tWHERE\n\t pm.project_name in (${project})\n\t\tand i.type = 'INCIDENT'\n\t\tand $__timeFilter(i.created_date)\n),\n\n_median_mttr_ranks as(\n\tSELECT *, percent_rank() over(order by lead_time_minutes) as ranks\n\tFROM _incidents\n),\n\n_median_mttr as(\n\tSELECT max(lead_time_minutes) as median_time_to_resolve\n\tFROM _median_mttr_ranks\n\tWHERE ranks <= 0.5\n),\n\n\n_metric_mttr as (\n\tSELECT \n\t\t'Time to restore service' as metric,\n\t\tCASE\n\t\t\tWHEN ('$benchmarks') = '2023 report' THEN\n\t\t\t\tCASE\n\t\t\t\t\tWHEN median_time_to_resolve < 60 THEN \"Less than one hour(elite)\"\n\t\t\t\t\tWHEN median_time_to_resolve < 24 * 60 THEN \"Less than one day(high)\"\n\t\t\t\t\tWHEN median_time_to_resolve < 7 * 24 * 60 THEN \"Between one day and one week(medium)\"\n\t\t\t\t\tWHEN median_time_to_resolve >= 7 * 24 * 60 THEN \"More than one week(low)\"\n\t\t\t\t\tELSE \"N/A. Please check if you have collected incidents.\"\n\t\t\t\t\tEND \n\t\t\tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\t\tCASE\n\t\t\t\t\tWHEN median_time_to_resolve < 60 THEN \"Less than one hour(elite)\"\n\t\t\t\t\tWHEN median_time_to_resolve < 24 * 60 THEN \"Less than one day(high)\"\n\t\t\t\t\tWHEN median_time_to_resolve < 7 * 24 * 60 THEN \"Between one day and one week(medium)\"\n\t\t\t\t\tWHEN median_time_to_resolve >= 7 * 24 * 60 THEN \"More than one week(low)\"\n\t\t\t\t\tELSE \"N/A. Please check if you have collected incidents.\"\n\t\t\t\t\tEND\n\t\t\tELSE 'Invalid Benchmarks'\n\t\tEND AS value\n\tFROM \n\t\t_median_mttr\n),\n\n-- Metric 4: change failure rate\n_deployments as (\n-- When deploying multiple commits in one pipeline, GitLab and BitBucket may generate more than one deployment. 
However, DevLake consider these deployments as ONE production deployment and use the last one's finished_date as the finished date.\n\tSELECT\n\t\tcdc.cicd_deployment_id as deployment_id,\n\t\tmax(cdc.finished_date) as deployment_finished_date\n\tFROM \n\t\tcicd_deployment_commits cdc\n\t\tJOIN project_mapping pm on cdc.cicd_scope_id = pm.row_id and pm.`table` = 'cicd_scopes'\n\tWHERE\n\t\tpm.project_name in (${project})\n\t\tand cdc.result = 'SUCCESS'\n\t\tand cdc.environment = 'PRODUCTION'\n\tGROUP BY 1\n\tHAVING $__timeFilter(max(cdc.finished_date))\n),\n\n_failure_caused_by_deployments as (\n-- calculate the number of incidents caused by each deployment\n\tSELECT\n\t\td.deployment_id,\n\t\td.deployment_finished_date,\n\t\tcount(distinct case when i.type = 'INCIDENT' then d.deployment_id else null end) as has_incident\n\tFROM\n\t\t_deployments d\n\t\tleft join project_issue_metrics pim on d.deployment_id = pim.deployment_id\n\t\tleft join issues i on pim.id = i.id\n\tGROUP BY 1,2\n),\n\n_change_failure_rate as (\n\tSELECT \n\t\tcase \n\t\t\twhen count(deployment_id) is null then null\n\t\t\telse sum(has_incident)/count(deployment_id) end as change_failure_rate\n\tFROM\n\t\t_failure_caused_by_deployments\n),\n\n_is_collected_data as(\n\tSELECT\n CASE \n WHEN COUNT(i.id) = 0 AND COUNT(cdc.id) = 0 THEN 'No All'\n WHEN COUNT(i.id) = 0 THEN 'No Incidents' \n WHEN COUNT(cdc.id) = 0 THEN 'No Deployments'\n END AS is_collected\nFROM\n (SELECT 1) AS dummy\nLEFT JOIN\n issues i ON i.type = 'INCIDENT'\nLEFT JOIN\n cicd_deployment_commits cdc ON 1=1\n),\n\n_metric_cfr as (\n\tSELECT\n\t\t'Change failure rate' as metric,\n\t\tCASE\n\t\t\tWHEN ('$benchmarks') = '2023 report' THEN\n\t\t\t\tCASE \n\t\t\t\t WHEN is_collected = \"No All\" THEN \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\t WHEN is_collected = \"No Incidents\" THEN \"N/A. Please check if you have collected incidents.\"\n\t\t\t\t WHEN is_collected = \"No Deployments\" THEN \"N/A. Please check if you have collected deployments.\"\n\t\t\t\t\tWHEN change_failure_rate <= 5 THEN \"0-5%(elite)\"\n\t\t\t\t\tWHEN change_failure_rate <= .10 THEN \"5%-10%(high)\"\n\t\t\t\t\tWHEN change_failure_rate <= .15 THEN \"10%-15%(medium)\"\n\t\t\t\t\tWHEN change_failure_rate > .15 THEN \"> 15%(low)\"\n\t\t\t\t\tELSE \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\t\tEND\n\t\t\tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\t\tCASE \n\t\t\t\t\tWHEN is_collected = \"No All\" THEN \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\t WHEN is_collected = \"No Incidents\" THEN \"N/A. Please check if you have collected incidents.\"\n\t\t\t\t WHEN is_collected = \"No Deployments\" THEN \"N/A. Please check if you have collected deployments.\"\n\t\t\t\t\tWHEN change_failure_rate <= .15 THEN \"0-15%(elite)\"\n\t\t\t\t\tWHEN change_failure_rate <= .20 THEN \"16%-20%(high)\"\n\t\t\t\t\tWHEN change_failure_rate <= .30 THEN \"21%-30%(medium)\"\n\t\t\t\t\tWHEN change_failure_rate > .30 THEN \"> 30%(low)\" \n\t\t\t\t\tELSE \"N/A. 
Please check if you have collected deployments/incidents.\"\n\t\t\t\t\tEND\n\t\t\tELSE 'Invalid Benchmarks'\n\t\tEND AS value\n\tFROM \n\t\t_change_failure_rate, _is_collected_data\n),\n\n_final_results as (\t\n\tSELECT distinct db.id,db.metric,db.low,db.medium,db.high,db.elite,m1.metric as _metric, m1.value FROM dora_benchmarks db\n\tleft join _metric_deployment_frequency m1 on db.metric = m1.metric\n\tWHERE m1.metric is not null and db.benchmarks = ('$benchmarks')\n\t\n\tunion \n\t\n\tSELECT distinct db.id,db.metric,db.low,db.medium,db.high,db.elite,m2.metric as _metric, m2.value FROM dora_benchmarks db\n\tleft join _metric_change_lead_time m2 on db.metric = m2.metric\n\tWHERE m2.metric is not null and db.benchmarks = ('$benchmarks')\n\t\n\tunion \n\t\n\tSELECT distinct db.id,db.metric,db.low,db.medium,db.high,db.elite,m3.metric as _metric, m3.value FROM dora_benchmarks db\n\tleft join _metric_mttr m3 on db.metric = m3.metric\n\tWHERE m3.metric is not null and db.benchmarks = ('$benchmarks')\n\t\n\tunion \n\t\n\tSELECT distinct db.id,db.metric,db.low,db.medium,db.high,db.elite,m4.metric as _metric, m4.value FROM dora_benchmarks db\n\tleft join _metric_cfr m4 on db.metric = m4.metric\n\tWHERE m4.metric is not null and db.benchmarks = ('$benchmarks')\n)\n\n\nSELECT \n\tmetric,\n\tcase when low = value then low else null end as low,\n\tcase when medium = value then medium else null end as medium,\n\tcase when high = value then high else null end as high,\n\tcase when elite = value then elite else null end as elite\nFROM _final_results\nORDER BY id", "refId": "A", "select": [ [ @@ -343,7 +343,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "-- Metric 1: Deployment Frequency\nwith last_few_calendar_months as(\n-- construct the last few calendar months within the selected time period in the top-right corner\n\tSELECT CAST((SYSDATE()-INTERVAL (H+T+U) DAY) AS date) day\n\tFROM ( SELECT 0 H\n\t\t\tUNION ALL SELECT 100 UNION ALL SELECT 200 UNION ALL SELECT 300\n\t\t) H CROSS JOIN ( SELECT 0 T\n\t\t\tUNION ALL SELECT 10 UNION ALL SELECT 20 UNION ALL SELECT 30\n\t\t\tUNION ALL SELECT 40 UNION ALL SELECT 50 UNION ALL SELECT 60\n\t\t\tUNION ALL SELECT 70 UNION ALL SELECT 80 UNION ALL SELECT 90\n\t\t) T CROSS JOIN ( SELECT 0 U\n\t\t\tUNION ALL SELECT 1 UNION ALL SELECT 2 UNION ALL SELECT 3\n\t\t\tUNION ALL SELECT 4 UNION ALL SELECT 5 UNION ALL SELECT 6\n\t\t\tUNION ALL SELECT 7 UNION ALL SELECT 8 UNION ALL SELECT 9\n\t\t) U\n\tWHERE\n\t\t(SYSDATE()-INTERVAL (H+T+U) DAY) > $__timeFrom()\n),\n\n_production_deployment_days as(\n-- When deploying multiple commits in one pipeline, GitLab and BitBucket may generate more than one deployment. 
However, DevLake consider these deployments as ONE production deployment and use the last one's finished_date as the finished date.\n\tSELECT\n\t\tcdc.cicd_deployment_id as deployment_id,\n\t\tmax(DATE(cdc.finished_date)) as day\n\tFROM cicd_deployment_commits cdc\n\tJOIN project_mapping pm on cdc.cicd_scope_id = pm.row_id and pm.`table` = 'cicd_scopes'\n\tWHERE\n\t\tpm.project_name in (${project:sqlstring}+'')\n\t\tand cdc.result = 'SUCCESS'\n\t\tand cdc.environment = 'PRODUCTION'\n\tGROUP BY 1\n),\n\n_days_weekly_deploy as(\n-- calculate the number of deployment days every week\n\tSELECT\n\t\t\tdate(DATE_ADD(last_few_calendar_months.day, INTERVAL -WEEKDAY(last_few_calendar_months.day) DAY)) as week,\n\t\t\tMAX(if(_production_deployment_days.day is not null, 1, null)) as weeks_deployed,\n\t\t\tCOUNT(distinct _production_deployment_days.day) as days_deployed\n\tFROM \n\t\tlast_few_calendar_months\n\t\tLEFT JOIN _production_deployment_days ON _production_deployment_days.day = last_few_calendar_months.day\n\tGROUP BY week\n\t),\n\n_days_monthly_deploy as(\n-- calculate the number of deployment days every month\n\tSELECT\n\t\t\tdate(DATE_ADD(last_few_calendar_months.day, INTERVAL -DAY(last_few_calendar_months.day)+1 DAY)) as month,\n\t\t\tMAX(if(_production_deployment_days.day is not null, 1, null)) as months_deployed,\n\t\t COUNT(distinct _production_deployment_days.day) as days_deployed\n\tFROM \n\t\tlast_few_calendar_months\n\t\tLEFT JOIN _production_deployment_days ON _production_deployment_days.day = last_few_calendar_months.day\n\tGROUP BY month\n\t),\n\n_days_six_months_deploy AS (\n SELECT\n month,\n SUM(days_deployed) OVER (\n ORDER BY month\n ROWS BETWEEN 5 PRECEDING AND CURRENT ROW\n ) AS days_deployed_per_six_months,\n COUNT(months_deployed) OVER (\n ORDER BY month\n ROWS BETWEEN 5 PRECEDING AND CURRENT ROW\n ) AS months_deployed_count,\n ROW_NUMBER() OVER (\n PARTITION BY DATE_FORMAT(month, '%Y-%m') DIV 6\n ORDER BY month DESC\n ) AS rn\n FROM _days_monthly_deploy\n),\n\n_median_number_of_deployment_days_per_week_ranks as(\n\tSELECT *, percent_rank() over(order by days_deployed) as ranks\n\tFROM _days_weekly_deploy\n),\n\n_median_number_of_deployment_days_per_week as(\n\tSELECT max(days_deployed) as median_number_of_deployment_days_per_week\n\tFROM _median_number_of_deployment_days_per_week_ranks\n\tWHERE ranks <= 0.5\n),\n\n_median_number_of_deployment_days_per_month_ranks as(\n\tSELECT *, percent_rank() over(order by days_deployed) as ranks\n\tFROM _days_monthly_deploy\n),\n\n_median_number_of_deployment_days_per_month as(\n\tSELECT max(days_deployed) as median_number_of_deployment_days_per_month\n\tFROM _median_number_of_deployment_days_per_month_ranks\n\tWHERE ranks <= 0.5\n),\n\n_days_per_six_months_deploy_by_filter AS (\nSELECT\n month,\n days_deployed_per_six_months,\n months_deployed_count\nFROM _days_six_months_deploy\nWHERE rn%6 = 1\n),\n\n\n_median_number_of_deployment_days_per_six_months_ranks as(\n\tSELECT *, percent_rank() over(order by days_deployed_per_six_months) as ranks\n\tFROM _days_per_six_months_deploy_by_filter\n),\n\n_median_number_of_deployment_days_per_six_months as(\n\tSELECT min(days_deployed_per_six_months) as median_number_of_deployment_days_per_six_months, min(months_deployed_count) as is_collected\n\tFROM _median_number_of_deployment_days_per_six_months_ranks\n\tWHERE ranks >= 0.5\n)\n\nSELECT \n CASE\n WHEN ('$benchmarks') = '2023 report' THEN\n\t\t\tCASE \n\t\t\t\tWHEN median_number_of_deployment_days_per_week >= 7 THEN 
'On-demand(elite)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_week >= 1 THEN 'Between once per day and once per week(high)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_month >= 1 THEN 'Between once per week and once per month(medium)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_month < 1 and is_collected != NULL THEN 'Fewer than once per month(low)'\n\t\t\t\tELSE \"N/A. Please check if you have collected deployments.\" END\n\t \tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\tCASE \n\t\t\t\tWHEN median_number_of_deployment_days_per_week >= 7 THEN 'On-demand(elite)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_month >= 1 THEN 'Between once per day and once per month(high)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_six_months >= 1 THEN 'Between once per month and once every 6 months(medium)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_six_months < 1 and is_collected != NULL THEN 'Fewer than once per six months(low)'\n\t\t\t\tELSE \"N/A. Please check if you have collected deployments.\" END\n\t\tELSE 'Invalid Benchmarks'\n\tEND AS 'Deployment Frequency'\nFROM _median_number_of_deployment_days_per_week, _median_number_of_deployment_days_per_month, _median_number_of_deployment_days_per_six_months\n", + "rawSql": "-- Metric 1: Deployment Frequency\nwith last_few_calendar_months as(\n-- construct the last few calendar months within the selected time period in the top-right corner\n\tSELECT CAST((SYSDATE()-INTERVAL (H+T+U) DAY) AS date) day\n\tFROM ( SELECT 0 H\n\t\t\tUNION ALL SELECT 100 UNION ALL SELECT 200 UNION ALL SELECT 300\n\t\t) H CROSS JOIN ( SELECT 0 T\n\t\t\tUNION ALL SELECT 10 UNION ALL SELECT 20 UNION ALL SELECT 30\n\t\t\tUNION ALL SELECT 40 UNION ALL SELECT 50 UNION ALL SELECT 60\n\t\t\tUNION ALL SELECT 70 UNION ALL SELECT 80 UNION ALL SELECT 90\n\t\t) T CROSS JOIN ( SELECT 0 U\n\t\t\tUNION ALL SELECT 1 UNION ALL SELECT 2 UNION ALL SELECT 3\n\t\t\tUNION ALL SELECT 4 UNION ALL SELECT 5 UNION ALL SELECT 6\n\t\t\tUNION ALL SELECT 7 UNION ALL SELECT 8 UNION ALL SELECT 9\n\t\t) U\n\tWHERE\n\t\t(SYSDATE()-INTERVAL (H+T+U) DAY) > $__timeFrom()\n),\n\n_production_deployment_days as(\n-- When deploying multiple commits in one pipeline, GitLab and BitBucket may generate more than one deployment. 
However, DevLake consider these deployments as ONE production deployment and use the last one's finished_date as the finished date.\n\tSELECT\n\t\tcdc.cicd_deployment_id as deployment_id,\n\t\tmax(DATE(cdc.finished_date)) as day\n\tFROM cicd_deployment_commits cdc\n\tJOIN project_mapping pm on cdc.cicd_scope_id = pm.row_id and pm.`table` = 'cicd_scopes'\n\tWHERE\n\t\tpm.project_name in (${project})\n\t\tand cdc.result = 'SUCCESS'\n\t\tand cdc.environment = 'PRODUCTION'\n\tGROUP BY 1\n),\n\n_days_weekly_deploy as(\n-- calculate the number of deployment days every week\n\tSELECT\n\t\t\tdate(DATE_ADD(last_few_calendar_months.day, INTERVAL -WEEKDAY(last_few_calendar_months.day) DAY)) as week,\n\t\t\tMAX(if(_production_deployment_days.day is not null, 1, null)) as weeks_deployed,\n\t\t\tCOUNT(distinct _production_deployment_days.day) as days_deployed\n\tFROM \n\t\tlast_few_calendar_months\n\t\tLEFT JOIN _production_deployment_days ON _production_deployment_days.day = last_few_calendar_months.day\n\tGROUP BY week\n\t),\n\n_days_monthly_deploy as(\n-- calculate the number of deployment days every month\n\tSELECT\n\t\t\tdate(DATE_ADD(last_few_calendar_months.day, INTERVAL -DAY(last_few_calendar_months.day)+1 DAY)) as month,\n\t\t\tMAX(if(_production_deployment_days.day is not null, 1, null)) as months_deployed,\n\t\t COUNT(distinct _production_deployment_days.day) as days_deployed\n\tFROM \n\t\tlast_few_calendar_months\n\t\tLEFT JOIN _production_deployment_days ON _production_deployment_days.day = last_few_calendar_months.day\n\tGROUP BY month\n\t),\n\n_days_six_months_deploy AS (\n SELECT\n month,\n SUM(days_deployed) OVER (\n ORDER BY month\n ROWS BETWEEN 5 PRECEDING AND CURRENT ROW\n ) AS days_deployed_per_six_months,\n COUNT(months_deployed) OVER (\n ORDER BY month\n ROWS BETWEEN 5 PRECEDING AND CURRENT ROW\n ) AS months_deployed_count,\n ROW_NUMBER() OVER (\n PARTITION BY DATE_FORMAT(month, '%Y-%m') DIV 6\n ORDER BY month DESC\n ) AS rn\n FROM _days_monthly_deploy\n),\n\n_median_number_of_deployment_days_per_week_ranks as(\n\tSELECT *, percent_rank() over(order by days_deployed) as ranks\n\tFROM _days_weekly_deploy\n),\n\n_median_number_of_deployment_days_per_week as(\n\tSELECT max(days_deployed) as median_number_of_deployment_days_per_week\n\tFROM _median_number_of_deployment_days_per_week_ranks\n\tWHERE ranks <= 0.5\n),\n\n_median_number_of_deployment_days_per_month_ranks as(\n\tSELECT *, percent_rank() over(order by days_deployed) as ranks\n\tFROM _days_monthly_deploy\n),\n\n_median_number_of_deployment_days_per_month as(\n\tSELECT max(days_deployed) as median_number_of_deployment_days_per_month\n\tFROM _median_number_of_deployment_days_per_month_ranks\n\tWHERE ranks <= 0.5\n),\n\n_days_per_six_months_deploy_by_filter AS (\nSELECT\n month,\n days_deployed_per_six_months,\n months_deployed_count\nFROM _days_six_months_deploy\nWHERE rn%6 = 1\n),\n\n\n_median_number_of_deployment_days_per_six_months_ranks as(\n\tSELECT *, percent_rank() over(order by days_deployed_per_six_months) as ranks\n\tFROM _days_per_six_months_deploy_by_filter\n),\n\n_median_number_of_deployment_days_per_six_months as(\n\tSELECT min(days_deployed_per_six_months) as median_number_of_deployment_days_per_six_months, min(months_deployed_count) as is_collected\n\tFROM _median_number_of_deployment_days_per_six_months_ranks\n\tWHERE ranks >= 0.5\n)\n\nSELECT \n CASE\n WHEN ('$benchmarks') = '2023 report' THEN\n\t\t\tCASE \n\t\t\t\tWHEN median_number_of_deployment_days_per_week >= 7 THEN 'On-demand(elite)'\n\t\t\t\tWHEN 
median_number_of_deployment_days_per_week >= 1 THEN 'Between once per day and once per week(high)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_month >= 1 THEN 'Between once per week and once per month(medium)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_month < 1 and is_collected != NULL THEN 'Fewer than once per month(low)'\n\t\t\t\tELSE \"N/A. Please check if you have collected deployments.\" END\n\t \tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\tCASE \n\t\t\t\tWHEN median_number_of_deployment_days_per_week >= 7 THEN 'On-demand(elite)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_month >= 1 THEN 'Between once per day and once per month(high)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_six_months >= 1 THEN 'Between once per month and once every 6 months(medium)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_six_months < 1 and is_collected != NULL THEN 'Fewer than once per six months(low)'\n\t\t\t\tELSE \"N/A. Please check if you have collected deployments.\" END\n\t\tELSE 'Invalid Benchmarks'\n\tEND AS 'Deployment Frequency'\nFROM _median_number_of_deployment_days_per_week, _median_number_of_deployment_days_per_month, _median_number_of_deployment_days_per_six_months\n", "refId": "A", "select": [ [ @@ -481,7 +481,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "-- Metric 2: median lead time for changes\nwith _pr_stats as (\n-- get the cycle time of PRs deployed by the deployments finished in the selected period\n\tSELECT\n\t\tdistinct pr.id,\n\t\tppm.pr_cycle_time\n\tFROM\n\t\tpull_requests pr \n\t\tjoin project_pr_metrics ppm on ppm.id = pr.id\n\t\tjoin project_mapping pm on pr.base_repo_id = pm.row_id and pm.`table` = 'repos'\n\t\tjoin cicd_deployment_commits cdc on ppm.deployment_commit_id = cdc.id\n\tWHERE\n\t pm.project_name in (${project:sqlstring}+'') \n\t\tand pr.merged_date is not null\n\t\tand ppm.pr_cycle_time is not null\n\t\tand $__timeFilter(cdc.finished_date)\n),\n\n_median_change_lead_time_ranks as(\n\tSELECT *, percent_rank() over(order by pr_cycle_time) as ranks\n\tFROM _pr_stats\n),\n\n_median_change_lead_time as(\n-- use median PR cycle time as the median change lead time\n\tSELECT max(pr_cycle_time) as median_change_lead_time\n\tFROM _median_change_lead_time_ranks\n\tWHERE ranks <= 0.5\n)\n\nSELECT \n CASE\n WHEN ('$benchmarks') = '2023 report' THEN\n\t\t\tCASE\n\t\t\t\tWHEN median_change_lead_time < 24 * 60 THEN \"Less than one day(elite)\"\n\t\t\t\tWHEN median_change_lead_time < 7 * 24 * 60 THEN \"Between one day and one week(high)\"\n\t\t\t\tWHEN median_change_lead_time < 30 * 24 * 60 THEN \"Between one week and one month(medium)\"\n\t\t\t\tWHEN median_change_lead_time >= 30 * 24 * 60 THEN \"More than one month(low)\"\n\t\t\t\tELSE \"N/A. Please check if you have collected deployments/pull_requests.\"\n\t\t\t\tEND\n WHEN ('$benchmarks') = '2021 report' THEN\n\t\t CASE\n\t\t\t\tWHEN median_change_lead_time < 60 THEN \"Less than one hour(elite)\"\n\t\t\t\tWHEN median_change_lead_time < 7 * 24 * 60 THEN \"Less than one week(high)\"\n\t\t\t\tWHEN median_change_lead_time < 180 * 24 * 60 THEN \"Between one week and six months(medium)\"\n\t\t\t\tWHEN median_change_lead_time >= 180 * 24 * 60 THEN \"More than six months(low)\"\n\t\t\t\tELSE \"N/A. 
Please check if you have collected deployments/pull_requests.\"\n\t\t\t\tEND\n\t\tELSE 'Invalid Benchmarks'\n\tEND AS median_change_lead_time\nFROM _median_change_lead_time", + "rawSql": "-- Metric 2: median lead time for changes\nwith _pr_stats as (\n-- get the cycle time of PRs deployed by the deployments finished in the selected period\n\tSELECT\n\t\tdistinct pr.id,\n\t\tppm.pr_cycle_time\n\tFROM\n\t\tpull_requests pr \n\t\tjoin project_pr_metrics ppm on ppm.id = pr.id\n\t\tjoin project_mapping pm on pr.base_repo_id = pm.row_id and pm.`table` = 'repos'\n\t\tjoin cicd_deployment_commits cdc on ppm.deployment_commit_id = cdc.id\n\tWHERE\n\t pm.project_name in (${project}) \n\t\tand pr.merged_date is not null\n\t\tand ppm.pr_cycle_time is not null\n\t\tand $__timeFilter(cdc.finished_date)\n),\n\n_median_change_lead_time_ranks as(\n\tSELECT *, percent_rank() over(order by pr_cycle_time) as ranks\n\tFROM _pr_stats\n),\n\n_median_change_lead_time as(\n-- use median PR cycle time as the median change lead time\n\tSELECT max(pr_cycle_time) as median_change_lead_time\n\tFROM _median_change_lead_time_ranks\n\tWHERE ranks <= 0.5\n)\n\nSELECT \n CASE\n WHEN ('$benchmarks') = '2023 report' THEN\n\t\t\tCASE\n\t\t\t\tWHEN median_change_lead_time < 24 * 60 THEN \"Less than one day(elite)\"\n\t\t\t\tWHEN median_change_lead_time < 7 * 24 * 60 THEN \"Between one day and one week(high)\"\n\t\t\t\tWHEN median_change_lead_time < 30 * 24 * 60 THEN \"Between one week and one month(medium)\"\n\t\t\t\tWHEN median_change_lead_time >= 30 * 24 * 60 THEN \"More than one month(low)\"\n\t\t\t\tELSE \"N/A. Please check if you have collected deployments/pull_requests.\"\n\t\t\t\tEND\n WHEN ('$benchmarks') = '2021 report' THEN\n\t\t CASE\n\t\t\t\tWHEN median_change_lead_time < 60 THEN \"Less than one hour(elite)\"\n\t\t\t\tWHEN median_change_lead_time < 7 * 24 * 60 THEN \"Less than one week(high)\"\n\t\t\t\tWHEN median_change_lead_time < 180 * 24 * 60 THEN \"Between one week and six months(medium)\"\n\t\t\t\tWHEN median_change_lead_time >= 180 * 24 * 60 THEN \"More than six months(low)\"\n\t\t\t\tELSE \"N/A. 
Please check if you have collected deployments/pull_requests.\"\n\t\t\t\tEND\n\t\tELSE 'Invalid Benchmarks'\n\tEND AS median_change_lead_time\nFROM _median_change_lead_time", "refId": "A", "select": [ [ @@ -623,7 +623,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "-- Metric 3: Median time to restore service \nwith _incidents as (\n-- get the incidents created within the selected time period in the top-right corner\n\tSELECT\n\t distinct i.id,\n\t\tcast(lead_time_minutes as signed) as lead_time_minutes\n\tFROM\n\t\tissues i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id and pm.`table` = 'boards'\n\tWHERE\n\t pm.project_name in (${project:sqlstring}+'')\n\t\tand i.type = 'INCIDENT'\n\t\tand $__timeFilter(i.created_date)\n),\n\n_median_mttr_ranks as(\n\tSELECT *, percent_rank() over(order by lead_time_minutes) as ranks\n\tFROM _incidents\n),\n\n_median_mttr as(\n\tSELECT max(lead_time_minutes) as median_time_to_resolve\n\tFROM _median_mttr_ranks\n\tWHERE ranks <= 0.5\n)\n\nSELECT \n CASE\n WHEN ('$benchmarks') = '2023 report' THEN\n\t\t\tCASE\n\t\t\t\tWHEN median_time_to_resolve < 60 THEN \"Less than one hour(elite)\"\n\t\t\t\tWHEN median_time_to_resolve < 24 * 60 THEN \"Less than one day(high)\"\n\t\t\t\tWHEN median_time_to_resolve < 7 * 24 * 60 THEN \"Between one day and one week(medium)\"\n\t\t\t\tWHEN median_time_to_resolve >= 7 * 24 * 60 THEN \"More than one week(low)\"\n\t\t\t\tELSE \"N/A. Please check if you have collected incidents.\"\n\t\t\t\tEND \n\t\tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\tCASE\n\t\t\t\tWHEN median_time_to_resolve < 60 THEN \"Less than one hour(elite)\"\n\t\t\t\tWHEN median_time_to_resolve < 24 * 60 THEN \"Less than one day(high)\"\n\t\t\t\tWHEN median_time_to_resolve < 7 * 24 * 60 THEN \"Between one day and one week(medium)\"\n\t\t\t\tWHEN median_time_to_resolve >= 7 * 24 * 60 THEN \"More than one week(low)\"\n\t\t\t\tELSE \"N/A. Please check if you have collected incidents.\"\n \t\tEND\n\t\tELSE 'Invalid Benchmarks'\n\tEND AS median_time_to_resolve\nFROM \n\t_median_mttr", + "rawSql": "-- Metric 3: Median time to restore service \nwith _incidents as (\n-- get the incidents created within the selected time period in the top-right corner\n\tSELECT\n\t distinct i.id,\n\t\tcast(lead_time_minutes as signed) as lead_time_minutes\n\tFROM\n\t\tissues i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id and pm.`table` = 'boards'\n\tWHERE\n\t pm.project_name in (${project})\n\t\tand i.type = 'INCIDENT'\n\t\tand $__timeFilter(i.created_date)\n),\n\n_median_mttr_ranks as(\n\tSELECT *, percent_rank() over(order by lead_time_minutes) as ranks\n\tFROM _incidents\n),\n\n_median_mttr as(\n\tSELECT max(lead_time_minutes) as median_time_to_resolve\n\tFROM _median_mttr_ranks\n\tWHERE ranks <= 0.5\n)\n\nSELECT \n CASE\n WHEN ('$benchmarks') = '2023 report' THEN\n\t\t\tCASE\n\t\t\t\tWHEN median_time_to_resolve < 60 THEN \"Less than one hour(elite)\"\n\t\t\t\tWHEN median_time_to_resolve < 24 * 60 THEN \"Less than one day(high)\"\n\t\t\t\tWHEN median_time_to_resolve < 7 * 24 * 60 THEN \"Between one day and one week(medium)\"\n\t\t\t\tWHEN median_time_to_resolve >= 7 * 24 * 60 THEN \"More than one week(low)\"\n\t\t\t\tELSE \"N/A. 
Please check if you have collected incidents.\"\n\t\t\t\tEND \n\t\tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\tCASE\n\t\t\t\tWHEN median_time_to_resolve < 60 THEN \"Less than one hour(elite)\"\n\t\t\t\tWHEN median_time_to_resolve < 24 * 60 THEN \"Less than one day(high)\"\n\t\t\t\tWHEN median_time_to_resolve < 7 * 24 * 60 THEN \"Between one day and one week(medium)\"\n\t\t\t\tWHEN median_time_to_resolve >= 7 * 24 * 60 THEN \"More than one week(low)\"\n\t\t\t\tELSE \"N/A. Please check if you have collected incidents.\"\n \t\tEND\n\t\tELSE 'Invalid Benchmarks'\n\tEND AS median_time_to_resolve\nFROM \n\t_median_mttr", "refId": "A", "select": [ [ @@ -761,7 +761,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "-- Metric 4: change failure rate\nwith _deployments as (\n-- When deploying multiple commits in one pipeline, GitLab and BitBucket may generate more than one deployment. However, DevLake consider these deployments as ONE production deployment and use the last one's finished_date as the finished date.\n\tSELECT\n\t\tcdc.cicd_deployment_id as deployment_id,\n\t\tmax(cdc.finished_date) as deployment_finished_date\n\tFROM \n\t\tcicd_deployment_commits cdc\n\t\tJOIN project_mapping pm on cdc.cicd_scope_id = pm.row_id and pm.`table` = 'cicd_scopes'\n\tWHERE\n\t\tpm.project_name in (${project:sqlstring}+'')\n\t\tand cdc.result = 'SUCCESS'\n\t\tand cdc.environment = 'PRODUCTION'\n\tGROUP BY 1\n\tHAVING $__timeFilter(max(cdc.finished_date))\n),\n\n_failure_caused_by_deployments as (\n-- calculate the number of incidents caused by each deployment\n\tSELECT\n\t\td.deployment_id,\n\t\td.deployment_finished_date,\n\t\tcount(distinct case when i.type = 'INCIDENT' then d.deployment_id else null end) as has_incident\n\tFROM\n\t\t_deployments d\n\t\tleft join project_issue_metrics pim on d.deployment_id = pim.deployment_id\n\t\tleft join issues i on pim.id = i.id\n\tGROUP BY 1,2\n),\n\n_change_failure_rate as (\n\tSELECT \n\t\tcase \n\t\t\twhen count(deployment_id) is null then null\n\t\t\telse sum(has_incident)/count(deployment_id) end as change_failure_rate\n\tFROM\n\t\t_failure_caused_by_deployments\n),\n\n_is_collected_data as(\n\tSELECT\n CASE \n WHEN COUNT(i.id) = 0 AND COUNT(cdc.id) = 0 THEN 'No All'\n WHEN COUNT(i.id) = 0 THEN 'No Incidents' \n WHEN COUNT(cdc.id) = 0 THEN 'No Deployments'\n END AS is_collected\nFROM\n (SELECT 1) AS dummy\nLEFT JOIN\n issues i ON i.type = 'INCIDENT'\nLEFT JOIN\n cicd_deployment_commits cdc ON 1=1\n)\n\n\nSELECT\n CASE\n WHEN ('$benchmarks') = '2023 report' THEN\n\t\t\tCASE \n\t\t\t\tWHEN is_collected = \"No All\" THEN \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\tWHEN is_collected = \"No Incidents\" THEN \"N/A. Please check if you have collected incidents.\"\n\t\t\t\tWHEN is_collected = \"No Deployments\" THEN \"N/A. Please check if you have collected deployments.\"\n\t\t\t\tWHEN change_failure_rate <= 5 THEN \"0-5%(elite)\"\n\t\t\t\tWHEN change_failure_rate <= .10 THEN \"5%-10%(high)\"\n\t\t\t\tWHEN change_failure_rate <= .15 THEN \"10%-15%(medium)\"\n\t\t\t\tWHEN change_failure_rate > .15 THEN \"> 15%(low)\"\n\t\t\t\tELSE \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\tEND\n\t\tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\tCASE \n\t\t\t WHEN is_collected = \"No All\" THEN \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\tWHEN is_collected = \"No Incidents\" THEN \"N/A. 
Please check if you have collected incidents.\"\n\t\t\t\tWHEN is_collected = \"No Deployments\" THEN \"N/A. Please check if you have collected deployments.\"\n\t\t\t\tWHEN change_failure_rate <= .15 THEN \"0-15%(elite)\"\n\t\t\t\tWHEN change_failure_rate <= .20 THEN \"16%-20%(high)\"\n\t\t\t\tWHEN change_failure_rate <= .30 THEN \"21%-30%(medium)\"\n\t\t\t\tWHEN change_failure_rate > .30 THEN \"> 30%(low)\" \n\t\t\t\tELSE \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\tEND\n\t\tELSE 'Invalid Benchmarks'\n\tEND AS change_failure_rate\nFROM \n\t_change_failure_rate, _is_collected_data", + "rawSql": "-- Metric 4: change failure rate\nwith _deployments as (\n-- When deploying multiple commits in one pipeline, GitLab and BitBucket may generate more than one deployment. However, DevLake consider these deployments as ONE production deployment and use the last one's finished_date as the finished date.\n\tSELECT\n\t\tcdc.cicd_deployment_id as deployment_id,\n\t\tmax(cdc.finished_date) as deployment_finished_date\n\tFROM \n\t\tcicd_deployment_commits cdc\n\t\tJOIN project_mapping pm on cdc.cicd_scope_id = pm.row_id and pm.`table` = 'cicd_scopes'\n\tWHERE\n\t\tpm.project_name in (${project})\n\t\tand cdc.result = 'SUCCESS'\n\t\tand cdc.environment = 'PRODUCTION'\n\tGROUP BY 1\n\tHAVING $__timeFilter(max(cdc.finished_date))\n),\n\n_failure_caused_by_deployments as (\n-- calculate the number of incidents caused by each deployment\n\tSELECT\n\t\td.deployment_id,\n\t\td.deployment_finished_date,\n\t\tcount(distinct case when i.type = 'INCIDENT' then d.deployment_id else null end) as has_incident\n\tFROM\n\t\t_deployments d\n\t\tleft join project_issue_metrics pim on d.deployment_id = pim.deployment_id\n\t\tleft join issues i on pim.id = i.id\n\tGROUP BY 1,2\n),\n\n_change_failure_rate as (\n\tSELECT \n\t\tcase \n\t\t\twhen count(deployment_id) is null then null\n\t\t\telse sum(has_incident)/count(deployment_id) end as change_failure_rate\n\tFROM\n\t\t_failure_caused_by_deployments\n),\n\n_is_collected_data as(\n\tSELECT\n CASE \n WHEN COUNT(i.id) = 0 AND COUNT(cdc.id) = 0 THEN 'No All'\n WHEN COUNT(i.id) = 0 THEN 'No Incidents' \n WHEN COUNT(cdc.id) = 0 THEN 'No Deployments'\n END AS is_collected\nFROM\n (SELECT 1) AS dummy\nLEFT JOIN\n issues i ON i.type = 'INCIDENT'\nLEFT JOIN\n cicd_deployment_commits cdc ON 1=1\n)\n\n\nSELECT\n CASE\n WHEN ('$benchmarks') = '2023 report' THEN\n\t\t\tCASE \n\t\t\t\tWHEN is_collected = \"No All\" THEN \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\tWHEN is_collected = \"No Incidents\" THEN \"N/A. Please check if you have collected incidents.\"\n\t\t\t\tWHEN is_collected = \"No Deployments\" THEN \"N/A. Please check if you have collected deployments.\"\n\t\t\t\tWHEN change_failure_rate <= 5 THEN \"0-5%(elite)\"\n\t\t\t\tWHEN change_failure_rate <= .10 THEN \"5%-10%(high)\"\n\t\t\t\tWHEN change_failure_rate <= .15 THEN \"10%-15%(medium)\"\n\t\t\t\tWHEN change_failure_rate > .15 THEN \"> 15%(low)\"\n\t\t\t\tELSE \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\tEND\n\t\tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\tCASE \n\t\t\t WHEN is_collected = \"No All\" THEN \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\tWHEN is_collected = \"No Incidents\" THEN \"N/A. Please check if you have collected incidents.\"\n\t\t\t\tWHEN is_collected = \"No Deployments\" THEN \"N/A. 
Please check if you have collected deployments.\"\n\t\t\t\tWHEN change_failure_rate <= .15 THEN \"0-15%(elite)\"\n\t\t\t\tWHEN change_failure_rate <= .20 THEN \"16%-20%(high)\"\n\t\t\t\tWHEN change_failure_rate <= .30 THEN \"21%-30%(medium)\"\n\t\t\t\tWHEN change_failure_rate > .30 THEN \"> 30%(low)\" \n\t\t\t\tELSE \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\tEND\n\t\tELSE 'Invalid Benchmarks'\n\tEND AS change_failure_rate\nFROM \n\t_change_failure_rate, _is_collected_data", "refId": "A", "select": [ [ @@ -888,7 +888,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "-- Metric 1: Number of deployments per month\nwith _deployments as(\n-- When deploying multiple commits in one pipeline, GitLab and BitBucket may generate more than one deployment. However, DevLake consider these deployments as ONE production deployment and use the last one's finished_date as the finished date.\n\tSELECT \n\t\tdate_format(deployment_finished_date,'%y/%m') as month,\n\t\tcount(cicd_deployment_id) as deployment_count\n\tFROM (\n\t\tSELECT\n\t\t\tcdc.cicd_deployment_id,\n\t\t\tmax(cdc.finished_date) as deployment_finished_date\n\t\tFROM cicd_deployment_commits cdc\n\t\tJOIN project_mapping pm on cdc.cicd_scope_id = pm.row_id and pm.`table` = 'cicd_scopes'\n\t\tWHERE\n\t\t\tpm.project_name in (${project:sqlstring}+'')\n\t\t\tand cdc.result = 'SUCCESS'\n\t\t\tand cdc.environment = 'PRODUCTION'\n\t\tGROUP BY 1\n\t\tHAVING $__timeFilter(max(cdc.finished_date))\n\t) _production_deployments\n\tGROUP BY 1\n)\n\nSELECT \n\tcm.month, \n\tcase when d.deployment_count is null then 0 else d.deployment_count end as deployment_count\nFROM \n\tcalendar_months cm\n\tLEFT JOIN _deployments d on cm.month = d.month\n\tWHERE $__timeFilter(cm.month_timestamp)", + "rawSql": "-- Metric 1: Number of deployments per month\nwith _deployments as(\n-- When deploying multiple commits in one pipeline, GitLab and BitBucket may generate more than one deployment. 
However, DevLake consider these deployments as ONE production deployment and use the last one's finished_date as the finished date.\n\tSELECT \n\t\tdate_format(deployment_finished_date,'%y/%m') as month,\n\t\tcount(cicd_deployment_id) as deployment_count\n\tFROM (\n\t\tSELECT\n\t\t\tcdc.cicd_deployment_id,\n\t\t\tmax(cdc.finished_date) as deployment_finished_date\n\t\tFROM cicd_deployment_commits cdc\n\t\tJOIN project_mapping pm on cdc.cicd_scope_id = pm.row_id and pm.`table` = 'cicd_scopes'\n\t\tWHERE\n\t\t\tpm.project_name in (${project})\n\t\t\tand cdc.result = 'SUCCESS'\n\t\t\tand cdc.environment = 'PRODUCTION'\n\t\tGROUP BY 1\n\t\tHAVING $__timeFilter(max(cdc.finished_date))\n\t) _production_deployments\n\tGROUP BY 1\n)\n\nSELECT \n\tcm.month, \n\tcase when d.deployment_count is null then 0 else d.deployment_count end as deployment_count\nFROM \n\tcalendar_months cm\n\tLEFT JOIN _deployments d on cm.month = d.month\n\tWHERE $__timeFilter(cm.month_timestamp)", "refId": "A", "select": [ [ @@ -996,7 +996,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "-- Metric 2: median change lead time per month\nwith _pr_stats as (\n-- get the cycle time of PRs deployed by the deployments finished each month\n\tSELECT\n\t\tdistinct pr.id,\n\t\tdate_format(cdc.finished_date,'%y/%m') as month,\n\t\tppm.pr_cycle_time\n\tFROM\n\t\tpull_requests pr\n\t\tjoin project_pr_metrics ppm on ppm.id = pr.id\n\t\tjoin project_mapping pm on pr.base_repo_id = pm.row_id and pm.`table` = 'repos'\n\t\tjoin cicd_deployment_commits cdc on ppm.deployment_commit_id = cdc.id\n\tWHERE\n\t\tpm.project_name in (${project:sqlstring}+'') \n\t\tand pr.merged_date is not null\n\t\tand ppm.pr_cycle_time is not null\n\t\tand $__timeFilter(cdc.finished_date)\n),\n\n_find_median_clt_each_month_ranks as(\n\tSELECT *, percent_rank() over(PARTITION BY month order by pr_cycle_time) as ranks\n\tFROM _pr_stats\n),\n\n_clt as(\n\tSELECT month, max(pr_cycle_time) as median_change_lead_time\n\tFROM _find_median_clt_each_month_ranks\n\tWHERE ranks <= 0.5\n\tgroup by month\n)\n\nSELECT \n\tcm.month,\n\tcase \n\t\twhen _clt.median_change_lead_time is null then 0 \n\t\telse _clt.median_change_lead_time/60 end as median_change_lead_time_in_hour\nFROM \n\tcalendar_months cm\n\tLEFT JOIN _clt on cm.month = _clt.month\n WHERE $__timeFilter(cm.month_timestamp)", + "rawSql": "-- Metric 2: median change lead time per month\nwith _pr_stats as (\n-- get the cycle time of PRs deployed by the deployments finished each month\n\tSELECT\n\t\tdistinct pr.id,\n\t\tdate_format(cdc.finished_date,'%y/%m') as month,\n\t\tppm.pr_cycle_time\n\tFROM\n\t\tpull_requests pr\n\t\tjoin project_pr_metrics ppm on ppm.id = pr.id\n\t\tjoin project_mapping pm on pr.base_repo_id = pm.row_id and pm.`table` = 'repos'\n\t\tjoin cicd_deployment_commits cdc on ppm.deployment_commit_id = cdc.id\n\tWHERE\n\t\tpm.project_name in (${project}) \n\t\tand pr.merged_date is not null\n\t\tand ppm.pr_cycle_time is not null\n\t\tand $__timeFilter(cdc.finished_date)\n),\n\n_find_median_clt_each_month_ranks as(\n\tSELECT *, percent_rank() over(PARTITION BY month order by pr_cycle_time) as ranks\n\tFROM _pr_stats\n),\n\n_clt as(\n\tSELECT month, max(pr_cycle_time) as median_change_lead_time\n\tFROM _find_median_clt_each_month_ranks\n\tWHERE ranks <= 0.5\n\tgroup by month\n)\n\nSELECT \n\tcm.month,\n\tcase \n\t\twhen _clt.median_change_lead_time is null then 0 \n\t\telse _clt.median_change_lead_time/60 end as median_change_lead_time_in_hour\nFROM \n\tcalendar_months 
cm\n\tLEFT JOIN _clt on cm.month = _clt.month\n WHERE $__timeFilter(cm.month_timestamp)", "refId": "A", "select": [ [ @@ -1124,7 +1124,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "-- Metric 3: median time to restore service - MTTR\nwith _incidents as (\n-- get the number of incidents created each month\n\tSELECT\n\t distinct i.id,\n\t\tdate_format(i.created_date,'%y/%m') as month,\n\t\tcast(lead_time_minutes as signed) as lead_time_minutes\n\tFROM\n\t\tissues i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id and pm.`table` = 'boards'\n\tWHERE\n\t pm.project_name in (${project:sqlstring}+'')\n\t\tand i.type = 'INCIDENT'\n\t\tand i.lead_time_minutes is not null\n),\n\n_find_median_mttr_each_month_ranks as(\n\tSELECT *, percent_rank() over(PARTITION BY month order by lead_time_minutes) as ranks\n\tFROM _incidents\n),\n\n_mttr as(\n\tSELECT month, max(lead_time_minutes) as median_time_to_resolve\n\tFROM _find_median_mttr_each_month_ranks\n\tWHERE ranks <= 0.5\n\tGROUP BY month\n)\n\nSELECT \n\tcm.month,\n\tcase \n\t\twhen m.median_time_to_resolve is null then 0 \n\t\telse m.median_time_to_resolve/60 end as median_time_to_resolve_in_hour\nFROM \n\tcalendar_months cm\n\tLEFT JOIN _mttr m on cm.month = m.month\n WHERE $__timeFilter(cm.month_timestamp)", + "rawSql": "-- Metric 3: median time to restore service - MTTR\nwith _incidents as (\n-- get the number of incidents created each month\n\tSELECT\n\t distinct i.id,\n\t\tdate_format(i.created_date,'%y/%m') as month,\n\t\tcast(lead_time_minutes as signed) as lead_time_minutes\n\tFROM\n\t\tissues i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id and pm.`table` = 'boards'\n\tWHERE\n\t pm.project_name in (${project})\n\t\tand i.type = 'INCIDENT'\n\t\tand i.lead_time_minutes is not null\n),\n\n_find_median_mttr_each_month_ranks as(\n\tSELECT *, percent_rank() over(PARTITION BY month order by lead_time_minutes) as ranks\n\tFROM _incidents\n),\n\n_mttr as(\n\tSELECT month, max(lead_time_minutes) as median_time_to_resolve\n\tFROM _find_median_mttr_each_month_ranks\n\tWHERE ranks <= 0.5\n\tGROUP BY month\n)\n\nSELECT \n\tcm.month,\n\tcase \n\t\twhen m.median_time_to_resolve is null then 0 \n\t\telse m.median_time_to_resolve/60 end as median_time_to_resolve_in_hour\nFROM \n\tcalendar_months cm\n\tLEFT JOIN _mttr m on cm.month = m.month\n WHERE $__timeFilter(cm.month_timestamp)", "refId": "A", "select": [ [ @@ -1252,7 +1252,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "-- Metric 4: change failure rate per month\nwith _deployments as (\n-- When deploying multiple commits in one pipeline, GitLab and BitBucket may generate more than one deployment. 
However, DevLake consider these deployments as ONE production deployment and use the last one's finished_date as the finished date.\n\tSELECT\n\t\tcdc.cicd_deployment_id as deployment_id,\n\t\tmax(cdc.finished_date) as deployment_finished_date\n\tFROM \n\t\tcicd_deployment_commits cdc\n\t\tJOIN project_mapping pm on cdc.cicd_scope_id = pm.row_id and pm.`table` = 'cicd_scopes'\n\tWHERE\n\t\tpm.project_name in (${project:sqlstring}+'')\n\t\tand cdc.result = 'SUCCESS'\n\t\tand cdc.environment = 'PRODUCTION'\n\tGROUP BY 1\n\tHAVING $__timeFilter(max(cdc.finished_date))\n),\n\n_failure_caused_by_deployments as (\n-- calculate the number of incidents caused by each deployment\n\tSELECT\n\t\td.deployment_id,\n\t\td.deployment_finished_date,\n\t\tcount(distinct case when i.type = 'INCIDENT' then d.deployment_id else null end) as has_incident\n\tFROM\n\t\t_deployments d\n\t\tleft join project_issue_metrics pim on d.deployment_id = pim.deployment_id\n\t\tleft join issues i on pim.id = i.id\n\tGROUP BY 1,2\n),\n\n_change_failure_rate_for_each_month as (\n\tSELECT \n\t\tdate_format(deployment_finished_date,'%y/%m') as month,\n\t\tcase \n\t\t\twhen count(deployment_id) is null then null\n\t\t\telse sum(has_incident)/count(deployment_id) end as change_failure_rate\n\tFROM\n\t\t_failure_caused_by_deployments\n\tGROUP BY 1\n)\n\nSELECT \n\tcm.month,\n\tcfr.change_failure_rate\nFROM \n\tcalendar_months cm\n\tLEFT JOIN _change_failure_rate_for_each_month cfr on cm.month = cfr.month\n\tWHERE $__timeFilter(cm.month_timestamp)", + "rawSql": "-- Metric 4: change failure rate per month\nwith _deployments as (\n-- When deploying multiple commits in one pipeline, GitLab and BitBucket may generate more than one deployment. However, DevLake consider these deployments as ONE production deployment and use the last one's finished_date as the finished date.\n\tSELECT\n\t\tcdc.cicd_deployment_id as deployment_id,\n\t\tmax(cdc.finished_date) as deployment_finished_date\n\tFROM \n\t\tcicd_deployment_commits cdc\n\t\tJOIN project_mapping pm on cdc.cicd_scope_id = pm.row_id and pm.`table` = 'cicd_scopes'\n\tWHERE\n\t\tpm.project_name in (${project})\n\t\tand cdc.result = 'SUCCESS'\n\t\tand cdc.environment = 'PRODUCTION'\n\tGROUP BY 1\n\tHAVING $__timeFilter(max(cdc.finished_date))\n),\n\n_failure_caused_by_deployments as (\n-- calculate the number of incidents caused by each deployment\n\tSELECT\n\t\td.deployment_id,\n\t\td.deployment_finished_date,\n\t\tcount(distinct case when i.type = 'INCIDENT' then d.deployment_id else null end) as has_incident\n\tFROM\n\t\t_deployments d\n\t\tleft join project_issue_metrics pim on d.deployment_id = pim.deployment_id\n\t\tleft join issues i on pim.id = i.id\n\tGROUP BY 1,2\n),\n\n_change_failure_rate_for_each_month as (\n\tSELECT \n\t\tdate_format(deployment_finished_date,'%y/%m') as month,\n\t\tcase \n\t\t\twhen count(deployment_id) is null then null\n\t\t\telse sum(has_incident)/count(deployment_id) end as change_failure_rate\n\tFROM\n\t\t_failure_caused_by_deployments\n\tGROUP BY 1\n)\n\nSELECT \n\tcm.month,\n\tcfr.change_failure_rate\nFROM \n\tcalendar_months cm\n\tLEFT JOIN _change_failure_rate_for_each_month cfr on cm.month = cfr.month\n\tWHERE $__timeFilter(cm.month_timestamp)", "refId": "A", "select": [ [ diff --git a/grafana/dashboards/DORAByTeam.json b/grafana/dashboards/DORAByTeam.json index bc77030c68d..87143c07993 100644 --- a/grafana/dashboards/DORAByTeam.json +++ b/grafana/dashboards/DORAByTeam.json @@ -208,7 +208,7 @@ "format": "table", "hide": false, "rawQuery": true, - 
"rawSql": "-- Metric 1: Deployment Frequency\nwith last_few_calendar_months as(\n-- construct the last few calendar months within the selected time period in the top-right corner\n\tSELECT CAST((SYSDATE()-INTERVAL (H+T+U) DAY) AS date) day\n\tFROM ( SELECT 0 H\n\t\t\tUNION ALL SELECT 100 UNION ALL SELECT 200 UNION ALL SELECT 300\n\t\t) H CROSS JOIN ( SELECT 0 T\n\t\t\tUNION ALL SELECT 10 UNION ALL SELECT 20 UNION ALL SELECT 30\n\t\t\tUNION ALL SELECT 40 UNION ALL SELECT 50 UNION ALL SELECT 60\n\t\t\tUNION ALL SELECT 70 UNION ALL SELECT 80 UNION ALL SELECT 90\n\t\t) T CROSS JOIN ( SELECT 0 U\n\t\t\tUNION ALL SELECT 1 UNION ALL SELECT 2 UNION ALL SELECT 3\n\t\t\tUNION ALL SELECT 4 UNION ALL SELECT 5 UNION ALL SELECT 6\n\t\t\tUNION ALL SELECT 7 UNION ALL SELECT 8 UNION ALL SELECT 9\n\t\t) U\n\tWHERE\n\t\t(SYSDATE()-INTERVAL (H+T+U) DAY) > $__timeFrom()\n),\n\n_production_deployment_days as(\n-- When deploying multiple commits in one pipeline, GitLab and BitBucket may generate more than one deployment. However, DevLake consider these deployments as ONE production deployment and use the last one's finished_date as the finished date.\n\tSELECT\n\t\tcdc.cicd_deployment_id as deployment_id,\n\t\tmax(DATE(cdc.finished_date)) as day\n\tFROM cicd_deployment_commits cdc\n\tJOIN commits c on cdc.commit_sha = c.sha\n\tjoin user_accounts ua on c.author_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n\tJOIN project_mapping pm on cdc.cicd_scope_id = pm.row_id and pm.`table` = 'cicd_scopes'\n\tWHERE\n\t\tt.name in (${team:sqlstring}+'')\n\t\tand cdc.result = 'SUCCESS'\n\t\tand cdc.environment = 'PRODUCTION'\n\tGROUP BY 1\n),\n\n_days_weekly_deploy as(\n-- calculate the number of deployment days every week\n\tSELECT\n\t\t\tdate(DATE_ADD(last_few_calendar_months.day, INTERVAL -WEEKDAY(last_few_calendar_months.day) DAY)) as week,\n\t\t\tMAX(if(_production_deployment_days.day is not null, 1, 0)) as weeks_deployed,\n\t\t\tCOUNT(distinct _production_deployment_days.day) as days_deployed\n\tFROM \n\t\tlast_few_calendar_months\n\t\tLEFT JOIN _production_deployment_days ON _production_deployment_days.day = last_few_calendar_months.day\n\tGROUP BY week\n\t),\n\n_days_monthly_deploy as(\n-- calculate the number of deployment days every month\n\tSELECT\n\t\t\tdate(DATE_ADD(last_few_calendar_months.day, INTERVAL -DAY(last_few_calendar_months.day)+1 DAY)) as month,\n\t\t\tMAX(if(_production_deployment_days.day is not null, 1, null)) as months_deployed,\n\t\t COUNT(distinct _production_deployment_days.day) as days_deployed\n\tFROM \n\t\tlast_few_calendar_months\n\t\tLEFT JOIN _production_deployment_days ON _production_deployment_days.day = last_few_calendar_months.day\n\tGROUP BY month\n\t),\n\n_days_six_months_deploy AS (\n SELECT\n month,\n SUM(days_deployed) OVER (\n ORDER BY month\n ROWS BETWEEN 5 PRECEDING AND CURRENT ROW\n ) AS days_deployed_per_six_months,\n COUNT(months_deployed) OVER (\n ORDER BY month\n ROWS BETWEEN 5 PRECEDING AND CURRENT ROW\n ) AS months_deployed_count,\n ROW_NUMBER() OVER (\n PARTITION BY DATE_FORMAT(month, '%Y-%m') DIV 6\n ORDER BY month DESC\n ) AS rn\n FROM _days_monthly_deploy\n),\n\n_median_number_of_deployment_days_per_week_ranks as(\n\tSELECT *, percent_rank() over(order by days_deployed) as ranks\n\tFROM _days_weekly_deploy\n),\n\n_median_number_of_deployment_days_per_week as(\n\tSELECT max(days_deployed) as median_number_of_deployment_days_per_week\n\tFROM 
_median_number_of_deployment_days_per_week_ranks\n\tWHERE ranks <= 0.5\n),\n\n_median_number_of_deployment_days_per_month_ranks as(\n\tSELECT *, percent_rank() over(order by days_deployed) as ranks\n\tFROM _days_monthly_deploy\n),\n\n_median_number_of_deployment_days_per_month as(\n\tSELECT max(days_deployed) as median_number_of_deployment_days_per_month\n\tFROM _median_number_of_deployment_days_per_month_ranks\n\tWHERE ranks <= 0.5\n),\n\n_days_per_six_months_deploy_by_filter AS (\nSELECT\n month,\n days_deployed_per_six_months,\n months_deployed_count\nFROM _days_six_months_deploy\nWHERE rn%6 = 1\n),\n\n\n_median_number_of_deployment_days_per_six_months_ranks as(\n\tSELECT *, percent_rank() over(order by days_deployed_per_six_months) as ranks\n\tFROM _days_per_six_months_deploy_by_filter\n),\n\n_median_number_of_deployment_days_per_six_months as(\n\tSELECT min(days_deployed_per_six_months) as median_number_of_deployment_days_per_six_months, min(months_deployed_count) as is_collected\n\tFROM _median_number_of_deployment_days_per_six_months_ranks\n\tWHERE ranks >= 0.5\n),\n\n_metric_deployment_frequency as (\n\tSELECT \n\t 'Deployment frequency' as metric, \n\t\tCASE\n\t\t\tWHEN ('$benchmarks') = '2023 report' THEN\n\t\t\t\tCASE \n\t\t\t\t\tWHEN median_number_of_deployment_days_per_week >= 7 THEN 'On-demand(elite)'\n\t\t\t\t\tWHEN median_number_of_deployment_days_per_week >= 1 THEN 'Between once per day and once per week(high)'\n\t\t\t\t\tWHEN median_number_of_deployment_days_per_month >= 1 THEN 'Between once per week and once per month(medium)'\n\t\t\t\t\tWHEN median_number_of_deployment_days_per_month < 1 and is_collected != NULL THEN 'Fewer than once per month(low)'\n\t\t\t\t\tELSE \"N/A. Please check if you have collected deployments.\" END\n\t\t\tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\t\tCASE \n\t\t\t\t\tWHEN median_number_of_deployment_days_per_week >= 7 THEN 'On-demand(elite)'\n\t\t\t\t\tWHEN median_number_of_deployment_days_per_month >= 1 THEN 'Between once per day and once per month(high)'\n\t\t\t\t\tWHEN median_number_of_deployment_days_per_six_months >= 1 THEN 'Between once per month and once every 6 months(medium)'\n\t\t\t\t\tWHEN median_number_of_deployment_days_per_six_months < 1 and is_collected != NULL THEN 'Fewer than once per six months(low)'\n\t\t\t\t\tELSE \"N/A. 
Please check if you have collected deployments.\" END\n\t\t\tELSE 'Invalid Benchmarks'\n\t\tEND AS value\n\tFROM _median_number_of_deployment_days_per_week, _median_number_of_deployment_days_per_month, _median_number_of_deployment_days_per_six_months\n),\n\n-- Metric 2: median lead time for changes\n_pr_stats as (\n-- get the cycle time of PRs deployed by the deployments finished in the selected period\n\tSELECT\n\t\tdistinct pr.id,\n\t\tppm.pr_cycle_time\n\tFROM\n\t\tpull_requests pr\n\t\tjoin user_accounts ua on pr.author_id = ua.account_id\n \tjoin users u on ua.user_id = u.id\n \tjoin team_users tu on u.id = tu.user_id\n \tjoin teams t on tu.team_id = t.id\n\t\tjoin project_pr_metrics ppm on ppm.id = pr.id\n\t\tjoin project_mapping pm on pr.base_repo_id = pm.row_id and pm.`table` = 'repos'\n\t\tjoin cicd_deployment_commits cdc on ppm.deployment_commit_id = cdc.id\n\tWHERE\n\t t.name in (${team:sqlstring}+'') \n\t\tand pr.merged_date is not null\n\t\tand ppm.pr_cycle_time is not null\n\t\tand $__timeFilter(cdc.finished_date)\n),\n\n_median_change_lead_time_ranks as(\n\tSELECT *, percent_rank() over(order by pr_cycle_time) as ranks\n\tFROM _pr_stats\n),\n\n_median_change_lead_time as(\n-- use median PR cycle time as the median change lead time\n\tSELECT max(pr_cycle_time) as median_change_lead_time\n\tFROM _median_change_lead_time_ranks\n\tWHERE ranks <= 0.5\n),\n\n_metric_change_lead_time as (\n\tSELECT \n\t\t'Lead time for changes' as metric,\n\t\tCASE\n\t\t\tWHEN ('$benchmarks') = '2023 report' THEN\n\t\t\t\tCASE\n\t\t\t\t\tWHEN median_change_lead_time < 24 * 60 THEN \"Less than one day(elite)\"\n\t\t\t\t\tWHEN median_change_lead_time < 7 * 24 * 60 THEN \"Between one day and one week(high)\"\n\t\t\t\t\tWHEN median_change_lead_time < 30 * 24 * 60 THEN \"Between one week and one month(medium)\"\n\t\t\t\t\tWHEN median_change_lead_time >= 30 * 24 * 60 THEN \"More than one month(low)\"\n\t\t\t\t\tELSE \"N/A. Please check if you have collected deployments/pull_requests.\"\n\t\t\t\t\tEND\n\t\t\tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\t\tCASE\n\t\t\t\t\tWHEN median_change_lead_time < 60 THEN \"Less than one hour(elite)\"\n\t\t\t\t\tWHEN median_change_lead_time < 7 * 24 * 60 THEN \"Less than one week(high)\"\n\t\t\t\t\tWHEN median_change_lead_time < 180 * 24 * 60 THEN \"Between one week and six months(medium)\"\n\t\t\t\t\tWHEN median_change_lead_time >= 180 * 24 * 60 THEN \"More than six months(low)\"\n\t\t\t\t\tELSE \"N/A. 
Please check if you have collected deployments/pull_requests.\"\n\t\t\t\t\tEND\n\t\t\tELSE 'Invalid Benchmarks'\n\t\tEND AS value\nFROM _median_change_lead_time\n),\n\n-- Metric 3: Median time to restore service \n_incidents as (\n-- get the incidents created within the selected time period in the top-right corner\n\tSELECT\n\t distinct i.id,\n\t\tcast(lead_time_minutes as signed) as lead_time_minutes\n\tFROM\n\t\tissues i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id and pm.`table` = 'boards'\n\t join user_accounts ua on i.assignee_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n\tWHERE\n\t t.name in (${team:sqlstring}+'')\n\t\tand i.type = 'INCIDENT'\n\t\tand $__timeFilter(i.created_date)\n),\n\n_median_mttr_ranks as(\n\tSELECT *, percent_rank() over(order by lead_time_minutes) as ranks\n\tFROM _incidents\n),\n\n_median_mttr as(\n\tSELECT max(lead_time_minutes) as median_time_to_resolve\n\tFROM _median_mttr_ranks\n\tWHERE ranks <= 0.5\n),\n\n_metric_mttr as (\n\tSELECT \n\t\t'Time to restore service' as metric,\n\t\tCASE\n\t\t\tWHEN ('$benchmarks') = '2023 report' THEN\n\t\t\t\tCASE\n\t\t\t\t\tWHEN median_time_to_resolve < 60 THEN \"Less than one hour(elite)\"\n\t\t\t\t\tWHEN median_time_to_resolve < 24 * 60 THEN \"Less than one day(high)\"\n\t\t\t\t\tWHEN median_time_to_resolve < 7 * 24 * 60 THEN \"Between one day and one week(medium)\"\n\t\t\t\t\tWHEN median_time_to_resolve >= 7 * 24 * 60 THEN \"More than one week(low)\"\n\t\t\t\t\tELSE \"N/A. Please check if you have collected incidents.\"\n\t\t\t\t\tEND \n\t\t\tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\t\tCASE\n\t\t\t\t\tWHEN median_time_to_resolve < 60 THEN \"Less than one hour(elite)\"\n\t\t\t\t\tWHEN median_time_to_resolve < 24 * 60 THEN \"Less than one day(high)\"\n\t\t\t\t\tWHEN median_time_to_resolve < 7 * 24 * 60 THEN \"Between one day and one week(medium)\"\n\t\t\t\t\tWHEN median_time_to_resolve >= 7 * 24 * 60 THEN \"More than one week(low)\"\n\t\t\t\t\tELSE \"N/A. Please check if you have collected incidents.\"\n\t\t\t\t\tEND\n\t\t\tELSE 'Invalid Benchmarks'\n\t\tEND AS value\n\tFROM \n\t\t_median_mttr\n),\n\n-- Metric 4: change failure rate\n_deployments as (\n-- When deploying multiple commits in one pipeline, GitLab and BitBucket may generate more than one deployment. 
However, DevLake consider these deployments as ONE production deployment and use the last one's finished_date as the finished date.\n\tSELECT\n\t\tcdc.cicd_deployment_id as deployment_id,\n\t\tmax(cdc.finished_date) as deployment_finished_date\n\tFROM \n\t\tcicd_deployment_commits cdc\n\t JOIN commits c on cdc.commit_sha = c.sha\n\t join user_accounts ua on c.author_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n\t\tJOIN project_mapping pm on cdc.cicd_scope_id = pm.row_id and pm.`table` = 'cicd_scopes'\n\tWHERE\n\t\tt.name in (${team:sqlstring}+'')\n\t\tand cdc.result = 'SUCCESS'\n\t\tand cdc.environment = 'PRODUCTION'\n\tGROUP BY 1\n\tHAVING $__timeFilter(max(cdc.finished_date))\n),\n\n_failure_caused_by_deployments as (\n-- calculate the number of incidents caused by each deployment\n\tSELECT\n\t\td.deployment_id,\n\t\td.deployment_finished_date,\n\t\tcount(distinct case when i.type = 'INCIDENT' then d.deployment_id else null end) as has_incident\n\tFROM\n\t\t_deployments d\n\t\tleft join project_issue_metrics pim on d.deployment_id = pim.deployment_id\n\t\tleft join issues i on pim.id = i.id\n\tGROUP BY 1,2\n),\n\n_change_failure_rate as (\n\tSELECT \n\t\tcase \n\t\t\twhen count(deployment_id) is null then null\n\t\t\telse sum(has_incident)/count(deployment_id) end as change_failure_rate\n\tFROM\n\t\t_failure_caused_by_deployments\n),\n\n_is_collected_data as(\n\tSELECT\n CASE \n WHEN COUNT(i.id) = 0 AND COUNT(cdc.id) = 0 THEN 'No All'\n WHEN COUNT(i.id) = 0 THEN 'No Incidents' \n WHEN COUNT(cdc.id) = 0 THEN 'No Deployments'\n END AS is_collected\nFROM\n (SELECT 1) AS dummy\nLEFT JOIN\n issues i ON i.type = 'INCIDENT'\nLEFT JOIN\n cicd_deployment_commits cdc ON 1=1\n),\n\n_metric_cfr as (\n\tSELECT\n\t\t'Change failure rate' as metric,\n\t\tCASE\n\t\t\tWHEN ('$benchmarks') = '2023 report' THEN\n\t\t\t\tCASE \n\t\t\t\t\tWHEN is_collected = \"No All\" THEN \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\t WHEN is_collected = \"No Incidents\" THEN \"N/A. Please check if you have collected incidents.\"\n\t\t\t\t WHEN is_collected = \"No Deployments\" THEN \"N/A. Please check if you have collected deployments.\"\n\t\t\t\t\tWHEN change_failure_rate <= 5 THEN \"0-5%(elite)\"\n\t\t\t\t\tWHEN change_failure_rate <= .10 THEN \"5%-10%(high)\"\n\t\t\t\t\tWHEN change_failure_rate <= .15 THEN \"10%-15%(medium)\"\n\t\t\t\t\tWHEN change_failure_rate > .15 THEN \"> 15%(low)\"\n\t\t\t\t\tELSE \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\t\tEND\n\t\t\tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\t\tCASE \n\t\t\t\t\tWHEN is_collected = \"No All\" THEN \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\t WHEN is_collected = \"No Incidents\" THEN \"N/A. Please check if you have collected incidents.\"\n\t\t\t\t WHEN is_collected = \"No Deployments\" THEN \"N/A. Please check if you have collected deployments.\"\n\t\t\t\t\tWHEN change_failure_rate <= .15 THEN \"0-15%(elite)\"\n\t\t\t\t\tWHEN change_failure_rate <= .20 THEN \"16%-20%(high)\"\n\t\t\t\t\tWHEN change_failure_rate <= .30 THEN \"21%-30%(medium)\"\n\t\t\t\t\tWHEN change_failure_rate > .30 THEN \"> 30%(low)\" \n\t\t\t\t\tELSE \"N/A. 
Please check if you have collected deployments/incidents.\"\n\t\t\t\t\tEND\n\t\t\tELSE 'Invalid Benchmarks'\n\t\tEND AS value\n\tFROM \n\t\t_change_failure_rate, _is_collected_data\n),\n\n\n_final_results as (\t\n\tSELECT distinct db.id,db.metric,db.low,db.medium,db.high,db.elite,m1.metric as _metric, m1.value FROM dora_benchmarks db\n\tleft join _metric_deployment_frequency m1 on db.metric = m1.metric\n\tWHERE m1.metric is not null and db.benchmarks = ('$benchmarks')\n\t\n\tunion \n\t\n\tSELECT distinct db.id,db.metric,db.low,db.medium,db.high,db.elite,m2.metric as _metric, m2.value FROM dora_benchmarks db\n\tleft join _metric_change_lead_time m2 on db.metric = m2.metric\n\tWHERE m2.metric is not null and db.benchmarks = ('$benchmarks')\n\t\n\tunion \n\t\n\tSELECT distinct db.id,db.metric,db.low,db.medium,db.high,db.elite,m3.metric as _metric, m3.value FROM dora_benchmarks db\n\tleft join _metric_mttr m3 on db.metric = m3.metric\n\tWHERE m3.metric is not null and db.benchmarks = ('$benchmarks')\n\t\n\tunion \n\t\n\tSELECT distinct db.id,db.metric,db.low,db.medium,db.high,db.elite,m4.metric as _metric, m4.value FROM dora_benchmarks db\n\tleft join _metric_cfr m4 on db.metric = m4.metric\n\tWHERE m4.metric is not null and db.benchmarks = ('$benchmarks')\n)\n\n\nSELECT \n\tmetric,\n\tcase when low = value then low else null end as low,\n\tcase when medium = value then medium else null end as medium,\n\tcase when high = value then high else null end as high,\n\tcase when elite = value then elite else null end as elite\nFROM _final_results\nORDER BY id", + "rawSql": "-- Metric 1: Deployment Frequency\nwith last_few_calendar_months as(\n-- construct the last few calendar months within the selected time period in the top-right corner\n\tSELECT CAST((SYSDATE()-INTERVAL (H+T+U) DAY) AS date) day\n\tFROM ( SELECT 0 H\n\t\t\tUNION ALL SELECT 100 UNION ALL SELECT 200 UNION ALL SELECT 300\n\t\t) H CROSS JOIN ( SELECT 0 T\n\t\t\tUNION ALL SELECT 10 UNION ALL SELECT 20 UNION ALL SELECT 30\n\t\t\tUNION ALL SELECT 40 UNION ALL SELECT 50 UNION ALL SELECT 60\n\t\t\tUNION ALL SELECT 70 UNION ALL SELECT 80 UNION ALL SELECT 90\n\t\t) T CROSS JOIN ( SELECT 0 U\n\t\t\tUNION ALL SELECT 1 UNION ALL SELECT 2 UNION ALL SELECT 3\n\t\t\tUNION ALL SELECT 4 UNION ALL SELECT 5 UNION ALL SELECT 6\n\t\t\tUNION ALL SELECT 7 UNION ALL SELECT 8 UNION ALL SELECT 9\n\t\t) U\n\tWHERE\n\t\t(SYSDATE()-INTERVAL (H+T+U) DAY) > $__timeFrom()\n),\n\n_production_deployment_days as(\n-- When deploying multiple commits in one pipeline, GitLab and BitBucket may generate more than one deployment. 
However, DevLake consider these deployments as ONE production deployment and use the last one's finished_date as the finished date.\n\tSELECT\n\t\tcdc.cicd_deployment_id as deployment_id,\n\t\tmax(DATE(cdc.finished_date)) as day\n\tFROM cicd_deployment_commits cdc\n\tJOIN commits c on cdc.commit_sha = c.sha\n\tjoin user_accounts ua on c.author_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n\tJOIN project_mapping pm on cdc.cicd_scope_id = pm.row_id and pm.`table` = 'cicd_scopes'\n\tWHERE\n\t\tt.name in (${team})\n\t\tand cdc.result = 'SUCCESS'\n\t\tand cdc.environment = 'PRODUCTION'\n\tGROUP BY 1\n),\n\n_days_weekly_deploy as(\n-- calculate the number of deployment days every week\n\tSELECT\n\t\t\tdate(DATE_ADD(last_few_calendar_months.day, INTERVAL -WEEKDAY(last_few_calendar_months.day) DAY)) as week,\n\t\t\tMAX(if(_production_deployment_days.day is not null, 1, 0)) as weeks_deployed,\n\t\t\tCOUNT(distinct _production_deployment_days.day) as days_deployed\n\tFROM \n\t\tlast_few_calendar_months\n\t\tLEFT JOIN _production_deployment_days ON _production_deployment_days.day = last_few_calendar_months.day\n\tGROUP BY week\n\t),\n\n_days_monthly_deploy as(\n-- calculate the number of deployment days every month\n\tSELECT\n\t\t\tdate(DATE_ADD(last_few_calendar_months.day, INTERVAL -DAY(last_few_calendar_months.day)+1 DAY)) as month,\n\t\t\tMAX(if(_production_deployment_days.day is not null, 1, null)) as months_deployed,\n\t\t COUNT(distinct _production_deployment_days.day) as days_deployed\n\tFROM \n\t\tlast_few_calendar_months\n\t\tLEFT JOIN _production_deployment_days ON _production_deployment_days.day = last_few_calendar_months.day\n\tGROUP BY month\n\t),\n\n_days_six_months_deploy AS (\n SELECT\n month,\n SUM(days_deployed) OVER (\n ORDER BY month\n ROWS BETWEEN 5 PRECEDING AND CURRENT ROW\n ) AS days_deployed_per_six_months,\n COUNT(months_deployed) OVER (\n ORDER BY month\n ROWS BETWEEN 5 PRECEDING AND CURRENT ROW\n ) AS months_deployed_count,\n ROW_NUMBER() OVER (\n PARTITION BY DATE_FORMAT(month, '%Y-%m') DIV 6\n ORDER BY month DESC\n ) AS rn\n FROM _days_monthly_deploy\n),\n\n_median_number_of_deployment_days_per_week_ranks as(\n\tSELECT *, percent_rank() over(order by days_deployed) as ranks\n\tFROM _days_weekly_deploy\n),\n\n_median_number_of_deployment_days_per_week as(\n\tSELECT max(days_deployed) as median_number_of_deployment_days_per_week\n\tFROM _median_number_of_deployment_days_per_week_ranks\n\tWHERE ranks <= 0.5\n),\n\n_median_number_of_deployment_days_per_month_ranks as(\n\tSELECT *, percent_rank() over(order by days_deployed) as ranks\n\tFROM _days_monthly_deploy\n),\n\n_median_number_of_deployment_days_per_month as(\n\tSELECT max(days_deployed) as median_number_of_deployment_days_per_month\n\tFROM _median_number_of_deployment_days_per_month_ranks\n\tWHERE ranks <= 0.5\n),\n\n_days_per_six_months_deploy_by_filter AS (\nSELECT\n month,\n days_deployed_per_six_months,\n months_deployed_count\nFROM _days_six_months_deploy\nWHERE rn%6 = 1\n),\n\n\n_median_number_of_deployment_days_per_six_months_ranks as(\n\tSELECT *, percent_rank() over(order by days_deployed_per_six_months) as ranks\n\tFROM _days_per_six_months_deploy_by_filter\n),\n\n_median_number_of_deployment_days_per_six_months as(\n\tSELECT min(days_deployed_per_six_months) as median_number_of_deployment_days_per_six_months, min(months_deployed_count) as is_collected\n\tFROM 
_median_number_of_deployment_days_per_six_months_ranks\n\tWHERE ranks >= 0.5\n),\n\n_metric_deployment_frequency as (\n\tSELECT \n\t 'Deployment frequency' as metric, \n\t\tCASE\n\t\t\tWHEN ('$benchmarks') = '2023 report' THEN\n\t\t\t\tCASE \n\t\t\t\t\tWHEN median_number_of_deployment_days_per_week >= 7 THEN 'On-demand(elite)'\n\t\t\t\t\tWHEN median_number_of_deployment_days_per_week >= 1 THEN 'Between once per day and once per week(high)'\n\t\t\t\t\tWHEN median_number_of_deployment_days_per_month >= 1 THEN 'Between once per week and once per month(medium)'\n\t\t\t\t\tWHEN median_number_of_deployment_days_per_month < 1 and is_collected != NULL THEN 'Fewer than once per month(low)'\n\t\t\t\t\tELSE \"N/A. Please check if you have collected deployments.\" END\n\t\t\tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\t\tCASE \n\t\t\t\t\tWHEN median_number_of_deployment_days_per_week >= 7 THEN 'On-demand(elite)'\n\t\t\t\t\tWHEN median_number_of_deployment_days_per_month >= 1 THEN 'Between once per day and once per month(high)'\n\t\t\t\t\tWHEN median_number_of_deployment_days_per_six_months >= 1 THEN 'Between once per month and once every 6 months(medium)'\n\t\t\t\t\tWHEN median_number_of_deployment_days_per_six_months < 1 and is_collected != NULL THEN 'Fewer than once per six months(low)'\n\t\t\t\t\tELSE \"N/A. Please check if you have collected deployments.\" END\n\t\t\tELSE 'Invalid Benchmarks'\n\t\tEND AS value\n\tFROM _median_number_of_deployment_days_per_week, _median_number_of_deployment_days_per_month, _median_number_of_deployment_days_per_six_months\n),\n\n-- Metric 2: median lead time for changes\n_pr_stats as (\n-- get the cycle time of PRs deployed by the deployments finished in the selected period\n\tSELECT\n\t\tdistinct pr.id,\n\t\tppm.pr_cycle_time\n\tFROM\n\t\tpull_requests pr\n\t\tjoin user_accounts ua on pr.author_id = ua.account_id\n \tjoin users u on ua.user_id = u.id\n \tjoin team_users tu on u.id = tu.user_id\n \tjoin teams t on tu.team_id = t.id\n\t\tjoin project_pr_metrics ppm on ppm.id = pr.id\n\t\tjoin project_mapping pm on pr.base_repo_id = pm.row_id and pm.`table` = 'repos'\n\t\tjoin cicd_deployment_commits cdc on ppm.deployment_commit_id = cdc.id\n\tWHERE\n\t t.name in (${team}) \n\t\tand pr.merged_date is not null\n\t\tand ppm.pr_cycle_time is not null\n\t\tand $__timeFilter(cdc.finished_date)\n),\n\n_median_change_lead_time_ranks as(\n\tSELECT *, percent_rank() over(order by pr_cycle_time) as ranks\n\tFROM _pr_stats\n),\n\n_median_change_lead_time as(\n-- use median PR cycle time as the median change lead time\n\tSELECT max(pr_cycle_time) as median_change_lead_time\n\tFROM _median_change_lead_time_ranks\n\tWHERE ranks <= 0.5\n),\n\n_metric_change_lead_time as (\n\tSELECT \n\t\t'Lead time for changes' as metric,\n\t\tCASE\n\t\t\tWHEN ('$benchmarks') = '2023 report' THEN\n\t\t\t\tCASE\n\t\t\t\t\tWHEN median_change_lead_time < 24 * 60 THEN \"Less than one day(elite)\"\n\t\t\t\t\tWHEN median_change_lead_time < 7 * 24 * 60 THEN \"Between one day and one week(high)\"\n\t\t\t\t\tWHEN median_change_lead_time < 30 * 24 * 60 THEN \"Between one week and one month(medium)\"\n\t\t\t\t\tWHEN median_change_lead_time >= 30 * 24 * 60 THEN \"More than one month(low)\"\n\t\t\t\t\tELSE \"N/A. 
Please check if you have collected deployments/pull_requests.\"\n\t\t\t\t\tEND\n\t\t\tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\t\tCASE\n\t\t\t\t\tWHEN median_change_lead_time < 60 THEN \"Less than one hour(elite)\"\n\t\t\t\t\tWHEN median_change_lead_time < 7 * 24 * 60 THEN \"Less than one week(high)\"\n\t\t\t\t\tWHEN median_change_lead_time < 180 * 24 * 60 THEN \"Between one week and six months(medium)\"\n\t\t\t\t\tWHEN median_change_lead_time >= 180 * 24 * 60 THEN \"More than six months(low)\"\n\t\t\t\t\tELSE \"N/A. Please check if you have collected deployments/pull_requests.\"\n\t\t\t\t\tEND\n\t\t\tELSE 'Invalid Benchmarks'\n\t\tEND AS value\nFROM _median_change_lead_time\n),\n\n-- Metric 3: Median time to restore service \n_incidents as (\n-- get the incidents created within the selected time period in the top-right corner\n\tSELECT\n\t distinct i.id,\n\t\tcast(lead_time_minutes as signed) as lead_time_minutes\n\tFROM\n\t\tissues i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id and pm.`table` = 'boards'\n\t join user_accounts ua on i.assignee_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n\tWHERE\n\t t.name in (${team})\n\t\tand i.type = 'INCIDENT'\n\t\tand $__timeFilter(i.created_date)\n),\n\n_median_mttr_ranks as(\n\tSELECT *, percent_rank() over(order by lead_time_minutes) as ranks\n\tFROM _incidents\n),\n\n_median_mttr as(\n\tSELECT max(lead_time_minutes) as median_time_to_resolve\n\tFROM _median_mttr_ranks\n\tWHERE ranks <= 0.5\n),\n\n_metric_mttr as (\n\tSELECT \n\t\t'Time to restore service' as metric,\n\t\tCASE\n\t\t\tWHEN ('$benchmarks') = '2023 report' THEN\n\t\t\t\tCASE\n\t\t\t\t\tWHEN median_time_to_resolve < 60 THEN \"Less than one hour(elite)\"\n\t\t\t\t\tWHEN median_time_to_resolve < 24 * 60 THEN \"Less than one day(high)\"\n\t\t\t\t\tWHEN median_time_to_resolve < 7 * 24 * 60 THEN \"Between one day and one week(medium)\"\n\t\t\t\t\tWHEN median_time_to_resolve >= 7 * 24 * 60 THEN \"More than one week(low)\"\n\t\t\t\t\tELSE \"N/A. Please check if you have collected incidents.\"\n\t\t\t\t\tEND \n\t\t\tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\t\tCASE\n\t\t\t\t\tWHEN median_time_to_resolve < 60 THEN \"Less than one hour(elite)\"\n\t\t\t\t\tWHEN median_time_to_resolve < 24 * 60 THEN \"Less than one day(high)\"\n\t\t\t\t\tWHEN median_time_to_resolve < 7 * 24 * 60 THEN \"Between one day and one week(medium)\"\n\t\t\t\t\tWHEN median_time_to_resolve >= 7 * 24 * 60 THEN \"More than one week(low)\"\n\t\t\t\t\tELSE \"N/A. Please check if you have collected incidents.\"\n\t\t\t\t\tEND\n\t\t\tELSE 'Invalid Benchmarks'\n\t\tEND AS value\n\tFROM \n\t\t_median_mttr\n),\n\n-- Metric 4: change failure rate\n_deployments as (\n-- When deploying multiple commits in one pipeline, GitLab and BitBucket may generate more than one deployment. 
However, DevLake consider these deployments as ONE production deployment and use the last one's finished_date as the finished date.\n\tSELECT\n\t\tcdc.cicd_deployment_id as deployment_id,\n\t\tmax(cdc.finished_date) as deployment_finished_date\n\tFROM \n\t\tcicd_deployment_commits cdc\n\t JOIN commits c on cdc.commit_sha = c.sha\n\t join user_accounts ua on c.author_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n\t\tJOIN project_mapping pm on cdc.cicd_scope_id = pm.row_id and pm.`table` = 'cicd_scopes'\n\tWHERE\n\t\tt.name in (${team})\n\t\tand cdc.result = 'SUCCESS'\n\t\tand cdc.environment = 'PRODUCTION'\n\tGROUP BY 1\n\tHAVING $__timeFilter(max(cdc.finished_date))\n),\n\n_failure_caused_by_deployments as (\n-- calculate the number of incidents caused by each deployment\n\tSELECT\n\t\td.deployment_id,\n\t\td.deployment_finished_date,\n\t\tcount(distinct case when i.type = 'INCIDENT' then d.deployment_id else null end) as has_incident\n\tFROM\n\t\t_deployments d\n\t\tleft join project_issue_metrics pim on d.deployment_id = pim.deployment_id\n\t\tleft join issues i on pim.id = i.id\n\tGROUP BY 1,2\n),\n\n_change_failure_rate as (\n\tSELECT \n\t\tcase \n\t\t\twhen count(deployment_id) is null then null\n\t\t\telse sum(has_incident)/count(deployment_id) end as change_failure_rate\n\tFROM\n\t\t_failure_caused_by_deployments\n),\n\n_is_collected_data as(\n\tSELECT\n CASE \n WHEN COUNT(i.id) = 0 AND COUNT(cdc.id) = 0 THEN 'No All'\n WHEN COUNT(i.id) = 0 THEN 'No Incidents' \n WHEN COUNT(cdc.id) = 0 THEN 'No Deployments'\n END AS is_collected\nFROM\n (SELECT 1) AS dummy\nLEFT JOIN\n issues i ON i.type = 'INCIDENT'\nLEFT JOIN\n cicd_deployment_commits cdc ON 1=1\n),\n\n_metric_cfr as (\n\tSELECT\n\t\t'Change failure rate' as metric,\n\t\tCASE\n\t\t\tWHEN ('$benchmarks') = '2023 report' THEN\n\t\t\t\tCASE \n\t\t\t\t\tWHEN is_collected = \"No All\" THEN \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\t WHEN is_collected = \"No Incidents\" THEN \"N/A. Please check if you have collected incidents.\"\n\t\t\t\t WHEN is_collected = \"No Deployments\" THEN \"N/A. Please check if you have collected deployments.\"\n\t\t\t\t\tWHEN change_failure_rate <= 5 THEN \"0-5%(elite)\"\n\t\t\t\t\tWHEN change_failure_rate <= .10 THEN \"5%-10%(high)\"\n\t\t\t\t\tWHEN change_failure_rate <= .15 THEN \"10%-15%(medium)\"\n\t\t\t\t\tWHEN change_failure_rate > .15 THEN \"> 15%(low)\"\n\t\t\t\t\tELSE \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\t\tEND\n\t\t\tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\t\tCASE \n\t\t\t\t\tWHEN is_collected = \"No All\" THEN \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\t WHEN is_collected = \"No Incidents\" THEN \"N/A. Please check if you have collected incidents.\"\n\t\t\t\t WHEN is_collected = \"No Deployments\" THEN \"N/A. Please check if you have collected deployments.\"\n\t\t\t\t\tWHEN change_failure_rate <= .15 THEN \"0-15%(elite)\"\n\t\t\t\t\tWHEN change_failure_rate <= .20 THEN \"16%-20%(high)\"\n\t\t\t\t\tWHEN change_failure_rate <= .30 THEN \"21%-30%(medium)\"\n\t\t\t\t\tWHEN change_failure_rate > .30 THEN \"> 30%(low)\" \n\t\t\t\t\tELSE \"N/A. 
Please check if you have collected deployments/incidents.\"\n\t\t\t\t\tEND\n\t\t\tELSE 'Invalid Benchmarks'\n\t\tEND AS value\n\tFROM \n\t\t_change_failure_rate, _is_collected_data\n),\n\n\n_final_results as (\t\n\tSELECT distinct db.id,db.metric,db.low,db.medium,db.high,db.elite,m1.metric as _metric, m1.value FROM dora_benchmarks db\n\tleft join _metric_deployment_frequency m1 on db.metric = m1.metric\n\tWHERE m1.metric is not null and db.benchmarks = ('$benchmarks')\n\t\n\tunion \n\t\n\tSELECT distinct db.id,db.metric,db.low,db.medium,db.high,db.elite,m2.metric as _metric, m2.value FROM dora_benchmarks db\n\tleft join _metric_change_lead_time m2 on db.metric = m2.metric\n\tWHERE m2.metric is not null and db.benchmarks = ('$benchmarks')\n\t\n\tunion \n\t\n\tSELECT distinct db.id,db.metric,db.low,db.medium,db.high,db.elite,m3.metric as _metric, m3.value FROM dora_benchmarks db\n\tleft join _metric_mttr m3 on db.metric = m3.metric\n\tWHERE m3.metric is not null and db.benchmarks = ('$benchmarks')\n\t\n\tunion \n\t\n\tSELECT distinct db.id,db.metric,db.low,db.medium,db.high,db.elite,m4.metric as _metric, m4.value FROM dora_benchmarks db\n\tleft join _metric_cfr m4 on db.metric = m4.metric\n\tWHERE m4.metric is not null and db.benchmarks = ('$benchmarks')\n)\n\n\nSELECT \n\tmetric,\n\tcase when low = value then low else null end as low,\n\tcase when medium = value then medium else null end as medium,\n\tcase when high = value then high else null end as high,\n\tcase when elite = value then elite else null end as elite\nFROM _final_results\nORDER BY id", "refId": "A", "sql": { "columns": [ @@ -326,7 +326,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "-- Metric 1: Deployment Frequency\nwith last_few_calendar_months as(\n-- construct the last few calendar months within the selected time period in the top-right corner\n\tSELECT CAST((SYSDATE()-INTERVAL (H+T+U) DAY) AS date) day\n\tFROM ( SELECT 0 H\n\t\t\tUNION ALL SELECT 100 UNION ALL SELECT 200 UNION ALL SELECT 300\n\t\t) H CROSS JOIN ( SELECT 0 T\n\t\t\tUNION ALL SELECT 10 UNION ALL SELECT 20 UNION ALL SELECT 30\n\t\t\tUNION ALL SELECT 40 UNION ALL SELECT 50 UNION ALL SELECT 60\n\t\t\tUNION ALL SELECT 70 UNION ALL SELECT 80 UNION ALL SELECT 90\n\t\t) T CROSS JOIN ( SELECT 0 U\n\t\t\tUNION ALL SELECT 1 UNION ALL SELECT 2 UNION ALL SELECT 3\n\t\t\tUNION ALL SELECT 4 UNION ALL SELECT 5 UNION ALL SELECT 6\n\t\t\tUNION ALL SELECT 7 UNION ALL SELECT 8 UNION ALL SELECT 9\n\t\t) U\n\tWHERE\n\t\t(SYSDATE()-INTERVAL (H+T+U) DAY) > $__timeFrom()\n),\n\n_production_deployment_days as(\n-- When deploying multiple commits in one pipeline, GitLab and BitBucket may generate more than one deployment. 
However, DevLake consider these deployments as ONE production deployment and use the last one's finished_date as the finished date.\n\tSELECT\n\t\tcdc.cicd_deployment_id as deployment_id,\n\t\tmax(DATE(cdc.finished_date)) as day\n\tFROM cicd_deployment_commits cdc\n\tJOIN commits c on cdc.commit_sha = c.sha\n\tjoin user_accounts ua on c.author_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n\tJOIN project_mapping pm on cdc.cicd_scope_id = pm.row_id and pm.`table` = 'cicd_scopes'\n\tWHERE\n\t\tt.name in (${team:sqlstring}+'')\n\t\tand cdc.result = 'SUCCESS'\n\t\tand cdc.environment = 'PRODUCTION'\n\tGROUP BY 1\n),\n\n_days_weekly_deploy as(\n-- calculate the number of deployment days every week\n\tSELECT\n\t\t\tdate(DATE_ADD(last_few_calendar_months.day, INTERVAL -WEEKDAY(last_few_calendar_months.day) DAY)) as week,\n\t\t\tMAX(if(_production_deployment_days.day is not null, 1, 0)) as weeks_deployed,\n\t\t\tCOUNT(distinct _production_deployment_days.day) as days_deployed\n\tFROM \n\t\tlast_few_calendar_months\n\t\tLEFT JOIN _production_deployment_days ON _production_deployment_days.day = last_few_calendar_months.day\n\tGROUP BY week\n\t),\n\n_days_monthly_deploy as(\n-- calculate the number of deployment days every month\n\tSELECT\n\t\t\tdate(DATE_ADD(last_few_calendar_months.day, INTERVAL -DAY(last_few_calendar_months.day)+1 DAY)) as month,\n\t\t\tMAX(if(_production_deployment_days.day is not null, 1, null)) as months_deployed,\n\t\t COUNT(distinct _production_deployment_days.day) as days_deployed\n\tFROM \n\t\tlast_few_calendar_months\n\t\tLEFT JOIN _production_deployment_days ON _production_deployment_days.day = last_few_calendar_months.day\n\tGROUP BY month\n\t),\n\n_days_six_months_deploy AS (\n SELECT\n month,\n SUM(days_deployed) OVER (\n ORDER BY month\n ROWS BETWEEN 5 PRECEDING AND CURRENT ROW\n ) AS days_deployed_per_six_months,\n COUNT(months_deployed) OVER (\n ORDER BY month\n ROWS BETWEEN 5 PRECEDING AND CURRENT ROW\n ) AS months_deployed_count,\n ROW_NUMBER() OVER (\n PARTITION BY DATE_FORMAT(month, '%Y-%m') DIV 6\n ORDER BY month DESC\n ) AS rn\n FROM _days_monthly_deploy\n),\n\n_median_number_of_deployment_days_per_week_ranks as(\n\tSELECT *, percent_rank() over(order by days_deployed) as ranks\n\tFROM _days_weekly_deploy\n),\n\n_median_number_of_deployment_days_per_week as(\n\tSELECT max(days_deployed) as median_number_of_deployment_days_per_week\n\tFROM _median_number_of_deployment_days_per_week_ranks\n\tWHERE ranks <= 0.5\n),\n\n_median_number_of_deployment_days_per_month_ranks as(\n\tSELECT *, percent_rank() over(order by days_deployed) as ranks\n\tFROM _days_monthly_deploy\n),\n\n_median_number_of_deployment_days_per_month as(\n\tSELECT max(days_deployed) as median_number_of_deployment_days_per_month\n\tFROM _median_number_of_deployment_days_per_month_ranks\n\tWHERE ranks <= 0.5\n),\n\n_days_per_six_months_deploy_by_filter AS (\nSELECT\n month,\n days_deployed_per_six_months,\n months_deployed_count\nFROM _days_six_months_deploy\nWHERE rn%6 = 1\n),\n\n\n_median_number_of_deployment_days_per_six_months_ranks as(\n\tSELECT *, percent_rank() over(order by days_deployed_per_six_months) as ranks\n\tFROM _days_per_six_months_deploy_by_filter\n),\n\n_median_number_of_deployment_days_per_six_months as(\n\tSELECT min(days_deployed_per_six_months) as median_number_of_deployment_days_per_six_months, min(months_deployed_count) as is_collected\n\tFROM 
_median_number_of_deployment_days_per_six_months_ranks\n\tWHERE ranks >= 0.5\n)\n\nSELECT \n CASE\n WHEN ('$benchmarks') = '2023 report' THEN\n\t\t\tCASE \n\t\t\t\tWHEN median_number_of_deployment_days_per_week >= 7 THEN 'On-demand(elite)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_week >= 1 THEN 'Between once per day and once per week(high)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_month >= 1 THEN 'Between once per week and once per month(medium)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_month < 1 and is_collected != NULL THEN 'Fewer than once per month(low)'\n\t\t\t\tELSE \"N/A. Please check if you have collected deployments.\" END\n\t \tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\tCASE \n\t\t\t\tWHEN median_number_of_deployment_days_per_week >= 7 THEN 'On-demand(elite)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_month >= 1 THEN 'Between once per day and once per month(high)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_six_months >= 1 THEN 'Between once per month and once every 6 months(medium)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_six_months < 1 and is_collected != NULL THEN 'Fewer than once per six months(low)'\n\t\t\t\tELSE \"N/A. Please check if you have collected deployments.\" END\n\t\tELSE 'Invalid Benchmarks'\n\tEND AS 'Deployment Frequency'\nFROM _median_number_of_deployment_days_per_week, _median_number_of_deployment_days_per_month, _median_number_of_deployment_days_per_six_months", + "rawSql": "-- Metric 1: Deployment Frequency\nwith last_few_calendar_months as(\n-- construct the last few calendar months within the selected time period in the top-right corner\n\tSELECT CAST((SYSDATE()-INTERVAL (H+T+U) DAY) AS date) day\n\tFROM ( SELECT 0 H\n\t\t\tUNION ALL SELECT 100 UNION ALL SELECT 200 UNION ALL SELECT 300\n\t\t) H CROSS JOIN ( SELECT 0 T\n\t\t\tUNION ALL SELECT 10 UNION ALL SELECT 20 UNION ALL SELECT 30\n\t\t\tUNION ALL SELECT 40 UNION ALL SELECT 50 UNION ALL SELECT 60\n\t\t\tUNION ALL SELECT 70 UNION ALL SELECT 80 UNION ALL SELECT 90\n\t\t) T CROSS JOIN ( SELECT 0 U\n\t\t\tUNION ALL SELECT 1 UNION ALL SELECT 2 UNION ALL SELECT 3\n\t\t\tUNION ALL SELECT 4 UNION ALL SELECT 5 UNION ALL SELECT 6\n\t\t\tUNION ALL SELECT 7 UNION ALL SELECT 8 UNION ALL SELECT 9\n\t\t) U\n\tWHERE\n\t\t(SYSDATE()-INTERVAL (H+T+U) DAY) > $__timeFrom()\n),\n\n_production_deployment_days as(\n-- When deploying multiple commits in one pipeline, GitLab and BitBucket may generate more than one deployment. 
However, DevLake consider these deployments as ONE production deployment and use the last one's finished_date as the finished date.\n\tSELECT\n\t\tcdc.cicd_deployment_id as deployment_id,\n\t\tmax(DATE(cdc.finished_date)) as day\n\tFROM cicd_deployment_commits cdc\n\tJOIN commits c on cdc.commit_sha = c.sha\n\tjoin user_accounts ua on c.author_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n\tJOIN project_mapping pm on cdc.cicd_scope_id = pm.row_id and pm.`table` = 'cicd_scopes'\n\tWHERE\n\t\tt.name in (${team})\n\t\tand cdc.result = 'SUCCESS'\n\t\tand cdc.environment = 'PRODUCTION'\n\tGROUP BY 1\n),\n\n_days_weekly_deploy as(\n-- calculate the number of deployment days every week\n\tSELECT\n\t\t\tdate(DATE_ADD(last_few_calendar_months.day, INTERVAL -WEEKDAY(last_few_calendar_months.day) DAY)) as week,\n\t\t\tMAX(if(_production_deployment_days.day is not null, 1, 0)) as weeks_deployed,\n\t\t\tCOUNT(distinct _production_deployment_days.day) as days_deployed\n\tFROM \n\t\tlast_few_calendar_months\n\t\tLEFT JOIN _production_deployment_days ON _production_deployment_days.day = last_few_calendar_months.day\n\tGROUP BY week\n\t),\n\n_days_monthly_deploy as(\n-- calculate the number of deployment days every month\n\tSELECT\n\t\t\tdate(DATE_ADD(last_few_calendar_months.day, INTERVAL -DAY(last_few_calendar_months.day)+1 DAY)) as month,\n\t\t\tMAX(if(_production_deployment_days.day is not null, 1, null)) as months_deployed,\n\t\t COUNT(distinct _production_deployment_days.day) as days_deployed\n\tFROM \n\t\tlast_few_calendar_months\n\t\tLEFT JOIN _production_deployment_days ON _production_deployment_days.day = last_few_calendar_months.day\n\tGROUP BY month\n\t),\n\n_days_six_months_deploy AS (\n SELECT\n month,\n SUM(days_deployed) OVER (\n ORDER BY month\n ROWS BETWEEN 5 PRECEDING AND CURRENT ROW\n ) AS days_deployed_per_six_months,\n COUNT(months_deployed) OVER (\n ORDER BY month\n ROWS BETWEEN 5 PRECEDING AND CURRENT ROW\n ) AS months_deployed_count,\n ROW_NUMBER() OVER (\n PARTITION BY DATE_FORMAT(month, '%Y-%m') DIV 6\n ORDER BY month DESC\n ) AS rn\n FROM _days_monthly_deploy\n),\n\n_median_number_of_deployment_days_per_week_ranks as(\n\tSELECT *, percent_rank() over(order by days_deployed) as ranks\n\tFROM _days_weekly_deploy\n),\n\n_median_number_of_deployment_days_per_week as(\n\tSELECT max(days_deployed) as median_number_of_deployment_days_per_week\n\tFROM _median_number_of_deployment_days_per_week_ranks\n\tWHERE ranks <= 0.5\n),\n\n_median_number_of_deployment_days_per_month_ranks as(\n\tSELECT *, percent_rank() over(order by days_deployed) as ranks\n\tFROM _days_monthly_deploy\n),\n\n_median_number_of_deployment_days_per_month as(\n\tSELECT max(days_deployed) as median_number_of_deployment_days_per_month\n\tFROM _median_number_of_deployment_days_per_month_ranks\n\tWHERE ranks <= 0.5\n),\n\n_days_per_six_months_deploy_by_filter AS (\nSELECT\n month,\n days_deployed_per_six_months,\n months_deployed_count\nFROM _days_six_months_deploy\nWHERE rn%6 = 1\n),\n\n\n_median_number_of_deployment_days_per_six_months_ranks as(\n\tSELECT *, percent_rank() over(order by days_deployed_per_six_months) as ranks\n\tFROM _days_per_six_months_deploy_by_filter\n),\n\n_median_number_of_deployment_days_per_six_months as(\n\tSELECT min(days_deployed_per_six_months) as median_number_of_deployment_days_per_six_months, min(months_deployed_count) as is_collected\n\tFROM 
_median_number_of_deployment_days_per_six_months_ranks\n\tWHERE ranks >= 0.5\n)\n\nSELECT \n CASE\n WHEN ('$benchmarks') = '2023 report' THEN\n\t\t\tCASE \n\t\t\t\tWHEN median_number_of_deployment_days_per_week >= 7 THEN 'On-demand(elite)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_week >= 1 THEN 'Between once per day and once per week(high)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_month >= 1 THEN 'Between once per week and once per month(medium)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_month < 1 and is_collected != NULL THEN 'Fewer than once per month(low)'\n\t\t\t\tELSE \"N/A. Please check if you have collected deployments.\" END\n\t \tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\tCASE \n\t\t\t\tWHEN median_number_of_deployment_days_per_week >= 7 THEN 'On-demand(elite)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_month >= 1 THEN 'Between once per day and once per month(high)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_six_months >= 1 THEN 'Between once per month and once every 6 months(medium)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_six_months < 1 and is_collected != NULL THEN 'Fewer than once per six months(low)'\n\t\t\t\tELSE \"N/A. Please check if you have collected deployments.\" END\n\t\tELSE 'Invalid Benchmarks'\n\tEND AS 'Deployment Frequency'\nFROM _median_number_of_deployment_days_per_week, _median_number_of_deployment_days_per_month, _median_number_of_deployment_days_per_six_months", "refId": "A", "select": [ [ @@ -462,7 +462,7 @@ "format": "table", "hide": false, "rawQuery": true, - "rawSql": "-- Metric 2: median lead time for changes\nwith _pr_stats as (\n-- get the cycle time of PRs deployed by the deployments finished in the selected period\n\tSELECT\n\t\tdistinct pr.id,\n\t\tppm.pr_cycle_time\n\tFROM\n\t\tpull_requests pr\n\t\tjoin user_accounts ua on pr.author_id = ua.account_id\n \tjoin users u on ua.user_id = u.id\n \tjoin team_users tu on u.id = tu.user_id\n \tjoin teams t on tu.team_id = t.id\n\t\tjoin project_pr_metrics ppm on ppm.id = pr.id\n\t\tjoin project_mapping pm on pr.base_repo_id = pm.row_id and pm.`table` = 'repos'\n\t\tjoin cicd_deployment_commits cdc on ppm.deployment_commit_id = cdc.id\n\tWHERE\n\t t.name in (${team:sqlstring}+'') \n\t\tand pr.merged_date is not null\n\t\tand ppm.pr_cycle_time is not null\n\t\tand $__timeFilter(cdc.finished_date)\n),\n\n_median_change_lead_time_ranks as(\n\tSELECT *, percent_rank() over(order by pr_cycle_time) as ranks\n\tFROM _pr_stats\n),\n\n_median_change_lead_time as(\n-- use median PR cycle time as the median change lead time\n\tSELECT max(pr_cycle_time) as median_change_lead_time\n\tFROM _median_change_lead_time_ranks\n\tWHERE ranks <= 0.5\n)\n\nSELECT \n CASE\n WHEN ('$benchmarks') = '2023 report' THEN\n\t\t\tCASE\n\t\t\t\tWHEN median_change_lead_time < 24 * 60 THEN \"Less than one day(elite)\"\n\t\t\t\tWHEN median_change_lead_time < 7 * 24 * 60 THEN \"Between one day and one week(high)\"\n\t\t\t\tWHEN median_change_lead_time < 30 * 24 * 60 THEN \"Between one week and one month(medium)\"\n\t\t\t\tWHEN median_change_lead_time >= 30 * 24 * 60 THEN \"More than one month(low)\"\n\t\t\t\tELSE \"N/A. 
Please check if you have collected deployments/pull_requests.\"\n\t\t\t\tEND\n WHEN ('$benchmarks') = '2021 report' THEN\n\t\t CASE\n\t\t\t\tWHEN median_change_lead_time < 60 THEN \"Less than one hour(elite)\"\n\t\t\t\tWHEN median_change_lead_time < 7 * 24 * 60 THEN \"Less than one week(high)\"\n\t\t\t\tWHEN median_change_lead_time < 180 * 24 * 60 THEN \"Between one week and six months(medium)\"\n\t\t\t\tWHEN median_change_lead_time >= 180 * 24 * 60 THEN \"More than six months(low)\"\n\t\t\t\tELSE \"N/A. Please check if you have collected deployments/pull_requests.\"\n\t\t\t\tEND\n\t\tELSE 'Invalid Benchmarks'\n\tEND AS median_change_lead_time\nFROM _median_change_lead_time", + "rawSql": "-- Metric 2: median lead time for changes\nwith _pr_stats as (\n-- get the cycle time of PRs deployed by the deployments finished in the selected period\n\tSELECT\n\t\tdistinct pr.id,\n\t\tppm.pr_cycle_time\n\tFROM\n\t\tpull_requests pr\n\t\tjoin user_accounts ua on pr.author_id = ua.account_id\n \tjoin users u on ua.user_id = u.id\n \tjoin team_users tu on u.id = tu.user_id\n \tjoin teams t on tu.team_id = t.id\n\t\tjoin project_pr_metrics ppm on ppm.id = pr.id\n\t\tjoin project_mapping pm on pr.base_repo_id = pm.row_id and pm.`table` = 'repos'\n\t\tjoin cicd_deployment_commits cdc on ppm.deployment_commit_id = cdc.id\n\tWHERE\n\t t.name in (${team}) \n\t\tand pr.merged_date is not null\n\t\tand ppm.pr_cycle_time is not null\n\t\tand $__timeFilter(cdc.finished_date)\n),\n\n_median_change_lead_time_ranks as(\n\tSELECT *, percent_rank() over(order by pr_cycle_time) as ranks\n\tFROM _pr_stats\n),\n\n_median_change_lead_time as(\n-- use median PR cycle time as the median change lead time\n\tSELECT max(pr_cycle_time) as median_change_lead_time\n\tFROM _median_change_lead_time_ranks\n\tWHERE ranks <= 0.5\n)\n\nSELECT \n CASE\n WHEN ('$benchmarks') = '2023 report' THEN\n\t\t\tCASE\n\t\t\t\tWHEN median_change_lead_time < 24 * 60 THEN \"Less than one day(elite)\"\n\t\t\t\tWHEN median_change_lead_time < 7 * 24 * 60 THEN \"Between one day and one week(high)\"\n\t\t\t\tWHEN median_change_lead_time < 30 * 24 * 60 THEN \"Between one week and one month(medium)\"\n\t\t\t\tWHEN median_change_lead_time >= 30 * 24 * 60 THEN \"More than one month(low)\"\n\t\t\t\tELSE \"N/A. Please check if you have collected deployments/pull_requests.\"\n\t\t\t\tEND\n WHEN ('$benchmarks') = '2021 report' THEN\n\t\t CASE\n\t\t\t\tWHEN median_change_lead_time < 60 THEN \"Less than one hour(elite)\"\n\t\t\t\tWHEN median_change_lead_time < 7 * 24 * 60 THEN \"Less than one week(high)\"\n\t\t\t\tWHEN median_change_lead_time < 180 * 24 * 60 THEN \"Between one week and six months(medium)\"\n\t\t\t\tWHEN median_change_lead_time >= 180 * 24 * 60 THEN \"More than six months(low)\"\n\t\t\t\tELSE \"N/A. 
Please check if you have collected deployments/pull_requests.\"\n\t\t\t\tEND\n\t\tELSE 'Invalid Benchmarks'\n\tEND AS median_change_lead_time\nFROM _median_change_lead_time", "refId": "A", "sql": { "columns": [ @@ -582,7 +582,7 @@ "format": "table", "hide": false, "rawQuery": true, - "rawSql": "-- Metric 3: Median time to restore service \nwith _incidents as (\n-- get the incidents created within the selected time period in the top-right corner\n\tSELECT\n\t distinct i.id,\n\t\tcast(lead_time_minutes as signed) as lead_time_minutes\n\tFROM\n\t\tissues i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id and pm.`table` = 'boards'\n\t join user_accounts ua on i.assignee_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n\tWHERE\n\t t.name in (${team:sqlstring}+'')\n\t\tand i.type = 'INCIDENT'\n\t\tand $__timeFilter(i.created_date)\n),\n\n_median_mttr_ranks as(\n\tSELECT *, percent_rank() over(order by lead_time_minutes) as ranks\n\tFROM _incidents\n),\n\n_median_mttr as(\n\tSELECT max(lead_time_minutes) as median_time_to_resolve\n\tFROM _median_mttr_ranks\n\tWHERE ranks <= 0.5\n)\n\nSELECT \n CASE\n WHEN ('$benchmarks') = '2023 report' THEN\n\t\t\tCASE\n\t\t\t\tWHEN median_time_to_resolve < 60 THEN \"Less than one hour(elite)\"\n\t\t\t\tWHEN median_time_to_resolve < 24 * 60 THEN \"Less than one day(high)\"\n\t\t\t\tWHEN median_time_to_resolve < 7 * 24 * 60 THEN \"Between one day and one week(medium)\"\n\t\t\t\tWHEN median_time_to_resolve >= 7 * 24 * 60 THEN \"More than one week(low)\"\n\t\t\t\tELSE \"N/A. Please check if you have collected incidents.\"\n\t\t\t\tEND \n\t\tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\tCASE\n\t\t\t\tWHEN median_time_to_resolve < 60 THEN \"Less than one hour(elite)\"\n\t\t\t\tWHEN median_time_to_resolve < 24 * 60 THEN \"Less than one day(high)\"\n\t\t\t\tWHEN median_time_to_resolve < 7 * 24 * 60 THEN \"Between one day and one week(medium)\"\n\t\t\t\tWHEN median_time_to_resolve >= 7 * 24 * 60 THEN \"More than one week(low)\"\n\t\t\t\tELSE \"N/A. 
Please check if you have collected incidents.\"\n \t\tEND\n\t\tELSE 'Invalid Benchmarks'\n\tEND AS median_time_to_resolve\nFROM \n\t_median_mttr", + "rawSql": "-- Metric 3: Median time to restore service \nwith _incidents as (\n-- get the incidents created within the selected time period in the top-right corner\n\tSELECT\n\t distinct i.id,\n\t\tcast(lead_time_minutes as signed) as lead_time_minutes\n\tFROM\n\t\tissues i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id and pm.`table` = 'boards'\n\t join user_accounts ua on i.assignee_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n\tWHERE\n\t t.name in (${team})\n\t\tand i.type = 'INCIDENT'\n\t\tand $__timeFilter(i.created_date)\n),\n\n_median_mttr_ranks as(\n\tSELECT *, percent_rank() over(order by lead_time_minutes) as ranks\n\tFROM _incidents\n),\n\n_median_mttr as(\n\tSELECT max(lead_time_minutes) as median_time_to_resolve\n\tFROM _median_mttr_ranks\n\tWHERE ranks <= 0.5\n)\n\nSELECT \n CASE\n WHEN ('$benchmarks') = '2023 report' THEN\n\t\t\tCASE\n\t\t\t\tWHEN median_time_to_resolve < 60 THEN \"Less than one hour(elite)\"\n\t\t\t\tWHEN median_time_to_resolve < 24 * 60 THEN \"Less than one day(high)\"\n\t\t\t\tWHEN median_time_to_resolve < 7 * 24 * 60 THEN \"Between one day and one week(medium)\"\n\t\t\t\tWHEN median_time_to_resolve >= 7 * 24 * 60 THEN \"More than one week(low)\"\n\t\t\t\tELSE \"N/A. Please check if you have collected incidents.\"\n\t\t\t\tEND \n\t\tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\tCASE\n\t\t\t\tWHEN median_time_to_resolve < 60 THEN \"Less than one hour(elite)\"\n\t\t\t\tWHEN median_time_to_resolve < 24 * 60 THEN \"Less than one day(high)\"\n\t\t\t\tWHEN median_time_to_resolve < 7 * 24 * 60 THEN \"Between one day and one week(medium)\"\n\t\t\t\tWHEN median_time_to_resolve >= 7 * 24 * 60 THEN \"More than one week(low)\"\n\t\t\t\tELSE \"N/A. Please check if you have collected incidents.\"\n \t\tEND\n\t\tELSE 'Invalid Benchmarks'\n\tEND AS median_time_to_resolve\nFROM \n\t_median_mttr", "refId": "A", "sql": { "columns": [ @@ -698,7 +698,7 @@ "format": "table", "hide": false, "rawQuery": true, - "rawSql": "-- Metric 4: change failure rate\nwith _deployments as (\n-- When deploying multiple commits in one pipeline, GitLab and BitBucket may generate more than one deployment. 
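Every filter touched by this diff has the shape column in (${variable}); the only change is the interpolation format, dropping :sqlstring and the appended empty string. As a rough sketch, assuming a multi-value Grafana variable that is rendered as a quoted, comma-separated list, and using 'Team A' and 'Team B' purely as placeholder selections, the team filter would be expected to reach MySQL roughly as:

-- template as stored in the dashboard JSON:   t.name in (${team})
-- hypothetical rendered SQL with placeholder selections:
select count(*) as matching_teams
from teams t
where t.name in ('Team A', 'Team B');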
However, DevLake consider these deployments as ONE production deployment and use the last one's finished_date as the finished date.\n\tSELECT\n\t\tcdc.cicd_deployment_id as deployment_id,\n\t\tmax(cdc.finished_date) as deployment_finished_date\n\tFROM \n\t\tcicd_deployment_commits cdc\n\t JOIN commits c on cdc.commit_sha = c.sha\n\t join user_accounts ua on c.author_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n\t\tJOIN project_mapping pm on cdc.cicd_scope_id = pm.row_id and pm.`table` = 'cicd_scopes'\n\tWHERE\n\t\tt.name in (${team:sqlstring}+'')\n\t\tand cdc.result = 'SUCCESS'\n\t\tand cdc.environment = 'PRODUCTION'\n\tGROUP BY 1\n\tHAVING $__timeFilter(max(cdc.finished_date))\n),\n\n_failure_caused_by_deployments as (\n-- calculate the number of incidents caused by each deployment\n\tSELECT\n\t\td.deployment_id,\n\t\td.deployment_finished_date,\n\t\tcount(distinct case when i.type = 'INCIDENT' then d.deployment_id else null end) as has_incident\n\tFROM\n\t\t_deployments d\n\t\tleft join project_issue_metrics pim on d.deployment_id = pim.deployment_id\n\t\tleft join issues i on pim.id = i.id\n\tGROUP BY 1,2\n),\n\n_change_failure_rate as (\n\tSELECT \n\t\tcase \n\t\t\twhen count(deployment_id) is null then null\n\t\t\telse sum(has_incident)/count(deployment_id) end as change_failure_rate\n\tFROM\n\t\t_failure_caused_by_deployments\n),\n\n_is_collected_data as(\n\tSELECT\n CASE \n WHEN COUNT(i.id) = 0 AND COUNT(cdc.id) = 0 THEN 'No All'\n WHEN COUNT(i.id) = 0 THEN 'No Incidents' \n WHEN COUNT(cdc.id) = 0 THEN 'No Deployments'\n END AS is_collected\nFROM\n (SELECT 1) AS dummy\nLEFT JOIN\n issues i ON i.type = 'INCIDENT'\nLEFT JOIN\n cicd_deployment_commits cdc ON 1=1\n)\n\nSELECT\n CASE\n WHEN ('$benchmarks') = '2023 report' THEN\n\t\t\tCASE \n\t\t\t\tWHEN is_collected = \"No All\" THEN \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\tWHEN is_collected = \"No Incidents\" THEN \"N/A. Please check if you have collected incidents.\"\n\t\t\t\tWHEN is_collected = \"No Deployments\" THEN \"N/A. Please check if you have collected deployments.\"\n\t\t\t\tWHEN change_failure_rate <= 5 THEN \"0-5%(elite)\"\n\t\t\t\tWHEN change_failure_rate <= .10 THEN \"5%-10%(high)\"\n\t\t\t\tWHEN change_failure_rate <= .15 THEN \"10%-15%(medium)\"\n\t\t\t\tWHEN change_failure_rate > .15 THEN \"> 15%(low)\"\n\t\t\t\tELSE \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\tEND\n\t\tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\tCASE \n\t\t\t WHEN is_collected = \"No All\" THEN \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\tWHEN is_collected = \"No Incidents\" THEN \"N/A. Please check if you have collected incidents.\"\n\t\t\t\tWHEN is_collected = \"No Deployments\" THEN \"N/A. Please check if you have collected deployments.\"\n\t\t\t\tWHEN change_failure_rate <= .15 THEN \"0-15%(elite)\"\n\t\t\t\tWHEN change_failure_rate <= .20 THEN \"16%-20%(high)\"\n\t\t\t\tWHEN change_failure_rate <= .30 THEN \"21%-30%(medium)\"\n\t\t\t\tWHEN change_failure_rate > .30 THEN \"> 30%(low)\" \n\t\t\t\tELSE \"N/A. 
Please check if you have collected deployments/incidents.\"\n\t\t\t\tEND\n\t\tELSE 'Invalid Benchmarks'\n\tEND AS change_failure_rate\nFROM \n\t_change_failure_rate, _is_collected_data", + "rawSql": "-- Metric 4: change failure rate\nwith _deployments as (\n-- When deploying multiple commits in one pipeline, GitLab and BitBucket may generate more than one deployment. However, DevLake consider these deployments as ONE production deployment and use the last one's finished_date as the finished date.\n\tSELECT\n\t\tcdc.cicd_deployment_id as deployment_id,\n\t\tmax(cdc.finished_date) as deployment_finished_date\n\tFROM \n\t\tcicd_deployment_commits cdc\n\t JOIN commits c on cdc.commit_sha = c.sha\n\t join user_accounts ua on c.author_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n\t\tJOIN project_mapping pm on cdc.cicd_scope_id = pm.row_id and pm.`table` = 'cicd_scopes'\n\tWHERE\n\t\tt.name in (${team})\n\t\tand cdc.result = 'SUCCESS'\n\t\tand cdc.environment = 'PRODUCTION'\n\tGROUP BY 1\n\tHAVING $__timeFilter(max(cdc.finished_date))\n),\n\n_failure_caused_by_deployments as (\n-- calculate the number of incidents caused by each deployment\n\tSELECT\n\t\td.deployment_id,\n\t\td.deployment_finished_date,\n\t\tcount(distinct case when i.type = 'INCIDENT' then d.deployment_id else null end) as has_incident\n\tFROM\n\t\t_deployments d\n\t\tleft join project_issue_metrics pim on d.deployment_id = pim.deployment_id\n\t\tleft join issues i on pim.id = i.id\n\tGROUP BY 1,2\n),\n\n_change_failure_rate as (\n\tSELECT \n\t\tcase \n\t\t\twhen count(deployment_id) is null then null\n\t\t\telse sum(has_incident)/count(deployment_id) end as change_failure_rate\n\tFROM\n\t\t_failure_caused_by_deployments\n),\n\n_is_collected_data as(\n\tSELECT\n CASE \n WHEN COUNT(i.id) = 0 AND COUNT(cdc.id) = 0 THEN 'No All'\n WHEN COUNT(i.id) = 0 THEN 'No Incidents' \n WHEN COUNT(cdc.id) = 0 THEN 'No Deployments'\n END AS is_collected\nFROM\n (SELECT 1) AS dummy\nLEFT JOIN\n issues i ON i.type = 'INCIDENT'\nLEFT JOIN\n cicd_deployment_commits cdc ON 1=1\n)\n\nSELECT\n CASE\n WHEN ('$benchmarks') = '2023 report' THEN\n\t\t\tCASE \n\t\t\t\tWHEN is_collected = \"No All\" THEN \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\tWHEN is_collected = \"No Incidents\" THEN \"N/A. Please check if you have collected incidents.\"\n\t\t\t\tWHEN is_collected = \"No Deployments\" THEN \"N/A. Please check if you have collected deployments.\"\n\t\t\t\tWHEN change_failure_rate <= 5 THEN \"0-5%(elite)\"\n\t\t\t\tWHEN change_failure_rate <= .10 THEN \"5%-10%(high)\"\n\t\t\t\tWHEN change_failure_rate <= .15 THEN \"10%-15%(medium)\"\n\t\t\t\tWHEN change_failure_rate > .15 THEN \"> 15%(low)\"\n\t\t\t\tELSE \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\tEND\n\t\tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\tCASE \n\t\t\t WHEN is_collected = \"No All\" THEN \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\tWHEN is_collected = \"No Incidents\" THEN \"N/A. Please check if you have collected incidents.\"\n\t\t\t\tWHEN is_collected = \"No Deployments\" THEN \"N/A. Please check if you have collected deployments.\"\n\t\t\t\tWHEN change_failure_rate <= .15 THEN \"0-15%(elite)\"\n\t\t\t\tWHEN change_failure_rate <= .20 THEN \"16%-20%(high)\"\n\t\t\t\tWHEN change_failure_rate <= .30 THEN \"21%-30%(medium)\"\n\t\t\t\tWHEN change_failure_rate > .30 THEN \"> 30%(low)\" \n\t\t\t\tELSE \"N/A. 
Please check if you have collected deployments/incidents.\"\n\t\t\t\tEND\n\t\tELSE 'Invalid Benchmarks'\n\tEND AS change_failure_rate\nFROM \n\t_change_failure_rate, _is_collected_data", "refId": "A", "sql": { "columns": [ @@ -804,7 +804,7 @@ "format": "table", "hide": false, "rawQuery": true, - "rawSql": "-- Metric 1: Number of deployments per month\nwith _deployments as(\n-- When deploying multiple commits in one pipeline, GitLab and BitBucket may generate more than one deployment. However, DevLake consider these deployments as ONE production deployment and use the last one's finished_date as the finished date.\n\tSELECT \n\t\tdate_format(deployment_finished_date,'%y/%m') as month,\n\t\tcount(cicd_deployment_id) as deployment_count\n\tFROM (\n\t\tSELECT\n\t\t\tcdc.cicd_deployment_id,\n\t\t\tmax(cdc.finished_date) as deployment_finished_date\n\t\tFROM cicd_deployment_commits cdc\n\t\tJOIN commits c on cdc.commit_sha = c.sha\n\t join user_accounts ua on c.author_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n\t\tJOIN project_mapping pm on cdc.cicd_scope_id = pm.row_id and pm.`table` = 'cicd_scopes'\n\t\tWHERE\n\t\t\tt.name in (${team:sqlstring}+'')\n\t\t\tand cdc.result = 'SUCCESS'\n\t\t\tand cdc.environment = 'PRODUCTION'\n\t\tGROUP BY 1\n\t\tHAVING $__timeFilter(max(cdc.finished_date))\n\t) _production_deployments\n\tGROUP BY 1\n)\n\nSELECT \n\tcm.month, \n\tcase when d.deployment_count is null then 0 else d.deployment_count end as deployment_count\nFROM \n\tcalendar_months cm\n\tleft join _deployments d on cm.month = d.month\nWHERE $__timeFilter(month_timestamp) ", + "rawSql": "-- Metric 1: Number of deployments per month\nwith _deployments as(\n-- When deploying multiple commits in one pipeline, GitLab and BitBucket may generate more than one deployment. 
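The change failure rate above is the share of production deployments linked to at least one incident through project_issue_metrics. A reduced sketch of that ratio, keeping only the core joins from the query above (the team and time filters are omitted here for brevity):

-- Sketch: flag each deployment that maps to an incident, then average the flag.
with _deployments as (
	select cicd_deployment_id as deployment_id
	from cicd_deployment_commits
	where result = 'SUCCESS' and environment = 'PRODUCTION'
	group by 1
),
_flags as (
	select
		d.deployment_id,
		count(distinct case when i.type = 'INCIDENT' then d.deployment_id else null end) as has_incident
	from _deployments d
	left join project_issue_metrics pim on d.deployment_id = pim.deployment_id
	left join issues i on pim.id = i.id
	group by 1
)
select sum(has_incident) / count(deployment_id) as change_failure_rate
from _flags;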
However, DevLake consider these deployments as ONE production deployment and use the last one's finished_date as the finished date.\n\tSELECT \n\t\tdate_format(deployment_finished_date,'%y/%m') as month,\n\t\tcount(cicd_deployment_id) as deployment_count\n\tFROM (\n\t\tSELECT\n\t\t\tcdc.cicd_deployment_id,\n\t\t\tmax(cdc.finished_date) as deployment_finished_date\n\t\tFROM cicd_deployment_commits cdc\n\t\tJOIN commits c on cdc.commit_sha = c.sha\n\t join user_accounts ua on c.author_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n\t\tJOIN project_mapping pm on cdc.cicd_scope_id = pm.row_id and pm.`table` = 'cicd_scopes'\n\t\tWHERE\n\t\t\tt.name in (${team})\n\t\t\tand cdc.result = 'SUCCESS'\n\t\t\tand cdc.environment = 'PRODUCTION'\n\t\tGROUP BY 1\n\t\tHAVING $__timeFilter(max(cdc.finished_date))\n\t) _production_deployments\n\tGROUP BY 1\n)\n\nSELECT \n\tcm.month, \n\tcase when d.deployment_count is null then 0 else d.deployment_count end as deployment_count\nFROM \n\tcalendar_months cm\n\tleft join _deployments d on cm.month = d.month\nWHERE $__timeFilter(month_timestamp) ", "refId": "A", "sql": { "columns": [ @@ -908,7 +908,7 @@ "format": "table", "hide": false, "rawQuery": true, - "rawSql": "-- Metric 2: median change lead time per month\nwith _pr_stats as (\n-- get the cycle time of PRs deployed by the deployments finished each month\n\tSELECT\n\t\tdistinct pr.id,\n\t\tdate_format(cdc.finished_date,'%y/%m') as month,\n\t\tppm.pr_cycle_time\n\tFROM\n\t\tpull_requests pr\n\t\tjoin user_accounts ua on pr.author_id = ua.account_id\n \tjoin users u on ua.user_id = u.id\n \tjoin team_users tu on u.id = tu.user_id\n \tjoin teams t on tu.team_id = t.id\n\t\tjoin project_pr_metrics ppm on ppm.id = pr.id\n\t\tjoin project_mapping pm on pr.base_repo_id = pm.row_id and pm.`table` = 'repos'\n\t\tjoin cicd_deployment_commits cdc on ppm.deployment_commit_id = cdc.id\n\tWHERE\n\t\tt.name in (${team:sqlstring}+'') \n\t\tand pr.merged_date is not null\n\t\tand ppm.pr_cycle_time is not null\n\t\tand $__timeFilter(cdc.finished_date)\n),\n\n_find_median_clt_each_month_ranks as(\n\tSELECT *, percent_rank() over(PARTITION BY month order by pr_cycle_time) as ranks\n\tFROM _pr_stats\n),\n\n_clt as(\n\tSELECT month, max(pr_cycle_time) as median_change_lead_time\n\tFROM _find_median_clt_each_month_ranks\n\tWHERE ranks <= 0.5\n\tgroup by month\n)\n\nSELECT \n\tcm.month,\n\tcase \n\t\twhen _clt.median_change_lead_time is null then 0 \n\t\telse _clt.median_change_lead_time/60 end as median_change_lead_time_in_hour\nFROM \n\tcalendar_months cm\n\tleft join _clt on cm.month = _clt.month\nWHERE $__timeFilter(month_timestamp) ", + "rawSql": "-- Metric 2: median change lead time per month\nwith _pr_stats as (\n-- get the cycle time of PRs deployed by the deployments finished each month\n\tSELECT\n\t\tdistinct pr.id,\n\t\tdate_format(cdc.finished_date,'%y/%m') as month,\n\t\tppm.pr_cycle_time\n\tFROM\n\t\tpull_requests pr\n\t\tjoin user_accounts ua on pr.author_id = ua.account_id\n \tjoin users u on ua.user_id = u.id\n \tjoin team_users tu on u.id = tu.user_id\n \tjoin teams t on tu.team_id = t.id\n\t\tjoin project_pr_metrics ppm on ppm.id = pr.id\n\t\tjoin project_mapping pm on pr.base_repo_id = pm.row_id and pm.`table` = 'repos'\n\t\tjoin cicd_deployment_commits cdc on ppm.deployment_commit_id = cdc.id\n\tWHERE\n\t\tt.name in (${team}) \n\t\tand pr.merged_date is not null\n\t\tand ppm.pr_cycle_time is not null\n\t\tand 
$__timeFilter(cdc.finished_date)\n),\n\n_find_median_clt_each_month_ranks as(\n\tSELECT *, percent_rank() over(PARTITION BY month order by pr_cycle_time) as ranks\n\tFROM _pr_stats\n),\n\n_clt as(\n\tSELECT month, max(pr_cycle_time) as median_change_lead_time\n\tFROM _find_median_clt_each_month_ranks\n\tWHERE ranks <= 0.5\n\tgroup by month\n)\n\nSELECT \n\tcm.month,\n\tcase \n\t\twhen _clt.median_change_lead_time is null then 0 \n\t\telse _clt.median_change_lead_time/60 end as median_change_lead_time_in_hour\nFROM \n\tcalendar_months cm\n\tleft join _clt on cm.month = _clt.month\nWHERE $__timeFilter(month_timestamp) ", "refId": "A", "sql": { "columns": [ @@ -1032,7 +1032,7 @@ "format": "table", "hide": false, "rawQuery": true, - "rawSql": "-- Metric 3: median time to restore service - MTTR\nwith _incidents as (\n-- get the number of incidents created each month\n\tSELECT\n\t distinct i.id,\n\t\tdate_format(i.created_date,'%y/%m') as month,\n\t\tcast(lead_time_minutes as signed) as lead_time_minutes\n\tFROM\n\t\tissues i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id and pm.`table` = 'boards'\n\t join user_accounts ua on i.assignee_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n\tWHERE\n\t t.name in (${team:sqlstring}+'')\n\t\tand i.type = 'INCIDENT'\n\t\tand i.lead_time_minutes is not null\n),\n\n_find_median_mttr_each_month_ranks as(\n\tSELECT *, percent_rank() over(PARTITION BY month order by lead_time_minutes) as ranks\n\tFROM _incidents\n),\n\n_mttr as(\n\tSELECT month, max(lead_time_minutes) as median_time_to_resolve\n\tFROM _find_median_mttr_each_month_ranks\n\tWHERE ranks <= 0.5\n\tGROUP BY month\n)\n\nSELECT \n\tcm.month,\n\tcase \n\t\twhen m.median_time_to_resolve is null then 0 \n\t\telse m.median_time_to_resolve/60 end as median_time_to_resolve_in_hour\nFROM \n\tcalendar_months cm\n\tleft join _mttr m on cm.month = m.month\nWHERE $__timeFilter(month_timestamp) ", + "rawSql": "-- Metric 3: median time to restore service - MTTR\nwith _incidents as (\n-- get the number of incidents created each month\n\tSELECT\n\t distinct i.id,\n\t\tdate_format(i.created_date,'%y/%m') as month,\n\t\tcast(lead_time_minutes as signed) as lead_time_minutes\n\tFROM\n\t\tissues i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id and pm.`table` = 'boards'\n\t join user_accounts ua on i.assignee_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n\tWHERE\n\t t.name in (${team})\n\t\tand i.type = 'INCIDENT'\n\t\tand i.lead_time_minutes is not null\n),\n\n_find_median_mttr_each_month_ranks as(\n\tSELECT *, percent_rank() over(PARTITION BY month order by lead_time_minutes) as ranks\n\tFROM _incidents\n),\n\n_mttr as(\n\tSELECT month, max(lead_time_minutes) as median_time_to_resolve\n\tFROM _find_median_mttr_each_month_ranks\n\tWHERE ranks <= 0.5\n\tGROUP BY month\n)\n\nSELECT \n\tcm.month,\n\tcase \n\t\twhen m.median_time_to_resolve is null then 0 \n\t\telse m.median_time_to_resolve/60 end as median_time_to_resolve_in_hour\nFROM \n\tcalendar_months cm\n\tleft join _mttr m on cm.month = m.month\nWHERE $__timeFilter(month_timestamp) ", "refId": "A", "sql": { "columns": [ @@ -1154,7 +1154,7 @@ "format": "table", "hide": false, "rawQuery": true, - "rawSql": "-- Metric 4: 
change failure rate per month\nwith _deployments as (\n-- When deploying multiple commits in one pipeline, GitLab and BitBucket may generate more than one deployment. However, DevLake consider these deployments as ONE production deployment and use the last one's finished_date as the finished date.\n\tSELECT\n\t\tcdc.cicd_deployment_id as deployment_id,\n\t\tmax(cdc.finished_date) as deployment_finished_date\n\tFROM \n\t\tcicd_deployment_commits cdc\n\t\tJOIN commits c on cdc.commit_sha = c.sha\n\tjoin user_accounts ua on c.author_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n\tJOIN project_mapping pm on cdc.cicd_scope_id = pm.row_id and pm.`table` = 'cicd_scopes'\n\tWHERE\n\t\tt.name in (${team:sqlstring}+'')\n\t\tand cdc.result = 'SUCCESS'\n\t\tand cdc.environment = 'PRODUCTION'\n\tGROUP BY 1\n\tHAVING $__timeFilter(max(cdc.finished_date))\n),\n\n_failure_caused_by_deployments as (\n-- calculate the number of incidents caused by each deployment\n\tSELECT\n\t\td.deployment_id,\n\t\td.deployment_finished_date,\n\t\tcount(distinct case when i.type = 'INCIDENT' then d.deployment_id else null end) as has_incident\n\tFROM\n\t\t_deployments d\n\t\tleft join project_issue_metrics pim on d.deployment_id = pim.deployment_id\n\t\tleft join issues i on pim.id = i.id\n\tGROUP BY 1,2\n),\n\n_change_failure_rate_for_each_month as (\n\tSELECT \n\t\tdate_format(deployment_finished_date,'%y/%m') as month,\n\t\tcase \n\t\t\twhen count(deployment_id) is null then null\n\t\t\telse sum(has_incident)/count(deployment_id) end as change_failure_rate\n\tFROM\n\t\t_failure_caused_by_deployments\n\tGROUP BY 1\n)\n\nSELECT \n\tcm.month,\n\tcfr.change_failure_rate\nFROM \n\tcalendar_months cm\n\tleft join _change_failure_rate_for_each_month cfr on cm.month = cfr.month\nWHERE $__timeFilter(month_timestamp) ", + "rawSql": "-- Metric 4: change failure rate per month\nwith _deployments as (\n-- When deploying multiple commits in one pipeline, GitLab and BitBucket may generate more than one deployment. 
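As in the deployment CTEs earlier in this file, the collapsing is done by grouping cicd_deployment_commits on cicd_deployment_id and taking max(finished_date) as the deployment time, with the time filter applied in HAVING so it sees the aggregated date. A minimal sketch, with a fixed date range standing in for Grafana's $__timeFilter macro:

-- Sketch: one production deployment per cicd_deployment_id, filtered on the
-- aggregated finish date (a fixed range stands in for $__timeFilter here).
select
	cicd_deployment_id as deployment_id,
	max(finished_date) as deployment_finished_date
from cicd_deployment_commits
where result = 'SUCCESS'
	and environment = 'PRODUCTION'
group by cicd_deployment_id
having max(finished_date) >= '2024-01-01'   -- placeholder lower bound
	and max(finished_date) < '2024-07-01';  -- placeholder upper bound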
However, DevLake consider these deployments as ONE production deployment and use the last one's finished_date as the finished date.\n\tSELECT\n\t\tcdc.cicd_deployment_id as deployment_id,\n\t\tmax(cdc.finished_date) as deployment_finished_date\n\tFROM \n\t\tcicd_deployment_commits cdc\n\t\tJOIN commits c on cdc.commit_sha = c.sha\n\tjoin user_accounts ua on c.author_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n\tJOIN project_mapping pm on cdc.cicd_scope_id = pm.row_id and pm.`table` = 'cicd_scopes'\n\tWHERE\n\t\tt.name in (${team})\n\t\tand cdc.result = 'SUCCESS'\n\t\tand cdc.environment = 'PRODUCTION'\n\tGROUP BY 1\n\tHAVING $__timeFilter(max(cdc.finished_date))\n),\n\n_failure_caused_by_deployments as (\n-- calculate the number of incidents caused by each deployment\n\tSELECT\n\t\td.deployment_id,\n\t\td.deployment_finished_date,\n\t\tcount(distinct case when i.type = 'INCIDENT' then d.deployment_id else null end) as has_incident\n\tFROM\n\t\t_deployments d\n\t\tleft join project_issue_metrics pim on d.deployment_id = pim.deployment_id\n\t\tleft join issues i on pim.id = i.id\n\tGROUP BY 1,2\n),\n\n_change_failure_rate_for_each_month as (\n\tSELECT \n\t\tdate_format(deployment_finished_date,'%y/%m') as month,\n\t\tcase \n\t\t\twhen count(deployment_id) is null then null\n\t\t\telse sum(has_incident)/count(deployment_id) end as change_failure_rate\n\tFROM\n\t\t_failure_caused_by_deployments\n\tGROUP BY 1\n)\n\nSELECT \n\tcm.month,\n\tcfr.change_failure_rate\nFROM \n\tcalendar_months cm\n\tleft join _change_failure_rate_for_each_month cfr on cm.month = cfr.month\nWHERE $__timeFilter(month_timestamp) ", "refId": "A", "sql": { "columns": [ diff --git a/grafana/dashboards/DORADebug.json b/grafana/dashboards/DORADebug.json index ca9c31ea7f0..ca7d9873f28 100644 --- a/grafana/dashboards/DORADebug.json +++ b/grafana/dashboards/DORADebug.json @@ -321,7 +321,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "-- Metric 1: Deployment Frequency\nwith last_few_calendar_months as(\n-- construct the last few calendar months within the selected time period in the top-right corner\n\tSELECT CAST((SYSDATE()-INTERVAL (H+T+U) DAY) AS date) day\n\tFROM ( SELECT 0 H\n\t\t\tUNION ALL SELECT 100 UNION ALL SELECT 200 UNION ALL SELECT 300\n\t\t) H CROSS JOIN ( SELECT 0 T\n\t\t\tUNION ALL SELECT 10 UNION ALL SELECT 20 UNION ALL SELECT 30\n\t\t\tUNION ALL SELECT 40 UNION ALL SELECT 50 UNION ALL SELECT 60\n\t\t\tUNION ALL SELECT 70 UNION ALL SELECT 80 UNION ALL SELECT 90\n\t\t) T CROSS JOIN ( SELECT 0 U\n\t\t\tUNION ALL SELECT 1 UNION ALL SELECT 2 UNION ALL SELECT 3\n\t\t\tUNION ALL SELECT 4 UNION ALL SELECT 5 UNION ALL SELECT 6\n\t\t\tUNION ALL SELECT 7 UNION ALL SELECT 8 UNION ALL SELECT 9\n\t\t) U\n\tWHERE\n\t\t(SYSDATE()-INTERVAL (H+T+U) DAY) > $__timeFrom()\n),\n\n_production_deployment_days as(\n-- When deploying multiple commits in one pipeline, GitLab and BitBucket may generate more than one deployment. 
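The per-month panels above all share one shape: bucket the metric by date_format(..., '%y/%m'), then left join onto the calendar_months helper table so months with no data still produce a row. A condensed sketch, assuming calendar_months(month, month_timestamp) is available as it is in these dashboards:

-- Sketch: deployments per month, zero-filled via calendar_months.
with _deployments as (
	select
		cicd_deployment_id,
		date_format(max(finished_date), '%y/%m') as month
	from cicd_deployment_commits
	where result = 'SUCCESS' and environment = 'PRODUCTION'
	group by cicd_deployment_id
),
_per_month as (
	select month, count(*) as deployment_count
	from _deployments
	group by month
)
select
	cm.month,
	coalesce(d.deployment_count, 0) as deployment_count
from calendar_months cm
left join _per_month d on cm.month = d.month;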
However, DevLake consider these deployments as ONE production deployment and use the last one's finished_date as the finished date.\n\tSELECT\n\t\tcdc.cicd_deployment_id as deployment_id,\n\t\tmax(DATE(cdc.finished_date)) as day\n\tFROM cicd_deployment_commits cdc\n\tJOIN project_mapping pm on cdc.cicd_scope_id = pm.row_id and pm.`table` = 'cicd_scopes'\n\tWHERE\n\t\tpm.project_name in (${project:sqlstring}+'')\n\t\tand cdc.result = 'SUCCESS'\n\t\tand cdc.environment = 'PRODUCTION'\n\tGROUP BY 1\n),\n\n_days_weekly_deploy as(\n-- calculate the number of deployment days every week\n\tSELECT\n\t\t\tdate(DATE_ADD(last_few_calendar_months.day, INTERVAL -WEEKDAY(last_few_calendar_months.day) DAY)) as week,\n\t\t\tMAX(if(_production_deployment_days.day is not null, 1, null)) as weeks_deployed,\n\t\t\tCOUNT(distinct _production_deployment_days.day) as days_deployed\n\tFROM \n\t\tlast_few_calendar_months\n\t\tLEFT JOIN _production_deployment_days ON _production_deployment_days.day = last_few_calendar_months.day\n\tGROUP BY week\n\t),\n\n_days_monthly_deploy as(\n-- calculate the number of deployment days every month\n\tSELECT\n\t\t\tdate(DATE_ADD(last_few_calendar_months.day, INTERVAL -DAY(last_few_calendar_months.day)+1 DAY)) as month,\n\t\t\tMAX(if(_production_deployment_days.day is not null, 1, null)) as months_deployed,\n\t\t COUNT(distinct _production_deployment_days.day) as days_deployed\n\tFROM \n\t\tlast_few_calendar_months\n\t\tLEFT JOIN _production_deployment_days ON _production_deployment_days.day = last_few_calendar_months.day\n\tGROUP BY month\n\t),\n\n_days_six_months_deploy AS (\n SELECT\n month,\n SUM(days_deployed) OVER (\n ORDER BY month\n ROWS BETWEEN 5 PRECEDING AND CURRENT ROW\n ) AS days_deployed_per_six_months,\n COUNT(months_deployed) OVER (\n ORDER BY month\n ROWS BETWEEN 5 PRECEDING AND CURRENT ROW\n ) AS months_deployed_count,\n ROW_NUMBER() OVER (\n PARTITION BY DATE_FORMAT(month, '%Y-%m') DIV 6\n ORDER BY month DESC\n ) AS rn\n FROM _days_monthly_deploy\n),\n\n_median_number_of_deployment_days_per_week_ranks as(\n\tSELECT *, percent_rank() over(order by days_deployed) as ranks\n\tFROM _days_weekly_deploy\n),\n\n_median_number_of_deployment_days_per_week as(\n\tSELECT max(days_deployed) as median_number_of_deployment_days_per_week\n\tFROM _median_number_of_deployment_days_per_week_ranks\n\tWHERE ranks <= 0.5\n),\n\n_median_number_of_deployment_days_per_month_ranks as(\n\tSELECT *, percent_rank() over(order by days_deployed) as ranks\n\tFROM _days_monthly_deploy\n),\n\n_median_number_of_deployment_days_per_month as(\n\tSELECT max(days_deployed) as median_number_of_deployment_days_per_month\n\tFROM _median_number_of_deployment_days_per_month_ranks\n\tWHERE ranks <= 0.5\n),\n\n_days_per_six_months_deploy_by_filter AS (\nSELECT\n month,\n days_deployed_per_six_months,\n months_deployed_count\nFROM _days_six_months_deploy\nWHERE rn%6 = 1\n),\n\n\n_median_number_of_deployment_days_per_six_months_ranks as(\n\tSELECT *, percent_rank() over(order by days_deployed_per_six_months) as ranks\n\tFROM _days_per_six_months_deploy_by_filter\n),\n\n_median_number_of_deployment_days_per_six_months as(\n\tSELECT min(days_deployed_per_six_months) as median_number_of_deployment_days_per_six_months, min(months_deployed_count) as is_collected\n\tFROM _median_number_of_deployment_days_per_six_months_ranks\n\tWHERE ranks >= 0.5\n)\n\nSELECT \n CASE\n WHEN ('$benchmarks') = '2023 report' THEN\n\t\t\tCASE \n\t\t\t\tWHEN median_number_of_deployment_days_per_week >= 7 THEN 
'On-demand(elite)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_week >= 1 THEN 'Between once per day and once per week(high)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_month >= 1 THEN 'Between once per week and once per month(medium)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_month < 1 and is_collected != NULL THEN 'Fewer than once per month(low)'\n\t\t\t\tELSE \"N/A. Please check if you have collected deployments.\" END\n\t \tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\tCASE \n\t\t\t\tWHEN median_number_of_deployment_days_per_week >= 7 THEN 'On-demand(elite)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_month >= 1 THEN 'Between once per day and once per month(high)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_six_months >= 1 THEN 'Between once per month and once every 6 months(medium)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_six_months < 1 and is_collected != NULL THEN 'Fewer than once per six months(low)'\n\t\t\t\tELSE \"N/A. Please check if you have collected deployments.\" END\n\t\tELSE 'Invalid Benchmarks'\n\tEND AS 'Deployment Frequency'\nFROM _median_number_of_deployment_days_per_week, _median_number_of_deployment_days_per_month, _median_number_of_deployment_days_per_six_months", + "rawSql": "-- Metric 1: Deployment Frequency\nwith last_few_calendar_months as(\n-- construct the last few calendar months within the selected time period in the top-right corner\n\tSELECT CAST((SYSDATE()-INTERVAL (H+T+U) DAY) AS date) day\n\tFROM ( SELECT 0 H\n\t\t\tUNION ALL SELECT 100 UNION ALL SELECT 200 UNION ALL SELECT 300\n\t\t) H CROSS JOIN ( SELECT 0 T\n\t\t\tUNION ALL SELECT 10 UNION ALL SELECT 20 UNION ALL SELECT 30\n\t\t\tUNION ALL SELECT 40 UNION ALL SELECT 50 UNION ALL SELECT 60\n\t\t\tUNION ALL SELECT 70 UNION ALL SELECT 80 UNION ALL SELECT 90\n\t\t) T CROSS JOIN ( SELECT 0 U\n\t\t\tUNION ALL SELECT 1 UNION ALL SELECT 2 UNION ALL SELECT 3\n\t\t\tUNION ALL SELECT 4 UNION ALL SELECT 5 UNION ALL SELECT 6\n\t\t\tUNION ALL SELECT 7 UNION ALL SELECT 8 UNION ALL SELECT 9\n\t\t) U\n\tWHERE\n\t\t(SYSDATE()-INTERVAL (H+T+U) DAY) > $__timeFrom()\n),\n\n_production_deployment_days as(\n-- When deploying multiple commits in one pipeline, GitLab and BitBucket may generate more than one deployment. 
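The last_few_calendar_months CTE above builds a day series without any calendar table by cross-joining hundreds, tens and units digit tables (0 to 399 days back) and keeping only days newer than $__timeFrom(). A trimmed sketch of the same trick that generates just the last ten days:

-- Sketch: derive a date series from a digits table (last 10 days).
select cast(sysdate() - interval u day as date) as day
from (
	select 0 as u union all select 1 union all select 2 union all select 3
	union all select 4 union all select 5 union all select 6
	union all select 7 union all select 8 union all select 9
) units
order by day;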
However, DevLake consider these deployments as ONE production deployment and use the last one's finished_date as the finished date.\n\tSELECT\n\t\tcdc.cicd_deployment_id as deployment_id,\n\t\tmax(DATE(cdc.finished_date)) as day\n\tFROM cicd_deployment_commits cdc\n\tJOIN project_mapping pm on cdc.cicd_scope_id = pm.row_id and pm.`table` = 'cicd_scopes'\n\tWHERE\n\t\tpm.project_name in (${project})\n\t\tand cdc.result = 'SUCCESS'\n\t\tand cdc.environment = 'PRODUCTION'\n\tGROUP BY 1\n),\n\n_days_weekly_deploy as(\n-- calculate the number of deployment days every week\n\tSELECT\n\t\t\tdate(DATE_ADD(last_few_calendar_months.day, INTERVAL -WEEKDAY(last_few_calendar_months.day) DAY)) as week,\n\t\t\tMAX(if(_production_deployment_days.day is not null, 1, null)) as weeks_deployed,\n\t\t\tCOUNT(distinct _production_deployment_days.day) as days_deployed\n\tFROM \n\t\tlast_few_calendar_months\n\t\tLEFT JOIN _production_deployment_days ON _production_deployment_days.day = last_few_calendar_months.day\n\tGROUP BY week\n\t),\n\n_days_monthly_deploy as(\n-- calculate the number of deployment days every month\n\tSELECT\n\t\t\tdate(DATE_ADD(last_few_calendar_months.day, INTERVAL -DAY(last_few_calendar_months.day)+1 DAY)) as month,\n\t\t\tMAX(if(_production_deployment_days.day is not null, 1, null)) as months_deployed,\n\t\t COUNT(distinct _production_deployment_days.day) as days_deployed\n\tFROM \n\t\tlast_few_calendar_months\n\t\tLEFT JOIN _production_deployment_days ON _production_deployment_days.day = last_few_calendar_months.day\n\tGROUP BY month\n\t),\n\n_days_six_months_deploy AS (\n SELECT\n month,\n SUM(days_deployed) OVER (\n ORDER BY month\n ROWS BETWEEN 5 PRECEDING AND CURRENT ROW\n ) AS days_deployed_per_six_months,\n COUNT(months_deployed) OVER (\n ORDER BY month\n ROWS BETWEEN 5 PRECEDING AND CURRENT ROW\n ) AS months_deployed_count,\n ROW_NUMBER() OVER (\n PARTITION BY DATE_FORMAT(month, '%Y-%m') DIV 6\n ORDER BY month DESC\n ) AS rn\n FROM _days_monthly_deploy\n),\n\n_median_number_of_deployment_days_per_week_ranks as(\n\tSELECT *, percent_rank() over(order by days_deployed) as ranks\n\tFROM _days_weekly_deploy\n),\n\n_median_number_of_deployment_days_per_week as(\n\tSELECT max(days_deployed) as median_number_of_deployment_days_per_week\n\tFROM _median_number_of_deployment_days_per_week_ranks\n\tWHERE ranks <= 0.5\n),\n\n_median_number_of_deployment_days_per_month_ranks as(\n\tSELECT *, percent_rank() over(order by days_deployed) as ranks\n\tFROM _days_monthly_deploy\n),\n\n_median_number_of_deployment_days_per_month as(\n\tSELECT max(days_deployed) as median_number_of_deployment_days_per_month\n\tFROM _median_number_of_deployment_days_per_month_ranks\n\tWHERE ranks <= 0.5\n),\n\n_days_per_six_months_deploy_by_filter AS (\nSELECT\n month,\n days_deployed_per_six_months,\n months_deployed_count\nFROM _days_six_months_deploy\nWHERE rn%6 = 1\n),\n\n\n_median_number_of_deployment_days_per_six_months_ranks as(\n\tSELECT *, percent_rank() over(order by days_deployed_per_six_months) as ranks\n\tFROM _days_per_six_months_deploy_by_filter\n),\n\n_median_number_of_deployment_days_per_six_months as(\n\tSELECT min(days_deployed_per_six_months) as median_number_of_deployment_days_per_six_months, min(months_deployed_count) as is_collected\n\tFROM _median_number_of_deployment_days_per_six_months_ranks\n\tWHERE ranks >= 0.5\n)\n\nSELECT \n CASE\n WHEN ('$benchmarks') = '2023 report' THEN\n\t\t\tCASE \n\t\t\t\tWHEN median_number_of_deployment_days_per_week >= 7 THEN 'On-demand(elite)'\n\t\t\t\tWHEN 
median_number_of_deployment_days_per_week >= 1 THEN 'Between once per day and once per week(high)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_month >= 1 THEN 'Between once per week and once per month(medium)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_month < 1 and is_collected != NULL THEN 'Fewer than once per month(low)'\n\t\t\t\tELSE \"N/A. Please check if you have collected deployments.\" END\n\t \tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\tCASE \n\t\t\t\tWHEN median_number_of_deployment_days_per_week >= 7 THEN 'On-demand(elite)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_month >= 1 THEN 'Between once per day and once per month(high)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_six_months >= 1 THEN 'Between once per month and once every 6 months(medium)'\n\t\t\t\tWHEN median_number_of_deployment_days_per_six_months < 1 and is_collected != NULL THEN 'Fewer than once per six months(low)'\n\t\t\t\tELSE \"N/A. Please check if you have collected deployments.\" END\n\t\tELSE 'Invalid Benchmarks'\n\tEND AS 'Deployment Frequency'\nFROM _median_number_of_deployment_days_per_week, _median_number_of_deployment_days_per_month, _median_number_of_deployment_days_per_six_months", "refId": "A", "select": [ [ @@ -1177,7 +1177,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "-- Metric 2: median lead time for changes\nwith _pr_stats as (\n-- get the cycle time of PRs deployed by the deployments finished in the selected period\n\tSELECT\n\t\tdistinct pr.id,\n\t\tppm.pr_cycle_time\n\tFROM\n\t\tpull_requests pr \n\t\tjoin project_pr_metrics ppm on ppm.id = pr.id\n\t\tjoin project_mapping pm on pr.base_repo_id = pm.row_id and pm.`table` = 'repos'\n\t\tjoin cicd_deployment_commits cdc on ppm.deployment_commit_id = cdc.id\n\tWHERE\n\t pm.project_name in (${project:sqlstring}+'') \n\t\tand pr.merged_date is not null\n\t\tand ppm.pr_cycle_time is not null\n\t\tand $__timeFilter(cdc.finished_date)\n),\n\n_median_change_lead_time_ranks as(\n\tSELECT *, percent_rank() over(order by pr_cycle_time) as ranks\n\tFROM _pr_stats\n),\n\n_median_change_lead_time as(\n-- use median PR cycle time as the median change lead time\n\tSELECT max(pr_cycle_time) as median_change_lead_time\n\tFROM _median_change_lead_time_ranks\n\tWHERE ranks <= 0.5\n)\n\nSELECT \n CASE\n WHEN ('$benchmarks') = '2023 report' THEN\n\t\t\tCASE\n\t\t\t\tWHEN median_change_lead_time < 24 * 60 THEN \"Less than one day(elite)\"\n\t\t\t\tWHEN median_change_lead_time < 7 * 24 * 60 THEN \"Between one day and one week(high)\"\n\t\t\t\tWHEN median_change_lead_time < 30 * 24 * 60 THEN \"Between one week and one month(medium)\"\n\t\t\t\tWHEN median_change_lead_time >= 30 * 24 * 60 THEN \"More than one month(low)\"\n\t\t\t\tELSE \"N/A. Please check if you have collected deployments/pull_requests.\"\n\t\t\t\tEND\n WHEN ('$benchmarks') = '2021 report' THEN\n\t\t CASE\n\t\t\t\tWHEN median_change_lead_time < 60 THEN \"Less than one hour(elite)\"\n\t\t\t\tWHEN median_change_lead_time < 7 * 24 * 60 THEN \"Less than one week(high)\"\n\t\t\t\tWHEN median_change_lead_time < 180 * 24 * 60 THEN \"Between one week and six months(medium)\"\n\t\t\t\tWHEN median_change_lead_time >= 180 * 24 * 60 THEN \"More than six months(low)\"\n\t\t\t\tELSE \"N/A. 
Please check if you have collected deployments/pull_requests.\"\n\t\t\t\tEND\n\t\tELSE 'Invalid Benchmarks'\n\tEND AS median_change_lead_time\nFROM _median_change_lead_time", + "rawSql": "-- Metric 2: median lead time for changes\nwith _pr_stats as (\n-- get the cycle time of PRs deployed by the deployments finished in the selected period\n\tSELECT\n\t\tdistinct pr.id,\n\t\tppm.pr_cycle_time\n\tFROM\n\t\tpull_requests pr \n\t\tjoin project_pr_metrics ppm on ppm.id = pr.id\n\t\tjoin project_mapping pm on pr.base_repo_id = pm.row_id and pm.`table` = 'repos'\n\t\tjoin cicd_deployment_commits cdc on ppm.deployment_commit_id = cdc.id\n\tWHERE\n\t pm.project_name in (${project}) \n\t\tand pr.merged_date is not null\n\t\tand ppm.pr_cycle_time is not null\n\t\tand $__timeFilter(cdc.finished_date)\n),\n\n_median_change_lead_time_ranks as(\n\tSELECT *, percent_rank() over(order by pr_cycle_time) as ranks\n\tFROM _pr_stats\n),\n\n_median_change_lead_time as(\n-- use median PR cycle time as the median change lead time\n\tSELECT max(pr_cycle_time) as median_change_lead_time\n\tFROM _median_change_lead_time_ranks\n\tWHERE ranks <= 0.5\n)\n\nSELECT \n CASE\n WHEN ('$benchmarks') = '2023 report' THEN\n\t\t\tCASE\n\t\t\t\tWHEN median_change_lead_time < 24 * 60 THEN \"Less than one day(elite)\"\n\t\t\t\tWHEN median_change_lead_time < 7 * 24 * 60 THEN \"Between one day and one week(high)\"\n\t\t\t\tWHEN median_change_lead_time < 30 * 24 * 60 THEN \"Between one week and one month(medium)\"\n\t\t\t\tWHEN median_change_lead_time >= 30 * 24 * 60 THEN \"More than one month(low)\"\n\t\t\t\tELSE \"N/A. Please check if you have collected deployments/pull_requests.\"\n\t\t\t\tEND\n WHEN ('$benchmarks') = '2021 report' THEN\n\t\t CASE\n\t\t\t\tWHEN median_change_lead_time < 60 THEN \"Less than one hour(elite)\"\n\t\t\t\tWHEN median_change_lead_time < 7 * 24 * 60 THEN \"Less than one week(high)\"\n\t\t\t\tWHEN median_change_lead_time < 180 * 24 * 60 THEN \"Between one week and six months(medium)\"\n\t\t\t\tWHEN median_change_lead_time >= 180 * 24 * 60 THEN \"More than six months(low)\"\n\t\t\t\tELSE \"N/A. 
Please check if you have collected deployments/pull_requests.\"\n\t\t\t\tEND\n\t\tELSE 'Invalid Benchmarks'\n\tEND AS median_change_lead_time\nFROM _median_change_lead_time", "refId": "A", "select": [ [ @@ -2912,7 +2912,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "-- Metric 3: Median time to restore service \nwith _incidents as (\n-- get the incidents created within the selected time period in the top-right corner\n\tSELECT\n\t distinct i.id,\n\t\tcast(lead_time_minutes as signed) as lead_time_minutes\n\tFROM\n\t\tissues i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id and pm.`table` = 'boards'\n\tWHERE\n\t pm.project_name in (${project:sqlstring}+'')\n\t\tand i.type = 'INCIDENT'\n\t\tand $__timeFilter(i.created_date)\n),\n\n_median_mttr_ranks as(\n\tSELECT *, percent_rank() over(order by lead_time_minutes) as ranks\n\tFROM _incidents\n),\n\n_median_mttr as(\n\tSELECT max(lead_time_minutes) as median_time_to_resolve\n\tFROM _median_mttr_ranks\n\tWHERE ranks <= 0.5\n)\n\nSELECT \n CASE\n WHEN ('$benchmarks') = '2023 report' THEN\n\t\t\tCASE\n\t\t\t\tWHEN median_time_to_resolve < 60 THEN \"Less than one hour(elite)\"\n\t\t\t\tWHEN median_time_to_resolve < 24 * 60 THEN \"Less than one day(high)\"\n\t\t\t\tWHEN median_time_to_resolve < 7 * 24 * 60 THEN \"Between one day and one week(medium)\"\n\t\t\t\tWHEN median_time_to_resolve >= 7 * 24 * 60 THEN \"More than one week(low)\"\n\t\t\t\tELSE \"N/A. Please check if you have collected incidents.\"\n\t\t\t\tEND \n\t\tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\tCASE\n\t\t\t\tWHEN median_time_to_resolve < 60 THEN \"Less than one hour(elite)\"\n\t\t\t\tWHEN median_time_to_resolve < 24 * 60 THEN \"Less than one day(high)\"\n\t\t\t\tWHEN median_time_to_resolve < 7 * 24 * 60 THEN \"Between one day and one week(medium)\"\n\t\t\t\tWHEN median_time_to_resolve >= 7 * 24 * 60 THEN \"More than one week(low)\"\n\t\t\t\tELSE \"N/A. Please check if you have collected incidents.\"\n \t\tEND\n\t\tELSE 'Invalid Benchmarks'\n\tEND AS median_time_to_resolve\nFROM \n\t_median_mttr", + "rawSql": "-- Metric 3: Median time to restore service \nwith _incidents as (\n-- get the incidents created within the selected time period in the top-right corner\n\tSELECT\n\t distinct i.id,\n\t\tcast(lead_time_minutes as signed) as lead_time_minutes\n\tFROM\n\t\tissues i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id and pm.`table` = 'boards'\n\tWHERE\n\t pm.project_name in (${project})\n\t\tand i.type = 'INCIDENT'\n\t\tand $__timeFilter(i.created_date)\n),\n\n_median_mttr_ranks as(\n\tSELECT *, percent_rank() over(order by lead_time_minutes) as ranks\n\tFROM _incidents\n),\n\n_median_mttr as(\n\tSELECT max(lead_time_minutes) as median_time_to_resolve\n\tFROM _median_mttr_ranks\n\tWHERE ranks <= 0.5\n)\n\nSELECT \n CASE\n WHEN ('$benchmarks') = '2023 report' THEN\n\t\t\tCASE\n\t\t\t\tWHEN median_time_to_resolve < 60 THEN \"Less than one hour(elite)\"\n\t\t\t\tWHEN median_time_to_resolve < 24 * 60 THEN \"Less than one day(high)\"\n\t\t\t\tWHEN median_time_to_resolve < 7 * 24 * 60 THEN \"Between one day and one week(medium)\"\n\t\t\t\tWHEN median_time_to_resolve >= 7 * 24 * 60 THEN \"More than one week(low)\"\n\t\t\t\tELSE \"N/A. 
Please check if you have collected incidents.\"\n\t\t\t\tEND \n\t\tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\tCASE\n\t\t\t\tWHEN median_time_to_resolve < 60 THEN \"Less than one hour(elite)\"\n\t\t\t\tWHEN median_time_to_resolve < 24 * 60 THEN \"Less than one day(high)\"\n\t\t\t\tWHEN median_time_to_resolve < 7 * 24 * 60 THEN \"Between one day and one week(medium)\"\n\t\t\t\tWHEN median_time_to_resolve >= 7 * 24 * 60 THEN \"More than one week(low)\"\n\t\t\t\tELSE \"N/A. Please check if you have collected incidents.\"\n \t\tEND\n\t\tELSE 'Invalid Benchmarks'\n\tEND AS median_time_to_resolve\nFROM \n\t_median_mttr", "refId": "A", "select": [ [ @@ -3315,7 +3315,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "-- Metric 4: change failure rate\nwith _deployments as (\n-- When deploying multiple commits in one pipeline, GitLab and BitBucket may generate more than one deployment. However, DevLake consider these deployments as ONE production deployment and use the last one's finished_date as the finished date.\n\tSELECT\n\t\tcdc.cicd_deployment_id as deployment_id,\n\t\tmax(cdc.finished_date) as deployment_finished_date\n\tFROM \n\t\tcicd_deployment_commits cdc\n\t\tJOIN project_mapping pm on cdc.cicd_scope_id = pm.row_id and pm.`table` = 'cicd_scopes'\n\tWHERE\n\t\tpm.project_name in (${project:sqlstring}+'')\n\t\tand cdc.result = 'SUCCESS'\n\t\tand cdc.environment = 'PRODUCTION'\n\tGROUP BY 1\n\tHAVING $__timeFilter(max(cdc.finished_date))\n),\n\n_failure_caused_by_deployments as (\n-- calculate the number of incidents caused by each deployment\n\tSELECT\n\t\td.deployment_id,\n\t\td.deployment_finished_date,\n\t\tcount(distinct case when i.type = 'INCIDENT' then d.deployment_id else null end) as has_incident\n\tFROM\n\t\t_deployments d\n\t\tleft join project_issue_metrics pim on d.deployment_id = pim.deployment_id\n\t\tleft join issues i on pim.id = i.id\n\tGROUP BY 1,2\n),\n\n_change_failure_rate as (\n\tSELECT \n\t\tcase \n\t\t\twhen count(deployment_id) is null then null\n\t\t\telse sum(has_incident)/count(deployment_id) end as change_failure_rate\n\tFROM\n\t\t_failure_caused_by_deployments\n),\n\n_is_collected_data as(\n\tSELECT\n CASE \n WHEN COUNT(i.id) = 0 AND COUNT(cdc.id) = 0 THEN 'No All'\n WHEN COUNT(i.id) = 0 THEN 'No Incidents' \n WHEN COUNT(cdc.id) = 0 THEN 'No Deployments'\n END AS is_collected\nFROM\n (SELECT 1) AS dummy\nLEFT JOIN\n issues i ON i.type = 'INCIDENT'\nLEFT JOIN\n cicd_deployment_commits cdc ON 1=1\n)\n\nSELECT\n CASE\n WHEN ('$benchmarks') = '2023 report' THEN\n\t\t\tCASE \n\t\t\t\tWHEN is_collected = \"No All\" THEN \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\tWHEN is_collected = \"No Incidents\" THEN \"N/A. Please check if you have collected incidents.\"\n\t\t\t\tWHEN is_collected = \"No Deployments\" THEN \"N/A. Please check if you have collected deployments.\"\n\t\t\t\tWHEN change_failure_rate <= 5 THEN \"0-5%(elite)\"\n\t\t\t\tWHEN change_failure_rate <= .10 THEN \"5%-10%(high)\"\n\t\t\t\tWHEN change_failure_rate <= .15 THEN \"10%-15%(medium)\"\n\t\t\t\tWHEN change_failure_rate > .15 THEN \"> 15%(low)\"\n\t\t\t\tELSE \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\tEND\n\t\tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\tCASE \n\t\t\t WHEN is_collected = \"No All\" THEN \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\tWHEN is_collected = \"No Incidents\" THEN \"N/A. 
Please check if you have collected incidents.\"\n\t\t\t\tWHEN is_collected = \"No Deployments\" THEN \"N/A. Please check if you have collected deployments.\"\n\t\t\t\tWHEN change_failure_rate <= .15 THEN \"0-15%(elite)\"\n\t\t\t\tWHEN change_failure_rate <= .20 THEN \"16%-20%(high)\"\n\t\t\t\tWHEN change_failure_rate <= .30 THEN \"21%-30%(medium)\"\n\t\t\t\tWHEN change_failure_rate > .30 THEN \"> 30%(low)\" \n\t\t\t\tELSE \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\tEND\n\t\tELSE 'Invalid Benchmarks'\n\tEND AS change_failure_rate\nFROM \n\t_change_failure_rate, _is_collected_data", + "rawSql": "-- Metric 4: change failure rate\nwith _deployments as (\n-- When deploying multiple commits in one pipeline, GitLab and BitBucket may generate more than one deployment. However, DevLake consider these deployments as ONE production deployment and use the last one's finished_date as the finished date.\n\tSELECT\n\t\tcdc.cicd_deployment_id as deployment_id,\n\t\tmax(cdc.finished_date) as deployment_finished_date\n\tFROM \n\t\tcicd_deployment_commits cdc\n\t\tJOIN project_mapping pm on cdc.cicd_scope_id = pm.row_id and pm.`table` = 'cicd_scopes'\n\tWHERE\n\t\tpm.project_name in (${project})\n\t\tand cdc.result = 'SUCCESS'\n\t\tand cdc.environment = 'PRODUCTION'\n\tGROUP BY 1\n\tHAVING $__timeFilter(max(cdc.finished_date))\n),\n\n_failure_caused_by_deployments as (\n-- calculate the number of incidents caused by each deployment\n\tSELECT\n\t\td.deployment_id,\n\t\td.deployment_finished_date,\n\t\tcount(distinct case when i.type = 'INCIDENT' then d.deployment_id else null end) as has_incident\n\tFROM\n\t\t_deployments d\n\t\tleft join project_issue_metrics pim on d.deployment_id = pim.deployment_id\n\t\tleft join issues i on pim.id = i.id\n\tGROUP BY 1,2\n),\n\n_change_failure_rate as (\n\tSELECT \n\t\tcase \n\t\t\twhen count(deployment_id) is null then null\n\t\t\telse sum(has_incident)/count(deployment_id) end as change_failure_rate\n\tFROM\n\t\t_failure_caused_by_deployments\n),\n\n_is_collected_data as(\n\tSELECT\n CASE \n WHEN COUNT(i.id) = 0 AND COUNT(cdc.id) = 0 THEN 'No All'\n WHEN COUNT(i.id) = 0 THEN 'No Incidents' \n WHEN COUNT(cdc.id) = 0 THEN 'No Deployments'\n END AS is_collected\nFROM\n (SELECT 1) AS dummy\nLEFT JOIN\n issues i ON i.type = 'INCIDENT'\nLEFT JOIN\n cicd_deployment_commits cdc ON 1=1\n)\n\nSELECT\n CASE\n WHEN ('$benchmarks') = '2023 report' THEN\n\t\t\tCASE \n\t\t\t\tWHEN is_collected = \"No All\" THEN \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\tWHEN is_collected = \"No Incidents\" THEN \"N/A. Please check if you have collected incidents.\"\n\t\t\t\tWHEN is_collected = \"No Deployments\" THEN \"N/A. Please check if you have collected deployments.\"\n\t\t\t\tWHEN change_failure_rate <= 5 THEN \"0-5%(elite)\"\n\t\t\t\tWHEN change_failure_rate <= .10 THEN \"5%-10%(high)\"\n\t\t\t\tWHEN change_failure_rate <= .15 THEN \"10%-15%(medium)\"\n\t\t\t\tWHEN change_failure_rate > .15 THEN \"> 15%(low)\"\n\t\t\t\tELSE \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\tEND\n\t\tWHEN ('$benchmarks') = '2021 report' THEN\n\t\t\tCASE \n\t\t\t WHEN is_collected = \"No All\" THEN \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\tWHEN is_collected = \"No Incidents\" THEN \"N/A. Please check if you have collected incidents.\"\n\t\t\t\tWHEN is_collected = \"No Deployments\" THEN \"N/A. 
Please check if you have collected deployments.\"\n\t\t\t\tWHEN change_failure_rate <= .15 THEN \"0-15%(elite)\"\n\t\t\t\tWHEN change_failure_rate <= .20 THEN \"16%-20%(high)\"\n\t\t\t\tWHEN change_failure_rate <= .30 THEN \"21%-30%(medium)\"\n\t\t\t\tWHEN change_failure_rate > .30 THEN \"> 30%(low)\" \n\t\t\t\tELSE \"N/A. Please check if you have collected deployments/incidents.\"\n\t\t\t\tEND\n\t\tELSE 'Invalid Benchmarks'\n\tEND AS change_failure_rate\nFROM \n\t_change_failure_rate, _is_collected_data", "refId": "A", "select": [ [ diff --git a/grafana/dashboards/EngineeringOverview.json b/grafana/dashboards/EngineeringOverview.json index 69705bfd4ce..5755f05d944 100644 --- a/grafana/dashboards/EngineeringOverview.json +++ b/grafana/dashboards/EngineeringOverview.json @@ -119,7 +119,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select\n count(distinct i.id)\nfrom\n issues i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\n\tjoin project_mapping pm on b.id = pm.row_id\nwhere\n pm.project_name in (${project:sqlstring}+'') and\n i.priority in (${priority:sqlstring}+'') and\n i.type = 'BUG' and\n date(i.created_date) between STR_TO_DATE('$month','%Y-%m-%d') and STR_TO_DATE('$month','%Y-%m-%d') + INTERVAL 1 MONTH - INTERVAL 1 DAY;", + "rawSql": "select\n count(distinct i.id)\nfrom\n issues i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\n\tjoin project_mapping pm on b.id = pm.row_id\nwhere\n pm.project_name in (${project}) and\n i.priority in (${priority}) and\n i.type = 'BUG' and\n date(i.created_date) between STR_TO_DATE('$month','%Y-%m-%d') and STR_TO_DATE('$month','%Y-%m-%d') + INTERVAL 1 MONTH - INTERVAL 1 DAY;", "refId": "A", "select": [ [ @@ -250,7 +250,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "with _issues as(\n select\n DATE_ADD(date(i.created_date), INTERVAL -DAY(date(i.created_date))+1 DAY) as time,\n count(distinct i.id) as defect_count\n from\n issues i\n join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id\n where\n pm.project_name in (${project:sqlstring}+'') and\n i.priority in (${priority:sqlstring}+'')\n and i.type = 'BUG'\n and $__timeFilter(i.created_date)\n and i.created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n group by time\n)\n\nselect\n date_format(time,'%M %Y') as month,\n defect_count\nfrom _issues\norder by time asc\n", + "rawSql": "with _issues as(\n select\n DATE_ADD(date(i.created_date), INTERVAL -DAY(date(i.created_date))+1 DAY) as time,\n count(distinct i.id) as defect_count\n from\n issues i\n join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id\n where\n pm.project_name in (${project}) and\n i.priority in (${priority})\n and i.type = 'BUG'\n and $__timeFilter(i.created_date)\n and i.created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n group by time\n)\n\nselect\n date_format(time,'%M %Y') as month,\n defect_count\nfrom _issues\norder by time asc\n", "refId": "A", "select": [ [ @@ -354,7 +354,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "select \r\n avg(lead_time_minutes/1440) as value\r\nfrom issues i\r\n\tjoin board_issues bi on i.id = bi.issue_id\r\n\tjoin boards b on bi.board_id = b.id\r\n\tjoin 
project_mapping pm on b.id = pm.row_id\r\nwhere \r\n pm.project_name in (${project:sqlstring}+'') and\r\n i.type in (${type:sqlstring}+'')\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)", + "rawSql": "select \r\n avg(lead_time_minutes/1440) as value\r\nfrom issues i\r\n\tjoin board_issues bi on i.id = bi.issue_id\r\n\tjoin boards b on bi.board_id = b.id\r\n\tjoin project_mapping pm on b.id = pm.row_id\r\nwhere \r\n pm.project_name in (${project}) and\r\n i.type in (${type})\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)", "refId": "A", "select": [ [ @@ -475,7 +475,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "with _issues as(\n SELECT\n DATE_ADD(date(i.resolution_date), INTERVAL -DAY(date(i.resolution_date))+1 DAY) as time,\n AVG(i.lead_time_minutes/1440) as issue_lead_time\n FROM issues i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id\n WHERE\n pm.project_name in (${project:sqlstring}+'')\n and i.status = \"DONE\"\n and $__timeFilter(i.resolution_date)\n and i.resolution_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n group by 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n issue_lead_time as \"Mean Requirement Lead Time in Days\"\nFROM _issues\nORDER BY time", + "rawSql": "with _issues as(\n SELECT\n DATE_ADD(date(i.resolution_date), INTERVAL -DAY(date(i.resolution_date))+1 DAY) as time,\n AVG(i.lead_time_minutes/1440) as issue_lead_time\n FROM issues i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id\n WHERE\n pm.project_name in (${project})\n and i.status = \"DONE\"\n and $__timeFilter(i.resolution_date)\n and i.resolution_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n group by 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n issue_lead_time as \"Mean Requirement Lead Time in Days\"\nFROM _issues\nORDER BY time", "refId": "A", "select": [ [ @@ -562,7 +562,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select\n count(distinct author_name)\nfrom\n commits c\n join repo_commits rc on c.sha = rc.commit_sha\n join project_mapping pm on rc.repo_id = pm.row_id\nwhere\n date(authored_date) between\n STR_TO_DATE('$month','%Y-%m-%d') and\n STR_TO_DATE('$month','%Y-%m-%d') + INTERVAL 1 MONTH - INTERVAL 1 DAY and\n pm.project_name in (${project:sqlstring}+'');", + "rawSql": "select\n count(distinct author_name)\nfrom\n commits c\n join repo_commits rc on c.sha = rc.commit_sha\n join project_mapping pm on rc.repo_id = pm.row_id\nwhere\n date(authored_date) between\n STR_TO_DATE('$month','%Y-%m-%d') and\n STR_TO_DATE('$month','%Y-%m-%d') + INTERVAL 1 MONTH - INTERVAL 1 DAY and\n pm.project_name in (${project});", "refId": "A", "select": [ [ @@ -675,7 +675,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "with _developers as(\n select\n DATE_ADD(date(c.authored_date), INTERVAL -DAY(date(c.authored_date))+1 DAY) as time,\n count(distinct author_name) as developer_count\n from\n commits c\n join repo_commits rc on c.sha = rc.commit_sha\n join project_mapping pm on rc.repo_id = pm.row_id\n where\n $__timeFilter(c.authored_date)\n and c.authored_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n and pm.project_name in 
(${project:sqlstring}+'')\n group by time\n)\n\nselect\n date_format(time,'%M %Y') as month,\n developer_count\nfrom _developers\norder by time asc", + "rawSql": "with _developers as(\n select\n DATE_ADD(date(c.authored_date), INTERVAL -DAY(date(c.authored_date))+1 DAY) as time,\n count(distinct author_name) as developer_count\n from\n commits c\n join repo_commits rc on c.sha = rc.commit_sha\n join project_mapping pm on rc.repo_id = pm.row_id\n where\n $__timeFilter(c.authored_date)\n and c.authored_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n and pm.project_name in (${project})\n group by time\n)\n\nselect\n date_format(time,'%M %Y') as month,\n developer_count\nfrom _developers\norder by time asc", "refId": "A", "select": [ [ @@ -771,7 +771,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "with _num_issues_with_sprint_updated as (\n select\n count(distinct i.id) as num_issues_with_sprint_updated\n from\n issues i\n join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id\n join issue_changelogs c on i.id = c.issue_id\n where\n pm.project_name in (${project:sqlstring}+'') and\n c.field_name = 'Sprint' and\n c.original_from_value != '' and\n c.original_to_value != '' and\n date(i.created_date) between\n STR_TO_DATE('$month','%Y-%m-%d') and\n STR_TO_DATE('$month','%Y-%m-%d') + INTERVAL 1 MONTH - INTERVAL 1 DAY\n),\n\n_total_num_issues as (\n select\n count(distinct i.id) as total_num_issues\n from\n issues i\n join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id\n where\n pm.project_name in (${project:sqlstring}+'') and\n date(i.created_date) between\n STR_TO_DATE('$month','%Y-%m-%d') and\n STR_TO_DATE('$month','%Y-%m-%d') + INTERVAL 1 MONTH - INTERVAL 1 DAY\n)\n\nselect\n now() as time,\n 100 - 100 * (select 1.0 * num_issues_with_sprint_updated from _num_issues_with_sprint_updated) / (select total_num_issues from _total_num_issues) as ratio;", + "rawSql": "with _num_issues_with_sprint_updated as (\n select\n count(distinct i.id) as num_issues_with_sprint_updated\n from\n issues i\n join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id\n join issue_changelogs c on i.id = c.issue_id\n where\n pm.project_name in (${project}) and\n c.field_name = 'Sprint' and\n c.original_from_value != '' and\n c.original_to_value != '' and\n date(i.created_date) between\n STR_TO_DATE('$month','%Y-%m-%d') and\n STR_TO_DATE('$month','%Y-%m-%d') + INTERVAL 1 MONTH - INTERVAL 1 DAY\n),\n\n_total_num_issues as (\n select\n count(distinct i.id) as total_num_issues\n from\n issues i\n join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id\n where\n pm.project_name in (${project}) and\n date(i.created_date) between\n STR_TO_DATE('$month','%Y-%m-%d') and\n STR_TO_DATE('$month','%Y-%m-%d') + INTERVAL 1 MONTH - INTERVAL 1 DAY\n)\n\nselect\n now() as time,\n 100 - 100 * (select 1.0 * num_issues_with_sprint_updated from _num_issues_with_sprint_updated) / (select total_num_issues from _total_num_issues) as ratio;", "refId": "A", "select": [ [ @@ -907,7 +907,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "with _num_issues_with_sprint_updated as (\n select\n DATE_ADD(date(i.created_date), INTERVAL 
-DAY(date(i.created_date))+1 DAY) as time,\n count(distinct i.id) as num_issues_with_sprint_updated\n from\n issues i\n join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id\n join issue_changelogs c on i.id = c.issue_id\n where\n pm.project_name in (${project:sqlstring}+'') and\n c.field_name = 'Sprint'\n and c.original_from_value != '' \n and c.original_to_value != ''\n and $__timeFilter(i.created_date)\n and i.created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n group by time\n),\n\n_total_num_issues as (\n select\n DATE_ADD(date(i.created_date), INTERVAL -DAY(date(i.created_date))+1 DAY) as time,\n count(distinct i.id) as total_num_issues\n from\n issues i\n join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id\n where\n pm.project_name in (${project:sqlstring}+'') and\n $__timeFilter(i.created_date)\n and i.created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n group by time\n)\n\nselect\n x.time,\n 100 - 100 * (1.0 * x.num_issues_with_sprint_updated / y.total_num_issues) as delivery_rate\nfrom \n _num_issues_with_sprint_updated x \n join _total_num_issues y on x.time = y.time", + "rawSql": "with _num_issues_with_sprint_updated as (\n select\n DATE_ADD(date(i.created_date), INTERVAL -DAY(date(i.created_date))+1 DAY) as time,\n count(distinct i.id) as num_issues_with_sprint_updated\n from\n issues i\n join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id\n join issue_changelogs c on i.id = c.issue_id\n where\n pm.project_name in (${project}) and\n c.field_name = 'Sprint'\n and c.original_from_value != '' \n and c.original_to_value != ''\n and $__timeFilter(i.created_date)\n and i.created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n group by time\n),\n\n_total_num_issues as (\n select\n DATE_ADD(date(i.created_date), INTERVAL -DAY(date(i.created_date))+1 DAY) as time,\n count(distinct i.id) as total_num_issues\n from\n issues i\n join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id\n where\n pm.project_name in (${project}) and\n $__timeFilter(i.created_date)\n and i.created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n group by time\n)\n\nselect\n x.time,\n 100 - 100 * (1.0 * x.num_issues_with_sprint_updated / y.total_num_issues) as delivery_rate\nfrom \n _num_issues_with_sprint_updated x \n join _total_num_issues y on x.time = y.time", "refId": "A", "select": [ [ @@ -1015,7 +1015,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select\n count(distinct pr.id)\nfrom\n pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \nwhere\n pr.merged_date is not null \n and date(pr.merged_date) between\n STR_TO_DATE('$month','%Y-%m-%d')\n and STR_TO_DATE('$month','%Y-%m-%d') + INTERVAL 1 MONTH - INTERVAL 1 DAY \n and pm.project_name in (${project:sqlstring}+'');", + "rawSql": "select\n count(distinct pr.id)\nfrom\n pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \nwhere\n pr.merged_date is not null \n and date(pr.merged_date) between\n 
STR_TO_DATE('$month','%Y-%m-%d')\n and STR_TO_DATE('$month','%Y-%m-%d') + INTERVAL 1 MONTH - INTERVAL 1 DAY \n and pm.project_name in (${project});", "refId": "A", "select": [ [ @@ -1153,7 +1153,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "with _merged_prs as(\n select\n DATE_ADD(date(pr.merged_date), INTERVAL -DAY(date(pr.merged_date))+1 DAY) as time,\n count(distinct pr.id) as pr_merged_count\n from\n pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n where\n pm.project_name in (${project:sqlstring}+'')\n and pr.merged_date is not null\n and $__timeFilter(pr.merged_date)\n and pr.merged_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n group by time\n)\n\nselect\n date_format(time,'%M %Y') as month,\n pr_merged_count\nfrom _merged_prs\norder by time asc", + "rawSql": "with _merged_prs as(\n select\n DATE_ADD(date(pr.merged_date), INTERVAL -DAY(date(pr.merged_date))+1 DAY) as time,\n count(distinct pr.id) as pr_merged_count\n from\n pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n where\n pm.project_name in (${project})\n and pr.merged_date is not null\n and $__timeFilter(pr.merged_date)\n and pr.merged_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n group by time\n)\n\nselect\n date_format(time,'%M %Y') as month,\n pr_merged_count\nfrom _merged_prs\norder by time asc", "refId": "A", "select": [ [ @@ -1265,7 +1265,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select \n 100*count(distinct case when pr.id in (select pull_request_id from pull_request_issues) then pr.id else null end)/count(distinct pr.id) as unlinked_pr_rate\nfrom pull_requests pr\njoin project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \nwhere pm.project_name in (${project:sqlstring}+'')", + "rawSql": "select \n 100*count(distinct case when pr.id in (select pull_request_id from pull_request_issues) then pr.id else null end)/count(distinct pr.id) as unlinked_pr_rate\nfrom pull_requests pr\njoin project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \nwhere pm.project_name in (${project})", "refId": "A", "select": [ [ @@ -1400,7 +1400,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select\n DATE_ADD(date(created_date), INTERVAL -DAY(date(created_date))+1 DAY) as time,\n 100*count(distinct case when pr.id in (select pull_request_id from pull_request_issues) then pr.id else null end)/count(distinct pr.id) as unlinked_pr_rate\nfrom pull_requests pr\njoin project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \nwhere pm.project_name in (${project:sqlstring}+'')\nand $__timeFilter(created_date)\nand created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\ngroup by time\n\n", + "rawSql": "select\n DATE_ADD(date(created_date), INTERVAL -DAY(date(created_date))+1 DAY) as time,\n 100*count(distinct case when pr.id in (select pull_request_id from pull_request_issues) then pr.id else null end)/count(distinct pr.id) as unlinked_pr_rate\nfrom pull_requests pr\njoin project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \nwhere pm.project_name in (${project})\nand $__timeFilter(created_date)\nand created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 
MONTH)\ngroup by time\n\n", "refId": "A", "select": [ [ @@ -1502,7 +1502,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "with _commits_groupby_name_and_date as (\n select\n author_name,\n date(authored_date) as _day,\n count(distinct c.sha)\n from\n commits c\n join repo_commits rc on c.sha = rc.commit_sha\n join project_mapping pm on rc.repo_id = pm.row_id\n where\n pm.project_name in (${project:sqlstring}+'') and\n WEEKDAY(authored_date) between 0 and 4 and\n date(authored_date) between\n STR_TO_DATE('$month','%Y-%m-%d') and\n STR_TO_DATE('$month','%Y-%m-%d') + INTERVAL 1 MONTH - INTERVAL 1 DAY\n group by\n author_name, date(authored_date)\n)\n\nselect 100 * count(*) / (count(distinct author_name) * count(distinct _day))\nfrom _commits_groupby_name_and_date;", + "rawSql": "with _commits_groupby_name_and_date as (\n select\n author_name,\n date(authored_date) as _day,\n count(distinct c.sha)\n from\n commits c\n join repo_commits rc on c.sha = rc.commit_sha\n join project_mapping pm on rc.repo_id = pm.row_id\n where\n pm.project_name in (${project}) and\n WEEKDAY(authored_date) between 0 and 4 and\n date(authored_date) between\n STR_TO_DATE('$month','%Y-%m-%d') and\n STR_TO_DATE('$month','%Y-%m-%d') + INTERVAL 1 MONTH - INTERVAL 1 DAY\n group by\n author_name, date(authored_date)\n)\n\nselect 100 * count(*) / (count(distinct author_name) * count(distinct _day))\nfrom _commits_groupby_name_and_date;", "refId": "A", "select": [ [ @@ -1631,7 +1631,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "with _commits_groupby_name_and_date as (\n select\n author_name,\n date(authored_date) as _day,\n count(distinct c.sha)\n from\n commits c\n join repo_commits rc on c.sha = rc.commit_sha\n join project_mapping pm on rc.repo_id = pm.row_id\n where\n pm.project_name in (${project:sqlstring}+'') and\n (WEEKDAY(authored_date) between 0 and 4)\n and $__timeFilter(authored_date)\n and authored_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n group by 1,2\n)\n\nselect\n DATE_ADD(_day, INTERVAL -DAY(_day)+1 DAY) as time,\n 100*count(*)/(count(distinct author_name) * count(distinct _day)) as working_days_percentatages_per_month\nfrom _commits_groupby_name_and_date\ngroup by time", + "rawSql": "with _commits_groupby_name_and_date as (\n select\n author_name,\n date(authored_date) as _day,\n count(distinct c.sha)\n from\n commits c\n join repo_commits rc on c.sha = rc.commit_sha\n join project_mapping pm on rc.repo_id = pm.row_id\n where\n pm.project_name in (${project}) and\n (WEEKDAY(authored_date) between 0 and 4)\n and $__timeFilter(authored_date)\n and authored_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n group by 1,2\n)\n\nselect\n DATE_ADD(_day, INTERVAL -DAY(_day)+1 DAY) as time,\n 100*count(*)/(count(distinct author_name) * count(distinct _day)) as working_days_percentatages_per_month\nfrom _commits_groupby_name_and_date\ngroup by time", "refId": "A", "select": [ [ @@ -1740,7 +1740,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select\n AVG(TIMESTAMPDIFF(MINUTE, pr.created_date, pr.merged_date) / 1440)\nfrom\n pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \nwhere\n pm.project_name in (${project:sqlstring}+'') and\n pr.merged_date is not null\n and date(pr.created_date) between\n STR_TO_DATE('$month','%Y-%m-%d') \n and 
STR_TO_DATE('$month','%Y-%m-%d') + INTERVAL 1 MONTH - INTERVAL 1 DAY", + "rawSql": "select\n AVG(TIMESTAMPDIFF(MINUTE, pr.created_date, pr.merged_date) / 1440)\nfrom\n pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \nwhere\n pm.project_name in (${project}) and\n pr.merged_date is not null\n and date(pr.created_date) between\n STR_TO_DATE('$month','%Y-%m-%d') \n and STR_TO_DATE('$month','%Y-%m-%d') + INTERVAL 1 MONTH - INTERVAL 1 DAY", "refId": "A", "select": [ [ @@ -1860,7 +1860,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select\n DATE_ADD(date(pr.created_date), INTERVAL -DAY(date(pr.created_date))+1 DAY) as time,\n AVG(TIMESTAMPDIFF(MINUTE, pr.created_date, pr.merged_date) / 1440) as pr_time_to_merge_in_days\nfrom\n pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \nwhere\n pm.project_name in (${project:sqlstring}+'') and\n pr.merged_date is not null\n and $__timeFilter(pr.created_date)\n and pr.created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\ngroup by time\norder by time", + "rawSql": "select\n DATE_ADD(date(pr.created_date), INTERVAL -DAY(date(pr.created_date))+1 DAY) as time,\n AVG(TIMESTAMPDIFF(MINUTE, pr.created_date, pr.merged_date) / 1440) as pr_time_to_merge_in_days\nfrom\n pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \nwhere\n pm.project_name in (${project}) and\n pr.merged_date is not null\n and $__timeFilter(pr.created_date)\n and pr.created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\ngroup by time\norder by time", "refId": "A", "select": [ [ @@ -1990,7 +1990,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select\n i.priority as 'Priority',\n AVG(TIMESTAMPDIFF(MINUTE, i.created_date, NOW()) / 1440) as 'Average Age'\nfrom\n issues i\n join board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\n\tjoin project_mapping pm on b.id = pm.row_id\nwhere\n pm.project_name in (${project:sqlstring}+'') and\n i.status = 'TODO'\n and i.type = 'BUG'\n and i.priority in (${priority:sqlstring}+'')\ngroup by\n i.priority", + "rawSql": "select\n i.priority as 'Priority',\n AVG(TIMESTAMPDIFF(MINUTE, i.created_date, NOW()) / 1440) as 'Average Age'\nfrom\n issues i\n join board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\n\tjoin project_mapping pm on b.id = pm.row_id\nwhere\n pm.project_name in (${project}) and\n i.status = 'TODO'\n and i.type = 'BUG'\n and i.priority in (${priority})\ngroup by\n i.priority", "refId": "A", "select": [ [ @@ -2079,7 +2079,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select\n count(distinct case when i.type = 'BUG' then i.id else null end) as 'Bug',\n count(distinct case when i.type != 'BUG' and epic_key != '' then i.id else null end) as 'Strategic',\n count(distinct case when i.type != 'BUG' and epic_key = '' then i.id else null end) as 'Non-Strategic'\nfrom\n issues i\n join board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\n\tjoin project_mapping pm on b.id = pm.row_id\nwhere\n pm.project_name in (${project:sqlstring}+'') and\n i.resolution_date is not null and\n date(resolution_date) between\n STR_TO_DATE('$month','%Y-%m-%d') and\n STR_TO_DATE('$month','%Y-%m-%d') + INTERVAL 1 MONTH - INTERVAL 1 DAY", + 
"rawSql": "select\n count(distinct case when i.type = 'BUG' then i.id else null end) as 'Bug',\n count(distinct case when i.type != 'BUG' and epic_key != '' then i.id else null end) as 'Strategic',\n count(distinct case when i.type != 'BUG' and epic_key = '' then i.id else null end) as 'Non-Strategic'\nfrom\n issues i\n join board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\n\tjoin project_mapping pm on b.id = pm.row_id\nwhere\n pm.project_name in (${project}) and\n i.resolution_date is not null and\n date(resolution_date) between\n STR_TO_DATE('$month','%Y-%m-%d') and\n STR_TO_DATE('$month','%Y-%m-%d') + INTERVAL 1 MONTH - INTERVAL 1 DAY", "refId": "A", "select": [ [ diff --git a/grafana/dashboards/EngineeringThroughputAndCycleTime.json b/grafana/dashboards/EngineeringThroughputAndCycleTime.json index 80f2d4a131f..4fe7f9c8637 100644 --- a/grafana/dashboards/EngineeringThroughputAndCycleTime.json +++ b/grafana/dashboards/EngineeringThroughputAndCycleTime.json @@ -207,7 +207,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n DATE_ADD(date(pr.created_date), INTERVAL -$interval(date(pr.created_date))+1 DAY) as time,\n count(distinct pr.id) as \"PR: Opened\",\n count(distinct case when pr.merged_date is not null then id else null end) as \"PR: Merged\"\nFROM pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \nWHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project:sqlstring}+'')\ngroup by 1\n", + "rawSql": "SELECT\n DATE_ADD(date(pr.created_date), INTERVAL -$interval(date(pr.created_date))+1 DAY) as time,\n count(distinct pr.id) as \"PR: Opened\",\n count(distinct case when pr.merged_date is not null then id else null end) as \"PR: Merged\"\nFROM pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \nWHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project})\ngroup by 1\n", "refId": "A", "select": [ [ @@ -356,7 +356,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n DATE_ADD(date(i.created_date), INTERVAL -$interval(date(i.created_date))+1 DAY) as time,\n count(distinct i.id) as 'Issues Opened',\n count(distinct case when i.status = 'DONE' then i.id else null end) as 'Issues Completed'\nFROM issues i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\n\tjoin project_mapping pm on b.id = pm.row_id\nWHERE\n $__timeFilter(i.created_date)\n and pm.project_name in (${project:sqlstring}+'')\ngroup by 1", + "rawSql": "SELECT\n DATE_ADD(date(i.created_date), INTERVAL -$interval(date(i.created_date))+1 DAY) as time,\n count(distinct i.id) as 'Issues Opened',\n count(distinct case when i.status = 'DONE' then i.id else null end) as 'Issues Completed'\nFROM issues i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\n\tjoin project_mapping pm on b.id = pm.row_id\nWHERE\n $__timeFilter(i.created_date)\n and pm.project_name in (${project})\ngroup by 1", "refId": "A", "select": [ [ @@ -476,7 +476,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n DATE_ADD(date(i.resolution_date), INTERVAL -$interval(date(i.resolution_date))+1 DAY) as time,\n sum(case when i.status = 'DONE' then i.story_point else 0 end) as 'Story Points Completed'\nFROM (\n SELECT DISTINCT i.id, i.resolution_date, i.status, i.story_point\n FROM issues i\n join board_issues bi on i.id = bi.issue_id\n join boards b on 
bi.board_id = b.id\n join project_mapping pm on b.id = pm.row_id\n WHERE\n $__timeFilter(i.resolution_date)\n and pm.project_name in (${project:sqlstring}+'')\n) as i\ngroup by 1\norder by 1", + "rawSql": "SELECT\n DATE_ADD(date(i.resolution_date), INTERVAL -$interval(date(i.resolution_date))+1 DAY) as time,\n sum(case when i.status = 'DONE' then i.story_point else 0 end) as 'Story Points Completed'\nFROM (\n SELECT DISTINCT i.id, i.resolution_date, i.status, i.story_point\n FROM issues i\n join board_issues bi on i.id = bi.issue_id\n join boards b on bi.board_id = b.id\n join project_mapping pm on b.id = pm.row_id\n WHERE\n $__timeFilter(i.resolution_date)\n and pm.project_name in (${project})\n) as i\ngroup by 1\norder by 1", "refId": "A", "select": [ [ @@ -612,7 +612,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n DATE_ADD(date(pr.created_date), INTERVAL -$interval(date(pr.created_date))+1 DAY) as time,\n count(distinct prc.id)/count(distinct pr.id) as \"PR Review Depth\"\nFROM \n pull_requests pr\n left join pull_request_comments prc on pr.id = prc.pull_request_id\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \nWHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project:sqlstring}+'')\n and pr.merged_date is not null\ngroup by 1\n", + "rawSql": "SELECT\n DATE_ADD(date(pr.created_date), INTERVAL -$interval(date(pr.created_date))+1 DAY) as time,\n count(distinct prc.id)/count(distinct pr.id) as \"PR Review Depth\"\nFROM \n pull_requests pr\n left join pull_request_comments prc on pr.id = prc.pull_request_id\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \nWHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project})\n and pr.merged_date is not null\ngroup by 1\n", "refId": "A", "select": [ [ @@ -732,7 +732,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n DATE_ADD(date(i.created_date), INTERVAL -$interval(date(i.created_date))+1 DAY) as time,\n count(distinct i.id) as 'P0/P1 Bugs'\nFROM issues i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\n\tjoin project_mapping pm on b.id = pm.row_id\nWHERE\n $__timeFilter(i.created_date)\n and pm.project_name in (${project:sqlstring}+'')\n and i.type = 'BUG'\n and i.priority in (${priority:sqlstring}+'')\ngroup by 1", + "rawSql": "SELECT\n DATE_ADD(date(i.created_date), INTERVAL -$interval(date(i.created_date))+1 DAY) as time,\n count(distinct i.id) as 'P0/P1 Bugs'\nFROM issues i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\n\tjoin project_mapping pm on b.id = pm.row_id\nWHERE\n $__timeFilter(i.created_date)\n and pm.project_name in (${project})\n and i.type = 'BUG'\n and i.priority in (${priority})\ngroup by 1", "refId": "A", "select": [ [ @@ -852,7 +852,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _pr_commits_data as(\n SELECT\n DATE_ADD(date(pr.created_date), INTERVAL -$interval(date(pr.created_date))+1 DAY) as time,\n pr.id as pr_id,\n pr.merge_commit_sha,\n sum(c.additions)+sum(c.deletions) as loc\n FROM \n pull_requests pr\n left join commits c on pr.merge_commit_sha = c.sha\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n WHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project:sqlstring}+'')\n and pr.status = 'MERGED'\n group by 1,2,3\n)\n\nSELECT \n time,\n sum(loc)/count(distinct pr_id) as 'PR Size'\nFROM 
_pr_commits_data\nGROUP BY 1", + "rawSql": "with _pr_commits_data as(\n SELECT\n DATE_ADD(date(pr.created_date), INTERVAL -$interval(date(pr.created_date))+1 DAY) as time,\n pr.id as pr_id,\n pr.merge_commit_sha,\n sum(c.additions)+sum(c.deletions) as loc\n FROM \n pull_requests pr\n left join commits c on pr.merge_commit_sha = c.sha\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n WHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project})\n and pr.status = 'MERGED'\n group by 1,2,3\n)\n\nSELECT \n time,\n sum(loc)/count(distinct pr_id) as 'PR Size'\nFROM _pr_commits_data\nGROUP BY 1", "refId": "A", "select": [ [ @@ -985,7 +985,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n DATE_ADD(date(pr.created_date), INTERVAL -$interval(date(pr.created_date))+1 DAY) as time,\n sum(case when pr.id not in (SELECT pull_request_id FROM pull_request_comments) then 1 else 0 end) as \"PRs Merged w/o Review\"\nFROM \n pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \nWHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project:sqlstring}+'')\n and pr.merged_date is not null\nGROUP BY 1\nORDER BY 1", + "rawSql": "SELECT\n DATE_ADD(date(pr.created_date), INTERVAL -$interval(date(pr.created_date))+1 DAY) as time,\n sum(case when pr.id not in (SELECT pull_request_id FROM pull_request_comments) then 1 else 0 end) as \"PRs Merged w/o Review\"\nFROM \n pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \nWHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project})\n and pr.merged_date is not null\nGROUP BY 1\nORDER BY 1", "refId": "A", "select": [ [ @@ -1158,7 +1158,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _prs as(\n SELECT\n pr.id,\n pr.created_date as pr_issued_date,\n -- convert null to 0 if a PR has no cycle_time to make sure cycle_time equals the sum of the four metrics below\n\t\tcoalesce(prm.pr_cycle_time/60,0) as cycle_time\n FROM pull_requests pr\n left join project_pr_metrics prm on pr.id = prm.id\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n WHERE\n $__timeFilter(pr.created_date)\n and pr.created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n and pm.project_name in (${project:sqlstring}+'')\n GROUP BY 1,2,3\n)\n\nSELECT \n DATE_ADD(date(pr_issued_date), INTERVAL -DAYOFMONTH(date(pr_issued_date))+1 DAY) as time,\n avg(cycle_time) as 'PR Cycle Time(h)'\nFROM _prs\nGROUP BY 1\nORDER BY 1", + "rawSql": "with _prs as(\n SELECT\n pr.id,\n pr.created_date as pr_issued_date,\n -- convert null to 0 if a PR has no cycle_time to make sure cycle_time equals the sum of the four metrics below\n\t\tcoalesce(prm.pr_cycle_time/60,0) as cycle_time\n FROM pull_requests pr\n left join project_pr_metrics prm on pr.id = prm.id\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n WHERE\n $__timeFilter(pr.created_date)\n and pr.created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n and pm.project_name in (${project})\n GROUP BY 1,2,3\n)\n\nSELECT \n DATE_ADD(date(pr_issued_date), INTERVAL -DAYOFMONTH(date(pr_issued_date))+1 DAY) as time,\n avg(cycle_time) as 'PR Cycle Time(h)'\nFROM _prs\nGROUP BY 1\nORDER BY 1", "refId": "A", "select": [ [ @@ -1274,7 +1274,7 @@ "hide": false, "metricColumn": "none", "rawQuery": 
true, - "rawSql": "with _prs as(\n SELECT\n pr.id,\n pr.created_date as pr_issued_date,\n -- convert null to 0 if a PR has no coding_time to make sure cycle_time equals the sum of the four sub-metrics\n\t\tcoalesce(prm.pr_coding_time/60,0) as coding_time\n FROM pull_requests pr\n left join project_pr_metrics prm on pr.id = prm.id\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n WHERE\n $__timeFilter(pr.created_date)\n and pr.created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n and pm.project_name in (${project:sqlstring}+'')\n GROUP BY 1,2,3\n)\n\nSELECT \n DATE_ADD(date(pr_issued_date), INTERVAL -$interval(date(pr_issued_date))+1 DAY) as time,\n avg(coding_time) as 'Coding Time(h)'\nFROM _prs\nGROUP BY 1\nORDER BY 1", + "rawSql": "with _prs as(\n SELECT\n pr.id,\n pr.created_date as pr_issued_date,\n -- convert null to 0 if a PR has no coding_time to make sure cycle_time equals the sum of the four sub-metrics\n\t\tcoalesce(prm.pr_coding_time/60,0) as coding_time\n FROM pull_requests pr\n left join project_pr_metrics prm on pr.id = prm.id\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n WHERE\n $__timeFilter(pr.created_date)\n and pr.created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n and pm.project_name in (${project})\n GROUP BY 1,2,3\n)\n\nSELECT \n DATE_ADD(date(pr_issued_date), INTERVAL -$interval(date(pr_issued_date))+1 DAY) as time,\n avg(coding_time) as 'Coding Time(h)'\nFROM _prs\nGROUP BY 1\nORDER BY 1", "refId": "A", "select": [ [ @@ -1421,7 +1421,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _prs as(\n SELECT\n pr.id,\n pr.created_date as pr_issued_date,\n -- convert null to 0 if a PR has no pickup_time to make sure cycle_time equals the sum of the four sub-metrics\n\t\tcoalesce(prm.pr_pickup_time/60,0) as pickup_time\n FROM pull_requests pr\n left join project_pr_metrics prm on pr.id = prm.id\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n WHERE\n $__timeFilter(pr.created_date)\n and pr.created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n and pm.project_name in (${project:sqlstring}+'')\n GROUP BY 1,2,3\n)\n\nSELECT \n DATE_ADD(date(pr_issued_date), INTERVAL -$interval(date(pr_issued_date))+1 DAY) as time,\n avg(pickup_time) as 'Pickup Time(h)'\nFROM _prs\nGROUP BY 1\nORDER BY 1", + "rawSql": "with _prs as(\n SELECT\n pr.id,\n pr.created_date as pr_issued_date,\n -- convert null to 0 if a PR has no pickup_time to make sure cycle_time equals the sum of the four sub-metrics\n\t\tcoalesce(prm.pr_pickup_time/60,0) as pickup_time\n FROM pull_requests pr\n left join project_pr_metrics prm on pr.id = prm.id\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n WHERE\n $__timeFilter(pr.created_date)\n and pr.created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n and pm.project_name in (${project})\n GROUP BY 1,2,3\n)\n\nSELECT \n DATE_ADD(date(pr_issued_date), INTERVAL -$interval(date(pr_issued_date))+1 DAY) as time,\n avg(pickup_time) as 'Pickup Time(h)'\nFROM _prs\nGROUP BY 1\nORDER BY 1", "refId": "A", "select": [ [ @@ -1568,7 +1568,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _prs as(\n SELECT\n pr.id,\n pr.created_date as pr_issued_date,\n -- convert null to 0 if a PR has 
no review_time to make sure cycle_time equals the sum of the four sub-metrics\n\t\tcoalesce(prm.pr_review_time/60,0) as review_time\n FROM pull_requests pr\n left join project_pr_metrics prm on pr.id = prm.id\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n WHERE\n $__timeFilter(pr.created_date)\n and pr.created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n and pm.project_name in (${project:sqlstring}+'')\n GROUP BY 1,2,3\n)\n\nSELECT \n DATE_ADD(date(pr_issued_date), INTERVAL -$interval(date(pr_issued_date))+1 DAY) as time,\n avg(review_time) as 'Review Time(h)'\nFROM _prs\nGROUP BY 1\nORDER BY 1", + "rawSql": "with _prs as(\n SELECT\n pr.id,\n pr.created_date as pr_issued_date,\n -- convert null to 0 if a PR has no review_time to make sure cycle_time equals the sum of the four sub-metrics\n\t\tcoalesce(prm.pr_review_time/60,0) as review_time\n FROM pull_requests pr\n left join project_pr_metrics prm on pr.id = prm.id\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n WHERE\n $__timeFilter(pr.created_date)\n and pr.created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n and pm.project_name in (${project})\n GROUP BY 1,2,3\n)\n\nSELECT \n DATE_ADD(date(pr_issued_date), INTERVAL -$interval(date(pr_issued_date))+1 DAY) as time,\n avg(review_time) as 'Review Time(h)'\nFROM _prs\nGROUP BY 1\nORDER BY 1", "refId": "A", "select": [ [ @@ -1715,7 +1715,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _prs as(\n SELECT\n pr.id,\n pr.created_date as pr_issued_date,\n -- convert null to 0 if a PR has no deploy_time to make sure cycle_time equals the sum of the four sub-metrics\n\t\tcoalesce(prm.pr_deploy_time/60,0) as deploy_time\n FROM pull_requests pr\n left join project_pr_metrics prm on pr.id = prm.id\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n WHERE\n $__timeFilter(pr.created_date)\n and pr.created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n and pm.project_name in (${project:sqlstring}+'')\n GROUP BY 1,2,3\n)\n\nSELECT \n DATE_ADD(date(pr_issued_date), INTERVAL -$interval(date(pr_issued_date))+1 DAY) as time,\n avg(deploy_time) as 'PR Deploy Time(h)'\nFROM _prs\nGROUP BY 1\nORDER BY 1", + "rawSql": "with _prs as(\n SELECT\n pr.id,\n pr.created_date as pr_issued_date,\n -- convert null to 0 if a PR has no deploy_time to make sure cycle_time equals the sum of the four sub-metrics\n\t\tcoalesce(prm.pr_deploy_time/60,0) as deploy_time\n FROM pull_requests pr\n left join project_pr_metrics prm on pr.id = prm.id\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n WHERE\n $__timeFilter(pr.created_date)\n and pr.created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n and pm.project_name in (${project})\n GROUP BY 1,2,3\n)\n\nSELECT \n DATE_ADD(date(pr_issued_date), INTERVAL -$interval(date(pr_issued_date))+1 DAY) as time,\n avg(deploy_time) as 'PR Deploy Time(h)'\nFROM _prs\nGROUP BY 1\nORDER BY 1", "refId": "A", "select": [ [ diff --git a/grafana/dashboards/EngineeringThroughputAndCycleTimeTeamView.json b/grafana/dashboards/EngineeringThroughputAndCycleTimeTeamView.json index f3fac3be162..182a4184c48 100644 --- a/grafana/dashboards/EngineeringThroughputAndCycleTimeTeamView.json +++ 
b/grafana/dashboards/EngineeringThroughputAndCycleTimeTeamView.json @@ -208,7 +208,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _prs as(\n SELECT\n pr.id,\n pr.url,\n pr.created_date,\n pr.merged_date,\n pr.author_id,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM pull_requests pr\n join user_accounts ua on pr.author_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n WHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project:sqlstring}+'')\n)\n\nSELECT\n DATE_ADD(date(created_date), INTERVAL -$interval(date(created_date))+1 DAY) as time,\n count(distinct case when team_id in (${team1:sqlstring}+'') then id else null end) as \"Team1: Total PR Opened\",\n count(distinct case when team_id in (${team2:sqlstring}+'') then id else null end) as \"Team2: Total PR Opened\"\nFROM _prs\nGROUP BY 1", + "rawSql": "with _prs as(\n SELECT\n pr.id,\n pr.url,\n pr.created_date,\n pr.merged_date,\n pr.author_id,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM pull_requests pr\n join user_accounts ua on pr.author_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n WHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project})\n)\n\nSELECT\n DATE_ADD(date(created_date), INTERVAL -$interval(date(created_date))+1 DAY) as time,\n count(distinct case when team_id in (${team1}) then id else null end) as \"Team1: Total PR Opened\",\n count(distinct case when team_id in (${team2}) then id else null end) as \"Team2: Total PR Opened\"\nFROM _prs\nGROUP BY 1", "refId": "A", "select": [ [ @@ -375,7 +375,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _prs as(\n SELECT\n pr.id,\n pr.url,\n pr.created_date,\n pr.merged_date,\n pr.author_id,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n join user_accounts ua on pr.author_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project:sqlstring}+'')\n)\n\nSELECT\n DATE_ADD(date(created_date), INTERVAL -$interval(date(created_date))+1 DAY) as time,\n count(distinct case when team_id in (${team1:sqlstring}+'') then id else null end)/(select count(distinct user_id) from team_users where team_id in (${team1:sqlstring}+'')) as \"Team1: PR Opened per Member\",\n count(distinct case when team_id in (${team2:sqlstring}+'') then id else null end)/(select count(distinct user_id) from team_users where team_id in (${team2:sqlstring}+'')) as \"Team2: PR Opened per Member\",\n count(distinct id)/(select count(*) from users) as \"Org: PR Opened per Member\"\nFROM _prs\nGROUP BY 1", + "rawSql": "with _prs as(\n SELECT\n pr.id,\n pr.url,\n pr.created_date,\n pr.merged_date,\n pr.author_id,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n join user_accounts ua on pr.author_id = 
ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project})\n)\n\nSELECT\n DATE_ADD(date(created_date), INTERVAL -$interval(date(created_date))+1 DAY) as time,\n count(distinct case when team_id in (${team1}) then id else null end)/(select count(distinct user_id) from team_users where team_id in (${team1})) as \"Team1: PR Opened per Member\",\n count(distinct case when team_id in (${team2}) then id else null end)/(select count(distinct user_id) from team_users where team_id in (${team2})) as \"Team2: PR Opened per Member\",\n count(distinct id)/(select count(*) from users) as \"Org: PR Opened per Member\"\nFROM _prs\nGROUP BY 1", "refId": "A", "select": [ [ @@ -525,7 +525,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _prs as(\n SELECT\n pr.id,\n pr.url,\n pr.created_date,\n pr.merged_date,\n pr.author_id,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n join user_accounts ua on pr.author_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project:sqlstring}+'')\n)\n\nSELECT\n DATE_ADD(date(created_date), INTERVAL -$interval(date(created_date))+1 DAY) as time,\n count(distinct case \n when team_id in (${team1:sqlstring}+'') and merged_date is not null then id \n when team_id in (${team1:sqlstring}+'') and merged_date is null then null end) as \"Team1: Total PR Merged\",\n count(distinct case \n when team_id in (${team2:sqlstring}+'') and merged_date is not null then id \n when team_id in (${team2:sqlstring}+'') and merged_date is null then null end) as \"Team2: Total PR Merged\"\nFROM _prs\nGROUP BY 1", + "rawSql": "with _prs as(\n SELECT\n pr.id,\n pr.url,\n pr.created_date,\n pr.merged_date,\n pr.author_id,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n join user_accounts ua on pr.author_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project})\n)\n\nSELECT\n DATE_ADD(date(created_date), INTERVAL -$interval(date(created_date))+1 DAY) as time,\n count(distinct case \n when team_id in (${team1}) and merged_date is not null then id \n when team_id in (${team1}) and merged_date is null then null end) as \"Team1: Total PR Merged\",\n count(distinct case \n when team_id in (${team2}) and merged_date is not null then id \n when team_id in (${team2}) and merged_date is null then null end) as \"Team2: Total PR Merged\"\nFROM _prs\nGROUP BY 1", "refId": "A", "select": [ [ @@ -675,7 +675,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _prs as(\n SELECT\n pr.id,\n pr.url,\n pr.created_date,\n pr.merged_date,\n pr.author_id,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n join user_accounts ua on pr.author_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = 
tu.user_id\n join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project:sqlstring}+'')\n)\n\nSELECT\n DATE_ADD(date(created_date), INTERVAL -$interval(date(created_date))+1 DAY) as time,\n count(distinct case \n when team_id in (${team1:sqlstring}+'') and merged_date is not null then id \n when team_id in (${team1:sqlstring}+'') and merged_date is null then null end)/(select count(distinct user_id) from team_users where team_id in (${team1:sqlstring}+'')) as \"Team1: PR Merged per Member\",\n count(distinct case \n when team_id in (${team2:sqlstring}+'') and merged_date is not null then id\n when team_id in (${team2:sqlstring}+'') and merged_date is null then null end)/(select count(distinct user_id) from team_users where team_id in (${team2:sqlstring}+'')) as \"Team2: PR Merged per Member\",\n count(distinct case when merged_date is not null then id end)/(select count(*) from users) as \"Org: PR Merged per Member\"\nFROM _prs\nGROUP BY 1", + "rawSql": "with _prs as(\n SELECT\n pr.id,\n pr.url,\n pr.created_date,\n pr.merged_date,\n pr.author_id,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n join user_accounts ua on pr.author_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project})\n)\n\nSELECT\n DATE_ADD(date(created_date), INTERVAL -$interval(date(created_date))+1 DAY) as time,\n count(distinct case \n when team_id in (${team1}) and merged_date is not null then id \n when team_id in (${team1}) and merged_date is null then null end)/(select count(distinct user_id) from team_users where team_id in (${team1})) as \"Team1: PR Merged per Member\",\n count(distinct case \n when team_id in (${team2}) and merged_date is not null then id\n when team_id in (${team2}) and merged_date is null then null end)/(select count(distinct user_id) from team_users where team_id in (${team2})) as \"Team2: PR Merged per Member\",\n count(distinct case when merged_date is not null then id end)/(select count(*) from users) as \"Org: PR Merged per Member\"\nFROM _prs\nGROUP BY 1", "refId": "A", "select": [ [ @@ -852,7 +852,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _issues as(\n SELECT\n i.id,\n i.url,\n i.created_date,\n i.status,\n i.assignee_id,\n i.story_point,\n i.priority,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM issues i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id\n \tjoin user_accounts ua on i.assignee_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(i.created_date)\n and pm.project_name in (${project:sqlstring}+'')\n)\n\nSELECT\n DATE_ADD(date(created_date), INTERVAL -$interval(date(created_date))+1 DAY) as time,\n -- count(i.id) as 'Issues Opened',\n count(distinct case when status = 'DONE' and team_id in (${team1:sqlstring}+'') then id else null end) as 'Team1: Issues Completed',\n count(distinct case when status = 'DONE' and team_id in (${team2:sqlstring}+'') then id else null end) as 'Team2: Issues Completed'\nFROM _issues\nGROUP BY 1", + "rawSql": "with _issues as(\n SELECT\n i.id,\n 
i.url,\n i.created_date,\n i.status,\n i.assignee_id,\n i.story_point,\n i.priority,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM issues i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id\n \tjoin user_accounts ua on i.assignee_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(i.created_date)\n and pm.project_name in (${project})\n)\n\nSELECT\n DATE_ADD(date(created_date), INTERVAL -$interval(date(created_date))+1 DAY) as time,\n -- count(i.id) as 'Issues Opened',\n count(distinct case when status = 'DONE' and team_id in (${team1}) then id else null end) as 'Team1: Issues Completed',\n count(distinct case when status = 'DONE' and team_id in (${team2}) then id else null end) as 'Team2: Issues Completed'\nFROM _issues\nGROUP BY 1", "refId": "A", "select": [ [ @@ -1020,7 +1020,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _issues as(\n SELECT\n distinct i.id,\n i.url,\n i.created_date,\n i.status,\n i.assignee_id,\n i.story_point,\n i.priority,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM issues i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id\n \tjoin user_accounts ua on i.assignee_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(i.created_date)\n and pm.project_name in (${project:sqlstring}+'')\n ORDER BY 1\n)\n\nSELECT\n DATE_ADD(date(created_date), INTERVAL -$interval(date(created_date))+1 DAY) as time,\n count(distinct case when status = 'DONE' and team_id in (${team1:sqlstring}+'') then id else null end)/(select count(distinct user_id) from team_users where team_id in (${team1:sqlstring}+'')) as 'Team1: Issues Completed per Member',\n count(distinct case when status = 'DONE' and team_id in (${team2:sqlstring}+'') then id else null end)/(select count(distinct user_id) from team_users where team_id in (${team2:sqlstring}+'')) as 'Team2: Issues Completed per Member',\n count(distinct id)/(select count(*) from users) as \"Org: Issues Completed per Member\"\nFROM _issues\nGROUP BY 1", + "rawSql": "with _issues as(\n SELECT\n distinct i.id,\n i.url,\n i.created_date,\n i.status,\n i.assignee_id,\n i.story_point,\n i.priority,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM issues i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id\n \tjoin user_accounts ua on i.assignee_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(i.created_date)\n and pm.project_name in (${project})\n ORDER BY 1\n)\n\nSELECT\n DATE_ADD(date(created_date), INTERVAL -$interval(date(created_date))+1 DAY) as time,\n count(distinct case when status = 'DONE' and team_id in (${team1}) then id else null end)/(select count(distinct user_id) from team_users where team_id in (${team1})) as 'Team1: Issues Completed per Member',\n count(distinct case when status = 'DONE' and team_id in (${team2}) then id else null end)/(select count(distinct user_id) from team_users where team_id in (${team2})) as 'Team2: Issues 
Completed per Member',\n count(distinct id)/(select count(*) from users) as \"Org: Issues Completed per Member\"\nFROM _issues\nGROUP BY 1", "refId": "A", "select": [ [ @@ -1214,7 +1214,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _issues as(\n SELECT\n distinct i.id,\n i.url,\n i.created_date,\n i.status,\n i.assignee_id,\n i.story_point,\n i.priority,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM issues i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id\n \tjoin user_accounts ua on i.assignee_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(i.created_date)\n and pm.project_name in (${project:sqlstring}+'')\n ORDER BY 1\n)\n\nSELECT\n DATE_ADD(date(created_date), INTERVAL -$interval(date(created_date))+1 DAY) as time,\n -- count(i.id) as 'Issues Opened',\n sum(case when status = 'DONE' and team_id in (${team1:sqlstring}+'') then story_point else 0 end) as 'Team1: Story Points Completed',\n sum(case when status = 'DONE' and team_id in (${team2:sqlstring}+'') then story_point else 0 end) as 'Team2: Story Points Completed'\nFROM _issues\nGROUP BY 1", + "rawSql": "with _issues as(\n SELECT\n distinct i.id,\n i.url,\n i.created_date,\n i.status,\n i.assignee_id,\n i.story_point,\n i.priority,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM issues i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id\n \tjoin user_accounts ua on i.assignee_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(i.created_date)\n and pm.project_name in (${project})\n ORDER BY 1\n)\n\nSELECT\n DATE_ADD(date(created_date), INTERVAL -$interval(date(created_date))+1 DAY) as time,\n -- count(i.id) as 'Issues Opened',\n sum(case when status = 'DONE' and team_id in (${team1}) then story_point else 0 end) as 'Team1: Story Points Completed',\n sum(case when status = 'DONE' and team_id in (${team2}) then story_point else 0 end) as 'Team2: Story Points Completed'\nFROM _issues\nGROUP BY 1", "refId": "A", "select": [ [ @@ -1382,7 +1382,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _issues as(\n SELECT\n distinct i.id,\n i.url,\n i.created_date,\n i.status,\n i.assignee_id,\n i.story_point,\n i.priority,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM issues i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id\n \tjoin user_accounts ua on i.assignee_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(i.created_date)\n and pm.project_name in (${project:sqlstring}+'')\n ORDER BY 1\n)\n\nSELECT\n DATE_ADD(date(created_date), INTERVAL -$interval(date(created_date))+1 DAY) as time,\n sum(case when status = 'DONE' and team_id in (${team1:sqlstring}+'') then story_point else 0 end)/(select count(distinct user_id) from team_users where team_id in (${team1:sqlstring}+'')) as 'Team1: Story Points Completed per Member',\n sum(case when status = 'DONE' and team_id in (${team2:sqlstring}+'') then 
story_point else 0 end)/(select count(distinct user_id) from team_users where team_id in (${team2:sqlstring}+'')) as 'Team2: Story Points Completed per Member',\n count(distinct id)/(select count(*) FROM users) as \"Org: Story Points Completed per Member\"\nFROM _issues\nGROUP BY 1", + "rawSql": "with _issues as(\n SELECT\n distinct i.id,\n i.url,\n i.created_date,\n i.status,\n i.assignee_id,\n i.story_point,\n i.priority,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM issues i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id\n \tjoin user_accounts ua on i.assignee_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(i.created_date)\n and pm.project_name in (${project})\n ORDER BY 1\n)\n\nSELECT\n DATE_ADD(date(created_date), INTERVAL -$interval(date(created_date))+1 DAY) as time,\n sum(case when status = 'DONE' and team_id in (${team1}) then story_point else 0 end)/(select count(distinct user_id) from team_users where team_id in (${team1})) as 'Team1: Story Points Completed per Member',\n sum(case when status = 'DONE' and team_id in (${team2}) then story_point else 0 end)/(select count(distinct user_id) from team_users where team_id in (${team2})) as 'Team2: Story Points Completed per Member',\n count(distinct id)/(select count(*) FROM users) as \"Org: Story Points Completed per Member\"\nFROM _issues\nGROUP BY 1", "refId": "A", "select": [ [ @@ -1576,7 +1576,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _merged_prs as(\n SELECT\n distinct pr.id,\n pr.url,\n pr.created_date,\n pr.merged_date,\n pr.author_id,\n prc.id as comment_id,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n left join pull_request_comments prc on pr.id = prc.pull_request_id\n join user_accounts ua on pr.author_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project:sqlstring}+'')\n and pr.merged_date is not null\n ORDER BY 1\n)\n\nselect\n DATE_ADD(date(created_date), INTERVAL -$interval(date(created_date))+1 DAY) as time,\n count(distinct case when team_id in (${team1:sqlstring}+'') then comment_id else null end)/(select count(distinct user_id) from team_users where team_id in (${team1:sqlstring}+'')) as \"Team1: PR Review Depth\",\n count(distinct case when team_id in (${team2:sqlstring}+'') then comment_id else null end)/(select count(distinct user_id) from team_users where team_id in (${team2:sqlstring}+'')) as \"Team2: PR Review Depth\",\n count(distinct comment_id)/(select count(*) FROM users) as \"Org: PR Review Depth\"\nFROM _merged_prs\nGROUP BY 1", + "rawSql": "with _merged_prs as(\n SELECT\n distinct pr.id,\n pr.url,\n pr.created_date,\n pr.merged_date,\n pr.author_id,\n prc.id as comment_id,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n left join pull_request_comments prc on pr.id = prc.pull_request_id\n join user_accounts ua on pr.author_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = 
tu.user_id\n join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project})\n and pr.merged_date is not null\n ORDER BY 1\n)\n\nselect\n DATE_ADD(date(created_date), INTERVAL -$interval(date(created_date))+1 DAY) as time,\n count(distinct case when team_id in (${team1}) then comment_id else null end)/(select count(distinct user_id) from team_users where team_id in (${team1})) as \"Team1: PR Review Depth\",\n count(distinct case when team_id in (${team2}) then comment_id else null end)/(select count(distinct user_id) from team_users where team_id in (${team2})) as \"Team2: PR Review Depth\",\n count(distinct comment_id)/(select count(*) FROM users) as \"Org: PR Review Depth\"\nFROM _merged_prs\nGROUP BY 1", "refId": "A", "select": [ [ @@ -1744,7 +1744,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _prs as(\n SELECT\n distinct pr.id,\n pr.url,\n pr.created_date,\n pr.merged_date,\n pr.author_id,\n pr.merge_commit_sha,\n c.additions + c.deletions as loc,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n left join commits c on pr.merge_commit_sha = c.sha\n join user_accounts ua on pr.author_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project:sqlstring}+'')\n and pr.status = 'MERGED'\n ORDER BY 1\n)\n\nselect\n DATE_ADD(date(created_date), INTERVAL -$interval(date(created_date))+1 DAY) as time,\n sum(case when team_id in (${team1:sqlstring}+'') then loc else null end)/(select count(distinct user_id) from team_users where team_id in (${team1:sqlstring}+'')) as \"Team1: PR Size\",\n sum(case when team_id in (${team2:sqlstring}+'') then loc else null end)/(select count(distinct user_id) from team_users where team_id in (${team2:sqlstring}+'')) as \"Team2: PR Size\",\n sum(loc)/(select count(*) FROM users) as \"Org: PR Size\"\nFROM _prs\nGROUP BY 1\nORDER BY 1", + "rawSql": "with _prs as(\n SELECT\n distinct pr.id,\n pr.url,\n pr.created_date,\n pr.merged_date,\n pr.author_id,\n pr.merge_commit_sha,\n c.additions + c.deletions as loc,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n left join commits c on pr.merge_commit_sha = c.sha\n join user_accounts ua on pr.author_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project})\n and pr.status = 'MERGED'\n ORDER BY 1\n)\n\nselect\n DATE_ADD(date(created_date), INTERVAL -$interval(date(created_date))+1 DAY) as time,\n sum(case when team_id in (${team1}) then loc else null end)/(select count(distinct user_id) from team_users where team_id in (${team1})) as \"Team1: PR Size\",\n sum(case when team_id in (${team2}) then loc else null end)/(select count(distinct user_id) from team_users where team_id in (${team2})) as \"Team2: PR Size\",\n sum(loc)/(select count(*) FROM users) as \"Org: PR Size\"\nFROM _prs\nGROUP BY 1\nORDER BY 1", "refId": "A", "select": [ [ @@ -1938,7 +1938,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _bugs as(\n SELECT\n distinct i.id,\n i.url,\n 
i.created_date,\n i.status,\n i.assignee_id,\n i.story_point,\n i.priority,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM issues i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id\n \tjoin user_accounts ua on i.assignee_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(i.created_date)\n and pm.project_name in (${project:sqlstring}+'')\n and i.type = 'BUG'\n ORDER BY 1\n)\n\nSELECT\n DATE_ADD(date(created_date), INTERVAL -$interval(date(created_date))+1 DAY) as time,\n count(case when team_id in (${team1:sqlstring}+'') then id else null end) as 'Team1: P0/P1 Bugs',\n count(case when team_id in (${team2:sqlstring}+'') then id else null end) as 'Team2: P0/P1 Bugs'\nFROM _bugs\nWHERE\n -- please choose the priorities in the filter above\n priority in (${priority:sqlstring}+'')\nGROUP BY 1\nORDER BY 1", + "rawSql": "with _bugs as(\n SELECT\n distinct i.id,\n i.url,\n i.created_date,\n i.status,\n i.assignee_id,\n i.story_point,\n i.priority,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM issues i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id\n \tjoin user_accounts ua on i.assignee_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(i.created_date)\n and pm.project_name in (${project})\n and i.type = 'BUG'\n ORDER BY 1\n)\n\nSELECT\n DATE_ADD(date(created_date), INTERVAL -$interval(date(created_date))+1 DAY) as time,\n count(case when team_id in (${team1}) then id else null end) as 'Team1: P0/P1 Bugs',\n count(case when team_id in (${team2}) then id else null end) as 'Team2: P0/P1 Bugs'\nFROM _bugs\nWHERE\n -- please choose the priorities in the filter above\n priority in (${priority})\nGROUP BY 1\nORDER BY 1", "refId": "A", "select": [ [ @@ -2106,7 +2106,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _bugs as(\n SELECT\n distinct i.id,\n i.url,\n i.created_date,\n i.status,\n i.assignee_id,\n i.story_point,\n i.priority,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM issues i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id\n \tjoin user_accounts ua on i.assignee_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(i.created_date)\n and pm.project_name in (${project:sqlstring}+'')\n and i.type = 'BUG'\n ORDER BY 1\n)\n\nSELECT\n DATE_ADD(date(created_date), INTERVAL -$interval(date(created_date))+1 DAY) as time,\n count(case when team_id in (${team1:sqlstring}+'') then id else null end)/(select count(distinct user_id) from team_users where team_id in (${team1:sqlstring}+'')) as 'Team1: P0/P1 Bugs per Member',\n count(case when team_id in (${team2:sqlstring}+'') then id else null end)/(select count(distinct user_id) from team_users where team_id in (${team2:sqlstring}+'')) as 'Team2: P0/P1 Bugs per Member',\n count(distinct id)/(select count(*) FROM users) as \"Org: P0/P1 Bugs per Member\"\nFROM _bugs\nWHERE\n -- please choose the priorities in the filter above\n priority in 
(${priority:sqlstring}+'')\nGROUP BY 1\nORDER BY 1", + "rawSql": "with _bugs as(\n SELECT\n distinct i.id,\n i.url,\n i.created_date,\n i.status,\n i.assignee_id,\n i.story_point,\n i.priority,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM issues i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n\t join project_mapping pm on b.id = pm.row_id\n \tjoin user_accounts ua on i.assignee_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(i.created_date)\n and pm.project_name in (${project})\n and i.type = 'BUG'\n ORDER BY 1\n)\n\nSELECT\n DATE_ADD(date(created_date), INTERVAL -$interval(date(created_date))+1 DAY) as time,\n count(case when team_id in (${team1}) then id else null end)/(select count(distinct user_id) from team_users where team_id in (${team1})) as 'Team1: P0/P1 Bugs per Member',\n count(case when team_id in (${team2}) then id else null end)/(select count(distinct user_id) from team_users where team_id in (${team2})) as 'Team2: P0/P1 Bugs per Member',\n count(distinct id)/(select count(*) FROM users) as \"Org: P0/P1 Bugs per Member\"\nFROM _bugs\nWHERE\n -- please choose the priorities in the filter above\n priority in (${priority})\nGROUP BY 1\nORDER BY 1", "refId": "A", "select": [ [ @@ -2296,7 +2296,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _prs as(\n SELECT\n pr.id,\n pr.created_date as pr_issued_date,\n -- convert null to 0 if a PR has no cycle_time to make sure cycle_time equals the sum of the four metrics below\n\t\tcoalesce(prm.pr_cycle_time/60,0) as cycle_time,\n\t\tpr.author_id,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n\t\tleft join project_pr_metrics prm on pr.id = prm.id\n left join user_accounts ua on pr.author_id = ua.account_id\n left join users u on ua.user_id = u.id\n left join team_users tu on u.id = tu.user_id\n left join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project:sqlstring}+'')\n GROUP BY 1,2,3,4,5,6,7,8\n)\n\nSELECT \n DATE_ADD(date(pr_issued_date), INTERVAL -$interval(date(pr_issued_date))+1 DAY) as time,\n avg(case when team_id in (${team1:sqlstring}+'') then cycle_time end) as 'Team1: Avg Cycle Time(h)',\n avg(case when team_id in (${team2:sqlstring}+'') then cycle_time end) as 'Team2: Avg Cycle Time(h)'\nFROM _prs\nGROUP BY 1\nORDER BY 1", + "rawSql": "with _prs as(\n SELECT\n pr.id,\n pr.created_date as pr_issued_date,\n -- convert null to 0 if a PR has no cycle_time to make sure cycle_time equals the sum of the four metrics below\n\t\tcoalesce(prm.pr_cycle_time/60,0) as cycle_time,\n\t\tpr.author_id,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n\t\tleft join project_pr_metrics prm on pr.id = prm.id\n left join user_accounts ua on pr.author_id = ua.account_id\n left join users u on ua.user_id = u.id\n left join team_users tu on u.id = tu.user_id\n left join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project})\n GROUP BY 1,2,3,4,5,6,7,8\n)\n\nSELECT \n DATE_ADD(date(pr_issued_date), INTERVAL -$interval(date(pr_issued_date))+1 DAY) as time,\n 
avg(case when team_id in (${team1}) then cycle_time end) as 'Team1: Avg Cycle Time(h)',\n avg(case when team_id in (${team2}) then cycle_time end) as 'Team2: Avg Cycle Time(h)'\nFROM _prs\nGROUP BY 1\nORDER BY 1", "refId": "A", "select": [ [ @@ -2443,7 +2443,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _prs as(\n SELECT\n pr.id,\n pr.created_date as pr_issued_date,\n -- convert null to 0 if a PR has no coding_time to make sure cycle_time equals the sum of the four sub-metrics\n\t\tcoalesce(prm.pr_coding_time/60,0) as coding_time,\n\t\tpr.author_id,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n\t\tleft join project_pr_metrics prm on pr.id = prm.id\n left join user_accounts ua on pr.author_id = ua.account_id\n left join users u on ua.user_id = u.id\n left join team_users tu on u.id = tu.user_id\n left join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project:sqlstring}+'')\n GROUP BY 1,2,3,4,5,6,7,8\n)\n\nSELECT \n DATE_ADD(date(pr_issued_date), INTERVAL -$interval(date(pr_issued_date))+1 DAY) as time,\n avg(case when team_id in (${team1:sqlstring}+'') then coding_time end) as 'Team1: Avg Coding Time(h)',\n avg(case when team_id in (${team2:sqlstring}+'') then coding_time end) as 'Team2: Avg Coding Time(h)'\nFROM _prs\nGROUP BY 1\nORDER BY 1", + "rawSql": "with _prs as(\n SELECT\n pr.id,\n pr.created_date as pr_issued_date,\n -- convert null to 0 if a PR has no coding_time to make sure cycle_time equals the sum of the four sub-metrics\n\t\tcoalesce(prm.pr_coding_time/60,0) as coding_time,\n\t\tpr.author_id,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n\t\tleft join project_pr_metrics prm on pr.id = prm.id\n left join user_accounts ua on pr.author_id = ua.account_id\n left join users u on ua.user_id = u.id\n left join team_users tu on u.id = tu.user_id\n left join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project})\n GROUP BY 1,2,3,4,5,6,7,8\n)\n\nSELECT \n DATE_ADD(date(pr_issued_date), INTERVAL -$interval(date(pr_issued_date))+1 DAY) as time,\n avg(case when team_id in (${team1}) then coding_time end) as 'Team1: Avg Coding Time(h)',\n avg(case when team_id in (${team2}) then coding_time end) as 'Team2: Avg Coding Time(h)'\nFROM _prs\nGROUP BY 1\nORDER BY 1", "refId": "A", "select": [ [ @@ -2590,7 +2590,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _prs as(\n SELECT\n pr.id,\n pr.created_date as pr_issued_date,\n -- convert null to 0 if a PR has no pickup_time to make sure cycle_time equals the sum of the four sub-metrics\n\t\tcoalesce(prm.pr_pickup_time/60,0) as pickup_time,\n\t\tpr.author_id,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n\t\tleft join project_pr_metrics prm on pr.id = prm.id\n left join user_accounts ua on pr.author_id = ua.account_id\n left join users u on ua.user_id = u.id\n left join team_users tu on u.id = tu.user_id\n left join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project:sqlstring}+'')\n GROUP BY 1,2,3,4,5,6,7,8\n)\n\nSELECT 
\n DATE_ADD(date(pr_issued_date), INTERVAL -$interval(date(pr_issued_date))+1 DAY) as time,\n avg(case when team_id in (${team1:sqlstring}+'') then pickup_time end) as 'Team1: Avg Pickup Time(h)',\n avg(case when team_id in (${team2:sqlstring}+'') then pickup_time end) as 'Team2: Avg Pickup Time(h)'\nFROM _prs\nGROUP BY 1\nORDER BY 1", + "rawSql": "with _prs as(\n SELECT\n pr.id,\n pr.created_date as pr_issued_date,\n -- convert null to 0 if a PR has no pickup_time to make sure cycle_time equals the sum of the four sub-metrics\n\t\tcoalesce(prm.pr_pickup_time/60,0) as pickup_time,\n\t\tpr.author_id,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n\t\tleft join project_pr_metrics prm on pr.id = prm.id\n left join user_accounts ua on pr.author_id = ua.account_id\n left join users u on ua.user_id = u.id\n left join team_users tu on u.id = tu.user_id\n left join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project})\n GROUP BY 1,2,3,4,5,6,7,8\n)\n\nSELECT \n DATE_ADD(date(pr_issued_date), INTERVAL -$interval(date(pr_issued_date))+1 DAY) as time,\n avg(case when team_id in (${team1}) then pickup_time end) as 'Team1: Avg Pickup Time(h)',\n avg(case when team_id in (${team2}) then pickup_time end) as 'Team2: Avg Pickup Time(h)'\nFROM _prs\nGROUP BY 1\nORDER BY 1", "refId": "A", "select": [ [ @@ -2737,7 +2737,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _prs as(\n SELECT\n pr.id,\n pr.created_date as pr_issued_date,\n -- convert null to 0 if a PR has no review_time to make sure cycle_time equals the sum of the four sub-metrics\n\t\tcoalesce(prm.pr_review_time/60,0) as review_time,\n\t\tpr.author_id,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n\t\tleft join project_pr_metrics prm on pr.id = prm.id\n left join user_accounts ua on pr.author_id = ua.account_id\n left join users u on ua.user_id = u.id\n left join team_users tu on u.id = tu.user_id\n left join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project:sqlstring}+'')\n GROUP BY 1,2,3,4,5,6,7,8\n)\n\nSELECT \n DATE_ADD(date(pr_issued_date), INTERVAL -$interval(date(pr_issued_date))+1 DAY) as time,\n avg(case when team_id in (${team1:sqlstring}+'') then review_time end) as 'Team1: Avg Review Time(h)',\n avg(case when team_id in (${team2:sqlstring}+'') then review_time end) as 'Team2: Avg Review Time(h)'\nFROM _prs\nGROUP BY 1\nORDER BY 1", + "rawSql": "with _prs as(\n SELECT\n pr.id,\n pr.created_date as pr_issued_date,\n -- convert null to 0 if a PR has no review_time to make sure cycle_time equals the sum of the four sub-metrics\n\t\tcoalesce(prm.pr_review_time/60,0) as review_time,\n\t\tpr.author_id,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n\t\tleft join project_pr_metrics prm on pr.id = prm.id\n left join user_accounts ua on pr.author_id = ua.account_id\n left join users u on ua.user_id = u.id\n left join team_users tu on u.id = tu.user_id\n left join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project})\n GROUP BY 
1,2,3,4,5,6,7,8\n)\n\nSELECT \n DATE_ADD(date(pr_issued_date), INTERVAL -$interval(date(pr_issued_date))+1 DAY) as time,\n avg(case when team_id in (${team1}) then review_time end) as 'Team1: Avg Review Time(h)',\n avg(case when team_id in (${team2}) then review_time end) as 'Team2: Avg Review Time(h)'\nFROM _prs\nGROUP BY 1\nORDER BY 1", "refId": "A", "select": [ [ @@ -2884,7 +2884,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _prs as(\n SELECT\n pr.id,\n pr.created_date as pr_issued_date,\n -- convert null to 0 if a PR has no deploy_time to make sure cycle_time equals the sum of the four sub-metrics\n\t\tcoalesce(prm.pr_deploy_time/60,0) as deploy_time,\n\t\tpr.author_id,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n\t\tleft join project_pr_metrics prm on pr.id = prm.id\n left join user_accounts ua on pr.author_id = ua.account_id\n left join users u on ua.user_id = u.id\n left join team_users tu on u.id = tu.user_id\n left join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project:sqlstring}+'')\n GROUP BY 1,2,3,4,5,6,7,8\n)\n\nSELECT \n DATE_ADD(date(pr_issued_date), INTERVAL -$interval(date(pr_issued_date))+1 DAY) as time,\n avg(case when team_id in (${team1:sqlstring}+'') then deploy_time end) as 'Team1: Avg Deploy Time(h)',\n avg(case when team_id in (${team2:sqlstring}+'') then deploy_time end) as 'Team2: Avg Deploy Time(h)'\nFROM _prs\nGROUP BY 1\nORDER BY 1", + "rawSql": "with _prs as(\n SELECT\n pr.id,\n pr.created_date as pr_issued_date,\n -- convert null to 0 if a PR has no deploy_time to make sure cycle_time equals the sum of the four sub-metrics\n\t\tcoalesce(prm.pr_deploy_time/60,0) as deploy_time,\n\t\tpr.author_id,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n\t\tleft join project_pr_metrics prm on pr.id = prm.id\n left join user_accounts ua on pr.author_id = ua.account_id\n left join users u on ua.user_id = u.id\n left join team_users tu on u.id = tu.user_id\n left join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project})\n GROUP BY 1,2,3,4,5,6,7,8\n)\n\nSELECT \n DATE_ADD(date(pr_issued_date), INTERVAL -$interval(date(pr_issued_date))+1 DAY) as time,\n avg(case when team_id in (${team1}) then deploy_time end) as 'Team1: Avg Deploy Time(h)',\n avg(case when team_id in (${team2}) then deploy_time end) as 'Team2: Avg Deploy Time(h)'\nFROM _prs\nGROUP BY 1\nORDER BY 1", "refId": "A", "select": [ [ @@ -3058,7 +3058,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _merged_prs as(\n SELECT\n distinct pr.id,\n pr.url,\n pr.created_date,\n pr.merged_date,\n pr.author_id,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n join user_accounts ua on pr.author_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project:sqlstring}+'')\n and pr.merged_date is not null\n ORDER BY 1\n)\n\nSELECT\n DATE_ADD(date(created_date), INTERVAL -$interval(date(created_date))+1 
DAY) as time,\n count(distinct case when team_id in (${team1:sqlstring}+'') and id not in (SELECT pull_request_id FROM pull_request_comments) then id else null end) as \"Team1: PRs Merged w/o Review\",\n count(distinct case when team_id in (${team2:sqlstring}+'') and id not in (SELECT pull_request_id FROM pull_request_comments) then id else null end) as \"Team2: PRs Merged w/o Review\"\nFROM _merged_prs\nGROUP BY 1", + "rawSql": "with _merged_prs as(\n SELECT\n distinct pr.id,\n pr.url,\n pr.created_date,\n pr.merged_date,\n pr.author_id,\n u.id as user_id,\n u.name as user_name,\n t.id as team_id,\n t.name as team\n FROM pull_requests pr\n join project_mapping pm on pr.base_repo_id = pm.row_id and pm.table = 'repos' \n join user_accounts ua on pr.author_id = ua.account_id\n join users u on ua.user_id = u.id\n join team_users tu on u.id = tu.user_id\n join teams t on tu.team_id = t.id\n WHERE\n $__timeFilter(pr.created_date)\n and pm.project_name in (${project})\n and pr.merged_date is not null\n ORDER BY 1\n)\n\nSELECT\n DATE_ADD(date(created_date), INTERVAL -$interval(date(created_date))+1 DAY) as time,\n count(distinct case when team_id in (${team1}) and id not in (SELECT pull_request_id FROM pull_request_comments) then id else null end) as \"Team1: PRs Merged w/o Review\",\n count(distinct case when team_id in (${team2}) and id not in (SELECT pull_request_id FROM pull_request_comments) then id else null end) as \"Team2: PRs Merged w/o Review\"\nFROM _merged_prs\nGROUP BY 1", "refId": "A", "select": [ [ diff --git a/grafana/dashboards/GitHub.json b/grafana/dashboards/GitHub.json index 8cc1cf1f495..437b1cb852d 100644 --- a/grafana/dashboards/GitHub.json +++ b/grafana/dashboards/GitHub.json @@ -153,7 +153,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "select \n\tcount(distinct i.id)\nfrom \n\tissues i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\nwhere\n $__timeFilter(i.created_date)\n and b.id in (${repo_id:sqlstring}+'')\n", + "rawSql": "select \n\tcount(distinct i.id)\nfrom \n\tissues i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\nwhere\n $__timeFilter(i.created_date)\n and b.id in (${repo_id})\n", "refId": "A", "select": [ [ @@ -275,7 +275,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _issues as(\n SELECT\n DATE_ADD(date(i.created_date), INTERVAL -DAY(date(i.created_date))+1 DAY) as time,\n count(distinct i.id) as issue_count\n FROM issues i\n \tjoin board_issues bi on i.id = bi.issue_id\n \tjoin boards b on bi.board_id = b.id\n WHERE\n $__timeFilter(i.created_date)\n and i.created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n and b.id in (${repo_id:sqlstring}+'')\n group by 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n issue_count as \"Issue Count\"\nFROM _issues\nORDER BY time\n", + "rawSql": "with _issues as(\n SELECT\n DATE_ADD(date(i.created_date), INTERVAL -DAY(date(i.created_date))+1 DAY) as time,\n count(distinct i.id) as issue_count\n FROM issues i\n \tjoin board_issues bi on i.id = bi.issue_id\n \tjoin boards b on bi.board_id = b.id\n WHERE\n $__timeFilter(i.created_date)\n and i.created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n and b.id in (${repo_id})\n group by 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n issue_count as \"Issue Count\"\nFROM _issues\nORDER BY time\n", "refId": "A", "select": [ [ @@ 
-391,7 +391,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "select \n\tcount(distinct i.id)\nfrom \n\tissues i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\nwhere\n $__timeFilter(i.created_date)\n and b.id in (${repo_id:sqlstring}+'')\n and i.status = \"DONE\"\n\n\n", + "rawSql": "select \n\tcount(distinct i.id)\nfrom \n\tissues i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\nwhere\n $__timeFilter(i.created_date)\n and b.id in (${repo_id})\n and i.status = \"DONE\"\n\n\n", "refId": "A", "select": [ [ @@ -526,7 +526,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n DATE_ADD(date(i.created_date), INTERVAL -DAYOFMONTH(date(i.created_date))+1 DAY) as time,\n count(distinct case when status != 'DONE' then i.id else null end) as open_issue_count,\n count(distinct case when status = 'DONE' then i.id else null end) as closed_issue_count\nFROM issues i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\nWHERE\n $__timeFilter(i.created_date)\n and i.created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n and b.id in (${repo_id:sqlstring}+'')\ngroup by 1\n", + "rawSql": "SELECT\n DATE_ADD(date(i.created_date), INTERVAL -DAYOFMONTH(date(i.created_date))+1 DAY) as time,\n count(distinct case when status != 'DONE' then i.id else null end) as open_issue_count,\n count(distinct case when status = 'DONE' then i.id else null end) as closed_issue_count\nFROM issues i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\nWHERE\n $__timeFilter(i.created_date)\n and i.created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n and b.id in (${repo_id})\ngroup by 1\n", "refId": "A", "select": [ [ @@ -616,7 +616,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "select \n\tAVG(i.lead_time_minutes/1440) issue_lead_time_in_days\nfrom \n\tissues i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\nwhere\n $__timeFilter(i.resolution_date)\n and i.resolution_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n and b.id in (${repo_id:sqlstring}+'')\n and i.status = \"DONE\"", + "rawSql": "select \n\tAVG(i.lead_time_minutes/1440) issue_lead_time_in_days\nfrom \n\tissues i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\nwhere\n $__timeFilter(i.resolution_date)\n and i.resolution_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n and b.id in (${repo_id})\n and i.status = \"DONE\"", "refId": "A", "select": [ [ @@ -736,7 +736,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _issues as(\n SELECT\n DATE_ADD(date(i.resolution_date), INTERVAL -DAY(date(i.resolution_date))+1 DAY) as time,\n AVG(i.lead_time_minutes/1440) as issue_lead_time\n FROM issues i\n \tjoin board_issues bi on i.id = bi.issue_id\n \tjoin boards b on bi.board_id = b.id\n WHERE\n b.id in (${repo_id:sqlstring}+'')\n and i.status = \"DONE\"\n and $__timeFilter(i.resolution_date)\n and i.resolution_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n group by 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n issue_lead_time as \"Mean Issue Lead Time in Days\"\nFROM _issues\nORDER BY time\n", + 
"rawSql": "with _issues as(\n SELECT\n DATE_ADD(date(i.resolution_date), INTERVAL -DAY(date(i.resolution_date))+1 DAY) as time,\n AVG(i.lead_time_minutes/1440) as issue_lead_time\n FROM issues i\n \tjoin board_issues bi on i.id = bi.issue_id\n \tjoin boards b on bi.board_id = b.id\n WHERE\n b.id in (${repo_id})\n and i.status = \"DONE\"\n and $__timeFilter(i.resolution_date)\n and i.resolution_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n group by 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n issue_lead_time as \"Mean Issue Lead Time in Days\"\nFROM _issues\nORDER BY time\n", "refId": "A", "select": [ [ @@ -851,7 +851,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "-- Get the queue time of all outstanding bugs\nwith _outstanding_issues as(\n select \n DISTINCT\n b.name as repo_name,\n i.issue_key as issue_key,\n i.title,\n i.created_date,\n (TIMESTAMPDIFF(MINUTE, i.created_date,NOW()))/1440 as queue_time_in_days,\n concat(b.url,'/',i.issue_key) as url\n from \n issues i\n left join board_issues bi on i.id = bi.issue_id\n left join boards b on bi.board_id = b.id\n where\n b.id in (${repo_id:sqlstring}+'')\n and $__timeFilter(i.created_date)\n and i.status != 'DONE'\n)\n\nselect issue_key, title, queue_time_in_days from _outstanding_issues\norder by 3 desc", + "rawSql": "-- Get the queue time of all outstanding bugs\nwith _outstanding_issues as(\n select \n DISTINCT\n b.name as repo_name,\n i.issue_key as issue_key,\n i.title,\n i.created_date,\n (TIMESTAMPDIFF(MINUTE, i.created_date,NOW()))/1440 as queue_time_in_days,\n concat(b.url,'/',i.issue_key) as url\n from \n issues i\n left join board_issues bi on i.id = bi.issue_id\n left join boards b on bi.board_id = b.id\n where\n b.id in (${repo_id})\n and $__timeFilter(i.created_date)\n and i.status != 'DONE'\n)\n\nselect issue_key, title, queue_time_in_days from _outstanding_issues\norder by 3 desc", "refId": "A", "select": [ [ @@ -1048,7 +1048,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "-- Get the queue time of all outstanding bugs\nselect \n b.name as repo_name,\n i.issue_key as issue_key,\n i.title,\n i.created_date,\n (TIMESTAMPDIFF(MINUTE, i.created_date,NOW()))/1440 as queue_time_in_days,\n concat(b.url,'/',i.issue_key) as url\nfrom \n issues i\n left join board_issues bi on i.id = bi.issue_id\n left join boards b on bi.board_id = b.id\nwhere\n b.id in (${repo_id:sqlstring}+'')\n and $__timeFilter(i.created_date)\n and i.status != 'DONE'\norder by queue_time_in_days desc", + "rawSql": "-- Get the queue time of all outstanding bugs\nselect \n b.name as repo_name,\n i.issue_key as issue_key,\n i.title,\n i.created_date,\n (TIMESTAMPDIFF(MINUTE, i.created_date,NOW()))/1440 as queue_time_in_days,\n concat(b.url,'/',i.issue_key) as url\nfrom \n issues i\n left join board_issues bi on i.id = bi.issue_id\n left join boards b on bi.board_id = b.id\nwhere\n b.id in (${repo_id})\n and $__timeFilter(i.created_date)\n and i.status != 'DONE'\norder by queue_time_in_days desc", "refId": "A", "select": [ [ @@ -1164,7 +1164,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "select\n\tcount(distinct pr.id) as pull_request_count\nfrom \n\tpull_requests pr\nwhere\n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id:sqlstring}+'')\n\t\n\n", + "rawSql": "select\n\tcount(distinct pr.id) as pull_request_count\nfrom \n\tpull_requests pr\nwhere\n $__timeFilter(created_date)\n\tand 
base_repo_id in (${repo_id})\n\t\n\n", "refId": "A", "select": [ [ @@ -1301,7 +1301,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _prs as(\n SELECT\n DATE_ADD(date(created_date), INTERVAL -DAY(date(created_date))+1 DAY) as time,\n count(distinct id) as pr_count\n FROM pull_requests\n WHERE\n base_repo_id in (${repo_id:sqlstring}+'')\n and $__timeFilter(created_date)\n and created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n group by 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n pr_count as \"Pull Request Count\"\nFROM _prs\nORDER BY time\n", + "rawSql": "with _prs as(\n SELECT\n DATE_ADD(date(created_date), INTERVAL -DAY(date(created_date))+1 DAY) as time,\n count(distinct id) as pr_count\n FROM pull_requests\n WHERE\n base_repo_id in (${repo_id})\n and $__timeFilter(created_date)\n and created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n group by 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n pr_count as \"Pull Request Count\"\nFROM _prs\nORDER BY time\n", "refId": "A", "select": [ [ @@ -1439,7 +1439,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nselect\n author_name,\n\tcount(distinct pr.id) as merged_pull_request_count\nfrom \n\tpull_requests pr\nwhere\n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id:sqlstring}+'')\n and pr.status = 'MERGED'\ngroup by 1\norder by 2 desc\nlimit 20\n", + "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nselect\n author_name,\n\tcount(distinct pr.id) as merged_pull_request_count\nfrom \n\tpull_requests pr\nwhere\n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id})\n and pr.status = 'MERGED'\ngroup by 1\norder by 2 desc\nlimit 20\n", "refId": "A", "select": [ [ @@ -1570,7 +1570,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nselect\n count(distinct case when status = 'MERGED' then id else null end)/count(distinct case when status in ('CLOSED', 'MERGED') then id else null end) as ratio\nfrom \n\tpull_requests pr\nwhere\n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id:sqlstring}+'')", + "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nselect\n count(distinct case when status = 'MERGED' then id else null end)/count(distinct case when status in ('CLOSED', 'MERGED') then id else null end) as ratio\nfrom \n\tpull_requests pr\nwhere\n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id})", "refId": "A", "select": [ [ @@ -1684,7 +1684,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. 
You can check out the original status from the field `original_status`\nSELECT\n DATE_ADD(date(created_date), INTERVAL -DAYOFMONTH(date(created_date))+1 DAY) as time,\n count(distinct case when status = 'OPEN' then id else null end) as \"PR: Open\",\n count(distinct case when status = 'CLOSED' then id else null end) as \"PR: Closed without merging\",\n count(distinct case when status = 'MERGED' then id else null end) as \"PR: Closed and merged\"\nFROM pull_requests\nWHERE\n $__timeFilter(created_date)\n and created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n and base_repo_id in (${repo_id:sqlstring}+'')\ngroup by 1\n", + "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nSELECT\n DATE_ADD(date(created_date), INTERVAL -DAYOFMONTH(date(created_date))+1 DAY) as time,\n count(distinct case when status = 'OPEN' then id else null end) as \"PR: Open\",\n count(distinct case when status = 'CLOSED' then id else null end) as \"PR: Closed without merging\",\n count(distinct case when status = 'MERGED' then id else null end) as \"PR: Closed and merged\"\nFROM pull_requests\nWHERE\n $__timeFilter(created_date)\n and created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n and base_repo_id in (${repo_id})\ngroup by 1\n", "refId": "A", "select": [ [ @@ -1774,7 +1774,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nselect\n\tcount(distinct pr.id) as merged_pull_request_count\nfrom \n\tpull_requests pr\nwhere\n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id:sqlstring}+'')\n\tand pr.status = 'CLOSED'", + "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nselect\n\tcount(distinct pr.id) as merged_pull_request_count\nfrom \n\tpull_requests pr\nwhere\n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id})\n\tand pr.status = 'CLOSED'", "refId": "A", "select": [ [ @@ -1907,7 +1907,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nSELECT\n DATE_ADD(date(created_date), INTERVAL -DAYOFMONTH(date(created_date))+1 DAY) as time,\n count(distinct case when status = 'CLOSED' then id else null end)/count(distinct case when status in ('CLOSED', 'MERGED') then id else null end) as ratio\nFROM pull_requests\nWHERE\n $__timeFilter(created_date)\n and base_repo_id in (${repo_id:sqlstring}+'')\ngroup by 1\n", + "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. 
You can check out the original status from the field `original_status`\nSELECT\n DATE_ADD(date(created_date), INTERVAL -DAYOFMONTH(date(created_date))+1 DAY) as time,\n count(distinct case when status = 'CLOSED' then id else null end)/count(distinct case when status in ('CLOSED', 'MERGED') then id else null end) as ratio\nFROM pull_requests\nWHERE\n $__timeFilter(created_date)\n and base_repo_id in (${repo_id})\ngroup by 1\n", "refId": "A", "select": [ [ @@ -1996,7 +1996,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "select\n\tavg(TIMESTAMPDIFF(Minute,created_date,merged_date)/1440)\nfrom \n\tpull_requests\nwhere \n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id:sqlstring}+'')\n\tand merged_date is not null\n\n\n", + "rawSql": "select\n\tavg(TIMESTAMPDIFF(Minute,created_date,merged_date)/1440)\nfrom \n\tpull_requests\nwhere \n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id})\n\tand merged_date is not null\n\n\n", "refId": "A", "select": [ [ @@ -2115,7 +2115,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _prs as(\n SELECT\n DATE_ADD(date(created_date), INTERVAL -DAY(date(created_date))+1 DAY) as time,\n avg(TIMESTAMPDIFF(Minute,created_date,merged_date)/1440) as time_to_merge\n FROM pull_requests\n WHERE\n $__timeFilter(created_date)\n and created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n and base_repo_id in (${repo_id:sqlstring}+'')\n group by 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n time_to_merge as \"Time to Merge\"\nFROM _prs\nORDER BY time\n", + "rawSql": "with _prs as(\n SELECT\n DATE_ADD(date(created_date), INTERVAL -DAY(date(created_date))+1 DAY) as time,\n avg(TIMESTAMPDIFF(Minute,created_date,merged_date)/1440) as time_to_merge\n FROM pull_requests\n WHERE\n $__timeFilter(created_date)\n and created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n and base_repo_id in (${repo_id})\n group by 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n time_to_merge as \"Time to Merge\"\nFROM _prs\nORDER BY time\n", "refId": "A", "select": [ [ @@ -2202,7 +2202,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "select\n\tavg(TIMESTAMPDIFF(Minute,created_date,closed_date)/1440) as time_to_close\nfrom \n\tpull_requests\nwhere \n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id:sqlstring}+'')\n\tand status in ('CLOSED', 'MERGED')\n\n\n", + "rawSql": "select\n\tavg(TIMESTAMPDIFF(Minute,created_date,closed_date)/1440) as time_to_close\nfrom \n\tpull_requests\nwhere \n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id})\n\tand status in ('CLOSED', 'MERGED')\n\n\n", "refId": "A", "select": [ [ @@ -2321,7 +2321,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _prs as(\n SELECT\n DATE_ADD(date(created_date), INTERVAL -DAY(date(created_date))+1 DAY) as time,\n avg(TIMESTAMPDIFF(Minute,created_date,closed_date)/1440) as time_to_close\n FROM pull_requests\n WHERE\n $__timeFilter(created_date)\n and created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n and base_repo_id in (${repo_id:sqlstring}+'')\n and status in ('CLOSED', 'MERGED')\n group by 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n time_to_close as \"Time to Close\"\nFROM _prs\nORDER BY time\n", + "rawSql": "with _prs as(\n SELECT\n DATE_ADD(date(created_date), INTERVAL 
-DAY(date(created_date))+1 DAY) as time,\n avg(TIMESTAMPDIFF(Minute,created_date,closed_date)/1440) as time_to_close\n FROM pull_requests\n WHERE\n $__timeFilter(created_date)\n and created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n and base_repo_id in (${repo_id})\n and status in ('CLOSED', 'MERGED')\n group by 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n time_to_close as \"Time to Close\"\nFROM _prs\nORDER BY time\n", "refId": "A", "select": [ [ @@ -2416,7 +2416,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n count(distinct id)\nFROM \n cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and result = 'SUCCESS'\n and id like \"%github%\"\n and cicd_scope_id in (${repo_id:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)", + "rawSql": "SELECT\n count(distinct id)\nFROM \n cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and result = 'SUCCESS'\n and id like \"%github%\"\n and cicd_scope_id in (${repo_id})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)", "refId": "A", "select": [ [ @@ -2518,7 +2518,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n 1.0 * count(case when result = 'SUCCESS' then id else null end)/count(distinct id)\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%github%\"\n and cicd_scope_id in (${repo_id:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)", + "rawSql": "SELECT\n 1.0 * count(case when result = 'SUCCESS' then id else null end)/count(distinct id)\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%github%\"\n and cicd_scope_id in (${repo_id})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)", "refId": "A", "select": [ [ @@ -2705,7 +2705,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n result,\n count(distinct id) as build_count\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%github%\"\n and cicd_scope_id in (${repo_id:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\nGROUP BY 1\nORDER BY 2 desc", + "rawSql": "SELECT\n result,\n count(distinct id) as build_count\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%github%\"\n and cicd_scope_id in (${repo_id})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\nGROUP BY 1\nORDER BY 2 desc", "refId": "A", "select": [ [ @@ -2805,7 +2805,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n avg(duration_sec/60) as duration_in_minutes\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%github%\"\n and cicd_scope_id in (${repo_id:sqlstring}+'')\n -- the 
following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)", + "rawSql": "SELECT\n avg(duration_sec/60) as duration_in_minutes\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%github%\"\n and cicd_scope_id in (${repo_id})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)", "refId": "A", "select": [ [ @@ -2942,7 +2942,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "WITH _builds as(\n SELECT\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\n count(distinct id) as build_count\n FROM cicd_pipelines\n WHERE\n $__timeFilter(finished_date)\n and result = 'SUCCESS'\n and id like \"%github%\"\n and cicd_scope_id in (${repo_id:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n GROUP BY 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n build_count as \"Workflow Runs Count\"\nFROM _builds\nORDER BY time\n", + "rawSql": "WITH _builds as(\n SELECT\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\n count(distinct id) as build_count\n FROM cicd_pipelines\n WHERE\n $__timeFilter(finished_date)\n and result = 'SUCCESS'\n and id like \"%github%\"\n and cicd_scope_id in (${repo_id})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n GROUP BY 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n build_count as \"Workflow Runs Count\"\nFROM _builds\nORDER BY time\n", "refId": "A", "select": [ [ @@ -3098,7 +3098,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "WITH _build_success_rate as(\r\n SELECT\r\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\r\n result,\r\n id\r\n FROM\r\n cicd_pipelines\r\n WHERE\r\n $__timeFilter(finished_date)\r\n and id like \"%github%\"\r\n and cicd_scope_id in (${repo_id:sqlstring}+'')\r\n -- the following condition will remove the month with incomplete data\r\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\r\n GROUP BY \r\n time, result, id\r\n)\r\n\r\nSELECT \r\n date_format(time,'%M %Y') as month,\r\n 1.0 * sum(case when result = 'SUCCESS' then 1 else 0 end)/ count(*) as \"Workflow Runs Success Rate\"\r\nFROM _build_success_rate\r\nGROUP BY time\r\nORDER BY time", + "rawSql": "WITH _build_success_rate as(\r\n SELECT\r\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\r\n result,\r\n id\r\n FROM\r\n cicd_pipelines\r\n WHERE\r\n $__timeFilter(finished_date)\r\n and id like \"%github%\"\r\n and cicd_scope_id in (${repo_id})\r\n -- the following condition will remove the month with incomplete data\r\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\r\n GROUP BY \r\n time, result, id\r\n)\r\n\r\nSELECT \r\n date_format(time,'%M %Y') as month,\r\n 1.0 * sum(case when result = 'SUCCESS' then 1 else 0 end)/ count(*) as \"Workflow Runs Success Rate\"\r\nFROM _build_success_rate\r\nGROUP BY 
time\r\nORDER BY time", "refId": "A", "select": [ [ @@ -3265,7 +3265,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\n count(distinct case when result = 'SUCCESS' then id else null end) as successful_workflow_run_count,\n count(distinct case when result != 'SUCCESS' then id else null end) as failed_workflow_run_count\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%github%\"\n and cicd_scope_id in (${repo_id:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\nGROUP BY 1", + "rawSql": "SELECT\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\n count(distinct case when result = 'SUCCESS' then id else null end) as successful_workflow_run_count,\n count(distinct case when result != 'SUCCESS' then id else null end) as failed_workflow_run_count\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%github%\"\n and cicd_scope_id in (${repo_id})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\nGROUP BY 1", "refId": "A", "select": [ [ @@ -3418,7 +3418,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "WITH _builds as(\n SELECT\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\n avg(duration_sec) as mean_duration_sec\n FROM \n cicd_pipelines\n WHERE\n $__timeFilter(finished_date)\n and id like \"%github%\"\n and cicd_scope_id in (${repo_id:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n GROUP BY 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n mean_duration_sec/60 as mean_duration_minutes\nFROM _builds\nORDER BY time\n", + "rawSql": "WITH _builds as(\n SELECT\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\n avg(duration_sec) as mean_duration_sec\n FROM \n cicd_pipelines\n WHERE\n $__timeFilter(finished_date)\n and id like \"%github%\"\n and cicd_scope_id in (${repo_id})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n GROUP BY 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n mean_duration_sec/60 as mean_duration_minutes\nFROM _builds\nORDER BY time\n", "refId": "A", "select": [ [ diff --git a/grafana/dashboards/GithubReleaseQualityAndContributionAnalysis.json b/grafana/dashboards/GithubReleaseQualityAndContributionAnalysis.json index ca8673cb626..db64e2d8c44 100644 --- a/grafana/dashboards/GithubReleaseQualityAndContributionAnalysis.json +++ b/grafana/dashboards/GithubReleaseQualityAndContributionAnalysis.json @@ -118,7 +118,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "-- Get the bug distribution in major versions\nwith bugs_in_each_tag as(\n\tselect \n\t\tSUBSTRING_INDEX(rid.new_ref_id,'refs/tags/', -1) as tag_name, \n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) as repo_id,\n\t\ti.issue_key, i.type, i.title, i.description\n\tfrom\n\t\trefs_issues_diffs rid\n\t\tleft 
join issues i on rid.issue_id = i.id\n\twhere\n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) in (${repo_id:sqlstring}+'')\n\t\tand i.type = 'BUG'\n)\n\n\nSELECT \n\tconcat(SUBSTRING_INDEX(biet.tag_name,'.',3), '.x') as minor_version,\n\tcount(*) as bug_count\nFROM \n\tbugs_in_each_tag biet\nGROUP BY 1", + "rawSql": "-- Get the bug distribution in major versions\nwith bugs_in_each_tag as(\n\tselect \n\t\tSUBSTRING_INDEX(rid.new_ref_id,'refs/tags/', -1) as tag_name, \n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) as repo_id,\n\t\ti.issue_key, i.type, i.title, i.description\n\tfrom\n\t\trefs_issues_diffs rid\n\t\tleft join issues i on rid.issue_id = i.id\n\twhere\n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) in (${repo_id})\n\t\tand i.type = 'BUG'\n)\n\n\nSELECT \n\tconcat(SUBSTRING_INDEX(biet.tag_name,'.',3), '.x') as minor_version,\n\tcount(*) as bug_count\nFROM \n\tbugs_in_each_tag biet\nGROUP BY 1", "refId": "A", "select": [ [ @@ -245,7 +245,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "-- Get the number of fixed bugs in the last 5 tags\nwith refs_commits_diffs as(\n SELECT\n new_refs.id as new_ref_id, old_refs.id as old_ref_id, commits_diffs.commit_sha, new_commit_sha, old_commit_sha\n FROM\n commits_diffs\n LEFT JOIN refs new_refs on new_refs.commit_sha = commits_diffs.new_commit_sha\n LEFT JOIN refs old_refs on old_refs.commit_sha = commits_diffs.old_commit_sha\n),\n\n_last_5_tags as(\n SELECT \n distinct SUBSTRING_INDEX(new_ref_id,':', -1) as new_ref_id, SUBSTRING_INDEX(old_ref_id,':', -1) as old_ref_id\n -- distinct new_ref_id, old_ref_id\n FROM \n refs_commits_diffs\n WHERE\n\t\tSUBSTRING_INDEX(new_ref_id,':', 4) in (${repo_id:sqlstring}+'')\n\tORDER BY 1 desc\n\tLIMIT 5\n),\n\n_bugs_of_tags as(\n\tselect \n\t\tSUBSTRING_INDEX(rid.new_ref_id,'tags/', -1) as tag_name, \n\t\t-- SUBSTRING_INDEX(rid.new_ref_id,':', 3) as repo_id,\n\t\tcount(*) as bug_count\n\tfrom\n\t\trefs_issues_diffs rid\n\t\tleft join issues i on rid.issue_id = i.id\n\twhere\n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) in (${repo_id:sqlstring}+'')\n\t\t-- and rid.new_ref_id in (SELECT new_ref_id FROM _last_5_tags)\n\t\tand SUBSTRING_INDEX(rid.new_ref_id,':', -1) in (SELECT new_ref_id FROM _last_5_tags)\n\t\tand i.type = 'BUG'\n\t-- GROUP BY 1,2\n\t GROUP BY 1\n),\n\n_combine_pr as (\n select pull_request_id as id, commit_sha from pull_request_commits left join pull_requests p on pull_request_commits.pull_request_id = p.id\n where p.base_repo_id in (${repo_id:sqlstring}+'')\n union\n select id, merge_commit_sha as commit_sha from pull_requests where base_repo_id in (${repo_id:sqlstring}+'')\n),\n\n_commit_count_of_pr as(\n select\n SUBSTRING_INDEX(rcd.new_ref_id,'tags/', -1) as tag_name, \n\t\tSUBSTRING_INDEX(rcd.new_ref_id,':', 4) as repo_id,\n pr.id as pull_request_id,\n count(c.sha) as commit_count\n FROM \n refs_commits_diffs rcd\n\t\tleft join commits c on rcd.commit_sha = c.sha\n\t\t-- left join pull_request_commits prc on c.sha = prc.commit_sha\n\t\tleft join _combine_pr pr on c.sha = pr.commit_sha\n\twhere\n\t\tSUBSTRING_INDEX(rcd.new_ref_id,':', 4) in (${repo_id:sqlstring}+'')\n\t\t-- and rcd.new_ref_id in (SELECT new_ref_id FROM _last_5_tags)\n\t\tand SUBSTRING_INDEX(rcd.new_ref_id,':', -1) in (SELECT SUBSTRING_INDEX(new_ref_id,':', -1) FROM _last_5_tags)\n\tgroup by 1,2,3\n),\n\n_pr_worktype as(\n select\n distinct pri.pull_request_id,i.type\n from\n pull_request_issues pri\n left join pull_requests pr on pri.pull_request_id = pr.id\n left join issues i on pri.issue_id = 
i.id\n where \n i.issue_key != 0\n),\n\n_pr_elco_and_worktype as(\n select\n ccop.tag_name, \n sum(case when pw.type = 'BUG' then commit_count else 0 end)/sum(commit_count) as cost_percentage\n from \n _commit_count_of_pr ccop\n left join _pr_worktype pw on ccop.pull_request_id = pw.pull_request_id\n GROUP BY 1\n)\n\nSELECT \n\tbot.tag_name,\n\tbot.bug_count,\n\tpeaw.cost_percentage as \"cost_percentage(bugfixing commits/total commits)\"\nFROM \n\t_bugs_of_tags bot\n\tjoin _pr_elco_and_worktype peaw on bot.tag_name = peaw.tag_name\nORDER BY 1", + "rawSql": "-- Get the number of fixed bugs in the last 5 tags\nwith refs_commits_diffs as(\n SELECT\n new_refs.id as new_ref_id, old_refs.id as old_ref_id, commits_diffs.commit_sha, new_commit_sha, old_commit_sha\n FROM\n commits_diffs\n LEFT JOIN refs new_refs on new_refs.commit_sha = commits_diffs.new_commit_sha\n LEFT JOIN refs old_refs on old_refs.commit_sha = commits_diffs.old_commit_sha\n),\n\n_last_5_tags as(\n SELECT \n distinct SUBSTRING_INDEX(new_ref_id,':', -1) as new_ref_id, SUBSTRING_INDEX(old_ref_id,':', -1) as old_ref_id\n -- distinct new_ref_id, old_ref_id\n FROM \n refs_commits_diffs\n WHERE\n\t\tSUBSTRING_INDEX(new_ref_id,':', 4) in (${repo_id})\n\tORDER BY 1 desc\n\tLIMIT 5\n),\n\n_bugs_of_tags as(\n\tselect \n\t\tSUBSTRING_INDEX(rid.new_ref_id,'tags/', -1) as tag_name, \n\t\t-- SUBSTRING_INDEX(rid.new_ref_id,':', 3) as repo_id,\n\t\tcount(*) as bug_count\n\tfrom\n\t\trefs_issues_diffs rid\n\t\tleft join issues i on rid.issue_id = i.id\n\twhere\n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) in (${repo_id})\n\t\t-- and rid.new_ref_id in (SELECT new_ref_id FROM _last_5_tags)\n\t\tand SUBSTRING_INDEX(rid.new_ref_id,':', -1) in (SELECT new_ref_id FROM _last_5_tags)\n\t\tand i.type = 'BUG'\n\t-- GROUP BY 1,2\n\t GROUP BY 1\n),\n\n_combine_pr as (\n select pull_request_id as id, commit_sha from pull_request_commits left join pull_requests p on pull_request_commits.pull_request_id = p.id\n where p.base_repo_id in (${repo_id})\n union\n select id, merge_commit_sha as commit_sha from pull_requests where base_repo_id in (${repo_id})\n),\n\n_commit_count_of_pr as(\n select\n SUBSTRING_INDEX(rcd.new_ref_id,'tags/', -1) as tag_name, \n\t\tSUBSTRING_INDEX(rcd.new_ref_id,':', 4) as repo_id,\n pr.id as pull_request_id,\n count(c.sha) as commit_count\n FROM \n refs_commits_diffs rcd\n\t\tleft join commits c on rcd.commit_sha = c.sha\n\t\t-- left join pull_request_commits prc on c.sha = prc.commit_sha\n\t\tleft join _combine_pr pr on c.sha = pr.commit_sha\n\twhere\n\t\tSUBSTRING_INDEX(rcd.new_ref_id,':', 4) in (${repo_id})\n\t\t-- and rcd.new_ref_id in (SELECT new_ref_id FROM _last_5_tags)\n\t\tand SUBSTRING_INDEX(rcd.new_ref_id,':', -1) in (SELECT SUBSTRING_INDEX(new_ref_id,':', -1) FROM _last_5_tags)\n\tgroup by 1,2,3\n),\n\n_pr_worktype as(\n select\n distinct pri.pull_request_id,i.type\n from\n pull_request_issues pri\n left join pull_requests pr on pri.pull_request_id = pr.id\n left join issues i on pri.issue_id = i.id\n where \n i.issue_key != 0\n),\n\n_pr_elco_and_worktype as(\n select\n ccop.tag_name, \n sum(case when pw.type = 'BUG' then commit_count else 0 end)/sum(commit_count) as cost_percentage\n from \n _commit_count_of_pr ccop\n left join _pr_worktype pw on ccop.pull_request_id = pw.pull_request_id\n GROUP BY 1\n)\n\nSELECT \n\tbot.tag_name,\n\tbot.bug_count,\n\tpeaw.cost_percentage as \"cost_percentage(bugfixing commits/total commits)\"\nFROM \n\t_bugs_of_tags bot\n\tjoin _pr_elco_and_worktype peaw on bot.tag_name = 
peaw.tag_name\nORDER BY 1", "refId": "A", "select": [ [ @@ -331,7 +331,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "-- Get the severity distribution in bugs\n-- Get the work-type distribution in the last n tags\nwith refs_commits_diffs as(\n SELECT\n new_refs.id as new_ref_id, old_refs.id as old_ref_id, commits_diffs.commit_sha, new_commit_sha, old_commit_sha\n FROM\n commits_diffs\n LEFT JOIN refs new_refs on new_refs.commit_sha = commits_diffs.new_commit_sha\n LEFT JOIN refs old_refs on old_refs.commit_sha = commits_diffs.old_commit_sha\n),\n\n_last_n_tags as(\n SELECT \n -- distinct new_ref_id, old_ref_id\n distinct SUBSTRING_INDEX(new_ref_id,':', -1) as new_ref_id, SUBSTRING_INDEX(old_ref_id,':', -1) as old_ref_id\n FROM \n refs_commits_diffs\n WHERE\n\t\tSUBSTRING_INDEX(new_ref_id,':', 4) in (${repo_id:sqlstring}+'')\n\tORDER BY 1 desc\n\tLIMIT 1\n),\n\nbugs_in_each_tag as(\n\tselect \n\t\tSUBSTRING_INDEX(rid.new_ref_id,'refs/tags/', -1) as tag_name, \n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) as repo_id,\n\t\ti.issue_key, i.type, i.title, i.description, i.severity \n\tfrom\n\t\trefs_issues_diffs rid\n\t\tleft join issues i on rid.issue_id = i.id\n\twhere\n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) in (${repo_id:sqlstring}+'')\n\t\t-- and rid.new_ref_id in (SELECT new_ref_id FROM _last_n_tags)\n\t\tand SUBSTRING_INDEX(rid.new_ref_id,':', -1) in (SELECT new_ref_id FROM _last_n_tags)\n\t\tand i.type = 'BUG'\n)\n\nSELECT \n concat(biet.tag_name, \" \", case when biet.severity != '' then biet.severity else 'UNKNOWN' end) as severity,\n count(*) as bug_count\nFROM \n\tbugs_in_each_tag biet\nGROUP BY 1", + "rawSql": "-- Get the severity distribution in bugs\n-- Get the work-type distribution in the last n tags\nwith refs_commits_diffs as(\n SELECT\n new_refs.id as new_ref_id, old_refs.id as old_ref_id, commits_diffs.commit_sha, new_commit_sha, old_commit_sha\n FROM\n commits_diffs\n LEFT JOIN refs new_refs on new_refs.commit_sha = commits_diffs.new_commit_sha\n LEFT JOIN refs old_refs on old_refs.commit_sha = commits_diffs.old_commit_sha\n),\n\n_last_n_tags as(\n SELECT \n -- distinct new_ref_id, old_ref_id\n distinct SUBSTRING_INDEX(new_ref_id,':', -1) as new_ref_id, SUBSTRING_INDEX(old_ref_id,':', -1) as old_ref_id\n FROM \n refs_commits_diffs\n WHERE\n\t\tSUBSTRING_INDEX(new_ref_id,':', 4) in (${repo_id})\n\tORDER BY 1 desc\n\tLIMIT 1\n),\n\nbugs_in_each_tag as(\n\tselect \n\t\tSUBSTRING_INDEX(rid.new_ref_id,'refs/tags/', -1) as tag_name, \n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) as repo_id,\n\t\ti.issue_key, i.type, i.title, i.description, i.severity \n\tfrom\n\t\trefs_issues_diffs rid\n\t\tleft join issues i on rid.issue_id = i.id\n\twhere\n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) in (${repo_id})\n\t\t-- and rid.new_ref_id in (SELECT new_ref_id FROM _last_n_tags)\n\t\tand SUBSTRING_INDEX(rid.new_ref_id,':', -1) in (SELECT new_ref_id FROM _last_n_tags)\n\t\tand i.type = 'BUG'\n)\n\nSELECT \n concat(biet.tag_name, \" \", case when biet.severity != '' then biet.severity else 'UNKNOWN' end) as severity,\n count(*) as bug_count\nFROM \n\tbugs_in_each_tag biet\nGROUP BY 1", "refId": "A", "select": [ [ @@ -487,7 +487,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "-- Get the severity distribution in bugs\n-- Get the work-type distribution in the last n tags\nwith refs_commits_diffs as(\n SELECT\n new_refs.id as new_ref_id, old_refs.id as old_ref_id, commits_diffs.commit_sha, new_commit_sha, old_commit_sha\n 
FROM\n commits_diffs\n LEFT JOIN refs new_refs on new_refs.commit_sha = commits_diffs.new_commit_sha\n LEFT JOIN refs old_refs on old_refs.commit_sha = commits_diffs.old_commit_sha\n),\n\n_last_n_tags as(\n SELECT \n -- distinct new_ref_id, old_ref_id\n distinct SUBSTRING_INDEX(new_ref_id,':', -1) as new_ref_id, SUBSTRING_INDEX(old_ref_id,':', -1) as old_ref_id\n FROM \n refs_commits_diffs\n WHERE\n\t\tSUBSTRING_INDEX(new_ref_id,':', 4) in (${repo_id:sqlstring}+'')\n\tORDER BY 1 desc\n\tLIMIT 1,1\n),\n\nbugs_in_each_tag as(\n\tselect \n\t\tSUBSTRING_INDEX(rid.new_ref_id,'refs/tags/', -1) as tag_name, \n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) as repo_id,\n\t\ti.issue_key, i.type, i.title, i.description, i.severity \n\tfrom\n\t\trefs_issues_diffs rid\n\t\tleft join issues i on rid.issue_id = i.id\n\twhere\n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) in (${repo_id:sqlstring}+'')\n\t\t-- and rid.new_ref_id in (SELECT new_ref_id FROM _last_n_tags)\n\t\tand SUBSTRING_INDEX(rid.new_ref_id,':', -1) in (SELECT new_ref_id FROM _last_n_tags)\n\t\tand i.type = 'BUG'\n)\n\nSELECT \n concat(biet.tag_name, \" \", case when biet.severity != '' then biet.severity else 'UNKNOWN' end) as severity,\n count(*) as bug_count\nFROM \n\tbugs_in_each_tag biet\nGROUP BY 1", + "rawSql": "-- Get the severity distribution in bugs\n-- Get the work-type distribution in the last n tags\nwith refs_commits_diffs as(\n SELECT\n new_refs.id as new_ref_id, old_refs.id as old_ref_id, commits_diffs.commit_sha, new_commit_sha, old_commit_sha\n FROM\n commits_diffs\n LEFT JOIN refs new_refs on new_refs.commit_sha = commits_diffs.new_commit_sha\n LEFT JOIN refs old_refs on old_refs.commit_sha = commits_diffs.old_commit_sha\n),\n\n_last_n_tags as(\n SELECT \n -- distinct new_ref_id, old_ref_id\n distinct SUBSTRING_INDEX(new_ref_id,':', -1) as new_ref_id, SUBSTRING_INDEX(old_ref_id,':', -1) as old_ref_id\n FROM \n refs_commits_diffs\n WHERE\n\t\tSUBSTRING_INDEX(new_ref_id,':', 4) in (${repo_id})\n\tORDER BY 1 desc\n\tLIMIT 1,1\n),\n\nbugs_in_each_tag as(\n\tselect \n\t\tSUBSTRING_INDEX(rid.new_ref_id,'refs/tags/', -1) as tag_name, \n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) as repo_id,\n\t\ti.issue_key, i.type, i.title, i.description, i.severity \n\tfrom\n\t\trefs_issues_diffs rid\n\t\tleft join issues i on rid.issue_id = i.id\n\twhere\n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) in (${repo_id})\n\t\t-- and rid.new_ref_id in (SELECT new_ref_id FROM _last_n_tags)\n\t\tand SUBSTRING_INDEX(rid.new_ref_id,':', -1) in (SELECT new_ref_id FROM _last_n_tags)\n\t\tand i.type = 'BUG'\n)\n\nSELECT \n concat(biet.tag_name, \" \", case when biet.severity != '' then biet.severity else 'UNKNOWN' end) as severity,\n count(*) as bug_count\nFROM \n\tbugs_in_each_tag biet\nGROUP BY 1", "refId": "A", "select": [ [ @@ -573,7 +573,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "-- Get the severity distribution in bugs\n-- Get the work-type distribution in the last n tags\nwith refs_commits_diffs as(\n SELECT\n new_refs.id as new_ref_id, old_refs.id as old_ref_id, commits_diffs.commit_sha, new_commit_sha, old_commit_sha\n FROM\n commits_diffs\n LEFT JOIN refs new_refs on new_refs.commit_sha = commits_diffs.new_commit_sha\n LEFT JOIN refs old_refs on old_refs.commit_sha = commits_diffs.old_commit_sha\n),\n\n_last_n_tags as(\n SELECT \n -- distinct new_ref_id, old_ref_id\n distinct SUBSTRING_INDEX(new_ref_id,':', -1) as new_ref_id, SUBSTRING_INDEX(old_ref_id,':', -1) as old_ref_id\n FROM \n refs_commits_diffs\n 
WHERE\n\t\tSUBSTRING_INDEX(new_ref_id,':', 4) in (${repo_id:sqlstring}+'')\n\tORDER BY 1 desc\n\tLIMIT 2,1\n),\n\nbugs_in_each_tag as(\n\tselect \n\t\tSUBSTRING_INDEX(rid.new_ref_id,'refs/tags/', -1) as tag_name, \n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) as repo_id,\n\t\ti.issue_key, i.type, i.title, i.description, i.severity \n\tfrom\n\t\trefs_issues_diffs rid\n\t\tleft join issues i on rid.issue_id = i.id\n\twhere\n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) in (${repo_id:sqlstring}+'')\n\t\t-- and rid.new_ref_id in (SELECT new_ref_id FROM _last_n_tags)\n\t\tand SUBSTRING_INDEX(rid.new_ref_id,':', -1) in (SELECT new_ref_id FROM _last_n_tags)\n\t\tand i.type = 'BUG'\n)\n\nSELECT \n concat(biet.tag_name, \" \", case when biet.severity != '' then biet.severity else 'UNKNOWN' end) as severity,\n count(*) as bug_count\nFROM \n\tbugs_in_each_tag biet\nGROUP BY 1", + "rawSql": "-- Get the severity distribution in bugs\n-- Get the work-type distribution in the last n tags\nwith refs_commits_diffs as(\n SELECT\n new_refs.id as new_ref_id, old_refs.id as old_ref_id, commits_diffs.commit_sha, new_commit_sha, old_commit_sha\n FROM\n commits_diffs\n LEFT JOIN refs new_refs on new_refs.commit_sha = commits_diffs.new_commit_sha\n LEFT JOIN refs old_refs on old_refs.commit_sha = commits_diffs.old_commit_sha\n),\n\n_last_n_tags as(\n SELECT \n -- distinct new_ref_id, old_ref_id\n distinct SUBSTRING_INDEX(new_ref_id,':', -1) as new_ref_id, SUBSTRING_INDEX(old_ref_id,':', -1) as old_ref_id\n FROM \n refs_commits_diffs\n WHERE\n\t\tSUBSTRING_INDEX(new_ref_id,':', 4) in (${repo_id})\n\tORDER BY 1 desc\n\tLIMIT 2,1\n),\n\nbugs_in_each_tag as(\n\tselect \n\t\tSUBSTRING_INDEX(rid.new_ref_id,'refs/tags/', -1) as tag_name, \n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) as repo_id,\n\t\ti.issue_key, i.type, i.title, i.description, i.severity \n\tfrom\n\t\trefs_issues_diffs rid\n\t\tleft join issues i on rid.issue_id = i.id\n\twhere\n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) in (${repo_id})\n\t\t-- and rid.new_ref_id in (SELECT new_ref_id FROM _last_n_tags)\n\t\tand SUBSTRING_INDEX(rid.new_ref_id,':', -1) in (SELECT new_ref_id FROM _last_n_tags)\n\t\tand i.type = 'BUG'\n)\n\nSELECT \n concat(biet.tag_name, \" \", case when biet.severity != '' then biet.severity else 'UNKNOWN' end) as severity,\n count(*) as bug_count\nFROM \n\tbugs_in_each_tag biet\nGROUP BY 1", "refId": "A", "select": [ [ @@ -735,7 +735,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "-- Get the number of fixed bugs in the last 5 tags\nwith refs_commits_diffs as(\n SELECT\n new_refs.id as new_ref_id, old_refs.id as old_ref_id, commits_diffs.commit_sha, new_commit_sha, old_commit_sha\n FROM\n commits_diffs\n LEFT JOIN refs new_refs on new_refs.commit_sha = commits_diffs.new_commit_sha\n LEFT JOIN refs old_refs on old_refs.commit_sha = commits_diffs.old_commit_sha\n),\n\n_last_5_tags as(\n SELECT \n -- distinct new_ref_id, old_ref_id\n distinct SUBSTRING_INDEX(new_ref_id,':', -1) as new_ref_id, SUBSTRING_INDEX(old_ref_id,':', -1) as old_ref_id\n FROM \n refs_commits_diffs\n WHERE\n\t\tSUBSTRING_INDEX(new_ref_id,':', 4) in (${repo_id:sqlstring}+'')\n\tORDER BY 1 desc\n\tLIMIT 5\n)\n\t\nselect distinct\n\tb.name as repo_name,\n\tSUBSTRING_INDEX(rid.new_ref_id,'tags/', -1) as tag_name, \n\ti.issue_key as issue_key,\n\ti.title,\n\ti.assignee_name,\n\ti.lead_time_minutes/1440 as lead_time_in_days,\n\tconcat(b.url,'/',i.issue_key) as url\nfrom\n\trefs_issues_diffs rid\n\tleft join issues i on rid.issue_id = 
i.id\n\tjoin boards b on SUBSTRING_INDEX(rid.new_ref_id,':', 4) = b.id\nwhere\n\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) in (${repo_id:sqlstring}+'')\n\t-- and rid.new_ref_id in (SELECT new_ref_id FROM _last_5_tags)\n\tand SUBSTRING_INDEX(rid.new_ref_id,':', -1) in (SELECT SUBSTRING_INDEX(new_ref_id,':', -1) FROM _last_5_tags)\n\tand i.type = 'BUG'\norder by tag_name desc", + "rawSql": "-- Get the number of fixed bugs in the last 5 tags\nwith refs_commits_diffs as(\n SELECT\n new_refs.id as new_ref_id, old_refs.id as old_ref_id, commits_diffs.commit_sha, new_commit_sha, old_commit_sha\n FROM\n commits_diffs\n LEFT JOIN refs new_refs on new_refs.commit_sha = commits_diffs.new_commit_sha\n LEFT JOIN refs old_refs on old_refs.commit_sha = commits_diffs.old_commit_sha\n),\n\n_last_5_tags as(\n SELECT \n -- distinct new_ref_id, old_ref_id\n distinct SUBSTRING_INDEX(new_ref_id,':', -1) as new_ref_id, SUBSTRING_INDEX(old_ref_id,':', -1) as old_ref_id\n FROM \n refs_commits_diffs\n WHERE\n\t\tSUBSTRING_INDEX(new_ref_id,':', 4) in (${repo_id})\n\tORDER BY 1 desc\n\tLIMIT 5\n)\n\t\nselect distinct\n\tb.name as repo_name,\n\tSUBSTRING_INDEX(rid.new_ref_id,'tags/', -1) as tag_name, \n\ti.issue_key as issue_key,\n\ti.title,\n\ti.assignee_name,\n\ti.lead_time_minutes/1440 as lead_time_in_days,\n\tconcat(b.url,'/',i.issue_key) as url\nfrom\n\trefs_issues_diffs rid\n\tleft join issues i on rid.issue_id = i.id\n\tjoin boards b on SUBSTRING_INDEX(rid.new_ref_id,':', 4) = b.id\nwhere\n\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) in (${repo_id})\n\t-- and rid.new_ref_id in (SELECT new_ref_id FROM _last_5_tags)\n\tand SUBSTRING_INDEX(rid.new_ref_id,':', -1) in (SELECT SUBSTRING_INDEX(new_ref_id,':', -1) FROM _last_5_tags)\n\tand i.type = 'BUG'\norder by tag_name desc", "refId": "A", "select": [ [ @@ -822,7 +822,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "-- Component distribution of bugs fixed in the last 5 tags\nwith refs_commits_diffs as(\n SELECT\n new_refs.id as new_ref_id, old_refs.id as old_ref_id, commits_diffs.commit_sha, new_commit_sha, old_commit_sha\n FROM\n commits_diffs\n LEFT JOIN refs new_refs on new_refs.commit_sha = commits_diffs.new_commit_sha\n LEFT JOIN refs old_refs on old_refs.commit_sha = commits_diffs.old_commit_sha\n),\n\n_last_5_tags as(\n SELECT \n -- distinct new_ref_id, old_ref_id\n distinct SUBSTRING_INDEX(new_ref_id,':', -1) as new_ref_id, SUBSTRING_INDEX(old_ref_id,':', -1) as old_ref_id\n FROM \n refs_commits_diffs\n WHERE\n\t\tSUBSTRING_INDEX(new_ref_id,':', 4) in (${repo_id:sqlstring}+'')\n\tORDER BY 1 desc\n\tLIMIT 5\n),\n\nbugs_in_each_tag as(\n\tselect \n\t\tSUBSTRING_INDEX(rid.new_ref_id,'refs/', -1) as tag_name, \n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) as repo_id,\n\t\ti.issue_key, i.component, i.severity, i.title, i.description\n\tfrom\n\t\trefs_issues_diffs rid\n\t\tleft join issues i on rid.issue_id = i.id\n\twhere\n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) in (${repo_id:sqlstring}+'')\n\t\t-- and rid.new_ref_id in (SELECT new_ref_id FROM _last_5_tags)\n\t\tand SUBSTRING_INDEX(rid.new_ref_id,':', -1) in (SELECT new_ref_id FROM _last_5_tags)\n\t\tand i.type = 'BUG'\n)\n\n\nSELECT\n\tcase when component = '' then 'unlabeled' else 'labeled' end as component,\n\tcount(*) as bug_count\nFROM \n\tbugs_in_each_tag biet\nGROUP BY 1", + "rawSql": "-- Component distribution of bugs fixed in the last 5 tags\nwith refs_commits_diffs as(\n SELECT\n new_refs.id as new_ref_id, old_refs.id as old_ref_id, commits_diffs.commit_sha, 
new_commit_sha, old_commit_sha\n FROM\n commits_diffs\n LEFT JOIN refs new_refs on new_refs.commit_sha = commits_diffs.new_commit_sha\n LEFT JOIN refs old_refs on old_refs.commit_sha = commits_diffs.old_commit_sha\n),\n\n_last_5_tags as(\n SELECT \n -- distinct new_ref_id, old_ref_id\n distinct SUBSTRING_INDEX(new_ref_id,':', -1) as new_ref_id, SUBSTRING_INDEX(old_ref_id,':', -1) as old_ref_id\n FROM \n refs_commits_diffs\n WHERE\n\t\tSUBSTRING_INDEX(new_ref_id,':', 4) in (${repo_id})\n\tORDER BY 1 desc\n\tLIMIT 5\n),\n\nbugs_in_each_tag as(\n\tselect \n\t\tSUBSTRING_INDEX(rid.new_ref_id,'refs/', -1) as tag_name, \n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) as repo_id,\n\t\ti.issue_key, i.component, i.severity, i.title, i.description\n\tfrom\n\t\trefs_issues_diffs rid\n\t\tleft join issues i on rid.issue_id = i.id\n\twhere\n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) in (${repo_id})\n\t\t-- and rid.new_ref_id in (SELECT new_ref_id FROM _last_5_tags)\n\t\tand SUBSTRING_INDEX(rid.new_ref_id,':', -1) in (SELECT new_ref_id FROM _last_5_tags)\n\t\tand i.type = 'BUG'\n)\n\n\nSELECT\n\tcase when component = '' then 'unlabeled' else 'labeled' end as component,\n\tcount(*) as bug_count\nFROM \n\tbugs_in_each_tag biet\nGROUP BY 1", "refId": "A", "select": [ [ @@ -909,7 +909,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "-- Component distribution of bugs fixed in the last 5 tags\nwith refs_commits_diffs as(\n SELECT\n new_refs.id as new_ref_id, old_refs.id as old_ref_id, commits_diffs.commit_sha, new_commit_sha, old_commit_sha\n FROM\n commits_diffs\n LEFT JOIN refs new_refs on new_refs.commit_sha = commits_diffs.new_commit_sha\n LEFT JOIN refs old_refs on old_refs.commit_sha = commits_diffs.old_commit_sha\n),\n\n_last_5_tags as(\n SELECT \n -- distinct new_ref_id, old_ref_id\n distinct SUBSTRING_INDEX(new_ref_id,':', -1) as new_ref_id, SUBSTRING_INDEX(old_ref_id,':', -1) as old_ref_id\n FROM \n refs_commits_diffs\n WHERE\n\t\tSUBSTRING_INDEX(new_ref_id,':', 4) in (${repo_id:sqlstring}+'')\n\tORDER BY 1 desc\n\tLIMIT 5\n),\n\nbugs_in_each_tag as(\n\tselect \n\t\tSUBSTRING_INDEX(rid.new_ref_id,'refs/', -1) as tag_name, \n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) as repo_id,\n\t\ti.issue_key, i.component, i.severity, i.title, i.description\n\tfrom\n\t\trefs_issues_diffs rid\n\t\tleft join issues i on rid.issue_id = i.id\n\twhere\n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) in (${repo_id:sqlstring}+'')\n\t\t-- and rid.new_ref_id in (SELECT new_ref_id FROM _last_5_tags)\n\t\tand SUBSTRING_INDEX(rid.new_ref_id,':', -1) in (SELECT new_ref_id FROM _last_5_tags)\n\t\tand i.type = 'BUG'\n)\n\n\nSELECT\n\tcomponent,\n\tcount(*) as bug_count\nFROM \n\tbugs_in_each_tag biet\nwhere \n component != ''\nGROUP BY 1", + "rawSql": "-- Component distribution of bugs fixed in the last 5 tags\nwith refs_commits_diffs as(\n SELECT\n new_refs.id as new_ref_id, old_refs.id as old_ref_id, commits_diffs.commit_sha, new_commit_sha, old_commit_sha\n FROM\n commits_diffs\n LEFT JOIN refs new_refs on new_refs.commit_sha = commits_diffs.new_commit_sha\n LEFT JOIN refs old_refs on old_refs.commit_sha = commits_diffs.old_commit_sha\n),\n\n_last_5_tags as(\n SELECT \n -- distinct new_ref_id, old_ref_id\n distinct SUBSTRING_INDEX(new_ref_id,':', -1) as new_ref_id, SUBSTRING_INDEX(old_ref_id,':', -1) as old_ref_id\n FROM \n refs_commits_diffs\n WHERE\n\t\tSUBSTRING_INDEX(new_ref_id,':', 4) in (${repo_id})\n\tORDER BY 1 desc\n\tLIMIT 5\n),\n\nbugs_in_each_tag as(\n\tselect 
\n\t\tSUBSTRING_INDEX(rid.new_ref_id,'refs/', -1) as tag_name, \n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) as repo_id,\n\t\ti.issue_key, i.component, i.severity, i.title, i.description\n\tfrom\n\t\trefs_issues_diffs rid\n\t\tleft join issues i on rid.issue_id = i.id\n\twhere\n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) in (${repo_id})\n\t\t-- and rid.new_ref_id in (SELECT new_ref_id FROM _last_5_tags)\n\t\tand SUBSTRING_INDEX(rid.new_ref_id,':', -1) in (SELECT new_ref_id FROM _last_5_tags)\n\t\tand i.type = 'BUG'\n)\n\n\nSELECT\n\tcomponent,\n\tcount(*) as bug_count\nFROM \n\tbugs_in_each_tag biet\nwhere \n component != ''\nGROUP BY 1", "refId": "A", "select": [ [ @@ -997,7 +997,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "-- Get the % of contributors who fixed 80% of bugs in the last 5 tags\nwith refs_commits_diffs as(\n SELECT\n new_refs.id as new_ref_id, old_refs.id as old_ref_id, commits_diffs.commit_sha, new_commit_sha, old_commit_sha\n FROM\n commits_diffs\n LEFT JOIN refs new_refs on new_refs.commit_sha = commits_diffs.new_commit_sha\n LEFT JOIN refs old_refs on old_refs.commit_sha = commits_diffs.old_commit_sha\n),\n\n_last_5_tags as(\n SELECT \n -- distinct new_ref_id, old_ref_id\n distinct SUBSTRING_INDEX(new_ref_id,':', -1) as new_ref_id, SUBSTRING_INDEX(old_ref_id,':', -1) as old_ref_id\n FROM \n refs_commits_diffs\n WHERE\n\t\tSUBSTRING_INDEX(new_ref_id,':', 4) in (${repo_id:sqlstring}+'')\n\tORDER BY 1 desc\n\tLIMIT 5\n),\n\n_bugs as(\n\tselect \n\t\ti.issue_key, i.type, i.severity, i.title, i.description,\n\t\tpr.id, pr.author_name as pr_author, pr.created_date,\n\t\trank() over(partition by i.id order by pr.created_date asc) as pr_rank\n\tfrom\n\t\trefs_issues_diffs rid\n\t\tleft join issues i on rid.issue_id = i.id\n\t\tleft join pull_request_issues pri on i.id = pri.issue_id\n\t\tleft join pull_requests pr on pri.pull_request_id = pr.id\n\twhere\n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) in (${repo_id:sqlstring}+'')\n\t\t-- and rid.new_ref_id in (SELECT new_ref_id FROM _last_5_tags)\n\t\tand SUBSTRING_INDEX(rid.new_ref_id,':', -1) in (SELECT new_ref_id FROM _last_5_tags)\n\t\tand i.type = 'BUG'\n\torder by i.issue_key\n),\n\n_bug_fixed_count as(\n SELECT \n pr_author,\n count(*) bug_fixed_count\n FROM _bugs\n WHERE pr_rank = 1\n GROUP BY 1\n),\n\n_bug_fixed_count_running_total as(\n SELECT \n *, \n sum(bug_fixed_count) OVER (Order by bug_fixed_count desc) AS running_total\n FROM \n _bug_fixed_count\n),\n\n_percentile as(\n SELECT \n pr_author,\n bug_fixed_count,\n running_total/sum(bug_fixed_count) OVER () AS cumulative_percentage\n FROM \n _bug_fixed_count_running_total\n)\n\n\nSELECT \n count(case when cumulative_percentage <= 0.8 then pr_author else null end)/count(*) as \"% of contributors who fixed 80% of the bugs\"\nFROM _percentile", + "rawSql": "-- Get the % of contributors who fixed 80% of bugs in the last 5 tags\nwith refs_commits_diffs as(\n SELECT\n new_refs.id as new_ref_id, old_refs.id as old_ref_id, commits_diffs.commit_sha, new_commit_sha, old_commit_sha\n FROM\n commits_diffs\n LEFT JOIN refs new_refs on new_refs.commit_sha = commits_diffs.new_commit_sha\n LEFT JOIN refs old_refs on old_refs.commit_sha = commits_diffs.old_commit_sha\n),\n\n_last_5_tags as(\n SELECT \n -- distinct new_ref_id, old_ref_id\n distinct SUBSTRING_INDEX(new_ref_id,':', -1) as new_ref_id, SUBSTRING_INDEX(old_ref_id,':', -1) as old_ref_id\n FROM \n refs_commits_diffs\n WHERE\n\t\tSUBSTRING_INDEX(new_ref_id,':', 4) in (${repo_id})\n\tORDER 
BY 1 desc\n\tLIMIT 5\n),\n\n_bugs as(\n\tselect \n\t\ti.issue_key, i.type, i.severity, i.title, i.description,\n\t\tpr.id, pr.author_name as pr_author, pr.created_date,\n\t\trank() over(partition by i.id order by pr.created_date asc) as pr_rank\n\tfrom\n\t\trefs_issues_diffs rid\n\t\tleft join issues i on rid.issue_id = i.id\n\t\tleft join pull_request_issues pri on i.id = pri.issue_id\n\t\tleft join pull_requests pr on pri.pull_request_id = pr.id\n\twhere\n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) in (${repo_id})\n\t\t-- and rid.new_ref_id in (SELECT new_ref_id FROM _last_5_tags)\n\t\tand SUBSTRING_INDEX(rid.new_ref_id,':', -1) in (SELECT new_ref_id FROM _last_5_tags)\n\t\tand i.type = 'BUG'\n\torder by i.issue_key\n),\n\n_bug_fixed_count as(\n SELECT \n pr_author,\n count(*) bug_fixed_count\n FROM _bugs\n WHERE pr_rank = 1\n GROUP BY 1\n),\n\n_bug_fixed_count_running_total as(\n SELECT \n *, \n sum(bug_fixed_count) OVER (Order by bug_fixed_count desc) AS running_total\n FROM \n _bug_fixed_count\n),\n\n_percentile as(\n SELECT \n pr_author,\n bug_fixed_count,\n running_total/sum(bug_fixed_count) OVER () AS cumulative_percentage\n FROM \n _bug_fixed_count_running_total\n)\n\n\nSELECT \n count(case when cumulative_percentage <= 0.8 then pr_author else null end)/count(*) as \"% of contributors who fixed 80% of the bugs\"\nFROM _percentile", "refId": "A", "select": [ [ @@ -1095,7 +1095,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "-- Get the bug fixer distribution in the last 5 tags\nwith refs_commits_diffs as(\n SELECT\n new_refs.id as new_ref_id, old_refs.id as old_ref_id, commits_diffs.commit_sha, new_commit_sha, old_commit_sha\n FROM\n commits_diffs\n LEFT JOIN refs new_refs on new_refs.commit_sha = commits_diffs.new_commit_sha\n LEFT JOIN refs old_refs on old_refs.commit_sha = commits_diffs.old_commit_sha\n),\n\n_last_5_tags as(\n SELECT \n -- distinct new_ref_id, old_ref_id\n distinct SUBSTRING_INDEX(new_ref_id,':', -1) as new_ref_id, SUBSTRING_INDEX(old_ref_id,':', -1) as old_ref_id\n FROM \n refs_commits_diffs\n WHERE\n\t\tSUBSTRING_INDEX(new_ref_id,':', 4) in (${repo_id:sqlstring}+'')\n\tORDER BY 1 desc\n\tLIMIT 5\n),\n\n_bugs as(\n\tselect \n\t\ti.issue_key, i.type, i.severity, i.title, i.description,\n\t\tpr.id, pr.author_name as pr_author, pr.created_date,\n\t\trank() over(partition by i.id order by pr.created_date asc) as pr_rank\n\tfrom\n\t\trefs_issues_diffs rid\n\t\tleft join issues i on rid.issue_id = i.id\n\t\tleft join pull_request_issues pri on i.id = pri.issue_id\n\t\tleft join pull_requests pr on pri.pull_request_id = pr.id\n\twhere\n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) in (${repo_id:sqlstring}+'')\n\t\t-- and rid.new_ref_id in (SELECT new_ref_id FROM _last_5_tags)\n\t\tand SUBSTRING_INDEX(rid.new_ref_id,':', -1) in (SELECT new_ref_id FROM _last_5_tags)\n\t\tand i.type = 'BUG'\n\torder by i.issue_key\n)\n\nSELECT \n pr_author,\n count(*) bug_fixed_count\nFROM _bugs\nWHERE pr_rank = 1\nGROUP BY 1\nORDER BY 2 desc\nlimit 10", + "rawSql": "-- Get the bug fixer distribution in the last 5 tags\nwith refs_commits_diffs as(\n SELECT\n new_refs.id as new_ref_id, old_refs.id as old_ref_id, commits_diffs.commit_sha, new_commit_sha, old_commit_sha\n FROM\n commits_diffs\n LEFT JOIN refs new_refs on new_refs.commit_sha = commits_diffs.new_commit_sha\n LEFT JOIN refs old_refs on old_refs.commit_sha = commits_diffs.old_commit_sha\n),\n\n_last_5_tags as(\n SELECT \n -- distinct new_ref_id, old_ref_id\n distinct 
SUBSTRING_INDEX(new_ref_id,':', -1) as new_ref_id, SUBSTRING_INDEX(old_ref_id,':', -1) as old_ref_id\n FROM \n refs_commits_diffs\n WHERE\n\t\tSUBSTRING_INDEX(new_ref_id,':', 4) in (${repo_id})\n\tORDER BY 1 desc\n\tLIMIT 5\n),\n\n_bugs as(\n\tselect \n\t\ti.issue_key, i.type, i.severity, i.title, i.description,\n\t\tpr.id, pr.author_name as pr_author, pr.created_date,\n\t\trank() over(partition by i.id order by pr.created_date asc) as pr_rank\n\tfrom\n\t\trefs_issues_diffs rid\n\t\tleft join issues i on rid.issue_id = i.id\n\t\tleft join pull_request_issues pri on i.id = pri.issue_id\n\t\tleft join pull_requests pr on pri.pull_request_id = pr.id\n\twhere\n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) in (${repo_id})\n\t\t-- and rid.new_ref_id in (SELECT new_ref_id FROM _last_5_tags)\n\t\tand SUBSTRING_INDEX(rid.new_ref_id,':', -1) in (SELECT new_ref_id FROM _last_5_tags)\n\t\tand i.type = 'BUG'\n\torder by i.issue_key\n)\n\nSELECT \n pr_author,\n count(*) bug_fixed_count\nFROM _bugs\nWHERE pr_rank = 1\nGROUP BY 1\nORDER BY 2 desc\nlimit 10", "refId": "A", "select": [ [ @@ -1185,7 +1185,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "-- Get the avg bug age in history\nselect \n avg(lead_time_minutes)/1440 as average_bug_age\nfrom issues\nleft join board_issues bi on issues.id = bi.issue_id\nwhere \n type = 'BUG'\n and status = 'DONE'\n and bi.board_id in (${repo_id:sqlstring}+'')", + "rawSql": "-- Get the avg bug age in history\nselect \n avg(lead_time_minutes)/1440 as average_bug_age\nfrom issues\nleft join board_issues bi on issues.id = bi.issue_id\nwhere \n type = 'BUG'\n and status = 'DONE'\n and bi.board_id in (${repo_id})", "refId": "A", "select": [ [ @@ -1289,7 +1289,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "-- Get the bug age in the last 5 tags\nwith refs_commits_diffs as(\n SELECT\n new_refs.id as new_ref_id, old_refs.id as old_ref_id, commits_diffs.commit_sha, new_commit_sha, old_commit_sha\n FROM\n commits_diffs\n LEFT JOIN refs new_refs on new_refs.commit_sha = commits_diffs.new_commit_sha\n LEFT JOIN refs old_refs on old_refs.commit_sha = commits_diffs.old_commit_sha\n),\n\n_last_5_tags as(\n SELECT \n distinct SUBSTRING_INDEX(new_ref_id,':', -1) as new_ref_id, SUBSTRING_INDEX(old_ref_id,':', -1) as old_ref_id\n FROM \n refs_commits_diffs\n WHERE\n\t\tSUBSTRING_INDEX(new_ref_id,':', 4) in (${repo_id:sqlstring}+'')\n\tORDER BY 1 desc\n\tLIMIT 5\n),\n\n_bugs as(\n\tselect distinct\n\t\tSUBSTRING_INDEX(rid.new_ref_id,'tags/', -1) as tag_name,\n\t\ti.id,\n\t\ti.lead_time_minutes\n\tfrom\n\t\trefs_issues_diffs rid\n\t\tleft join issues i on rid.issue_id = i.id\n\t\tleft join pull_request_issues pri on i.id = pri.issue_id\n\twhere\n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) in (${repo_id:sqlstring}+'')\n\t\t-- and rid.new_ref_id in (SELECT new_ref_id FROM _last_5_tags)\n\t\tand SUBSTRING_INDEX(rid.new_ref_id,':', -1) in (SELECT new_ref_id FROM _last_5_tags)\n\t\tand i.type = 'BUG'\n),\n\n_bugs_percentile as(\n select \n *,\n percent_rank() over (partition by tag_name order by lead_time_minutes) as percentile\n from _bugs order by 1\n),\n\n_avg_bug_age as(\n select \n tag_name,\n avg(lead_time_minutes)/1440 as average_bug_age\n from _bugs_percentile\n group by 1\n),\n\n_50th_bug_age as(\n select \n tag_name,\n min(lead_time_minutes)/1440 as \"50th_bug_age\"\n from _bugs_percentile\n where percentile >= 0.5\n group by 1\n)\n\nselect \n aba.*,\n eba.50th_bug_age\nfrom \n _avg_bug_age aba\n join 
_50th_bug_age eba on aba.tag_name = eba.tag_name", + "rawSql": "-- Get the bug age in the last 5 tags\nwith refs_commits_diffs as(\n SELECT\n new_refs.id as new_ref_id, old_refs.id as old_ref_id, commits_diffs.commit_sha, new_commit_sha, old_commit_sha\n FROM\n commits_diffs\n LEFT JOIN refs new_refs on new_refs.commit_sha = commits_diffs.new_commit_sha\n LEFT JOIN refs old_refs on old_refs.commit_sha = commits_diffs.old_commit_sha\n),\n\n_last_5_tags as(\n SELECT \n distinct SUBSTRING_INDEX(new_ref_id,':', -1) as new_ref_id, SUBSTRING_INDEX(old_ref_id,':', -1) as old_ref_id\n FROM \n refs_commits_diffs\n WHERE\n\t\tSUBSTRING_INDEX(new_ref_id,':', 4) in (${repo_id})\n\tORDER BY 1 desc\n\tLIMIT 5\n),\n\n_bugs as(\n\tselect distinct\n\t\tSUBSTRING_INDEX(rid.new_ref_id,'tags/', -1) as tag_name,\n\t\ti.id,\n\t\ti.lead_time_minutes\n\tfrom\n\t\trefs_issues_diffs rid\n\t\tleft join issues i on rid.issue_id = i.id\n\t\tleft join pull_request_issues pri on i.id = pri.issue_id\n\twhere\n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) in (${repo_id})\n\t\t-- and rid.new_ref_id in (SELECT new_ref_id FROM _last_5_tags)\n\t\tand SUBSTRING_INDEX(rid.new_ref_id,':', -1) in (SELECT new_ref_id FROM _last_5_tags)\n\t\tand i.type = 'BUG'\n),\n\n_bugs_percentile as(\n select \n *,\n percent_rank() over (partition by tag_name order by lead_time_minutes) as percentile\n from _bugs order by 1\n),\n\n_avg_bug_age as(\n select \n tag_name,\n avg(lead_time_minutes)/1440 as average_bug_age\n from _bugs_percentile\n group by 1\n),\n\n_50th_bug_age as(\n select \n tag_name,\n min(lead_time_minutes)/1440 as \"50th_bug_age\"\n from _bugs_percentile\n where percentile >= 0.5\n group by 1\n)\n\nselect \n aba.*,\n eba.50th_bug_age\nfrom \n _avg_bug_age aba\n join _50th_bug_age eba on aba.tag_name = eba.tag_name", "refId": "A", "select": [ [ @@ -1547,7 +1547,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "-- Get the bug fixer distribution in the last 5 tags\nwith refs_commits_diffs as(\n SELECT\n new_refs.id as new_ref_id, old_refs.id as old_ref_id, commits_diffs.commit_sha, new_commit_sha, old_commit_sha\n FROM\n commits_diffs\n LEFT JOIN refs new_refs on new_refs.commit_sha = commits_diffs.new_commit_sha\n LEFT JOIN refs old_refs on old_refs.commit_sha = commits_diffs.old_commit_sha\n),\n\n_last_5_tags as(\n SELECT \n -- distinct new_ref_id, old_ref_id\n distinct SUBSTRING_INDEX(new_ref_id,':', -1) as new_ref_id, SUBSTRING_INDEX(old_ref_id,':', -1) as old_ref_id\n FROM \n refs_commits_diffs\n WHERE\n\t\tSUBSTRING_INDEX(new_ref_id,':', 4) in (${repo_id:sqlstring}+'')\n\tORDER BY 1 desc\n\tLIMIT 5\n),\n\n_bugs as(\n\tselect distinct\n\t b.name,\n\t\tSUBSTRING_INDEX(rid.new_ref_id,'tags/', -1) as tag_name,\n\t\ti.issue_key as issue_key,\n i.title,\n i.lead_time_minutes/1440 as lead_time_in_days,\n concat(b.url,'/',i.issue_key) as url\n\tfrom\n\t\trefs_issues_diffs rid\n\t\tleft join issues i on rid.issue_id = i.id\n\t\tleft join pull_request_issues pri on i.id = pri.issue_id\n\t\tjoin boards b on SUBSTRING_INDEX(rid.new_ref_id,':', 4) = b.id\n\twhere\n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) in (${repo_id:sqlstring}+'')\n\t\t-- and rid.new_ref_id in (SELECT new_ref_id FROM _last_5_tags)\n\t\tand SUBSTRING_INDEX(rid.new_ref_id,':', -1) in (SELECT new_ref_id FROM _last_5_tags)\n\t\tand i.type = 'BUG'\n),\n\n_bug_age_rank as(\n select \n *,\n row_number() over (partition by tag_name order by lead_time_in_days desc) as bug_age_rank\n from _bugs\n)\n\nselect \n name,\n tag_name,\n issue_key,\n 
title,\n lead_time_in_days,\n url\nfrom _bug_age_rank\nwhere bug_age_rank <=10 \norder by tag_name, lead_time_in_days desc", + "rawSql": "-- Get the bug fixer distribution in the last 5 tags\nwith refs_commits_diffs as(\n SELECT\n new_refs.id as new_ref_id, old_refs.id as old_ref_id, commits_diffs.commit_sha, new_commit_sha, old_commit_sha\n FROM\n commits_diffs\n LEFT JOIN refs new_refs on new_refs.commit_sha = commits_diffs.new_commit_sha\n LEFT JOIN refs old_refs on old_refs.commit_sha = commits_diffs.old_commit_sha\n),\n\n_last_5_tags as(\n SELECT \n -- distinct new_ref_id, old_ref_id\n distinct SUBSTRING_INDEX(new_ref_id,':', -1) as new_ref_id, SUBSTRING_INDEX(old_ref_id,':', -1) as old_ref_id\n FROM \n refs_commits_diffs\n WHERE\n\t\tSUBSTRING_INDEX(new_ref_id,':', 4) in (${repo_id})\n\tORDER BY 1 desc\n\tLIMIT 5\n),\n\n_bugs as(\n\tselect distinct\n\t b.name,\n\t\tSUBSTRING_INDEX(rid.new_ref_id,'tags/', -1) as tag_name,\n\t\ti.issue_key as issue_key,\n i.title,\n i.lead_time_minutes/1440 as lead_time_in_days,\n concat(b.url,'/',i.issue_key) as url\n\tfrom\n\t\trefs_issues_diffs rid\n\t\tleft join issues i on rid.issue_id = i.id\n\t\tleft join pull_request_issues pri on i.id = pri.issue_id\n\t\tjoin boards b on SUBSTRING_INDEX(rid.new_ref_id,':', 4) = b.id\n\twhere\n\t\tSUBSTRING_INDEX(rid.new_ref_id,':', 4) in (${repo_id})\n\t\t-- and rid.new_ref_id in (SELECT new_ref_id FROM _last_5_tags)\n\t\tand SUBSTRING_INDEX(rid.new_ref_id,':', -1) in (SELECT new_ref_id FROM _last_5_tags)\n\t\tand i.type = 'BUG'\n),\n\n_bug_age_rank as(\n select \n *,\n row_number() over (partition by tag_name order by lead_time_in_days desc) as bug_age_rank\n from _bugs\n)\n\nselect \n name,\n tag_name,\n issue_key,\n title,\n lead_time_in_days,\n url\nfrom _bug_age_rank\nwhere bug_age_rank <=10 \norder by tag_name, lead_time_in_days desc", "refId": "A", "select": [ [ @@ -1696,7 +1696,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "-- Get the avg bug age in history\nwith _avg_bug_age as(\n select \n type,\n avg(lead_time_minutes) as average_bug_age\n from \n issues\n left join board_issues bi on issues.id = bi.issue_id\n where \n type = 'BUG'\n and status = 'DONE'\n and bi.board_id in (${repo_id:sqlstring}+'')\n group by 1\n),\n\n\n_bug_queue_time as(\n select \n i.id,\n abg.average_bug_age,\n TIMESTAMPDIFF(MINUTE,created_date,NOW()) as queue_time,\n case when TIMESTAMPDIFF(MINUTE,created_date,NOW()) >= average_bug_age then \">=avg_bug_age\" else \"= average_bug_age then \">=avg_bug_age\" else \"= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n group by 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n pr_count as \"Pull Request Count\"\nFROM _prs\nORDER BY time\n", + "rawSql": "with _prs as(\n SELECT\n DATE_ADD(date(created_date), INTERVAL -DAY(date(created_date))+1 DAY) as time,\n count(distinct id) as pr_count\n FROM pull_requests\n WHERE\n base_repo_id in (${repo_id})\n and $__timeFilter(created_date)\n -- The following condition will remove the month with incomplete data\n -- and created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n group by 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n pr_count as \"Pull Request Count\"\nFROM _prs\nORDER BY time\n", "refId": "A", "select": [ [ @@ -431,7 +431,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. 
You can check out the original status from the field `original_status`\nselect\n author_name,\n\tcount(distinct pr.id) as merged_pull_request_count\nfrom \n\tpull_requests pr\nwhere\n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id:sqlstring}+'')\n\tand pr.status = 'MERGED'\ngroup by 1\norder by 2 desc\nlimit 20\n", + "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nselect\n author_name,\n\tcount(distinct pr.id) as merged_pull_request_count\nfrom \n\tpull_requests pr\nwhere\n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id})\n\tand pr.status = 'MERGED'\ngroup by 1\norder by 2 desc\nlimit 20\n", "refId": "A", "select": [ [ @@ -558,7 +558,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nselect\n count(distinct case when status = 'CLOSED' then id else null end)/count(distinct case when status in ('CLOSED', 'MERGED') then id else null end) as ratio\nfrom \n\tpull_requests pr\nwhere\n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id:sqlstring}+'')", + "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nselect\n count(distinct case when status = 'CLOSED' then id else null end)/count(distinct case when status in ('CLOSED', 'MERGED') then id else null end) as ratio\nfrom \n\tpull_requests pr\nwhere\n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id})", "refId": "A", "select": [ [ @@ -672,7 +672,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nSELECT\n DATE_ADD(date(created_date), INTERVAL -DAYOFMONTH(date(created_date))+1 DAY) as time,\n count(distinct case when status = 'OPEN' then id else null end) as \"PR: Open\",\n count(distinct case when status = 'CLOSED' then id else null end) as \"PR: Closed without merging\",\n count(distinct case when status = 'MERGED' then id else null end) as \"PR: Merged\"\nFROM pull_requests\nWHERE\n $__timeFilter(created_date)\n and base_repo_id in (${repo_id:sqlstring}+'')\ngroup by 1\n", + "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nSELECT\n DATE_ADD(date(created_date), INTERVAL -DAYOFMONTH(date(created_date))+1 DAY) as time,\n count(distinct case when status = 'OPEN' then id else null end) as \"PR: Open\",\n count(distinct case when status = 'CLOSED' then id else null end) as \"PR: Closed without merging\",\n count(distinct case when status = 'MERGED' then id else null end) as \"PR: Merged\"\nFROM pull_requests\nWHERE\n $__timeFilter(created_date)\n and base_repo_id in (${repo_id})\ngroup by 1\n", "refId": "A", "select": [ [ @@ -762,7 +762,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. 
You can check out the original status from the field `original_status`\nselect\n\tcount(distinct pr.id) as merged_pull_request_count\nfrom \n\tpull_requests pr\nwhere\n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id:sqlstring}+'')\n\tand pr.status = 'CLOSED'", + "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nselect\n\tcount(distinct pr.id) as merged_pull_request_count\nfrom \n\tpull_requests pr\nwhere\n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id})\n\tand pr.status = 'CLOSED'", "refId": "A", "select": [ [ @@ -896,7 +896,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nSELECT\n DATE_ADD(date(created_date), INTERVAL -DAYOFMONTH(date(created_date))+1 DAY) as time,\n count(distinct case when status = 'CLOSED' then id else null end)/count(distinct case when status in ('CLOSED', 'MERGED') then id else null end) as ratio\nFROM pull_requests\nWHERE\n $__timeFilter(created_date)\n and base_repo_id in (${repo_id:sqlstring}+'')\ngroup by 1\n", + "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nSELECT\n DATE_ADD(date(created_date), INTERVAL -DAYOFMONTH(date(created_date))+1 DAY) as time,\n count(distinct case when status = 'CLOSED' then id else null end)/count(distinct case when status in ('CLOSED', 'MERGED') then id else null end) as ratio\nFROM pull_requests\nWHERE\n $__timeFilter(created_date)\n and base_repo_id in (${repo_id})\ngroup by 1\n", "refId": "A", "select": [ [ @@ -984,7 +984,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "select\n\tavg(TIMESTAMPDIFF(Minute,created_date,merged_date)/1440)\nfrom \n\tpull_requests\nwhere \n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id:sqlstring}+'')\n\tand merged_date is not null\n\n\n", + "rawSql": "select\n\tavg(TIMESTAMPDIFF(Minute,created_date,merged_date)/1440)\nfrom \n\tpull_requests\nwhere \n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id})\n\tand merged_date is not null\n\n\n", "refId": "A", "select": [ [ @@ -1102,7 +1102,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "with _prs as(\n SELECT\n DATE_ADD(date(created_date), INTERVAL -DAY(date(created_date))+1 DAY) as time,\n avg(TIMESTAMPDIFF(Minute,created_date,merged_date)/1440) as time_to_merge\n FROM pull_requests\n WHERE\n $__timeFilter(created_date)\n and base_repo_id in (${repo_id:sqlstring}+'')\n -- The following condition will remove the month with incomplete data\n -- and created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n group by 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n time_to_merge as \"Time to Merge\"\nFROM _prs\nORDER BY time\n", + "rawSql": "with _prs as(\n SELECT\n DATE_ADD(date(created_date), INTERVAL -DAY(date(created_date))+1 DAY) as time,\n avg(TIMESTAMPDIFF(Minute,created_date,merged_date)/1440) as time_to_merge\n FROM pull_requests\n WHERE\n $__timeFilter(created_date)\n and base_repo_id in (${repo_id})\n -- The following condition will remove the month with incomplete data\n -- and created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL 
-DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n group by 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n time_to_merge as \"Time to Merge\"\nFROM _prs\nORDER BY time\n", "refId": "A", "select": [ [ @@ -1205,7 +1205,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nselect\n\tavg(TIMESTAMPDIFF(Minute,created_date,closed_date)/1440) as time_to_close\nfrom \n\tpull_requests\nwhere \n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id:sqlstring}+'')\n\tand status in ('CLOSED', 'MERGED')\n\n\n", + "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nselect\n\tavg(TIMESTAMPDIFF(Minute,created_date,closed_date)/1440) as time_to_close\nfrom \n\tpull_requests\nwhere \n $__timeFilter(created_date)\n\tand base_repo_id in (${repo_id})\n\tand status in ('CLOSED', 'MERGED')\n\n\n", "refId": "A", "select": [ [ @@ -1323,7 +1323,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. You can check out the original status from the field `original_status`\nwith _prs as(\n SELECT\n DATE_ADD(date(created_date), INTERVAL -DAY(date(created_date))+1 DAY) as time,\n avg(TIMESTAMPDIFF(Minute,created_date,closed_date)/1440) as time_to_close\n FROM pull_requests\n WHERE\n $__timeFilter(created_date)\n and base_repo_id in (${repo_id:sqlstring}+'')\n and status in ('CLOSED', 'MERGED')\n -- The following condition will remove the month with incomplete data\n -- and created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n group by 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n time_to_close as \"Time to Close\"\nFROM _prs\nORDER BY time\n", + "rawSql": "-- The PR/MR statuses are standardized to DevLake's statuses 'OPEN', 'MERGED' and 'CLOSED'. 
You can check out the original status from the field `original_status`\nwith _prs as(\n SELECT\n DATE_ADD(date(created_date), INTERVAL -DAY(date(created_date))+1 DAY) as time,\n avg(TIMESTAMPDIFF(Minute,created_date,closed_date)/1440) as time_to_close\n FROM pull_requests\n WHERE\n $__timeFilter(created_date)\n and base_repo_id in (${repo_id})\n and status in ('CLOSED', 'MERGED')\n -- The following condition will remove the month with incomplete data\n -- and created_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n group by 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n time_to_close as \"Time to Close\"\nFROM _prs\nORDER BY time\n", "refId": "A", "select": [ [ diff --git a/grafana/dashboards/Jenkins.json b/grafana/dashboards/Jenkins.json index 73058ee4e2c..893d1c8cec2 100644 --- a/grafana/dashboards/Jenkins.json +++ b/grafana/dashboards/Jenkins.json @@ -120,7 +120,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n count(distinct id)\nFROM \n cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and result = 'SUCCESS'\n and id like \"%jenkins%\"\n and cicd_scope_id in (${job_id:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)", + "rawSql": "SELECT\n count(distinct id)\nFROM \n cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and result = 'SUCCESS'\n and id like \"%jenkins%\"\n and cicd_scope_id in (${job_id})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)", "refId": "A", "select": [ [ @@ -223,7 +223,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n 1.0 * count(case when result = 'SUCCESS' then id else null end)/count(distinct id)\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%jenkins%\"\n and cicd_scope_id in (${job_id:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)", + "rawSql": "SELECT\n 1.0 * count(case when result = 'SUCCESS' then id else null end)/count(distinct id)\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%jenkins%\"\n and cicd_scope_id in (${job_id})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)", "refId": "A", "select": [ [ @@ -410,7 +410,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n result,\n count(distinct id) as build_count\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%jenkins%\"\n and cicd_scope_id in (${job_id:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\nGROUP BY 1\nORDER BY 2 desc", + "rawSql": "SELECT\n result,\n count(distinct id) as build_count\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%jenkins%\"\n and cicd_scope_id in (${job_id})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL 
-DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\nGROUP BY 1\nORDER BY 2 desc", "refId": "A", "select": [ [ @@ -510,7 +510,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n avg(duration_sec/60) as duration_in_minutes\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%jenkins%\"\n and cicd_scope_id in (${job_id:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)", + "rawSql": "SELECT\n avg(duration_sec/60) as duration_in_minutes\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%jenkins%\"\n and cicd_scope_id in (${job_id})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)", "refId": "A", "select": [ [ @@ -631,7 +631,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "WITH _builds as(\n SELECT\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\n count(distinct id) as build_count\n FROM cicd_pipelines\n WHERE\n $__timeFilter(finished_date)\n and result = 'SUCCESS'\n and id like \"%jenkins%\"\n and cicd_scope_id in (${job_id:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n GROUP BY 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n build_count as \"Build Count\"\nFROM _builds\nORDER BY time\n", + "rawSql": "WITH _builds as(\n SELECT\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\n count(distinct id) as build_count\n FROM cicd_pipelines\n WHERE\n $__timeFilter(finished_date)\n and result = 'SUCCESS'\n and id like \"%jenkins%\"\n and cicd_scope_id in (${job_id})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n GROUP BY 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n build_count as \"Build Count\"\nFROM _builds\nORDER BY time\n", "refId": "A", "select": [ [ @@ -788,7 +788,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "WITH _build_success_rate as(\r\n SELECT\r\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\r\n result,\r\n id\r\n FROM\r\n cicd_pipelines\r\n WHERE\r\n $__timeFilter(finished_date)\r\n and id like \"%jenkins%\"\r\n and cicd_scope_id in (${job_id:sqlstring}+'')\r\n -- the following condition will remove the month with incomplete data\r\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\r\n GROUP BY\r\n time, result, id\r\n)\r\n\r\nSELECT \r\n date_format(time,'%M %Y') as month,\r\n 1.0 * sum(case when result = 'SUCCESS' then 1 else 0 end)/ count(*) as \"Build Success Rate\"\r\nFROM _build_success_rate\r\nGROUP BY time\r\nORDER BY time", + "rawSql": "WITH _build_success_rate as(\r\n SELECT\r\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\r\n result,\r\n id\r\n FROM\r\n cicd_pipelines\r\n WHERE\r\n $__timeFilter(finished_date)\r\n and id like \"%jenkins%\"\r\n and cicd_scope_id in (${job_id})\r\n -- the following condition will remove the month with incomplete 
data\r\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\r\n GROUP BY\r\n time, result, id\r\n)\r\n\r\nSELECT \r\n date_format(time,'%M %Y') as month,\r\n 1.0 * sum(case when result = 'SUCCESS' then 1 else 0 end)/ count(*) as \"Build Success Rate\"\r\nFROM _build_success_rate\r\nGROUP BY time\r\nORDER BY time", "refId": "A", "select": [ [ @@ -955,7 +955,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "SELECT\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\n count(distinct case when result = 'SUCCESS' then id else null end) as successful_build_count,\n count(distinct case when result != 'SUCCESS' then id else null end) as failed_build_count\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%jenkins%\"\n and cicd_scope_id in (${job_id:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\nGROUP BY 1", + "rawSql": "SELECT\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\n count(distinct case when result = 'SUCCESS' then id else null end) as successful_build_count,\n count(distinct case when result != 'SUCCESS' then id else null end) as failed_build_count\nFROM cicd_pipelines\nWHERE\n $__timeFilter(finished_date)\n and id like \"%jenkins%\"\n and cicd_scope_id in (${job_id})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\nGROUP BY 1", "refId": "A", "select": [ [ @@ -1091,7 +1091,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "WITH _builds as(\n SELECT\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\n avg(duration_sec) as mean_duration_sec\n FROM \n cicd_pipelines\n WHERE\n $__timeFilter(finished_date)\n and id like \"%jenkins%\"\n and cicd_scope_id in (${job_id:sqlstring}+'')\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n GROUP BY 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n mean_duration_sec/60 as mean_duration_minutes\nFROM _builds\nORDER BY time\n", + "rawSql": "WITH _builds as(\n SELECT\n DATE_ADD(date(finished_date), INTERVAL -DAYOFMONTH(date(finished_date))+1 DAY) as time,\n avg(duration_sec) as mean_duration_sec\n FROM \n cicd_pipelines\n WHERE\n $__timeFilter(finished_date)\n and id like \"%jenkins%\"\n and cicd_scope_id in (${job_id})\n -- the following condition will remove the month with incomplete data\n and finished_date >= DATE_ADD(DATE_ADD($__timeFrom(), INTERVAL -DAY($__timeFrom())+1 DAY), INTERVAL +1 MONTH)\n GROUP BY 1\n)\n\nSELECT \n date_format(time,'%M %Y') as month,\n mean_duration_sec/60 as mean_duration_minutes\nFROM _builds\nORDER BY time\n", "refId": "A", "select": [ [ diff --git a/grafana/dashboards/Jira.json b/grafana/dashboards/Jira.json index 9be90dbcdf9..2286e341f56 100644 --- a/grafana/dashboards/Jira.json +++ b/grafana/dashboards/Jira.json @@ -175,7 +175,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select \r\n count(distinct i.id) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.type in 
(${type:sqlstring}+'')\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')", + "rawSql": "select \r\n count(distinct i.id) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.type in (${type})\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})", "refId": "A", "select": [ [ @@ -278,7 +278,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select \r\n count(distinct i.id) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.type in (${type:sqlstring}+'')\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')", + "rawSql": "select \r\n count(distinct i.id) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.type in (${type})\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})", "refId": "A", "select": [ [ @@ -413,7 +413,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "SELECT\r\n DATE_ADD(date(i.created_date), INTERVAL -DAYOFMONTH(date(i.created_date))+1 DAY) as time,\r\n count(distinct case when status != 'DONE' then i.id else null end) as \"Number of Open Issues\",\r\n count(distinct case when status = 'DONE' then i.id else null end) as \"Number of Delivered Issues\"\r\nFROM issues i\r\n\tjoin board_issues bi on i.id = bi.issue_id\r\n\tjoin boards b on bi.board_id = b.id\r\nwhere \r\n i.type in (${type:sqlstring}+'')\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\ngroup by 1", + "rawSql": "SELECT\r\n DATE_ADD(date(i.created_date), INTERVAL -DAYOFMONTH(date(i.created_date))+1 DAY) as time,\r\n count(distinct case when status != 'DONE' then i.id else null end) as \"Number of Open Issues\",\r\n count(distinct case when status = 'DONE' then i.id else null end) as \"Number of Delivered Issues\"\r\nFROM issues i\r\n\tjoin board_issues bi on i.id = bi.issue_id\r\n\tjoin boards b on bi.board_id = b.id\r\nwhere \r\n i.type in (${type})\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})\r\ngroup by 1", "refId": "A", "select": [ [ @@ -502,7 +502,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "with _requirements as(\r\n select\r\n count(distinct i.id) as total_count,\r\n count(distinct case when i.status = 'DONE' then i.id else null end) as delivered_count\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type:sqlstring}+'')\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n)\r\n\r\nselect \r\n now() as time,\r\n 1.0 * delivered_count/total_count as requirement_delivery_rate\r\nfrom _requirements", + "rawSql": "with _requirements as(\r\n select\r\n count(distinct i.id) as total_count,\r\n count(distinct case when i.status = 'DONE' then i.id else null end) as delivered_count\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type})\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})\r\n)\r\n\r\nselect \r\n now() as time,\r\n 1.0 * delivered_count/total_count as requirement_delivery_rate\r\nfrom _requirements", "refId": "A", "select": [ [ @@ -618,7 +618,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "with _requirements as(\r\n select\r\n DATE_ADD(date(i.created_date), 
INTERVAL -DAYOFMONTH(date(i.created_date))+1 DAY) as time,\r\n 1.0 * count(distinct case when i.status = 'DONE' then i.id else null end)/count(distinct i.id) as delivered_rate\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type:sqlstring}+'')\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n group by 1\r\n)\r\n\r\nselect\r\n time,\r\n delivered_rate\r\nfrom _requirements\r\norder by time", + "rawSql": "with _requirements as(\r\n select\r\n DATE_ADD(date(i.created_date), INTERVAL -DAYOFMONTH(date(i.created_date))+1 DAY) as time,\r\n 1.0 * count(distinct case when i.status = 'DONE' then i.id else null end)/count(distinct i.id) as delivered_rate\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type})\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})\r\n group by 1\r\n)\r\n\r\nselect\r\n time,\r\n delivered_rate\r\nfrom _requirements\r\norder by time", "refId": "A", "select": [ [ @@ -730,7 +730,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "select \r\n avg(lead_time_minutes/1440) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.type in (${type:sqlstring}+'')\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')", + "rawSql": "select \r\n avg(lead_time_minutes/1440) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.type in (${type})\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id})", "refId": "A", "select": [ [ @@ -833,7 +833,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "with _ranks as(\r\n select \r\n i.lead_time_minutes,\r\n percent_rank() over (order by lead_time_minutes asc) as ranks\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type:sqlstring}+'')\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n)\r\n\r\nselect\r\n max(lead_time_minutes/1440) as value\r\nfrom _ranks\r\nwhere \r\n ranks <= 0.8", + "rawSql": "with _ranks as(\r\n select \r\n i.lead_time_minutes,\r\n percent_rank() over (order by lead_time_minutes asc) as ranks\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type})\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id})\r\n)\r\n\r\nselect\r\n max(lead_time_minutes/1440) as value\r\nfrom _ranks\r\nwhere \r\n ranks <= 0.8", "refId": "A", "select": [ [ @@ -954,7 +954,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "with _requirements as(\r\n select \r\n DATE_ADD(date(i.resolution_date), INTERVAL -DAYOFMONTH(date(i.resolution_date))+1 DAY) as time,\r\n avg(lead_time_minutes/1440) as mean_lead_time\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type:sqlstring}+'')\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n group by 1\r\n)\r\n\r\nselect \r\n date_format(time,'%M %Y') as month,\r\n mean_lead_time\r\nfrom _requirements\r\norder by time asc", + "rawSql": "with _requirements as(\r\n select \r\n DATE_ADD(date(i.resolution_date), INTERVAL -DAYOFMONTH(date(i.resolution_date))+1 DAY) as time,\r\n avg(lead_time_minutes/1440) as 
mean_lead_time\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type})\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id})\r\n group by 1\r\n)\r\n\r\nselect \r\n date_format(time,'%M %Y') as month,\r\n mean_lead_time\r\nfrom _requirements\r\norder by time asc", "refId": "A", "select": [ [ @@ -1038,7 +1038,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "with _ranks as(\r\n select \r\n round(i.lead_time_minutes/1440) as lead_time_day\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type:sqlstring}+'')\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n order by lead_time_day asc\r\n)\r\n\r\nselect \r\n now() as time,\r\n lpad(concat(lead_time_day,'d'), 4, ' ') as metric,\r\n percent_rank() over (order by lead_time_day asc) as value\r\nfrom _ranks\r\norder by lead_time_day asc", + "rawSql": "with _ranks as(\r\n select \r\n round(i.lead_time_minutes/1440) as lead_time_day\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type})\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id})\r\n order by lead_time_day asc\r\n)\r\n\r\nselect \r\n now() as time,\r\n lpad(concat(lead_time_day,'d'), 4, ' ') as metric,\r\n percent_rank() over (order by lead_time_day asc) as value\r\nfrom _ranks\r\norder by lead_time_day asc", "refId": "A", "select": [ [ diff --git a/grafana/dashboards/Opsgenie.json b/grafana/dashboards/Opsgenie.json index 964403fbf22..d42cb874f95 100644 --- a/grafana/dashboards/Opsgenie.json +++ b/grafana/dashboards/Opsgenie.json @@ -177,7 +177,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select count(distinct i.issue_key) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n", + "rawSql": "select count(distinct i.issue_key) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})\r\n", "refId": "A", "select": [ [ @@ -280,7 +280,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select count(distinct i.issue_key) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.original_status = 'resolved'\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n", + "rawSql": "select count(distinct i.issue_key) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.original_status = 'resolved'\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})\r\n", "refId": "A", "select": [ [ @@ -412,7 +412,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select \r\n i.title as \"Title\",\r\n i.issue_key as \"Id\",\r\n group_concat(b.name separator \", \") as \"Service(s)\",\r\n i.description as \"Description\",\r\n i.original_status as \"Original Status\",\r\n i.priority as \"Priority\",\r\n i.created_date as \"Created Date\",\r\n i.updated_date as \"Updated Date\",\r\n round((i.lead_time_minutes/1440),1) as \"Lead Time Days\",\r\n i.url as \"URL\"\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n join boards b on bi.board_id = 
b.id\r\nwhere \r\n $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\nGROUP BY i.title, i.issue_key, i.description, i.original_status, i.priority, i.created_date, i.updated_date, i.lead_time_minutes, i.url", + "rawSql": "select \r\n i.title as \"Title\",\r\n i.issue_key as \"Id\",\r\n group_concat(b.name separator \", \") as \"Service(s)\",\r\n i.description as \"Description\",\r\n i.original_status as \"Original Status\",\r\n i.priority as \"Priority\",\r\n i.created_date as \"Created Date\",\r\n i.updated_date as \"Updated Date\",\r\n round((i.lead_time_minutes/1440),1) as \"Lead Time Days\",\r\n i.url as \"URL\"\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n join boards b on bi.board_id = b.id\r\nwhere \r\n $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})\r\nGROUP BY i.title, i.issue_key, i.description, i.original_status, i.priority, i.created_date, i.updated_date, i.lead_time_minutes, i.url", "refId": "A", "select": [ [ @@ -515,7 +515,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select count(distinct i.issue_key) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.original_status = 'closed'\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n", + "rawSql": "select count(distinct i.issue_key) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.original_status = 'closed'\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})\r\n", "refId": "A", "select": [ [ @@ -622,7 +622,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "with _requirements as(\r\n select\r\n count(distinct i.id) as total_count,\r\n count(distinct case when i.original_status = 'resolved' then i.id else null end) as resolved_count\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n)\r\n\r\nselect \r\n now() as time,\r\n 1.0 * resolved_count/total_count as requirement_delivery_rate\r\nfrom _requirements", + "rawSql": "with _requirements as(\r\n select\r\n count(distinct i.id) as total_count,\r\n count(distinct case when i.original_status = 'resolved' then i.id else null end) as resolved_count\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})\r\n)\r\n\r\nselect \r\n now() as time,\r\n 1.0 * resolved_count/total_count as requirement_delivery_rate\r\nfrom _requirements", "refId": "A", "select": [ [ @@ -756,7 +756,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "with _requirements as(\r\n select\r\n DATE_ADD(date(i.created_date), INTERVAL -DAYOFMONTH(date(i.created_date))+1 DAY) as time,\r\n 1.0 * count(distinct case when i.original_status = 'resolved' then i.id else null end)/count(distinct i.id) as resolved_rate\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n group by 1\r\n)\r\n\r\nselect\r\n time,\r\n resolved_rate\r\nfrom _requirements\r\norder by time", + "rawSql": "with _requirements as(\r\n select\r\n DATE_ADD(date(i.created_date), INTERVAL -DAYOFMONTH(date(i.created_date))+1 DAY) as time,\r\n 1.0 * count(distinct case when i.original_status = 'resolved' then i.id else null 
end)/count(distinct i.id) as resolved_rate\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})\r\n group by 1\r\n)\r\n\r\nselect\r\n time,\r\n resolved_rate\r\nfrom _requirements\r\norder by time", "refId": "A", "select": [ [ @@ -890,7 +890,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "select \r\n avg(lead_time_minutes/1440) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.original_status = 'resolved'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')", + "rawSql": "select \r\n avg(lead_time_minutes/1440) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.original_status = 'resolved'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id})", "refId": "A", "select": [ [ @@ -999,7 +999,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "with _ranks as(\r\n select \r\n i.lead_time_minutes,\r\n percent_rank() over (order by lead_time_minutes asc) as ranks\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.original_status = 'resolved'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n)\r\n\r\nselect\r\n max(lead_time_minutes/1440) as value\r\nfrom _ranks\r\nwhere \r\n ranks <= 0.8", + "rawSql": "with _ranks as(\r\n select \r\n i.lead_time_minutes,\r\n percent_rank() over (order by lead_time_minutes asc) as ranks\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.original_status = 'resolved'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id})\r\n)\r\n\r\nselect\r\n max(lead_time_minutes/1440) as value\r\nfrom _ranks\r\nwhere \r\n ranks <= 0.8", "refId": "A", "select": [ [ @@ -1138,7 +1138,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "with _requirements as(\r\n select \r\n DATE_ADD(date(i.resolution_date), INTERVAL -DAYOFMONTH(date(i.resolution_date))+1 DAY) as time,\r\n avg(lead_time_minutes/1440) as mean_incident_age\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.original_status = 'resolved'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n group by 1\r\n)\r\n\r\nselect \r\n date_format(time,'%M %Y') as month,\r\n mean_incident_age\r\nfrom _requirements\r\norder by time asc", + "rawSql": "with _requirements as(\r\n select \r\n DATE_ADD(date(i.resolution_date), INTERVAL -DAYOFMONTH(date(i.resolution_date))+1 DAY) as time,\r\n avg(lead_time_minutes/1440) as mean_incident_age\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.original_status = 'resolved'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id})\r\n group by 1\r\n)\r\n\r\nselect \r\n date_format(time,'%M %Y') as month,\r\n mean_incident_age\r\nfrom _requirements\r\norder by time asc", "refId": "A", "select": [ [ @@ -1240,7 +1240,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "with _ranks as(\r\n select \r\n round(i.lead_time_minutes/1440) as lead_time_day\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.original_status = 'resolved'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n order by lead_time_day asc\r\n)\r\n\r\nselect \r\n now() as time,\r\n 
lpad(concat(lead_time_day,'d'), 4, ' ') as metric,\r\n percent_rank() over (order by lead_time_day asc) as value\r\nfrom _ranks\r\norder by lead_time_day asc", + "rawSql": "with _ranks as(\r\n select \r\n round(i.lead_time_minutes/1440) as lead_time_day\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.original_status = 'resolved'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id})\r\n order by lead_time_day asc\r\n)\r\n\r\nselect \r\n now() as time,\r\n lpad(concat(lead_time_day,'d'), 4, ' ') as metric,\r\n percent_rank() over (order by lead_time_day asc) as value\r\nfrom _ranks\r\norder by lead_time_day asc", "refId": "A", "select": [ [ diff --git a/grafana/dashboards/PagerDuty.json b/grafana/dashboards/PagerDuty.json index 5b9cce889d3..75dd09508b8 100644 --- a/grafana/dashboards/PagerDuty.json +++ b/grafana/dashboards/PagerDuty.json @@ -175,7 +175,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select \r\n count(distinct i.id) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')", + "rawSql": "select \r\n count(distinct i.id) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})", "refId": "A", "select": [ [ @@ -278,7 +278,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select \r\n count(distinct i.id) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.original_status = 'resolved'\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')", + "rawSql": "select \r\n count(distinct i.id) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.original_status = 'resolved'\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})", "refId": "A", "select": [ [ @@ -398,7 +398,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select \r\n b.name as service,\r\n i.issue_key,\r\n i.description,\r\n i.original_status,\r\n i.priority,\r\n i.created_date,\r\n i.updated_date,\r\n round((i.lead_time_minutes/1440),1) as lead_time_days,\r\n i.url\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n join boards b on bi.board_id = b.id\r\nwhere \r\n $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')", + "rawSql": "select \r\n b.name as service,\r\n i.issue_key,\r\n i.description,\r\n i.original_status,\r\n i.priority,\r\n i.created_date,\r\n i.updated_date,\r\n round((i.lead_time_minutes/1440),1) as lead_time_days,\r\n i.url\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n join boards b on bi.board_id = b.id\r\nwhere \r\n $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})", "refId": "A", "select": [ [ @@ -505,7 +505,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "with _requirements as(\r\n select\r\n count(distinct i.id) as total_count,\r\n count(distinct case when i.original_status = 'resolved' then i.id else null end) as resolved_count\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n)\r\n\r\nselect \r\n now() as time,\r\n 1.0 * resolved_count/total_count as 
requirement_delivery_rate\r\nfrom _requirements", + "rawSql": "with _requirements as(\r\n select\r\n count(distinct i.id) as total_count,\r\n count(distinct case when i.original_status = 'resolved' then i.id else null end) as resolved_count\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})\r\n)\r\n\r\nselect \r\n now() as time,\r\n 1.0 * resolved_count/total_count as requirement_delivery_rate\r\nfrom _requirements", "refId": "A", "select": [ [ @@ -639,7 +639,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "with _requirements as(\r\n select\r\n DATE_ADD(date(i.created_date), INTERVAL -DAYOFMONTH(date(i.created_date))+1 DAY) as time,\r\n 1.0 * count(distinct case when i.original_status = 'resolved' then i.id else null end)/count(distinct i.id) as resolved_rate\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n group by 1\r\n)\r\n\r\nselect\r\n time,\r\n resolved_rate\r\nfrom _requirements\r\norder by time", + "rawSql": "with _requirements as(\r\n select\r\n DATE_ADD(date(i.created_date), INTERVAL -DAYOFMONTH(date(i.created_date))+1 DAY) as time,\r\n 1.0 * count(distinct case when i.original_status = 'resolved' then i.id else null end)/count(distinct i.id) as resolved_rate\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})\r\n group by 1\r\n)\r\n\r\nselect\r\n time,\r\n resolved_rate\r\nfrom _requirements\r\norder by time", "refId": "A", "select": [ [ @@ -773,7 +773,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "select \r\n avg(lead_time_minutes/1440) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.original_status = 'resolved'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')", + "rawSql": "select \r\n avg(lead_time_minutes/1440) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.original_status = 'resolved'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id})", "refId": "A", "select": [ [ @@ -882,7 +882,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "with _ranks as(\r\n select \r\n i.lead_time_minutes,\r\n percent_rank() over (order by lead_time_minutes asc) as ranks\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.original_status = 'resolved'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n)\r\n\r\nselect\r\n max(lead_time_minutes/1440) as value\r\nfrom _ranks\r\nwhere \r\n ranks <= 0.8", + "rawSql": "with _ranks as(\r\n select \r\n i.lead_time_minutes,\r\n percent_rank() over (order by lead_time_minutes asc) as ranks\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.original_status = 'resolved'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id})\r\n)\r\n\r\nselect\r\n max(lead_time_minutes/1440) as value\r\nfrom _ranks\r\nwhere \r\n ranks <= 0.8", "refId": "A", "select": [ [ @@ -1021,7 +1021,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "with _requirements as(\r\n select \r\n DATE_ADD(date(i.resolution_date), INTERVAL -DAYOFMONTH(date(i.resolution_date))+1 DAY) as time,\r\n 
avg(lead_time_minutes/1440) as mean_incident_age\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.original_status = 'resolved'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n group by 1\r\n)\r\n\r\nselect \r\n date_format(time,'%M %Y') as month,\r\n mean_incident_age\r\nfrom _requirements\r\norder by time asc", + "rawSql": "with _requirements as(\r\n select \r\n DATE_ADD(date(i.resolution_date), INTERVAL -DAYOFMONTH(date(i.resolution_date))+1 DAY) as time,\r\n avg(lead_time_minutes/1440) as mean_incident_age\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.original_status = 'resolved'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id})\r\n group by 1\r\n)\r\n\r\nselect \r\n date_format(time,'%M %Y') as month,\r\n mean_incident_age\r\nfrom _requirements\r\norder by time asc", "refId": "A", "select": [ [ @@ -1123,7 +1123,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "with _ranks as(\r\n select \r\n round(i.lead_time_minutes/1440) as lead_time_day\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.original_status = 'resolved'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n order by lead_time_day asc\r\n)\r\n\r\nselect \r\n now() as time,\r\n lpad(concat(lead_time_day,'d'), 4, ' ') as metric,\r\n percent_rank() over (order by lead_time_day asc) as value\r\nfrom _ranks\r\norder by lead_time_day asc", + "rawSql": "with _ranks as(\r\n select \r\n round(i.lead_time_minutes/1440) as lead_time_day\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.original_status = 'resolved'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id})\r\n order by lead_time_day asc\r\n)\r\n\r\nselect \r\n now() as time,\r\n lpad(concat(lead_time_day,'d'), 4, ' ') as metric,\r\n percent_rank() over (order by lead_time_day asc) as value\r\nfrom _ranks\r\norder by lead_time_day asc", "refId": "A", "select": [ [ diff --git a/grafana/dashboards/Sonarqube.json b/grafana/dashboards/Sonarqube.json index a52a7a222ac..d95d0568380 100644 --- a/grafana/dashboards/Sonarqube.json +++ b/grafana/dashboards/Sonarqube.json @@ -156,7 +156,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "SELECT\n count(distinct id) as 'Bugs'\nFROM cq_issues\nWHERE\n project_key in (${project_id:sqlstring}+'')\n and type = 'BUG'\n and severity in (${severity:sqlstring}+'')\n ", + "rawSql": "SELECT\n count(distinct id) as 'Bugs'\nFROM cq_issues\nWHERE\n project_key in (${project_id})\n and type = 'BUG'\n and severity in (${severity})\n ", "refId": "A", "select": [ [ @@ -268,7 +268,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "SELECT\n count(distinct id) as 'Vulnerabilities'\nFROM cq_issues\nWHERE\n project_key in (${project_id:sqlstring}+'')\n and type = 'VULNERABILITY'\n and severity in (${severity:sqlstring}+'')", + "rawSql": "SELECT\n count(distinct id) as 'Vulnerabilities'\nFROM cq_issues\nWHERE\n project_key in (${project_id})\n and type = 'VULNERABILITY'\n and severity in (${severity})", "refId": "A", "select": [ [ @@ -381,7 +381,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "SELECT\n COUNT(distinct id) AS 'Security Hotspots'\nFROM cq_issues\nWHERE\n project_key in (${project_id:sqlstring}+'')\n and type = 'HOTSPOTS'\n and severity in 
(${severity:sqlstring}+'')\n", + "rawSql": "SELECT\n COUNT(distinct id) AS 'Security Hotspots'\nFROM cq_issues\nWHERE\n project_key in (${project_id})\n and type = 'HOTSPOTS'\n and severity in (${severity})\n", "refId": "A", "select": [ [ @@ -493,7 +493,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "SELECT\n CONCAT(ROUND(COUNT(IF(status != 'TO_REVIEW', id, NULL)) / COUNT(distinct id) * 100, 2), '%') AS 'Reviewed'\nFROM cq_issues\nWHERE\n project_key in (${project_id:sqlstring}+'')\n and type = 'HOTSPOTS'\n and severity in (${severity:sqlstring}+'')", + "rawSql": "SELECT\n CONCAT(ROUND(COUNT(IF(status != 'TO_REVIEW', id, NULL)) / COUNT(distinct id) * 100, 2), '%') AS 'Reviewed'\nFROM cq_issues\nWHERE\n project_key in (${project_id})\n and type = 'HOTSPOTS'\n and severity in (${severity})", "refId": "A", "select": [ [ @@ -630,7 +630,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "SELECT\n CONCAT(ROUND((sum(lines_to_cover) - sum(uncovered_lines)) / sum(lines_to_cover) * 100, 1), '% ', 'Coverage on ', ROUND(sum(lines_to_cover) / 1000, 0),'k Lines to cover')\nFROM cq_file_metrics\nWHERE\n project_key in (${project_id:sqlstring}+'')\n", + "rawSql": "SELECT\n CONCAT(ROUND((sum(lines_to_cover) - sum(uncovered_lines)) / sum(lines_to_cover) * 100, 1), '% ', 'Coverage on ', ROUND(sum(lines_to_cover) / 1000, 0),'k Lines to cover')\nFROM cq_file_metrics\nWHERE\n project_key in (${project_id})\n", "refId": "A", "select": [ [ @@ -742,7 +742,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "SELECT\n concat(FLOOR(SUM(debt)/8/60), \" day(s) \", FLOOR((SUM(debt)%480)/60), \" hour(s) \") AS 'Debt'\nFROM cq_issues\nWHERE\n project_key in (${project_id:sqlstring}+'')\n and type = 'CODE_SMELL'\n and severity in (${severity:sqlstring}+'')\n ", + "rawSql": "SELECT\n concat(FLOOR(SUM(debt)/8/60), \" day(s) \", FLOOR((SUM(debt)%480)/60), \" hour(s) \") AS 'Debt'\nFROM cq_issues\nWHERE\n project_key in (${project_id})\n and type = 'CODE_SMELL'\n and severity in (${severity})\n ", "refId": "A", "select": [ [ @@ -855,7 +855,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "SELECT\n\tCOUNT(distinct id) as 'Code Smells'\nFROM cq_issues\nWHERE\n project_key in (${project_id:sqlstring}+'')\n and type = 'CODE_SMELL'\n and severity in (${severity:sqlstring}+'')", + "rawSql": "SELECT\n\tCOUNT(distinct id) as 'Code Smells'\nFROM cq_issues\nWHERE\n project_key in (${project_id})\n and type = 'CODE_SMELL'\n and severity in (${severity})", "refId": "A", "select": [ [ @@ -992,7 +992,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "SELECT\n sum(duplicated_blocks)\nFROM cq_file_metrics\nWHERE\n project_key in (${project_id:sqlstring}+'')\n", + "rawSql": "SELECT\n sum(duplicated_blocks)\nFROM cq_file_metrics\nWHERE\n project_key in (${project_id})\n", "refId": "A", "select": [ [ @@ -1103,7 +1103,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "SELECT\n CONCAT(ROUND(sum(duplicated_lines) / sum(num_of_lines) * 100, 1), '% ', 'Duplications on ', ROUND(sum(ncloc) / 1000, 0),'k Lines')\nFROM cq_file_metrics\nWHERE\n project_key in (${project_id:sqlstring}+'')\n", + "rawSql": "SELECT\n CONCAT(ROUND(sum(duplicated_lines) / sum(num_of_lines) * 100, 1), '% ', 'Duplications on ', ROUND(sum(ncloc) / 1000, 0),'k Lines')\nFROM cq_file_metrics\nWHERE\n project_key in (${project_id})\n", "refId": "A", "select": [ [ @@ 
-1202,7 +1202,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "SELECT\n\tfile_name, num_of_lines as 'Lines of Code', bugs as 'Bugs', vulnerabilities as 'Vulnerabilities', code_smells as 'Code Smells', \n\tsecurity_hotspots as 'Security Hotspots', CONCAT(ROUND(coverage, 2), '%') as 'Coverage', CONCAT(ROUND(duplicated_lines_density, 2), '%') as 'Duplications'\nFROM cq_file_metrics\nWHERE\n project_key in (${project_id:sqlstring}+'')\nORDER BY bugs desc\nlimit 20", + "rawSql": "SELECT\n\tfile_name, num_of_lines as 'Lines of Code', bugs as 'Bugs', vulnerabilities as 'Vulnerabilities', code_smells as 'Code Smells', \n\tsecurity_hotspots as 'Security Hotspots', CONCAT(ROUND(coverage, 2), '%') as 'Coverage', CONCAT(ROUND(duplicated_lines_density, 2), '%') as 'Duplications'\nFROM cq_file_metrics\nWHERE\n project_key in (${project_id})\nORDER BY bugs desc\nlimit 20", "refId": "A", "select": [ [ diff --git a/grafana/dashboards/TAPD.json b/grafana/dashboards/TAPD.json index 36ffa94922e..66af4057c5e 100644 --- a/grafana/dashboards/TAPD.json +++ b/grafana/dashboards/TAPD.json @@ -175,7 +175,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select \r\n count(distinct i.id) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.type in (${type:sqlstring}+'')\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')", + "rawSql": "select \r\n count(distinct i.id) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.type in (${type})\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})", "refId": "A", "select": [ [ @@ -278,7 +278,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select \r\n count(distinct i.id) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.type in (${type:sqlstring}+'')\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')", + "rawSql": "select \r\n count(distinct i.id) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.type in (${type})\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})", "refId": "A", "select": [ [ @@ -413,7 +413,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "SELECT\r\n DATE_ADD(date(i.created_date), INTERVAL -DAYOFMONTH(date(i.created_date))+1 DAY) as time,\r\n count(distinct case when status != 'DONE' then i.id else null end) as \"Number of Open Issues\",\r\n count(distinct case when status = 'DONE' then i.id else null end) as \"Number of Delivered Issues\"\r\nFROM issues i\r\n\tjoin board_issues bi on i.id = bi.issue_id\r\n\tjoin boards b on bi.board_id = b.id\r\nwhere \r\n i.type in (${type:sqlstring}+'')\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\ngroup by 1", + "rawSql": "SELECT\r\n DATE_ADD(date(i.created_date), INTERVAL -DAYOFMONTH(date(i.created_date))+1 DAY) as time,\r\n count(distinct case when status != 'DONE' then i.id else null end) as \"Number of Open Issues\",\r\n count(distinct case when status = 'DONE' then i.id else null end) as \"Number of Delivered Issues\"\r\nFROM issues i\r\n\tjoin board_issues bi on i.id = bi.issue_id\r\n\tjoin boards b on bi.board_id = b.id\r\nwhere \r\n i.type in (${type})\r\n and $__timeFilter(i.created_date)\r\n and 
bi.board_id in (${board_id})\r\ngroup by 1", "refId": "A", "select": [ [ @@ -502,7 +502,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "with _requirements as(\r\n select\r\n count(distinct i.id) as total_count,\r\n count(distinct case when i.status = 'DONE' then i.id else null end) as delivered_count\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type:sqlstring}+'')\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n)\r\n\r\nselect \r\n now() as time,\r\n 1.0 * delivered_count/total_count as requirement_delivery_rate\r\nfrom _requirements", + "rawSql": "with _requirements as(\r\n select\r\n count(distinct i.id) as total_count,\r\n count(distinct case when i.status = 'DONE' then i.id else null end) as delivered_count\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type})\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})\r\n)\r\n\r\nselect \r\n now() as time,\r\n 1.0 * delivered_count/total_count as requirement_delivery_rate\r\nfrom _requirements", "refId": "A", "select": [ [ @@ -618,7 +618,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "with _requirements as(\r\n select\r\n DATE_ADD(date(i.created_date), INTERVAL -DAYOFMONTH(date(i.created_date))+1 DAY) as time,\r\n 1.0 * count(distinct case when i.status = 'DONE' then i.id else null end)/count(distinct i.id) as delivered_rate\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type:sqlstring}+'')\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n group by 1\r\n)\r\n\r\nselect\r\n time,\r\n delivered_rate\r\nfrom _requirements\r\norder by time", + "rawSql": "with _requirements as(\r\n select\r\n DATE_ADD(date(i.created_date), INTERVAL -DAYOFMONTH(date(i.created_date))+1 DAY) as time,\r\n 1.0 * count(distinct case when i.status = 'DONE' then i.id else null end)/count(distinct i.id) as delivered_rate\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type})\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})\r\n group by 1\r\n)\r\n\r\nselect\r\n time,\r\n delivered_rate\r\nfrom _requirements\r\norder by time", "refId": "A", "select": [ [ @@ -729,7 +729,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "select \r\n avg(lead_time_minutes/1440) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.type in (${type:sqlstring}+'')\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')", + "rawSql": "select \r\n avg(lead_time_minutes/1440) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.type in (${type})\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id})", "refId": "A", "select": [ [ @@ -815,7 +815,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "with _ranks as(\r\n select \r\n i.lead_time_minutes,\r\n percent_rank() over (order by lead_time_minutes asc) as ranks\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type:sqlstring}+'')\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n)\r\n\r\nselect\r\n max(lead_time_minutes/1440) as 
value\r\nfrom _ranks\r\nwhere \r\n ranks <= 0.8", + "rawSql": "with _ranks as(\r\n select \r\n i.lead_time_minutes,\r\n percent_rank() over (order by lead_time_minutes asc) as ranks\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type})\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id})\r\n)\r\n\r\nselect\r\n max(lead_time_minutes/1440) as value\r\nfrom _ranks\r\nwhere \r\n ranks <= 0.8", "refId": "A", "select": [ [ @@ -936,7 +936,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "with _requirements as(\r\n select \r\n DATE_ADD(date(i.resolution_date), INTERVAL -DAYOFMONTH(date(i.resolution_date))+1 DAY) as time,\r\n avg(lead_time_minutes/1440) as mean_lead_time\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type:sqlstring}+'')\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n group by 1\r\n)\r\n\r\nselect \r\n date_format(time,'%M %Y') as month,\r\n mean_lead_time\r\nfrom _requirements\r\norder by time asc", + "rawSql": "with _requirements as(\r\n select \r\n DATE_ADD(date(i.resolution_date), INTERVAL -DAYOFMONTH(date(i.resolution_date))+1 DAY) as time,\r\n avg(lead_time_minutes/1440) as mean_lead_time\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type})\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id})\r\n group by 1\r\n)\r\n\r\nselect \r\n date_format(time,'%M %Y') as month,\r\n mean_lead_time\r\nfrom _requirements\r\norder by time asc", "refId": "A", "select": [ [ @@ -1020,7 +1020,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "with _ranks as(\r\n select \r\n round(i.lead_time_minutes/1440) as lead_time_day\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type:sqlstring}+'')\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n order by lead_time_day asc\r\n)\r\n\r\nselect \r\n now() as time,\r\n lpad(concat(lead_time_day,'d'), 4, ' ') as metric,\r\n percent_rank() over (order by lead_time_day asc) as value\r\nfrom _ranks\r\norder by lead_time_day asc", + "rawSql": "with _ranks as(\r\n select \r\n round(i.lead_time_minutes/1440) as lead_time_day\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type})\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id})\r\n order by lead_time_day asc\r\n)\r\n\r\nselect \r\n now() as time,\r\n lpad(concat(lead_time_day,'d'), 4, ' ') as metric,\r\n percent_rank() over (order by lead_time_day asc) as value\r\nfrom _ranks\r\norder by lead_time_day asc", "refId": "A", "select": [ [ diff --git a/grafana/dashboards/Teambition.json b/grafana/dashboards/Teambition.json index a1d5adc2be2..c8d139a02a7 100644 --- a/grafana/dashboards/Teambition.json +++ b/grafana/dashboards/Teambition.json @@ -175,7 +175,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select \r\n count(distinct i.id) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.type in (${type:sqlstring}+'')\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')", + "rawSql": "select \r\n count(distinct i.id) as value\r\nfrom issues i\r\n join 
board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.type in (${type})\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})", "refId": "A", "select": [ [ @@ -278,7 +278,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select \r\n count(distinct i.id) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.type in (${type:sqlstring}+'')\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')", + "rawSql": "select \r\n count(distinct i.id) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.type in (${type})\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})", "refId": "A", "select": [ [ @@ -413,7 +413,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "SELECT\r\n DATE_ADD(date(i.created_date), INTERVAL -DAYOFMONTH(date(i.created_date))+1 DAY) as time,\r\n count(distinct case when status != 'DONE' then i.id else null end) as \"Number of Open Issues\",\r\n count(distinct case when status = 'DONE' then i.id else null end) as \"Number of Delivered Issues\"\r\nFROM issues i\r\n\tjoin board_issues bi on i.id = bi.issue_id\r\n\tjoin boards b on bi.board_id = b.id\r\nwhere \r\n i.type in (${type:sqlstring}+'')\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\ngroup by 1", + "rawSql": "SELECT\r\n DATE_ADD(date(i.created_date), INTERVAL -DAYOFMONTH(date(i.created_date))+1 DAY) as time,\r\n count(distinct case when status != 'DONE' then i.id else null end) as \"Number of Open Issues\",\r\n count(distinct case when status = 'DONE' then i.id else null end) as \"Number of Delivered Issues\"\r\nFROM issues i\r\n\tjoin board_issues bi on i.id = bi.issue_id\r\n\tjoin boards b on bi.board_id = b.id\r\nwhere \r\n i.type in (${type})\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})\r\ngroup by 1", "refId": "A", "select": [ [ @@ -502,7 +502,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "with _requirements as(\r\n select\r\n count(distinct i.id) as total_count,\r\n count(distinct case when i.status = 'DONE' then i.id else null end) as delivered_count\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type:sqlstring}+'')\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n)\r\n\r\nselect \r\n now() as time,\r\n 1.0 * delivered_count/total_count as requirement_delivery_rate\r\nfrom _requirements", + "rawSql": "with _requirements as(\r\n select\r\n count(distinct i.id) as total_count,\r\n count(distinct case when i.status = 'DONE' then i.id else null end) as delivered_count\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type})\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})\r\n)\r\n\r\nselect \r\n now() as time,\r\n 1.0 * delivered_count/total_count as requirement_delivery_rate\r\nfrom _requirements", "refId": "A", "select": [ [ @@ -618,7 +618,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "with _requirements as(\r\n select\r\n DATE_ADD(date(i.created_date), INTERVAL -DAYOFMONTH(date(i.created_date))+1 DAY) as time,\r\n 1.0 * count(distinct case when i.status = 'DONE' then i.id else null end)/count(distinct i.id) as delivered_rate\r\n from issues i\r\n 
join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type:sqlstring}+'')\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n group by 1\r\n)\r\n\r\nselect\r\n time,\r\n delivered_rate\r\nfrom _requirements\r\norder by time", + "rawSql": "with _requirements as(\r\n select\r\n DATE_ADD(date(i.created_date), INTERVAL -DAYOFMONTH(date(i.created_date))+1 DAY) as time,\r\n 1.0 * count(distinct case when i.status = 'DONE' then i.id else null end)/count(distinct i.id) as delivered_rate\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type})\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})\r\n group by 1\r\n)\r\n\r\nselect\r\n time,\r\n delivered_rate\r\nfrom _requirements\r\norder by time", "refId": "A", "select": [ [ @@ -729,7 +729,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "select \r\n avg(lead_time_minutes/1440) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.type in (${type:sqlstring}+'')\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')", + "rawSql": "select \r\n avg(lead_time_minutes/1440) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.type in (${type})\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id})", "refId": "A", "select": [ [ @@ -815,7 +815,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "with _ranks as(\r\n select \r\n i.lead_time_minutes,\r\n percent_rank() over (order by lead_time_minutes asc) as ranks\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type:sqlstring}+'')\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n)\r\n\r\nselect\r\n max(lead_time_minutes/1440) as value\r\nfrom _ranks\r\nwhere \r\n ranks <= 0.8", + "rawSql": "with _ranks as(\r\n select \r\n i.lead_time_minutes,\r\n percent_rank() over (order by lead_time_minutes asc) as ranks\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type})\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id})\r\n)\r\n\r\nselect\r\n max(lead_time_minutes/1440) as value\r\nfrom _ranks\r\nwhere \r\n ranks <= 0.8", "refId": "A", "select": [ [ @@ -936,7 +936,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "with _requirements as(\r\n select \r\n DATE_ADD(date(i.resolution_date), INTERVAL -DAYOFMONTH(date(i.resolution_date))+1 DAY) as time,\r\n avg(lead_time_minutes/1440) as mean_lead_time\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type:sqlstring}+'')\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n group by 1\r\n)\r\n\r\nselect \r\n date_format(time,'%M %Y') as month,\r\n mean_lead_time\r\nfrom _requirements\r\norder by time asc", + "rawSql": "with _requirements as(\r\n select \r\n DATE_ADD(date(i.resolution_date), INTERVAL -DAYOFMONTH(date(i.resolution_date))+1 DAY) as time,\r\n avg(lead_time_minutes/1440) as mean_lead_time\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type})\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in 
(${board_id})\r\n group by 1\r\n)\r\n\r\nselect \r\n date_format(time,'%M %Y') as month,\r\n mean_lead_time\r\nfrom _requirements\r\norder by time asc", "refId": "A", "select": [ [ @@ -1020,7 +1020,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "with _ranks as(\r\n select \r\n round(i.lead_time_minutes/1440) as lead_time_day\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type:sqlstring}+'')\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n order by lead_time_day asc\r\n)\r\n\r\nselect \r\n now() as time,\r\n lpad(concat(lead_time_day,'d'), 4, ' ') as metric,\r\n percent_rank() over (order by lead_time_day asc) as value\r\nfrom _ranks\r\norder by lead_time_day asc", + "rawSql": "with _ranks as(\r\n select \r\n round(i.lead_time_minutes/1440) as lead_time_day\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type})\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id})\r\n order by lead_time_day asc\r\n)\r\n\r\nselect \r\n now() as time,\r\n lpad(concat(lead_time_day,'d'), 4, ' ') as metric,\r\n percent_rank() over (order by lead_time_day asc) as value\r\nfrom _ranks\r\norder by lead_time_day asc", "refId": "A", "select": [ [ diff --git a/grafana/dashboards/WeeklyBugRetro.json b/grafana/dashboards/WeeklyBugRetro.json index a26b1740911..df90e103a38 100644 --- a/grafana/dashboards/WeeklyBugRetro.json +++ b/grafana/dashboards/WeeklyBugRetro.json @@ -121,7 +121,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select\n count(distinct i.id)\nfrom\n issues as i\n join board_issues bi on i.id = bi.issue_id\n join boards b on bi.board_id = b.id\nwhere\n i.type in (${issue_type:sqlstring}+'')\n and date(i.created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n and b.id in (${board_id:sqlstring}+'')", + "rawSql": "select\n count(distinct i.id)\nfrom\n issues as i\n join board_issues bi on i.id = bi.issue_id\n join boards b on bi.board_id = b.id\nwhere\n i.type in (${issue_type})\n and date(i.created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n and b.id in (${board_id})", "refId": "A", "select": [ [ @@ -232,7 +232,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select\n priority,\n count(distinct i.id) as 'Issue Number'\nfrom\n issues as i\n join board_issues bi on i.id = bi.issue_id\n join boards b on bi.board_id = b.id\nwhere\n i.type in (${issue_type:sqlstring}+'')\n and date(i.created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n and b.id in (${board_id:sqlstring}+'')\ngroup by 1", + "rawSql": "select\n priority,\n count(distinct i.id) as 'Issue Number'\nfrom\n issues as i\n join board_issues bi on i.id = bi.issue_id\n join boards b on bi.board_id = b.id\nwhere\n i.type in (${issue_type})\n and date(i.created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n and b.id in (${board_id})\ngroup by 1", "refId": "A", "select": [ [ @@ -421,7 +421,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select\n i.issue_key as 'Issue Number',\n i.title as 'Title',\n i.url as 'Url'\nfrom\n issues as i\n\tjoin board_issues bi on 
i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\nwhere\n i.type in (${issue_type:sqlstring}+'')\n and date(i.created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n and b.id in (${board_id:sqlstring}+'')", + "rawSql": "select\n i.issue_key as 'Issue Number',\n i.title as 'Title',\n i.url as 'Url'\nfrom\n issues as i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\nwhere\n i.type in (${issue_type})\n and date(i.created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n and b.id in (${board_id})", "refId": "A", "select": [ [ @@ -511,7 +511,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select\n count(distinct i.id)\nfrom\n issues as i\n join board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\nwhere\n i.type in (${issue_type:sqlstring}+'')\n and status = 'DONE'\n and date(i.resolution_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n and b.id in (${board_id:sqlstring}+'')", + "rawSql": "select\n count(distinct i.id)\nfrom\n issues as i\n join board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\nwhere\n i.type in (${issue_type})\n and status = 'DONE'\n and date(i.resolution_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n and b.id in (${board_id})", "refId": "A", "select": [ [ @@ -622,7 +622,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select\n avg(lead_time_minutes / 1440)\nfrom\n issues as i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\nwhere\n i.type in (${issue_type:sqlstring}+'')\n and status = 'DONE'\n and date(i.resolution_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n and b.id in (${board_id:sqlstring}+'')", + "rawSql": "select\n avg(lead_time_minutes / 1440)\nfrom\n issues as i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\nwhere\n i.type in (${issue_type})\n and status = 'DONE'\n and date(i.resolution_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n and b.id in (${board_id})", "refId": "A", "select": [ [ @@ -767,7 +767,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select\n i.issue_key as 'Issue Number',\n i.title as 'Title',\n lead_time_minutes/1440 as 'Lead Time in Days',\n i.url as 'Url'\nfrom\n issues as i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\nwhere\n i.type in (${issue_type:sqlstring}+'')\n and status = 'DONE'\n and date(i.resolution_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n and b.id in (${board_id:sqlstring}+'')", + "rawSql": "select\n i.issue_key as 'Issue Number',\n i.title as 'Title',\n lead_time_minutes/1440 as 'Lead Time in Days',\n i.url as 'Url'\nfrom\n issues as i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\nwhere\n i.type in (${issue_type})\n and status = 'DONE'\n and date(i.resolution_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n and b.id in (${board_id})", "refId": "A", "select": [ [ @@ -887,7 
+887,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select\n concat('#',i.issue_key, ' ', i.title) as issue_key,\n lead_time_minutes/1440 as lead_time\nfrom\n issues as i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\nwhere\n i.type in (${issue_type:sqlstring}+'')\n and status = 'DONE'\n and date(i.resolution_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n and b.id in (${board_id:sqlstring}+'')\norder by lead_time desc", + "rawSql": "select\n concat('#',i.issue_key, ' ', i.title) as issue_key,\n lead_time_minutes/1440 as lead_time\nfrom\n issues as i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\nwhere\n i.type in (${issue_type})\n and status = 'DONE'\n and date(i.resolution_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n and b.id in (${board_id})\norder by lead_time desc", "refId": "A", "select": [ [ @@ -976,7 +976,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select\n count(distinct i.id)\nfrom \n issues i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\nwhere\n i.type in (${issue_type:sqlstring}+'')\n and i.status != 'DONE'\n and b.id in (${board_id:sqlstring}+'')", + "rawSql": "select\n count(distinct i.id)\nfrom \n issues i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\nwhere\n i.type in (${issue_type})\n and i.status != 'DONE'\n and b.id in (${board_id})", "refId": "A", "select": [ [ @@ -1081,7 +1081,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select \n avg((TIMESTAMPDIFF(MINUTE, i.created_date,NOW()))/1440)\nfrom \n issues i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\nwhere\n i.type in (${issue_type:sqlstring}+'')\n and i.status != 'DONE'\n and b.id in (${board_id:sqlstring}+'')", + "rawSql": "select \n avg((TIMESTAMPDIFF(MINUTE, i.created_date,NOW()))/1440)\nfrom \n issues i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\nwhere\n i.type in (${issue_type})\n and i.status != 'DONE'\n and b.id in (${board_id})", "refId": "A", "select": [ [ @@ -1248,7 +1248,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select \n i.issue_key as 'Issue Number',\n i.title as 'Title',\n (TIMESTAMPDIFF(MINUTE, i.created_date,NOW()))/1440 as 'Queue Time in Days',\n i.url as 'Url',\n priority\nfrom \n issues i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\nwhere\n i.type in (${issue_type:sqlstring}+'')\n and i.status != 'DONE'\n and b.id in (${board_id:sqlstring}+'')\n and priority in (${priority:sqlstring}+'')\norder by 'Queue Time' desc", + "rawSql": "select \n i.issue_key as 'Issue Number',\n i.title as 'Title',\n (TIMESTAMPDIFF(MINUTE, i.created_date,NOW()))/1440 as 'Queue Time in Days',\n i.url as 'Url',\n priority\nfrom \n issues i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\nwhere\n i.type in (${issue_type})\n and i.status != 'DONE'\n and b.id in (${board_id})\n and priority in (${priority})\norder by 'Queue Time' desc", "refId": "A", "select": [ [ @@ -1369,7 +1369,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select \n concat('#', i.issue_key) as issue_key,\n 
(TIMESTAMPDIFF(MINUTE, i.created_date,NOW()))/1440 as 'Queue Time in Days'\nfrom \n issues i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\nwhere\n i.type in (${issue_type:sqlstring}+'')\n and i.status != 'DONE'\n and b.id in (${board_id:sqlstring}+'')\norder by 2 desc", + "rawSql": "select \n concat('#', i.issue_key) as issue_key,\n (TIMESTAMPDIFF(MINUTE, i.created_date,NOW()))/1440 as 'Queue Time in Days'\nfrom \n issues i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\nwhere\n i.type in (${issue_type})\n and i.status != 'DONE'\n and b.id in (${board_id})\norder by 2 desc", "refId": "A", "select": [ [ @@ -1513,7 +1513,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select \n i.issue_key as 'Issue Number',\n i.title as 'Title',\n (TIMESTAMPDIFF(MINUTE, i.created_date,NOW()))/1440 as 'Queue Time in Days',\n i.url as 'Url'\nfrom \n issues i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\nwhere\n i.type in (${issue_type:sqlstring}+'')\n and i.status != 'DONE'\n and i.assignee_name = ''\n and b.id in (${board_id:sqlstring}+'')\norder by 'Queue Time' desc", + "rawSql": "select \n i.issue_key as 'Issue Number',\n i.title as 'Title',\n (TIMESTAMPDIFF(MINUTE, i.created_date,NOW()))/1440 as 'Queue Time in Days',\n i.url as 'Url'\nfrom \n issues i\n\tjoin board_issues bi on i.id = bi.issue_id\n\tjoin boards b on bi.board_id = b.id\nwhere\n i.type in (${issue_type})\n and i.status != 'DONE'\n and i.assignee_name = ''\n and b.id in (${board_id})\norder by 'Queue Time' desc", "refId": "A", "select": [ [ @@ -1631,7 +1631,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "with bugs as ( \n select \n DATE_ADD(date(i.created_date), INTERVAL -WEEKDAY(date(i.created_date)) DAY) as time,\n count(distinct i.id) as bug_count\n from\n issues as i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n where \n i.type in (${issue_type:sqlstring}+'')\n and $__timeFilter(i.created_date)\n and b.id in (${board_id:sqlstring}+'')\n group by time\n order by time desc\n),\n\ncalendar_date as(\n\tSELECT CAST((SYSDATE()-INTERVAL (H+T+U) DAY) AS date) d\n\tFROM ( SELECT 0 H\n\t\t\tUNION ALL SELECT 100 UNION ALL SELECT 200 UNION ALL SELECT 300\n\t\t) H CROSS JOIN ( SELECT 0 T\n\t\t\tUNION ALL SELECT 10 UNION ALL SELECT 20 UNION ALL SELECT 30\n\t\t\tUNION ALL SELECT 40 UNION ALL SELECT 50 UNION ALL SELECT 60\n\t\t\tUNION ALL SELECT 70 UNION ALL SELECT 80 UNION ALL SELECT 90\n\t\t) T CROSS JOIN ( SELECT 0 U\n\t\t\tUNION ALL SELECT 1 UNION ALL SELECT 2 UNION ALL SELECT 3\n\t\t\tUNION ALL SELECT 4 UNION ALL SELECT 5 UNION ALL SELECT 6\n\t\t\tUNION ALL SELECT 7 UNION ALL SELECT 8 UNION ALL SELECT 9\n\t\t) U\n\tWHERE\n\t\t(SYSDATE()-INTERVAL (H+T+U) DAY) >= (SYSDATE()-INTERVAL 6 MONTH)\n),\n\ncalendar_weeks as(\n select \n \tdistinct date(DATE_ADD(date(d), INTERVAL -WEEKDAY(date(d)) DAY)) as start_of_week\n FROM calendar_date\n ORDER BY 1 asc\n)\n\n\nselect \n concat(date_format(cw.start_of_week,'%m/%d'), ' - ', date_format(DATE_ADD(cw.start_of_week, INTERVAL +6 DAY),'%m/%d')) as week,\n case when bug_count is not null then bug_count else 0 end as 'Weekly New Bugs'\nfrom calendar_weeks cw left join bugs b on cw.start_of_week = b.time\norder by cw.start_of_week asc\n", + "rawSql": "with bugs as ( \n select \n DATE_ADD(date(i.created_date), INTERVAL -WEEKDAY(date(i.created_date)) DAY) as time,\n count(distinct 
i.id) as bug_count\n from\n issues as i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n where \n i.type in (${issue_type})\n and $__timeFilter(i.created_date)\n and b.id in (${board_id})\n group by time\n order by time desc\n),\n\ncalendar_date as(\n\tSELECT CAST((SYSDATE()-INTERVAL (H+T+U) DAY) AS date) d\n\tFROM ( SELECT 0 H\n\t\t\tUNION ALL SELECT 100 UNION ALL SELECT 200 UNION ALL SELECT 300\n\t\t) H CROSS JOIN ( SELECT 0 T\n\t\t\tUNION ALL SELECT 10 UNION ALL SELECT 20 UNION ALL SELECT 30\n\t\t\tUNION ALL SELECT 40 UNION ALL SELECT 50 UNION ALL SELECT 60\n\t\t\tUNION ALL SELECT 70 UNION ALL SELECT 80 UNION ALL SELECT 90\n\t\t) T CROSS JOIN ( SELECT 0 U\n\t\t\tUNION ALL SELECT 1 UNION ALL SELECT 2 UNION ALL SELECT 3\n\t\t\tUNION ALL SELECT 4 UNION ALL SELECT 5 UNION ALL SELECT 6\n\t\t\tUNION ALL SELECT 7 UNION ALL SELECT 8 UNION ALL SELECT 9\n\t\t) U\n\tWHERE\n\t\t(SYSDATE()-INTERVAL (H+T+U) DAY) >= (SYSDATE()-INTERVAL 6 MONTH)\n),\n\ncalendar_weeks as(\n select \n \tdistinct date(DATE_ADD(date(d), INTERVAL -WEEKDAY(date(d)) DAY)) as start_of_week\n FROM calendar_date\n ORDER BY 1 asc\n)\n\n\nselect \n concat(date_format(cw.start_of_week,'%m/%d'), ' - ', date_format(DATE_ADD(cw.start_of_week, INTERVAL +6 DAY),'%m/%d')) as week,\n case when bug_count is not null then bug_count else 0 end as 'Weekly New Bugs'\nfrom calendar_weeks cw left join bugs b on cw.start_of_week = b.time\norder by cw.start_of_week asc\n", "refId": "A", "select": [ [ @@ -1766,7 +1766,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "with bugs as (\n select \n DATE_ADD(date(i.resolution_date), INTERVAL -WEEKDAY(date(i.resolution_date)) DAY) as time,\n count(distinct i.id) as bug_count\n from\n issues as i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n where \n i.type in (${issue_type:sqlstring}+'')\n and status = 'DONE'\n and $__timeFilter(i.resolution_date)\n and b.id in (${board_id:sqlstring}+'')\n group by time\n order by time desc\n),\n\ncalendar_date as(\n\tSELECT CAST((SYSDATE()-INTERVAL (H+T+U) DAY) AS date) d\n\tFROM ( SELECT 0 H\n\t\t\tUNION ALL SELECT 100 UNION ALL SELECT 200 UNION ALL SELECT 300\n\t\t) H CROSS JOIN ( SELECT 0 T\n\t\t\tUNION ALL SELECT 10 UNION ALL SELECT 20 UNION ALL SELECT 30\n\t\t\tUNION ALL SELECT 40 UNION ALL SELECT 50 UNION ALL SELECT 60\n\t\t\tUNION ALL SELECT 70 UNION ALL SELECT 80 UNION ALL SELECT 90\n\t\t) T CROSS JOIN ( SELECT 0 U\n\t\t\tUNION ALL SELECT 1 UNION ALL SELECT 2 UNION ALL SELECT 3\n\t\t\tUNION ALL SELECT 4 UNION ALL SELECT 5 UNION ALL SELECT 6\n\t\t\tUNION ALL SELECT 7 UNION ALL SELECT 8 UNION ALL SELECT 9\n\t\t) U\n\tWHERE\n\t\t(SYSDATE()-INTERVAL (H+T+U) DAY) >= (SYSDATE()-INTERVAL 6 MONTH)\n),\n\ncalendar_weeks as(\n select \n \tdistinct date(DATE_ADD(date(d), INTERVAL -WEEKDAY(date(d)) DAY)) as start_of_week\n FROM calendar_date\n ORDER BY 1 asc\n)\n\nselect \n concat(date_format(cw.start_of_week,'%m/%d'), ' - ', date_format(DATE_ADD(cw.start_of_week, INTERVAL +6 DAY),'%m/%d')) as week,\n case when bug_count is not null then bug_count else 0 end as 'Weekly Closed Bugs'\nfrom calendar_weeks cw left join bugs b on cw.start_of_week = b.time\norder by cw.start_of_week asc", + "rawSql": "with bugs as (\n select \n DATE_ADD(date(i.resolution_date), INTERVAL -WEEKDAY(date(i.resolution_date)) DAY) as time,\n count(distinct i.id) as bug_count\n from\n issues as i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n 
where \n i.type in (${issue_type})\n and status = 'DONE'\n and $__timeFilter(i.resolution_date)\n and b.id in (${board_id})\n group by time\n order by time desc\n),\n\ncalendar_date as(\n\tSELECT CAST((SYSDATE()-INTERVAL (H+T+U) DAY) AS date) d\n\tFROM ( SELECT 0 H\n\t\t\tUNION ALL SELECT 100 UNION ALL SELECT 200 UNION ALL SELECT 300\n\t\t) H CROSS JOIN ( SELECT 0 T\n\t\t\tUNION ALL SELECT 10 UNION ALL SELECT 20 UNION ALL SELECT 30\n\t\t\tUNION ALL SELECT 40 UNION ALL SELECT 50 UNION ALL SELECT 60\n\t\t\tUNION ALL SELECT 70 UNION ALL SELECT 80 UNION ALL SELECT 90\n\t\t) T CROSS JOIN ( SELECT 0 U\n\t\t\tUNION ALL SELECT 1 UNION ALL SELECT 2 UNION ALL SELECT 3\n\t\t\tUNION ALL SELECT 4 UNION ALL SELECT 5 UNION ALL SELECT 6\n\t\t\tUNION ALL SELECT 7 UNION ALL SELECT 8 UNION ALL SELECT 9\n\t\t) U\n\tWHERE\n\t\t(SYSDATE()-INTERVAL (H+T+U) DAY) >= (SYSDATE()-INTERVAL 6 MONTH)\n),\n\ncalendar_weeks as(\n select \n \tdistinct date(DATE_ADD(date(d), INTERVAL -WEEKDAY(date(d)) DAY)) as start_of_week\n FROM calendar_date\n ORDER BY 1 asc\n)\n\nselect \n concat(date_format(cw.start_of_week,'%m/%d'), ' - ', date_format(DATE_ADD(cw.start_of_week, INTERVAL +6 DAY),'%m/%d')) as week,\n case when bug_count is not null then bug_count else 0 end as 'Weekly Closed Bugs'\nfrom calendar_weeks cw left join bugs b on cw.start_of_week = b.time\norder by cw.start_of_week asc", "refId": "A", "select": [ [ @@ -1903,7 +1903,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "with calendar_date as(\n\tSELECT CAST((SYSDATE()-INTERVAL (H+T+U) DAY) AS date) d\n\tFROM ( SELECT 0 H\n\t\t\tUNION ALL SELECT 100 UNION ALL SELECT 200 UNION ALL SELECT 300\n\t\t) H CROSS JOIN ( SELECT 0 T\n\t\t\tUNION ALL SELECT 10 UNION ALL SELECT 20 UNION ALL SELECT 30\n\t\t\tUNION ALL SELECT 40 UNION ALL SELECT 50 UNION ALL SELECT 60\n\t\t\tUNION ALL SELECT 70 UNION ALL SELECT 80 UNION ALL SELECT 90\n\t\t) T CROSS JOIN ( SELECT 0 U\n\t\t\tUNION ALL SELECT 1 UNION ALL SELECT 2 UNION ALL SELECT 3\n\t\t\tUNION ALL SELECT 4 UNION ALL SELECT 5 UNION ALL SELECT 6\n\t\t\tUNION ALL SELECT 7 UNION ALL SELECT 8 UNION ALL SELECT 9\n\t\t) U\n\tWHERE\n\t\t(SYSDATE()-INTERVAL (H+T+U) DAY) >= (SYSDATE()-INTERVAL 6 MONTH)\n),\n\ncalendar_weeks as(\n select \n \tdistinct date(DATE_ADD(date(d), INTERVAL -WEEKDAY(date(d)) DAY)) as start_of_week\n FROM calendar_date\n ORDER BY 1 asc\n),\n\ncreated_bugs as ( \n select \n DATE_ADD(date(i.created_date), INTERVAL -WEEKDAY(date(i.created_date)) DAY) as time,\n count(distinct i.id) as bug_count\n from\n issues as i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n where \n i.type in (${issue_type:sqlstring}+'')\n and $__timeFilter(i.created_date)\n and b.id in (${board_id:sqlstring}+'')\n group by time\n order by time desc\n),\n\nresolved_bugs as (\n select \n DATE_ADD(date(i.resolution_date), INTERVAL -WEEKDAY(date(i.resolution_date)) DAY) as time,\n count(distinct i.id) as bug_count\n from\n issues as i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n where \n i.type in (${issue_type:sqlstring}+'')\n and status = 'DONE'\n and $__timeFilter(i.resolution_date)\n and b.id in (${board_id:sqlstring}+'')\n group by time\n order by time desc\n),\n\nweekly_new_bug as(\n select\n cw.start_of_week as week,\n case when bug_count is not null then bug_count else 0 end as weekly_new_bug\n from calendar_weeks cw left join created_bugs cb on cw.start_of_week = cb.time\n),\n\nweekly_closed_bug as(\n select\n 
cw.start_of_week as week,\n case when bug_count is not null then bug_count else 0 end as weekly_closed_bug\n from calendar_weeks cw left join resolved_bugs cb on cw.start_of_week = cb.time\n),\n\nweekly_updates as(\n SELECT t1.week, weekly_new_bug, weekly_closed_bug FROM weekly_new_bug t1\n LEFT JOIN weekly_closed_bug t2 ON t1.week = t2.week\n UNION\n SELECT t1.week, weekly_new_bug, weekly_closed_bug FROM weekly_new_bug t1\n RIGHT JOIN weekly_closed_bug t2 ON t1.week = t2.week\n),\n\n\noriginal_open_bugs as (\n SELECT \n count(distinct i.id) as original_open_bug_count\n FROM\n issues as i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n where \n i.type in (${issue_type:sqlstring}+'')\n and i.created_date < $__timeFrom()\n and (i.status != 'DONE' or $__timeFilter(i.resolution_date))\n and b.id in (${board_id:sqlstring}+'')\n),\n\nweekly_updated_without_null as(\n SELECT \n week, \n COALESCE(weekly_new_bug,0) as weekly_new_bug, \n COALESCE(weekly_closed_bug,0) as weekly_closed_bug,\n original_open_bug_count\n from weekly_updates, original_open_bugs\n where week is not null\n),\n\nweekly_delta as(\n SELECT \n *,\n (weekly_new_bug - weekly_closed_bug) as weekly_delta\n from weekly_updated_without_null\n order by week asc\n),\n\nfinal_data as(\n SELECT \n *,\n concat(date_format(week,'%m/%d'), ' - ', date_format(DATE_ADD(week, INTERVAL +6 DAY),'%m/%d')) as _week,\n sum(weekly_delta) over(order by week asc) as weekly_accumulated\n from weekly_delta\n)\n\nSELECT \n _week,\n (original_open_bug_count + weekly_accumulated) as \"Total No. of Outstanding Bugs By the End of Week\" from final_data", + "rawSql": "with calendar_date as(\n\tSELECT CAST((SYSDATE()-INTERVAL (H+T+U) DAY) AS date) d\n\tFROM ( SELECT 0 H\n\t\t\tUNION ALL SELECT 100 UNION ALL SELECT 200 UNION ALL SELECT 300\n\t\t) H CROSS JOIN ( SELECT 0 T\n\t\t\tUNION ALL SELECT 10 UNION ALL SELECT 20 UNION ALL SELECT 30\n\t\t\tUNION ALL SELECT 40 UNION ALL SELECT 50 UNION ALL SELECT 60\n\t\t\tUNION ALL SELECT 70 UNION ALL SELECT 80 UNION ALL SELECT 90\n\t\t) T CROSS JOIN ( SELECT 0 U\n\t\t\tUNION ALL SELECT 1 UNION ALL SELECT 2 UNION ALL SELECT 3\n\t\t\tUNION ALL SELECT 4 UNION ALL SELECT 5 UNION ALL SELECT 6\n\t\t\tUNION ALL SELECT 7 UNION ALL SELECT 8 UNION ALL SELECT 9\n\t\t) U\n\tWHERE\n\t\t(SYSDATE()-INTERVAL (H+T+U) DAY) >= (SYSDATE()-INTERVAL 6 MONTH)\n),\n\ncalendar_weeks as(\n select \n \tdistinct date(DATE_ADD(date(d), INTERVAL -WEEKDAY(date(d)) DAY)) as start_of_week\n FROM calendar_date\n ORDER BY 1 asc\n),\n\ncreated_bugs as ( \n select \n DATE_ADD(date(i.created_date), INTERVAL -WEEKDAY(date(i.created_date)) DAY) as time,\n count(distinct i.id) as bug_count\n from\n issues as i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n where \n i.type in (${issue_type})\n and $__timeFilter(i.created_date)\n and b.id in (${board_id})\n group by time\n order by time desc\n),\n\nresolved_bugs as (\n select \n DATE_ADD(date(i.resolution_date), INTERVAL -WEEKDAY(date(i.resolution_date)) DAY) as time,\n count(distinct i.id) as bug_count\n from\n issues as i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n where \n i.type in (${issue_type})\n and status = 'DONE'\n and $__timeFilter(i.resolution_date)\n and b.id in (${board_id})\n group by time\n order by time desc\n),\n\nweekly_new_bug as(\n select\n cw.start_of_week as week,\n case when bug_count is not null then bug_count else 0 end as weekly_new_bug\n from calendar_weeks cw 
left join created_bugs cb on cw.start_of_week = cb.time\n),\n\nweekly_closed_bug as(\n select\n cw.start_of_week as week,\n case when bug_count is not null then bug_count else 0 end as weekly_closed_bug\n from calendar_weeks cw left join resolved_bugs cb on cw.start_of_week = cb.time\n),\n\nweekly_updates as(\n SELECT t1.week, weekly_new_bug, weekly_closed_bug FROM weekly_new_bug t1\n LEFT JOIN weekly_closed_bug t2 ON t1.week = t2.week\n UNION\n SELECT t1.week, weekly_new_bug, weekly_closed_bug FROM weekly_new_bug t1\n RIGHT JOIN weekly_closed_bug t2 ON t1.week = t2.week\n),\n\n\noriginal_open_bugs as (\n SELECT \n count(distinct i.id) as original_open_bug_count\n FROM\n issues as i\n\t join board_issues bi on i.id = bi.issue_id\n\t join boards b on bi.board_id = b.id\n where \n i.type in (${issue_type})\n and i.created_date < $__timeFrom()\n and (i.status != 'DONE' or $__timeFilter(i.resolution_date))\n and b.id in (${board_id})\n),\n\nweekly_updated_without_null as(\n SELECT \n week, \n COALESCE(weekly_new_bug,0) as weekly_new_bug, \n COALESCE(weekly_closed_bug,0) as weekly_closed_bug,\n original_open_bug_count\n from weekly_updates, original_open_bugs\n where week is not null\n),\n\nweekly_delta as(\n SELECT \n *,\n (weekly_new_bug - weekly_closed_bug) as weekly_delta\n from weekly_updated_without_null\n order by week asc\n),\n\nfinal_data as(\n SELECT \n *,\n concat(date_format(week,'%m/%d'), ' - ', date_format(DATE_ADD(week, INTERVAL +6 DAY),'%m/%d')) as _week,\n sum(weekly_delta) over(order by week asc) as weekly_accumulated\n from weekly_delta\n)\n\nSELECT \n _week,\n (original_open_bug_count + weekly_accumulated) as \"Total No. of Outstanding Bugs By the End of Week\" from final_data", "refId": "A", "select": [ [ diff --git a/grafana/dashboards/WeeklyCommunityRetro.json b/grafana/dashboards/WeeklyCommunityRetro.json index e753e41ad23..6c8605b6fc9 100644 --- a/grafana/dashboards/WeeklyCommunityRetro.json +++ b/grafana/dashboards/WeeklyCommunityRetro.json @@ -147,7 +147,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select\n count(distinct i.id)\nfrom\n issues as i\n join board_issues bi on i.id = bi.issue_id\n join boards b on bi.board_id = b.id\nwhere\n i.type in (${issue_type:sqlstring}+'')\n and date(i.created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n and b.id in (${repo_id:sqlstring}+'')", + "rawSql": "select\n count(distinct i.id)\nfrom\n issues as i\n join board_issues bi on i.id = bi.issue_id\n join boards b on bi.board_id = b.id\nwhere\n i.type in (${issue_type})\n and date(i.created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n and b.id in (${repo_id})", "refId": "A", "select": [ [ @@ -254,7 +254,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select\n count(distinct i.id)\nfrom\n issues as i\n join board_issues bi on i.id = bi.issue_id\n join boards b on bi.board_id = b.id\nwhere\n i.type in (${issue_type:sqlstring}+'')\n and date(i.created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n and b.id in (${repo_id:sqlstring}+'')\n and i.creator_id not in (select distinct id from accounts where organization in (${org:sqlstring}+''))", + "rawSql": "select\n count(distinct i.id)\nfrom\n issues as i\n join board_issues bi on i.id = bi.issue_id\n join boards b on bi.board_id = b.id\nwhere\n i.type in 
(${issue_type})\n and date(i.created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n and b.id in (${repo_id})\n and i.creator_id not in (select distinct id from accounts where organization in (${org}))", "refId": "A", "select": [ [ @@ -362,7 +362,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select\n sum(case when i.creator_id not in (select distinct id from accounts where organization in (${org:sqlstring}+'')\n ) then 1 else 0 end)/count(distinct i.id) as community_issue_ratio\nfrom\n issues as i\n join board_issues bi on i.id = bi.issue_id\n join boards b on bi.board_id = b.id\nwhere\n i.type in (${issue_type:sqlstring}+'')\n and date(i.created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n and b.id in (${repo_id:sqlstring}+'')", + "rawSql": "select\n sum(case when i.creator_id not in (select distinct id from accounts where organization in (${org})\n ) then 1 else 0 end)/count(distinct i.id) as community_issue_ratio\nfrom\n issues as i\n join board_issues bi on i.id = bi.issue_id\n join boards b on bi.board_id = b.id\nwhere\n i.type in (${issue_type})\n and date(i.created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n and b.id in (${repo_id})", "refId": "A", "select": [ [ @@ -472,7 +472,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select\n distinct i.creator_name\nfrom\n issues as i\n join board_issues bi on i.id = bi.issue_id\n join boards b on bi.board_id = b.id\nwhere\n i.type in (${issue_type:sqlstring}+'')\n and date(i.created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n and b.id in (${repo_id:sqlstring}+'')\n and i.creator_name not in (select distinct creator_name from issues where created_date < curdate() - INTERVAL WEEKDAY(curdate())+7 DAY and creator_name is not null)", + "rawSql": "select\n distinct i.creator_name\nfrom\n issues as i\n join board_issues bi on i.id = bi.issue_id\n join boards b on bi.board_id = b.id\nwhere\n i.type in (${issue_type})\n and date(i.created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n and b.id in (${repo_id})\n and i.creator_name not in (select distinct creator_name from issues where created_date < curdate() - INTERVAL WEEKDAY(curdate())+7 DAY and creator_name is not null)", "refId": "A", "select": [ [ @@ -566,7 +566,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "with issue_comment_list as(\n select\n i.id as issue_id,\n i.url,\n i.title,\n ic.id as comment_id,\n ic.created_date as comment_date,\n ic.body,\n case when ic.id is not null then rank() over (partition by i.id order by ic.created_date asc) else null end as comment_rank\n from\n issues i\n join board_issues bi on i.id = bi.issue_id\n join boards b on bi.board_id = b.id\n left join issue_comments ic on i.id = ic.issue_id\n where\n i.type in (${issue_type:sqlstring}+'')\n and date(i.created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n and b.id in (${repo_id:sqlstring}+'')\n and i.creator_id not in (select distinct id from accounts where organization in (${org:sqlstring}+''))\n)\n\nselect\n 1 - count(distinct case when comment_id is null then issue_id else null end)/count(distinct issue_id) as 
response_rate\nfrom issue_comment_list", + "rawSql": "with issue_comment_list as(\n select\n i.id as issue_id,\n i.url,\n i.title,\n ic.id as comment_id,\n ic.created_date as comment_date,\n ic.body,\n case when ic.id is not null then rank() over (partition by i.id order by ic.created_date asc) else null end as comment_rank\n from\n issues i\n join board_issues bi on i.id = bi.issue_id\n join boards b on bi.board_id = b.id\n left join issue_comments ic on i.id = ic.issue_id\n where\n i.type in (${issue_type})\n and date(i.created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n and b.id in (${repo_id})\n and i.creator_id not in (select distinct id from accounts where organization in (${org}))\n)\n\nselect\n 1 - count(distinct case when comment_id is null then issue_id else null end)/count(distinct issue_id) as response_rate\nfrom issue_comment_list", "refId": "A", "select": [ [ @@ -736,7 +736,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "with issue_comment_list as(\n select\n i.id as issue_id,\n i.url,\n i.issue_key,\n i.title,\n i.creator_name,\n i.created_date as issue_created_date,\n i.status,\n ic.id as comment_id,\n ic.created_date as comment_date,\n ic.body,\n case when ic.id is not null then rank() over (partition by i.id order by ic.created_date asc) else null end as comment_rank\n from\n issues i\n join board_issues bi on i.id = bi.issue_id\n join boards b on bi.board_id = b.id\n left join issue_comments ic on i.id = ic.issue_id\n where\n i.type in (${issue_type:sqlstring}+'')\n and date(i.created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n and b.id in (${repo_id:sqlstring}+'')\n and i.creator_id not in (select distinct id from accounts where organization in (${org:sqlstring}+''))\n)\n\nselect \n issue_key,\n title,\n creator_name,\n issue_created_date,\n status,\n (TIMESTAMPDIFF(MINUTE,issue_created_date,NOW()))/1440 as 'queue_time_in_days',\n url\nfrom issue_comment_list\nwhere comment_id is null", + "rawSql": "with issue_comment_list as(\n select\n i.id as issue_id,\n i.url,\n i.issue_key,\n i.title,\n i.creator_name,\n i.created_date as issue_created_date,\n i.status,\n ic.id as comment_id,\n ic.created_date as comment_date,\n ic.body,\n case when ic.id is not null then rank() over (partition by i.id order by ic.created_date asc) else null end as comment_rank\n from\n issues i\n join board_issues bi on i.id = bi.issue_id\n join boards b on bi.board_id = b.id\n left join issue_comments ic on i.id = ic.issue_id\n where\n i.type in (${issue_type})\n and date(i.created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n and b.id in (${repo_id})\n and i.creator_id not in (select distinct id from accounts where organization in (${org}))\n)\n\nselect \n issue_key,\n title,\n creator_name,\n issue_created_date,\n status,\n (TIMESTAMPDIFF(MINUTE,issue_created_date,NOW()))/1440 as 'queue_time_in_days',\n url\nfrom issue_comment_list\nwhere comment_id is null", "refId": "A", "select": [ [ @@ -846,7 +846,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "with issue_comment_list as(\n select\n i.id as issue_id,\n i.url,\n i.title,\n i.created_date as issue_created_date,\n ic.id as comment_id,\n ic.created_date as comment_date,\n ic.body,\n case when ic.id is not null then rank() over (partition by i.id order by ic.created_date asc) 
else null end as comment_rank\n from\n issues i\n join board_issues bi on i.id = bi.issue_id\n join boards b on bi.board_id = b.id\n left join issue_comments ic on i.id = ic.issue_id\n where\n i.type in (${issue_type:sqlstring}+'')\n and date(i.created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n and b.id in (${repo_id:sqlstring}+'')\n and i.creator_id not in (select distinct id from accounts where organization in (${org:sqlstring}+''))\n)\n\nselect\n avg((TIMESTAMPDIFF(MINUTE, issue_created_date,comment_date))/1440)\nfrom issue_comment_list\nwhere comment_rank = 1", + "rawSql": "with issue_comment_list as(\n select\n i.id as issue_id,\n i.url,\n i.title,\n i.created_date as issue_created_date,\n ic.id as comment_id,\n ic.created_date as comment_date,\n ic.body,\n case when ic.id is not null then rank() over (partition by i.id order by ic.created_date asc) else null end as comment_rank\n from\n issues i\n join board_issues bi on i.id = bi.issue_id\n join boards b on bi.board_id = b.id\n left join issue_comments ic on i.id = ic.issue_id\n where\n i.type in (${issue_type})\n and date(i.created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n and b.id in (${repo_id})\n and i.creator_id not in (select distinct id from accounts where organization in (${org}))\n)\n\nselect\n avg((TIMESTAMPDIFF(MINUTE, issue_created_date,comment_date))/1440)\nfrom issue_comment_list\nwhere comment_rank = 1", "refId": "A", "select": [ [ @@ -1020,7 +1020,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "with issue_comment_list as(\n select\n SUBSTRING_INDEX(i.url, '/', -1) as issue_number,\n i.url,\n i.issue_key,\n i.title,\n i.creator_name,\n i.created_date as issue_created_date,\n ic.id as comment_id,\n ic.created_date as comment_date,\n ic.body,\n case when ic.id is not null then rank() over (partition by i.id order by ic.created_date asc) else null end as comment_rank\n from\n issues i\n join board_issues bi on i.id = bi.issue_id\n join boards b on bi.board_id = b.id\n left join issue_comments ic on i.id = ic.issue_id\n where\n i.type in (${issue_type:sqlstring}+'')\n and date(i.created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n and b.id in (${repo_id:sqlstring}+'')\n and i.creator_id not in (select distinct id from accounts where organization in (${org:sqlstring}+''))\n)\n\nselect\n issue_key,\n title,\n -- body,\n creator_name,\n issue_created_date,\n comment_date,\n (TIMESTAMPDIFF(MINUTE, issue_created_date,comment_date))/1440 as response_time_in_days,\n url\nfrom issue_comment_list\nwhere comment_rank = 1", + "rawSql": "with issue_comment_list as(\n select\n SUBSTRING_INDEX(i.url, '/', -1) as issue_number,\n i.url,\n i.issue_key,\n i.title,\n i.creator_name,\n i.created_date as issue_created_date,\n ic.id as comment_id,\n ic.created_date as comment_date,\n ic.body,\n case when ic.id is not null then rank() over (partition by i.id order by ic.created_date asc) else null end as comment_rank\n from\n issues i\n join board_issues bi on i.id = bi.issue_id\n join boards b on bi.board_id = b.id\n left join issue_comments ic on i.id = ic.issue_id\n where\n i.type in (${issue_type})\n and date(i.created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n and b.id in (${repo_id})\n and i.creator_id not in (select distinct id from 
accounts where organization in (${org}))\n)\n\nselect\n issue_key,\n title,\n -- body,\n creator_name,\n issue_created_date,\n comment_date,\n (TIMESTAMPDIFF(MINUTE, issue_created_date,comment_date))/1440 as response_time_in_days,\n url\nfrom issue_comment_list\nwhere comment_rank = 1", "refId": "A", "select": [ [ @@ -1153,7 +1153,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "select\n\tcount(distinct pr.id) as pull_request_count\nfrom \n\tpull_requests pr\nwhere\n date(created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n\tand base_repo_id in (${repo_id:sqlstring}+'')\n\t\n\n", + "rawSql": "select\n\tcount(distinct pr.id) as pull_request_count\nfrom \n\tpull_requests pr\nwhere\n date(created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n\tand base_repo_id in (${repo_id})\n\t\n\n", "refId": "A", "select": [ [ @@ -1260,7 +1260,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "select\n\tcount(distinct pr.id) as pull_request_count\nfrom \n\tpull_requests pr\nwhere\n date(created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n\tand base_repo_id in (${repo_id:sqlstring}+'')\n\tand author_id not in (select distinct id from accounts where organization in (${org:sqlstring}+''))", + "rawSql": "select\n\tcount(distinct pr.id) as pull_request_count\nfrom \n\tpull_requests pr\nwhere\n date(created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n\tand base_repo_id in (${repo_id})\n\tand author_id not in (select distinct id from accounts where organization in (${org}))", "refId": "A", "select": [ [ @@ -1367,7 +1367,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select\n sum(case when author_id not in (select distinct id from accounts where organization in (${org:sqlstring}+''))\n then 1 else 0 end)/count(distinct pr.id) as community_pr_ratio\nfrom\n pull_requests pr\nwhere\n date(created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n\tand base_repo_id in (${repo_id:sqlstring}+'')", + "rawSql": "select\n sum(case when author_id not in (select distinct id from accounts where organization in (${org}))\n then 1 else 0 end)/count(distinct pr.id) as community_pr_ratio\nfrom\n pull_requests pr\nwhere\n date(created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n\tand base_repo_id in (${repo_id})", "refId": "A", "select": [ [ @@ -1474,7 +1474,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "select\n\tcount(distinct pr.id) as merged_pull_request_count\nfrom \n\tpull_requests pr\nwhere\n date(created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n\tand base_repo_id in (${repo_id:sqlstring}+'')\n\tand merged_date is not null\n\tand author_id not in (select distinct id from accounts where organization in (${org:sqlstring}+''))", + "rawSql": "select\n\tcount(distinct pr.id) as merged_pull_request_count\nfrom \n\tpull_requests pr\nwhere\n date(created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n\tand base_repo_id in (${repo_id})\n\tand merged_date is not null\n\tand author_id not in (select distinct id from 
accounts where organization in (${org}))", "refId": "A", "select": [ [ @@ -1581,7 +1581,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "select\n\tavg(TIMESTAMPDIFF(Minute,created_date,merged_date)/1440)\nfrom \n\tpull_requests\nwhere \n date(created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n\tand base_repo_id in (${repo_id:sqlstring}+'')\n\tand merged_date is not null\n\tand author_id not in (select distinct id from accounts where organization in (${org:sqlstring}+''))", + "rawSql": "select\n\tavg(TIMESTAMPDIFF(Minute,created_date,merged_date)/1440)\nfrom \n\tpull_requests\nwhere \n date(created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n\tand base_repo_id in (${repo_id})\n\tand merged_date is not null\n\tand author_id not in (select distinct id from accounts where organization in (${org}))", "refId": "A", "select": [ [ @@ -1675,7 +1675,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "select\n distinct author_name\nfrom \n\tpull_requests pr\nwhere\n date(created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n\tand merged_date is not null\n\tand base_repo_id in (${repo_id:sqlstring}+'')\n\tand author_name not in (select distinct author_name from pull_requests where created_date < curdate() - INTERVAL WEEKDAY(curdate())+7 DAY and author_name is not null)", + "rawSql": "select\n distinct author_name\nfrom \n\tpull_requests pr\nwhere\n date(created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n\tand merged_date is not null\n\tand base_repo_id in (${repo_id})\n\tand author_name not in (select distinct author_name from pull_requests where created_date < curdate() - INTERVAL WEEKDAY(curdate())+7 DAY and author_name is not null)", "refId": "A", "select": [ [ @@ -1787,7 +1787,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "select\n\tcount(distinct case when merged_date is not null then pr.id else null end)/ count(distinct pr.id) as merged_pull_request_ratio\nfrom \n\tpull_requests pr\nwhere\n date(created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n\tand base_repo_id in (${repo_id:sqlstring}+'')\n\tand author_id not in (select distinct id from accounts where organization in (${org:sqlstring}+''))", + "rawSql": "select\n\tcount(distinct case when merged_date is not null then pr.id else null end)/ count(distinct pr.id) as merged_pull_request_ratio\nfrom \n\tpull_requests pr\nwhere\n date(created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n\tand base_repo_id in (${repo_id})\n\tand author_id not in (select distinct id from accounts where organization in (${org}))", "refId": "A", "select": [ [ @@ -1926,7 +1926,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select\n pull_request_key,\n title,\n status,\n author_name,\n created_date,\n (TIMESTAMPDIFF(MINUTE, created_date,curdate()))/1440 as queue_time_in_days,\n url\nfrom \n\tpull_requests pr\nwhere\n date(created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n\tand base_repo_id in (${repo_id:sqlstring}+'')\n\tand merged_date is null\n and author_id not in (select distinct id from accounts where 
organization in (${org:sqlstring}+''))", + "rawSql": "select\n pull_request_key,\n title,\n status,\n author_name,\n created_date,\n (TIMESTAMPDIFF(MINUTE, created_date,curdate()))/1440 as queue_time_in_days,\n url\nfrom \n\tpull_requests pr\nwhere\n date(created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n\tand base_repo_id in (${repo_id})\n\tand merged_date is null\n and author_id not in (select distinct id from accounts where organization in (${org}))", "refId": "A", "select": [ [ @@ -2064,7 +2064,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "select\n author_name,\n\tcount(distinct pr.id) as pull_request_count\nfrom \n\tpull_requests pr\nwhere\n date(created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n\tand base_repo_id in (${repo_id:sqlstring}+'')\n\tand merged_date is not NULL\n\tand author_id not in (select distinct id from accounts where organization in (${org:sqlstring}+''))\ngroup by 1\norder by 2 desc\nlimit 20\n", + "rawSql": "select\n author_name,\n\tcount(distinct pr.id) as pull_request_count\nfrom \n\tpull_requests pr\nwhere\n date(created_date) BETWEEN curdate() - INTERVAL WEEKDAY(curdate())+7 DAY AND curdate() - INTERVAL WEEKDAY(curdate()) DAY\n\tand base_repo_id in (${repo_id})\n\tand merged_date is not NULL\n\tand author_id not in (select distinct id from accounts where organization in (${org}))\ngroup by 1\norder by 2 desc\nlimit 20\n", "refId": "A", "select": [ [ @@ -2166,7 +2166,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "select\n\tcount(distinct author_id) as all_contributor_count\nfrom \n\tpull_requests pr\nwhere\n\tbase_repo_id in (${repo_id:sqlstring}+'')\n\tand merged_date is not null\n\t-- and author_id not in (select distinct id from accounts where organization in (${org:sqlstring}+''))\n", + "rawSql": "select\n\tcount(distinct author_id) as all_contributor_count\nfrom \n\tpull_requests pr\nwhere\n\tbase_repo_id in (${repo_id})\n\tand merged_date is not null\n\t-- and author_id not in (select distinct id from accounts where organization in (${org}))\n", "refId": "A", "select": [ [ @@ -2273,7 +2273,7 @@ "hide": false, "metricColumn": "none", "rawQuery": true, - "rawSql": "select\n author_name,\n\tcount(distinct pr.id) as merged_pull_request_count\nfrom \n\tpull_requests pr\nwhere\n\tbase_repo_id in (${repo_id:sqlstring}+'')\n\tand merged_date is not NULL\n\t-- and author_id not in (select distinct id from accounts where organization in (${org:sqlstring}+''))\ngroup by 1\norder by 2 desc\nlimit 20\n", + "rawSql": "select\n author_name,\n\tcount(distinct pr.id) as merged_pull_request_count\nfrom \n\tpull_requests pr\nwhere\n\tbase_repo_id in (${repo_id})\n\tand merged_date is not NULL\n\t-- and author_id not in (select distinct id from accounts where organization in (${org}))\ngroup by 1\norder by 2 desc\nlimit 20\n", "refId": "A", "select": [ [ diff --git a/grafana/dashboards/Zentao.json b/grafana/dashboards/Zentao.json index 33e314ca29b..072f6e4f1df 100644 --- a/grafana/dashboards/Zentao.json +++ b/grafana/dashboards/Zentao.json @@ -175,7 +175,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select \r\n count(distinct i.id) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.type in (${type:sqlstring}+'')\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')", + 
"rawSql": "select \r\n count(distinct i.id) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.type in (${type})\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})", "refId": "A", "select": [ [ @@ -278,7 +278,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "select \r\n count(distinct i.id) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.type in (${type:sqlstring}+'')\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')", + "rawSql": "select \r\n count(distinct i.id) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.type in (${type})\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})", "refId": "A", "select": [ [ @@ -413,7 +413,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "SELECT\r\n DATE_ADD(date(i.created_date), INTERVAL -DAYOFMONTH(date(i.created_date))+1 DAY) as time,\r\n count(distinct case when status != 'DONE' then i.id else null end) as \"Number of Open Issues\",\r\n count(distinct case when status = 'DONE' then i.id else null end) as \"Number of Delivered Issues\"\r\nFROM issues i\r\n\tjoin board_issues bi on i.id = bi.issue_id\r\n\tjoin boards b on bi.board_id = b.id\r\nwhere \r\n i.type in (${type:sqlstring}+'')\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\ngroup by 1", + "rawSql": "SELECT\r\n DATE_ADD(date(i.created_date), INTERVAL -DAYOFMONTH(date(i.created_date))+1 DAY) as time,\r\n count(distinct case when status != 'DONE' then i.id else null end) as \"Number of Open Issues\",\r\n count(distinct case when status = 'DONE' then i.id else null end) as \"Number of Delivered Issues\"\r\nFROM issues i\r\n\tjoin board_issues bi on i.id = bi.issue_id\r\n\tjoin boards b on bi.board_id = b.id\r\nwhere \r\n i.type in (${type})\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})\r\ngroup by 1", "refId": "A", "select": [ [ @@ -502,7 +502,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "with _requirements as(\r\n select\r\n count(distinct i.id) as total_count,\r\n count(distinct case when i.status = 'DONE' then i.id else null end) as delivered_count\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type:sqlstring}+'')\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n)\r\n\r\nselect \r\n now() as time,\r\n 1.0 * delivered_count/total_count as requirement_delivery_rate\r\nfrom _requirements", + "rawSql": "with _requirements as(\r\n select\r\n count(distinct i.id) as total_count,\r\n count(distinct case when i.status = 'DONE' then i.id else null end) as delivered_count\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type})\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})\r\n)\r\n\r\nselect \r\n now() as time,\r\n 1.0 * delivered_count/total_count as requirement_delivery_rate\r\nfrom _requirements", "refId": "A", "select": [ [ @@ -618,7 +618,7 @@ "metricColumn": "none", "queryType": "randomWalk", "rawQuery": true, - "rawSql": "with _requirements as(\r\n select\r\n DATE_ADD(date(i.created_date), INTERVAL -DAYOFMONTH(date(i.created_date))+1 DAY) as time,\r\n 1.0 * count(distinct case when i.status = 'DONE' then 
i.id else null end)/count(distinct i.id) as delivered_rate\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type:sqlstring}+'')\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n group by 1\r\n)\r\n\r\nselect\r\n time,\r\n delivered_rate\r\nfrom _requirements\r\norder by time", + "rawSql": "with _requirements as(\r\n select\r\n DATE_ADD(date(i.created_date), INTERVAL -DAYOFMONTH(date(i.created_date))+1 DAY) as time,\r\n 1.0 * count(distinct case when i.status = 'DONE' then i.id else null end)/count(distinct i.id) as delivered_rate\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type})\r\n and $__timeFilter(i.created_date)\r\n and bi.board_id in (${board_id})\r\n group by 1\r\n)\r\n\r\nselect\r\n time,\r\n delivered_rate\r\nfrom _requirements\r\norder by time", "refId": "A", "select": [ [ @@ -729,7 +729,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "select \r\n avg(lead_time_minutes/1440) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.type in (${type:sqlstring}+'')\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')", + "rawSql": "select \r\n avg(lead_time_minutes/1440) as value\r\nfrom issues i\r\n join board_issues bi on i.id = bi.issue_id\r\nwhere \r\n i.type in (${type})\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id})", "refId": "A", "select": [ [ @@ -815,7 +815,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "with _ranks as(\r\n select \r\n i.lead_time_minutes,\r\n percent_rank() over (order by lead_time_minutes asc) as ranks\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type:sqlstring}+'')\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n)\r\n\r\nselect\r\n max(lead_time_minutes/1440) as value\r\nfrom _ranks\r\nwhere \r\n ranks <= 0.8", + "rawSql": "with _ranks as(\r\n select \r\n i.lead_time_minutes,\r\n percent_rank() over (order by lead_time_minutes asc) as ranks\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type})\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id})\r\n)\r\n\r\nselect\r\n max(lead_time_minutes/1440) as value\r\nfrom _ranks\r\nwhere \r\n ranks <= 0.8", "refId": "A", "select": [ [ @@ -936,7 +936,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "with _requirements as(\r\n select \r\n DATE_ADD(date(i.resolution_date), INTERVAL -DAYOFMONTH(date(i.resolution_date))+1 DAY) as time,\r\n avg(lead_time_minutes/1440) as mean_lead_time\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type:sqlstring}+'')\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n group by 1\r\n)\r\n\r\nselect \r\n date_format(time,'%M %Y') as month,\r\n mean_lead_time\r\nfrom _requirements\r\norder by time asc", + "rawSql": "with _requirements as(\r\n select \r\n DATE_ADD(date(i.resolution_date), INTERVAL -DAYOFMONTH(date(i.resolution_date))+1 DAY) as time,\r\n avg(lead_time_minutes/1440) as mean_lead_time\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type})\r\n and 
i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id})\r\n group by 1\r\n)\r\n\r\nselect \r\n date_format(time,'%M %Y') as month,\r\n mean_lead_time\r\nfrom _requirements\r\norder by time asc", "refId": "A", "select": [ [ @@ -1020,7 +1020,7 @@ "group": [], "metricColumn": "none", "rawQuery": true, - "rawSql": "with _ranks as(\r\n select \r\n round(i.lead_time_minutes/1440) as lead_time_day\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type:sqlstring}+'')\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id:sqlstring}+'')\r\n order by lead_time_day asc\r\n)\r\n\r\nselect \r\n now() as time,\r\n lpad(concat(lead_time_day,'d'), 4, ' ') as metric,\r\n percent_rank() over (order by lead_time_day asc) as value\r\nfrom _ranks\r\norder by lead_time_day asc", + "rawSql": "with _ranks as(\r\n select \r\n round(i.lead_time_minutes/1440) as lead_time_day\r\n from issues i\r\n join board_issues bi on i.id = bi.issue_id\r\n where \r\n i.type in (${type})\r\n and i.status = 'DONE'\r\n and $__timeFilter(i.resolution_date)\r\n and bi.board_id in (${board_id})\r\n order by lead_time_day asc\r\n)\r\n\r\nselect \r\n now() as time,\r\n lpad(concat(lead_time_day,'d'), 4, ' ') as metric,\r\n percent_rank() over (order by lead_time_day asc) as value\r\nfrom _ranks\r\norder by lead_time_day asc", "refId": "A", "select": [ [