Fix #2331 - Show stacked bar graph for in time and late reviews.
On the stats page, a single stacked bar graph now combines in-time and late reviews, replacing the separate graphs for those two statistics. The chart tests are also expanded to validate the actual graph content for both stacked and non-stacked charts. Commit ready for merge. - Legacy-Id: 16852
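For orientation, the stacked chart is driven by a list of flot series objects, one per category, in the shape the expanded tests below assert. A minimal sketch of that payload; the timestamp and counts are illustrative (flot expects x values as JavaScript millisecond timestamps), only the labels and colors come from this commit:

// Two series over the same x axis; the stack plugin draws "late" on top of "in time".
var reviewSeries = [
    { label: "in time", color: "#3d22b3", data: [[1554076800000, 3]] },  // 3 reviews completed in time
    { label: "late",    color: "#b42222", data: [[1554076800000, 1]] }   // 1 review completed late
];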
This commit is contained in: parent e145832808, commit f651320ef3
ietf/externals/static/flot/jquery.flot.stack.min.js (vendored, new file, 7 lines added)
@@ -0,0 +1,7 @@
+/* Javascript plotting library for jQuery, version 0.8.3.
+
+Copyright (c) 2007-2014 IOLA and Ole Laursen.
+Licensed under the MIT license.
+
+*/
+(function($){var options={series:{stack:null}};function init(plot){function findMatchingSeries(s,allseries){var res=null;for(var i=0;i<allseries.length;++i){if(s==allseries[i])break;if(allseries[i].stack==s.stack)res=allseries[i]}return res}function stackData(plot,s,datapoints){if(s.stack==null||s.stack===false)return;var other=findMatchingSeries(s,plot.getData());if(!other)return;var ps=datapoints.pointsize,points=datapoints.points,otherps=other.datapoints.pointsize,otherpoints=other.datapoints.points,newpoints=[],px,py,intery,qx,qy,bottom,withlines=s.lines.show,horizontal=s.bars.horizontal,withbottom=ps>2&&(horizontal?datapoints.format[2].x:datapoints.format[2].y),withsteps=withlines&&s.lines.steps,fromgap=true,keyOffset=horizontal?1:0,accumulateOffset=horizontal?0:1,i=0,j=0,l,m;while(true){if(i>=points.length)break;l=newpoints.length;if(points[i]==null){for(m=0;m<ps;++m)newpoints.push(points[i+m]);i+=ps}else if(j>=otherpoints.length){if(!withlines){for(m=0;m<ps;++m)newpoints.push(points[i+m])}i+=ps}else if(otherpoints[j]==null){for(m=0;m<ps;++m)newpoints.push(null);fromgap=true;j+=otherps}else{px=points[i+keyOffset];py=points[i+accumulateOffset];qx=otherpoints[j+keyOffset];qy=otherpoints[j+accumulateOffset];bottom=0;if(px==qx){for(m=0;m<ps;++m)newpoints.push(points[i+m]);newpoints[l+accumulateOffset]+=qy;bottom=qy;i+=ps;j+=otherps}else if(px>qx){if(withlines&&i>0&&points[i-ps]!=null){intery=py+(points[i-ps+accumulateOffset]-py)*(qx-px)/(points[i-ps+keyOffset]-px);newpoints.push(qx);newpoints.push(intery+qy);for(m=2;m<ps;++m)newpoints.push(points[i+m]);bottom=qy}j+=otherps}else{if(fromgap&&withlines){i+=ps;continue}for(m=0;m<ps;++m)newpoints.push(points[i+m]);if(withlines&&j>0&&otherpoints[j-otherps]!=null)bottom=qy+(otherpoints[j-otherps+accumulateOffset]-qy)*(px-qx)/(otherpoints[j-otherps+keyOffset]-qx);newpoints[l+accumulateOffset]+=bottom;i+=ps}fromgap=false;if(l!=newpoints.length&&withbottom)newpoints[l+2]+=bottom}if(withsteps&&l!=newpoints.length&&l>0&&newpoints[l]!=null&&newpoints[l]!=newpoints[l-ps]&&newpoints[l+1]!=newpoints[l-ps+1]){for(m=0;m<ps;++m)newpoints[l+ps+m]=newpoints[l+m];newpoints[l+1]=newpoints[l-ps+1]}}datapoints.points=newpoints}plot.hooks.processDatapoints.push(stackData)}$.plot.plugins.push({init:init,options:options,name:"stack",version:"1.2"})})(jQuery);
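The plugin above works by registering a processDatapoints hook: each series whose stack key matches an earlier series gets that earlier series' y values added to its own points, so the bars are drawn on top of one another instead of overlapping. A minimal usage sketch, assuming jQuery, jquery.flot, jquery.flot.time and this stack plugin are loaded and a placeholder div with id "chart" exists; the element id and data values are illustrative, not part of this commit:

$(function () {
    var series = [
        { label: "in time", color: "#3d22b3", data: [[1554076800000, 3]] },
        { label: "late",    color: "#b42222", data: [[1554076800000, 1]] }
    ];
    // stack: true puts every series into the same stack; the bars options mirror the template change later in this diff.
    $.plot($("#chart"), series, {
        xaxis: { mode: "time" },
        series: {
            stack: true,
            bars: { show: true, barWidth: 20 * 24 * 60 * 60 * 1000 }  // bars roughly 20 days wide
        }
    });
});

With these numbers the "late" bar is drawn from y=3 to y=4 on top of the "in time" bar, so the total bar height equals the combined count for the month.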
@@ -4,7 +4,9 @@
 
 from __future__ import absolute_import, print_function, unicode_literals
 
+import calendar
 import datetime
+import json
 
 from mock import patch
 from pyquery import PyQuery
@@ -197,11 +199,27 @@ class StatisticsTests(TestCase):
         if stats_type != "results":
             self.assertTrue(q('.review-stats td:contains("1")'))
 
-        # check chart
+        # check stacked chart
+        expected_date = datetime.date.today().replace(day=1)
+        expected_js_timestamp = calendar.timegm(expected_date.timetuple()) * 1000
         url = urlreverse(ietf.stats.views.review_stats, kwargs={ "stats_type": "time" })
         url += "?team={}".format(review_req.team.acronym)
         r = self.client.get(url)
         self.assertEqual(r.status_code, 200)
+        self.assertEqual(json.loads(r.context['data']), [
+            {"label": "in time", "color": "#3d22b3", "data": [[expected_js_timestamp, 0]]},
+            {"label": "late", "color": "#b42222", "data": [[expected_js_timestamp, 0]]}
+        ])
+        q = PyQuery(r.content)
+        self.assertTrue(q('.stats-time-graph'))
+
+        # check non-stacked chart
+        url = urlreverse(ietf.stats.views.review_stats, kwargs={ "stats_type": "time" })
+        url += "?team={}".format(review_req.team.acronym)
+        url += "&completion=not_completed"
+        r = self.client.get(url)
+        self.assertEqual(r.status_code, 200)
+        self.assertEqual(json.loads(r.context['data']), [{"color": "#3d22b3", "data": [[expected_js_timestamp, 0]]}])
         q = PyQuery(r.content)
         self.assertTrue(q('.stats-time-graph'))
 
@@ -1175,8 +1175,7 @@ def review_stats(request, stats_type=None, acronym=None):
     # choice
 
     possible_completion_types = add_url_to_choices([
-        ("completed_in_time", "Completed in time"),
-        ("completed_late", "Completed late"),
+        ("completed_in_time_or_late", "Completed (in time or late)"),
         ("not_completed", "Not completed"),
         ("average_assignment_to_closure_days", "Avg. compl. days"),
     ], lambda slug: build_review_stats_url(get_overrides={ "completion": slug, "result": None, "state": None }))
@@ -1198,25 +1197,37 @@ def review_stats(request, stats_type=None, acronym=None):
        selected_state = get_choice(request, "state", possible_states)

        if not selected_completion_type and not selected_result and not selected_state:
-            selected_completion_type = "completed_in_time"
+            selected_completion_type = "completed_in_time_or_late"

-        series_data = []
+        standard_color = '#3d22b3'
+        if selected_completion_type == 'completed_in_time_or_late':
+            graph_data = [
+                {'label': 'in time', 'color': standard_color, 'data': []},
+                {'label': 'late', 'color': '#b42222', 'data': []}
+            ]
+        else:
+            graph_data = [{'color': standard_color, 'data': []}]
        if selected_completion_type == "completed_combined":
            pass
        else:
            for d, aggr in aggrs:
-                v = 0
-                if selected_completion_type is not None:
-                    v = aggr[selected_completion_type]
+                v1 = 0
+                v2 = None
+                js_timestamp = calendar.timegm(d.timetuple()) * 1000
+                if selected_completion_type == 'completed_in_time_or_late':
+                    v1 = aggr['completed_in_time']
+                    v2 = aggr['completed_late']
+                elif selected_completion_type is not None:
+                    v1 = aggr[selected_completion_type]
                elif selected_result is not None:
-                    v = aggr["result"][selected_result]
+                    v1 = aggr["result"][selected_result]
                elif selected_state is not None:
-                    v = aggr["state"][selected_state]
+                    v1 = aggr["state"][selected_state]

-                series_data.append((calendar.timegm(d.timetuple()) * 1000, v))
-        data = json.dumps([{
-            "data": series_data
-        }])
+                graph_data[0]['data'].append((js_timestamp, v1))
+                if v2 is not None:
+                    graph_data[1]['data'].append((js_timestamp, v2))
+        data = json.dumps(graph_data)

    else: # tabular data
        extracted_data = extract_review_assignment_data(query_teams, query_reviewers, from_time, to_time, ordering=[level])
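For selections other than the combined in-time-or-late view (for example completion=not_completed, as exercised by the non-stacked test above), the view now emits a single unlabeled series and carries the color itself rather than relying on the template. A sketch of that fallback payload, with an illustrative timestamp and count:

// Non-stacked fallback: one series, colored by the view instead of the template options.
var fallbackSeries = [
    { color: "#3d22b3", data: [[1554076800000, 2]] }  // illustrative count for a single month
];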
@@ -220,7 +220,7 @@
        tickDecimals: {% if selected_completion_type == "average_assignment_to_closure_days" %}null{% else %}0{% endif %}
    },
    series: {
-        color: "#3d22b3",
+        stack: true,
        bars: {
            show: true,
            barWidth: 20 * 24 * 60 * 60 * 1000,
@@ -257,6 +257,7 @@
 {% if stats_type == "time" %}
   <script src="{% static 'flot/jquery.flot.min.js' %}"></script>
   <script src="{% static 'flot/jquery.flot.time.min.js' %}"></script>
+  <script src="{% static 'flot/jquery.flot.stack.min.js' %}"></script>
   <script src="{% static 'ietf/js/review-stats.js' %}"></script>
 {% endif %}
 {% endblock %}