=== modified file 'dashboard_app/templates/dashboard_app/_test_run_list_table.html'
@@ -2,6 +2,7 @@
<table class="demo_jui display" id="test_runs">
<thead>
<tr>
+ <th>{% trans "Device" %}</th>
<th>{% trans "Test Run" %}</th>
<th>{% trans "Test" %}</th>
<th>{% trans "Passes" %}</th>
@@ -13,6 +14,11 @@
<tbody>
{% for test_run in test_run_list %}
<tr>
+ <td>
+ {% for attribute in test_run.attributes.all %}
+ {% if attribute.name == "target" %}{{ attribute.value }}{% endif %}
+ {% endfor %}
+ </td>
+ <td><a href="{{ test_run.get_absolute_url }}"><code>{{ test_run.test }} results</code></a></td>
<td>{{ test_run.test }}</td>
<td>{{ test_run.get_summary_results.pass }}</td>
=== modified file 'dashboard_app/xmlrpc.py'
@@ -2,7 +2,7 @@
#
# Author: Zygmunt Krynicki <zygmunt.krynicki@linaro.org>
#
-# This file is part of Launch Control.
+# This file is part of LAVA Dashboard
#
# Launch Control is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
@@ -25,7 +25,9 @@
import logging
import re
import xmlrpclib
-
+import hashlib
+import json
+import os
from django.contrib.auth.models import User, Group
from django.core.urlresolvers import reverse
from django.db import IntegrityError, DatabaseError
@@ -105,9 +107,9 @@
logging.debug("Getting bundle stream")
bundle_stream = BundleStream.objects.accessible_by_principal(self.user).get(pathname=pathname)
except BundleStream.DoesNotExist:
- logging.debug("Bundle stream does not exists, aborting")
+ logging.debug("Bundle stream does not exist, aborting")
raise xmlrpclib.Fault(errors.NOT_FOUND,
- "Bundle stream not found")
+ "Bundle stream not found")
if not bundle_stream.can_upload(self.user):
raise xmlrpclib.Fault(
errors.FORBIDDEN, "You cannot upload to this stream")
@@ -243,6 +245,186 @@
'dashboard_app.views.redirect_to_bundle',
kwargs={'content_sha1':bundle.content_sha1}))
+ def put_pending(self, content, pathname, group_name):
+ """
+ Name
+ ----
+ `put_pending` (`content`, `pathname`, `group_name`)
+
+ Description
+ -----------
+ MultiNode internal call.
+
+ Stores the bundle until the coordinator allows the complete
+ bundle list to be aggregated from the list and submitted by put_group
+
+ Arguments
+ ---------
+ `content`: string
+ Full text of the bundle. This *MUST* be a valid JSON
+ document and it *SHOULD* match the "Dashboard Bundle Format
+ 1.0" schema. The SHA1 of the content *MUST* be unique or a
+ ``Fault(409, "...")`` is raised. This is used to protect
+ from simple duplicate submissions.
+ `pathname`: string
+ Pathname of the bundle stream where a new bundle should
+ be created and stored. This argument *MUST* designate a
+ pre-existing bundle stream or a ``Fault(404, "...")`` exception
+ is raised. In addition the user *MUST* have access
+ permission to upload bundles there or a ``Fault(403, "...")``
+ exception is raised. See below for access rules.
+ `group_name`: string
+ Unique ID of the MultiNode group. Other pending bundles will
+ be aggregated into a single result bundle for this group.
+
+ Return value
+ ------------
+ If all goes well this function returns the SHA1 of the content.
+
+ Exceptions raised
+ -----------------
+ 404
+ Either:
+
+ - Bundle stream not found
+ - Uploading to specified stream is not permitted
+ 409
+ Duplicate bundle content
+
+ Rules for bundle stream access
+ ------------------------------
+ The following rules govern bundle stream upload access rights:
+ - all anonymous streams are accessible
+ - personal streams are accessible to owners
+ - team streams are accessible to team members
+
+ """
+ try:
+ logging.debug("Getting bundle stream")
+ bundle_stream = BundleStream.objects.accessible_by_principal(self.user).get(pathname=pathname)
+ except BundleStream.DoesNotExist:
+ logging.debug("Bundle stream does not exist, aborting")
+ raise xmlrpclib.Fault(errors.NOT_FOUND,
+ "Bundle stream not found")
+ if not bundle_stream.can_upload(self.user):
+ raise xmlrpclib.Fault(
+ errors.FORBIDDEN, "You cannot upload to this stream")
+ try:
+ # add this to a list which put_group can use.
+ sha1 = hashlib.sha1()
+ sha1.update(content)
+ hexdigest = sha1.hexdigest()
+ groupfile = "/tmp/%s" % os.path.basename(group_name)
+ with open(groupfile, "a+") as grp_file:
+ grp_file.write("%s\n" % content)
+ return hexdigest
+ except Exception as e:
+ logging.debug("Dashboard pending submission caused an exception: %s" % e)
+
+ def put_group(self, content, content_filename, pathname, group_name):
+ """
+ Name
+ ----
+ `put_group` (`content`, `content_filename`, `pathname`, `group_name`)
+
+ Description
+ -----------
+ MultiNode internal call.
+
+ Adds the final bundle to the list, aggregates the list
+ into a single group bundle and submits the group bundle.
+
+ Arguments
+ ---------
+ `content`: string
+ Full text of the bundle. This *MUST* be a valid JSON
+ document and it *SHOULD* match the "Dashboard Bundle Format
+ 1.0" schema. The SHA1 of the content *MUST* be unique or a
+ ``Fault(409, "...")`` is raised. This is used to protect
+ from simple duplicate submissions.
+ `content_filename`: string
+ Name of the file that contained the text of the bundle. The
+ `content_filename` can be an arbitrary string and will be
+ stored along with the content for reference.
+ `pathname`: string
+ Pathname of the bundle stream where a new bundle should
+ be created and stored. This argument *MUST* designate a
+ pre-existing bundle stream or a ``Fault(404, "...")`` exception
+ is raised. In addition the user *MUST* have access
+ permission to upload bundles there or a ``Fault(403, "...")``
+ exception is raised. See below for access rules.
+ `group_name`: string
+ Unique ID of the MultiNode group. Other pending bundles will
+ be aggregated into a single result bundle for this group. At
+ least one other bundle must have already been submitted as
+ pending for the specified MultiNode group. LAVA Coordinator
+ causes the parent job to wait until all nodes have been marked
+ as having pending bundles, even if some bundles are empty.
+
+ Return value
+ ------------
+ If all goes well this function returns the full URL of the bundle.
+
+ Exceptions raised
+ -----------------
+ ValueError:
+ One or more bundles could not be converted to JSON prior
+ to aggregation.
+ 404
+ Either:
+
+ - Bundle stream not found
+ - Uploading to specified stream is not permitted
+ 409
+ Duplicate bundle content
+
+ Rules for bundle stream access
+ ------------------------------
+ The following rules govern bundle stream upload access rights:
+ - all anonymous streams are accessible
+ - personal streams are accessible to owners
+ - team streams are accessible to team members
+
+ """
+ grp_file = "/tmp/%s" % os.path.basename(group_name)
+ bundle_set = {}
+ bundle_set[group_name] = []
+ if os.path.isfile(grp_file):
+ with open(grp_file, "r") as grp_data:
+ grp_list = grp_data.readlines()
+ for testrun in grp_list:
+ bundle_set[group_name].append(json.loads(testrun))
+ # Note: now that we have the data from the group, the group data file could be re-used
+ # as an error log which is simpler than debugging through XMLRPC.
+ else:
+ raise ValueError("Aggregation failure for %s - check coordinator rpc_delay?" % group_name)
+ group_tests = []
+ try:
+ json_data = json.loads(content)
+ except ValueError:
+ logging.debug("Invalid JSON content within the sub_id zero bundle")
+ json_data = None
+ try:
+ bundle_set[group_name].append(json_data)
+ except Exception as e:
+ logging.debug("appending JSON caused exception %s" % e)
+ try:
+ for bundle_list in bundle_set[group_name]:
+ for test_run in bundle_list['test_runs']:
+ group_tests.append(test_run)
+ except Exception as e:
+ logging.debug("aggregating bundles caused exception %s" % e)
+ group_content = json.dumps({"test_runs": group_tests, "format": json_data['format'] if json_data else bundle_set[group_name][0]['format']})
+ bundle = self._put(group_content, content_filename, pathname)
+ logging.debug("Returning permalink to aggregated bundle for %s" % group_name)
+ permalink = self._context.request.build_absolute_uri(
+ reverse('dashboard_app.views.redirect_to_bundle',
+ kwargs={'content_sha1': bundle.content_sha1}))
+ # only delete the group file when things go well.
+ if os.path.isfile(grp_file):
+ os.remove(grp_file)
+ return permalink
+
def get(self, content_sha1):
"""
Name