[Piglit] [PATCH 2/9] framework: Split backend into a separate package

Dylan Baker <baker.dylan.c@gmail.com>
Tue Sep 23 17:55:49 PDT 2014


This moves all of the Backend classes into a separate package, out of the
main framework module, and uses that package's __init__.py to expose a set
of predefined objects from each of its modules directly through backends.
This API should be sufficient for most applications, although it is still
possible to access the modules in the backends package directly.

In many of the cases where we need to bypass the backends package API, the
calling code could use refactoring; some of that refactoring is done later
in this series.
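
A minimal usage sketch of the package-level API (the directory, metadata
values, and test data below are illustrative only, not taken from piglit):

    import tempfile

    import framework.backends as backends

    results_dir = tempfile.mkdtemp()

    # Look up a backend class by name and instantiate it the way run.py
    # does: a destination directory, a metadata dict, and backend options.
    backend = backends.get_backend('json')(
        results_dir,
        {'name': 'example-run', 'env': {}},  # JSONBackend needs name, env
        file_fsync=False)                    # file_* options go to FSyncMixin

    # In a real run data is a results.TestResult; a plain dict also encodes
    backend.write_test('spec/group/test1', {'result': 'pass', 'time': 0.1})
    backend.finalize()                       # close containers and the file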

Signed-off-by: Dylan Baker <dylanx.c.baker@intel.com>
---
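
As a sanity check on the abstract API, here is a hypothetical third-party
backend built on the new base classes (TextBackend and results.txt are
made up for illustration and are not part of this series):

    import os

    from framework.backends.abstract import Backend, FSyncMixin


    class TextBackend(FSyncMixin, Backend):
        """ Toy backend writing one 'name: result' line per test """

        def __init__(self, dest, metadata, **options):
            # self._file must exist before FSyncMixin.__init__ asserts on it
            self._file = open(os.path.join(dest, 'results.txt'), 'w')
            FSyncMixin.__init__(self, **options)
            self._file.write('name: {0}\n'.format(metadata['name']))

        def write_test(self, name, data):
            self._file.write('{0}: {1}\n'.format(name, data['result']))
            self._fsync()

        def finalize(self, metadata=None):
            self._file.close()
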
 framework/backends/__init__.py    |  41 ++++
 framework/backends/abstract.py    | 125 ++++++++++++
 framework/backends/json_.py       | 261 ++++++++++++++++++++++++
 framework/backends/junit.py       | 103 ++++++++++
 framework/programs/run.py         |  12 +-
 framework/results.py              | 419 +-------------------------------------
 framework/tests/backends_tests.py | 153 ++++++++++++++
 framework/tests/dmesg_tests.py    |   3 +-
 framework/tests/results_tests.py  | 126 ------------
 9 files changed, 694 insertions(+), 549 deletions(-)
 create mode 100644 framework/backends/__init__.py
 create mode 100644 framework/backends/abstract.py
 create mode 100644 framework/backends/json_.py
 create mode 100644 framework/backends/junit.py
 create mode 100644 framework/tests/backends_tests.py

diff --git a/framework/backends/__init__.py b/framework/backends/__init__.py
new file mode 100644
index 0000000..b2b3c85
--- /dev/null
+++ b/framework/backends/__init__.py
@@ -0,0 +1,41 @@
+# Copyright (c) 2014 Intel Corporation
+
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+""" Import public backend classes into one place """
+
+from .json_ import *
+from .junit import *
+
+
+# A list of available backends
+BACKENDS = ['json', 'junit']
+
+
+def get_backend(backend):
+    """ Returns a BackendInstance based on the string passed """
+    backends = {
+        'json': JSONBackend,
+        'junit': JUnitBackend,
+    }
+
+    # Be sure that we're exporting the same list of backends that we actually
+    # have available
+    assert sorted(backends.keys()) == sorted(BACKENDS)
+    return backends[backend]
diff --git a/framework/backends/abstract.py b/framework/backends/abstract.py
new file mode 100644
index 0000000..62df91b
--- /dev/null
+++ b/framework/backends/abstract.py
@@ -0,0 +1,125 @@
+# Copyright (c) 2014 Intel Corporation
+
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+
+""" Base classes for backends
+
+This module provides mixins and base classes for backend modules.
+
+"""
+
+import os
+import abc
+
+
+class FSyncMixin(object):
+    """ Mixin class that adds fsync support
+
+    This class provides an init method that sets self._file_sync from the
+    keyword argument file_fsync, and then provides an _fsync() method that
+    does an fsync if self._file_sync is truthy
+
+    """
+    def __init__(self, file_fsync=False, **options):
+        self._file_sync = file_fsync
+        assert self._file
+
+    def _fsync(self):
+        """ Sync the file to disk
+
+        If self._file_sync is truthy this will sync self._file to disk
+
+        """
+        if self._file_sync:
+            self._file.flush()
+            os.fsync(self._file.fileno())
+
+
+class Backend(object):
+    """ Abstract base class for summary backends
+
+    This class provides an abstract ancestor for classes implementing backends,
+    providing a light public API. The goal of this API is to be "just enough",
+    not a generic writing solution. To that end it provides two public methods,
+    'finalize', and 'write_test'. These two methods are designed to be just
+    enough to write a backend without needing format specific options.
+
+    Any locking that is necessary should be done in the child classes, as not
+    all potential backends need locking (for example, a SQL based backend might
+    be thread safe and not need to be locked during write)
+
+    """
+    __metaclass__ = abc.ABCMeta
+
+    @abc.abstractmethod
+    def __init__(self, dest, metadata, **options):
+        """ Generic constructor
+
+        The backend storage container should be created and made ready to write
+        into in the constructor, along with any other setup.
+
+        This method also writes any initial metadata as appropriate. No backend
+        is required to write all metadata, but each should write as much as
+        possible.
+
+        In addition it takes keyword arguments that define options for the
+        backends. Options should be prefixed to identify which backend they
+        apply to. For example, a json-specific value should be passed as
+        json_*, while a file-specific value should be passed as file_*
+
+        Arguments:
+        dest -- the place to write the results to. This should be handled
+                appropriately for the backend; this example calls open() on a
+                file, but other backends might want different options
+        metadata -- a dict or dict-like object that contains metadata to be
+                    written into the backend
+
+        """
+        self.dest = open(dest, 'w+')
+
+    @abc.abstractmethod
+    def finalize(self, metadata=None):
+        """ Write final metadata into to the store and close it
+
+        This method writes any final metadata into the store; what can be
+        written is implementation specific, and backends are free to ignore
+        any data that is not applicable.
+
+        metadata is not required, and Backend derived classes need to handle
+        being passed None correctly.
+
+        Keyword Arguments:
+        metadata -- Any metadata to be written after the tests; should be a
+                    dict or dict-like object
+
+
+        """
+
+    @abc.abstractmethod
+    def write_test(self, name, data):
+        """ Write a test into the backend store
+
+        This method writes an actual test into the backend store.
+
+        Arguments:
+        name -- the name of the test to be written
+        data -- a TestResult object representing the test data
+
+        """
diff --git a/framework/backends/json_.py b/framework/backends/json_.py
new file mode 100644
index 0000000..b37a7de
--- /dev/null
+++ b/framework/backends/json_.py
@@ -0,0 +1,261 @@
+# Copyright (c) 2014 Intel Corporation
+
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+""" Module providing json backend for piglit """
+
+import os
+import threading
+try:
+    import simplejson as json
+except ImportError:
+    import json
+import framework.status as status
+from .abstract import Backend, FSyncMixin
+
+__all__ = [
+    'CURRENT_JSON_VERSION',
+    'JSONBackend',
+    'piglit_encoder',
+]
+
+
+# The current version of the JSON results
+CURRENT_JSON_VERSION = 1
+
+
+def piglit_encoder(obj):
+    """ Encoder for piglit that can transform additional classes into json
+
+    Adds support for status.Status objects and for set() instances
+
+    """
+    if isinstance(obj, status.Status):
+        return str(obj)
+    elif isinstance(obj, set):
+        return list(obj)
+    return obj
+
+
+class JSONBackend(FSyncMixin, Backend):
+    '''
+    Writes to a JSON file stream
+
+    JSONWriter is threadsafe.
+
+    Example
+    -------
+
+    This call to ``json.dump``::
+        json.dump(
+            {
+                'a': [1, 2, 3],
+                'b': 4,
+                'c': {
+                    'x': 100,
+                },
+            }
+            file,
+            indent=JSONWriter.INDENT)
+
+    is equivalent to::
+        w = JSONWriter(file)
+        w._open_dict()
+        w._write_dict_item('a', [1, 2, 3])
+        w._write_dict_item('b', 4)
+        w._write_dict_item('c', {'x': 100})
+        w._close_dict()
+
+    which is also equivalent to::
+        w = JSONWriter(file)
+        w._open_dict()
+        w._write_dict_item('a', [1, 2, 3])
+        w._write_dict_item('b', 4)
+
+        w._write_dict_key('c')
+        w._open_dict()
+        w._write_dict_item('x', 100)
+        w._close_dict()
+
+        w._close_dict()
+    '''
+
+    INDENT = 4
+    _LOCK = threading.RLock()
+
+    def __init__(self, f, metadata, **options):
+        self._file = open(os.path.join(f, 'results.json'), 'w')
+        FSyncMixin.__init__(self, **options)
+        self.__indent_level = 0
+        self.__inhibit_next_indent = False
+        self.__encoder = json.JSONEncoder(indent=self.INDENT,
+                                          default=piglit_encoder)
+
+        # self.__is_collection_empty
+        #
+        # A stack that indicates if the current collection is empty
+        #
+        # When _open_dict is called, True is pushed onto the
+        # stack. When the first element is written to the newly
+        # opened dict, the top of the stack is set to False.
+        # When the _close_dict is called, the stack is popped.
+        #
+        # The top of the stack is element -1.
+        #
+        self.__is_collection_empty = []
+
+        # self._open_containers
+        #
+        # A FILO stack that stores container information. Each time
+        # self._open_dict() is called, 'dict' is added to the stack (other
+        # elements like 'list' could be added if support were added for
+        # handling them); each time self._close_dict() is called an element
+        # is removed. finalize() closes the remaining dicts and asserts that
+        # the stack is empty before the file is closed
+        self._open_containers = []
+
+        # Write initial metadata into the backend store
+        self._initialize(metadata)
+
+    def _initialize(self, metadata):
+        """ Write boilerplate json code
+
+        This writes all of the json except the actual tests.
+
+        Arguments:
+        metadata -- a dict-like object that must contain 'name' and 'env'
+                    keys. 'env' must itself be a dict-like object of
+                    environment information. All other keys are written into
+                    the options dictionary, and none of their values may be
+                    None
+
+        """
+        with self._LOCK:
+            self._open_dict()
+            self._write_dict_item('results_version', CURRENT_JSON_VERSION)
+            self._write_dict_item('name', metadata['name'])
+
+            self._write_dict_key('options')
+            self._open_dict()
+            for key, value in metadata.iteritems():
+                # Don't write env or name into the options dictionary
+                if key in ['env', 'name']:
+                    continue
+
+                # Loading a None value will break resume, and is a bug
+                assert value is not None, "Value {} is NoneType".format(key)
+                self._write_dict_item(key, value)
+            self._close_dict()
+
+            for key, value in metadata['env'].iteritems():
+                self._write_dict_item(key, value)
+
+            # Open the tests dictionary so that tests can be written
+            self._write_dict_key('tests')
+            self._open_dict()
+
+    def finalize(self, metadata=None):
+        """ End json serialization and cleanup
+
+        This method is called after all of the tests are written; it closes
+        any containers that are still open and closes the file
+
+        """
+        # Ensure that there are no tests still writing by taking the lock here
+        with self._LOCK:
+            # Close the tests dictionary
+            self._close_dict()
+
+            # Write closing metadata
+            if metadata:
+                for key, value in metadata.iteritems():
+                    self._write_dict_item(key, value)
+
+            # Close the root dictionary object
+            self._close_dict()
+
+            # Close the file.
+            assert self._open_containers == [], \
+                "containers stack: {0}".format(self._open_containers)
+            self._file.close()
+
+    def __write_indent(self):
+        if self.__inhibit_next_indent:
+            self.__inhibit_next_indent = False
+            return
+        else:
+            i = ' ' * self.__indent_level * self.INDENT
+            self._file.write(i)
+
+    def __write(self, obj):
+        lines = list(self.__encoder.encode(obj).split('\n'))
+        n = len(lines)
+        for i in range(n):
+            self.__write_indent()
+            self._file.write(lines[i])
+            if i != n - 1:
+                self._file.write('\n')
+
+    def _open_dict(self):
+        self.__write_indent()
+        self._file.write('{')
+
+        self.__indent_level += 1
+        self.__is_collection_empty.append(True)
+        self._open_containers.append('dict')
+        self._fsync()
+
+    def _close_dict(self):
+        self.__indent_level -= 1
+        self.__is_collection_empty.pop()
+
+        self._file.write('\n')
+        self.__write_indent()
+        self._file.write('}')
+        assert self._open_containers[-1] == 'dict'
+        self._open_containers.pop()
+        self._fsync()
+
+    def _write_dict_item(self, key, value):
+        # Write key.
+        self._write_dict_key(key)
+
+        # Write value.
+        self.__write(value)
+
+        self._fsync()
+
+    def _write_dict_key(self, key):
+        # Write comma if this is not the initial item in the dict.
+        if self.__is_collection_empty[-1]:
+            self.__is_collection_empty[-1] = False
+        else:
+            self._file.write(',')
+
+        self._file.write('\n')
+        self.__write(key)
+        self._file.write(': ')
+
+        self.__inhibit_next_indent = True
+        self._fsync()
+
+    def write_test(self, name, data):
+        """ Write a test into the JSON tests dictionary """
+        with self._LOCK:
+            self._write_dict_item(name, data)
diff --git a/framework/backends/junit.py b/framework/backends/junit.py
new file mode 100644
index 0000000..9156d4f
--- /dev/null
+++ b/framework/backends/junit.py
@@ -0,0 +1,103 @@
+# Copyright (c) 2014 Intel Corporation
+
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+""" Module implementing a JUnitBackend for piglit """
+
+import os
+import re
+import posixpath
+try:
+    from lxml import etree
+except ImportError:
+    import xml.etree.cElementTree as etree
+from .abstract import Backend, FSyncMixin
+
+__all__ = [
+    'JUnitBackend',
+]
+
+
+class JUnitBackend(FSyncMixin, Backend):
+    """ Backend that produces ANT JUnit XML
+
+    Based on the following schema:
+    https://svn.jenkins-ci.org/trunk/hudson/dtkit/dtkit-format/dtkit-junit-model/src/main/resources/com/thalesgroup/dtkit/junit/model/xsd/junit-7.xsd
+
+    """
+    _REPLACE = re.compile(r'[/\\]')
+
+    def __init__(self, dest, metadata, **options):
+        self._file = open(os.path.join(dest, 'results.xml'), 'w')
+        FSyncMixin.__init__(self, **options)
+
+        # Write initial headers and other data that etree cannot write for us
+        self._file.write('<?xml version="1.0" encoding="UTF-8" ?>\n')
+        self._file.write('<testsuites>\n')
+        self._file.write(
+            '<testsuite name="piglit" tests="{}">\n'.format(
+                metadata['test_count']))
+        self._test_suffix = metadata["test_suffix"]
+
+    def finalize(self, metadata=None):
+        self._file.write('</testsuite>\n')
+        self._file.write('</testsuites>\n')
+        self._file.close()
+
+    def write_test(self, name, data):
+        # Split the name of the test and the group (what junit refers to as
+        # classname), and replace piglit's '/'-separated groups with '.',
+        # after replacing any '.' with '_' (so we don't get false groups).
+        # Also replace any '\\' accidentally inserted on Windows
+        classname, testname = posixpath.split(name)
+        classname = classname.replace('.', '_')
+        classname = JUnitBackend._REPLACE.sub('.', classname)
+
+        # Add the test to the piglit group rather than directly to the root
+        # group; this allows piglit junit results to be used in conjunction
+        # with results from other suites
+        # TODO: It would be nice if other suites integrating with piglit could
+        # set different root names.
+        classname = 'piglit.' + classname
+
+        # Create the root element
+        element = etree.Element('testcase', name=testname + self._test_suffix,
+                                classname=classname,
+                                time=str(data['time']),
+                                status=str(data['result']))
+
+        # Add stdout
+        out = etree.SubElement(element, 'system-out')
+        out.text = data['out']
+
+        # Add stderr
+        err = etree.SubElement(element, 'system-err')
+        err.text = data['err']
+
+        # Add the relevant result element; a 'pass' result doesn't need one
+        # of these statuses
+        if data['result'] == 'skip':
+            etree.SubElement(element, 'skipped')
+        elif data['result'] in ['warn', 'fail', 'dmesg-warn', 'dmesg-fail']:
+            etree.SubElement(element, 'failure')
+        elif data['result'] == 'crash':
+            etree.SubElement(element, 'error')
+
+        self._file.write(etree.tostring(element))
+        self._file.write('\n')
diff --git a/framework/programs/run.py b/framework/programs/run.py
index 9c586f3..a784080 100644
--- a/framework/programs/run.py
+++ b/framework/programs/run.py
@@ -31,13 +31,13 @@ import ConfigParser
 import framework.core as core
 import framework.results
 import framework.profile
+import framework.backends as backends
 
 __all__ = ['run',
            'resume']
 
 
 _PLATFORMS = ["glx", "x11_egl", "wayland", "gbm", "mixed_glx_egl"]
-_BACKENDS = ['json', 'junit']
 
 
 def _default_platform():
@@ -80,9 +80,9 @@ def _default_backend():
     """
     try:
         backend = core.PIGLIT_CONFIG.get('core', 'backend')
-        if backend not in _BACKENDS:
+        if backend not in backends.BACKENDS:
             print('Backend is not valid\n',
-                  'valid backends are: {}'.format(' '.join(_BACKENDS)),
+                  'valid backends are: {}'.format(' '.join(backends.BACKENDS)),
                   file=sys.stderr)
             sys.exit(1)
         return backend
@@ -129,7 +129,7 @@ def _run_parser(input_):
                              "(can be used more than once)")
     parser.add_argument('-b', '--backend',
                         default=_default_backend(),
-                        choices=framework.results.BACKENDS,
+                        choices=backends.BACKENDS,
                         help='select a results backend to use')
     conc_parser = parser.add_mutually_exclusive_group()
     conc_parser.add_argument('-c', '--all-concurrent',
@@ -257,7 +257,7 @@ def run(input_):
     options['test_suffix'] = args.junit_suffix
 
     # Begin json.
-    backend = framework.results.get_backend(args.backend)(
+    backend = backends.get_backend(args.backend)(
         args.results_path,
         options,
         file_fsync=opts.sync)
@@ -309,7 +309,7 @@ def resume(input_):
     results.options['env'] = core.collect_system_info()
 
     # Resume only works with the JSON backend
-    backend = framework.results.get_backend('json')(
+    backend = backends.get_backend('json')(
         args.results_path,
         results.options,
         file_fsync=opts.sync)
diff --git a/framework/results.py b/framework/results.py
index b567553..5a2ebf8 100644
--- a/framework/results.py
+++ b/framework/results.py
@@ -23,423 +23,23 @@
 
 from __future__ import print_function
 import os
-import re
 import sys
-import abc
-import threading
-import posixpath
 from cStringIO import StringIO
 try:
     import simplejson as json
 except ImportError:
     import json
-try:
-    from lxml import etree
-except ImportError:
-    import xml.etree.cElementTree as etree
 
 import framework.status as status
+from framework.backends import (CURRENT_JSON_VERSION, piglit_encoder,
+                                JSONBackend)
 
 __all__ = [
     'TestrunResult',
     'TestResult',
     'load_results',
-    'get_backend',
-    'BACKENDS',
 ]
 
-# A list of available backends
-BACKENDS = ['json', 'junit']
-
-# The current version of the JSON results
-CURRENT_JSON_VERSION = 1
-
-
-def _piglit_encoder(obj):
-    """ Encoder for piglit that can transform additional classes into json
-
-    Adds support for status.Status objects and for set() instances
-
-    """
-    if isinstance(obj, status.Status):
-        return str(obj)
-    elif isinstance(obj, set):
-        return list(obj)
-    return obj
-
-
-class FSyncMixin(object):
-    """ Mixin class that adds fsync support
-
-    This class provides an init method that sets self._file_sync from a keyword
-    arugment file_fsync, and then provides an _fsync() method that does an
-    fsync if self._file_sync is truthy
-
-    """
-    def __init__(self, file_fsync=False, **options):
-        self._file_sync = file_fsync
-        assert self._file
-
-    def _fsync(self):
-        """ Sync the file to disk
-
-        If self._fsync is truthy this will sync self._file to disk
-
-        """
-        if self._file_sync:
-            self._file.flush()
-            os.fsync(self._file.fileno())
-
-
-class Backend(object):
-    """ Abstract base class for summary backends
-
-    This class provides an abstract ancestor for classes implementing backends,
-    providing a light public API. The goal of this API is to be "just enough",
-    not a generic writing solution. To that end it provides two public methods,
-    'finalize', and 'write_test'. These two methods are designed to be just
-    enough to write a backend without needing format specific options.
-
-    Any locking that is necessary should be done in the child classes, as not
-    all potential backends need locking (for example, a SQL based backend might
-    be thread safe and not need to be locked during write)
-
-    """
-    __metaclass__ = abc.ABCMeta
-
-    @abc.abstractmethod
-    def __init__(self, dest, metadata, **options):
-        """ Generic constructor
-
-        The backend storage container should be created and made ready to write
-        into in the constructor, along with any other setup.
-
-        This method also write any initial metadata as appropriate. No backend
-        is required to write all metadata, but each should write as much as
-        possible.
-
-        In addition it takes keyword arguments that define options for the
-        backends. options should be prefixed to identify which backends they
-        apply to. For example, a json specific value should be passed as
-        json_*, while a file specific value should be passed as file_*)
-
-        Arguments:
-        dest -- the place to write the results to. This should be correctly
-                handled based on the backend, the example is calls open() on a
-                file, but other backends might want different options
-        metadata -- a dict or dict-like object that contains metadata to be
-                    written into the backend
-
-        """
-        self.dest = open(dest, 'w+')
-
-    @abc.abstractmethod
-    def finalize(self, metadata=None):
-        """ Write final metadata into to the store and close it
-
-        This method writes any final metadata into the store, what can be
-        written is implementation specific, backends are free to ignore any
-        data that is not applicable.
-
-        metadata is not required, and Backend derived classes need to handle
-        being passed None correctly.
-
-        Keyword Arguments:
-        metadata -- Any metadata to be written in after the tests, should be a
-                    dict or dict-like object
-
-
-        """
-
-    @abc.abstractmethod
-    def write_test(self, name, data):
-        """ Write a test into the backend store
-
-        This method writes an actual test into the backend store.
-
-        Arguments:
-        name -- the name of the test to be written
-        data -- a TestResult object representing the test data
-
-        """
-
-
-class JSONBackend(FSyncMixin, Backend):
-    '''
-    Writes to a JSON file stream
-
-    JSONWriter is threadsafe.
-
-    Example
-    -------
-
-    This call to ``json.dump``::
-        json.dump(
-            {
-                'a': [1, 2, 3],
-                'b': 4,
-                'c': {
-                    'x': 100,
-                },
-            }
-            file,
-            indent=JSONWriter.INDENT)
-
-    is equivalent to::
-        w = JSONWriter(file)
-        w._open_dict()
-        w._write_dict_item('a', [1, 2, 3])
-        w._write_dict_item('b', 4)
-        w._write_dict_item('c', {'x': 100})
-        w._close_dict()
-
-    which is also equivalent to::
-        w = JSONWriter(file)
-        w._open_dict()
-        w._write_dict_item('a', [1, 2, 3])
-        w._write_dict_item('b', 4)
-
-        w._write_dict_key('c')
-        w._open_dict()
-        w._write_dict_item('x', 100)
-        w._close_dict()
-
-        w._close_dict()
-    '''
-
-    INDENT = 4
-    _LOCK = threading.RLock()
-
-    def __init__(self, f, metadata, **options):
-        self._file = open(os.path.join(f, 'results.json'), 'w')
-        FSyncMixin.__init__(self, **options)
-        self.__indent_level = 0
-        self.__inhibit_next_indent = False
-        self.__encoder = json.JSONEncoder(indent=self.INDENT,
-                                          default=_piglit_encoder)
-
-        # self.__is_collection_empty
-        #
-        # A stack that indicates if the currect collection is empty
-        #
-        # When _open_dict is called, True is pushed onto the
-        # stack. When the first element is written to the newly
-        # opened dict, the top of the stack is set to False.
-        # When the _close_dict is called, the stack is popped.
-        #
-        # The top of the stack is element -1.
-        #
-        self.__is_collection_empty = []
-
-        # self._open_containers
-        #
-        # A FILO stack that stores container information, each time
-        # self._open_dict() 'dict' is added to the stack, (other elements like
-        # 'list' could be added if support was added to JSONWriter for handling
-        # them), each to time self._close_dict() is called an element is
-        # removed. When self.close_json() is called each element of the stack
-        # is popped and written into the json
-        self._open_containers = []
-
-        # Write initial metadata into the backend store
-        self._initialize(metadata)
-
-    def _initialize(self, metadata):
-        """ Write boilerplate json code
-
-        This writes all of the json except the actual tests.
-
-        Arguments:
-        options -- any values to be put in the options dictionary, must be a
-                   dict-like object
-        name -- the name of the test
-        env -- any environment information to be written into the results, must
-               be a dict-like object
-
-        """
-        with self._LOCK:
-            self._open_dict()
-            self._write_dict_item('results_version', CURRENT_JSON_VERSION)
-            self._write_dict_item('name', metadata['name'])
-
-            self._write_dict_key('options')
-            self._open_dict()
-            for key, value in metadata.iteritems():
-                # Dont' write env or name into the options dictionary
-                if key in ['env', 'name']:
-                    continue
-
-                # Loading a NoneType will break resume, and are a bug
-                assert value is not None, "Value {} is NoneType".format(key)
-                self._write_dict_item(key, value)
-            self._close_dict()
-
-            for key, value in metadata['env'].iteritems():
-                self._write_dict_item(key, value)
-
-            # Open the tests dictinoary so that tests can be written
-            self._write_dict_key('tests')
-            self._open_dict()
-
-    def finalize(self, metadata=None):
-        """ End json serialization and cleanup
-
-        This method is called after all of tests are written, it closes any
-        containers that are still open and closes the file
-
-        """
-        # Ensure that there are no tests still writing by taking the lock here
-        with self._LOCK:
-            # Close the tests dictionary
-            self._close_dict()
-
-            # Write closing metadata
-            if metadata:
-                for key, value in metadata.iteritems():
-                    self._write_dict_item(key, value)
-
-            # Close the root dictionary object
-            self._close_dict()
-
-            # Close the file.
-            assert self._open_containers == [], \
-                "containers stack: {0}".format(self._open_containers)
-            self._file.close()
-
-    def __write_indent(self):
-        if self.__inhibit_next_indent:
-            self.__inhibit_next_indent = False
-            return
-        else:
-            i = ' ' * self.__indent_level * self.INDENT
-            self._file.write(i)
-
-    def __write(self, obj):
-        lines = list(self.__encoder.encode(obj).split('\n'))
-        n = len(lines)
-        for i in range(n):
-            self.__write_indent()
-            self._file.write(lines[i])
-            if i != n - 1:
-                self._file.write('\n')
-
-    def _open_dict(self):
-        self.__write_indent()
-        self._file.write('{')
-
-        self.__indent_level += 1
-        self.__is_collection_empty.append(True)
-        self._open_containers.append('dict')
-        self._fsync()
-
-    def _close_dict(self):
-        self.__indent_level -= 1
-        self.__is_collection_empty.pop()
-
-        self._file.write('\n')
-        self.__write_indent()
-        self._file.write('}')
-        assert self._open_containers[-1] == 'dict'
-        self._open_containers.pop()
-        self._fsync()
-
-    def _write_dict_item(self, key, value):
-        # Write key.
-        self._write_dict_key(key)
-
-        # Write value.
-        self.__write(value)
-
-        self._fsync()
-
-    def _write_dict_key(self, key):
-        # Write comma if this is not the initial item in the dict.
-        if self.__is_collection_empty[-1]:
-            self.__is_collection_empty[-1] = False
-        else:
-            self._file.write(',')
-
-        self._file.write('\n')
-        self.__write(key)
-        self._file.write(': ')
-
-        self.__inhibit_next_indent = True
-        self._fsync()
-
-    def write_test(self, name, data):
-        """ Write a test into the JSON tests dictionary """
-        with self._LOCK:
-            self._write_dict_item(name, data)
-
-
-class JUnitBackend(FSyncMixin, Backend):
-    """ Backend that produces ANT JUnit XML
-
-    Based on the following schema:
-    https://svn.jenkins-ci.org/trunk/hudson/dtkit/dtkit-format/dtkit-junit-model/src/main/resources/com/thalesgroup/dtkit/junit/model/xsd/junit-7.xsd
-
-    """
-    _REPLACE = re.compile(r'[/\\]')
-
-    def __init__(self, dest, metadata, **options):
-        self._file = open(os.path.join(dest, 'results.xml'), 'w')
-        FSyncMixin.__init__(self, **options)
-
-        # Write initial headers and other data that etree cannot write for us
-        self._file.write('<?xml version="1.0" encoding="UTF-8" ?>\n')
-        self._file.write('<testsuites>\n')
-        self._file.write(
-            '<testsuite name="piglit" tests="{}">\n'.format(
-                metadata['test_count']))
-        self._test_suffix = metadata["test_suffix"]
-
-    def finalize(self, metadata=None):
-        self._file.write('</testsuite>\n')
-        self._file.write('</testsuites>\n')
-        self._file.close()
-
-    def write_test(self, name, data):
-        # Split the name of the test and the group (what junit refers to as
-        # classname), and replace piglits '/' separated groups with '.', after
-        # replacing any '.' with '_' (so we don't get false groups). Also
-        # remove any '\\' that has been inserted on windows accidentally
-        classname, testname = posixpath.split(name)
-        classname = classname.replace('.', '_')
-        classname = JUnitBackend._REPLACE.sub('.', classname)
-
-        # Add the test to the piglit group rather than directly to the root
-        # group, this allows piglit junit to be used in conjunction with other
-        # piglit
-        # TODO: It would be nice if other suites integrating with piglit could
-        # set different root names.
-        classname = 'piglit.' + classname
-
-        # Create the root element
-        element = etree.Element('testcase', name=testname + self._test_suffix,
-                                classname=classname,
-                                time=str(data['time']),
-                                status=str(data['result']))
-
-        # Add stdout
-        out = etree.SubElement(element, 'system-out')
-        out.text = data['out']
-
-        # Add stderr
-        err = etree.SubElement(element, 'system-err')
-        err.text = data['err']
-
-        # Add relevant result value, if the result is pass then it doesn't need
-        # one of these statuses
-        if data['result'] == 'skip':
-            etree.SubElement(element, 'skipped')
-        elif data['result'] in ['warn', 'fail', 'dmesg-warn', 'dmesg-fail']:
-            etree.SubElement(element, 'failure')
-        elif data['result'] == 'crash':
-            etree.SubElement(element, 'error')
-
-        self._file.write(etree.tostring(element))
-        self._file.write('\n')
-
 
 class TestResult(dict):
     def __init__(self, *args):
@@ -598,7 +198,7 @@ class TestrunResult(object):
         with open(file_, 'w') as f:
             json.dump(dict((k, v) for k, v in self.__dict__.iteritems()
                            if k in self.serialized_keys),
-                      f, default=_piglit_encoder, indent=JSONBackend.INDENT)
+                      f, default=piglit_encoder, indent=JSONBackend.INDENT)
 
 
 def load_results(filename):
@@ -677,19 +277,6 @@ def update_results(results, filepath):
     return results
 
 
-def get_backend(backend):
-    """ Returns a BackendInstance based on the string passed """
-    backends = {
-        'json': JSONBackend,
-        'junit': JUnitBackend,
-    }
-
-    # Be sure that we're exporting the same list of backends that we actually
-    # have available
-    assert backends.keys() == BACKENDS
-    return backends[backend]
-
-
 def _update_zero_to_one(results):
     """ Update version zero results to version 1 results
 
diff --git a/framework/tests/backends_tests.py b/framework/tests/backends_tests.py
new file mode 100644
index 0000000..921c60e
--- /dev/null
+++ b/framework/tests/backends_tests.py
@@ -0,0 +1,153 @@
+# Copyright (c) 2014 Intel Corporation
+
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+""" Tests for the backend package """
+
+import os
+try:
+    from lxml import etree
+except ImportError:
+    import xml.etree.cElementTree as etree
+import nose.tools as nt
+import framework.results as results
+import framework.backends as backends
+import framework.tests.utils as utils
+
+
+BACKEND_INITIAL_META = {
+    'name': 'name',
+    'env': {},
+    'test_count': 0,
+    'test_suffix': '',
+}
+
+JUNIT_SCHEMA = 'framework/tests/schema/junit-7.xsd'
+
+
+def test_initialize_jsonbackend():
+    """ Test that JSONBackend initializes
+
+    This needs to be handled separately from the others because it requires
+    arguments
+
+    """
+    with utils.tempdir() as tdir:
+        backend = backends.JSONBackend(tdir, BACKEND_INITIAL_META)
+        assert isinstance(backend, backends.JSONBackend)
+
+
+@utils.nose_generator
+def test_get_backend():
+    """ Generate tests to get various backends """
+    # We use a hand generated list here to ensure that we are getting what we
+    # expect
+    backends_ = {
+        'json': backends.JSONBackend,
+    }
+
+    check = lambda n, i: nt.assert_is(backends.get_backend(n), i)
+
+    for name, inst in backends_.iteritems():
+        check.description = 'get_backend({0}) returns {0} backend'.format(name)
+        yield check, name, inst
+
+
+class TestJunitNoTests(utils.StaticDirectory):
+    @classmethod
+    def setup_class(cls):
+        super(TestJunitNoTests, cls).setup_class()
+        test = backends.JUnitBackend(cls.tdir, BACKEND_INITIAL_META)
+        test.finalize()
+        cls.test_file = os.path.join(cls.tdir, 'results.xml')
+
+    def test_xml_well_formed(self):
+        """ JUnitBackend.__init__ and finalize produce well formed xml
+
+        While it will produce valid XML, it cannot produce valid JUnit, since
+        JUnit requires at least one test to be valid
+
+        """
+        try:
+            etree.parse(self.test_file)
+        except Exception as e:
+            raise AssertionError(e)
+
+
+class TestJUnitSingleTest(TestJunitNoTests):
+    @classmethod
+    def setup_class(cls):
+        super(TestJUnitSingleTest, cls).setup_class()
+        cls.test_file = os.path.join(cls.tdir, 'results.xml')
+        test = backends.JUnitBackend(cls.tdir, BACKEND_INITIAL_META)
+        test.write_test(
+            'a/test/group/test1',
+            results.TestResult({
+                'time': 1.2345,
+                'result': 'pass',
+                'out': 'this is stdout',
+                'err': 'this is stderr',
+            })
+        )
+        test.finalize()
+
+    def test_xml_well_formed(self):
+        """ JUnitBackend.write_test() (once) produces well formed xml """
+        super(TestJUnitSingleTest, self).test_xml_well_formed()
+
+    def test_xml_valid(self):
+        """ JUnitBackend.write_test() (once) produces valid xml """
+        schema = etree.XMLSchema(file=JUNIT_SCHEMA)
+        with open(self.test_file, 'r') as f:
+            assert schema.validate(etree.parse(f)), 'xml is not valid'
+
+
+class TestJUnitMultiTest(TestJUnitSingleTest):
+    @classmethod
+    def setup_class(cls):
+        super(TestJUnitMultiTest, cls).setup_class()
+        cls.test_file = os.path.join(cls.tdir, 'results.xml')
+        test = backends.JUnitBackend(cls.tdir, BACKEND_INITIAL_META)
+        test.write_test(
+            'a/test/group/test1',
+            results.TestResult({
+                'time': 1.2345,
+                'result': 'pass',
+                'out': 'this is stdout',
+                'err': 'this is stderr',
+            })
+        )
+        test.write_test(
+            'a/different/test/group/test2',
+            results.TestResult({
+                'time': 1.2345,
+                'result': 'fail',
+                'out': 'this is stdout',
+                'err': 'this is stderr',
+            })
+        )
+        test.finalize()
+
+    def test_xml_well_formed(self):
+        """ JUnitBackend.write_test() (twice) produces well formed xml """
+        super(TestJUnitMultiTest, self).test_xml_well_formed()
+
+    def test_xml_valid(self):
+        """ JUnitBackend.write_test() (twice) produces valid xml """
+        super(TestJUnitMultiTest, self).test_xml_valid()
diff --git a/framework/tests/dmesg_tests.py b/framework/tests/dmesg_tests.py
index 108b76e..a4d7627 100644
--- a/framework/tests/dmesg_tests.py
+++ b/framework/tests/dmesg_tests.py
@@ -38,6 +38,7 @@ import framework.exectest
 import framework.gleantest
 import framework.shader_test
 import framework.glsl_parser_test
+import framework.backends
 import framework.tests.utils as utils
 
 
@@ -324,7 +325,7 @@ def test_json_serialize_updated_result():
     test._new_messages = ['some', 'new', 'messages']
     result = test.update_result(result)
 
-    encoder = json.JSONEncoder(default=framework.results._piglit_encoder)
+    encoder = json.JSONEncoder(default=framework.backends.piglit_encoder)
     encoder.encode(result)
 
 
diff --git a/framework/tests/results_tests.py b/framework/tests/results_tests.py
index 9c2ee32..a020775 100644
--- a/framework/tests/results_tests.py
+++ b/framework/tests/results_tests.py
@@ -23,26 +23,12 @@
 
 import os
 import json
-try:
-    from lxml import etree
-except ImportError:
-    import xml.etree.cElementTree as etree
 import nose.tools as nt
 import framework.tests.utils as utils
 import framework.results as results
 import framework.status as status
 
 
-BACKEND_INITIAL_META = {
-    'name': 'name',
-    'env': {},
-    'test_count': 0,
-    'test_suffix': '',
-}
-
-JUNIT_SCHEMA = 'framework/tests/schema/junit-7.xsd'
-
-
 def check_initialize(target):
     """ Check that a class initializes without error """
     func = target()
@@ -69,18 +55,6 @@ def test_generate_initialize():
         yield yieldable, target
 
 
-def test_initialize_jsonbackend():
-    """ Test that JSONBackend initializes
-
-    This needs to be handled separately from the others because it requires
-    arguments
-
-    """
-    with utils.tempdir() as tdir:
-        func = results.JSONBackend(tdir, BACKEND_INITIAL_META)
-        assert isinstance(func, results.JSONBackend)
-
-
 def test_load_results_folder_as_main():
     """ Test that load_results takes a folder with a file named main in it """
     with utils.tempdir() as tdir:
@@ -173,103 +147,3 @@ def test_update_results_old():
         res = results.update_results(base, f.name)
 
     nt.assert_equal(res.results_version, results.CURRENT_JSON_VERSION)
-
-
-@utils.nose_generator
-def test_get_backend():
-    """ Generate tests to get various backends """
-    # We use a hand generated list here to ensure that we are getting what we
-    # expect
-    backends = {
-        'json': results.JSONBackend,
-    }
-
-    check = lambda n, i: nt.assert_is(results.get_backend(n), i)
-
-    for name, inst in backends.iteritems():
-        check.description = 'get_backend({0}) returns {0} backend'.format(name)
-        yield check, name, inst
-
-
-class TestJunitNoTests(utils.StaticDirectory):
-    @classmethod
-    def setup_class(cls):
-        super(TestJunitNoTests, cls).setup_class()
-        test = results.JUnitBackend(cls.tdir, BACKEND_INITIAL_META)
-        test.finalize()
-        cls.test_file = os.path.join(cls.tdir, 'results.xml')
-
-    def test_xml_well_formed(self):
-        """ JUnitBackend.__init__ and finalize produce well formed xml
-
-        While it will produce valid XML, it cannot produc valid JUnit, since
-        JUnit requires at least one test to be valid
-
-        """
-        try:
-            etree.parse(self.test_file)
-        except Exception as e:
-            raise AssertionError(e)
-
-
-class TestJUnitSingleTest(TestJunitNoTests):
-    @classmethod
-    def setup_class(cls):
-        super(TestJUnitSingleTest, cls).setup_class()
-        cls.test_file = os.path.join(cls.tdir, 'results.xml')
-        test = results.JUnitBackend(cls.tdir, BACKEND_INITIAL_META)
-        test.write_test(
-            'a/test/group/test1',
-            results.TestResult({
-                'time': 1.2345,
-                'result': 'pass',
-                'out': 'this is stdout',
-                'err': 'this is stderr',
-            })
-        )
-        test.finalize()
-
-    def test_xml_well_formed(self):
-        """ JUnitBackend.write_test() (once) produces well formed xml """
-        super(TestJUnitSingleTest, self).test_xml_well_formed()
-
-    def test_xml_valid(self):
-        """ JUnitBackend.write_test() (once) produces valid xml """
-        schema = etree.XMLSchema(file=JUNIT_SCHEMA)
-        with open(self.test_file, 'r') as f:
-            assert schema.validate(etree.parse(f)), 'xml is not valid'
-
-
-class TestJUnitMultiTest(TestJUnitSingleTest):
-    @classmethod
-    def setup_class(cls):
-        super(TestJUnitMultiTest, cls).setup_class()
-        cls.test_file = os.path.join(cls.tdir, 'results.xml')
-        test = results.JUnitBackend(cls.tdir, BACKEND_INITIAL_META)
-        test.write_test(
-            'a/test/group/test1',
-            results.TestResult({
-                'time': 1.2345,
-                'result': 'pass',
-                'out': 'this is stdout',
-                'err': 'this is stderr',
-            })
-        )
-        test.write_test(
-            'a/different/test/group/test2',
-            results.TestResult({
-                'time': 1.2345,
-                'result': 'fail',
-                'out': 'this is stdout',
-                'err': 'this is stderr',
-            })
-        )
-        test.finalize()
-
-    def test_xml_well_formed(self):
-        """ JUnitBackend.write_test() (twice) produces well formed xml """
-        super(TestJUnitMultiTest, self).test_xml_well_formed()
-
-    def test_xml_valid(self):
-        """ JUnitBackend.write_test() (twice) produces valid xml """
-        super(TestJUnitMultiTest, self).test_xml_valid()
-- 
2.1.1


