[Piglit] [PATCH 2/2] framework: add support for gzip compression for json results files

Dylan Baker baker.dylan.c at gmail.com
Fri May 29 14:43:33 PDT 2015


This adds support to the json backend for the compression framework,
specifically gzip compression of the json results files.
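
For reference, a minimal standalone sketch of the compressor-selection
pattern this introduces (the filename and payload below are illustrative,
not part of the patch; like the patch itself it assumes Python 2, where
gzip.open(mode='w') accepts str payloads). The method is taken from the
PIGLIT_COMPRESSION environment variable, then the [core] compression
option in piglit.conf, then the built-in default, which this patch
changes to 'gz':

    import functools
    import gzip
    import os

    # Mirrors the _COMPRESSORS table added to framework/backends/abstract.py:
    # the key doubles as the file suffix ('gz', not 'gzip').
    compressors = {
        'none': functools.partial(open, mode='w'),
        'gz': functools.partial(gzip.open, mode='w'),
    }

    # Selection order used by write_compressed(): environment variable first,
    # piglit.conf second (omitted here), built-in default ('gz') last.
    method = os.environ.get('PIGLIT_COMPRESSION') or 'gz'
    assert method in compressors, \
        'unsupported compression method {}'.format(method)

    suffix = '' if method == 'none' else '.' + method
    with compressors[method]('results.json' + suffix) as f:
        f.write('{"example": true}')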

Signed-off-by: Dylan Baker <dylanx.c.baker at intel.com>
---
 framework/backends/__init__.py               |   2 +-
 framework/backends/abstract.py               |  10 +-
 framework/backends/json.py                   |  50 +++++-----
 framework/tests/json_backend_tests.py        | 131 +++++++++++++++++----------
 framework/tests/json_results_update_tests.py |   7 +-
 framework/tests/json_tests.py                |   4 +-
 framework/tests/utils.py                     |  33 +++++++
 piglit.conf.example                          |   8 +-
 8 files changed, 164 insertions(+), 81 deletions(-)

diff --git a/framework/backends/__init__.py b/framework/backends/__init__.py
index 8af0326..bbb8760 100644
--- a/framework/backends/__init__.py
+++ b/framework/backends/__init__.py
@@ -55,7 +55,7 @@ __all__ = [
     'set_meta',
 ]
 
-_COMPRESSION_SUFFIXES = []
+_COMPRESSION_SUFFIXES = ['.gz']
 
 
 class BackendError(Exception):
diff --git a/framework/backends/abstract.py b/framework/backends/abstract.py
index 38c0b39..eae7081 100644
--- a/framework/backends/abstract.py
+++ b/framework/backends/abstract.py
@@ -29,6 +29,7 @@ from __future__ import print_function, absolute_import
 import abc
 import contextlib
 import functools
+import gzip
 import itertools
 import os
 import shutil
@@ -37,11 +38,15 @@ from framework.core import PIGLIT_CONFIG
 from framework.results import TestResult
 from framework.status import INCOMPLETE
 
-_DEFAULT_COMPRESSION_MODE = 'none'
+# WARNING: this assumes that the method name will be the compression extension:
+# use gz not gzip, bz2 not bzip2, xz not lzma, etc.
 _COMPRESSORS = {
     'none': functools.partial(open, mode='w'),
+    'gz': functools.partial(gzip.open, mode='w'),
 }
 
+_DEFAULT_COMPRESSION_MODE = 'gz'
+
 
 @contextlib.contextmanager
 def write_compressed(filename):
@@ -50,12 +55,11 @@ def write_compressed(filename):
     This helper function reads the piglit.conf to decide whether to use
     compression, and what type of compression to use.
 
-    Currently it implements no compression
-
     """
     method = (os.environ.get('PIGLIT_COMPRESSION') or
               PIGLIT_CONFIG.safe_get('core', 'compression') or
               _DEFAULT_COMPRESSION_MODE)
+
     assert method in _COMPRESSORS, \
         'unsupported compression method {}'.format(method)
 
diff --git a/framework/backends/json.py b/framework/backends/json.py
index e3be2f5..7b9022b 100644
--- a/framework/backends/json.py
+++ b/framework/backends/json.py
@@ -21,10 +21,12 @@
 """ Module providing json backend for piglit """
 
 from __future__ import print_function, absolute_import
+import functools
+import gzip
 import os
-import sys
-import shutil
 import posixpath
+import shutil
+import sys
 
 try:
     import simplejson as json
@@ -164,39 +166,45 @@ def load_results(filename):
     "main"
 
     """
+    def update(open_, filepath):
+        with open_(filepath) as f:
+            testrun = _load(f)
+        return _update_results(testrun, filepath)
+
     # This will load any file or file-like thing. That would include pipes and
     # file descriptors
     if not os.path.isdir(filename):
-        filepath = filename
+        if os.path.splitext(filename)[1] == '.gz':
+            # A file could still be passed that is gzipped but lacks a '.gz'
+            # extension. However, the gzip tool normally refuses to operate
+            # on files without the '.gz' suffix, so it is fair to assume
+            # that gzip-compressed results will have it.
+            return update(functools.partial(gzip.open, mode='rb'), filename)
+        else:
+            return update(functools.partial(open, mode='r'), filename)
     elif os.path.exists(os.path.join(filename, 'metadata.json')):
         # If the test is still running we need to use the resume code, since
         # there will not be a results.json file.
         # We want to return here since the results are known current (there's
         # an assert in TestrunResult.load), and there is no filepath
         # to pass to update_results
-        # XXX: This needs to be run before searching for a results.json file so
-        #      that if the new run is overwriting an old one we load the
-        #      partial and not the original. It might be better to just delete
-        #      the contents of the folder if there is anything in it.
-        # XXX: What happens if the tests folder gets deleted in the middle of
-        #      this?
         return _resume(filename)
     else:
         # If there are both old and new results in a directory pick the new
         # ones first
-        if os.path.exists(os.path.join(filename, 'results.json')):
-            filepath = os.path.join(filename, 'results.json')
+        basepath = os.path.join(filename, 'results.json')
+        if os.path.exists(basepath + '.gz'):
+            return update(functools.partial(gzip.open, mode='rb'),
+                          basepath + '.gz')
+        elif os.path.exists(basepath):
+            return update(functools.partial(open, mode='r'), basepath)
         # Version 0 results are called 'main'
         elif os.path.exists(os.path.join(filename, 'main')):
-            filepath = os.path.join(filename, 'main')
-        else:
-            raise exceptions.PiglitFatalError(
-                'No results found in "{}"'.format(filename))
-
-    with open(filepath, 'r') as f:
-        testrun = _load(f)
+            return update(functools.partial(open, mode='r'),
+                          os.path.join(filename, 'main'))
 
-    return _update_results(testrun, filepath)
+    raise exceptions.PiglitFatalError(
+        'No results found in "{}"'.format(filename))
 
 
 def set_meta(results):
@@ -511,7 +519,7 @@ def _update_three_to_four(results):
     return results
 
 
-def _update_four_to_five(results):                                                                                                    
+def _update_four_to_five(results):
     """Updates json results from version 4 to version 5."""
     new_tests = {}
 
@@ -525,7 +533,7 @@ def _update_four_to_five(results):
 
 
 REGISTRY = Registry(
-    extensions=['', '.json'],
+    extensions=['', '.json', '.json.gz'],
     backend=JSONBackend,
     load=load_results,
     meta=set_meta,
diff --git a/framework/tests/json_backend_tests.py b/framework/tests/json_backend_tests.py
index ac460d7..a1bc52a 100644
--- a/framework/tests/json_backend_tests.py
+++ b/framework/tests/json_backend_tests.py
@@ -24,7 +24,7 @@
 
 from __future__ import print_function, absolute_import
 import os
-import functools
+import gzip
 
 try:
     import simplejson as json
@@ -32,37 +32,10 @@ except ImportError:
     import json
 import nose.tools as nt
 
-from framework import results, backends, exceptions
-from framework.core import PIGLIT_CONFIG
+from framework import results, backends, exceptions, grouptools
 import framework.tests.utils as utils
 from .backends_tests import BACKEND_INITIAL_META
 
-PIGLIT_CONFIG.add_section('core')
-
-
-def _set_compression(compression):
-    """Set a specific compression level on at test."""
-    def _decorator(func):
-        """The actual decorator."""
-
-        @functools.wraps(func)
-        def _inner(*args, **kwargs):
-            """The returned function wrapper."""
-            restore = PIGLIT_CONFIG.safe_get('core', 'compression')
-            PIGLIT_CONFIG.set('core', 'compression', compression)
-
-            try:
-                func(*args, **kwargs)
-            finally:
-                if restore:
-                    PIGLIT_CONFIG.set('core', 'compression', restore)
-                else:
-                    PIGLIT_CONFIG.remove_option('core', 'compression')
-
-        return _inner
-
-    return _decorator
-
 
 def test_initialize_jsonbackend():
     """backends.json.JSONBackend: Class initializes
@@ -88,7 +61,7 @@ def test_json_initialize_metadata():
 class TestJSONTestMethod(utils.StaticDirectory):
     @classmethod
     def setup_class(cls):
-        cls.test_name = 'a/test/group/test1'
+        cls.test_name = grouptools.join('a', 'test', 'group', 'test1')
         cls.result = results.TestResult({
             'time': 1.2345,
             'result': 'pass',
@@ -119,11 +92,41 @@ class TestJSONTestMethod(utils.StaticDirectory):
         nt.assert_dict_equal({self.test_name: self.result}, test)
 
 
+class TestJSONTestFinalizeGZ(utils.StaticDirectory):
+    @classmethod
+    @utils.set_compression('gz')
+    def setup_class(cls):
+        cls.test_name = grouptools.join('a', 'test', 'group', 'test1')
+        cls.result = results.TestResult({
+            'time': 1.2345,
+            'result': 'pass',
+            'out': 'this is stdout',
+            'err': 'this is stderr',
+        })
+        super(TestJSONTestFinalizeGZ, cls).setup_class()
+        test = backends.json.JSONBackend(cls.tdir)
+        test.initialize(BACKEND_INITIAL_META)
+        with test.write_test(cls.test_name) as t:
+            t(cls.result)
+        test.finalize()
+
+    def test_create_results(self):
+        """backends.json.JSONBackend.finalize(): creates a results.json.gz file
+        """
+        assert os.path.exists(os.path.join(self.tdir, 'results.json.gz'))
+
+    @utils.no_error
+    def test_results_valid(self):
+        """backends.json.JSONBackend.finalize(): results.json.gz is valid"""
+        with gzip.open(os.path.join(self.tdir, 'results.json.gz'), 'rb') as f:
+            json.load(f)
+
+
 class TestJSONTestFinalizeNone(utils.StaticDirectory):
     @classmethod
-    @_set_compression('none')
+    @utils.set_compression('none')
     def setup_class(cls):
-        cls.test_name = 'a/test/group/test1'
+        cls.test_name = grouptools.join('a', 'test', 'group', 'test1')
         cls.result = results.TestResult({
             'time': 1.2345,
             'result': 'pass',
@@ -145,6 +148,7 @@ class TestJSONTestFinalizeNone(utils.StaticDirectory):
         """backends.json.JSONBackend.finalize(): removes tests directory"""
         assert not os.path.exists(os.path.join(self.tdir, 'tests'))
 
+
     def test_create_results(self):
         """backends.json.JSONBackend.finalize(): creates a results.json file
         """
@@ -160,16 +164,16 @@ class TestJSONTestFinalizeNone(utils.StaticDirectory):
 class TestJSONTestFinalizeEnvNone(utils.StaticDirectory):
     @classmethod
     @utils.set_env(PIGLIT_COMPRESSION='none')
-    @_set_compression('gz')
+    @utils.set_compression('gz')
     def setup_class(cls):
-        cls.test_name = 'a/test/group/test1'
+        super(TestJSONTestFinalizeEnvNone, cls).setup_class()
+        cls.test_name = grouptools.join('a', 'test', 'group', 'test1')
         cls.result = results.TestResult({
             'time': 1.2345,
             'result': 'pass',
             'out': 'this is stdout',
             'err': 'this is stderr',
         })
-        super(TestJSONTestFinalizeEnvNone, cls).setup_class()
         test = backends.json.JSONBackend(cls.tdir)
         test.initialize(BACKEND_INITIAL_META)
         with test.write_test(cls.test_name) as t:
@@ -246,18 +250,20 @@ def test_resume_load_valid():
     with utils.tempdir() as f:
         backend = backends.json.JSONBackend(f)
         backend.initialize(BACKEND_INITIAL_META)
-        with backend.write_test("group1/test1") as t:
+        with backend.write_test(grouptools.join('group1', 'test1')) as t:
             t({'result': 'fail'})
-        with backend.write_test("group1/test2") as t:
+        with backend.write_test(grouptools.join('group1', 'test2')) as t:
             t({'result': 'pass'})
-        with backend.write_test("group2/test3") as t:
+        with backend.write_test(grouptools.join('group2', 'test3')) as t:
             t({'result': 'fail'})
 
         test = backends.json._resume(f)
 
         nt.assert_set_equal(
             set(test.tests.keys()),
-            set(['group1/test1', 'group1/test2', 'group2/test3']),
+            set([grouptools.join('group1', 'test1'),
+                 grouptools.join('group1', 'test2'),
+                 grouptools.join('group2', 'test3')]),
         )
 
 
@@ -266,11 +272,11 @@ def test_resume_load_invalid():
     with utils.tempdir() as f:
         backend = backends.json.JSONBackend(f)
         backend.initialize(BACKEND_INITIAL_META)
-        with backend.write_test("group1/test1") as t:
+        with backend.write_test(grouptools.join('group1', 'test1')) as t:
             t({'result': 'fail'})
-        with backend.write_test("group1/test2") as t:
+        with backend.write_test(grouptools.join('group1', 'test2')) as t:
             t({'result': 'pass'})
-        with backend.write_test("group2/test3") as t:
+        with backend.write_test(grouptools.join('group2', 'test3')) as t:
             t({'result': 'fail'})
         with open(os.path.join(f, 'tests', 'x.json'), 'w') as w:
             w.write('foo')
@@ -279,7 +285,9 @@ def test_resume_load_invalid():
 
         nt.assert_set_equal(
             set(test.tests.keys()),
-            set(['group1/test1', 'group1/test2', 'group2/test3']),
+            set([grouptools.join('group1', 'test1'),
+                 grouptools.join('group1', 'test2'),
+                 grouptools.join('group2', 'test3')]),
         )
 
 
@@ -294,21 +302,23 @@ def test_resume_load_incomplete():
     with utils.tempdir() as f:
         backend = backends.json.JSONBackend(f)
         backend.initialize(BACKEND_INITIAL_META)
-        with backend.write_test("group1/test1") as t:
+        with backend.write_test(grouptools.join('group1', 'test1')) as t:
             t({'result': 'fail'})
-        with backend.write_test("group1/test2") as t:
+        with backend.write_test(grouptools.join('group1', 'test2')) as t:
             t({'result': 'pass'})
-        with backend.write_test("group2/test3") as t:
+        with backend.write_test(grouptools.join('group2', 'test3')) as t:
             t({'result': 'crash'})
-        with backend.write_test("group2/test4") as t:
+        with backend.write_test(grouptools.join('group2', 'test4')) as t:
             t({'result': 'incomplete'})
 
         test = backends.json._resume(f)
 
         nt.assert_set_equal(
             set(test.tests.keys()),
-            set(['group1/test1', 'group1/test2', 'group2/test3',
-                 'group2/test4']),
+            set([grouptools.join('group1', 'test1'),
+                 grouptools.join('group1', 'test2'),
+                 grouptools.join('group2', 'test3'),
+                 grouptools.join('group2', 'test4')]),
         )
 
 
@@ -333,6 +343,16 @@ def test_load_results_folder():
 
 
 @utils.no_error
+def test_load_results_folder_gz():
+    """backends.json.load_results: takes a folder with a file named results.json.gz"""
+    with utils.tempdir() as tdir:
+        with gzip.open(os.path.join(tdir, 'results.json.gz'), 'wt') as tfile:
+            tfile.write(json.dumps(utils.JSON_DATA))
+
+        backends.json.load_results(tdir)
+
+
+@utils.no_error
 def test_load_results_file():
     """backends.json.load_results: Loads a file passed by name"""
     with utils.resultfile() as tfile:
@@ -352,6 +372,19 @@ def test_load_json():
     nt.assert_in('sometest', result.tests)
 
 
+def test_load_json_gz():
+    """backends.load(): Loads .json.gz files"""
+    with utils.tempdir() as tdir:
+        filename = os.path.join(tdir, 'results.json.gz')
+        with gzip.open(filename, 'w') as f:
+            json.dump(utils.JSON_DATA, f)
+
+        result = backends.load(filename)
+
+    nt.assert_is_instance(result, results.TestrunResult)
+    nt.assert_in('sometest', result.tests)
+
+
 def test_piglit_decoder():
     """backends.json.piglit_decoder: Works correctly"""
     test = json.loads('{"foo": {"result": "pass"}}',
diff --git a/framework/tests/json_results_update_tests.py b/framework/tests/json_results_update_tests.py
index 9a79b9e..f45ed6a 100644
--- a/framework/tests/json_results_update_tests.py
+++ b/framework/tests/json_results_update_tests.py
@@ -232,8 +232,11 @@ class TestV0toV1(object):
             # There is the potential that the file will be renamed. In that event
             # remove the renamed files
             if e.errno == 2:
-                os.unlink(os.path.join(tempfile.tempdir, 'results.json'))
-                os.unlink(os.path.join(tempfile.tempdir, 'results.json.old'))
+                for file_ in [
+                        os.path.join(tempfile.tempdir, 'results.json'),
+                        os.path.join(tempfile.tempdir, 'results.json.old')]:
+                    if os.path.exists(file_):
+                        os.unlink(file_)
             else:
                 raise
 
diff --git a/framework/tests/json_tests.py b/framework/tests/json_tests.py
index d6ca0bf..695a620 100644
--- a/framework/tests/json_tests.py
+++ b/framework/tests/json_tests.py
@@ -28,6 +28,7 @@ tests and they will change with each version of the json output.
 
 from __future__ import print_function, absolute_import
 import os
+import gzip
 
 import nose.tools as nt
 try:
@@ -52,6 +53,7 @@ class Namespace(object):
 class TestJsonOutput(utils.StaticDirectory):
     """Class for testing JSON output."""
     @classmethod
+    @utils.set_compression('gz')
     def setup_class(cls):
         super(TestJsonOutput, cls).setup_class()
 
@@ -66,7 +68,7 @@ class TestJsonOutput(utils.StaticDirectory):
         with backend.write_test('result') as t:
             t({'result': 'pass'})
         backend.finalize({'time_elapsed': 1.22})
-        with open(os.path.join(cls.tdir, 'results.json'), 'r') as f:
+        with gzip.open(os.path.join(cls.tdir, 'results.json.gz'), 'r') as f:
             cls.json = json.load(f)
 
     def test_root_results_version(self):
diff --git a/framework/tests/utils.py b/framework/tests/utils.py
index a75f9fe..379965c 100644
--- a/framework/tests/utils.py
+++ b/framework/tests/utils.py
@@ -32,6 +32,7 @@ import shutil
 import tempfile as tempfile_
 import functools
 import subprocess
+import ConfigParser
 from contextlib import contextmanager
 
 try:
@@ -41,6 +42,7 @@ except ImportError:
 from nose.plugins.skip import SkipTest
 
 from framework import test, backends, results
+from framework.core import PIGLIT_CONFIG
 
 
 __all__ = [
@@ -362,6 +364,37 @@ def capture_stderr(func):
     return _inner
 
 
+def set_compression(compression):
+    """Set a specific compression level on at test."""
+    assert compression in ['gz', 'none']
+
+    def _decorator(func):
+        """The actual decorator."""
+        @functools.wraps(func)
+        def _inner(*args, **kwargs):
+            """The returned function wrapper."""
+            try:
+                restore = PIGLIT_CONFIG.get('core', 'compression')
+            except ConfigParser.NoSectionError:
+                PIGLIT_CONFIG.add_section('core')
+                restore = None
+            except ConfigParser.NoOptionError:
+                restore = None
+
+            PIGLIT_CONFIG.set('core', 'compression', compression)
+
+            try:
+                func(*args, **kwargs)
+            finally:
+                if restore:
+                    PIGLIT_CONFIG.set('core', 'compression', restore)
+                else:
+                    PIGLIT_CONFIG.remove_option('core', 'compression')
+
+        return _inner
+    return _decorator
+
+
 def set_env(**envargs):
     """Decorator that sets environment variables and then unsets them."""
     def _decorator(func):
diff --git a/piglit.conf.example b/piglit.conf.example
index 9601ad6..201958c 100644
--- a/piglit.conf.example
+++ b/piglit.conf.example
@@ -91,9 +91,9 @@ run_test=./%(test_name)s
 ;backend=json
 
 ; Set the default compression method to use,
-; May be one of: 'none'
-; Default: none (Note that this may change in the future)
-;compression=none
+; May be one of: 'none', 'gz'
+; Default: gz (Note that this may change in the future)
+;compression=gz
 
 [expected-failures]
 ; Provide a list of test names that are expected to fail.  These tests
@@ -105,4 +105,4 @@ run_test=./%(test_name)s
 
 [expected-crashes]
 ; Like expected-failures, but specifies that a test is expected to
-; crash.
\ No newline at end of file
+; crash.
-- 
2.4.2


