[Piglit] [PATCH 38/49] unittests: port json_backend_tests to pytest
Dylan Baker
dylan at pnwbakers.com
Fri Jul 29 18:39:24 UTC 2016
Signed-off-by: Dylan Baker <dylanx.c.baker at intel.com>
---
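A note for reviewers who have not used these pytest idioms before (this is
an illustrative sketch only, not code from the patch; the fixture and test
names below are hypothetical): the nose-style setup_module/teardown_module
pair that previously forced compression off is replaced by a module-scoped
autouse fixture, and temporary directories come from pytest's built-in
tmpdir fixture converted with six.text_type, rather than from
utils.nose.tempdir.

    import os
    try:
        import mock  # standalone mock package on Python 2
    except ImportError:
        from unittest import mock  # stdlib on Python 3.3+

    import pytest
    import six


    @pytest.yield_fixture(scope='module', autouse=True)
    def _force_no_compression():
        # Applies to every test in the module without being requested
        # explicitly; the patched environment is restored when the
        # generator resumes after the yield.
        with mock.patch.dict(os.environ, {'PIGLIT_COMPRESSION': 'none'}):
            yield


    def test_example_write(tmpdir):
        # tmpdir is a per-test py.path.local object; six.text_type() turns
        # it into a plain text path for APIs that expect a string.
        path = os.path.join(six.text_type(tmpdir), 'example.txt')
        with open(path, 'w') as f:
            f.write('data')
        assert tmpdir.join('example.txt').check()
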
unittests/framework/backends/shared.py | 151 ++++++++++++++
unittests/framework/backends/test_json.py | 276 +++++++++++++++++++++++++
unittests/json_backend_tests.py | 324 ------------------------------
3 files changed, 427 insertions(+), 324 deletions(-)
create mode 100644 unittests/framework/backends/shared.py
create mode 100644 unittests/framework/backends/test_json.py
delete mode 100644 unittests/json_backend_tests.py
diff --git a/unittests/framework/backends/shared.py b/unittests/framework/backends/shared.py
new file mode 100644
index 0000000..e9bc5e4
--- /dev/null
+++ b/unittests/framework/backends/shared.py
@@ -0,0 +1,151 @@
+# encoding=utf-8
+# Copyright © 2016 Intel Corporation
+
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""Shared data for backend tests."""
+
+from __future__ import (
+ absolute_import, division, print_function, unicode_literals
+)
+
+
+INITIAL_METADATA = {
+ 'name': 'name',
+ 'test_count': 0,
+ 'env': {},
+ 'options': {},
+}
+
+# This is JSON data for the current results version, in raw form with only
+# the minimum required changes. It does not contain piglit-specific objects,
+# only strings, floats, ints, and Nones (instead of JSON's null).
+JSON = {
+ "results_version": 8,
+ "time_elapsed": {
+ "start": 1469638791.2351687,
+ "__type__": "TimeAttribute",
+ "end": 1469638791.4387212
+ },
+ "wglinfo": None,
+ "tests": {
+ "spec@!opengl 1.0 at gl-1.0-readpixsanity": {
+ "dmesg": "",
+ "traceback": None,
+ "err": "piglit: error: waffle_display_connect failed due to "
+ "WAFFLE_ERROR_UNKNOWN: open drm file for gbm failed\n",
+ "subtests": {
+ "__type__": "Subtests"
+ },
+ "out": "",
+ "exception": None,
+ "command": "/home/user/source/piglit/bin/gl-1.0-readpixsanity "
+ "-auto -fbo",
+ "time": {
+ "start": 1469638791.2383287,
+ "__type__": "TimeAttribute",
+ "end": 1469638791.2439244
+ },
+ "pid": 11768,
+ "__type__": "TestResult",
+ "returncode": 1,
+ "result": "fail",
+ "environment": ("PIGLIT_SOURCE_DIR=\"/home/user/source/piglit\" "
+ " PIGLIT_PLATFORM=\"gbm\"")
+ }
+ },
+ # pylint: disable=line-too-long
+ "lspci": "00:00.0 Host bridge...",
+ "clinfo": None,
+ "uname": "uname",
+ # pylint: enable=line-too-long
+ "options": {
+ "dmesg": False,
+ "concurrent": "some",
+ "include_filter": [],
+ "monitored": False,
+ "execute": True,
+ "valgrind": False,
+ "profile": [
+ "sanity"
+ ],
+ "log_level": "quiet",
+ "env": {
+ "PIGLIT_SOURCE_DIR": "/home/user/source/piglit",
+ "PIGLIT_PLATFORM": "gbm"
+ },
+ "platform": "gbm",
+ "sync": False,
+ "exclude_tests": [],
+ "exclude_filter": []
+ },
+ "name": "foo",
+ "__type__": "TestrunResult",
+ "glxinfo": None,
+ "totals": {
+ "spec": {
+ "warn": 0,
+ "timeout": 0,
+ "skip": 0,
+ "crash": 0,
+ "pass": 0,
+ "fail": 1,
+ "dmesg-warn": 0,
+ "incomplete": 0,
+ "notrun": 0,
+ "dmesg-fail": 0
+ },
+ "": {
+ "warn": 0,
+ "timeout": 0,
+ "skip": 0,
+ "crash": 0,
+ "pass": 0,
+ "fail": 1,
+ "dmesg-warn": 0,
+ "incomplete": 0,
+ "notrun": 0,
+ "dmesg-fail": 0
+ },
+ "spec@!opengl 1.0": {
+ "warn": 0,
+ "timeout": 0,
+ "skip": 0,
+ "crash": 0,
+ "pass": 0,
+ "fail": 1,
+ "dmesg-warn": 0,
+ "incomplete": 0,
+ "notrun": 0,
+ "dmesg-fail": 0
+ },
+ "root": {
+ "warn": 0,
+ "timeout": 0,
+ "skip": 0,
+ "crash": 0,
+ "pass": 0,
+ "fail": 1,
+ "dmesg-warn": 0,
+ "incomplete": 0,
+ "notrun": 0,
+ "dmesg-fail": 0
+ }
+ }
+}
diff --git a/unittests/framework/backends/test_json.py b/unittests/framework/backends/test_json.py
new file mode 100644
index 0000000..ab91705
--- /dev/null
+++ b/unittests/framework/backends/test_json.py
@@ -0,0 +1,276 @@
+# Copyright (c) 2014, 2016 Intel Corporation
+
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+""" Tests for the backend package """
+
+from __future__ import (
+ absolute_import, division, print_function, unicode_literals
+)
+import os
+try:
+ import simplejson as json
+except ImportError:
+ import json
+try:
+ import mock
+except ImportError:
+ from unittest import mock
+
+import pytest
+import six
+
+from framework import backends
+from framework import exceptions
+from framework import grouptools
+from framework import results
+
+from . import shared
+
+# pylint: disable=no-self-use,protected-access
+
+
+@pytest.yield_fixture(scope='module', autouse=True)
+def mock_compression():
+ with mock.patch.dict(backends.json.compression.os.environ,
+ {'PIGLIT_COMPRESSION': 'none'}):
+ yield
+
+
+class TestJSONBackend(object):
+ """Tests for the JSONBackend class."""
+
+ class TestInitialize(object):
+ """Tests for the initialize method."""
+
+ def test_metadata_file_created(self, tmpdir):
+ p = six.text_type(tmpdir)
+ test = backends.json.JSONBackend(p)
+ test.initialize(shared.INITIAL_METADATA)
+ assert os.path.exists(os.path.join(p, 'metadata.json'))
+
+ class TestWriteTest(object):
+ """Tests for the write_test method."""
+
+ def test_write(self, tmpdir):
+ """The write method should create a file."""
+ p = six.text_type(tmpdir)
+ test = backends.json.JSONBackend(p)
+ test.initialize(shared.INITIAL_METADATA)
+
+ with test.write_test('bar') as t:
+ t(results.TestResult())
+
+ assert tmpdir.join('tests/0.json').check()
+
+ def test_load(self, tmpdir):
+ """Test that the written JSON can be loaded.
+
+ This doesn't attempt to validate the schema of the output (that is
+ handled elsewhere); it just does a quick check that the written file
+ can be read back as JSON.
+ """
+ p = six.text_type(tmpdir)
+ test = backends.json.JSONBackend(p)
+ test.initialize(shared.INITIAL_METADATA)
+
+ with test.write_test('bar') as t:
+ t(results.TestResult())
+
+ with tmpdir.join('tests/0.json').open('r') as f:
+ json.load(f)
+
+ class TestFinalize(object):
+ """Tests for the finalize method."""
+
+ name = grouptools.join('a', 'test', 'group', 'test1')
+ result = results.TestResult('pass')
+
+ @pytest.fixture(autouse=True)
+ def setup(self, tmpdir):
+ test = backends.json.JSONBackend(six.text_type(tmpdir))
+ test.initialize(shared.INITIAL_METADATA)
+ with test.write_test(self.name) as t:
+ t(self.result)
+ test.finalize()
+
+ def test_metadata_removed(self, tmpdir):
+ assert not tmpdir.join('metadata.json').check()
+
+ def test_tests_directory_removed(self, tmpdir):
+ assert not tmpdir.join('tests').check()
+
+ def test_results_file_created(self, tmpdir):
+ # Normally this would also have a compression extension, but this
+ # module has an autouse fixture that forces compression to 'none'.
+ assert tmpdir.join('results.json').check()
+
+ def test_results_are_json(self, tmpdir):
+ # This only checks that the output is valid JSON, not that the
+ # schema is correct
+ with tmpdir.join('results.json').open('r') as f:
+ json.load(f)
+
+
+class TestUpdateResults(object):
+ """Test for the _update_results function."""
+
+ def test_current_version(self, tmpdir, mocker):
+ """backends.json.update_results(): returns early when the
+ results_version is current.
+ """
+ class Sentinel(Exception):
+ pass
+
+ mocker.patch('framework.backends.json.os.rename',
+ mocker.Mock(side_effect=Sentinel))
+ p = tmpdir.join('results.json')
+ p.write(json.dumps(shared.JSON))
+
+ with p.open('r') as f:
+ base = backends.json._load(f)
+ backends.json._update_results(base, six.text_type(p))
+
+
+class TestResume(object):
+ """tests for the resume function."""
+
+ def test_load_file(self, tmpdir):
+ p = tmpdir.join('results.json')
+ p.write('')
+
+ with pytest.raises(AssertionError):
+ backends.json._resume(six.text_type(p))
+
+ def test_load_valid_folder(self, tmpdir):
+ """backends.json._resume: loads valid results."""
+ backend = backends.json.JSONBackend(six.text_type(tmpdir))
+ backend.initialize(shared.INITIAL_METADATA)
+ with backend.write_test("group1/test1") as t:
+ t(results.TestResult('fail'))
+ with backend.write_test("group1/test2") as t:
+ t(results.TestResult('pass'))
+ with backend.write_test("group2/test3") as t:
+ t(results.TestResult('fail'))
+ test = backends.json._resume(six.text_type(tmpdir))
+
+ assert set(test.tests.keys()) == \
+ {'group1/test1', 'group1/test2', 'group2/test3'}
+
+ def test_load_invalid_folder(self, tmpdir):
+ """backends.json._resume: ignores invalid results"""
+ f = six.text_type(tmpdir)
+ backend = backends.json.JSONBackend(f)
+ backend.initialize(shared.INITIAL_METADATA)
+ with backend.write_test("group1/test1") as t:
+ t(results.TestResult('fail'))
+ with backend.write_test("group1/test2") as t:
+ t(results.TestResult('pass'))
+ with backend.write_test("group2/test3") as t:
+ t(results.TestResult('fail'))
+ with open(os.path.join(f, 'tests', 'x.json'), 'w') as w:
+ w.write('foo')
+ test = backends.json._resume(f)
+
+ assert set(test.tests.keys()) == \
+ {'group1/test1', 'group1/test2', 'group2/test3'}
+
+ def test_load_incomplete(self, tmpdir):
+ """backends.json._resume: loads incomplete results.
+
+ Because resume, aggregate, and summary all use the function called
+ _resume, we can't remove incomplete tests here. It's probably worth
+ doing a refactor to split some code out and allow this to be done in
+ the resume path.
+ """
+ f = six.text_type(tmpdir)
+ backend = backends.json.JSONBackend(f)
+ backend.initialize(shared.INITIAL_METADATA)
+ with backend.write_test("group1/test1") as t:
+ t(results.TestResult('fail'))
+ with backend.write_test("group1/test2") as t:
+ t(results.TestResult('pass'))
+ with backend.write_test("group2/test3") as t:
+ t(results.TestResult('crash'))
+ with backend.write_test("group2/test4") as t:
+ t(results.TestResult('incomplete'))
+ test = backends.json._resume(f)
+
+ assert set(test.tests.keys()) == \
+ {'group1/test1', 'group1/test2', 'group2/test3', 'group2/test4'}
+
+
+class TestLoadResults(object):
+ """Tests for the load_results function."""
+
+ def test_folder_with_main(self, tmpdir):
+ """backends.json.load_results: takes a folder with a file named main in
+ it.
+ """
+ p = tmpdir.join('main')
+ with p.open('w') as f:
+ f.write(json.dumps(shared.JSON))
+ backends.json.load_results(six.text_type(tmpdir), 'none')
+
+ def test_folder_with_results_json(self, tmpdir):
+ """backends.json.load_results: takes a folder with a file named
+ results.json.
+ """
+ p = tmpdir.join('results.json')
+ with p.open('w') as f:
+ f.write(json.dumps(shared.JSON))
+ backends.json.load_results(six.text_type(tmpdir), 'none')
+
+ def test_load_file(self, tmpdir):
+ """backends.json.load_results: Loads a file passed by name"""
+ p = tmpdir.join('my file')
+ with p.open('w') as f:
+ f.write(json.dumps(shared.JSON))
+ backends.json.load_results(six.text_type(p), 'none')
+
+
+class TestLoad(object):
+ """Tests for the _load function."""
+
+ def test_load_bad_json(self, tmpdir):
+ """backends.json._load: Raises fatal error if json is corrupt"""
+ p = tmpdir.join('foo')
+ p.write('{"bad json": }')
+ with p.open('r') as f:
+ with pytest.raises(exceptions.PiglitFatalError):
+ backends.json._load(f)
+
+
+class TestPiglitDecoder(object):
+ """Tests for the piglit_decoder function."""
+
+ def test_result(self):
+ """backends.json.piglit_decoder: turns results into TestResults."""
+ test = json.loads(
+ '{"foo": {"result": "pass", "__type__": "TestResult"}}',
+ object_hook=backends.json.piglit_decoder)
+ assert isinstance(test['foo'], results.TestResult)
+
+ def test_old_result(self):
+ """backends.json.piglit_decoder: does not turn old results into
+ TestResults.
+ """
+ test = json.loads('{"foo": {"result": "pass"}}',
+ object_hook=backends.json.piglit_decoder)
+ assert isinstance(test['foo'], dict)
diff --git a/unittests/json_backend_tests.py b/unittests/json_backend_tests.py
deleted file mode 100644
index cf19d30..0000000
--- a/unittests/json_backend_tests.py
+++ /dev/null
@@ -1,324 +0,0 @@
-# Copyright (c) 2014 Intel Corporation
-
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to deal
-# in the Software without restriction, including without limitation the rights
-# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-
-# The above copyright notice and this permission notice shall be included in
-# all copies or substantial portions of the Software.
-
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-# SOFTWARE.
-
-# pylint: disable=missing-docstring,protected-access
-
-""" Tests for the backend package """
-
-from __future__ import (
- absolute_import, division, print_function, unicode_literals
-)
-import os
-
-try:
- import simplejson as json
-except ImportError:
- import json
-import nose.tools as nt
-
-from framework import results, backends, exceptions, grouptools
-from . import utils
-from .backends_tests import BACKEND_INITIAL_META
-
-
-def setup_module():
- utils.piglit.set_compression('none')
-
-
-def teardown_module():
- utils.piglit.unset_compression()
-
-
-def test_initialize_jsonbackend():
- """backends.json.JSONBackend: Class initializes
-
- This needs to be handled separately from the others because it requires
- arguments
-
- """
- with utils.nose.tempdir() as tdir:
- func = backends.json.JSONBackend(tdir)
- nt.ok_(isinstance(func, backends.json.JSONBackend))
-
-
-def test_json_initialize_metadata():
- """backends.json.JSONBackend.initialize(): produces a metadata.json file"""
- with utils.nose.tempdir() as f:
- test = backends.json.JSONBackend(f)
- test.initialize(BACKEND_INITIAL_META)
-
- nt.ok_(os.path.exists(os.path.join(f, 'metadata.json')))
-
-
-class TestJSONTestMethod(utils.nose.StaticDirectory):
- @classmethod
- def setup_class(cls):
- cls.test_name = grouptools.join('a', 'test', 'group', 'test1')
- cls.result = results.TestResult()
- cls.time = 1.2345
- cls.result = 'pass'
- cls.out = 'this is stdout'
- cls.err = 'this is stderr'
-
- super(TestJSONTestMethod, cls).setup_class()
- test = backends.json.JSONBackend(cls.tdir)
- test.initialize(BACKEND_INITIAL_META)
- with test.write_test(cls.test_name) as t:
- t(cls.result)
-
- def test_write_test(self):
- """backends.json.JSONBackend.write_test(): adds tests to a 'tests' directory"""
- nt.ok_(os.path.exists(os.path.join(self.tdir, 'tests', '0.json')))
-
- @utils.nose.no_error
- def test_json_is_valid(self):
- """backends.json.JSONBackend.write_test(): produces valid json"""
- with open(os.path.join(self.tdir, 'tests', '0.json'), 'r') as f:
- json.load(f)
-
- def test_json_is_correct(self):
- """backends.json.JSONBackend.write_test(): produces correct json"""
- with open(os.path.join(self.tdir, 'tests', '0.json'), 'r') as f:
- test = json.load(f)
-
- nt.assert_dict_equal({self.test_name: self.result}, test)
-
-
-class TestJSONTestFinalize(utils.nose.StaticDirectory):
- # We're explictely setting none here since the default can change from none
- @classmethod
- def setup_class(cls):
- cls.test_name = grouptools.join('a', 'test', 'group', 'test1')
- cls.result = results.TestResult('pass')
-
- super(TestJSONTestFinalize, cls).setup_class()
- test = backends.json.JSONBackend(cls.tdir)
- test.initialize(BACKEND_INITIAL_META)
- with test.write_test(cls.test_name) as t:
- t(cls.result)
- test.finalize()
-
- def test_remove_metadata(self):
- """backends.json.JSONBackend.finalize(): removes metadata.json"""
- nt.ok_(not os.path.exists(os.path.join(self.tdir, 'metadata.json')))
-
- def test_remove_tests(self):
- """backends.json.JSONBackend.finalize(): removes tests directory"""
- nt.ok_(not os.path.exists(os.path.join(self.tdir, 'tests')))
-
- def test_create_results(self):
- """backends.json.JSONBackend.finalize(): creates a results.json file
- """
- nt.ok_(os.path.exists(os.path.join(self.tdir, 'results.json')))
-
- @utils.nose.no_error
- def test_results_valid(self):
- """backends.json.JSONBackend.finalize(): results.json is valid"""
- with open(os.path.join(self.tdir, 'results.json'), 'r') as f:
- json.load(f)
-
-
-def test_update_results_current():
- """backends.json.update_results(): returns early when the results_version is current"""
- data = utils.piglit.JSON_DATA.copy()
- data['results_version'] = backends.json.CURRENT_JSON_VERSION
-
- with utils.nose.tempdir() as d:
- with open(os.path.join(d, 'main'), 'w') as f:
- json.dump(data, f, default=backends.json.piglit_encoder)
-
- with open(os.path.join(d, 'main'), 'r') as f:
- base = backends.json._load(f)
-
- res = backends.json._update_results(base, f.name)
-
- nt.assert_dict_equal(res.__dict__, base.__dict__)
-
-
-def test_update_results_old():
- """backends.json.update_results(): updates results
-
- Because of the design of the our updates (namely that they silently
- incrementally update from x to y) it's impossible to know exactly what
- we'll get at th end without having tests that have to be reworked each time
- updates are run. Since there already is (at least for v0 -> v1) a fairly
- comprehensive set of tests, this test only tests that update_results() has
- been set equal to the CURRENT_JSON_VERSION, (which is one of the effects of
- running update_results() with the assumption that there is sufficient other
- testing of the update process.
-
- """
- data = utils.piglit.JSON_DATA.copy()
- data['results_version'] = 0
-
- with utils.nose.tempdir() as d:
- with open(os.path.join(d, 'main'), 'w') as f:
- json.dump(data, f)
-
- with open(os.path.join(d, 'main'), 'r') as f:
- base = backends.json._load(f)
-
- res = backends.json._update_results(base, f.name)
-
- nt.assert_equal(res.results_version, backends.json.CURRENT_JSON_VERSION)
-
-
-@nt.raises(AssertionError)
-def test_json_resume_non_folder():
- """backends.json._resume: doesn't accept a file"""
- with utils.nose.tempfile('') as f:
- backends.json._resume(f)
-
-
-def test_resume_load_valid():
- """backends.json._resume: loads valid results"""
- with utils.nose.tempdir() as f:
- backend = backends.json.JSONBackend(f)
- backend.initialize(BACKEND_INITIAL_META)
- with backend.write_test("group1/test1") as t:
- t(results.TestResult('fail'))
- with backend.write_test("group1/test2") as t:
- t(results.TestResult('pass'))
- with backend.write_test("group2/test3") as t:
- t(results.TestResult('fail'))
-
- test = backends.json._resume(f)
-
- nt.assert_set_equal(
- set(test.tests.keys()),
- set(['group1/test1', 'group1/test2', 'group2/test3']),
- )
-
-
-def test_resume_load_invalid():
- """backends.json._resume: ignores invalid results"""
- with utils.nose.tempdir() as f:
- backend = backends.json.JSONBackend(f)
- backend.initialize(BACKEND_INITIAL_META)
- with backend.write_test("group1/test1") as t:
- t(results.TestResult('fail'))
- with backend.write_test("group1/test2") as t:
- t(results.TestResult('pass'))
- with backend.write_test("group2/test3") as t:
- t(results.TestResult('fail'))
- with open(os.path.join(f, 'tests', 'x.json'), 'w') as w:
- w.write('foo')
-
- test = backends.json._resume(f)
-
- nt.assert_set_equal(
- set(test.tests.keys()),
- set(['group1/test1', 'group1/test2', 'group2/test3']),
- )
-
-
-def test_resume_load_incomplete():
- """backends.json._resume: loads incomplete results.
-
- Because resume, aggregate, and summary all use the function called _resume
- we can't remove incomplete tests here. It's probably worth doing a refactor
- to split some code out and allow this to be done in the resume path.
-
- """
- with utils.nose.tempdir() as f:
- backend = backends.json.JSONBackend(f)
- backend.initialize(BACKEND_INITIAL_META)
- with backend.write_test("group1/test1") as t:
- t(results.TestResult('fail'))
- with backend.write_test("group1/test2") as t:
- t(results.TestResult('pass'))
- with backend.write_test("group2/test3") as t:
- t(results.TestResult('crash'))
- with backend.write_test("group2/test4") as t:
- t(results.TestResult('incomplete'))
-
- test = backends.json._resume(f)
-
- nt.assert_set_equal(
- set(test.tests.keys()),
- set(['group1/test1', 'group1/test2', 'group2/test3',
- 'group2/test4']),
- )
-
-
-@utils.nose.no_error
-def test_load_results_folder_as_main():
- """backends.json.load_results: takes a folder with a file named main in it
- """
- with utils.nose.tempdir() as tdir:
- with open(os.path.join(tdir, 'main'), 'w') as tfile:
- tfile.write(json.dumps(utils.piglit.JSON_DATA,
- default=backends.json.piglit_encoder))
-
- backends.json.load_results(tdir, 'none')
-
-
-@utils.nose.no_error
-def test_load_results_folder():
- """backends.json.load_results: takes a folder with a file named results.json"""
- with utils.nose.tempdir() as tdir:
- with open(os.path.join(tdir, 'results.json'), 'w') as tfile:
- tfile.write(json.dumps(utils.piglit.JSON_DATA,
- default=backends.json.piglit_encoder))
-
- backends.json.load_results(tdir, 'none')
-
-
-@utils.nose.no_error
-def test_load_results_file():
- """backends.json.load_results: Loads a file passed by name"""
- with utils.piglit.resultfile() as tfile:
- backends.json.load_results(tfile.name, 'none')
-
-
-def test_load_json():
- """backends.load(): Loads .json files."""
- with utils.nose.tempdir() as tdir:
- filename = os.path.join(tdir, 'results.json')
- with open(filename, 'w') as f:
- json.dump(utils.piglit.JSON_DATA, f, default=backends.json.piglit_encoder)
-
- result = backends.load(filename)
-
- nt.assert_is_instance(result, results.TestrunResult)
- nt.assert_in('sometest', result.tests)
-
-
-def test_piglit_decoder_result():
- """backends.json.piglit_decoder: turns results into TestResults"""
- test = json.loads('{"foo": {"result": "pass", "__type__": "TestResult"}}',
- object_hook=backends.json.piglit_decoder)
- nt.assert_is_instance(test['foo'], results.TestResult)
-
-
-def test_piglit_decoder_old_result():
- """backends.json.piglit_decoder: does not turn old results into TestResults
- """
- test = json.loads('{"foo": {"result": "pass"}}',
- object_hook=backends.json.piglit_decoder)
- nt.assert_is_instance(test['foo'], dict)
-
-
-@nt.raises(exceptions.PiglitFatalError)
-def test_load_bad_json():
- """backends.json._load: Raises fatal error if json is corrupt"""
- with utils.nose.tempfile('{"bad json": }') as f:
- with open(f, 'r') as tfile:
- backends.json._load(tfile)
--
2.9.0