[Piglit] [PATCH 1/7] framework: fix typos in comments

Nicolai Hähnle nhaehnle at gmail.com
Wed Oct 11 10:12:49 UTC 2017


From: Nicolai Hähnle <nicolai.haehnle at amd.com>

---
 framework/backends/abstract.py | 2 +-
 framework/backends/json.py     | 8 ++++----
 framework/core.py              | 2 +-
 framework/grouptools.py        | 2 +-
 framework/profile.py           | 2 +-
 framework/results.py           | 2 +-
 framework/summary/common.py    | 2 +-
 7 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/framework/backends/abstract.py b/framework/backends/abstract.py
index 13a7b6707..85abfa52d 100644
--- a/framework/backends/abstract.py
+++ b/framework/backends/abstract.py
@@ -192,21 +192,21 @@ class FileBackend(Backend):
     def _file_extension(self):
         """The file extension of the backend."""
 
     @contextlib.contextmanager
     def write_test(self, name):
         """Write a test.
 
         When this context manager is opened it will first write a placeholder
         file with the status incomplete.
 
-        When it is called to write the finall result it will create a temporary
+        When it is called to write the final result it will create a temporary
         file, write to that file, then move that file over the original,
         incomplete status file. This helps to make the operation atomic, as
         long as the filesystem continues running and the result was valid in
         the original file it will be valid at the end
 
         """
         def finish(val):
             tfile = file_ + '.tmp'
             with open(tfile, 'w') as f:
                 self._write(f, name, val)
diff --git a/framework/backends/json.py b/framework/backends/json.py
index dd08bf053..c810dcaa5 100644
--- a/framework/backends/json.py
+++ b/framework/backends/json.py
@@ -76,21 +76,21 @@ def piglit_encoder(obj):
         return obj.to_json()
     return obj
 
 
 class JSONBackend(FileBackend):
     """ Piglit's native JSON backend
 
     This writes out to piglit's native json backend. This class uses the python
     json module or the simplejson.
 
-    This class is atomic, writes either completely fail or completley succeed.
+    This class is atomic, writes either completely fail or completely succeed.
     To achieve this it writes individual files for each test and for the
     metadata, and composes them at the end into a single file and removes the
     intermediate files. When it tries to compose these files if it cannot read
     a file it just ignores it, making the result atomic.
 
     """
     _file_extension = 'json'
 
     def initialize(self, metadata):
         """ Write boilerplate json code
@@ -298,23 +298,23 @@ def _resume(results_dir):
         with open(os.path.join(tests_dir, file_), 'r') as f:
             try:
                 meta['tests'].update(json.load(f))
             except ValueError:
                 continue
 
     return results.TestrunResult.from_dict(meta)
 
 
 def _update_results(results, filepath):
-    """ Update results to the lastest version
+    """ Update results to the latest version
 
-    This function is a wraper for other update_* functions, providing
+    This function is a wrapper for other update_* functions, providing
     incremental updates from one version to another.
 
     Arguments:
     results -- a TestrunResults instance
     filepath -- the name of the file that the Testrunresults instance was
                 created from
 
     """
 
     def loop_updates(results):
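
The JSONBackend docstring and the _resume() loop above describe composing
per-test fragments into a single file while skipping anything unreadable. A
self-contained sketch of that compose step (compose_results() and the metadata
layout here are assumptions for illustration):

    import json
    import os


    def compose_results(tests_dir, metadata_path, out_path):
        # Start from the metadata written when the run was initialized.
        with open(metadata_path, 'r') as f:
            result = json.load(f)
        result.setdefault('tests', {})

        # Fold in every per-test fragment; skip anything that does not parse
        # (for example a fragment truncated by a crash), so a partial run
        # still composes into a readable result.
        for name in sorted(os.listdir(tests_dir)):
            try:
                with open(os.path.join(tests_dir, name), 'r') as f:
                    result['tests'].update(json.load(f))
            except ValueError:
                continue

        with open(out_path, 'w') as f:
            json.dump(result, f, indent=4)
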
@@ -349,21 +349,21 @@ def _update_results(results, filepath):
         os.rename(filepath, filepath + '.old')
         _write(results, filepath)
     except OSError:
         print("WARNING: Could not write updated results {}".format(filepath),
               file=sys.stderr)
 
     return results
 
 
 def _write(results, file_):
-    """WRite the values of the results out to a file."""
+    """Write the values of the results out to a file."""
     with write_compressed(file_) as f:
         json.dump(results, f, default=piglit_encoder, indent=INDENT)
 
 
 def _update_seven_to_eight(result):
     """Update json results from version 7 to 8.
 
     This update replaces the time attribute float with a TimeAttribute object,
     which stores a start time and an end time, and provides methods for getting
     total and delta.
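
_update_results() above is described as a wrapper that applies update_*
functions one version at a time. A made-up, minimal version of that chain (the
registry, the version numbers, and the start/end time representation are
stand-ins, not the real piglit schema):

    CURRENT_VERSION = 8


    def _seven_to_eight(results):
        # Stand-in migration: replace the float 'time' with a start/end pair,
        # echoing what _update_seven_to_eight above describes.
        results['time'] = {'start': 0.0, 'end': results['time']}
        results['version'] = 8
        return results


    # Maps a version to the function that upgrades results to the next one.
    UPDATES = {7: _seven_to_eight}


    def update_to_latest(results):
        # Apply one migration per iteration until the data is current, so an
        # old file is upgraded incrementally through every schema change.
        while results['version'] < CURRENT_VERSION:
            results = UPDATES[results['version']](results)
        return results
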
diff --git a/framework/core.py b/framework/core.py
index 9abc1284c..61643af95 100644
--- a/framework/core.py
+++ b/framework/core.py
@@ -213,20 +213,20 @@ def parse_listfile(filename):
 
 class lazy_property(object):  # pylint: disable=invalid-name,too-few-public-methods
     """Descriptor that replaces the function it wraps with the value generated.
 
     This makes a property that is truly lazy, it is calculated once on demand,
     and stored. This makes this very useful for values that you might want to
     calculate and reuse, but they cannot change.
 
     This works by very cleverly shadowing itself with the calculated value. It
     adds the value to the instance, which pushes itself up the MRO and will
-    never be quired again.
+    never be queried again.
 
     """
     def __init__(self, func):
         self.__func = func
 
     def __get__(self, instance, cls):
         value = self.__func(instance)
         setattr(instance, self.__func.__name__, value)
         return value
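
A short usage sketch of the shadowing behaviour the lazy_property docstring
describes (the Expensive class is invented; the import assumes a piglit
checkout on PYTHONPATH):

    from framework.core import lazy_property


    class Expensive(object):
        calls = 0

        @lazy_property
        def answer(self):
            Expensive.calls += 1
            return 42


    e = Expensive()
    assert e.answer == 42        # first access runs the function, stores 42
    assert e.answer == 42        # second access reads the stored attribute
    assert Expensive.calls == 1  # the wrapped function only ever ran once
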
diff --git a/framework/grouptools.py b/framework/grouptools.py
index 241cf24db..f28241d3c 100644
--- a/framework/grouptools.py
+++ b/framework/grouptools.py
@@ -112,21 +112,21 @@ def commonprefix(args):
 
 def join(first, *args):
     """Join multiple groups together.
 
     This function is implemented via string concatenation, while most
     pythonistas would use list joining, because it is accepted as better.  I
     wrote a number of implementations and timed them with timeit.  I found for
     small number of joins (2-10) that str concatenation was quite a bit faster,
     at around 100 elements list joining became faster. Since most of piglit's
     use of join is for 2-10 elements I used string concatentation, which is
-    conincedently very similar to the way posixpath.join is implemented.
+    coincidentally very similar to the way posixpath.join is implemented.
 
     """
     # If first happens to be a non-existant value, walk through args until we
     # find a real value and use that.
     args = (a for a in args)
     if not first:
         for group in args:
             if group:
                 first = group
                 break
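
For reference, a stripped-down join-by-concatenation in the same shape as
posixpath.join (the separator value and the sample names are assumptions; the
real join lives in grouptools):

    SEPARATOR = '@'  # assumed here; grouptools defines the real separator


    def join_groups(first, *rest):
        # Plain string concatenation: fastest for the common 2-10 element
        # case, and structurally similar to posixpath.join.
        result = first
        for part in rest:
            if not part:
                continue
            if result and not result.endswith(SEPARATOR):
                result += SEPARATOR
            result += part
        return result


    assert join_groups('spec', 'arb_foo', 'bar') == 'spec@arb_foo@bar'
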
diff --git a/framework/profile.py b/framework/profile.py
index 356ec6479..53358868b 100644
--- a/framework/profile.py
+++ b/framework/profile.py
@@ -285,21 +285,21 @@ class TestProfile(object):
         self.options = {
             'dmesg': get_dmesg(False),
             'monitor': Monitoring(False),
             'ignore_missing': False,
         }
 
     def setup(self):
         """Method to do pre-run setup."""
 
     def teardown(self):
-        """Method to od post-run teardown."""
+        """Method to do post-run teardown."""
 
     def copy(self):
         """Create a copy of the TestProfile.
 
         This method creates a copy with references to the original instance
         using copy.copy. This allows profiles to be "subclassed" by other
         profiles, without modifying the original.
         """
         new = copy.copy(self)
         new.test_list = copy.copy(self.test_list)
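
The copy() docstring describes shallow-copying the profile and then re-copying
its mutable containers, so a derived profile can add or drop tests without
touching the original. A generic sketch of that pattern (the Profile class here
is made up, not TestProfile itself):

    import copy


    class Profile(object):
        def __init__(self):
            self.test_list = {}

        def copy(self):
            # Shallow-copy the instance, then give the copy its own dict so
            # edits to the child profile never leak into the parent.
            new = copy.copy(self)
            new.test_list = copy.copy(self.test_list)
            return new


    base = Profile()
    base.test_list['spec@a'] = object()
    child = base.copy()
    child.test_list['spec@b'] = object()
    assert 'spec@b' not in base.test_list
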
diff --git a/framework/results.py b/framework/results.py
index 4cfe78250..99dd3735b 100644
--- a/framework/results.py
+++ b/framework/results.py
@@ -318,21 +318,21 @@ class TestrunResult(object):
         try:
             return self.tests[key].result
         except KeyError as e:
             name, test = grouptools.splitname(key)
             try:
                 return self.tests[name].subtests[test]
             except KeyError:
                 raise e
 
     def calculate_group_totals(self):
-        """Calculate the number of pases, fails, etc at each level."""
+        """Calculate the number of passes, fails, etc at each level."""
         for name, result in six.iteritems(self.tests):
             # If there are subtests treat the test as if it is a group instead
             # of a test.
             if result.subtests:
                 for res in six.itervalues(result.subtests):
                     res = str(res)
                     temp = name
 
                     self.totals[temp][res] += 1
                     while temp:
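
A standalone sketch of the per-level accumulation calculate_group_totals()
performs (the helper name, the '@' separator, and the sample statuses are
illustrative only):

    import collections


    def group_totals(statuses):
        # Count each test's status once for every group above it, so totals
        # exist at every level of the tree; '' is the root group.
        totals = collections.defaultdict(collections.Counter)
        for name, status in statuses.items():
            group = name
            while group:
                group = group.rpartition('@')[0]
                totals[group][status] += 1
        return totals


    t = group_totals({'spec@ext@a': 'pass', 'spec@ext@b': 'fail'})
    assert t['spec@ext']['pass'] == 1 and t['spec@ext']['fail'] == 1
    assert t['']['pass'] == 1 and t['']['fail'] == 1
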
diff --git a/framework/summary/common.py b/framework/summary/common.py
index 8c7e2c78a..5af692703 100644
--- a/framework/summary/common.py
+++ b/framework/summary/common.py
@@ -45,21 +45,21 @@ class Results(object):  # pylint: disable=too-few-public-methods
 
     """
     def __init__(self, results):
         self.results = results
         self.names = Names(self)
         self.counts = Counts(self)
 
     def get_result(self, name):
         """Get all results for a single test.
 
-        Replace any missing vaules with status.NOTRUN, correclty handles
+        Replace any missing values with status.NOTRUN, correctly handles
         subtests.
 
         """
         results = []
         for res in self.results:
             try:
                 results.append(res.get_result(name))
             except KeyError:
                 results.append(so.NOTRUN)
         return results
-- 
2.11.0


