[Libreoffice-commits] core.git: bin/check-implementer-notes.py bin/extract-tooltip.py bin/find-duplicated-sids.py bin/find-german-comments bin/find-unused-defines.py bin/find-unused-sid-commands.py bin/find-unused-typedefs.py bin/gbuild-to-ide bin/symbolstore.py bin/test-hid-vs-ui.py bin/update bin/update_pch compilerplugins/clang solenv/bin solenv/gdb sw/qa

Mike Kaganski (via logerrit) logerrit at kemper.freedesktop.org
Mon Jul 13 20:24:29 UTC 2020


 bin/check-implementer-notes.py              |    2 +-
 bin/extract-tooltip.py                      |    2 +-
 bin/find-duplicated-sids.py                 |    4 ++--
 bin/find-german-comments                    |    6 +++---
 bin/find-unused-defines.py                  |    4 ++--
 bin/find-unused-sid-commands.py             |    2 +-
 bin/find-unused-typedefs.py                 |    2 +-
 bin/gbuild-to-ide                           |   22 +++++++++++-----------
 bin/symbolstore.py                          |    4 ++--
 bin/test-hid-vs-ui.py                       |    2 +-
 bin/update/uncompress_mar.py                |    2 +-
 bin/update_pch                              |    2 +-
 compilerplugins/clang/constantparam.py      |    2 +-
 compilerplugins/clang/finalclasses.py       |    4 ++--
 compilerplugins/clang/pahole-all-classes.py |    8 ++++----
 solenv/bin/native-code.py                   |    2 +-
 solenv/gdb/boost/util/printing.py           |    2 +-
 solenv/gdb/libreoffice/util/printing.py     |    2 +-
 sw/qa/python/check_styles.py                |    4 ++--
 19 files changed, 39 insertions(+), 39 deletions(-)

New commits:
commit bd96a6f7b7eb103f97bcd6eadc21908187e94dce
Author:     Mike Kaganski <mike.kaganski at collabora.com>
AuthorDate: Wed Jul 8 11:51:48 2020 +0300
Commit:     Mike Kaganski <mike.kaganski at collabora.com>
CommitDate: Mon Jul 13 22:23:44 2020 +0200

    Don't rely on Python's treatment of unrecognized escape sequences
    
    According to [1]:
    
    > Changed in version 3.6: Unrecognized escape sequences produce a DeprecationWarning.
    > In a future Python version they will be a SyntaxWarning and eventually a SyntaxError.
    
    [1] https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals
    
    Change-Id: Ia4f79f17ccb121f423f35b1e1306d5ae285e8762
    Reviewed-on: https://gerrit.libreoffice.org/c/core/+/98321
    Tested-by: Jenkins
    Reviewed-by: Noel Grandin <noel.grandin at collabora.co.uk>
    Reviewed-by: Mike Kaganski <mike.kaganski at collabora.com>
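
As a quick illustration of the point (a minimal sketch, not part of this commit; the macro name MY_MACRO is made up), the plain and raw spellings of such a regex currently compile to the same string -- the plain one merely emits the deprecation warning the commit is getting rid of:

    import re

    plain = "#define\s+(\w+)"   # unrecognized escapes: \s and \w are kept
                                # as-is today, but trigger the warning
                                # (DeprecationWarning, SyntaxWarning in
                                # newer Pythons)
    raw   = r"#define\s+(\w+)"  # raw string: backslashes reach re verbatim
    assert plain == raw         # identical pattern strings, for now

    print(re.match(raw, "#define MY_MACRO 1").group(1))  # prints MY_MACRO

The r'' form keeps the same behaviour once unrecognized escapes become a hard error, which is why the patches below mostly just add the prefix (or double the backslashes in non-regex strings such as the MSVC project paths).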

diff --git a/bin/check-implementer-notes.py b/bin/check-implementer-notes.py
index 10b7c168ba0e..234e447e7d19 100755
--- a/bin/check-implementer-notes.py
+++ b/bin/check-implementer-notes.py
@@ -11,7 +11,7 @@ wiki_pages = [
 
 # get all commit hashes mentioned in implementer notes
 wiki_commit_hashes = {}
-query = re.compile('\{\{commit\|(\\w+)\|\\w*\|\\w*\}\}', re.IGNORECASE)
+query = re.compile(r'\{\{commit\|(\\w+)\|\\w*\|\\w*\}\}', re.IGNORECASE)
 for page in wiki_pages:
     r = http.request('GET', page)
     data = json.loads(r.data.decode('utf-8'))
diff --git a/bin/extract-tooltip.py b/bin/extract-tooltip.py
index 5397c718ff2b..b313c9474af7 100755
--- a/bin/extract-tooltip.py
+++ b/bin/extract-tooltip.py
@@ -16,7 +16,7 @@ def parseFile(filename):
     data = [line.rstrip('\n') for line in data]
 
     pairs = {}
-    regEx = re.compile("^(\S+)\s(\S+)\s(\S+)\s((?:\s*\S*)+)$")
+    regEx = re.compile(r"^(\S+)\s(\S+)\s(\S+)\s((?:\s*\S*)+)$")
     old_line = None
     for line in data:
         if len(line) > 0:
diff --git a/bin/find-duplicated-sids.py b/bin/find-duplicated-sids.py
index 8f5e4ff9246f..ebffce230b09 100755
--- a/bin/find-duplicated-sids.py
+++ b/bin/find-duplicated-sids.py
@@ -30,8 +30,8 @@ def extractSidValue(sidValue):
     return rv
 
 
-#a = subprocess.Popen("git grep -P '#define\s+(SID_|SC_|DETECTIVE_|DRAWTEXTBAR_|DRAW_BAR_|RID_|OBJBAR_FORMAT_|TAB_POPUP_|DATA_MENU_|EXTRA_MENU_|FORMAT_MENU_|INSERT_MENU_|VIEW_MENU_|EDIT_MENU_|FILE_MENU_|SC_FUNCTION_|RC_)'", stdout=subprocess.PIPE, shell=True)
-a = subprocess.Popen("git grep -Pn '#define\s+(\S+)' -- *.hrc", stdout=subprocess.PIPE, shell=True)
+#a = subprocess.Popen(r"git grep -P '#define\s+(SID_|SC_|DETECTIVE_|DRAWTEXTBAR_|DRAW_BAR_|RID_|OBJBAR_FORMAT_|TAB_POPUP_|DATA_MENU_|EXTRA_MENU_|FORMAT_MENU_|INSERT_MENU_|VIEW_MENU_|EDIT_MENU_|FILE_MENU_|SC_FUNCTION_|RC_)'", stdout=subprocess.PIPE, shell=True)
+a = subprocess.Popen(r"git grep -Pn '#define\s+(\S+)' -- *.hrc", stdout=subprocess.PIPE, shell=True)
 
 with a.stdout as txt:
     for line in txt:
diff --git a/bin/find-german-comments b/bin/find-german-comments
index f26de960f2d9..98ae5acfcee7 100755
--- a/bin/find-german-comments
+++ b/bin/find-german-comments
@@ -96,12 +96,12 @@ class Parser:
                 # start of a real multiline comment
                 in_comment = True
                 linenum = count
-                s = re.sub(".*/\*+", "", i.strip(self.strip))
+                s = re.sub(r".*/\*+", "", i.strip(self.strip))
                 if len(s):
                     buf.append(s.strip(self.strip))
             elif in_comment and not "*/" in i:
                 # in multiline comment
-                s = re.sub("^( |\|)*\*?", "", i)
+                s = re.sub(r"^( |\|)*\*?", "", i)
                 if len(s.strip(self.strip)):
                     buf.append(s.strip(self.strip))
             elif "*/" in i and in_comment:
@@ -114,7 +114,7 @@ class Parser:
                 buf = []
             elif "/*" in i and "*/" in i:
                 # c-style oneliner comment
-                yield (count, re.sub(".*/\*(.*)\*/.*", r"\1", i).strip(self.strip))
+                yield (count, re.sub(r".*/\*(.*)\*/.*", r"\1", i).strip(self.strip))
             count += 1
 
     def start_text_cat(self):
diff --git a/bin/find-unused-defines.py b/bin/find-unused-defines.py
index 8e708c4a055d..6ac190c02638 100755
--- a/bin/find-unused-defines.py
+++ b/bin/find-unused-defines.py
@@ -99,9 +99,9 @@ def in_exclusion_set( a ):
     return False;
 
 # find defines, excluding the externals folder
-a = subprocess.Popen("git grep -hP '^#define\s+\w\w\w\w+\s*' -- \"[!e][!x][!t]*\" | sort -u", stdout=subprocess.PIPE, shell=True)
+a = subprocess.Popen(r"git grep -hP '^#define\s+\w\w\w\w+\s*' -- \"[!e][!x][!t]*\" | sort -u", stdout=subprocess.PIPE, shell=True)
 
-name_re = re.compile("#define\s+(\w+)")
+name_re = re.compile(r"#define\s+(\w+)")
 with a.stdout as txt:
     for line in txt:
         idName = name_re.match(line).group(1)
diff --git a/bin/find-unused-sid-commands.py b/bin/find-unused-sid-commands.py
index 32f45e0f810a..7cdf0cd3be5d 100755
--- a/bin/find-unused-sid-commands.py
+++ b/bin/find-unused-sid-commands.py
@@ -9,7 +9,7 @@
 import subprocess
 
 # search for entries in .sdi files that declare UNO/SID commands
-a = subprocess.Popen("git grep -P '^\s*\w+Item\s+\w+\s+SID_\w+$' -- *.sdi", stdout=subprocess.PIPE, shell=True)
+a = subprocess.Popen(r"git grep -P '^\s*\w+Item\s+\w+\s+SID_\w+$' -- *.sdi", stdout=subprocess.PIPE, shell=True)
 
 # parse out the UNO command names
 commandSet = list()
diff --git a/bin/find-unused-typedefs.py b/bin/find-unused-typedefs.py
index b07c16d2bcb1..bf88c83bc7bd 100755
--- a/bin/find-unused-typedefs.py
+++ b/bin/find-unused-typedefs.py
@@ -3,7 +3,7 @@
 import subprocess
 
 # find typedefs, excluding the externals folder
-a = subprocess.Popen("git grep -P 'typedef\s+.+\s+\w+;' -- \"[!e][!x][!t]*\"", stdout=subprocess.PIPE, shell=True)
+a = subprocess.Popen(r"git grep -P 'typedef\s+.+\s+\w+;' -- \"[!e][!x][!t]*\"", stdout=subprocess.PIPE, shell=True)
 
 # parse out the typedef names
 typedefSet = set()
diff --git a/bin/gbuild-to-ide b/bin/gbuild-to-ide
index f23592469fd9..7a6964abd454 100755
--- a/bin/gbuild-to-ide
+++ b/bin/gbuild-to-ide
@@ -94,12 +94,12 @@ class GbuildParser:
         (self.libs, self.exes, self.tests, self.modulenamelist) = ([], [], [], [])
         (self.target_by_path, self.target_by_location) = ({}, {})
 
-    includepattern = re.compile('-I(\S+)')
-    isystempattern = re.compile('-isystem\s*(\S+)')
-    warningpattern = re.compile('-W\S+')
-    libpattern = re.compile('Library_(.*)\.mk')
-    exepattern = re.compile('Executable_(.*)\.mk')
-    testpattern = re.compile('CppunitTest_(.*)\.mk')
+    includepattern = re.compile(r'-I(\S+)')
+    isystempattern = re.compile(r'-isystem\s*(\S+)')
+    warningpattern = re.compile(r'-W\S+')
+    libpattern = re.compile(r'Library_(.*)\.mk')
+    exepattern = re.compile(r'Executable_(.*)\.mk')
+    testpattern = re.compile(r'CppunitTest_(.*)\.mk')
 
     @staticmethod
     def __split_includes(includes):
@@ -1071,7 +1071,7 @@ class VisualStudioIntegrationGenerator(IdeIntegrationGenerator):
         proj_name_node = ET.SubElement(globals_node, '{%s}ProjectName' % ns)
         proj_name_node.text = target.short_name()
 
-        ET.SubElement(proj_node, '{%s}Import' % ns, Project='$(VCTargetsPath)\Microsoft.Cpp.Default.props')
+        ET.SubElement(proj_node, '{%s}Import' % ns, Project='$(VCTargetsPath)\\Microsoft.Cpp.Default.props')
         for configuration in self.configurations:
             conf_node = ET.SubElement(proj_node, '{%s}PropertyGroup' % ns, Label="Configuration",
                                       Condition="'$(Configuration)|$(Platform)'=='%s|%s'" % (configuration, platform))
@@ -1082,14 +1082,14 @@ class VisualStudioIntegrationGenerator(IdeIntegrationGenerator):
             platform_toolset_node = ET.SubElement(conf_node, '{%s}PlatformToolset' % ns)
             platform_toolset_node.text = self.toolset
 
-        ET.SubElement(proj_node, '{%s}Import' % ns, Project='$(VCTargetsPath)\Microsoft.Cpp.props')
+        ET.SubElement(proj_node, '{%s}Import' % ns, Project='$(VCTargetsPath)\\Microsoft.Cpp.props')
         ET.SubElement(proj_node, '{%s}ImportGroup' % ns, Label='ExtensionSettings')
         for configuration in self.configurations:
             prop_sheets_node = ET.SubElement(proj_node, '{%s}ImportGroup' % ns, Label='Configuration',
                                              Condition="'$(Configuration)|$(Platform)'=='%s|%s'" % (configuration, platform))
             ET.SubElement(prop_sheets_node, '{%s}Import' % ns,
-                          Project='$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props',
-                          Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')",
+                          Project='$(UserRootDir)\\Microsoft.Cpp.$(Platform).user.props',
+                          Condition="exists('$(UserRootDir)\\Microsoft.Cpp.$(Platform).user.props')",
                           Label='LocalAppDataPlatform')
 
         ET.SubElement(proj_node, '{%s}PropertyGroup' % ns, Label='UserMacros')
@@ -1156,7 +1156,7 @@ class VisualStudioIntegrationGenerator(IdeIntegrationGenerator):
             hfile = include_abs_path + '.h'
             if os.path.isfile(hfile):
                 ET.SubElement(includes_node, '{%s}ClInclude' % ns, Include=hfile)
-        ET.SubElement(proj_node, '{%s}Import' % ns, Project='$(VCTargetsPath)\Microsoft.Cpp.targets')
+        ET.SubElement(proj_node, '{%s}Import' % ns, Project='$(VCTargetsPath)\\Microsoft.Cpp.targets')
         ET.SubElement(proj_node, '{%s}ImportGroup' % ns, Label='ExtensionTargets')
         self.write_pretty_xml(proj_node, project_path)
         self.write_filters(project_path + '.filters',
diff --git a/bin/symbolstore.py b/bin/symbolstore.py
index 7ddd8d2ac234..1f80e2f69e99 100755
--- a/bin/symbolstore.py
+++ b/bin/symbolstore.py
@@ -316,7 +316,7 @@ def SourceIndex(fileStream, outputPath, cvs_root):
     pdbStreamFile = open(outputPath, "w")
     pdbStreamFile.write('''SRCSRV: ini ------------------------------------------------\r\nVERSION=1\r\nSRCSRV: variables ------------------------------------------\r\nCVS_EXTRACT_CMD=%fnchdir%(%targ%)cvs.exe -d %fnvar%(%var2%) checkout -r %var4% -d %var4% -N %var3%\r\nMYSERVER=''')
     pdbStreamFile.write(cvs_root)
-    pdbStreamFile.write('''\r\nSRCSRVTRG=%targ%\%var4%\%fnbksl%(%var3%)\r\nSRCSRVCMD=%CVS_EXTRACT_CMD%\r\nSRCSRV: source files ---------------------------------------\r\n''')
+    pdbStreamFile.write('''\r\nSRCSRVTRG=%targ%\\%var4%\\%fnbksl%(%var3%)\r\nSRCSRVCMD=%CVS_EXTRACT_CMD%\r\nSRCSRV: source files ---------------------------------------\r\n''')
     pdbStreamFile.write(fileStream) # can't do string interpolation because the source server also uses this and so there are % in the above
     pdbStreamFile.write("SRCSRV: end ------------------------------------------------\r\n\n")
     pdbStreamFile.close()
@@ -417,7 +417,7 @@ class Dumper:
                     # MODULE os cpu guid debug_file
                     (guid, debug_file) = (module_line.split())[3:5]
                     # strip off .pdb extensions, and append .sym
-                    sym_file = re.sub("\.pdb$", "", debug_file) + ".sym"
+                    sym_file = re.sub(r"\.pdb$", "", debug_file) + ".sym"
                     # we do want forward slashes here
                     rel_path = os.path.join(debug_file,
                                             guid,
diff --git a/bin/test-hid-vs-ui.py b/bin/test-hid-vs-ui.py
index 635a121ad1d1..ac3321649927 100755
--- a/bin/test-hid-vs-ui.py
+++ b/bin/test-hid-vs-ui.py
@@ -58,7 +58,7 @@ def init_core_files():
     elif not args['git_static']:
         subprocess.call(['git','fetch','origin'])
     allfiles = subprocess.check_output(['git','ls-tree','--name-only','--full-name','-r','master'])
-    return re.findall('.*\.ui',allfiles)
+    return re.findall(r'.*\.ui',allfiles)
 
 
 if __name__ == "__main__":
diff --git a/bin/update/uncompress_mar.py b/bin/update/uncompress_mar.py
index 0989c7e92d6d..02dafbaff30b 100755
--- a/bin/update/uncompress_mar.py
+++ b/bin/update/uncompress_mar.py
@@ -27,7 +27,7 @@ def extract_mar(mar_file, target_dir):
     subprocess.check_call([mar, "-C", convert_to_native(target_dir), "-x", convert_to_native(mar_file)])
     file_info = subprocess.check_output([mar, "-t", convert_to_native(mar_file)])
     lines = file_info.splitlines()
-    prog = re.compile("\d+\s+\d+\s+(.+)")
+    prog = re.compile(r"\d+\s+\d+\s+(.+)")
     for line in lines:
         match = prog.match(line.decode("utf-8", "strict"))
         if match is None:
diff --git a/bin/update_pch b/bin/update_pch
index e5939e7a441c..7411e79acb80 100755
--- a/bin/update_pch
+++ b/bin/update_pch
@@ -581,7 +581,7 @@ def parse_makefile(groups, lines, lineno, lastif, ifstack):
     ingeneratedobjects = False
     inelse = False
     suffix = 'cxx'
-    os_cond_re = re.compile('(ifeq|ifneq)\s*\(\$\(OS\)\,(\w*)\)')
+    os_cond_re = re.compile(r'(ifeq|ifneq)\s*\(\$\(OS\)\,(\w*)\)')
 
     line = lines[lineno]
     if line.startswith('if'):
diff --git a/compilerplugins/clang/constantparam.py b/compilerplugins/clang/constantparam.py
index a2a8207781be..78abc6a76b48 100755
--- a/compilerplugins/clang/constantparam.py
+++ b/compilerplugins/clang/constantparam.py
@@ -38,7 +38,7 @@ def RepresentsInt(s):
     except ValueError:
         return False
 
-constructor_regex = re.compile("^\w+\(\)$")
+constructor_regex = re.compile(r"^\w+\(\)$")
 
 tmp1list = list()
 tmp2list = list()
diff --git a/compilerplugins/clang/finalclasses.py b/compilerplugins/clang/finalclasses.py
index 524aec6f48a2..68c94d6d324b 100755
--- a/compilerplugins/clang/finalclasses.py
+++ b/compilerplugins/clang/finalclasses.py
@@ -32,8 +32,8 @@ with open("workdir/loplugin.finalclasses.log") as txt:
         else:
             print( "unknown line: " + line)
 
-match_module_inc1 = re.compile('^\w+/inc/')
-match_module_inc2 = re.compile('^\w+/.*/inc/')
+match_module_inc1 = re.compile(r'^\w+/inc/')
+match_module_inc2 = re.compile(r'^\w+/.*/inc/')
 tmpset = set()
 for clazz in sorted(definitionSet - inheritFromSet):
     file = definitionToFileDict[clazz]
diff --git a/compilerplugins/clang/pahole-all-classes.py b/compilerplugins/clang/pahole-all-classes.py
index 9f9ca86a237c..6037287a82ca 100755
--- a/compilerplugins/clang/pahole-all-classes.py
+++ b/compilerplugins/clang/pahole-all-classes.py
@@ -99,11 +99,11 @@ with open("compilerplugins/clang/pahole.results", "wt") as f:
 
         _thread.start_new_thread( write_pahole_commands, (currClassList,) )
 
-        firstLineRegex = re.compile("/\*\s+(\d+)\s+\*/ struct") # /* 16 */ struct Foo
-        fieldLineRegex = re.compile("/\*\s+(\d+)\s+(\d+)\s+\*/ ") # /* 12 8 */ class rtl::OUString aName
-        holeLineRegex = re.compile("/\* XXX (\d+) bit hole, try to pack \*/")
+        firstLineRegex = re.compile(r"/\*\s+(\d+)\s+\*/ struct") # /* 16 */ struct Foo
+        fieldLineRegex = re.compile(r"/\*\s+(\d+)\s+(\d+)\s+\*/ ") # /* 12 8 */ class rtl::OUString aName
+        holeLineRegex = re.compile(r"/\* XXX (\d+) bit hole, try to pack \*/")
         # sometimes pahole can't determine the size of a sub-struct, and then it returns bad data
-        bogusLineRegex = re.compile("/\*\s+\d+\s+0\s+\*/")
+        bogusLineRegex = re.compile(r"/\*\s+\d+\s+0\s+\*/")
         structLines = list()
         foundHole = False
         cumulativeHoleBits = 0
diff --git a/solenv/bin/native-code.py b/solenv/bin/native-code.py
index 63cd7413942a..b76c8ecacfe5 100755
--- a/solenv/bin/native-code.py
+++ b/solenv/bin/native-code.py
@@ -640,7 +640,7 @@ def limit_rdb(services_rdb, full_factory_map, full_constructor_map):
         uri = component.get('uri')
         component_name = None
         if uri != None:
-            component_name = re.sub('^vnd.sun.star.expand:\$LO_LIB_DIR/([^.]*).so$', '\\1.a', uri)
+            component_name = re.sub(r'^vnd.sun.star.expand:\$LO_LIB_DIR/([^.]*).so$', r'\1.a', uri)
         if component_name in full_factory_map:
             continue
 
diff --git a/solenv/gdb/boost/util/printing.py b/solenv/gdb/boost/util/printing.py
index 1d5d0bac9fc6..ca8b6c28eeca 100644
--- a/solenv/gdb/boost/util/printing.py
+++ b/solenv/gdb/boost/util/printing.py
@@ -42,7 +42,7 @@ class NameLookup(Mapping):
 
     def __init__(self):
         self.map = {}
-        self.name_regex = re.compile('^([\w:]+)(<.*>)?')
+        self.name_regex = re.compile(r'^([\w:]+)(<.*>)?')
 
     def add(self, name, printer):
         self.map[name] = printer
diff --git a/solenv/gdb/libreoffice/util/printing.py b/solenv/gdb/libreoffice/util/printing.py
index 9cbae3080a64..b44c1ec8bd5c 100644
--- a/solenv/gdb/libreoffice/util/printing.py
+++ b/solenv/gdb/libreoffice/util/printing.py
@@ -30,7 +30,7 @@ class NameLookup(Mapping):
 
     def __init__(self):
         self.map = {}
-        self.name_regex = re.compile('^([\w:]+)(<.*>)?')
+        self.name_regex = re.compile(r'^([\w:]+)(<.*>)?')
 
     def add(self, name, printer):
         self.map[name] = printer
diff --git a/sw/qa/python/check_styles.py b/sw/qa/python/check_styles.py
index c12d5ef0d708..c2ae21cf9899 100644
--- a/sw/qa/python/check_styles.py
+++ b/sw/qa/python/check_styles.py
@@ -38,7 +38,7 @@ class CheckStyle(unittest.TestCase):
             self.assertTrue(xStyleFamilies.supportsService(servicename))
         self.assertFalse(xStyleFamilies.supportsService("foobarbaz"))
         self.assertTrue(xStyleFamilies.hasElements())
-        self.assertRegex(str(xStyleFamilies.ElementType), "com\.sun\.star\.container\.XNameContainer")
+        self.assertRegex(str(xStyleFamilies.ElementType), r"com\.sun\.star\.container\.XNameContainer")
         self.assertEqual(len(xStyleFamilies.ElementNames), 7)
 
         for sFamilyname in xStyleFamilies.ElementNames:
@@ -58,7 +58,7 @@ class CheckStyle(unittest.TestCase):
             self.assertTrue(xFamily.supportsService(sServicename))
         self.assertFalse(xFamily.supportsService("foobarbaz"))
         self.assertTrue(xFamily.hasElements())
-        self.assertRegex(str(xFamily.ElementType), "com\.sun\.star\.style\.XStyle")
+        self.assertRegex(str(xFamily.ElementType), r"com\.sun\.star\.style\.XStyle")
 
         with self.assertRaises(NoSuchElementException):
             xFamily.getByName("foobarbaz")

