[Libreoffice-commits] dev-tools.git: 10 commits - esc-reporting/qa-tools.py qa/bugzillaAutomation.py qa/bugzillaChecker.py qa/bugzillaDataAnalyzer.py qa/common.py qa/createCrashesList.py qa/createMassPingLists.py qa/createWeeklyReport.py qa/createWikiStats.py

Xisco Fauli xiscofauli at libreoffice.org
Tue Feb 20 15:46:49 UTC 2018


 esc-reporting/qa-tools.py  | 1541 ---------------------------------------------
 qa/bugzillaAutomation.py   |  167 ++++
 qa/bugzillaChecker.py      |  465 +++++++++++++
 qa/bugzillaDataAnalyzer.py |  427 ++++++++++++
 qa/common.py               |  123 +++
 qa/createCrashesList.py    |   60 +
 qa/createMassPingLists.py  |   67 +
 qa/createWeeklyReport.py   |  382 +++++++++++
 qa/createWikiStats.py      |  414 ++++++++++++
 9 files changed, 2105 insertions(+), 1541 deletions(-)

New commits:
commit fd443069fe366da22ce75fda75e37feb571ac982
Author: Xisco Fauli <xiscofauli at libreoffice.org>
Date:   Tue Feb 20 16:39:09 2018 +0100

    QA: Move bugzilla automation to its own script

diff --git a/qa/bugzillaAutomation.py b/qa/bugzillaAutomation.py
new file mode 100755
index 0000000..b614b0b
--- /dev/null
+++ b/qa/bugzillaAutomation.py
@@ -0,0 +1,167 @@
+#!/usr/bin/env python3
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+import common
+import requests
+import datetime
+import os
+import json
+
+moveToNeedInfoComment = "I have set the bug's status to 'NEEDINFO'"
+
+needInfoFollowUpPingComment = "Dear Bug Submitter,\n\nPlease read this message in its entirety before proceeding."
+
+untouchedPeriodDays = 365
+
+#Path where addObsolete.txt is stored
+addObsoleteDir = '/home/xisco/dev-tools/qa'
+
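+# Stats collected by this script: comment ids to tag or untag as obsolete, plus ids of untouched bugs to ping.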
+def util_create_statList():
+    return {
+        'tags':
+            {
+                'addObsolete': set(),
+                'removeObsolete': set()
+            },
+        'untouched': []
+    }
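+
+# Walk every bug, collecting comment ids to (un)tag as obsolete and NEW bugs left untouched for over a year.
+# META and deletionrequest bugs are skipped.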
+def analyze_bugzilla(statList, bugzillaData, cfg):
+    print("Analyze bugzilla\n", end="", flush=True)
+    for key, row in bugzillaData['bugs'].items():
+
+        rowId = row['id']
+
+        #Ignore META bugs and deletionrequest bugs.
+        if not row['summary'].lower().startswith('[meta]') and row['component'] != 'deletionrequest':
+            rowStatus = row['status']
+            rowResolution = row['resolution']
+
+            if rowStatus == 'VERIFIED' or rowStatus == 'RESOLVED':
+                rowStatus += "_" + rowResolution
+
+            rowKeywords = row['keywords']
+
+            comments = row['comments'][1:]
+            for idx, comment in enumerate(comments):
+                #Check for duplicated comments
+                if idx > 0 and comment['text'] == comments[idx-1]['text']:
+                    statList['tags']['addObsolete'].add(comment["id"])
+
+                if rowStatus != 'NEEDINFO' and \
+                        "obsolete" not in [x.lower() for x in comment["tags"]] and \
+                        (comment["text"].startswith(common.untouchedPingComment[:250]) or \
+                        moveToNeedInfoComment in comment["text"] or \
+                        comment["text"].startswith("A polite ping, still working on this bug") or \
+                        comment["text"].startswith(common.needInfoPingComment) or \
+                        comment["text"].startswith(needInfoFollowUpPingComment)):
+                    statList['tags']['addObsolete'].add(comment["id"])
+
+            if len(comments) > 0:
+                if comments[-1]["text"].startswith(common.untouchedPingComment[:250]):
+
+                    if rowStatus != 'NEEDINFO':
+                        if "obsolete" not in [x.lower() for x in comments[-1]["tags"]]:
+                            statList['tags']['addObsolete'].remove(comments[-1]["id"])
+                        else:
+                            statList['tags']['removeObsolete'].add(comments[-1]["id"])
+                elif comments[-1]["text"].startswith(common.needInfoPingComment):
+                    if rowStatus != 'NEEDINFO':
+                        if "obsolete" not in [x.lower() for x in comments[-1]["tags"]]:
+                            statList['tags']['addObsolete'].remove(comments[-1]["id"])
+                        else:
+                            statList['tags']['removeObsolete'].add(comments[-1]["id"])
+                elif comments[-1]["text"].startswith(needInfoFollowUpPingComment) or \
+                        comments[-1]["text"].startswith("A polite ping, still working on this bug") or \
+                        moveToNeedInfoComment in comments[-1]["text"]:
+                    if rowStatus != 'NEEDINFO':
+                        if "obsolete" not in [x.lower() for x in comments[-1]["tags"]]:
+                            statList['tags']['addObsolete'].remove(comments[-1]["id"])
+                        else:
+                            statList['tags']['removeObsolete'].add(comments[-1]["id"])
+                else:
+                    if datetime.datetime.strptime(row['last_change_time'], "%Y-%m-%dT%H:%M:%SZ") < cfg['untouchedPeriod'] and \
+                            rowStatus == 'NEW' and 'needsUXEval' not in rowKeywords and 'easyHack' not in rowKeywords and \
+                            row['component'] != 'Documentation' and (row['product'] == 'LibreOffice' or \
+                            row['product'] == 'Impress Remote') and row['severity'] != 'enhancement':
+                        statList['untouched'].append(rowId)
+
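+# Post the mass-ping comment to every untouched bug, unless its last comment is already that ping.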
+def automated_untouched(statList):
+
+    print('== Untouched bugs ==')
+    for bugId in statList['untouched']:
+        bugId = str(bugId)
+        command = '{"comment" : "' + common.untouchedPingComment.replace('\n', '\\n') + '", "is_private" : false}'
+
+        urlGet = 'https://bugs.documentfoundation.org/rest/bug/' + bugId + '/comment?api_key=' + cfg['configQA']['api-key']
+        rGet = requests.get(urlGet)
+        rawData = json.loads(rGet.text)
+        rGet.close()
+
+        if rawData['bugs'][bugId]['comments'][-1]['text'][:250] != common.untouchedPingComment[:250]:
+            urlPost = 'https://bugs.documentfoundation.org/rest/bug/' + bugId + '/comment?api_key=' + cfg['configQA']['api-key']
+            rPost = requests.post(urlPost, command)
+            print('Bug: ' + bugId + ' - Comment: ' + str(json.loads(rPost.text)['id']))
+            rPost.close()
+
+def automated_tagging(statList):
+    #Tags are sometimes not saved in bugzilla_dump.json.
+    #Thus, store the ids of comments we automatically tag as obsolete
+    #so we don't tag them again on the next run.
+
+    print('== Obsolete comments ==')
+    lAddObsolete = []
+    filename = os.path.join(addObsoleteDir, "addObsolete.txt")
+    if os.path.exists(filename):
+        f = open(filename, 'r')
+        lAddObsolete = f.read().splitlines()
+        f.close()
+
+    for comment_id in list(statList['tags']['addObsolete']):
+        if str(comment_id) not in lAddObsolete:
+            command = '{"comment_id" : ' + str(comment_id) + ', "add" : ["obsolete"]}'
+            url = 'https://bugs.documentfoundation.org/rest/bug/comment/' + \
+                str(comment_id) + '/tags' + '?api_key=' + cfg['configQA']['api-key']
+            r = requests.put(url, command)
+            if os.path.exists(filename):
+                append_write = 'a'
+            else:
+                append_write = 'w'
+            f = open(filename,append_write)
+            f.write(str(comment_id) + '\n')
+            f.close()
+            print(str(comment_id) + ' - ' +  r.text)
+            r.close()
+
+    for comment_id in list(statList['tags']['removeObsolete']):
+        command = '{"comment_id" : ' + str(comment_id) + ', "remove" : ["obsolete"]}'
+        url = 'https://bugs.documentfoundation.org/rest/bug/comment/' + \
+                str(comment_id) + '/tags' + '?api_key=' + cfg['configQA']['api-key']
+        r = requests.put(url, command)
+        print(str(comment_id) + ' - ' +  r.text)
+        r.close()
+
+def runCfg():
+    cfg = common.get_config()
+    cfg['untouchedPeriod'] = common.util_convert_days_to_datetime(cfg, untouchedPeriodDays)
+
+    return cfg
+
+if __name__ == '__main__':
+    print("Reading and writing data to " + common.dataDir)
+
+    cfg = runCfg()
+
+    bugzillaData = common.get_bugzilla()
+
+    statList = util_create_statList()
+
+    analyze_bugzilla(statList, bugzillaData, cfg)
+
+    automated_tagging(statList)
+    automated_untouched(statList)
diff --git a/qa/bugzillaChecker.py b/qa/bugzillaChecker.py
index 91e04e6..cfd9c0b 100755
--- a/qa/bugzillaChecker.py
+++ b/qa/bugzillaChecker.py
@@ -29,6 +29,7 @@ untouchedPeriodDays = 365
 
 inactiveAssignedPeriodDays = 90
 
+reopened6MonthsComment = "This bug has been in RESOLVED FIXED status for more than 6 months."
 
 def util_create_statList_checkers():
     return {
@@ -253,7 +254,7 @@ def analyze_bugzilla_checkers(statList, bugzillaData, cfg):
 
                 common.util_check_bugzilla_mail(statList, commentMail, '', commentDate, rowId)
 
-                if common.isOpen(rowStatus) and common.reopened6MonthsComment in comment['text']:
+                if common.isOpen(rowStatus) and reopened6MonthsComment in comment['text']:
                     isReopened6Months = True
 
             if len(comments) > 0:
diff --git a/qa/common.py b/qa/common.py
index be51efd..f1a2e8e 100755
--- a/qa/common.py
+++ b/qa/common.py
@@ -7,11 +7,9 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 #
 
-import sys
 import os
 import datetime
 import json
-import requests
 from pyshorteners import Shortener
 
 #Path where bugzilla_dump.py is
@@ -20,7 +18,6 @@ dataDir = '/home/xisco/dev-tools/esc-reporting/dump/'
 #Path where configQA.json and addObsolete.txt are
 configDir = '/home/xisco/dev-tools/qa/'
 
-untouchedPeriodDays = 365
 
 priorities_list = ['highest','high','medium','low','lowest']
 
@@ -52,12 +49,6 @@ untouchedPingComment = "** Please read this message in its entirety before respo
 
 needInfoPingComment = "Dear Bug Submitter,\n\nThis bug has been in NEEDINFO status with no change for at least"
 
-needInfoFollowUpPingComment = "Dear Bug Submitter,\n\nPlease read this message in its entirety before proceeding."
-
-moveToNeedInfoComment = "I have set the bug's status to 'NEEDINFO'"
-
-reopened6MonthsComment = "This bug has been in RESOLVED FIXED status for more than 6 months."
-
 def util_convert_days_to_datetime(cfg, period):
     cfg['todayDate'] = datetime.datetime.now().replace(hour=0, minute=0,second=0)
     return cfg['todayDate'] - datetime.timedelta(days= period)
@@ -91,17 +82,6 @@ def util_create_person_bugzilla(email, name):
              'bugs': set()
         }
 
-
-def util_create_statList():
-    return {
-        'tags':
-            {
-                'addObsolete': set(),
-                'removeObsolete': set()
-            },
-        'stat': {'oldest': datetime.datetime.now(), 'newest': datetime.datetime(2001, 1, 1)}
-    }
-
 def util_check_bugzilla_mail(statList, mail, name, date=None, bug=None):
     if mail not in statList['people']:
         statList['people'][mail] = util_create_person_bugzilla(mail, name)
@@ -141,157 +121,3 @@ def isOpen(status):
 def isClosed(status):
     #Use row['status'], not rowStatus
     return status == 'VERIFIED' or status == 'RESOLVED' or status == 'CLOSED'
-
-def analyze_bugzilla(statList, bugzillaData, cfg):
-    print("Analyze bugzilla\n", end="", flush=True)
-    statNewDate = statList['stat']['newest']
-    statOldDate = statList['stat']['oldest']
-
-    for key, row in bugzillaData['bugs'].items():
-        rowId = row['id']
-
-        #Ignore META bugs and deletionrequest bugs.
-        if not row['summary'].lower().startswith('[meta]') and row['component'] != 'deletionrequest':
-            creationDate = datetime.datetime.strptime(row['creation_time'], "%Y-%m-%dT%H:%M:%SZ")
-            if creationDate < statOldDate:
-                statOldDate = creationDate
-            if creationDate > statNewDate:
-                statNewDate = creationDate
-
-            rowStatus = row['status']
-            rowResolution = row['resolution']
-
-            if rowStatus == 'VERIFIED' or rowStatus == 'RESOLVED':
-                rowStatus += "_" + rowResolution
-
-            rowKeywords = row['keywords']
-
-            comments = row['comments'][1:]
-            for idx, comment in enumerate(comments):
-                #Check for duplicated comments
-                if idx > 0 and comment['text'] == comments[idx-1]['text']:
-                        statList['tags']['addObsolete'].add(comment["id"])
-
-                if rowStatus != 'NEEDINFO' and \
-                        "obsolete" not in [x.lower() for x in comment["tags"]] and \
-                        (comment["text"].startswith(untouchedPingComment[:250]) or \
-                        moveToNeedInfoComment in comment["text"] or \
-                        comment["text"].startswith("A polite ping, still working on this bug") or \
-                        comment["text"].startswith(needInfoPingComment) or \
-                        comment["text"].startswith(needInfoFollowUpPingComment)):
-                    statList['tags']['addObsolete'].add(comment["id"])
-
-            if len(comments) > 0:
-                if comments[-1]["text"].startswith(untouchedPingComment[:250]):
-
-                    if rowStatus != 'NEEDINFO':
-                        if "obsolete" not in [x.lower() for x in comments[-1]["tags"]]:
-                            statList['tags']['addObsolete'].remove(comments[-1]["id"])
-                        else:
-                            statList['tags']['removeObsolete'].add(comments[-1]["id"])
-                elif comments[-1]["text"].startswith(needInfoPingComment):
-                    if rowStatus != 'NEEDINFO':
-                        if "obsolete" not in [x.lower() for x in comments[-1]["tags"]]:
-                            statList['tags']['addObsolete'].remove(comments[-1]["id"])
-                        else:
-                            statList['tags']['removeObsolete'].add(comments[-1]["id"])
-                elif comments[-1]["text"].startswith(needInfoFollowUpPingComment) or \
-                        comments[-1]["text"].startswith("A polite ping, still working on this bug") or \
-                        moveToNeedInfoComment in comments[-1]["text"]:
-                    if rowStatus != 'NEEDINFO':
-                        if "obsolete" not in [x.lower() for x in comments[-1]["tags"]]:
-                            statList['tags']['addObsolete'].remove(comments[-1]["id"])
-                        else:
-                            statList['tags']['removeObsolete'].add(comments[-1]["id"])
-                else:
-                    if datetime.datetime.strptime(row['last_change_time'], "%Y-%m-%dT%H:%M:%SZ") < cfg['untouchedPeriod'] and \
-                            rowStatus == 'NEW' and 'needsUXEval' not in rowKeywords and 'easyHack' not in rowKeywords and \
-                            row['component'] != 'Documentation' and (row['product'] == 'LibreOffice' or \
-                            row['product'] == 'Impress Remote') and row['severity'] != 'enhancement':
-                        statList['massping']['untouched'].append(rowId)
-
-    statList['stat']['newest'] = statNewDate.strftime("%Y-%m-%d")
-    statList['stat']['oldest'] = statOldDate.strftime("%Y-%m-%d")
-    print(" from " + statList['stat']['oldest'] + " to " + statList['stat']['newest'])
-
-def automated_massping(statList):
-
-    print('== Massping ==')
-    for bugId in statList['massping']['untouched']:
-        bugId = str(bugId)
-        command = '{"comment" : "' + untouchedPingComment.replace('\n', '\\n') + '", "is_private" : false}'
-
-        urlGet = 'https://bugs.documentfoundation.org/rest/bug/' + bugId + '/comment?api_key=' + cfg['configQA']['api-key']
-        rGet = requests.get(urlGet)
-        rawData = json.loads(rGet.text)
-        rGet.close()
-
-        if rawData['bugs'][bugId]['comments'][-1]['text'][:250] != untouchedPingComment[:250]:
-            urlPost = 'https://bugs.documentfoundation.org/rest/bug/' + bugId + '/comment?api_key=' + cfg['configQA']['api-key']
-            rPost = requests.post(urlPost, command)
-            print('Bug: ' + bugId + ' - Comment: ' + str(json.loads(rPost.text)['id']))
-            rPost.close()
-
-def automated_tagging(statList):
-    #tags are sometimes not saved in bugzilla_dump.json
-    #thus, save those comments automatically tagged as obsolete
-    #so we don't tag them again next time
-
-    print('== Obsolete comments ==')
-    lAddObsolete = []
-    filename = configDir + "addObsolete.txt"
-    if os.path.exists(filename):
-        f = open(filename, 'r')
-        lAddObsolete = f.read().splitlines()
-        f.close()
-
-    for comment_id in list(statList['tags']['addObsolete']):
-        if str(comment_id) not in lAddObsolete:
-            command = '{"comment_id" : ' + str(comment_id) + ', "add" : ["obsolete"]}'
-            url = 'https://bugs.documentfoundation.org/rest/bug/comment/' + \
-                str(comment_id) + '/tags' + '?api_key=' + cfg['configQA']['api-key']
-            r = requests.put(url, command)
-            if os.path.exists(filename):
-                append_write = 'a'
-            else:
-                append_write = 'w'
-            f = open(filename,append_write)
-            f.write(str(comment_id) + '\n')
-            f.close()
-            print(str(comment_id) + ' - ' +  r.text)
-            r.close()
-
-    for comment_id in list(statList['tags']['removeObsolete']):
-        command = '{"comment_id" : ' + str(comment_id) + ', "remove" : ["obsolete"]}'
-        url = 'https://bugs.documentfoundation.org/rest/bug/comment/' + \
-                str(comment_id) + '/tags' + '?api_key=' + cfg['configQA']['api-key']
-        r = requests.put(url, command)
-        print(str(comment_id) + ' - ' +  r.text)
-        r.close()
-
-def runCfg():
-    cfg = get_config()
-    cfg['untouchedPeriod'] = util_convert_days_to_datetime(cfg, untouchedPeriodDays)
-
-    return cfg
-
-if __name__ == '__main__':
-    print("Reading and writing data to " + dataDir)
-
-    cfg = runCfg()
-
-    bugzillaData = get_bugzilla()
-
-    statList = util_create_statList()
-
-    analyze_bugzilla(statList, bugzillaData, cfg)
-
-    if len(sys.argv) > 1:
-        if sys.argv[1] == 'automate':
-            automated_tagging(statList)
-            automated_massping(statList)
-        else:
-            print("You must use 'blog', 'target', 'period', 'users', 'massping', 'automate' as parameter.")
-            sys.exit(1)
-
-    print('End of report')
commit 02ddea9b338a9cadf330264db3b1d7f31c1b1053
Author: Xisco Fauli <xiscofauli at libreoffice.org>
Date:   Tue Feb 20 13:54:52 2018 +0100

    QA: move massping lists to their own script

diff --git a/qa/common.py b/qa/common.py
index a7a5dff..be51efd 100755
--- a/qa/common.py
+++ b/qa/common.py
@@ -20,8 +20,6 @@ dataDir = '/home/xisco/dev-tools/esc-reporting/dump/'
 #Path where configQA.json and addObsolete.txt are
 configDir = '/home/xisco/dev-tools/qa/'
 
-reportPeriodDays = 7
-
 untouchedPeriodDays = 365
 
 priorities_list = ['highest','high','medium','low','lowest']
@@ -96,14 +94,6 @@ def util_create_person_bugzilla(email, name):
 
 def util_create_statList():
     return {
-        'massping':
-            {
-                'needinfo': [],
-                'untouched': [],
-                '1year': [],
-                '2years': [],
-                '3years': []
-            },
         'tags':
             {
                 'addObsolete': set(),
@@ -176,16 +166,8 @@ def analyze_bugzilla(statList, bugzillaData, cfg):
 
             rowKeywords = row['keywords']
 
-            creatorMail = row['creator']
-
-            commentMail = None
             comments = row['comments'][1:]
             for idx, comment in enumerate(comments):
-                commentMail = comment['creator']
-                commentDate = datetime.datetime.strptime(comment['time'], "%Y-%m-%dT%H:%M:%SZ")
-
-                util_check_bugzilla_mail(statList, commentMail, '', commentDate, rowId)
-
                 #Check for duplicated comments
                 if idx > 0 and comment['text'] == comments[idx-1]['text']:
                         statList['tags']['addObsolete'].add(comment["id"])
@@ -202,23 +184,13 @@ def analyze_bugzilla(statList, bugzillaData, cfg):
             if len(comments) > 0:
                 if comments[-1]["text"].startswith(untouchedPingComment[:250]):
 
-                    if len(comments) > 1 and comments[-2]["text"].startswith(untouchedPingComment[:250]):
-                        if len(comments) > 2 and comments[-3]["text"].startswith(untouchedPingComment[:250]):
-                            statList['massping']['3years'].append(rowId)
-                        else:
-                            statList['massping']['2years'].append(rowId)
-                    else:
-                        statList['massping']['1year'].append(rowId)
-
                     if rowStatus != 'NEEDINFO':
                         if "obsolete" not in [x.lower() for x in comments[-1]["tags"]]:
                             statList['tags']['addObsolete'].remove(comments[-1]["id"])
                         else:
                             statList['tags']['removeObsolete'].add(comments[-1]["id"])
                 elif comments[-1]["text"].startswith(needInfoPingComment):
-                    if rowStatus == 'NEEDINFO':
-                        statList['massping']['needinfo'].append(rowId)
-                    else:
+                    if rowStatus != 'NEEDINFO':
                         if "obsolete" not in [x.lower() for x in comments[-1]["tags"]]:
                             statList['tags']['addObsolete'].remove(comments[-1]["id"])
                         else:
@@ -242,20 +214,6 @@ def analyze_bugzilla(statList, bugzillaData, cfg):
     statList['stat']['oldest'] = statOldDate.strftime("%Y-%m-%d")
     print(" from " + statList['stat']['oldest'] + " to " + statList['stat']['newest'])
 
-
-def massping_Report(statList):
-    fp = open('/tmp/massping_report.txt', 'w', encoding='utf-8')
-
-    print('* Massping Report from {} to {}'.format(cfg['reportPeriod'].strftime("%Y-%m-%d"), statList['stat']['newest']), file=fp )
-    for key, value in sorted(statList['massping'].items()):
-        print(file=fp)
-        print('* ' + key + ' - ' + str(len(value)) + ' bugs.', file=fp)
-        for i in range(0, len(value), 400):
-            subList = value[i:i + 400]
-            util_create_short_url(fp, subList)
-
-    fp.close()
-
 def automated_massping(statList):
 
     print('== Massping ==')
@@ -313,7 +271,6 @@ def automated_tagging(statList):
 
 def runCfg():
     cfg = get_config()
-    cfg['reportPeriod'] = util_convert_days_to_datetime(cfg, reportPeriodDays)
     cfg['untouchedPeriod'] = util_convert_days_to_datetime(cfg, untouchedPeriodDays)
 
     return cfg
@@ -330,9 +287,7 @@ if __name__ == '__main__':
     analyze_bugzilla(statList, bugzillaData, cfg)
 
     if len(sys.argv) > 1:
-        if sys.argv[1] == 'massping':
-            massping_Report(statList)
-        elif sys.argv[1] == 'automate':
+        if sys.argv[1] == 'automate':
             automated_tagging(statList)
             automated_massping(statList)
         else:
diff --git a/qa/createMassPingLists.py b/qa/createMassPingLists.py
new file mode 100755
index 0000000..619b650
--- /dev/null
+++ b/qa/createMassPingLists.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python3
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+import common
+
+def util_create_statList():
+    return {
+        'needinfo': [],
+        '1year': [],
+        '2years': [],
+        '3years': []
+    }
+
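+# Bucket bugs by how many consecutive mass-ping comments they end with (1year/2years/3years) and
+# collect NEEDINFO bugs whose last comment is the NEEDINFO ping.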
+def analyze_bugzilla(statList, bugzillaData):
+    print("Analyze bugzilla\n", end="", flush=True)
+
+    for key, row in bugzillaData['bugs'].items():
+        #Ignore META bugs and deletionrequest bugs.
+        if not row['summary'].lower().startswith('[meta]') and row['component'] != 'deletionrequest':
+            rowId = row['id']
+
+            comments = row['comments'][1:]
+
+            if len(comments) > 0:
+                if comments[-1]["text"].startswith(common.untouchedPingComment[:250]):
+
+                    if len(comments) > 1 and comments[-2]["text"].startswith(common.untouchedPingComment[:250]):
+                        if len(comments) > 2 and comments[-3]["text"].startswith(common.untouchedPingComment[:250]):
+                            statList['3years'].append(rowId)
+                        else:
+                            statList['2years'].append(rowId)
+                    else:
+                        statList['1year'].append(rowId)
+
+                elif comments[-1]["text"].startswith(common.needInfoPingComment):
+                    if row['status'] == 'NEEDINFO':
+                        statList['needinfo'].append(rowId)
+
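+# Write each list to /tmp/massping_report.txt as shortened Bugzilla buglist links, 400 bug ids per link.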
+def massping_Report(statList):
+    fp = open('/tmp/massping_report.txt', 'w', encoding='utf-8')
+
+    for key, value in sorted(statList.items()):
+        print(file=fp)
+        print('* ' + key + ' - ' + str(len(value)) + ' bugs.', file=fp)
+        for i in range(0, len(value), 400):
+            subList = value[i:i + 400]
+            common.util_create_short_url(fp, subList)
+
+    fp.close()
+
+if __name__ == '__main__':
+    print("Reading and writing data to " + common.dataDir)
+
+
+    bugzillaData = common.get_bugzilla()
+
+    statList = util_create_statList()
+
+    analyze_bugzilla(statList, bugzillaData)
+
+    massping_Report(statList)
commit f6e08544f41f4225abd66c4675eb0cf60cee7e99
Author: Xisco Fauli <xiscofauli at libreoffice.org>
Date:   Wed Feb 14 22:44:34 2018 +0100

    QA: move the data analyzer to its own script

diff --git a/qa/bugzillaDataAnalyzer.py b/qa/bugzillaDataAnalyzer.py
new file mode 100755
index 0000000..44cdcc5
--- /dev/null
+++ b/qa/bugzillaDataAnalyzer.py
@@ -0,0 +1,427 @@
+#!/usr/bin/env python3
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+import common
+import datetime
+
+reportPeriodDays = 365
+
+# Use enhancements, bugs, all
+kindOfData = ['enhancements', 'bugs', 'all']
+
+lKeywords = ['haveBacktrace', 'regression', 'bisected']
+
+
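+# Per-action counters: bug ids and authors plus breakdowns by week, month, component, product, system,
+# platform, status and resolution, and the days elapsed since creation ('difftime').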
+def util_create_basic_schema():
+    return {
+        'id': [],
+        'author': [],
+        'split_week': {},
+        'split_month': {},
+        'component': {},
+        'product': {p : 0 for p in common.product_list},
+        'system': {s : 0 for s in common.system_list},
+        'platform': {},
+        'status': {s : 0 for s in common.statutes_list},
+        'resolution': {},
+        'difftime': []
+        }
+
+def util_create_ticket_schema():
+    return {
+        'created': util_create_basic_schema(),
+        'confirmed': util_create_basic_schema(),
+        'closed': util_create_basic_schema(),
+        'fixed': util_create_basic_schema(),
+        'keywords': { k : util_create_basic_schema() for k in lKeywords},
+        }
+
+def util_create_statList():
+    return {
+        'enhancements' : util_create_ticket_schema(),
+        'bugs' : util_create_ticket_schema(),
+        'all' : util_create_ticket_schema(),
+        'people' : {},
+        'stat': {'oldest': datetime.datetime.now(), 'newest': datetime.datetime(2001, 1, 1)}
+    }
+
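+# Record one event (created/confirmed/closed/fixed/keyword) in the given schema, bucketed by week and month.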
+def util_increase_action(value, rowId, creatorMail, status, product,
+        component, resolution, platform, system, week, month, difftime=-1):
+    value['id'].append(rowId)
+    value['author'].append(creatorMail)
+    value['status'][status] += 1
+    value['product'][product] += 1
+
+    if component not in value['component']:
+        value['component'][component] = 0
+    value['component'][component] += 1
+
+    if resolution not in value['resolution']:
+        value['resolution'][resolution] = 0
+    value['resolution'][resolution] += 1
+
+    if platform not in value['platform']:
+        value['platform'][platform] = 0
+    value['platform'][platform] += 1
+
+    if system not in value['system']:
+        value['system'][system] = 0
+    value['system'][system] += 1
+
+    if week not in value['split_week']:
+        value['split_week'][week] = 0
+    value['split_week'][week] += 1
+
+    if month not in value['split_month']:
+        value['split_month'][month] = 0
+    value['split_month'][month] += 1
+
+    if difftime >= 0:
+        value['difftime'].append(difftime)
+
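+# Undo the most recently recorded event, e.g. when a bug is unconfirmed again or a later close/fix supersedes it.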
+def util_decrease_action(value, rowId, creatorMail, status, product,
+        component, resolution, platform, system, week, month):
+    value['id'].pop()
+    value['author'].pop()
+    value['status'][status] -= 1
+    value['product'][product] -= 1
+    value['component'][component] -= 1
+    value['resolution'][resolution] -= 1
+    value['platform'][platform] -= 1
+    value['system'][system] -= 1
+    value['split_week'][week] -= 1
+    value['split_month'][month] -= 1
+
+    if value['difftime']:
+        value['difftime'].pop()
+
+def check_kindOfTicket(severity):
+    if severity == 'enhancement':
+        return 'enhancements'
+    else:
+        return 'bugs'
+
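+# Walk every bug and its history, recording created/confirmed/closed/fixed events and keyword additions
+# that fall inside the report period, split per ticket kind plus an 'all' aggregate.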
+def analyze_bugzilla_data(statList, bugzillaData, cfg):
+    print("Analyzing bugzilla\n", end="", flush=True)
+    statNewDate = statList['stat']['newest']
+    statOldDate = statList['stat']['oldest']
+
+    statList['addDate'] = datetime.date.today().strftime('%Y-%m-%d')
+
+    for key, row in bugzillaData['bugs'].items():
+        rowId = row['id']
+
+        #Ignore META bugs and deletionrequest bugs.
+        if not row['summary'].lower().startswith('[meta]') and row['component'] != 'deletionrequest':
+            creationDate = datetime.datetime.strptime(row['creation_time'], "%Y-%m-%dT%H:%M:%SZ")
+            if creationDate < statOldDate:
+                statOldDate = creationDate
+            if creationDate > statNewDate:
+                statNewDate = creationDate
+
+            rowStatus = row['status']
+            rowResolution = row['resolution']
+
+            if rowStatus == 'VERIFIED' or rowStatus == 'RESOLVED':
+                rowStatus += "_" + rowResolution
+
+            rowKeywords = row['keywords']
+
+            creatorMail = row['creator']
+
+            kindOfTicket = check_kindOfTicket(row['severity'])
+            rowComponent = row['component']
+            rowPlatform = row['platform']
+            rowSystem = row['op_sys']
+            rowProduct = row['product']
+
+            #get information about created bugs in reportPeriod
+            if creationDate >= cfg['reportPeriod']:
+                week = str(creationDate.year) + '-' + str(creationDate.strftime("%V"))
+                month = str(creationDate.year) + '-' + str(creationDate.strftime("%m"))
+                util_increase_action(statList[kindOfTicket]['created'], rowId, creatorMail, rowStatus, rowProduct,
+                    rowComponent, rowResolution, rowPlatform, rowSystem, week, month)
+
+                util_increase_action(statList['all']['created'], rowId, creatorMail, rowStatus, rowProduct,
+                    rowComponent, rowResolution, rowPlatform, rowSystem, week, month)
+
+            common.util_check_bugzilla_mail(
+                    statList, creatorMail, row['creator_detail']['real_name'], creationDate, rowId)
+
+            isFixed = False
+            isClosed = False
+            isConfirmed = False
+            weekConfirmed = None
+            monthConfirmed = None
+            weekClosed = None
+            monthClosed = None
+            weekFixed = None
+            monthFixed = None
+            # actionMail is read after the history loop; initialize it for bugs with an empty history
+            actionMail = None
+
+            for action in row['history']:
+                actionMail = action['who']
+                actionDate = datetime.datetime.strptime(action['when'], "%Y-%m-%dT%H:%M:%SZ")
+                common.util_check_bugzilla_mail(
+                        statList, actionMail, '', actionDate, rowId)
+
+                # Use this variable in case the status is set before the resolution
+                newStatus = None
+                for change in action['changes']:
+                    if change['field_name'] == 'is_confirmed':
+                        if actionDate >= cfg['reportPeriod'] and row['is_confirmed']:
+                            if change['added'] == "1":
+                                weekConfirmed = str(actionDate.year) + '-' + str(actionDate.strftime("%V"))
+                                monthConfirmed = str(actionDate.year) + '-' + str(actionDate.strftime("%m"))
+                                difftimeConfirmed = (actionDate - creationDate).days
+                                util_increase_action(statList[kindOfTicket]['confirmed'], rowId, actionMail, rowStatus, rowProduct,
+                                    rowComponent, rowResolution, rowPlatform, rowSystem, weekConfirmed, monthConfirmed, difftimeConfirmed)
+
+                                util_increase_action(statList['all']['confirmed'], rowId, actionMail, rowStatus, rowProduct,
+                                    rowComponent, rowResolution, rowPlatform, rowSystem, weekConfirmed, monthConfirmed, difftimeConfirmed)
+
+                                isConfirmed = True
+
+                            elif isConfirmed:
+                                util_decrease_action(statList[kindOfTicket]['confirmed'], rowId, creatorMail, rowStatus, rowProduct,
+                                    rowComponent, rowResolution, rowPlatform, rowSystem, weekConfirmed, monthConfirmed)
+
+                                util_decrease_action(statList['all']['confirmed'], rowId, creatorMail, rowStatus, rowProduct,
+                                    rowComponent, rowResolution, rowPlatform, rowSystem, weekConfirmed, monthConfirmed)
+
+                                isConfirmed = False
+
+                    if change['field_name'] == 'status':
+                        addedStatus = change['added']
+                        removedStatus = change['removed']
+
+                        if actionDate >= cfg['reportPeriod'] and common.isOpen(removedStatus) and \
+                                common.isClosed(addedStatus) and common.isClosed(row['status']):
+                            if isClosed:
+                                util_decrease_action(statList[kindOfTicket]['closed'], rowId, creatorMail, rowStatus, rowProduct,
+                                    rowComponent, rowResolution, rowPlatform, rowSystem, weekClosed, monthClosed)
+
+                                util_decrease_action(statList['all']['closed'], rowId, creatorMail, rowStatus, rowProduct,
+                                    rowComponent, rowResolution, rowPlatform, rowSystem, weekClosed, monthClosed)
+
+                            weekClosed = str(actionDate.year) + '-' + str(actionDate.strftime("%V"))
+                            monthClosed = str(actionDate.year) + '-' + str(actionDate.strftime("%m"))
+                            difftimeClosed = (actionDate - creationDate).days
+                            util_increase_action(statList[kindOfTicket]['closed'], rowId, actionMail, rowStatus, rowProduct,
+                                rowComponent, rowResolution, rowPlatform, rowSystem, weekClosed, monthClosed, difftimeClosed)
+
+                            util_increase_action(statList['all']['closed'], rowId, actionMail, rowStatus, rowProduct,
+                                rowComponent, rowResolution, rowPlatform, rowSystem, weekClosed, monthClosed, difftimeClosed)
+
+                            isClosed = True
+
+                        if addedStatus == 'RESOLVED' or addedStatus == 'VERIFIED':
+                            if rowResolution:
+                                addedStatus = addedStatus + "_" + rowResolution
+                            else:
+                                newStatus = addedStatus
+
+                        if actionDate >= cfg['reportPeriod'] and addedStatus == 'RESOLVED_FIXED' and \
+                                removedStatus != 'REOPENED' and row['resolution'] == 'FIXED':
+                            if isFixed:
+                                util_decrease_action(statList[kindOfTicket]['fixed'], rowId, creatorMail, rowStatus, rowProduct,
+                                    rowComponent, rowResolution, rowPlatform, rowSystem, weekFixed, monthFixed)
+
+                                util_decrease_action(statList['all']['fixed'], rowId, creatorMail, rowStatus, rowProduct,
+                                    rowComponent, rowResolution, rowPlatform, rowSystem, weekFixed, monthFixed)
+
+                            weekFixed = str(actionDate.year) + '-' + str(actionDate.strftime("%V"))
+                            monthFixed = str(actionDate.year) + '-' + str(actionDate.strftime("%m"))
+                            difftimeFixed = (actionDate - creationDate).days
+                            util_increase_action(statList[kindOfTicket]['fixed'], rowId, actionMail, rowStatus, rowProduct,
+                                rowComponent, rowResolution, rowPlatform, rowSystem, weekFixed, monthFixed, difftimeFixed)
+
+                            util_increase_action(statList['all']['fixed'], rowId, actionMail, rowStatus, rowProduct,
+                                rowComponent, rowResolution, rowPlatform, rowSystem, weekFixed, monthFixed, difftimeFixed)
+
+                            isFixed = True
+
+                    elif change['field_name'] == 'resolution':
+                        if newStatus:
+                            addedStatus = newStatus + "_" + change['added']
+
+                            newStatus = None
+
+                    elif change['field_name'] == 'keywords':
+                        keywordsAdded = change['added'].split(", ")
+                        for keyword in keywordsAdded:
+                            if keyword in lKeywords:
+                                if actionDate >= cfg['reportPeriod'] and keyword in rowKeywords:
+                                    weekKeyword = str(actionDate.year) + '-' + str(actionDate.strftime("%V"))
+                                    monthKeyword = str(actionDate.year) + '-' + str(actionDate.strftime("%m"))
+                                    difftimeKeyword = (actionDate - creationDate).days
+                                    util_increase_action(statList[kindOfTicket]['keywords'][keyword], rowId, actionMail, rowStatus, rowProduct,
+                                        rowComponent, rowResolution, rowPlatform, rowSystem, weekKeyword, monthKeyword, difftimeKeyword)
+
+                                    util_increase_action(statList['all']['keywords'][keyword], rowId, actionMail, rowStatus, rowProduct,
+                                        rowComponent, rowResolution, rowPlatform, rowSystem, weekKeyword, monthKeyword, difftimeKeyword)
+
+            commentMail = None
+            comments = row['comments'][1:]
+            for idx, comment in enumerate(comments):
+                commentMail = comment['creator']
+                commentDate = datetime.datetime.strptime(comment['time'], "%Y-%m-%dT%H:%M:%SZ")
+
+                common.util_check_bugzilla_mail(
+                        statList, commentMail, '', commentDate, rowId)
+
+            for person in row['cc_detail']:
+                email = person['email']
+                if commentMail == email or actionMail == email:
+                    common.util_check_bugzilla_mail(statList, email, person['real_name'])
+
+    for k, v in statList['people'].items():
+        if not statList['people'][k]['name']:
+            statList['people'][k]['name'] = statList['people'][k]['email'].split('@')[0]
+
+        statList['people'][k]['oldest'] = statList['people'][k]['oldest'].strftime("%Y-%m-%d")
+        statList['people'][k]['newest'] = statList['people'][k]['newest'].strftime("%Y-%m-%d")
+
+
+    statList['stat']['newest'] = statNewDate.strftime("%Y-%m-%d")
+    statList['stat']['oldest'] = statOldDate.strftime("%Y-%m-%d")
+    print(" from " + statList['stat']['oldest'] + " to " + statList['stat']['newest'])
+
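+# Dump one action's stats to /tmp/data_<action>_<kind>_report.txt: totals, top authors, weekly and monthly
+# splits, per-field breakdowns and a histogram of days between creation and the action.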
+def util_print_QA_line_data(statList, dValue, kind, action, total_count):
+
+    fileName = '/tmp/data_' + action + '_' + kind + '_report.txt'
+    fp = open(fileName, 'w', encoding='utf-8')
+    print('Creating ' + action + ' ' + kind + ' report in ' + fileName)
+
+    print(('  * {} {} {}.').format(len(dValue['id']), kind, action), file=fp)
+
+    #Count the number of reps
+    my_dict = {i: dValue['author'].count(i) for i in dValue['author']}
+
+    d_view = [(v, k) for k, v in my_dict.items()]
+    d_view.sort(reverse=True)
+
+    print('  * Total users: {}'.format(len(d_view)), file=fp)
+
+    usersString = '  * Done by: \n'
+    count = 0
+    for i1,i2 in d_view:
+        try:
+            count += 1
+            if count <= total_count:
+                usersString += statList['people'][i2]['name'] + ' ( ' + str(i1) + ' ) \n'
+            else:
+                break
+        except:
+            continue
+
+    print(usersString[:-2], file=fp)
+
+    print(file=fp)
+    print('   * {} {} by week'.format(kind, action), file=fp)
+    for key, value in sorted(dValue['split_week'].items()):
+        print('{}: {}'.format(key, value), file=fp)
+
+
+    print(file=fp)
+    print('   * {} {} by month'.format(kind, action), file=fp)
+
+    for key, value in sorted(dValue['split_month'].items()):
+        print('{}: {}'.format(key, value), file=fp)
+
+    print(file=fp)
+    print('   * Components of {} {}'.format(kind, action), file=fp)
+    util_print_QA_line(fp, dValue['component'])
+
+    print(file=fp)
+    print('   * Systems of {} {}'.format(kind, action), file=fp)
+    util_print_QA_line(fp, dValue['system'])
+
+    print(file=fp)
+    print('   * Platforms of {} {}'.format(kind, action), file=fp)
+    util_print_QA_line(fp, dValue['platform'])
+
+    print(file=fp)
+    print('   * Statuses of {} {}'.format(kind, action), file=fp)
+    util_print_QA_line(fp, dValue['status'])
+
+    print(file=fp)
+    print('   * Products of {} {}'.format(kind, action), file=fp)
+    util_print_QA_line(fp, dValue['product'])
+
+    print(file=fp)
+    print('   * Resolution of {} {}'.format(kind, action), file=fp)
+    util_print_QA_line(fp, dValue['resolution'])
+    print(file=fp)
+
+    if 'difftime' in dValue and dValue['difftime']:
+        sortList = sorted(dValue['difftime'])
+        rangeList = sortList[-1] - sortList[0]
+        subLists = {}
+        for i in sortList:
+            timePeriod = ''
+            if i < 1:
+                timePeriod = '1. 1 day'
+            elif i < 7:
+                timePeriod = '2. 7 days'
+            elif i < 30:
+                timePeriod = '3. 1 month'
+            elif i < 90:
+                timePeriod = '4. 3 months'
+            elif i < 180:
+                timePeriod = '5. 6 months'
+            elif i < 365:
+                timePeriod = '6. 1 year'
+            elif i < 1095:
+                timePeriod = '7. 3 years'
+            else:
+                timePeriod = '8. Older'
+            if timePeriod not in subLists:
+                subLists[timePeriod] = []
+            subLists[timePeriod].append(i)
+
+        print('  * Times: ', file=fp)
+        for k,v in sorted(subLists.items()):
+            print(str(k) + ' : ' + str(len(v)), file=fp)
+    fp.close()
+
+def util_print_QA_line(fp, dValue ):
+    s = [(k, dValue[k]) for k in sorted(dValue, key=dValue.get, reverse=True)]
+    for k, v in s:
+        if v > 0:
+            print('{}: {}'.format(k, v), file=fp)
+
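+# Generate one report per ticket kind and action (keyword actions included), listing the top 10 contributors.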
+def data_Report(statList) :
+    for kind in kindOfData:
+        for k,v in statList[kind].items():
+            if k == 'keywords':
+                for kKey, vKey in v.items():
+                    util_print_QA_line_data(statList, vKey, kind, kKey, 10)
+            else:
+                util_print_QA_line_data(statList, v, kind, k, 10)
+
+def runCfg():
+    cfg = {}
+    cfg['reportPeriod'] = common.util_convert_days_to_datetime(cfg, reportPeriodDays)
+
+    return cfg
+
+if __name__ == '__main__':
+    print("Reading and writing data to " + common.dataDir)
+
+    cfg = runCfg()
+
+    bugzillaData = common.get_bugzilla()
+
+    statList = util_create_statList()
+
+    analyze_bugzilla_data(statList, bugzillaData, cfg)
+
+    data_Report(statList)
+
+    print('End of report')
diff --git a/qa/common.py b/qa/common.py
index 0b3ca37..a7a5dff 100755
--- a/qa/common.py
+++ b/qa/common.py
@@ -28,6 +28,9 @@ priorities_list = ['highest','high','medium','low','lowest']
 
 severities_list = ['blocker', 'critical', 'major', 'normal', 'minor', 'trivial','enhancement']
 
+product_list = ['cppunit', 'LibreOffice', 'LibreOffice Online', 'Document Liberation Project', 'Impress Remote',
+        'libexttextcat', 'QA Tools']
+
 statutes_list = ['UNCONFIRMED', 'NEW', 'CLOSED', 'NEEDINFO', 'REOPENED', 'ASSIGNED', 'RESOLVED_FIXED',
         'RESOLVED_DUPLICATE', 'RESOLVED_WORKSFORME', 'RESOLVED_NOTABUG', 'RESOLVED_NOTOURBUG', 'RESOLVED_WONTFIX',
         'RESOLVED_INVALID', 'RESOLVED_MOVED', 'RESOLVED_INSUFFICIENTDATA', 'VERIFIED_FIXED', 'VERIFIED_DUPLICATE',
@@ -58,6 +61,7 @@ moveToNeedInfoComment = "I have set the bug's status to 'NEEDINFO'"
 reopened6MonthsComment = "This bug has been in RESOLVED FIXED status for more than 6 months."
 
 def util_convert_days_to_datetime(cfg, period):
+    cfg['todayDate'] = datetime.datetime.now().replace(hour=0, minute=0,second=0)
     return cfg['todayDate'] - datetime.timedelta(days= period)
 
 def util_load_file(fileName):
@@ -92,46 +96,6 @@ def util_create_person_bugzilla(email, name):
 
 def util_create_statList():
     return {
-        'bugs':
-        {
-            'all':
-                {
-                    'status': {s:0 for s in statutes_list},
-                },
-            'created':
-                {
-                    'id': [],
-                    'author': [],
-                    'enhancement_count': 0,
-                    'no_enhancement_count': 0,
-                    'split_week': {},
-                    'split_month': {},
-                    'component': {},
-                    'system': {p:0 for p in system_list},
-                    'platform': {},
-                    'status': {s:0 for s in statutes_list},
-                    'resolution': {},
-                    'unconfirmed': []
-                },
-            'closed':
-                {
-                    'status': {s:0 for s in statutes_list},
-                    'split_week': {}
-                },
-            'confirmed':
-                {
-                    'id': [],
-                    'author': [],
-                    'status': {s:0 for s in statutes_list},
-                    'difftime': []
-                },
-            'fixed':
-                {
-                    'id': [],
-                    'author': [],
-                    'difftime': []
-                },
-        },
         'massping':
             {
                 'needinfo': [],
@@ -145,7 +109,6 @@ def util_create_statList():
                 'addObsolete': set(),
                 'removeObsolete': set()
             },
-        'people': {},
         'stat': {'oldest': datetime.datetime.now(), 'newest': datetime.datetime(2001, 1, 1)}
     }
 
@@ -165,6 +128,15 @@ def util_check_bugzilla_mail(statList, mail, name, date=None, bug=None):
     if bug:
        statList['people'][mail]['bugs'].add(bug)
 
+def util_create_short_url(fp, lBugs, text='Link'):
+    url = "https://bugs.documentfoundation.org/buglist.cgi?bug_id="
+    for bug in lBugs:
+        url += str(bug) + "%2C"
+
+    url = url[:-3]
+    shortener = Shortener('Tinyurl', timeout=9000)
+    print('\t\t+ ' + text + ': ' + shortener.short(url), file=fp)
+
 def get_bugzilla():
     fileName = dataDir + 'bugzilla_dump.json'
     return util_load_file(fileName)
@@ -185,8 +157,6 @@ def analyze_bugzilla(statList, bugzillaData, cfg):
     statNewDate = statList['stat']['newest']
     statOldDate = statList['stat']['oldest']
 
-    statList['addDate'] = datetime.date.today().strftime('%Y-%m-%d')
-
     for key, row in bugzillaData['bugs'].items():
         rowId = row['id']
 
@@ -204,141 +174,10 @@ def analyze_bugzilla(statList, bugzillaData, cfg):
             if rowStatus == 'VERIFIED' or rowStatus == 'RESOLVED':
                 rowStatus += "_" + rowResolution
 
-            statList['bugs']['all']['status'][rowStatus] += 1
-
             rowKeywords = row['keywords']
 
             creatorMail = row['creator']
 
-            #get information about created bugs in reportPeriod
-            if creationDate >= cfg['reportPeriod']:
-                if row['severity'] == 'enhancement':
-                    statList['bugs']['created']['enhancement_count'] += 1
-                else:
-                    statList['bugs']['created']['no_enhancement_count'] += 1
-
-                component = row['component']
-                if component not in statList['bugs']['created']['component']:
-                    statList['bugs']['created']['component'][component] = 0
-                statList['bugs']['created']['component'][component] += 1
-
-                statList['bugs']['created']['status'][rowStatus] += 1
-
-                if isClosed(row['status']):
-                    if rowResolution not in statList['bugs']['created']['resolution']:
-                        statList['bugs']['created']['resolution'][rowResolution] = 0
-                    statList['bugs']['created']['resolution'][rowResolution] += 1
-
-                platform = row['platform']
-                if platform not in statList['bugs']['created']['platform']:
-                    statList['bugs']['created']['platform'][platform] = 0
-                statList['bugs']['created']['platform'][platform] += 1
-
-                system = row['op_sys']
-                if system not in statList['bugs']['created']['system']:
-                    statList['bugs']['created']['system'][system] = 0
-                statList['bugs']['created']['system'][system] += 1
-
-                statList['bugs']['created']['id'].append(rowId)
-                statList['bugs']['created']['author'].append(creatorMail)
-
-                if rowStatus == 'UNCONFIRMED':
-                    statList['bugs']['created']['unconfirmed'].append(rowId)
-
-                week = str(creationDate.year) + '-' + str(creationDate.strftime("%V"))
-                if week not in statList['bugs']['created']['split_week']:
-                    statList['bugs']['created']['split_week'][week] = 0
-                statList['bugs']['created']['split_week'][week] += 1
-
-                month = str(creationDate.year) + '-' + str(creationDate.strftime("%m"))
-                if month not in statList['bugs']['created']['split_month']:
-                    statList['bugs']['created']['split_month'][month] = 0
-                statList['bugs']['created']['split_month'][month] += 1
-
-
-            whiteboard_list = row['whiteboard'].split(' ')
-            bugTargets = []
-            for whiteboard in whiteboard_list:
-                if whiteboard.startswith("target:"):
-                    bugVersion = whiteboard.split(':')[1][:5]
-                    if bugVersion in targets_list:
-                        bugTargets.append(bugVersion)
-                        statList['targets'][bugVersion]['count'] += 1
-
-            for period in periods_list:
-                if creationDate >= cfg[period]:
-                    statList['period'][period]['count'] += 1
-
-            util_check_bugzilla_mail(statList, creatorMail, row['creator_detail']['real_name'], creationDate, rowId)
-
-            if isOpen(rowStatus) and len(row['cc']) >= 10:
-                statList['MostCCBugs'][rowId] = util_create_bug(
-                        row['summary'], row['component'], row['version'], rowKeywords, creationDate, len(row['cc']))
-
-            isFixed = False
-            bResolved = False
-            isConfirmed = False
-
-            for action in row['history']:
-                actionMail = action['who']
-                actionDate = datetime.datetime.strptime(action['when'], "%Y-%m-%dT%H:%M:%SZ")
-                util_check_bugzilla_mail(statList, actionMail, '', actionDate, rowId)
-
-                # Use this variable in case the status is set before the resolution
-                newStatus = None
-                for change in action['changes']:
-                    if change['field_name'] == 'is_confirmed':
-                        if actionDate >= cfg['reportPeriod']:
-                            if change['added'] == "1":
-                                statList['bugs']['confirmed']['id'].append(rowId)
-                                statList['bugs']['confirmed']['author'].append(actionMail)
-                                statList['bugs']['confirmed']['status'][rowStatus] += 1
-                                isConfirmed = True
-                                statList['bugs']['confirmed']['difftime'].append((actionDate - creationDate).days)
-                            elif isConfirmed:
-                                statList['bugs']['confirmed']['id'].pop()
-                                statList['bugs']['confirmed']['author'].pop()
-                                statList['bugs']['confirmed']['status'][rowStatus] -= 1
-                                isConfirmed = False
-                                statList['bugs']['confirmed']['difftime'].pop()
-
-                    if change['field_name'] == 'status':
-                        addedStatus = change['added']
-                        removedStatus = change['removed']
-
-                        if actionDate >= cfg['reportPeriod'] and not bResolved and isClosed(addedStatus) and isClosed(row['status']):
-                            bResolved = True
-                            week = str(actionDate.year) + '-' + str(actionDate.strftime("%V"))
-                            if week not in statList['bugs']['closed']['split_week']:
-                                statList['bugs']['closed']['split_week'][week] = 0
-                            statList['bugs']['closed']['split_week'][week] += 1
-
-                            statList['bugs']['closed']['status'][rowStatus] += 1
-
-                        if  addedStatus == 'RESOLVED' or addedStatus == 'VERIFIED':
-                            if(rowResolution):
-                                addedStatus = addedStatus + "_" + rowResolution
-                            else:
-                                newStatus = addedStatus
-
-                        if actionDate >= cfg['reportPeriod'] and addedStatus == 'RESOLVED_FIXED' and \
-                                removedStatus != 'REOPENED' and row['resolution'] == 'FIXED':
-                            if isFixed:
-                                statList['bugs']['fixed']['id'].pop()
-                                statList['bugs']['fixed']['author'].pop()
-                                statList['bugs']['fixed']['difftime'].pop()
-
-                            statList['bugs']['fixed']['id'].append(rowId)
-                            statList['bugs']['fixed']['author'].append(actionMail)
-                            statList['bugs']['fixed']['difftime'].append((actionDate - creationDate).days)
-                            isFixed = True
-
-                    elif change['field_name'] == 'resolution':
-                        if newStatus:
-                            addedStatus = newStatus + "_" + change['added']
-
-                            newStatus = None
-
             commentMail = None
             comments = row['comments'][1:]
             for idx, comment in enumerate(comments):
@@ -399,108 +238,10 @@ def analyze_bugzilla(statList, bugzillaData, cfg):
                             row['product'] == 'Impress Remote') and row['severity'] != 'enhancement':
                         statList['massping']['untouched'].append(rowId)
 
-            for person in row['cc_detail']:
-                email = person['email']
-                if commentMail == email or actionMail == email:
-                    util_check_bugzilla_mail(statList, email, person['real_name'])
-
-    for k, v in statList['people'].items():
-        if not statList['people'][k]['name']:
-            statList['people'][k]['name'] = statList['people'][k]['email'].split('@')[0]
-
-        statList['people'][k]['oldest'] = statList['people'][k]['oldest'].strftime("%Y-%m-%d")
-        statList['people'][k]['newest'] = statList['people'][k]['newest'].strftime("%Y-%m-%d")
-
-
     statList['stat']['newest'] = statNewDate.strftime("%Y-%m-%d")
     statList['stat']['oldest'] = statOldDate.strftime("%Y-%m-%d")
     print(" from " + statList['stat']['oldest'] + " to " + statList['stat']['newest'])
 
-def util_create_short_url(fp, lBugs, text='Link'):
-    url = "https://bugs.documentfoundation.org/buglist.cgi?bug_id="
-    for bug in lBugs:
-        url += str(bug) + "%2C"
-
-    url = url[:-3]
-    shortener = Shortener('Tinyurl', timeout=9000)
-    print('\t\t+ ' + text + ': ' + shortener.short(url), file=fp)
-
-def util_print_QA_line_blog(fp, statList, dValue, total_count):
-
-    if len(dValue['id']) > 1:
-        auxString = 'bugs.'
-    else:
-        auxString = "bug."
-
-    print(('  * {} ' + auxString).format(len(dValue['id'])), file=fp)
-
-    #Count the number of reps
-    my_dict = {i: dValue['author'].count(i) for i in dValue['author']}
-
-    d_view = [(v, k) for k, v in my_dict.items()]
-    d_view.sort(reverse=True)
-
-    print('  * Total users: {}'.format(len(d_view)), file=fp)
-
-    usersString = '  * Done by: \n'
-    count = 0
-    for i1,i2 in d_view:
-        try:
-            count += 1
-            if count <= total_count:
-                usersString += '      ' +  statList['people'][i2]['name'] + ' ( ' + str(i1) + ' ) \n'
-            else:
-                break
-        except:
-            continue
-
-    print(usersString[:-2], file=fp)
-
-    if 'status' in dValue:
-        print('  * Status: ', file=fp)
-        for k,v in dValue['status'].items():
-            print('      ' + str(k) + ' : ' + str(v), file=fp)
-
-    print(file=fp)
-
-    if 'difftime' in dValue:
-        sortList = sorted(dValue['difftime'])
-        rangeList = sortList[-1] - sortList[0]
-        subLists = {}
-        for i in sortList:
-            timePeriod = ''
-            if i < 1:
-                timePeriod = '0001day'
-            elif i < 3:
-                timePeriod = '0003days'
-            elif i < 7:
-                timePeriod = '0007days'
-            elif i < 30:
-                timePeriod = '0030days'
-            elif i < 90:
-                timePeriod = '0090days'
-            elif i < 180:
-                timePeriod = '0180days'
-            elif i < 365:
-                timePeriod = '0365days'
-            elif i < 1095:
-                timePeriod = '1095days'
-            else:
-                timePeriod = 'older'
-            if timePeriod not in subLists:
-                subLists[timePeriod] = []
-            subLists[timePeriod].append(i)
-
-        print('  * Times: ', file=fp)
-        for k,v in sorted(subLists.items()):
-            print('      ' + str(k) + ' : ' + str(len(v)), file=fp)
-
-def util_print_QA_line_created(fp, dValue ):
-    others = 0
-    s = [(k, dValue[k]) for k in sorted(dValue, key=dValue.get, reverse=True)]
-    total = 0
-    for k, v in s:
-        print('      {}: {}'.format(k, v), file=fp)
 
 def massping_Report(statList):
     fp = open('/tmp/massping_report.txt', 'w', encoding='utf-8')
@@ -570,104 +311,11 @@ def automated_tagging(statList):
         print(str(comment_id) + ' - ' +  r.text)
         r.close()
 
-def users_Report(statList):
-    print('Users report from {} to {}'.format(cfg['newUserPeriod'].strftime("%Y-%m-%d"), statList['stat']['newest']))
-    #fp = open('/tmp/users_report.txt', 'w', encoding='utf-8')
-
-    print('{} new users in the last {} days'.format(len(statList['newUsersPeriod']), cfg['newUserPeriod']))
-
-    for v,k in statList['newUsersPeriod'].items():
-        print(v)
-
-
-def Blog_Report(statList) :
-    fp = open('/tmp/blog_report.txt', 'w', encoding='utf-8')
-
-    print('* Report from {} to {}'.format(cfg['reportPeriod'].strftime("%Y-%m-%d"), statList['stat']['newest']), file=fp )
-
-    print('* Total reports created: {}'.format(len(statList['bugs']['created']['id'])), file=fp)
-
-    print('* Total enhancements created: {}'.format(statList['bugs']['created']['enhancement_count']), file=fp)
-
-    print('* Total bugs created: {}'.format(statList['bugs']['created']['no_enhancement_count']), file=fp)
-    print(file=fp)
-
-    print('* Bugs reported.', file=fp)
-    util_print_QA_line_blog(fp, statList, statList['bugs']['created'], 15)
-
-    print(file=fp)
-    print('* Bugs confirmed.', file=fp)
-    util_print_QA_line_blog(fp, statList, statList['bugs']['confirmed'], 20)
-
-    print(file=fp)
-    print('* Bugs fixed.', file=fp)
-    util_print_QA_line_blog(fp, statList, statList['bugs']['fixed'], 20)
-
-    print(file=fp)
-    for key, value in sorted(statList['weeklyReport']['keyword_added'].items()):
-        if value and key in ['easyHack', 'bisected', 'haveBacktrace', 'regression']:
-            print('* ' + key + '.', file=fp)
-            util_print_QA_line_blog(fp, statList, value, 15)
-
-    print(file=fp)
-    for key, value in sorted(statList['weeklyReport']['status_changed'].items()):
-        if value and key in ['RESOLVED_DUPLICATE', 'VERIFIED_FIXED']:
-            print('* ' + key.replace("_", " ") + '.', file=fp)
-            util_print_QA_line_blog(fp, statList, value, 20)
-
-    print(file=fp)
-    print('* Bugs created by week', file=fp)
-
-    for key, value in sorted(statList['bugs']['created']['split_week'].items()):
-        print('{}: {}'.format(key, value), file=fp)
-
-    print(file=fp)
-    print('* Bugs created by month', file=fp)
-
-    for key, value in sorted(statList['bugs']['created']['split_month'].items()):
-        print('{}: {}'.format(key, value), file=fp)
-
-    print(file=fp)
-    print('* Components of created bugs', file=fp)
-    util_print_QA_line_created(fp, statList['bugs']['created']['component'])
-
-    print(file=fp)
-    print('* Systems of created bugs', file=fp)
-    util_print_QA_line_created(fp, statList['bugs']['created']['system'])
-
-    print(file=fp)
-    print('* Platforms of created bugs', file=fp)
-    util_print_QA_line_created(fp, statList['bugs']['created']['platform'])
-
-    print(file=fp)
-    print('* Statuses of created bugs', file=fp)
-    util_print_QA_line_created(fp, statList['bugs']['created']['status'])
-
-    print(file=fp)
-    print('* Resolution of created bugs', file=fp)
-    util_print_QA_line_created(fp, statList['bugs']['created']['resolution'])
-    print(file=fp)
-
-    print('* Bugs moved to resolved by week', file=fp)
-
-    for key, value in sorted(statList['bugs']['closed']['split_week'].items()):
-        print('{}: {}'.format(key, value), file=fp)
-
-    print(file=fp)
-    print('* Statuses of bugs moved to resolved', file=fp)
-    util_print_QA_line_created(fp, statList['bugs']['closed']['status'])
-
-    fp.close()
-
 def runCfg():
     cfg = get_config()
-    cfg['todayDate'] = datetime.datetime.now().replace(hour=0, minute=0,second=0)
     cfg['reportPeriod'] = util_convert_days_to_datetime(cfg, reportPeriodDays)
     cfg['untouchedPeriod'] = util_convert_days_to_datetime(cfg, untouchedPeriodDays)
 
-    for period in periods_list:
-        cfg[period] = util_convert_days_to_datetime(cfg, period)
-
     return cfg
 
 if __name__ == '__main__':
@@ -682,11 +330,7 @@ if __name__ == '__main__':
     analyze_bugzilla(statList, bugzillaData, cfg)
 
     if len(sys.argv) > 1:
-        if sys.argv[1] == 'blog':
-            Blog_Report(statList)
-        elif sys.argv[1] == 'user':
-            users_Report(statList)
-        elif sys.argv[1] == 'massping':
+        if sys.argv[1] == 'massping':
             massping_Report(statList)
         elif sys.argv[1] == 'automate':
             automated_tagging(statList)
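
A side note on the closed-by-week bucketing above: the week key is built as calendar year + ISO week number (%V), so a date sitting on an ISO year boundary ends up in a slightly odd bucket. A small, self-contained illustration (hypothetical date; %G is shown only as the ISO-year counterpart of %V):

    import datetime

    def week_key(actionDate):
        # Same key format as the report code above: calendar year + ISO week number.
        return str(actionDate.year) + '-' + str(actionDate.strftime("%V"))

    # 2018-12-31 belongs to ISO week 01 of ISO year 2019, so the key mixes the years:
    print(week_key(datetime.datetime(2018, 12, 31)))           # -> 2018-01
    print(datetime.datetime(2018, 12, 31).strftime("%G-%V"))   # -> 2019-01
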
commit 5e50980353b2ad335aac7d7acdb335f80a14a3d1
Author: Xisco Fauli <xiscofauli at libreoffice.org>
Date:   Wed Feb 14 00:25:47 2018 +0100

    QA: Move wiki stats to their own script
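
    The wiki-stats script split out below counts actions against per-period cutoffs (cfg[30], cfg[60], ...). Those cutoffs come from util_convert_days_to_datetime, which lies outside this excerpt; a minimal sketch, assuming it simply subtracts the given number of days from cfg['todayDate']:

        import datetime

        periods_list = [30, 60, 90, 180, 365]

        def util_convert_days_to_datetime(cfg, days):
            # Assumption: the cutoff is "today minus N days", anchored on cfg['todayDate'].
            return cfg['todayDate'] - datetime.timedelta(days=days)

        cfg = {'todayDate': datetime.datetime.now().replace(hour=0, minute=0, second=0)}
        for period in periods_list:
            cfg[period] = util_convert_days_to_datetime(cfg, period)
        # Anything with actionTime >= cfg[period] then counts toward that period's table.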

diff --git a/qa/common.py b/qa/common.py
index 5b90510..0b3ca37 100755
--- a/qa/common.py
+++ b/qa/common.py
@@ -11,9 +11,8 @@ import sys
 import os
 import datetime
 import json
-from pyshorteners import Shortener
 import requests
-from tabulate import tabulate
+from pyshorteners import Shortener
 
 #Path where bugzilla_dump.py is
 dataDir = '/home/xisco/dev-tools/esc-reporting/dump/'
@@ -25,10 +24,6 @@ reportPeriodDays = 7
 
 untouchedPeriodDays = 365
 
-targets_list = ['5.4.4', '6.0.0']
-
-periods_list = [30, 60, 90, 180]
-
 priorities_list = ['highest','high','medium','low','lowest']
 
 severities_list = ['blocker', 'critical', 'major', 'normal', 'minor', 'trivial','enhancement']
@@ -94,31 +89,7 @@ def util_create_person_bugzilla(email, name):
              'bugs': set()
         }
 
-def util_create_detailed_person(email):
-    return { 'email': email,
-             'bugs': [],
-             'created': 0,
-             'comments':0,
-             'status_changed': 0,
-             'keyword_added': 0,
-             'keyword_removed': 0,
-             'whiteboard_added': 0,
-             'whiteboard_removed': 0,
-             'severity_changed': 0,
-             'priority_changed': 0,
-             'system_changed': 0,
-             'metabug_added': 0,
-             'metabug_removed': 0
-         }
-
-def util_create_bug(summary, component, version, keywords, creationDate, count_cc):
-    return { 'summary': summary,
-             'component': component,
-             'version': version,
-             'keywords': keywords,
-             'creationDate': creationDate,
-             'count': count_cc
-        }
+
 def util_create_statList():
     return {
         'bugs':
@@ -160,7 +131,6 @@ def util_create_statList():
                     'author': [],
                     'difftime': []
                 },
-            'metabugAlias': {}
         },
         'massping':
             {
@@ -176,11 +146,6 @@ def util_create_statList():
                 'removeObsolete': set()
             },
         'people': {},
-        'targets': {t:{'count':0, 'people':{}} for t in targets_list},
-        'period': {p:{'count':0, 'people':{}} for p in periods_list},
-        'MostCCBugs': {},
-        'dupesBugs': {},
-        'MostDupeBugs': {},
         'stat': {'oldest': datetime.datetime.now(), 'newest': datetime.datetime(2001, 1, 1)}
     }
 
@@ -215,35 +180,6 @@ def isClosed(status):
     #Use row['status'], not rowStatus
     return status == 'VERIFIED' or status == 'RESOLVED' or status == 'CLOSED'
 
-def util_increase_user_actions(statList, bug, mail, targets, action, actionTime):
-    for target in targets:
-        if mail not in statList['targets'][target]['people']:
-            statList['targets'][target]['people'][mail] = util_create_detailed_person(mail)
-
-        statList['targets'][target]['people'][mail][action] += 1
-        statList['targets'][target]['people'][mail]['bugs'].append(bug)
-
-    for period in periods_list:
-        if actionTime >= cfg[period]:
-            if mail not in statList['period'][period]['people']:
-                statList['period'][period]['people'][mail] = util_create_detailed_person(mail)
-
-            statList['period'][period]['people'][mail][action] += 1
-            statList['period'][period]['people'][mail]['bugs'].append(bug)
-
-def util_check_duplicated(bugID, isFirst=True):
-    rowDupeOf = bugzillaData['bugs'][str(bugID)]['dupe_of']
-    if rowDupeOf:
-        if str(rowDupeOf) in bugzillaData['bugs']:
-            return util_check_duplicated(rowDupeOf, False)
-        else:
-            return bugID
-    else:
-        if isFirst:
-            return None
-        else:
-            return bugID
-
 def analyze_bugzilla(statList, bugzillaData, cfg):
     print("Analyze bugzilla\n", end="", flush=True)
     statNewDate = statList['stat']['newest']
@@ -334,30 +270,11 @@ def analyze_bugzilla(statList, bugzillaData, cfg):
                     statList['period'][period]['count'] += 1
 
             util_check_bugzilla_mail(statList, creatorMail, row['creator_detail']['real_name'], creationDate, rowId)
-            util_increase_user_actions(statList, key, creatorMail, bugTargets, 'created', creationDate)
 
             if isOpen(rowStatus) and len(row['cc']) >= 10:
                 statList['MostCCBugs'][rowId] = util_create_bug(
                         row['summary'], row['component'], row['version'], rowKeywords, creationDate, len(row['cc']))
 
-            rowDupeOf = util_check_duplicated(rowId)
-            if rowDupeOf:
-                if rowDupeOf not in statList['dupesBugs']:
-                    statList['dupesBugs'][rowDupeOf] = []
-                statList['dupesBugs'][rowDupeOf].append(rowId)
-
-                if str(rowDupeOf) in bugzillaData['bugs'] and \
-                        isOpen(bugzillaData['bugs'][str(rowDupeOf)]['status']):
-                    if rowDupeOf not in statList['MostDupeBugs']:
-                        statList['MostDupeBugs'][rowDupeOf] = util_create_bug(
-                        bugzillaData['bugs'][str(rowDupeOf)]['summary'],
-                        bugzillaData['bugs'][str(rowDupeOf)]['component'],
-                        bugzillaData['bugs'][str(rowDupeOf)]['version'],
-                        bugzillaData['bugs'][str(rowDupeOf)]['keywords'],
-                        datetime.datetime.strptime(
-                            bugzillaData['bugs'][str(rowDupeOf)]['creation_time'], "%Y-%m-%dT%H:%M:%SZ"), 1)
-
-
             isFixed = False
             bResolved = False
             isConfirmed = False
@@ -370,19 +287,6 @@ def analyze_bugzilla(statList, bugzillaData, cfg):
                 # Use this variable in case the status is set before the resolution
                 newStatus = None
                 for change in action['changes']:
-                    if change['field_name'] == 'blocks':
-                        if change['added']:
-                            for metabug in change['added'].split(', '):
-                                continue
-                                #TODO
-                                #util_increase_user_actions(statList, key, actionMail, bugTargets, 'metabug_added', actionDate)
-
-                        if change['removed']:
-                            for metabug in change['removed'].split(', '):
-                                continue
-                                #TODO
-                                #util_increase_user_actions(statList, key, actionMail, bugTargets, 'metabug_added', actionDate)
-
                     if change['field_name'] == 'is_confirmed':
                         if actionDate >= cfg['reportPeriod']:
                             if change['added'] == "1":
@@ -402,9 +306,6 @@ def analyze_bugzilla(statList, bugzillaData, cfg):
                         addedStatus = change['added']
                         removedStatus = change['removed']
 
-                        if rowStatus == 'ASSIGNED' and addedStatus == 'ASSIGNED':
-                            lastAssignedEmail = actionMail
-
                         if actionDate >= cfg['reportPeriod'] and not bResolved and isClosed(addedStatus) and isClosed(row['status']):
                             bResolved = True
                             week = str(actionDate.year) + '-' + str(actionDate.strftime("%V"))
@@ -417,11 +318,8 @@ def analyze_bugzilla(statList, bugzillaData, cfg):
                         if  addedStatus == 'RESOLVED' or addedStatus == 'VERIFIED':
                             if(rowResolution):
                                 addedStatus = addedStatus + "_" + rowResolution
-                                util_increase_user_actions(statList, key, actionMail, bugTargets, 'status_changed', actionDate)
                             else:
                                 newStatus = addedStatus
-                        else:
-                            util_increase_user_actions(statList, key, actionMail, bugTargets, 'status_changed', actionDate)
 
                         if actionDate >= cfg['reportPeriod'] and addedStatus == 'RESOLVED_FIXED' and \
                                 removedStatus != 'REOPENED' and row['resolution'] == 'FIXED':
@@ -438,58 +336,9 @@ def analyze_bugzilla(statList, bugzillaData, cfg):
                     elif change['field_name'] == 'resolution':
                         if newStatus:
                             addedStatus = newStatus + "_" + change['added']
-                            util_increase_user_actions(statList, key, actionMail, bugTargets, 'status_changed', actionDate)
 
                             newStatus = None
 
-                    elif change['field_name'] == 'priority':
-                        newPriority = change['added']
-                        util_increase_user_actions(statList, key, actionMail, bugTargets, 'priority_changed', actionDate)
-
-                    elif change['field_name'] == 'severity':
-                        newSeverity = change['added']
-                        util_increase_user_actions(statList, key, actionMail, bugTargets, 'severity_changed', actionDate)
-
-                    elif change['field_name'] == 'keywords':
-                        keywordsAdded = change['added'].split(", ")
-                        for keyword in keywordsAdded:
-                            if keyword in keywords_list:
-                                util_increase_user_actions(statList, key, actionMail, bugTargets, 'keyword_added', actionDate)
-
-                        keywordsRemoved = change['removed'].split(", ")
-                        for keyword in keywordsRemoved:
-                            if keyword in keywords_list:
-                                util_increase_user_actions(statList, key, actionMail, bugTargets, 'keyword_removed', actionDate)
-
-
-                    elif change['field_name'] == 'whiteboard':
-                        for whiteboard in change['added'].split(' '):
-                            if 'backportrequest' in whiteboard.lower():
-                                util_increase_user_actions(statList, rowId, actionMail, bugTargets, 'whiteboard_added', actionDate)
-
-                        for whiteboard in change['removed'].split(' '):
-                            if 'backportrequest' in whiteboard.lower():
-                                util_increase_user_actions(statList, rowId, actionMail, bugTargets, 'whiteboard_removed', actionDate)
-
-                    elif change['field_name'] == 'op_sys':
-                        newSystem = change['added']
-                        util_increase_user_actions(statList, rowId, actionMail, bugTargets, 'system_changed', actionDate)
-
-                    elif change['field_name'] == 'assigned_to':
-                        if actionDate >= cfg['reportPeriod']:
-                            removedAssignee = change['removed']
-                            addedAssignee = change['added']
-                            if  removedAssignee == "libreoffice-bugs at lists.freedesktop.org" and \
-                                    row['assigned_to'] != 'libreoffice-bugs at lists.freedesktop.org' and \
-                                    ( rowStatus == 'NEW' or rowStatus == 'UNCONFIRMED'):
-                                addAssigned = True
-                                addAssignedMail = actionMail
-                            if addedAssignee == "libreoffice-bugs at lists.freedesktop.org" and \
-                                    row['assigned_to'] == 'libreoffice-bugs at lists.freedesktop.org' and \
-                                    rowStatus == 'ASSIGNED':
-                                removeAssigned = True
-                                removeAssignedMail = actionMail
-
             commentMail = None
             comments = row['comments'][1:]
             for idx, comment in enumerate(comments):
@@ -498,8 +347,6 @@ def analyze_bugzilla(statList, bugzillaData, cfg):
 
                 util_check_bugzilla_mail(statList, commentMail, '', commentDate, rowId)
 
-                util_increase_user_actions(statList, rowId, commentMail, bugTargets, 'comments', commentDate)
-
                 #Check for duplicated comments
                 if idx > 0 and comment['text'] == comments[idx-1]['text']:
                         statList['tags']['addObsolete'].add(comment["id"])
@@ -557,9 +404,6 @@ def analyze_bugzilla(statList, bugzillaData, cfg):
                 if commentMail == email or actionMail == email:
                     util_check_bugzilla_mail(statList, email, person['real_name'])
 
-        elif row['summary'].lower().startswith('[meta]'):
-            statList['bugs']['metabugAlias'][rowId] = row['alias']
-
     for k, v in statList['people'].items():
         if not statList['people'][k]['name']:
             statList['people'][k]['name'] = statList['people'][k]['email'].split('@')[0]
@@ -572,53 +416,6 @@ def analyze_bugzilla(statList, bugzillaData, cfg):
     statList['stat']['oldest'] = statOldDate.strftime("%Y-%m-%d")
     print(" from " + statList['stat']['oldest'] + " to " + statList['stat']['newest'])
 
-def util_print_QA_line_weekly(fp, statList, dValue, action, isMetabug=False):
-
-    #Replace metabugs keys by aliases
-    if isMetabug:
-        dValueAux = {}
-        for key, value in dValue.items():
-            if int(key) in statList['bugs']['metabugAlias'] and \
-                    statList['bugs']['metabugAlias'][int(key)]:
-                dValueAux[statList['bugs']['metabugAlias'][int(key)][0]] = dValue[key]
-        dValue = dValueAux
-
-    for key, value in sorted(dValue.items()):
-        if value['id']:
-            nBugs = len(value['id'])
-            if nBugs == 1:
-                aux1 = 'bug has'
-                aux2 = 'bug'
-            else:
-                aux1 = "bugs have"
-                aux2 = 'bugs'
-
-            if action == 'added' or action == 'removed':
-                aux3 = 'to'
-                if action == 'removed':
-                    aux3 = 'from'
-                print(('  * \'{}\' has been {} {} {} {}.').format(key, action, aux3, nBugs, aux2), file=fp)
-            else:
-                print(('  * {} {} been changed to \'{}\'.').format(nBugs, aux1, key.replace('_', ' ')), file=fp)
-
-            util_create_short_url(fp, value['id'])
-            #Count the number of reps
-            my_dict = {i: value['author'].count(i) for i in value['author']}
-
-            d_view = [(v, k) for k, v in my_dict.items()]
-
-            d_view.sort(reverse=True)
-            usersString = '\t\t+ Done by: '
-
-            for i1,i2 in d_view:
-                try:
-                    usersString += statList['people'][i2]['name'] + ' ( ' + str(i1) + ' ), '
-                except:
-                    continue
-
-            print(usersString[:-2], file=fp)
-            print(file=fp)
-
 def util_create_short_url(fp, lBugs, text='Link'):
     url = "https://bugs.documentfoundation.org/buglist.cgi?bug_id="
     for bug in lBugs:
@@ -705,166 +502,6 @@ def util_print_QA_line_created(fp, dValue ):
     for k, v in s:
         print('      {}: {}'.format(k, v), file=fp)
 
-def create_wikimedia_table_mostCCBugs(cfg, statList):
-
-    for k, v in statList['dupesBugs'].items():
-        if k in statList['MostDupeBugs']:
-            if len(v) >= 3:
-                statList['MostDupeBugs'][k]['count'] = len(v)
-            else:
-                del statList['MostDupeBugs'][k]
-
-    for nameList in ['MostCCBugs', 'MostDupeBugs']:
-        print('Creating wikimedia table for ' + nameList)
-        output = ""
-
-        output += '{{TopMenu}}\n'
-        output += '{{Menu}}\n'
-        output += '{{Menu.QA}}\n'
-        output += '\n'
-        table = []
-        headers = ['Id', 'Summary', 'Component', 'Version', 'isRegression', 'isBisected',
-                           'isEasyHack', 'haveBackTrace', 'Reported']
-        if nameList == 'MostCCBugs':
-            headers.append('Total CC')
-            output += '{} bugs have 10 or more emails in the CC list. (sorted in alphabetical order by number of users)\n'.format(
-                    len(statList['MostCCBugs']))
-        else:
-            headers.append('Total Duplicates')
-            output += '{} open bugs have 3 or more duplicates. (sorted in alphabetical order by number of duplicates)\n'.format(
-                    len(statList['MostDupeBugs']))
-
-        for k,v in statList[nameList].items():
-            row = []
-            row.append('[' + urlShowBug + str(k) + ' #tdf' + str(k) + ']')
-            row.append(v['summary'])
-            row.append(v['component'])
-            row.append(v['version'])
-            if 'regression' in v['keywords']:
-                row.append('True')
-            else:
-                row.append('False')
-            if 'bisected' in v['keywords']:
-                row.append('True')
-            else:
-                row.append('False')
-            if 'easyHack' in v['keywords']:
-                row.append('True')
-            else:
-                row.append('False')
-            if 'haveBacktrace' in v['keywords']:
-                row.append('True')
-            else:
-                row.append('False')
-            row.append(v['creationDate'].strftime("%Y-%m-%d %H:%M:%S"))
-            row.append(v['count'])
-            table.append(row)
-
-        output += tabulate(sorted(table, key = lambda x: x[9], reverse=True), headers, tablefmt='mediawiki')
-        output += "\n"
-        output +='Generated on {}.'.format(cfg['todayDate'])
-        output += "\n"
-        output += '[[Category:EN]]\n'
-        output += '[[Category:QA/Stats]]'
-
-        fp = open('/tmp/table_' + nameList + '.txt', 'w', encoding='utf-8')
-        print(output.replace('wikitable', 'wikitable sortable'), file=fp)
-        fp.close()
-
-def create_wikimedia_table_by_target(cfg, statList):
-    for kT,vT in sorted(statList['targets'].items()):
-        print('Creating wikimedia table for release ' + kT)
-        output = ""
-
-        output += '{{TopMenu}}\n'
-        output += '{{Menu}}\n'
-        output += '{{Menu.QA}}\n'
-        output += '\n'
-
-        output += '{} people helped to triage {} bugs tagged with target:{}. (sorted in alphabetical order by user\'s name)\n'.format(
-            len(vT['people']), vT['count'], kT)
-        output += '\n'
-        table = []
-        headers = ['Name', 'Created', 'Comments', 'Status Changed', 'Keyword Added', 'Keyword Removed',
-                   'Severity Changed', 'Priority Changed', 'System Changed', 'Total Bugs']
-
-        for kP, vP in vT['people'].items():
-            name = statList['people'][kP]['name']
-            if not name:
-                name = statList['people'][kP]['email'].split('@')[0]
-
-            if not name == 'libreoffice-commits':
-                row = []
-                row.append(name)
-                row.append(vP['created'])
-                row.append(vP['comments'])
-                row.append(vP['status_changed'])
-                row.append(vP['keyword_added'])
-                row.append(vP['keyword_removed'])
-                row.append(vP['severity_changed'])
-                row.append(vP['priority_changed'])
-                row.append(vP['system_changed'])
-                row.append(len(set(vP['bugs'])))
-                table.append(row)
-
-        output += tabulate(sorted(table, key = lambda x: x[0]), headers, tablefmt='mediawiki')
-        output += "\n"
-        output +='Generated on {}.'.format(cfg['todayDate'])
-        output += "\n"
-        output += '[[Category:EN]]\n'
-        output += '[[Category:QA/Stats]]'
-
-        fp = open('/tmp/table_' + kT + '.txt', 'w', encoding='utf-8')
-        print(output.replace('wikitable', 'wikitable sortable'), file=fp)
-        fp.close()
-
-def create_wikimedia_table_by_period(cfg, statList):
-    for kT,vT in sorted(statList['period'].items()):
-        print('Creating wikimedia table for actions done in the last {} days.'.format(kT[:-1]))
-        output = ""
-
-        output += '{{TopMenu}}\n'
-        output += '{{Menu}}\n'
-        output += '{{Menu.QA}}\n'
-        output += '\n'
-
-        output += '{} people helped to triage {} bugs in the last {} days. (sorted in alphabetical order by user\'s name)\n'.format(
-            len(vT['people']), vT['count'], kT[:-1])
-        output += '\n'
-        table = []
-        headers = ['Name', 'Created', 'Comments', 'Status Changed', 'Keyword Added', 'Keyword Removed',
-                   'Severity Changed', 'Priority Changed', 'System Changed', 'Total Bugs']
-
-        for kP, vP in vT['people'].items():
-            name = statList['people'][kP]['name']
-            if not name:
-                name = statList['people'][kP]['email'].split('@')[0]
-
-            if not name == 'libreoffice-commits':
-                row = []
-                row.append(name)
-                row.append(vP['created'])
-                row.append(vP['comments'])
-                row.append(vP['status_changed'])
-                row.append(vP['keyword_added'])
-                row.append(vP['keyword_removed'])
-                row.append(vP['severity_changed'])
-                row.append(vP['priority_changed'])
-                row.append(vP['system_changed'])
-                row.append(len(set(vP['bugs'])))
-                table.append(row)
-
-        output += tabulate(sorted(table, key = lambda x: x[0]), headers, tablefmt='mediawiki')
-        output += "\n"
-        output += 'Generated on {}.'.format(cfg['todayDate'])
-        output += "\n"
-        output += '[[Category:EN]]\n'
-        output += '[[Category:QA/Stats]]'
-
-        fp = open('/tmp/period_' + kT + '.txt', 'w', encoding='utf-8')
-        print(output.replace('wikitable', 'wikitable sortable'), file=fp)
-        fp.close()
-
 def massping_Report(statList):
     fp = open('/tmp/massping_report.txt', 'w', encoding='utf-8')
 
@@ -1047,12 +684,6 @@ if __name__ == '__main__':
     if len(sys.argv) > 1:
         if sys.argv[1] == 'blog':
             Blog_Report(statList)
-        elif sys.argv[1] == 'target':
-            create_wikimedia_table_by_target(cfg, statList)
-        elif sys.argv[1] == 'period':
-            create_wikimedia_table_by_period(cfg, statList)
-        elif sys.argv[1] == 'stats':
-            create_wikimedia_table_mostCCBugs(cfg, statList)
         elif sys.argv[1] == 'user':
             users_Report(statList)
         elif sys.argv[1] == 'massping':
diff --git a/qa/createWikiStats.py b/qa/createWikiStats.py
new file mode 100755
index 0000000..ca672dd
--- /dev/null
+++ b/qa/createWikiStats.py
@@ -0,0 +1,414 @@
+#!/usr/bin/env python3
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+import common
+import datetime
+from tabulate import tabulate
+
+targets_list = ['5.4.5', '6.0.1']
+
+periods_list = [30, 60, 90, 180, 365]
+
+minNumOfDupes = 3
+
+def util_create_wiki_statList():
+    return {
+        'targets': {t:{'count':0, 'people':{}} for t in targets_list},
+        'period': {p:{'count':0, 'people':{}} for p in periods_list},
+        'MostCCBugs': {},
+        'dupesBugs': {},
+        'MostDupeBugs': {},
+        'people': {},
+        'stat': {'oldest': datetime.datetime.now(), 'newest': datetime.datetime(2001, 1, 1)}
+    }
+
+def util_create_detailed_person(email):
+    return { 'email': email,
+             'bugs': [],
+             'created': 0,
+             'comments':0,
+             'status_changed': 0,
+             'keyword_added': 0,
+             'keyword_removed': 0,
+             'whiteboard_added': 0,
+             'whiteboard_removed': 0,
+             'severity_changed': 0,
+             'priority_changed': 0,
+             'system_changed': 0,
+             'metabug_added': 0,
+             'metabug_removed': 0
+         }
+
+def util_increase_user_actions(statList, bug, mail, targets, action, actionTime):
+    if mail == 'libreoffice-commits at lists.freedesktop.org':
+        return
+
+    for target in targets:
+        if mail not in statList['targets'][target]['people']:
+            statList['targets'][target]['people'][mail] = util_create_detailed_person(mail)
+
+        statList['targets'][target]['people'][mail][action] += 1
+        statList['targets'][target]['people'][mail]['bugs'].append(bug)
+
+    for period in periods_list:
+        if actionTime >= cfg[period]:
+            if mail not in statList['period'][period]['people']:
+                statList['period'][period]['people'][mail] = util_create_detailed_person(mail)
+
+            statList['period'][period]['people'][mail][action] += 1
+            statList['period'][period]['people'][mail]['bugs'].append(bug)
+
+def util_check_duplicated(bugID, isFirst=True):
+    rowDupeOf = bugzillaData['bugs'][str(bugID)]['dupe_of']
+    if rowDupeOf:
+        if str(rowDupeOf) in bugzillaData['bugs']:
+            return util_check_duplicated(rowDupeOf, False)
+        else:
+            return bugID
+    else:
+        if isFirst:
+            return None
+        else:
+            return bugID
+
+def util_create_bug(summary, component, version, keywords, creationDate, count_cc):
+    return { 'summary': summary,
+             'component': component,
+             'version': version,
+             'keywords': keywords,
+             'creationDate': creationDate,
+             'count': count_cc
+        }
+
+def analyze_bugzilla_wiki_stats(statList, bugzillaData, cfg):
+    print("Analyzing bugzilla\n", end="", flush=True)
+    statNewDate = statList['stat']['newest']
+    statOldDate = statList['stat']['oldest']
+
+    for key, row in bugzillaData['bugs'].items():
+        rowId = row['id']
+
+        #Ignore META bugs and deletionrequest bugs.
+        if not row['summary'].lower().startswith('[meta]') and row['component'] != 'deletionrequest':
+            creationDate = datetime.datetime.strptime(row['creation_time'], "%Y-%m-%dT%H:%M:%SZ")
+            if creationDate < statOldDate:
+                statOldDate = creationDate
+            if creationDate > statNewDate:
+                statNewDate = creationDate
+
+            rowStatus = row['status']
+            rowResolution = row['resolution']
+
+            if rowStatus == 'VERIFIED' or rowStatus == 'RESOLVED':
+                rowStatus += "_" + rowResolution
+
+            rowKeywords = row['keywords']
+
+            creatorMail = row['creator']
+
+            whiteboard_list = row['whiteboard'].split(' ')
+            bugTargets = []
+            for whiteboard in whiteboard_list:
+                if whiteboard.startswith("target:"):
+                    bugVersion = whiteboard.split(':')[1][:5]
+                    if bugVersion in targets_list:
+                        bugTargets.append(bugVersion)
+                        statList['targets'][bugVersion]['count'] += 1
+
+            for period in periods_list:
+                if creationDate >= cfg[period]:
+                    statList['period'][period]['count'] += 1
+
+            util_increase_user_actions(statList, key, creatorMail, bugTargets, 'created', creationDate)
+
+            if common.isOpen(rowStatus) and len(row['cc']) >= 10:
+                statList['MostCCBugs'][rowId] = util_create_bug(
+                        row['summary'], row['component'], row['version'], rowKeywords, creationDate, len(row['cc']))
+
+            rowDupeOf = util_check_duplicated(rowId)
+            if rowDupeOf:
+                if rowDupeOf not in statList['dupesBugs']:
+                    statList['dupesBugs'][rowDupeOf] = []
+                statList['dupesBugs'][rowDupeOf].append(rowId)
+
+                if str(rowDupeOf) in bugzillaData['bugs'] and \
+                        common.isOpen(bugzillaData['bugs'][str(rowDupeOf)]['status']):
+                    if rowDupeOf not in statList['MostDupeBugs']:
+                        statList['MostDupeBugs'][rowDupeOf] = util_create_bug(
+                        bugzillaData['bugs'][str(rowDupeOf)]['summary'],
+                        bugzillaData['bugs'][str(rowDupeOf)]['component'],
+                        bugzillaData['bugs'][str(rowDupeOf)]['version'],
+                        bugzillaData['bugs'][str(rowDupeOf)]['keywords'],
+                        datetime.datetime.strptime(
+                            bugzillaData['bugs'][str(rowDupeOf)]['creation_time'], "%Y-%m-%dT%H:%M:%SZ"), 1)
+
+            for action in row['history']:
+                actionMail = action['who']
+                actionDate = datetime.datetime.strptime(action['when'], "%Y-%m-%dT%H:%M:%SZ")
+
+                # Use this variable in case the status is set before the resolution
+                newStatus = None
+                for change in action['changes']:
+                    if change['field_name'] == 'blocks':
+                        if change['added']:
+                            for metabug in change['added'].split(', '):
+                                continue
+                                #TODO
+                                #util_increase_user_actions(statList, key, actionMail, bugTargets, 'metabug_added', actionDate)
+
+                        if change['removed']:
+                            for metabug in change['removed'].split(', '):
+                                continue
+                                #TODO
+                                #util_increase_user_actions(statList, key, actionMail, bugTargets, 'metabug_added', actionDate)
+
+                    if change['field_name'] == 'status':
+                        addedStatus = change['added']
+                        removedStatus = change['removed']
+
+                        if  addedStatus == 'RESOLVED' or addedStatus == 'VERIFIED':
+                            if(rowResolution):
+                                addedStatus = addedStatus + "_" + rowResolution
+                                util_increase_user_actions(statList, key, actionMail, bugTargets, 'status_changed', actionDate)
+                            else:
+                                newStatus = addedStatus
+                        else:
+                            util_increase_user_actions(statList, key, actionMail, bugTargets, 'status_changed', actionDate)
+
+                    elif change['field_name'] == 'resolution':
+                        if newStatus:
+                            addedStatus = newStatus + "_" + change['added']
+                            util_increase_user_actions(statList, key, actionMail, bugTargets, 'status_changed', actionDate)
+
+                            newStatus = None
+
+                    elif change['field_name'] == 'priority':
+                        util_increase_user_actions(statList, key, actionMail, bugTargets, 'priority_changed', actionDate)
+
+                    elif change['field_name'] == 'severity':
+                        util_increase_user_actions(statList, key, actionMail, bugTargets, 'severity_changed', actionDate)
+
+                    elif change['field_name'] == 'keywords':
+                        keywordsAdded = change['added'].split(", ")
+                        for keyword in keywordsAdded:
+                            if keyword in common.keywords_list:
+                                util_increase_user_actions(statList, key, actionMail, bugTargets, 'keyword_added', actionDate)
+
+                        keywordsRemoved = change['removed'].split(", ")
+                        for keyword in keywordsRemoved:
+                            if keyword in common.keywords_list:
+                                util_increase_user_actions(statList, key, actionMail, bugTargets, 'keyword_removed', actionDate)
+
+                    elif change['field_name'] == 'op_sys':
+                        newSystem = change['added']
+                        util_increase_user_actions(statList, rowId, actionMail, bugTargets, 'system_changed', actionDate)
+
+            comments = row['comments'][1:]
+            for idx, comment in enumerate(comments):
+                commentMail = comment['creator']
+                commentDate = datetime.datetime.strptime(comment['time'], "%Y-%m-%dT%H:%M:%SZ")
+
+                util_increase_user_actions(statList, rowId, commentMail, bugTargets, 'comments', commentDate)
+
+            #This way we can get the users' names
+            for person in row['cc_detail']:
+                if person['email'] not in statList['people']:
+                    statList['people'][person['email']] = person['real_name']
+
+    statList['stat']['newest'] = statNewDate.strftime("%Y-%m-%d")
+    statList['stat']['oldest'] = statOldDate.strftime("%Y-%m-%d")
+    print(" from " + statList['stat']['oldest'] + " to " + statList['stat']['newest'])
+
+def create_wikimedia_table_mostCCBugs_and_MostDupes(cfg, statList):
+
+    for k, v in statList['dupesBugs'].items():
+        if k in statList['MostDupeBugs']:
+            if len(v) >= minNumOfDupes:
+                statList['MostDupeBugs'][k]['count'] = len(v)
+            else:
+                del statList['MostDupeBugs'][k]
+
+    for nameList in ['MostCCBugs', 'MostDupeBugs']:
+        print('Creating wikimedia table for ' + nameList)
+        output = ""
+
+        output += '{{TopMenu}}\n'
+        output += '{{Menu}}\n'
+        output += '{{Menu.QA}}\n'
+        output += '\n'
+        table = []
+        headers = ['Id', 'Summary', 'Component', 'Version', 'isRegression', 'isBisected',
+                           'isEasyHack', 'haveBackTrace', 'Reported']
+        if nameList == 'MostCCBugs':
+            headers.append('Total CC')
+            output += '{} bugs have 10 or more emails in the CC list. (sorted in descending order by number of users in CC)\n'.format(
+                    len(statList['MostCCBugs']))
+        else:
+            headers.append('Total Duplicates')
+            output += '{} open bugs have 3 or more duplicates. (sorted in descending order by number of duplicates)\n'.format(
+                    len(statList['MostDupeBugs']))
+
+        for k,v in statList[nameList].items():
+            row = []
+            row.append('[' + common.urlShowBug + str(k) + ' #tdf' + str(k) + ']')
+            row.append(v['summary'])
+            row.append(v['component'])
+            row.append(v['version'])
+            if 'regression' in v['keywords']:
+                row.append('True')
+            else:
+                row.append('False')
+            if 'bisected' in v['keywords']:
+                row.append('True')
+            else:
+                row.append('False')
+            if 'easyHack' in v['keywords']:
+                row.append('True')
+            else:
+                row.append('False')
+            if 'haveBacktrace' in v['keywords']:
+                row.append('True')
+            else:
+                row.append('False')
+            row.append(v['creationDate'].strftime("%Y-%m-%d %H:%M:%S"))
+            row.append(v['count'])
+            table.append(row)
+
+        output += tabulate(sorted(table, key = lambda x: x[9], reverse=True), headers, tablefmt='mediawiki')
+        output += "\n"
+        output +='Generated on {}.'.format(cfg['todayDate'])
+        output += "\n"
+        output += '[[Category:EN]]\n'
+        output += '[[Category:QA/Stats]]'
+
+        fp = open('/tmp/table_' + nameList + '.txt', 'w', encoding='utf-8')
+        print(output.replace('wikitable', 'wikitable sortable'), file=fp)
+        fp.close()
+
+def create_wikimedia_table_by_target(cfg, statList):
+    for kT,vT in sorted(statList['targets'].items()):
+        print('Creating wikimedia table for release ' + kT)

... etc. - the rest is truncated
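
Since the createWikiStats.py diff is cut off above, here is a toy, stand-alone illustration (made-up bug ids) of how its util_check_duplicated helper walks a dupe_of chain back to the root report:

    # Toy data: 103 duplicates 102, which duplicates 101; 101 is the root report.
    bugzillaData = {'bugs': {
        '101': {'dupe_of': None},
        '102': {'dupe_of': 101},
        '103': {'dupe_of': 102},
    }}

    def util_check_duplicated(bugID, isFirst=True):
        rowDupeOf = bugzillaData['bugs'][str(bugID)]['dupe_of']
        if rowDupeOf:
            if str(rowDupeOf) in bugzillaData['bugs']:
                return util_check_duplicated(rowDupeOf, False)
            return bugID                    # chain leads out of the dump: stop here
        return None if isFirst else bugID

    print(util_check_duplicated(103))       # -> 101, the bug all duplicates resolve to
    print(util_check_duplicated(101))       # -> None, the root itself is not a duplicate
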


More information about the Libreoffice-commits mailing list