[Libreoffice-commits] dev-tools.git: esc-reporting/esc-analyze.py esc-reporting/esc-collect.py esc-reporting/esc-report.py esc-reporting/README
jan Iversen
jani at documentfoundation.org
Sat Nov 12 17:16:18 UTC 2016
esc-reporting/README | 6
esc-reporting/esc-analyze.py | 422 +++++++++++++++++++++++++++++++++++
esc-reporting/esc-collect.py | 310 ++++++++++++++++++++++++++
esc-reporting/esc-report.py | 506 +++++++++++++++++++++++++++++++++++++++++++
4 files changed, 1244 insertions(+)
New commits:
commit d64010eea0c92aa8ba80422de7ad776ac39e94ce
Author: jan Iversen <jani at documentfoundation.org>
Date: Sat Nov 12 18:15:45 2016 +0100
Added esc-reporting tools
diff --git a/esc-reporting/README b/esc-reporting/README
new file mode 100644
index 0000000..b3db828
--- /dev/null
+++ b/esc-reporting/README
@@ -0,0 +1,6 @@
+This directory contains the tools that run on vm174 to generate ESC reports (currently mentoring and UX).
+
+Patches are welcome. Deployment can be done by any infra person; vm174 is currently maintained by jani.
+
+
+
diff --git a/esc-reporting/esc-analyze.py b/esc-reporting/esc-analyze.py
new file mode 100755
index 0000000..e613289
--- /dev/null
+++ b/esc-reporting/esc-analyze.py
@@ -0,0 +1,422 @@
+#!/usr/bin/env python3
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+
+
+### DESCRIPTION
+#
+# This program uses data collected by esc-collect.py:
+#    the data is dumped to json files, with a history of at least 1 year
+#    esc/dump/['openhub','bugzilla','gerrit','git']_dump.json
+#
+# It generates and maintains
+#    esc/stats.json (the daily data)
+#    esc/archive/stats_YYYY_MM_DD.json (copy of stats.json)
+#    esc/weeks/week_YYYY_NN.json (Thursday copy of stats.json)
+#
+# The analyze functions run through the data files and generate interesting numbers
+# You can add your own analyze function (see analyze_myfunc() for an example).
+# The numbers are stored in stats.json, and a diff against last week's numbers is built automatically
+#
+# dump/developers_dump.json is used to identify:
+#    new contributors
+#    contributors missing a license statement
+#    the contributor award scheme
+#    cross-referenced emails (several people use multiple emails; there is a function to handle that)
+#
+# Extend this program if you want to present numbers compared to last week (e.g. for the ESC meeting)
+#
+# By storing the data over time:
+#    archive/ contains ca. 1 month
+#    weeks/   contains ca. 1 year
+# it is possible to do trend analysis; that is, however, not part of this program
+#
+# Installed in vm174:/usr/local/bin, it runs every night (generating esc/stats.json)
+#
+# This program is intended to be extended by people interested in performance numbers
+#
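+# Rough layout of stats.json (see util_create_statList() and analyze_final() below):
+#    { 'data':    {'commits': ..., 'openhub': ..., 'gerrit': ..., 'ui': ..., 'qa': ..., 'easyhacks': ...},
+#      'stat':    {'openhub_last_analyse': 'YYYY-MM-DD'},
+#      'people':  {<email>: {commits/gerrit/ui/qa counters, license and committer flags}},
+#      'diff':    same shape as 'data', holding deltas against last week,
+#      'addDate': 'YYYY-MM-DD' }
+#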
+
+
+
+import sys
+import csv
+import io
+import os
+import operator
+import datetime
+import json
+import xmltodict
+
+
+def util_load_file(fileName):
+ try:
+ fp = open(fileName, encoding='utf-8')
+ rawData = json.load(fp)
+ fp.close()
+ except Exception as e:
+ print('Error loading file ' + fileName + ' due to ' + str(e))
+ rawData = None
+ pass
+ return rawData
+
+
+
+def util_dump_file(fileName, rawList):
+ try:
+ fp = open(fileName, 'w', encoding='utf-8')
+ json.dump(rawList, fp, ensure_ascii=False, indent=4, sort_keys=True)
+ fp.close()
+ except Exception as e:
+ print('Error dumping file ' + fileName + ' due to ' + str(e))
+ os.remove(fileName)
+ exit(-1)
+
+
+
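+# util_build_period_stat bumps, for every period ('1year', '3month', '1month',
+# '1week') that xDate falls into, the per-person counter for pstatus plus its
+# 'total', and the global statList['data'][base] counter for status; for the
+# 'gerrit' base the global counters are kept separately for committers and
+# contributors.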
+def util_build_period_stat(cfg, statList, xDate, email, status, pstatus, base = 'gerrit'):
+ for i in '1year', '3month', '1month', '1week':
+ if xDate > cfg[i + 'Date']:
+ statList['people'][email][base][i][pstatus] += 1
+ statList['people'][email][base][i]['total'] += 1
+ if not base == 'gerrit' :
+ statList['data'][base][i][status] += 1
+ statList['data'][base][i]['total'] += 1
+ elif statList['people'][email]['isCommitter']:
+ statList['data'][base]['committer'][i][status] += 1
+ statList['data'][base]['committer'][i]['total'] += 1
+ else:
+ statList['data'][base]['contributor'][i]['total'] += 1
+ statList['data'][base]['contributor'][i][status] += 1
+
+
+
+def util_load_data_file(fileName):
+ rawList = util_load_file(fileName)
+ if rawList == None:
+ exit(-1)
+ return rawList
+
+
+
+def util_create_person_gerrit(person, email):
+ return { 'name': person,
+ 'email': email,
+ 'commits': {'1year': {'merged': 0, 'reviewMerged': 0},
+ '3month': {'merged': 0, 'reviewMerged': 0},
+ '1month': {'merged': 0, 'reviewMerged': 0},
+ '1week': {'merged': 0, 'reviewMerged': 0}},
+ 'gerrit': {'1year': {'owner': 0, 'reviewer': 0, 'total': 0},
+ '3month': {'owner': 0, 'reviewer': 0, 'total': 0},
+ '1month': {'owner': 0, 'reviewer': 0, 'total': 0},
+ '1week': {'owner': 0, 'reviewer': 0, 'total': 0},
+ 'userName': '*DUMMY*'},
+ 'ui': {'1year': {'owner': 0, 'reviewer': 0, 'total': 0},
+ '3month': {'owner': 0, 'reviewer': 0, 'total': 0},
+ '1month': {'owner': 0, 'reviewer': 0, 'total': 0},
+ '1week': {'owner': 0, 'reviewer': 0, 'total': 0}},
+ 'qa': {'1year': {'owner': 0, 'reviewer': 0, 'total': 0},
+ '3month': {'owner': 0, 'reviewer': 0, 'total': 0},
+ '1month': {'owner': 0, 'reviewer': 0, 'total': 0},
+ '1week': {'owner': 0, 'reviewer': 0,'total': 0}},
+ 'isCommitter': False,
+ 'isContributor': False,
+ 'hasLicense': False,
+ 'newestCommit' : datetime.datetime(2001, 1, 1),
+ 'prevCommit': datetime.datetime(2001, 1, 1)}
+
+
+
+def util_create_statList():
+ return {'data': {'commits': {'committer': {'1year': {'#': 0}, '3month': {'#': 0}, '1month': {'#': 0}, '1week': {'#': 0}},
+ 'contributor': {'1year': {'#': 0}, '3month': {'#': 0}, '1month': {'#': 0}, '1week': {'#': 0}}},
+ 'openhub': {'lines_of_code': 0,
+ 'total_commits': 0,
+ 'total_contributors': 0,
+ 'year_commits': 0,
+ 'year_contributors': 0},
+ 'gerrit': {'contributor': {'1year': {'ABANDONED': 0, 'MERGED': 0, 'NEW': 0, 'reviewed': 0, 'total': 0},
+ '3month': {'ABANDONED': 0, 'MERGED': 0, 'NEW': 0, 'reviewed': 0, 'total': 0},
+ '1month': {'ABANDONED': 0, 'MERGED': 0, 'NEW': 0, 'reviewed': 0, 'total': 0},
+ '1week': {'ABANDONED': 0, 'MERGED': 0, 'NEW': 0, 'reviewed': 0, 'total': 0}},
+ 'committer': {'1year': {'ABANDONED': 0, 'MERGED': 0, 'NEW': 0, 'reviewed': 0, 'total': 0},
+ '3month': {'ABANDONED': 0, 'MERGED': 0, 'NEW': 0, 'reviewed': 0, 'total': 0},
+ '1month': {'ABANDONED': 0, 'MERGED': 0, 'NEW': 0, 'reviewed': 0, 'total': 0},
+ '1week': {'ABANDONED': 0, 'MERGED': 0, 'NEW': 0, 'reviewed': 0, 'total': 0}}},
+ 'ui': {'1year': {'added': 0, 'removed': 0, 'commented': 0, 'total': 0},
+ '3month': {'added': 0, 'removed': 0, 'commented': 0, 'total': 0},
+ '1month': {'added': 0, 'removed': 0, 'commented': 0, 'total': 0},
+ '1week': {'added': 0, 'removed': 0, 'commented': 0, 'total': 0},
+ 'needsUXEval' : 0,
+ 'topicUI': 0},
+ 'qa': {'1year': {'UNCONFIRMED': 0, 'NEW': 0, 'ASSIGNED': 0, 'REOPENED': 0, 'RESOLVED': 0,
+ 'VERIFIED': 0, 'CLOSED': 0, 'NEEDINFO': 0, 'PLEASETEST': 0, 'commented': 0, 'total': 0},
+ '3month': {'UNCONFIRMED': 0, 'NEW': 0, 'ASSIGNED': 0, 'REOPENED': 0, 'RESOLVED': 0,
+ 'VERIFIED': 0, 'CLOSED': 0, 'NEEDINFO': 0, 'PLEASETEST': 0, 'commented': 0, 'total': 0},
+ '1month': {'UNCONFIRMED': 0, 'NEW': 0, 'ASSIGNED': 0, 'REOPENED': 0, 'RESOLVED': 0,
+ 'VERIFIED': 0, 'CLOSED': 0, 'NEEDINFO': 0, 'PLEASETEST': 0, 'commented': 0, 'total': 0},
+ '1week': {'UNCONFIRMED': 0, 'NEW': 0, 'ASSIGNED': 0, 'REOPENED': 0, 'RESOLVED': 0,
+ 'VERIFIED': 0, 'CLOSED': 0, 'NEEDINFO': 0, 'PLEASETEST': 0, 'commented': 0, 'total': 0}},
+ 'easyhacks' : {'needsDevEval': 0, 'needsUXEval': 0, 'cleanup_comments': 0,
+ 'total': 0, 'assigned': 0, 'open': 0}},
+ 'stat': {'openhub_last_analyse': "2001-01-01"},
+ 'people': {}}
+
+
+
+
+def util_check_mail(name, mail, statList, combineMail):
+ if mail in combineMail:
+ mail = combineMail[mail]
+ if not mail in statList['people']:
+ statList['people'][mail] = util_create_person_gerrit(name, mail)
+ return mail
+
+
+
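+# util_build_diff recursively subtracts last week's numbers from this week's,
+# e.g. (minimal sketch):
+#    util_build_diff({'total': 12, 'qa': {'NEW': 5}}, {'total': 10, 'qa': {'NEW': 2}})
+#    returns {'total': 2, 'qa': {'NEW': 3}}
+# Keys missing in oldList are copied from newList first, so their diff becomes 0.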
+def util_build_diff(newList, oldList):
+ result = {}
+ for i in newList:
+ if not i in oldList:
+ oldList[i] = newList[i]
+ if type(newList[i]) is dict:
+ if not type(oldList[i]) is dict:
+ result[i] = 0
+ else:
+ result[i] = util_build_diff(newList[i], oldList[i])
+ else:
+ result[i] = newList[i] - oldList[i]
+ return result
+
+
+
+def analyze_mentoring(statList, openhubData, gerritData, gitData, bugzillaData, cfg):
+ print("mentoring: analyze openhub", end="", flush=True)
+ statList['data']['openhub']['lines_of_code'] = int(openhubData['project']['analysis']['total_code_lines'])
+ statList['data']['openhub']['total_commits'] = int(openhubData['project']['analysis']['total_commit_count'])
+ statList['data']['openhub']['total_contributors'] = int(openhubData['project']['analysis']['total_contributor_count'])
+ statList['data']['openhub']['year_commits'] = int(openhubData['project']['analysis']['twelve_month_commit_count'])
+ statList['data']['openhub']['year_contributors'] = int(openhubData['project']['analysis']['twelve_month_contributor_count'])
+ xDate = datetime.datetime.strptime(openhubData['project']['analysis']['updated_at'], "%Y-%m-%dT%H:%M:%SZ")
+ statList['stat']['openhub_last_analyse'] = xDate.strftime('%Y-%m-%d')
+
+ print(" to " + statList['stat']['openhub_last_analyse'])
+ print("mentoring: analyze gerrit", end="", flush=True)
+
+ for row in gerritData['committers']:
+ mail = util_check_mail(row['name'], row['email'], statList, cfg['contributor']['combine-email'])
+ statList['people'][mail]['gerrit']['userName'] = row['username']
+ statList['people'][mail]['isCommitter'] = True
+ statList['people'][mail]['isContributor'] = True
+
+ statNewDate = cfg['1yearDate']
+ statOldDate = cfg['nowDate']
+ for key in gerritData['patch']:
+ row = gerritData['patch'][key]
+ if row['status'] == 'SUBMITTED' or row['status'] == 'DRAFT':
+ row['status'] = 'NEW'
+ xDate = datetime.datetime.strptime(row['updated'], '%Y-%m-%d %H:%M:%S.%f000')
+ ownerEmail = util_check_mail(row['owner']['name'], row['owner']['email'], statList, cfg['contributor']['combine-email'])
+ statList['people'][ownerEmail]['gerrit']['userName'] = row['owner']['username']
+ util_build_period_stat(cfg, statList, xDate, ownerEmail, row['status'], 'owner')
+ if ownerEmail in cfg['contributor']['contributors'] or ownerEmail in cfg['contributor']['license-pending']:
+ statList['people'][ownerEmail]['hasLicense'] = True
+ if xDate < statOldDate:
+ statOldDate = xDate
+ if xDate > statNewDate:
+ statNewDate = xDate
+
+ for i in 'Verified', 'Code-Review':
+ for x in row['labels'][i]['all']:
+ xEmail = util_check_mail(x['name'], x['email'], statList, cfg['contributor']['combine-email'])
+ if xEmail != ownerEmail:
+ util_build_period_stat(cfg, statList, xDate, xEmail, 'reviewed', 'reviewer')
+
+ print(" from " + statOldDate.strftime('%Y-%m-%d') + " to " + statNewDate.strftime('%Y-%m-%d'))
+ print("mentoring: analyze git", end="", flush=True)
+
+ statNewDate = cfg['1yearDate']
+ statOldDate = cfg['nowDate']
+ for key in gitData['commits']:
+ row = gitData['commits'][key]
+ xDate = datetime.datetime.strptime(row['date'], "%Y-%m-%d %H:%M:%S")
+ if xDate < statOldDate:
+ statOldDate = xDate
+ if xDate > statNewDate:
+ statNewDate = xDate
+ author = util_check_mail(row['author'], row['author-email'], statList, cfg['contributor']['combine-email'])
+ committer = util_check_mail(row['committer'], row['committer-email'], statList, cfg['contributor']['combine-email'])
+ statList['people'][author]['isContributor'] = True
+ statList['people'][committer]['isContributor'] = True
+
+ for i in author, committer:
+ if xDate > statList['people'][i]['newestCommit']:
+ if statList['people'][i]['newestCommit'] > statList['people'][i]['prevCommit']:
+ statList['people'][i]['prevCommit'] = statList['people'][i]['newestCommit']
+ statList['people'][i]['newestCommit'] = xDate
+ elif xDate > statList['people'][i]['prevCommit']:
+ statList['people'][i]['prevCommit'] = xDate
+
+ for i in '1year', '3month', '1month', '1week':
+ if xDate > cfg[i + 'Date']:
+ if author != committer:
+ statList['people'][author]['commits'][i]['merged'] += 1
+ statList['people'][committer]['commits'][i]['reviewMerged'] += 1
+ statList['data']['commits']['contributor'][i]['#'] += 1
+ else:
+ statList['people'][author]['commits'][i]['merged'] += 1
+ statList['data']['commits']['committer'][i]['#'] += 1
+
+ print(" from " + statOldDate.strftime("%Y-%m-%d") + " to " + statNewDate.strftime("%Y-%m-%d"))
+ print("mentoring: analyze easyhacks", end="", flush=True)
+
+ statNewDate = cfg['1yearDate']
+ statOldDate = cfg['nowDate']
+ for key, row in bugzillaData['bugs'].items():
+ if row['status'] == 'RESOLVED' or row['status'] == 'VERIFIED' or not 'easyHack' in row['keywords']:
+ continue
+
+ xDate = datetime.datetime.strptime(row['last_change_time'], "%Y-%m-%dT%H:%M:%SZ")
+ if xDate < statOldDate:
+ statOldDate = xDate
+ if xDate > statNewDate:
+ statNewDate = xDate
+
+ statList['data']['easyhacks']['total'] += 1
+ bugBlocked = False
+ if 'needsDevEval' in row['keywords']:
+ statList['data']['easyhacks']['needsDevEval'] += 1
+ bugBlocked = True
+ if 'needsUXEval' in row['keywords']:
+ statList['data']['easyhacks']['needsUXEval'] += 1
+ bugBlocked = True
+
+ if row['status'] == 'NEEDINFO':
+ bugBlocked = True
+ elif row['status'] == 'ASSIGNED':
+ statList['data']['easyhacks']['assigned'] += 1
+ elif row['status'] == 'NEW' and not bugBlocked:
+ statList['data']['easyhacks']['open'] += 1
+
+ if len(row['comments']) >= 5:
+ statList['data']['easyhacks']['cleanup_comments'] += 1
+
+ print(" from " + statOldDate.strftime("%Y-%m-%d") + " to " + statNewDate.strftime("%Y-%m-%d"))
+
+
+
+def analyze_ui(statList, openhubData, gerritData, gitData, bugzillaData, cfg):
+ print("ui: analyze bugzilla", flush=True)
+
+ for key, row in bugzillaData['bugs'].items():
+ if row['status'] == 'RESOLVED' or row['status'] == 'VERIFIED' or not 'topicUI' in row['keywords']:
+ continue
+
+ statList['data']['ui']['topicUI'] += 1
+ if 'needsUXEval' in row['keywords']:
+ statList['data']['ui']['needsUXEval'] += 1
+
+ for change in row['comments']:
+ email = util_check_mail('*UNKNOWN*', change['creator'], statList, cfg['contributor']['combine-email'])
+ xDate = datetime.datetime.strptime(change['creation_time'], "%Y-%m-%dT%H:%M:%SZ")
+ util_build_period_stat(cfg, statList, xDate, email, 'commented', 'reviewer', base='ui')
+
+ for change in row['history']:
+ email = util_check_mail('*UNKNOWN*', change['who'], statList, cfg['contributor']['combine-email'])
+ xDate = datetime.datetime.strptime(change['when'], "%Y-%m-%dT%H:%M:%SZ")
+ for entry in change['changes']:
+ if entry['added'] != '':
+ st = 'added'
+ else:
+ st = 'removed'
+ util_build_period_stat(cfg, statList, xDate, email, st, 'reviewer', base='ui')
+
+
+def analyze_qa(statList, openhubData, gerritData, gitData, bugzillaData, cfg):
+ print("qa: analyze bugzilla", flush=True)
+
+ for key, row in bugzillaData['bugs'].items():
+ email = util_check_mail('*UNKNOWN*', row['creator'], statList, cfg['contributor']['combine-email'])
+ xDate = datetime.datetime.strptime(row['last_change_time'], "%Y-%m-%dT%H:%M:%SZ")
+ util_build_period_stat(cfg, statList, xDate, email, row['status'], 'owner', base='qa')
+
+ for change in row['comments']:
+ email = util_check_mail('*UNKNOWN*', change['creator'], statList, cfg['contributor']['combine-email'])
+ xDate = datetime.datetime.strptime(change['creation_time'], "%Y-%m-%dT%H:%M:%SZ")
+ util_build_period_stat(cfg, statList, xDate, email, 'commented', 'reviewer', base='qa')
+
+
+
+def analyze_myfunc(statList, openhubData, gerritData, gitData, bugzillaData, cfg):
+ print("myfunc: analyze nothing", flush=True)
+
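+# A minimal sketch of a custom analyze function; the function name and the
+# 'recently_touched' key are illustrative only, and it only runs if a call to
+# it is added in runAnalyze().
+def analyze_easyhack_activity(statList, openhubData, gerritData, gitData, bugzillaData, cfg):
+    print("easyhack_activity: analyze bugzilla", flush=True)
+    cnt = 0
+    for key, row in bugzillaData['bugs'].items():
+        if 'easyHack' not in row['keywords'] or row['status'] in ('RESOLVED', 'VERIFIED'):
+            continue
+        xDate = datetime.datetime.strptime(row['last_change_time'], "%Y-%m-%dT%H:%M:%SZ")
+        if xDate > cfg['1monthDate']:
+            cnt += 1
+    statList['data']['easyhacks']['recently_touched'] = cnt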
+
+
+def analyze_final(statList, cfg):
+ print("Analyze final")
+ statList['addDate'] = datetime.date.today().strftime('%Y-%m-%d')
+
+ zDate = datetime.datetime(year=2001, month=1, day=1)
+ for i in statList['people']:
+ person = statList['people'][i]
+ delta = person['newestCommit'] - person['prevCommit']
+ person['newestCommit'] = person['newestCommit'].strftime("%Y-%m-%d")
+ person['prevCommit'] = person['prevCommit'].strftime("%Y-%m-%d")
+
+ myDay = datetime.date.today()
+ x = (myDay - datetime.timedelta(days=7)).strftime('%Y-%m-%d')
+ weekList = util_load_file(cfg['homedir'] + 'archive/stats_' + x + '.json')
+ if weekList is None:
+ weekList = {'data': {}}
+ statList['diff'] = util_build_diff(statList['data'], weekList['data'])
+ util_dump_file(cfg['homedir'] + 'stats.json', statList)
+ x = myDay.strftime('%Y-%m-%d')
+ util_dump_file(cfg['homedir'] + 'archive/stats_' + x + '.json', statList)
+ if myDay.strftime('%w') == '4':
+ util_dump_file(cfg['homedir'] + 'weeks/week_' + myDay.strftime('%Y_%W') + '.json', statList)
+
+
+
+def runCfg(platform):
+ if 'esc_homedir' in os.environ:
+ homeDir = os.environ['esc_homedir']
+ else:
+ homeDir = '/home/jani/esc'
+ cfg = util_load_data_file(homeDir + '/config.json')
+ cfg['homedir'] = homeDir + '/'
+ cfg['platform'] = platform
+ print("Reading and writing data to " + cfg['homedir'])
+
+ cfg['contributor'] = util_load_data_file(cfg['homedir'] + 'dump/developers_dump.json')
+ cfg['nowDate'] = datetime.datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
+ cfg['cutDate'] = cfg['nowDate'] - datetime.timedelta(days=365)
+ cfg['1weekDate'] = cfg['nowDate'] - datetime.timedelta(days=7)
+ cfg['1monthDate'] = cfg['nowDate'] - datetime.timedelta(days=30)
+ cfg['3monthDate'] = cfg['nowDate'] - datetime.timedelta(days=90)
+ cfg['1yearDate'] = cfg['nowDate'] - datetime.timedelta(days=365)
+ return cfg
+
+
+
+def runAnalyze(cfg):
+ openhubData = util_load_data_file(cfg['homedir'] + 'dump/openhub_dump.json')
+ bugzillaData = util_load_data_file(cfg['homedir'] + 'dump/bugzilla_dump.json')
+ gerritData = util_load_data_file(cfg['homedir'] + 'dump/gerrit_dump.json')
+ gitData = util_load_data_file(cfg['homedir'] + 'dump/git_dump.json')
+
+ statList = util_create_statList()
+ analyze_mentoring(statList, openhubData, gerritData, gitData, bugzillaData, cfg)
+ analyze_ui(statList, openhubData, gerritData, gitData, bugzillaData, cfg)
+ analyze_qa(statList, openhubData, gerritData, gitData, bugzillaData, cfg)
+ analyze_myfunc(statList, openhubData, gerritData, gitData, bugzillaData, cfg)
+ analyze_final(statList, cfg)
+
+
+
+if __name__ == '__main__':
+ runAnalyze(runCfg(sys.platform))
diff --git a/esc-reporting/esc-collect.py b/esc-reporting/esc-collect.py
new file mode 100755
index 0000000..6bfb9c0
--- /dev/null
+++ b/esc-reporting/esc-collect.py
@@ -0,0 +1,310 @@
+#!/usr/bin/env python3
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+
+### DESCRIPTION
+#
+# This program collects data from
+#    Openhub (including history and committer list)
+#    Bugzilla (including comments and history)
+#    Gerrit (including list of committers)
+#    Git (all LibreOffice repos)
+#
+# The data is dumped to json files, with a history of at least 1 year
+#    esc/dump/['openhub','bugzilla','gerrit','git']_dump.json
+#
+# The JSON is a 1:1 copy of the data in those systems.
+# This program should only be changed when one of the systems is updated.
+#
+# Installed in vm174:/usr/local/bin, it runs every night (doing a delta collection)
+#
+# Note: this program puts a heavy load on our services, so please do not run it casually.
+# For analysis and reporting, see the two other programs available.
+#
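+# Example (minimal sketch) of how another script can read a dump produced here;
+# the bug id shown is hypothetical:
+#    with open('dump/bugzilla_dump.json', encoding='utf-8') as fp:
+#        bugs = json.load(fp)['bugs']        # dict keyed by bug id (as a string)
+#    print(bugs['12345']['status'])
+#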
+
+import sys
+import csv
+import io
+import os
+import operator
+import datetime
+import json
+import xmltodict
+import requests
+from requests.auth import HTTPDigestAuth
+
+
+
+def util_load_file(fileName):
+ try:
+ fp = open(fileName, encoding='utf-8')
+ rawData = json.load(fp)
+ fp.close()
+ except Exception as e:
+ print('Error loading file ' + fileName + ' due to ' + str(e))
+ rawData = None
+ pass
+ return rawData
+
+
+
+def util_load_url(url, useDict=False, useRaw=False, uUser=None, uPass=None):
+ try:
+ if uUser is None:
+ r = requests.get(url)
+ if useDict:
+ rawData = xmltodict.parse(r.text)
+ elif useRaw:
+ rawData = r.text
+ else:
+ rawData = r.json()
+ else:
+ r = requests.get(url, auth=HTTPDigestAuth(uUser, uPass))
+ rawData = json.loads(r.text[5:])
+ r.close()
+ except Exception as e:
+ print('Error loading url ' + url + ' due to ' + str(e))
+ exit(-1)
+ return rawData
+
+
+
+def util_dump_file(fileName, rawList):
+ try:
+ fp = open(fileName, 'w', encoding='utf-8')
+ json.dump(rawList, fp, ensure_ascii=False, indent=4, sort_keys=True)
+ fp.close()
+ except Exception as e:
+ print('Error dumping file ' + fileName + ' due to ' + str(e))
+ os.remove(fileName)
+ exit(-1)
+
+
+
+def util_load_data_file(cfg, fileName, funcName, rawListProto):
+ rawList = util_load_file(fileName)
+ if rawList == None:
+ rawList = rawListProto
+ rawList['newest-entry'] = (datetime.datetime.now() - datetime.timedelta(days=365)).strftime("%Y-%m-%d 00")
+ print('retrieving full year of ' + funcName + ', take a coffee')
+ return datetime.datetime.strptime(rawList['newest-entry'], "%Y-%m-%d %H"), rawList
+
+
+
+def get_openhub(cfg):
+ fileName = cfg['homedir'] + 'dump/openhub_dump.json'
+ searchDate, rawList = util_load_data_file(cfg, fileName, 'openhub', {'project': {}, 'people': {}})
+ newDate = searchDate
+ print("Updating openHub dump from " + rawList['newest-entry'])
+
+ urlBase = 'https://www.openhub.net/p/libreoffice'
+ url = urlBase + '.xml?api_key=' + cfg['openhub']['api-key']
+ rawList['project'] = util_load_url(url, useDict=True)['response']['result']['project']
+
+ url = urlBase + '/contributors.xml?api_key=' + cfg['openhub']['api-key'] + '&sort=latest_commit&page='
+ pageId = -1
+ while True:
+ pageId += 1
+ idList = util_load_url(url + str(pageId), useDict=True)['response']['result']['contributor_fact']
+ for row in idList:
+ rawList['people'][row['contributor_id']] = row
+ xDate = datetime.datetime.strptime(idList[-1]['last_commit_time'], "%Y-%m-%dT%H:%M:%SZ")
+ if xDate < searchDate:
+ break
+ if xDate > newDate:
+ newDate = xDate
+ rawList['newest-entry'] = newDate.strftime("%Y-%m-%d %H")
+
+ util_dump_file(fileName, rawList)
+ return rawList
+
+
+
+def get_bugzilla(cfg):
+ fileName = cfg['homedir'] + 'dump/bugzilla_dump.json'
+ searchDate, rawList = util_load_data_file(cfg, fileName, 'bugzilla', {'bugs': {}})
+ print("Updating bugzilla dump from " + rawList['newest-entry'])
+
+ url = 'https://bugs.documentfoundation.org/rest/bug?' \
+ '&order=changeddate&chfieldto=Now&chfieldfrom=' + searchDate.strftime("%Y-%m-%d") + \
+ '&limit=200&offset='
+ newList = []
+ while True:
+ tmp = util_load_url(url + str(len(newList)))['bugs']
+ if len(tmp) == 0:
+ break
+ newList.extend(tmp)
+
+ urlH = 'https://bugs.documentfoundation.org/rest/bug/{}/history'
+ urlC = 'https://bugs.documentfoundation.org/rest/bug/{}/comment'
+ cnt = 0
+ for row in newList:
+ id = str(row['id'])
+ if not 'cc' in row:
+ row['cc'] = []
+ if not 'keywords' in row:
+ row['keywords'] = []
+ tmp = util_load_url(urlH.format(id))
+ row['history'] = tmp['bugs'][0]['history']
+ tmp = util_load_url(urlC.format(id))
+ row['comments'] = tmp['bugs'][id]['comments']
+ rawList['bugs'][id] = row
+ xDate = datetime.datetime.strptime(row['last_change_time'], "%Y-%m-%dT%H:%M:%SZ")
+ if xDate > searchDate:
+ searchDate = xDate
+ cnt += 1
+ if cnt > 400:
+ rawList['newest-entry'] = searchDate.strftime('%Y-%m-%d %H')
+ util_dump_file(fileName, rawList)
+ cnt = 0
+
+ rawList['newest-entry'] = searchDate.strftime('%Y-%m-%d %H')
+ util_dump_file(fileName, rawList)
+ return rawList
+
+
+
+def get_gerrit(cfg):
+ fileName = cfg['homedir'] + 'dump/gerrit_dump.json'
+ searchDate, rawList = util_load_data_file(cfg, fileName, 'gerrit', {'patch': {}, 'committers' : []})
+ print("Updating gerrit dump from " + rawList['newest-entry'])
+
+ urlBase = 'https://gerrit.libreoffice.org/a/'
+ uid = cfg['gerrit']['user']
+ upw = cfg['gerrit']['password']
+ rawList['committers'] = []
+ tmp = util_load_url(urlBase + 'groups/Committers/members', uUser=uid, uPass=upw)
+ for row in tmp:
+ for i in 'username', 'email':
+ if not i in row:
+ row[i] = '*DUMMY*'
+ rawList['committers'].append(row)
+
+ url = urlBase + 'changes/?q=after:' + searchDate.strftime("%Y-%m-%d") + \
+ '&o=DETAILED_LABELS&o=DETAILED_ACCOUNTS&o=MESSAGES&limit=200&start='
+ offset = 0
+ if 'offset' in rawList:
+ offset = int(rawList['offset'])
+ while True:
+ tmp = util_load_url(url + str(offset), uUser=uid, uPass=upw)
+ for row in tmp:
+ for i in 'email', 'username', 'name':
+ if not i in row['owner']:
+ row['owner'][i] = '*DUMMY*'
+ for x in row['messages']:
+ if not 'author' in x:
+ x['author'] = {}
+ for i in 'email', 'username', 'name':
+ if not i in x['author']:
+ x['author'][i] = '*DUMMY*'
+ for i in 'Verified', 'Code-Review':
+ if not i in row['labels']:
+ row['labels'][i] = {}
+ if not 'all' in row['labels'][i]:
+ row['labels'][i]['all'] = []
+ for x in row['labels'][i]['all']:
+ if 'name' not in x:
+ x['name'] = '*DUMMY*'
+ if 'email' not in x:
+ x['email'] = '*DUMMY*'
+ if 'username' not in x:
+ x['username'] = '*DUMMY*'
+ if 'value' not in x:
+ x['value'] = 0
+
+ rawList['patch'][str(row['_number'])] = row
+ xDate = datetime.datetime.strptime(row['updated'], "%Y-%m-%d %H:%M:%S.%f000")
+ if xDate > searchDate:
+ searchDate = xDate
+ if '_more_changes' in tmp[-1] and tmp[-1]['_more_changes'] == True:
+ rawList['offset'] = offset
+ offset += len(tmp)
+ del rawList['patch'][str(row['_number'])]['_more_changes']
+ else:
+ break
+ if 'offset' in rawList:
+ del rawList['offset']
+
+ rawList['newest-entry'] = searchDate.strftime('%Y-%m-%d %H')
+ util_dump_file(fileName, rawList)
+ return rawList
+
+
+
+def get_git(cfg):
+ fileName = cfg['homedir'] + 'dump/git_dump.json'
+ searchDate, rawList = util_load_data_file(cfg, fileName, 'git', {'commits': {}})
+ print("Updating git dump from " + rawList['newest-entry'])
+
+ for repo in cfg['git']['repos']:
+ print(' working on ' + repo['name'])
+ useFormat = '{"hash": "%H", "date": "%ci", "author": "%an", "author-email": "%ae", ' \
+ '"committer": "%cn", "committer-email": "%ce" }'
+ basedir = cfg['homedir'] + '../libreoffice/'
+ if repo['git'] and cfg['platform'] == 'linux':
+ os.system('(cd ' + basedir + repo['dir'] + ';git pull -r;git fetch --all) > /dev/null')
+ os.system('(cd ' + basedir + repo['dir'] + ";git log --pretty=format:'" + useFormat + "') > /tmp/git.log")
+ fp = open('/tmp/git.log', encoding='utf-8')
+ while True:
+ x = fp.readline()
+ if x is None or x == '':
+ break
+ row = json.loads(x)
+ row['repo'] = repo['name']
+ key = repo['name'] + '_' + row['hash']
+ if not key in rawList['commits']:
+ row['date'] = row['date'][:-6]
+ rawList['commits'][key] = row
+ x = row['date'].split(' ')[:2]
+ xDate = datetime.datetime.strptime(x[0]+' '+x[1], "%Y-%m-%d %H:%M:%S")
+ if xDate < searchDate:
+ break
+
+ nDate = searchDate
+ for key in rawList['commits']:
+ xDate = datetime.datetime.strptime(rawList['commits'][key]['date'], "%Y-%m-%d %H:%M:%S")
+ if xDate > nDate:
+ nDate = xDate
+
+ rawList['newest-entry'] = nDate.strftime('%Y-%m-%d %H')
+ util_dump_file(fileName, rawList)
+ return rawList
+
+
+
+def runCfg(platform):
+ if 'esc_homedir' in os.environ:
+ homeDir = os.environ['esc_homedir']
+ else:
+ homeDir = '/home/jani/esc'
+ cfg = util_load_file(homeDir + '/config.json')
+ if cfg == None:
+ exit(-1)
+ keys = util_load_file(homeDir + '/config_collect.json')
+ if keys == None:
+ exit(-1)
+
+ cfg.update(keys)
+ cfg['homedir'] = homeDir + '/'
+ cfg['platform'] = platform
+ print("Reading and writing data to " + cfg['homedir'])
+ return cfg
+
+
+
+def runBuild(cfg):
+ openhubData = get_openhub(cfg)
+ bugzillaData = get_bugzilla(cfg)
+ gerritData = get_gerrit(cfg)
+ gitData = get_git(cfg)
+
+
+
+if __name__ == '__main__':
+ runBuild(runCfg(sys.platform))
diff --git a/esc-reporting/esc-report.py b/esc-reporting/esc-report.py
new file mode 100755
index 0000000..3762297
--- /dev/null
+++ b/esc-reporting/esc-report.py
@@ -0,0 +1,506 @@
+#!/usr/bin/env python3
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+
+
+
+
+### DESCRIPTION
+#
+# This program uses data collected by esc-collect.py:
+#    the data is dumped to json files, with a history of at least 1 year
+#    esc/dump/['openhub','bugzilla','gerrit','git']_dump.json
+# and statistics created by esc-analyze.py:
+#    esc/stats.json (the daily data)
+#
+# The report functions run through the data files and print interesting numbers and lists
+# You can add your own report function (see report_myfunc() for an example).
+# You can also have it mailed on a daily basis
+#
+# Installed in vm174:/usr/local/bin, it runs every night (generating and mailing reports)
+#
+# This program is intended to be extended by people interested in performance numbers
+#
+
+
+
+import sys
+import csv
+import io
+import os
+import operator
+import datetime
+import json
+import xmltodict
+
+
+def util_load_data_file(fileName):
+ try:
+ fp = open(fileName, encoding='utf-8')
+ rawData = json.load(fp)
+ fp.close()
+ except Exception as e:
+ print('Error loading file ' + fileName + ' due to ' + str(e))
+ exit(-1)
+ return rawData
+
+
+
+def util_check_mail(mail, statList, combineMail):
+ if mail in combineMail:
+ mail = combineMail[mail]
+ if not mail in statList['people']:
+ print('Error mail ' + mail + ' not in stats.json/people')
+ exit(-1)
+ return mail
+
+
+
+def util_formatBugzilla(id, reporter, title):
+ return 'https://bugs.documentfoundation.org/show_bug.cgi?id={} reporter:{} -> "{}"'.format(id, reporter, title)
+
+
+
+def util_formatGerrit(id, owner, title):
+ return 'https://gerrit.libreoffice.org/#/c/{}/ author:{} -> "{}"'.format(id, owner, title)
+
+
+
+def util_print_line(fp, loopList, title, doGerrit=False, doBugzilla=False, doName=None):
+ print("\n\n" + title + ':', file=fp)
+ for i in loopList:
+ if doGerrit:
+ x = 'https://gerrit.libreoffice.org/#/c/{} {} -> "{}"'.format(i['id'], i['email'], i['title'])
+ elif doBugzilla:
+ x = 'https://bugs.documentfoundation.org/show_bug.cgi?id=' + i
+ elif not doName is None:
+ x = i + ' ' + doName[i]['name']
+ else:
+ x = i
+ print(' ' + x, file=fp)
+
+
+
+def util_build_escNumber(db, tag, statList):
+ return str(statList['data'][db][tag]) + '(' + str(statList['diff'][db][tag]) + ')'
+
+
+
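+# util_build_matrix renders a fixed-width text table for the ESC pad: one line per
+# entry in lineDesc, with columns for 1 week / 1 month / 3 months / 12 months and
+# each cell printed as value(diff vs. last week).  'index' selects a sub-table such
+# as 'committer' or 'contributor', or is None when the db has no such split.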
+def util_build_matrix(title, lineDesc, index, statList):
+ xValue = [[title, '1 week', '', '1 month', '', '3 months', '', '12 months', '']]
+ xLen = [len(xValue[0][0]), 0, 0, 0, 0, 0, 0, 0, 0]
+ for row in lineDesc:
+ xLine = [row['text']]
+ for i in '1week', '1month', '3month', '1year':
+ if index is None:
+ x1 = statList['data'][row['db']][i][row['tag']]
+ x2 = statList['diff'][row['db']][i][row['tag']]
+ else:
+ x1 = statList['data'][row['db']][index][i][row['tag']]
+ x2 = statList['diff'][row['db']][index][i][row['tag']]
+ xLine.append(str(x1))
+ xLine.append('(' + str(x2) + ')')
+ xValue.append(xLine)
+ for i in range(0,9):
+ x = len(xLine[i])
+ if x > xLen[i]:
+ xLen[i] = x
+ xText = ''
+ for i in 1, 3, 5, 7:
+ x = len(xValue[0][i])
+ if x > xLen[i]+xLen[i+1]:
+ xLen[i+1] = x - xLen[i]
+ for row in xValue:
+ xText += (' {:>' + str(xLen[0]) + '} ').format(row[0])
+ for i in 1,3,5,7:
+ if row[2] == '':
+ xText += (' {:<' + str(xLen[i]+xLen[i+1]) + '} ').format(row[i])
+ else:
+ xText += (' {:>' + str(xLen[i]) + '}{:<' + str(xLen[i+1]) + '}').format(row[i], row[i+1])
+ xText += '\n'
+ return xText
+
+
+
+def report_mentoring(statList, openhubData, gerritData, gitData, bugzillaData, cfg):
+ myStatList = {'missing_license': {},
+ 'to_abandon': [],
+ 'to_review': [],
+ 'we_miss_you_email': [],
+ 'remove_cc': [],
+ 'needsDevEval': [],
+ 'needsUXEval': [],
+ 'missing_ui_cc': [],
+ 'needinfo': [],
+ 'to_unassign': [],
+ 'assign_problem': [],
+ 'too_many_comments': [],
+ 'missing_cc': [],
+ 'to_be_closed': [],
+ 'easyhacks_new': [],
+ 'top10commit': [],
+ 'top10review': [],
+ 'welcome_back_email': [],
+ 'award_1st_email': []}
+
+ mailedDate = datetime.datetime.strptime(cfg['git']['last-mail-run'], '%Y-%m-%d') - datetime.timedelta(days=90)
+ zDate = datetime.datetime(year=2001, month=1, day=1)
+ for i in statList['people']:
+ row = statList['people'][i]
+ if not row['hasLicense'] and row['isContributor'] and row['commits']['1month']['merged'] != 0:
+ myStatList['missing_license'][row['email']] = row['name']
+ if row['commits']['1year']['merged'] == row['commits']['1month']['merged'] and \
+ row['commits']['1month']['merged'] != 0 and not row['email'] in cfg['contributor']['award-mailed']:
+ myStatList['award_1st_email'].append(row['email'])
+
+ newestCommitDate = datetime.datetime.strptime(row['newestCommit'], '%Y-%m-%d')
+ prevCommitDate = datetime.datetime.strptime(row['prevCommit'], '%Y-%m-%d')
+ if newestCommitDate >= mailedDate and \
+ newestCommitDate < cfg['3monthDate']:
+ myStatList['we_miss_you_email'].append(i)
+ delta = newestCommitDate - prevCommitDate
+ if delta > datetime.timedelta(days=180) and prevCommitDate != zDate and \
+ newestCommitDate > cfg['1weekDate'] :
+ myStatList['welcome_back_email'].append(i)
+
+ for key in gerritData['patch']:
+ row = gerritData['patch'][key]
+ if row['status'] == 'SUBMITTED' or row['status'] == 'DRAFT':
+ row['status'] = 'NEW'
+ xDate = datetime.datetime.strptime(row['updated'], '%Y-%m-%d %H:%M:%S.%f000')
+ ownerEmail = util_check_mail(row['owner']['email'], statList, cfg['contributor']['combine-email'])
+
+ if row['status'] == 'NEW':
+ doBlock = False
+ cntReview = 0
+ for x1 in 'Code-Review', 'Verified':
+ for x in row['labels'][x1]['all']:
+ if x['value'] == -2:
+ doBlock = True
+ if x['email'] != ownerEmail and x['email'] != 'ci@libreoffice.org':
+ cntReview += 1
+ if xDate < cfg['1monthDate'] and not doBlock:
+ myStatList['to_abandon'].append({'id': key, 'email': row['owner']['email'], 'title': row['subject']})
+ if cntReview == 0 and not statList['people'][ownerEmail]['isCommitter']:
+ myStatList['to_review'].append({'id': key, 'email': row['owner']['email'], 'title': row['subject']})
+
+ for key, row in bugzillaData['bugs'].items():
+ if not 'cc' in row:
+ row['cc'] = []
+ if not 'keywords' in row:
+ row['keywords'] = []
+
+ if row['status'] == 'RESOLVED' or row['status'] == 'VERIFIED':
+ continue
+
+ if not 'easyHack' in row['keywords']:
+ if 'jani' in row['cc']:
+ myStatList['remove_cc'].append(key)
+ continue
+
+ if 'needsDevEval' in row['keywords']:
+ myStatList['needsDevEval'].append(key)
+ if 'needsUXEval' in row['keywords']:
+ myStatList['needsUXEval'].append(key)
+ if 'topicUI' in row['keywords'] and 'libreoffice-ux-advise@lists.freedesktop.org' not in row['cc']:
+ myStatList['missing_ui_cc'].append(key)
+
+ if row['status'] == 'NEEDINFO':
+ myStatList['needinfo'].append(key)
+ elif row['status'] == 'ASSIGNED':
+ xDate = datetime.datetime.strptime(row['last_change_time'], "%Y-%m-%dT%H:%M:%SZ")
+ if xDate < cfg['1monthDate']:
+ myStatList['to_unassign'].append(key)
+
+ if (row['status'] == 'ASSIGNED' and row['assigned_to'] == '') or \
+ (row['status'] != 'ASSIGNED' and row['assigned_to'] != '' and \
+ row['assigned_to'] != 'libreoffice-bugs@lists.freedesktop.org') :
+ myStatList['assign_problem'].append(key)
+
+ if len(row['comments']) >= 5:
+ myStatList['too_many_comments'].append(key)
+
+ if not 'jani@documentfoundation.org' in row['cc']:
+ myStatList['missing_cc'].append(key)
+
+ if row['comments'][-1]['creator'] == 'libreoffice-commits@lists.freedesktop.org' and \
+ not key in cfg['bugzilla']['close_except']:
+ myStatList['to_be_closed'].append(key)
+
+ cDate = datetime.datetime.strptime(row['creation_time'], "%Y-%m-%dT%H:%M:%SZ")
+ if cDate >= cfg['1weekDate'] or 'easyhack' in row['history'][-1]['changes'][0]['added']:
+ myStatList['easyhacks_new'].append(key)
+
+ tmpClist = sorted(statList['people'], key=lambda k: (statList['people'][k]['commits']['1month']['merged']), reverse=True)
+ for i in tmpClist:
+ if not statList['people'][i]['isCommitter']:
+ x = {'mail': i, 'name': statList['people'][i]['name'],
+ 'month' :statList['people'][i]['commits']['1month']['merged'],
+ 'year':statList['people'][i]['commits']['1year']['merged']}
+ myStatList['top10commit'].append(x)
+ if len(myStatList['top10commit']) >= 10:
+ break
+ tmpRlist = sorted(statList['people'], key=lambda k: (statList['people'][k]['gerrit']['1month']['reviewer']), reverse=True)
+ for i in tmpRlist:
+ if i != 'ci@libreoffice.org':
+ x = {'mail': i, 'name': statList['people'][i]['name'],
+ 'month' :statList['people'][i]['gerrit']['1month']['reviewer'],
+ 'year':statList['people'][i]['gerrit']['1year']['reviewer']}
+ myStatList['top10review'].append(x)
+ if len(myStatList['top10review']) >= 10:
+ break
+
+ fp = open('/tmp/esc_mentoring_report.txt', 'w', encoding='utf-8')
+ print('ESC mentoring report, generated {} based on stats.json from {}'.format(
+ datetime.datetime.now().strftime("%Y-%m-%d"), statList['addDate']), file=fp)
+
+ print("copy/paste to esc pad:\n"
+ "* mentoring/easyhack update (janI)\n"
+ " + openhub statistics based on analysis from {}\n"
+ " {} people did in total: {} commits in {} lines of code\n"
+ " {} people did in 12 month: {} commits\n"
+ " + gerrit/git statistics:".format(
+ statList['stat']['openhub_last_analyse'],
+ util_build_escNumber('openhub', 'total_contributors', statList),
+ util_build_escNumber('openhub', 'total_commits', statList),
+ util_build_escNumber('openhub', 'lines_of_code', statList),
+ util_build_escNumber('openhub', 'year_contributors', statList),
+ util_build_escNumber('openhub', 'year_commits', statList)), file=fp)
+
+ xRow = [{'db': 'gerrit', 'tag': 'NEW', 'text': 'open'},
+ {'db': 'gerrit', 'tag': 'reviewed', 'text': 'reviews'},
+ {'db': 'gerrit', 'tag': 'MERGED', 'text': 'merged'},
+ {'db': 'gerrit', 'tag': 'ABANDONED', 'text': 'abandoned'},
+ {'db': 'commits', 'tag': '#', 'text': 'commits'}]
+ print(util_build_matrix('committer...', xRow, 'committer', statList), end='', file=fp)
+ print(util_build_matrix('contributor...', xRow, 'contributor', statList), end='', file=fp)
+
+ print(" + easyHack statistics:\n ", end='', file=fp)
+ for i1 in 'needsDevEval', 'needsUXEval', 'cleanup_comments', 'total', 'assigned', 'open':
+ print(i1 + ' ' + util_build_escNumber('easyhacks', i1, statList) + ' ', end="", file=fp)
+ if i1 == 'cleanup_comments':
+ print('\n ', end='', file=fp)
+ print("\n + received patches from " + str(len(myStatList['missing_license'])) + " emails the last month without licesense statement", file=fp)
+ print(" + top 5 contributors:", file=fp)
+ for i in range(0, 5):
+ print(' {} made {} patches in 1 month, and {} patches in 1 year'.format(
+ myStatList['top10commit'][i]['name'],
+ myStatList['top10commit'][i]['month'],
+ myStatList['top10commit'][i]['year']), file=fp)
+ print(" + top 5 reviewers:", file=fp)
+ for i in range(0, 5):
+ print(' {} made {} review comments in 1 month, and {} in 1 year'.format(
+ myStatList['top10review'][i]['name'],
+ myStatList['top10review'][i]['month'],
+ myStatList['top10review'][i]['year']), file=fp)
+
+ print(" + big CONGRATULATIONS to contributors who have at least 1 merged patch, since last report:", file=fp)
+ for i in myStatList['award_1st_email']:
+ print(' ' + statList['people'][i]['name'], file=fp)
+ print("\n\n\n\n\n\n\n\n\n\n", file=fp)
+
+ print('Day mentoring report, generated {} based on stats.json from {}'.format(
+ datetime.datetime.now().strftime("%Y-%m-%d"), statList['addDate']), file=fp)
+
+ util_print_line(fp, myStatList['welcome_back_email'], 'welcome back', doName=statList['people'])
+ util_print_line(fp, myStatList['missing_license'], 'missing license statement', doName=statList['people'])
+ util_print_line(fp, myStatList['to_abandon'], 'gerrit to abandon', doGerrit=True)
+ util_print_line(fp, myStatList['to_review'], 'gerrit to review', doGerrit=True)
+ util_print_line(fp, myStatList['to_unassign'], 'easyhacks to unassign', doBugzilla=True)
+ util_print_line(fp, myStatList['needinfo'], 'easyhacks with NEEDINFO', doBugzilla=True)
+ util_print_line(fp, myStatList['easyhacks_new'], 'easyhacks new', doBugzilla=True)
+ util_print_line(fp, myStatList['missing_cc'], 'easyhacks missing cc', doBugzilla=True)
+ util_print_line(fp, myStatList['remove_cc'], 'easyhacks remove cc', doBugzilla=True)
+ util_print_line(fp, myStatList['missing_ui_cc'], 'easyhacks missing ui cc', doBugzilla=True)
+ util_print_line(fp, myStatList['assign_problem'], 'easyhacks assign problem', doBugzilla=True)
+ util_print_line(fp, myStatList['to_be_closed'], 'easyhacks to be closed', doBugzilla=True)
+ util_print_line(fp, myStatList['needsDevEval'], 'easyhacks needsDevEval', doBugzilla=True)
+ util_print_line(fp, myStatList['needsUXEval'], 'easyhacks needsUXEval', doBugzilla=True)
+ util_print_line(fp, myStatList['we_miss_you_email'], 'we miss you email', doName=statList['people'])
+ util_print_line(fp, myStatList['too_many_comments'], 'easyhacks reduce comments', doBugzilla=True)
+ fp.close()
+
+ return {'title': 'esc_mentoring, MENTORING', 'mail': 'jani@documentfoundation.org', 'file': '/tmp/esc_mentoring_report.txt'}
+
+
+
+def report_ui(statList, openhubData, gerritData, gitData, bugzillaData, cfg):
+ tmpClist = sorted(statList['people'], key=lambda k: (statList['people'][k]['ui']['1month']['total']), reverse=True)
+ top10list = []
+ for i in tmpClist:
+ if i != 'qa-admin@libreoffice.org' and i != 'libreoffice-commits@lists.freedesktop.org':
+ x = {'mail': i, 'name': statList['people'][i]['name'],
+ 'month' :statList['people'][i]['ui']['1month']['total'],
+ 'year':statList['people'][i]['ui']['1year']['total']}
+ top10list.append(x)
+ if len(top10list) >= 10:
+ break
+
+ fp = open('/tmp/esc_ui_report.txt', 'w', encoding='utf-8')
+ print('ESC UI report, generated {} based on stats.json from {}'.format(
+ datetime.datetime.now().strftime("%Y-%m-%d"), statList['addDate']), file=fp)
+
+ print("copy/paste to esc pad:\n"
+ "* UX update (heiko)\n"
+ " + Bugzilla (topicUI) statistics\n"
+ " {} bugs open, {} needs to be evaluated by the UXteam\n"
+ " + Updates:".format(
+ util_build_escNumber('ui', 'topicUI', statList),
+ util_build_escNumber('ui', 'needsUXEval', statList)), file=fp)
+
+ xRow = [{'db': 'ui', 'tag': 'added', 'text': 'added'},
+ {'db': 'ui', 'tag': 'commented', 'text': 'commented'},
+ {'db': 'ui', 'tag': 'removed', 'text': 'removed'}]
+ print(util_build_matrix('BZ changes', xRow, None, statList), end='', file=fp)
+
+ print(" + top 10 contributors:", file=fp)
+ for i in range(0, 10):
+ print(' {} made {} changes in 1 month, and {} changes in 1 year'.format(
+ top10list[i]['mail'], top10list[i]['month'], top10list[i]['year']), file=fp)
+ fp.close()
+ return {'title': 'esc_mentoring, UI', 'mail': 'jani@documentfoundation.org',
+ 'file': '/tmp/esc_ui_report.txt'}
+
+
+
+def report_qa(statList, openhubData, gerritData, gitData, bugzillaData, cfg):
+ tmpClist = sorted(statList['people'], key=lambda k: (statList['people'][k]['qa']['1month']['total']), reverse=True)
+ top10list = []
+ for i in tmpClist:
+ if i != 'qa-admin@libreoffice.org' and i != 'libreoffice-commits@lists.freedesktop.org':
+ x = {'mail': i, 'name': statList['people'][i]['name'],
+ 'month' :statList['people'][i]['qa']['1month']['total'],
+ 'year':statList['people'][i]['qa']['1year']['total']}
+ top10list.append(x)
+ if len(top10list) >= 10:
+ break
+
+ fp = open('/tmp/esc_qa_report.txt', 'w', encoding='utf-8')
+ print('ESC QA report, generated {} based on stats.json from {}'.format(
+ datetime.datetime.now().strftime("%Y-%m-%d"), statList['addDate']), file=fp)
+
+ print("copy/paste to esc pad:\n"
+ "* qa update (xisco)\n"
+ " + Bugzilla statistics", file=fp)
+
+ xRow = [{'db': 'qa', 'tag': 'ASSIGNED', 'text': 'ASSIGNED'},
+ {'db': 'qa', 'tag': 'CLOSED', 'text': 'CLOSED'},
+ {'db': 'qa', 'tag': 'NEEDINFO', 'text': 'NEEDINFO'},
+ {'db': 'qa', 'tag': 'NEW', 'text': 'NEW'},
+ {'db': 'qa', 'tag': 'PLEASETEST', 'text': 'PLEASETEST'},
+ {'db': 'qa', 'tag': 'REOPENED', 'text': 'REOPENED'},
+ {'db': 'qa', 'tag': 'RESOLVED', 'text': 'RESOLVED'},
+ {'db': 'qa', 'tag': 'UNCONFIRMED', 'text': 'UNCONFIRMED'},
+ {'db': 'qa', 'tag': 'VERIFIED', 'text': 'VERIFIED'},
+ {'db': 'qa', 'tag': 'commented', 'text': 'commented'},
+ {'db': 'qa', 'tag': 'total', 'text': 'total'}]
+ print(util_build_matrix('BZ changes', xRow, None, statList), end='', file=fp)
+
+ print("\n + top 10 contributors:", file=fp)
+ for i in range(0, 10):
+ print(' {} made {} changes in 1 month, and {} changes in 1 year'.format(
+ top10list[i]['mail'], top10list[i]['month'], top10list[i]['year']), file=fp)
+ fp.close()
+ return None
+
+
+
+def report_myfunc(statList, openhubData, gerritData, gitData, bugzillaData, cfg):
+
+ # return {'title': 'mail from me', 'mail': 'my@own.home', 'file': '/tmp/myfile.txt'}
+ return None
+
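+# A minimal sketch of a custom report; the function name, file name and mail
+# address are illustrative only.  Like report_myfunc() it only takes effect if
+# it is called from runReport(), which mails the returned 'file' to 'mail'
+# with subject 'title'.
+def report_easyhack_summary(statList, openhubData, gerritData, gitData, bugzillaData, cfg):
+    fp = open('/tmp/esc_easyhack_summary.txt', 'w', encoding='utf-8')
+    print('easyHacks: {} total, {} open, {} assigned'.format(
+        statList['data']['easyhacks']['total'],
+        statList['data']['easyhacks']['open'],
+        statList['data']['easyhacks']['assigned']), file=fp)
+    fp.close()
+    return {'title': 'esc easyHack summary', 'mail': 'my@own.home', 'file': '/tmp/esc_easyhack_summary.txt'}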
+
+
+def DUMP_report(cfg, statList) :
+ return
+ tot = len(statList['list']['easyHacks_comments'])
+ print('dumping {} easyHacks with more than 5 comments:'.format(tot))
+ x = 0
+ for id in statList['list']['easyHacks_comments']:
+ if x%10 == 0:
+ print('dumping {} of {}'.format(x, tot))
+ x += 1
+ bug = get_bug(id)
+ fileName = homeDir + 'bz_comments/bug_' + str(id) + '.json'
+ try:
+ fp = open(fileName, 'w')
+ json.dump(bug, fp, ensure_ascii=False, indent=4, sort_keys=True)
+ except:
+ print("could not dump "+fileName)
+ fp.close()
+ os.remove(fileName)
+ exit(-1)
+ fp.close()
+ fileName = homeDir + 'bz_comments/comment_' + str(id) + '.json'
+ try:
+ fp = open(fileName, 'w')
+ json.dump(bug['long_desc'], fp, ensure_ascii=False, indent=4, sort_keys=True)
+ except:
+ print("could not dump "+fileName)
+ fp.close()
+ os.remove(fileName)
+ exit(-1)
+ fp.close()
+
+
+
+def runCfg(platform):
+ if 'esc_homedir' in os.environ:
+ homeDir = os.environ['esc_homedir']
+ else:
+ homeDir = '/home/jani/esc'
+ cfg = util_load_data_file(homeDir + '/config.json')
+ cfg['homedir'] = homeDir + '/'
+ cfg['platform'] = platform
+ print("Reading and writing data to " + cfg['homedir'])
+
+ cfg['contributor'] = util_load_data_file(cfg['homedir'] + 'dump/developers_dump.json')
+ cfg['nowDate'] = datetime.datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
+ cfg['cutDate'] = cfg['nowDate'] - datetime.timedelta(days=365)
+ cfg['1weekDate'] = cfg['nowDate'] - datetime.timedelta(days=7)
+ cfg['1monthDate'] = cfg['nowDate'] - datetime.timedelta(days=30)
+ cfg['3monthDate'] = cfg['nowDate'] - datetime.timedelta(days=90)
+ cfg['1yearDate'] = cfg['nowDate'] - datetime.timedelta(days=365)
+ return cfg
+
+
+
+def runReport(cfg):
+ statList = util_load_data_file(cfg['homedir'] + 'stats.json')
+ openhubData = util_load_data_file(cfg['homedir'] + 'dump/openhub_dump.json')
+ bugzillaData = util_load_data_file(cfg['homedir'] + 'dump/bugzilla_dump.json')
+ gerritData = util_load_data_file(cfg['homedir'] + 'dump/gerrit_dump.json')
+ gitData = util_load_data_file(cfg['homedir'] + 'dump/git_dump.json')
+
+ xMail = []
+ x = report_mentoring(statList, openhubData, gerritData, gitData, bugzillaData, cfg)
+ if not x is None:
+ xMail.append(x)
+ x = report_ui(statList, openhubData, gerritData, gitData, bugzillaData, cfg)
+ if not x is None:
+ xMail.append(x)
+ x = report_qa(statList, openhubData, gerritData, gitData, bugzillaData, cfg)
+ if not x is None:
+ xMail.append(x)
+ x = report_myfunc(statList, openhubData, gerritData, gitData, bugzillaData, cfg)
+ if not x is None:
+ xMail.append(x)
+
+ fp = open('/tmp/runMail', 'w', encoding='utf-8')
+ print("#!/bin/bash", file=fp)
+ print("")
+ for i in xMail:
+ print("mail -s '" + i['title'] + "' " + i['mail'] + " < " + i['file'], file=fp)
+ fp.close()
+
+
+
+if __name__ == '__main__':
+ runReport(runCfg(sys.platform))