Compare commits
10 Commits
3151881de3
...
aeee652a95
Author | SHA1 | Date |
---|---|---|
|
aeee652a95 | |
|
76511834a3 | |
|
7eebb69e51 | |
|
9f70876576 | |
|
1e23eb024e | |
|
9222024493 | |
|
70ac34a672 | |
|
6c7f1642da | |
|
e6d35a85a9 | |
|
197659c020 |
|
@ -0,0 +1,56 @@
|
|||
#!/usr/bin/env python
|
||||
import datetime
|
||||
import logging
|
||||
import pprint
|
||||
import requests
|
||||
import teamanalysis.config
|
||||
import teamanalysis.jira
|
||||
import teamanalysis.time
|
||||
|
||||
LOGGER = logging.getLogger('jira-summary')
|
||||
|
||||
def _get_without_epic(created):
|
||||
without_epic = [issue for issue in created['issues'] if issue['fields']['customfield_10008'] is None]
|
||||
return without_epic
|
||||
|
||||
def _get_without_estimate(created):
|
||||
def _has_estimate(issue):
|
||||
return any([issue['fields']['aggregatetimeestimate'],
|
||||
issue['fields']['aggregatetimeoriginalestimate']])
|
||||
|
||||
without_estimate = [issue for issue in created['issues'] if not _has_estimate(issue)]
|
||||
return without_estimate
|
||||
|
||||
def _show_summary(session, timepoint):
    """Print one tab-separated summary row for the period around *timepoint*.

    Columns: period start date, period end date, issues created, issues
    resolved, created issues lacking an estimate, created issues lacking
    an epic link.
    """
    start, end = teamanalysis.time.get_checkpoint(timepoint)
    created = teamanalysis.jira.issues_created_between(session, start, end)
    missing_epic = _get_without_epic(created)
    missing_estimate = _get_without_estimate(created)
    resolved = teamanalysis.jira.issues_resolved_between(session, start, end)
    columns = [
        start.date().isoformat(),
        end.date().isoformat(),
        str(created['total']),
        str(resolved['total']),
        str(len(missing_estimate)),
        str(len(missing_epic)),
    ]
    print("\t".join(columns))
|
||||
|
||||
def main():
    """Print a weekly Jira created/resolved summary from 2015-08-01 to now."""
    logging.basicConfig()
    logging.getLogger().setLevel(logging.DEBUG)
    # requests is chatty at DEBUG; keep it quiet.
    logging.getLogger('requests').setLevel(logging.WARN)

    config = teamanalysis.config.get()

    timepoint = datetime.datetime(2015, 8, 1, 0, 0, 1)
    session = teamanalysis.jira.create_session(**config['jira'])
    print("Start Date\tEnd Date\tCreated\tCompl\tNo est\tNo epic")
    week = datetime.timedelta(days=7)
    cutoff = datetime.datetime.utcnow() + week
    while timepoint < cutoff:
        _show_summary(session, timepoint)
        timepoint += week
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -7,6 +7,7 @@ import os
|
|||
import pprint
|
||||
import re
|
||||
import subprocess
|
||||
import teamanalysis.config
|
||||
import teamanalysis.git
|
||||
import teamanalysis.repos
|
||||
import teamanalysis.time
|
||||
|
@ -17,7 +18,7 @@ LOGGER = logging.getLogger('overview-by-date')
|
|||
def _get_abspath(repo):
|
||||
return '/Users/eliribble/src/teamanalysis/repos/{}/'.format(repo)
|
||||
|
||||
def _get_commit_count(repo, start, end):
|
||||
def _get_commit_count(repo, start, end, my_email):
|
||||
abspath = _get_abspath(repo)
|
||||
os.chdir(abspath)
|
||||
command = ['git', 'checkout', 'master']
|
||||
|
@ -25,14 +26,17 @@ def _get_commit_count(repo, start, end):
|
|||
command = [
|
||||
'git',
|
||||
'log',
|
||||
'--pretty=format:"%h %aI"',
|
||||
'--pretty=format:"%h %aI %aE"',
|
||||
'--after={}'.format(start.isoformat()),
|
||||
'--before={}'.format(end.isoformat())]
|
||||
LOGGER.debug(" ".join(command))
|
||||
output = subprocess.check_output(command)
|
||||
output = output.decode('utf-8')
|
||||
lines = output.split('\n')
|
||||
result = len(lines) if output else 0
|
||||
return result
|
||||
total = len(lines) if output else 0
|
||||
my_lines = [line for line in lines if my_email in line]
|
||||
mine = len(my_lines)
|
||||
return {'total': total, 'mine': mine}
|
||||
|
||||
def _get_commit_sha_by_date(repo, timepoint):
|
||||
abspath = _get_abspath(repo)
|
||||
|
@ -61,7 +65,11 @@ def _git_checkout_by_date(repo, timepoint):
|
|||
'checkout',
|
||||
commit,
|
||||
]
|
||||
output = subprocess.check_output(command, stderr=subprocess.STDOUT)
|
||||
try:
|
||||
output = subprocess.check_output(command, stderr=subprocess.STDOUT)
|
||||
except subprocess.CalledProcessError:
|
||||
LOGGER.warning("Failed to execute '%s' in %s", command, abspath)
|
||||
return ''
|
||||
LOGGER.debug("Checked out %s at %s", repo, timepoint.date().isoformat())
|
||||
return output
|
||||
|
||||
|
@ -77,7 +85,8 @@ def _count_tests(repo):
|
|||
LOGGER.debug(" ".join(command))
|
||||
try:
|
||||
output = subprocess.check_output(command, stderr=subprocess.STDOUT)
|
||||
except subprocess.CalledProcessError:
|
||||
output = output.decode('utf-8')
|
||||
except subprocess.CalledProcessError as e:
|
||||
LOGGER.info("Failed to call py.test for %s", repo)
|
||||
return 0
|
||||
count = 0
|
||||
|
@ -92,12 +101,12 @@ def _get_test_count(repo, end):
|
|||
_git_checkout_by_date(repo, end)
|
||||
return _count_tests(repo)
|
||||
|
||||
def _show_summary(timepoint):
|
||||
def _show_summary(timepoint, my_email):
|
||||
start, end = teamanalysis.time.get_checkpoint(timepoint)
|
||||
LOGGER.debug("Working %s to %s for %s", start, end, timepoint)
|
||||
results = {}
|
||||
for repo in teamanalysis.repos.REPOSITORIES:
|
||||
commits = _get_commit_count(repo, start, end)
|
||||
commits = _get_commit_count(repo, start, end, my_email)
|
||||
tests = _get_test_count(repo, end)
|
||||
results[repo] = {
|
||||
'commits' : commits,
|
||||
|
@ -107,19 +116,22 @@ def _show_summary(timepoint):
|
|||
#pprint.pprint({k: v['tests'] for k, v in results.items()})
|
||||
#pprint.pprint({k: v['commits'] for k, v in results.items()})
|
||||
totals = {
|
||||
'commits' : sum([result['commits'] for result in results.values()]),
|
||||
'tests' : sum([result['tests'] for result in results.values() if result['tests']]),
|
||||
'all_commits' : sum([result['commits']['total'] for result in results.values()]),
|
||||
'my_commits' : sum([result['commits']['mine'] for result in results.values()]),
|
||||
'tests' : sum([result['tests'] for result in results.values() if result['tests']]),
|
||||
}
|
||||
print("{}\t{}\t{}\t{}".format(start.date().isoformat(), end.date().isoformat(), totals['commits'], totals['tests']))
|
||||
print("\t".join(map(str, [start.date().isoformat(), end.date().isoformat(), totals['all_commits'], totals['my_commits'], totals['tests']])))
|
||||
|
||||
def main():
|
||||
logging.basicConfig()
|
||||
LOGGER.setLevel(logging.INFO)
|
||||
#LOGGER.setLevel(logging.DEBUG)
|
||||
timepoint = datetime.datetime(2015, 5, 1, 0, 0, 1)
|
||||
|
||||
config = teamanalysis.config.get()
|
||||
timepoint = datetime.datetime(2015, 8, 1, 0, 0, 1)
|
||||
now = datetime.datetime.utcnow()
|
||||
while timepoint < now + datetime.timedelta(days=7):
|
||||
_show_summary(timepoint)
|
||||
_show_summary(timepoint, config['my_email'])
|
||||
timepoint = timepoint + datetime.timedelta(days=7)
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
|
|
@ -1,20 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
import argparse
|
||||
import teamanalysis.git
|
||||
import teamanalysis.repos
|
||||
|
||||
def main():
    """Print shortlog entries matching a person's name across every repo."""
    parser = argparse.ArgumentParser()
    parser.add_argument('name', help='The name of the person to look up')
    args = parser.parse_args()

    for repo in teamanalysis.repos.REPOSITORIES:
        print(repo)
        repo_path = '/Users/eliribble/src/{}/'.format(repo)
        # entry is (count, author); match on the author field.
        for entry in teamanalysis.git.shortlog(repo_path):
            if args.name in entry[1]:
                print("\t{} {}".format(*entry))
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -15,6 +15,20 @@ def _clone_repo(target, source):
|
|||
]
|
||||
return subprocess.check_output(command)
|
||||
|
||||
def _update_repo(target):
    """Bring the git checkout at *target* up to date on master.

    Checks out master, then pulls; returns the output of ``git pull``.
    Raises Exception with a descriptive message if either git command
    fails.
    """
    os.chdir(target)
    try:
        subprocess.check_output(['git', 'checkout', 'master'])
    except subprocess.CalledProcessError as e:
        raise Exception("Failed to checkout master for repository at {}: {}".format(target, e))

    try:
        return subprocess.check_output(['git', 'pull'])
    except subprocess.CalledProcessError as e:
        raise Exception("Failed to update repository at {}: {}".format(target, e))
|
||||
|
||||
def _create_virtualenv(target):
|
||||
if os.path.exists(os.path.join(target, 've')):
|
||||
print("Skipping virtualenv for {}".format(target))
|
||||
|
@ -36,12 +50,15 @@ def main():
|
|||
for repo in teamanalysis.repos.REPOSITORIES:
|
||||
target = os.path.join(target_root, repo)
|
||||
source = 'git@bitbucket.org:Authentise/{}.git'.format(repo)
|
||||
output = _clone_repo(target, source)
|
||||
_create_virtualenv(target)
|
||||
try:
|
||||
_install_dependencies(target)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print("Failed to install dependencies for {}: {}".format(target, e))
|
||||
if not os.path.exists(target):
|
||||
output = _clone_repo(target, source)
|
||||
_create_virtualenv(target)
|
||||
try:
|
||||
_install_dependencies(target)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print("Failed to install dependencies for {}: {}".format(target, e))
|
||||
else:
|
||||
output = _update_repo(target)
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
|
|
@ -0,0 +1,9 @@
|
|||
import json
|
||||
import os
|
||||
|
||||
def get():
    """Load the user's team-analysis configuration.

    Reads JSON from ``$HOME/.teamanalysis`` and returns the parsed data.
    """
    config_path = os.path.join(os.environ['HOME'], '.teamanalysis')
    with open(config_path, 'r') as handle:
        return json.load(handle)
|
|
@ -0,0 +1,62 @@
|
|||
import logging
|
||||
import requests
|
||||
import requests.auth
|
||||
|
||||
LOGGER = logging.getLogger(__name__)
|
||||
|
||||
def create_session(username, password):
    """Build a requests Session authenticated with HTTP basic auth."""
    session = requests.Session()
    session.auth = requests.auth.HTTPBasicAuth(username, password)
    return session
|
||||
|
||||
def _do_search(session, payload, startAt=None):
|
||||
startAt = startAt if startAt is not None else 0
|
||||
payload['startAt'] = startAt
|
||||
response = session.post("https://sendshapes.atlassian.net/rest/api/2/search", json=payload)
|
||||
if not response.ok:
|
||||
import pdb;pdb.set_trace()
|
||||
raise Exception("Failed to query jira: {}".format(response.json()))
|
||||
data = response.json()
|
||||
return data
|
||||
|
||||
def search(session, payload):
    """Run a Jira search, transparently following pagination.

    Fetches pages via _do_search, advancing startAt by each page's
    maxResults, and accumulates every page's issues into the first
    response dict, which is returned.
    """
    startAt = 0
    data = _do_search(session, payload, startAt)
    # The first page's dict doubles as the accumulator that is returned.
    results = data
    # Jira caps each response at maxResults; keep fetching while the
    # widening window (sum of page sizes so far) is below the total hits.
    while results['total'] > results['maxResults']:
        # Advance by the size of the page just fetched, then pull the next.
        startAt += data['maxResults']
        data = _do_search(session, payload, startAt)
        results['maxResults'] += data['maxResults']
        results['issues'] += data['issues']
    return results
|
||||
|
||||
def get_issue(session, issue):
    """Look up a single Jira issue by id via the search endpoint."""
    return search(session, {"jql": "id = {}".format(issue)})
|
||||
|
||||
def issues_created_between(session, start, end):
    """Search for issues created in [start, end), fetching estimate fields."""
    window = "created >= {} AND created < {}".format(
        start.date().isoformat(), end.date().isoformat())
    wanted_fields = [
        "aggregateprogress",
        "aggregatetimeestimate",
        "aggregatetimeoriginalestimate",
        "aggregatetimespent",
        "created",
        "customfield_10008",
        "id",
    ]
    return search(session, {"jql": window, "fields": wanted_fields})
|
||||
|
||||
def issues_resolved_between(session, start, end):
    """Search for issues resolved in [start, end)."""
    window = "resolved >= {} AND resolved < {}".format(
        start.date().isoformat(), end.date().isoformat())
    return search(session, {"jql": window, "fields": ["id", "resolved"]})
|
|
@ -14,6 +14,8 @@ REPOSITORIES = [
|
|||
#'lowes-scanner',
|
||||
#'marketing-website',
|
||||
#'musicbot',
|
||||
'OctoPrint',
|
||||
'OctoPrint-Authentise',
|
||||
'pao',
|
||||
#'partner.authentise.com',
|
||||
'quickslice',
|
||||
|
|
Loading…
Reference in New Issue