Compare commits

..

No commits in common. "aeee652a956064027ae6b93b4dc0f7be5a7e675e" and "3151881de323a7a596990d5f4b79a6e1cf77dc0a" have entirely different histories.

7 changed files with 39 additions and 177 deletions

View File

@ -1,56 +0,0 @@
#!/usr/bin/env python
import datetime
import logging
import pprint
import requests
import teamanalysis.config
import teamanalysis.jira
import teamanalysis.time
LOGGER = logging.getLogger('jira-summary')
def _get_without_epic(created):
without_epic = [issue for issue in created['issues'] if issue['fields']['customfield_10008'] is None]
return without_epic
def _get_without_estimate(created):
def _has_estimate(issue):
return any([issue['fields']['aggregatetimeestimate'],
issue['fields']['aggregatetimeoriginalestimate']])
without_estimate = [issue for issue in created['issues'] if not _has_estimate(issue)]
return without_estimate
def _show_summary(session, timepoint):
    """Print one tab-separated summary row for the week containing *timepoint*.

    Columns: start date, end date, issues created, issues resolved,
    created-without-estimate count, created-without-epic count.
    """
    start, end = teamanalysis.time.get_checkpoint(timepoint)
    created = teamanalysis.jira.issues_created_between(session, start, end)
    resolved = teamanalysis.jira.issues_resolved_between(session, start, end)
    missing_epic = _get_without_epic(created)
    missing_estimate = _get_without_estimate(created)
    columns = [
        start.date().isoformat(),
        end.date().isoformat(),
        str(created['total']),
        str(resolved['total']),
        str(len(missing_estimate)),
        str(len(missing_epic)),
    ]
    print("\t".join(columns))
def main():
    """Print a weekly JIRA summary table from 2015-08-01 through next week.

    Configures logging (quieting the chatty 'requests' logger), builds an
    authenticated JIRA session from the user's config, then emits one
    summary row per week.
    """
    logging.basicConfig()
    logging.getLogger().setLevel(logging.DEBUG)
    logging.getLogger('requests').setLevel(logging.WARN)
    config = teamanalysis.config.get()
    session = teamanalysis.jira.create_session(**config['jira'])
    print("Start Date\tEnd Date\tCreated\tCompl\tNo est\tNo epic")
    week = datetime.timedelta(days=7)
    timepoint = datetime.datetime(2015, 8, 1, 0, 0, 1)
    # Include the in-progress week by running until one week past "now".
    deadline = datetime.datetime.utcnow() + week
    while timepoint < deadline:
        _show_summary(session, timepoint)
        timepoint += week
# Script entry point: run the weekly JIRA summary when executed directly.
if __name__ == '__main__':
    main()

View File

@ -7,7 +7,6 @@ import os
import pprint
import re
import subprocess
import teamanalysis.config
import teamanalysis.git
import teamanalysis.repos
import teamanalysis.time
@ -18,7 +17,7 @@ LOGGER = logging.getLogger('overview-by-date')
def _get_abspath(repo):
return '/Users/eliribble/src/teamanalysis/repos/{}/'.format(repo)
def _get_commit_count(repo, start, end, my_email):
def _get_commit_count(repo, start, end):
abspath = _get_abspath(repo)
os.chdir(abspath)
command = ['git', 'checkout', 'master']
@ -26,17 +25,14 @@ def _get_commit_count(repo, start, end, my_email):
command = [
'git',
'log',
'--pretty=format:"%h %aI %aE"',
'--pretty=format:"%h %aI"',
'--after={}'.format(start.isoformat()),
'--before={}'.format(end.isoformat())]
LOGGER.debug(" ".join(command))
output = subprocess.check_output(command)
output = output.decode('utf-8')
lines = output.split('\n')
total = len(lines) if output else 0
my_lines = [line for line in lines if my_email in line]
mine = len(my_lines)
return {'total': total, 'mine': mine}
result = len(lines) if output else 0
return result
def _get_commit_sha_by_date(repo, timepoint):
abspath = _get_abspath(repo)
@ -65,11 +61,7 @@ def _git_checkout_by_date(repo, timepoint):
'checkout',
commit,
]
try:
output = subprocess.check_output(command, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError:
LOGGER.warning("Failed to execute '%s' in %s", command, abspath)
return ''
LOGGER.debug("Checked out %s at %s", repo, timepoint.date().isoformat())
return output
@ -85,8 +77,7 @@ def _count_tests(repo):
LOGGER.debug(" ".join(command))
try:
output = subprocess.check_output(command, stderr=subprocess.STDOUT)
output = output.decode('utf-8')
except subprocess.CalledProcessError as e:
except subprocess.CalledProcessError:
LOGGER.info("Failed to call py.test for %s", repo)
return 0
count = 0
@ -101,12 +92,12 @@ def _get_test_count(repo, end):
_git_checkout_by_date(repo, end)
return _count_tests(repo)
def _show_summary(timepoint, my_email):
def _show_summary(timepoint):
start, end = teamanalysis.time.get_checkpoint(timepoint)
LOGGER.debug("Working %s to %s for %s", start, end, timepoint)
results = {}
for repo in teamanalysis.repos.REPOSITORIES:
commits = _get_commit_count(repo, start, end, my_email)
commits = _get_commit_count(repo, start, end)
tests = _get_test_count(repo, end)
results[repo] = {
'commits' : commits,
@ -116,22 +107,19 @@ def _show_summary(timepoint, my_email):
#pprint.pprint({k: v['tests'] for k, v in results.items()})
#pprint.pprint({k: v['commits'] for k, v in results.items()})
totals = {
'all_commits' : sum([result['commits']['total'] for result in results.values()]),
'my_commits' : sum([result['commits']['mine'] for result in results.values()]),
'commits' : sum([result['commits'] for result in results.values()]),
'tests' : sum([result['tests'] for result in results.values() if result['tests']]),
}
print("\t".join(map(str, [start.date().isoformat(), end.date().isoformat(), totals['all_commits'], totals['my_commits'], totals['tests']])))
print("{}\t{}\t{}\t{}".format(start.date().isoformat(), end.date().isoformat(), totals['commits'], totals['tests']))
def main():
logging.basicConfig()
LOGGER.setLevel(logging.INFO)
#LOGGER.setLevel(logging.DEBUG)
config = teamanalysis.config.get()
timepoint = datetime.datetime(2015, 8, 1, 0, 0, 1)
timepoint = datetime.datetime(2015, 5, 1, 0, 0, 1)
now = datetime.datetime.utcnow()
while timepoint < now + datetime.timedelta(days=7):
_show_summary(timepoint, config['my_email'])
_show_summary(timepoint)
timepoint = timepoint + datetime.timedelta(days=7)
if __name__ == '__main__':

20
bin/popular-repositories Executable file
View File

@ -0,0 +1,20 @@
#!/usr/bin/env python
import argparse
import teamanalysis.git
import teamanalysis.repos
def main():
    """Print, per repository, the shortlog entries matching a given name.

    Takes one positional argument (the name to look up), then walks every
    repository in teamanalysis.repos.REPOSITORIES, printing the repo name
    followed by each shortlog entry whose second element contains the name.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('name', help='The name of the person to look up')
    args = parser.parse_args()
    for repo in teamanalysis.repos.REPOSITORIES:
        print(repo)
        repo_path = '/Users/eliribble/src/{}/'.format(repo)
        # NOTE(review): entries look like (count, author) pairs from
        # `git shortlog` — confirm against teamanalysis.git.shortlog.
        for entry in teamanalysis.git.shortlog(repo_path):
            if args.name in entry[1]:
                print("\t{} {}".format(*entry))
# Script entry point: run the contributor lookup when executed directly.
if __name__ == '__main__':
    main()

View File

@ -15,20 +15,6 @@ def _clone_repo(target, source):
]
return subprocess.check_output(command)
def _update_repo(target):
    """Bring the git checkout at *target* up to date on master.

    Changes the working directory to *target*, runs `git checkout master`
    and then `git pull`, returning the pull's output. Wraps either git
    failure in an Exception with a descriptive message.
    """
    os.chdir(target)
    try:
        subprocess.check_output(['git', 'checkout', 'master'])
    except subprocess.CalledProcessError as e:
        raise Exception("Failed to checkout master for repository at {}: {}".format(target, e))
    try:
        return subprocess.check_output(['git', 'pull'])
    except subprocess.CalledProcessError as e:
        raise Exception("Failed to update repository at {}: {}".format(target, e))
def _create_virtualenv(target):
if os.path.exists(os.path.join(target, 've')):
print("Skipping virtualenv for {}".format(target))
@ -50,15 +36,12 @@ def main():
for repo in teamanalysis.repos.REPOSITORIES:
target = os.path.join(target_root, repo)
source = 'git@bitbucket.org:Authentise/{}.git'.format(repo)
if not os.path.exists(target):
output = _clone_repo(target, source)
_create_virtualenv(target)
try:
_install_dependencies(target)
except subprocess.CalledProcessError as e:
print("Failed to install dependencies for {}: {}".format(target, e))
else:
output = _update_repo(target)
if __name__ == '__main__':
main()

View File

@ -1,9 +0,0 @@
import json
import os
def get():
    """Load and return the user's team-analysis configuration.

    The configuration is a JSON document stored at ``$HOME/.teamanalysis``.
    Propagates KeyError if HOME is unset, an I/O error if the file is
    missing, and a JSON decode error if the file is malformed.
    """
    config_path = os.path.join(os.environ['HOME'], '.teamanalysis')
    with open(config_path, 'r') as config_file:
        return json.load(config_file)

View File

@ -1,62 +0,0 @@
import logging
import requests
import requests.auth
LOGGER = logging.getLogger(__name__)
def create_session(username, password):
    """Return a requests Session preconfigured with HTTP basic auth.

    Every request made through the returned session carries the given
    *username*/*password* credentials.
    """
    session = requests.Session()
    session.auth = requests.auth.HTTPBasicAuth(username, password)
    return session
def _do_search(session, payload, startAt=None):
    """POST a single JIRA search request and return the decoded JSON body.

    Parameters:
        session: an authenticated requests.Session.
        payload: the JIRA search payload (JQL, fields, ...). Mutated in
            place: 'startAt' is written into it to set the page offset.
        startAt: pagination offset into the result set; defaults to 0.

    Returns:
        The parsed JSON response dict.

    Raises:
        Exception: when JIRA answers with a non-2xx status.
    """
    # Bug fix: an interactive debugger breakpoint (`import pdb;pdb.set_trace()`)
    # was left in the error path, hanging any non-interactive run that hit a
    # failed request. It has been removed so failures raise cleanly.
    payload['startAt'] = 0 if startAt is None else startAt
    response = session.post("https://sendshapes.atlassian.net/rest/api/2/search", json=payload)
    if not response.ok:
        raise Exception("Failed to query jira: {}".format(response.json()))
    return response.json()
def search(session, payload):
    """Run a JIRA search, following pagination until all issues are fetched.

    Calls _do_search for the first page, then keeps fetching while the
    accumulated 'maxResults' window is smaller than the reported 'total',
    appending each page's 'issues' onto the first response dict and
    growing its 'maxResults' to track how many results have been covered.
    Returns that merged response dict.

    NOTE(review): assumes every page reports a non-zero 'maxResults'; a
    zero value would loop forever — confirm against the JIRA API.
    """
    startAt = 0
    data = _do_search(session, payload, startAt)
    results = data
    while results['total'] > results['maxResults']:
        # Advance the offset by the size of the page just fetched.
        startAt += data['maxResults']
        data = _do_search(session, payload, startAt)
        results['maxResults'] += data['maxResults']
        results['issues'] += data['issues']
    return results
def get_issue(session, issue):
    """Fetch a single JIRA issue by its id via the search endpoint."""
    return search(session, {"jql": "id = {}".format(issue)})
def issues_created_between(session, start, end):
    """Search JIRA for issues created on or after *start* and before *end*.

    Only the fields needed by the summary reports are requested; the epic
    link lives in custom field 'customfield_10008'.
    """
    jql_clause = "created >= {} AND created < {}".format(
        start.date().isoformat(), end.date().isoformat())
    wanted_fields = [
        "aggregateprogress",
        "aggregatetimeestimate",
        "aggregatetimeoriginalestimate",
        "aggregatetimespent",
        "created",
        "customfield_10008",
        "id",
    ]
    return search(session, {"jql": jql_clause, "fields": wanted_fields})
def issues_resolved_between(session, start, end):
    """Search JIRA for issues resolved on or after *start* and before *end*."""
    jql_clause = "resolved >= {} AND resolved < {}".format(
        start.date().isoformat(), end.date().isoformat())
    return search(session, {"jql": jql_clause, "fields": ["id", "resolved"]})

View File

@ -14,8 +14,6 @@ REPOSITORIES = [
#'lowes-scanner',
#'marketing-website',
#'musicbot',
'OctoPrint',
'OctoPrint-Authentise',
'pao',
#'partner.authentise.com',
'quickslice',