@sheac
Created September 9, 2014 21:49
#!/usr/bin/env python
"""
This script monitors Github repos for an organization ("company") and squash-merges pull
requests if someone has commented with the ":shipit:" emoji.
Why would you want to squash pull requests? It makes it very easy to get a high-level overview
of your master/deploy/release branch without getting bogged down in miniature type commits. Also
, on the off chance you need to, rolling back whole features is dead simple. Finally, you can
still retain proper git history if you keep the feature branch that the PR came in on.
You can set it up as a cron job or as a rundeck job as long as the proper Github credentials
are in place (OAuth token and ssh keys). You'll also need to ensure that the python module
"requests" is installed already.
A lockfile is used to handle the case where a job run isn't finished by the time the next
scheduled run begins.
Note that Github does not register the pull request as "merged", since it seems to require
the merge commit to use the "--no-ff" option when pushed from command line.
"""
import requests
import json
import os
import sys
import traceback
import re
COMPANY_NAME = 'company'
GITHUB_BASE = 'https://api.github.com/'
REPOS_PATH = 'orgs/'+COMPANY_NAME+'/repos'
PULLS_PATH = 'repos/'+COMPANY_NAME+'/{repo_name}/pulls'
COMMENTS_PATH = 'repos/'+COMPANY_NAME+'/{repo_name}/pulls/{pull_num}/comments'
USER_PATH = 'users/{user_login}'
PULL_PATH = 'repos/'+COMPANY_NAME+'/{repo_name}/pulls/{pull_num}'
DESIRED_CWD = '/home/rundeck/pr-watcher/'
OAUTH_TOKEN = '<oauth token>'
AUTH_HEADER_DICT = { 'Authorization': 'token ' + OAUTH_TOKEN }
SHIPIT_CODE = ':shipit:'
MERGED_SUFFIX = ' -- Squash-Merged'
LOCK_FILE_PATH = './pr-watcher-lock-file.txt'
LOCK_FILE_TEXT = 'This is a lock file for the script "pr-watcher.py"'
LOGIN_2_EMAIL_LOCAL_PART = {
    'login1': 'name1',
    'login2': 'name2',
    'login3': 'name3',
}
EMAIL_DOMAIN_PART = '@'+COMPANY_NAME+'.com'
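# With the placeholder values above, a pull request opened by GitHub user 'login1' would
# be committed as 'name1@company.com'.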
ORIGINAL_CWD = os.getcwd()
############################################################
def main():
    exit_code = 0
    if os.path.exists(LOCK_FILE_PATH):
        print 'Lockfile found: this script must already be executing in another process. Exiting this run.'
        sys.exit(exit_code)
    else:
        os.system("echo '{filetext}' > {filepath}".format(filetext=LOCK_FILE_TEXT, filepath=LOCK_FILE_PATH))
    try:
        for repo in path_json(REPOS_PATH):
            print 'Checking for pull requests in repo: "{repo_name}"'.format(repo_name=repo['name'])
            for pull in path_json(PULLS_PATH.format(repo_name=repo['name'])):
                print '\tEnsuring pull request number {pull_num} is mergeable'.format(pull_num=pull['number'])
                details = path_json(PULL_PATH.format(repo_name=repo['name'], pull_num=pull['number']))
                if not details['mergeable']:
                    print '\t\tNot mergeable: skipping this pull request'
                    continue
                print '\tChecking for "ShipIt" comments on pull request number {pull_num}'.format(pull_num=pull['number'])
                for comment in url_json(pull['comments_url']):
                    if comment['body'].strip() == SHIPIT_CODE:
                        print '\t\tFound a "ShipIt" comment'
                        squash_merge_pull(pull)
                        close_pull(pull)
    except Exception as e:
        print traceback.format_exc()
        exit_code = 1
    finally:
        os.chdir(ORIGINAL_CWD)
        os.system('rm -f {filepath}'.format(filepath=LOCK_FILE_PATH))
        sys.exit(exit_code)
############################################################
def url_json(url):
    headers = AUTH_HEADER_DICT
    return requests.get(url, headers=headers).json()

def path_json(path):
    return url_json(GITHUB_BASE + path)
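
# For example, path_json(REPOS_PATH) issues an authenticated GET against
# https://api.github.com/orgs/company/repos and returns the decoded JSON list of
# repositories (URL shown for the placeholder COMPANY_NAME above).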

def squash_merge_pull(pull):
    print '\t\t\tSquash-merging the pull request'
    clone_url = pull['base']['repo']['ssh_url']
    repo_name = pull['base']['repo']['name']
    head_branch = pull['head']['ref']
    base_branch = pull['base']['ref']
    user_email = user_email_from_pull(pull)
    user_name = user_name_from_pull(pull)
    log_and_run('rm -rf ' + repo_name)
    log_and_run('git clone ' + clone_url)
    os.chdir('./' + repo_name)
    log_and_run('git config --local user.email "' + user_email + '"')
    log_and_run('git config --local user.name "' + user_name + '"')
    log_and_run('git checkout ' + base_branch)
    log_and_run('git merge --squash origin/' + head_branch)
    log_and_run('git commit -m "{msg}"'.format(msg=gen_commit_msg(pull)))
    log_and_run('git push origin ' + base_branch)
    os.chdir('..')
    log_and_run('rm -rf ' + repo_name)

def log_and_run(cmd):
    print 'executing: "' + cmd + '"'
    os.system(cmd)

def user_email_from_pull(pull):
    login = pull['user']['login']
    return LOGIN_2_EMAIL_LOCAL_PART[login] + EMAIL_DOMAIN_PART

def user_name_from_pull(pull):
    user_login = pull['user']['login']
    return path_json(USER_PATH.format(user_login=user_login))['name']

def gen_commit_msg(pull):
    msg_template = '{msg}\nCloses Pull Request {pull_num} of {repo}\nCommits:\n{squash_msg}'
    with open('.git/SQUASH_MSG') as f:
        squash_digests = get_only_commit_msg_digests(f.read())
    return msg_template.format(
        msg=pull['title'],
        repo=pull['base']['repo']['name'],
        pull_num=pull['number'],
        squash_msg=squash_digests,
    )
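
# For reference, `git merge --squash` leaves .git/SQUASH_MSG looking roughly like this
# (contents are illustrative, not taken from a real repository):
#
#   Squashed commit of the following:
#
#   commit 4f0c1de9a2b7c3e8d1f5a6b9c0d2e4f6a8b0c1d2
#   Author: Jane Doe <jane@example.com>
#   Date:   Tue Sep 9 14:49:00 2014 -0700
#
#       Original commit message
#
# The helper below keeps only the "commit <sha>" lines, so the squash commit's message
# ends with the digests of every commit it replaces.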
def get_only_commit_msg_digests(msg):
    lines = msg.split('\n')
    digest_lines = [line for line in lines if re.match(r'^commit [0-9a-f]{6,40}$', line)]
    return '\n'.join(digest_lines)

def close_pull(pull):
    print '\t\t\tClosing the pull request'
    url = GITHUB_BASE + PULL_PATH.format(repo_name=pull['base']['repo']['name'], pull_num=pull['number'])
    new_title = pull['title'] + MERGED_SUFFIX
    data = json.dumps({
        'state': 'closed',
        'title': new_title,
    })
    headers = AUTH_HEADER_DICT.copy()
    headers['content-type'] = 'application/json'
    requests.patch(url, data=data, headers=headers)
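
# The call above is equivalent to sending (placeholders shown in angle brackets):
#
#   PATCH https://api.github.com/repos/company/<repo_name>/pulls/<pull_num>
#   {"state": "closed", "title": "<original title> -- Squash-Merged"}
#
# which closes the pull request and marks its title as squash-merged.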
############################################################
if __name__ == '__main__':
    main()