Move gerrit scripts to puppet.
Take the things from openstack-ci/gerrit and move them directly in to the puppet module. Install them using the model we're using for the jenkins slave scripts. Change-Id: I420b2b895bd57d40232b2cdda437617373a82890
This commit is contained in:
parent
f18ab506c9
commit
af48c6d986
@ -1,3 +1,3 @@
|
|||||||
#!/bin/sh
|
#!/bin/sh
|
||||||
|
|
||||||
python ~/openstack-ci/gerrit/update_bug.py change-merged "$@"
|
python /usr/local/gerrit/scripts/update_bug.py change-merged "$@"
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
#!/bin/sh
|
#!/bin/sh
|
||||||
|
|
||||||
python ~/openstack-ci/gerrit/update_blueprint.py patchset-created "$@"
|
python /usr/local/gerrit/scripts/update_blueprint.py patchset-created "$@"
|
||||||
python ~/openstack-ci/gerrit/update_bug.py patchset-created "$@"
|
python /usr/local/gerrit/scripts/update_bug.py patchset-created "$@"
|
||||||
|
78
modules/gerrit/files/scripts/close_pull_requests.py
Executable file
78
modules/gerrit/files/scripts/close_pull_requests.py
Executable file
@ -0,0 +1,78 @@
|
|||||||
|
#! /usr/bin/env python
|
||||||
|
# Copyright (C) 2011 OpenStack, LLC.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
|
||||||
|
# Close Github pull requests with instructions to use Gerrit for
|
||||||
|
# code review. The list of projects is found in github.config
|
||||||
|
# and should look like:
|
||||||
|
|
||||||
|
# [project "GITHUB_PROJECT"]
|
||||||
|
# close_pull = true
|
||||||
|
|
||||||
|
# Github authentication information is read from github.secure.config,
|
||||||
|
# which should look like:
|
||||||
|
|
||||||
|
# [github]
|
||||||
|
# username = GITHUB_USERNAME
|
||||||
|
# api_token = GITHUB_API_TOKEN
|
||||||
|
|
||||||
|
import github2.client
|
||||||
|
import os
|
||||||
|
import StringIO
|
||||||
|
import ConfigParser
|
||||||
|
import logging
|
||||||
|
import re
|
||||||
|
|
||||||
|
logging.basicConfig(level=logging.ERROR)
|
||||||
|
|
||||||
|
GITHUB_CONFIG = os.environ.get('GITHUB_CONFIG',
|
||||||
|
'/home/gerrit2/github.config')
|
||||||
|
GITHUB_SECURE_CONFIG = os.environ.get('GITHUB_SECURE_CONFIG',
|
||||||
|
'/home/gerrit2/github.secure.config')
|
||||||
|
|
||||||
|
MESSAGE = """Thank you for contributing to OpenStack!
|
||||||
|
|
||||||
|
%(project)s uses Gerrit for code review.
|
||||||
|
|
||||||
|
Please visit http://wiki.openstack.org/GerritWorkflow and follow the instructions there to upload your change to Gerrit.
|
||||||
|
"""
|
||||||
|
|
||||||
|
PROJECT_RE = re.compile(r'^project\s+"(.*)"$')
|
||||||
|
|
||||||
|
secure_config = ConfigParser.ConfigParser()
|
||||||
|
secure_config.read(GITHUB_SECURE_CONFIG)
|
||||||
|
config = ConfigParser.ConfigParser()
|
||||||
|
config.read(GITHUB_CONFIG)
|
||||||
|
|
||||||
|
github = github2.client.Github(requests_per_second=1.0,
|
||||||
|
username=secure_config.get("github", "username"),
|
||||||
|
api_token=secure_config.get("github", "api_token"))
|
||||||
|
|
||||||
|
for section in config.sections():
|
||||||
|
# Each section looks like [project "openstack/project"]
|
||||||
|
m = PROJECT_RE.match(section)
|
||||||
|
if not m: continue
|
||||||
|
project = m.group(1)
|
||||||
|
|
||||||
|
# Make sure we're supposed to close pull requests for this project:
|
||||||
|
if not (config.has_option(section, "close_pull") and
|
||||||
|
config.get(section, "close_pull").lower() == 'true'):
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Close each pull request
|
||||||
|
pull_requests = github.pull_requests.list(project)
|
||||||
|
for req in pull_requests:
|
||||||
|
vars = dict(project=project)
|
||||||
|
github.issues.comment(project, req.number, MESSAGE%vars)
|
||||||
|
github.issues.close(project, req.number)
|
74
modules/gerrit/files/scripts/expire_old_reviews.py
Normal file
74
modules/gerrit/files/scripts/expire_old_reviews.py
Normal file
@ -0,0 +1,74 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
# Copyright (c) 2012 OpenStack, LLC.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
|
||||||
|
# This script is designed to expire old code reviews that have not been touched
|
||||||
|
# using the following rules:
|
||||||
|
# 1. if open and no activity in 2 weeks, expire
|
||||||
|
# 2. if negative comment and no activity in 1 week, expire
|
||||||
|
|
||||||
|
import os
|
||||||
|
import paramiko
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
|
||||||
|
GERRIT_USER = os.environ.get('GERRIT_USER', 'launchpadsync')
|
||||||
|
GERRIT_SSH_KEY = os.environ.get('GERRIT_SSH_KEY',
|
||||||
|
'/home/gerrit2/.ssh/launchpadsync_rsa')
|
||||||
|
|
||||||
|
logging.basicConfig(format='%(asctime)-6s: %(name)s - %(levelname)s - %(message)s', filename='/var/log/gerrit/expire_reviews.log')
|
||||||
|
logger= logging.getLogger('expire_reviews')
|
||||||
|
logger.setLevel(logging.INFO)
|
||||||
|
|
||||||
|
logger.info('Starting expire reviews')
|
||||||
|
logger.info('Connecting to Gerrit')
|
||||||
|
|
||||||
|
ssh = paramiko.SSHClient()
|
||||||
|
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
|
||||||
|
ssh.connect('localhost', username=GERRIT_USER, key_filename=GERRIT_SSH_KEY, port=29418)
|
||||||
|
|
||||||
|
def expire_patch_set(patch_id, patch_subject, has_negative):
    """Abandon a stale patch set via the remote gerrit review command.

    Uses the module-level `ssh` connection and `logger`; a non-zero exit
    from the remote command is logged rather than raised.
    """
    if has_negative:
        reason = 'code review expired after 1 week of no activity after a negative review'
    else:
        reason = 'code review expired after 2 weeks of no activity'

    cmd = 'gerrit review --abandon --message="{0}" {1}'.format(reason, patch_id)
    logger.info('Expiring: %s - %s: %s', patch_id, patch_subject, reason)

    stdin, stdout, stderr = ssh.exec_command(cmd)
    # Surface the remote failure output for the operator.
    if stdout.channel.recv_exit_status() != 0:
        logger.error(stderr.read())
|
||||||
|
|
||||||
|
# Query all open with no activity for 2 weeks
|
||||||
|
logger.info('Searching no activity for 2 weeks')
|
||||||
|
stdin, stdout, stderr = ssh.exec_command('gerrit query --current-patch-set --format JSON status:open age:2w')
|
||||||
|
|
||||||
|
for line in stdout:
|
||||||
|
row= json.loads(line)
|
||||||
|
if not row.has_key('rowCount'):
|
||||||
|
expire_patch_set(row['currentPatchSet']['revision'], row['subject'], False)
|
||||||
|
|
||||||
|
# Query all reviewed with no activity for 1 week
|
||||||
|
logger.info('Searching no activity on negative review for 1 week')
|
||||||
|
stdin, stdout, stderr = ssh.exec_command('gerrit query --current-patch-set --all-approvals --format JSON status:reviewed age:1w')
|
||||||
|
|
||||||
|
for line in stdout:
|
||||||
|
row= json.loads(line)
|
||||||
|
if not row.has_key('rowCount'):
|
||||||
|
# Search for negative approvals
|
||||||
|
for approval in row['currentPatchSet']['approvals']:
|
||||||
|
if approval['value'] == '-1':
|
||||||
|
expire_patch_set(row['currentPatchSet']['revision'], row['subject'], True)
|
||||||
|
break
|
||||||
|
|
||||||
|
logger.info('End expire review')
|
29
modules/gerrit/files/scripts/get_group_uuid.py
Normal file
29
modules/gerrit/files/scripts/get_group_uuid.py
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
import argparse
|
||||||
|
import paramiko
|
||||||
|
import json
|
||||||
|
|
||||||
|
parser = argparse.ArgumentParser()
|
||||||
|
parser.add_argument("--host", dest="host", default="review.openstack.org",
|
||||||
|
help="gerrit host to connect to")
|
||||||
|
parser.add_argument("--port", dest="port", action='store', type=int,
|
||||||
|
default=29418, help="gerrit port to connect to")
|
||||||
|
parser.add_argument("groups", nargs=1)
|
||||||
|
|
||||||
|
options = parser.parse_args()
|
||||||
|
|
||||||
|
|
||||||
|
client = paramiko.SSHClient()
|
||||||
|
client.load_system_host_keys()
|
||||||
|
client.set_missing_host_key_policy(paramiko.WarningPolicy())
|
||||||
|
client.connect(options.host, port=options.port)
|
||||||
|
|
||||||
|
group = options.groups[0]
|
||||||
|
query = "select group_uuid from account_groups where name = '%s'" % group
|
||||||
|
command = 'gerrit gsql --format JSON -c "%s"' % query
|
||||||
|
stdin, stdout, stderr = client.exec_command(command)
|
||||||
|
|
||||||
|
for line in stdout:
|
||||||
|
row = json.loads(line)
|
||||||
|
if row['type'] == 'row':
|
||||||
|
print row['columns']['group_uuid']
|
||||||
|
ret = stdout.channel.recv_exit_status()
|
139
modules/gerrit/files/scripts/update_blueprint.py
Executable file
139
modules/gerrit/files/scripts/update_blueprint.py
Executable file
@ -0,0 +1,139 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
# Copyright (c) 2011 OpenStack, LLC.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
|
||||||
|
# This is designed to be called by a gerrit hook. It searches new
|
||||||
|
# patchsets for strings like "blueprint FOO" or "bp FOO" and updates
|
||||||
|
# corresponding Launchpad blueprints with links back to the change.
|
||||||
|
|
||||||
|
from launchpadlib.launchpad import Launchpad
|
||||||
|
from launchpadlib.uris import LPNET_SERVICE_ROOT
|
||||||
|
import os
|
||||||
|
import argparse
|
||||||
|
import re
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
import StringIO
|
||||||
|
import ConfigParser
|
||||||
|
import MySQLdb
|
||||||
|
|
||||||
|
BASE_DIR = '/home/gerrit2/review_site'
|
||||||
|
GERRIT_CACHE_DIR = os.path.expanduser(os.environ.get('GERRIT_CACHE_DIR',
|
||||||
|
'~/.launchpadlib/cache'))
|
||||||
|
GERRIT_CREDENTIALS = os.path.expanduser(os.environ.get('GERRIT_CREDENTIALS',
|
||||||
|
'~/.launchpadlib/creds'))
|
||||||
|
GERRIT_CONFIG = os.environ.get('GERRIT_CONFIG',
|
||||||
|
'/home/gerrit2/review_site/etc/gerrit.config')
|
||||||
|
GERRIT_SECURE_CONFIG = os.environ.get('GERRIT_SECURE_CONFIG',
|
||||||
|
'/home/gerrit2/review_site/etc/secure.config')
|
||||||
|
SPEC_RE = re.compile(r'(blueprint|bp)\s*[#:]?\s*(\S+)', re.I)
|
||||||
|
BODY_RE = re.compile(r'^\s+.*$')
|
||||||
|
|
||||||
|
def get_broken_config(filename):
    """Parse a gerrit-style ini file whose option lines have leading tabs.

    Gerrit writes its config files with leading tabs, which ConfigParser
    rejects; strip each line before feeding the text to the parser.

    :param filename: path to the gerrit config file
    :returns: a populated ConfigParser instance
    """
    with open(filename, "r") as conf:
        # Join once instead of repeated "%s%s" concatenation, which is
        # quadratic in the file size.
        text = "".join(line.lstrip() for line in conf)

    fp = StringIO.StringIO(text)
    c = ConfigParser.ConfigParser()
    c.readfp(fp)
    return c
|
||||||
|
|
||||||
|
GERRIT_CONFIG = get_broken_config(GERRIT_CONFIG)
|
||||||
|
SECURE_CONFIG = get_broken_config(GERRIT_SECURE_CONFIG)
|
||||||
|
DB_USER = GERRIT_CONFIG.get("database", "username")
|
||||||
|
DB_PASS = SECURE_CONFIG.get("database","password")
|
||||||
|
DB_DB = GERRIT_CONFIG.get("database","database")
|
||||||
|
|
||||||
|
def update_spec(launchpad, project, name, subject, link, topic=None):
    """Attach Gerrit change/topic links to a Launchpad blueprint whiteboard."""
    # Anything outside the openstack/ namespace is redirected to the
    # openstack-ci project so the hook can be exercised safely in tests.
    group, project = project.split('/')
    if group != 'openstack':
        project = 'openstack-ci'

    spec = launchpad.projects[project].getSpecification(name=name)
    if not spec:
        return

    whiteboard = spec.whiteboard.strip() if spec.whiteboard else ''
    dirty = False

    if topic:
        # Link to the topic query on the Gerrit host portion of `link`.
        host = link[:link.find('/', 8)]
        topic_url = '%s/#q,topic:%s,n,z' % (host, topic)
        if topic_url not in whiteboard:
            whiteboard += "\n\n\nGerrit topic: %s" % topic_url
            dirty = True

    if link not in whiteboard:
        whiteboard += "\n\n\nAddressed by: %s\n %s\n" % (link, subject)
        dirty = True

    # Only save when something actually changed, to avoid needless API writes.
    if dirty:
        spec.whiteboard = whiteboard
        spec.lp_save()
|
||||||
|
|
||||||
|
def find_specs(launchpad, dbconn, args):
    """Find blueprints referenced by a change and update each of them."""
    # Log of just the commits introduced by this change (no merges).
    git_dir = '--git-dir=' + BASE_DIR + '/git/' + args.project + '.git'
    cmd = ['git', git_dir, 'log', '--no-merges',
           args.commit + '^1..' + args.commit]
    git_log = subprocess.Popen(cmd, stdout=subprocess.PIPE).communicate()[0]

    cursor = dbconn.cursor()
    cursor.execute("select subject, topic from changes where change_key=%s",
                   args.change)
    subject, topic = cursor.fetchone()

    # Blueprints named in commit messages ("blueprint foo" / "bp foo").
    specs = set(m.group(2) for m in SPEC_RE.finditer(git_log))

    # A change topic of the form .../specname also names a blueprint.
    if topic:
        specs.add(topic.split('/')[-1])

    for spec_name in specs:
        update_spec(launchpad, args.project, spec_name, subject,
                    args.change_url, topic)
|
||||||
|
|
||||||
|
def main():
    """Gerrit hook entry point: parse hook arguments and update blueprints."""
    parser = argparse.ArgumentParser()
    parser.add_argument('hook')
    # Arguments common to all hooks.
    parser.add_argument('--change', default=None)
    parser.add_argument('--change-url', default=None)
    parser.add_argument('--project', default=None)
    parser.add_argument('--branch', default=None)
    parser.add_argument('--commit', default=None)
    # change-merged only.
    parser.add_argument('--submitter', default=None)
    # patchset-created only.
    parser.add_argument('--uploader', default=None)
    parser.add_argument('--patchset', default=None)
    args = parser.parse_args()

    launchpad = Launchpad.login_with('Gerrit User Sync', LPNET_SERVICE_ROOT,
                                     GERRIT_CACHE_DIR,
                                     credentials_file=GERRIT_CREDENTIALS,
                                     version='devel')
    conn = MySQLdb.connect(user=DB_USER, passwd=DB_PASS, db=DB_DB)
    find_specs(launchpad, conn, args)


if __name__ == '__main__':
    main()
|
200
modules/gerrit/files/scripts/update_bug.py
Executable file
200
modules/gerrit/files/scripts/update_bug.py
Executable file
@ -0,0 +1,200 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
# Copyright (c) 2011 OpenStack, LLC.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
|
||||||
|
# This is designed to be called by a gerrit hook. It searches new
|
||||||
|
# patchsets for strings like "bug FOO" and updates corresponding Launchpad
|
||||||
|
# bugs status.
|
||||||
|
|
||||||
|
from launchpadlib.launchpad import Launchpad
|
||||||
|
from launchpadlib.uris import LPNET_SERVICE_ROOT
|
||||||
|
import os
|
||||||
|
import argparse
|
||||||
|
import re
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
|
||||||
|
BASE_DIR = '/home/gerrit2/review_site'
|
||||||
|
GERRIT_CACHE_DIR = os.path.expanduser(os.environ.get('GERRIT_CACHE_DIR',
|
||||||
|
'~/.launchpadlib/cache'))
|
||||||
|
GERRIT_CREDENTIALS = os.path.expanduser(os.environ.get('GERRIT_CREDENTIALS',
|
||||||
|
'~/.launchpadlib/creds'))
|
||||||
|
|
||||||
|
|
||||||
|
def add_change_proposed_message(bugtask, change_url, project, branch):
    """Post a "fix proposed" comment on the bug for a newly uploaded review."""
    title = 'Fix proposed to %s (%s)' % (short_project(project), branch)
    content = 'Fix proposed to branch: %s\nReview: %s' % (branch, change_url)
    bugtask.bug.newMessage(subject=title, content=content)
|
||||||
|
|
||||||
|
|
||||||
|
def add_change_merged_message(bugtask, change_url, project, commit,
                              submitter, branch, git_log):
    """Post a "fix merged" comment linking the review and the commit."""
    title = 'Fix merged to %s (%s)' % (short_project(project), branch)
    git_url = 'http://github.com/%s/commit/%s' % (project, commit)
    # Assemble the message header, then append the raw git log below it.
    header = '\n'.join(['Reviewed: %s' % change_url,
                        'Committed: %s' % git_url,
                        'Submitter: %s' % submitter,
                        'Branch: %s\n' % branch])
    bugtask.bug.newMessage(subject=title, content=header + '\n' + git_log)
|
||||||
|
|
||||||
|
|
||||||
|
def set_in_progress(bugtask, launchpad, uploader, change_url):
    """Mark the bug In Progress, assigning it to the patch uploader.

    The assignee is only set when the Launchpad person search returns
    exactly one match; the status is updated regardless.
    """
    # The uploader string is usually "Full Name (email)"; prefer the text
    # inside the trailing parentheses as the search key.
    try:
        searchkey = uploader[uploader.rindex("(") + 1:-1]
    except ValueError:
        # No parentheses present; search on the raw uploader string.
        searchkey = uploader

    matches = launchpad.people.findPerson(text=searchkey)
    if len(matches) == 1:
        bugtask.assignee = matches[0]

    bugtask.status = "In Progress"
    bugtask.lp_save()
|
||||||
|
|
||||||
|
|
||||||
|
def set_fix_committed(bugtask):
    """Move the bug task to Fix Committed and persist the change."""
    bugtask.status = "Fix Committed"
    bugtask.lp_save()
|
||||||
|
|
||||||
|
|
||||||
|
def release_fixcommitted(bugtask):
    """Promote a Fix Committed bug task to Fix Released; otherwise no-op."""
    if bugtask.status != u'Fix Committed':
        return
    bugtask.status = "Fix Released"
    bugtask.lp_save()
|
||||||
|
|
||||||
|
|
||||||
|
def tag_in_branchname(bugtask, branch):
    """Tag the bug with an "in-<branch>" tag when the name is tag-safe.

    Branch names whose characters (after mapping '/' to '-') are not all
    alphanumeric are skipped, since they would not form a valid Launchpad
    tag.
    """
    lp_bug = bugtask.bug
    branch_name = branch.replace('/', '-')
    if branch_name.replace('-', '').isalnum():
        # Reassign the tags list (rather than mutating in place) so
        # launchpadlib registers the change.  The original code also
        # called .append() afterwards, double-adding the tag on plain
        # list attributes; that redundant append is removed here.
        lp_bug.tags = lp_bug.tags + ["in-%s" % branch_name]
        lp_bug.lp_save()
|
||||||
|
|
||||||
|
|
||||||
|
def short_project(full_project_name):
    """Strip the organization prefix from a git repository name."""
    _, _, tail = full_project_name.rpartition('/')
    return tail
|
||||||
|
|
||||||
|
|
||||||
|
def git2lp(full_project_name):
    """Map a git repository name to its Launchpad project name.

    Client libraries and CI repositories file bugs against their parent
    project; everything else just drops the organization prefix.
    """
    overrides = {
        'openstack/python-glanceclient': 'glance',
        'openstack/python-keystoneclient': 'keystone',
        'openstack/python-melangeclient': 'melange',
        'openstack/python-novaclient': 'nova',
        'openstack/python-quantumclient': 'quantum',
        'openstack/openstack-ci-puppet': 'openstack-ci',
        'openstack-ci/devstack-gate': 'openstack-ci',
    }
    if full_project_name in overrides:
        return overrides[full_project_name]
    return short_project(full_project_name)
|
||||||
|
|
||||||
|
|
||||||
|
def process_bugtask(launchpad, bugtask, git_log, args):
    """Apply hook-specific state changes and comments to one bug task."""
    hook, branch = args.hook, args.branch

    if hook == "change-merged":
        if branch == 'master':
            # Trunk merge: the fix is now committed.
            set_fix_committed(bugtask)
        elif branch == 'milestone-proposed':
            # Release-branch merge: Fix Committed becomes Fix Released.
            release_fixcommitted(bugtask)
        else:
            # Any other branch only gets an "in-<branch>" tag.
            tag_in_branchname(bugtask, branch)
        add_change_merged_message(bugtask, args.change_url, args.project,
                                  args.commit, args.submitter, branch,
                                  git_log)

    if hook == "patchset-created":
        if branch == 'master':
            set_in_progress(bugtask, launchpad, args.uploader,
                            args.change_url)
        if args.patchset == '1':
            # Only announce the proposal once, on the first patchset.
            add_change_proposed_message(bugtask, args.change_url,
                                        args.project, branch)
|
||||||
|
|
||||||
|
|
||||||
|
def find_bugs(launchpad, git_log, args):
    """Return the Launchpad bug tasks for bugs referenced in the git log.

    Recognizes references like "bug 12345" or "LP #12345" and keeps only
    the task that belongs to this project; unknown bug numbers are
    silently skipped.
    """
    pieces = re.split(r'([Bb]ug|[Ll][Pp])[\s#:]*(\d+)', git_log)
    target = git2lp(args.project)

    tasks = {}  # bug number -> bug task; deduplicates repeated mentions
    for piece in pieces:
        if not re.match(r'^\d+$', piece) or piece in tasks:
            continue
        try:
            lp_bug = launchpad.bugs[piece]
            for lp_task in lp_bug.bug_tasks:
                if lp_task.bug_target_name == target:
                    tasks[piece] = lp_task
                    break
        except KeyError:
            # Bug number does not exist on Launchpad.
            pass

    return tasks.values()
|
||||||
|
|
||||||
|
|
||||||
|
def extract_git_log(args):
    """Return the git log text covering the commits merged by this change."""
    git_dir = '--git-dir=' + BASE_DIR + '/git/' + args.project + '.git'
    rev_range = args.commit + '^1..' + args.commit
    proc = subprocess.Popen(['git', git_dir, 'log', '--no-merges', rev_range],
                            stdout=subprocess.PIPE)
    return proc.communicate()[0]
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Gerrit hook entry point: update Launchpad bugs for this change."""
    parser = argparse.ArgumentParser()
    parser.add_argument('hook')
    # Arguments common to all hooks.
    parser.add_argument('--change', default=None)
    parser.add_argument('--change-url', default=None)
    parser.add_argument('--project', default=None)
    parser.add_argument('--branch', default=None)
    parser.add_argument('--commit', default=None)
    # change-merged only.
    parser.add_argument('--submitter', default=None)
    # patchset-created only.
    parser.add_argument('--uploader', default=None)
    parser.add_argument('--patchset', default=None)
    args = parser.parse_args()

    # Connect to Launchpad.
    launchpad = Launchpad.login_with('Gerrit User Sync', LPNET_SERVICE_ROOT,
                                     GERRIT_CACHE_DIR,
                                     credentials_file=GERRIT_CREDENTIALS,
                                     version='devel')

    # One git log covers every commit merged by this change.
    git_log = extract_git_log(args)

    # Update each bug task referenced in the log.
    for bugtask in find_bugs(launchpad, git_log, args):
        process_bugtask(launchpad, bugtask, git_log, args)


if __name__ == '__main__':
    main()
|
77
modules/gerrit/files/scripts/update_cla_group.py
Executable file
77
modules/gerrit/files/scripts/update_cla_group.py
Executable file
@ -0,0 +1,77 @@
|
|||||||
|
#! /usr/bin/env python
|
||||||
|
# Copyright (C) 2011 OpenStack, LLC.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
|
||||||
|
# Add launchpad ids listed in the wiki CLA page to the CLA group in LP.
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import uuid
|
||||||
|
import os
|
||||||
|
import urllib
|
||||||
|
import re
|
||||||
|
|
||||||
|
import StringIO
|
||||||
|
import ConfigParser
|
||||||
|
|
||||||
|
from launchpadlib.launchpad import Launchpad
|
||||||
|
from launchpadlib.uris import LPNET_SERVICE_ROOT
|
||||||
|
|
||||||
|
DEBUG = False
|
||||||
|
|
||||||
|
LP_CACHE_DIR = '~/.launchpadlib/cache'
|
||||||
|
LP_CREDENTIALS = '~/.launchpadlib/creds'
|
||||||
|
CONTRIBUTOR_RE = re.compile(r'.*?\|\|\s*(?P<name>.*?)\s*\|\|\s*(?P<login>.*?)\s*\|\|\s*(?P<trans>.*?)\s*\|\|.*?')
|
||||||
|
LINK_RE = re.compile(r'\[\[.*\|\s*(?P<name>.*)\s*\]\]')
|
||||||
|
|
||||||
|
for check_path in (os.path.dirname(LP_CACHE_DIR),
|
||||||
|
os.path.dirname(LP_CREDENTIALS)):
|
||||||
|
if not os.path.exists(check_path):
|
||||||
|
os.makedirs(check_path)
|
||||||
|
|
||||||
|
wiki_members = []
|
||||||
|
for line in urllib.urlopen('http://wiki.openstack.org/Contributors?action=raw'):
|
||||||
|
m = CONTRIBUTOR_RE.match(line)
|
||||||
|
if m and m.group('login') and m.group('trans'):
|
||||||
|
login = m.group('login')
|
||||||
|
if login=="<#c0c0c0>'''Launchpad ID'''": continue
|
||||||
|
l = LINK_RE.match(login)
|
||||||
|
if l:
|
||||||
|
login = l.group('name')
|
||||||
|
wiki_members.append(login)
|
||||||
|
|
||||||
|
launchpad = Launchpad.login_with('CLA Team Sync', LPNET_SERVICE_ROOT,
|
||||||
|
LP_CACHE_DIR,
|
||||||
|
credentials_file = LP_CREDENTIALS)
|
||||||
|
|
||||||
|
lp_members = []
|
||||||
|
|
||||||
|
team = launchpad.people['openstack-cla']
|
||||||
|
for detail in team.members_details:
|
||||||
|
user = None
|
||||||
|
# detail.self_link ==
|
||||||
|
# 'https://api.launchpad.net/1.0/~team/+member/${username}'
|
||||||
|
login = detail.self_link.split('/')[-1]
|
||||||
|
status = detail.status
|
||||||
|
lp_members.append(login)
|
||||||
|
|
||||||
|
for wm in wiki_members:
|
||||||
|
if wm not in lp_members:
|
||||||
|
print "Need to add %s to LP" % (wm)
|
||||||
|
try:
|
||||||
|
person = launchpad.people[wm]
|
||||||
|
except:
|
||||||
|
print 'Unable to find %s on LP'%wm
|
||||||
|
continue
|
||||||
|
status = team.addMember(person=person, status="Approved")
|
391
modules/gerrit/files/scripts/update_gerrit_users.py
Executable file
391
modules/gerrit/files/scripts/update_gerrit_users.py
Executable file
@ -0,0 +1,391 @@
|
|||||||
|
#! /usr/bin/env python
|
||||||
|
# Copyright (C) 2011 OpenStack, LLC.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
|
||||||
|
# Synchronize Gerrit users from Launchpad.
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import uuid
|
||||||
|
import os
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
import StringIO
|
||||||
|
import ConfigParser
|
||||||
|
|
||||||
|
import MySQLdb
|
||||||
|
|
||||||
|
from launchpadlib.launchpad import Launchpad
|
||||||
|
from launchpadlib.uris import LPNET_SERVICE_ROOT
|
||||||
|
|
||||||
|
from openid.consumer import consumer
|
||||||
|
from openid.cryptutil import randomString
|
||||||
|
|
||||||
|
DEBUG = False
|
||||||
|
|
||||||
|
GERRIT_USER = os.environ.get('GERRIT_USER', 'launchpadsync')
|
||||||
|
GERRIT_CONFIG = os.environ.get('GERRIT_CONFIG',
|
||||||
|
'/home/gerrit2/review_site/etc/gerrit.config')
|
||||||
|
GERRIT_SECURE_CONFIG = os.environ.get('GERRIT_SECURE_CONFIG',
|
||||||
|
'/home/gerrit2/review_site/etc/secure.config')
|
||||||
|
GERRIT_SSH_KEY = os.environ.get('GERRIT_SSH_KEY',
|
||||||
|
'/home/gerrit2/.ssh/launchpadsync_rsa')
|
||||||
|
GERRIT_CACHE_DIR = os.path.expanduser(os.environ.get('GERRIT_CACHE_DIR',
|
||||||
|
'~/.launchpadlib/cache'))
|
||||||
|
GERRIT_CREDENTIALS = os.path.expanduser(os.environ.get('GERRIT_CREDENTIALS',
|
||||||
|
'~/.launchpadlib/creds'))
|
||||||
|
GERRIT_BACKUP_PATH = os.environ.get('GERRIT_BACKUP_PATH',
|
||||||
|
'/home/gerrit2/dbupdates')
|
||||||
|
|
||||||
|
for check_path in (os.path.dirname(GERRIT_CACHE_DIR),
|
||||||
|
os.path.dirname(GERRIT_CREDENTIALS),
|
||||||
|
GERRIT_BACKUP_PATH):
|
||||||
|
if not os.path.exists(check_path):
|
||||||
|
os.makedirs(check_path)
|
||||||
|
|
||||||
|
def get_broken_config(filename):
    """Parse a gerrit-style ini file whose option lines have leading tabs.

    Gerrit writes its config files with leading tabs, which ConfigParser
    rejects; strip each line before feeding the text to the parser.

    :param filename: path to the gerrit config file
    :returns: a populated ConfigParser instance
    """
    with open(filename, "r") as conf:
        # Join once instead of repeated "%s%s" concatenation, which is
        # quadratic in the file size.
        text = "".join(line.lstrip() for line in conf)

    fp = StringIO.StringIO(text)
    c = ConfigParser.ConfigParser()
    c.readfp(fp)
    return c
|
||||||
|
|
||||||
|
def get_type(in_type):
    """Translate a Launchpad SSH key type into the OpenSSH type string.

    NOTE(review): every non-RSA key is labelled "ssh-dsa" here, while
    OpenSSH itself spells the DSA type "ssh-dss" -- confirm downstream
    consumers expect this exact spelling before changing it.
    """
    return "ssh-rsa" if in_type == "RSA" else "ssh-dsa"
|
||||||
|
|
||||||
|
gerrit_config = get_broken_config(GERRIT_CONFIG)
|
||||||
|
secure_config = get_broken_config(GERRIT_SECURE_CONFIG)
|
||||||
|
|
||||||
|
DB_USER = gerrit_config.get("database", "username")
|
||||||
|
DB_PASS = secure_config.get("database","password")
|
||||||
|
DB_DB = gerrit_config.get("database","database")
|
||||||
|
|
||||||
|
db_backup_file = "%s.%s.sql" % (DB_DB, datetime.isoformat(datetime.now()))
|
||||||
|
db_backup_path = os.path.join(GERRIT_BACKUP_PATH, db_backup_file)
|
||||||
|
retval = os.system("mysqldump --opt -u%s -p%s %s > %s" %
|
||||||
|
(DB_USER, DB_PASS, DB_DB, db_backup_path))
|
||||||
|
if retval != 0:
|
||||||
|
print "Problem taking a db dump, aborting db update"
|
||||||
|
sys.exit(retval)
|
||||||
|
|
||||||
|
conn = MySQLdb.connect(user = DB_USER, passwd = DB_PASS, db = DB_DB)
|
||||||
|
cur = conn.cursor()
|
||||||
|
|
||||||
|
|
||||||
|
launchpad = Launchpad.login_with('Gerrit User Sync', LPNET_SERVICE_ROOT,
|
||||||
|
GERRIT_CACHE_DIR,
|
||||||
|
credentials_file = GERRIT_CREDENTIALS)
|
||||||
|
|
||||||
|
def get_sub_teams(team, have_teams):
|
||||||
|
for sub_team in launchpad.people[team].sub_teams:
|
||||||
|
if sub_team.name not in have_teams:
|
||||||
|
have_teams = get_sub_teams(sub_team.name, have_teams)
|
||||||
|
have_teams.append(team)
|
||||||
|
return have_teams
|
||||||
|
|
||||||
|
|
||||||
|
teams_todo = get_sub_teams('openstack', [])
|
||||||
|
|
||||||
|
users={}
|
||||||
|
groups={}
|
||||||
|
groups_in_groups={}
|
||||||
|
group_implies_groups={}
|
||||||
|
group_ids={}
|
||||||
|
projects = subprocess.check_output(['/usr/bin/ssh', '-p', '29418',
|
||||||
|
'-i', GERRIT_SSH_KEY,
|
||||||
|
'-l', GERRIT_USER, 'localhost',
|
||||||
|
'gerrit', 'ls-projects']).split('\n')
|
||||||
|
|
||||||
|
for team_todo in teams_todo:
|
||||||
|
|
||||||
|
team = launchpad.people[team_todo]
|
||||||
|
groups[team.name] = team.display_name
|
||||||
|
|
||||||
|
# Attempt to get nested group memberships. ~nova-core, for instance, is a
|
||||||
|
# member of ~nova, so membership in ~nova-core should imply membership in
|
||||||
|
# ~nova
|
||||||
|
group_in_group = groups_in_groups.get(team.name, {})
|
||||||
|
for subgroup in team.sub_teams:
|
||||||
|
group_in_group[subgroup.name] = 1
|
||||||
|
# We should now have a dictionary of the form {'nova': {'nova-core': 1}}
|
||||||
|
groups_in_groups[team.name] = group_in_group
|
||||||
|
|
||||||
|
for detail in team.members_details:
|
||||||
|
|
||||||
|
user = None
|
||||||
|
|
||||||
|
# detail.self_link ==
|
||||||
|
# 'https://api.launchpad.net/1.0/~team/+member/${username}'
|
||||||
|
login = detail.self_link.split('/')[-1]
|
||||||
|
|
||||||
|
if users.has_key(login):
|
||||||
|
user = users[login]
|
||||||
|
else:
|
||||||
|
|
||||||
|
user = dict(add_groups=[])
|
||||||
|
|
||||||
|
status = detail.status
|
||||||
|
if (status == "Approved" or status == "Administrator"):
|
||||||
|
user['add_groups'].append(team.name)
|
||||||
|
users[login] = user
|
||||||
|
|
||||||
|
# If we picked up subgroups that were not in our original list of groups
|
||||||
|
# make sure they get added
|
||||||
|
for (supergroup, subgroups) in groups_in_groups.items():
|
||||||
|
for group in subgroups.keys():
|
||||||
|
if group not in groups.keys():
|
||||||
|
groups[group] = None
|
||||||
|
|
||||||
|
# account_groups
|
||||||
|
# groups is a dict of team name to team display name
|
||||||
|
# here, for every group we have in that dict, we're building another dict of
|
||||||
|
# group_name to group_id - and if the database doesn't already have the
|
||||||
|
# group, we're adding it
|
||||||
|
for (group_name, group_display_name) in groups.items():
|
||||||
|
if cur.execute("select group_id from account_groups where name = %s",
|
||||||
|
group_name):
|
||||||
|
group_ids[group_name] = cur.fetchall()[0][0]
|
||||||
|
else:
|
||||||
|
cur.execute("""insert into account_group_id (s) values (NULL)""");
|
||||||
|
cur.execute("select max(s) from account_group_id")
|
||||||
|
group_id = cur.fetchall()[0][0]
|
||||||
|
|
||||||
|
# Match the 40-char 'uuid' that java is producing
|
||||||
|
group_uuid = uuid.uuid4()
|
||||||
|
second_uuid = uuid.uuid4()
|
||||||
|
full_uuid = "%s%s" % (group_uuid.hex, second_uuid.hex[:8])
|
||||||
|
|
||||||
|
cur.execute("""insert into account_groups
|
||||||
|
(group_id, group_type, owner_group_id,
|
||||||
|
name, description, group_uuid)
|
||||||
|
values
|
||||||
|
(%s, 'INTERNAL', 1, %s, %s, %s)""",
|
||||||
|
(group_id, group_name, group_display_name, full_uuid))
|
||||||
|
cur.execute("""insert into account_group_names (group_id, name) values
|
||||||
|
(%s, %s)""",
|
||||||
|
(group_id, group_name))
|
||||||
|
|
||||||
|
group_ids[group_name] = group_id
|
||||||
|
|
||||||
|
# account_group_includes
|
||||||
|
# groups_in_groups should be a dict of dicts, where the key is the larger
|
||||||
|
# group and the inner dict is a list of groups that are members of the
|
||||||
|
# larger group. So {'nova': {'nova-core': 1}}
|
||||||
|
for (group_name, subgroups) in groups_in_groups.items():
|
||||||
|
for subgroup_name in subgroups.keys():
|
||||||
|
try:
|
||||||
|
cur.execute("""insert into account_group_includes
|
||||||
|
(group_id, include_id)
|
||||||
|
values (%s, %s)""",
|
||||||
|
(group_ids[group_name], group_ids[subgroup_name]))
|
||||||
|
except MySQLdb.IntegrityError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# Make a list of implied group membership
|
||||||
|
# building a list which is the opposite of groups_in_group. Here
|
||||||
|
# group_implies_groups is a dict keyed by group_id containing a list of
|
||||||
|
# group_ids of implied membership. SO: if nova is 1 and nova-core is 2:
|
||||||
|
# {'2': [1]}
|
||||||
|
for group_id in group_ids.values():
|
||||||
|
total_groups = []
|
||||||
|
groups_todo = [group_id]
|
||||||
|
while len(groups_todo) > 0:
|
||||||
|
current_group = groups_todo.pop()
|
||||||
|
total_groups.append(current_group)
|
||||||
|
cur.execute("""select group_id from account_group_includes
|
||||||
|
where include_id = %s""", (current_group))
|
||||||
|
for row in cur.fetchall():
|
||||||
|
if row[0] != 1 and row[0] not in total_groups:
|
||||||
|
groups_todo.append(row[0])
|
||||||
|
group_implies_groups[group_id] = total_groups
|
||||||
|
|
||||||
|
if DEBUG:
|
||||||
|
def get_group_name(in_group_id):
|
||||||
|
for (group_name, group_id) in group_ids.items():
|
||||||
|
if group_id == in_group_id:
|
||||||
|
return group_name
|
||||||
|
|
||||||
|
print "groups in groups"
|
||||||
|
for (k,v) in groups_in_groups.items():
|
||||||
|
print k, v
|
||||||
|
|
||||||
|
print "group_imples_groups"
|
||||||
|
for (k, v) in group_implies_groups.items():
|
||||||
|
print get_group_name(k)
|
||||||
|
new_groups=[]
|
||||||
|
for val in v:
|
||||||
|
new_groups.append(get_group_name(val))
|
||||||
|
print "\t", new_groups
|
||||||
|
|
||||||
|
for (username, user_details) in users.items():
|
||||||
|
|
||||||
|
# accounts
|
||||||
|
account_id = None
|
||||||
|
if cur.execute("""select account_id from account_external_ids where
|
||||||
|
external_id in (%s)""", ("username:%s" % username)):
|
||||||
|
account_id = cur.fetchall()[0][0]
|
||||||
|
# We have this bad boy - all we need to do is update his group membership
|
||||||
|
|
||||||
|
else:
|
||||||
|
|
||||||
|
# We need details
|
||||||
|
member = launchpad.people[username]
|
||||||
|
if not member.is_team:
|
||||||
|
|
||||||
|
openid_consumer = consumer.Consumer(dict(id=randomString(16, '0123456789abcdef')), None)
|
||||||
|
openid_request = openid_consumer.begin("https://launchpad.net/~%s" % member.name)
|
||||||
|
user_details['openid_external_id'] = openid_request.endpoint.getLocalID()
|
||||||
|
|
||||||
|
# Handle username change
|
||||||
|
if cur.execute("""select account_id from account_external_ids where
|
||||||
|
external_id in (%s)""", user_details['openid_external_id']):
|
||||||
|
account_id = cur.fetchall()[0][0]
|
||||||
|
cur.execute("""update account_external_ids
|
||||||
|
set external_id=%s
|
||||||
|
where external_id like 'username%%'
|
||||||
|
and account_id = %s""",
|
||||||
|
('username:%s' % username, account_id))
|
||||||
|
else:
|
||||||
|
user_details['ssh_keys'] = ["%s %s %s" % (get_type(key.keytype), key.keytext, key.comment) for key in member.sshkeys]
|
||||||
|
|
||||||
|
|
||||||
|
email = None
|
||||||
|
try:
|
||||||
|
email = member.preferred_email_address.email
|
||||||
|
except ValueError:
|
||||||
|
pass
|
||||||
|
user_details['email'] = email
|
||||||
|
|
||||||
|
|
||||||
|
cur.execute("""insert into account_id (s) values (NULL)""");
|
||||||
|
cur.execute("select max(s) from account_id")
|
||||||
|
account_id = cur.fetchall()[0][0]
|
||||||
|
|
||||||
|
cur.execute("""insert into accounts (account_id, full_name, preferred_email) values
|
||||||
|
(%s, %s, %s)""", (account_id, username, user_details['email']))
|
||||||
|
|
||||||
|
# account_ssh_keys
|
||||||
|
for key in user_details['ssh_keys']:
|
||||||
|
|
||||||
|
cur.execute("""select ssh_public_key from account_ssh_keys where
|
||||||
|
account_id = %s""", account_id)
|
||||||
|
db_keys = [r[0].strip() for r in cur.fetchall()]
|
||||||
|
if key.strip() not in db_keys:
|
||||||
|
|
||||||
|
cur.execute("""select max(seq)+1 from account_ssh_keys
|
||||||
|
where account_id = %s""", account_id)
|
||||||
|
seq = cur.fetchall()[0][0]
|
||||||
|
if seq is None:
|
||||||
|
seq = 1
|
||||||
|
cur.execute("""insert into account_ssh_keys
|
||||||
|
(ssh_public_key, valid, account_id, seq)
|
||||||
|
values
|
||||||
|
(%s, 'Y', %s, %s)""",
|
||||||
|
(key.strip(), account_id, seq))
|
||||||
|
|
||||||
|
# account_external_ids
|
||||||
|
## external_id
|
||||||
|
if not cur.execute("""select account_id from account_external_ids
|
||||||
|
where account_id = %s and external_id = %s""",
|
||||||
|
(account_id, user_details['openid_external_id'])):
|
||||||
|
cur.execute("""insert into account_external_ids
|
||||||
|
(account_id, email_address, external_id)
|
||||||
|
values (%s, %s, %s)""",
|
||||||
|
(account_id, user_details['email'], user_details['openid_external_id']))
|
||||||
|
if not cur.execute("""select account_id from account_external_ids
|
||||||
|
where account_id = %s and external_id = %s""",
|
||||||
|
(account_id, "username:%s" % username)):
|
||||||
|
cur.execute("""insert into account_external_ids
|
||||||
|
(account_id, external_id) values (%s, %s)""",
|
||||||
|
(account_id, "username:%s" % username))
|
||||||
|
|
||||||
|
if user_details.get('email', None) is not None:
|
||||||
|
if not cur.execute("""select account_id from account_external_ids
|
||||||
|
where account_id = %s and external_id = %s""",
|
||||||
|
(account_id, "mailto:%s" % user_details['email'])):
|
||||||
|
cur.execute("""insert into account_external_ids
|
||||||
|
(account_id, email_address, external_id)
|
||||||
|
values (%s, %s, %s)""",
|
||||||
|
(account_id, user_details['email'], "mailto:%s" %
|
||||||
|
user_details['email']))
|
||||||
|
|
||||||
|
if account_id is not None:
|
||||||
|
# account_group_members
|
||||||
|
# user_details['add_groups'] is a list of group names for which the
|
||||||
|
# user is either "Approved" or "Administrator"
|
||||||
|
|
||||||
|
groups_to_add = []
|
||||||
|
groups_to_watch = {}
|
||||||
|
groups_to_rm = {}
|
||||||
|
|
||||||
|
for group in user_details['add_groups']:
|
||||||
|
# if you are in the group nova-core, that should also put you in nova
|
||||||
|
add_groups = group_implies_groups[group_ids[group]]
|
||||||
|
add_groups.append(group_ids[group])
|
||||||
|
for add_group in add_groups:
|
||||||
|
if add_group not in groups_to_add:
|
||||||
|
groups_to_add.append(add_group)
|
||||||
|
# We only want to add watches for direct project membership groups
|
||||||
|
groups_to_watch[group_ids[group]] = group
|
||||||
|
|
||||||
|
# groups_to_add is now the full list of all groups we think the user
|
||||||
|
# should belong to. we want to limit the users groups to this list
|
||||||
|
for group in groups:
|
||||||
|
if group_ids[group] not in groups_to_add:
|
||||||
|
if group not in groups_to_rm.values():
|
||||||
|
groups_to_rm[group_ids[group]] = group
|
||||||
|
|
||||||
|
for group_id in groups_to_add:
|
||||||
|
if not cur.execute("""select account_id from account_group_members
|
||||||
|
where account_id = %s and group_id = %s""",
|
||||||
|
(account_id, group_id)):
|
||||||
|
# The current user does not exist in the group. Add it.
|
||||||
|
cur.execute("""insert into account_group_members
|
||||||
|
(account_id, group_id)
|
||||||
|
values (%s, %s)""", (account_id, group_id))
|
||||||
|
os_project_name = groups_to_watch.get(group_id, None)
|
||||||
|
if os_project_name is not None:
|
||||||
|
if os_project_name.endswith("-core"):
|
||||||
|
os_project_name = os_project_name[:-5]
|
||||||
|
os_project_name = "openstack/%s" % os_project_name
|
||||||
|
if os_project_name in projects:
|
||||||
|
if not cur.execute("""select account_id
|
||||||
|
from account_project_watches
|
||||||
|
where account_id = %s
|
||||||
|
and project_name = %s""",
|
||||||
|
(account_id, os_project_name)):
|
||||||
|
cur.execute("""insert into account_project_watches
|
||||||
|
VALUES
|
||||||
|
("Y", "N", "N", %s, %s, "*")""",
|
||||||
|
(account_id, os_project_name))
|
||||||
|
|
||||||
|
for (group_id, group_name) in groups_to_rm.items():
|
||||||
|
cur.execute("""delete from account_group_members
|
||||||
|
where account_id = %s and group_id = %s""",
|
||||||
|
(account_id, group_id))
|
||||||
|
|
||||||
|
os.system("ssh -i %s -p29418 %s@localhost gerrit flush-caches" %
|
||||||
|
(GERRIT_SSH_KEY, GERRIT_USER))
|
||||||
|
|
||||||
|
conn.commit()
|
430
modules/gerrit/files/scripts/update_users.py
Normal file
430
modules/gerrit/files/scripts/update_users.py
Normal file
@ -0,0 +1,430 @@
|
|||||||
|
#! /usr/bin/env python
|
||||||
|
# Copyright (C) 2012 OpenStack, LLC.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
|
||||||
|
# Synchronize Gerrit users from Launchpad.
|
||||||
|
# TODO items:
|
||||||
|
# 1. add a temporary (instance level) object store for the launchpad class
|
||||||
|
# 2. split out the two classes into separate files to be used as a library
|
||||||
|
|
||||||
|
import ConfigParser
import json
import logging
import os
import StringIO
import sys
import uuid
from datetime import datetime

import paramiko
from launchpadlib.launchpad import Launchpad
from launchpadlib.uris import LPNET_SERVICE_ROOT
from openid.consumer import consumer
from openid.cryptutil import randomString
|
||||||
|
# Runtime configuration.  Every path can be overridden via the environment
# so the script is testable outside the production gerrit host.
GERRIT_USER = os.environ.get('GERRIT_USER', 'launchpadsync')
GERRIT_CONFIG = os.environ.get(
    'GERRIT_CONFIG', '/home/gerrit2/review_site/etc/gerrit.config')
GERRIT_SECURE_CONFIG = os.environ.get(
    'GERRIT_SECURE_CONFIG', '/home/gerrit2/review_site/etc/secure.config')
GERRIT_SSH_KEY = os.environ.get(
    'GERRIT_SSH_KEY', '/home/gerrit2/.ssh/launchpadsync_rsa')
GERRIT_CACHE_DIR = os.path.expanduser(
    os.environ.get('GERRIT_CACHE_DIR', '~/.launchpadlib/cache'))
GERRIT_CREDENTIALS = os.path.expanduser(
    os.environ.get('GERRIT_CREDENTIALS', '~/.launchpadlib/creds'))
GERRIT_BACKUP_PATH = os.environ.get('GERRIT_BACKUP_PATH',
                                    '/home/gerrit2/dbupdates')

# Log to gerrit's log directory at INFO and above.
logging.basicConfig(
    format='%(asctime)-6s: %(name)s - %(levelname)s - %(message)s',
    filename='/var/log/gerrit/update_users.log')
logger = logging.getLogger('update_users')
logger.setLevel(logging.INFO)

# Make sure the launchpadlib cache/credentials directories and the DB
# backup directory exist before any of them is used below.
for needed_dir in (os.path.dirname(GERRIT_CACHE_DIR),
                   os.path.dirname(GERRIT_CREDENTIALS),
                   GERRIT_BACKUP_PATH):
    if not os.path.exists(needed_dir):
        os.makedirs(needed_dir)
|
||||||
|
def get_broken_config(filename):
    """Parse a gerrit ini file, stripping the leading tabs gerrit emits.

    Gerrit writes its config files with tab-indented keys, which
    ConfigParser refuses to read, so the text is rebuilt with every line
    left-stripped before parsing.
    """
    with open(filename, "r") as conf:
        stripped_lines = [line.lstrip() for line in conf.readlines()]

    parser = ConfigParser.ConfigParser()
    parser.readfp(StringIO.StringIO("".join(stripped_lines)))
    return parser
|
||||||
|
# Pull the database credentials out of gerrit's own (tab-mangled) ini
# files; the password lives in the separate secure config.
gerrit_config = get_broken_config(GERRIT_CONFIG)
secure_config = get_broken_config(GERRIT_SECURE_CONFIG)

DB_USER = gerrit_config.get("database", "username")
DB_PASS = secure_config.get("database", "password")
DB_DB = gerrit_config.get("database", "database")
||||||
|
def make_db_backup():
    """Dump the gerrit MySQL database into GERRIT_BACKUP_PATH.

    Called once before any reconcile work so a bad run can be rolled
    back.  Exits the whole script on failure, since updating without a
    backup could lose data.
    """
    db_backup_file = "%s.%s.sql" % (DB_DB, datetime.isoformat(datetime.now()))
    db_backup_path = os.path.join(GERRIT_BACKUP_PATH, db_backup_file)
    # NOTE(review): the password appears on the mysqldump command line and
    # is therefore visible in the process list; a --defaults-extra-file
    # would be safer.  Left as-is to keep behavior unchanged.
    retval = os.system("mysqldump --opt -u%s -p%s %s > %s" %
                       (DB_USER, DB_PASS, DB_DB, db_backup_path))
    if retval != 0:
        logger.error("Problem taking a db dump, aborting db update")
        # Fixed: 'sys' was used here without ever being imported, so a
        # failed dump crashed with NameError instead of exiting cleanly.
        sys.exit(retval)
|
||||||
|
class LaunchpadAction(object):
    """Read-only view of the OpenStack team structure on Launchpad.

    Construction logs in to Launchpad and caches the transitive list of
    sub-teams under 'openstack'; all other methods answer questions
    against that cached team list and live Launchpad data.
    """

    def __init__(self):
        logger.info('Connecting to Launchpad')
        self.launchpad = Launchpad.login_with(
            'Gerrit User Sync', LPNET_SERVICE_ROOT, GERRIT_CACHE_DIR,
            credentials_file=GERRIT_CREDENTIALS)

        logger.info('Getting Launchpad teams')
        self.lp_teams = self.get_all_sub_teams('openstack', [])

    def get_all_sub_teams(self, team, have_teams):
        """Depth-first walk collecting *team* and all transitive sub-teams."""
        for child in self.launchpad.people[team].sub_teams:
            if child.name not in have_teams:
                have_teams = self.get_all_sub_teams(child.name, have_teams)
        have_teams.append(team)
        return have_teams

    def get_sub_teams(self, team):
        """Return the names of the direct sub-teams of *team*."""
        return [child.name for child in self.launchpad.people[team].sub_teams]

    def get_teams(self):
        """Return the cached transitive team list."""
        return self.lp_teams

    def get_all_users(self):
        """Return every Approved/Administrator member across all teams.

        Sub-teams encountered as members are excluded; duplicates are
        skipped while preserving first-seen order.
        """
        logger.info('Getting Launchpad users')
        found = []
        for team in self.lp_teams:
            for detail in self.launchpad.people[team].members_details:
                if detail.status not in ('Approved', 'Administrator'):
                    continue
                # detail.self_link ends with .../+member/${username}
                name = detail.self_link.split('/')[-1]
                if name not in found and name not in self.lp_teams:
                    found.append(name)
        return found

    def get_user_data(self, user):
        """Return the Launchpad person object for *user*."""
        return self.launchpad.people[user]

    def get_team_members(self, team, gerrit):
        """Return the de-duplicated member names of *team*.

        Recurses into member sub-teams, and into any gerrit-side implied
        groups (account_group_includes) that are also Launchpad teams, so
        e.g. nova-core membership contributes to nova.
        """
        members = []
        for detail in self.launchpad.people[team].members_details:
            if detail.status not in ('Approved', 'Administrator'):
                continue
            name = detail.self_link.split('/')[-1]
            if name in self.lp_teams:
                # A sub-team: pull in its implied teams' members, then its
                # own, instead of recording the team name as a user.
                for implied in gerrit.get_implied_groups(name):
                    if implied in self.lp_teams:
                        members.extend(self.get_team_members(implied, gerrit))
                members.extend(self.get_team_members(name, gerrit))
                continue
            members.append(name)
        # Teams implied by *team* itself contribute members as well.
        for implied in gerrit.get_implied_groups(team):
            if implied in self.lp_teams:
                members.extend(self.get_team_members(implied, gerrit))
        # Filter out dupes; callers do not rely on ordering.
        return list(set(members))

    def get_team_watches(self, team):
        """Return direct human members of *team* (no recursion).

        Used to decide who gets a project watch; sub-teams are skipped.
        """
        watchers = []
        for detail in self.launchpad.people[team].members_details:
            if detail.status not in ('Approved', 'Administrator'):
                continue
            name = detail.self_link.split('/')[-1]
            if name not in self.lp_teams and name not in watchers:
                watchers.append(name)
        return watchers

    def get_team_display_name(self, team):
        """Return the human-readable display name of *team*."""
        return self.launchpad.people[team].display_name
|
||||||
|
class GerritAction(object):
    """Wrapper around gerrit's `gsql` interface, reached over ssh.

    All queries run over a single paramiko ssh connection to the local
    gerrit daemon and return streams of JSON-encoded rows.

    NOTE(review): values are interpolated straight into SQL text.  They
    come from Launchpad (names, keys, emails), not anonymous users, but
    quoting/escaping would still be safer if gsql ever grows parameter
    support.
    """

    def __init__(self):
        logger.info('Connecting to Gerrit')
        self.ssh = paramiko.SSHClient()
        self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        self.ssh.connect('localhost', username=GERRIT_USER, port=29418,
                         key_filename=GERRIT_SSH_KEY)

    def cleanup(self):
        """Close the ssh connection to gerrit."""
        logger.info('Closing connection to Gerrit')
        self.ssh.close()

    def run_query(self, query):
        """Run one gsql query and return the stdout stream of JSON rows."""
        command = 'gerrit gsql --format JSON -c "{0}"'.format(query)
        stdin, stdout, stderr = self.ssh.exec_command(command)
        # Trying to read the return code or stderr can hang with large
        # result sets, so only the stdout stream is consumed.
        # for line in stderr:
        #     logger.error(line)
        return stdout

    def get_groups(self):
        """Return the names of all gerrit account groups."""
        logger.info('Getting Gerrit groups')
        groups = []
        query = "select name from account_groups"
        stdout = self.run_query(query)
        for line in stdout:
            row = json.loads(line)
            if row['type'] == 'row':
                groups.append(row['columns']['name'])
        return groups

    def get_users(self):
        """Return all known gerrit usernames (from external ids)."""
        logger.info('Getting Gerrit users')
        users = []
        query = "select external_id from account_external_ids"
        stdout = self.run_query(query)
        for line in stdout:
            row = json.loads(line)
            if row['type'] == 'row':
                users.append(row['columns']['external_id'].replace('username:', ''))
        return users

    def get_group_id(self, group_name):
        """Return the numeric id of *group_name*, or 0 if it is unknown."""
        query = ("select group_id from account_groups "
                 "where name='{0}'").format(group_name)
        stdout = self.run_query(query)
        row = json.loads(stdout.readline())
        if row['type'] == 'row':
            return row['columns']['group_id']
        else:
            return 0

    def get_user_id(self, user_name):
        """Return the account id for *user_name*.

        Assumes the user exists; a missing user raises KeyError on the
        returned stats row — TODO confirm callers only pass known users.
        """
        query = ("select account_id from account_external_ids "
                 "where external_id='username:{0}'").format(user_name)
        stdout = self.run_query(query)
        row = json.loads(stdout.readline())
        return row['columns']['account_id']

    def get_users_from_group(self, group_name):
        """Return the usernames that are direct members of *group_name*."""
        logger.info('Getting Gerrit users from group %s', group_name)
        users = []
        gid = self.get_group_id(group_name)

        query = ("select external_id from account_external_ids "
                 "join account_group_members "
                 "on account_group_members.account_id="
                 "account_external_ids.account_id "
                 "where account_group_members.group_id={0} "
                 "and external_id like 'username%%'").format(gid)
        stdout = self.run_query(query)
        for line in stdout:
            row = json.loads(line)
            if row['type'] == 'row':
                users.append(row['columns']['external_id'].replace('username:', ''))
        return users

    def get_users_from_watches(self, group_name):
        """Return usernames watching the project that matches *group_name*.

        A trailing '-core' is stripped and the remainder is mapped to the
        'openstack/<name>' project, mirroring add_user_to_watch.
        """
        logger.info('Getting Gerrit users from watch list %s', group_name)
        users = []
        if group_name.endswith("-core"):
            group_name = group_name[:-5]
        group_name = "openstack/{0}".format(group_name)

        query = ("select external_id from account_external_ids "
                 "join account_project_watches "
                 "on account_project_watches.account_id="
                 "account_external_ids.account_id "
                 "where account_project_watches.project_name like '{0}' "
                 "and external_id like 'username%%'").format(group_name)
        stdout = self.run_query(query)
        for line in stdout:
            row = json.loads(line)
            if row['type'] == 'row':
                users.append(row['columns']['external_id'].replace('username:', ''))
        return users

    def get_implied_groups(self, group_name):
        """Return names of groups included in *group_name* (sub-groups)."""
        gid = self.get_group_id(group_name)
        groups = []
        query = ("select name from account_groups "
                 "join account_group_includes "
                 "on account_group_includes.include_id=account_groups.group_id "
                 "where account_group_includes.group_id={0}").format(gid)
        stdout = self.run_query(query)
        for line in stdout:
            row = json.loads(line)
            if row['type'] == 'row':
                groups.append(row['columns']['name'])
        return groups

    def add_group(self, group_name, group_display_name):
        """Create a new INTERNAL gerrit group named *group_name*."""
        # Fixed: the original logged the undefined name 'group' (NameError).
        logger.info('New group %s (%s)', group_display_name, group_name)
        query = "insert into account_group_id (s) values (NULL)"
        stdout = self.run_query(query)
        row = json.loads(stdout.readline())
        # Fixed: 'is not 1' compared object identity, not value; and the
        # bare 'raise' outside an except block was itself an error.
        if row['rowCount'] != 1:
            logger.error("Could not get a new account group ID")
            raise Exception("Could not get a new account group ID")
        query = "select max(s) from account_group_id"
        stdout = self.run_query(query)
        row = json.loads(stdout.readline())
        gid = row['columns']['max(s)']
        # Match the 40-char 'uuid' format gerrit's java code produces.
        full_uuid = "{0}{1}".format(uuid.uuid4().hex, uuid.uuid4().hex[:8])
        query = ("insert into account_groups "
                 "(group_id, group_type, owner_group_id, "
                 "name, description, group_uuid) values "
                 "({0}, 'INTERNAL', 1, '{1}', '{2}', '{3}')").format(
                     gid, group_name, group_display_name, full_uuid)
        self.run_query(query)
        query = ("insert into account_group_names (group_id, name) "
                 "values ({0}, '{1}')").format(gid, group_name)
        self.run_query(query)

    def add_user(self, user_name, user_data):
        """Create a gerrit account for Launchpad user *user_data*.

        If the user's OpenID is already known, only the username external
        id is updated (a renamed Launchpad account); otherwise a fresh
        account with ssh keys and email external ids is inserted.
        """
        logger.info("Adding Gerrit user %s", user_name)
        openid_consumer = consumer.Consumer(
            dict(id=randomString(16, '0123456789abcdef')), None)
        openid_request = openid_consumer.begin(
            "https://launchpad.net/~%s" % user_data.name)
        user_openid_external_id = openid_request.endpoint.getLocalID()
        query = ("select account_id from account_external_ids "
                 "where external_id in ('{0}')").format(user_openid_external_id)
        stdout = self.run_query(query)
        row = json.loads(stdout.readline())
        if row['type'] == 'row':
            # We have a result, so this is an updated user name.
            account_id = row['columns']['account_id']
            query = ("update account_external_ids set external_id='{0}' "
                     "where external_id like 'username%%' "
                     "and account_id = {1}").format(
                         'username:%s' % user_name, account_id)
            self.run_query(query)
        else:
            # We really do have a new user.
            user_ssh_keys = ["%s %s %s" % ('ssh-%s' % key.keytype.lower(),
                                           key.keytext, key.comment)
                             for key in user_data.sshkeys]
            user_email = None
            try:
                # Fixed: the result was assigned to a stray local 'email',
                # so user_email was always None and every new account was
                # created without its preferred address.
                user_email = user_data.preferred_email_address.email
            except ValueError:
                # Launchpad hides the address for some users; proceed
                # without an email.
                pass
            query = "insert into account_id (s) values (NULL)"
            self.run_query(query)
            query = "select max(s) from account_id"
            stdout = self.run_query(query)
            row = json.loads(stdout.readline())
            uid = row['columns']['max(s)']
            query = ("insert into accounts "
                     "(account_id, full_name, preferred_email) "
                     "values ({0}, '{1}', '{2}')").format(
                         uid, user_name, user_email)
            self.run_query(query)
            keyno = 1
            for key in user_ssh_keys:
                query = ("insert into account_ssh_keys "
                         "(ssh_public_key, valid, account_id, seq) "
                         "values ('{0}', 'Y', {1}, {2})").format(
                             key.strip(), uid, keyno)
                self.run_query(query)
                keyno = keyno + 1
            query = ("insert into account_external_ids "
                     "(account_id, email_address, external_id) "
                     "values ({0}, '{1}', '{2}')").format(
                         uid, user_email, user_openid_external_id)
            self.run_query(query)
            query = ("insert into account_external_ids "
                     "(account_id, external_id) values ({0}, '{1}')").format(
                         uid, "username:%s" % user_name)
            self.run_query(query)
            if user_email is not None:
                # Fixed: the original had a '.' where a ',' belonged in the
                # values list (broken SQL) and never executed this query.
                query = ("insert into account_external_ids "
                         "(account_id, email_address, external_id) "
                         "values ({0}, '{1}', '{2}')").format(
                             uid, user_email, "mailto:%s" % user_email)
                self.run_query(query)
        return None

    def add_user_to_group(self, user_name, group_name):
        """Insert *user_name* into gerrit group *group_name*."""
        logger.info("Adding Gerrit user %s to group %s", user_name, group_name)
        uid = self.get_user_id(user_name)
        gid = self.get_group_id(group_name)
        # Fixed: 'gid is 0' relied on small-int identity, and the bare
        # 'raise' outside an except block was invalid.
        if gid == 0:
            logger.error("Trying to add user %s to non-existent group %s",
                         user_name, group_name)
            raise Exception(
                "Trying to add user {0} to non-existent group {1}".format(
                    user_name, group_name))
        query = ("insert into account_group_members "
                 "(account_id, group_id) values ({0}, {1})").format(uid, gid)
        self.run_query(query)

    def add_user_to_watch(self, user_name, group_name):
        """Add a project watch for *user_name* on the project of *group_name*."""
        logger.info("Adding Gerrit user %s to watch group %s",
                    user_name, group_name)
        uid = self.get_user_id(user_name)
        if group_name.endswith("-core"):
            group_name = group_name[:-5]
        group_name = "openstack/{0}".format(group_name)
        query = ("insert into account_project_watches VALUES "
                 "('Y', 'N', 'N', {0}, '{1}', '*')").format(uid, group_name)
        self.run_query(query)

    def del_user_from_group(self, user_name, group_name):
        """Remove *user_name* from *group_name* and its project watch."""
        logger.info("Deleting Gerrit user %s from group %s",
                    user_name, group_name)
        uid = self.get_user_id(user_name)
        gid = self.get_group_id(group_name)
        query = ("delete from account_group_members "
                 "where account_id = {0} and group_id = {1}").format(uid, gid)
        self.run_query(query)
        if group_name.endswith("-core"):
            group_name = group_name[:-5]
        group_name = "openstack/{0}".format(group_name)
        query = ("delete from account_project_watches "
                 "where account_id = {0} "
                 "and project_name= '{1}'").format(uid, group_name)
        self.run_query(query)

    def rebuild_sub_groups(self, group, sub_groups):
        """Ensure every name in *sub_groups* is included in *group*."""
        gid = self.get_group_id(group)
        for sub_group in sub_groups:
            sgid = self.get_group_id(sub_group)
            query = ("select group_id from account_group_includes "
                     "where group_id={0} and include_id={1}").format(gid, sgid)
            stdout = self.run_query(query)
            row = json.loads(stdout.readline())
            if row['type'] != 'row':
                # Fixed: the log arguments were swapped (the implied/included
                # group is sub_group, the container is group).
                logger.info('Adding implied group %s to group %s',
                            sub_group, group)
                query = ("insert into account_group_includes "
                         "(group_id, include_id) "
                         "values ({0}, {1})").format(gid, sgid)
                self.run_query(query)
|
||||||
|
# Actual work starts here!

lp = LaunchpadAction()
gerrit = GerritAction()

logger.info('Making DB backup')
make_db_backup()

# --- Groups: create any Launchpad team gerrit does not know yet ---
logger.info('Starting group reconcile')
lp_groups = lp.get_teams()
gerrit_groups = gerrit.get_groups()

for new_group in [g for g in lp_groups if g not in gerrit_groups]:
    gerrit.add_group(new_group, lp.get_team_display_name(new_group))

# Mirror Launchpad's direct sub-team structure into group includes.
for group in lp_groups:
    direct_subs = lp.get_sub_teams(group)
    if direct_subs:
        gerrit.rebuild_sub_groups(group, direct_subs)

logger.info('End group reconcile')

# --- Users: create accounts for Launchpad users gerrit lacks ---
logger.info('Starting user reconcile')
lp_users = lp.get_all_users()
gerrit_users = gerrit.get_users()

for new_user in [u for u in lp_users if u not in gerrit_users]:
    gerrit.add_user(new_user, lp.get_user_data(new_user))

logger.info('End user reconcile')

# --- Membership: align group membership and project watches ---
logger.info('Starting user to group reconcile')
lp_groups = lp.get_teams()
for group in lp_groups:
    # First find users to attach to groups
    gerrit_group_users = gerrit.get_users_from_group(group)
    lp_group_users = lp.get_team_members(group, gerrit)

    for user in [u for u in lp_group_users if u not in gerrit_group_users]:
        gerrit.add_user_to_group(user, group)

    # Second find users to attach to watches
    lp_group_watches = lp.get_team_watches(group)
    gerrit_group_watches = gerrit.get_users_from_watches(group)
    for user in [u for u in lp_group_watches
                 if u not in gerrit_group_watches]:
        gerrit.add_user_to_watch(user, group)

    # Third find users to remove from groups/watches
    for user in [u for u in gerrit_group_users if u not in lp_group_users]:
        gerrit.del_user_from_group(user, group)

logger.info('Ending user to group reconcile')

gerrit.cleanup()
|
@ -109,29 +109,27 @@ class gerrit($virtual_hostname='',
|
|||||||
|
|
||||||
# Skip cron jobs if we're in test mode
|
# Skip cron jobs if we're in test mode
|
||||||
if ($testmode == false) {
|
if ($testmode == false) {
|
||||||
cron { "gerritupdateci":
|
|
||||||
user => gerrit2,
|
|
||||||
minute => "*/15",
|
|
||||||
command => 'sleep $((RANDOM\%60)) && cd /home/gerrit2/openstack-ci && /usr/bin/git pull -q origin master'
|
|
||||||
}
|
|
||||||
|
|
||||||
cron { "gerritsyncusers":
|
cron { "gerritsyncusers":
|
||||||
user => gerrit2,
|
user => gerrit2,
|
||||||
minute => "*/15",
|
minute => "*/15",
|
||||||
command => 'sleep $((RANDOM\%60+60)) && cd /home/gerrit2/openstack-ci && python gerrit/update_gerrit_users.py'
|
command => 'sleep $((RANDOM\%60+60)) && python /usr/local/gerrit/scripts/update_gerrit_users.py',
|
||||||
|
require => File['/usr/local/gerrit/scripts'],
|
||||||
}
|
}
|
||||||
|
|
||||||
cron { "gerritclosepull":
|
cron { "gerritclosepull":
|
||||||
user => gerrit2,
|
user => gerrit2,
|
||||||
minute => "*/5",
|
minute => "*/5",
|
||||||
command => 'sleep $((RANDOM\%60+90)) && cd /home/gerrit2/openstack-ci && python gerrit/close_pull_requests.py'
|
command => 'sleep $((RANDOM\%60+90)) && python /usr/local/gerrit/scripts/close_pull_requests.py',
|
||||||
|
require => File['/usr/local/gerrit/scripts'],
|
||||||
}
|
}
|
||||||
|
|
||||||
cron { "expireoldreviews":
|
cron { "expireoldreviews":
|
||||||
user => gerrit2,
|
user => gerrit2,
|
||||||
hour => 6,
|
hour => 6,
|
||||||
minute => 3,
|
minute => 3,
|
||||||
command => 'cd /home/gerrit2/openstack-ci && python gerrit/expire_old_reviews.py'
|
command => 'python /usr/local/gerrit/scripts/expire_old_reviews.py',
|
||||||
|
require => File['/usr/local/gerrit/scripts'],
|
||||||
}
|
}
|
||||||
|
|
||||||
cron { "gerrit_repack":
|
cron { "gerrit_repack":
|
||||||
@ -494,4 +492,23 @@ class gerrit($virtual_hostname='',
|
|||||||
require => File['/etc/init.d/gerrit'],
|
require => File['/etc/init.d/gerrit'],
|
||||||
refreshonly => true,
|
refreshonly => true,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
file { '/usr/local/gerrit':
|
||||||
|
owner => 'root',
|
||||||
|
group => 'root',
|
||||||
|
mode => 755,
|
||||||
|
ensure => 'directory',
|
||||||
|
}
|
||||||
|
|
||||||
|
file { '/usr/local/gerrit/scripts':
|
||||||
|
owner => 'root',
|
||||||
|
group => 'root',
|
||||||
|
mode => 755,
|
||||||
|
ensure => 'directory',
|
||||||
|
recurse => true,
|
||||||
|
require => File['/usr/local/gerrit'],
|
||||||
|
source => [
|
||||||
|
"puppet:///modules/gerrit/scripts",
|
||||||
|
],
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
Loading…
Reference in New Issue
Block a user