system-config/playbooks/roles/afs-release/files/release-volumes.py

#!/usr/bin/env python3
# Copyright 2020 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# This script is intended to run on mirror-update.opendev.org, called
# periodically from a cron job.
#
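# An illustrative cron entry might look something like the following (the
# schedule, install path and log file here are assumptions for the sake of
# the example, not the deployed configuration):
#
#   0 */2 * * * root /usr/local/bin/release-volumes.py >> /var/log/afs-release.log 2>&1
#
# The script can also be run by hand, for example:
#
#   release-volumes.py --debug --skip-release   # check only, never release
#   release-volumes.py --force-release          # release every volume
#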
import argparse
import fcntl
import logging
import os
import re
import sys
import statsd
import subprocess
from contextlib import contextmanager
from datetime import datetime

VOLUMES = ['docs',
           'docs.dev',
           'project.airship',
           'project.governance',
           'project.opendev',
           'project.releases',
           'project.security',
           'project.service-types',
           'project.specs',
           'project.starlingx',
           'project.tarballs',
           'project.zuul',
           ]

STATSD_PREFIX = 'afs.release'

UPDATE_RE = re.compile(r"^\s+Last Update (.*)$")

log = logging.getLogger("release")
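

# Parse the output of "vos examine <volume>".  Each site's server line
# follows the line that begins with the volume name, and the indented
# "Last Update" line that follows gives that site's last update time.
# Returns a list of dicts with 'site', 'volume' and 'updated' keys.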
def get_last_update(volume):
    ret = []
    out = subprocess.check_output(['vos', 'examine', volume],
                                  stderr=subprocess.STDOUT).decode('utf-8')
    state = 0
    for line in out.split('\n'):
        if state == 0 and line.startswith(volume):
            state = 1
            site = None
        elif state == 1:
            site = line.strip()
            state = 0
        m = UPDATE_RE.match(line)
        if m:
            ret.append(dict(site=site,
                            volume=volume,
                            updated=datetime.strptime(
                                m.group(1), '%a %b %d %H:%M:%S %Y')))
    return ret
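

# Run "vos release <volume>" on the remote AFS server over ssh with a
# dedicated key, timing the operation with a statsd timer named
# afs.release.<volume, with dots replaced by underscores>.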
def release(volume, host, user, key, stats):
    log.info("Releasing %s" % volume)
    vn = volume.replace('.', '_')
    with stats.timer('%s.%s' % (STATSD_PREFIX, vn)):
        # NOTE(ianw): clearly paramiko would be better, but the bionic
        # version 2.0.0 can't read an ed25519 key, which is what we use
        # in all the other ansible setup.
        cmd = ('ssh', '-T', '-i', '%s' % key,
               '%s@%s' % (user, host), '--',
               'vos', 'release', volume)
        log.debug('Running: %s' % ' '.join(cmd))
        p = subprocess.Popen(cmd,
                             shell=False,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             universal_newlines=True)
        output, error = p.communicate()
        for line in output.split('\n'):
            log.debug(line)
        if not error:
            log.info("Release of %s successful" % volume)
        else:
            log.error("Release of %s failed" % volume)


def check_release(volume):
    '''Check if a volume needs release.

    Return True if it needs to be released, False if not.
    '''
    log.info("Checking %s" % volume)
    rw = get_last_update(volume)[0]
    log.debug(" %s %s %s" % (rw['site'], rw['updated'], rw['volume']))
    ros = get_last_update(volume + '.readonly')
    update = False
    for ro in ros:
        log.debug(" %s %s %s" % (ro['site'], ro['updated'], ro['volume']))
        if ro['updated'] < rw['updated']:
            update = True
    if update:
        return True
    else:
        log.info("... no release required")
        return False
@contextmanager
def get_lock(path):
    with open(path, 'w') as f:
        try:
            fcntl.flock(f, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except IOError:
            print("Unable to get lockfile!")
            sys.exit(1)
        f.write("%s\n" % os.getpid())
        f.flush()
        log.debug("Acquired release lock")
        yield
        log.debug("Releasing release lock")
        fcntl.flock(f, fcntl.LOCK_UN)
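

# Parse arguments, configure logging and the statsd client, then take the
# lockfile and check (and, unless skipped, release) each volume in VOLUMES.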
def main():
    parser = argparse.ArgumentParser(
        description='Periodically release various OpenDev AFS volumes')
    parser.add_argument('-d', '--debug', action='store_true')
    parser.add_argument('--lockfile', action='store',
                        default='/var/run/release-volumes.lock',
                        help='volume release lockfile')
    parser.add_argument('--force-release', action='store_true',
                        help="Force vos release, even if not required")
    parser.add_argument('--skip-release', action='store_true',
                        help="Skip vos release, even if required")
    parser.add_argument('--ssh-user', action='store',
                        default='vos_release',
                        help="SSH user on remote host")
    parser.add_argument('--ssh-identity', action='store',
                        default='/root/.ssh/id_vos_release',
                        help="SSH identity file for remote vos release")
    parser.add_argument('--ssh-server', action='store',
                        default='afs01.dfw.openstack.org',
                        help="Remote host to run vos release")
    parser.add_argument('--statsd-host', action='store',
                        default='graphite.opendev.org',
                        help='Remote host to send stats to')
    parser.add_argument('--statsd-port', action='store', type=int,
                        default=8125,
                        help='Remote port to send stats to')

    args = parser.parse_args()

    level = logging.DEBUG if args.debug else logging.INFO
    logging.basicConfig(level=level,
                        format='%(asctime)s %(name)s '
                               '%(levelname)-8s %(message)s')

    log.debug("--- Starting %s ---" % datetime.now())
    log.debug("Sending stats to %s:%s" % (args.statsd_host, args.statsd_port))

    if args.force_release:
        log.info("Forcing release of all volumes")

    stats = statsd.StatsClient(host=args.statsd_host,
                               port=args.statsd_port)

    with get_lock(args.lockfile):
        for volume in VOLUMES:
            if check_release(volume) or args.force_release:
                if args.skip_release:
                    log.info("Force skipping release")
                else:
                    release(volume, args.ssh_server, args.ssh_user,
                            args.ssh_identity, stats)

    log.debug("--- Complete %s ---" % datetime.now())


if __name__ == '__main__':
    main()