Fix Gerrit gates and other build issues

There are a few problems with the configuration in our source tree that
cause the Jenkins gates to always fail, as well as things that could cause
problems later. This patch set fixes those and other issues, to bring us
into a known valid state for future commits.

Change-Id: Idf7a0ce5902c40985caa78390b01f9fc2dfbfcf4
Phil Bridges 2016-06-08 15:53:51 -05:00
parent 8127749f17
commit 3c7bcc0c7a
8 changed files with 123 additions and 91 deletions

View File

@@ -1,8 +1,6 @@
-# TODO: get ourselves a nice and shiny CI system like this
-
-#[gerrit]
-#host=review.openstack.org
-#port=29418
-#project=openstack/swiftonfile.git
-#defaultbranch=master
-#defaultremote=gerrit
+[gerrit]
+host=review.openstack.org
+port=29418
+project=openstack/swiftonhpss.git
+defaultbranch=master
+defaultremote=gerrit

View File

@@ -18,7 +18,6 @@
 import sys
 import stat
 import os
-import multiprocessing
 from pwd import getpwuid
 import logging
 import argparse
@@ -87,8 +86,8 @@ def main(program_args):
     del password

     # Figure out what we're doing.
-    target_account, target_container = program_args.account,\
-        program_args.container
+    target_account = program_args.account
+    target_container = program_args.container

     # Start doing it.
     # pool = multiprocessing.Pool(processes=multiprocessing.cpu_count()-1)
@@ -227,7 +226,7 @@ def check_usage():
     return parser.parse_args()


-class Reconciler:
+class Reconciler(object):

     def __init__(self, args):
         self._args = args
@@ -436,7 +435,8 @@ class Reconciler:
                 logging.exception("Putting container %s went wrong" %
                                   target_container)
                 raise e
-        print "Reconciling container %s/%s" % (target_account, target_container)
+        print "Reconciling container %s/%s" % \
+            (target_account, target_container)

         # Make sure those objects get added into the Swift metadata DBs
         self.add_objects_from_hpss(swift_api, target_container, container_dir)
@@ -458,7 +458,8 @@ class Reconciler:
         return objects

     @trace_function
-    def add_objects_from_hpss(self, swift_api, target_container, container_dir):
+    def add_objects_from_hpss(self, swift_api, target_container,
+                              container_dir):
         """
         Update object metadata on object creates, and returns a list of all the
         objects existing in the container from Swift.
@@ -512,7 +513,8 @@ class Reconciler:
         try:
             hpss_containers = os.listdir(account_directory)
         except OSError as err:
-            print "Unable to list files under directory: %s" % account_directory
+            print "Unable to list files under directory: %s" % \
+                account_directory
             raise err

         # Delete containers that only exist in Swift, but not HPSS
@@ -552,8 +554,8 @@ class Reconciler:
         known_good_objects = []
         swift_only_objects = list(set(swift_objects) - set(hpss_objects))

-        # If we have objects that only exist in the Swift metadata, delete those
-        # objects.
+        # If we have objects that only exist in the Swift metadata,
+        # delete those objects.
         for target_obj in swift_only_objects:
             try:
                 swift_api.delete_object(target_container, target_obj)
@@ -614,7 +616,8 @@ class Reconciler:
             if file_user not in keystone_users:
                 fail_reason = \
                     "Cannot configure proper permissions for this path %s\
-                    because user %s does not exist in keystone" % (path, file_user)
+                    because user %s does not exist in keystone" % \
+                    (path, file_user)
                 print fail_reason
                 logging.error(fail_reason)
                 raise IOError(fail_reason)
@@ -639,7 +642,7 @@ class Reconciler:
 # This only exists because the keystoneclient library is so massive that it has
 # to have a lazy-loading mechanism that ensures only one of it can be active,
 # so we can't have handles to multiple different Keystone scopes simultaneously
-class LightweightKeystoneAPI:
+class LightweightKeystoneAPI(object):

     MEMBER_ROLE_ID = '9fe2ff9ee4384b1894a90878d3e92bab'
@@ -677,33 +680,19 @@ class LightweightKeystoneAPI:
         if self.version == 'v2':
             url = '%s/tokens' % self.url
+            creds = {'username': self.username, 'password': self.password}
             token_req = {'auth': {'tenantName': self.tenant_name,
-                                  'passwordCredentials': {
-                                      'username': self.username,
-                                      'password': self.password
-                                  }}}
+                                  'passwordCredentials': creds}}
         else:
             url = '%s/auth/tokens' % self.url
-            token_req = {'auth': {'identity':
-                                  {'methods': ['password'],
-                                   'password': {
-                                       'user': {
-                                           'name': self.username,
-                                           'password': self.password,
-                                           'domain': {'id': 'default'}
-                                       }
-                                   }
-                                   },
-                                  'scope': {
-                                      'project': {
-                                          'name': self.tenant_name,
-                                          'domain': {
-                                              'id': 'default'
-                                          }
-                                      }
-                                  }
-                                  }
-                         }
+            domain = {'id': 'default'}
+            creds = {'user': {'name': self.username,
+                              'password': self.password,
+                              'domain': domain}}
+            scope = {'project': {'name': self.tenant_name, 'domain': domain}}
+            token_req = {'auth': {'identity': {'methods': ['password'],
+                                               'password': creds},
+                                  'scope': scope}}
         try:
             resp_headers, resp_json =\
                 self._get_keystone_response(requests.post,
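For reference, the restructured branches above build the same two token-request bodies as before; the change only pulls the nested dictionaries out into named creds/domain/scope variables so the lines fit the flake8 length limit. A minimal standalone sketch of the resulting payloads (the credential values here are made up for illustration; the real code takes them from the LightweightKeystoneAPI instance):

    # Hypothetical values for illustration only.
    username, password, tenant_name = 'swift', 'secret', 'service'

    # Keystone v2: POST <auth_url>/tokens
    creds = {'username': username, 'password': password}
    v2_token_req = {'auth': {'tenantName': tenant_name,
                             'passwordCredentials': creds}}

    # Keystone v3: POST <auth_url>/auth/tokens
    domain = {'id': 'default'}
    creds = {'user': {'name': username,
                      'password': password,
                      'domain': domain}}
    scope = {'project': {'name': tenant_name, 'domain': domain}}
    v3_token_req = {'auth': {'identity': {'methods': ['password'],
                                          'password': creds},
                             'scope': scope}}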

View File

@@ -66,7 +66,7 @@ if 'install' in sys.argv:
     # Install man pages the crappy hacky way, because setuptools doesn't
     # have any facility to do it.
     man_path = '/usr/local/share/man/1'
-    man_pages = filter(lambda x: os.path.isfile('./doc/troff/'+x),
+    man_pages = filter(lambda x: os.path.isfile('./doc/troff/%s' % x),
                        os.listdir('./doc/troff'))
     for page in man_pages:
-        shutil.copyfile('./doc/troff/'+page, man_path)
+        shutil.copyfile('./doc/troff/%s' % page, man_path)

View File

@ -0,0 +1,40 @@
# Copyright (c) 2016 IBM Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import array
import fcntl
HPSSFS_GET_COS = 0x80046c01
HPSSFS_SET_COS_HINT = 0x40046c02
HPSSFS_SET_FSIZE_HINT = 0x40086c03
HPSSFS_SET_MAXSEGSZ_HINT = 0x40046c04
HPSSFS_PURGE_CACHE = 0x00006c05
HPSSFS_PURGE_LOCK = 0x40046c06
HPSSFS_UNDELETE = 0x40046c07
HPSSFS_UNDELETE_NONE = 0x00000000
HPSSFS_UNDELETE_RESTORE_TIME = 0x00000001
HPSSFS_UNDELETE_OVERWRITE = 0x00000002
HPSSFS_UNDELETE_OVERWRITE_AND_RESTORE = 0x00000003
def ioctl(fd, cmd, val=None):
    if val is not None:
        valbuf = array.array("i", val)
        fcntl.ioctl(fd, cmd, valbuf)
    else:
        fcntl.ioctl(fd, cmd)
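This shim mirrors the interface the rest of the tree expects from the proprietary hpssfs bindings, which is what lets the try/except import fallback added in the later hunks work on machines without HPSS installed. A rough usage sketch (the mount path is hypothetical, and the ioctl will only succeed against a real HPSS FUSE mount):

    import os

    try:
        import hpssfs  # real HPSS bindings, when available
    except ImportError:
        import swiftonhpss.swift.common.hpssfs_ioctl as hpssfs

    fd = os.open('/srv/hpss/some-object', os.O_RDWR)  # hypothetical path
    try:
        # Ask HPSS to drop its cached copy of this file; this command
        # takes no value argument.
        hpssfs.ioctl(fd, hpssfs.HPSSFS_PURGE_CACHE)
    except IOError as err:
        print "hpssfs.ioctl failed: %s" % err.strerror
    finally:
        os.close(fd)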

View File

@@ -24,7 +24,6 @@ from eventlet import sleep
 import cPickle as pickle
 from cStringIO import StringIO
 import pickletools
-import xattr
 from swiftonhpss.swift.common.exceptions import SwiftOnFileSystemIOError
 from swift.common.exceptions import DiskFileNoSpace
 from swift.common.db import utf8encodekeys

View File

@@ -23,7 +23,10 @@ except ImportError:
 import random
 import logging
 import time
-import hpssfs
+try:
+    import hpssfs
+except ImportError:
+    import swiftonhpss.swift.common.hpssfs_ioctl as hpssfs
 import xattr
 from uuid import uuid4
 from hashlib import md5
@@ -55,7 +58,7 @@ from swift.obj.diskfile import get_async_dir
 # FIXME: Hopefully we'll be able to move to Python 2.7+ where O_CLOEXEC will
 # be back ported. See http://www.python.org/dev/peps/pep-0433/
-O_CLOEXEC = 0o20000000
+O_CLOEXEC = 0o02000000

 MAX_RENAME_ATTEMPTS = 10
 MAX_OPEN_ATTEMPTS = 10
@@ -313,11 +316,12 @@ class DiskFileWriter(object):
         # (HPSS) Purge lock the file now if we're asked to.
         if purgelock:
             try:
-                hpssfs.ioctl(self._fd, hpssfs.HPSSFS_PURGE_LOCK, int(purgelock))
+                hpssfs.ioctl(self._fd, hpssfs.HPSSFS_PURGE_LOCK,
+                             int(purgelock))
             except IOError as err:
-                raise SwiftOnFileSystemIOError(err.errno,
-                                               '%s, hpssfs.ioctl("%s", ...)' % (
-                                                   err.strerror, self._fd))
+                raise SwiftOnFileSystemIOError(
+                    err.errno,
+                    '%s, hpssfs.ioctl("%s", ...)' % (err.strerror, self._fd))

         # From the Department of the Redundancy Department, make sure
         # we call drop_cache() after fsync() to avoid redundant work
@@ -1051,17 +1055,19 @@ class DiskFile(object):
                     hpssfs.ioctl(fd, hpssfs.HPSSFS_SET_FSIZE_HINT,
                                  long(size))
                 except IOError as err:
-                    raise SwiftOnFileSystemIOError(err.errno,
-                                                   '%s, hpssfs.ioctl("%s", SET_FSIZE)' % (
-                                                       err.strerror, fd))
+                    message = '%s, hpssfs.ioctl("%s", SET_FSIZE)'
+                    raise SwiftOnFileSystemIOError(
+                        err.errno,
+                        message % (err.strerror, fd))

             if cos:
                 try:
                     hpssfs.ioctl(fd, hpssfs.HPSSFS_SET_COS_HINT, int(cos))
                 except IOError as err:
-                    raise SwiftOnFileSystemIOError(err.errno,
-                                                   '%s, hpssfs.ioctl("%s", SET_COS)' % (
-                                                       err.strerror, fd))
+                    message = '%s, hpssfs.ioctl("%s", SET_COS)'
+                    raise SwiftOnFileSystemIOError(
+                        err.errno,
+                        message % (err.strerror, fd))

         except SwiftOnFileSystemOSError as gerr:
             if gerr.errno in (errno.ENOSPC, errno.EDQUOT):
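The O_CLOEXEC change above is a genuine fix rather than a style cleanup: on Linux the close-on-exec open flag is octal 02000000 (0x80000), while the old constant 0o20000000 is 0x400000, a different bit altogether. A quick sanity check of the new value, assuming Linux (the flag differs on other platforms):

    import os

    O_CLOEXEC = 0o02000000
    assert O_CLOEXEC == 0x80000
    # Python 3.3+ exposes the same value in the os module on Linux; the
    # Python 2 interpreters this tree targets do not, hence the hard-coded
    # module-level constant above.
    assert getattr(os, 'O_CLOEXEC', O_CLOEXEC) == O_CLOEXEC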

View File

@@ -19,7 +19,10 @@ import math
 import logging
 import xattr
 import os
-import hpssfs
+try:
+    import hpssfs
+except ImportError:
+    import swiftonhpss.swift.common.hpssfs_ioctl as hpssfs
 import time
 import eventlet
@@ -29,7 +32,7 @@ from swift.common.swob import HTTPConflict, HTTPBadRequest, HeaderKeyDict, \
     HTTPInsufficientStorage, HTTPPreconditionFailed, HTTPRequestTimeout, \
     HTTPClientDisconnect, HTTPUnprocessableEntity, HTTPNotImplemented, \
     HTTPServiceUnavailable, HTTPCreated, HTTPNotFound, HTTPAccepted, \
-    HTTPNoContent, Request, Response
+    HTTPNoContent, Response
 from swift.common.utils import public, timing_stats, replication, \
     config_true_value, Timestamp, csv_append
 from swift.common.request_helpers import get_name_and_placement, \
@@ -40,7 +43,7 @@ from swiftonhpss.swift.common.exceptions import AlreadyExistsAsFile, \
 from swift.common.exceptions import DiskFileDeviceUnavailable, \
     DiskFileNotExist, DiskFileQuarantined, ChunkReadTimeout, DiskFileNoSpace, \
     DiskFileXattrNotSupported, DiskFileExpired, DiskFileDeleted
-from swift.common.constraints import valid_timestamp, check_account_format
+from swift.common.constraints import valid_timestamp
 from swift.obj import server
 from swift.common.ring import Ring
@@ -64,8 +67,9 @@ class SwiftOnFileDiskFileRouter(object):
 class ObjectController(server.ObjectController):
     """
-    Subclass of the object server's ObjectController that supports HPSS-specific
-    metadata headers and operations (such as COS assignment and purge locking).
+    Subclass of the object server's ObjectController that supports
+    HPSS-specific metadata headers and operations (such as COS assignment
+    and purge locking).
     """

     def setup(self, conf):
@@ -91,7 +95,6 @@ class ObjectController(server.ObjectController):
             self.container_ring = Ring(self.swift_dir, ring_name='container')
         return self.container_ring

-
     @public
     @timing_stats()
     def PUT(self, request):
@@ -195,8 +198,9 @@ class ObjectController(server.ObjectController):
             return HTTPUnprocessableEntity(request=request)

         # Update object metadata
+        content_type = request.headers['content-type']
         metadata = {'X-Timestamp': request.timestamp.internal,
-                    'Content-Type': request.headers['content-type'],
+                    'Content-Type': content_type,
                     'ETag': etag,
                     'Content-Length': str(upload_size),
                     }
@@ -206,7 +210,8 @@ class ObjectController(server.ObjectController):
         metadata.update(meta_headers)
         backend_headers = \
             request.headers.get('X-Backend-Replication-Headers')
-        for header_key in (backend_headers or self.allowed_headers):
+        for header_key in (backend_headers or
+                           self.allowed_headers):
             if header_key in request.headers:
                 header_caps = header_key.title()
                 metadata[header_caps] = request.headers[header_key]
@@ -259,16 +264,12 @@ class ObjectController(server.ObjectController):
                 self.delete_at_update('DELETE', orig_delete_at, account,
                                       container, obj, request, device,
                                       policy)
+            container_headers = {'x-size': metadata['Content-Length'],
+                                 'x-content-type': metadata['Content-Type'],
+                                 'x-timestamp': metadata['X-Timestamp'],
+                                 'x-etag': metadata['ETag']}
             self.container_update('PUT', account, container, obj, request,
-                                  HeaderKeyDict(
-                                      {'x-size':
-                                       metadata['Content-Length'],
-                                       'x-content-type':
-                                       metadata['Content-Type'],
-                                       'x-timestamp':
-                                       metadata['X-Timestamp'],
-                                       'x-etag':
-                                       metadata['ETag']}),
+                                  HeaderKeyDict(container_headers),
                                   device, policy)
         # Create convenience symlink
         try:
@@ -346,8 +347,8 @@ class ObjectController(server.ObjectController):
         # Get DiskFile
         try:
-            disk_file = self.get_diskfile(device, partition, account, container,
-                                          obj, policy=policy)
+            disk_file = self.get_diskfile(device, partition, account,
+                                          container, obj, policy=policy)
         except DiskFileDeviceUnavailable:
             return HTTPInsufficientStorage(drive=device, request=request)
@@ -417,8 +418,8 @@ class ObjectController(server.ObjectController):
         # Get Diskfile
         try:
-            disk_file = self.get_diskfile(device, partition, account, container,
-                                          obj, policy)
+            disk_file = self.get_diskfile(device, partition, account,
+                                          container, obj, policy)
         except DiskFileDeviceUnavailable:
             return HTTPInsufficientStorage(drive=device, request=request)
@@ -524,10 +525,9 @@ class ObjectController(server.ObjectController):
             return HTTPNotFound(request=request)
         orig_timestamp = Timestamp(orig_metadata.get('X-Timestamp', 0))
         if orig_timestamp >= req_timestamp:
+            backend_headers = {'X-Backend-Timestamp': orig_timestamp.internal}
             return HTTPConflict(request=request,
-                                headers={
-                                    'X-Backend-Timestamp': orig_timestamp.internal
-                                })
+                                headers=backend_headers)
         metadata = {'X-Timestamp': req_timestamp.internal}
         metadata.update(val for val in request.headers.iteritems()
                         if is_user_meta('object', val[0]))

View File

@@ -1,5 +1,6 @@
 [tox]
-envlist = py27,pep8,functest
+#envlist = py27,pep8,functest
+envlist = py27,pep8
 minversion = 1.6
 skipsdist = True
@@ -10,7 +11,7 @@ whitelist_externals=bash
 setenv = VIRTUAL_ENV={envdir}
          NOSE_WITH_COVERAGE=1
          NOSE_COVER_BRANCHES=1
-         NOSE_COVER_PACKAGE=swiftonfile
+         NOSE_COVER_PACKAGE=swiftonhpss
 deps =
 # Note: pip supports installing from git repos.
 # https://pip.pypa.io/en/latest/reference/pip_install.html#git
@@ -35,8 +36,7 @@ commands = bash ./.functests -q
 [testenv:pep8]
 changedir = {toxinidir}
-commands =
-    flake8 swiftonhpss test setup.py
+commands = flake8 swiftonhpss test setup.py
     flake8 --filename=swiftonhpss* bin

 [testenv:venv]