Rebase to OpenStack Swift Havana (1.10.0)

Change-Id: I90821230a1a7100c74d97cccc9c445251d0f65e7
Signed-off-by: Peter Portante <peter.portante@redhat.com>
Reviewed-on: http://review.gluster.org/6157
Reviewed-by: Luis Pabon <lpabon@redhat.com>
Tested-by: Luis Pabon <lpabon@redhat.com>
Author: Peter Portante 2013-10-24 16:15:25 -04:00 (committed by Luis Pabon)
parent 6b8d7c5919
commit 286a1308db
24 changed files with 2130 additions and 1466 deletions


@@ -44,6 +44,6 @@ class PkgInfo(object):
 ###                                        ###
 ### Change the Package version here        ###
 ###                                        ###
-_pkginfo = PkgInfo('1.9.1', '0', 'glusterfs-openstack-swift', False)
+_pkginfo = PkgInfo('1.10.0', '0', 'glusterfs-openstack-swift', False)
 __version__ = _pkginfo.pretty_version
 __canonical_version__ = _pkginfo.canonical_version


@@ -32,7 +32,7 @@ class Fake_file(object):
         return 0

     def read(self, count):
-        return 0
+        return None

     def fileno(self):
         return -1
@@ -265,6 +265,14 @@ def do_fsync(fd):
             err.errno, '%s, os.fsync("%s")' % (err.strerror, fd))


+def do_fdatasync(fd):
+    try:
+        os.fdatasync(fd)
+    except OSError as err:
+        raise GlusterFileSystemOSError(
+            err.errno, '%s, os.fdatasync("%s")' % (err.strerror, fd))
+
+
 def mkdirs(path):
     """
     Ensures the path is a directory or makes it if not. Errors if the path
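
The new do_fdatasync() follows the same thin-wrapper pattern as do_fsync(): the OSError is re-raised as GlusterFileSystemOSError so the Gluster-specific class name shows up in logged tracebacks. A minimal standalone sketch of how a caller might use it; the temporary file path and the locally re-declared exception class are illustrative only, not part of this change.

    import os


    class GlusterFileSystemOSError(OSError):
        # Stand-in for gluster.swift.common.exceptions.GlusterFileSystemOSError
        pass


    def do_fdatasync(fd):
        try:
            os.fdatasync(fd)
        except OSError as err:
            raise GlusterFileSystemOSError(
                err.errno, '%s, os.fdatasync("%s")' % (err.strerror, fd))


    fd = os.open('/tmp/example.data', os.O_WRONLY | os.O_CREAT, 0o600)
    try:
        os.write(fd, b'chunk of object data')
        do_fdatasync(fd)  # flush data blocks without forcing a metadata sync
    finally:
        os.close(fd)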


@@ -23,8 +23,9 @@ from hashlib import md5
 from eventlet import sleep
 import cPickle as pickle
 from swift.common.utils import normalize_timestamp
+from gluster.swift.common.exceptions import GlusterFileSystemIOError
 from gluster.swift.common.fs_utils import do_rename, do_fsync, os_path, \
-    do_stat, do_listdir, do_walk, do_rmdir
+    do_stat, do_fstat, do_listdir, do_walk, do_rmdir
 from gluster.swift.common import Glusterfs

 X_CONTENT_TYPE = 'Content-Type'
@@ -55,18 +56,6 @@ PICKLE_PROTOCOL = 2
 CHUNK_SIZE = 65536


-class GlusterFileSystemOSError(OSError):
-    # Having our own class means the name will show up in the stack traces
-    # recorded in the log files.
-    pass
-
-
-class GlusterFileSystemIOError(IOError):
-    # Having our own class means the name will show up in the stack traces
-    # recorded in the log files.
-    pass
-
-
 def read_metadata(path_or_fd):
     """
     Helper function to read the pickled metadata from a File/Directory.
@@ -320,6 +309,23 @@ def get_account_details(acc_path):
     return container_list, container_count


+def _read_for_etag(fp):
+    etag = md5()
+    while True:
+        chunk = fp.read(CHUNK_SIZE)
+        if chunk:
+            etag.update(chunk)
+            if len(chunk) >= CHUNK_SIZE:
+                # It is likely that we have more data to be read from the
+                # file. Yield the co-routine cooperatively to avoid
+                # consuming the worker during md5sum() calculations on
+                # large files.
+                sleep()
+        else:
+            break
+    return etag.hexdigest()
+
+
 def _get_etag(path):
     """
     FIXME: It would be great to have a translator that returns the md5sum() of
@@ -328,28 +334,24 @@ def _get_etag(path):
     Since we don't have that we should yield after each chunk read and
     computed so that we don't consume the worker thread.
     """
-    etag = md5()
-    with open(path, 'rb') as fp:
-        while True:
-            chunk = fp.read(CHUNK_SIZE)
-            if chunk:
-                etag.update(chunk)
-                if len(chunk) >= CHUNK_SIZE:
-                    # It is likely that we have more data to be read from the
-                    # file. Yield the co-routine cooperatively to avoid
-                    # consuming the worker during md5sum() calculations on
-                    # large files.
-                    sleep()
-            else:
-                break
-    return etag.hexdigest()
+    if isinstance(path, int):
+        with os.fdopen(os.dup(path), 'rb') as fp:
+            etag = _read_for_etag(fp)
+        os.lseek(path, 0, os.SEEK_SET)
+    else:
+        with open(path, 'rb') as fp:
+            etag = _read_for_etag(fp)
+    return etag


 def get_object_metadata(obj_path):
     """
     Return metadata of object.
     """
-    stats = do_stat(obj_path)
+    if isinstance(obj_path, int):
+        stats = do_fstat(obj_path)
+    else:
+        stats = do_stat(obj_path)
     if not stats:
         metadata = {}
     else:
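
The interesting part of the new _get_etag() is the file-descriptor branch: os.dup() gives os.fdopen() its own descriptor, so closing the temporary file object leaves the caller's fd open, and the shared file offset is rewound afterwards with os.lseek(). A self-contained sketch of that pattern; the helper name and the sample file are illustrative, not part of the change.

    import os
    from hashlib import md5

    CHUNK_SIZE = 65536


    def md5_of_fd(fd):
        checksum = md5()
        # Duplicate the descriptor; both fds share one file offset, so the
        # reads below advance it and the lseek() rewinds it for the caller.
        with os.fdopen(os.dup(fd), 'rb') as fp:
            for chunk in iter(lambda: fp.read(CHUNK_SIZE), b''):
                checksum.update(chunk)
        os.lseek(fd, 0, os.SEEK_SET)
        return checksum.hexdigest()


    fd = os.open('/etc/hosts', os.O_RDONLY)
    try:
        print(md5_of_fd(fd))
    finally:
        os.close(fd)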


@@ -32,10 +32,12 @@ from swift.common.utils import TRUE_VALUES, drop_buffer_cache, ThreadPool
 from swift.common.exceptions import DiskFileNotExist, DiskFileError, \
     DiskFileNoSpace, DiskFileDeviceUnavailable
-from gluster.swift.common.exceptions import GlusterFileSystemOSError
+from gluster.swift.common.exceptions import GlusterFileSystemOSError, \
+    GlusterFileSystemIOError
 from gluster.swift.common.Glusterfs import mount
 from gluster.swift.common.fs_utils import do_fstat, do_open, do_close, \
-    do_unlink, do_chown, os_path, do_fsync, do_fchown, do_stat, Fake_file
+    do_unlink, do_chown, os_path, do_fsync, do_fchown, do_stat, do_write, \
+    do_fdatasync, do_rename, Fake_file
 from gluster.swift.common.utils import read_metadata, write_metadata, \
     validate_object, create_object_metadata, rmobjdir, dir_is_object, \
     get_object_metadata
@@ -45,7 +47,6 @@ from gluster.swift.common.utils import X_CONTENT_LENGTH, X_CONTENT_TYPE, \
 from ConfigParser import ConfigParser, NoSectionError, NoOptionError

 from swift.obj.diskfile import DiskFile as SwiftDiskFile
-from swift.obj.diskfile import DiskWriter as SwiftDiskWriter

 # FIXME: Hopefully we'll be able to move to Python 2.7+ where O_CLOEXEC will
 # be back ported. See http://www.python.org/dev/peps/pep-0433/
@@ -278,7 +279,7 @@ def _adjust_metadata(metadata):
     return metadata


-class DiskWriter(SwiftDiskWriter):
+class DiskWriter(object):
     """
     Encapsulation of the write context for servicing PUT REST API
     requests. Serves as the context manager object for DiskFile's writer()
@@ -286,6 +287,126 @@ class DiskWriter(SwiftDiskWriter):
     We just override the put() method for Gluster.
     """

+    def __init__(self, disk_file, fd, tmppath, threadpool):
+        self.disk_file = disk_file
+        self.fd = fd
+        self.tmppath = tmppath
+        self.upload_size = 0
+        self.last_sync = 0
+        self.threadpool = threadpool
+
+    def write(self, chunk):
+        """
+        Write a chunk of data into the temporary file.
+
+        :param chunk: the chunk of data to write as a string object
+        """
+        def _write_entire_chunk(chunk):
+            while chunk:
+                written = do_write(self.fd, chunk)
+                self.upload_size += written
+                chunk = chunk[written:]
+
+        self.threadpool.run_in_thread(_write_entire_chunk, chunk)
+
+        # For large files sync every 512MB (by default) written
+        diff = self.upload_size - self.last_sync
+        if diff >= self.disk_file.bytes_per_sync:
+            self.threadpool.force_run_in_thread(do_fdatasync, self.fd)
+            drop_buffer_cache(self.fd, self.last_sync, diff)
+            self.last_sync = self.upload_size
+
+    def _finalize_put(self, metadata):
+        # Write out metadata before fsync() to ensure it is also forced to
+        # disk.
+        write_metadata(self.fd, metadata)
+
+        # We call fsync() before calling drop_cache() to lower the
+        # amount of redundant work the drop cache code will perform on
+        # the pages (now that after fsync the pages will be all
+        # clean).
+        do_fsync(self.fd)
+
+        # From the Department of the Redundancy Department, make sure
+        # we call drop_cache() after fsync() to avoid redundant work
+        # (pages all clean).
+        drop_buffer_cache(self.fd, 0, self.upload_size)
+
+        # At this point we know that the object's full directory path
+        # exists, so we can just rename it directly without using Swift's
+        # swift.common.utils.renamer(), which makes the directory path and
+        # adds extra stat() calls.
+        df = self.disk_file
+        data_file = os.path.join(df.put_datadir, df._obj)
+        attempts = 1
+        while True:
+            try:
+                do_rename(self.tmppath, data_file)
+            except OSError as err:
+                if err.errno in (errno.ENOENT, errno.EIO) \
+                        and attempts < MAX_RENAME_ATTEMPTS:
+                    # FIXME: Why either of these two error conditions is
+                    # happening is unknown at this point. This might be a
+                    # FUSE issue of some sort or a possible race
+                    # condition. So let's sleep on it, and double check
+                    # the environment after a good nap.
+                    _random_sleep()
+                    # Tease out why this error occurred. The man page for
+                    # rename reads:
+                    #   "The link named by tmppath does not exist; or, a
+                    #    directory component in data_file does not exist;
+                    #    or, tmppath or data_file is an empty string."
+                    assert len(self.tmppath) > 0 and len(data_file) > 0
+                    tpstats = do_stat(self.tmppath)
+                    tfstats = do_fstat(self.fd)
+                    assert tfstats
+                    if not tpstats or tfstats.st_ino != tpstats.st_ino:
+                        # Temporary file name conflict
+                        raise DiskFileError(
+                            'DiskFile.put(): temporary file, %s, was'
+                            ' already renamed (targeted for %s)' % (
+                                self.tmppath, data_file))
+                    else:
+                        # Data file target name now has a bad path!
+                        dfstats = do_stat(df.put_datadir)
+                        if not dfstats:
+                            raise DiskFileError(
+                                'DiskFile.put(): path to object, %s, no'
+                                ' longer exists (targeted for %s)' % (
+                                    df.put_datadir,
+                                    data_file))
+                        else:
+                            is_dir = stat.S_ISDIR(dfstats.st_mode)
+                            if not is_dir:
+                                raise DiskFileError(
+                                    'DiskFile.put(): path to object, %s,'
+                                    ' no longer a directory (targeted for'
+                                    ' %s)' % (df.put_datadir,
+                                              data_file))
+                            else:
+                                # Let's retry since everything looks okay
+                                logging.warn(
+                                    "DiskFile.put(): os.rename('%s','%s')"
+                                    " initially failed (%s) but a"
+                                    " stat('%s') following that succeeded:"
+                                    " %r" % (
+                                        self.tmppath, data_file,
+                                        str(err), df.put_datadir,
+                                        dfstats))
+                                attempts += 1
+                                continue
+                else:
+                    raise GlusterFileSystemOSError(
+                        err.errno, "%s, os.rename('%s', '%s')" % (
+                            err.strerror, self.tmppath, data_file))
+            else:
+                # Success!
+                break
+
+        # Close here so the calling context does not have to perform this
+        # in a thread.
+        do_close(self.fd)
+
     def put(self, metadata, extension='.data'):
         """
         Finalize writing the file on disk, and renames it from the temp file
@@ -306,120 +427,34 @@ class DiskWriter(SwiftDiskWriter):
         if not df.data_file:
             # Does not exist, create it
             data_file = os.path.join(df._obj_path, df._obj)
-            _, df.metadata = self.threadpool.force_run_in_thread(
+            _, df._metadata = self.threadpool.force_run_in_thread(
                 df._create_dir_object, data_file, metadata)
             df.data_file = os.path.join(df._container_path, data_file)
-        elif not df.is_dir:
+        elif not df._is_dir:
             # Exists, but as a file
             raise DiskFileError('DiskFile.put(): directory creation failed'
                                 ' since the target, %s, already exists as'
                                 ' a file' % df.data_file)
             return

-        if df._is_dir:
-            # A pre-existing directory already exists on the file
-            # system, perhaps gratuitously created when another
-            # object was created, or created externally to Swift
-            # REST API servicing (UFO use case).
-            raise DiskFileError('DiskFile.put(): file creation failed since'
-                                ' the target, %s, already exists as a'
-                                ' directory' % df.data_file)
-
-        def finalize_put():
-            # Write out metadata before fsync() to ensure it is also forced to
-            # disk.
-            write_metadata(self.fd, metadata)
-
-            # We call fsync() before calling drop_cache() to lower the
-            # amount of redundant work the drop cache code will perform on
-            # the pages (now that after fsync the pages will be all
-            # clean).
-            do_fsync(self.fd)
-            # From the Department of the Redundancy Department, make sure
-            # we call drop_cache() after fsync() to avoid redundant work
-            # (pages all clean).
-            drop_buffer_cache(self.fd, 0, self.upload_size)
-
-            # At this point we know that the object's full directory path
-            # exists, so we can just rename it directly without using Swift's
-            # swift.common.utils.renamer(), which makes the directory path and
-            # adds extra stat() calls.
-            data_file = os.path.join(df.put_datadir, df._obj)
-            attempts = 1
-            while True:
-                try:
-                    os.rename(self.tmppath, data_file)
-                except OSError as err:
-                    if err.errno in (errno.ENOENT, errno.EIO) \
-                            and attempts < MAX_RENAME_ATTEMPTS:
-                        # FIXME: Why either of these two error conditions is
-                        # happening is unknown at this point. This might be a
-                        # FUSE issue of some sort or a possible race
-                        # condition. So let's sleep on it, and double check
-                        # the environment after a good nap.
-                        _random_sleep()
-                        # Tease out why this error occurred. The man page for
-                        # rename reads:
-                        #   "The link named by tmppath does not exist; or, a
-                        #    directory component in data_file does not exist;
-                        #    or, tmppath or data_file is an empty string."
-                        assert len(self.tmppath) > 0 and len(data_file) > 0
-                        tpstats = do_stat(self.tmppath)
-                        tfstats = do_fstat(self.fd)
-                        assert tfstats
-                        if not tpstats or tfstats.st_ino != tpstats.st_ino:
-                            # Temporary file name conflict
-                            raise DiskFileError(
-                                'DiskFile.put(): temporary file, %s, was'
-                                ' already renamed (targeted for %s)' % (
-                                    self.tmppath, data_file))
-                        else:
-                            # Data file target name now has a bad path!
-                            dfstats = do_stat(df.put_datadir)
-                            if not dfstats:
-                                raise DiskFileError(
-                                    'DiskFile.put(): path to object, %s, no'
-                                    ' longer exists (targeted for %s)' % (
-                                        df.put_datadir,
-                                        data_file))
-                            else:
-                                is_dir = stat.S_ISDIR(dfstats.st_mode)
-                                if not is_dir:
-                                    raise DiskFileError(
-                                        'DiskFile.put(): path to object, %s,'
-                                        ' no longer a directory (targeted for'
-                                        ' %s)' % (df.put_datadir,
-                                                  data_file))
-                                else:
-                                    # Let's retry since everything looks okay
-                                    logging.warn(
-                                        "DiskFile.put(): os.rename('%s','%s')"
-                                        " initially failed (%s) but a"
-                                        " stat('%s') following that succeeded:"
-                                        " %r" % (
-                                            self.tmppath, data_file,
-                                            str(err), df.put_datadir,
-                                            dfstats))
-                                    attempts += 1
-                                    continue
-                    else:
-                        raise GlusterFileSystemOSError(
-                            err.errno, "%s, os.rename('%s', '%s')" % (
-                                err.strerror, self.tmppath, data_file))
-                else:
-                    # Success!
-                    break
-            # Close here so the calling context does not have to perform this
-            # in a thread.
-            do_close(self.fd)
-
-        self.threadpool.force_run_in_thread(finalize_put)
+        try:
+            self.threadpool.force_run_in_thread(self._finalize_put, metadata)
+        except GlusterFileSystemOSError as err:
+            if err.errno == errno.EISDIR:
+                # A pre-existing directory already exists on the file
+                # system, perhaps gratuitously created when another
+                # object was created, or created externally to Swift
+                # REST API servicing (UFO use case).
+                raise DiskFileError('DiskFile.put(): file creation failed'
                                    ' since the target, %s, already exists as'
                                    ' a directory' % df.data_file)
+            raise

         # Avoid the unlink() system call as part of the mkstemp context
         # cleanup
         self.tmppath = None

-        df.metadata = metadata
+        df._metadata = metadata
         df._filter_metadata()

         # Mark that it actually exists now
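
The new DiskWriter.write() keeps a running upload_size and, once bytes_per_sync bytes have accumulated since the last sync, issues an fdatasync and drops the already-clean page cache for that range. A stripped-down sketch of that cadence outside Swift's threadpool machinery; the class name and the 512 MB default are taken from the hunk above, while plain os calls stand in for the do_* wrappers and drop_buffer_cache.

    import os

    BYTES_PER_SYNC = 512 * 1024 * 1024  # default sync interval noted in the diff


    class ChunkWriter(object):
        def __init__(self, fd, bytes_per_sync=BYTES_PER_SYNC):
            self.fd = fd
            self.bytes_per_sync = bytes_per_sync
            self.upload_size = 0
            self.last_sync = 0

        def write(self, chunk):
            while chunk:
                written = os.write(self.fd, chunk)
                self.upload_size += written
                chunk = chunk[written:]
            diff = self.upload_size - self.last_sync
            if diff >= self.bytes_per_sync:
                # Flush dirty pages, then tell the kernel the synced range
                # can be evicted (posix_fadvise plays the drop_buffer_cache
                # role here, where available).
                os.fdatasync(self.fd)
                if hasattr(os, 'posix_fadvise'):
                    os.posix_fadvise(self.fd, self.last_sync, diff,
                                     os.POSIX_FADV_DONTNEED)
                self.last_sync = self.upload_size
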
@@ -443,7 +478,6 @@ class DiskFile(SwiftDiskFile):
     :param container: container name for the object
     :param obj: object name for the object
     :param logger: logger object for writing out log file messages
-    :param keep_data_fp: if True, don't close the fp, otherwise close it
     :param disk_chunk_Size: size of chunks on file reads
     :param bytes_per_sync: number of bytes between fdatasync calls
     :param iter_hook: called when __iter__ returns a chunk
@@ -456,18 +490,15 @@ class DiskFile(SwiftDiskFile):
     """

     def __init__(self, path, device, partition, account, container, obj,
-                 logger, keep_data_fp=False,
-                 disk_chunk_size=DEFAULT_DISK_CHUNK_SIZE,
+                 logger, disk_chunk_size=DEFAULT_DISK_CHUNK_SIZE,
                  bytes_per_sync=DEFAULT_BYTES_PER_SYNC, iter_hook=None,
                  threadpool=None, obj_dir='objects', mount_check=False,
-                 disallowed_metadata_keys=None, uid=DEFAULT_UID,
-                 gid=DEFAULT_GID):
+                 uid=DEFAULT_UID, gid=DEFAULT_GID):
         if mount_check and not mount(path, device):
             raise DiskFileDeviceUnavailable()
         self.disk_chunk_size = disk_chunk_size
         self.bytes_per_sync = bytes_per_sync
         self.iter_hook = iter_hook
-        self.threadpool = threadpool or ThreadPool(nthreads=0)
         obj = obj.strip(os.path.sep)

         if os.path.sep in obj:
@@ -491,59 +522,78 @@ class DiskFile(SwiftDiskFile):
         self.put_datadir = self.datadir
         self._is_dir = False
         self.logger = logger
-        self.metadata = {}
-        self.meta_file = None
+        self._metadata = None
+        # Don't store a value for data_file until we know it exists.
+        self.data_file = None
+        self._data_file_size = None
         self.fp = None
         self.iter_etag = None
         self.started_at_0 = False
         self.read_to_eof = False
         self.quarantined_dir = None
+        self.suppress_file_closing = False
+        self._verify_close = False
+        self.threadpool = threadpool or ThreadPool(nthreads=0)
+        # FIXME(portante): this attribute is set after open and affects the
+        # behavior of the class (i.e. public interface)
         self.keep_cache = False
         self.uid = int(uid)
         self.gid = int(gid)
-        self.suppress_file_closing = False

-        # Don't store a value for data_file until we know it exists.
-        self.data_file = None
+    def open(self, verify_close=False):
+        """
+        Open the file and read the metadata.
+
+        This method must populate the _metadata attribute.
+
+        :param verify_close: force implicit close to verify_file, no effect on
+                             explicit close.
+
+        :raises DiskFileCollision: on md5 collision
+        """
         data_file = os.path.join(self.put_datadir, self._obj)
         try:
-            stats = do_stat(data_file)
-        except OSError as err:
-            if err.errno == errno.ENOTDIR:
-                return
+            fd = do_open(data_file, os.O_RDONLY | os.O_EXCL)
+        except GlusterFileSystemOSError as err:
+            self.logger.exception(
+                "Error opening file, %s :: %s", data_file, err)
         else:
-            if not stats:
-                return
-
-        self.data_file = data_file
-        self._is_dir = stat.S_ISDIR(stats.st_mode)
-
-        self.metadata = read_metadata(data_file)
-        if not self.metadata:
-            create_object_metadata(data_file)
-            self.metadata = read_metadata(data_file)
-        if not validate_object(self.metadata):
-            create_object_metadata(data_file)
-            self.metadata = read_metadata(data_file)
-        self._filter_metadata()
-
-        if keep_data_fp:
-            if not self._is_dir:
-                # The caller has an assumption that the "fp" field of this
-                # object is an file object if keep_data_fp is set. However,
-                # this implementation of the DiskFile object does not need to
-                # open the file for internal operations. So if the caller
-                # requests it, we'll just open the file for them.
-                self.fp = do_open(data_file, 'rb')
-            else:
-                self.fp = Fake_file(data_file)
+            try:
+                stats = do_fstat(fd)
+            except GlusterFileSystemOSError as err:
+                self.logger.exception(
+                    "Error stat'ing open file, %s :: %s", data_file, err)
+            else:
+                self._is_dir = stat.S_ISDIR(stats.st_mode)
+                self.data_file = data_file
+                self._metadata = read_metadata(fd)
+                if not self._metadata:
+                    create_object_metadata(fd)
+                    self._metadata = read_metadata(fd)
+                if not validate_object(self._metadata):
+                    create_object_metadata(fd)
+                    self._metadata = read_metadata(fd)
+                self._filter_metadata()
+                if self._is_dir:
+                    # Use a fake file handle to satisfy the super class's
+                    # __iter__ method requirement when dealing with
+                    # directories as objects.
+                    os.close(fd)
+                    self.fp = Fake_file(data_file)
+                else:
+                    self.fp = os.fdopen(fd, 'rb')
+        self._verify_close = verify_close
+        self._metadata = self._metadata or {}
+        return self

-    def drop_cache(self, fd, offset, length):
+    def _drop_cache(self, fd, offset, length):
         if fd >= 0:
-            super(DiskFile, self).drop_cache(fd, offset, length)
+            super(DiskFile, self)._drop_cache(fd, offset, length)

     def close(self, verify_file=True):
         """
@@ -555,12 +605,17 @@ class DiskFile(SwiftDiskFile):
         if self.fp:
             do_close(self.fp)
             self.fp = None
+        self._metadata = None
+        self._data_file_size = None
+        self._verify_close = False

     def _filter_metadata(self):
-        if X_TYPE in self.metadata:
-            self.metadata.pop(X_TYPE)
-        if X_OBJECT_TYPE in self.metadata:
-            self.metadata.pop(X_OBJECT_TYPE)
+        if self._metadata is None:
+            return
+        if X_TYPE in self._metadata:
+            self._metadata.pop(X_TYPE)
+        if X_OBJECT_TYPE in self._metadata:
+            self._metadata.pop(X_OBJECT_TYPE)

     def _create_dir_object(self, dir_path, metadata=None):
         """
@@ -619,7 +674,7 @@ class DiskFile(SwiftDiskFile):
         return True, newmd

     @contextmanager
-    def writer(self, size=None):
+    def create(self, size=None):
         """
         Contextmanager to make a temporary file, optionally of a specified
         initial size.
@@ -721,63 +776,68 @@ class DiskFile(SwiftDiskFile):
         if tombstone:
            # We don't write tombstone files. So do nothing.
            return
-        assert self.data_file is not None, \
-            "put_metadata: no file to put metadata into"
         metadata = _adjust_metadata(metadata)
-        self.threadpool.run_in_thread(write_metadata, self.data_file, metadata)
-        self.metadata = metadata
-        self._filter_metadata()
+        data_file = os.path.join(self.put_datadir, self._obj)
+        self.threadpool.run_in_thread(write_metadata, data_file, metadata)

-    def unlinkold(self, timestamp):
+    def _delete(self):
+        if self._is_dir:
+            # Marker, or object, directory.
+            #
+            # Delete from the filesystem only if it contains no objects.
+            # If it does contain objects, then just remove the object
+            # metadata tag which will make this directory a
+            # fake-filesystem-only directory and will be deleted when the
+            # container or parent directory is deleted.
+            metadata = read_metadata(self.data_file)
+            if dir_is_object(metadata):
+                metadata[X_OBJECT_TYPE] = DIR_NON_OBJECT
+                write_metadata(self.data_file, metadata)
+            rmobjdir(self.data_file)
+        else:
+            # Delete file object
+            do_unlink(self.data_file)
+
+        # Garbage collection of non-object directories. Now that we
+        # deleted the file, determine if the current directory and any
+        # parent directory may be deleted.
+        dirname = os.path.dirname(self.data_file)
+        while dirname and dirname != self._container_path:
+            # Try to remove any directories that are not objects.
+            if not rmobjdir(dirname):
+                # If a directory with objects has been found, we can stop
+                # garabe collection
+                break
+            else:
+                dirname = os.path.dirname(dirname)
+
+    def delete(self, timestamp):
         """
         Remove any older versions of the object file. Any file that has an
         older timestamp than timestamp will be deleted.

         :param timestamp: timestamp to compare with each file
         """
-        if not self.metadata or self.metadata[X_TIMESTAMP] >= timestamp:
-            return
-
-        assert self.data_file, \
-            "Have metadata, %r, but no data_file" % self.metadata
-
-        def _unlinkold():
-            if self._is_dir:
-                # Marker, or object, directory.
-                #
-                # Delete from the filesystem only if it contains no objects.
-                # If it does contain objects, then just remove the object
-                # metadata tag which will make this directory a
-                # fake-filesystem-only directory and will be deleted when the
-                # container or parent directory is deleted.
-                metadata = read_metadata(self.data_file)
-                if dir_is_object(metadata):
-                    metadata[X_OBJECT_TYPE] = DIR_NON_OBJECT
-                    write_metadata(self.data_file, metadata)
-                rmobjdir(self.data_file)
-            else:
-                # Delete file object
-                do_unlink(self.data_file)
-
-            # Garbage collection of non-object directories. Now that we
-            # deleted the file, determine if the current directory and any
-            # parent directory may be deleted.
-            dirname = os.path.dirname(self.data_file)
-            while dirname and dirname != self._container_path:
-                # Try to remove any directories that are not objects.
-                if not rmobjdir(dirname):
-                    # If a directory with objects has been found, we can stop
-                    # garabe collection
-                    break
-                else:
-                    dirname = os.path.dirname(dirname)
-
-        self.threadpool.run_in_thread(_unlinkold)
-
-        self.metadata = {}
+        timestamp_fl = float(timestamp)
+        data_file = os.path.join(self.put_datadir, self._obj)
+        try:
+            metadata = read_metadata(data_file)
+        except (GlusterFileSystemIOError, GlusterFileSystemOSError) as err:
+            if err.errno != errno.ENOENT:
+                raise
+        else:
+            try:
+                old_ts = float(metadata[X_TIMESTAMP]) >= timestamp_fl
+            except (KeyError, ValueError):
+                # If no X-Timestamp to compare against, or the timestamp is
+                # not a valid float, we'll just delete the object anyways.
+                old_ts = False
+            if not old_ts:
+                self.threadpool.run_in_thread(self._delete)
+        self._metadata = {}
         self.data_file = None

-    def get_data_file_size(self):
+    def _get_data_file_size(self):
         """
         Returns the os_path.getsize for the file. Raises an exception if this
         file does not match the Content-Length stored in the metadata, or if
@@ -795,12 +855,12 @@ class DiskFile(SwiftDiskFile):
         if self.data_file:
             def _old_getsize():
                 file_size = os_path.getsize(self.data_file)
-                if X_CONTENT_LENGTH in self.metadata:
-                    metadata_size = int(self.metadata[X_CONTENT_LENGTH])
+                if X_CONTENT_LENGTH in self._metadata:
+                    metadata_size = int(self._metadata[X_CONTENT_LENGTH])
                     if file_size != metadata_size:
                         # FIXME - bit rot detection?
-                        self.metadata[X_CONTENT_LENGTH] = file_size
-                        write_metadata(self.data_file, self.metadata)
+                        self._metadata[X_CONTENT_LENGTH] = file_size
+                        write_metadata(self.data_file, self._metadata)
                 return file_size
             file_size = self.threadpool.run_in_thread(_old_getsize)
             return file_size
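
The rewritten delete() treats a missing or unparsable X-Timestamp as "old enough to delete". That decision is a small piece of logic that can be sketched on its own; the function name and the sample metadata values below are illustrative only.

    def should_delete(metadata, request_timestamp):
        """Return True when the on-disk object is older than the request."""
        try:
            old_ts = float(metadata['X-Timestamp']) >= float(request_timestamp)
        except (KeyError, ValueError):
            # No timestamp to compare against, or not a valid float:
            # delete anyway.
            old_ts = False
        return not old_ts


    print(should_delete({'X-Timestamp': '1382625325.00000'}, '1382625999.00000'))  # True
    print(should_delete({'X-Timestamp': '1382626000.00000'}, '1382625999.00000'))  # False
    print(should_delete({}, '1382625999.00000'))                                   # True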


@@ -39,8 +39,6 @@ class ObjectController(server.ObjectController):
         kwargs.setdefault('disk_chunk_size', self.disk_chunk_size)
         kwargs.setdefault('threadpool', self.threadpools[device])
         kwargs.setdefault('obj_dir', server.DATADIR)
-        kwargs.setdefault('disallowed_metadata_keys',
-                          server.DISALLOWED_HEADERS)
         return DiskFile(self.devices, device, partition, account,
                         container, obj, self.logger, **kwargs)


@@ -18,11 +18,14 @@
 # needs
 import gluster.swift.common.constraints    # noqa

-from swift.proxy import server
+from swift.proxy.server import Application, mimetypes  # noqa
+from swift.proxy.controllers import AccountController  # noqa
+from swift.proxy.controllers import ObjectController   # noqa
+from swift.proxy.controllers import ContainerController  # noqa


 def app_factory(global_conf, **local_conf):  # noqa
     """paste.deploy app factory for creating WSGI proxy apps."""
     conf = global_conf.copy()
     conf.update(local_conf)
-    return server.Application(conf)
+    return Application(conf)


@@ -39,11 +39,11 @@ BuildRequires: python-setuptools
 Requires : memcached
 Requires : openssl
 Requires : python
-Requires : openstack-swift = 1.9.1
-Requires : openstack-swift-account = 1.9.1
-Requires : openstack-swift-container = 1.9.1
-Requires : openstack-swift-object = 1.9.1
-Requires : openstack-swift-proxy = 1.9.1
+Requires : openstack-swift = 1.10.0
+Requires : openstack-swift-account = 1.10.0
+Requires : openstack-swift-container = 1.10.0
+Requires : openstack-swift-object = 1.10.0
+Requires : openstack-swift-proxy = 1.10.0
 Obsoletes: glusterfs-swift-plugin
 Obsoletes: glusterfs-swift
 Obsoletes: glusterfs-ufo

@@ -1 +1 @@
-Subproject commit 4bd9e4584d31eb37c7e30e555daeb6b90703ee3a
+Subproject commit 7accddf1c3f54f67cf29d6eb69e416f798af6e23


@@ -1,14 +1,44 @@
+# Copyright (c) 2010-2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 # See http://code.google.com/p/python-nose/issues/detail?id=373
 # The code below enables nosetests to work with i18n _() blocks
-import __builtin__
 import sys
 import os

+try:
+    from unittest.util import safe_repr
+except ImportError:
+    # Probably py26
+    _MAX_LENGTH = 80
+
+    def safe_repr(obj, short=False):
+        try:
+            result = repr(obj)
+        except Exception:
+            result = object.__repr__(obj)
+        if not short or len(result) < _MAX_LENGTH:
+            return result
+        return result[:_MAX_LENGTH] + ' [truncated]...'
+
+# make unittests pass on all locale
+import swift
+setattr(swift, 'gettext_', lambda x: x)
+
 from swift.common.utils import readconf
-setattr(__builtin__, '_', lambda x: x)

 # Work around what seems to be a Python bug.
 # c.f. https://bugs.launchpad.net/swift/+bug/820185.
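
The safe_repr() fallback only truncates when short=True, mirroring unittest.util.safe_repr on newer Pythons. A quick illustration of its behavior; the Noisy class is made up for the example and the import fallback mirrors the hunk above.

    try:
        from unittest.util import safe_repr
    except ImportError:
        from test import safe_repr  # fallback defined above (assumed import path)


    class Noisy(object):
        def __repr__(self):
            return 'x' * 500


    print(safe_repr([1, 2, 3]))            # short reprs come back unchanged
    print(safe_repr(Noisy()))              # short=False: full repr, no truncation
    print(safe_repr(Noisy(), short=True))  # clipped to 80 chars + ' [truncated]...'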


@@ -1,19 +1,4 @@
-# Copyright (c) 2010-2013 OpenStack, LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Copyright (c) 2013 Red Hat, Inc.
+# Copyright (c) 2010-2012 OpenStack Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -43,6 +28,8 @@ from nose import SkipTest
 from xml.dom import minidom
 from swiftclient import get_auth

+from test import safe_repr
+

 class AuthenticationFailed(Exception):
     pass
@@ -146,12 +133,10 @@ class Connection(object):
         auth_netloc = "%s:%d" % (self.auth_host, self.auth_port)
         auth_url = auth_scheme + auth_netloc + auth_path

-        (storage_url, storage_token) = get_auth(auth_url,
-                                                auth_user, self.password,
-                                                snet=False,
-                                                tenant_name=self.account,
-                                                auth_version=self.auth_version,
-                                                os_options={})
+        (storage_url, storage_token) = get_auth(
+            auth_url, auth_user, self.password, snet=False,
+            tenant_name=self.account, auth_version=self.auth_version,
+            os_options={})

         if not (storage_url and storage_token):
             raise AuthenticationFailed()
@@ -233,18 +218,22 @@ class Connection(object):
         self.response = None
         try_count = 0
+        fail_messages = []
         while try_count < 5:
             try_count += 1

             try:
                 self.response = try_request()
-            except httplib.HTTPException:
+            except httplib.HTTPException as e:
+                fail_messages.append(safe_repr(e))
                 continue

             if self.response.status == 401:
+                fail_messages.append("Response 401")
                 self.authenticate()
                 continue
             elif self.response.status == 503:
+                fail_messages.append("Response 503")
                 if try_count != 5:
                     time.sleep(5)
                 continue
@@ -254,7 +243,11 @@ class Connection(object):
         if self.response:
             return self.response.status

-        raise RequestError('Unable to complete http request')
+        request = "{method} {path} headers: {headers} data: {data}".format(
+            method=method, path=path, headers=headers, data=data)
+        raise RequestError('Unable to complete http request: %s. '
+                           'Attempts: %s, Failures: %s' %
+                           (request, len(fail_messages), fail_messages))

     def put_start(self, path, hdrs={}, parms={}, cfg={}, chunked=False):
         self.http_connect()
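
The change to make_request() collects a short failure note on every retry so the final RequestError explains what actually went wrong. The shape of that loop, reduced to its essentials; the try_request stub and function name below are hypothetical.

    def make_request_with_history(try_request, attempts=5):
        fail_messages = []
        for _ in range(attempts):
            try:
                return try_request()
            except Exception as e:
                fail_messages.append(repr(e))
        raise RuntimeError('Unable to complete http request. '
                           'Attempts: %s, Failures: %s'
                           % (len(fail_messages), fail_messages))
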
@@ -329,21 +322,21 @@ class Account(Base):
         return Container(self.conn, self.name, container_name)

     def containers(self, hdrs={}, parms={}, cfg={}):
-        format = parms.get('format', None)
-        if format not in [None, 'json', 'xml']:
-            raise RequestError('Invalid format: %s' % format)
-        if format is None and 'format' in parms:
+        format_type = parms.get('format', None)
+        if format_type not in [None, 'json', 'xml']:
+            raise RequestError('Invalid format: %s' % format_type)
+        if format_type is None and 'format' in parms:
             del parms['format']

         status = self.conn.make_request('GET', self.path, hdrs=hdrs,
                                         parms=parms, cfg=cfg)
         if status == 200:
-            if format == 'json':
+            if format_type == 'json':
                 conts = json.loads(self.conn.response.read())
                 for cont in conts:
                     cont['name'] = cont['name'].encode('utf-8')
                 return conts
-            elif format == 'xml':
+            elif format_type == 'xml':
                 conts = []
                 tree = minidom.parseString(self.conn.response.read())
                 for x in tree.getElementsByTagName('container'):
@@ -406,8 +399,8 @@ class Container(Base):
     def delete_files(self):
         for f in listing_items(self.files):
-            file = self.file(f)
-            if not file.delete():
+            file_item = self.file(f)
+            if not file_item.delete():
                 return False

         return listing_empty(self.files)
@@ -419,37 +412,39 @@ class Container(Base):
         return File(self.conn, self.account, self.name, file_name)

     def files(self, hdrs={}, parms={}, cfg={}):
-        format = parms.get('format', None)
-        if format not in [None, 'json', 'xml']:
-            raise RequestError('Invalid format: %s' % format)
-        if format is None and 'format' in parms:
+        format_type = parms.get('format', None)
+        if format_type not in [None, 'json', 'xml']:
+            raise RequestError('Invalid format: %s' % format_type)
+        if format_type is None and 'format' in parms:
             del parms['format']

         status = self.conn.make_request('GET', self.path, hdrs=hdrs,
                                         parms=parms, cfg=cfg)
         if status == 200:
-            if format == 'json':
+            if format_type == 'json':
                 files = json.loads(self.conn.response.read())

-                for file in files:
-                    file['name'] = file['name'].encode('utf-8')
-                    file['content_type'] = file['content_type'].encode('utf-8')
+                for file_item in files:
+                    file_item['name'] = file_item['name'].encode('utf-8')
+                    file_item['content_type'] = file_item['content_type'].\
+                        encode('utf-8')
                 return files
-            elif format == 'xml':
+            elif format_type == 'xml':
                 files = []
                 tree = minidom.parseString(self.conn.response.read())
                 for x in tree.getElementsByTagName('object'):
-                    file = {}
+                    file_item = {}
                     for key in ['name', 'hash', 'bytes', 'content_type',
                                 'last_modified']:
-                        file[key] = x.getElementsByTagName(key)[0].\
+                        file_item[key] = x.getElementsByTagName(key)[0].\
                             childNodes[0].nodeValue
-                    files.append(file)
+                    files.append(file_item)

-                for file in files:
-                    file['name'] = file['name'].encode('utf-8')
-                    file['content_type'] = file['content_type'].encode('utf-8')
+                for file_item in files:
+                    file_item['name'] = file_item['name'].encode('utf-8')
+                    file_item['content_type'] = file_item['content_type'].\
+                        encode('utf-8')
                 return files
             else:
                 content = self.conn.response.read()
@@ -616,11 +611,11 @@ class File(Base):
                   callback=None, cfg={}):
         if size > 0:
-            range = 'bytes=%d-%d' % (offset, (offset + size) - 1)
+            range_string = 'bytes=%d-%d' % (offset, (offset + size) - 1)
             if hdrs:
-                hdrs['Range'] = range
+                hdrs['Range'] = range_string
             else:
-                hdrs = {'Range': range}
+                hdrs = {'Range': range_string}

         status = self.conn.make_request('GET', self.path, hdrs=hdrs,
                                         cfg=cfg)
@@ -736,7 +731,7 @@ class File(Base):
                     callback(transferred, self.size)

             self.conn.put_end()
-        except socket.timeout, err:
+        except socket.timeout as err:
             raise err

         if (self.conn.response.status < 200) or \

(File diff suppressed because it is too large.)


@@ -1,19 +1,4 @@
-# Copyright (c) 2010-2013 OpenStack, LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Copyright (c) 2013 Red Hat, Inc.
+# Copyright (c) 2010-2012 OpenStack Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -28,6 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+from httplib import HTTPException
 import os
 import socket
 import sys
@@ -35,7 +21,7 @@ from time import sleep
 from test import get_config

-from swiftclient import get_auth, http_connection, HTTPException
+from swiftclient import get_auth, http_connection

 conf = get_config('func_test')
 web_front_end = conf.get('web_front_end', 'integral')
@@ -57,8 +43,8 @@ if conf:
     if 'auth_prefix' not in conf:
         conf['auth_prefix'] = '/'
     try:
-        swift_test_auth += \
-            '://%(auth_host)s:%(auth_port)s%(auth_prefix)s' % conf
+        suffix = '://%(auth_host)s:%(auth_port)s%(auth_prefix)s' % conf
+        swift_test_auth += suffix
     except KeyError:
         pass  # skip
@@ -71,17 +57,17 @@ if conf:
     swift_test_user[0] = '%(username)s' % conf
     swift_test_key[0] = conf['password']
     try:
-        swift_test_user[1] = '%s%s' % \
-            ('%s:' % conf['account2'] if 'account2' in conf else '',
-             conf['username2'])
+        swift_test_user[1] = '%s%s' % (
+            '%s:' % conf['account2'] if 'account2' in conf else '',
+            conf['username2'])
         swift_test_key[1] = conf['password2']
-    except KeyError, err:
+    except KeyError as err:
         pass  # old conf, no second account tests can be run
     try:
         swift_test_user[2] = '%s%s' % ('%s:' % conf['account'] if 'account'
                                        in conf else '', conf['username3'])
         swift_test_key[2] = conf['password3']
-    except KeyError, err:
+    except KeyError as err:
         pass  # old conf, no third account tests can be run

     for _ in range(3):
@@ -99,7 +85,8 @@ if conf:
         swift_test_key[2] = conf['password3']

     for _ in range(3):
-        swift_test_perm[_] = swift_test_tenant[_] + ':' + swift_test_user[_]
+        swift_test_perm[_] = swift_test_tenant[_] + ':' \
+            + swift_test_user[_]

 skip = not all([swift_test_auth, swift_test_user[0], swift_test_key[0]])
 if skip:
@@ -108,12 +95,12 @@ if skip:
 skip2 = not all([not skip, swift_test_user[1], swift_test_key[1]])
 if not skip and skip2:
     print >>sys.stderr, \
         'SKIPPING SECOND ACCOUNT FUNCTIONAL TESTS DUE TO NO CONFIG FOR THEM'

 skip3 = not all([not skip, swift_test_user[2], swift_test_key[2]])
 if not skip and skip3:
     print >>sys.stderr, \
         'SKIPPING THIRD ACCOUNT FUNCTIONAL TESTS DUE TO NO CONFIG FOR THEM'


 class AuthError(Exception):
@@ -160,7 +147,8 @@ def retry(func, *args, **kwargs):
             parsed[use_account], conn[use_account] = \
                 http_connection(url[use_account])
         return func(url[use_account], token[use_account],
-                    parsed[use_account], conn[use_account], *args, **kwargs)
+                    parsed[use_account], conn[use_account],
+                    *args, **kwargs)
     except (socket.error, HTTPException):
         if attempts > retries:
             raise


@@ -1,6 +1,6 @@
 #!/usr/bin/python
-# Copyright (c) 2010-2013 OpenStack, LLC.
+# Copyright (c) 2010-2012 OpenStack Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -15,22 +15,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-# Copyright (c) 2013 Red Hat, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
 import unittest
 from nose import SkipTest
@@ -45,16 +29,20 @@ class TestAccount(unittest.TestCase):
     def test_metadata(self):
         if skip:
             raise SkipTest
+
         def post(url, token, parsed, conn, value):
             conn.request('POST', parsed.path, '',
                          {'X-Auth-Token': token, 'X-Account-Meta-Test': value})
             return check_response(conn)
+
         def head(url, token, parsed, conn):
             conn.request('HEAD', parsed.path, '', {'X-Auth-Token': token})
             return check_response(conn)
+
         def get(url, token, parsed, conn):
             conn.request('GET', parsed.path, '', {'X-Auth-Token': token})
             return check_response(conn)
+
         resp = retry(post, '')
         resp.read()
         self.assertEquals(resp.status, 204)
@@ -121,13 +109,16 @@ class TestAccount(unittest.TestCase):
     def test_multi_metadata(self):
         if skip:
             raise SkipTest
+
         def post(url, token, parsed, conn, name, value):
             conn.request('POST', parsed.path, '',
                          {'X-Auth-Token': token, name: value})
             return check_response(conn)
+
         def head(url, token, parsed, conn):
             conn.request('HEAD', parsed.path, '', {'X-Auth-Token': token})
             return check_response(conn)
+
         resp = retry(post, 'X-Account-Meta-One', '1')
         resp.read()
         self.assertEquals(resp.status, 204)
@@ -147,26 +138,30 @@ class TestAccount(unittest.TestCase):
     def test_bad_metadata(self):
         if skip:
             raise SkipTest
+
         def post(url, token, parsed, conn, extra_headers):
             headers = {'X-Auth-Token': token}
             headers.update(extra_headers)
             conn.request('POST', parsed.path, '', headers)
             return check_response(conn)
+
         resp = retry(post,
                      {'X-Account-Meta-' + ('k' * MAX_META_NAME_LENGTH): 'v'})
         resp.read()
         self.assertEquals(resp.status, 204)
-        resp = retry(post,
-            {'X-Account-Meta-' + ('k' * (MAX_META_NAME_LENGTH + 1)): 'v'})
+        resp = retry(
+            post,
+            {'X-Account-Meta-' + ('k' * (MAX_META_NAME_LENGTH + 1)): 'v'})
         resp.read()
         self.assertEquals(resp.status, 400)
         resp = retry(post,
                      {'X-Account-Meta-Too-Long': 'k' * MAX_META_VALUE_LENGTH})
         resp.read()
         self.assertEquals(resp.status, 204)
-        resp = retry(post,
-            {'X-Account-Meta-Too-Long': 'k' * (MAX_META_VALUE_LENGTH + 1)})
+        resp = retry(
+            post,
+            {'X-Account-Meta-Too-Long': 'k' * (MAX_META_VALUE_LENGTH + 1)})
         resp.read()
         self.assertEquals(resp.status, 400)


@@ -1,21 +1,6 @@
 #!/usr/bin/python
-# Copyright (c) 2010-2013 OpenStack, LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Copyright (c) 2013 Red Hat, Inc.
+# Copyright (c) 2010-2012 OpenStack Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -39,7 +24,7 @@ from swift.common.constraints import MAX_META_COUNT, MAX_META_NAME_LENGTH, \
     MAX_META_OVERALL_SIZE, MAX_META_VALUE_LENGTH
 from swift_testing import check_response, retry, skip, skip2, skip3, \
     swift_test_perm, web_front_end


 class TestContainer(unittest.TestCase):
@@ -48,10 +33,12 @@ class TestContainer(unittest.TestCase):
         if skip:
             raise SkipTest
         self.name = uuid4().hex
+
         def put(url, token, parsed, conn):
             conn.request('PUT', parsed.path + '/' + self.name, '',
                          {'X-Auth-Token': token})
             return check_response(conn)
+
         resp = retry(put)
         resp.read()
         self.assertEquals(resp.status, 201)
@@ -59,15 +46,18 @@ class TestContainer(unittest.TestCase):
     def tearDown(self):
         if skip:
             raise SkipTest
+
         def get(url, token, parsed, conn):
             conn.request('GET', parsed.path + '/' + self.name + '?format=json',
                          '', {'X-Auth-Token': token})
             return check_response(conn)
+
         def delete(url, token, parsed, conn, obj):
             conn.request('DELETE',
                          '/'.join([parsed.path, self.name, obj['name']]), '',
                          {'X-Auth-Token': token})
             return check_response(conn)
+
         while True:
             resp = retry(get)
             body = resp.read()
@@ -79,10 +69,12 @@ class TestContainer(unittest.TestCase):
                 resp = retry(delete, obj)
                 resp.read()
                 self.assertEquals(resp.status, 204)
+
         def delete(url, token, parsed, conn):
             conn.request('DELETE', parsed.path + '/' + self.name, '',
                          {'X-Auth-Token': token})
             return check_response(conn)
+
         resp = retry(delete)
         resp.read()
         self.assertEquals(resp.status, 204)
@@ -90,14 +82,17 @@ class TestContainer(unittest.TestCase):
     def test_multi_metadata(self):
         if skip:
             raise SkipTest
+
         def post(url, token, parsed, conn, name, value):
             conn.request('POST', parsed.path + '/' + self.name, '',
                          {'X-Auth-Token': token, name: value})
             return check_response(conn)
+
         def head(url, token, parsed, conn):
             conn.request('HEAD', parsed.path + '/' + self.name, '',
                          {'X-Auth-Token': token})
             return check_response(conn)
+
         resp = retry(post, 'X-Container-Meta-One', '1')
         resp.read()
         self.assertEquals(resp.status, 204)
@@ -154,27 +149,33 @@ class TestContainer(unittest.TestCase):
         resp.read()
         self.assert_(resp.status in (200, 204), resp.status)
         self.assertEquals(resp.getheader(uni_key.encode('utf-8')),
                           uni_value.encode('utf-8'))

     def test_PUT_metadata(self):
         if skip:
             raise SkipTest
+
         def put(url, token, parsed, conn, name, value):
             conn.request('PUT', parsed.path + '/' + name, '',
-                         {'X-Auth-Token': token, 'X-Container-Meta-Test': value})
+                         {'X-Auth-Token': token,
+                          'X-Container-Meta-Test': value})
             return check_response(conn)
+
         def head(url, token, parsed, conn, name):
             conn.request('HEAD', parsed.path + '/' + name, '',
                          {'X-Auth-Token': token})
             return check_response(conn)
+
         def get(url, token, parsed, conn, name):
             conn.request('GET', parsed.path + '/' + name, '',
                          {'X-Auth-Token': token})
             return check_response(conn)
+
         def delete(url, token, parsed, conn, name):
             conn.request('DELETE', parsed.path + '/' + name, '',
                          {'X-Auth-Token': token})
             return check_response(conn)
+
         name = uuid4().hex
         resp = retry(put, name, 'Value')
         resp.read()
@@ -210,18 +211,23 @@ class TestContainer(unittest.TestCase):
     def test_POST_metadata(self):
         if skip:
             raise SkipTest
+
         def post(url, token, parsed, conn, value):
             conn.request('POST', parsed.path + '/' + self.name, '',
-                         {'X-Auth-Token': token, 'X-Container-Meta-Test': value})
+                         {'X-Auth-Token': token,
+                          'X-Container-Meta-Test': value})
             return check_response(conn)
+
         def head(url, token, parsed, conn):
             conn.request('HEAD', parsed.path + '/' + self.name, '',
                          {'X-Auth-Token': token})
             return check_response(conn)
+
         def get(url, token, parsed, conn):
             conn.request('GET', parsed.path + '/' + self.name, '',
                          {'X-Auth-Token': token})
             return check_response(conn)
+
         resp = retry(head)
         resp.read()
         self.assert_(resp.status in (200, 204), resp.status)
@@ -245,26 +251,31 @@ class TestContainer(unittest.TestCase):
     def test_PUT_bad_metadata(self):
         if skip:
             raise SkipTest
+
         def put(url, token, parsed, conn, name, extra_headers):
             headers = {'X-Auth-Token': token}
             headers.update(extra_headers)
             conn.request('PUT', parsed.path + '/' + name, '', headers)
             return check_response(conn)
+
         def delete(url, token, parsed, conn, name):
             conn.request('DELETE', parsed.path + '/' + name, '',
                          {'X-Auth-Token': token})
             return check_response(conn)
+
         name = uuid4().hex
-        resp = retry(put, name,
-            {'X-Container-Meta-' + ('k' * MAX_META_NAME_LENGTH): 'v'})
+        resp = retry(
+            put, name,
+            {'X-Container-Meta-' + ('k' * MAX_META_NAME_LENGTH): 'v'})
         resp.read()
         self.assertEquals(resp.status, 201)
         resp = retry(delete, name)
         resp.read()
         self.assertEquals(resp.status, 204)
         name = uuid4().hex
-        resp = retry(put, name,
-            {'X-Container-Meta-' + ('k' * (MAX_META_NAME_LENGTH + 1)): 'v'})
+        resp = retry(
+            put, name,
+            {'X-Container-Meta-' + ('k' * (MAX_META_NAME_LENGTH + 1)): 'v'})
         resp.read()
         self.assertEquals(resp.status, 400)
         resp = retry(delete, name)
@@ -272,16 +283,18 @@ class TestContainer(unittest.TestCase):
         self.assertEquals(resp.status, 404)

         name = uuid4().hex
-        resp = retry(put, name,
-            {'X-Container-Meta-Too-Long': 'k' * MAX_META_VALUE_LENGTH})
+        resp = retry(
+            put, name,
+            {'X-Container-Meta-Too-Long': 'k' * MAX_META_VALUE_LENGTH})
         resp.read()
         self.assertEquals(resp.status, 201)
         resp = retry(delete, name)
         resp.read()
         self.assertEquals(resp.status, 204)
         name = uuid4().hex
-        resp = retry(put, name,
-            {'X-Container-Meta-Too-Long': 'k' * (MAX_META_VALUE_LENGTH + 1)})
+        resp = retry(
+            put, name,
+            {'X-Container-Meta-Too-Long': 'k' * (MAX_META_VALUE_LENGTH + 1)})
         resp.read()
         self.assertEquals(resp.status, 400)
         resp = retry(delete, name)
@@ -340,26 +353,32 @@ class TestContainer(unittest.TestCase):
     def test_POST_bad_metadata(self):
         if skip:
             raise SkipTest
+
         def post(url, token, parsed, conn, extra_headers):
             headers = {'X-Auth-Token': token}
             headers.update(extra_headers)
             conn.request('POST', parsed.path + '/' + self.name, '', headers)
return check_response(conn) return check_response(conn)
resp = retry(post,
{'X-Container-Meta-' + ('k' * MAX_META_NAME_LENGTH): 'v'}) resp = retry(
post,
{'X-Container-Meta-' + ('k' * MAX_META_NAME_LENGTH): 'v'})
resp.read() resp.read()
self.assertEquals(resp.status, 204) self.assertEquals(resp.status, 204)
resp = retry(post, resp = retry(
{'X-Container-Meta-' + ('k' * (MAX_META_NAME_LENGTH + 1)): 'v'}) post,
{'X-Container-Meta-' + ('k' * (MAX_META_NAME_LENGTH + 1)): 'v'})
resp.read() resp.read()
self.assertEquals(resp.status, 400) self.assertEquals(resp.status, 400)
resp = retry(post, resp = retry(
{'X-Container-Meta-Too-Long': 'k' * MAX_META_VALUE_LENGTH}) post,
{'X-Container-Meta-Too-Long': 'k' * MAX_META_VALUE_LENGTH})
resp.read() resp.read()
self.assertEquals(resp.status, 204) self.assertEquals(resp.status, 204)
resp = retry(post, resp = retry(
{'X-Container-Meta-Too-Long': 'k' * (MAX_META_VALUE_LENGTH + 1)}) post,
{'X-Container-Meta-Too-Long': 'k' * (MAX_META_VALUE_LENGTH + 1)})
resp.read() resp.read()
self.assertEquals(resp.status, 400) self.assertEquals(resp.status, 400)
@ -399,36 +418,42 @@ class TestContainer(unittest.TestCase):
def test_public_container(self): def test_public_container(self):
if skip: if skip:
raise SkipTest raise SkipTest
def get(url, token, parsed, conn): def get(url, token, parsed, conn):
conn.request('GET', parsed.path + '/' + self.name) conn.request('GET', parsed.path + '/' + self.name)
return check_response(conn) return check_response(conn)
try: try:
resp = retry(get) resp = retry(get)
raise Exception('Should not have been able to GET') raise Exception('Should not have been able to GET')
except Exception, err: except Exception as err:
self.assert_(str(err).startswith('No result after '), err) self.assert_(str(err).startswith('No result after '), err)
def post(url, token, parsed, conn): def post(url, token, parsed, conn):
conn.request('POST', parsed.path + '/' + self.name, '', conn.request('POST', parsed.path + '/' + self.name, '',
{'X-Auth-Token': token, {'X-Auth-Token': token,
'X-Container-Read': '.r:*,.rlistings'}) 'X-Container-Read': '.r:*,.rlistings'})
return check_response(conn) return check_response(conn)
resp = retry(post) resp = retry(post)
resp.read() resp.read()
self.assertEquals(resp.status, 204) self.assertEquals(resp.status, 204)
resp = retry(get) resp = retry(get)
resp.read() resp.read()
self.assertEquals(resp.status, 204) self.assertEquals(resp.status, 204)
def post(url, token, parsed, conn): def post(url, token, parsed, conn):
conn.request('POST', parsed.path + '/' + self.name, '', conn.request('POST', parsed.path + '/' + self.name, '',
{'X-Auth-Token': token, 'X-Container-Read': ''}) {'X-Auth-Token': token, 'X-Container-Read': ''})
return check_response(conn) return check_response(conn)
resp = retry(post) resp = retry(post)
resp.read() resp.read()
self.assertEquals(resp.status, 204) self.assertEquals(resp.status, 204)
try: try:
resp = retry(get) resp = retry(get)
raise Exception('Should not have been able to GET') raise Exception('Should not have been able to GET')
except Exception, err: except Exception as err:
self.assert_(str(err).startswith('No result after '), err) self.assert_(str(err).startswith('No result after '), err)
def test_cross_account_container(self): def test_cross_account_container(self):
@ -436,27 +461,34 @@ class TestContainer(unittest.TestCase):
raise SkipTest raise SkipTest
# Obtain the first account's string # Obtain the first account's string
first_account = ['unknown'] first_account = ['unknown']
def get1(url, token, parsed, conn): def get1(url, token, parsed, conn):
first_account[0] = parsed.path first_account[0] = parsed.path
conn.request('HEAD', parsed.path + '/' + self.name, '', conn.request('HEAD', parsed.path + '/' + self.name, '',
{'X-Auth-Token': token}) {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(get1) resp = retry(get1)
resp.read() resp.read()
# Ensure we can't access the container with the second account # Ensure we can't access the container with the second account
def get2(url, token, parsed, conn): def get2(url, token, parsed, conn):
conn.request('GET', first_account[0] + '/' + self.name, '', conn.request('GET', first_account[0] + '/' + self.name, '',
{'X-Auth-Token': token}) {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(get2, use_account=2) resp = retry(get2, use_account=2)
resp.read() resp.read()
self.assertEquals(resp.status, 403) self.assertEquals(resp.status, 403)
# Make the container accessible by the second account # Make the container accessible by the second account
def post(url, token, parsed, conn): def post(url, token, parsed, conn):
conn.request('POST', parsed.path + '/' + self.name, '', conn.request('POST', parsed.path + '/' + self.name, '',
{'X-Auth-Token': token, 'X-Container-Read': swift_test_perm[1], {'X-Auth-Token': token,
'X-Container-Write': swift_test_perm[1]}) 'X-Container-Read': swift_test_perm[1],
'X-Container-Write': swift_test_perm[1]})
return check_response(conn) return check_response(conn)
resp = retry(post) resp = retry(post)
resp.read() resp.read()
self.assertEquals(resp.status, 204) self.assertEquals(resp.status, 204)
@ -464,12 +496,14 @@ class TestContainer(unittest.TestCase):
resp = retry(get2, use_account=2) resp = retry(get2, use_account=2)
resp.read() resp.read()
self.assertEquals(resp.status, 204) self.assertEquals(resp.status, 204)
# Make the container private again # Make the container private again
def post(url, token, parsed, conn): def post(url, token, parsed, conn):
conn.request('POST', parsed.path + '/' + self.name, '', conn.request('POST', parsed.path + '/' + self.name, '',
{'X-Auth-Token': token, 'X-Container-Read': '', {'X-Auth-Token': token, 'X-Container-Read': '',
'X-Container-Write': ''}) 'X-Container-Write': ''})
return check_response(conn) return check_response(conn)
resp = retry(post) resp = retry(post)
resp.read() resp.read()
self.assertEquals(resp.status, 204) self.assertEquals(resp.status, 204)
@ -483,27 +517,33 @@ class TestContainer(unittest.TestCase):
raise SkipTest raise SkipTest
# Obtain the first account's string # Obtain the first account's string
first_account = ['unknown'] first_account = ['unknown']
def get1(url, token, parsed, conn): def get1(url, token, parsed, conn):
first_account[0] = parsed.path first_account[0] = parsed.path
conn.request('HEAD', parsed.path + '/' + self.name, '', conn.request('HEAD', parsed.path + '/' + self.name, '',
{'X-Auth-Token': token}) {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(get1) resp = retry(get1)
resp.read() resp.read()
# Ensure we can't access the container with the second account # Ensure we can't access the container with the second account
def get2(url, token, parsed, conn): def get2(url, token, parsed, conn):
conn.request('GET', first_account[0] + '/' + self.name, '', conn.request('GET', first_account[0] + '/' + self.name, '',
{'X-Auth-Token': token}) {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(get2, use_account=2) resp = retry(get2, use_account=2)
resp.read() resp.read()
self.assertEquals(resp.status, 403) self.assertEquals(resp.status, 403)
# Make the container completely public # Make the container completely public
def post(url, token, parsed, conn): def post(url, token, parsed, conn):
conn.request('POST', parsed.path + '/' + self.name, '', conn.request('POST', parsed.path + '/' + self.name, '',
{'X-Auth-Token': token, {'X-Auth-Token': token,
'X-Container-Read': '.r:*,.rlistings'}) 'X-Container-Read': '.r:*,.rlistings'})
return check_response(conn) return check_response(conn)
resp = retry(post) resp = retry(post)
resp.read() resp.read()
self.assertEquals(resp.status, 204) self.assertEquals(resp.status, 204)
@ -511,20 +551,24 @@ class TestContainer(unittest.TestCase):
resp = retry(get2, use_account=2) resp = retry(get2, use_account=2)
resp.read() resp.read()
self.assertEquals(resp.status, 204) self.assertEquals(resp.status, 204)
# But we shouldn't be able to write with the second account # But we shouldn't be able to write with the second account
def put2(url, token, parsed, conn): def put2(url, token, parsed, conn):
conn.request('PUT', first_account[0] + '/' + self.name + '/object', conn.request('PUT', first_account[0] + '/' + self.name + '/object',
'test object', {'X-Auth-Token': token}) 'test object', {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(put2, use_account=2) resp = retry(put2, use_account=2)
resp.read() resp.read()
self.assertEquals(resp.status, 403) self.assertEquals(resp.status, 403)
# Now make the container also writeable by the second account # Now make the container also writeable by the second account
def post(url, token, parsed, conn): def post(url, token, parsed, conn):
conn.request('POST', parsed.path + '/' + self.name, '', conn.request('POST', parsed.path + '/' + self.name, '',
{'X-Auth-Token': token, {'X-Auth-Token': token,
'X-Container-Write': swift_test_perm[1]}) 'X-Container-Write': swift_test_perm[1]})
return check_response(conn) return check_response(conn)
resp = retry(post) resp = retry(post)
resp.read() resp.read()
self.assertEquals(resp.status, 204) self.assertEquals(resp.status, 204)
@ -542,26 +586,33 @@ class TestContainer(unittest.TestCase):
raise SkipTest raise SkipTest
# Obtain the first account's string # Obtain the first account's string
first_account = ['unknown'] first_account = ['unknown']
def get1(url, token, parsed, conn): def get1(url, token, parsed, conn):
first_account[0] = parsed.path first_account[0] = parsed.path
conn.request('HEAD', parsed.path + '/' + self.name, '', conn.request('HEAD', parsed.path + '/' + self.name, '',
{'X-Auth-Token': token}) {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(get1) resp = retry(get1)
resp.read() resp.read()
# Ensure we can't access the container with the third account # Ensure we can't access the container with the third account
def get3(url, token, parsed, conn): def get3(url, token, parsed, conn):
conn.request('GET', first_account[0] + '/' + self.name, '', conn.request('GET', first_account[0] + '/' + self.name, '',
{'X-Auth-Token': token}) {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(get3, use_account=3) resp = retry(get3, use_account=3)
resp.read() resp.read()
self.assertEquals(resp.status, 403) self.assertEquals(resp.status, 403)
# Make the container accessible by the third account # Make the container accessible by the third account
def post(url, token, parsed, conn): def post(url, token, parsed, conn):
conn.request('POST', parsed.path + '/' + self.name, '', conn.request('POST', parsed.path + '/' + self.name, '',
{'X-Auth-Token': token, 'X-Container-Read': swift_test_perm[2]}) {'X-Auth-Token': token,
'X-Container-Read': swift_test_perm[2]})
return check_response(conn) return check_response(conn)
resp = retry(post) resp = retry(post)
resp.read() resp.read()
self.assertEquals(resp.status, 204) self.assertEquals(resp.status, 204)
@ -569,20 +620,24 @@ class TestContainer(unittest.TestCase):
resp = retry(get3, use_account=3) resp = retry(get3, use_account=3)
resp.read() resp.read()
self.assertEquals(resp.status, 204) self.assertEquals(resp.status, 204)
# But we shouldn't be able to write with the third account # But we shouldn't be able to write with the third account
def put3(url, token, parsed, conn): def put3(url, token, parsed, conn):
conn.request('PUT', first_account[0] + '/' + self.name + '/object', conn.request('PUT', first_account[0] + '/' + self.name + '/object',
'test object', {'X-Auth-Token': token}) 'test object', {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(put3, use_account=3) resp = retry(put3, use_account=3)
resp.read() resp.read()
self.assertEquals(resp.status, 403) self.assertEquals(resp.status, 403)
# Now make the container also writeable by the third account # Now make the container also writeable by the third account
def post(url, token, parsed, conn): def post(url, token, parsed, conn):
conn.request('POST', parsed.path + '/' + self.name, '', conn.request('POST', parsed.path + '/' + self.name, '',
{'X-Auth-Token': token, {'X-Auth-Token': token,
'X-Container-Write': swift_test_perm[2]}) 'X-Container-Write': swift_test_perm[2]})
return check_response(conn) return check_response(conn)
resp = retry(post) resp = retry(post)
resp.read() resp.read()
self.assertEquals(resp.status, 204) self.assertEquals(resp.status, 204)
@ -601,9 +656,10 @@ class TestContainer(unittest.TestCase):
def put(url, token, parsed, conn): def put(url, token, parsed, conn):
container_name = 'X' * 2048 container_name = 'X' * 2048
conn.request('PUT', '%s/%s' % (parsed.path, conn.request('PUT', '%s/%s' % (parsed.path, container_name),
container_name), 'there', {'X-Auth-Token': token}) 'there', {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(put) resp = retry(put)
resp.read() resp.read()
self.assertEquals(resp.status, 400) self.assertEquals(resp.status, 400)
@ -618,6 +674,7 @@ class TestContainer(unittest.TestCase):
conn.request('PUT', '%s/abc%%00def' % parsed.path, '', conn.request('PUT', '%s/abc%%00def' % parsed.path, '',
{'X-Auth-Token': token}) {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(put) resp = retry(put)
if (web_front_end == 'apache2'): if (web_front_end == 'apache2'):
self.assertEquals(resp.status, 404) self.assertEquals(resp.status, 404)
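Note: the container tests above all share one calling convention: each request is written as a small callable taking (url, token, parsed, conn, *extra) and returning check_response(conn), and is driven through retry(). The following is only a stripped-down sketch of that pattern, not the real helpers from the functional test suite; the endpoint, token, and retry count are placeholder assumptions, and the real retry() also re-authenticates and honors use_account=N.

# Illustrative sketch only -- placeholder endpoint/token, no re-auth logic.
import httplib
import urlparse

SWIFT_URL = 'http://127.0.0.1:8080/v1/AUTH_test'   # assumption
TOKEN = 'AUTH_tk_placeholder'                      # assumption

def check_response(conn):
    # Hand the caller the response object; tests inspect .status themselves.
    return conn.getresponse()

def retry(func, *args, **kwargs):
    kwargs.pop('use_account', None)   # the real helper switches credentials here
    parsed = urlparse.urlparse(SWIFT_URL)
    attempts = 0
    while attempts < 5:
        attempts += 1
        conn = httplib.HTTPConnection(parsed.netloc)
        try:
            return func(SWIFT_URL, TOKEN, parsed, conn, *args, **kwargs)
        except Exception:
            continue
    raise Exception('No result after %s attempts' % attempts)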

View File

@ -1,21 +1,6 @@
#!/usr/bin/python #!/usr/bin/python
# Copyright (c) 2010-2013 OpenStack, LLC. # Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Copyright (c) 2013 Red Hat, Inc.
# #
# Licensed under the Apache License, Version 2.0 (the "License"); # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. # you may not use this file except in compliance with the License.
@ -55,8 +40,9 @@ class TestObject(unittest.TestCase):
self.obj = uuid4().hex self.obj = uuid4().hex
def put(url, token, parsed, conn): def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s/%s' % (parsed.path, self.container, conn.request('PUT', '%s/%s/%s' % (
self.obj), 'test', {'X-Auth-Token': token}) parsed.path, self.container, self.obj), 'test',
{'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(put) resp = retry(put)
resp.read() resp.read()
@ -182,7 +168,7 @@ class TestObject(unittest.TestCase):
try: try:
resp = retry(get) resp = retry(get)
raise Exception('Should not have been able to GET') raise Exception('Should not have been able to GET')
except Exception, err: except Exception as err:
self.assert_(str(err).startswith('No result after ')) self.assert_(str(err).startswith('No result after '))
def post(url, token, parsed, conn): def post(url, token, parsed, conn):
@ -207,7 +193,7 @@ class TestObject(unittest.TestCase):
try: try:
resp = retry(get) resp = retry(get)
raise Exception('Should not have been able to GET') raise Exception('Should not have been able to GET')
except Exception, err: except Exception as err:
self.assert_(str(err).startswith('No result after ')) self.assert_(str(err).startswith('No result after '))
def test_private_object(self): def test_private_object(self):
@ -216,9 +202,9 @@ class TestObject(unittest.TestCase):
# Ensure we can't access the object with the third account # Ensure we can't access the object with the third account
def get(url, token, parsed, conn): def get(url, token, parsed, conn):
conn.request('GET', '%s/%s/%s' % (parsed.path, self.container, conn.request('GET', '%s/%s/%s' % (
self.obj), '', parsed.path, self.container, self.obj), '',
{'X-Auth-Token': token}) {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(get, use_account=3) resp = retry(get, use_account=3)
resp.read() resp.read()
@ -228,11 +214,11 @@ class TestObject(unittest.TestCase):
shared_container = uuid4().hex shared_container = uuid4().hex
def put(url, token, parsed, conn): def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s' % (parsed.path, conn.request('PUT', '%s/%s' % (
shared_container), '', parsed.path, shared_container), '',
{'X-Auth-Token': token, {'X-Auth-Token': token,
'X-Container-Read': swift_test_perm[2], 'X-Container-Read': swift_test_perm[2],
'X-Container-Write': swift_test_perm[2]}) 'X-Container-Write': swift_test_perm[2]})
return check_response(conn) return check_response(conn)
resp = retry(put) resp = retry(put)
resp.read() resp.read()
@ -240,13 +226,11 @@ class TestObject(unittest.TestCase):
# verify third account can not copy from private container # verify third account can not copy from private container
def copy(url, token, parsed, conn): def copy(url, token, parsed, conn):
conn.request('PUT', '%s/%s/%s' % (parsed.path, conn.request('PUT', '%s/%s/%s' % (
shared_container, parsed.path, shared_container, 'private_object'), '',
'private_object'), {'X-Auth-Token': token,
'', {'X-Auth-Token': token, 'Content-Length': '0',
'Content-Length': '0', 'X-Copy-From': '%s/%s' % (self.container, self.obj)})
'X-Copy-From': '%s/%s' % (self.container,
self.obj)})
return check_response(conn) return check_response(conn)
resp = retry(copy, use_account=3) resp = retry(copy, use_account=3)
resp.read() resp.read()
@ -254,8 +238,9 @@ class TestObject(unittest.TestCase):
# verify third account can write "obj1" to shared container # verify third account can write "obj1" to shared container
def put(url, token, parsed, conn): def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s/%s' % (parsed.path, shared_container, conn.request('PUT', '%s/%s/%s' % (
'obj1'), 'test', {'X-Auth-Token': token}) parsed.path, shared_container, 'obj1'), 'test',
{'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(put, use_account=3) resp = retry(put, use_account=3)
resp.read() resp.read()
@ -263,12 +248,10 @@ class TestObject(unittest.TestCase):
# verify third account can copy "obj1" to shared container # verify third account can copy "obj1" to shared container
def copy2(url, token, parsed, conn): def copy2(url, token, parsed, conn):
conn.request('COPY', '%s/%s/%s' % (parsed.path, conn.request('COPY', '%s/%s/%s' % (
shared_container, parsed.path, shared_container, 'obj1'), '',
'obj1'), {'X-Auth-Token': token,
'', {'X-Auth-Token': token, 'Destination': '%s/%s' % (shared_container, 'obj1')})
'Destination': '%s/%s' % (shared_container,
'obj1')})
return check_response(conn) return check_response(conn)
resp = retry(copy2, use_account=3) resp = retry(copy2, use_account=3)
resp.read() resp.read()
@ -276,12 +259,11 @@ class TestObject(unittest.TestCase):
# verify third account STILL can not copy from private container # verify third account STILL can not copy from private container
def copy3(url, token, parsed, conn): def copy3(url, token, parsed, conn):
conn.request('COPY', '%s/%s/%s' % (parsed.path, conn.request('COPY', '%s/%s/%s' % (
self.container, parsed.path, self.container, self.obj), '',
self.obj), {'X-Auth-Token': token,
'', {'X-Auth-Token': token, 'Destination': '%s/%s' % (shared_container,
'Destination': '%s/%s' % (shared_container, 'private_object')})
'private_object')})
return check_response(conn) return check_response(conn)
resp = retry(copy3, use_account=3) resp = retry(copy3, use_account=3)
resp.read() resp.read()
@ -289,8 +271,9 @@ class TestObject(unittest.TestCase):
# clean up "obj1" # clean up "obj1"
def delete(url, token, parsed, conn): def delete(url, token, parsed, conn):
conn.request('DELETE', '%s/%s/%s' % (parsed.path, shared_container, conn.request('DELETE', '%s/%s/%s' % (
'obj1'), '', {'X-Auth-Token': token}) parsed.path, shared_container, 'obj1'), '',
{'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(delete) resp = retry(delete)
resp.read() resp.read()
@ -316,8 +299,8 @@ class TestObject(unittest.TestCase):
# Upload the first set of segments # Upload the first set of segments
def put(url, token, parsed, conn, objnum): def put(url, token, parsed, conn, objnum):
conn.request('PUT', '%s/%s/segments1/%s' % (parsed.path, conn.request('PUT', '%s/%s/segments1/%s' % (
self.container, str(objnum)), segments1[objnum], parsed.path, self.container, str(objnum)), segments1[objnum],
{'X-Auth-Token': token}) {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
for objnum in xrange(len(segments1)): for objnum in xrange(len(segments1)):
@ -327,10 +310,11 @@ class TestObject(unittest.TestCase):
# Upload the manifest # Upload the manifest
def put(url, token, parsed, conn): def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s/manifest' % (parsed.path, conn.request('PUT', '%s/%s/manifest' % (
self.container), '', {'X-Auth-Token': token, parsed.path, self.container), '', {
'X-Object-Manifest': '%s/segments1/' % self.container, 'X-Auth-Token': token,
'Content-Type': 'text/jibberish', 'Content-Length': '0'}) 'X-Object-Manifest': '%s/segments1/' % self.container,
'Content-Type': 'text/jibberish', 'Content-Length': '0'})
return check_response(conn) return check_response(conn)
resp = retry(put) resp = retry(put)
resp.read() resp.read()
@ -338,8 +322,8 @@ class TestObject(unittest.TestCase):
# Get the manifest (should get all the segments as the body) # Get the manifest (should get all the segments as the body)
def get(url, token, parsed, conn): def get(url, token, parsed, conn):
conn.request('GET', '%s/%s/manifest' % (parsed.path, conn.request('GET', '%s/%s/manifest' % (
self.container), '', {'X-Auth-Token': token}) parsed.path, self.container), '', {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(get) resp = retry(get)
self.assertEquals(resp.read(), ''.join(segments1)) self.assertEquals(resp.read(), ''.join(segments1))
@ -348,9 +332,9 @@ class TestObject(unittest.TestCase):
# Get with a range at the start of the second segment # Get with a range at the start of the second segment
def get(url, token, parsed, conn): def get(url, token, parsed, conn):
conn.request('GET', '%s/%s/manifest' % (parsed.path, conn.request('GET', '%s/%s/manifest' % (
self.container), '', {'X-Auth-Token': token, 'Range': parsed.path, self.container), '', {
'bytes=3-'}) 'X-Auth-Token': token, 'Range': 'bytes=3-'})
return check_response(conn) return check_response(conn)
resp = retry(get) resp = retry(get)
self.assertEquals(resp.read(), ''.join(segments1[1:])) self.assertEquals(resp.read(), ''.join(segments1[1:]))
@ -358,9 +342,9 @@ class TestObject(unittest.TestCase):
# Get with a range in the middle of the second segment # Get with a range in the middle of the second segment
def get(url, token, parsed, conn): def get(url, token, parsed, conn):
conn.request('GET', '%s/%s/manifest' % (parsed.path, conn.request('GET', '%s/%s/manifest' % (
self.container), '', {'X-Auth-Token': token, 'Range': parsed.path, self.container), '', {
'bytes=5-'}) 'X-Auth-Token': token, 'Range': 'bytes=5-'})
return check_response(conn) return check_response(conn)
resp = retry(get) resp = retry(get)
self.assertEquals(resp.read(), ''.join(segments1)[5:]) self.assertEquals(resp.read(), ''.join(segments1)[5:])
@ -368,9 +352,9 @@ class TestObject(unittest.TestCase):
# Get with a full start and stop range # Get with a full start and stop range
def get(url, token, parsed, conn): def get(url, token, parsed, conn):
conn.request('GET', '%s/%s/manifest' % (parsed.path, conn.request('GET', '%s/%s/manifest' % (
self.container), '', {'X-Auth-Token': token, 'Range': parsed.path, self.container), '', {
'bytes=5-10'}) 'X-Auth-Token': token, 'Range': 'bytes=5-10'})
return check_response(conn) return check_response(conn)
resp = retry(get) resp = retry(get)
self.assertEquals(resp.read(), ''.join(segments1)[5:11]) self.assertEquals(resp.read(), ''.join(segments1)[5:11])
@ -378,8 +362,8 @@ class TestObject(unittest.TestCase):
# Upload the second set of segments # Upload the second set of segments
def put(url, token, parsed, conn, objnum): def put(url, token, parsed, conn, objnum):
conn.request('PUT', '%s/%s/segments2/%s' % (parsed.path, conn.request('PUT', '%s/%s/segments2/%s' % (
self.container, str(objnum)), segments2[objnum], parsed.path, self.container, str(objnum)), segments2[objnum],
{'X-Auth-Token': token}) {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
for objnum in xrange(len(segments2)): for objnum in xrange(len(segments2)):
@ -389,8 +373,8 @@ class TestObject(unittest.TestCase):
# Get the manifest (should still be the first segments of course) # Get the manifest (should still be the first segments of course)
def get(url, token, parsed, conn): def get(url, token, parsed, conn):
conn.request('GET', '%s/%s/manifest' % (parsed.path, conn.request('GET', '%s/%s/manifest' % (
self.container), '', {'X-Auth-Token': token}) parsed.path, self.container), '', {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(get) resp = retry(get)
self.assertEquals(resp.read(), ''.join(segments1)) self.assertEquals(resp.read(), ''.join(segments1))
@ -398,10 +382,11 @@ class TestObject(unittest.TestCase):
# Update the manifest # Update the manifest
def put(url, token, parsed, conn): def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s/manifest' % (parsed.path, conn.request('PUT', '%s/%s/manifest' % (
self.container), '', {'X-Auth-Token': token, parsed.path, self.container), '', {
'X-Object-Manifest': '%s/segments2/' % self.container, 'X-Auth-Token': token,
'Content-Length': '0'}) 'X-Object-Manifest': '%s/segments2/' % self.container,
'Content-Length': '0'})
return check_response(conn) return check_response(conn)
resp = retry(put) resp = retry(put)
resp.read() resp.read()
@ -409,8 +394,8 @@ class TestObject(unittest.TestCase):
# Get the manifest (should be the second set of segments now) # Get the manifest (should be the second set of segments now)
def get(url, token, parsed, conn): def get(url, token, parsed, conn):
conn.request('GET', '%s/%s/manifest' % (parsed.path, conn.request('GET', '%s/%s/manifest' % (
self.container), '', {'X-Auth-Token': token}) parsed.path, self.container), '', {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(get) resp = retry(get)
self.assertEquals(resp.read(), ''.join(segments2)) self.assertEquals(resp.read(), ''.join(segments2))
@ -420,8 +405,8 @@ class TestObject(unittest.TestCase):
# Ensure we can't access the manifest with the third account # Ensure we can't access the manifest with the third account
def get(url, token, parsed, conn): def get(url, token, parsed, conn):
conn.request('GET', '%s/%s/manifest' % (parsed.path, conn.request('GET', '%s/%s/manifest' % (
self.container), '', {'X-Auth-Token': token}) parsed.path, self.container), '', {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(get, use_account=3) resp = retry(get, use_account=3)
resp.read() resp.read()
@ -430,8 +415,8 @@ class TestObject(unittest.TestCase):
# Grant access to the third account # Grant access to the third account
def post(url, token, parsed, conn): def post(url, token, parsed, conn):
conn.request('POST', '%s/%s' % (parsed.path, self.container), conn.request('POST', '%s/%s' % (parsed.path, self.container),
'', {'X-Auth-Token': token, '', {'X-Auth-Token': token,
'X-Container-Read': swift_test_perm[2]}) 'X-Container-Read': swift_test_perm[2]})
return check_response(conn) return check_response(conn)
resp = retry(post) resp = retry(post)
resp.read() resp.read()
@ -439,8 +424,8 @@ class TestObject(unittest.TestCase):
# The third account should be able to get the manifest now # The third account should be able to get the manifest now
def get(url, token, parsed, conn): def get(url, token, parsed, conn):
conn.request('GET', '%s/%s/manifest' % (parsed.path, conn.request('GET', '%s/%s/manifest' % (
self.container), '', {'X-Auth-Token': token}) parsed.path, self.container), '', {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(get, use_account=3) resp = retry(get, use_account=3)
self.assertEquals(resp.read(), ''.join(segments2)) self.assertEquals(resp.read(), ''.join(segments2))
@ -459,8 +444,8 @@ class TestObject(unittest.TestCase):
# Upload the third set of segments in the other container # Upload the third set of segments in the other container
def put(url, token, parsed, conn, objnum): def put(url, token, parsed, conn, objnum):
conn.request('PUT', '%s/%s/segments3/%s' % (parsed.path, conn.request('PUT', '%s/%s/segments3/%s' % (
acontainer, str(objnum)), segments3[objnum], parsed.path, acontainer, str(objnum)), segments3[objnum],
{'X-Auth-Token': token}) {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
for objnum in xrange(len(segments3)): for objnum in xrange(len(segments3)):
@ -470,10 +455,11 @@ class TestObject(unittest.TestCase):
# Update the manifest # Update the manifest
def put(url, token, parsed, conn): def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s/manifest' % (parsed.path, conn.request('PUT', '%s/%s/manifest' % (
self.container), '', {'X-Auth-Token': token, parsed.path, self.container), '',
'X-Object-Manifest': '%s/segments3/' % acontainer, {'X-Auth-Token': token,
'Content-Length': '0'}) 'X-Object-Manifest': '%s/segments3/' % acontainer,
'Content-Length': '0'})
return check_response(conn) return check_response(conn)
resp = retry(put) resp = retry(put)
resp.read() resp.read()
@ -481,8 +467,8 @@ class TestObject(unittest.TestCase):
# Get the manifest to ensure it's the third set of segments # Get the manifest to ensure it's the third set of segments
def get(url, token, parsed, conn): def get(url, token, parsed, conn):
conn.request('GET', '%s/%s/manifest' % (parsed.path, conn.request('GET', '%s/%s/manifest' % (
self.container), '', {'X-Auth-Token': token}) parsed.path, self.container), '', {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(get) resp = retry(get)
self.assertEquals(resp.read(), ''.join(segments3)) self.assertEquals(resp.read(), ''.join(segments3))
@ -495,8 +481,8 @@ class TestObject(unittest.TestCase):
# manifest itself is not). # manifest itself is not).
def get(url, token, parsed, conn): def get(url, token, parsed, conn):
conn.request('GET', '%s/%s/manifest' % (parsed.path, conn.request('GET', '%s/%s/manifest' % (
self.container), '', {'X-Auth-Token': token}) parsed.path, self.container), '', {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(get, use_account=3) resp = retry(get, use_account=3)
resp.read() resp.read()
@ -505,8 +491,8 @@ class TestObject(unittest.TestCase):
# Grant access to the third account # Grant access to the third account
def post(url, token, parsed, conn): def post(url, token, parsed, conn):
conn.request('POST', '%s/%s' % (parsed.path, acontainer), conn.request('POST', '%s/%s' % (parsed.path, acontainer),
'', {'X-Auth-Token': token, '', {'X-Auth-Token': token,
'X-Container-Read': swift_test_perm[2]}) 'X-Container-Read': swift_test_perm[2]})
return check_response(conn) return check_response(conn)
resp = retry(post) resp = retry(post)
resp.read() resp.read()
@ -514,8 +500,8 @@ class TestObject(unittest.TestCase):
# The third account should be able to get the manifest now # The third account should be able to get the manifest now
def get(url, token, parsed, conn): def get(url, token, parsed, conn):
conn.request('GET', '%s/%s/manifest' % (parsed.path, conn.request('GET', '%s/%s/manifest' % (
self.container), '', {'X-Auth-Token': token}) parsed.path, self.container), '', {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(get, use_account=3) resp = retry(get, use_account=3)
self.assertEquals(resp.read(), ''.join(segments3)) self.assertEquals(resp.read(), ''.join(segments3))
@ -523,7 +509,8 @@ class TestObject(unittest.TestCase):
# Delete the manifest # Delete the manifest
def delete(url, token, parsed, conn, objnum): def delete(url, token, parsed, conn, objnum):
conn.request('DELETE', '%s/%s/manifest' % (parsed.path, conn.request('DELETE', '%s/%s/manifest' % (
parsed.path,
self.container), '', {'X-Auth-Token': token}) self.container), '', {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(delete, objnum) resp = retry(delete, objnum)
@ -532,8 +519,9 @@ class TestObject(unittest.TestCase):
# Delete the third set of segments # Delete the third set of segments
def delete(url, token, parsed, conn, objnum): def delete(url, token, parsed, conn, objnum):
conn.request('DELETE', '%s/%s/segments3/%s' % (parsed.path, conn.request('DELETE', '%s/%s/segments3/%s' % (
acontainer, str(objnum)), '', {'X-Auth-Token': token}) parsed.path, acontainer, str(objnum)), '',
{'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
for objnum in xrange(len(segments3)): for objnum in xrange(len(segments3)):
resp = retry(delete, objnum) resp = retry(delete, objnum)
@ -542,8 +530,9 @@ class TestObject(unittest.TestCase):
# Delete the second set of segments # Delete the second set of segments
def delete(url, token, parsed, conn, objnum): def delete(url, token, parsed, conn, objnum):
conn.request('DELETE', '%s/%s/segments2/%s' % (parsed.path, conn.request('DELETE', '%s/%s/segments2/%s' % (
self.container, str(objnum)), '', {'X-Auth-Token': token}) parsed.path, self.container, str(objnum)), '',
{'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
for objnum in xrange(len(segments2)): for objnum in xrange(len(segments2)):
resp = retry(delete, objnum) resp = retry(delete, objnum)
@ -552,8 +541,9 @@ class TestObject(unittest.TestCase):
# Delete the first set of segments # Delete the first set of segments
def delete(url, token, parsed, conn, objnum): def delete(url, token, parsed, conn, objnum):
conn.request('DELETE', '%s/%s/segments1/%s' % (parsed.path, conn.request('DELETE', '%s/%s/segments1/%s' % (
self.container, str(objnum)), '', {'X-Auth-Token': token}) parsed.path, self.container, str(objnum)), '',
{'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
for objnum in xrange(len(segments1)): for objnum in xrange(len(segments1)):
resp = retry(delete, objnum) resp = retry(delete, objnum)
@ -563,7 +553,7 @@ class TestObject(unittest.TestCase):
# Delete the extra container # Delete the extra container
def delete(url, token, parsed, conn): def delete(url, token, parsed, conn):
conn.request('DELETE', '%s/%s' % (parsed.path, acontainer), '', conn.request('DELETE', '%s/%s' % (parsed.path, acontainer), '',
{'X-Auth-Token': token}) {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(delete) resp = retry(delete)
resp.read() resp.read()
@ -574,8 +564,8 @@ class TestObject(unittest.TestCase):
raise SkipTest raise SkipTest
def put(url, token, parsed, conn): def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s/hi' % (parsed.path, conn.request('PUT', '%s/%s/hi' % (parsed.path, self.container),
self.container), 'there', {'X-Auth-Token': token}) 'there', {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(put) resp = retry(put)
resp.read() resp.read()
@ -583,7 +573,7 @@ class TestObject(unittest.TestCase):
def delete(url, token, parsed, conn): def delete(url, token, parsed, conn):
conn.request('DELETE', '%s/%s/hi' % (parsed.path, self.container), conn.request('DELETE', '%s/%s/hi' % (parsed.path, self.container),
'', {'X-Auth-Token': token}) '', {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(delete) resp = retry(delete)
resp.read() resp.read()
@ -596,7 +586,8 @@ class TestObject(unittest.TestCase):
raise SkipTest raise SkipTest
def put(url, token, parsed, conn): def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s/abc%%00def' % (parsed.path, conn.request('PUT', '%s/%s/abc%%00def' % (
parsed.path,
self.container), 'test', {'X-Auth-Token': token}) self.container), 'test', {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(put) resp = retry(put)
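Note: several of the object tests above exercise dynamic large objects: plain segments uploaded under a common prefix plus an empty manifest object whose X-Object-Manifest header names that prefix, so a GET of the manifest streams the concatenated segments. A minimal end-to-end sketch of that flow follows; SWIFT_URL and TOKEN are placeholder assumptions and container 'c' is assumed to already exist.

# Sketch only -- pre-authenticated endpoint/token, existing container 'c'.
import httplib
import urlparse

SWIFT_URL = 'http://127.0.0.1:8080/v1/AUTH_test'   # assumption
TOKEN = 'AUTH_tk_placeholder'                      # assumption
parsed = urlparse.urlparse(SWIFT_URL)
conn = httplib.HTTPConnection(parsed.netloc)

def request(method, path, body='', headers=None):
    hdrs = {'X-Auth-Token': TOKEN}
    hdrs.update(headers or {})
    conn.request(method, parsed.path + path, body, hdrs)
    resp = conn.getresponse()
    resp.read()        # drain so the connection can be reused
    return resp.status

# Upload two segments under a common prefix.
assert request('PUT', '/c/segments1/0', 'one') == 201
assert request('PUT', '/c/segments1/1', 'two') == 201

# The manifest body is empty; the header points at the segment prefix.
assert request('PUT', '/c/manifest', '',
               {'X-Object-Manifest': 'c/segments1/',
                'Content-Length': '0'}) == 201

# A GET of /c/manifest now returns 'onetwo', the concatenated segments.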

View File

@ -1,3 +1,18 @@
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Swift tests """ """ Swift tests """
import os import os
@ -8,6 +23,7 @@ from sys import exc_info
from contextlib import contextmanager from contextlib import contextmanager
from collections import defaultdict from collections import defaultdict
from tempfile import NamedTemporaryFile from tempfile import NamedTemporaryFile
import time
from eventlet.green import socket from eventlet.green import socket
from tempfile import mkdtemp from tempfile import mkdtemp
from shutil import rmtree from shutil import rmtree
@ -142,13 +158,24 @@ def tmpfile(content):
xattr_data = {} xattr_data = {}
def _get_inode(fd): def _get_inode(fd_or_name):
if not isinstance(fd, int): try:
try: if isinstance(fd_or_name, int):
fd = fd.fileno() fd = fd_or_name
except AttributeError: else:
return os.stat(fd).st_ino try:
return os.fstat(fd).st_ino fd = fd_or_name.fileno()
except AttributeError:
fd = None
if fd is None:
ino = os.stat(fd_or_name).st_ino
else:
ino = os.fstat(fd).st_ino
except OSError as err:
ioerr = IOError()
ioerr.errno = err.errno
raise ioerr
return ino
def _setxattr(fd, k, v): def _setxattr(fd, k, v):
@ -199,27 +226,57 @@ class NullLoggingHandler(logging.Handler):
pass pass
class FakeLogger(object): class UnmockTimeModule(object):
"""
Even if a test mocks time.time, you can restore unmolested behavior in
another module that imports time directly by monkey patching its imported
reference to the module with an instance of this class

"""
_orig_time = time.time
def __getattribute__(self, name):
if name == 'time':
return UnmockTimeModule._orig_time
return getattr(time, name)
# logging.LogRecord.__init__ calls time.time
logging.time = UnmockTimeModule()
class FakeLogger(logging.Logger):
# a thread safe logger # a thread safe logger
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
self._clear() self._clear()
self.name = 'swift.unit.fake_logger'
self.level = logging.NOTSET self.level = logging.NOTSET
if 'facility' in kwargs: if 'facility' in kwargs:
self.facility = kwargs['facility'] self.facility = kwargs['facility']
def _clear(self): def _clear(self):
self.log_dict = defaultdict(list) self.log_dict = defaultdict(list)
self.lines_dict = defaultdict(list)
def _store_in(store_name): def _store_in(store_name):
def stub_fn(self, *args, **kwargs): def stub_fn(self, *args, **kwargs):
self.log_dict[store_name].append((args, kwargs)) self.log_dict[store_name].append((args, kwargs))
return stub_fn return stub_fn
error = _store_in('error') def _store_and_log_in(store_name):
info = _store_in('info') def stub_fn(self, *args, **kwargs):
warning = _store_in('warning') self.log_dict[store_name].append((args, kwargs))
debug = _store_in('debug') self._log(store_name, args[0], args[1:], **kwargs)
return stub_fn
def get_lines_for_level(self, level):
return self.lines_dict[level]
error = _store_and_log_in('error')
info = _store_and_log_in('info')
warning = _store_and_log_in('warning')
debug = _store_and_log_in('debug')
def exception(self, *args, **kwargs): def exception(self, *args, **kwargs):
self.log_dict['exception'].append((args, kwargs, str(exc_info()[1]))) self.log_dict['exception'].append((args, kwargs, str(exc_info()[1])))
@ -267,7 +324,13 @@ class FakeLogger(object):
pass pass
def handle(self, record): def handle(self, record):
pass try:
line = record.getMessage()
except TypeError:
print 'WARNING: unable to format log message %r %% %r' % (
record.msg, record.args)
raise
self.lines_dict[record.levelno].append(line)
def flush(self): def flush(self):
pass pass
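Note: with this change FakeLogger both records the raw (args, kwargs) call in log_dict and pushes each record through handle(), which formats it into lines_dict so tests can assert on rendered messages via get_lines_for_level(). A hypothetical usage sketch, with the level keys inferred from the code shown here and test.unit assumed to be importable:

# Hypothetical usage sketch based on the FakeLogger changes above.
from test.unit import FakeLogger

log = FakeLogger()
log.warning('object %s missing', 'o1')

# The raw call is still captured ...
assert log.log_dict['warning'] == [(('object %s missing', 'o1'), {})]
# ... and the formatted line is now captured as well.
assert log.get_lines_for_level('warning') == ['object o1 missing']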
@ -354,11 +417,13 @@ def mock(update):
else: else:
deletes.append((module, attr)) deletes.append((module, attr))
setattr(module, attr, value) setattr(module, attr, value)
yield True try:
for module, attr, value in returns: yield True
setattr(module, attr, value) finally:
for module, attr in deletes: for module, attr, value in returns:
delattr(module, attr) setattr(module, attr, value)
for module, attr in deletes:
delattr(module, attr)
def fake_http_connect(*code_iter, **kwargs): def fake_http_connect(*code_iter, **kwargs):
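Note: the mock() helper now restores the patched attributes in a finally block, so they are put back even when the body of the with statement raises. The same pattern in isolation, shown here with a throwaway patch of os.path.exists purely as an example:

# Standalone illustration of the try/finally pattern adopted by mock() above.
import os
from contextlib import contextmanager

@contextmanager
def patched(module, attr, value):
    orig = getattr(module, attr)
    setattr(module, attr, value)
    try:
        yield
    finally:
        # Runs even if the with-body raises, so the module is always restored.
        setattr(module, attr, orig)

orig_exists = os.path.exists
try:
    with patched(os.path, 'exists', lambda p: True):
        raise RuntimeError('boom')
except RuntimeError:
    pass
assert os.path.exists is orig_exists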
@ -466,6 +531,8 @@ def fake_http_connect(*code_iter, **kwargs):
body_iter = iter(body_iter) body_iter = iter(body_iter)
def connect(*args, **ckwargs): def connect(*args, **ckwargs):
if kwargs.get('slow_connect', False):
sleep(0.1)
if 'give_content_type' in kwargs: if 'give_content_type' in kwargs:
if len(args) >= 7 and 'Content-Type' in args[6]: if len(args) >= 7 and 'Content-Type' in args[6]:
kwargs['give_content_type'](args[6]['Content-Type']) kwargs['give_content_type'](args[6]['Content-Type'])

View File

@ -30,8 +30,22 @@ from gluster.swift.common.exceptions import NotDirectoryError, \
def mock_os_fsync(fd): def mock_os_fsync(fd):
return True return True
def mock_tpool_execute(func, *args, **kwargs): def mock_os_fdatasync(fd):
func(*args, **kwargs) return True
class TestFakefile(unittest.TestCase):
""" Tests for common.fs_utils.Fake_file """
def test_Fake_file(self):
path = "/tmp/bar"
ff = fs.Fake_file(path)
self.assertEqual(path, ff.path)
self.assertEqual(0, ff.tell())
self.assertEqual(None, ff.read(50))
self.assertEqual(-1, ff.fileno())
self.assertEqual(None, ff.close())
class TestFsUtils(unittest.TestCase): class TestFsUtils(unittest.TestCase):
""" Tests for common.fs_utils """ """ Tests for common.fs_utils """
@ -688,9 +702,8 @@ class TestFsUtils(unittest.TestCase):
fd, tmpfile = mkstemp(dir=tmpdir) fd, tmpfile = mkstemp(dir=tmpdir)
try: try:
os.write(fd, 'test') os.write(fd, 'test')
with patch('eventlet.tpool.execute', mock_tpool_execute): with patch('os.fsync', mock_os_fsync):
with patch('os.fsync', mock_os_fsync): assert fs.do_fsync(fd) is None
assert fs.do_fsync(fd) is None
except GlusterFileSystemOSError as ose: except GlusterFileSystemOSError as ose:
self.fail('Opening a temporary file failed with %s' %ose.strerror) self.fail('Opening a temporary file failed with %s' %ose.strerror)
else: else:
@ -704,15 +717,47 @@ class TestFsUtils(unittest.TestCase):
try: try:
fd, tmpfile = mkstemp(dir=tmpdir) fd, tmpfile = mkstemp(dir=tmpdir)
os.write(fd, 'test') os.write(fd, 'test')
with patch('eventlet.tpool.execute', mock_tpool_execute): with patch('os.fsync', mock_os_fsync):
with patch('os.fsync', mock_os_fsync): assert fs.do_fsync(fd) is None
assert fs.do_fsync(fd) is None os.close(fd)
os.close(fd) try:
try: fs.do_fsync(fd)
fs.do_fsync(fd) except GlusterFileSystemOSError:
except GlusterFileSystemOSError: pass
pass else:
else: self.fail("Expected GlusterFileSystemOSError")
self.fail("Expected GlusterFileSystemOSError") finally:
shutil.rmtree(tmpdir)
def test_do_fdatasync(self):
tmpdir = mkdtemp()
try:
fd, tmpfile = mkstemp(dir=tmpdir)
try:
os.write(fd, 'test')
with patch('os.fdatasync', mock_os_fdatasync):
assert fs.do_fdatasync(fd) is None
except GlusterFileSystemOSError as ose:
self.fail('Opening a temporary file failed with %s' %ose.strerror)
else:
os.close(fd)
finally:
shutil.rmtree(tmpdir)
def test_do_fdatasync_err(self):
tmpdir = mkdtemp()
try:
fd, tmpfile = mkstemp(dir=tmpdir)
os.write(fd, 'test')
with patch('os.fdatasync', mock_os_fdatasync):
assert fs.do_fdatasync(fd) is None
os.close(fd)
try:
fs.do_fdatasync(fd)
except GlusterFileSystemOSError:
pass
else:
self.fail("Expected GlusterFileSystemOSError")
finally: finally:
shutil.rmtree(tmpdir) shutil.rmtree(tmpdir)
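Note: the two new tests above exercise the do_fdatasync() wrapper added in this rebase: on a valid descriptor it returns None, and on a closed descriptor the underlying OSError is re-raised as GlusterFileSystemOSError. A minimal usage sketch against a real temporary file (no mocking), assuming the gluster.swift package is importable:

# Usage sketch for the new fs_utils.do_fdatasync() helper; mirrors the
# happy-path and error-path checks in the tests above.
import os
from tempfile import mkstemp
from gluster.swift.common import fs_utils as fs

fd, tmppath = mkstemp()
try:
    os.write(fd, 'some object data')
    # Flushes file data to disk without forcing a full metadata sync.
    assert fs.do_fdatasync(fd) is None
finally:
    os.close(fd)
    os.unlink(tmppath)

# On a closed descriptor the wrapper raises GlusterFileSystemOSError,
# which subclasses OSError.
try:
    fs.do_fdatasync(fd)
except OSError:
    pass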

View File

@ -43,18 +43,35 @@ from test.unit import FakeLogger
_metadata = {} _metadata = {}
def _mock_read_metadata(filename): def _mapit(filename_or_fd):
if isinstance(filename_or_fd, int):
statmeth = os.fstat
else:
statmeth = os.lstat
try:
stats = statmeth(filename_or_fd)
except OSError as err:
if err.errno == errno.ENOENT:
raise GlusterFileSystemOSError(
err.errno, '%s, os.fstat(%s)' % (err.strerror, filename_or_fd))
raise
return stats.st_ino
def _mock_read_metadata(filename_or_fd):
global _metadata global _metadata
if filename in _metadata: ino = _mapit(filename_or_fd)
md = _metadata[filename] if ino in _metadata:
md = _metadata[ino].copy()
else: else:
md = {} md = {}
return md return md
def _mock_write_metadata(filename, metadata): def _mock_write_metadata(filename_or_fd, metadata):
global _metadata global _metadata
_metadata[filename] = metadata ino = _mapit(filename_or_fd)
_metadata[ino] = metadata.copy()
def _mock_clear_metadata(): def _mock_clear_metadata():
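Note: the mocked metadata store is now keyed by inode via _mapit() rather than by path string, so a path and an open file descriptor that refer to the same file resolve to the same entry. A self-contained illustration of that property, using a placeholder temp file and a simplified _mapit without the ENOENT error wrapping:

# Standalone illustration of inode-keyed metadata, as used by the mocks above.
import os
from tempfile import mkstemp

_metadata = {}

def _mapit(filename_or_fd):
    # Simplified: no GlusterFileSystemOSError translation here.
    if isinstance(filename_or_fd, int):
        return os.fstat(filename_or_fd).st_ino
    return os.lstat(filename_or_fd).st_ino

fd, path = mkstemp()
try:
    _metadata[_mapit(path)] = {'X-Timestamp': 'ts'}
    # Reading through the descriptor finds the entry written via the path.
    assert _metadata[_mapit(fd)] == {'X-Timestamp': 'ts'}
finally:
    os.close(fd)
    os.unlink(path)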
@ -127,8 +144,7 @@ class TestDiskFile(unittest.TestCase):
assert gdf.logger == self.lg assert gdf.logger == self.lg
assert gdf.uid == DEFAULT_UID assert gdf.uid == DEFAULT_UID
assert gdf.gid == DEFAULT_GID assert gdf.gid == DEFAULT_GID
assert gdf.metadata == {} assert gdf._metadata == None
assert gdf.meta_file is None
assert gdf.data_file is None assert gdf.data_file is None
assert gdf.fp is None assert gdf.fp is None
assert gdf.iter_etag is None assert gdf.iter_etag is None
@ -146,7 +162,7 @@ class TestDiskFile(unittest.TestCase):
assert gdf.datadir == os.path.join(self.td, "vol0", "bar", "b", "a") assert gdf.datadir == os.path.join(self.td, "vol0", "bar", "b", "a")
assert gdf.device_path == os.path.join(self.td, "vol0") assert gdf.device_path == os.path.join(self.td, "vol0")
def test_constructor_no_metadata(self): def test_open_no_metadata(self):
the_path = os.path.join(self.td, "vol0", "bar") the_path = os.path.join(self.td, "vol0", "bar")
the_file = os.path.join(the_path, "z") the_file = os.path.join(the_path, "z")
os.makedirs(the_path) os.makedirs(the_path)
@ -164,12 +180,13 @@ class TestDiskFile(unittest.TestCase):
'Content-Type': 'application/octet-stream'} 'Content-Type': 'application/octet-stream'}
gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z") gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z")
assert gdf._obj == "z" assert gdf._obj == "z"
assert gdf.data_file == the_file with gdf.open():
assert not gdf._is_dir assert gdf.data_file == the_file
assert gdf.fp is None assert not gdf._is_dir
assert gdf.metadata == exp_md assert gdf.fp is not None
assert gdf._metadata == exp_md
def test_constructor_existing_metadata(self): def test_open_existing_metadata(self):
the_path = os.path.join(self.td, "vol0", "bar") the_path = os.path.join(self.td, "vol0", "bar")
the_file = os.path.join(the_path, "z") the_file = os.path.join(the_path, "z")
os.makedirs(the_path) os.makedirs(the_path)
@ -182,37 +199,39 @@ class TestDiskFile(unittest.TestCase):
'ETag': 'etag', 'ETag': 'etag',
'X-Timestamp': 'ts', 'X-Timestamp': 'ts',
'Content-Type': 'application/loctet-stream'} 'Content-Type': 'application/loctet-stream'}
_metadata[the_file] = ini_md _metadata[_mapit(the_file)] = ini_md
exp_md = ini_md.copy() exp_md = ini_md.copy()
del exp_md['X-Type'] del exp_md['X-Type']
del exp_md['X-Object-Type'] del exp_md['X-Object-Type']
gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z") gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z")
assert gdf._obj == "z" assert gdf._obj == "z"
assert gdf.data_file == the_file with gdf.open():
assert not gdf._is_dir assert gdf.data_file == the_file
assert gdf.fp is None assert not gdf._is_dir
assert gdf.metadata == exp_md assert gdf.fp is not None
assert gdf._metadata == exp_md
def test_constructor_invalid_existing_metadata(self): def test_open_invalid_existing_metadata(self):
the_path = os.path.join(self.td, "vol0", "bar") the_path = os.path.join(self.td, "vol0", "bar")
the_file = os.path.join(the_path, "z") the_file = os.path.join(the_path, "z")
os.makedirs(the_path)
with open(the_file, "wb") as fd:
fd.write("1234")
inv_md = { inv_md = {
'Content-Length': 5, 'Content-Length': 5,
'ETag': 'etag', 'ETag': 'etag',
'X-Timestamp': 'ts', 'X-Timestamp': 'ts',
'Content-Type': 'application/loctet-stream'} 'Content-Type': 'application/loctet-stream'}
_metadata[the_file] = inv_md _metadata[_mapit(the_file)] = inv_md
os.makedirs(the_path)
with open(the_file, "wb") as fd:
fd.write("1234")
gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z") gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z")
assert gdf._obj == "z" assert gdf._obj == "z"
assert gdf.data_file == the_file with gdf.open():
assert not gdf._is_dir assert gdf.data_file == the_file
assert gdf.fp is None assert not gdf._is_dir
assert gdf.metadata != inv_md assert gdf.fp is not None
assert gdf._metadata != inv_md
def test_constructor_isdir(self): def test_open_isdir(self):
the_path = os.path.join(self.td, "vol0", "bar") the_path = os.path.join(self.td, "vol0", "bar")
the_dir = os.path.join(the_path, "d") the_dir = os.path.join(the_path, "d")
os.makedirs(the_dir) os.makedirs(the_dir)
@ -223,29 +242,16 @@ class TestDiskFile(unittest.TestCase):
'ETag': 'etag', 'ETag': 'etag',
'X-Timestamp': 'ts', 'X-Timestamp': 'ts',
'Content-Type': 'application/loctet-stream'} 'Content-Type': 'application/loctet-stream'}
_metadata[the_dir] = ini_md _metadata[_mapit(the_dir)] = ini_md
exp_md = ini_md.copy() exp_md = ini_md.copy()
del exp_md['X-Type'] del exp_md['X-Type']
del exp_md['X-Object-Type'] del exp_md['X-Object-Type']
gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "d", gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "d")
keep_data_fp=True)
assert gdf._obj == "d" assert gdf._obj == "d"
assert gdf.data_file == the_dir with gdf.open():
assert gdf._is_dir assert gdf.data_file == the_dir
assert gdf.metadata == exp_md assert gdf._is_dir
assert gdf._metadata == exp_md
def test_constructor_keep_data_fp(self):
the_path = os.path.join(self.td, "vol0", "bar")
the_file = os.path.join(the_path, "z")
os.makedirs(the_path)
with open(the_file, "wb") as fd:
fd.write("1234")
gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z",
keep_data_fp=True)
assert gdf._obj == "z"
assert gdf.data_file == the_file
assert not gdf._is_dir
assert gdf.fp is not None
def test_constructor_chunk_size(self): def test_constructor_chunk_size(self):
gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z", gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z",
@ -258,8 +264,7 @@ class TestDiskFile(unittest.TestCase):
assert gdf.iter_hook == 'hook' assert gdf.iter_hook == 'hook'
def test_close_no_open_fp(self): def test_close_no_open_fp(self):
gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z", gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z")
keep_data_fp=True)
gdf._is_dir = False gdf._is_dir = False
self.called = False self.called = False
@ -276,28 +281,32 @@ class TestDiskFile(unittest.TestCase):
the_dir = "dir" the_dir = "dir"
self.called = False self.called = False
os.makedirs(os.path.join(the_cont, the_dir)) os.makedirs(os.path.join(the_cont, the_dir))
gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "dir", gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "dir")
keep_data_fp=True) with gdf.open():
ret = isinstance(gdf.fp, Fake_file)
self.assertTrue(ret)
ret = isinstance(gdf.fp, Fake_file) # Get a "Fake_file" pointer
self.assertTrue(ret) ffp = gdf.fp
# Get a File descriptor # This expected to call Fake_file interfaces
fd = gdf.fp ret = ffp.tell()
self.assertEqual(ret, 0)
# This expected to call Fake_file interfaces ret = ffp.read(1)
ret = fd.tell() self.assertEqual(ret, None)
self.assertEqual(ret , 0)
ret = fd.read(1) ret = ffp.fileno()
self.assertEqual(ret , 0) self.assertEqual(ret, -1)
ret = fd.fileno() def our_do_close(ffp):
self.assertEqual(ret, -1) self.called = True
ret = fd.close()
self.assertFalse(self.called)
with mock.patch("gluster.swift.obj.diskfile.do_close",
our_do_close):
ret = ffp.close()
self.assertEqual(ret, None)
self.assertFalse(self.called)
     def test_close_file_object(self):
         the_cont = os.path.join(self.td, "vol0", "bar")
@@ -306,22 +315,23 @@ class TestDiskFile(unittest.TestCase):
         os.makedirs(the_cont)
         with open(the_file, "wb") as fd:
             fd.write("1234")
-        gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z",
-                                 keep_data_fp=True)
+        gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z")

         def our_do_close(fp):
             self.called = True

         with mock.patch("gluster.swift.obj.diskfile.do_close",
                         our_do_close):
-            gdf.close()
+            with gdf.open():
+                assert not self.called
             assert self.called

     def test_is_deleted(self):
         gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z")
-        assert gdf.is_deleted()
-        gdf.data_file = os.path.join(self.td, "bar")
-        assert not gdf.is_deleted()
+        with gdf.open():
+            assert gdf.is_deleted()
+            gdf.data_file = os.path.join(self.td, "bar")
+            assert not gdf.is_deleted()

     def test_create_dir_object_no_md(self):
         the_cont = os.path.join(self.td, "vol0", "bar")
@@ -334,7 +344,7 @@ class TestDiskFile(unittest.TestCase):
         gdf._create_dir_object(the_dir)
         full_dir_path = os.path.join(the_cont, the_dir)
         assert os.path.isdir(full_dir_path)
-        assert full_dir_path not in _metadata
+        assert _mapit(full_dir_path) not in _metadata

     def test_create_dir_object_with_md(self):
         the_cont = os.path.join(self.td, "vol0", "bar")
@@ -349,7 +359,7 @@ class TestDiskFile(unittest.TestCase):
         gdf._create_dir_object(the_dir, dir_md)
         full_dir_path = os.path.join(the_cont, the_dir)
         assert os.path.isdir(full_dir_path)
-        assert full_dir_path in _metadata
+        assert _mapit(full_dir_path) in _metadata

     def test_create_dir_object_exists(self):
         the_path = os.path.join(self.td, "vol0", "bar")
@@ -371,7 +381,7 @@ class TestDiskFile(unittest.TestCase):
             DiskFileError, gdf._create_dir_object, the_dir)
         gluster.swift.obj.diskfile.do_chown = dc
         self.assertFalse(os.path.isdir(the_dir))
-        self.assertFalse(the_dir in _metadata)
+        self.assertFalse(_mapit(the_dir) in _metadata)

     def test_create_dir_object_do_stat_failure(self):
         the_path = os.path.join(self.td, "vol0", "bar")
@@ -393,25 +403,26 @@ class TestDiskFile(unittest.TestCase):
             DiskFileError, gdf._create_dir_object, the_dir)
         gluster.swift.obj.diskfile.do_chown = dc
         self.assertFalse(os.path.isdir(the_dir))
-        self.assertFalse(the_dir in _metadata)
+        self.assertFalse(_mapit(the_dir) in _metadata)
     def test_put_metadata(self):
-        the_path = os.path.join(self.td, "vol0", "bar")
-        the_dir = os.path.join(the_path, "z")
+        the_dir = os.path.join(self.td, "vol0", "bar", "z")
         os.makedirs(the_dir)
         gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z")
         md = {'Content-Type': 'application/octet-stream', 'a': 'b'}
         gdf.put_metadata(md.copy())
-        assert gdf.metadata == md, "gdf.metadata = %r, md = %r" % (
-            gdf.metadata, md)
-        assert _metadata[the_dir] == md
+        assert gdf._metadata is None
+        fmd = _metadata[_mapit(the_dir)]
+        md.update({'X-Object-Type': 'file', 'X-Type': 'Object'})
+        assert fmd == md, "on-disk md = %r, md = %r" % (fmd, md)

     def test_put_w_tombstone(self):
         gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z")
-        assert gdf.metadata == {}
+        assert gdf._metadata == None
         gdf.put_metadata({'x': '1'}, tombstone=True)
-        assert gdf.metadata == {}
+        assert gdf._metadata is None
+        assert _metadata == {}

     def test_put_w_meta_file(self):
         the_path = os.path.join(self.td, "vol0", "bar")
@@ -420,11 +431,13 @@ class TestDiskFile(unittest.TestCase):
         with open(the_file, "wb") as fd:
             fd.write("1234")
         gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z")
-        newmd = gdf.metadata.copy()
-        newmd['X-Object-Meta-test'] = '1234'
+        with gdf.open():
+            newmd = gdf.get_metadata().copy()
+            newmd['X-Object-Meta-test'] = '1234'
         gdf.put_metadata(newmd)
-        assert gdf.metadata == newmd
-        assert _metadata[the_file] == newmd
+        assert gdf._metadata is None
+        fmd = _metadata[_mapit(the_file)]
+        assert fmd == newmd, "on-disk md = %r, newmd = %r" % (fmd, newmd)

     def test_put_w_meta_file_no_content_type(self):
         the_path = os.path.join(self.td, "vol0", "bar")
@@ -433,67 +446,72 @@ class TestDiskFile(unittest.TestCase):
         with open(the_file, "wb") as fd:
             fd.write("1234")
         gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z")
-        newmd = gdf.metadata.copy()
-        newmd['Content-Type'] = ''
-        newmd['X-Object-Meta-test'] = '1234'
+        with gdf.open():
+            newmd = gdf.get_metadata().copy()
+            newmd['Content-Type'] = ''
+            newmd['X-Object-Meta-test'] = '1234'
         gdf.put_metadata(newmd)
-        assert gdf.metadata == newmd
-        assert _metadata[the_file] == newmd
+        assert gdf._metadata is None
+        fmd = _metadata[_mapit(the_file)]
+        assert fmd == newmd, "on-disk md = %r, newmd = %r" % (fmd, newmd)

     def test_put_w_meta_dir(self):
         the_path = os.path.join(self.td, "vol0", "bar")
         the_dir = os.path.join(the_path, "dir")
         os.makedirs(the_dir)
         gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "dir")
-        newmd = gdf.metadata.copy()
-        newmd['X-Object-Meta-test'] = '1234'
+        with gdf.open():
+            newmd = gdf.get_metadata().copy()
+            newmd['X-Object-Meta-test'] = '1234'
         gdf.put_metadata(newmd)
-        assert gdf.metadata == newmd
-        assert _metadata[the_dir] == newmd
+        assert gdf._metadata is None
+        fmd = _metadata[_mapit(the_dir)]
+        assert fmd == newmd, "on-disk md = %r, newmd = %r" % (fmd, newmd)

     def test_put_w_marker_dir(self):
         the_path = os.path.join(self.td, "vol0", "bar")
         the_dir = os.path.join(the_path, "dir")
         os.makedirs(the_dir)
         gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "dir")
-        newmd = gdf.metadata.copy()
-        newmd['X-Object-Meta-test'] = '1234'
+        with gdf.open():
+            newmd = gdf.get_metadata().copy()
+            newmd['X-Object-Meta-test'] = '1234'
         gdf.put_metadata(newmd)
-        assert gdf.metadata == newmd
-        assert _metadata[the_dir] == newmd
+        assert gdf._metadata is None
+        fmd = _metadata[_mapit(the_dir)]
+        assert fmd == newmd, "on-disk md = %r, newmd = %r" % (fmd, newmd)

     def test_put_w_marker_dir_create(self):
         the_cont = os.path.join(self.td, "vol0", "bar")
         the_dir = os.path.join(the_cont, "dir")
         os.makedirs(the_cont)
         gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "dir")
-        assert gdf.metadata == {}
+        assert gdf._metadata == None
         newmd = {
             'ETag': 'etag',
             'X-Timestamp': 'ts',
             'Content-Type': 'application/directory'}
-        with gdf.writer() as dw:
-            dw.put(newmd, extension='.dir')
-        assert gdf.data_file == the_dir
-        for key, val in newmd.items():
-            assert gdf.metadata[key] == val
-            assert _metadata[the_dir][key] == val
-        assert gdf.metadata[X_OBJECT_TYPE] == DIR_OBJECT
-        assert _metadata[the_dir][X_OBJECT_TYPE] == DIR_OBJECT
+        with gdf.create() as dw:
+            dw.put(newmd.copy(), extension='.dir')
+        with gdf.open():
+            assert gdf.data_file == the_dir
+            for key, val in newmd.items():
+                assert gdf._metadata[key] == val
+                assert _metadata[_mapit(the_dir)][key] == val
+            assert X_OBJECT_TYPE not in gdf._metadata, "md = %r" % gdf._metadata
+            assert _metadata[_mapit(the_dir)][X_OBJECT_TYPE] == DIR_OBJECT
     def test_put_is_dir(self):
         the_path = os.path.join(self.td, "vol0", "bar")
         the_dir = os.path.join(the_path, "dir")
         os.makedirs(the_dir)
         gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "dir")
-        origmd = gdf.metadata.copy()
-        origfmd = _metadata[the_dir]
-        newmd = gdf.metadata.copy()
         # FIXME: This is a hack to get to the code-path; it is not clear
         # how this can happen normally.
-        newmd['Content-Type'] = ''
-        newmd['X-Object-Meta-test'] = '1234'
-        with gdf.writer() as dw:
+        newmd = {
+            'Content-Type': '',
+            'X-Object-Meta-test': '1234'}
+        with gdf.create() as dw:
             try:
                 dw.put(newmd, extension='.data')
             except DiskFileError:
@@ -501,8 +519,6 @@ class TestDiskFile(unittest.TestCase):
             else:
                 self.fail("Expected to encounter"
                           " 'already-exists-as-dir' exception")
-        assert gdf.metadata == origmd
-        assert _metadata[the_dir] == origfmd

     def test_put(self):
         the_cont = os.path.join(self.td, "vol0", "bar")
@@ -525,7 +541,7 @@ class TestDiskFile(unittest.TestCase):
             'Content-Length': '5',
         }
-        with gdf.writer() as dw:
+        with gdf.create() as dw:
             assert dw.tmppath is not None
             tmppath = dw.tmppath
             dw.write(body)
@@ -561,7 +577,7 @@ class TestDiskFile(unittest.TestCase):
         with mock.patch("os.open", mock_open):
             try:
-                with gdf.writer() as dw:
+                with gdf.create() as dw:
                     assert dw.tmppath is not None
                     dw.write(body)
                     dw.put(metadata)
@@ -601,7 +617,7 @@ class TestDiskFile(unittest.TestCase):
         with mock.patch("gluster.swift.obj.diskfile.sleep", mock_sleep):
             with mock.patch("os.rename", mock_rename):
                 try:
-                    with gdf.writer() as dw:
+                    with gdf.create() as dw:
                         assert dw.tmppath is not None
                         dw.write(body)
                         dw.put(metadata)
@@ -631,7 +647,7 @@ class TestDiskFile(unittest.TestCase):
             'Content-Length': '5',
         }
-        with gdf.writer() as dw:
+        with gdf.create() as dw:
             assert dw.tmppath is not None
             tmppath = dw.tmppath
             dw.write(body)
@@ -642,32 +658,32 @@ class TestDiskFile(unittest.TestCase):
         assert os.path.exists(gdf.data_file)
         assert not os.path.exists(tmppath)
-    def test_unlinkold_no_metadata(self):
+    def test_delete_no_metadata(self):
         gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z")
-        assert gdf.metadata == {}
+        assert gdf._metadata == None
         _saved_rmobjdir = gluster.swift.obj.diskfile.rmobjdir
         gluster.swift.obj.diskfile.rmobjdir = _mock_rmobjdir
         try:
-            gdf.unlinkold(None)
+            gdf.delete(1.0)
         except MockException as exp:
             self.fail(str(exp))
         finally:
             gluster.swift.obj.diskfile.rmobjdir = _saved_rmobjdir

-    def test_unlinkold_same_timestamp(self):
+    def test_delete_same_timestamp(self):
         gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z")
-        assert gdf.metadata == {}
-        gdf.metadata['X-Timestamp'] = 1
+        assert gdf._metadata == None
+        gdf._metadata = {'X-Timestamp': 1}
         _saved_rmobjdir = gluster.swift.obj.diskfile.rmobjdir
         gluster.swift.obj.diskfile.rmobjdir = _mock_rmobjdir
         try:
-            gdf.unlinkold(1)
+            gdf.delete(1)
         except MockException as exp:
             self.fail(str(exp))
         finally:
             gluster.swift.obj.diskfile.rmobjdir = _saved_rmobjdir

-    def test_unlinkold_file(self):
+    def test_delete_file(self):
         the_path = os.path.join(self.td, "vol0", "bar")
         the_file = os.path.join(the_path, "z")
         os.makedirs(the_path)
@@ -675,15 +691,14 @@ class TestDiskFile(unittest.TestCase):
             fd.write("1234")
         gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z")
         assert gdf._obj == "z"
-        assert gdf.data_file == the_file
-        assert not gdf._is_dir
-
-        later = float(gdf.metadata['X-Timestamp']) + 1
-        gdf.unlinkold(normalize_timestamp(later))
+        with gdf.open():
+            later = float(gdf.get_metadata()['X-Timestamp']) + 1
+            assert gdf.data_file == the_file
+        gdf.delete(normalize_timestamp(later))
         assert os.path.isdir(gdf.datadir)
         assert not os.path.exists(os.path.join(gdf.datadir, gdf._obj))

-    def test_unlinkold_file_not_found(self):
+    def test_delete_file_not_found(self):
         the_path = os.path.join(self.td, "vol0", "bar")
         the_file = os.path.join(the_path, "z")
         os.makedirs(the_path)
@@ -691,18 +706,19 @@ class TestDiskFile(unittest.TestCase):
             fd.write("1234")
         gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z")
         assert gdf._obj == "z"
-        assert gdf.data_file == the_file
-        assert not gdf._is_dir
+        with gdf.open():
+            later = float(gdf._metadata['X-Timestamp']) + 1
+            assert gdf.data_file == the_file
+            assert not gdf._is_dir

         # Handle the case the file is not in the directory listing.
         os.unlink(the_file)

-        later = float(gdf.metadata['X-Timestamp']) + 1
-        gdf.unlinkold(normalize_timestamp(later))
+        gdf.delete(normalize_timestamp(later))
         assert os.path.isdir(gdf.datadir)
         assert not os.path.exists(os.path.join(gdf.datadir, gdf._obj))

-    def test_unlinkold_file_unlink_error(self):
+    def test_delete_file_unlink_error(self):
         the_path = os.path.join(self.td, "vol0", "bar")
         the_file = os.path.join(the_path, "z")
         os.makedirs(the_path)
@@ -710,10 +726,10 @@ class TestDiskFile(unittest.TestCase):
             fd.write("1234")
         gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z")
         assert gdf._obj == "z"
-        assert gdf.data_file == the_file
-        assert not gdf._is_dir
-
-        later = float(gdf.metadata['X-Timestamp']) + 1
+        with gdf.open():
+            assert gdf.data_file == the_file
+            assert not gdf._is_dir
+            later = float(gdf._metadata['X-Timestamp']) + 1

         def _mock_os_unlink_eacces_err(f):
             raise OSError(errno.EACCES, os.strerror(errno.EACCES))
@@ -725,7 +741,7 @@ class TestDiskFile(unittest.TestCase):
             # Handle the case os_unlink() raises an OSError
             with patch("os.unlink", _mock_os_unlink_eacces_err):
                 try:
-                    gdf.unlinkold(normalize_timestamp(later))
+                    gdf.delete(normalize_timestamp(later))
                 except OSError as e:
                     assert e.errno == errno.EACCES
                 else:
@@ -736,17 +752,17 @@ class TestDiskFile(unittest.TestCase):
         assert os.path.isdir(gdf.datadir)
         assert os.path.exists(os.path.join(gdf.datadir, gdf._obj))

-    def test_unlinkold_is_dir(self):
+    def test_delete_is_dir(self):
         the_path = os.path.join(self.td, "vol0", "bar")
         the_dir = os.path.join(the_path, "d")
         os.makedirs(the_dir)
-        gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "d",
-                                 keep_data_fp=True)
-        assert gdf.data_file == the_dir
-        assert gdf._is_dir
-
-        later = float(gdf.metadata['X-Timestamp']) + 1
-        gdf.unlinkold(normalize_timestamp(later))
+        gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "d")
+        assert gdf._obj == "d"
+        with gdf.open():
+            assert gdf.data_file == the_dir
+            assert gdf._is_dir
+            later = float(gdf._metadata['X-Timestamp']) + 1
+        gdf.delete(normalize_timestamp(later))
         assert os.path.isdir(gdf.datadir)
         assert not os.path.exists(os.path.join(gdf.datadir, gdf._obj))
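The unlinkold() method is replaced by delete(), which still takes a timestamp and, as the same-timestamp test above suggests, only removes data older than that timestamp. A hedged sketch of the calling pattern, assuming gdf is a DiskFile as in these tests; the helper name is hypothetical:

from swift.common.utils import normalize_timestamp

def _sketch_delete(gdf):
    # Hypothetical helper: read the stored timestamp while the DiskFile is
    # open, then ask delete() to remove anything older than one second later.
    with gdf.open():
        later = float(gdf.get_metadata()['X-Timestamp']) + 1
    gdf.delete(normalize_timestamp(later))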
@@ -758,9 +774,10 @@ class TestDiskFile(unittest.TestCase):
             fd.write("1234")
         gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z")
         assert gdf._obj == "z"
-        assert gdf.data_file == the_file
-        assert not gdf._is_dir
-        assert 4 == gdf.get_data_file_size()
+        with gdf.open():
+            assert gdf.data_file == the_file
+            assert not gdf._is_dir
+            assert 4 == gdf.get_data_file_size()

     def test_get_data_file_size_md_restored(self):
         the_path = os.path.join(self.td, "vol0", "bar")
@@ -770,12 +787,13 @@ class TestDiskFile(unittest.TestCase):
             fd.write("1234")
         gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z")
         assert gdf._obj == "z"
-        assert gdf.data_file == the_file
-        assert not gdf._is_dir
-        assert 4 == gdf.metadata['Content-Length']
-        gdf.metadata['Content-Length'] = 3
-        assert 4 == gdf.get_data_file_size()
-        assert 4 == gdf.metadata['Content-Length']
+        with gdf.open():
+            assert gdf.data_file == the_file
+            assert not gdf._is_dir
+            assert 4 == gdf._metadata['Content-Length']
+            gdf._metadata['Content-Length'] = 3
+            assert 4 == gdf.get_data_file_size()
+            assert 4 == gdf._metadata['Content-Length']

     def test_get_data_file_size_dne(self):
         gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar",
@@ -795,15 +813,16 @@ class TestDiskFile(unittest.TestCase):
             fd.write("1234")
         gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z")
         assert gdf._obj == "z"
-        assert gdf.data_file == the_file
-        assert not gdf._is_dir
-        gdf.data_file = gdf.data_file + ".dne"
-        try:
-            gdf.get_data_file_size()
-        except DiskFileNotExist:
-            pass
-        else:
-            self.fail("Expected DiskFileNotExist exception")
+        with gdf.open():
+            assert gdf.data_file == the_file
+            assert not gdf._is_dir
+            gdf.data_file = gdf.data_file + ".dne"
+            try:
+                gdf.get_data_file_size()
+            except DiskFileNotExist:
+                pass
+            else:
+                self.fail("Expected DiskFileNotExist exception")

     def test_get_data_file_size_os_err(self):
         the_path = os.path.join(self.td, "vol0", "bar")
@@ -813,55 +832,57 @@ class TestDiskFile(unittest.TestCase):
             fd.write("1234")
         gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z")
         assert gdf._obj == "z"
-        assert gdf.data_file == the_file
-        assert not gdf._is_dir
-        stats = os.stat(the_path)
-        try:
-            os.chmod(the_path, 0)
+        with gdf.open():
+            assert gdf.data_file == the_file
+            assert not gdf._is_dir
+            stats = os.stat(the_path)
+            try:
+                os.chmod(the_path, 0)

-            def _mock_getsize_eaccess_err(f):
-                raise OSError(errno.EACCES, os.strerror(errno.EACCES))
+                def _mock_getsize_eaccess_err(f):
+                    raise OSError(errno.EACCES, os.strerror(errno.EACCES))

-            with patch("os.path.getsize", _mock_getsize_eaccess_err):
-                try:
-                    gdf.get_data_file_size()
-                except OSError as err:
-                    assert err.errno == errno.EACCES
-                else:
-                    self.fail("Expected OSError exception")
-        finally:
-            os.chmod(the_path, stats.st_mode)
+                with patch("os.path.getsize", _mock_getsize_eaccess_err):
+                    try:
+                        gdf.get_data_file_size()
+                    except OSError as err:
+                        assert err.errno == errno.EACCES
+                    else:
+                        self.fail("Expected OSError exception")
+            finally:
+                os.chmod(the_path, stats.st_mode)

     def test_get_data_file_size_dir(self):
         the_path = os.path.join(self.td, "vol0", "bar")
         the_dir = os.path.join(the_path, "d")
         os.makedirs(the_dir)
-        gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "d",
-                                 keep_data_fp=True)
+        gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "d")
         assert gdf._obj == "d"
-        assert gdf.data_file == the_dir
-        assert gdf._is_dir
-        assert 0 == gdf.get_data_file_size()
+        with gdf.open():
+            assert gdf.data_file == the_dir
+            assert gdf._is_dir
+            assert 0 == gdf.get_data_file_size()

     def test_filter_metadata(self):
         gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z")
-        assert gdf.metadata == {}
+        assert gdf._metadata == None
         gdf._filter_metadata()
-        assert gdf.metadata == {}
+        assert gdf._metadata == None

-        gdf.metadata[X_TYPE] = 'a'
-        gdf.metadata[X_OBJECT_TYPE] = 'b'
-        gdf.metadata['foobar'] = 'c'
+        gdf._metadata = {}
+        gdf._metadata[X_TYPE] = 'a'
+        gdf._metadata[X_OBJECT_TYPE] = 'b'
+        gdf._metadata['foobar'] = 'c'
         gdf._filter_metadata()
-        assert X_TYPE not in gdf.metadata
-        assert X_OBJECT_TYPE not in gdf.metadata
-        assert 'foobar' in gdf.metadata
+        assert X_TYPE not in gdf._metadata
+        assert X_OBJECT_TYPE not in gdf._metadata
+        assert 'foobar' in gdf._metadata

-    def test_writer(self):
+    def test_create(self):
         gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "dir/z")
         saved_tmppath = ''
         saved_fd = None
-        with gdf.writer() as dw:
+        with gdf.create() as dw:
             assert gdf.datadir == os.path.join(self.td, "vol0", "bar", "dir")
             assert os.path.isdir(gdf.datadir)
             saved_tmppath = dw.tmppath
@@ -881,10 +902,10 @@ class TestDiskFile(unittest.TestCase):
             self.fail("Exception expected")
         assert not os.path.exists(saved_tmppath)

-    def test_writer_err_on_close(self):
+    def test_create_err_on_close(self):
         gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "dir/z")
         saved_tmppath = ''
-        with gdf.writer() as dw:
+        with gdf.create() as dw:
             assert gdf.datadir == os.path.join(self.td, "vol0", "bar", "dir")
             assert os.path.isdir(gdf.datadir)
             saved_tmppath = dw.tmppath
@@ -896,10 +917,10 @@ class TestDiskFile(unittest.TestCase):
             os.close(dw.fd)
         assert not os.path.exists(saved_tmppath)

-    def test_writer_err_on_unlink(self):
+    def test_create_err_on_unlink(self):
         gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "dir/z")
         saved_tmppath = ''
-        with gdf.writer() as dw:
+        with gdf.create() as dw:
             assert gdf.datadir == os.path.join(self.td, "vol0", "bar", "dir")
             assert os.path.isdir(gdf.datadir)
             saved_tmppath = dw.tmppath


@@ -1,4 +1,4 @@
-# Copyright (c) 2010-2012 OpenStack, LLC.
+# Copyright (c) 2010-2012 OpenStack Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -40,6 +40,28 @@ class TestAccountController(unittest.TestCase):
         self.assertEqual(headers_to_account_info(resp.headers),
                          resp.environ['swift.account/AUTH_bob'])
def test_swift_owner(self):
owner_headers = {
'x-account-meta-temp-url-key': 'value',
'x-account-meta-temp-url-key-2': 'value'}
controller = proxy_server.AccountController(self.app, 'a')
req = Request.blank('/a')
with mock.patch('swift.proxy.controllers.base.http_connect',
fake_http_connect(200, 200, headers=owner_headers)):
resp = controller.HEAD(req)
self.assertEquals(2, resp.status_int // 100)
for key in owner_headers:
self.assertTrue(key not in resp.headers)
req = Request.blank('/a', environ={'swift_owner': True})
with mock.patch('swift.proxy.controllers.base.http_connect',
fake_http_connect(200, 200, headers=owner_headers)):
resp = controller.HEAD(req)
self.assertEquals(2, resp.status_int // 100)
for key in owner_headers:
self.assertTrue(key in resp.headers)
 if __name__ == '__main__':
     unittest.main()


@@ -1,4 +1,4 @@
-# Copyright (c) 2010-2012 OpenStack, LLC.
+# Copyright (c) 2010-2012 OpenStack Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -16,9 +16,9 @@
 import unittest
 from mock import patch
 from swift.proxy.controllers.base import headers_to_container_info, \
-    headers_to_account_info, get_container_info, get_container_memcache_key, \
-    get_account_info, get_account_memcache_key, _get_cache_key, get_info, \
-    Controller
+    headers_to_account_info, headers_to_object_info, get_container_info, \
+    get_container_memcache_key, get_account_info, get_account_memcache_key, \
+    get_object_env_key, _get_cache_key, get_info, get_object_info, Controller
 from swift.common.swob import Request
 from swift.common.utils import split_path
 from test.unit import fake_http_connect, FakeRing, FakeMemcache
@@ -29,12 +29,18 @@ FakeResponse_status_int = 201
 class FakeResponse(object):

-    def __init__(self, headers, env, account, container):
+    def __init__(self, headers, env, account, container, obj):
         self.headers = headers
         self.status_int = FakeResponse_status_int
         self.environ = env
-        cache_key, env_key = _get_cache_key(account, container)
-        if container:
+        if obj:
+            env_key = get_object_env_key(account, container, obj)
+        else:
+            cache_key, env_key = _get_cache_key(account, container)
+        if account and container and obj:
+            info = headers_to_object_info(headers, FakeResponse_status_int)
+        elif account and container:
             info = headers_to_container_info(headers, FakeResponse_status_int)
         else:
             info = headers_to_account_info(headers, FakeResponse_status_int)
@@ -42,18 +48,27 @@ class FakeResponse(object):
 class FakeRequest(object):

-    def __init__(self, env, path):
+    def __init__(self, env, path, swift_source=None):
         self.environ = env
         (version, account, container, obj) = split_path(path, 2, 4, True)
         self.account = account
         self.container = container
-        stype = container and 'container' or 'account'
-        self.headers = {'x-%s-object-count' % (stype): 1000,
-                        'x-%s-bytes-used' % (stype): 6666}
+        self.obj = obj
+        if obj:
+            stype = 'object'
+            self.headers = {'content-length': 5555,
+                            'content-type': 'text/plain'}
+        else:
+            stype = container and 'container' or 'account'
+            self.headers = {'x-%s-object-count' % (stype): 1000,
+                            'x-%s-bytes-used' % (stype): 6666}
+        if swift_source:
+            meta = 'x-%s-meta-fakerequest-swift-source' % stype
+            self.headers[meta] = swift_source

     def get_response(self, app):
         return FakeResponse(self.headers, self.environ, self.account,
-                            self.container)
+                            self.container, self.obj)

 class FakeCache(object):
@@ -73,6 +88,21 @@ class TestFuncs(unittest.TestCase):

     def test_GETorHEAD_base(self):
         base = Controller(self.app)
req = Request.blank('/a/c/o/with/slashes')
with patch('swift.proxy.controllers.base.'
'http_connect', fake_http_connect(200)):
resp = base.GETorHEAD_base(req, 'object', FakeRing(), 'part',
'/a/c/o/with/slashes')
self.assertTrue('swift.object/a/c/o/with/slashes' in resp.environ)
self.assertEqual(
resp.environ['swift.object/a/c/o/with/slashes']['status'], 200)
req = Request.blank('/a/c/o')
with patch('swift.proxy.controllers.base.'
'http_connect', fake_http_connect(200)):
resp = base.GETorHEAD_base(req, 'object', FakeRing(), 'part',
'/a/c/o')
self.assertTrue('swift.object/a/c/o' in resp.environ)
self.assertEqual(resp.environ['swift.object/a/c/o']['status'], 200)
         req = Request.blank('/a/c')
         with patch('swift.proxy.controllers.base.'
                    'http_connect', fake_http_connect(200)):
@@ -101,7 +131,7 @@ class TestFuncs(unittest.TestCase):
         self.assertEquals(info_a['bytes'], 6666)
         self.assertEquals(info_a['total_object_count'], 1000)
         # Make sure the env cache is set
-        self.assertEquals(env, {'swift.account/a': info_a})
+        self.assertEquals(env.get('swift.account/a'), info_a)

         # Do an env cached call to account
         info_a = get_info(None, env, 'a')
@@ -110,7 +140,7 @@ class TestFuncs(unittest.TestCase):
         self.assertEquals(info_a['bytes'], 6666)
         self.assertEquals(info_a['total_object_count'], 1000)
         # Make sure the env cache is set
-        self.assertEquals(env, {'swift.account/a': info_a})
+        self.assertEquals(env.get('swift.account/a'), info_a)

         # This time do env cached call to account and non cached to container
         with patch('swift.proxy.controllers.base.'
@@ -121,11 +151,12 @@ class TestFuncs(unittest.TestCase):
         self.assertEquals(info_c['bytes'], 6666)
         self.assertEquals(info_c['object_count'], 1000)
         # Make sure the env cache is set
-        self.assertEquals(env['swift.account/a'], info_a)
-        self.assertEquals(env['swift.container/a/c'], info_c)
-        # This time do a non cached call to account than non cached to container
-        env = {} # abandon previous call to env
+        self.assertEquals(env.get('swift.account/a'), info_a)
+        self.assertEquals(env.get('swift.container/a/c'), info_c)
+        # This time do a non cached call to account than non cached to
+        # container
+        env = {}  # abandon previous call to env
         with patch('swift.proxy.controllers.base.'
                    '_prepare_pre_auth_info_request', FakeRequest):
             info_c = get_info(None, env, 'a', 'c')
@@ -134,10 +165,11 @@ class TestFuncs(unittest.TestCase):
         self.assertEquals(info_c['bytes'], 6666)
         self.assertEquals(info_c['object_count'], 1000)
         # Make sure the env cache is set
-        self.assertEquals(env['swift.account/a'], info_a)
-        self.assertEquals(env['swift.container/a/c'], info_c)
-        # This time do an env cached call to container while account is not cached
+        self.assertEquals(env.get('swift.account/a'), info_a)
+        self.assertEquals(env.get('swift.container/a/c'), info_c)
+        # This time do an env cached call to container while account is not
+        # cached
         del(env['swift.account/a'])
         info_c = get_info(None, env, 'a', 'c')
         # Check that you got proper info
@@ -145,7 +177,7 @@ class TestFuncs(unittest.TestCase):
         self.assertEquals(info_c['bytes'], 6666)
         self.assertEquals(info_c['object_count'], 1000)
         # Make sure the env cache is set and account still not cached
-        self.assertEquals(env, {'swift.container/a/c': info_c})
+        self.assertEquals(env.get('swift.container/a/c'), info_c)

         # Do a non cached call to account not found with ret_not_found
         env = {}
@@ -161,7 +193,7 @@ class TestFuncs(unittest.TestCase):
         self.assertEquals(info_a['bytes'], 6666)
         self.assertEquals(info_a['total_object_count'], 1000)
         # Make sure the env cache is set
-        self.assertEquals(env, {'swift.account/a': info_a})
+        self.assertEquals(env.get('swift.account/a'), info_a)

         # Do a cached call to account not found with ret_not_found
         info_a = get_info(None, env, 'a', ret_not_found=True)
@@ -170,7 +202,7 @@ class TestFuncs(unittest.TestCase):
         self.assertEquals(info_a['bytes'], 6666)
         self.assertEquals(info_a['total_object_count'], 1000)
         # Make sure the env cache is set
-        self.assertEquals(env, {'swift.account/a': info_a})
+        self.assertEquals(env.get('swift.account/a'), info_a)

         # Do a non cached call to account not found without ret_not_found
         env = {}
@@ -191,6 +223,21 @@ class TestFuncs(unittest.TestCase):
         self.assertEquals(info_a, None)
         self.assertEquals(env['swift.account/a']['status'], 404)
def test_get_container_info_swift_source(self):
req = Request.blank("/v1/a/c", environ={'swift.cache': FakeCache({})})
with patch('swift.proxy.controllers.base.'
'_prepare_pre_auth_info_request', FakeRequest):
resp = get_container_info(req.environ, 'app', swift_source='MC')
self.assertEquals(resp['meta']['fakerequest-swift-source'], 'MC')
def test_get_object_info_swift_source(self):
req = Request.blank("/v1/a/c/o",
environ={'swift.cache': FakeCache({})})
with patch('swift.proxy.controllers.base.'
'_prepare_pre_auth_info_request', FakeRequest):
resp = get_object_info(req.environ, 'app', swift_source='LU')
self.assertEquals(resp['meta']['fakerequest-swift-source'], 'LU')
     def test_get_container_info_no_cache(self):
         req = Request.blank("/v1/AUTH_account/cont",
                             environ={'swift.cache': FakeCache({})})
@@ -217,11 +264,18 @@ class TestFuncs(unittest.TestCase):
         cache_key = get_container_memcache_key("account", "cont")
         env_key = 'swift.%s' % cache_key
         req = Request.blank("/v1/account/cont",
-                            environ={ env_key: {'bytes': 3867},
-                                      'swift.cache': FakeCache({})})
+                            environ={env_key: {'bytes': 3867},
+                                     'swift.cache': FakeCache({})})
         resp = get_container_info(req.environ, 'xxx')
         self.assertEquals(resp['bytes'], 3867)
def test_get_account_info_swift_source(self):
req = Request.blank("/v1/a", environ={'swift.cache': FakeCache({})})
with patch('swift.proxy.controllers.base.'
'_prepare_pre_auth_info_request', FakeRequest):
resp = get_account_info(req.environ, 'a', swift_source='MC')
self.assertEquals(resp['meta']['fakerequest-swift-source'], 'MC')
     def test_get_account_info_no_cache(self):
         req = Request.blank("/v1/AUTH_account",
                             environ={'swift.cache': FakeCache({})})
@@ -266,11 +320,33 @@ class TestFuncs(unittest.TestCase):
         cache_key = get_account_memcache_key("account")
         env_key = 'swift.%s' % cache_key
         req = Request.blank("/v1/account",
-                            environ={ env_key: {'bytes': 3867},
-                                      'swift.cache': FakeCache({})})
+                            environ={env_key: {'bytes': 3867},
+                                     'swift.cache': FakeCache({})})
         resp = get_account_info(req.environ, 'xxx')
         self.assertEquals(resp['bytes'], 3867)
def test_get_object_info_env(self):
cached = {'status': 200,
'length': 3333,
'type': 'application/json',
'meta': {}}
env_key = get_object_env_key("account", "cont", "obj")
req = Request.blank("/v1/account/cont/obj",
environ={env_key: cached,
'swift.cache': FakeCache({})})
resp = get_object_info(req.environ, 'xxx')
self.assertEquals(resp['length'], 3333)
self.assertEquals(resp['type'], 'application/json')
def test_get_object_info_no_env(self):
req = Request.blank("/v1/account/cont/obj",
environ={'swift.cache': FakeCache({})})
with patch('swift.proxy.controllers.base.'
'_prepare_pre_auth_info_request', FakeRequest):
resp = get_object_info(req.environ, 'xxx')
self.assertEquals(resp['length'], 5555)
self.assertEquals(resp['type'], 'text/plain')
     def test_headers_to_container_info_missing(self):
         resp = headers_to_container_info({}, 404)
         self.assertEquals(resp['status'], 404)
@@ -329,3 +405,31 @@ class TestFuncs(unittest.TestCase):
         self.assertEquals(
             resp,
             headers_to_account_info(headers.items(), 200))
def test_headers_to_object_info_missing(self):
resp = headers_to_object_info({}, 404)
self.assertEquals(resp['status'], 404)
self.assertEquals(resp['length'], None)
self.assertEquals(resp['etag'], None)
def test_headers_to_object_info_meta(self):
headers = {'X-Object-Meta-Whatevs': 14,
'x-object-meta-somethingelse': 0}
resp = headers_to_object_info(headers.items(), 200)
self.assertEquals(len(resp['meta']), 2)
self.assertEquals(resp['meta']['whatevs'], 14)
self.assertEquals(resp['meta']['somethingelse'], 0)
def test_headers_to_object_info_values(self):
headers = {
'content-length': '1024',
'content-type': 'application/json',
}
resp = headers_to_object_info(headers.items(), 200)
self.assertEquals(resp['length'], '1024')
self.assertEquals(resp['type'], 'application/json')
headers['x-unused-header'] = 'blahblahblah'
self.assertEquals(
resp,
headers_to_object_info(headers.items(), 200))
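The new tests above cover the per-request info caching that the Havana proxy keeps in the WSGI environ. A rough sketch of the object-info lookup path, assuming the key shape shown in test_GETorHEAD_base ('swift.object/a/c/o') and the get_object_env_key()/get_object_info() helpers imported above; the values are illustrative only and FakeCache is the test double defined in this file:

env_key = get_object_env_key("a", "c", "o")   # expected to be "swift.object/a/c/o"
env = {env_key: {'status': 200, 'length': 1024, 'type': 'text/plain', 'meta': {}},
       'swift.cache': FakeCache({})}
info = get_object_info(env, 'xxx')            # served from the environ cache
assert info['length'] == 1024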


@@ -1,4 +1,4 @@
-# Copyright (c) 2010-2012 OpenStack, LLC.
+# Copyright (c) 2010-2012 OpenStack Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -40,6 +40,28 @@ class TestContainerController(unittest.TestCase):
         self.assertEqual(headers_to_container_info(resp.headers),
                          resp.environ['swift.container/a/c'])
def test_swift_owner(self):
owner_headers = {
'x-container-read': 'value', 'x-container-write': 'value',
'x-container-sync-key': 'value', 'x-container-sync-to': 'value'}
controller = proxy_server.ContainerController(self.app, 'a', 'c')
req = Request.blank('/a/c')
with mock.patch('swift.proxy.controllers.base.http_connect',
fake_http_connect(200, 200, headers=owner_headers)):
resp = controller.HEAD(req)
self.assertEquals(2, resp.status_int // 100)
for key in owner_headers:
self.assertTrue(key not in resp.headers)
req = Request.blank('/a/c', environ={'swift_owner': True})
with mock.patch('swift.proxy.controllers.base.http_connect',
fake_http_connect(200, 200, headers=owner_headers)):
resp = controller.HEAD(req)
self.assertEquals(2, resp.status_int // 100)
for key in owner_headers:
self.assertTrue(key in resp.headers)
 if __name__ == '__main__':
     unittest.main()


@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# Copyright (c) 2010-2012 OpenStack, LLC.
+# Copyright (c) 2010-2012 OpenStack Foundation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -15,9 +15,28 @@
 # limitations under the License.

 import unittest
+from contextlib import contextmanager
+import mock
+import swift
 from swift.proxy import server as proxy_server
-from test.unit import FakeRing, FakeMemcache
+from test.unit import FakeRing, FakeMemcache, fake_http_connect
@contextmanager
def set_http_connect(*args, **kwargs):
old_connect = swift.proxy.controllers.base.http_connect
new_connect = fake_http_connect(*args, **kwargs)
swift.proxy.controllers.base.http_connect = new_connect
swift.proxy.controllers.obj.http_connect = new_connect
swift.proxy.controllers.account.http_connect = new_connect
swift.proxy.controllers.container.http_connect = new_connect
yield new_connect
swift.proxy.controllers.base.http_connect = old_connect
swift.proxy.controllers.obj.http_connect = old_connect
swift.proxy.controllers.account.http_connect = old_connect
swift.proxy.controllers.container.http_connect = old_connect
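The set_http_connect() helper above swaps a single fake http_connect into every proxy controller module for the duration of the with block and restores the originals afterwards. A hypothetical usage sketch, with controller and req assumed to be set up as in the tests that follow; each positional status code is consumed by one backend request made while the patch is active:

with set_http_connect(200, 201, 202):
    resp = controller.HEAD(req)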
 class TestObjControllerWriteAffinity(unittest.TestCase):
@@ -44,7 +63,8 @@ class TestObjControllerWriteAffinity(unittest.TestCase):
     def test_iter_nodes_local_first_moves_locals_first(self):
         controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
-        self.app.write_affinity_is_local_fn = (lambda node: node['region'] == 1)
+        self.app.write_affinity_is_local_fn = (
+            lambda node: node['region'] == 1)
         self.app.write_affinity_node_count = lambda ring: 4

         all_nodes = self.app.object_ring.get_part_nodes(1)
@@ -59,6 +79,44 @@ class TestObjControllerWriteAffinity(unittest.TestCase):
         # we don't skip any nodes
         self.assertEqual(sorted(all_nodes), sorted(local_first_nodes))
def test_connect_put_node_timeout(self):
controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
self.app.conn_timeout = 0.1
with set_http_connect(200, slow_connect=True):
nodes = [dict(ip='', port='', device='')]
res = controller._connect_put_node(nodes, '', '', {}, ('', ''))
self.assertTrue(res is None)
class TestObjController(unittest.TestCase):
def test_PUT_log_info(self):
# mock out enough to get to the area of the code we want to test
with mock.patch('swift.proxy.controllers.obj.check_object_creation',
mock.MagicMock(return_value=None)):
app = mock.MagicMock()
app.container_ring.get_nodes.return_value = (1, [2])
app.object_ring.get_nodes.return_value = (1, [2])
controller = proxy_server.ObjectController(app, 'a', 'c', 'o')
controller.container_info = mock.MagicMock(return_value={
'partition': 1,
'nodes': [{}],
'write_acl': None,
'sync_key': None,
'versions': None})
# and now test that we add the header to log_info
req = swift.common.swob.Request.blank('/v1/a/c/o')
req.headers['x-copy-from'] = 'somewhere'
controller.PUT(req)
self.assertEquals(
req.environ.get('swift.log_info'), ['x-copy-from:somewhere'])
# and then check that we don't do that for originating POSTs
req = swift.common.swob.Request.blank('/v1/a/c/o')
req.method = 'POST'
req.headers['x-copy-from'] = 'elsewhere'
controller.PUT(req)
self.assertEquals(req.environ.get('swift.log_info'), None)
 if __name__ == '__main__':
     unittest.main()

File diff suppressed because it is too large


@@ -10,7 +10,7 @@ setenv = VIRTUAL_ENV={envdir}
          NOSE_OPENSTACK_SHOW_ELAPSED=1
          NOSE_OPENSTACK_STDOUT=1
 deps =
-  https://launchpad.net/swift/havana/1.9.1/+download/swift-1.9.1.tar.gz
+  https://launchpad.net/swift/havana/1.10.0/+download/swift-1.10.0.tar.gz
   --download-cache={homedir}/.pipcache
   -r{toxinidir}/tools/test-requires
 changedir = {toxinidir}/test/unit