Fixed probetests

* Fixed an issue with running probe tests against the latest
  python-swiftclient, which no longer pulls in eventlet; the tests now
  monkey-patch the socket module themselves (see the sketch after the
  first file's diff below)
* Fixed the replication server test so it no longer requires hard-coded
  device paths; the paths are now derived from each object server's
  config and the ring (see the sketch after the second file's diff)

Change-Id: Ibbf727ae99c0f3893ae58e270e2f879a1f618e49
Chuck Thier 2013-05-23 22:41:04 +00:00
parent b4b35c2561
commit 7a02c218bb
2 changed files with 92 additions and 77 deletions


@@ -20,6 +20,7 @@ from unittest import main, TestCase
from uuid import uuid4
from eventlet import GreenPool, Timeout
import eventlet
from sqlite3 import connect
from swiftclient import client
@@ -28,6 +29,7 @@ from swift.common.utils import hash_path, readconf
from test.probe.common import get_to_final_state, kill_nonprimary_server, \
kill_server, kill_servers, reset_environment, start_server
eventlet.monkey_patch(all=False, socket=True)
def get_db_file_path(obj_dir):
files = sorted(listdir(obj_dir), reverse=True)


@@ -23,9 +23,9 @@ import shutil
from swiftclient import client
#from swift.common import direct_client
from test.probe.common import kill_server, kill_servers, reset_environment, \
start_server
from swift.common.utils import readconf
def collect_info(path_list):
@@ -58,10 +58,10 @@ def find_max_occupancy_node(dir_list):
"""
count = 0
number = 0
lenght = 0
length = 0
for dirs in dir_list:
if lenght < len(dirs):
lenght = len(dirs)
if length < len(dirs):
length = len(dirs)
number = count
count += 1
return number
@@ -100,8 +100,15 @@ class TestReplicatorFunctions(TestCase):
# Check, that files not replicated.
# Delete file "hashes.pkl".
# Check, that all files were replicated.
path_list = ['/srv/1/node/sdb1/', '/srv/2/node/sdb2/',
'/srv/3/node/sdb3/', '/srv/4/node/sdb4/']
path_list = []
# Figure out where the devices are
for node_id in range(1,5):
conf = readconf(self.configs['object'] % node_id)
device_path = conf['app:object-server']['devices']
for dev in self.object_ring.devs:
if dev['port'] == int(conf['app:object-server']['bind_port']):
device = dev['device']
path_list.append(os.path.join(device_path, device))
# Put data to storage nodes
container = 'container-%s' % uuid4()
@@ -111,17 +118,18 @@ class TestReplicatorFunctions(TestCase):
client.put_object(self.url, self.token, container, obj, 'VERIFY')
# Get all data file information
(files_list, dirs_list) = collect_info(path_list)
num = find_max_occupancy_node(dirs_list)
(files_list, dir_list) = collect_info(path_list)
num = find_max_occupancy_node(dir_list)
test_node = path_list[num]
test_node_files_list = []
for files in files_list[num]:
if not files.endswith('.pending'):
test_node_files_list.append(files)
test_node_dirs_list = dirs_list[num]
test_node_dir_list = dir_list[num]
# Run all replicators
processes = []
try:
for num in xrange(1, 9):
for server in ['object-replicator',
'container-replicator',
@@ -133,23 +141,23 @@ class TestReplicatorFunctions(TestCase):
'forever']))
# Delete some files
for dirs in os.listdir(test_node):
shutil.rmtree(test_node+dirs)
for directory in os.listdir(test_node):
shutil.rmtree(os.path.join(test_node, directory))
self.assertFalse(os.listdir(test_node))
# We will keep trying these tests until they pass for up to 60s
begin = time.time()
while True:
(new_files_list, new_dirs_list) = collect_info([test_node])
(new_files_list, new_dir_list) = collect_info([test_node])
try:
# Check replicate files and dirs
# Check replicate files and dir
for files in test_node_files_list:
self.assertTrue(files in new_files_list[0])
for dirs in test_node_dirs_list:
self.assertTrue(dirs in new_dirs_list[0])
for dir in test_node_dir_list:
self.assertTrue(dir in new_dir_list[0])
break
except Exception:
if time.time() - begin > 60:
@@ -157,48 +165,53 @@ class TestReplicatorFunctions(TestCase):
time.sleep(1)
# Check behavior by deleting hashes.pkl file
for dirs in os.listdir(test_node + 'objects/'):
for input_dirs in os.listdir(test_node + 'objects/' + dirs):
eval_dirs = '/' + input_dirs
if os.path.isdir(test_node + 'objects/' + dirs + eval_dirs):
shutil.rmtree(test_node + 'objects/' + dirs + eval_dirs)
for directory in os.listdir(os.path.join(test_node, 'objects')):
for input_dir in os.listdir(os.path.join(
test_node, 'objects', directory)):
if os.path.isdir(os.path.join(
test_node, 'objects', directory, input_dir)):
shutil.rmtree(os.path.join(
test_node, 'objects', directory, input_dir))
# We will keep trying these tests until they pass for up to 60s
begin = time.time()
while True:
try:
for dirs in os.listdir(test_node + 'objects/'):
for input_dirs in os.listdir(
test_node + 'objects/' + dirs):
self.assertFalse(os.path.isdir(test_node + 'objects/' +
dirs + '/' + input_dirs))
for directory in os.listdir(os.path.join(
test_node, 'objects')):
for input_dir in os.listdir(os.path.join(
test_node, 'objects', directory)):
self.assertFalse(os.path.isdir(
os.path.join(test_node, 'objects',
directory, '/', input_dir)))
break
except Exception:
if time.time() - begin > 60:
raise
time.sleep(1)
for dirs in os.listdir(test_node + 'objects/'):
os.remove(test_node + 'objects/' + dirs + '/hashes.pkl')
for directory in os.listdir(os.path.join(test_node, 'objects')):
os.remove(os.path.join(
test_node, 'objects', directory, 'hashes.pkl'))
# We will keep trying these tests until they pass for up to 60s
begin = time.time()
while True:
try:
(new_files_list, new_dirs_list) = collect_info([test_node])
(new_files_list, new_dir_list) = collect_info([test_node])
# Check replicate files and dirs
for files in test_node_files_list:
self.assertTrue(files in new_files_list[0])
for dirs in test_node_dirs_list:
self.assertTrue(dirs in new_dirs_list[0])
for directory in test_node_dir_list:
self.assertTrue(directory in new_dir_list[0])
break
except Exception:
if time.time() - begin > 60:
raise
time.sleep(1)
finally:
for process in processes:
process.kill()
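
Why the second file changes: the old test hard-coded the four SAIO device
paths (/srv/N/node/sdbN), which breaks on any environment laid out
differently. The new code reads each object server's devices directory from
its config and matches the server to its ring entry by bind port to learn
the device name. A standalone sketch of the same lookup, outside the test
class; the config template and ring path are illustrative assumptions (the
probe tests get the equivalent values from reset_environment()):

    import os

    from swift.common.ring import Ring
    from swift.common.utils import readconf

    def object_device_paths(conf_template='/etc/swift/object-server/%d.conf',
                            ring_path='/etc/swift/object.ring.gz'):
        # conf_template and ring_path are assumptions for illustration; the
        # probe test itself uses self.configs['object'] and self.object_ring.
        object_ring = Ring(ring_path)
        paths = []
        for node_id in range(1, 5):
            conf = readconf(conf_template % node_id)
            server_conf = conf['app:object-server']
            devices_root = server_conf['devices']  # e.g. /srv/1/node
            # Match this server to its ring device by bind port so the
            # device name (e.g. sdb1) never has to be hard-coded.
            for dev in object_ring.devs:
                if dev and dev['port'] == int(server_conf['bind_port']):
                    paths.append(os.path.join(devices_root, dev['device']))
        return paths

On a default SAIO this yields the same four directories the old hard-coded
list pointed at, but it keeps working when the devices live elsewhere.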