Cleanup db replicator probetest

Use a manager and config_number helper for clarity - then make one
last assertion on the final consistent state.

Change-Id: I5030c314076003d17c41b8b136bcbda252474bad
Related-Change-Id: Icdf0a936fc456c5462471938cbc365bd012b05d4
This commit is contained in:
Clay Gerrard 2017-06-07 16:21:08 -07:00
parent 843184f3fe
commit 6687d2fcd0

View File

@@ -20,7 +20,7 @@ from swiftclient import client, ClientException
from test.probe.common import kill_server, ReplProbeTest, start_server
from swift.common import direct_client, utils
from swift.common.manager import Server
from swift.common.manager import Manager
class TestDbUsyncReplicator(ReplProbeTest):
@@ -85,10 +85,8 @@ class TestDbUsyncReplicator(ReplProbeTest):
# runs in first, db file may be replicated by rsync to other
containers. In that case, the db file does not contain information about
# metadata, so metadata should be synced before replication
crepl = Server('container-replicator')
crepl.spawn(self.configs['container-replicator'][cnode['id'] + 1],
once=True)
crepl.interact()
Manager(['container-replicator']).once(
number=self.config_number(cnode))
expected_meta = {
'x-container-meta-a': '2',
@@ -124,6 +122,21 @@ class TestDbUsyncReplicator(ReplProbeTest):
self.assertNotIn(resp_headers.get('x-container-object-count'),
(None, '0', 0))
# and after full pass on remaining nodes
for node in cnodes:
Manager(['container-replicator']).once(
number=self.config_number(node))
# ... all is right
for node in cnodes + [cnode]:
resp_headers = direct_client.direct_head_container(
node, cpart, self.account, container)
for header, value in expected_meta.items():
self.assertIn(header, resp_headers)
self.assertEqual(value, resp_headers[header])
self.assertNotIn(resp_headers.get('x-container-object-count'),
(None, '0', 0))
class TestDbRsyncReplicator(TestDbUsyncReplicator):
def setUp(self):