Merge "Add Timestamp.now() helper"

Authored by Jenkins on 2017-07-18 03:27:50 +00:00; committed by Gerrit Code Review
commit 83b62b4f39
28 changed files with 215 additions and 217 deletions
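
At a glance, the change adds a Timestamp.now() classmethod (see the Timestamp class hunk below) and mechanically converts every Timestamp(time.time()) / Timestamp(time()) call site to it, which lets most modules drop their time imports. An illustrative before/after, not taken from any single file in this diff:

    # before: each caller read the clock itself
    from time import time
    ts = Timestamp(time())

    # after: the helper reads the clock
    ts = Timestamp.now()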

View File

@@ -17,7 +17,6 @@ Pluggable Back-end for Account Server
 """
 
 from uuid import uuid4
-import time
 
 import six.moves.cPickle as pickle
 import sqlite3
@@ -154,7 +153,7 @@ class AccountBroker(DatabaseBroker):
            conn.execute('''
                UPDATE account_stat SET account = ?, created_at = ?, id = ?,
                    put_timestamp = ?, status_changed_at = ?
-               ''', (self.account, Timestamp(time.time()).internal, str(uuid4()),
+               ''', (self.account, Timestamp.now().internal, str(uuid4()),
                      put_timestamp, put_timestamp))
 
     def create_policy_stat_table(self, conn):

View File

@@ -408,7 +408,7 @@ class AccountReaper(Daemon):
                    break
            successes = 0
            failures = 0
-           timestamp = Timestamp(time())
+           timestamp = Timestamp.now()
            for node in nodes:
                anode = account_nodes.pop()
                try:
@@ -483,7 +483,7 @@ class AccountReaper(Daemon):
        part, nodes = ring.get_nodes(account, container, obj)
        successes = 0
        failures = 0
-       timestamp = Timestamp(time())
+       timestamp = Timestamp.now()
        for node in nodes:
            cnode = next(cnodes)

View File

@@ -105,7 +105,7 @@ class AccountController(BaseStorageServer):
            return HTTPInsufficientStorage(drive=drive, request=req)
        if container:   # put account container
            if 'x-timestamp' not in req.headers:
-               timestamp = Timestamp(time.time())
+               timestamp = Timestamp.now()
            else:
                timestamp = valid_timestamp(req)
            pending_timeout = None

View File

@@ -14,7 +14,6 @@
 # limitations under the License.
 
 import json
-import time
 from xml.sax import saxutils
 
 from swift.common.swob import HTTPOk, HTTPNoContent
@@ -28,7 +27,7 @@ class FakeAccountBroker(object):
     like an account broker would for a real, empty account with no metadata.
     """
     def get_info(self):
-       now = Timestamp(time.time()).internal
+       now = Timestamp.now().internal
        return {'container_count': 0,
                'object_count': 0,
                'bytes_used': 0,

View File

@@ -476,7 +476,7 @@ class DatabaseBroker(object):
                        delete_timestamp=MAX(?, delete_timestamp)
                ''' % self.db_type, (created_at, put_timestamp, delete_timestamp))
                if old_status != self._is_deleted(conn):
-                   timestamp = Timestamp(time.time())
+                   timestamp = Timestamp.now()
                    self._update_status_changed_at(conn, timestamp.internal)
                conn.commit()

View File

@@ -21,7 +21,6 @@ through the proxy.
 import json
 import os
 import socket
-from time import time
 
 from eventlet import sleep, Timeout
 import six
@@ -122,7 +121,7 @@ def _get_direct_account_container(path, stype, node, part,
 def gen_headers(hdrs_in=None, add_ts=False):
     hdrs_out = HeaderKeyDict(hdrs_in) if hdrs_in else HeaderKeyDict()
     if add_ts:
-       hdrs_out['X-Timestamp'] = Timestamp(time()).internal
+       hdrs_out['X-Timestamp'] = Timestamp.now().internal
     hdrs_out['User-Agent'] = 'direct-client %s' % os.getpid()
     return hdrs_out

View File

@@ -909,6 +909,10 @@ class Timestamp(object):
        if self.timestamp >= 10000000000:
            raise ValueError('timestamp too large')
 
+    @classmethod
+    def now(cls, offset=0, delta=0):
+        return cls(time.time(), offset=offset, delta=delta)
+
    def __repr__(self):
        return INTERNAL_FORMAT % (self.timestamp, self.offset)
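
The new classmethod above simply forwards the current wall-clock time to the existing constructor, so Timestamp.now(offset=o, delta=d) behaves like Timestamp(time.time(), offset=o, delta=d). A minimal usage sketch (the X-Timestamp header mirrors the call sites changed elsewhere in this commit; the values are illustrative):

    from swift.common.utils import Timestamp

    ts = Timestamp.now()
    headers = {'X-Timestamp': ts.internal}   # e.g. '1500348470.12345'
    # offset/delta pass straight through to the constructor:
    ts_with_offset = Timestamp.now(offset=1)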

View File

@@ -18,7 +18,6 @@ Pluggable Back-ends for Container Server
 
 import os
 from uuid import uuid4
-import time
 
 import six
 import six.moves.cPickle as pickle
@@ -314,7 +313,7 @@ class ContainerBroker(DatabaseBroker):
            INSERT INTO container_info (account, container, created_at, id,
                put_timestamp, status_changed_at, storage_policy_index)
            VALUES (?, ?, ?, ?, ?, ?, ?);
-           """, (self.account, self.container, Timestamp(time.time()).internal,
+           """, (self.account, self.container, Timestamp.now().internal,
                  str(uuid4()), put_timestamp, put_timestamp,
                  storage_policy_index))
@@ -611,7 +610,7 @@ class ContainerBroker(DatabaseBroker):
        Update the container_stat policy_index and status_changed_at.
        """
        if timestamp is None:
-           timestamp = Timestamp(time.time()).internal
+           timestamp = Timestamp.now().internal
 
        def _setit(conn):
            conn.execute('''

View File

@@ -204,7 +204,7 @@ def add_to_reconciler_queue(container_ring, account, container, obj,
        # already been popped from the queue to be reprocessed, but
        # could potentially prevent out of order updates from making it
        # into the queue
-       x_timestamp = Timestamp(time.time()).internal
+       x_timestamp = Timestamp.now().internal
    else:
        x_timestamp = obj_timestamp
    q_op_type = get_reconciler_content_type(op)

View File

@@ -16,7 +16,6 @@
 import os
 import itertools
 import json
-import time
 from collections import defaultdict
 
 from eventlet import Timeout
@@ -67,7 +66,7 @@ class ContainerReplicator(db_replicator.Replicator):
        if is_success(response.status):
            remote_info = json.loads(response.data)
            if incorrect_policy_index(info, remote_info):
-               status_changed_at = Timestamp(time.time())
+               status_changed_at = Timestamp.now()
                broker.set_storage_policy_index(
                    remote_info['storage_policy_index'],
                    timestamp=status_changed_at.internal)
@@ -284,7 +283,7 @@ class ContainerReplicatorRpc(db_replicator.ReplicatorRpc):
        """
        info = broker.get_replication_info()
        if incorrect_policy_index(info, remote_info):
-           status_changed_at = Timestamp(time.time()).internal
+           status_changed_at = Timestamp.now().internal
            broker.set_storage_policy_index(
                remote_info['storage_policy_index'],
                timestamp=status_changed_at)

View File

@@ -1472,7 +1472,7 @@ class Controller(object):
        headers = HeaderKeyDict(additional) if additional else HeaderKeyDict()
        if transfer:
            self.transfer_headers(orig_req.headers, headers)
-       headers.setdefault('x-timestamp', Timestamp(time.time()).internal)
+       headers.setdefault('x-timestamp', Timestamp.now().internal)
        if orig_req:
            referer = orig_req.as_referer()
        else:
@@ -1780,7 +1780,7 @@ class Controller(object):
        """
        partition, nodes = self.app.account_ring.get_nodes(account)
        path = '/%s' % account
-       headers = {'X-Timestamp': Timestamp(time.time()).internal,
+       headers = {'X-Timestamp': Timestamp.now().internal,
                   'X-Trans-Id': self.trans_id,
                   'X-Openstack-Request-Id': self.trans_id,
                   'Connection': 'close'}

View File

@@ -14,7 +14,6 @@
 # limitations under the License.
 
 from swift import gettext_ as _
-import time
 
 from six.moves.urllib.parse import unquote
 from swift.common.utils import public, csv_append, Timestamp
@@ -234,7 +233,7 @@ class ContainerController(Controller):
    def _backend_requests(self, req, n_outgoing, account_partition, accounts,
                          policy_index=None):
-       additional = {'X-Timestamp': Timestamp(time.time()).internal}
+       additional = {'X-Timestamp': Timestamp.now().internal}
        if policy_index is None:
            additional['X-Backend-Storage-Policy-Default'] = \
                int(POLICIES.default)

View File

@@ -260,7 +260,7 @@ class BaseObjectController(Controller):
        partition, nodes = obj_ring.get_nodes(
            self.account_name, self.container_name, self.object_name)
 
-       req.headers['X-Timestamp'] = Timestamp(time.time()).internal
+       req.headers['X-Timestamp'] = Timestamp.now().internal
 
        headers = self._backend_requests(
            req, len(nodes), container_partition, container_nodes,
@@ -467,7 +467,7 @@ class BaseObjectController(Controller):
                    'was %r' % req.headers['x-timestamp'])
            req.headers['X-Timestamp'] = req_timestamp.internal
        else:
-           req.headers['X-Timestamp'] = Timestamp(time.time()).internal
+           req.headers['X-Timestamp'] = Timestamp.now().internal
        return None
 
    def _check_failure_put_connections(self, putters, req, min_conns):
@@ -780,7 +780,7 @@ class BaseObjectController(Controller):
                    'was %r' % req.headers['x-timestamp'])
            req.headers['X-Timestamp'] = req_timestamp.internal
        else:
-           req.headers['X-Timestamp'] = Timestamp(time.time()).internal
+           req.headers['X-Timestamp'] = Timestamp.now().internal
 
        # Include local handoff nodes if write-affinity is enabled.
        node_count = len(nodes)

View File

@@ -1169,7 +1169,7 @@ def make_ec_object_stub(test_body, policy, timestamp):
    segment_size = policy.ec_segment_size
    test_body = test_body or (
        'test' * segment_size)[:-random.randint(1, 1000)]
-   timestamp = timestamp or utils.Timestamp(time.time())
+   timestamp = timestamp or utils.Timestamp.now()
    etag = md5(test_body).hexdigest()
 
    ec_archive_bodies = encode_frag_archive_bodies(policy, test_body)

View File

@@ -94,33 +94,33 @@ class TestAccountBroker(unittest.TestCase):
        broker = AccountBroker(':memory:', account='a')
        broker.initialize(Timestamp('1').internal)
        self.assertTrue(broker.empty())
-       broker.put_container('o', Timestamp(time()).internal, 0, 0, 0,
+       broker.put_container('o', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
        self.assertTrue(not broker.empty())
        sleep(.00001)
-       broker.put_container('o', 0, Timestamp(time()).internal, 0, 0,
+       broker.put_container('o', 0, Timestamp.now().internal, 0, 0,
                             POLICIES.default.idx)
        self.assertTrue(broker.empty())
 
    def test_is_status_deleted(self):
        # Test AccountBroker.is_status_deleted
        broker1 = AccountBroker(':memory:', account='a')
-       broker1.initialize(Timestamp(time()).internal)
+       broker1.initialize(Timestamp.now().internal)
        self.assertTrue(not broker1.is_status_deleted())
-       broker1.delete_db(Timestamp(time()).internal)
+       broker1.delete_db(Timestamp.now().internal)
        self.assertTrue(broker1.is_status_deleted())
        broker2 = AccountBroker(':memory:', account='a')
-       broker2.initialize(Timestamp(time()).internal)
+       broker2.initialize(Timestamp.now().internal)
        # Set delete_timestamp greater than put_timestamp
        broker2.merge_timestamps(
-           time(), Timestamp(time()).internal,
+           time(), Timestamp.now().internal,
            Timestamp(time() + 999).internal)
        self.assertTrue(broker2.is_status_deleted())
 
    def test_reclaim(self):
        broker = AccountBroker(':memory:', account='test_account')
        broker.initialize(Timestamp('1').internal)
-       broker.put_container('c', Timestamp(time()).internal, 0, 0, 0,
+       broker.put_container('c', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
        with broker.get() as conn:
            self.assertEqual(conn.execute(
@@ -138,7 +138,7 @@ class TestAccountBroker(unittest.TestCase):
                "SELECT count(*) FROM container "
                "WHERE deleted = 1").fetchone()[0], 0)
        sleep(.00001)
-       broker.put_container('c', 0, Timestamp(time()).internal, 0, 0,
+       broker.put_container('c', 0, Timestamp.now().internal, 0, 0,
                             POLICIES.default.idx)
        with broker.get() as conn:
            self.assertEqual(conn.execute(
@@ -156,7 +156,7 @@ class TestAccountBroker(unittest.TestCase):
                "SELECT count(*) FROM container "
                "WHERE deleted = 1").fetchone()[0], 1)
        sleep(.00001)
-       broker.reclaim(Timestamp(time()).internal, time())
+       broker.reclaim(Timestamp.now().internal, time())
        with broker.get() as conn:
            self.assertEqual(conn.execute(
                "SELECT count(*) FROM container "
@@ -168,10 +168,10 @@ class TestAccountBroker(unittest.TestCase):
        broker.put_container('x', 0, 0, 0, 0, POLICIES.default.idx)
        broker.put_container('y', 0, 0, 0, 0, POLICIES.default.idx)
        broker.put_container('z', 0, 0, 0, 0, POLICIES.default.idx)
-       broker.reclaim(Timestamp(time()).internal, time())
+       broker.reclaim(Timestamp.now().internal, time())
        # Now delete the account
-       broker.delete_db(Timestamp(time()).internal)
-       broker.reclaim(Timestamp(time()).internal, time())
+       broker.delete_db(Timestamp.now().internal)
+       broker.reclaim(Timestamp.now().internal, time())
 
    def test_delete_db_status(self):
        ts = (Timestamp(t).internal for t in itertools.count(int(time())))
@@ -201,7 +201,7 @@ class TestAccountBroker(unittest.TestCase):
        # Test AccountBroker.delete_container
        broker = AccountBroker(':memory:', account='a')
        broker.initialize(Timestamp('1').internal)
-       broker.put_container('o', Timestamp(time()).internal, 0, 0, 0,
+       broker.put_container('o', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
        with broker.get() as conn:
            self.assertEqual(conn.execute(
@@ -211,7 +211,7 @@ class TestAccountBroker(unittest.TestCase):
                "SELECT count(*) FROM container "
                "WHERE deleted = 1").fetchone()[0], 0)
        sleep(.00001)
-       broker.put_container('o', 0, Timestamp(time()).internal, 0, 0,
+       broker.put_container('o', 0, Timestamp.now().internal, 0, 0,
                             POLICIES.default.idx)
        with broker.get() as conn:
            self.assertEqual(conn.execute(
@@ -227,7 +227,7 @@ class TestAccountBroker(unittest.TestCase):
        broker.initialize(Timestamp('1').internal)
 
        # Create initial container
-       timestamp = Timestamp(time()).internal
+       timestamp = Timestamp.now().internal
        broker.put_container('"{<container \'&\' name>}"', timestamp, 0, 0, 0,
                             POLICIES.default.idx)
        with broker.get() as conn:
@@ -255,7 +255,7 @@ class TestAccountBroker(unittest.TestCase):
 
        # Put new event
        sleep(.00001)
-       timestamp = Timestamp(time()).internal
+       timestamp = Timestamp.now().internal
        broker.put_container('"{<container \'&\' name>}"', timestamp, 0, 0, 0,
                             POLICIES.default.idx)
        with broker.get() as conn:
@@ -301,7 +301,7 @@ class TestAccountBroker(unittest.TestCase):
 
        # Put new delete event
        sleep(.00001)
-       timestamp = Timestamp(time()).internal
+       timestamp = Timestamp.now().internal
        broker.put_container('"{<container \'&\' name>}"', 0, timestamp, 0, 0,
                             POLICIES.default.idx)
        with broker.get() as conn:
@@ -316,7 +316,7 @@ class TestAccountBroker(unittest.TestCase):
 
        # Put new event
        sleep(.00001)
-       timestamp = Timestamp(time()).internal
+       timestamp = Timestamp.now().internal
        broker.put_container('"{<container \'&\' name>}"', timestamp, 0, 0, 0,
                             POLICIES.default.idx)
        with broker.get() as conn:
@@ -347,31 +347,31 @@ class TestAccountBroker(unittest.TestCase):
        info = broker.get_info()
        self.assertEqual(info['container_count'], 0)
 
-       broker.put_container('c1', Timestamp(time()).internal, 0, 0, 0,
+       broker.put_container('c1', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
        info = broker.get_info()
        self.assertEqual(info['container_count'], 1)
 
        sleep(.00001)
-       broker.put_container('c2', Timestamp(time()).internal, 0, 0, 0,
+       broker.put_container('c2', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
        info = broker.get_info()
        self.assertEqual(info['container_count'], 2)
 
        sleep(.00001)
-       broker.put_container('c2', Timestamp(time()).internal, 0, 0, 0,
+       broker.put_container('c2', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
        info = broker.get_info()
        self.assertEqual(info['container_count'], 2)
 
        sleep(.00001)
-       broker.put_container('c1', 0, Timestamp(time()).internal, 0, 0,
+       broker.put_container('c1', 0, Timestamp.now().internal, 0, 0,
                             POLICIES.default.idx)
        info = broker.get_info()
        self.assertEqual(info['container_count'], 1)
 
        sleep(.00001)
-       broker.put_container('c2', 0, Timestamp(time()).internal, 0, 0,
+       broker.put_container('c2', 0, Timestamp.now().internal, 0, 0,
                             POLICIES.default.idx)
        info = broker.get_info()
        self.assertEqual(info['container_count'], 0)
@@ -383,16 +383,16 @@ class TestAccountBroker(unittest.TestCase):
        for cont1 in range(4):
            for cont2 in range(125):
                broker.put_container('%d-%04d' % (cont1, cont2),
-                                    Timestamp(time()).internal, 0, 0, 0,
+                                    Timestamp.now().internal, 0, 0, 0,
                                     POLICIES.default.idx)
        for cont in range(125):
            broker.put_container('2-0051-%04d' % cont,
-                                Timestamp(time()).internal, 0, 0, 0,
+                                Timestamp.now().internal, 0, 0, 0,
                                 POLICIES.default.idx)
        for cont in range(125):
            broker.put_container('3-%04d-0049' % cont,
-                                Timestamp(time()).internal, 0, 0, 0,
+                                Timestamp.now().internal, 0, 0, 0,
                                 POLICIES.default.idx)
 
        listing = broker.list_containers_iter(100, '', None, None, '')
@@ -485,7 +485,7 @@ class TestAccountBroker(unittest.TestCase):
                          '3-0047-', '3-0048', '3-0048-', '3-0049',
                          '3-0049-', '3-0050'])
 
-       broker.put_container('3-0049-', Timestamp(time()).internal, 0, 0, 0,
+       broker.put_container('3-0049-', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
        listing = broker.list_containers_iter(10, '3-0048', None, None, None)
        self.assertEqual(len(listing), 10)
@@ -673,30 +673,30 @@ class TestAccountBroker(unittest.TestCase):
        # account that has an odd container with a trailing delimiter
        broker = AccountBroker(':memory:', account='a')
        broker.initialize(Timestamp('1').internal)
-       broker.put_container('a', Timestamp(time()).internal, 0, 0, 0,
+       broker.put_container('a', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
-       broker.put_container('a-', Timestamp(time()).internal, 0, 0, 0,
+       broker.put_container('a-', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
-       broker.put_container('a-a', Timestamp(time()).internal, 0, 0, 0,
+       broker.put_container('a-a', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
-       broker.put_container('a-a-a', Timestamp(time()).internal, 0, 0, 0,
+       broker.put_container('a-a-a', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
-       broker.put_container('a-a-b', Timestamp(time()).internal, 0, 0, 0,
+       broker.put_container('a-a-b', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
-       broker.put_container('a-b', Timestamp(time()).internal, 0, 0, 0,
+       broker.put_container('a-b', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
        # NB: ord(".") == ord("-") + 1
-       broker.put_container('a.', Timestamp(time()).internal, 0, 0, 0,
+       broker.put_container('a.', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
-       broker.put_container('a.b', Timestamp(time()).internal, 0, 0, 0,
+       broker.put_container('a.b', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
-       broker.put_container('b', Timestamp(time()).internal, 0, 0, 0,
+       broker.put_container('b', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
-       broker.put_container('b-a', Timestamp(time()).internal, 0, 0, 0,
+       broker.put_container('b-a', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
-       broker.put_container('b-b', Timestamp(time()).internal, 0, 0, 0,
+       broker.put_container('b-b', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
-       broker.put_container('c', Timestamp(time()).internal, 0, 0, 0,
+       broker.put_container('c', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
        listing = broker.list_containers_iter(15, None, None, None, None)
        self.assertEqual([row[0] for row in listing],
@@ -1022,7 +1022,7 @@ def premetadata_create_account_stat_table(self, conn, put_timestamp):
    conn.execute('''
        UPDATE account_stat SET account = ?, created_at = ?, id = ?,
            put_timestamp = ?
-       ''', (self.account, Timestamp(time()).internal, str(uuid4()),
+       ''', (self.account, Timestamp.now().internal, str(uuid4()),
             put_timestamp))
@@ -1191,7 +1191,7 @@ class TestAccountBrokerBeforeSPI(TestAccountBroker):
            self.assertEqual(len(stats), 0)
 
        # now do a PUT to create the table
-       broker.put_container('o', Timestamp(time()).internal, 0, 0, 0,
+       broker.put_container('o', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
        broker._commit_puts_stale_ok()
@@ -1225,7 +1225,7 @@ class TestAccountBrokerBeforeSPI(TestAccountBroker):
                     'from container table!')
 
        # manually insert an existing row to avoid migration
-       timestamp = Timestamp(time()).internal
+       timestamp = Timestamp.now().internal
        with broker.get() as conn:
            conn.execute('''
                INSERT INTO container (name, put_timestamp,
@@ -1247,7 +1247,7 @@ class TestAccountBrokerBeforeSPI(TestAccountBroker):
        # which will update the DB schema as well as update policy_stats
        # for legacy containers in the DB (those without an SPI)
        other_policy = [p for p in POLICIES if p.idx != 0][0]
-       broker.put_container('test_second', Timestamp(time()).internal,
+       broker.put_container('test_second', Timestamp.now().internal,
                             0, 3, 4, other_policy.idx)
        broker._commit_puts_stale_ok()

View File

@@ -28,7 +28,7 @@ import eventlet
 from swift.account import reaper
 from swift.account.backend import DATADIR
 from swift.common.exceptions import ClientException
-from swift.common.utils import normalize_timestamp
+from swift.common.utils import normalize_timestamp, Timestamp
 
 from test import unit
 from swift.common.storage_policy import StoragePolicy, POLICIES
@@ -333,11 +333,11 @@ class TestReaper(unittest.TestCase):
        for policy in POLICIES:
            r.reset_stats()
            with patch(mock_path) as fake_direct_delete:
-               with patch('swift.account.reaper.time') as mock_time:
-                   mock_time.return_value = 1429117638.86767
+               with patch('swift.common.utils.Timestamp.now') as mock_now:
+                   mock_now.return_value = Timestamp(1429117638.86767)
                    r.reap_object('a', 'c', 'partition', cont_nodes, 'o',
                                  policy.idx)
-                   mock_time.assert_called_once_with()
+                   mock_now.assert_called_once_with()
            for i, call_args in enumerate(
                    fake_direct_delete.call_args_list):
                cnode = cont_nodes[i % len(cont_nodes)]
@@ -439,8 +439,9 @@ class TestReaper(unittest.TestCase):
                return headers, obj_list
 
            mocks['direct_get_container'].side_effect = fake_get_container
-           with patch('swift.account.reaper.time') as mock_time:
-               mock_time.side_effect = [1429117638.86767, 1429117639.67676]
+           with patch('swift.common.utils.Timestamp.now') as mock_now:
+               mock_now.side_effect = [Timestamp(1429117638.86767),
+                                       Timestamp(1429117639.67676)]
                r.reap_container('a', 'partition', acc_nodes, 'c')
 
            # verify calls to direct_delete_object
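
Because current-time reads now funnel through a single classmethod, these reaper tests can freeze the clock by patching one well-known target, swift.common.utils.Timestamp.now, instead of each consumer module's own time import. A minimal sketch of the same pattern outside the test harness (the frozen value is the one used above; everything else is illustrative):

    import mock  # unittest.mock on Python 3
    from swift.common.utils import Timestamp

    with mock.patch('swift.common.utils.Timestamp.now') as mock_now:
        mock_now.return_value = Timestamp(1429117638.86767)
        # any code under test that calls Timestamp.now() sees the frozen value
        assert Timestamp.now().internal == '1429117638.86767'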

View File

@@ -400,7 +400,7 @@ class TestConstraints(unittest.TestCase):
                          constraints.valid_timestamp,
                          Request.blank('/', headers={
                              'X-Timestamp': 'asdf'}))
-       timestamp = utils.Timestamp(time.time())
+       timestamp = utils.Timestamp.now()
        req = Request.blank('/', headers={'X-Timestamp': timestamp.internal})
        self.assertEqual(timestamp, constraints.valid_timestamp(req))
        req = Request.blank('/', headers={'X-Timestamp': timestamp.normal})

View File

@@ -228,7 +228,7 @@ class ExampleBroker(DatabaseBroker):
            INSERT INTO test_stat (
                account, created_at, id, put_timestamp, status_changed_at)
            VALUES (?, ?, ?, ?, ?);
-           """, (self.account, Timestamp(time.time()).internal, str(uuid4()),
+           """, (self.account, Timestamp.now().internal, str(uuid4()),
                  put_timestamp, put_timestamp))
 
    def merge_items(self, item_list):
@@ -552,7 +552,7 @@ class TestExampleBroker(unittest.TestCase):
 
    def test_get_syncs(self):
        broker = self.broker_class(':memory:', account='a', container='c')
-       broker.initialize(Timestamp(time.time()).internal,
+       broker.initialize(Timestamp.now().internal,
                          storage_policy_index=int(self.policy))
        self.assertEqual([], broker.get_syncs())
        broker.merge_syncs([{'sync_point': 1, 'remote_id': 'remote1'}])

View File

@@ -336,7 +336,7 @@ class TestDirectClient(unittest.TestCase):
        self.assertTrue('HEAD' in str(err))
 
    def test_direct_head_container_deleted(self):
-       important_timestamp = Timestamp(time.time()).internal
+       important_timestamp = Timestamp.now().internal
        headers = HeaderKeyDict({'X-Backend-Important-Timestamp':
                                 important_timestamp})
@@ -551,7 +551,7 @@ class TestDirectClient(unittest.TestCase):
        self.assertTrue('HEAD' in str(err))
 
    def test_direct_head_object_not_found(self):
-       important_timestamp = Timestamp(time.time()).internal
+       important_timestamp = Timestamp.now().internal
        stub_headers = {'X-Backend-Important-Timestamp': important_timestamp}
        with mocked_http_conn(404, headers=stub_headers) as conn:
            try:

View File

@@ -178,7 +178,7 @@ class TestTimestamp(unittest.TestCase):
        self.assertRaises(ValueError, utils.Timestamp, time.time(), offset=-1)
 
    def test_invalid_string_conversion(self):
-       t = utils.Timestamp(time.time())
+       t = utils.Timestamp.now()
        self.assertRaises(TypeError, str, t)
 
    def test_offset_limit(self):

View File

@@ -97,18 +97,18 @@ class TestContainerBroker(unittest.TestCase):
        broker = ContainerBroker(':memory:', account='a', container='c')
        broker.initialize(Timestamp('1').internal, 0)
        self.assertTrue(broker.empty())
-       broker.put_object('o', Timestamp(time()).internal, 0, 'text/plain',
+       broker.put_object('o', Timestamp.now().internal, 0, 'text/plain',
                          'd41d8cd98f00b204e9800998ecf8427e')
        self.assertTrue(not broker.empty())
        sleep(.00001)
-       broker.delete_object('o', Timestamp(time()).internal)
+       broker.delete_object('o', Timestamp.now().internal)
        self.assertTrue(broker.empty())
 
    def test_reclaim(self):
        broker = ContainerBroker(':memory:', account='test_account',
                                 container='test_container')
        broker.initialize(Timestamp('1').internal, 0)
-       broker.put_object('o', Timestamp(time()).internal, 0, 'text/plain',
+       broker.put_object('o', Timestamp.now().internal, 0, 'text/plain',
                          'd41d8cd98f00b204e9800998ecf8427e')
        with broker.get() as conn:
            self.assertEqual(conn.execute(
@@ -126,7 +126,7 @@ class TestContainerBroker(unittest.TestCase):
                "SELECT count(*) FROM object "
                "WHERE deleted = 1").fetchone()[0], 0)
        sleep(.00001)
-       broker.delete_object('o', Timestamp(time()).internal)
+       broker.delete_object('o', Timestamp.now().internal)
        with broker.get() as conn:
            self.assertEqual(conn.execute(
                "SELECT count(*) FROM object "
@@ -143,7 +143,7 @@ class TestContainerBroker(unittest.TestCase):
                "SELECT count(*) FROM object "
                "WHERE deleted = 1").fetchone()[0], 1)
        sleep(.00001)
-       broker.reclaim(Timestamp(time()).internal, time())
+       broker.reclaim(Timestamp.now().internal, time())
        with broker.get() as conn:
            self.assertEqual(conn.execute(
                "SELECT count(*) FROM object "
@@ -152,17 +152,17 @@ class TestContainerBroker(unittest.TestCase):
                "SELECT count(*) FROM object "
                "WHERE deleted = 1").fetchone()[0], 0)
        # Test the return values of reclaim()
-       broker.put_object('w', Timestamp(time()).internal, 0, 'text/plain',
+       broker.put_object('w', Timestamp.now().internal, 0, 'text/plain',
                          'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('x', Timestamp(time()).internal, 0, 'text/plain',
+       broker.put_object('x', Timestamp.now().internal, 0, 'text/plain',
                          'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('y', Timestamp(time()).internal, 0, 'text/plain',
+       broker.put_object('y', Timestamp.now().internal, 0, 'text/plain',
                          'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('z', Timestamp(time()).internal, 0, 'text/plain',
+       broker.put_object('z', Timestamp.now().internal, 0, 'text/plain',
                          'd41d8cd98f00b204e9800998ecf8427e')
        # Test before deletion
-       broker.reclaim(Timestamp(time()).internal, time())
-       broker.delete_db(Timestamp(time()).internal)
+       broker.reclaim(Timestamp.now().internal, time())
+       broker.delete_db(Timestamp.now().internal)
 
    def test_get_info_is_deleted(self):
        start = int(time())
@@ -214,7 +214,7 @@ class TestContainerBroker(unittest.TestCase):
        # Test ContainerBroker.delete_object
        broker = ContainerBroker(':memory:', account='a', container='c')
        broker.initialize(Timestamp('1').internal, 0)
-       broker.put_object('o', Timestamp(time()).internal, 0, 'text/plain',
+       broker.put_object('o', Timestamp.now().internal, 0, 'text/plain',
                          'd41d8cd98f00b204e9800998ecf8427e')
        with broker.get() as conn:
            self.assertEqual(conn.execute(
@@ -224,7 +224,7 @@ class TestContainerBroker(unittest.TestCase):
                "SELECT count(*) FROM object "
                "WHERE deleted = 1").fetchone()[0], 0)
        sleep(.00001)
-       broker.delete_object('o', Timestamp(time()).internal)
+       broker.delete_object('o', Timestamp.now().internal)
        with broker.get() as conn:
            self.assertEqual(conn.execute(
                "SELECT count(*) FROM object "
@@ -239,7 +239,7 @@ class TestContainerBroker(unittest.TestCase):
        broker.initialize(Timestamp('1').internal, 0)
 
        # Create initial object
-       timestamp = Timestamp(time()).internal
+       timestamp = Timestamp.now().internal
        broker.put_object('"{<object \'&\' name>}"', timestamp, 123,
                          'application/x-test',
                          '5af83e3196bf99f440f31f2e1a6c9afe')
@@ -283,7 +283,7 @@ class TestContainerBroker(unittest.TestCase):
 
        # Put new event
        sleep(.00001)
-       timestamp = Timestamp(time()).internal
+       timestamp = Timestamp.now().internal
        broker.put_object('"{<object \'&\' name>}"', timestamp, 124,
                          'application/x-test',
                          'aa0749bacbc79ec65fe206943d8fe449')
@@ -349,7 +349,7 @@ class TestContainerBroker(unittest.TestCase):
 
        # Put new delete event
        sleep(.00001)
-       timestamp = Timestamp(time()).internal
+       timestamp = Timestamp.now().internal
        broker.put_object('"{<object \'&\' name>}"', timestamp, 0, '', '',
                          deleted=1)
        with broker.get() as conn:
@@ -363,7 +363,7 @@ class TestContainerBroker(unittest.TestCase):
 
        # Put new event
        sleep(.00001)
-       timestamp = Timestamp(time()).internal
+       timestamp = Timestamp.now().internal
        broker.put_object('"{<object \'&\' name>}"', timestamp, 123,
                          'application/x-test',
                          '5af83e3196bf99f440f31f2e1a6c9afe')
@@ -386,12 +386,12 @@ class TestContainerBroker(unittest.TestCase):
 
        # We'll use this later
        sleep(.0001)
-       in_between_timestamp = Timestamp(time()).internal
+       in_between_timestamp = Timestamp.now().internal
 
        # New post event
        sleep(.0001)
        previous_timestamp = timestamp
-       timestamp = Timestamp(time()).internal
+       timestamp = Timestamp.now().internal
        with broker.get() as conn:
            self.assertEqual(conn.execute(
                "SELECT name FROM object").fetchone()[0],
@@ -946,7 +946,7 @@ class TestContainerBroker(unittest.TestCase):
        self.assertEqual(policy_stats[0]['bytes_used'], 0)
        self.assertEqual(policy_stats[0]['object_count'], 0)
 
-       broker.put_object('o1', Timestamp(time()).internal, 123, 'text/plain',
+       broker.put_object('o1', Timestamp.now().internal, 123, 'text/plain',
                          '5af83e3196bf99f440f31f2e1a6c9afe')
 
        info = broker.get_info()
@@ -983,34 +983,34 @@ class TestContainerBroker(unittest.TestCase):
        self.assertEqual(info['object_count'], 0)
        self.assertEqual(info['bytes_used'], 0)
 
-       broker.put_object('o1', Timestamp(time()).internal, 123, 'text/plain',
+       broker.put_object('o1', Timestamp.now().internal, 123, 'text/plain',
                          '5af83e3196bf99f440f31f2e1a6c9afe')
        info = broker.get_info()
        self.assertEqual(info['object_count'], 1)
        self.assertEqual(info['bytes_used'], 123)
 
        sleep(.00001)
-       broker.put_object('o2', Timestamp(time()).internal, 123, 'text/plain',
+       broker.put_object('o2', Timestamp.now().internal, 123, 'text/plain',
                          '5af83e3196bf99f440f31f2e1a6c9afe')
        info = broker.get_info()
        self.assertEqual(info['object_count'], 2)
        self.assertEqual(info['bytes_used'], 246)
 
        sleep(.00001)
-       broker.put_object('o2', Timestamp(time()).internal, 1000,
+       broker.put_object('o2', Timestamp.now().internal, 1000,
                          'text/plain', '5af83e3196bf99f440f31f2e1a6c9afe')
        info = broker.get_info()
        self.assertEqual(info['object_count'], 2)
        self.assertEqual(info['bytes_used'], 1123)
 
        sleep(.00001)
-       broker.delete_object('o1', Timestamp(time()).internal)
+       broker.delete_object('o1', Timestamp.now().internal)
        info = broker.get_info()
        self.assertEqual(info['object_count'], 1)
        self.assertEqual(info['bytes_used'], 1000)
 
        sleep(.00001)
-       broker.delete_object('o2', Timestamp(time()).internal)
+       broker.delete_object('o2', Timestamp.now().internal)
        info = broker.get_info()
        self.assertEqual(info['object_count'], 0)
        self.assertEqual(info['bytes_used'], 0)
@@ -1046,7 +1046,7 @@ class TestContainerBroker(unittest.TestCase):
        self.assertEqual(info['reported_object_count'], 0)
        self.assertEqual(info['reported_bytes_used'], 0)
 
-       broker.put_object('o1', Timestamp(time()).internal, 123, 'text/plain',
+       broker.put_object('o1', Timestamp.now().internal, 123, 'text/plain',
                          '5af83e3196bf99f440f31f2e1a6c9afe')
        info = broker.get_info()
        self.assertEqual(info['object_count'], 1)
@@ -1055,7 +1055,7 @@ class TestContainerBroker(unittest.TestCase):
        self.assertEqual(info['reported_bytes_used'], 0)
 
        sleep(.00001)
-       broker.put_object('o2', Timestamp(time()).internal, 123, 'text/plain',
+       broker.put_object('o2', Timestamp.now().internal, 123, 'text/plain',
                          '5af83e3196bf99f440f31f2e1a6c9afe')
        info = broker.get_info()
        self.assertEqual(info['object_count'], 2)
@@ -1064,7 +1064,7 @@ class TestContainerBroker(unittest.TestCase):
        self.assertEqual(info['reported_bytes_used'], 0)
 
        sleep(.00001)
-       broker.put_object('o2', Timestamp(time()).internal, 1000,
+       broker.put_object('o2', Timestamp.now().internal, 1000,
                          'text/plain', '5af83e3196bf99f440f31f2e1a6c9afe')
        info = broker.get_info()
        self.assertEqual(info['object_count'], 2)
@@ -1072,9 +1072,9 @@ class TestContainerBroker(unittest.TestCase):
        self.assertEqual(info['reported_object_count'], 0)
        self.assertEqual(info['reported_bytes_used'], 0)
 
-       put_timestamp = Timestamp(time()).internal
+       put_timestamp = Timestamp.now().internal
        sleep(.001)
-       delete_timestamp = Timestamp(time()).internal
+       delete_timestamp = Timestamp.now().internal
        broker.reported(put_timestamp, delete_timestamp, 2, 1123)
        info = broker.get_info()
        self.assertEqual(info['object_count'], 2)
@@ -1085,7 +1085,7 @@ class TestContainerBroker(unittest.TestCase):
        self.assertEqual(info['reported_bytes_used'], 1123)
 
        sleep(.00001)
-       broker.delete_object('o1', Timestamp(time()).internal)
+       broker.delete_object('o1', Timestamp.now().internal)
        info = broker.get_info()
        self.assertEqual(info['object_count'], 1)
        self.assertEqual(info['bytes_used'], 1000)
@@ -1093,7 +1093,7 @@ class TestContainerBroker(unittest.TestCase):
        self.assertEqual(info['reported_bytes_used'], 1123)
 
        sleep(.00001)
-       broker.delete_object('o2', Timestamp(time()).internal)
+       broker.delete_object('o2', Timestamp.now().internal)
        info = broker.get_info()
        self.assertEqual(info['object_count'], 0)
        self.assertEqual(info['bytes_used'], 0)
@@ -1107,16 +1107,16 @@ class TestContainerBroker(unittest.TestCase):
        for obj1 in range(4):
            for obj2 in range(125):
                broker.put_object('%d/%04d' % (obj1, obj2),
-                                 Timestamp(time()).internal, 0, 'text/plain',
+                                 Timestamp.now().internal, 0, 'text/plain',
                                  'd41d8cd98f00b204e9800998ecf8427e')
        for obj in range(125):
            broker.put_object('2/0051/%04d' % obj,
-                             Timestamp(time()).internal, 0, 'text/plain',
+                             Timestamp.now().internal, 0, 'text/plain',
                             'd41d8cd98f00b204e9800998ecf8427e')
        for obj in range(125):
            broker.put_object('3/%04d/0049' % obj,
-                             Timestamp(time()).internal, 0, 'text/plain',
+                             Timestamp.now().internal, 0, 'text/plain',
                             'd41d8cd98f00b204e9800998ecf8427e')
 
        listing = broker.list_objects_iter(100, '', None, None, '')
@@ -1215,7 +1215,7 @@ class TestContainerBroker(unittest.TestCase):
                          '3/0047/', '3/0048', '3/0048/', '3/0049',
                          '3/0049/', '3/0050'])
 
-       broker.put_object('3/0049/', Timestamp(time()).internal, 0,
+       broker.put_object('3/0049/', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
        listing = broker.list_objects_iter(10, '3/0048', None, None, None)
        self.assertEqual(len(listing), 10)
@@ -1407,16 +1407,16 @@ class TestContainerBroker(unittest.TestCase):
        for obj1 in range(4):
            for obj2 in range(125):
                broker.put_object('%d:%04d' % (obj1, obj2),
-                                 Timestamp(time()).internal, 0, 'text/plain',
+                                 Timestamp.now().internal, 0, 'text/plain',
                                  'd41d8cd98f00b204e9800998ecf8427e')
        for obj in range(125):
            broker.put_object('2:0051:%04d' % obj,
-                             Timestamp(time()).internal, 0, 'text/plain',
+                             Timestamp.now().internal, 0, 'text/plain',
                             'd41d8cd98f00b204e9800998ecf8427e')
        for obj in range(125):
            broker.put_object('3:%04d:0049' % obj,
-                             Timestamp(time()).internal, 0, 'text/plain',
+                             Timestamp.now().internal, 0, 'text/plain',
                             'd41d8cd98f00b204e9800998ecf8427e')
 
        listing = broker.list_objects_iter(100, '', None, None, '')
@@ -1481,7 +1481,7 @@ class TestContainerBroker(unittest.TestCase):
                          '3:0047:', '3:0048', '3:0048:', '3:0049',
                          '3:0049:', '3:0050'])
 
-       broker.put_object('3:0049:', Timestamp(time()).internal, 0,
+       broker.put_object('3:0049:', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
        listing = broker.list_objects_iter(10, '3:0048', None, None, None)
        self.assertEqual(len(listing), 10)
@@ -1602,49 +1602,49 @@ class TestContainerBroker(unittest.TestCase):
        # container that has an odd file with a trailing delimiter
        broker = ContainerBroker(':memory:', account='a', container='c')
        broker.initialize(Timestamp('1').internal, 0)
-       broker.put_object('a', Timestamp(time()).internal, 0,
+       broker.put_object('a', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('a/', Timestamp(time()).internal, 0,
+       broker.put_object('a/', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('a/a', Timestamp(time()).internal, 0,
+       broker.put_object('a/a', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('a/a/a', Timestamp(time()).internal, 0,
+       broker.put_object('a/a/a', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('a/a/b', Timestamp(time()).internal, 0,
+       broker.put_object('a/a/b', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('a/b', Timestamp(time()).internal, 0,
+       broker.put_object('a/b', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('b', Timestamp(time()).internal, 0,
+       broker.put_object('b', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('b/a', Timestamp(time()).internal, 0,
+       broker.put_object('b/a', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('b/b', Timestamp(time()).internal, 0,
+       broker.put_object('b/b', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('c', Timestamp(time()).internal, 0,
+       broker.put_object('c', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('a/0', Timestamp(time()).internal, 0,
+       broker.put_object('a/0', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('0', Timestamp(time()).internal, 0,
+       broker.put_object('0', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('0/', Timestamp(time()).internal, 0,
+       broker.put_object('0/', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('00', Timestamp(time()).internal, 0,
+       broker.put_object('00', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('0/0', Timestamp(time()).internal, 0,
+       broker.put_object('0/0', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('0/00', Timestamp(time()).internal, 0,
+       broker.put_object('0/00', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('0/1', Timestamp(time()).internal, 0,
+       broker.put_object('0/1', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('0/1/', Timestamp(time()).internal, 0,
+       broker.put_object('0/1/', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('0/1/0', Timestamp(time()).internal, 0,
+       broker.put_object('0/1/0', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('1', Timestamp(time()).internal, 0,
+       broker.put_object('1', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('1/', Timestamp(time()).internal, 0,
+       broker.put_object('1/', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('1/0', Timestamp(time()).internal, 0,
+       broker.put_object('1/0', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
        listing = broker.list_objects_iter(25, None, None, None, None)
        self.assertEqual(len(listing), 22)
@@ -1682,49 +1682,49 @@ class TestContainerBroker(unittest.TestCase):
        # container that has an odd file with a trailing delimiter
        broker = ContainerBroker(':memory:', account='a', container='c')
        broker.initialize(Timestamp('1').internal, 0)
-       broker.put_object('a', Timestamp(time()).internal, 0,
+       broker.put_object('a', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('a:', Timestamp(time()).internal, 0,
+       broker.put_object('a:', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('a:a', Timestamp(time()).internal, 0,
+       broker.put_object('a:a', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('a:a:a', Timestamp(time()).internal, 0,
+       broker.put_object('a:a:a', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('a:a:b', Timestamp(time()).internal, 0,
+       broker.put_object('a:a:b', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('a:b', Timestamp(time()).internal, 0,
+       broker.put_object('a:b', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('b', Timestamp(time()).internal, 0,
+       broker.put_object('b', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('b:a', Timestamp(time()).internal, 0,
+       broker.put_object('b:a', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('b:b', Timestamp(time()).internal, 0,
+       broker.put_object('b:b', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('c', Timestamp(time()).internal, 0,
+       broker.put_object('c', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('a:0', Timestamp(time()).internal, 0,
+       broker.put_object('a:0', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('0', Timestamp(time()).internal, 0,
+       broker.put_object('0', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('0:', Timestamp(time()).internal, 0,
+       broker.put_object('0:', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('00', Timestamp(time()).internal, 0,
+       broker.put_object('00', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('0:0', Timestamp(time()).internal, 0,
+       broker.put_object('0:0', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('0:00', Timestamp(time()).internal, 0,
+       broker.put_object('0:00', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('0:1', Timestamp(time()).internal, 0,
+       broker.put_object('0:1', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('0:1:', Timestamp(time()).internal, 0,
+       broker.put_object('0:1:', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('0:1:0', Timestamp(time()).internal, 0,
+       broker.put_object('0:1:0', Timestamp.now().internal, 0,
                          'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
-       broker.put_object('1', Timestamp(time()).internal, 0,
+       broker.put_object('1', Timestamp.now().internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('1:', Timestamp(time()).internal, 0, broker.put_object('1:', Timestamp.now().internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('1:0', Timestamp(time()).internal, 0, broker.put_object('1:0', Timestamp.now().internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
listing = broker.list_objects_iter(25, None, None, None, None) listing = broker.list_objects_iter(25, None, None, None, None)
self.assertEqual(len(listing), 22) self.assertEqual(len(listing), 22)
@ -2199,7 +2199,7 @@ def premetadata_create_container_info_table(self, conn, put_timestamp,
UPDATE container_stat UPDATE container_stat
SET account = ?, container = ?, created_at = ?, id = ?, SET account = ?, container = ?, created_at = ?, id = ?,
put_timestamp = ? put_timestamp = ?
''', (self.account, self.container, Timestamp(time()).internal, ''', (self.account, self.container, Timestamp.now().internal,
str(uuid4()), put_timestamp)) str(uuid4()), put_timestamp))
@ -2271,7 +2271,7 @@ def prexsync_create_container_info_table(self, conn, put_timestamp,
UPDATE container_stat UPDATE container_stat
SET account = ?, container = ?, created_at = ?, id = ?, SET account = ?, container = ?, created_at = ?, id = ?,
put_timestamp = ? put_timestamp = ?
''', (self.account, self.container, Timestamp(time()).internal, ''', (self.account, self.container, Timestamp.now().internal,
str(uuid4()), put_timestamp)) str(uuid4()), put_timestamp))
@ -2385,7 +2385,7 @@ def prespi_create_container_info_table(self, conn, put_timestamp,
UPDATE container_stat UPDATE container_stat
SET account = ?, container = ?, created_at = ?, id = ?, SET account = ?, container = ?, created_at = ?, id = ?,
put_timestamp = ? put_timestamp = ?
''', (self.account, self.container, Timestamp(time()).internal, ''', (self.account, self.container, Timestamp.now().internal,
str(uuid4()), put_timestamp)) str(uuid4()), put_timestamp))
@ -2443,7 +2443,7 @@ class TestContainerBrokerBeforeSPI(ContainerBrokerMigrationMixin,
'from object table!') 'from object table!')
# manually insert an existing row to avoid automatic migration # manually insert an existing row to avoid automatic migration
obj_put_timestamp = Timestamp(time()).internal obj_put_timestamp = Timestamp.now().internal
with broker.get() as conn: with broker.get() as conn:
conn.execute(''' conn.execute('''
INSERT INTO object (name, created_at, size, INSERT INTO object (name, created_at, size,
@ -2555,7 +2555,7 @@ class TestContainerBrokerBeforeSPI(ContainerBrokerMigrationMixin,
# now do a PUT with a different value for storage_policy_index # now do a PUT with a different value for storage_policy_index
# which will update the DB schema as well as update policy_stats # which will update the DB schema as well as update policy_stats
# for legacy objects in the DB (those without an SPI) # for legacy objects in the DB (those without an SPI)
second_object_put_timestamp = Timestamp(time()).internal second_object_put_timestamp = Timestamp.now().internal
other_policy = [p for p in POLICIES if p.idx != 0][0] other_policy = [p for p in POLICIES if p.idx != 0][0]
broker.put_object('test_second', second_object_put_timestamp, broker.put_object('test_second', second_object_put_timestamp,
456, 'text/plain', 456, 'text/plain',
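
Every hunk in this change applies the same mechanical substitution: Timestamp(time()) or Timestamp(time.time()) becomes Timestamp.now(), with the optional offset keyword carried through where a call site used one. As a rough, illustrative sketch only (not the actual swift.common.utils.Timestamp implementation, whose normal/internal formatting and offset handling are more involved), the classmethod pattern these call sites assume looks like this:

import time


class Timestamp(object):
    """Illustrative stand-in for swift.common.utils.Timestamp (assumption)."""

    def __init__(self, timestamp, offset=0):
        self.timestamp = float(timestamp)
        self.offset = offset

    @classmethod
    def now(cls, offset=0):
        # Equivalent to the Timestamp(time.time()) expression that this
        # commit replaces at the call sites above and below.
        return cls(time.time(), offset=offset)

    def __float__(self):
        # Lets callers write float(Timestamp.now()), as the reconciler
        # tests in this change do.
        return self.timestamp

    @property
    def internal(self):
        # The real internal form is a fixed-width seconds.microseconds
        # string plus an offset suffix; the suffix is omitted in this sketch.
        return '%016.05f' % self.timestamp
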


@ -367,7 +367,7 @@ class TestReconcilerUtils(unittest.TestCase):
def test_get_container_policy_index_for_deleted(self): def test_get_container_policy_index_for_deleted(self):
mock_path = 'swift.container.reconciler.direct_head_container' mock_path = 'swift.container.reconciler.direct_head_container'
headers = container_resp_headers( headers = container_resp_headers(
status_changed_at=Timestamp(time.time()).internal, status_changed_at=Timestamp.now().internal,
storage_policy_index=1, storage_policy_index=1,
) )
stub_resp_headers = [ stub_resp_headers = [
@ -564,7 +564,7 @@ class TestReconcilerUtils(unittest.TestCase):
'partition': partition, 'method': method, 'path': path, 'partition': partition, 'method': method, 'path': path,
'headers': headers, 'query_string': query_string}) 'headers': headers, 'query_string': query_string})
x_timestamp = Timestamp(time.time()) x_timestamp = Timestamp.now()
headers = {'x-timestamp': x_timestamp.internal} headers = {'x-timestamp': x_timestamp.internal}
fake_hc = fake_http_connect(200, 200, 200, give_connect=test_connect) fake_hc = fake_http_connect(200, 200, 200, give_connect=test_connect)
with mock.patch(mock_path, fake_hc): with mock.patch(mock_path, fake_hc):
@ -1191,7 +1191,7 @@ class TestReconciler(unittest.TestCase):
def test_src_object_unavailable_with_slightly_newer_tombstone(self): def test_src_object_unavailable_with_slightly_newer_tombstone(self):
# should be some sort of retry case # should be some sort of retry case
q_ts = float(Timestamp(time.time())) q_ts = float(Timestamp.now())
container = str(int(q_ts // 3600 * 3600)) container = str(int(q_ts // 3600 * 3600))
q_path = '.misplaced_objects/%s' % container q_path = '.misplaced_objects/%s' % container
self._mock_listing({ self._mock_listing({
@ -1230,7 +1230,7 @@ class TestReconciler(unittest.TestCase):
def test_src_object_unavailable_server_error(self): def test_src_object_unavailable_server_error(self):
# should be some sort of retry case # should be some sort of retry case
q_ts = float(Timestamp(time.time())) q_ts = float(Timestamp.now())
container = str(int(q_ts // 3600 * 3600)) container = str(int(q_ts // 3600 * 3600))
q_path = '.misplaced_objects/%s' % container q_path = '.misplaced_objects/%s' % container
self._mock_listing({ self._mock_listing({
@ -1583,7 +1583,7 @@ class TestReconciler(unittest.TestCase):
self.assertEqual(self.reconciler.stats['retry'], 1) self.assertEqual(self.reconciler.stats['retry'], 1)
def test_object_move_no_such_object_no_tombstone_recent(self): def test_object_move_no_such_object_no_tombstone_recent(self):
q_ts = float(Timestamp(time.time())) q_ts = float(Timestamp.now())
container = str(int(q_ts // 3600 * 3600)) container = str(int(q_ts // 3600 * 3600))
q_path = '.misplaced_objects/%s' % container q_path = '.misplaced_objects/%s' % container
@ -1615,7 +1615,7 @@ class TestReconciler(unittest.TestCase):
self.assertEqual(deleted_container_entries, []) self.assertEqual(deleted_container_entries, [])
def test_object_move_no_such_object_no_tombstone_ancient(self): def test_object_move_no_such_object_no_tombstone_ancient(self):
queue_ts = float(Timestamp(time.time())) - \ queue_ts = float(Timestamp.now()) - \
self.reconciler.reclaim_age * 1.1 self.reconciler.reclaim_age * 1.1
container = str(int(queue_ts // 3600 * 3600)) container = str(int(queue_ts // 3600 * 3600))


@ -17,7 +17,6 @@ import os
import shutil import shutil
import tempfile import tempfile
import unittest import unittest
import time
from swift.common import utils from swift.common import utils
from swift.common.storage_policy import POLICIES from swift.common.storage_policy import POLICIES
@ -78,7 +77,7 @@ class BaseTest(unittest.TestCase):
commit=True, verify=True): commit=True, verify=True):
policy = policy or POLICIES.legacy policy = policy or POLICIES.legacy
object_parts = account, container, obj object_parts = account, container, obj
timestamp = Timestamp(time.time()) if timestamp is None else timestamp timestamp = Timestamp.now() if timestamp is None else timestamp
if df_mgr is None: if df_mgr is None:
df_mgr = self.daemon._df_router[policy] df_mgr = self.daemon._df_router[policy]
df = df_mgr.get_diskfile( df = df_mgr.get_diskfile(


@ -716,7 +716,7 @@ class TestAuditor(unittest.TestCase):
def setup_bad_zero_byte(self, timestamp=None): def setup_bad_zero_byte(self, timestamp=None):
if timestamp is None: if timestamp is None:
timestamp = Timestamp(time.time()) timestamp = Timestamp.now()
self.auditor = auditor.ObjectAuditor(self.conf) self.auditor = auditor.ObjectAuditor(self.conf)
self.auditor.log_time = 0 self.auditor.log_time = 0
etag = md5() etag = md5()


@ -2826,7 +2826,7 @@ class TestECDiskFileManager(DiskFileManagerMixin, unittest.TestCase):
self.df_mgr.get_diskfile_from_hash, self.df_mgr.get_diskfile_from_hash,
self.existing_device, '0', hash_, self.existing_device, '0', hash_,
POLICIES.default) # sanity POLICIES.default) # sanity
timestamp = Timestamp(time()) timestamp = Timestamp.now()
for frag_index in (4, 7): for frag_index in (4, 7):
write_diskfile(df, timestamp, frag_index=frag_index, write_diskfile(df, timestamp, frag_index=frag_index,
legacy_durable=legacy_durable) legacy_durable=legacy_durable)
@ -3317,7 +3317,7 @@ class DiskFileMixin(BaseDiskFileTestMixin):
if ts: if ts:
timestamp = Timestamp(ts) timestamp = Timestamp(ts)
else: else:
timestamp = Timestamp(time()) timestamp = Timestamp.now()
if prealloc: if prealloc:
prealloc_size = fsize prealloc_size = fsize
else: else:
@ -3702,7 +3702,7 @@ class DiskFileMixin(BaseDiskFileTestMixin):
def test_write_metadata(self): def test_write_metadata(self):
df, df_data = self._create_test_file('1234567890') df, df_data = self._create_test_file('1234567890')
file_count = len(os.listdir(df._datadir)) file_count = len(os.listdir(df._datadir))
timestamp = Timestamp(time()).internal timestamp = Timestamp.now().internal
metadata = {'X-Timestamp': timestamp, 'X-Object-Meta-test': 'data'} metadata = {'X-Timestamp': timestamp, 'X-Object-Meta-test': 'data'}
df.write_metadata(metadata) df.write_metadata(metadata)
dl = os.listdir(df._datadir) dl = os.listdir(df._datadir)
@ -3714,7 +3714,7 @@ class DiskFileMixin(BaseDiskFileTestMixin):
# if metadata has content-type then its time should be in file name # if metadata has content-type then its time should be in file name
df, df_data = self._create_test_file('1234567890') df, df_data = self._create_test_file('1234567890')
file_count = len(os.listdir(df._datadir)) file_count = len(os.listdir(df._datadir))
timestamp = Timestamp(time()) timestamp = Timestamp.now()
metadata = {'X-Timestamp': timestamp.internal, metadata = {'X-Timestamp': timestamp.internal,
'X-Object-Meta-test': 'data', 'X-Object-Meta-test': 'data',
'Content-Type': 'foo', 'Content-Type': 'foo',
@ -3807,7 +3807,7 @@ class DiskFileMixin(BaseDiskFileTestMixin):
'Expected file %s not found in %s' % (exp_name, dl)) 'Expected file %s not found in %s' % (exp_name, dl))
def test_write_metadata_no_xattr(self): def test_write_metadata_no_xattr(self):
timestamp = Timestamp(time()).internal timestamp = Timestamp.now().internal
metadata = {'X-Timestamp': timestamp, 'X-Object-Meta-test': 'data'} metadata = {'X-Timestamp': timestamp, 'X-Object-Meta-test': 'data'}
def mock_setxattr(*args, **kargs): def mock_setxattr(*args, **kargs):
@ -3821,7 +3821,7 @@ class DiskFileMixin(BaseDiskFileTestMixin):
diskfile.write_metadata, 'n/a', metadata) diskfile.write_metadata, 'n/a', metadata)
def test_write_metadata_disk_full(self): def test_write_metadata_disk_full(self):
timestamp = Timestamp(time()).internal timestamp = Timestamp.now().internal
metadata = {'X-Timestamp': timestamp, 'X-Object-Meta-test': 'data'} metadata = {'X-Timestamp': timestamp, 'X-Object-Meta-test': 'data'}
def mock_setxattr_ENOSPC(*args, **kargs): def mock_setxattr_ENOSPC(*args, **kargs):
@ -3863,7 +3863,7 @@ class DiskFileMixin(BaseDiskFileTestMixin):
def test_commit(self): def test_commit(self):
for policy in POLICIES: for policy in POLICIES:
timestamp = Timestamp(time()) timestamp = Timestamp.now()
df = self._simple_get_diskfile(account='a', container='c', df = self._simple_get_diskfile(account='a', container='c',
obj='o_%s' % policy, obj='o_%s' % policy,
policy=policy) policy=policy)
@ -3879,7 +3879,7 @@ class DiskFileMixin(BaseDiskFileTestMixin):
def _do_test_write_cleanup(self, policy, legacy_durable=False): def _do_test_write_cleanup(self, policy, legacy_durable=False):
# create first fileset as starting state # create first fileset as starting state
timestamp_1 = Timestamp(time()) timestamp_1 = Timestamp.now()
datadir_1 = self._create_diskfile_dir( datadir_1 = self._create_diskfile_dir(
timestamp_1, policy, legacy_durable) timestamp_1, policy, legacy_durable)
# second write should clean up first fileset # second write should clean up first fileset
@ -3977,7 +3977,7 @@ class DiskFileMixin(BaseDiskFileTestMixin):
df = self._simple_get_diskfile(account='a', container='c', df = self._simple_get_diskfile(account='a', container='c',
obj='o', policy=policy) obj='o', policy=policy)
timestamp = Timestamp(time()) timestamp = Timestamp.now()
with df.create() as writer: with df.create() as writer:
metadata = { metadata = {
'ETag': 'bogus_etag', 'ETag': 'bogus_etag',
@ -3999,7 +3999,7 @@ class DiskFileMixin(BaseDiskFileTestMixin):
df = self._simple_get_diskfile(account='a', container='c', df = self._simple_get_diskfile(account='a', container='c',
obj='o_error', policy=policy) obj='o_error', policy=policy)
timestamp = Timestamp(time()) timestamp = Timestamp.now()
with df.create() as writer: with df.create() as writer:
metadata = { metadata = {
'ETag': 'bogus_etag', 'ETag': 'bogus_etag',
@ -4037,7 +4037,7 @@ class DiskFileMixin(BaseDiskFileTestMixin):
}[policy.policy_type] }[policy.policy_type]
df = self._simple_get_diskfile(account='a', container='c', df = self._simple_get_diskfile(account='a', container='c',
obj='o_error', policy=policy) obj='o_error', policy=policy)
timestamp = Timestamp(time()) timestamp = Timestamp.now()
with df.create() as writer: with df.create() as writer:
metadata = { metadata = {
'ETag': 'bogus_etag', 'ETag': 'bogus_etag',
@ -4063,7 +4063,7 @@ class DiskFileMixin(BaseDiskFileTestMixin):
}[policy.policy_type] }[policy.policy_type]
df = self._simple_get_diskfile(account='a', container='c', df = self._simple_get_diskfile(account='a', container='c',
obj='o_error', policy=policy) obj='o_error', policy=policy)
timestamp = Timestamp(time()) timestamp = Timestamp.now()
with df.create() as writer: with df.create() as writer:
metadata = { metadata = {
'ETag': 'bogus_etag', 'ETag': 'bogus_etag',
@ -4080,7 +4080,7 @@ class DiskFileMixin(BaseDiskFileTestMixin):
self.assertEqual(expected[1], mock_cleanup.call_count) self.assertEqual(expected[1], mock_cleanup.call_count)
with mock.patch(self._manager_mock( with mock.patch(self._manager_mock(
'cleanup_ondisk_files', df)) as mock_cleanup: 'cleanup_ondisk_files', df)) as mock_cleanup:
timestamp = Timestamp(time()) timestamp = Timestamp.now()
df.delete(timestamp) df.delete(timestamp)
self.assertEqual(expected[2], mock_cleanup.call_count) self.assertEqual(expected[2], mock_cleanup.call_count)
@ -4095,7 +4095,7 @@ class DiskFileMixin(BaseDiskFileTestMixin):
df = self._get_open_disk_file(policy=policy, frag_index=fi, df = self._get_open_disk_file(policy=policy, frag_index=fi,
extra_metadata=metadata) extra_metadata=metadata)
ts = Timestamp(time()) ts = Timestamp.now()
df.delete(ts) df.delete(ts)
exp_name = '%s.ts' % ts.internal exp_name = '%s.ts' % ts.internal
dl = os.listdir(df._datadir) dl = os.listdir(df._datadir)
@ -4740,7 +4740,7 @@ class DiskFileMixin(BaseDiskFileTestMixin):
data = '0' * 100 data = '0' * 100
metadata = { metadata = {
'ETag': md5(data).hexdigest(), 'ETag': md5(data).hexdigest(),
'X-Timestamp': Timestamp(time()).internal, 'X-Timestamp': Timestamp.now().internal,
'Content-Length': str(100), 'Content-Length': str(100),
} }
with mock.patch("swift.obj.diskfile.renamer", _m_renamer): with mock.patch("swift.obj.diskfile.renamer", _m_renamer):
@ -4850,7 +4850,7 @@ class DiskFileMixin(BaseDiskFileTestMixin):
data = '0' * 100 data = '0' * 100
metadata = { metadata = {
'ETag': md5(data).hexdigest(), 'ETag': md5(data).hexdigest(),
'X-Timestamp': Timestamp(time()).internal, 'X-Timestamp': Timestamp.now().internal,
'Content-Length': str(100), 'Content-Length': str(100),
} }
_m_renamer = mock.Mock() _m_renamer = mock.Mock()
@ -4878,7 +4878,7 @@ class TestECDiskFile(DiskFileMixin, unittest.TestCase):
df = self._simple_get_diskfile(account='a', container='c', df = self._simple_get_diskfile(account='a', container='c',
obj='o_rename_err', obj='o_rename_err',
policy=POLICIES.default) policy=POLICIES.default)
timestamp = Timestamp(time()) timestamp = Timestamp.now()
with df.create() as writer: with df.create() as writer:
metadata = { metadata = {
'ETag': 'bogus_etag', 'ETag': 'bogus_etag',
@ -4934,7 +4934,7 @@ class TestECDiskFile(DiskFileMixin, unittest.TestCase):
df = self._simple_get_diskfile(account='a', container='c', df = self._simple_get_diskfile(account='a', container='c',
obj='o_fsync_dir_err', obj='o_fsync_dir_err',
policy=POLICIES.default) policy=POLICIES.default)
timestamp = Timestamp(time()) timestamp = Timestamp.now()
with df.create() as writer: with df.create() as writer:
metadata = { metadata = {
'ETag': 'bogus_etag', 'ETag': 'bogus_etag',
@ -6127,7 +6127,7 @@ class TestSuffixHashes(unittest.TestCase):
# check behaviour for legacy durable files # check behaviour for legacy durable files
for policy in self.iter_policies(): for policy in self.iter_policies():
if policy.policy_type == EC_POLICY: if policy.policy_type == EC_POLICY:
file1 = Timestamp(time()).internal + '.durable' file1 = Timestamp.now().internal + '.durable'
file_list = [file1] file_list = [file1]
self.check_cleanup_ondisk_files(policy, file_list, []) self.check_cleanup_ondisk_files(policy, file_list, [])
@ -7232,7 +7232,7 @@ class TestSuffixHashes(unittest.TestCase):
df = df_mgr.get_diskfile('sda1', '0', *matching_paths[0], df = df_mgr.get_diskfile('sda1', '0', *matching_paths[0],
policy=policy, frag_index=2) policy=policy, frag_index=2)
# create a real, valid hsh_path # create a real, valid hsh_path
df.delete(Timestamp(time())) df.delete(Timestamp.now())
# and a couple of empty hsh_paths # and a couple of empty hsh_paths
empty_hsh_paths = [] empty_hsh_paths = []
for path in matching_paths[1:]: for path in matching_paths[1:]:
@ -7542,7 +7542,7 @@ class TestSuffixHashes(unittest.TestCase):
df = df_mgr.get_diskfile(self.existing_device, '0', 'a', 'c', 'o', df = df_mgr.get_diskfile(self.existing_device, '0', 'a', 'c', 'o',
policy=policy, frag_index=4) policy=policy, frag_index=4)
os.makedirs(df._datadir) os.makedirs(df._datadir)
filename = Timestamp(time()).internal + '.ts' filename = Timestamp.now().internal + '.ts'
open(os.path.join(df._datadir, filename), 'w').close() open(os.path.join(df._datadir, filename), 'w').close()
suffix = os.path.basename(os.path.dirname(df._datadir)) suffix = os.path.basename(os.path.dirname(df._datadir))
# but get_hashes has no reason to find it (because we didn't # but get_hashes has no reason to find it (because we didn't
@ -7796,7 +7796,7 @@ class TestSuffixHashes(unittest.TestCase):
# create a real suffix dir # create a real suffix dir
df = df_mgr.get_diskfile(self.existing_device, '0', 'a', 'c', df = df_mgr.get_diskfile(self.existing_device, '0', 'a', 'c',
'o', policy=policy, frag_index=3) 'o', policy=policy, frag_index=3)
df.delete(Timestamp(time())) df.delete(Timestamp.now())
suffix = os.path.basename(os.path.dirname(df._datadir)) suffix = os.path.basename(os.path.dirname(df._datadir))
# touch a bad suffix dir # touch a bad suffix dir
part_dir = os.path.join(self.devices, self.existing_device, part_dir = os.path.join(self.devices, self.existing_device,


@ -288,7 +288,7 @@ class TestGlobalSetupObjectReconstructor(unittest.TestCase):
df_mgr = self.reconstructor._df_router[policy] df_mgr = self.reconstructor._df_router[policy]
df = df_mgr.get_diskfile('sda1', part, 'a', 'c', object_name, df = df_mgr.get_diskfile('sda1', part, 'a', 'c', object_name,
policy=policy) policy=policy)
timestamp = timestamp or utils.Timestamp(time.time()) timestamp = timestamp or utils.Timestamp.now()
test_data = test_data or 'test data' test_data = test_data or 'test data'
write_diskfile(df, timestamp, data=test_data, frag_index=frag_index, write_diskfile(df, timestamp, data=test_data, frag_index=frag_index,
legacy_durable=self.legacy_durable) legacy_durable=self.legacy_durable)


@ -2554,7 +2554,7 @@ class TestObjectController(unittest.TestCase):
self.assertEqual(resp.status_int, 404) self.assertEqual(resp.status_int, 404)
def test_PUT_ssync_multi_frag(self): def test_PUT_ssync_multi_frag(self):
timestamp = utils.Timestamp(time()).internal timestamp = utils.Timestamp.now().internal
def put_with_index(expected_rsp, frag_index, node_index=None): def put_with_index(expected_rsp, frag_index, node_index=None):
data_file_tail = '#%d#d.data' % frag_index data_file_tail = '#%d#d.data' % frag_index
@ -2926,7 +2926,7 @@ class TestObjectController(unittest.TestCase):
def test_GET_if_match_etag_is_at(self): def test_GET_if_match_etag_is_at(self):
headers = { headers = {
'X-Timestamp': utils.Timestamp(time()).internal, 'X-Timestamp': utils.Timestamp.now().internal,
'Content-Type': 'application/octet-stream', 'Content-Type': 'application/octet-stream',
'X-Object-Meta-Xtag': 'madeup', 'X-Object-Meta-Xtag': 'madeup',
'X-Object-Sysmeta-Xtag': 'alternate madeup', 'X-Object-Sysmeta-Xtag': 'alternate madeup',
@ -4019,7 +4019,7 @@ class TestObjectController(unittest.TestCase):
def mock_diskfile_delete(self, timestamp): def mock_diskfile_delete(self, timestamp):
raise DiskFileNoSpace() raise DiskFileNoSpace()
t_put = utils.Timestamp(time()) t_put = utils.Timestamp.now()
req = Request.blank('/sda1/p/a/c/o', req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'}, environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': t_put.internal, headers={'X-Timestamp': t_put.internal,
@ -4030,7 +4030,7 @@ class TestObjectController(unittest.TestCase):
with mock.patch('swift.obj.diskfile.BaseDiskFile.delete', with mock.patch('swift.obj.diskfile.BaseDiskFile.delete',
mock_diskfile_delete): mock_diskfile_delete):
t_delete = utils.Timestamp(time()) t_delete = utils.Timestamp.now()
req = Request.blank('/sda1/p/a/c/o', req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'}, environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': t_delete.internal}) headers={'X-Timestamp': t_delete.internal})
@ -6865,7 +6865,7 @@ class TestObjectController(unittest.TestCase):
# phase1 - PUT request with object metadata in footer and # phase1 - PUT request with object metadata in footer and
# multiphase commit conversation # multiphase commit conversation
put_timestamp = utils.Timestamp(time()).internal put_timestamp = utils.Timestamp.now().internal
headers = { headers = {
'Content-Type': 'text/plain', 'Content-Type': 'text/plain',
'X-Timestamp': put_timestamp, 'X-Timestamp': put_timestamp,
@ -6932,7 +6932,7 @@ class TestObjectServer(unittest.TestCase):
'Expect': '100-continue', 'Expect': '100-continue',
'Content-Length': len(test_body), 'Content-Length': len(test_body),
'Content-Type': 'application/test', 'Content-Type': 'application/test',
'X-Timestamp': utils.Timestamp(time()).internal, 'X-Timestamp': utils.Timestamp.now().internal,
} }
conn = bufferedhttp.http_connect('127.0.0.1', self.port, 'sda1', '0', conn = bufferedhttp.http_connect('127.0.0.1', self.port, 'sda1', '0',
'PUT', '/a/c/o', headers=headers) 'PUT', '/a/c/o', headers=headers)
@ -6950,7 +6950,7 @@ class TestObjectServer(unittest.TestCase):
'Expect': '100-continue', 'Expect': '100-continue',
'Content-Length': len(test_body), 'Content-Length': len(test_body),
'Content-Type': 'application/test', 'Content-Type': 'application/test',
'X-Timestamp': utils.Timestamp(time()).internal, 'X-Timestamp': utils.Timestamp.now().internal,
'X-Backend-Obj-Metadata-Footer': 'yes', 'X-Backend-Obj-Metadata-Footer': 'yes',
'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary123', 'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary123',
} }
@ -6964,7 +6964,7 @@ class TestObjectServer(unittest.TestCase):
def test_expect_on_put_conflict(self): def test_expect_on_put_conflict(self):
test_body = 'test' test_body = 'test'
put_timestamp = utils.Timestamp(time()) put_timestamp = utils.Timestamp.now()
headers = { headers = {
'Expect': '100-continue', 'Expect': '100-continue',
'Content-Length': len(test_body), 'Content-Length': len(test_body),
@ -6993,7 +6993,7 @@ class TestObjectServer(unittest.TestCase):
def test_multiphase_put_no_mime_boundary(self): def test_multiphase_put_no_mime_boundary(self):
test_data = 'obj data' test_data = 'obj data'
put_timestamp = utils.Timestamp(time()).internal put_timestamp = utils.Timestamp.now().internal
headers = { headers = {
'Content-Type': 'text/plain', 'Content-Type': 'text/plain',
'X-Timestamp': put_timestamp, 'X-Timestamp': put_timestamp,
@ -7010,7 +7010,7 @@ class TestObjectServer(unittest.TestCase):
resp.close() resp.close()
def test_expect_on_multiphase_put_diconnect(self): def test_expect_on_multiphase_put_diconnect(self):
put_timestamp = utils.Timestamp(time()).internal put_timestamp = utils.Timestamp.now().internal
headers = { headers = {
'Content-Type': 'text/plain', 'Content-Type': 'text/plain',
'X-Timestamp': put_timestamp, 'X-Timestamp': put_timestamp,
@ -7104,7 +7104,7 @@ class TestObjectServer(unittest.TestCase):
'X-Backend-Obj-Multiphase-Commit': 'yes', 'X-Backend-Obj-Multiphase-Commit': 'yes',
} }
put_timestamp = utils.Timestamp(headers.setdefault( put_timestamp = utils.Timestamp(headers.setdefault(
'X-Timestamp', utils.Timestamp(time()).internal)) 'X-Timestamp', utils.Timestamp.now().internal))
container_update = \ container_update = \
'swift.obj.server.ObjectController.container_update' 'swift.obj.server.ObjectController.container_update'
with mock.patch(container_update) as _container_update: with mock.patch(container_update) as _container_update:
@ -7213,7 +7213,7 @@ class TestObjectServer(unittest.TestCase):
"--boundary123", "--boundary123",
)) ))
put_timestamp = utils.Timestamp(time()).internal put_timestamp = utils.Timestamp.now().internal
headers = { headers = {
'Content-Type': 'text/plain', 'Content-Type': 'text/plain',
'X-Timestamp': put_timestamp, 'X-Timestamp': put_timestamp,
@ -7378,7 +7378,7 @@ class TestObjectServer(unittest.TestCase):
# phase1 - PUT request with multiphase commit conversation # phase1 - PUT request with multiphase commit conversation
# no object metadata in footer # no object metadata in footer
put_timestamp = utils.Timestamp(time()).internal put_timestamp = utils.Timestamp.now().internal
headers = { headers = {
'Content-Type': 'text/plain', 'Content-Type': 'text/plain',
'X-Timestamp': put_timestamp, 'X-Timestamp': put_timestamp,


@ -587,7 +587,7 @@ class BaseObjectControllerMixin(object):
def test_HEAD_x_newest_with_two_vector_timestamps(self): def test_HEAD_x_newest_with_two_vector_timestamps(self):
req = swob.Request.blank('/v1/a/c/o', method='HEAD', req = swob.Request.blank('/v1/a/c/o', method='HEAD',
headers={'X-Newest': 'true'}) headers={'X-Newest': 'true'})
ts = (utils.Timestamp(time.time(), offset=offset) ts = (utils.Timestamp.now(offset=offset)
for offset in itertools.count()) for offset in itertools.count())
timestamps = [next(ts) for i in range(self.replicas())] timestamps = [next(ts) for i in range(self.replicas())]
newest_timestamp = timestamps[-1] newest_timestamp = timestamps[-1]
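
The X-Newest test above also exercises the offset keyword. Using the hypothetical sketch from earlier in this section, an equivalent standalone usage would be:

import itertools

# Generator of timestamps with increasing offsets, mirroring the test
# above; each next() samples the clock again, so only the offsets are
# guaranteed to increase strictly.
ts = (Timestamp.now(offset=offset) for offset in itertools.count())
timestamps = [next(ts) for _ in range(3)]
print([t.internal for t in timestamps])
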
@ -1402,7 +1402,7 @@ class TestReplicatedObjController(BaseObjectControllerMixin,
test_indexes = [None] + [int(p) for p in POLICIES] test_indexes = [None] + [int(p) for p in POLICIES]
for policy_index in test_indexes: for policy_index in test_indexes:
self.app.container_info['storage_policy'] = policy_index self.app.container_info['storage_policy'] = policy_index
put_timestamp = utils.Timestamp(time.time()).normal put_timestamp = utils.Timestamp.now().normal
req = swob.Request.blank( req = swob.Request.blank(
'/v1/a/c/o', method='PUT', headers={ '/v1/a/c/o', method='PUT', headers={
'Content-Length': 0, 'Content-Length': 0,
@ -1416,7 +1416,7 @@ class TestReplicatedObjController(BaseObjectControllerMixin,
test_indexes = [None] + [int(p) for p in POLICIES] test_indexes = [None] + [int(p) for p in POLICIES]
for policy_index in test_indexes: for policy_index in test_indexes:
self.app.container_info['storage_policy'] = policy_index self.app.container_info['storage_policy'] = policy_index
put_timestamp = utils.Timestamp(time.time()).normal put_timestamp = utils.Timestamp.now().normal
req = swob.Request.blank( req = swob.Request.blank(
'/v1/a/c/o', method='PUT', headers={ '/v1/a/c/o', method='PUT', headers={
'Content-Length': 0, 'Content-Length': 0,