Bump hacking
hacking 3.1.0 is too old.

Note: we can't bump hacking straight to 6.x.0 (the latest major version) because of the existing cap in diskimage-builder; that cap is being updated by [1].

[1] https://review.opendev.org/c/openstack/diskimage-builder/+/909336

Change-Id: I8778a7decc6669b4d95d6886c971433e7c34c5c8
parent 6822a76ad9
commit bb260949d4
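The code changes below follow from the newer flake8/pycodestyle/pyflakes releases that a less tightly capped hacking pulls in. As a rough sketch only (the `Thing` class and `describe` function are hypothetical names, not Trove code), the two most common patterns being fixed are f-strings without placeholders (pyflakes F541) and type comparisons with `==` (pycodestyle E721):

    # Illustrative sketch; the names here are made up, not from the Trove tree.

    class Thing:
        def __init__(self, name):
            self.name = name

        def __eq__(self, other):
            # E721: compare types with `is` (or use isinstance()), not `==`.
            return type(other) is type(self) and other.name == self.name


    def describe(value):
        # Prefer isinstance() over `type(value) == str`.
        if isinstance(value, str):
            # F541 flags f-strings without placeholders; plain literals with
            # %-formatting (as in the diff below) avoid the warning.
            return 'string: %s' % value
        return 'other'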
@@ -52,9 +52,9 @@ class XtraBackup(mysql_base.MySQLBaseRunner):
     @property
     def cmd(self):
-        cmd = (f'xtrabackup --backup --stream=xbstream --parallel=2 '
-               f'--datadir=%(datadir)s --user=%(user)s '
-               f'--password=%(password)s --host=%(host)s'
+        cmd = ('xtrabackup --backup --stream=xbstream --parallel=2 '
+               '--datadir=%(datadir)s --user=%(user)s '
+               '--password=%(password)s --host=%(host)s'
                % {
                    'datadir': self.datadir,
                    'user': CONF.db_user,
@@ -91,9 +91,9 @@ class XtraBackup(mysql_base.MySQLBaseRunner):
 class XtraBackupIncremental(XtraBackup):
     """XtraBackup incremental backup."""
     prepare_log = '/tmp/prepare.log'
-    incremental_prep = (f'xtrabackup --prepare --apply-log-only'
-                        f' --target-dir=%(restore_location)s'
-                        f' %(incremental_args)s')
+    incremental_prep = ('xtrabackup --prepare --apply-log-only'
+                        ' --target-dir=%(restore_location)s'
+                        ' %(incremental_args)s')

     def __init__(self, *args, **kwargs):
         if not kwargs.get('lsn'):
@@ -312,7 +312,7 @@ class TestPgBasebackupIncremental(unittest.TestCase):
     def test_incremental_restore_cmd(self):
         # prepare the test
         runner = self.runner_cls(**self.params)
-        cmd = f'tar xzf - -C /var/lib/postgresql/data/pgdata'
+        cmd = 'tar xzf - -C /var/lib/postgresql/data/pgdata'

         # call the method
         ret = runner.incremental_restore_cmd()
@@ -68,9 +68,9 @@ class TestXtraBackup(unittest.TestCase):
         runner = self.runner_cls(**self.params)

         # assertions
-        cmd = (f'xtrabackup --backup --stream=xbstream --parallel=2 '
-               f'--datadir=%(datadir)s --user=%(user)s '
-               f'--password=%(password)s --host=%(host)s'
+        cmd = ('xtrabackup --backup --stream=xbstream --parallel=2 '
+               '--datadir=%(datadir)s --user=%(user)s '
+               '--password=%(password)s --host=%(host)s'
                % {
                    'datadir': runner.datadir,
                    'user': CONF.db_user,
@@ -1,6 +1,3 @@
-# The order of packages is significant, because pip processes them in the order
-# of appearance. Changing the order has an impact on the overall integration
-# process, which may cause wedges in the gate later.
 pbr!=2.1.0,>=2.0.0 # Apache-2.0
 SQLAlchemy!=1.1.5,!=1.1.6,!=1.1.7,!=1.1.8,>=1.0.10 # MIT
 eventlet!=0.18.3,!=0.20.1,>=0.18.2 # MIT
@@ -54,4 +51,4 @@ oslo.cache>=1.26.0 # Apache-2.0
 # for trove network driver
 Flask>=2.2.3 # BSD
 pyroute2>=0.7.7;sys_platform!='win32' # Apache-2.0 (+ dual licensed GPL2)
 gunicorn>=20.1.0 # MIT
@@ -1,8 +1,5 @@
-# The order of packages is significant, because pip processes them in the order
-# of appearance. Changing the order has an impact on the overall integration
-# process, which may cause wedges in the gate later.
 # Hacking already pins down pep8, pyflakes and flake8
-hacking>=3.0.1,<3.1.0 # Apache-2.0
+hacking<6.0.0 # Apache-2.0
 bandit[baseline]>=1.7.7 # Apache-2.0
 coverage!=4.4,>=4.0 # Apache-2.0
 nose>=1.3.7 # LGPL
@@ -67,7 +67,7 @@ class ModelBase(object):
         """Overloaded to cause this object to look like a data entity."""
         if not hasattr(other, 'id'):
             return False
-        return type(other) == type(self) and other.id == self.id
+        return type(other) is type(self) and other.id == self.id

     def __ne__(self, other):
         """Overloaded to cause this object to look like a data entity."""
@@ -506,9 +506,9 @@ class JsonCodec(StreamCodec):
         return jsonutils.dumps(dict_data)

     def deserialize(self, stream):
-        if type(stream) == str:
+        if isinstance(stream, str):
             return jsonutils.load(io.StringIO(stream))
-        if type(stream) == bytes:
+        if isinstance(stream, bytes):
             return jsonutils.load(io.BytesIO(stream))


@@ -40,7 +40,6 @@ from trove.common import pastedeploy
 from trove.common import utils

 CONTEXT_KEY = 'trove.context'
-Router = base_wsgi.Router
 Debug = base_wsgi.Debug
 Middleware = base_wsgi.Middleware
 JSONDictSerializer = base_wsgi.JSONDictSerializer
@@ -12,8 +12,6 @@

 import re

-import pycodestyle
-
 from hacking import core

 _all_log_levels = (
@@ -59,20 +57,19 @@ def check_raised_localized_exceptions(logical_line, filename):


 @core.flake8ext
-def no_translate_logs(physical_line, logical_line, filename):
+def no_translate_logs(logical_line, filename, noqa):
     """T105 - Log messages shouldn't be translated from the
     Pike release.
     :param logical_line: The logical line to check.
-    :param physical_line: The physical line to check.
     :param filename: The file name where the logical line exists.
+    :param noqa: whether the check should be skipped
     :returns: None if the logical line passes the check, otherwise a tuple
         is yielded that contains the offending index in logical line and a
         message describe the check validation failure.
     """
-    if _translation_is_not_expected(filename):
+    if noqa:
         return
-    if pycodestyle.noqa(physical_line):
+    if _translation_is_not_expected(filename):
         return

     msg = "T105: Log message shouldn't be translated."
@@ -504,7 +504,7 @@ class SimpleInstance(object):
     @property
     def access(self):
         if hasattr(self.db_info, 'access'):
-            if type(self.db_info.access) == str:
+            if isinstance(self.db_info.access, str):
                 return json.loads(self.db_info.access)
             return self.db_info.access
         else:
@@ -208,9 +208,9 @@ class BackupCreateTest(trove_testtools.TestCase):
         instance = MagicMock()
         instance.cluster_id = 'bad_id'
         with patch.object(instance_models.BuiltInstance, 'load',
-                          return_value=instance),\
+                          return_value=instance), \
                 patch.object(models.Backup, 'validate_can_perform_action',
-                             return_value=None),\
+                             return_value=None), \
                 patch.object(models.Backup, 'verify_swift_auth_token',
                              return_value=None):
             self.assertRaises(exception.ClusterInstanceOperationNotSupported,
@@ -223,13 +223,13 @@ class BackupCreateTest(trove_testtools.TestCase):
         instance = MagicMock()
         instance.cluster_id = None
         with patch.object(instance_models.BuiltInstance, 'load',
-                          return_value=instance),\
+                          return_value=instance), \
                 patch.object(models.Backup, 'validate_can_perform_action',
-                             return_value=None),\
+                             return_value=None), \
                 patch.object(models.Backup, 'verify_swift_auth_token',
-                             return_value=None),\
+                             return_value=None), \
                 patch.object(DatabaseModelBase, 'is_valid',
-                             return_value=False),\
+                             return_value=False), \
                 patch('trove.quota.quota.QuotaEngine.reserve',
                       return_value=[]):
             DatabaseModelBase.errors = {}
@@ -41,24 +41,23 @@ class HackingTestCase(trove_testtools.TestCase):
         for level in all_log_levels:
             bad = 'LOG.%s(_("Bad"))' % level
             self.assertEqual(
-                1, len(list(tc.no_translate_logs(bad, bad, 'f'))))
+                1, len(list(tc.no_translate_logs(bad, 'f', False))))
             bad = "LOG.%s(_('Bad'))" % level
             self.assertEqual(
-                1, len(list(tc.no_translate_logs(bad, bad, 'f'))))
+                1, len(list(tc.no_translate_logs(bad, 'f', False))))
             ok = 'LOG.%s("OK")' % level
             self.assertEqual(
-                0, len(list(tc.no_translate_logs(ok, ok, 'f'))))
+                0, len(list(tc.no_translate_logs(ok, 'f', False))))
-            ok = "LOG.%s(_('OK')) # noqa" % level
+            ok = "LOG.%s(_('OK'))" % level
             self.assertEqual(
-                0, len(list(tc.no_translate_logs(ok, ok, 'f'))))
+                0, len(list(tc.no_translate_logs(ok, 'f', True))))
             ok = "LOG.%s(variable)" % level
             self.assertEqual(
-                0, len(list(tc.no_translate_logs(ok, ok, 'f'))))
+                0, len(list(tc.no_translate_logs(ok, 'f', False))))
             # Do not do validations in tests
             ok = 'LOG.%s(_("OK - unit tests"))' % level
             self.assertEqual(
-                0, len(list(tc.no_translate_logs(ok, ok,
-                                                 'f/tests/f'))))
+                0, len(list(tc.no_translate_logs(ok, 'f/tests/f', False))))

     def test_check_localized_exception_messages(self):
         f = tc.check_raised_localized_exceptions
@@ -269,10 +269,8 @@ class FreshInstanceTasksTest(BaseFreshInstanceTasksTest):
         # execute
         files = self.freshinstancetasks.get_injected_files("test", 'test')
         # verify
-        self.assertTrue(
-            '/etc/trove/conf.d/guest_info.conf' in files)
-        self.assertTrue(
-            '/etc/trove/conf.d/trove-guestagent.conf' in files)
+        self.assertIn('/etc/trove/conf.d/guest_info.conf', files)
+        self.assertIn('/etc/trove/conf.d/trove-guestagent.conf', files)
         self.assertEqual(
             self.guestconfig_content,
             files['/etc/trove/conf.d/trove-guestagent.conf'])
@@ -289,10 +287,8 @@ class FreshInstanceTasksTest(BaseFreshInstanceTasksTest):
         # execute
         files = self.freshinstancetasks.get_injected_files("test", 'test')
         # verify
-        self.assertTrue(
-            '/etc/guest_info' in files)
-        self.assertTrue(
-            '/etc/trove-guestagent.conf' in files)
+        self.assertIn('/etc/guest_info', files)
+        self.assertIn('/etc/trove-guestagent.conf', files)
         self.assertEqual(
             self.guestconfig_content,
             files['/etc/trove-guestagent.conf'])
@@ -32,7 +32,7 @@ class VolumeType(object):

     @classmethod
     def load(cls, volume_type_id, context=None, client=None):
-        if not(client or context):
+        if not (client or context):
             raise trove_exception.InvalidModelError(
                 "client or context must be provided to load a volume_type")
         if not client: