Update hacking version
Update hacking to the latest release, due to [1].

[1]: https://github.com/openstack/oslo.messaging/blob/master/HACKING.rst

Change-Id: I1ee7387272690f0de0d7b1937d7c2b0ee404063c
This commit is contained in:
parent 0a784d2604
commit cb902e5aff
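Most of the churn below is mechanical and falls into three buckets: wrapped boolean expressions now break after the binary operator instead of before it (pycodestyle's W503/W504 pair covers operator placement, and the updated checks evidently reject the break-before form), __all__ definitions move below the imports so that nothing but imports sits above them, and the _LE/_LI/_LW translation markers are dropped from log calls. A minimal sketch of the first two patterns, with illustrative names that are not taken from oslo.messaging:

import logging

# __all__ now follows the imports rather than preceding them.
__all__ = ['is_ready']

LOG = logging.getLogger(__name__)


def is_ready(mode, product):
    # Long conditions wrap with the operator at the end of each line,
    # the layout the updated style checks accept.
    return (mode == 'legacy' or
            (mode == 'dynamic' and
             product == 'qpid-cpp'))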
@@ -285,8 +285,8 @@ class AddresserFactory(object):
" present.")
LOG.warning(w)

-if self._mode == 'legacy' or (self._mode == 'dynamic'
-and product == 'qpid-cpp'):
+if self._mode == 'legacy' or (self._mode == 'dynamic' and
+product == 'qpid-cpp'):
return LegacyAddresser(self._default_exchange,
self._kwargs['legacy_server_prefix'],
self._kwargs['legacy_broadcast_prefix'],
@@ -35,11 +35,6 @@ import threading
import time
import uuid

-if hasattr(time, 'monotonic'):
-    now = time.monotonic
-else:
-    from monotonic import monotonic as now # noqa
-
import proton
import pyngus
from six import iteritems
@@ -56,6 +51,11 @@ from oslo_messaging import exceptions
from oslo_messaging.target import Target
from oslo_messaging import transport

+if hasattr(time, 'monotonic'):
+    now = time.monotonic
+else:
+    from monotonic import monotonic as now # noqa
+
LOG = logging.getLogger(__name__)
@@ -491,8 +491,8 @@ class Sender(pyngus.SenderEventHandler):
self._unacked.discard(send_task)
if state == Sender._ACCEPTED:
send_task._on_ack(Sender._ACCEPTED, info)
-elif (state == Sender._RELEASED
-or (state == Sender._MODIFIED and
+elif (state == Sender._RELEASED or
+(state == Sender._MODIFIED and
# assuming delivery-failed means in-doubt:
not info.get("delivery-failed") and
not info.get("undeliverable-here"))):
@@ -1337,5 +1337,5 @@ class Controller(pyngus.ConnectionEventHandler):
@property
def _active(self):
# Is the connection up
-return (self._socket_connection
-and self._socket_connection.pyngus_conn.active)
+return (self._socket_connection and
+self._socket_connection.pyngus_conn.active)
@@ -28,6 +28,7 @@ import heapq
import logging
import math
import os
+import pyngus
import select
import socket
import threading
@@ -39,9 +40,6 @@ if hasattr(time, 'monotonic'):
else:
    from monotonic import monotonic as now # noqa

-import pyngus
-
-from oslo_messaging._i18n import _LE, _LI, _LW
LOG = logging.getLogger(__name__)
@@ -103,7 +101,7 @@ class _SocketConnection(object):
if not addr:
key = "%s:%i" % (host.hostname, host.port)
error = "Invalid peer address '%s'" % key
-LOG.error(_LE("Invalid peer address '%s'"), key)
+LOG.error("Invalid peer address '%s'", key)
self._handler.socket_error(error)
return
my_socket = socket.socket(addr[0][0], addr[0][1], addr[0][2])
@@ -114,7 +112,7 @@ class _SocketConnection(object):
except socket.error as e:
if e.errno != errno.EINPROGRESS:
error = "Socket connect failure '%s'" % str(e)
-LOG.error(_LE("Socket connect failure '%s'"), str(e))
+LOG.error("Socket connect failure '%s'", str(e))
self._handler.socket_error(error)
return
self.socket = my_socket
@@ -388,7 +386,7 @@ class Thread(threading.Thread):
select.select(readfds, writefds, [], timeout)
except select.error as serror:
if serror[0] == errno.EINTR:
-LOG.warning(_LW("ignoring interrupt from select(): %s"),
+LOG.warning("ignoring interrupt from select(): %s",
str(serror))
continue
raise # assuming fatal...
@@ -406,5 +404,5 @@ class Thread(threading.Thread):

self._scheduler._process() # run any deferred requests

-LOG.info(_LI("eventloop thread exiting, container=%s"),
+LOG.info("eventloop thread exiting, container=%s",
self._container.name)
@@ -13,8 +13,6 @@
# License for the specific language governing permissions and limitations
# under the License.

-__all__ = ['AMQPDriverBase']
-
import logging
import threading
import time
@@ -33,6 +31,8 @@ from oslo_messaging._i18n import _LE
from oslo_messaging._i18n import _LI
from oslo_messaging._i18n import _LW

+__all__ = ['AMQPDriverBase']
+
LOG = logging.getLogger(__name__)

# Minimum/Maximum sleep between a poll and ack/requeue
@@ -1094,9 +1094,9 @@ class Connection(object):
info = dict([(k, ci.get(k)) for k in
['hostname', 'port', 'transport']])
client_port = None
-if (not conn_error and self.channel
-and hasattr(self.channel.connection, 'sock')
-and self.channel.connection.sock):
+if (not conn_error and self.channel and
+hasattr(self.channel.connection, 'sock') and
+self.channel.connection.sock):
client_port = self.channel.connection.sock.getsockname()[1]
info.update({'client_port': client_port,
'connection_id': self.connection_id})
@@ -13,13 +13,13 @@
# License for the specific language governing permissions and limitations
# under the License.

-__all__ = ['ConfFixture']
-
import sys

import fixtures
from functools import wraps

+__all__ = ['ConfFixture']
+

def _import_opts(conf, module, opts, group=None):
__import__(module)
@@ -13,11 +13,11 @@
# License for the specific language governing permissions and limitations
# under the License.

+import six
+
__all__ = ['MessagingException', 'MessagingTimeout', 'MessageDeliveryFailure',
'InvalidTarget']

-import six
-

class MessagingException(Exception):
"""Base class for exceptions."""
@@ -157,8 +157,8 @@ class CheckForLoggingIssues(BaseASTChecker):
"""Return the fully qualified name or a Name or Attribute."""
if isinstance(node, ast.Name):
return node.id
-elif (isinstance(node, ast.Attribute)
-and isinstance(node.value, (ast.Name, ast.Attribute))):
+elif (isinstance(node, ast.Attribute) and
+isinstance(node.value, (ast.Name, ast.Attribute))):
method_name = node.attr
obj_name = self._find_name(node.value)
if obj_name is None:
@@ -189,8 +189,8 @@ class CheckForLoggingIssues(BaseASTChecker):
"""
attr_node_types = (ast.Name, ast.Attribute)

-if (len(node.targets) != 1
-or not isinstance(node.targets[0], attr_node_types)):
+if (len(node.targets) != 1 or
+not isinstance(node.targets[0], attr_node_types)):
# say no to: "x, y = ..."
return super(CheckForLoggingIssues, self).generic_visit(node)
@@ -211,13 +211,13 @@ class CheckForLoggingIssues(BaseASTChecker):
return super(CheckForLoggingIssues, self).generic_visit(node)

# is this a call to an i18n function?
-if (isinstance(node.value.func, ast.Name)
-and node.value.func.id in self.i18n_names):
+if (isinstance(node.value.func, ast.Name) and
+node.value.func.id in self.i18n_names):
self.assignments[target_name] = node.value.func.id
return super(CheckForLoggingIssues, self).generic_visit(node)

-if (not isinstance(node.value.func, ast.Attribute)
-or not isinstance(node.value.func.value, attr_node_types)):
+if (not isinstance(node.value.func, ast.Attribute) or
+not isinstance(node.value.func.value, attr_node_types)):
# function must be an attribute on an object like
# logging.getLogger
return super(CheckForLoggingIssues, self).generic_visit(node)
@@ -225,8 +225,8 @@ class CheckForLoggingIssues(BaseASTChecker):
object_name = self._find_name(node.value.func.value)
func_name = node.value.func.attr

-if (object_name in self.logger_module_names
-and func_name == 'getLogger'):
+if (object_name in self.logger_module_names and
+func_name == 'getLogger'):
self.logger_names.append(target_name)

return super(CheckForLoggingIssues, self).generic_visit(node)
@@ -250,8 +250,8 @@ class CheckForLoggingIssues(BaseASTChecker):
self.add_error(msg, message=self.USING_DEPRECATED_WARN)

# must be a logger instance and one of the support logging methods
-if (obj_name not in self.logger_names
-or method_name not in self.TRANS_HELPER_MAP):
+if (obj_name not in self.logger_names or
+method_name not in self.TRANS_HELPER_MAP):
return super(CheckForLoggingIssues, self).generic_visit(node)

# the call must have arguments
@@ -269,15 +269,15 @@ class CheckForLoggingIssues(BaseASTChecker):
msg = node.args[0] # first arg to a logging method is the msg

# if first arg is a call to a i18n name
-if (isinstance(msg, ast.Call)
-and isinstance(msg.func, ast.Name)
-and msg.func.id in self.i18n_names):
+if (isinstance(msg, ast.Call) and
+isinstance(msg.func, ast.Name) and
+msg.func.id in self.i18n_names):
self.add_error(msg, message=self.DEBUG_CHECK_DESC)

# if the first arg is a reference to a i18n call
-elif (isinstance(msg, ast.Name)
-and msg.id in self.assignments
-and not self._is_raised_later(node, msg.id)):
+elif (isinstance(msg, ast.Name) and
+msg.id in self.assignments and
+not self._is_raised_later(node, msg.id)):
self.add_error(msg, message=self.DEBUG_CHECK_DESC)

def _process_non_debug(self, node, method_name):
@@ -321,11 +321,11 @@ class CheckForLoggingIssues(BaseASTChecker):
return

helper_method_name = self.TRANS_HELPER_MAP[method_name]
-if (self.assignments[msg.id] != helper_method_name
-and not self._is_raised_later(node, msg.id)):
+if (self.assignments[msg.id] != helper_method_name and
+not self._is_raised_later(node, msg.id)):
self.add_error(msg, message=self.NONDEBUG_CHECK_DESC)
-elif (self.assignments[msg.id] == helper_method_name
-and self._is_raised_later(node, msg.id)):
+elif (self.assignments[msg.id] == helper_method_name and
+self._is_raised_later(node, msg.id)):
self.add_error(msg, message=self.EXCESS_HELPER_CHECK_DESC)

def _is_raised_later(self, node, name):
@@ -13,10 +13,6 @@
# License for the specific language governing permissions and limitations
# under the License.

-__all__ = [
-'list_opts'
-]
-
import copy
import itertools
@@ -30,6 +26,9 @@ from oslo_messaging.rpc import client
from oslo_messaging import server
from oslo_messaging import transport

+__all__ = [
+'list_opts'
+]

_global_opt_lists = [
drivers_base.base_opts,
@@ -16,13 +16,6 @@
# License for the specific language governing permissions and limitations
# under the License.

-__all__ = [
-'ClientSendError',
-'RPCClient',
-'RPCVersionCapError',
-'RemoteError',
-]
-
import abc
import logging
@@ -35,6 +28,12 @@ from oslo_messaging import exceptions
from oslo_messaging import serializer as msg_serializer
from oslo_messaging import transport as msg_transport

+__all__ = [
+'ClientSendError',
+'RPCClient',
+'RPCVersionCapError',
+'RemoteError',
+]

LOG = logging.getLogger(__name__)
@@ -16,18 +16,6 @@
# License for the specific language governing permissions and limitations
# under the License.

-__all__ = [
-'NoSuchMethod',
-'RPCAccessPolicyBase',
-'LegacyRPCAccessPolicy',
-'DefaultRPCAccessPolicy',
-'ExplicitRPCAccessPolicy',
-'RPCDispatcher',
-'RPCDispatcherError',
-'UnsupportedVersion',
-'ExpectedException',
-]
-
from abc import ABCMeta
from abc import abstractmethod
import logging
@@ -42,6 +30,18 @@ from oslo_messaging import serializer as msg_serializer
from oslo_messaging import server as msg_server
from oslo_messaging import target as msg_target

+__all__ = [
+'NoSuchMethod',
+'RPCAccessPolicyBase',
+'LegacyRPCAccessPolicy',
+'DefaultRPCAccessPolicy',
+'ExplicitRPCAccessPolicy',
+'RPCDispatcher',
+'RPCDispatcherError',
+'UnsupportedVersion',
+'ExpectedException',
+]
+
LOG = logging.getLogger(__name__)
@@ -121,12 +121,6 @@ A simple example of an RPC server with multiple endpoints might be::

"""

-__all__ = [
-'get_rpc_server',
-'expected_exceptions',
-'expose'
-]
-
import logging
import sys
@@ -135,6 +129,12 @@ from oslo_messaging.rpc import dispatcher as rpc_dispatcher
from oslo_messaging import server as msg_server
from oslo_messaging import transport as msg_transport

+__all__ = [
+'get_rpc_server',
+'expected_exceptions',
+'expose'
+]
+
LOG = logging.getLogger(__name__)
@@ -13,12 +13,13 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

+from oslo_messaging import transport as msg_transport
+
__all__ = [
'get_rpc_transport'
]

-from oslo_messaging import transport as msg_transport
-

def get_rpc_transport(conf, url=None,
allowed_remote_exmods=None):
@@ -12,8 +12,6 @@
# License for the specific language governing permissions and limitations
# under the License.

-__all__ = ['Serializer', 'NoOpSerializer', 'JsonPayloadSerializer']
-
"""Provides the definition of a message serialization handler"""

import abc
@@ -21,6 +19,8 @@ import abc
from oslo_serialization import jsonutils
import six

+__all__ = ['Serializer', 'NoOpSerializer', 'JsonPayloadSerializer']
+

@six.add_metaclass(abc.ABCMeta)
class Serializer(object):
@@ -16,13 +16,6 @@
# License for the specific language governing permissions and limitations
# under the License.

-__all__ = [
-'ExecutorLoadFailure',
-'MessageHandlingServer',
-'MessagingServerError',
-'ServerListenError',
-]
-
import abc
import functools
import inspect
@@ -42,6 +35,13 @@ from oslo_messaging._drivers import base as driver_base
from oslo_messaging._i18n import _LW
from oslo_messaging import exceptions

+__all__ = [
+'ExecutorLoadFailure',
+'MessageHandlingServer',
+'MessagingServerError',
+'ServerListenError',
+]
+
LOG = logging.getLogger(__name__)

# The default number of seconds of waiting after which we will emit a log
@@ -51,8 +51,8 @@ if pyngus:
# The Cyrus-based SASL tests can only be run if the installed version of proton
# has been built with Cyrus SASL support.
_proton = importutils.try_import("proton")
-CYRUS_ENABLED = (pyngus and pyngus.VERSION >= (2, 0, 0) and _proton
-and getattr(_proton.SASL, "extended", lambda: False)())
+CYRUS_ENABLED = (pyngus and pyngus.VERSION >= (2, 0, 0) and _proton and
+getattr(_proton.SASL, "extended", lambda: False)())
# same with SSL
# SSL_ENABLED = (_proton and getattr(_proton.SSL, "present", lambda: False)())
SSL_ENABLED = False
@@ -1305,10 +1305,10 @@ class TestAddressing(test_utils.BaseTestCase):
s1_payload = [m.message.get('msg') for m in rl[0].get_messages()]
s2_payload = [m.message.get('msg') for m in rl[1].get_messages()]

-self.assertTrue("Server1" in s1_payload
-and "Server2" not in s1_payload)
-self.assertTrue("Server2" in s2_payload
-and "Server1" not in s2_payload)
+self.assertTrue("Server1" in s1_payload and
+"Server2" not in s1_payload)
+self.assertTrue("Server2" in s2_payload and
+"Server1" not in s2_payload)
self.assertEqual(s1_payload.count("Fanout"), 1)
self.assertEqual(s2_payload.count("Fanout"), 1)
self.assertEqual((s1_payload + s2_payload).count("Anycast1"), 1)
@@ -153,8 +153,8 @@ class CallTestCase(utils.SkipIfNoTransportURL):
self.assertEqual(10, server.endpoint.ival)

def test_monitor_long_call(self):
-if not (self.url.startswith("rabbit://")
-or self.url.startswith("amqp://")):
+if not (self.url.startswith("rabbit://") or
+self.url.startswith("amqp://")):
self.skipTest("backend does not support call monitoring")

transport = self.useFixture(utils.RPCTransportFixture(self.conf,
@@ -17,16 +17,6 @@
# License for the specific language governing permissions and limitations
# under the License.

-__all__ = [
-'DriverLoadFailure',
-'InvalidTransportURL',
-'Transport',
-'TransportHost',
-'TransportURL',
-'get_transport',
-'set_transport_defaults',
-]
-
import logging

from debtcollector import removals
@@ -38,6 +28,16 @@ from stevedore import driver
from oslo_messaging._i18n import _LW
from oslo_messaging import exceptions

+__all__ = [
+'DriverLoadFailure',
+'InvalidTransportURL',
+'Transport',
+'TransportHost',
+'TransportURL',
+'get_transport',
+'set_transport_defaults',
+]
+
LOG = logging.getLogger(__name__)

_transport_opts = [
@@ -3,7 +3,7 @@
# process, which may cause wedges in the gate later.

# Hacking already pins down pep8, pyflakes and flake8
-hacking!=0.13.0,<0.14,>=0.12.0 # Apache-2.0
+hacking>=1.1.0,<1.2.0 # Apache-2.0

fixtures>=3.0.0 # Apache-2.0/BSD
mock>=2.0.0 # BSD
@@ -10,8 +10,8 @@
# License for the specific language governing permissions and limitations
# under the License.

-import eventlet
-eventlet.monkey_patch()
+import eventlet # noqa
+eventlet.monkey_patch() # noqa

import argparse
import bisect
tox.ini
@@ -99,9 +99,11 @@ basepython = python3
commands = bandit -r oslo_messaging -x tests -n5

[flake8]
+# E731 skipped as assign a lambda expression
+
show-source = True
enable-extensions = H203,H106
-ignore = H405
+ignore = E731,H405
exclude = .tox,dist,doc,*.egg,build,__init__.py

[hacking]
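For reference, E731 is pycodestyle's "do not assign a lambda expression, use a def" check; listing it under ignore lets the code base keep assigning lambdas to names, as the new comment notes. A tiny hypothetical illustration of what E731 flags and the def form it prefers:

# Flagged by E731 when the check is enabled:
double = lambda x: x * 2

# The equivalent def that E731 asks for instead:
def double_def(x):
    return x * 2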