Keep CI up-to-date
Rally prior change - I77eba97596df4448065982956c3b6fb08c8e45db

* Fix pep8 issues. For unknown reasons, the previous flake8 version did not
  complain about some too-long lines and a few other issues.
* Temporarily disable the VMTask scenarios in CI. They raise TimeoutError and
  block several jobs from running. Let's fix this issue separately.
* Re-enable the rally-tox-pep8, rally-tox-cover, rally-dsvm-tox-functional,
  rally-task-basic-with-existing-users, rally-task-simple-job,
  rally-task-neutron, rally-task-nova and rally-verify-tempest jobs.
* Add rally-tox-py310.

Change-Id: I6a167793f1b900ff459519b593cac7c2c3e9724b
parent 48ae844a70
commit d66486874d
@@ -1,16 +1,17 @@
 - project:
     check:
       jobs:
-        #- rally-tox-cover
-        #- rally-tox-pep8
+        - rally-tox-cover
+        - rally-tox-pep8
         - rally-tox-py36
         - rally-tox-py37
         - rally-tox-py38
         - rally-tox-py39
-        #- rally-dsvm-tox-functional
+        - rally-tox-py310
+        - rally-dsvm-tox-functional
         - rally-openstack-docker-build
-        #- rally-task-basic-with-existing-users
-        #- rally-task-simple-job
+        - rally-task-basic-with-existing-users
+        - rally-task-simple-job
         - rally-task-barbican:
             files:
               - .zuul.d/zuul.yaml
@@ -53,7 +54,7 @@
         #- rally-task-monasca
         - rally-task-murano:
             voting: false
-        #- rally-task-neutron
+        - rally-task-neutron
         - rally-task-neutron-trunk:
             files:
               - .zuul.d/zuul.yaml
@@ -67,8 +68,8 @@
               - tests/ci/playbooks
         - rally-task-neutron-with-extensions:
             voting: false
-        #- rally-task-nova:
-        #    voting: false
+        - rally-task-nova:
+            voting: false
         # it did not work for a long time
         #- rally-task-senlin
         #- rally-task-octavia:
@@ -80,19 +81,20 @@
             voting: false
         - rally-task-zaqar:
             voting: false
-        #- rally-verify-tempest
+        - rally-verify-tempest
     gate:
       jobs:
-        #- rally-tox-cover
-        #- rally-tox-pep8
+        - rally-tox-cover
+        - rally-tox-pep8
         - rally-tox-py36
         - rally-tox-py37
         - rally-tox-py38
         - rally-tox-py39
-        #- rally-dsvm-tox-functional
+        - rally-tox-py310
+        - rally-dsvm-tox-functional
         - rally-openstack-docker-build
         #- rally-task-basic-with-existing-users
-        #- rally-task-simple-job
+        - rally-task-simple-job
         - rally-task-barbican:
             files:
               - .zuul.d/zuul.yaml
@@ -107,7 +109,7 @@
         - rally-task-ironic
         - rally-task-keystone-glance-swift
         - rally-task-mistral
-        #- rally-task-neutron
+        - rally-task-neutron
         - rally-task-neutron-trunk:
             files:
               - rally-jobs/neutron-trunk.yaml
@@ -118,7 +120,7 @@
               - rally_openstack/task/scenarios/neutron/trunk.py
               - rally_openstack/task/scenarios/neutron/network.py
               - tests/ci/playbooks
-        #- rally-verify-tempest
+        - rally-verify-tempest
     post:
       jobs:
         - rally-openstack-docker-build-and-push:
@@ -111,25 +111,3 @@
         sla:
           failure_rate:
             max: 0
-
-  - title: Test VMTask scenario
-    workloads:
-      -
-        scenario:
-          VMTasks.dd_load_test:
-            flavor:
-              name: "m1.tiny"
-            image:
-              name: {{image_name}}
-            floating_network: "public"
-            force_delete: false
-            username: "cirros"
-        contexts:
-          network: {}
-        runner:
-          constant:
-            times: 2
-            concurrency: 2
-        sla:
-          failure_rate:
-            max: 0
@@ -719,105 +719,3 @@
         sla:
           failure_rate:
             max: 0
-
-  VMTasks.boot_runcommand_delete:
-    -
-      args:
-        flavor:
-          name: "m1.tiny"
-        image:
-          name: {{image_name}}
-        command:
-          script_file: "~/.rally/extra/instance_test.sh"
-          interpreter: "/bin/sh"
-        username: "cirros"
-      runner:
-        type: "constant"
-        times: {{smoke or 2}}
-        concurrency: {{smoke or 2}}
-      context:
-        users:
-          tenants: {{smoke or 2}}
-          users_per_tenant: {{smoke or 2}}
-        network: {}
-      sla:
-        failure_rate:
-          max: 0
-    -
-      args:
-        flavor:
-          name: "m1.tiny"
-        image:
-          name: {{image_name}}
-        command:
-          script_file: "~/.rally/extra/instance_test.sh"
-          interpreter: "/bin/sh"
-        username: "cirros"
-        volume_args:
-          size: 2
-      runner:
-        type: "constant"
-        times: {{smoke or 2}}
-        concurrency: {{smoke or 2}}
-      context:
-        users:
-          tenants: {{smoke or 2}}
-          users_per_tenant: {{smoke or 1}}
-        network: {}
-      sla:
-        failure_rate:
-          max: 0
-    -
-      args:
-        flavor:
-          name: {{flavor_name}}
-        image:
-          name: {{image_name}}
-        floating_network: "public"
-        command:
-          script_inline: |
-            time_seconds(){ (time -p $1 ) 2>&1 |awk '/real/{print $2}'; }
-            file=/tmp/test.img
-            c=100 #100M
-            write_seq=$(time_seconds "dd if=/dev/zero of=$file bs=1M count=$c")
-            read_seq=$(time_seconds "dd if=$file of=/dev/null bs=1M count=$c")
-            [ -f $file ] && rm $file
-
-            echo "{
-              \"write_seq\": $write_seq,
-              \"read_seq\": $read_seq
-              }"
-          interpreter: "/bin/sh"
-        username: "cirros"
-      runner:
-        type: "constant"
-        times: 2
-        concurrency: 2
-      context:
-        users:
-          tenants: 1
-          users_per_tenant: 1
-        network: {}
-      sla:
-        failure_rate:
-          max: 0
-
-  VMTasks.dd_load_test:
-    -
-      args:
-        flavor:
-          name: "m1.tiny"
-        image:
-          name: {{image_name}}
-        floating_network: "public"
-        force_delete: false
-        username: "cirros"
-      runner:
-        type: "constant"
-        times: 2
-        concurrency: 2
-      context:
-        users:
-          tenants: 2
-          users_per_tenant: 1
-        network: {}
@@ -472,7 +472,7 @@ class BlockStorage(service.UnifiedService):
 
     @service.should_be_overridden
     def delete_encryption_type(self, volume_type):
-        """Delete the encryption type information for the specified volume type.
+        """Delete the encryption type information for the specified volume type
 
        :param volume_type: the volume type whose encryption type information
                            must be deleted
@@ -481,7 +481,7 @@ class BlockStorage(service.UnifiedService):
 
     @service.should_be_overridden
     def update_encryption_type(self, volume_type, specs):
-        """Update the encryption type information for the specified volume type.
+        """Update the encryption type information for the specified volume type
 
        :param volume_type: the volume type whose encryption type information
                            will be updated
@@ -367,7 +367,7 @@ class CinderMixin(object):
         with atomic.ActionTimer(self, aname):
             tuple_res = self._get_client().volume_types.delete(
                 volume_type)
-            return (tuple_res[0].status_code == 202)
+            return tuple_res[0].status_code == 202
 
     def set_volume_type_keys(self, volume_type, metadata):
         """Set extra specs on a volume type.
@@ -438,7 +438,7 @@ class CinderMixin(object):
                 search_opts)
 
     def delete_encryption_type(self, volume_type):
-        """Delete the encryption type information for the specified volume type.
+        """Delete the encryption type information for the specified volume type
 
        :param volume_type: the volume type whose encryption type information
                            must be deleted
@@ -452,7 +452,7 @@ class CinderMixin(object):
                 "EncryptionType Deletion Failed")
 
     def update_encryption_type(self, volume_type, specs):
-        """Update the encryption type information for the specified volume type.
+        """Update the encryption type information for the specified volume type
 
        :param volume_type: the volume type whose encryption type information
                            must be updated
@@ -746,7 +746,7 @@ class UnifiedCinderMixin(object):
                 search_opts=search_opts)]
 
     def delete_encryption_type(self, volume_type):
-        """Delete the encryption type information for the specified volume type.
+        """Delete the encryption type information for the specified volume type
 
        :param volume_type: the volume type whose encryption type information
                            must be deleted
@@ -754,7 +754,7 @@ class UnifiedCinderMixin(object):
         return self._impl.delete_encryption_type(volume_type)
 
     def update_encryption_type(self, volume_type, specs):
-        """Update the encryption type information for the specified volume type.
+        """Update the encryption type information for the specified volume type
 
        :param volume_type: the volume type whose encryption type information
                            must be updated
@@ -37,7 +37,7 @@ CONF = cfg.CONF
 class CreateAndListTrunks(neutron_utils.NeutronScenario):
 
     def run(self, network_create_args=None, subport_count=10):
-        """Create and a given number of trunks with subports and list all trunks
+        """Create a given number of trunks with subports and list all trunks.
 
        :param network_create_args: dict, POST /v2.0/networks request
                                    options. Deprecated.
@@ -491,7 +491,7 @@ class CloudResources(object):
 
     def compare(self, with_list):
         def make_uuid(res):
-            return"%s.%s:%s" % (
+            return "%s.%s:%s" % (
                 res["cls"], res["resource_name"],
                 ";".join(["%s=%s" % (k, v)
                           for k, v in sorted(res["id"].items())]))
@@ -325,25 +325,10 @@ class RunVerification(Step):
            "[dashboard,id-4f8851b1-0e69-482b-b63b-84c6e76f6c80,smoke]":
                "Fails for unknown reason",
 
-            "tempest.api.compute.servers.test_attach_interfaces"
-            ".AttachInterfacesUnderV243Test.test_add_remove_fixed_ip"
-            "[id-c7e0e60b-ee45-43d0-abeb-8596fd42a2f9,network,smoke]":
-                "Fails for unknown reason",
-
-            "tempest.scenario.test_network_basic_ops"
-            ".TestNetworkBasicOps.test_network_basic_ops"
-            "[compute,id-f323b3ba-82f8-4db7-8ea6-6a895869ec49,network,smoke]":
-                "Fails for unknown reason",
-
            "tempest.scenario.test_server_basic_ops"
            ".TestServerBasicOps.test_server_basic_ops"
            "[compute,id-7fff3fb3-91d8-4fd0-bd7d-0204f1f180ba,network,smoke]":
                "Fails for unknown reason",
-
-            "tempest.api.compute.servers.test_server_actions"
-            ".ServerActionsTestJSON.test_reboot_server_hard"
-            "[id-2cb1baf6-ac8d-4429-bf0d-ba8a0ba53e32,smoke]":
-                "Fails for unknown reason"
        }
 
    def setUp(self):
@@ -12,7 +12,7 @@
 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 # License for the specific language governing permissions and limitations
 # under the License.
+import json
 import re
 
 import testtools
@@ -46,8 +46,9 @@ class DeploymentTestCase(testtools.TestCase):
         rally = utils.Rally()
         rally.env.update(TEST_ENV)
         rally("deployment create --name t_create_env --fromenv")
+        existing_conf = rally("deployment config", getjson=True)
         with open("/tmp/.tmp.deployment", "w") as f:
-            f.write(rally("deployment config"))
+            f.write(json.dumps(existing_conf))
         rally("deployment create --name t_create_file "
               "--filename /tmp/.tmp.deployment")
         self.assertIn("t_create_file", rally("deployment list"))
@@ -12,7 +12,6 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
-import json
 import unittest
 
 from tests.functional import utils
@@ -22,7 +21,7 @@ class TaskTestCase(unittest.TestCase):
 
     def test_specify_version_by_deployment(self):
         rally = utils.Rally()
-        deployment = json.loads(rally("deployment config"))
+        deployment = rally("deployment config", getjson=True)
         deployment["openstack"]["api_info"] = {
             "fakedummy": {
                 "version": "2",
@@ -52,7 +51,7 @@ class TaskTestCase(unittest.TestCase):
 
     def test_specify_version_by_deployment_with_existing_users(self):
         rally = utils.Rally()
-        deployment = json.loads(rally("deployment config"))
+        deployment = rally("deployment config", getjson=True)
         deployment["openstack"]["users"] = [deployment["openstack"]["admin"]]
         deployment["openstack"]["api_info"] = {
             "fakedummy": {
@@ -201,9 +201,10 @@ class Rally(object):
         try:
             if no_logs or getjson:
                 cmd = self.args + ["--log-file", "/dev/null"] + cmd
-                with open(os.devnull, "w") as DEVNULL:
-                    output = encodeutils.safe_decode(subprocess.check_output(
-                        cmd, stderr=DEVNULL, env=self.env))
+                output = encodeutils.safe_decode(
+                    subprocess.check_output(
+                        cmd, stderr=subprocess.DEVNULL, env=self.env)
+                )
             else:
                 cmd = self.args + cmd
                 output = encodeutils.safe_decode(subprocess.check_output(
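Side note on the hunk above (an illustration, not part of the change itself): subprocess.DEVNULL, available since Python 3.3, lets check_output discard a child's stderr without manually opening os.devnull, which is what makes the old "with open(os.devnull, ...)" wrapper unnecessary. A minimal standalone sketch of the same pattern (assumes a POSIX echo binary is available):

    import subprocess

    # Run a command, silence its stderr, and capture stdout as text.
    output = subprocess.check_output(
        ["echo", "hello"], stderr=subprocess.DEVNULL).decode("utf-8")
    print(output)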
@@ -1,39 +0,0 @@
-Rally Style Commandments
-========================
-
-- Step 1: Read the OpenStack Style Commandments
-  https://docs.openstack.org/hacking/latest/
-- Step 2: Read on
-
-Rally Specific Commandments
----------------------------
-* [N30x] - Reserved for rules related to ``mock`` library
-* [N301] - Ensure that ``assert_*`` methods from ``mock`` library is used correctly
-* [N302] - Ensure that nonexistent "assert_called" is not used
-* [N303] - Ensure that nonexistent "assert_called_once" is not used
-* [N310-N314] - Reserved for rules related to logging
-* [N310] - Ensure that ``rally.common.log`` is used as logging module
-* [N311] - Validate that debug level logs are not translated
-* [N312] - Validate correctness of debug on check.
-* [N313] - Validate that LOG.warning is used instead of deprecated LOG.warn.
-* [N32x] - Reserved for rules related to assert* methods
-* [N320] - Ensure that ``assertTrue(isinstance(A, B))`` is not used
-* [N321] - Ensure that ``assertEqual(type(A), B)`` is not used
-* [N322] - Ensure that ``assertEqual(A, None)`` and ``assertEqual(None, A)`` are not used
-* [N323] - Ensure that ``assertTrue/assertFalse(A in/not in B)`` are not used with collection contents
-* [N324] - Ensure that ``assertEqual(A in/not in B, True/False)`` and ``assertEqual(True/False, A in/not in B)`` are not used with collection contents
-* [N325] - Ensure that ``assertNotEqual(A, None)`` and ``assertNotEqual(None, A)`` are not used
-* [N326] - Ensure that ``assertEqual(A, True/False)`` and ``assertEqual(True/False, A)`` are not used
-* [N340] - Ensure that we are importing always ``from rally import objects``
-* [N341] - Ensure that we are importing oslo_xyz packages instead of deprecated oslo.xyz ones
-* [N342] - Ensure that we load opts from correct paths only
-* [N350] - Ensure that single quotes are not used
-* [N351] - Ensure that data structs (i.e Lists and Dicts) are declared literally rather than using constructors
-* [N352] - Ensure that string formatting only uses a mapping if multiple mapping keys are used.
-* [N353] - Ensure that unicode() function is not uset because of absence in py3
-* [N354] - Ensure that ``:raises: Exception`` is not used
-* [N355] - Ensure that we use only "new-style" Python classes
-* [N356] - Ensure using ``dt`` as alias for ``datetime``
-* [N360-N370] - Reserved for rules related to CLI
-* [N360] - Ensure that CLI modules do not use ``rally.common.db``
-* [N361] - Ensure that CLI modules do not use ``rally.common.objects``
@@ -24,13 +24,11 @@ Guidelines for writing new hacking checks
 
 """
 
-import functools
 import re
 import tokenize
 
 from hacking import core
 
 
 re_assert_equal_end_with_true_or_false = re.compile(
     r"assertEqual\(.*?, \s+(True|False)\)$")
@@ -78,22 +76,6 @@ re_datetime_alias = re.compile(r"^(from|import) datetime(?!\s+as\s+dt$)")
 re_log_warn = re.compile(r"(.)*LOG\.(warn)\(\s*('|\"|_)")
 
 
-def skip_ignored_lines(func):
-
-    @functools.wraps(func)
-    def wrapper(physical_line, logical_line, filename):
-        line = physical_line.strip()
-        if not line or line.startswith("#") or line.endswith("# noqa"):
-            return
-        try:
-            for res in func(physical_line, logical_line, filename):
-                yield res
-        except StopIteration:
-            return
-
-    return wrapper
-
-
 def _parse_assert_mock_str(line):
     point = line.find(".assert_")
 
@@ -108,8 +90,7 @@ def _parse_assert_mock_str(line):
 
 
 @core.flake8ext
-@skip_ignored_lines
-def check_assert_methods_from_mock(physical_line, logical_line, filename):
+def check_assert_methods_from_mock(logical_line, filename, noqa=False):
     """Ensure that ``assert_*`` methods from ``mock`` library is used correctly
 
     N301 - base error number
@@ -117,6 +98,8 @@ def check_assert_methods_from_mock(physical_line, logical_line, filename):
     N303 - related to nonexistent "assert_called_once"
     N304 - related to nonexistent "called_once_with"
     """
+    if noqa:
+        return
 
     correct_names = ["assert_any_call", "assert_called_once_with",
                      "assert_called_with", "assert_has_calls",
@@ -162,15 +145,16 @@ def check_assert_methods_from_mock(physical_line, logical_line, filename):
 
 
 @core.flake8ext
-@skip_ignored_lines
-def check_import_of_logging(physical_line, logical_line, filename):
+def check_import_of_logging(logical_line, filename, noqa=False):
     """Check correctness import of logging module
 
     N310
     """
+    if noqa:
+        return
 
     excluded_files = ["./rally/common/logging.py",
-                      "./tests/unit/test_logging.py",
+                      "./tests/unit/common/test_logging.py",
                       "./tests/ci/rally_verify.py",
                       "./tests/ci/sync_requirements.py"]
 
@@ -186,12 +170,13 @@ def check_import_of_logging(physical_line, logical_line, filename):
 
 
 @core.flake8ext
-@skip_ignored_lines
-def check_import_of_config(physical_line, logical_line, filename):
+def check_import_of_config(logical_line, filename, noqa=False):
     """Check correctness import of config module
 
     N311
     """
+    if noqa:
+        return
 
     excluded_files = ["./rally/common/cfg.py"]
 
@@ -206,8 +191,7 @@ def check_import_of_config(physical_line, logical_line, filename):
 
 
 @core.flake8ext
-@skip_ignored_lines
-def no_use_conf_debug_check(physical_line, logical_line, filename):
+def no_use_conf_debug_check(logical_line, filename, noqa=False):
     """Check for "cfg.CONF.debug"
 
     Rally has two DEBUG level:
@@ -217,6 +201,8 @@ def no_use_conf_debug_check(physical_line, logical_line, filename):
 
     N312
     """
+    if noqa:
+        return
     excluded_files = ["./rally/common/logging.py"]
 
     point = logical_line.find("CONF.debug")
@@ -227,36 +213,39 @@ def no_use_conf_debug_check(physical_line, logical_line, filename):
 
 
 @core.flake8ext
-@skip_ignored_lines
-def assert_true_instance(physical_line, logical_line, filename):
+def assert_true_instance(logical_line, noqa=False):
     """Check for assertTrue(isinstance(a, b)) sentences
 
     N320
     """
+    if noqa:
+        return
     if re_assert_true_instance.match(logical_line):
         yield (0, "N320 assertTrue(isinstance(a, b)) sentences not allowed, "
                   "you should use assertIsInstance(a, b) instead.")
 
 
 @core.flake8ext
-@skip_ignored_lines
-def assert_equal_type(physical_line, logical_line, filename):
+def assert_equal_type(logical_line, noqa=False):
     """Check for assertEqual(type(A), B) sentences
 
     N321
     """
+    if noqa:
+        return
     if re_assert_equal_type.match(logical_line):
         yield (0, "N321 assertEqual(type(A), B) sentences not allowed, "
                   "you should use assertIsInstance(a, b) instead.")
 
 
 @core.flake8ext
-@skip_ignored_lines
-def assert_equal_none(physical_line, logical_line, filename):
+def assert_equal_none(logical_line, noqa=False):
     """Check for assertEqual(A, None) or assertEqual(None, A) sentences
 
     N322
     """
+    if noqa:
+        return
     res = (re_assert_equal_start_with_none.search(logical_line)
            or re_assert_equal_end_with_none.search(logical_line))
     if res:
@@ -266,8 +255,7 @@ def assert_equal_none(physical_line, logical_line, filename):
 
 
 @core.flake8ext
-@skip_ignored_lines
-def assert_true_or_false_with_in(physical_line, logical_line, filename):
+def assert_true_or_false_with_in(logical_line, noqa=False):
     """Check assertTrue/False(A in/not in B) with collection contents
 
     Check for assertTrue/False(A in B), assertTrue/False(A not in B),
@@ -276,6 +264,8 @@ def assert_true_or_false_with_in(physical_line, logical_line, filename):
 
     N323
     """
+    if noqa:
+        return
     res = (re_assert_true_false_with_in_or_not_in.search(logical_line)
            or re_assert_true_false_with_in_or_not_in_spaces.search(
                logical_line))
@@ -286,8 +276,7 @@ def assert_true_or_false_with_in(physical_line, logical_line, filename):
 
 
 @core.flake8ext
-@skip_ignored_lines
-def assert_equal_in(physical_line, logical_line, filename):
+def assert_equal_in(logical_line, noqa=False):
     """Check assertEqual(A in/not in B, True/False) with collection contents
 
     Check for assertEqual(A in B, True/False), assertEqual(True/False, A in B),
@@ -296,6 +285,8 @@ def assert_equal_in(physical_line, logical_line, filename):
 
     N324
     """
+    if noqa:
+        return
     res = (re_assert_equal_in_end_with_true_or_false.search(logical_line)
            or re_assert_equal_in_start_with_true_or_false.search(logical_line))
     if res:
@@ -305,12 +296,13 @@ def assert_equal_in(physical_line, logical_line, filename):
 
 
 @core.flake8ext
-@skip_ignored_lines
-def assert_not_equal_none(physical_line, logical_line, filename):
+def assert_not_equal_none(logical_line, noqa=False):
     """Check for assertNotEqual(A, None) or assertEqual(None, A) sentences
 
     N325
     """
+    if noqa:
+        return
     res = (re_assert_not_equal_start_with_none.search(logical_line)
            or re_assert_not_equal_end_with_none.search(logical_line))
     if res:
@@ -320,8 +312,7 @@ def assert_not_equal_none(physical_line, logical_line, filename):
 
 
 @core.flake8ext
-@skip_ignored_lines
-def assert_equal_true_or_false(physical_line, logical_line, filename):
+def assert_equal_true_or_false(logical_line, noqa=False):
     """Check for assertEqual(A, True/False) sentences
 
     Check for assertEqual(A, True/False) sentences or
@@ -329,6 +320,8 @@ def assert_equal_true_or_false(physical_line, logical_line, filename):
 
     N326
     """
+    if noqa:
+        return
     res = (re_assert_equal_end_with_true_or_false.search(logical_line)
            or re_assert_equal_start_with_true_or_false.search(logical_line))
     if res:
@@ -338,9 +331,7 @@ def assert_equal_true_or_false(physical_line, logical_line, filename):
 
 
 @core.flake8ext
-@skip_ignored_lines
-def check_no_direct_rally_objects_import(physical_line, logical_line,
-                                         filename):
+def check_no_direct_rally_objects_import(logical_line, filename, noqa=False):
     """Check if rally.common.objects are properly imported.
 
     If you import "from rally.common import objects" you are able to use
@@ -348,10 +339,9 @@ def check_no_direct_rally_objects_import(physical_line, logical_line,
 
     N340
     """
-    if filename == "./rally/common/objects/__init__.py":
+    if noqa:
         return
-    if filename == "./rally/common/objects/endpoint.py":
+    if filename == "./rally/common/objects/__init__.py":
         return
 
     if (logical_line.startswith("from rally.common.objects")
@@ -362,8 +352,7 @@ def check_no_direct_rally_objects_import(physical_line, logical_line,
 
 
 @core.flake8ext
-@skip_ignored_lines
-def check_no_oslo_deprecated_import(physical_line, logical_line, filename):
+def check_no_oslo_deprecated_import(logical_line, noqa=False):
     """Check if oslo.foo packages are not imported instead of oslo_foo ones.
 
     Libraries from oslo.foo namespace are deprecated because of namespace
@@ -371,6 +360,8 @@ def check_no_oslo_deprecated_import(physical_line, logical_line, filename):
 
     N341
     """
+    if noqa:
+        return
     if (logical_line.startswith("from oslo.")
             or logical_line.startswith("import oslo.")):
         yield (0, "N341: Import oslo module: `from oslo_xyz import ...`. "
@@ -379,12 +370,13 @@ def check_no_oslo_deprecated_import(physical_line, logical_line, filename):
 
 
 @core.flake8ext
-@skip_ignored_lines
-def check_quotes(physical_line, logical_line, filename):
+def check_quotes(logical_line, noqa=False):
     """Check that single quotation marks are not used
 
     N350
     """
+    if noqa:
+        return
 
     in_string = False
     in_multiline_string = False
@@ -433,12 +425,13 @@ def check_quotes(physical_line, logical_line, filename):
 
 
 @core.flake8ext
-@skip_ignored_lines
-def check_no_constructor_data_struct(physical_line, logical_line, filename):
+def check_no_constructor_data_struct(logical_line, noqa=False):
     """Check that data structs (lists, dicts) are declared using literals
 
     N351
     """
+    if noqa:
+        return
 
     match = re_no_construct_dict.search(logical_line)
     if match:
@@ -449,17 +442,12 @@ def check_no_constructor_data_struct(physical_line, logical_line, filename):
 
 
 @core.flake8ext
-def check_dict_formatting_in_string(logical_line, tokens):
+def check_dict_formatting_in_string(logical_line, tokens, noqa=False):
     """Check that strings do not use dict-formatting with a single replacement
 
     N352
     """
-    # NOTE(stpierre): Can't use @skip_ignored_lines here because it's
-    # a stupid decorator that only works on functions that take
-    # (logical_line, filename) as arguments.
-    if (not logical_line
-            or logical_line.startswith("#")
-            or logical_line.endswith("# noqa")):
+    if noqa:
         return
 
     current_string = ""
@@ -500,7 +488,7 @@ def check_dict_formatting_in_string(logical_line, tokens):
                     format_keys.add(match.group(1))
                 if len(format_keys) == 1:
                     yield (0,
-                           "N353 Do not use mapping key string formatting "
+                           "N352 Do not use mapping key string formatting "
                            "with a single key")
         if text != ")":
             # NOTE(stpierre): You can have a parenthesized string
@@ -517,40 +505,30 @@ def check_dict_formatting_in_string(logical_line, tokens):
 
 
 @core.flake8ext
-@skip_ignored_lines
-def check_using_unicode(physical_line, logical_line, filename):
-    """Check crosspython unicode usage
-
-    N353
-    """
-
-    if re.search(r"\bunicode\(", logical_line):
-        yield (0, "N353 'unicode' function is absent in python3. Please "
-                  "use 'str' instead.")
-
-
-@core.flake8ext
-def check_raises(physical_line, logical_line, filename):
+def check_raises(logical_line, filename, noqa=False):
     """Check raises usage
 
     N354
     """
+    if noqa:
+        return
 
     ignored_files = ["./tests/unit/test_hacking.py",
                      "./tests/hacking/checks.py"]
     if filename not in ignored_files:
-        if re_raises.search(physical_line):
+        if re_raises.search(logical_line):
             yield (0, "N354 ':Please use ':raises Exception: conditions' "
                       "in docstrings.")
 
 
 @core.flake8ext
-@skip_ignored_lines
-def check_old_type_class(physical_line, logical_line, filename):
+def check_old_type_class(logical_line, noqa=False):
     """Use new-style Python classes
 
     N355
     """
+    if noqa:
+        return
 
     if re_old_type_class.search(logical_line):
         yield (0, "N355 This class does not inherit from anything and thus "
@@ -559,23 +537,25 @@ def check_old_type_class(physical_line, logical_line, filename):
 
 
 @core.flake8ext
-@skip_ignored_lines
-def check_datetime_alias(physical_line, logical_line, filename):
+def check_datetime_alias(logical_line, noqa=False):
     """Ensure using ``dt`` as alias for ``datetime``
 
     N356
     """
+    if noqa:
+        return
     if re_datetime_alias.search(logical_line):
         yield 0, "N356 Please use ``dt`` as alias for ``datetime``."
 
 
 @core.flake8ext
-@skip_ignored_lines
-def check_db_imports_in_cli(physical_line, logical_line, filename):
+def check_db_imports_in_cli(logical_line, filename, noqa=False):
     """Ensure that CLI modules do not use ``rally.common.db``
 
     N360
     """
+    if noqa:
+        return
     if (not filename.startswith("./rally/cli")
             or filename == "./rally/cli/commands/db.py"):
         return
@@ -585,8 +565,7 @@ def check_db_imports_in_cli(physical_line, logical_line, filename):
 
 
 @core.flake8ext
-@skip_ignored_lines
-def check_objects_imports_in_cli(physical_line, logical_line, filename):
+def check_objects_imports_in_cli(logical_line, filename):
     """Ensure that CLI modules do not use ``rally.common.objects``
 
     N361
@@ -599,28 +578,22 @@ def check_objects_imports_in_cli(physical_line, logical_line, filename):
 
 
 @core.flake8ext
-@skip_ignored_lines
-def check_log_warn(physical_line, logical_line, filename):
+def check_log_warn(logical_line):
     if re_log_warn.search(logical_line):
         yield 0, "N313 LOG.warn is deprecated, please use LOG.warning"
 
 
 @core.flake8ext
-@skip_ignored_lines
-def check_opts_import_path(physical_line, logical_line, filename):
+def check_opts_import_path(logical_line, noqa=False):
     """Ensure that we load opts from correct paths only
 
     N342
     """
-    excluded_files = ["./rally/task/engine.py",
-                      "./rally/task/context.py",
-                      "./rally/task/scenario.py",
-                      "./rally/common/opts.py"]
-    forbidden_methods = [r".register_opts("]
+    if noqa:
+        return
+    forbidden_methods = [".register_opts("]
 
-    if filename not in excluded_files:
-        for forbidden_method in forbidden_methods:
-            if logical_line.find(forbidden_method) != -1:
-                yield (0, "N342 All options should be loaded from correct "
-                          "paths only. For 'openstack' "
-                          "its './rally/plugin/openstack/cfg'")
+    for forbidden_method in forbidden_methods:
+        if logical_line.find(forbidden_method) != -1:
+            yield (0, "N342 All options should be loaded from correct "
                      "paths only: rally_openstack/common/cfg")
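The hacking checks above now follow flake8's convention of receiving the logical line plus an optional noqa flag, instead of being wrapped in the removed skip_ignored_lines decorator. A minimal sketch of a check written in this style (the rule code N999 and the token are made up purely for illustration; real checks are wired up through the flake8 "extension =" entries in tox.ini):

    from hacking import core


    @core.flake8ext
    def check_no_forbidden_token(logical_line, noqa=False):
        """N999: hypothetical rule, used only to illustrate the signature."""
        if noqa:
            # flake8 passes noqa=True for lines carrying a "# noqa" marker,
            # so the check skips them itself instead of relying on a decorator.
            return
        if "forbidden_token" in logical_line:
            yield 0, "N999 forbidden_token must not be used"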
@@ -60,7 +60,7 @@ def generate_mac():
 
 
 def setup_dict(data, required=None, defaults=None):
-    """Setup and validate dict scenario_base. on mandatory keys and default data.
+    """Setup and validate dict scenario_base on mandatory keys and default data
 
     This function reduces code that constructs dict objects
     with specific schema (e.g. for API data).
@@ -46,6 +46,17 @@ class RallyJobsTestCase(test.TestCase):
                 return job_name, job_cfg
         return job, None
 
+    @staticmethod
+    def _tox_job_sorter(job_name):
+        python_maj_version = 0
+        python_min_version = 0
+        _rally, _tox, job_name = job_name.split("-", 3)
+        if job_name.startswith("py"):
+            python_maj_version = int(job_name[2])
+            python_min_version = int(job_name[3:])
+            job_name = "py"
+        return job_name, python_maj_version, python_min_version
+
     def _check_order_of_jobs(self, pipeline):
         jobs = self.project_cfg[pipeline]["jobs"]
 
@@ -64,8 +75,10 @@ class RallyJobsTestCase(test.TestCase):
 
         jobs_names = [self._parse_job(job)[0] for job in jobs]
 
-        tox_jobs = sorted(job for job in jobs_names
-                          if job.startswith("rally-tox"))
+        tox_jobs = sorted(
+            (job for job in jobs_names if job.startswith("rally-tox-")),
+            key=self._tox_job_sorter
+        )
        for i, job in enumerate(tox_jobs):
            if job != jobs[i]:
                self.fail(error_message % (job, jobs[i]))
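To see why the _tox_job_sorter key added above matters, here is a standalone illustration with assumed job names (not code from the change): plain lexicographic sorting puts rally-tox-py310 before rally-tox-py36, while the key compares the parsed Python versions numerically.

    jobs = ["rally-tox-py39", "rally-tox-py310", "rally-tox-py36"]

    # Lexicographic order puts "py310" first:
    # ['rally-tox-py310', 'rally-tox-py36', 'rally-tox-py39']
    print(sorted(jobs))

    def _tox_job_sorter(job_name):
        # Same key as in the test above: ("py", major, minor) tuples.
        python_maj_version = 0
        python_min_version = 0
        _rally, _tox, job_name = job_name.split("-", 3)
        if job_name.startswith("py"):
            python_maj_version = int(job_name[2])
            python_min_version = int(job_name[3:])
            job_name = "py"
        return job_name, python_maj_version, python_min_version

    # Numeric order: ['rally-tox-py36', 'rally-tox-py39', 'rally-tox-py310']
    print(sorted(jobs, key=_tox_job_sorter))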
@@ -10,6 +10,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+import inspect
 import io
 import tokenize
 
@@ -23,12 +24,23 @@ from tests.unit import test
 class HackingTestCase(test.TestCase):
 
     def _assert_good_samples(self, checker, samples, module_file="f"):
+        spec = inspect.getfullargspec(checker)
+        base_args = {}
+        if "filename" in spec.args:
+            base_args["filename"] = module_file
         for s in samples:
-            self.assertEqual([], list(checker(s, s, module_file)), s)
+            args = {"logical_line": s, **base_args}
+            self.assertEqual([], list(checker(**args)), s)
 
     def _assert_bad_samples(self, checker, samples, module_file="f"):
+        spec = inspect.getfullargspec(checker)
+        base_args = {}
+        if "filename" in spec.args:
+            base_args["filename"] = module_file
+
         for s in samples:
-            self.assertEqual(1, len(list(checker(s, s, module_file))), s)
+            args = {"logical_line": s, **base_args}
+            self.assertEqual(1, len(list(checker(**args))), s)
 
     def test__parse_assert_mock_str(self):
         pos, method, obj = checks._parse_assert_mock_str(
@@ -43,21 +55,6 @@ class HackingTestCase(test.TestCase):
         self.assertIsNone(method)
         self.assertIsNone(obj)
 
-    @ddt.data(
-        {"line": "fdafadfdas # noqa", "result": []},
-        {"line": " # fdafadfdas", "result": []},
-        {"line": " ", "result": []},
-        {"line": "otherstuff", "result": [42]}
-    )
-    @ddt.unpack
-    def test_skip_ignored_lines(self, line, result):
-
-        @checks.skip_ignored_lines
-        def any_gen(physical_line, logical_line, file_name):
-            yield 42
-
-        self.assertEqual(result, list(any_gen(line, line, "f")))
-
     def test_correct_usage_of_assert_from_mock(self):
         correct_method_names = ["assert_any_call", "assert_called_once_with",
                                 "assert_called_with", "assert_has_calls"]
@@ -71,7 +68,7 @@ class HackingTestCase(test.TestCase):
         fake_method = "rtfm.assert_something()"
 
         actual_number, actual_msg = next(checks.check_assert_methods_from_mock(
-            fake_method, fake_method, "./tests/fake/test"))
+            fake_method, "./tests/fake/test"))
         self.assertEqual(4, actual_number)
         self.assertTrue(actual_msg.startswith("N301"))
 
@@ -79,7 +76,7 @@ class HackingTestCase(test.TestCase):
         fake_method = "rtfm.assert_called()"
 
         actual_number, actual_msg = next(checks.check_assert_methods_from_mock(
-            fake_method, fake_method, "./tests/fake/test"))
+            fake_method, "./tests/fake/test", False))
         self.assertEqual(4, actual_number)
         self.assertTrue(actual_msg.startswith("N302"))
 
@@ -87,7 +84,7 @@ class HackingTestCase(test.TestCase):
         fake_method = "rtfm.assert_called_once()"
 
         actual_number, actual_msg = next(checks.check_assert_methods_from_mock(
-            fake_method, fake_method, "./tests/fake/test"))
+            fake_method, "./tests/fake/test", False))
         self.assertEqual(4, actual_number)
         self.assertTrue(actual_msg.startswith("N303"))
 
@@ -100,16 +97,16 @@ class HackingTestCase(test.TestCase):
                         "import rally.common.logging"]
 
         for bad in bad_imports:
-            checkres = checks.check_import_of_logging(bad, bad, "fakefile")
+            checkres = checks.check_import_of_logging(bad, "fakefile")
             self.assertIsNotNone(next(checkres))
 
         for bad in bad_imports:
             checkres = checks.check_import_of_logging(
-                bad, bad, "./rally/common/logging.py")
+                bad, "./rally/common/logging.py")
             self.assertEqual([], list(checkres))
 
         for good in good_imports:
-            checkres = checks.check_import_of_logging(good, good, "fakefile")
+            checkres = checks.check_import_of_logging(good, "fakefile")
             self.assertEqual([], list(checkres))
 
     def test_no_use_conf_debug_check(self):
@@ -134,8 +131,7 @@ class HackingTestCase(test.TestCase):
     )
     @ddt.unpack
     def test_assert_true_instance(self, line, result):
-        self.assertEqual(
-            result, len(list(checks.assert_true_instance(line, line, "f"))))
+        self.assertEqual(result, len(list(checks.assert_true_instance(line))))
 
     @ddt.data(
         {
@@ -149,8 +145,7 @@ class HackingTestCase(test.TestCase):
     )
     @ddt.unpack
     def test_assert_equal_type(self, line, result):
-        self.assertEqual(result,
-                         len(list(checks.assert_equal_type(line, line, "f"))))
+        self.assertEqual(result, len(list(checks.assert_equal_type(line))))
 
     @ddt.data(
         {"line": "self.assertEqual(A, None)", "result": 1},
@@ -160,8 +155,7 @@ class HackingTestCase(test.TestCase):
     @ddt.unpack
     def test_assert_equal_none(self, line, result):
 
-        self.assertEqual(result,
-                         len(list(checks.assert_equal_none(line, line, "f"))))
+        self.assertEqual(result, len(list(checks.assert_equal_none(line))))
 
     @ddt.data(
         {"line": "self.assertNotEqual(A, None)", "result": 1},
@@ -171,9 +165,7 @@ class HackingTestCase(test.TestCase):
     @ddt.unpack
     def test_assert_not_equal_none(self, line, result):
 
-        self.assertEqual(result,
-                         len(list(checks.assert_not_equal_none(line,
-                                                               line, "f"))))
+        self.assertEqual(result, len(list(checks.assert_not_equal_none(line))))
 
     def test_assert_true_or_false_with_in(self):
         good_lines = [
@@ -334,16 +326,6 @@ class HackingTestCase(test.TestCase):
                 [],
                 list(checks.check_dict_formatting_in_string(sample, tokens)))
 
-    @ddt.data(
-        "text = unicode('sometext')",
-        "text = process(unicode('sometext'))"
-    )
-    def test_check_using_unicode(self, line):
-
-        checkres = checks.check_using_unicode(line, line, "fakefile")
-        self.assertIsNotNone(next(checkres))
-        self.assertEqual([], list(checkres))
-
     def test_check_raises(self):
         self._assert_bad_samples(
             checks.check_raises,
@@ -357,21 +339,17 @@ class HackingTestCase(test.TestCase):
     def test_check_db_imports_of_cli(self):
         line = "from rally.common import db"
 
-        next(checks.check_db_imports_in_cli(
-            line, line, "./rally/cli/filename"))
+        next(checks.check_db_imports_in_cli(line, "./rally/cli/filename"))
 
-        checkres = checks.check_db_imports_in_cli(
-            line, line, "./filename")
+        checkres = checks.check_db_imports_in_cli(line, "./filename")
         self.assertRaises(StopIteration, next, checkres)
 
     def test_check_objects_imports_of_cli(self):
         line = "from rally.common import objects"
 
-        next(checks.check_objects_imports_in_cli(
-            line, line, "./rally/cli/filename"))
+        next(checks.check_objects_imports_in_cli(line, "./rally/cli/filename"))
 
-        checkres = checks.check_objects_imports_in_cli(
-            line, line, "./filename")
+        checkres = checks.check_objects_imports_in_cli(line, "./filename")
         self.assertRaises(StopIteration, next, checkres)
 
     @ddt.data(
@@ -379,7 +357,7 @@ class HackingTestCase(test.TestCase):
         "class Oldstyle:"
     )
     def test_check_old_type_class(self, line):
-        checkres = checks.check_old_type_class(line, line, "fakefile")
+        checkres = checks.check_old_type_class(line)
         self.assertIsNotNone(next(checkres))
         self.assertEqual([], list(checkres))
 
@@ -390,12 +368,12 @@ class HackingTestCase(test.TestCase):
                  "from datetime import datetime as dtime"]
 
         for line in lines:
-            checkres = checks.check_datetime_alias(line, line, "fakefile")
+            checkres = checks.check_datetime_alias(line)
             self.assertIsNotNone(next(checkres))
             self.assertEqual([], list(checkres))
 
         line = "import datetime as dt"
-        checkres = checks.check_datetime_alias(line, line, "fakefile")
+        checks.check_datetime_alias(line)
 
     def test_check_log_warn(self):
         bad_samples = ["LOG.warn('foo')", "LOG.warn(_('bar'))"]

tox.ini
@@ -6,7 +6,6 @@ envlist = py36,py37,py38,pep8
 [testenv]
 extras = {env:RALLY_EXTRAS:}
 setenv = VIRTUAL_ENV={envdir}
-         HOME={homedir}
          LANG=en_US.UTF-8
          LANGUAGE=en_US:en
          LC_ALL=C
@@ -22,7 +21,7 @@ deps =
 usedevelop = True
 commands =
   find . -type f -name "*.pyc" -delete
-  python {toxinidir}/tests/ci/pytest_launcher.py tests/unit --posargs={posargs}
+  python3 {toxinidir}/tests/ci/pytest_launcher.py tests/unit --posargs={posargs}
 distribute = false
 basepython = python3
 passenv =
@@ -33,6 +32,7 @@ passenv =
   no_proxy
   NO_PROXY
   REQUESTS_CA_BUNDLE
+  HOME
 
 [testenv:pep8]
 commands = flake8
@@ -50,7 +50,6 @@ basepython = python3.8
 [testenv:py39]
 basepython = python3.9
 
-
 [testenv:venv]
 basepython = python3
 commands = {posargs}
@@ -64,10 +63,14 @@ sitepackages = True
 commands =
   find . -type f -name "*.pyc" -delete
   {toxinidir}/tests/ci/rally_functional_job.sh {posargs}
+allowlist_externals = find
+                      rm
+                      make
+                      {toxinidir}/tests/ci/rally_functional_job.sh
 
 [testenv:cover]
 commands = {toxinidir}/tests/ci/cover.sh {posargs}
+allowlist_externals = {toxinidir}/tests/ci/cover.sh
 
 [testenv:genconfig]
 basepython = python3
@@ -109,7 +112,6 @@ extension =
    N350 = checks:check_quotes
    N351 = checks:check_no_constructor_data_struct
    N352 = checks:check_dict_formatting_in_string
-   N353 = checks:check_using_unicode
    N354 = checks:check_raises
    N355 = checks:check_old_type_class
    N356 = checks:check_datetime_alias
@@ -145,4 +147,8 @@ filterwarnings =
   ignore:.*EngineFacade is deprecated; please use oslo_db.sqlalchemy.enginefacade*:
   ignore:.*unclosed file <_io.TextIOWrapper name='/tmp/rally.log'::
   # pytest-cov
   ignore:The --rsyncdir command line argument and rsyncdirs config variable are deprecated.:DeprecationWarning:
+  ignore:::.*requests.*
+  # python 3.10
+  ignore:The distutils package is deprecated and slated for removal in Python 3.12. Use setuptools or check PEP 632 for potential alternatives:DeprecationWarning:
+  ignore:pkg_resources is deprecated as an API:DeprecationWarning: