Bump hacking

hacking 3.0.x is too old. Bump it to the latest available release (7.0.x).

Also remove the note about the old pip behavior, because recent pip
no longer requires packages to be listed in a specific order.

Change-Id: Ib84f9138bc2b908ce1a76c3dad501328326ffe99
Takashi Kajinami 2024-11-17 00:35:50 +09:00
parent 20781e1ce5
commit 366540033f
10 changed files with 27 additions and 38 deletions

View File

@@ -1,6 +1,3 @@
-# The order of packages is significant, because pip processes them in the order
-# of appearance. Changing the order has an impact on the overall integration
-# process, which may cause wedges in the gate later.
 oslo.config>=5.2.0 # Apache-2.0
 openstackdocstheme>=2.2.1 # Apache-2.0
 sphinx>=2.0.0,!=2.1.0 # BSD

View File

@@ -1,7 +1,3 @@
-# The order of packages is significant, because pip processes them in the order
-# of appearance. Changing the order has an impact on the overall integration
-# process, which may cause wedges in the gate later.
 pbr>=3.1.1 # Apache-2.0
 alembic>=0.9.8 # MIT
 cachetools>=2.0.1 # MIT License

View File

@@ -1,8 +1,4 @@
-# The order of packages is significant, because pip processes them in the order
-# of appearance. Changing the order has an impact on the overall integration
-# process, which may cause wedges in the gate later.
-hacking>=3.0.1,<3.1.0 # Apache-2.0
+hacking>=7.0.0,<7.1.0 # Apache-2.0
 coverage>=4.5.1 # Apache-2.0
 python-subunit>=1.2.0 # Apache-2.0/BSD
 oslotest>=3.3.0 # Apache-2.0
@@ -12,4 +8,4 @@ testtools>=2.3.0 # MIT
 stestr>=2.0.0 # Apache-2.0
 reno>=3.1.0 # Apache-2.0
 zake>=0.1.6 # Apache-2.0
 WebTest>= 3.0.0 # Apache-2.0
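As a quick local sanity check (illustrative, not part of the change), one way to confirm that a freshly built test environment picked up the new linter is to query the installed distribution; only the distribution name "hacking" comes from the requirement above, the rest is an assumption about how you run it:

    # Illustrative only: verify the environment installed hacking 7.0.x.
    from importlib.metadata import version

    print(version("hacking"))  # expected to report a 7.0.x release after this bump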

View File

@@ -91,7 +91,7 @@ class TemplateController(RootRestController):
     @pecan.expose('json')
     def delete(self, **kwargs):
         # for backward computability
-        values = kwargs['uuid'] if 'uuid'in kwargs else kwargs['id']
+        values = kwargs['uuid'] if 'uuid' in kwargs else kwargs['id']
         LOG.info("delete template. values: %s", values)
         uuids = self._to_uuids(values)
         LOG.info("delete template. uuids: %s", uuids)

View File

@@ -90,7 +90,7 @@ class TemplateApis(object):
         """
         db = self.db
-        if type(uuids) != list:
+        if not isinstance(uuids, list):
            uuids = [uuids]
         LOG.info("Deleting templates %s ", uuids)
         templates = [t for _id in uuids for t in db.templates.query(uuid=_id)
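For context, a minimal illustration (not taken from the change) of why the comparison was rewritten: isinstance() also accepts subclasses, and it is the form preferred by the stricter type-comparison check that the newer pycodestyle pulled in by hacking 7.x presumably raises here. The subclass and value below are hypothetical:

    # Illustrative only: isinstance() covers list subclasses,
    # which a direct type() comparison would miss.
    class UuidList(list):
        pass

    uuids = UuidList(["aaa-bbb"])     # hypothetical subclass and value
    print(type(uuids) == list)        # False
    print(isinstance(uuids, list))    # True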

View File

@@ -77,7 +77,7 @@ def check_no_contextlib_nested(logical_line):
            "nested for more information.")
     if ("with contextlib.nested(" in logical_line or
             "with nested(" in logical_line):
-        yield(0, msg)
+        yield (0, msg)


 @core.flake8ext
@@ -91,8 +91,8 @@ def dict_constructor_with_list_copy(logical_line):
 @core.flake8ext
 def check_python3_xrange(logical_line):
     if re.search(r"\bxrange\s*\(", logical_line):
-        yield(0, "V323: Do not use xrange. Use range, or six.moves.range for "
+        yield (0, "V323: Do not use xrange. Use range, or six.moves.range for "
              "large loops.")


 @core.flake8ext
@@ -100,7 +100,7 @@ def check_python3_no_iteritems(logical_line):
     msg = ("V324: Use six.iteritems() or dict.items() instead of "
            "dict.iteritems().")
     if re.search(r".*\.iteritems\(\)", logical_line):
-        yield(0, msg)
+        yield (0, msg)


 @core.flake8ext
@@ -108,7 +108,7 @@ def check_python3_no_iterkeys(logical_line):
     msg = ("V325: Use six.iterkeys() or dict.keys() instead of "
            "dict.iterkeys().")
     if re.search(r".*\.iterkeys\(\)", logical_line):
-        yield(0, msg)
+        yield (0, msg)


 @core.flake8ext
@@ -116,7 +116,7 @@ def check_python3_no_itervalues(logical_line):
     msg = ("V326: Use six.itervalues() or dict.values instead of "
            "dict.itervalues().")
     if re.search(r".*\.itervalues\(\)", logical_line):
-        yield(0, msg)
+        yield (0, msg)


 @core.flake8ext
@@ -133,7 +133,7 @@ def no_log_warn(logical_line):
     V328
     """
     if logical_line.startswith('LOG.warn('):
-        yield(0, 'V328: Use LOG.warning() rather than LOG.warn()')
+        yield (0, 'V328: Use LOG.warning() rather than LOG.warn()')


 @core.flake8ext
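As a side note (illustrative, not part of the change): these hacking checks are plain generator functions that flake8 calls once per logical line, yielding (offset, message) pairs. A minimal stand-alone harness for the no_log_warn check shown above, assuming you copy the function out of the plugin module, would be:

    # Illustrative harness only: flake8 normally drives these checks itself.
    def no_log_warn(logical_line):
        if logical_line.startswith('LOG.warn('):
            yield (0, 'V328: Use LOG.warning() rather than LOG.warn()')

    print(list(no_log_warn("LOG.warn('low disk space')")))
    # [(0, 'V328: Use LOG.warning() rather than LOG.warn()')]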

View File

@@ -293,12 +293,12 @@ class HistoryFacadeConnection(object):
                 and_(
                     or_(
                         models.Alarm.project_id == project_id,
-                        models.Alarm.project_id == None),
+                        models.Alarm.project_id == None),  # noqa: E711
                     or_(
                         models.Alarm.vitrage_resource_project_id ==
                         project_id,
-                        models.Alarm.vitrage_resource_project_id == None)
-                )))  # noqa
+                        models.Alarm.vitrage_resource_project_id == None)  # noqa: E501,E711
+                )))
         else:
             query = query.filter(
                 or_(models.Alarm.project_id == project_id,
@@ -315,7 +315,7 @@ class HistoryFacadeConnection(object):
         for i in range(len(filter_by)):
             key = filter_by[i]
             val = filter_vals[i]
-            val = val if val and type(val) == list else [val]
+            val = val if val and isinstance(val, list) else [val]
             cond = or_(*[getattr(models.Alarm, key).like(
                 '%' + val[j] + '%') for j in range(len(val))])
             query = query.filter(cond)
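For background (illustrative, not from the change): the "== None" comparisons above are intentional, because SQLAlchemy overloads == on column objects to emit IS NULL, which is why the commit silences E711 with targeted noqa markers instead of rewriting the filters; .is_(None) would be the noqa-free alternative. A minimal sketch, assuming a stand-in "alarms" table:

    # Illustrative only: column == None compiles to SQL "IS NULL",
    # while a plain Python "is None" check would simply evaluate to False.
    from sqlalchemy import Column, Integer, MetaData, String, Table

    metadata = MetaData()
    alarms = Table('alarms', metadata,
                   Column('id', Integer, primary_key=True),
                   Column('project_id', String))

    print(alarms.c.project_id == None)    # noqa: E711  -> alarms.project_id IS NULL
    print(alarms.c.project_id.is_(None))  # same SQL, no noqa needed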

View File

@@ -177,11 +177,11 @@ class TestConsistencyFunctional(TestFunctionalBase, TestConfiguration):
             _filter_vertices_to_be_marked_as_deleted(vertices)
         self.assertThat(vertices_to_mark_deleted, matchers.HasLength(3))
-        self.assertTrue(static_vertex in vertices_to_mark_deleted)
-        self.assertTrue(placeholder_vertex in vertices_to_mark_deleted)
-        self.assertTrue(volume_vertex in vertices_to_mark_deleted)
-        self.assertFalse(prometheus_vertex in vertices_to_mark_deleted)
-        self.assertFalse(cluster_vertex in vertices_to_mark_deleted)
+        self.assertIn(static_vertex, vertices_to_mark_deleted)
+        self.assertIn(placeholder_vertex, vertices_to_mark_deleted)
+        self.assertIn(volume_vertex, vertices_to_mark_deleted)
+        self.assertNotIn(prometheus_vertex, vertices_to_mark_deleted)
+        self.assertNotIn(cluster_vertex, vertices_to_mark_deleted)

     def _assert_vertices_status(self, category, vitrage_type,
                                 num_vertices, num_marked_deleted):
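A small aside (illustrative, not from the change): assertIn/assertNotIn report the container on failure, which is the usual motivation for converting assertTrue(x in y) style assertions. A self-contained sketch with a hypothetical test case:

    # Illustrative only: assertIn fails with "'x' not found in ['a', 'b']",
    # whereas assertTrue(x in y) only reports "False is not true".
    import unittest

    class _Demo(unittest.TestCase):
        def test_membership(self):
            self.assertIn('a', ['a', 'b'])
            self.assertNotIn('x', ['a', 'b'])

    unittest.main(exit=False, argv=['demo'])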

View File

@@ -77,11 +77,11 @@ class ScenarioRepositoryTest(base.BaseTest, TestConfiguration):
         # C: (A, B, C)}
         # Verify entity itself is also included. It is not required, but
         # worth noting when handling equivalence
-        self.assertTrue(entity_props in equivalence)
+        self.assertIn(entity_props, equivalence)
         for equivalent_props in equivalence:
             # Verify equivalent scenarios are present in repository
-            self.assertTrue(equivalent_props in
+            self.assertIn(equivalent_props,
                            self.scenario_repository.entity_scenarios)

     def test_get_scenario_by_edge(self):
         pass

View File

@@ -316,7 +316,7 @@ class GraphAlgorithmTest(GraphTestBase):
         for v in [t_v_host_alarm, t_v_host, t_v_vm, t_v_vm_alarm,
                   t_v_switch, t_v_switch, t_v_node]:
-            del(v[VProps.VITRAGE_ID])
+            del v[VProps.VITRAGE_ID]

         template_graph.add_vertex(t_v_alarm_fail)
         mappings = ga.sub_graph_matching(template_graph,
@@ -630,7 +630,7 @@ class GraphAlgorithmTest(GraphTestBase):
         e_alarm_not_on_host[EProps.VITRAGE_IS_DELETED] = True
         for v in [t_v_alarm_fail, t_v_host, t_v_vm, t_v_vm_alarm]:
-            del(v[VProps.VITRAGE_ID])
+            del v[VProps.VITRAGE_ID]

         # add host vertex to subgraph
         template_graph.add_vertex(t_v_host)
@@ -848,7 +848,7 @@ class GraphAlgorithmTest(GraphTestBase):
         e_alarm_not_on_vm[EProps.VITRAGE_IS_DELETED] = True
         for v in [t_v_vm, t_v_vm_alarm]:
-            del(v[VProps.VITRAGE_ID])
+            del v[VProps.VITRAGE_ID]

         # add instance vertex to subgraph
         template_graph.add_vertex(t_v_vm)
@@ -1091,7 +1091,7 @@ class GraphAlgorithmTest(GraphTestBase):
             t_v_network.vertex_id, t_v_stack.vertex_id, ELabel.CONNECT)
         for v in [t_v_vm, t_v_alarm, t_v_network, t_v_stack]:
-            del(v[VProps.VITRAGE_ID])
+            del v[VProps.VITRAGE_ID]

         # add network vertex to subgraph
         template_graph.add_vertex(t_v_network)
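Closing aside (illustrative, not from the change): del is a statement, not a function, so the parentheses in del(v[...]) are just redundant grouping around the expression and removing them is purely cosmetic:

    # Illustrative only: both forms delete the key; the new spelling just
    # drops the redundant parentheses around the expression.
    v = {'vitrage_id': '1234', 'name': 'host-1'}
    del (v['vitrage_id'])   # parentheses are grouping, not a call
    v['vitrage_id'] = '1234'
    del v['vitrage_id']     # preferred spelling
    print(v)                # {'name': 'host-1'}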