Bump hacking
hacking 3.0.x is too old. Bump it to the latest available version. Also remove the note about old pip behavior, because recent pip no longer requires packages to be listed in a specific order.

Change-Id: Ib84f9138bc2b908ce1a76c3dad501328326ffe99
parent 20781e1ce5
commit 366540033f
@@ -1,6 +1,3 @@
-# The order of packages is significant, because pip processes them in the order
-# of appearance. Changing the order has an impact on the overall integration
-# process, which may cause wedges in the gate later.
 oslo.config>=5.2.0 # Apache-2.0
 openstackdocstheme>=2.2.1 # Apache-2.0
 sphinx>=2.0.0,!=2.1.0 # BSD
@@ -1,7 +1,3 @@
-# The order of packages is significant, because pip processes them in the order
-# of appearance. Changing the order has an impact on the overall integration
-# process, which may cause wedges in the gate later.
-
 pbr>=3.1.1 # Apache-2.0
 alembic>=0.9.8 # MIT
 cachetools>=2.0.1 # MIT License
@@ -1,8 +1,4 @@
-# The order of packages is significant, because pip processes them in the order
-# of appearance. Changing the order has an impact on the overall integration
-# process, which may cause wedges in the gate later.
-
-hacking>=3.0.1,<3.1.0 # Apache-2.0
+hacking>=7.0.0,<7.1.0 # Apache-2.0
 coverage>=4.5.1 # Apache-2.0
 python-subunit>=1.2.0 # Apache-2.0/BSD
 oslotest>=3.3.0 # Apache-2.0
@@ -90,7 +90,7 @@ class TemplateApis(object):
         """
         db = self.db
 
-        if type(uuids) != list:
+        if not isinstance(uuids, list):
             uuids = [uuids]
         LOG.info("Deleting templates %s ", uuids)
         templates = [t for _id in uuids for t in db.templates.query(uuid=_id)
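
For context on the isinstance() change above: besides satisfying the style check, isinstance() also accepts subclasses of list, which an exact type() comparison does not. A minimal standalone sketch (the UuidList subclass is hypothetical, not from the Vitrage code):

# Minimal sketch, not part of the change; UuidList is a hypothetical subclass.
class UuidList(list):
    pass

uuids = UuidList(['a1b2', 'c3d4'])
print(type(uuids) == list)       # False: exact type comparison rejects subclasses
print(isinstance(uuids, list))   # True: isinstance() accepts list and its subclasses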
@@ -293,12 +293,12 @@ class HistoryFacadeConnection(object):
                 and_(
                     or_(
                         models.Alarm.project_id == project_id,
-                        models.Alarm.project_id == None),
+                        models.Alarm.project_id == None),  # noqa: E711
                     or_(
                         models.Alarm.vitrage_resource_project_id ==
                         project_id,
-                        models.Alarm.vitrage_resource_project_id == None)
-                )))  # noqa
+                        models.Alarm.vitrage_resource_project_id == None)  # noqa: E501,E711
+                )))
         else:
             query = query.filter(
                 or_(models.Alarm.project_id == project_id,
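
The `== None` comparisons kept above are intentional: SQLAlchemy overloads equality on a column so that comparing it to None emits an IS NULL clause, which is why flake8's E711 is silenced per line instead of rewriting the expression. A standalone sketch with a hypothetical minimal model (not the real Vitrage models.Alarm) showing the behavior and the lint-clean is_() alternative:

# Hypothetical minimal model for illustration only.
from sqlalchemy import Column, Integer, String, or_
from sqlalchemy.orm import declarative_base   # 1.4+; older: sqlalchemy.ext.declarative

Base = declarative_base()

class Alarm(Base):
    __tablename__ = 'alarms'
    id = Column(Integer, primary_key=True)
    project_id = Column(String)

crit = or_(Alarm.project_id == 'p1',
           Alarm.project_id == None)   # noqa: E711 -- compiles to "... IS NULL"
print(crit)   # alarms.project_id = :project_id_1 OR alarms.project_id IS NULL
# The E711-clean equivalent is Alarm.project_id.is_(None).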
@@ -315,7 +315,7 @@ class HistoryFacadeConnection(object):
         for i in range(len(filter_by)):
             key = filter_by[i]
             val = filter_vals[i]
-            val = val if val and type(val) == list else [val]
+            val = val if val and isinstance(val, list) else [val]
             cond = or_(*[getattr(models.Alarm, key).like(
                 '%' + val[j] + '%') for j in range(len(val))])
             query = query.filter(cond)
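
The loop above walks the parallel filter_by/filter_vals lists by index. As an aside (not part of this change, and reusing the names from the hunk above), the same pairing reads more idiomatically with zip():

# Illustration only, assuming the same filter_by, filter_vals, models, or_ and
# query objects as in the hunk above; behavior is unchanged.
for key, val in zip(filter_by, filter_vals):
    val = val if val and isinstance(val, list) else [val]
    cond = or_(*[getattr(models.Alarm, key).like('%' + v + '%') for v in val])
    query = query.filter(cond)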
@@ -177,11 +177,11 @@ class TestConsistencyFunctional(TestFunctionalBase, TestConfiguration):
             _filter_vertices_to_be_marked_as_deleted(vertices)
 
         self.assertThat(vertices_to_mark_deleted, matchers.HasLength(3))
-        self.assertTrue(static_vertex in vertices_to_mark_deleted)
-        self.assertTrue(placeholder_vertex in vertices_to_mark_deleted)
-        self.assertTrue(volume_vertex in vertices_to_mark_deleted)
-        self.assertFalse(prometheus_vertex in vertices_to_mark_deleted)
-        self.assertFalse(cluster_vertex in vertices_to_mark_deleted)
+        self.assertIn(static_vertex, vertices_to_mark_deleted)
+        self.assertIn(placeholder_vertex, vertices_to_mark_deleted)
+        self.assertIn(volume_vertex, vertices_to_mark_deleted)
+        self.assertNotIn(prometheus_vertex, vertices_to_mark_deleted)
+        self.assertNotIn(cluster_vertex, vertices_to_mark_deleted)
 
     def _assert_vertices_status(self, category, vitrage_type,
                                 num_vertices, num_marked_deleted):
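
Switching to assertIn/assertNotIn is not purely cosmetic: on failure they report both the missing item and the container, whereas assertTrue(x in y) only reports "False is not true". A standalone sketch with a hypothetical test case:

# Hypothetical test case illustrating the failure messages; both tests are
# expected to fail when run -- that is the point of the example.
import unittest

class AssertStyleExample(unittest.TestCase):
    def test_assert_true(self):
        self.assertTrue('x' in ['a', 'b'])   # failure says only: False is not true

    def test_assert_in(self):
        self.assertIn('x', ['a', 'b'])       # failure says: 'x' not found in ['a', 'b']

if __name__ == '__main__':
    unittest.main()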
@@ -77,10 +77,10 @@ class ScenarioRepositoryTest(base.BaseTest, TestConfiguration):
         # C: (A, B, C)}
         # Verify entity itself is also included. It is not required, but
         # worth noting when handling equivalence
-        self.assertTrue(entity_props in equivalence)
+        self.assertIn(entity_props, equivalence)
         for equivalent_props in equivalence:
             # Verify equivalent scenarios are present in repository
-            self.assertTrue(equivalent_props in
-                            self.scenario_repository.entity_scenarios)
+            self.assertIn(equivalent_props,
+                          self.scenario_repository.entity_scenarios)
 
     def test_get_scenario_by_edge(self):
@@ -316,7 +316,7 @@ class GraphAlgorithmTest(GraphTestBase):
 
         for v in [t_v_host_alarm, t_v_host, t_v_vm, t_v_vm_alarm,
                   t_v_switch, t_v_switch, t_v_node]:
-            del(v[VProps.VITRAGE_ID])
+            del v[VProps.VITRAGE_ID]
 
         template_graph.add_vertex(t_v_alarm_fail)
         mappings = ga.sub_graph_matching(template_graph,
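
The del(...) cleanups in this and the following hunks are readability fixes: del is a statement, not a function, so the parentheses only wrap the expression and add nothing. Both spellings remove the key, as this standalone sketch (with a hypothetical vertex dict) shows:

# Standalone sketch; v is a hypothetical vertex-properties dict.
v = {'vitrage_id': '1234', 'name': 'host-1'}
del (v['vitrage_id'])      # works, but the parentheses are redundant
v['vitrage_id'] = '1234'
del v['vitrage_id']        # preferred spelling
print(v)                   # {'name': 'host-1'}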
@@ -630,7 +630,7 @@ class GraphAlgorithmTest(GraphTestBase):
         e_alarm_not_on_host[EProps.VITRAGE_IS_DELETED] = True
 
         for v in [t_v_alarm_fail, t_v_host, t_v_vm, t_v_vm_alarm]:
-            del(v[VProps.VITRAGE_ID])
+            del v[VProps.VITRAGE_ID]
 
         # add host vertex to subgraph
         template_graph.add_vertex(t_v_host)
@@ -848,7 +848,7 @@ class GraphAlgorithmTest(GraphTestBase):
         e_alarm_not_on_vm[EProps.VITRAGE_IS_DELETED] = True
 
         for v in [t_v_vm, t_v_vm_alarm]:
-            del(v[VProps.VITRAGE_ID])
+            del v[VProps.VITRAGE_ID]
 
         # add instance vertex to subgraph
         template_graph.add_vertex(t_v_vm)
@@ -1091,7 +1091,7 @@ class GraphAlgorithmTest(GraphTestBase):
             t_v_network.vertex_id, t_v_stack.vertex_id, ELabel.CONNECT)
 
         for v in [t_v_vm, t_v_alarm, t_v_network, t_v_stack]:
-            del(v[VProps.VITRAGE_ID])
+            del v[VProps.VITRAGE_ID]
 
         # add network vertex to subgraph
         template_graph.add_vertex(t_v_network)