Merge "Advanced tags support"

Jenkins authored 2015-12-17 20:39:11 +00:00; committed by Gerrit Code Review
commit e5c9b60f14
5 changed files with 112 additions and 29 deletions

View File

@@ -14,21 +14,21 @@ resources:
     ip: {{node}}::ip
 updates:
-  - with_tags: ['resource=hosts_file']
+  - with_tags: 'resource=hosts_file'
     values:
       hosts:name:
         - riak_service{{index}}::riak_hostname::NO_EVENTS
       hosts:ip:
         - riak_service{{index}}::ip::NO_EVENTS
-  - with_tags: ['resource=haproxy_service_config', 'service=riak', 'protocol=http']
+  - with_tags: 'resource=haproxy_service_config & service=riak & protocol=http'
     values:
       backends:server:
         - riak_service{{index}}::riak_hostname
       backends:port:
         - riak_service{{index}}::riak_port_http
-  - with_tags: ['resource=haproxy_service_config', 'service=riak', 'protocol=tcp']
+  - with_tags: 'resource=haproxy_service_config & service=riak & protocol=tcp'
     values:
       backends:server:
         - riak_service{{index}}::riak_hostname
@@ -38,7 +38,7 @@ updates:
 events:
   - type: depends_on
     parent:
-      with_tags: ['resource=hosts_file', 'location={{node}}']
+      with_tags: 'resource=hosts_file & location={{node}}'
       action: run
     state: success
     depend_action: riak_service{{index}}.run
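Note: the list form of with_tags is replaced by a single expression string, where '&' (or ',') is AND, '|' is OR, and 'key=*' matches any value for that key. A minimal sketch of how such a string is evaluated against a resource's tag list, using the Expression class introduced in this change (the sample tags and expected outcomes are illustrative only, not taken from a real deployment):

    from solar.core.tags_set_parser import Expression

    # tags as they might be stored on a resource from the example above
    tags = ['resource=hosts_file', 'location=node1']

    # both conditions hold for this tag set, so this is expected to be truthy
    print(Expression('resource=hosts_file & location=node1', tags).evaluate())

    # 'service=riak' is absent, so this is expected to be falsy
    print(Expression('resource=hosts_file & service=riak', tags).evaluate())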

View File

@@ -15,6 +15,7 @@
 
 from copy import deepcopy
 from hashlib import md5
+import itertools
 import json
 import os
 from uuid import uuid4
@@ -25,6 +26,8 @@ import networkx
 
 from solar.core.signals import get_mapping
+from solar.core.tags_set_parser import Expression
+from solar.core.tags_set_parser import get_string_tokens
 from solar.core import validation
 from solar.dblayer.model import StrInt
 from solar.dblayer.solar_models import CommitedResource
@@ -327,13 +330,18 @@ def load_all():
     return [Resource(r) for r in DBResource.multi_get(candids)]
 
 
-def load_by_tags(tags):
-    tags = set(tags)
-    candids_all = set()
-    for tag in tags:
-        candids = DBResource.tags.filter(tag)
-        candids_all.update(set(candids))
-    return [Resource(r) for r in DBResource.multi_get(candids_all)]
+def load_by_tags(query):
+    if isinstance(query, (list, set, tuple)):
+        query = '|'.join(query)
+
+    parsed_tags = get_string_tokens(query)
+    r_with_tags = [DBResource.tags.filter(tag) for tag in parsed_tags]
+    r_with_tags = set(itertools.chain(*r_with_tags))
+    candids = [Resource(r) for r in DBResource.multi_get(r_with_tags)]
+    nodes = filter(
+        lambda n: Expression(query, n.tags).evaluate(), candids)
+    return nodes
 
 
 def validate_resources():
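For context, load_by_tags now accepts either a full expression string or, for backward compatibility, a list/set/tuple of tags that is OR-joined. A short usage sketch mirroring the queries exercised in the tests below (the resource module import path is assumed, since file names are not shown in this view):

    from solar.core import resource

    # expression string: '=' filters by value, '&'/',' is AND, '|' is OR, '*' is a wildcard
    nodes = resource.load_by_tags('(node=t1 | node=t2) & n1=x')

    # the old collection form still works and behaves as "match any of these tags"
    nodes = resource.load_by_tags(['n1=', 'n2='])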

View File

@@ -12,6 +12,8 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+import re
+
 from ply import lex
 from ply import yacc
 
@@ -23,9 +25,13 @@ tokens = (
     "AND",
     "OR",
     "LPAREN",
-    "RPAREN")
+    "RPAREN",
+    "ANY",
+    "EQ")
 
 t_STRING = r'[A-Za-z0-9-_/\\]+'
+t_EQ = r'='
+t_ANY = r'\*'
 t_AND = '&|,'
 t_OR = r'\|'
 t_LPAREN = r'\('
@@ -60,10 +66,23 @@ class ScalarWrapper(object):
         return self.value
 
 
-def p_expression_logical_op(p):
-    """Parser
-
-    expression : expression AND expression
+class AnyWrapper(object):
+
+    def __init__(self, value):
+        global expression
+        # convert all tags from key=value to key=*
+        tags = map(lambda s: re.sub('=\w+', '=*', s), expression.tags)
+        self.value = (set([value]) <= set(tags))
+
+    def evaluate(self):
+        return self.value
+
+    def __call__(self):
+        return self.value
+
+
+def p_expression_logical_op(p):
+    """expression : expression AND expression
         | expression OR expression
     """
     result, arg1, op, arg2 = p
@@ -76,18 +95,28 @@ def p_expression_logical_op(p):
 
 
 def p_expression_string(p):
-    """Parser
-
-    expression : STRING
-    """
-    p[0] = ScalarWrapper(p[1])
+    """expression : STRING"""
+    p[0] = ScalarWrapper(p[1] + '=')
+
+
+def p_expression_assign(p):
+    """expression : STRING EQ STRING
+        | STRING EQ
+    """
+    if len(p) == 3:
+        last = ''
+    else:
+        last = p[3]
+    p[0] = ScalarWrapper(p[1] + p[2] + last)
+
+
+def p_expression_assign_any(p):
+    """expression : STRING EQ ANY"""
+    p[0] = AnyWrapper(p[1] + p[2] + p[3])
 
 
 def p_expression_group(p):
-    """Parser
-
-    expression : LPAREN expression RPAREN
-    """
+    """expression : LPAREN expression RPAREN"""
     p[0] = p[2]
@@ -113,10 +142,26 @@ class Expression(object):
 
 lexer = lex.lex()
-parser = yacc.yacc(debug=False, write_tables=False)
+parser = yacc.yacc(debug=False, write_tables=False, errorlog=yacc.NullLogger())
 
 expression = None
 
 
+def get_string_tokens(txt):
+    lexer.input(txt)
+    parsed = []
+    token_part = ''
+    for token in lexer:
+        if token.type in ['STRING', 'ANY', 'EQ']:
+            token_part += token.value
+        else:
+            if token_part:
+                parsed.append(token_part)
+                token_part = ''
+    if token_part:
+        parsed.append(token_part)
+    return parsed
+
+
 def parse(expr):
     global parser
     global expression
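The new get_string_tokens helper gives load_by_tags the plain tags with which to pre-filter candidates via the index before the full expression is evaluated. A rough illustration of its behaviour, inferred from the lexer rules above rather than taken from documentation:

    from solar.core.tags_set_parser import get_string_tokens

    # operator tokens split the query; contiguous STRING/EQ/ANY tokens are glued back together
    print(get_string_tokens('(node=t1 | node=t2) & n1=*'))
    # expected: ['node=t1', 'node=t2', 'n1=*']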

View File

@@ -21,16 +21,23 @@ from solar.dblayer.solar_models import Resource
 @fixture
 def tagged_resources():
-    tags = ['n1', 'n2', 'n3']
+    base_tags = ['n1=x', 'n2']
+    tags = base_tags + ['node=t1']
     t1 = Resource.from_dict('t1',
                             {'name': 't1', 'tags': tags, 'base_path': 'x'})
     t1.save_lazy()
+    tags = base_tags + ['node=t2']
     t2 = Resource.from_dict('t2',
                             {'name': 't2', 'tags': tags, 'base_path': 'x'})
     t2.save_lazy()
+    tags = base_tags + ['node=t3']
     t3 = Resource.from_dict('t3',
                             {'name': 't3', 'tags': tags, 'base_path': 'x'})
     t3.save_lazy()
+    tags = ['node=t3']
+    t4 = Resource.from_dict('t4',
+                            {'name': 't4', 'tags': tags, 'base_path': 'x'})
+    t4.save_lazy()
     ModelMeta.save_all_lazy()
     return [t1, t2, t3]
@@ -42,5 +49,28 @@ def test_add_remove_tags(tagged_resources):
     for res in loaded:
         res.remove_tags('n1')
 
-    assert len(resource.load_by_tags(set(['n1']))) == 0
-    assert len(resource.load_by_tags(set(['n2']))) == 3
+    assert len(resource.load_by_tags(set(['n1=']))) == 0
+    assert len(resource.load_by_tags(set(['n2=']))) == 3
+
+
+def test_filter_with_and(tagged_resources):
+    loaded = resource.load_by_tags('node=t1 & n1=x')
+    assert len(loaded) == 1
+    loaded = resource.load_by_tags('node=t1,n1=*')
+    assert len(loaded) == 1
+    loaded = resource.load_by_tags('n2,n1=*')
+    assert len(loaded) == 3
+    loaded = resource.load_by_tags('node=* & n1=x')
+    assert len(loaded) == 3
+
+
+def test_filter_with_or(tagged_resources):
+    loaded = resource.load_by_tags('node=t1 | node=t2')
+    assert len(loaded) == 2
+    loaded = resource.load_by_tags('node=t1 | node=t2 | node=t3')
+    assert len(loaded) == 4
+
+
+def test_with_brackets(tagged_resources):
+    loaded = resource.load_by_tags('(node=t1 | node=t2 | node=t3) & n1=x')
+    assert len(loaded) == 3
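The expected counts follow from the fixture: t1, t2 and t3 each carry 'n1=x', 'n2' and a node=<name> tag, while t4 carries only 'node=t3', so 'node=t3' matches two resources even though the fixture returns just [t1, t2, t3]. A tiny standalone re-check of that arithmetic, with a hypothetical matcher and assuming bare tags are normalised to 'key=' as the n1=/n2= assertions above suggest:

    # tag sets per resource, mirroring the fixture
    fixtures = {
        't1': {'n1=x', 'n2=', 'node=t1'},
        't2': {'n1=x', 'n2=', 'node=t2'},
        't3': {'n1=x', 'n2=', 'node=t3'},
        't4': {'node=t3'},
    }

    def matches(tag, tags):
        # 'key=*' matches any value for that key; everything else is an exact match
        key, _, value = tag.partition('=')
        if value == '*':
            return any(t.startswith(key + '=') for t in tags)
        return tag in tags

    # 'node=t1 | node=t2 | node=t3' -> 4 (t4 also carries node=t3)
    print(sum(any(matches(t, tags) for t in ('node=t1', 'node=t2', 'node=t3'))
              for tags in fixtures.values()))

    # '(node=t1 | node=t2 | node=t3) & n1=x' -> 3 (t4 lacks n1=x)
    print(sum(any(matches(t, tags) for t in ('node=t1', 'node=t2', 'node=t3'))
              and matches('n1=x', tags) for tags in fixtures.values()))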

View File

@@ -33,7 +33,7 @@ envdir = devenv
 usedevelop = True
 
 [flake8]
-ignore = H101,H236,E731
+ignore = H101,H236,E731,H405
 exclude = .venv,.git,.tox,dist,doc,*lib/python*,*egg,build,tools,__init__.py,docs
 show-pep8 = True
 show-source = True