Merge branch 'master' into coverate

Conflicts:
	run_tests.sh
	solar/test-requirements.txt
Łukasz Oleś 2015-09-24 13:28:23 +02:00
commit bb012aa90b
17 changed files with 462 additions and 217 deletions

.gitignore (vendored, 2 changes)

@@ -37,3 +37,5 @@ vagrant-settings.yaml
 .ssh/
 .cache
+.tox

.travis.yml (new file, 6 lines)

@@ -0,0 +1,6 @@
language: python
python: 2.7
script:
- ./run_tests.sh
services:
- redis-server

docs/removal.md (new file, 29 lines)

@@ -0,0 +1,29 @@
# Problems to solve with removal operation
1. It is tricky to figure out what to do with data that is left behind when
you remove a resource that is a parent of other resources.
The basic example is a node resource.
If hosts_file1 is subscribed to node properties and we simply remove the
node, hosts_file1 is left with corrupted data.
Validation is not a solution, because we cannot expect the user to remove
each resource one by one.
    log task=hosts_file1.run uid=c1545041-a5c5-400e-8c46-ad52d871e6c3
    ++ ip: None
    ++ ssh_user: None
    ++ hosts: [{u'ip': None, u'name': u'riak_server1.solar'}]
    ++ ssh_key: None
Proposed solution:
Add `solar res remove node1 -r` where *r* stands for recursive.
During this operation we will find all children of the specified resource and
stage them for removal as well.
2. If so, we need to be able to determine what to do with a child resource
on removal.
Basically this looks like another type of event:
hosts1.remove -> success -> node1.remove
and
hosts2.update -> success -> node2.remove
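The recursive removal proposed in item 1 could look roughly like the sketch below. It is illustrative only: `children_of` is an assumed callback that returns the resources subscribed to a given resource (solar does not expose a helper with this name), and `load` stands for a loader such as solar.core.resource.load.

    def remove_recursive(name, children_of, load):
        # children_of(res) -> resources subscribed to res (assumed helper)
        # load(name) -> resource object (e.g. solar.core.resource.load)
        staged, seen = [], set()

        def walk(res):
            if res.name in seen:
                return
            seen.add(res.name)
            for child in children_of(res):
                walk(child)
            # children are staged before their parent, so nothing is left
            # referencing data that has already been removed
            staged.append(res)

        walk(load(name))
        for res in staged:
            res.delete()
        return [res.name for res in staged]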

run_tests.sh

@@ -34,5 +34,3 @@ pip install -r solar/test-requirements.txt --download-cache=/tmp/$JOB_NAME
 pushd solar/solar
 PYTHONPATH=$WORKSPACE/solar CONFIG_FILE=$CONFIG_FILE py.test --cov=solar -s test/
 popd

@@ -41,6 +41,7 @@ from solar.cli import executors
 from solar.cli.orch import orchestration
 from solar.cli.system_log import changes
 from solar.cli.events import events
+from solar.cli.resource import resource as cli_resource

 # HELPERS
@@ -157,203 +158,12 @@ def init_cli_connections():
         fabric_api.local('dot -Tsvg graph.dot -o graph.svg')

-
-def init_cli_resource():
-    @main.group()
-    def resource():
-        pass
-
-    @resource.command()
-    @click.argument('action')
-    @click.argument('resource')
-    @click.option('-d', '--dry-run', default=False, is_flag=True)
-    @click.option('-m', '--dry-run-mapping', default='{}')
-    def action(dry_run_mapping, dry_run, action, resource):
-        if dry_run:
-            dry_run_executor = executors.DryRunExecutor(mapping=json.loads(dry_run_mapping))
-        click.echo(
-            'action {} for resource {}'.format(action, resource)
-        )
-        r = sresource.load(resource)
-        try:
-            actions.resource_action(r, action)
-        except errors.SolarError as e:
-            log.debug(e)
-            sys.exit(1)
-        if dry_run:
-            click.echo('EXECUTED:')
-            for key in dry_run_executor.executed:
-                click.echo('{}: {}'.format(
-                    click.style(dry_run_executor.compute_hash(key), fg='green'),
-                    str(key)
-                ))
-
-    @resource.command()
-    @click.argument('resource')
-    def backtrack_inputs(resource):
-        r = sresource.load(resource)
-        inputs = []
-
-        def backtrack(i):
-            def format_input(i):
-                return '{}::{}'.format(i.resource.name, i.name)
-            if isinstance(i, list):
-                return [backtrack(bi) for bi in i]
-            if isinstance(i, dict):
-                return {
-                    k: backtrack(bi) for k, bi in i.items()
-                }
-            bi = i.backtrack_value_emitter(level=1)
-            if isinstance(i, orm.DBResourceInput) and isinstance(bi, orm.DBResourceInput) and i == bi:
-                return (format_input(i), )
-            return (format_input(i), backtrack(bi))
-
-        for i in r.resource_inputs().values():
-            click.echo(yaml.safe_dump({i.name: backtrack(i)}, default_flow_style=False))
-
-    @resource.command()
-    def compile_all():
-        from solar.core.resource import compiler
-        destination_path = utils.read_config()['resources-compiled-file']
-        if os.path.exists(destination_path):
-            os.remove(destination_path)
-        for path in utils.find_by_mask(utils.read_config()['resources-files-mask']):
-            meta = utils.yaml_load(path)
-            meta['base_path'] = os.path.dirname(path)
-            compiler.compile(meta)
-
-    @resource.command()
-    def clear_all():
-        click.echo('Clearing all resources and connections')
-        orm.db.clear()
-
-    @resource.command()
-    @click.argument('name')
-    @click.argument(
-        'base_path', type=click.Path(exists=True, resolve_path=True))
-    @click.argument('args', nargs=-1)
-    def create(args, base_path, name):
-        args_parsed = {}
-        click.echo('create {} {} {}'.format(name, base_path, args))
-        for arg in args:
-            try:
-                args_parsed.update(json.loads(arg))
-            except ValueError:
-                k, v = arg.split('=')
-                args_parsed.update({k: v})
-        resources = vr.create(name, base_path, args=args_parsed)
-        for res in resources:
-            click.echo(res.color_repr())
-
-    @resource.command()
-    @click.option('--name', default=None)
-    @click.option('--tag', default=None)
-    @click.option('--json', default=False, is_flag=True)
-    @click.option('--color', default=True, is_flag=True)
-    def show(**kwargs):
-        resources = []
-        for res in sresource.load_all():
-            show = True
-            if kwargs['tag']:
-                if kwargs['tag'] not in res.tags:
-                    show = False
-            if kwargs['name']:
-                if res.name != kwargs['name']:
-                    show = False
-            if show:
-                resources.append(res)
-        echo = click.echo_via_pager
-        if kwargs['json']:
-            output = json.dumps([r.to_dict() for r in resources], indent=2)
-            echo = click.echo
-        else:
-            if kwargs['color']:
-                formatter = lambda r: r.color_repr()
-            else:
-                formatter = lambda r: unicode(r)
-            output = '\n'.join(formatter(r) for r in resources)
-        if output:
-            echo(output)
-
-    @resource.command()
-    @click.argument('resource_name')
-    @click.argument('tag_name')
-    @click.option('--add/--delete', default=True)
-    def tag(add, tag_name, resource_name):
-        click.echo('Tag {} with {} {}'.format(resource_name, tag_name, add))
-        r = sresource.load(resource_name)
-        if add:
-            r.add_tag(tag_name)
-        else:
-            r.remove_tag(tag_name)
-        # TODO: the above functions should save resource automatically to the DB
-
-    @resource.command()
-    @click.argument('name')
-    @click.argument('args', nargs=-1)
-    def update(name, args):
-        args_parsed = {}
-        for arg in args:
-            try:
-                args_parsed.update(json.loads(arg))
-            except ValueError:
-                k, v = arg.split('=')
-                args_parsed.update({k: v})
-        click.echo('Updating resource {} with args {}'.format(name, args_parsed))
-        res = sresource.load(name)
-        res.update(args_parsed)
-
-    @resource.command()
-    @click.option('--check-missing-connections', default=False, is_flag=True)
-    def validate(check_missing_connections):
-        errors = vr.validate_resources()
-        for r, error in errors:
-            click.echo('ERROR: %s: %s' % (r.name, error))
-        if check_missing_connections:
-            missing_connections = vr.find_missing_connections()
-            if missing_connections:
-                click.echo(
-                    'The following resources have inputs of the same value '
-                    'but are not connected:'
-                )
-                click.echo(
-                    tabulate.tabulate([
-                        ['%s::%s' % (r1, i1), '%s::%s' % (r2, i2)]
-                        for r1, i1, r2, i2 in missing_connections
-                    ])
-                )
-
-    @resource.command()
-    @click.argument('path', type=click.Path(exists=True, dir_okay=False))
-    def get_inputs(path):
-        with open(path) as f:
-            content = f.read()
-        click.echo(vr.get_inputs(content))
-

 def run():
     init_actions()
     init_cli_connect()
     init_cli_connections()
-    init_cli_resource()
+    main.add_command(cli_resource)
     main.add_command(orchestration)
     main.add_command(changes)
     main.add_command(events)
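The mechanics of this refactoring are standard click group composition: the resource commands now live in their own module as a click group, and the root CLI attaches them with add_command. A minimal self-contained sketch of the pattern (the names below are generic, not solar's exact layout):

    import click


    @click.group()
    def resource():
        """Command group that would live in its own module."""


    @resource.command()
    @click.argument('name')
    def remove(name):
        click.echo('removing {}'.format(name))


    @click.group()
    def main():
        pass


    # same idea as main.add_command(cli_resource) above:
    # this exposes `main resource remove NAME`
    main.add_command(resource)

    if __name__ == '__main__':
        main()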

solar/solar/cli/resource.py (new file, 227 lines)

@@ -0,0 +1,227 @@
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import sys
import os
import json
import yaml
import tabulate
import click

from solar.core import actions
from solar.core import resource as sresource
from solar.core.resource import virtual_resource as vr
from solar.core.log import log
from solar import errors
from solar.interfaces import orm
from solar import utils

from solar.cli import executors


@click.group()
def resource():
    pass


@resource.command()
@click.argument('action')
@click.argument('resource')
@click.option('-d', '--dry-run', default=False, is_flag=True)
@click.option('-m', '--dry-run-mapping', default='{}')
def action(dry_run_mapping, dry_run, action, resource):
    if dry_run:
        dry_run_executor = executors.DryRunExecutor(mapping=json.loads(dry_run_mapping))

    click.echo(
        'action {} for resource {}'.format(action, resource)
    )

    r = sresource.load(resource)
    try:
        actions.resource_action(r, action)
    except errors.SolarError as e:
        log.debug(e)
        sys.exit(1)

    if dry_run:
        click.echo('EXECUTED:')
        for key in dry_run_executor.executed:
            click.echo('{}: {}'.format(
                click.style(dry_run_executor.compute_hash(key), fg='green'),
                str(key)
            ))


@resource.command()
@click.argument('resource')
def backtrack_inputs(resource):
    r = sresource.load(resource)

    inputs = []

    def backtrack(i):
        def format_input(i):
            return '{}::{}'.format(i.resource.name, i.name)

        if isinstance(i, list):
            return [backtrack(bi) for bi in i]

        if isinstance(i, dict):
            return {
                k: backtrack(bi) for k, bi in i.items()
            }

        bi = i.backtrack_value_emitter(level=1)
        if isinstance(i, orm.DBResourceInput) and isinstance(bi, orm.DBResourceInput) and i == bi:
            return (format_input(i), )

        return (format_input(i), backtrack(bi))

    for i in r.resource_inputs().values():
        click.echo(yaml.safe_dump({i.name: backtrack(i)}, default_flow_style=False))


@resource.command()
def compile_all():
    from solar.core.resource import compiler

    destination_path = utils.read_config()['resources-compiled-file']

    if os.path.exists(destination_path):
        os.remove(destination_path)

    for path in utils.find_by_mask(utils.read_config()['resources-files-mask']):
        meta = utils.yaml_load(path)
        meta['base_path'] = os.path.dirname(path)

        compiler.compile(meta)


@resource.command()
def clear_all():
    click.echo('Clearing all resources and connections')
    orm.db.clear()


@resource.command()
@click.argument('name')
@click.argument(
    'base_path', type=click.Path(exists=True, resolve_path=True))
@click.argument('args', nargs=-1)
def create(args, base_path, name):
    args_parsed = {}

    click.echo('create {} {} {}'.format(name, base_path, args))
    for arg in args:
        try:
            args_parsed.update(json.loads(arg))
        except ValueError:
            k, v = arg.split('=')
            args_parsed.update({k: v})
    resources = vr.create(name, base_path, args=args_parsed)
    for res in resources:
        click.echo(res.color_repr())


@resource.command()
@click.option('--name', default=None)
@click.option('--tag', default=None)
@click.option('--json', default=False, is_flag=True)
@click.option('--color', default=True, is_flag=True)
def show(**kwargs):
    resources = []

    for res in sresource.load_all():
        show = True
        if kwargs['tag']:
            if kwargs['tag'] not in res.tags:
                show = False
        if kwargs['name']:
            if res.name != kwargs['name']:
                show = False

        if show:
            resources.append(res)

    echo = click.echo_via_pager
    if kwargs['json']:
        output = json.dumps([r.to_dict() for r in resources], indent=2)
        echo = click.echo
    else:
        if kwargs['color']:
            formatter = lambda r: r.color_repr()
        else:
            formatter = lambda r: unicode(r)
        output = '\n'.join(formatter(r) for r in resources)

    if output:
        echo(output)


@resource.command()
@click.argument('resource_name')
@click.argument('tag_name')
@click.option('--add/--delete', default=True)
def tag(add, tag_name, resource_name):
    click.echo('Tag {} with {} {}'.format(resource_name, tag_name, add))
    r = sresource.load(resource_name)
    if add:
        r.add_tag(tag_name)
    else:
        r.remove_tag(tag_name)
    # TODO: the above functions should save resource automatically to the DB


@resource.command()
@click.argument('name')
@click.argument('args', nargs=-1)
def update(name, args):
    args_parsed = {}

    for arg in args:
        try:
            args_parsed.update(json.loads(arg))
        except ValueError:
            k, v = arg.split('=')
            args_parsed.update({k: v})
    click.echo('Updating resource {} with args {}'.format(name, args_parsed))
    res = sresource.load(name)
    res.update(args_parsed)


@resource.command()
@click.option('--check-missing-connections', default=False, is_flag=True)
def validate(check_missing_connections):
    errors = vr.validate_resources()
    for r, error in errors:
        click.echo('ERROR: %s: %s' % (r.name, error))

    if check_missing_connections:
        missing_connections = vr.find_missing_connections()
        if missing_connections:
            click.echo(
                'The following resources have inputs of the same value '
                'but are not connected:'
            )
            click.echo(
                tabulate.tabulate([
                    ['%s::%s' % (r1, i1), '%s::%s' % (r2, i2)]
                    for r1, i1, r2, i2 in missing_connections
                ])
            )


@resource.command()
@click.argument('path', type=click.Path(exists=True, dir_okay=False))
def get_inputs(path):
    with open(path) as f:
        content = f.read()
    click.echo(vr.get_inputs(content))


@resource.command()
@click.argument('name')
def remove(name):
    res = sresource.load(name)
    res.delete()
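One way to exercise the new remove command without installing the console script is click's test runner; the resource name below is only an example and assumes such a resource already exists in the database:

    from click.testing import CliRunner

    from solar.cli.resource import resource

    runner = CliRunner()
    # roughly equivalent to running: solar resource remove sample1
    result = runner.invoke(resource, ['remove', 'sample1'])
    print(result.exit_code, result.output)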

@@ -39,7 +39,7 @@ def validate():

 @changes.command()
-@click.option('-d', default=False, is_flag=True)
+@click.option('-d', default=False, is_flag=True, help='detailed view')
 def stage(d):
     log = list(change.stage_changes().reverse())
     for item in log:
@@ -75,14 +75,28 @@ def commit(uid):

 @changes.command()
-@click.option('-n', default=5)
-def history(n):
-    commited = list(data.CL().collection(n))
-    if not commited:
-        click.echo('No history.')
-        return
-    commited.reverse()
-    click.echo(commited)
+@click.option('-n', default=5, help='number of items to show')
+@click.option('-d', default=False, is_flag=True, help='detailed view')
+@click.option('-s', default=False, is_flag=True, help='short view, only uid')
+def history(n, d, s):
+    log = list(data.CL().collection(n))
+
+    for item in log:
+        if s:
+            click.echo(item.uid)
+            continue
+
+        click.echo(item)
+        if d:
+            for line in item.details:
+                click.echo(' '*4+line)
+
+    if not log:
+        click.echo('No history')
+
+
+@changes.command()
+@click.argument('uid')
+def revert(uid):
+    change.revert(uid)


 @changes.command()
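The new history flags combine naturally with revert: `-s` prints one uid per line, which can be fed straight into the revert subcommand. A sketch using click's test runner (it assumes at least one committed change exists):

    from click.testing import CliRunner

    from solar.cli.system_log import changes

    runner = CliRunner()
    # short view: one uid per line
    uids = runner.invoke(changes, ['history', '-n', '5', '-s']).output.split()
    if uids:
        # ordering of the collection is not guaranteed here; pick any uid
        runner.invoke(changes, ['revert', uids[0]])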

@@ -112,6 +112,9 @@ class Resource(object):
             i.value = v
             i.save()

+    def delete(self):
+        return self.db_obj.delete()
+
     def resource_inputs(self):
         return {
             i.name: i for i in self.db_obj.inputs.as_set()

@@ -413,6 +413,17 @@ class DBResourceInput(DBObject):
             )[0].start_node.properties
         )

+    def delete(self):
+        db.delete_relations(
+            source=self._db_node,
+            type_=base.BaseGraphDB.RELATION_TYPES.input_to_input
+        )
+        db.delete_relations(
+            dest=self._db_node,
+            type_=base.BaseGraphDB.RELATION_TYPES.input_to_input
+        )
+        super(DBResourceInput, self).delete()
+
     def backtrack_value_emitter(self, level=None):
         # TODO: this is actually just fetching head element in linked list
         # so this whole algorithm can be moved to the db backend probably
@@ -559,6 +570,12 @@
         event.save()
         self.events.add(event)

+    def delete(self):
+        for input in self.inputs.as_set():
+            self.inputs.remove(input)
+            input.delete()
+
+        super(DBResource, self).delete()

 # TODO: remove this
 if __name__ == '__main__':
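Together with the Resource.delete wrapper above, deleting a resource from code now detaches its inputs from the input_to_input graph before the resource node itself is removed. A minimal usage sketch (the resource name is illustrative):

    from solar.core import resource as sresource

    r = sresource.load('sample1')
    # Resource.delete -> DBResource.delete: every input is removed from the
    # collection and DBResourceInput.delete drops its input_to_input relations
    r.delete()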

@@ -12,7 +12,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.

-from dictdiffer import diff
+import dictdiffer
 import networkx as nx

 from solar.core.log import log
@@ -39,12 +39,21 @@ def guess_action(from_, to):

 def create_diff(staged, commited):
-    return list(diff(commited, staged))
+    return list(dictdiffer.diff(commited, staged))
+
+
+def create_logitem(resource, action, diffed):
+    return data.LogItem(
+        utils.generate_uuid(),
+        resource,
+        '{}.{}'.format(resource, action),
+        diffed)


 def _stage_changes(staged_resources, commited_resources, staged_log):
-    for res_uid in staged_resources.keys():
+    union = set(staged_resources.keys()) | set(commited_resources.keys())
+    for res_uid in union:
         commited_data = commited_resources.get(res_uid, {})
         staged_data = staged_resources.get(res_uid, {})
@@ -52,11 +61,7 @@ def _stage_changes(staged_resources, commited_resources, staged_log):
         if df:
             action = guess_action(commited_data, staged_data)
-            log_item = data.LogItem(
-                utils.generate_uuid(),
-                res_uid,
-                '{}.{}'.format(res_uid, action),
-                df)
+            log_item = create_logitem(res_uid, action, df)
             staged_log.append(log_item)
     return staged_log
@@ -102,3 +107,19 @@ def parameters(res, action, data):
             'type': 'solar_resource',
             # unique identifier for a node should be passed
             'target': data.get('ip')}
+
+
+def revert_uids(uids):
+    commited = data.CD()
+    history = data.CL()
+
+    for uid in uids:
+        item = history.get(uid)
+        res_db = resource.load(item.res)
+        args_to_update = dictdiffer.revert(
+            item.diff, commited.get(item.res, {}))
+        res_db.update(args_to_update)
+
+
+def revert(uid):
+    return revert_uids([uid])
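The revert path relies on dictdiffer's patch and revert being inverses over the same diff. A standalone illustration of the round trip used by move_to_commited and revert_uids:

    import dictdiffer

    commited = {'a': '9'}            # last committed state
    staged = {'a': '10', 'b': '1'}   # staged state

    # same direction as create_diff(staged, commited) above
    d = list(dictdiffer.diff(commited, staged))

    assert dictdiffer.patch(d, commited) == staged   # applied when committing
    assert dictdiffer.revert(d, staged) == commited  # applied when reverting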

@@ -115,7 +115,7 @@ class Log(object):
         self.ordered_log = db.get_ordered_hash(path)

     def append(self, logitem):
-        self.ordered_log.add([(logitem.log_action, logitem.to_dict())])
+        self.ordered_log.add([(logitem.uid, logitem.to_dict())])

     def pop(self, uid):
         item = self.get(uid)
@@ -125,7 +125,7 @@ class Log(object):
         return item

     def update(self, logitem):
-        self.ordered_log.update(logitem.log_action, logitem.to_dict())
+        self.ordered_log.update(logitem.uid, logitem.to_dict())

     def clean(self):
         self.ordered_log.clean()

@@ -18,7 +18,7 @@ from dictdiffer import patch

 def set_error(log_action, *args, **kwargs):
     sl = data.SL()
-    item = sl.get(log_action)
+    item = next((i for i in sl if i.log_action == log_action), None)
     if item:
         item.state = data.STATES.error
         sl.update(item)
@@ -26,10 +26,11 @@ def set_error(log_action, *args, **kwargs):

 def move_to_commited(log_action, *args, **kwargs):
     sl = data.SL()
-    item = sl.pop(log_action)
+    item = next((i for i in sl if i.log_action == log_action), None)
+    sl.pop(item.uid)
     if item:
         commited = data.CD()
-        staged_data = patch(item.diff, commited.get(item.log_action, {}))
+        staged_data = patch(item.diff, commited.get(item.res, {}))
         cl = data.CL()
         item.state = data.STATES.success
         cl.append(item)

@@ -228,6 +228,12 @@ class TestResourceORM(BaseResourceTest):
         r.add_input('ip', 'str!', '10.0.0.2')
         self.assertEqual(len(r.inputs.as_set()), 1)

+    def test_delete_resource(self):
+        r = orm.DBResource(id='test1', name='test1', base_path='x')
+        r.save()
+        r.add_input('ip', 'str!', '10.0.0.2')
+

 class TestResourceInputORM(BaseResourceTest):

     def test_backtrack_simple(self):

@@ -93,3 +93,29 @@ input:
         self.assertDictEqual(sample.args, sample_l.args)
         self.assertListEqual(sample.tags, sample_l.tags)

+    def test_removal(self):
+        """Test that connection removed with resource."""
+        sample_meta_dir = self.make_resource_meta("""
+id: sample
+handler: ansible
+version: 1.0.0
+input:
+  value:
+    schema: int
+    value: 0
+        """)
+
+        sample1 = self.create_resource(
+            'sample1', sample_meta_dir, {'value': 1}
+        )
+        sample2 = self.create_resource(
+            'sample2', sample_meta_dir, {}
+        )
+        signals.connect(sample1, sample2)
+        self.assertEqual(sample1.args['value'], sample2.args['value'])
+
+        sample1 = resource.load('sample1')
+        sample2 = resource.load('sample2')
+        sample1.delete()
+        self.assertEqual(sample2.args['value'], 0)

@@ -0,0 +1,46 @@
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from pytest import fixture

from solar.system_log import change
from solar.system_log import data
from solar.system_log import operations
from solar.core.resource import resource
from solar.interfaces import orm


def test_revert_update():
    commit = {'a': '10'}
    previous = {'a': '9'}
    res = orm.DBResource(id='test1', name='test1', base_path='x')
    res.save()
    res.add_input('a', 'str', '9')
    action = 'update'

    resource_obj = resource.load(res.name)

    assert resource_obj.args == previous

    log = data.SL()
    logitem = change.create_logitem(
        res.name, action, change.create_diff(commit, previous))
    log.append(logitem)
    resource_obj.update(commit)
    operations.move_to_commited(logitem.log_action)

    assert resource_obj.args == commit

    change.revert(logitem.uid)

    assert resource_obj.args == previous

solar/test-requirements.txt

@@ -1,3 +1,5 @@
 -r requirements.txt
-pytest-mock
+hacking==0.7
 pytest-cov
+pytest-mock
+tox

tox.ini (new file, 37 lines)

@@ -0,0 +1,37 @@
[tox]
minversion = 1.6
skipsdist = True
envlist = py27,pep8

[testenv]
usedevelop = True
install_command = pip install -U {opts} {packages}
setenv = VIRTUAL_ENV={envdir}
deps = -r{toxinidir}/test-requirements.txt
commands =
    py.test {posargs:solar/solar/test}

[testenv:pep8]
deps = hacking==0.7
usedevelop = False
commands =
    flake8 {posargs:solar/solar}

[testenv:venv]
deps = -r{toxinidir}/requirements.txt
commands = {posargs:}

[testenv:devenv]
envdir = devenv
usedevelop = True

[flake8]
# NOTE(eli): H304 is "No relative imports" error, relative
# imports are required for extensions which can be moved
# from nailgun directory to different place
ignore = H234,H302,H802,H304
exclude = .venv,.git,.tox,dist,doc,*lib/python*,*egg,build,tools,__init__.py,docs
show-pep8 = True
show-source = True
count = True
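With this file in place, a plain `tox` run exercises the py27 and pep8 environments from envlist, `tox -e pep8` runs only the flake8 checks, and `tox -e venv -- <command>` runs an arbitrary command in a virtualenv built from requirements.txt.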