Merge pull request #330 from dshulyak/system_log_tests

System log tests
commit af69f61577
Jędrzej Nowak, 2015-11-17 11:29:55 +01:00
11 changed files with 160 additions and 110 deletions

View File

@@ -47,7 +47,7 @@ def stage(d):
     for item in log:
         click.echo(data.compact(item))
         if d:
-            for line in data.details(item):
+            for line in data.details(item.diff):
                 click.echo(' '*4+line)
     if not log:
         click.echo('No changes')
@@ -60,7 +60,7 @@ def staged_item(uid):
         click.echo('No staged changes for {}'.format(log_action))
     else:
         click.echo(data.compact(item))
-        for line in data.details(item):
+        for line in data.details(item.diff):
             click.echo(' '*4+line)

 @changes.command()
@@ -89,7 +89,7 @@ def history(n, d, s):
         click.echo(data.compact(item))
         if d:
-            for line in data.details(item):
+            for line in data.details(item.diff):
                 click.echo(' '*4+line)
     if not log:
         click.echo('No history')
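Note (not part of the commit): all three CLI hunks above switch the call from data.details(item) to data.details(item.diff), matching the new details() signature changed later in this PR. A minimal sketch of the caller side, assuming a staged log item that exposes a diff attribute:

    import click
    from solar.system_log import data

    def print_log_item(item, verbose=False):
        # compact one-line summary, optionally followed by the per-input diff
        click.echo(data.compact(item))
        if verbose:
            for line in data.details(item.diff):
                click.echo(' ' * 4 + line)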

View File

@@ -12,4 +12,4 @@
 # License for the specific language governing permissions and limitations
 # under the License.
-from .resource import Resource, load, load_all, validate_resources, load_by_tags, load_updated
+from .resource import Resource, load, load_all, validate_resources, load_by_tags, load_updated, RESOURCE_STATE

View File

@@ -81,7 +81,6 @@ class Resource(object):
         inputs = metadata.get('input', {})
         self.auto_extend_inputs(inputs)
         self.db_obj = DBResource.from_dict(
             name,
             {
@@ -98,11 +97,11 @@
                 'tags': tags,
                 'state': RESOURCE_STATE.created.name
             })
         self.create_inputs(args)
         self.db_obj.save()

     # Load
     @dispatch(DBResource)
     def __init__(self, resource_db):
@@ -269,6 +268,8 @@ class Resource(object):
             mapping = dict((x, x) for x in mapping)
         self.db_obj.connect(receiver.db_obj, mapping=mapping)
         self.db_obj.save_lazy()
+        receiver.db_obj.save_lazy()

     def connect_with_events(self, receiver, mapping=None, events=None,
                             use_defaults=False):
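The added receiver.db_obj.save_lazy() follows the dblayer's deferred-write pattern: save_lazy() only queues the object, and the queue is flushed explicitly. A rough sketch of that pattern, with a hypothetical minimal resource dict (the tests below always pass state and meta_inputs as well):

    from solar.dblayer.model import ModelMeta
    from solar.dblayer.solar_models import Resource as DBResource

    res = DBResource.from_dict('node1',
                               {'name': 'node1', 'base_path': 'x',
                                'meta_inputs': {'ip': {'value': None, 'schema': 'str'}}})
    res.save_lazy()            # queued only; nothing is written yet
    ModelMeta.save_all_lazy()  # flush every queued object in one pass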

View File

@@ -39,10 +39,10 @@ def guess_mapping(emitter, receiver):
     :return:
     """
     guessed = {}
-    for key in emitter.args:
-        if key in receiver.args:
-            guessed[key] = key
+    for key in emitter.db_obj.meta_inputs:
+        if key in receiver.db_obj.meta_inputs:
+            guessed[key] = key
     return guessed
@@ -124,7 +124,7 @@ def location_and_transports(emitter, receiver, orig_mapping):
     # XXX: should be somehow parametrized (input attribute?)
     # with dirty_state_ok(DBResource, ('index', )):
     for single in ('transports_id', 'location_id'):
-        if single in inps_emitter and inps_receiver:
+        if single in inps_emitter and single in inps_receiver:
             _single(single, emitter, receiver, inps_emitter[single], inps_receiver[single])
         else:
             log.warning('Unable to create connection for %s with'
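The second hunk fixes a membership test: the old condition was true whenever inps_receiver was any non-empty dict, even if it lacked the key in question. A self-contained illustration with hypothetical input dicts:

    inps_emitter = {'location_id': '1', 'ip': '10.0.0.1'}
    inps_receiver = {'ip': None}  # no 'location_id' key

    single = 'location_id'
    old_check = bool(single in inps_emitter and inps_receiver)            # True: dict is truthy
    new_check = bool(single in inps_emitter and single in inps_receiver)  # False: key missing
    assert old_check and not new_check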

View File

@@ -823,6 +823,7 @@ class Model(object):
         riak_obj = cls.bucket.new(key, data={})
         obj = cls.from_riakobj(riak_obj)
         obj._new = True
         for field in cls._model_fields:
             # if field is cls._pkey_field:
             #     continue  # pkey already set

View File

@@ -228,6 +228,7 @@ class InputsFieldWrp(IndexFieldWrp):
             _, ind_value = emit
             if ind_value.endswith('|{}|{}'.format(self._instance.key, name)):
                 to_dels.append(emit)
         for to_del in to_dels:
             self._instance._remove_index(*to_del)

View File

@@ -64,6 +64,7 @@ def create_sorted_diff(staged, commited):
 def make_single_stage_item(resource_obj):
     commited = resource_obj.load_commited()
     base_path = resource_obj.base_path

     if resource_obj.to_be_removed():
         resource_args = {}
         resource_connections = []
@@ -177,8 +178,6 @@ def _revert_remove(logitem):
 def _update_inputs_connections(res_obj, args, old_connections, new_connections):
-    res_obj.update(args)

     removed = []
     for item in old_connections:
@@ -193,13 +192,19 @@ def _update_inputs_connections(res_obj, args, old_connections, new_connections):
     for emitter, _, receiver, _ in removed:
         emmiter_obj = resource.load(emitter)
         receiver_obj = resource.load(receiver)
-        signals.disconnect(emmiter_obj, receiver_obj)
+        emmiter_obj.disconnect(receiver_obj)

     for emitter, emitter_input, receiver, receiver_input in added:
         emmiter_obj = resource.load(emitter)
         receiver_obj = resource.load(receiver)
-        signals.connect(emmiter_obj, receiver_obj, {emitter_input: receiver_input})
+        emmiter_obj.connect(receiver_obj, {emitter_input: receiver_input})
+
+    if removed or added:
+        # TODO without save we will get error that some values can not be updated
+        # even if connection was removed
+        receiver_obj.db_obj.save()
+    res_obj.update(args)

 def _revert_update(logitem):
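Both replacements above follow the same API move the tests later in this PR rely on: connections are made through Resource methods instead of the signals helpers. A hedged usage sketch with hypothetical resource names:

    from solar.core.resource import resource

    emitter = resource.load('node1')
    receiver = resource.load('service1')

    emitter.connect(receiver, {'ip': 'ip'})  # was signals.connect(emitter, receiver, {'ip': 'ip'})
    emitter.disconnect(receiver)             # was signals.disconnect(emitter, receiver)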

View File

@@ -31,8 +31,7 @@ def compact(logitem):
     return 'log task={} uid={}'.format(logitem.log_action, logitem.uid)

-def details(logitem):
-    diff = logitem.diff
+def details(diff):
     rst = []
     for type_, val, change in diff:
         if type_ == 'add':

View File

@@ -38,6 +38,7 @@ def move_to_commited(log_action, *args, **kwargs):
         commited = CommitedResource.get_or_create(item.resource)
         updated = resource_obj.db_obj.updated

         if item.action == CHANGES.remove.name:
             resource_obj.delete()
             commited.state = resource.RESOURCE_STATE.removed.name
         else:

View File

@@ -11,32 +11,14 @@
 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 # License for the specific language governing permissions and limitations
 # under the License.
-import os
 import pytest
 import time
-from solar.core.resource import Resource
-# from solar.interfaces import db
-from solar.dblayer.model import get_bucket, ModelMeta, Model
+from solar.dblayer.model import Model, ModelMeta, get_bucket


-@pytest.fixture
-def resources():
-    base_path = os.path.join(
-        os.path.dirname(os.path.realpath(__file__)),
-        'resource_fixtures')
-    node_path = os.path.join(base_path, 'node')
-    node1 = Resource('node1', node_path, args={'ip':'10.0.0.1'})
-    node2 = Resource('node2', node_path, args={'ip':'10.0.0.2'})
-    base_service_path = os.path.join(base_path, 'base_service')
-    service1 = Resource('service1', base_service_path)
-    return {'node1' : node1,
-            'node2' : node2,
-            'service1': service1
-            }
+def patched_get_bucket_name(cls):
+    return cls.__name__ + str(time.time())


 @pytest.fixture(autouse=True)
@@ -46,6 +28,12 @@ def setup(request):
         model.bucket = get_bucket(None, model, ModelMeta)
+
+
+@pytest.fixture(autouse=True)
+def setup(request):
+    for model in ModelMeta._defined_models:
+        model.bucket = get_bucket(None, model, ModelMeta)
+

 def pytest_runtest_teardown(item, nextitem):
     ModelMeta.session_end(result=True)
     return nextitem
@@ -59,13 +47,18 @@ def pytest_runtest_call(item):
     ModelMeta.session_end()
     ModelMeta.session_start()

-def patched_get_bucket_name(cls):
-    return cls.__name__ + str(time.time())

 Model.get_bucket_name = classmethod(patched_get_bucket_name)

-from solar.dblayer.sql_client import SqlClient
-client = SqlClient(':memory:', threadlocals=True, autocommit=False)
+# from solar.dblayer.sql_client import SqlClient
+# client = SqlClient(':memory:', threadlocals=False, autocommit=False)
+# client = SqlClient('/tmp/blah.db', threadlocals=True,
+#                    autocommit=False, pragmas=(('journal_mode', 'WAL'),
+#                                               ('synchronous', 'NORMAL')))
+from solar.dblayer.riak_client import RiakClient
+client = RiakClient(protocol='pbc', host='10.0.0.2', pb_port=8087)
+# client = RiakClient(protocol='http', host='10.0.0.3', http_port=18098)

 ModelMeta.setup(client)
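The conftest change above moves patched_get_bucket_name to module scope and keeps monkey-patching Model.get_bucket_name, so every test run gets bucket names suffixed with a timestamp and repeated runs do not collide in the backing store. A standalone sketch of the same trick, using plain classes so no database is needed:

    import time

    class Model(object):
        @classmethod
        def get_bucket_name(cls):
            return cls.__name__

    def patched_get_bucket_name(cls):
        return cls.__name__ + str(time.time())

    Model.get_bucket_name = classmethod(patched_get_bucket_name)
    print(Model.get_bucket_name())  # e.g. 'Model1447754995.12'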

View File

@@ -13,25 +13,28 @@
 # under the License.
 import mock
 from pytest import fixture
 from pytest import mark

 from solar.system_log import change
 from solar.system_log import data
 from solar.system_log import operations
 from solar.core import signals
-from solar.core.resource import resource
-from solar.interfaces import orm
+from solar.core.resource import resource, RESOURCE_STATE
+from solar.dblayer.solar_models import Resource as DBResource
+from solar.dblayer.solar_models import CommitedResource
+from solar.dblayer.model import ModelMeta
 def test_revert_update():
     commit = {'a': '10'}
     previous = {'a': '9'}
-    res = orm.DBResource(id='test1', name='test1', base_path='x')
+    res = DBResource.from_dict('test1',
+                               {'name': 'test1', 'base_path': 'x',
+                                'meta_inputs': {'a': {'value': None, 'schema': 'str'}}})
     res.save()
-    res.add_input('a', 'str', '9')
     action = 'update'
+    res.inputs['a'] = '9'

     resource_obj = resource.load(res.name)
     assert resource_obj.args == previous
@@ -52,76 +55,97 @@ def test_revert_update():

 def test_revert_update_connected():
-    res1 = orm.DBResource(id='test1', name='test1', base_path='x')
-    res1.save()
-    res1.add_input('a', 'str', '9')
+    res1 = DBResource.from_dict('test1',
+                                {'name': 'test1', 'base_path': 'x',
+                                 'state': RESOURCE_STATE.created.name,
+                                 'meta_inputs': {'a': {'value': None, 'schema': 'str'}}})
+    res1.inputs['a'] = '9'
+    res1.save_lazy()

-    res2 = orm.DBResource(id='test2', name='test2', base_path='x')
-    res2.save()
-    res2.add_input('a', 'str', 0)
+    res2 = DBResource.from_dict('test2',
+                                {'name': 'test2', 'base_path': 'x',
+                                 'state': RESOURCE_STATE.created.name,
+                                 'meta_inputs': {'a': {'value': None, 'schema': 'str'}}})
+    res2.inputs['a'] = ''
+    res2.save_lazy()

-    res3 = orm.DBResource(id='test3', name='test3', base_path='x')
-    res3.save()
-    res3.add_input('a', 'str', 0)
+    res3 = DBResource.from_dict('test3',
+                                {'name': 'test3', 'base_path': 'x',
+                                 'state': RESOURCE_STATE.created.name,
+                                 'meta_inputs': {'a': {'value': None, 'schema': 'str'}}})
+    res3.inputs['a'] = ''
+    res3.save_lazy()

     res1 = resource.load('test1')
     res2 = resource.load('test2')
     res3 = resource.load('test3')
-    signals.connect(res1, res2)
-    signals.connect(res2, res3)
+    res1.connect(res2)
+    res2.connect(res3)
+    ModelMeta.save_all_lazy()

     staged_log = change.stage_changes()
     assert len(staged_log) == 3
     for item in staged_log:
+        assert item.action == 'run'
         operations.move_to_commited(item.log_action)
-    assert len(staged_log) == 0
+    assert len(change.stage_changes()) == 0

-    signals.disconnect(res1, res2)
+    res1.disconnect(res2)

     staged_log = change.stage_changes()
     assert len(staged_log) == 2
     to_revert = []
     for item in staged_log:
+        assert item.action == 'update'
         operations.move_to_commited(item.log_action)
         to_revert.append(item.uid)

     change.revert_uids(sorted(to_revert, reverse=True))
+    ModelMeta.save_all_lazy()
     staged_log = change.stage_changes()
     assert len(staged_log) == 2
     for item in staged_log:
-        assert item.diff == [['change', 'a', [0, '9']]]
+        assert item.diff == [['change', 'a', ['', '9']]]


 def test_revert_removal():
-    res = orm.DBResource(id='test1', name='test1', base_path='x')
-    res.save()
-    res.add_input('a', 'str', '9')
-    res.add_input('location_id', 'str', '1')
-    res.add_input('transports_id', 'str', '1')
+    res = DBResource.from_dict('test1',
+                               {'name': 'test1', 'base_path': 'x',
+                                'state': RESOURCE_STATE.created.name,
+                                'meta_inputs': {'a': {'value': None, 'schema': 'str'}}})
+    res.inputs['a'] = '9'
+    res.save_lazy()

-    commited = orm.DBCommitedState.get_or_create('test1')
-    commited.inputs = {'a': '9', 'location_id': '1', 'transports_id': '1'}
-    commited.save()
+    commited = CommitedResource.from_dict('test1',
+                                          {'inputs': {'a': '9'},
+                                           'state': 'operational'})
+    commited.save_lazy()

-    logitem =change.create_logitem(
-        res.name, 'remove', change.create_diff({}, {'a': '9'}), [],
-        base_path=res.base_path)
-    log = data.SL()
-    log.append(logitem)
     resource_obj = resource.load(res.name)
     resource_obj.remove()
+    ModelMeta.save_all_lazy()

-    operations.move_to_commited(logitem.log_action)
-    resources = orm.DBResource.load_all()
+    changes = change.stage_changes()
+    assert len(changes) == 1
+    assert changes[0].diff == [['remove', '', [['a', '9']]]]
+    operations.move_to_commited(changes[0].log_action)

-    assert resources == []
-    assert logitem.diff == [('remove', '', [('a', '9')])]
+    ModelMeta.session_start()
+    assert DBResource._c.obj_cache == {}
+    assert DBResource.bucket.get('test1').siblings == []

     with mock.patch.object(resource, 'read_meta') as mread:
         mread.return_value = {'input': {'a': {'schema': 'str!'}}, 'id': 'mocked'}
-        change.revert(logitem.uid)
+        change.revert(changes[0].uid)
+    ModelMeta.save_all_lazy()
+    assert len(DBResource.bucket.get('test1').siblings) == 1

     resource_obj = resource.load('test1')
-    assert resource_obj.args == {'a': '9', 'location_id': '1', 'transports_id': '1'}
+    assert resource_obj.args == {
+        'a': '9', 'location_id': '', 'transports_id': ''}


+@mark.xfail(reason='With current approach child will be notice changes after parent is removed')
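The diff payloads asserted in these tests appear to follow a dictdiffer-style (operation, key, value) layout. For reference, the three shapes used above:

    diff_add = [['add', '', [['a', '9']]]]        # input 'a' created with value '9'
    diff_change = [['change', 'a', ['', '9']]]    # 'a' changed from '' to '9'
    diff_remove = [['remove', '', [['a', '9']]]]  # input 'a' removed (previous value '9')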
@@ -158,18 +182,21 @@ def test_revert_removed_child():

 def test_revert_create():
-    res = orm.DBResource(id='test1', name='test1', base_path='x')
-    res.save()
-    res.add_input('a', 'str', '9')
+    res = DBResource.from_dict('test1',
+                               {'name': 'test1', 'base_path': 'x',
+                                'state': RESOURCE_STATE.created.name,
+                                'meta_inputs': {'a': {'value': None, 'schema': 'str'}}})
+    res.inputs['a'] = '9'
+    res.save_lazy()
+    ModelMeta.save_all_lazy()

     staged_log = change.stage_changes()
     assert len(staged_log) == 1
-    logitem = next(staged_log.collection())
+    logitem = staged_log[0]
     operations.move_to_commited(logitem.log_action)
     assert logitem.diff == [['add', '', [['a', '9']]]]

-    commited = orm.DBCommitedState.load('test1')
+    commited = CommitedResource.get('test1')
     assert commited.inputs == {'a': '9'}

     change.revert(logitem.uid)
@@ -178,17 +205,24 @@ def test_revert_create():
     assert len(staged_log) == 1
     for item in staged_log:
         operations.move_to_commited(item.log_action)
-    assert orm.DBResource.load_all() == []
+    assert resource.load_all() == []


 def test_discard_all_pending_changes_resources_created():
-    res1 = orm.DBResource(id='test1', name='test1', base_path='x')
-    res1.save()
-    res1.add_input('a', 'str', '9')
+    res1 = DBResource.from_dict('test1',
+                                {'name': 'test1', 'base_path': 'x',
+                                 'state': RESOURCE_STATE.created.name,
+                                 'meta_inputs': {'a': {'value': None, 'schema': 'str'}}})
+    res1.inputs['a'] = '9'
+    res1.save_lazy()

-    res2 = orm.DBResource(id='test2', name='test2', base_path='x')
-    res2.save()
-    res2.add_input('a', 'str', 0)
+    res2 = DBResource.from_dict('test2',
+                                {'name': 'test2', 'base_path': 'x',
+                                 'state': RESOURCE_STATE.created.name,
+                                 'meta_inputs': {'a': {'value': None, 'schema': 'str'}}})
+    res2.inputs['a'] = '0'
+    res2.save_lazy()
+    ModelMeta.save_all_lazy()

     staged_log = change.stage_changes()
     assert len(staged_log) == 2
@@ -196,17 +230,24 @@ def test_discard_all_pending_changes_resources_created():
     change.discard_all()
     staged_log = change.stage_changes()
     assert len(staged_log) == 0
-    assert orm.DBResource.load_all() == []
+    assert resource.load_all() == []


 def test_discard_connection():
-    res1 = orm.DBResource(id='test1', name='test1', base_path='x')
-    res1.save()
-    res1.add_input('a', 'str', '9')
+    res1 = DBResource.from_dict('test1',
+                                {'name': 'test1', 'base_path': 'x',
+                                 'state': RESOURCE_STATE.created.name,
+                                 'meta_inputs': {'a': {'value': None, 'schema': 'str'}}})
+    res1.inputs['a'] = '9'
+    res1.save_lazy()

-    res2 = orm.DBResource(id='test2', name='test2', base_path='x')
-    res2.save()
-    res2.add_input('a', 'str', '0')
+    res2 = DBResource.from_dict('test2',
+                                {'name': 'test2', 'base_path': 'x',
+                                 'state': RESOURCE_STATE.created.name,
+                                 'meta_inputs': {'a': {'value': None, 'schema': 'str'}}})
+    res2.inputs['a'] = '0'
+    res2.save_lazy()
+    ModelMeta.save_all_lazy()

     staged_log = change.stage_changes()
     for item in staged_log:
@@ -214,7 +255,7 @@ def test_discard_connection():
     res1 = resource.load('test1')
     res2 = resource.load('test2')
-    signals.connect(res1, res2)
+    res1.connect(res2, {'a': 'a'})
     staged_log = change.stage_changes()
     assert len(staged_log) == 1
     assert res2.args == {'a': '9'}
@@ -224,9 +265,13 @@

 def test_discard_removed():
-    res1 = orm.DBResource(id='test1', name='test1', base_path='x')
-    res1.save()
-    res1.add_input('a', 'str', '9')
+    res1 = DBResource.from_dict('test1',
+                                {'name': 'test1', 'base_path': 'x',
+                                 'state': RESOURCE_STATE.created.name,
+                                 'meta_inputs': {'a': {'value': None, 'schema': 'str'}}})
+    res1.inputs['a'] = '9'
+    res1.save_lazy()
+    ModelMeta.save_all_lazy()
     staged_log = change.stage_changes()
     for item in staged_log:
         operations.move_to_commited(item.log_action)
@@ -242,9 +287,13 @@

 def test_discard_update():
-    res1 = orm.DBResource(id='test1', name='test1', base_path='x')
-    res1.save()
-    res1.add_input('a', 'str', '9')
+    res1 = DBResource.from_dict('test1',
+                                {'name': 'test1', 'base_path': 'x',
+                                 'state': RESOURCE_STATE.created.name,
+                                 'meta_inputs': {'a': {'value': None, 'schema': 'str'}}})
+    res1.inputs['a'] = '9'
+    res1.save_lazy()
+    ModelMeta.save_all_lazy()
     staged_log = change.stage_changes()
     for item in staged_log:
         operations.move_to_commited(item.log_action)