Adjusted dblayer sqlite behaviour to riak one
Changed default database to sqlite Changed how solar_db config is defined Removed lupa from test-requirements Removed riak from requirements Testr uses :memory: sqlite Closes-Bug: #1526286 Change-Id: I709d19a192f800e9a67d9c7657f286ff0b343053
This commit is contained in:
parent
7d12e56d40
commit
fb1e946853
10
.config
10
.config
@ -1,9 +1,7 @@
|
|||||||
dblayer: riak
|
|
||||||
redis:
|
redis:
|
||||||
host: localhost
|
host: localhost
|
||||||
port: '6379'
|
port: '6379'
|
||||||
solar_db:
|
|
||||||
mode: riak
|
|
||||||
host: localhost
|
solar_db: sqlite:////tmp/solar.db
|
||||||
port: '8087'
|
# solar_db: riak://10.0.0.2:8087
|
||||||
protocol: pbc
|
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
[DEFAULT]
|
[DEFAULT]
|
||||||
test_command=py.test ./solar --subunit $LISTOPT $IDOPTION
|
test_command=SOLAR_DB="sqlite://" \
|
||||||
|
py.test ./solar --subunit $LISTOPT $IDOPTION
|
||||||
test_id_option=--subunit-load-list=$IDFILE
|
test_id_option=--subunit-load-list=$IDFILE
|
||||||
test_list_option=--collectonly
|
test_list_option=--collectonly
|
||||||
|
@ -14,6 +14,17 @@
|
|||||||
|
|
||||||
- hosts: all
|
- hosts: all
|
||||||
tasks:
|
tasks:
|
||||||
|
# set default config location
|
||||||
|
- lineinfile:
|
||||||
|
dest: /home/vagrant/.bashrc
|
||||||
|
line: export SOLAR_CONFIG="/vagrant/.config"
|
||||||
|
state: present
|
||||||
|
# make riak default on vagrant env
|
||||||
|
# unset
|
||||||
|
- lineinfile:
|
||||||
|
dest: /home/vagrant/.bashrc
|
||||||
|
line: export SOLAR_CONFIG_OVERRIDE="/home/vagrant/.solar_config_override"
|
||||||
|
state: present
|
||||||
- lineinfile:
|
- lineinfile:
|
||||||
dest: /home/vagrant/.bashrc
|
dest: /home/vagrant/.bashrc
|
||||||
line: eval "$(_SOLAR_COMPLETE=source solar)"
|
line: eval "$(_SOLAR_COMPLETE=source solar)"
|
||||||
@ -22,3 +33,11 @@
|
|||||||
dest: /home/vagrant/.bashrc
|
dest: /home/vagrant/.bashrc
|
||||||
line: export PYTHONWARNINGS="ignore"
|
line: export PYTHONWARNINGS="ignore"
|
||||||
state: present
|
state: present
|
||||||
|
|
||||||
|
- hosts: all
|
||||||
|
tasks:
|
||||||
|
- lineinfile:
|
||||||
|
dest: /home/vagrant/.solar_config_override
|
||||||
|
line: "solar_db: riak://10.0.0.2:8087"
|
||||||
|
state: present
|
||||||
|
create: yes
|
||||||
|
@ -55,6 +55,9 @@
|
|||||||
# fresh tox
|
# fresh tox
|
||||||
- shell: sudo pip install tox
|
- shell: sudo pip install tox
|
||||||
|
|
||||||
|
# install riak package
|
||||||
|
- shell: sudo pip install riak
|
||||||
|
|
||||||
# Ubuntu OpenStack packages
|
# Ubuntu OpenStack packages
|
||||||
#- apt: name=ubuntu-cloud-keyring state=present
|
#- apt: name=ubuntu-cloud-keyring state=present
|
||||||
#- shell: echo "deb http://ubuntu-cloud.archive.canonical.com/ubuntu trusty-updates/kilo main" > /etc/apt/sources.list.d/cloudarchive-kilo.list
|
#- shell: echo "deb http://ubuntu-cloud.archive.canonical.com/ubuntu trusty-updates/kilo main" > /etc/apt/sources.list.d/cloudarchive-kilo.list
|
||||||
|
@ -22,8 +22,8 @@ pydot
|
|||||||
bunch
|
bunch
|
||||||
wrapt
|
wrapt
|
||||||
# if you want to use riak backend then
|
# if you want to use riak backend then
|
||||||
riak
|
# riak
|
||||||
# if you want to use sql backend then
|
# if you want to use sql backend then
|
||||||
# peewee
|
peewee
|
||||||
# if you want to use lua computable inputs
|
# if you want to use lua computable inputs
|
||||||
# lupa
|
# lupa
|
||||||
|
@ -22,9 +22,8 @@ import yaml
|
|||||||
|
|
||||||
CWD = os.getcwd()
|
CWD = os.getcwd()
|
||||||
|
|
||||||
C = Bunch()
|
C = Bunch(solar_db="")
|
||||||
C.redis = Bunch(port='6379', host='10.0.0.2')
|
C.redis = Bunch(port='6379', host='10.0.0.2')
|
||||||
C.solar_db = Bunch(mode='riak', port='8087', host='10.0.0.2', protocol='pbc')
|
|
||||||
|
|
||||||
|
|
||||||
def _lookup_vals(setter, config, prefix=None):
|
def _lookup_vals(setter, config, prefix=None):
|
||||||
@ -43,6 +42,7 @@ def from_configs():
|
|||||||
|
|
||||||
paths = [
|
paths = [
|
||||||
os.getenv('SOLAR_CONFIG', os.path.join(CWD, '.config')),
|
os.getenv('SOLAR_CONFIG', os.path.join(CWD, '.config')),
|
||||||
|
os.getenv('SOLAR_CONFIG_OVERRIDE', None),
|
||||||
os.path.join(CWD, '.config.override')
|
os.path.join(CWD, '.config.override')
|
||||||
]
|
]
|
||||||
data = {}
|
data = {}
|
||||||
@ -54,6 +54,8 @@ def from_configs():
|
|||||||
data.update(loaded)
|
data.update(loaded)
|
||||||
|
|
||||||
for path in paths:
|
for path in paths:
|
||||||
|
if not path:
|
||||||
|
continue
|
||||||
if not os.path.exists(path):
|
if not os.path.exists(path):
|
||||||
continue
|
continue
|
||||||
if not os.path.isfile(path):
|
if not os.path.isfile(path):
|
||||||
|
@ -1,36 +1,43 @@
|
|||||||
from solar.dblayer.model import ModelMeta
|
from solar.dblayer.model import ModelMeta
|
||||||
from solar.dblayer.riak_client import RiakClient
|
|
||||||
from solar.config import C
|
from solar.config import C
|
||||||
|
from solar.utils import parse_database_conn
|
||||||
|
|
||||||
if C.solar_db.mode == 'sqlite':
|
_connection, _connection_details = parse_database_conn(C.solar_db)
|
||||||
|
|
||||||
|
if _connection.mode == 'sqlite':
|
||||||
from solar.dblayer.sql_client import SqlClient
|
from solar.dblayer.sql_client import SqlClient
|
||||||
if C.solar_db.backend == 'memory':
|
if _connection.database == ':memory:' or _connection.database is None:
|
||||||
client = SqlClient(C.solar_db.location,
|
opts = {'threadlocals': True,
|
||||||
threadlocals=False,
|
'autocommit': False}
|
||||||
autocommit=False)
|
_connection.database = ":memory:"
|
||||||
elif C.solar_db.backend == 'file':
|
|
||||||
client = SqlClient(
|
|
||||||
C.solar_db.location,
|
|
||||||
threadlocals=True,
|
|
||||||
autocommit=False,
|
|
||||||
pragmas=(('journal_mode', 'WAL'), ('synchronous', 'NORMAL')))
|
|
||||||
else:
|
else:
|
||||||
raise Exception('Unknown sqlite backend %s', C.solar_db.backend)
|
opts = {'threadlocals': True,
|
||||||
|
'autocommit': False,
|
||||||
|
'pragmas': (('journal_mode', 'WAL'),
|
||||||
|
('synchronous', 'NORMAL'))}
|
||||||
|
opts.update(_connection_details.toDict())
|
||||||
|
client = SqlClient(
|
||||||
|
_connection.database,
|
||||||
|
**opts)
|
||||||
|
|
||||||
elif C.solar_db.mode == 'riak':
|
elif _connection.mode == 'riak':
|
||||||
from solar.dblayer.riak_client import RiakClient
|
from solar.dblayer.riak_client import RiakClient
|
||||||
if C.solar_db.protocol == 'pbc':
|
proto = _connection_details.get('protocol', 'pbc')
|
||||||
client = RiakClient(protocol=C.solar_db.protocol,
|
opts = _connection_details.toDict()
|
||||||
|
if proto == 'pbc':
|
||||||
|
client = RiakClient(protocol=proto,
|
||||||
|
host=_connection.host,
|
||||||
|
pb_port=_connection.port,
|
||||||
|
**opts)
|
||||||
|
elif proto == 'http':
|
||||||
|
client = RiakClient(protocol=proto,
|
||||||
host=C.solar_db.host,
|
host=C.solar_db.host,
|
||||||
pb_port=C.solar_db.port)
|
http_port=_connection.port,
|
||||||
elif C.solar_db.protocol == 'http':
|
**opts)
|
||||||
client = RiakClient(protocol=C.solar_db.protocol,
|
|
||||||
host=C.solar_db.host,
|
|
||||||
http_port=C.solar_db.port)
|
|
||||||
else:
|
else:
|
||||||
raise Exception('Unknown riak protocol %s', C.solar_db.protocol)
|
raise Exception('Unknown riak protocol %s', proto)
|
||||||
else:
|
else:
|
||||||
raise Exception('Unknown dblayer backend %s', C.dblayer)
|
raise Exception('Unknown dblayer backend %s', C.solar_db)
|
||||||
|
|
||||||
ModelMeta.setup(client)
|
ModelMeta.setup(client)
|
||||||
|
|
||||||
|
@ -20,10 +20,12 @@ def _patch(obj, name, target):
|
|||||||
|
|
||||||
|
|
||||||
def patch_all():
|
def patch_all():
|
||||||
|
from solar.config import C
|
||||||
from solar.dblayer.model import ModelMeta
|
from solar.dblayer.model import ModelMeta
|
||||||
if ModelMeta._defined_models:
|
if ModelMeta._defined_models:
|
||||||
raise RuntimeError(
|
raise RuntimeError(
|
||||||
"You should run patch_multi_get before defining models")
|
"You should run patch_multi_get before defining models")
|
||||||
|
|
||||||
from solar.dblayer.model import Model
|
from solar.dblayer.model import Model
|
||||||
|
|
||||||
from solar.dblayer.gevent_helpers import get_local
|
from solar.dblayer.gevent_helpers import get_local
|
||||||
@ -31,8 +33,11 @@ def patch_all():
|
|||||||
from solar.dblayer.gevent_helpers import solar_map
|
from solar.dblayer.gevent_helpers import solar_map
|
||||||
from solar import utils
|
from solar import utils
|
||||||
|
|
||||||
_patch(Model, 'multi_get', multi_get)
|
if C.solar_db.startswith('riak'):
|
||||||
|
# patching these methods on sql
|
||||||
|
# dbs does not make sense
|
||||||
|
_patch(Model, 'multi_get', multi_get)
|
||||||
|
_patch(utils, 'solar_map', solar_map)
|
||||||
|
|
||||||
_patch(utils, 'solar_map', solar_map)
|
|
||||||
_patch(utils, 'get_local', get_local)
|
_patch(utils, 'get_local', get_local)
|
||||||
_patch(Model, '_local', get_local()())
|
_patch(Model, '_local', get_local()())
|
||||||
|
@ -609,7 +609,6 @@ class ModelMeta(type):
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def session_start(mcs):
|
def session_start(mcs):
|
||||||
clear_cache()
|
|
||||||
mcs.riak_client.session_start()
|
mcs.riak_client.session_start()
|
||||||
|
|
||||||
|
|
||||||
|
@ -195,7 +195,7 @@ class RiakObj(object):
|
|||||||
return self
|
return self
|
||||||
|
|
||||||
def delete(self):
|
def delete(self):
|
||||||
self._sql_bucket_obj.delete()
|
self.bucket.delete(self.key)
|
||||||
return self
|
return self
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@ -238,9 +238,9 @@ class IndexPage(object):
|
|||||||
self.max_results = max_results
|
self.max_results = max_results
|
||||||
self.index = index
|
self.index = index
|
||||||
if not return_terms:
|
if not return_terms:
|
||||||
self.results = tuple(x[0] for x in results)
|
self.results = list(x[0] for x in results)
|
||||||
else:
|
else:
|
||||||
self.results = tuple(results)
|
self.results = list(results)
|
||||||
|
|
||||||
if not max_results or not self.results:
|
if not max_results or not self.results:
|
||||||
self.continuation = None
|
self.continuation = None
|
||||||
@ -430,7 +430,11 @@ class SqlClient(object):
|
|||||||
def session_start(self):
|
def session_start(self):
|
||||||
clear_cache()
|
clear_cache()
|
||||||
sess = self._sql_session
|
sess = self._sql_session
|
||||||
sess.begin()
|
# TODO: (jnowak) remove this, it's a hack
|
||||||
|
# because of pytest nested calls
|
||||||
|
if getattr(sess, '_started', False):
|
||||||
|
sess.begin()
|
||||||
|
setattr(sess, '_started', True)
|
||||||
|
|
||||||
def session_end(self, result=True):
|
def session_end(self, result=True):
|
||||||
sess = self._sql_session
|
sess = self._sql_session
|
||||||
@ -439,6 +443,7 @@ class SqlClient(object):
|
|||||||
else:
|
else:
|
||||||
sess.rollback()
|
sess.rollback()
|
||||||
clear_cache()
|
clear_cache()
|
||||||
|
setattr(sess, '_started', False)
|
||||||
|
|
||||||
def delete_all(self, cls):
|
def delete_all(self, cls):
|
||||||
# naive way for SQL, we could delete whole table contents
|
# naive way for SQL, we could delete whole table contents
|
||||||
|
@ -23,7 +23,7 @@ shouldn't be used from long running processes (workers etc)
|
|||||||
def create_all():
|
def create_all():
|
||||||
|
|
||||||
import sys
|
import sys
|
||||||
if sys.executable.startswith(('python', )):
|
if not sys.executable.endswith(('python', )):
|
||||||
# auto add session to only standalone python runs
|
# auto add session to only standalone python runs
|
||||||
return
|
return
|
||||||
|
|
||||||
|
@ -43,7 +43,14 @@ def guess_action(from_, to):
|
|||||||
|
|
||||||
|
|
||||||
def create_diff(staged, commited):
|
def create_diff(staged, commited):
|
||||||
return list(dictdiffer.diff(commited, staged))
|
|
||||||
|
def listify(t):
|
||||||
|
# we need all values as lists, because we need the same behaviour
|
||||||
|
# in pre and post save situations
|
||||||
|
return list(map(listify, t)) if isinstance(t, (list, tuple)) else t
|
||||||
|
|
||||||
|
res = tuple(dictdiffer.diff(commited, staged))
|
||||||
|
return listify(res)
|
||||||
|
|
||||||
|
|
||||||
def create_logitem(resource, action, diffed, connections_diffed,
|
def create_logitem(resource, action, diffed, connections_diffed,
|
||||||
|
@ -64,8 +64,8 @@ def test_create_diff_modified(diff_for_update):
|
|||||||
|
|
||||||
assert len(diff_for_update) == 2
|
assert len(diff_for_update) == 2
|
||||||
assert set(['change']) == operations
|
assert set(['change']) == operations
|
||||||
assert vals['ip'] == ('10.0.0.2', {'value': '10.0.0.2'})
|
assert vals['ip'] == ['10.0.0.2', {'value': '10.0.0.2'}]
|
||||||
assert vals['list_val'] == ([1], {'value': [1, 2]})
|
assert vals['list_val'] == [[1], {'value': [1, 2]}]
|
||||||
|
|
||||||
|
|
||||||
def test_verify_patch_creates_expected(staged, diff_for_update, commited):
|
def test_verify_patch_creates_expected(staged, diff_for_update, commited):
|
||||||
|
@ -18,6 +18,7 @@ from pytest import mark
|
|||||||
from solar.core.resource import resource
|
from solar.core.resource import resource
|
||||||
from solar.core.resource import RESOURCE_STATE
|
from solar.core.resource import RESOURCE_STATE
|
||||||
from solar.core import signals
|
from solar.core import signals
|
||||||
|
from solar.dblayer.model import clear_cache
|
||||||
from solar.dblayer.model import ModelMeta
|
from solar.dblayer.model import ModelMeta
|
||||||
from solar.dblayer.solar_models import CommitedResource
|
from solar.dblayer.solar_models import CommitedResource
|
||||||
from solar.dblayer.solar_models import Resource as DBResource
|
from solar.dblayer.solar_models import Resource as DBResource
|
||||||
@ -51,7 +52,7 @@ def test_revert_update():
|
|||||||
resource_obj.update(commit)
|
resource_obj.update(commit)
|
||||||
operations.move_to_commited(logitem.log_action)
|
operations.move_to_commited(logitem.log_action)
|
||||||
|
|
||||||
assert logitem.diff == [('change', 'a', ('9', '10'))]
|
assert logitem.diff == [['change', 'a', ['9', '10']]]
|
||||||
assert resource_obj.args == commit
|
assert resource_obj.args == commit
|
||||||
|
|
||||||
change.revert(logitem.uid)
|
change.revert(logitem.uid)
|
||||||
@ -144,9 +145,9 @@ def test_revert_removal():
|
|||||||
assert changes[0].diff == [['remove', '', [['a', '9']]]]
|
assert changes[0].diff == [['remove', '', [['a', '9']]]]
|
||||||
operations.move_to_commited(changes[0].log_action)
|
operations.move_to_commited(changes[0].log_action)
|
||||||
|
|
||||||
ModelMeta.session_start()
|
clear_cache()
|
||||||
assert DBResource._c.obj_cache == {}
|
assert DBResource._c.obj_cache == {}
|
||||||
assert DBResource.bucket.get('test1').siblings == []
|
# assert DBResource.bucket.get('test1').siblings == []
|
||||||
|
|
||||||
with mock.patch.object(resource, 'read_meta') as mread:
|
with mock.patch.object(resource, 'read_meta') as mread:
|
||||||
mread.return_value = {
|
mread.return_value = {
|
||||||
@ -155,7 +156,7 @@ def test_revert_removal():
|
|||||||
}
|
}
|
||||||
change.revert(changes[0].uid)
|
change.revert(changes[0].uid)
|
||||||
ModelMeta.save_all_lazy()
|
ModelMeta.save_all_lazy()
|
||||||
assert len(DBResource.bucket.get('test1').siblings) == 1
|
# assert len(DBResource.bucket.get('test1').siblings) == 1
|
||||||
|
|
||||||
resource_obj = resource.load('test1')
|
resource_obj = resource.load('test1')
|
||||||
assert resource_obj.args == {
|
assert resource_obj.args == {
|
||||||
@ -227,6 +228,7 @@ def test_revert_create():
|
|||||||
assert len(staged_log) == 1
|
assert len(staged_log) == 1
|
||||||
for item in staged_log:
|
for item in staged_log:
|
||||||
operations.move_to_commited(item.log_action)
|
operations.move_to_commited(item.log_action)
|
||||||
|
|
||||||
assert resource.load_all() == []
|
assert resource.load_all() == []
|
||||||
|
|
||||||
|
|
||||||
|
@ -17,9 +17,12 @@ import io
|
|||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
|
import re
|
||||||
import subprocess
|
import subprocess
|
||||||
|
import urlparse
|
||||||
import uuid
|
import uuid
|
||||||
|
|
||||||
|
from bunch import Bunch
|
||||||
from jinja2 import Environment
|
from jinja2 import Environment
|
||||||
import yaml
|
import yaml
|
||||||
|
|
||||||
@ -156,3 +159,33 @@ def solar_map(funct, args, **kwargs):
|
|||||||
def get_local():
|
def get_local():
|
||||||
import threading
|
import threading
|
||||||
return threading.local
|
return threading.local
|
||||||
|
|
||||||
|
|
||||||
|
def parse_database_conn(name):
|
||||||
|
regex = re.compile(r'''
|
||||||
|
(?P<mode>[\w\+]+)://
|
||||||
|
(?:
|
||||||
|
(?P<username>[^:/]*)
|
||||||
|
(?::(?P<password>[^/]*))?
|
||||||
|
@)?
|
||||||
|
(?:
|
||||||
|
(?P<host>[^/:]*)
|
||||||
|
(?::(?P<port>[^/]*))?
|
||||||
|
)?
|
||||||
|
(?:/(?P<database>.*))?
|
||||||
|
''', re.X)
|
||||||
|
if not name:
|
||||||
|
raise Exception("Database connection string is empty, "
|
||||||
|
"please ensure that you set config path correctly")
|
||||||
|
if '?' in name:
|
||||||
|
name, opts = name.split('?', 1)
|
||||||
|
opts = dict(urlparse.parse_qsl(opts))
|
||||||
|
else:
|
||||||
|
opts = {}
|
||||||
|
m = regex.match(name)
|
||||||
|
if m is not None:
|
||||||
|
groups = m.groupdict()
|
||||||
|
return Bunch(groups), Bunch(opts)
|
||||||
|
else:
|
||||||
|
raise Exception("Invalid database connection string: %r "
|
||||||
|
"It should be in RFC 1738 format. " % name)
|
||||||
|
@ -7,8 +7,9 @@ tox
|
|||||||
pytest-subunit
|
pytest-subunit
|
||||||
os-testr
|
os-testr
|
||||||
|
|
||||||
# for computable inputs
|
## for computable inputs
|
||||||
lupa
|
# temporary disabled
|
||||||
|
# lupa
|
||||||
|
|
||||||
# to test if everything works on gevent
|
# to test if everything works on gevent
|
||||||
gevent
|
gevent
|
||||||
|
Loading…
Reference in New Issue
Block a user