Merge "Add caching support"
commit aa22eedada
@ -90,7 +90,7 @@
 [grafana]

 #
-# From grafyaml
+# From grafyaml.builder
 #

 # URL for grafana server. (string value)
@ -98,3 +98,17 @@

 # API key for access grafana. (string value)
 #apikey = <None>
+
+
+[cache]
+
+#
+# From grafyaml.cache
+#
+
+# Directory used by grafyaml to store its cache files. (string value)
+#cachedir = ~/.cache/grafyaml
+
+# Maintain a special cache that contains an MD5 of every generated
+# dashboard. (boolean value)
+#enabled = true
@ -13,7 +13,9 @@
 # under the License.

 from oslo_config import cfg
+from oslo_log import log as logging

+from grafana_dashboards.cache import Cache
 from grafana_dashboards.grafana import Grafana
 from grafana_dashboards.parser import YamlParser

@ -34,9 +36,12 @@ CONF = cfg.CONF
 CONF.register_group(grafana_group)
 CONF.register_opts(grafana_opts, group='grafana')

+LOG = logging.getLogger(__name__)
+

 class Builder(object):
     def __init__(self):
+        self.cache = Cache()
         self.grafana = Grafana(CONF.grafana.url, CONF.grafana.apikey)
         self.parser = YamlParser()

@ -44,5 +49,9 @@ class Builder(object):
         self.parser.parse(path)
         dashboards = self.parser.data.get('dashboard', {})
         for item in dashboards:
-            data = self.parser.get_dashboard(item)
-            self.grafana.create_dashboard(data, overwrite=True)
+            data, md5 = self.parser.get_dashboard(item)
+            if self.cache.has_changed(item, md5):
+                self.grafana.create_dashboard(data, overwrite=True)
+                self.cache.set(item, md5)
+            else:
+                LOG.debug("'%s' has not changed" % item)
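With this change update_dashboard() only pushes a dashboard whose MD5 differs from the cached value. A minimal sketch of the resulting behaviour, assuming an installed grafyaml plus a config file grafyaml.conf and a dashboard file example.yaml (both names invented for illustration):

    # Illustration only: file names and config values below are assumptions.
    from oslo_config import cfg

    from grafana_dashboards import builder

    # Parse a config file that sets [grafana] url/apikey and [cache] cachedir.
    cfg.CONF(['--config-file', 'grafyaml.conf'])

    b = builder.Builder()
    b.update_dashboard('example.yaml')  # first run: dashboard sent to Grafana
    b.update_dashboard('example.yaml')  # unchanged MD5: Grafana call skipped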
grafana_dashboards/cache.py (new file)
@ -0,0 +1,74 @@
+# Copyright 2015 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+
+from dogpile.cache.region import make_region
+from oslo_config import cfg
+from oslo_log import log as logging
+
+cache_opts = [
+    cfg.StrOpt(
+        'cachedir', default='~/.cache/grafyaml',
+        help='Directory used by grafyaml to store its cache files.'),
+    cfg.BoolOpt(
+        'enabled', default=True,
+        help='Maintain a special cache that contains an MD5 of every '
+             'generated dashboard.'),
+]
+cache_group = cfg.OptGroup(
+    name='cache', title='Cache options')
+list_opts = lambda: [(cache_group, cache_opts), ]
+
+CONF = cfg.CONF
+CONF.register_group(cache_group)
+CONF.register_opts(cache_opts, group='cache')
+
+LOG = logging.getLogger(__name__)
+
+
+class Cache(object):
+
+    def __init__(self):
+        if not CONF.cache.enabled:
+            return
+
+        cache_dir = self._get_cache_dir()
+        self.region = make_region().configure(
+            'dogpile.cache.dbm',
+            arguments={
+                'filename': os.path.join(cache_dir, 'cache.dbm')
+            }
+        )
+
+    def get(self, title):
+        if CONF.cache.enabled:
+            res = self.region.get(title)
+            return res if res else None
+        return None
+
+    def has_changed(self, title, md5):
+        if CONF.cache.enabled and self.get(title) == md5:
+            return False
+        return True
+
+    def set(self, title, md5):
+        if CONF.cache.enabled:
+            self.region.set(title, md5)
+
+    def _get_cache_dir(self):
+        path = os.path.expanduser(CONF.cache.cachedir)
+        if not os.path.isdir(path):
+            os.makedirs(path)
+        return path
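For reference, a rough sketch of how the Cache class above is exercised; the temporary directory and the MD5 literal are made up for the example, and the same round trip is covered by tests/test_cache.py further below:

    # Illustration only: drives Cache.get/set/has_changed against a throwaway
    # cache directory so nothing lands in ~/.cache/grafyaml.
    import tempfile

    from oslo_config import cfg

    from grafana_dashboards.cache import Cache

    cfg.CONF([])  # parse defaults so the [cache] options are readable
    cfg.CONF.set_override('cachedir', tempfile.mkdtemp(), group='cache')

    store = Cache()
    md5 = '2095312189753de6ad47dfe20cbe97ec'
    print(store.has_changed('hello-world', md5))  # True: nothing cached yet
    store.set('hello-world', md5)
    print(store.has_changed('hello-world', md5))  # False: stored MD5 matches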
@ -43,16 +43,18 @@ class Commands(object):
         self.builder.update_dashboard(path)


+def add_command_parsers(subparsers):
+    parser_update = subparsers.add_parser('update')
+    parser_update.add_argument(
+        'path', help='colon-separated list of paths to YAML files or'
+        ' directories')
+
+
+command_opt = cfg.SubCommandOpt('action', handler=add_command_parsers)
+
+
 def main():
-    def add_command_parsers(subparsers):
-        parser_update = subparsers.add_parser('update')
-        parser_update.add_argument(
-            'path', help='colon-separated list of paths to YAML files or'
-            ' directories')
-
-    CONF.register_cli_opt(
-        cfg.SubCommandOpt('action', handler=add_command_parsers))
+    CONF.register_cli_opt(command_opt)
     logging.register_options(CONF)
     logging.setup(CONF, 'grafana-dashboard')
     config.prepare_args(sys.argv)
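Moving add_command_parsers and command_opt to module level keeps main() minimal and lets other callers register the same sub-command option. A rough sketch of how oslo.config exposes the parsed sub-command; the dashboard path is invented for illustration:

    # Illustration only: the path argument is an assumption.
    from oslo_config import cfg

    from grafana_dashboards import cmd

    cfg.CONF.register_cli_opt(cmd.command_opt)
    cfg.CONF(['update', '/etc/grafyaml/dashboards'])  # normally sys.argv[1:]

    print(cfg.CONF.action.name)  # 'update'
    print(cfg.CONF.action.path)  # '/etc/grafyaml/dashboards'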
@ -12,7 +12,9 @@
 # License for the specific language governing permissions and limitations
 # under the License.

+import hashlib
 import io
+import json
 import yaml

 from slugify import slugify
@ -26,7 +28,13 @@ class YamlParser(object):
         self.data = {}

     def get_dashboard(self, slug):
-        return self.data.get('dashboard', {}).get(slug, None)
+        data = self.data.get('dashboard', {}).get(slug, None)
+        md5 = None
+        if data:
+            content = json.dumps(data)
+            md5 = hashlib.md5(content.encode('utf-8')).hexdigest()
+
+        return data, md5

     def parse(self, fn):
         with io.open(fn, 'r', encoding='utf-8') as fp:
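get_dashboard() now returns a (data, md5) tuple, where the digest is taken over the JSON-serialised dashboard so the builder can compare it against the cache. A stand-alone illustration; the dashboard dict is invented:

    # Illustration only: mirrors the digest computed in get_dashboard().
    import hashlib
    import json

    dashboard = {'title': 'New dashboard', 'rows': []}

    content = json.dumps(dashboard)
    md5 = hashlib.md5(content.encode('utf-8')).hexdigest()
    print(md5)  # identical serialised content yields an identical digest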
@ -2,6 +2,7 @@
 # of appearance. Changing the order has an impact on the overall integration
 # process, which may cause wedges in the gate later.

+dogpile.cache
 oslo.config>=1.11.0
 oslo.log>=1.0.0,<1.1.0
 python-slugify
@ -33,7 +33,8 @@ all_files = 1
 console_scripts =
     grafana-dashboard=grafana_dashboards.cmd:main
 oslo.config.opts =
-    grafyaml = grafana_dashboards.builder:list_opts
+    grafyaml.builder = grafana_dashboards.builder:list_opts
+    grafyaml.cache = grafana_dashboards.cache:list_opts

 [upload_sphinx]
 upload-dir = doc/build/html
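Splitting the single grafyaml entry point into grafyaml.builder and grafyaml.cache gives oslo-config-generator one namespace per option module (see the tox.ini change below). A quick way to check what the installed package advertises, using the standard setuptools entry-point API; this is a verification aid, not part of the change:

    # Illustration only: lists the option groups behind each grafyaml
    # oslo.config.opts entry point after the package is installed.
    import pkg_resources

    for ep in pkg_resources.iter_entry_points('oslo.config.opts'):
        if ep.name.startswith('grafyaml'):
            list_opts = ep.load()
            # group may be an OptGroup or a plain group name
            groups = [getattr(group, 'name', group) for group, _opts in list_opts()]
            print(ep.name, groups)  # e.g. grafyaml.cache ['cache']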
@ -8,6 +8,7 @@ coverage>=3.6
 discover
 python-subunit>=0.0.18
 sphinx>=1.1.2,!=1.2.0,!=1.3b1,<1.3
+mock>=1.2
 oslosphinx>=2.2.0 # Apache-2.0
 oslotest>=1.2.0 # Apache-2.0
 testrepository>=0.0.18
@ -16,57 +16,16 @@
 # License for the specific language governing permissions and limitations
 # under the License.

-import doctest
-import json
-import os
-import re
+import fixtures
 import testtools

-from grafana_dashboards.parser import YamlParser
+from tests import conf_fixture


-def get_scenarios(fixtures_path, in_ext='yaml', out_ext='json'):
-    scenarios = []
-    files = []
-    for dirpath, dirs, fs in os.walk(fixtures_path):
-        files.extend([os.path.join(dirpath, f) for f in fs])
-
-    input_files = [f for f in files if re.match(r'.*\.{0}$'.format(in_ext), f)]
-
-    for input_filename in input_files:
-        output_candidate = re.sub(
-            r'\.{0}$'.format(in_ext), '.{0}'.format(out_ext), input_filename)
-        if output_candidate not in files:
-            output_candidate = None
-
-        scenarios.append((input_filename, {
-            'in_filename': input_filename,
-            'out_filename': output_candidate,
-        }))
-
-    return scenarios
-
-
-class TestCase(object):
+class TestCase(testtools.TestCase):
     """Test case base class for all unit tests."""

-    def _read_raw_content(self):
-        # if None assume empty file
-        if self.out_filename is None:
-            return ""
-
-        content = open(self.out_filename, 'r').read()
-
-        return content
-
-    def test_yaml_snippet(self):
-        parser = YamlParser()
-        expected_json = self._read_raw_content()
-        parser.parse(self.in_filename)
-        valid_yaml = parser.data
-
-        pretty_json = json.dumps(
-            valid_yaml, indent=4, separators=(',', ': '), sort_keys=True)
-
-        self.assertThat(pretty_json, testtools.matchers.DocTestMatches(
-            expected_json, doctest.ELLIPSIS | doctest.REPORT_NDIFF))
+    def setUp(self):
+        super(TestCase, self).setUp()
+        self.log_fixture = self.useFixture(fixtures.FakeLogger())
+        self.useFixture(conf_fixture.ConfFixture())
tests/conf_fixture.py (new file)
@ -0,0 +1,38 @@
+# Copyright 2015 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import shutil
+import tempfile
+
+import fixtures
+from oslo_config import cfg
+
+from grafana_dashboards import config
+
+CONF = cfg.CONF
+
+
+class ConfFixture(fixtures.Fixture):
+    """Fixture to manage global conf settings."""
+
+    def setUp(self):
+        super(ConfFixture, self).setUp()
+        config.prepare_args([])
+        self.path = tempfile.mkdtemp()
+        CONF.cache.cachedir = self.path
+        self.addCleanup(self._cachedir)
+        self.addCleanup(CONF.reset)
+
+    def _cachedir(self):
+        shutil.rmtree(self.path)
tests/fixtures/builder/dashboard-0001.yaml (new file, vendored)
@ -0,0 +1,2 @@
+dashboard:
+  title: New dashboard
tests/schema/base.py (new file)
@ -0,0 +1,73 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2010-2011 OpenStack Foundation
+# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
+# Copyright 2015 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import json
+import os
+import re
+
+import doctest
+import testtools
+
+from grafana_dashboards.parser import YamlParser
+
+
+def get_scenarios(fixtures_path, in_ext='yaml', out_ext='json'):
+    scenarios = []
+    files = []
+    for dirpath, dirs, fs in os.walk(fixtures_path):
+        files.extend([os.path.join(dirpath, f) for f in fs])
+
+    input_files = [f for f in files if re.match(r'.*\.{0}$'.format(in_ext), f)]
+
+    for input_filename in input_files:
+        output_candidate = re.sub(
+            r'\.{0}$'.format(in_ext), '.{0}'.format(out_ext), input_filename)
+        if output_candidate not in files:
+            output_candidate = None
+
+        scenarios.append((input_filename, {
+            'in_filename': input_filename,
+            'out_filename': output_candidate,
+        }))
+
+    return scenarios
+
+
+class TestCase(object):
+    """Test case base class for all unit tests."""
+
+    def _read_raw_content(self):
+        # if None assume empty file
+        if self.out_filename is None:
+            return ""
+
+        content = open(self.out_filename, 'r').read()
+
+        return content
+
+    def test_yaml_snippet(self):
+        parser = YamlParser()
+        expected_json = self._read_raw_content()
+        parser.parse(self.in_filename)
+        valid_yaml = parser.data
+
+        pretty_json = json.dumps(
+            valid_yaml, indent=4, separators=(',', ': '), sort_keys=True)
+
+        self.assertThat(pretty_json, testtools.matchers.DocTestMatches(
+            expected_json, doctest.ELLIPSIS | doctest.REPORT_NDIFF))
@ -17,8 +17,8 @@ import os
 from testscenarios.testcase import TestWithScenarios
 from testtools import TestCase

-from tests.base import get_scenarios
-from tests.base import TestCase as BaseTestCase
+from tests.schema.base import get_scenarios
+from tests.schema.base import TestCase as BaseTestCase


 class TestCaseSchemaDashboard(TestWithScenarios, TestCase, BaseTestCase):
tests/test_builder.py (new file)
@ -0,0 +1,42 @@
+# Copyright 2015 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+
+import mock
+
+from grafana_dashboards import builder
+from tests.base import TestCase
+
+
+class TestCaseBuilder(TestCase):
+
+    def setUp(self):
+        super(TestCaseBuilder, self).setUp()
+        self.builder = builder.Builder()
+
+    @mock.patch('grafana_dashboards.grafana.Grafana.create_dashboard')
+    def test_update_dashboard(self, mock_grafana):
+        dashboard = os.path.join(
+            os.path.dirname(__file__), 'fixtures/builder/dashboard-0001.yaml')
+
+        self.builder.update_dashboard(dashboard)
+        # Cache is empty, so we should update grafana.
+        self.assertEqual(mock_grafana.call_count, 1)
+
+        # Create a new builder to avoid duplicate dashboards.
+        builder2 = builder.Builder()
+        # Update again with same dashboard, ensure we don't update grafana.
+        builder2.update_dashboard(dashboard)
+        self.assertEqual(mock_grafana.call_count, 1)
tests/test_cache.py (new file)
@ -0,0 +1,91 @@
+# Copyright 2015 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from oslo_config import cfg
+
+from grafana_dashboards import cache
+from tests.base import TestCase
+
+CONF = cfg.CONF
+
+
+class TestCaseCache(TestCase):
+
+    dashboard = {
+        'hello-world': '2095312189753de6ad47dfe20cbe97ec',
+    }
+
+    def setUp(self):
+        super(TestCaseCache, self).setUp()
+        self.storage = None
+
+    def test_cache_has_changed(self):
+        self.storage = cache.Cache()
+        res = self.storage.has_changed(
+            'hello-world', self.dashboard['hello-world'])
+        self.assertTrue(res)
+        self.storage.set('hello-world', self.dashboard['hello-world'])
+        res = self.storage.has_changed(
+            'hello-world', self.dashboard['hello-world'])
+        self.assertFalse(res)
+
+    def test_cache_disabled_has_changed(self):
+        CONF.cache.enabled = False
+        self.storage = cache.Cache()
+        res = self.storage.has_changed(
+            'hello-world', self.dashboard['hello-world'])
+        self.assertTrue(res)
+        self.storage.set('hello-world', self.dashboard['hello-world'])
+        res = self.storage.has_changed(
+            'hello-world', self.dashboard['hello-world'])
+        self.assertTrue(res)
+
+    def test_cache_get_empty(self):
+        self.storage = cache.Cache()
+        self.assertEqual(self.storage.get('empty'), None)
+
+    def test_cache_disabled_get_empty(self):
+        CONF.cache.enabled = False
+        self.storage = cache.Cache()
+        self.assertEqual(self.storage.get('disabled'), None)
+
+    def test_cache_set_multiple(self):
+        self.storage = cache.Cache()
+        self.storage.set('hello-world', self.dashboard['hello-world'])
+        self.assertEqual(
+            self.storage.get('hello-world'), self.dashboard['hello-world'])
+        dashboard = {
+            'foobar': '14758f1afd44c09b7992073ccf00b43d'
+        }
+        dashboard['hello-world'] = self.dashboard['hello-world']
+
+        self.storage.set('foobar', dashboard['foobar'])
+        self.assertEqual(self.storage.get('foobar'), dashboard['foobar'])
+        # Make sure hello-world is still valid.
+        self.assertEqual(
+            self.storage.get('hello-world'), self.dashboard['hello-world'])
+
+    def test_cache_set_single(self):
+        self.storage = cache.Cache()
+        self.storage.set('hello-world', self.dashboard['hello-world'])
+        self.assertEqual(
+            self.storage.get('hello-world'), self.dashboard['hello-world'])
+
+    def test_cache_disabled_set_single(self):
+        CONF.cache.enabled = False
+        self.storage = cache.Cache()
+        self.storage.set('hello-world', self.dashboard['hello-world'])
+        # Make sure cache is empty.
+        self.assertEqual(
+            self.storage.get('hello-world'), None)
@ -18,15 +18,16 @@ import sys
 import fixtures
 import six
 from testtools import matchers
-from testtools import TestCase

 from grafana_dashboards import cmd
+from tests.base import TestCase


 class TestCaseCmd(TestCase):

     def setUp(self):
         super(TestCaseCmd, self).setUp()
+        cmd.CONF.reset()

     def shell(self, argstr, exitcodes=(0,)):
         orig = sys.stdout
@ -38,7 +38,7 @@ class TestCaseParser(TestCase):
         }

         # Get parsed dashboard
-        res = self.parser.get_dashboard('new-dashboard')
+        res, md5 = self.parser.get_dashboard('new-dashboard')
         self.assertEqual(res, dashboard['new-dashboard'])

         # Check for a dashboard that does not exist
@ -49,11 +49,11 @@ class TestCaseParser(TestCase):
             os.path.dirname(__file__), 'fixtures/parser/dashboard-0002.yaml')
         self.parser.parse(path)

-        res = self.parser.get_dashboard('foobar')
+        res, md5 = self.parser.get_dashboard('foobar')
         self.assertEqual(res, dashboard['foobar'])

         # Ensure our first dashboard still exists.
-        res = self.parser.get_dashboard('new-dashboard')
+        res, md5 = self.parser.get_dashboard('new-dashboard')
         self.assertEqual(res, dashboard['new-dashboard'])

     def test_parse_duplicate(self):
@ -65,7 +65,7 @@ class TestCaseParser(TestCase):
         }

         # Get parsed dashboard
-        res = self.parser.get_dashboard('new-dashboard')
+        res, md5 = self.parser.get_dashboard('new-dashboard')
         self.assertEqual(res, dashboard['new-dashboard'])

         path = os.path.join(
@ -74,5 +74,5 @@ class TestCaseParser(TestCase):
         self.assertRaises(Exception, self.parser.parse, path)

     def _get_empty_dashboard(self, name):
-        res = self.parser.get_dashboard(name)
+        res, md5 = self.parser.get_dashboard(name)
         self.assertEqual(res, None)
tox.ini
@ -14,7 +14,11 @@ commands = python setup.py test --slowest --testr-args='{posargs}'

 [testenv:genconfig]
 commands =
-  oslo-config-generator --namespace grafyaml --namespace oslo.log --output-file etc/grafyaml.conf
+  oslo-config-generator \
+  --namespace grafyaml.builder \
+  --namespace grafyaml.cache \
+  --namespace oslo.log \
+  --output-file etc/grafyaml.conf

 [testenv:pep8]
 commands = flake8