adding unit tests and conf changes
This commit is contained in:
parent 9e4be4a111, commit b875825e0f
@@ -102,7 +102,6 @@ Running the stats system on SAIO

      local1.* ~

#. Edit /etc/rsyslog.conf and make the following change::

      $PrivDropToGroup adm

#. `mkdir -p /var/log/swift/hourly`
@@ -113,69 +112,72 @@ Running the stats system on SAIO

#. Relogin to let the group change take effect.

#. Create `/etc/swift/log-processor.conf`::

      [log-processor]
      swift_account = <your-stats-account-hash>
      user = <your-user-name>

      [log-processor-access]
      swift_account = <your-stats-account-hash>
      container_name = log_data
      log_dir = /var/log/swift/hourly/
-     source_filename_pattern = %Y%m%d%H
+     source_filename_pattern = ^
+         (?P<year>[0-9]{4})
+         (?P<month>[0-1][0-9])
+         (?P<day>[0-3][0-9])
+         (?P<hour>[0-2][0-9])
+         .*$
      class_path = swift.stats.access_processor.AccessLogProcessor
      user = <your-user-name>

      [log-processor-stats]
      swift_account = <your-stats-account-hash>
      container_name = account_stats
      log_dir = /var/log/swift/stats/
-     source_filename_pattern = %Y%m%d%H_*
      class_path = swift.stats.stats_processor.StatsLogProcessor
      account_server_conf = /etc/swift/account-server/1.conf
      user = <your-user-name>

      [log-processor-container-stats]
      swift_account = <your-stats-account-hash>
      container_name = container_stats
      log_dir = /var/log/swift/stats/
-     source_filename_pattern = container-stats-%Y%m%d%H_*
      class_path = swift.stats.stats_processor.StatsLogProcessor
      account_server_conf = /etc/swift/container-server/1.conf
      user = <your-user-name>
#. Add the following under [app:proxy-server] in `/etc/swift/proxy-server.conf`::

      log_facility = LOG_LOCAL1

#. Create a `cron` job to run once per hour to create the stats logs. In
   `/etc/cron.d/swift-stats-log-creator`::

      0 * * * * <your-user-name> swift-account-stats-logger /etc/swift/log-processor.conf

#. Create a `cron` job to run once per hour to create the container stats logs. In
   `/etc/cron.d/swift-container-stats-log-creator`::

      5 * * * * <your-user-name> swift-container-stats-logger /etc/swift/log-processor.conf

#. Create a `cron` job to run once per hour to upload the stats logs. In
   `/etc/cron.d/swift-stats-log-uploader`::

      10 * * * * <your-user-name> swift-log-uploader /etc/swift/log-processor.conf stats

#. Create a `cron` job to run once per hour to upload the container stats logs. In
   `/etc/cron.d/swift-stats-log-uploader`::

      15 * * * * <your-user-name> swift-log-uploader /etc/swift/log-processor.conf container-stats

#. Create a `cron` job to run once per hour to upload the access logs. In
   `/etc/cron.d/swift-access-log-uploader`::

      5 * * * * <your-user-name> swift-log-uploader /etc/swift/log-processor.conf access

#. Create a `cron` job to run once per hour to process the logs. In
   `/etc/cron.d/swift-stats-processor`::

      30 * * * * <your-user-name> swift-log-stats-collector /etc/swift/log-processor.conf
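The new `source_filename_pattern` in the `[log-processor-access]` section above is a regular expression with named groups rather than a strftime template. A quick way to sanity-check it against an hourly log name; this sketch assumes the uploader compiles the pattern with `re.VERBOSE` so the whitespace from the continuation lines is ignored, which is an assumption rather than something this diff shows::

      import re

      # Pattern copied from the [log-processor-access] section above.
      pattern = r"""^
          (?P<year>[0-9]{4})
          (?P<month>[0-1][0-9])
          (?P<day>[0-3][0-9])
          (?P<hour>[0-2][0-9])
          .*$"""

      match = re.match(pattern, '2011013116', re.VERBOSE)
      # e.g. {'year': '2011', 'month': '01', 'day': '31', 'hour': '16'}
      print(match.groupdict())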
After running for a few hours, you should start to see .csv files in the
log_processing_data container in the swift stats account that was created
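Each line in those .csv files is a row produced by a processor's `get_data` call; the new unit tests later in this commit expect account rows shaped like `"test_acc",1,10,1000` (account, container count, object count, bytes used). A small, hypothetical reader for one such row::

      import csv

      # Hypothetical sample row, matching what the new tests expect from
      # AccountStat.get_data: account, containers, objects, bytes used.
      row = next(csv.reader(['"test_acc",1,10,1000']))
      account, containers, objects, bytes_used = row
      print('%s: %s containers, %s objects, %s bytes' %
            (account, containers, objects, bytes_used))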
@@ -18,12 +18,13 @@ import time

from paste.deploy import appconfig
import shutil
import hashlib
import urllib

from swift.account.server import DATADIR as account_server_data_dir
from swift.container.server import DATADIR as container_server_data_dir
from swift.common.db import AccountBroker, ContainerBroker
from swift.common.utils import renamer, get_logger, readconf, mkdirs, \
-    TRUE_VALUES
+    TRUE_VALUES, remove_file
from swift.common.constraints import check_mount
from swift.common.daemon import Daemon
@@ -37,17 +38,14 @@ class DatabaseStatsCollector(Daemon):

    def __init__(self, stats_conf, stats_type, data_dir, filename_format):
        super(DatabaseStatsCollector, self).__init__(stats_conf)
-        self.target_dir = stats_conf.get('log_dir', '/var/log/swift')
        self.stats_type = stats_type
-        server_conf_loc = stats_conf.get('%s_server_conf' % stats_type,
-                                         '/etc/swift/%s-server.conf' % stats_type)
-        server_conf = appconfig('config:%s' % server_conf_loc,
-                                name='%s-server' % stats_type)
        self.data_dir = data_dir
        self.filename_format = filename_format
-        mkdirs(self.target_dir)
-        self.devices = server_conf.get('devices', '/srv/node')
-        self.mount_check = server_conf.get('mount_check',
+        self.devices = stats_conf.get('devices', '/srv/node')
+        self.mount_check = stats_conf.get('mount_check',
                                           'true').lower() in TRUE_VALUES
+        self.target_dir = stats_conf.get('log_dir', '/var/log/swift')
+        mkdirs(self.target_dir)
        self.logger = get_logger(stats_conf,
                                 log_route='%s-stats' % stats_type)
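With this change the collector pulls `devices`, `mount_check`, and `log_dir` from its own stats config instead of loading the account/container server config through `appconfig`. The new unit tests added later in this commit construct it exactly that way; a minimal sketch with SAIO-style placeholder paths::

      from swift.stats import db_stats

      # Values mirror the dict the new tests pass in; the paths are placeholders.
      stats_conf = dict(devices='/srv/node',
                        log_dir='/var/log/swift/stats/',
                        mount_check='false')
      collector = db_stats.AccountStat(stats_conf)
      collector.run_once()   # writes one stats CSV into log_dir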
@@ -69,31 +67,36 @@ class DatabaseStatsCollector(Daemon):

        mkdirs(working_dir)
        tmp_filename = os.path.join(working_dir, src_filename)
        hasher = hashlib.md5()
+        try:
            with open(tmp_filename, 'wb') as statfile:
                for device in os.listdir(self.devices):
                    if self.mount_check and not check_mount(self.devices,
                                                            device):
                        self.logger.error(
                            _("Device %s is not mounted, skipping.") % device)
                        continue
                    db_dir = os.path.join(self.devices,
                                          device,
                                          self.data_dir)
                    if not os.path.exists(db_dir):
                        self.logger.debug(
                            _("Path %s does not exist, skipping.") % db_dir)
                        continue
                    for root, dirs, files in os.walk(db_dir, topdown=False):
                        for filename in files:
                            if filename.endswith('.db'):
                                db_path = os.path.join(root, filename)
                                line_data = self.get_data(db_path)
                                if line_data:
                                    statfile.write(line_data)
                                    hasher.update(line_data)

            src_filename += hasher.hexdigest()
            renamer(tmp_filename, os.path.join(self.target_dir, src_filename))
            shutil.rmtree(working_dir, ignore_errors=True)
+        finally:
+            # clean up temp file, remove_file ignores errors
+            remove_file(tmp_filename)


class AccountStat(DatabaseStatsCollector):
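The new `finally` block guarantees the temporary stats file is removed even when a database blows up mid-walk; per the comment, `remove_file` ignores errors. A stand-in with the same assumed behavior, not Swift's actual implementation::

      import os

      def remove_file(path):
          # Best-effort unlink: a missing file or permission problem is
          # ignored, mirroring the "remove_file ignores errors" comment
          # above (assumed behavior).
          try:
              os.unlink(path)
          except OSError:
              pass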
@@ -94,8 +94,8 @@ def temptree(files, contents=''):

class FakeLogger(object):
    # a thread safe logger

-    def __init__(self):
-        self.log_dict = dict(error=[], info=[], warning=[])
+    def __init__(self, *args, **kwargs):
+        self.log_dict = dict(error=[], info=[], warning=[], debug=[])

    def error(self, *args, **kwargs):
        self.log_dict['error'].append((args, kwargs))

@@ -106,6 +106,9 @@ class FakeLogger(object):

    def warning(self, *args, **kwargs):
        self.log_dict['warning'].append((args, kwargs))

+    def debug(self, *args, **kwargs):
+        self.log_dict['debug'].append((args, kwargs))
+

class MockTrue(object):
    """
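The `debug` bucket and the permissive `__init__` signature let tests drop `FakeLogger` in wherever `get_logger` would normally be called and then assert on what got logged, which is exactly what the new db_stats test does with `log_dict['debug']`. A self-contained usage sketch, with the class body copied from this diff and trimmed to the debug path::

      class FakeLogger(object):

          def __init__(self, *args, **kwargs):
              self.log_dict = dict(error=[], info=[], warning=[], debug=[])

          def debug(self, *args, **kwargs):
              self.log_dict['debug'].append((args, kwargs))


      logger = FakeLogger()
      logger.debug("Path %s does not exist, skipping.", '/srv/node/sda/accounts')
      assert len(logger.log_dict['debug']) == 1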
@@ -1,29 +0,0 @@

# Copyright (c) 2010-2011 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# TODO: Tests

import unittest
from swift.stats import db_stats


class TestAccountStats(unittest.TestCase):

    def test_placeholder(self):
        pass


if __name__ == '__main__':
    unittest.main()
test/unit/stats/test_db_stats.py (new file, 146 lines)

@@ -0,0 +1,146 @@
# Copyright (c) 2010-2011 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.


import unittest
import os
import time
import uuid
from shutil import rmtree
from swift.stats import db_stats
from tempfile import mkdtemp
from test.unit import FakeLogger
from swift.common.db import AccountBroker, ContainerBroker
from swift.common.utils import mkdirs

class TestDbStats(unittest.TestCase):

    def setUp(self):
        self._was_logger = db_stats.get_logger
        db_stats.get_logger = FakeLogger
        self.testdir = os.path.join(mkdtemp(), 'tmp_test_db_stats')
        self.devices = os.path.join(self.testdir, 'node')
        rmtree(self.testdir, ignore_errors=1)
        mkdirs(os.path.join(self.devices, 'sda'))
        self.accounts = os.path.join(self.devices, 'sda', 'accounts')
        self.containers = os.path.join(self.devices, 'sda', 'containers')
        self.log_dir = '%s/log' % self.testdir

        self.conf = dict(devices=self.devices,
                         log_dir=self.log_dir,
                         mount_check='false')
    def tearDown(self):
        db_stats.get_logger = self._was_logger
        rmtree(self.testdir)

    def test_account_stat_get_data(self):
        stat = db_stats.AccountStat(self.conf)
        account_db = AccountBroker("%s/acc.db" % self.accounts,
                                   account='test_acc')
        account_db.initialize()
        account_db.put_container('test_container', time.time(),
                                 None, 10, 1000)
        info = stat.get_data("%s/acc.db" % self.accounts)
        self.assertEquals('''"test_acc",1,10,1000\n''', info)

    def test_container_stat_get_data(self):
        stat = db_stats.ContainerStat(self.conf)
        container_db = ContainerBroker("%s/con.db" % self.containers,
                                       account='test_acc', container='test_con')
        container_db.initialize()
        container_db.put_object('test_obj', time.time(), 10, 'text', 'faketag')
        info = stat.get_data("%s/con.db" % self.containers)
        self.assertEquals('''"test_acc","test_con",1,10\n''', info)

    def _gen_account_stat(self):
        stat = db_stats.AccountStat(self.conf)
        output_data = set()
        for i in range(10):
            account_db = AccountBroker("%s/stats-201001010%s-%s.db" %
                                       (self.accounts, i, uuid.uuid4().hex),
                                       account='test_acc_%s' % i)
            account_db.initialize()
            account_db.put_container('test_container', time.time(),
                                     None, 10, 1000)
            # this will "commit" the data
            account_db.get_info()
            output_data.add('''"test_acc_%s",1,10,1000''' % i)

        self.assertEqual(len(output_data), 10)
        return stat, output_data

    def _gen_container_stat(self):
        stat = db_stats.ContainerStat(self.conf)
        output_data = set()
        for i in range(10):
            account_db = ContainerBroker(
                "%s/container-stats-201001010%s-%s.db" % (self.containers, i,
                                                          uuid.uuid4().hex),
                account='test_acc_%s' % i, container='test_con')
            account_db.initialize()
            account_db.put_object('test_obj', time.time(), 10, 'text', 'faketag')
            # this will "commit" the data
            account_db.get_info()
            output_data.add('''"test_acc_%s","test_con",1,10''' % i)

        self.assertEqual(len(output_data), 10)
        return stat, output_data

    def test_account_stat_run_once_account(self):
        stat, output_data = self._gen_account_stat()
        stat.run_once()
        stat_file = os.listdir(self.log_dir)[0]
        with open(os.path.join(self.log_dir, stat_file)) as stat_handle:
            for i in range(10):
                data = stat_handle.readline()
                output_data.discard(data.strip())

        self.assertEqual(len(output_data), 0)

    def test_account_stat_run_once_both(self):
        acc_stat, acc_output_data = self._gen_account_stat()
        con_stat, con_output_data = self._gen_container_stat()

        acc_stat.run_once()
        stat_file = os.listdir(self.log_dir)[0]
        with open(os.path.join(self.log_dir, stat_file)) as stat_handle:
            for i in range(10):
                data = stat_handle.readline()
                acc_output_data.discard(data.strip())

        self.assertEqual(len(acc_output_data), 0)

        con_stat.run_once()
        stat_file = [f for f in os.listdir(self.log_dir) if f != stat_file][0]
        with open(os.path.join(self.log_dir, stat_file)) as stat_handle:
            for i in range(10):
                data = stat_handle.readline()
                con_output_data.discard(data.strip())

        self.assertEqual(len(con_output_data), 0)

    def test_account_stat_run_once_fail(self):
        stat, output_data = self._gen_account_stat()
        rmtree(self.accounts)
        stat.run_once()
        self.assertEquals(len(stat.logger.log_dict['debug']), 1)

    def test_not_implemented(self):
        db_stat = db_stats.DatabaseStatsCollector(self.conf, 'account',
                                                  'test_dir', 'stats-%Y%m%d%H_')
        self.assertRaises(Exception, db_stat.get_data)

if __name__ == '__main__':
    unittest.main()
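The tests drive the collector with `filename_format='stats-%Y%m%d%H_'` and only check that a file shows up in `log_dir`; based on the `src_filename += hasher.hexdigest()` line in the collector hunk above, the final name is presumably the strftime'd prefix plus an md5 of the file's contents. A sketch of that assumed composition::

      import hashlib
      import time

      # Assumed name composition (strftime prefix plus md5 of the payload);
      # the exact strftime call is not shown in this diff.
      payload = '"test_acc",1,10,1000\n'
      name = time.strftime('stats-%Y%m%d%H_') + \
          hashlib.md5(payload.encode('utf-8')).hexdigest()
      print(name)   # e.g. stats-2011013116_<md5 hexdigest>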
@@ -30,8 +30,6 @@ import logging

logging.basicConfig(level=logging.DEBUG)
LOGGER = logging.getLogger()

DEFAULT_GLOB = '%Y%m%d%H'

COMPRESSED_DATA = '\x1f\x8b\x08\x08\x87\xa5zM\x02\xffdata\x00KI,I\x04\x00c' \
                  '\xf3\xf3\xad\x04\x00\x00\x00'
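COMPRESSED_DATA above is a small gzip blob used as a fixture by the uploader tests; gunzipping it in the Python 2 environment the codebase targets should recover the tiny payload it was built from (a sketch, not part of the tests)::

      import gzip
      import io

      # Fixture copied from the test module above (a Python 2 byte string).
      COMPRESSED_DATA = '\x1f\x8b\x08\x08\x87\xa5zM\x02\xffdata\x00KI,I\x04\x00c' \
                        '\xf3\xf3\xad\x04\x00\x00\x00'

      # GzipFile should yield the original uncompressed payload.
      print(gzip.GzipFile(fileobj=io.BytesIO(COMPRESSED_DATA)).read())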