Implement log speed control via CLI, other minor changes

f3flight 2016-08-03 11:18:57 +00:00
parent 1d0b77a1d2
commit 23f3756a09
4 changed files with 73 additions and 53 deletions


@@ -101,15 +101,21 @@ def parse_args():
parser.add_argument('--rqfile', metavar='PATH', action='append',
help=('Can be specified multiple times. Path to'
' rqfile(s) in yaml format, overrides default.'))
parser.add_argument('-l', '--logs',
parser.add_argument('-l', '--logs', action='store_true',
help=('Collect logs from nodes. Logs are not collected'
' by default due to their size.'),
action='store_true')
parser.add_argument('--logs-no-default',
' by default due to their size.'))
parser.add_argument('--logs-no-default', action='store_true',
help=('Do not use default log collection parameters,'
' only use what has been set up either via -L'
' or in rqfile(s). Implies "-l".'),
action='store_true')
' only use what has been provided either via -L'
' or in rqfile(s). Implies "-l".'))
parser.add_argument('--logs-speed', type=int, metavar='MBIT/S',
help=('Limit log collection bandwidth to 90% of the'
' specified speed in Mbit/s.'))
parser.add_argument('--logs-speed-auto', action='store_true',
help=('Limit log collection bandwidth to 90% of local'
' admin interface speed. If speed detection'
' fails, a default value will be used. See'
' "logs_speed_default" in conf.py.'))
parser.add_argument('--fuel-ip', help='fuel ip address')
parser.add_argument('--fuel-user', help='fuel username')
parser.add_argument('--fuel-pass', help='fuel password')
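For context, the two new flags might be used like this (the "timmy" command name is an assumption based on the project's entry point and is not part of this diff):

timmy -l --logs-speed 100    # cap log transfers at 90% of 100 Mbit/s
timmy -l --logs-speed-auto   # cap at 90% of the detected admin interface speed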
@@ -220,6 +226,10 @@ def main(argv=None):
if args.days:
logs_conf['start'] = args.days
conf['logs'].append(logs_conf)
if args.logs_speed or args.logs_speed_auto:
conf['logs_speed_limit'] = True
if args.logs_speed:
conf['logs_speed'] = abs(args.logs_speed)
if conf['shell_mode']:
filter = conf['hard_filter']
# config cleanup for shell mode
@@ -286,7 +296,7 @@ def main(argv=None):
enough = pretty_run(args.quiet, 'Checking free space',
nm.is_enough_space)
if enough:
print('Total logs size to collect: %dMB.' % (nm.alogsize / 1024))
print('Total logs size to collect: %dMB.' % (nm.alogsize / 1000))
msg = 'Collecting and packing logs'
pretty_run(args.quiet, msg, nm.get_logs,
args=(conf['compress_timeout'],),


@@ -62,6 +62,9 @@ def load_conf(filename):
conf['logs'] = [{'path': '/var/log',
'exclude': '\.[^12]\.gz$|\.\d{2,}\.gz$'}]
conf['logs_days'] = 30
conf['logs_speed_limit'] = False # enable speed limiting of log transfers
conf['logs_speed_default'] = 100 # Mbit/s, used when autodetect fails
conf['logs_speed'] = 0 # To manually specify max bandwidth in Mbit/s
'''Shell mode - only run what was specified via command line.
Skip actionable conf fields (see timmy/nodes.py -> Node.conf_actionable);
Skip rqfile import;
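A note on how the three new defaults above (logs_speed_limit, logs_speed, logs_speed_default) combine with the CLI flags: either --logs-speed or --logs-speed-auto switches logs_speed_limit on, a positive logs_speed overrides autodetection, and logs_speed_default is the fallback when detection fails. A minimal sketch of the effective per-transfer rate, following the get_logs() logic later in this commit (the function name and arguments here are illustrative; only the conf keys and constants come from the diff):

def effective_rate(conf, detected_speed, node_count, maxthreads=10):
    # Per-transfer bandwidth cap in Mbit/s, or None when limiting is off.
    if not conf['logs_speed_limit']:
        return None
    if conf['logs_speed'] > 0:
        speed = conf['logs_speed']            # explicit --logs-speed value
    else:
        # autodetected speed, or the default when detection failed
        speed = detected_speed or conf['logs_speed_default']
    # keep 10% headroom and split the link between concurrent transfers
    return int(speed * 0.9 / min(maxthreads, node_count))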


@@ -16,7 +16,7 @@
# under the License.
project_name = 'timmy'
version = '1.12.3'
version = '1.13.0'
if __name__ == '__main__':
exit(0)


@@ -31,25 +31,24 @@ import tools
from tools import w_list, run_with_lock
from copy import deepcopy
#try:
# import fuelclient.client
# if type(fuelclient.client.APIClient) is type:
# # fuel 9.1+ (originally 10.0+)
# from fuelclient.client import APIClient as FuelClient
# FUEL_10 = True
# elif type(fuelclient.client.APIClient) is fuelclient.client.Client:
# # fuel 9.0 and below
# from fuelclient.client import Client as FuelClient
# FUEL_10 = False
#except:
# FuelClient = None
#
#try:
# from fuelclient.client import logger
# logger.handlers = []
#except:
# pass
FuelClient = None
try:
import fuelclient.client
if type(fuelclient.client.APIClient) is type:
# fuel 9.1+ (originally 10.0+)
from fuelclient.client import APIClient as FuelClient
FUEL_10 = True
elif type(fuelclient.client.APIClient) is fuelclient.client.Client:
# fuel 9.0 and below
from fuelclient.client import Client as FuelClient
FUEL_10 = False
except:
FuelClient = None
try:
from fuelclient.client import logger
logger.handlers = []
except:
pass
class Node(object):
@@ -475,7 +474,7 @@ class NodeManager(object):
if nodes_json:
self.nodes_json = tools.load_json_file(nodes_json)
else:
if (#not self.get_nodes_fuelclient() and
if (not self.get_nodes_fuelclient() and
not self.get_nodes_api() and
not self.get_nodes_cli()):
sys.exit(4)
@@ -484,7 +483,7 @@ class NodeManager(object):
for node in self.nodes.values():
if not self.filter(node, self.conf['soft_filter']):
node.filtered_out = True
if (#not self.get_release_fuel_client() and
if (not self.get_release_fuel_client() and
not self.get_release_api() and
not self.get_release_cli()):
self.logger.warning('could not get Fuel and MOS versions')
@@ -706,16 +705,16 @@ class NodeManager(object):
self.conf['fuel_port'],
request)
req = urllib2.Request(url, None, {'X-Auth-Token': self.token})
#try:
result = urllib2.urlopen(req)
code = result.getcode()
if code == 200:
return result.read()
else:
self.logger.error('NodeManager: cannot get API response'
' from %s, code %s' % (url, code))
#except:
# pass
try:
result = urllib2.urlopen(req)
code = result.getcode()
if code == 200:
return result.read()
else:
self.logger.error('NodeManager: cannot get API response'
' from %s, code %s' % (url, code))
except:
pass
def get_nodes_api(self):
self.logger.info('using API to get nodes json')
@@ -870,10 +869,13 @@ class NodeManager(object):
self.logger.error("can't get free space\nouts: %s" %
outs)
return False
self.logger.info('logsize: %s Kb, free space: %s Kb' %
(self.alogsize, fs))
self.logger.info('logsize: %s Kb * %s, free space: %s Kb' %
(self.alogsize, coefficient, fs))
if (self.alogsize*coefficient > fs):
self.logger.error('Not enough space on device')
self.logger.error('Not enough space on device, logsize: %s Kb * %s'
', free space: %s Kb' % (self.alogsize,
coefficient,
fs))
return False
else:
return True
@@ -893,7 +895,7 @@
if code != 0:
self.logger.error("Can't create archive %s" % (errs))
def find_adm_interface_speed(self, defspeed):
def find_adm_interface_speed(self):
'''Returns interface speed through which logs will be downloaded'''
for node in self.nodes.values():
if not (node.ip == 'localhost' or node.ip.startswith('127.')):
@@ -902,22 +904,27 @@
out, err, code = tools.launch_cmd(cmd, node.timeout)
if code != 0:
self.logger.warning("can't get iface speed: err: %s" % err)
return defspeed
return self.conf['logs_speed_default']
try:
speed = int(out)
except:
speed = defspeed
speed = self.conf['logs_speed_default']
return speed
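The command used to detect the admin interface speed is defined just above this hunk and is not shown here; purely as an illustration of the idea (the interface name and sysfs path are assumptions, not taken from this commit), the negotiated link speed can be read like this:

def read_link_speed(iface='eth0'):
    # Illustrative only: read the negotiated speed in Mbit/s from sysfs.
    try:
        with open('/sys/class/net/%s/speed' % iface) as f:
            return int(f.read().strip())
    except (IOError, ValueError):
        return None  # caller falls back to logs_speed_default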
@run_with_lock
def get_logs(self, timeout, fake=False, maxthreads=10, speed=100):
def get_logs(self, timeout, fake=False, maxthreads=10):
if fake:
self.logger.info('fake = True, skipping')
return
txtfl = []
speed = self.find_adm_interface_speed(speed)
speed = int(speed * 0.9 / min(maxthreads, len(self.nodes)))
pythonslowpipe = tools.slowpipe % speed
if self.conf['logs_speed_limit']:
if self.conf['logs_speed'] > 0:
speed = self.conf['logs_speed']
else:
speed = self.find_adm_interface_speed()
speed = int(speed * 0.9 / min(maxthreads, len(self.nodes)))
py_slowpipe = tools.slowpipe % speed
limitcmd = "| python -c '%s'; exit ${PIPESTATUS}" % py_slowpipe
run_items = []
for node in [n for n in self.nodes.values() if not n.filtered_out]:
if not node.logs_dict():
@@ -933,9 +940,9 @@
input += '%s\0' % fn.lstrip(os.path.abspath(os.sep))
cmd = ("tar --gzip -C %s --create --warning=no-file-changed "
" --file - --null --files-from -" % os.path.abspath(os.sep))
if not (node.ip == 'localhost' or node.ip.startswith('127.')):
cmd = ' '.join([cmd, "| python -c '%s'; exit ${PIPESTATUS}" %
pythonslowpipe])
if self.conf['logs_speed_limit']:
if not (node.ip == 'localhost' or node.ip.startswith('127.')):
cmd = ' '.join([cmd, limitcmd])
args = {'cmd': cmd,
'timeout': timeout,
'outfile': node.archivelogsfile,
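tools.slowpipe itself is not part of this diff; a rough sketch of what such a rate limiter could look like, assuming it simply relays stdin to stdout and sleeps to stay near the target rate (the real template string in tools.py may differ):

import sys
import time

def slowpipe(mbit_per_s, chunk=1024 * 1024):
    # Copy stdin to stdout, sleeping so throughput stays near mbit_per_s.
    bytes_per_s = mbit_per_s * 1000 * 1000 / 8.0
    while True:
        data = sys.stdin.read(chunk)
        if not data:
            break
        sys.stdout.write(data)
        sys.stdout.flush()
        time.sleep(len(data) / bytes_per_s)

On each node the limiter is spliced into the tar pipeline exactly as shown above, i.e. tar ... | python -c '<slowpipe>'; exit ${PIPESTATUS}, so only remote transfers are throttled while localhost collection stays unrestricted.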