Merge "Add profiling middleware in Swift"
commit e294538c20
@@ -209,3 +209,10 @@ TempURL
.. automodule:: swift.common.middleware.tempurl
    :members:
    :show-inheritance:

XProfile
==============

.. automodule:: swift.common.middleware.xprofile
    :members:
    :show-inheritance:
@@ -151,3 +151,36 @@ use = egg:swift#recon
# Default is 2592000 seconds (30 days). This is in addition to any time
# requested by delay_reaping.
# reap_warn_after = 2592000

# Note: Put this at the beginning of the pipeline to profile all middleware,
# but it is safer to put it after healthcheck.
[filter:xprofile]
use = egg:swift#xprofile
# This option enables you to switch profilers; they should inherit from the
# Python standard profiler. Currently the supported values are 'cProfile',
# 'eventlet.green.profile', etc.
# profile_module = eventlet.green.profile
#
# This prefix is combined with the process ID and a timestamp to name the
# profile data file. Make sure the executing user has permission to write
# into this path (missing path segments will be created, if necessary).
# If you enable profiling in more than one type of daemon, you must override
# it with a unique value like: /var/log/swift/profile/account.profile
# log_filename_prefix = /tmp/log/swift/profile/default.profile
#
# The profile data is dumped to local disk, using the naming rule above,
# at this interval (in seconds).
# dump_interval = 5.0
#
# Be careful: this option makes the profiler dump data into timestamped
# files, which means many files will pile up in the directory.
# dump_timestamp = false
#
# This is the path of the URL used to access the mini web UI.
# path = /__profile__
#
# Clear the data when the WSGI server shuts down.
# flush_at_shutdown = false
#
# Unwind the iterator of applications.
# unwind = false

@@ -162,3 +162,36 @@ use = egg:swift#recon
#
# Maximum amount of time to spend syncing each container per pass
# container_time = 60

# Note: Put this at the beginning of the pipeline to profile all middleware,
# but it is safer to put it after healthcheck.
[filter:xprofile]
use = egg:swift#xprofile
# This option enables you to switch profilers; they should inherit from the
# Python standard profiler. Currently the supported values are 'cProfile',
# 'eventlet.green.profile', etc.
# profile_module = eventlet.green.profile
#
# This prefix is combined with the process ID and a timestamp to name the
# profile data file. Make sure the executing user has permission to write
# into this path (missing path segments will be created, if necessary).
# If you enable profiling in more than one type of daemon, you must override
# it with a unique value like: /var/log/swift/profile/container.profile
# log_filename_prefix = /tmp/log/swift/profile/default.profile
#
# The profile data is dumped to local disk, using the naming rule above,
# at this interval (in seconds).
# dump_interval = 5.0
#
# Be careful: this option makes the profiler dump data into timestamped
# files, which means many files will pile up in the directory.
# dump_timestamp = false
#
# This is the path of the URL used to access the mini web UI.
# path = /__profile__
#
# Clear the data when the WSGI server shuts down.
# flush_at_shutdown = false
#
# Unwind the iterator of applications.
# unwind = false

@@ -233,3 +233,36 @@ use = egg:swift#recon
# increment a counter for every object whose size is <= to the given break
# points and report the result after a full scan.
# object_size_stats =

# Note: Put this at the beginning of the pipeline to profile all middleware,
# but it is safer to put it after healthcheck.
[filter:xprofile]
use = egg:swift#xprofile
# This option enables you to switch profilers; they should inherit from the
# Python standard profiler. Currently the supported values are 'cProfile',
# 'eventlet.green.profile', etc.
# profile_module = eventlet.green.profile
#
# This prefix is combined with the process ID and a timestamp to name the
# profile data file. Make sure the executing user has permission to write
# into this path (missing path segments will be created, if necessary).
# If you enable profiling in more than one type of daemon, you must override
# it with a unique value like: /var/log/swift/profile/object.profile
# log_filename_prefix = /tmp/log/swift/profile/default.profile
#
# The profile data is dumped to local disk, using the naming rule above,
# at this interval (in seconds).
# dump_interval = 5.0
#
# Be careful: this option makes the profiler dump data into timestamped
# files, which means many files will pile up in the directory.
# dump_timestamp = false
#
# This is the path of the URL used to access the mini web UI.
# path = /__profile__
#
# Clear the data when the WSGI server shuts down.
# flush_at_shutdown = false
#
# Unwind the iterator of applications.
# unwind = false

@@ -574,3 +574,36 @@ use = egg:swift#container_sync
# Updating those will have to be done manually, as knowing what the true realm
# endpoint should be cannot always be guessed.
# allow_full_urls = true

# Note: Put this at the beginning of the pipeline to profile all middleware,
# but it is safer to put it after catch_errors, gatekeeper and healthcheck.
[filter:xprofile]
use = egg:swift#xprofile
# This option enables you to switch profilers; they should inherit from the
# Python standard profiler. Currently the supported values are 'cProfile',
# 'eventlet.green.profile', etc.
# profile_module = eventlet.green.profile
#
# This prefix is combined with the process ID and a timestamp to name the
# profile data file. Make sure the executing user has permission to write
# into this path (missing path segments will be created, if necessary).
# If you enable profiling in more than one type of daemon, you must override
# it with a unique value like: /var/log/swift/profile/proxy.profile
# log_filename_prefix = /tmp/log/swift/profile/default.profile
#
# The profile data is dumped to local disk, using the naming rule above,
# at this interval (in seconds).
# dump_interval = 5.0
#
# Be careful: this option makes the profiler dump data into timestamped
# files, which means many files will pile up in the directory.
# dump_timestamp = false
#
# This is the path of the URL used to access the mini web UI.
# path = /__profile__
#
# Clear the data when the WSGI server shuts down.
# flush_at_shutdown = false
#
# Unwind the iterator of applications.
# unwind = false

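To make the option names above concrete, here is a rough sketch (not part of this commit) of how ProfileMiddleware.__init__, shown later in this diff, interprets a parsed [filter:xprofile] section. The conf dict and its values are illustrative placeholders; the sketch assumes Swift is installed so that config_true_value is importable.

    # Rough sketch of how ProfileMiddleware reads the [filter:xprofile] options.
    # The conf dict stands in for the parsed paste config section (placeholder values).
    from swift.common.utils import config_true_value

    conf = {
        'log_filename_prefix': '/var/log/swift/profile/proxy.profile',
        'dump_interval': '10.0',
        'dump_timestamp': 'false',
        'unwind': 'false',
    }

    dump_interval = float(conf.get('dump_interval', 5.0))            # seconds between dumps
    dump_timestamp = config_true_value(conf.get('dump_timestamp', 'no'))
    unwind = config_true_value(conf.get('unwind', 'no'))
    path = conf.get('path', '__profile__').replace('/', '')          # URL entry point name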
@@ -91,6 +91,7 @@ paste.filter_factory =
    list_endpoints = swift.common.middleware.list_endpoints:filter_factory
    gatekeeper = swift.common.middleware.gatekeeper:filter_factory
    container_sync = swift.common.middleware.container_sync:filter_factory
    xprofile = swift.common.middleware.xprofile:filter_factory

[build_sphinx]
all_files = 1

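The new xprofile entry point above is what lets `use = egg:swift#xprofile` in the config samples resolve to `swift.common.middleware.xprofile:filter_factory`. As a reminder of the convention, a minimal self-contained sketch of a PasteDeploy-style filter factory follows; the toy middleware and its 'greeting' option are placeholders, not part of this commit.

    # Minimal sketch of the PasteDeploy filter_factory convention used above.
    # ToyMiddleware and the 'greeting' option are illustrative placeholders.
    class ToyMiddleware(object):

        def __init__(self, app, conf):
            self.app = app
            self.greeting = conf.get('greeting', 'hello')

        def __call__(self, environ, start_response):
            environ['toy.greeting'] = self.greeting
            # pass the request straight through to the next app in the pipeline
            return self.app(environ, start_response)


    def filter_factory(global_conf, **local_conf):
        conf = global_conf.copy()
        conf.update(local_conf)      # [filter:...] options override [DEFAULT]

        def toy_filter(app):
            return ToyMiddleware(app, conf)
        return toy_filter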
0    swift/common/middleware/x_profile/__init__.py    Normal file
45   swift/common/middleware/x_profile/exceptions.py  Normal file
@@ -0,0 +1,45 @@
# Copyright (c) 2010-2012 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from swift import gettext_ as _


class ProfileException(Exception):

    def __init__(self, msg):
        self.msg = msg

    def __str__(self):
        return _('Profiling Error: %s') % self.msg


class NotFoundException(ProfileException):
    pass


class MethodNotAllowed(ProfileException):
    pass


class ODFLIBNotInstalled(ProfileException):
    pass


class PLOTLIBNotInstalled(ProfileException):
    pass


class DataLoadFailure(ProfileException):
    pass
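All profiler errors derive from ProfileException, so callers can catch either a specific case or the whole family; the ProfileMiddleware later in this diff relies on that to pick HTTP status codes. A tiny usage sketch, assuming Swift with this change is installed (the check_method helper is hypothetical):

    # Tiny usage sketch of the exception hierarchy defined above.
    from swift.common.middleware.x_profile.exceptions import (
        MethodNotAllowed, ProfileException)


    def check_method(method):
        # hypothetical helper, mirroring the check done by HTMLViewer.render
        if method not in ('GET', 'POST'):
            raise MethodNotAllowed('method %s is not allowed.' % method)


    try:
        check_method('DELETE')
    except MethodNotAllowed:
        status = '405 Method Not Allowed'    # what ProfileMiddleware returns
    except ProfileException:
        status = '500 Internal Server Error'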
536  swift/common/middleware/x_profile/html_viewer.py  Normal file
@@ -0,0 +1,536 @@
|
||||
# Copyright (c) 2010-2012 OpenStack, LLC.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import cgi
|
||||
import os
|
||||
import random
|
||||
import re
|
||||
import string
|
||||
import tempfile
|
||||
|
||||
from swift import gettext_ as _
|
||||
from exceptions import PLOTLIBNotInstalled, ODFLIBNotInstalled,\
|
||||
NotFoundException, MethodNotAllowed, DataLoadFailure, ProfileException
|
||||
from profile_model import Stats2
|
||||
|
||||
PLOTLIB_INSTALLED = True
|
||||
try:
|
||||
import matplotlib
|
||||
# use the agg backend for writing to file, not for rendering in a window;
# otherwise some platforms will complain "no display name and $DISPLAY
# environment variable"
|
||||
matplotlib.use('agg')
|
||||
import matplotlib.pyplot as plt
|
||||
except ImportError:
|
||||
PLOTLIB_INSTALLED = False
|
||||
|
||||
|
||||
empty_description = """
|
||||
The default profile of current process or the profile you requested is
|
||||
empty. <input type="submit" name="refresh" value="Refresh"/>
|
||||
"""
|
||||
|
||||
profile_tmpl = """
|
||||
<select name="profile">
|
||||
<option value="current">current</option>
|
||||
<option value="all">all</option>
|
||||
${profile_list}
|
||||
</select>
|
||||
"""
|
||||
|
||||
sort_tmpl = """
|
||||
<select name="sort">
|
||||
<option value="time">time</option>
|
||||
<option value="cumulative">cumulative</option>
|
||||
<option value="calls">calls</option>
|
||||
<option value="pcalls">pcalls</option>
|
||||
<option value="name">name</option>
|
||||
<option value="file">file</option>
|
||||
<option value="module">module</option>
|
||||
<option value="line">line</option>
|
||||
<option value="nfl">nfl</option>
|
||||
<option value="stdname">stdname</option>
|
||||
</select>
|
||||
"""
|
||||
|
||||
limit_tmpl = """
|
||||
<select name="limit">
|
||||
<option value="-1">all</option>
|
||||
<option value="0.1">10%</option>
|
||||
<option value="0.2">20%</option>
|
||||
<option value="0.3">30%</option>
|
||||
<option value="10">10</option>
|
||||
<option value="20">20</option>
|
||||
<option value="30">30</option>
|
||||
<option value="50">50</option>
|
||||
<option value="100">100</option>
|
||||
<option value="200">200</option>
|
||||
<option value="300">300</option>
|
||||
<option value="400">400</option>
|
||||
<option value="500">500</option>
|
||||
</select>
|
||||
"""
|
||||
|
||||
fulldirs_tmpl = """
|
||||
<input type="checkbox" name="fulldirs" value="1"
|
||||
${fulldir_checked}/>
|
||||
"""
|
||||
|
||||
mode_tmpl = """
|
||||
<select name="mode">
|
||||
<option value="stats">stats</option>
|
||||
<option value="callees">callees</option>
|
||||
<option value="callers">callers</option>
|
||||
</select>
|
||||
"""
|
||||
|
||||
nfl_filter_tmpl = """
|
||||
<input type="text" name="nfl_filter" value="${nfl_filter}"
|
||||
placeholder="filename part" />
|
||||
"""
|
||||
|
||||
formelements_tmpl = """
|
||||
<div>
|
||||
<table>
|
||||
<tr>
|
||||
<td>
|
||||
<strong>Profile</strong>
|
||||
<td>
|
||||
<strong>Sort</strong>
|
||||
</td>
|
||||
<td>
|
||||
<strong>Limit</strong>
|
||||
</td>
|
||||
<td>
|
||||
<strong>Full Path</strong>
|
||||
</td>
|
||||
<td>
|
||||
<strong>Filter</strong>
|
||||
</td>
|
||||
<td>
|
||||
</td>
|
||||
<td>
|
||||
<strong>Plot Metric</strong>
|
||||
</td>
|
||||
<td>
|
||||
<strong>Plot Type</strong>
|
||||
<td>
|
||||
</td>
|
||||
<td>
|
||||
<strong>Format</strong>
|
||||
</td>
|
||||
<td>
|
||||
<td>
|
||||
</td>
|
||||
<td>
|
||||
</td>
|
||||
|
||||
</tr>
|
||||
<tr>
|
||||
<td>
|
||||
${profile}
|
||||
<td>
|
||||
${sort}
|
||||
</td>
|
||||
<td>
|
||||
${limit}
|
||||
</td>
|
||||
<td>
|
||||
${fulldirs}
|
||||
</td>
|
||||
<td>
|
||||
${nfl_filter}
|
||||
</td>
|
||||
<td>
|
||||
<input type="submit" name="query" value="query"/>
|
||||
</td>
|
||||
<td>
|
||||
<select name='metric'>
|
||||
<option value='nc'>call count</option>
|
||||
<option value='cc'>primitive call count</option>
|
||||
<option value='tt'>total time</option>
|
||||
<option value='ct'>cumulative time</option>
|
||||
</select>
|
||||
</td>
|
||||
<td>
|
||||
<select name='plottype'>
|
||||
<option value='bar'>bar</option>
|
||||
<option value='pie'>pie</option>
|
||||
</select>
|
||||
<td>
|
||||
<input type="submit" name="plot" value="plot"/>
|
||||
</td>
|
||||
<td>
|
||||
<select name='format'>
|
||||
<option value='default'>binary</option>
|
||||
<option value='json'>json</option>
|
||||
<option value='csv'>csv</option>
|
||||
<option value='ods'>ODF.ods</option>
|
||||
</select>
|
||||
</td>
|
||||
<td>
|
||||
<input type="submit" name="download" value="download"/>
|
||||
</td>
|
||||
<td>
|
||||
<input type="submit" name="clear" value="clear"/>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
</div>
|
||||
"""
|
||||
|
||||
index_tmpl = """
|
||||
<html>
|
||||
<head>
|
||||
<title>profile results</title>
|
||||
<style>
|
||||
<!--
|
||||
tr.normal { background-color: #ffffff }
|
||||
tr.hover { background-color: #88eeee }
|
||||
//-->
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
<form action="${action}" method="POST">
|
||||
|
||||
<div class="form-text">
|
||||
${description}
|
||||
</div>
|
||||
<hr />
|
||||
${formelements}
|
||||
|
||||
</form>
|
||||
<pre>
|
||||
${profilehtml}
|
||||
</pre>
|
||||
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
|
||||
|
||||
class HTMLViewer(object):
|
||||
|
||||
format_dict = {'default': 'application/octet-stream',
|
||||
'json': 'application/json',
|
||||
'csv': 'text/csv',
|
||||
'ods': 'application/vnd.oasis.opendocument.spreadsheet',
|
||||
'python': 'text/html'}
|
||||
|
||||
def __init__(self, app_path, profile_module, profile_log):
|
||||
self.app_path = app_path
|
||||
self.profile_module = profile_module
|
||||
self.profile_log = profile_log
|
||||
|
||||
def _get_param(self, query_dict, key, default=None, multiple=False):
|
||||
value = query_dict.get(key, default)
|
||||
if value is None or value == '':
|
||||
return default
|
||||
if multiple:
|
||||
return value
|
||||
if isinstance(value, list):
|
||||
return eval(value[0]) if isinstance(default, int) else value[0]
|
||||
else:
|
||||
return value
|
||||
|
||||
def render(self, url, method, path_entry, query_dict, clear_callback):
|
||||
plot = self._get_param(query_dict, 'plot', None)
|
||||
download = self._get_param(query_dict, 'download', None)
|
||||
clear = self._get_param(query_dict, 'clear', None)
|
||||
action = plot or download or clear
|
||||
profile_id = self._get_param(query_dict, 'profile', 'current')
|
||||
sort = self._get_param(query_dict, 'sort', 'time')
|
||||
limit = self._get_param(query_dict, 'limit', -1)
|
||||
fulldirs = self._get_param(query_dict, 'fulldirs', 0)
|
||||
nfl_filter = self._get_param(query_dict, 'nfl_filter', '').strip()
|
||||
metric_selected = self._get_param(query_dict, 'metric', 'cc')
|
||||
plot_type = self._get_param(query_dict, 'plottype', 'bar')
|
||||
download_format = self._get_param(query_dict, 'format', 'default')
|
||||
content = ''
|
||||
# GET /__profile, POST /__profile
|
||||
if len(path_entry) == 2 and method in ['GET', 'POST']:
|
||||
log_files = self.profile_log.get_logfiles(profile_id)
|
||||
if action == 'plot':
|
||||
content, headers = self.plot(log_files, sort, limit,
|
||||
nfl_filter, metric_selected,
|
||||
plot_type)
|
||||
elif action == 'download':
|
||||
content, headers = self.download(log_files, sort, limit,
|
||||
nfl_filter, download_format)
|
||||
else:
|
||||
if action == 'clear':
|
||||
self.profile_log.clear(profile_id)
|
||||
clear_callback and clear_callback()
|
||||
content, headers = self.index_page(log_files, sort, limit,
|
||||
fulldirs, nfl_filter,
|
||||
profile_id, url)
|
||||
# GET /__profile__/all
|
||||
# GET /__profile__/current
|
||||
# GET /__profile__/profile_id
|
||||
# GET /__profile__/profile_id/
|
||||
# GET /__profile__/profile_id/account.py:50(GETorHEAD)
|
||||
# GET /__profile__/profile_id/swift/proxy/controllers
|
||||
# /account.py:50(GETorHEAD)
|
||||
# with QUERY_STRING: ?format=[default|json|csv|ods]
|
||||
elif len(path_entry) > 2 and method == 'GET':
|
||||
profile_id = path_entry[2]
|
||||
log_files = self.profile_log.get_logfiles(profile_id)
|
||||
pids = self.profile_log.get_all_pids()
|
||||
# return all profiles in a json format by default.
|
||||
# GET /__profile__/
|
||||
if profile_id == '':
|
||||
content = '{"profile_ids": ["' + '","'.join(pids) + '"]}'
|
||||
headers = [('content-type', self.format_dict['json'])]
|
||||
else:
|
||||
if len(path_entry) > 3 and path_entry[3] != '':
|
||||
nfl_filter = '/'.join(path_entry[3:])
|
||||
if path_entry[-1].find(':0') == -1:
|
||||
nfl_filter = '/' + nfl_filter
|
||||
content, headers = self.download(log_files, sort, -1,
|
||||
nfl_filter, download_format)
|
||||
headers.append(('Access-Control-Allow-Origin', '*'))
|
||||
else:
|
||||
raise MethodNotAllowed(_('method %s is not allowed.') % method)
|
||||
return content, headers
|
||||
|
||||
def index_page(self, log_files=None, sort='time', limit=-1,
|
||||
fulldirs=0, nfl_filter='', profile_id='current', url='#'):
|
||||
headers = [('content-type', 'text/html')]
|
||||
if len(log_files) == 0:
|
||||
return empty_description, headers
|
||||
try:
|
||||
stats = Stats2(*log_files)
|
||||
if not fulldirs:
|
||||
stats.strip_dirs()
|
||||
stats.sort_stats(sort)
|
||||
nfl_filter_esc =\
|
||||
nfl_filter.replace('(', '\(').replace(')', '\)')
|
||||
amount = [nfl_filter_esc, limit] if nfl_filter_esc else [limit]
|
||||
profile_html = self.generate_stats_html(stats, self.app_path,
|
||||
profile_id, *amount)
|
||||
description = "Profiling information is generated by using\
|
||||
'%s' profiler." % self.profile_module
|
||||
sort_repl = '<option value="%s">' % sort
|
||||
sort_selected = '<option value="%s" selected>' % sort
|
||||
sort = sort_tmpl.replace(sort_repl, sort_selected)
|
||||
plist = ''.join(['<option value="%s">%s</option>' % (p, p)
|
||||
for p in self.profile_log.get_all_pids()])
|
||||
profile_element = string.Template(profile_tmpl).substitute(
|
||||
{'profile_list': plist})
|
||||
profile_repl = '<option value="%s">' % profile_id
|
||||
profile_selected = '<option value="%s" selected>' % profile_id
|
||||
profile_element = profile_element.replace(profile_repl,
|
||||
profile_selected)
|
||||
limit_repl = '<option value="%s">' % limit
|
||||
limit_selected = '<option value="%s" selected>' % limit
|
||||
limit = limit_tmpl.replace(limit_repl, limit_selected)
|
||||
fulldirs_checked = 'checked' if fulldirs else ''
|
||||
fulldirs_element = string.Template(fulldirs_tmpl).substitute(
|
||||
{'fulldir_checked': fulldirs_checked})
|
||||
nfl_filter_element = string.Template(nfl_filter_tmpl).\
|
||||
substitute({'nfl_filter': nfl_filter})
|
||||
form_elements = string.Template(formelements_tmpl).substitute(
|
||||
{'description': description,
|
||||
'action': url,
|
||||
'profile': profile_element,
|
||||
'sort': sort,
|
||||
'limit': limit,
|
||||
'fulldirs': fulldirs_element,
|
||||
'nfl_filter': nfl_filter_element,
|
||||
}
|
||||
)
|
||||
content = string.Template(index_tmpl).substitute(
|
||||
{'formelements': form_elements,
|
||||
'action': url,
|
||||
'description': description,
|
||||
'profilehtml': profile_html,
|
||||
})
|
||||
return content, headers
|
||||
except:
|
||||
raise DataLoadFailure(_('Can not load profile data from %s.')
|
||||
% log_files)
|
||||
|
||||
def download(self, log_files, sort='time', limit=-1, nfl_filter='',
|
||||
output_format='default'):
|
||||
if len(log_files) == 0:
|
||||
raise NotFoundException(_('no log file found'))
|
||||
try:
|
||||
nfl_esc = nfl_filter.replace('(', '\(').replace(')', '\)')
|
||||
# remove the slash that is intentionally added in the URL
|
||||
# to avoid failure of filtering stats data.
|
||||
if nfl_esc.startswith('/'):
|
||||
nfl_esc = nfl_esc[1:]
|
||||
stats = Stats2(*log_files)
|
||||
stats.sort_stats(sort)
|
||||
if output_format == 'python':
|
||||
data = self.format_source_code(nfl_filter)
|
||||
elif output_format == 'json':
|
||||
data = stats.to_json(nfl_esc, limit)
|
||||
elif output_format == 'csv':
|
||||
data = stats.to_csv(nfl_esc, limit)
|
||||
elif output_format == 'ods':
|
||||
data = stats.to_ods(nfl_esc, limit)
|
||||
else:
|
||||
profile_tmp_all = tempfile.mktemp('.profile', 'all')
|
||||
stats.dump_stats(profile_tmp_all)
|
||||
data = open(profile_tmp_all).read()
|
||||
os.remove(profile_tmp_all)
|
||||
return data, [('content-type', self.format_dict[output_format])]
|
||||
except ODFLIBNotInstalled as ex:
|
||||
raise ex
|
||||
except Exception as ex:
|
||||
raise ProfileException(_('Data download error: %s') % ex)
|
||||
|
||||
def plot(self, log_files, sort='time', limit=10, nfl_filter='',
|
||||
metric_selected='cc', plot_type='bar'):
|
||||
if not PLOTLIB_INSTALLED:
|
||||
raise PLOTLIBNotInstalled(_('python-matplotlib not installed.'))
|
||||
if len(log_files) == 0:
|
||||
raise NotFoundException(_('no log file found'))
|
||||
try:
|
||||
stats = Stats2(*log_files)
|
||||
stats.sort_stats(sort)
|
||||
stats_dict = stats.stats
|
||||
__, func_list = stats.get_print_list([nfl_filter, limit])
|
||||
nfls = []
|
||||
performance = []
|
||||
names = {'nc': 'Total Call Count', 'cc': 'Primitive Call Count',
|
||||
'tt': 'Total Time', 'ct': 'Cumulative Time'}
|
||||
for func in func_list:
|
||||
cc, nc, tt, ct, __ = stats_dict[func]
|
||||
metric = {'cc': cc, 'nc': nc, 'tt': tt, 'ct': ct}
|
||||
nfls.append(func[2])
|
||||
performance.append(metric[metric_selected])
|
||||
y_pos = range(len(nfls))
|
||||
error = [random.random() for __ in y_pos]
|
||||
plt.clf()
|
||||
if plot_type == 'pie':
|
||||
plt.pie(x=performance, explode=None, labels=nfls,
|
||||
autopct='%1.1f%%')
|
||||
else:
|
||||
plt.barh(y_pos, performance, xerr=error, align='center',
|
||||
alpha=0.4)
|
||||
plt.yticks(y_pos, nfls)
|
||||
plt.xlabel(names[metric_selected])
|
||||
plt.title('Profile Statistics (by %s)' % names[metric_selected])
|
||||
#plt.gcf().tight_layout(pad=1.2)
|
||||
profile_img = tempfile.mktemp('.png', 'plot')
|
||||
plt.savefig(profile_img, dpi=300)
|
||||
data = open(profile_img).read()
|
||||
os.remove(profile_img)
|
||||
return data, [('content-type', 'image/jpg')]
|
||||
except Exception as ex:
|
||||
raise ProfileException(_('plotting results failed due to %s') % ex)
|
||||
|
||||
def format_source_code(self, nfl):
|
||||
nfls = re.split('[:()]', nfl)
|
||||
file_path = nfls[0]
|
||||
try:
|
||||
lineno = int(nfls[1])
|
||||
except:
|
||||
lineno = 0
|
||||
# for security reasons, this needs to be fixed.
|
||||
if not file_path.endswith('.py'):
|
||||
return _('The file type are forbidden to access!')
|
||||
try:
|
||||
data = []
|
||||
i = 0
|
||||
with open(file_path) as f:
|
||||
lines = f.readlines()
|
||||
max_width = str(len(str(len(lines))))
|
||||
fmt = '<span id="L%d" rel="#L%d">%' + max_width\
|
||||
+ 'd|<code>%s</code></span>'
|
||||
for line in lines:
|
||||
l = cgi.escape(line, quote=None)
|
||||
i = i + 1
|
||||
if i == lineno:
|
||||
fmt2 = '<span id="L%d" style="background-color: \
|
||||
rgb(127,255,127)">%' + max_width +\
|
||||
'd|<code>%s</code></span>'
|
||||
data.append(fmt2 % (i, i, l))
|
||||
else:
|
||||
data.append(fmt % (i, i, i, l))
|
||||
data = ''.join(data)
|
||||
except Exception:
|
||||
return _('Can not access the file %s.') % file_path
|
||||
return '<pre>%s</pre>' % data
|
||||
|
||||
def generate_stats_html(self, stats, app_path, profile_id, *selection):
|
||||
html = []
|
||||
for filename in stats.files:
|
||||
html.append('<p>%s</p>' % filename)
|
||||
try:
|
||||
for func in stats.top_level:
|
||||
html.append('<p>%s</p>' % func[2])
|
||||
html.append('%s function calls' % stats.total_calls)
|
||||
if stats.total_calls != stats.prim_calls:
|
||||
html.append("(%d primitive calls)" % stats.prim_calls)
|
||||
html.append('in %.3f seconds' % stats.total_tt)
|
||||
if stats.fcn_list:
|
||||
stat_list = stats.fcn_list[:]
|
||||
msg = "<p>Ordered by: %s</p>" % stats.sort_type
|
||||
else:
|
||||
stat_list = stats.stats.keys()
|
||||
msg = '<p>Random listing order was used</p>'
|
||||
for sel in selection:
|
||||
stat_list, msg = stats.eval_print_amount(sel, stat_list, msg)
|
||||
html.append(msg)
|
||||
html.append('<table style="border-width: 1px">')
|
||||
if stat_list:
|
||||
html.append('<tr><th>#</th><th>Call Count</th>\
|
||||
<th>Total Time</th><th>Time/Call</th>\
|
||||
<th>Cumulative Time</th>\
|
||||
<th>Cumulative Time/Call</th>\
|
||||
<th>Filename:Lineno(Function)</th>\
|
||||
<th>JSON</th>\
|
||||
</tr>')
|
||||
count = 0
|
||||
for func in stat_list:
|
||||
count = count + 1
|
||||
html.append('<tr onMouseOver="this.className=\'hover\'"\
|
||||
onMouseOut="this.className=\'normal\'">\
|
||||
<td>%d)</td>' % count)
|
||||
cc, nc, tt, ct, __ = stats.stats[func]
|
||||
c = str(nc)
|
||||
if nc != cc:
|
||||
c = c + '/' + str(cc)
|
||||
html.append('<td>%s</td>' % c)
|
||||
html.append('<td>%f</td>' % tt)
|
||||
if nc == 0:
|
||||
html.append('<td>-</td>')
|
||||
else:
|
||||
html.append('<td>%f</td>' % (float(tt) / nc))
|
||||
html.append('<td>%f</td>' % ct)
|
||||
if cc == 0:
|
||||
html.append('<td>-</td>')
|
||||
else:
|
||||
html.append('<td>%f</td>' % (float(ct) / cc))
|
||||
nfls = cgi.escape(stats.func_std_string(func))
|
||||
if nfls.split(':')[0] not in ['', 'profile'] and\
|
||||
os.path.isfile(nfls.split(':')[0]):
|
||||
html.append('<td><a href="%s/%s%s?format=python#L%d">\
|
||||
%s</a></td>' % (app_path, profile_id,
|
||||
nfls, func[1], nfls))
|
||||
else:
|
||||
html.append('<td>%s</td>' % nfls)
|
||||
if not nfls.startswith('/'):
|
||||
nfls = '/' + nfls
|
||||
html.append('<td><a href="%s/%s%s?format=json">\
|
||||
--></a></td></tr>' % (app_path,
|
||||
profile_id, nfls))
|
||||
except Exception as ex:
|
||||
html.append("Exception:" % ex.message)
|
||||
return ''.join(html)
|
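HTMLViewer above serves both the mini web UI and the machine-readable endpoints described in the xprofile module docstring later in this diff. A minimal sketch of querying them over HTTP follows; the host and port, and the assumption that a Swift server with the xprofile filter enabled is running at localhost:8080, are illustrative only.

    # Minimal sketch: query the profiling endpoints served by HTMLViewer.
    # Assumes a server with the xprofile filter is listening on localhost:8080.
    import json

    try:
        from urllib.request import urlopen    # Python 3
    except ImportError:
        from urllib2 import urlopen           # Python 2

    base = 'http://localhost:8080/__profile__'

    # list all available profile ids (returned as JSON)
    ids = json.loads(urlopen(base + '/').read().decode('utf-8'))['profile_ids']

    # fetch the aggregated statistics of the current worker in JSON
    stats = json.loads(
        urlopen(base + '/current?format=json').read().decode('utf-8'))
    print('profiles: %s, total calls: %s' % (ids, stats['total_calls']))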
281  swift/common/middleware/x_profile/profile_model.py  Normal file
@@ -0,0 +1,281 @@
|
||||
# Copyright (c) 2010-2012 OpenStack, LLC.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import glob
|
||||
import json
|
||||
import os
|
||||
import pstats
|
||||
import tempfile
|
||||
import time
|
||||
|
||||
from swift import gettext_ as _
|
||||
from swift.common.middleware.x_profile.exceptions import ODFLIBNotInstalled
|
||||
|
||||
|
||||
ODFLIB_INSTALLED = True
|
||||
try:
|
||||
from odf.opendocument import OpenDocumentSpreadsheet
|
||||
from odf.table import Table, TableRow, TableCell
|
||||
from odf.text import P
|
||||
except ImportError:
|
||||
ODFLIB_INSTALLED = False
|
||||
|
||||
|
||||
class Stats2(pstats.Stats):
|
||||
|
||||
def __init__(self, *args, **kwds):
|
||||
pstats.Stats.__init__(self, *args, **kwds)
|
||||
|
||||
def func_to_dict(self, func):
|
||||
return {'module': func[0], 'line': func[1], 'function': func[2]}
|
||||
|
||||
def func_std_string(self, func):
|
||||
return pstats.func_std_string(func)
|
||||
|
||||
def to_json(self, *selection):
|
||||
d = dict()
|
||||
d['files'] = [f for f in self.files]
|
||||
d['prim_calls'] = (self.prim_calls)
|
||||
d['total_calls'] = (self.total_calls)
|
||||
if hasattr(self, 'sort_type'):
|
||||
d['sort_type'] = self.sort_type
|
||||
else:
|
||||
d['sort_type'] = 'random'
|
||||
d['total_tt'] = (self.total_tt)
|
||||
if self.fcn_list:
|
||||
stat_list = self.fcn_list[:]
|
||||
else:
|
||||
stat_list = self.stats.keys()
|
||||
for s in selection:
|
||||
stat_list, __ = self.eval_print_amount(s, stat_list, '')
|
||||
|
||||
self.calc_callees()
|
||||
function_calls = []
|
||||
for func in stat_list:
|
||||
cc, nc, tt, ct, callers = self.stats[func]
|
||||
fdict = dict()
|
||||
fdict.update(self.func_to_dict(func))
|
||||
fdict.update({'cc': (cc), 'nc': (nc), 'tt': (tt),
|
||||
'ct': (ct)})
|
||||
if self.all_callees:
|
||||
fdict.update({'callees': []})
|
||||
for key in self.all_callees[func]:
|
||||
cee = self.func_to_dict(key)
|
||||
metric = self.all_callees[func][key]
|
||||
# FIXME: the eventlet profiler doesn't provide the full list
# of metrics
|
||||
if type(metric) is tuple:
|
||||
cc1, nc1, tt1, ct1 = metric
|
||||
cee.update({'cc': cc1, 'nc': nc1, 'tt': tt1,
|
||||
'ct': ct1})
|
||||
else:
|
||||
cee['nc'] = metric
|
||||
fdict['callees'].append(cee)
|
||||
cer = []
|
||||
for caller in callers:
|
||||
fd = self.func_to_dict(caller)
|
||||
metric2 = callers[caller]
|
||||
if isinstance(metric2, tuple):
|
||||
cc2, nc2, tt2, ct2 = metric2
|
||||
fd.update({'cc': cc2, 'nc': nc2, 'tt': tt2, 'ct': ct2})
|
||||
else:
|
||||
fd.update({'nc': metric2})
|
||||
cer.append(fd)
|
||||
fdict.update({'callers': cer})
|
||||
function_calls.append(fdict)
|
||||
d['stats'] = function_calls
|
||||
return json.dumps(d, indent=2)
|
||||
|
||||
def to_csv(self, *selection):
|
||||
if self.fcn_list:
|
||||
stat_list = self.fcn_list[:]
|
||||
order_text = "Ordered by: " + self.sort_type + '\r\n'
|
||||
else:
|
||||
stat_list = self.stats.keys()
|
||||
order_text = "Random listing order was used\r\n"
|
||||
for s in selection:
|
||||
stat_list, __ = self.eval_print_amount(s, stat_list, '')
|
||||
|
||||
csv = '%d function calls (%d primitive calls) in %.6f seconds.' % (
|
||||
self.total_calls, self.prim_calls, self.total_tt)
|
||||
csv = csv + order_text + 'call count(nc), primitive call count(cc), \
|
||||
total time(tt), time per call, \
|
||||
cumulative time(ct), time per call, \
|
||||
function\r\n'
|
||||
for func in stat_list:
|
||||
cc, nc, tt, ct, __ = self.stats[func]
|
||||
tpc = '' if nc == 0 else '%3f' % (tt / nc)
|
||||
cpc = '' if cc == 0 else '%3f' % (ct / cc)
|
||||
fn = '%s:%d(%s)' % (func[0], func[1], func[2])
|
||||
csv = csv + '%d,%d,%3f,%s,%3f,%s,%s\r\n' % (
|
||||
nc, cc, tt, tpc, ct, cpc, fn)
|
||||
return csv
|
||||
|
||||
def to_ods(self, *selection):
|
||||
if not ODFLIB_INSTALLED:
|
||||
raise ODFLIBNotInstalled(_('odfpy not installed.'))
|
||||
if self.fcn_list:
|
||||
stat_list = self.fcn_list[:]
|
||||
order_text = " Ordered by: " + self.sort_type + '\n'
|
||||
else:
|
||||
stat_list = self.stats.keys()
|
||||
order_text = " Random listing order was used\n"
|
||||
for s in selection:
|
||||
stat_list, __ = self.eval_print_amount(s, stat_list, '')
|
||||
spreadsheet = OpenDocumentSpreadsheet()
|
||||
table = Table(name="Profile")
|
||||
for fn in self.files:
|
||||
tcf = TableCell()
|
||||
tcf.addElement(P(text=fn))
|
||||
trf = TableRow()
|
||||
trf.addElement(tcf)
|
||||
table.addElement(trf)
|
||||
|
||||
tc_summary = TableCell()
|
||||
summary_text = '%d function calls (%d primitive calls) in %.6f \
|
||||
seconds' % (self.total_calls, self.prim_calls,
|
||||
self.total_tt)
|
||||
tc_summary.addElement(P(text=summary_text))
|
||||
tr_summary = TableRow()
|
||||
tr_summary.addElement(tc_summary)
|
||||
table.addElement(tr_summary)
|
||||
|
||||
tc_order = TableCell()
|
||||
tc_order.addElement(P(text=order_text))
|
||||
tr_order = TableRow()
|
||||
tr_order.addElement(tc_order)
|
||||
table.addElement(tr_order)
|
||||
|
||||
tr_header = TableRow()
|
||||
tc_cc = TableCell()
|
||||
tc_cc.addElement(P(text='Total Call Count'))
|
||||
tr_header.addElement(tc_cc)
|
||||
|
||||
tc_pc = TableCell()
|
||||
tc_pc.addElement(P(text='Primitive Call Count'))
|
||||
tr_header.addElement(tc_pc)
|
||||
|
||||
tc_tt = TableCell()
|
||||
tc_tt.addElement(P(text='Total Time(seconds)'))
|
||||
tr_header.addElement(tc_tt)
|
||||
|
||||
tc_pc = TableCell()
|
||||
tc_pc.addElement(P(text='Time Per call(seconds)'))
|
||||
tr_header.addElement(tc_pc)
|
||||
|
||||
tc_ct = TableCell()
|
||||
tc_ct.addElement(P(text='Cumulative Time(seconds)'))
|
||||
tr_header.addElement(tc_ct)
|
||||
|
||||
tc_pt = TableCell()
|
||||
tc_pt.addElement(P(text='Cumulative Time per call(seconds)'))
|
||||
tr_header.addElement(tc_pt)
|
||||
|
||||
tc_nfl = TableCell()
|
||||
tc_nfl.addElement(P(text='filename:lineno(function)'))
|
||||
tr_header.addElement(tc_nfl)
|
||||
|
||||
table.addElement(tr_header)
|
||||
|
||||
for func in stat_list:
|
||||
cc, nc, tt, ct, __ = self.stats[func]
|
||||
tr_header = TableRow()
|
||||
tc_nc = TableCell()
|
||||
tc_nc.addElement(P(text=nc))
|
||||
tr_header.addElement(tc_nc)
|
||||
|
||||
tc_pc = TableCell()
|
||||
tc_pc.addElement(P(text=cc))
|
||||
tr_header.addElement(tc_pc)
|
||||
|
||||
tc_tt = TableCell()
|
||||
tc_tt.addElement(P(text=tt))
|
||||
tr_header.addElement(tc_tt)
|
||||
|
||||
tc_tpc = TableCell()
|
||||
tc_tpc.addElement(P(text=(None if nc == 0 else float(tt) / nc)))
|
||||
tr_header.addElement(tc_tpc)
|
||||
|
||||
tc_ct = TableCell()
|
||||
tc_ct.addElement(P(text=ct))
|
||||
tr_header.addElement(tc_ct)
|
||||
|
||||
tc_tpt = TableCell()
|
||||
tc_tpt.addElement(P(text=(None if cc == 0 else float(ct) / cc)))
|
||||
tr_header.addElement(tc_tpt)
|
||||
|
||||
tc_nfl = TableCell()
|
||||
tc_nfl.addElement(P(text=func))
|
||||
tr_header.addElement(tc_nfl)
|
||||
table.addElement(tr_header)
|
||||
|
||||
spreadsheet.spreadsheet.addElement(table)
|
||||
tmp_ods = tempfile.mktemp('.ods', 'stats')
|
||||
spreadsheet.save(tmp_ods, False)
|
||||
data = open(tmp_ods).read()
|
||||
os.remove(tmp_ods)
|
||||
return data
|
||||
|
||||
|
||||
class ProfileLog(object):
|
||||
|
||||
def __init__(self, log_filename_prefix, dump_timestamp):
|
||||
self.log_filename_prefix = log_filename_prefix
|
||||
self.dump_timestamp = dump_timestamp
|
||||
|
||||
def get_all_pids(self):
|
||||
profile_ids = [l.replace(self.log_filename_prefix, '') for l
|
||||
in glob.glob(self.log_filename_prefix + '*')
|
||||
if not l.endswith('.tmp')]
|
||||
return sorted(profile_ids, reverse=True)
|
||||
|
||||
def get_logfiles(self, id_or_name):
|
||||
# The first file with timestamp in the sorted log_files
|
||||
# (PREFIX)(PROCESS_ID)-(TIMESTAMP)
|
||||
if id_or_name in ['all']:
|
||||
if self.dump_timestamp:
|
||||
latest_dict = {}
|
||||
for pid in self.get_all_pids():
|
||||
[process_id, __] = pid.split('-')
|
||||
if process_id not in latest_dict.keys():
|
||||
latest_dict[process_id] = self.log_filename_prefix +\
|
||||
pid
|
||||
log_files = latest_dict.values()
|
||||
else:
|
||||
log_files = [l for l in glob.glob(self.log_filename_prefix
|
||||
+ '*') if not l.endswith('.tmp')]
|
||||
else:
|
||||
pid = str(os.getpid()) if id_or_name in [None, '', 'current']\
|
||||
else id_or_name
|
||||
log_files = [l for l in glob.glob(self.log_filename_prefix +
|
||||
pid + '*') if not l.endswith('.tmp')]
|
||||
if len(log_files) > 0:
|
||||
log_files = sorted(log_files, reverse=True)[0:1]
|
||||
return log_files
|
||||
|
||||
def dump_profile(self, profiler, pid):
|
||||
if self.log_filename_prefix:
|
||||
pfn = self.log_filename_prefix + str(pid)
|
||||
if self.dump_timestamp:
|
||||
pfn = pfn + "-" + str(time.time())
|
||||
tmpfn = pfn + ".tmp"
|
||||
profiler.dump_stats(tmpfn)
|
||||
os.rename(tmpfn, pfn)
|
||||
return pfn
|
||||
|
||||
def clear(self, id_or_name):
|
||||
log_files = self.get_logfiles(id_or_name)
|
||||
for l in log_files:
|
||||
os.path.exists(l) and os.remove(l)
|
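ProfileLog above writes ordinary profiler dump files named `<log_filename_prefix><pid>[-<timestamp>]`, so they can also be inspected offline with the standard library. A small sketch; the dump path is a placeholder for a file actually produced by dump_profile().

    # Offline inspection of a dump written by ProfileLog.dump_profile().
    # The path below is a placeholder for <log_filename_prefix><pid>.
    import pstats

    stats = pstats.Stats('/tmp/log/swift/profile/default.profile12345')
    stats.strip_dirs()               # shorten file names, as the web UI does by default
    stats.sort_stats('cumulative')   # same sort keys as the UI's "sort" dropdown
    stats.print_stats(20)            # show the 20 most expensive entries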
279  swift/common/middleware/xprofile.py  Normal file
@@ -0,0 +1,279 @@
|
||||
# Copyright (c) 2010-2012 OpenStack, LLC.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""
|
||||
Profiling middleware for Swift Servers.
|
||||
|
||||
The current implementation is based on the eventlet-aware profiler. (In the
future, more profilers could be added to collect more data for analysis.)
It profiles all incoming requests and accumulates CPU timing statistics
for performance tuning and optimization. A mini web UI is also provided
for analyzing the profiling data; it can be accessed from the URLs below.
|
||||
|
||||
Index page for browse profile data::
|
||||
|
||||
http://SERVER_IP:PORT/__profile__
|
||||
|
||||
List all profiles to return profile ids in json format::
|
||||
|
||||
http://SERVER_IP:PORT/__profile__/
|
||||
http://SERVER_IP:PORT/__profile__/all
|
||||
|
||||
Retrieve specific profile data in different formats::
|
||||
|
||||
http://SERVER_IP:PORT/__profile__/PROFILE_ID?format=[default|json|csv|ods]
|
||||
http://SERVER_IP:PORT/__profile__/current?format=[default|json|csv|ods]
|
||||
http://SERVER_IP:PORT/__profile__/all?format=[default|json|csv|ods]
|
||||
|
||||
Retrieve metrics from specific function in json format::
|
||||
|
||||
http://SERVER_IP:PORT/__profile__/PROFILE_ID/NFL?format=json
|
||||
http://SERVER_IP:PORT/__profile__/current/NFL?format=json
|
||||
http://SERVER_IP:PORT/__profile__/all/NFL?format=json
|
||||
|
||||
NFL is defined as the concatenation of file name, function name and the first
line number.
|
||||
e.g.::
|
||||
account.py:50(GETorHEAD)
|
||||
or with full path:
|
||||
opt/stack/swift/swift/proxy/controllers/account.py:50(GETorHEAD)
|
||||
|
||||
A list of URL examples:
|
||||
|
||||
http://localhost:8080/__profile__ (proxy server)
|
||||
http://localhost:6000/__profile__/all (object server)
|
||||
http://localhost:6001/__profile__/current (container server)
|
||||
http://localhost:6002/__profile__/12345?format=json (account server)
|
||||
|
||||
The profiling middleware can be configured in the paste configuration file of
WSGI servers such as the proxy, account, container and object servers. Please
refer to the sample configuration files in the etc directory.
|
||||
|
||||
The profiling data is provided in four formats: binary (by default), json,
csv and an ODF spreadsheet; the last requires the odfpy library to be
installed:

    sudo pip install odfpy

There is also a simple visualization capability enabled by the matplotlib
toolkit; it must also be installed if you want to visualize the statistics:

    sudo apt-get install python-matplotlib
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
|
||||
from eventlet import greenthread, GreenPool, patcher
|
||||
import eventlet.green.profile as eprofile
|
||||
|
||||
from swift import gettext_ as _
|
||||
from swift.common.utils import get_logger, config_true_value
|
||||
from swift.common.swob import Request
|
||||
from x_profile.exceptions import NotFoundException, MethodNotAllowed,\
|
||||
ProfileException
|
||||
from x_profile.html_viewer import HTMLViewer
|
||||
from x_profile.profile_model import ProfileLog
|
||||
|
||||
# True if we are running on Python 3.
|
||||
PY3 = sys.version_info[0] == 3
|
||||
|
||||
if PY3: # pragma: no cover
|
||||
text_type = str
|
||||
else:
|
||||
text_type = unicode
|
||||
|
||||
|
||||
def bytes_(s, encoding='utf-8', errors='strict'):
|
||||
if isinstance(s, text_type): # pragma: no cover
|
||||
return s.encode(encoding, errors)
|
||||
return s
|
||||
|
||||
try:
|
||||
from urllib.parse import parse_qs
|
||||
except ImportError:
|
||||
try:
|
||||
from urlparse import parse_qs
|
||||
except ImportError: # pragma: no cover
|
||||
from cgi import parse_qs
|
||||
|
||||
|
||||
DEFAULT_PROFILE_PREFIX = '/tmp/log/swift/profile/default.profile'
|
||||
|
||||
# unwind the iterator; it may call start_response, do lots of work, etc
|
||||
PROFILE_EXEC_EAGER = """
|
||||
app_iter = self.app(environ, start_response)
|
||||
app_iter_ = list(app_iter)
|
||||
if hasattr(app_iter, 'close'):
|
||||
app_iter.close()
|
||||
"""
|
||||
|
||||
# don't unwind the iterator (don't consume resources)
|
||||
PROFILE_EXEC_LAZY = """
|
||||
app_iter_ = self.app(environ, start_response)
|
||||
"""
|
||||
|
||||
thread = patcher.original('thread') # non-monkeypatched module needed
|
||||
|
||||
|
||||
# This monkey patch fixes a problem in the eventlet profile tool:
# it cannot accumulate profiling results across multiple calls
# to runcall and runctx.
|
||||
def new_setup(self):
|
||||
self._has_setup = True
|
||||
self.cur = None
|
||||
self.timings = {}
|
||||
self.current_tasklet = greenthread.getcurrent()
|
||||
self.thread_id = thread.get_ident()
|
||||
self.simulate_call("profiler")
|
||||
|
||||
|
||||
def new_runctx(self, cmd, globals, locals):
|
||||
if not getattr(self, '_has_setup', False):
|
||||
self._setup()
|
||||
try:
|
||||
return self.base.runctx(self, cmd, globals, locals)
|
||||
finally:
|
||||
self.TallyTimings()
|
||||
|
||||
|
||||
def new_runcall(self, func, *args, **kw):
|
||||
if not getattr(self, '_has_setup', False):
|
||||
self._setup()
|
||||
try:
|
||||
return self.base.runcall(self, func, *args, **kw)
|
||||
finally:
|
||||
self.TallyTimings()
|
||||
|
||||
|
||||
class ProfileMiddleware(object):
|
||||
|
||||
def __init__(self, app, conf):
|
||||
self.app = app
|
||||
self.logger = get_logger(conf, log_route='profile')
|
||||
self.log_filename_prefix = conf.get('log_filename_prefix',
|
||||
DEFAULT_PROFILE_PREFIX)
|
||||
dirname = os.path.dirname(self.log_filename_prefix)
|
||||
# Note: this may fail with a permission error; it is better to create
# the directory and grant the current user access to it in advance.
|
||||
if not os.path.exists(dirname):
|
||||
os.makedirs(dirname)
|
||||
self.dump_interval = float(conf.get('dump_interval', 5.0))
|
||||
self.dump_timestamp = config_true_value(conf.get(
|
||||
'dump_timestamp', 'no'))
|
||||
self.flush_at_shutdown = config_true_value(conf.get(
|
||||
'flush_at_shutdown', 'no'))
|
||||
self.path = conf.get('path', '__profile__').replace('/', '')
|
||||
self.unwind = config_true_value(conf.get('unwind', 'no'))
|
||||
self.profile_module = conf.get('profile_module',
|
||||
'eventlet.green.profile')
|
||||
self.profiler = get_profiler(self.profile_module)
|
||||
self.profile_log = ProfileLog(self.log_filename_prefix,
|
||||
self.dump_timestamp)
|
||||
self.viewer = HTMLViewer(self.path, self.profile_module,
|
||||
self.profile_log)
|
||||
self.dump_pool = GreenPool(1000)
|
||||
self.last_dump_at = None
|
||||
|
||||
def __del__(self):
|
||||
if self.flush_at_shutdown:
|
||||
self.profile_log.clear(str(os.getpid()))
|
||||
|
||||
def _combine_body_qs(self, request):
|
||||
wsgi_input = request.environ['wsgi.input']
|
||||
query_dict = request.params
|
||||
qs_in_body = wsgi_input.read()
|
||||
query_dict.update(parse_qs(qs_in_body, keep_blank_values=True,
|
||||
strict_parsing=False))
|
||||
return query_dict
|
||||
|
||||
def dump_checkpoint(self):
|
||||
current_time = time.time()
|
||||
if self.last_dump_at is None or self.last_dump_at +\
|
||||
self.dump_interval < current_time:
|
||||
self.dump_pool.spawn_n(self.profile_log.dump_profile,
|
||||
self.profiler, os.getpid())
|
||||
self.last_dump_at = current_time
|
||||
|
||||
def __call__(self, environ, start_response):
|
||||
request = Request(environ)
|
||||
path_entry = request.path_info.split('/')
|
||||
# hijack the favicon request sent by the browser so that it doesn't
# invoke the profiling hook and contaminate the data.
|
||||
if path_entry[1] == 'favicon.ico':
|
||||
start_response('200 OK', [])
|
||||
return ''
|
||||
elif path_entry[1] == self.path:
|
||||
try:
|
||||
self.dump_checkpoint()
|
||||
query_dict = self._combine_body_qs(request)
|
||||
content, headers = self.viewer.render(request.url,
|
||||
request.method,
|
||||
path_entry,
|
||||
query_dict,
|
||||
self.renew_profile)
|
||||
start_response('200 OK', headers)
|
||||
return [bytes_(content)]
|
||||
except MethodNotAllowed as mx:
|
||||
start_response('405 Method Not Allowed', [])
|
||||
return '%s' % mx
|
||||
except NotFoundException as nx:
|
||||
start_response('404 Not Found', [])
|
||||
return '%s' % nx
|
||||
except ProfileException as pf:
|
||||
start_response('500 Internal Server Error', [])
|
||||
return '%s' % pf
|
||||
except Exception as ex:
|
||||
start_response('500 Internal Server Error', [])
|
||||
return _('Error on render profiling results: %s') % ex
|
||||
else:
|
||||
try:
|
||||
_locals = locals()
|
||||
code = self.unwind and PROFILE_EXEC_EAGER or\
|
||||
PROFILE_EXEC_LAZY
|
||||
self.profiler.runctx(code, globals(), _locals)
|
||||
app_iter = _locals['app_iter_']
|
||||
self.dump_checkpoint()
|
||||
return app_iter
|
||||
except:
|
||||
self.logger.exception(_('Error profiling code'))
|
||||
finally:
|
||||
pass
|
||||
|
||||
def renew_profile(self):
|
||||
self.profiler = get_profiler(self.profile_module)
|
||||
|
||||
|
||||
def get_profiler(profile_module):
|
||||
if profile_module == 'eventlet.green.profile':
|
||||
eprofile.Profile._setup = new_setup
|
||||
eprofile.Profile.runctx = new_runctx
|
||||
eprofile.Profile.runcall = new_runcall
|
||||
# import the profile module by name via __import__ (works on Python 2.6)
|
||||
__import__(profile_module)
|
||||
return sys.modules[profile_module].Profile()
|
||||
|
||||
|
||||
def filter_factory(global_conf, **local_conf):
|
||||
conf = global_conf.copy()
|
||||
conf.update(local_conf)
|
||||
|
||||
def profile_filter(app):
|
||||
return ProfileMiddleware(app, conf)
|
||||
|
||||
return profile_filter
|
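Outside of a paste pipeline, the factory above can also be applied to a WSGI app directly. A minimal sketch, assuming Swift with this change is installed; the hello_app, the port and the log prefix are throwaway placeholders.

    # Minimal sketch: wrap a trivial WSGI app with ProfileMiddleware directly.
    # Assumes Swift with this change is installed; names below are placeholders.
    from wsgiref.simple_server import make_server
    from swift.common.middleware import xprofile


    def hello_app(environ, start_response):
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return [b'hello\n']


    app = xprofile.filter_factory(
        {}, log_filename_prefix='/tmp/xprofile-demo/demo.profile',
        dump_interval='2.0')(hello_app)

    # Every request to http://localhost:8000/ is profiled, and
    # http://localhost:8000/__profile__ serves the mini web UI.
    make_server('', 8000, app).serve_forever()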
497  test/unit/common/middleware/test_xprofile.py  Normal file
@@ -0,0 +1,497 @@
|
||||
# Copyright (c) 2010-2012 OpenStack, LLC.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import json
|
||||
import shutil
|
||||
import StringIO
|
||||
import tempfile
|
||||
import unittest
|
||||
|
||||
from swift import gettext_ as _
|
||||
from swift.common.middleware import xprofile
|
||||
from swift.common.middleware.xprofile import ProfileMiddleware
|
||||
from swift.common.middleware.x_profile.exceptions import PLOTLIBNotInstalled,\
|
||||
MethodNotAllowed, NotFoundException, ODFLIBNotInstalled
|
||||
from swift.common.middleware.x_profile.html_viewer import HTMLViewer,\
|
||||
PLOTLIB_INSTALLED
|
||||
from swift.common.middleware.x_profile.profile_model import Stats2,\
|
||||
ProfileLog, ODFLIB_INSTALLED
|
||||
|
||||
|
||||
from swift.common.swob import Request, Response
|
||||
|
||||
|
||||
class FakeApp(object):
|
||||
|
||||
def __call__(self, env, start_response):
|
||||
req = Request(env)
|
||||
return Response(request=req, body='FAKE APP')(
|
||||
env, start_response)
|
||||
|
||||
|
||||
class TestXProfile(unittest.TestCase):
|
||||
|
||||
def test_get_profiler(self):
|
||||
self.assert_(xprofile.get_profiler('cProfile') is not None)
|
||||
self.assert_(xprofile.get_profiler('eventlet.green.profile')
|
||||
is not None)
|
||||
|
||||
|
||||
class TestProfilers(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.profilers = [xprofile.get_profiler('cProfile'),
|
||||
xprofile.get_profiler('eventlet.green.profile')]
|
||||
|
||||
def fake_func(self, *args, **kw):
|
||||
return len(args) + len(kw)
|
||||
|
||||
def test_runcall(self):
|
||||
for p in self.profilers:
|
||||
v = p.runcall(self.fake_func, 'one', 'two', {'key1': 'value1'})
|
||||
self.assertEqual(v, 3)
|
||||
|
||||
def test_runctx(self):
|
||||
for p in self.profilers:
|
||||
p.runctx('import os;os.getcwd();', globals(), locals())
|
||||
p.snapshot_stats()
|
||||
self.assert_(p.stats is not None)
|
||||
self.assert_(len(p.stats.keys()) > 0)
|
||||
|
||||
|
||||
class TestProfileMiddleware(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.got_statuses = []
|
||||
self.app = ProfileMiddleware(FakeApp, {})
|
||||
self.tempdir = os.path.dirname(self.app.log_filename_prefix)
|
||||
self.pids = ['123', '456', str(os.getpid())]
|
||||
profiler = xprofile.get_profiler('eventlet.green.profile')
|
||||
for pid in self.pids:
|
||||
path = self.app.log_filename_prefix + pid
|
||||
profiler.runctx('import os;os.getcwd();', globals(), locals())
|
||||
profiler.dump_stats(path)
|
||||
profiler.runctx('import os;os.getcwd();', globals(), locals())
|
||||
profiler.dump_stats(path + '.tmp')
|
||||
|
||||
def tearDown(self):
|
||||
shutil.rmtree(self.tempdir, ignore_errors=True)
|
||||
|
||||
def get_app(self, app, global_conf, **local_conf):
|
||||
factory = xprofile.filter_factory(global_conf, **local_conf)
|
||||
return factory(app)
|
||||
|
||||
def start_response(self, status, headers):
|
||||
self.got_statuses = [status]
|
||||
self.headers = headers
|
||||
|
||||
def test_combine_body_qs(self):
|
||||
body = "profile=all&sort=time&limit=-1&fulldirs=1&nfl_filter=__call__"\
|
||||
+ "&query=query&metric=nc&format=default"
|
||||
wsgi_input = StringIO.StringIO(body)
|
||||
environ = {'REQUEST_METHOD': 'GET',
|
||||
'QUERY_STRING': 'profile=all&format=json',
|
||||
'wsgi.input': wsgi_input}
|
||||
req = Request.blank('/__profile__/', environ=environ)
|
||||
query_dict = self.app._combine_body_qs(req)
|
||||
self.assertEqual(query_dict['profile'], ['all'])
|
||||
self.assertEqual(query_dict['sort'], ['time'])
|
||||
self.assertEqual(query_dict['limit'], ['-1'])
|
||||
self.assertEqual(query_dict['fulldirs'], ['1'])
|
||||
self.assertEqual(query_dict['nfl_filter'], ['__call__'])
|
||||
self.assertEqual(query_dict['query'], ['query'])
|
||||
self.assertEqual(query_dict['metric'], ['nc'])
|
||||
self.assertEqual(query_dict['format'], ['default'])
|
||||
|
||||
def test_call(self):
|
||||
body = "sort=time&limit=-1&fulldirs=1&nfl_filter="\
|
||||
+ "&metric=nc"
|
||||
wsgi_input = StringIO.StringIO(body + '&query=query')
|
||||
environ = {'HTTP_HOST': 'localhost:8080',
|
||||
'PATH_INFO': '/__profile__',
|
||||
'REQUEST_METHOD': 'GET',
|
||||
'QUERY_STRING': 'profile=all&format=json',
|
||||
'wsgi.input': wsgi_input}
|
||||
resp = self.app(environ, self.start_response)
|
||||
self.assert_(resp[0].find('<html>') > 0, resp)
|
||||
self.assertEqual(self.got_statuses, ['200 OK'])
|
||||
self.assertEqual(self.headers, [('content-type', 'text/html')])
|
||||
wsgi_input = StringIO.StringIO(body + '&plot=plot')
|
||||
environ['wsgi.input'] = wsgi_input
|
||||
if PLOTLIB_INSTALLED:
|
||||
resp = self.app(environ, self.start_response)
|
||||
self.assertEqual(self.got_statuses, ['200 OK'])
|
||||
self.assertEqual(self.headers, [('content-type', 'image/jpg')])
|
||||
else:
|
||||
resp = self.app(environ, self.start_response)
|
||||
self.assertEqual(self.got_statuses, ['500 Internal Server Error'])
|
||||
wsgi_input = StringIO.StringIO(body +
|
||||
'&download=download&format=default')
|
||||
environ['wsgi.input'] = wsgi_input
|
||||
resp = self.app(environ, self.start_response)
|
||||
self.assertEqual(self.headers, [('content-type',
|
||||
HTMLViewer.format_dict['default'])])
|
||||
wsgi_input = StringIO.StringIO(body + '&download=download&format=json')
|
||||
environ['wsgi.input'] = wsgi_input
|
||||
resp = self.app(environ, self.start_response)
|
||||
self.assert_(self.headers == [('content-type',
|
||||
HTMLViewer.format_dict['json'])])
|
||||
env2 = environ.copy()
|
||||
env2['REQUEST_METHOD'] = 'DELETE'
|
||||
resp = self.app(env2, self.start_response)
|
||||
self.assertEqual(self.got_statuses, ['405 Method Not Allowed'], resp)
|
||||
|
||||
wsgi_input = StringIO.StringIO(body + '&profile=135&download=download')
|
||||
environ['wsgi.input'] = wsgi_input
|
||||
resp = self.app(environ, self.start_response)
|
||||
self.assertEqual(self.got_statuses, ['404 Not Found'], resp)
|
||||
|
||||
wsgi_input = StringIO.StringIO(body + '&download=download&format=ods')
|
||||
environ['wsgi.input'] = wsgi_input
|
||||
resp = self.app(environ, self.start_response)
|
||||
if ODFLIB_INSTALLED:
|
||||
self.assertEqual(self.headers, [('content-type',
|
||||
HTMLViewer.format_dict['ods'])])
|
||||
else:
|
||||
self.assertEqual(self.got_statuses, ['500 Internal Server Error'])
|
||||
|
||||
def test_dump_checkpoint(self):
|
||||
self.app.dump_checkpoint()
|
||||
self.assert_(self.app.last_dump_at is not None)
|
||||
|
||||
def test_renew_profile(self):
|
||||
old_profiler = self.app.profiler
|
||||
self.app.renew_profile()
|
||||
new_profiler = self.app.profiler
|
||||
self.assert_(old_profiler != new_profiler)
|
||||
|
||||
|
||||


class Test_profile_log(unittest.TestCase):

    def setUp(self):
        self.log_filename_prefix1 = tempfile.mkdtemp() + '/unittest.profile'
        self.profile_log1 = ProfileLog(self.log_filename_prefix1, False)
        self.pids1 = ['123', '456', str(os.getpid())]
        profiler1 = xprofile.get_profiler('eventlet.green.profile')
        for pid in self.pids1:
            profiler1.runctx('import os;os.getcwd();', globals(), locals())
            self.profile_log1.dump_profile(profiler1, pid)

        self.log_filename_prefix2 = tempfile.mkdtemp() + '/unittest.profile'
        self.profile_log2 = ProfileLog(self.log_filename_prefix2, True)
        self.pids2 = ['321', '654', str(os.getpid())]
        profiler2 = xprofile.get_profiler('eventlet.green.profile')
        for pid in self.pids2:
            profiler2.runctx('import os;os.getcwd();', globals(), locals())
            self.profile_log2.dump_profile(profiler2, pid)

    def tearDown(self):
        self.profile_log1.clear('all')
        self.profile_log2.clear('all')

    def test_get_all_pids(self):
        self.assertEquals(self.profile_log1.get_all_pids(),
                          sorted(self.pids1, reverse=True))
        for pid in self.profile_log2.get_all_pids():
            self.assert_(pid.split('-')[0] in self.pids2)

    def test_clear(self):
        self.profile_log1.clear('123')
        self.assertFalse(os.path.exists(self.log_filename_prefix1 + '123'))
        self.profile_log1.clear('current')
        self.assertFalse(os.path.exists(self.log_filename_prefix1 +
                                        str(os.getpid())))
        self.profile_log1.clear('all')
        for pid in self.pids1:
            self.assertFalse(os.path.exists(self.log_filename_prefix1 + pid))

        self.profile_log2.clear('321')
        self.assertFalse(os.path.exists(self.log_filename_prefix2 + '321'))
        self.profile_log2.clear('current')
        self.assertFalse(os.path.exists(self.log_filename_prefix2 +
                                        str(os.getpid())))
        self.profile_log2.clear('all')
        for pid in self.pids2:
            self.assertFalse(os.path.exists(self.log_filename_prefix2 + pid))

    def test_get_logfiles(self):
        log_files = self.profile_log1.get_logfiles('all')
        self.assertEqual(len(log_files), 3)
        self.assertEquals(len(log_files), len(self.pids1))
        log_files = self.profile_log1.get_logfiles('current')
        self.assertEqual(len(log_files), 1)
        self.assertEquals(log_files, [self.log_filename_prefix1
                                      + str(os.getpid())])
        log_files = self.profile_log1.get_logfiles(self.pids1[0])
        self.assertEqual(len(log_files), 1)
        self.assertEquals(log_files, [self.log_filename_prefix1
                                      + self.pids1[0]])
        log_files = self.profile_log2.get_logfiles('all')
        self.assertEqual(len(log_files), 3)
        self.assertEquals(len(log_files), len(self.pids2))
        log_files = self.profile_log2.get_logfiles('current')
        self.assertEqual(len(log_files), 1)
        self.assert_(log_files[0].find(self.log_filename_prefix2 +
                                       str(os.getpid())) > -1)
        log_files = self.profile_log2.get_logfiles(self.pids2[0])
        self.assertEqual(len(log_files), 1)
        self.assert_(log_files[0].find(self.log_filename_prefix2 +
                                       self.pids2[0]) > -1)

    def test_dump_profile(self):
        prof = xprofile.get_profiler('eventlet.green.profile')
        prof.runctx('import os;os.getcwd();', globals(), locals())
        prof.create_stats()
        pfn = self.profile_log1.dump_profile(prof, os.getpid())
        self.assert_(os.path.exists(pfn))
        os.remove(pfn)
        pfn = self.profile_log2.dump_profile(prof, os.getpid())
        self.assert_(os.path.exists(pfn))
        os.remove(pfn)
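

# Illustrative sketch (not part of the original test file): the ProfileLog
# round trip that Test_profile_log above exercises, written as a stand-alone
# helper.  It relies only on names this module already imports (os, tempfile,
# xprofile, ProfileLog); the prefix path is an arbitrary example value.
def _example_profile_log_roundtrip():
    prefix = tempfile.mkdtemp() + '/example.profile'
    profile_log = ProfileLog(prefix, False)   # False: no timestamp in names
    profiler = xprofile.get_profiler('eventlet.green.profile')
    profiler.runctx('import os;os.getcwd();', globals(), locals())
    # dump_profile() writes the stats for this pid and returns the file name
    dumped = profile_log.dump_profile(profiler, os.getpid())
    assert os.path.exists(dumped)
    assert profile_log.get_logfiles('current')  # the file just dumped
    profile_log.clear('all')                    # remove every dumped file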


class Test_html_viewer(unittest.TestCase):

    def setUp(self):
        self.app = ProfileMiddleware(FakeApp, {})
        self.log_files = []
        self.tempdir = tempfile.mkdtemp()
        self.log_filename_prefix = self.tempdir + '/unittest.profile'
        self.profile_log = ProfileLog(self.log_filename_prefix, False)
        self.pids = ['123', '456', str(os.getpid())]
        profiler = xprofile.get_profiler('eventlet.green.profile')
        for pid in self.pids:
            profiler.runctx('import os;os.getcwd();', globals(), locals())
            self.log_files.append(self.profile_log.dump_profile(profiler, pid))
        self.viewer = HTMLViewer('__profile__', 'eventlet.green.profile',
                                 self.profile_log)
        body = "profile=123&profile=456&sort=time&sort=nc&limit=10"\
               + "&fulldirs=1&nfl_filter=getcwd&query=query&metric=nc"
        wsgi_input = StringIO.StringIO(body)
        environ = {'REQUEST_METHOD': 'GET',
                   'QUERY_STRING': 'profile=all',
                   'wsgi.input': wsgi_input}
        req = Request.blank('/__profile__/', environ=environ)
        self.query_dict = self.app._combine_body_qs(req)

    def tearDown(self):
        shutil.rmtree(self.tempdir, ignore_errors=True)

    def fake_call_back(self):
        pass

    def test_get_param(self):
        query_dict = self.query_dict
        get_param = self.viewer._get_param
        self.assertEqual(get_param(query_dict, 'profile', 'current', True),
                         ['123', '456'])
        self.assertEqual(get_param(query_dict, 'profile', 'current'), '123')
        self.assertEqual(get_param(query_dict, 'sort', 'time'), 'time')
        self.assertEqual(get_param(query_dict, 'sort', 'time', True),
                         ['time', 'nc'])
        self.assertEqual(get_param(query_dict, 'limit', -1), 10)
        self.assertEqual(get_param(query_dict, 'fulldirs', '0'), '1')
        self.assertEqual(get_param(query_dict, 'nfl_filter', ''), 'getcwd')
        self.assertEqual(get_param(query_dict, 'query', ''), 'query')
        self.assertEqual(get_param(query_dict, 'metric', 'time'), 'nc')
        self.assertEqual(get_param(query_dict, 'format', 'default'), 'default')

    def test_render(self):
        url = 'http://localhost:8080/__profile__'
        path_entries = ['/__profile__'.split('/'),
                        '/__profile__/'.split('/'),
                        '/__profile__/123'.split('/'),
                        '/__profile__/123/'.split('/'),
                        '/__profile__/123/:0(getcwd)'.split('/'),
                        '/__profile__/all'.split('/'),
                        '/__profile__/all/'.split('/'),
                        '/__profile__/all/:0(getcwd)'.split('/'),
                        '/__profile__/current'.split('/'),
                        '/__profile__/current/'.split('/'),
                        '/__profile__/current/:0(getcwd)'.split('/')]

        content, headers = self.viewer.render(url, 'GET', path_entries[0],
                                              self.query_dict, None)
        self.assert_(content is not None)
        self.assertEqual(headers, [('content-type', 'text/html')])

        content, headers = self.viewer.render(url, 'POST', path_entries[0],
                                              self.query_dict, None)
        self.assert_(content is not None)
        self.assertEqual(headers, [('content-type', 'text/html')])

        plot_dict = self.query_dict.copy()
        plot_dict['plot'] = ['plot']
        if PLOTLIB_INSTALLED:
            content, headers = self.viewer.render(url, 'POST', path_entries[0],
                                                  plot_dict, None)
            self.assertEqual(headers, [('content-type', 'image/jpg')])
        else:
            self.assertRaises(PLOTLIBNotInstalled, self.viewer.render,
                              url, 'POST', path_entries[0], plot_dict, None)

        clear_dict = self.query_dict.copy()
        clear_dict['clear'] = ['clear']
        del clear_dict['query']
        clear_dict['profile'] = ['xxx']
        content, headers = self.viewer.render(url, 'POST', path_entries[0],
                                              clear_dict, None)
        self.assertEqual(headers, [('content-type', 'text/html')])

        download_dict = self.query_dict.copy()
        download_dict['download'] = ['download']
        content, headers = self.viewer.render(url, 'POST', path_entries[0],
                                              download_dict, None)
        self.assert_(headers == [('content-type',
                                  self.viewer.format_dict['default'])])

        content, headers = self.viewer.render(url, 'GET', path_entries[1],
                                              self.query_dict, None)
        self.assert_(isinstance(json.loads(content), dict))

        for method in ['HEAD', 'PUT', 'DELETE', 'XYZMethod']:
            self.assertRaises(MethodNotAllowed, self.viewer.render, url,
                              method, path_entries[10], self.query_dict, None)

        for entry in path_entries[2:]:
            download_dict['format'] = 'default'
            content, headers = self.viewer.render(url, 'GET', entry,
                                                  download_dict, None)
            self.assert_(('content-type', self.viewer.format_dict['default'])
                         in headers, entry)
            download_dict['format'] = 'json'
            content, headers = self.viewer.render(url, 'GET', entry,
                                                  download_dict, None)
            self.assert_(isinstance(json.loads(content), dict))

    def test_index(self):
        content, headers = self.viewer.index_page(self.log_files[0:1],
                                                  profile_id='current')
        self.assert_(content.find('<html>') > -1)
        self.assert_(headers == [('content-type', 'text/html')])

    def test_index_all(self):
        content, headers = self.viewer.index_page(self.log_files,
                                                  profile_id='all')
        for f in self.log_files:
            self.assert_(content.find(f) > 0, content)
        self.assert_(headers == [('content-type', 'text/html')])

    def test_download(self):
        content, headers = self.viewer.download(self.log_files)
        self.assert_(content is not None)
        self.assertEqual(headers, [('content-type',
                                    self.viewer.format_dict['default'])])
        content, headers = self.viewer.download(self.log_files, sort='calls',
                                                limit=10, nfl_filter='os')
        self.assert_(content is not None)
        self.assertEqual(headers, [('content-type',
                                    self.viewer.format_dict['default'])])
        content, headers = self.viewer.download(self.log_files,
                                                output_format='default')
        self.assertEqual(headers, [('content-type',
                                    self.viewer.format_dict['default'])])
        content, headers = self.viewer.download(self.log_files,
                                                output_format='json')
        self.assert_(isinstance(json.loads(content), dict))
        self.assertEqual(headers, [('content-type',
                                    self.viewer.format_dict['json'])])
        content, headers = self.viewer.download(self.log_files,
                                                output_format='csv')
        self.assertEqual(headers, [('content-type',
                                    self.viewer.format_dict['csv'])])
        if ODFLIB_INSTALLED:
            content, headers = self.viewer.download(self.log_files,
                                                    output_format='ods')
            self.assertEqual(headers, [('content-type',
                                        self.viewer.format_dict['ods'])])
        else:
            self.assertRaises(ODFLIBNotInstalled, self.viewer.download,
                              self.log_files, output_format='ods')
        content, headers = self.viewer.download(self.log_files,
                                                nfl_filter=__file__,
                                                output_format='python')
        self.assertEqual(headers, [('content-type',
                                    self.viewer.format_dict['python'])])

    def test_plot(self):
        if PLOTLIB_INSTALLED:
            content, headers = self.viewer.plot(self.log_files)
            self.assert_(content is not None)
            self.assertEqual(headers, [('content-type', 'image/jpg')])
            self.assertRaises(NotFoundException, self.viewer.plot, [])
        else:
            self.assertRaises(PLOTLIBNotInstalled, self.viewer.plot,
                              self.log_files)

    def test_format_source_code(self):
        nfl_os = '%s:%d(%s)' % (os.__file__[:-1], 136, 'makedirs')
        self.assert_('makedirs' in self.viewer.format_source_code(nfl_os))
        self.assertFalse('makedirsXYZ' in
                         self.viewer.format_source_code(nfl_os))
        nfl_illegal = '%s:136(makedirs)' % os.__file__
        self.assert_(_('The file type are forbidden to access!') in
                     self.viewer.format_source_code(nfl_illegal))
        nfl_not_exist = '%s.py:136(makedirs)' % os.__file__
        expected_msg = _('Can not access the file %s.') % os.__file__
        self.assert_(expected_msg in
                     self.viewer.format_source_code(nfl_not_exist))
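

# Illustrative sketch (not part of the original test file): rendering a dumped
# profile as JSON through HTMLViewer.download(), the same call test_download()
# above asserts on.  It relies only on names this module already imports
# (os, json, tempfile, xprofile, ProfileLog, HTMLViewer).
def _example_viewer_json_download():
    prefix = tempfile.mkdtemp() + '/example.profile'
    profile_log = ProfileLog(prefix, False)
    profiler = xprofile.get_profiler('eventlet.green.profile')
    profiler.runctx('import os;os.getcwd();', globals(), locals())
    log_file = profile_log.dump_profile(profiler, os.getpid())
    viewer = HTMLViewer('__profile__', 'eventlet.green.profile', profile_log)
    content, headers = viewer.download([log_file], output_format='json')
    stats = json.loads(content)                 # a dict with a 'stats' entry
    profile_log.clear('all')
    return stats, headers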


class TestStats2(unittest.TestCase):

    def setUp(self):
        self.profile_file = tempfile.mktemp('profile', 'unittest')
        self.profilers = [xprofile.get_profiler('cProfile'),
                          xprofile.get_profiler('eventlet.green.profile')]
        for p in self.profilers:
            p.runctx('import os;os.getcwd();', globals(), locals())
            p.dump_stats(self.profile_file)
            self.stats2 = Stats2(self.profile_file)
            self.selections = [['getcwd'], ['getcwd', -1],
                               ['getcwd', -10], ['getcwd', 0.1]]

    def tearDown(self):
        os.remove(self.profile_file)

    def test_func_to_dict(self):
        func = ['profile.py', 100, '__call__']
        self.assertEqual({'module': 'profile.py', 'line': 100, 'function':
                          '__call__'}, self.stats2.func_to_dict(func))
        func = ['', 0, '__call__']
        self.assertEqual({'module': '', 'line': 0, 'function':
                          '__call__'}, self.stats2.func_to_dict(func))

    def test_to_json(self):
        for selection in self.selections:
            js = self.stats2.to_json(selection)
            self.assert_(isinstance(json.loads(js), dict))
            self.assert_(json.loads(js)['stats'] is not None)
            self.assert_(json.loads(js)['stats'][0] is not None)

    def test_to_ods(self):
        if ODFLIB_INSTALLED:
            for selection in self.selections:
                self.assert_(self.stats2.to_ods(selection) is not None)

    def test_to_csv(self):
        for selection in self.selections:
            self.assert_(self.stats2.to_csv(selection) is not None)
            self.assert_('function calls' in self.stats2.to_csv(selection))
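

# Illustrative sketch (not part of the original test file): exporting a dumped
# profile via Stats2, mirroring what TestStats2 above checks.  It relies only
# on names this module already imports (os, json, tempfile, xprofile, Stats2);
# 'getcwd' restricts the report to the frames these examples profile.
def _example_stats2_export():
    profile_file = tempfile.mktemp('profile', 'example')
    profiler = xprofile.get_profiler('cProfile')
    profiler.runctx('import os;os.getcwd();', globals(), locals())
    profiler.dump_stats(profile_file)
    stats2 = Stats2(profile_file)
    as_json = stats2.to_json(['getcwd'])        # JSON text with a 'stats' key
    as_csv = stats2.to_csv(['getcwd'])          # CSV text of the same stats
    os.remove(profile_file)
    return json.loads(as_json), as_csv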


if __name__ == '__main__':
    unittest.main()