Merge "Added oslo.log to requirements"
commit 6d4dff6ee6
@@ -9,6 +9,7 @@ oslo.config>=1.2.1
 oslo.context>=0.1.0
 pecan>=0.4.5
 oslo.db>=0.2.0
+oslo.log>=0.1.0
 pika>=0.9.14
 python-openid
 PyYAML>=3.1.0
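With oslo.log now listed as a dependency, the modules changed below can import the graduated library instead of the copied incubator module. A minimal, hedged sketch (not part of this commit) of what the new requirement makes available:

# Sketch only; assumes the oslo.log>=0.1.0 requirement added above is installed.
from oslo_log import log

LOG = log.getLogger(__name__)  # the same call the updated modules use below
LOG.debug("oslo_log is importable")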
@@ -16,6 +16,8 @@
 import os
 
 from oslo.config import cfg
+from oslo_log import _options
+from oslo_log import log
 import pecan
 from wsgiref import simple_server
 
@@ -30,7 +32,6 @@ from storyboard.api.v1.search import impls as search_engine_impls
 from storyboard.api.v1.search import search_engine
 from storyboard.notifications.notification_hook import NotificationHook
 from storyboard.openstack.common.gettextutils import _LI # noqa
-from storyboard.openstack.common import log
 from storyboard.plugin.cron import load_crontab
 from storyboard.plugin.user_preferences import initialize_user_preferences
 
@@ -38,6 +39,7 @@ CONF = cfg.CONF
 
 LOG = log.getLogger(__name__)
 
+log.register_options(CONF)
 API_OPTS = [
     cfg.StrOpt('bind_host',
                default='0.0.0.0',
@@ -72,13 +74,13 @@ def setup_app(pecan_config=None):
         pecan_config = get_pecan_config()
 
     # Setup logging
-    cfg.set_defaults(log.log_opts,
+    cfg.set_defaults(_options.log_opts,
                      default_log_levels=[
                          'storyboard=INFO',
                          'storyboard.openstack.common.db=WARN',
                          'sqlalchemy=WARN'
                      ])
-    log.setup('storyboard')
+    log.setup(CONF, 'storyboard')
 
     hooks = [
         user_id_hook.UserIdHook(),
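The setup_app() hunk above is the core of the migration: log option defaults now come from oslo_log._options rather than the incubator's log.log_opts, and setup() takes the ConfigOpts object explicitly. A short, hedged sketch of that initialization sequence in isolation (only the oslo.config/oslo_log calls are taken from the diff; the surrounding script is illustrative):

from oslo.config import cfg
from oslo_log import _options
from oslo_log import log

CONF = cfg.CONF

# Register oslo.log's options before overriding their defaults or parsing CONF.
log.register_options(CONF)
cfg.set_defaults(_options.log_opts,
                 default_log_levels=['storyboard=INFO',
                                     'sqlalchemy=WARN'])
log.setup(CONF, 'storyboard')  # replaces the old log.setup('storyboard')
log.getLogger(__name__).info('logging configured')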
@@ -18,10 +18,10 @@ from datetime import datetime
 from oauthlib.oauth2 import RequestValidator
 from oauthlib.oauth2 import WebApplicationServer
 from oslo.config import cfg
+from oslo_log import log
 
 from storyboard.api.auth.token_storage import storage
 from storyboard.db.api import users as user_api
-from storyboard.openstack.common import log
 
 CONF = cfg.CONF
 LOG = log.getLogger(__name__)
@@ -14,11 +14,11 @@
 # limitations under the License.
 
 from oslo.config import cfg
+from oslo_log import log
 import requests
 import six
 
 from storyboard.api.auth import utils
-from storyboard.openstack.common import log
 
 LOG = log.getLogger(__name__)
 CONF = cfg.CONF
@@ -15,6 +15,7 @@
 
 import json
 
+from oslo_log import log
 import pecan
 from pecan import request
 from pecan import response
@@ -24,7 +25,6 @@ import six
 from storyboard.api.auth.oauth_validator import SERVER
 from storyboard.api.auth.openid_client import client as openid_client
 from storyboard.api.auth.token_storage import storage
-from storyboard.openstack.common import log
 
 LOG = log.getLogger(__name__)
 
@@ -14,6 +14,7 @@
 # limitations under the License.
 
 from oslo.config import cfg
+from oslo_log import log
 from pecan import abort
 from pecan import request
 from pecan import rest
@@ -25,7 +26,6 @@ from storyboard.api.auth import authorization_checks as checks
 from storyboard.api.v1 import validations
 import storyboard.db.api.users as user_api
 from storyboard.openstack.common.gettextutils import _ # noqa
-from storyboard.openstack.common import log
 
 
 CONF = cfg.CONF
@@ -16,6 +16,7 @@
 import uuid
 
 from oslo.config import cfg
+from oslo_log import log
 from pecan import abort
 from pecan import request
 from pecan import response
@@ -29,7 +30,6 @@ import storyboard.api.v1.wmodels as wmodels
 import storyboard.db.api.access_tokens as token_api
 import storyboard.db.api.users as user_api
 from storyboard.openstack.common.gettextutils import _ # noqa
-from storyboard.openstack.common import log
 
 
 CONF = cfg.CONF
@@ -16,8 +16,7 @@
 import os
 
 from oslo.config import cfg
-
-from storyboard.openstack.common import log
+from oslo_log import log
 
 LOG = log.getLogger(__name__)
 
@@ -20,6 +20,7 @@ from oslo.db import exception as db_exc
 from oslo.db.sqlalchemy import session as db_session
 from oslo.db.sqlalchemy.utils import InvalidSortKey
 from oslo.db.sqlalchemy.utils import paginate_query
+from oslo_log import log
 import six
 import sqlalchemy.types as types
 from wsme.exc import ClientSideError
@@ -27,7 +28,6 @@ from wsme.exc import ClientSideError
 from storyboard.common import exception as exc
 from storyboard.db import models
 from storyboard.openstack.common.gettextutils import _ # noqa
-from storyboard.openstack.common import log
 
 CONF = cfg.CONF
 LOG = log.getLogger(__name__)
@@ -25,8 +25,7 @@ down_revision = '021'
 
 
 from alembic import op
-
-from storyboard.openstack.common import log
+from oslo_log import log
 
 LOG = log.getLogger(__name__)
 
@@ -17,6 +17,7 @@ import warnings
 import yaml
 
 from oslo.config import cfg
+from oslo_log import log
 from sqlalchemy.exc import SADeprecationWarning
 
 from storyboard.common.custom_types import NameType
@@ -24,7 +25,6 @@ from storyboard.db.api import base as db_api
 from storyboard.db.models import Project
 from storyboard.db.models import ProjectGroup
 from storyboard.openstack.common.gettextutils import _LW # noqa
-from storyboard.openstack.common import log
 
 
 warnings.simplefilter("ignore", SADeprecationWarning)
@@ -13,10 +13,10 @@
 # under the License.
 
 from oslo.config import cfg
+from oslo_log import log
 
 from storyboard.db.api import base as db_api
 from storyboard.migrate.launchpad.loader import LaunchpadLoader
-from storyboard.openstack.common import log
 
 IMPORT_OPTS = [
     cfg.StrOpt("from-project",
@@ -38,8 +38,9 @@ LOG = log.getLogger(__name__)
 
 
 def main():
-    log.setup('storyboard')
     CONF.register_cli_opts(IMPORT_OPTS)
+    log.register_options(CONF)
+    log.setup(CONF, 'storyboard')
     CONF(project='storyboard')
 
     # If the user requested an autoincrement value, set that before we start
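Every console entry point in this change follows the same pattern: the old one-argument log.setup() call is dropped, oslo.log's options are registered on CONF, and setup() is called with CONF plus the project name. A hedged, self-contained sketch of that ordering (IMPORT_OPTS here is a placeholder; only the logging calls mirror the diff):

from oslo.config import cfg
from oslo_log import log

CONF = cfg.CONF
IMPORT_OPTS = [cfg.StrOpt('from-project')]  # placeholder option for the sketch


def main():
    CONF.register_cli_opts(IMPORT_OPTS)
    log.register_options(CONF)     # new: register oslo.log's config/CLI options
    log.setup(CONF, 'storyboard')  # new signature takes the ConfigOpts object
    CONF(project='storyboard')     # parse config files and the command line


if __name__ == '__main__':
    main()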
@@ -18,8 +18,8 @@ from threading import Timer
 import pika
 
 from oslo.config import cfg
+from oslo_log import log
 
-from storyboard.openstack.common import log
 from storyboard.openstack.common.gettextutils import _, _LI # noqa
 
 
@@ -16,11 +16,11 @@
 import json
 
 from oslo.config import cfg
+from oslo_log import log
 from pika.exceptions import ConnectionClosed
 
 from storyboard.notifications.conf import NOTIFICATION_OPTS
 from storyboard.notifications.connection_service import ConnectionService
-from storyboard.openstack.common import log
 from storyboard.openstack.common.gettextutils import _, _LW, _LE # noqa
 
 
@@ -17,12 +17,12 @@ import json
 import time
 
 from oslo.config import cfg
+from oslo_log import log
 from pika.exceptions import ConnectionClosed
 from stevedore import enabled
 
 from storyboard.notifications.conf import NOTIFICATION_OPTS
 from storyboard.notifications.connection_service import ConnectionService
-from storyboard.openstack.common import log
 from storyboard.openstack.common.gettextutils import _, _LW # noqa
 
 
@@ -31,7 +31,8 @@ LOG = log.getLogger(__name__)
 
 
 def subscribe():
-    log.setup('storyboard')
+    log.register_options(CONF)
+    log.setup(CONF, 'storyboard')
    CONF(project='storyboard')
    CONF.register_opts(NOTIFICATION_OPTS, "notifications")
 
@@ -18,8 +18,9 @@ import errno
 import os
 import tempfile
 
+from oslo_log import log as logging
+
 from storyboard.openstack.common import excutils
-from storyboard.openstack.common import log as logging
 
 LOG = logging.getLogger(__name__)
 
@@ -27,10 +27,10 @@ import time
 import weakref
 
 from oslo.config import cfg
+from oslo_log import log as logging
 
 from storyboard.openstack.common import fileutils
 from storyboard.openstack.common.gettextutils import _, _LE, _LI
-from storyboard.openstack.common import log as logging
 
 
 LOG = logging.getLogger(__name__)
@@ -1,712 +0,0 @@
[This hunk deletes the vendored oslo-incubator logging module (storyboard/openstack/common/log.py, all 712 lines): its option definitions (common_cli_opts, logging_cli_opts, generic_log_opts, log_opts), the mask_password helper and its _SANITIZE_PATTERNS, the BaseLoggerAdapter, LazyAdapter and ContextAdapter classes, the JSONFormatter, ContextFormatter, ColorHandler and RFCSysLogHandler classes, and the module-level setup(), set_defaults(), getLogger(), getLazyLogger(), WritableLogger, LogConfigError and DeprecatedConfig helpers. Equivalent functionality is supplied by the oslo.log library that this commit adds.]
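Everything the removed module provided is now expected to come from the library. As a rough, hedged illustration (not code from this commit), the context-aware format strings the deleted ContextFormatter consumed are still configured through the same option names, which oslo_log registers instead of this module:

# Sketch only; assumes oslo.log>=0.1.0 from the updated requirements.
from oslo.config import cfg
from oslo_log import log

CONF = cfg.CONF
log.register_options(CONF)  # registers the options the deleted module defined inline
CONF.set_override('logging_default_format_string',
                  '%(asctime)s %(levelname)s %(name)s: %(message)s')
log.setup(CONF, 'storyboard')
log.getLogger(__name__).info('formatted by oslo.log')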
@@ -26,10 +26,10 @@ import signal
 
 from eventlet.green import subprocess
 from eventlet import greenthread
+from oslo_log import log as logging
 import six
 
 from storyboard.openstack.common.gettextutils import _
-from storyboard.openstack.common import log as logging
 
 
 LOG = logging.getLogger(__name__)
@@ -14,8 +14,9 @@
 
 import atexit
 
+from oslo_log import log
+
 from oslo.config import cfg
-from storyboard.openstack.common import log
 from storyboard.plugin.base import StoryboardPluginLoader
 from storyboard.plugin.cron.manager import CronManager
 
@@ -35,8 +36,9 @@ def main():
     crontab to target different plugins on different execution intervals.
     """
     CONF.register_cli_opts(CRON_OPTS)
+    log.register_options(CONF)
     CONF(project='storyboard')
-    log.setup('storyboard')
+    log.setup(CONF, 'storyboard')
 
     loader = StoryboardPluginLoader(namespace="storyboard.plugin.cron")
 
@@ -20,8 +20,9 @@ import os
 import pytz
 import six
 
+from oslo_log import log
+
 from storyboard.common.working_dir import get_working_directory
-from storyboard.openstack.common import log
 import storyboard.plugin.base as plugin_base
 
 
@@ -14,8 +14,8 @@
 
 from crontab import CronTab
 from oslo.config import cfg
+from oslo_log import log
 
-from storyboard.openstack.common import log
 from storyboard.plugin.base import StoryboardPluginLoader
 from storyboard.plugin.cron.base import CronPluginBase
 
@@ -14,10 +14,10 @@
 
 import abc
 
+from oslo_log import log
 import six
 
 from storyboard.openstack.common.gettextutils import _LE # noqa
-from storyboard.openstack.common import log
 from storyboard.plugin.base import PluginBase
 from storyboard.plugin.base import StoryboardPluginLoader
 
@@ -21,6 +21,7 @@ import uuid
 from alembic import command
 import fixtures
 from oslo.config import cfg
+from oslo_log import log as logging
 import pecan
 import pecan.testing
 import six
@@ -30,7 +31,6 @@ import testtools
 from storyboard.db.api import base as db_api_base
 from storyboard.db.migration.cli import get_alembic_config
 from storyboard.openstack.common import lockutils
-from storyboard.openstack.common import log as logging
 import storyboard.tests.mock_data as mock_data
 
 
@@ -41,7 +41,8 @@ _TRUE_VALUES = ('true', '1', 'yes')
 
 _DB_CACHE = None
 
-logging.setup('storyboard')
+logging.register_options(CONF)
+logging.setup(CONF, 'storyboard')
 
 
 class TestCase(testtools.TestCase):
@@ -30,13 +30,13 @@ from alembic import command
 from alembic import config as alembic_config
 from alembic import migration
 from oslo.config import cfg
+from oslo_log import log as logging
 import six
 import six.moves.urllib.parse as urlparse
 
 from storyboard.db import api as db_api
 import storyboard.db.migration
 from storyboard.openstack.common import lockutils
-from storyboard.openstack.common import log as logging
 from storyboard.openstack.common import processutils
 from storyboard.tests import base
 
@@ -15,11 +15,11 @@
 import signal
 
 from multiprocessing import Process
+from oslo_log import log
 from threading import Timer
 
 from oslo.config import cfg
 from storyboard.notifications.subscriber import subscribe
-from storyboard.openstack.common import log
 from storyboard.openstack.common.gettextutils import _LI, _LW # noqa
 
 
@@ -39,8 +39,9 @@ def run():
     """
     global MANAGER
 
-    log.setup('storyboard')
     CONF.register_cli_opts(IMPORT_OPTS)
+    log.register_options(CONF)
+    log.setup(CONF, 'storyboard')
     CONF(project='storyboard')
 
     signal.signal(signal.SIGTERM, terminate)