system-config/modules/openstack_project/templates/logstash/indexer.conf.erb
Commit 219cef8d06 by Clark Boylan: Don't index logs with DEBUG log level.
ElasticSearch has a hard time performing queries on large amounts of
data. It must load the fields it is searching on into memory, which can
cause a node to use all of the memory allocated to it; the resulting
garbage collection essentially takes the node offline. Filter out DEBUG
log messages in the devstack logs to reduce the amount of data that must
be loaded into memory when performing searches.

Change-Id: Icfe9c8c17ccef4f9379c774eef791f43463dcf6d
2013-08-06 11:52:26 -07:00

input {
  tcp {
    host => "localhost"
    port => 9999
    format => "json"
    message_format => "%{event_message}"
    type => "jenkins"
  }
}
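# Log lines arrive on the TCP input above as JSON; message_format extracts
# the original line from the event_message field. The tags already attached
# to each incoming event (console.html, screen, oslofmt, keystonefmt,
# apachecombined, syslog) select which of the filters below apply, and the
# filename field keeps multiline reassembly separate per log file.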
# You can check grok patterns at http://grokdebug.herokuapp.com/
filter {
  grep {
    # Remove unneeded html tags.
    type => "jenkins"
    tags => ["console.html"]
    # Drop matches.
    negate => true
    match => ["@message", "^</?pre>$"]
  }
  grep {
    # Remove screen log headers.
    type => "jenkins"
    tags => ["screen"]
    # Drop matches.
    negate => true
    match => ["@message", "^\+ "]
  }
  grep {
    # Remove blank lines.
    type => "jenkins"
    tags => ["keystonefmt"]
    # Drop matches.
    negate => true
    match => ["@message", "^$"]
  }
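  # The three greps above use negate => true, so lines matching the given
  # pattern are dropped and everything else passes through unchanged.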
  multiline {
    type => "jenkins"
    tags => ["console.html"]
    negate => true
    pattern => "^%{DATESTAMP} \|"
    what => "previous"
    stream_identity => "%{@source_host}.%{filename}"
  }
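  # Console lines that do not start with a "<timestamp> |" prefix are folded
  # into the previous event, keyed per source host and log file.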
  multiline {
    type => "jenkins"
    tags => ["oslofmt"]
    negate => true
    pattern => "^%{DATESTAMP} "
    what => "previous"
    stream_identity => "%{@source_host}.%{filename}"
  }
  multiline {
    type => "jenkins"
    tags => ["oslofmt"]
    negate => false
    pattern => "^%{DATESTAMP}%{SPACE}%{NUMBER}?%{SPACE}?TRACE"
    what => "previous"
    stream_identity => "%{@source_host}.%{filename}"
  }
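  # Together the two oslofmt multiline filters above append continuation
  # lines to the previous event: lines with no leading timestamp, plus TRACE
  # lines (which do carry a timestamp), so a Python traceback ends up as a
  # single event.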
  multiline {
    type => "jenkins"
    tags => ["keystonefmt"]
    negate => true
    pattern => "^\(\b%{NOTSPACE}\b\):"
    what => "previous"
    stream_identity => "%{@source_host}.%{filename}"
  }
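  # Keystone log lines that do not start with a "(module):" prefix are
  # folded into the previous event.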
  grok {
    type => "jenkins"
    tags => ["console.html"]
    # Do multiline matching as the above multiline filter may add newlines
    # to the log messages.
    pattern => [ "(?m)^%{DATESTAMP:logdate} \| %{GREEDYDATA:logmessage}" ]
    add_field => [ "received_at", "%{@timestamp}" ]
  }
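  # Hypothetical console.html line (for illustration only, not from a real
  # log) in the shape the pattern above targets:
  #   2013-08-06 18:52:26.123 | Running devstack-gate setup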
  grok {
    type => "jenkins"
    tags => ["oslofmt"]
    # Do multiline matching as the above multiline filter may add newlines
    # to the log messages.
    # TODO move the LOGLEVELs into a proper grok pattern.
    pattern => [ "(?m)^%{DATESTAMP:logdate}%{SPACE}%{NUMBER:pid}?%{SPACE}?(?<loglevel>AUDIT|CRITICAL|DEBUG|INFO|TRACE|WARNING|ERROR) \[?\b%{NOTSPACE:module}\b\]?%{SPACE}?%{GREEDYDATA:logmessage}?" ]
    add_field => [ "received_at", "%{@timestamp}" ]
  }
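  # Hypothetical oslo-format line (for illustration only) in the shape the
  # pattern above targets:
  #   2013-08-06 18:52:26.123 12345 INFO nova.compute.manager [req-abc123] Starting instance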
  grok {
    type => "jenkins"
    tags => ["keystonefmt"]
    # Do multiline matching as the above multiline filter may add newlines
    # to the log messages.
    # TODO move the LOGLEVELs into a proper grok pattern.
    pattern => [ "(?m)^\(\b%{NOTSPACE:module}\b\):%{SPACE}%{DATESTAMP:logdate}%{SPACE}(?<loglevel>AUDIT|CRITICAL|DEBUG|INFO|TRACE|WARNING|ERROR)%{SPACE}%{GREEDYDATA:logmessage}" ]
    add_field => [ "received_at", "%{@timestamp}" ]
  }
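  # Hypothetical keystone-format line (for illustration only) in the shape
  # the pattern above targets:
  #   (keystone.common.wsgi): 2013-08-06 18:52:26,123 INFO GET /v2.0/tokens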
  grok {
    type => "jenkins"
    tags => ["apachecombined"]
    pattern => [ "%{COMBINEDAPACHELOG}" ]
    add_field => [ "received_at", "%{@timestamp}", "logdate", "%{timestamp}", "logmessage", "%{verb} %{request} %{response}" ]
  }
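  # Hypothetical Apache combined-format line (for illustration only):
  #   127.0.0.1 - - [06/Aug/2013:18:52:26 +0000] "GET /v2.0/ HTTP/1.1" 200 587 "-" "python-keystoneclient"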
  grok {
    type => "jenkins"
    tags => ["syslog"]
    # Syslog grok filter adapted from
    # http://cookbook.logstash.net/recipes/syslog-pri/syslog.conf
    pattern => [ "%{SYSLOGTIMESTAMP:logdate}%{SPACE}%{SYSLOGHOST:syslog_host}?%{SPACE}%{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?:? %{GREEDYDATA:logmessage}" ]
    add_field => [ "received_at", "%{@timestamp}" ]
  }
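  # Hypothetical syslog line (for illustration only):
  #   Aug  6 18:52:26 devstack-host sshd[1234]: Accepted publickey for jenkins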
  # Remove DEBUG logs to reduce the amount of data that needs to be processed.
  grep {
    type => "jenkins"
    negate => true
    match => [ "loglevel", "DEBUG" ]
  }
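  # Only events whose grok stage produced loglevel == DEBUG are dropped here;
  # events with no loglevel field (console.html, apachecombined, syslog) pass
  # through untouched.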
  # Filters below here should be consistent for all Jenkins log formats.
  date {
    type => "jenkins"
    exclude_tags => "_grokparsefailure"
    match => [ "logdate", "yyyy-MM-dd HH:mm:ss.SSS", "yyyy-MM-dd HH:mm:ss,SSS", "yyyy-MM-dd HH:mm:ss", "MMM d HH:mm:ss", "MMM dd HH:mm:ss", "dd/MMM/yyyy:HH:mm:ss Z" ]
  }
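  # The formats above cover, roughly in order: oslo/console timestamps with
  # "." millis, keystone timestamps with "," millis, timestamps without
  # millis, syslog dates (one- and two-digit day), and Apache access-log
  # timestamps.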
  mutate {
    type => "jenkins"
    exclude_tags => "_grokparsefailure"
    replace => [ "@message", "%{logmessage}" ]
  }
  mutate {
    type => "jenkins"
    exclude_tags => "_grokparsefailure"
    remove => [ "logdate", "logmessage", "event_message" ]
  }
}
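# The ERB expressions below are expanded by Puppet when this template is
# rendered, supplying the ElasticSearch node to connect to and this worker's
# node name.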
output {
  elasticsearch {
    host => "<%= scope.lookupvar("::openstack_project::logstash_worker::discover_node") %>"
    node_name => "<%= scope.lookupvar("::hostname") %>"
    max_inflight_requests => 512
  }
}