Merge "Upgrade Logstash to 1.2.1."

Jenkins authored 2013-10-21 22:23:45 +00:00; committed by Gerrit Code Review
commit 6d3845cadb
5 changed files with 108 additions and 131 deletions

File 1 of 5: Kibana configuration (module KibanaConfig)

@@ -40,7 +40,7 @@ module KibanaConfig

   # Change which fields are shown by default. Must be set as an array
   # Default_fields = ['@fields.vhost','@fields.response','@fields.request']
-  Default_fields = ['@message']
+  Default_fields = ['message']

   # If set to true, Kibana will use the Highlight feature of Elasticsearch to
   # display highlighted search results
@@ -49,7 +49,7 @@ module KibanaConfig
   # A field needs to be specified for the highlight feature. By default,
   # Elasticsearch doesn't allow highlighting on _all because the field has to
   # be either stored or part of the _source field.
-  Highlighted_field = "@message"
+  Highlighted_field = "message"

   # Make URLs clickable in detailed view
   Clickable_URLs = true
@@ -122,10 +122,10 @@ module KibanaConfig
   # field called _all that is searched when no field is specified.
   # Dropping _all can reduce index size significantly. If you do that
   # you'll need to change primary_field to be '@message'
-  Primary_field = '@message'
+  Primary_field = 'message'

   # Default Elastic Search index to query
-  Default_index = '@message'
+  Default_index = 'message'

   # TODO: This isn't functional yet
   # Prevent wildcard search terms which result in extremely slow queries
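
Context for the rename, not stated in the diff itself: Logstash 1.2 replaced the v0 event schema, which nested data under @-prefixed keys (@message, @fields, @tags), with a flat v1 schema where message, tags, and custom fields sit at the event's top level; only @timestamp and @version keep the prefix. Hence every @message reference above becomes message. A minimal sketch of the two layouts (field values are invented):

    # Logstash v0 event (<= 1.1.x), as Kibana previously queried it.
    v0_event = {
        "@message": "Finished: SUCCESS",
        "@tags": ["console.html"],
        "@fields": {"build_status": "SUCCESS"},
    }

    # Logstash v1 event (>= 1.2), matching the new Kibana settings.
    v1_event = {
        "message": "Finished: SUCCESS",
        "tags": ["console.html"],
        "build_status": "SUCCESS",  # former @fields entries move to the top level
        "@timestamp": "2013-10-21T22:23:45.000Z",
        "@version": "1",
    }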

File 2 of 5: Puppet manifest (class logstash)

@@ -38,13 +38,13 @@ class logstash {
   }

   exec { 'get_logstash_jar':
-    command => 'wget http://logstash.objects.dreamhost.com/release/logstash-1.1.12-monolithic.jar -O /opt/logstash/logstash-1.1.12-monolithic.jar',
+    command => 'wget https://download.elasticsearch.org/logstash/logstash/logstash-1.2.1-flatjar.jar -O /opt/logstash/logstash-1.2.1-flatjar.jar',
     path => '/bin:/usr/bin',
-    creates => '/opt/logstash/logstash-1.1.12-monolithic.jar',
+    creates => '/opt/logstash/logstash-1.2.1-flatjar.jar',
     require => File['/opt/logstash'],
   }

-  file { '/opt/logstash/logstash-1.1.12-monolithic.jar':
+  file { '/opt/logstash/logstash-1.2.1-flatjar.jar':
     ensure => present,
     owner => 'logstash',
     group => 'logstash',
@@ -57,8 +57,8 @@ class logstash {
   file { '/opt/logstash/logstash.jar':
     ensure => link,
-    target => '/opt/logstash/logstash-1.1.12-monolithic.jar',
-    require => File['/opt/logstash/logstash-1.1.12-monolithic.jar'],
+    target => '/opt/logstash/logstash-1.2.1-flatjar.jar',
+    require => File['/opt/logstash/logstash-1.2.1-flatjar.jar'],
   }

   file { '/var/log/logstash':
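
The exec above stays idempotent because creates names the versioned jar: once logstash-1.2.1-flatjar.jar exists, later Puppet runs skip the wget, while the unversioned /opt/logstash/logstash.jar symlink gives init scripts a stable path across upgrades. A rough Python rendering of that guard logic, offered only as an illustration (paths and URL copied from the manifest):

    import os
    import subprocess

    JAR_URL = ("https://download.elasticsearch.org/logstash/logstash/"
               "logstash-1.2.1-flatjar.jar")
    JAR_PATH = "/opt/logstash/logstash-1.2.1-flatjar.jar"  # versioned artifact
    LINK_PATH = "/opt/logstash/logstash.jar"               # stable name

    def ensure_logstash_jar():
        # Equivalent of the exec's "creates" guard: fetch only when absent.
        if not os.path.exists(JAR_PATH):
            subprocess.check_call(["wget", JAR_URL, "-O", JAR_PATH])
        # Equivalent of the file/link resource: repoint the stable symlink.
        if os.path.islink(LINK_PATH) or os.path.exists(LINK_PATH):
            os.remove(LINK_PATH)
        os.symlink(JAR_PATH, LINK_PATH)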

File 3 of 5: Python log client (class EventProcessor)

@@ -106,8 +106,8 @@ class EventProcessor(threading.Thread):
             os.path.join(log_dir, fileopts['name'])
         fields["log_url"] = source_url
         out_event = {}
-        out_event["@fields"] = fields
-        out_event["@tags"] = [fileopts['name']] + fileopts.get('tags', [])
+        out_event["fields"] = fields
+        out_event["tags"] = [fileopts['name']] + fileopts.get('tags', [])
         return source_url, out_event
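
With the @ prefixes gone, an event published by this method now looks roughly like the following (the URL and status values are hypothetical); the worker in the next file accepts either spelling during the transition:

    out_event = {
        "fields": {"build_status": "SUCCESS",
                   "log_url": "http://logs.example.org/job/42/console.html"},
        "tags": ["console.html"],
    }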

File 4 of 5: Python log worker (class LogRetriever)

@@ -69,19 +69,20 @@ class LogRetriever(threading.Thread):
             retry = arguments['retry']
             event = arguments['event']
             logging.debug("Handling event: " + json.dumps(event))
-            fields = event['@fields']
-            tags = event['@tags']
+            fields = event.get('fields') or event.get('@fields')
+            tags = event.get('tags') or event.get('@tags')
             if fields['build_status'] != 'ABORTED':
                 # Handle events ignoring aborted builds. These builds are
                 # discarded by zuul.
                 log_lines = self._retrieve_log(source_url, retry)
                 logging.debug("Pushing " + str(len(log_lines)) + " log lines.")
+                base_event = {}
+                base_event.update(fields)
+                base_event["tags"] = tags
                 for line in log_lines:
-                    out_event = {}
-                    out_event["@fields"] = fields
-                    out_event["@tags"] = tags
-                    out_event["event_message"] = line
+                    out_event = base_event.copy()
+                    out_event["message"] = line
                     self.logq.put(out_event)

             job.sendWorkComplete()
         except Exception as e:
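
Two details of the new worker loop worth spelling out: the former @fields dict is flattened into the event's top level via base_event.update(fields), matching the flat v1 schema the indexer expects, and base_event.copy() is a shallow copy, which is safe here because each per-line event only adds or overwrites the top-level "message" key. A toy run of the loop (log lines invented):

    base_event = {"build_status": "SUCCESS", "tags": ["console.html"]}

    for line in ["<pre>", "2013-10-21 22:23:45.123 | Started"]:
        out_event = base_event.copy()   # shared fields, copied per line
        out_event["message"] = line     # only the message differs
        # each out_event is then queued for delivery to the Logstash TCP input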

File 5 of 5: Logstash pipeline configuration

@@ -2,132 +2,108 @@ input {
   tcp {
     host => "localhost"
     port => 9999
-    format => "json"
-    message_format => "%{event_message}"
+    codec => line {}
     type => "jenkins"
   }
 }

 # You can check grok patterns at http://grokdebug.herokuapp.com/
 filter {
-  grep {
-    # Remove unneeded html tags.
-    type => "jenkins"
-    tags => ["console.html"]
-    # Drop matches.
-    negate => true
-    match => ["@message", "^</?pre>$"]
-  }
+  # This is a work around for a bug. We should be able to set the tcp
+  # input codec to json, but that codec doesn't support streaming.
+  # Convert to json here instead.
+  json {
+    source => "message"
+  }
-  grep {
-    # Remove screen log headers.
-    type => "jenkins"
-    tags => ["screen"]
-    # Drop matches.
-    negate => true
-    match => ["@message", "^\+ "]
-  }
+  if "screen" in [tags] and [message] =~ "^\+ " {
+    drop {}
+  }
-  grep {
-    # Remove blank lines.
-    type => "jenkins"
-    tags => ["keystonefmt"]
-    # Drop matches.
-    negate => true
-    match => ["@message", "^$"]
-  }
-  multiline {
-    type => "jenkins"
-    tags => ["console.html"]
-    negate => true
-    pattern => "^%{DATESTAMP} \|"
-    what => "previous"
-    stream_identity => "%{@source_host}.%{filename}"
-  }
-  multiline {
-    type => "jenkins"
-    tags => ["oslofmt"]
-    negate => true
-    pattern => "^%{DATESTAMP} "
-    what => "previous"
-    stream_identity => "%{@source_host}.%{filename}"
-  }
-  multiline {
-    type => "jenkins"
-    tags => ["oslofmt"]
-    negate => false
-    pattern => "^%{DATESTAMP}%{SPACE}%{NUMBER}?%{SPACE}?TRACE"
-    what => "previous"
-    stream_identity => "%{@source_host}.%{filename}"
-  }
-  multiline {
-    type => "jenkins"
-    tags => ["keystonefmt"]
-    negate => true
-    pattern => "^\(\b%{NOTSPACE}\b\):"
-    what => "previous"
-    stream_identity => "%{@source_host}.%{filename}"
-  }
-  grok {
-    type => "jenkins"
-    tags => ["console.html"]
-    # Do multiline matching as the above mutliline filter may add newlines
-    # to the log messages.
-    pattern => [ "(?m)^%{DATESTAMP:logdate} \| %{GREEDYDATA:logmessage}" ]
-    add_field => [ "received_at", "%{@timestamp}" ]
-  }
-  grok {
-    type => "jenkins"
-    tags => ["oslofmt"]
-    # Do multiline matching as the above mutliline filter may add newlines
-    # to the log messages.
-    # TODO move the LOGLEVELs into a proper grok pattern.
-    pattern => [ "(?m)^%{DATESTAMP:logdate}%{SPACE}%{NUMBER:pid}?%{SPACE}?(?<loglevel>AUDIT|CRITICAL|DEBUG|INFO|TRACE|WARNING|ERROR) \[?\b%{NOTSPACE:module}\b\]?%{SPACE}?%{GREEDYDATA:logmessage}?" ]
-    add_field => [ "received_at", "%{@timestamp}" ]
-  }
-  grok {
-    type => "jenkins"
-    tags => ["keystonefmt"]
-    # Do multiline matching as the above mutliline filter may add newlines
-    # to the log messages.
-    # TODO move the LOGLEVELs into a proper grok pattern.
-    pattern => [ "(?m)^\(\b%{NOTSPACE:module}\b\):%{SPACE}%{DATESTAMP:logdate}%{SPACE}(?<loglevel>AUDIT|CRITICAL|DEBUG|INFO|TRACE|WARNING|ERROR)%{SPACE}%{GREEDYDATA:logmessage}" ]
-    add_field => [ "received_at", "%{@timestamp}" ]
-  }
-  grok {
-    type => "jenkins"
-    tags => ["apachecombined"]
-    pattern => [ "%{COMBINEDAPACHELOG}" ]
-    add_field => [ "received_at", "%{@timestamp}", "logdate", "%{timestamp}", "logmessage", "%{verb} %{request} %{response}" ]
-  }
-  grok {
-    type => "jenkins"
-    tags => ["syslog"]
-    # Syslog grok filter adapted from
-    # http://cookbook.logstash.net/recipes/syslog-pri/syslog.conf
-    pattern => [ "%{SYSLOGTIMESTAMP:logdate}%{SPACE}%{SYSLOGHOST:syslog_host}?%{SPACE}%{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?:? %{GREEDYDATA:logmessage}" ]
-    add_field => [ "received_at", "%{@timestamp}" ]
-  }
-  # Remove DEBUG logs to reduce the amount of data that needs to be processed.
-  grep {
-    type => "jenkins"
-    negate => true
-    match => [ "loglevel", "DEBUG" ]
-  }
+  if "console.html" in [tags] {
+    if [message] == "<pre>" or [message] == "</pre>" {
+      drop {}
+    }
+    multiline {
+      negate => true
+      pattern => "^%{TIMESTAMP_ISO8601} \|"
+      what => "previous"
+      stream_identity => "%{host}.%{filename}"
+    }
+    grok {
+      # Do multiline matching as the above mutliline filter may add newlines
+      # to the log messages.
+      match => { "message" => "(?m)^%{TIMESTAMP_ISO8601:logdate} \| %{GREEDYDATA:logmessage}" }
+      add_field => { "received_at" => "%{@timestamp}" }
+    }
+  } else if "oslofmt" in [tags] {
+    multiline {
+      negate => true
+      pattern => "^%{TIMESTAMP_ISO8601} "
+      what => "previous"
+      stream_identity => "%{host}.%{filename}"
+    }
+    multiline {
+      negate => false
+      pattern => "^%{TIMESTAMP_ISO8601}%{SPACE}%{NUMBER}?%{SPACE}?TRACE"
+      what => "previous"
+      stream_identity => "%{host}.%{filename}"
+    }
+    grok {
+      # Do multiline matching as the above mutliline filter may add newlines
+      # to the log messages.
+      # TODO move the LOGLEVELs into a proper grok pattern.
+      match => { "message" => "(?m)^%{TIMESTAMP_ISO8601:logdate}%{SPACE}%{NUMBER:pid}?%{SPACE}?(?<loglevel>AUDIT|CRITICAL|DEBUG|INFO|TRACE|WARNING|ERROR) \[?\b%{NOTSPACE:module}\b\]?%{SPACE}?%{GREEDYDATA:logmessage}?" }
+      add_field => { "received_at" => "%{@timestamp}" }
+    }
+  } else if "keystonefmt" in [tags] {
+    if [message] == "" {
+      drop {}
+    }
+    multiline {
+      negate => true
+      pattern => "^\(\b%{NOTSPACE}\b\):"
+      what => "previous"
+      stream_identity => "%{host}.%{filename}"
+    }
+    grok {
+      # Do multiline matching as the above mutliline filter may add newlines
+      # to the log messages.
+      # TODO move the LOGLEVELs into a proper grok pattern.
+      match => { "message" => "(?m)^\(\b%{NOTSPACE:module}\b\):%{SPACE}%{TIMESTAMP_ISO8601:logdate}%{SPACE}(?<loglevel>AUDIT|CRITICAL|DEBUG|INFO|TRACE|WARNING|ERROR)%{SPACE}%{GREEDYDATA:logmessage}" }
+      add_field => { "received_at" => "%{@timestamp}" }
+    }
+  } else if "apachecombined" in [tags] {
+    grok {
+      match => { "message" => "%{COMBINEDAPACHELOG}" }
+      add_field => { "received_at" => "%{@timestamp}" }
+      add_field => { "logdate" => "%{timestamp}" }
+      add_field => { "logmessage" => "%{verb} %{request} %{response}" }
+    }
+  } else if "syslog" in [tags] {
+    grok {
+      # Syslog grok filter adapted from
+      # http://cookbook.logstash.net/recipes/syslog-pri/syslog.conf
+      match => { "message" => "%{SYSLOGTIMESTAMP:logdate}%{SPACE}%{SYSLOGHOST:syslog_host}?%{SPACE}%{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?:? %{GREEDYDATA:logmessage}" }
+      add_field => { "received_at" => "%{@timestamp}" }
+    }
+  }

   # Filters below here should be consistent for all Jenkins log formats.
-  date {
-    type => "jenkins"
-    exclude_tags => "_grokparsefailure"
-    match => [ "logdate", "yyyy-MM-dd HH:mm:ss.SSS", "yyyy-MM-dd HH:mm:ss,SSS", "yyyy-MM-dd HH:mm:ss", "MMM d HH:mm:ss", "MMM dd HH:mm:ss", "dd/MMM/yyyy:HH:mm:ss Z" ]
-  }
-  mutate {
-    type => "jenkins"
-    exclude_tags => "_grokparsefailure"
-    replace => [ "@message", "%{logmessage}" ]
-  }
-  mutate {
-    type => "jenkins"
-    exclude_tags => "_grokparsefailure"
-    remove => [ "logdate", "logmessage", "event_message" ]
-  }
+  # Remove DEBUG logs to reduce the amount of data that needs to be processed.
+  if [loglevel] == "DEBUG" {
+    drop {}
+  }
+
+  if ! ("_grokparsefailure" in [tags]) {
+    date {
+      match => [ "logdate", "yyyy-MM-dd HH:mm:ss.SSS", "yyyy-MM-dd HH:mm:ss,SSS", "yyyy-MM-dd HH:mm:ss", "MMM d HH:mm:ss", "MMM dd HH:mm:ss", "dd/MMM/yyyy:HH:mm:ss Z" ]
      timezone => "UTC"
+    }
+    mutate {
+      replace => { "message" => "%{logmessage}" }
+    }
+    mutate {
+      remove_field => [ "logdate", "logmessage" ]
+    }
+  }
 }
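
One operational consequence of the new input stanza, as a closing note: with codec => line {} on the tcp input and the json filter decoding the message field, a producer must write exactly one JSON document per newline-terminated line to port 9999. A minimal, hypothetical sender illustrating that framing (event contents invented):

    import json
    import socket

    event = {
        "message": "2013-10-21 22:23:45.123 | Finished: SUCCESS",
        "tags": ["console.html"],
        "build_status": "SUCCESS",
    }

    # One JSON document per line: "codec => line {}" splits the stream on
    # newlines, then json { source => "message" } parses each line into fields.
    sock = socket.create_connection(("localhost", 9999))
    sock.sendall((json.dumps(event) + "\n").encode("utf-8"))
    sock.close()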