Make Heka send logs to Elasticsearch

This patch includes changes related to integrating Heka with
Elasticsearch and Kibana.

The main change is the addition of a Heka ElasticSearchOutput plugin
configuration, which makes Heka send the logs it collects to
Elasticsearch.

Since Logstash is not used, the enable_elk deploy variable is renamed
to enable_central_logging.

If enable_central_logging is false then Elasticsearch and Kibana are
not started, and Heka does not attempt to send logs to Elasticsearch.

By default enable_central_logging is set to false. If
enable_central_logging is set to true after the initial deployment,
the Heka container must be recreated for Heka to pick up the new
configuration.
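
For example, central logging can be turned on by setting the variable
in the deploy configuration (the path below is the usual Kolla globals
file and is shown for illustration):

    # /etc/kolla/globals.yml
    enable_central_logging: "yes"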

The Kibana configuration used property names that are deprecated in
Kibana 4.2. It now uses the non-deprecated property names.

Previously, logs read from files and logs read from Syslog had
different Types in Heka. Both now use "log" as the Type, so a single
index series is used in Elasticsearch instead of two, which makes
things easier for the user on the visualization side.
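
For illustration, combined with the ESJsonEncoder index pattern used
below ("%{Type}-%{%Y.%m.%d}"), all messages now land in one daily
index series such as (example date):

    log-2016.02.24

whereas Syslog-sourced messages previously went to a separate
"Syslog-..." index series.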

The HAProxy configuration is changed to add entries for Kibana. The
Kibana server is now accessible via the internal VIP, and also via the
external VIP if one is configured.

The HAProxy configuration is also changed to add an entry for
Elasticsearch, so Elasticsearch is now accessible via the internal
VIP. Heka uses that channel to communicate with Elasticsearch.
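
As a quick sanity check after deployment, Elasticsearch should answer
with its cluster information on the internal VIP (illustrative
command; substitute the configured VIP address):

    curl http://<kolla_internal_vip_address>:9200/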

Note that the Heka logs currently include "Plugin
elasticsearch_output" errors when Heka starts. This occurs when Heka
starts processing logs before Elasticsearch is up. These errors are
transient and go away once Elasticsearch is ready. With buffering
enabled on the ElasticSearchOutput plugin, logs are buffered and then
retransmitted when Elasticsearch becomes available.

Change-Id: I6ff7a4f0ad04c4c666e174693a35ff49914280bb
Implements: blueprint central-logging-service
Éric Lemoine 2016-02-24 16:05:36 +01:00
parent 5139a1442d
commit 491aff0b88
15 changed files with 98 additions and 34 deletions

View File

@@ -130,7 +130,7 @@ rgw_port: "6780"
 mistral_api_port: "8989"
-kibana_port: "5601"
+kibana_server_port: "5601"
 elasticsearch_port: "9200"
@@ -190,7 +190,7 @@ enable_murano: "no"
 enable_ironic: "no"
 enable_magnum: "no"
 enable_mistral: "no"
-enable_elk: "no"
+enable_central_logging: "no"
 enable_mongodb: "no"
 ironic_keystone_user: "ironic"

View File

@@ -46,3 +46,11 @@
     - "swift-object-updater"
     - "swift-proxy-server"
     - "swift-rsyncd"
+
+- name: Copying over heka elasticsearch config file
+  template:
+    src: "heka-{{ item }}.toml.j2"
+    dest: "{{ node_config_directory }}/heka/heka-{{ item }}.toml"
+  with_items:
+    - "elasticsearch"
+  when: "{{ enable_central_logging | bool }}"

View File

@@ -0,0 +1,16 @@
+[elasticsearch_json_encoder]
+type = "ESJsonEncoder"
+index = {{'"%{Type}-%{%Y.%m.%d}"'}}
+es_index_from_timestamp = true
+fields = ["Timestamp", "Type", "Logger", "Severity", "Payload", "Pid", "Hostname", "DynamicFields"]
+
+[elasticsearch_output]
+type = "ElasticSearchOutput"
+server = "{{ internal_protocol }}://{{ kolla_internal_vip_address }}:{{ elasticsearch_port }}"
+message_matcher = "Type == 'log'"
+encoder = "elasticsearch_json_encoder"
+use_buffering = true
+[elasticsearch_output.buffering]
+max_buffer_size = 1073741824 # 1024 * 1024 * 1024
+max_file_size = 134217728 # 128 * 1024 * 1024
+full_action = "drop"
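
Per Heka's buffering semantics, this gives an on-disk queue of 128 MiB
files up to 1 GiB in total, and full_action = "drop" means new
messages are discarded, rather than the pipeline blocking, if the
buffer ever fills up.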

View File

@@ -3,12 +3,12 @@
 [haproxy_file_output]
 type = "FileOutput"
-message_matcher = "Type == 'Syslog' && Fields[programname] =~ /(?i:haproxy)/"
+message_matcher = "Fields[programname] =~ /(?i:haproxy)/"
 path = "/var/log/kolla/haproxy/haproxy.log"
 encoder = "syslog_encoder"

 [keepalived_file_output]
 type = "FileOutput"
-message_matcher = "Type == 'Syslog' && Fields[programname] =~ /(?i:keepalived)/"
+message_matcher = "Fields[programname] =~ /(?i:keepalived)/"
 path = "/var/log/kolla/haproxy/keepalived.log"
 encoder = "syslog_encoder"

View File

@@ -3,6 +3,6 @@
 [{{ item }}_file_output]
 type = "FileOutput"
-message_matcher = "Type == 'Syslog' && Fields[programname] == '{{ item }}'"
+message_matcher = "Fields[programname] == '{{ item }}'"
 path = "/var/log/kolla/swift/{{ item }}.log"
 encoder = "syslog_encoder"

View File

@@ -2,6 +2,13 @@
 {
     "command": "/usr/bin/hekad -config=/etc/heka/",
     "config_files": [
+        {
+            "source": "{{ container_config_directory }}/heka-elasticsearch.toml",
+            "dest": "/etc/heka/heka-elasticsearch.toml",
+            "owner": "heka",
+            "perm": "0600",
+            "optional": "True"
+        },
         {
             "source": "{{ container_config_directory }}/heka-global.toml",
             "dest": "/etc/heka/heka-global.toml",

View File

@@ -2,8 +2,11 @@
 ####################
 # Elasticsearch
 ####################
-elasticsearch_port: "{{ elasticsearch_port }}"
-elasticsearch_host: "{{ kolla_internal_vip_address }}"
+elasticsearch_cluster_name: "kolla_logging"
+
+####################
+# Docker
+####################
 elasticsearch_image: "{{ docker_registry ~ '/' if docker_registry else '' }}{{ docker_namespace }}/{{ kolla_base_distro }}-{{ kolla_install_type }}-elasticsearch"
 elasticsearch_tag: "{{ openstack_release }}"
 elasticsearch_image_full: "{{ elasticsearch_image }}:{{ elasticsearch_tag }}"

View File

@@ -1,4 +1,17 @@
-network.host: {{ hostvars[inventory_hostname]['ansible_' + api_interface]['ipv4']['address'] }}
+{% set num_nodes = groups['elasticsearch'] | length %}
+{% set minimum_master_nodes = (num_nodes / 2 + 1) | round(0, 'floor') | int if num_nodes > 2 else 1 %}
+{% set recover_after_nodes = (num_nodes * 2 / 3) | round(0, 'floor') | int if num_nodes > 1 else 1 %}
+
+node.name: "{{ hostvars[inventory_hostname]['ansible_' + api_interface]['ipv4']['address'] }}"
+network.host: "{{ hostvars[inventory_hostname]['ansible_' + api_interface]['ipv4']['address'] }}"
+cluster.name: "{{ elasticsearch_cluster_name }}"
+node.master: true
+node.data: true
+discovery.zen.ping.unicast.hosts: [{% for host in groups['elasticsearch'] %}"{{ hostvars[host]['ansible_' + hostvars[host]['api_interface']]['ipv4']['address'] }}"{% if not loop.last %},{% endif %}{% endfor %}]
+discovery.zen.minimum_master_nodes: {{ minimum_master_nodes }}
+gateway.expected_nodes: {{ num_nodes }}
+gateway.recover_after_time: "5m"
+gateway.recover_after_nodes: {{ recover_after_nodes }}
 path.conf: "/etc/elasticsearch"
 path.data: "/var/lib/elasticsearch/data"
 path.logs: "/var/log/elasticsearch"
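
For illustration, the Jinja2 expressions above evaluate as follows
(computed by hand):

    1 node:  minimum_master_nodes = 1, recover_after_nodes = 1
    2 nodes: minimum_master_nodes = 1, recover_after_nodes = 1
    3 nodes: minimum_master_nodes = 2, recover_after_nodes = 2
    5 nodes: minimum_master_nodes = 3, recover_after_nodes = 3

i.e. a majority quorum for master election once there are more than
two nodes, and roughly two thirds of the cluster present before
recovery starts.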

View File

@@ -328,3 +328,26 @@ listen radosgw_external
 {% endfor %}
 {% endif %}
 {% endif %}
+
+{% if enable_central_logging | bool %}
+listen kibana
+  bind {{ kolla_internal_vip_address }}:{{ kibana_server_port }}
+{% for host in groups['kibana'] %}
+  server {{ hostvars[host]['ansible_hostname'] }} {{ hostvars[host]['ansible_' + hostvars[host]['api_interface']]['ipv4']['address'] }}:{{ kibana_server_port }} check inter 2000 rise 2 fall 5
+{% endfor %}
+
+{% if haproxy_enable_external_vip | bool %}
+listen kibana_external
+  bind {{ kolla_external_vip_address }}:{{ kibana_server_port }}
+{% for host in groups['kibana'] %}
+  server {{ hostvars[host]['ansible_hostname'] }} {{ hostvars[host]['ansible_' + hostvars[host]['api_interface']]['ipv4']['address'] }}:{{ kibana_server_port }} check inter 2000 rise 2 fall 5
+{% endfor %}
+{% endif %}
+
+listen elasticsearch
+  option dontlog-normal
+  bind {{ kolla_internal_vip_address }}:{{ elasticsearch_port }}
+{% for host in groups['elasticsearch'] %}
+  server {{ hostvars[host]['ansible_hostname'] }} {{ hostvars[host]['ansible_' + hostvars[host]['api_interface']]['ipv4']['address'] }}:{{ elasticsearch_port }} check inter 2000 rise 2 fall 5
+{% endfor %}
+{% endif %}
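
Once HAProxy picks up this configuration, Kibana should be reachable
in a browser at http://<internal VIP>:<kibana_server_port>/ (and at
the external VIP when one is configured); kibana_server_port defaults
to 5601, as set in the first hunk above.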

View File

@@ -2,12 +2,10 @@
 ####################
 # Kibana
 ####################
-kibana_port: "{{ kibana_port }}"
-kibana_host: "{{ kolla_internal_vip_address }}"
-kibana_app_id: "discover"
-kibana_request_timeout: 300000
-kibana_shard_timeout: 0
-kibana_verify_ssl: false
+kibana_default_app_id: "discover"
+kibana_elasticsearch_request_timeout: 300000
+kibana_elasticsearch_shard_timeout: 0
+kibana_elasticsearch_ssl_verify: false

 ####################
@@ -16,9 +14,3 @@ kibana_verify_ssl: false
 kibana_image: "{{ docker_registry ~ '/' if docker_registry else '' }}{{ docker_namespace }}/{{ kolla_base_distro }}-{{ kolla_install_type }}-kibana"
 kibana_tag: "{{ openstack_release }}"
 kibana_image_full: "{{ kibana_image }}:{{ kibana_tag }}"
-
-####################
-# Elasticsearch
-####################
-elasticsearch_preserve_host: "true"

View File

@@ -1,11 +1,10 @@
-port: {{ kibana_port }}
-host: {{ kibana_host }}
-elasticsearch_url: "{{ internal_protocol }}://{{ kolla_internal_fqdn }}:{{ elasticsearch_port }}"
-elasticsearch_preserve_host: {{ elasticsearch_preserve_host }}
-default_app_id: {{ kibana_app_id }}
-request_timeout: {{ kibana_request_timeout }}
-shard_timeout: {{ kibana_shard_timeout }}
-verify_ssl: {{ kibana_verify_ssl }}
+kibana.defaultAppId: "{{ kibana_default_app_id }}"
+server.port: {{ kibana_server_port }}
+server.host: "{{ hostvars[inventory_hostname]['ansible_' + api_interface]['ipv4']['address'] }}"
+elasticsearch.url: "{{ internal_protocol }}://{{ hostvars[inventory_hostname]['ansible_' + api_interface]['ipv4']['address'] }}:{{ elasticsearch_port }}"
+elasticsearch.requestTimeout: {{ kibana_elasticsearch_request_timeout }}
+elasticsearch.shardTimeout: {{ kibana_elasticsearch_shard_timeout }}
+elasticsearch.ssl.verify: {{ kibana_elasticsearch_ssl_verify }}
 bundled_plugin_ids:
  - plugins/dashboard/index
  - plugins/discover/index

View File

@@ -30,13 +30,13 @@
   roles:
     - { role: kibana,
        tags: kibana,
-       when: enable_elk | bool }
+       when: enable_central_logging | bool }

 - hosts: elasticsearch
   roles:
     - { role: elasticsearch,
        tags: elasticsearch,
-       when: enable_elk | bool }
+       when: enable_central_logging | bool }

 - hosts: memcached
   roles:

View File

@@ -20,7 +20,7 @@ local utils = require "os_utils"
 local msg = {
     Timestamp = nil,
-    Type = 'Syslog',
+    Type = 'log',
     Hostname = read_config("hostname"),
     Payload = nil,
     Pid = nil,

View File

@@ -20,7 +20,7 @@ local utils = require "os_utils"
 local msg = {
     Timestamp = nil,
-    Type = 'Syslog',
+    Type = 'log',
     Hostname = read_config("hostname"),
     Payload = nil,
     Pid = nil,

View File

@@ -33,7 +33,9 @@ else
         openvswitch_{vswitchd,db} \
         rabbitmq{,_bootstrap} \
         heka \
-        swift_{account_{auditor,reaper,replicator,server},container_{auditor,replicator,server,updater},object_{auditor,expirer,replicator,server,updater},proxy_server,rsyncd}
+        swift_{account_{auditor,reaper,replicator,server},container_{auditor,replicator,server,updater},object_{auditor,expirer,replicator,server,updater},proxy_server,rsyncd} \
+        elasticsearch \
+        kibana
     )
 ceph_osd_bootstrap=$(docker ps -a --filter "name=bootstrap_osd_*" --format "{{.Names}}")
 ceph_osd_containers=$(docker ps -a --filter "name=ceph_osd_*" --format "{{.Names}}")
@@ -51,7 +53,8 @@ else
         mongodb \
         haproxy_socket \
         heka{,_socket} \
-        kolla_logs
+        kolla_logs \
+        elasticsearch
     )
 fi