Deploying Filebeat, Logstash, and Elasticsearch
2019-11-20 · 夜醉梦紅尘
We implement this with Docker, which spares us most of the installation steps: once the images are pulled, we are ready to go.
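For reference, these are the images used throughout this post, all pinned to 7.4.2 to match the compose files below; a minimal sketch of pulling them up front:

```sh
# Pull the Elastic Stack images used in this post (version 7.4.2)
docker pull docker.elastic.co/elasticsearch/elasticsearch:7.4.2
docker pull docker.elastic.co/logstash/logstash:7.4.2
docker pull docker.elastic.co/beats/filebeat:7.4.2
docker pull docker.elastic.co/kibana/kibana:7.4.2
```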

We build around the following architecture: Filebeat collects the logs and ships them to Logstash, Logstash forwards them to Elasticsearch, and Kibana is used to browse the result.
Filebeat for log collection
filebeat/filebeat.yml
```yaml
filebeat.config:
  modules:
    path: ${path.config}/modules.d/*.yml
    reload.enabled: false

processors:
  - add_cloud_metadata: ~

filebeat.inputs:
  - type: log                 # input type
    paths:
      - /*.log                # .log files under the container's root path

output.logstash:
  # The Logstash hosts
  hosts: ["logstash:5044"]    # hostname to ship the output to
```
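Before wiring everything together, the config syntax can be checked from the Filebeat image itself; a quick sketch, assuming the file lives at ./filebeat/filebeat.yml on the host:

```sh
# Validate filebeat.yml using the same image (entrypoint overridden to call filebeat directly)
docker run --rm \
  --entrypoint filebeat \
  -v "$PWD/filebeat/filebeat.yml:/usr/share/filebeat/filebeat.yml:ro" \
  docker.elastic.co/beats/filebeat:7.4.2 \
  test config -c /usr/share/filebeat/filebeat.yml
```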
Configuring Logstash
docker-compose.yml
```yaml
version: "3.2"
services:
  logstash:
    image: docker.elastic.co/logstash/logstash:7.4.2
    volumes:
      - type: bind
        source: ../filebeat/2018.log
        target: /2018.log
      - type: bind
        source: ./logstash_stdout.conf
        target: /usr/share/logstash/pipeline/logstash.conf
```
logstash_stdout.conf
```conf
input {
  beats {
    port => 5044
    host => "0.0.0.0"
  }
}

output {
  elasticsearch {
    hosts => ["elasticsearch:9200"]
    manage_template => false
    index => "%{[@metadata][beat]}-%{[@metadata][version]}-%{+YYYY.MM.dd}"
  }
  # stdout { codec => rubydebug }   # uncomment this line to debug if something goes wrong
}
```
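The pipeline syntax can likewise be verified before starting anything; a sketch, assuming logstash_stdout.conf sits in the current directory on the host:

```sh
# Check the pipeline config and exit without starting Logstash
docker run --rm \
  -v "$PWD/logstash_stdout.conf:/usr/share/logstash/pipeline/logstash.conf:ro" \
  docker.elastic.co/logstash/logstash:7.4.2 \
  logstash -f /usr/share/logstash/pipeline/logstash.conf --config.test_and_exit
```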
Once each stage has been tested on its own, everything can be merged into one overall docker-compose.yml.
```yaml
version: "3.2"
services:
  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:7.4.2
    networks:
      - "elk-net"
    container_name: elasticsearch
    ports:
      - "9200:9200"
    environment:
      - discovery.type=single-node      # single-node mode
      - bootstrap.memory_lock=true
  logstash:
    image: docker.elastic.co/logstash/logstash:7.4.2
    container_name: logstash
    volumes:
      - type: bind
        source: "./logstash/logstash_stdout.conf"
        target: "/usr/share/logstash/pipeline/logstash.conf"
    networks:
      - "elk-net"
    depends_on:
      - "elasticsearch"
  filebeat:
    image: docker.elastic.co/beats/filebeat:7.4.2
    volumes:
      - type: bind
        source: "./filebeat/2018.log"
        target: "/2018.log"
      - type: bind
        source: "./filebeat/filebeat.yml"
        target: "/usr/share/filebeat/filebeat.yml"
    networks:
      - "elk-net"
    depends_on:
      - "logstash"
  kibana:
    image: docker.elastic.co/kibana/kibana:7.4.2
    ports:
      - "5601:5601"
    networks:
      - "elk-net"
    depends_on:
      - "elasticsearch"

networks:
  elk-net:
```
Remember that all services must share the same network. With the top-level networks: block above, Compose creates elk-net on its own; if you prefer to create it manually beforehand (it then needs to be declared as external in the compose file), either command works:

```sh
docker network create elk-net
docker network create -d bridge elk-net    # same thing, with the bridge driver spelled out
```
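With everything in place, the whole stack can be brought up in one shot; a minimal sketch (-d runs it in the background):

```sh
# Start Elasticsearch, Logstash, Filebeat and Kibana together
docker-compose up -d

# Follow the logs of a single service, e.g. logstash
docker-compose logs -f logstash
```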
About the output section of the Logstash config:

- hosts => ["elasticsearch:9200"] names the cluster hosts; the hostname is simply the container name.
- manage_template => false disables the default index template (see the official documentation for details).
- index => "%{[@metadata][beat]}-%{[@metadata][version]}-%{+YYYY.MM.dd}" builds the index name from:
  - %{[@metadata][beat]}: the shipper that handled the log, taken from the event metadata, e.g. Filebeat;
  - %{[@metadata][version]}: the shipper version, also from the event metadata;
  - %{+YYYY.MM.dd}: the event time rendered in Logstash's timestamp format.
The result can be inspected with the Elasticsearch Chrome extension.
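Without the browser extension, a plain curl against the published 9200 port also shows whether the filebeat-* indices have been created; a quick check, assuming the defaults above:

```sh
# List all indices; a filebeat-7.4.2-YYYY.MM.dd index should appear once logs flow
curl 'http://127.0.0.1:9200/_cat/indices?v'
```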

Visit 127.0.0.1:5601 to see the data in Kibana.

Some follow-up steps