ELK End-to-End Configuration
Contents: filebeat, kafka, logstash, elasticsearch
filebeat
filebeat.yml
filebeat.inputs:
- type: log
  encoding: utf-8
  enabled: true
  paths:
    - /home/nginx/logs/res.hualala.com.log
  fields:
    project_topic: nginx_res
    idc: ali-bj
    zone: bj
  fields_under_root: true

output.kafka:
  hosts: ["172.27.11.55:9094", "172.27.11.56:9094", "172.27.11.57:9094", "172.23.58.142:9094", "172.23.58.143:9094"]
  topic: "%{[project_topic]}"
  partition.round_robin:
    reachable_only: false
  max_message_bytes: 10000000

xpack.monitoring:
  enabled: true
  elasticsearch:
    hosts: ["http://172.27.11.90:9200"]
    username:
    password:
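Because fields_under_root: true lifts the custom fields to the top level of each event, output.kafka can resolve the topic from %{[project_topic]}. A made-up event, trimmed to the relevant fields, as it would be published to the nginx_res topic:

{
  "@timestamp": "2024-05-01T02:15:23.000Z",
  "message": "<raw nginx access log line>",
  "project_topic": "nginx_res",
  "idc": "ali-bj",
  "zone": "bj"
}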
For logs whose events span multiple lines (Java stack traces and the like), add a multiline configuration to the input so the extra lines are merged into a single event; a sample of the merge behavior follows the config below.
- type: log
  encoding: utf-8
  paths:
    - /home/hualala-client-touch/logs/*.log
  fields:
    project: hualala-client-touch
  multiline.pattern: '^\d{4}-\d{1,2}-\d{1,2}'
  multiline.match: after
  multiline.negate: true
  multiline.timeout: 5s
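With negate: true and match: after, any line that does not start with a yyyy-MM-dd timestamp is appended to the most recent line that does. The log lines below are made up purely to illustrate the merge:

2024-05-01 10:15:23 ERROR OrderService - failed to place order
java.lang.NullPointerException
    at com.hualala.touch.OrderService.place(OrderService.java:88)
2024-05-01 10:15:24 INFO  OrderService - retrying

The first three lines become one event; the last line starts a new one.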
kafka
Version selection
kafka_2.11-1.1.1
Configuration tuning
server.properties
broker.id=1
auto.create.topics.enable=true
delete.topic.enable=true
default.replication.factor=2
listeners=PLAINTEXT://172.27.11.56:9094
advertised.listeners=PLAINTEXT://172.27.11.56:9094
num.network.threads=3
num.io.threads=8
socket.send.buffer.bytes=102400
socket.receive.buffer.bytes=102400
socket.request.max.bytes=104857600
log.dirs=/data1/kafka-logs,/data2/kafka-logs
num.partitions=1
num.recovery.threads.per.data.dir=1
offsets.topic.replication.factor=2
transaction.state.log.replication.factor=1
transaction.state.log.min.isr=1
log.retention.hours=168
log.segment.bytes=1073741824
log.retention.check.interval.ms=300000
zookeeper.connect=172.27.11.56:2181,172.27.11.57:2181,172.27.11.55:2181/kafka
zookeeper.connection.timeout.ms=6000
group.initial.rebalance.delay.ms=0
unclean.leader.election.enable=false
auto.leader.rebalance.enable=false
message.max.bytes=100000000
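With auto.create.topics.enable=true, the topics Filebeat writes to (nginx_res above) are created on first publish using the broker defaults (num.partitions=1, default.replication.factor=2). Creating them explicitly gives control over the partition count; a sketch for Kafka 1.1.1, where the partition count of 6 is an assumption, not taken from the original setup:

# run from the Kafka installation directory; note the /kafka chroot from zookeeper.connect
bin/kafka-topics.sh --create \
  --zookeeper 172.27.11.56:2181,172.27.11.57:2181,172.27.11.55:2181/kafka \
  --topic nginx_res \
  --partitions 6 \
  --replication-factor 2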
JVM options
export KAFKA_HEAP_OPTS="-Xmx6G -Xms6G"
logstash
jvm.options
-Xms8g
-Xmx8g
-XX:+UseG1GC
-XX:InitiatingHeapOccupancyPercent=75
-XX:MaxGCPauseMillis=100
-Djava.awt.headless=true
-Dfile.encoding=UTF-8
-Djruby.compile.invokedynamic=true
-Djruby.jit.threshold=0
-XX:+HeapDumpOnOutOfMemoryError
-Djava.security.egd=file:/dev/urandom
logstash.yml
log.level: info
xpack.monitoring.enabled: true
xpack.monitoring.elasticsearch.username:
xpack.monitoring.elasticsearch.password:
xpack.monitoring.elasticsearch.hosts: ["http://192.168.31.10:9200","http://192.168.31.11:9200","http://192.168.31.12:9200"]
pipelines.yml
- pipeline.id: nginx_res.hualala.com
  path.config: /shards/ulp-logstash/nginx_res.yml
  pipeline.workers: 6
  pipeline.batch.size: 2500
  pipeline.batch.delay: 100
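The pipeline above points at /shards/ulp-logstash/nginx_res.yml, whose contents are not included here. A minimal sketch of what such a pipeline could look like, assuming it consumes the nginx_res topic from the Kafka cluster above and writes daily indices to the cluster listed in the monitoring section of logstash.yml; the group_id, grok pattern, and index name are assumptions:

input {
  kafka {
    bootstrap_servers => "172.27.11.55:9094,172.27.11.56:9094,172.27.11.57:9094"
    topics            => ["nginx_res"]
    group_id          => "ulp-logstash-nginx_res"    # hypothetical consumer group
    consumer_threads  => 6
    codec             => "json"                      # filebeat publishes JSON events to kafka
  }
}

filter {
  # parse the raw nginx access line carried in the filebeat "message" field;
  # assumes a combined-format access log
  grok {
    match => { "message" => "%{COMBINEDAPACHELOG}" }
  }
}

output {
  elasticsearch {
    hosts => ["http://192.168.31.10:9200", "http://192.168.31.11:9200", "http://192.168.31.12:9200"]
    index => "nginx_res-%{+YYYY.MM.dd}"              # hypothetical index naming
  }
}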
elasticsearch
elasticsearch.yml
cluster.name: ulp-sj02
node.name: ulp-es-h01
node.master: false
node.data: true
node.ingest: false
#machine learning node, x-pack required
node.ml: false
#cross-cluster search
cluster.remote.connect: true
path.data: ["/data1/elasticsearch-7.7.0/data/", "/data2/elasticsearch-7.7.0/data/"]
path.logs: /data1/elasticsearch-7.7.0/logs/
bootstrap.memory_lock: true
http.cors.allow-origin: "*"
http.cors.allow-methods: OPTIONS, HEAD, GET, POST, PUT, DELETE
http.cors.allow-headers: X-Requested-With, X-Auth-Token, Content-Type, Content-Length, Authorization
network.host: 0.0.0.0
http.cors.enabled: true
http.port: 9200
discovery.seed_hosts: ["192.168.121.154", "192.168.121.155", "192.168.121.167", "192.168.121.166"]
# Bootstrap the cluster using an initial set of master-eligible nodes:
cluster.initial_master_nodes: ["ulp-es-m01"]
#gateway.recover_after_nodes: 3
#action.destructive_requires_name: true
cluster.routing.allocation.same_shard.host: true
node.attr.box_type: hot
xpack.security.enabled: false
xpack.security.transport.ssl.enabled: false
xpack.monitoring.exporters.my_local:
  type: local
  use_ingest: false
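node.attr.box_type: hot tags this node for hot/warm allocation, but the attribute only matters if indices request it. A sketch of a legacy index template that pins new nginx_res indices to hot nodes; the template name, index pattern, and shard counts are assumptions:

PUT _template/nginx_res_hot
{
  "index_patterns": ["nginx_res-*"],
  "settings": {
    "index.routing.allocation.require.box_type": "hot",
    "number_of_shards": 3,
    "number_of_replicas": 1
  }
}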
jvm.options
-Xms32g
-Xmx32g
-XX:+UseG1GC
-XX:InitiatingHeapOccupancyPercent=75
-XX:MaxGCPauseMillis=100
-Des.networkaddress.cache.ttl=60
-Des.networkaddress.cache.negative.ttl=10
-XX:+AlwaysPreTouch
-Xss1m
-Djava.awt.headless=true
-Dfile.encoding=UTF-8
-Djna.nosys=true
-XX:-OmitStackTraceInFastThrow
-Dio.netty.noUnsafe=true
-Dio.netty.noKeySetOptimization=true
-Dio.netty.recycler.maxCapacityPerThread=0
-Dlog4j.shutdownHookEnabled=false
-Dlog4j2.disable.jmx=true
-Djava.io.tmpdir=${ES_TMPDIR}
-XX:+HeapDumpOnOutOfMemoryError
-XX:HeapDumpPath=data
-XX:ErrorFile=logs/hs_err_pid%p.log
8:-XX:+PrintGCDetails
8:-XX:+PrintGCDateStamps
8:-XX:+PrintTenuringDistribution
8:-XX:+PrintGCApplicationStoppedTime
8:-Xloggc:logs/gc.log
8:-XX:+UseGCLogFileRotation
8:-XX:NumberOfGCLogFiles=32
8:-XX:GCLogFileSize=64m
9-:-Xlog:gc*,gc+age=trace,safepoint:file=logs/gc.log:utctime,pid,tags:filecount=32,filesize=64m
9-:-Djava.locale.providers=COMPAT
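bootstrap.memory_lock: true in elasticsearch.yml only works if the operating system lets the Elasticsearch process lock its heap; otherwise the node fails the memory-lock bootstrap check in production mode. The usual prerequisites, assuming Elasticsearch runs as the elasticsearch user under systemd:

# /etc/security/limits.conf
elasticsearch soft memlock unlimited
elasticsearch hard memlock unlimited

# systemd override (systemctl edit elasticsearch.service)
[Service]
LimitMEMLOCK=infinity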