# ssh
ssh id@log_generating_server
# cd
cd /dir/filebeat_to_kafka
# extract the tar archive
tar -xvf filebeat_to_kafka.tar
# Check running processes
ps -ef | grep filebeat
# stop the running filebeat process
kill -9 `ps -ef | grep -w filebeat-big.yml | grep -v grep | awk '{print $2}'`
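A shorter alternative, assuming pkill (from procps) is available on the server: pkill -f matches against the full command line, so it targets only the instance started with filebeat-big.yml, and it sends SIGTERM by default, which lets Filebeat shut down and flush its registry instead of being killed outright.

# gentler stop (sketch; assumes pkill is installed)
pkill -f filebeat-big.yml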
# update yml file
vi filebeat-big.yml
#----------------------- filebeat prospectors ---------------------------------
filebeat.prospectors:
- input_type: log
  paths:
    - /dir/AAA???/app.log
  document_type: filebeat_topic
  scan_frequency: 10s
#-------------------------------------------------------------------------------
- input_type: log
  paths:
    - /dir/BBB???/app.log
  document_type: filebeat_topic
  scan_frequency: 10s
#-------------------------- outputs -------------------------------------------
output.kafka:
  hosts: ["kafka1.com:9092", "kafka2.com:9092"]
  topic: '%{[type]}'
  partition.round_robin:
    reachable_only: false
  required_acks: 1
  compression: gzip
  max_message_bytes: 1000000
#--------------------------logging--------------------------------
logging.level: critical
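Before restarting, the edited file can be syntax-checked. A minimal sketch, assuming Filebeat 5.x, where the -configtest flag tests the configuration and exits (newer releases use `filebeat test config` instead):

# validate the edited config before restarting
./filebeat -configtest -c filebeat-big.yml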
# start filebeat with the updated config
./filebeat -e -c filebeat-big.yml -d "publish" -path.data=bigdata
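To confirm the restart, re-run the process check; the Kafka side can also be spot-checked by consuming a few records. This is a sketch assuming the standard Kafka console tools are installed on a broker host (the script path varies by install); the topic name filebeat_topic comes from document_type above via the '%{[type]}' placeholder.

# confirm the new instance is running
ps -ef | grep -w filebeat-big.yml | grep -v grep
# consume a few records to verify delivery (run on a broker host)
bin/kafka-console-consumer.sh --bootstrap-server kafka1.com:9092 \
  --topic filebeat_topic --from-beginning --max-messages 5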