安装Docker-Compose
# Download the docker-compose binary for this OS/architecture.
curl -L https://github.com/docker/compose/releases/download/1.17.1/docker-compose-`uname -s`-`uname -m` -o /usr/local/bin/docker-compose
# The downloaded file has no execute permission by default; without this,
# the `docker-compose` command below fails with "Permission denied".
chmod +x /usr/local/bin/docker-compose
在任意目录(建议 /root/elk)下创建 docker-compose.yml 文件,写入以下内容
# Single-node ELK stack (Elasticsearch + Logstash + Kibana, all 7.1.0).
# NOTE: the original paste had lost all YAML indentation, which makes the
# file unparseable as a compose file; structure restored below.
version: '3'
services:
  elasticsearch:
    image: elasticsearch:7.1.0
    container_name: elasticsearch
    user: root
    environment:
      - "cluster.name=elasticsearch"
      # single-node mode: skip cluster bootstrap/discovery checks
      - "discovery.type=single-node"
      # cap the JVM heap so the container runs on small hosts
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
    ports:
      # quoted to avoid YAML's sexagesimal/implicit-typing traps on host:container pairs
      - "9200:9200"   # HTTP API
      - "9300:9300"   # transport protocol
  logstash:
    image: logstash:7.1.0
    container_name: logstash
    environment:
      - TZ=Asia/Shanghai
    volumes:
      # pipeline config file is created in a later step; path unified with the
      # second compose file and the /root/elk layout used elsewhere in this doc
      - /root/elk/logstash/logstash-springboot.conf:/usr/share/logstash/pipeline/logstash.conf
    depends_on:
      - elasticsearch
    links:
      # alias "es" is what the pipeline config and Kibana use to reach elasticsearch
      - elasticsearch:es
    ports:
      - "4560:4560"   # TCP input for application logs
  kibana:
    image: kibana:7.1.0
    container_name: kibana
    links:
      - elasticsearch:es
    depends_on:
      - elasticsearch
    environment:
      # must use the env-var form ELASTICSEARCH_HOSTS; the dotted setting name
      # "elasticsearch.hosts" is not recognized as a container environment variable
      - "ELASTICSEARCH_HOSTS=http://es:9200"
      - "I18N_LOCALE=zh-CN"
    ports:
      - "5601:5601"   # Kibana web UI
运行docker-compose
# Start all three containers in detached (background) mode.
docker-compose up -d
此时 logstash 会因找不到挂载的配置文件而启动报错,需要在宿主机对应的数据卷路径下创建 logstash-springboot.conf 文件,内容如下
# Logstash pipeline: receive JSON log lines over TCP and index them into Elasticsearch.
input {
tcp {
mode => "server"
host => "0.0.0.0"     # listen on all interfaces inside the container
port => 4560          # matches the port published in docker-compose
codec => json_lines   # requires the logstash-codec-json_lines plugin (installed in a later step)
}
}
output {
elasticsearch {
hosts => ["es:9200"]                  # "es" is the compose link alias for the elasticsearch container
index => "wing-blog-%{+YYYY.MM.dd}"   # one index per day
}
stdout { codec => rubydebug }   # also echo each event to the container log for debugging
}
挂载es数据卷前,把es容器中的数据复制出来
# Copy the container's elasticsearch directory out to the host first,
# otherwise bind-mounting empty host dirs over data/config/plugins would
# hide the files the container ships with.
docker cp elasticsearch:/usr/share/elasticsearch /root/elk
修改docker-compose文件,重新启动docker-compose容器
# Revised compose file: identical to the first, plus bind mounts that persist
# Elasticsearch data/config/plugins under /root/elk on the host.
# NOTE: the original paste had lost all YAML indentation; structure restored.
version: '3'
services:
  elasticsearch:
    image: elasticsearch:7.1.0
    container_name: elasticsearch
    user: root
    environment:
      - "cluster.name=elasticsearch"
      # single-node mode: skip cluster bootstrap/discovery checks
      - "discovery.type=single-node"
      # cap the JVM heap so the container runs on small hosts
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
    volumes:
      # host dirs pre-populated via `docker cp` in the previous step
      - /root/elk/elasticsearch/data:/usr/share/elasticsearch/data
      - /root/elk/elasticsearch/config:/usr/share/elasticsearch/config
      - /root/elk/elasticsearch/plugins:/usr/share/elasticsearch/plugins
    ports:
      # quoted to avoid YAML's sexagesimal/implicit-typing traps on host:container pairs
      - "9200:9200"   # HTTP API
      - "9300:9300"   # transport protocol
  logstash:
    image: logstash:7.1.0
    container_name: logstash
    environment:
      - TZ=Asia/Shanghai
    volumes:
      - /root/elk/logstash/logstash-springboot.conf:/usr/share/logstash/pipeline/logstash.conf
    depends_on:
      - elasticsearch
    links:
      # alias "es" is what the pipeline config and Kibana use to reach elasticsearch
      - elasticsearch:es
    ports:
      - "4560:4560"   # TCP input for application logs
  kibana:
    image: kibana:7.1.0
    container_name: kibana
    links:
      - elasticsearch:es
    depends_on:
      - elasticsearch
    environment:
      # must use the env-var form ELASTICSEARCH_HOSTS; the dotted setting name
      # "elasticsearch.hosts" is not recognized as a container environment variable
      - "ELASTICSEARCH_HOSTS=http://es:9200"
      - "I18N_LOCALE=zh-CN"
    ports:
      - "5601:5601"   # Kibana web UI
进入logstash容器,安装json解析器
# Open an interactive shell inside the running logstash container.
docker exec -it logstash /bin/bash
cd /bin
# Install the codec that the pipeline's tcp input declares (codec => json_lines).
logstash-plugin install logstash-codec-json_lines
exit
# Restart the container so the newly installed plugin is loaded.
docker restart logstash
给 es 安装 ik 分词器:下载与 es 版本一致的 elasticsearch-analysis-ik-7.1.0 压缩包,解压到挂载的 plugins 目录(宿主机 /root/elk/elasticsearch/plugins)下,并把解压出的目录重命名为 ik。
重启es
# Restart elasticsearch so it loads the newly added ik plugin from the plugins mount.
docker restart elasticsearch
测试:在 Kibana 的 Dev Tools 中执行以下请求,验证 ik 分词器已生效
POST /_analyze
{
"analyzer": "ik_smart",
"text":"可变羽翼核心的博客真是太棒了!"
}