Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -35,3 +35,6 @@ out/

### VS Code ###
.vscode/
/logs/**
/.github/git-commit-instructions.md
/monitoring/grafana/volume/**
3 changes: 3 additions & 0 deletions build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,9 @@ dependencies {
// monitoring
implementation("org.springframework.boot:spring-boot-starter-actuator")
runtimeOnly("io.micrometer:micrometer-registry-prometheus")

// logging
implementation 'net.logstash.logback:logstash-logback-encoder:8.1'
}

tasks.named('test') {
Expand Down
94 changes: 94 additions & 0 deletions docker-compose-elk.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,94 @@
# ELK + Filebeat stack for the rabbit service.
# All credentials and the stack version default to development values; override
# them via environment variables (ELASTIC_VERSION, *_PASSWORD, ...) elsewhere.
version: "3.8"

services:
  # One-shot bootstrap container (enable with the `setup` profile): runs
  # elk/setup/sh/entrypoint.sh to create the logstash role/user and set
  # built-in passwords.
  setup:
    profiles: [setup]
    init: true
    build:
      context: elk/setup/
      args:
        ELASTIC_VERSION: ${ELASTIC_VERSION:-8.10.2}
    volumes:
      - ./elk/setup/sh/entrypoint.sh:/entrypoint.sh:ro,Z
      - ./elk/setup/sh/lib.sh:/lib.sh:ro,Z
      - ./elk/setup/roles:/roles:ro,Z
    environment:
      ELASTIC_PASSWORD: ${ELASTIC_PASSWORD:-rabbit1234}
      KIBANA_SYSTEM_PASSWORD: ${KIBANA_SYSTEM_PASSWORD:-rabbit1234}
      LOGSTASH_USERNAME: ${LOGSTASH_USERNAME:-rabbit_logstash}
      LOGSTASH_PASSWORD: ${LOGSTASH_PASSWORD:-rabbit1234}
    networks: [rabbit-elk]
    depends_on: [elasticsearch]

  elasticsearch:
    container_name: rabbit-elasticsearch
    build:
      context: ./elk/elasticsearch
      args:
        ELASTIC_VERSION: ${ELASTIC_VERSION:-8.10.2}
    volumes:
      - ./elk/elasticsearch/config/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml:ro,Z
      # Named volume keeps index data across container recreation.
      - elasticsearch:/usr/share/elasticsearch/data
    ports:
      - "9200:9200"  # HTTP API
      - "9300:9300"  # transport
    environment:
      ES_JAVA_OPTS: "-Xmx256m -Xms256m"
      ELASTIC_PASSWORD: ${ELASTIC_PASSWORD:-rabbit1234}
      # Also set in elk/elasticsearch/config/elasticsearch.yml.
      discovery.type: single-node
    networks: [rabbit-elk]

  logstash:
    container_name: rabbit-logstash
    build:
      context: ./elk/logstash/
      args:
        ELASTIC_VERSION: ${ELASTIC_VERSION:-8.10.2}
    volumes:
      - ./elk/logstash/config/logstash.yml:/usr/share/logstash/config/logstash.yml:ro,Z
      - ./elk/logstash/pipeline:/usr/share/logstash/pipeline:ro,Z
    ports:
      - "5044:5044"          # beats input (see pipeline/logstash.conf)
      - "50000:50000/tcp"
      - "50000:50000/udp"
      - "9600:9600"          # monitoring/management API
    environment:
      LS_JAVA_OPTS: "-Xmx256m -Xms256m"
      LOGSTASH_USERNAME: ${LOGSTASH_USERNAME:-rabbit_logstash}
      LOGSTASH_PASSWORD: ${LOGSTASH_PASSWORD:-rabbit1234}
    networks: [rabbit-elk]
    depends_on: [elasticsearch]

  kibana:
    container_name: rabbit-kibana
    build:
      context: ./elk/kibana/
      args:
        ELASTIC_VERSION: ${ELASTIC_VERSION:-8.10.2}
    volumes:
      - ./elk/kibana/config/kibana.yml:/usr/share/kibana/config/kibana.yml:ro,Z
    ports:
      - "5601:5601"
    environment:
      KIBANA_SYSTEM_PASSWORD: ${KIBANA_SYSTEM_PASSWORD:-rabbit1234}
    networks: [rabbit-elk]
    depends_on: [elasticsearch]

  filebeat:
    container_name: rabbit-filebeat
    # Root is required to read the host-mounted ./logs directory.
    user: root
    build:
      context: ./elk/filebeat/
      args:
        ELASTIC_VERSION: ${ELASTIC_VERSION:-8.10.2}
    volumes:
      - ./logs:/logs
      - ./elk/filebeat/config/filebeat.yml:/usr/share/filebeat/filebeat.yml:ro,Z
    networks: [rabbit-elk]
    depends_on: [logstash]

volumes:
  elasticsearch:

networks:
  rabbit-elk:
4 changes: 4 additions & 0 deletions elk/elasticsearch/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
# Version is supplied as a build arg by docker-compose-elk.yml.
ARG ELASTIC_VERSION

# https://www.docker.elastic.co/
FROM docker.elastic.co/elasticsearch/elasticsearch:${ELASTIC_VERSION}
12 changes: 12 additions & 0 deletions elk/elasticsearch/config/elasticsearch.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
---
## Default Elasticsearch configuration from Elasticsearch base image.
## https://github.com/elastic/elasticsearch/blob/master/distribution/docker/src/docker/config/elasticsearch.yml
#
cluster.name: docker-cluster
# Bind all interfaces so the container port mapping (9200/9300) works.
network.host: 0.0.0.0

# Trial license with security enabled; passwords come from the compose environment.
xpack.license.self_generated.type: trial
xpack.security.enabled: true

# Single-node discovery — remove when scaling out to a multi-node cluster.
# (docker-compose-elk.yml also sets this via the container environment.)
discovery.type: single-node
10 changes: 10 additions & 0 deletions elk/filebeat/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
# Version is supplied as a build arg by docker-compose-elk.yml.
ARG ELASTIC_VERSION

FROM docker.elastic.co/beats/filebeat:${ELASTIC_VERSION}

# Bake the config into the image; docker-compose additionally bind-mounts the
# same file read-only over this copy at runtime.
COPY config/filebeat.yml /usr/share/filebeat/filebeat.yml
# No chown/mkdir needed here: the compose service runs the container with
# `user: root`, which already grants access to the mounted /logs directory.
14 changes: 14 additions & 0 deletions elk/filebeat/config/filebeat.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
# Ship JSON-encoded application logs from /logs (bind-mounted by
# docker-compose-elk.yml) to Logstash.
# NOTE(review): the `log` input type is deprecated in Filebeat 8.x in favor of
# `filestream`; kept as-is to preserve behavior — migrate deliberately.
filebeat.inputs:
  - type: log
    enabled: true
    paths:
      - /logs/*.log
    # Logs are JSON (logstash-logback-encoder); lift fields to the top level,
    # overwriting conflicts, and record decode errors on the event.
    json.keys_under_root: true
    json.add_error_key: true
    json.overwrite_keys: true

output.logstash:
  hosts: ["logstash:5044"]

setup.kibana:
  host: "http://kibana:5601"
7 changes: 7 additions & 0 deletions elk/kibana/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
# Version is supplied as a build arg by docker-compose-elk.yml.
ARG ELASTIC_VERSION

# https://www.docker.elastic.co/
FROM docker.elastic.co/kibana/kibana:${ELASTIC_VERSION}

# Add your kibana plugins setup here
# Example: RUN kibana-plugin install <name|url>
9 changes: 9 additions & 0 deletions elk/kibana/config/kibana.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
# Kibana runtime configuration for the rabbit ELK stack.
server.name: kibana
# Bind all interfaces so the container port mapping (5601) works.
server.host: 0.0.0.0

# Connect to Elasticsearch over the compose network as the built-in
# kibana_system user.
elasticsearch.hosts: [ "http://elasticsearch:9200" ]
elasticsearch.username: kibana_system
# NOTE(review): assumes Kibana expands ${...} from the container environment
# (KIBANA_SYSTEM_PASSWORD is set in docker-compose-elk.yml) — confirm.
elasticsearch.password: ${KIBANA_SYSTEM_PASSWORD}

monitoring.ui.container.elasticsearch.enabled: true
monitoring.ui.container.logstash.enabled: true
8 changes: 8 additions & 0 deletions elk/logstash/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
# Version is supplied as a build arg by docker-compose-elk.yml.
ARG ELASTIC_VERSION

# https://www.docker.elastic.co/
FROM docker.elastic.co/logstash/logstash:${ELASTIC_VERSION}

# Add your logstash plugins setup here
# Example: RUN logstash-plugin install logstash-filter-json
# `prune` is used by pipeline/logstash.conf to drop noisy metadata fields.
RUN logstash-plugin install logstash-filter-prune
6 changes: 6 additions & 0 deletions elk/logstash/config/logstash.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
# Logstash node settings (the pipeline itself lives in pipeline/logstash.conf).

# Bind the monitoring/management API (port 9600) on all interfaces.
# NOTE(review): `http.host` was renamed `api.http.host` in Logstash 8.x; the
# old key still works but is deprecated — confirm and migrate.
http.host: "0.0.0.0"

# Self-monitoring: ship node metrics to Elasticsearch using the credentials
# injected by docker-compose-elk.yml.
xpack.monitoring.enabled: true
xpack.monitoring.elasticsearch.hosts: [ "http://elasticsearch:9200" ]
xpack.monitoring.elasticsearch.username: ${LOGSTASH_USERNAME}
xpack.monitoring.elasticsearch.password: ${LOGSTASH_PASSWORD}
52 changes: 52 additions & 0 deletions elk/logstash/pipeline/logstash.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
input {
  # Events shipped by Filebeat.
  beats {
    port => 5044
  }
}

filter {
  # Derive the daily index suffix in Korea Standard Time (UTC+9, no DST):
  # shift the event timestamp by 9h and format it as YYYY.MM.dd.
  # The previous `mutate add_field` seeding of this metadata field was dead
  # code — this ruby filter immediately overwrote it — so it was removed.
  ruby {
    code => "
      event.set('[@metadata][korea_time]', LogStash::Timestamp.at(event.get('@timestamp').to_i + 9 * 60 * 60).time.strftime('%Y.%m.%d'))
    "
  }

  # Resolve the level-specific index segment once, instead of duplicating
  # three near-identical elasticsearch output blocks. Anything that is not
  # ERROR or WARN (including events with no [level] field) goes to "info",
  # matching the original else-branch.
  if [level] == "ERROR" {
    mutate { add_field => { "[@metadata][level_tag]" => "error" } }
  } else if [level] == "WARN" {
    mutate { add_field => { "[@metadata][level_tag]" => "warn" } }
  } else {
    mutate { add_field => { "[@metadata][level_tag]" => "info" } }
  }

  # Drop noisy Beats/ECS metadata fields before indexing.
  prune {
    blacklist_names => [
      "^ecs\\.",
      "^host\\.",
      "^log\\.file",
      "^log\\.offset$",
      "^event\\.original",
      ".*\\.keyword$"
    ]
  }
}

output {
  # Single output; the per-level daily index name is resolved per event,
  # e.g. logstash-error-2024.01.31.
  elasticsearch {
    hosts => ["http://elasticsearch:9200"]
    index => "logstash-%{[@metadata][level_tag]}-%{[@metadata][korea_time]}"
    user => "${LOGSTASH_USERNAME}"
    password => "${LOGSTASH_PASSWORD}"
  }
}

6 changes: 6 additions & 0 deletions elk/setup/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
# Default matches the 8.10.2 fallback used everywhere else in
# docker-compose-elk.yml. The previous fallback (9.0.2) would silently build a
# mismatched major version when ELASTIC_VERSION was not provided.
ARG ELASTIC_VERSION=8.10.2

# https://www.docker.elastic.co/
# Reuse the Elasticsearch image for the setup tooling; the actual entrypoint.sh
# and lib.sh are bind-mounted at / by docker-compose-elk.yml.
FROM docker.elastic.co/elasticsearch/elasticsearch:${ELASTIC_VERSION}

ENTRYPOINT ["/entrypoint.sh"]
34 changes: 34 additions & 0 deletions elk/setup/roles/logstash_writer.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
{
  "cluster": [
    "manage_index_templates",
    "monitor",
    "manage_ilm",
    "manage"
  ],
  "indices": [
    {
      "names": [
        "logs-generic-default",
        "logstash-*",
        "ecs-logstash-*"
      ],
      "privileges": [
        "write",
        "create",
        "create_index",
        "manage",
        "manage_ilm"
      ]
    },
    {
      "names": [
        "logstash",
        "ecs-logstash"
      ],
      "privileges": [
        "write",
        "manage"
      ]
    }
  ]
}
105 changes: 105 additions & 0 deletions elk/setup/sh/entrypoint.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,105 @@
#!/usr/bin/env bash

# Bootstrap Elasticsearch security state for the ELK stack:
#   1. wait until Elasticsearch answers,
#   2. wait until the built-in users are initialized,
#   3. create/update the roles declared below from their JSON bodies,
#   4. create the declared users, or reset their passwords if they exist.
# Helper functions (log/sublog/suberr, wait_for_elasticsearch,
# wait_for_builtin_users, ensure_role, check_user_exists, set_user_password,
# create_user) come from lib.sh, which sits next to this script (both are
# bind-mounted at / in the setup container).

set -eu
set -o pipefail

source "${BASH_SOURCE[0]%/*}"/lib.sh


# --------------------------------------------------------
# Users declarations

# user -> password; an empty password means the user is skipped below.
declare -A users_passwords
users_passwords=(
    [${LOGSTASH_USERNAME}]="${LOGSTASH_PASSWORD}"
    [kibana_system]="${KIBANA_SYSTEM_PASSWORD:-}"
)

# user -> role assigned at creation time; users without an entry here are
# never created (only password-reset if they already exist).
declare -A users_roles
users_roles=(
    [${LOGSTASH_USERNAME}]='logstash_writer'
)

# --------------------------------------------------------
# Roles declarations

# role name -> JSON body file under ./roles/
declare -A roles_files
roles_files=(
    [logstash_writer]='logstash_writer.json'
)
# --------------------------------------------------------

log 'Waiting for availability of Elasticsearch. This can take several minutes.'

declare -i exit_code=0
wait_for_elasticsearch || exit_code=$?

if ((exit_code)); then
    # NOTE(review): codes 6/7/28 look like curl exit codes presumably
    # propagated by wait_for_elasticsearch — confirm against lib.sh.
    case $exit_code in
        6)
            suberr 'Could not resolve host. Is Elasticsearch running?'
            ;;
        7)
            suberr 'Failed to connect to host. Is Elasticsearch healthy?'
            ;;
        28)
            suberr 'Timeout connecting to host. Is Elasticsearch healthy?'
            ;;
        *)
            suberr "Connection to Elasticsearch failed. Exit code: ${exit_code}"
            ;;
    esac

    exit $exit_code
fi

sublog 'Elasticsearch is running'

log 'Waiting for initialization of built-in users'

wait_for_builtin_users || exit_code=$?

if ((exit_code)); then
    suberr 'Timed out waiting for condition'
    exit $exit_code
fi

sublog 'Built-in users were initialized'

# Create or update each declared role from its JSON body file.
for role in "${!roles_files[@]}"; do
    log "Role '$role'"

    declare body_file
    body_file="${BASH_SOURCE[0]%/*}/roles/${roles_files[$role]:-}"
    if [[ ! -f "${body_file:-}" ]]; then
        sublog "No role body found at '${body_file}', skipping"
        continue
    fi

    sublog 'Creating/updating'
    ensure_role "$role" "$(<"${body_file}")"
done

# Create each declared user, or only reset its password when it already exists.
for user in "${!users_passwords[@]}"; do
    log "User '$user'"
    if [[ -z "${users_passwords[$user]:-}" ]]; then
        sublog 'No password defined, skipping'
        continue
    fi

    # NOTE(review): assumes check_user_exists prints 0/1 rather than using its
    # exit status — under `set -e` a nonzero exit here would abort the script.
    declare -i user_exists=0
    user_exists="$(check_user_exists "$user")"

    if ((user_exists)); then
        sublog 'User exists, setting password'
        set_user_password "$user" "${users_passwords[$user]}"
    else
        if [[ -z "${users_roles[$user]:-}" ]]; then
            suberr ' No role defined, skipping creation'
            continue
        fi

        sublog 'User does not exist, creating'
        create_user "$user" "${users_passwords[$user]}" "${users_roles[$user]}"
    fi
done
Loading
Loading