add process monitor and update example
chenqz1987 committed Jul 25, 2018
1 parent 92434f3 commit 1ba208a
Showing 24 changed files with 149 additions and 84 deletions.
11 changes: 8 additions & 3 deletions README.md
@@ -15,7 +15,11 @@ Prerequisites:

```
git clone git@github.com:AliyunContainerService/log-pilot.git
cd log-pilot/quickstart
# build log-pilot image
cd log-pilot/
./build.sh
# quick start
cd ../quickstart/
./run
```

@@ -73,8 +77,9 @@ Prerequisites:

```
go get github.com/AliyunContainerService/log-pilot
cd $GOPATH/github.com/AliyunContainerService/log-pilot/docker-images
./build.sh # This will create a new docker image named pilot:latest
cd $GOPATH/github.com/AliyunContainerService/log-pilot
# This will create a new docker image named log-pilot:latest
./build.sh
```

Contribute
24 changes: 17 additions & 7 deletions assets/filebeat/config.filebeat
@@ -29,7 +29,7 @@ path.data: /var/lib/filebeat/data
filebeat.registry_file: /var/lib/filebeat/registry
filebeat.shutdown_timeout: ${FILEBEAT_SHUTDOWN_TIMEOUT:-0}
logging.level: ${FILEBEAT_LOG_LEVEL:-info}
logging.metrics.enabled: true
logging.metrics.enabled: ${FILEBEAT_METRICS_ENABLED:-false}
${FILEBEAT_MAX_PROCS:+max_procs: ${FILEBEAT_MAX_PROCS}}
setup.template.name: "${FILEBEAT_INDEX:-filebeat}"
setup.template.pattern: "${FILEBEAT_INDEX:-filebeat}-*"
@@ -43,19 +43,25 @@ EOF
}

es() {
if [ -f "/run/secrets/es_credential" ];then
if [ -f "/run/secrets/es_credential" ]; then
ELASTICSEARCH_USER=$(cat /run/secrets/es_credential | awk -F":" '{ print $1 }')
ELASTICSEARCH_PASSWORD=$(cat /run/secrets/es_credential | awk -F":" '{ print $2 }')
fi

assert_not_empty "$ELASTICSEARCH_HOST" "ELASTICSEARCH_HOST required"
assert_not_empty "$ELASTICSEARCH_PORT" "ELASTICSEARCH_PORT required"
if [ -n "$ELASTICSEARCH_HOSTS" ]; then
ELASTICSEARCH_HOSTS=$(echo $ELASTICSEARCH_HOSTS|awk -F, '{for(i=1;i<=NF;i++){printf "\"%s\",", $i}}')
ELASTICSEARCH_HOSTS=${ELASTICSEARCH_HOSTS%,}
else
assert_not_empty "$ELASTICSEARCH_HOST" "ELASTICSEARCH_HOST required"
assert_not_empty "$ELASTICSEARCH_PORT" "ELASTICSEARCH_PORT required"
ELASTICSEARCH_HOSTS="\"$ELASTICSEARCH_HOST:$ELASTICSEARCH_PORT\""
fi

cat >> $FILEBEAT_CONFIG << EOF
$(base)
output.elasticsearch:
hosts: ["$ELASTICSEARCH_HOST:$ELASTICSEARCH_PORT"]
index: ${FILEBEAT_INDEX:-filebeat}-%{+yyyy.MM.dd}
hosts: [$ELASTICSEARCH_HOSTS]
index: ${ELASTICSEARCH_INDEX:-filebeat}-%{+yyyy.MM.dd}
${ELASTICSEARCH_SCHEME:+protocol: ${ELASTICSEARCH_SCHEME}}
${ELASTICSEARCH_USER:+username: ${ELASTICSEARCH_USER}}
${ELASTICSEARCH_PASSWORD:+password: ${ELASTICSEARCH_PASSWORD}}
@@ -156,7 +162,11 @@ output.count:
EOF
}

case "$FILEBEAT_OUTPUT" in
if [ -n "$FILEBEAT_OUTPUT" ]; then
LOGGING_OUTPUT=$FILEBEAT_OUTPUT
fi

case "$LOGGING_OUTPUT" in
elasticsearch)
es;;
logstash)
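For reference, the multi-host handling added to `config.filebeat` above converts a comma-separated `ELASTICSEARCH_HOSTS` value into the quoted list that the filebeat YAML output expects. A minimal sketch of that transformation, using placeholder host names:

```
# Sketch only: placeholder hosts, same awk conversion as in config.filebeat
ELASTICSEARCH_HOSTS="es1:9200,es2:9200"
ELASTICSEARCH_HOSTS=$(echo $ELASTICSEARCH_HOSTS | awk -F, '{for(i=1;i<=NF;i++){printf "\"%s\",", $i}}')
ELASTICSEARCH_HOSTS=${ELASTICSEARCH_HOSTS%,}   # strip the trailing comma
echo "hosts: [$ELASTICSEARCH_HOSTS]"           # prints: hosts: ["es1:9200","es2:9200"]
```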
26 changes: 18 additions & 8 deletions assets/fluentd/config.fluentd
@@ -62,15 +62,20 @@ fi

es(){
if [ -f "/run/secrets/es_credential" ];then
ELASTICSEARCH_USER=$(cat /run/secrets/es_credential | awk -F":" '{ print $1 }')
ELASTICSEARCH_PASSWORD=$(cat /run/secrets/es_credential | awk -F":" '{ print $2 }')
ELASTICSEARCH_USER=$(cat /run/secrets/es_credential | awk -F":" '{ print $1 }')
ELASTICSEARCH_PASSWORD=$(cat /run/secrets/es_credential | awk -F":" '{ print $2 }')
fi
assert_not_empty "$ELASTICSEARCH_HOST" "ELASTICSEARCH_HOST required"
assert_not_empty "$ELASTICSEARCH_PORT" "ELASTICSEARCH_PORT required"

if [ -z "$ELASTICSEARCH_HOSTS" ]; then
assert_not_empty "$ELASTICSEARCH_HOST" "ELASTICSEARCH_HOST required"
assert_not_empty "$ELASTICSEARCH_PORT" "ELASTICSEARCH_PORT required"
ELASTICSEARCH_HOSTS="$ELASTICSEARCH_HOST:$ELASTICSEARCH_PORT"
fi

cat >> $FLUENTD_CONFIG << EOF
<match docker.**>
@type elasticsearch
hosts $ELASTICSEARCH_HOST:$ELASTICSEARCH_PORT
hosts $ELASTICSEARCH_HOSTS
reconnect_on_error true
${ELASTICSEARCH_USER:+user ${ELASTICSEARCH_USER}}
${ELASTICSEARCH_PASSWORD:+password ${ELASTICSEARCH_PASSWORD}}
@@ -133,9 +138,10 @@ EOF

aliyun_sls(){
if [ -f "/run/secrets/aliyun_access_key" ];then
ALIYUNSLS_ACCESS_KEY_ID=$(cat /run/secrets/aliyun_access_key | awk -F":" '{ print $1 }')
ALIYUNSLS_ACCESS_KEY_SECRET=$(cat /run/secrets/aliyun_access_key | awk -F":" '{ print $2 }')
ALIYUNSLS_ACCESS_KEY_ID=$(cat /run/secrets/aliyun_access_key | awk -F":" '{ print $1 }')
ALIYUNSLS_ACCESS_KEY_SECRET=$(cat /run/secrets/aliyun_access_key | awk -F":" '{ print $2 }')
fi

assert_not_empty "$ALIYUNSLS_PROJECT" "ALIYUNSLS_PROJECT required"
assert_not_empty "$ALIYUNSLS_REGION_ENDPOINT" "ALIYUNSLS_REGION_ENDPOINT required"
assert_not_empty "$ALIYUNSLS_ACCESS_KEY_ID" "ALIYUNSLS_ACCESS_KEY_ID required"
@@ -224,7 +230,11 @@ aggregate all
EOF
}

case "$FLUENTD_OUTPUT" in
if [ -n "$FLUENTD_OUTPUT" ]; then
LOGGING_OUTPUT="$FLUENTD_OUTPUT"
fi

case "$LOGGING_OUTPUT" in
elasticsearch)
es;;
graylog)
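Both startup scripts now select the output plugin via `LOGGING_OUTPUT`, keeping the older per-pilot variables as a fallback. A small illustration of that fallback, with example values:

```
# Example values only: the legacy FLUENTD_OUTPUT variable is still honored
FLUENTD_OUTPUT=elasticsearch
if [ -n "$FLUENTD_OUTPUT" ]; then
    LOGGING_OUTPUT=$FLUENTD_OUTPUT
fi
echo "$LOGGING_OUTPUT"   # elasticsearch
```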
20 changes: 20 additions & 0 deletions build.sh
@@ -0,0 +1,20 @@
#!/usr/bin/env bash
#
# build docker image
#

build()
{
echo -e "building image: log-pilot:latest\n"

docker build -t log-pilot:latest -f Dockerfile.$1 .
}

case $1 in
fluentd)
build fluentd
;;
*)
build filebeat
;;
esac
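Judging from the case statement, the new build.sh builds the filebeat-based image by default and the fluentd variant only when asked for; the assumed invocations would be:

```
# Assumed usage of the new build.sh
./build.sh           # builds from Dockerfile.filebeat, tags log-pilot:latest
./build.sh fluentd   # builds from Dockerfile.fluentd, tags log-pilot:latest
```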
4 changes: 2 additions & 2 deletions docs/filebeat/docs.md
@@ -31,7 +31,7 @@ docker run --rm -it \
-v /var/run/docker.sock:/var/run/docker.sock \
-v /:/host \
-e PILOT_TYPE=filebeat \
-e FILEBEAT_OUTPUT=elasticsearch \
-e LOGGING_OUTPUT=elasticsearch \
-e ELASTICSEARCH_HOST=${ELASTICSEARCH_HOST} \
-e ELASTICSEARCH_PORT=${ELASTICSEARCH_PORT} \
registry.cn-hangzhou.aliyuncs.com/acs-sample/log-pilot:latest
@@ -40,7 +40,7 @@ docker run --rm -it \
Log output plugin configuration
===============================

You can configure the environment variable ```FILEBEAT_OUTPUT``` to determine which log management system receives the output.
You can configure the environment variable ```LOGGING_OUTPUT``` to determine which log management system receives the output.

### Supported log management

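With the multi-host support added to `config.filebeat` in this commit, the run command above should also accept a comma-separated `ELASTICSEARCH_HOSTS` value instead of a single host and port. A hedged sketch, with placeholder host names:

```
docker run --rm -it \
    -v /var/run/docker.sock:/var/run/docker.sock \
    -v /:/host \
    -e PILOT_TYPE=filebeat \
    -e LOGGING_OUTPUT=elasticsearch \
    -e ELASTICSEARCH_HOSTS=es1:9200,es2:9200 \
    registry.cn-hangzhou.aliyuncs.com/acs-sample/log-pilot:latest
```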
4 changes: 2 additions & 2 deletions docs/fluentd/docs.md
@@ -31,7 +31,7 @@ docker run --rm -it \
-v /var/run/docker.sock:/var/run/docker.sock \
-v /:/host \
-e PILOT_TYPE=fluentd \
-e FLUENTD_OUTPUT=elasticsearch \
-e LOGGING_OUTPUT=elasticsearch \
-e ELASTICSEARCH_HOST=${ELASTICSEARCH_HOST} \
-e ELASTICSEARCH_PORT=${ELASTICSEARCH_PORT} \
registry.cn-hangzhou.aliyuncs.com/acs-sample/log-pilot:latest
@@ -40,7 +40,7 @@ docker run --rm -it \
Log output plugin configuration
===============================

You can configure the environment variable ```FLUENTD_OUTPUT``` to determine which log management system receives the output.
You can configure the environment variable ```LOGGING_OUTPUT``` to determine which log management system receives the output.

### Supported log management

6 changes: 3 additions & 3 deletions docs/fluentd/output/aliyun_sls.md
@@ -2,7 +2,7 @@

#### Environment variables for image `fluentd`

- FLUENTD_OUTPUT=aliyun_sls # Required, specify your output plugin name
- LOGGING_OUTPUT=aliyun_sls # Required, specify your output plugin name
- ALIYUNSLS_PROJECT=test-fluentd # Required, specify your aliyun sls project name
- ALIYUNSLS_REGION_ENDPOINT=cn-hangzhou.log.aliyuncs.com # Required, specify your region root endpoint
- ALIYUNSLS_ACCESS_KEY_ID="your aliyun access key id" # Required
@@ -35,7 +35,7 @@ swm2ft9bzaxyyi9umwbb0mdd6 aliyun_access_key 53 minutes ago 53 minutes a
docker run --rm -it \
-v /var/run/docker.sock:/var/run/docker.sock \
-v /:/host \
-e FLUENTD_OUTPUT=aliyun_sls \
-e LOGGING_OUTPUT=aliyun_sls \
-e ALIYUNSLS_PROJECT="your-aliyun-sls-project-name" \
-e ALIYUNSLS_REGION_ENDPOINT=cn-hangzhou.log.aliyuncs.com \
-e ALIYUNSLS_ACCESS_KEY_ID="your-access-key-id" \
@@ -63,7 +63,7 @@ $ docker service create -t \
--mount type=bind,source=/var/run/docker.sock,destination=/var/run/docker.sock \
--mount type=bind,source=/,destination=/host \
--secret="aliyun_access_key" \
-e FLUENTD_OUTPUT=aliyun_sls \
-e LOGGING_OUTPUT=aliyun_sls \
-e ALIYUNSLS_PROJECT="your-aliyun-sls-project-name" \
-e ALIYUNSLS_REGION_ENDPOINT=cn-hangzhou.log.aliyuncs.com \
registry.cn-hangzhou.aliyuncs.com/acs-sample/fluentd-pilot:latest
2 changes: 1 addition & 1 deletion docs/fluentd/output/kafka.md
@@ -3,7 +3,7 @@
#### Environment variables for image `fluentd`

```
- FLUENTD_OUTPUT=kafka # Required, specify your output plugin name
- LOGGING_OUTPUT=kafka # Required, specify your output plugin name
- KAFKA_BROKERS=<broker1_host>:<broker1_port>,<broker2_host>:<broker2_port>...
- KAFKA_DEFAULT_TOPIC=topic
```
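Putting those variables together, a Kafka run might look like the sketch below. The broker addresses and topic are placeholders, and the image name is assumed from the other fluentd examples in these docs:

```
docker run --rm -it \
    -v /var/run/docker.sock:/var/run/docker.sock \
    -v /:/host \
    -e LOGGING_OUTPUT=kafka \
    -e KAFKA_BROKERS=kafka1:9092,kafka2:9092 \
    -e KAFKA_DEFAULT_TOPIC=topic \
    registry.cn-hangzhou.aliyuncs.com/acs-sample/fluentd-pilot:latest
```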
9 changes: 5 additions & 4 deletions examples/pilot-elasticsearch-kubernetes.yml
@@ -4,14 +4,11 @@ metadata:
name: log-pilot
labels:
k8s-app: log-pilot
kubernetes.io/cluster-service: "true"
spec:
template:
metadata:
labels:
k8s-app: log-pilot
kubernetes.io/cluster-service: "true"
version: v1.22
spec:
tolerations:
- key: node-role.kubernetes.io/master
@@ -20,12 +17,16 @@ spec:
- name: log-pilot
image: registry.cn-hangzhou.aliyuncs.com/acs-sample/log-pilot:0.9.1-fluentd
env:
- name: "FLUENTD_OUTPUT"
- name: "LOGGING_OUTPUT"
value: "elasticsearch"
- name: "ELASTICSEARCH_HOST"
value: "{elasticsearch}" #changeme
- name: "ELASTICSEARCH_PORT"
value: "{port}" #changeme
- name: "NODE_NAME"
valueFrom:
fieldRef:
fieldPath: spec.nodeName
volumeMounts:
- name: sock
mountPath: /var/run/docker.sock
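The `NODE_NAME` entry added in this example is injected through the Kubernetes downward API (`fieldRef: spec.nodeName`). After deploying the example manifest with the `#changeme` placeholders filled in, it can be checked roughly like this (the pod name is a placeholder):

```
kubectl apply -f examples/pilot-elasticsearch-kubernetes.yml
kubectl exec log-pilot-xxxxx -- printenv NODE_NAME   # should print the node the pod runs on
```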
16 changes: 7 additions & 9 deletions examples/pilot-elastisearch-kubernetes-2.yml
@@ -4,14 +4,11 @@ metadata:
name: log-pilot
labels:
k8s-app: log-pilot
kubernetes.io/cluster-service: "true"
spec:
template:
metadata:
labels:
k8s-app: log-pilot
kubernetes.io/cluster-service: "true"
version: v1.22
spec:
tolerations:
- key: node-role.kubernetes.io/master
@@ -20,12 +17,14 @@ spec:
- name: log-pilot
image: registry.cn-hangzhou.aliyuncs.com/acs-sample/log-pilot:0.9-filebeat
env:
- name: "FILEBEAT_OUTPUT"
- name: "LOGGING_OUTPUT"
value: "elasticsearch"
- name: "ELASTICSEARCH_HOST"
value: "changeme"
- name: "ELASTICSEARCH_PORT"
value: "9092"
- name: "ELASTICSEARCH_HOSTS"
value: "elasticsearch1:9200,elasticsearch2:9200"
- name: "NODE_NAME"
valueFrom:
fieldRef:
fieldPath: spec.nodeName
volumeMounts:
- name: sock
mountPath: /var/run/docker.sock
@@ -53,4 +52,3 @@ spec:
- name: root
hostPath:
path: /

12 changes: 6 additions & 6 deletions examples/pilot-kafka-kubernetes-2.yml
@@ -4,14 +4,11 @@ metadata:
name: log-pilot
labels:
k8s-app: log-pilot
kubernetes.io/cluster-service: "true"
spec:
template:
metadata:
labels:
k8s-app: log-pilot
kubernetes.io/cluster-service: "true"
version: v1.22
spec:
tolerations:
- key: node-role.kubernetes.io/master
@@ -20,10 +17,14 @@ spec:
- name: log-pilot
image: registry.cn-hangzhou.aliyuncs.com/acs-sample/log-pilot:0.9-filebeat
env:
- name: "FILEBEAT_OUTPUT"
- name: "LOGGING_OUTPUT"
value: "kafka"
- name: "KAFKA_BROKERS"
value: "kafka1:9092,kafka2:9092"
value: "kafka1:9092,kafka2:9092"
- name: "NODE_NAME"
valueFrom:
fieldRef:
fieldPath: spec.nodeName
volumeMounts:
- name: sock
mountPath: /var/run/docker.sock
@@ -51,4 +52,3 @@ spec:
- name: root
hostPath:
path: /

9 changes: 5 additions & 4 deletions examples/pilot-kafka-kubernetes.yml
@@ -4,14 +4,11 @@ metadata:
name: log-pilot
labels:
k8s-app: log-pilot
kubernetes.io/cluster-service: "true"
spec:
template:
metadata:
labels:
k8s-app: log-pilot
kubernetes.io/cluster-service: "true"
version: v1.22
spec:
tolerations:
- key: node-role.kubernetes.io/master
@@ -20,10 +17,14 @@ spec:
- name: log-pilot
image: registry.cn-hangzhou.aliyuncs.com/acs-sample/log-pilot:0.9.1-fluentd
env:
- name: "FLUENTD_OUTPUT"
- name: "LOGGING_OUTPUT"
value: "kafka"
- name: "KAFKA_BROKERS"
value: "kafka1:9092,kafka2:9092" #changeme
- name: "NODE_NAME"
valueFrom:
fieldRef:
fieldPath: spec.nodeName
volumeMounts:
- name: sock
mountPath: /var/run/docker.sock
