-
Notifications
You must be signed in to change notification settings - Fork 3
/
fluentd-example-directives.conf
192 lines (154 loc) · 4.06 KB
/
fluentd-example-directives.conf
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
### ########################################################################################
### Visit this website if in doubt
### https://medium.com/@alluri.prithvi/application-logs-from-kubernetes-to-s3-and-elasticsearch-using-fluentd-2f1b09a9872e
### ########################################################################################
@include systemd.conf
@include kubernetes.conf
## App 1 config
## Tail an application log file and parse each line with a regexp.
<source>
@type tail
@id XXXX ### Uniq ID ### must be unique across every plugin in this config
path XXXX ### Log path ### file (or glob) to tail
pos_file XXXX ### pos file ### stores the read offset so a restart resumes instead of re-reading
tag XXXXX ### App tag ### routing tag attached to every event from this source
read_from_head true ### also read content that already exists in the file, not only new lines
<parse>
@type regexp
keep_time_key true ### keep the raw "logtime" field in the record after time parsing
### Expected line shape: "<time> - <name> - <level> - <message>"
expression (?<logtime>.*?) - (?<names>[^ ]*) - (?<log_level>[^ ]*) - (?<message>.*)
### e.g. "2020/01/31 23:59:59,123456+0000" — %6N is 6-digit fractional seconds
time_format %Y/%m/%d %H:%M:%S,%6N%z
</parse>
</source>
## App 2 config
## Same tail + regexp pattern as App 1; only the id/path/pos_file/tag placeholders differ.
<source>
@type tail
@id XXXXX ## Uniq ID ## must be unique across every plugin in this config
path XXXX ### Log path
pos_file XXXX ### pos file ### read offset persisted across restarts
tag XXXXX ## App tag ## routing tag for events from this source
read_from_head true ## start from the beginning of existing files
<parse>
@type regexp
keep_time_key true ## keep the parsed "logtime" field in the record
## Expected line shape: "<time> - <name> - <level> - <message>"
expression (?<logtime>.*?) - (?<names>[^ ]*) - (?<log_level>[^ ]*) - (?<message>.*)
time_format %Y/%m/%d %H:%M:%S,%6N%z
</parse>
</source>
# Store Data in Elasticsearch and S3
# Fan the same event stream out to both destinations via the copy plugin.
<match **>
@type copy
deep_copy true # each <store> receives its own copy, so one store cannot mutate the other's record
<store>
@type s3
# NOTE(review): prefer an IAM instance profile / IRSA over inline keys —
# plaintext credentials in a config file are easy to leak.
aws_key_id XXXXXXXXXXXXXXXXX # Access Key
aws_sec_key XXXXXXXXXXXXXXXXXXXXXX # Secret Key
s3_bucket XXXXXX # S3 Bucket
s3_region XXXXX # Bucket Region
path "#{ENV['S3_LOGS_BUCKET_PREFIX']}"
s3_object_key_format %{path}%{time_slice}/cluster-log-%{index}.%{file_extension}
time_slice_format %Y%m%d-%H # hourly time slice rendered into the object key
# v1 <buffer> section. Replaces the deprecated v0.12 top-level parameters
# buffer_path / time_slice_wait / flush_interval / buffer_chunk_limit and
# matches the v1-style <buffer> already used by the elasticsearch store below.
<buffer time>
  @type file
  path /var/log/fluent/s3 # was: buffer_path
  timekey 1h # hourly chunks, consistent with time_slice_format above
  timekey_wait 10m # was: time_slice_wait — grace period for late events
  chunk_limit_size 256m # was: buffer_chunk_limit
</buffer>
</store>
<store>
@type elasticsearch
@id out_es
log_level info
include_tag_key true # add the fluentd tag to each indexed document
host "#{ENV['FLUENT_ELASTICSEARCH_HOST']}"
port "#{ENV['FLUENT_ELASTICSEARCH_PORT']}"
scheme "#{ENV['FLUENT_ELASTICSEARCH_SCHEME'] || 'https'}"
ssl_verify "#{ENV['FLUENT_ELASTICSEARCH_SSL_VERIFY'] || 'true'}"
reload_connections "#{ENV['FLUENT_ELASTICSEARCH_RELOAD_CONNECTIONS'] || 'true'}"
logstash_prefix "#{ENV['FLUENT_ELASTICSEARCH_LOGSTASH_PREFIX'] || 'applogs'}"
logstash_format true # write to time-based indices named <prefix>-YYYY.MM.DD
<buffer>
flush_thread_count 8
flush_interval 5s
chunk_limit_size 2M
queue_limit_length 32
retry_max_interval 30
retry_forever true # never drop chunks; retry until Elasticsearch accepts them
</buffer>
</store>
</match>
# Accept events over the fluentd forward protocol (e.g. from other fluentd
# instances, fluent-bit, or the Docker fluentd logging driver).
<source>
@type forward # "@type" is the v1 form; bare "type" is the deprecated v0.12 spelling
port 24224 # standard forward-protocol port
bind 0.0.0.0 # listen on all interfaces
# NOTE(review): this example file declares the same forward source several
# times; only one listener can bind port 24224 in a single running config.
</source>
# Debug helper: echo every event whose tag has exactly two dot-separated
# parts (e.g. "app.log") to fluentd's own stdout.
<match *.*>
@type stdout
</match>
# Second copy example: ship everything to both Elasticsearch and S3.
# NOTE(review): an earlier <match **> in this file would already swallow all
# events — in a real config only the first matching <match> receives an event.
<match **>
@type copy
<store>
@type elasticsearch
host "#{ENV['ELASTICSEARCH_HOSTNAME'] || 'localhost'}"
ssl_verify "#{ENV['ELASTICSEARCH_SSL_VERIFY'] || 'true'}"
port 443
scheme https
logstash_format true
logstash_prefix rabbitmq.lab # daily indices named rabbitmq.lab-YYYY.MM.DD
type_name logs.via.fluentd # NOTE(review): _type is deprecated/removed in Elasticsearch 7+/8
</store>
<store>
@type s3
s3_bucket ecosystem.up.bucket
s3_region eu-central-1
utc # render time placeholders in UTC (v0.12-style flag)
path %Y/%m/%d/
# NOTE(review): the key contains no %{time_slice} or %{index}, so successive
# flushes target the same object name — confirm overwriting is intended.
s3_object_key_format %{path}docker_lab_logs.txt
time_slice_format %Y%m%d%H%M # per-minute time slices (v0.12-style parameter)
buffer_type file # v0.12-style buffering; v1 configs use a <buffer> section instead
buffer_path /fluentd/log/s3
</store>
</match>
# Forward-protocol listener (deprecated v0.12 "type" fixed to v1 "@type").
<source>
@type forward # was: "type forward" — bare "type" is the deprecated spelling
port 24224 # standard forward-protocol port
bind 0.0.0.0 # listen on all interfaces
# NOTE(review): this example file declares the same forward source several
# times; only one listener can bind port 24224 in a single running config.
</source>
# Debug helper: print events with two-part tags to fluentd's stdout.
<match *.*>
@type stdout # was: "type stdout" — bare "type" is the deprecated v0.12 spelling
</match>
# Catch-all: index every event into Elasticsearch over plain HTTP.
<match **>
@type elasticsearch
logstash_format true # daily logstash-YYYY.MM.DD indices (default prefix)
host elasticsearch-xxxxxxxxxxxxxxxxx.xxxxx.elb.amazonaws.com # ELB in front of the ES cluster
port 80 # NOTE(review): unencrypted HTTP — confirm TLS is terminated elsewhere
type_name rabbitmq-node-log # NOTE(review): _type is deprecated/removed in Elasticsearch 7+/8
</match>
# Forward-protocol listener (deprecated v0.12 "type" fixed to v1 "@type").
<source>
@type forward # was: "type forward" — bare "type" is the deprecated spelling
port 24224 # standard forward-protocol port
bind 0.0.0.0 # listen on all interfaces
# NOTE(review): this example file declares the same forward source several
# times; only one listener can bind port 24224 in a single running config.
</source>
# Debug helper: echo events with two-part tags to fluentd's stdout.
<match *.*>
@type stdout
</match>
# Third copy example: managed AWS Elasticsearch (VPC endpoint) plus S3.
# NOTE(review): a concrete VPC ES endpoint is hard-coded here; prefer an
# environment variable as the earlier examples in this file do.
<match **>
@type copy
<store>
@type elasticsearch
host vpc-elasticsearch-901260445-ya5j5mubvy6zvqwrxmfsvyptgy.eu-central-1.es.amazonaws.com
port 443
scheme https
logstash_format true
logstash_prefix rabbitmq.lab # daily indices named rabbitmq.lab-YYYY.MM.DD
type_name logs.via.fluentd # NOTE(review): _type is deprecated/removed in Elasticsearch 7+/8
</store>
<store>
@type s3
s3_bucket ecosystem.up.bucket
s3_region eu-central-1
utc # render time placeholders in UTC (v0.12-style flag)
path %Y/%m/%d/
# NOTE(review): the key contains no %{time_slice} or %{index}, so successive
# flushes target the same object name — confirm overwriting is intended.
s3_object_key_format %{path}docker_lab_logs.txt
time_slice_format %Y%m%d%H%M # per-minute time slices (v0.12-style parameter)
buffer_type file # v0.12-style buffering; v1 configs use a <buffer> section instead
buffer_path /fluentd/log/s3
</store>
</match>