實作 Nginx Access Log
透過 FluentD 收集 /var/log/nginx/nginx_web.access.log 日誌
過濾不必要的紀錄
輸出到 MongoDB & Elasticsearch
延伸閱讀
FluentD 參數說明
FluentD 實作 Nginx Access Log 補充
fluent.conf
# Number of worker processes the Fluentd supervisor launches; the
# <worker 0> section below is pinned to the first (and only) worker.
<system>
workers 1
</system>
<worker 0>
# Tail the Nginx access log and parse each line with a regexp that mirrors
# the custom "weblog" log_format defined in nginx.conf below.
<source>
@type tail
path /var/log/nginx/nginx_web.access.log
# pos_file remembers the read offset so records are not re-read on restart.
pos_file /var/log/td-agent/nginx_web.access.log.pos
tag nginx.web.access
# FIX: the original pattern used "(?< custom_field2>" and "(?< custom_field3>"
# — named capture groups must not contain spaces in Ruby (Onigmo) regexps,
# so the parser would fail to compile. The group names are now valid.
format /^(?<remote>[^ ]*) (?<host>[^ ]*) (?<user>[^ ]*) \[(?<logtime>[^\]]*)\] "(?<method>\S+)(?: +(?<path>[^\"]*) +\S*)?" (?<code>[^ ]*) (?<size>[^ ]*)(?: "(?<referer>[^\"]*)" "(?<agent>[^\"]*)")(?: "(?<custom_field1>[^\"]*)" "(?<custom_field2>[^\"]*)" "(?<custom_field3>[^\"]*)")?$/
# Matches nginx $time_local, e.g. 10/Oct/2025:13:55:36 +0800
time_format %d/%b/%Y:%H:%M:%S %z
</source>
# Drop access-log records we do not need to keep: requests under /js/ or
# /css/, and any path ending in a static-asset extension (images, scripts,
# stylesheets, icons, fonts).
<filter nginx.web.access>
@type grep
<exclude>
# Match against the "path" field extracted by the tail regexp above.
key path
pattern /(?:\/js\/)|(?:\/css\/)|\.(?:jpg|gif|png|js|css|svg|ico|woff2)$/
</exclude>
</filter>
# Each record must be stored in both Elasticsearch and MongoDB, so the
# match uses @type copy to fan the event out to one <store> per destination.
<match nginx.web.access>
@type copy
<store>
@type mongo
host ${mongo db host address}
port ${mongo db port}
database ${mongo db database}
collection ${mongo db table}
# for capped collection
capped
capped_size 1024m
# authentication
user ${mongo db user name}
password ${mongo db password}
<inject>
# key name of timestamp
time_key time
</inject>
<buffer>
# flush buffered chunks to MongoDB every 10 seconds
flush_interval 10s
</buffer>
</store>
<store>
@type elasticsearch
host ${elasticsearch host address}
port ${elasticsearch port}
# NOTE(review): index_name is ignored when logstash_format is true —
# logstash_prefix determines the index name; confirm against the
# fluent-plugin-elasticsearch version in use.
index_name fluentd.${tag}
logstash_format true
# NOTE(review): expanding ${tag} here normally requires tag to be a buffer
# chunk key (<buffer tag>); verify the plugin expands it without one.
logstash_prefix fluentd.${tag}
</store>
</match>
</worker>
Nginx
http {
... 略
# The "weblog" format must stay in sync with the Fluentd tail regexp above:
# it emits remote_addr, "-", remote_user, [time_local], "request", status,
# body bytes, referer, user agent, then three custom cookie values.
log_format weblog '$remote_addr - $remote_user [$time_local] '
'"$request" $status $body_bytes_sent '
'"$http_referer" "$http_user_agent" '
'"$cookie_custom_field1" "$cookie_custom_field2" "$cookie_custom_field3"';
... 略
server {
location / {
... 略
# Write access logs with the custom "weblog" format so Fluentd can parse them.
access_log /var/log/nginx/nginx_web.access.log weblog;
}
}
}
Mongo Record Sample
Elasticsearch Index Sample
留言
張貼留言