Real-Time Analysis of HAProxy Access Logs with the ELK Stack
1. HAProxy logging rules: add the following to the frontend section of /etc/haproxy/haproxy.conf (LogServerIP is a placeholder for the address of your syslog server)
option httplog
option logasap
log LogServerIP local5
capture request header Host len 40
capture request header X-Forwarded-For len 50
#capture request header Accept-Language len 50
capture request header Referer len 200
capture request header User-Agent len 200
2. syslog configuration: enable remote log reception on the log server so HAProxy's messages on facility local5 end up in a local file.
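The post does not show the syslog side, so the following is only a minimal sketch, assuming rsyslog on the log server. It opens a UDP listener on port 514 and writes everything received on facility local5 to the file the indexer below tails; the directives are standard rsyslog, but your distribution may prefer a drop-in file under /etc/rsyslog.d/ instead of /etc/rsyslog.conf.

# /etc/rsyslog.conf (or a file under /etc/rsyslog.d/) -- sketch, assuming rsyslog
$ModLoad imudp              # load the UDP syslog input module
$UDPServerRun 514           # accept remote syslog messages on UDP 514

# write HAProxy's facility local5 messages to the file read by the Logstash indexer
local5.*    /var/log/haproxy.log

Restart rsyslog afterwards (e.g. service rsyslog restart) and make sure UDP 514 is reachable from the HAProxy host.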
3. Logstash configuration
indexer (runs on the log server: tails /var/log/haproxy.log and pushes raw events into a Redis list)
input {
  file {
    path => "/var/log/haproxy.log"
    start_position => "beginning"
    sincedb_write_interval => 0
    type => "HAPROXY_LOG"
    codec => plain {
      charset => "ISO-8859-1"
    }
  }
}
output {
  #stdout { codec => rubydebug }
  redis {
    data_type => "list"
    key => "logstash:Haproxy_log"
    host => "192.168.1.2"
    port => 6379
  }
}
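To bring the indexer up, point Logstash at the config above and confirm events are queuing in Redis. A sketch of the commands: the install path is taken from the GeoLiteCity.dat path used further down, while the config file name indexer.conf is only an assumed example.

# start the indexer (config file name is an assumption)
/usr/local/logstash2.2.2/bin/logstash -f indexer.conf

# the Redis list should start growing as HAProxy serves traffic
redis-cli -h 192.168.1.2 llen logstash:Haproxy_log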
shipping (pulls events from the Redis list, parses and enriches them, and writes them to Elasticsearch)
input {
  redis {
    data_type => "list"
    key => "logstash:Haproxy_log"
    host => "192.168.1.2"
    port => 6379
    threads => 5
    type => "HAPROXY_LOG"
  }
}
filter {
  grok {
    # the \+ before time_duration and bytes_read matches the prefix HAProxy adds when option logasap is set
    match => ["message" , "%{SYSLOGTIMESTAMP:syslog_timestamp} %{IPORHOST:syslog_server} %{SYSLOGPROG}: %{IP:client_ip}:%{INT:client_port} \[%{MONTHDAY:haproxy_monthday}/%{MONTH:haproxy_month}/%{YEAR:haproxy_year}:(?!<[0-9])%{HOUR:haproxy_hour}:%{MINUTE:haproxy_minute}(?::%{SECOND:haproxy_second})(?![0-9]).%{INT:haproxy_milliseconds}\] %{NOTSPACE:frontend_name} %{NOTSPACE:backend_name}/%{NOTSPACE:server_name} %{INT:time_request}/%{INT:time_queue}/%{INT:time_backend_connect}/%{INT:time_backend_response}/\+%{NOTSPACE:time_duration} %{INT:http_status_code} \+%{NOTSPACE:bytes_read} %{DATA:captured_request_cookie} %{DATA:captured_response_cookie} %{NOTSPACE:termination_state} %{INT:actconn}/%{INT:feconn}/%{INT:beconn}/%{INT:srvconn}/%{NOTSPACE:retries} %{INT:srv_queue}/%{INT:backend_queue} (\{%{IPORHOST:Host}\|?(%{IP:X_Forward_For})?\|?(%{URI:Referer})?\|%{GREEDYDATA:User_Agent}\})?( )( )?\"(<BADREQ>|(%{WORD:http_method} (%{URIPROTO:http_proto}://)?(?:%{USER:http_user}(?::[^@]*)?@)?(?:%{URIHOST:http_host})?(?:%{URIPATHPARAM:http_request})?( HTTP/%{NUMBER:http_version})?))?\""]
  }
  useragent {
    source => "User_Agent"
    target => "ua"
  }
  # GeoIP lookup on the X-Forwarded-For address when it was captured, otherwise on the client IP
  if [X_Forward_For] =~ "." {
    geoip {
      source => ["X_Forward_For"]
      database => "/usr/local/logstash2.2.2/bin/GeoLiteCity.dat"
    }
  } else {
    geoip {
      source => ["client_ip"]
      database => "/usr/local/logstash2.2.2/bin/GeoLiteCity.dat"
    }
  }
  date {
    match => ["log_timestamp", "YYYY-MM-dd HH:mm:ss"]
    timezone => "Etc/UCT"
  }
  mutate {
    remove_field => ["log_timestamp"]
    remove_field => ["host"]
    remove_field => ["path"]
    remove_field => ["pid"]
    remove_field => ["client_port"]
    remove_field => ["program"]
    remove_field => ["haproxy_monthday"]
    remove_field => ["haproxy_month"]
    remove_field => ["haproxy_year"]
    remove_field => ["haproxy_hour"]
    remove_field => ["haproxy_minute"]
    remove_field => ["haproxy_second"]
    remove_field => ["haproxy_milliseconds"]
    remove_field => ["frontend_name"]
    remove_field => ["captured_response_cookie"]
    remove_field => ["captured_request_cookie"]
    convert => ["timetaken", "integer"]
    convert => ["http_status_code", "integer"]
    convert => ["bytes_read", "integer"]
    convert => ["time_duration", "integer"]
    convert => ["time_backend_response", "integer"]
    convert => ["actconn", "integer"]
    convert => ["feconn", "integer"]
    convert => ["beconn", "integer"]
    convert => ["srvconn", "integer"]
    convert => ["retries", "integer"]
    convert => ["srv_queue", "integer"]
    convert => ["backend_queue", "integer"]
    convert => ["time_request", "integer"]
    convert => ["time_queue", "integer"]
    convert => ["time_backend_connect", "integer"]
  }
}
output {
  #stdout { codec => rubydebug }
  elasticsearch {
    hosts => "192.168.1.20:9200"
    index => "logstash-%{+YYYY.MM.dd}"
  }
}
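Start the shipper the same way and verify that the daily index appears in Elasticsearch. A sketch, where shipper.conf is only an assumed config file name:

# start the shipper (config file name is an assumption)
/usr/local/logstash2.2.2/bin/logstash -f shipper.conf

# check that the daily logstash-* index is being created
curl 'http://192.168.1.20:9200/_cat/indices?v'

Once documents show up, a logstash-* index pattern in Kibana lets you chart status codes, response times, user agents, and GeoIP locations in real time.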