wiki'd

by JoKeru

Centralizing Logs

Log Server
[cc lang='bash']
# prerequisites (JRE for logstash; raise root's open-file limit, since
# logstash/elasticsearch keep many files and sockets open)
\$ apt-get install default-jre-headless curl -y
\$ echo -e "root hard nofile 1024000\nroot soft nofile 1024000" >> /etc/security/limits.conf

# logstash 1.2.0 (distributed as a flat jar, no deb package)
# NOTE(review): --no-check-certificate skips TLS verification — verify the
# downloaded jar's checksum before using it
\$ mkdir /usr/share/logstash
\$ wget -q --no-check-certificate https://logstash.objects.dreamhost.com/release/logstash-1.2.0-flatjar.jar -O /usr/share/logstash/logstash.jar
\$ vi /etc/init.d/logstash
\$ chmod +x /etc/init.d/logstash
\$ update-rc.d logstash defaults
\$ mkdir /etc/logstash
\$ vi /etc/logstash/logstash.conf
\$ /etc/init.d/logstash restart

# elasticsearch 0.90.3 (version required by logstash 1.2.0)
\$ wget -q https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-0.90.3.deb
\$ dpkg -i elasticsearch-0.90.3.deb; rm elasticsearch-0.90.3.deb
\$ vi /etc/elasticsearch/elasticsearch.yml
\$ /etc/init.d/elasticsearch restart

# lighttpd & kibana 3 (served straight out of /var/www)
\$ apt-get install lighttpd -y
\$ cd /var/www/
\$ wget https://github.com/elasticsearch/kibana/archive/master.tar.gz
\$ tar -xzf master.tar.gz
\$ /etc/init.d/lighttpd restart
[/cc]

/etc/init.d/logstash
[cc lang='bash']
#! /bin/sh
#
# LSB init script for the logstash agent: runs the flat jar under the
# JVM via start-stop-daemon, with a managed pidfile.

### BEGIN INIT INFO
# Provides: logstash
# Required-Start: \$remote_fs \$syslog
# Required-Stop: \$remote_fs \$syslog
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: Start daemon at boot time
# Description: Enable service provided by daemon.
### END INIT INFO

. /lib/lsb/init-functions

name="logstash"
# The embedded "--" separates the executable from its arguments when the
# string is word-split onto start-stop-daemon's --exec option below.
logstash_bin="/usr/bin/java -- -Xms1g -Xmx2g -jar /usr/share/logstash/logstash.jar"
logstash_conf="/etc/logstash/logstash.conf"
logstash_log="/var/log/logstash.log"
pid_file="/var/run/\$name.pid"

# Start the agent in the background (-b) and write its pid (-m).
start () {
  command="\${logstash_bin} agent -f \$logstash_conf --log \${logstash_log}"

  log_daemon_msg "Starting \$name" "\$name"
  # \$command is intentionally unquoted: word splitting expands it into
  # "--exec /usr/bin/java -- <jvm args...>" for start-stop-daemon.
  if start-stop-daemon --start --quiet --oknodo --pidfile "\$pid_file" -b -m --exec \$command; then
    log_end_msg 0
  else
    log_end_msg 1
  fi
}

# Stop the agent and wait for the JVM to actually exit, so a following
# start does not race against the dying process (TERM, escalate to KILL).
stop () {
  log_daemon_msg "Stopping \$name" "\$name"
  if start-stop-daemon --stop --quiet --oknodo --retry TERM/30/KILL/5 --pidfile "\$pid_file"; then
    log_end_msg 0
  else
    log_end_msg 1
  fi
}

status () {
  status_of_proc -p \$pid_file "" "\$name"
}

case \$1 in
  start)
    # Already running? Report status and do nothing.
    if status; then exit 0; fi
    start
    ;;
  stop)
    stop
    ;;
  reload|restart)
    # logstash 1.2 has no config reload; both actions do stop + start.
    stop
    start
    ;;
  status)
    status && exit 0 || exit \$?
    ;;
  *)
    echo "Usage: \$0 {start|stop|restart|reload|status}"
    exit 1
    ;;
esac

exit 0
[/cc]

/etc/logstash/logstash.conf
[cc lang='bash']
# Receives squid access-log lines forwarded by rsyslog over TCP,
# parses them, and indexes one document per line into elasticsearch.
input {
tcp {
type => "squid-access"
port => 1025
}
}

filter {
grok {
type => "squid-access"
# Strips the rsyslog envelope (<PRI>timestamp host tag[pid]:) and then
# captures the squid access-log fields that follow it.
match => [ "message", "\<%{POSINT}>%{SYSLOGTIMESTAMP} %{SYSLOGHOST:source} %{DATA}(?:\[%{POSINT}\])?: %{TIMESTAMP_ISO8601:ts} %{IP:client}:%{INT} %{DATA:proxy} %{DATA:squid_code}/%{INT:http_code} %{WORD:request} %{DATA:uri} %{DATA:mime} %{DATA:remote} %{INT:size}B %{INT:time}ms %{DATA:remote_time}ms" ]
}
date {
type => "squid-access"
# Use the parsed squid timestamp ("ts") as the event's @timestamp.
match => [ "ts" , "ISO8601" ]
}
mutate {
type => "squid-access"
rename => [ "source","@source" ]
# Drop the raw line and fields already represented elsewhere
# to keep the indexed documents small.
remove => [ "message", "@version", "host", "source", "ts" ]
}
}

output {
# Debug output to stdout — left commented out for production.
# stdout {
# debug => true
# debug_format => "json"
# }
elasticsearch {
type => "squid-access"
# Must match cluster.name in elasticsearch.yml.
cluster => "log-cluster"
# One index per day, e.g. squid-access-2013.09.01.
index => "squid-access-%{+YYYY.MM.dd}"
}
}
[/cc]

/etc/elasticsearch/elasticsearch.yml
[cc lang='bash']
# Must match the "cluster" setting in logstash's elasticsearch output.
cluster.name: log-cluster
# Single-node setup: one shard per index, no replicas.
index.number_of_shards: 1
index.number_of_replicas: 0
# Compress stored fields and term vectors (LZF) to save disk
# — 0.90.x-era settings.
compress.default.type: lzf
index.store.compress.stored: true
index.store.compress.tv: true
[/cc]

On Every Node
[cc lang='bash']
# Tail squid's access log with rsyslog's imfile module and forward each
# line over TCP (@@) to the central logstash server on port 1025.
# The heredoc delimiter is unquoted, so the escaped \$ produce literal
# $Directive lines in the written config file.
\$ cat <<EOF > /etc/rsyslog.d/log.jokeru.ro.conf
\$ModLoad imfile

\$InputFileName /var/log/squid/access.log
\$InputFileStateFile state-squid
\$InputFileTag squid:
\$InputFileFacility local0
\$InputFileSeverity debug # to not log in /var/log/messages
\$InputRunFileMonitor

\$InputFilePollInterval 3

local0.debug @@log.jokeru.ro:1025
EOF
\$ /etc/init.d/rsyslog restart
[/cc]

Kibana search syntax

Comments