Logstash

Revision as of 14:27, 20 November 2014


Installation

You can install Logstash either manually or as an APT package. I recommend the manual installation: the packaged version confines (chroots) the process to /var/log, so if your application writes its logs anywhere else you will not be able to read them.


Manual installation (recommended)

Be careful: the Logstash version must match the Elasticsearch version for best compatibility and performance.
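
For instance, you can check which Elasticsearch version is running before picking the Logstash tarball. This is a minimal sanity check, assuming an Elasticsearch node is already running locally on its default HTTP port 9200:

# Print the version of the local Elasticsearch node
curl -s http://localhost:9200 | grep number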


cd /tmp
wget https://download.elasticsearch.org/logstash/logstash/logstash-1.4.2.tar.gz
tar xzvf logstash-1.4.2.tar.gz
rm logstash-1.4.2.tar.gz
mv logstash-1.4.2/ /opt/
cd /opt
ln -s /opt/logstash-1.4.2 /opt/logstash


  • Create configuration directories
mkdir -p /etc/logstash/conf.d
mkdir /etc/logstash/grok
chmod -R 777 /etc/logstash


  • Create the log file
touch /var/log/logstash.log
chmod -R 777 /var/log/logstash.log


  • Create an init.d script
cd /etc/init.d
vim logstash.sh


Paste the following content:

#!/bin/sh
### BEGIN INIT INFO
# Provides: logstash
# Required-Start: $remote_fs $syslog
# Required-Stop: $remote_fs $syslog
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: Start daemon at boot time
# Description: Enable service provided by daemon.
### END INIT INFO

. /lib/lsb/init-functions

if [ $(id -u) -ne 0 ]; then
	echo -e " " 
	echo -e "!!!!!!!!!!!!!!!!!!!!" 
	echo -e "!! Security alert !!" 
	echo -e "!!!!!!!!!!!!!!!!!!!!" 
	echo -e "You need to be root or have root privileges to run this script!\n\n"
	echo -e " " 
	exit 1
fi


name="logstash"
logstash_bin="/opt/logstash/bin/logstash"
logstash_conf="/etc/logstash/conf.d/"
logstash_log="/var/log/logstash.log"
pid_file="/var/run/$name.pid"

start () {
	command="${logstash_bin} agent -f $logstash_conf --log ${logstash_log}"
	log_daemon_msg "Starting $name" "$name"
	if start-stop-daemon --start --quiet --oknodo --pidfile "$pid_file" -b -m --exec $command; then
		log_end_msg 0
	else
		log_end_msg 1
	fi
}
testConfig () {
	echo "#############################"
	echo " Logstash configuration test"
	echo "#############################"
	command="${logstash_bin} -f $logstash_conf --verbose -t"
	$command
}
stop () {
	log_daemon_msg "Stopping $name" "$name"
	start-stop-daemon --stop --quiet --oknodo --pidfile "$pid_file"
}
status () {
	status_of_proc -p $pid_file "" "$name"
}

case $1 in
	start)
		if status; then exit 0; fi
		start
		;;
	stop)
		stop
		;;
	reload)
		stop
		start
		;;
	restart)
		stop
		start
		;;
	status)
		status && exit 0 || exit $?
		;;
	testConfig)
		testConfig
		;;
	*)
		echo "Usage: $0 {start|stop|restart|reload|status|testConfig}"
		exit 1
		;;
esac
exit 0


  • Create a symlink
ln -s /etc/init.d/logstash.sh /usr/bin/logstash


  • Register application as a service (optional)
cd /etc/init.d
update-rc.d logstash.sh defaults
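
Thanks to the symlink created above, the init script can also be called directly from anywhere. For example, to validate the configuration, then start and check the daemon:

# "logstash" resolves to the init script through the /usr/bin/logstash symlink
logstash testConfig
logstash start
logstash status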


Automatic installation

Source: http://logstash.net/docs/latest/repositories

  • Add Logstash repository: see Sources#ELK
  • Install application
apt-get install logstash logstash-contrib

>> Binaries in /opt/logstash

>> Configuration in /etc/logstash/conf.d/

>> Logs in /var/log/logstash/



Configuration

GROK

Grok is used to split a log message into fields.
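
As a minimal (hypothetical) illustration, the pattern below splits a syslog-style line into named fields:

# Input line:
#   Nov 20 14:27:01 myhost sshd[1234]: Accepted password for bob
# Pattern:
#   %{SYSLOGTIMESTAMP:timestamp} %{HOSTNAME:host} %{WORD:program}\[%{INT:pid}\]: %{GREEDYDATA:msg}
# Resulting fields: timestamp, host, program, pid, msg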

Grok tools

You can create your own grok patterns and test them with the online Grok debugger: http://grokdebug.herokuapp.com/


Apache2 error

Create configuration file:

vim /etc/logstash/grok/apache2ErrorLog.grok


Put the following content:

HTTPERRORDATE %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{YEAR}
APACHEERRORLOG \[%{HTTPERRORDATE:timestamp}\] \[%{WORD:severity}\] \[client %{IPORHOST:clientip}\] %{GREEDYDATA:message_remainder}
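
For reference, APACHEERRORLOG is meant to capture Apache 2.2 error lines of the following (hypothetical) shape:

# [Thu Nov 20 14:27:01 2014] [error] [client 192.168.1.10] File does not exist: /var/www/favicon.ico
# -> timestamp, severity=error, clientip=192.168.1.10, message_remainder=File does not exist: ...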


IpTables

Create configuration file:

vim /etc/logstash/grok/iptables.grok


Put the following content:

NETFILTERMAC %{COMMONMAC:dst_mac}:%{COMMONMAC:src_mac}:%{ETHTYPE:ethtype}
ETHTYPE (?:(?:[A-Fa-f0-9]{2}):(?:[A-Fa-f0-9]{2}))
# IPv6 + v4
IPTABLES %{SYSLOGTIMESTAMP:timestamp} %{HOSTNAME} .* IN=(%{WORD:in_device})? OUT=(%{WORD:out_device})? *(MAC=%{NETFILTERMAC})? \ 
SRC=%{IP:src_ip} DST=%{IP:dst_ip} *(LEN=%{INT:pkt_length})? *(TOS=%{BASE16NUM:pkt_tos})? *(PREC=%{BASE16NUM:pkt_prec})? \ 
*(TTL=%{INT:pkt_ttl})? ID=%{INT:pkt_id} .* *(PROTO=%{WORD:protocol}) SPT=%{INT:src_port} DPT=%{INT:dst_port} \
*(WINDOW=%{INT:pkt_window})? *(RES=%{BASE16NUM:pkt_res})? .* *(URGP=%{INT:pkt_urgp})?
# IPv4 only
IPTABLES_V4 %{SYSLOGTIMESTAMP:timestamp} %{HOSTNAME} .* IN=(%{WORD:in_device})? OUT=(%{WORD:out_device})? *(MAC=%{NETFILTERMAC})? \
SRC=%{IPV4:src_ip} DST=%{IPV4:dst_ip} *(LEN=%{INT:pkt_length})? *(TOS=%{BASE16NUM:pkt_tos})? *(PREC=%{BASE16NUM:pkt_prec})? \
*(TTL=%{INT:pkt_ttl})? ID=%{INT:pkt_id} .* *(PROTO=%{WORD:protocol}) SPT=%{INT:src_port} DPT=%{INT:dst_port} \
*(WINDOW=%{INT:pkt_window})? *(RES=%{BASE16NUM:pkt_res})? .* *(URGP=%{INT:pkt_urgp})?
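
These patterns target kernel LOG-target lines of roughly this (hypothetical) form, where the MAC field is the destination MAC, source MAC and EtherType concatenated:

# Nov 20 14:27:01 firewall kernel: IN=eth0 OUT= MAC=00:11:22:33:44:55:66:77:88:99:aa:bb:08:00 SRC=203.0.113.5 DST=192.168.1.2 LEN=60 TOS=0x00 PREC=0x00 TTL=52 ID=12345 DF PROTO=TCP SPT=51515 DPT=22 WINDOW=29200 RES=0x00 SYN URGP=0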


Fail2ban

Create configuration file:

vim /etc/logstash/grok/fail2ban.grok


Put the following content:

FAIL2BAN %{TIMESTAMP_ISO8601:timestamp} %{JAVACLASS:criteria}: %{LOGLEVEL:level} \[%{WORD:service}\] Ban %{IPV4:clientip}
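
For reference, this pattern targets Fail2ban lines of the following (hypothetical) shape:

# 2014-11-20 14:27:01,123 fail2ban.actions: WARNING [ssh] Ban 203.0.113.5
# -> timestamp, criteria=fail2ban.actions, level=WARNING, service=ssh, clientip=203.0.113.5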


Logstash

Apache2

###### Data sources to process #####
input {
	file {
	    path => [ "/var/log/apache2/access.log", "/var/log/apache2/other_vhosts_access.log" ]
	    type => "apache-access"
	}
	file {
	    path => "/var/log/apache2/error.log"
	    type => "apache-error"
	}
}

filter {
	# ------------------------ Parse service logs into fields ---------------------------
	# APACHE 2
	if [type] == "apache-access" {
		# Parse the log line (the message field) using a regex or a precompiled grok pattern
		grok {
			match => [ "message", "%{COMBINEDAPACHELOG}"]
		}
		# Extract the event timestamp according to a date pattern
		date {
			match => [ "timestamp", "dd/MMM/YYYY:HH:mm:ss Z"]
		}
		# Extract browser information, if available.
		if [agent] != "" {
			useragent {
				source => "agent"
			}
		}
		if [clientip] != "" {
			geoip {
				source => "clientip"
			}
		}
	}

	if [type] == "apache-error" {
		grok {
			match => [ "message", "%{APACHEERRORLOG}"]
			# Directory where to find the custom patterns
			patterns_dir => ["/etc/logstash/grok"]
		}	
	}

	# ------------------- Process fields --------------------------	
	# Apache2 error client IP
	if [clientip]  {
		geoip {
			source => "clientip"
			target => "apache_geoip"
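			# The two add_field calls below build an array [longitude, latitude] that Kibana can use as a geo point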
			add_field => [ "[apache_geoip][coordinates]", "%{[apache_geoip][longitude]}" ]
			add_field => [ "[apache_geoip][coordinates]", "%{[apache_geoip][latitude]}"  ]
		}
		mutate {
			convert => [ "[apache_geoip][coordinates]", "float" ]
		}
	}
}

output { 
   ...
}



Edit the configuration file:

vim /etc/logstash/conf.d/logstash.conf


Check your configuration:

cd /opt/logstash/bin
./logstash -f /etc/logstash/conf.d/ -t --verbose


Apache2 logs

To process your Apache2 logs you can use the following configuration, which comes from the official Elasticsearch webinar:

vim /etc/logstash/conf.d/apache2_logs.conf


Put the following content:

## List of complete inputs | filters | output available on the official website: 
## http://logstash.net/docs/latest/index

## Configuration syntax: http://logstash.net/docs/latest/configuration


###### Data sources to process #####
input {
	file {
		path => "/var/log/apache2/combined_log"
		type => "apache"
	} 
	file {
	    path => "/var/log/messages"
	    type => "syslog"
	}
}


filter {
	# REMINDER: you can check in Kibana which field name to use for each filter.

	if [type] == "apache" {
		# Parse the log line (the message field) with the precompiled Apache combined-log grok pattern
		grok {
			match => [ "message", "%{COMBINEDAPACHELOG}"]
		}
		# Extract the event timestamp according to a date pattern
		date {
			match => [ "timestamp", "dd/MMM/YYYY:HH:mm:ss Z"]
		}
		# Extract browser information, if available.
		if [agent] != "" {
			useragent {
				source => "agent"
			}
		}
		if [clientip] != "" {
			geoip {
				source => "clientip"
			}
		}
	}
	
}

output {
	elasticsearch {
		cluster => "clusterName"
		node => "logstash_agent_name"
	}
}


Application logs

To be done: LOG4J logs



Start Logstash

service logstash start 

## OR ##
/etc/init.d/logstash start
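
You can then make sure the daemon is alive and watch its log file (paths as configured earlier in this page):

service logstash status
tail -f /var/log/logstash.log        # manual installation; the APT package logs under /var/log/logstash/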


References

https://home.regit.org/2014/01/a-bit-of-logstash-cooking/


