Skip to content

Commit

Permalink
Merge branch 'master' of github.com:logstash/logstash
Browse files Browse the repository at this point in the history
  • Loading branch information
jordansissel committed Jun 26, 2012
2 parents fed52dc + 7f6f004 commit ee977c6
Show file tree
Hide file tree
Showing 8 changed files with 302 additions and 20 deletions.
4 changes: 3 additions & 1 deletion CHANGELOG
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
## Overview of this release:
* New input plugins: generator, heroku, pipe, ganglia
* New output plugins: juggernaut, metricscatcher, nagios_ncsa, pipe,
opentsdb
opentsdb, pagerduty
* New filter plugins: zeromq, environment, xml, csv, syslog_pri
* Fixes for gelf output

Expand All @@ -25,6 +25,7 @@
Previously, file input would have @source = 'file://host//var/log/foo' and
@source_path = '//var/log/foo'; now @source = 'file://host/var/log/foo'
and @source_path = '/var/log/foo'. [LOGSTASH-501]
- file input now rejects relative paths. [LOGSTASH-503]

## general
- NOTE: gemspec removed; deploying logstash as a gem hasn't been supported
Expand Down Expand Up @@ -58,6 +59,7 @@
- bugfix: some enhancements to grok pattern %{COMBINEDAPACHELOG}
- bugfix: grok: %{URIPATH} and %{URIPARAM} enhancements
- feature: grok: add %{UUID} pattern
- bugfix: grok: better error message when expanding unknown %{pattern}

## outputs
- bugfix: [LOGSTASH-351] - fix file input on windows
Expand Down
3 changes: 2 additions & 1 deletion Gemfile
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ gem "bunny" # for amqp support, MIT-style license
gem "uuidtools" # for naming amqp queues, License ???

gem "filewatch", "0.3.3" # for file tailing, BSD License
gem "jls-grok", "0.10.6" # for grok filter, BSD License
gem "jls-grok", "0.10.7" # for grok filter, BSD License
gem "aws-sdk" # for AWS access: SNS and S3 log tailing. Apache 2.0 License
gem "jruby-elasticsearch", "0.0.13", :platforms => :jruby # BSD License
gem "onstomp" # for stomp protocol, Apache 2.0 License
Expand Down Expand Up @@ -38,6 +38,7 @@ gem "ffi-rzmq", "0.9.0"
gem "ffi"

gem "riemann-client", "0.0.6" #outputs/riemann, License: MIT
gem "riak-client", "1.0.3" #outputs/riak, License: Apache 2.0

group :test do
gem "mocha"
Expand Down
8 changes: 6 additions & 2 deletions Gemfile.lock
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ GEM
multi_json (~> 1.0)
multi_xml
i18n (0.6.0)
jls-grok (0.10.6)
jls-grok (0.10.7)
cabin (~> 0.4.0)
jruby-elasticsearch (0.0.13)
jruby-openssl (0.7.7)
Expand Down Expand Up @@ -80,6 +80,10 @@ GEM
nokogiri (1.5.3-java)
onstomp (1.0.6)
polyglot (0.3.3)
pry (0.9.9.6)
coderay (~> 1.0.5)
method_source (~> 0.7.1)
slop (>= 2.4.4, < 3)
pry (0.9.9.6-java)
coderay (~> 1.0.5)
method_source (~> 0.7.1)
Expand Down Expand Up @@ -134,7 +138,7 @@ DEPENDENCIES
gmetric (= 0.1.3)
haml
heroku
jls-grok (= 0.10.6)
jls-grok (= 0.10.7)
jruby-elasticsearch (= 0.0.13)
jruby-openssl
jruby-win32ole
Expand Down
39 changes: 26 additions & 13 deletions Makefile
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# Requirements to build:
# ant
# cpio
# wget
# wget or curl
#
JRUBY_VERSION=1.6.7.2
ELASTICSEARCH_VERSION=0.19.4
Expand All @@ -20,6 +20,9 @@ PLUGIN_FILES=$(shell git ls-files | egrep '^lib/logstash/(inputs|outputs|filters
GEM_HOME=build/gems
QUIET=@

WGET=$(shell command -v wget)
CURL=$(shell command -v curl)

# OS-specific options
TARCHECK=$(shell tar --help|grep wildcard|wc -l|tr -d ' ')
ifeq (0, $(TARCHECK))
Expand All @@ -30,6 +33,20 @@ endif

default: jar

# Figure out if we're using wget or curl
.PHONY: wget-or-curl
wget-or-curl:
ifeq ($(WGET),)
ifeq ($(CURL),)
@echo "wget or curl are required."
exit 1
else
DOWNLOAD_COMMAND=curl -L -k -o
endif
else
DOWNLOAD_COMMAND=wget --no-check-certificate -O
endif

# Compile config grammar (ragel -> ruby)
.PHONY: compile-grammar
compile-grammar: lib/logstash/config/grammar.rb
Expand Down Expand Up @@ -83,21 +100,17 @@ build/jruby/jruby-$(JRUBY_VERSION)/lib/jruby-complete.jar: build/jruby/jruby-$(J
build/jruby/jruby-$(JRUBY_VERSION): build/jruby/jruby-src-$(JRUBY_VERSION).tar.gz
$(QUIET)tar -C build/jruby/ $(TAR_OPTS) -zxf $<

build/jruby/jruby-src-$(JRUBY_VERSION).tar.gz: | build/jruby
build/jruby/jruby-src-$(JRUBY_VERSION).tar.gz: wget-or-curl | build/jruby
@echo "=> Fetching jruby source"
$(QUIET)wget -O $@ http://jruby.org.s3.amazonaws.com/downloads/$(JRUBY_VERSION)/jruby-src-$(JRUBY_VERSION).tar.gz
$(QUIET)$(DOWNLOAD_COMMAND) $@ http://jruby.org.s3.amazonaws.com/downloads/$(JRUBY_VERSION)/jruby-src-$(JRUBY_VERSION).tar.gz

vendor/jar/elasticsearch-$(ELASTICSEARCH_VERSION).tar.gz: | vendor/jar
@# --no-check-certificate is for github and wget not supporting wildcard
@# certs sanely.
vendor/jar/elasticsearch-$(ELASTICSEARCH_VERSION).tar.gz: wget-or-curl | vendor/jar
@echo "=> Fetching elasticsearch"
$(QUIET)wget --no-check-certificate \
-O $@ $(ELASTICSEARCH_URL)/elasticsearch-$(ELASTICSEARCH_VERSION).tar.gz
$(QUIET)$(DOWNLOAD_COMMAND) $@ $(ELASTICSEARCH_URL)/elasticsearch-$(ELASTICSEARCH_VERSION).tar.gz

vendor/jar/graphtastic-rmiclient.jar: | vendor/jar
vendor/jar/graphtastic-rmiclient.jar: wget-or-curl | vendor/jar
@echo "=> Fetching graphtastic rmi client jar"
$(QUIET)wget --no-check-certificate \
-O $@ http://cloud.github.com/downloads/NickPadilla/GraphTastic/graphtastic-rmiclient.jar
$(QUIET)$(DOWNLOAD_COMMAND) $@ http://cloud.github.com/downloads/NickPadilla/GraphTastic/graphtastic-rmiclient.jar

.PHONY: vendor-elasticsearch
vendor-elasticsearch: $(ELASTICSEARCH)
Expand All @@ -106,8 +119,8 @@ $(ELASTICSEARCH): $(ELASTICSEARCH).tar.gz | vendor/jar
$(QUIET)tar -C $(shell dirname $@) -xf $< $(TAR_OPTS) --exclude '*sigar*' \
'elasticsearch-$(ELASTICSEARCH_VERSION)/lib/*.jar'

vendor/jar/joda-time-$(JODA_VERSION)-dist.tar.gz: | vendor/jar
wget -O $@ "http://downloads.sourceforge.net/project/joda-time/joda-time/$(JODA_VERSION)/joda-time-$(JODA_VERSION)-dist.tar.gz"
vendor/jar/joda-time-$(JODA_VERSION)-dist.tar.gz: wget-or-curl | vendor/jar
$(DOWNLOAD_COMMAND) $@ "http://downloads.sourceforge.net/project/joda-time/joda-time/$(JODA_VERSION)/joda-time-$(JODA_VERSION)-dist.tar.gz"

vendor/jar/joda-time-$(JODA_VERSION)/joda-time-$(JODA_VERSION).jar: vendor/jar/joda-time-$(JODA_VERSION)-dist.tar.gz | vendor/jar
tar -C vendor/jar -zxf $< joda-time-$(JODA_VERSION)/joda-time-$(JODA_VERSION).jar
Expand Down
19 changes: 17 additions & 2 deletions lib/logstash/inputs/file.rb
Original file line number Diff line number Diff line change
@@ -1,7 +1,10 @@
require "logstash/inputs/base"
require "logstash/namespace"

require "pathname"
require "socket" # for Socket.gethostname
require "uri"

require "addressable/uri"

# Stream events from files.
#
Expand All @@ -16,6 +19,7 @@ class LogStash::Inputs::File < LogStash::Inputs::Base

# The path to the file to use as an input.
# You can use globs here, such as "/var/log/*.log"
# Paths must be absolute and cannot be relative.
config :path, :validate => :array, :required => true

# Exclusions (matched against the filename, not full path). Globs
Expand Down Expand Up @@ -45,6 +49,17 @@ class LogStash::Inputs::File < LogStash::Inputs::Base
# monitored log files.
config :sincedb_write_interval, :validate => :number, :default => 15

public
# Construct the file input, validating configuration eagerly.
#
# Every entry in `path` must be an absolute filesystem path; relative
# paths are rejected up front so misconfiguration fails fast instead of
# silently watching the wrong location. [LOGSTASH-503]
#
# @param params [Hash] plugin configuration (handled by the base class)
# @raise [ArgumentError] if any configured path is relative
def initialize(params)
  super

  @path.each do |path|
    # Pathname#relative? is true for any path not anchored at the root.
    if Pathname.new(path).relative?
      raise ArgumentError, "File paths must be absolute, relative path specified: #{path}"
    end
  end
end

public
def register
require "filewatch/tail"
Expand All @@ -68,7 +83,7 @@ def run(queue)
hostname = Socket.gethostname

tail.subscribe do |path, line|
source = URI::Generic.new("file", nil, hostname, nil, nil, path, nil, nil, nil).to_s
source = Addressable::URI.new(:scheme => "file", :host => hostname, :path => path).to_s
@logger.debug("Received line", :path => path, :line => line)
e = to_event(line, source)
if e
Expand Down
68 changes: 68 additions & 0 deletions lib/logstash/outputs/pagerduty.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,68 @@
require "logstash/outputs/base"
require "logstash/namespace"

# PagerDuty output
# Send specific events to PagerDuty for alerting

class LogStash::Outputs::PagerDuty < LogStash::Outputs::Base
  config_name "pagerduty"
  plugin_status "experimental"

  # Service API Key used to authenticate against the PagerDuty Events API.
  config :service_key, :validate => :string, :required => true

  # The incident key to use (sprintf-expanded per event).
  # You'll need to set this up in PD beforehand.
  config :incident_key, :validate => :string, :default => "logstash/%{@source_host}/%{@type}"

  # Event type: trigger a new incident, or acknowledge/resolve an existing one.
  config :event_type, :validate => ["trigger", "acknowledge", "resolve"], :default => "trigger"

  # Custom description (sprintf-expanded per event).
  config :description, :validate => :string, :default => "Logstash event for %{@source_host}"

  # Event details.
  # These might be keys from the logstash event you wish to include;
  # values are sprintf-expanded per event. Tags are automatically
  # included if detected, so no need to add them here.
  config :details, :validate => :hash, :default => {"timestamp" => "%{@timestamp}", "message" => "%{@message}"}

  # PagerDuty API url.
  # You shouldn't need to change this. This allows for flexibility should
  # PD iterate the API and Logstash hasn't updated yet.
  config :pdurl, :validate => :string, :default => "http://events.pagerduty.com/generic/2010-04-15/create_event.json"

  public
  # Set up the HTTP client used to post events.
  def register
    require 'ftw'
    require 'json' # Hash#to_json in #receive needs the JSON core extensions
    @client = FTW::Agent.new
  end # def register

  public
  # Build a PagerDuty event payload from the logstash event and POST it.
  # Delivery failures are logged but never raised, so a PD outage cannot
  # take down the output pipeline.
  def receive(event)
    return unless output?(event)

    pd_event = Hash.new
    pd_event[:service_key] = "#{@service_key}"
    pd_event[:incident_key] = event.sprintf(@incident_key)
    pd_event[:event_type] = "#{@event_type}"
    pd_event[:description] = event.sprintf(@description)
    pd_event[:details] = Hash.new
    @details.each do |key, value|
      @logger.debug("Details added:" , key => event.sprintf(value))
      pd_event[:details]["#{key}"] = event.sprintf(value)
    end
    pd_event[:details][:tags] = @tags if @tags

    @logger.info("PD Event", :event => pd_event)
    begin
      request = @client.post(@pdurl, :body => pd_event.to_json)
      @logger.debug("PD Request", :request => request)
      response = @client.execute(request)
      @logger.debug("PD Response", :response => response)
    # Bare rescue == StandardError. Rescuing Exception here (as before)
    # would also swallow SignalException/SystemExit, preventing clean
    # shutdown. Log at warn so delivery failures are actually visible.
    rescue => e
      @logger.warn("Unhandled exception", :error => e)
    end
  end # def receive
end # class LogStash::Outputs::PagerDuty
94 changes: 94 additions & 0 deletions lib/logstash/outputs/riak.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,94 @@
require "logstash/outputs/base"
require "logstash/namespace"

# Riak is a distributed k/v store from Basho.
# It's based on the Dynamo model.

class LogStash::Outputs::Riak < LogStash::Outputs::Base
  config_name "riak"
  plugin_status "experimental"

  # The nodes of your Riak cluster.
  # This can be a single host or a Logstash hash of node/port pairs, e.g.
  #   ["node1", "8098", "node2", "8098"]
  config :nodes, :validate => :hash, :default => {"localhost" => "8098"}

  # The protocol to use: HTTP or ProtoBuf.
  # Applies to ALL backends listed above; no mix and match.
  config :proto, :validate => ["http", "pb"], :default => "http"

  # The bucket name to write events to.
  # Expansion is supported here as values are passed through event.sprintf.
  config :bucket, :validate => :string, :default => "logstash-%{+YYYY.MM.dd}"

  # The event key name; variables are valid here.
  #
  # Choose this carefully. Best to let riak decide....
  config :key_name, :validate => :string

  # Quorum options (NYI).
  # Logstash hash of options for various quorum parameters, i.e.
  #   `quorum => ["r", "1", "w", "1", "dw", "1"]`
  # NOTE(review): validator says :array but the default is a Hash —
  # confirm intended type before implementing; left as-is since this is NYI.
  config :quorum, :validate => :array, :default => {"r" => 1, "w" => 1, "dw" => 1}

  # Indices: array of fields to add 2i (secondary indexes) on, e.g.
  #   `indices => ["@source_host", "@type"]`
  # Off by default as not everyone runs eleveldb.
  config :indices, :validate => :array

  # Search (NYI): enable search on the bucket defined above.
  config :enable_search, :validate => :boolean, :default => false

  public
  # Build the Riak client from the configured node/port pairs.
  def register
    require 'riak'
    cluster_nodes = Array.new
    @logger.debug("Setting protocol", :protocol => @proto)
    # riak-client expects :http_port or :pb_port per node depending on protocol.
    proto_type = "#{@proto}_port".to_sym
    @nodes.each do |node,port|
      @logger.debug("Adding node", :node => node, :port => port)
      cluster_nodes << {:host => node, proto_type => port}
    end
    @logger.debug("Cluster nodes", :nodes => cluster_nodes)
    @client = Riak::Client.new(:nodes => cluster_nodes)
  end # def register

  public
  # Store the event as a JSON object in the configured (sprintf-expanded)
  # bucket, optionally attaching secondary indexes. Storage failures are
  # logged but never raised.
  def receive(event)
    return unless output?(event)

    # setup our bucket
    bukkit = @client.bucket(event.sprintf(@bucket))
    @logger.debug("Bucket", :bukkit => bukkit.to_s)

    # nil key lets riak generate one for us
    evt_key = @key_name.nil? ? nil : event.sprintf(@key_name)
    evt = Riak::RObject.new(bukkit, evt_key)
    @logger.debug("RObject", :robject => evt.to_s)
    begin
      evt.content_type = "application/json"
      evt.data = event
      if @indices
        @indices.each do |k|
          # 2i binary index names must end in _bin; strip the @ from field names.
          idx_name = "#{k.gsub('@','')}_bin"
          @logger.debug("Riak index name", :idx => idx_name)
          @logger.info("Indexes", :indexes => evt.indexes.to_s)
          evt.indexes[idx_name] << event.sprintf("%{#{k}}")
        end
      end
      evt.store
    # Bare rescue == StandardError. Rescuing Exception here (as before)
    # would also swallow SignalException/SystemExit, preventing clean shutdown.
    rescue => e
      @logger.warn("Exception storing", :message => e.message)
    end
  end # def receive
end # class LogStash::Outputs::Riak
Loading

0 comments on commit ee977c6

Please sign in to comment.