forked from elastic/logstash
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge branch 'master' of github.com:logstash/logstash
- Loading branch information
Showing
8 changed files
with
302 additions
and
20 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,68 @@ | ||
require "logstash/outputs/base" | ||
require "logstash/namespace" | ||
|
||
# PagerDuty output | ||
# Send specific events to PagerDuty for alerting | ||
|
||
class LogStash::Outputs::PagerDuty < LogStash::Outputs::Base
  config_name "pagerduty"
  plugin_status "experimental"

  # Service API Key
  config :service_key, :validate => :string, :required => true

  # The incident key to use (de-duplicates open incidents in PagerDuty).
  # You'll need to set this up in PD beforehand.
  # Event field expansion (event.sprintf) is supported.
  config :incident_key, :validate => :string, :default => "logstash/%{@source_host}/%{@type}"

  # Event type sent to the PagerDuty events API.
  config :event_type, :validate => ["trigger", "acknowledge", "resolve"], :default => "trigger"

  # Custom description (event.sprintf-expanded).
  config :description, :validate => :string, :default => "Logstash event for %{@source_host}"

  # Event details
  # These might be keys from the logstash event you wish to include.
  # Tags are automatically included if detected so no need to add them here.
  config :details, :validate => :hash, :default => {"timestamp" => "%{@timestamp}", "message" => "%{@message}"}

  # PagerDuty API url
  # You shouldn't need to change this.
  # This allows for flexibility should PD iterate the API
  # and Logstash hasn't updated yet.
  config :pdurl, :validate => :string, :default => "http://events.pagerduty.com/generic/2010-04-15/create_event.json"

  public
  def register
    # Lazy-load the HTTP client only when this output is actually used.
    require 'ftw'
    @client = FTW::Agent.new
  end # def register

  public
  def receive(event)
    return unless output?(event)

    # Build the PagerDuty event payload. @service_key and @event_type are
    # already strings (validated above), so no interpolation is needed.
    pd_event = Hash.new
    pd_event[:service_key] = @service_key
    pd_event[:incident_key] = event.sprintf(@incident_key)
    pd_event[:event_type] = @event_type
    pd_event[:description] = event.sprintf(@description)
    pd_event[:details] = Hash.new
    @details.each do |key, value|
      @logger.debug("Details added:", key => event.sprintf(value))
      # Hash config keys are already strings; to_s keeps that invariant cheaply.
      pd_event[:details][key.to_s] = event.sprintf(value)
    end
    pd_event[:details][:tags] = @tags if @tags

    @logger.info("PD Event", :event => pd_event)
    begin
      request = @client.post(@pdurl, :body => pd_event.to_json)
      @logger.debug("PD Request", :request => request)
      response = @client.execute(request)
      @logger.debug("PD Response", :response => response)
    rescue StandardError => e
      # Never rescue Exception: that would also swallow SignalException and
      # SystemExit. Log at warn so delivery failures are visible at default
      # log levels instead of being hidden behind debug.
      @logger.warn("Unhandled exception sending event to PagerDuty", :error => e)
    end
  end # def receive
end # class LogStash::Outputs::PagerDuty
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,94 @@ | ||
require "logstash/outputs/base" | ||
require "logstash/namespace" | ||
|
||
# Riak is a distributed k/v store from Basho. | ||
# It's based on the Dynamo model. | ||
|
||
class LogStash::Outputs::Riak < LogStash::Outputs::Base
  config_name "riak"
  plugin_status "experimental"

  # The nodes of your Riak cluster.
  # This can be a single host or a Logstash hash of node/port pairs, e.g.
  #   ["node1", "8098", "node2", "8098"]
  config :nodes, :validate => :hash, :default => {"localhost" => "8098"}

  # The protocol to use: HTTP or ProtoBuf.
  # Applies to ALL nodes listed above — no mix and match.
  config :proto, :validate => ["http", "pb"], :default => "http"

  # The bucket name to write events to.
  # Expansion is supported here as values are passed through event.sprintf.
  config :bucket, :validate => :string, :default => "logstash-%{+YYYY.MM.dd}"

  # The event key name; variables are valid here.
  # Choose this carefully. Best to let riak decide....
  config :key_name, :validate => :string

  # Quorum options (NYI)
  # Logstash hash of options for various quorum parameters, i.e.
  #   `quorum => ["r", "1", "w", "1", "dw", "1"]`
  # NOTE(review): :validate => :array but the default is a Hash — presumably
  # Logstash coerces flat arrays into hashes here; confirm before relying on it.
  config :quorum, :validate => :array, :default => {"r" => 1, "w" => 1, "dw" => 1}

  # Indices
  # Array of fields to add secondary indexes (2i) on, e.g.
  #   `indices => ["@source_host", "@type"]`
  # Off by default as not everyone runs eleveldb.
  config :indices, :validate => :array

  # Search (NYI)
  # Enable search on the bucket defined above.
  config :enable_search, :validate => :boolean, :default => false

  public
  def register
    require 'riak'
    cluster_nodes = []
    @logger.debug("Setting protocol", :protocol => @proto)
    # riak-client expects :http_port or :pb_port per node, keyed by protocol.
    proto_type = "#{@proto}_port".to_sym
    @nodes.each do |node, port|
      @logger.debug("Adding node", :node => node, :port => port)
      cluster_nodes << {:host => node, proto_type => port}
    end
    @logger.debug("Cluster nodes", :nodes => cluster_nodes)
    @client = Riak::Client.new(:nodes => cluster_nodes)
  end # def register

  public
  def receive(event)
    return unless output?(event)

    # Set up our bucket; the name is sprintf-expanded per event.
    bukkit = @client.bucket(event.sprintf(@bucket))
    @logger.debug("Bucket", :bukkit => bukkit.to_s)

    # When no key is configured, pass nil so Riak generates one.
    evt_key = @key_name.nil? ? nil : event.sprintf(@key_name)
    evt = Riak::RObject.new(bukkit, evt_key)
    @logger.debug("RObject", :robject => evt.to_s)
    begin
      evt.content_type = "application/json"
      evt.data = event
      if @indices
        @indices.each do |k|
          # 2i binary index names strip the leading '@' from event fields.
          idx_name = "#{k.gsub('@', '')}_bin"
          @logger.debug("Riak index name", :idx => idx_name)
          @logger.info("Indexes", :indexes => evt.indexes.to_s)
          evt.indexes[idx_name] << event.sprintf("%{#{k}}")
        end
      end
      evt.store
    rescue StandardError => e
      # Never rescue Exception — it swallows SignalException/SystemExit and
      # masks interpreter-level failures. StandardError covers store errors.
      @logger.warn("Exception storing", :message => e.message)
    end
  end # def receive
end # class LogStash::Outputs::Riak
Oops, something went wrong.