46 changes: 45 additions & 1 deletion lib/logstash/codecs/json.rb
@@ -29,6 +29,18 @@ class LogStash::Codecs::JSON < LogStash::Codecs::Base
# For nxlog users, you may want to set this to "CP1252".
config :charset, :validate => ::Encoding.name_list, :default => "UTF-8"

# When specified, build a JSON object based on the mapping structure,
# taking the necessary information from the event.
# This setting allows you to build a specific JSON object inside any output configuration that supports codecs.
#
# * A JSON key can be either a literal string `"abc"` or an interpolated string `"%{[event_accessor][subfield]}"`
# * A JSON value can be a literal scalar, array, or object, an interpolated string `"%{[field]}"`, or an event accessor `"[field][nested_field]"` (array, hash, or scalar)
#
# The difference between the interpolated string `"%{[field]}"` and the bare accessor `"[field]"` is the type of the resulting element:
# an interpolated string always produces a string value, while an accessor injects the event field as-is, preserving its type (integer, boolean, array, hash, and so on).
# See the example below.
#
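# An illustrative output configuration (hypothetical example; it assumes the event carries `user` and `status` fields):
# [source,ruby]
#     output {
#       stdout {
#         codec => json {
#           encode_mapping => {
#             "user_%{[user][id]}" => "[user]"   # accessor value: the `user` hash is injected as-is
#             "status" => "%{status}"            # interpolated value: always rendered as a string
#           }
#         }
#       }
#     }
#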
config :encode_mapping, :validate => :hash

def register
@converter = LogStash::Util::Charset.new(@charset)
@converter.logger = @logger
@@ -38,8 +50,40 @@ def decode(data, &block)
parse(@converter.convert(data), &block)
end

# Build a hash from the configured mapping, resolving each key (via sprintf
# interpolation) and each value (via build_mapping_value) against the event.
def build_mapping(event, mapping)
map = Hash.new
mapping.each do |key, value|
k = event.sprintf(key)
v = build_mapping_value(event, value)
map[k] = v
end
return map
end

# Resolve a single mapping value: recurse into hashes and arrays, fetch event
# fields referenced by a bare accessor string such as "[field][sub]" as-is,
# and run everything else through sprintf interpolation.
def build_mapping_value(event, value)
if value.is_a?(Hash)
v = build_mapping(event, value)
elsif value.is_a?(Array)
v = value.map do |val|
build_mapping_value(event, val)
end
elsif !value.is_a?(String)
# Literal scalars (numbers, booleans, nil) pass through unchanged.
v = value
else
if /^(?:\[[^\[\]]+\])+$/.match(value)
v = event.get(value)
else
v = event.sprintf(value)
end
end
return v
end

def encode(event)
if @encode_mapping
# Serialize the JSON object built from the configured mapping.
encode_result = LogStash::Json.dump(build_mapping(event, @encode_mapping))
else
encode_result = event.to_json
end
@on_event.call(event, encode_result)
end

private
117 changes: 116 additions & 1 deletion spec/codecs/json_spec.rb
@@ -149,6 +149,105 @@
end
end

context "#build_mapping" do

context "event sprintf on key" do
it "should interpolate mapping key with sprintf syntax" do
mapping = { "%{my_key}" => "%{int}", "prefixed_%{my_key}" => "%{int}"}
data = { "my_key" => "abc", "int" => 123 }
event = LogStash::Event.new(data)
result = subject.build_mapping(event,mapping)
insist { result } == { "abc" => "123", "prefixed_abc" => "123" }
end
end

context "event sprintf on value" do

it "should interpolate event fields that are scalar" do
mapping = { "int" => "%{int}", "double" => "%{double}" ,"bar" => "%{string}", "baz" => "%{bool}"}
data = { "int" => 123, "double" => 123.4 ,"string" => "string content", "bool" => false}
event = LogStash::Event.new(data)
result = subject.build_mapping(event,mapping)
insist { result } == { "int" => "123", "double" => "123.4","bar" => "string content", "baz" => "false" }
end

it "should interpolate event fields that are array" do
mapping = { "foo" => "%{[baz][bah]}"}
data = {"foo" => "bar", "baz" => {"bah" => ["a","b","c"]}}
event = LogStash::Event.new(data)
result = subject.build_mapping(event,mapping)
insist { result } == { "foo" => "a,b,c" }
end

#Documenting the current behaviour, not expected to be used
it "should interpolate event fields that are hash" do
mapping = { "foo" => "%{[baz]}"}
data = {"foo" => "bar", "baz" => {"bah" => ["a","b","c"]}}
event = LogStash::Event.new(data)
result = subject.build_mapping(event,mapping)
insist { result } == { "foo" => "{\"bah\":[\"a\",\"b\",\"c\"]}" }
end
end

context "event accessor on value" do

it "should include event fields that are scalar" do
mapping = { "int" => "[int]", "double" => "[double]" ,"bar" => "[string]", "baz" => "[bool]"}
data = { "int" => 123, "double" => 123.4 ,"string" => "string content", "bool" => false}
event = LogStash::Event.new(data)
result = subject.build_mapping(event,mapping)
insist { result } == { "int" => 123, "double" => 123.4,"bar" => "string content", "baz" => false }
end

#keep object type (hash/number/boolean)
it "should include event field from accessor string" do
mapping = { "foo" => "[baz][bah]"}
data = {"foo" => "bar", "baz" => {"bah" => ["a","b","c"]}}
event = LogStash::Event.new(data)
result = subject.build_mapping(event,mapping)
insist { result } == { "foo" => ["a","b","c"] }
end
end
context "with mapping nested_hash" do
it "should support hash nested in hash" do
mapping = { "int" => "[int]", "nested_hash" => { "nested_hash-2" => {"foo" => "[bool]"}, "bar" => "%{bool}"} }
data = { "int" => 123, "double" => 123.4 ,"string" => "string content", "bool" => false}
event = LogStash::Event.new(data)
result = subject.build_mapping(event,mapping)
insist { result } == { "int" => 123, "nested_hash" => { "nested_hash-2" => {"foo" => false}, "bar" => "false"} }
end
end

context "with mapping array" do
it "should create array from event field accessor string" do
mapping = { "array" => [ "[int]", "[double]", "[string]", "[bool]" ] }
data = { "int" => 123, "double" => 123.4 ,"string" => "string content", "bool" => false}
event = LogStash::Event.new(data)
result = subject.build_mapping(event,mapping)
insist { result } == { "array" => [123, 123.4, "string content", false] }
end
end

context "with complex mapping array of hash && hash of array" do
it "should support array nested in array" do
mapping = { "array" => [ "[int]", "[double]", "[string]", "nested_array" => [ "[int]", "[double]" ] ] }
data = { "int" => 123, "double" => 123.4 ,"string" => "string content", "bool" => false}
event = LogStash::Event.new(data)
result = subject.build_mapping(event,mapping)
insist { result } == { "array" => [123, 123.4, "string content", "nested_array" => [123,123.4]] }
end

it "should support hash nested in array" do
mapping = { "array" => [ "[int]", "[double]", "[string]", "nested_hash" => { "foo" => "[bool]", "bar" => "%{bool}"} ] }
data = { "int" => 123, "double" => 123.4 ,"string" => "string content", "bool" => false}
event = LogStash::Event.new(data)
result = subject.build_mapping(event,mapping)
insist { result } == { "array" => [123, 123.4, "string content", "nested_hash" => { "foo" => false, "bar" => "false"}] }
end

end
end

context "#encode" do
it "should return json data" do
data = {"foo" => "bar", "baz" => {"bah" => ["a","b","c"]}}
@@ -167,6 +266,23 @@
end
end

context "#encode with mapping" do
subject do
LogStash::Codecs::JSON.new("encode_mapping" => {"prefixed_%{foo}" => "[baz][bah]"})
end
it "should return mapped json data" do
data = {"foo" => "bar", "baz" => {"bah" => ["a","b","c"]}}
event = LogStash::Event.new(data)
got_event = false
subject.on_event do |e, d|
insist { LogStash::Json.load(d) } == { "prefixed_bar" => ["a","b","c"]}
got_event = true
end
subject.encode(event)
insist { got_event }
end
end

context "forcing legacy parsing" do
it_behaves_like :codec do
before(:each) do
@@ -187,5 +303,4 @@
# do nothing
end
end

end