class LogStash::Outputs::Mongodb
This output writes events to MongoDB.
Public Instance Methods
close()
# File lib/logstash/outputs/mongodb.rb, line 129
def close
  @closed.make_true
  @bulk_thread.wakeup
  @bulk_thread.join
end
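close() coordinates with the bulk-flush thread created in register(): it flips the shared Concurrent::AtomicBoolean, interrupts the thread's sleep with Thread#wakeup, and then joins it. A minimal sketch of that handshake, with illustrative names and a placeholder flush body:

require 'concurrent'

closed = Concurrent::AtomicBoolean.new(false)

worker = Thread.new do
  while closed.false?
    sleep(5)                         # Thread#wakeup cuts this sleep short
    puts "flush buffered documents"  # stand-in for insert_many on the buffers
  end
end

sleep(1)                             # stand-in for the pipeline running normally

# The close() sequence: flip the flag, wake the sleeping worker, wait for it to exit.
closed.make_true
worker.wakeup
worker.join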
receive(event)
# File lib/logstash/outputs/mongodb.rb, line 77
def receive(event)
  begin
    # Our timestamp object now has a to_bson method, using it here
    # {}.merge(other) so we don't taint the event hash innards
    document = {}.merge(event.to_hash)
    if !@isodate
      timestamp = event.timestamp
      if timestamp
        # not using timestamp.to_bson
        document["@timestamp"] = timestamp.to_json
      else
        @logger.warn("Cannot set MongoDB document `@timestamp` field because it does not exist in the event", :event => event)
      end
    end

    if @generateId
      document["_id"] = BSON::ObjectId.new
    end

    if @bulk
      collection = event.sprintf(@collection)
      @@mutex.synchronize do
        if(!@documents[collection])
          @documents[collection] = []
        end
        @documents[collection].push(document)

        if(@documents[collection].length >= @bulk_size)
          @db[collection].insert_many(@documents[collection])
          @documents.delete(collection)
        end
      end
    else
      @db[event.sprintf(@collection)].insert_one(document)
    end
  rescue => e
    if e.message =~ /^E11000/
      # On a duplicate key error, skip the insert.
      # We could check if the duplicate key err is the _id key
      # and generate a new primary key.
      # If the duplicate key error is on another field, we have no way
      # to fix the issue.
      @logger.warn("Skipping insert because of a duplicate key error", :event => event, :exception => e)
    else
      @logger.warn("Failed to send event to MongoDB, retrying in #{@retry_delay.to_s} seconds", :event => event, :exception => e)
      sleep(@retry_delay)
      retry
    end
  end
end
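The rescue branch tells duplicate-key failures apart from transient ones purely by the server's error text. A small driver-level sketch of the condition being matched; the connection string and collection name are placeholders:

require 'mongo'

client = Mongo::Client.new('mongodb://localhost:27017/logstash')
events = client['events']

begin
  events.insert_one('_id' => 1)
  events.insert_one('_id' => 1)   # violates the unique _id index
rescue Mongo::Error::OperationFailure => e
  # The server reports this as "E11000 duplicate key error ...", which is what
  # `e.message =~ /^E11000/` above keys on: such events are skipped, while any
  # other failure sleeps for retry_delay and retries the insert.
  puts e.message
end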
register()
# File lib/logstash/outputs/mongodb.rb, line 49
def register
  if @bulk_size > 1000
    raise LogStash::ConfigurationError, "Bulk size must be lower than '1000', currently '#{@bulk_size}'"
  end

  Mongo::Logger.logger = @logger
  conn = Mongo::Client.new(@uri)
  @db = conn.use(@database)

  @closed = Concurrent::AtomicBoolean.new(false)
  @documents = {}

  @bulk_thread = Thread.new(@bulk_interval) do |bulk_interval|
    while @closed.false? do
      sleep(bulk_interval)

      @@mutex.synchronize do
        @documents.each do |collection, values|
          if values.length > 0
            @db[collection].insert_many(values)
            @documents.delete(collection)
          end
        end
      end
    end
  end
end
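register() builds the database handle through the Ruby driver and refuses bulk_size values above 1000, which appears to mirror the batch-size cap older MongoDB servers placed on a single write command. A sketch of the same driver calls in isolation, with a placeholder URI and collection name, showing what @db[collection] resolves to in receive():

require 'mongo'

client = Mongo::Client.new('mongodb://localhost:27017/admin')   # placeholder URI
db     = client.use('logstash')   # as in register(): a client scoped to one database

# db[name] yields a Mongo::Collection; receive() relies on exactly these two calls.
db['events'].insert_one('message' => 'single event')                      # per-event path
db['events'].insert_many([{ 'message' => 'a' }, { 'message' => 'b' }])    # bulk flush path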