# Logstash pipeline: MongoDB server logs, shipped as JSON via a Redis list,
# parsed into structured fields and indexed into Elasticsearch.
#
# FIX: all named captures had been mangled from "(?<mongo_action>...)" to
# "(?...)" (the "<mongo_action>" part was stripped, likely by an HTML
# export eating angle brackets). "(?query)" is invalid regex syntax, so the
# config could not load, and [mongo_action] — which the conditionals and the
# ruby facet-copier below depend on — was never set. Restored throughout.

input {
  redis {
    host      => "redis-1.localnet.local"
    port      => 6379
    data_type => "list"
    key       => "log-mongodb"
    codec     => "json"
  }
}

filter {
  # Split a raw mongod/mongos line into timestamp, connection id, and the rest.
  # Lines that do not even have this shape are tagged invalid_log.
  grok {
    match => [ "message", "%{TIMESTAMP_ISO8601:timestamp}\s+\[%{DATA:mongo_conn}\]\s+%{GREEDYDATA:mongo_message}" ]
    tag_on_failure => [ "invalid_log" ]
  }

  if [timestamp] {
    date {
      match  => [ "timestamp", "ISO8601" ]
      locale => "en"
    }

    # Classify the remainder by operation type into [mongo_action], trimming
    # the matched prefix off mongo_message (overwrite) as we go.
    grok {
      match => [
        "mongo_message", "(?<mongo_action>query) %{DATA:mongo_dbcol} query:\s*%{DATA:mongo_query}\s+planSummary:\s*%{GREEDYDATA:mongo_message}",
        "mongo_message", "(?<mongo_action>insert) %{DATA:mongo_dbcol} query:\s*%{DATA:mongo_query}\s+ninserted:%{INT}\s+",
        "mongo_message", "(?<mongo_action>update) %{DATA:mongo_dbcol} query:\s*%{DATA:mongo_query}\s+update:\s*%{DATA:mongo_update}\s+nMatched:%{INT:mongo_nMatched:int}\s+%{GREEDYDATA:mongo_message}",
        "mongo_message", "(?<mongo_action>remove) %{DATA:mongo_dbcol} query:\s*%{DATA:mongo_query}\s+ndeleted:%{INT:mongo_ndeleted:int}\s+%{GREEDYDATA:mongo_message}",
        "mongo_message", "(?<mongo_action>command) %{DATA:mongo_dbcol} command:\s*%{DATA:mongo_command}\s+%{GREEDYDATA:mongo_message}",
        "mongo_message", "(?<mongo_action>getmore) %{DATA:mongo_dbcol} cursorid:\s*%{INT:mongo_cursorid}\s+%{GREEDYDATA:mongo_message}",
        "mongo_message", "(?<mongo_action>options):\s*%{GREEDYDATA:mongo_message}",
        "mongo_message", "(?<mongo_action>(distributed lock|Unauthorized))[:\s]+%{GREEDYDATA:mongo_message}",
        "mongo_message", "(?<mongo_action>authenticate db)[:\s]+%{DATA:mongo_db}\s+%{GREEDYDATA:mongo_message}",
        "mongo_message", "(?<mongo_action>connection accepted) from %{IP:mongo_conn_ip}:%{INT:mongo_conn_port}\s+%{GREEDYDATA:mongo_message}",
        "mongo_message", "(?<mongo_action>end connection) %{IP:mongo_conn_ip}:%{INT:mongo_conn_port}\s+%{GREEDYDATA:mongo_message}",
        "mongo_message", "(?<mongo_action>CMD) %{DATA:mongo_cmd}:\s*%{GREEDYDATA:mongo_message}",
        "mongo_message", "(?<mongo_action>old journal file will be removed):\s*%{GREEDYDATA:mongo_message}",
        "mongo_message", "(?<mongo_action>creating WriteBackListener) for:\s*%{GREEDYDATA:mongo_message}",
        "mongo_message", "about to log (?<mongo_action>metadata) event:\s*%{GREEDYDATA:mongo_message}"
      ]
      overwrite => [ "mongo_message" ]
      tag_on_failure => []
    }

    # For authentication events, pull out the user name; the user field is
    # expected to be present, so a miss is tagged as an exception.
    if [mongo_action] == "authenticate db" {
      grok {
        match => [ "mongo_message", "user: \"%{DATA:mongo_user}\"" ]
        tag_on_failure => [ "exception" ]
      }
    }

    if ![mongo_action] {
      # invalid log: looks like an insert but lacked the fields the main
      # classifier requires (e.g. ninserted) — flag it for inspection.
      grok {
        match => [ "mongo_message", "(?<mongo_action>insert) %{DATA:mongo_dbcol} query:" ]
        add_tag => [ "invalid_log" ]
        tag_on_failure => []
      }
    }

    if ![mongo_action] {
      # warn log: noteworthy administrative events (drops, latency warnings).
      grok {
        match => [
          "mongo_message", "^DR101 latency warning on journal file open %{INT}ms$",
          "mongo_message", "^DROP DATABASE:\s+%{DATA:mongo_db}$",
          "mongo_message", "^erased database %{DATA:mongo_db} from local registry$",
          "mongo_message", "^DBConfig::dropDatabase:\s+%{DATA:mongo_db}\s+dropped sharded collections:\s+%{INT}$",
          "mongo_message", "^DBConfig::dropDatabase:\s+%{DATA:mongo_db}$",
          "mongo_message", "^dropDatabase %{DATA:mongo_db} starting$",
          "mongo_message", "^dropDatabase %{DATA:mongo_db} finished$",
          "mongo_message", "^couldn't find database \[%{DATA:mongo_db}\] in config db$",
          "mongo_message", "^put \[%{DATA:mongo_db}\] on:\s+%{DATA}$"
        ]
        add_tag => [ "warn" ]
        add_field => { "mongo_action" => "status" }
        tag_on_failure => []
      }
    }

    if ![mongo_action] {
      # pure status log: routine startup / housekeeping chatter.
      # Anything still unmatched at this point is tagged as an exception.
      # NOTE: the "replica set monitor" pattern was broken across two lines
      # by a raw line break inside the string; rejoined here.
      grok {
        match => [
          "mongo_message", "^connections:%{INT:mongo_conn_count}$",
          "mongo_message", "^%{INT:mongo_conn_count} connections now open$",
          "mongo_message", "^replication threads:%{INT}$",
          "mongo_message", "^mem.*?res:%{INT}\s+virt:%{INT}$",
          "mongo_message", "^mapped\s*\(incl journal view\):%{INT}$",
          "mongo_message", "^mapped\s*:\s*%{INT}$",
          "mongo_message", "^allocator: tcmalloc$",
          "mongo_message", "^build info:%{GREEDYDATA}$",
          "mongo_message", "^git version:%{GREEDYDATA}$",
          "mongo_message", "^db version[: ]%{GREEDYDATA}$",
          "mongo_message", "^pid=%{INT} port=%{INT} %{DATA} host=%{DATA}$",
          "mongo_message", "^SyncClusterConnection connecting to%{DATA}$",
          "mongo_message", "^starting$",
          "mongo_message", "^waiting for connections on port %{INT}$",
          "mongo_message", "^starting new replica set monitor for replica set %{DATA} with seeds %{DATA}$",
          "mongo_message", "^about to contact config servers and shards$",
          "mongo_message", "^allocating new datafile %{DATA}, filling with zeroes...$",
          "mongo_message", "^done allocating datafile %{DATA}, size: %{DATA},\s*took %{NUMBER} secs$",
          "mongo_message", "^ExtentManager took %{NUMBER} seconds to open:\s*%{DATA}$",
          "mongo_message", "^serverStatus was very slow:\s*%{GREEDYDATA}$",
          "mongo_message", "^could not acquire lock\s*%{DATA}\s*\(another update won\)$",
          "mongo_message", "^lock update won, completing lock propagation for\s*%{DATA}$",
          "mongo_message", "^lock update lost, lock\s*%{DATA}\s*not propagated\.$",
          "mongo_message", "^trying to delete %{NUMBER} old lock entries for process\s*%{DATA}$",
          "mongo_message", "^added index to empty collection$",
          "mongo_message", "^build index on\s*:\s*%{DATA}$",
          "mongo_message", "^flushing\s+mmaps\s+took\s+%{INT}ms\s+for\s+%{INT}\s+files?$",
          "mongo_message", "^removeJournalFiles$"
        ]
        add_field => { "mongo_action" => "status" }
        tag_on_failure => [ "exception" ]
      }
    }

    # Extract numeric operation metrics. Split into one grok per metric
    # because break_on_match does not work in logstash 1.4.2:
    # https://github.com/elasticsearch/logstash/issues/1547
    ### grok group start ###
    grok { match => [ "mongo_message", "\b%{INT:mongo_action_time:int}ms\b" ] tag_on_failure => [] }
    grok { match => [ "mongo_message", "\bntoreturn:%{INT:mongo_ntoreturn:int}" ] tag_on_failure => [] }
    grok { match => [ "mongo_message", "\bntoskip:%{INT:mongo_ntoskip:int}" ] tag_on_failure => [] }
    grok { match => [ "mongo_message", "\bnscanned:%{INT:mongo_nscanned:int}" ] tag_on_failure => [] }
    grok { match => [ "mongo_message", "\bnscannedObjects:%{INT:mongo_nscannedObjects:int}" ] tag_on_failure => [] }
    grok { match => [ "mongo_message", "\bkeyUpdates:%{INT:mongo_keyUpdates:int}" ] tag_on_failure => [] }
    grok { match => [ "mongo_message", "\bnumYields:%{INT:mongo_numYields:int}" ] tag_on_failure => [] }
    grok { match => [ "mongo_message", "\br:%{INT:mongo_r:int}" ] tag_on_failure => [] }
    grok { match => [ "mongo_message", "\bw:%{INT:mongo_w:int}" ] tag_on_failure => [] }
    grok { match => [ "mongo_message", "\bnreturned:%{INT:mongo_nreturned:int}" ] tag_on_failure => [] }
    grok { match => [ "mongo_message", "\breslen:%{INT:mongo_reslen:int}" ] tag_on_failure => [] }
    grok { match => [ "mongo_message", "\bsync:%{INT:mongo_sync:int}" ] tag_on_failure => [] }
    grok { match => [ "mongo_message", "\block:%{INT:mongo_lock:int}" ] tag_on_failure => [] }
    grok { match => [ "mongo_message", "\bninserted:%{INT:mongo_ninserted:int}" ] tag_on_failure => [] }
    grok { match => [ "mongo_message", "\bnMatched:%{INT:mongo_nMatched:int}" ] tag_on_failure => [] }
    grok { match => [ "mongo_message", "\bnModified:%{INT:mongo_nModified:int}" ] tag_on_failure => [] }
    grok { match => [ "mongo_message", "\bfastmod:%{INT:mongo_fastmod:int}" ] tag_on_failure => [] }
    grok { match => [ "mongo_message", "\bndeleted:%{INT:mongo_ndeleted:int}" ] tag_on_failure => [] }
    ### grok group end ###

    # Copy selected fields into *_facet duplicates (e.g. for not_analyzed
    # faceting in Kibana). Uses the Logstash 1.x event[] API.
    ruby {
      code => "
        ['host', 'mongo_action', 'mongo_conn', 'mongo_db', 'mongo_dbcol', 'mongo_user'].each do |k|
          event[k + '_facet'] = event[k] if event[k]
        end
      "
    }
  }
}

output {
  # stdout {
  #   codec => rubydebug
  # }
  elasticsearch {
    host    => "eslogstash.localnet.local"
    port    => 9300
    cluster => "eslogstash"
    index   => "mongodb-v1"
  }
}