![Software License][ico-license]
# JSON Context for Monolog

Simple helper package with Monolog formatters.

This package helps to set up consistent JSON context log output.
The aim of these formatters is to write log lines that can easily be grokked by Logstash.
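For illustration only (the exact JSON fields depend on the formatter and its configuration, so treat this as a sketch rather than the formatter's literal output), a formatted line has the shape that the grok pattern in the Logstash example below expects: a timestamp, the channel and severity, then the context as a single JSON object.

```
[2018-01-01 12:00:00] channel.INFO: {"message":"Your message", ...}
```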
## Example Filebeat + Logstash setup

### Filebeat configuration
```yaml
# ...
filebeat.prospectors:
    - type: log
      enabled: true
      paths:
          - /usr/some/path/*.log
      tail_files: true
      multiline:
          pattern: '^\[[[:digit:]]{4}-[[:digit:]]{2}-[[:digit:]]{2}'
          negate: true
          match: after
      fields:
          source: context_json
          index: testing
# ...
```
### Logstash configuration
```
input {
    beats {
        port => 5000
    }
}

filter {
    # Split up the custom message into fields
    grok {
        match => { "message" => "\[%{TIMESTAMP_ISO8601:timestamp}\] %{DATA:channel}\.%{LOGLEVEL:severity}: %{GREEDYDATA:context}" }
        overwrite => [ "message", "context" ]
    }

    # Take the timestamp from the log line and use it for @timestamp
    date {
        match => [ "timestamp", "yyyy-MM-dd HH:mm:ss" ]
    }

    # Clean up (optional)
    mutate {
        remove_field => ["timestamp", "prospector", "beat"]
    }

    # Pull fields.index up one level, add it so we can use it in the output.
    # Optional, but I needed this to get `index => "%{index}-%{+YYYY.MM}"`
    # working for the elasticsearch output.
    if (![fields][index]) {
        mutate {
            add_field => { "index" => "default" }
        }
    } else {
        mutate {
            add_field => { "index" => "%{[fields][index]}" }
            remove_field => "[fields][index]"
        }
    }

    # Make sure to interpret the context field as JSON
    json {
        source => "context"
    }
}

output {
    elasticsearch {
        hosts => "elasticsearch:9200"
        index => "%{index}-%{+YYYY.MM}"
    }
}
```
### PHP
```php
<?php

$formatter = new \Czim\MonologJsonContext\Formatters\JsonContextFormatter(null, 'test-application');

$logger = (new \Monolog\Logger('channel'))
    ->pushHandler(
        (new \Monolog\Handler\RotatingFileHandler('/usr/some/path/test.log', 7))
            ->setFormatter($formatter)
    );

$logger->info('Your message', ['testing' => true, 'category' => 'documentation.test']);
```
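The context array passed to `$logger->info()` ends up in the JSON part of the log line, so once the Logstash `json` filter above has parsed the `context` field, keys such as `testing` and `category` become separate fields on the event (and therefore in Elasticsearch).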
## Credits

## License

The MIT License (MIT). Please see License File for more information.