@1xtr/moleculer-elasticsearch-logger
v1.0.5
Published
Custom MoleculerJS logger for sending logs directly to Elasticsearch
Downloads
72
Maintainers
Readme
Send logs to Elasticsearch directly
This is a fork from native Datadog logger
Description
Easily send logs directly to Elasticsearch
Uses the client "@elastic/elasticsearch": "^8.4.0"
Install
$ npm install @1xtr/moleculer-elasticsearch-logger --save
Usage
const ElasticLogger = require('@1xtr/moleculer-elasticsearch-logger')
module.exports = {
logger: new ElasticLogger({
// put here your options
})
}
Default options
Note: the timestamp field contains a UNIX timestamp in milliseconds, but to create Data Views in Kibana you need to transform it to the Date type
yyyy-MM-dd'T'HH:mm:ss.SSSXXX
in a pipeline.
If the index
field is not set, all logs are sent to moleculer-${row.ts.yyyymmdd()}
indices.
For example, moleculer-20220929
If you need to use Ingest Pipelines, you can set the pipeline
option. Ingest pipeline example
const defaultOptions = {
clientOptions: {
node: 'http://localhost:9200',
tls: {
// ca: readFileSync('/ca.crt'),
rejectUnauthorized: false,
},
},
index: null,
pipeline: null,
source: process.env.MOL_NODE_NAME || 'moleculer',
hostname: hostname(),
objectPrinter: null,
interval: 5 * 1000,
excludeModules: []
}
Options example
{
"clientOptions": {
"node": "http://es01:9200",
"auth": {
"username": "log-user",
"password": "very-StRoNg-password"
},
"tls": {
"rejectUnauthorized": false
}
},
"pipeline": "moleculer",
"excludeModules": [
"broker",
"registry",
"discovery",
"transporter",
"$node",
"transit",
"cacher"
]
}
Ingest Pipeline example
- create
@timestamp
with type Date from _source.timestamp
- save logs to index name
moleculer-yyyyMMdd
- remove
timestamp
field
- try to parse JSON from
message
field and save the object to parsedMsg
- set
requestID
field from _source.parsedMsg.requestID
- set
subdomain
field from _source.parsedMsg.subdomain
- set
action
field from _source.parsedMsg.action
- set
title
field from _source.parsedMsg.title
- set
caller
field from _source.parsedMsg.caller
- if
ctx.parsedMsg?.title == "Incoming webhook"
add tag webhook
- remove parsed json
_source.parsedMsg
- if
message
is empty drop document
[
{
"date": {
"field": "_source.timestamp",
"formats": [
"UNIX_MS"
],
"target_field": "@timestamp"
}
},
{
"date_index_name": {
"field": "_source.timestamp",
"date_rounding": "d",
"index_name_prefix": "moleculer-",
"index_name_format": "yyyyMMdd",
"date_formats": [
"UNIX_MS"
]
}
},
{
"remove": {
"field": "timestamp",
"ignore_missing": true,
"ignore_failure": true
}
},
{
"json": {
"field": "_source.message",
"target_field": "parsedMsg",
"ignore_failure": true
}
},
{
"set": {
"field": "requestID",
"copy_from": "_source.parsedMsg.requestID",
"ignore_empty_value": true
}
},
{
"set": {
"field": "subdomain",
"copy_from": "_source.parsedMsg.subdomain",
"ignore_empty_value": true
}
},
{
"set": {
"field": "action",
"copy_from": "_source.parsedMsg.action",
"ignore_empty_value": true
}
},
{
"set": {
"field": "title",
"copy_from": "_source.parsedMsg.title",
"ignore_empty_value": true
}
},
{
"set": {
"field": "caller",
"copy_from": "_source.parsedMsg.caller",
"ignore_empty_value": true
}
},
{
"script": {
"source": "ctx['tags'].add(\"webhook\");",
"if": "ctx.parsedMsg?.title == \"Incoming webhook\"",
"ignore_failure": true,
"description": "Add tag webhook"
}
},
{
"remove": {
"field": "_source.parsedMsg",
"ignore_missing": true
}
},
{
"drop": {
"if": "ctx.message === ''"
}
}
]