package cmd

import (
	"bytes"
	"encoding/json"
	"fmt"
	"time"

	"github.com/go-logfmt/logfmt"
	"github.com/pkg/errors"
	"github.com/zrepl/zrepl/logger"
)
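
// An EntryFormatter renders a single logger.Entry to a byte slice.
// SetMetadataFlags selects which entry metadata (time, level) the
// formatter is asked to include in its output.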
type EntryFormatter interface {
	SetMetadataFlags(flags MetadataFlags)
	Format(e *logger.Entry) ([]byte, error)
}
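
// Keys under which entry metadata is emitted by the structured formatters.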
const (
	FieldLevel   = "level"
	FieldMessage = "msg"
	FieldTime    = "time"
)
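
// Well-known field names that formatters try to pull to the front of a line.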
const (
	logJobField     string = "job"
	logTaskField    string = "task"
	logFSField      string = "filesystem"
	logMapFromField string = "map_from"
	logMapToField   string = "map_to"
	logIncFromField string = "inc_from"
	logIncToField   string = "inc_to"
)
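
// NoFormatter drops all metadata and fields and emits only the log message.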
type NoFormatter struct{}
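
// SetMetadataFlags is a no-op: NoFormatter never emits metadata.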
func (f NoFormatter) SetMetadataFlags(flags MetadataFlags) {}
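
// Format returns the entry's message verbatim.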
func (f NoFormatter) Format(e *logger.Entry) ([]byte, error) {
	return []byte(e.Message), nil
}
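
// HumanFormatter produces a compact single-line representation intended for
// human consumption: optional metadata, bracketed prefix fields, the message,
// and the remaining fields as logfmt key=value pairs.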
type HumanFormatter struct {
	metadataFlags MetadataFlags
	ignoreFields  map[string]bool
}
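
// HumanFormatterDateFormat is the time layout used by HumanFormatter.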
const HumanFormatterDateFormat = time.RFC3339
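
// SetMetadataFlags selects which metadata (time, level) Format prepends to the line.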
func (f *HumanFormatter) SetMetadataFlags(flags MetadataFlags) {
	f.metadataFlags = flags
}
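
// SetIgnoreFields configures a set of field names that Format omits entirely,
// both from the bracketed prefix and from the trailing logfmt pairs.
// Passing nil clears the ignore list.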
func (f *HumanFormatter) SetIgnoreFields(ignore []string) {
	if ignore == nil {
		f.ignoreFields = nil
		return
	}
	f.ignoreFields = make(map[string]bool, len(ignore))
	for _, field := range ignore {
		f.ignoreFields[field] = true
	}
}
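
// ignored reports whether field was registered via SetIgnoreFields.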
func (f *HumanFormatter) ignored(field string) bool {
	return f.ignoreFields != nil && f.ignoreFields[field]
}
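
// Format renders e as one line: time and level (if enabled), bracketed
// prefix fields (job, task, filesystem, map_from/map_to, inc_from/inc_to),
// the message, and all remaining fields as logfmt key=value pairs.
// The result looks roughly like (illustrative values):
//
//	2006-01-02T15:04:05Z [INFO][myjob]: snapshot done count=3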
func (f *HumanFormatter) Format(e *logger.Entry) (out []byte, err error) {

	var line bytes.Buffer

	if f.metadataFlags&MetadataTime != 0 {
		fmt.Fprintf(&line, "%s ", e.Time.Format(HumanFormatterDateFormat))
	}
	if f.metadataFlags&MetadataLevel != 0 {
		fmt.Fprintf(&line, "[%s]", e.Level.Short())
	}

	prefixFields := []string{logJobField, logTaskField, logFSField}
	prefixed := make(map[string]bool, len(prefixFields)+2)
	for _, field := range prefixFields {
		val, ok := e.Fields[field].(string)
		if ok {
			if !f.ignored(field) {
				fmt.Fprintf(&line, "[%s]", val)
				prefixed[field] = true
			}
		} else {
			break
		}
	}

	// even more prefix fields
	mapFrom, mapFromOk := e.Fields[logMapFromField].(string)
	mapTo, mapToOk := e.Fields[logMapToField].(string)
	if mapFromOk && mapToOk && !f.ignored(logMapFromField) && !f.ignored(logMapToField) {
		fmt.Fprintf(&line, "[%s => %s]", mapFrom, mapTo)
		prefixed[logMapFromField], prefixed[logMapToField] = true, true
	}

	incFrom, incFromOk := e.Fields[logIncFromField].(string)
	incTo, incToOk := e.Fields[logIncToField].(string)
	if incFromOk && incToOk && !f.ignored(logIncFromField) && !f.ignored(logIncToField) {
		fmt.Fprintf(&line, "[%s => %s]", incFrom, incTo)
		prefixed[logIncFromField], prefixed[logIncToField] = true, true
	}

	if line.Len() > 0 {
		fmt.Fprint(&line, ": ")
	}
	fmt.Fprint(&line, e.Message)

	if len(e.Fields)-len(prefixed) > 0 {
		fmt.Fprint(&line, " ")
		enc := logfmt.NewEncoder(&line)
		for field, value := range e.Fields {
			if prefixed[field] || f.ignored(field) {
				continue
			}
			if err := logfmtTryEncodeKeyval(enc, field, value); err != nil {
				return nil, err
			}
		}
	}

	return line.Bytes(), nil
}
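
// JSONFormatter encodes the entry as a single JSON object with the message,
// time, and level stored under the Field* keys alongside the entry's fields.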
type JSONFormatter struct {
	metadataFlags MetadataFlags
}
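
// SetMetadataFlags stores the flags; note that Format below emits time and
// level unconditionally.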
func (f *JSONFormatter) SetMetadataFlags(flags MetadataFlags) {
	f.metadataFlags = flags
}
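
// Format marshals the entry to JSON. Error-valued fields are stored via their
// Error() string, and any field that cannot be JSON-encoded aborts formatting.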
func (f *JSONFormatter) Format(e *logger.Entry) ([]byte, error) {
	data := make(logger.Fields, len(e.Fields)+3)
	for k, v := range e.Fields {
		switch v := v.(type) {
		case error:
			// Otherwise errors are ignored by `encoding/json`
			// https://github.com/sirupsen/logrus/issues/137
			data[k] = v.Error()
		default:
			_, err := json.Marshal(v)
			if err != nil {
				return nil, errors.Errorf("field is not JSON encodable: %s", k)
			}
			data[k] = v
		}
	}

	data[FieldMessage] = e.Message
	data[FieldTime] = e.Time.Format(time.RFC3339)
	data[FieldLevel] = e.Level

	return json.Marshal(data)
}
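
// LogfmtFormatter encodes the whole entry as a single logfmt line.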
type LogfmtFormatter struct {
	metadataFlags MetadataFlags
}
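
// SetMetadataFlags selects which metadata (time, level) Format emits.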
func (f *LogfmtFormatter) SetMetadataFlags(flags MetadataFlags) {
	f.metadataFlags = flags
}
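
// Format emits optional metadata, then the job and task fields (when set),
// then the message, then all remaining fields, each as logfmt key=value pairs.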
func (f *LogfmtFormatter) Format(e *logger.Entry) ([]byte, error) {
	var buf bytes.Buffer
	enc := logfmt.NewEncoder(&buf)

	if f.metadataFlags&MetadataTime != 0 {
		enc.EncodeKeyval(FieldTime, e.Time)
	}
	if f.metadataFlags&MetadataLevel != 0 {
		enc.EncodeKeyval(FieldLevel, e.Level)
	}

	// at least try and put job and task in front
	prefixed := make(map[string]bool, 2)
	prefix := []string{logJobField, logTaskField}
	for _, pf := range prefix {
		v, ok := e.Fields[pf]
		if !ok {
			break
		}
		if err := logfmtTryEncodeKeyval(enc, pf, v); err != nil {
			return nil, err // unlikely
		}
		prefixed[pf] = true
	}

	enc.EncodeKeyval(FieldMessage, e.Message)

	for k, v := range e.Fields {
		if !prefixed[k] {
			if err := logfmtTryEncodeKeyval(enc, k, v); err != nil {
				return nil, err
			}
		}
	}

	return buf.Bytes(), nil
}
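
// logfmtTryEncodeKeyval encodes a key/value pair, substituting a placeholder
// containing the value's Go type for values the logfmt encoder cannot
// represent, and wraps any other encoding error with the offending field name.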
func logfmtTryEncodeKeyval(enc *logfmt.Encoder, field, value interface{}) error {
	err := enc.EncodeKeyval(field, value)
	switch err {
	case nil: // ok
		return nil
	case logfmt.ErrUnsupportedValueType:
		enc.EncodeKeyval(field, fmt.Sprintf("<%T>", value))
		return nil
	}
	return errors.Wrapf(err, "cannot encode field '%s'", field)
}