cmd: configurable logrus formatters

We lost the nice context-stack [jobname][taskname][...] at the beginning
of each log line when switching to logrus.

Define field names that identify these contexts.
Write a human-friendly formatter that presents these field names like
the solution we had before logrus.

Write some other formatters for logfmt and json output along the way.

Limit ourselves to stdout logging for now.
This commit is contained in:
Christian Schwarz 2017-09-23 11:24:36 +02:00
parent 3ff9e6d2f7
commit 9465b593f9
10 changed files with 187 additions and 101 deletions

View File

@ -45,7 +45,7 @@ func (a *IntervalAutosnap) Run(ctx context.Context, didSnaps chan struct{}) {
a.log.Debug("examine filesystem state") a.log.Debug("examine filesystem state")
for i, d := range ds { for i, d := range ds {
l := a.log.WithField("filesystem", d.ToString()) l := a.log.WithField(logFSField, d.ToString())
fsvs, err := zfs.ZFSListFilesystemVersions(d, &PrefixSnapshotFilter{a.Prefix}) fsvs, err := zfs.ZFSListFilesystemVersions(d, &PrefixSnapshotFilter{a.Prefix})
if err != nil { if err != nil {
@ -129,7 +129,7 @@ func (a *IntervalAutosnap) doSnapshots(didSnaps chan struct{}) {
suffix := time.Now().In(time.UTC).Format("20060102_150405_000") suffix := time.Now().In(time.UTC).Format("20060102_150405_000")
snapname := fmt.Sprintf("%s%s", a.Prefix, suffix) snapname := fmt.Sprintf("%s%s", a.Prefix, suffix)
a.log.WithField("filesystem", d.ToString()). a.log.WithField(logFSField, d.ToString()).
WithField("snapname", snapname). WithField("snapname", snapname).
Info("create snapshot") Info("create snapshot")

View File

@ -90,7 +90,7 @@ func (j *LocalJob) JobStart(ctx context.Context) {
// All local datasets will be passed to its Map() function, // All local datasets will be passed to its Map() function,
// but only those for which a mapping exists will actually be pulled. // but only those for which a mapping exists will actually be pulled.
// We can pay this small performance penalty for now. // We can pay this small performance penalty for now.
handler := NewHandler(log.WithField("task", "handler"), localPullACL{}, &PrefixSnapshotFilter{j.SnapshotPrefix}) handler := NewHandler(log.WithField(logTaskField, "handler"), localPullACL{}, &PrefixSnapshotFilter{j.SnapshotPrefix})
registerEndpoints(local, handler) registerEndpoints(local, handler)
@ -112,7 +112,7 @@ func (j *LocalJob) JobStart(ctx context.Context) {
} }
makeCtx := func(parent context.Context, taskName string) (ctx context.Context) { makeCtx := func(parent context.Context, taskName string) (ctx context.Context) {
return context.WithValue(parent, contextKeyLog, log.WithField("task", taskName)) return context.WithValue(parent, contextKeyLog, log.WithField(logTaskField, taskName))
} }
var snapCtx, plCtx, prCtx, pullCtx context.Context var snapCtx, plCtx, prCtx, pullCtx context.Context
snapCtx = makeCtx(ctx, "autosnap") snapCtx = makeCtx(ctx, "autosnap")

View File

@ -119,7 +119,7 @@ start:
log.Printf("starting pull") log.Printf("starting pull")
pullLog := log.WithField("task", "pull") pullLog := log.WithField(logTaskField, "pull")
err = doPull(PullContext{client, pullLog, j.Mapping, j.InitialReplPolicy}) err = doPull(PullContext{client, pullLog, j.Mapping, j.InitialReplPolicy})
if err != nil { if err != nil {
log.Printf("error doing pull: %s", err) log.Printf("error doing pull: %s", err)
@ -128,7 +128,7 @@ start:
closeRPCWithTimeout(log, client, time.Second*10, "") closeRPCWithTimeout(log, client, time.Second*10, "")
log.Printf("starting prune") log.Printf("starting prune")
prunectx := context.WithValue(ctx, contextKeyLog, log.WithField("task", "prune")) prunectx := context.WithValue(ctx, contextKeyLog, log.WithField(logTaskField, "prune"))
pruner, err := j.Pruner(PrunePolicySideDefault, false) pruner, err := j.Pruner(PrunePolicySideDefault, false)
if err != nil { if err != nil {
log.Printf("error creating pruner: %s", err) log.Printf("error creating pruner: %s", err)

View File

@ -84,9 +84,9 @@ func (j *SourceJob) JobStart(ctx context.Context) {
return return
} }
snapContext := context.WithValue(ctx, contextKeyLog, log.WithField("task", "autosnap")) snapContext := context.WithValue(ctx, contextKeyLog, log.WithField(logTaskField, "autosnap"))
prunerContext := context.WithValue(ctx, contextKeyLog, log.WithField("task", "prune")) prunerContext := context.WithValue(ctx, contextKeyLog, log.WithField(logTaskField, "prune"))
serveContext := context.WithValue(ctx, contextKeyLog, log.WithField("task", "serve")) serveContext := context.WithValue(ctx, contextKeyLog, log.WithField(logTaskField, "serve"))
didSnaps := make(chan struct{}) didSnaps := make(chan struct{})
go j.serve(serveContext) go j.serve(serveContext)

View File

@ -3,75 +3,101 @@ package cmd
import ( import (
"github.com/mitchellh/mapstructure" "github.com/mitchellh/mapstructure"
"github.com/pkg/errors" "github.com/pkg/errors"
//"github.com/rifflock/lfshook"
"github.com/sirupsen/logrus" "github.com/sirupsen/logrus"
"os"
) )
type LoggingConfig struct { type LoggingConfig struct {
Stdout struct { Stdout struct {
Level logrus.Level Level logrus.Level
Format LogFormat
} }
//LFS lfshook.PathMap
} }
func parseLogging(i interface{}) (c *LoggingConfig, err error) { func parseLogging(i interface{}) (c *LoggingConfig, err error) {
c = &LoggingConfig{} c = &LoggingConfig{}
c.Stdout.Level = logrus.WarnLevel c.Stdout.Level = logrus.WarnLevel
c.Stdout.Format = LogFormatHuman
if i == nil { if i == nil {
return c, nil return c, nil
} }
var asMap struct { var asMap struct {
Mate string Stdout struct {
Stdout map[string]string Level string
LFS map[string]string Format string
}
} }
if err = mapstructure.Decode(i, &asMap); err != nil { if err = mapstructure.Decode(i, &asMap); err != nil {
return nil, errors.Wrap(err, "mapstructure error") return nil, errors.Wrap(err, "mapstructure error")
} }
//if asMap.LFS != nil { if asMap.Stdout.Level != "" {
// c.LFS = make(map[logrus.Level]string, len(asMap.LFS)) lvl, err := logrus.ParseLevel(asMap.Stdout.Level)
// for level_str, path := range asMap.LFS {
// level, err := logrus.ParseLevel(level_str)
// if err != nil {
// return nil, errors.Wrapf(err, "cannot parse level '%s'", level_str)
// }
// if len(path) <= 0 {
// return nil, errors.Errorf("path must be longer than 0")
// }
// c.LFS[level] = path
// }
//}
if asMap.Stdout != nil {
lvl, err := logrus.ParseLevel(asMap.Stdout["level"])
if err != nil { if err != nil {
return nil, errors.Wrap(err, "cannot parse stdout log level") return nil, errors.Wrap(err, "cannot parse stdout log level")
} }
c.Stdout.Level = lvl c.Stdout.Level = lvl
} }
if asMap.Stdout.Format != "" {
format, err := parseLogFormat(asMap.Stdout.Format)
if err != nil {
return nil, errors.Wrap(err, "cannot parse log format")
}
c.Stdout.Format = format
}
return c, nil return c, nil
} }
// LogFormat names one of the supported stdout log output formats.
type LogFormat string

const (
	// LogFormatHuman renders a human-friendly line with a
	// [job][task][...] context prefix, like before the switch to logrus.
	LogFormatHuman LogFormat = "human"
	// LogFormatLogfmt renders key=value pairs (logrus TextFormatter).
	LogFormatLogfmt LogFormat = "logfmt"
	// LogFormatJSON renders one JSON object per log entry.
	LogFormatJSON LogFormat = "json"
)
// Formatter returns the logrus.Formatter implementation that
// corresponds to log format f.
//
// It panics if f is not one of the declared LogFormat constants;
// parseLogFormat guarantees only declared values reach callers.
func (f LogFormat) Formatter() logrus.Formatter {
	switch f {
	case LogFormatJSON:
		return &logrus.JSONFormatter{}
	case LogFormatLogfmt:
		return &logrus.TextFormatter{}
	case LogFormatHuman:
		return HumanFormatter{}
	}
	panic("incomplete implementation")
}
// LogFormats lists every declared log format, in the order they are
// documented to users.
var LogFormats = []LogFormat{LogFormatHuman, LogFormatLogfmt, LogFormatJSON}

// parseLogFormat converts a config value into a LogFormat.
//
// It accepts only string values that exactly match one of the entries
// in LogFormats; anything else yields a descriptive error.
func parseLogFormat(i interface{}) (f LogFormat, err error) {
	s, ok := i.(string)
	if !ok {
		return "", errors.Errorf("invalid log format: wrong type: %T", i)
	}
	for _, candidate := range LogFormats {
		if candidate == LogFormat(s) {
			return candidate, nil
		}
	}
	return "", errors.Errorf("invalid log format: '%s'", s)
}
func (c *LoggingConfig) MakeLogrus() (l logrus.FieldLogger) { func (c *LoggingConfig) MakeLogrus() (l logrus.FieldLogger) {
log := logrus.New() log := logrus.New()
log.Out = nopWriter(0) log.Out = os.Stdout
log.Level = logrus.DebugLevel log.Level = c.Stdout.Level
log.Formatter = c.Stdout.Format.Formatter()
//log.Level = logrus.DebugLevel
//
//if len(c.LFS) > 0 {
// lfshook := lfshook.NewHook(c.LFS)
// log.Hooks.Add(lfshook)
//}
stdhook := NewStdHook()
log.Hooks.Add(stdhook)
return log return log

View File

@ -73,7 +73,7 @@ func (d *Daemon) Loop(ctx context.Context) {
for _, job := range d.conf.Jobs { for _, job := range d.conf.Jobs {
log.Printf("starting job %s", job.JobName()) log.Printf("starting job %s", job.JobName())
logger := log.WithField("job", job.JobName()) logger := log.WithField(logJobField, job.JobName())
i++ i++
jobCtx := context.WithValue(ctx, contextKeyLog, logger) jobCtx := context.WithValue(ctx, contextKeyLog, logger)
go func(j Job) { go func(j Job) {

View File

@ -2,52 +2,123 @@ package cmd
import ( import (
"bytes" "bytes"
"encoding/json"
"fmt" "fmt"
"github.com/pkg/errors"
"github.com/sirupsen/logrus" "github.com/sirupsen/logrus"
"os" "strings"
"time"
) )
type CLIFormatter struct { const (
logJobField string = "job"
logTaskField string = "task"
logFSField string = "filesystem"
logMapFromField string = "map_from"
logMapToField string = "map_to"
logIncFromField string = "inc_from"
logIncToField string = "inc_to"
)
type NoFormatter struct{}
func (f NoFormatter) Format(e *logrus.Entry) ([]byte, error) {
return []byte(e.Message), nil
} }
func (f CLIFormatter) Format(e *logrus.Entry) (out []byte, err error) { type HumanFormatter struct{}
var buf bytes.Buffer
fmt.Fprintf(&buf, "%s\n", e.Message) func (f HumanFormatter) shortLevel(l logrus.Level) string {
return buf.Bytes(), nil switch l {
case logrus.DebugLevel:
return "DBG"
case logrus.InfoLevel:
return "INF"
case logrus.WarnLevel:
return "WRN"
case logrus.ErrorLevel:
return "ERR"
case logrus.PanicLevel:
return "PNC"
}
panic("incomplete implementation")
} }
var stdhookStderrLevels []logrus.Level = []logrus.Level{ func (f HumanFormatter) Format(e *logrus.Entry) (out []byte, err error) {
logrus.PanicLevel, logrus.FatalLevel, logrus.ErrorLevel, logrus.WarnLevel,
var line bytes.Buffer
fmt.Fprintf(&line, "[%s]", f.shortLevel(e.Level))
prefixFields := []string{logJobField, logTaskField, logFSField}
prefixed := make(map[string]bool, len(prefixFields)+2)
for _, field := range prefixFields {
val, ok := e.Data[field].(string)
if ok {
fmt.Fprintf(&line, "[%s]", val)
prefixed[field] = true
} else {
break
}
}
// even more prefix fields
mapFrom, mapFromOk := e.Data[logMapFromField].(string)
mapTo, mapToOk := e.Data[logMapToField].(string)
if mapFromOk && mapToOk {
fmt.Fprintf(&line, "[%s => %s]", mapFrom, mapTo)
prefixed[logMapFromField], prefixed[logMapToField] = true, true
}
incFrom, incFromOk := e.Data[logIncFromField].(string)
incTo, incToOk := e.Data[logIncToField].(string)
if incFromOk && incToOk {
fmt.Fprintf(&line, "[%s => %s]", incFrom, incTo)
prefixed[logIncFromField], prefixed[logIncToField] = true, true
} }
type Stdhook struct { fmt.Fprintf(&line, ": %s", e.Message)
for field, value := range e.Data {
if prefixed[field] {
continue
} }
func NewStdHook() *Stdhook { if strings.ContainsAny(field, " \t") {
return &Stdhook{} return nil, errors.Errorf("field must not contain whitespace: '%s'", field)
}
fmt.Fprintf(&line, " %s=\"%s\"", field, value)
} }
func (h *Stdhook) Levels() []logrus.Level { fmt.Fprintf(&line, "\n")
// Accept all so we can filter the output later
return []logrus.Level{ return line.Bytes(), nil
logrus.PanicLevel, logrus.FatalLevel, logrus.ErrorLevel, logrus.WarnLevel,
logrus.InfoLevel, logrus.DebugLevel,
}
} }
func (h *Stdhook) Fire(entry *logrus.Entry) error { type JSONFormatter struct{}
s, err := entry.String()
func (f JSONFormatter) Format(e *logrus.Entry) ([]byte, error) {
data := make(logrus.Fields, len(e.Data)+3)
for k, v := range e.Data {
switch v := v.(type) {
case error:
// Otherwise errors are ignored by `encoding/json`
// https://github.com/sirupsen/logrus/issues/137
data[k] = v.Error()
default:
_, err := json.Marshal(v)
if err != nil { if err != nil {
return err return nil, errors.Errorf("field is not JSON encodable: %s", k)
} }
for _, l := range stdhookStderrLevels { data[k] = v
if l == entry.Level {
fmt.Fprint(os.Stderr, s)
return nil
} }
} }
fmt.Fprint(os.Stdout, s)
return nil data["msg"] = e.Message
data["time"] = e.Time.Format(time.RFC3339)
data["level"] = e.Level
return json.Marshal(data)
} }
type nopWriter int type nopWriter int

View File

@ -2,7 +2,6 @@ package cmd
import ( import (
"context" "context"
"encoding/json"
"fmt" "fmt"
"github.com/zrepl/zrepl/zfs" "github.com/zrepl/zrepl/zfs"
"time" "time"
@ -45,7 +44,7 @@ func (p *Pruner) Run(ctx context.Context) (r []PruneResult, err error) {
for _, fs := range filesystems { for _, fs := range filesystems {
log := log.WithField("filesystem", fs.ToString()) log := log.WithField(logFSField, fs.ToString())
fsversions, err := zfs.ZFSListFilesystemVersions(fs, &PrefixSnapshotFilter{p.SnapshotPrefix}) fsversions, err := zfs.ZFSListFilesystemVersions(fs, &PrefixSnapshotFilter{p.SnapshotPrefix})
if err != nil { if err != nil {
@ -57,26 +56,16 @@ func (p *Pruner) Run(ctx context.Context) (r []PruneResult, err error) {
continue continue
} }
dbgj, err := json.Marshal(fsversions)
if err != nil {
panic(err)
}
log.WithField("fsversions", string(dbgj)).Debug()
keep, remove, err := p.PrunePolicy.Prune(fs, fsversions) keep, remove, err := p.PrunePolicy.Prune(fs, fsversions)
if err != nil { if err != nil {
log.WithError(err).Error("error evaluating prune policy") log.WithError(err).Error("error evaluating prune policy")
continue continue
} }
dbgj, err = json.Marshal(keep) log.WithField("fsversions", fsversions).
if err != nil { WithField("keep", keep).
panic(err) WithField("remove", remove).
} Debug("prune policy debug dump")
log.WithField("keep", string(dbgj)).Debug()
dbgj, err = json.Marshal(remove)
log.WithField("remove", string(dbgj)).Debug()
r = append(r, PruneResult{fs, fsversions, keep, remove}) r = append(r, PruneResult{fs, fsversions, keep, remove})

View File

@ -90,8 +90,8 @@ func doPull(pull PullContext) (err error) {
if localFs == nil { if localFs == nil {
continue continue
} }
log.WithField("map_remote", remoteFilesystems[fs].ToString()). log.WithField(logMapFromField, remoteFilesystems[fs].ToString()).
WithField("map_local", localFs.ToString()).Debug() WithField(logMapToField, localFs.ToString()).Debug()
m := RemoteLocalMapping{remoteFilesystems[fs], localFs} m := RemoteLocalMapping{remoteFilesystems[fs], localFs}
replMapping[m.Local.ToString()] = m replMapping[m.Local.ToString()] = m
localTraversal.Add(m.Local) localTraversal.Add(m.Local)
@ -107,7 +107,7 @@ func doPull(pull PullContext) (err error) {
log.Info("start per-filesystem sync") log.Info("start per-filesystem sync")
localTraversal.WalkTopDown(func(v zfs.DatasetPathVisit) bool { localTraversal.WalkTopDown(func(v zfs.DatasetPathVisit) bool {
log := log.WithField("filesystem", v.Path.ToString()) log := log.WithField(logFSField, v.Path.ToString())
if v.FilledIn { if v.FilledIn {
if _, exists := localFilesystemState[v.Path.ToString()]; exists { if _, exists := localFilesystemState[v.Path.ToString()]; exists {
@ -129,8 +129,8 @@ func doPull(pull PullContext) (err error) {
panic("internal inconsistency: replMapping should contain mapping for any path that was not filled in by WalkTopDown()") panic("internal inconsistency: replMapping should contain mapping for any path that was not filled in by WalkTopDown()")
} }
log = log.WithField("map_remote", m.Remote.ToString()). log = log.WithField(logMapToField, m.Remote.ToString()).
WithField("map_local", m.Local.ToString()) WithField(logMapFromField, m.Local.ToString())
log.Debug("examing local filesystem state") log.Debug("examing local filesystem state")
localState, localExists := localFilesystemState[m.Local.ToString()] localState, localExists := localFilesystemState[m.Local.ToString()]
@ -245,7 +245,7 @@ func doPull(pull PullContext) (err error) {
from, to := diff.IncrementalPath[i], diff.IncrementalPath[i+1] from, to := diff.IncrementalPath[i], diff.IncrementalPath[i+1]
log, _ := log.WithField("inc_from", from.Name).WithField("inc_to", to.Name), 0 log, _ := log.WithField(logIncFromField, from.Name).WithField(logIncToField, to.Name), 0
log.Debug("requesting incremental snapshot stream") log.Debug("requesting incremental snapshot stream")
r := IncrementalTransferRequest{ r := IncrementalTransferRequest{

View File

@ -65,7 +65,7 @@ func init() {
func testCmdGlobalInit(cmd *cobra.Command, args []string) { func testCmdGlobalInit(cmd *cobra.Command, args []string) {
log := logrus.New() log := logrus.New()
log.Formatter = CLIFormatter{} log.Formatter = NoFormatter{}
testCmdGlobal.log = log testCmdGlobal.log = log
var err error var err error