package cmd

import (
	"context"
	"io"
	"sync"
	"time"

	mapstructure "github.com/mitchellh/mapstructure"
	"github.com/pkg/errors"

	"github.com/zrepl/zrepl/rpc"
	"github.com/zrepl/zrepl/util"
)

type SourceJob struct {
	Name           string
	Serve          AuthenticatedChannelListenerFactory
	Datasets       *DatasetMapFilter
	SnapshotPrefix string
	Interval       time.Duration
	Prune          PrunePolicy
	Debug          JobDebugSettings

	snapCancel  context.CancelFunc
	serveCancel context.CancelFunc
}
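
// parseSourceJob decodes a single source job definition from the config file
// into a SourceJob. The key names correspond to the mapstructure fields below;
// the snippet is only an illustrative sketch of the expected shape (the values
// are hypothetical), since serve, prune, and debug are parsed by their own
// helpers:
//
//	serve: { ... }
//	datasets: { ... }
//	snapshot_prefix: zrepl_
//	interval: 10m
//	prune: { ... }
//	debug: { ... }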
func parseSourceJob(name string, i map[string]interface{}) (j *SourceJob, err error) {

	var asMap struct {
		Serve          map[string]interface{}
		Datasets       map[string]string
		SnapshotPrefix string `mapstructure:"snapshot_prefix"`
		Interval       string
		Prune          map[string]interface{}
		Debug          map[string]interface{}
	}

	if err = mapstructure.Decode(i, &asMap); err != nil {
		err = errors.Wrap(err, "mapstructure error")
		return nil, err
	}

	j = &SourceJob{Name: name}

	if j.Serve, err = parseAuthenticatedChannelListenerFactory(asMap.Serve); err != nil {
		return
	}

	if j.Datasets, err = parseDatasetMapFilter(asMap.Datasets, true); err != nil {
		return
	}

	if j.SnapshotPrefix, err = parseSnapshotPrefix(asMap.SnapshotPrefix); err != nil {
		return
	}

	if j.Interval, err = time.ParseDuration(asMap.Interval); err != nil {
		err = errors.Wrap(err, "cannot parse 'interval'")
		return
	}

	if j.Prune, err = parsePrunePolicy(asMap.Prune); err != nil {
		err = errors.Wrap(err, "cannot parse 'prune'")
		return
	}

	if err = mapstructure.Decode(asMap.Debug, &j.Debug); err != nil {
		err = errors.Wrap(err, "cannot parse 'debug'")
		return
	}

	return
}

func (j *SourceJob) JobName() string {
	return j.Name
}

// JobStart runs the periodic autosnapper and the serve loop concurrently
// and returns once both of them have terminated.
func (j *SourceJob) JobStart(ctx context.Context) {

	var wg sync.WaitGroup

	log := ctx.Value(contextKeyLog).(Logger)

	log.Printf("starting autosnap")
	var snapContext context.Context
	snapContext, j.snapCancel = context.WithCancel(ctx)
	snapContext = context.WithValue(snapContext, contextKeyLog, util.NewPrefixLogger(log, "autosnap"))
	a := IntervalAutosnap{DatasetFilter: j.Datasets, Prefix: j.SnapshotPrefix, SnapshotInterval: j.Interval}
	wg.Add(1)
	go func() {
		a.Run(snapContext)
		wg.Done()
	}()

	log.Printf("starting serve")
	var serveContext context.Context
	serveContext, j.serveCancel = context.WithCancel(ctx)
	serveContext = context.WithValue(serveContext, contextKeyLog, util.NewPrefixLogger(log, "serve"))
	wg.Add(1)
	go func() {
		j.serve(serveContext)
		wg.Done()
	}()

	wg.Wait()
}
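
// serve accepts connections on the job's listener and runs one RPC server per
// connection. Accept() blocks, so it runs in a helper goroutine that feeds
// accepted connections into rwcChan; the select below can then react to both
// new connections and cancellation of ctx. On an accept error the helper
// closes rwcChan, which terminates the loop.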
func (j *SourceJob) serve(ctx context.Context) {

	log := ctx.Value(contextKeyLog).(Logger)

	listener, err := j.Serve.Listen()
	if err != nil {
		log.Printf("error listening: %s", err)
		return
	}

	rwcChan := make(chan io.ReadWriteCloser)

	// Serve connections until interrupted or error
outer:
	for {

		// Accept in a separate goroutine so the select below stays responsive
		// to context cancellation.
		go func() {
			rwc, err := listener.Accept()
			if err != nil {
				log.Printf("error accepting connection: %s", err)
				close(rwcChan)
				return
			}
			rwcChan <- rwc
		}()

		select {

		case rwc, notClosed := <-rwcChan:

			if !notClosed {
				break outer // closed because of accept error
			}

			// optionally dump raw connection traffic for debugging
			rwc, err := util.NewReadWriteCloserLogger(rwc, j.Debug.Conn.ReadDump, j.Debug.Conn.WriteDump)
			if err != nil {
				panic(err)
			}

			// construct connection handler
			handler := NewHandler(log, j.Datasets, &PrefixSnapshotFilter{j.SnapshotPrefix})

			// handle connection
			rpcServer := rpc.NewServer(rwc)
			if j.Debug.RPC.Log {
				rpclog := util.NewPrefixLogger(log, "rpc")
				rpcServer.SetLogger(rpclog, true)
			}
			registerEndpoints(rpcServer, handler)
			if err = rpcServer.Serve(); err != nil {
				log.Printf("error serving connection: %s", err)
			}
			rwc.Close()

		case <-ctx.Done():
			log.Printf("context: %s", ctx.Err())
			break outer

		}

	}

	log.Printf("closing listener")
	err = listener.Close()
	if err != nil {
		log.Printf("error closing listener: %s", err)
	}

	return
}