mirror of
https://github.com/zrepl/zrepl.git
synced 2024-12-23 15:38:49 +01:00
74719ad846
JSONDecoder was buffering more of connection data than just the JSON. => Unchunker didn't bother and just started unchunking. While chaining JSONDecoder.Buffered() and the connection using ChainedReader works, it's still not a clean architecture. => Every JSON message is now wrapped in a chunked stream (chunked and unchunked) => no special-cases => Keep ChainedReader, might be useful later on...
34 lines
574 B
Go
34 lines
574 B
Go
package rpc
|
|
|
|
import (
	"bytes"
	"io"
	"strings"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/zrepl/zrepl/util"
)
|
|
|
|
func TestByteStreamRPCDecodeJSONError(t *testing.T) {
|
|
|
|
r := strings.NewReader("{'a':'aber'}")
|
|
|
|
var chunked bytes.Buffer
|
|
ch := util.NewChunker(r)
|
|
io.Copy(&chunked, &ch)
|
|
|
|
type SampleType struct {
|
|
A uint
|
|
}
|
|
var s SampleType
|
|
err := readChunkedJSON(&chunked, &s)
|
|
assert.NotNil(t, err)
|
|
|
|
_, ok := err.(ByteStreamRPCDecodeJSONError)
|
|
if !ok {
|
|
t.Errorf("expected ByteStreamRPCDecodeJSONError, got %t\n", err)
|
|
t.Errorf("%s\n", err)
|
|
}
|
|
|
|
}
|