MF-1421 - Make flattening of the JSON transformer available only on InfluxDB (#1432)

* MF-1421 - Add a flag to make flattening in the JSON transformer optional

Signed-off-by: Burak Sekili <buraksekili@gmail.com>

* Add test cases for JSON transformer without flattening

Signed-off-by: Burak Sekili <buraksekili@gmail.com>

* Add a comment for Transform

Signed-off-by: Burak Sekili <buraksekili@gmail.com>

* Separate TestTransformJSON into two tests

Signed-off-by: Burak Sekili <buraksekili@gmail.com>

* Replace flatten flag

Signed-off-by: Burak Sekili <buraksekili@gmail.com>

* Remove unnecessary flattening while reading a message

Signed-off-by: Burak Sekili <buraksekili@gmail.com>
Burak Sekili 2021-07-22 12:20:47 +03:00 committed by GitHub
parent 19f0437f57
commit 5ac1203b55
7 changed files with 102 additions and 45 deletions
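
For orientation before the diffs: the JSON transformer used to flatten nested payloads into a single-level map with "/"-separated keys so InfluxDB could store them as point fields; the document-oriented readers (Cassandra, MongoDB, PostgreSQL) do not need that, so this change moves flattening into the InfluxDB writer and lets the transformer pass payloads through unchanged. Below is a minimal sketch of the flattening semantics; flattenPayload is an illustrative helper, not the package's code, which additionally rejects keys containing the separator or shadowing reserved message keys.

// Illustrative sketch of "/"-separated flattening, as now applied only by the
// InfluxDB writer. The real implementation lives in pkg/transformers/json.
package main

import "fmt"

func flattenPayload(prefix string, src, dst map[string]interface{}) map[string]interface{} {
    for k, v := range src {
        key := k
        if prefix != "" {
            key = prefix + "/" + k
        }
        // Recurse into nested objects; everything else becomes a leaf value.
        if nested, ok := v.(map[string]interface{}); ok {
            flattenPayload(key, nested, dst)
            continue
        }
        dst[key] = v
    }
    return dst
}

func main() {
    payload := map[string]interface{}{
        "key1": "val1",
        "key4": map[string]interface{}{"key5": "val5"},
    }
    fmt.Println(flattenPayload("", payload, map[string]interface{}{}))
    // Output: map[key1:val1 key4/key5:val5]
}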

@@ -83,6 +83,12 @@ func (repo *influxRepo) jsonPoints(pts influxdata.BatchPoints, msgs json.Message
for i, m := range msgs.Data {
t := time.Unix(0, m.Created+int64(i))
flat, err := json.Flatten(m.Payload)
if err != nil {
return nil, errors.Wrap(json.ErrTransform, err)
}
m.Payload = flat
// Copy first-level fields so that the original Payload is unchanged.
fields := make(map[string]interface{})
for k, v := range m.Payload {
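
The writer-side flattening above exists because InfluxDB point fields are a flat key/value map, so nested payloads must be flattened before a point is built. A hedged sketch of that flow using the influxdata client/v2 package the writer already depends on; the measurement name, tags and timestamp below are placeholders, not the writer's actual values.

package main

import (
    "fmt"
    "time"

    influxdata "github.com/influxdata/influxdb/client/v2"
)

func main() {
    // Assume json.Flatten already turned a nested payload into this map.
    flat := map[string]interface{}{"key1": "val1", "key4/key5": "val5"}

    // Copy first-level fields so the original payload map stays untouched.
    fields := make(map[string]interface{}, len(flat))
    for k, v := range flat {
        fields[k] = v
    }

    tags := map[string]string{"channel": "chan-id", "publisher": "pub-id"}
    pt, err := influxdata.NewPoint("some_json", tags, fields, time.Now())
    if err != nil {
        panic(err)
    }
    fmt.Println(pt.String()) // line-protocol representation of the point
}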

@@ -9,6 +9,8 @@ import (
"testing"
"time"
"github.com/mainflux/mainflux/pkg/errors"
"github.com/gofrs/uuid"
influxdata "github.com/influxdata/influxdb/client/v2"
writer "github.com/mainflux/mainflux/consumers/writers/influxdb"
@@ -159,18 +161,73 @@ func TestSaveJSON(t *testing.T) {
},
}
invalidKeySepMsg := msg
invalidKeySepMsg.Payload = map[string]interface{}{
"field_1": 123,
"field_2": "value",
"field_3": false,
"field_4": 12.344,
"field_5": map[string]interface{}{
"field_1": "value",
"field_2": 42,
},
"field_6/field_7": "value",
}
invalidKeyNameMsg := msg
invalidKeyNameMsg.Payload = map[string]interface{}{
"field_1": 123,
"field_2": "value",
"field_3": false,
"field_4": 12.344,
"field_5": map[string]interface{}{
"field_1": "value",
"field_2": 42,
},
"publisher": "value",
}
now := time.Now().Unix()
msgs := json.Messages{
Format: "some_json",
}
invalidKeySepMsgs := json.Messages{
Format: "some_json",
}
invalidKeyNameMsgs := json.Messages{
Format: "some_json",
}
for i := 0; i < streamsSize; i++ {
msg.Created = now + int64(i)
msgs.Data = append(msgs.Data, msg)
invalidKeySepMsgs.Data = append(invalidKeySepMsgs.Data, invalidKeySepMsg)
invalidKeyNameMsgs.Data = append(invalidKeyNameMsgs.Data, invalidKeyNameMsg)
}
err = repo.Consume(msgs)
assert.Nil(t, err, fmt.Sprintf("expected no error got %s\n", err))
cases := []struct {
desc string
msgs json.Messages
err error
}{
{
desc: "consume valid json messages",
msgs: msgs,
err: nil,
},
{
desc: "consume invalid json messages containing invalid key separator",
msgs: invalidKeySepMsgs,
err: json.ErrInvalidKey,
},
{
desc: "consume invalid json messages containing invalid key name",
msgs: invalidKeyNameMsgs,
err: json.ErrInvalidKey,
},
}
for _, tc := range cases {
err = repo.Consume(tc.msgs)
assert.True(t, errors.Contains(err, tc.err), fmt.Sprintf("%s expected %s, got %s", tc.desc, tc.err, err))
row, err := queryDB(selectMsgs)
assert.Nil(t, err, fmt.Sprintf("Querying InfluxDB to retrieve data expected to succeed: %s.\n", err))
@@ -178,3 +235,4 @@ func TestSaveJSON(t *testing.T) {
count := len(row)
assert.Equal(t, streamsSize, count, fmt.Sprintf("Expected to have %d messages saved, found %d instead.\n", streamsSize, count))
}
}
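
The two "invalid" messages above fail because json.Flatten rejects payload keys that contain the "/" separator or collide with the reserved keys "publisher", "protocol", "channel" and "subtopic", returning the now-exported json.ErrInvalidKey. A self-contained sketch of that guard, mirroring the check visible in the transformer hunk below; validateKeys and errInvalidKey are illustrative stand-ins, not part of the package.

package main

import (
    "errors"
    "fmt"
    "strings"
)

// errInvalidKey stands in for json.ErrInvalidKey.
var errInvalidKey = errors.New("invalid object key")

// validateKeys mirrors the guard inside flatten: keys may not contain the
// "/" separator or shadow reserved top-level message keys.
func validateKeys(payload map[string]interface{}) error {
    reserved := [...]string{"publisher", "protocol", "channel", "subtopic"}
    for k := range payload {
        if strings.Contains(k, "/") {
            return errInvalidKey
        }
        for _, r := range reserved {
            if k == r {
                return errInvalidKey
            }
        }
    }
    return nil
}

func main() {
    fmt.Println(validateKeys(map[string]interface{}{"field_6/field_7": "value"})) // invalid object key
    fmt.Println(validateKeys(map[string]interface{}{"publisher": "value"}))       // invalid object key
}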

@@ -17,9 +17,9 @@ const sep = "/"
var keys = [...]string{"publisher", "protocol", "channel", "subtopic"}
var (
// ErrTransform reprents an error during parsing message.
// ErrTransform represents an error during parsing message.
ErrTransform = errors.New("unable to parse JSON object")
errInvalidKey = errors.New("invalid object key")
ErrInvalidKey = errors.New("invalid object key")
errUnknownFormat = errors.New("unknown format of JSON message")
errInvalidFormat = errors.New("invalid JSON object")
errInvalidNestedJSON = errors.New("invalid nested JSON object")
@@ -32,6 +32,7 @@ func New() transformers.Transformer {
return funcTransformer(transformer)
}
// Transform transforms Mainflux message to a list of JSON messages.
func (fh funcTransformer) Transform(msg messaging.Message) (interface{}, error) {
return fh(msg)
}
@@ -58,11 +59,7 @@ func transformer(msg messaging.Message) (interface{}, error) {
}
switch p := payload.(type) {
case map[string]interface{}:
flat, err := Flatten(p)
if err != nil {
return nil, errors.Wrap(ErrTransform, err)
}
ret.Payload = flat
ret.Payload = p
return Messages{[]Message{ret}, format}, nil
case []interface{}:
res := []Message{}
@@ -72,12 +69,8 @@ func transformer(msg messaging.Message) (interface{}, error) {
if !ok {
return nil, errors.Wrap(ErrTransform, errInvalidNestedJSON)
}
flat, err := Flatten(v)
if err != nil {
return nil, errors.Wrap(ErrTransform, err)
}
newMsg := ret
newMsg.Payload = flat
newMsg.Payload = v
res = append(res, newMsg)
}
return Messages{res, format}, nil
@@ -86,7 +79,7 @@ func transformer(msg messaging.Message) (interface{}, error) {
}
}
// ParseFlat receives flat map that reprents complex JSON objects and returns
// ParseFlat receives flat map that represents complex JSON objects and returns
// the corresponding complex JSON object with nested maps. It's the opposite
// of the Flatten function.
func ParseFlat(flat interface{}) interface{} {
@@ -97,14 +90,14 @@ func ParseFlat(flat interface{}) interface{} {
if value == nil {
continue
}
keys := strings.Split(key, sep)
n := len(keys)
subKeys := strings.Split(key, sep)
n := len(subKeys)
if n == 1 {
msg[key] = value
continue
}
current := msg
for i, k := range keys {
for i, k := range subKeys {
if _, ok := current[k]; !ok {
current[k] = make(map[string]interface{})
}
@@ -127,11 +120,11 @@ func Flatten(m map[string]interface{}) (map[string]interface{}, error) {
func flatten(prefix string, m, m1 map[string]interface{}) (map[string]interface{}, error) {
for k, v := range m1 {
if strings.Contains(k, sep) {
return nil, errInvalidKey
return nil, ErrInvalidKey
}
for _, key := range keys {
if k == key {
return nil, errInvalidKey
return nil, ErrInvalidKey
}
}
switch val := v.(type) {
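
With the hunk above applied, the transformer returns payloads as-is and leaves flattening to the consumers that need it. A hedged usage sketch, assuming the import paths shown elsewhere in this diff and that messaging.Message carries the payload format in its subtopic:

package main

import (
    "fmt"

    "github.com/mainflux/mainflux/pkg/messaging"
    "github.com/mainflux/mainflux/pkg/transformers/json"
)

func main() {
    tr := json.New()
    msg := messaging.Message{
        Channel:  "chan-id",
        Subtopic: "some_json",
        Payload:  []byte(`{"key1": "val1", "key4": {"key5": "val5"}}`),
        Created:  1626947247000000000,
    }
    out, err := tr.Transform(msg)
    if err != nil {
        panic(err)
    }
    // The nested "key4" object is preserved; it is no longer flattened here.
    fmt.Printf("%+v\n", out)
}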

@@ -17,7 +17,7 @@ import (
const (
validPayload = `{"key1": "val1", "key2": 123, "key3": "val3", "key4": {"key5": "val5"}}`
listPayload = `[{"key1": "val1", "key2": 123, "keylist3": "val3", "key4": {"key5": "val5"}}, {"key1": "val1", "key2": 123, "key3": "val3", "key4": {"key5": "val5"}}]`
invalidPayload = `{"key1": "val1", "key2": 123, "key3/1": "val3", "key4": {"key5": "val5"}}`
invalidPayload = `{"key1": }`
)
func TestTransformJSON(t *testing.T) {
@@ -37,7 +37,7 @@ func TestTransformJSON(t *testing.T) {
listMsg := msg
listMsg.Payload = []byte(listPayload)
jsonMsg := json.Messages{
jsonMsgs := json.Messages{
Data: []json.Message{
{
Channel: msg.Channel,
@@ -49,7 +49,9 @@ func TestTransformJSON(t *testing.T) {
"key1": "val1",
"key2": float64(123),
"key3": "val3",
"key4/key5": "val5",
"key4": map[string]interface{}{
"key5": "val5",
},
},
},
},
@@ -71,7 +73,9 @@ func TestTransformJSON(t *testing.T) {
"key1": "val1",
"key2": float64(123),
"keylist3": "val3",
"key4/key5": "val5",
"key4": map[string]interface{}{
"key5": "val5",
},
},
},
{
@@ -84,7 +88,9 @@ func TestTransformJSON(t *testing.T) {
"key1": "val1",
"key2": float64(123),
"key3": "val3",
"key4/key5": "val5",
"key4": map[string]interface{}{
"key5": "val5",
},
},
},
},
@@ -100,7 +106,7 @@ func TestTransformJSON(t *testing.T) {
{
desc: "test transform JSON",
msg: msg,
json: jsonMsg,
json: jsonMsgs,
err: nil,
},
{

@@ -9,7 +9,6 @@ import (
"github.com/gocql/gocql"
"github.com/mainflux/mainflux/pkg/errors"
jsont "github.com/mainflux/mainflux/pkg/transformers/json"
"github.com/mainflux/mainflux/pkg/transformers/senml"
"github.com/mainflux/mainflux/readers"
)
@@ -107,7 +106,6 @@ func (cr cassandraRepository) ReadAll(chanID string, rpm readers.PageMetadata) (
if err != nil {
return readers.MessagesPage{}, errors.Wrap(errReadMessages, err)
}
m["payload"] = jsont.ParseFlat(m["payload"])
page.Messages = append(page.Messages, m)
}
}

@@ -8,7 +8,6 @@ import (
"encoding/json"
"github.com/mainflux/mainflux/pkg/errors"
jsont "github.com/mainflux/mainflux/pkg/transformers/json"
"github.com/mainflux/mainflux/pkg/transformers/senml"
"github.com/mainflux/mainflux/readers"
"go.mongodb.org/mongo-driver/bson"
@@ -75,7 +74,6 @@ func (repo mongoRepository) ReadAll(chanID string, rpm readers.PageMetadata) (re
if err := cursor.Decode(&m); err != nil {
return readers.MessagesPage{}, errors.Wrap(errReadMessages, err)
}
m["payload"] = jsont.ParseFlat(m["payload"])
messages = append(messages, m)
}

@@ -10,7 +10,6 @@ import (
"github.com/jmoiron/sqlx" // required for DB access
"github.com/lib/pq"
"github.com/mainflux/mainflux/pkg/errors"
jsont "github.com/mainflux/mainflux/pkg/transformers/json"
"github.com/mainflux/mainflux/pkg/transformers/senml"
"github.com/mainflux/mainflux/readers"
)
@@ -105,7 +104,6 @@ func (tr postgresRepository) ReadAll(chanID string, rpm readers.PageMetadata) (r
if err != nil {
return readers.MessagesPage{}, errors.Wrap(errReadMessages, err)
}
m["payload"] = jsont.ParseFlat(m["payload"])
page.Messages = append(page.Messages, m)
}
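
The three reader changes above drop jsont.ParseFlat because stored payloads are no longer flat, so there is nothing to un-flatten on the way out. For reference, a sketch of what that step used to do; parseFlat is an illustrative re-implementation, not the package's code.

package main

import (
    "fmt"
    "strings"
)

// parseFlat rebuilds nested maps from "/"-separated keys, i.e. the reverse
// of the flattening shown earlier.
func parseFlat(flat map[string]interface{}) map[string]interface{} {
    out := map[string]interface{}{}
    for key, val := range flat {
        parts := strings.Split(key, "/")
        cur := out
        for i, p := range parts {
            if i == len(parts)-1 {
                cur[p] = val
                break
            }
            if _, ok := cur[p].(map[string]interface{}); !ok {
                cur[p] = map[string]interface{}{}
            }
            cur = cur[p].(map[string]interface{})
        }
    }
    return out
}

func main() {
    fmt.Println(parseFlat(map[string]interface{}{"key1": "val1", "key4/key5": "val5"}))
    // Output: map[key1:val1 key4:map[key5:val5]]
}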