Serix struct tag changes #603

Merged · 17 commits · Nov 8, 2023
2 changes: 1 addition & 1 deletion ds/set_impl.go
@@ -145,7 +145,7 @@ func (s *set[ElementType]) apply(mutations SetMutations[ElementType]) (appliedMu
//
//nolint:tagliatelle // heck knows why this linter fails here
type readableSet[T comparable] struct {
*orderedmap.OrderedMap[T, types.Empty] `serix:"0"`
*orderedmap.OrderedMap[T, types.Empty] `serix:""`
}

// newReadableSet creates a new readable set with the given elements.
4 changes: 4 additions & 0 deletions serializer/error.go
@@ -32,6 +32,10 @@ var (
ErrDeserializationInvalidBoolValue = ierrors.New("invalid bool value")
// ErrDeserializationLengthInvalid gets returned if a length denotation exceeds a specified limit.
ErrDeserializationLengthInvalid = ierrors.New("length denotation invalid")
// ErrDeserializationLengthMinNotReached gets returned if a length denotation is less than a specified limit.
ErrDeserializationLengthMinNotReached = ierrors.Wrap(ErrDeserializationLengthInvalid, "min length not reached")
// ErrDeserializationLengthMaxExceeded gets returned if a length denotation is more than a specified limit.
ErrDeserializationLengthMaxExceeded = ierrors.Wrap(ErrDeserializationLengthInvalid, "max length exceeded")
// ErrDeserializationNotAllConsumed gets returned if not all bytes were consumed during deserialization of a given type.
ErrDeserializationNotAllConsumed = ierrors.New("not all data has been consumed but should have been")
// ErrUint256NumNegative gets returned if a supposed uint256 has a sign bit.
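The two new sentinels are wrappers around the existing ErrDeserializationLengthInvalid, so callers that already match on the generic length error keep working while new callers can distinguish the min and max cases. A minimal sketch of that matching behaviour, using the standard errors package as a stand-in for ierrors (assumed here to preserve the error chain the same way %w wrapping does):

```go
package main

import (
	"errors"
	"fmt"
)

// Stand-ins for the sentinels in serializer/error.go; ierrors.New/Wrap are
// assumed to build the same chain as errors.New plus %w wrapping.
var (
	errLengthInvalid       = errors.New("length denotation invalid")
	errMinLengthNotReached = fmt.Errorf("min length not reached: %w", errLengthInvalid)
	errMaxLengthExceeded   = fmt.Errorf("max length exceeded: %w", errLengthInvalid)
)

func main() {
	err := fmt.Errorf("denoted 3 bytes, min required 5: %w", errMinLengthNotReached)

	fmt.Println(errors.Is(err, errMinLengthNotReached)) // true: the specific sentinel matches
	fmt.Println(errors.Is(err, errLengthInvalid))       // true: the generic parent still matches
	fmt.Println(errors.Is(err, errMaxLengthExceeded))   // false: the sibling sentinel does not
}
```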
10 changes: 5 additions & 5 deletions serializer/serializer.go
@@ -268,7 +268,7 @@ func (s *Serializer) WriteVariableByteSlice(data []byte, lenType SeriLengthPrefi
return s

case minLen > 0 && sliceLen < minLen:
s.err = errProducer(ierrors.Wrapf(ErrSliceLengthTooShort, "slice (len %d) is less than min length of %d ", sliceLen, maxLen))
s.err = errProducer(ierrors.Wrapf(ErrSliceLengthTooShort, "slice (len %d) is less than min length of %d ", sliceLen, minLen))

return s
}
@@ -776,9 +776,9 @@ func (d *Deserializer) ReadVariableByteSlice(slice *[]byte, lenType SeriLengthPr

switch {
case maxLen > 0 && sliceLength > maxLen:
d.err = errProducer(ierrors.Wrapf(ErrDeserializationLengthInvalid, "denoted %d bytes, max allowed %d ", sliceLength, maxLen))
d.err = errProducer(ierrors.Wrapf(ErrDeserializationLengthMaxExceeded, "denoted %d bytes, max allowed %d ", sliceLength, maxLen))
case minLen > 0 && sliceLength < minLen:
d.err = errProducer(ierrors.Wrapf(ErrDeserializationLengthInvalid, "denoted %d bytes, min required %d ", sliceLength, minLen))
d.err = errProducer(ierrors.Wrapf(ErrDeserializationLengthMinNotReached, "denoted %d bytes, min required %d ", sliceLength, minLen))
}

dest := make([]byte, sliceLength)
@@ -1158,9 +1158,9 @@ func (d *Deserializer) ReadString(s *string, lenType SeriLengthPrefixType, errPr

switch {
case maxLen > 0 && strLen > maxLen:
d.err = errProducer(ierrors.Wrapf(ErrDeserializationLengthInvalid, "string defined to be of %d bytes length but max %d is allowed", strLen, maxLen))
d.err = errProducer(ierrors.Wrapf(ErrDeserializationLengthMaxExceeded, "string defined to be of %d bytes length but max %d is allowed", strLen, maxLen))
case minLen > 0 && strLen < minLen:
d.err = errProducer(ierrors.Wrapf(ErrDeserializationLengthInvalid, "string defined to be of %d bytes length but min %d is required", strLen, minLen))
d.err = errProducer(ierrors.Wrapf(ErrDeserializationLengthMinNotReached, "string defined to be of %d bytes length but min %d is required", strLen, minLen))
}

if len(d.src[d.offset:]) < strLen {
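ReadVariableByteSlice and ReadString surface the new sentinels through the errProducer callback, which is the hook the serix decoder (next file) uses to translate them into its array-validation errors. A sketch of that callback-based translation with the standard library, using stand-in sentinel names (the real code uses ierrors, assumed to follow the usual errors.Join/Is semantics):

```go
package main

import (
	"errors"
	"fmt"
)

// Stand-ins for the two error families bridged by the errProducer callback.
var (
	errLengthMinNotReached        = errors.New("min length not reached")
	errArrayMinElementsNotReached = errors.New("min count of elements within the array not reached")
)

// errProducer mirrors the callback shape passed to ReadVariableByteSlice:
// it receives the deserializer's error and may re-wrap or join it.
func errProducer(err error) error {
	err = fmt.Errorf("failed to read bytes from the deserializer: %w", err)
	if errors.Is(err, errLengthMinNotReached) {
		// Joining keeps both sentinels reachable via errors.Is.
		return errors.Join(err, errArrayMinElementsNotReached)
	}

	return err
}

func main() {
	err := errProducer(fmt.Errorf("denoted 3 bytes, min required 5: %w", errLengthMinNotReached))
	fmt.Println(errors.Is(err, errLengthMinNotReached))        // true
	fmt.Println(errors.Is(err, errArrayMinElementsNotReached)) // true
}
```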
80 changes: 62 additions & 18 deletions serializer/serix/decode.go
@@ -86,10 +86,15 @@ func (api *API) decode(ctx context.Context, b []byte, value reflect.Value, ts Ty
return 0, ierrors.WithStack(err)
}
}

if opts.validation {
if err := api.callSyntacticValidator(ctx, value, valueType); err != nil {
return 0, ierrors.Wrap(err, "post-deserialization validation failed")
}

if err := api.checkMaxByteSize(bytesRead, ts); err != nil {
return bytesRead, err
}
}

return bytesRead, nil
@@ -168,7 +173,16 @@ func (api *API) decodeBasedOnType(ctx context.Context, b []byte, value reflect.V
addrValue.Interface().(*string),
serializer.SeriLengthPrefixType(lengthPrefixType),
func(err error) error {
return ierrors.Wrap(err, "failed to read string value from the deserializer")
err = ierrors.Wrap(err, "failed to read string value from the deserializer")

switch {
case ierrors.Is(err, serializer.ErrDeserializationLengthMinNotReached):
return ierrors.Join(err, serializer.ErrArrayValidationMinElementsNotReached)
case ierrors.Is(err, serializer.ErrDeserializationLengthMaxExceeded):
return ierrors.Join(err, serializer.ErrArrayValidationMaxElementsExceeded)
default:
return err
}
}, minLen, maxLen)

return deseri.Done()
@@ -272,7 +286,7 @@ func (api *API) decodeStructFields(

for _, sField := range structFields {
fieldValue := value.Field(sField.index)
if sField.isEmbedded && !sField.settings.nest {
if sField.isEmbedded && !sField.settings.inlined {
fieldType := sField.fType
if fieldType.Kind() == reflect.Ptr {
if fieldValue.IsNil() {
@@ -373,7 +387,16 @@ func (api *API) decodeSlice(ctx context.Context, b []byte, value reflect.Value,
addrValue.Interface().(*[]byte),
serializer.SeriLengthPrefixType(lengthPrefixType),
func(err error) error {
return ierrors.Wrap(err, "failed to read bytes from the deserializer")
err = ierrors.Wrap(err, "failed to read bytes from the deserializer")

switch {
case ierrors.Is(err, serializer.ErrDeserializationLengthMinNotReached):
return ierrors.Join(err, serializer.ErrArrayValidationMinElementsNotReached)
case ierrors.Is(err, serializer.ErrDeserializationLengthMaxExceeded):
return ierrors.Join(err, serializer.ErrArrayValidationMaxElementsExceeded)
default:
return err
}
}, minLen, maxLen)

return deseri.Done()
@@ -403,25 +426,60 @@ func (api *API) decodeSlice(ctx context.Context, b []byte, value reflect.Value,
return bytesRead, nil
}

func (api *API) decodeMapKVPair(ctx context.Context, b []byte, key, val reflect.Value, opts *options) (int, error) {
keyTypeSettings := api.getTypeSettingsByValue(key)
valueTypeSettings := api.getTypeSettingsByValue(val)

keyBytesRead, err := api.decode(ctx, b, key, keyTypeSettings, opts)
if err != nil {
return 0, ierrors.Wrapf(err, "failed to decode map key of type %s", key.Type())
}
b = b[keyBytesRead:]
elemBytesRead, err := api.decode(ctx, b, val, valueTypeSettings, opts)
if err != nil {
return 0, ierrors.Wrapf(err, "failed to decode map element of type %s", val.Type())
}

return keyBytesRead + elemBytesRead, nil
}

func (api *API) decodeMap(ctx context.Context, b []byte, value reflect.Value,
valueType reflect.Type, ts TypeSettings, opts *options) (int, error) {
if value.IsNil() {
value.Set(reflect.MakeMap(valueType))
}

deserializeItem := func(b []byte) (bytesRead int, err error) {
keyValue := reflect.New(valueType.Key()).Elem()
elemValue := reflect.New(valueType.Elem()).Elem()
bytesRead, err = api.decodeMapKVPair(ctx, b, keyValue, elemValue, opts)
if err != nil {
return 0, ierrors.WithStack(err)
}

if value.MapIndex(keyValue).IsValid() {
// map entry already exists
return 0, ierrors.Wrapf(ErrMapValidationViolatesUniqueness, "map entry with key %v already exists", keyValue.Interface())
}

value.SetMapIndex(keyValue, elemValue)

return bytesRead, nil
}
ts = ts.ensureOrdering()

return api.decodeSequence(b, deserializeItem, valueType, ts, opts)
consumedBytes, err := api.decodeSequence(b, deserializeItem, valueType, ts, opts)
if err != nil {
return consumedBytes, err
}

if opts.validation {
if err := api.checkMinMaxBounds(value, ts); err != nil {
return consumedBytes, err
}
}

return consumedBytes, nil
}

func (api *API) decodeSequence(b []byte, deserializeItem serializer.DeserializeFunc, valueType reflect.Type, ts TypeSettings, opts *options) (int, error) {
@@ -447,17 +505,3 @@ func (api *API) decodeSequence(b []byte, deserializeItem serializer.DeserializeF

return deseri.Done()
}

func (api *API) decodeMapKVPair(ctx context.Context, b []byte, key, val reflect.Value, opts *options) (int, error) {
keyBytesRead, err := api.decode(ctx, b, key, TypeSettings{}, opts)
if err != nil {
return 0, ierrors.Wrapf(err, "failed to decode map key of type %s", key.Type())
}
b = b[keyBytesRead:]
elemBytesRead, err := api.decode(ctx, b, val, TypeSettings{}, opts)
if err != nil {
return 0, ierrors.Wrapf(err, "failed to decode map element of type %s", val.Type())
}

return keyBytesRead + elemBytesRead, nil
}
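decodeMap now rejects duplicate keys while decoding and re-checks the min/max element bounds once the whole map has been read. A reflection-free sketch of the duplicate-key guard, with errMapUniqueness standing in for serializer's ErrMapValidationViolatesUniqueness:

```go
package main

import (
	"errors"
	"fmt"
)

var errMapUniqueness = errors.New("map entry already exists")

type kvPair struct {
	key   string
	value int
}

// decodePairs rebuilds a map from decoded key/value pairs and fails as soon
// as a key shows up twice, mirroring the MapIndex(...).IsValid() check above.
func decodePairs(pairs []kvPair) (map[string]int, error) {
	out := make(map[string]int, len(pairs))
	for _, p := range pairs {
		if _, exists := out[p.key]; exists {
			return nil, fmt.Errorf("map entry with key %q already exists: %w", p.key, errMapUniqueness)
		}
		out[p.key] = p.value
	}

	return out, nil
}

func main() {
	_, err := decodePairs([]kvPair{{"a", 1}, {"a", 2}})
	fmt.Println(errors.Is(err, errMapUniqueness)) // true
}
```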
2 changes: 1 addition & 1 deletion serializer/serix/decode_test.go
@@ -143,7 +143,7 @@ func TestDecode_ArrayRules(t *testing.T) {
ts := serix.TypeSettings{}.WithLengthPrefixType(boolsLenType).WithArrayRules(rules)
bytesRead, err := testAPI.Decode(ctx, bytes, testObj, serix.WithValidation(), serix.WithTypeSettings(ts))
require.Zero(t, bytesRead)
assert.Contains(t, err.Error(), "min count of elements within the array not reached")
require.ErrorIs(t, err, serializer.ErrArrayValidationMinElementsNotReached)
}

func testDecode(t testing.TB, ctx context.Context, expected serializer.Serializable, opts ...serix.Option) {
98 changes: 45 additions & 53 deletions serializer/serix/encode.go
@@ -52,10 +52,15 @@ func (api *API) encode(ctx context.Context, value reflect.Value, ts TypeSettings
return nil, ierrors.WithStack(err)
}
}

if opts.validation {
if err = api.callBytesValidator(ctx, valueType, b); err != nil {
return nil, ierrors.Wrap(err, "post-serialization validation failed")
}

if err := api.checkMaxByteSize(len(b), ts); err != nil {
return nil, err
}
}

return b, nil
@@ -109,7 +114,11 @@ func (api *API) encodeBasedOnType(
if !set {
return nil, ierrors.New("can't serialize 'string' type: no LengthPrefixType was provided")
}
minLen, maxLen := ts.MinMaxLen()

var minLen, maxLen int
if opts.validation {
minLen, maxLen = ts.MinMaxLen()
}
seri := serializer.NewSerializer()

return seri.WriteString(
@@ -143,42 +152,6 @@ func (api *API) encodeBasedOnType(
return nil, ierrors.Errorf("can't encode: unsupported type %T", valueI)
}

// checks whether the given value is within its defined bounds in case it has a length.
func (api *API) checkMinMaxBounds(v reflect.Value, ts TypeSettings) error {
if has := hasLength(v); !has {
return nil
}

l := uint(v.Len())
if minLen, ok := ts.MinLen(); ok {
if l < minLen {
return ierrors.Wrapf(serializer.ErrArrayValidationMinElementsNotReached, "can't serialize '%s' type: min length %d not reached (len %d)", v.Kind(), minLen, l)
}
}
if maxLen, ok := ts.MaxLen(); ok {
if l > maxLen {
return ierrors.Wrapf(serializer.ErrArrayValidationMaxElementsExceeded, "can't serialize '%s' type: max length %d exceeded (len %d)", v.Kind(), maxLen, l)
}
}

return nil
}

// checks whether the given value has the concept of a length.
func hasLength(v reflect.Value) bool {
k := v.Kind()
switch k {
case reflect.Array:
case reflect.Map:
case reflect.Slice:
case reflect.String:
default:
return false
}

return true
}

func (api *API) encodeInterface(
ctx context.Context, value reflect.Value, valueType reflect.Type, ts TypeSettings, opts *options,
) ([]byte, error) {
@@ -239,7 +212,7 @@ func (api *API) encodeStructFields(

for _, sField := range structFields {
fieldValue := value.Field(sField.index)
if sField.isEmbedded && !sField.settings.nest {
if sField.isEmbedded && !sField.settings.inlined {
fieldType := sField.fType
if fieldValue.Kind() == reflect.Ptr {
if fieldValue.IsNil() {
@@ -352,10 +325,36 @@ func (api *API) encodeSlice(ctx context.Context, value reflect.Value, valueType
return encodeSliceOfBytes(data, valueType, ts, opts)
}

func (api *API) encodeMapKVPair(ctx context.Context, key, val reflect.Value, opts *options) ([]byte, error) {
keyTypeSettings := api.getTypeSettingsByValue(key)
valueTypeSettings := api.getTypeSettingsByValue(val)

keyBytes, err := api.encode(ctx, key, keyTypeSettings, opts)
if err != nil {
return nil, ierrors.Wrapf(err, "failed to encode map key of type %s", key.Type())
}

elemBytes, err := api.encode(ctx, val, valueTypeSettings, opts)
if err != nil {
return nil, ierrors.Wrapf(err, "failed to encode map element of type %s", val.Type())
}

buf := bytes.NewBuffer(keyBytes)
buf.Write(elemBytes)

return buf.Bytes(), nil
}

func (api *API) encodeMap(ctx context.Context, value reflect.Value, valueType reflect.Type,
ts TypeSettings, opts *options) ([]byte, error) {
size := value.Len()
data := make([][]byte, size)

if opts.validation {
if err := api.checkMinMaxBounds(value, ts); err != nil {
return nil, err
}
}

data := make([][]byte, value.Len())
iter := value.MapRange()
for i := 0; iter.Next(); i++ {
key := iter.Key()
@@ -368,36 +367,29 @@ func (api *API) encodeMap(ctx context.Context, value reflect.Value, valueType re
}
ts = ts.ensureOrdering()

return encodeSliceOfBytes(data, valueType, ts, opts)
}

func (api *API) encodeMapKVPair(ctx context.Context, key, val reflect.Value, opts *options) ([]byte, error) {
keyBytes, err := api.encode(ctx, key, TypeSettings{}, opts)
if err != nil {
return nil, ierrors.Wrapf(err, "failed to encode map key of type %s", key.Type())
}
elemBytes, err := api.encode(ctx, val, TypeSettings{}, opts)
bytes, err := encodeSliceOfBytes(data, valueType, ts, opts)
if err != nil {
return nil, ierrors.Wrapf(err, "failed to encode map element of type %s", val.Type())
return nil, err
}
buf := bytes.NewBuffer(keyBytes)
buf.Write(elemBytes)

return buf.Bytes(), nil
return bytes, nil
}

func encodeSliceOfBytes(data [][]byte, valueType reflect.Type, ts TypeSettings, opts *options) ([]byte, error) {
lengthPrefixType, set := ts.LengthPrefixType()
if !set {
return nil, ierrors.Errorf("no LengthPrefixType was provided for type %s", valueType)
}

arrayRules := ts.ArrayRules()
if arrayRules == nil {
arrayRules = new(ArrayRules)
}

serializationMode := ts.toMode(opts)
serializerArrayRules := serializer.ArrayRules(*arrayRules)
serializerArrayRulesPtr := &serializerArrayRules

seri := serializer.NewSerializer()
seri.WriteSliceOfByteSlices(data,
serializationMode,
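On the encode side, the min/max bounds are now only looked up and enforced when validation is enabled, so plain Encode calls skip those checks entirely. A minimal sketch of that gating under stated assumptions (options, typeSettings and checkBounds are illustrative stand-ins, not the serix API):

```go
package main

import (
	"errors"
	"fmt"
)

// Illustrative stand-ins for the serix internals referenced above.
type typeSettings struct {
	minLen, maxLen int // 0 means "unset"
}

type options struct {
	validation bool
}

func checkBounds(length int, ts typeSettings) error {
	switch {
	case ts.minLen > 0 && length < ts.minLen:
		return fmt.Errorf("min length %d not reached (len %d)", ts.minLen, length)
	case ts.maxLen > 0 && length > ts.maxLen:
		return fmt.Errorf("max length %d exceeded (len %d)", ts.maxLen, length)
	}

	return nil
}

// encodeString only consults the bounds when validation is requested,
// mirroring the opts.validation gate in encodeBasedOnType and encodeMap.
func encodeString(s string, ts typeSettings, opts options) ([]byte, error) {
	if opts.validation {
		if err := checkBounds(len(s), ts); err != nil {
			return nil, errors.Join(errors.New("pre-serialization validation failed"), err)
		}
	}

	return []byte(s), nil // the real serializer would also write a length prefix
}

func main() {
	ts := typeSettings{minLen: 3}

	if _, err := encodeString("ab", ts, options{validation: true}); err != nil {
		fmt.Println(err) // bounds violated
	}

	b, _ := encodeString("ab", ts, options{validation: false}) // bounds skipped
	fmt.Println(len(b))                                        // 2
}
```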
2 changes: 1 addition & 1 deletion serializer/serix/encode_test.go
@@ -96,7 +96,7 @@ func TestEncode_ArrayRules(t *testing.T) {
ts := serix.TypeSettings{}.WithLengthPrefixType(serix.LengthPrefixTypeAsUint32).WithArrayRules(rules)
got, err := testAPI.Encode(ctx, testObj, serix.WithValidation(), serix.WithTypeSettings(ts))
require.Nil(t, got)
assert.Contains(t, err.Error(), "min count of elements within the array not reached")
require.ErrorIs(t, err, serializer.ErrArrayValidationMinElementsNotReached)
}

func testEncode(t testing.TB, testObj serializer.Serializable, opts ...serix.Option) {
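The tests now assert on the sentinel with require.ErrorIs instead of substring-matching the message, which keeps them stable if the wording changes or more wrapping is added. A small illustrative test in the same spirit (the sentinel and wrapping are stand-ins, not the serializer package's values):

```go
package example_test

import (
	"errors"
	"fmt"
	"testing"

	"github.com/stretchr/testify/require"
)

// errMinElements stands in for serializer.ErrArrayValidationMinElementsNotReached.
var errMinElements = errors.New("min count of elements within the array not reached")

func TestSentinelMatching(t *testing.T) {
	// Extra wrapping and rewording of the outer message do not break the check.
	err := fmt.Errorf("encoding failed: %w", errMinElements)

	require.ErrorIs(t, err, errMinElements)
}
```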