diff --git a/array.go b/array.go index f8a3ae3..209b225 100644 --- a/array.go +++ b/array.go @@ -2772,11 +2772,20 @@ func (a *Array) setParentUpdater(f parentUpdater) { // setCallbackWithChild sets up callback function with child value (child) // so parent array (a) can be notified when child value is modified. func (a *Array) setCallbackWithChild(i uint64, child Value, maxInlineSize uint64) { - c, ok := child.(mutableValueNotifier) + // Unwrap child value if needed (e.g. interpreter.SomeValue) + unwrappedChild, wrapperSize := unwrapValue(child) + + c, ok := unwrappedChild.(mutableValueNotifier) if !ok { return } + if maxInlineSize < wrapperSize { + maxInlineSize = 0 + } else { + maxInlineSize -= wrapperSize + } + vid := c.ValueID() // mutableElementIndex is lazily initialized. @@ -2809,6 +2818,8 @@ func (a *Array) setCallbackWithChild(i uint64, child Value, maxInlineSize uint64 return false, err } + storable = unwrapStorable(storable) + // Verify retrieved element is either SlabIDStorable or Slab, with identical value ID. switch storable := storable.(type) { case SlabIDStorable: @@ -2827,15 +2838,19 @@ func (a *Array) setCallbackWithChild(i uint64, child Value, maxInlineSize uint64 return false, nil } + // NOTE: Must reset child using original child (not unwrapped child) + // Set child value with parent array using updated index. - // Set() calls c.Storable() which returns inlined or not-inlined child storable. - existingValueStorable, err := a.set(adjustedIndex, c) + // Set() calls child.Storable() which returns inlined or not-inlined child storable. + existingValueStorable, err := a.set(adjustedIndex, child) if err != nil { return false, err } // Verify overwritten storable has identical value ID. + existingValueStorable = unwrapStorable(existingValueStorable) + switch existingValueStorable := existingValueStorable.(type) { case SlabIDStorable: sid := SlabID(existingValueStorable) @@ -2923,38 +2938,87 @@ func (a *Array) Set(index uint64, value Value) (Storable, error) { // If overwritten storable is an inlined slab, uninline the slab and store it in storage. // This is to prevent potential data loss because the overwritten inlined slab was not in // storage and any future changes to it would have been lost. - switch s := existingStorable.(type) { + existingStorable, existingValueID, _, err = uninlineStorableIfNeeded(a.Storage, existingStorable) + if err != nil { + return nil, err + } + + // Remove overwritten array/map's ValueID from mutableElementIndex if: + // - new value isn't array/map, or + // - new value is array/map with different value ID + if existingValueID != emptyValueID { + unwrappedValue, _ := unwrapValue(value) + newValue, ok := unwrappedValue.(mutableValueNotifier) + if !ok || existingValueID != newValue.ValueID() { + delete(a.mutableElementIndex, existingValueID) + } + } + + return existingStorable, nil +} + +// uninlineStorableIfNeeded uninlines given storable if needed, and +// returns uninlined Storable and its ValueID. +// If given storable is a WrapperStorable, this function uninlines +// wrapped storable if needed and returns a new WrapperStorable +// with wrapped uninlined storable and its ValidID. 
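// Note on the wrapper handling in setCallbackWithChild and Set above (a sketch
// of the assumed contract, not spelled out in this hunk): unwrapValue peels
// wrapper layers such as interpreter.SomeValue and reports the extra bytes the
// wrapper adds when encoded, for example:
//
//	inner, wrapperSize := unwrapValue(SomeValue{Value: childArray})
//	// inner == childArray; wrapperSize == encoding overhead of the wrapper
//
// With maxInlineSize = 64 and wrapperSize = 2, the unwrapped child may occupy
// at most 62 bytes inline; if the wrapper overhead alone exceeds the budget,
// maxInlineSize is clamped to 0, as done above. unwrapStorable is assumed to be
// the storable-level analogue, returning the innermost non-wrapper storable.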
+func uninlineStorableIfNeeded(storage SlabStorage, storable Storable) (Storable, ValueID, bool, error) { + if storable == nil { + return storable, emptyValueID, false, nil + } + + switch s := storable.(type) { case ArraySlab: // inlined array slab - err = s.Uninline(a.Storage) + err := s.Uninline(storage) if err != nil { - return nil, err + return nil, emptyValueID, false, err } - existingStorable = SlabIDStorable(s.SlabID()) - existingValueID = slabIDToValueID(s.SlabID()) + + slabID := s.SlabID() + + newStorable := SlabIDStorable(slabID) + valueID := slabIDToValueID(slabID) + + return newStorable, valueID, true, nil case MapSlab: // inlined map slab - err = s.Uninline(a.Storage) + err := s.Uninline(storage) if err != nil { - return nil, err + return nil, emptyValueID, false, err } - existingStorable = SlabIDStorable(s.SlabID()) - existingValueID = slabIDToValueID(s.SlabID()) + + slabID := s.SlabID() + + newStorable := SlabIDStorable(slabID) + valueID := slabIDToValueID(slabID) + + return newStorable, valueID, true, nil case SlabIDStorable: // uninlined slab - existingValueID = slabIDToValueID(SlabID(s)) - } + valueID := slabIDToValueID(SlabID(s)) - // Remove overwritten array/map's ValueID from mutableElementIndex if: - // - new value isn't array/map, or - // - new value is array/map with different value ID - if existingValueID != emptyValueID { - newValue, ok := value.(mutableValueNotifier) - if !ok || existingValueID != newValue.ValueID() { - delete(a.mutableElementIndex, existingValueID) + return storable, valueID, false, nil + + case WrapperStorable: + unwrappedStorable := unwrapStorable(s) + + // Uninline wrapped storable if needed. + uninlinedWrappedStorable, valueID, uninlined, err := uninlineStorableIfNeeded(storage, unwrappedStorable) + if err != nil { + return nil, emptyValueID, false, err + } + + if !uninlined { + return storable, valueID, uninlined, nil } + + // Create a new WrapperStorable with uninlinedWrappedStorable + newStorable := s.WrapAtreeStorable(uninlinedWrappedStorable) + + return newStorable, valueID, uninlined, nil } - return existingStorable, nil + return storable, emptyValueID, false, nil } func (a *Array) set(index uint64, value Value) (Storable, error) { @@ -3068,39 +3132,20 @@ func (a *Array) Remove(index uint64) (Storable, error) { return nil, err } - // If overwritten storable is an inlined slab, uninline the slab and store it in storage. + // If removed storable is an inlined slab, uninline the slab and store it in storage. // This is to prevent potential data loss because the overwritten inlined slab was not in // storage and any future changes to it would have been lost. 
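// The WrapperStorable case in uninlineStorableIfNeeded above depends on the
// wrapper interface defined elsewhere in this package. A minimal sketch of the
// methods the new code relies on (the two method names come from this patch;
// the exact interface shape is assumed):

type WrapperStorable interface {
	Storable

	// UnwrapAtreeStorable returns the storable wrapped by this storable.
	UnwrapAtreeStorable() Storable

	// WrapAtreeStorable wraps the given storable the same way this storable
	// wraps its current content, e.g. to re-wrap an uninlined child slab.
	WrapAtreeStorable(Storable) Storable
}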
- switch s := storable.(type) { - case ArraySlab: - err = s.Uninline(a.Storage) - if err != nil { - return nil, err - } - storable = SlabIDStorable(s.SlabID()) - - // Delete removed element ValueID from mutableElementIndex - removedValueID := slabIDToValueID(s.SlabID()) - delete(a.mutableElementIndex, removedValueID) - - case MapSlab: - err = s.Uninline(a.Storage) - if err != nil { - return nil, err - } - storable = SlabIDStorable(s.SlabID()) - - // Delete removed element ValueID from mutableElementIndex - removedValueID := slabIDToValueID(s.SlabID()) - delete(a.mutableElementIndex, removedValueID) + removedStorable, removedValueID, _, err := uninlineStorableIfNeeded(a.Storage, storable) + if err != nil { + return nil, err + } - case SlabIDStorable: - // Delete removed element ValueID from mutableElementIndex - removedValueID := slabIDToValueID(SlabID(s)) + // Delete removed element ValueID from mutableElementIndex + if removedValueID != emptyValueID { delete(a.mutableElementIndex, removedValueID) } - return storable, nil + return removedStorable, nil } func (a *Array) remove(index uint64) (Storable, error) { @@ -3361,7 +3406,10 @@ var defaultReadOnlyArrayIteratorMutatinCallback ReadOnlyArrayIteratorMutationCal var _ ArrayIterator = &readOnlyArrayIterator{} func (i *readOnlyArrayIterator) setMutationCallback(value Value) { - if v, ok := value.(mutableValueNotifier); ok { + + unwrappedChild, _ := unwrapValue(value) + + if v, ok := unwrappedChild.(mutableValueNotifier); ok { v.setParentUpdater(func() (found bool, err error) { i.valueMutationCallback(value) return true, NewReadOnlyIteratorElementMutationError(i.array.ValueID(), v.ValueID()) diff --git a/array_debug.go b/array_debug.go index 89a1702..2d7be18 100644 --- a/array_debug.go +++ b/array_debug.go @@ -701,41 +701,62 @@ func (v *serializationVerifier) arrayDataSlabEqual(expected, actual *ArrayDataSl ee := expected.elements[i] ae := actual.elements[i] - switch ee := ee.(type) { + err := v.compareStorable(ee, ae) + if err != nil { + return NewFatalError(fmt.Errorf("failed to compare element %d: %s", i, err)) + } + } - case SlabIDStorable: // Compare not-inlined element - if !v.compare(ee, ae) { - return NewFatalError(fmt.Errorf("element %d %+v is wrong, want %+v", i, ae, ee)) - } + return nil +} - ev, err := ee.StoredValue(v.storage) - if err != nil { - // Don't need to wrap error as external error because err is already categorized by SlabIDStorable.StoredValue(). - return err - } +func (v *serializationVerifier) compareStorable(expected, actual Storable) error { - return v.verifyValue(ev) + switch expected := expected.(type) { - case *ArrayDataSlab: // Compare inlined array - ae, ok := ae.(*ArrayDataSlab) - if !ok { - return NewFatalError(fmt.Errorf("expect element as inlined *ArrayDataSlab, actual %T", ae)) - } + case SlabIDStorable: // Compare not-inlined element + if !v.compare(expected, actual) { + return NewFatalError(fmt.Errorf("failed to compare SlabIDStorable: %+v is wrong, want %+v", actual, expected)) + } - return v.arrayDataSlabEqual(ee, ae) + actualValue, err := actual.StoredValue(v.storage) + if err != nil { + // Don't need to wrap error as external error because err is already categorized by SlabIDStorable.StoredValue(). 
+ return err + } - case *MapDataSlab: // Compare inlined map - ae, ok := ae.(*MapDataSlab) - if !ok { - return NewFatalError(fmt.Errorf("expect element as inlined *MapDataSlab, actual %T", ae)) - } + return v.verifyValue(actualValue) - return v.mapDataSlabEqual(ee, ae) + case *ArrayDataSlab: // Compare inlined array + actual, ok := actual.(*ArrayDataSlab) + if !ok { + return NewFatalError(fmt.Errorf("expect storable as inlined *ArrayDataSlab, actual %T", actual)) + } - default: - if !v.compare(ee, ae) { - return NewFatalError(fmt.Errorf("element %d %+v is wrong, want %+v", i, ae, ee)) - } + return v.arrayDataSlabEqual(expected, actual) + + case *MapDataSlab: // Compare inlined map + actual, ok := actual.(*MapDataSlab) + if !ok { + return NewFatalError(fmt.Errorf("expect storable as inlined *MapDataSlab, actual %T", actual)) + } + + return v.mapDataSlabEqual(expected, actual) + + case WrapperStorable: // Compare wrapper storable + actual, ok := actual.(WrapperStorable) + if !ok { + return NewFatalError(fmt.Errorf("expect storable as WrapperStorable, actual %T", actual)) + } + + unwrappedExpected := expected.UnwrapAtreeStorable() + unwrappedActual := actual.UnwrapAtreeStorable() + + return v.compareStorable(unwrappedExpected, unwrappedActual) + + default: + if !v.compare(expected, actual) { + return NewFatalError(fmt.Errorf("%+v is wrong, want %+v", actual, expected)) } } diff --git a/array_test.go b/array_test.go index 172d02c..533d97f 100644 --- a/array_test.go +++ b/array_test.go @@ -4991,7 +4991,7 @@ func TestArrayNestedStorables(t *testing.T) { for i := uint64(0); i < arraySize; i++ { s := strings.Repeat("a", int(i)) v := SomeValue{Value: NewStringValue(s)} - values[i] = v + values[i] = someValue{NewStringValue(s)} err := array.Append(v) require.NoError(t, err) @@ -5178,6 +5178,16 @@ func TestArrayLoadedValueIterator(t *testing.T) { typeInfo := testTypeInfo{42} address := Address{1, 2, 3, 4, 5, 6, 7, 8} + runTest := func(name string, f func(useWrapperValue bool) func(*testing.T)) { + for _, useWrapperValue := range []bool{false, true} { + if useWrapperValue { + name += ", use wrapper value" + } + + t.Run(name, f(useWrapperValue)) + } + } + t.Run("empty", func(t *testing.T) { storage := newTestPersistentStorage(t) @@ -5191,686 +5201,725 @@ func TestArrayLoadedValueIterator(t *testing.T) { testArrayLoadedElements(t, array, nil) }) - t.Run("root data slab with simple values", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root data slab with simple values", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - const arraySize = 3 - array, values := createArrayWithSimpleValues(t, storage, address, typeInfo, arraySize) + const arraySize = 3 + array, values := createArrayWithSimpleValues(t, storage, address, typeInfo, arraySize, useWrapperValue) - // parent array: 1 root data slab - require.Equal(t, 1, len(storage.deltas)) - require.Equal(t, 0, getArrayMetaDataSlabCount(storage)) + // parent array: 1 root data slab + require.Equal(t, 1, len(storage.deltas)) + require.Equal(t, 0, getArrayMetaDataSlabCount(storage)) - testArrayLoadedElements(t, array, values) + testArrayLoadedElements(t, array, values) + } }) - t.Run("root data slab with composite values", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root data slab with composite values", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - const arraySize = 
3 - array, values, _ := createArrayWithChildArrays(t, storage, address, typeInfo, arraySize) + const arraySize = 3 + array, values, _ := createArrayWithChildArrays(t, storage, address, typeInfo, arraySize, useWrapperValue) - // parent array: 1 root data slab - // nested composite elements: 1 root data slab for each - require.Equal(t, 1+arraySize, len(storage.deltas)) - require.Equal(t, 0, getArrayMetaDataSlabCount(storage)) + // parent array: 1 root data slab + // nested composite elements: 1 root data slab for each + require.Equal(t, 1+arraySize, len(storage.deltas)) + require.Equal(t, 0, getArrayMetaDataSlabCount(storage)) - testArrayLoadedElements(t, array, values) + testArrayLoadedElements(t, array, values) + } }) - t.Run("root data slab with composite values, unload composite element from front to back", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root data slab with composite values, unload composite element from front to back", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - const arraySize = 3 - array, values, childSlabIDs := createArrayWithChildArrays(t, storage, address, typeInfo, arraySize) + const arraySize = 3 + array, values, childSlabIDs := createArrayWithChildArrays(t, storage, address, typeInfo, arraySize, useWrapperValue) - // parent array: 1 root data slab - // nested composite elements: 1 root data slab for each - require.Equal(t, 1+arraySize, len(storage.deltas)) - require.Equal(t, 0, getArrayMetaDataSlabCount(storage)) + // parent array: 1 root data slab + // nested composite elements: 1 root data slab for each + require.Equal(t, 1+arraySize, len(storage.deltas)) + require.Equal(t, 0, getArrayMetaDataSlabCount(storage)) - testArrayLoadedElements(t, array, values) + testArrayLoadedElements(t, array, values) - // Unload composite element from front to back - for i := 0; i < len(values); i++ { - slabID := childSlabIDs[i] + // Unload composite element from front to back + for i := 0; i < len(values); i++ { + slabID := childSlabIDs[i] - err := storage.Remove(slabID) - require.NoError(t, err) + err := storage.Remove(slabID) + require.NoError(t, err) - expectedValues := values[i+1:] - testArrayLoadedElements(t, array, expectedValues) + expectedValues := values[i+1:] + testArrayLoadedElements(t, array, expectedValues) + } } }) - t.Run("root data slab with composite values, unload composite element from back to front", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root data slab with composite values, unload composite element from back to front", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - const arraySize = 3 - array, values, childSlabIDs := createArrayWithChildArrays(t, storage, address, typeInfo, arraySize) + const arraySize = 3 + array, values, childSlabIDs := createArrayWithChildArrays(t, storage, address, typeInfo, arraySize, useWrapperValue) - // parent array: 1 root data slab - // nested composite elements: 1 root data slab for each - require.Equal(t, 1+arraySize, len(storage.deltas)) - require.Equal(t, 0, getArrayMetaDataSlabCount(storage)) + // parent array: 1 root data slab + // nested composite elements: 1 root data slab for each + require.Equal(t, 1+arraySize, len(storage.deltas)) + require.Equal(t, 0, getArrayMetaDataSlabCount(storage)) - testArrayLoadedElements(t, array, values) + testArrayLoadedElements(t, array, values) - // Unload composite element from back 
to front - for i := len(values) - 1; i >= 0; i-- { - slabID := childSlabIDs[i] + // Unload composite element from back to front + for i := len(values) - 1; i >= 0; i-- { + slabID := childSlabIDs[i] - err := storage.Remove(slabID) - require.NoError(t, err) + err := storage.Remove(slabID) + require.NoError(t, err) - expectedValues := values[:i] - testArrayLoadedElements(t, array, expectedValues) + expectedValues := values[:i] + testArrayLoadedElements(t, array, expectedValues) + } } }) - t.Run("root data slab with composite values, unload composite element in the middle", func(t *testing.T) { - storage := newTestPersistentStorage(t) - - const arraySize = 3 - array, values, childSlabIDs := createArrayWithChildArrays(t, storage, address, typeInfo, arraySize) - - // parent array: 1 root data slab - // nested composite elements: 1 root data slab for each - require.Equal(t, 1+arraySize, len(storage.deltas)) - require.Equal(t, 0, getArrayMetaDataSlabCount(storage)) - - testArrayLoadedElements(t, array, values) - - // Unload composite element in the middle - unloadValueIndex := 1 - - slabID := childSlabIDs[unloadValueIndex] - - err := storage.Remove(slabID) - require.NoError(t, err) - - copy(values[unloadValueIndex:], values[unloadValueIndex+1:]) - values = values[:len(values)-1] + runTest("root data slab with composite values, unload composite element in the middle", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - testArrayLoadedElements(t, array, values) - }) + const arraySize = 3 + array, values, childSlabIDs := createArrayWithChildArrays(t, storage, address, typeInfo, arraySize, useWrapperValue) - t.Run("root data slab with composite values, unload composite elements during iteration", func(t *testing.T) { - storage := newTestPersistentStorage(t) + // parent array: 1 root data slab + // nested composite elements: 1 root data slab for each + require.Equal(t, 1+arraySize, len(storage.deltas)) + require.Equal(t, 0, getArrayMetaDataSlabCount(storage)) - const arraySize = 3 - array, values, childSlabIDs := createArrayWithChildArrays(t, storage, address, typeInfo, arraySize) + testArrayLoadedElements(t, array, values) - // parent array: 1 root data slab - // nested composite elements: 1 root data slab for each - require.Equal(t, 1+arraySize, len(storage.deltas)) - require.Equal(t, 0, getArrayMetaDataSlabCount(storage)) + // Unload composite element in the middle + unloadValueIndex := 1 - testArrayLoadedElements(t, array, values) + slabID := childSlabIDs[unloadValueIndex] - i := 0 - err := array.IterateReadOnlyLoadedValues(func(v Value) (bool, error) { - // At this point, iterator returned first element (v). - - // Remove all other nested composite elements (except first element) from storage. - for _, slabID := range childSlabIDs[1:] { - err := storage.Remove(slabID) - require.NoError(t, err) - } + err := storage.Remove(slabID) + require.NoError(t, err) - require.Equal(t, 0, i) - valueEqual(t, values[0], v) - i++ - return true, nil - }) + copy(values[unloadValueIndex:], values[unloadValueIndex+1:]) + values = values[:len(values)-1] - require.NoError(t, err) - require.Equal(t, 1, i) // Only first element is iterated because other elements are remove during iteration. 
+ testArrayLoadedElements(t, array, values) + } }) - t.Run("root data slab with simple and composite values, unload composite element", func(t *testing.T) { - const arraySize = 3 - - // Create an array with nested composite value at specified index - for childArrayIndex := 0; childArrayIndex < arraySize; childArrayIndex++ { + runTest("root data slab with composite values, unload composite elements during iteration", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { storage := newTestPersistentStorage(t) - array, values, childSlabID := createArrayWithSimpleAndChildArrayValues(t, storage, address, typeInfo, arraySize, childArrayIndex) + const arraySize = 3 + array, values, childSlabIDs := createArrayWithChildArrays(t, storage, address, typeInfo, arraySize, useWrapperValue) // parent array: 1 root data slab - // nested composite element: 1 root data slab - require.Equal(t, 2, len(storage.deltas)) + // nested composite elements: 1 root data slab for each + require.Equal(t, 1+arraySize, len(storage.deltas)) require.Equal(t, 0, getArrayMetaDataSlabCount(storage)) testArrayLoadedElements(t, array, values) - // Unload composite element - err := storage.Remove(childSlabID) - require.NoError(t, err) + i := 0 + err := array.IterateReadOnlyLoadedValues(func(v Value) (bool, error) { + // At this point, iterator returned first element (v). - copy(values[childArrayIndex:], values[childArrayIndex+1:]) - values = values[:len(values)-1] + // Remove all other nested composite elements (except first element) from storage. + for _, slabID := range childSlabIDs[1:] { + err := storage.Remove(slabID) + require.NoError(t, err) + } - testArrayLoadedElements(t, array, values) + require.Equal(t, 0, i) + valueEqual(t, values[0], v) + i++ + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, 1, i) // Only first element is iterated because other elements are remove during iteration. 
} }) - t.Run("root metadata slab with simple values", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root data slab with simple and composite values, unload composite element", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + const arraySize = 3 - const arraySize = 20 - array, values := createArrayWithSimpleValues(t, storage, address, typeInfo, arraySize) + // Create an array with nested composite value at specified index + for childArrayIndex := 0; childArrayIndex < arraySize; childArrayIndex++ { + storage := newTestPersistentStorage(t) - // parent array: 1 root metadata slab, 2 data slabs - require.Equal(t, 3, len(storage.deltas)) - require.Equal(t, 1, getArrayMetaDataSlabCount(storage)) + array, values, childSlabID := createArrayWithSimpleAndChildArrayValues(t, storage, address, typeInfo, arraySize, childArrayIndex, useWrapperValue) - testArrayLoadedElements(t, array, values) - }) + // parent array: 1 root data slab + // nested composite element: 1 root data slab + require.Equal(t, 2, len(storage.deltas)) + require.Equal(t, 0, getArrayMetaDataSlabCount(storage)) - t.Run("root metadata slab with composite values", func(t *testing.T) { - storage := newTestPersistentStorage(t) + testArrayLoadedElements(t, array, values) - const arraySize = 20 - array, values, _ := createArrayWithChildArrays(t, storage, address, typeInfo, arraySize) + // Unload composite element + err := storage.Remove(childSlabID) + require.NoError(t, err) - // parent array: 1 root metadata slab, 2 data slabs - // nested composite value element: 1 root data slab for each - require.Equal(t, 3+arraySize, len(storage.deltas)) - require.Equal(t, 1, getArrayMetaDataSlabCount(storage)) + copy(values[childArrayIndex:], values[childArrayIndex+1:]) + values = values[:len(values)-1] - testArrayLoadedElements(t, array, values) + testArrayLoadedElements(t, array, values) + } + } }) - t.Run("root metadata slab with composite values, unload composite element from front to back", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root metadata slab with simple values", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - const arraySize = 20 - array, values, childSlabIDs := createArrayWithChildArrays(t, storage, address, typeInfo, arraySize) + const arraySize = 20 + array, values := createArrayWithSimpleValues(t, storage, address, typeInfo, arraySize, useWrapperValue) - // parent array: 1 root metadata slab, 2 data slabs - // nested composite value element: 1 root data slab for each - require.Equal(t, 3+arraySize, len(storage.deltas)) - require.Equal(t, 1, getArrayMetaDataSlabCount(storage)) + // parent array: 1 root metadata slab, 2 data slabs + require.Equal(t, 3, len(storage.deltas)) + require.Equal(t, 1, getArrayMetaDataSlabCount(storage)) - testArrayLoadedElements(t, array, values) + testArrayLoadedElements(t, array, values) + } + }) - // Unload composite element from front to back - for i := 0; i < len(childSlabIDs); i++ { - slabID := childSlabIDs[i] + runTest("root metadata slab with composite values", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - err := storage.Remove(slabID) - require.NoError(t, err) + const arraySize = 20 + array, values, _ := createArrayWithChildArrays(t, storage, address, typeInfo, arraySize, useWrapperValue) - expectedValues := values[i+1:] - testArrayLoadedElements(t, array, 
expectedValues) + // parent array: 1 root metadata slab, 2 data slabs + // nested composite value element: 1 root data slab for each + require.Equal(t, 3+arraySize, len(storage.deltas)) + require.Equal(t, 1, getArrayMetaDataSlabCount(storage)) + + testArrayLoadedElements(t, array, values) } }) - t.Run("root metadata slab with composite values, unload composite element from back to front", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root metadata slab with composite values, unload composite element from front to back", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - const arraySize = 20 - array, values, childSlabIDs := createArrayWithChildArrays(t, storage, address, typeInfo, arraySize) + const arraySize = 20 + array, values, childSlabIDs := createArrayWithChildArrays(t, storage, address, typeInfo, arraySize, useWrapperValue) - // parent array: 1 root metadata slab, 2 data slabs - // nested composite value element: 1 root data slab for each - require.Equal(t, 3+arraySize, len(storage.deltas)) - require.Equal(t, 1, getArrayMetaDataSlabCount(storage)) + // parent array: 1 root metadata slab, 2 data slabs + // nested composite value element: 1 root data slab for each + require.Equal(t, 3+arraySize, len(storage.deltas)) + require.Equal(t, 1, getArrayMetaDataSlabCount(storage)) - testArrayLoadedElements(t, array, values) + testArrayLoadedElements(t, array, values) - // Unload composite element from back to front - for i := len(childSlabIDs) - 1; i >= 0; i-- { - slabID := childSlabIDs[i] + // Unload composite element from front to back + for i := 0; i < len(childSlabIDs); i++ { + slabID := childSlabIDs[i] - err := storage.Remove(slabID) - require.NoError(t, err) + err := storage.Remove(slabID) + require.NoError(t, err) - expectedValues := values[:i] - testArrayLoadedElements(t, array, expectedValues) + expectedValues := values[i+1:] + testArrayLoadedElements(t, array, expectedValues) + } } }) - t.Run("root metadata slab with composite values, unload composite element in the middle", func(t *testing.T) { - storage := newTestPersistentStorage(t) - - const arraySize = 20 - array, values, childSlabIDs := createArrayWithChildArrays(t, storage, address, typeInfo, arraySize) - - // parent array: 1 root metadata slab, 2 data slabs - // nested composite value element: 1 root data slab for each - require.Equal(t, 3+arraySize, len(storage.deltas)) - require.Equal(t, 1, getArrayMetaDataSlabCount(storage)) + runTest("root metadata slab with composite values, unload composite element from back to front", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - testArrayLoadedElements(t, array, values) + const arraySize = 20 + array, values, childSlabIDs := createArrayWithChildArrays(t, storage, address, typeInfo, arraySize, useWrapperValue) - // Unload composite element in the middle - for _, index := range []int{4, 14} { + // parent array: 1 root metadata slab, 2 data slabs + // nested composite value element: 1 root data slab for each + require.Equal(t, 3+arraySize, len(storage.deltas)) + require.Equal(t, 1, getArrayMetaDataSlabCount(storage)) - slabID := childSlabIDs[index] + testArrayLoadedElements(t, array, values) - err := storage.Remove(slabID) - require.NoError(t, err) + // Unload composite element from back to front + for i := len(childSlabIDs) - 1; i >= 0; i-- { + slabID := childSlabIDs[i] - copy(values[index:], values[index+1:]) - 
values = values[:len(values)-1] + err := storage.Remove(slabID) + require.NoError(t, err) - testArrayLoadedElements(t, array, values) + expectedValues := values[:i] + testArrayLoadedElements(t, array, expectedValues) + } } }) - t.Run("root metadata slab with simple and composite values, unload composite element", func(t *testing.T) { - const arraySize = 20 - - // Create an array with composite value at specified index. - for childArrayIndex := 0; childArrayIndex < arraySize; childArrayIndex++ { + runTest("root metadata slab with composite values, unload composite element in the middle", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { storage := newTestPersistentStorage(t) - array, values, childSlabID := createArrayWithSimpleAndChildArrayValues(t, storage, address, typeInfo, arraySize, childArrayIndex) + const arraySize = 20 + array, values, childSlabIDs := createArrayWithChildArrays(t, storage, address, typeInfo, arraySize, useWrapperValue) // parent array: 1 root metadata slab, 2 data slabs // nested composite value element: 1 root data slab for each - require.Equal(t, 3+1, len(storage.deltas)) + require.Equal(t, 3+arraySize, len(storage.deltas)) require.Equal(t, 1, getArrayMetaDataSlabCount(storage)) testArrayLoadedElements(t, array, values) - // Unload composite value - err := storage.Remove(childSlabID) - require.NoError(t, err) + // Unload composite element in the middle + for _, index := range []int{4, 14} { - copy(values[childArrayIndex:], values[childArrayIndex+1:]) - values = values[:len(values)-1] + slabID := childSlabIDs[index] - testArrayLoadedElements(t, array, values) - } - }) + err := storage.Remove(slabID) + require.NoError(t, err) - t.Run("root metadata slab, unload data slab from front to back", func(t *testing.T) { - storage := newTestPersistentStorage(t) + copy(values[index:], values[index+1:]) + values = values[:len(values)-1] - const arraySize = 30 - array, values := createArrayWithSimpleValues(t, storage, address, typeInfo, arraySize) + testArrayLoadedElements(t, array, values) + } + } + }) - // parent array (2 levels): 1 root metadata slab, 3 data slabs - require.Equal(t, 4, len(storage.deltas)) - require.Equal(t, 1, getArrayMetaDataSlabCount(storage)) + runTest("root metadata slab with simple and composite values, unload composite element", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + const arraySize = 20 - testArrayLoadedElements(t, array, values) + // Create an array with composite value at specified index. 
+ for childArrayIndex := 0; childArrayIndex < arraySize; childArrayIndex++ { + storage := newTestPersistentStorage(t) - metaDataSlab, ok := array.root.(*ArrayMetaDataSlab) - require.True(t, ok) + array, values, childSlabID := createArrayWithSimpleAndChildArrayValues(t, storage, address, typeInfo, arraySize, childArrayIndex, useWrapperValue) - // Unload data slabs from front to back - for i := 0; i < len(metaDataSlab.childrenHeaders); i++ { + // parent array: 1 root metadata slab, 2 data slabs + // nested composite value element: 1 root data slab for each + require.Equal(t, 3+1, len(storage.deltas)) + require.Equal(t, 1, getArrayMetaDataSlabCount(storage)) - childHeader := metaDataSlab.childrenHeaders[i] + testArrayLoadedElements(t, array, values) - err := storage.Remove(childHeader.slabID) - require.NoError(t, err) + // Unload composite value + err := storage.Remove(childSlabID) + require.NoError(t, err) - values = values[childHeader.count:] + copy(values[childArrayIndex:], values[childArrayIndex+1:]) + values = values[:len(values)-1] - testArrayLoadedElements(t, array, values) + testArrayLoadedElements(t, array, values) + } } }) - t.Run("root metadata slab, unload data slab from back to front", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root metadata slab, unload data slab from front to back", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - const arraySize = 30 - array, values := createArrayWithSimpleValues(t, storage, address, typeInfo, arraySize) + const arraySize = 30 + array, values := createArrayWithSimpleValues(t, storage, address, typeInfo, arraySize, useWrapperValue) - // parent array (2 levels): 1 root metadata slab, 3 data slabs - require.Equal(t, 4, len(storage.deltas)) - require.Equal(t, 1, getArrayMetaDataSlabCount(storage)) + // parent array (2 levels): 1 root metadata slab, 3 data slabs + require.Equal(t, 4, len(storage.deltas)) + require.Equal(t, 1, getArrayMetaDataSlabCount(storage)) - testArrayLoadedElements(t, array, values) + testArrayLoadedElements(t, array, values) - metaDataSlab, ok := array.root.(*ArrayMetaDataSlab) - require.True(t, ok) + metaDataSlab, ok := array.root.(*ArrayMetaDataSlab) + require.True(t, ok) - // Unload data slabs from back to front - for i := len(metaDataSlab.childrenHeaders) - 1; i >= 0; i-- { + // Unload data slabs from front to back + for i := 0; i < len(metaDataSlab.childrenHeaders); i++ { - childHeader := metaDataSlab.childrenHeaders[i] + childHeader := metaDataSlab.childrenHeaders[i] - err := storage.Remove(childHeader.slabID) - require.NoError(t, err) + err := storage.Remove(childHeader.slabID) + require.NoError(t, err) - values = values[:len(values)-int(childHeader.count)] + values = values[childHeader.count:] - testArrayLoadedElements(t, array, values) + testArrayLoadedElements(t, array, values) + } } }) - t.Run("root metadata slab, unload data slab in the middle", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root metadata slab, unload data slab from back to front", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - const arraySize = 30 - array, values := createArrayWithSimpleValues(t, storage, address, typeInfo, arraySize) + const arraySize = 30 + array, values := createArrayWithSimpleValues(t, storage, address, typeInfo, arraySize, useWrapperValue) - // parent array (2 levels): 1 root metadata slab, 3 data slabs - require.Equal(t, 4, 
len(storage.deltas)) - require.Equal(t, 1, getArrayMetaDataSlabCount(storage)) + // parent array (2 levels): 1 root metadata slab, 3 data slabs + require.Equal(t, 4, len(storage.deltas)) + require.Equal(t, 1, getArrayMetaDataSlabCount(storage)) - testArrayLoadedElements(t, array, values) + testArrayLoadedElements(t, array, values) - metaDataSlab, ok := array.root.(*ArrayMetaDataSlab) - require.True(t, ok) + metaDataSlab, ok := array.root.(*ArrayMetaDataSlab) + require.True(t, ok) - require.True(t, len(metaDataSlab.childrenHeaders) > 2) + // Unload data slabs from back to front + for i := len(metaDataSlab.childrenHeaders) - 1; i >= 0; i-- { - index := 1 - childHeader := metaDataSlab.childrenHeaders[index] + childHeader := metaDataSlab.childrenHeaders[i] - err := storage.Remove(childHeader.slabID) - require.NoError(t, err) + err := storage.Remove(childHeader.slabID) + require.NoError(t, err) - copy(values[metaDataSlab.childrenCountSum[index-1]:], values[metaDataSlab.childrenCountSum[index]:]) - values = values[:array.Count()-uint64(childHeader.count)] + values = values[:len(values)-int(childHeader.count)] - testArrayLoadedElements(t, array, values) + testArrayLoadedElements(t, array, values) + } + } }) - t.Run("root metadata slab, unload non-root metadata slab from front to back", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root metadata slab, unload data slab in the middle", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - const arraySize = 250 - array, values := createArrayWithSimpleValues(t, storage, address, typeInfo, arraySize) + const arraySize = 30 + array, values := createArrayWithSimpleValues(t, storage, address, typeInfo, arraySize, useWrapperValue) - // parent array (3 levels): 1 root metadata slab, 2 non-root metadata slabs, n data slabs - require.Equal(t, 3, getArrayMetaDataSlabCount(storage)) + // parent array (2 levels): 1 root metadata slab, 3 data slabs + require.Equal(t, 4, len(storage.deltas)) + require.Equal(t, 1, getArrayMetaDataSlabCount(storage)) - rootMetaDataSlab, ok := array.root.(*ArrayMetaDataSlab) - require.True(t, ok) + testArrayLoadedElements(t, array, values) - // Unload non-root metadata slabs from front to back - for i := 0; i < len(rootMetaDataSlab.childrenHeaders); i++ { + metaDataSlab, ok := array.root.(*ArrayMetaDataSlab) + require.True(t, ok) - childHeader := rootMetaDataSlab.childrenHeaders[i] + require.True(t, len(metaDataSlab.childrenHeaders) > 2) + + index := 1 + childHeader := metaDataSlab.childrenHeaders[index] err := storage.Remove(childHeader.slabID) require.NoError(t, err) - values = values[childHeader.count:] + copy(values[metaDataSlab.childrenCountSum[index-1]:], values[metaDataSlab.childrenCountSum[index]:]) + values = values[:array.Count()-uint64(childHeader.count)] testArrayLoadedElements(t, array, values) } }) - t.Run("root metadata slab, unload non-root metadata slab from back to front", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root metadata slab, unload non-root metadata slab from front to back", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - const arraySize = 250 - array, values := createArrayWithSimpleValues(t, storage, address, typeInfo, arraySize) + const arraySize = 250 + array, values := createArrayWithSimpleValues(t, storage, address, typeInfo, arraySize, useWrapperValue) - // parent array (3 levels): 1 root metadata slab, 2 
child metadata slabs, n data slabs - require.Equal(t, 3, getArrayMetaDataSlabCount(storage)) + // parent array (3 levels): 1 root metadata slab, 2 non-root metadata slabs, n data slabs + require.Equal(t, 3, getArrayMetaDataSlabCount(storage)) - rootMetaDataSlab, ok := array.root.(*ArrayMetaDataSlab) - require.True(t, ok) + rootMetaDataSlab, ok := array.root.(*ArrayMetaDataSlab) + require.True(t, ok) - // Unload non-root metadata slabs from back to front - for i := len(rootMetaDataSlab.childrenHeaders) - 1; i >= 0; i-- { + // Unload non-root metadata slabs from front to back + for i := 0; i < len(rootMetaDataSlab.childrenHeaders); i++ { - childHeader := rootMetaDataSlab.childrenHeaders[i] + childHeader := rootMetaDataSlab.childrenHeaders[i] - err := storage.Remove(childHeader.slabID) - require.NoError(t, err) + err := storage.Remove(childHeader.slabID) + require.NoError(t, err) - values = values[childHeader.count:] + values = values[childHeader.count:] - testArrayLoadedElements(t, array, values) + testArrayLoadedElements(t, array, values) + } } }) - t.Run("root metadata slab with composite values, unload random composite value", func(t *testing.T) { + runTest("root metadata slab, unload non-root metadata slab from back to front", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - storage := newTestPersistentStorage(t) + const arraySize = 250 + array, values := createArrayWithSimpleValues(t, storage, address, typeInfo, arraySize, useWrapperValue) - const arraySize = 500 - array, values, childSlabIDs := createArrayWithChildArrays(t, storage, address, typeInfo, arraySize) + // parent array (3 levels): 1 root metadata slab, 2 child metadata slabs, n data slabs + require.Equal(t, 3, getArrayMetaDataSlabCount(storage)) - // parent array (3 levels): 1 root metadata slab, n non-root metadata slabs, n data slabs - // nested composite elements: 1 root data slab for each - require.True(t, len(storage.deltas) > 1+arraySize) - require.True(t, getArrayMetaDataSlabCount(storage) > 1) + rootMetaDataSlab, ok := array.root.(*ArrayMetaDataSlab) + require.True(t, ok) - testArrayLoadedElements(t, array, values) + // Unload non-root metadata slabs from back to front + for i := len(rootMetaDataSlab.childrenHeaders) - 1; i >= 0; i-- { - r := newRand(t) + childHeader := rootMetaDataSlab.childrenHeaders[i] - // Unload random composite element - for len(values) > 0 { + err := storage.Remove(childHeader.slabID) + require.NoError(t, err) - i := r.Intn(len(values)) + values = values[childHeader.count:] - slabID := childSlabIDs[i] + testArrayLoadedElements(t, array, values) + } + } + }) - err := storage.Remove(slabID) - require.NoError(t, err) + runTest("root metadata slab with composite values, unload random composite value", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - copy(values[i:], values[i+1:]) - values = values[:len(values)-1] + const arraySize = 500 + array, values, childSlabIDs := createArrayWithChildArrays(t, storage, address, typeInfo, arraySize, useWrapperValue) - copy(childSlabIDs[i:], childSlabIDs[i+1:]) - childSlabIDs = childSlabIDs[:len(childSlabIDs)-1] + // parent array (3 levels): 1 root metadata slab, n non-root metadata slabs, n data slabs + // nested composite elements: 1 root data slab for each + require.True(t, len(storage.deltas) > 1+arraySize) + require.True(t, getArrayMetaDataSlabCount(storage) > 1) testArrayLoadedElements(t, array, values) - } - }) - 
t.Run("root metadata slab with composite values, unload random data slab", func(t *testing.T) { + r := newRand(t) - storage := newTestPersistentStorage(t) + // Unload random composite element + for len(values) > 0 { - const arraySize = 500 - array, values, _ := createArrayWithChildArrays(t, storage, address, typeInfo, arraySize) + i := r.Intn(len(values)) - // parent array (3 levels): 1 root metadata slab, n non-root metadata slabs, n data slabs - // nested composite elements: 1 root data slab for each - require.True(t, len(storage.deltas) > 1+arraySize) - require.True(t, getArrayMetaDataSlabCount(storage) > 1) + slabID := childSlabIDs[i] - testArrayLoadedElements(t, array, values) - - rootMetaDataSlab, ok := array.root.(*ArrayMetaDataSlab) - require.True(t, ok) + err := storage.Remove(slabID) + require.NoError(t, err) - type slabInfo struct { - id SlabID - startIndex int - count int - } + copy(values[i:], values[i+1:]) + values = values[:len(values)-1] - count := 0 - var dataSlabInfos []*slabInfo - for _, mheader := range rootMetaDataSlab.childrenHeaders { - nonrootMetaDataSlab, ok := storage.deltas[mheader.slabID].(*ArrayMetaDataSlab) - require.True(t, ok) + copy(childSlabIDs[i:], childSlabIDs[i+1:]) + childSlabIDs = childSlabIDs[:len(childSlabIDs)-1] - for _, h := range nonrootMetaDataSlab.childrenHeaders { - dataSlabInfo := &slabInfo{id: h.slabID, startIndex: count, count: int(h.count)} - dataSlabInfos = append(dataSlabInfos, dataSlabInfo) - count += int(h.count) + testArrayLoadedElements(t, array, values) } } + }) - r := newRand(t) + runTest("root metadata slab with composite values, unload random data slab", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - // Unload random data slab. - for len(dataSlabInfos) > 0 { - indexToUnload := r.Intn(len(dataSlabInfos)) + const arraySize = 500 + array, values, _ := createArrayWithChildArrays(t, storage, address, typeInfo, arraySize, useWrapperValue) - slabInfoToUnload := dataSlabInfos[indexToUnload] + // parent array (3 levels): 1 root metadata slab, n non-root metadata slabs, n data slabs + // nested composite elements: 1 root data slab for each + require.True(t, len(storage.deltas) > 1+arraySize) + require.True(t, getArrayMetaDataSlabCount(storage) > 1) - // Update startIndex for all data slabs after indexToUnload. - for i := indexToUnload + 1; i < len(dataSlabInfos); i++ { - dataSlabInfos[i].startIndex -= slabInfoToUnload.count + testArrayLoadedElements(t, array, values) + + rootMetaDataSlab, ok := array.root.(*ArrayMetaDataSlab) + require.True(t, ok) + + type slabInfo struct { + id SlabID + startIndex int + count int } - // Remove slabInfo to be unloaded from dataSlabInfos. 
- copy(dataSlabInfos[indexToUnload:], dataSlabInfos[indexToUnload+1:]) - dataSlabInfos = dataSlabInfos[:len(dataSlabInfos)-1] + count := 0 + var dataSlabInfos []*slabInfo + for _, mheader := range rootMetaDataSlab.childrenHeaders { + nonrootMetaDataSlab, ok := storage.deltas[mheader.slabID].(*ArrayMetaDataSlab) + require.True(t, ok) - err := storage.Remove(slabInfoToUnload.id) - require.NoError(t, err) + for _, h := range nonrootMetaDataSlab.childrenHeaders { + dataSlabInfo := &slabInfo{id: h.slabID, startIndex: count, count: int(h.count)} + dataSlabInfos = append(dataSlabInfos, dataSlabInfo) + count += int(h.count) + } + } - copy(values[slabInfoToUnload.startIndex:], values[slabInfoToUnload.startIndex+slabInfoToUnload.count:]) - values = values[:len(values)-slabInfoToUnload.count] + r := newRand(t) - testArrayLoadedElements(t, array, values) - } + // Unload random data slab. + for len(dataSlabInfos) > 0 { + indexToUnload := r.Intn(len(dataSlabInfos)) - require.Equal(t, 0, len(values)) - }) + slabInfoToUnload := dataSlabInfos[indexToUnload] - t.Run("root metadata slab with composite values, unload random slab", func(t *testing.T) { + // Update startIndex for all data slabs after indexToUnload. + for i := indexToUnload + 1; i < len(dataSlabInfos); i++ { + dataSlabInfos[i].startIndex -= slabInfoToUnload.count + } - storage := newTestPersistentStorage(t) + // Remove slabInfo to be unloaded from dataSlabInfos. + copy(dataSlabInfos[indexToUnload:], dataSlabInfos[indexToUnload+1:]) + dataSlabInfos = dataSlabInfos[:len(dataSlabInfos)-1] - const arraySize = 500 - array, values, _ := createArrayWithChildArrays(t, storage, address, typeInfo, arraySize) + err := storage.Remove(slabInfoToUnload.id) + require.NoError(t, err) - // parent array (3 levels): 1 root metadata slab, n non-root metadata slabs, n data slabs - // nested composite elements: 1 root data slab for each - require.True(t, len(storage.deltas) > 1+arraySize) - require.True(t, getArrayMetaDataSlabCount(storage) > 1) + copy(values[slabInfoToUnload.startIndex:], values[slabInfoToUnload.startIndex+slabInfoToUnload.count:]) + values = values[:len(values)-slabInfoToUnload.count] - testArrayLoadedElements(t, array, values) + testArrayLoadedElements(t, array, values) + } - type slabInfo struct { - id SlabID - startIndex int - count int - children []*slabInfo + require.Equal(t, 0, len(values)) } + }) - rootMetaDataSlab, ok := array.root.(*ArrayMetaDataSlab) - require.True(t, ok) + runTest("root metadata slab with composite values, unload random slab", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) + + const arraySize = 500 + array, values, _ := createArrayWithChildArrays(t, storage, address, typeInfo, arraySize, useWrapperValue) - var dataSlabCount, metadataSlabCount int - nonrootMetadataSlabInfos := make([]*slabInfo, len(rootMetaDataSlab.childrenHeaders)) - for i, mheader := range rootMetaDataSlab.childrenHeaders { + // parent array (3 levels): 1 root metadata slab, n non-root metadata slabs, n data slabs + // nested composite elements: 1 root data slab for each + require.True(t, len(storage.deltas) > 1+arraySize) + require.True(t, getArrayMetaDataSlabCount(storage) > 1) - nonrootMetadataSlabInfo := &slabInfo{ - id: mheader.slabID, - startIndex: metadataSlabCount, - count: int(mheader.count), + testArrayLoadedElements(t, array, values) + + type slabInfo struct { + id SlabID + startIndex int + count int + children []*slabInfo } - metadataSlabCount += int(mheader.count) - 
nonrootMetadataSlab, ok := storage.deltas[mheader.slabID].(*ArrayMetaDataSlab) + rootMetaDataSlab, ok := array.root.(*ArrayMetaDataSlab) require.True(t, ok) - children := make([]*slabInfo, len(nonrootMetadataSlab.childrenHeaders)) - for i, h := range nonrootMetadataSlab.childrenHeaders { - children[i] = &slabInfo{ - id: h.slabID, - startIndex: dataSlabCount, - count: int(h.count), + var dataSlabCount, metadataSlabCount int + nonrootMetadataSlabInfos := make([]*slabInfo, len(rootMetaDataSlab.childrenHeaders)) + for i, mheader := range rootMetaDataSlab.childrenHeaders { + + nonrootMetadataSlabInfo := &slabInfo{ + id: mheader.slabID, + startIndex: metadataSlabCount, + count: int(mheader.count), } - dataSlabCount += int(h.count) - } + metadataSlabCount += int(mheader.count) - nonrootMetadataSlabInfo.children = children - nonrootMetadataSlabInfos[i] = nonrootMetadataSlabInfo - } + nonrootMetadataSlab, ok := storage.deltas[mheader.slabID].(*ArrayMetaDataSlab) + require.True(t, ok) - r := newRand(t) + children := make([]*slabInfo, len(nonrootMetadataSlab.childrenHeaders)) + for i, h := range nonrootMetadataSlab.childrenHeaders { + children[i] = &slabInfo{ + id: h.slabID, + startIndex: dataSlabCount, + count: int(h.count), + } + dataSlabCount += int(h.count) + } - const ( - metadataSlabType int = iota - dataSlabType - maxSlabType - ) + nonrootMetadataSlabInfo.children = children + nonrootMetadataSlabInfos[i] = nonrootMetadataSlabInfo + } + + r := newRand(t) - for len(nonrootMetadataSlabInfos) > 0 { + const ( + metadataSlabType int = iota + dataSlabType + maxSlabType + ) - var slabInfoToBeRemoved *slabInfo - var isLastSlab bool + for len(nonrootMetadataSlabInfos) > 0 { - // Unload random metadata or data slab. - switch r.Intn(maxSlabType) { + var slabInfoToBeRemoved *slabInfo + var isLastSlab bool - case metadataSlabType: - // Unload metadata slab at random index. - metadataSlabIndex := r.Intn(len(nonrootMetadataSlabInfos)) + // Unload random metadata or data slab. + switch r.Intn(maxSlabType) { - isLastSlab = metadataSlabIndex == len(nonrootMetadataSlabInfos)-1 + case metadataSlabType: + // Unload metadata slab at random index. + metadataSlabIndex := r.Intn(len(nonrootMetadataSlabInfos)) - slabInfoToBeRemoved = nonrootMetadataSlabInfos[metadataSlabIndex] + isLastSlab = metadataSlabIndex == len(nonrootMetadataSlabInfos)-1 - count := slabInfoToBeRemoved.count + slabInfoToBeRemoved = nonrootMetadataSlabInfos[metadataSlabIndex] - // Update startIndex for subsequence metadata and data slabs. - for i := metadataSlabIndex + 1; i < len(nonrootMetadataSlabInfos); i++ { - nonrootMetadataSlabInfos[i].startIndex -= count + count := slabInfoToBeRemoved.count - for j := 0; j < len(nonrootMetadataSlabInfos[i].children); j++ { - nonrootMetadataSlabInfos[i].children[j].startIndex -= count + // Update startIndex for subsequence metadata and data slabs. 
+ for i := metadataSlabIndex + 1; i < len(nonrootMetadataSlabInfos); i++ { + nonrootMetadataSlabInfos[i].startIndex -= count + + for j := 0; j < len(nonrootMetadataSlabInfos[i].children); j++ { + nonrootMetadataSlabInfos[i].children[j].startIndex -= count + } } - } - copy(nonrootMetadataSlabInfos[metadataSlabIndex:], nonrootMetadataSlabInfos[metadataSlabIndex+1:]) - nonrootMetadataSlabInfos = nonrootMetadataSlabInfos[:len(nonrootMetadataSlabInfos)-1] + copy(nonrootMetadataSlabInfos[metadataSlabIndex:], nonrootMetadataSlabInfos[metadataSlabIndex+1:]) + nonrootMetadataSlabInfos = nonrootMetadataSlabInfos[:len(nonrootMetadataSlabInfos)-1] - case dataSlabType: - // Unload data slab at randome index. - metadataSlabIndex := r.Intn(len(nonrootMetadataSlabInfos)) + case dataSlabType: + // Unload data slab at randome index. + metadataSlabIndex := r.Intn(len(nonrootMetadataSlabInfos)) - metaSlabInfo := nonrootMetadataSlabInfos[metadataSlabIndex] + metaSlabInfo := nonrootMetadataSlabInfos[metadataSlabIndex] - dataSlabIndex := r.Intn(len(metaSlabInfo.children)) + dataSlabIndex := r.Intn(len(metaSlabInfo.children)) - slabInfoToBeRemoved = metaSlabInfo.children[dataSlabIndex] + slabInfoToBeRemoved = metaSlabInfo.children[dataSlabIndex] - isLastSlab = (metadataSlabIndex == len(nonrootMetadataSlabInfos)-1) && - (dataSlabIndex == len(metaSlabInfo.children)-1) + isLastSlab = (metadataSlabIndex == len(nonrootMetadataSlabInfos)-1) && + (dataSlabIndex == len(metaSlabInfo.children)-1) - count := slabInfoToBeRemoved.count + count := slabInfoToBeRemoved.count - // Update startIndex for subsequence data slabs. - for i := dataSlabIndex + 1; i < len(metaSlabInfo.children); i++ { - metaSlabInfo.children[i].startIndex -= count - } + // Update startIndex for subsequence data slabs. + for i := dataSlabIndex + 1; i < len(metaSlabInfo.children); i++ { + metaSlabInfo.children[i].startIndex -= count + } - copy(metaSlabInfo.children[dataSlabIndex:], metaSlabInfo.children[dataSlabIndex+1:]) - metaSlabInfo.children = metaSlabInfo.children[:len(metaSlabInfo.children)-1] + copy(metaSlabInfo.children[dataSlabIndex:], metaSlabInfo.children[dataSlabIndex+1:]) + metaSlabInfo.children = metaSlabInfo.children[:len(metaSlabInfo.children)-1] - metaSlabInfo.count -= count + metaSlabInfo.count -= count - // Update startIndex for all subsequence metadata slabs. - for i := metadataSlabIndex + 1; i < len(nonrootMetadataSlabInfos); i++ { - nonrootMetadataSlabInfos[i].startIndex -= count + // Update startIndex for all subsequence metadata slabs. 
+ for i := metadataSlabIndex + 1; i < len(nonrootMetadataSlabInfos); i++ { + nonrootMetadataSlabInfos[i].startIndex -= count - for j := 0; j < len(nonrootMetadataSlabInfos[i].children); j++ { - nonrootMetadataSlabInfos[i].children[j].startIndex -= count + for j := 0; j < len(nonrootMetadataSlabInfos[i].children); j++ { + nonrootMetadataSlabInfos[i].children[j].startIndex -= count + } } - } - if len(metaSlabInfo.children) == 0 { - copy(nonrootMetadataSlabInfos[metadataSlabIndex:], nonrootMetadataSlabInfos[metadataSlabIndex+1:]) - nonrootMetadataSlabInfos = nonrootMetadataSlabInfos[:len(nonrootMetadataSlabInfos)-1] + if len(metaSlabInfo.children) == 0 { + copy(nonrootMetadataSlabInfos[metadataSlabIndex:], nonrootMetadataSlabInfos[metadataSlabIndex+1:]) + nonrootMetadataSlabInfos = nonrootMetadataSlabInfos[:len(nonrootMetadataSlabInfos)-1] + } } - } - err := storage.Remove(slabInfoToBeRemoved.id) - require.NoError(t, err) + err := storage.Remove(slabInfoToBeRemoved.id) + require.NoError(t, err) - if isLastSlab { - values = values[:slabInfoToBeRemoved.startIndex] - } else { - copy(values[slabInfoToBeRemoved.startIndex:], values[slabInfoToBeRemoved.startIndex+slabInfoToBeRemoved.count:]) - values = values[:len(values)-slabInfoToBeRemoved.count] + if isLastSlab { + values = values[:slabInfoToBeRemoved.startIndex] + } else { + copy(values[slabInfoToBeRemoved.startIndex:], values[slabInfoToBeRemoved.startIndex+slabInfoToBeRemoved.count:]) + values = values[:len(values)-slabInfoToBeRemoved.count] + } + + testArrayLoadedElements(t, array, values) } - testArrayLoadedElements(t, array, values) + require.Equal(t, 0, len(values)) } - - require.Equal(t, 0, len(values)) }) } @@ -5880,6 +5929,7 @@ func createArrayWithSimpleValues( address Address, typeInfo TypeInfo, arraySize int, + useWrapperValue bool, ) (*Array, []Value) { // Create parent array @@ -5889,10 +5939,19 @@ func createArrayWithSimpleValues( values := make([]Value, arraySize) r := rune('a') for i := 0; i < arraySize; i++ { - values[i] = NewStringValue(strings.Repeat(string(r), 20)) + s := NewStringValue(strings.Repeat(string(r), 20)) - err := array.Append(values[i]) - require.NoError(t, err) + if useWrapperValue { + err := array.Append(SomeValue{s}) + require.NoError(t, err) + + values[i] = someValue{s} + } else { + err := array.Append(s) + require.NoError(t, err) + + values[i] = s + } } return array, values @@ -5904,6 +5963,7 @@ func createArrayWithChildArrays( address Address, typeInfo TypeInfo, arraySize int, + useWrapperValue bool, ) (*Array, []Value, []SlabID) { const childArraySize = 50 @@ -5928,12 +5988,20 @@ func createArrayWithChildArrays( expectedChildArrayValues[j] = v } - expectedValues[i] = arrayValue(expectedChildArrayValues) childSlabIDs[i] = childArray.SlabID() // Append nested array to parent - err = array.Append(childArray) - require.NoError(t, err) + if useWrapperValue { + err = array.Append(SomeValue{childArray}) + require.NoError(t, err) + + expectedValues[i] = someValue{arrayValue(expectedChildArrayValues)} + } else { + err = array.Append(childArray) + require.NoError(t, err) + + expectedValues[i] = arrayValue(expectedChildArrayValues) + } } return array, expectedValues, childSlabIDs @@ -5946,6 +6014,7 @@ func createArrayWithSimpleAndChildArrayValues( typeInfo TypeInfo, arraySize int, compositeValueIndex int, + useWrapperValue bool, ) (*Array, []Value, SlabID) { const childArraySize = 50 @@ -5973,19 +6042,34 @@ func createArrayWithSimpleAndChildArrayValues( expectedChildArrayValues[j] = v } - err = 
array.Append(childArray) - require.NoError(t, err) + if useWrapperValue { + err = array.Append(SomeValue{childArray}) + require.NoError(t, err) + + expectedValues[i] = someValue{arrayValue(expectedChildArrayValues)} + } else { + err = array.Append(childArray) + require.NoError(t, err) + + expectedValues[i] = arrayValue(expectedChildArrayValues) + } - expectedValues[i] = arrayValue(expectedChildArrayValues) childSlabID = childArray.SlabID() } else { v := NewStringValue(strings.Repeat(string(r), 20)) r++ - err = array.Append(v) - require.NoError(t, err) + if useWrapperValue { + err = array.Append(SomeValue{v}) + require.NoError(t, err) - expectedValues[i] = v + expectedValues[i] = someValue{v} + } else { + err = array.Append(v) + require.NoError(t, err) + + expectedValues[i] = v + } } } diff --git a/array_wrappervalue_test.go b/array_wrappervalue_test.go new file mode 100644 index 0000000..098e8a5 --- /dev/null +++ b/array_wrappervalue_test.go @@ -0,0 +1,3110 @@ +/* + * Atree - Scalable Arrays and Ordered Maps + * + * Copyright Flow Foundation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package atree + +import ( + "fmt" + "math" + "math/rand" + "runtime" + "sort" + "testing" + + "github.com/stretchr/testify/require" +) + +func newWrapperValue( + nestedLevels int, + wrappedValue Value, + expectedWrappedValue Value, +) (wrapperValue Value, expectedWrapperValue Value) { + + wrapperValue = SomeValue{wrappedValue} + expectedWrapperValue = someValue{expectedWrappedValue} + + for i := 1; i < nestedLevels; i++ { + wrapperValue = SomeValue{wrapperValue} + expectedWrapperValue = someValue{expectedWrapperValue} + } + + return +} + +func getWrappedValue(t *testing.T, v Value, expected Value) (Value, Value) { + for { + sw, vIsSomeValue := v.(SomeValue) + + esw, expectedIsSomeValue := expected.(someValue) + + require.Equal(t, vIsSomeValue, expectedIsSomeValue) + + if !vIsSomeValue { + break + } + + v = sw.Value + expected = esw.Value + } + + return v, expected +} + +type newValueFunc func(SlabStorage) (value Value, expected Value) + +var nilValueFunc = func() newValueFunc { + return func(_ SlabStorage) (Value, Value) { + return nil, nil + } +} + +var newWrapperValueFunc = func( + nestedLevels int, + newWrappedValue newValueFunc, +) newValueFunc { + return func(storage SlabStorage) (value Value, expected Value) { + wrappedValue, expectedWrappedValue := newWrappedValue(storage) + return newWrapperValue(nestedLevels, wrappedValue, expectedWrappedValue) + } +} + +var newRandomUint64ValueFunc = func(r *rand.Rand) newValueFunc { + return func(SlabStorage) (value Value, expected Value) { + v := Uint64Value(r.Intn(1844674407370955161)) + return v, v + } +} + +var newArrayValueFunc = func( + t *testing.T, + address Address, + typeInfo TypeInfo, + arraySize int, + newValue newValueFunc, +) newValueFunc { + return func(storage SlabStorage) (value Value, expected Value) { + array, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + expectedValues := make([]Value, arraySize) + 
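+ // Each element below is produced by the supplied newValue factory, so nested
+ // wrapper values are built by composing factories. A rough sketch using the
+ // helpers defined above (r and storage are assumed from the enclosing test):
+ //
+ //	newElement := newWrapperValueFunc(2, newRandomUint64ValueFunc(r))
+ //	v, expected := newElement(storage)
+ //	// v is SomeValue{SomeValue{Uint64Value(n)}}; expected is the mirror
+ //	// someValue{someValue{Uint64Value(n)}} used for comparisons in tests.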
+ for i := 0; i < arraySize; i++ { + v, expectedV := newValue(storage) + + err := array.Append(v) + require.NoError(t, err) + + expectedValues[i] = expectedV + } + + return array, arrayValue(expectedValues) + } +} + +type modifyValueFunc func(SlabStorage, Value, Value) (value Value, expected Value, err error) + +var replaceWithNewValueFunc = func(newValue newValueFunc) modifyValueFunc { + return func(storage SlabStorage, _ Value, _ Value) (Value, Value, error) { + v, expected := newValue(storage) + return v, expected, nil + } +} + +var modifyRandomUint64ValueFunc = func(r *rand.Rand) modifyValueFunc { + return replaceWithNewValueFunc(newRandomUint64ValueFunc(r)) +} + +var modifyWrapperValueFunc = func( + t *testing.T, + nestedLevels int, + modifyWrappedValue modifyValueFunc, +) modifyValueFunc { + return func( + storage SlabStorage, + v Value, + expected Value, + ) (modifiedValue Value, expectedModifiedValue Value, err error) { + wrappedValue, expectedWrappedValue := getWrappedValue(t, v, expected) + + newWrappedValue, expectedNewWrappedValue, err := modifyWrappedValue(storage, wrappedValue, expectedWrappedValue) + if err != nil { + return nil, nil, err + } + + newWrapperValue, expectedNewWrapperValue := newWrapperValue(nestedLevels, newWrappedValue, expectedNewWrappedValue) + + return newWrapperValue, expectedNewWrapperValue, nil + } +} + +var modifyArrayValueFunc = func( + t *testing.T, + needToResetModifiedValue bool, + modifyValueFunc modifyValueFunc, +) modifyValueFunc { + return func( + storage SlabStorage, + originalValue Value, + expectedOrigianlValue Value, + ) ( + modifiedValue Value, + expectedModifiedValue Value, + err error, + ) { + array, ok := originalValue.(*Array) + require.True(t, ok) + + expectedValues, ok := expectedOrigianlValue.(arrayValue) + require.True(t, ok) + + require.Equal(t, uint64(len(expectedValues)), array.Count()) + require.True(t, array.Count() > 0) + + // Modify first element + + index := 0 + + v, err := array.Get(uint64(index)) + require.NoError(t, err) + + modifiedV, expectedModifiedV, err := modifyValueFunc(storage, v, expectedValues[index]) + if err != nil { + return nil, nil, err + } + + if modifiedV == nil { + + existingStorable, err := array.Remove(uint64(index)) + if err != nil { + return nil, nil, err + } + require.NotNil(t, existingStorable) + + // Verify wrapped storable doesn't contain inlined slab + + wrappedStorable := unwrapStorable(existingStorable) + + var removedSlabID SlabID + + switch wrappedStorable := wrappedStorable.(type) { + case ArraySlab, MapSlab: + require.Fail(t, "removed storable shouldn't be (wrapped) ArraySlab or MapSlab: %s", existingStorable) + + case SlabIDStorable: + removedSlabID = SlabID(wrappedStorable) + + // Verify SlabID has the same address + require.Equal(t, array.Address(), removedSlabID.Address()) + } + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + valueEqual(t, expectedValues[index], existingValue) + + // Remove slabs from storage + if removedSlabID != SlabIDUndefined { + err = storage.Remove(removedSlabID) + require.NoError(t, err) + } + + expectedValues = append(expectedValues[:index], expectedValues[index+1:]...) 
+ + } else { + + if needToResetModifiedValue { + existingStorable, err := array.Set(uint64(index), modifiedV) + if err != nil { + return nil, nil, err + } + require.NotNil(t, existingStorable) + + // Verify wrapped storable doesn't contain inlined slab + + wrappedStorable := unwrapStorable(existingStorable) + + var overwrittenSlabID SlabID + + switch wrappedStorable := wrappedStorable.(type) { + case ArraySlab, MapSlab: + require.Fail(t, "overwritten storable shouldn't be (wrapped) ArraySlab or MapSlab: %s", existingStorable) + + case SlabIDStorable: + overwrittenSlabID = SlabID(wrappedStorable) + + // Verify SlabID has the same address + require.Equal(t, array.Address(), overwrittenSlabID.Address()) + } + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + + valueEqual(t, expectedValues[index], existingValue) + + if overwrittenSlabID != SlabIDUndefined { + // Remove slabs from storage given we are not interested in removed element + err = storage.Remove(overwrittenSlabID) + require.NoError(t, err) + } + + expectedValues[index] = expectedModifiedV + } + } + + return array, expectedValues, nil + } +} + +type arrayWrapperValueTestCase struct { + name string + modifyName string + wrapperValueNestedLevels int + mustSetModifiedElementInArray bool + newElement newValueFunc + modifyElement modifyValueFunc +} + +func newArrayWrapperValueTestCases( + t *testing.T, + r *rand.Rand, + address Address, + typeInfo TypeInfo, +) []arrayWrapperValueTestCase { + + return []arrayWrapperValueTestCase{ + + // Test arrays [SomeValue(uint64)] + { + name: "[SomeValue(uint64)]", + modifyName: "modify wrapped primitive", + wrapperValueNestedLevels: 1, + mustSetModifiedElementInArray: true, + newElement: newWrapperValueFunc(1, newRandomUint64ValueFunc(r)), + modifyElement: modifyWrapperValueFunc(t, 1, modifyRandomUint64ValueFunc(r)), + }, + + // Test arrays [SomeValue(SomeValue(uint64))] + { + name: "[SomeValue(SomeValue(uint64))]", + modifyName: "modify wrapped primitive", + wrapperValueNestedLevels: 2, + mustSetModifiedElementInArray: true, + newElement: newWrapperValueFunc(2, newRandomUint64ValueFunc(r)), + modifyElement: modifyWrapperValueFunc(t, 2, modifyRandomUint64ValueFunc(r)), + }, + + // Test arrays [SomeValue([uint64]))] + { + name: "[SomeValue([uint64])]", + modifyName: "modify wrapped array", + wrapperValueNestedLevels: 1, + mustSetModifiedElementInArray: false, + newElement: newWrapperValueFunc(1, newArrayValueFunc(t, address, typeInfo, 2, newRandomUint64ValueFunc(r))), + modifyElement: modifyWrapperValueFunc(t, 1, modifyArrayValueFunc(t, true, modifyRandomUint64ValueFunc(r))), + }, + + // Test arrays [SomeValue(SomeValue([uint64])))] + { + name: "[SomeValue(SomeValue([uint64]))]", + modifyName: "modify wrapped array", + wrapperValueNestedLevels: 2, + mustSetModifiedElementInArray: false, + newElement: newWrapperValueFunc(2, newArrayValueFunc(t, address, typeInfo, 2, newRandomUint64ValueFunc(r))), + modifyElement: modifyWrapperValueFunc(t, 2, modifyArrayValueFunc(t, true, modifyRandomUint64ValueFunc(r))), + }, + + // Test arrays [SomeValue([SomeValue(uint64)]))] + { + name: "[SomeValue([SomeValue(uint64)])]", + modifyName: "modify wrapped array", + wrapperValueNestedLevels: 1, + mustSetModifiedElementInArray: false, + newElement: newWrapperValueFunc(1, newArrayValueFunc(t, address, typeInfo, 2, newWrapperValueFunc(1, newRandomUint64ValueFunc(r)))), + modifyElement: modifyWrapperValueFunc(t, 1, modifyArrayValueFunc(t, true, modifyWrapperValueFunc(t, 1, 
modifyRandomUint64ValueFunc(r)))), + }, + + // Test arrays [SomeValue(SomeValue([SomeValue(SomeValue(uint64))])))] + { + name: "[SomeValue(SomeValue([SomeValue(SomeValue(uint64))]))]", + modifyName: "modify wrapped array", + wrapperValueNestedLevels: 2, + mustSetModifiedElementInArray: false, + newElement: newWrapperValueFunc(2, newArrayValueFunc(t, address, typeInfo, 2, newWrapperValueFunc(2, newRandomUint64ValueFunc(r)))), + modifyElement: modifyWrapperValueFunc(t, 2, modifyArrayValueFunc(t, true, modifyWrapperValueFunc(t, 2, modifyRandomUint64ValueFunc(r)))), + }, + + // Test arrays [SomeValue([SomeValue([SomeValue(uint64)])]))] and modify innermost array + { + name: "[SomeValue([SomeValue([SomeValue(uint64)])])]", + modifyName: "modify wrapped level-2 array", + wrapperValueNestedLevels: 1, + mustSetModifiedElementInArray: false, + newElement: newWrapperValueFunc( + 1, + newArrayValueFunc( + t, + address, + typeInfo, + 2, + newWrapperValueFunc( + 1, + newArrayValueFunc( + t, + address, + typeInfo, + 2, + newWrapperValueFunc( + 1, + newRandomUint64ValueFunc(r)))))), + modifyElement: modifyWrapperValueFunc( + t, + 1, + modifyArrayValueFunc( + t, + false, + modifyWrapperValueFunc( + t, + 1, + modifyArrayValueFunc( + t, + true, + modifyWrapperValueFunc( + t, + 1, + modifyRandomUint64ValueFunc(r)))))), + }, + + // Test arrays [SomeValue([SomeValue([SomeValue(uint64)])]))] and remove element from middle array + { + name: "[SomeValue([SomeValue([SomeValue(uint64)])])]", + modifyName: "remove element from wrapped level-1 array", + wrapperValueNestedLevels: 1, + mustSetModifiedElementInArray: false, + newElement: newWrapperValueFunc( + 1, + newArrayValueFunc( + t, + address, + typeInfo, + 2, + newWrapperValueFunc( + 1, + newArrayValueFunc( + t, + address, + typeInfo, + 2, + newWrapperValueFunc( + 1, + newRandomUint64ValueFunc(r)))))), + modifyElement: modifyWrapperValueFunc( + t, + 1, + modifyArrayValueFunc( + t, + true, + replaceWithNewValueFunc(nilValueFunc()))), + }, + + { + name: "[SomeValue([SomeValue([SomeValue(uint64)])])]", + modifyName: "modify element in wrapped level-1 array", + wrapperValueNestedLevels: 1, + mustSetModifiedElementInArray: false, + newElement: newWrapperValueFunc( + 1, + newArrayValueFunc( + t, + address, + typeInfo, + 2, + newWrapperValueFunc( + 1, + newArrayValueFunc( + t, + address, + typeInfo, + 2, + newWrapperValueFunc( + 1, + newRandomUint64ValueFunc(r)))))), + modifyElement: modifyWrapperValueFunc( + t, + 1, + modifyArrayValueFunc( + t, + true, + replaceWithNewValueFunc( + newWrapperValueFunc( + 1, + newArrayValueFunc( + t, + address, + typeInfo, + 2, + newWrapperValueFunc( + 1, + newRandomUint64ValueFunc(r))))))), + }, + } +} + +// TestArrayWrapperValueAppendAndModify tests +// - appending WrapperValue to array +// - retrieveing WrapperValue from array +// - modifing retrieved WrapperValue +func TestArrayWrapperValueAppendAndModify(t *testing.T) { + + SetThreshold(256) + defer SetThreshold(1024) + + r := newRand(t) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + + const ( + smallArraySize = 10 + largeArraySize = 512 + ) + + arraySizeTestCases := []struct { + name string + arraySize int + }{ + {name: "small array", arraySize: smallArraySize}, + {name: "large array", arraySize: largeArraySize}, + } + + testCases := newArrayWrapperValueTestCases(t, r, address, typeInfo) + + for _, tc := range testCases { + + for _, arraySizeTestCase := range arraySizeTestCases { + + arraySize := arraySizeTestCase.arraySize + + name := 
arraySizeTestCase.name + " " + tc.name + if tc.modifyName != "" { + name += ", " + tc.modifyName + } + + t.Run(name, func(t *testing.T) { + + storage := newTestPersistentStorage(t) + + array, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + arraySlabID := array.SlabID() + + // Append WrapperValue to array + expectedValues := make([]Value, arraySize) + for i := 0; i < arraySize; i++ { + v, expectedV := tc.newElement(storage) + + err := array.Append(v) + require.NoError(t, err) + + expectedValues[i] = expectedV + } + + require.Equal(t, uint64(arraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + + // Retrieve and modify WrapperValue from array + for i := uint64(0); i < array.Count(); i++ { + v, err := array.Get(i) + require.NoError(t, err) + + expected := expectedValues[i] + valueEqual(t, expected, v) + + // Verify that v is WrapperValue + testWrapperValueLevels(t, tc.wrapperValueNestedLevels, v) + + // Modify element + newV, newExpectedV, err := tc.modifyElement(storage, v, expected) + require.NoError(t, err) + + if tc.mustSetModifiedElementInArray { + testSetElementInArray(t, storage, array, i, newV, expected) + } + + expectedValues[i] = newExpectedV + } + + require.Equal(t, uint64(arraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + + // Commit storage + err = storage.FastCommit(runtime.NumCPU()) + require.NoError(t, err) + + // Load array from encoded data + storage2 := newTestPersistentStorageWithBaseStorage(t, storage.baseStorage) + + array2, err := NewArrayWithRootID(storage2, arraySlabID) + require.NoError(t, err) + require.Equal(t, uint64(arraySize), array2.Count()) + + // Test loaded array + testArray(t, storage2, typeInfo, address, array2, expectedValues, true) + }) + } + } +} + +// TestArrayWrapperValueInsertAndModify tests +// - inserting WrapperValue (in reverse order) to array +// - retrieving WrapperValue from array +// - modifing retrieved WrapperValue +func TestArrayWrapperValueInsertAndModify(t *testing.T) { + + SetThreshold(256) + defer SetThreshold(1024) + + r := newRand(t) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + + const ( + smallArraySize = 10 + largeArraySize = 512 + ) + + arraySizeTestCases := []struct { + name string + arraySize int + }{ + {name: "small array", arraySize: smallArraySize}, + {name: "large array", arraySize: largeArraySize}, + } + + testCases := newArrayWrapperValueTestCases(t, r, address, typeInfo) + + for _, tc := range testCases { + + for _, arraySizeTestCase := range arraySizeTestCases { + + arraySize := arraySizeTestCase.arraySize + + name := arraySizeTestCase.name + " " + tc.name + if tc.modifyName != "" { + name += "," + tc.modifyName + } + + t.Run(name, func(t *testing.T) { + + storage := newTestPersistentStorage(t) + + array, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + arraySlabID := array.SlabID() + + // Insert WrapperValue in reverse order to array + expectedValues := make([]Value, arraySize) + for i := arraySize - 1; i >= 0; i-- { + v, expectedV := tc.newElement(storage) + + err := array.Insert(0, v) + require.NoError(t, err) + + expectedValues[i] = expectedV + } + + require.Equal(t, uint64(arraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + + // Retrieve and modify 
WrapperValue from array + for i := uint64(0); i < array.Count(); i++ { + v, err := array.Get(i) + require.NoError(t, err) + + expected := expectedValues[i] + valueEqual(t, expected, v) + + // Verify that v is WrapperValue + testWrapperValueLevels(t, tc.wrapperValueNestedLevels, v) + + // Modify element + newV, newExpectedV, err := tc.modifyElement(storage, v, expected) + require.NoError(t, err) + + if tc.mustSetModifiedElementInArray { + testSetElementInArray(t, storage, array, i, newV, expected) + } + + expectedValues[i] = newExpectedV + } + + require.Equal(t, uint64(arraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + + // Commit storage + err = storage.FastCommit(runtime.NumCPU()) + require.NoError(t, err) + + // Load array from encoded data + storage2 := newTestPersistentStorageWithBaseStorage(t, storage.baseStorage) + + array2, err := NewArrayWithRootID(storage2, arraySlabID) + require.NoError(t, err) + require.Equal(t, uint64(arraySize), array2.Count()) + + // Test loaded array + testArray(t, storage2, typeInfo, address, array2, expectedValues, true) + }) + } + } +} + +// TestArrayWrapperValueSetAndModify tests +// - inserting WrapperValue to array +// - retrieving WrapperValue from array +// - modifing retrieved WrapperValue +// - setting modified WrapperValue +func TestArrayWrapperValueSetAndModify(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + r := newRand(t) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + + const ( + smallArraySize = 10 + largeArraySize = 512 + ) + + arraySizeTestCases := []struct { + name string + arraySize int + }{ + {name: "small array", arraySize: smallArraySize}, + {name: "large array", arraySize: largeArraySize}, + } + + testCases := newArrayWrapperValueTestCases(t, r, address, typeInfo) + + for _, tc := range testCases { + + for _, arraySizeTestCase := range arraySizeTestCases { + + arraySize := arraySizeTestCase.arraySize + + name := arraySizeTestCase.name + " " + tc.name + if tc.modifyName != "" { + name += "," + tc.modifyName + } + + t.Run(name, func(t *testing.T) { + + storage := newTestPersistentStorage(t) + + array, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + arraySlabID := array.SlabID() + + // Insert WrapperValue to array + expectedValues := make([]Value, arraySize) + for i := 0; i < arraySize; i++ { + v, expectedV := tc.newElement(storage) + + err := array.Insert(array.Count(), v) + require.NoError(t, err) + + expectedValues[i] = expectedV + } + + require.Equal(t, uint64(arraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + + // Set new WrapperValue in array + for i := 0; i < arraySize; i++ { + v, expected := tc.newElement(storage) + + testSetElementInArray(t, storage, array, uint64(i), v, expectedValues[i]) + + expectedValues[i] = expected + } + + require.Equal(t, uint64(arraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + + // Retrieve and modify WrapperValue from array + for i := uint64(0); i < array.Count(); i++ { + v, err := array.Get(i) + require.NoError(t, err) + + expected := expectedValues[i] + valueEqual(t, expected, v) + + // Verify that v is WrapperValue + testWrapperValueLevels(t, tc.wrapperValueNestedLevels, v) + + // Modify element + newV, newExpectedV, err := 
tc.modifyElement(storage, v, expected) + require.NoError(t, err) + + if tc.mustSetModifiedElementInArray { + testSetElementInArray(t, storage, array, i, newV, expected) + } + + expectedValues[i] = newExpectedV + } + + require.Equal(t, uint64(arraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + + // Commit storage + err = storage.FastCommit(runtime.NumCPU()) + require.NoError(t, err) + + // Load array from encoded data + storage2 := newTestPersistentStorageWithBaseStorage(t, storage.baseStorage) + + array2, err := NewArrayWithRootID(storage2, arraySlabID) + require.NoError(t, err) + require.Equal(t, uint64(arraySize), array2.Count()) + + // Test loaded array + testArray(t, storage2, typeInfo, address, array2, expectedValues, true) + }) + } + } +} + +// TestArrayWrapperValueInsertAndRemove tests +// - inserting WrapperValue to array +// - remove all elements +// - also test setting new elements before removal +func TestArrayWrapperValueInsertAndRemove(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + r := newRand(t) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + + const ( + smallArraySize = 10 + largeArraySize = 512 + ) + + arraySizeTestCases := []struct { + name string + arraySize int + }{ + {name: "small array", arraySize: smallArraySize}, + {name: "large array", arraySize: largeArraySize}, + } + + modifyTestCases := []struct { + name string + needToModifyElement bool + }{ + {name: "modify elements", needToModifyElement: true}, + {name: "", needToModifyElement: false}, + } + + removeSizeTestCases := []struct { + name string + removeAllElements bool + removeElementCount int + }{ + {name: "remove all elements", removeAllElements: true}, + {name: "remove 1 element", removeElementCount: 1}, + {name: fmt.Sprintf("remove %d element", smallArraySize/2), removeElementCount: smallArraySize / 2}, + } + + testCases := newArrayWrapperValueTestCases(t, r, address, typeInfo) + + for _, tc := range testCases { + + for _, arraySizeTestCase := range arraySizeTestCases { + + for _, modifyTestCase := range modifyTestCases { + + for _, removeSizeTestCase := range removeSizeTestCases { + + arraySize := arraySizeTestCase.arraySize + + needToModifyElement := modifyTestCase.needToModifyElement + + removeSize := removeSizeTestCase.removeElementCount + if removeSizeTestCase.removeAllElements { + removeSize = arraySize + } + + name := arraySizeTestCase.name + " " + tc.name + if modifyTestCase.needToModifyElement { + name += ", " + tc.modifyName + } + if removeSizeTestCase.name != "" { + name += ", " + removeSizeTestCase.name + } + + t.Run(name, func(t *testing.T) { + + storage := newTestPersistentStorage(t) + + array, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + arraySlabID := array.SlabID() + + // Insert WrapperValue to array + expectedValues := make([]Value, arraySize) + for i := 0; i < arraySize; i++ { + v, expectedV := tc.newElement(storage) + + err := array.Insert(array.Count(), v) + require.NoError(t, err) + + expectedValues[i] = expectedV + } + + require.Equal(t, uint64(arraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + + // Retrieve and modify WrapperValue from array + if needToModifyElement { + for i := uint64(0); i < array.Count(); i++ { + v, err := array.Get(i) + require.NoError(t, err) + + expected := expectedValues[i] + valueEqual(t, 
expected, v) + + // Verify that v is WrapperValue + testWrapperValueLevels(t, tc.wrapperValueNestedLevels, v) + + // Modify element + newV, newExpectedV, err := tc.modifyElement(storage, v, expected) + require.NoError(t, err) + + if tc.mustSetModifiedElementInArray { + testSetElementInArray(t, storage, array, i, newV, expected) + } + + expectedValues[i] = newExpectedV + } + + require.Equal(t, uint64(arraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + } + + // Remove random elements + for i := 0; i < removeSize; i++ { + + removeIndex := r.Intn(int(array.Count())) + + testRemoveElementFromArray(t, storage, array, uint64(removeIndex), expectedValues[removeIndex]) + + expectedValues = append(expectedValues[:removeIndex], expectedValues[removeIndex+1:]...) + } + + require.Equal(t, uint64(arraySize-removeSize), array.Count()) + require.Equal(t, arraySize-removeSize, len(expectedValues)) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + + // Commit storage + err = storage.FastCommit(runtime.NumCPU()) + require.NoError(t, err) + + // Load array from encoded data + storage2 := newTestPersistentStorageWithBaseStorage(t, storage.baseStorage) + + array2, err := NewArrayWithRootID(storage2, arraySlabID) + require.NoError(t, err) + require.Equal(t, uint64(arraySize-removeSize), array2.Count()) + + // Test loaded array + testArray(t, storage2, typeInfo, address, array2, expectedValues, true) + }) + } + } + } + } +} + +// TestArrayWrapperValueSetAndRemove tests +// - inserting WrapperValue to array +// - remove all elements +// - also test setting new elements before removal +func TestArrayWrapperValueSetAndRemove(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + r := newRand(t) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + + const ( + smallArraySize = 10 + largeArraySize = 512 + ) + + arraySizeTestCases := []struct { + name string + arraySize int + }{ + {name: "small array", arraySize: smallArraySize}, + {name: "large array", arraySize: largeArraySize}, + } + + modifyTestCases := []struct { + name string + needToModifyElement bool + }{ + {name: "modify elements", needToModifyElement: true}, + {name: "", needToModifyElement: false}, + } + + removeSizeTestCases := []struct { + name string + removeAllElements bool + removeElementCount int + }{ + {name: "remove all elements", removeAllElements: true}, + {name: "remove 1 element", removeElementCount: 1}, + {name: fmt.Sprintf("remove %d element", smallArraySize/2), removeElementCount: smallArraySize / 2}, + } + + testCases := newArrayWrapperValueTestCases(t, r, address, typeInfo) + + for _, tc := range testCases { + + for _, arraySizeTestCase := range arraySizeTestCases { + + for _, modifyTestCase := range modifyTestCases { + + for _, removeSizeTestCase := range removeSizeTestCases { + + arraySize := arraySizeTestCase.arraySize + + needToModifyElement := modifyTestCase.needToModifyElement + + removeSize := removeSizeTestCase.removeElementCount + if removeSizeTestCase.removeAllElements { + removeSize = arraySize + } + + name := arraySizeTestCase.name + " " + tc.name + if modifyTestCase.needToModifyElement { + name += ", " + tc.modifyName + } + if removeSizeTestCase.name != "" { + name += ", " + removeSizeTestCase.name + } + + t.Run(name, func(t *testing.T) { + + storage := newTestPersistentStorage(t) + + array, err := NewArray(storage, address, 
typeInfo) + require.NoError(t, err) + + arraySlabID := array.SlabID() + + expectedValues := make([]Value, arraySize) + + // Insert WrapperValue to array + for i := 0; i < arraySize; i++ { + v, expectedV := tc.newElement(storage) + + err := array.Insert(array.Count(), v) + require.NoError(t, err) + + expectedValues[i] = expectedV + } + + require.Equal(t, uint64(arraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + + // Set WrapperValue in array + for i := 0; i < arraySize; i++ { + v, expectedV := tc.newElement(storage) + + testSetElementInArray(t, storage, array, uint64(i), v, expectedValues[i]) + + expectedValues[i] = expectedV + } + + require.Equal(t, uint64(arraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + + // Retrieve and modify WrapperValue from array + if needToModifyElement { + for i := uint64(0); i < array.Count(); i++ { + v, err := array.Get(i) + require.NoError(t, err) + + expected := expectedValues[i] + valueEqual(t, expected, v) + + // Verify that v is WrapperValue + testWrapperValueLevels(t, tc.wrapperValueNestedLevels, v) + + // Modify element + newV, newExpectedV, err := tc.modifyElement(storage, v, expected) + require.NoError(t, err) + + if tc.mustSetModifiedElementInArray { + testSetElementInArray(t, storage, array, i, newV, expected) + } + + expectedValues[i] = newExpectedV + } + + require.Equal(t, uint64(arraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + } + + // Remove random elements + for i := 0; i < removeSize; i++ { + + removeIndex := r.Intn(int(array.Count())) + + testRemoveElementFromArray(t, storage, array, uint64(removeIndex), expectedValues[removeIndex]) + + expectedValues = append(expectedValues[:removeIndex], expectedValues[removeIndex+1:]...) 
+ } + + require.Equal(t, uint64(arraySize-removeSize), array.Count()) + require.Equal(t, arraySize-removeSize, len(expectedValues)) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + + // Commit storage + err = storage.FastCommit(runtime.NumCPU()) + require.NoError(t, err) + + // Load array from encoded data + storage2 := newTestPersistentStorageWithBaseStorage(t, storage.baseStorage) + + array2, err := NewArrayWithRootID(storage2, arraySlabID) + require.NoError(t, err) + require.Equal(t, uint64(arraySize-removeSize), array2.Count()) + + // Test loaded array + testArray(t, storage2, typeInfo, address, array2, expectedValues, true) + }) + } + } + } + } +} + +func TestArrayWrapperValueReadOnlyIterate(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + r := newRand(t) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + + const ( + smallArraySize = 10 + largeArraySize = 512 + ) + + arraySizeTestCases := []struct { + name string + arraySize int + }{ + {name: "small array", arraySize: smallArraySize}, + {name: "large array", arraySize: largeArraySize}, + } + + modifyTestCases := []struct { + name string + testModifyElement bool + }{ + {name: "modify elements", testModifyElement: true}, + {name: "", testModifyElement: false}, + } + + testCases := newArrayWrapperValueTestCases(t, r, address, typeInfo) + + for _, tc := range testCases { + + for _, arraySizeTestCase := range arraySizeTestCases[:1] { + + for _, modifyTestCase := range modifyTestCases { + + // Can't test modifying elements in readonly iteration if elements are not containers. + if modifyTestCase.testModifyElement && tc.mustSetModifiedElementInArray { + continue + } + + arraySize := arraySizeTestCase.arraySize + + testModifyElement := modifyTestCase.testModifyElement + + name := arraySizeTestCase.name + " " + tc.name + if modifyTestCase.testModifyElement { + name += ", " + tc.modifyName + } + + t.Run(name, func(t *testing.T) { + + storage := newTestPersistentStorage(t) + + array, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + expectedValues := make([]Value, arraySize) + + // Insert WrapperValue to array + for i := 0; i < arraySize; i++ { + v, expectedV := tc.newElement(storage) + + err := array.Insert(array.Count(), v) + require.NoError(t, err) + + expectedValues[i] = expectedV + } + + require.Equal(t, uint64(arraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + + iterator, err := array.ReadOnlyIterator() + require.NoError(t, err) + + count := 0 + for { + next, err := iterator.Next() + require.NoError(t, err) + + if next == nil { + break + } + + expected := expectedValues[count] + + testWrapperValueLevels(t, tc.wrapperValueNestedLevels, next) + + valueEqual(t, expected, next) + + // Test modifying elements that don't need to reset in parent container. 
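+ // Elements returned by a read-only iterator must not be mutated: the attempted
+ // modification below is expected to fail with ReadOnlyIteratorElementMutationError
+ // instead of silently diverging from storage. When mutation during iteration is
+ // needed, the mutable iterator is used instead (see TestArrayWrapperValueIterate),
+ // roughly:
+ //
+ //	iter, _ := array.Iterator() // not ReadOnlyIterator()
+ //	elem, _ := iter.Next()
+ //	// elem's wrapped child containers may then be modified in place.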
+ if testModifyElement { + _, _, err := tc.modifyElement(storage, next, expected) + var targetErr *ReadOnlyIteratorElementMutationError + require.ErrorAs(t, err, &targetErr) + } + + count++ + } + + testArrayMutableElementIndex(t, array) + }) + } + } + } +} + +func TestArrayWrapperValueIterate(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + r := newRand(t) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + + const ( + smallArraySize = 10 + largeArraySize = 512 + ) + + arraySizeTestCases := []struct { + name string + arraySize int + }{ + {name: "small array", arraySize: smallArraySize}, + {name: "large array", arraySize: largeArraySize}, + } + + modifyTestCases := []struct { + name string + testModifyElement bool + }{ + {name: "modify elements", testModifyElement: true}, + {name: "", testModifyElement: false}, + } + + testCases := newArrayWrapperValueTestCases(t, r, address, typeInfo) + + for _, tc := range testCases { + + for _, arraySizeTestCase := range arraySizeTestCases[:1] { + + for _, modifyTestCase := range modifyTestCases { + + elementIsContainer := !tc.mustSetModifiedElementInArray + + // Can't test modifying elements in readonly iteration if elements are not containers. + if modifyTestCase.testModifyElement && !elementIsContainer { + continue + } + + arraySize := arraySizeTestCase.arraySize + + testModifyElement := modifyTestCase.testModifyElement + + name := arraySizeTestCase.name + " " + tc.name + if modifyTestCase.testModifyElement { + name += ", " + tc.modifyName + } + + t.Run(name, func(t *testing.T) { + + storage := newTestPersistentStorage(t) + + array, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + expectedValues := make([]Value, arraySize) + + // Insert WrapperValue to array + for i := 0; i < arraySize; i++ { + v, expectedV := tc.newElement(storage) + + err := array.Insert(array.Count(), v) + require.NoError(t, err) + + expectedValues[i] = expectedV + } + + require.Equal(t, uint64(arraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + + iterator, err := array.Iterator() + require.NoError(t, err) + + count := 0 + for { + next, err := iterator.Next() + require.NoError(t, err) + + if next == nil { + break + } + + expected := expectedValues[count] + + testWrapperValueLevels(t, tc.wrapperValueNestedLevels, next) + + valueEqual(t, expected, next) + + // Test modifying container elements. 
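+ // With the mutable iterator, a wrapped child container retrieved here can be
+ // modified in place: the change propagates to the parent array, so the test only
+ // records the new expected value and never calls array.Set. For example, roughly:
+ //
+ //	childArray := next.(SomeValue).Value.(*Array)
+ //	_, _ = childArray.Set(0, SomeValue{Uint64Value(42)}) // parent observes this change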
+ if testModifyElement { + _, newExpectedV, err := tc.modifyElement(storage, next, expected) + require.NoError(t, err) + + expectedValues[count] = newExpectedV + } + + count++ + } + + require.Equal(t, uint64(arraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + }) + } + } + } +} + +func TestArrayWrapperValueInlineArrayAtLevel1(t *testing.T) { + + testLevel1WrappedChildArrayInlined := func(t *testing.T, array *Array, expectedInlined bool) { + rootDataSlab, isDataSlab := array.root.(*ArrayDataSlab) + require.True(t, isDataSlab) + + require.Equal(t, 1, len(rootDataSlab.elements)) + + storable := rootDataSlab.elements[0] + + storabeleAsSomeStoable, isSomeStorable := storable.(SomeStorable) + require.True(t, isSomeStorable) + + wrappedStorable := storabeleAsSomeStoable.Storable + + switch wrappedStorable := wrappedStorable.(type) { + case SlabIDStorable: + inlined := false + require.Equal(t, expectedInlined, inlined) + + case ArraySlab: + inlined := true + require.Equal(t, expectedInlined, inlined) + + case MapSlab: + inlined := true + require.Equal(t, expectedInlined, inlined) + + default: + require.Fail(t, "wrapped storable has unexpected type: %T", wrappedStorable) + } + } + + SetThreshold(256) + defer SetThreshold(1024) + + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + typeInfo := testTypeInfo{42} + + storage := newTestPersistentStorage(t) + + var expectedValues arrayValue + + array, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + // Append WrapperValue SomeValue([]) to array + { + childArray, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + require.False(t, childArray.Inlined()) + + err = array.Append(SomeValue{childArray}) + require.NoError(t, err) + + require.True(t, childArray.Inlined()) + + expectedValues = append(expectedValues, someValue{arrayValue{}}) + + require.Equal(t, uint64(1), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + + testLevel1WrappedChildArrayInlined(t, array, true) + } + + // Retrieve wrapped child array, and then append new elements to child array. + // Wrapped child array is expected to be unlined at the end of loop. + + const childArraySize = 32 + for i := 0; i < childArraySize; i++ { + // Get element + element, err := array.Get(0) + require.NoError(t, err) + + // Test retrieved element type + elementAsSomeValue, isSomeValue := element.(SomeValue) + require.True(t, isSomeValue) + + wrappedValue := elementAsSomeValue.Value + + wrappedArray, isArray := wrappedValue.(*Array) + require.True(t, isArray) + + expectedWrappedValue := expectedValues[0].(someValue).Value + + expectedWrappedArray := expectedWrappedValue.(arrayValue) + + // Append new elements to wrapped child array + + v := Uint64Value(i) + + err = wrappedArray.Append(SomeValue{v}) + require.NoError(t, err) + + expectedWrappedArray = append(expectedWrappedArray, someValue{v}) + + expectedValues[0] = someValue{expectedWrappedArray} + + require.Equal(t, uint64(i+1), wrappedArray.Count()) + require.Equal(t, i+1, len(expectedWrappedArray)) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + } + + testLevel1WrappedChildArrayInlined(t, array, false) + + // Retrieve wrapped child array, and then remove elements to child array. + // Wrapped child array is expected to be inlined at the end of loop. 
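+ // Once enough elements are removed, the wrapped child array is expected to fit
+ // under the inline size limit again and be stored inline in the parent's root
+ // data slab. testLevel1WrappedChildArrayInlined (above) distinguishes the two
+ // states by the storable kept in the root slab, roughly:
+ //
+ //	elem := array.root.(*ArrayDataSlab).elements[0].(SomeStorable).Storable
+ //	// inlined:     elem is an ArraySlab (or MapSlab)
+ //	// not inlined: elem is a SlabIDStorable referencing a standalone slab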
+ + childArraySizeAfterRemoval := 2 + removeCount := childArraySize - childArraySizeAfterRemoval + + for i := 0; i < removeCount; i++ { + // Get element + element, err := array.Get(0) + require.NoError(t, err) + + // Test retrieved element type + elementAsSomeValue, isSomeValue := element.(SomeValue) + require.True(t, isSomeValue) + + wrappedValue := elementAsSomeValue.Value + + wrappedArray, isArray := wrappedValue.(*Array) + require.True(t, isArray) + + expectedWrappedValue := expectedValues[0].(someValue).Value + + expectedWrappedArray := expectedWrappedValue.(arrayValue) + + // Remove first element from wrapped child array + + existingStorable, err := wrappedArray.Remove(0) + require.NoError(t, err) + require.NotNil(t, existingStorable) + + // Verify removed value + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + valueEqual(t, expectedWrappedArray[0], existingValue) + + expectedWrappedArray = expectedWrappedArray[1:] + + expectedValues[0] = someValue{expectedWrappedArray} + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + } + + testLevel1WrappedChildArrayInlined(t, array, true) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) +} + +func TestArrayWrapperValueInlineArrayAtLevel2(t *testing.T) { + + testLevel2WrappedChildArrayInlined := func(t *testing.T, array *Array, expectedInlined bool) { + rootDataSlab, isDataSlab := array.root.(*ArrayDataSlab) + require.True(t, isDataSlab) + + require.Equal(t, 1, len(rootDataSlab.elements)) + + // Get unwrapped value at level 1 + + storableAtLevel1 := rootDataSlab.elements[0] + + storabeleAsSomeStoable, isSomeStorable := storableAtLevel1.(SomeStorable) + require.True(t, isSomeStorable) + + wrappedStorableAtLevel1 := storabeleAsSomeStoable.Storable + + wrappedArrayAtlevel1, isArray := wrappedStorableAtLevel1.(*ArrayDataSlab) + require.True(t, isArray) + + // Get unwrapped value at level 2 + + storableAtLevel2 := wrappedArrayAtlevel1.elements[0] + + storabeleAsSomeStoable, isSomeStorable = storableAtLevel2.(SomeStorable) + require.True(t, isSomeStorable) + + wrappedStorableAtLevel2 := storabeleAsSomeStoable.Storable + + switch wrappedStorable := wrappedStorableAtLevel2.(type) { + case SlabIDStorable: + inlined := false + require.Equal(t, expectedInlined, inlined) + + case ArraySlab: + inlined := true + require.Equal(t, expectedInlined, inlined) + + case MapSlab: + inlined := true + require.Equal(t, expectedInlined, inlined) + + default: + require.Fail(t, "wrapped storable has unexpected type: %T", wrappedStorable) + } + } + + SetThreshold(256) + defer SetThreshold(1024) + + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + typeInfo := testTypeInfo{42} + + storage := newTestPersistentStorage(t) + + var expectedValues arrayValue + + array, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + // Append WrapperValue SomeValue([SomeValue[]]) to array + { + // Create grand child array + gchildArray, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + require.False(t, gchildArray.Inlined()) + + // Create child array + childArray, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + require.False(t, childArray.Inlined()) + + // Append grand child array to child array + err = childArray.Append(SomeValue{gchildArray}) + require.NoError(t, err) + + require.True(t, gchildArray.Inlined()) + + // Append child array to array + err = 
array.Append(SomeValue{childArray}) + require.NoError(t, err) + + require.True(t, childArray.Inlined()) + + expectedValues = append(expectedValues, someValue{arrayValue{someValue{arrayValue{}}}}) + + require.Equal(t, uint64(1), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + + testLevel2WrappedChildArrayInlined(t, array, true) + } + + // Retrieve wrapped gchild array, and then append new elements to gchild array. + // Wrapped gchild array is expected to be unlined at the end of loop. + + const gchildArraySize = 32 + for i := 0; i < gchildArraySize; i++ { + // Get element at level 1 + + elementAtLevel1, err := array.Get(0) + require.NoError(t, err) + + // Test retrieved element type + elementAsSomeValueAtLevel1, isSomeValue := elementAtLevel1.(SomeValue) + require.True(t, isSomeValue) + + wrappedValueAtLevel1 := elementAsSomeValueAtLevel1.Value + + wrappedArrayAtLevel1, isArray := wrappedValueAtLevel1.(*Array) + require.True(t, isArray) + + expectedWrappedValueAtLevel1 := expectedValues[0].(someValue).Value + + expectedWrappedArrayAtLevel1 := expectedWrappedValueAtLevel1.(arrayValue) + + // Get element at level 2 + + elementAtLevel2, err := wrappedArrayAtLevel1.Get(0) + require.NoError(t, err) + + // Test retrieved element type + elementAsSomeValueAtLevel2, isSomeValue := elementAtLevel2.(SomeValue) + require.True(t, isSomeValue) + + wrappedValueAtLevel2 := elementAsSomeValueAtLevel2.Value + + wrappedArrayAtLevel2, isArray := wrappedValueAtLevel2.(*Array) + require.True(t, isArray) + + expectedWrappedValueAtLevel2 := expectedWrappedArrayAtLevel1[0].(someValue).Value + + expectedWrappedArrayAtLevel2 := expectedWrappedValueAtLevel2.(arrayValue) + + // Append new elements to wrapped gchild array + + v := Uint64Value(i) + + err = wrappedArrayAtLevel2.Append(SomeValue{v}) + require.NoError(t, err) + + expectedWrappedArrayAtLevel2 = append(expectedWrappedArrayAtLevel2, someValue{v}) + + expectedValues[0] = someValue{arrayValue{someValue{expectedWrappedArrayAtLevel2}}} + + require.Equal(t, uint64(i+1), wrappedArrayAtLevel2.Count()) + require.Equal(t, i+1, len(expectedWrappedArrayAtLevel2)) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + } + + testLevel2WrappedChildArrayInlined(t, array, false) + + // Retrieve wrapped gchild array, and then remove elements from gchild array. + // Wrapped gchild array is expected to be inlined at the end of loop. 
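+ // At level 2 the same check unwraps twice: the root slab's element is a
+ // SomeStorable wrapping the inlined child ArrayDataSlab, whose own first element
+ // is again a SomeStorable wrapping either an inlined slab or a SlabIDStorable for
+ // the grandchild array (see testLevel2WrappedChildArrayInlined above). Roughly:
+ //
+ //	child := array.root.(*ArrayDataSlab).elements[0].(SomeStorable).Storable.(*ArrayDataSlab)
+ //	gchild := child.elements[0].(SomeStorable).Storable
+ //	// gchild is an ArraySlab when inlined, a SlabIDStorable otherwise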
+ + gchildArraySizeAfterRemoval := 2 + removeCount := gchildArraySize - gchildArraySizeAfterRemoval + + for i := 0; i < removeCount; i++ { + // Get elementAtLevel1 + elementAtLevel1, err := array.Get(0) + require.NoError(t, err) + + // Test retrieved element type + elementAsSomeValueAtLevel1, isSomeValue := elementAtLevel1.(SomeValue) + require.True(t, isSomeValue) + + wrappedValueAtLevel1 := elementAsSomeValueAtLevel1.Value + + wrappedArrayAtLevel1, isArray := wrappedValueAtLevel1.(*Array) + require.True(t, isArray) + + expectedWrappedValueAtLevel1 := expectedValues[0].(someValue).Value + + expectedWrappedArrayAtLevel1 := expectedWrappedValueAtLevel1.(arrayValue) + + // Get element at level 2 + + elementAtLevel2, err := wrappedArrayAtLevel1.Get(0) + require.NoError(t, err) + + // Test retrieved element type + elementAsSomeValueAtLevel2, isSomeValue := elementAtLevel2.(SomeValue) + require.True(t, isSomeValue) + + wrappedValueAtLevel2 := elementAsSomeValueAtLevel2.Value + + wrappedArrayAtLevel2, isArray := wrappedValueAtLevel2.(*Array) + require.True(t, isArray) + + expectedWrappedValueAtLevel2 := expectedWrappedArrayAtLevel1[0].(someValue).Value + + expectedWrappedArrayAtLevel2 := expectedWrappedValueAtLevel2.(arrayValue) + + // Remove first element from wrapped gchild array + + existingStorable, err := wrappedArrayAtLevel2.Remove(0) + require.NoError(t, err) + require.NotNil(t, existingStorable) + + // Verify removed value + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + valueEqual(t, expectedWrappedArrayAtLevel2[0], existingValue) + + expectedWrappedArrayAtLevel2 = expectedWrappedArrayAtLevel2[1:] + + expectedValues[0] = someValue{arrayValue{someValue{expectedWrappedArrayAtLevel2}}} + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + } + + testLevel2WrappedChildArrayInlined(t, array, true) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) +} + +func TestArrayWrapperValueModifyNewArrayAtLevel1(t *testing.T) { + + const ( + minWriteOperationSize = 124 + maxWriteOperationSize = 256 + ) + + r := newRand(t) + + SetThreshold(256) + defer SetThreshold(1024) + + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + typeInfo := testTypeInfo{42} + + newElementFuncs := []newValueFunc{ + // SomeValue(uint64) + newWrapperValueFunc(1, newRandomUint64ValueFunc(r)), + + // SomeValue([SomeValue(uint64)]) + newWrapperValueFunc( + 1, + newArrayValueFunc( + t, + address, + typeInfo, + r.Intn(4), + newWrapperValueFunc( + 1, + newRandomUint64ValueFunc(r)))), + + // SomeValue([SomeValue([SomeValue(uint64)])]) + newWrapperValueFunc( + 1, + newArrayValueFunc( + t, + address, + typeInfo, + r.Intn(4), + newWrapperValueFunc( + 1, + newArrayValueFunc( + t, + address, + typeInfo, + r.Intn(4), + newWrapperValueFunc( + 1, + newRandomUint64ValueFunc(r)))))), + } + + storage := newTestPersistentStorage(t) + + var expectedValues arrayValue + + array, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + actualArraySize := 0 + + t.Run("append and remove", func(t *testing.T) { + + // Append elements + + var appendCount int + for appendCount < minWriteOperationSize { + appendCount = r.Intn(maxWriteOperationSize + 1) + } + + actualArraySize += appendCount + + for i := 0; i < appendCount; i++ { + newValue := newElementFuncs[r.Intn(len(newElementFuncs))] + v, expected := newValue(storage) + + err = array.Append(v) + require.NoError(t, err) + 
+ expectedValues = append(expectedValues, expected) + } + + require.Equal(t, uint64(actualArraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + + // Remove at least half of elements + + var removeCount int + minRemoveCount := int(array.Count()) / 2 + maxRemoveCount := int(array.Count()) / 4 * 3 + for removeCount < minRemoveCount || removeCount > maxRemoveCount { + removeCount = r.Intn(int(array.Count()) + 1) + } + + actualArraySize -= removeCount + + removeIndex := getRandomUniquePositiveNumbers(r, int(array.Count()), removeCount) + + sort.Sort(sort.Reverse(sort.IntSlice(removeIndex))) + + for _, index := range removeIndex { + testRemoveElementFromArray(t, storage, array, uint64(index), expectedValues[index]) + + expectedValues = append(expectedValues[:index], expectedValues[index+1:]...) + } + + require.Equal(t, uint64(actualArraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + }) + + t.Run("insert and remove", func(t *testing.T) { + // Insert elements + + var insertCount int + for insertCount < minWriteOperationSize { + insertCount = r.Intn(maxWriteOperationSize + 1) + } + + actualArraySize += insertCount + + lowestInsertIndex := math.MaxInt + + for i := 0; i < insertCount; i++ { + newValue := newElementFuncs[r.Intn(len(newElementFuncs))] + v, expected := newValue(storage) + + index := r.Intn(int(array.Count())) + + if index < lowestInsertIndex { + lowestInsertIndex = index + } + + err = array.Insert(uint64(index), v) + require.NoError(t, err) + + newExpectedValue := make([]Value, len(expectedValues)+1) + + copy(newExpectedValue, expectedValues[:index]) + newExpectedValue[index] = expected + copy(newExpectedValue[index+1:], expectedValues[index:]) + + expectedValues = newExpectedValue + } + + require.Equal(t, uint64(actualArraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + + // Remove some elements (including one previously inserted element) + + var removeCount int + minRemoveCount := int(array.Count()) / 2 + maxRemoveCount := int(array.Count()) / 4 * 3 + for removeCount < minRemoveCount || removeCount > maxRemoveCount { + removeCount = r.Intn(int(array.Count()) + 1) + } + + actualArraySize -= removeCount + + // Remove previously inserted element first + + { + index := lowestInsertIndex + + testRemoveElementFromArray(t, storage, array, uint64(index), expectedValues[index]) + + expectedValues = append(expectedValues[:index], expectedValues[index+1:]...) + } + + // Remove more elements + + for i := 1; i < removeCount; i++ { + index := r.Intn(int(array.Count())) + + testRemoveElementFromArray(t, storage, array, uint64(index), expectedValues[index]) + + expectedValues = append(expectedValues[:index], expectedValues[index+1:]...) 
+ } + + require.Equal(t, uint64(actualArraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + }) + + t.Run("set and remove", func(t *testing.T) { + // Set elements + + var setCount int + if array.Count() <= 10 { + setCount = int(array.Count()) + } else { + for setCount < int(array.Count())/2 { + setCount = r.Intn(int(array.Count()) + 1) + } + } + + setIndex := make([]int, 0, setCount) + + for i := 0; i < setCount; i++ { + newValue := newElementFuncs[r.Intn(len(newElementFuncs))] + v, expected := newValue(storage) + + index := r.Intn(int(array.Count())) + + testSetElementInArray(t, storage, array, uint64(index), v, expectedValues[index]) + + expectedValues[index] = expected + + setIndex = append(setIndex, index) + } + + require.Equal(t, uint64(actualArraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + + // Remove some elements (including some previously set elements) + + var removeCount int + minRemoveCount := int(array.Count()) / 2 + maxRemoveCount := int(array.Count()) / 4 * 3 + for removeCount < minRemoveCount || removeCount > maxRemoveCount { + removeCount = r.Intn(int(array.Count())) + } + + actualArraySize -= removeCount + + // Remove some previously set elements first + + // Reverse sort and deduplicate set index + sort.Sort(sort.Reverse(sort.IntSlice(setIndex))) + + prev := setIndex[0] + for i := 1; i < len(setIndex); { + cur := setIndex[i] + + if prev != cur { + prev = cur + i++ + } else { + setIndex = append(setIndex[:i], setIndex[i+1:]...) + } + } + + removeSetCount := removeCount / 2 + if len(setIndex) < removeSetCount { + removeSetCount = len(setIndex) + } + + for _, index := range setIndex[:removeSetCount] { + testRemoveElementFromArray(t, storage, array, uint64(index), expectedValues[index]) + + expectedValues = append(expectedValues[:index], expectedValues[index+1:]...) + } + + for i := 0; i < removeCount-removeSetCount; i++ { + index := r.Intn(int(array.Count())) + + testRemoveElementFromArray(t, storage, array, uint64(index), expectedValues[index]) + + expectedValues = append(expectedValues[:index], expectedValues[index+1:]...) + } + + require.Equal(t, uint64(actualArraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + }) + + t.Run("remove all", func(t *testing.T) { + // Remove all elements + + for array.Count() > 0 { + // Remove element at random index + index := r.Intn(int(array.Count())) + + testRemoveElementFromArray(t, storage, array, uint64(index), expectedValues[index]) + + expectedValues = append(expectedValues[:index], expectedValues[index+1:]...) + } + + require.Equal(t, uint64(0), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + }) +} + +func TestArrayWrapperValueModifyNewArrayAtLevel2(t *testing.T) { + + const ( + minWriteOperationSize = 124 + maxWriteOperationSize = 256 + ) + + r := newRand(t) + + SetThreshold(256) + defer SetThreshold(1024) + + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + typeInfo := testTypeInfo{42} + + // newValue creates value of type SomeValue([SomeValue(uint64)]). 
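+ // For example, a value and its expected mirror produced by this factory look
+ // roughly like:
+ //
+ //	v, expected := newValue(storage)
+ //	// v        = SomeValue{*Array whose elements are SomeValue{Uint64Value(...)}}
+ //	// expected = someValue{arrayValue{someValue{Uint64Value(...)}, ...}}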
+ newValue := + newWrapperValueFunc( + 1, + newArrayValueFunc( + t, + address, + typeInfo, + r.Intn(4)+1, // at least one element + newWrapperValueFunc( + 1, + newRandomUint64ValueFunc(r)))) + + // modifyValue modifies nested array's first element. + modifyValue := + modifyWrapperValueFunc( + t, + 1, + modifyArrayValueFunc( + t, + true, + modifyWrapperValueFunc( + t, + 1, + modifyRandomUint64ValueFunc(r)))) + + storage := newTestPersistentStorage(t) + + var expectedValues arrayValue + + array, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + actualArraySize := 0 + + t.Run("append and remove", func(t *testing.T) { + + // Append elements + + var appendCount int + for appendCount < minWriteOperationSize { + appendCount = r.Intn(maxWriteOperationSize + 1) + } + + actualArraySize += appendCount + + for i := 0; i < appendCount; i++ { + v, expected := newValue(storage) + + err = array.Append(v) + require.NoError(t, err) + + expectedValues = append(expectedValues, expected) + } + + require.Equal(t, uint64(actualArraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + + // Remove some elements + + var removeCount int + minRemoveCount := int(array.Count()) / 2 + maxRemoveCount := int(array.Count()) / 4 * 3 + for removeCount < minRemoveCount || removeCount > maxRemoveCount { + removeCount = r.Intn(int(array.Count()) + 1) + } + + actualArraySize -= removeCount + + removeIndex := getRandomUniquePositiveNumbers(r, int(array.Count()), removeCount) + + sort.Sort(sort.Reverse(sort.IntSlice(removeIndex))) + + for _, index := range removeIndex { + testRemoveElementFromArray(t, storage, array, uint64(index), expectedValues[index]) + + expectedValues = append(expectedValues[:index], expectedValues[index+1:]...) 
+ } + + require.Equal(t, uint64(actualArraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + }) + + t.Run("insert and remove", func(t *testing.T) { + // Insert elements + + var insertCount int + for insertCount < minWriteOperationSize { + insertCount = r.Intn(maxWriteOperationSize + 1) + } + + actualArraySize += insertCount + + lowestInsertIndex := math.MaxInt + + for i := 0; i < insertCount; i++ { + v, expected := newValue(storage) + + index := r.Intn(int(array.Count())) + + if index < lowestInsertIndex { + lowestInsertIndex = index + } + + err = array.Insert(uint64(index), v) + require.NoError(t, err) + + newExpectedValue := make([]Value, len(expectedValues)+1) + + copy(newExpectedValue, expectedValues[:index]) + newExpectedValue[index] = expected + copy(newExpectedValue[index+1:], expectedValues[index:]) + + expectedValues = newExpectedValue + } + + require.Equal(t, uint64(actualArraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + + // Remove some elements (including one previously inserted element) + + var removeCount int + minRemoveCount := int(array.Count()) / 2 + maxRemoveCount := int(array.Count()) / 4 * 3 + for removeCount < minRemoveCount || removeCount > maxRemoveCount { + removeCount = r.Intn(int(array.Count()) + 1) + } + + actualArraySize -= removeCount + + // Remove previously inserted element first + + { + index := lowestInsertIndex + + testRemoveElementFromArray(t, storage, array, uint64(index), expectedValues[index]) + + expectedValues = append(expectedValues[:index], expectedValues[index+1:]...) + } + + // Remove more elements + + for i := 1; i < removeCount; i++ { + index := r.Intn(int(array.Count())) + + testRemoveElementFromArray(t, storage, array, uint64(index), expectedValues[index]) + + expectedValues = append(expectedValues[:index], expectedValues[index+1:]...) + } + + require.Equal(t, uint64(actualArraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + }) + + t.Run("modify retrieved nested container and remove", func(t *testing.T) { + // Set elements + + var setCount int + if array.Count() <= 10 { + setCount = int(array.Count()) + } else { + for setCount < int(array.Count())/2 { + setCount = r.Intn(int(array.Count()) + 1) + } + } + + setIndex := make([]int, 0, setCount) + + for i := 0; i < setCount; i++ { + + index := r.Intn(int(array.Count())) + + // Get element + originalValue, err := array.Get(uint64(index)) + require.NoError(t, err) + require.NotNil(t, originalValue) + + _, isWrapperValue := originalValue.(SomeValue) + require.True(t, isWrapperValue) + + // Modify retrieved element without setting back explicitly. 
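+ // Because the element wraps a mutable *Array, mutating the nested array is
+ // sufficient: the parent array is notified of the change, so only the expected
+ // value needs updating and no array.Set call is required. modifyValue effectively
+ // does, roughly:
+ //
+ //	child := originalValue.(SomeValue).Value.(*Array)
+ //	_, _ = child.Set(0, SomeValue{Uint64Value(n)}) // n is a fresh random value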
+ _, modifiedExpectedValue, err := modifyValue(storage, originalValue, expectedValues[index]) + require.NoError(t, err) + + expectedValues[index] = modifiedExpectedValue + + setIndex = append(setIndex, index) + } + + require.Equal(t, uint64(actualArraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + + // Remove some elements (including some previously set elements) + + var removeCount int + minRemoveCount := int(array.Count()) / 2 + maxRemoveCount := int(array.Count()) / 4 * 3 + for removeCount < minRemoveCount || removeCount > maxRemoveCount { + removeCount = r.Intn(int(array.Count())) + } + + actualArraySize -= removeCount + + // Remove some previously set elements first + + // Reverse sort and deduplicate set index + sort.Sort(sort.Reverse(sort.IntSlice(setIndex))) + + prev := setIndex[0] + for i := 1; i < len(setIndex); { + cur := setIndex[i] + + if prev != cur { + prev = cur + i++ + } else { + setIndex = append(setIndex[:i], setIndex[i+1:]...) + } + } + + removeSetCount := removeCount / 2 + if len(setIndex) < removeSetCount { + removeSetCount = len(setIndex) + } + + for _, index := range setIndex[:removeSetCount] { + testRemoveElementFromArray(t, storage, array, uint64(index), expectedValues[index]) + + expectedValues = append(expectedValues[:index], expectedValues[index+1:]...) + } + + for i := 0; i < removeCount-removeSetCount; i++ { + index := r.Intn(int(array.Count())) + + testRemoveElementFromArray(t, storage, array, uint64(index), expectedValues[index]) + + expectedValues = append(expectedValues[:index], expectedValues[index+1:]...) + } + + require.Equal(t, uint64(actualArraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + }) + + t.Run("remove all", func(t *testing.T) { + // Remove all elements + + for array.Count() > 0 { + // Remove element at random index + index := r.Intn(int(array.Count())) + + testRemoveElementFromArray(t, storage, array, uint64(index), expectedValues[index]) + + expectedValues = append(expectedValues[:index], expectedValues[index+1:]...) + } + + require.Equal(t, uint64(0), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + }) +} + +func TestArrayWrapperValueModifyNewArrayAtLevel3(t *testing.T) { + + const ( + minWriteOperationSize = 124 + maxWriteOperationSize = 256 + ) + + r := newRand(t) + + SetThreshold(256) + defer SetThreshold(1024) + + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + typeInfo := testTypeInfo{42} + + // newValue creates value of type SomeValue([SomeValue([SomeValue(uint64)])])) + newValue := + newWrapperValueFunc( + 1, + newArrayValueFunc( + t, + address, + typeInfo, + 2, + newWrapperValueFunc( + 1, + newArrayValueFunc( + t, + address, + typeInfo, + 2, + newWrapperValueFunc( + 1, + newRandomUint64ValueFunc(r)))))) + + // modifyValue modifies innermost nested array's first element. 
+ modifyValue := + modifyWrapperValueFunc( + t, + 1, + modifyArrayValueFunc( + t, + false, + modifyWrapperValueFunc( + t, + 1, + modifyArrayValueFunc( + t, + true, + modifyWrapperValueFunc( + t, + 1, + modifyRandomUint64ValueFunc(r)))))) + + storage := newTestPersistentStorage(t) + + var expectedValues arrayValue + + array, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + actualArraySize := 0 + + t.Run("append and remove", func(t *testing.T) { + + // Append elements + + var appendCount int + for appendCount < minWriteOperationSize { + appendCount = r.Intn(maxWriteOperationSize + 1) + } + + actualArraySize += appendCount + + for i := 0; i < appendCount; i++ { + v, expected := newValue(storage) + + err = array.Append(v) + require.NoError(t, err) + + expectedValues = append(expectedValues, expected) + } + + require.Equal(t, uint64(actualArraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + + // Remove some elements + + var removeCount int + for removeCount < int(array.Count())/2 { + removeCount = r.Intn(int(array.Count()) + 1) + } + + actualArraySize -= removeCount + + removeIndex := getRandomUniquePositiveNumbers(r, int(array.Count()), removeCount) + + sort.Sort(sort.Reverse(sort.IntSlice(removeIndex))) + + for _, index := range removeIndex { + testRemoveElementFromArray(t, storage, array, uint64(index), expectedValues[index]) + + expectedValues = append(expectedValues[:index], expectedValues[index+1:]...) + } + + require.Equal(t, uint64(actualArraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + }) + + t.Run("insert and remove", func(t *testing.T) { + // Insert elements + + var insertCount int + for insertCount < minWriteOperationSize { + insertCount = r.Intn(maxWriteOperationSize + 1) + } + + actualArraySize += insertCount + + lowestInsertIndex := math.MaxInt + + for i := 0; i < insertCount; i++ { + v, expected := newValue(storage) + + index := r.Intn(int(array.Count())) + + if index < lowestInsertIndex { + lowestInsertIndex = index + } + + err = array.Insert(uint64(index), v) + require.NoError(t, err) + + newExpectedValue := make([]Value, len(expectedValues)+1) + + copy(newExpectedValue, expectedValues[:index]) + newExpectedValue[index] = expected + copy(newExpectedValue[index+1:], expectedValues[index:]) + + expectedValues = newExpectedValue + } + + require.Equal(t, uint64(actualArraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + + // Remove some elements (including one previously inserted element) + + var removeCount int + for removeCount < int(array.Count())/2 { + removeCount = r.Intn(int(array.Count()) + 1) + } + + actualArraySize -= removeCount + + // Remove previously inserted element first + + { + index := lowestInsertIndex + + testRemoveElementFromArray(t, storage, array, uint64(index), expectedValues[index]) + + expectedValues = append(expectedValues[:index], expectedValues[index+1:]...) + } + + // Remove more elements + + for i := 1; i < removeCount; i++ { + index := r.Intn(int(array.Count())) + + testRemoveElementFromArray(t, storage, array, uint64(index), expectedValues[index]) + + expectedValues = append(expectedValues[:index], expectedValues[index+1:]...) 
+ } + + require.Equal(t, uint64(actualArraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + }) + + t.Run("modify retrieved nested container and remove", func(t *testing.T) { + // Set elements + + var setCount int + if array.Count() <= 10 { + setCount = int(array.Count()) + } else { + for setCount < int(array.Count())/2 { + setCount = r.Intn(int(array.Count()) + 1) + } + } + + setIndex := make([]int, 0, setCount) + + for i := 0; i < setCount; i++ { + + index := r.Intn(int(array.Count())) + + // Get element + originalValue, err := array.Get(uint64(index)) + require.NoError(t, err) + require.NotNil(t, originalValue) + + _, isWrapperValue := originalValue.(SomeValue) + require.True(t, isWrapperValue) + + // Modify retrieved element without setting back explicitly. + _, modifiedExpectedValue, err := modifyValue(storage, originalValue, expectedValues[index]) + require.NoError(t, err) + + expectedValues[index] = modifiedExpectedValue + + setIndex = append(setIndex, index) + } + + require.Equal(t, uint64(actualArraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + + // Remove some elements (including some previously set elements) + + var removeCount int + for removeCount < int(array.Count())/2 { + removeCount = r.Intn(int(array.Count())) + } + + actualArraySize -= removeCount + + // Remove some previously set elements first + + // Reverse sort and deduplicate set index + sort.Sort(sort.Reverse(sort.IntSlice(setIndex))) + + prev := setIndex[0] + for i := 1; i < len(setIndex); { + cur := setIndex[i] + + if prev != cur { + prev = cur + i++ + } else { + setIndex = append(setIndex[:i], setIndex[i+1:]...) + } + } + + removeSetCount := removeCount / 2 + if len(setIndex) < removeSetCount { + removeSetCount = len(setIndex) + } + + for _, index := range setIndex[:removeSetCount] { + testRemoveElementFromArray(t, storage, array, uint64(index), expectedValues[index]) + + expectedValues = append(expectedValues[:index], expectedValues[index+1:]...) + } + + for i := 0; i < removeCount-removeSetCount; i++ { + index := r.Intn(int(array.Count())) + + testRemoveElementFromArray(t, storage, array, uint64(index), expectedValues[index]) + + expectedValues = append(expectedValues[:index], expectedValues[index+1:]...) + } + + require.Equal(t, uint64(actualArraySize), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + }) + + t.Run("remove all", func(t *testing.T) { + // Remove all elements + + for array.Count() > 0 { + // Remove element at random index + index := r.Intn(int(array.Count())) + + testRemoveElementFromArray(t, storage, array, uint64(index), expectedValues[index]) + + expectedValues = append(expectedValues[:index], expectedValues[index+1:]...) 
+ } + + require.Equal(t, uint64(0), array.Count()) + + testArrayMutableElementIndex(t, array) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + }) +} + +func TestArrayWrapperValueModifyExistingArray(t *testing.T) { + + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + + t.Run("modify level-1 wrapper array in [SomeValue([SomeValue(uint64)])]", func(t *testing.T) { + const ( + arraySize = 3 + childArraySize = 2 + ) + + typeInfo := testTypeInfo{42} + + r := newRand(t) + + createStorage := func(arraySize int) ( + _ BaseStorage, + rootSlabID SlabID, + expectedValues []Value, + ) { + storage := newTestPersistentStorage(t) + + createArrayOfSomeValueOfArrayOfSomeValueOfUint64 := + newArrayValueFunc( + t, + address, + typeInfo, + arraySize, + newWrapperValueFunc( + 1, + newArrayValueFunc( + t, + address, + typeInfo, + childArraySize, + newWrapperValueFunc( + 1, + newRandomUint64ValueFunc(r))))) + + v, expected := createArrayOfSomeValueOfArrayOfSomeValueOfUint64(storage) + + array := v.(*Array) + expectedValues = expected.(arrayValue) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + + err := storage.FastCommit(runtime.NumCPU()) + require.NoError(t, err) + + return storage.baseStorage, array.SlabID(), expectedValues + } + + // Create a base storage with array in the format of + // [SomeValue([SomeValue(uint64)])] + baseStorage, rootSlabID, expectedValues := createStorage(arraySize) + require.Equal(t, arraySize, len(expectedValues)) + + // Create a new storage with encoded array + storage := newTestPersistentStorageWithBaseStorage(t, baseStorage) + + // Load existing array from storage + array, err := NewArrayWithRootID(storage, rootSlabID) + require.NoError(t, err) + require.Equal(t, uint64(len(expectedValues)), array.Count()) + + // Get and verify first element as SomeValue(array) + + expectedValue := expectedValues[0] + + // Get array element (SomeValue) + element, err := array.Get(uint64(0)) + require.NoError(t, err) + + // Test retrieved element type and value + + elementAsSomeValue, isSomeValue := element.(SomeValue) + require.True(t, isSomeValue) + + unwrappedChildArray, isArray := elementAsSomeValue.Value.(*Array) + require.True(t, isArray) + + expectedValuesAsSomeValue, isSomeValue := expectedValue.(someValue) + require.True(t, isSomeValue) + + expectedUnwrappedChildArray, isArrayValue := expectedValuesAsSomeValue.Value.(arrayValue) + require.True(t, isArrayValue) + + require.Equal(t, uint64(len(expectedUnwrappedChildArray)), unwrappedChildArray.Count()) + + // Modify wrapped child array of SomeValue + + newValue := NewStringValue("x") + err = unwrappedChildArray.Append(SomeValue{newValue}) + require.NoError(t, err) + + expectedUnwrappedChildArray = append(expectedUnwrappedChildArray, someValue{newValue}) + expectedValues[0] = someValue{expectedUnwrappedChildArray} + + err = storage.FastCommit(runtime.NumCPU()) + require.NoError(t, err) + + // Verify modified wrapped child array of SomeValue using new storage with committed data + + storage2 := newTestPersistentStorageWithBaseStorage(t, storage.baseStorage) + + // Load existing array from storage + array2, err := NewArrayWithRootID(storage2, rootSlabID) + require.NoError(t, err) + require.Equal(t, uint64(len(expectedValues)), array2.Count()) + + testArray(t, storage, typeInfo, address, array2, expectedValues, true) + }) + + t.Run("modify 2-level wrapper array in [SomeValue([SomeValue([SomeValue(uint64)])])]", func(t *testing.T) { + const ( + arraySize = 4 + childArraySize = 3 + gchildArraySize = 
2 + ) + + typeInfo := testTypeInfo{42} + + createStorage := func(arraySize int) ( + _ BaseStorage, + rootSlabID SlabID, + expectedValues []Value, + ) { + storage := newTestPersistentStorage(t) + + r := newRand(t) + + createArrayOfSomeValueOfArrayOfSomeValueOfArrayOfSomeValueOfUint64 := + newArrayValueFunc( + t, + address, + typeInfo, + arraySize, + newWrapperValueFunc( + 1, + newArrayValueFunc( + t, + address, + typeInfo, + childArraySize, + newWrapperValueFunc( + 1, + newArrayValueFunc( + t, + address, + typeInfo, + gchildArraySize, + newWrapperValueFunc( + 1, + newRandomUint64ValueFunc(r))))))) + + v, expected := createArrayOfSomeValueOfArrayOfSomeValueOfArrayOfSomeValueOfUint64(storage) + + array := v.(*Array) + expectedValues = expected.(arrayValue) + + testArray(t, storage, typeInfo, address, array, expectedValues, true) + + err := storage.FastCommit(runtime.NumCPU()) + require.NoError(t, err) + + return storage.baseStorage, array.SlabID(), expectedValues + } + + // Create a base storage with array in the format of + // [SomeValue([SomeValue([SomeValue(uint64)])])] + baseStorage, rootSlabID, expectedValues := createStorage(arraySize) + require.Equal(t, arraySize, len(expectedValues)) + + // Create a new storage with encoded array + storage := newTestPersistentStorageWithBaseStorage(t, baseStorage) + + // Load existing array from storage + array, err := NewArrayWithRootID(storage, rootSlabID) + require.NoError(t, err) + require.Equal(t, uint64(len(expectedValues)), array.Count()) + + // Get and verify first element as SomeValue(array) + + expectedValue := expectedValues[0] + + element, err := array.Get(uint64(0)) + require.NoError(t, err) + + elementAsSomeValue, isSomeValue := element.(SomeValue) + require.True(t, isSomeValue) + + unwrappedChildArray, isArray := elementAsSomeValue.Value.(*Array) + require.True(t, isArray) + + expectedValuesAsSomeValue, isSomeValue := expectedValue.(someValue) + require.True(t, isSomeValue) + + expectedUnwrappedChildArray, isArrayValue := expectedValuesAsSomeValue.Value.(arrayValue) + require.True(t, isArrayValue) + + require.Equal(t, uint64(len(expectedUnwrappedChildArray)), unwrappedChildArray.Count()) + + // Get and verify nested child element as SomeValue(array) + + childArrayElement, err := unwrappedChildArray.Get(uint64(0)) + require.NoError(t, err) + + childArrayElementAsSomeValue, isSomeValue := childArrayElement.(SomeValue) + require.True(t, isSomeValue) + + unwrappedGChildArray, isArray := childArrayElementAsSomeValue.Value.(*Array) + require.True(t, isArray) + + expectedChildValuesAsSomeValue, isSomeValue := expectedUnwrappedChildArray[0].(someValue) + require.True(t, isSomeValue) + + expectedUnwrappedGChildArray, isArrayValue := expectedChildValuesAsSomeValue.Value.(arrayValue) + require.True(t, isArrayValue) + + require.Equal(t, uint64(len(expectedUnwrappedGChildArray)), unwrappedGChildArray.Count()) + + // Modify wrapped gchild array of SomeValue + + newValue := NewStringValue("x") + err = unwrappedGChildArray.Append(SomeValue{newValue}) + require.NoError(t, err) + + expectedUnwrappedGChildArray = append(expectedUnwrappedGChildArray, someValue{newValue}) + expectedValues[0].(someValue).Value.(arrayValue)[0] = someValue{expectedUnwrappedGChildArray} + + err = storage.FastCommit(runtime.NumCPU()) + require.NoError(t, err) + + // Verify modified wrapped child array of SomeValue using new storage with committed data + + storage2 := newTestPersistentStorageWithBaseStorage(t, storage.baseStorage) + + // Load existing array from storage + array2, 
err := NewArrayWithRootID(storage2, rootSlabID) + require.NoError(t, err) + require.Equal(t, uint64(len(expectedValues)), array2.Count()) + + testArray(t, storage, typeInfo, address, array2, expectedValues, true) + }) +} + +func testWrapperValueLevels(t *testing.T, expectedNestedLevels int, v Value) { + nestedLevels := 0 + for { + sw, ok := v.(SomeValue) + if !ok { + break + } + v = sw.Value + nestedLevels++ + } + require.Equal(t, expectedNestedLevels, nestedLevels) +} + +func testArrayMutableElementIndex(t *testing.T, v Value) { + v, _ = unwrapValue(v) + + array, ok := v.(*Array) + if !ok { + return + } + + originalMutableIndex := make(map[ValueID]uint64) + + for vid, index := range array.mutableElementIndex { + originalMutableIndex[vid] = index + } + + for i := uint64(0); i < array.Count(); i++ { + element, err := array.Get(i) + require.NoError(t, err) + + element, _ = unwrapValue(element) + + switch element := element.(type) { + case *Array: + vid := element.ValueID() + index, exists := originalMutableIndex[vid] + require.True(t, exists) + require.Equal(t, i, index) + + delete(originalMutableIndex, vid) + + case *OrderedMap: + vid := element.ValueID() + index, exists := originalMutableIndex[vid] + require.True(t, exists) + require.Equal(t, i, index) + + delete(originalMutableIndex, vid) + } + } + + require.Equal(t, 0, len(originalMutableIndex)) +} + +func testSetElementInArray(t *testing.T, storage SlabStorage, array *Array, index uint64, newValue Value, expected Value) { + existingStorable, err := array.Set(index, newValue) + require.NoError(t, err) + require.NotNil(t, existingStorable) + + // Verify wrapped storable doesn't contain inlined slab + + wrappedStorable := unwrapStorable(existingStorable) + + switch wrappedStorable := wrappedStorable.(type) { + case ArraySlab, MapSlab: + require.Fail(t, "overwritten storable shouldn't be (wrapped) ArraySlab or MapSlab: %s", existingStorable) + + case SlabIDStorable: + overwrittenSlabID := SlabID(wrappedStorable) + + // Verify SlabID has the same address + require.Equal(t, array.Address(), overwrittenSlabID.Address()) + } + + // Verify overwritten value + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + valueEqual(t, expected, existingValue) + + removeFromStorage(t, storage, existingValue) +} + +func testRemoveElementFromArray(t *testing.T, storage SlabStorage, array *Array, index uint64, expected Value) { + existingStorable, err := array.Remove(index) + require.NoError(t, err) + require.NotNil(t, existingStorable) + + // Verify wrapped storable doesn't contain inlined slab + + wrappedStorable := unwrapStorable(existingStorable) + + switch wrappedStorable := wrappedStorable.(type) { + case ArraySlab, MapSlab: + require.Fail(t, "removed storable shouldn't be (wrapped) ArraySlab or MapSlab: %s", existingStorable) + + case SlabIDStorable: + removedSlabID := SlabID(wrappedStorable) + + // Verify SlabID has the same address + require.Equal(t, array.Address(), removedSlabID.Address()) + } + + // Verify removed value + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + valueEqual(t, expected, existingValue) + + removeFromStorage(t, storage, existingValue) +} + +func getRandomUniquePositiveNumbers(r *rand.Rand, nonInclusiveMax int, count int) []int { + set := make(map[int]struct{}) + for len(set) < count { + n := r.Intn(nonInclusiveMax) + set[n] = struct{}{} + } + + slice := make([]int, 0, count) + for n := range set { + slice = append(slice, n) + } + + return slice +} + 
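+// removeFromStorage recursively removes the given value from storage:
+// arrays and ordered maps are drained element by element (recursively
+// removing each child key/value) before their root slabs are removed,
+// and wrapper values are unwrapped so the wrapped value is removed.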
+func removeFromStorage(t *testing.T, storage SlabStorage, v Value) { + switch v := v.(type) { + case *Array: + rootSlabID := v.SlabID() + + // Remove all elements from storage + for v.Count() > 0 { + existingStorable, err := v.Remove(uint64(0)) + require.NoError(t, err) + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + + removeFromStorage(t, storage, existingValue) + } + + // Remove root slab from storage + err := storage.Remove(rootSlabID) + require.NoError(t, err) + + case *OrderedMap: + rootSlabID := v.SlabID() + + keys := make([]Value, 0, v.Count()) + err := v.IterateReadOnlyKeys(func(key Value) (bool, error) { + keys = append(keys, key) + return true, nil + }) + require.NoError(t, err) + + for _, key := range keys { + existingKeyStorable, existingValueStorable, err := v.Remove(compare, hashInputProvider, key) + require.NoError(t, err) + + existingKey, err := existingKeyStorable.StoredValue(storage) + require.NoError(t, err) + + removeFromStorage(t, storage, existingKey) + + existingValue, err := existingValueStorable.StoredValue(storage) + require.NoError(t, err) + + removeFromStorage(t, storage, existingValue) + } + + // Remove root slab from storage + err = storage.Remove(rootSlabID) + require.NoError(t, err) + + case WrapperValue: + wrappedValue, _ := v.UnwrapAtreeValue() + removeFromStorage(t, storage, wrappedValue) + } +} diff --git a/cmd/stress/array.go b/cmd/stress/array.go index adbaf63..c6bb148 100644 --- a/cmd/stress/array.go +++ b/cmd/stress/array.go @@ -151,6 +151,8 @@ func testArray( var m runtime.MemStats + count := uint64(0) + for { runtime.ReadMemStats(&m) allocMiB := m.Alloc / 1024 / 1024 @@ -218,6 +220,8 @@ func testArray( opCountForStorageHealthCheck++ + count++ + // Update status status.incOp(prevOp, array.Count()) @@ -245,6 +249,10 @@ func testArray( // Drop cache after commit to force slab decoding at next op. 
storage.DropCache() } + + if flagIterationCount > 0 && flagIterationCount == count { + break + } } } @@ -526,6 +534,19 @@ func modifyContainer(expectedValue atree.Value, value atree.Value, nestedLevels return nil, err } + case SomeValue: + expectedSomeValue, ok := expectedValue.(someValue) + if !ok { + return nil, fmt.Errorf("failed to get expected value of type someValue: got %T", expectedValue) + } + + expected, err := modifyContainer(expectedSomeValue.Value, value.Value, nestedLevels) + if err != nil { + return nil, err + } + + return someValue{expected}, nil + default: return nil, fmt.Errorf("failed to get container: got %T", value) } @@ -535,6 +556,7 @@ func modifyContainer(expectedValue atree.Value, value atree.Value, nestedLevels func hasChildContainerInArray(expectedValues arrayValue) bool { for _, v := range expectedValues { + v, _ = unwrapValue(v) switch v.(type) { case arrayValue, mapValue: return true @@ -546,6 +568,7 @@ func hasChildContainerInArray(expectedValues arrayValue) bool { func getRandomChildContainerIndexInArray(expectedValues arrayValue) (index int, found bool) { indexes := make([]int, 0, len(expectedValues)) for i, v := range expectedValues { + v, _ = unwrapValue(v) switch v.(type) { case arrayValue, mapValue: indexes = append(indexes, i) diff --git a/cmd/stress/main.go b/cmd/stress/main.go index fbd735b..d256cf3 100644 --- a/cmd/stress/main.go +++ b/cmd/stress/main.go @@ -93,6 +93,8 @@ var ( flagSeedHex string flagMinHeapAllocMiB, flagMaxHeapAllocMiB uint64 flagMinOpsForStorageHealthCheck uint64 + flagAlwaysUseWrapperValue bool + flagIterationCount uint64 ) func main() { @@ -104,6 +106,8 @@ func main() { flag.StringVar(&flagSeedHex, "seed", "", "seed for prng in hex (default is Unix time)") flag.Uint64Var(&flagMinHeapAllocMiB, "minheap", 1000, "min HeapAlloc in MiB to stop extra removal of elements") flag.Uint64Var(&flagMaxHeapAllocMiB, "maxheap", 2000, "max HeapAlloc in MiB to trigger extra removal of elements") + flag.BoolVar(&flagAlwaysUseWrapperValue, "wrappervalue", false, "always use wrapper value") + flag.Uint64Var(&flagIterationCount, "count", 0, "(testing) number of ops") flag.Parse() diff --git a/cmd/stress/map.go b/cmd/stress/map.go index 0e11b62..05c7017 100644 --- a/cmd/stress/map.go +++ b/cmd/stress/map.go @@ -41,6 +41,8 @@ const ( maxMapOp ) +const mapNoOp mapOpType = -1 + type mapStatus struct { lock sync.RWMutex @@ -127,6 +129,8 @@ func testMap( var ms runtime.MemStats + count := uint64(0) + for { runtime.ReadMemStats(&ms) allocMiB := ms.Alloc / 1024 / 1024 @@ -196,6 +200,8 @@ func testMap( opCountForStorageHealthCheck++ + count++ + // Update status status.incOp(prevOp, m.Count()) @@ -223,6 +229,10 @@ func testMap( // Drop cache after commit to force slab decoding at next op. storage.DropCache() } + + if flagIterationCount > 0 && flagIterationCount == count { + break + } } } @@ -233,7 +243,14 @@ func nextMapOp( forceRemove bool, ) (mapOpType, error) { - if forceRemove { + isComposite := m.Type().IsComposite() + + if isComposite && m.Count() == 0 { + // No op for empty composite values. + return mapNoOp, nil + } + + if forceRemove && !isComposite { if m.Count() == 0 { return 0, fmt.Errorf("failed to force remove map elements because map has no elements") } @@ -248,6 +265,14 @@ func nextMapOp( nextOp := mapOpType(r.Intn(int(maxMapOp))) switch nextOp { + case mapRemoveOp: + if !isComposite { + return nextOp, nil + } + + // Can't remove fields in map of composite type. + // Try another map operations. 
+ case mapMutateChildContainerAfterSet: if nestedLevels-1 > 0 { return nextOp, nil @@ -269,6 +294,18 @@ func nextMapOp( } } +func getMapKeys(m *atree.OrderedMap) ([]atree.Value, error) { + keys := make([]atree.Value, 0, m.Count()) + err := m.IterateKeys(compare, hashInputProvider, func(key atree.Value) (resume bool, err error) { + keys = append(keys, key) + return true, nil + }) + if err != nil { + return nil, err + } + return keys, nil +} + func modifyMap( expectedValues mapValue, m *atree.OrderedMap, @@ -284,6 +321,10 @@ func modifyMap( return nil, 0, err } + if nextOp == mapNoOp { + return expectedValues, nextOp, nil + } + switch nextOp { case mapSetOp1, mapSetOp2, mapSetOp3, mapMutateChildContainerAfterSet: @@ -298,9 +339,28 @@ func modifyMap( panic("not reachable") } - expectedKey, key, err := randomKey() - if err != nil { - return nil, 0, fmt.Errorf("failed to generate random key %s: %s", key, err) + var expectedKey, key atree.Value + var err error + + if m.Type().IsComposite() { + // Update existing field, instead of creating new field. + keys, err := getMapKeys(m) + if err != nil { + return nil, 0, fmt.Errorf("failed to iterate composite keys: %s", err) + } + if len(keys) == 0 { + // No op for empty composite values. + return expectedValues, mapNoOp, nil + } + + key = keys[r.Intn(len(keys))] + expectedKey = key + + } else { + expectedKey, key, err = randomKey() + if err != nil { + return nil, 0, fmt.Errorf("failed to generate random key %s: %s", key, err) + } } expectedChildValue, child, err := randomValue(storage, address, nextNestedLevels) @@ -351,6 +411,10 @@ func modifyMap( } case mapRemoveOp: + if m.Type().IsComposite() { + panic("not reachable") + } + // Use for-range on Go map to get random key var key atree.Value for k := range expectedValues { @@ -426,6 +490,7 @@ func modifyMap( func hasChildContainerInMap(expectedValues mapValue) bool { for _, v := range expectedValues { + v, _ = unwrapValue(v) switch v.(type) { case arrayValue, mapValue: return true @@ -437,6 +502,7 @@ func hasChildContainerInMap(expectedValues mapValue) bool { func getRandomChildContainerKeyInMap(expectedValues mapValue) (key atree.Value, found bool) { keys := make([]atree.Value, 0, len(expectedValues)) for k, v := range expectedValues { + v, _ = unwrapValue(v) switch v.(type) { case arrayValue, mapValue: keys = append(keys, k) diff --git a/cmd/stress/storable.go b/cmd/stress/storable.go index c5676a1..cd5e68d 100644 --- a/cmd/stress/storable.go +++ b/cmd/stress/storable.go @@ -40,10 +40,13 @@ const ( const ( // CBOR tag numbers used to encode elements. - cborTagUInt8Value = 161 - cborTagUInt16Value = 162 - cborTagUInt32Value = 163 - cborTagUInt64Value = 164 + cborTagUInt8Value = 161 + cborTagUInt16Value = 162 + cborTagUInt32Value = 163 + cborTagUInt64Value = 164 + cborTagSomeValue = 165 + cborTagHashableMap = 166 + cborTagSomeValueWithNestedLevels = 167 // CBOR tag numbers in this block cannot exceed 230 (reservedMinTagNumForContainerType). 
) @@ -89,8 +92,14 @@ func init() { } } +type HashableValue interface { + atree.Value + HashInput(scratch []byte) ([]byte, error) +} + type Uint8Value uint8 +var _ HashableValue = Uint8Value(0) var _ atree.Value = Uint8Value(0) var _ atree.Storable = Uint8Value(0) @@ -123,7 +132,7 @@ func (v Uint8Value) Encode(enc *atree.Encoder) error { return enc.CBOR.EncodeUint8(uint8(v)) } -func (v Uint8Value) getHashInput(scratch []byte) ([]byte, error) { +func (v Uint8Value) HashInput(scratch []byte) ([]byte, error) { const cborTypePositiveInt = 0x00 @@ -156,6 +165,7 @@ func (v Uint8Value) String() string { type Uint16Value uint16 +var _ HashableValue = Uint16Value(0) var _ atree.Value = Uint16Value(0) var _ atree.Storable = Uint16Value(0) @@ -182,7 +192,7 @@ func (v Uint16Value) Encode(enc *atree.Encoder) error { return enc.CBOR.EncodeUint16(uint16(v)) } -func (v Uint16Value) getHashInput(scratch []byte) ([]byte, error) { +func (v Uint16Value) HashInput(scratch []byte) ([]byte, error) { const cborTypePositiveInt = 0x00 buf := scratch @@ -220,6 +230,7 @@ func (v Uint16Value) String() string { type Uint32Value uint32 +var _ HashableValue = Uint32Value(0) var _ atree.Value = Uint32Value(0) var _ atree.Storable = Uint32Value(0) @@ -252,7 +263,7 @@ func (v Uint32Value) Encode(enc *atree.Encoder) error { return enc.CBOR.EncodeUint32(uint32(v)) } -func (v Uint32Value) getHashInput(scratch []byte) ([]byte, error) { +func (v Uint32Value) HashInput(scratch []byte) ([]byte, error) { const cborTypePositiveInt = 0x00 @@ -297,6 +308,7 @@ func (v Uint32Value) String() string { type Uint64Value uint64 +var _ HashableValue = Uint64Value(0) var _ atree.Value = Uint64Value(0) var _ atree.Storable = Uint64Value(0) @@ -329,7 +341,7 @@ func (v Uint64Value) Encode(enc *atree.Encoder) error { return enc.CBOR.EncodeUint64(uint64(v)) } -func (v Uint64Value) getHashInput(scratch []byte) ([]byte, error) { +func (v Uint64Value) HashInput(scratch []byte) ([]byte, error) { const cborTypePositiveInt = 0x00 buf := scratch @@ -382,8 +394,10 @@ type StringValue struct { size uint32 } +var _ HashableValue = &StringValue{} var _ atree.Value = &StringValue{} var _ atree.Storable = &StringValue{} +var _ atree.ComparableStorable = &StringValue{} func NewStringValue(s string) StringValue { size := atree.GetUintCBORSize(uint64(len(s))) + uint32(len(s)) @@ -398,6 +412,28 @@ func (v StringValue) StoredValue(_ atree.SlabStorage) (atree.Value, error) { return v, nil } +func (v StringValue) Equal(other atree.Storable) bool { + if _, ok := other.(StringValue); !ok { + return false + } + return v.str == other.(StringValue).str +} + +func (v StringValue) Less(other atree.Storable) bool { + if _, ok := other.(StringValue); !ok { + return false + } + return v.str < other.(StringValue).str +} + +func (v StringValue) ID() string { + return v.str +} + +func (v StringValue) Copy() atree.Storable { + return v +} + func (v StringValue) Storable(storage atree.SlabStorage, address atree.Address, maxInlineSize uint64) (atree.Storable, error) { if uint64(v.ByteSize()) <= maxInlineSize { return v, nil @@ -410,7 +446,7 @@ func (v StringValue) Encode(enc *atree.Encoder) error { return enc.CBOR.EncodeString(v.str) } -func (v StringValue) getHashInput(scratch []byte) ([]byte, error) { +func (v StringValue) HashInput(scratch []byte) ([]byte, error) { const cborTypeTextString = 0x60 @@ -464,6 +500,275 @@ func (v StringValue) String() string { return v.str } +type SomeValue struct { + Value atree.Value +} + +var _ HashableValue = SomeValue{} +var _ atree.Value = 
SomeValue{} +var _ atree.WrapperValue = SomeValue{} + +// NOTE: For testing purposes, SomeValue and SomeStorable are mostly copied +// from github.com/onflow/cadence (interpreter.SomeValue and interpreter.SomeStorable). +func (v SomeValue) Storable( + storage atree.SlabStorage, + address atree.Address, + maxInlineSize uint64, +) (atree.Storable, error) { + + // SomeStorable returned from this function can be encoded in two ways: + // - if non-SomeStorable is too large, non-SomeStorable is encoded in a separate slab + // while SomeStorable wrapper is encoded inline with reference to slab containing + // non-SomeStorable. + // - otherwise, SomeStorable with non-SomeStorable is encoded inline. + // + // The above applies to both immutable non-SomeValue (such as StringValue), + // and mutable non-SomeValue (such as ArrayValue). + + nonSomeValue, nestedLevels := v.nonSomeValue() + + someStorableEncodedPrefixSize := getSomeStorableEncodedPrefixSize(nestedLevels) + + // Reduce maxInlineSize for non-SomeValue to make sure + // that SomeStorable wrapper is always encoded inline. + maxInlineSize -= uint64(someStorableEncodedPrefixSize) + + nonSomeValueStorable, err := nonSomeValue.Storable( + storage, + address, + maxInlineSize, + ) + if err != nil { + return nil, err + } + + valueStorable := nonSomeValueStorable + for i := 1; i < int(nestedLevels); i++ { + valueStorable = SomeStorable{ + Storable: valueStorable, + } + } + + // No need to call maybeLargeImmutableStorable() here for SomeStorable because: + // - encoded SomeStorable size = someStorableEncodedPrefixSize + non-SomeValueStorable size + // - non-SomeValueStorable size < maxInlineSize - someStorableEncodedPrefixSize + return SomeStorable{ + Storable: valueStorable, + }, nil +} + +func (v SomeValue) HashInput(scratch []byte) ([]byte, error) { + + wv, ok := v.Value.(HashableValue) + if !ok { + return nil, fmt.Errorf("failed to hash wrapped value: %s", v.Value) + } + + b, err := wv.HashInput(scratch) + if err != nil { + return nil, err + } + + hi := make([]byte, len(b)+2) + hi[0] = 0xd8 + hi[1] = cborTagSomeValue + copy(hi[2:], b) + + return hi, nil +} + +func (v SomeValue) String() string { + return fmt.Sprintf("SomeValue(%s)", v.Value) +} + +func (v SomeValue) UnwrapAtreeValue() (atree.Value, uint64) { + nonSomeValue, nestedLevels := v.nonSomeValue() + + someStorableEncodedPrefixSize := getSomeStorableEncodedPrefixSize(nestedLevels) + + wv, ok := nonSomeValue.(atree.WrapperValue) + if !ok { + return nonSomeValue, uint64(someStorableEncodedPrefixSize) + } + + unwrappedValue, wrapperSize := wv.UnwrapAtreeValue() + + return unwrappedValue, wrapperSize + uint64(someStorableEncodedPrefixSize) +} + +// nonSomeValue returns a non-SomeValue and nested levels of SomeValue reached +// by traversing nested SomeValue (SomeValue containing SomeValue, etc.) +// until it reaches a non-SomeValue. 
+// For example, +// - `SomeValue{true}` has non-SomeValue `true`, and nested levels 1 +// - `SomeValue{SomeValue{1}}` has non-SomeValue `1` and nested levels 2 +// - `SomeValue{SomeValue{[SomeValue{SomeValue{SomeValue{1}}}]}} has +// non-SomeValue `[SomeValue{SomeValue{SomeValue{1}}}]` and nested levels 2 +func (v SomeValue) nonSomeValue() (atree.Value, uint64) { + nestedLevels := uint64(1) + for { + switch value := v.Value.(type) { + case SomeValue: + nestedLevels++ + v = value + + default: + return value, nestedLevels + } + } +} + +const ( + cborTagSize = 2 + someStorableWithMultipleNestedlevelsArraySize = 1 + someStorableWithMultipleNestedLevelsArrayCount = 2 +) + +func getSomeStorableEncodedPrefixSize(nestedLevels uint64) uint32 { + if nestedLevels == 1 { + return cborTagSize + } + return cborTagSize + + someStorableWithMultipleNestedlevelsArraySize + + atree.GetUintCBORSize(nestedLevels) +} + +type SomeStorable struct { + Storable atree.Storable +} + +var _ atree.ContainerStorable = SomeStorable{} +var _ atree.WrapperStorable = SomeStorable{} + +func (s SomeStorable) HasPointer() bool { + if ms, ok := s.Storable.(atree.ContainerStorable); ok { + return ms.HasPointer() + } + return false +} + +func (s SomeStorable) ByteSize() uint32 { + nonSomeStorable, nestedLevels := s.nonSomeStorable() + return getSomeStorableEncodedPrefixSize(nestedLevels) + nonSomeStorable.ByteSize() +} + +func (s SomeStorable) Encode(e *atree.Encoder) error { + nonSomeStorable, nestedLevels := s.nonSomeStorable() + if nestedLevels == 1 { + return s.encode(e) + } + return s.encodeMultipleNestedLevels(e, nestedLevels, nonSomeStorable) +} + +// encode encodes SomeStorable with nested levels = 1 as +// +// cbor.Tag{ +// Number: CBORTagSomeValue, +// Content: Value(v.Value), +// } +func (s SomeStorable) encode(e *atree.Encoder) error { + // NOTE: when updating, also update SomeStorable.ByteSize + err := e.CBOR.EncodeRawBytes([]byte{ + // tag number + 0xd8, cborTagSomeValue, + }) + if err != nil { + return err + } + return s.Storable.Encode(e) +} + +// encodeMultipleNestedLevels encodes SomeStorable with nested levels > 1 as +// +// cbor.Tag{ +// Number: CBORTagSomeValueWithNestedLevels, +// Content: CBORArray[nested_levels, innermsot_value], +// } +func (s SomeStorable) encodeMultipleNestedLevels( + e *atree.Encoder, + levels uint64, + nonSomeStorable atree.Storable, +) error { + // NOTE: when updating, also update SomeStorable.ByteSize + err := e.CBOR.EncodeRawBytes([]byte{ + // tag number + 0xd8, cborTagSomeValueWithNestedLevels, + // array of 2 elements + 0x82, + }) + if err != nil { + return err + } + + err = e.CBOR.EncodeUint64(levels) + if err != nil { + return err + } + + return nonSomeStorable.Encode(e) +} + +// nonSomeStorable returns a non-SomeStorable and nested levels of SomeStorable reached +// by traversing nested SomeStorable (SomeStorable containing SomeStorable, etc.) +// until it reaches a non-SomeStorable. 
+// For example, +// - `SomeStorable{true}` has non-SomeStorable `true`, and nested levels 1 +// - `SomeStorable{SomeStorable{1}}` has non-SomeStorable `1` and nested levels 2 +// - `SomeStorable{SomeStorable{[SomeStorable{SomeStorable{SomeStorable{1}}}]}} has +// non-SomeStorable `[SomeStorable{SomeStorable{SomeStorable{1}}}]` and nested levels 2 +func (s SomeStorable) nonSomeStorable() (atree.Storable, uint64) { + nestedLevels := uint64(1) + for { + switch storable := s.Storable.(type) { + case SomeStorable: + nestedLevels++ + s = storable + + default: + return storable, nestedLevels + } + } +} + +func (s SomeStorable) ChildStorables() []atree.Storable { + return []atree.Storable{s.Storable} +} + +func (s SomeStorable) StoredValue(storage atree.SlabStorage) (atree.Value, error) { + wv, err := s.Storable.StoredValue(storage) + if err != nil { + return nil, err + } + + return SomeValue{wv}, nil +} + +func (s SomeStorable) String() string { + return fmt.Sprintf("SomeStorable(%s)", s.Storable) +} + +func (s SomeStorable) UnwrapAtreeStorable() atree.Storable { + storable := s.Storable + for { + ws, ok := storable.(atree.WrapperStorable) + if !ok { + break + } + storable = ws.UnwrapAtreeStorable() + } + return storable +} + +func (s SomeStorable) WrapAtreeStorable(storable atree.Storable) atree.Storable { + _, nestedLevels := s.nonSomeStorable() + + newStorable := SomeStorable{Storable: storable} + for i := 1; i < int(nestedLevels); i++ { + newStorable = SomeStorable{Storable: newStorable} + } + return newStorable +} + func decodeStorable(dec *cbor.StreamDecoder, id atree.SlabID, inlinedExtraData []atree.ExtraData) (atree.Storable, error) { t, err := dec.NextType() if err != nil { @@ -535,6 +840,63 @@ func decodeStorable(dec *cbor.StreamDecoder, id atree.SlabID, inlinedExtraData [ } return Uint64Value(n), nil + case cborTagSomeValue: + storable, err := decodeStorable(dec, id, inlinedExtraData) + if err != nil { + return nil, err + } + return SomeStorable{Storable: storable}, nil + + case cborTagSomeValueWithNestedLevels: + count, err := dec.DecodeArrayHead() + if err != nil { + return nil, fmt.Errorf( + "invalid some value with nested levels encoding: %w", + err, + ) + } + + if count != someStorableWithMultipleNestedLevelsArrayCount { + return nil, fmt.Errorf( + "invalid array count for some value with nested levels encoding: got %d, expect %d", + count, someStorableWithMultipleNestedLevelsArrayCount, + ) + } + + nestedLevels, err := dec.DecodeUint64() + if err != nil { + return nil, fmt.Errorf( + "invalid nested levels for some value with nested levels encoding: %w", + err, + ) + } + + if nestedLevels <= 1 { + return nil, fmt.Errorf( + "invalid nested levels for some value with nested levels encoding: got %d, expect > 1", + nestedLevels, + ) + } + + nonSomeStorable, err := decodeStorable(dec, id, inlinedExtraData) + if err != nil { + return nil, fmt.Errorf( + "invalid nonSomeStorable for some value with nested levels encoding: %w", + err, + ) + } + + storable := SomeStorable{ + Storable: nonSomeStorable, + } + for i := uint64(1); i < nestedLevels; i++ { + storable = SomeStorable{ + Storable: storable, + } + } + + return storable, nil + default: return nil, fmt.Errorf("invalid tag number %d", tagNumber) } @@ -592,29 +954,23 @@ func compare(storage atree.SlabStorage, value atree.Value, storable atree.Storab } return false, nil + + case SomeValue: + other, ok := storable.(SomeStorable) + if !ok { + return false, nil + } + + return compare(storage, v.Value, other.Storable) } return false, 
fmt.Errorf("value %T not supported for comparison", value) } func hashInputProvider(value atree.Value, buffer []byte) ([]byte, error) { - switch v := value.(type) { - - case Uint8Value: - return v.getHashInput(buffer) - - case Uint16Value: - return v.getHashInput(buffer) - - case Uint32Value: - return v.getHashInput(buffer) - - case Uint64Value: - return v.getHashInput(buffer) - - case StringValue: - return v.getHashInput(buffer) + if hashable, ok := value.(HashableValue); ok { + return hashable.HashInput(buffer) } - return nil, fmt.Errorf("value %T not supported for hash input", value) + return nil, fmt.Errorf("value %T doesn't implement HashableValue interface", value) } diff --git a/cmd/stress/utils.go b/cmd/stress/utils.go index fd7abe0..4acc02f 100644 --- a/cmd/stress/utils.go +++ b/cmd/stress/utils.go @@ -148,11 +148,36 @@ func randomValue( ) (expected atree.Value, actual atree.Value, err error) { if nestedLevels <= 0 { t := r.Intn(maxSimpleValueType) - return generateSimpleValue(t) + + expected, actual, err = generateSimpleValue(t) + if err != nil { + return nil, nil, err + } + } else { + t := r.Intn(maxContainerValueType) + + expected, actual, err = generateContainerValue(t, storage, address, nestedLevels) + if err != nil { + return nil, nil, err + } } - t := r.Intn(maxContainerValueType) - return generateContainerValue(t, storage, address, nestedLevels) + expected, actual = randomWrapperValue(expected, actual) + return expected, actual, nil +} + +func randomWrapperValue(expected atree.Value, actual atree.Value) (atree.Value, atree.Value) { + const ( + noWrapperValue = iota + useWrapperValue + maxWrapperValueChoice + ) + + if flagAlwaysUseWrapperValue || r.Intn(maxWrapperValueChoice) == useWrapperValue { + return someValue{expected}, SomeValue{actual} + } + + return expected, actual } func removeStorable(storage atree.SlabStorage, storable atree.Storable) error { @@ -162,7 +187,9 @@ func removeStorable(storage atree.SlabStorage, storable atree.Storable) error { return err } - switch v := value.(type) { + unwrappedValue, _ := unwrapValue(value) + + switch v := unwrappedValue.(type) { case *atree.Array: err := v.PopIterate(func(storable atree.Storable) { _ = removeStorable(storage, storable) @@ -181,7 +208,7 @@ func removeStorable(storage atree.SlabStorage, storable atree.Storable) error { } } - if sid, ok := storable.(atree.SlabIDStorable); ok { + if sid, ok := unwrapStorable(storable).(atree.SlabIDStorable); ok { return storage.Remove(atree.SlabID(sid)) } @@ -212,6 +239,17 @@ func valueEqual(expected atree.Value, actual atree.Value) error { case *atree.OrderedMap: return fmt.Errorf("expected value shouldn't be *OrderedMap") + case someValue: + actual, ok := actual.(SomeValue) + if !ok { + return fmt.Errorf("failed to convert actual value to SomeValue, got %T", actual) + } + + return valueEqual(expected.Value, actual.Value) + + case SomeValue: + return fmt.Errorf("expected value shouldn't be SomeValue") + default: if !reflect.DeepEqual(expected, actual) { return fmt.Errorf("expected value %v (%T) != actual value %v (%T)", expected, expected, actual, actual) @@ -543,6 +581,25 @@ func (v mapValue) Storable(atree.SlabStorage, atree.Address, uint64) (atree.Stor panic("not reachable") } +type someValue struct { + Value atree.Value +} + +var _ atree.Value = &someValue{} +var _ atree.WrapperValue = &someValue{} + +func (v someValue) Storable(atree.SlabStorage, atree.Address, uint64) (atree.Storable, error) { + panic("not reachable") +} + +func (v someValue) String() string { + return 
fmt.Sprintf("someValue(%s)", v.Value) +} + +func (v someValue) UnwrapAtreeValue() (atree.Value, uint64) { + return unwrapValue(v.Value) +} + var typeInfoComparator = func(a atree.TypeInfo, b atree.TypeInfo) bool { aID, _ := getEncodedTypeInfo(a) bID, _ := getEncodedTypeInfo(b) @@ -581,3 +638,21 @@ func putTypeIDBuffer(e *bytes.Buffer) { e.Reset() typeIDBufferPool.Put(e) } + +func unwrapValue(v atree.Value) (atree.Value, uint64) { + switch v := v.(type) { + case atree.WrapperValue: + return v.UnwrapAtreeValue() + default: + return v, 0 + } +} + +func unwrapStorable(s atree.Storable) atree.Storable { + switch s := s.(type) { + case atree.WrapperStorable: + return s.UnwrapAtreeStorable() + default: + return s + } +} diff --git a/map.go b/map.go index 51493c3..8c50e4c 100644 --- a/map.go +++ b/map.go @@ -4892,11 +4892,20 @@ func (m *OrderedMap) setCallbackWithChild( child Value, maxInlineSize uint64, ) { - c, ok := child.(mutableValueNotifier) + // Unwrap child value if needed (e.g. interpreter.SomeValue) + unwrappedChild, wrapperSize := unwrapValue(child) + + c, ok := unwrappedChild.(mutableValueNotifier) if !ok { return } + if maxInlineSize < wrapperSize { + maxInlineSize = 0 + } else { + maxInlineSize -= wrapperSize + } + vid := c.ValueID() c.setParentUpdater(func() (found bool, err error) { @@ -4921,6 +4930,8 @@ func (m *OrderedMap) setCallbackWithChild( return false, err } + valueStorable = unwrapStorable(valueStorable) + // Verify retrieved element value is either SlabIDStorable or Slab, with identical value ID. switch valueStorable := valueStorable.(type) { case SlabIDStorable: @@ -4939,15 +4950,19 @@ func (m *OrderedMap) setCallbackWithChild( return false, nil } + // NOTE: Must reset child using original child (not unwrapped child) + // Set child value with parent map using same key. - // Set() calls c.Storable() which returns inlined or not-inlined child storable. - existingValueStorable, err := m.set(comparator, hip, key, c) + // Set() calls child.Storable() which returns inlined or not-inlined child storable. + existingValueStorable, err := m.set(comparator, hip, key, child) if err != nil { return false, err } // Verify overwritten storable has identical value ID. + existingValueStorable = unwrapStorable(existingValueStorable) + switch existingValueStorable := existingValueStorable.(type) { case SlabIDStorable: sid := SlabID(existingValueStorable) @@ -5156,20 +5171,10 @@ func (m *OrderedMap) Set(comparator ValueComparator, hip HashInputProvider, key // If overwritten storable is an inlined slab, uninline the slab and store it in storage. // This is to prevent potential data loss because the overwritten inlined slab was not in // storage and any future changes to it would have been lost. - switch s := storable.(type) { - case ArraySlab: // inlined array slab - err = s.Uninline(m.Storage) - if err != nil { - return nil, err - } - storable = SlabIDStorable(s.SlabID()) - case MapSlab: // inlined map slab - err = s.Uninline(m.Storage) - if err != nil { - return nil, err - } - storable = SlabIDStorable(s.SlabID()) + storable, _, _, err = uninlineStorableIfNeeded(m.Storage, storable) + if err != nil { + return nil, err } return storable, nil @@ -5259,20 +5264,15 @@ func (m *OrderedMap) Remove(comparator ValueComparator, hip HashInputProvider, k // If overwritten storable is an inlined slab, uninline the slab and store it in storage. // This is to prevent potential data loss because the overwritten inlined slab was not in // storage and any future changes to it would have been lost. 
- switch s := valueStorable.(type) { - case ArraySlab: - err = s.Uninline(m.Storage) - if err != nil { - return nil, nil, err - } - valueStorable = SlabIDStorable(s.SlabID()) - case MapSlab: - err = s.Uninline(m.Storage) - if err != nil { - return nil, nil, err - } - valueStorable = SlabIDStorable(s.SlabID()) + keyStorable, _, _, err = uninlineStorableIfNeeded(m.Storage, keyStorable) + if err != nil { + return nil, nil, err + } + + valueStorable, _, _, err = uninlineStorableIfNeeded(m.Storage, valueStorable) + if err != nil { + return nil, nil, err } return keyStorable, valueStorable, nil @@ -5762,14 +5762,19 @@ var defaultReadOnlyMapIteratorMutatinCallback ReadOnlyMapIteratorMutationCallbac var _ MapIterator = &readOnlyMapIterator{} func (i *readOnlyMapIterator) setMutationCallback(key, value Value) { - if k, ok := key.(mutableValueNotifier); ok { + + unwrappedKey, _ := unwrapValue(key) + + if k, ok := unwrappedKey.(mutableValueNotifier); ok { k.setParentUpdater(func() (found bool, err error) { i.keyMutationCallback(key) return true, NewReadOnlyIteratorElementMutationError(i.m.ValueID(), k.ValueID()) }) } - if v, ok := value.(mutableValueNotifier); ok { + unwrappedValue, _ := unwrapValue(value) + + if v, ok := unwrappedValue.(mutableValueNotifier); ok { v.setParentUpdater(func() (found bool, err error) { i.valueMutationCallback(value) return true, NewReadOnlyIteratorElementMutationError(i.m.ValueID(), v.ValueID()) diff --git a/map_debug.go b/map_debug.go index 7e5aea8..f06f1e4 100644 --- a/map_debug.go +++ b/map_debug.go @@ -1307,45 +1307,9 @@ func (v *serializationVerifier) mapSingleElementEqual(expected, actual *singleEl } } - // Compare nested element - switch ee := expected.value.(type) { - case SlabIDStorable: // Compare not-inlined element - if !v.compare(expected.value, actual.value) { - return NewFatalError(fmt.Errorf("singleElement value %v is wrong, want %v", actual.value, expected.value)) - } - - value, err := ee.StoredValue(v.storage) - if err != nil { - // Don't need to wrap error as external error because err is already categorized by SlabIDStorable.StoredValue(). - return err - } - - err = v.verifyValue(value) - if err != nil { - // Don't need to wrap error as external error because err is already categorized by verifyVaue(). 
- return err - } - - case *ArrayDataSlab: // Compare inlined array element - ae, ok := actual.value.(*ArrayDataSlab) - if !ok { - return NewFatalError(fmt.Errorf("expect element as *ArrayDataSlab, actual %T", ae)) - } - - return v.arrayDataSlabEqual(ee, ae) - - case *MapDataSlab: // Compare inlined map element - ae, ok := actual.value.(*MapDataSlab) - if !ok { - return NewFatalError(fmt.Errorf("expect element as *MapDataSlab, actual %T", ae)) - } - - return v.mapDataSlabEqual(ee, ae) - - default: - if !v.compare(expected.value, actual.value) { - return NewFatalError(fmt.Errorf("singleElement value %v is wrong, want %v", actual.value, expected.value)) - } + err := v.compareStorable(expected.value, actual.value) + if err != nil { + return NewFatalError(fmt.Errorf("failed to compare singleElement value with key %s: %s", expected.key, err)) } return nil diff --git a/map_test.go b/map_test.go index e783811..4c8f94f 100644 --- a/map_test.go +++ b/map_test.go @@ -14261,7 +14261,7 @@ func TestMapNestedStorables(t *testing.T) { vs := strings.Repeat("b", int(i)) v := SomeValue{Value: NewStringValue(vs)} - keyValues[k] = v + keyValues[k] = someValue{NewStringValue(vs)} existingStorable, err := m.Set(compare, hashInputProvider, k, v) require.NoError(t, err) @@ -14299,7 +14299,7 @@ func TestMapNestedStorables(t *testing.T) { ks := strings.Repeat("a", int(i)) k := SomeValue{Value: NewStringValue(ks)} - keyValues[k] = arrayValue{v} + keyValues[k] = arrayValue{someValue{NewStringValue(vs)}} existingStorable, err := m.Set(compare, hashInputProvider, k, childArray) require.NoError(t, err) @@ -14744,6 +14744,16 @@ func TestMapLoadedValueIterator(t *testing.T) { typeInfo := testTypeInfo{42} address := Address{1, 2, 3, 4, 5, 6, 7, 8} + runTest := func(name string, f func(useWrapperValue bool) func(*testing.T)) { + for _, useWrapperValue := range []bool{false, true} { + if useWrapperValue { + name += ", use wrapper value" + } + + t.Run(name, f(useWrapperValue)) + } + } + t.Run("empty", func(t *testing.T) { storage := newTestPersistentStorage(t) @@ -14759,1399 +14769,1498 @@ func TestMapLoadedValueIterator(t *testing.T) { testMapLoadedElements(t, m, nil) }) - t.Run("root data slab with simple values", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root data slab with simple values", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - const mapSize = 3 - m, values := createMapWithSimpleValues( - t, - storage, - address, - typeInfo, - mapSize, - func(i int) []Digest { return []Digest{Digest(i)} }, - ) + const mapSize = 3 + m, values := createMapWithSimpleValues( + t, + storage, + address, + typeInfo, + mapSize, + func(i int) []Digest { return []Digest{Digest(i)} }, + useWrapperValue, + ) - // parent map: 1 root data slab - require.Equal(t, 1, len(storage.deltas)) - require.Equal(t, 0, getMapMetaDataSlabCount(storage)) + // parent map: 1 root data slab + require.Equal(t, 1, len(storage.deltas)) + require.Equal(t, 0, getMapMetaDataSlabCount(storage)) - testMapLoadedElements(t, m, values) + testMapLoadedElements(t, m, values) + } }) - t.Run("root data slab with composite values", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root data slab with composite values", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - const mapSize = 3 - m, values, _ := createMapWithChildArrayValues( - t, - storage, - address, - typeInfo, - mapSize, 
- func(i int) []Digest { return []Digest{Digest(i)} }, - ) + const mapSize = 3 + m, values, _ := createMapWithChildArrayValues( + t, + storage, + address, + typeInfo, + mapSize, + func(i int) []Digest { return []Digest{Digest(i)} }, + useWrapperValue, + ) - // parent map: 1 root data slab - // composite elements: 1 root data slab for each - require.Equal(t, 1+mapSize, len(storage.deltas)) - require.Equal(t, 0, getMapMetaDataSlabCount(storage)) + // parent map: 1 root data slab + // composite elements: 1 root data slab for each + require.Equal(t, 1+mapSize, len(storage.deltas)) + require.Equal(t, 0, getMapMetaDataSlabCount(storage)) - testMapLoadedElements(t, m, values) + testMapLoadedElements(t, m, values) + } }) - t.Run("root data slab with composite values in collision group", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root data slab with composite values in collision group", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - // Create parent map with 3 collision groups, 2 elements in each group. - const mapSize = 6 - m, values, _ := createMapWithChildArrayValues( - t, - storage, - address, - typeInfo, - mapSize, - func(i int) []Digest { return []Digest{Digest(i / 2), Digest(i)} }, - ) + // Create parent map with 3 collision groups, 2 elements in each group. + const mapSize = 6 + m, values, _ := createMapWithChildArrayValues( + t, + storage, + address, + typeInfo, + mapSize, + func(i int) []Digest { return []Digest{Digest(i / 2), Digest(i)} }, + useWrapperValue, + ) - // parent map: 1 root data slab - // composite elements: 1 root data slab for each - require.Equal(t, 1+mapSize, len(storage.deltas)) - require.Equal(t, 0, getMapMetaDataSlabCount(storage)) + // parent map: 1 root data slab + // composite elements: 1 root data slab for each + require.Equal(t, 1+mapSize, len(storage.deltas)) + require.Equal(t, 0, getMapMetaDataSlabCount(storage)) - testMapLoadedElements(t, m, values) + testMapLoadedElements(t, m, values) + } }) - t.Run("root data slab with composite values in external collision group", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root data slab with composite values in external collision group", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - // Create parent map with 3 external collision group, 4 elements in the group. - const mapSize = 12 - m, values, _ := createMapWithChildArrayValues( - t, - storage, - address, - typeInfo, - mapSize, - func(i int) []Digest { return []Digest{Digest(i / 4), Digest(i)} }, - ) + // Create parent map with 3 external collision group, 4 elements in the group. 
+ const mapSize = 12 + m, values, _ := createMapWithChildArrayValues( + t, + storage, + address, + typeInfo, + mapSize, + func(i int) []Digest { return []Digest{Digest(i / 4), Digest(i)} }, + useWrapperValue, + ) - // parent map: 1 root data slab, 3 external collision group - // composite elements: 1 root data slab for each - require.Equal(t, 1+3+mapSize, len(storage.deltas)) - require.Equal(t, 0, getMapMetaDataSlabCount(storage)) + // parent map: 1 root data slab, 3 external collision group + // composite elements: 1 root data slab for each + require.Equal(t, 1+3+mapSize, len(storage.deltas)) + require.Equal(t, 0, getMapMetaDataSlabCount(storage)) - testMapLoadedElements(t, m, values) + testMapLoadedElements(t, m, values) + } }) - t.Run("root data slab with composite values, unload value from front to back", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root data slab with composite values, unload value from front to back", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - const mapSize = 3 - m, values, childSlabIDs := createMapWithChildArrayValues( - t, - storage, - address, - typeInfo, - mapSize, - func(i int) []Digest { return []Digest{Digest(i)} }, - ) + const mapSize = 3 + m, values, childSlabIDs := createMapWithChildArrayValues( + t, + storage, + address, + typeInfo, + mapSize, + func(i int) []Digest { return []Digest{Digest(i)} }, + useWrapperValue, + ) - // parent map: 1 root data slab - // composite elements: 1 root data slab for each - require.Equal(t, 1+mapSize, len(storage.deltas)) - require.Equal(t, 0, getMapMetaDataSlabCount(storage)) + // parent map: 1 root data slab + // composite elements: 1 root data slab for each + require.Equal(t, 1+mapSize, len(storage.deltas)) + require.Equal(t, 0, getMapMetaDataSlabCount(storage)) - testMapLoadedElements(t, m, values) + testMapLoadedElements(t, m, values) - // Unload composite element from front to back. - for i := 0; i < len(values); i++ { - err := storage.Remove(childSlabIDs[i]) - require.NoError(t, err) + // Unload composite element from front to back. + for i := 0; i < len(values); i++ { + err := storage.Remove(childSlabIDs[i]) + require.NoError(t, err) - expectedValues := values[i+1:] - testMapLoadedElements(t, m, expectedValues) + expectedValues := values[i+1:] + testMapLoadedElements(t, m, expectedValues) + } } }) - t.Run("root data slab with long string keys, unload key from front to back", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root data slab with long string keys, unload key from front to back", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - const mapSize = 3 - m, values := createMapWithLongStringKey(t, storage, address, typeInfo, mapSize) + const mapSize = 3 + m, values := createMapWithLongStringKey(t, storage, address, typeInfo, mapSize, useWrapperValue) - // parent map: 1 root data slab - // long string keys: 1 storable slab for each - require.Equal(t, 1+mapSize, len(storage.deltas)) - require.Equal(t, 0, getMapMetaDataSlabCount(storage)) + // parent map: 1 root data slab + // long string keys: 1 storable slab for each + require.Equal(t, 1+mapSize, len(storage.deltas)) + require.Equal(t, 0, getMapMetaDataSlabCount(storage)) - testMapLoadedElements(t, m, values) + testMapLoadedElements(t, m, values) - // Unload external key from front to back. 
- for i := 0; i < len(values); i++ { - k := values[i][0] + // Unload external key from front to back. + for i := 0; i < len(values); i++ { + k := values[i][0] - s, ok := k.(StringValue) - require.True(t, ok) + s, ok := k.(StringValue) + require.True(t, ok) - // Find storage id for StringValue s. - var keyID SlabID - for id, slab := range storage.deltas { - if sslab, ok := slab.(*StorableSlab); ok { - if other, ok := sslab.storable.(StringValue); ok { - if s.str == other.str { - keyID = id - break + // Find storage id for StringValue s. + var keyID SlabID + for id, slab := range storage.deltas { + if sslab, ok := slab.(*StorableSlab); ok { + if other, ok := sslab.storable.(StringValue); ok { + if s.str == other.str { + keyID = id + break + } } } } - } - require.NoError(t, keyID.Valid()) + require.NoError(t, keyID.Valid()) - err := storage.Remove(keyID) - require.NoError(t, err) + err := storage.Remove(keyID) + require.NoError(t, err) - expectedValues := values[i+1:] - testMapLoadedElements(t, m, expectedValues) + expectedValues := values[i+1:] + testMapLoadedElements(t, m, expectedValues) + } } }) - t.Run("root data slab with composite values in collision group, unload value from front to back", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root data slab with composite values in collision group, unload value from front to back", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - // Create parent map with 3 collision groups, 2 elements in each group. - const mapSize = 6 - m, values, childSlabIDs := createMapWithChildArrayValues( - t, - storage, - address, - typeInfo, - mapSize, - func(i int) []Digest { return []Digest{Digest(i / 2), Digest(i)} }, - ) + // Create parent map with 3 collision groups, 2 elements in each group. + const mapSize = 6 + m, values, childSlabIDs := createMapWithChildArrayValues( + t, + storage, + address, + typeInfo, + mapSize, + func(i int) []Digest { return []Digest{Digest(i / 2), Digest(i)} }, + useWrapperValue, + ) - // parent map: 1 root data slab - // composite elements: 1 root data slab for each - require.Equal(t, 1+mapSize, len(storage.deltas)) - require.Equal(t, 0, getMapMetaDataSlabCount(storage)) + // parent map: 1 root data slab + // composite elements: 1 root data slab for each + require.Equal(t, 1+mapSize, len(storage.deltas)) + require.Equal(t, 0, getMapMetaDataSlabCount(storage)) - testMapLoadedElements(t, m, values) + testMapLoadedElements(t, m, values) - // Unload composite element from front to back. - for i := 0; i < len(values); i++ { - err := storage.Remove(childSlabIDs[i]) - require.NoError(t, err) + // Unload composite element from front to back. + for i := 0; i < len(values); i++ { + err := storage.Remove(childSlabIDs[i]) + require.NoError(t, err) - expectedValues := values[i+1:] - testMapLoadedElements(t, m, expectedValues) + expectedValues := values[i+1:] + testMapLoadedElements(t, m, expectedValues) + } } }) - t.Run("root data slab with composite values in external collision group, unload value from front to back", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root data slab with composite values in external collision group, unload value from front to back", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - // Create parent map with 3 external collision groups, 4 elements in the group. 
- const mapSize = 12 - m, values, childSlabIDs := createMapWithChildArrayValues( - t, - storage, - address, - typeInfo, - mapSize, - func(i int) []Digest { return []Digest{Digest(i / 4), Digest(i)} }, - ) + // Create parent map with 3 external collision groups, 4 elements in the group. + const mapSize = 12 + m, values, childSlabIDs := createMapWithChildArrayValues( + t, + storage, + address, + typeInfo, + mapSize, + func(i int) []Digest { return []Digest{Digest(i / 4), Digest(i)} }, + useWrapperValue, + ) - // parent map: 1 root data slab, 3 external collision group - // composite elements: 1 root data slab for each - require.Equal(t, 1+3+mapSize, len(storage.deltas)) - require.Equal(t, 0, getMapMetaDataSlabCount(storage)) + // parent map: 1 root data slab, 3 external collision group + // composite elements: 1 root data slab for each + require.Equal(t, 1+3+mapSize, len(storage.deltas)) + require.Equal(t, 0, getMapMetaDataSlabCount(storage)) - testMapLoadedElements(t, m, values) + testMapLoadedElements(t, m, values) - // Unload composite element from front to back - for i := 0; i < len(values); i++ { - err := storage.Remove(childSlabIDs[i]) - require.NoError(t, err) + // Unload composite element from front to back + for i := 0; i < len(values); i++ { + err := storage.Remove(childSlabIDs[i]) + require.NoError(t, err) - expectedValues := values[i+1:] - testMapLoadedElements(t, m, expectedValues) + expectedValues := values[i+1:] + testMapLoadedElements(t, m, expectedValues) + } } }) - t.Run("root data slab with composite values in external collision group, unload external slab from front to back", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root data slab with composite values in external collision group, unload external slab from front to back", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - // Create parent map with 3 external collision groups, 4 elements in the group. - const mapSize = 12 - m, values, _ := createMapWithChildArrayValues( - t, - storage, - address, - typeInfo, - mapSize, - func(i int) []Digest { return []Digest{Digest(i / 4), Digest(i)} }, - ) + // Create parent map with 3 external collision groups, 4 elements in the group. 
+ const mapSize = 12 + m, values, _ := createMapWithChildArrayValues( + t, + storage, + address, + typeInfo, + mapSize, + func(i int) []Digest { return []Digest{Digest(i / 4), Digest(i)} }, + useWrapperValue, + ) - // parent map: 1 root data slab, 3 external collision group - // composite elements: 1 root data slab for each - require.Equal(t, 1+3+mapSize, len(storage.deltas)) - require.Equal(t, 0, getMapMetaDataSlabCount(storage)) + // parent map: 1 root data slab, 3 external collision group + // composite elements: 1 root data slab for each + require.Equal(t, 1+3+mapSize, len(storage.deltas)) + require.Equal(t, 0, getMapMetaDataSlabCount(storage)) - testMapLoadedElements(t, m, values) + testMapLoadedElements(t, m, values) - // Unload external collision group slab from front to back + // Unload external collision group slab from front to back - var externalCollisionSlabIDs []SlabID - for id, slab := range storage.deltas { - if dataSlab, ok := slab.(*MapDataSlab); ok { - if dataSlab.collisionGroup { - externalCollisionSlabIDs = append(externalCollisionSlabIDs, id) + var externalCollisionSlabIDs []SlabID + for id, slab := range storage.deltas { + if dataSlab, ok := slab.(*MapDataSlab); ok { + if dataSlab.collisionGroup { + externalCollisionSlabIDs = append(externalCollisionSlabIDs, id) + } } } - } - require.Equal(t, 3, len(externalCollisionSlabIDs)) + require.Equal(t, 3, len(externalCollisionSlabIDs)) - sort.Slice(externalCollisionSlabIDs, func(i, j int) bool { - a := externalCollisionSlabIDs[i] - b := externalCollisionSlabIDs[j] - if a.address == b.address { - return a.IndexAsUint64() < b.IndexAsUint64() - } - return a.AddressAsUint64() < b.AddressAsUint64() - }) + sort.Slice(externalCollisionSlabIDs, func(i, j int) bool { + a := externalCollisionSlabIDs[i] + b := externalCollisionSlabIDs[j] + if a.address == b.address { + return a.IndexAsUint64() < b.IndexAsUint64() + } + return a.AddressAsUint64() < b.AddressAsUint64() + }) - for i, id := range externalCollisionSlabIDs { - err := storage.Remove(id) - require.NoError(t, err) + for i, id := range externalCollisionSlabIDs { + err := storage.Remove(id) + require.NoError(t, err) - expectedValues := values[i*4+4:] - testMapLoadedElements(t, m, expectedValues) + expectedValues := values[i*4+4:] + testMapLoadedElements(t, m, expectedValues) + } } }) - t.Run("root data slab with composite values, unload composite value from back to front", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root data slab with composite values, unload composite value from back to front", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - const mapSize = 3 - m, values, childSlabIDs := createMapWithChildArrayValues( - t, - storage, - address, - typeInfo, - mapSize, - func(i int) []Digest { return []Digest{Digest(i)} }, - ) + const mapSize = 3 + m, values, childSlabIDs := createMapWithChildArrayValues( + t, + storage, + address, + typeInfo, + mapSize, + func(i int) []Digest { return []Digest{Digest(i)} }, + useWrapperValue, + ) - // parent map: 1 root data slab - // composite elements: 1 root data slab for each - require.Equal(t, 1+mapSize, len(storage.deltas)) - require.Equal(t, 0, getMapMetaDataSlabCount(storage)) + // parent map: 1 root data slab + // composite elements: 1 root data slab for each + require.Equal(t, 1+mapSize, len(storage.deltas)) + require.Equal(t, 0, getMapMetaDataSlabCount(storage)) - testMapLoadedElements(t, m, values) + testMapLoadedElements(t, m, 
values)
 
-		// Unload composite element from back to front.
-		for i := len(values) - 1; i >= 0; i-- {
-			err := storage.Remove(childSlabIDs[i])
-			require.NoError(t, err)
+			// Unload composite element from back to front.
+			for i := len(values) - 1; i >= 0; i-- {
+				err := storage.Remove(childSlabIDs[i])
+				require.NoError(t, err)
 
-			expectedValues := values[:i]
-			testMapLoadedElements(t, m, expectedValues)
+				expectedValues := values[:i]
+				testMapLoadedElements(t, m, expectedValues)
+			}
 		}
 	})
 
-	t.Run("root data slab with long string key, unload key from back to front", func(t *testing.T) {
-		storage := newTestPersistentStorage(t)
+	runTest("root data slab with long string key, unload key from back to front", func(useWrapperValue bool) func(t *testing.T) {
+		return func(t *testing.T) {
+			storage := newTestPersistentStorage(t)
 
-		const mapSize = 3
-		m, values := createMapWithLongStringKey(t, storage, address, typeInfo, mapSize)
+			const mapSize = 3
+			m, values := createMapWithLongStringKey(t, storage, address, typeInfo, mapSize, useWrapperValue)
 
-		// parent map: 1 root data slab
-		// long string keys: 1 storable slab for each
-		require.Equal(t, 1+mapSize, len(storage.deltas))
-		require.Equal(t, 0, getMapMetaDataSlabCount(storage))
+			// parent map: 1 root data slab
+			// long string keys: 1 storable slab for each
+			require.Equal(t, 1+mapSize, len(storage.deltas))
+			require.Equal(t, 0, getMapMetaDataSlabCount(storage))
 
-		testMapLoadedElements(t, m, values)
+			testMapLoadedElements(t, m, values)
 
-		// Unload composite element from front to back.
-		for i := len(values) - 1; i >= 0; i-- {
-			k := values[i][0]
+			// Unload external key from back to front.
+			for i := len(values) - 1; i >= 0; i-- {
+				k := values[i][0]
 
-			s, ok := k.(StringValue)
-			require.True(t, ok)
+				s, ok := k.(StringValue)
+				require.True(t, ok)
 
-			// Find storage id for StringValue s.
-			var keyID SlabID
-			for id, slab := range storage.deltas {
-				if sslab, ok := slab.(*StorableSlab); ok {
-					if other, ok := sslab.storable.(StringValue); ok {
-						if s.str == other.str {
-							keyID = id
-							break
+				// Find storage id for StringValue s.
+				var keyID SlabID
+				for id, slab := range storage.deltas {
+					if sslab, ok := slab.(*StorableSlab); ok {
+						if other, ok := sslab.storable.(StringValue); ok {
+							if s.str == other.str {
+								keyID = id
+								break
+							}
 						}
 					}
 				}
-			}
 
-			require.NoError(t, keyID.Valid())
+				require.NoError(t, keyID.Valid())
 
-			err := storage.Remove(keyID)
-			require.NoError(t, err)
+				err := storage.Remove(keyID)
+				require.NoError(t, err)
 
-			expectedValues := values[:i]
-			testMapLoadedElements(t, m, expectedValues)
+				expectedValues := values[:i]
+				testMapLoadedElements(t, m, expectedValues)
+			}
 		}
 	})
 
-	t.Run("root data slab with composite values in collision group, unload value from back to front", func(t *testing.T) {
-		storage := newTestPersistentStorage(t)
+	runTest("root data slab with composite values in collision group, unload value from back to front", func(useWrapperValue bool) func(t *testing.T) {
+		return func(t *testing.T) {
+			storage := newTestPersistentStorage(t)
 
-		// Create parent map with 3 collision groups, 2 elements in each group. 
+ const mapSize = 6 + m, values, childSlabIDs := createMapWithChildArrayValues( + t, + storage, + address, + typeInfo, + mapSize, + func(i int) []Digest { return []Digest{Digest(i / 2), Digest(i)} }, + useWrapperValue, + ) - // parent map: 1 root data slab - // composite elements: 1 root data slab for each - require.Equal(t, 1+mapSize, len(storage.deltas)) - require.Equal(t, 0, getMapMetaDataSlabCount(storage)) + // parent map: 1 root data slab + // composite elements: 1 root data slab for each + require.Equal(t, 1+mapSize, len(storage.deltas)) + require.Equal(t, 0, getMapMetaDataSlabCount(storage)) - testMapLoadedElements(t, m, values) + testMapLoadedElements(t, m, values) - // Unload composite element from back to front - for i := len(values) - 1; i >= 0; i-- { - err := storage.Remove(childSlabIDs[i]) - require.NoError(t, err) + // Unload composite element from back to front + for i := len(values) - 1; i >= 0; i-- { + err := storage.Remove(childSlabIDs[i]) + require.NoError(t, err) - expectedValues := values[:i] - testMapLoadedElements(t, m, expectedValues) + expectedValues := values[:i] + testMapLoadedElements(t, m, expectedValues) + } } }) - t.Run("root data slab with composite values in external collision group, unload value from back to front", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root data slab with composite values in external collision group, unload value from back to front", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - // Create parent map with 3 external collision groups, 4 elements in the group. - const mapSize = 12 - m, values, childSlabIDs := createMapWithChildArrayValues( - t, - storage, - address, - typeInfo, - mapSize, - func(i int) []Digest { return []Digest{Digest(i / 4), Digest(i)} }, - ) + // Create parent map with 3 external collision groups, 4 elements in the group. 
+ const mapSize = 12 + m, values, childSlabIDs := createMapWithChildArrayValues( + t, + storage, + address, + typeInfo, + mapSize, + func(i int) []Digest { return []Digest{Digest(i / 4), Digest(i)} }, + useWrapperValue, + ) - // parent map: 1 root data slab, 3 external collision group - // composite elements: 1 root data slab for each - require.Equal(t, 1+3+mapSize, len(storage.deltas)) - require.Equal(t, 0, getMapMetaDataSlabCount(storage)) + // parent map: 1 root data slab, 3 external collision group + // composite elements: 1 root data slab for each + require.Equal(t, 1+3+mapSize, len(storage.deltas)) + require.Equal(t, 0, getMapMetaDataSlabCount(storage)) - testMapLoadedElements(t, m, values) + testMapLoadedElements(t, m, values) - // Unload composite element from back to front - for i := len(values) - 1; i >= 0; i-- { - err := storage.Remove(childSlabIDs[i]) - require.NoError(t, err) + // Unload composite element from back to front + for i := len(values) - 1; i >= 0; i-- { + err := storage.Remove(childSlabIDs[i]) + require.NoError(t, err) - expectedValues := values[:i] - testMapLoadedElements(t, m, expectedValues) + expectedValues := values[:i] + testMapLoadedElements(t, m, expectedValues) + } } }) - t.Run("root data slab with composite values in external collision group, unload external slab from back to front", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root data slab with composite values in external collision group, unload external slab from back to front", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - // Create parent map with 3 external collision groups, 4 elements in the group. - const mapSize = 12 - m, values, _ := createMapWithChildArrayValues( - t, - storage, - address, - typeInfo, - mapSize, - func(i int) []Digest { return []Digest{Digest(i / 4), Digest(i)} }, - ) + // Create parent map with 3 external collision groups, 4 elements in the group. 
+ const mapSize = 12 + m, values, _ := createMapWithChildArrayValues( + t, + storage, + address, + typeInfo, + mapSize, + func(i int) []Digest { return []Digest{Digest(i / 4), Digest(i)} }, + useWrapperValue, + ) - // parent map: 1 root data slab, 3 external collision group - // composite elements: 1 root data slab for each - require.Equal(t, 1+3+mapSize, len(storage.deltas)) - require.Equal(t, 0, getMapMetaDataSlabCount(storage)) + // parent map: 1 root data slab, 3 external collision group + // composite elements: 1 root data slab for each + require.Equal(t, 1+3+mapSize, len(storage.deltas)) + require.Equal(t, 0, getMapMetaDataSlabCount(storage)) - testMapLoadedElements(t, m, values) + testMapLoadedElements(t, m, values) - // Unload external slabs from back to front - var externalCollisionSlabIDs []SlabID - for id, slab := range storage.deltas { - if dataSlab, ok := slab.(*MapDataSlab); ok { - if dataSlab.collisionGroup { - externalCollisionSlabIDs = append(externalCollisionSlabIDs, id) + // Unload external slabs from back to front + var externalCollisionSlabIDs []SlabID + for id, slab := range storage.deltas { + if dataSlab, ok := slab.(*MapDataSlab); ok { + if dataSlab.collisionGroup { + externalCollisionSlabIDs = append(externalCollisionSlabIDs, id) + } } } - } - require.Equal(t, 3, len(externalCollisionSlabIDs)) + require.Equal(t, 3, len(externalCollisionSlabIDs)) - sort.Slice(externalCollisionSlabIDs, func(i, j int) bool { - a := externalCollisionSlabIDs[i] - b := externalCollisionSlabIDs[j] - if a.address == b.address { - return a.IndexAsUint64() < b.IndexAsUint64() - } - return a.AddressAsUint64() < b.AddressAsUint64() - }) + sort.Slice(externalCollisionSlabIDs, func(i, j int) bool { + a := externalCollisionSlabIDs[i] + b := externalCollisionSlabIDs[j] + if a.address == b.address { + return a.IndexAsUint64() < b.IndexAsUint64() + } + return a.AddressAsUint64() < b.AddressAsUint64() + }) - for i := len(externalCollisionSlabIDs) - 1; i >= 0; i-- { - err := storage.Remove(externalCollisionSlabIDs[i]) - require.NoError(t, err) + for i := len(externalCollisionSlabIDs) - 1; i >= 0; i-- { + err := storage.Remove(externalCollisionSlabIDs[i]) + require.NoError(t, err) - expectedValues := values[:i*4] - testMapLoadedElements(t, m, expectedValues) + expectedValues := values[:i*4] + testMapLoadedElements(t, m, expectedValues) + } } }) - t.Run("root data slab with composite values, unload value in the middle", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root data slab with composite values, unload value in the middle", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - const mapSize = 3 - m, values, childSlabIDs := createMapWithChildArrayValues( - t, - storage, - address, - typeInfo, - mapSize, - func(i int) []Digest { return []Digest{Digest(i)} }, - ) + const mapSize = 3 + m, values, childSlabIDs := createMapWithChildArrayValues( + t, + storage, + address, + typeInfo, + mapSize, + func(i int) []Digest { return []Digest{Digest(i)} }, + useWrapperValue, + ) - // parent map: 1 root data slab - // nested composite elements: 1 root data slab for each - require.Equal(t, 1+mapSize, len(storage.deltas)) - require.Equal(t, 0, getMapMetaDataSlabCount(storage)) + // parent map: 1 root data slab + // nested composite elements: 1 root data slab for each + require.Equal(t, 1+mapSize, len(storage.deltas)) + require.Equal(t, 0, getMapMetaDataSlabCount(storage)) - testMapLoadedElements(t, m, values) + 
testMapLoadedElements(t, m, values) - // Unload value in the middle - unloadValueIndex := 1 + // Unload value in the middle + unloadValueIndex := 1 - err := storage.Remove(childSlabIDs[unloadValueIndex]) - require.NoError(t, err) + err := storage.Remove(childSlabIDs[unloadValueIndex]) + require.NoError(t, err) - copy(values[unloadValueIndex:], values[unloadValueIndex+1:]) - values = values[:len(values)-1] + copy(values[unloadValueIndex:], values[unloadValueIndex+1:]) + values = values[:len(values)-1] - testMapLoadedElements(t, m, values) + testMapLoadedElements(t, m, values) + } }) - t.Run("root data slab with long string key, unload key in the middle", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root data slab with long string key, unload key in the middle", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - const mapSize = 3 - m, values := createMapWithLongStringKey(t, storage, address, typeInfo, mapSize) + const mapSize = 3 + m, values := createMapWithLongStringKey(t, storage, address, typeInfo, mapSize, useWrapperValue) - // parent map: 1 root data slab - // nested composite elements: 1 root data slab for each - require.Equal(t, 1+mapSize, len(storage.deltas)) - require.Equal(t, 0, getMapMetaDataSlabCount(storage)) + // parent map: 1 root data slab + // nested composite elements: 1 root data slab for each + require.Equal(t, 1+mapSize, len(storage.deltas)) + require.Equal(t, 0, getMapMetaDataSlabCount(storage)) - testMapLoadedElements(t, m, values) + testMapLoadedElements(t, m, values) - // Unload key in the middle. - unloadValueIndex := 1 + // Unload key in the middle. + unloadValueIndex := 1 - k := values[unloadValueIndex][0] + k := values[unloadValueIndex][0] - s, ok := k.(StringValue) - require.True(t, ok) + s, ok := k.(StringValue) + require.True(t, ok) - // Find storage id for StringValue s. - var keyID SlabID - for id, slab := range storage.deltas { - if sslab, ok := slab.(*StorableSlab); ok { - if other, ok := sslab.storable.(StringValue); ok { - if s.str == other.str { - keyID = id - break + // Find storage id for StringValue s. + var keyID SlabID + for id, slab := range storage.deltas { + if sslab, ok := slab.(*StorableSlab); ok { + if other, ok := sslab.storable.(StringValue); ok { + if s.str == other.str { + keyID = id + break + } } } } - } - require.NoError(t, keyID.Valid()) + require.NoError(t, keyID.Valid()) - err := storage.Remove(keyID) - require.NoError(t, err) + err := storage.Remove(keyID) + require.NoError(t, err) - copy(values[unloadValueIndex:], values[unloadValueIndex+1:]) - values = values[:len(values)-1] + copy(values[unloadValueIndex:], values[unloadValueIndex+1:]) + values = values[:len(values)-1] - testMapLoadedElements(t, m, values) + testMapLoadedElements(t, m, values) + } }) - t.Run("root data slab with composite values in collision group, unload value in the middle", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root data slab with composite values in collision group, unload value in the middle", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - // Create parent map with 3 collision groups, 2 elements in each group. 
- const mapSize = 6 - m, values, childSlabIDs := createMapWithChildArrayValues( - t, - storage, - address, - typeInfo, - mapSize, - func(i int) []Digest { return []Digest{Digest(i / 2), Digest(i)} }, - ) + // Create parent map with 3 collision groups, 2 elements in each group. + const mapSize = 6 + m, values, childSlabIDs := createMapWithChildArrayValues( + t, + storage, + address, + typeInfo, + mapSize, + func(i int) []Digest { return []Digest{Digest(i / 2), Digest(i)} }, + useWrapperValue, + ) - // parent map: 1 root data slab - // nested composite elements: 1 root data slab for each - require.Equal(t, 1+mapSize, len(storage.deltas)) - require.Equal(t, 0, getMapMetaDataSlabCount(storage)) + // parent map: 1 root data slab + // nested composite elements: 1 root data slab for each + require.Equal(t, 1+mapSize, len(storage.deltas)) + require.Equal(t, 0, getMapMetaDataSlabCount(storage)) - testMapLoadedElements(t, m, values) + testMapLoadedElements(t, m, values) - // Unload composite element in the middle - for _, unloadValueIndex := range []int{1, 3, 5} { - err := storage.Remove(childSlabIDs[unloadValueIndex]) - require.NoError(t, err) - } + // Unload composite element in the middle + for _, unloadValueIndex := range []int{1, 3, 5} { + err := storage.Remove(childSlabIDs[unloadValueIndex]) + require.NoError(t, err) + } - expectedValues := [][2]Value{ - values[0], - values[2], - values[4], + expectedValues := [][2]Value{ + values[0], + values[2], + values[4], + } + testMapLoadedElements(t, m, expectedValues) } - testMapLoadedElements(t, m, expectedValues) }) - t.Run("root data slab with composite values in external collision group, unload value in the middle", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root data slab with composite values in external collision group, unload value in the middle", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - // Create parent map with 3 external collision groups, 4 elements in the group. - const mapSize = 12 - m, values, childSlabIDs := createMapWithChildArrayValues( - t, - storage, - address, - typeInfo, - mapSize, - func(i int) []Digest { return []Digest{Digest(i / 4), Digest(i)} }, - ) + // Create parent map with 3 external collision groups, 4 elements in the group. + const mapSize = 12 + m, values, childSlabIDs := createMapWithChildArrayValues( + t, + storage, + address, + typeInfo, + mapSize, + func(i int) []Digest { return []Digest{Digest(i / 4), Digest(i)} }, + useWrapperValue, + ) - // parent map: 1 root data slab, 3 external collision group - // nested composite elements: 1 root data slab for each - require.Equal(t, 1+3+mapSize, len(storage.deltas)) - require.Equal(t, 0, getMapMetaDataSlabCount(storage)) + // parent map: 1 root data slab, 3 external collision group + // nested composite elements: 1 root data slab for each + require.Equal(t, 1+3+mapSize, len(storage.deltas)) + require.Equal(t, 0, getMapMetaDataSlabCount(storage)) - testMapLoadedElements(t, m, values) + testMapLoadedElements(t, m, values) - // Unload composite value in the middle. - for _, unloadValueIndex := range []int{1, 3, 5, 7, 9, 11} { - err := storage.Remove(childSlabIDs[unloadValueIndex]) - require.NoError(t, err) - } + // Unload composite value in the middle. 
+ for _, unloadValueIndex := range []int{1, 3, 5, 7, 9, 11} { + err := storage.Remove(childSlabIDs[unloadValueIndex]) + require.NoError(t, err) + } - expectedValues := [][2]Value{ - values[0], - values[2], - values[4], - values[6], - values[8], - values[10], + expectedValues := [][2]Value{ + values[0], + values[2], + values[4], + values[6], + values[8], + values[10], + } + testMapLoadedElements(t, m, expectedValues) } - testMapLoadedElements(t, m, expectedValues) }) - t.Run("root data slab with composite values in external collision group, unload external slab in the middle", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root data slab with composite values in external collision group, unload external slab in the middle", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - // Create parent map with 3 external collision groups, 4 elements in the group. - const mapSize = 12 - m, values, _ := createMapWithChildArrayValues( - t, - storage, - address, - typeInfo, - mapSize, - func(i int) []Digest { return []Digest{Digest(i / 4), Digest(i)} }, - ) + // Create parent map with 3 external collision groups, 4 elements in the group. + const mapSize = 12 + m, values, _ := createMapWithChildArrayValues( + t, + storage, + address, + typeInfo, + mapSize, + func(i int) []Digest { return []Digest{Digest(i / 4), Digest(i)} }, + useWrapperValue, + ) - // parent map: 1 root data slab, 3 external collision group - // nested composite elements: 1 root data slab for each - require.Equal(t, 1+3+mapSize, len(storage.deltas)) - require.Equal(t, 0, getMapMetaDataSlabCount(storage)) + // parent map: 1 root data slab, 3 external collision group + // nested composite elements: 1 root data slab for each + require.Equal(t, 1+3+mapSize, len(storage.deltas)) + require.Equal(t, 0, getMapMetaDataSlabCount(storage)) - testMapLoadedElements(t, m, values) + testMapLoadedElements(t, m, values) - // Unload external slabs in the middle. - var externalCollisionSlabIDs []SlabID - for id, slab := range storage.deltas { - if dataSlab, ok := slab.(*MapDataSlab); ok { - if dataSlab.collisionGroup { - externalCollisionSlabIDs = append(externalCollisionSlabIDs, id) + // Unload external slabs in the middle. 
+ var externalCollisionSlabIDs []SlabID + for id, slab := range storage.deltas { + if dataSlab, ok := slab.(*MapDataSlab); ok { + if dataSlab.collisionGroup { + externalCollisionSlabIDs = append(externalCollisionSlabIDs, id) + } } } - } - require.Equal(t, 3, len(externalCollisionSlabIDs)) - - sort.Slice(externalCollisionSlabIDs, func(i, j int) bool { - a := externalCollisionSlabIDs[i] - b := externalCollisionSlabIDs[j] - if a.address == b.address { - return a.IndexAsUint64() < b.IndexAsUint64() - } - return a.AddressAsUint64() < b.AddressAsUint64() - }) + require.Equal(t, 3, len(externalCollisionSlabIDs)) - id := externalCollisionSlabIDs[1] - err := storage.Remove(id) - require.NoError(t, err) - - copy(values[4:], values[8:]) - values = values[:8] - - testMapLoadedElements(t, m, values) - }) - - t.Run("root data slab with composite values, unload composite elements during iteration", func(t *testing.T) { - storage := newTestPersistentStorage(t) - - const mapSize = 3 - m, values, childSlabIDs := createMapWithChildArrayValues( - t, - storage, - address, - typeInfo, - mapSize, - func(i int) []Digest { return []Digest{Digest(i)} }, - ) - - // parent map: 1 root data slab - // nested composite elements: 1 root data slab for each - require.Equal(t, 1+mapSize, len(storage.deltas)) - require.Equal(t, 0, getMapMetaDataSlabCount(storage)) - - testMapLoadedElements(t, m, values) - - i := 0 - err := m.IterateReadOnlyLoadedValues(func(k Value, v Value) (bool, error) { - // At this point, iterator returned first element (v). + sort.Slice(externalCollisionSlabIDs, func(i, j int) bool { + a := externalCollisionSlabIDs[i] + b := externalCollisionSlabIDs[j] + if a.address == b.address { + return a.IndexAsUint64() < b.IndexAsUint64() + } + return a.AddressAsUint64() < b.AddressAsUint64() + }) - // Remove all other nested composite elements (except first element) from storage. - for _, slabID := range childSlabIDs[1:] { - err := storage.Remove(slabID) - require.NoError(t, err) - } + id := externalCollisionSlabIDs[1] + err := storage.Remove(id) + require.NoError(t, err) - require.Equal(t, 0, i) - valueEqual(t, values[0][0], k) - valueEqual(t, values[0][1], v) - i++ - return true, nil - }) + copy(values[4:], values[8:]) + values = values[:8] - require.NoError(t, err) - require.Equal(t, 1, i) // Only first element is iterated because other elements are remove during iteration. 
+			testMapLoadedElements(t, m, values)
+		}
 	})
 
-	t.Run("root data slab with simple and composite values, unloading composite value", func(t *testing.T) {
-		const mapSize = 3
-
-		// Create a map with nested composite value at specified index
-		for childArrayIndex := 0; childArrayIndex < mapSize; childArrayIndex++ {
+	runTest("root data slab with composite values, unload composite elements during iteration", func(useWrapperValue bool) func(t *testing.T) {
+		return func(t *testing.T) {
 			storage := newTestPersistentStorage(t)
 
-			m, values, childSlabID := createMapWithSimpleAndChildArrayValues(
+			const mapSize = 3
+			m, values, childSlabIDs := createMapWithChildArrayValues(
 				t,
 				storage,
 				address,
 				typeInfo,
 				mapSize,
-				childArrayIndex,
 				func(i int) []Digest { return []Digest{Digest(i)} },
+				useWrapperValue,
 			)
 
 			// parent map: 1 root data slab
-			// composite element: 1 root data slab
-			require.Equal(t, 2, len(storage.deltas))
+			// nested composite elements: 1 root data slab for each
+			require.Equal(t, 1+mapSize, len(storage.deltas))
 			require.Equal(t, 0, getMapMetaDataSlabCount(storage))
 
 			testMapLoadedElements(t, m, values)
 
-			// Unload composite value
-			err := storage.Remove(childSlabID)
-			require.NoError(t, err)
+			i := 0
+			err := m.IterateReadOnlyLoadedValues(func(k Value, v Value) (bool, error) {
+				// At this point, iterator returned first element (v).
 
-			copy(values[childArrayIndex:], values[childArrayIndex+1:])
-			values = values[:len(values)-1]
+				// Remove all other nested composite elements (except first element) from storage.
+				for _, slabID := range childSlabIDs[1:] {
+					err := storage.Remove(slabID)
+					require.NoError(t, err)
+				}
 
-			testMapLoadedElements(t, m, values)
+				require.Equal(t, 0, i)
+				valueEqual(t, values[0][0], k)
+				valueEqual(t, values[0][1], v)
+				i++
+				return true, nil
+			})
+
+			require.NoError(t, err)
+			require.Equal(t, 1, i) // Only first element is iterated because other elements are removed during iteration. 
} }) - t.Run("root metadata slab with simple values", func(t *testing.T) { - storage := newTestPersistentStorage(t) - - const mapSize = 20 - m, values := createMapWithSimpleValues( - t, - storage, - address, - typeInfo, - mapSize, - func(i int) []Digest { return []Digest{Digest(i)} }, - ) + runTest("root data slab with simple and composite values, unloading composite value", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + const mapSize = 3 + + // Create a map with nested composite value at specified index + for childArrayIndex := 0; childArrayIndex < mapSize; childArrayIndex++ { + storage := newTestPersistentStorage(t) + + m, values, childSlabID := createMapWithSimpleAndChildArrayValues( + t, + storage, + address, + typeInfo, + mapSize, + childArrayIndex, + func(i int) []Digest { return []Digest{Digest(i)} }, + useWrapperValue, + ) + + // parent map: 1 root data slab + // composite element: 1 root data slab + require.Equal(t, 2, len(storage.deltas)) + require.Equal(t, 0, getMapMetaDataSlabCount(storage)) + + testMapLoadedElements(t, m, values) + + // Unload composite value + err := storage.Remove(childSlabID) + require.NoError(t, err) - // parent map (2 levels): 1 root metadata slab, 3 data slabs - require.Equal(t, 4, len(storage.deltas)) - require.Equal(t, 1, getMapMetaDataSlabCount(storage)) + copy(values[childArrayIndex:], values[childArrayIndex+1:]) + values = values[:len(values)-1] - testMapLoadedElements(t, m, values) + testMapLoadedElements(t, m, values) + } + } }) - t.Run("root metadata slab with composite values", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root metadata slab with simple values", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - const mapSize = 20 - m, values, _ := createMapWithChildArrayValues( - t, - storage, - address, - typeInfo, - mapSize, - func(i int) []Digest { return []Digest{Digest(i)} }, - ) + const mapSize = 20 + m, values := createMapWithSimpleValues( + t, + storage, + address, + typeInfo, + mapSize, + func(i int) []Digest { return []Digest{Digest(i)} }, + useWrapperValue, + ) - // parent map (2 levels): 1 root metadata slab, 3 data slabs - // composite values: 1 root data slab for each - require.Equal(t, 4+mapSize, len(storage.deltas)) - require.Equal(t, 1, getMapMetaDataSlabCount(storage)) + // parent map (2 levels): 1 root metadata slab, 3 data slabs + require.Equal(t, 4, len(storage.deltas)) + require.Equal(t, 1, getMapMetaDataSlabCount(storage)) - testMapLoadedElements(t, m, values) + testMapLoadedElements(t, m, values) + } }) - t.Run("root metadata slab with composite values, unload value from front to back", func(t *testing.T) { - storage := newTestPersistentStorage(t) - - const mapSize = 20 - m, values, childSlabIDs := createMapWithChildArrayValues( - t, - storage, - address, - typeInfo, - mapSize, - func(i int) []Digest { return []Digest{Digest(i)} }, - ) - - // parent map (2 levels): 1 root metadata slab, 3 data slabs - // composite values : 1 root data slab for each - require.Equal(t, 4+mapSize, len(storage.deltas)) - require.Equal(t, 1, getMapMetaDataSlabCount(storage)) + runTest("root metadata slab with composite values", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - testMapLoadedElements(t, m, values) + const mapSize = 20 + m, values, _ := createMapWithChildArrayValues( + t, + storage, + address, + typeInfo, + mapSize, + func(i int) 
[]Digest { return []Digest{Digest(i)} }, + useWrapperValue, + ) - // Unload composite element from front to back - for i := 0; i < len(values); i++ { - err := storage.Remove(childSlabIDs[i]) - require.NoError(t, err) + // parent map (2 levels): 1 root metadata slab, 3 data slabs + // composite values: 1 root data slab for each + require.Equal(t, 4+mapSize, len(storage.deltas)) + require.Equal(t, 1, getMapMetaDataSlabCount(storage)) - expectedValues := values[i+1:] - testMapLoadedElements(t, m, expectedValues) + testMapLoadedElements(t, m, values) } }) - t.Run("root metadata slab with composite values, unload values from back to front", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root metadata slab with composite values, unload value from front to back", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - const mapSize = 20 - m, values, childSlabIDs := createMapWithChildArrayValues( - t, - storage, - address, - typeInfo, - mapSize, - func(i int) []Digest { return []Digest{Digest(i)} }, - ) + const mapSize = 20 + m, values, childSlabIDs := createMapWithChildArrayValues( + t, + storage, + address, + typeInfo, + mapSize, + func(i int) []Digest { return []Digest{Digest(i)} }, + useWrapperValue, + ) - // parent map (2 levels): 1 root metadata slab, 3 data slabs - // composite values: 1 root data slab for each - require.Equal(t, 4+mapSize, len(storage.deltas)) - require.Equal(t, 1, getMapMetaDataSlabCount(storage)) + // parent map (2 levels): 1 root metadata slab, 3 data slabs + // composite values : 1 root data slab for each + require.Equal(t, 4+mapSize, len(storage.deltas)) + require.Equal(t, 1, getMapMetaDataSlabCount(storage)) - testMapLoadedElements(t, m, values) + testMapLoadedElements(t, m, values) - // Unload composite element from back to front - for i := len(values) - 1; i >= 0; i-- { - err := storage.Remove(childSlabIDs[i]) - require.NoError(t, err) + // Unload composite element from front to back + for i := 0; i < len(values); i++ { + err := storage.Remove(childSlabIDs[i]) + require.NoError(t, err) - expectedValues := values[:i] - testMapLoadedElements(t, m, expectedValues) + expectedValues := values[i+1:] + testMapLoadedElements(t, m, expectedValues) + } } }) - t.Run("root metadata slab with composite values, unload value in the middle", func(t *testing.T) { - storage := newTestPersistentStorage(t) - - const mapSize = 20 - m, values, childSlabIDs := createMapWithChildArrayValues( - t, - storage, - address, - typeInfo, - mapSize, - func(i int) []Digest { return []Digest{Digest(i)} }, - ) - - // parent map (2 levels): 1 root metadata slab, 3 data slabs - // composite values: 1 root data slab for each - require.Equal(t, 4+mapSize, len(storage.deltas)) - require.Equal(t, 1, getMapMetaDataSlabCount(storage)) + runTest("root metadata slab with composite values, unload values from back to front", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - testMapLoadedElements(t, m, values) + const mapSize = 20 + m, values, childSlabIDs := createMapWithChildArrayValues( + t, + storage, + address, + typeInfo, + mapSize, + func(i int) []Digest { return []Digest{Digest(i)} }, + useWrapperValue, + ) - // Unload composite element in the middle - for _, index := range []int{4, 14} { - err := storage.Remove(childSlabIDs[index]) - require.NoError(t, err) + // parent map (2 levels): 1 root metadata slab, 3 data slabs + // composite values: 1 
root data slab for each + require.Equal(t, 4+mapSize, len(storage.deltas)) + require.Equal(t, 1, getMapMetaDataSlabCount(storage)) - copy(values[index:], values[index+1:]) - values = values[:len(values)-1] + testMapLoadedElements(t, m, values) - copy(childSlabIDs[index:], childSlabIDs[index+1:]) - childSlabIDs = childSlabIDs[:len(childSlabIDs)-1] + // Unload composite element from back to front + for i := len(values) - 1; i >= 0; i-- { + err := storage.Remove(childSlabIDs[i]) + require.NoError(t, err) - testMapLoadedElements(t, m, values) + expectedValues := values[:i] + testMapLoadedElements(t, m, expectedValues) + } } }) - t.Run("root metadata slab with simple and composite values, unload composite value", func(t *testing.T) { - const mapSize = 20 - - // Create a map with nested composite value at specified index - for childArrayIndex := 0; childArrayIndex < mapSize; childArrayIndex++ { + runTest("root metadata slab with composite values, unload value in the middle", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { storage := newTestPersistentStorage(t) - m, values, childSlabID := createMapWithSimpleAndChildArrayValues( + const mapSize = 20 + m, values, childSlabIDs := createMapWithChildArrayValues( t, storage, address, typeInfo, mapSize, - childArrayIndex, func(i int) []Digest { return []Digest{Digest(i)} }, + useWrapperValue, ) // parent map (2 levels): 1 root metadata slab, 3 data slabs // composite values: 1 root data slab for each - require.Equal(t, 5, len(storage.deltas)) + require.Equal(t, 4+mapSize, len(storage.deltas)) require.Equal(t, 1, getMapMetaDataSlabCount(storage)) testMapLoadedElements(t, m, values) - err := storage.Remove(childSlabID) - require.NoError(t, err) + // Unload composite element in the middle + for _, index := range []int{4, 14} { + err := storage.Remove(childSlabIDs[index]) + require.NoError(t, err) - copy(values[childArrayIndex:], values[childArrayIndex+1:]) - values = values[:len(values)-1] + copy(values[index:], values[index+1:]) + values = values[:len(values)-1] - testMapLoadedElements(t, m, values) + copy(childSlabIDs[index:], childSlabIDs[index+1:]) + childSlabIDs = childSlabIDs[:len(childSlabIDs)-1] + + testMapLoadedElements(t, m, values) + } } }) - t.Run("root metadata slab, unload data slab from front to back", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root metadata slab with simple and composite values, unload composite value", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + const mapSize = 20 + + // Create a map with nested composite value at specified index + for childArrayIndex := 0; childArrayIndex < mapSize; childArrayIndex++ { + storage := newTestPersistentStorage(t) + + m, values, childSlabID := createMapWithSimpleAndChildArrayValues( + t, + storage, + address, + typeInfo, + mapSize, + childArrayIndex, + func(i int) []Digest { return []Digest{Digest(i)} }, + useWrapperValue, + ) + + // parent map (2 levels): 1 root metadata slab, 3 data slabs + // composite values: 1 root data slab for each + require.Equal(t, 5, len(storage.deltas)) + require.Equal(t, 1, getMapMetaDataSlabCount(storage)) + + testMapLoadedElements(t, m, values) + + err := storage.Remove(childSlabID) + require.NoError(t, err) - const mapSize = 20 + copy(values[childArrayIndex:], values[childArrayIndex+1:]) + values = values[:len(values)-1] - m, values := createMapWithSimpleValues( - t, - storage, - address, - typeInfo, - mapSize, - func(i int) []Digest { return []Digest{Digest(i)} }, - 
) + testMapLoadedElements(t, m, values) + } + } + }) - // parent map (2 levels): 1 root metadata slab, 3 data slabs - require.Equal(t, 4, len(storage.deltas)) - require.Equal(t, 1, getMapMetaDataSlabCount(storage)) + runTest("root metadata slab, unload data slab from front to back", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - testMapLoadedElements(t, m, values) + const mapSize = 20 - rootMetaDataSlab, ok := m.root.(*MapMetaDataSlab) - require.True(t, ok) + m, values := createMapWithSimpleValues( + t, + storage, + address, + typeInfo, + mapSize, + func(i int) []Digest { return []Digest{Digest(i)} }, + useWrapperValue, + ) - // Unload data slabs from front to back - for i := 0; i < len(rootMetaDataSlab.childrenHeaders); i++ { + // parent map (2 levels): 1 root metadata slab, 3 data slabs + require.Equal(t, 4, len(storage.deltas)) + require.Equal(t, 1, getMapMetaDataSlabCount(storage)) - childHeader := rootMetaDataSlab.childrenHeaders[i] + testMapLoadedElements(t, m, values) - // Get data slab element count before unload it from storage. - // Element count isn't in the header. - mapDataSlab, ok := storage.deltas[childHeader.slabID].(*MapDataSlab) + rootMetaDataSlab, ok := m.root.(*MapMetaDataSlab) require.True(t, ok) - count := mapDataSlab.elements.Count() + // Unload data slabs from front to back + for i := 0; i < len(rootMetaDataSlab.childrenHeaders); i++ { - err := storage.Remove(childHeader.slabID) - require.NoError(t, err) + childHeader := rootMetaDataSlab.childrenHeaders[i] - values = values[count:] + // Get data slab element count before unload it from storage. + // Element count isn't in the header. + mapDataSlab, ok := storage.deltas[childHeader.slabID].(*MapDataSlab) + require.True(t, ok) - testMapLoadedElements(t, m, values) + count := mapDataSlab.elements.Count() + + err := storage.Remove(childHeader.slabID) + require.NoError(t, err) + + values = values[count:] + + testMapLoadedElements(t, m, values) + } } }) - t.Run("root metadata slab, unload data slab from back to front", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root metadata slab, unload data slab from back to front", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - const mapSize = 20 + const mapSize = 20 - m, values := createMapWithSimpleValues( - t, - storage, - address, - typeInfo, - mapSize, - func(i int) []Digest { return []Digest{Digest(i)} }, - ) + m, values := createMapWithSimpleValues( + t, + storage, + address, + typeInfo, + mapSize, + func(i int) []Digest { return []Digest{Digest(i)} }, + useWrapperValue, + ) - // parent map (2 levels): 1 root metadata slab, 3 data slabs - require.Equal(t, 4, len(storage.deltas)) - require.Equal(t, 1, getMapMetaDataSlabCount(storage)) + // parent map (2 levels): 1 root metadata slab, 3 data slabs + require.Equal(t, 4, len(storage.deltas)) + require.Equal(t, 1, getMapMetaDataSlabCount(storage)) - testMapLoadedElements(t, m, values) + testMapLoadedElements(t, m, values) - rootMetaDataSlab, ok := m.root.(*MapMetaDataSlab) - require.True(t, ok) + rootMetaDataSlab, ok := m.root.(*MapMetaDataSlab) + require.True(t, ok) - // Unload data slabs from back to front - for i := len(rootMetaDataSlab.childrenHeaders) - 1; i >= 0; i-- { + // Unload data slabs from back to front + for i := len(rootMetaDataSlab.childrenHeaders) - 1; i >= 0; i-- { - childHeader := rootMetaDataSlab.childrenHeaders[i] + childHeader := 
rootMetaDataSlab.childrenHeaders[i] - // Get data slab element count before unload it from storage - // Element count isn't in the header. - mapDataSlab, ok := storage.deltas[childHeader.slabID].(*MapDataSlab) - require.True(t, ok) + // Get data slab element count before unload it from storage + // Element count isn't in the header. + mapDataSlab, ok := storage.deltas[childHeader.slabID].(*MapDataSlab) + require.True(t, ok) - count := mapDataSlab.elements.Count() + count := mapDataSlab.elements.Count() - err := storage.Remove(childHeader.slabID) - require.NoError(t, err) + err := storage.Remove(childHeader.slabID) + require.NoError(t, err) - values = values[:len(values)-int(count)] + values = values[:len(values)-int(count)] - testMapLoadedElements(t, m, values) + testMapLoadedElements(t, m, values) + } } }) - t.Run("root metadata slab, unload data slab in the middle", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root metadata slab, unload data slab in the middle", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - const mapSize = 20 + const mapSize = 20 - m, values := createMapWithSimpleValues( - t, - storage, - address, - typeInfo, - mapSize, - func(i int) []Digest { return []Digest{Digest(i)} }, - ) + m, values := createMapWithSimpleValues( + t, + storage, + address, + typeInfo, + mapSize, + func(i int) []Digest { return []Digest{Digest(i)} }, + useWrapperValue, + ) - // parent map (2 levels): 1 root metadata slab, 3 data slabs - require.Equal(t, 4, len(storage.deltas)) - require.Equal(t, 1, getMapMetaDataSlabCount(storage)) + // parent map (2 levels): 1 root metadata slab, 3 data slabs + require.Equal(t, 4, len(storage.deltas)) + require.Equal(t, 1, getMapMetaDataSlabCount(storage)) - testMapLoadedElements(t, m, values) + testMapLoadedElements(t, m, values) - rootMetaDataSlab, ok := m.root.(*MapMetaDataSlab) - require.True(t, ok) + rootMetaDataSlab, ok := m.root.(*MapMetaDataSlab) + require.True(t, ok) - require.True(t, len(rootMetaDataSlab.childrenHeaders) > 2) + require.True(t, len(rootMetaDataSlab.childrenHeaders) > 2) - index := 1 - childHeader := rootMetaDataSlab.childrenHeaders[index] + index := 1 + childHeader := rootMetaDataSlab.childrenHeaders[index] - // Get element count from previous data slab - mapDataSlab, ok := storage.deltas[rootMetaDataSlab.childrenHeaders[0].slabID].(*MapDataSlab) - require.True(t, ok) + // Get element count from previous data slab + mapDataSlab, ok := storage.deltas[rootMetaDataSlab.childrenHeaders[0].slabID].(*MapDataSlab) + require.True(t, ok) - countAtIndex0 := mapDataSlab.elements.Count() + countAtIndex0 := mapDataSlab.elements.Count() - // Get element count from slab to be unloaded - mapDataSlab, ok = storage.deltas[rootMetaDataSlab.childrenHeaders[index].slabID].(*MapDataSlab) - require.True(t, ok) + // Get element count from slab to be unloaded + mapDataSlab, ok = storage.deltas[rootMetaDataSlab.childrenHeaders[index].slabID].(*MapDataSlab) + require.True(t, ok) - countAtIndex1 := mapDataSlab.elements.Count() + countAtIndex1 := mapDataSlab.elements.Count() - err := storage.Remove(childHeader.slabID) - require.NoError(t, err) + err := storage.Remove(childHeader.slabID) + require.NoError(t, err) - copy(values[countAtIndex0:], values[countAtIndex0+countAtIndex1:]) - values = values[:m.Count()-uint64(countAtIndex1)] + copy(values[countAtIndex0:], values[countAtIndex0+countAtIndex1:]) + values = values[:m.Count()-uint64(countAtIndex1)] - 
testMapLoadedElements(t, m, values) + testMapLoadedElements(t, m, values) + } }) - t.Run("root metadata slab, unload non-root metadata slab from front to back", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root metadata slab, unload non-root metadata slab from front to back", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - const mapSize = 200 + const mapSize = 200 - m, values := createMapWithSimpleValues( - t, - storage, - address, - typeInfo, - mapSize, - func(i int) []Digest { return []Digest{Digest(i)} }, - ) + m, values := createMapWithSimpleValues( + t, + storage, + address, + typeInfo, + mapSize, + func(i int) []Digest { return []Digest{Digest(i)} }, + useWrapperValue, + ) - // parent map (3 levels): 1 root metadata slab, 3 child metadata slabs, n data slabs - require.Equal(t, 4, getMapMetaDataSlabCount(storage)) + // parent map (3 levels): 1 root metadata slab, 3 child metadata slabs, n data slabs + require.Equal(t, 4, getMapMetaDataSlabCount(storage)) - rootMetaDataSlab, ok := m.root.(*MapMetaDataSlab) - require.True(t, ok) + rootMetaDataSlab, ok := m.root.(*MapMetaDataSlab) + require.True(t, ok) - // Unload non-root metadata slabs from front to back. - for i := 0; i < len(rootMetaDataSlab.childrenHeaders); i++ { + // Unload non-root metadata slabs from front to back. + for i := 0; i < len(rootMetaDataSlab.childrenHeaders); i++ { - childHeader := rootMetaDataSlab.childrenHeaders[i] + childHeader := rootMetaDataSlab.childrenHeaders[i] - err := storage.Remove(childHeader.slabID) - require.NoError(t, err) + err := storage.Remove(childHeader.slabID) + require.NoError(t, err) - // Use firstKey to deduce number of elements in slab. - var expectedValues [][2]Value - if i < len(rootMetaDataSlab.childrenHeaders)-1 { - nextChildHeader := rootMetaDataSlab.childrenHeaders[i+1] - expectedValues = values[int(nextChildHeader.firstKey):] - } + // Use firstKey to deduce number of elements in slab. + var expectedValues [][2]Value + if i < len(rootMetaDataSlab.childrenHeaders)-1 { + nextChildHeader := rootMetaDataSlab.childrenHeaders[i+1] + expectedValues = values[int(nextChildHeader.firstKey):] + } - testMapLoadedElements(t, m, expectedValues) + testMapLoadedElements(t, m, expectedValues) + } } }) - t.Run("root metadata slab, unload non-root metadata slab from back to front", func(t *testing.T) { - storage := newTestPersistentStorage(t) + runTest("root metadata slab, unload non-root metadata slab from back to front", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - const mapSize = 200 + const mapSize = 200 - m, values := createMapWithSimpleValues( - t, - storage, - address, - typeInfo, - mapSize, - func(i int) []Digest { return []Digest{Digest(i)} }, - ) + m, values := createMapWithSimpleValues( + t, + storage, + address, + typeInfo, + mapSize, + func(i int) []Digest { return []Digest{Digest(i)} }, + useWrapperValue, + ) - // parent map (3 levels): 1 root metadata slab, 3 child metadata slabs, n data slabs - require.Equal(t, 4, getMapMetaDataSlabCount(storage)) + // parent map (3 levels): 1 root metadata slab, 3 child metadata slabs, n data slabs + require.Equal(t, 4, getMapMetaDataSlabCount(storage)) - rootMetaDataSlab, ok := m.root.(*MapMetaDataSlab) - require.True(t, ok) + rootMetaDataSlab, ok := m.root.(*MapMetaDataSlab) + require.True(t, ok) - // Unload non-root metadata slabs from back to front. 
- for i := len(rootMetaDataSlab.childrenHeaders) - 1; i >= 0; i-- { + // Unload non-root metadata slabs from back to front. + for i := len(rootMetaDataSlab.childrenHeaders) - 1; i >= 0; i-- { - childHeader := rootMetaDataSlab.childrenHeaders[i] + childHeader := rootMetaDataSlab.childrenHeaders[i] - err := storage.Remove(childHeader.slabID) - require.NoError(t, err) + err := storage.Remove(childHeader.slabID) + require.NoError(t, err) - // Use firstKey to deduce number of elements in slabs. - values = values[:childHeader.firstKey] + // Use firstKey to deduce number of elements in slabs. + values = values[:childHeader.firstKey] - testMapLoadedElements(t, m, values) + testMapLoadedElements(t, m, values) + } } }) - t.Run("root metadata slab with composite values, unload composite value at random index", func(t *testing.T) { - - storage := newTestPersistentStorage(t) + runTest("root metadata slab with composite values, unload composite value at random index", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - const mapSize = 500 - m, values, childSlabIDs := createMapWithChildArrayValues( - t, - storage, - address, - typeInfo, - mapSize, - func(i int) []Digest { return []Digest{Digest(i)} }, - ) + const mapSize = 500 + m, values, childSlabIDs := createMapWithChildArrayValues( + t, + storage, + address, + typeInfo, + mapSize, + func(i int) []Digest { return []Digest{Digest(i)} }, + useWrapperValue, + ) - // parent map (3 levels): 1 root metadata slab, n non-root metadata slabs, n data slabs - // nested composite elements: 1 root data slab for each - require.True(t, len(storage.deltas) > 1+mapSize) - require.True(t, getMapMetaDataSlabCount(storage) > 1) + // parent map (3 levels): 1 root metadata slab, n non-root metadata slabs, n data slabs + // nested composite elements: 1 root data slab for each + require.True(t, len(storage.deltas) > 1+mapSize) + require.True(t, getMapMetaDataSlabCount(storage) > 1) - testMapLoadedElements(t, m, values) + testMapLoadedElements(t, m, values) - r := newRand(t) + r := newRand(t) - // Unload composite element in random position - for len(values) > 0 { + // Unload composite element in random position + for len(values) > 0 { - i := r.Intn(len(values)) + i := r.Intn(len(values)) - err := storage.Remove(childSlabIDs[i]) - require.NoError(t, err) + err := storage.Remove(childSlabIDs[i]) + require.NoError(t, err) - copy(values[i:], values[i+1:]) - values = values[:len(values)-1] + copy(values[i:], values[i+1:]) + values = values[:len(values)-1] - copy(childSlabIDs[i:], childSlabIDs[i+1:]) - childSlabIDs = childSlabIDs[:len(childSlabIDs)-1] + copy(childSlabIDs[i:], childSlabIDs[i+1:]) + childSlabIDs = childSlabIDs[:len(childSlabIDs)-1] - testMapLoadedElements(t, m, values) + testMapLoadedElements(t, m, values) + } } }) - t.Run("root metadata slab with composite values, unload random data slab", func(t *testing.T) { + runTest("root metadata slab with composite values, unload random data slab", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - storage := newTestPersistentStorage(t) + const mapSize = 500 + m, values, _ := createMapWithChildArrayValues( + t, + storage, + address, + typeInfo, + mapSize, + func(i int) []Digest { return []Digest{Digest(i)} }, + useWrapperValue, + ) - const mapSize = 500 - m, values, _ := createMapWithChildArrayValues( - t, - storage, - address, - typeInfo, - mapSize, - func(i int) []Digest { return 
[]Digest{Digest(i)} }, - ) + // parent map (3 levels): 1 root metadata slab, n non-root metadata slabs, n data slabs + // composite values: 1 root data slab for each + require.True(t, len(storage.deltas) > 1+mapSize) + require.True(t, getMapMetaDataSlabCount(storage) > 1) - // parent map (3 levels): 1 root metadata slab, n non-root metadata slabs, n data slabs - // composite values: 1 root data slab for each - require.True(t, len(storage.deltas) > 1+mapSize) - require.True(t, getMapMetaDataSlabCount(storage) > 1) + testMapLoadedElements(t, m, values) - testMapLoadedElements(t, m, values) + rootMetaDataSlab, ok := m.root.(*MapMetaDataSlab) + require.True(t, ok) - rootMetaDataSlab, ok := m.root.(*MapMetaDataSlab) - require.True(t, ok) + type slabInfo struct { + id SlabID + startIndex int + count int + } - type slabInfo struct { - id SlabID - startIndex int - count int - } + var dataSlabInfos []*slabInfo + for _, mheader := range rootMetaDataSlab.childrenHeaders { - var dataSlabInfos []*slabInfo - for _, mheader := range rootMetaDataSlab.childrenHeaders { + nonRootMetaDataSlab, ok := storage.deltas[mheader.slabID].(*MapMetaDataSlab) + require.True(t, ok) - nonRootMetaDataSlab, ok := storage.deltas[mheader.slabID].(*MapMetaDataSlab) - require.True(t, ok) + for i := 0; i < len(nonRootMetaDataSlab.childrenHeaders); i++ { + h := nonRootMetaDataSlab.childrenHeaders[i] - for i := 0; i < len(nonRootMetaDataSlab.childrenHeaders); i++ { - h := nonRootMetaDataSlab.childrenHeaders[i] + if len(dataSlabInfos) > 0 { + // Update previous slabInfo.count + dataSlabInfos[len(dataSlabInfos)-1].count = int(h.firstKey) - dataSlabInfos[len(dataSlabInfos)-1].startIndex + } - if len(dataSlabInfos) > 0 { - // Update previous slabInfo.count - dataSlabInfos[len(dataSlabInfos)-1].count = int(h.firstKey) - dataSlabInfos[len(dataSlabInfos)-1].startIndex + dataSlabInfos = append(dataSlabInfos, &slabInfo{id: h.slabID, startIndex: int(h.firstKey)}) } - - dataSlabInfos = append(dataSlabInfos, &slabInfo{id: h.slabID, startIndex: int(h.firstKey)}) } - } - r := newRand(t) + r := newRand(t) - for len(dataSlabInfos) > 0 { - index := r.Intn(len(dataSlabInfos)) + for len(dataSlabInfos) > 0 { + index := r.Intn(len(dataSlabInfos)) - slabToBeRemoved := dataSlabInfos[index] + slabToBeRemoved := dataSlabInfos[index] - // Update startIndex for all subsequence data slabs - for i := index + 1; i < len(dataSlabInfos); i++ { - dataSlabInfos[i].startIndex -= slabToBeRemoved.count - } + // Update startIndex for all subsequence data slabs + for i := index + 1; i < len(dataSlabInfos); i++ { + dataSlabInfos[i].startIndex -= slabToBeRemoved.count + } - err := storage.Remove(slabToBeRemoved.id) - require.NoError(t, err) + err := storage.Remove(slabToBeRemoved.id) + require.NoError(t, err) - if index == len(dataSlabInfos)-1 { - values = values[:slabToBeRemoved.startIndex] - } else { - copy(values[slabToBeRemoved.startIndex:], values[slabToBeRemoved.startIndex+slabToBeRemoved.count:]) - values = values[:len(values)-slabToBeRemoved.count] - } + if index == len(dataSlabInfos)-1 { + values = values[:slabToBeRemoved.startIndex] + } else { + copy(values[slabToBeRemoved.startIndex:], values[slabToBeRemoved.startIndex+slabToBeRemoved.count:]) + values = values[:len(values)-slabToBeRemoved.count] + } - copy(dataSlabInfos[index:], dataSlabInfos[index+1:]) - dataSlabInfos = dataSlabInfos[:len(dataSlabInfos)-1] + copy(dataSlabInfos[index:], dataSlabInfos[index+1:]) + dataSlabInfos = dataSlabInfos[:len(dataSlabInfos)-1] - testMapLoadedElements(t, m, values) - } + 
testMapLoadedElements(t, m, values) + } - require.Equal(t, 0, len(values)) + require.Equal(t, 0, len(values)) + } }) - t.Run("root metadata slab with composite values, unload random slab", func(t *testing.T) { - - storage := newTestPersistentStorage(t) + runTest("root metadata slab with composite values, unload random slab", func(useWrapperValue bool) func(t *testing.T) { + return func(t *testing.T) { + storage := newTestPersistentStorage(t) - const mapSize = 500 - m, values, _ := createMapWithChildArrayValues( - t, - storage, - address, - typeInfo, - mapSize, - func(i int) []Digest { return []Digest{Digest(i)} }, - ) + const mapSize = 500 + m, values, _ := createMapWithChildArrayValues( + t, + storage, + address, + typeInfo, + mapSize, + func(i int) []Digest { return []Digest{Digest(i)} }, + useWrapperValue, + ) - // parent map (3 levels): 1 root metadata slab, n non-root metadata slabs, n data slabs - // composite values: 1 root data slab for each - require.True(t, len(storage.deltas) > 1+mapSize) - require.True(t, getMapMetaDataSlabCount(storage) > 1) + // parent map (3 levels): 1 root metadata slab, n non-root metadata slabs, n data slabs + // composite values: 1 root data slab for each + require.True(t, len(storage.deltas) > 1+mapSize) + require.True(t, getMapMetaDataSlabCount(storage) > 1) - testMapLoadedElements(t, m, values) + testMapLoadedElements(t, m, values) - type slabInfo struct { - id SlabID - startIndex int - count int - children []*slabInfo - } + type slabInfo struct { + id SlabID + startIndex int + count int + children []*slabInfo + } - rootMetaDataSlab, ok := m.root.(*MapMetaDataSlab) - require.True(t, ok) + rootMetaDataSlab, ok := m.root.(*MapMetaDataSlab) + require.True(t, ok) - metadataSlabInfos := make([]*slabInfo, len(rootMetaDataSlab.childrenHeaders)) - for i, mheader := range rootMetaDataSlab.childrenHeaders { + metadataSlabInfos := make([]*slabInfo, len(rootMetaDataSlab.childrenHeaders)) + for i, mheader := range rootMetaDataSlab.childrenHeaders { - if i > 0 { - prevMetaDataSlabInfo := metadataSlabInfos[i-1] - prevDataSlabInfo := prevMetaDataSlabInfo.children[len(prevMetaDataSlabInfo.children)-1] + if i > 0 { + prevMetaDataSlabInfo := metadataSlabInfos[i-1] + prevDataSlabInfo := prevMetaDataSlabInfo.children[len(prevMetaDataSlabInfo.children)-1] - // Update previous metadata slab count - prevMetaDataSlabInfo.count = int(mheader.firstKey) - prevMetaDataSlabInfo.startIndex + // Update previous metadata slab count + prevMetaDataSlabInfo.count = int(mheader.firstKey) - prevMetaDataSlabInfo.startIndex - // Update previous data slab count - prevDataSlabInfo.count = int(mheader.firstKey) - prevDataSlabInfo.startIndex - } + // Update previous data slab count + prevDataSlabInfo.count = int(mheader.firstKey) - prevDataSlabInfo.startIndex + } - metadataSlabInfo := &slabInfo{ - id: mheader.slabID, - startIndex: int(mheader.firstKey), - } + metadataSlabInfo := &slabInfo{ + id: mheader.slabID, + startIndex: int(mheader.firstKey), + } - nonRootMetadataSlab, ok := storage.deltas[mheader.slabID].(*MapMetaDataSlab) - require.True(t, ok) + nonRootMetadataSlab, ok := storage.deltas[mheader.slabID].(*MapMetaDataSlab) + require.True(t, ok) - children := make([]*slabInfo, len(nonRootMetadataSlab.childrenHeaders)) - for i, h := range nonRootMetadataSlab.childrenHeaders { - children[i] = &slabInfo{ - id: h.slabID, - startIndex: int(h.firstKey), - } - if i > 0 { - children[i-1].count = int(h.firstKey) - children[i-1].startIndex + children := make([]*slabInfo, 
len(nonRootMetadataSlab.childrenHeaders)) + for i, h := range nonRootMetadataSlab.childrenHeaders { + children[i] = &slabInfo{ + id: h.slabID, + startIndex: int(h.firstKey), + } + if i > 0 { + children[i-1].count = int(h.firstKey) - children[i-1].startIndex + } } - } - metadataSlabInfo.children = children - metadataSlabInfos[i] = metadataSlabInfo - } + metadataSlabInfo.children = children + metadataSlabInfos[i] = metadataSlabInfo + } - const ( - metadataSlabType int = iota - dataSlabType - maxSlabType - ) + const ( + metadataSlabType int = iota + dataSlabType + maxSlabType + ) - r := newRand(t) + r := newRand(t) - for len(metadataSlabInfos) > 0 { + for len(metadataSlabInfos) > 0 { - var slabInfoToBeRemoved *slabInfo - var isLastSlab bool + var slabInfoToBeRemoved *slabInfo + var isLastSlab bool - switch r.Intn(maxSlabType) { + switch r.Intn(maxSlabType) { - case metadataSlabType: + case metadataSlabType: - metadataSlabIndex := r.Intn(len(metadataSlabInfos)) + metadataSlabIndex := r.Intn(len(metadataSlabInfos)) - isLastSlab = metadataSlabIndex == len(metadataSlabInfos)-1 + isLastSlab = metadataSlabIndex == len(metadataSlabInfos)-1 - slabInfoToBeRemoved = metadataSlabInfos[metadataSlabIndex] + slabInfoToBeRemoved = metadataSlabInfos[metadataSlabIndex] - count := slabInfoToBeRemoved.count + count := slabInfoToBeRemoved.count - // Update startIndex for subsequence metadata slabs - for i := metadataSlabIndex + 1; i < len(metadataSlabInfos); i++ { - metadataSlabInfos[i].startIndex -= count + // Update startIndex for subsequence metadata slabs + for i := metadataSlabIndex + 1; i < len(metadataSlabInfos); i++ { + metadataSlabInfos[i].startIndex -= count - for j := 0; j < len(metadataSlabInfos[i].children); j++ { - metadataSlabInfos[i].children[j].startIndex -= count + for j := 0; j < len(metadataSlabInfos[i].children); j++ { + metadataSlabInfos[i].children[j].startIndex -= count + } } - } - copy(metadataSlabInfos[metadataSlabIndex:], metadataSlabInfos[metadataSlabIndex+1:]) - metadataSlabInfos = metadataSlabInfos[:len(metadataSlabInfos)-1] + copy(metadataSlabInfos[metadataSlabIndex:], metadataSlabInfos[metadataSlabIndex+1:]) + metadataSlabInfos = metadataSlabInfos[:len(metadataSlabInfos)-1] - case dataSlabType: + case dataSlabType: - metadataSlabIndex := r.Intn(len(metadataSlabInfos)) + metadataSlabIndex := r.Intn(len(metadataSlabInfos)) - metadataSlabInfo := metadataSlabInfos[metadataSlabIndex] + metadataSlabInfo := metadataSlabInfos[metadataSlabIndex] - dataSlabIndex := r.Intn(len(metadataSlabInfo.children)) + dataSlabIndex := r.Intn(len(metadataSlabInfo.children)) - isLastSlab = (metadataSlabIndex == len(metadataSlabInfos)-1) && - (dataSlabIndex == len(metadataSlabInfo.children)-1) + isLastSlab = (metadataSlabIndex == len(metadataSlabInfos)-1) && + (dataSlabIndex == len(metadataSlabInfo.children)-1) - slabInfoToBeRemoved = metadataSlabInfo.children[dataSlabIndex] + slabInfoToBeRemoved = metadataSlabInfo.children[dataSlabIndex] - count := slabInfoToBeRemoved.count + count := slabInfoToBeRemoved.count - // Update startIndex for all subsequence data slabs in this metadata slab info - for i := dataSlabIndex + 1; i < len(metadataSlabInfo.children); i++ { - metadataSlabInfo.children[i].startIndex -= count - } + // Update startIndex for all subsequence data slabs in this metadata slab info + for i := dataSlabIndex + 1; i < len(metadataSlabInfo.children); i++ { + metadataSlabInfo.children[i].startIndex -= count + } - copy(metadataSlabInfo.children[dataSlabIndex:], 
metadataSlabInfo.children[dataSlabIndex+1:]) - metadataSlabInfo.children = metadataSlabInfo.children[:len(metadataSlabInfo.children)-1] + copy(metadataSlabInfo.children[dataSlabIndex:], metadataSlabInfo.children[dataSlabIndex+1:]) + metadataSlabInfo.children = metadataSlabInfo.children[:len(metadataSlabInfo.children)-1] - metadataSlabInfo.count -= count + metadataSlabInfo.count -= count - // Update startIndex for all subsequence metadata slabs. - for i := metadataSlabIndex + 1; i < len(metadataSlabInfos); i++ { - metadataSlabInfos[i].startIndex -= count + // Update startIndex for all subsequence metadata slabs. + for i := metadataSlabIndex + 1; i < len(metadataSlabInfos); i++ { + metadataSlabInfos[i].startIndex -= count - for j := 0; j < len(metadataSlabInfos[i].children); j++ { - metadataSlabInfos[i].children[j].startIndex -= count + for j := 0; j < len(metadataSlabInfos[i].children); j++ { + metadataSlabInfos[i].children[j].startIndex -= count + } } - } - if len(metadataSlabInfo.children) == 0 { - copy(metadataSlabInfos[metadataSlabIndex:], metadataSlabInfos[metadataSlabIndex+1:]) - metadataSlabInfos = metadataSlabInfos[:len(metadataSlabInfos)-1] + if len(metadataSlabInfo.children) == 0 { + copy(metadataSlabInfos[metadataSlabIndex:], metadataSlabInfos[metadataSlabIndex+1:]) + metadataSlabInfos = metadataSlabInfos[:len(metadataSlabInfos)-1] + } } - } - err := storage.Remove(slabInfoToBeRemoved.id) - require.NoError(t, err) + err := storage.Remove(slabInfoToBeRemoved.id) + require.NoError(t, err) - if isLastSlab { - values = values[:slabInfoToBeRemoved.startIndex] - } else { - copy(values[slabInfoToBeRemoved.startIndex:], values[slabInfoToBeRemoved.startIndex+slabInfoToBeRemoved.count:]) - values = values[:len(values)-slabInfoToBeRemoved.count] + if isLastSlab { + values = values[:slabInfoToBeRemoved.startIndex] + } else { + copy(values[slabInfoToBeRemoved.startIndex:], values[slabInfoToBeRemoved.startIndex+slabInfoToBeRemoved.count:]) + values = values[:len(values)-slabInfoToBeRemoved.count] + } + + testMapLoadedElements(t, m, values) } - testMapLoadedElements(t, m, values) + require.Equal(t, 0, len(values)) } - - require.Equal(t, 0, len(values)) }) } @@ -16161,6 +16270,7 @@ func createMapWithLongStringKey( address Address, typeInfo TypeInfo, size int, + useWrapperValue bool, ) (*OrderedMap, [][2]Value) { digesterBuilder := &mockDigesterBuilder{} @@ -16177,14 +16287,22 @@ func createMapWithLongStringKey( k := NewStringValue(s) v := Uint64Value(i) - expectedValues[i] = [2]Value{k, v} - digests := []Digest{Digest(i)} digesterBuilder.On("Digest", k).Return(mockDigester{digests}) - existingStorable, err := m.Set(compare, hashInputProvider, k, v) - require.NoError(t, err) - require.Nil(t, existingStorable) + if useWrapperValue { + existingStorable, err := m.Set(compare, hashInputProvider, k, SomeValue{v}) + require.NoError(t, err) + require.Nil(t, existingStorable) + + expectedValues[i] = [2]Value{k, someValue{v}} + } else { + existingStorable, err := m.Set(compare, hashInputProvider, k, v) + require.NoError(t, err) + require.Nil(t, existingStorable) + + expectedValues[i] = [2]Value{k, v} + } r++ } @@ -16199,6 +16317,7 @@ func createMapWithSimpleValues( typeInfo TypeInfo, size int, newDigests func(i int) []Digest, + useWrapperValue bool, ) (*OrderedMap, [][2]Value) { digesterBuilder := &mockDigesterBuilder{} @@ -16215,11 +16334,19 @@ func createMapWithSimpleValues( digests := newDigests(i) digesterBuilder.On("Digest", k).Return(mockDigester{digests}) - expectedValues[i] = [2]Value{k, v} + if 
useWrapperValue { + expectedValues[i] = [2]Value{k, someValue{v}} - existingStorable, err := m.Set(compare, hashInputProvider, expectedValues[i][0], expectedValues[i][1]) - require.NoError(t, err) - require.Nil(t, existingStorable) + existingStorable, err := m.Set(compare, hashInputProvider, k, SomeValue{v}) + require.NoError(t, err) + require.Nil(t, existingStorable) + } else { + expectedValues[i] = [2]Value{k, v} + + existingStorable, err := m.Set(compare, hashInputProvider, k, v) + require.NoError(t, err) + require.Nil(t, existingStorable) + } } return m, expectedValues @@ -16232,6 +16359,7 @@ func createMapWithChildArrayValues( typeInfo TypeInfo, size int, newDigests func(i int) []Digest, + useWrapperValue bool, ) (*OrderedMap, [][2]Value, []SlabID) { const childArraySize = 50 @@ -16262,16 +16390,25 @@ func createMapWithChildArrayValues( k := Uint64Value(i) v := childArray - expectedValues[i] = [2]Value{k, arrayValue(expectedChildValues)} slabIDs[i] = childArray.SlabID() digests := newDigests(i) digesterBuilder.On("Digest", k).Return(mockDigester{digests}) // Set child array to parent - existingStorable, err := m.Set(compare, hashInputProvider, k, v) - require.NoError(t, err) - require.Nil(t, existingStorable) + if useWrapperValue { + existingStorable, err := m.Set(compare, hashInputProvider, k, SomeValue{v}) + require.NoError(t, err) + require.Nil(t, existingStorable) + + expectedValues[i] = [2]Value{k, someValue{arrayValue(expectedChildValues)}} + } else { + existingStorable, err := m.Set(compare, hashInputProvider, k, v) + require.NoError(t, err) + require.Nil(t, existingStorable) + + expectedValues[i] = [2]Value{k, arrayValue(expectedChildValues)} + } } return m, expectedValues, slabIDs @@ -16285,6 +16422,7 @@ func createMapWithSimpleAndChildArrayValues( size int, compositeValueIndex int, newDigests func(i int) []Digest, + useWrapperValue bool, ) (*OrderedMap, [][2]Value, SlabID) { const childArraySize = 50 @@ -16318,11 +16456,19 @@ func createMapWithSimpleAndChildArrayValues( expectedChildValues[j] = v } - values[i] = [2]Value{k, arrayValue(expectedChildValues)} + if useWrapperValue { + existingStorable, err := m.Set(compare, hashInputProvider, k, SomeValue{childArray}) + require.NoError(t, err) + require.Nil(t, existingStorable) + + values[i] = [2]Value{k, someValue{arrayValue(expectedChildValues)}} + } else { + existingStorable, err := m.Set(compare, hashInputProvider, k, childArray) + require.NoError(t, err) + require.Nil(t, existingStorable) - existingStorable, err := m.Set(compare, hashInputProvider, k, childArray) - require.NoError(t, err) - require.Nil(t, existingStorable) + values[i] = [2]Value{k, arrayValue(expectedChildValues)} + } slabID = childArray.SlabID() diff --git a/map_wrappervalue_test.go b/map_wrappervalue_test.go new file mode 100644 index 0000000..6a2fe4d --- /dev/null +++ b/map_wrappervalue_test.go @@ -0,0 +1,2315 @@ +/* + * Atree - Scalable Arrays and Ordered Maps + * + * Copyright Flow Foundation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package atree + +import ( + "fmt" + "math/rand" + "runtime" + "testing" + + "github.com/stretchr/testify/require" +) + +type newKeyFunc func(SlabStorage) (key Value, expected Value) + +var newRandomUint64KeyFunc = func(r *rand.Rand) newKeyFunc { + return func(SlabStorage) (key Value, expected Value) { + v := Uint64Value(r.Intn(1844674407370955161)) + return v, v + } +} + +var newUint64KeyFunc = func() newKeyFunc { + i := 0 + return func(SlabStorage) (key Value, expected Value) { + v := Uint64Value(i) + i++ + return v, v + } +} + +var newMapValueFunc = func( + t *testing.T, + address Address, + typeInfo TypeInfo, + mapSize int, + newKey newKeyFunc, + newValue newValueFunc, +) newValueFunc { + return func(storage SlabStorage) (value Value, expected Value) { + m, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value) + + for i := 0; i < mapSize; i++ { + k, expectedK := newKey(storage) + v, expectedV := newValue(storage) + + existingStorable, err := m.Set(compare, hashInputProvider, k, v) + require.NoError(t, err) + require.Nil(t, existingStorable) + + keyValues[expectedK] = expectedV + } + + return m, mapValue(keyValues) + } +} + +var modifyMapValueFunc = func( + t *testing.T, + needToResetModifiedValue bool, + modifyValueFunc modifyValueFunc, +) modifyValueFunc { + return func( + storage SlabStorage, + originalValue Value, + expectedOrigianlValue Value, + ) ( + modifiedValue Value, + expectedModifiedValue Value, + err error, + ) { + m, ok := originalValue.(*OrderedMap) + require.True(t, ok) + + expectedValues, ok := expectedOrigianlValue.(mapValue) + require.True(t, ok) + + require.Equal(t, uint64(len(expectedValues)), m.Count()) + require.True(t, m.Count() > 0) + + // Modify first element + + var firstKey Value + err = m.IterateKeys(compare, hashInputProvider, func(k Value) (resume bool, err error) { + firstKey = k + return false, nil + }) + require.NoError(t, err) + + v, err := m.Get(compare, hashInputProvider, firstKey) + require.NoError(t, err) + + modifiedV, expectedModifiedV, err := modifyValueFunc(storage, v, expectedValues[firstKey]) + if err != nil { + return nil, nil, err + } + + if modifiedV == nil { + + existingKeyStorable, existingValueStorable, err := m.Remove(compare, hashInputProvider, firstKey) + if err != nil { + return nil, nil, err + } + require.NotNil(t, existingKeyStorable) + require.NotNil(t, existingValueStorable) + + // Verify wrapped storable doesn't contain inlined slab + + wrappedStorable := unwrapStorable(existingValueStorable) + + var removedSlabID SlabID + + switch wrappedStorable := wrappedStorable.(type) { + case ArraySlab, MapSlab: + require.Fail(t, "removed storable shouldn't be (wrapped) ArraySlab or MapSlab: %s", existingValueStorable) + + case SlabIDStorable: + removedSlabID = SlabID(wrappedStorable) + + // Verify SlabID has the same address + require.Equal(t, m.Address(), removedSlabID.Address()) + } + + existingValue, err := existingValueStorable.StoredValue(storage) + require.NoError(t, err) + valueEqual(t, expectedValues[firstKey], existingValue) + + // Remove slabs from storage + if removedSlabID != SlabIDUndefined { + err = storage.Remove(removedSlabID) + require.NoError(t, err) + } + + delete(expectedValues, firstKey) + + } else { + + if needToResetModifiedValue { + existingStorable, err := m.Set(compare, hashInputProvider, firstKey, modifiedV) + if err != nil { + return nil, nil, err + } + require.NotNil(t, existingStorable) + + // Verify wrapped storable 
doesn't contain inlined slab + + wrappedStorable := unwrapStorable(existingStorable) + + var overwrittenSlabID SlabID + + switch wrappedStorable := wrappedStorable.(type) { + case ArraySlab, MapSlab: + require.Fail(t, "overwritten storable shouldn't be (wrapped) ArraySlab or MapSlab: %s", existingStorable) + + case SlabIDStorable: + overwrittenSlabID = SlabID(wrappedStorable) + + // Verify SlabID has the same address + require.Equal(t, m.Address(), overwrittenSlabID.Address()) + } + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + + valueEqual(t, expectedValues[firstKey], existingValue) + + if overwrittenSlabID != SlabIDUndefined { + // Remove slabs from storage given we are not interested in removed element + err = storage.Remove(overwrittenSlabID) + require.NoError(t, err) + } + + expectedValues[firstKey] = expectedModifiedV + } + } + + return m, expectedValues, nil + } +} + +type mapWrapperValueTestCase struct { + name string + modifyName string + wrapperValueNestedLevels int + mustSetModifiedElementInMap bool + newKey newKeyFunc + newValue newValueFunc + modifyValue modifyValueFunc +} + +func newMapWrapperValueTestCases( + t *testing.T, + r *rand.Rand, + address Address, + typeInfo TypeInfo, +) []mapWrapperValueTestCase { + + return []mapWrapperValueTestCase{ + + // Test maps {uint64: SomeValue(uint64)} + { + name: "{uint64: SomeValue(uint64)}", + modifyName: "modify wrapped primitive", + wrapperValueNestedLevels: 1, + mustSetModifiedElementInMap: true, + newKey: newRandomUint64KeyFunc(r), + newValue: newWrapperValueFunc(1, newRandomUint64ValueFunc(r)), + modifyValue: modifyWrapperValueFunc(t, 1, modifyRandomUint64ValueFunc(r)), + }, + + // Test maps {uint64: SomeValue(SomeValue(uint64))} + { + name: "{uint64: SomeValue(SomeValue(uint64))}", + modifyName: "modify wrapped primitive", + wrapperValueNestedLevels: 2, + mustSetModifiedElementInMap: true, + newKey: newRandomUint64KeyFunc(r), + newValue: newWrapperValueFunc(2, newRandomUint64ValueFunc(r)), + modifyValue: modifyWrapperValueFunc(t, 2, modifyRandomUint64ValueFunc(r)), + }, + + // Test maps {uint64: SomeValue({uint64: uint64}))} + { + name: "{uint64: SomeValue({uint64: uint64})}", + modifyName: "modify wrapped map", + wrapperValueNestedLevels: 1, + mustSetModifiedElementInMap: false, + newKey: newRandomUint64KeyFunc(r), + newValue: newWrapperValueFunc( + 1, + newMapValueFunc( + t, + address, + typeInfo, + 2, + newRandomUint64KeyFunc(r), + newRandomUint64ValueFunc(r))), + modifyValue: modifyWrapperValueFunc( + t, + 1, + modifyMapValueFunc( + t, + true, + modifyRandomUint64ValueFunc(r))), + }, + + // Test maps {uint64: SomeValue(SomeValue({uint64: uint64})))} + { + name: "{uint64: SomeValue(SomeValue({uint64: uint64}))}", + modifyName: "modify wrapped map", + wrapperValueNestedLevels: 2, + mustSetModifiedElementInMap: false, + newKey: newRandomUint64KeyFunc(r), + newValue: newWrapperValueFunc( + 2, + newMapValueFunc( + t, + address, + typeInfo, + 2, + newRandomUint64KeyFunc(r), + newRandomUint64ValueFunc(r))), + modifyValue: modifyWrapperValueFunc( + t, + 2, + modifyMapValueFunc( + t, + true, + modifyRandomUint64ValueFunc(r))), + }, + + // Test maps {uint64: SomeValue({uint64: SomeValue(uint64)}))} + { + name: "{uint64: SomeValue({uint64: SomeValue(uint64)})}", + modifyName: "modify wrapped map", + wrapperValueNestedLevels: 1, + mustSetModifiedElementInMap: false, + newKey: newRandomUint64KeyFunc(r), + newValue: newWrapperValueFunc( + 1, + newMapValueFunc( + t, + address, + typeInfo, + 2, + 
newRandomUint64KeyFunc(r), + newWrapperValueFunc(1, newRandomUint64ValueFunc(r)))), + modifyValue: modifyWrapperValueFunc( + t, + 1, + modifyMapValueFunc( + t, + true, + modifyWrapperValueFunc( + t, + 1, + modifyRandomUint64ValueFunc(r)))), + }, + + // Test maps {uint64: SomeValue(SomeValue({uint64: SomeValue(SomeValue(uint64))})))} + { + name: "{uint64: SomeValue(SomeValue({uint64: SomeValue(SomeValue(uint64))}))}", + modifyName: "modify wrapped map", + wrapperValueNestedLevels: 2, + mustSetModifiedElementInMap: false, + newKey: newRandomUint64KeyFunc(r), + newValue: newWrapperValueFunc( + 2, + newMapValueFunc( + t, + address, + typeInfo, + 2, + newRandomUint64KeyFunc(r), + newWrapperValueFunc( + 2, + newRandomUint64ValueFunc(r)))), + modifyValue: modifyWrapperValueFunc( + t, + 2, + modifyMapValueFunc( + t, + true, + modifyWrapperValueFunc( + t, + 2, + modifyRandomUint64ValueFunc(r)))), + }, + + // Test maps {uint64: SomeValue({uint64: SomeValue({uint64: SomeValue(uint64)})}))} and modify innermost map + { + name: "{uint64: SomeValue({uint64: SomeValue({uint64: SomeValue(uint64)})})}", + modifyName: "modify wrapped level-2 map", + wrapperValueNestedLevels: 1, + mustSetModifiedElementInMap: false, + newKey: newRandomUint64KeyFunc(r), + newValue: newWrapperValueFunc( + 1, + newMapValueFunc( + t, + address, + typeInfo, + 2, + newRandomUint64KeyFunc(r), + newWrapperValueFunc( + 1, + newMapValueFunc( + t, + address, + typeInfo, + 2, + newRandomUint64KeyFunc(r), + newWrapperValueFunc( + 1, + newRandomUint64ValueFunc(r)))))), + modifyValue: modifyWrapperValueFunc( + t, + 1, + modifyMapValueFunc( + t, + false, + modifyWrapperValueFunc( + t, + 1, + modifyMapValueFunc( + t, + true, + modifyWrapperValueFunc( + t, + 1, + modifyRandomUint64ValueFunc(r)))))), + }, + + // Test maps {uint64: SomeValue({uint64: SomeValue({uint64: SomeValue(uint64)})})) and remove element from middle map + { + name: "{uint64: SomeValue({uint64: SomeValue({uint64: SomeValue(uint64)})})}", + modifyName: "remove element from wrapped level-1 map", + wrapperValueNestedLevels: 1, + mustSetModifiedElementInMap: false, + newKey: newRandomUint64KeyFunc(r), + newValue: newWrapperValueFunc( + 1, + newMapValueFunc( + t, + address, + typeInfo, + 2, + newRandomUint64KeyFunc(r), + newWrapperValueFunc( + 1, + newMapValueFunc( + t, + address, + typeInfo, + 2, + newRandomUint64KeyFunc(r), + newWrapperValueFunc( + 1, + newRandomUint64ValueFunc(r)))))), + modifyValue: modifyWrapperValueFunc( + t, + 1, + modifyMapValueFunc( + t, + true, + replaceWithNewValueFunc(nilValueFunc()))), + }, + + // Test maps {uint64: SomeValue({uint64: SomeValue({uint64: SomeValue(uint64)})})) and modify element from middle map + { + name: "{uint64: SomeValue({uint64: SomeValue({uint64: SomeValue(uint64)})})}", + modifyName: "modify element in wrapped level-1 map", + wrapperValueNestedLevels: 1, + mustSetModifiedElementInMap: false, + newKey: newRandomUint64KeyFunc(r), + newValue: newWrapperValueFunc( + 1, + newMapValueFunc( + t, + address, + typeInfo, + 2, + newRandomUint64KeyFunc(r), + newWrapperValueFunc( + 1, + newMapValueFunc( + t, + address, + typeInfo, + 2, + newRandomUint64KeyFunc(r), + newWrapperValueFunc( + 1, + newRandomUint64ValueFunc(r)))))), + modifyValue: modifyWrapperValueFunc( + t, + 1, + modifyMapValueFunc( + t, + true, + replaceWithNewValueFunc( + newWrapperValueFunc( + 1, + newMapValueFunc( + t, + address, + typeInfo, + 2, + newRandomUint64KeyFunc(r), + newWrapperValueFunc( + 1, + newRandomUint64ValueFunc(r))))))), + }, + } +} + +// 
TestMapWrapperValueSetAndModify tests
+// - setting WrapperValue in map
+// - retrieving WrapperValue from map
+// - modifying retrieved WrapperValue
+// - setting modified WrapperValue
+func TestMapWrapperValueSetAndModify(t *testing.T) {
+	SetThreshold(256)
+	defer SetThreshold(1024)
+
+	r := newRand(t)
+
+	typeInfo := testTypeInfo{42}
+	address := Address{1, 2, 3, 4, 5, 6, 7, 8}
+
+	const (
+		smallMapSize = 10
+		largeMapSize = 512
+	)
+
+	mapSizeTestCases := []struct {
+		name    string
+		mapSize int
+	}{
+		{name: "small map", mapSize: smallMapSize},
+		{name: "large map", mapSize: largeMapSize},
+	}
+
+	testCases := newMapWrapperValueTestCases(t, r, address, typeInfo)
+
+	for _, tc := range testCases {
+
+		for _, mapSizeTestCase := range mapSizeTestCases {
+
+			mapSize := mapSizeTestCase.mapSize
+
+			name := mapSizeTestCase.name + " " + tc.name
+			if tc.modifyName != "" {
+				name += "," + tc.modifyName
+			}
+
+			t.Run(name, func(t *testing.T) {
+
+				storage := newTestPersistentStorage(t)
+
+				m, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo)
+				require.NoError(t, err)
+
+				rootSlabID := m.SlabID()
+
+				// Set WrapperValue
+				expectedValues := make(map[Value]Value)
+				for len(expectedValues) < mapSize {
+					k, expectedK := tc.newKey(storage)
+
+					if _, exists := expectedValues[expectedK]; exists {
+						continue
+					}
+
+					v, expectedV := tc.newValue(storage)
+
+					existingStorable, err := m.Set(compare, hashInputProvider, k, v)
+					require.NoError(t, err)
+					require.Nil(t, existingStorable)
+
+					expectedValues[expectedK] = expectedV
+				}
+
+				require.Equal(t, uint64(mapSize), m.Count())
+
+				testMap(t, storage, typeInfo, address, m, expectedValues, nil, true)
+
+				// Retrieve and modify WrapperValue from map
+				for key, expectedValue := range expectedValues {
+					v, err := m.Get(compare, hashInputProvider, key)
+					require.NoError(t, err)
+
+					valueEqual(t, expectedValue, v)
+
+					// Verify that v is WrapperValue
+					testWrapperValueLevels(t, tc.wrapperValueNestedLevels, v)
+
+					// Modify element
+					newV, newExpectedV, err := tc.modifyValue(storage, v, expectedValue)
+					require.NoError(t, err)
+
+					if tc.mustSetModifiedElementInMap {
+						testSetElementInMap(t, storage, m, key, newV, expectedValue)
+					}
+
+					expectedValues[key] = newExpectedV
+				}
+
+				require.Equal(t, uint64(mapSize), m.Count())
+
+				testMap(t, storage, typeInfo, address, m, expectedValues, nil, true)
+
+				// Commit storage
+				err = storage.FastCommit(runtime.NumCPU())
+				require.NoError(t, err)
+
+				// Load map from encoded data
+				storage2 := newTestPersistentStorageWithBaseStorage(t, storage.baseStorage)
+
+				m2, err := NewMapWithRootID(storage2, rootSlabID, newBasicDigesterBuilder())
+				require.NoError(t, err)
+				require.Equal(t, uint64(mapSize), m2.Count())
+
+				// Test loaded map
+				testMap(t, storage2, typeInfo, address, m2, expectedValues, nil, true)
+			})
+		}
+	}
+}
+
+// TestMapWrapperValueSetAndRemove tests
+// - inserting WrapperValue into map
+// - removing elements
+func TestMapWrapperValueSetAndRemove(t *testing.T) {
+	SetThreshold(256)
+	defer SetThreshold(1024)
+
+	r := newRand(t)
+
+	typeInfo := testTypeInfo{42}
+	address := Address{1, 2, 3, 4, 5, 6, 7, 8}
+
+	const (
+		smallMapSize = 10
+		largeMapSize = 512
+	)
+
+	mapSizeTestCases := []struct {
+		name    string
+		mapSize int
+	}{
+		{name: "small map", mapSize: smallMapSize},
+		{name: "large map", mapSize: largeMapSize},
+	}
+
+	modifyTestCases := []struct {
+		name                string
+		needToModifyElement bool
+	}{
+		{name: "modify elements", needToModifyElement: true},
+		{name: "", needToModifyElement: false},
+	}
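+
+	// removeSizeTestCases below covers removing all elements, a single element, and half of a small map.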
+ + removeSizeTestCases := []struct { + name string + removeAllElements bool + removeElementCount int + }{ + {name: "remove all elements", removeAllElements: true}, + {name: "remove 1 element", removeElementCount: 1}, + {name: fmt.Sprintf("remove %d element", smallMapSize/2), removeElementCount: smallMapSize / 2}, + } + + testCases := newMapWrapperValueTestCases(t, r, address, typeInfo) + + for _, tc := range testCases { + + for _, mapSizeTestCase := range mapSizeTestCases { + + for _, modifyTestCase := range modifyTestCases { + + for _, removeSizeTestCase := range removeSizeTestCases { + + mapSize := mapSizeTestCase.mapSize + + needToModifyElement := modifyTestCase.needToModifyElement + + removeSize := removeSizeTestCase.removeElementCount + if removeSizeTestCase.removeAllElements { + removeSize = mapSize + } + + name := mapSizeTestCase.name + " " + tc.name + if modifyTestCase.needToModifyElement { + name += ", " + tc.modifyName + } + if removeSizeTestCase.name != "" { + name += ", " + removeSizeTestCase.name + } + + t.Run(name, func(t *testing.T) { + + storage := newTestPersistentStorage(t) + + m, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + rootSlabID := m.SlabID() + + expectedValues := make(map[Value]Value) + + // Set WrapperValue in map + for len(expectedValues) < mapSize { + k, expectedK := tc.newKey(storage) + + if _, exists := expectedValues[expectedK]; exists { + continue + } + + v, expectedV := tc.newValue(storage) + + existingStorable, err := m.Set(compare, hashInputProvider, k, v) + require.NoError(t, err) + require.Nil(t, existingStorable) + + expectedValues[expectedK] = expectedV + } + + require.Equal(t, uint64(mapSize), m.Count()) + + testMap(t, storage, typeInfo, address, m, expectedValues, nil, true) + + // Retrieve and modify WrapperValue from map + if needToModifyElement { + for key, expected := range expectedValues { + v, err := m.Get(compare, hashInputProvider, key) + require.NoError(t, err) + + valueEqual(t, expected, v) + + // Verify that v is WrapperValue + testWrapperValueLevels(t, tc.wrapperValueNestedLevels, v) + + // Modify element + newV, newExpectedV, err := tc.modifyValue(storage, v, expected) + require.NoError(t, err) + + if tc.mustSetModifiedElementInMap { + testSetElementInMap(t, storage, m, key, newV, expected) + } + + expectedValues[key] = newExpectedV + } + + require.Equal(t, uint64(mapSize), m.Count()) + + testMap(t, storage, typeInfo, address, m, expectedValues, nil, true) + } + + keys := make([]Value, 0, len(expectedValues)) + for key := range expectedValues { + keys = append(keys, key) + } + + // Remove random elements + for i := 0; i < removeSize; i++ { + + removeKeyIndex := r.Intn(len(keys)) + removeKey := keys[removeKeyIndex] + + testRemoveElementFromMap(t, storage, m, removeKey, expectedValues[removeKey]) + + delete(expectedValues, removeKey) + + keys = append(keys[:removeKeyIndex], keys[removeKeyIndex+1:]...) 
+ } + + require.Equal(t, uint64(mapSize-removeSize), m.Count()) + require.Equal(t, mapSize-removeSize, len(expectedValues)) + + testMap(t, storage, typeInfo, address, m, expectedValues, nil, true) + + // Commit storage + err = storage.FastCommit(runtime.NumCPU()) + require.NoError(t, err) + + // Load map from encoded data + storage2 := newTestPersistentStorageWithBaseStorage(t, storage.baseStorage) + + m2, err := NewMapWithRootID(storage2, rootSlabID, newBasicDigesterBuilder()) + require.NoError(t, err) + require.Equal(t, uint64(mapSize-removeSize), m2.Count()) + + // Test loaded map + testMap(t, storage2, typeInfo, address, m2, expectedValues, nil, true) + }) + } + } + } + } +} + +func TestMapWrapperValueReadOnlyIterate(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + r := newRand(t) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + + const ( + smallMapSize = 10 + largeMapSize = 512 + ) + + mapSizeTestCases := []struct { + name string + mapSize int + }{ + {name: "small map", mapSize: smallMapSize}, + {name: "large map", mapSize: largeMapSize}, + } + + modifyTestCases := []struct { + name string + testModifyElement bool + }{ + {name: "modify elements", testModifyElement: true}, + {name: "", testModifyElement: false}, + } + + testCases := newMapWrapperValueTestCases(t, r, address, typeInfo) + + for _, tc := range testCases { + + for _, mapSizeTestCase := range mapSizeTestCases[:1] { + + for _, modifyTestCase := range modifyTestCases { + + // Can't test modifying elements in readonly iteration if elements are not containers. + if modifyTestCase.testModifyElement && tc.mustSetModifiedElementInMap { + continue + } + + mapSize := mapSizeTestCase.mapSize + + testModifyElement := modifyTestCase.testModifyElement + + name := mapSizeTestCase.name + " " + tc.name + if modifyTestCase.testModifyElement { + name += ", " + tc.modifyName + } + + t.Run(name, func(t *testing.T) { + + storage := newTestPersistentStorage(t) + + m, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + expectedValues := make(map[Value]Value) + + // Set WrapperValue to map + for len(expectedValues) < mapSize { + k, expectedK := tc.newKey(storage) + + if _, exists := expectedValues[expectedK]; exists { + continue + } + + v, expectedV := tc.newValue(storage) + + existingStorable, err := m.Set(compare, hashInputProvider, k, v) + require.NoError(t, err) + require.Nil(t, existingStorable) + + expectedValues[expectedK] = expectedV + } + + require.Equal(t, uint64(mapSize), m.Count()) + + testMap(t, storage, typeInfo, address, m, expectedValues, nil, true) + + iterator, err := m.ReadOnlyIterator() + require.NoError(t, err) + + count := 0 + for { + nextKey, nextValue, err := iterator.Next() + require.NoError(t, err) + + if nextKey == nil { + break + } + + expected := expectedValues[nextKey] + + testWrapperValueLevels(t, tc.wrapperValueNestedLevels, nextValue) + + valueEqual(t, expected, nextValue) + + // Test modifying elements that don't need to reset in parent container. 
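+					// Mutating an element retrieved from a read-only iterator is expected to fail
+					// with ReadOnlyIteratorElementMutationError rather than changing the map.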
+ if testModifyElement { + _, _, err := tc.modifyValue(storage, nextValue, expected) + var targetErr *ReadOnlyIteratorElementMutationError + require.ErrorAs(t, err, &targetErr) + } + + count++ + } + }) + } + } + } +} + +func TestMapWrapperValueIterate(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + r := newRand(t) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + + const ( + smallMapSize = 10 + largeMapSize = 512 + ) + + mapSizeTestCases := []struct { + name string + mapSize int + }{ + {name: "small map", mapSize: smallMapSize}, + {name: "large map", mapSize: largeMapSize}, + } + + modifyTestCases := []struct { + name string + testModifyElement bool + }{ + {name: "modify elements", testModifyElement: true}, + {name: "", testModifyElement: false}, + } + + testCases := newMapWrapperValueTestCases(t, r, address, typeInfo) + + for _, tc := range testCases { + + for _, mapSizeTestCase := range mapSizeTestCases[:1] { + + for _, modifyTestCase := range modifyTestCases { + + elementIsContainer := !tc.mustSetModifiedElementInMap + + // Can't test modifying elements in readonly iteration if elements are not containers. + if modifyTestCase.testModifyElement && !elementIsContainer { + continue + } + + mapSize := mapSizeTestCase.mapSize + + testModifyElement := modifyTestCase.testModifyElement + + name := mapSizeTestCase.name + " " + tc.name + if modifyTestCase.testModifyElement { + name += ", " + tc.modifyName + } + + t.Run(name, func(t *testing.T) { + + storage := newTestPersistentStorage(t) + + m, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + expectedValues := make(map[Value]Value) + + // Set WrapperValue in map + for len(expectedValues) < mapSize { + k, expectedK := tc.newKey(storage) + + if _, exists := expectedValues[expectedK]; exists { + continue + } + + v, expectedV := tc.newValue(storage) + + existingStorable, err := m.Set(compare, hashInputProvider, k, v) + require.NoError(t, err) + require.Nil(t, existingStorable) + + expectedValues[expectedK] = expectedV + } + + require.Equal(t, uint64(mapSize), m.Count()) + + testMap(t, storage, typeInfo, address, m, expectedValues, nil, true) + + iterator, err := m.Iterator(compare, hashInputProvider) + require.NoError(t, err) + + count := 0 + for { + nextKey, nextValue, err := iterator.Next() + require.NoError(t, err) + + if nextKey == nil { + break + } + + expected := expectedValues[nextKey] + + testWrapperValueLevels(t, tc.wrapperValueNestedLevels, nextValue) + + valueEqual(t, expected, nextValue) + + // Test modifying container elements. 
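+					// Unlike the read-only iterator, this mutation is expected to succeed, and the
+					// change propagates to the parent map, so only expectedValues is updated here.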
+ if testModifyElement { + _, newExpectedV, err := tc.modifyValue(storage, nextValue, expected) + require.NoError(t, err) + + expectedValues[nextKey] = newExpectedV + } + + count++ + } + + require.Equal(t, uint64(mapSize), m.Count()) + + testMap(t, storage, typeInfo, address, m, expectedValues, nil, true) + }) + } + } + } +} + +func TestMapWrapperValueInlineMapAtLevel1(t *testing.T) { + + testLevel1WrappedChildMapInlined := func(t *testing.T, m *OrderedMap, expectedInlined bool) { + rootDataSlab, isDataSlab := m.root.(*MapDataSlab) + require.True(t, isDataSlab) + + elements, isHkeyElements := rootDataSlab.elements.(*hkeyElements) + require.True(t, isHkeyElements) + + require.Equal(t, 1, len(elements.elems)) + + element, isSingleElement := elements.elems[0].(*singleElement) + require.True(t, isSingleElement) + + value := element.value + + storabeleAsSomeStorable, isSomeStorable := value.(SomeStorable) + require.True(t, isSomeStorable) + + wrappedStorable := storabeleAsSomeStorable.Storable + + switch wrappedStorable := wrappedStorable.(type) { + case SlabIDStorable: + inlined := false + require.Equal(t, expectedInlined, inlined) + + case ArraySlab: + inlined := true + require.Equal(t, expectedInlined, inlined) + + case MapSlab: + inlined := true + require.Equal(t, expectedInlined, inlined) + + default: + require.Fail(t, "wrapped storable has unexpected type: %T", wrappedStorable) + } + } + + SetThreshold(256) + defer SetThreshold(1024) + + r := newRand(t) + + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + typeInfo := testTypeInfo{42} + + storage := newTestPersistentStorage(t) + + expectedValues := make(mapValue) + + m, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + // Set WrapperValue SomeValue([]) in map + { + // Create standalone child map + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + require.False(t, childMap.Inlined()) + + // Set child map (level-1 inlined map) in parent map + key := Uint64Value(0) + + existingStorable, err := m.Set(compare, hashInputProvider, key, SomeValue{childMap}) + require.NoError(t, err) + require.Nil(t, existingStorable) + + require.True(t, childMap.Inlined()) + + expectedValues[key] = someValue{mapValue{}} + + require.Equal(t, uint64(1), m.Count()) + + testMap(t, storage, typeInfo, address, m, expectedValues, nil, true) + + testLevel1WrappedChildMapInlined(t, m, true) + } + + // Retrieve wrapped child map, and then insert new elements to child map. + // Wrapped child map is expected to be unlined at the end of loop. 
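+	// Each insertion below grows the wrapped child map through the retrieved *OrderedMap;
+	// once it no longer fits within the inline size limit it is moved into its own slab,
+	// which testLevel1WrappedChildMapInlined(t, m, false) verifies after the loop.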
+ + const childMapSize = 8 + for i := 0; i <= childMapSize; i++ { + // Get element + element, err := m.Get(compare, hashInputProvider, Uint64Value(0)) + require.NoError(t, err) + require.NotNil(t, element) + + // Test retrieved element type + elementAsSomeValue, isSomeValue := element.(SomeValue) + require.True(t, isSomeValue) + + wrappedValue := elementAsSomeValue.Value + + wrappedMap, isMap := wrappedValue.(*OrderedMap) + require.True(t, isMap) + + expectedWrappedValue := expectedValues[Uint64Value(0)].(someValue).Value + + expectedWrappedMap := expectedWrappedValue.(mapValue) + + // Insert new elements to wrapped child map + + k := Uint64Value(i) + v := Uint64Value(r.Intn(256)) + + existingStorable, err := wrappedMap.Set(compare, hashInputProvider, k, SomeValue{v}) + require.NoError(t, err) + require.Nil(t, existingStorable) + + expectedWrappedMap[k] = someValue{v} + + expectedValues[Uint64Value(0)] = someValue{expectedWrappedMap} + + require.Equal(t, uint64(i+1), wrappedMap.Count()) + require.Equal(t, i+1, len(expectedWrappedMap)) + + testMap(t, storage, typeInfo, address, m, expectedValues, nil, true) + } + + testLevel1WrappedChildMapInlined(t, m, false) + + // Retrieve wrapped child map, and then remove elements from child map. + // Wrapped child map is expected to be inlined at the end of loop. + + childMapSizeAfterRemoval := 2 + removeCount := childMapSize - childMapSizeAfterRemoval + + for i := 0; i < removeCount; i++ { + // Get element + element, err := m.Get(compare, hashInputProvider, Uint64Value(0)) + require.NoError(t, err) + require.NotNil(t, element) + + // Test retrieved element type + elementAsSomeValue, isSomeValue := element.(SomeValue) + require.True(t, isSomeValue) + + wrappedValue := elementAsSomeValue.Value + + wrappedMap, isMap := wrappedValue.(*OrderedMap) + require.True(t, isMap) + + expectedWrappedValue := expectedValues[Uint64Value(0)].(someValue).Value + + expectedWrappedMap := expectedWrappedValue.(mapValue) + + // Remove element from wrapped child map + + existingKeyStorable, existingValueStorable, err := wrappedMap.Remove(compare, hashInputProvider, Uint64Value(i)) + require.NoError(t, err) + require.NotNil(t, existingKeyStorable) + require.NotNil(t, existingValueStorable) + + // Verify removed key and value + + existingValue, err := existingValueStorable.StoredValue(storage) + require.NoError(t, err) + valueEqual(t, expectedWrappedMap[Uint64Value(i)], existingValue) + + delete(expectedWrappedMap, Uint64Value(i)) + + expectedValues[Uint64Value(0)] = someValue{expectedWrappedMap} + + testMap(t, storage, typeInfo, address, m, expectedValues, nil, true) + } + + testLevel1WrappedChildMapInlined(t, m, true) + + testMap(t, storage, typeInfo, address, m, expectedValues, nil, true) +} + +func TestMapWrapperValueInlineMapAtLevel2(t *testing.T) { + + testLevel2WrappedChildMapInlined := func(t *testing.T, m *OrderedMap, expectedInlined bool) { + rootDataSlab, isDataSlab := m.root.(*MapDataSlab) + require.True(t, isDataSlab) + + elementsAtLevel1, isHkeyElements := rootDataSlab.elements.(*hkeyElements) + require.True(t, isHkeyElements) + + require.Equal(t, 1, len(elementsAtLevel1.elems)) + + elementAtLevel1, isSingleElement := elementsAtLevel1.elems[0].(*singleElement) + require.True(t, isSingleElement) + + // Get unwrapped value at level 1 + + storableAtLevel1 := elementAtLevel1.value + + storabeleAsSomeStoable, isSomeStorable := storableAtLevel1.(SomeStorable) + require.True(t, isSomeStorable) + + wrappedStorableAtLevel1 := storabeleAsSomeStoable.Storable + + 
wrappedMapAtlevel1, isMap := wrappedStorableAtLevel1.(*MapDataSlab) + require.True(t, isMap) + + // Get unwrapped value at level 2 + + elementsAtLevel2, isHkeyElements := wrappedMapAtlevel1.elements.(*hkeyElements) + require.True(t, isHkeyElements) + + elementAtLevel2, isSingleElement := elementsAtLevel2.elems[0].(*singleElement) + require.True(t, isSingleElement) + + storableAtLevel2 := elementAtLevel2.value + + storabeleAsSomeStoable, isSomeStorable = storableAtLevel2.(SomeStorable) + require.True(t, isSomeStorable) + + wrappedStorableAtLevel2 := storabeleAsSomeStoable.Storable + + switch wrappedStorable := wrappedStorableAtLevel2.(type) { + case SlabIDStorable: + inlined := false + require.Equal(t, expectedInlined, inlined) + + case ArraySlab: + inlined := true + require.Equal(t, expectedInlined, inlined) + + case MapSlab: + inlined := true + require.Equal(t, expectedInlined, inlined) + + default: + require.Fail(t, "wrapped storable has unexpected type: %T", wrappedStorable) + } + } + + SetThreshold(256) + defer SetThreshold(1024) + + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + typeInfo := testTypeInfo{42} + + r := newRand(t) + + storage := newTestPersistentStorage(t) + + expectedValues := make(mapValue) + + m, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + // Set WrapperValue SomeValue({SomeValue{}}) to map + { + // Create grand child map + gchildMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + require.False(t, gchildMap.Inlined()) + + // Create child map + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + require.False(t, childMap.Inlined()) + + // Set grand child map to child map + existingStorable, err := childMap.Set(compare, hashInputProvider, Uint64Value(0), SomeValue{gchildMap}) + require.NoError(t, err) + require.Nil(t, existingStorable) + + require.True(t, gchildMap.Inlined()) + + // Append child map to map + existingStorable, err = m.Set(compare, hashInputProvider, Uint64Value(0), SomeValue{childMap}) + require.NoError(t, err) + require.Nil(t, existingStorable) + + require.True(t, childMap.Inlined()) + + expectedValues[Uint64Value(0)] = someValue{mapValue{Uint64Value(0): someValue{mapValue{}}}} + + require.Equal(t, uint64(1), m.Count()) + + testMap(t, storage, typeInfo, address, m, expectedValues, nil, true) + + testLevel2WrappedChildMapInlined(t, m, true) + } + + // Retrieve wrapped gchild map, and then insert new elements to gchild map. + // Wrapped gchild map is expected to be unlined at the end of loop. 
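+	// The grand child map is modified through the retrieved wrapper values without being set
+	// back into the parent explicitly; growing it past the inline size limit moves it into
+	// its own slab, which testLevel2WrappedChildMapInlined(t, m, false) verifies after the loop.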
+ + const gchildMapSize = 8 + for i := 0; i < gchildMapSize; i++ { + // Get element at level 1 + + elementAtLevel1, err := m.Get(compare, hashInputProvider, Uint64Value(0)) + require.NoError(t, err) + require.NotNil(t, elementAtLevel1) + + // Test retrieved element type + elementAsSomeValueAtLevel1, isSomeValue := elementAtLevel1.(SomeValue) + require.True(t, isSomeValue) + + wrappedValueAtLevel1 := elementAsSomeValueAtLevel1.Value + + wrappedMapAtLevel1, isMap := wrappedValueAtLevel1.(*OrderedMap) + require.True(t, isMap) + + expectedWrappedValueAtLevel1 := expectedValues[Uint64Value(0)].(someValue).Value + + expectedWrappedMapAtLevel1 := expectedWrappedValueAtLevel1.(mapValue) + + // Get element at level 2 + + elementAtLevel2, err := wrappedMapAtLevel1.Get(compare, hashInputProvider, Uint64Value(0)) + require.NoError(t, err) + + // Test retrieved element type + elementAsSomeValueAtLevel2, isSomeValue := elementAtLevel2.(SomeValue) + require.True(t, isSomeValue) + + wrappedValueAtLevel2 := elementAsSomeValueAtLevel2.Value + + wrappedMapAtLevel2, isMap := wrappedValueAtLevel2.(*OrderedMap) + require.True(t, isMap) + + expectedWrappedValueAtLevel2 := expectedWrappedMapAtLevel1[Uint64Value(0)].(someValue).Value + + expectedWrappedMapAtLevel2 := expectedWrappedValueAtLevel2.(mapValue) + + // Insert new elements to wrapped gchild map + + k := Uint64Value(i) + v := Uint64Value(r.Intn(256)) + + existingStorable, err := wrappedMapAtLevel2.Set(compare, hashInputProvider, k, SomeValue{v}) + require.NoError(t, err) + require.Nil(t, existingStorable) + + expectedWrappedMapAtLevel2[k] = someValue{v} + + expectedValues[Uint64Value(0)] = someValue{mapValue{Uint64Value(0): someValue{expectedWrappedMapAtLevel2}}} + + require.Equal(t, uint64(i+1), wrappedMapAtLevel2.Count()) + require.Equal(t, i+1, len(expectedWrappedMapAtLevel2)) + + testMap(t, storage, typeInfo, address, m, expectedValues, nil, true) + } + + testLevel2WrappedChildMapInlined(t, m, false) + + // Retrieve wrapped gchild map, and then remove elements from gchild map. + // Wrapped gchild map is expected to be inlined at the end of loop. 
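+	// Removing elements shrinks the grand child map below the inline size limit, so it is
+	// inlined into its parent again, which testLevel2WrappedChildMapInlined(t, m, true) verifies.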
+ + gchildMapSizeAfterRemoval := 2 + removeCount := gchildMapSize - gchildMapSizeAfterRemoval + + for i := 0; i < removeCount; i++ { + // Get elementAtLevel1 + elementAtLevel1, err := m.Get(compare, hashInputProvider, Uint64Value(0)) + require.NoError(t, err) + + // Test retrieved element type + elementAsSomeValueAtLevel1, isSomeValue := elementAtLevel1.(SomeValue) + require.True(t, isSomeValue) + + wrappedValueAtLevel1 := elementAsSomeValueAtLevel1.Value + + wrappedMapAtLevel1, isMap := wrappedValueAtLevel1.(*OrderedMap) + require.True(t, isMap) + + expectedWrappedValueAtLevel1 := expectedValues[Uint64Value(0)].(someValue).Value + + expectedWrappedMapAtLevel1 := expectedWrappedValueAtLevel1.(mapValue) + + // Get element at level 2 + + elementAtLevel2, err := wrappedMapAtLevel1.Get(compare, hashInputProvider, Uint64Value(0)) + require.NoError(t, err) + + // Test retrieved element type + elementAsSomeValueAtLevel2, isSomeValue := elementAtLevel2.(SomeValue) + require.True(t, isSomeValue) + + wrappedValueAtLevel2 := elementAsSomeValueAtLevel2.Value + + wrappedMapAtLevel2, isMap := wrappedValueAtLevel2.(*OrderedMap) + require.True(t, isMap) + + expectedWrappedValueAtLevel2 := expectedWrappedMapAtLevel1[Uint64Value(0)].(someValue).Value + + expectedWrappedMapAtLevel2 := expectedWrappedValueAtLevel2.(mapValue) + + // Remove first element from wrapped gchild map + + existingKeyStorable, existingValueStorable, err := wrappedMapAtLevel2.Remove(compare, hashInputProvider, Uint64Value(i)) + require.NoError(t, err) + require.NotNil(t, existingKeyStorable) + require.NotNil(t, existingValueStorable) + + // Verify removed value + + existingValue, err := existingValueStorable.StoredValue(storage) + require.NoError(t, err) + valueEqual(t, expectedWrappedMapAtLevel2[Uint64Value(i)], existingValue) + + delete(expectedWrappedMapAtLevel2, Uint64Value(i)) + + expectedValues[Uint64Value(0)] = someValue{mapValue{Uint64Value(0): someValue{expectedWrappedMapAtLevel2}}} + + testMap(t, storage, typeInfo, address, m, expectedValues, nil, true) + } + + testLevel2WrappedChildMapInlined(t, m, true) + + testMap(t, storage, typeInfo, address, m, expectedValues, nil, true) +} + +func TestMapWrapperValueModifyNewMapAtLevel1(t *testing.T) { + + const ( + minWriteOperationSize = 124 + maxWriteOperationSize = 256 + ) + + r := newRand(t) + + SetThreshold(256) + defer SetThreshold(1024) + + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + typeInfo := testTypeInfo{42} + + newElementFuncs := []newValueFunc{ + // SomeValue(uint64) + newWrapperValueFunc(1, newRandomUint64ValueFunc(r)), + + // SomeValue({uint64: SomeValue(uint64)}) + newWrapperValueFunc( + 1, + newMapValueFunc( + t, + address, + typeInfo, + r.Intn(4), + newRandomUint64KeyFunc(r), + newWrapperValueFunc( + 1, + newRandomUint64ValueFunc(r)))), + + // SomeValue({uint64: SomeValue({uint64: SomeValue(uint64)})}) + newWrapperValueFunc( + 1, + newMapValueFunc( + t, + address, + typeInfo, + r.Intn(4), + newRandomUint64KeyFunc(r), + newWrapperValueFunc( + 1, + newMapValueFunc( + t, + address, + typeInfo, + r.Intn(4), + newRandomUint64KeyFunc(r), + newWrapperValueFunc( + 1, + newRandomUint64ValueFunc(r)))))), + } + + storage := newTestPersistentStorage(t) + + expectedValues := make(mapValue) + + m, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + actualMapSize := 0 + + t.Run("set and remove", func(t *testing.T) { + // Insert elements + + var setCount int + for setCount < minWriteOperationSize { + setCount = 
r.Intn(maxWriteOperationSize + 1) + } + + actualMapSize += setCount + + for i := 0; i < setCount; i++ { + k := Uint64Value(i) + + newValue := newElementFuncs[r.Intn(len(newElementFuncs))] + v, expected := newValue(storage) + + existingStorable, err := m.Set(compare, hashInputProvider, k, v) + require.NoError(t, err) + require.Nil(t, existingStorable) + + expectedValues[k] = expected + } + + require.Equal(t, uint64(actualMapSize), m.Count()) + + testMap(t, storage, typeInfo, address, m, expectedValues, nil, true) + + // Remove some elements + + var removeCount int + minRemoveCount := int(m.Count()) / 2 + maxRemoveCount := int(m.Count()) / 4 * 3 + for removeCount < minRemoveCount || removeCount > maxRemoveCount { + removeCount = r.Intn(int(m.Count()) + 1) + } + + actualMapSize -= removeCount + + // Remove elements + + keys := make([]Value, 0, len(expectedValues)) + for k := range expectedValues { + keys = append(keys, k) + } + + for i := 0; i < removeCount; i++ { + index := r.Intn(len(keys)) + key := keys[index] + + testRemoveElementFromMap(t, storage, m, key, expectedValues[key]) + + delete(expectedValues, key) + keys = append(keys[:index], keys[index+1:]...) + } + + require.Equal(t, uint64(actualMapSize), m.Count()) + + testMap(t, storage, typeInfo, address, m, expectedValues, nil, true) + }) + + t.Run("remove all", func(t *testing.T) { + // Remove all elements + + keys := make([]Value, 0, len(expectedValues)) + for k := range expectedValues { + keys = append(keys, k) + } + + for len(keys) > 0 { + index := r.Intn(len(keys)) + key := keys[index] + + testRemoveElementFromMap(t, storage, m, key, expectedValues[key]) + + delete(expectedValues, key) + keys = append(keys[:index], keys[index+1:]...) + } + + require.Equal(t, uint64(0), m.Count()) + + testMap(t, storage, typeInfo, address, m, expectedValues, nil, true) + }) +} + +func TestMapWrapperValueModifyNewMapAtLevel2(t *testing.T) { + + const ( + minWriteOperationSize = 124 + maxWriteOperationSize = 256 + ) + + r := newRand(t) + + SetThreshold(256) + defer SetThreshold(1024) + + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + typeInfo := testTypeInfo{42} + + // newValue creates value of type SomeValue({uint64: SomeValue(uint64)}). + newValue := + newWrapperValueFunc( + 1, + newMapValueFunc( + t, + address, + typeInfo, + r.Intn(4)+1, // at least one element + newRandomUint64KeyFunc(r), + newWrapperValueFunc( + 1, + newRandomUint64ValueFunc(r)))) + + // modifyValue modifies nested map's first element. 
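+	// The modification is applied to the retrieved value in place; the test does not Set the modified value back into the parent map.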
+ modifyValue := + modifyWrapperValueFunc( + t, + 1, + modifyMapValueFunc( + t, + true, + modifyWrapperValueFunc( + t, + 1, + modifyRandomUint64ValueFunc(r)))) + + storage := newTestPersistentStorage(t) + + expectedValues := make(mapValue) + + m, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + actualMapSize := 0 + + t.Run("set and remove", func(t *testing.T) { + // Set elements + + var setCount int + for setCount < minWriteOperationSize { + setCount = r.Intn(maxWriteOperationSize + 1) + } + + actualMapSize += setCount + + for i := 0; i < setCount; i++ { + k := Uint64Value(i) + v, expected := newValue(storage) + + existingStorable, err := m.Set(compare, hashInputProvider, k, v) + require.NoError(t, err) + require.Nil(t, existingStorable) + + expectedValues[k] = expected + } + + require.Equal(t, uint64(actualMapSize), m.Count()) + + testMap(t, storage, typeInfo, address, m, expectedValues, nil, true) + + // Remove some elements (including one previously inserted element) + + var removeCount int + minRemoveCount := int(m.Count()) / 2 + maxRemoveCount := int(m.Count()) / 4 * 3 + for removeCount < minRemoveCount || removeCount > maxRemoveCount { + removeCount = r.Intn(int(m.Count()) + 1) + } + + actualMapSize -= removeCount + + // Remove elements + + keys := make([]Value, 0, len(expectedValues)) + for k := range expectedValues { + keys = append(keys, k) + } + + for i := 0; i < removeCount; i++ { + index := r.Intn(len(keys)) + key := keys[index] + + testRemoveElementFromMap(t, storage, m, key, expectedValues[key]) + + delete(expectedValues, key) + keys = append(keys[:index], keys[index+1:]...) + } + + require.Equal(t, uint64(actualMapSize), m.Count()) + + testMap(t, storage, typeInfo, address, m, expectedValues, nil, true) + }) + + t.Run("modify retrieved nested container and remove", func(t *testing.T) { + // Set elements + + var setCount int + if m.Count() <= 10 { + setCount = int(m.Count()) + } else { + for setCount < int(m.Count())/2 { + setCount = r.Intn(int(m.Count()) + 1) + } + } + + keys := make([]Value, 0, len(expectedValues)) + for k := range expectedValues { + keys = append(keys, k) + } + + for i := 0; i < setCount; i++ { + index := r.Intn(len(keys)) + setKey := keys[index] + + // Get element + originalValue, err := m.Get(compare, hashInputProvider, setKey) + require.NoError(t, err) + require.NotNil(t, originalValue) + + _, isWrapperValue := originalValue.(SomeValue) + require.True(t, isWrapperValue) + + // Modify retrieved element without setting back explicitly. + _, modifiedExpectedValue, err := modifyValue(storage, originalValue, expectedValues[setKey]) + require.NoError(t, err) + + expectedValues[setKey] = modifiedExpectedValue + } + + require.Equal(t, uint64(actualMapSize), m.Count()) + + testMap(t, storage, typeInfo, address, m, expectedValues, nil, true) + + // Remove some elements (including some previously set elements) + + var removeCount int + minRemoveCount := int(m.Count()) / 2 + maxRemoveCount := int(m.Count()) / 4 * 3 + for removeCount < minRemoveCount || removeCount > maxRemoveCount { + removeCount = r.Intn(int(m.Count())) + } + + actualMapSize -= removeCount + + // Remove more elements + + for i := 0; i < removeCount; i++ { + index := r.Intn(len(keys)) + key := keys[index] + + testRemoveElementFromMap(t, storage, m, key, expectedValues[key]) + + delete(expectedValues, key) + keys = append(keys[:index], keys[index+1:]...) 
+ } + + require.Equal(t, uint64(actualMapSize), m.Count()) + + testMap(t, storage, typeInfo, address, m, expectedValues, nil, true) + }) + + t.Run("remove all", func(t *testing.T) { + // Remove all elements + + keys := make([]Value, 0, len(expectedValues)) + for k := range expectedValues { + keys = append(keys, k) + } + + for len(keys) > 0 { + index := r.Intn(len(keys)) + key := keys[index] + + testRemoveElementFromMap(t, storage, m, key, expectedValues[key]) + + delete(expectedValues, key) + keys = append(keys[:index], keys[index+1:]...) + } + + require.Equal(t, uint64(0), m.Count()) + + testMap(t, storage, typeInfo, address, m, expectedValues, nil, true) + }) +} + +func TestMapWrapperValueModifyNewMapAtLevel3(t *testing.T) { + + const ( + minWriteOperationSize = 124 + maxWriteOperationSize = 256 + ) + + r := newRand(t) + + SetThreshold(256) + defer SetThreshold(1024) + + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + typeInfo := testTypeInfo{42} + + // newValue creates value of type SomeValue({uint64: SomeValue({uint64: SomeValue(uint64)})})) + newValue := + newWrapperValueFunc( + 1, + newMapValueFunc( + t, + address, + typeInfo, + 2, + newRandomUint64KeyFunc(r), + newWrapperValueFunc( + 1, + newMapValueFunc( + t, + address, + typeInfo, + 2, + newRandomUint64KeyFunc(r), + newWrapperValueFunc( + 1, + newRandomUint64ValueFunc(r)))))) + + // modifyValue modifies innermost nested map's first element. + modifyValue := + modifyWrapperValueFunc( + t, + 1, + modifyMapValueFunc( + t, + false, + modifyWrapperValueFunc( + t, + 1, + modifyMapValueFunc( + t, + true, + modifyWrapperValueFunc( + t, + 1, + modifyRandomUint64ValueFunc(r)))))) + + storage := newTestPersistentStorage(t) + + expectedValues := make(mapValue) + + m, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + actualMapSize := 0 + + t.Run("set and remove", func(t *testing.T) { + // Insert elements + + var setCount int + for setCount < minWriteOperationSize { + setCount = r.Intn(maxWriteOperationSize + 1) + } + + actualMapSize += setCount + + for i := 0; i < setCount; i++ { + k := Uint64Value(i) + v, expected := newValue(storage) + + existingStorable, err := m.Set(compare, hashInputProvider, k, v) + require.NoError(t, err) + require.Nil(t, existingStorable) + + expectedValues[k] = expected + } + + require.Equal(t, uint64(actualMapSize), m.Count()) + + testMap(t, storage, typeInfo, address, m, expectedValues, nil, true) + + // Remove some elements + + var removeCount int + for removeCount < int(m.Count())/2 { + removeCount = r.Intn(int(m.Count()) + 1) + } + + actualMapSize -= removeCount + + keys := make([]Value, 0, m.Count()) + err := m.IterateReadOnlyKeys(func(key Value) (resume bool, err error) { + keys = append(keys, key) + return true, nil + }) + require.NoError(t, err) + + for i := 0; i < removeCount; i++ { + index := r.Intn(len(keys)) + key := keys[index] + + testRemoveElementFromMap(t, storage, m, key, expectedValues[key]) + + delete(expectedValues, key) + keys = append(keys[:index], keys[index+1:]...) 
+ } + + require.Equal(t, uint64(actualMapSize), m.Count()) + + testMap(t, storage, typeInfo, address, m, expectedValues, nil, true) + }) + + t.Run("modify retrieved nested container and remove", func(t *testing.T) { + // Set elements + + var setCount int + if m.Count() <= 10 { + setCount = int(m.Count()) + } else { + for setCount < int(m.Count())/2 { + setCount = r.Intn(int(m.Count()) + 1) + } + } + + keys := make([]Value, 0, m.Count()) + err := m.IterateReadOnlyKeys(func(key Value) (resume bool, err error) { + keys = append(keys, key) + return true, nil + }) + require.NoError(t, err) + + for i := 0; i < setCount; i++ { + index := r.Intn(len(keys)) + key := keys[index] + + // Get element + originalValue, err := m.Get(compare, hashInputProvider, key) + require.NoError(t, err) + require.NotNil(t, originalValue) + + _, isWrapperValue := originalValue.(SomeValue) + require.True(t, isWrapperValue) + + // Modify retrieved element without setting back explicitly. + _, modifiedExpectedValue, err := modifyValue(storage, originalValue, expectedValues[key]) + require.NoError(t, err) + + expectedValues[key] = modifiedExpectedValue + } + + require.Equal(t, uint64(actualMapSize), m.Count()) + + testMap(t, storage, typeInfo, address, m, expectedValues, nil, true) + + // Remove some elements + + var removeCount int + for removeCount < int(m.Count())/2 { + removeCount = r.Intn(int(m.Count())) + } + + actualMapSize -= removeCount + + for i := 0; i < removeCount; i++ { + index := r.Intn(len(keys)) + key := keys[index] + + testRemoveElementFromMap(t, storage, m, key, expectedValues[key]) + + delete(expectedValues, key) + keys = append(keys[:index], keys[index+1:]...) + } + + require.Equal(t, uint64(actualMapSize), m.Count()) + + testMap(t, storage, typeInfo, address, m, expectedValues, nil, true) + }) + + t.Run("remove all", func(t *testing.T) { + // Remove all elements + + keys := make([]Value, 0, m.Count()) + err := m.IterateReadOnlyKeys(func(key Value) (resume bool, err error) { + keys = append(keys, key) + return true, nil + }) + require.NoError(t, err) + + for m.Count() > 0 { + index := r.Intn(len(keys)) + key := keys[index] + + testRemoveElementFromMap(t, storage, m, key, expectedValues[key]) + + delete(expectedValues, key) + keys = append(keys[:index], keys[index+1:]...) 
+ } + + require.Equal(t, uint64(0), m.Count()) + + testMap(t, storage, typeInfo, address, m, expectedValues, nil, true) + }) +} + +func TestMapWrapperValueModifyExistingMap(t *testing.T) { + + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + + t.Run("modify level-1 wrapper map in {uint64: SomeValue({uint64: SomeValue(uint64)})}", func(t *testing.T) { + const ( + mapSize = 3 + childMapSize = 2 + ) + + typeInfo := testTypeInfo{42} + + r := newRand(t) + + createStorage := func(mapSize int) ( + _ BaseStorage, + rootSlabID SlabID, + expectedKeyValues map[Value]Value, + ) { + storage := newTestPersistentStorage(t) + + createMapOfSomeValueOfMapOfSomeValueOfUint64 := + newMapValueFunc( + t, + address, + typeInfo, + mapSize, + newUint64KeyFunc(), + newWrapperValueFunc( + 1, + newMapValueFunc( + t, + address, + typeInfo, + childMapSize, + newUint64KeyFunc(), + newWrapperValueFunc( + 1, + newRandomUint64ValueFunc(r))))) + + v, expected := createMapOfSomeValueOfMapOfSomeValueOfUint64(storage) + + m := v.(*OrderedMap) + expectedKeyValues = expected.(mapValue) + + testMap(t, storage, typeInfo, address, m, expectedKeyValues, nil, true) + + err := storage.FastCommit(runtime.NumCPU()) + require.NoError(t, err) + + return storage.baseStorage, m.SlabID(), expectedKeyValues + } + + // Create a base storage with map in the format of + // {uint64: SomeValue({uint64: SomeValue(uint64)})} + baseStorage, rootSlabID, expectedKeyValues := createStorage(mapSize) + require.Equal(t, mapSize, len(expectedKeyValues)) + + keys := make([]Value, 0, len(expectedKeyValues)) + for k := range expectedKeyValues { + keys = append(keys, k) + } + + // Create a new storage with encoded map + storage := newTestPersistentStorageWithBaseStorage(t, baseStorage) + + // Load existing map from storage + m, err := NewMapWithRootID(storage, rootSlabID, newBasicDigesterBuilder()) + require.NoError(t, err) + require.Equal(t, uint64(len(expectedKeyValues)), m.Count()) + + // Get and verify first element as SomeValue(map) + + key := keys[0] + expectedValues := expectedKeyValues[key] + + // Get map element (SomeValue) + element, err := m.Get(compare, hashInputProvider, key) + require.NoError(t, err) + + elementAsSomeValue, isSomeValue := element.(SomeValue) + require.True(t, isSomeValue) + + unwrappedChildMap, isOrderedMap := elementAsSomeValue.Value.(*OrderedMap) + require.True(t, isOrderedMap) + + expectedValuesAsSomeValue, isSomeValue := expectedValues.(someValue) + require.True(t, isSomeValue) + + expectedUnwrappedChildMap, isMapValue := expectedValuesAsSomeValue.Value.(mapValue) + require.True(t, isMapValue) + + require.Equal(t, uint64(len(expectedUnwrappedChildMap)), unwrappedChildMap.Count()) + + // Modify wrapped child map of SomeValue + + newKey := NewStringValue("x") + newValue := NewStringValue("y") + existingStorable, err := unwrappedChildMap.Set(compare, hashInputProvider, newKey, SomeValue{newValue}) + require.NoError(t, err) + require.Nil(t, existingStorable) + + expectedChildMapValues := expectedKeyValues[key].(someValue).Value.(mapValue) + expectedChildMapValues[newKey] = someValue{newValue} + + err = storage.FastCommit(runtime.NumCPU()) + require.NoError(t, err) + + // Verify modified wrapped child map of SomeValue using new storage with committed data + + storage2 := newTestPersistentStorageWithBaseStorage(t, storage.baseStorage) + + // Load existing map from storage + m2, err := NewMapWithRootID(storage2, rootSlabID, newBasicDigesterBuilder()) + require.NoError(t, err) + require.Equal(t, uint64(len(expectedKeyValues)), m2.Count()) 
+ + testMap(t, storage, typeInfo, address, m2, expectedKeyValues, nil, true) + }) + + t.Run("get and modify 2-level wrapper map in {uint64: SomeValue({uint64: SomeValue({uint64: SomeValue(uint64)})})}", func(t *testing.T) { + const ( + mapSize = 4 + childMapSize = 3 + gchildMapSize = 2 + ) + + typeInfo := testTypeInfo{42} + + r := newRand(t) + + createStorage := func(mapSize int) ( + _ BaseStorage, + rootSlabID SlabID, + expectedKeyValues map[Value]Value, + ) { + storage := newTestPersistentStorage(t) + + createMapOfSomeValueOfMapOfSomeValueOfMapOfSomeValueOfUint64 := + newMapValueFunc( + t, + address, + typeInfo, + mapSize, + newUint64KeyFunc(), + newWrapperValueFunc( + 1, + newMapValueFunc( + t, + address, + typeInfo, + mapSize, + newUint64KeyFunc(), + newWrapperValueFunc( + 1, + newMapValueFunc( + t, + address, + typeInfo, + childMapSize, + newUint64KeyFunc(), + newWrapperValueFunc( + 1, + newRandomUint64ValueFunc(r))))))) + + v, expected := createMapOfSomeValueOfMapOfSomeValueOfMapOfSomeValueOfUint64(storage) + + m := v.(*OrderedMap) + expectedKeyValues = expected.(mapValue) + + testMap(t, storage, typeInfo, address, m, expectedKeyValues, nil, true) + + err := storage.FastCommit(runtime.NumCPU()) + require.NoError(t, err) + + return storage.baseStorage, m.SlabID(), expectedKeyValues + } + + // Create a base storage with map in the format of + // {uint64: SomeValue({uint64: SomeValue({uint64: SomeValue(string)})})} + baseStorage, rootSlabID, expectedKeyValues := createStorage(mapSize) + require.Equal(t, mapSize, len(expectedKeyValues)) + + keys := make([]Value, 0, len(expectedKeyValues)) + for k := range expectedKeyValues { + keys = append(keys, k) + } + + // Create a new storage with encoded map + storage := newTestPersistentStorageWithBaseStorage(t, baseStorage) + + // Load existing map from storage + m, err := NewMapWithRootID(storage, rootSlabID, newBasicDigesterBuilder()) + require.NoError(t, err) + require.Equal(t, uint64(len(expectedKeyValues)), m.Count()) + + // Get and verify first element as SomeValue(map) + + key := keys[0] + expectedValues := expectedKeyValues[key] + + // Get map element (SomeValue) + element, err := m.Get(compare, hashInputProvider, key) + require.NoError(t, err) + + elementAsSomeValue, isSomeValue := element.(SomeValue) + require.True(t, isSomeValue) + + unwrappedChildMap, isOrderedMap := elementAsSomeValue.Value.(*OrderedMap) + require.True(t, isOrderedMap) + + expectedValuesAsSomeValue, isSomeValue := expectedValues.(someValue) + require.True(t, isSomeValue) + + expectedUnwrappedChildMap, isMapValue := expectedValuesAsSomeValue.Value.(mapValue) + require.True(t, isMapValue) + + require.Equal(t, uint64(len(expectedUnwrappedChildMap)), unwrappedChildMap.Count()) + + // Get and verify nested child element as SomeValue(map) + + childMapKeys := make([]Value, 0, len(expectedUnwrappedChildMap)) + for k := range expectedUnwrappedChildMap { + childMapKeys = append(childMapKeys, k) + } + + childMapKey := childMapKeys[0] + + childMapElement, err := unwrappedChildMap.Get(compare, hashInputProvider, childMapKey) + require.NoError(t, err) + + childMapElementAsSomeValue, isSomeValue := childMapElement.(SomeValue) + require.True(t, isSomeValue) + + unwrappedGChildMap, isOrderedMap := childMapElementAsSomeValue.Value.(*OrderedMap) + require.True(t, isOrderedMap) + + expectedChildValuesAsSomeValue, isSomeValue := expectedUnwrappedChildMap[childMapKey].(someValue) + require.True(t, isSomeValue) + + expectedUnwrappedGChildMap, isMapValue := 
expectedChildValuesAsSomeValue.Value.(mapValue) + require.True(t, isMapValue) + + require.Equal(t, uint64(len(expectedUnwrappedGChildMap)), unwrappedGChildMap.Count()) + + // Modify wrapped gchild map of SomeValue + + newKey := NewStringValue("x") + newValue := NewStringValue("y") + existingStorable, err := unwrappedGChildMap.Set(compare, hashInputProvider, newKey, SomeValue{newValue}) + require.NoError(t, err) + require.Nil(t, existingStorable) + + expectedChildMapValues := expectedKeyValues[key].(someValue).Value.(mapValue) + expectedGChildMapValues := expectedChildMapValues[childMapKey].(someValue).Value.(mapValue) + expectedGChildMapValues[newKey] = someValue{newValue} + + err = storage.FastCommit(runtime.NumCPU()) + require.NoError(t, err) + + // Verify modified wrapped child map of SomeValue using new storage with committed data + + storage2 := newTestPersistentStorageWithBaseStorage(t, storage.baseStorage) + + // Load existing map from storage + m2, err := NewMapWithRootID(storage2, rootSlabID, newBasicDigesterBuilder()) + require.NoError(t, err) + require.Equal(t, uint64(len(expectedKeyValues)), m2.Count()) + + testMap(t, storage, typeInfo, address, m2, expectedKeyValues, nil, true) + }) +} + +func testSetElementInMap(t *testing.T, storage SlabStorage, m *OrderedMap, key Value, newValue Value, expected Value) { + existingStorable, err := m.Set(compare, hashInputProvider, key, newValue) + require.NoError(t, err) + require.NotNil(t, existingStorable) + + // var overwrittenSlabID SlabID + + // Verify wrapped storable doesn't contain inlined slab + + wrappedStorable := unwrapStorable(existingStorable) + + switch wrappedStorable := wrappedStorable.(type) { + case ArraySlab, MapSlab: + require.Fail(t, "overwritten storable shouldn't be (wrapped) ArraySlab or MapSlab: %s", existingStorable) + + case SlabIDStorable: + overwrittenSlabID := SlabID(wrappedStorable) + + // Verify SlabID has the same address + require.Equal(t, m.Address(), overwrittenSlabID.Address()) + } + + // Verify overwritten value + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + valueEqual(t, expected, existingValue) + + removeFromStorage(t, storage, existingValue) +} + +func testRemoveElementFromMap(t *testing.T, storage SlabStorage, m *OrderedMap, key Value, expected Value) { + existingKeyStorable, existingValueStorable, err := m.Remove(compare, hashInputProvider, key) + require.NoError(t, err) + require.NotNil(t, existingKeyStorable) + require.NotNil(t, existingValueStorable) + + // var removedSlabID SlabID + + // Verify wrapped storable doesn't contain inlined slab + + wrappedStorable := unwrapStorable(existingValueStorable) + + switch wrappedStorable := wrappedStorable.(type) { + case ArraySlab, MapSlab: + require.Fail(t, "removed storable shouldn't be (wrapped) ArraySlab or MapSlab: %s", existingValueStorable) + + case SlabIDStorable: + removedSlabID := SlabID(wrappedStorable) + + // Verify SlabID has the same address + require.Equal(t, m.Address(), removedSlabID.Address()) + } + + // Verify removed value + + existingKey, err := existingKeyStorable.StoredValue(storage) + require.NoError(t, err) + valueEqual(t, key, existingKey) + + removeFromStorage(t, storage, existingKey) + + existingValue, err := existingValueStorable.StoredValue(storage) + require.NoError(t, err) + valueEqual(t, expected, existingValue) + + removeFromStorage(t, storage, existingValue) +} diff --git a/storable.go b/storable.go index bd49ef3..d123f10 100644 --- a/storable.go +++ b/storable.go @@ -63,6 
+63,17 @@ type ContainerStorable interface { HasPointer() bool } +// WrapperStorable is an interface that supports storable wrapping another storable. +type WrapperStorable interface { + Storable + + // UnwrapAtreeStorable returns innermost wrapped Storable. + UnwrapAtreeStorable() Storable + + // WrapAtreeStorable returns a new WrapperStorable with given storable as innermost wrapped storable. + WrapAtreeStorable(Storable) Storable +} + func hasPointer(storable Storable) bool { if cs, ok := storable.(ContainerStorable); ok { return cs.HasPointer() @@ -70,6 +81,15 @@ func hasPointer(storable Storable) bool { return false } +func unwrapStorable(s Storable) Storable { + switch s := s.(type) { + case WrapperStorable: + return s.UnwrapAtreeStorable() + default: + return s + } +} + const ( // WARNING: tag numbers defined in here in github.com/onflow/atree // MUST not overlap with tag numbers used by Cadence internal value encoding. @@ -247,6 +267,26 @@ func getLoadedValue(storage SlabStorage, storable Storable) (Value, error) { return v, nil + case WrapperStorable: + // Check if wrapped storable is SlabIDStorable. + wrappedStorable := unwrapStorable(storable) + + if wrappedSlabIDStorable, isSlabIDStorable := wrappedStorable.(SlabIDStorable); isSlabIDStorable { + slab := storage.RetrieveIfLoaded(SlabID(wrappedSlabIDStorable)) + if slab == nil { + // Skip because it references unloaded slab. + return nil, nil + } + } + + v, err := storable.StoredValue(storage) + if err != nil { + // Wrap err as external error (if needed) because err is returned by Storable interface. + return nil, wrapErrorfAsExternalErrorIfNeeded(err, "failed to get storable's stored value") + } + + return v, nil + default: v, err := storable.StoredValue(storage) if err != nil { diff --git a/storable_test.go b/storable_test.go index f1db065..ec01d07 100644 --- a/storable_test.go +++ b/storable_test.go @@ -31,12 +31,13 @@ import ( // This file contains value implementations for testing purposes const ( - cborTagUInt8Value = 161 - cborTagUInt16Value = 162 - cborTagUInt32Value = 163 - cborTagUInt64Value = 164 - cborTagSomeValue = 165 - cborTagHashableMap = 166 + cborTagUInt8Value = 161 + cborTagUInt16Value = 162 + cborTagUInt32Value = 163 + cborTagUInt64Value = 164 + cborTagSomeValue = 165 + cborTagHashableMap = 166 + cborTagSomeValueWithNestedLevels = 167 ) func TestIsCBORTagNumberRangeAvailable(t *testing.T) { @@ -591,6 +592,56 @@ func decodeStorable(dec *cbor.StreamDecoder, id SlabID, inlinedExtraData []Extra } return SomeStorable{Storable: storable}, nil + case cborTagSomeValueWithNestedLevels: + count, err := dec.DecodeArrayHead() + if err != nil { + return nil, fmt.Errorf( + "invalid some value with nested levels encoding: %w", + err, + ) + } + + if count != someStorableWithMultipleNestedLevelsArrayCount { + return nil, fmt.Errorf( + "invalid array count for some value with nested levels encoding: got %d, expect %d", + count, someStorableWithMultipleNestedLevelsArrayCount, + ) + } + + nestedLevels, err := dec.DecodeUint64() + if err != nil { + return nil, fmt.Errorf( + "invalid nested levels for some value with nested levels encoding: %w", + err, + ) + } + + if nestedLevels <= 1 { + return nil, fmt.Errorf( + "invalid nested levels for some value with nested levels encoding: got %d, expect > 1", + nestedLevels, + ) + } + + nonSomeStorable, err := decodeStorable(dec, id, inlinedExtraData) + if err != nil { + return nil, fmt.Errorf( + "invalid nonSomeStorable for some value with nested levels encoding: %w", + err, + ) + } + + 
storable := SomeStorable{ + Storable: nonSomeStorable, + } + for i := uint64(1); i < nestedLevels; i++ { + storable = SomeStorable{ + Storable: storable, + } + } + + return storable, nil + default: return nil, fmt.Errorf("invalid tag number %d", tagNumber) } @@ -727,18 +778,55 @@ type SomeValue struct { var _ Value = SomeValue{} var _ HashableValue = SomeValue{} +var _ WrapperValue = SomeValue{} + +// NOTE: For testing purposes, SomeValue and SomeStorable are mostly copied +// from github.com/onflow/cadence (interpreter.SomeValue and interpreter.SomeStorable). +// Ideally, integration tests at github.com/onflow/cadence should test integration with atree +// for mutations of nested data types. -func (v SomeValue) Storable(storage SlabStorage, address Address, maxSize uint64) (Storable, error) { +func (v SomeValue) Storable( + storage SlabStorage, + address Address, + maxInlineSize uint64, +) (Storable, error) { - valueStorable, err := v.Value.Storable( + // SomeStorable returned from this function can be encoded in two ways: + // - if non-SomeStorable is too large, non-SomeStorable is encoded in a separate slab + // while SomeStorable wrapper is encoded inline with reference to slab containing + // non-SomeStorable. + // - otherwise, SomeStorable with non-SomeStorable is encoded inline. + // + // The above applies to both immutable non-SomeValue (such as StringValue), + // and mutable non-SomeValue (such as ArrayValue). + + nonSomeValue, nestedLevels := v.nonSomeValue() + + someStorableEncodedPrefixSize := getSomeStorableEncodedPrefixSize(nestedLevels) + + // Reduce maxInlineSize for non-SomeValue to make sure + // that SomeStorable wrapper is always encoded inline. + maxInlineSize -= uint64(someStorableEncodedPrefixSize) + + nonSomeValueStorable, err := nonSomeValue.Storable( storage, address, - maxSize-2, + maxInlineSize, ) if err != nil { return nil, err } + valueStorable := nonSomeValueStorable + for i := 1; i < int(nestedLevels); i++ { + valueStorable = SomeStorable{ + Storable: valueStorable, + } + } + + // No need to call maybeLargeImmutableStorable() here for SomeStorable because: + // - encoded SomeStorable size = someStorableEncodedPrefixSize + non-SomeValueStorable size + // - non-SomeValueStorable size < maxInlineSize - someStorableEncodedPrefixSize return SomeStorable{ Storable: valueStorable, }, nil @@ -765,7 +853,57 @@ func (v SomeValue) HashInput(scratch []byte) ([]byte, error) { } func (v SomeValue) String() string { - return fmt.Sprintf("%s", v.Value) + return fmt.Sprintf("SomeValue(%s)", v.Value) +} + +func (v SomeValue) UnwrapAtreeValue() (Value, uint64) { + nonSomeValue, nestedLevels := v.nonSomeValue() + + someStorableEncodedPrefixSize := getSomeStorableEncodedPrefixSize(nestedLevels) + + wv, ok := nonSomeValue.(WrapperValue) + if !ok { + return nonSomeValue, uint64(someStorableEncodedPrefixSize) + } + + unwrappedValue, wrapperSize := wv.UnwrapAtreeValue() + + return unwrappedValue, wrapperSize + uint64(someStorableEncodedPrefixSize) +} + +// nonSomeValue returns a non-SomeValue and nested levels of SomeValue reached +// by traversing nested SomeValue (SomeValue containing SomeValue, etc.) +// until it reaches a non-SomeValue. 
+// For example, +// - `SomeValue{true}` has non-SomeValue `true`, and nested levels 1 +// - `SomeValue{SomeValue{1}}` has non-SomeValue `1` and nested levels 2 +// - `SomeValue{SomeValue{[SomeValue{SomeValue{SomeValue{1}}}]}} has +// non-SomeValue `[SomeValue{SomeValue{SomeValue{1}}}]` and nested levels 2 +func (v SomeValue) nonSomeValue() (Value, uint64) { + nestedLevels := uint64(1) + for { + switch value := v.Value.(type) { + case SomeValue: + nestedLevels++ + v = value + + default: + return value, nestedLevels + } + } +} + +const ( + cborTagSize = 2 + someStorableWithMultipleNestedlevelsArraySize = 1 + someStorableWithMultipleNestedLevelsArrayCount = 2 +) + +func getSomeStorableEncodedPrefixSize(nestedLevels uint64) uint32 { + if nestedLevels == 1 { + return cborTagSize + } + return cborTagSize + someStorableWithMultipleNestedlevelsArraySize + getUintCBORSize(nestedLevels) } type SomeStorable struct { @@ -773,36 +911,104 @@ type SomeStorable struct { } var _ ContainerStorable = SomeStorable{} +var _ WrapperStorable = SomeStorable{} -func (v SomeStorable) HasPointer() bool { - if ms, ok := v.Storable.(ContainerStorable); ok { +func (s SomeStorable) HasPointer() bool { + if ms, ok := s.Storable.(ContainerStorable); ok { return ms.HasPointer() } return false } -func (v SomeStorable) ByteSize() uint32 { - // tag number (2 bytes) + encoded content - return 2 + v.Storable.ByteSize() +func (s SomeStorable) ByteSize() uint32 { + nonSomeStorable, nestedLevels := s.nonSomeStorable() + return getSomeStorableEncodedPrefixSize(nestedLevels) + nonSomeStorable.ByteSize() } -func (v SomeStorable) Encode(enc *Encoder) error { - err := enc.CBOR.EncodeRawBytes([]byte{ +func (s SomeStorable) Encode(e *Encoder) error { + nonSomeStorable, nestedLevels := s.nonSomeStorable() + if nestedLevels == 1 { + return s.encode(e) + } + return s.encodeMultipleNestedLevels(e, nestedLevels, nonSomeStorable) +} + +// encode encodes SomeStorable with nested levels = 1 as +// +// cbor.Tag{ +// Number: CBORTagSomeValue, +// Content: Value(v.Value), +// } +func (s SomeStorable) encode(e *Encoder) error { + // NOTE: when updating, also update SomeStorable.ByteSize + err := e.CBOR.EncodeRawBytes([]byte{ // tag number 0xd8, cborTagSomeValue, }) if err != nil { return err } - return v.Storable.Encode(enc) + return s.Storable.Encode(e) } -func (v SomeStorable) ChildStorables() []Storable { - return []Storable{v.Storable} +// encodeMultipleNestedLevels encodes SomeStorable with nested levels > 1 as +// +// cbor.Tag{ +// Number: CBORTagSomeValueWithNestedLevels, +// Content: CBORArray[nested_levels, innermsot_value], +// } +func (s SomeStorable) encodeMultipleNestedLevels( + e *Encoder, + levels uint64, + nonSomeStorable Storable, +) error { + // NOTE: when updating, also update SomeStorable.ByteSize + err := e.CBOR.EncodeRawBytes([]byte{ + // tag number + 0xd8, cborTagSomeValueWithNestedLevels, + // array of 2 elements + 0x82, + }) + if err != nil { + return err + } + + err = e.CBOR.EncodeUint64(levels) + if err != nil { + return err + } + + return nonSomeStorable.Encode(e) +} + +// nonSomeStorable returns a non-SomeStorable and nested levels of SomeStorable reached +// by traversing nested SomeStorable (SomeStorable containing SomeStorable, etc.) +// until it reaches a non-SomeStorable. 
+// For example, +// - `SomeStorable{true}` has non-SomeStorable `true`, and nested levels 1 +// - `SomeStorable{SomeStorable{1}}` has non-SomeStorable `1` and nested levels 2 +// - `SomeStorable{SomeStorable{[SomeStorable{SomeStorable{SomeStorable{1}}}]}} has +// non-SomeStorable `[SomeStorable{SomeStorable{SomeStorable{1}}}]` and nested levels 2 +func (s SomeStorable) nonSomeStorable() (Storable, uint64) { + nestedLevels := uint64(1) + for { + switch storable := s.Storable.(type) { + case SomeStorable: + nestedLevels++ + s = storable + + default: + return storable, nestedLevels + } + } +} + +func (s SomeStorable) ChildStorables() []Storable { + return []Storable{s.Storable} } -func (v SomeStorable) StoredValue(storage SlabStorage) (Value, error) { - wv, err := v.Storable.StoredValue(storage) +func (s SomeStorable) StoredValue(storage SlabStorage) (Value, error) { + wv, err := s.Storable.StoredValue(storage) if err != nil { return nil, err } @@ -810,8 +1016,30 @@ func (v SomeStorable) StoredValue(storage SlabStorage) (Value, error) { return SomeValue{wv}, nil } -func (v SomeStorable) String() string { - return fmt.Sprintf("%s", v.Storable) +func (s SomeStorable) String() string { + return fmt.Sprintf("SomeStorable(%s)", s.Storable) +} + +func (s SomeStorable) UnwrapAtreeStorable() Storable { + storable := s.Storable + for { + ws, ok := storable.(WrapperStorable) + if !ok { + break + } + storable = ws.UnwrapAtreeStorable() + } + return storable +} + +func (s SomeStorable) WrapAtreeStorable(storable Storable) Storable { + _, nestedLevels := s.nonSomeStorable() + + newStorable := SomeStorable{Storable: storable} + for i := 1; i < int(nestedLevels); i++ { + newStorable = SomeStorable{Storable: newStorable} + } + return newStorable } type testMutableValue struct { @@ -902,3 +1130,19 @@ func (v *HashableMap) HashInput(scratch []byte) ([]byte, error) { copy(buf[3:], vid[:]) return buf, nil } + +func getUintCBORSize(v uint64) uint32 { + if v <= 23 { + return 1 + } + if v <= math.MaxUint8 { + return 2 + } + if v <= math.MaxUint16 { + return 3 + } + if v <= math.MaxUint32 { + return 5 + } + return 9 +} diff --git a/storage.go b/storage.go index f562ae8..dae0db7 100644 --- a/storage.go +++ b/storage.go @@ -642,7 +642,8 @@ func (s *PersistentSlabStorage) SlabIterator() (SlabIterator, error) { } var err error - slab, ok, err = s.RetrieveIgnoringDeltas(id) + // Don't cache retrieved child slabs during slab iteration to prevent changes to storage cache. + slab, ok, err = s.RetrieveIgnoringDeltas(id, false) if !ok { return NewSlabNotFoundErrorf(id, "slab not found during slab iteration") } @@ -1194,7 +1195,7 @@ func (s *PersistentSlabStorage) DropCache() { s.cache = make(map[SlabID]Slab) } -func (s *PersistentSlabStorage) RetrieveIgnoringDeltas(id SlabID) (Slab, bool, error) { +func (s *PersistentSlabStorage) RetrieveIgnoringDeltas(id SlabID, cache bool) (Slab, bool, error) { // check the read cache next if slab, ok := s.cache[id]; ok { @@ -1218,7 +1219,9 @@ func (s *PersistentSlabStorage) RetrieveIgnoringDeltas(id SlabID) (Slab, bool, e } // save decoded slab to cache - s.cache[id] = slab + if cache { + s.cache[id] = slab + } return slab, ok, nil } @@ -1245,7 +1248,7 @@ func (s *PersistentSlabStorage) Retrieve(id SlabID) (Slab, bool, error) { } // Don't need to wrap error as external error because err is already categorized by PersistentSlabStorage.RetrieveIgnoringDeltas(). 
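+	// Regular retrieval keeps populating the read cache, so pass cache=true here; the slab iterator above passes cache=false to leave the cache untouched.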
- return s.RetrieveIgnoringDeltas(id) + return s.RetrieveIgnoringDeltas(id, true) } func (s *PersistentSlabStorage) Store(id SlabID, slab Slab) error { diff --git a/storage_test.go b/storage_test.go index 59906fe..79510bd 100644 --- a/storage_test.go +++ b/storage_test.go @@ -1045,6 +1045,147 @@ func TestPersistentStorageSlabIterator(t *testing.T) { } require.Equal(t, len(data), count) }) + + t.Run("not-empty storage with some slabs in deltas", func(t *testing.T) { + + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + id1 := SlabID{address: address, index: SlabIndex{0, 0, 0, 0, 0, 0, 0, 1}} + id2 := SlabID{address: address, index: SlabIndex{0, 0, 0, 0, 0, 0, 0, 2}} + + originalData := map[SlabID][]byte{ + // (data slab) data: [aaaaaaaaaaaaaaaaaaaaaa ... aaaaaaaaaaaaaaaaaaaaaa] + id1: { + // version + 0x10, + // flag + 0xc0, + // array extra data + 0x81, + // type info + 0x18, 0x2a, + // CBOR encoded array head (fixed size 3 byte) + 0x99, 0x00, 0x14, + // CBOR encoded array elements + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 
0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0xd8, 0xff, 0x50, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, + }, + + // (data slab) next: 0, data: [0] + id2: { + // version + 0x10, + // extra data flag + 0x80, + // array of extra data + 0x81, + // type info + 0x18, 0x2b, + + // CBOR encoded array head (fixed size 3 byte) + 0x99, 0x00, 0x01, + // CBOR encoded array elements + 0xd8, 0xa4, 0x00, + }, + } + + modifiedData := map[SlabID][]byte{ + // (data slab) data: [aaaaaaaaaaaaaaaaaaaaaa ... aaaaaaaaaaaaaaaaaaaaaa] + id1: { + // version + 0x10, + // flag + 0xc0, + // array extra data + 0x81, + // type info + 0x18, 0x2a, + // CBOR encoded array head (fixed size 3 byte) + 0x99, 0x00, 0x14, + // CBOR encoded array elements + 0x76, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, 0x62, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0x76, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, 0x61, + 0xd8, 0xff, 0x50, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 
0x07, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, + }, + + // (data slab) next: 0, data: [0] + id2: { + // version + 0x10, + // extra data flag + 0x80, + // array of extra data + 0x81, + // type info + 0x18, 0x2b, + + // CBOR encoded array head (fixed size 3 byte) + 0x99, 0x00, 0x01, + // CBOR encoded array elements + 0xd8, 0xa4, 0x00, + }, + } + storage := newTestPersistentStorageWithData(t, originalData) + + array, err := NewArrayWithRootID(storage, id1) + require.NoError(t, err) + + storable, err := array.Set(uint64(0), NewStringValue("bbbbbbbbbbbbbbbbbbbbbb")) + require.NoError(t, err) + require.NotNil(t, storable) + + iterator, err := storage.SlabIterator() + require.NoError(t, err) + + count := 0 + for { + id, slab := iterator() + if id == SlabIDUndefined { + break + } + + encodedSlab, err := EncodeSlab(slab, storage.cborEncMode) + require.NoError(t, err) + + require.Equal(t, modifiedData[id], encodedSlab) + count++ + } + require.Equal(t, len(originalData), count) + }) } func TestPersistentStorageGenerateSlabID(t *testing.T) { diff --git a/typeinfo.go b/typeinfo.go index ef7f218..944cb2a 100644 --- a/typeinfo.go +++ b/typeinfo.go @@ -594,6 +594,10 @@ func (ied *InlinedExtraData) empty() bool { func makeCompactMapTypeID(encodedTypeInfo string, names []ComparableStorable) string { const separator = "," + if len(names) == 0 { + return encodedTypeInfo + } + if len(names) == 1 { return encodedTypeInfo + separator + names[0].ID() } diff --git a/utils_test.go b/utils_test.go index 1e5b980..cc9b8cc 100644 --- a/utils_test.go +++ b/utils_test.go @@ -343,6 +343,15 @@ func valueEqual(t *testing.T, expected Value, actual Value) { case *OrderedMap: require.FailNow(t, "expected value shouldn't be *OrderedMap") + case someValue: + actual, ok := actual.(SomeValue) + require.True(t, ok) + + valueEqual(t, expected.Value, actual.Value) + + case SomeValue: + require.FailNow(t, "expected value shouldn't be SomeValue") + default: require.Equal(t, expected, actual) } @@ -438,3 +447,13 @@ var _ Value = &mapValue{} func (v mapValue) Storable(SlabStorage, Address, uint64) (Storable, error) { panic("not reachable") } + +type someValue struct { + Value Value +} + +var _ Value = &someValue{} + +func (v someValue) Storable(SlabStorage, Address, uint64) (Storable, error) { + panic("not reachable") +} diff --git a/value.go b/value.go index 0652d1a..42e3abc 100644 --- a/value.go +++ b/value.go @@ -22,6 +22,14 @@ type Value interface { Storable(SlabStorage, Address, uint64) (Storable, error) } +// WrapperValue is an interface that supports value wrapping another value. +type WrapperValue interface { + Value + + // UnwrapAtreeValue returns innermost wrapped Value and wrapper size. + UnwrapAtreeValue() (Value, uint64) +} + type ValueComparator func(SlabStorage, Value, Storable) (bool, error) type StorableComparator func(Storable, Storable) bool @@ -36,3 +44,12 @@ type mutableValueNotifier interface { Inlined() bool Inlinable(uint64) bool } + +func unwrapValue(v Value) (Value, uint64) { + switch v := v.(type) { + case WrapperValue: + return v.UnwrapAtreeValue() + default: + return v, 0 + } +}
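
As a worked illustration of the nested-levels encoding introduced above (a sketch, not taken from the patch; the variable name is illustrative only): with cborTagSomeValueWithNestedLevels = 167 and the Uint64Value encoding used in the storage fixtures (tag 164), a doubly nested SomeStorable is expected to encode as a single tag-167 value carrying the level count and the innermost storable.

// Sketch: expected encoding of SomeStorable{SomeStorable{Uint64Value(0)}}
// (nonSomeStorable = Uint64Value(0), nestedLevels = 2).
var expectedDoublyNestedSomeStorable = []byte{
	0xd8, 0xa7, // CBOR tag 167 (cborTagSomeValueWithNestedLevels)
	0x82,             // array of 2 elements
	0x02,             // nested levels = 2
	0xd8, 0xa4, 0x00, // innermost Uint64Value(0) (CBOR tag 164, value 0)
}

ByteSize agrees with the prefix accounting: getSomeStorableEncodedPrefixSize(2) = 2 (tag) + 1 (array head) + 1 (uint 2) = 4 bytes, plus 3 bytes for the wrapped Uint64Value(0), for 7 bytes total. A singly nested SomeStorable keeps the original 2-byte tag-165 prefix.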