diff --git a/go.mod b/go.mod index 17efb989b..c279c9f53 100644 --- a/go.mod +++ b/go.mod @@ -12,7 +12,7 @@ require ( github.com/kr/pretty v0.3.1 github.com/leanovate/gopter v0.2.9 github.com/logrusorgru/aurora/v4 v4.0.0 - github.com/onflow/atree v0.8.1 + github.com/onflow/atree v0.9.0 github.com/rivo/uniseg v0.4.4 github.com/schollz/progressbar/v3 v3.13.1 github.com/stretchr/testify v1.10.0 diff --git a/go.sum b/go.sum index 46e8c1a64..a1a1dd72c 100644 --- a/go.sum +++ b/go.sum @@ -72,8 +72,8 @@ github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db h1:62I3jR2Em github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db/go.mod h1:l0dey0ia/Uv7NcFFVbCLtqEBQbrT4OCwCSKTEv6enCw= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= -github.com/onflow/atree v0.8.1 h1:DAnPnL9/Ks3LaAnkQVokokTBG/znTW0DJfovDtJDhLI= -github.com/onflow/atree v0.8.1/go.mod h1:FT6udJF9Q7VQTu3wknDhFX+VV4D44ZGdqtTAE5iztck= +github.com/onflow/atree v0.9.0 h1:M+Z/UPwzv0/Yy7ChI5T1ZIHD3YN1cs/hxGEs/HWhzaY= +github.com/onflow/atree v0.9.0/go.mod h1:FT6udJF9Q7VQTu3wknDhFX+VV4D44ZGdqtTAE5iztck= github.com/onflow/crypto v0.25.0 h1:BeWbLsh3ZD13Ej+Uky6kg1PL1ZIVBDVX+2MVBNwqddg= github.com/onflow/crypto v0.25.0/go.mod h1:C8FbaX0x8y+FxWjbkHy0Q4EASCDR9bSPWZqlpCLYyVI= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= diff --git a/interpreter/bitwise_test.go b/interpreter/bitwise_test.go index a288e7e66..9f0d12d74 100644 --- a/interpreter/bitwise_test.go +++ b/interpreter/bitwise_test.go @@ -20,10 +20,14 @@ package interpreter_test import ( "fmt" + "math/big" "testing" + "github.com/stretchr/testify/require" + "github.com/onflow/cadence/interpreter" "github.com/onflow/cadence/sema" + . "github.com/onflow/cadence/test_utils/common_utils" . 
"github.com/onflow/cadence/test_utils/interpreter_utils" ) @@ -251,3 +255,914 @@ func TestInterpretBitwiseRightShift(t *testing.T) { }) } } + +func TestInterpretBitwiseNegativeShift(t *testing.T) { + t.Run("Int8 << -3", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + fun test() { + let a: Int8 = 0x7f + let b: Int8 = -3 + let c = a << b + } + `) + _, err := inter.Invoke("test") + RequireError(t, err) + + var shiftErr interpreter.NegativeShiftError + require.ErrorAs(t, err, &shiftErr) + }) + + t.Run("Int16 << -3", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + fun test() { + let a: Int16 = 0x7f + let b: Int16 = -3 + let c = a << b + } + `) + _, err := inter.Invoke("test") + RequireError(t, err) + + var shiftErr interpreter.NegativeShiftError + require.ErrorAs(t, err, &shiftErr) + }) + + t.Run("Int32 << -3", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + fun test() { + let a: Int32 = 0x7f + let b: Int32 = -3 + let c = a << b + } + `) + _, err := inter.Invoke("test") + RequireError(t, err) + + var shiftErr interpreter.NegativeShiftError + require.ErrorAs(t, err, &shiftErr) + }) + + t.Run("Int64 << -3", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + fun test() { + let a: Int64 = 0x7f + let b: Int64 = -3 + let c = a << b + } + `) + _, err := inter.Invoke("test") + RequireError(t, err) + + var shiftErr interpreter.NegativeShiftError + require.ErrorAs(t, err, &shiftErr) + }) + + t.Run("Int128 << -3", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + fun test() { + let a: Int128 = 0x7f + let b: Int128 = -3 + let c = a << b + } + `) + _, err := inter.Invoke("test") + RequireError(t, err) + + var shiftErr interpreter.NegativeShiftError + require.ErrorAs(t, err, &shiftErr) + }) + + t.Run("Int256 << -3", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + fun test() { + let a: Int256 = 0x7f + let b: Int256 = -3 + let c = a << b + } + `) + _, err := inter.Invoke("test") + RequireError(t, err) + + var shiftErr interpreter.NegativeShiftError + require.ErrorAs(t, err, &shiftErr) + }) +} + +func TestInterpretBitwiseLeftShift8(t *testing.T) { + + t.Parallel() + + t.Run("Int8 << 9 (zero result)", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: Int8 = 0x7f + let b: Int8 = 9 + let c = a << b + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredInt8Value(0), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("Int8 << 1 (positive to positive)", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: Int8 = 5 + let b: Int8 = 1 + let c = a << b + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredInt8Value(10), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("Int8 << 1 (negative to negative)", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: Int8 = -5 // 0b1111_1011 + let b: Int8 = 1 + let c = a << b // 0b1111_0110 --> -10 + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredInt8Value(-10), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("Int8 << 1 (positive to negative)", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: Int8 = 5 // 0b0000_0101 + let b: Int8 = 7 + let c = a << b // 0b1000_0000 --> -128 + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredInt8Value(-128), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("Int8 << 1 (negative to positive)", func(t *testing.T) { + + inter := 
parseCheckAndInterpret(t, + ` + let a: Int8 = -5 // 0b1111_1011 + let b: Int8 = 5 + let c = a << b // 0b0110_0000 --> 96 + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredInt8Value(0x60), // or 96 + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("UInt8 << 9", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: UInt8 = 0x7f + let b: UInt8 = 9 + let c = a << b + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredUInt8Value(0), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("UInt8 << 1", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: UInt8 = 0xff + let b: UInt8 = 1 + let c = a << b + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredUInt8Value(0xfe), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("Word8 << 9", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: Word8 = 0xff + let b: Word8 = 9 + let c = a << b + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredWord8Value(0), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("Word8 << 1", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: Word8 = 0xff + let b: Word8 = 1 + let c = a << b + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredWord8Value(0xfe), + inter.Globals.Get("c").GetValue(inter), + ) + }) +} + +func TestInterpretBitwiseLeftShift128(t *testing.T) { + + t.Parallel() + + t.Run("Int128 << 130 (zero result)", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: Int128 = 0x7fff_ffff_ffff_ffff_ffff_ffff_ffff_ffff + let b: Int128 = 130 + let c = a << b + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredInt128ValueFromInt64(int64(0)), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("Int128 << 1 (positive to positive)", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: Int128 = 5 + let b: Int128 = 1 + let c = a << b + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredInt128ValueFromInt64(10), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("Int128 << 1 (negative to negative)", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: Int128 = -5 + let b: Int128 = 1 + let c = a << b + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredInt128ValueFromInt64(-10), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("Int128 << 127 (positive to negative)", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: Int128 = 5 // 0b0000_0101 + let b: Int128 = 127 + let c = a << b // 0b1000_0000_..._0000 --> -2^127 + `, + ) + + bigInt, _ := big.NewInt(0).SetString("-0x80000000_00000000_00000000_00000000", 0) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredInt128ValueFromBigInt(bigInt), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("Int128 << 125 (negative to positive)", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: Int128 = -5 // 0b1111_1111_..._1111_1011 + let b: Int128 = 125 + let c = a << b // 0b0110_0000_..._0000 + `, + ) + + bigInt, _ := big.NewInt(0).SetString("0x60000000_00000000_00000000_00000000", 0) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredInt128ValueFromBigInt(bigInt), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("Int128 << -3", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + fun test() { + let a: Int128 = 0x7fff_ffff + 
let b: Int128 = -3 + let c = a << b + } + `) + _, err := inter.Invoke("test") + RequireError(t, err) + + var shiftErr interpreter.NegativeShiftError + require.ErrorAs(t, err, &shiftErr) + }) + + t.Run("UInt128 << 130", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: UInt128 = 0x7fff_ffff + let b: UInt128 = 130 + let c = a << b + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredUInt128ValueFromUint64(uint64(0)), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("UInt128 << 32", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: UInt128 = 0xffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff + let b: UInt128 = 32 + let c = a << b + `, + ) + + bigInt, _ := big.NewInt(0).SetString("0xffff_ffff_ffff_ffff_ffff_ffff_0000_0000", 0) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredUInt128ValueFromBigInt(bigInt), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("Word128 << 130", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: Word128 = 0xffff_ffff_ffff_ffff + let b: Word128 = 130 + let c = a << b + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredWord128ValueFromUint64(uint64(0)), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("Word128 << 32", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: Word128 = 0xffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff + let b: Word128 = 32 + let c = a << b + `, + ) + + bigInt, _ := big.NewInt(0).SetString("0xffff_ffff_ffff_ffff_ffff_ffff_0000_0000", 0) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredWord128ValueFromBigInt(bigInt), + inter.Globals.Get("c").GetValue(inter), + ) + }) +} + +func TestInterpretBitwiseLeftShift256(t *testing.T) { + + t.Parallel() + + t.Run("Int256 << 260 (zero result)", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: Int256 = 0x7fff_ffff + let b: Int256 = 260 + let c = a << b + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredInt256ValueFromInt64(int64(0)), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("Int256 << 1 (positive to positive)", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: Int256 = 5 + let b: Int256 = 1 + let c = a << b + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredInt256ValueFromInt64(10), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("Int256 << 1 (negative to negative)", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: Int256 = -5 + let b: Int256 = 1 + let c = a << b + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredInt256ValueFromInt64(-10), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("Int256 << 255 (positive to negative)", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: Int256 = 5 // 0b0000_0101 + let b: Int256 = 255 + let c = a << b // 0b1000_0000_..._0000 --> -2^127 + `, + ) + + bigInt, _ := big.NewInt(0).SetString("-0x80000000_00000000_00000000_00000000_00000000_00000000_00000000_00000000", 0) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredInt256ValueFromBigInt(bigInt), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("Int256 << 253 (negative to positive)", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: Int256 = -5 // 0b1111_1111_..._1111_1011 + let b: Int256 = 253 + let c = a << b // 0b0110_0000_..._0000 + `, + ) + + bigInt, _ := 
big.NewInt(0).SetString("0x60000000_00000000_00000000_00000000_00000000_00000000_00000000_00000000", 0) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredInt256ValueFromBigInt(bigInt), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("Int256 << -3", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + fun test() { + let a: Int256 = 0x7fff_ffff + let b: Int256 = -3 + let c = a << b + } + `) + _, err := inter.Invoke("test") + RequireError(t, err) + + var shiftErr interpreter.NegativeShiftError + require.ErrorAs(t, err, &shiftErr) + }) + + t.Run("UInt256 << 260", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: UInt256 = 0x7fff_ffff + let b: UInt256 = 260 + let c = a << b + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredUInt256ValueFromUint64(uint64(0)), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("UInt256 << 32", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: UInt256 = 0x7fff_ffff + let b: UInt256 = 32 + let c = a << b + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredUInt256ValueFromUint64(uint64(0x7fff_ffff_0000_0000)), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("Word256 << 260", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: Word256 = 0x7fff_ffff + let b: Word256 = 260 + let c = a << b + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredWord256ValueFromUint64(uint64(0)), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("Word256 << 32", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: Word256 = 0x7fff_ffff + let b: Word256 = 32 + let c = a << b + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredWord256ValueFromUint64(uint64(0x7fff_ffff_0000_0000)), + inter.Globals.Get("c").GetValue(inter), + ) + }) +} + +func TestInterpretBitwiseRightShift128(t *testing.T) { + + t.Parallel() + + t.Run("Int128 >> 130", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: Int128 = 0x7fff_ffff_ffff_ffff_ffff_ffff_ffff_ffff + let b: Int128 = 130 + let c = a >> b + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredInt128ValueFromInt64(int64(0)), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("Int128 >> 32", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: Int128 = 0x7fff_ffff_0000_0000 + let b: Int128 = 32 + let c = a >> b + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredInt128ValueFromInt64(int64(0x7fff_ffff)), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("Int128 >> -3", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + fun test() { + let a: Int128 = 0x7fff_ffff + let b: Int128 = -3 + let c = a >> b + } + `) + _, err := inter.Invoke("test") + RequireError(t, err) + + var shiftErr interpreter.NegativeShiftError + require.ErrorAs(t, err, &shiftErr) + }) + + t.Run("UInt128 >> 130", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: UInt128 = 0x7fff_ffff + let b: UInt128 = 130 + let c = a >> b + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredUInt128ValueFromUint64(uint64(0)), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("UInt128 >> 32", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: UInt128 = 0xffff_ffff_0000_0000 + let b: UInt128 = 32 + let c = a >> b + `, + ) + + AssertValuesEqual( + t, + inter, + 
interpreter.NewUnmeteredUInt128ValueFromUint64(uint64(0xffff_ffff)), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("Word128 >> 130", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: Word128 = 0xffff_ffff_ffff_ffff + let b: Word128 = 130 + let c = a >> b + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredWord128ValueFromUint64(uint64(0)), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("Word128 >> 32", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: Word128 = 0xffff_ffff_0000_0000 + let b: Word128 = 32 + let c = a >> b + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredWord128ValueFromUint64(uint64(0xffff_ffff)), + inter.Globals.Get("c").GetValue(inter), + ) + }) +} + +func TestInterpretBitwiseRightShift256(t *testing.T) { + + t.Parallel() + + t.Run("Int256 >> 260", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: Int256 = 0x7fff_ffff + let b: Int256 = 260 + let c = a >> b + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredInt256ValueFromInt64(int64(0)), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("Int256 >> 32", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: Int256 = 0x7fff_ffff_0000_0000 + let b: Int256 = 32 + let c = a >> b + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredInt256ValueFromInt64(int64(0x7fff_ffff)), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("Int256 >> -3", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + fun test() { + let a: Int256 = 0x7fff_ffff + let b: Int256 = -3 + let c = a >> b + } + `) + _, err := inter.Invoke("test") + RequireError(t, err) + + var shiftErr interpreter.NegativeShiftError + require.ErrorAs(t, err, &shiftErr) + }) + + t.Run("UInt256 >> 260", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: UInt256 = 0x7fff_ffff + let b: UInt256 = 260 + let c = a >> b + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredUInt256ValueFromUint64(uint64(0)), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("UInt256 >> 32", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: UInt256 = 0x7fff_ffff_0000_0000 + let b: UInt256 = 32 + let c = a >> b + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredUInt256ValueFromUint64(uint64(0x7fff_ffff)), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("Word256 >> 260", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: Word256 = 0x7fff_ffff + let b: Word256 = 260 + let c = a >> b + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredWord256ValueFromUint64(uint64(0)), + inter.Globals.Get("c").GetValue(inter), + ) + }) + + t.Run("Word256 >> 32", func(t *testing.T) { + + inter := parseCheckAndInterpret(t, + ` + let a: Word256 = 0x7fff_ffff_0000_0000 + let b: Word256 = 32 + let c = a >> b + `, + ) + + AssertValuesEqual( + t, + inter, + interpreter.NewUnmeteredWord256ValueFromUint64(uint64(0x7fff_ffff)), + inter.Globals.Get("c").GetValue(inter), + ) + }) +} diff --git a/interpreter/errors.go b/interpreter/errors.go index b009ec810..fba381b4b 100644 --- a/interpreter/errors.go +++ b/interpreter/errors.go @@ -300,7 +300,21 @@ func (e UnderflowError) Error() string { return "underflow" } -// UnderflowError +// NegativeShiftError + +type NegativeShiftError struct { + LocationRange +} + +var _ errors.UserError = NegativeShiftError{} + +func 
(NegativeShiftError) IsUserError() {} + +func (e NegativeShiftError) Error() string { + return "negative shift" +} + +// DivisionByZeroError type DivisionByZeroError struct { LocationRange diff --git a/interpreter/memory_metering_test.go b/interpreter/memory_metering_test.go index 725ea9303..62f9d6ef9 100644 --- a/interpreter/memory_metering_test.go +++ b/interpreter/memory_metering_test.go @@ -3289,9 +3289,9 @@ func TestInterpretUInt128Metering(t *testing.T) { _, err := inter.Invoke("main") require.NoError(t, err) - // creation: 8 + 8 + // creation: 8 + 8 + 16 // result: 16 - assert.Equal(t, uint64(32), meter.getMemory(common.MemoryKindBigInt)) + assert.Equal(t, uint64(48), meter.getMemory(common.MemoryKindBigInt)) }) t.Run("bitwise right-shift", func(t *testing.T) { @@ -3587,9 +3587,9 @@ func TestInterpretUInt256Metering(t *testing.T) { _, err := inter.Invoke("main") require.NoError(t, err) - // creation: 8 + 8 + // creation: 8 + 8 + 32 // result: 32 - assert.Equal(t, uint64(48), meter.getMemory(common.MemoryKindBigInt)) + assert.Equal(t, uint64(80), meter.getMemory(common.MemoryKindBigInt)) }) t.Run("bitwise right-shift", func(t *testing.T) { @@ -5308,9 +5308,9 @@ func TestInterpretInt128Metering(t *testing.T) { _, err := inter.Invoke("main") require.NoError(t, err) - // two literals: 8 + 8 + // two literals: 8 + 8 + 16 // result: 16 - assert.Equal(t, uint64(32), meter.getMemory(common.MemoryKindBigInt)) + assert.Equal(t, uint64(48), meter.getMemory(common.MemoryKindBigInt)) }) t.Run("bitwise right shift", func(t *testing.T) { @@ -5677,9 +5677,9 @@ func TestInterpretInt256Metering(t *testing.T) { _, err := inter.Invoke("main") require.NoError(t, err) - // two literals: 8 + 8 + // two literals: 8 + 8 + 32 // result: 32 - assert.Equal(t, uint64(48), meter.getMemory(common.MemoryKindBigInt)) + assert.Equal(t, uint64(80), meter.getMemory(common.MemoryKindBigInt)) }) t.Run("bitwise right shift", func(t *testing.T) { diff --git a/interpreter/misc_test.go b/interpreter/misc_test.go index aa952bcc4..7ef440fac 100644 --- a/interpreter/misc_test.go +++ b/interpreter/misc_test.go @@ -41,6 +41,7 @@ import ( "github.com/onflow/cadence/stdlib" . "github.com/onflow/cadence/test_utils/common_utils" . "github.com/onflow/cadence/test_utils/interpreter_utils" + . "github.com/onflow/cadence/test_utils/runtime_utils" . "github.com/onflow/cadence/test_utils/sema_utils" ) @@ -67,6 +68,23 @@ func parseCheckAndInterpretWithOptions( return parseCheckAndInterpretWithOptionsAndMemoryMetering(t, code, options, nil) } +func parseCheckAndInterpretWithAtreeValidationsDisabled( + t testing.TB, + code string, + options ParseCheckAndInterpretOptions, +) ( + inter *interpreter.Interpreter, + err error, +) { + return parseCheckAndInterpretWithOptionsAndMemoryMeteringAndAtreeValidations( + t, + code, + options, + nil, + false, + ) +} + func parseCheckAndInterpretWithLogs( tb testing.TB, code string, @@ -169,6 +187,30 @@ func parseCheckAndInterpretWithOptionsAndMemoryMetering( err error, ) { + // Atree validation should be disabled for memory metering tests. + // Otherwise, validation may also affect the memory consumption. 
+ enableAtreeValidations := memoryGauge == nil + + return parseCheckAndInterpretWithOptionsAndMemoryMeteringAndAtreeValidations( + t, + code, + options, + memoryGauge, + enableAtreeValidations, + ) +} + +func parseCheckAndInterpretWithOptionsAndMemoryMeteringAndAtreeValidations( + t testing.TB, + code string, + options ParseCheckAndInterpretOptions, + memoryGauge common.MemoryGauge, + enableAtreeValidations bool, +) ( + inter *interpreter.Interpreter, + err error, +) { + checker, err := ParseAndCheckWithOptionsAndMemoryMetering(t, code, ParseAndCheckOptions{ @@ -197,10 +239,15 @@ func parseCheckAndInterpretWithOptionsAndMemoryMetering( if options.Config != nil { config = *options.Config } - if memoryGauge == nil { + + if enableAtreeValidations { config.AtreeValueValidationEnabled = true config.AtreeStorageValidationEnabled = true + } else { + config.AtreeValueValidationEnabled = false + config.AtreeStorageValidationEnabled = false } + if config.UUIDHandler == nil { config.UUIDHandler = func() (uint64, error) { uuid++ @@ -12559,3 +12606,675 @@ func TestInterpretStringTemplates(t *testing.T) { ) }) } + +func TestInterpretSomeValueChildContainerMutation(t *testing.T) { + + t.Parallel() + + test := func(t *testing.T, code string) { + + t.Parallel() + + ledger := NewTestLedger(nil, nil) + + newInter := func() *interpreter.Interpreter { + + inter, err := parseCheckAndInterpretWithOptions(t, + code, + ParseCheckAndInterpretOptions{ + Config: &interpreter.Config{ + Storage: runtime.NewStorage(ledger, nil, runtime.StorageConfig{}), + }, + }, + ) + require.NoError(t, err) + + return inter + } + + // Setup + + inter := newInter() + + foo, err := inter.Invoke("setup") + require.NoError(t, err) + + address := common.MustBytesToAddress([]byte{0x1}) + path := interpreter.NewUnmeteredPathValue(common.PathDomainStorage, "foo") + + storage := inter.Storage().(*runtime.Storage) + storageMap := storage.GetDomainStorageMap( + inter, + address, + common.StorageDomain(path.Domain), + true, + ) + + foo = foo.Transfer( + inter, + interpreter.EmptyLocationRange, + atree.Address(address), + false, + nil, + nil, + true, + ) + + // Write the value to the storage map. + // However, the value is not referenced by the root of the storage yet + // (a storage map), so atree storage validation must be temporarily disabled + // to not report any "unreferenced slab" errors. 
+ withoutAtreeStorageValidationEnabled( + inter, + func() struct{} { + storageMap.WriteValue( + inter, + interpreter.StringStorageMapKey(path.Identifier), + foo, + ) + return struct{}{} + }, + ) + + err = storage.Commit(inter, false) + require.NoError(t, err) + + // Update + + inter = newInter() + + storage = inter.Storage().(*runtime.Storage) + storageMap = storage.GetDomainStorageMap( + inter, + address, + common.StorageDomain(path.Domain), + false, + ) + require.NotNil(t, storageMap) + + ref := interpreter.NewStorageReferenceValue( + nil, + interpreter.UnauthorizedAccess, + address, + path, + nil, + ) + + result, err := inter.Invoke("update", ref) + require.NoError(t, err) + assert.Equal(t, interpreter.TrueValue, result) + + err = storage.Commit(inter, false) + require.NoError(t, err) + + // Update again + + inter = newInter() + + storage = inter.Storage().(*runtime.Storage) + storageMap = storage.GetDomainStorageMap( + inter, + address, + common.StorageDomain(path.Domain), + false, + ) + require.NotNil(t, storageMap) + + ref = interpreter.NewStorageReferenceValue( + nil, + interpreter.UnauthorizedAccess, + address, + path, + nil, + ) + + result, err = inter.Invoke("updateAgain", ref) + require.NoError(t, err) + assert.Equal(t, interpreter.TrueValue, result) + } + + t.Run("dictionary, one level", func(t *testing.T) { + + test(t, ` + struct Foo { + let values: {String: Int}? + + init() { + self.values = {} + } + + fun set(key: String, value: Int) { + if let ref: auth(Mutate) &{String: Int} = &self.values { + ref[key] = value + } + } + + fun get(key: String): Int? { + if let ref: &{String: Int} = &self.values { + return ref[key] + } + return nil + } + } + + fun setup(): Foo { + let foo = Foo() + foo.set(key: "a", value: 1) + return foo + } + + fun update(foo: &Foo): Bool { + if foo.get(key: "a") != 1 { + return false + } + foo.set(key: "a", value: 2) + return true + } + + fun updateAgain(foo: &Foo): Bool { + if foo.get(key: "a") != 2 { + return false + } + foo.set(key: "a", value: 3) + return true + } + `) + }) + + t.Run("dictionary, two levels", func(t *testing.T) { + test(t, ` + struct Foo { + let values: {String: Int}?? + + init() { + self.values = {} + } + + fun set(key: String, value: Int) { + if let optRef: auth(Mutate) &{String: Int}? = &self.values { + if let ref: auth(Mutate) &{String: Int} = optRef { + ref[key] = value + } + } + } + + fun get(key: String): Int? { + if let optRef: &{String: Int}? = &self.values { + if let ref: &{String: Int} = optRef { + return ref[key] + } + } + return nil + } + } + + fun setup(): Foo { + let foo = Foo() + foo.set(key: "a", value: 1) + return foo + } + + fun update(foo: &Foo): Bool { + if foo.get(key: "a") != 1 { + return false + } + foo.set(key: "a", value: 2) + return true + } + + fun updateAgain(foo: &Foo): Bool { + if foo.get(key: "a") != 2 { + return false + } + foo.set(key: "a", value: 3) + return true + } + `) + }) + + t.Run("dictionary, nested", func(t *testing.T) { + + test(t, ` + struct Bar { + let values: {String: Int}? + + init() { + self.values = {} + } + + fun set(key: String, value: Int) { + if let ref: auth(Mutate) &{String: Int} = &self.values { + ref[key] = value + } + } + + fun get(key: String): Int? { + if let ref: &{String: Int} = &self.values { + return ref[key] + } + return nil + } + } + + struct Foo { + let values: {String: Bar}? 
+ + init() { + self.values = {} + } + + fun set(key: String, value: Int) { + if let ref: auth(Mutate) &{String: Bar} = &self.values { + if ref[key] == nil { + ref[key] = Bar() + } + ref[key]?.set(key: key, value: value) + } + } + + fun get(key: String): Int? { + if let ref: &{String: Bar} = &self.values { + return ref[key]?.get(key: key) ?? nil + } + return nil + } + } + + fun setup(): Foo { + let foo = Foo() + foo.set(key: "a", value: 1) + return foo + } + + fun update(foo: &Foo): Bool { + if foo.get(key: "a") != 1 { + return false + } + foo.set(key: "a", value: 2) + return true + } + + fun updateAgain(foo: &Foo): Bool { + if foo.get(key: "a") != 2 { + return false + } + foo.set(key: "a", value: 3) + return true + } + `) + }) + + t.Run("resource, one level", func(t *testing.T) { + + test(t, ` + + resource Bar { + var value: Int + + init() { + self.value = 0 + } + } + + resource Foo { + let bar: @Bar? + + init() { + self.bar <- create Bar() + } + + fun set(value: Int) { + if let ref: &Bar = &self.bar { + ref.value = value + } + } + + fun getValue(): Int? { + return self.bar?.value + } + } + + fun setup(): @Foo { + let foo <- create Foo() + foo.set(value: 1) + return <-foo + } + + fun update(foo: &Foo): Bool { + if foo.getValue() != 1 { + return false + } + foo.set(value: 2) + return true + } + + fun updateAgain(foo: &Foo): Bool { + if foo.getValue() != 2 { + return false + } + foo.set(value: 3) + return true + } + `) + + }) + + t.Run("resource, two levels", func(t *testing.T) { + + test(t, ` + + resource Bar { + var value: Int + + init() { + self.value = 0 + } + } + + resource Foo { + let bar: @Bar?? + + init() { + self.bar <- create Bar() + } + + fun set(value: Int) { + if let optRef: &Bar? = &self.bar { + if let ref = optRef { + ref.value = value + } + } + } + + fun getValue(): Int? { + if let optRef: &Bar? = &self.bar { + return optRef?.value + } + return nil + } + } + + fun setup(): @Foo { + let foo <- create Foo() + foo.set(value: 1) + return <-foo + } + + fun update(foo: &Foo): Bool { + if foo.getValue() != 1 { + return false + } + foo.set(value: 2) + return true + } + + fun updateAgain(foo: &Foo): Bool { + if foo.getValue() != 2 { + return false + } + foo.set(value: 3) + return true + } + `) + }) + + t.Run("resource, nested", func(t *testing.T) { + + test(t, ` + resource Baz { + var value: Int + + init() { + self.value = 0 + } + } + + resource Bar { + let baz: @Baz? + + init() { + self.baz <- create Baz() + } + + fun set(value: Int) { + if let ref: &Baz = &self.baz { + ref.value = value + } + } + + fun getValue(): Int? { + return self.baz?.value + } + } + + resource Foo { + let bar: @Bar? + + init() { + self.bar <- create Bar() + } + + fun set(value: Int) { + if let ref: &Bar = &self.bar { + ref.set(value: value) + } + } + + fun getValue(): Int? { + return self.bar?.getValue() ?? nil + } + } + + fun setup(): @Foo { + let foo <- create Foo() + foo.set(value: 1) + return <-foo + } + + fun update(foo: &Foo): Bool { + if foo.getValue() != 1 { + return false + } + foo.set(value: 2) + return true + } + + fun updateAgain(foo: &Foo): Bool { + if foo.getValue() != 2 { + return false + } + foo.set(value: 3) + return true + } + `) + }) + + t.Run("array, one level", func(t *testing.T) { + + test(t, ` + + struct Foo { + let values: [Int]? + + init() { + self.values = [] + } + + fun set(value: Int) { + if let ref: auth(Mutate) &[Int] = &self.values { + if ref.length == 0 { + ref.append(value) + } else { + ref[0] = value + } + } + } + + fun getValue(): Int? 
{ + if let ref: &[Int] = &self.values { + return ref[0] + } + return nil + } + } + + fun setup(): Foo { + let foo = Foo() + foo.set(value: 1) + return foo + } + + fun update(foo: &Foo): Bool { + if foo.getValue() != 1 { + return false + } + foo.set(value: 2) + return true + } + + fun updateAgain(foo: &Foo): Bool { + if foo.getValue() != 2 { + return false + } + foo.set(value: 3) + return true + } + `) + + }) + + t.Run("array, two levels", func(t *testing.T) { + + test(t, ` + + struct Foo { + let values: [Int]?? + + init() { + self.values = [] + } + + fun set(value: Int) { + if let optRef: auth(Mutate) &[Int]? = &self.values { + if let ref = optRef { + if ref.length == 0 { + ref.append(value) + } else { + ref[0] = value + } + } + } + } + + fun getValue(): Int? { + if let optRef: &[Int]? = &self.values { + if let ref = optRef { + return ref[0] + } + } + return nil + } + } + + fun setup(): Foo { + let foo = Foo() + foo.set(value: 1) + return foo + } + + fun update(foo: &Foo): Bool { + if foo.getValue() != 1 { + return false + } + foo.set(value: 2) + return true + } + + fun updateAgain(foo: &Foo): Bool { + if foo.getValue() != 2 { + return false + } + foo.set(value: 3) + return true + } + `) + }) + + t.Run("array, nested", func(t *testing.T) { + + test(t, ` + + struct Bar { + let values: [Int]? + + init() { + self.values = [] + } + + fun set(value: Int) { + if let ref: auth(Mutate) &[Int] = &self.values { + if ref.length == 0 { + ref.append(value) + } else { + ref[0] = value + } + } + } + + fun getValue(): Int? { + if let ref: &[Int] = &self.values { + return ref[0] + } + return nil + } + } + + struct Foo { + let values: [Bar]? + + init() { + self.values = [] + } + + fun set(value: Int) { + if let ref: auth(Mutate) &[Bar] = &self.values { + if ref.length == 0 { + ref.append(Bar()) + } + ref[0].set(value: value) + } + } + + fun getValue(): Int? { + if let ref: &[Bar] = &self.values { + return ref[0].getValue() + } + return nil + } + } + + fun setup(): Foo { + let foo = Foo() + foo.set(value: 1) + return foo + } + + fun update(foo: &Foo): Bool { + if foo.getValue() != 1 { + return false + } + foo.set(value: 2) + return true + } + + fun updateAgain(foo: &Foo): Bool { + if foo.getValue() != 2 { + return false + } + foo.set(value: 3) + return true + } + `) + + }) +} diff --git a/interpreter/resources_test.go b/interpreter/resources_test.go index 564c74525..8965aef84 100644 --- a/interpreter/resources_test.go +++ b/interpreter/resources_test.go @@ -24,6 +24,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/onflow/atree" + "github.com/onflow/cadence/interpreter" "github.com/onflow/cadence/sema" . 
"github.com/onflow/cadence/test_utils/common_utils" @@ -3542,8 +3544,12 @@ func TestInterpretInvalidNilCoalescingResourceDuplication(t *testing.T) { t.Parallel() - inter, err := parseCheckAndInterpretWithOptions(t, - ` + t.Run("remove", func(t *testing.T) { + + t.Parallel() + + inter, err := parseCheckAndInterpretWithAtreeValidationsDisabled(t, + ` access(all) resource R { access(all) let answer: Int init() { @@ -3564,18 +3570,59 @@ func TestInterpretInvalidNilCoalescingResourceDuplication(t *testing.T) { return answer1 + answer2 } `, - ParseCheckAndInterpretOptions{ - HandleCheckerError: func(err error) { - errs := RequireCheckerErrors(t, err, 1) - assert.IsType(t, &sema.InvalidNilCoalescingRightResourceOperandError{}, errs[0]) + ParseCheckAndInterpretOptions{ + HandleCheckerError: func(err error) { + errs := RequireCheckerErrors(t, err, 1) + assert.IsType(t, &sema.InvalidNilCoalescingRightResourceOperandError{}, errs[0]) + }, }, - }, - ) - require.NoError(t, err) + ) + require.NoError(t, err) - _, err = inter.Invoke("main") - require.Error(t, err) + _, err = inter.Invoke("main") + require.Error(t, err) + + var inliningError *atree.FatalError + require.ErrorAs(t, err, &inliningError) + require.Contains(t, inliningError.Error(), "failed to uninline") + }) + + t.Run("destroy", func(t *testing.T) { + + t.Parallel() + + inter, err := parseCheckAndInterpretWithAtreeValidationsDisabled(t, + ` + access(all) resource R { + access(all) let answer: Int + init() { + self.answer = 42 + } + } + + access(all) fun main(): Int { + let rs <- [<- create R(), nil] + rs[1] <-! (nil ?? rs[0]) + let answer1 = rs[0]?.answer! + let answer2 = rs[1]?.answer! + destroy rs + return answer1 + answer2 + } + `, + ParseCheckAndInterpretOptions{ + HandleCheckerError: func(err error) { + errs := RequireCheckerErrors(t, err, 1) + assert.IsType(t, &sema.InvalidNilCoalescingRightResourceOperandError{}, errs[0]) + }, + }, + ) + require.NoError(t, err) + + _, err = inter.Invoke("main") + require.Error(t, err) + + var destroyedResourceErr interpreter.DestroyedResourceError + require.ErrorAs(t, err, &destroyedResourceErr) + }) - var destroyedResourceErr interpreter.DestroyedResourceError - require.ErrorAs(t, err, &destroyedResourceErr) } diff --git a/interpreter/value.go b/interpreter/value.go index bf698515c..cff5dad1e 100644 --- a/interpreter/value.go +++ b/interpreter/value.go @@ -246,6 +246,13 @@ type ValueIterator interface { Next(interpreter *Interpreter, locationRange LocationRange) Value } +// atreeContainerBackedValue is an interface for values using atree containers +// (atree.Array or atree.OrderedMap) under the hood. 
+type atreeContainerBackedValue interface { + Value + isAtreeContainerBackedValue() +} + func safeAdd(a, b int, locationRange LocationRange) int { // INT32-C if (b > 0) && (a > (goMaxInt - b)) { diff --git a/interpreter/value_array.go b/interpreter/value_array.go index b032b9625..88a60cd32 100644 --- a/interpreter/value_array.go +++ b/interpreter/value_array.go @@ -213,14 +213,18 @@ func newArrayValueFromAtreeArray( var _ Value = &ArrayValue{} var _ atree.Value = &ArrayValue{} +var _ atree.WrapperValue = &ArrayValue{} var _ EquatableValue = &ArrayValue{} var _ ValueIndexableValue = &ArrayValue{} var _ MemberAccessibleValue = &ArrayValue{} var _ ReferenceTrackedResourceKindedValue = &ArrayValue{} var _ IterableValue = &ArrayValue{} +var _ atreeContainerBackedValue = &ArrayValue{} func (*ArrayValue) isValue() {} +func (*ArrayValue) isAtreeContainerBackedValue() {} + func (v *ArrayValue) Accept(interpreter *Interpreter, visitor Visitor, locationRange LocationRange) { descend := visitor.VisitArrayValue(interpreter, v) if !descend { @@ -1292,9 +1296,17 @@ func (v *ArrayValue) Storable( address atree.Address, maxInlineSize uint64, ) (atree.Storable, error) { + // NOTE: Need to change ArrayValue.UnwrapAtreeValue() + // if ArrayValue is stored with wrapping. return v.array.Storable(storage, address, maxInlineSize) } +func (v *ArrayValue) UnwrapAtreeValue() (atree.Value, uint64) { + // Wrapper size is 0 because ArrayValue is stored as + // atree.Array without any physical wrapping (see ArrayValue.Storable()). + return v.array, 0 +} + func (v *ArrayValue) IsReferenceTrackedResourceKindedValue() {} func (v *ArrayValue) Transfer( @@ -1994,3 +2006,7 @@ func (v *ArrayValue) SetType(staticType ArrayStaticType) { panic(errors.NewExternalError(err)) } } + +func (v *ArrayValue) Inlined() bool { + return v.array.Inlined() +} diff --git a/interpreter/value_composite.go b/interpreter/value_composite.go index cf460d466..c973d0a69 100644 --- a/interpreter/value_composite.go +++ b/interpreter/value_composite.go @@ -236,9 +236,14 @@ var _ HashableValue = &CompositeValue{} var _ MemberAccessibleValue = &CompositeValue{} var _ ReferenceTrackedResourceKindedValue = &CompositeValue{} var _ ContractValue = &CompositeValue{} +var _ atree.Value = &CompositeValue{} +var _ atree.WrapperValue = &CompositeValue{} +var _ atreeContainerBackedValue = &CompositeValue{} func (*CompositeValue) isValue() {} +func (*CompositeValue) isAtreeContainerBackedValue() {} + func (v *CompositeValue) Accept(interpreter *Interpreter, visitor Visitor, locationRange LocationRange) { descend := visitor.VisitCompositeValue(interpreter, v) if !descend { @@ -1138,9 +1143,18 @@ func (v *CompositeValue) Storable( return NonStorable{Value: v}, nil } + // NOTE: Need to change CompositeValue.UnwrapAtreeValue() + // if CompositeValue is stored with wrapping. + return v.dictionary.Storable(storage, address, maxInlineSize) } +func (v *CompositeValue) UnwrapAtreeValue() (atree.Value, uint64) { + // Wrapper size is 0 because CompositeValue is stored as + // atree.OrderedMap without any physical wrapping (see CompositeValue.Storable()). 
+	return v.dictionary, 0
+}
+
 func (v *CompositeValue) NeedsStoreTo(address atree.Address) bool {
 	return address != v.StorageAddress()
 }
@@ -1938,3 +1952,7 @@ func (v *CompositeValue) ForEach(
 		}
 	}
 }
+
+func (v *CompositeValue) Inlined() bool {
+	return v.dictionary.Inlined()
+}
diff --git a/interpreter/value_dictionary.go b/interpreter/value_dictionary.go
index b7b804c46..fd18943c6 100644
--- a/interpreter/value_dictionary.go
+++ b/interpreter/value_dictionary.go
@@ -243,13 +243,17 @@ func newDictionaryValueFromAtreeMap(
 var _ Value = &DictionaryValue{}
 var _ atree.Value = &DictionaryValue{}
+var _ atree.WrapperValue = &DictionaryValue{}
 var _ EquatableValue = &DictionaryValue{}
 var _ ValueIndexableValue = &DictionaryValue{}
 var _ MemberAccessibleValue = &DictionaryValue{}
 var _ ReferenceTrackedResourceKindedValue = &DictionaryValue{}
+var _ atreeContainerBackedValue = &DictionaryValue{}
 
 func (*DictionaryValue) isValue() {}
 
+func (*DictionaryValue) isAtreeContainerBackedValue() {}
+
 func (v *DictionaryValue) Accept(interpreter *Interpreter, visitor Visitor, locationRange LocationRange) {
 	descend := visitor.VisitDictionaryValue(interpreter, v)
 	if !descend {
@@ -1268,9 +1272,17 @@ func (v *DictionaryValue) Storable(
 	address atree.Address,
 	maxInlineSize uint64,
 ) (atree.Storable, error) {
+	// NOTE: Need to change DictionaryValue.UnwrapAtreeValue()
+	// if DictionaryValue is stored with wrapping.
 	return v.dictionary.Storable(storage, address, maxInlineSize)
 }
 
+func (v *DictionaryValue) UnwrapAtreeValue() (atree.Value, uint64) {
+	// Wrapper size is 0 because DictionaryValue is stored as
+	// atree.OrderedMap without any physical wrapping (see DictionaryValue.Storable()).
+	return v.dictionary, 0
+}
+
 func (v *DictionaryValue) IsReferenceTrackedResourceKindedValue() {}
 
 func (v *DictionaryValue) Transfer(
@@ -1586,3 +1598,7 @@ func (v *DictionaryValue) SetType(staticType *DictionaryStaticType) {
 		panic(errors.NewExternalError(err))
 	}
 }
+
+func (v *DictionaryValue) Inlined() bool {
+	return v.dictionary.Inlined()
+}
diff --git a/interpreter/value_int.go b/interpreter/value_int.go
index a9e6ef2c5..1c100e95c 100644
--- a/interpreter/value_int.go
+++ b/interpreter/value_int.go
@@ -526,7 +526,7 @@ func (v IntValue) BitwiseLeftShift(interpreter *Interpreter, other IntegerValue,
 	}
 
 	if o.BigInt.Sign() < 0 {
-		panic(UnderflowError{
+		panic(NegativeShiftError{
 			LocationRange: locationRange,
 		})
 	}
@@ -559,7 +559,7 @@ func (v IntValue) BitwiseRightShift(interpreter *Interpreter, other IntegerValue
 	}
 
 	if o.BigInt.Sign() < 0 {
-		panic(UnderflowError{
+		panic(NegativeShiftError{
 			LocationRange: locationRange,
 		})
 	}
diff --git a/interpreter/value_int128.go b/interpreter/value_int128.go
index b95c3b70f..2df6c0e4d 100644
--- a/interpreter/value_int128.go
+++ b/interpreter/value_int128.go
@@ -20,6 +20,7 @@ package interpreter
 
 import (
 	"math/big"
+	"math/bits"
 
 	"github.com/onflow/atree"
 
@@ -30,6 +31,37 @@ import (
 	"github.com/onflow/cadence/sema"
 )
 
+// toTwosComplement sets `res` to the two's complement representation of a big.Int `x` in the given target bit size.
+// `res` is returned and is always a positive big.Int.
+func toTwosComplement(res, x *big.Int, targetBitSize uint) *big.Int {
+	bytes := SignedBigIntToSizedBigEndianBytes(x, targetBitSize/8)
+	return res.SetBytes(bytes)
+}
+
+// fromTwosComplement converts `res` to the big.Int representation from the two's complement format of a
+// signed integer.
+// `res` is returned and can be positive or negative.
+func fromTwosComplement(res *big.Int) *big.Int { + bytes := res.Bytes() + return BigEndianBytesToSignedBigInt(bytes) +} + +// truncate trims a big.Int to maxWords by directly modifying its underlying representation. +func truncate(x *big.Int, maxWords int) *big.Int { + // Get the absolute value of x as a nat slice. + abs := x.Bits() + + // Limit the nat slice to maxWords. + if len(abs) > maxWords { + abs = abs[:maxWords] + } + + // Update the big.Int's internal representation. + x.SetBits(abs) + + return x +} + // Int128Value type Int128Value struct { @@ -652,20 +684,25 @@ func (v Int128Value) BitwiseLeftShift(interpreter *Interpreter, other IntegerVal } if o.BigInt.Sign() < 0 { - panic(UnderflowError{ + panic(NegativeShiftError{ LocationRange: locationRange, }) } - if !o.BigInt.IsUint64() { - panic(OverflowError{ - LocationRange: locationRange, - }) + if !o.BigInt.IsUint64() || o.BigInt.Uint64() >= 128 { + return NewInt128ValueFromUint64(interpreter, 0) } + // The maximum shift value at this point is 127, which may lead to an + // additional allocation of up to 128 bits. Add usage for possible + // intermediate value. + common.UseMemory(interpreter, Int128MemoryUsage) + valueGetter := func() *big.Int { res := new(big.Int) - res.Lsh(v.BigInt, uint(o.BigInt.Uint64())) - return res + res = toTwosComplement(res, v.BigInt, 128) + res = res.Lsh(res, uint(o.BigInt.Uint64())) + res = truncate(res, 128/bits.UintSize) + return fromTwosComplement(res) } return NewInt128ValueFromBigInt(interpreter, valueGetter) @@ -683,14 +720,12 @@ func (v Int128Value) BitwiseRightShift(interpreter *Interpreter, other IntegerVa } if o.BigInt.Sign() < 0 { - panic(UnderflowError{ + panic(NegativeShiftError{ LocationRange: locationRange, }) } if !o.BigInt.IsUint64() { - panic(OverflowError{ - LocationRange: locationRange, - }) + return NewInt128ValueFromUint64(interpreter, 0) } valueGetter := func() *big.Int { diff --git a/interpreter/value_int16.go b/interpreter/value_int16.go index 393093b15..33eb51f11 100644 --- a/interpreter/value_int16.go +++ b/interpreter/value_int16.go @@ -575,6 +575,12 @@ func (v Int16Value) BitwiseLeftShift(interpreter *Interpreter, other IntegerValu }) } + if o < 0 { + panic(NegativeShiftError{ + LocationRange: locationRange, + }) + } + valueGetter := func() int16 { return int16(v << o) } @@ -593,6 +599,12 @@ func (v Int16Value) BitwiseRightShift(interpreter *Interpreter, other IntegerVal }) } + if o < 0 { + panic(NegativeShiftError{ + LocationRange: locationRange, + }) + } + valueGetter := func() int16 { return int16(v >> o) } diff --git a/interpreter/value_int256.go b/interpreter/value_int256.go index c766978af..09f27f3a6 100644 --- a/interpreter/value_int256.go +++ b/interpreter/value_int256.go @@ -20,6 +20,7 @@ package interpreter import ( "math/big" + "math/bits" "github.com/onflow/atree" @@ -649,21 +650,26 @@ func (v Int256Value) BitwiseLeftShift(interpreter *Interpreter, other IntegerVal }) } + if o.BigInt.Sign() < 0 { + panic(NegativeShiftError{ + LocationRange: locationRange, + }) + } + if !o.BigInt.IsUint64() || o.BigInt.Uint64() >= 256 { + return NewInt256ValueFromUint64(interpreter, 0) + } + + // The maximum shift value at this point is 255, which may lead to an + // additional allocation of up to 256 bits. Add usage for possible + // intermediate value. 
+ common.UseMemory(interpreter, Int256MemoryUsage) + valueGetter := func() *big.Int { res := new(big.Int) - if o.BigInt.Sign() < 0 { - panic(UnderflowError{ - LocationRange: locationRange, - }) - } - if !o.BigInt.IsUint64() { - panic(OverflowError{ - LocationRange: locationRange, - }) - } - res.Lsh(v.BigInt, uint(o.BigInt.Uint64())) - - return res + res = toTwosComplement(res, v.BigInt, 256) + res = res.Lsh(res, uint(o.BigInt.Uint64())) + res = truncate(res, 256/bits.UintSize) + return fromTwosComplement(res) } return NewInt256ValueFromBigInt(interpreter, valueGetter) @@ -680,18 +686,17 @@ func (v Int256Value) BitwiseRightShift(interpreter *Interpreter, other IntegerVa }) } + if o.BigInt.Sign() < 0 { + panic(NegativeShiftError{ + LocationRange: locationRange, + }) + } + if !o.BigInt.IsUint64() { + return NewInt256ValueFromUint64(interpreter, 0) + } + valueGetter := func() *big.Int { res := new(big.Int) - if o.BigInt.Sign() < 0 { - panic(UnderflowError{ - LocationRange: locationRange, - }) - } - if !o.BigInt.IsUint64() { - panic(OverflowError{ - LocationRange: locationRange, - }) - } res.Rsh(v.BigInt, uint(o.BigInt.Uint64())) return res } diff --git a/interpreter/value_int32.go b/interpreter/value_int32.go index e5847fc47..6bc739ffe 100644 --- a/interpreter/value_int32.go +++ b/interpreter/value_int32.go @@ -575,6 +575,12 @@ func (v Int32Value) BitwiseLeftShift(interpreter *Interpreter, other IntegerValu }) } + if o < 0 { + panic(NegativeShiftError{ + LocationRange: locationRange, + }) + } + valueGetter := func() int32 { return int32(v << o) } @@ -593,6 +599,12 @@ func (v Int32Value) BitwiseRightShift(interpreter *Interpreter, other IntegerVal }) } + if o < 0 { + panic(NegativeShiftError{ + LocationRange: locationRange, + }) + } + valueGetter := func() int32 { return int32(v >> o) } diff --git a/interpreter/value_int64.go b/interpreter/value_int64.go index 5f331e95a..e00d378bc 100644 --- a/interpreter/value_int64.go +++ b/interpreter/value_int64.go @@ -566,6 +566,12 @@ func (v Int64Value) BitwiseLeftShift(interpreter *Interpreter, other IntegerValu }) } + if o < 0 { + panic(NegativeShiftError{ + LocationRange: locationRange, + }) + } + valueGetter := func() int64 { return int64(v << o) } @@ -584,6 +590,12 @@ func (v Int64Value) BitwiseRightShift(interpreter *Interpreter, other IntegerVal }) } + if o < 0 { + panic(NegativeShiftError{ + LocationRange: locationRange, + }) + } + valueGetter := func() int64 { return int64(v >> o) } diff --git a/interpreter/value_int8.go b/interpreter/value_int8.go index 12cc54768..325cfb2a9 100644 --- a/interpreter/value_int8.go +++ b/interpreter/value_int8.go @@ -574,6 +574,12 @@ func (v Int8Value) BitwiseLeftShift(interpreter *Interpreter, other IntegerValue }) } + if o < 0 { + panic(NegativeShiftError{ + LocationRange: locationRange, + }) + } + valueGetter := func() int8 { return int8(v << o) } @@ -592,6 +598,12 @@ func (v Int8Value) BitwiseRightShift(interpreter *Interpreter, other IntegerValu }) } + if o < 0 { + panic(NegativeShiftError{ + LocationRange: locationRange, + }) + } + valueGetter := func() int8 { return int8(v >> o) } diff --git a/interpreter/value_some.go b/interpreter/value_some.go index f33f524b8..ddc0343d1 100644 --- a/interpreter/value_some.go +++ b/interpreter/value_some.go @@ -51,6 +51,31 @@ var _ Value = &SomeValue{} var _ EquatableValue = &SomeValue{} var _ MemberAccessibleValue = &SomeValue{} var _ OptionalValue = &SomeValue{} +var _ atree.Value = &SomeValue{} +var _ atree.WrapperValue = &SomeValue{} + +// UnwrapAtreeValue returns 
non-SomeValue and wrapper size. +func (v *SomeValue) UnwrapAtreeValue() (atree.Value, uint64) { + // NOTE: + // - non-SomeValue is the same as non-SomeValue in SomeValue.Storable() + // - non-SomeValue wrapper size is the same as encoded wrapper size in SomeStorable.ByteSize(). + + // Unwrap SomeValue(s) + nonSomeValue, nestedLevels := v.nonSomeValue() + + // Get SomeValue(s) wrapper size + someStorableEncodedPrefixSize := getSomeStorableEncodedPrefixSize(nestedLevels) + + // Unwrap nonSomeValue if needed + switch nonSomeValue := nonSomeValue.(type) { + case atree.WrapperValue: + unwrappedValue, wrapperSize := nonSomeValue.UnwrapAtreeValue() + return unwrappedValue, wrapperSize + uint64(someStorableEncodedPrefixSize) + + default: + return nonSomeValue, uint64(someStorableEncodedPrefixSize) + } +} func (*SomeValue) isValue() {} @@ -225,9 +250,18 @@ func (v *SomeValue) Storable( // The above applies to both immutable non-SomeValue (such as StringValue), // and mutable non-SomeValue (such as ArrayValue). - if v.valueStorable == nil { + // NOTE: + // - If SomeValue's inner value is a value with atree.Array or atree.OrderedMap, + // we MUST NOT cache SomeStorable because we need to call nonSomeValue.Storable() + // to trigger container inlining or un-inlining. + // - Otherwise, we need to cache SomeStorable because nonSomeValue.Storable() can + // create registers in storage, such as large string. + + nonSomeValue, nestedLevels := v.nonSomeValue() + + _, isContainerValue := nonSomeValue.(atreeContainerBackedValue) - nonSomeValue, nestedLevels := v.nonSomeValue() + if v.valueStorable == nil || isContainerValue { someStorableEncodedPrefixSize := getSomeStorableEncodedPrefixSize(nestedLevels) @@ -379,6 +413,31 @@ type SomeStorable struct { } var _ atree.ContainerStorable = SomeStorable{} +var _ atree.WrapperStorable = SomeStorable{} + +func (s SomeStorable) UnwrapAtreeStorable() atree.Storable { + storable := s.Storable + + switch storable := storable.(type) { + case atree.WrapperStorable: + return storable.UnwrapAtreeStorable() + + default: + return storable + } +} + +// WrapAtreeStorable() wraps storable as innermost wrapped value and +// returns new wrapped storable. +func (s SomeStorable) WrapAtreeStorable(storable atree.Storable) atree.Storable { + _, nestedLevels := s.nonSomeStorable() + + newStorable := SomeStorable{Storable: storable} + for i := 1; i < int(nestedLevels); i++ { + newStorable = SomeStorable{Storable: newStorable} + } + return newStorable +} func (s SomeStorable) HasPointer() bool { switch cs := s.Storable.(type) { diff --git a/interpreter/value_some_test.go b/interpreter/value_some_test.go new file mode 100644 index 000000000..7adb51b0a --- /dev/null +++ b/interpreter/value_some_test.go @@ -0,0 +1,805 @@ +/* + * Cadence - The resource-oriented smart contract programming language + * + * Copyright Flow Foundation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package interpreter_test + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/onflow/atree" + + "github.com/onflow/cadence/ast" + "github.com/onflow/cadence/common" + "github.com/onflow/cadence/interpreter" + "github.com/onflow/cadence/sema" + . "github.com/onflow/cadence/test_utils/common_utils" +) + +func TestSomeValueUnwrapAtreeValue(t *testing.T) { + + const ( + cborTagSize = 2 + someStorableWithMultipleNestedLevelsArraySize = 1 + ) + + t.Parallel() + + t.Run("SomeValue(bool)", func(t *testing.T) { + bv := interpreter.BoolValue(true) + + v := interpreter.NewUnmeteredSomeValueNonCopying(bv) + + unwrappedValue, wrapperSize := v.UnwrapAtreeValue() + require.Equal(t, bv, unwrappedValue) + require.Equal(t, uint64(cborTagSize), wrapperSize) + }) + + t.Run("SomeValue(SomeValue(bool))", func(t *testing.T) { + bv := interpreter.BoolValue(true) + + v := interpreter.NewUnmeteredSomeValueNonCopying( + interpreter.NewUnmeteredSomeValueNonCopying( + bv)) + + unwrappedValue, wrapperSize := v.UnwrapAtreeValue() + require.Equal(t, bv, unwrappedValue) + require.Equal(t, uint64(cborTagSize+someStorableWithMultipleNestedLevelsArraySize+1), wrapperSize) + }) + + t.Run("SomeValue(SomeValue(ArrayValue(...)))", func(t *testing.T) { + storage := newUnmeteredInMemoryStorage() + + inter, err := interpreter.NewInterpreter( + &interpreter.Program{ + Program: ast.NewProgram(nil, []ast.Declaration{}), + Elaboration: sema.NewElaboration(nil), + }, + TestLocation, + &interpreter.Config{ + Storage: storage, + ImportLocationHandler: func(inter *interpreter.Interpreter, location common.Location) interpreter.Import { + return interpreter.VirtualImport{ + Elaboration: inter.Program.Elaboration, + } + }, + }, + ) + require.NoError(t, err) + + address := common.Address{'A'} + + values := []interpreter.Value{ + interpreter.NewUnmeteredUInt64Value(0), + interpreter.NewUnmeteredUInt64Value(1), + } + + array := interpreter.NewArrayValue( + inter, + interpreter.EmptyLocationRange, + &interpreter.VariableSizedStaticType{ + Type: interpreter.PrimitiveStaticTypeAnyStruct, + }, + address, + values..., + ) + + v := interpreter.NewUnmeteredSomeValueNonCopying( + interpreter.NewUnmeteredSomeValueNonCopying( + array)) + + unwrappedValue, wrapperSize := v.UnwrapAtreeValue() + require.IsType(t, &atree.Array{}, unwrappedValue) + require.Equal(t, uint64(cborTagSize+someStorableWithMultipleNestedLevelsArraySize+1), wrapperSize) + + atreeArray := unwrappedValue.(*atree.Array) + require.Equal(t, atree.Address(address), atreeArray.Address()) + require.Equal(t, uint64(len(values)), atreeArray.Count()) + + for i, expectedValue := range values { + v, err := atreeArray.Get(uint64(i)) + require.NoError(t, err) + require.Equal(t, expectedValue, v) + } + }) + + t.Run("SomeValue(SomeValue(DictionaryValue(...)))", func(t *testing.T) { + storage := newUnmeteredInMemoryStorage() + + inter, err := interpreter.NewInterpreter( + &interpreter.Program{ + Program: ast.NewProgram(nil, []ast.Declaration{}), + Elaboration: sema.NewElaboration(nil), + }, + TestLocation, + &interpreter.Config{ + Storage: storage, + ImportLocationHandler: func(inter *interpreter.Interpreter, location common.Location) interpreter.Import { + return interpreter.VirtualImport{ + Elaboration: inter.Program.Elaboration, + } + }, + }, + ) + require.NoError(t, err) + + address := common.Address{'A'} + + values := []interpreter.Value{ + interpreter.NewUnmeteredUInt64Value(0), + interpreter.NewUnmeteredStringValue("a"), + 
interpreter.NewUnmeteredUInt64Value(1), + interpreter.NewUnmeteredStringValue("b"), + } + + dict := interpreter.NewDictionaryValueWithAddress( + inter, + interpreter.EmptyLocationRange, + &interpreter.DictionaryStaticType{ + KeyType: interpreter.PrimitiveStaticTypeAnyStruct, + ValueType: interpreter.PrimitiveStaticTypeAnyStruct, + }, + address, + values..., + ) + + v := interpreter.NewUnmeteredSomeValueNonCopying( + interpreter.NewUnmeteredSomeValueNonCopying( + dict)) + + unwrappedValue, wrapperSize := v.UnwrapAtreeValue() + require.IsType(t, &atree.OrderedMap{}, unwrappedValue) + require.Equal(t, uint64(cborTagSize+someStorableWithMultipleNestedLevelsArraySize+1), wrapperSize) + + // Verify unwrapped value + atreeMap := unwrappedValue.(*atree.OrderedMap) + require.Equal(t, atree.Address(address), atreeMap.Address()) + require.Equal(t, uint64(len(values)/2), atreeMap.Count()) + + valueComparator := func( + storage atree.SlabStorage, + atreeValue atree.Value, + otherStorable atree.Storable, + ) (bool, error) { + value := interpreter.MustConvertStoredValue(inter, atreeValue) + otherValue := interpreter.StoredValue(inter, otherStorable, storage) + return value.(interpreter.EquatableValue).Equal(inter, interpreter.EmptyLocationRange, otherValue), nil + } + + hashInputProvider := func( + value atree.Value, + scratch []byte, + ) ([]byte, error) { + hashInput := interpreter.MustConvertStoredValue(inter, value).(interpreter.HashableValue). + HashInput(inter, interpreter.EmptyLocationRange, scratch) + return hashInput, nil + } + + for i := 0; i < len(values); i += 2 { + key := values[i] + expectedValue := values[i+1] + + v, err := atreeMap.Get( + valueComparator, + hashInputProvider, + key, + ) + require.NoError(t, err) + require.Equal(t, expectedValue, v) + } + }) + + t.Run("SomeValue(SomeValue(CompositeValue(...)))", func(t *testing.T) { + storage := newUnmeteredInMemoryStorage() + + inter, err := interpreter.NewInterpreter( + &interpreter.Program{ + Program: ast.NewProgram(nil, []ast.Declaration{}), + Elaboration: sema.NewElaboration(nil), + }, + TestLocation, + &interpreter.Config{ + Storage: storage, + ImportLocationHandler: func(inter *interpreter.Interpreter, location common.Location) interpreter.Import { + return interpreter.VirtualImport{ + Elaboration: inter.Program.Elaboration, + } + }, + }, + ) + require.NoError(t, err) + + address := common.Address{'A'} + + identifier := "test" + + location := common.AddressLocation{ + Address: address, + Name: identifier, + } + + kind := common.CompositeKindStructure + + fields := []interpreter.CompositeField{ + interpreter.NewUnmeteredCompositeField( + "field1", + interpreter.NewUnmeteredStringValue("a"), + ), + interpreter.NewUnmeteredCompositeField( + "field2", + interpreter.NewUnmeteredStringValue("b"), + ), + } + + composite := interpreter.NewCompositeValue( + inter, + interpreter.EmptyLocationRange, + location, + identifier, + kind, + fields, + address, + ) + + v := interpreter.NewUnmeteredSomeValueNonCopying( + interpreter.NewUnmeteredSomeValueNonCopying( + composite)) + + unwrappedValue, wrapperSize := v.UnwrapAtreeValue() + require.IsType(t, &atree.OrderedMap{}, unwrappedValue) + require.Equal(t, uint64(cborTagSize+someStorableWithMultipleNestedLevelsArraySize+1), wrapperSize) + + // Verify unwrapped value + atreeMap := unwrappedValue.(*atree.OrderedMap) + require.Equal(t, atree.Address(address), atreeMap.Address()) + require.Equal(t, uint64(len(fields)), atreeMap.Count()) + + for _, f := range fields { + v, err := atreeMap.Get( + 
interpreter.StringAtreeValueComparator, + interpreter.StringAtreeValueHashInput, + interpreter.StringAtreeValue(f.Name), + ) + require.NoError(t, err) + require.Equal(t, f.Value, v) + } + }) +} + +func TestSomeStorableUnwrapAtreeStorable(t *testing.T) { + + t.Parallel() + + address := common.Address{'A'} + + t.Run("SomeValue(bool)", func(t *testing.T) { + storage := newUnmeteredInMemoryStorage() + + v := interpreter.NewUnmeteredSomeValueNonCopying( + interpreter.BoolValue(true)) + + const maxInlineSize = 1024 / 4 + storable, err := v.Storable(storage, atree.Address(address), maxInlineSize) + require.NoError(t, err) + require.IsType(t, interpreter.SomeStorable{}, storable) + + unwrappedStorable := storable.(interpreter.SomeStorable).UnwrapAtreeStorable() + require.Equal(t, interpreter.BoolValue(true), unwrappedStorable) + }) + + t.Run("SomeValue(SomeValue(bool))", func(t *testing.T) { + storage := newUnmeteredInMemoryStorage() + + v := interpreter.NewUnmeteredSomeValueNonCopying( + interpreter.NewUnmeteredSomeValueNonCopying( + interpreter.BoolValue(true))) + + const maxInlineSize = 1024 / 4 + storable, err := v.Storable(storage, atree.Address(address), maxInlineSize) + require.NoError(t, err) + require.IsType(t, interpreter.SomeStorable{}, storable) + + unwrappedStorable := storable.(interpreter.SomeStorable).UnwrapAtreeStorable() + require.Equal(t, interpreter.BoolValue(true), unwrappedStorable) + }) + + t.Run("SomeValue(SomeValue(ArrayValue(...))), small ArrayValue", func(t *testing.T) { + storage := newUnmeteredInMemoryStorage() + + inter, err := interpreter.NewInterpreter( + &interpreter.Program{ + Program: ast.NewProgram(nil, []ast.Declaration{}), + Elaboration: sema.NewElaboration(nil), + }, + TestLocation, + &interpreter.Config{ + Storage: storage, + ImportLocationHandler: func(inter *interpreter.Interpreter, location common.Location) interpreter.Import { + return interpreter.VirtualImport{ + Elaboration: inter.Program.Elaboration, + } + }, + }, + ) + require.NoError(t, err) + + values := []interpreter.Value{ + interpreter.NewUnmeteredUInt64Value(0), + interpreter.NewUnmeteredUInt64Value(1), + } + + array := interpreter.NewArrayValue( + inter, + interpreter.EmptyLocationRange, + &interpreter.VariableSizedStaticType{ + Type: interpreter.PrimitiveStaticTypeAnyStruct, + }, + address, + values..., + ) + + v := interpreter.NewUnmeteredSomeValueNonCopying( + interpreter.NewUnmeteredSomeValueNonCopying( + array)) + + const maxInlineSize = 1024 / 4 + storable, err := v.Storable(storage, atree.Address(address), maxInlineSize) + require.NoError(t, err) + require.IsType(t, interpreter.SomeStorable{}, storable) + + unwrappedStorable := storable.(interpreter.SomeStorable).UnwrapAtreeStorable() + require.IsType(t, &atree.ArrayDataSlab{}, unwrappedStorable) + + unwrappedValue, err := unwrappedStorable.(*atree.ArrayDataSlab).StoredValue(storage) + require.NoError(t, err) + require.IsType(t, &atree.Array{}, unwrappedValue) + + atreeArray := unwrappedValue.(*atree.Array) + require.Equal(t, atree.Address(address), atreeArray.Address()) + require.Equal(t, uint64(len(values)), atreeArray.Count()) + + for i, expectedValue := range values { + v, err := atreeArray.Get(uint64(i)) + require.NoError(t, err) + require.Equal(t, expectedValue, v) + } + }) + + t.Run("SomeValue(SomeValue(ArrayValue(...))), large ArrayValue", func(t *testing.T) { + storage := newUnmeteredInMemoryStorage() + + inter, err := interpreter.NewInterpreter( + &interpreter.Program{ + Program: ast.NewProgram(nil, []ast.Declaration{}), + 
Elaboration: sema.NewElaboration(nil), + }, + TestLocation, + &interpreter.Config{ + Storage: storage, + ImportLocationHandler: func(inter *interpreter.Interpreter, location common.Location) interpreter.Import { + return interpreter.VirtualImport{ + Elaboration: inter.Program.Elaboration, + } + }, + }, + ) + require.NoError(t, err) + + const valuesCount = 40 + values := make([]interpreter.Value, valuesCount) + for i := range valuesCount { + values[i] = interpreter.NewUnmeteredUInt64Value(uint64(i)) + } + + array := interpreter.NewArrayValue( + inter, + interpreter.EmptyLocationRange, + &interpreter.VariableSizedStaticType{ + Type: interpreter.PrimitiveStaticTypeAnyStruct, + }, + address, + values..., + ) + + v := interpreter.NewUnmeteredSomeValueNonCopying( + interpreter.NewUnmeteredSomeValueNonCopying( + array)) + + const maxInlineSize = 1024 / 8 + storable, err := v.Storable(storage, atree.Address(address), maxInlineSize) + require.NoError(t, err) + require.IsType(t, interpreter.SomeStorable{}, storable) + + unwrappedStorable := storable.(interpreter.SomeStorable).UnwrapAtreeStorable() + require.IsType(t, atree.SlabIDStorable{}, unwrappedStorable) + + unwrappedValue, err := unwrappedStorable.(atree.SlabIDStorable).StoredValue(storage) + require.NoError(t, err) + require.IsType(t, &atree.Array{}, unwrappedValue) + + atreeArray := unwrappedValue.(*atree.Array) + require.Equal(t, atree.Address(address), atreeArray.Address()) + require.Equal(t, uint64(len(values)), atreeArray.Count()) + + for i, expectedValue := range values { + v, err := atreeArray.Get(uint64(i)) + require.NoError(t, err) + require.Equal(t, expectedValue, v) + } + }) + + t.Run("SomeValue(SomeValue(DictionaryValue(...))), small DictionaryValue", func(t *testing.T) { + storage := newUnmeteredInMemoryStorage() + inter, err := interpreter.NewInterpreter( + &interpreter.Program{ + Program: ast.NewProgram(nil, []ast.Declaration{}), + Elaboration: sema.NewElaboration(nil), + }, + TestLocation, + &interpreter.Config{ + Storage: storage, + ImportLocationHandler: func(inter *interpreter.Interpreter, location common.Location) interpreter.Import { + return interpreter.VirtualImport{ + Elaboration: inter.Program.Elaboration, + } + }, + }, + ) + require.NoError(t, err) + + address := common.Address{'A'} + + values := []interpreter.Value{ + interpreter.NewUnmeteredUInt64Value(0), + interpreter.NewUnmeteredStringValue("a"), + interpreter.NewUnmeteredUInt64Value(1), + interpreter.NewUnmeteredStringValue("b"), + } + + dict := interpreter.NewDictionaryValueWithAddress( + inter, + interpreter.EmptyLocationRange, + &interpreter.DictionaryStaticType{ + KeyType: interpreter.PrimitiveStaticTypeAnyStruct, + ValueType: interpreter.PrimitiveStaticTypeAnyStruct, + }, + address, + values..., + ) + + v := interpreter.NewUnmeteredSomeValueNonCopying( + interpreter.NewUnmeteredSomeValueNonCopying( + dict)) + + const maxInlineSize = 1024 / 4 + storable, err := v.Storable(storage, atree.Address(address), maxInlineSize) + require.NoError(t, err) + require.IsType(t, interpreter.SomeStorable{}, storable) + + unwrappedStorable := storable.(interpreter.SomeStorable).UnwrapAtreeStorable() + require.IsType(t, &atree.MapDataSlab{}, unwrappedStorable) + + unwrappedValue, err := unwrappedStorable.(*atree.MapDataSlab).StoredValue(storage) + require.NoError(t, err) + require.IsType(t, &atree.OrderedMap{}, unwrappedValue) + + // Verify unwrapped value + atreeMap := unwrappedValue.(*atree.OrderedMap) + require.Equal(t, atree.Address(address), atreeMap.Address()) + 
require.Equal(t, uint64(len(values)/2), atreeMap.Count()) + + valueComparator := func( + storage atree.SlabStorage, + atreeValue atree.Value, + otherStorable atree.Storable, + ) (bool, error) { + value := interpreter.MustConvertStoredValue(inter, atreeValue) + otherValue := interpreter.StoredValue(inter, otherStorable, storage) + return value.(interpreter.EquatableValue).Equal(inter, interpreter.EmptyLocationRange, otherValue), nil + } + + hashInputProvider := func( + value atree.Value, + scratch []byte, + ) ([]byte, error) { + hashInput := interpreter.MustConvertStoredValue(inter, value).(interpreter.HashableValue). + HashInput(inter, interpreter.EmptyLocationRange, scratch) + return hashInput, nil + } + + for i := 0; i < len(values); i += 2 { + key := values[i] + expectedValue := values[i+1] + + v, err := atreeMap.Get( + valueComparator, + hashInputProvider, + key, + ) + require.NoError(t, err) + require.Equal(t, expectedValue, v) + } + }) + + t.Run("SomeValue(SomeValue(DictionaryValue(...))), large DictionaryValue", func(t *testing.T) { + storage := newUnmeteredInMemoryStorage() + + inter, err := interpreter.NewInterpreter( + &interpreter.Program{ + Program: ast.NewProgram(nil, []ast.Declaration{}), + Elaboration: sema.NewElaboration(nil), + }, + TestLocation, + &interpreter.Config{ + Storage: storage, + ImportLocationHandler: func(inter *interpreter.Interpreter, location common.Location) interpreter.Import { + return interpreter.VirtualImport{ + Elaboration: inter.Program.Elaboration, + } + }, + }, + ) + require.NoError(t, err) + + address := common.Address{'A'} + + const valuesCount = 20 + values := make([]interpreter.Value, valuesCount*2) + + char := 'a' + for i := 0; i < len(values); i += 2 { + values[i] = interpreter.NewUnmeteredUInt64Value(uint64(i)) + values[i+1] = interpreter.NewUnmeteredStringValue(string(char)) + char += 1 + } + + dict := interpreter.NewDictionaryValueWithAddress( + inter, + interpreter.EmptyLocationRange, + &interpreter.DictionaryStaticType{ + KeyType: interpreter.PrimitiveStaticTypeAnyStruct, + ValueType: interpreter.PrimitiveStaticTypeAnyStruct, + }, + address, + values..., + ) + + v := interpreter.NewUnmeteredSomeValueNonCopying( + interpreter.NewUnmeteredSomeValueNonCopying( + dict)) + + const maxInlineSize = 1024 / 8 + storable, err := v.Storable(storage, atree.Address(address), maxInlineSize) + require.NoError(t, err) + require.IsType(t, interpreter.SomeStorable{}, storable) + + unwrappedStorable := storable.(interpreter.SomeStorable).UnwrapAtreeStorable() + require.IsType(t, atree.SlabIDStorable{}, unwrappedStorable) + + unwrappedValue, err := unwrappedStorable.(atree.SlabIDStorable).StoredValue(storage) + require.NoError(t, err) + require.IsType(t, &atree.OrderedMap{}, unwrappedValue) + + // Verify unwrapped value + atreeMap := unwrappedValue.(*atree.OrderedMap) + require.Equal(t, atree.Address(address), atreeMap.Address()) + require.Equal(t, uint64(len(values)/2), atreeMap.Count()) + + valueComparator := func( + storage atree.SlabStorage, + atreeValue atree.Value, + otherStorable atree.Storable, + ) (bool, error) { + value := interpreter.MustConvertStoredValue(inter, atreeValue) + otherValue := interpreter.StoredValue(inter, otherStorable, storage) + return value.(interpreter.EquatableValue).Equal(inter, interpreter.EmptyLocationRange, otherValue), nil + } + + hashInputProvider := func( + value atree.Value, + scratch []byte, + ) ([]byte, error) { + hashInput := interpreter.MustConvertStoredValue(inter, value).(interpreter.HashableValue). 
+ HashInput(inter, interpreter.EmptyLocationRange, scratch) + return hashInput, nil + } + + for i := 0; i < len(values); i += 2 { + key := values[i] + expectedValue := values[i+1] + + v, err := atreeMap.Get( + valueComparator, + hashInputProvider, + key, + ) + require.NoError(t, err) + require.Equal(t, expectedValue, v) + } + }) + + t.Run("SomeValue(SomeValue(CompositeValue(...))), small CompositeValue", func(t *testing.T) { + storage := newUnmeteredInMemoryStorage() + + inter, err := interpreter.NewInterpreter( + &interpreter.Program{ + Program: ast.NewProgram(nil, []ast.Declaration{}), + Elaboration: sema.NewElaboration(nil), + }, + TestLocation, + &interpreter.Config{ + Storage: storage, + ImportLocationHandler: func(inter *interpreter.Interpreter, location common.Location) interpreter.Import { + return interpreter.VirtualImport{ + Elaboration: inter.Program.Elaboration, + } + }, + }, + ) + require.NoError(t, err) + + address := common.Address{'A'} + + identifier := "test" + + location := common.AddressLocation{ + Address: address, + Name: identifier, + } + + kind := common.CompositeKindStructure + + fields := []interpreter.CompositeField{ + interpreter.NewUnmeteredCompositeField( + "field1", + interpreter.NewUnmeteredStringValue("a"), + ), + interpreter.NewUnmeteredCompositeField( + "field2", + interpreter.NewUnmeteredStringValue("b"), + ), + } + + composite := interpreter.NewCompositeValue( + inter, + interpreter.EmptyLocationRange, + location, + identifier, + kind, + fields, + address, + ) + + v := interpreter.NewUnmeteredSomeValueNonCopying( + interpreter.NewUnmeteredSomeValueNonCopying( + composite)) + + const maxInlineSize = 1024 / 4 + storable, err := v.Storable(storage, atree.Address(address), maxInlineSize) + require.NoError(t, err) + require.IsType(t, interpreter.SomeStorable{}, storable) + + unwrappedStorable := storable.(interpreter.SomeStorable).UnwrapAtreeStorable() + require.IsType(t, &atree.MapDataSlab{}, unwrappedStorable) + + unwrappedValue, err := unwrappedStorable.(*atree.MapDataSlab).StoredValue(storage) + require.NoError(t, err) + require.IsType(t, &atree.OrderedMap{}, unwrappedValue) + + // Verify unwrapped value + atreeMap := unwrappedValue.(*atree.OrderedMap) + require.Equal(t, atree.Address(address), atreeMap.Address()) + require.Equal(t, uint64(len(fields)), atreeMap.Count()) + + for _, f := range fields { + v, err := atreeMap.Get( + interpreter.StringAtreeValueComparator, + interpreter.StringAtreeValueHashInput, + interpreter.StringAtreeValue(f.Name), + ) + require.NoError(t, err) + require.Equal(t, f.Value, v) + } + }) + + t.Run("SomeValue(SomeValue(CompositeValue(...))), large CompositeValue", func(t *testing.T) { + storage := newUnmeteredInMemoryStorage() + + inter, err := interpreter.NewInterpreter( + &interpreter.Program{ + Program: ast.NewProgram(nil, []ast.Declaration{}), + Elaboration: sema.NewElaboration(nil), + }, + TestLocation, + &interpreter.Config{ + Storage: storage, + ImportLocationHandler: func(inter *interpreter.Interpreter, location common.Location) interpreter.Import { + return interpreter.VirtualImport{ + Elaboration: inter.Program.Elaboration, + } + }, + }, + ) + require.NoError(t, err) + + address := common.Address{'A'} + + identifier := "test" + + location := common.AddressLocation{ + Address: address, + Name: identifier, + } + + kind := common.CompositeKindStructure + + const fieldsCount = 20 + fields := make([]interpreter.CompositeField, fieldsCount) + char := 'a' + for i := range len(fields) { + fields[i] = 
interpreter.NewUnmeteredCompositeField( + fmt.Sprintf("field%d", i), + interpreter.NewUnmeteredStringValue(string(char)), + ) + char += 1 + } + + composite := interpreter.NewCompositeValue( + inter, + interpreter.EmptyLocationRange, + location, + identifier, + kind, + fields, + address, + ) + + v := interpreter.NewUnmeteredSomeValueNonCopying( + interpreter.NewUnmeteredSomeValueNonCopying( + composite)) + + const maxInlineSize = 1024 / 8 + storable, err := v.Storable(storage, atree.Address(address), maxInlineSize) + require.NoError(t, err) + require.IsType(t, interpreter.SomeStorable{}, storable) + + unwrappedStorable := storable.(interpreter.SomeStorable).UnwrapAtreeStorable() + require.IsType(t, atree.SlabIDStorable{}, unwrappedStorable) + + unwrappedValue, err := unwrappedStorable.(atree.SlabIDStorable).StoredValue(storage) + require.NoError(t, err) + require.IsType(t, &atree.OrderedMap{}, unwrappedValue) + + // Verify unwrapped value + atreeMap := unwrappedValue.(*atree.OrderedMap) + require.Equal(t, atree.Address(address), atreeMap.Address()) + require.Equal(t, uint64(len(fields)), atreeMap.Count()) + + for _, f := range fields { + v, err := atreeMap.Get( + interpreter.StringAtreeValueComparator, + interpreter.StringAtreeValueHashInput, + interpreter.StringAtreeValue(f.Name), + ) + require.NoError(t, err) + require.Equal(t, f.Value, v) + } + }) +} diff --git a/interpreter/value_uint.go b/interpreter/value_uint.go index aec0c661e..04a404a65 100644 --- a/interpreter/value_uint.go +++ b/interpreter/value_uint.go @@ -534,7 +534,7 @@ func (v UIntValue) BitwiseLeftShift(interpreter *Interpreter, other IntegerValue } if o.BigInt.Sign() < 0 { - panic(UnderflowError{ + panic(NegativeShiftError{ LocationRange: locationRange, }) } @@ -568,7 +568,7 @@ func (v UIntValue) BitwiseRightShift(interpreter *Interpreter, other IntegerValu } if o.BigInt.Sign() < 0 { - panic(UnderflowError{ + panic(NegativeShiftError{ LocationRange: locationRange, }) } diff --git a/interpreter/value_uint128.go b/interpreter/value_uint128.go index e0d7cb0e9..dfde3cfa4 100644 --- a/interpreter/value_uint128.go +++ b/interpreter/value_uint128.go @@ -20,6 +20,7 @@ package interpreter import ( "math/big" + "math/bits" "github.com/onflow/atree" @@ -580,21 +581,26 @@ func (v UInt128Value) BitwiseLeftShift(interpreter *Interpreter, other IntegerVa }) } + if o.BigInt.Sign() < 0 { + panic(NegativeShiftError{ + LocationRange: locationRange, + }) + } + if !o.BigInt.IsUint64() || o.BigInt.Uint64() >= 128 { + return NewUInt128ValueFromUint64(interpreter, 0) + } + + // The maximum shift value at this point is 127, which may lead to an + // additional allocation of up to 128 bits. Add usage for possible + // intermediate value. 
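For illustration only (not part of this diff): the truncate(res, 128/bits.UintSize) call below keeps just the low 128 bits of the shifted big.Int, so the result of Lsh wraps around instead of overflowing. A minimal standalone sketch of that masking, using a hypothetical helper truncateToWidth:

package main

import (
	"fmt"
	"math/big"
)

// truncateToWidth returns x mod 2^width, discarding any bits above position width-1.
func truncateToWidth(x *big.Int, width uint) *big.Int {
	mask := new(big.Int).Lsh(big.NewInt(1), width)
	mask.Sub(mask, big.NewInt(1))
	return x.And(x, mask)
}

func main() {
	v := new(big.Int).Lsh(big.NewInt(1), 127) // highest bit of a 128-bit value set
	v.Lsh(v, 1)                               // a left shift pushes it past bit 127
	fmt.Println(truncateToWidth(v, 128))      // prints 0: the overflowed bit is discarded
}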
+ common.UseMemory(interpreter, Uint128MemoryUsage) + return NewUInt128ValueFromBigInt( interpreter, func() *big.Int { res := new(big.Int) - if o.BigInt.Sign() < 0 { - panic(UnderflowError{ - LocationRange: locationRange, - }) - } - if !o.BigInt.IsUint64() { - panic(OverflowError{ - LocationRange: locationRange, - }) - } - return res.Lsh(v.BigInt, uint(o.BigInt.Uint64())) + res = res.Lsh(v.BigInt, uint(o.BigInt.Uint64())) + return truncate(res, 128/bits.UintSize) }, ) } @@ -610,20 +616,19 @@ func (v UInt128Value) BitwiseRightShift(interpreter *Interpreter, other IntegerV }) } + if o.BigInt.Sign() < 0 { + panic(NegativeShiftError{ + LocationRange: locationRange, + }) + } + if !o.BigInt.IsUint64() { + return NewUInt128ValueFromUint64(interpreter, 0) + } + return NewUInt128ValueFromBigInt( interpreter, func() *big.Int { res := new(big.Int) - if o.BigInt.Sign() < 0 { - panic(UnderflowError{ - LocationRange: locationRange, - }) - } - if !o.BigInt.IsUint64() { - panic(OverflowError{ - LocationRange: locationRange, - }) - } return res.Rsh(v.BigInt, uint(o.BigInt.Uint64())) }, ) diff --git a/interpreter/value_uint256.go b/interpreter/value_uint256.go index 975454387..2050df91a 100644 --- a/interpreter/value_uint256.go +++ b/interpreter/value_uint256.go @@ -20,6 +20,7 @@ package interpreter import ( "math/big" + "math/bits" "github.com/onflow/atree" @@ -580,21 +581,26 @@ func (v UInt256Value) BitwiseLeftShift(interpreter *Interpreter, other IntegerVa }) } + if o.BigInt.Sign() < 0 { + panic(NegativeShiftError{ + LocationRange: locationRange, + }) + } + if !o.BigInt.IsUint64() || o.BigInt.Uint64() >= 256 { + return NewUInt256ValueFromUint64(interpreter, 0) + } + + // The maximum shift value at this point is 255, which may lead to an + // additional allocation of up to 256 bits. Add usage for possible + // intermediate value. 
+ common.UseMemory(interpreter, Uint256MemoryUsage) + return NewUInt256ValueFromBigInt( interpreter, func() *big.Int { res := new(big.Int) - if o.BigInt.Sign() < 0 { - panic(UnderflowError{ - LocationRange: locationRange, - }) - } - if !o.BigInt.IsUint64() { - panic(OverflowError{ - LocationRange: locationRange, - }) - } - return res.Lsh(v.BigInt, uint(o.BigInt.Uint64())) + res = res.Lsh(v.BigInt, uint(o.BigInt.Uint64())) + return truncate(res, 256/bits.UintSize) }, ) } @@ -610,20 +616,19 @@ func (v UInt256Value) BitwiseRightShift(interpreter *Interpreter, other IntegerV }) } + if o.BigInt.Sign() < 0 { + panic(NegativeShiftError{ + LocationRange: locationRange, + }) + } + if !o.BigInt.IsUint64() { + return NewUInt256ValueFromUint64(interpreter, 0) + } + return NewUInt256ValueFromBigInt( interpreter, func() *big.Int { res := new(big.Int) - if o.BigInt.Sign() < 0 { - panic(UnderflowError{ - LocationRange: locationRange, - }) - } - if !o.BigInt.IsUint64() { - panic(OverflowError{ - LocationRange: locationRange, - }) - } return res.Rsh(v.BigInt, uint(o.BigInt.Uint64())) }, ) diff --git a/interpreter/value_word128.go b/interpreter/value_word128.go index d84505594..a9aacc4bf 100644 --- a/interpreter/value_word128.go +++ b/interpreter/value_word128.go @@ -20,6 +20,7 @@ package interpreter import ( "math/big" + "math/bits" "github.com/onflow/atree" @@ -486,21 +487,26 @@ func (v Word128Value) BitwiseLeftShift(interpreter *Interpreter, other IntegerVa }) } + if o.BigInt.Sign() < 0 { + panic(NegativeShiftError{ + LocationRange: locationRange, + }) + } + if !o.BigInt.IsUint64() || o.BigInt.Uint64() >= 128 { + return NewWord128ValueFromUint64(interpreter, 0) + } + + // The maximum shift value at this point is 127, which may lead to an + // additional allocation of up to 128 bits. Add usage for possible + // intermediate value. 
+ common.UseMemory(interpreter, Uint128MemoryUsage) + return NewWord128ValueFromBigInt( interpreter, func() *big.Int { res := new(big.Int) - if o.BigInt.Sign() < 0 { - panic(UnderflowError{ - LocationRange: locationRange, - }) - } - if !o.BigInt.IsUint64() { - panic(OverflowError{ - LocationRange: locationRange, - }) - } - return res.Lsh(v.BigInt, uint(o.BigInt.Uint64())) + res = res.Lsh(v.BigInt, uint(o.BigInt.Uint64())) + return truncate(res, 128/bits.UintSize) }, ) } @@ -515,20 +521,19 @@ func (v Word128Value) BitwiseRightShift(interpreter *Interpreter, other IntegerV }) } + if o.BigInt.Sign() < 0 { + panic(NegativeShiftError{ + LocationRange: locationRange, + }) + } + if !o.BigInt.IsUint64() { + return NewWord128ValueFromUint64(interpreter, 0) + } + return NewWord128ValueFromBigInt( interpreter, func() *big.Int { res := new(big.Int) - if o.BigInt.Sign() < 0 { - panic(UnderflowError{ - LocationRange: locationRange, - }) - } - if !o.BigInt.IsUint64() { - panic(OverflowError{ - LocationRange: locationRange, - }) - } return res.Rsh(v.BigInt, uint(o.BigInt.Uint64())) }, ) diff --git a/interpreter/value_word256.go b/interpreter/value_word256.go index 49924b624..9446d1c81 100644 --- a/interpreter/value_word256.go +++ b/interpreter/value_word256.go @@ -20,6 +20,7 @@ package interpreter import ( "math/big" + "math/bits" "github.com/onflow/atree" @@ -486,21 +487,26 @@ func (v Word256Value) BitwiseLeftShift(interpreter *Interpreter, other IntegerVa }) } + if o.BigInt.Sign() < 0 { + panic(NegativeShiftError{ + LocationRange: locationRange, + }) + } + if !o.BigInt.IsUint64() || o.BigInt.Uint64() >= 256 { + return NewWord256ValueFromUint64(interpreter, 0) + } + + // The maximum shift value at this point is 255, which may lead to an + // additional allocation of up to 256 bits. Add usage for possible + // intermediate value. + common.UseMemory(interpreter, Uint256MemoryUsage) + return NewWord256ValueFromBigInt( interpreter, func() *big.Int { res := new(big.Int) - if o.BigInt.Sign() < 0 { - panic(UnderflowError{ - LocationRange: locationRange, - }) - } - if !o.BigInt.IsUint64() { - panic(OverflowError{ - LocationRange: locationRange, - }) - } - return res.Lsh(v.BigInt, uint(o.BigInt.Uint64())) + res = res.Lsh(v.BigInt, uint(o.BigInt.Uint64())) + return truncate(res, 256/bits.UintSize) }, ) } @@ -516,20 +522,19 @@ func (v Word256Value) BitwiseRightShift(interpreter *Interpreter, other IntegerV }) } + if o.BigInt.Sign() < 0 { + panic(NegativeShiftError{ + LocationRange: locationRange, + }) + } + if !o.BigInt.IsUint64() { + return NewWord256ValueFromUint64(interpreter, 0) + } + return NewWord256ValueFromBigInt( interpreter, func() *big.Int { res := new(big.Int) - if o.BigInt.Sign() < 0 { - panic(UnderflowError{ - LocationRange: locationRange, - }) - } - if !o.BigInt.IsUint64() { - panic(OverflowError{ - LocationRange: locationRange, - }) - } return res.Rsh(v.BigInt, uint(o.BigInt.Uint64())) }, ) diff --git a/interpreter/values_test.go b/interpreter/values_test.go index b874aa421..fc8260f87 100644 --- a/interpreter/values_test.go +++ b/interpreter/values_test.go @@ -23,6 +23,7 @@ import ( "fmt" "math" "math/rand" + "strconv" "strings" "testing" "time" @@ -32,1685 +33,4886 @@ import ( "github.com/onflow/atree" + "github.com/onflow/cadence" "github.com/onflow/cadence/ast" "github.com/onflow/cadence/common" + "github.com/onflow/cadence/errors" "github.com/onflow/cadence/interpreter" + "github.com/onflow/cadence/runtime" "github.com/onflow/cadence/sema" . "github.com/onflow/cadence/test_utils/common_utils" . 
"github.com/onflow/cadence/test_utils/interpreter_utils" + . "github.com/onflow/cadence/test_utils/runtime_utils" ) -// TODO: make these program args? -const containerMaxDepth = 3 -const containerMaxSize = 100 -const compositeMaxFields = 10 +var defaultRandomValueLimits = randomValueLimits{ + containerMaxDepth: 4, + containerMaxSize: 40, + compositeMaxFields: 10, +} var runSmokeTests = flag.Bool("runSmokeTests", false, "Run smoke tests on values") var validateAtree = flag.Bool("validateAtree", true, "Enable atree validation") var smokeTestSeed = flag.Int64("smokeTestSeed", -1, "Seed for prng (-1 specifies current Unix time)") -func TestInterpretRandomMapOperations(t *testing.T) { - if !*runSmokeTests { - t.Skip("smoke tests are disabled") - } - - t.Parallel() +func newRandomValueTestInterpreter(t *testing.T) (inter *interpreter.Interpreter, resetStorage func()) { - r := newRandomValueGenerator() - t.Logf("seed: %d", r.seed) + config := &interpreter.Config{ + ImportLocationHandler: func(inter *interpreter.Interpreter, location common.Location) interpreter.Import { + return interpreter.VirtualImport{ + Elaboration: inter.Program.Elaboration, + } + }, + AtreeStorageValidationEnabled: *validateAtree, + AtreeValueValidationEnabled: *validateAtree, + } - storage := newUnmeteredInMemoryStorage() inter, err := interpreter.NewInterpreter( &interpreter.Program{ - Program: ast.NewProgram(nil, []ast.Declaration{}), Elaboration: sema.NewElaboration(nil), }, TestLocation, - &interpreter.Config{ - Storage: storage, - ImportLocationHandler: func(inter *interpreter.Interpreter, location common.Location) interpreter.Import { - return interpreter.VirtualImport{ - Elaboration: inter.Program.Elaboration, - } - }, - AtreeStorageValidationEnabled: *validateAtree, - AtreeValueValidationEnabled: *validateAtree, - }, + config, ) require.NoError(t, err) - numberOfValues := r.randomInt(containerMaxSize) + ledger := NewTestLedger(nil, nil) - var testMap, copyOfTestMap *interpreter.DictionaryValue - var storageSize, slabCounts int + resetStorage = func() { + if config.Storage != nil { + storage := config.Storage.(*runtime.Storage) + err := storage.Commit(inter, false) + require.NoError(t, err) + } + config.Storage = runtime.NewStorage(ledger, nil, runtime.StorageConfig{}) + } - entries := newValueMap(numberOfValues) - orgOwner := common.Address{'A'} + resetStorage() - t.Run("construction", func(t *testing.T) { - keyValues := make([]interpreter.Value, numberOfValues*2) - for i := 0; i < numberOfValues; i++ { - key := r.randomHashableValue(inter) - value := r.randomStorableValue(inter, 0) + return inter, resetStorage +} - entries.put(inter, key, value) +func importValue(t *testing.T, inter *interpreter.Interpreter, value cadence.Value) interpreter.Value { - keyValues[i*2] = key - keyValues[i*2+1] = value - } + switch value := value.(type) { + case cadence.Array: + // Work around for "cannot import array: elements do not belong to the same type", + // caused by import of array without expected type, which leads to inference of the element type: + // Create an empty array with an expected type, then append imported elements to it. 
- testMap = interpreter.NewDictionaryValueWithAddress( + arrayResult, err := runtime.ImportValue( inter, interpreter.EmptyLocationRange, - &interpreter.DictionaryStaticType{ - KeyType: interpreter.PrimitiveStaticTypeAnyStruct, - ValueType: interpreter.PrimitiveStaticTypeAnyStruct, - }, - orgOwner, - keyValues..., + nil, + nil, + cadence.Array{}, + sema.NewVariableSizedType(nil, sema.AnyStructType), ) + require.NoError(t, err) + require.IsType(t, &interpreter.ArrayValue{}, arrayResult) + array := arrayResult.(*interpreter.ArrayValue) - storageSize, slabCounts = getSlabStorageSize(t, storage) + for _, element := range value.Values { + array.Append( + inter, + interpreter.EmptyLocationRange, + importValue(t, inter, element), + ) + } - require.Equal(t, testMap.Count(), entries.size()) + return array - entries.foreach(func(orgKey, orgValue interpreter.Value) (exit bool) { - exists := testMap.ContainsKey(inter, interpreter.EmptyLocationRange, orgKey) - require.True(t, bool(exists)) + case cadence.Dictionary: + // Work around for "cannot import dictionary: keys does not belong to the same type", + // caused by import of dictionary without expected type, which leads to inference of the key type: + // Create an empty dictionary with an expected type, then append imported key-value pairs to it. - value, found := testMap.Get(inter, interpreter.EmptyLocationRange, orgKey) - require.True(t, found) - AssertValuesEqual(t, inter, orgValue, value) + dictionaryResult, err := runtime.ImportValue( + inter, + interpreter.EmptyLocationRange, + nil, + nil, + cadence.Dictionary{}, + sema.NewDictionaryType( + nil, + sema.HashableStructType, + sema.AnyStructType, + ), + ) + require.NoError(t, err) + require.IsType(t, &interpreter.DictionaryValue{}, dictionaryResult) + dictionary := dictionaryResult.(*interpreter.DictionaryValue) - return false - }) + for _, pair := range value.Pairs { + dictionary.Insert( + inter, + interpreter.EmptyLocationRange, + importValue(t, inter, pair.Key), + importValue(t, inter, pair.Value), + ) + } - owner := testMap.GetOwner() - assert.Equal(t, orgOwner, owner) - }) + return dictionary - t.Run("iterate", func(t *testing.T) { - require.Equal(t, testMap.Count(), entries.size()) + case cadence.Struct: - testMap.Iterate( + structResult, err := runtime.ImportValue( inter, interpreter.EmptyLocationRange, - func(key, value interpreter.Value) (resume bool) { - orgValue, ok := entries.get(inter, key) - require.True(t, ok, "cannot find key: %v", key) - - AssertValuesEqual(t, inter, orgValue, value) - return true + nil, + nil, + cadence.Struct{ + StructType: value.StructType, }, + nil, ) - }) + require.NoError(t, err) + require.IsType(t, &interpreter.CompositeValue{}, structResult) + composite := structResult.(*interpreter.CompositeValue) + + for fieldName, fieldValue := range value.FieldsMappedByName() { + composite.SetMember( + inter, + interpreter.EmptyLocationRange, + fieldName, + importValue(t, inter, fieldValue), + ) + } + + return composite + + case cadence.Optional: - t.Run("deep copy", func(t *testing.T) { - newOwner := atree.Address{'B'} - copyOfTestMap = testMap.Transfer( + if value.Value == nil { + return interpreter.NilValue{} + } + + return interpreter.NewUnmeteredSomeValueNonCopying( + importValue(t, inter, value.Value), + ) + + default: + result, err := runtime.ImportValue( inter, interpreter.EmptyLocationRange, - newOwner, - false, nil, nil, - true, // testMap is standalone. 
- ).(*interpreter.DictionaryValue) + value, + nil, + ) + require.NoError(t, err) + return result + } +} - require.Equal(t, entries.size(), copyOfTestMap.Count()) +func withoutAtreeStorageValidationEnabled[T any](inter *interpreter.Interpreter, f func() T) T { + config := inter.SharedState.Config + original := config.AtreeStorageValidationEnabled + config.AtreeStorageValidationEnabled = false + result := f() + config.AtreeStorageValidationEnabled = original + return result +} - entries.foreach(func(orgKey, orgValue interpreter.Value) (exit bool) { - exists := copyOfTestMap.ContainsKey(inter, interpreter.EmptyLocationRange, orgKey) - require.True(t, bool(exists)) +func TestInterpretSmokeRandomDictionaryOperations(t *testing.T) { + if !*runSmokeTests { + t.Skip("smoke tests are disabled") + } - value, found := copyOfTestMap.Get(inter, interpreter.EmptyLocationRange, orgKey) - require.True(t, found) - AssertValuesEqual(t, inter, orgValue, value) + t.Parallel() - return false - }) + orgOwner := common.Address{'A'} - owner := copyOfTestMap.GetOwner() - assert.Equal(t, newOwner[:], owner[:]) - }) + const dictionaryStorageMapKey = interpreter.StringStorageMapKey("dictionary") - t.Run("deep remove", func(t *testing.T) { - copyOfTestMap.DeepRemove(inter, true) - err = storage.Remove(copyOfTestMap.SlabID()) - require.NoError(t, err) + writeDictionary := func( + inter *interpreter.Interpreter, + owner common.Address, + storageMapKey interpreter.StorageMapKey, + dictionary *interpreter.DictionaryValue, + ) { + inter.Storage(). + GetDomainStorageMap( + inter, + owner, + common.StorageDomainPathStorage, + true, + ). + WriteValue( + inter, + storageMapKey, + dictionary, + ) + } - // deep removal should clean up everything - newStorageSize, newSlabCounts := getSlabStorageSize(t, storage) - assert.Equal(t, slabCounts, newSlabCounts) - assert.Equal(t, storageSize, newStorageSize) + readDictionary := func( + t *testing.T, + inter *interpreter.Interpreter, + owner common.Address, + storageMapKey interpreter.StorageMapKey, + ) *interpreter.DictionaryValue { + storageMap := inter.Storage().GetDomainStorageMap( + inter, + owner, + common.StorageDomainPathStorage, + false, + ) + require.NotNil(t, storageMap) - require.Equal(t, entries.size(), testMap.Count()) + readValue := storageMap.ReadValue(inter, storageMapKey) + require.NotNil(t, readValue) - // go over original values again and check no missing data (no side effect should be found) - entries.foreach(func(orgKey, orgValue interpreter.Value) (exit bool) { - exists := testMap.ContainsKey(inter, interpreter.EmptyLocationRange, orgKey) - require.True(t, bool(exists)) + require.IsType(t, &interpreter.DictionaryValue{}, readValue) + return readValue.(*interpreter.DictionaryValue) + } - value, found := testMap.Get(inter, interpreter.EmptyLocationRange, orgKey) - require.True(t, found) - AssertValuesEqual(t, inter, orgValue, value) + removeDictionary := func( + inter *interpreter.Interpreter, + owner common.Address, + storageMapKey interpreter.StorageMapKey, + ) { + inter.Storage(). + GetDomainStorageMap( + inter, + owner, + common.StorageDomainPathStorage, + false, + ). 
+ RemoveValue( + inter, + storageMapKey, + ) + } - return false - }) + createDictionary := func( + t *testing.T, + r *randomValueGenerator, + inter *interpreter.Interpreter, + ) ( + *interpreter.DictionaryValue, + cadence.Dictionary, + ) { - owner := testMap.GetOwner() - assert.Equal(t, orgOwner, owner) - }) + expectedValue := r.randomDictionaryValue(inter, 0) - t.Run("insert", func(t *testing.T) { - newEntries := newValueMap(numberOfValues) + keyValues := make([]interpreter.Value, 2*len(expectedValue.Pairs)) + for i, pair := range expectedValue.Pairs { + + key := importValue(t, inter, pair.Key) + value := importValue(t, inter, pair.Value) + + keyValues[i*2] = key + keyValues[i*2+1] = value + } + + // Construct a dictionary directly in the owner's account. + // However, the dictionary is not referenced by the root of the storage yet + // (a storage map), so atree storage validation must be temporarily disabled + // to not report any "unreferenced slab" errors. - dictionary := interpreter.NewDictionaryValueWithAddress( + dictionary := withoutAtreeStorageValidationEnabled( inter, - interpreter.EmptyLocationRange, - &interpreter.DictionaryStaticType{ - KeyType: interpreter.PrimitiveStaticTypeAnyStruct, - ValueType: interpreter.PrimitiveStaticTypeAnyStruct, + func() *interpreter.DictionaryValue { + return interpreter.NewDictionaryValueWithAddress( + inter, + interpreter.EmptyLocationRange, + &interpreter.DictionaryStaticType{ + KeyType: interpreter.PrimitiveStaticTypeHashableStruct, + ValueType: interpreter.PrimitiveStaticTypeAnyStruct, + }, + orgOwner, + keyValues..., + ) }, - orgOwner, ) - // Insert - for i := 0; i < numberOfValues; i++ { - key := r.randomHashableValue(inter) - value := r.randomStorableValue(inter, 0) + // Store the dictionary in a storage map, so that the dictionary's slab + // is referenced by the root of the storage. 
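For illustration only (not part of this diff): after the writeDictionary call below, the dictionary's slab is reachable from a storage root, so atree's health check (as used elsewhere in these tests) should pass instead of reporting an unreferenced slab:

if *validateAtree {
	err := inter.Storage().CheckHealth() // no "unreferenced slab" error once the slab is rooted
	require.NoError(t, err)
}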
+ + writeDictionary( + inter, + orgOwner, + dictionaryStorageMapKey, + dictionary, + ) - newEntries.put(inter, key, value) + return dictionary, expectedValue + } - _ = dictionary.Insert(inter, interpreter.EmptyLocationRange, key, value) - } + checkDictionary := func( + t *testing.T, + inter *interpreter.Interpreter, + dictionary *interpreter.DictionaryValue, + expectedValue cadence.Dictionary, + expectedOwner common.Address, + ) { + require.Equal(t, len(expectedValue.Pairs), dictionary.Count()) - require.Equal(t, newEntries.size(), dictionary.Count()) + for _, pair := range expectedValue.Pairs { + pairKey := importValue(t, inter, pair.Key) - // Go over original values again and check no missing data (no side effect should be found) - newEntries.foreach(func(orgKey, orgValue interpreter.Value) (exit bool) { - exists := dictionary.ContainsKey(inter, interpreter.EmptyLocationRange, orgKey) + exists := dictionary.ContainsKey(inter, interpreter.EmptyLocationRange, pairKey) require.True(t, bool(exists)) - value, found := dictionary.Get(inter, interpreter.EmptyLocationRange, orgKey) + value, found := dictionary.Get(inter, interpreter.EmptyLocationRange, pairKey) require.True(t, found) - AssertValuesEqual(t, inter, orgValue, value) - return false - }) - }) + pairValue := importValue(t, inter, pair.Value) + AssertValuesEqual(t, inter, pairValue, value) + } - t.Run("remove", func(t *testing.T) { - newEntries := newValueMap(numberOfValues) + owner := dictionary.GetOwner() + assert.Equal(t, expectedOwner, owner) + } - keyValues := make([][2]interpreter.Value, numberOfValues) - for i := 0; i < numberOfValues; i++ { - key := r.randomHashableValue(inter) - value := r.randomStorableValue(inter, 0) + checkIteration := func( + t *testing.T, + inter *interpreter.Interpreter, + dictionary *interpreter.DictionaryValue, + expectedValue cadence.Dictionary, + ) { + // Index the expected key-value pairs for lookup during iteration - newEntries.put(inter, key, value) + indexedExpected := map[any]interpreter.DictionaryEntryValues{} + for _, pair := range expectedValue.Pairs { + pairKey := importValue(t, inter, pair.Key) - keyValues[i][0] = key - keyValues[i][1] = value + mapKey := mapKey(inter, pairKey) + + require.NotContains(t, indexedExpected, mapKey) + indexedExpected[mapKey] = interpreter.DictionaryEntryValues{ + Key: pairKey, + Value: importValue(t, inter, pair.Value), + } } - dictionary := interpreter.NewDictionaryValueWithAddress( + require.Equal(t, len(expectedValue.Pairs), dictionary.Count()) + + var iterations int + + dictionary.Iterate( inter, interpreter.EmptyLocationRange, - &interpreter.DictionaryStaticType{ - KeyType: interpreter.PrimitiveStaticTypeAnyStruct, - ValueType: interpreter.PrimitiveStaticTypeAnyStruct, - }, - orgOwner, - ) - - require.Equal(t, 0, dictionary.Count()) + func(key, value interpreter.Value) (resume bool) { - // Get the initial storage size before inserting values - startingStorageSize, startingSlabCounts := getSlabStorageSize(t, storage) + mapKey := mapKey(inter, key) + require.Contains(t, indexedExpected, mapKey) - // Insert - for _, keyValue := range keyValues { - dictionary.Insert(inter, interpreter.EmptyLocationRange, keyValue[0], keyValue[1]) - } + pair := indexedExpected[mapKey] - require.Equal(t, newEntries.size(), dictionary.Count()) + AssertValuesEqual(t, inter, pair.Key, key) + AssertValuesEqual(t, inter, pair.Value, value) - // Remove - newEntries.foreach(func(orgKey, orgValue interpreter.Value) (exit bool) { - removedValue := dictionary.Remove(inter, 
interpreter.EmptyLocationRange, orgKey) + iterations += 1 - require.IsType(t, &interpreter.SomeValue{}, removedValue) - someValue := removedValue.(*interpreter.SomeValue) + return true + }, + ) - // Removed value must be same as the original value - innerValue := someValue.InnerValue(inter, interpreter.EmptyLocationRange) - AssertValuesEqual(t, inter, orgValue, innerValue) + assert.Equal(t, len(expectedValue.Pairs), iterations) + } - return false - }) + t.Run("construction", func(t *testing.T) { - // Dictionary must be empty - require.Equal(t, 0, dictionary.Count()) + t.Parallel() - storageSize, slabCounts := getSlabStorageSize(t, storage) + r := newRandomValueGenerator(*smokeTestSeed, defaultRandomValueLimits) + t.Logf("seed: %d", r.seed) - // Storage size after removals should be same as the size before insertion. - assert.Equal(t, startingStorageSize, storageSize) - assert.Equal(t, startingSlabCounts, slabCounts) - }) + inter, resetStorage := newRandomValueTestInterpreter(t) - t.Run("remove enum key", func(t *testing.T) { + dictionary, expectedValue := createDictionary(t, &r, inter) - dictionary := interpreter.NewDictionaryValueWithAddress( + checkDictionary( + t, inter, - interpreter.EmptyLocationRange, - &interpreter.DictionaryStaticType{ - KeyType: interpreter.PrimitiveStaticTypeAnyStruct, - ValueType: interpreter.PrimitiveStaticTypeAnyStruct, - }, + dictionary, + expectedValue, orgOwner, ) - require.Equal(t, 0, dictionary.Count()) - - // Get the initial storage size after creating empty dictionary - startingStorageSize, startingSlabCounts := getSlabStorageSize(t, storage) + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } - newEntries := newValueMap(numberOfValues) + resetStorage() - keyValues := make([][2]interpreter.Value, numberOfValues) - for i := 0; i < numberOfValues; i++ { - // Create a random enum as key - key := r.generateRandomHashableValue(inter, randomValueKindEnum) - value := interpreter.Void + dictionary = readDictionary( + t, + inter, + orgOwner, + dictionaryStorageMapKey, + ) - newEntries.put(inter, key, value) + checkDictionary( + t, + inter, + dictionary, + expectedValue, + orgOwner, + ) - keyValues[i][0] = key - keyValues[i][1] = value + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) } + }) - // Insert - for _, keyValue := range keyValues { - dictionary.Insert(inter, interpreter.EmptyLocationRange, keyValue[0], keyValue[1]) - } + t.Run("iterate", func(t *testing.T) { - // Remove - newEntries.foreach(func(orgKey, orgValue interpreter.Value) (exit bool) { - removedValue := dictionary.Remove(inter, interpreter.EmptyLocationRange, orgKey) + t.Parallel() - require.IsType(t, &interpreter.SomeValue{}, removedValue) - someValue := removedValue.(*interpreter.SomeValue) + r := newRandomValueGenerator(*smokeTestSeed, defaultRandomValueLimits) + t.Logf("seed: %d", r.seed) - // Removed value must be same as the original value - innerValue := someValue.InnerValue(inter, interpreter.EmptyLocationRange) - AssertValuesEqual(t, inter, orgValue, innerValue) + inter, resetStorage := newRandomValueTestInterpreter(t) - return false - }) + dictionary, expectedValue := createDictionary(t, &r, inter) - // Dictionary must be empty - require.Equal(t, 0, dictionary.Count()) + checkDictionary( + t, + inter, + dictionary, + expectedValue, + orgOwner, + ) - storageSize, slabCounts = getSlabStorageSize(t, storage) + checkIteration( + t, + inter, + dictionary, + expectedValue, + ) - // Storage size after removals should 
be same as the size before insertion. - assert.Equal(t, startingStorageSize, storageSize) - assert.Equal(t, startingSlabCounts, slabCounts) - }) + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } - t.Run("update enum key", func(t *testing.T) { + resetStorage() - dictionary := interpreter.NewDictionaryValueWithAddress( + dictionary = readDictionary( + t, inter, - interpreter.EmptyLocationRange, - &interpreter.DictionaryStaticType{ - KeyType: interpreter.PrimitiveStaticTypeAnyStruct, - ValueType: interpreter.PrimitiveStaticTypeAnyStruct, - }, orgOwner, + dictionaryStorageMapKey, ) - require.Equal(t, 0, dictionary.Count()) - - value1 := interpreter.NewUnmeteredIntValueFromInt64(1) - value2 := interpreter.NewUnmeteredIntValueFromInt64(2) - - keys := make([]interpreter.Value, numberOfValues) - for i := 0; i < numberOfValues; i++ { - // Create a random enum as key - key := r.generateRandomHashableValue(inter, randomValueKindEnum) + checkDictionary( + t, + inter, + dictionary, + expectedValue, + orgOwner, + ) - keys[i] = key - } + checkIteration( + t, + inter, + dictionary, + expectedValue, + ) - // Insert - for _, key := range keys { - dictionary.Insert( - inter, - interpreter.EmptyLocationRange, - // Need to clone the key, as it is transferred, and we want to keep using it. - key.Clone(inter), - // Always insert value1 - value1, - ) + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) } + }) - // Update - for _, key := range keys { - oldValue := dictionary.Insert( - inter, - interpreter.EmptyLocationRange, - // Need to clone the key, as it is transferred, and we want to keep using it. - key.Clone(inter), - // Change all value1 to value2 - value2, - ) - - require.IsType(t, &interpreter.SomeValue{}, oldValue) - someValue := oldValue.(*interpreter.SomeValue) + t.Run("move (transfer and deep remove)", func(t *testing.T) { - // Removed value must be same as the original value - innerValue := someValue.InnerValue(inter, interpreter.EmptyLocationRange) - AssertValuesEqual(t, inter, value1, innerValue) - } + t.Parallel() - // Check the values - for _, key := range keys { - readValue := dictionary.GetKey( - inter, - interpreter.EmptyLocationRange, - key, - ) + r := newRandomValueGenerator(*smokeTestSeed, defaultRandomValueLimits) + t.Logf("seed: %d", r.seed) - require.IsType(t, &interpreter.SomeValue{}, readValue) - someValue := readValue.(*interpreter.SomeValue) + inter, resetStorage := newRandomValueTestInterpreter(t) - // Read value must be updated value - innerValue := someValue.InnerValue(inter, interpreter.EmptyLocationRange) - AssertValuesEqual(t, inter, value2, innerValue) - } - }) + original, expectedValue := createDictionary(t, &r, inter) - t.Run("random insert & remove", func(t *testing.T) { - keyValues := make([][2]interpreter.Value, numberOfValues) - for i := 0; i < numberOfValues; i++ { - // Generate unique key - var key interpreter.Value - for { - key = r.randomHashableValue(inter) + checkDictionary( + t, + inter, + original, + expectedValue, + orgOwner, + ) - var foundConflict bool - for j := 0; j < i; j++ { - existingKey := keyValues[j][0] - if key.(interpreter.EquatableValue).Equal(inter, interpreter.EmptyLocationRange, existingKey) { - foundConflict = true - break - } - } - if !foundConflict { - break - } - } + resetStorage() - keyValues[i][0] = key - keyValues[i][1] = r.randomStorableValue(inter, 0) - } + original = readDictionary( + t, + inter, + orgOwner, + dictionaryStorageMapKey, + ) - dictionary := 
interpreter.NewDictionaryValueWithAddress( + checkDictionary( + t, inter, - interpreter.EmptyLocationRange, - &interpreter.DictionaryStaticType{ - KeyType: interpreter.PrimitiveStaticTypeAnyStruct, - ValueType: interpreter.PrimitiveStaticTypeAnyStruct, - }, + original, + expectedValue, orgOwner, ) - require.Equal(t, 0, dictionary.Count()) + // Transfer the dictionary to a new owner - // Get the initial storage size before inserting values - startingStorageSize, startingSlabCounts := getSlabStorageSize(t, storage) + newOwner := common.Address{'B'} - insertCount := 0 - deleteCount := 0 + transferred := original.Transfer( + inter, + interpreter.EmptyLocationRange, + atree.Address(newOwner), + false, + nil, + nil, + false, + ).(*interpreter.DictionaryValue) - isInsert := func() bool { - if dictionary.Count() == 0 { - return true - } + // Store the transferred dictionary in a storage map, so that the dictionary's slab + // is referenced by the root of the storage. - if insertCount >= numberOfValues { - return false - } + const transferredStorageMapKey = interpreter.StringStorageMapKey("transferred") - return r.randomInt(1) == 1 - } + writeDictionary( + inter, + newOwner, + transferredStorageMapKey, + transferred, + ) - for insertCount < numberOfValues || dictionary.Count() > 0 { - // Perform a random operation out of insert/remove - if isInsert() { - key := keyValues[insertCount][0] - if _, ok := key.(*interpreter.CompositeValue); ok { - key = key.Clone(inter) - } + withoutAtreeStorageValidationEnabled(inter, func() struct{} { - value := keyValues[insertCount][1].Clone(inter) + removeDictionary( + inter, + orgOwner, + dictionaryStorageMapKey, + ) - dictionary.Insert( - inter, - interpreter.EmptyLocationRange, - key, - value, - ) - insertCount++ - } else { - key := keyValues[deleteCount][0] - orgValue := keyValues[deleteCount][1] + return struct{}{} + }) - removedValue := dictionary.Remove(inter, interpreter.EmptyLocationRange, key) + checkDictionary( + t, + inter, + transferred, + expectedValue, + newOwner, + ) - require.IsType(t, &interpreter.SomeValue{}, removedValue) - someValue := removedValue.(*interpreter.SomeValue) + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } - // Removed value must be same as the original value - innerValue := someValue.InnerValue(inter, interpreter.EmptyLocationRange) - AssertValuesEqual(t, inter, orgValue, innerValue) + resetStorage() - deleteCount++ - } - } + transferred = readDictionary( + t, + inter, + newOwner, + transferredStorageMapKey, + ) - // Dictionary must be empty - require.Equal(t, 0, dictionary.Count()) + checkDictionary( + t, + inter, + transferred, + expectedValue, + newOwner, + ) - storageSize, slabCounts := getSlabStorageSize(t, storage) + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } - // Storage size after removals should be same as the size before insertion. 
- assert.Equal(t, startingStorageSize, storageSize) - assert.Equal(t, startingSlabCounts, slabCounts) + // TODO: check deep removal cleaned up everything in original account (storage size, slab count) }) - t.Run("move", func(t *testing.T) { - newOwner := atree.Address{'B'} - - entries := newValueMap(numberOfValues) + t.Run("insert", func(t *testing.T) { + t.Parallel() - keyValues := make([]interpreter.Value, numberOfValues*2) - for i := 0; i < numberOfValues; i++ { - key := r.randomHashableValue(inter) - value := r.randomStorableValue(inter, 0) + r := newRandomValueGenerator(*smokeTestSeed, defaultRandomValueLimits) + t.Logf("seed: %d", r.seed) - entries.put(inter, key, value) + inter, resetStorage := newRandomValueTestInterpreter(t) - keyValues[i*2] = key - keyValues[i*2+1] = value - } + dictionary, expectedValue := createDictionary(t, &r, inter) - dictionary := interpreter.NewDictionaryValueWithAddress( + checkDictionary( + t, inter, - interpreter.EmptyLocationRange, - &interpreter.DictionaryStaticType{ - KeyType: interpreter.PrimitiveStaticTypeAnyStruct, - ValueType: interpreter.PrimitiveStaticTypeAnyStruct, - }, + dictionary, + expectedValue, orgOwner, - keyValues..., ) - require.Equal(t, entries.size(), dictionary.Count()) + resetStorage() - movedDictionary := dictionary.Transfer( + dictionary = readDictionary( + t, inter, - interpreter.EmptyLocationRange, - newOwner, - true, - nil, - nil, - true, // dictionary is standalone. - ).(*interpreter.DictionaryValue) - - require.Equal(t, entries.size(), movedDictionary.Count()) - - // Cleanup the slab of original dictionary. - err := storage.Remove(dictionary.SlabID()) - require.NoError(t, err) - - // Check the values - entries.foreach(func(orgKey, orgValue interpreter.Value) (exit bool) { - exists := movedDictionary.ContainsKey(inter, interpreter.EmptyLocationRange, orgKey) - require.True(t, bool(exists)) + orgOwner, + dictionaryStorageMapKey, + ) - value, found := movedDictionary.Get(inter, interpreter.EmptyLocationRange, orgKey) - require.True(t, found) - AssertValuesEqual(t, inter, orgValue, value) + checkDictionary( + t, + inter, + dictionary, + expectedValue, + orgOwner, + ) - return false - }) + // Insert new values into the dictionary. + // Atree storage validation must be temporarily disabled + // to not report any "unreferenced slab" errors. 
- owner := movedDictionary.GetOwner() - assert.Equal(t, newOwner[:], owner[:]) - }) -} + numberOfValues := r.randomInt(r.containerMaxSize) -func TestInterpretRandomArrayOperations(t *testing.T) { - if !*runSmokeTests { - t.Skip("smoke tests are disabled") - } + for i := 0; i < numberOfValues; i++ { - r := newRandomValueGenerator() - t.Logf("seed: %d", r.seed) + // Generate a unique key + var key cadence.Value + var importedKey interpreter.Value + for { + key = r.randomHashableValue(inter) + importedKey = importValue(t, inter, key) - storage := newUnmeteredInMemoryStorage() - inter, err := interpreter.NewInterpreter( - &interpreter.Program{ - Program: ast.NewProgram(nil, []ast.Declaration{}), - Elaboration: sema.NewElaboration(nil), - }, - TestLocation, - &interpreter.Config{ - Storage: storage, - ImportLocationHandler: func(inter *interpreter.Interpreter, location common.Location) interpreter.Import { - return interpreter.VirtualImport{ - Elaboration: inter.Program.Elaboration, + if !dictionary.ContainsKey( + inter, + interpreter.EmptyLocationRange, + importedKey, + ) { + break } - }, - }, - ) - require.NoError(t, err) + } - numberOfValues := r.randomInt(containerMaxSize) + value := r.randomStorableValue(inter, 0) + importedValue := importValue(t, inter, value) - var testArray, copyOfTestArray *interpreter.ArrayValue - var storageSize, slabCounts int + // Atree storage validation must be temporarily disabled + // to not report any "unreferenced slab" errors. - elements := make([]interpreter.Value, numberOfValues) - orgOwner := common.Address{'A'} + _ = withoutAtreeStorageValidationEnabled(inter, func() struct{} { - t.Run("construction", func(t *testing.T) { - values := make([]interpreter.Value, numberOfValues) - for i := 0; i < numberOfValues; i++ { - value := r.randomStorableValue(inter, 0) - elements[i] = value - values[i] = value.Clone(inter) + existing := dictionary.Insert( + inter, + interpreter.EmptyLocationRange, + importedKey, + importedValue, + ) + require.Equal(t, + interpreter.NilOptionalValue, + existing, + ) + return struct{}{} + }) + + expectedValue.Pairs = append( + expectedValue.Pairs, + cadence.KeyValuePair{ + Key: key, + Value: value, + }, + ) } - testArray = interpreter.NewArrayValue( + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } + + checkDictionary( + t, inter, - interpreter.EmptyLocationRange, - &interpreter.VariableSizedStaticType{ - Type: interpreter.PrimitiveStaticTypeAnyStruct, - }, + dictionary, + expectedValue, orgOwner, - values..., ) - storageSize, slabCounts = getSlabStorageSize(t, storage) - - require.Equal(t, len(elements), testArray.Count()) - - for index, orgElement := range elements { - element := testArray.Get(inter, interpreter.EmptyLocationRange, index) - AssertValuesEqual(t, inter, orgElement, element) - } - - owner := testArray.GetOwner() - assert.Equal(t, orgOwner, owner) - }) - - t.Run("iterate", func(t *testing.T) { - require.Equal(t, testArray.Count(), len(elements)) + resetStorage() - index := 0 - testArray.Iterate( + dictionary = readDictionary( + t, inter, - func(element interpreter.Value) (resume bool) { - orgElement := elements[index] - AssertValuesEqual(t, inter, orgElement, element) - - elementByIndex := testArray.Get(inter, interpreter.EmptyLocationRange, index) - AssertValuesEqual(t, inter, element, elementByIndex) - - index++ - return true - }, - false, - interpreter.EmptyLocationRange, + orgOwner, + dictionaryStorageMapKey, ) - }) - t.Run("deep copy", func(t *testing.T) { - newOwner := 
atree.Address{'B'} - copyOfTestArray = testArray.Transfer( + checkDictionary( + t, inter, - interpreter.EmptyLocationRange, - newOwner, - false, - nil, - nil, - true, // testArray is standalone. - ).(*interpreter.ArrayValue) - - require.Equal(t, len(elements), copyOfTestArray.Count()) + dictionary, + expectedValue, + orgOwner, + ) - for index, orgElement := range elements { - element := copyOfTestArray.Get(inter, interpreter.EmptyLocationRange, index) - AssertValuesEqual(t, inter, orgElement, element) + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) } - - owner := copyOfTestArray.GetOwner() - assert.Equal(t, newOwner[:], owner[:]) }) - t.Run("deep removal", func(t *testing.T) { - copyOfTestArray.DeepRemove(inter, true) - err = storage.Remove(copyOfTestArray.SlabID()) - require.NoError(t, err) - - // deep removal should clean up everything - newStorageSize, newSlabCounts := getSlabStorageSize(t, storage) - assert.Equal(t, slabCounts, newSlabCounts) - assert.Equal(t, storageSize, newStorageSize) - - assert.Equal(t, len(elements), testArray.Count()) + t.Run("remove", func(t *testing.T) { + t.Parallel() - // go over original elements again and check no missing data (no side effect should be found) - for index, orgElement := range elements { - element := testArray.Get(inter, interpreter.EmptyLocationRange, index) - AssertValuesEqual(t, inter, orgElement, element) - } + r := newRandomValueGenerator(*smokeTestSeed, defaultRandomValueLimits) + t.Logf("seed: %d", r.seed) - owner := testArray.GetOwner() - assert.Equal(t, orgOwner, owner) - }) + inter, resetStorage := newRandomValueTestInterpreter(t) - t.Run("insert", func(t *testing.T) { - newElements := make([]interpreter.Value, numberOfValues) + dictionary, expectedValue := createDictionary(t, &r, inter) - testArray = interpreter.NewArrayValue( + checkDictionary( + t, inter, - interpreter.EmptyLocationRange, - &interpreter.VariableSizedStaticType{ - Type: interpreter.PrimitiveStaticTypeAnyStruct, - }, + dictionary, + expectedValue, orgOwner, ) - require.Equal(t, 0, testArray.Count()) + resetStorage() - for i := 0; i < numberOfValues; i++ { - element := r.randomStorableValue(inter, 0) - newElements[i] = element + dictionary = readDictionary( + t, + inter, + orgOwner, + dictionaryStorageMapKey, + ) - testArray.Insert( - inter, - interpreter.EmptyLocationRange, - i, - element.Clone(inter), - ) - } + checkDictionary( + t, + inter, + dictionary, + expectedValue, + orgOwner, + ) - require.Equal(t, len(newElements), testArray.Count()) + // Remove + for _, pair := range expectedValue.Pairs { - // Go over original values again and check no missing data (no side effect should be found) - for index, element := range newElements { - value := testArray.Get(inter, interpreter.EmptyLocationRange, index) - AssertValuesEqual(t, inter, element, value) - } - }) + key := importValue(t, inter, pair.Key) - t.Run("append", func(t *testing.T) { - newElements := make([]interpreter.Value, numberOfValues) + // Atree storage validation must be temporarily disabled + // to not report any "unreferenced slab" errors. 
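readDictionary and checkDictionary are used by the dictionary subtests but defined earlier in the file. By analogy with the readComposite/checkComposite and readArray/checkArray helpers that do appear in this diff, hypothetical sketches could look like the following (the real definitions may differ):

// Sketch by analogy with readComposite/readArray; not the confirmed implementation.
readDictionary := func(
	t *testing.T,
	inter *interpreter.Interpreter,
	owner common.Address,
	storageMapKey interpreter.StorageMapKey,
) *interpreter.DictionaryValue {
	storageMap := inter.Storage().GetDomainStorageMap(
		inter,
		owner,
		common.StorageDomainPathStorage,
		false,
	)
	require.NotNil(t, storageMap)

	readValue := storageMap.ReadValue(inter, storageMapKey)
	require.NotNil(t, readValue)

	require.IsType(t, &interpreter.DictionaryValue{}, readValue)
	return readValue.(*interpreter.DictionaryValue)
}

// Sketch: compare a stored dictionary against the expected cadence.Dictionary.
checkDictionary := func(
	t *testing.T,
	inter *interpreter.Interpreter,
	dictionary *interpreter.DictionaryValue,
	expectedValue cadence.Dictionary,
	expectedOwner common.Address,
) {
	require.Equal(t, len(expectedValue.Pairs), dictionary.Count())

	for _, pair := range expectedValue.Pairs {
		key := importValue(t, inter, pair.Key)

		value, found := dictionary.Get(inter, interpreter.EmptyLocationRange, key)
		require.True(t, found)

		expected := importValue(t, inter, pair.Value)
		AssertValuesEqual(t, inter, expected, value)
	}

	assert.Equal(t, expectedOwner, dictionary.GetOwner())
}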
- testArray = interpreter.NewArrayValue( - inter, - interpreter.EmptyLocationRange, - &interpreter.VariableSizedStaticType{ - Type: interpreter.PrimitiveStaticTypeAnyStruct, - }, - orgOwner, - ) + removedValue := withoutAtreeStorageValidationEnabled(inter, func() interpreter.OptionalValue { + return dictionary.Remove(inter, interpreter.EmptyLocationRange, key) + }) - require.Equal(t, 0, testArray.Count()) + require.IsType(t, &interpreter.SomeValue{}, removedValue) + someValue := removedValue.(*interpreter.SomeValue) - for i := 0; i < numberOfValues; i++ { - element := r.randomStorableValue(inter, 0) - newElements[i] = element + value := importValue(t, inter, pair.Value) - testArray.Append( - inter, - interpreter.EmptyLocationRange, - element.Clone(inter), - ) + // Removed value must be same as the original value + innerValue := someValue.InnerValue(inter, interpreter.EmptyLocationRange) + AssertValuesEqual(t, inter, value, innerValue) } - require.Equal(t, len(newElements), testArray.Count()) - - // Go over original values again and check no missing data (no side effect should be found) - for index, element := range newElements { - value := testArray.Get(inter, interpreter.EmptyLocationRange, index) - AssertValuesEqual(t, inter, element, value) + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) } - }) - t.Run("remove", func(t *testing.T) { - newElements := make([]interpreter.Value, numberOfValues) + expectedValue = cadence.Dictionary{}. + WithType(expectedValue.Type().(*cadence.DictionaryType)) - for i := 0; i < numberOfValues; i++ { - newElements[i] = r.randomStorableValue(inter, 0) - } + // Dictionary must be empty + require.Equal(t, 0, dictionary.Count()) - testArray = interpreter.NewArrayValue( + checkDictionary( + t, inter, - interpreter.EmptyLocationRange, - &interpreter.VariableSizedStaticType{ - Type: interpreter.PrimitiveStaticTypeAnyStruct, - }, + dictionary, + expectedValue, orgOwner, ) - require.Equal(t, 0, testArray.Count()) - - // Get the initial storage size before inserting values - startingStorageSize, startingSlabCounts := getSlabStorageSize(t, storage) - - // Insert - for index, element := range newElements { - testArray.Insert( - inter, - interpreter.EmptyLocationRange, - index, - element.Clone(inter), - ) - } + resetStorage() - require.Equal(t, len(newElements), testArray.Count()) + dictionary = readDictionary( + t, + inter, + orgOwner, + dictionaryStorageMapKey, + ) - // Remove - for _, element := range newElements { - removedValue := testArray.Remove(inter, interpreter.EmptyLocationRange, 0) + checkDictionary( + t, + inter, + dictionary, + expectedValue, + orgOwner, + ) - // Removed value must be same as the original value - AssertValuesEqual(t, inter, element, removedValue) + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) } - // Array must be empty - require.Equal(t, 0, testArray.Count()) + // TODO: check storage size, slab count + }) - storageSize, slabCounts := getSlabStorageSize(t, storage) + t.Run("update", func(t *testing.T) { + t.Parallel() - // Storage size after removals should be same as the size before insertion. 
- assert.Equal(t, startingStorageSize, storageSize) - assert.Equal(t, startingSlabCounts, slabCounts) - }) + r := newRandomValueGenerator(*smokeTestSeed, defaultRandomValueLimits) + t.Logf("seed: %d", r.seed) - t.Run("random insert & remove", func(t *testing.T) { - elements := make([]interpreter.Value, numberOfValues) + inter, resetStorage := newRandomValueTestInterpreter(t) - for i := 0; i < numberOfValues; i++ { - elements[i] = r.randomStorableValue(inter, 0) - } + dictionary, expectedValue := createDictionary(t, &r, inter) - testArray = interpreter.NewArrayValue( + checkDictionary( + t, inter, - interpreter.EmptyLocationRange, - &interpreter.VariableSizedStaticType{ - Type: interpreter.PrimitiveStaticTypeAnyStruct, - }, + dictionary, + expectedValue, orgOwner, ) - require.Equal(t, 0, testArray.Count()) + resetStorage() - // Get the initial storage size before inserting values - startingStorageSize, startingSlabCounts := getSlabStorageSize(t, storage) + dictionary = readDictionary( + t, + inter, + orgOwner, + dictionaryStorageMapKey, + ) - insertCount := 0 - deleteCount := 0 + checkDictionary( + t, + inter, + dictionary, + expectedValue, + orgOwner, + ) - isInsert := func() bool { - if testArray.Count() == 0 { - return true - } + elementCount := dictionary.Count() - if insertCount >= numberOfValues { - return false - } + // Generate new values - return r.randomInt(1) == 1 + newValues := make([]cadence.Value, len(expectedValue.Pairs)) + for i := range expectedValue.Pairs { + newValues[i] = r.randomStorableValue(inter, 0) } - for insertCount < numberOfValues || testArray.Count() > 0 { - // Perform a random operation out of insert/remove - if isInsert() { - value := elements[insertCount].Clone(inter) + // Update + for i, pair := range expectedValue.Pairs { + + key := importValue(t, inter, pair.Key) + newValue := importValue(t, inter, newValues[i]) + + // Atree storage validation must be temporarily disabled + // to not report any "unreferenced slab" errors. - testArray.Append( + existingValue := withoutAtreeStorageValidationEnabled(inter, func() interpreter.OptionalValue { + return dictionary.Insert( inter, interpreter.EmptyLocationRange, - value, + key, + newValue, ) - insertCount++ - } else { - orgValue := elements[deleteCount] - removedValue := testArray.RemoveFirst(inter, interpreter.EmptyLocationRange) - - // Removed value must be same as the original value - AssertValuesEqual(t, inter, orgValue, removedValue) + }) - deleteCount++ - } - } - - // Dictionary must be empty - require.Equal(t, 0, testArray.Count()) + require.IsType(t, &interpreter.SomeValue{}, existingValue) + someValue := existingValue.(*interpreter.SomeValue) - storageSize, slabCounts := getSlabStorageSize(t, storage) + value := importValue(t, inter, pair.Value) - // Storage size after removals should be same as the size before insertion. 
- assert.Equal(t, startingStorageSize, storageSize) - assert.Equal(t, startingSlabCounts, slabCounts) - }) + // Removed value must be same as the original value + innerValue := someValue.InnerValue(inter, interpreter.EmptyLocationRange) + AssertValuesEqual(t, inter, value, innerValue) - t.Run("move", func(t *testing.T) { - values := make([]interpreter.Value, numberOfValues) - elements := make([]interpreter.Value, numberOfValues) + expectedValue.Pairs[i].Value = newValues[i] + } - for i := 0; i < numberOfValues; i++ { - value := r.randomStorableValue(inter, 0) - elements[i] = value - values[i] = value.Clone(inter) + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) } - array := interpreter.NewArrayValue( + // Dictionary must have same number of key-value pairs + require.Equal(t, elementCount, dictionary.Count()) + + checkDictionary( + t, inter, - interpreter.EmptyLocationRange, - &interpreter.VariableSizedStaticType{ - Type: interpreter.PrimitiveStaticTypeAnyStruct, - }, + dictionary, + expectedValue, orgOwner, - values..., ) - require.Equal(t, len(elements), array.Count()) - - owner := array.GetOwner() - assert.Equal(t, orgOwner, owner) + resetStorage() - newOwner := atree.Address{'B'} - movedArray := array.Transfer( + dictionary = readDictionary( + t, inter, - interpreter.EmptyLocationRange, - newOwner, - true, - nil, - nil, - true, // array is standalone. - ).(*interpreter.ArrayValue) - - require.Equal(t, len(elements), movedArray.Count()) + orgOwner, + dictionaryStorageMapKey, + ) - // Cleanup the slab of original array. - err := storage.Remove(array.SlabID()) - require.NoError(t, err) + checkDictionary( + t, + inter, + dictionary, + expectedValue, + orgOwner, + ) - // Check the elements - for index, orgElement := range elements { - element := movedArray.Get(inter, interpreter.EmptyLocationRange, index) - AssertValuesEqual(t, inter, orgElement, element) + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) } - owner = movedArray.GetOwner() - assert.Equal(t, newOwner[:], owner[:]) + // TODO: check storage size, slab count }) } -func TestInterpretRandomCompositeValueOperations(t *testing.T) { +func TestInterpretSmokeRandomCompositeOperations(t *testing.T) { if !*runSmokeTests { t.Skip("smoke tests are disabled") } - r := newRandomValueGenerator() - t.Logf("seed: %d", r.seed) + t.Parallel() - storage := newUnmeteredInMemoryStorage() - inter, err := interpreter.NewInterpreter( - &interpreter.Program{ - Program: ast.NewProgram(nil, []ast.Declaration{}), - Elaboration: sema.NewElaboration(nil), - }, - TestLocation, - &interpreter.Config{ - Storage: storage, - ImportLocationHandler: func(inter *interpreter.Interpreter, location common.Location) interpreter.Import { - return interpreter.VirtualImport{ - Elaboration: inter.Program.Elaboration, - } - }, - }, - ) - require.NoError(t, err) + orgOwner := common.Address{'A'} - var testComposite, copyOfTestComposite *interpreter.CompositeValue - var storageSize, slabCounts int - var orgFields map[string]interpreter.Value + const compositeStorageMapKey = interpreter.StringStorageMapKey("composite") - fieldsCount := r.randomInt(compositeMaxFields) - orgOwner := common.Address{'A'} + writeComposite := func( + inter *interpreter.Interpreter, + owner common.Address, + storageMapKey interpreter.StorageMapKey, + composite *interpreter.CompositeValue, + ) { + inter.Storage(). + GetDomainStorageMap( + inter, + owner, + common.StorageDomainPathStorage, + true, + ). 
+ WriteValue( + inter, + storageMapKey, + composite, + ) + } - t.Run("construction", func(t *testing.T) { - testComposite, orgFields = r.randomCompositeValue(orgOwner, fieldsCount, inter, 0) + removeComposite := func( + inter *interpreter.Interpreter, + owner common.Address, + storageMapKey interpreter.StorageMapKey, + ) { + inter.Storage(). + GetDomainStorageMap( + inter, + owner, + common.StorageDomainPathStorage, + true, + ). + RemoveValue( + inter, + storageMapKey, + ) + } + + readComposite := func( + t *testing.T, + inter *interpreter.Interpreter, + owner common.Address, + storageMapKey interpreter.StorageMapKey, + ) *interpreter.CompositeValue { + storageMap := inter.Storage().GetDomainStorageMap( + inter, + owner, + common.StorageDomainPathStorage, + false, + ) + require.NotNil(t, storageMap) - storageSize, slabCounts = getSlabStorageSize(t, storage) + readValue := storageMap.ReadValue(inter, storageMapKey) + require.NotNil(t, readValue) + + require.IsType(t, &interpreter.CompositeValue{}, readValue) + return readValue.(*interpreter.CompositeValue) + } - for fieldName, orgFieldValue := range orgFields { - fieldValue := testComposite.GetField(inter, interpreter.EmptyLocationRange, fieldName) - AssertValuesEqual(t, inter, orgFieldValue, fieldValue) + createComposite := func( + t *testing.T, + r *randomValueGenerator, + inter *interpreter.Interpreter, + ) ( + *interpreter.CompositeValue, + cadence.Struct, + ) { + expectedValue := r.randomStructValue(inter, 0) + + fieldsMappedByName := expectedValue.FieldsMappedByName() + fields := make([]interpreter.CompositeField, 0, len(fieldsMappedByName)) + for name, field := range fieldsMappedByName { + + value := importValue(t, inter, field) + + fields = append(fields, interpreter.CompositeField{ + Name: name, + Value: value, + }) } - owner := testComposite.GetOwner() - assert.Equal(t, orgOwner, owner) - }) + // Construct a composite directly in the owner's account. + // However, the composite is not referenced by the root of the storage yet + // (a storage map), so atree storage validation must be temporarily disabled + // to not report any "unreferenced slab" errors. - t.Run("iterate", func(t *testing.T) { - fieldCount := 0 - testComposite.ForEachField(inter, func(name string, value interpreter.Value) (resume bool) { - orgValue, ok := orgFields[name] - require.True(t, ok) - AssertValuesEqual(t, inter, orgValue, value) - fieldCount++ - - // continue iteration - return true - }, interpreter.EmptyLocationRange) - - assert.Equal(t, len(orgFields), fieldCount) - }) + composite := withoutAtreeStorageValidationEnabled( + inter, + func() *interpreter.CompositeValue { + return interpreter.NewCompositeValue( + inter, + interpreter.EmptyLocationRange, + expectedValue.StructType.Location, + expectedValue.StructType.QualifiedIdentifier, + common.CompositeKindStructure, + fields, + orgOwner, + ) + }, + ) - t.Run("deep copy", func(t *testing.T) { - newOwner := atree.Address{'B'} + // Store the composite in a storage map, so that the composite's slab + // is referenced by the root of the storage. - copyOfTestComposite = testComposite.Transfer( + writeComposite( inter, - interpreter.EmptyLocationRange, - newOwner, - false, - nil, - nil, - true, // testComposite is standalone. 
- ).(*interpreter.CompositeValue) + orgOwner, + compositeStorageMapKey, + composite, + ) - for name, orgValue := range orgFields { - value := copyOfTestComposite.GetField(inter, interpreter.EmptyLocationRange, name) - AssertValuesEqual(t, inter, orgValue, value) - } + return composite, expectedValue + } - owner := copyOfTestComposite.GetOwner() - assert.Equal(t, newOwner[:], owner[:]) - }) + checkComposite := func( + t *testing.T, + inter *interpreter.Interpreter, + composite *interpreter.CompositeValue, + expectedValue cadence.Struct, + expectedOwner common.Address, + ) { + fieldsMappedByName := expectedValue.FieldsMappedByName() - t.Run("deep remove", func(t *testing.T) { - copyOfTestComposite.DeepRemove(inter, true) - err = storage.Remove(copyOfTestComposite.SlabID()) - require.NoError(t, err) + require.Equal(t, len(fieldsMappedByName), composite.FieldCount()) + + for name, field := range fieldsMappedByName { - // deep removal should clean up everything - newStorageSize, newSlabCounts := getSlabStorageSize(t, storage) - assert.Equal(t, slabCounts, newSlabCounts) - assert.Equal(t, storageSize, newStorageSize) + value := composite.GetMember(inter, interpreter.EmptyLocationRange, name) - // go over original values again and check no missing data (no side effect should be found) - for name, orgValue := range orgFields { - value := testComposite.GetField(inter, interpreter.EmptyLocationRange, name) - AssertValuesEqual(t, inter, orgValue, value) + fieldValue := importValue(t, inter, field) + AssertValuesEqual(t, inter, fieldValue, value) } - owner := testComposite.GetOwner() - assert.Equal(t, orgOwner, owner) - }) + owner := composite.GetOwner() + assert.Equal(t, expectedOwner, owner) + } - t.Run("remove field", func(t *testing.T) { - newOwner := atree.Address{'c'} + t.Run("construction", func(t *testing.T) { - composite := testComposite.Transfer( - inter, - interpreter.EmptyLocationRange, - newOwner, - false, - nil, - nil, - true, // testComposite is standalone. - ).(*interpreter.CompositeValue) + t.Parallel() - require.NoError(t, err) + r := newRandomValueGenerator(*smokeTestSeed, defaultRandomValueLimits) + t.Logf("seed: %d", r.seed) - for name := range orgFields { - composite.RemoveField(inter, interpreter.EmptyLocationRange, name) - value := composite.GetField(inter, interpreter.EmptyLocationRange, name) - assert.Nil(t, value) - } - }) + inter, resetStorage := newRandomValueTestInterpreter(t) - t.Run("move", func(t *testing.T) { - composite, fields := r.randomCompositeValue(orgOwner, fieldsCount, inter, 0) + composite, expectedValue := createComposite(t, &r, inter) - owner := composite.GetOwner() - assert.Equal(t, orgOwner, owner) + checkComposite( + t, + inter, + composite, + expectedValue, + orgOwner, + ) - newOwner := atree.Address{'B'} - movedComposite := composite.Transfer( + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } + + resetStorage() + + composite = readComposite( + t, inter, - interpreter.EmptyLocationRange, - newOwner, - true, - nil, - nil, - true, // composite is standalone. - ).(*interpreter.CompositeValue) + orgOwner, + compositeStorageMapKey, + ) - // Cleanup the slab of original composite. 
- err := storage.Remove(composite.SlabID()) - require.NoError(t, err) + checkComposite( + t, + inter, + composite, + expectedValue, + orgOwner, + ) - // Check the elements - for fieldName, orgFieldValue := range fields { - fieldValue := movedComposite.GetField(inter, interpreter.EmptyLocationRange, fieldName) - AssertValuesEqual(t, inter, orgFieldValue, fieldValue) + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) } - owner = composite.GetOwner() - assert.Equal(t, orgOwner, owner) }) -} -func (r randomValueGenerator) randomCompositeValue( - orgOwner common.Address, - fieldsCount int, - inter *interpreter.Interpreter, - currentDepth int, -) (*interpreter.CompositeValue, map[string]interpreter.Value) { + t.Run("move (transfer and deep remove)", func(t *testing.T) { - orgFields := make(map[string]interpreter.Value, fieldsCount) + t.Parallel() - identifier := r.randomUTF8String() + r := newRandomValueGenerator(*smokeTestSeed, defaultRandomValueLimits) + t.Logf("seed: %d", r.seed) - location := common.AddressLocation{ - Address: orgOwner, - Name: identifier, - } + inter, resetStorage := newRandomValueTestInterpreter(t) - fields := make([]interpreter.CompositeField, fieldsCount) + original, expectedValue := createComposite(t, &r, inter) - fieldNames := make(map[string]any, fieldsCount) + checkComposite( + t, + inter, + original, + expectedValue, + orgOwner, + ) - for i := 0; i < fieldsCount; { - fieldName := r.randomUTF8String() + resetStorage() - // avoid duplicate field names - if _, ok := fieldNames[fieldName]; ok { - continue - } - fieldNames[fieldName] = struct{}{} + original = readComposite( + t, + inter, + orgOwner, + compositeStorageMapKey, + ) - field := interpreter.NewUnmeteredCompositeField( - fieldName, - r.randomStorableValue(inter, currentDepth+1), + checkComposite( + t, + inter, + original, + expectedValue, + orgOwner, ) - fields[i] = field - orgFields[field.Name] = field.Value.Clone(inter) + // Transfer the composite to a new owner - i++ - } + newOwner := common.Address{'B'} - kind := common.CompositeKindStructure + transferred := original.Transfer( + inter, + interpreter.EmptyLocationRange, + atree.Address(newOwner), + false, + nil, + nil, + false, + ).(*interpreter.CompositeValue) - compositeType := &sema.CompositeType{ - Location: location, - Identifier: identifier, - Kind: kind, - } + // Store the transferred composite in a storage map, so that the composite's slab + // is referenced by the root of the storage. 
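The Transfer call in this move test passes several positional arguments. An annotated restatement of that call, with parameter meanings inferred from the surrounding comments rather than from the authoritative signature, reads roughly as:

// Annotation sketch of the Transfer call above; the parameter meanings are
// best-effort interpretations, not confirmed parameter names.
transferred := original.Transfer(
	inter,
	interpreter.EmptyLocationRange,
	atree.Address(newOwner), // target owner address
	false,                   // remove: keep the original; it is removed from the old storage map explicitly below
	nil,                     // no precomputed storable
	nil,                     // no prevent-transfer tracking set
	false,                   // the value has a parent container: it was read from a storage map
).(*interpreter.CompositeValue)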
- compositeType.Members = &sema.StringMemberOrderedMap{} - for _, field := range fields { - compositeType.Members.Set( - field.Name, - sema.NewUnmeteredPublicConstantFieldMember( - compositeType, - field.Name, - sema.AnyStructType, - "", - ), + const transferredStorageMapKey = interpreter.StringStorageMapKey("transferred") + + writeComposite( + inter, + newOwner, + transferredStorageMapKey, + transferred, ) - } - // Add the type to the elaboration, to short-circuit the type-lookup - inter.Program.Elaboration.SetCompositeType( - compositeType.ID(), - compositeType, - ) + withoutAtreeStorageValidationEnabled(inter, func() struct{} { + removeComposite( + inter, + orgOwner, + compositeStorageMapKey, + ) - testComposite := interpreter.NewCompositeValue( - inter, - interpreter.EmptyLocationRange, - location, - identifier, - kind, - fields, - orgOwner, - ) - return testComposite, orgFields -} + return struct{}{} + }) -func getSlabStorageSize(t *testing.T, storage interpreter.InMemoryStorage) (totalSize int, slabCounts int) { - slabs, err := storage.Encode() - require.NoError(t, err) + checkComposite( + t, + inter, + transferred, + expectedValue, + newOwner, + ) - for id, slab := range slabs { - if id.HasTempAddress() { - continue + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) } - totalSize += len(slab) - slabCounts++ - } - - return -} - -type randomValueGenerator struct { - seed int64 - rand *rand.Rand -} + resetStorage() -func newRandomValueGenerator() randomValueGenerator { - seed := *smokeTestSeed - if seed == -1 { - seed = time.Now().UnixNano() - } + transferred = readComposite( + t, + inter, + newOwner, + transferredStorageMapKey, + ) - return randomValueGenerator{ - seed: seed, - rand: rand.New(rand.NewSource(seed)), - } -} -func (r randomValueGenerator) randomStorableValue(inter *interpreter.Interpreter, currentDepth int) interpreter.Value { - n := 0 - if currentDepth < containerMaxDepth { - n = r.randomInt(randomValueKindComposite) - } else { - n = r.randomInt(randomValueKindCapability) - } + checkComposite( + t, + inter, + transferred, + expectedValue, + newOwner, + ) - switch n { + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } - // Non-hashable - case randomValueKindVoid: - return interpreter.Void - case randomValueKindNil: - return interpreter.Nil - case randomValueKindDictionaryVariant1, - randomValueKindDictionaryVariant2: - return r.randomDictionaryValue(inter, currentDepth) - case randomValueKindArrayVariant1, - randomValueKindArrayVariant2: - return r.randomArrayValue(inter, currentDepth) - case randomValueKindComposite: - fieldsCount := r.randomInt(compositeMaxFields) - v, _ := r.randomCompositeValue(common.ZeroAddress, fieldsCount, inter, currentDepth) - return v - case randomValueKindCapability: - return interpreter.NewUnmeteredCapabilityValue( - interpreter.UInt64Value(r.randomInt(math.MaxInt-1)), - r.randomAddressValue(), - &interpreter.ReferenceStaticType{ - Authorization: interpreter.UnauthorizedAccess, - ReferencedType: interpreter.PrimitiveStaticTypeAnyStruct, - }, - ) - case randomValueKindSome: - return interpreter.NewUnmeteredSomeValueNonCopying( - r.randomStorableValue(inter, currentDepth+1), - ) + // TODO: check deep removal cleaned up everything in original account (storage size, slab count) + }) - // Hashable - default: - return r.generateRandomHashableValue(inter, n) - } -} + t.Run("update", func(t *testing.T) { + t.Parallel() -func (r randomValueGenerator) 
randomHashableValue(interpreter *interpreter.Interpreter) interpreter.Value { - return r.generateRandomHashableValue(interpreter, r.randomInt(randomValueKindEnum)) -} + r := newRandomValueGenerator(*smokeTestSeed, defaultRandomValueLimits) + t.Logf("seed: %d", r.seed) -func (r randomValueGenerator) generateRandomHashableValue(inter *interpreter.Interpreter, n int) interpreter.Value { - switch n { + inter, resetStorage := newRandomValueTestInterpreter(t) - // Int* - case randomValueKindInt: - return interpreter.NewUnmeteredIntValueFromInt64(int64(r.randomSign()) * r.rand.Int63()) - case randomValueKindInt8: - return interpreter.NewUnmeteredInt8Value(int8(r.randomInt(math.MaxUint8))) - case randomValueKindInt16: - return interpreter.NewUnmeteredInt16Value(int16(r.randomInt(math.MaxUint16))) - case randomValueKindInt32: - return interpreter.NewUnmeteredInt32Value(int32(r.randomSign()) * r.rand.Int31()) - case randomValueKindInt64: - return interpreter.NewUnmeteredInt64Value(int64(r.randomSign()) * r.rand.Int63()) - case randomValueKindInt128: - return interpreter.NewUnmeteredInt128ValueFromInt64(int64(r.randomSign()) * r.rand.Int63()) - case randomValueKindInt256: - return interpreter.NewUnmeteredInt256ValueFromInt64(int64(r.randomSign()) * r.rand.Int63()) + composite, expectedValue := createComposite(t, &r, inter) - // UInt* - case randomValueKindUInt: - return interpreter.NewUnmeteredUIntValueFromUint64(r.rand.Uint64()) - case randomValueKindUInt8: - return interpreter.NewUnmeteredUInt8Value(uint8(r.randomInt(math.MaxUint8))) - case randomValueKindUInt16: - return interpreter.NewUnmeteredUInt16Value(uint16(r.randomInt(math.MaxUint16))) - case randomValueKindUInt32: - return interpreter.NewUnmeteredUInt32Value(r.rand.Uint32()) - case randomValueKindUInt64Variant1, - randomValueKindUInt64Variant2, - randomValueKindUInt64Variant3, - randomValueKindUInt64Variant4: // should be more common - return interpreter.NewUnmeteredUInt64Value(r.rand.Uint64()) - case randomValueKindUInt128: - return interpreter.NewUnmeteredUInt128ValueFromUint64(r.rand.Uint64()) - case randomValueKindUInt256: - return interpreter.NewUnmeteredUInt256ValueFromUint64(r.rand.Uint64()) + checkComposite( + t, + inter, + composite, + expectedValue, + orgOwner, + ) - // Word* - case randomValueKindWord8: - return interpreter.NewUnmeteredWord8Value(uint8(r.randomInt(math.MaxUint8))) - case randomValueKindWord16: - return interpreter.NewUnmeteredWord16Value(uint16(r.randomInt(math.MaxUint16))) - case randomValueKindWord32: - return interpreter.NewUnmeteredWord32Value(r.rand.Uint32()) - case randomValueKindWord64: - return interpreter.NewUnmeteredWord64Value(r.rand.Uint64()) - case randomValueKindWord128: - return interpreter.NewUnmeteredWord128ValueFromUint64(r.rand.Uint64()) - case randomValueKindWord256: - return interpreter.NewUnmeteredWord256ValueFromUint64(r.rand.Uint64()) + resetStorage() - // (U)Fix* - case randomValueKindFix64: - return interpreter.NewUnmeteredFix64ValueWithInteger( - int64(r.randomSign())*r.rand.Int63n(sema.Fix64TypeMaxInt), - interpreter.EmptyLocationRange, + composite = readComposite( + t, + inter, + orgOwner, + compositeStorageMapKey, ) - case randomValueKindUFix64: - return interpreter.NewUnmeteredUFix64ValueWithInteger( - uint64(r.rand.Int63n( - int64(sema.UFix64TypeMaxInt), - )), - interpreter.EmptyLocationRange, + + checkComposite( + t, + inter, + composite, + expectedValue, + orgOwner, ) - // String - case randomValueKindStringVariant1, - randomValueKindStringVariant2, - 
randomValueKindStringVariant3, - randomValueKindStringVariant4: // small string - should be more common - size := r.randomInt(255) - return interpreter.NewUnmeteredStringValue(r.randomUTF8StringOfSize(size)) - case randomValueKindStringVariant5: // large string - size := r.randomInt(4048) + 255 - return interpreter.NewUnmeteredStringValue(r.randomUTF8StringOfSize(size)) + typeID := expectedValue.StructType.Location. + TypeID(nil, expectedValue.StructType.QualifiedIdentifier) + compositeType := inter.Program.Elaboration.CompositeType(typeID) - case randomValueKindBoolVariantTrue: - return interpreter.TrueValue - case randomValueKindBoolVariantFalse: - return interpreter.FalseValue + typeFieldCount := len(compositeType.Fields) + require.Equal(t, typeFieldCount, len(expectedValue.FieldsMappedByName())) + require.Equal(t, typeFieldCount, composite.FieldCount()) - case randomValueKindAddress: - return r.randomAddressValue() + // Generate new values - case randomValueKindPath: - return r.randomPathValue() + newValues := make([]cadence.Value, typeFieldCount) - case randomValueKindEnum: - // Get a random integer subtype to be used as the raw-type of enum - typ := r.randomInt(randomValueKindWord64) + for i := range compositeType.Fields { + newValues[i] = r.randomStorableValue(inter, 0) + } - rawValue := r.generateRandomHashableValue(inter, typ).(interpreter.NumberValue) + // Update + for i, name := range compositeType.Fields { - identifier := r.randomUTF8String() + newValue := importValue(t, inter, newValues[i]) - address := r.randomAddressValue() + // Atree storage validation must be temporarily disabled + // to not report any "unreferenced slab" errors. - location := common.AddressLocation{ - Address: common.Address(address), - Name: identifier, + existed := withoutAtreeStorageValidationEnabled(inter, func() bool { + return composite.SetMember( + inter, + interpreter.EmptyLocationRange, + name, + newValue, + ) + }) + + require.True(t, existed) } - enumType := &sema.CompositeType{ - Identifier: identifier, - EnumRawType: r.intSubtype(typ), - Kind: common.CompositeKindEnum, - Location: location, + expectedValue = cadence.NewStruct(newValues). 
+ WithType(expectedValue.Type().(*cadence.StructType)) + + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) } - inter.Program.Elaboration.SetCompositeType( - enumType.ID(), - enumType, + // Composite must have same number of key-value pairs + require.Equal(t, typeFieldCount, composite.FieldCount()) + + checkComposite( + t, + inter, + composite, + expectedValue, + orgOwner, ) - enum := interpreter.NewCompositeValue( + resetStorage() + + composite = readComposite( + t, inter, - interpreter.EmptyLocationRange, - location, - enumType.QualifiedIdentifier(), - enumType.Kind, - []interpreter.CompositeField{ - { - Name: sema.EnumRawValueFieldName, - Value: rawValue, - }, - }, - common.ZeroAddress, + orgOwner, + compositeStorageMapKey, + ) + + checkComposite( + t, + inter, + composite, + expectedValue, + orgOwner, ) - if enum.GetField(inter, interpreter.EmptyLocationRange, sema.EnumRawValueFieldName) == nil { - panic("enum without raw value") + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) } - return enum + // TODO: check storage size, slab count + }) +} - default: - panic(fmt.Sprintf("unsupported: %d", n)) +func TestInterpretSmokeRandomArrayOperations(t *testing.T) { + if !*runSmokeTests { + t.Skip("smoke tests are disabled") } -} -func (r randomValueGenerator) randomSign() int { - if r.randomInt(1) == 1 { - return 1 + t.Parallel() + + orgOwner := common.Address{'A'} + + const arrayStorageMapKey = interpreter.StringStorageMapKey("array") + + writeArray := func( + inter *interpreter.Interpreter, + owner common.Address, + storageMapKey interpreter.StorageMapKey, + array *interpreter.ArrayValue, + ) { + inter.Storage(). + GetDomainStorageMap( + inter, + owner, + common.StorageDomainPathStorage, + true, + ). + WriteValue( + inter, + storageMapKey, + array, + ) } - return -1 -} + removeArray := func( + inter *interpreter.Interpreter, + owner common.Address, + storageMapKey interpreter.StorageMapKey, + ) { + inter.Storage(). + GetDomainStorageMap( + inter, + owner, + common.StorageDomainPathStorage, + true, + ). 
+ RemoveValue( + inter, + storageMapKey, + ) + } -func (r randomValueGenerator) randomAddressValue() interpreter.AddressValue { - data := make([]byte, 8) - r.rand.Read(data) - return interpreter.NewUnmeteredAddressValueFromBytes(data) -} + readArray := func( + t *testing.T, + inter *interpreter.Interpreter, + owner common.Address, + storageMapKey interpreter.StorageMapKey, + ) *interpreter.ArrayValue { + storageMap := inter.Storage().GetDomainStorageMap( + inter, + owner, + common.StorageDomainPathStorage, + false, + ) + require.NotNil(t, storageMap) -func (r randomValueGenerator) randomPathValue() interpreter.PathValue { - randomDomain := r.rand.Intn(len(common.AllPathDomains)) - identifier := r.randomUTF8String() + readValue := storageMap.ReadValue(inter, storageMapKey) + require.NotNil(t, readValue) - return interpreter.PathValue{ - Domain: common.AllPathDomains[randomDomain], - Identifier: identifier, + require.IsType(t, &interpreter.ArrayValue{}, readValue) + return readValue.(*interpreter.ArrayValue) } -} -func (r randomValueGenerator) randomDictionaryValue( - inter *interpreter.Interpreter, - currentDepth int, -) interpreter.Value { + createArray := func( + t *testing.T, + r *randomValueGenerator, + inter *interpreter.Interpreter, + ) ( + *interpreter.ArrayValue, + cadence.Array, + ) { + expectedValue := r.randomArrayValue(inter, 0) + + elements := make([]interpreter.Value, len(expectedValue.Values)) + for i, value := range expectedValue.Values { + elements[i] = importValue(t, inter, value) + } - entryCount := r.randomInt(containerMaxSize) - keyValues := make([]interpreter.Value, entryCount*2) + // Construct an array directly in the owner's account. + // However, the array is not referenced by the root of the storage yet + // (a storage map), so atree storage validation must be temporarily disabled + // to not report any "unreferenced slab" errors. - for i := 0; i < entryCount; i++ { - key := r.randomHashableValue(inter) - value := r.randomStorableValue(inter, currentDepth+1) - keyValues[i*2] = key - keyValues[i*2+1] = value + array := withoutAtreeStorageValidationEnabled( + inter, + func() *interpreter.ArrayValue { + return interpreter.NewArrayValue( + inter, + interpreter.EmptyLocationRange, + &interpreter.VariableSizedStaticType{ + Type: interpreter.PrimitiveStaticTypeAnyStruct, + }, + orgOwner, + elements..., + ) + }, + ) + + // Store the array in a storage map, so that the array's slab + // is referenced by the root of the storage. 
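randomArrayValue now produces a cadence.Array that doubles as the expected value. Its definition is earlier in the file; a hypothetical sketch, mirroring the removed interpreter-level generator that appears later in this diff, could be:

// Hypothetical sketch; the real generator is defined earlier in the file.
// It mirrors the removed interpreter-level randomArrayValue, but returns
// cadence values so that the result can serve as the expected value.
func (r randomValueGenerator) randomArrayValue(
	inter *interpreter.Interpreter,
	currentDepth int,
) cadence.Array {
	elementsCount := r.randomInt(r.containerMaxSize)
	elements := make([]cadence.Value, elementsCount)

	for i := 0; i < elementsCount; i++ {
		elements[i] = r.randomStorableValue(inter, currentDepth+1)
	}

	return cadence.NewArray(elements).
		WithType(cadence.NewVariableSizedArrayType(cadence.AnyStructType))
}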
+ + writeArray( + inter, + orgOwner, + arrayStorageMapKey, + array, + ) + + return array, expectedValue } - return interpreter.NewDictionaryValueWithAddress( - inter, - interpreter.EmptyLocationRange, - &interpreter.DictionaryStaticType{ - KeyType: interpreter.PrimitiveStaticTypeAnyStruct, - ValueType: interpreter.PrimitiveStaticTypeAnyStruct, - }, - common.ZeroAddress, - keyValues..., - ) -} + checkArray := func( + t *testing.T, + inter *interpreter.Interpreter, + array *interpreter.ArrayValue, + expectedValue cadence.Array, + expectedOwner common.Address, + ) { + require.Equal(t, len(expectedValue.Values), array.Count()) -func (r randomValueGenerator) randomInt(upperBound int) int { - return r.rand.Intn(upperBound + 1) -} + for i, value := range expectedValue.Values { + value := importValue(t, inter, value) -func (r randomValueGenerator) randomArrayValue(inter *interpreter.Interpreter, currentDepth int) interpreter.Value { - elementsCount := r.randomInt(containerMaxSize) - elements := make([]interpreter.Value, elementsCount) + element := array.Get(inter, interpreter.EmptyLocationRange, i) - for i := 0; i < elementsCount; i++ { - value := r.randomStorableValue(inter, currentDepth+1) - elements[i] = value.Clone(inter) + AssertValuesEqual(t, inter, value, element) + } + + owner := array.GetOwner() + assert.Equal(t, expectedOwner, owner) } - return interpreter.NewArrayValue( - inter, - interpreter.EmptyLocationRange, - &interpreter.VariableSizedStaticType{ - Type: interpreter.PrimitiveStaticTypeAnyStruct, - }, - common.ZeroAddress, - elements..., - ) -} + checkIteration := func( + t *testing.T, + inter *interpreter.Interpreter, + array *interpreter.ArrayValue, + expectedValue cadence.Array, + ) { + require.Equal(t, len(expectedValue.Values), array.Count()) -func (r randomValueGenerator) intSubtype(n int) sema.Type { - switch n { - // Int - case randomValueKindInt: - return sema.IntType - case randomValueKindInt8: - return sema.Int8Type - case randomValueKindInt16: - return sema.Int16Type - case randomValueKindInt32: - return sema.Int32Type - case randomValueKindInt64: - return sema.Int64Type - case randomValueKindInt128: - return sema.Int128Type - case randomValueKindInt256: - return sema.Int256Type + var iterations int - // UInt - case randomValueKindUInt: - return sema.UIntType - case randomValueKindUInt8: - return sema.UInt8Type - case randomValueKindUInt16: - return sema.UInt16Type - case randomValueKindUInt32: - return sema.UInt32Type - case randomValueKindUInt64Variant1, - randomValueKindUInt64Variant2, - randomValueKindUInt64Variant3, - randomValueKindUInt64Variant4: - return sema.UInt64Type - case randomValueKindUInt128: - return sema.UInt128Type - case randomValueKindUInt256: - return sema.UInt256Type + array.Iterate( + inter, + func(element interpreter.Value) (resume bool) { + value := importValue(t, inter, expectedValue.Values[iterations]) - // Word - case randomValueKindWord8: - return sema.Word8Type - case randomValueKindWord16: - return sema.Word16Type - case randomValueKindWord32: - return sema.Word32Type - case randomValueKindWord64: - return sema.Word64Type - case randomValueKindWord128: - return sema.Word128Type - case randomValueKindWord256: - return sema.Word256Type + AssertValuesEqual(t, inter, value, element) - default: - panic(fmt.Sprintf("unsupported: %d", n)) + iterations += 1 + + return true + }, + false, + interpreter.EmptyLocationRange, + ) + + assert.Equal(t, len(expectedValue.Values), iterations) } -} -const ( - // Hashable values - // Int* - randomValueKindInt 
= iota - randomValueKindInt8 - randomValueKindInt16 - randomValueKindInt32 - randomValueKindInt64 - randomValueKindInt128 - randomValueKindInt256 + t.Run("construction", func(t *testing.T) { - // UInt* - randomValueKindUInt - randomValueKindUInt8 - randomValueKindUInt16 - randomValueKindUInt32 - randomValueKindUInt64Variant1 - randomValueKindUInt64Variant2 - randomValueKindUInt64Variant3 - randomValueKindUInt64Variant4 - randomValueKindUInt128 - randomValueKindUInt256 + t.Parallel() - // Word* - randomValueKindWord8 - randomValueKindWord16 - randomValueKindWord32 - randomValueKindWord64 - randomValueKindWord128 - randomValueKindWord256 + r := newRandomValueGenerator(*smokeTestSeed, defaultRandomValueLimits) + t.Logf("seed: %d", r.seed) - // (U)Fix* - randomValueKindFix64 - randomValueKindUFix64 + inter, resetStorage := newRandomValueTestInterpreter(t) - // String - randomValueKindStringVariant1 - randomValueKindStringVariant2 - randomValueKindStringVariant3 - randomValueKindStringVariant4 - randomValueKindStringVariant5 + array, expectedValue := createArray(t, &r, inter) - randomValueKindBoolVariantTrue - randomValueKindBoolVariantFalse - randomValueKindPath - randomValueKindAddress - randomValueKindEnum + checkArray( + t, + inter, + array, + expectedValue, + orgOwner, + ) - // Non-hashable values - randomValueKindVoid - randomValueKindNil // `Never?` - randomValueKindCapability + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } - // Containers - randomValueKindSome - randomValueKindArrayVariant1 - randomValueKindArrayVariant2 - randomValueKindDictionaryVariant1 - randomValueKindDictionaryVariant2 - randomValueKindComposite -) + resetStorage() -func (r randomValueGenerator) randomUTF8String() string { - return r.randomUTF8StringOfSize(8) -} + array = readArray( + t, + inter, + orgOwner, + arrayStorageMapKey, + ) -func (r randomValueGenerator) randomUTF8StringOfSize(size int) string { - identifier := make([]byte, size) - r.rand.Read(identifier) - return strings.ToValidUTF8(string(identifier), "$") -} + checkArray( + t, + inter, + array, + expectedValue, + orgOwner, + ) -type valueMap struct { - values map[any]interpreter.Value - keys map[any]interpreter.Value -} + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } + }) -func newValueMap(size int) *valueMap { - return &valueMap{ - values: make(map[any]interpreter.Value, size), - keys: make(map[any]interpreter.Value, size), - } -} + t.Run("iterate", func(t *testing.T) { -type enumKey struct { - location common.Location - qualifiedIdentifier string - kind common.CompositeKind - rawValue interpreter.Value -} + t.Parallel() -func (m *valueMap) put(inter *interpreter.Interpreter, key, value interpreter.Value) { - internalKey := m.internalKey(inter, key) + r := newRandomValueGenerator(*smokeTestSeed, defaultRandomValueLimits) + t.Logf("seed: %d", r.seed) - // Deep copy enum keys. This should be fine since we use an internal key for enums. - // Deep copying other values would mess key-lookup. 
- if _, ok := key.(*interpreter.CompositeValue); ok { - key = key.Clone(inter) - } + inter, resetStorage := newRandomValueTestInterpreter(t) - m.keys[internalKey] = key - m.values[internalKey] = value.Clone(inter) -} + array, expectedValue := createArray(t, &r, inter) -func (m *valueMap) get(inter *interpreter.Interpreter, key interpreter.Value) (interpreter.Value, bool) { - internalKey := m.internalKey(inter, key) - value, ok := m.values[internalKey] - return value, ok -} + checkArray( + t, + inter, + array, + expectedValue, + orgOwner, + ) -func (m *valueMap) foreach(apply func(key, value interpreter.Value) (exit bool)) { - for internalKey, key := range m.keys { - value := m.values[internalKey] - exit := apply(key, value) + checkIteration( + t, + inter, + array, + expectedValue, + ) - if exit { - return + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) } - } -} -func (m *valueMap) internalKey(inter *interpreter.Interpreter, key interpreter.Value) any { - switch key := key.(type) { - case *interpreter.StringValue: - return *key - case *interpreter.CompositeValue: - return enumKey{ - location: key.Location, - qualifiedIdentifier: key.QualifiedIdentifier, - kind: key.Kind, - rawValue: key.GetField(inter, interpreter.EmptyLocationRange, sema.EnumRawValueFieldName), + resetStorage() + + array = readArray( + t, + inter, + orgOwner, + arrayStorageMapKey, + ) + + checkArray( + t, + inter, + array, + expectedValue, + orgOwner, + ) + + checkIteration( + t, + inter, + array, + expectedValue, + ) + + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) } - case interpreter.Value: - return key - default: - panic("unreachable") - } -} -func (m *valueMap) size() int { - return len(m.keys) -} + }) -// This test is a reproducer for "slab was not reachable from leaves" false alarm. -// https://github.com/onflow/cadence/pull/2882#issuecomment-1781298107 -// In this test, storage.CheckHealth() should be called after array.DeepRemove(), -// not in the middle of array.DeepRemove(). -// CheckHealth() is called in the middle of array.DeepRemove() when: -// - array.DeepRemove() calls childArray1 and childArray2 DeepRemove() -// - DeepRemove() calls maybeValidateAtreeValue() -// - maybeValidateAtreeValue() calls CheckHealth() -func TestCheckStorageHealthInMiddleOfDeepRemove(t *testing.T) { + t.Run("move (transfer and deep remove)", func(t *testing.T) { - storage := newUnmeteredInMemoryStorage() - inter, err := interpreter.NewInterpreter( + t.Parallel() + + r := newRandomValueGenerator(*smokeTestSeed, defaultRandomValueLimits) + t.Logf("seed: %d", r.seed) + + inter, resetStorage := newRandomValueTestInterpreter(t) + + original, expectedValue := createArray(t, &r, inter) + + checkArray( + t, + inter, + original, + expectedValue, + orgOwner, + ) + + resetStorage() + + original = readArray( + t, + inter, + orgOwner, + arrayStorageMapKey, + ) + + checkArray( + t, + inter, + original, + expectedValue, + orgOwner, + ) + + // Transfer the array to a new owner + + newOwner := common.Address{'B'} + + transferred := original.Transfer( + inter, + interpreter.EmptyLocationRange, + atree.Address(newOwner), + false, + nil, + nil, + false, + ).(*interpreter.ArrayValue) + + // Store the transferred array in a storage map, so that the array's slab + // is referenced by the root of the storage. 
+ + const transferredStorageMapKey = interpreter.StringStorageMapKey("transferred") + + writeArray( + inter, + newOwner, + transferredStorageMapKey, + transferred, + ) + + withoutAtreeStorageValidationEnabled(inter, func() struct{} { + + removeArray( + inter, + orgOwner, + arrayStorageMapKey, + ) + + return struct{}{} + }) + + checkArray( + t, + inter, + transferred, + expectedValue, + newOwner, + ) + + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } + + resetStorage() + + transferred = readArray( + t, + inter, + newOwner, + transferredStorageMapKey, + ) + + checkArray( + t, + inter, + transferred, + expectedValue, + newOwner, + ) + + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } + + // TODO: check deep removal cleaned up everything in original account (storage size, slab count) + }) + + t.Run("insert", func(t *testing.T) { + t.Parallel() + + r := newRandomValueGenerator(*smokeTestSeed, defaultRandomValueLimits) + t.Logf("seed: %d", r.seed) + + inter, resetStorage := newRandomValueTestInterpreter(t) + + array, expectedValue := createArray(t, &r, inter) + + checkArray( + t, + inter, + array, + expectedValue, + orgOwner, + ) + + resetStorage() + + array = readArray( + t, + inter, + orgOwner, + arrayStorageMapKey, + ) + + checkArray( + t, + inter, + array, + expectedValue, + orgOwner, + ) + + existingValueCount := len(expectedValue.Values) + + // Insert new values into the array. + + newValueCount := r.randomInt(r.containerMaxSize) + + for i := 0; i < newValueCount; i++ { + + value := r.randomStorableValue(inter, 0) + importedValue := importValue(t, inter, value) + + // Generate a random index + index := 0 + if existingValueCount > 0 { + index = r.rand.Intn(existingValueCount) + } + + expectedValue.Values = append(expectedValue.Values, nil) + copy(expectedValue.Values[index+1:], expectedValue.Values[index:]) + expectedValue.Values[index] = value + + // Atree storage validation must be temporarily disabled + // to not report any "unreferenced slab" errors. 
+ + _ = withoutAtreeStorageValidationEnabled(inter, func() struct{} { + + array.Insert( + inter, + interpreter.EmptyLocationRange, + index, + importedValue, + ) + + return struct{}{} + }) + } + + checkArray( + t, + inter, + array, + expectedValue, + orgOwner, + ) + + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } + + resetStorage() + + array = readArray( + t, + inter, + orgOwner, + arrayStorageMapKey, + ) + + checkArray( + t, + inter, + array, + expectedValue, + orgOwner, + ) + + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } + + }) + + t.Run("remove", func(t *testing.T) { + t.Parallel() + + r := newRandomValueGenerator(*smokeTestSeed, defaultRandomValueLimits) + t.Logf("seed: %d", r.seed) + + inter, resetStorage := newRandomValueTestInterpreter(t) + + array, expectedValue := createArray(t, &r, inter) + + checkArray( + t, + inter, + array, + expectedValue, + orgOwner, + ) + + resetStorage() + + array = readArray( + t, + inter, + orgOwner, + arrayStorageMapKey, + ) + + checkArray( + t, + inter, + array, + expectedValue, + orgOwner, + ) + + // Random remove + numberOfValues := len(expectedValue.Values) + for i := 0; i < numberOfValues; i++ { + + index := r.rand.Intn(len(expectedValue.Values)) + + value := importValue(t, inter, expectedValue.Values[index]) + + expectedValue.Values = append( + expectedValue.Values[:index], + expectedValue.Values[index+1:]..., + ) + + // Atree storage validation must be temporarily disabled + // to not report any "unreferenced slab" errors. + + removedValue := withoutAtreeStorageValidationEnabled(inter, func() interpreter.Value { + return array.Remove(inter, interpreter.EmptyLocationRange, index) + }) + + // Removed value must be same as the original value + AssertValuesEqual(t, inter, value, removedValue) + } + + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } + + // Array must be empty + require.Equal(t, 0, array.Count()) + + checkArray( + t, + inter, + array, + expectedValue, + orgOwner, + ) + + resetStorage() + + array = readArray( + t, + inter, + orgOwner, + arrayStorageMapKey, + ) + + checkArray( + t, + inter, + array, + expectedValue, + orgOwner, + ) + + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } + + // TODO: check storage size, slab count + }) + + t.Run("update", func(t *testing.T) { + t.Parallel() + + r := newRandomValueGenerator(*smokeTestSeed, defaultRandomValueLimits) + t.Logf("seed: %d", r.seed) + + inter, resetStorage := newRandomValueTestInterpreter(t) + + array, expectedValue := createArray(t, &r, inter) + + checkArray( + t, + inter, + array, + expectedValue, + orgOwner, + ) + + resetStorage() + + array = readArray( + t, + inter, + orgOwner, + arrayStorageMapKey, + ) + + checkArray( + t, + inter, + array, + expectedValue, + orgOwner, + ) + + elementCount := array.Count() + + // Random update + for i := 0; i < len(expectedValue.Values); i++ { + + index := r.rand.Intn(len(expectedValue.Values)) + + expectedValue.Values[index] = r.randomStorableValue(inter, 0) + newValue := importValue(t, inter, expectedValue.Values[index]) + + // Atree storage validation must be temporarily disabled + // to not report any "unreferenced slab" errors. 
+ + withoutAtreeStorageValidationEnabled(inter, func() struct{} { + array.Set( + inter, + interpreter.EmptyLocationRange, + index, + newValue, + ) + return struct{}{} + }) + + } + + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } + + // Array must have same number of elements + require.Equal(t, elementCount, array.Count()) + + checkArray( + t, + inter, + array, + expectedValue, + orgOwner, + ) + + resetStorage() + + array = readArray( + t, + inter, + orgOwner, + arrayStorageMapKey, + ) + + checkArray( + t, + inter, + array, + expectedValue, + orgOwner, + ) + + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } + + // TODO: check storage size, slab count + }) +} + +func TestInterpretSmokeRandomNestedArrayOperations(t *testing.T) { + if !*runSmokeTests { + t.Skip("smoke tests are disabled") + } + + owner := common.Address{'A'} + + limits := randomValueLimits{ + containerMaxDepth: 6, + containerMaxSize: 20, + compositeMaxFields: 10, + } + + const opCount = 5 + + const arrayStorageMapKey = interpreter.StringStorageMapKey("array") + + writeArray := func( + inter *interpreter.Interpreter, + owner common.Address, + storageMapKey interpreter.StorageMapKey, + array *interpreter.ArrayValue, + ) { + inter.Storage(). + GetDomainStorageMap( + inter, + owner, + common.StorageDomainPathStorage, + true, + ). + WriteValue( + inter, + storageMapKey, + array, + ) + } + + readArray := func( + inter *interpreter.Interpreter, + owner common.Address, + storageMapKey interpreter.StorageMapKey, + ) *interpreter.ArrayValue { + storageMap := inter.Storage().GetDomainStorageMap( + inter, + owner, + common.StorageDomainPathStorage, + false, + ) + require.NotNil(t, storageMap) + + readValue := storageMap.ReadValue(inter, storageMapKey) + require.NotNil(t, readValue) + + require.IsType(t, &interpreter.ArrayValue{}, readValue) + return readValue.(*interpreter.ArrayValue) + } + + getNestedArray := func( + inter *interpreter.Interpreter, + rootValue interpreter.Value, + owner common.Address, + path []pathElement, + ) *interpreter.ArrayValue { + nestedValue := getNestedValue(t, inter, rootValue, path) + require.IsType(t, &interpreter.ArrayValue{}, nestedValue) + nestedArray := nestedValue.(*interpreter.ArrayValue) + require.Equal(t, owner, nestedArray.GetOwner()) + return nestedArray + } + + createValue := func( + t *testing.T, + r *randomValueGenerator, + inter *interpreter.Interpreter, + predicate func(cadence.Array) bool, + ) ( + actualRootValue interpreter.Value, + generatedValue cadence.Value, + path []pathElement, + ) { + + // It does not matter what the root value is, + // as long as it contains a nested array, + // which it is nested inside an optional, + // and it satisfies the given predicate. + + for { + generatedValue = r.randomArrayValue(inter, 0) + + path = findNestedCadenceValue( + generatedValue, + func(value cadence.Value, path []pathElement) bool { + array, ok := value.(cadence.Array) + if !ok { + return false + } + + if !predicate(array) { + return false + } + + var foundSome bool + for _, element := range path { + if _, ok := element.(somePathElement); ok { + foundSome = true + break + } + } + return foundSome + }, + ) + if path != nil { + break + } + } + + actualRootValue = importValue(t, inter, generatedValue).Transfer( + inter, + interpreter.EmptyLocationRange, + atree.Address(owner), + false, + nil, + nil, + // TODO: is has no parent container = true correct? 
+ true, + ) + + // Store the array in a storage map, so that the array's slab + // is referenced by the root of the storage. + + writeArray( + inter, + owner, + arrayStorageMapKey, + actualRootValue.(*interpreter.ArrayValue), + ) + + return + } + + checkIteration := func( + t *testing.T, + inter *interpreter.Interpreter, + actualArray *interpreter.ArrayValue, + expectedArray *interpreter.ArrayValue, + ) { + expectedCount := expectedArray.Count() + require.Equal(t, expectedCount, actualArray.Count()) + + var iterations int + + actualArray.Iterate( + inter, + func(element interpreter.Value) (resume bool) { + + expectedElement := expectedArray.Get( + inter, + interpreter.EmptyLocationRange, + iterations, + ) + AssertValuesEqual(t, inter, expectedElement, element) + + iterations += 1 + + return true + }, + false, + interpreter.EmptyLocationRange, + ) + + assert.Equal(t, expectedCount, iterations) + } + + t.Run("insert", func(t *testing.T) { + t.Parallel() + + inter, resetStorage := newRandomValueTestInterpreter(t) + + r := newRandomValueGenerator( + *smokeTestSeed, + limits, + ) + t.Logf("seed: %d", r.seed) + + actualRootValue, generatedValue, path := + createValue( + t, + &r, + inter, + // Accept any array, even empty ones, + // given we're only inserting + func(array cadence.Array) bool { + return true + }, + ) + + actualNestedArray := getNestedArray( + inter, + actualRootValue, + owner, + path, + ) + + type insert struct { + index int + value cadence.Value + } + + performInsert := func(array *interpreter.ArrayValue, insert insert) { + + newValue := importValue(t, inter, insert.value) + + // Atree storage validation must be temporarily disabled + // to not report any "unreferenced slab" errors. + + withoutAtreeStorageValidationEnabled(inter, func() struct{} { + array.Insert( + inter, + interpreter.EmptyLocationRange, + insert.index, + newValue, + ) + return struct{}{} + }) + + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } + } + + // We use the generated value twice: once as the expected value, and once as the actual value. + // We first perform mutations on the actual value, and then compare it to the expected value. + // The actual value is stored in an account and reloaded. + // The expected value is temporary (zero address), and is not stored in storage. + // Given that the storage reset destroys the data for the expected value because it is temporary, + // we re-import it each time and perform all operations on it from scratch. + + var inserts []insert + + elementCount := actualNestedArray.Count() + + for i := 0; i < opCount; i++ { + var index int + elementCountAfterInserts := elementCount + i + if elementCountAfterInserts > 0 { + index = r.rand.Intn(elementCountAfterInserts) + } + + inserts = append( + inserts, + insert{ + index: index, + value: r.randomStorableValue(inter, 0), + }, + ) + } + + for i, insert := range inserts { + + resetStorage() + + actualRootValue = readArray(inter, owner, arrayStorageMapKey) + actualNestedArray = getNestedArray( + inter, + actualRootValue, + owner, + path, + ) + + performInsert( + actualNestedArray, + insert, + ) + + // Re-create the expected value from scratch, + // by importing the generated value, and performing all updates on it + // that have been performed on the actual value so far. 
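getNestedArray relies on a small path representation (pathElement, somePathElement, and friends) and a getNestedValue helper, both defined earlier in the file. A hypothetical sketch covering just the cases exercised here, array indices and optionals, could look like the following; the real helper presumably also handles dictionary keys and composite fields:

// Hypothetical sketch of the path machinery; only somePathElement is visible
// in this hunk, the other names and the walking logic are assumptions.
type pathElement interface{ isPathElement() }

type arrayPathElement struct{ index int }
type somePathElement struct{}

func (arrayPathElement) isPathElement() {}
func (somePathElement) isPathElement()  {}

// getNestedValue walks the path from the root value to the nested value,
// indexing into arrays and unwrapping optionals along the way.
func getNestedValue(
	t *testing.T,
	inter *interpreter.Interpreter,
	value interpreter.Value,
	path []pathElement,
) interpreter.Value {
	for _, element := range path {
		switch element := element.(type) {
		case arrayPathElement:
			array, ok := value.(*interpreter.ArrayValue)
			require.True(t, ok)
			value = array.Get(inter, interpreter.EmptyLocationRange, element.index)

		case somePathElement:
			some, ok := value.(*interpreter.SomeValue)
			require.True(t, ok)
			value = some.InnerValue(inter, interpreter.EmptyLocationRange)

		default:
			t.Fatalf("unexpected path element: %T", element)
		}
	}
	return value
}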
+ + expectedRootValue := importValue(t, inter, generatedValue) + expectedNestedArray := getNestedArray( + inter, + expectedRootValue, + common.ZeroAddress, + path, + ) + + for _, insert := range inserts[:i+1] { + + performInsert( + expectedNestedArray, + insert, + ) + } + AssertValuesEqual(t, inter, expectedRootValue, actualRootValue) + + checkIteration( + t, + inter, + actualNestedArray, + expectedNestedArray, + ) + } + }) + + t.Run("update", func(t *testing.T) { + t.Parallel() + + inter, resetStorage := newRandomValueTestInterpreter(t) + + r := newRandomValueGenerator( + *smokeTestSeed, + limits, + ) + t.Logf("seed: %d", r.seed) + + actualRootValue, generatedValue, path := + createValue( + t, + &r, + inter, + // Generate a non-empty array, + // so we have at least one element to update + func(array cadence.Array) bool { + return len(array.Values) > 0 + }, + ) + + actualNestedArray := getNestedArray( + inter, + actualRootValue, + owner, + path, + ) + + elementCount := actualNestedArray.Count() + require.Greater(t, elementCount, 0) + + type update struct { + index int + value cadence.Value + } + + performUpdate := func(array *interpreter.ArrayValue, update update) { + + newValue := importValue(t, inter, update.value) + + // Atree storage validation must be temporarily disabled + // to not report any "unreferenced slab" errors. + + withoutAtreeStorageValidationEnabled(inter, func() struct{} { + array.Set( + inter, + interpreter.EmptyLocationRange, + update.index, + newValue, + ) + return struct{}{} + }) + + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } + + // Array must have same number of elements + require.Equal(t, elementCount, array.Count()) + } + + // We use the generated value twice: once as the expected value, and once as the actual value. + // We first perform mutations on the actual value, and then compare it to the expected value. + // The actual value is stored in an account and reloaded. + // The expected value is temporary (zero address), and is not stored in storage. + // Given that the storage reset destroys the data for the expected value because it is temporary, + // we re-import it each time and perform all operations on it from scratch. + + var updates []update + + for i := 0; i < opCount; i++ { + updates = append( + updates, + update{ + index: r.rand.Intn(elementCount), + value: r.randomStorableValue(inter, 0), + }, + ) + } + + for i, update := range updates { + + resetStorage() + + actualRootValue = readArray(inter, owner, arrayStorageMapKey) + actualNestedArray = getNestedArray( + inter, + actualRootValue, + owner, + path, + ) + + performUpdate( + actualNestedArray, + update, + ) + + // Re-create the expected value from scratch, + // by importing the generated value, and performing all updates on it + // that have been performed on the actual value so far. 
+ + expectedRootValue := importValue(t, inter, generatedValue) + expectedNestedArray := getNestedArray( + inter, + expectedRootValue, + common.ZeroAddress, + path, + ) + + for _, update := range updates[:i+1] { + + performUpdate( + expectedNestedArray, + update, + ) + } + AssertValuesEqual(t, inter, expectedRootValue, actualRootValue) + + checkIteration( + t, + inter, + actualNestedArray, + expectedNestedArray, + ) + } + }) + + t.Run("remove", func(t *testing.T) { + t.Parallel() + + inter, resetStorage := newRandomValueTestInterpreter(t) + + r := newRandomValueGenerator( + *smokeTestSeed, + limits, + ) + t.Logf("seed: %d", r.seed) + + actualRootValue, generatedValue, path := + createValue( + t, + &r, + inter, + func(array cadence.Array) bool { + return len(array.Values) >= opCount + }, + ) + + actualNestedArray := getNestedArray( + inter, + actualRootValue, + owner, + path, + ) + + elementCount := actualNestedArray.Count() + require.GreaterOrEqual(t, elementCount, opCount) + + performRemove := func(array *interpreter.ArrayValue, index int) { + + // Atree storage validation must be temporarily disabled + // to not report any "unreferenced slab" errors. + + withoutAtreeStorageValidationEnabled(inter, func() struct{} { + array.Remove( + inter, + interpreter.EmptyLocationRange, + index, + ) + return struct{}{} + }) + + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } + } + + // We use the generated value twice: once as the expected value, and once as the actual value. + // We first perform mutations on the actual value, and then compare it to the expected value. + // The actual value is stored in an account and reloaded. + // The expected value is temporary (zero address), and is not stored in storage. + // Given that the storage reset destroys the data for the expected value because it is temporary, + // we re-import it each time and perform all operations on it from scratch. + + var removes []int + + for i := 0; i < opCount; i++ { + index := r.rand.Intn(elementCount - i) + removes = append(removes, index) + } + + for i, index := range removes { + + resetStorage() + + actualRootValue = readArray(inter, owner, arrayStorageMapKey) + actualNestedArray = getNestedArray( + inter, + actualRootValue, + owner, + path, + ) + + performRemove( + actualNestedArray, + index, + ) + + // Re-create the expected value from scratch, + // by importing the generated value, and performing all updates on it + // that have been performed on the actual value so far. + + expectedRootValue := importValue(t, inter, generatedValue) + expectedNestedArray := getNestedArray( + inter, + expectedRootValue, + common.ZeroAddress, + path, + ) + + for _, index := range removes[:i+1] { + + performRemove( + expectedNestedArray, + index, + ) + } + AssertValuesEqual(t, inter, expectedRootValue, actualRootValue) + + checkIteration( + t, + inter, + actualNestedArray, + expectedNestedArray, + ) + } + }) +} + +func TestInterpretSmokeRandomNestedDictionaryOperations(t *testing.T) { + if !*runSmokeTests { + t.Skip("smoke tests are disabled") + } + + owner := common.Address{'A'} + + limits := randomValueLimits{ + containerMaxDepth: 6, + containerMaxSize: 20, + compositeMaxFields: 10, + } + + const opCount = 5 + + const dictionaryStorageMapKey = interpreter.StringStorageMapKey("dictionary") + + writeDictionary := func( + inter *interpreter.Interpreter, + owner common.Address, + storageMapKey interpreter.StorageMapKey, + dictionary *interpreter.DictionaryValue, + ) { + inter.Storage(). 
+ GetDomainStorageMap( + inter, + owner, + common.StorageDomainPathStorage, + true, + ). + WriteValue( + inter, + storageMapKey, + dictionary, + ) + } + + readDictionary := func( + inter *interpreter.Interpreter, + owner common.Address, + storageMapKey interpreter.StorageMapKey, + ) *interpreter.DictionaryValue { + storageMap := inter.Storage().GetDomainStorageMap( + inter, + owner, + common.StorageDomainPathStorage, + false, + ) + require.NotNil(t, storageMap) + + readValue := storageMap.ReadValue(inter, storageMapKey) + require.NotNil(t, readValue) + + require.IsType(t, &interpreter.DictionaryValue{}, readValue) + return readValue.(*interpreter.DictionaryValue) + } + + getNestedDictionary := func( + inter *interpreter.Interpreter, + rootValue interpreter.Value, + owner common.Address, + path []pathElement, + ) *interpreter.DictionaryValue { + nestedValue := getNestedValue(t, inter, rootValue, path) + require.IsType(t, &interpreter.DictionaryValue{}, nestedValue) + nestedDictionary := nestedValue.(*interpreter.DictionaryValue) + require.Equal(t, owner, nestedDictionary.GetOwner()) + return nestedDictionary + } + + createValue := func( + t *testing.T, + r *randomValueGenerator, + inter *interpreter.Interpreter, + predicate func(cadence.Dictionary) bool, + ) ( + actualRootValue interpreter.Value, + generatedValue cadence.Value, + path []pathElement, + ) { + + // It does not matter what the root value is, + // as long as it contains a nested dictionary, + // which it is nested inside an optional, + // and it satisfies the given predicate. + + for { + generatedValue = r.randomDictionaryValue(inter, 0) + + path = findNestedCadenceValue( + generatedValue, + func(value cadence.Value, path []pathElement) bool { + dictionary, ok := value.(cadence.Dictionary) + if !ok { + return false + } + + if !predicate(dictionary) { + return false + } + + var foundSome bool + for _, element := range path { + if _, ok := element.(somePathElement); ok { + foundSome = true + break + } + } + return foundSome + }, + ) + if path != nil { + break + } + } + + actualRootValue = importValue(t, inter, generatedValue).Transfer( + inter, + interpreter.EmptyLocationRange, + atree.Address(owner), + false, + nil, + nil, + // TODO: is has no parent container = true correct? + true, + ) + + // Store the dictionary in a storage map, so that the dictionary's slab + // is referenced by the root of the storage. 
+ + writeDictionary( + inter, + owner, + dictionaryStorageMapKey, + actualRootValue.(*interpreter.DictionaryValue), + ) + + return + } + + checkIteration := func( + t *testing.T, + inter *interpreter.Interpreter, + actualDictionary *interpreter.DictionaryValue, + expectedDictionary *interpreter.DictionaryValue, + ) { + expectedCount := expectedDictionary.Count() + require.Equal(t, expectedCount, actualDictionary.Count()) + + var iterations int + + actualDictionary.Iterate( + inter, + interpreter.EmptyLocationRange, + func(key, element interpreter.Value) (resume bool) { + + expectedElement, exists := expectedDictionary.Get( + inter, + interpreter.EmptyLocationRange, + key, + ) + require.True(t, exists) + AssertValuesEqual(t, inter, expectedElement, element) + + iterations += 1 + + return true + }, + ) + + assert.Equal(t, expectedCount, iterations) + } + + t.Run("insert", func(t *testing.T) { + t.Parallel() + + inter, resetStorage := newRandomValueTestInterpreter(t) + + r := newRandomValueGenerator( + *smokeTestSeed, + limits, + ) + t.Logf("seed: %d", r.seed) + + actualRootValue, generatedValue, path := + createValue( + t, + &r, + inter, + // Accept any dictionary, even empty ones, + // given we're only inserting + func(dictionary cadence.Dictionary) bool { + return true + }, + ) + + actualNestedDictionary := getNestedDictionary( + inter, + actualRootValue, + owner, + path, + ) + + type insert struct { + key cadence.Value + value cadence.Value + } + + performInsert := func(dictionary *interpreter.DictionaryValue, insert insert) { + + newKey := importValue(t, inter, insert.key) + newValue := importValue(t, inter, insert.value) + + // Atree storage validation must be temporarily disabled + // to not report any "unreferenced slab" errors. + + withoutAtreeStorageValidationEnabled(inter, func() struct{} { + dictionary.Insert( + inter, + interpreter.EmptyLocationRange, + newKey, + newValue, + ) + return struct{}{} + }) + + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } + } + + // We use the generated value twice: once as the expected value, and once as the actual value. + // We first perform mutations on the actual value, and then compare it to the expected value. + // The actual value is stored in an account and reloaded. + // The expected value is temporary (zero address), and is not stored in storage. + // Given that the storage reset destroys the data for the expected value because it is temporary, + // we re-import it each time and perform all operations on it from scratch. 
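+		// Generate opCount inserts up front. Each key must be new:
+		// neither already present in the nested dictionary nor used by an earlier insert,
+		// so every operation adds a fresh entry.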
+ + var inserts []insert + insertSet := map[any]struct{}{} + + for i := 0; i < opCount; i++ { + // Generate a unique key + var key cadence.Value + for { + key = r.randomHashableValue(inter) + + importedKey := importValue(t, inter, key) + if actualNestedDictionary.ContainsKey( + inter, + interpreter.EmptyLocationRange, + importedKey, + ) { + continue + } + + mapKey := mapKey(inter, importedKey) + if _, ok := insertSet[mapKey]; ok { + continue + } + insertSet[mapKey] = struct{}{} + + break + } + + inserts = append( + inserts, + insert{ + key: key, + value: r.randomStorableValue(inter, 0), + }, + ) + } + + for i, insert := range inserts { + + resetStorage() + + actualRootValue = readDictionary(inter, owner, dictionaryStorageMapKey) + actualNestedDictionary = getNestedDictionary( + inter, + actualRootValue, + owner, + path, + ) + + performInsert( + actualNestedDictionary, + insert, + ) + + // Re-create the expected value from scratch, + // by importing the generated value, and performing all updates on it + // that have been performed on the actual value so far. + + expectedRootValue := importValue(t, inter, generatedValue) + expectedNestedDictionary := getNestedDictionary( + inter, + expectedRootValue, + common.ZeroAddress, + path, + ) + + for _, insert := range inserts[:i+1] { + + performInsert( + expectedNestedDictionary, + insert, + ) + } + AssertValuesEqual(t, inter, expectedRootValue, actualRootValue) + + checkIteration( + t, + inter, + actualNestedDictionary, + expectedNestedDictionary, + ) + } + }) + + t.Run("update", func(t *testing.T) { + t.Parallel() + + inter, resetStorage := newRandomValueTestInterpreter(t) + + r := newRandomValueGenerator( + *smokeTestSeed, + limits, + ) + t.Logf("seed: %d", r.seed) + + actualRootValue, generatedValue, path := + createValue( + t, + &r, + inter, + // Generate a non-empty dictionary, + // so we have at least one element to update + func(dictionary cadence.Dictionary) bool { + return len(dictionary.Pairs) > 0 + }, + ) + + actualNestedDictionary := getNestedDictionary( + inter, + actualRootValue, + owner, + path, + ) + + elementCount := actualNestedDictionary.Count() + require.Greater(t, elementCount, 0) + + type update struct { + key cadence.Value + value cadence.Value + } + + performUpdate := func(dictionary *interpreter.DictionaryValue, update update) { + + key := importValue(t, inter, update.key) + newValue := importValue(t, inter, update.value) + + // Atree storage validation must be temporarily disabled + // to not report any "unreferenced slab" errors. + + withoutAtreeStorageValidationEnabled(inter, func() struct{} { + dictionary.SetKey( + inter, + interpreter.EmptyLocationRange, + key, + interpreter.NewUnmeteredSomeValueNonCopying(newValue), + ) + return struct{}{} + }) + + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } + + // Dictionary must have same number of elements + require.Equal(t, elementCount, dictionary.Count()) + } + + // We use the generated value twice: once as the expected value, and once as the actual value. + // We first perform mutations on the actual value, and then compare it to the expected value. + // The actual value is stored in an account and reloaded. + // The expected value is temporary (zero address), and is not stored in storage. + // Given that the storage reset destroys the data for the expected value because it is temporary, + // we re-import it each time and perform all operations on it from scratch. 
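+		// Updates target entries that already exist: export every current key
+		// to a Cadence value so it can be re-imported and looked up again
+		// after each storage reset.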
+ + keys := make([]cadence.Value, 0, elementCount) + + actualNestedDictionary.IterateKeys( + inter, + interpreter.EmptyLocationRange, + func(key interpreter.Value) (resume bool) { + cadenceKey, err := runtime.ExportValue( + key, + inter, + interpreter.EmptyLocationRange, + ) + require.NoError(t, err) + + keys = append(keys, cadenceKey) + + return true + }, + ) + + var updates []update + + for i := 0; i < opCount; i++ { + index := r.rand.Intn(elementCount) + + updates = append( + updates, + update{ + key: keys[index], + value: r.randomStorableValue(inter, 0), + }, + ) + } + + for i, update := range updates { + + resetStorage() + + actualRootValue = readDictionary(inter, owner, dictionaryStorageMapKey) + actualNestedDictionary = getNestedDictionary( + inter, + actualRootValue, + owner, + path, + ) + + performUpdate( + actualNestedDictionary, + update, + ) + + // Re-create the expected value from scratch, + // by importing the generated value, and performing all updates on it + // that have been performed on the actual value so far. + + expectedRootValue := importValue(t, inter, generatedValue) + expectedNestedDictionary := getNestedDictionary( + inter, + expectedRootValue, + common.ZeroAddress, + path, + ) + + for _, update := range updates[:i+1] { + + performUpdate( + expectedNestedDictionary, + update, + ) + } + AssertValuesEqual(t, inter, expectedRootValue, actualRootValue) + + checkIteration( + t, + inter, + actualNestedDictionary, + expectedNestedDictionary, + ) + } + }) + + t.Run("remove", func(t *testing.T) { + t.Parallel() + + inter, resetStorage := newRandomValueTestInterpreter(t) + + r := newRandomValueGenerator( + *smokeTestSeed, + limits, + ) + t.Logf("seed: %d", r.seed) + + actualRootValue, generatedValue, path := + createValue( + t, + &r, + inter, + func(dictionary cadence.Dictionary) bool { + return len(dictionary.Pairs) >= opCount + }, + ) + + actualNestedDictionary := getNestedDictionary( + inter, + actualRootValue, + owner, + path, + ) + + elementCount := actualNestedDictionary.Count() + require.GreaterOrEqual(t, elementCount, opCount) + + performRemove := func(dictionary *interpreter.DictionaryValue, key cadence.Value) { + + // Atree storage validation must be temporarily disabled + // to not report any "unreferenced slab" errors. + + withoutAtreeStorageValidationEnabled(inter, func() struct{} { + dictionary.Remove( + inter, + interpreter.EmptyLocationRange, + importValue(t, inter, key), + ) + return struct{}{} + }) + + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } + } + + // We use the generated value twice: once as the expected value, and once as the actual value. + // We first perform mutations on the actual value, and then compare it to the expected value. + // The actual value is stored in an account and reloaded. + // The expected value is temporary (zero address), and is not stored in storage. + // Given that the storage reset destroys the data for the expected value because it is temporary, + // we re-import it each time and perform all operations on it from scratch. 
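+		// Collect the existing keys and pick opCount distinct ones to remove;
+		// removeSet ensures the same key is not selected twice.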
+ + keys := make([]interpreter.Value, 0, elementCount) + + actualNestedDictionary.IterateKeys( + inter, + interpreter.EmptyLocationRange, + func(key interpreter.Value) (resume bool) { + + keys = append(keys, key) + + return true + }, + ) + + var removes []cadence.Value + removeSet := map[any]struct{}{} + + for i := 0; i < opCount; i++ { + // Find a unique key + var key interpreter.Value + for { + key = keys[r.rand.Intn(elementCount)] + + mapKey := mapKey(inter, key) + if _, ok := removeSet[mapKey]; ok { + continue + } + removeSet[mapKey] = struct{}{} + + break + } + + cadenceKey, err := runtime.ExportValue( + key, + inter, + interpreter.EmptyLocationRange, + ) + require.NoError(t, err) + + removes = append(removes, cadenceKey) + } + + for i, index := range removes { + + resetStorage() + + actualRootValue = readDictionary(inter, owner, dictionaryStorageMapKey) + actualNestedDictionary = getNestedDictionary( + inter, + actualRootValue, + owner, + path, + ) + + performRemove( + actualNestedDictionary, + index, + ) + + // Re-create the expected value from scratch, + // by importing the generated value, and performing all updates on it + // that have been performed on the actual value so far. + + expectedRootValue := importValue(t, inter, generatedValue) + expectedNestedDictionary := getNestedDictionary( + inter, + expectedRootValue, + common.ZeroAddress, + path, + ) + + for _, index := range removes[:i+1] { + + performRemove( + expectedNestedDictionary, + index, + ) + } + AssertValuesEqual(t, inter, expectedRootValue, actualRootValue) + + checkIteration( + t, + inter, + actualNestedDictionary, + expectedNestedDictionary, + ) + } + }) +} + +func TestInterpretSmokeRandomNestedCompositeOperations(t *testing.T) { + if !*runSmokeTests { + t.Skip("smoke tests are disabled") + } + + owner := common.Address{'A'} + + limits := randomValueLimits{ + containerMaxDepth: 6, + containerMaxSize: 20, + compositeMaxFields: 10, + } + + const opCount = 5 + + const compositeStorageMapKey = interpreter.StringStorageMapKey("composite") + + writeComposite := func( + inter *interpreter.Interpreter, + owner common.Address, + storageMapKey interpreter.StorageMapKey, + composite *interpreter.CompositeValue, + ) { + inter.Storage(). + GetDomainStorageMap( + inter, + owner, + common.StorageDomainPathStorage, + true, + ). 
+ WriteValue( + inter, + storageMapKey, + composite, + ) + } + + readComposite := func( + inter *interpreter.Interpreter, + owner common.Address, + storageMapKey interpreter.StorageMapKey, + ) *interpreter.CompositeValue { + storageMap := inter.Storage().GetDomainStorageMap( + inter, + owner, + common.StorageDomainPathStorage, + false, + ) + require.NotNil(t, storageMap) + + readValue := storageMap.ReadValue(inter, storageMapKey) + require.NotNil(t, readValue) + + require.IsType(t, &interpreter.CompositeValue{}, readValue) + return readValue.(*interpreter.CompositeValue) + } + + getNestedComposite := func( + inter *interpreter.Interpreter, + rootValue interpreter.Value, + owner common.Address, + path []pathElement, + ) *interpreter.CompositeValue { + nestedValue := getNestedValue(t, inter, rootValue, path) + require.IsType(t, &interpreter.CompositeValue{}, nestedValue) + nestedComposite := nestedValue.(*interpreter.CompositeValue) + require.Equal(t, owner, nestedComposite.GetOwner()) + return nestedComposite + } + + createValue := func( + t *testing.T, + r *randomValueGenerator, + inter *interpreter.Interpreter, + predicate func(cadence.Composite) bool, + ) ( + actualRootValue interpreter.Value, + generatedValue cadence.Value, + path []pathElement, + ) { + + // It does not matter what the root value is, + // as long as it contains a nested composite, + // which it is nested inside an optional, + // and it satisfies the given predicate. + + for { + generatedValue = r.randomStructValue(inter, 0) + + path = findNestedCadenceValue( + generatedValue, + func(value cadence.Value, path []pathElement) bool { + composite, ok := value.(cadence.Struct) + if !ok { + return false + } + + if !predicate(composite) { + return false + } + + var foundSome bool + for _, element := range path { + if _, ok := element.(somePathElement); ok { + foundSome = true + break + } + } + return foundSome + }, + ) + if path != nil { + break + } + } + + actualRootValue = importValue(t, inter, generatedValue).Transfer( + inter, + interpreter.EmptyLocationRange, + atree.Address(owner), + false, + nil, + nil, + // TODO: is has no parent container = true correct? + true, + ) + + // Store the composite in a storage map, so that the composite's slab + // is referenced by the root of the storage. 
+ + writeComposite( + inter, + owner, + compositeStorageMapKey, + actualRootValue.(*interpreter.CompositeValue), + ) + + return + } + + checkIteration := func( + t *testing.T, + inter *interpreter.Interpreter, + actualComposite *interpreter.CompositeValue, + expectedComposite *interpreter.CompositeValue, + ) { + expectedCount := expectedComposite.FieldCount() + require.Equal(t, expectedCount, actualComposite.FieldCount()) + + var iterations int + + actualComposite.ForEachField( + inter, + func(name string, element interpreter.Value) (resume bool) { + + expectedElement := expectedComposite.GetMember( + inter, + interpreter.EmptyLocationRange, + name, + ) + AssertValuesEqual(t, inter, expectedElement, element) + + iterations += 1 + + return true + }, + interpreter.EmptyLocationRange, + ) + + assert.Equal(t, expectedCount, iterations) + } + + t.Run("insert", func(t *testing.T) { + t.Parallel() + + inter, resetStorage := newRandomValueTestInterpreter(t) + + r := newRandomValueGenerator( + *smokeTestSeed, + limits, + ) + t.Logf("seed: %d", r.seed) + + actualRootValue, generatedValue, path := + createValue( + t, + &r, + inter, + // Accept any composite, even empty ones, + // given we're only inserting + func(composite cadence.Composite) bool { + return true + }, + ) + + actualNestedComposite := getNestedComposite( + inter, + actualRootValue, + owner, + path, + ) + + type insert struct { + name string + value cadence.Value + } + + performInsert := func(composite *interpreter.CompositeValue, insert insert) { + + newValue := importValue(t, inter, insert.value) + + // Atree storage validation must be temporarily disabled + // to not report any "unreferenced slab" errors. + + withoutAtreeStorageValidationEnabled(inter, func() struct{} { + composite.SetMember( + inter, + interpreter.EmptyLocationRange, + insert.name, + newValue, + ) + return struct{}{} + }) + + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } + } + + // We use the generated value twice: once as the expected value, and once as the actual value. + // We first perform mutations on the actual value, and then compare it to the expected value. + // The actual value is stored in an account and reloaded. + // The expected value is temporary (zero address), and is not stored in storage. + // Given that the storage reset destroys the data for the expected value because it is temporary, + // we re-import it each time and perform all operations on it from scratch. + + var inserts []insert + insertSet := map[string]struct{}{} + + for i := 0; i < opCount; i++ { + // Generate a unique name + var name string + for { + name = r.randomUTF8String() + + if actualNestedComposite.GetMember( + inter, + interpreter.EmptyLocationRange, + name, + ) != nil { + continue + } + + if _, ok := insertSet[name]; ok { + continue + } + insertSet[name] = struct{}{} + + break + } + + inserts = append( + inserts, + insert{ + name: name, + value: r.randomStorableValue(inter, 0), + }, + ) + } + + for i, insert := range inserts { + + resetStorage() + + actualRootValue = readComposite(inter, owner, compositeStorageMapKey) + actualNestedComposite = getNestedComposite( + inter, + actualRootValue, + owner, + path, + ) + + performInsert( + actualNestedComposite, + insert, + ) + + // Re-create the expected value from scratch, + // by importing the generated value, and performing all updates on it + // that have been performed on the actual value so far. 
+ + expectedRootValue := importValue(t, inter, generatedValue) + expectedNestedComposite := getNestedComposite( + inter, + expectedRootValue, + common.ZeroAddress, + path, + ) + + for _, insert := range inserts[:i+1] { + + performInsert( + expectedNestedComposite, + insert, + ) + } + AssertValuesEqual(t, inter, expectedRootValue, actualRootValue) + + checkIteration( + t, + inter, + actualNestedComposite, + expectedNestedComposite, + ) + } + }) + + t.Run("update", func(t *testing.T) { + t.Parallel() + + inter, resetStorage := newRandomValueTestInterpreter(t) + + r := newRandomValueGenerator( + *smokeTestSeed, + limits, + ) + t.Logf("seed: %d", r.seed) + + actualRootValue, generatedValue, path := + createValue( + t, + &r, + inter, + // Generate a non-empty composite, + // so we have at least one element to update + func(composite cadence.Composite) bool { + return len(composite.FieldsMappedByName()) > 0 + }, + ) + + actualNestedComposite := getNestedComposite( + inter, + actualRootValue, + owner, + path, + ) + + fieldCount := actualNestedComposite.FieldCount() + require.Greater(t, fieldCount, 0) + + type update struct { + name string + value cadence.Value + } + + performUpdate := func(composite *interpreter.CompositeValue, update update) { + + newValue := importValue(t, inter, update.value) + + // Atree storage validation must be temporarily disabled + // to not report any "unreferenced slab" errors. + + withoutAtreeStorageValidationEnabled(inter, func() struct{} { + composite.SetMember( + inter, + interpreter.EmptyLocationRange, + update.name, + interpreter.NewUnmeteredSomeValueNonCopying(newValue), + ) + return struct{}{} + }) + + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } + + // Composite must have same number of elements + require.Equal(t, fieldCount, composite.FieldCount()) + } + + // We use the generated value twice: once as the expected value, and once as the actual value. + // We first perform mutations on the actual value, and then compare it to the expected value. + // The actual value is stored in an account and reloaded. + // The expected value is temporary (zero address), and is not stored in storage. + // Given that the storage reset destroys the data for the expected value because it is temporary, + // we re-import it each time and perform all operations on it from scratch. + + var updates []update + + fieldNames := make([]string, 0, fieldCount) + + actualNestedComposite.ForEachFieldName( + func(name string) (resume bool) { + fieldNames = append(fieldNames, name) + return true + }, + ) + + for i := 0; i < opCount; i++ { + index := r.rand.Intn(fieldCount) + + updates = append( + updates, + update{ + name: fieldNames[index], + value: r.randomStorableValue(inter, 0), + }, + ) + } + + for i, update := range updates { + + resetStorage() + + actualRootValue = readComposite(inter, owner, compositeStorageMapKey) + actualNestedComposite = getNestedComposite( + inter, + actualRootValue, + owner, + path, + ) + + performUpdate( + actualNestedComposite, + update, + ) + + // Re-create the expected value from scratch, + // by importing the generated value, and performing all updates on it + // that have been performed on the actual value so far. 
+ + expectedRootValue := importValue(t, inter, generatedValue) + expectedNestedComposite := getNestedComposite( + inter, + expectedRootValue, + common.ZeroAddress, + path, + ) + + for _, update := range updates[:i+1] { + + performUpdate( + expectedNestedComposite, + update, + ) + } + AssertValuesEqual(t, inter, expectedRootValue, actualRootValue) + + checkIteration( + t, + inter, + actualNestedComposite, + expectedNestedComposite, + ) + } + }) + + t.Run("remove", func(t *testing.T) { + t.Parallel() + + inter, resetStorage := newRandomValueTestInterpreter(t) + + r := newRandomValueGenerator( + *smokeTestSeed, + limits, + ) + t.Logf("seed: %d", r.seed) + + actualRootValue, generatedValue, path := + createValue( + t, + &r, + inter, + func(composite cadence.Composite) bool { + return len(composite.FieldsMappedByName()) >= opCount + }, + ) + + actualNestedComposite := getNestedComposite( + inter, + actualRootValue, + owner, + path, + ) + + fieldCount := actualNestedComposite.FieldCount() + require.GreaterOrEqual(t, fieldCount, opCount) + + performRemove := func(composite *interpreter.CompositeValue, name string) { + + // Atree storage validation must be temporarily disabled + // to not report any "unreferenced slab" errors. + + withoutAtreeStorageValidationEnabled(inter, func() struct{} { + composite.RemoveMember( + inter, + interpreter.EmptyLocationRange, + name, + ) + return struct{}{} + }) + + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } + } + + // We use the generated value twice: once as the expected value, and once as the actual value. + // We first perform mutations on the actual value, and then compare it to the expected value. + // The actual value is stored in an account and reloaded. + // The expected value is temporary (zero address), and is not stored in storage. + // Given that the storage reset destroys the data for the expected value because it is temporary, + // we re-import it each time and perform all operations on it from scratch. + + fieldNames := make([]string, 0, fieldCount) + + actualNestedComposite.ForEachFieldName( + func(name string) (resume bool) { + + fieldNames = append(fieldNames, name) + + return true + }, + ) + + var removes []string + removeSet := map[string]struct{}{} + + for i := 0; i < opCount; i++ { + // Find a unique name + var name string + for { + name = fieldNames[r.rand.Intn(fieldCount)] + + if _, ok := removeSet[name]; ok { + continue + } + removeSet[name] = struct{}{} + + break + } + + removes = append(removes, name) + } + + for i, index := range removes { + + resetStorage() + + actualRootValue = readComposite(inter, owner, compositeStorageMapKey) + actualNestedComposite = getNestedComposite( + inter, + actualRootValue, + owner, + path, + ) + + performRemove( + actualNestedComposite, + index, + ) + + // Re-create the expected value from scratch, + // by importing the generated value, and performing all updates on it + // that have been performed on the actual value so far. 
+ + expectedRootValue := importValue(t, inter, generatedValue) + expectedNestedComposite := getNestedComposite( + inter, + expectedRootValue, + common.ZeroAddress, + path, + ) + + for _, index := range removes[:i+1] { + + performRemove( + expectedNestedComposite, + index, + ) + } + AssertValuesEqual(t, inter, expectedRootValue, actualRootValue) + + checkIteration( + t, + inter, + actualNestedComposite, + expectedNestedComposite, + ) + } + }) +} + +func findNestedCadenceValue( + value cadence.Value, + predicate func(value cadence.Value, path []pathElement) bool, +) []pathElement { + return findNestedCadenceRecursive(value, nil, predicate) +} + +func findNestedCadenceRecursive( + value cadence.Value, + path []pathElement, + predicate func(value cadence.Value, path []pathElement) bool, +) []pathElement { + if predicate(value, path) { + return path + } + + switch value := value.(type) { + case cadence.Array: + for index, element := range value.Values { + + nestedPath := path + nestedPath = append(nestedPath, arrayPathElement{index}) + + result := findNestedCadenceRecursive(element, nestedPath, predicate) + if result != nil { + return result + } + } + + case cadence.Dictionary: + for _, pair := range value.Pairs { + + nestedPath := path + nestedPath = append(nestedPath, dictionaryPathElement{pair.Key}) + + result := findNestedCadenceRecursive(pair.Value, nestedPath, predicate) + if result != nil { + return result + } + } + + case cadence.Struct: + for name, field := range value.FieldsMappedByName() { + + nestedPath := path + nestedPath = append(nestedPath, structPathElement{name}) + + result := findNestedCadenceRecursive(field, nestedPath, predicate) + if result != nil { + return result + } + } + + case cadence.Optional: + nestedValue := value.Value + if nestedValue == nil { + break + } + + nestedPath := path + nestedPath = append(nestedPath, somePathElement{}) + + result := findNestedCadenceRecursive(nestedValue, nestedPath, predicate) + if result != nil { + return result + } + } + + return nil +} + +func getNestedValue( + t *testing.T, + inter *interpreter.Interpreter, + value interpreter.Value, + path []pathElement, +) interpreter.Value { + for i, element := range path { + switch element := element.(type) { + case arrayPathElement: + require.IsType( + t, + &interpreter.ArrayValue{}, + value, + "path: %v", + path[:i], + ) + array := value.(*interpreter.ArrayValue) + + value = array.Get( + inter, + interpreter.EmptyLocationRange, + element.index, + ) + + require.NotNil(t, + value, + "missing value for array element %d (path: %v)", + element.index, + path[:i], + ) + + case dictionaryPathElement: + require.IsType( + t, + &interpreter.DictionaryValue{}, + value, + "path: %v", + path[:i], + ) + dictionary := value.(*interpreter.DictionaryValue) + + key := importValue(t, inter, element.key) + + var found bool + value, found = dictionary.Get( + inter, + interpreter.EmptyLocationRange, + key, + ) + require.True(t, + found, + "missing value for dictionary key %s (path: %v)", + element.key, + path[:i], + ) + require.NotNil(t, + value, + "missing value for dictionary key %s (path: %v)", + element.key, + path[:i], + ) + + case structPathElement: + require.IsType( + t, + &interpreter.CompositeValue{}, + value, + "path: %v", + path[:i], + ) + composite := value.(*interpreter.CompositeValue) + + value = composite.GetMember( + inter, + interpreter.EmptyLocationRange, + element.name, + ) + + require.NotNil(t, + value, + "missing value for composite field %q (path: %v)", + element.name, + path[:i], + ) + + 
case somePathElement: + require.IsType( + t, + &interpreter.SomeValue{}, + value, + "path: %v", + path[:i], + ) + optional := value.(*interpreter.SomeValue) + + value = optional.InnerValue(inter, interpreter.EmptyLocationRange) + + require.NotNil(t, + value, + "missing value for optional (path: %v)", + path[:i], + ) + + default: + panic(errors.NewUnexpectedError("unsupported path element: %T", element)) + } + } + + return value +} + +type pathElement interface { + isPathElement() +} + +type arrayPathElement struct { + index int +} + +var _ pathElement = arrayPathElement{} + +func (arrayPathElement) isPathElement() {} + +type dictionaryPathElement struct { + key cadence.Value +} + +var _ pathElement = dictionaryPathElement{} + +func (dictionaryPathElement) isPathElement() {} + +type structPathElement struct { + name string +} + +var _ pathElement = structPathElement{} + +func (structPathElement) isPathElement() {} + +type somePathElement struct{} + +var _ pathElement = somePathElement{} + +func (somePathElement) isPathElement() {} + +type randomValueLimits struct { + containerMaxDepth int + containerMaxSize int + compositeMaxFields int +} + +type randomValueGenerator struct { + seed int64 + rand *rand.Rand + randomValueLimits +} + +func newRandomValueGenerator(seed int64, limits randomValueLimits) randomValueGenerator { + if seed == -1 { + seed = time.Now().UnixNano() + } + + return randomValueGenerator{ + seed: seed, + rand: rand.New(rand.NewSource(seed)), + randomValueLimits: limits, + } +} +func (r randomValueGenerator) randomStorableValue(inter *interpreter.Interpreter, currentDepth int) cadence.Value { + var kind randomValueKind + if currentDepth < r.containerMaxDepth { + kind = r.randomValueKind(randomValueKindStruct) + } else { + kind = r.randomValueKind(randomValueKindCapability) + } + + switch kind { + + // Non-hashable + case randomValueKindVoid: + return cadence.Void{} + + case randomValueKindNil: + return cadence.NewOptional(nil) + + case randomValueKindDictionaryVariant1, + randomValueKindDictionaryVariant2: + return r.randomDictionaryValue(inter, currentDepth) + + case randomValueKindArrayVariant1, + randomValueKindArrayVariant2: + return r.randomArrayValue(inter, currentDepth) + + case randomValueKindStruct: + return r.randomStructValue(inter, currentDepth) + + case randomValueKindCapability: + return r.randomCapabilityValue() + + case randomValueKindSome: + return cadence.NewOptional( + r.randomStorableValue(inter, currentDepth+1), + ) + + // Hashable + default: + return r.generateHashableValueOfKind(inter, kind) + } +} + +func (r randomValueGenerator) randomHashableValue(inter *interpreter.Interpreter) cadence.Value { + return r.generateHashableValueOfKind(inter, r.randomValueKind(randomValueKindEnum)) +} + +func (r randomValueGenerator) generateHashableValueOfKind(inter *interpreter.Interpreter, kind randomValueKind) cadence.Value { + switch kind { + + // Int* + case randomValueKindInt: + // TODO: generate larger numbers + return cadence.NewInt(r.randomSign() * int(r.rand.Int63())) + case randomValueKindInt8: + return cadence.NewInt8(int8(r.randomInt(math.MaxUint8))) + case randomValueKindInt16: + return cadence.NewInt16(int16(r.randomInt(math.MaxUint16))) + case randomValueKindInt32: + return cadence.NewInt32(int32(r.randomSign()) * r.rand.Int31()) + case randomValueKindInt64: + return cadence.NewInt64(int64(r.randomSign()) * r.rand.Int63()) + case randomValueKindInt128: + // TODO: generate larger numbers + return cadence.NewInt128(r.randomSign() * int(r.rand.Int63())) + 
case randomValueKindInt256: + // TODO: generate larger numbers + return cadence.NewInt256(r.randomSign() * int(r.rand.Int63())) + + // UInt* + case randomValueKindUInt: + // TODO: generate larger numbers + return cadence.NewUInt(uint(r.rand.Uint64())) + case randomValueKindUInt8: + return cadence.NewUInt8(uint8(r.randomInt(math.MaxUint8))) + case randomValueKindUInt16: + return cadence.NewUInt16(uint16(r.randomInt(math.MaxUint16))) + case randomValueKindUInt32: + return cadence.NewUInt32(r.rand.Uint32()) + case randomValueKindUInt64Variant1, + randomValueKindUInt64Variant2, + randomValueKindUInt64Variant3, + randomValueKindUInt64Variant4: // should be more common + return cadence.NewUInt64(r.rand.Uint64()) + case randomValueKindUInt128: + // TODO: generate larger numbers + return cadence.NewUInt128(uint(r.rand.Uint64())) + case randomValueKindUInt256: + // TODO: generate larger numbers + return cadence.NewUInt256(uint(r.rand.Uint64())) + + // Word* + case randomValueKindWord8: + return cadence.NewWord8(uint8(r.randomInt(math.MaxUint8))) + case randomValueKindWord16: + return cadence.NewWord16(uint16(r.randomInt(math.MaxUint16))) + case randomValueKindWord32: + return cadence.NewWord32(r.rand.Uint32()) + case randomValueKindWord64: + return cadence.NewWord64(r.rand.Uint64()) + case randomValueKindWord128: + // TODO: generate larger numbers + return cadence.NewWord128(uint(r.rand.Uint64())) + case randomValueKindWord256: + // TODO: generate larger numbers + return cadence.NewWord256(uint(r.rand.Uint64())) + + // (U)Fix* + case randomValueKindFix64: + return cadence.Fix64( + int64(r.randomSign()) * r.rand.Int63n(sema.Fix64TypeMaxInt), + ) + case randomValueKindUFix64: + return cadence.UFix64( + uint64(r.rand.Int63n(int64(sema.UFix64TypeMaxInt))), + ) + + // String + case randomValueKindStringVariant1, + randomValueKindStringVariant2, + randomValueKindStringVariant3, + randomValueKindStringVariant4: // small string - should be more common + size := r.randomInt(255) + return cadence.String(r.randomUTF8StringOfSize(size)) + case randomValueKindStringVariant5: // large string + size := r.randomInt(4048) + 255 + return cadence.String(r.randomUTF8StringOfSize(size)) + + case randomValueKindBoolVariantTrue: + return cadence.NewBool(true) + case randomValueKindBoolVariantFalse: + return cadence.NewBool(false) + + case randomValueKindAddress: + return r.randomAddressValue() + + case randomValueKindPath: + return r.randomPathValue() + + case randomValueKindEnum: + return r.randomEnumValue(inter) + + default: + panic(fmt.Sprintf("unsupported: %d", kind)) + } +} + +func (r randomValueGenerator) randomSign() int { + if r.randomInt(1) == 1 { + return 1 + } + + return -1 +} + +func (r randomValueGenerator) randomAddressValue() (address cadence.Address) { + r.rand.Read(address[:]) + return address +} + +func (r randomValueGenerator) randomPathValue() cadence.Path { + randomDomain := r.rand.Intn(len(common.AllPathDomains)) + identifier := r.randomUTF8String() + + return cadence.Path{ + Domain: common.AllPathDomains[randomDomain], + Identifier: identifier, + } +} + +func (r randomValueGenerator) randomCapabilityValue() cadence.Capability { + return cadence.NewCapability( + cadence.UInt64(r.randomInt(math.MaxInt-1)), + r.randomAddressValue(), + cadence.NewReferenceType( + cadence.UnauthorizedAccess, + cadence.AnyStructType, + ), + ) +} + +func (r randomValueGenerator) randomDictionaryValue(inter *interpreter.Interpreter, currentDepth int) cadence.Dictionary { + + entryCount := r.randomInt(r.containerMaxSize) + 
keyValues := make([]cadence.KeyValuePair, entryCount) + + existingKeys := map[string]struct{}{} + + for i := 0; i < entryCount; i++ { + + // generate a unique key + var key cadence.Value + for { + key = r.randomHashableValue(inter) + keyStr := key.String() + + // avoid duplicate keys + _, exists := existingKeys[keyStr] + if !exists { + existingKeys[keyStr] = struct{}{} + break + } + } + + keyValues[i] = cadence.KeyValuePair{ + Key: key, + Value: r.randomStorableValue(inter, currentDepth+1), + } + } + + return cadence.NewDictionary(keyValues). + WithType( + cadence.NewDictionaryType( + cadence.HashableStructType, + cadence.AnyStructType, + ), + ) +} + +func (r randomValueGenerator) randomInt(upperBound int) int { + return r.rand.Intn(upperBound + 1) +} + +func (r randomValueGenerator) randomArrayValue(inter *interpreter.Interpreter, currentDepth int) cadence.Array { + elementsCount := r.randomInt(r.containerMaxSize) + elements := make([]cadence.Value, elementsCount) + + for i := 0; i < elementsCount; i++ { + elements[i] = r.randomStorableValue(inter, currentDepth+1) + } + + return cadence.NewArray(elements). + WithType(cadence.NewVariableSizedArrayType(cadence.AnyStructType)) +} + +func (r randomValueGenerator) randomStructValue(inter *interpreter.Interpreter, currentDepth int) cadence.Struct { + fieldsCount := r.randomInt(r.compositeMaxFields) + + fields := make([]cadence.Field, fieldsCount) + fieldValues := make([]cadence.Value, fieldsCount) + + existingFieldNames := make(map[string]any, fieldsCount) + + for i := 0; i < fieldsCount; i++ { + // generate a unique field name + var fieldName string + for { + fieldName = r.randomUTF8String() + + // avoid duplicate field names + _, exists := existingFieldNames[fieldName] + if !exists { + existingFieldNames[fieldName] = struct{}{} + break + } + } + + fields[i] = cadence.NewField(fieldName, cadence.AnyStructType) + fieldValues[i] = r.randomStorableValue(inter, currentDepth+1) + } + + identifier := fmt.Sprintf("S%d", r.rand.Uint64()) + + address := r.randomAddressValue() + + location := common.AddressLocation{ + Address: common.Address(address), + Name: identifier, + } + + kind := common.CompositeKindStructure + + compositeType := &sema.CompositeType{ + Location: location, + Identifier: identifier, + Kind: kind, + Members: &sema.StringMemberOrderedMap{}, + } + + fieldNames := make([]string, fieldsCount) + + for i := 0; i < fieldsCount; i++ { + fieldName := fields[i].Identifier + compositeType.Members.Set( + fieldName, + sema.NewUnmeteredPublicConstantFieldMember( + compositeType, + fieldName, + sema.AnyStructType, + "", + ), + ) + fieldNames[i] = fieldName + } + compositeType.Fields = fieldNames + + // Add the type to the elaboration, to short-circuit the type-lookup. 
+ inter.Program.Elaboration.SetCompositeType( + compositeType.ID(), + compositeType, + ) + + return cadence.NewStruct(fieldValues).WithType( + cadence.NewStructType( + location, + identifier, + fields, + nil, + ), + ) +} + +func (r randomValueGenerator) cadenceIntegerType(kind randomValueKind) cadence.Type { + switch kind { + // Int + case randomValueKindInt: + return cadence.IntType + case randomValueKindInt8: + return cadence.Int8Type + case randomValueKindInt16: + return cadence.Int16Type + case randomValueKindInt32: + return cadence.Int32Type + case randomValueKindInt64: + return cadence.Int64Type + case randomValueKindInt128: + return cadence.Int128Type + case randomValueKindInt256: + return cadence.Int256Type + + // UInt + case randomValueKindUInt: + return cadence.UIntType + case randomValueKindUInt8: + return cadence.UInt8Type + case randomValueKindUInt16: + return cadence.UInt16Type + case randomValueKindUInt32: + return cadence.UInt32Type + case randomValueKindUInt64Variant1, + randomValueKindUInt64Variant2, + randomValueKindUInt64Variant3, + randomValueKindUInt64Variant4: + return cadence.UInt64Type + case randomValueKindUInt128: + return cadence.UInt128Type + case randomValueKindUInt256: + return cadence.UInt256Type + + // Word + case randomValueKindWord8: + return cadence.Word8Type + case randomValueKindWord16: + return cadence.Word16Type + case randomValueKindWord32: + return cadence.Word32Type + case randomValueKindWord64: + return cadence.Word64Type + case randomValueKindWord128: + return cadence.Word128Type + case randomValueKindWord256: + return cadence.Word256Type + + default: + panic(fmt.Sprintf("unsupported kind: %d", kind)) + } +} + +func (r randomValueGenerator) semaIntegerType(kind randomValueKind) sema.Type { + switch kind { + // Int + case randomValueKindInt: + return sema.IntType + case randomValueKindInt8: + return sema.Int8Type + case randomValueKindInt16: + return sema.Int16Type + case randomValueKindInt32: + return sema.Int32Type + case randomValueKindInt64: + return sema.Int64Type + case randomValueKindInt128: + return sema.Int128Type + case randomValueKindInt256: + return sema.Int256Type + + // UInt + case randomValueKindUInt: + return sema.UIntType + case randomValueKindUInt8: + return sema.UInt8Type + case randomValueKindUInt16: + return sema.UInt16Type + case randomValueKindUInt32: + return sema.UInt32Type + case randomValueKindUInt64Variant1, + randomValueKindUInt64Variant2, + randomValueKindUInt64Variant3, + randomValueKindUInt64Variant4: + return sema.UInt64Type + case randomValueKindUInt128: + return sema.UInt128Type + case randomValueKindUInt256: + return sema.UInt256Type + + // Word + case randomValueKindWord8: + return sema.Word8Type + case randomValueKindWord16: + return sema.Word16Type + case randomValueKindWord32: + return sema.Word32Type + case randomValueKindWord64: + return sema.Word64Type + case randomValueKindWord128: + return sema.Word128Type + case randomValueKindWord256: + return sema.Word256Type + + default: + panic(fmt.Sprintf("unsupported kind: %d", kind)) + } +} + +type randomValueKind uint8 + +const ( + // Hashable values + // Int* + randomValueKindInt randomValueKind = iota + randomValueKindInt8 + randomValueKindInt16 + randomValueKindInt32 + randomValueKindInt64 + randomValueKindInt128 + randomValueKindInt256 + + // UInt* + randomValueKindUInt + randomValueKindUInt8 + randomValueKindUInt16 + randomValueKindUInt32 + randomValueKindUInt64Variant1 + randomValueKindUInt64Variant2 + randomValueKindUInt64Variant3 + 
randomValueKindUInt64Variant4 + randomValueKindUInt128 + randomValueKindUInt256 + + // Word* + randomValueKindWord8 + randomValueKindWord16 + randomValueKindWord32 + randomValueKindWord64 + randomValueKindWord128 + randomValueKindWord256 + + // (U)Fix* + randomValueKindFix64 + randomValueKindUFix64 + + // String + randomValueKindStringVariant1 + randomValueKindStringVariant2 + randomValueKindStringVariant3 + randomValueKindStringVariant4 + randomValueKindStringVariant5 + + randomValueKindBoolVariantTrue + randomValueKindBoolVariantFalse + randomValueKindPath + randomValueKindAddress + randomValueKindEnum + + // Non-hashable values + randomValueKindVoid + randomValueKindNil // `Never?` + randomValueKindCapability + + // Containers + randomValueKindSome + randomValueKindArrayVariant1 + randomValueKindArrayVariant2 + randomValueKindDictionaryVariant1 + randomValueKindDictionaryVariant2 + randomValueKindStruct +) + +func (r randomValueGenerator) randomUTF8String() string { + return r.randomUTF8StringOfSize(8) +} + +func (r randomValueGenerator) randomUTF8StringOfSize(size int) string { + identifier := make([]byte, size) + r.rand.Read(identifier) + return strings.ToValidUTF8(string(identifier), "$") +} + +func (r randomValueGenerator) randomEnumValue(inter *interpreter.Interpreter) cadence.Enum { + // Get a random integer subtype to be used as the raw-type of enum + typ := r.randomValueKind(randomValueKindWord64) + + rawValue := r.generateHashableValueOfKind(inter, typ).(cadence.NumberValue) + + identifier := fmt.Sprintf("E%d", r.rand.Uint64()) + + address := r.randomAddressValue() + + location := common.AddressLocation{ + Address: common.Address(address), + Name: identifier, + } + + semaRawType := r.semaIntegerType(typ) + + semaEnumType := &sema.CompositeType{ + Identifier: identifier, + EnumRawType: semaRawType, + Kind: common.CompositeKindEnum, + Location: location, + Members: &sema.StringMemberOrderedMap{}, + Fields: []string{ + sema.EnumRawValueFieldName, + }, + } + + semaEnumType.Members.Set( + sema.EnumRawValueFieldName, + sema.NewUnmeteredPublicConstantFieldMember( + semaEnumType, + sema.EnumRawValueFieldName, + semaRawType, + "", + ), + ) + + // Add the type to the elaboration, to short-circuit the type-lookup. 
+ inter.Program.Elaboration.SetCompositeType( + semaEnumType.ID(), + semaEnumType, + ) + + rawType := r.cadenceIntegerType(typ) + + fields := []cadence.Value{ + rawValue, + } + + return cadence.NewEnum(fields).WithType( + cadence.NewEnumType( + location, + identifier, + rawType, + []cadence.Field{ + { + Identifier: sema.EnumRawValueFieldName, + Type: rawType, + }, + }, + nil, + ), + ) +} + +func (r randomValueGenerator) randomValueKind(kind randomValueKind) randomValueKind { + return randomValueKind(r.randomInt(int(kind))) +} + +func TestRandomValueGeneration(t *testing.T) { + + inter, _ := newRandomValueTestInterpreter(t) + + limits := defaultRandomValueLimits + + // Generate random values + for i := 0; i < 1000; i++ { + r1 := newRandomValueGenerator(int64(i), limits) + v1 := r1.randomStorableValue(inter, 0) + + r2 := newRandomValueGenerator(int64(i), limits) + v2 := r2.randomStorableValue(inter, 0) + + // Check if the generated values are equal + assert.Equal(t, v1, v2) + } +} + +func mapKey(inter *interpreter.Interpreter, key interpreter.Value) any { + + switch key := key.(type) { + case *interpreter.StringValue: + type stringValue string + return stringValue(key.Str) + + case interpreter.CharacterValue: + type characterValue string + return characterValue(key.Str) + + case interpreter.TypeValue: + type typeValue common.TypeID + return typeValue(key.Type.ID()) + + case *interpreter.CompositeValue: + type enumKey struct { + location common.Location + qualifiedIdentifier string + kind common.CompositeKind + rawValue string + } + return enumKey{ + location: key.Location, + qualifiedIdentifier: key.QualifiedIdentifier, + kind: key.Kind, + rawValue: key.GetField( + inter, + interpreter.EmptyLocationRange, + sema.EnumRawValueFieldName, + ).String(), + } + + case interpreter.IntValue: + type intValue string + return intValue(key.String()) + + case interpreter.UIntValue: + type uintValue string + return uintValue(key.String()) + + case interpreter.Int8Value: + type int8Value string + return int8Value(key.String()) + + case interpreter.UInt8Value: + type uint8Value string + return uint8Value(key.String()) + + case interpreter.Int16Value: + type int16Value string + return int16Value(key.String()) + + case interpreter.UInt16Value: + type uint16Value string + return uint16Value(key.String()) + + case interpreter.Int32Value: + type int32Value string + return int32Value(key.String()) + + case interpreter.UInt32Value: + type uint32Value string + return uint32Value(key.String()) + + case interpreter.Int64Value: + type int64Value string + return int64Value(key.String()) + + case interpreter.UInt64Value: + type uint64Value string + return uint64Value(key.String()) + + case interpreter.Int128Value: + type int128Value string + return int128Value(key.String()) + + case interpreter.UInt128Value: + type uint128Value string + return uint128Value(key.String()) + + case interpreter.Int256Value: + type int256Value string + return int256Value(key.String()) + + case interpreter.UInt256Value: + type uint256Value string + return uint256Value(key.String()) + + case interpreter.Word8Value: + type word8Value string + return word8Value(key.String()) + + case interpreter.Word16Value: + type word16Value string + return word16Value(key.String()) + + case interpreter.Word32Value: + type word32Value string + return word32Value(key.String()) + + case interpreter.Word64Value: + type word64Value string + return word64Value(key.String()) + + case interpreter.Word128Value: + type word128Value string + return 
word128Value(key.String()) + + case interpreter.Word256Value: + type word256Value string + return word256Value(key.String()) + + case interpreter.PathValue: + return key + + case interpreter.AddressValue: + return key + + case interpreter.BoolValue: + return key + + case interpreter.Fix64Value: + type fix64Value string + return fix64Value(key.String()) + + case interpreter.UFix64Value: + type ufix64Value string + return ufix64Value(key.String()) + + default: + panic(errors.NewUnexpectedError("unsupported map key type: %T", key)) + } +} + +// This test is a reproducer for "slab was not reachable from leaves" false alarm. +// https://github.com/onflow/cadence/pull/2882#issuecomment-1781298107 +// In this test, storage.CheckHealth() should be called after array.DeepRemove(), +// not in the middle of array.DeepRemove(). +// CheckHealth() is called in the middle of array.DeepRemove() when: +// - array.DeepRemove() calls childArray1 and childArray2 DeepRemove() +// - DeepRemove() calls maybeValidateAtreeValue() +// - maybeValidateAtreeValue() calls CheckHealth() +func TestCheckStorageHealthInMiddleOfDeepRemove(t *testing.T) { + + t.Parallel() + + storage := newUnmeteredInMemoryStorage() + inter, err := interpreter.NewInterpreter( + &interpreter.Program{ + Program: ast.NewProgram(nil, []ast.Declaration{}), + Elaboration: sema.NewElaboration(nil), + }, + TestLocation, + &interpreter.Config{ + Storage: storage, + ImportLocationHandler: func(inter *interpreter.Interpreter, location common.Location) interpreter.Import { + return interpreter.VirtualImport{ + Elaboration: inter.Program.Elaboration, + } + }, + AtreeStorageValidationEnabled: true, + AtreeValueValidationEnabled: true, + }, + ) + require.NoError(t, err) + + owner := common.Address{'A'} + + // Create a small child array which will be inlined in parent container. + childArray1 := interpreter.NewArrayValue( + inter, + interpreter.EmptyLocationRange, + &interpreter.VariableSizedStaticType{ + Type: interpreter.PrimitiveStaticTypeAnyStruct, + }, + owner, + interpreter.NewUnmeteredStringValue("a"), + ) + + size := int(atree.MaxInlineArrayElementSize()) - 10 + + // Create a large child array which will NOT be inlined in parent container. + childArray2 := interpreter.NewArrayValue( + inter, + interpreter.EmptyLocationRange, + &interpreter.VariableSizedStaticType{ + Type: interpreter.PrimitiveStaticTypeAnyStruct, + }, + owner, + interpreter.NewUnmeteredStringValue(strings.Repeat("b", size)), + interpreter.NewUnmeteredStringValue(strings.Repeat("c", size)), + ) + + // Create an array with childArray1 and childArray2. + array := interpreter.NewArrayValue( + inter, + interpreter.EmptyLocationRange, + &interpreter.VariableSizedStaticType{ + Type: interpreter.PrimitiveStaticTypeAnyStruct, + }, + owner, + childArray1, // inlined + childArray2, // not inlined + ) + + // DeepRemove removes all elements (childArray1 and childArray2) recursively in array. + array.DeepRemove(inter, true) + + // As noted earlier in comments at the top of this test: + // storage.CheckHealth() should be called after array.DeepRemove(), not in the middle of array.DeepRemove(). + // This happens when: + // - array.DeepRemove() calls childArray1 and childArray2 DeepRemove() + // - DeepRemove() calls maybeValidateAtreeValue() + // - maybeValidateAtreeValue() calls CheckHealth() +} + +// This test is a reproducer for "slab was not reachable from leaves" false alarm. 
+// https://github.com/onflow/cadence/pull/2882#issuecomment-1796381227 +// In this test, storage.CheckHealth() should be called after DictionaryValue.Transfer() +// with remove flag, not in the middle of DictionaryValue.Transfer(). +func TestInterpretCheckStorageHealthInMiddleOfTransferAndRemove(t *testing.T) { + + t.Parallel() + + r := newRandomValueGenerator(*smokeTestSeed, defaultRandomValueLimits) + t.Logf("seed: %d", r.seed) + + storage := newUnmeteredInMemoryStorage() + inter, err := interpreter.NewInterpreter( &interpreter.Program{ Program: ast.NewProgram(nil, []ast.Declaration{}), Elaboration: sema.NewElaboration(nil), @@ -1723,206 +4925,1588 @@ func TestCheckStorageHealthInMiddleOfDeepRemove(t *testing.T) { Elaboration: inter.Program.Elaboration, } }, - AtreeStorageValidationEnabled: true, - AtreeValueValidationEnabled: true, - }, - ) - require.NoError(t, err) + AtreeStorageValidationEnabled: true, + AtreeValueValidationEnabled: true, + }, + ) + require.NoError(t, err) + + // Create large array value with zero address which will not be inlined. + gchildArray := interpreter.NewArrayValue( + inter, + interpreter.EmptyLocationRange, + &interpreter.VariableSizedStaticType{ + Type: interpreter.PrimitiveStaticTypeAnyStruct, + }, + common.ZeroAddress, + interpreter.NewUnmeteredStringValue(strings.Repeat("b", int(atree.MaxInlineArrayElementSize())-10)), + interpreter.NewUnmeteredStringValue(strings.Repeat("c", int(atree.MaxInlineArrayElementSize())-10)), + ) + + // Create small composite value with zero address which will be inlined. + identifier := "test" + + location := common.AddressLocation{ + Address: common.ZeroAddress, + Name: identifier, + } + + compositeType := &sema.CompositeType{ + Location: location, + Identifier: identifier, + Kind: common.CompositeKindStructure, + } + + fields := []interpreter.CompositeField{ + interpreter.NewUnmeteredCompositeField("a", interpreter.NewUnmeteredUInt64Value(0)), + interpreter.NewUnmeteredCompositeField("b", interpreter.NewUnmeteredUInt64Value(1)), + interpreter.NewUnmeteredCompositeField("c", interpreter.NewUnmeteredUInt64Value(2)), + } + + compositeType.Members = &sema.StringMemberOrderedMap{} + for _, field := range fields { + compositeType.Members.Set( + field.Name, + sema.NewUnmeteredPublicConstantFieldMember( + compositeType, + field.Name, + sema.AnyStructType, + "", + ), + ) + } + + // Add the type to the elaboration, to short-circuit the type-lookup. + inter.Program.Elaboration.SetCompositeType( + compositeType.ID(), + compositeType, + ) + + gchildComposite := interpreter.NewCompositeValue( + inter, + interpreter.EmptyLocationRange, + location, + identifier, + common.CompositeKindStructure, + fields, + common.ZeroAddress, + ) + + // Create large dictionary with zero address with 2 data slabs containing: + // - SomeValue(SlabID) as first physical element in the first data slab + // - inlined CompositeValue as last physical element in the second data slab + + numberOfValues := 10 + firstElementIndex := 7 // index of first physical element in the first data slab + lastElementIndex := 8 // index of last physical element in the last data slab + keyValues := make([]interpreter.Value, numberOfValues*2) + for i := 0; i < numberOfValues; i++ { + key := interpreter.NewUnmeteredUInt64Value(uint64(i)) + + var value interpreter.Value + switch i { + case firstElementIndex: + value = interpreter.NewUnmeteredSomeValueNonCopying(gchildArray) + + case lastElementIndex: + value = gchildComposite + + default: + // Other values are inlined random strings. 
+ const size = 235 + value = interpreter.NewUnmeteredStringValue(r.randomUTF8StringOfSize(size)) + } + + keyValues[i*2] = key + keyValues[i*2+1] = value + } + + childMap := interpreter.NewDictionaryValueWithAddress( + inter, + interpreter.EmptyLocationRange, + &interpreter.DictionaryStaticType{ + KeyType: interpreter.PrimitiveStaticTypeAnyStruct, + ValueType: interpreter.PrimitiveStaticTypeAnyStruct, + }, + common.ZeroAddress, + keyValues..., + ) + + // Create dictionary with non-zero address containing child dictionary. + owner := common.Address{'A'} + m := interpreter.NewDictionaryValueWithAddress( + inter, + interpreter.EmptyLocationRange, + &interpreter.DictionaryStaticType{ + KeyType: interpreter.PrimitiveStaticTypeAnyStruct, + ValueType: interpreter.PrimitiveStaticTypeAnyStruct, + }, + owner, + interpreter.NewUnmeteredUInt64Value(0), + childMap, + ) + + inter.ValidateAtreeValue(m) + + require.NoError(t, storage.CheckHealth()) +} + +// TestInterpretIterateReadOnlyLoadedWithSomeValueChildren tests https://github.com/onflow/atree-internal/pull/7 +func TestInterpretIterateReadOnlyLoadedWithSomeValueChildren(t *testing.T) { + t.Parallel() + + owner := common.Address{'A'} + + const storageMapKey = interpreter.StringStorageMapKey("value") + + writeValue := func( + inter *interpreter.Interpreter, + owner common.Address, + storageMapKey interpreter.StorageMapKey, + value interpreter.Value, + ) { + value = value.Transfer( + inter, + interpreter.EmptyLocationRange, + atree.Address(owner), + false, + nil, + nil, + // TODO: is has no parent container = true correct? + true, + ) + + // Write the value to the storage map. + // However, the value is not referenced by the root of the storage yet + // (a storage map), so atree storage validation must be temporarily disabled + // to not report any "unreferenced slab" errors. + withoutAtreeStorageValidationEnabled( + inter, + func() struct{} { + inter.Storage(). + GetDomainStorageMap( + inter, + owner, + common.StorageDomainPathStorage, + true, + ). + WriteValue( + inter, + storageMapKey, + value, + ) + + return struct{}{} + }, + ) + } + + readValue := func( + t *testing.T, + inter *interpreter.Interpreter, + owner common.Address, + storageMapKey interpreter.StorageMapKey, + ) interpreter.Value { + storageMap := inter.Storage().GetDomainStorageMap( + inter, + owner, + common.StorageDomainPathStorage, + false, + ) + require.NotNil(t, storageMap) + + readValue := storageMap.ReadValue(inter, storageMapKey) + require.NotNil(t, readValue) + + return readValue + } + + t.Run("dictionary", func(t *testing.T) { + t.Parallel() + + inter, resetStorage := newRandomValueTestInterpreter(t) + + var cadenceRootPairs []cadence.KeyValuePair + + const expectedRootCount = 10 + const expectedInnerCount = 100 + + for i := 0; i < expectedRootCount; i++ { + var cadenceInnerPairs []cadence.KeyValuePair + + for j := 0; j < expectedInnerCount; j++ { + cadenceInnerPairs = append( + cadenceInnerPairs, + cadence.KeyValuePair{ + Key: cadence.NewInt(j), + Value: cadence.String(strings.Repeat("cadence", 1000)), + }, + ) + } + + cadenceRootPairs = append( + cadenceRootPairs, + cadence.KeyValuePair{ + Key: cadence.NewInt(i), + Value: cadence.NewOptional( + cadence.NewDictionary(cadenceInnerPairs), + ), + }, + ) + } + + cadenceRootDictionary := cadence.NewDictionary(cadenceRootPairs) + + rootDictionary := importValue(t, inter, cadenceRootDictionary).(*interpreter.DictionaryValue) + + // Check that the inner dictionaries are not inlined. 
+ // If the test fails here, adjust the value generation code above + // to ensure that the inner dictionaries are not inlined. + + rootDictionary.Iterate( + inter, + interpreter.EmptyLocationRange, + func(key, value interpreter.Value) (resume bool) { + + require.IsType(t, &interpreter.SomeValue{}, value) + someValue := value.(*interpreter.SomeValue) + + innerValue := someValue.InnerValue(inter, interpreter.EmptyLocationRange) + + require.IsType(t, &interpreter.DictionaryValue{}, innerValue) + innerDictionary := innerValue.(*interpreter.DictionaryValue) + require.False(t, innerDictionary.Inlined()) + + // continue iteration + return true + }, + ) + + writeValue( + inter, + owner, + storageMapKey, + rootDictionary, + ) + + resetStorage() + + rootDictionary = readValue( + t, + inter, + owner, + storageMapKey, + ).(*interpreter.DictionaryValue) + + var iterations int + rootDictionary.IterateReadOnlyLoaded( + inter, + interpreter.EmptyLocationRange, + func(_, _ interpreter.Value) (resume bool) { + iterations += 1 + + // continue iteration + return true + }, + ) + + require.Equal(t, 0, iterations) + + iterations = 0 + rootDictionary.Iterate( + inter, + interpreter.EmptyLocationRange, + func(_, _ interpreter.Value) (resume bool) { + iterations += 1 + + // continue iteration + return true + }, + ) + + require.Equal(t, expectedRootCount, iterations) + }) + + t.Run("array", func(t *testing.T) { + t.Parallel() + + inter, resetStorage := newRandomValueTestInterpreter(t) + + var cadenceRootElements []cadence.Value + + const expectedRootCount = 10 + const expectedInnerCount = 100 + + for i := 0; i < expectedRootCount; i++ { + var cadenceInnerElements []cadence.Value + + for j := 0; j < expectedInnerCount; j++ { + cadenceInnerElements = append( + cadenceInnerElements, + cadence.String(strings.Repeat("cadence", 1000)), + ) + } + + cadenceRootElements = append( + cadenceRootElements, + cadence.NewOptional( + cadence.NewArray(cadenceInnerElements), + ), + ) + } + + cadenceRootArray := cadence.NewArray(cadenceRootElements) + + rootArray := importValue(t, inter, cadenceRootArray).(*interpreter.ArrayValue) + + // Check that the inner arrays are not inlined. + // If the test fails here, adjust the value generation code above + // to ensure that the inner arrays are not inlined. 
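// Editorial note, not part of the patch: the checks that follow first confirm the
// children are not inlined; the key expectation of these sub-tests, as I read them,
// is the later contrast between the "read-only loaded" iteration and the full
// iteration after resetStorage(). The non-inlined children live in their own slabs,
// so right after the root value is re-read from storage nothing is loaded yet and
// the loaded-only walk visits zero elements, while the full walk loads from storage
// and visits every root element. A minimal sketch of that pattern, assuming the
// surrounding helpers (resetStorage, readValue) and variables:
//
//	resetStorage()
//	root := readValue(t, inter, owner, storageMapKey).(*interpreter.ArrayValue)
//
//	loaded := 0
//	root.IterateReadOnlyLoaded(inter, func(_ interpreter.Value) bool {
//		loaded++
//		return true // continue iteration
//	}, interpreter.EmptyLocationRange)
//	require.Equal(t, 0, loaded) // nothing loaded yet
//
//	all := 0
//	root.Iterate(inter, func(_ interpreter.Value) bool {
//		all++
//		return true // continue iteration
//	}, false, interpreter.EmptyLocationRange)
//	require.Equal(t, expectedRootCount, all)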
+ + rootArray.Iterate( + inter, + func(value interpreter.Value) (resume bool) { + + require.IsType(t, &interpreter.SomeValue{}, value) + someValue := value.(*interpreter.SomeValue) + + innerValue := someValue.InnerValue(inter, interpreter.EmptyLocationRange) + + require.IsType(t, &interpreter.ArrayValue{}, innerValue) + innerArray := innerValue.(*interpreter.ArrayValue) + require.False(t, innerArray.Inlined()) + + // continue iteration + return true + }, + false, + interpreter.EmptyLocationRange, + ) + + writeValue( + inter, + owner, + storageMapKey, + rootArray, + ) + + resetStorage() + + rootArray = readValue( + t, + inter, + owner, + storageMapKey, + ).(*interpreter.ArrayValue) + + var iterations int + rootArray.IterateReadOnlyLoaded( + inter, + func(_ interpreter.Value) (resume bool) { + iterations += 1 + + // continue iteration + return true + }, + interpreter.EmptyLocationRange, + ) + + require.Equal(t, 0, iterations) + + iterations = 0 + + rootArray.Iterate( + inter, + func(_ interpreter.Value) (resume bool) { + iterations += 1 + + // continue iteration + return true + }, + false, + interpreter.EmptyLocationRange, + ) + + require.Equal(t, expectedRootCount, iterations) + }) + + t.Run("composite", func(t *testing.T) { + t.Parallel() + + inter, resetStorage := newRandomValueTestInterpreter(t) + + newCadenceType := func(fieldCount int) *cadence.StructType { + typeIdentifier := fmt.Sprintf("S%d", fieldCount) + + typeLocation := common.AddressLocation{ + Address: owner, + Name: typeIdentifier, + } + + fieldNames := make([]string, 0, fieldCount) + for i := 0; i < fieldCount; i++ { + fieldName := fmt.Sprintf("field%d", i) + fieldNames = append(fieldNames, fieldName) + } + + cadenceFields := make([]cadence.Field, 0, fieldCount) + for _, fieldName := range fieldNames { + cadenceFields = append( + cadenceFields, + cadence.Field{ + Identifier: fieldName, + Type: cadence.AnyStructType, + }, + ) + } + + structType := cadence.NewStructType( + typeLocation, + typeIdentifier, + cadenceFields, + nil, + ) + + compositeType := &sema.CompositeType{ + Location: typeLocation, + Identifier: typeIdentifier, + Kind: common.CompositeKindStructure, + Members: &sema.StringMemberOrderedMap{}, + Fields: fieldNames, + } + + for _, fieldName := range fieldNames { + compositeType.Members.Set( + fieldName, + sema.NewUnmeteredPublicConstantFieldMember( + compositeType, + fieldName, + sema.AnyStructType, + "", + ), + ) + } + + // Add the type to the elaboration, to short-circuit the type-lookup. + inter.Program.Elaboration.SetCompositeType( + compositeType.ID(), + compositeType, + ) + + return structType + } + + var cadenceRootValues []cadence.Value + + const expectedRootCount = 10 + const expectedInnerCount = 100 + + rootStructType := newCadenceType(expectedRootCount) + innerStructType := newCadenceType(expectedInnerCount) + + for i := 0; i < expectedRootCount; i++ { + var cadenceInnerValues []cadence.Value + + for j := 0; j < expectedInnerCount; j++ { + cadenceInnerValues = append( + cadenceInnerValues, + cadence.String(strings.Repeat("cadence", 1000)), + ) + } + + cadenceRootValues = append( + cadenceRootValues, + cadence.NewOptional( + cadence.NewStruct(cadenceInnerValues). + WithType(innerStructType), + ), + ) + } + + cadenceRootStruct := cadence.NewStruct(cadenceRootValues). + WithType(rootStructType) + + rootStruct := importValue(t, inter, cadenceRootStruct).(*interpreter.CompositeValue) + + // Check that the inner structs are not inlined. 
+ // If the test fails here, adjust the value generation code above + // to ensure that the inner structs are not inlined. + + rootStruct.ForEachField( + inter, + func(fieldName string, value interpreter.Value) (resume bool) { + + require.IsType(t, &interpreter.SomeValue{}, value) + someValue := value.(*interpreter.SomeValue) + + innerValue := someValue.InnerValue(inter, interpreter.EmptyLocationRange) + + require.IsType(t, &interpreter.CompositeValue{}, innerValue) + innerStruct := innerValue.(*interpreter.CompositeValue) + require.False(t, innerStruct.Inlined()) + + // continue iteration + return true + }, + interpreter.EmptyLocationRange, + ) + + writeValue( + inter, + owner, + storageMapKey, + rootStruct, + ) + + resetStorage() + + rootStruct = readValue( + t, + inter, + owner, + storageMapKey, + ).(*interpreter.CompositeValue) + + var iterations int + rootStruct.ForEachReadOnlyLoadedField( + inter, + func(_ string, _ interpreter.Value) (resume bool) { + iterations += 1 + + // continue iteration + return true + }, + interpreter.EmptyLocationRange, + ) + + require.Equal(t, 0, iterations) + + iterations = 0 + rootStruct.ForEachField( + inter, + func(_ string, _ interpreter.Value) (resume bool) { + iterations += 1 + + // continue iteration + return true + }, + interpreter.EmptyLocationRange, + ) + + require.Equal(t, expectedRootCount, iterations) + }) +} + +func TestInterpretNestedAtreeContainerInSomeValueStorableTracking(t *testing.T) { + t.Parallel() + + owner := common.Address{'A'} + + const storageMapKey = interpreter.StringStorageMapKey("value") + + writeValue := func( + inter *interpreter.Interpreter, + owner common.Address, + storageMapKey interpreter.StorageMapKey, + value interpreter.Value, + ) { + value = value.Transfer( + inter, + interpreter.EmptyLocationRange, + atree.Address(owner), + false, + nil, + nil, + // TODO: is has no parent container = true correct? + true, + ) + + // Write the value to the storage map. + // However, the value is not referenced by the root of the storage yet + // (a storage map), so atree storage validation must be temporarily disabled + // to not report any "unreferenced slab" errors. + withoutAtreeStorageValidationEnabled( + inter, + func() struct{} { + inter.Storage(). + GetDomainStorageMap( + inter, + owner, + common.StorageDomainPathStorage, + true, + ). 
+ WriteValue( + inter, + storageMapKey, + value, + ) + + return struct{}{} + }, + ) + } + + readValue := func( + t *testing.T, + inter *interpreter.Interpreter, + owner common.Address, + storageMapKey interpreter.StorageMapKey, + ) interpreter.Value { + storageMap := inter.Storage().GetDomainStorageMap( + inter, + owner, + common.StorageDomainPathStorage, + false, + ) + require.NotNil(t, storageMap) + + readValue := storageMap.ReadValue(inter, storageMapKey) + require.NotNil(t, readValue) + + return readValue + } + + t.Run("dictionary (inlined -> uninlined -> inlined)", func(t *testing.T) { + t.Parallel() + + inter, resetStorage := newRandomValueTestInterpreter(t) + + // Start with an empty dictionary + + cadenceChildDictionary := cadence.NewDictionary(nil) + + cadenceRootOptionalValue := cadence.NewOptional(cadenceChildDictionary) + + rootSomeValue := importValue(t, inter, cadenceRootOptionalValue).(*interpreter.SomeValue) + + writeValue( + inter, + owner, + storageMapKey, + rootSomeValue, + ) + + resetStorage() + + rootSomeValue = readValue( + t, + inter, + owner, + storageMapKey, + ).(*interpreter.SomeValue) + + // Fill the dictionary until it becomes uninlined + + childDictionary := rootSomeValue.InnerValue( + inter, + interpreter.EmptyLocationRange, + ).(*interpreter.DictionaryValue) + + require.True(t, childDictionary.Inlined()) + + for i := 0; childDictionary.Inlined(); i++ { + childDictionary.Insert( + inter, + interpreter.EmptyLocationRange, + interpreter.NewUnmeteredStringValue(strconv.Itoa(i)), + interpreter.NewUnmeteredIntValueFromInt64(int64(i)), + ) + } + + require.False(t, childDictionary.Inlined()) + + uninlinedCount := childDictionary.Count() + + // Verify the contents of the dictionary + + childDictionary = rootSomeValue.InnerValue(inter, interpreter.EmptyLocationRange).(*interpreter.DictionaryValue) + + verify := func(count int) { + require.Equal(t, count, childDictionary.Count()) + + for i := 0; i < count; i++ { + key := interpreter.NewUnmeteredStringValue(strconv.Itoa(i)) + value, exists := childDictionary.Get( + inter, + interpreter.EmptyLocationRange, + key, + ) + require.True(t, exists) + expectedValue := interpreter.NewUnmeteredIntValueFromInt64(int64(i)) + AssertValuesEqual(t, inter, expectedValue, value) + } + } + + verify(uninlinedCount) + + // Remove the last element to make the dictionary inlined again + + inlinedCount := uninlinedCount - 1 + + existingValue := childDictionary.Remove( + inter, + interpreter.EmptyLocationRange, + interpreter.NewUnmeteredStringValue(strconv.Itoa(inlinedCount)), + ) + require.IsType(t, &interpreter.SomeValue{}, existingValue) + + require.True(t, childDictionary.Inlined()) + + // Verify the contents of the dictionary again + + verify(inlinedCount) + + // Add a new element to make the dictionary uninlined again + + childDictionary.Insert( + inter, + interpreter.EmptyLocationRange, + interpreter.NewUnmeteredStringValue(strconv.Itoa(inlinedCount)), + interpreter.NewUnmeteredIntValueFromInt64(int64(inlinedCount)), + ) + + require.False(t, childDictionary.Inlined()) + + // Verify the contents of the dictionary again + + verify(uninlinedCount) + + // Remove all elements + + for i := 0; i < uninlinedCount; i++ { + existingValue := childDictionary.Remove( + inter, + interpreter.EmptyLocationRange, + interpreter.NewUnmeteredStringValue(strconv.Itoa(i)), + ) + require.IsType(t, &interpreter.SomeValue{}, existingValue) + } + + require.Equal(t, 0, childDictionary.Count()) + require.True(t, childDictionary.Inlined()) + + if *validateAtree { + 
err := inter.Storage().CheckHealth() + require.NoError(t, err) + } + + // Validate after storage reset and reload of root value + + resetStorage() + + rootSomeValue = readValue( + t, + inter, + owner, + storageMapKey, + ).(*interpreter.SomeValue) + + childDictionary = rootSomeValue.InnerValue( + inter, + interpreter.EmptyLocationRange, + ).(*interpreter.DictionaryValue) + + require.Equal(t, 0, childDictionary.Count()) + require.True(t, childDictionary.Inlined()) + + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } + }) + + t.Run("dictionary (uninlined -> inlined -> uninlined)", func(t *testing.T) { + t.Parallel() + + inter, resetStorage := newRandomValueTestInterpreter(t) + + // Start with a large dictionary which will get uninlined + + var cadenceChildPairs []cadence.KeyValuePair + + for i := 0; i < 1000; i++ { + cadenceChildPairs = append( + cadenceChildPairs, + cadence.KeyValuePair{ + Key: cadence.String(strconv.Itoa(i)), + Value: cadence.NewInt(i), + }, + ) + } + + cadenceChildDictionary := cadence.NewDictionary(cadenceChildPairs) + + cadenceRootOptionalValue := cadence.NewOptional(cadenceChildDictionary) + + rootSomeValue := importValue(t, inter, cadenceRootOptionalValue).(*interpreter.SomeValue) + + writeValue( + inter, + owner, + storageMapKey, + rootSomeValue, + ) + + resetStorage() + + rootSomeValue = readValue( + t, + inter, + owner, + storageMapKey, + ).(*interpreter.SomeValue) + + childDictionary := rootSomeValue.InnerValue( + inter, + interpreter.EmptyLocationRange, + ).(*interpreter.DictionaryValue) + + // Check that the inner dictionary is not inlined. + // If the test fails here, adjust the value generation code above + // to ensure that the inner dictionary is not inlined. + + require.False(t, childDictionary.Inlined()) + + // Verify the contents of the dictionary + + inlinedCount := childDictionary.Count() + + // Verify the contents of the dictionary + + verify := func(count int) { + require.Equal(t, count, childDictionary.Count()) + + for i := 0; i < count; i++ { + key := interpreter.NewUnmeteredStringValue(strconv.Itoa(i)) + value, exists := childDictionary.Get( + inter, + interpreter.EmptyLocationRange, + key, + ) + require.True(t, exists) + expectedValue := interpreter.NewUnmeteredIntValueFromInt64(int64(i)) + AssertValuesEqual(t, inter, expectedValue, value) + } + } + + verify(inlinedCount) + + // Remove elements until the dictionary is inlined + + for i := inlinedCount - 1; !childDictionary.Inlined(); i-- { + existingValue := childDictionary.Remove( + inter, + interpreter.EmptyLocationRange, + interpreter.NewUnmeteredStringValue(strconv.Itoa(i)), + ) + + require.IsType(t, &interpreter.SomeValue{}, existingValue) + existingSomeValue := existingValue.(*interpreter.SomeValue) + + existingInnerValue := existingSomeValue.InnerValue(inter, interpreter.EmptyLocationRange) + expectedValue := interpreter.NewUnmeteredIntValueFromInt64(int64(i)) + AssertValuesEqual(t, inter, expectedValue, existingInnerValue) + + } + + inlinedCount = childDictionary.Count() + + require.True(t, childDictionary.Inlined()) + + // Verify the contents of the dictionary again + + verify(inlinedCount) + + // Add element to make the dictionary uninlined again + + childDictionary.Insert( + inter, + interpreter.EmptyLocationRange, + interpreter.NewUnmeteredStringValue(strconv.Itoa(inlinedCount)), + interpreter.NewUnmeteredIntValueFromInt64(int64(inlinedCount)), + ) + + require.False(t, childDictionary.Inlined()) + + // Verify the contents of the dictionary again + 
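// Editorial note, not part of the patch: the *validateAtree guards in these
// sub-tests gate the relatively expensive storage health checks behind a test
// flag. The flag itself is not shown in this diff; presumably it is defined
// elsewhere in the package along the lines of (assumption):
//
//	var validateAtree = flag.Bool("validate-atree", true, "enable expensive atree validation in tests")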
+ uninlinedCount := inlinedCount + 1 + + verify(uninlinedCount) + + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } + + // Validate after storage reset and reload of root value + + resetStorage() + + rootSomeValue = readValue( + t, + inter, + owner, + storageMapKey, + ).(*interpreter.SomeValue) + + childDictionary = rootSomeValue.InnerValue( + inter, + interpreter.EmptyLocationRange, + ).(*interpreter.DictionaryValue) + + verify(uninlinedCount) + + require.False(t, childDictionary.Inlined()) + + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } + }) + + t.Run("array (inlined -> uninlined -> inlined)", func(t *testing.T) { + t.Parallel() + + inter, resetStorage := newRandomValueTestInterpreter(t) + + // Start with an empty array + + cadenceChildArray := cadence.NewArray(nil) + + cadenceRootOptionalValue := cadence.NewOptional(cadenceChildArray) + + rootSomeValue := importValue(t, inter, cadenceRootOptionalValue).(*interpreter.SomeValue) + + writeValue( + inter, + owner, + storageMapKey, + rootSomeValue, + ) + + resetStorage() + + rootSomeValue = readValue( + t, + inter, + owner, + storageMapKey, + ).(*interpreter.SomeValue) + + // Fill the array until it becomes uninlined + + childArray := rootSomeValue.InnerValue( + inter, + interpreter.EmptyLocationRange, + ).(*interpreter.ArrayValue) + + require.True(t, childArray.Inlined()) + + for i := 0; childArray.Inlined(); i++ { + childArray.Append( + inter, + interpreter.EmptyLocationRange, + interpreter.NewUnmeteredStringValue(strconv.Itoa(i)), + ) + } + + require.False(t, childArray.Inlined()) + + uninlinedCount := childArray.Count() + + // Verify the contents of the array + + childArray = rootSomeValue.InnerValue( + inter, + interpreter.EmptyLocationRange, + ).(*interpreter.ArrayValue) + + verify := func(count int) { + require.Equal(t, count, childArray.Count()) + + for i := 0; i < count; i++ { + value := childArray.Get(inter, interpreter.EmptyLocationRange, i) + expectedValue := interpreter.NewUnmeteredStringValue(strconv.Itoa(i)) + AssertValuesEqual(t, inter, expectedValue, value) + } + } + + verify(uninlinedCount) + + // Remove the last element to make the array inlined again + + inlinedCount := uninlinedCount - 1 + + childArray.Remove( + inter, + interpreter.EmptyLocationRange, + inlinedCount, + ) + + require.True(t, childArray.Inlined()) + + // Verify the contents of the array again + + verify(inlinedCount) + + // Add a new element to make the array uninlined again + + childArray.Append( + inter, + interpreter.EmptyLocationRange, + interpreter.NewUnmeteredStringValue(strconv.Itoa(inlinedCount)), + ) + + require.False(t, childArray.Inlined()) + + // Verify the contents of the array again + + verify(uninlinedCount) + + // Remove all elements + + for i := uninlinedCount - 1; i >= 0; i-- { + childArray.Remove( + inter, + interpreter.EmptyLocationRange, + i, + ) + } + + require.Equal(t, 0, childArray.Count()) + require.True(t, childArray.Inlined()) + + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } + + // Validate after storage reset and reload of root value + + resetStorage() + + rootSomeValue = readValue( + t, + inter, + owner, + storageMapKey, + ).(*interpreter.SomeValue) + + childArray = rootSomeValue.InnerValue( + inter, + interpreter.EmptyLocationRange, + ).(*interpreter.ArrayValue) + + require.Equal(t, 0, childArray.Count()) + require.True(t, childArray.Inlined()) + + if *validateAtree { + err := 
inter.Storage().CheckHealth() + require.NoError(t, err) + } + }) + + t.Run("array (uninlined -> inlined -> uninlined)", func(t *testing.T) { + t.Parallel() + + inter, resetStorage := newRandomValueTestInterpreter(t) + + // Start with a large array which will get uninlined + + var cadenceChildElements []cadence.Value + + for i := 0; i < 1000; i++ { + cadenceChildElements = append( + cadenceChildElements, + cadence.String(strconv.Itoa(i)), + ) + } + + cadenceChildArray := cadence.NewArray(cadenceChildElements) + + cadenceRootOptionalValue := cadence.NewOptional(cadenceChildArray) + + rootSomeValue := importValue(t, inter, cadenceRootOptionalValue).(*interpreter.SomeValue) + + writeValue( + inter, + owner, + storageMapKey, + rootSomeValue, + ) + + resetStorage() + + rootSomeValue = readValue( + t, + inter, + owner, + storageMapKey, + ).(*interpreter.SomeValue) + + childArray := rootSomeValue.InnerValue( + inter, + interpreter.EmptyLocationRange, + ).(*interpreter.ArrayValue) + + // Check that the inner array is not inlined. + // If the test fails here, adjust the value generation code above + // to ensure that the inner array is not inlined. + + require.False(t, childArray.Inlined()) + + // Verify the contents of the array + + inlinedCount := childArray.Count() + + // Verify the contents of the array + + verify := func(count int) { + require.Equal(t, count, childArray.Count()) + + for i := 0; i < count; i++ { + value := childArray.Get(inter, interpreter.EmptyLocationRange, i) + expectedValue := interpreter.NewUnmeteredStringValue(strconv.Itoa(i)) + AssertValuesEqual(t, inter, expectedValue, value) + } + } + + verify(inlinedCount) + + // Remove elements until the array is inlined + + for i := inlinedCount - 1; !childArray.Inlined(); i-- { + existingValue := childArray.Remove( + inter, + interpreter.EmptyLocationRange, + i, + ) + expectedValue := interpreter.NewUnmeteredStringValue(strconv.Itoa(i)) + AssertValuesEqual(t, inter, expectedValue, existingValue) + } + + inlinedCount = childArray.Count() + + require.True(t, childArray.Inlined()) + + // Verify the contents of the array again + + verify(inlinedCount) + + // Add element to make the array uninlined again + + childArray.Append( + inter, + interpreter.EmptyLocationRange, + interpreter.NewUnmeteredStringValue(strconv.Itoa(inlinedCount)), + ) + + require.False(t, childArray.Inlined()) + + // Verify the contents of the array again + + uninlinedCount := inlinedCount + 1 + + verify(uninlinedCount) + + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } + + // Validate after storage reset and reload of root value + + resetStorage() + + rootSomeValue = readValue( + t, + inter, + owner, + storageMapKey, + ).(*interpreter.SomeValue) + + childArray = rootSomeValue.InnerValue( + inter, + interpreter.EmptyLocationRange, + ).(*interpreter.ArrayValue) + + verify(uninlinedCount) + + require.False(t, childArray.Inlined()) + + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } + }) + + t.Run("composite (inlined -> uninlined -> inlined)", func(t *testing.T) { + t.Parallel() + + inter, resetStorage := newRandomValueTestInterpreter(t) + + // Start with an empty composite + + const qualifiedIdentifier = "Test" + location := common.AddressLocation{ + Address: owner, + Name: qualifiedIdentifier, + } + + cadenceStructType := cadence.NewStructType( + location, + qualifiedIdentifier, + nil, + nil, + ) + + semaStructType := &sema.CompositeType{ + Location: location, + Identifier: 
qualifiedIdentifier, + Kind: common.CompositeKindStructure, + Members: &sema.StringMemberOrderedMap{}, + } + + // Add the type to the elaboration, to short-circuit the type-lookup. + inter.Program.Elaboration.SetCompositeType( + semaStructType.ID(), + semaStructType, + ) + + cadenceChildComposite := cadence.NewStruct(nil).WithType(cadenceStructType) + + cadenceRootOptionalValue := cadence.NewOptional(cadenceChildComposite) + + rootSomeValue := importValue(t, inter, cadenceRootOptionalValue).(*interpreter.SomeValue) + + writeValue( + inter, + owner, + storageMapKey, + rootSomeValue, + ) + + resetStorage() + + rootSomeValue = readValue( + t, + inter, + owner, + storageMapKey, + ).(*interpreter.SomeValue) + + // Fill the composite until it becomes uninlined + + childComposite := rootSomeValue.InnerValue( + inter, + interpreter.EmptyLocationRange, + ).(*interpreter.CompositeValue) + + require.True(t, childComposite.Inlined()) + + for i := 0; childComposite.Inlined(); i++ { + childComposite.SetMember( + inter, + interpreter.EmptyLocationRange, + strconv.Itoa(i), + interpreter.NewUnmeteredIntValueFromInt64(int64(i)), + ) + } + + require.False(t, childComposite.Inlined()) + + uninlinedCount := childComposite.FieldCount() + + // Verify the contents of the composite + + childComposite = rootSomeValue.InnerValue( + inter, + interpreter.EmptyLocationRange, + ).(*interpreter.CompositeValue) + + verify := func(count int) { + require.Equal(t, count, childComposite.FieldCount()) + + for i := 0; i < count; i++ { + value := childComposite.GetMember( + inter, + interpreter.EmptyLocationRange, + strconv.Itoa(i), + ) + expectedValue := interpreter.NewUnmeteredIntValueFromInt64(int64(i)) + AssertValuesEqual(t, inter, expectedValue, value) + } + } + + verify(uninlinedCount) + + // Remove the last element to make the composite inlined again + + inlinedCount := uninlinedCount - 1 + + childComposite.RemoveMember( + inter, + interpreter.EmptyLocationRange, + strconv.Itoa(inlinedCount), + ) + + require.True(t, childComposite.Inlined()) - owner := common.Address{'A'} + // Verify the contents of the composite again - // Create a small child array which will be inlined in parent container. - childArray1 := interpreter.NewArrayValue( - inter, - interpreter.EmptyLocationRange, - &interpreter.VariableSizedStaticType{ - Type: interpreter.PrimitiveStaticTypeAnyStruct, - }, - owner, - interpreter.NewUnmeteredStringValue("a"), - ) + verify(inlinedCount) - size := int(atree.MaxInlineArrayElementSize()) - 10 + // Add a new element to make the composite uninlined again - // Create a large child array which will NOT be inlined in parent container. - childArray2 := interpreter.NewArrayValue( - inter, - interpreter.EmptyLocationRange, - &interpreter.VariableSizedStaticType{ - Type: interpreter.PrimitiveStaticTypeAnyStruct, - }, - owner, - interpreter.NewUnmeteredStringValue(strings.Repeat("b", size)), - interpreter.NewUnmeteredStringValue(strings.Repeat("c", size)), - ) + childComposite.SetMember( + inter, + interpreter.EmptyLocationRange, + strconv.Itoa(inlinedCount), + interpreter.NewUnmeteredIntValueFromInt64(int64(inlinedCount)), + ) - // Create an array with childArray1 and childArray2. 
- array := interpreter.NewArrayValue( - inter, - interpreter.EmptyLocationRange, - &interpreter.VariableSizedStaticType{ - Type: interpreter.PrimitiveStaticTypeAnyStruct, - }, - owner, - childArray1, // inlined - childArray2, // not inlined - ) + require.False(t, childComposite.Inlined()) - // DeepRemove removes all elements (childArray1 and childArray2) recursively in array. - array.DeepRemove(inter, true) + // Verify the contents of the composite again - // As noted earlier in comments at the top of this test: - // storage.CheckHealth() should be called after array.DeepRemove(), not in the middle of array.DeepRemove(). - // This happens when: - // - array.DeepRemove() calls childArray1 and childArray2 DeepRemove() - // - DeepRemove() calls maybeValidateAtreeValue() - // - maybeValidateAtreeValue() calls CheckHealth() -} + verify(uninlinedCount) -// This test is a reproducer for "slab was not reachable from leaves" false alarm. -// https://github.com/onflow/cadence/pull/2882#issuecomment-1796381227 -// In this test, storage.CheckHealth() should be called after DictionaryValue.Transfer() -// with remove flag, not in the middle of DictionaryValue.Transfer(). -func TestCheckStorageHealthInMiddleOfTransferAndRemove(t *testing.T) { - r := newRandomValueGenerator() - t.Logf("seed: %d", r.seed) + // Remove all elements - storage := newUnmeteredInMemoryStorage() - inter, err := interpreter.NewInterpreter( - &interpreter.Program{ - Program: ast.NewProgram(nil, []ast.Declaration{}), - Elaboration: sema.NewElaboration(nil), - }, - TestLocation, - &interpreter.Config{ - Storage: storage, - ImportLocationHandler: func(inter *interpreter.Interpreter, location common.Location) interpreter.Import { - return interpreter.VirtualImport{ - Elaboration: inter.Program.Elaboration, - } - }, - AtreeStorageValidationEnabled: true, - AtreeValueValidationEnabled: true, - }, - ) - require.NoError(t, err) + for i := 0; i < uninlinedCount; i++ { + childComposite.RemoveMember( + inter, + interpreter.EmptyLocationRange, + strconv.Itoa(i), + ) + } - // Create large array value with zero address which will not be inlined. - gchildArray := interpreter.NewArrayValue( - inter, - interpreter.EmptyLocationRange, - &interpreter.VariableSizedStaticType{ - Type: interpreter.PrimitiveStaticTypeAnyStruct, - }, - common.ZeroAddress, - interpreter.NewUnmeteredStringValue(strings.Repeat("b", int(atree.MaxInlineArrayElementSize())-10)), - interpreter.NewUnmeteredStringValue(strings.Repeat("c", int(atree.MaxInlineArrayElementSize())-10)), - ) + require.Equal(t, 0, childComposite.FieldCount()) + require.True(t, childComposite.Inlined()) - // Create small composite value with zero address which will be inlined. 
- identifier := "test" + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } - location := common.AddressLocation{ - Address: common.ZeroAddress, - Name: identifier, - } + // Validate after storage reset and reload of root value - compositeType := &sema.CompositeType{ - Location: location, - Identifier: identifier, - Kind: common.CompositeKindStructure, - } + resetStorage() - fields := []interpreter.CompositeField{ - interpreter.NewUnmeteredCompositeField("a", interpreter.NewUnmeteredUInt64Value(0)), - interpreter.NewUnmeteredCompositeField("b", interpreter.NewUnmeteredUInt64Value(1)), - interpreter.NewUnmeteredCompositeField("c", interpreter.NewUnmeteredUInt64Value(2)), - } + rootSomeValue = readValue( + t, + inter, + owner, + storageMapKey, + ).(*interpreter.SomeValue) - compositeType.Members = &sema.StringMemberOrderedMap{} - for _, field := range fields { - compositeType.Members.Set( - field.Name, - sema.NewUnmeteredPublicConstantFieldMember( - compositeType, - field.Name, - sema.AnyStructType, - "", - ), + childComposite = rootSomeValue.InnerValue( + inter, + interpreter.EmptyLocationRange, + ).(*interpreter.CompositeValue) + + require.Equal(t, 0, childComposite.FieldCount()) + require.True(t, childComposite.Inlined()) + + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } + }) + + t.Run("composite (uninlined -> inlined -> uninlined)", func(t *testing.T) { + t.Parallel() + + inter, resetStorage := newRandomValueTestInterpreter(t) + + // Start with a large composite which will get uninlined + + const qualifiedIdentifier = "Test" + location := common.AddressLocation{ + Address: owner, + Name: qualifiedIdentifier, + } + + const fieldCount = 1000 + + fields := make([]cadence.Field, fieldCount) + for i := 0; i < fieldCount; i++ { + fields[i] = cadence.Field{ + Identifier: strconv.Itoa(i), + Type: cadence.IntType, + } + } + + cadenceStructType := cadence.NewStructType( + location, + qualifiedIdentifier, + fields, + nil, ) - } - // Add the type to the elaboration, to short-circuit the type-lookup. - inter.Program.Elaboration.SetCompositeType( - compositeType.ID(), - compositeType, - ) + semaStructType := &sema.CompositeType{ + Location: location, + Identifier: qualifiedIdentifier, + Kind: common.CompositeKindStructure, + Members: &sema.StringMemberOrderedMap{}, + } - gchildComposite := interpreter.NewCompositeValue( - inter, - interpreter.EmptyLocationRange, - location, - identifier, - common.CompositeKindStructure, - fields, - common.ZeroAddress, - ) + // Add the type to the elaboration, to short-circuit the type-lookup. 
+ inter.Program.Elaboration.SetCompositeType( + semaStructType.ID(), + semaStructType, + ) + fieldNames := make([]string, fieldCount) + + for i := 0; i < fieldCount; i++ { + fieldName := fields[0].Identifier + semaStructType.Members.Set( + fieldName, + sema.NewUnmeteredPublicConstantFieldMember( + semaStructType, + fieldName, + sema.IntType, + "", + ), + ) + fieldNames[i] = fieldName + } + semaStructType.Fields = fieldNames - // Create large dictionary with zero address with 2 data slabs containing: - // - SomeValue(SlabID) as first physical element in the first data slab - // - inlined CompositeValue as last physical element in the second data slab + var cadenceChildElements []cadence.Value - numberOfValues := 10 - firstElementIndex := 7 // index of first physical element in the first data slab - lastElementIndex := 8 // index of last physical element in the last data slab - keyValues := make([]interpreter.Value, numberOfValues*2) - for i := 0; i < numberOfValues; i++ { - key := interpreter.NewUnmeteredUInt64Value(uint64(i)) + for i := 0; i < fieldCount; i++ { + cadenceChildElements = append( + cadenceChildElements, + cadence.NewInt(i), + ) - var value interpreter.Value - switch i { - case firstElementIndex: - value = interpreter.NewUnmeteredSomeValueNonCopying(gchildArray) + } - case lastElementIndex: - value = gchildComposite + cadenceChildComposite := cadence.NewStruct(cadenceChildElements). + WithType(cadenceStructType) - default: - // Other values are inlined random strings. - const size = 235 - value = interpreter.NewUnmeteredStringValue(r.randomUTF8StringOfSize(size)) + cadenceRootOptionalValue := cadence.NewOptional(cadenceChildComposite) + + rootSomeValue := importValue(t, inter, cadenceRootOptionalValue).(*interpreter.SomeValue) + + writeValue( + inter, + owner, + storageMapKey, + rootSomeValue, + ) + + resetStorage() + + rootSomeValue = readValue( + t, + inter, + owner, + storageMapKey, + ).(*interpreter.SomeValue) + + childComposite := rootSomeValue.InnerValue( + inter, + interpreter.EmptyLocationRange, + ).(*interpreter.CompositeValue) + + // Check that the inner composite is not inlined. + // If the test fails here, adjust the value generation code above + // to ensure that the inner composite is not inlined. + + require.False(t, childComposite.Inlined()) + + // Verify the contents of the composite + + inlinedCount := childComposite.FieldCount() + + // Verify the contents of the composite + + verify := func(count int) { + require.Equal(t, count, childComposite.FieldCount()) + + for i := 0; i < count; i++ { + value := childComposite.GetMember( + inter, + interpreter.EmptyLocationRange, + strconv.Itoa(i), + ) + expectedValue := interpreter.NewUnmeteredIntValueFromInt64(int64(i)) + AssertValuesEqual(t, inter, expectedValue, value) + } } - keyValues[i*2] = key - keyValues[i*2+1] = value - } + verify(inlinedCount) - childMap := interpreter.NewDictionaryValueWithAddress( - inter, - interpreter.EmptyLocationRange, - &interpreter.DictionaryStaticType{ - KeyType: interpreter.PrimitiveStaticTypeAnyStruct, - ValueType: interpreter.PrimitiveStaticTypeAnyStruct, - }, - common.ZeroAddress, - keyValues..., - ) + // Remove elements until the composite is inlined - // Create dictionary with non-zero address containing child dictionary. 
- owner := common.Address{'A'} - m := interpreter.NewDictionaryValueWithAddress( - inter, - interpreter.EmptyLocationRange, - &interpreter.DictionaryStaticType{ - KeyType: interpreter.PrimitiveStaticTypeAnyStruct, - ValueType: interpreter.PrimitiveStaticTypeAnyStruct, - }, - owner, - interpreter.NewUnmeteredUInt64Value(0), - childMap, - ) + for i := inlinedCount - 1; !childComposite.Inlined(); i-- { + existingValue := childComposite.RemoveMember( + inter, + interpreter.EmptyLocationRange, + strconv.Itoa(i), + ) - inter.ValidateAtreeValue(m) + expectedValue := interpreter.NewUnmeteredIntValueFromInt64(int64(i)) + AssertValuesEqual(t, inter, expectedValue, existingValue) - require.NoError(t, storage.CheckHealth()) + } + + inlinedCount = childComposite.FieldCount() + + require.True(t, childComposite.Inlined()) + + // Verify the contents of the composite again + + verify(inlinedCount) + + // Add element to make the composite uninlined again + + childComposite.SetMember( + inter, + interpreter.EmptyLocationRange, + strconv.Itoa(inlinedCount), + interpreter.NewUnmeteredIntValueFromInt64(int64(inlinedCount)), + ) + + require.False(t, childComposite.Inlined()) + + // Verify the contents of the composite again + + uninlinedCount := inlinedCount + 1 + + verify(uninlinedCount) + + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } + + // Validate after storage reset and reload of root value + + resetStorage() + + rootSomeValue = readValue( + t, + inter, + owner, + storageMapKey, + ).(*interpreter.SomeValue) + + childComposite = rootSomeValue.InnerValue( + inter, + interpreter.EmptyLocationRange, + ).(*interpreter.CompositeValue) + + verify(uninlinedCount) + + require.False(t, childComposite.Inlined()) + + if *validateAtree { + err := inter.Storage().CheckHealth() + require.NoError(t, err) + } + }) } diff --git a/runtime/runtime_test.go b/runtime/runtime_test.go index 44d1e5ed5..891ab78e9 100644 --- a/runtime/runtime_test.go +++ b/runtime/runtime_test.go @@ -11954,3 +11954,327 @@ func TestRuntimeInvocationReturnTypeInferenceFailure(t *testing.T) { var typeErr *sema.InvocationTypeInferenceError require.ErrorAs(t, err, &typeErr) } + +func TestRuntimeSomeValueChildContainerMutation(t *testing.T) { + + t.Parallel() + + buyTicketTx := []byte(` + import Foo from 0x1 + + transaction() { + prepare(acct: auth(Storage, Capabilities) &Account) { + Foo.logVaultBalance() + var pool = Foo.borrowLotteryPool()! 
+ pool.buyTickets() + Foo.logVaultBalance() + } + execute {} + } + `) + + nextTransactionLocation := NewTransactionLocationGenerator() + + setupTest := func(t *testing.T) ( + runTransaction func(tx []byte) (logs []string), + ) { + + rt := NewTestInterpreterRuntime() + + accountCodes := map[Location][]byte{} + + address := common.MustBytesToAddress([]byte{0x1}) + + var logs []string + + runtimeInterface := &TestRuntimeInterface{ + Storage: NewTestLedger(nil, nil), + OnGetSigningAccounts: func() ([]Address, error) { + return []Address{address}, nil + }, + OnResolveLocation: NewSingleIdentifierLocationResolver(t), + OnGetAccountContractCode: func(location common.AddressLocation) (code []byte, err error) { + return accountCodes[location], nil + }, + OnUpdateAccountContractCode: func(location common.AddressLocation, code []byte) error { + accountCodes[location] = code + return nil + }, + OnProgramLog: func(message string) { + logs = append(logs, message) + }, + OnDecodeArgument: func(b []byte, t cadence.Type) (cadence.Value, error) { + return json.Decode(nil, b) + }, + OnEmitEvent: func(event cadence.Event) error { + return nil + }, + } + + runTransaction = func(tx []byte) []string { + + logs = logs[:0] + + err := rt.ExecuteTransaction( + Script{ + Source: tx, + }, + Context{ + Interface: runtimeInterface, + Location: nextTransactionLocation(), + }, + ) + require.NoError(t, err) + + return logs + } + + return runTransaction + } + + t.Run("non optional vault", func(t *testing.T) { + + t.Parallel() + + contractFoo := ` + access(all) contract Foo { + access(all) resource Vault { + access(all) + var balance: UFix64 + init(balance: UFix64) { + self.balance = balance + } + access(all) fun withdraw(amount: UFix64): @Vault { + self.balance = self.balance - amount + return <-create Vault(balance: amount) + } + access(all) fun deposit(from: @Vault) { + self.balance = self.balance + from.balance + destroy from + } + } + access(all) fun createEmptyVault(): @Vault { + return <- create Vault(balance: 0.0) + } + access(all) resource LotteryPool { + access(contract) + let ftVault: @Vault + init() { + self.ftVault <- Foo.createEmptyVault() + } + access(all) + fun buyTickets() { + self.borrowVault().deposit(from: <- create Vault(balance: 5.0)) + } + access(all) fun buyNewTicket() { + self.borrowVault().deposit(from: <- create Vault(balance: 5.0)) + } + access(self) + view fun borrowVault(): &Vault { + return &self.ftVault as &Vault + } + } + init() { + self.account.storage.save(<- create LotteryPool(), to: /storage/lottery_pool) + } + access(all) fun borrowLotteryPool(): &LotteryPool? { + return self.account.storage.borrow<&LotteryPool>(from: /storage/lottery_pool) + } + access(all) fun logVaultBalance() { + var pool = self.borrowLotteryPool()! 
+ log(pool.ftVault.balance) + } + } + ` + + runTransaction := setupTest(t) + + runTransaction(DeploymentTransaction( + "Foo", + []byte(contractFoo), + )) + + logs := runTransaction(buyTicketTx) + assert.Equal(t, []string{"0.00000000", "5.00000000"}, logs) + + logs = runTransaction(buyTicketTx) + assert.Equal(t, []string{"5.00000000", "10.00000000"}, logs) + }) + + t.Run("optional vault", func(t *testing.T) { + + t.Parallel() + + contractFoo := ` + access(all) contract Foo { + access(all) resource Vault { + access(all) + var balance: UFix64 + init(balance: UFix64) { + self.balance = balance + } + access(all) fun withdraw(amount: UFix64): @Vault { + self.balance = self.balance - amount + return <-create Vault(balance: amount) + } + access(all) fun deposit(from: @Vault) { + self.balance = self.balance + from.balance + destroy from + } + } + access(all) fun createEmptyVault(): @Vault { + return <- create Vault(balance: 0.0) + } + access(all) resource LotteryPool { + access(contract) + let ftVault: @Vault? + init() { + self.ftVault <- Foo.createEmptyVault() + } + access(all) + fun buyTickets() { + self.borrowVault().deposit(from: <- create Vault(balance: 5.0)) + } + access(all) fun buyNewTicket() { + self.borrowVault().deposit(from: <- create Vault(balance: 5.0)) + } + access(self) + view fun borrowVault(): &Vault { + return &self.ftVault as &Vault? ?? panic("Cannot borrow vault") + } + } + init() { + self.account.storage.save(<- create LotteryPool(), to: /storage/lottery_pool) + } + access(all) fun borrowLotteryPool(): &LotteryPool? { + return self.account.storage.borrow<&LotteryPool>(from: /storage/lottery_pool) + } + access(all) fun logVaultBalance() { + var pool = self.borrowLotteryPool()! + log(pool.ftVault!.balance) + } + } + ` + + runTransaction := setupTest(t) + + runTransaction(DeploymentTransaction( + "Foo", + []byte(contractFoo), + )) + + logs := runTransaction(buyTicketTx) + assert.Equal(t, []string{"0.00000000", "5.00000000"}, logs) + + logs = runTransaction(buyTicketTx) + assert.Equal(t, []string{"5.00000000", "10.00000000"}, logs) + }) + + t.Run("deeply nested optional vault", func(t *testing.T) { + contractFoo := ` + access(all) + contract Foo { + access(all) + resource Vault { + access(all) + var balance: UFix64 + init(balance: UFix64) { + self.balance = balance + } + access(all) + fun withdraw(amount: UFix64): @Vault { + self.balance = self.balance - amount + return <-create Vault(balance: amount) + } + access(all) + fun deposit(from: @Vault) { + self.balance = self.balance + from.balance + destroy from + } + } + access(all) + fun createEmptyVault(): @Vault { + return <- create Vault(balance: 0.0) + } + access(all) + resource LotteryPool { + access(contract) + let jackpotPool: @Change + access(contract) + let lotteries: @{UInt64: Lottery} + init() { + self.jackpotPool <- create Change() + self.lotteries <- {0: <- create Lottery()} + } + access(all) + fun buyTickets() { + var lotteryRef = self.borrowLotteryRef()! + lotteryRef.buyNewTicket() + } + access(self) + fun borrowLotteryRef(): &Lottery? { + return &self.lotteries[0] + } + } + access(all) + resource Lottery { + access(contract) + let current: @Change + init() { + self.current <- create Change() + } + access(all) + fun buyNewTicket() { + var change = self.borrowCurrentLotteryChange() + change.forceMerge() + } + access(contract) + view fun borrowCurrentLotteryChange(): &Change { + return &self.current + } + } + access(all) + resource Change { + access(contract) + var ftVault: @Vault? 
+                init() {
+                    self.ftVault <- Foo.createEmptyVault()
+                }
+                access(all)
+                fun forceMerge() {
+                    self.borrowVault().deposit(from: <- create Vault(balance: 5.0))
+                }
+                access(self)
+                view fun borrowVault(): &Vault {
+                    return &self.ftVault as &Vault? ?? panic("Cannot borrow vault")
+                }
+            }
+            init() {
+                self.account.storage.save(<- create LotteryPool(), to: /storage/lottery_pool)
+            }
+            access(all)
+            fun borrowLotteryPool(): &LotteryPool? {
+                return self.account.storage.borrow<&LotteryPool>(from: /storage/lottery_pool)
+            }
+            access(all)
+            fun logVaultBalance() {
+                var pool = self.borrowLotteryPool()!
+                log(pool.lotteries[0]!.current.ftVault!.balance)
+            }
+        }
+        `
+
+        runTransaction := setupTest(t)
+
+        runTransaction(DeploymentTransaction(
+            "Foo",
+            []byte(contractFoo),
+        ))
+
+        logs := runTransaction(buyTicketTx)
+        assert.Equal(t, []string{"0.00000000", "5.00000000"}, logs)
+
+        logs = runTransaction(buyTicketTx)
+        assert.Equal(t, []string{"5.00000000", "10.00000000"}, logs)
+    })
+}
diff --git a/tools/compatibility-check/go.mod b/tools/compatibility-check/go.mod
index 4e7a813b9..e4755a3d0 100644
--- a/tools/compatibility-check/go.mod
+++ b/tools/compatibility-check/go.mod
@@ -43,7 +43,7 @@ require (
 	github.com/multiformats/go-multibase v0.2.0 // indirect
 	github.com/multiformats/go-multihash v0.2.3 // indirect
 	github.com/multiformats/go-varint v0.0.7 // indirect
-	github.com/onflow/atree v0.8.1 // indirect
+	github.com/onflow/atree v0.9.0 // indirect
 	github.com/onflow/crypto v0.25.2 // indirect
 	github.com/onflow/flow-core-contracts/lib/go/templates v1.3.3-0.20241017220455-79fdc6c8ba53 // indirect
 	github.com/onflow/flow-ft/lib/go/contracts v1.0.1 // indirect
diff --git a/tools/compatibility-check/go.sum b/tools/compatibility-check/go.sum
index 39d0ed901..79fce2ea6 100644
--- a/tools/compatibility-check/go.sum
+++ b/tools/compatibility-check/go.sum
@@ -342,8 +342,8 @@ github.com/multiformats/go-varint v0.0.7 h1:sWSGR+f/eu5ABZA2ZpYKBILXTTs9JWpdEM/n
 github.com/multiformats/go-varint v0.0.7/go.mod h1:r8PUYw/fD/SjBCiKOoDlGF6QawOELpZAu9eioSos/OU=
 github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
 github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
-github.com/onflow/atree v0.8.1 h1:DAnPnL9/Ks3LaAnkQVokokTBG/znTW0DJfovDtJDhLI=
-github.com/onflow/atree v0.8.1/go.mod h1:FT6udJF9Q7VQTu3wknDhFX+VV4D44ZGdqtTAE5iztck=
+github.com/onflow/atree v0.9.0 h1:M+Z/UPwzv0/Yy7ChI5T1ZIHD3YN1cs/hxGEs/HWhzaY=
+github.com/onflow/atree v0.9.0/go.mod h1:FT6udJF9Q7VQTu3wknDhFX+VV4D44ZGdqtTAE5iztck=
 github.com/onflow/crypto v0.25.2 h1:GjHunqVt+vPcdqhxxhAXiMIF3YiLX7gTuTR5O+VG2ns=
 github.com/onflow/crypto v0.25.2/go.mod h1:fY7eLqUdMKV8EGOw301unP8h7PvLVy8/6gVR++/g0BY=
 github.com/onflow/flow-core-contracts/lib/go/contracts v1.4.0 h1:R86HaOuk6vpuECZnriEUE7bw9inC2AtdSn8lL/iwQLQ=
diff --git a/tools/storage-explorer/go.mod b/tools/storage-explorer/go.mod
index 1a253cbae..03f9bd430 100644
--- a/tools/storage-explorer/go.mod
+++ b/tools/storage-explorer/go.mod
@@ -4,9 +4,9 @@ go 1.23
 
 require (
 	github.com/gorilla/mux v1.8.1
-	github.com/onflow/atree v0.8.0
-	github.com/onflow/cadence v1.0.0-preview.52
-	github.com/onflow/flow-go v0.37.10
+	github.com/onflow/atree v0.9.0
+	github.com/onflow/cadence v1.0.0-preview-atree-register-inlining.29
+	github.com/onflow/flow-go v0.35.7-crescendo-preview.23-atree-inlining
 	github.com/rs/zerolog v1.32.0
 )
 
@@ -164,7 +164,7 @@ require (
 	github.com/spf13/viper v1.15.0 // indirect
 	github.com/status-im/keycard-go v0.2.0 // indirect
 	github.com/stretchr/objx v0.5.2 // indirect
-	github.com/stretchr/testify v1.9.0 // indirect
+	github.com/stretchr/testify v1.10.0 // indirect
 	github.com/subosito/gotenv v1.4.2 // indirect
 	github.com/supranational/blst v0.3.11 // indirect
 	github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 // indirect
@@ -177,10 +177,7 @@ require (
 	github.com/vmihailenco/msgpack/v4 v4.3.11 // indirect
 	github.com/vmihailenco/tagparser v0.1.1 // indirect
 	github.com/x448/float16 v0.8.4 // indirect
-	github.com/zeebo/blake3 v0.2.3 // indirect
-	go.opencensus.io v0.24.0 // indirect
-	go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.47.0 // indirect
-	go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.47.0 // indirect
+	github.com/zeebo/blake3 v0.2.4 // indirect
 	go.opentelemetry.io/otel v1.24.0 // indirect
 	go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.21.0 // indirect
 	go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.21.0 // indirect
diff --git a/tools/storage-explorer/go.sum b/tools/storage-explorer/go.sum
index 728dc2855..e88101456 100644
--- a/tools/storage-explorer/go.sum
+++ b/tools/storage-explorer/go.sum
@@ -1903,11 +1903,11 @@ github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn
 github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec=
 github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY=
 github.com/onflow/atree v0.6.1-0.20230711151834-86040b30171f/go.mod h1:xvP61FoOs95K7IYdIYRnNcYQGf4nbF/uuJ0tHf4DRuM=
-github.com/onflow/atree v0.8.0 h1:qg5c6J1gVDNObughpEeWm8oxqhPGdEyGrda121GM4u0=
-github.com/onflow/atree v0.8.0/go.mod h1:yccR+LR7xc1Jdic0mrjocbHvUD7lnVvg8/Ct1AA5zBo=
+github.com/onflow/atree v0.8.0-rc.6 h1:GWgaylK24b5ta2Hq+TvyOF7X5tZLiLzMMn7lEt59fsA=
+github.com/onflow/atree v0.8.0-rc.6/go.mod h1:yccR+LR7xc1Jdic0mrjocbHvUD7lnVvg8/Ct1AA5zBo=
+github.com/onflow/atree v0.9.0/go.mod h1:FT6udJF9Q7VQTu3wknDhFX+VV4D44ZGdqtTAE5iztck=
 github.com/onflow/cadence v1.0.0-M3/go.mod h1:odXGZZ/wGNA5mwT8bC9v8u8EXACHllB2ABSZK65TGL8=
-github.com/onflow/cadence v1.0.0-preview.52 h1:hZ92e6lL2+PQa3C1i5jJh0zZYFdW89+X1MS0Bkd6Ayo=
-github.com/onflow/cadence v1.0.0-preview.52/go.mod h1:7wvvecnAZtYOspLOS3Lh+FuAmMeSrXhAWiycC3kQ1UU=
+github.com/onflow/cadence v1.0.0-preview-atree-register-inlining.29/go.mod h1:KclJlSGWG4USgPK4CsI3V/YtCHYOwPpjyzb6iEfWlbM=
 github.com/onflow/crypto v0.25.0/go.mod h1:C8FbaX0x8y+FxWjbkHy0Q4EASCDR9bSPWZqlpCLYyVI=
 github.com/onflow/crypto v0.25.2 h1:GjHunqVt+vPcdqhxxhAXiMIF3YiLX7gTuTR5O+VG2ns=
 github.com/onflow/crypto v0.25.2/go.mod h1:fY7eLqUdMKV8EGOw301unP8h7PvLVy8/6gVR++/g0BY=
@@ -1919,6 +1919,7 @@ github.com/onflow/flow-ft/lib/go/contracts v1.0.0 h1:mToacZ5NWqtlWwk/7RgIl/jeKB/
 github.com/onflow/flow-ft/lib/go/contracts v1.0.0/go.mod h1:PwsL8fC81cjnUnTfmyL/HOIyHnyaw/JA474Wfj2tl6A=
 github.com/onflow/flow-ft/lib/go/templates v1.0.0 h1:6cMS/lUJJ17HjKBfMO/eh0GGvnpElPgBXx7h5aoWJhs=
 github.com/onflow/flow-ft/lib/go/templates v1.0.0/go.mod h1:uQ8XFqmMK2jxyBSVrmyuwdWjTEb+6zGjRYotfDJ5pAE=
+github.com/onflow/flow-go v0.35.7-crescendo-preview.23-atree-inlining/go.mod h1:rTPlD+FVYJDKp+TbVkoOlo9cEZ1co3w438/o/IUGgH8=
 github.com/onflow/flow-go v0.37.10 h1:Nz2Gp63+0ubb9FuQaEZgCsXNXM5WsXq/j0ukC74N5Vw=
 github.com/onflow/flow-go v0.37.10/go.mod h1:bfOCsCk0v1J93vXd+zrYkCmRIVOaL9oAXvNFWgVOujE=
 github.com/onflow/flow-go-sdk v1.0.0-M1/go.mod h1:TDW0MNuCs4SvqYRUzkbRnRmHQL1h4X8wURsCw9P9beo=
@@ -2137,6 +2138,7 @@ github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXl
 github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
 github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
 github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
+github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
 github.com/subosito/gotenv v1.4.2 h1:X1TuBLAMDFbaTAChgCBLu3DU3UPyELpnF2jjJ2cz/S8=
 github.com/subosito/gotenv v1.4.2/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0=
 github.com/supranational/blst v0.3.8-0.20220526154634-513d2456b344/go.mod h1:jZJtfjgudtNl4en1tzwPIV3KjUnQUvG3/j+w+fVonLw=
@@ -2204,6 +2206,7 @@ github.com/zeebo/assert v1.3.0 h1:g7C04CbJuIDKNPFHmsk4hwZDO5O+kntRxzaUoNXj+IQ=
 github.com/zeebo/assert v1.3.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0=
 github.com/zeebo/blake3 v0.2.3 h1:TFoLXsjeXqRNFxSbk35Dk4YtszE/MQQGK10BH4ptoTg=
 github.com/zeebo/blake3 v0.2.3/go.mod h1:mjJjZpnsyIVtVgTOSpJ9vmRE4wgDeyt2HU3qXvvKCaQ=
+github.com/zeebo/blake3 v0.2.4/go.mod h1:7eeQ6d2iXWRGF6npfaxl2CU+xy2Fjo2gxeyZGCRUjcE=
 github.com/zeebo/pcg v1.0.1 h1:lyqfGeWiv4ahac6ttHs+I5hwtH/+1mrhlCtVNQM2kHo=
 github.com/zeebo/pcg v1.0.1/go.mod h1:09F0S9iiKrwn9rlI5yjLkmrug154/YRW6KnnXVDM/l4=
 github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA=
diff --git a/values.go b/values.go
index d3799c0a8..fe235afd8 100644
--- a/values.go
+++ b/values.go
@@ -653,7 +653,7 @@ var _ Value = UInt{}
 
 func NewUInt(i uint) UInt {
 	return UInt{
-		Value: big.NewInt(int64(i)),
+		Value: (&big.Int{}).SetUint64(uint64(i)),
 	}
 }
 
@@ -860,7 +860,7 @@ var UInt128MemoryUsage = common.NewCadenceBigIntMemoryUsage(16)
 
 func NewUInt128(i uint) UInt128 {
 	return UInt128{
-		Value: big.NewInt(int64(i)),
+		Value: (&big.Int{}).SetUint64(uint64(i)),
 	}
 }
 
@@ -924,7 +924,7 @@ var UInt256MemoryUsage = common.NewCadenceBigIntMemoryUsage(32)
 
 func NewUInt256(i uint) UInt256 {
 	return UInt256{
-		Value: big.NewInt(int64(i)),
+		Value: (&big.Int{}).SetUint64(uint64(i)),
 	}
 }
 
@@ -1134,7 +1134,7 @@ var Word128MemoryUsage = common.NewCadenceBigIntMemoryUsage(16)
 
 func NewWord128(i uint) Word128 {
 	return Word128{
-		Value: big.NewInt(int64(i)),
+		Value: (&big.Int{}).SetUint64(uint64(i)),
 	}
 }
 
@@ -1198,7 +1198,7 @@ var Word256MemoryUsage = common.NewCadenceBigIntMemoryUsage(32)
 
 func NewWord256(i uint) Word256 {
 	return Word256{
-		Value: big.NewInt(int64(i)),
+		Value: (&big.Int{}).SetUint64(uint64(i)),
 	}
 }
 
diff --git a/values_test.go b/values_test.go
index ac5ea3b06..fd960f180 100644
--- a/values_test.go
+++ b/values_test.go
@@ -20,6 +20,7 @@ package cadence
 
 import (
 	"fmt"
+	"math"
 	"math/big"
 	"testing"
 	"unicode/utf8"
@@ -58,8 +59,8 @@ func newValueTestCases() map[string]valueTestCase {
 
 	return map[string]valueTestCase{
 		"UInt": {
-			value: NewUInt(10),
-			string: "10",
+			value: NewUInt(math.MaxUint64),
+			string: "18446744073709551615",
 			expectedType: UIntType,
 		},
 		"UInt8": {
@@ -83,13 +84,13 @@ func newValueTestCases() map[string]valueTestCase {
 			expectedType: UInt64Type,
 		},
 		"UInt128": {
-			value: NewUInt128(128),
-			string: "128",
+			value: NewUInt128(math.MaxUint64),
+			string: "18446744073709551615",
 			expectedType: UInt128Type,
 		},
 		"UInt256": {
-			value: NewUInt256(256),
-			string: "256",
+			value: NewUInt256(math.MaxUint64),
+			string: "18446744073709551615",
 			expectedType: UInt256Type,
 		},
 		"Int": {
@@ -148,13 +149,13 @@ func newValueTestCases() map[string]valueTestCase {
 			expectedType: Word64Type,
 		},
 		"Word128": {
-			value: NewWord128(128),
-			string: "128",
+			value: NewWord128(math.MaxUint64),
+			string: "18446744073709551615",
 			expectedType: Word128Type,
 		},
 		"Word256": {
-			value: NewWord256(256),
-			string: "256",
+			value: NewWord256(math.MaxUint64),
+			string: "18446744073709551615",
 			expectedType: Word256Type,
 		},
 		"UFix64": {