Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Review reports and implement exporting #240

Merged
merged 4 commits into from
Jan 29, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 3 additions & 4 deletions internal/block/guarantee.go
Original file line number Diff line number Diff line change
Expand Up @@ -26,8 +26,7 @@ type CredentialSignature struct {
Signature [crypto.Ed25519SignatureSize]byte // The Ed25519 signature
}

// WorkReport represents a work report in the JAM state (equation 11.2 v0.5.0)
// TODO: The total serialized size of a work-report may be no greater than MaxWorkPackageSizeBytes.
// WorkReport represents a work report in the JAM state (equation 11.2 v0.5.4)
type WorkReport struct {
WorkPackageSpecification WorkPackageSpecification // Work-package specification (s)
RefinementContext RefinementContext // Refinement context (x)
Expand All @@ -46,11 +45,11 @@ type WorkPackageSpecification struct {
SegmentCount uint16 // Segment count (n)
}

// RefinementContext describes the context of the chain at the point that the report’s corresponding work-package was evaluated.
// RefinementContext describes the context of the chain at the point that the report’s corresponding work-package was evaluated. 11.4 GP 0.5.4
type RefinementContext struct {
Anchor RefinementContextAnchor // Historical block anchor
LookupAnchor RefinementContextLookupAnchor // Historical block anchor
PrerequisiteWorkPackage []crypto.Hash // Prerequisite work package (p) (optional)
PrerequisiteWorkPackage []crypto.Hash // Prerequisite work package (p)
}

type RefinementContextAnchor struct {
Expand Down
2 changes: 1 addition & 1 deletion internal/service/service.go
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,7 @@ func (sa ServiceAccount) AddPreimage(p []byte, currentTimeslot jamtime.Timeslot)
return nil
}

// LookupPreimage implements the historical lookup function (Λ) as defined in Equation (9.7 v0.5.0).
// LookupPreimage implements the historical lookup function (Λ) as defined in Equation (9.7 v0.5.4).
func (sa ServiceAccount) LookupPreimage(t jamtime.Timeslot, h crypto.Hash) []byte {
p, exists := sa.PreimageLookup[h]
if !exists {
Expand Down
2 changes: 1 addition & 1 deletion internal/statetransition/state_transition.go
Original file line number Diff line number Diff line change
Expand Up @@ -1367,7 +1367,7 @@ func ValidateExtrinsicGuarantees(
prerequisitePackageHashes[key] = struct{}{}
}

// Check total dependencies
// Check total dependencies. 11.3 GP 0.5.4
totalDeps := len(guarantee.WorkReport.RefinementContext.PrerequisiteWorkPackage) +
len(guarantee.WorkReport.SegmentRootLookup)
if totalDeps > common.WorkReportMaxSumOfDependencies {
Expand Down
5 changes: 3 additions & 2 deletions internal/work/constants.go
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ const (
NumberOfErasureCodecPiecesInSegment = 6 // WP = 6: The number of erasure-coded pieces in a segment.
SizeOfErasureCodecPiecesInOctets = 684 // WE = 684: The basic size of erasure-coded pieces in octets.
SizeOfSegment = NumberOfErasureCodecPiecesInSegment * SizeOfErasureCodecPiecesInOctets // WG = WP*WE = 4104: The size of a segment in octets.
MaxSizeOfEncodedWorkPackage = 12 * 1 << 20 // WB = 12*2^20 = 12MB: The maximum size of an encoded work-package together with its extrinsic data and import implications, in octets.
MaxAllocatedGasRefine = 500_000_000 // GR = 500, 000, 000: The gas allocated to invoke a work-package’s Refine logic.
SegmentsPerPage = 64
MaxSizeOfEncodedWorkPackage = 12 * 1 << 20 // WB = 12*2^20 = 12MB: The maximum size of an encoded work-package together with its extrinsic data and import implications, in octets.
MaxAllocatedGasRefine = 500_000_000 // GR = 500, 000, 000: The gas allocated to invoke a work-package’s Refine logic.
)
4 changes: 2 additions & 2 deletions internal/work/item.go
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ type Extrinsic struct {
Length uint32
}

// Item represents I (14.2 v0.5.2)
// Item represents I (14.2 v0.5.4)
type Item struct {
ServiceId uint32 // s ∈ N_S
CodeHash crypto.Hash // c ∈ H
Expand All @@ -37,7 +37,7 @@ func (w *Item) Size() uint64 {
return total
}

// ToWorkResult item-to-result function C (14.8 v0.5.2)
// ToWorkResult item-to-result function C (14.8 v0.5.4)
func (w *Item) ToWorkResult(o block.WorkResultOutputOrError) block.WorkResult {
payloadHash := crypto.HashData(w.Payload)

Expand Down
48 changes: 41 additions & 7 deletions internal/work/package.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,16 @@ package work

import (
"fmt"

"github.com/eigerco/strawberry/internal/block"
"github.com/eigerco/strawberry/internal/common"
"github.com/eigerco/strawberry/internal/crypto"
"github.com/eigerco/strawberry/internal/merkle/binary_tree"
"github.com/eigerco/strawberry/internal/polkavm"
"github.com/eigerco/strawberry/internal/service"
"github.com/eigerco/strawberry/pkg/serialization/codec/jam"
)

// Package represents P (14.2 v0.5.2)
// Package represents P (14.2 v0.5.4)
type Package struct {
AuthorizationToken []byte // j ∈ Y
AuthorizerService uint32 // h ∈ N_S
Expand All @@ -19,7 +21,7 @@ type Package struct {
WorkItems []Item // w ∈ ⟦I⟧
}

// ValidateNumberOfEntries (14.4 v0.5.2)
// ValidateNumberOfEntries (14.4 v0.5.4)
func (wp *Package) ValidateNumberOfEntries() error {
var totalExported, totalImported uint16
for _, w := range wp.WorkItems {
Expand All @@ -37,7 +39,7 @@ func (wp *Package) ValidateNumberOfEntries() error {
return nil
}

// ValidateSize (14.5 v0.5.2)
// ValidateSize (14.5 v0.5.4)
func (wp *Package) ValidateSize() error {
totalSize := uint64(len(wp.AuthorizationToken)) + uint64(len(wp.Parameterization))

Expand All @@ -52,7 +54,7 @@ func (wp *Package) ValidateSize() error {
return nil
}

// ValidateGas (14.7 v0.5.2)
// ValidateGas (14.7 v0.5.4)
func (wp *Package) ValidateGas() error {
var totalAccumulate, totalRefine uint64
for _, w := range wp.WorkItems {
Expand All @@ -70,7 +72,7 @@ func (wp *Package) ValidateGas() error {
return nil
}

// ComputeAuthorizerHashes (14.9 v0.5.2)
// ComputeAuthorizerHashes (14.9 v0.5.4)
func (wp *Package) ComputeAuthorizerHashes(
serviceState service.ServiceState,
) (authorizationCode []byte, impliedAuthorizerHash crypto.Hash, err error) {
Expand All @@ -85,7 +87,7 @@ func (wp *Package) ComputeAuthorizerHashes(
return authorizationCode, impliedAuthorizerHash, nil
}

// GetAuthorizationCode pc = Λ(δ[p.h], (p.x)^t, p.u) (14.9 v0.5.2)
// GetAuthorizationCode pc = Λ(δ[p.h], (p.x)^t, p.u) (14.9 v0.5.4)
func (wp *Package) GetAuthorizationCode(serviceState service.ServiceState) ([]byte, error) {
// Retrieve the service account by authorizer service index p.h
sa, exists := serviceState[block.ServiceId(wp.AuthorizerService)]
Expand All @@ -101,3 +103,35 @@ func (wp *Package) GetAuthorizationCode(serviceState service.ServiceState) ([]by

return authorizationCode, nil
}

// ComputePagedProofs P(s) → [E(J₆(s,i), L₆(s,i))₍l₎ | i ∈ ℕ₍⌈|s|/64⌉₎] (14.10 v0.5.4)
//
// For each page of up to SegmentsPerPage segments it collects the page's leaf
// hashes and a Merkle proof, JAM-encodes both, concatenates the encodings, and
// zero-pads the result to exactly one segment (SizeOfSegment). Returns one
// proof segment per page, or an error when segments is empty or encoding fails.
func ComputePagedProofs(segments []polkavm.Segment) ([]polkavm.Segment, error) {
	if len(segments) == 0 {
		return nil, fmt.Errorf("no segments provided")
	}
	blobs := make([][]byte, len(segments))
	for i := range segments {
		// Slice the stored element directly. The previous form
		// `for i, seg := range segments { blobs[i] = seg[:] }` sliced the
		// range variable, which under pre-Go-1.22 loop semantics is a single
		// reused variable — every blob would alias the last segment's bytes.
		blobs[i] = segments[i][:]
	}
	// Number of pages: ⌈|s| / SegmentsPerPage⌉.
	numPages := (len(segments) + SegmentsPerPage - 1) / SegmentsPerPage
	pagedProofs := make([]polkavm.Segment, numPages)
	for pageIndex := 0; pageIndex < numPages; pageIndex++ {
		// Get leaf hashes and proof for page.
		// NOTE(review): numPages is derived from SegmentsPerPage (64) while
		// the page-size argument handed to binary_tree is
		// NumberOfErasureCodecPiecesInSegment (6) — confirm these call sites
		// really intend different page sizes; if the third argument is a
		// page size, later segments are never covered.
		leafHashes := binary_tree.GetLeafPage(blobs, pageIndex, NumberOfErasureCodecPiecesInSegment, crypto.HashData)
		proof := binary_tree.GeneratePageProof(blobs, pageIndex, NumberOfErasureCodecPiecesInSegment, crypto.HashData)

		// Encode leaves and proof.
		marshalledLeaves, err := jam.Marshal(leafHashes)
		if err != nil {
			return nil, fmt.Errorf("failed to marshal leaf hashes: %w", err)
		}
		marshalledProof, err := jam.Marshal(proof)
		if err != nil {
			return nil, fmt.Errorf("failed to marshal proof: %w", err)
		}
		// E(J(s,i)) ++ E(L(s,i)), zero-padded to a full segment; marshalledLeaves
		// is freshly allocated each iteration, so the append cannot clobber
		// shared backing storage.
		combined := append(marshalledLeaves, marshalledProof...)
		padded := ZeroPadding(combined, SizeOfSegment)
		copy(pagedProofs[pageIndex][:], padded)
	}
	return pagedProofs, nil
}
77 changes: 77 additions & 0 deletions internal/work/package_test.go
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
package work_test

import (
"bytes"
"github.com/eigerco/strawberry/internal/polkavm"
"testing"

"github.com/stretchr/testify/assert"
Expand All @@ -14,6 +16,22 @@ import (
"github.com/eigerco/strawberry/internal/work"
)

// Helper functions
// createTestSegment returns a segment whose every byte equals pattern.
func createTestSegment(pattern byte) polkavm.Segment {
	var seg polkavm.Segment
	for i := 0; i < len(seg); i++ {
		seg[i] = pattern
	}
	return seg
}

// createSegments builds count identical segments, each filled with 0x42.
func createSegments(count int) []polkavm.Segment {
	filled := createTestSegment(0x42)
	out := make([]polkavm.Segment, count)
	for i := range out {
		out[i] = filled
	}
	return out
}

func Test_ValidateNumberOfEntries(t *testing.T) {
p := work.Package{
WorkItems: []work.Item{
Expand Down Expand Up @@ -125,3 +143,62 @@ func Test_ComputeAuthorizerHashes(t *testing.T) {
_, _, err = p.ComputeAuthorizerHashes(serviceState)
assert.Error(t, err)
}

// TestComputePagedProofs exercises ComputePagedProofs across the error case
// and one- and two-page inputs, checking the page count of the result.
func TestComputePagedProofs(t *testing.T) {
	cases := []struct {
		name     string
		segments []polkavm.Segment
		wantErr  string // non-empty => an error containing this text is expected
	}{
		{
			name:     "empty segments",
			segments: []polkavm.Segment{},
			wantErr:  "no segments provided",
		},
		{
			name:     "single page of segments",
			segments: createSegments(work.SegmentsPerPage),
		},
		{
			name:     "multiple pages of segments",
			segments: createSegments(work.SegmentsPerPage * 2),
		},
	}

	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			proofs, err := work.ComputePagedProofs(tc.segments)

			if tc.wantErr != "" {
				require.Error(t, err)
				assert.Contains(t, err.Error(), tc.wantErr)
				return
			}

			require.NoError(t, err)
			// One proof segment is produced per full page of input segments.
			assert.Equal(t, len(tc.segments)/work.SegmentsPerPage, len(proofs))
		})
	}
}

func TestComputePagedProofsConsistency(t *testing.T) {
// Create two identical sets of segments
segments1 := createSegments(work.SegmentsPerPage)
segments2 := createSegments(work.SegmentsPerPage)

proofs1, err := work.ComputePagedProofs(segments1)
require.NoError(t, err)

proofs2, err := work.ComputePagedProofs(segments2)
require.NoError(t, err)

assert.Equal(t, len(proofs1), len(proofs2))
for i := range proofs1 {
assert.True(t, bytes.Equal(proofs1[i][:], proofs2[i][:]))
}
}
Loading