handle invalid batches (#85)
* handle invalid batches

* add tests
ToniRamirezM authored Jun 14, 2024
1 parent f6ea12a commit 8c7dd5c
Showing 15 changed files with 217 additions and 33 deletions.
29 changes: 29 additions & 0 deletions .github/workflows/test.yml
@@ -0,0 +1,29 @@
---
name: Test
on:
  push:
    branches:
      - main
  pull_request:

jobs:
  test:
    strategy:
      matrix:
        go-version: [ 1.21.x ]
        goarch: [ "amd64" ]
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Install Go
        uses: actions/setup-go@v3
        with:
          go-version: ${{ matrix.go-version }}
        env:
          GOARCH: ${{ matrix.goarch }}

      - name: Test
        run: make test
        working-directory: ./
4 changes: 4 additions & 0 deletions Makefile
@@ -80,6 +80,10 @@ build-docker-nc: ## Builds a docker image with the node binary - but without bui
stop: ## Stops all services
	docker-compose down

.PHONY: test
test:
	go test -count=1 -short -race -p 1 -timeout 60s ./...

.PHONY: install-linter
install-linter: ## Installs the linter
	curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $$(go env GOPATH)/bin v1.54.2
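
The new make test target passes -short, so slower tests in the repository can opt out of CI runs. A minimal sketch of such a guard, assuming a hypothetical test in the aggregator package (the test name is illustrative, not part of this commit):

package aggregator_test

import "testing"

// TestSlowProverPath is a hypothetical long-running test; it skips itself
// when the -short flag used by `make test` is set.
func TestSlowProverPath(t *testing.T) {
	if testing.Short() {
		t.Skip("skipping slow test in -short mode")
	}
	// ... exercise the slow path here ...
}
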
85 changes: 77 additions & 8 deletions aggregator/aggregator.go
@@ -1,12 +1,14 @@
package aggregator

import (
	"bytes"
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"math/big"
	"net"
	"strconv"
	"strings"
	"sync"
	"sync/atomic"
@@ -31,6 +33,7 @@ import (
	"github.com/0xPolygonHermez/zkevm-synchronizer-l1/synchronizer"
	"github.com/ethereum/go-ethereum/common"
	"github.com/iden3/go-iden3-crypto/keccak256"
	"github.com/iden3/go-iden3-crypto/poseidon"
	"google.golang.org/grpc"
	grpchealth "google.golang.org/grpc/health/grpc_health_v1"
	"google.golang.org/grpc/peer"
@@ -204,6 +207,7 @@ func (a *Aggregator) handleReceivedDataStream(entry *datastreamer.FileEntry, cli
		a.currentStreamBatch.BatchNumber = batch.Number
		a.currentStreamBatch.ChainID = batch.ChainId
		a.currentStreamBatch.ForkID = batch.ForkId
		a.currentStreamBatch.Type = batch.Type
	case datastreamer.EntryType(datastream.EntryType_ENTRY_TYPE_BATCH_END):
		batch := &datastream.BatchEnd{}
		err := proto.Unmarshal(entry.Data, batch)
@@ -222,11 +226,7 @@ func (a *Aggregator) handleReceivedDataStream(entry *datastreamer.FileEntry, cli

		// Save Current Batch
		if a.currentStreamBatch.BatchNumber != 0 {
			batchl2Data, err := state.EncodeBatchV2(&a.currentStreamBatchRaw)
			if err != nil {
				log.Errorf("Error encoding batch: %v", err)
				return err
			}
			var batchl2Data []byte

			// Get batchl2Data from L1
			virtualBatch, err := a.l1Syncr.GetVirtualBatchByBatchNumber(ctx, a.currentStreamBatch.BatchNumber)
@@ -246,15 +246,26 @@ func (a *Aggregator) handleReceivedDataStream(entry *datastreamer.FileEntry, cli
				}
			}

			if a.cfg.UseL1BatchData {
			// If the batch is marked as Invalid in the DS we enforce retrieve the data from L1
			if a.cfg.UseL1BatchData || a.currentStreamBatch.Type == datastream.BatchType_BATCH_TYPE_INVALID {
				a.currentStreamBatch.BatchL2Data = virtualBatch.BatchL2Data
			} else {
				batchl2Data, err = state.EncodeBatchV2(&a.currentStreamBatchRaw)
				if err != nil {
					log.Errorf("Error encoding batch: %v", err)
					return err
				}
				a.currentStreamBatch.BatchL2Data = batchl2Data
			}

			if common.Bytes2Hex(batchl2Data) != common.Bytes2Hex(virtualBatch.BatchL2Data) {
				log.Warnf("BatchL2Data from L1 and data stream are different for batch %d", a.currentStreamBatch.BatchNumber)
				log.Warnf("DataStream BatchL2Data:%v", common.Bytes2Hex(batchl2Data))

				if a.currentStreamBatch.Type == datastream.BatchType_BATCH_TYPE_INVALID {
					log.Warnf("Batch is marked as invalid in data stream")
				} else {
					log.Warnf("DataStream BatchL2Data:%v", common.Bytes2Hex(batchl2Data))
				}
				log.Warnf("L1 BatchL2Data:%v", common.Bytes2Hex(virtualBatch.BatchL2Data))
			}

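The heart of this change is the branch above: a batch flagged as invalid in the data stream always takes its BatchL2Data from L1, regardless of the UseL1BatchData setting. A standalone sketch of that rule, using a hypothetical helper name rather than the aggregator's actual API:

package aggregator

import "github.com/0xPolygonHermez/zkevm-aggregator/state/datastream"

// chooseBatchL2Data is an illustrative helper mirroring the decision above:
// prefer the L1 copy when the config requests it or the stream marks the
// batch as invalid; otherwise keep the data re-encoded from the stream.
func chooseBatchL2Data(useL1BatchData bool, batchType datastream.BatchType, l1Data, streamData []byte) []byte {
	if useL1BatchData || batchType == datastream.BatchType_BATCH_TYPE_INVALID {
		return l1Data
	}
	return streamData
}
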
@@ -1231,7 +1242,17 @@ func (a *Aggregator) tryGenerateBatchProof(ctx context.Context, prover proverInt

	log.Info("Batch proof generated")

	proof.Proof = resGetProof
	// Sanity Check: state root from the proof must match the one from the batch
	proofStateRoot, err := GetStateRootFromBatchProof(resGetProof)
	if err != nil {
		err = fmt.Errorf("failed to get state root from batch proof, %w", err)
		log.Error(FirstToUpper(err.Error()))
		return false, err
	}
	// Check if the state root from the proof matches the one from the batch
	if !bytes.Equal(proofStateRoot.Bytes(), batchToProve.StateRoot.Bytes()) {
		log.Fatalf("State root from the proof [%#x] does not match the one from the batch [%#x]", proofStateRoot, batchToProve.StateRoot)
	}

	// NOTE(pg): the defer func is useless from now on, use a different variable
	// name for errors (or shadow err in inner scopes) to not trigger it.
@@ -1259,6 +1280,54 @@ func (a *Aggregator) tryGenerateBatchProof(ctx context.Context, prover proverInt
	return true, nil
}

// GetStateRootFromBatchProof returns the state root from the batch proof.
func GetStateRootFromBatchProof(resGetProof string) (common.Hash, error) {
	type Publics struct {
		Publics []string `mapstructure:"publics"`
	}

	var publics Publics
	err := json.Unmarshal([]byte(resGetProof), &publics)
	if err != nil {
		log.Errorf("Error unmarshalling proof: %v", err)
		return common.Hash{}, err
	}

	var v [8]uint64
	var j = 0
	for i := 19; i < 19+8; i++ {
		u64, err := strconv.ParseInt(publics.Publics[i], 10, 64)
		if err != nil {
			log.Fatal(err)
		}
		v[j] = uint64(u64)
		j++
	}
	bigSR := fea2scalar(v[:])
	hexSR := fmt.Sprintf("%x", bigSR)
	if len(hexSR)%2 != 0 {
		hexSR = "0" + hexSR
	}

	return common.HexToHash(hexSR), nil
}
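
A minimal sketch of how this parser can be exercised, assuming it lives in the aggregator package; the publics payload below is hypothetical, and only indices 19-26 (the state-root limbs read by the loop above) matter to this function:

package aggregator

import (
	"encoding/json"
	"testing"

	"github.com/ethereum/go-ethereum/common"
)

func TestGetStateRootFromBatchProofSketch(t *testing.T) {
	// Build a fake prover response: 27 decimal publics, all zero except the
	// least-significant state-root limb at index 19.
	publics := make([]string, 27)
	for i := range publics {
		publics[i] = "0"
	}
	publics[19] = "1"
	payload, err := json.Marshal(map[string][]string{"publics": publics})
	if err != nil {
		t.Fatal(err)
	}

	root, err := GetStateRootFromBatchProof(string(payload))
	if err != nil {
		t.Fatal(err)
	}
	if root != common.HexToHash("0x1") {
		t.Fatalf("unexpected state root %s", root)
	}
}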

// fea2scalar converts array of uint64 values into one *big.Int.
func fea2scalar(v []uint64) *big.Int {
	if len(v) != poseidon.NROUNDSF {
		return big.NewInt(0)
	}
	res := new(big.Int).SetUint64(v[0])
	res.Add(res, new(big.Int).Lsh(new(big.Int).SetUint64(v[1]), 32))  //nolint:gomnd
	res.Add(res, new(big.Int).Lsh(new(big.Int).SetUint64(v[2]), 64))  //nolint:gomnd
	res.Add(res, new(big.Int).Lsh(new(big.Int).SetUint64(v[3]), 96))  //nolint:gomnd
	res.Add(res, new(big.Int).Lsh(new(big.Int).SetUint64(v[4]), 128)) //nolint:gomnd
	res.Add(res, new(big.Int).Lsh(new(big.Int).SetUint64(v[5]), 160)) //nolint:gomnd
	res.Add(res, new(big.Int).Lsh(new(big.Int).SetUint64(v[6]), 192)) //nolint:gomnd
	res.Add(res, new(big.Int).Lsh(new(big.Int).SetUint64(v[7]), 224)) //nolint:gomnd
	return res
}
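
For reference, fea2scalar packs eight 32-bit limbs little-endian: v[0] fills bits 0-31 and v[7] fills bits 224-255 of the resulting 256-bit integer. A self-contained sketch that reproduces the same packing (helper name and values are illustrative):

package main

import (
	"fmt"
	"math/big"
)

// packLimbs mirrors fea2scalar: combine eight 32-bit limbs, least significant
// first, into a single 256-bit integer.
func packLimbs(limbs [8]uint64) *big.Int {
	res := new(big.Int)
	for i := 7; i >= 0; i-- {
		res.Lsh(res, 32)
		res.Add(res, new(big.Int).SetUint64(limbs[i]))
	}
	return res
}

func main() {
	limbs := [8]uint64{0x01, 0, 0, 0, 0, 0, 0, 0xdeadbeef}
	// Prints a 64-hex-digit value: 0xdeadbeef in the top limb, 0x01 in the bottom.
	fmt.Printf("%064x\n", packLimbs(limbs))
}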

// canVerifyProof returns true if we have reached the timeout to verify a proof
// and no other prover is verifying a proof (verifyingProof = false).
func (a *Aggregator) canVerifyProof() bool {
4 changes: 2 additions & 2 deletions aggregator/prover/aggregator.pb.go

Some generated files are not rendered by default.

23 changes: 16 additions & 7 deletions aggregator/prover/aggregator_grpc.pb.go

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion config/default.go
@@ -16,7 +16,7 @@ GeneratingProofCleanupThreshold = "10m"
ForkId = 9
GasOffset = 0
WitnessURL = "localhost:8123"
UseL1BatchData = false
UseL1BatchData = true
UseFullWitness = false
[Aggregator.DB]
Name = "aggregator_db"
1 change: 1 addition & 0 deletions proto/src/proto/datastream/v1/datastream.proto
@@ -86,4 +86,5 @@ enum BatchType {
  BATCH_TYPE_REGULAR = 1;
  BATCH_TYPE_FORCED = 2;
  BATCH_TYPE_INJECTED = 3;
  BATCH_TYPE_INVALID = 4;
}
34 changes: 19 additions & 15 deletions state/datastream/datastream.pb.go

Some generated files are not rendered by default.

2 changes: 2 additions & 0 deletions state/types.go
@@ -3,6 +3,7 @@ package state
import (
	"time"

	"github.com/0xPolygonHermez/zkevm-aggregator/state/datastream"
	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/core/types"
)
@@ -40,6 +41,7 @@ type Batch struct {
	ForcedBatchNum *uint64
	ChainID        uint64
	ForkID         uint64
	Type           datastream.BatchType
}

// Sequence represents the sequence interval
1 change: 1 addition & 0 deletions test/stateroot/proofs/1871.json

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions test/stateroot/proofs/1872.json

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions test/stateroot/proofs/1873.json

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions test/stateroot/proofs/1874.json

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions test/stateroot/proofs/1875.json

Large diffs are not rendered by default.
