
Merge pull request #475 from hermeznetwork/feature/zki-batchnum

Add ZKI tests for the last batches of the Til MinimumFlow0 set & other changes
Eduard S, 3 years ago, committed by GitHub
commit dade77046b
7 changed files with 133 additions and 118 deletions

  1. api/api_test.go (+9 -9)
  2. db/statedb/statedb.go (+1 -1)
  3. test/til/sets.go (+16 -17)
  4. txprocessor/txprocessor.go (+1 -1)
  5. txprocessor/txprocessor_test.go (+24 -22)
  6. txprocessor/zkinputsgen_test.go (+70 -56)
  7. txselector/txselector_test.go (+12 -12)

api/api_test.go (+9 -9)

@@ -56,14 +56,14 @@ var SetBlockchain = `
 CreateAccountCoordinator(0) Coord
 CreateAccountCoordinator(1) Coord
-// close Block:0, Batch:0
+// close Block:0, Batch:1
 > batch
 CreateAccountDeposit(0) A: 11111111100000000000
 CreateAccountDeposit(1) C: 22222222200000000000
 CreateAccountCoordinator(0) C
-// close Block:0, Batch:1
+// close Block:0, Batch:2
 > batchL1
 // Expected balances:
 // Coord(0): 0, Coord(1): 0
@@ -71,19 +71,19 @@ var SetBlockchain = `
 CreateAccountDeposit(1) A: 33333333300000000000
-// close Block:0, Batch:2
+// close Block:0, Batch:3
 > batchL1
-// close Block:0, Batch:3
+// close Block:0, Batch:4
 > batchL1
 CreateAccountDepositTransfer(0) B-A: 44444444400000000000, 123444444400000000000
-// close Block:0, Batch:4
+// close Block:0, Batch:5
 > batchL1
 CreateAccountDeposit(0) D: 55555555500000000000
-// close Block:0, Batch:5
+// close Block:0, Batch:6
 > batchL1
 CreateAccountCoordinator(1) B
@@ -91,7 +91,7 @@ var SetBlockchain = `
 Transfer(1) A-B: 11111100000000000 (2)
 Transfer(0) B-C: 22222200000000000 (3)
-// close Block:0, Batch:6
+// close Block:0, Batch:7
 > batchL1 // forge L1User{1}, forge L1Coord{2}, forge L2{2}
 Deposit(0) C: 66666666600000000000
@@ -105,14 +105,14 @@ var SetBlockchain = `
 ForceTransfer(0) D-B: 77777700000000000
 ForceExit(0) B: 88888800000000000
-// close Block:0, Batch:7
+// close Block:0, Batch:8
 > batchL1
 > block
 Transfer(0) D-A: 99999900000000000 (77)
 Transfer(0) B-D: 12312300000000000 (55)
-// close Block:1, Batch:0
+// close Block:1, Batch:1
 > batchL1
 CreateAccountCoordinator(0) F
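For orientation, this is how a Til set like SetBlockchain is consumed in the tests this PR touches. A minimal sketch assuming the til API visible elsewhere in this diff (til.NewContext, GenerateBlocks; chainID and the common.RollupConstMaxL1UserTx argument are assumptions, and exact signatures may differ):

    // Parse the Til set into blocks and batches (names taken from the calls
    // that appear in this diff; the constructor arguments are assumptions).
    tc := til.NewContext(chainID, common.RollupConstMaxL1UserTx)
    blocks, err := tc.GenerateBlocks(SetBlockchain)
    require.NoError(t, err)
    // With this PR, batch numbering in the comments is 1-indexed: the batch
    // closed by the first `> batch` is "Block:0, Batch:1", which is still
    // blocks[0].Rollup.Batches[0] in the parsed output.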

db/statedb/statedb.go (+1 -1)

@@ -101,7 +101,7 @@ func NewStateDB(pathDB string, keep int, typ TypeStateDB, nLevels int) (*StateDB
 // Internally this advances & stores the current BatchNum, and then stores a
 // Checkpoint of the current state of the StateDB.
 func (s *StateDB) MakeCheckpoint() error {
-    log.Debugw("Making StateDB checkpoint", "batch", s.CurrentBatch())
+    log.Debugw("Making StateDB checkpoint", "batch", s.CurrentBatch()+1)
     return s.db.MakeCheckpoint()
 }
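The one-line change encodes the convention that the checkpoint being written belongs to the next batch: before any checkpoint CurrentBatch() is 0, and the first checkpoint creates batch 1. A self-contained toy model of that lifecycle (illustrative only, not the hermez-node StateDB):

    package main

    import "fmt"

    type stateDB struct{ batch int }

    // CurrentBatch returns the last checkpointed batch (0 before any checkpoint).
    func (s *stateDB) CurrentBatch() int { return s.batch }

    // MakeCheckpoint snapshots the state for the next batch, so the log line
    // reports CurrentBatch()+1, the number the new checkpoint will carry.
    func (s *stateDB) MakeCheckpoint() {
        fmt.Printf("Making StateDB checkpoint batch=%d\n", s.batch+1)
        s.batch++
    }

    func main() {
        s := &stateDB{}
        s.MakeCheckpoint() // prints batch=1: batches are 1-indexed
        s.MakeCheckpoint() // prints batch=2
    }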

test/til/sets.go (+16 -17)

@@ -13,7 +13,7 @@ AddToken(1)
 AddToken(2)
 AddToken(3)
-// block:0 batch:0
+// block:0 batch:1
 // Coordinator accounts, Idxs: 256, 257, 258, 259
 CreateAccountCoordinator(0) Coord
@@ -22,7 +22,7 @@ CreateAccountCoordinator(2) Coord
 CreateAccountCoordinator(3) Coord
 > batch
-// block:0 batch:1
+// block:0 batch:2
 // deposits TokenID: 1
 CreateAccountDeposit(1) A: 50
@@ -61,7 +61,7 @@ CreateAccountDeposit(0) B: 10000
 CreateAccountDeposit(0) C: 1
 > batchL1
-// block:0 batch:2
+// block:0 batch:3
 // transactions TokenID: 1
 Transfer(1) A-B: 5 (1)
@@ -90,7 +90,7 @@ Transfer(0) B-C: 50 (100)
 > batchL1
 > block
-// block:1 batch:0
+// block:1 batch:1
 // A (3) still does not exist, coordinator should create new L1Tx to create the account
 CreateAccountCoordinator(3) A
@@ -161,7 +161,7 @@ Exit(1) Y: 5 (1)
 Exit(1) Z: 5 (1)
 > batch
-// block:1 batch:1
+// block:1 batch:2
 Deposit(1) A: 50
 Deposit(1) B: 5
@@ -214,27 +214,26 @@ Type: Blockchain
 AddToken(1)
-// close Block:0, Batch:0
+// close Block:0, Batch:1
 > batch
 CreateAccountDeposit(0) A: 500
 CreateAccountDeposit(1) C: 0
-// close Block:0, Batch:1
+// close Block:0, Batch:2
 > batchL1 // freeze L1User{2}, forge L1Coord{0}
 // Expected balances:
 // C(0): 0
 CreateAccountDeposit(1) A: 500
-// close Block:0, Batch:2
+// close Block:0, Batch:3
 > batchL1 // freeze L1User{1}, forge L1User{2}
 // Expected balances:
 // A(0): 500
 // C(0): 0, C(1): 0
-// close Block:0, Batch:3
+// close Block:0, Batch:4
 > batchL1 // freeze L1User{nil}, forge L1User{1}
 // Expected balances:
 // A(0): 500, A(1): 500
@@ -243,11 +242,11 @@ CreateAccountDeposit(1) A: 500
 CreateAccountDepositTransfer(0) B-A: 500, 100
-// close Block:0, Batch:4
+// close Block:0, Batch:5
 > batchL1 // freeze L1User{1}, forge L1User{nil}
 CreateAccountDeposit(0) D: 800
-// close Block:0, Batch:5
+// close Block:0, Batch:6
 > batchL1 // freeze L1User{1}, forge L1User{1}
 // Expected balances:
 // A(0): 600, A(1): 500
@@ -265,7 +264,7 @@ CreateAccountCoordinator(0) C
 Transfer(1) A-B: 200 (126)
 Transfer(0) B-C: 100 (126)
-// close Block:0, Batch:6
+// close Block:0, Batch:7
 > batchL1 // forge L1User{1}, forge L1Coord{4}, forge L2{2}
 // Expected balances:
 // Coord(0): 10, Coord(1): 20
@@ -285,7 +284,7 @@ Exit(0) A: 100 (126)
 ForceTransfer(0) D-B: 200
 ForceExit(0) B: 100
-// close Block:0, Batch:7
+// close Block:0, Batch:8
 > batchL1 // freeze L1User{4}, forge L1User{nil}, forge L2{4}
 > block
 // Expected balances:
@@ -298,8 +297,8 @@ ForceExit(0) B: 100
 Transfer(0) D-A: 300 (126)
 Transfer(0) B-D: 100 (126)
-// close Block:1, Batch:0
-> batchL1 // freeze L1User{nil}, forge L1User{4}, forge L2{1}
+// close (batch9) Block:1, Batch:1
+> batchL1 // freeze L1User{nil}, forge L1User{4}, forge L2{2}
 // Expected balances:
 // Coord(0): 75, Coord(1): 30
 // A(0): 730, A(1): 280
@@ -309,7 +308,7 @@ Transfer(0) B-D: 100 (126)
 CreateAccountCoordinator(0) F
-> batch // forge L1CoordinatorTx{1}
+> batch // batch10: forge L1CoordinatorTx{1}
 > block
 `
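A note on the two numbering schemes visible above: the "Block:N, Batch:M" comments are per block and now 1-indexed, while the parenthesized annotations count batches globally across the whole set. Block 0 closes eight batches (Batch:1 through Batch:8), so the first batch of block 1 is the ninth overall, hence "(batch9)", and the `> batch` that follows it is "batch10".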

txprocessor/txprocessor.go (+1 -1)

@@ -117,7 +117,7 @@ func (tp *TxProcessor) ProcessTxs(coordIdxs []common.Idx, l1usertxs, l1coordinat
     if tp.s.Typ == statedb.TypeBatchBuilder {
         tp.zki = common.NewZKInputs(tp.config.ChainID, tp.config.MaxTx, tp.config.MaxL1Tx,
-            tp.config.MaxFeeTx, tp.config.NLevels, tp.s.CurrentBatch().BigInt())
+            tp.config.MaxFeeTx, tp.config.NLevels, (tp.s.CurrentBatch() + 1).BigInt())
         tp.zki.OldLastIdx = tp.s.CurrentIdx().BigInt()
         tp.zki.OldStateRoot = tp.s.MT.Root().BigInt()
         tp.zki.Metadata.NewLastIdxRaw = tp.s.CurrentIdx()
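The same off-by-one applies here: ProcessTxs runs before MakeCheckpoint advances the StateDB, so at this point CurrentBatch() still returns the previously checkpointed batch, while the ZKInputs must carry the number of the batch being built and proven. A sketch of the intent, using only names that appear in the diff:

    // CurrentBatch() == last checkpointed batch; the batch being processed
    // (and for which the ZK proof is generated) is the next one.
    batchNum := tp.s.CurrentBatch() + 1
    tp.zki = common.NewZKInputs(tp.config.ChainID, tp.config.MaxTx, tp.config.MaxL1Tx,
        tp.config.MaxFeeTx, tp.config.NLevels, batchNum.BigInt())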

txprocessor/txprocessor_test.go (+24 -22)

@@ -223,19 +223,19 @@ func TestProcessTxsBalances(t *testing.T) {
     }
     tp := NewTxProcessor(sdb, config)
-    log.Debug("block:0 batch:0, only L1CoordinatorTxs")
+    log.Debug("block:0 batch:1, only L1CoordinatorTxs")
     _, err = tp.ProcessTxs(nil, nil, blocks[0].Rollup.Batches[0].L1CoordinatorTxs, nil)
     require.NoError(t, err)
     assert.Equal(t, "0", tp.s.MT.Root().BigInt().String())
-    log.Debug("block:0 batch:1")
+    log.Debug("block:0 batch:2")
     l1UserTxs := []common.L1Tx{}
     l2Txs := common.L2TxsToPoolL2Txs(blocks[0].Rollup.Batches[1].L2Txs)
     _, err = tp.ProcessTxs(nil, l1UserTxs, blocks[0].Rollup.Batches[1].L1CoordinatorTxs, l2Txs)
     require.NoError(t, err)
     assert.Equal(t, "0", tp.s.MT.Root().BigInt().String())
-    log.Debug("block:0 batch:2")
+    log.Debug("block:0 batch:3")
     l1UserTxs = til.L1TxsToCommonL1Txs(tc.Queues[*blocks[0].Rollup.Batches[2].Batch.ForgeL1TxsNum])
     l2Txs = common.L2TxsToPoolL2Txs(blocks[0].Rollup.Batches[2].L2Txs)
     _, err = tp.ProcessTxs(nil, l1UserTxs, blocks[0].Rollup.Batches[2].L1CoordinatorTxs, l2Txs)
@@ -243,7 +243,7 @@ func TestProcessTxsBalances(t *testing.T) {
     checkBalance(t, tc, sdb, "A", 0, "500")
     assert.Equal(t, "13644148972047617726265275926674266298636745191961029124811988256139761111521", tp.s.MT.Root().BigInt().String())
-    log.Debug("block:0 batch:3")
+    log.Debug("block:0 batch:4")
     l1UserTxs = til.L1TxsToCommonL1Txs(tc.Queues[*blocks[0].Rollup.Batches[3].Batch.ForgeL1TxsNum])
     l2Txs = common.L2TxsToPoolL2Txs(blocks[0].Rollup.Batches[3].L2Txs)
     _, err = tp.ProcessTxs(nil, l1UserTxs, blocks[0].Rollup.Batches[3].L1CoordinatorTxs, l2Txs)
@@ -252,7 +252,7 @@ func TestProcessTxsBalances(t *testing.T) {
     checkBalance(t, tc, sdb, "A", 1, "500")
     assert.Equal(t, "12433441613247342495680642890662773367605896324555599297255745922589338651261", tp.s.MT.Root().BigInt().String())
-    log.Debug("block:0 batch:4")
+    log.Debug("block:0 batch:5")
     l1UserTxs = til.L1TxsToCommonL1Txs(tc.Queues[*blocks[0].Rollup.Batches[4].Batch.ForgeL1TxsNum])
     l2Txs = common.L2TxsToPoolL2Txs(blocks[0].Rollup.Batches[4].L2Txs)
     _, err = tp.ProcessTxs(nil, l1UserTxs, blocks[0].Rollup.Batches[4].L1CoordinatorTxs, l2Txs)
@@ -261,7 +261,7 @@ func TestProcessTxsBalances(t *testing.T) {
     checkBalance(t, tc, sdb, "A", 1, "500")
     assert.Equal(t, "12433441613247342495680642890662773367605896324555599297255745922589338651261", tp.s.MT.Root().BigInt().String())
-    log.Debug("block:0 batch:5")
+    log.Debug("block:0 batch:6")
     l1UserTxs = til.L1TxsToCommonL1Txs(tc.Queues[*blocks[0].Rollup.Batches[5].Batch.ForgeL1TxsNum])
     l2Txs = common.L2TxsToPoolL2Txs(blocks[0].Rollup.Batches[5].L2Txs)
     _, err = tp.ProcessTxs(nil, l1UserTxs, blocks[0].Rollup.Batches[5].L1CoordinatorTxs, l2Txs)
@@ -272,7 +272,7 @@ func TestProcessTxsBalances(t *testing.T) {
     assert.Equal(t, "4191361650490017591061467288209836928064232431729236465872209988325272262963", tp.s.MT.Root().BigInt().String())
     coordIdxs := []common.Idx{261, 262}
-    log.Debug("block:0 batch:6")
+    log.Debug("block:0 batch:7")
     l1UserTxs = til.L1TxsToCommonL1Txs(tc.Queues[*blocks[0].Rollup.Batches[6].Batch.ForgeL1TxsNum])
     l2Txs = common.L2TxsToPoolL2Txs(blocks[0].Rollup.Batches[6].L2Txs)
     _, err = tp.ProcessTxs(coordIdxs, l1UserTxs, blocks[0].Rollup.Batches[6].L1CoordinatorTxs, l2Txs)
@@ -287,7 +287,7 @@ func TestProcessTxsBalances(t *testing.T) {
     checkBalance(t, tc, sdb, "D", 0, "800")
     assert.Equal(t, "7614010373759339299470010949167613050707822522530721724565424494781010548240", tp.s.MT.Root().BigInt().String())
-    log.Debug("block:0 batch:7")
+    log.Debug("block:0 batch:8")
     l1UserTxs = til.L1TxsToCommonL1Txs(tc.Queues[*blocks[0].Rollup.Batches[7].Batch.ForgeL1TxsNum])
     l2Txs = common.L2TxsToPoolL2Txs(blocks[0].Rollup.Batches[7].L2Txs)
     _, err = tp.ProcessTxs(coordIdxs, l1UserTxs, blocks[0].Rollup.Batches[7].L1CoordinatorTxs, l2Txs)
@@ -303,7 +303,8 @@ func TestProcessTxsBalances(t *testing.T) {
     checkBalance(t, tc, sdb, "D", 0, "800")
     assert.Equal(t, "21231789250434471575486264439945776732824482207853465397552873521865656677689", tp.s.MT.Root().BigInt().String())
-    log.Debug("block:1 batch:0")
+    coordIdxs = []common.Idx{262}
+    log.Debug("block:1 batch:1")
     l1UserTxs = til.L1TxsToCommonL1Txs(tc.Queues[*blocks[1].Rollup.Batches[0].Batch.ForgeL1TxsNum])
     l2Txs = common.L2TxsToPoolL2Txs(blocks[1].Rollup.Batches[0].L2Txs)
     _, err = tp.ProcessTxs(coordIdxs, l1UserTxs, blocks[1].Rollup.Batches[0].L1CoordinatorTxs, l2Txs)
@@ -319,7 +320,8 @@ func TestProcessTxsBalances(t *testing.T) {
     checkBalance(t, tc, sdb, "D", 0, "470")
     assert.Equal(t, "11289313644810782435120113035387729451095637380468777086895109386127538554246", tp.s.MT.Root().BigInt().String())
-    log.Debug("block:1 batch:1")
+    coordIdxs = []common.Idx{}
+    log.Debug("block:1 batch:2")
     l1UserTxs = til.L1TxsToCommonL1Txs(tc.Queues[*blocks[1].Rollup.Batches[1].Batch.ForgeL1TxsNum])
     l2Txs = common.L2TxsToPoolL2Txs(blocks[1].Rollup.Batches[1].L2Txs)
     _, err = tp.ProcessTxs(coordIdxs, l1UserTxs, blocks[1].Rollup.Batches[1].L1CoordinatorTxs, l2Txs)
@@ -332,8 +334,8 @@ func TestProcessTxsBalances(t *testing.T) {
     _, err = tp.ProcessTxs(coordIdxs, []common.L1Tx{}, []common.L1Tx{}, poolL2Txs)
     require.NoError(t, err)
-    checkBalance(t, tc, sdb, "Coord", 0, "105")
-    checkBalance(t, tc, sdb, "Coord", 1, "40")
+    checkBalance(t, tc, sdb, "Coord", 0, "75")
+    checkBalance(t, tc, sdb, "Coord", 1, "30")
     checkBalance(t, tc, sdb, "A", 0, "510")
     checkBalance(t, tc, sdb, "A", 1, "170")
     checkBalance(t, tc, sdb, "B", 0, "480")
@@ -384,13 +386,13 @@ func TestProcessTxsSynchronizer(t *testing.T) {
     // Process the 1st batch, which contains the L1CoordinatorTxs necessary
     // to create the Coordinator accounts to receive the fees
-    log.Debug("block:0 batch:0, only L1CoordinatorTxs")
+    log.Debug("block:0 batch:1, only L1CoordinatorTxs")
     ptOut, err := tp.ProcessTxs(nil, nil, blocks[0].Rollup.Batches[0].L1CoordinatorTxs, nil)
     require.NoError(t, err)
     assert.Equal(t, 4, len(ptOut.CreatedAccounts))
     assert.Equal(t, 0, len(ptOut.CollectedFees))
-    log.Debug("block:0 batch:1")
+    log.Debug("block:0 batch:2")
     l2Txs := common.L2TxsToPoolL2Txs(blocks[0].Rollup.Batches[1].L2Txs)
     ptOut, err = tp.ProcessTxs(coordIdxs, blocks[0].Rollup.L1UserTxs,
         blocks[0].Rollup.Batches[1].L1CoordinatorTxs, l2Txs)
@@ -406,7 +408,7 @@ func TestProcessTxsSynchronizer(t *testing.T) {
     require.NoError(t, err)
     assert.Equal(t, "50", acc.Balance.String())
-    log.Debug("block:0 batch:2")
+    log.Debug("block:0 batch:3")
     l2Txs = common.L2TxsToPoolL2Txs(blocks[0].Rollup.Batches[2].L2Txs)
     ptOut, err = tp.ProcessTxs(coordIdxs, nil, blocks[0].Rollup.Batches[2].L1CoordinatorTxs, l2Txs)
     require.NoError(t, err)
@@ -421,7 +423,7 @@ func TestProcessTxsSynchronizer(t *testing.T) {
     require.NoError(t, err)
     assert.Equal(t, "35", acc.Balance.String())
-    log.Debug("block:1 batch:0")
+    log.Debug("block:1 batch:1")
     l2Txs = common.L2TxsToPoolL2Txs(blocks[1].Rollup.Batches[0].L2Txs)
     // before processing expect l2Txs[0:2].Nonce==0
     assert.Equal(t, common.Nonce(0), l2Txs[0].Nonce)
@@ -447,7 +449,7 @@ func TestProcessTxsSynchronizer(t *testing.T) {
     require.NoError(t, err)
     assert.Equal(t, "57", acc.Balance.String())
-    log.Debug("block:1 batch:1")
+    log.Debug("block:1 batch:2")
     l2Txs = common.L2TxsToPoolL2Txs(blocks[1].Rollup.Batches[1].L2Txs)
     ptOut, err = tp.ProcessTxs(coordIdxs, blocks[1].Rollup.L1UserTxs,
         blocks[1].Rollup.Batches[1].L1CoordinatorTxs, l2Txs)
@@ -506,13 +508,13 @@ func TestProcessTxsBatchBuilder(t *testing.T) {
     // Process the 1st batch, which contains the L1CoordinatorTxs necessary
     // to create the Coordinator accounts to receive the fees
-    log.Debug("block:0 batch:0, only L1CoordinatorTxs")
+    log.Debug("block:0 batch:1, only L1CoordinatorTxs")
    ptOut, err := tp.ProcessTxs(nil, nil, blocks[0].Rollup.Batches[0].L1CoordinatorTxs, nil)
     require.NoError(t, err)
     // expect 0 at CreatedAccount, as is only computed when StateDB.Type==TypeSynchronizer
     assert.Equal(t, 0, len(ptOut.CreatedAccounts))
-    log.Debug("block:0 batch:1")
+    log.Debug("block:0 batch:2")
     l2Txs := common.L2TxsToPoolL2Txs(blocks[0].Rollup.Batches[1].L2Txs)
     ptOut, err = tp.ProcessTxs(coordIdxs, blocks[0].Rollup.L1UserTxs, blocks[0].Rollup.Batches[1].L1CoordinatorTxs, l2Txs)
     require.NoError(t, err)
@@ -522,7 +524,7 @@ func TestProcessTxsBatchBuilder(t *testing.T) {
     require.NoError(t, err)
     assert.Equal(t, "50", acc.Balance.String())
-    log.Debug("block:0 batch:2")
+    log.Debug("block:0 batch:3")
     l2Txs = common.L2TxsToPoolL2Txs(blocks[0].Rollup.Batches[2].L2Txs)
     ptOut, err = tp.ProcessTxs(coordIdxs, nil, blocks[0].Rollup.Batches[2].L1CoordinatorTxs, l2Txs)
     require.NoError(t, err)
@@ -532,7 +534,7 @@ func TestProcessTxsBatchBuilder(t *testing.T) {
     require.NoError(t, err)
     assert.Equal(t, "35", acc.Balance.String())
-    log.Debug("block:1 batch:0")
+    log.Debug("block:1 batch:1")
     l2Txs = common.L2TxsToPoolL2Txs(blocks[1].Rollup.Batches[0].L2Txs)
     _, err = tp.ProcessTxs(coordIdxs, nil, blocks[1].Rollup.Batches[0].L1CoordinatorTxs, l2Txs)
     require.NoError(t, err)
@@ -540,7 +542,7 @@ func TestProcessTxsBatchBuilder(t *testing.T) {
     require.NoError(t, err)
     assert.Equal(t, "57", acc.Balance.String())
-    log.Debug("block:1 batch:1")
+    log.Debug("block:1 batch:2")
     l2Txs = common.L2TxsToPoolL2Txs(blocks[1].Rollup.Batches[1].L2Txs)
     _, err = tp.ProcessTxs(coordIdxs, blocks[1].Rollup.L1UserTxs, blocks[1].Rollup.Batches[1].L1CoordinatorTxs, l2Txs)
     require.NoError(t, err)
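The corrected Coord balances (105 to 75, 40 to 30) are consistent with the reduced coordIdxs sets introduced above: with fewer coordinator fee accounts selected for the block-1 batches, less in fees accumulates on them. For reference, checkBalance is a helper defined in this test file whose body is not shown in the diff; a hypothetical reconstruction consistent with its call sites (the til.Context field names and the account lookup are assumptions):

    // checkBalance asserts the balance of user `username` for token `tokenID`.
    func checkBalance(t *testing.T, tc *til.Context, sdb *statedb.StateDB,
        username string, tokenID int, expected string) {
        idx := tc.Users[username].Accounts[common.TokenID(tokenID)].Idx // assumed layout
        acc, err := sdb.GetAccount(idx)
        require.NoError(t, err)
        assert.Equal(t, expected, acc.Balance.String())
    }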

txprocessor/zkinputsgen_test.go (+70 -56)
File diff suppressed because it is too large


txselector/txselector_test.go (+12 -12)

@@ -185,7 +185,7 @@ func TestGetL2TxSelectionMinimumFlow0(t *testing.T) {
     // coordIdxs, accAuths, l1UserTxs, l1CoordTxs, l2Txs, err
-    log.Debug("block:0 batch:0")
+    log.Debug("block:0 batch:1")
     l1UserTxs := []common.L1Tx{}
     _, _, oL1UserTxs, oL1CoordTxs, oL2Txs, err := txsel.GetL1L2TxSelection(selectionConfig, l1UserTxs)
     require.NoError(t, err)
@@ -195,7 +195,7 @@ func TestGetL2TxSelectionMinimumFlow0(t *testing.T) {
     assert.Equal(t, common.BatchNum(1), txsel.localAccountsDB.CurrentBatch())
     assert.Equal(t, common.Idx(255), txsel.localAccountsDB.CurrentIdx())
-    log.Debug("block:0 batch:1")
+    log.Debug("block:0 batch:2")
     l1UserTxs = []common.L1Tx{}
     _, _, oL1UserTxs, oL1CoordTxs, oL2Txs, err = txsel.GetL1L2TxSelection(selectionConfig, l1UserTxs)
     require.NoError(t, err)
@@ -205,7 +205,7 @@ func TestGetL2TxSelectionMinimumFlow0(t *testing.T) {
     assert.Equal(t, common.BatchNum(2), txsel.localAccountsDB.CurrentBatch())
     assert.Equal(t, common.Idx(255), txsel.localAccountsDB.CurrentIdx())
-    log.Debug("block:0 batch:2")
+    log.Debug("block:0 batch:3")
     l1UserTxs = til.L1TxsToCommonL1Txs(tc.Queues[*blocks[0].Rollup.Batches[2].Batch.ForgeL1TxsNum])
     _, _, oL1UserTxs, oL1CoordTxs, oL2Txs, err = txsel.GetL1L2TxSelection(selectionConfig, l1UserTxs)
     require.NoError(t, err)
@@ -217,7 +217,7 @@ func TestGetL2TxSelectionMinimumFlow0(t *testing.T) {
     checkBalance(t, tc, txsel, "A", 0, "500")
     checkBalance(t, tc, txsel, "C", 1, "0")
-    log.Debug("block:0 batch:3")
+    log.Debug("block:0 batch:4")
     l1UserTxs = til.L1TxsToCommonL1Txs(tc.Queues[*blocks[0].Rollup.Batches[3].Batch.ForgeL1TxsNum])
     _, _, oL1UserTxs, oL1CoordTxs, oL2Txs, err = txsel.GetL1L2TxSelection(selectionConfig, l1UserTxs)
     require.NoError(t, err)
@@ -230,7 +230,7 @@ func TestGetL2TxSelectionMinimumFlow0(t *testing.T) {
     checkBalance(t, tc, txsel, "A", 1, "500")
     checkBalance(t, tc, txsel, "C", 1, "0")
-    log.Debug("block:0 batch:4")
+    log.Debug("block:0 batch:5")
     l1UserTxs = til.L1TxsToCommonL1Txs(tc.Queues[*blocks[0].Rollup.Batches[4].Batch.ForgeL1TxsNum])
     _, _, oL1UserTxs, oL1CoordTxs, oL2Txs, err = txsel.GetL1L2TxSelection(selectionConfig, l1UserTxs)
     require.NoError(t, err)
@@ -243,7 +243,7 @@ func TestGetL2TxSelectionMinimumFlow0(t *testing.T) {
     checkBalance(t, tc, txsel, "A", 1, "500")
     checkBalance(t, tc, txsel, "C", 1, "0")
-    log.Debug("block:0 batch:5")
+    log.Debug("block:0 batch:6")
     l1UserTxs = til.L1TxsToCommonL1Txs(tc.Queues[*blocks[0].Rollup.Batches[5].Batch.ForgeL1TxsNum])
     _, _, oL1UserTxs, oL1CoordTxs, oL2Txs, err = txsel.GetL1L2TxSelection(selectionConfig, l1UserTxs)
     require.NoError(t, err)
@@ -257,8 +257,8 @@ func TestGetL2TxSelectionMinimumFlow0(t *testing.T) {
     checkBalance(t, tc, txsel, "B", 0, "400")
     checkBalance(t, tc, txsel, "C", 1, "0")
-    log.Debug("block:0 batch:6")
-    // simulate the PoolL2Txs of the batch6
+    log.Debug("block:0 batch:7")
+    // simulate the PoolL2Txs of the batch7
     batchPoolL2 := `
 Type: PoolL2
 PoolTransfer(1) A-B: 200 (126)
@@ -302,8 +302,8 @@ func TestGetL2TxSelectionMinimumFlow0(t *testing.T) {
     err = txsel.l2db.StartForging(common.TxIDsFromPoolL2Txs(poolL2Txs), txsel.localAccountsDB.CurrentBatch())
     require.NoError(t, err)
-    log.Debug("block:0 batch:7")
-    // simulate the PoolL2Txs of the batch7
+    log.Debug("block:0 batch:8")
+    // simulate the PoolL2Txs of the batch8
     batchPoolL2 = `
 Type: PoolL2
 PoolTransfer(0) A-B: 100 (126)
@@ -337,8 +337,8 @@ func TestGetL2TxSelectionMinimumFlow0(t *testing.T) {
     err = txsel.l2db.StartForging(common.TxIDsFromPoolL2Txs(poolL2Txs), txsel.localAccountsDB.CurrentBatch())
     require.NoError(t, err)
-    log.Debug("block:1 batch:0")
-    // simulate the PoolL2Txs of the batch6
+    log.Debug("(batch9)block:1 batch:1")
+    // simulate the PoolL2Txs of the batch9
     batchPoolL2 = `
 Type: PoolL2
 PoolTransfer(0) D-A: 300 (126)

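The unchanged assertions above (common.BatchNum(1), then common.BatchNum(2)) show why the log labels move to 1-indexed batch numbers: each call to GetL1L2TxSelection checkpoints the selector's local StateDB once, so after the N-th selection CurrentBatch() is N. A condensed sketch of that invariant, using only names from the diff:

    // The first selection corresponds to "block:0 batch:1": the local
    // StateDB advances from batch 0 to batch 1.
    l1UserTxs := []common.L1Tx{}
    _, _, _, _, _, err := txsel.GetL1L2TxSelection(selectionConfig, l1UserTxs)
    require.NoError(t, err)
    assert.Equal(t, common.BatchNum(1), txsel.localAccountsDB.CurrentBatch())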