Eduard S
2021-03-03 14:37:41 +01:00
parent bfba1ba2d2
commit d4f6926311
6 changed files with 281 additions and 224 deletions

View File

@@ -29,7 +29,7 @@ type testNetwork struct {
}

func TestSetRollupVariables(t *testing.T) {
-    api.h.SetRollupVariables(tc.rollupVars)
+    api.h.SetRollupVariables(&tc.rollupVars)
    ni, err := api.h.GetNodeInfoAPI()
    assert.NoError(t, err)
    assertEqualRollupVariables(t, tc.rollupVars, ni.StateAPI.Rollup, true)
@@ -51,14 +51,14 @@ func assertEqualRollupVariables(t *testing.T, rollupVariables common.RollupVaria
}

func TestSetWDelayerVariables(t *testing.T) {
-    api.h.SetWDelayerVariables(tc.wdelayerVars)
+    api.h.SetWDelayerVariables(&tc.wdelayerVars)
    ni, err := api.h.GetNodeInfoAPI()
    assert.NoError(t, err)
    assert.Equal(t, tc.wdelayerVars, ni.StateAPI.WithdrawalDelayer)
}

func TestSetAuctionVariables(t *testing.T) {
-    api.h.SetAuctionVariables(tc.auctionVars)
+    api.h.SetAuctionVariables(&tc.auctionVars)
    ni, err := api.h.GetNodeInfoAPI()
    assert.NoError(t, err)
    assertEqualAuctionVariables(t, tc.auctionVars, ni.StateAPI.Auction)
@@ -165,9 +165,9 @@ func TestGetState(t *testing.T) {
    lastBlock := tc.blocks[3]
    lastBatchNum := common.BatchNum(12)
    currentSlotNum := int64(1)
-    api.h.SetRollupVariables(tc.rollupVars)
-    api.h.SetWDelayerVariables(tc.wdelayerVars)
-    api.h.SetAuctionVariables(tc.auctionVars)
+    api.h.SetRollupVariables(&tc.rollupVars)
+    api.h.SetWDelayerVariables(&tc.wdelayerVars)
+    api.h.SetAuctionVariables(&tc.auctionVars)
    err := api.h.UpdateNetworkInfo(lastBlock, lastBlock, lastBatchNum, currentSlotNum)
    assert.NoError(t, err)
    err = api.h.UpdateMetrics()

View File

@@ -1114,166 +1114,166 @@ func TestAddEscapeHatchWithdrawals(t *testing.T) {
    assert.Equal(t, escapeHatchWithdrawals, dbEscapeHatchWithdrawals)
}
// func TestGetMetricsAPI(t *testing.T) {
// test.WipeDB(historyDB.DB())
// set := `
// Type: Blockchain
//
// AddToken(1)
//
// CreateAccountDeposit(1) A: 1000 // numTx=1
// CreateAccountDeposit(1) B: 2000 // numTx=2
// CreateAccountDeposit(1) C: 3000 //numTx=3
//
// // block 0 is stored as default in the DB
// // block 1 does not exist
// > batchL1 // numBatches=1
// > batchL1 // numBatches=2
// > block // blockNum=2
//
// Transfer(1) C-A : 10 (1) // numTx=4
// > batch // numBatches=3
// > block // blockNum=3
// Transfer(1) B-C : 10 (1) // numTx=5
// > batch // numBatches=5
// > block // blockNum=4
// Transfer(1) A-B : 10 (1) // numTx=6
// > batch // numBatches=5
// > block // blockNum=5
// Transfer(1) A-B : 10 (1) // numTx=7
// > batch // numBatches=6
// > block // blockNum=6
// `
//
// const numBatches int = 6
// const numTx int = 7
// const blockNum = 6 - 1
//
// tc := til.NewContext(uint16(0), common.RollupConstMaxL1UserTx)
// tilCfgExtra := til.ConfigExtra{
// BootCoordAddr: ethCommon.HexToAddress("0xE39fEc6224708f0772D2A74fd3f9055A90E0A9f2"),
// CoordUser: "A",
// }
// blocks, err := tc.GenerateBlocks(set)
// require.NoError(t, err)
// err = tc.FillBlocksExtra(blocks, &tilCfgExtra)
// require.NoError(t, err)
//
// // Sanity check
// require.Equal(t, blockNum, len(blocks))
//
// // Adding one batch per block
// // batch frequency can be chosen
// const frequency int = 15
//
// for i := range blocks {
// blocks[i].Block.Timestamp = time.Now().Add(-time.Second * time.Duration(frequency*(len(blocks)-i)))
// err = historyDB.AddBlockSCData(&blocks[i])
// assert.NoError(t, err)
// }
//
// res, err := historyDBWithACC.GetMetricsAPI(common.BatchNum(numBatches))
// assert.NoError(t, err)
//
// assert.Equal(t, float64(numTx)/float64(numBatches), res.TransactionsPerBatch)
//
// // Frequency is not exactly the desired one, some decimals may appear
// // There is a -2 as time for first and last batch is not taken into account
// assert.InEpsilon(t, float64(frequency)*float64(numBatches-2)/float64(numBatches), res.BatchFrequency, 0.01)
// assert.InEpsilon(t, float64(numTx)/float64(frequency*blockNum-frequency), res.TransactionsPerSecond, 0.01)
// assert.Equal(t, int64(3), res.TotalAccounts)
// assert.Equal(t, int64(3), res.TotalBJJs)
// // Til does not set fees
// assert.Equal(t, float64(0), res.AvgTransactionFee)
// }
//
// func TestGetMetricsAPIMoreThan24Hours(t *testing.T) {
// test.WipeDB(historyDB.DB())
//
// testUsersLen := 3
// var set []til.Instruction
// for user := 0; user < testUsersLen; user++ {
// set = append(set, til.Instruction{
// Typ: common.TxTypeCreateAccountDeposit,
// TokenID: common.TokenID(0),
// DepositAmount: big.NewInt(1000000),
// Amount: big.NewInt(0),
// From: fmt.Sprintf("User%02d", user),
// })
// set = append(set, til.Instruction{Typ: til.TypeNewBlock})
// }
// set = append(set, til.Instruction{Typ: til.TypeNewBatchL1})
// set = append(set, til.Instruction{Typ: til.TypeNewBatchL1})
// set = append(set, til.Instruction{Typ: til.TypeNewBlock})
//
// // Transfers
// const numBlocks int = 30
// for x := 0; x < numBlocks; x++ {
// set = append(set, til.Instruction{
// Typ: common.TxTypeTransfer,
// TokenID: common.TokenID(0),
// DepositAmount: big.NewInt(1),
// Amount: big.NewInt(0),
// From: "User00",
// To: "User01",
// })
// set = append(set, til.Instruction{Typ: til.TypeNewBatch})
// set = append(set, til.Instruction{Typ: til.TypeNewBlock})
// }
//
// var chainID uint16 = 0
// tc := til.NewContext(chainID, common.RollupConstMaxL1UserTx)
// blocks, err := tc.GenerateBlocksFromInstructions(set)
// assert.NoError(t, err)
//
// tilCfgExtra := til.ConfigExtra{
// CoordUser: "A",
// }
// err = tc.FillBlocksExtra(blocks, &tilCfgExtra)
// require.NoError(t, err)
//
// const numBatches int = 2 + numBlocks
// const blockNum = 4 + numBlocks
//
// // Sanity check
// require.Equal(t, blockNum, len(blocks))
//
// // Adding one batch per block
// // batch frequency can be chosen
// const blockTime time.Duration = 3600 * time.Second
// now := time.Now()
// require.NoError(t, err)
//
// for i := range blocks {
// blocks[i].Block.Timestamp = now.Add(-time.Duration(len(blocks)-1-i) * blockTime)
// err = historyDB.AddBlockSCData(&blocks[i])
// assert.NoError(t, err)
// }
//
// res, err := historyDBWithACC.GetMetricsAPI(common.BatchNum(numBatches))
// assert.NoError(t, err)
//
// assert.InEpsilon(t, 1.0, res.TransactionsPerBatch, 0.1)
//
// assert.InEpsilon(t, res.BatchFrequency, float64(blockTime/time.Second), 0.1)
// assert.InEpsilon(t, 1.0/float64(blockTime/time.Second), res.TransactionsPerSecond, 0.1)
// assert.Equal(t, int64(3), res.TotalAccounts)
// assert.Equal(t, int64(3), res.TotalBJJs)
// // Til does not set fees
// assert.Equal(t, float64(0), res.AvgTransactionFee)
// }
//
// func TestGetMetricsAPIEmpty(t *testing.T) {
// test.WipeDB(historyDB.DB())
// _, err := historyDBWithACC.GetMetricsAPI(0)
// assert.NoError(t, err)
// }
//
// func TestGetAvgTxFeeEmpty(t *testing.T) {
// test.WipeDB(historyDB.DB())
// _, err := historyDBWithACC.GetAvgTxFeeAPI()
// assert.NoError(t, err)
// }
func TestGetLastL1TxsNum(t *testing.T) {
    test.WipeDB(historyDB.DB())
@@ -1460,3 +1460,21 @@ func setTestBlocks(from, to int64) []common.Block {
    }
    return blocks
}
func TestNodeInfo(t *testing.T) {
    test.WipeDB(historyDB.DB())

    clientSetup := test.NewClientSetupExample()
    constants := &Constants{
        RollupConstants:   *clientSetup.RollupConstants,
        AuctionConstants:  *clientSetup.AuctionConstants,
        WDelayerConstants: *clientSetup.WDelayerConstants,
        ChainID:           42,
        HermezAddress:     clientSetup.AuctionConstants.HermezRollup,
    }
    err := historyDB.SetConstants(constants)
    require.NoError(t, err)

    dbConstants, err := historyDB.GetConstants()
    require.NoError(t, err)
    assert.Equal(t, constants, dbConstants)
}
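The round trip exercised by TestNodeInfo is how a consumer of the HistoryDB would load the persisted constants at start-up. A minimal sketch under stated assumptions: the package name nodecfg and the helper loadConstants are hypothetical and not part of this commit, and the module import path is assumed to be the repository's usual one.

package nodecfg

import (
    "fmt"

    "github.com/hermeznetwork/hermez-node/db/historydb"
)

// loadConstants reads the constants persisted by SetConstants and
// sanity-checks the chain ID; it fails if the node_info row was never filled.
func loadConstants(hdb *historydb.HistoryDB, wantChainID uint16) (*historydb.Constants, error) {
    constants, err := hdb.GetConstants()
    if err != nil {
        return nil, fmt.Errorf("reading node_info constants: %w", err)
    }
    if constants.ChainID != wantChainID {
        return nil, fmt.Errorf("unexpected chain ID: got %d, want %d",
            constants.ChainID, wantChainID)
    }
    return constants, nil
}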

View File

@@ -61,11 +61,13 @@ type Constants struct {
    ChainID           uint16
    HermezAddress     ethCommon.Address
}

type NodeInfo struct {
-    MaxPoolTxs uint32    `meddler:"max_pool_txs"`
-    MinFeeUSD  float64   `meddler:"min_fee"`
-    StateAPI   StateAPI  `meddler:"state,json"`
-    Constants  Constants `meddler:"constants,json"`
+    ItemID     int        `meddler:"item_id"`
+    MaxPoolTxs *uint32    `meddler:"max_pool_txs"`
+    MinFeeUSD  *float64   `meddler:"min_fee"`
+    StateAPI   *StateAPI  `meddler:"state,json"`
+    Constants  *Constants `meddler:"constants,json"`
}

func (hdb *HistoryDB) GetNodeInfo() (*NodeInfo, error) {
@@ -76,73 +78,63 @@ func (hdb *HistoryDB) GetNodeInfo() (*NodeInfo, error) {
    return ni, tracerr.Wrap(err)
}
func (hdb *HistoryDB) SetConstants(constants *Constants) error {
    _constants := struct {
        Constants *Constants `meddler:"constants,json"`
    }{constants}
    values, err := meddler.Default.Values(&_constants, false)
    if err != nil {
        return tracerr.Wrap(err)
    }
    _, err = hdb.dbWrite.Exec(
        "UPDATE node_info SET constants = $1 WHERE item_id = 1;",
        values[0],
    )
    return tracerr.Wrap(err)
}

func (hdb *HistoryDB) GetConstants() (*Constants, error) {
    var nodeInfo NodeInfo
    err := meddler.QueryRow(
        hdb.dbRead, &nodeInfo,
        "SELECT constants FROM node_info WHERE item_id = 1;",
    )
    return nodeInfo.Constants, tracerr.Wrap(err)
}
func (hdb *HistoryDB) SetInitialNodeInfo(maxPoolTxs uint32, minFeeUSD float64, constants *Constants) error {
    ni := &NodeInfo{
-        MaxPoolTxs: maxPoolTxs,
-        MinFeeUSD:  minFeeUSD,
-        Constants:  *constants,
+        MaxPoolTxs: &maxPoolTxs,
+        MinFeeUSD:  &minFeeUSD,
+        Constants:  constants,
    }
    return tracerr.Wrap(meddler.Insert(hdb.dbWrite, "node_info", ni))
}
// SetRollupVariables set Status.Rollup variables
-func (hdb *HistoryDB) SetRollupVariables(rollupVariables common.RollupVariables) error {
+func (hdb *HistoryDB) SetRollupVariables(rollupVariables *common.RollupVariables) error {
    setUpdatedNodeInfo := func(txn *sqlx.Tx, ni *NodeInfo) error {
-        var rollupVars RollupVariablesAPI
-        rollupVars.EthBlockNum = rollupVariables.EthBlockNum
-        rollupVars.FeeAddToken = apitypes.NewBigIntStr(rollupVariables.FeeAddToken)
-        rollupVars.ForgeL1L2BatchTimeout = rollupVariables.ForgeL1L2BatchTimeout
-        rollupVars.WithdrawalDelay = rollupVariables.WithdrawalDelay
-        for i, bucket := range rollupVariables.Buckets {
-            var apiBucket BucketParamsAPI
-            apiBucket.CeilUSD = apitypes.NewBigIntStr(bucket.CeilUSD)
-            apiBucket.Withdrawals = apitypes.NewBigIntStr(bucket.Withdrawals)
-            apiBucket.BlockWithdrawalRate = apitypes.NewBigIntStr(bucket.BlockWithdrawalRate)
-            apiBucket.MaxWithdrawals = apitypes.NewBigIntStr(bucket.MaxWithdrawals)
-            rollupVars.Buckets[i] = apiBucket
-        }
-        rollupVars.SafeMode = rollupVariables.SafeMode
-        ni.StateAPI.Rollup = rollupVars
+        rollupVars := NewRollupVariablesAPI(rollupVariables)
+        ni.StateAPI.Rollup = *rollupVars
        return nil
    }
    return hdb.updateNodeInfo(setUpdatedNodeInfo)
}
// SetWDelayerVariables set Status.WithdrawalDelayer variables
-func (hdb *HistoryDB) SetWDelayerVariables(wDelayerVariables common.WDelayerVariables) error {
+func (hdb *HistoryDB) SetWDelayerVariables(wDelayerVariables *common.WDelayerVariables) error {
    setUpdatedNodeInfo := func(txn *sqlx.Tx, ni *NodeInfo) error {
-        ni.StateAPI.WithdrawalDelayer = wDelayerVariables
+        ni.StateAPI.WithdrawalDelayer = *wDelayerVariables
        return nil
    }
    return hdb.updateNodeInfo(setUpdatedNodeInfo)
}
// SetAuctionVariables set Status.Auction variables
-func (hdb *HistoryDB) SetAuctionVariables(auctionVariables common.AuctionVariables) error {
+func (hdb *HistoryDB) SetAuctionVariables(auctionVariables *common.AuctionVariables) error {
    setUpdatedNodeInfo := func(txn *sqlx.Tx, ni *NodeInfo) error {
-        var auctionVars AuctionVariablesAPI
-        auctionVars.EthBlockNum = auctionVariables.EthBlockNum
-        auctionVars.DonationAddress = auctionVariables.DonationAddress
-        auctionVars.BootCoordinator = auctionVariables.BootCoordinator
-        auctionVars.BootCoordinatorURL = auctionVariables.BootCoordinatorURL
-        auctionVars.DefaultSlotSetBidSlotNum = auctionVariables.DefaultSlotSetBidSlotNum
-        auctionVars.ClosedAuctionSlots = auctionVariables.ClosedAuctionSlots
-        auctionVars.OpenAuctionSlots = auctionVariables.OpenAuctionSlots
-        auctionVars.Outbidding = auctionVariables.Outbidding
-        auctionVars.SlotDeadline = auctionVariables.SlotDeadline
-        for i, slot := range auctionVariables.DefaultSlotSetBid {
-            auctionVars.DefaultSlotSetBid[i] = apitypes.NewBigIntStr(slot)
-        }
-        for i, ratio := range auctionVariables.AllocationRatio {
-            auctionVars.AllocationRatio[i] = ratio
-        }
-        ni.StateAPI.Auction = auctionVars
+        auctionVars := NewAuctionVariablesAPI(auctionVariables)
+        ni.StateAPI.Auction = *auctionVars
        return nil
    }
    return hdb.updateNodeInfo(setUpdatedNodeInfo)
@@ -473,11 +465,11 @@ func (hdb *HistoryDB) UpdateRecommendedFee() error {
            avgTransactionFee = 0
        }
        ni.StateAPI.RecommendedFee.ExistingAccount =
-            math.Max(avgTransactionFee, ni.MinFeeUSD)
+            math.Max(avgTransactionFee, *ni.MinFeeUSD)
        ni.StateAPI.RecommendedFee.CreatesAccount =
-            math.Max(createAccountExtraFeePercentage*avgTransactionFee, ni.MinFeeUSD)
+            math.Max(createAccountExtraFeePercentage*avgTransactionFee, *ni.MinFeeUSD)
        ni.StateAPI.RecommendedFee.CreatesAccountAndRegister =
-            math.Max(createAccountInternalExtraFeePercentage*avgTransactionFee, ni.MinFeeUSD)
+            math.Max(createAccountInternalExtraFeePercentage*avgTransactionFee, *ni.MinFeeUSD)
        return nil
    }
    return hdb.updateNodeInfo(setUpdatedNodeInfo)
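SetConstants shows the update pattern this file now relies on: there is always exactly one node_info row (item_id = 1, inserted by the migration further down), and each piece of state is written by updating a single column of that row. A hedged sketch of another setter in the same style; SetMinFeeUSD is hypothetical and not part of this commit, and it assumes the surrounding historydb package with its tracerr import.

// SetMinFeeUSD would update the min_fee column of the single node_info row,
// mirroring the SQL used by SetConstants above (hypothetical helper).
func (hdb *HistoryDB) SetMinFeeUSD(minFeeUSD float64) error {
    _, err := hdb.dbWrite.Exec(
        "UPDATE node_info SET min_fee = $1 WHERE item_id = 1;",
        minFeeUSD,
    )
    return tracerr.Wrap(err)
}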

View File

@@ -363,6 +363,27 @@ type RollupVariablesAPI struct {
    SafeMode bool `json:"safeMode" meddler:"safe_mode"`
}
// NewRollupVariablesAPI creates a RollupVariablesAPI from common.RollupVariables
func NewRollupVariablesAPI(rollupVariables *common.RollupVariables) *RollupVariablesAPI {
    rollupVars := RollupVariablesAPI{
        EthBlockNum:           rollupVariables.EthBlockNum,
        FeeAddToken:           apitypes.NewBigIntStr(rollupVariables.FeeAddToken),
        ForgeL1L2BatchTimeout: rollupVariables.ForgeL1L2BatchTimeout,
        WithdrawalDelay:       rollupVariables.WithdrawalDelay,
        SafeMode:              rollupVariables.SafeMode,
    }
    for i, bucket := range rollupVariables.Buckets {
        rollupVars.Buckets[i] = BucketParamsAPI{
            CeilUSD:             apitypes.NewBigIntStr(bucket.CeilUSD),
            Withdrawals:         apitypes.NewBigIntStr(bucket.Withdrawals),
            BlockWithdrawalRate: apitypes.NewBigIntStr(bucket.BlockWithdrawalRate),
            MaxWithdrawals:      apitypes.NewBigIntStr(bucket.MaxWithdrawals),
        }
    }
    return &rollupVars
}
// AuctionVariablesAPI are the variables of the Auction Smart Contract
type AuctionVariablesAPI struct {
    EthBlockNum int64 `json:"ethereumBlockNum" meddler:"eth_block_num"`
@@ -387,3 +408,28 @@ type AuctionVariablesAPI struct {
    // SlotDeadline Number of blocks at the end of a slot in which any coordinator can forge if the winner has not forged one before
    SlotDeadline uint8 `json:"slotDeadline" meddler:"slot_deadline" validate:"required"`
}
// NewAuctionVariablesAPI creates a AuctionVariablesAPI from common.AuctionVariables
func NewAuctionVariablesAPI(auctionVariables *common.AuctionVariables) *AuctionVariablesAPI {
    auctionVars := AuctionVariablesAPI{
        EthBlockNum:              auctionVariables.EthBlockNum,
        DonationAddress:          auctionVariables.DonationAddress,
        BootCoordinator:          auctionVariables.BootCoordinator,
        BootCoordinatorURL:       auctionVariables.BootCoordinatorURL,
        DefaultSlotSetBidSlotNum: auctionVariables.DefaultSlotSetBidSlotNum,
        ClosedAuctionSlots:       auctionVariables.ClosedAuctionSlots,
        OpenAuctionSlots:         auctionVariables.OpenAuctionSlots,
        Outbidding:               auctionVariables.Outbidding,
        SlotDeadline:             auctionVariables.SlotDeadline,
    }
    for i, slot := range auctionVariables.DefaultSlotSetBid {
        auctionVars.DefaultSlotSetBid[i] = apitypes.NewBigIntStr(slot)
    }
    for i, ratio := range auctionVariables.AllocationRatio {
        auctionVars.AllocationRatio[i] = ratio
    }
    return &auctionVars
}
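These constructors centralize the common-to-API conversion that SetRollupVariables and SetAuctionVariables previously inlined. Since the converted values end up in the NodeInfo state column through meddler's json tag, one quick way to inspect what gets stored is to marshal the result. A small sketch under that assumption; rollupVarsJSON is illustrative only and assumes encoding/json is imported in this package.

// rollupVarsJSON converts synchronizer rollup variables to their API view and
// encodes them as JSON, roughly the shape persisted for this part of StateAPI
// (illustrative helper, not part of this commit).
func rollupVarsJSON(vars *common.RollupVariables) ([]byte, error) {
    apiVars := NewRollupVariablesAPI(vars)
    return json.Marshal(apiVars)
}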

View File

@@ -668,6 +668,7 @@ CREATE TABLE node_info (
    min_fee NUMERIC, -- L2DB config
    constants BYTEA -- info of the network that is constant
);
INSERT INTO node_info(item_id) VALUES (1); -- Always have a single row that we will update
-- +migrate Down
-- triggers

View File

@@ -627,13 +627,13 @@ func (n *Node) handleNewBlock(ctx context.Context, stats *synchronizer.Stats, va
    }
    if n.nodeAPI != nil {
        if vars.Rollup != nil {
-            n.historyDB.SetRollupVariables(*vars.Rollup)
+            n.historyDB.SetRollupVariables(vars.Rollup)
        }
        if vars.Auction != nil {
-            n.historyDB.SetAuctionVariables(*vars.Auction)
+            n.historyDB.SetAuctionVariables(vars.Auction)
        }
        if vars.WDelayer != nil {
-            n.historyDB.SetWDelayerVariables(*vars.WDelayer)
+            n.historyDB.SetWDelayerVariables(vars.WDelayer)
        }
        if stats.Synced() {
@@ -660,9 +660,9 @@ func (n *Node) handleReorg(ctx context.Context, stats *synchronizer.Stats, vars
        })
    }
    vars = n.sync.SCVars()
-    n.historyDB.SetRollupVariables(*vars.Rollup)
-    n.historyDB.SetAuctionVariables(*vars.Auction)
-    n.historyDB.SetWDelayerVariables(*vars.WDelayer)
+    n.historyDB.SetRollupVariables(vars.Rollup)
+    n.historyDB.SetAuctionVariables(vars.Auction)
+    n.historyDB.SetWDelayerVariables(vars.WDelayer)
    n.historyDB.UpdateNetworkInfoBlock(
        stats.Eth.LastBlock, stats.Sync.LastBlock,
    )
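The call sites above discard the error returned by the three setters. If a caller wanted that failure surfaced, the calls could be grouped behind a helper. A hedged sketch only: setSCVars is hypothetical and not part of this commit, and it assumes the historydb, common, and tracerr imports used elsewhere in the repository.

// setSCVars applies whichever synchronizer variable sets are non-nil and
// returns the first failure instead of silently dropping it.
func setSCVars(hdb *historydb.HistoryDB, rollup *common.RollupVariables,
    auction *common.AuctionVariables, wDelayer *common.WDelayerVariables) error {
    if rollup != nil {
        if err := hdb.SetRollupVariables(rollup); err != nil {
            return tracerr.Wrap(err)
        }
    }
    if auction != nil {
        if err := hdb.SetAuctionVariables(auction); err != nil {
            return tracerr.Wrap(err)
        }
    }
    if wDelayer != nil {
        if err := hdb.SetWDelayerVariables(wDelayer); err != nil {
            return tracerr.Wrap(err)
        }
    }
    return nil
}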