Merge pull request #631 from hermeznetwork/feature/serveapicli2

Allow serving API only via new cli command
arnau authored 2021-03-15 15:41:36 +01:00 · committed by GitHub
25 changed files with 1436 additions and 869 deletions
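
The new command serves only the HTTP API, presumably without starting the synchronizer or coordinator. As a rough illustration of the shape of such a subcommand, here is a minimal sketch assuming a urfave/cli/v2 entrypoint like the node's existing cli; the command name `serveapi`, the `--cfg` flag and the `serveAPIOnly` helper are illustrative assumptions, not taken verbatim from this PR.

```go
// Sketch: registering an API-only subcommand next to the node's other commands.
// Binary name, command name, flag and helper are assumptions for illustration.
package main

import (
	"fmt"
	"log"
	"os"

	"github.com/urfave/cli/v2"
)

// serveAPIOnly is a hypothetical stand-in for the node code that starts only
// the HTTP API (no synchronizer, no coordinator) from a config file.
func serveAPIOnly(cfgPath string) error {
	fmt.Printf("serving API only with config %q\n", cfgPath)
	return nil
}

func main() {
	app := &cli.App{
		Name:  "heznode",
		Usage: "hermez node",
		Commands: []*cli.Command{
			{
				Name:  "serveapi",
				Usage: "Serve the API only",
				Flags: []cli.Flag{
					&cli.StringFlag{Name: "cfg", Usage: "path to the API config file", Required: true},
				},
				Action: func(c *cli.Context) error {
					return serveAPIOnly(c.String("cfg"))
				},
			},
		},
	}
	if err := app.Run(os.Args); err != nil {
		log.Fatal(err)
	}
}
```

Invocation would then look like `heznode serveapi --cfg api.toml` (binary and file names again assumed).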

View File

@@ -153,8 +153,8 @@ type Coordinator struct {
 	pipelineNum int // Pipeline sequential number. The first pipeline is 1
 	pipelineFromBatch fromBatch // batch from which we started the pipeline
 	provers []prover.Client
-	consts synchronizer.SCConsts
-	vars synchronizer.SCVariables
+	consts common.SCConsts
+	vars common.SCVariables
 	stats synchronizer.Stats
 	started bool
@@ -194,8 +194,8 @@ func NewCoordinator(cfg Config,
 	batchBuilder *batchbuilder.BatchBuilder,
 	serverProofs []prover.Client,
 	ethClient eth.ClientInterface,
-	scConsts *synchronizer.SCConsts,
-	initSCVars *synchronizer.SCVariables,
+	scConsts *common.SCConsts,
+	initSCVars *common.SCVariables,
 ) (*Coordinator, error) {
 	// nolint reason: hardcoded `1.0`, by design the percentage can't be over 100%
 	if cfg.L1BatchTimeoutPerc >= 1.0 { //nolint:gomnd
@@ -284,13 +284,13 @@ type MsgSyncBlock struct {
 	Batches []common.BatchData
 	// Vars contains each Smart Contract variables if they are updated, or
 	// nil if they haven't changed.
-	Vars synchronizer.SCVariablesPtr
+	Vars common.SCVariablesPtr
 }

 // MsgSyncReorg indicates a reorg
 type MsgSyncReorg struct {
 	Stats synchronizer.Stats
-	Vars synchronizer.SCVariablesPtr
+	Vars common.SCVariablesPtr
 }

 // MsgStopPipeline indicates a signal to reset the pipeline
@@ -309,7 +309,7 @@ func (c *Coordinator) SendMsg(ctx context.Context, msg interface{}) {
 	}
 }

-func updateSCVars(vars *synchronizer.SCVariables, update synchronizer.SCVariablesPtr) {
+func updateSCVars(vars *common.SCVariables, update common.SCVariablesPtr) {
 	if update.Rollup != nil {
 		vars.Rollup = *update.Rollup
 	}
@@ -321,7 +321,7 @@ func updateSCVars(vars *synchronizer.SCVariables, update synchronizer.SCVariable
 	}
 }

-func (c *Coordinator) syncSCVars(vars synchronizer.SCVariablesPtr) {
+func (c *Coordinator) syncSCVars(vars common.SCVariablesPtr) {
 	updateSCVars(&c.vars, vars)
 }
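
All of the hunks in this file swap synchronizer.SCConsts/SCVariables/SCVariablesPtr for the same types under the common package. The type definitions themselves are not part of these hunks; as a point of reference, their shape is roughly the following sketch (empty placeholder structs stand in for the real per-contract constant/variable types in common):

```go
// Sketch of the shape of the moved types; the real definitions live in the
// common package of hermez-node and carry the full contract fields.
package common

// Placeholder element types for illustration only.
type (
	RollupConstants   struct{}
	AuctionConstants  struct{}
	WDelayerConstants struct{}
	RollupVariables   struct{}
	AuctionVariables  struct{}
	WDelayerVariables struct{}
)

// SCConsts groups the constants of the three smart contracts.
type SCConsts struct {
	Rollup   RollupConstants
	Auction  AuctionConstants
	WDelayer WDelayerConstants
}

// SCVariables groups the variables of the three smart contracts.
type SCVariables struct {
	Rollup   RollupVariables
	Auction  AuctionVariables
	WDelayer WDelayerVariables
}

// SCVariablesPtr is the nullable counterpart used in sync messages: a nil
// field means "unchanged since the last block", which is exactly what
// updateSCVars checks before overwriting each field.
type SCVariablesPtr struct {
	Rollup   *RollupVariables
	Auction  *AuctionVariables
	WDelayer *WDelayerVariables
}
```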

View File

@@ -189,12 +189,12 @@ func newTestCoordinator(t *testing.T, forgerAddr ethCommon.Address, ethClient *t
 		&prover.MockClient{Delay: 400 * time.Millisecond},
 	}
-	scConsts := &synchronizer.SCConsts{
+	scConsts := &common.SCConsts{
 		Rollup: *ethClientSetup.RollupConstants,
 		Auction: *ethClientSetup.AuctionConstants,
 		WDelayer: *ethClientSetup.WDelayerConstants,
 	}
-	initSCVars := &synchronizer.SCVariables{
+	initSCVars := &common.SCVariables{
 		Rollup: *ethClientSetup.RollupVariables,
 		Auction: *ethClientSetup.AuctionVariables,
 		WDelayer: *ethClientSetup.WDelayerVariables,
@@ -534,7 +534,7 @@ func TestCoordinatorStress(t *testing.T) {
 			coord.SendMsg(ctx, MsgSyncBlock{
 				Stats: *stats,
 				Batches: blockData.Rollup.Batches,
-				Vars: synchronizer.SCVariablesPtr{
+				Vars: common.SCVariablesPtr{
 					Rollup: blockData.Rollup.Vars,
 					Auction: blockData.Auction.Vars,
 					WDelayer: blockData.WDelayer.Vars,
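
The stress test above only shows the sending side: coord.SendMsg with a common.SCVariablesPtr whose nil fields mean "unchanged". The receiving side is not in these hunks; the following self-contained sketch shows the usual consumption pattern such a message API implies, with simplified stand-in types rather than the hermez-node definitions:

```go
// Self-contained sketch of the consumer side of a SendMsg-style API: a type
// switch dispatches on the message and applies variable updates only when the
// pointer is non-nil. All types here are simplified stand-ins.
package sketch

import (
	"context"
	"log"
)

type SCVariables struct{ ForgeTimeout int64 }

// SCVariablesPtr uses a nil pointer to mean "unchanged since the last block".
type SCVariablesPtr struct{ Rollup *SCVariables }

type MsgSyncBlock struct{ Vars SCVariablesPtr }
type MsgSyncReorg struct{ Vars SCVariablesPtr }
type MsgStopPipeline struct{ Reason string }

type coordinator struct {
	msgCh chan interface{}
	vars  SCVariables
}

// SendMsg mirrors the signature shown in the diff: it delivers a message
// unless the context is cancelled first.
func (c *coordinator) SendMsg(ctx context.Context, msg interface{}) {
	select {
	case c.msgCh <- msg:
	case <-ctx.Done():
	}
}

// run is the single goroutine that owns c.vars, so plain assignment is safe.
func (c *coordinator) run(ctx context.Context) {
	for {
		select {
		case <-ctx.Done():
			return
		case msg := <-c.msgCh:
			switch m := msg.(type) {
			case MsgSyncBlock:
				if m.Vars.Rollup != nil {
					c.vars = *m.Vars.Rollup // fold in the update
				}
			case MsgSyncReorg:
				if m.Vars.Rollup != nil {
					c.vars = *m.Vars.Rollup
				}
			case MsgStopPipeline:
				log.Printf("stopping pipeline: %s", m.Reason)
			}
		}
	}
}
```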

View File

@@ -22,7 +22,7 @@ import (
 type statsVars struct {
 	Stats synchronizer.Stats
-	Vars synchronizer.SCVariablesPtr
+	Vars common.SCVariablesPtr
 }

 type state struct {
@@ -36,7 +36,7 @@ type state struct {
 type Pipeline struct {
 	num int
 	cfg Config
-	consts synchronizer.SCConsts
+	consts common.SCConsts

 	// state
 	state state
@@ -57,7 +57,7 @@ type Pipeline struct {
 	purger *Purger

 	stats synchronizer.Stats
-	vars synchronizer.SCVariables
+	vars common.SCVariables
 	statsVarsCh chan statsVars

 	ctx context.Context
@@ -90,7 +90,7 @@ func NewPipeline(ctx context.Context,
 	coord *Coordinator,
 	txManager *TxManager,
 	provers []prover.Client,
-	scConsts *synchronizer.SCConsts,
+	scConsts *common.SCConsts,
 ) (*Pipeline, error) {
 	proversPool := NewProversPool(len(provers))
 	proversPoolSize := 0
@@ -125,7 +125,7 @@ func NewPipeline(ctx context.Context,
 // SetSyncStatsVars is a thread safe method to sets the synchronizer Stats
 func (p *Pipeline) SetSyncStatsVars(ctx context.Context, stats *synchronizer.Stats,
-	vars *synchronizer.SCVariablesPtr) {
+	vars *common.SCVariablesPtr) {
 	select {
 	case p.statsVarsCh <- statsVars{Stats: *stats, Vars: *vars}:
 	case <-ctx.Done():
@@ -134,7 +134,7 @@ func (p *Pipeline) SetSyncStatsVars(ctx context.Context, stats *synchronizer.Sta
 // reset pipeline state
 func (p *Pipeline) reset(batchNum common.BatchNum,
-	stats *synchronizer.Stats, vars *synchronizer.SCVariables) error {
+	stats *synchronizer.Stats, vars *common.SCVariables) error {
 	p.state = state{
 		batchNum: batchNum,
 		lastForgeL1TxsNum: stats.Sync.LastForgeL1TxsNum,
@@ -195,7 +195,7 @@ func (p *Pipeline) reset(batchNum common.BatchNum,
 	return nil
 }

-func (p *Pipeline) syncSCVars(vars synchronizer.SCVariablesPtr) {
+func (p *Pipeline) syncSCVars(vars common.SCVariablesPtr) {
 	updateSCVars(&p.vars, vars)
 }
@@ -256,7 +256,7 @@ func (p *Pipeline) handleForgeBatch(ctx context.Context,
 // Start the forging pipeline
 func (p *Pipeline) Start(batchNum common.BatchNum,
-	stats *synchronizer.Stats, vars *synchronizer.SCVariables) error {
+	stats *synchronizer.Stats, vars *common.SCVariables) error {
 	if p.started {
 		log.Fatal("Pipeline already started")
 	}
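
SetSyncStatsVars and the statsVarsCh field illustrate the handoff pattern used here: callers never mutate the pipeline's stats or vars directly; they send a snapshot over a channel (or give up on ctx cancellation), and the pipeline applies it from its own goroutine. A self-contained sketch of that pattern, with simplified types in place of synchronizer.Stats and common.SCVariables:

```go
// Sketch of the channel-based stats/vars handoff; simplified stand-in types.
package sketch

import "context"

type Stats struct{ LastBlockNum int64 }
type SCVariables struct{ ForgeTimeout int64 }

// SCVariablesPtr: a nil field means "no update".
type SCVariablesPtr struct{ Rollup *SCVariables }

type statsVars struct {
	Stats Stats
	Vars  SCVariablesPtr
}

type Pipeline struct {
	stats       Stats
	vars        SCVariables
	statsVarsCh chan statsVars
}

// SetSyncStatsVars is safe to call from other goroutines: it never touches
// p.stats or p.vars, it only sends a snapshot (or gives up on cancellation).
func (p *Pipeline) SetSyncStatsVars(ctx context.Context, stats *Stats, vars *SCVariablesPtr) {
	select {
	case p.statsVarsCh <- statsVars{Stats: *stats, Vars: *vars}:
	case <-ctx.Done():
	}
}

// syncStatsVars drains pending updates without blocking; it runs on the
// pipeline's own goroutine, so plain assignment is race-free.
func (p *Pipeline) syncStatsVars() {
	for {
		select {
		case sv := <-p.statsVarsCh:
			p.stats = sv.Stats
			if sv.Vars.Rollup != nil {
				p.vars = *sv.Vars.Rollup
			}
		default:
			return
		}
	}
}
```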

View File

@@ -206,11 +206,7 @@ PoolTransfer(0) User2-User3: 300 (126)
 		require.NoError(t, err)
 	}
-	err = pipeline.reset(batchNum, syncStats, &synchronizer.SCVariables{
-		Rollup: *syncSCVars.Rollup,
-		Auction: *syncSCVars.Auction,
-		WDelayer: *syncSCVars.WDelayer,
-	})
+	err = pipeline.reset(batchNum, syncStats, syncSCVars)
 	require.NoError(t, err)
 	// Sanity check
 	sdbAccounts, err := pipeline.txSelector.LocalAccountsDB().TestGetAccounts()

View File

@@ -31,10 +31,10 @@ type TxManager struct {
 	batchCh chan *BatchInfo
 	chainID *big.Int
 	account accounts.Account
-	consts synchronizer.SCConsts
+	consts common.SCConsts

 	stats synchronizer.Stats
-	vars synchronizer.SCVariables
+	vars common.SCVariables
 	statsVarsCh chan statsVars

 	discardPipelineCh chan int // int refers to the pipelineNum
@@ -55,7 +55,7 @@ type TxManager struct {
 // NewTxManager creates a new TxManager
 func NewTxManager(ctx context.Context, cfg *Config, ethClient eth.ClientInterface, l2DB *l2db.L2DB,
-	coord *Coordinator, scConsts *synchronizer.SCConsts, initSCVars *synchronizer.SCVariables) (
+	coord *Coordinator, scConsts *common.SCConsts, initSCVars *common.SCVariables) (
 	*TxManager, error) {
 	chainID, err := ethClient.EthChainID()
 	if err != nil {
@@ -104,7 +104,7 @@ func (t *TxManager) AddBatch(ctx context.Context, batchInfo *BatchInfo) {
 // SetSyncStatsVars is a thread safe method to sets the synchronizer Stats
 func (t *TxManager) SetSyncStatsVars(ctx context.Context, stats *synchronizer.Stats,
-	vars *synchronizer.SCVariablesPtr) {
+	vars *common.SCVariablesPtr) {
 	select {
 	case t.statsVarsCh <- statsVars{Stats: *stats, Vars: *vars}:
 	case <-ctx.Done():
@@ -120,7 +120,7 @@ func (t *TxManager) DiscardPipeline(ctx context.Context, pipelineNum int) {
 	}
 }

-func (t *TxManager) syncSCVars(vars synchronizer.SCVariablesPtr) {
+func (t *TxManager) syncSCVars(vars common.SCVariablesPtr) {
 	updateSCVars(&t.vars, vars)
 }
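
Taken together, the change lets Coordinator, Pipeline and TxManager share one set of contract constants and variables typed in common rather than in synchronizer, presumably so that an API-only process can reuse those types without depending on the synchronizer package. A minimal wiring sketch under that assumption (constructors and fields are simplified stand-ins, not the real signatures):

```go
// Sketch of the wiring the shared common-package types enable.
package sketch

// Simplified stand-ins for common.SCConsts and common.SCVariables.
type SCConsts struct{ ChainID uint64 }
type SCVariables struct{ ForgeTimeout int64 }

type Pipeline struct {
	consts SCConsts
	vars   SCVariables
}

type TxManager struct {
	consts SCConsts
	vars   SCVariables
}

func NewPipeline(consts *SCConsts, initVars *SCVariables) *Pipeline {
	return &Pipeline{consts: *consts, vars: *initVars}
}

func NewTxManager(consts *SCConsts, initVars *SCVariables) *TxManager {
	return &TxManager{consts: *consts, vars: *initVars}
}

// NewCoordinatorWorkers mirrors how NewCoordinator presumably forwards its
// scConsts and initSCVars arguments to both workers; each keeps its own copy
// and later applies updates through an updateSCVars-style helper.
func NewCoordinatorWorkers(consts *SCConsts, initVars *SCVariables) (*Pipeline, *TxManager) {
	return NewPipeline(consts, initVars), NewTxManager(consts, initVars)
}
```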