
Update common.L1Tx parsers, add test checks

Branch: feature/sql-semaphore1
Author: arnaucube (4 years ago)
Commit: 92fa8aa439

3 changed files with 69 additions and 58 deletions
  1. common/l1tx.go       +43  -56
  2. common/l1tx_test.go  +13   -2
  3. common/token.go      +13   -0

common/l1tx.go (+43, -56)

@@ -1,18 +1,17 @@
 package common
 
 import (
+	"fmt"
 	"math/big"
 
 	ethCommon "github.com/ethereum/go-ethereum/common"
+	"github.com/hermeznetwork/hermez-node/utils"
 	"github.com/iden3/go-iden3-crypto/babyjub"
 )
 
 const (
-	fromBJJCompressedB = 256
-	fromEthAddrB       = 160
-	f16B               = 16
-	tokenIDB           = 32
-	cidXB              = 32
+	// L1TxBytesLen is the length of the byte array that represents the L1Tx
+	L1TxBytesLen = 68
 )
 
 // L1Tx is a struct that represents a L1 tx
@@ -64,69 +63,57 @@ func (tx *L1Tx) Tx() *Tx {
 }
 
 // Bytes encodes a L1Tx into []byte
-func (tx *L1Tx) Bytes(nLevels int) []byte {
-	res := big.NewInt(0)
-	res = res.Add(res, big.NewInt(0).Or(big.NewInt(0), tx.ToIdx.BigInt()))
-	res = res.Add(res, big.NewInt(0).Lsh(big.NewInt(0).Or(big.NewInt(0), big.NewInt(int64(tx.TokenID))), uint(nLevels)))
-	res = res.Add(res, big.NewInt(0).Lsh(big.NewInt(0).Or(big.NewInt(0), tx.Amount), uint(nLevels+tokenIDB)))
-	res = res.Add(res, big.NewInt(0).Lsh(big.NewInt(0).Or(big.NewInt(0), tx.LoadAmount), uint(nLevels+tokenIDB+f16B)))
-	res = res.Add(res, big.NewInt(0).Lsh(big.NewInt(0).Or(big.NewInt(0), tx.FromIdx.BigInt()), uint(nLevels+tokenIDB+2*f16B)))
-
-	fromBJJ := big.NewInt(0)
-	fromBJJ.SetString(tx.FromBJJ.String(), 16)
-	fromBJJCompressed := big.NewInt(0).Or(big.NewInt(0), fromBJJ)
-	res = res.Add(res, big.NewInt(0).Lsh(fromBJJCompressed, uint(2*nLevels+tokenIDB+2*f16B)))
-
-	fromEthAddr := big.NewInt(0).Or(big.NewInt(0), tx.FromEthAddr.Hash().Big())
-	res = res.Add(res, big.NewInt(0).Lsh(fromEthAddr, uint(fromBJJCompressedB+2*nLevels+tokenIDB+2*f16B)))
-
-	return res.Bytes()
+func (tx *L1Tx) Bytes(nLevels int) ([]byte, error) {
+	var b [68]byte
+	copy(b[0:4], tx.ToIdx.Bytes())
+	copy(b[4:8], tx.TokenID.Bytes())
+	amountFloat16, err := utils.NewFloat16(tx.Amount)
+	if err != nil {
+		return nil, err
+	}
+	copy(b[8:10], amountFloat16.Bytes())
+	loadAmountFloat16, err := utils.NewFloat16(tx.LoadAmount)
+	if err != nil {
+		return nil, err
+	}
+	copy(b[10:12], loadAmountFloat16.Bytes())
+	copy(b[12:16], tx.FromIdx.Bytes())
+	pkComp := tx.FromBJJ.Compress()
+	copy(b[16:48], SwapEndianness(pkComp[:]))
+	copy(b[48:68], SwapEndianness(tx.FromEthAddr.Bytes()))
+	return SwapEndianness(b[:]), nil
 }
 
 // L1TxFromBytes decodes a L1Tx from []byte
-func L1TxFromBytes(l1TxEncoded []byte) (*L1Tx, error) {
-	l1Tx := &L1Tx{}
-	var idxB uint = cidXB
-
-	l1TxEncodedBI := big.NewInt(0)
-	l1TxEncodedBI.SetBytes(l1TxEncoded)
-
-	toIdx, err := IdxFromBigInt(extract(l1TxEncodedBI, 0, idxB))
+func L1TxFromBytes(bRaw []byte) (*L1Tx, error) {
+	if len(bRaw) != L1TxBytesLen {
+		return nil, fmt.Errorf("Can not parse L1Tx bytes, expected length %d, current: %d", 68, len(bRaw))
+	}
+	b := SwapEndianness(bRaw)
+	tx := &L1Tx{}
+	var err error
+	tx.ToIdx, err = IdxFromBytes(b[0:4])
 	if err != nil {
 		return nil, err
 	}
-	l1Tx.ToIdx = toIdx
-
-	l1Tx.TokenID = TokenID(extract(l1TxEncodedBI, idxB, tokenIDB).Uint64())
-	l1Tx.Amount = extract(l1TxEncodedBI, idxB+tokenIDB, f16B)
-	l1Tx.LoadAmount = extract(l1TxEncodedBI, idxB+tokenIDB+f16B, f16B)
-
-	fromIdx, err := IdxFromBigInt(extract(l1TxEncodedBI, idxB+tokenIDB+2*f16B, f16B))
+	tx.TokenID, err = TokenIDFromBytes(b[4:8])
 	if err != nil {
 		return nil, err
 	}
-	l1Tx.FromIdx = fromIdx
-
+	tx.Amount = new(big.Int).SetBytes(SwapEndianness(b[8:10]))
+	tx.LoadAmount = new(big.Int).SetBytes(SwapEndianness(b[10:12]))
+	tx.FromIdx, err = IdxFromBytes(b[12:16])
+	if err != nil {
+		return nil, err
+	}
+	pkCompB := SwapEndianness(b[16:48])
 	var pkComp babyjub.PublicKeyComp
-	copy(pkComp[:], extract(l1TxEncodedBI, 2*idxB+tokenIDB+2*f16B, fromBJJCompressedB).Bytes())
-	pk, err := pkComp.Decompress()
+	copy(pkComp[:], pkCompB)
+	tx.FromBJJ, err = pkComp.Decompress()
 	if err != nil {
 		return nil, err
 	}
-	l1Tx.FromBJJ = pk
-
-	l1Tx.FromEthAddr = ethCommon.BigToAddress(extract(l1TxEncodedBI, fromBJJCompressedB+2*idxB+tokenIDB+2*f16B, fromEthAddrB))
-	return l1Tx, nil
-}
-
-// extract masks and shifts a bigInt
-func extract(num *big.Int, origin uint, len uint) *big.Int {
-	mask := big.NewInt(0).Sub(big.NewInt(0).Lsh(big.NewInt(1), len), big.NewInt(1))
-	return big.NewInt(0).And(big.NewInt(0).Rsh(num, origin), mask)
+	tx.FromEthAddr = ethCommon.BytesToAddress(SwapEndianness(b[48:68]))
+	return tx, nil
 }
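
With this change, Bytes writes a fixed 68-byte buffer (ToIdx | TokenID | Amount as Float16 | LoadAmount as Float16 | FromIdx | compressed BabyJubJub key | Ethereum address) and byte-swaps the result, and L1TxFromBytes reverses exactly that, rejecting any input whose length is not L1TxBytesLen. A minimal sketch of how the two new signatures compose, as it would sit inside package common (the helper encodeDecodeL1Tx is hypothetical and not part of this commit):

// encodeDecodeL1Tx round-trips a transaction through the new API.
// Bytes now returns an error because Amount and LoadAmount must be
// representable as Float16; L1TxFromBytes errors on any input whose
// length differs from L1TxBytesLen (68).
func encodeDecodeL1Tx(tx *L1Tx, nLevels int) (*L1Tx, error) {
	b, err := tx.Bytes(nLevels)
	if err != nil {
		return nil, err
	}
	// len(b) == L1TxBytesLen at this point
	return L1TxFromBytes(b)
}

The updated test below exercises this round trip with nLevels=32 and checks that short, empty, and nil inputs are rejected.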

common/l1tx_test.go (+13, -2)

@@ -32,12 +32,23 @@ func TestL1TxCodec(t *testing.T) {
 	expected, err := utils.HexDecode("c58d29fa6e86e4fae04ddced660d45bcf3cb237056ca90f80d7c374ae7485e9bcc47d4ac399460948da6aeeb899311097925a72c00000002000200010000000500000003")
 	require.Nil(t, err)
 
-	encodedData := l1Tx.Bytes(32)
+	encodedData, err := l1Tx.Bytes(32)
+	require.Nil(t, err)
 	assert.Equal(t, expected, encodedData)
 
 	decodedData, err := L1TxFromBytes(encodedData)
 	require.Nil(t, err)
-	encodedData2 := decodedData.Bytes(32)
+	assert.Equal(t, l1Tx, *decodedData)
+	encodedData2, err := decodedData.Bytes(32)
+	require.Nil(t, err)
 	assert.Equal(t, encodedData, encodedData2)
+
+	// expect error if length!=68
+	_, err = L1TxFromBytes(encodedData[:66])
+	require.NotNil(t, err)
+	_, err = L1TxFromBytes([]byte{})
+	require.NotNil(t, err)
+	_, err = L1TxFromBytes(nil)
+	require.NotNil(t, err)
 }

common/token.go (+13, -0)

@@ -2,12 +2,16 @@ package common
 
 import (
 	"encoding/binary"
+	"fmt"
 	"math/big"
 	"time"
 
 	ethCommon "github.com/ethereum/go-ethereum/common"
 )
 
+// tokenIDBytesLen defines the length of the TokenID byte array representation
+const tokenIDBytesLen = 4
+
 // Token is a struct that represents an Ethereum token that is supported in Hermez network
 type Token struct {
 	TokenID TokenID `meddler:"token_id"`
@@ -41,3 +45,12 @@ func (t TokenID) Bytes() []byte {
 func (t TokenID) BigInt() *big.Int {
 	return big.NewInt(int64(t))
 }
+
+// TokenIDFromBytes returns TokenID from a byte array
+func TokenIDFromBytes(b []byte) (TokenID, error) {
+	if len(b) != tokenIDBytesLen {
+		return 0, fmt.Errorf("can not parse TokenID, bytes len %d, expected 4", len(b))
+	}
+	tid := binary.LittleEndian.Uint32(b[:4])
+	return TokenID(tid), nil
+}
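
The new TokenIDFromBytes is the counterpart of the existing TokenID.Bytes encoding used by l1tx.go when filling b[4:8]. A small hypothetical round-trip sketch in package common, assuming Bytes() produces the same 4-byte little-endian form that the binary.LittleEndian read above expects (the helper name is illustrative only, not part of the commit):

// tokenIDRoundTrip encodes a TokenID and parses it back.
// TokenIDFromBytes returns an error when len(b) != tokenIDBytesLen.
func tokenIDRoundTrip(t TokenID) (TokenID, error) {
	b := t.Bytes() // assumed 4-byte little-endian, matching TokenIDFromBytes
	return TokenIDFromBytes(b)
}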
