12 Commits

Author SHA1 Message Date
polydez
2a5b8ffb21 feat: implement functionality needed for computing openings for recent blocks (#367)
* refactor: make `InnerNode` and `NodeMutation` public
* feat: implement serialization for `LeafIndex`
2025-01-24 17:32:30 -08:00
polydez
589839fef1 feat: reverse mutations generation, mutations serialization (#355)
* feat: reverse mutations generation, mutations serialization
* tests: check both `apply_mutations` and `apply_mutations_with_reversion`
* feat: add `num_leaves` method for `Smt`
* refactor: improve ad-hoc benchmarks
* chore: update crate version to v0.13.1
2024-12-26 18:16:38 -08:00
crStiv
1444bbc0f2 fix: typos of different importance (#359) 2024-12-16 10:27:51 -08:00
Bobbin Threadbare
c64f43b262 chore: merge v0.13.0 release 2024-11-24 22:36:08 -08:00
Bobbin Threadbare
1867f842d3 chore: update changelog 2024-11-24 22:26:51 -08:00
Al-Kindi-0
e1072ecc7f chore: update to winterfell dependencies to 0.11 (#346) 2024-11-24 22:20:19 -08:00
Bobbin Threadbare
063ad49afd chore: update crate version to v0.13.0 2024-11-21 15:56:55 -08:00
Philipp Gackstatter
a27f9ad828 refactor: use thiserror to derive errors and update error messages (#344) 2024-11-21 15:52:20 -08:00
Al-Kindi-0
50dd6bda19 fix: skip using the field element containing the proof-of-work (#343) 2024-11-18 00:16:27 -08:00
Bobbin Threadbare
3909b01993 chore: merge v0.12.0 release from 0xPolygonMiden/next 2024-10-30 15:25:34 -07:00
Bobbin Threadbare
ee20a49953 chore: increment crate version to v0.12.0 and update changelog 2024-10-30 15:04:08 -07:00
Al-Kindi-0
0d75e3593b chore: migrate to Winterfell v0.10.0 release (#338) 2024-10-29 15:02:46 -07:00
32 changed files with 1044 additions and 550 deletions

View File

@@ -1,3 +1,22 @@
## 0.13.2 (2025-01-24)
- Made `InnerNode` and `NodeMutation` public. Implemented (de)serialization of `LeafIndex` (#367).
## 0.13.1 (2024-12-26)
- Generate a reverse mutation set when applying a mutation set; implemented serialization of `MutationSet` (#355).
## 0.13.0 (2024-11-24)
- Fixed a bug in the implementation of `draw_integers` for `RpoRandomCoin` (#343).
- [BREAKING] Refactor error messages and use `thiserror` to derive errors (#344).
- [BREAKING] Updated Winterfell dependency to v0.11 (#346).
## 0.12.0 (2024-10-30)
- [BREAKING] Updated Winterfell dependency to v0.10 (#338).
## 0.11.0 (2024-10-17)
- [BREAKING] Renamed `Mmr::open()` to `Mmr::open_at()` and `Mmr::peaks()` to `Mmr::peaks_at()` (#234).
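The 0.13.1 and 0.13.2 entries above revolve around the `Smt` mutation-set workflow and the newly public `InnerNode`/`NodeMutation` types. Below is a minimal, hedged sketch of that workflow using only calls that appear elsewhere in this diff (`Smt::with_entries`, `compute_mutations`, `apply_mutations`, `num_leaves`); it assumes an empty entry list is accepted by `with_entries`, and the helper name `mutation_roundtrip` is made up for illustration.

```rust
use miden_crypto::{
    hash::rpo::RpoDigest,
    merkle::{MerkleError, Smt},
    Felt, Word, ONE,
};

fn mutation_roundtrip() -> Result<(), MerkleError> {
    // Start from an empty tree (assumes an empty entry list is accepted).
    let mut tree = Smt::with_entries(Vec::<(RpoDigest, Word)>::new())?;

    // Stage a batch of key-value updates without touching the tree yet...
    let key = RpoDigest::new([ONE, ONE, ONE, Felt::new(7)]);
    let mutations = tree.compute_mutations(vec![(key, [ONE, ONE, ONE, Felt::new(1)])]);

    // ...then apply the whole batch in one step.
    tree.apply_mutations(mutations)?;
    assert_eq!(tree.num_leaves(), 1);

    // v0.13.1 additionally offers `apply_mutations_with_reversion`, which also
    // returns a mutation set that undoes the update (see #355); its exact
    // signature is not shown in this diff.
    Ok(())
}
```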

Cargo.lock (generated, 216 changed lines)
View File

@@ -19,9 +19,9 @@ checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299"
[[package]]
name = "anstream"
version = "0.6.15"
version = "0.6.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64e15c1ab1f89faffbf04a634d5e1962e9074f2741eef6d97f3c4e322426d526"
checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b"
dependencies = [
"anstyle",
"anstyle-parse",
@@ -34,36 +34,36 @@ dependencies = [
[[package]]
name = "anstyle"
version = "1.0.8"
version = "1.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1"
checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9"
[[package]]
name = "anstyle-parse"
version = "0.2.5"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eb47de1e80c2b463c735db5b217a0ddc39d612e7ac9e2e96a5aed1f57616c1cb"
checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9"
dependencies = [
"utf8parse",
]
[[package]]
name = "anstyle-query"
version = "1.1.1"
version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d36fc52c7f6c869915e99412912f22093507da8d9e942ceaf66fe4b7c14422a"
checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c"
dependencies = [
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]
name = "anstyle-wincon"
version = "3.0.4"
version = "3.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5bf74e1b6e971609db8ca7a9ce79fd5768ab6ae46441c572e46cf596f59e57f8"
checksum = "2109dbce0e72be3ec00bed26e6a7479ca384ad226efdd66db8fa2e3a38c83125"
dependencies = [
"anstyle",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -78,6 +78,12 @@ version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
[[package]]
name = "assert_matches"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b34d609dfbaf33d6889b2b7106d3ca345eacad44200913df5ba02bfd31d2ba9"
[[package]]
name = "autocfg"
version = "1.4.0"
@@ -86,18 +92,18 @@ checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26"
[[package]]
name = "bit-set"
version = "0.5.3"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1"
checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3"
dependencies = [
"bit-vec",
]
[[package]]
name = "bit-vec"
version = "0.6.3"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb"
checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7"
[[package]]
name = "bitflags"
@@ -107,9 +113,9 @@ checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de"
[[package]]
name = "blake3"
version = "1.5.4"
version = "1.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d82033247fd8e890df8f740e407ad4d038debb9eb1f40533fffb32e7d17dc6f7"
checksum = "b8ee0c1824c4dea5b5f81736aff91bae041d2c07ee1192bec91054e10e3e601e"
dependencies = [
"arrayref",
"arrayvec",
@@ -147,9 +153,9 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5"
[[package]]
name = "cc"
version = "1.1.30"
version = "1.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b16803a61b81d9eabb7eae2588776c4c1e584b738ede45fdbb4c972cec1e9945"
checksum = "c31a0499c1dc64f458ad13872de75c0eb7e3fdb0e67964610c914b034fc5956e"
dependencies = [
"jobserver",
"libc",
@@ -191,9 +197,9 @@ dependencies = [
[[package]]
name = "clap"
version = "4.5.20"
version = "4.5.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b97f376d85a664d5837dbae44bf546e6477a679ff6610010f17276f686d867e8"
checksum = "3135e7ec2ef7b10c6ed8950f0f792ed96ee093fa088608f1c76e569722700c84"
dependencies = [
"clap_builder",
"clap_derive",
@@ -201,9 +207,9 @@ dependencies = [
[[package]]
name = "clap_builder"
version = "4.5.20"
version = "4.5.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19bc80abd44e4bed93ca373a0704ccbd1b710dc5749406201bb018272808dc54"
checksum = "30582fc632330df2bd26877bde0c1f4470d57c582bbc070376afcd04d8cb4838"
dependencies = [
"anstream",
"anstyle",
@@ -225,15 +231,15 @@ dependencies = [
[[package]]
name = "clap_lex"
version = "0.7.2"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97"
checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6"
[[package]]
name = "colorchoice"
version = "1.0.2"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0"
checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990"
[[package]]
name = "constant_time_eq"
@@ -243,9 +249,9 @@ checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6"
[[package]]
name = "cpufeatures"
version = "0.2.14"
version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "608697df725056feaccfa42cffdaeeec3fccc4ffc38358ecd19b243e716a78e0"
checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3"
dependencies = [
"libc",
]
@@ -288,9 +294,9 @@ dependencies = [
[[package]]
name = "crossbeam-deque"
version = "0.8.5"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d"
checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51"
dependencies = [
"crossbeam-epoch",
"crossbeam-utils",
@@ -307,9 +313,9 @@ dependencies = [
[[package]]
name = "crossbeam-utils"
version = "0.8.20"
version = "0.8.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80"
checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28"
[[package]]
name = "crunchy"
@@ -345,19 +351,19 @@ checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0"
[[package]]
name = "errno"
version = "0.3.9"
version = "0.3.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba"
checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d"
dependencies = [
"libc",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]
name = "fastrand"
version = "2.1.1"
version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8c02a5121d4ea3eb16a80748c74f5549a5665e4c21333c6098f283870fbdea6"
checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
[[package]]
name = "fnv"
@@ -450,9 +456,9 @@ dependencies = [
[[package]]
name = "itoa"
version = "1.0.11"
version = "1.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b"
checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674"
[[package]]
name = "jobserver"
@@ -465,10 +471,11 @@ dependencies = [
[[package]]
name = "js-sys"
version = "0.3.72"
version = "0.3.76"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a88f1bda2bd75b0452a14784937d796722fdebfe50df998aeb3f0b7603019a9"
checksum = "6717b6b5b077764fb5966237269cb3c64edddde4b14ce42647430a78ced9e7b7"
dependencies = [
"once_cell",
"wasm-bindgen",
]
@@ -489,15 +496,15 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
[[package]]
name = "libc"
version = "0.2.161"
version = "0.2.169"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e9489c2807c139ffd9c1794f4af0ebe86a828db53ecdc7fea2111d0fed085d1"
checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a"
[[package]]
name = "libm"
version = "0.2.8"
version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058"
checksum = "8355be11b20d696c8f18f6cc018c4e372165b1fa8126cef092399c9951984ffa"
[[package]]
name = "linux-raw-sys"
@@ -519,8 +526,9 @@ checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
[[package]]
name = "miden-crypto"
version = "0.11.0"
version = "0.13.2"
dependencies = [
"assert_matches",
"blake3",
"cc",
"clap",
@@ -537,6 +545,7 @@ dependencies = [
"seq-macro",
"serde",
"sha3",
"thiserror",
"winter-crypto",
"winter-math",
"winter-rand-utils",
@@ -668,18 +677,18 @@ dependencies = [
[[package]]
name = "proc-macro2"
version = "1.0.88"
version = "1.0.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c3a7fc5db1e57d5a779a352c8cdb57b29aa4c40cc69c3a68a7fedc815fbf2f9"
checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0"
dependencies = [
"unicode-ident",
]
[[package]]
name = "proptest"
version = "1.5.0"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4c2511913b88df1637da85cc8d96ec8e43a3f8bb8ccb71ee1ac240d6f3df58d"
checksum = "14cae93065090804185d3b75f0bf93b8eeda30c7a9b4a33d3bdb3988d6229e50"
dependencies = [
"bit-set",
"bit-vec",
@@ -703,9 +712,9 @@ checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
[[package]]
name = "quote"
version = "1.0.37"
version = "1.0.38"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af"
checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc"
dependencies = [
"proc-macro2",
]
@@ -771,9 +780,9 @@ dependencies = [
[[package]]
name = "regex"
version = "1.11.0"
version = "1.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38200e5ee88914975b69f657f0801b6f6dccafd44fd9326302a4aaeecfacb1d8"
checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
dependencies = [
"aho-corasick",
"memchr",
@@ -783,9 +792,9 @@ dependencies = [
[[package]]
name = "regex-automata"
version = "0.4.8"
version = "0.4.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3"
checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
dependencies = [
"aho-corasick",
"memchr",
@@ -800,15 +809,15 @@ checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
[[package]]
name = "rustix"
version = "0.38.37"
version = "0.38.42"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8acb788b847c24f28525660c4d7758620a7210875711f79e7f663cc152726811"
checksum = "f93dc38ecbab2eb790ff964bb77fa94faf256fd3e73285fd7ba0903b76bedb85"
dependencies = [
"bitflags",
"errno",
"libc",
"linux-raw-sys",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -846,18 +855,18 @@ checksum = "a3f0bf26fd526d2a95683cd0f87bf103b8539e2ca1ef48ce002d67aad59aa0b4"
[[package]]
name = "serde"
version = "1.0.210"
version = "1.0.216"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a"
checksum = "0b9781016e935a97e8beecf0c933758c97a5520d32930e460142b4cd80c6338e"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.210"
version = "1.0.216"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f"
checksum = "46f859dbbf73865c6627ed570e78961cd3ac92407a2d117204c49232485da55e"
dependencies = [
"proc-macro2",
"quote",
@@ -866,9 +875,9 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.129"
version = "1.0.134"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6dbcf9b78a125ee667ae19388837dd12294b858d101fdd393cb9d5501ef09eb2"
checksum = "d00f4175c42ee48b15416f6193a959ba3a0d67fc699a0db9ad12df9f83991c7d"
dependencies = [
"itoa",
"memchr",
@@ -900,9 +909,9 @@ checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]]
name = "syn"
version = "2.0.79"
version = "2.0.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89132cd0bf050864e1d38dc3bbc07a0eb8e7530af26344d3d2bbbef83499f590"
checksum = "70ae51629bf965c5c098cc9e87908a3df5301051a9e087d6f9bef5c9771ed126"
dependencies = [
"proc-macro2",
"quote",
@@ -911,9 +920,9 @@ dependencies = [
[[package]]
name = "tempfile"
version = "3.13.0"
version = "3.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0f2c9fc62d0beef6951ccffd757e241266a2c833136efbe35af6cd2567dca5b"
checksum = "28cce251fcbc87fac86a866eeb0d6c2d536fc16d06f184bb61aeae11aa4cee0c"
dependencies = [
"cfg-if",
"fastrand",
@@ -922,6 +931,26 @@ dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "thiserror"
version = "2.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f072643fd0190df67a8bab670c20ef5d8737177d6ac6b2e9a236cb096206b2cc"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "2.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b50fa271071aae2e6ee85f842e2e28ba8cd2c5fb67f11fcb1fd70b276f9e7d4"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "tinytemplate"
version = "1.2.1"
@@ -946,9 +975,9 @@ checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94"
[[package]]
name = "unicode-ident"
version = "1.0.13"
version = "1.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe"
checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83"
[[package]]
name = "utf8parse"
@@ -989,9 +1018,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "wasm-bindgen"
version = "0.2.95"
version = "0.2.99"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "128d1e363af62632b8eb57219c8fd7877144af57558fb2ef0368d0087bddeb2e"
checksum = "a474f6281d1d70c17ae7aa6a613c87fce69a127e2624002df63dcb39d6cf6396"
dependencies = [
"cfg-if",
"once_cell",
@@ -1000,13 +1029,12 @@ dependencies = [
[[package]]
name = "wasm-bindgen-backend"
version = "0.2.95"
version = "0.2.99"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb6dd4d3ca0ddffd1dd1c9c04f94b868c37ff5fac97c30b97cff2d74fce3a358"
checksum = "5f89bb38646b4f81674e8f5c3fb81b562be1fd936d84320f3264486418519c79"
dependencies = [
"bumpalo",
"log",
"once_cell",
"proc-macro2",
"quote",
"syn",
@@ -1015,9 +1043,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro"
version = "0.2.95"
version = "0.2.99"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e79384be7f8f5a9dd5d7167216f022090cf1f9ec128e6e6a482a2cb5c5422c56"
checksum = "2cc6181fd9a7492eef6fef1f33961e3695e4579b9872a6f7c83aee556666d4fe"
dependencies = [
"quote",
"wasm-bindgen-macro-support",
@@ -1025,9 +1053,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro-support"
version = "0.2.95"
version = "0.2.99"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68"
checksum = "30d7a95b763d3c45903ed6c81f156801839e5ee968bb07e534c44df0fcd330c2"
dependencies = [
"proc-macro2",
"quote",
@@ -1038,15 +1066,15 @@ dependencies = [
[[package]]
name = "wasm-bindgen-shared"
version = "0.2.95"
version = "0.2.99"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "65fc09f10666a9f147042251e0dda9c18f166ff7de300607007e96bdebc1068d"
checksum = "943aab3fdaaa029a6e0271b35ea10b72b943135afe9bffca82384098ad0e06a6"
[[package]]
name = "web-sys"
version = "0.3.72"
version = "0.3.76"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6488b90108c040df0fe62fa815cbdee25124641df01814dd7282749234c6112"
checksum = "04dd7223427d52553d3702c004d3b2fe07c148165faa56313cb00211e31c12bc"
dependencies = [
"js-sys",
"wasm-bindgen",
@@ -1145,9 +1173,9 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
[[package]]
name = "winter-crypto"
version = "0.9.0"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "00fbb724d2d9fbfd3aa16ea27f5e461d4fe1d74b0c9e0ed1bf79e9e2a955f4d5"
checksum = "67c57748fd2da77742be601f03eda639ff6046879738fd1faae86e80018263cb"
dependencies = [
"blake3",
"sha3",
@@ -1157,9 +1185,9 @@ dependencies = [
[[package]]
name = "winter-math"
version = "0.9.3"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b0e685b3b872d82e58a86519294a814b7bc7a4d3cd2c93570a7d80c0c5a1aba"
checksum = "6020c17839fa107ce4a7cc178e407ebbc24adfac1980f4fa2111198e052700ab"
dependencies = [
"serde",
"winter-utils",
@@ -1167,9 +1195,9 @@ dependencies = [
[[package]]
name = "winter-rand-utils"
version = "0.9.0"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2b827c901ab0c316d89812858ff451d60855c0a5c7ae734b098c62a28624181"
checksum = "226e4c455f6eb72f64ac6eeb7642df25e21ff2280a4f6b09db75392ad6b390ef"
dependencies = [
"rand",
"winter-utils",
@@ -1177,9 +1205,9 @@ dependencies = [
[[package]]
name = "winter-utils"
version = "0.9.3"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "961e81e9388877a25db1c034ba38253de2055f569633ae6a665d857a0556391b"
checksum = "1507ef312ea5569d54c2c7446a18b82143eb2a2e21f5c3ec7cfbe8200c03bd7c"
[[package]]
name = "zerocopy"

View File

@@ -1,12 +1,12 @@
[package]
name = "miden-crypto"
version = "0.11.0"
version = "0.13.2"
description = "Miden Cryptographic primitives"
authors = ["miden contributors"]
readme = "README.md"
license = "MIT"
repository = "https://github.com/0xPolygonMiden/crypto"
documentation = "https://docs.rs/miden-crypto/0.11.0"
documentation = "https://docs.rs/miden-crypto/0.13.1"
categories = ["cryptography", "no-std"]
keywords = ["miden", "crypto", "hash", "merkle"]
edition = "2021"
@@ -52,22 +52,24 @@ num = { version = "0.4", default-features = false, features = ["alloc", "libm"]
num-complex = { version = "0.4", default-features = false }
rand = { version = "0.8", default-features = false }
rand_core = { version = "0.6", default-features = false }
rand-utils = { version = "0.9", package = "winter-rand-utils", optional = true }
rand-utils = { version = "0.11", package = "winter-rand-utils", optional = true }
serde = { version = "1.0", default-features = false, optional = true, features = ["derive"] }
sha3 = { version = "0.10", default-features = false }
winter-crypto = { version = "0.9", default-features = false }
winter-math = { version = "0.9", default-features = false }
winter-utils = { version = "0.9", default-features = false }
thiserror = { version = "2.0", default-features = false }
winter-crypto = { version = "0.11", default-features = false }
winter-math = { version = "0.11", default-features = false }
winter-utils = { version = "0.11", default-features = false }
[dev-dependencies]
assert_matches = { version = "1.5", default-features = false }
criterion = { version = "0.5", features = ["html_reports"] }
getrandom = { version = "0.2", features = ["js"] }
hex = { version = "0.4", default-features = false, features = ["alloc"] }
proptest = "1.5"
proptest = "1.6"
rand_chacha = { version = "0.3", default-features = false }
rand-utils = { version = "0.9", package = "winter-rand-utils" }
rand-utils = { version = "0.11", package = "winter-rand-utils" }
seq-macro = { version = "0.3" }
[build-dependencies]
cc = { version = "1.1", optional = true, features = ["parallel"] }
cc = { version = "1.2", optional = true, features = ["parallel"] }
glob = "0.3"

View File

@@ -81,6 +81,10 @@ build-sve: ## Build with sve support
# --- benchmarking --------------------------------------------------------------------------------
.PHONY: bench-tx
bench-tx: ## Run crypto benchmarks
.PHONY: bench
bench: ## Run crypto benchmarks
cargo bench
.PHONY: bench-smt-concurrent
bench-smt-concurrent: ## Run SMT benchmarks with concurrent feature
cargo run --release --features executable -- --size 1000000

View File

@@ -13,7 +13,7 @@ else
if git diff --exit-code "origin/${BASE_REF}" -- "${CHANGELOG_FILE}"; then
>&2 echo "Changes should come with an entry in the \"CHANGELOG.md\" file. This behavior
can be overridden by using the \"no changelog\" label, which is used for changes
that are trivial / explicitely stated not to require a changelog entry."
that are trivial / explicitly stated not to require a changelog entry."
exit 1
fi

View File

@@ -1,8 +1,8 @@
use alloc::string::String;
use alloc::{string::String, vec::Vec};
use core::{
mem::{size_of, transmute, transmute_copy},
ops::Deref,
slice::from_raw_parts,
slice::{self, from_raw_parts},
};
use super::{Digest, ElementHasher, Felt, FieldElement, Hasher};
@@ -33,6 +33,14 @@ const DIGEST20_BYTES: usize = 20;
#[cfg_attr(feature = "serde", serde(into = "String", try_from = "&str"))]
pub struct Blake3Digest<const N: usize>([u8; N]);
impl<const N: usize> Blake3Digest<N> {
pub fn digests_as_bytes(digests: &[Blake3Digest<N>]) -> &[u8] {
let p = digests.as_ptr();
let len = digests.len() * N;
unsafe { slice::from_raw_parts(p as *const u8, len) }
}
}
impl<const N: usize> Default for Blake3Digest<N> {
fn default() -> Self {
Self([0; N])
@@ -114,6 +122,10 @@ impl Hasher for Blake3_256 {
Self::hash(prepare_merge(values))
}
fn merge_many(values: &[Self::Digest]) -> Self::Digest {
Blake3Digest(blake3::hash(Blake3Digest::digests_as_bytes(values)).into())
}
fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest {
let mut hasher = blake3::Hasher::new();
hasher.update(&seed.0);
@@ -174,6 +186,11 @@ impl Hasher for Blake3_192 {
Blake3Digest(*shrink_bytes(&blake3::hash(bytes).into()))
}
fn merge_many(values: &[Self::Digest]) -> Self::Digest {
let bytes: Vec<u8> = values.iter().flat_map(|v| v.as_bytes()).collect();
Blake3Digest(*shrink_bytes(&blake3::hash(&bytes).into()))
}
fn merge(values: &[Self::Digest; 2]) -> Self::Digest {
Self::hash(prepare_merge(values))
}
@@ -242,6 +259,11 @@ impl Hasher for Blake3_160 {
Self::hash(prepare_merge(values))
}
fn merge_many(values: &[Self::Digest]) -> Self::Digest {
let bytes: Vec<u8> = values.iter().flat_map(|v| v.as_bytes()).collect();
Blake3Digest(*shrink_bytes(&blake3::hash(&bytes).into()))
}
fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest {
let mut hasher = blake3::Hasher::new();
hasher.update(&seed.0);
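The new `digests_as_bytes` helper above flattens a slice of `Blake3Digest<N>` values into one contiguous byte slice so that `merge_many` can feed the whole batch to BLAKE3 in a single call. Here is a standalone sketch of the same pointer-cast pattern, using a hypothetical `Digest32` newtype; soundness rests on the digest wrapper having the same layout as its inner byte array, which the sketch makes explicit with `repr(transparent)`.

```rust
use core::slice;

// Hypothetical stand-in for Blake3Digest<32>: a plain newtype over [u8; 32].
#[repr(transparent)]
struct Digest32([u8; 32]);

/// Views a slice of digests as one contiguous byte slice without copying.
fn digests_as_bytes(digests: &[Digest32]) -> &[u8] {
    // SAFETY: `Digest32` is repr(transparent) over [u8; 32], so `len` digests
    // occupy exactly `len * 32` contiguous bytes with no padding.
    unsafe { slice::from_raw_parts(digests.as_ptr() as *const u8, digests.len() * 32) }
}

fn main() {
    let digests = [Digest32([0xAA; 32]), Digest32([0xBB; 32])];
    let bytes = digests_as_bytes(&digests);
    assert_eq!(bytes.len(), 64);
    assert_eq!((bytes[0], bytes[32]), (0xAA, 0xBB));
}
```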

View File

@@ -1,5 +1,7 @@
use alloc::string::String;
use core::{cmp::Ordering, fmt::Display, ops::Deref};
use core::{cmp::Ordering, fmt::Display, ops::Deref, slice};
use thiserror::Error;
use super::{Digest, Felt, StarkField, DIGEST_BYTES, DIGEST_SIZE, ZERO};
use crate::{
@@ -34,13 +36,19 @@ impl RpoDigest {
<Self as Digest>::as_bytes(self)
}
pub fn digests_as_elements<'a, I>(digests: I) -> impl Iterator<Item = &'a Felt>
pub fn digests_as_elements_iter<'a, I>(digests: I) -> impl Iterator<Item = &'a Felt>
where
I: Iterator<Item = &'a Self>,
{
digests.flat_map(|d| d.0.iter())
}
pub fn digests_as_elements(digests: &[Self]) -> &[Felt] {
let p = digests.as_ptr();
let len = digests.len() * DIGEST_SIZE;
unsafe { slice::from_raw_parts(p as *const Felt, len) }
}
/// Returns hexadecimal representation of this digest prefixed with `0x`.
pub fn to_hex(&self) -> String {
bytes_to_hex_string(self.as_bytes())
@@ -121,9 +129,12 @@ impl Randomizable for RpoDigest {
// CONVERSIONS: FROM RPO DIGEST
// ================================================================================================
#[derive(Copy, Clone, Debug)]
#[derive(Debug, Error)]
pub enum RpoDigestError {
InvalidInteger,
#[error("failed to convert digest field element to {0}")]
TypeConversion(&'static str),
#[error("failed to convert to field element: {0}")]
InvalidFieldElement(String),
}
impl TryFrom<&RpoDigest> for [bool; DIGEST_SIZE] {
@@ -147,10 +158,10 @@ impl TryFrom<RpoDigest> for [bool; DIGEST_SIZE] {
}
Ok([
to_bool(value.0[0].as_int()).ok_or(RpoDigestError::InvalidInteger)?,
to_bool(value.0[1].as_int()).ok_or(RpoDigestError::InvalidInteger)?,
to_bool(value.0[2].as_int()).ok_or(RpoDigestError::InvalidInteger)?,
to_bool(value.0[3].as_int()).ok_or(RpoDigestError::InvalidInteger)?,
to_bool(value.0[0].as_int()).ok_or(RpoDigestError::TypeConversion("bool"))?,
to_bool(value.0[1].as_int()).ok_or(RpoDigestError::TypeConversion("bool"))?,
to_bool(value.0[2].as_int()).ok_or(RpoDigestError::TypeConversion("bool"))?,
to_bool(value.0[3].as_int()).ok_or(RpoDigestError::TypeConversion("bool"))?,
])
}
}
@@ -168,10 +179,22 @@ impl TryFrom<RpoDigest> for [u8; DIGEST_SIZE] {
fn try_from(value: RpoDigest) -> Result<Self, Self::Error> {
Ok([
value.0[0].as_int().try_into().map_err(|_| RpoDigestError::InvalidInteger)?,
value.0[1].as_int().try_into().map_err(|_| RpoDigestError::InvalidInteger)?,
value.0[2].as_int().try_into().map_err(|_| RpoDigestError::InvalidInteger)?,
value.0[3].as_int().try_into().map_err(|_| RpoDigestError::InvalidInteger)?,
value.0[0]
.as_int()
.try_into()
.map_err(|_| RpoDigestError::TypeConversion("u8"))?,
value.0[1]
.as_int()
.try_into()
.map_err(|_| RpoDigestError::TypeConversion("u8"))?,
value.0[2]
.as_int()
.try_into()
.map_err(|_| RpoDigestError::TypeConversion("u8"))?,
value.0[3]
.as_int()
.try_into()
.map_err(|_| RpoDigestError::TypeConversion("u8"))?,
])
}
}
@@ -189,10 +212,22 @@ impl TryFrom<RpoDigest> for [u16; DIGEST_SIZE] {
fn try_from(value: RpoDigest) -> Result<Self, Self::Error> {
Ok([
value.0[0].as_int().try_into().map_err(|_| RpoDigestError::InvalidInteger)?,
value.0[1].as_int().try_into().map_err(|_| RpoDigestError::InvalidInteger)?,
value.0[2].as_int().try_into().map_err(|_| RpoDigestError::InvalidInteger)?,
value.0[3].as_int().try_into().map_err(|_| RpoDigestError::InvalidInteger)?,
value.0[0]
.as_int()
.try_into()
.map_err(|_| RpoDigestError::TypeConversion("u16"))?,
value.0[1]
.as_int()
.try_into()
.map_err(|_| RpoDigestError::TypeConversion("u16"))?,
value.0[2]
.as_int()
.try_into()
.map_err(|_| RpoDigestError::TypeConversion("u16"))?,
value.0[3]
.as_int()
.try_into()
.map_err(|_| RpoDigestError::TypeConversion("u16"))?,
])
}
}
@@ -210,10 +245,22 @@ impl TryFrom<RpoDigest> for [u32; DIGEST_SIZE] {
fn try_from(value: RpoDigest) -> Result<Self, Self::Error> {
Ok([
value.0[0].as_int().try_into().map_err(|_| RpoDigestError::InvalidInteger)?,
value.0[1].as_int().try_into().map_err(|_| RpoDigestError::InvalidInteger)?,
value.0[2].as_int().try_into().map_err(|_| RpoDigestError::InvalidInteger)?,
value.0[3].as_int().try_into().map_err(|_| RpoDigestError::InvalidInteger)?,
value.0[0]
.as_int()
.try_into()
.map_err(|_| RpoDigestError::TypeConversion("u32"))?,
value.0[1]
.as_int()
.try_into()
.map_err(|_| RpoDigestError::TypeConversion("u32"))?,
value.0[2]
.as_int()
.try_into()
.map_err(|_| RpoDigestError::TypeConversion("u32"))?,
value.0[3]
.as_int()
.try_into()
.map_err(|_| RpoDigestError::TypeConversion("u32"))?,
])
}
}
@@ -337,10 +384,10 @@ impl TryFrom<[u64; DIGEST_SIZE]> for RpoDigest {
fn try_from(value: [u64; DIGEST_SIZE]) -> Result<Self, RpoDigestError> {
Ok(Self([
value[0].try_into().map_err(|_| RpoDigestError::InvalidInteger)?,
value[1].try_into().map_err(|_| RpoDigestError::InvalidInteger)?,
value[2].try_into().map_err(|_| RpoDigestError::InvalidInteger)?,
value[3].try_into().map_err(|_| RpoDigestError::InvalidInteger)?,
value[0].try_into().map_err(RpoDigestError::InvalidFieldElement)?,
value[1].try_into().map_err(RpoDigestError::InvalidFieldElement)?,
value[2].try_into().map_err(RpoDigestError::InvalidFieldElement)?,
value[3].try_into().map_err(RpoDigestError::InvalidFieldElement)?,
]))
}
}
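With the conversions above reworked, failures now report which target type could not hold a digest element (`TypeConversion`) or why a `u64` was not a valid field element (`InvalidFieldElement`). A small sketch of how the element-wise `TryFrom<[u64; DIGEST_SIZE]>` failure surfaces to a caller; it assumes `u64::MAX` lies above the field modulus, so the first element is rejected.

```rust
use miden_crypto::hash::rpo::RpoDigest;

fn main() {
    // u64::MAX does not fit the base field, so the element-wise conversion in
    // `TryFrom<[u64; DIGEST_SIZE]>` fails (assumption: values >= the modulus
    // are rejected rather than reduced).
    if let Err(err) = RpoDigest::try_from([u64::MAX, 0, 0, 0]) {
        // Display is now derived via `thiserror`, e.g.
        // "failed to convert to field element: ...".
        println!("{err}");
    }
}
```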

View File

@@ -154,7 +154,7 @@ impl Hasher for Rpo256 {
// initialize the state by copying the digest elements into the rate portion of the state
// (8 total elements), and set the capacity elements to 0.
let mut state = [ZERO; STATE_WIDTH];
let it = Self::Digest::digests_as_elements(values.iter());
let it = Self::Digest::digests_as_elements_iter(values.iter());
for (i, v) in it.enumerate() {
state[RATE_RANGE.start + i] = *v;
}
@@ -164,6 +164,10 @@ impl Hasher for Rpo256 {
RpoDigest::new(state[DIGEST_RANGE].try_into().unwrap())
}
fn merge_many(values: &[Self::Digest]) -> Self::Digest {
Self::hash_elements(Self::Digest::digests_as_elements(values))
}
fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest {
// initialize the state as follows:
// - seed is copied into the first 4 elements of the rate portion of the state.
@@ -290,7 +294,7 @@ impl Rpo256 {
// initialize the state by copying the digest elements into the rate portion of the state
// (8 total elements), and set the capacity elements to 0.
let mut state = [ZERO; STATE_WIDTH];
let it = RpoDigest::digests_as_elements(values.iter());
let it = RpoDigest::digests_as_elements_iter(values.iter());
for (i, v) in it.enumerate() {
state[RATE_RANGE.start + i] = *v;
}

View File

@@ -1,5 +1,7 @@
use alloc::string::String;
use core::{cmp::Ordering, fmt::Display, ops::Deref};
use core::{cmp::Ordering, fmt::Display, ops::Deref, slice};
use thiserror::Error;
use super::{Digest, Felt, StarkField, DIGEST_BYTES, DIGEST_SIZE, ZERO};
use crate::{
@@ -34,13 +36,19 @@ impl RpxDigest {
<Self as Digest>::as_bytes(self)
}
pub fn digests_as_elements<'a, I>(digests: I) -> impl Iterator<Item = &'a Felt>
pub fn digests_as_elements_iter<'a, I>(digests: I) -> impl Iterator<Item = &'a Felt>
where
I: Iterator<Item = &'a Self>,
{
digests.flat_map(|d| d.0.iter())
}
pub fn digests_as_elements(digests: &[Self]) -> &[Felt] {
let p = digests.as_ptr();
let len = digests.len() * DIGEST_SIZE;
unsafe { slice::from_raw_parts(p as *const Felt, len) }
}
/// Returns hexadecimal representation of this digest prefixed with `0x`.
pub fn to_hex(&self) -> String {
bytes_to_hex_string(self.as_bytes())
@@ -121,9 +129,12 @@ impl Randomizable for RpxDigest {
// CONVERSIONS: FROM RPX DIGEST
// ================================================================================================
#[derive(Copy, Clone, Debug)]
#[derive(Debug, Error)]
pub enum RpxDigestError {
InvalidInteger,
#[error("failed to convert digest field element to {0}")]
TypeConversion(&'static str),
#[error("failed to convert to field element: {0}")]
InvalidFieldElement(String),
}
impl TryFrom<&RpxDigest> for [bool; DIGEST_SIZE] {
@@ -147,10 +158,10 @@ impl TryFrom<RpxDigest> for [bool; DIGEST_SIZE] {
}
Ok([
to_bool(value.0[0].as_int()).ok_or(RpxDigestError::InvalidInteger)?,
to_bool(value.0[1].as_int()).ok_or(RpxDigestError::InvalidInteger)?,
to_bool(value.0[2].as_int()).ok_or(RpxDigestError::InvalidInteger)?,
to_bool(value.0[3].as_int()).ok_or(RpxDigestError::InvalidInteger)?,
to_bool(value.0[0].as_int()).ok_or(RpxDigestError::TypeConversion("bool"))?,
to_bool(value.0[1].as_int()).ok_or(RpxDigestError::TypeConversion("bool"))?,
to_bool(value.0[2].as_int()).ok_or(RpxDigestError::TypeConversion("bool"))?,
to_bool(value.0[3].as_int()).ok_or(RpxDigestError::TypeConversion("bool"))?,
])
}
}
@@ -168,10 +179,22 @@ impl TryFrom<RpxDigest> for [u8; DIGEST_SIZE] {
fn try_from(value: RpxDigest) -> Result<Self, Self::Error> {
Ok([
value.0[0].as_int().try_into().map_err(|_| RpxDigestError::InvalidInteger)?,
value.0[1].as_int().try_into().map_err(|_| RpxDigestError::InvalidInteger)?,
value.0[2].as_int().try_into().map_err(|_| RpxDigestError::InvalidInteger)?,
value.0[3].as_int().try_into().map_err(|_| RpxDigestError::InvalidInteger)?,
value.0[0]
.as_int()
.try_into()
.map_err(|_| RpxDigestError::TypeConversion("u8"))?,
value.0[1]
.as_int()
.try_into()
.map_err(|_| RpxDigestError::TypeConversion("u8"))?,
value.0[2]
.as_int()
.try_into()
.map_err(|_| RpxDigestError::TypeConversion("u8"))?,
value.0[3]
.as_int()
.try_into()
.map_err(|_| RpxDigestError::TypeConversion("u8"))?,
])
}
}
@@ -189,10 +212,22 @@ impl TryFrom<RpxDigest> for [u16; DIGEST_SIZE] {
fn try_from(value: RpxDigest) -> Result<Self, Self::Error> {
Ok([
value.0[0].as_int().try_into().map_err(|_| RpxDigestError::InvalidInteger)?,
value.0[1].as_int().try_into().map_err(|_| RpxDigestError::InvalidInteger)?,
value.0[2].as_int().try_into().map_err(|_| RpxDigestError::InvalidInteger)?,
value.0[3].as_int().try_into().map_err(|_| RpxDigestError::InvalidInteger)?,
value.0[0]
.as_int()
.try_into()
.map_err(|_| RpxDigestError::TypeConversion("u16"))?,
value.0[1]
.as_int()
.try_into()
.map_err(|_| RpxDigestError::TypeConversion("u16"))?,
value.0[2]
.as_int()
.try_into()
.map_err(|_| RpxDigestError::TypeConversion("u16"))?,
value.0[3]
.as_int()
.try_into()
.map_err(|_| RpxDigestError::TypeConversion("u16"))?,
])
}
}
@@ -210,10 +245,22 @@ impl TryFrom<RpxDigest> for [u32; DIGEST_SIZE] {
fn try_from(value: RpxDigest) -> Result<Self, Self::Error> {
Ok([
value.0[0].as_int().try_into().map_err(|_| RpxDigestError::InvalidInteger)?,
value.0[1].as_int().try_into().map_err(|_| RpxDigestError::InvalidInteger)?,
value.0[2].as_int().try_into().map_err(|_| RpxDigestError::InvalidInteger)?,
value.0[3].as_int().try_into().map_err(|_| RpxDigestError::InvalidInteger)?,
value.0[0]
.as_int()
.try_into()
.map_err(|_| RpxDigestError::TypeConversion("u32"))?,
value.0[1]
.as_int()
.try_into()
.map_err(|_| RpxDigestError::TypeConversion("u32"))?,
value.0[2]
.as_int()
.try_into()
.map_err(|_| RpxDigestError::TypeConversion("u32"))?,
value.0[3]
.as_int()
.try_into()
.map_err(|_| RpxDigestError::TypeConversion("u32"))?,
])
}
}
@@ -337,10 +384,10 @@ impl TryFrom<[u64; DIGEST_SIZE]> for RpxDigest {
fn try_from(value: [u64; DIGEST_SIZE]) -> Result<Self, RpxDigestError> {
Ok(Self([
value[0].try_into().map_err(|_| RpxDigestError::InvalidInteger)?,
value[1].try_into().map_err(|_| RpxDigestError::InvalidInteger)?,
value[2].try_into().map_err(|_| RpxDigestError::InvalidInteger)?,
value[3].try_into().map_err(|_| RpxDigestError::InvalidInteger)?,
value[0].try_into().map_err(RpxDigestError::InvalidFieldElement)?,
value[1].try_into().map_err(RpxDigestError::InvalidFieldElement)?,
value[2].try_into().map_err(RpxDigestError::InvalidFieldElement)?,
value[3].try_into().map_err(RpxDigestError::InvalidFieldElement)?,
]))
}
}

View File

@@ -160,7 +160,7 @@ impl Hasher for Rpx256 {
// initialize the state by copying the digest elements into the rate portion of the state
// (8 total elements), and set the capacity elements to 0.
let mut state = [ZERO; STATE_WIDTH];
let it = Self::Digest::digests_as_elements(values.iter());
let it = Self::Digest::digests_as_elements_iter(values.iter());
for (i, v) in it.enumerate() {
state[RATE_RANGE.start + i] = *v;
}
@@ -170,6 +170,10 @@ impl Hasher for Rpx256 {
RpxDigest::new(state[DIGEST_RANGE].try_into().unwrap())
}
fn merge_many(values: &[Self::Digest]) -> Self::Digest {
Self::hash_elements(Self::Digest::digests_as_elements(values))
}
fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest {
// initialize the state as follows:
// - seed is copied into the first 4 elements of the rate portion of the state.
@@ -293,7 +297,7 @@ impl Rpx256 {
// initialize the state by copying the digest elements into the rate portion of the state
// (8 total elements), and set the capacity elements to 0.
let mut state = [ZERO; STATE_WIDTH];
let it = RpxDigest::digests_as_elements(values.iter());
let it = RpxDigest::digests_as_elements_iter(values.iter());
for (i, v) in it.enumerate() {
state[RATE_RANGE.start + i] = *v;
}

View File

@@ -4,8 +4,9 @@ use clap::Parser;
use miden_crypto::{
hash::rpo::{Rpo256, RpoDigest},
merkle::{MerkleError, Smt},
Felt, Word, ONE,
Felt, Word, EMPTY_WORD, ONE,
};
use rand::{prelude::IteratorRandom, thread_rng, Rng};
use rand_utils::rand_value;
#[derive(Parser, Debug)]
@@ -13,7 +14,7 @@ use rand_utils::rand_value;
pub struct BenchmarkCmd {
/// Size of the tree
#[clap(short = 's', long = "size")]
size: u64,
size: usize,
}
fn main() {
@@ -29,101 +30,153 @@ pub fn benchmark_smt() {
let mut entries = Vec::new();
for i in 0..tree_size {
let key = rand_value::<RpoDigest>();
let value = [ONE, ONE, ONE, Felt::new(i)];
let value = [ONE, ONE, ONE, Felt::new(i as u64)];
entries.push((key, value));
}
let mut tree = construction(entries, tree_size).unwrap();
insertion(&mut tree, tree_size).unwrap();
batched_insertion(&mut tree, tree_size).unwrap();
proof_generation(&mut tree, tree_size).unwrap();
let mut tree = construction(entries.clone(), tree_size).unwrap();
insertion(&mut tree).unwrap();
batched_insertion(&mut tree).unwrap();
batched_update(&mut tree, entries).unwrap();
proof_generation(&mut tree).unwrap();
}
/// Runs the construction benchmark for [`Smt`], returning the constructed tree.
pub fn construction(entries: Vec<(RpoDigest, Word)>, size: u64) -> Result<Smt, MerkleError> {
pub fn construction(entries: Vec<(RpoDigest, Word)>, size: usize) -> Result<Smt, MerkleError> {
println!("Running a construction benchmark:");
let now = Instant::now();
let tree = Smt::with_entries(entries)?;
let elapsed = now.elapsed();
println!(
"Constructed a SMT with {} key-value pairs in {:.3} seconds",
size,
elapsed.as_secs_f32(),
);
let elapsed = now.elapsed().as_secs_f32();
println!("Constructed a SMT with {size} key-value pairs in {elapsed:.1} seconds");
println!("Number of leaf nodes: {}\n", tree.leaves().count());
Ok(tree)
}
/// Runs the insertion benchmark for the [`Smt`].
pub fn insertion(tree: &mut Smt, size: u64) -> Result<(), MerkleError> {
pub fn insertion(tree: &mut Smt) -> Result<(), MerkleError> {
const NUM_INSERTIONS: usize = 1_000;
println!("Running an insertion benchmark:");
let size = tree.num_leaves();
let mut insertion_times = Vec::new();
for i in 0..20 {
for i in 0..NUM_INSERTIONS {
let test_key = Rpo256::hash(&rand_value::<u64>().to_be_bytes());
let test_value = [ONE, ONE, ONE, Felt::new(size + i)];
let test_value = [ONE, ONE, ONE, Felt::new((size + i) as u64)];
let now = Instant::now();
tree.insert(test_key, test_value);
let elapsed = now.elapsed();
insertion_times.push(elapsed.as_secs_f32());
insertion_times.push(elapsed.as_micros());
}
println!(
"An average insertion time measured by 20 inserts into a SMT with {} key-value pairs is {:.3} milliseconds\n",
size,
// calculate the average by dividing by 20 and convert to milliseconds by multiplying by
// 1000. As a result, we can only multiply by 50
insertion_times.iter().sum::<f32>() * 50f32,
"An average insertion time measured by {NUM_INSERTIONS} inserts into an SMT with {size} leaves is {:.0} μs\n",
// calculate the average
insertion_times.iter().sum::<u128>() as f64 / (NUM_INSERTIONS as f64),
);
Ok(())
}
pub fn batched_insertion(tree: &mut Smt, size: u64) -> Result<(), MerkleError> {
pub fn batched_insertion(tree: &mut Smt) -> Result<(), MerkleError> {
const NUM_INSERTIONS: usize = 1_000;
println!("Running a batched insertion benchmark:");
let new_pairs: Vec<(RpoDigest, Word)> = (0..1000)
let size = tree.num_leaves();
let new_pairs: Vec<(RpoDigest, Word)> = (0..NUM_INSERTIONS)
.map(|i| {
let key = Rpo256::hash(&rand_value::<u64>().to_be_bytes());
let value = [ONE, ONE, ONE, Felt::new(size + i)];
let value = [ONE, ONE, ONE, Felt::new((size + i) as u64)];
(key, value)
})
.collect();
let now = Instant::now();
let mutations = tree.compute_mutations(new_pairs);
let compute_elapsed = now.elapsed();
let compute_elapsed = now.elapsed().as_secs_f64() * 1000_f64; // time in ms
let now = Instant::now();
tree.apply_mutations(mutations).unwrap();
let apply_elapsed = now.elapsed();
tree.apply_mutations(mutations)?;
let apply_elapsed = now.elapsed().as_secs_f64() * 1000_f64; // time in ms
println!(
"An average batch computation time measured by a 1k-batch into an SMT with {} key-value pairs over {:.3} milliseconds is {:.3} milliseconds",
size,
compute_elapsed.as_secs_f32() * 1000f32,
// Dividing by the number of iterations, 1000, and then multiplying by 1000 to get
// milliseconds, cancels out.
compute_elapsed.as_secs_f32(),
"An average insert-batch computation time measured by a {NUM_INSERTIONS}-batch into an SMT with {size} leaves over {:.1} ms is {:.0} μs",
compute_elapsed,
compute_elapsed * 1000_f64 / NUM_INSERTIONS as f64, // time in μs
);
println!(
"An average batch application time measured by a 1k-batch into an SMT with {} key-value pairs over {:.3} milliseconds is {:.3} milliseconds",
size,
apply_elapsed.as_secs_f32() * 1000f32,
// Dividing by the number of iterations, 1000, and then multiplying by 1000 to get
// milliseconds, cancels out.
apply_elapsed.as_secs_f32(),
"An average insert-batch application time measured by a {NUM_INSERTIONS}-batch into an SMT with {size} leaves over {:.1} ms is {:.0} μs",
apply_elapsed,
apply_elapsed * 1000_f64 / NUM_INSERTIONS as f64, // time in μs
);
println!(
"An average batch insertion time measured by a 1k-batch into an SMT with {} key-value pairs totals to {:.3} milliseconds",
size,
(compute_elapsed + apply_elapsed).as_secs_f32() * 1000f32,
"An average batch insertion time measured by a 1k-batch into an SMT with {size} leaves totals to {:.1} ms",
(compute_elapsed + apply_elapsed),
);
println!();
Ok(())
}
pub fn batched_update(tree: &mut Smt, entries: Vec<(RpoDigest, Word)>) -> Result<(), MerkleError> {
const NUM_UPDATES: usize = 1_000;
const REMOVAL_PROBABILITY: f64 = 0.2;
println!("Running a batched update benchmark:");
let size = tree.num_leaves();
let mut rng = thread_rng();
let new_pairs =
entries
.into_iter()
.choose_multiple(&mut rng, NUM_UPDATES)
.into_iter()
.map(|(key, _)| {
let value = if rng.gen_bool(REMOVAL_PROBABILITY) {
EMPTY_WORD
} else {
[ONE, ONE, ONE, Felt::new(rng.gen())]
};
(key, value)
});
assert_eq!(new_pairs.len(), NUM_UPDATES);
let now = Instant::now();
let mutations = tree.compute_mutations(new_pairs);
let compute_elapsed = now.elapsed().as_secs_f64() * 1000_f64; // time in ms
let now = Instant::now();
tree.apply_mutations(mutations)?;
let apply_elapsed = now.elapsed().as_secs_f64() * 1000_f64; // time in ms
println!(
"An average update-batch computation time measured by a {NUM_UPDATES}-batch into an SMT with {size} leaves over {:.1} ms is {:.0} μs",
compute_elapsed,
compute_elapsed * 1000_f64 / NUM_UPDATES as f64, // time in μs
);
println!(
"An average update-batch application time measured by a {NUM_UPDATES}-batch into an SMT with {size} leaves over {:.1} ms is {:.0} μs",
apply_elapsed,
apply_elapsed * 1000_f64 / NUM_UPDATES as f64, // time in μs
);
println!(
"An average batch update time measured by a 1k-batch into an SMT with {size} leaves totals to {:.1} ms",
(compute_elapsed + apply_elapsed),
);
println!();
@@ -132,28 +185,29 @@ pub fn batched_insertion(tree: &mut Smt, size: u64) -> Result<(), MerkleError> {
}
/// Runs the proof generation benchmark for the [`Smt`].
pub fn proof_generation(tree: &mut Smt, size: u64) -> Result<(), MerkleError> {
pub fn proof_generation(tree: &mut Smt) -> Result<(), MerkleError> {
const NUM_PROOFS: usize = 100;
println!("Running a proof generation benchmark:");
let mut insertion_times = Vec::new();
for i in 0..20 {
let size = tree.num_leaves();
for i in 0..NUM_PROOFS {
let test_key = Rpo256::hash(&rand_value::<u64>().to_be_bytes());
let test_value = [ONE, ONE, ONE, Felt::new(size + i)];
let test_value = [ONE, ONE, ONE, Felt::new((size + i) as u64)];
tree.insert(test_key, test_value);
let now = Instant::now();
let _proof = tree.open(&test_key);
let elapsed = now.elapsed();
insertion_times.push(elapsed.as_secs_f32());
insertion_times.push(now.elapsed().as_micros());
}
println!(
"An average proving time measured by 20 value proofs in a SMT with {} key-value pairs in {:.3} microseconds",
size,
// calculate the average by dividing by 20 and convert to microseconds by multiplying by
// 1000000. As a result, we can only multiply by 50000
insertion_times.iter().sum::<f32>() * 50000f32,
"An average proving time measured by {NUM_PROOFS} value proofs in an SMT with {size} leaves in {:.0} μs",
// calculate the average
insertion_times.iter().sum::<u128>() as f64 / (NUM_PROOFS as f64),
);
Ok(())
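The inline comments above ("time in ms", "time in μs") all rely on the same conversion: a total elapsed time in milliseconds becomes a per-operation average in microseconds by multiplying by 1000 and dividing by the batch size. A trivial standalone check of that arithmetic, with a hypothetical helper name:

```rust
fn per_op_micros(total_elapsed_ms: f64, batch_size: usize) -> f64 {
    // ms -> μs is a factor of 1000; dividing by the batch size gives the average.
    total_elapsed_ms * 1000.0 / batch_size as f64
}

fn main() {
    // A 1,000-item batch applied in 250 ms averages 250 μs per item.
    assert!((per_op_micros(250.0, 1_000) - 250.0).abs() < f64::EPSILON);
}
```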

View File

@@ -1,65 +1,34 @@
use alloc::vec::Vec;
use core::fmt;
use thiserror::Error;
use super::{smt::SmtLeafError, MerklePath, NodeIndex, RpoDigest};
use super::{NodeIndex, RpoDigest};
#[derive(Clone, Debug, PartialEq, Eq)]
#[derive(Debug, Error)]
pub enum MerkleError {
ConflictingRoots(Vec<RpoDigest>),
#[error("expected merkle root {expected_root} found {actual_root}")]
ConflictingRoots {
expected_root: RpoDigest,
actual_root: RpoDigest,
},
#[error("provided merkle tree depth {0} is too small")]
DepthTooSmall(u8),
#[error("provided merkle tree depth {0} is too big")]
DepthTooBig(u64),
#[error("multiple values provided for merkle tree index {0}")]
DuplicateValuesForIndex(u64),
DuplicateValuesForKey(RpoDigest),
InvalidIndex { depth: u8, value: u64 },
InvalidDepth { expected: u8, provided: u8 },
InvalidSubtreeDepth { subtree_depth: u8, tree_depth: u8 },
InvalidPath(MerklePath),
InvalidNumEntries(usize),
NodeNotInSet(NodeIndex),
NodeNotInStore(RpoDigest, NodeIndex),
#[error("node index value {value} is not valid for depth {depth}")]
InvalidNodeIndex { depth: u8, value: u64 },
#[error("provided node index depth {provided} does not match expected depth {expected}")]
InvalidNodeIndexDepth { expected: u8, provided: u8 },
#[error("merkle subtree depth {subtree_depth} exceeds merkle tree depth {tree_depth}")]
SubtreeDepthExceedsDepth { subtree_depth: u8, tree_depth: u8 },
#[error("number of entries in the merkle tree exceeds the maximum of {0}")]
TooManyEntries(usize),
#[error("node index `{0}` not found in the tree")]
NodeIndexNotFoundInTree(NodeIndex),
#[error("node {0:?} with index `{1}` not found in the store")]
NodeIndexNotFoundInStore(RpoDigest, NodeIndex),
#[error("number of provided merkle tree leaves {0} is not a power of two")]
NumLeavesNotPowerOfTwo(usize),
#[error("root {0:?} is not in the store")]
RootNotInStore(RpoDigest),
SmtLeaf(SmtLeafError),
}
impl fmt::Display for MerkleError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
use MerkleError::*;
match self {
ConflictingRoots(roots) => write!(f, "the merkle paths roots do not match {roots:?}"),
DepthTooSmall(depth) => write!(f, "the provided depth {depth} is too small"),
DepthTooBig(depth) => write!(f, "the provided depth {depth} is too big"),
DuplicateValuesForIndex(key) => write!(f, "multiple values provided for key {key}"),
DuplicateValuesForKey(key) => write!(f, "multiple values provided for key {key}"),
InvalidIndex { depth, value } => {
write!(f, "the index value {value} is not valid for the depth {depth}")
},
InvalidDepth { expected, provided } => {
write!(f, "the provided depth {provided} is not valid for {expected}")
},
InvalidSubtreeDepth { subtree_depth, tree_depth } => {
write!(f, "tried inserting a subtree of depth {subtree_depth} into a tree of depth {tree_depth}")
},
InvalidPath(_path) => write!(f, "the provided path is not valid"),
InvalidNumEntries(max) => write!(f, "number of entries exceeded the maximum: {max}"),
NodeNotInSet(index) => write!(f, "the node with index ({index}) is not in the set"),
NodeNotInStore(hash, index) => {
write!(f, "the node {hash:?} with index ({index}) is not in the store")
},
NumLeavesNotPowerOfTwo(leaves) => {
write!(f, "the leaves count {leaves} is not a power of 2")
},
RootNotInStore(root) => write!(f, "the root {:?} is not in the store", root),
SmtLeaf(smt_leaf_error) => write!(f, "smt leaf error: {smt_leaf_error}"),
}
}
}
#[cfg(feature = "std")]
impl std::error::Error for MerkleError {}
impl From<SmtLeafError> for MerkleError {
fn from(value: SmtLeafError) -> Self {
Self::SmtLeaf(value)
}
}
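This rewrite of `MerkleError`, like the `MmrError` rewrite further below, drops the hand-written `Display`/`Error` impls in favor of `thiserror` derives (the [BREAKING] #344 entry in the changelog). A minimal sketch of the pattern with hypothetical error types, showing what the `#[error(...)]` and `#[source]` attributes generate:

```rust
use thiserror::Error;

// Hypothetical error types standing in for SmtLeafError / MerkleError above.
#[derive(Debug, Error)]
enum LeafError {
    // `#[error]` generates the Display impl; `{0}` interpolates the field.
    #[error("leaf index {0} is out of bounds")]
    OutOfBounds(u64),
}

#[derive(Debug, Error)]
enum TreeError {
    // `#[source]` wires the inner error into the standard `Error::source` chain.
    #[error("invalid merkle path")]
    InvalidMerklePath(#[source] LeafError),
}

fn main() {
    let err = TreeError::InvalidMerklePath(LeafError::OutOfBounds(9));
    assert_eq!(err.to_string(), "invalid merkle path");
    let source = std::error::Error::source(&err).expect("source is set");
    assert_eq!(source.to_string(), "leaf index 9 is out of bounds");
}
```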

View File

@@ -38,7 +38,7 @@ impl NodeIndex {
/// Returns an error if the `value` is greater than or equal to 2^{depth}.
pub const fn new(depth: u8, value: u64) -> Result<Self, MerkleError> {
if (64 - value.leading_zeros()) > depth as u32 {
Err(MerkleError::InvalidIndex { depth, value })
Err(MerkleError::InvalidNodeIndex { depth, value })
} else {
Ok(Self { depth, value })
}
@@ -128,7 +128,7 @@ impl NodeIndex {
self.value
}
/// Returns true if the current instance points to a right sibling node.
/// Returns `true` if the current instance points to a right sibling node.
pub const fn is_value_odd(&self) -> bool {
(self.value & 1) == 1
}
@@ -182,6 +182,7 @@ impl Deserializable for NodeIndex {
#[cfg(test)]
mod tests {
use assert_matches::assert_matches;
use proptest::prelude::*;
use super::*;
@@ -190,19 +191,19 @@ mod tests {
fn test_node_index_value_too_high() {
assert_eq!(NodeIndex::new(0, 0).unwrap(), NodeIndex { depth: 0, value: 0 });
let err = NodeIndex::new(0, 1).unwrap_err();
assert_eq!(err, MerkleError::InvalidIndex { depth: 0, value: 1 });
assert_matches!(err, MerkleError::InvalidNodeIndex { depth: 0, value: 1 });
assert_eq!(NodeIndex::new(1, 1).unwrap(), NodeIndex { depth: 1, value: 1 });
let err = NodeIndex::new(1, 2).unwrap_err();
assert_eq!(err, MerkleError::InvalidIndex { depth: 1, value: 2 });
assert_matches!(err, MerkleError::InvalidNodeIndex { depth: 1, value: 2 });
assert_eq!(NodeIndex::new(2, 3).unwrap(), NodeIndex { depth: 2, value: 3 });
let err = NodeIndex::new(2, 4).unwrap_err();
assert_eq!(err, MerkleError::InvalidIndex { depth: 2, value: 4 });
assert_matches!(err, MerkleError::InvalidNodeIndex { depth: 2, value: 4 });
assert_eq!(NodeIndex::new(3, 7).unwrap(), NodeIndex { depth: 3, value: 7 });
let err = NodeIndex::new(3, 8).unwrap_err();
assert_eq!(err, MerkleError::InvalidIndex { depth: 3, value: 8 });
assert_matches!(err, MerkleError::InvalidNodeIndex { depth: 3, value: 8 });
}
#[test]

View File

@@ -1,41 +1,27 @@
use core::fmt::{Display, Formatter};
#[cfg(feature = "std")]
use std::error::Error;
use alloc::string::String;
use thiserror::Error;
use crate::merkle::MerkleError;
#[derive(Debug, PartialEq, Eq, Clone)]
#[derive(Debug, Error)]
pub enum MmrError {
InvalidPosition(usize),
InvalidPeaks,
InvalidPeak,
PeakOutOfBounds(usize, usize),
#[error("mmr does not contain position {0}")]
PositionNotFound(usize),
#[error("mmr peaks are invalid: {0}")]
InvalidPeaks(String),
#[error(
"mmr peak does not match the computed merkle root of the provided authentication path"
)]
PeakPathMismatch,
#[error("requested peak index is {peak_idx} but the number of peaks is {peaks_len}")]
PeakOutOfBounds { peak_idx: usize, peaks_len: usize },
#[error("invalid mmr update")]
InvalidUpdate,
UnknownPeak,
MerkleError(MerkleError),
#[error("mmr does not contain a peak with depth {0}")]
UnknownPeak(u8),
#[error("invalid merkle path")]
InvalidMerklePath(#[source] MerkleError),
#[error("merkle root computation failed")]
MerkleRootComputationFailed(#[source] MerkleError),
}
impl Display for MmrError {
fn fmt(&self, fmt: &mut Formatter<'_>) -> Result<(), core::fmt::Error> {
match self {
MmrError::InvalidPosition(pos) => write!(fmt, "Mmr does not contain position {pos}"),
MmrError::InvalidPeaks => write!(fmt, "Invalid peaks count"),
MmrError::InvalidPeak => {
write!(fmt, "Peak values does not match merkle path computed root")
},
MmrError::PeakOutOfBounds(peak_idx, peaks_len) => write!(
fmt,
"Requested peak index is {} but the number of peaks is {}",
peak_idx, peaks_len
),
MmrError::InvalidUpdate => write!(fmt, "Invalid Mmr update"),
MmrError::UnknownPeak => {
write!(fmt, "Peak not in Mmr")
},
MmrError::MerkleError(err) => write!(fmt, "{}", err),
}
}
}
#[cfg(feature = "std")]
impl Error for MmrError {}

View File

@@ -99,7 +99,7 @@ impl Mmr {
pub fn open_at(&self, pos: usize, forest: usize) -> Result<MmrProof, MmrError> {
// find the target tree responsible for the MMR position
let tree_bit =
leaf_to_corresponding_tree(pos, forest).ok_or(MmrError::InvalidPosition(pos))?;
leaf_to_corresponding_tree(pos, forest).ok_or(MmrError::PositionNotFound(pos))?;
// isolate the trees before the target
let forest_before = forest & high_bitmask(tree_bit + 1);
@@ -126,7 +126,7 @@ impl Mmr {
pub fn get(&self, pos: usize) -> Result<RpoDigest, MmrError> {
// find the target tree responsible for the MMR position
let tree_bit =
leaf_to_corresponding_tree(pos, self.forest).ok_or(MmrError::InvalidPosition(pos))?;
leaf_to_corresponding_tree(pos, self.forest).ok_or(MmrError::PositionNotFound(pos))?;
// isolate the trees before the target
let forest_before = self.forest & high_bitmask(tree_bit + 1);
@@ -174,7 +174,10 @@ impl Mmr {
/// Returns an error if the specified `forest` value is not valid for this MMR.
pub fn peaks_at(&self, forest: usize) -> Result<MmrPeaks, MmrError> {
if forest > self.forest {
return Err(MmrError::InvalidPeaks);
return Err(MmrError::InvalidPeaks(format!(
"requested forest {forest} exceeds current forest {}",
self.forest
)));
}
let peaks: Vec<RpoDigest> = TrueBitPositionIterator::new(forest)
@@ -199,7 +202,7 @@ impl Mmr {
/// that have been merged together, followed by the new peaks of the [Mmr].
pub fn get_delta(&self, from_forest: usize, to_forest: usize) -> Result<MmrDelta, MmrError> {
if to_forest > self.forest || from_forest > to_forest {
return Err(MmrError::InvalidPeaks);
return Err(MmrError::InvalidPeaks(format!("to_forest {to_forest} exceeds the current forest {} or from_forest {from_forest} exceeds to_forest", self.forest)));
}
if from_forest == to_forest {

View File

@@ -145,7 +145,7 @@ impl PartialMmr {
/// in the underlying MMR.
pub fn open(&self, pos: usize) -> Result<Option<MmrProof>, MmrError> {
let tree_bit =
leaf_to_corresponding_tree(pos, self.forest).ok_or(MmrError::InvalidPosition(pos))?;
leaf_to_corresponding_tree(pos, self.forest).ok_or(MmrError::PositionNotFound(pos))?;
let depth = tree_bit as usize;
let mut nodes = Vec::with_capacity(depth);
@@ -298,12 +298,12 @@ impl PartialMmr {
// invalid.
let tree = 1 << path.depth();
if tree & self.forest == 0 {
return Err(MmrError::UnknownPeak);
return Err(MmrError::UnknownPeak(path.depth()));
};
if leaf_pos + 1 == self.forest
&& path.depth() == 0
&& self.peaks.last().map_or(false, |v| *v == leaf)
&& self.peaks.last().is_some_and(|v| *v == leaf)
{
self.track_latest = true;
return Ok(());
@@ -319,9 +319,11 @@ impl PartialMmr {
// Compute the root of the authentication path, and check it matches the current version of
// the PartialMmr.
let computed = path.compute_root(path_idx as u64, leaf).map_err(MmrError::MerkleError)?;
let computed = path
.compute_root(path_idx as u64, leaf)
.map_err(MmrError::MerkleRootComputationFailed)?;
if self.peaks[peak_pos] != computed {
return Err(MmrError::InvalidPeak);
return Err(MmrError::PeakPathMismatch);
}
let mut idx = InOrderIndex::from_leaf_pos(leaf_pos);
@@ -356,7 +358,10 @@ impl PartialMmr {
/// inserted into the partial MMR.
pub fn apply(&mut self, delta: MmrDelta) -> Result<Vec<(InOrderIndex, RpoDigest)>, MmrError> {
if delta.forest < self.forest {
return Err(MmrError::InvalidPeaks);
return Err(MmrError::InvalidPeaks(format!(
"forest of mmr delta {} is less than current forest {}",
delta.forest, self.forest
)));
}
let mut inserted_nodes = Vec::new();
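A hedged sketch of the delta-sync flow that `Mmr::get_delta` and `PartialMmr::apply` support; `PartialMmr::from_peaks` and `PartialMmr::forest` are assumed from context and may be named differently in the actual API.

```rust
use miden_crypto::{
    hash::rpo::RpoDigest,
    merkle::{Mmr, MmrError, PartialMmr},
    Felt, ONE,
};

fn sync_partial_mmr() -> Result<(), MmrError> {
    let mut full = Mmr::new();
    for i in 0u64..4 {
        full.add(RpoDigest::from([ONE, ONE, ONE, Felt::new(i)]));
    }

    // a light client tracks only the peaks of the forest it has seen so far
    // (`PartialMmr::from_peaks` is an assumed constructor name)
    let mut partial = PartialMmr::from_peaks(full.peaks_at(full.forest())?);

    // the full MMR grows; the delta carries exactly the nodes the partial MMR is missing
    full.add(RpoDigest::from([ONE, ONE, ONE, Felt::new(4)]));
    let delta = full.get_delta(partial.forest(), full.forest())?;
    let _new_nodes = partial.apply(delta)?;

    assert_eq!(partial.forest(), full.forest());
    Ok(())
}
```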

View File

@@ -45,7 +45,11 @@ impl MmrPeaks {
/// Returns an error if the number of leaves and the number of peaks are inconsistent.
pub fn new(num_leaves: usize, peaks: Vec<RpoDigest>) -> Result<Self, MmrError> {
if num_leaves.count_ones() as usize != peaks.len() {
return Err(MmrError::InvalidPeaks);
return Err(MmrError::InvalidPeaks(format!(
"number of one bits in leaves is {} which does not equal peak length {}",
num_leaves.count_ones(),
peaks.len()
)));
}
Ok(Self { num_leaves, peaks })
@@ -77,7 +81,7 @@ impl MmrPeaks {
pub fn get_peak(&self, peak_idx: usize) -> Result<&RpoDigest, MmrError> {
self.peaks
.get(peak_idx)
.ok_or(MmrError::PeakOutOfBounds(peak_idx, self.peaks.len()))
.ok_or(MmrError::PeakOutOfBounds { peak_idx, peaks_len: self.peaks.len() })
}
/// Converts this [MmrPeaks] into its components: number of leaves and a vector of peaks of
@@ -106,7 +110,7 @@ impl MmrPeaks {
opening
.merkle_path
.verify(opening.relative_pos() as u64, value, root)
.map_err(MmrError::MerkleError)
.map_err(MmrError::InvalidMerklePath)
}
/// Flattens and pads the peaks to make hashing inside of the Miden VM easier.

View File

@@ -22,8 +22,8 @@ pub use path::{MerklePath, RootPath, ValuePath};
mod smt;
pub use smt::{
LeafIndex, MutationSet, SimpleSmt, Smt, SmtLeaf, SmtLeafError, SmtProof, SmtProofError,
SMT_DEPTH, SMT_MAX_DEPTH, SMT_MIN_DEPTH,
InnerNode, LeafIndex, MutationSet, NodeMutation, SimpleSmt, Smt, SmtLeaf, SmtLeafError,
SmtProof, SmtProofError, SMT_DEPTH, SMT_MAX_DEPTH, SMT_MIN_DEPTH,
};
mod mmr;

View File

@@ -116,7 +116,7 @@ impl PartialMerkleTree {
// depth of 63 because we consider passing in a vector of size 2^64 infeasible.
let max = 2usize.pow(63);
if layers.len() > max {
return Err(MerkleError::InvalidNumEntries(max));
return Err(MerkleError::TooManyEntries(max));
}
// Get maximum depth
@@ -147,11 +147,12 @@ impl PartialMerkleTree {
let index = NodeIndex::new(depth, index_value)?;
// get hash of the current node
let node = nodes.get(&index).ok_or(MerkleError::NodeNotInSet(index))?;
let node =
nodes.get(&index).ok_or(MerkleError::NodeIndexNotFoundInTree(index))?;
// get hash of the sibling node
let sibling = nodes
.get(&index.sibling())
.ok_or(MerkleError::NodeNotInSet(index.sibling()))?;
.ok_or(MerkleError::NodeIndexNotFoundInTree(index.sibling()))?;
// get parent hash
let parent = Rpo256::merge(&index.build_node(*node, *sibling));
@@ -184,7 +185,10 @@ impl PartialMerkleTree {
/// # Errors
/// Returns an error if the specified NodeIndex is not contained in the nodes map.
pub fn get_node(&self, index: NodeIndex) -> Result<RpoDigest, MerkleError> {
self.nodes.get(&index).ok_or(MerkleError::NodeNotInSet(index)).copied()
self.nodes
.get(&index)
.ok_or(MerkleError::NodeIndexNotFoundInTree(index))
.copied()
}
/// Returns true if provided index contains in the leaves set, false otherwise.
@@ -224,7 +228,7 @@ impl PartialMerkleTree {
}
if !self.nodes.contains_key(&index) {
return Err(MerkleError::NodeNotInSet(index));
return Err(MerkleError::NodeIndexNotFoundInTree(index));
}
let mut path = Vec::new();
@@ -335,15 +339,16 @@ impl PartialMerkleTree {
if self.root() == EMPTY_DIGEST {
self.nodes.insert(ROOT_INDEX, root);
} else if self.root() != root {
return Err(MerkleError::ConflictingRoots([self.root(), root].to_vec()));
return Err(MerkleError::ConflictingRoots {
expected_root: self.root(),
actual_root: root,
});
}
Ok(())
}
/// Updates value of the leaf at the specified index returning the old leaf value.
/// By default the specified index is assumed to belong to the deepest layer. If the considered
/// node does not belong to the tree, the first node on the way to the root will be changed.
///
/// By default the specified index is assumed to belong to the deepest layer. If the considered
/// node does not belong to the tree, the first node on the way to the root will be changed.
@@ -352,6 +357,7 @@ impl PartialMerkleTree {
///
/// # Errors
/// Returns an error if:
/// - No entry exists at the specified index.
/// - The specified index is greater than the maximum number of nodes on the deepest layer.
pub fn update_leaf(&mut self, index: u64, value: Word) -> Result<RpoDigest, MerkleError> {
let mut node_index = NodeIndex::new(self.max_depth(), index)?;
@@ -367,7 +373,7 @@ impl PartialMerkleTree {
let old_value = self
.nodes
.insert(node_index, value.into())
.ok_or(MerkleError::NodeNotInSet(node_index))?;
.ok_or(MerkleError::NodeIndexNotFoundInTree(node_index))?;
// if the old value and new value are the same, there is nothing to update
if value == *old_value {

View File

@@ -61,7 +61,10 @@ impl MerklePath {
pub fn verify(&self, index: u64, node: RpoDigest, root: &RpoDigest) -> Result<(), MerkleError> {
let computed_root = self.compute_root(index, node)?;
if &computed_root != root {
return Err(MerkleError::ConflictingRoots(vec![computed_root, *root]));
return Err(MerkleError::ConflictingRoots {
expected_root: *root,
actual_root: computed_root,
});
}
Ok(())
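A short, hedged sketch of how the new struct-style `ConflictingRoots` variant surfaces from `MerklePath::verify`; the tree construction details are assumptions, not part of this diff.

```rust
use miden_crypto::{
    merkle::{MerkleError, MerkleTree, NodeIndex},
    Felt, Word, ONE,
};

fn verify_against_wrong_root() -> Result<(), MerkleError> {
    let leaves: Vec<Word> = (0u64..4).map(|i| [ONE, ONE, ONE, Felt::new(i)]).collect();
    let tree = MerkleTree::new(leaves)?;

    let index = NodeIndex::new(tree.depth(), 0)?;
    let path = tree.get_path(index)?;
    let node = tree.get_node(index)?;

    // verification against the correct root succeeds ...
    path.verify(0, node, &tree.root())?;

    // ... while a wrong root now reports both roots by name instead of a two-item Vec
    let wrong_root = tree.get_node(NodeIndex::new(tree.depth(), 1)?)?;
    match path.verify(0, node, &wrong_root) {
        Err(MerkleError::ConflictingRoots { expected_root, actual_root }) => {
            assert_eq!(expected_root, wrong_root);
            assert_ne!(actual_root, wrong_root);
        },
        other => panic!("expected ConflictingRoots, got {other:?}"),
    }
    Ok(())
}
```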

View File

@@ -1,86 +1,39 @@
use alloc::vec::Vec;
use core::fmt;
use thiserror::Error;
use crate::{
hash::rpo::RpoDigest,
merkle::{LeafIndex, SMT_DEPTH},
Word,
};
// SMT LEAF ERROR
// =================================================================================================
#[derive(Clone, Debug, PartialEq, Eq)]
#[derive(Debug, Error)]
pub enum SmtLeafError {
InconsistentKeys {
entries: Vec<(RpoDigest, Word)>,
key_1: RpoDigest,
key_2: RpoDigest,
},
InvalidNumEntriesForMultiple(usize),
SingleKeyInconsistentWithLeafIndex {
#[error(
"multiple leaf requires all keys to map to the same leaf index but key1 {key_1} and key2 {key_2} map to different indices"
)]
InconsistentMultipleLeafKeys { key_1: RpoDigest, key_2: RpoDigest },
#[error("single leaf key {key} maps to {actual_leaf_index:?} but was expected to map to {expected_leaf_index:?}")]
InconsistentSingleLeafIndices {
key: RpoDigest,
leaf_index: LeafIndex<SMT_DEPTH>,
expected_leaf_index: LeafIndex<SMT_DEPTH>,
actual_leaf_index: LeafIndex<SMT_DEPTH>,
},
MultipleKeysInconsistentWithLeafIndex {
#[error("supplied leaf index {leaf_index_supplied:?} does not match {leaf_index_from_keys:?} for multiple leaf")]
InconsistentMultipleLeafIndices {
leaf_index_from_keys: LeafIndex<SMT_DEPTH>,
leaf_index_supplied: LeafIndex<SMT_DEPTH>,
},
}
#[cfg(feature = "std")]
impl std::error::Error for SmtLeafError {}
impl fmt::Display for SmtLeafError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
use SmtLeafError::*;
match self {
InvalidNumEntriesForMultiple(num_entries) => {
write!(f, "Multiple leaf requires 2 or more entries. Got: {num_entries}")
},
InconsistentKeys { entries, key_1, key_2 } => {
write!(f, "Multiple leaf requires all keys to map to the same leaf index. Offending keys: {key_1} and {key_2}. Entries: {entries:?}.")
},
SingleKeyInconsistentWithLeafIndex { key, leaf_index } => {
write!(
f,
"Single key in leaf inconsistent with leaf index. Key: {key}, leaf index: {}",
leaf_index.value()
)
},
MultipleKeysInconsistentWithLeafIndex {
leaf_index_from_keys,
leaf_index_supplied,
} => {
write!(
f,
"Keys in entries map to leaf index {}, but leaf index {} was supplied",
leaf_index_from_keys.value(),
leaf_index_supplied.value()
)
},
}
}
#[error("multiple leaf requires at least two entries but only {0} were given")]
MultipleLeafRequiresTwoEntries(usize),
}
// SMT PROOF ERROR
// =================================================================================================
#[derive(Clone, Debug, PartialEq, Eq)]
#[derive(Debug, Error)]
pub enum SmtProofError {
InvalidPathLength(usize),
}
#[cfg(feature = "std")]
impl std::error::Error for SmtProofError {}
impl fmt::Display for SmtProofError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
use SmtProofError::*;
match self {
InvalidPathLength(path_length) => {
write!(f, "Invalid Merkle path length. Expected {SMT_DEPTH}, got {path_length}")
},
}
}
#[error("merkle path length {0} does not match SMT depth {SMT_DEPTH}")]
InvalidMerklePathLength(usize),
}
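A minimal sketch of the renamed `SmtLeafError` variants in use, assuming the crate is consumed as `miden_crypto`:

```rust
use miden_crypto::{
    hash::rpo::RpoDigest,
    merkle::{SmtLeaf, SmtLeafError},
    Felt, Word, ONE,
};

fn leaf_error_examples() {
    let key_1 = RpoDigest::from([ONE, ONE, ONE, Felt::new(1)]);
    let key_2 = RpoDigest::from([ONE, ONE, ONE, Felt::new(2)]);
    let value: Word = [ONE; 4];

    // a "multiple" leaf needs at least two entries
    assert!(matches!(
        SmtLeaf::new_multiple(vec![(key_1, value)]),
        Err(SmtLeafError::MultipleLeafRequiresTwoEntries(1))
    ));

    // keys whose most significant element differs map to different leaf indices
    assert!(matches!(
        SmtLeaf::new_multiple(vec![(key_1, value), (key_2, value)]),
        Err(SmtLeafError::InconsistentMultipleLeafKeys { .. })
    ));
}
```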

View File

@@ -31,10 +31,12 @@ impl SmtLeaf {
1 => {
let (key, value) = entries[0];
if LeafIndex::<SMT_DEPTH>::from(key) != leaf_index {
return Err(SmtLeafError::SingleKeyInconsistentWithLeafIndex {
let computed_index = LeafIndex::<SMT_DEPTH>::from(key);
if computed_index != leaf_index {
return Err(SmtLeafError::InconsistentSingleLeafIndices {
key,
leaf_index,
expected_leaf_index: leaf_index,
actual_leaf_index: computed_index,
});
}
@@ -46,7 +48,7 @@ impl SmtLeaf {
// `new_multiple()` checked that all keys map to the same leaf index. We still need
// to ensure that that leaf index is `leaf_index`.
if leaf.index() != leaf_index {
Err(SmtLeafError::MultipleKeysInconsistentWithLeafIndex {
Err(SmtLeafError::InconsistentMultipleLeafIndices {
leaf_index_from_keys: leaf.index(),
leaf_index_supplied: leaf_index,
})
@@ -68,14 +70,14 @@ impl SmtLeaf {
Self::Single((key, value))
}
/// Returns a new single leaf with the specified entry. The leaf index is derived from the
/// Returns a new multiple leaf with the specified entries. The leaf index is derived from the
/// entries' keys.
///
/// # Errors
/// - Returns an error if 2 keys in `entries` map to a different leaf index
pub fn new_multiple(entries: Vec<(RpoDigest, Word)>) -> Result<Self, SmtLeafError> {
if entries.len() < 2 {
return Err(SmtLeafError::InvalidNumEntriesForMultiple(entries.len()));
return Err(SmtLeafError::MultipleLeafRequiresTwoEntries(entries.len()));
}
// Check that all keys map to the same leaf index
@@ -89,8 +91,7 @@ impl SmtLeaf {
let next_leaf_index: LeafIndex<SMT_DEPTH> = next_key.into();
if next_leaf_index != first_leaf_index {
return Err(SmtLeafError::InconsistentKeys {
entries,
return Err(SmtLeafError::InconsistentMultipleLeafKeys {
key_1: first_key,
key_2: next_key,
});

View File

@@ -114,6 +114,11 @@ impl Smt {
<Self as SparseMerkleTree<SMT_DEPTH>>::root(self)
}
/// Returns the number of non-empty leaves in this tree.
pub fn num_leaves(&self) -> usize {
self.leaves.len()
}
/// Returns the leaf to which `key` maps
pub fn get_leaf(&self, key: &RpoDigest) -> SmtLeaf {
<Self as SparseMerkleTree<SMT_DEPTH>>::get_leaf(self, key)
@@ -200,7 +205,7 @@ impl Smt {
<Self as SparseMerkleTree<SMT_DEPTH>>::compute_mutations(self, kv_pairs)
}
/// Apply the prospective mutations computed with [`Smt::compute_mutations()`] to this tree.
/// Applies the prospective mutations computed with [`Smt::compute_mutations()`] to this tree.
///
/// # Errors
/// If `mutations` was computed on a tree with a different root than this one, returns
@@ -214,6 +219,23 @@ impl Smt {
<Self as SparseMerkleTree<SMT_DEPTH>>::apply_mutations(self, mutations)
}
/// Applies the prospective mutations computed with [`Smt::compute_mutations()`] to this tree
/// and returns the reverse mutation set.
///
/// Applying the reverse mutation sets to the updated tree will revert the changes.
///
/// # Errors
/// If `mutations` was computed on a tree with a different root than this one, returns
/// [`MerkleError::ConflictingRoots`] reporting both the root hash the `mutations` were
/// computed against and the actual current root of this tree.
pub fn apply_mutations_with_reversion(
&mut self,
mutations: MutationSet<SMT_DEPTH, RpoDigest, Word>,
) -> Result<MutationSet<SMT_DEPTH, RpoDigest, Word>, MerkleError> {
<Self as SparseMerkleTree<SMT_DEPTH>>::apply_mutations_with_reversion(self, mutations)
}
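A hedged usage sketch of the reversion workflow this method enables; it mirrors the `test_mutations_revert` test further down, and the `miden_crypto` import paths are assumptions.

```rust
use miden_crypto::{
    hash::rpo::RpoDigest,
    merkle::{MerkleError, Smt},
    Felt, EMPTY_WORD, ONE,
};

fn revert_round_trip() -> Result<(), MerkleError> {
    let mut smt = Smt::default();
    let key = RpoDigest::from([ONE, ONE, ONE, Felt::new(1)]);
    smt.insert(key, [ONE; 4]);
    let snapshot = smt.clone();

    // apply a change and keep the reverse mutation set ...
    let mutations = smt.compute_mutations(vec![(key, EMPTY_WORD)]);
    let revert = smt.apply_mutations_with_reversion(mutations)?;
    assert_eq!(revert.old_root(), smt.root());

    // ... then apply the reverse set to return to the snapshot
    smt.apply_mutations(revert)?;
    assert_eq!(smt, snapshot);
    Ok(())
}
```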
// HELPERS
// --------------------------------------------------------------------------------------------
@@ -275,12 +297,12 @@ impl SparseMerkleTree<SMT_DEPTH> for Smt {
.unwrap_or_else(|| EmptySubtreeRoots::get_inner_node(SMT_DEPTH, index.depth()))
}
fn insert_inner_node(&mut self, index: NodeIndex, inner_node: InnerNode) {
self.inner_nodes.insert(index, inner_node);
fn insert_inner_node(&mut self, index: NodeIndex, inner_node: InnerNode) -> Option<InnerNode> {
self.inner_nodes.insert(index, inner_node)
}
fn remove_inner_node(&mut self, index: NodeIndex) {
let _ = self.inner_nodes.remove(&index);
fn remove_inner_node(&mut self, index: NodeIndex) -> Option<InnerNode> {
self.inner_nodes.remove(&index)
}
fn insert_value(&mut self, key: Self::Key, value: Self::Value) -> Option<Self::Value> {

View File

@@ -25,7 +25,7 @@ impl SmtProof {
pub fn new(path: MerklePath, leaf: SmtLeaf) -> Result<Self, SmtProofError> {
let depth: usize = SMT_DEPTH.into();
if path.len() != depth {
return Err(SmtProofError::InvalidPathLength(path.len()));
return Err(SmtProofError::InvalidMerklePathLength(path.len()));
}
Ok(Self { path, leaf })

View File

@@ -1,12 +1,14 @@
use alloc::vec::Vec;
use alloc::{collections::BTreeMap, vec::Vec};
use super::{Felt, LeafIndex, NodeIndex, Rpo256, RpoDigest, Smt, SmtLeaf, EMPTY_WORD, SMT_DEPTH};
use crate::{
merkle::{smt::SparseMerkleTree, EmptySubtreeRoots, MerkleStore},
merkle::{
smt::{NodeMutation, SparseMerkleTree},
EmptySubtreeRoots, MerkleStore, MutationSet,
},
utils::{Deserializable, Serializable},
Word, ONE, WORD_SIZE,
};
// SMT
// --------------------------------------------------------------------------------------------
@@ -412,21 +414,49 @@ fn test_prospective_insertion() {
let mutations = smt.compute_mutations(vec![(key_1, value_1)]);
assert_eq!(mutations.root(), root_1, "prospective root 1 did not match actual root 1");
smt.apply_mutations(mutations).unwrap();
let revert = apply_mutations(&mut smt, mutations);
assert_eq!(smt.root(), root_1, "mutations before and after apply did not match");
assert_eq!(revert.old_root, smt.root(), "reverse mutations old root did not match");
assert_eq!(revert.root(), root_empty, "reverse mutations new root did not match");
assert_eq!(
revert.new_pairs,
BTreeMap::from_iter([(key_1, EMPTY_WORD)]),
"reverse mutations pairs did not match"
);
assert_eq!(
revert.node_mutations,
smt.inner_nodes.keys().map(|key| (*key, NodeMutation::Removal)).collect(),
"reverse mutations inner nodes did not match"
);
let mutations = smt.compute_mutations(vec![(key_2, value_2)]);
assert_eq!(mutations.root(), root_2, "prospective root 2 did not match actual root 2");
let mutations =
smt.compute_mutations(vec![(key_3, EMPTY_WORD), (key_2, value_2), (key_3, value_3)]);
assert_eq!(mutations.root(), root_3, "mutations before and after apply did not match");
smt.apply_mutations(mutations).unwrap();
let old_root = smt.root();
let revert = apply_mutations(&mut smt, mutations);
assert_eq!(revert.old_root, smt.root(), "reverse mutations old root did not match");
assert_eq!(revert.root(), old_root, "reverse mutations new root did not match");
assert_eq!(
revert.new_pairs,
BTreeMap::from_iter([(key_2, EMPTY_WORD), (key_3, EMPTY_WORD)]),
"reverse mutations pairs did not match"
);
// Edge case: multiple values at the same key, where a later pair restores the original value.
let mutations = smt.compute_mutations(vec![(key_3, EMPTY_WORD), (key_3, value_3)]);
assert_eq!(mutations.root(), root_3);
smt.apply_mutations(mutations).unwrap();
let old_root = smt.root();
let revert = apply_mutations(&mut smt, mutations);
assert_eq!(smt.root(), root_3);
assert_eq!(revert.old_root, smt.root(), "reverse mutations old root did not match");
assert_eq!(revert.root(), old_root, "reverse mutations new root did not match");
assert_eq!(
revert.new_pairs,
BTreeMap::from_iter([(key_3, value_3)]),
"reverse mutations pairs did not match"
);
// Test batch updates, and that the order doesn't matter.
let pairs =
@@ -437,8 +467,16 @@ fn test_prospective_insertion() {
root_empty,
"prospective root for batch removal did not match actual root",
);
smt.apply_mutations(mutations).unwrap();
let old_root = smt.root();
let revert = apply_mutations(&mut smt, mutations);
assert_eq!(smt.root(), root_empty, "mutations before and after apply did not match");
assert_eq!(revert.old_root, smt.root(), "reverse mutations old root did not match");
assert_eq!(revert.root(), old_root, "reverse mutations new root did not match");
assert_eq!(
revert.new_pairs,
BTreeMap::from_iter([(key_1, value_1), (key_2, value_2), (key_3, value_3)]),
"reverse mutations pairs did not match"
);
let pairs = vec![(key_3, value_3), (key_1, value_1), (key_2, value_2)];
let mutations = smt.compute_mutations(pairs);
@@ -447,6 +485,72 @@ fn test_prospective_insertion() {
assert_eq!(smt.root(), root_3);
}
#[test]
fn test_mutations_revert() {
let mut smt = Smt::default();
let key_1: RpoDigest = RpoDigest::from([ONE, ONE, ONE, Felt::new(1)]);
let key_2: RpoDigest =
RpoDigest::from([2_u32.into(), 2_u32.into(), 2_u32.into(), Felt::new(2)]);
let key_3: RpoDigest =
RpoDigest::from([0_u32.into(), 0_u32.into(), 0_u32.into(), Felt::new(3)]);
let value_1 = [ONE; WORD_SIZE];
let value_2 = [2_u32.into(); WORD_SIZE];
let value_3 = [3_u32.into(); WORD_SIZE];
smt.insert(key_1, value_1);
smt.insert(key_2, value_2);
let mutations =
smt.compute_mutations(vec![(key_1, EMPTY_WORD), (key_2, value_1), (key_3, value_3)]);
let original = smt.clone();
let revert = smt.apply_mutations_with_reversion(mutations).unwrap();
assert_eq!(revert.old_root, smt.root(), "reverse mutations old root did not match");
assert_eq!(revert.root(), original.root(), "reverse mutations new root did not match");
smt.apply_mutations(revert).unwrap();
assert_eq!(smt, original, "SMT with applied revert mutations did not match original SMT");
}
#[test]
fn test_mutation_set_serialization() {
let mut smt = Smt::default();
let key_1: RpoDigest = RpoDigest::from([ONE, ONE, ONE, Felt::new(1)]);
let key_2: RpoDigest =
RpoDigest::from([2_u32.into(), 2_u32.into(), 2_u32.into(), Felt::new(2)]);
let key_3: RpoDigest =
RpoDigest::from([0_u32.into(), 0_u32.into(), 0_u32.into(), Felt::new(3)]);
let value_1 = [ONE; WORD_SIZE];
let value_2 = [2_u32.into(); WORD_SIZE];
let value_3 = [3_u32.into(); WORD_SIZE];
smt.insert(key_1, value_1);
smt.insert(key_2, value_2);
let mutations =
smt.compute_mutations(vec![(key_1, EMPTY_WORD), (key_2, value_1), (key_3, value_3)]);
let serialized = mutations.to_bytes();
let deserialized =
MutationSet::<SMT_DEPTH, RpoDigest, Word>::read_from_bytes(&serialized).unwrap();
assert_eq!(deserialized, mutations, "deserialized mutations did not match original");
let revert = smt.apply_mutations_with_reversion(mutations).unwrap();
let serialized = revert.to_bytes();
let deserialized =
MutationSet::<SMT_DEPTH, RpoDigest, Word>::read_from_bytes(&serialized).unwrap();
assert_eq!(deserialized, revert, "deserialized mutations did not match original");
}
/// Tests that 2 key-value pairs stored in the same leaf have the same path
#[test]
fn test_smt_path_to_keys_in_same_leaf_are_equal() {
@@ -602,3 +706,19 @@ fn build_multiple_leaf_node(kv_pairs: &[(RpoDigest, Word)]) -> RpoDigest {
Rpo256::hash_elements(&elements)
}
/// Applies mutations with and without reversion to the given SMT, compares the resulting SMTs,
/// and returns the mutation set for reversion.
fn apply_mutations(
smt: &mut Smt,
mutation_set: MutationSet<SMT_DEPTH, RpoDigest, Word>,
) -> MutationSet<SMT_DEPTH, RpoDigest, Word> {
let mut smt2 = smt.clone();
let reversion = smt.apply_mutations_with_reversion(mutation_set.clone()).unwrap();
smt2.apply_mutations(mutation_set).unwrap();
assert_eq!(&smt2, smt);
reversion
}

View File

@@ -1,5 +1,7 @@
use alloc::{collections::BTreeMap, vec::Vec};
use winter_utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable};
use super::{EmptySubtreeRoots, InnerNodeInfo, MerkleError, MerklePath, NodeIndex};
use crate::{
hash::rpo::{Rpo256, RpoDigest},
@@ -40,7 +42,7 @@ pub const SMT_MAX_DEPTH: u8 = 64;
/// Every key maps to one leaf. If there are as many keys as there are leaves, then
/// [Self::Leaf] should be the same type as [Self::Value], as is the case with
/// [crate::merkle::SimpleSmt]. However, if there are more keys than leaves, then [`Self::Leaf`]
/// must accomodate all keys that map to the same leaf.
/// must accommodate all keys that map to the same leaf.
///
/// [SparseMerkleTree] currently doesn't support optimizations that compress Merkle proofs.
pub(crate) trait SparseMerkleTree<const DEPTH: u8> {
@@ -133,9 +135,9 @@ pub(crate) trait SparseMerkleTree<const DEPTH: u8> {
node_hash = Rpo256::merge(&[left, right]);
if node_hash == *EmptySubtreeRoots::entry(DEPTH, node_depth) {
// If a subtree is empty, when can remove the inner node, since it's equal to the
// If a subtree is empty, then can remove the inner node, since it's equal to the
// default value
self.remove_inner_node(index)
self.remove_inner_node(index);
} else {
self.insert_inner_node(index, InnerNode { left, right });
}
@@ -241,7 +243,7 @@ pub(crate) trait SparseMerkleTree<const DEPTH: u8> {
}
}
/// Apply the prospective mutations computed with [`SparseMerkleTree::compute_mutations()`] to
/// Applies the prospective mutations computed with [`SparseMerkleTree::compute_mutations()`] to
/// this tree.
///
/// # Errors
@@ -267,13 +269,20 @@ pub(crate) trait SparseMerkleTree<const DEPTH: u8> {
// Guard against accidentally trying to apply mutations that were computed against a
// different tree, including a stale version of this tree.
if old_root != self.root() {
return Err(MerkleError::ConflictingRoots(vec![old_root, self.root()]));
return Err(MerkleError::ConflictingRoots {
expected_root: self.root(),
actual_root: old_root,
});
}
for (index, mutation) in node_mutations {
match mutation {
Removal => self.remove_inner_node(index),
Addition(node) => self.insert_inner_node(index, node),
Removal => {
self.remove_inner_node(index);
},
Addition(node) => {
self.insert_inner_node(index, node);
},
}
}
@@ -286,6 +295,76 @@ pub(crate) trait SparseMerkleTree<const DEPTH: u8> {
Ok(())
}
/// Applies the prospective mutations computed with [`SparseMerkleTree::compute_mutations()`] to
/// this tree and returns the reverse mutation set. Applying the reverse mutation sets to the
/// updated tree will revert the changes.
///
/// # Errors
/// If `mutations` was computed on a tree with a different root than this one, returns
/// [`MerkleError::ConflictingRoots`] reporting both the root hash the `mutations` were
/// computed against and the actual current root of this tree.
fn apply_mutations_with_reversion(
&mut self,
mutations: MutationSet<DEPTH, Self::Key, Self::Value>,
) -> Result<MutationSet<DEPTH, Self::Key, Self::Value>, MerkleError>
where
Self: Sized,
{
use NodeMutation::*;
let MutationSet {
old_root,
node_mutations,
new_pairs,
new_root,
} = mutations;
// Guard against accidentally trying to apply mutations that were computed against a
// different tree, including a stale version of this tree.
if old_root != self.root() {
return Err(MerkleError::ConflictingRoots {
expected_root: self.root(),
actual_root: old_root,
});
}
let mut reverse_mutations = BTreeMap::new();
for (index, mutation) in node_mutations {
match mutation {
Removal => {
if let Some(node) = self.remove_inner_node(index) {
reverse_mutations.insert(index, Addition(node));
}
},
Addition(node) => {
if let Some(old_node) = self.insert_inner_node(index, node) {
reverse_mutations.insert(index, Addition(old_node));
} else {
reverse_mutations.insert(index, Removal);
}
},
}
}
let mut reverse_pairs = BTreeMap::new();
for (key, value) in new_pairs {
if let Some(old_value) = self.insert_value(key.clone(), value) {
reverse_pairs.insert(key, old_value);
} else {
reverse_pairs.insert(key, Self::EMPTY_VALUE);
}
}
self.set_root(new_root);
Ok(MutationSet {
old_root: new_root,
node_mutations: reverse_mutations,
new_pairs: reverse_pairs,
new_root: old_root,
})
}
// REQUIRED METHODS
// ---------------------------------------------------------------------------------------------
@@ -299,10 +378,10 @@ pub(crate) trait SparseMerkleTree<const DEPTH: u8> {
fn get_inner_node(&self, index: NodeIndex) -> InnerNode;
/// Inserts an inner node at the given index
fn insert_inner_node(&mut self, index: NodeIndex, inner_node: InnerNode);
fn insert_inner_node(&mut self, index: NodeIndex, inner_node: InnerNode) -> Option<InnerNode>;
/// Removes an inner node at the given index
fn remove_inner_node(&mut self, index: NodeIndex);
fn remove_inner_node(&mut self, index: NodeIndex) -> Option<InnerNode>;
/// Inserts a leaf node, and returns the value at the key if already exists
fn insert_value(&mut self, key: Self::Key, value: Self::Value) -> Option<Self::Value>;
@@ -349,7 +428,7 @@ pub(crate) trait SparseMerkleTree<const DEPTH: u8> {
#[derive(Debug, Default, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))]
pub(crate) struct InnerNode {
pub struct InnerNode {
pub left: RpoDigest,
pub right: RpoDigest,
}
@@ -403,7 +482,7 @@ impl<const DEPTH: u8> TryFrom<NodeIndex> for LeafIndex<DEPTH> {
fn try_from(node_index: NodeIndex) -> Result<Self, Self::Error> {
if node_index.depth() != DEPTH {
return Err(MerkleError::InvalidDepth {
return Err(MerkleError::InvalidNodeIndexDepth {
expected: DEPTH,
provided: node_index.depth(),
});
@@ -413,17 +492,29 @@ impl<const DEPTH: u8> TryFrom<NodeIndex> for LeafIndex<DEPTH> {
}
}
impl<const DEPTH: u8> Serializable for LeafIndex<DEPTH> {
fn write_into<W: ByteWriter>(&self, target: &mut W) {
self.index.write_into(target);
}
}
impl<const DEPTH: u8> Deserializable for LeafIndex<DEPTH> {
fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
Ok(Self { index: source.read()? })
}
}
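A minimal round-trip sketch for the new `LeafIndex` (de)serialization, assuming the `Serializable`/`Deserializable` traits are re-exported via `miden_crypto::utils`:

```rust
use miden_crypto::{
    merkle::{LeafIndex, SMT_DEPTH},
    utils::{Deserializable, Serializable},
};

fn leaf_index_round_trip() {
    let index = LeafIndex::<SMT_DEPTH>::new(42).unwrap();
    let bytes = index.to_bytes();
    let restored = LeafIndex::<SMT_DEPTH>::read_from_bytes(&bytes).unwrap();
    assert_eq!(restored.value(), 42);
}
```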
// MUTATIONS
// ================================================================================================
/// A change to an inner node of a [`SparseMerkleTree`] that hasn't yet been applied.
/// A change to an inner node of a sparse Merkle tree that hasn't yet been applied.
/// [`MutationSet`] stores this type in relation to a [`NodeIndex`] to keep track of what changes
/// need to occur at which node indices.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) enum NodeMutation {
/// Corresponds to [`SparseMerkleTree::remove_inner_node()`].
pub enum NodeMutation {
/// Node needs to be removed.
Removal,
/// Corresponds to [`SparseMerkleTree::insert_inner_node()`].
/// Node needs to be inserted.
Addition(InnerNode),
}
@@ -453,9 +544,94 @@ pub struct MutationSet<const DEPTH: u8, K, V> {
}
impl<const DEPTH: u8, K, V> MutationSet<DEPTH, K, V> {
/// Queries the root that was calculated during `SparseMerkleTree::compute_mutations()`. See
/// Returns the SMT root that was calculated during `SparseMerkleTree::compute_mutations()`. See
/// that method for more information.
pub fn root(&self) -> RpoDigest {
self.new_root
}
/// Returns the SMT root before the mutations were applied.
pub fn old_root(&self) -> RpoDigest {
self.old_root
}
/// Returns the set of inner nodes that need to be removed or added.
pub fn node_mutations(&self) -> &BTreeMap<NodeIndex, NodeMutation> {
&self.node_mutations
}
/// Returns the set of top-level key-value pairs that need to be added, updated or deleted
/// (i.e. set to `EMPTY_WORD`).
pub fn new_pairs(&self) -> &BTreeMap<K, V> {
&self.new_pairs
}
}
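A hedged sketch of inspecting a `MutationSet` through the new accessors before deciding whether to apply it; the counts below rely on the assumption that inserting into an empty SMT only adds inner nodes.

```rust
use miden_crypto::{
    hash::rpo::RpoDigest,
    merkle::{NodeMutation, Smt},
    Felt, ONE,
};

fn inspect_mutations() {
    let smt = Smt::default();
    let key = RpoDigest::from([ONE, ONE, ONE, Felt::new(1)]);
    let mutations = smt.compute_mutations(vec![(key, [ONE; 4])]);

    // the set records the root it was computed against ...
    assert_eq!(mutations.old_root(), smt.root());

    // ... the inner-node changes it will perform ...
    let mut additions = 0;
    let mut removals = 0;
    for (_index, mutation) in mutations.node_mutations() {
        match mutation {
            NodeMutation::Removal => removals += 1,
            NodeMutation::Addition(_node) => additions += 1,
        }
    }
    assert_eq!(removals, 0); // inserting into an empty SMT only adds inner nodes
    assert!(additions > 0);

    // ... and the key-value pairs that will be written
    assert_eq!(mutations.new_pairs().len(), 1);
}
```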
// SERIALIZATION
// ================================================================================================
impl Serializable for InnerNode {
fn write_into<W: ByteWriter>(&self, target: &mut W) {
self.left.write_into(target);
self.right.write_into(target);
}
}
impl Deserializable for InnerNode {
fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
let left = source.read()?;
let right = source.read()?;
Ok(Self { left, right })
}
}
impl Serializable for NodeMutation {
fn write_into<W: ByteWriter>(&self, target: &mut W) {
match self {
NodeMutation::Removal => target.write_bool(false),
NodeMutation::Addition(inner_node) => {
target.write_bool(true);
inner_node.write_into(target);
},
}
}
}
impl Deserializable for NodeMutation {
fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
if source.read_bool()? {
let inner_node = source.read()?;
return Ok(NodeMutation::Addition(inner_node));
}
Ok(NodeMutation::Removal)
}
}
impl<const DEPTH: u8, K: Serializable, V: Serializable> Serializable for MutationSet<DEPTH, K, V> {
fn write_into<W: ByteWriter>(&self, target: &mut W) {
target.write(self.old_root);
target.write(self.new_root);
self.node_mutations.write_into(target);
self.new_pairs.write_into(target);
}
}
impl<const DEPTH: u8, K: Deserializable + Ord, V: Deserializable> Deserializable
for MutationSet<DEPTH, K, V>
{
fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
let old_root = source.read()?;
let new_root = source.read()?;
let node_mutations = source.read()?;
let new_pairs = source.read()?;
Ok(Self {
old_root,
node_mutations,
new_pairs,
new_root,
})
}
}

View File

@@ -81,7 +81,7 @@ impl<const DEPTH: u8> SimpleSmt<DEPTH> {
for (idx, (key, value)) in entries.into_iter().enumerate() {
if idx >= max_num_entries {
return Err(MerkleError::InvalidNumEntries(max_num_entries));
return Err(MerkleError::TooManyEntries(max_num_entries));
}
let old_value = tree.insert(LeafIndex::<DEPTH>::new(key)?, value);
@@ -221,7 +221,7 @@ impl<const DEPTH: u8> SimpleSmt<DEPTH> {
<Self as SparseMerkleTree<DEPTH>>::compute_mutations(self, kv_pairs)
}
/// Apply the prospective mutations computed with [`SimpleSmt::compute_mutations()`] to this
/// Applies the prospective mutations computed with [`SimpleSmt::compute_mutations()`] to this
/// tree.
///
/// # Errors
@@ -236,6 +236,23 @@ impl<const DEPTH: u8> SimpleSmt<DEPTH> {
<Self as SparseMerkleTree<DEPTH>>::apply_mutations(self, mutations)
}
/// Applies the prospective mutations computed with [`SimpleSmt::compute_mutations()`] to
/// this tree and returns the reverse mutation set.
///
/// Applying the reverse mutation sets to the updated tree will revert the changes.
///
/// # Errors
/// If `mutations` was computed on a tree with a different root than this one, returns
/// [`MerkleError::ConflictingRoots`] reporting both the root hash the `mutations` were
/// computed against and the actual current root of this tree.
pub fn apply_mutations_with_reversion(
&mut self,
mutations: MutationSet<DEPTH, LeafIndex<DEPTH>, Word>,
) -> Result<MutationSet<DEPTH, LeafIndex<DEPTH>, Word>, MerkleError> {
<Self as SparseMerkleTree<DEPTH>>::apply_mutations_with_reversion(self, mutations)
}
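The same reversion workflow sketched for `SimpleSmt`, keyed by `LeafIndex` instead of `RpoDigest`; constructor and re-export paths are assumptions.

```rust
use miden_crypto::{
    merkle::{LeafIndex, MerkleError, SimpleSmt},
    EMPTY_WORD, ONE,
};

fn simple_smt_revert() -> Result<(), MerkleError> {
    let mut smt = SimpleSmt::<64>::with_leaves([(7, [ONE; 4])])?;
    let snapshot_root = smt.root();

    let key = LeafIndex::<64>::new(7)?;
    let mutations = smt.compute_mutations(vec![(key, EMPTY_WORD)]);
    let revert = smt.apply_mutations_with_reversion(mutations)?;
    assert_eq!(revert.old_root(), smt.root());

    // applying the reverse set restores the original root
    smt.apply_mutations(revert)?;
    assert_eq!(smt.root(), snapshot_root);
    Ok(())
}
```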
/// Inserts a subtree at the specified index. The depth at which the subtree is inserted is
/// computed as `DEPTH - SUBTREE_DEPTH`.
///
@@ -246,7 +263,7 @@ impl<const DEPTH: u8> SimpleSmt<DEPTH> {
subtree: SimpleSmt<SUBTREE_DEPTH>,
) -> Result<RpoDigest, MerkleError> {
if SUBTREE_DEPTH > DEPTH {
return Err(MerkleError::InvalidSubtreeDepth {
return Err(MerkleError::SubtreeDepthExceedsDepth {
subtree_depth: SUBTREE_DEPTH,
tree_depth: DEPTH,
});
@@ -321,12 +338,12 @@ impl<const DEPTH: u8> SparseMerkleTree<DEPTH> for SimpleSmt<DEPTH> {
.unwrap_or_else(|| EmptySubtreeRoots::get_inner_node(DEPTH, index.depth()))
}
fn insert_inner_node(&mut self, index: NodeIndex, inner_node: InnerNode) {
self.inner_nodes.insert(index, inner_node);
fn insert_inner_node(&mut self, index: NodeIndex, inner_node: InnerNode) -> Option<InnerNode> {
self.inner_nodes.insert(index, inner_node)
}
fn remove_inner_node(&mut self, index: NodeIndex) {
let _ = self.inner_nodes.remove(&index);
fn remove_inner_node(&mut self, index: NodeIndex) -> Option<InnerNode> {
self.inner_nodes.remove(&index)
}
fn insert_value(&mut self, key: LeafIndex<DEPTH>, value: Word) -> Option<Word> {

View File

@@ -1,5 +1,7 @@
use alloc::vec::Vec;
use assert_matches::assert_matches;
use super::{
super::{MerkleError, RpoDigest, SimpleSmt},
NodeIndex,
@@ -257,12 +259,12 @@ fn test_simplesmt_fail_on_duplicates() {
// consecutive
let entries = [(1, *first), (1, *second)];
let smt = SimpleSmt::<64>::with_leaves(entries);
assert_eq!(smt.unwrap_err(), MerkleError::DuplicateValuesForIndex(1));
assert_matches!(smt.unwrap_err(), MerkleError::DuplicateValuesForIndex(1));
// not consecutive
let entries = [(1, *first), (5, int_to_leaf(5)), (1, *second)];
let smt = SimpleSmt::<64>::with_leaves(entries);
assert_eq!(smt.unwrap_err(), MerkleError::DuplicateValuesForIndex(1));
assert_matches!(smt.unwrap_err(), MerkleError::DuplicateValuesForIndex(1));
}
}

View File

@@ -136,7 +136,10 @@ impl<T: KvMap<RpoDigest, StoreNode>> MerkleStore<T> {
self.nodes.get(&hash).ok_or(MerkleError::RootNotInStore(hash))?;
for i in (0..index.depth()).rev() {
let node = self.nodes.get(&hash).ok_or(MerkleError::NodeNotInStore(hash, index))?;
let node = self
.nodes
.get(&hash)
.ok_or(MerkleError::NodeIndexNotFoundInStore(hash, index))?;
let bit = (index.value() >> i) & 1;
hash = if bit == 0 { node.left } else { node.right }
@@ -162,7 +165,10 @@ impl<T: KvMap<RpoDigest, StoreNode>> MerkleStore<T> {
self.nodes.get(&hash).ok_or(MerkleError::RootNotInStore(hash))?;
for i in (0..index.depth()).rev() {
let node = self.nodes.get(&hash).ok_or(MerkleError::NodeNotInStore(hash, index))?;
let node = self
.nodes
.get(&hash)
.ok_or(MerkleError::NodeIndexNotFoundInStore(hash, index))?;
let bit = (index.value() >> i) & 1;
hash = if bit == 0 {

View File

@@ -1,3 +1,4 @@
use assert_matches::assert_matches;
use seq_macro::seq;
#[cfg(feature = "std")]
use {
@@ -42,14 +43,14 @@ const VALUES8: [RpoDigest; 8] = [
fn test_root_not_in_store() -> Result<(), MerkleError> {
let mtree = MerkleTree::new(digests_to_words(&VALUES4))?;
let store = MerkleStore::from(&mtree);
assert_eq!(
assert_matches!(
store.get_node(VALUES4[0], NodeIndex::make(mtree.depth(), 0)),
Err(MerkleError::RootNotInStore(VALUES4[0])),
Err(MerkleError::RootNotInStore(root)) if root == VALUES4[0],
"Leaf 0 is not a root"
);
assert_eq!(
assert_matches!(
store.get_path(VALUES4[0], NodeIndex::make(mtree.depth(), 0)),
Err(MerkleError::RootNotInStore(VALUES4[0])),
Err(MerkleError::RootNotInStore(root)) if root == VALUES4[0],
"Leaf 0 is not a root"
);
@@ -64,46 +65,46 @@ fn test_merkle_tree() -> Result<(), MerkleError> {
// STORE LEAVES ARE CORRECT -------------------------------------------------------------------
// checks the leaves in the store corresponds to the expected values
assert_eq!(
store.get_node(mtree.root(), NodeIndex::make(mtree.depth(), 0)),
Ok(VALUES4[0]),
store.get_node(mtree.root(), NodeIndex::make(mtree.depth(), 0)).unwrap(),
VALUES4[0],
"node 0 must be in the tree"
);
assert_eq!(
store.get_node(mtree.root(), NodeIndex::make(mtree.depth(), 1)),
Ok(VALUES4[1]),
store.get_node(mtree.root(), NodeIndex::make(mtree.depth(), 1)).unwrap(),
VALUES4[1],
"node 1 must be in the tree"
);
assert_eq!(
store.get_node(mtree.root(), NodeIndex::make(mtree.depth(), 2)),
Ok(VALUES4[2]),
store.get_node(mtree.root(), NodeIndex::make(mtree.depth(), 2)).unwrap(),
VALUES4[2],
"node 2 must be in the tree"
);
assert_eq!(
store.get_node(mtree.root(), NodeIndex::make(mtree.depth(), 3)),
Ok(VALUES4[3]),
store.get_node(mtree.root(), NodeIndex::make(mtree.depth(), 3)).unwrap(),
VALUES4[3],
"node 3 must be in the tree"
);
// STORE LEAVES MATCH TREE --------------------------------------------------------------------
// sanity check the values returned by the store and the tree
assert_eq!(
mtree.get_node(NodeIndex::make(mtree.depth(), 0)),
store.get_node(mtree.root(), NodeIndex::make(mtree.depth(), 0)),
mtree.get_node(NodeIndex::make(mtree.depth(), 0)).unwrap(),
store.get_node(mtree.root(), NodeIndex::make(mtree.depth(), 0)).unwrap(),
"node 0 must be the same for both MerkleTree and MerkleStore"
);
assert_eq!(
mtree.get_node(NodeIndex::make(mtree.depth(), 1)),
store.get_node(mtree.root(), NodeIndex::make(mtree.depth(), 1)),
mtree.get_node(NodeIndex::make(mtree.depth(), 1)).unwrap(),
store.get_node(mtree.root(), NodeIndex::make(mtree.depth(), 1)).unwrap(),
"node 1 must be the same for both MerkleTree and MerkleStore"
);
assert_eq!(
mtree.get_node(NodeIndex::make(mtree.depth(), 2)),
store.get_node(mtree.root(), NodeIndex::make(mtree.depth(), 2)),
mtree.get_node(NodeIndex::make(mtree.depth(), 2)).unwrap(),
store.get_node(mtree.root(), NodeIndex::make(mtree.depth(), 2)).unwrap(),
"node 2 must be the same for both MerkleTree and MerkleStore"
);
assert_eq!(
mtree.get_node(NodeIndex::make(mtree.depth(), 3)),
store.get_node(mtree.root(), NodeIndex::make(mtree.depth(), 3)),
mtree.get_node(NodeIndex::make(mtree.depth(), 3)).unwrap(),
store.get_node(mtree.root(), NodeIndex::make(mtree.depth(), 3)).unwrap(),
"node 3 must be the same for both MerkleTree and MerkleStore"
);
@@ -115,8 +116,8 @@ fn test_merkle_tree() -> Result<(), MerkleError> {
"Value for merkle path at index 0 must match leaf value"
);
assert_eq!(
mtree.get_path(NodeIndex::make(mtree.depth(), 0)),
Ok(result.path),
mtree.get_path(NodeIndex::make(mtree.depth(), 0)).unwrap(),
result.path,
"merkle path for index 0 must be the same for the MerkleTree and MerkleStore"
);
@@ -126,8 +127,8 @@ fn test_merkle_tree() -> Result<(), MerkleError> {
"Value for merkle path at index 0 must match leaf value"
);
assert_eq!(
mtree.get_path(NodeIndex::make(mtree.depth(), 1)),
Ok(result.path),
mtree.get_path(NodeIndex::make(mtree.depth(), 1)).unwrap(),
result.path,
"merkle path for index 1 must be the same for the MerkleTree and MerkleStore"
);
@@ -137,8 +138,8 @@ fn test_merkle_tree() -> Result<(), MerkleError> {
"Value for merkle path at index 0 must match leaf value"
);
assert_eq!(
mtree.get_path(NodeIndex::make(mtree.depth(), 2)),
Ok(result.path),
mtree.get_path(NodeIndex::make(mtree.depth(), 2)).unwrap(),
result.path,
"merkle path for index 0 must be the same for the MerkleTree and MerkleStore"
);
@@ -148,8 +149,8 @@ fn test_merkle_tree() -> Result<(), MerkleError> {
"Value for merkle path at index 0 must match leaf value"
);
assert_eq!(
mtree.get_path(NodeIndex::make(mtree.depth(), 3)),
Ok(result.path),
mtree.get_path(NodeIndex::make(mtree.depth(), 3)).unwrap(),
result.path,
"merkle path for index 0 must be the same for the MerkleTree and MerkleStore"
);
@@ -240,56 +241,56 @@ fn test_sparse_merkle_tree() -> Result<(), MerkleError> {
// STORE LEAVES ARE CORRECT ==============================================================
// checks the leaves in the store corresponds to the expected values
assert_eq!(
store.get_node(smt.root(), NodeIndex::make(SMT_MAX_DEPTH, 0)),
Ok(VALUES4[0]),
store.get_node(smt.root(), NodeIndex::make(SMT_MAX_DEPTH, 0)).unwrap(),
VALUES4[0],
"node 0 must be in the tree"
);
assert_eq!(
store.get_node(smt.root(), NodeIndex::make(SMT_MAX_DEPTH, 1)),
Ok(VALUES4[1]),
store.get_node(smt.root(), NodeIndex::make(SMT_MAX_DEPTH, 1)).unwrap(),
VALUES4[1],
"node 1 must be in the tree"
);
assert_eq!(
store.get_node(smt.root(), NodeIndex::make(SMT_MAX_DEPTH, 2)),
Ok(VALUES4[2]),
store.get_node(smt.root(), NodeIndex::make(SMT_MAX_DEPTH, 2)).unwrap(),
VALUES4[2],
"node 2 must be in the tree"
);
assert_eq!(
store.get_node(smt.root(), NodeIndex::make(SMT_MAX_DEPTH, 3)),
Ok(VALUES4[3]),
store.get_node(smt.root(), NodeIndex::make(SMT_MAX_DEPTH, 3)).unwrap(),
VALUES4[3],
"node 3 must be in the tree"
);
assert_eq!(
store.get_node(smt.root(), NodeIndex::make(SMT_MAX_DEPTH, 4)),
Ok(RpoDigest::default()),
store.get_node(smt.root(), NodeIndex::make(SMT_MAX_DEPTH, 4)).unwrap(),
RpoDigest::default(),
"unmodified node 4 must be ZERO"
);
// STORE LEAVES MATCH TREE ===============================================================
// sanity check the values returned by the store and the tree
assert_eq!(
smt.get_node(NodeIndex::make(SMT_MAX_DEPTH, 0)),
store.get_node(smt.root(), NodeIndex::make(SMT_MAX_DEPTH, 0)),
smt.get_node(NodeIndex::make(SMT_MAX_DEPTH, 0)).unwrap(),
store.get_node(smt.root(), NodeIndex::make(SMT_MAX_DEPTH, 0)).unwrap(),
"node 0 must be the same for both SparseMerkleTree and MerkleStore"
);
assert_eq!(
smt.get_node(NodeIndex::make(SMT_MAX_DEPTH, 1)),
store.get_node(smt.root(), NodeIndex::make(SMT_MAX_DEPTH, 1)),
smt.get_node(NodeIndex::make(SMT_MAX_DEPTH, 1)).unwrap(),
store.get_node(smt.root(), NodeIndex::make(SMT_MAX_DEPTH, 1)).unwrap(),
"node 1 must be the same for both SparseMerkleTree and MerkleStore"
);
assert_eq!(
smt.get_node(NodeIndex::make(SMT_MAX_DEPTH, 2)),
store.get_node(smt.root(), NodeIndex::make(SMT_MAX_DEPTH, 2)),
smt.get_node(NodeIndex::make(SMT_MAX_DEPTH, 2)).unwrap(),
store.get_node(smt.root(), NodeIndex::make(SMT_MAX_DEPTH, 2)).unwrap(),
"node 2 must be the same for both SparseMerkleTree and MerkleStore"
);
assert_eq!(
smt.get_node(NodeIndex::make(SMT_MAX_DEPTH, 3)),
store.get_node(smt.root(), NodeIndex::make(SMT_MAX_DEPTH, 3)),
smt.get_node(NodeIndex::make(SMT_MAX_DEPTH, 3)).unwrap(),
store.get_node(smt.root(), NodeIndex::make(SMT_MAX_DEPTH, 3)).unwrap(),
"node 3 must be the same for both SparseMerkleTree and MerkleStore"
);
assert_eq!(
smt.get_node(NodeIndex::make(SMT_MAX_DEPTH, 4)),
store.get_node(smt.root(), NodeIndex::make(SMT_MAX_DEPTH, 4)),
smt.get_node(NodeIndex::make(SMT_MAX_DEPTH, 4)).unwrap(),
store.get_node(smt.root(), NodeIndex::make(SMT_MAX_DEPTH, 4)).unwrap(),
"node 4 must be the same for both SparseMerkleTree and MerkleStore"
);
@@ -385,46 +386,46 @@ fn test_add_merkle_paths() -> Result<(), MerkleError> {
// STORE LEAVES ARE CORRECT ==============================================================
// checks the leaves in the store corresponds to the expected values
assert_eq!(
store.get_node(pmt.root(), NodeIndex::make(pmt.max_depth(), 0)),
Ok(VALUES4[0]),
store.get_node(pmt.root(), NodeIndex::make(pmt.max_depth(), 0)).unwrap(),
VALUES4[0],
"node 0 must be in the pmt"
);
assert_eq!(
store.get_node(pmt.root(), NodeIndex::make(pmt.max_depth(), 1)),
Ok(VALUES4[1]),
store.get_node(pmt.root(), NodeIndex::make(pmt.max_depth(), 1)).unwrap(),
VALUES4[1],
"node 1 must be in the pmt"
);
assert_eq!(
store.get_node(pmt.root(), NodeIndex::make(pmt.max_depth(), 2)),
Ok(VALUES4[2]),
store.get_node(pmt.root(), NodeIndex::make(pmt.max_depth(), 2)).unwrap(),
VALUES4[2],
"node 2 must be in the pmt"
);
assert_eq!(
store.get_node(pmt.root(), NodeIndex::make(pmt.max_depth(), 3)),
Ok(VALUES4[3]),
store.get_node(pmt.root(), NodeIndex::make(pmt.max_depth(), 3)).unwrap(),
VALUES4[3],
"node 3 must be in the pmt"
);
// STORE LEAVES MATCH PMT ================================================================
// sanity check the values returned by the store and the pmt
assert_eq!(
pmt.get_node(NodeIndex::make(pmt.max_depth(), 0)),
store.get_node(pmt.root(), NodeIndex::make(pmt.max_depth(), 0)),
pmt.get_node(NodeIndex::make(pmt.max_depth(), 0)).unwrap(),
store.get_node(pmt.root(), NodeIndex::make(pmt.max_depth(), 0)).unwrap(),
"node 0 must be the same for both PartialMerkleTree and MerkleStore"
);
assert_eq!(
pmt.get_node(NodeIndex::make(pmt.max_depth(), 1)),
store.get_node(pmt.root(), NodeIndex::make(pmt.max_depth(), 1)),
pmt.get_node(NodeIndex::make(pmt.max_depth(), 1)).unwrap(),
store.get_node(pmt.root(), NodeIndex::make(pmt.max_depth(), 1)).unwrap(),
"node 1 must be the same for both PartialMerkleTree and MerkleStore"
);
assert_eq!(
pmt.get_node(NodeIndex::make(pmt.max_depth(), 2)),
store.get_node(pmt.root(), NodeIndex::make(pmt.max_depth(), 2)),
pmt.get_node(NodeIndex::make(pmt.max_depth(), 2)).unwrap(),
store.get_node(pmt.root(), NodeIndex::make(pmt.max_depth(), 2)).unwrap(),
"node 2 must be the same for both PartialMerkleTree and MerkleStore"
);
assert_eq!(
pmt.get_node(NodeIndex::make(pmt.max_depth(), 3)),
store.get_node(pmt.root(), NodeIndex::make(pmt.max_depth(), 3)),
pmt.get_node(NodeIndex::make(pmt.max_depth(), 3)).unwrap(),
store.get_node(pmt.root(), NodeIndex::make(pmt.max_depth(), 3)).unwrap(),
"node 3 must be the same for both PartialMerkleTree and MerkleStore"
);
@@ -436,8 +437,8 @@ fn test_add_merkle_paths() -> Result<(), MerkleError> {
"Value for merkle path at index 0 must match leaf value"
);
assert_eq!(
pmt.get_path(NodeIndex::make(pmt.max_depth(), 0)),
Ok(result.path),
pmt.get_path(NodeIndex::make(pmt.max_depth(), 0)).unwrap(),
result.path,
"merkle path for index 0 must be the same for the MerkleTree and MerkleStore"
);
@@ -447,8 +448,8 @@ fn test_add_merkle_paths() -> Result<(), MerkleError> {
"Value for merkle path at index 0 must match leaf value"
);
assert_eq!(
pmt.get_path(NodeIndex::make(pmt.max_depth(), 1)),
Ok(result.path),
pmt.get_path(NodeIndex::make(pmt.max_depth(), 1)).unwrap(),
result.path,
"merkle path for index 1 must be the same for the MerkleTree and MerkleStore"
);
@@ -458,8 +459,8 @@ fn test_add_merkle_paths() -> Result<(), MerkleError> {
"Value for merkle path at index 0 must match leaf value"
);
assert_eq!(
pmt.get_path(NodeIndex::make(pmt.max_depth(), 2)),
Ok(result.path),
pmt.get_path(NodeIndex::make(pmt.max_depth(), 2)).unwrap(),
result.path,
"merkle path for index 0 must be the same for the MerkleTree and MerkleStore"
);
@@ -469,8 +470,8 @@ fn test_add_merkle_paths() -> Result<(), MerkleError> {
"Value for merkle path at index 0 must match leaf value"
);
assert_eq!(
pmt.get_path(NodeIndex::make(pmt.max_depth(), 3)),
Ok(result.path),
pmt.get_path(NodeIndex::make(pmt.max_depth(), 3)).unwrap(),
result.path,
"merkle path for index 0 must be the same for the MerkleTree and MerkleStore"
);
@@ -498,7 +499,7 @@ fn wont_open_to_different_depth_root() {
let store = MerkleStore::from(&mtree);
let index = NodeIndex::root();
let err = store.get_node(root, index).err().unwrap();
assert_eq!(err, MerkleError::RootNotInStore(root));
assert_matches!(err, MerkleError::RootNotInStore(err_root) if err_root == root);
}
#[test]
@@ -537,7 +538,7 @@ fn test_set_node() -> Result<(), MerkleError> {
let value = int_to_node(42);
let index = NodeIndex::make(mtree.depth(), 0);
let new_root = store.set_node(mtree.root(), index, value)?.root;
assert_eq!(store.get_node(new_root, index), Ok(value), "Value must have changed");
assert_eq!(store.get_node(new_root, index).unwrap(), value, "value must have changed");
Ok(())
}
@@ -724,7 +725,7 @@ fn get_leaf_depth_works_with_depth_8() {
assert_eq!(8, store.get_leaf_depth(root, 8, k).unwrap());
}
// flip last bit of a and expect it to return the the same depth, but for an empty node
// flip last bit of a and expect it to return the same depth, but for an empty node
assert_eq!(8, store.get_leaf_depth(root, 8, 0b01101000_u64).unwrap());
// flip fourth bit of a and expect an empty node on depth 4
@@ -745,7 +746,7 @@ fn get_leaf_depth_works_with_depth_8() {
// duplicate the tree on `a` and assert the depth is short-circuited by such sub-tree
let index = NodeIndex::new(8, a).unwrap();
root = store.set_node(root, index, root).unwrap().root;
assert_eq!(Err(MerkleError::DepthTooBig(9)), store.get_leaf_depth(root, 8, a));
assert_matches!(store.get_leaf_depth(root, 8, a).unwrap_err(), MerkleError::DepthTooBig(9));
}
#[test]

View File

@@ -145,8 +145,10 @@ impl RandomCoin for RpoRandomCoin {
self.state[RATE_START] += nonce;
Rpo256::apply_permutation(&mut self.state);
// reset the buffer
self.current = RATE_START;
// reset the buffer and move the next random element pointer to the second rate element.
// this is done as the first rate element will be "biased" via the provided `nonce` to
// contain some number of leading zeros.
self.current = RATE_START + 1;
// determine how many bits are needed to represent valid values in the domain
let v_mask = (domain_size - 1) as u64;

View File

@@ -1,7 +1,9 @@
//! Utilities used in this crate which can also be generally useful downstream.
use alloc::string::String;
use core::fmt::{self, Display, Write};
use core::fmt::{self, Write};
use thiserror::Error;
use super::Word;
@@ -46,36 +48,20 @@ pub fn bytes_to_hex_string<const N: usize>(data: [u8; N]) -> String {
}
/// Defines errors which can occur during parsing of hexadecimal strings.
#[derive(Debug)]
#[derive(Debug, Error)]
pub enum HexParseError {
#[error(
"expected hex data to have length {expected}, including the 0x prefix, found {actual}"
)]
InvalidLength { expected: usize, actual: usize },
#[error("hex encoded data must start with 0x prefix")]
MissingPrefix,
#[error("hex encoded data must contain only characters [a-zA-Z0-9]")]
InvalidChar,
#[error("hex encoded values of a Digest must be inside the field modulus")]
OutOfRange,
}
impl Display for HexParseError {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
match self {
HexParseError::InvalidLength { expected, actual } => {
write!(f, "Expected hex data to have length {expected}, including the 0x prefix. Got {actual}")
},
HexParseError::MissingPrefix => {
write!(f, "Hex encoded data must start with 0x prefix")
},
HexParseError::InvalidChar => {
write!(f, "Hex encoded data must contain characters [a-zA-Z0-9]")
},
HexParseError::OutOfRange => {
write!(f, "Hex encoded values of an RpoDigest must be inside the field modulus")
},
}
}
}
#[cfg(feature = "std")]
impl std::error::Error for HexParseError {}
/// Parses a hex string into an array of bytes of known size.
pub fn hex_to_bytes<const N: usize>(value: &str) -> Result<[u8; N], HexParseError> {
let expected: usize = (N * 2) + 2;