Mirror of https://github.com/arnaucube/miden-crypto.git (synced 2026-01-11 08:31:30 +01:00)

Compare commits (19 commits)
| SHA1 |
|---|
| 38148bd09c |
| a424652ba7 |
| 2a5b8ffb21 |
| 0e85398732 |
| a75dced6e9 |
| 6da2a62b2b |
| f825c23415 |
| 7ee6d7fb93 |
| e4373e54c9 |
| d470a5087b |
| 43b2954d60 |
| 589839fef1 |
| ef3183fc0b |
| 8db71b66d9 |
| 1444bbc0f2 |
| d2181f44c9 |
| c64f43b262 |
| 3909b01993 |
| d74e746a7f |
.github/workflows/test.yml (vendored, 2 lines changed)

@@ -17,7 +17,7 @@ jobs:
 matrix:
   toolchain: [stable, nightly]
   os: [ubuntu]
-  args: [default, no-std]
+  args: [default, smt-hashmaps, no-std]
 timeout-minutes: 30
 steps:
 - uses: actions/checkout@main
CHANGELOG.md (19 lines changed)

@@ -1,11 +1,24 @@
## 0.14.0 (TBD)

- [BREAKING] Increment minimum supported Rust version to 1.84.
- Removed duplicated check in RpoFalcon512 verification (#368).

## 0.13.2 (2025-01-24)

- Made `InnerNode` and `NodeMutation` public. Implemented (de)serialization of `LeafIndex` (#367).

## 0.13.1 (2024-12-26)

- Generate reverse mutations set on applying of mutations set, implemented serialization of `MutationsSet` (#355).

## 0.13.0 (2024-11-24)

- Fixed a bug in the implementation of `draw_integers` for `RpoRandomCoin` (#343).
- [BREAKING] Refactor error messages and use `thiserror` to derive errors (#344).
- [BREAKING] Updated Winterfell dependency to v0.11 (#346).
- Added RPO-STARK based DSA (#349).
- Added benchmarks for DSA implementations (#354).
- Implemented deterministic RPO-STARK based DSA (#358).
- Added support for hashmaps in `Smt` and `SimpleSmt` which gives up to 10x boost in some operations (#363).

## 0.12.0 (2024-10-30)
Cargo.lock (generated, 315 lines changed)

Version bumps in the generated lockfile:

| Package | Old | New |
|---|---|---|
| anstyle-wincon | 3.0.6 | 3.0.7 |
| bit-set | 0.5.3 | 0.8.0 |
| bit-vec | 0.6.3 | 0.8.0 |
| bitflags | 2.6.0 | 2.8.0 |
| cc | 1.2.3 | 1.2.10 |
| clap | 4.5.23 | 4.5.27 |
| clap_builder | 4.5.23 | 4.5.27 |
| clap_derive | 4.5.18 | 4.5.24 |
| crossbeam-deque | 0.8.5 | 0.8.6 |
| crossbeam-utils | 0.8.20 | 0.8.21 |
| crunchy | 0.2.2 | 0.2.3 |
| glob | 0.3.1 | 0.3.2 |
| is-terminal | 0.4.13 | 0.4.15 |
| js-sys | 0.3.76 | 0.3.77 |
| libc | 0.2.168 | 0.2.169 |
| linux-raw-sys | 0.4.14 | 0.4.15 |
| log | 0.4.22 | 0.4.25 |
| proc-macro2 | 1.0.92 | 1.0.93 |
| proptest | 1.5.0 | 1.6.0 |
| quote | 1.0.37 | 1.0.38 |
| rustix | 0.38.42 | 0.38.44 |
| serde | 1.0.216 | 1.0.217 |
| serde_derive | 1.0.216 | 1.0.217 |
| serde_json | 1.0.133 | 1.0.137 |
| syn | 2.0.90 | 2.0.96 |
| tempfile | 3.14.0 | 3.15.0 |
| thiserror | 2.0.6 | 2.0.11 |
| thiserror-impl | 2.0.6 | 2.0.11 |
| wasm-bindgen (and -backend, -macro, -macro-support, -shared) | 0.2.99 | 0.2.100 |
| web-sys | 0.3.76 | 0.3.77 |

Source changes: winter-crypto, winter-math, winter-rand-utils, and winter-utils move from the git source `https://github.com/Al-Kindi-0/winterfell` (branch `al-zk`) at version 0.11.0 to version 0.11.0 from the crates.io registry.

Added packages: allocator-api2 0.2.21, equivalent 1.0.1, foldhash 0.1.4, hashbrown 0.15.2, rustversion 1.0.19.

Removed packages: pin-project-lite, tracing, tracing-attributes, tracing-core, windows-sys 0.52.0 (a single windows-sys version remains and is referenced without a version qualifier), winter-air, winter-fri, winter-maybe-async, winter-prover, winter-verifier.

The miden-crypto package entry gains a hashbrown dependency and drops its winter-air, winter-prover, and winter-verifier dependencies.
Cargo.toml (27 lines changed)

@@ -10,7 +10,7 @@ documentation = "https://docs.rs/miden-crypto/0.14.0"
 categories = ["cryptography", "no-std"]
 keywords = ["miden", "crypto", "hash", "merkle"]
 edition = "2021"
-rust-version = "1.82"
+rust-version = "1.84"

 [[bin]]
 name = "miden-crypto"

@@ -19,10 +19,6 @@ bench = false
 doctest = false
 required-features = ["executable"]

-[[bench]]
-name = "dsa"
-harness = false
-
 [[bench]]
 name = "hash"
 harness = false

@@ -52,6 +48,7 @@ harness = false
 concurrent = ["dep:rayon"]
 default = ["std", "concurrent"]
 executable = ["dep:clap", "dep:rand-utils", "std"]
+smt_hashmaps = ["dep:hashbrown"]
 internal = []
 serde = ["dep:serde", "serde?/alloc", "winter-math/serde"]
 std = [

@@ -67,30 +64,28 @@ std = [
 [dependencies]
 blake3 = { version = "1.5", default-features = false }
 clap = { version = "4.5", optional = true, features = ["derive"] }
 getrandom = { version = "0.2", features = ["js"] }
+hashbrown = { version = "0.15", optional = true, features = ["serde"] }
 num = { version = "0.4", default-features = false, features = ["alloc", "libm"] }
 num-complex = { version = "0.4", default-features = false }
 rand = { version = "0.8", default-features = false }
 rand_chacha = { version = "0.3", default-features = false }
 rand_core = { version = "0.6", default-features = false }
-rand-utils = {git = 'https://github.com/Al-Kindi-0/winterfell', package = "winter-rand-utils" , branch = 'al-zk', optional = true }
+rand-utils = { version = "0.11", package = "winter-rand-utils", optional = true }
 rayon = { version = "1.10", optional = true }
 serde = { version = "1.0", default-features = false, optional = true, features = ["derive"] }
 sha3 = { version = "0.10", default-features = false }
 thiserror = { version = "2.0", default-features = false }
-winter-air = {git = 'https://github.com/Al-Kindi-0/winterfell', branch = 'al-zk' }
-winter-crypto = {git = 'https://github.com/Al-Kindi-0/winterfell', branch = 'al-zk' }
-winter-prover = {git = 'https://github.com/Al-Kindi-0/winterfell', branch = 'al-zk' }
-winter-verifier = {git = 'https://github.com/Al-Kindi-0/winterfell', branch = 'al-zk' }
-winter-math = {git = 'https://github.com/Al-Kindi-0/winterfell', branch = 'al-zk' }
-winter-utils = {git = 'https://github.com/Al-Kindi-0/winterfell', branch = 'al-zk' }
+winter-crypto = { version = "0.11", default-features = false }
+winter-math = { version = "0.11", default-features = false }
+winter-utils = { version = "0.11", default-features = false }

 [dev-dependencies]
 assert_matches = { version = "1.5", default-features = false }
 criterion = { version = "0.5", features = ["html_reports"] }
 getrandom = { version = "0.2", features = ["js"] }
 hex = { version = "0.4", default-features = false, features = ["alloc"] }
-proptest = "1.5"
-rand-utils = {git = 'https://github.com/Al-Kindi-0/winterfell', package = "winter-rand-utils" , branch = 'al-zk' }
+proptest = "1.6"
 rand_chacha = { version = "0.3", default-features = false }
+rand-utils = { version = "0.11", package = "winter-rand-utils" }
 seq-macro = { version = "0.3" }

 [build-dependencies]
LICENSE (2 lines changed)

@@ -1,6 +1,6 @@
 MIT License

-Copyright (c) 2024 Polygon Miden
+Copyright (c) 2025 Polygon Miden

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
Makefile (15 lines changed)

@@ -46,6 +46,9 @@ doc: ## Generate and check documentation
 test-default: ## Run tests with default features
 	$(DEBUG_OVERFLOW_INFO) cargo nextest run --profile default --release --all-features

+.PHONY: test-smt-hashmaps
+test-smt-hashmaps: ## Run tests with `smt_hashmaps` feature enabled
+	$(DEBUG_OVERFLOW_INFO) cargo nextest run --profile default --release --features smt_hashmaps

 .PHONY: test-no-std
 test-no-std: ## Run tests with `no-default-features` (std)

@@ -53,7 +56,7 @@ test-no-std: ## Run tests with `no-default-features` (std)

 .PHONY: test
-test: test-default test-no-std ## Run all tests
+test: test-default test-smt-hashmaps test-no-std ## Run all tests

 # --- checking ------------------------------------------------------------------------------------

@@ -81,6 +84,10 @@ build-sve: ## Build with sve support

 # --- benchmarking --------------------------------------------------------------------------------

-.PHONY: bench-tx
-bench-tx: ## Run crypto benchmarks
-	cargo bench --features="concurrent"
+.PHONY: bench
+bench: ## Run crypto benchmarks
+	cargo bench --features concurrent
+
+.PHONY: bench-smt-concurrent
+bench-smt-concurrent: ## Run SMT benchmarks with concurrent feature
+	cargo run --release --features concurrent,executable -- --size 1000000
README.md

@@ -3,7 +3,7 @@
 [](https://github.com/0xPolygonMiden/crypto/blob/main/LICENSE)
 [](https://github.com/0xPolygonMiden/crypto/actions/workflows/test.yml)
 [](https://github.com/0xPolygonMiden/crypto/actions/workflows/build.yml)
 [](https://www.rust-lang.org/tools/install)
 [](https://crates.io/crates/miden-crypto)

 This crate contains cryptographic primitives used in Polygon Miden.

@@ -63,6 +63,7 @@ This crate can be compiled with the following features:
 - `concurrent` - enabled by default; enables multi-threaded implementation of `Smt::with_entries()` which significantly improves performance on multi-core CPUs.
 - `std` - enabled by default and relies on the Rust standard library.
 - `no_std` - does not rely on the Rust standard library and enables compilation to WebAssembly.
+- `smt_hashmaps` - uses hashbrown hashmaps in the SMT implementation, which significantly improves the performance of SMT updates. Key ordering in SMT iterators is not guaranteed when this feature is enabled.

 All of these features imply the use of [alloc](https://doc.rust-lang.org/alloc/) to support heap-allocated collections.
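As a quick usage reference for the features listed above (this sketch is not part of the diff), the snippet below builds a sparse Merkle tree via `Smt::with_entries()`, the constructor that `concurrent` parallelizes and whose node storage `smt_hashmaps` switches to hashbrown maps. The exact paths and types used here (`miden_crypto::merkle::Smt`, `RpoDigest` keys, `Word` values) are assumptions about the crate's public API and may differ between versions.

```rust
use miden_crypto::hash::rpo::RpoDigest;
use miden_crypto::merkle::Smt;
use miden_crypto::{Felt, Word};

fn main() {
    // A few illustrative key/value pairs; keys are RPO digests, values are Words
    // (arrays of four field elements). Types are assumed, see the note above.
    let entries: Vec<(RpoDigest, Word)> = (1..=4u64)
        .map(|i| {
            let key = RpoDigest::new([Felt::new(i); 4]);
            let value: Word = [Felt::new(i); 4];
            (key, value)
        })
        .collect();

    // With the `concurrent` feature this construction runs multi-threaded; with
    // `smt_hashmaps` the backing store uses hashbrown maps (assumed behavior,
    // per the feature descriptions above).
    let smt = Smt::with_entries(entries).expect("keys are unique");
    println!("SMT root: {:?}", smt.root());
}
```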
@@ -1,7 +1,8 @@
 #include <stddef.h>
 #include <arm_sve.h>
 #include "library.h"
-#include "rpo_hash.h"
+#include "rpo_hash_128bit.h"
+#include "rpo_hash_256bit.h"

 // The STATE_WIDTH of RPO hash is 12x u64 elements.
 // The current generation of SVE-enabled processors - Neoverse V1

@@ -31,48 +32,24 @@
 bool add_constants_and_apply_sbox(uint64_t state[STATE_WIDTH], uint64_t constants[STATE_WIDTH]) {
     const uint64_t vl = svcntd(); // number of u64 numbers in one SVE vector

-    if (vl != 4) {
+    if (vl == 2) {
+        return add_constants_and_apply_sbox_128(state, constants);
+    } else if (vl == 4) {
+        return add_constants_and_apply_sbox_256(state, constants);
+    } else {
         return false;
     }
-
-    svbool_t ptrue = svptrue_b64();
-
-    svuint64_t state1 = svld1(ptrue, state + 0*vl);
-    svuint64_t state2 = svld1(ptrue, state + 1*vl);
-
-    svuint64_t const1 = svld1(ptrue, constants + 0*vl);
-    svuint64_t const2 = svld1(ptrue, constants + 1*vl);
-
-    add_constants(ptrue, &state1, &const1, &state2, &const2, state+8, constants+8);
-    apply_sbox(ptrue, &state1, &state2, state+8);
-
-    svst1(ptrue, state + 0*vl, state1);
-    svst1(ptrue, state + 1*vl, state2);
-
-    return true;
 }

 bool add_constants_and_apply_inv_sbox(uint64_t state[STATE_WIDTH], uint64_t constants[STATE_WIDTH]) {
     const uint64_t vl = svcntd(); // number of u64 numbers in one SVE vector

-    if (vl != 4) {
+    if (vl == 2) {
+        return add_constants_and_apply_inv_sbox_128(state, constants);
+    } else if (vl == 4) {
+        return add_constants_and_apply_inv_sbox_256(state, constants);
+    } else {
         return false;
     }
-
-    svbool_t ptrue = svptrue_b64();
-
-    svuint64_t state1 = svld1(ptrue, state + 0 * vl);
-    svuint64_t state2 = svld1(ptrue, state + 1 * vl);
-
-    svuint64_t const1 = svld1(ptrue, constants + 0 * vl);
-    svuint64_t const2 = svld1(ptrue, constants + 1 * vl);
-
-    add_constants(ptrue, &state1, &const1, &state2, &const2, state + 8, constants + 8);
-    apply_inv_sbox(ptrue, &state1, &state2, state + 8);
-
-    svst1(ptrue, state + 0 * vl, state1);
-    svst1(ptrue, state + 1 * vl, state2);
-
-    return true;
 }
arch/arm64-sve/rpo/rpo_hash_128bit.h (new file, 318 lines)

@@ -0,0 +1,318 @@
#ifndef RPO_SVE_RPO_HASH_128_H
#define RPO_SVE_RPO_HASH_128_H

#include <arm_sve.h>
#include <stddef.h>
#include <stdint.h>
#include <string.h>

#define STATE_WIDTH 12

#define COPY_128(NAME, VIN1, VIN2, VIN3, VIN4, SIN) \
    svuint64_t NAME ## _1 = VIN1; \
    svuint64_t NAME ## _2 = VIN2; \
    svuint64_t NAME ## _3 = VIN3; \
    svuint64_t NAME ## _4 = VIN4; \
    uint64_t NAME ## _tail[4]; \
    memcpy(NAME ## _tail, SIN, 4 * sizeof(uint64_t))

#define MULTIPLY_128(PRED, DEST, OP) \
    mul_128(PRED, &DEST ## _1, &OP ## _1, &DEST ## _2, &OP ## _2, &DEST ## _3, &OP ## _3, &DEST ## _4, &OP ## _4, DEST ## _tail, OP ## _tail)

#define SQUARE_128(PRED, NAME) \
    sq_128(PRED, &NAME ## _1, &NAME ## _2, &NAME ## _3, &NAME ## _4, NAME ## _tail)

#define SQUARE_DEST_128(PRED, DEST, SRC) \
    COPY_128(DEST, SRC ## _1, SRC ## _2, SRC ## _3, SRC ## _4, SRC ## _tail); \
    SQUARE_128(PRED, DEST);

#define POW_ACC_128(PRED, NAME, CNT, TAIL) \
    for (size_t i = 0; i < CNT; i++) { \
        SQUARE_128(PRED, NAME); \
    } \
    MULTIPLY_128(PRED, NAME, TAIL);

#define POW_ACC_DEST(PRED, DEST, CNT, HEAD, TAIL) \
    COPY_128(DEST, HEAD ## _1, HEAD ## _2, HEAD ## _3, HEAD ## _4, HEAD ## _tail); \
    POW_ACC_128(PRED, DEST, CNT, TAIL)

extern inline void add_constants_128(
    svbool_t pg,
    svuint64_t *state1,
    svuint64_t *const1,
    svuint64_t *state2,
    svuint64_t *const2,
    svuint64_t *state3,
    svuint64_t *const3,
    svuint64_t *state4,
    svuint64_t *const4,

    uint64_t *state_tail,
    uint64_t *const_tail
) {
    uint64_t Ms = 0xFFFFFFFF00000001ull;
    svuint64_t Mv = svindex_u64(Ms, 0);

    uint64_t p_1 = Ms - const_tail[0];
    uint64_t p_2 = Ms - const_tail[1];
    uint64_t p_3 = Ms - const_tail[2];
    uint64_t p_4 = Ms - const_tail[3];

    uint64_t x_1, x_2, x_3, x_4;
    uint32_t adj_1 = -__builtin_sub_overflow(state_tail[0], p_1, &x_1);
    uint32_t adj_2 = -__builtin_sub_overflow(state_tail[1], p_2, &x_2);
    uint32_t adj_3 = -__builtin_sub_overflow(state_tail[2], p_3, &x_3);
    uint32_t adj_4 = -__builtin_sub_overflow(state_tail[3], p_4, &x_4);

    state_tail[0] = x_1 - (uint64_t)adj_1;
    state_tail[1] = x_2 - (uint64_t)adj_2;
    state_tail[2] = x_3 - (uint64_t)adj_3;
    state_tail[3] = x_4 - (uint64_t)adj_4;

    svuint64_t p1 = svsub_x(pg, Mv, *const1);
    svuint64_t p2 = svsub_x(pg, Mv, *const2);
    svuint64_t p3 = svsub_x(pg, Mv, *const3);
    svuint64_t p4 = svsub_x(pg, Mv, *const4);

    svuint64_t x1 = svsub_x(pg, *state1, p1);
    svuint64_t x2 = svsub_x(pg, *state2, p2);
    svuint64_t x3 = svsub_x(pg, *state3, p3);
    svuint64_t x4 = svsub_x(pg, *state4, p4);

    svbool_t pt1 = svcmplt_u64(pg, *state1, p1);
    svbool_t pt2 = svcmplt_u64(pg, *state2, p2);
    svbool_t pt3 = svcmplt_u64(pg, *state3, p3);
    svbool_t pt4 = svcmplt_u64(pg, *state4, p4);

    *state1 = svsub_m(pt1, x1, (uint32_t)-1);
    *state2 = svsub_m(pt2, x2, (uint32_t)-1);
    *state3 = svsub_m(pt3, x3, (uint32_t)-1);
    *state4 = svsub_m(pt4, x4, (uint32_t)-1);
}

extern inline void mul_128(
    svbool_t pg,
    svuint64_t *r1,
    const svuint64_t *op1,
    svuint64_t *r2,
    const svuint64_t *op2,
    svuint64_t *r3,
    const svuint64_t *op3,
    svuint64_t *r4,
    const svuint64_t *op4,
    uint64_t *r_tail,
    const uint64_t *op_tail
) {
    __uint128_t x_1 = r_tail[0];
    __uint128_t x_2 = r_tail[1];
    __uint128_t x_3 = r_tail[2];
    __uint128_t x_4 = r_tail[3];

    x_1 *= (__uint128_t) op_tail[0];
    x_2 *= (__uint128_t) op_tail[1];
    x_3 *= (__uint128_t) op_tail[2];
    x_4 *= (__uint128_t) op_tail[3];

    uint64_t x0_1 = x_1;
    uint64_t x0_2 = x_2;
    uint64_t x0_3 = x_3;
    uint64_t x0_4 = x_4;

    svuint64_t l1 = svmul_x(pg, *r1, *op1);
    svuint64_t l2 = svmul_x(pg, *r2, *op2);
    svuint64_t l3 = svmul_x(pg, *r3, *op3);
    svuint64_t l4 = svmul_x(pg, *r4, *op4);

    uint64_t x1_1 = (x_1 >> 64);
    uint64_t x1_2 = (x_2 >> 64);
    uint64_t x1_3 = (x_3 >> 64);
    uint64_t x1_4 = (x_4 >> 64);

    uint64_t a_1, a_2, a_3, a_4;
    uint64_t e_1 = __builtin_add_overflow(x0_1, (x0_1 << 32), &a_1);
    uint64_t e_2 = __builtin_add_overflow(x0_2, (x0_2 << 32), &a_2);
    uint64_t e_3 = __builtin_add_overflow(x0_3, (x0_3 << 32), &a_3);
    uint64_t e_4 = __builtin_add_overflow(x0_4, (x0_4 << 32), &a_4);

    svuint64_t ls1 = svlsl_x(pg, l1, 32);
    svuint64_t ls2 = svlsl_x(pg, l2, 32);
    svuint64_t ls3 = svlsl_x(pg, l3, 32);
    svuint64_t ls4 = svlsl_x(pg, l4, 32);

    svuint64_t a1 = svadd_x(pg, l1, ls1);
    svuint64_t a2 = svadd_x(pg, l2, ls2);
    svuint64_t a3 = svadd_x(pg, l3, ls3);
    svuint64_t a4 = svadd_x(pg, l4, ls4);

    svbool_t e1 = svcmplt(pg, a1, l1);
    svbool_t e2 = svcmplt(pg, a2, l2);
    svbool_t e3 = svcmplt(pg, a3, l3);
    svbool_t e4 = svcmplt(pg, a4, l4);

    svuint64_t as1 = svlsr_x(pg, a1, 32);
    svuint64_t as2 = svlsr_x(pg, a2, 32);
    svuint64_t as3 = svlsr_x(pg, a3, 32);
    svuint64_t as4 = svlsr_x(pg, a4, 32);

    svuint64_t b1 = svsub_x(pg, a1, as1);
    svuint64_t b2 = svsub_x(pg, a2, as2);
    svuint64_t b3 = svsub_x(pg, a3, as3);
    svuint64_t b4 = svsub_x(pg, a4, as4);

    b1 = svsub_m(e1, b1, 1);
    b2 = svsub_m(e2, b2, 1);
    b3 = svsub_m(e3, b3, 1);
    b4 = svsub_m(e4, b4, 1);

    uint64_t b_1 = a_1 - (a_1 >> 32) - e_1;
    uint64_t b_2 = a_2 - (a_2 >> 32) - e_2;
    uint64_t b_3 = a_3 - (a_3 >> 32) - e_3;
    uint64_t b_4 = a_4 - (a_4 >> 32) - e_4;

    uint64_t r_1, r_2, r_3, r_4;
    uint32_t c_1 = __builtin_sub_overflow(x1_1, b_1, &r_1);
    uint32_t c_2 = __builtin_sub_overflow(x1_2, b_2, &r_2);
    uint32_t c_3 = __builtin_sub_overflow(x1_3, b_3, &r_3);
    uint32_t c_4 = __builtin_sub_overflow(x1_4, b_4, &r_4);

    svuint64_t h1 = svmulh_x(pg, *r1, *op1);
    svuint64_t h2 = svmulh_x(pg, *r2, *op2);
    svuint64_t h3 = svmulh_x(pg, *r3, *op3);
    svuint64_t h4 = svmulh_x(pg, *r4, *op4);

    svuint64_t tr1 = svsub_x(pg, h1, b1);
    svuint64_t tr2 = svsub_x(pg, h2, b2);
    svuint64_t tr3 = svsub_x(pg, h3, b3);
    svuint64_t tr4 = svsub_x(pg, h4, b4);

    svbool_t c1 = svcmplt_u64(pg, h1, b1);
    svbool_t c2 = svcmplt_u64(pg, h2, b2);
    svbool_t c3 = svcmplt_u64(pg, h3, b3);
    svbool_t c4 = svcmplt_u64(pg, h4, b4);

    *r1 = svsub_m(c1, tr1, (uint32_t) -1);
    *r2 = svsub_m(c2, tr2, (uint32_t) -1);
    *r3 = svsub_m(c3, tr3, (uint32_t) -1);
    *r4 = svsub_m(c4, tr4, (uint32_t) -1);

    uint32_t minus1_1 = 0 - c_1;
    uint32_t minus1_2 = 0 - c_2;
    uint32_t minus1_3 = 0 - c_3;
    uint32_t minus1_4 = 0 - c_4;

    r_tail[0] = r_1 - (uint64_t)minus1_1;
    r_tail[1] = r_2 - (uint64_t)minus1_2;
    r_tail[2] = r_3 - (uint64_t)minus1_3;
    r_tail[3] = r_4 - (uint64_t)minus1_4;
}

extern inline void sq_128(svbool_t pg, svuint64_t *a, svuint64_t *b, svuint64_t *c, svuint64_t *d, uint64_t *e) {
    mul_128(pg, a, a, b, b, c, c, d, d, e, e);
}

extern inline void apply_sbox_128(
    svbool_t pg,
    svuint64_t *state1,
    svuint64_t *state2,
    svuint64_t *state3,
    svuint64_t *state4,
    uint64_t *state_tail
) {
    COPY_128(x, *state1, *state2, *state3, *state4, state_tail);  // copy input to x
    SQUARE_128(pg, x);                                            // x contains input^2
    mul_128(pg, state1, &x_1, state2, &x_2, state3, &x_3, state4, &x_4, state_tail, x_tail);  // state contains input^3
    SQUARE_128(pg, x);                                            // x contains input^4
    mul_128(pg, state1, &x_1, state2, &x_2, state3, &x_3, state4, &x_4, state_tail, x_tail);  // state contains input^7
}

extern inline void apply_inv_sbox_128(
    svbool_t pg,
    svuint64_t *state1,
    svuint64_t *state2,
    svuint64_t *state3,
    svuint64_t *state4,
    uint64_t *state_tail
) {
    // base^10
    COPY_128(t1, *state1, *state2, *state3, *state4, state_tail);
    SQUARE_128(pg, t1);

    // base^100
    SQUARE_DEST_128(pg, t2, t1);

    // base^100100
    POW_ACC_DEST(pg, t3, 3, t2, t2);

    // base^100100100100
    POW_ACC_DEST(pg, t4, 6, t3, t3);

    // compute base^100100100100100100100100
    POW_ACC_DEST(pg, t5, 12, t4, t4);

    // compute base^100100100100100100100100100100
    POW_ACC_DEST(pg, t6, 6, t5, t3);

    // compute base^1001001001001001001001001001000100100100100100100100100100100
    POW_ACC_DEST(pg, t7, 31, t6, t6);

    // compute base^1001001001001001001001001001000110110110110110110110110110110111
    SQUARE_128(pg, t7);
    MULTIPLY_128(pg, t7, t6);
    SQUARE_128(pg, t7);
    SQUARE_128(pg, t7);
    MULTIPLY_128(pg, t7, t1);
    MULTIPLY_128(pg, t7, t2);
    mul_128(pg, state1, &t7_1, state2, &t7_2, state3, &t7_3, state4, &t7_4, state_tail, t7_tail);
}

bool add_constants_and_apply_sbox_128(uint64_t state[STATE_WIDTH], uint64_t constants[STATE_WIDTH]) {
    const uint64_t vl = 2; // number of u64 numbers in one 128 bit SVE vector
    svbool_t ptrue = svptrue_b64();

    svuint64_t state1 = svld1(ptrue, state + 0 * vl);
    svuint64_t state2 = svld1(ptrue, state + 1 * vl);
    svuint64_t state3 = svld1(ptrue, state + 2 * vl);
    svuint64_t state4 = svld1(ptrue, state + 3 * vl);

    svuint64_t const1 = svld1(ptrue, constants + 0 * vl);
    svuint64_t const2 = svld1(ptrue, constants + 1 * vl);
    svuint64_t const3 = svld1(ptrue, constants + 2 * vl);
    svuint64_t const4 = svld1(ptrue, constants + 3 * vl);

    add_constants_128(ptrue, &state1, &const1, &state2, &const2, &state3, &const3, &state4, &const4, state + 8, constants + 8);
    apply_sbox_128(ptrue, &state1, &state2, &state3, &state4, state + 8);

    svst1(ptrue, state + 0 * vl, state1);
    svst1(ptrue, state + 1 * vl, state2);
    svst1(ptrue, state + 2 * vl, state3);
    svst1(ptrue, state + 3 * vl, state4);

    return true;
}

bool add_constants_and_apply_inv_sbox_128(uint64_t state[STATE_WIDTH], uint64_t constants[STATE_WIDTH]) {
    const uint64_t vl = 2; // number of u64 numbers in one 128 bit SVE vector
    svbool_t ptrue = svptrue_b64();

    svuint64_t state1 = svld1(ptrue, state + 0 * vl);
    svuint64_t state2 = svld1(ptrue, state + 1 * vl);
    svuint64_t state3 = svld1(ptrue, state + 2 * vl);
    svuint64_t state4 = svld1(ptrue, state + 3 * vl);

    svuint64_t const1 = svld1(ptrue, constants + 0 * vl);
    svuint64_t const2 = svld1(ptrue, constants + 1 * vl);
    svuint64_t const3 = svld1(ptrue, constants + 2 * vl);
    svuint64_t const4 = svld1(ptrue, constants + 3 * vl);

    add_constants_128(ptrue, &state1, &const1, &state2, &const2, &state3, &const3, &state4, &const4, state + 8, constants + 8);
    apply_inv_sbox_128(ptrue, &state1, &state2, &state3, &state4, state + 8);

    svst1(ptrue, state + 0 * vl, state1);
    svst1(ptrue, state + 1 * vl, state2);
    svst1(ptrue, state + 2 * vl, state3);
    svst1(ptrue, state + 3 * vl, state4);

    return true;
}

#endif //RPO_SVE_RPO_HASH_128_H
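For readers tracing the lane arithmetic above: `mul_128` multiplies two 64-bit field elements per lane and folds the 128-bit product back into 64 bits modulo p = 2^64 - 2^32 + 1, the constant `0xFFFFFFFF00000001` used in `add_constants_128`. The scalar Rust sketch below is not part of the diff; it mirrors the same carry/borrow sequence one lane at a time, and describing it as a Montgomery-style reduction (i.e., the result carries a factor of 2^-64 mod p, matching how the backing winter-math field stores elements) is an assumption about the surrounding representation.

```rust
/// Fold a 128-bit value into 64 bits modulo p = 2^64 - 2^32 + 1, using the same
/// carry/borrow steps that `mul_128` performs per lane with SVE intrinsics.
/// Assumption: this is a Montgomery-style reduction, so the result equals
/// x * 2^-64 mod p when inputs are held in Montgomery form.
fn mont_reduce(x: u128) -> u64 {
    let x_lo = x as u64; // low 64 bits of the product
    let x_hi = (x >> 64) as u64; // high 64 bits of the product

    // a = x_lo + (x_lo << 32) with its carry (matches __builtin_add_overflow).
    let (a, carry) = x_lo.overflowing_add(x_lo << 32);
    // b = a - (a >> 32) - carry, folded into 64 bits using 2^64 ≡ 2^32 - 1 (mod p).
    let b = a.wrapping_sub(a >> 32).wrapping_sub(carry as u64);

    // r = x_hi - b; on borrow, subtract 2^32 - 1 once more, which is exactly the
    // svsub_m(..., (uint32_t)-1) adjustment in the SVE code.
    let (r, borrow) = x_hi.overflowing_sub(b);
    r.wrapping_sub(0u32.wrapping_sub(borrow as u32) as u64)
}

/// Multiply two lane values and reduce, mirroring one lane of `mul_128`.
fn mont_mul(lhs: u64, rhs: u64) -> u64 {
    mont_reduce((lhs as u128) * (rhs as u128))
}

fn main() {
    // Sanity check: reducing v << 64 must give back v, since the reduction
    // divides by 2^64 modulo p.
    let v = 0x1234_5678_9abc_def0u64;
    assert_eq!(mont_reduce((v as u128) << 64), v);
    println!("mont_mul(v, v) = {:#x}", mont_mul(v, v));
}
```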
@@ -1,38 +1,40 @@
-#ifndef RPO_SVE_RPO_HASH_H
-#define RPO_SVE_RPO_HASH_H
+#ifndef RPO_SVE_RPO_HASH_256_H
+#define RPO_SVE_RPO_HASH_256_H

 #include <arm_sve.h>
 #include <stddef.h>
 #include <stdint.h>
 #include <string.h>

-#define COPY(NAME, VIN1, VIN2, SIN3) \
+#define STATE_WIDTH 12
+
+#define COPY_256(NAME, VIN1, VIN2, SIN3) \
     svuint64_t NAME ## _1 = VIN1; \
     svuint64_t NAME ## _2 = VIN2; \
     uint64_t NAME ## _3[4]; \
     memcpy(NAME ## _3, SIN3, 4 * sizeof(uint64_t))

-#define MULTIPLY(PRED, DEST, OP) \
-    mul(PRED, &DEST ## _1, &OP ## _1, &DEST ## _2, &OP ## _2, DEST ## _3, OP ## _3)
+#define MULTIPLY_256(PRED, DEST, OP) \
+    mul_256(PRED, &DEST ## _1, &OP ## _1, &DEST ## _2, &OP ## _2, DEST ## _3, OP ## _3)

-#define SQUARE(PRED, NAME) \
-    sq(PRED, &NAME ## _1, &NAME ## _2, NAME ## _3)
+#define SQUARE_256(PRED, NAME) \
+    sq_256(PRED, &NAME ## _1, &NAME ## _2, NAME ## _3)

-#define SQUARE_DEST(PRED, DEST, SRC) \
-    COPY(DEST, SRC ## _1, SRC ## _2, SRC ## _3); \
-    SQUARE(PRED, DEST);
+#define SQUARE_DEST_256(PRED, DEST, SRC) \
+    COPY_256(DEST, SRC ## _1, SRC ## _2, SRC ## _3); \
+    SQUARE_256(PRED, DEST);

 #define POW_ACC(PRED, NAME, CNT, TAIL) \
     for (size_t i = 0; i < CNT; i++) { \
-        SQUARE(PRED, NAME); \
+        SQUARE_256(PRED, NAME); \
     } \
-    MULTIPLY(PRED, NAME, TAIL);
+    MULTIPLY_256(PRED, NAME, TAIL);

-#define POW_ACC_DEST(PRED, DEST, CNT, HEAD, TAIL) \
-    COPY(DEST, HEAD ## _1, HEAD ## _2, HEAD ## _3); \
+#define POW_ACC_DEST_256(PRED, DEST, CNT, HEAD, TAIL) \
+    COPY_256(DEST, HEAD ## _1, HEAD ## _2, HEAD ## _3); \
     POW_ACC(PRED, DEST, CNT, TAIL)

-extern inline void add_constants(
+extern inline void add_constants_256(
     svbool_t pg,
     svuint64_t *state1,
     svuint64_t *const1,

@@ -73,7 +75,7 @@ extern inline void add_constants(
     *state2 = svsub_m(pt2, x2, (uint32_t)-1);
 }

-extern inline void mul(
+extern inline void mul_256(
     svbool_t pg,
     svuint64_t *r1,
     const svuint64_t *op1,

@@ -163,59 +165,97 @@ extern inline void mul(
     r3[3] = r_4 - (uint64_t)minus1_4;
 }

-extern inline void sq(svbool_t pg, svuint64_t *a, svuint64_t *b, uint64_t *c) {
-    mul(pg, a, a, b, b, c, c);
+extern inline void sq_256(svbool_t pg, svuint64_t *a, svuint64_t *b, uint64_t *c) {
+    mul_256(pg, a, a, b, b, c, c);
 }

-extern inline void apply_sbox(
+extern inline void apply_sbox_256(
     svbool_t pg,
     svuint64_t *state1,
     svuint64_t *state2,
     uint64_t *state3
 ) {
-    COPY(x, *state1, *state2, state3); // copy input to x
-    SQUARE(pg, x); // x contains input^2
-    mul(pg, state1, &x_1, state2, &x_2, state3, x_3); // state contains input^3
-    SQUARE(pg, x); // x contains input^4
-    mul(pg, state1, &x_1, state2, &x_2, state3, x_3); // state contains input^7
+    COPY_256(x, *state1, *state2, state3); // copy input to x
+    SQUARE_256(pg, x); // x contains input^2
+    mul_256(pg, state1, &x_1, state2, &x_2, state3, x_3); // state contains input^3
+    SQUARE_256(pg, x); // x contains input^4
+    mul_256(pg, state1, &x_1, state2, &x_2, state3, x_3); // state contains input^7
 }

-extern inline void apply_inv_sbox(
+extern inline void apply_inv_sbox_256(
     svbool_t pg,
     svuint64_t *state_1,
     svuint64_t *state_2,
     uint64_t *state_3
 ) {
     // base^10
-    COPY(t1, *state_1, *state_2, state_3);
-    SQUARE(pg, t1);
+    COPY_256(t1, *state_1, *state_2, state_3);
+    SQUARE_256(pg, t1);

     // base^100
-    SQUARE_DEST(pg, t2, t1);
+    SQUARE_DEST_256(pg, t2, t1);

     // base^100100
-    POW_ACC_DEST(pg, t3, 3, t2, t2);
+    POW_ACC_DEST_256(pg, t3, 3, t2, t2);

     // base^100100100100
-    POW_ACC_DEST(pg, t4, 6, t3, t3);
+    POW_ACC_DEST_256(pg, t4, 6, t3, t3);

     // compute base^100100100100100100100100
-    POW_ACC_DEST(pg, t5, 12, t4, t4);
+    POW_ACC_DEST_256(pg, t5, 12, t4, t4);

     // compute base^100100100100100100100100100100
-    POW_ACC_DEST(pg, t6, 6, t5, t3);
+    POW_ACC_DEST_256(pg, t6, 6, t5, t3);

     // compute base^1001001001001001001001001001000100100100100100100100100100100
-    POW_ACC_DEST(pg, t7, 31, t6, t6);
+    POW_ACC_DEST_256(pg, t7, 31, t6, t6);

     // compute base^1001001001001001001001001001000110110110110110110110110110110111
-    SQUARE(pg, t7);
-    MULTIPLY(pg, t7, t6);
-    SQUARE(pg, t7);
-    SQUARE(pg, t7);
-    MULTIPLY(pg, t7, t1);
-    MULTIPLY(pg, t7, t2);
-    mul(pg, state_1, &t7_1, state_2, &t7_2, state_3, t7_3);
+    SQUARE_256(pg, t7);
+    MULTIPLY_256(pg, t7, t6);
+    SQUARE_256(pg, t7);
+    SQUARE_256(pg, t7);
+    MULTIPLY_256(pg, t7, t1);
+    MULTIPLY_256(pg, t7, t2);
+    mul_256(pg, state_1, &t7_1, state_2, &t7_2, state_3, t7_3);
 }

-#endif //RPO_SVE_RPO_HASH_H
+bool add_constants_and_apply_sbox_256(uint64_t state[STATE_WIDTH], uint64_t constants[STATE_WIDTH]) {
+    const uint64_t vl = 4; // number of u64 numbers in one 128 bit SVE vector
+    svbool_t ptrue = svptrue_b64();
+
+    svuint64_t state1 = svld1(ptrue, state + 0 * vl);
+    svuint64_t state2 = svld1(ptrue, state + 1 * vl);
+
+    svuint64_t const1 = svld1(ptrue, constants + 0 * vl);
+    svuint64_t const2 = svld1(ptrue, constants + 1 * vl);
+
+    add_constants_256(ptrue, &state1, &const1, &state2, &const2, state + 8, constants + 8);
+    apply_sbox_256(ptrue, &state1, &state2, state + 8);
+
+    svst1(ptrue, state + 0 * vl, state1);
+    svst1(ptrue, state + 1 * vl, state2);
+
+    return true;
+}
+
+bool add_constants_and_apply_inv_sbox_256(uint64_t state[STATE_WIDTH], uint64_t constants[STATE_WIDTH]) {
+    const uint64_t vl = 4; // number of u64 numbers in one 128 bit SVE vector
+    svbool_t ptrue = svptrue_b64();
+
+    svuint64_t state1 = svld1(ptrue, state + 0 * vl);
+    svuint64_t state2 = svld1(ptrue, state + 1 * vl);
+
+    svuint64_t const1 = svld1(ptrue, constants + 0 * vl);
+    svuint64_t const2 = svld1(ptrue, constants + 1 * vl);
+
+    add_constants_256(ptrue, &state1, &const1, &state2, &const2, state + 8, constants + 8);
+    apply_inv_sbox_256(ptrue, &state1, &state2, state + 8);
+
+    svst1(ptrue, state + 0 * vl, state1);
+    svst1(ptrue, state + 1 * vl, state2);
+
+    return true;
+}
+
+#endif //RPO_SVE_RPO_HASH_256_H
@@ -1,6 +1,4 @@
-# Benchmarks
-
-## Miden VM Hash Functions
+# Miden VM Hash Functions
 In the Miden VM, we make use of different hash functions. Some of these are "traditional" hash functions, like `BLAKE3`, which are optimized for out-of-STARK performance, while others are algebraic hash functions, like `Rescue Prime`, and are more optimized for a better performance inside the STARK. In what follows, we benchmark several such hash functions and compare against other constructions that are used by other proving systems. More precisely, we benchmark:

 * **BLAKE3** as specified [here](https://github.com/BLAKE3-team/BLAKE3-specs/blob/master/blake3.pdf) and implemented [here](https://github.com/BLAKE3-team/BLAKE3) (with a wrapper exposed via this crate).

@@ -10,41 +8,43 @@ In the Miden VM, we make use of different hash functions. Some of these are "tra
 * **Rescue Prime Optimized (RPO)** as specified [here](https://eprint.iacr.org/2022/1577) and implemented in this crate.
 * **Rescue Prime Extended (RPX)** a variant of the [xHash](https://eprint.iacr.org/2023/1045) hash function as implemented in this crate.

-### Comparison and Instructions
+## Comparison and Instructions

-#### Comparison
+### Comparison
 We benchmark the above hash functions using two scenarios. The first is a 2-to-1 $(a,b)\mapsto h(a,b)$ hashing where both $a$, $b$ and $h(a,b)$ are the digests corresponding to each of the hash functions.
 The second scenario is that of sequential hashing where we take a sequence of length $100$ field elements and hash these to produce a single digest. The digests are $4$ field elements in a prime field with modulus $2^{64} - 2^{32} + 1$ (i.e., 32 bytes) for Poseidon, Rescue Prime and RPO, and an array `[u8; 32]` for SHA3 and BLAKE3.

-##### Scenario 1: 2-to-1 hashing `h(a,b)`
+#### Scenario 1: 2-to-1 hashing `h(a,b)`

 | Function            | BLAKE3 | SHA3    | Poseidon  | Rp64_256  | RPO_256 | RPX_256 |
 | ------------------- | ------ | ------- | --------- | --------- | ------- | ------- |
 | Apple M1 Pro        | 76 ns  | 245 ns  | 1.5 µs    | 9.1 µs    | 5.2 µs  | 2.7 µs  |
 | Apple M2 Max        | 71 ns  | 233 ns  | 1.3 µs    | 7.9 µs    | 4.6 µs  | 2.4 µs  |
 | Amazon Graviton 3   | 108 ns |         |           |           | 5.3 µs  | 3.1 µs  |
+| Amazon Graviton 4   | 96 ns  |         |           |           | 5.1 µs  | 2.8 µs  |
 | AMD Ryzen 9 5950X   | 64 ns  | 273 ns  | 1.2 µs    | 9.1 µs    | 5.5 µs  |         |
 | AMD EPYC 9R14       | 83 ns  |         |           |           | 4.3 µs  | 2.4 µs  |
 | Intel Core i5-8279U | 68 ns  | 536 ns  | 2.0 µs    | 13.6 µs   | 8.5 µs  | 4.4 µs  |
 | Intel Xeon 8375C    | 67 ns  |         |           |           | 8.2 µs  |         |

-##### Scenario 2: Sequential hashing of 100 elements `h([a_0,...,a_99])`
+#### Scenario 2: Sequential hashing of 100 elements `h([a_0,...,a_99])`

 | Function            | BLAKE3 | SHA3    | Poseidon  | Rp64_256  | RPO_256 | RPX_256 |
 | ------------------- | -------| ------- | --------- | --------- | ------- | ------- |
 | Apple M1 Pro        | 1.0 µs | 1.5 µs  | 19.4 µs   | 118 µs    | 69 µs   | 35 µs   |
 | Apple M2 Max        | 0.9 µs | 1.5 µs  | 17.4 µs   | 103 µs    | 60 µs   | 31 µs   |
 | Amazon Graviton 3   | 1.4 µs |         |           |           | 69 µs   | 41 µs   |
+| Amazon Graviton 4   | 1.2 µs |         |           |           | 67 µs   | 36 µs   |
 | AMD Ryzen 9 5950X   | 0.8 µs | 1.7 µs  | 15.7 µs   | 120 µs    | 72 µs   |         |
 | AMD EPYC 9R14       | 0.9 µs |         |           |           | 56 µs   | 32 µs   |
 | Intel Core i5-8279U | 0.9 µs |         |           |           | 107 µs  | 56 µs   |
 | Intel Xeon 8375C    | 0.8 µs |         |           |           | 110 µs  |         |

 Notes:
-- On Graviton 3, RPO256 and RPX256 are run with SVE acceleration enabled.
+- On Graviton 3 and 4, RPO256 and RPX256 are run with SVE acceleration enabled.
 - On AMD EPYC 9R14, RPO256 and RPX256 are run with AVX2 acceleration enabled.
|
||||
#### Instructions
|
||||
### Instructions
|
||||
Before you can run the benchmarks, you'll need to make sure you have Rust [installed](https://www.rust-lang.org/tools/install). After that, to run the benchmarks for RPO and BLAKE3, clone the current repository, and from the root directory of the repo run the following:
|
||||
|
||||
```
|
||||
@@ -56,47 +56,3 @@ To run the benchmarks for Rescue Prime, Poseidon and SHA3, clone the following [
|
||||
```
|
||||
cargo bench hash
|
||||
```
|
||||
|
||||
## Miden VM DSA
|
||||
|
||||
We make use of the following digital signature algorithms (DSAs) in the Miden VM (a short usage sketch follows the list):
|
||||
|
||||
* **RPO-Falcon512** as specified [here](https://falcon-sign.info/falcon.pdf) with the one difference being the use of the RPO hash function for the hash-to-point algorithm (Algorithm 3 in the previous reference) instead of SHAKE256.
|
||||
* **RPO-STARK** as specified [here](https://eprint.iacr.org/2024/1553), where the parameters are the ones for the unique-decoding regime (UDR), with the following two differences:
|
||||
* We rely on Conjecture 1 in the [ethSTARK](https://eprint.iacr.org/2021/582) paper.
|
||||
* The number of FRI queries is $30$ and the grinding factor is $12$ bits. Thus, using the previous point, we can argue that the modified version achieves at least $102$ bits of average-case existential unforgeability security against $2^{113}$-query bound adversaries that can obtain up to $2^{64}$ signatures under the same public key.
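For orientation, the sketch below (illustrative only, assuming the `std` feature, and mirroring this crate's tests and benchmarks) shows key generation, signing, and verification for both schemes.

```
use miden_crypto::{
    dsa::{rpo_falcon512, rpo_stark},
    Word,
};

fn main() {
    // RPO-Falcon512 key pair.
    let falcon_sk = rpo_falcon512::SecretKey::new();
    let falcon_pk = falcon_sk.public_key();

    // RPO-STARK key pair.
    let stark_sk = rpo_stark::SecretKey::random();
    let stark_pk = stark_sk.public_key();

    // Both schemes sign a Word (4 field elements); verification returns a bool.
    let message = Word::default();
    assert!(falcon_pk.verify(message, &falcon_sk.sign(message)));
    assert!(stark_pk.verify(message, &stark_sk.sign(message)));
}
```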
|
||||
|
||||
|
||||
|
||||
### Comparison and Instructions
|
||||
|
||||
#### Comparison
|
||||
|
||||
|
||||
##### Key Generation
|
||||
|
||||
| DSA | RPO-Falcon512 | RPO-STARK |
|
||||
| ------------------- | :-----------: | :-------: |
|
||||
| Apple M1 Pro | 590 ms | 6 µs |
|
||||
| Intel Core i5-8279U | 585 ms | 10 µs |
|
||||
|
||||
##### Signature Generation
|
||||
|
||||
| DSA | RPO-Falcon512 | RPO-STARK |
|
||||
| ------------------- | :-----------: | :-------: |
|
||||
| Apple M1 Pro | 1.5 ms | 78 ms |
|
||||
| Intel Core i5-8279U | 1.8 ms | 130 ms |
|
||||
|
||||
##### Signature Verification
|
||||
|
||||
| DSA | RPO-Falcon512 | RPO-STARK |
|
||||
| ------------------- | :-----------: | :-------: |
|
||||
| Apple M1 Pro | 0.7 ms | 4.5 ms |
|
||||
| Intel Core i5-8279U | 1.2 ms | 7.9 ms |
|
||||
|
||||
#### Instructions
|
||||
Before you can run the benchmarks, you'll need to make sure you have Rust [installed](https://www.rust-lang.org/tools/install). After that, to run the benchmarks, clone the current repository, and from the root directory of the repo run the following:
|
||||
|
||||
```
|
||||
cargo bench --bench dsa
|
||||
```
|
||||
@@ -1,88 +0,0 @@
|
||||
use criterion::{criterion_group, criterion_main, BatchSize, Criterion};
|
||||
use miden_crypto::dsa::{
|
||||
rpo_falcon512::SecretKey as FalconSecretKey, rpo_stark::SecretKey as RpoStarkSecretKey,
|
||||
};
|
||||
use rand_utils::rand_array;
|
||||
|
||||
fn key_gen_falcon(c: &mut Criterion) {
|
||||
c.bench_function("Falcon public key generation", |bench| {
|
||||
bench.iter_batched(|| FalconSecretKey::new(), |sk| sk.public_key(), BatchSize::SmallInput)
|
||||
});
|
||||
|
||||
c.bench_function("Falcon secret key generation", |bench| {
|
||||
bench.iter_batched(|| {}, |_| FalconSecretKey::new(), BatchSize::SmallInput)
|
||||
});
|
||||
}
|
||||
|
||||
fn key_gen_rpo_stark(c: &mut Criterion) {
|
||||
c.bench_function("RPO-STARK public key generation", |bench| {
|
||||
bench.iter_batched(
|
||||
|| RpoStarkSecretKey::random(),
|
||||
|sk| sk.public_key(),
|
||||
BatchSize::SmallInput,
|
||||
)
|
||||
});
|
||||
|
||||
c.bench_function("RPO-STARK secret key generation", |bench| {
|
||||
bench.iter_batched(|| {}, |_| RpoStarkSecretKey::random(), BatchSize::SmallInput)
|
||||
});
|
||||
}
|
||||
|
||||
fn signature_gen_falcon(c: &mut Criterion) {
|
||||
c.bench_function("Falcon signature generation", |bench| {
|
||||
bench.iter_batched(
|
||||
|| (FalconSecretKey::new(), rand_array().into()),
|
||||
|(sk, msg)| sk.sign(msg),
|
||||
BatchSize::SmallInput,
|
||||
)
|
||||
});
|
||||
}
|
||||
|
||||
fn signature_gen_rpo_stark(c: &mut Criterion) {
|
||||
c.bench_function("RPO-STARK signature generation", |bench| {
|
||||
bench.iter_batched(
|
||||
|| (RpoStarkSecretKey::random(), rand_array().into()),
|
||||
|(sk, msg)| sk.sign(msg),
|
||||
BatchSize::SmallInput,
|
||||
)
|
||||
});
|
||||
}
|
||||
|
||||
fn signature_ver_falcon(c: &mut Criterion) {
|
||||
c.bench_function("Falcon signature verification", |bench| {
|
||||
bench.iter_batched(
|
||||
|| {
|
||||
let sk = FalconSecretKey::new();
|
||||
let msg = rand_array().into();
|
||||
(sk.public_key(), msg, sk.sign(msg))
|
||||
},
|
||||
|(pk, msg, sig)| pk.verify(msg, &sig),
|
||||
BatchSize::SmallInput,
|
||||
)
|
||||
});
|
||||
}
|
||||
|
||||
fn signature_ver_rpo_stark(c: &mut Criterion) {
|
||||
c.bench_function("RPO-STARK signature verification", |bench| {
|
||||
bench.iter_batched(
|
||||
|| {
|
||||
let sk = RpoStarkSecretKey::random();
|
||||
let msg = rand_array().into();
|
||||
(sk.public_key(), msg, sk.sign(msg))
|
||||
},
|
||||
|(pk, msg, sig)| pk.verify(msg, &sig),
|
||||
BatchSize::SmallInput,
|
||||
)
|
||||
});
|
||||
}
|
||||
|
||||
criterion_group!(
|
||||
dsa_group,
|
||||
key_gen_falcon,
|
||||
key_gen_rpo_stark,
|
||||
signature_gen_falcon,
|
||||
signature_gen_rpo_stark,
|
||||
signature_ver_falcon,
|
||||
signature_ver_rpo_stark
|
||||
);
|
||||
criterion_main!(dsa_group);
|
||||
@@ -1,5 +1,5 @@
|
||||
[toolchain]
|
||||
channel = "1.82"
|
||||
channel = "1.84"
|
||||
components = ["rustfmt", "rust-src", "clippy"]
|
||||
targets = ["wasm32-unknown-unknown"]
|
||||
profile = "minimal"
|
||||
|
||||
@@ -13,7 +13,7 @@ else
|
||||
if git diff --exit-code "origin/${BASE_REF}" -- "${CHANGELOG_FILE}"; then
|
||||
>&2 echo "Changes should come with an entry in the \"CHANGELOG.md\" file. This behavior
|
||||
can be overridden by using the \"no changelog\" label, which is used for changes
|
||||
that are trivial / explicitely stated not to require a changelog entry."
|
||||
that are trivial / explicitly stated not to require a changelog entry."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
//! Digital signature schemes supported by default in the Miden VM.
|
||||
|
||||
pub mod rpo_falcon512;
|
||||
|
||||
pub mod rpo_stark;
|
||||
|
||||
@@ -97,7 +97,7 @@ impl Signature {
|
||||
}
|
||||
|
||||
let c = hash_to_point_rpo256(message, &self.nonce);
|
||||
h_digest == pubkey_com && verify_helper(&c, &self.s2, self.pk_poly())
|
||||
verify_helper(&c, &self.s2, self.pk_poly())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -289,9 +289,9 @@ impl Deserializable for SignaturePoly {
|
||||
}
|
||||
m += 128;
|
||||
if m >= 2048 {
|
||||
return Err(DeserializationError::InvalidValue(
|
||||
"Failed to decode signature: high bits {m} exceed 2048".to_string(),
|
||||
));
|
||||
return Err(DeserializationError::InvalidValue(format!(
|
||||
"Failed to decode signature: high bits {m} exceed 2048",
|
||||
)));
|
||||
}
|
||||
}
|
||||
if s != 0 && m == 0 {
|
||||
|
||||
@@ -1,24 +0,0 @@
|
||||
mod signature;
|
||||
pub use signature::{PublicKey, SecretKey, Signature};
|
||||
|
||||
mod stark;
|
||||
pub use stark::{PublicInputs, RescueAir};
|
||||
|
||||
// TESTS
|
||||
// ================================================================================================
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::SecretKey;
|
||||
use crate::Word;
|
||||
|
||||
#[test]
|
||||
fn test_signature() {
|
||||
let sk = SecretKey::new(Word::default());
|
||||
|
||||
let message = Word::default();
|
||||
let signature = sk.sign(message);
|
||||
let pk = sk.public_key();
|
||||
assert!(pk.verify(message, &signature))
|
||||
}
|
||||
}
|
||||
@@ -1,173 +0,0 @@
|
||||
use rand::{distributions::Uniform, prelude::Distribution, Rng};
|
||||
use winter_air::{FieldExtension, ProofOptions};
|
||||
use winter_math::{fields::f64::BaseElement, FieldElement};
|
||||
use winter_prover::Proof;
|
||||
use winter_utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable};
|
||||
|
||||
use crate::{
|
||||
dsa::rpo_stark::stark::RpoSignatureScheme,
|
||||
hash::{rpo::Rpo256, DIGEST_SIZE},
|
||||
StarkField, Word, ZERO,
|
||||
};
|
||||
|
||||
// CONSTANTS
|
||||
// ================================================================================================
|
||||
|
||||
/// Specifies the parameters of the STARK underlying the signature scheme. These parameters provide
|
||||
/// at least 102 bits of security under the conjectured security of the toy protocol in
|
||||
/// the ethSTARK paper [1].
|
||||
///
|
||||
/// [1]: https://eprint.iacr.org/2021/582
|
||||
pub const PROOF_OPTIONS: ProofOptions =
|
||||
ProofOptions::new(30, 8, 12, FieldExtension::Quadratic, 4, 7, true);
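// Reading `ProofOptions::new` above against the context string in `generate_seed`:
// 30 FRI queries, blowup factor 8, 12 grinding bits, quadratic field extension, FRI folding
// factor 4, and FRI remainder polynomial max degree 7. The trailing `true` presumably toggles
// the zero-knowledge (salted commitment) mode used by the prover.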
|
||||
|
||||
// PUBLIC KEY
|
||||
// ================================================================================================
|
||||
|
||||
/// A public key for verifying signatures.
|
||||
///
|
||||
/// The public key is a [Word] (i.e., 4 field elements) that is the hash of the secret key.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub struct PublicKey(Word);
|
||||
|
||||
impl PublicKey {
|
||||
/// Returns the [Word] defining the public key.
|
||||
pub fn inner(&self) -> Word {
|
||||
self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl PublicKey {
|
||||
/// Verifies the provided signature against provided message and this public key.
|
||||
pub fn verify(&self, message: Word, signature: &Signature) -> bool {
|
||||
signature.verify(message, *self)
|
||||
}
|
||||
}
|
||||
|
||||
impl Serializable for PublicKey {
|
||||
fn write_into<W: ByteWriter>(&self, target: &mut W) {
|
||||
self.0.write_into(target);
|
||||
}
|
||||
}
|
||||
|
||||
impl Deserializable for PublicKey {
|
||||
fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
|
||||
let pk = <Word>::read_from(source)?;
|
||||
Ok(Self(pk))
|
||||
}
|
||||
}
|
||||
|
||||
// SECRET KEY
|
||||
// ================================================================================================
|
||||
|
||||
/// A secret key for generating signatures.
|
||||
///
|
||||
/// The secret key is a [Word] (i.e., 4 field elements).
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub struct SecretKey(Word);
|
||||
|
||||
impl SecretKey {
|
||||
/// Generates a secret key from a [Word].
|
||||
pub fn new(word: Word) -> Self {
|
||||
Self(word)
|
||||
}
|
||||
|
||||
/// Generates a secret key from OS-provided randomness.
|
||||
#[cfg(feature = "std")]
|
||||
pub fn random() -> Self {
|
||||
use rand::{rngs::StdRng, SeedableRng};
|
||||
|
||||
let mut rng = StdRng::from_entropy();
|
||||
Self::with_rng(&mut rng)
|
||||
}
|
||||
|
||||
/// Generates a secret key using the provided random number generator `Rng`.
|
||||
pub fn with_rng<R: Rng>(rng: &mut R) -> Self {
|
||||
let mut sk = [ZERO; 4];
|
||||
let uni_dist = Uniform::from(0..BaseElement::MODULUS);
|
||||
|
||||
for s in sk.iter_mut() {
|
||||
let sampled_integer = uni_dist.sample(rng);
|
||||
*s = BaseElement::new(sampled_integer);
|
||||
}
|
||||
|
||||
Self(sk)
|
||||
}
|
||||
|
||||
/// Computes the public key corresponding to this secret key.
|
||||
pub fn public_key(&self) -> PublicKey {
|
||||
let mut elements = [BaseElement::ZERO; 8];
|
||||
elements[..DIGEST_SIZE].copy_from_slice(&self.0);
|
||||
let pk = Rpo256::hash_elements(&elements);
|
||||
PublicKey(pk.into())
|
||||
}
|
||||
|
||||
/// Signs a message with this secret key.
|
||||
pub fn sign(&self, message: Word) -> Signature {
|
||||
let signature: RpoSignatureScheme<Rpo256> = RpoSignatureScheme::new(PROOF_OPTIONS);
|
||||
let proof = signature.sign(self.0, message);
|
||||
Signature { proof }
|
||||
}
|
||||
}
|
||||
|
||||
impl Serializable for SecretKey {
|
||||
fn write_into<W: ByteWriter>(&self, target: &mut W) {
|
||||
self.0.write_into(target);
|
||||
}
|
||||
}
|
||||
|
||||
impl Deserializable for SecretKey {
|
||||
fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
|
||||
let sk = <Word>::read_from(source)?;
|
||||
Ok(Self(sk))
|
||||
}
|
||||
}
|
||||
|
||||
// SIGNATURE
|
||||
// ================================================================================================
|
||||
|
||||
/// An RPO STARK-based signature over a message.
|
||||
///
|
||||
/// The signature is a STARK proof of knowledge of a pre-image given an image where the map is
|
||||
/// the RPO permutation, the pre-image is the secret key and the image is the public key.
|
||||
/// The current implementation follows the description in [1] but relies on the conjectured security
|
||||
/// of the toy protocol in the ethSTARK paper [2]. Using the parameter set
|
||||
/// given in `PROOF_OPTIONS`, this yields a signature with $102$ bits of average-case existential unforgeability
|
||||
/// security against $2^{113}$-query bound adversaries that can obtain up to $2^{64}$ signatures
|
||||
/// under the same public key.
|
||||
///
|
||||
/// [1]: https://eprint.iacr.org/2024/1553
|
||||
/// [2]: https://eprint.iacr.org/2021/582
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct Signature {
|
||||
proof: Proof,
|
||||
}
|
||||
|
||||
impl Signature {
|
||||
/// Returns the STARK proof constituting the signature.
|
||||
pub fn inner(&self) -> Proof {
|
||||
self.proof.clone()
|
||||
}
|
||||
|
||||
/// Returns true if this signature is a valid signature for the specified message generated
|
||||
/// against the secret key matching the specified public key.
|
||||
pub fn verify(&self, message: Word, pk: PublicKey) -> bool {
|
||||
let signature: RpoSignatureScheme<Rpo256> = RpoSignatureScheme::new(PROOF_OPTIONS);
|
||||
|
||||
let res = signature.verify(pk.inner(), message, self.proof.clone());
|
||||
res.is_ok()
|
||||
}
|
||||
}
|
||||
|
||||
impl Serializable for Signature {
|
||||
fn write_into<W: ByteWriter>(&self, target: &mut W) {
|
||||
self.proof.write_into(target);
|
||||
}
|
||||
}
|
||||
|
||||
impl Deserializable for Signature {
|
||||
fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
|
||||
let proof = Proof::read_from(source)?;
|
||||
Ok(Self { proof })
|
||||
}
|
||||
}
|
||||
@@ -1,198 +0,0 @@
|
||||
use alloc::vec::Vec;
|
||||
|
||||
use winter_math::{fields::f64::BaseElement, FieldElement, ToElements};
|
||||
use winter_prover::{
|
||||
Air, AirContext, Assertion, EvaluationFrame, ProofOptions, TraceInfo,
|
||||
TransitionConstraintDegree,
|
||||
};
|
||||
|
||||
use crate::{
|
||||
hash::{ARK1, ARK2, MDS, STATE_WIDTH},
|
||||
Word, ZERO,
|
||||
};
|
||||
|
||||
// CONSTANTS
|
||||
// ================================================================================================
|
||||
|
||||
pub const HASH_CYCLE_LEN: usize = 8;
|
||||
|
||||
// AIR
|
||||
// ================================================================================================
|
||||
|
||||
pub struct RescueAir {
|
||||
context: AirContext<BaseElement>,
|
||||
pub_key: Word,
|
||||
}
|
||||
|
||||
impl Air for RescueAir {
|
||||
type BaseField = BaseElement;
|
||||
type PublicInputs = PublicInputs;
|
||||
|
||||
type GkrProof = ();
|
||||
type GkrVerifier = ();
|
||||
|
||||
// CONSTRUCTOR
|
||||
// --------------------------------------------------------------------------------------------
|
||||
fn new(trace_info: TraceInfo, pub_inputs: PublicInputs, options: ProofOptions) -> Self {
|
||||
let degrees = vec![
|
||||
// Apply RPO rounds.
|
||||
TransitionConstraintDegree::new(7),
|
||||
TransitionConstraintDegree::new(7),
|
||||
TransitionConstraintDegree::new(7),
|
||||
TransitionConstraintDegree::new(7),
|
||||
TransitionConstraintDegree::new(7),
|
||||
TransitionConstraintDegree::new(7),
|
||||
TransitionConstraintDegree::new(7),
|
||||
TransitionConstraintDegree::new(7),
|
||||
TransitionConstraintDegree::new(7),
|
||||
TransitionConstraintDegree::new(7),
|
||||
TransitionConstraintDegree::new(7),
|
||||
TransitionConstraintDegree::new(7),
|
||||
];
|
||||
assert_eq!(STATE_WIDTH, trace_info.width());
|
||||
let context = AirContext::new(trace_info, degrees, 12, options);
|
||||
let context = context.set_num_transition_exemptions(1);
|
||||
RescueAir { context, pub_key: pub_inputs.pub_key }
|
||||
}
|
||||
|
||||
fn context(&self) -> &AirContext<Self::BaseField> {
|
||||
&self.context
|
||||
}
|
||||
|
||||
fn evaluate_transition<E: FieldElement + From<Self::BaseField>>(
|
||||
&self,
|
||||
frame: &EvaluationFrame<E>,
|
||||
periodic_values: &[E],
|
||||
result: &mut [E],
|
||||
) {
|
||||
let current = frame.current();
|
||||
let next = frame.next();
|
||||
// expected state width is 12 field elements
|
||||
debug_assert_eq!(STATE_WIDTH, current.len());
|
||||
debug_assert_eq!(STATE_WIDTH, next.len());
|
||||
|
||||
enforce_rpo_round(frame, result, periodic_values);
|
||||
}
|
||||
|
||||
fn get_assertions(&self) -> Vec<Assertion<Self::BaseField>> {
|
||||
let initial_step = 0;
|
||||
let last_step = self.trace_length() - 1;
|
||||
vec![
|
||||
// Assert that the capacity as well as the second half of the rate portion of the state
|
||||
// are initialized to `ZERO`. The first half of the rate is unconstrained as it will
|
||||
// contain the secret key
|
||||
Assertion::single(0, initial_step, Self::BaseField::ZERO),
|
||||
Assertion::single(1, initial_step, Self::BaseField::ZERO),
|
||||
Assertion::single(2, initial_step, Self::BaseField::ZERO),
|
||||
Assertion::single(3, initial_step, Self::BaseField::ZERO),
|
||||
Assertion::single(8, initial_step, Self::BaseField::ZERO),
|
||||
Assertion::single(9, initial_step, Self::BaseField::ZERO),
|
||||
Assertion::single(10, initial_step, Self::BaseField::ZERO),
|
||||
Assertion::single(11, initial_step, Self::BaseField::ZERO),
|
||||
// Assert that the public key is the correct one
|
||||
Assertion::single(4, last_step, self.pub_key[0]),
|
||||
Assertion::single(5, last_step, self.pub_key[1]),
|
||||
Assertion::single(6, last_step, self.pub_key[2]),
|
||||
Assertion::single(7, last_step, self.pub_key[3]),
|
||||
]
|
||||
}
|
||||
|
||||
fn get_periodic_column_values(&self) -> Vec<Vec<Self::BaseField>> {
|
||||
get_round_constants()
|
||||
}
|
||||
}
|
||||
|
||||
pub struct PublicInputs {
|
||||
pub(crate) pub_key: Word,
|
||||
pub(crate) msg: Word,
|
||||
}
|
||||
|
||||
impl PublicInputs {
|
||||
pub fn new(pub_key: Word, msg: Word) -> Self {
|
||||
Self { pub_key, msg }
|
||||
}
|
||||
}
|
||||
|
||||
impl ToElements<BaseElement> for PublicInputs {
|
||||
fn to_elements(&self) -> Vec<BaseElement> {
|
||||
let mut res = self.pub_key.to_vec();
|
||||
res.extend_from_slice(self.msg.as_ref());
|
||||
res
|
||||
}
|
||||
}
|
||||
|
||||
// HELPER EVALUATORS
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
|
||||
/// Enforces constraints for a single round of the Rescue Prime Optimized hash function.
|
||||
pub fn enforce_rpo_round<E: FieldElement + From<BaseElement>>(
|
||||
frame: &EvaluationFrame<E>,
|
||||
result: &mut [E],
|
||||
ark: &[E],
|
||||
) {
|
||||
// compute the state that should result from applying the first 5 operations of the RPO round to
|
||||
// the current hash state.
|
||||
let mut step1 = [E::ZERO; STATE_WIDTH];
|
||||
step1.copy_from_slice(frame.current());
|
||||
|
||||
apply_mds(&mut step1);
|
||||
// add constants
|
||||
for i in 0..STATE_WIDTH {
|
||||
step1[i] += ark[i];
|
||||
}
|
||||
apply_sbox(&mut step1);
|
||||
apply_mds(&mut step1);
|
||||
// add constants
|
||||
for i in 0..STATE_WIDTH {
|
||||
step1[i] += ark[STATE_WIDTH + i];
|
||||
}
|
||||
|
||||
// compute the state that should result from applying the inverse of the last operation of the
|
||||
// RPO round to the next step of the computation.
|
||||
let mut step2 = [E::ZERO; STATE_WIDTH];
|
||||
step2.copy_from_slice(frame.next());
|
||||
apply_sbox(&mut step2);
|
||||
|
||||
// make sure that the results are equal.
|
||||
for i in 0..STATE_WIDTH {
|
||||
result[i] = step2[i] - step1[i]
|
||||
}
|
||||
}
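// Equivalently, the transition constraint enforced above is
//     sbox(next) = MDS( sbox( MDS(current) + ARK1 ) ) + ARK2,
// with sbox(x) = x^7. Applying the forward S-box to `next` (the inverse of the round's final
// inverse-S-box step) keeps the constraint at degree 7 instead of the prohibitively high degree
// of the inverse S-box.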
|
||||
|
||||
#[inline(always)]
|
||||
fn apply_sbox<E: FieldElement + From<BaseElement>>(state: &mut [E; STATE_WIDTH]) {
|
||||
state.iter_mut().for_each(|v| {
|
||||
let t2 = v.square();
|
||||
let t4 = t2.square();
|
||||
*v *= t2 * t4;
|
||||
});
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn apply_mds<E: FieldElement + From<BaseElement>>(state: &mut [E; STATE_WIDTH]) {
|
||||
let mut result = [E::ZERO; STATE_WIDTH];
|
||||
result.iter_mut().zip(MDS).for_each(|(r, mds_row)| {
|
||||
state.iter().zip(mds_row).for_each(|(&s, m)| {
|
||||
*r += E::from(m) * s;
|
||||
});
|
||||
});
|
||||
*state = result
|
||||
}
|
||||
|
||||
/// Returns RPO round constants arranged in column-major form.
|
||||
pub fn get_round_constants() -> Vec<Vec<BaseElement>> {
|
||||
let mut constants = Vec::new();
|
||||
for _ in 0..(STATE_WIDTH * 2) {
|
||||
constants.push(vec![ZERO; HASH_CYCLE_LEN]);
|
||||
}
|
||||
|
||||
#[allow(clippy::needless_range_loop)]
|
||||
for i in 0..HASH_CYCLE_LEN - 1 {
|
||||
for j in 0..STATE_WIDTH {
|
||||
constants[j][i] = ARK1[i][j];
|
||||
constants[j + STATE_WIDTH][i] = ARK2[i][j];
|
||||
}
|
||||
}
|
||||
|
||||
constants
|
||||
}
|
||||
@@ -1,98 +0,0 @@
|
||||
use alloc::vec::Vec;
|
||||
use core::marker::PhantomData;
|
||||
|
||||
use prover::RpoSignatureProver;
|
||||
use rand_chacha::ChaCha20Rng;
|
||||
use winter_crypto::{ElementHasher, SaltedMerkleTree};
|
||||
use winter_math::fields::f64::BaseElement;
|
||||
use winter_prover::{Proof, ProofOptions, Prover};
|
||||
use winter_utils::Serializable;
|
||||
use winter_verifier::{verify, AcceptableOptions, VerifierError};
|
||||
|
||||
use crate::{
|
||||
hash::{rpo::Rpo256, DIGEST_SIZE},
|
||||
rand::RpoRandomCoin,
|
||||
};
|
||||
|
||||
mod air;
|
||||
pub use air::{PublicInputs, RescueAir};
|
||||
mod prover;
|
||||
|
||||
/// Represents an abstract STARK-based signature scheme with knowledge of RPO pre-image as
|
||||
/// the hard relation.
|
||||
pub struct RpoSignatureScheme<H: ElementHasher> {
|
||||
options: ProofOptions,
|
||||
_h: PhantomData<H>,
|
||||
}
|
||||
|
||||
impl<H: ElementHasher<BaseField = BaseElement> + Sync> RpoSignatureScheme<H> {
|
||||
pub fn new(options: ProofOptions) -> Self {
|
||||
RpoSignatureScheme { options, _h: PhantomData }
|
||||
}
|
||||
|
||||
pub fn sign(&self, sk: [BaseElement; DIGEST_SIZE], msg: [BaseElement; DIGEST_SIZE]) -> Proof {
|
||||
// create a prover
|
||||
let prover = RpoSignatureProver::<H>::new(msg, self.options.clone());
|
||||
|
||||
// generate execution trace
|
||||
let trace = prover.build_trace(sk);
|
||||
|
||||
// generate the initial seed for the PRNG used for zero-knowledge
|
||||
let seed: [u8; 32] = generate_seed(sk, msg);
|
||||
|
||||
// generate the proof
|
||||
prover.prove(trace, Some(seed)).expect("failed to generate the signature")
|
||||
}
|
||||
|
||||
pub fn verify(
|
||||
&self,
|
||||
pub_key: [BaseElement; DIGEST_SIZE],
|
||||
msg: [BaseElement; DIGEST_SIZE],
|
||||
proof: Proof,
|
||||
) -> Result<(), VerifierError> {
|
||||
// we make sure that the parameters used in generating the proof match the expected ones
|
||||
if *proof.options() != self.options {
|
||||
return Err(VerifierError::UnacceptableProofOptions);
|
||||
}
|
||||
let pub_inputs = PublicInputs { pub_key, msg };
|
||||
let acceptable_options = AcceptableOptions::OptionSet(vec![proof.options().clone()]);
|
||||
verify::<RescueAir, Rpo256, RpoRandomCoin, SaltedMerkleTree<Rpo256, ChaCha20Rng>>(
|
||||
proof,
|
||||
pub_inputs,
|
||||
&acceptable_options,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/// Deterministically generates a seed for seeding the PRNG used for zero-knowledge.
|
||||
///
|
||||
/// This uses the argument described in [RFC 6979](https://datatracker.ietf.org/doc/html/rfc6979#section-3.5)
|
||||
/// § 3.5 where the concatenation of the private key and the hashed message, i.e., sk || H(m), is
|
||||
/// used in order to construct the initial seed of a PRNG.
|
||||
///
|
||||
/// Note that we also hash in a context string in order to domain separate between different
|
||||
/// instantiations of the signature scheme.
|
||||
#[inline]
|
||||
pub fn generate_seed(sk: [BaseElement; DIGEST_SIZE], msg: [BaseElement; DIGEST_SIZE]) -> [u8; 32] {
|
||||
let context_bytes = "
|
||||
Seed for PRNG used for Zero-knowledge in RPO-STARK signature scheme:
|
||||
1. Version: Conjectured security
|
||||
2. FRI queries: 30
|
||||
3. Blowup factor: 8
|
||||
4. Grinding bits: 12
|
||||
5. Field extension degree: 2
|
||||
6. FRI folding factor: 4
|
||||
7. FRI remainder polynomial max degree: 7
|
||||
"
|
||||
.to_bytes();
|
||||
let sk_bytes = sk.to_bytes();
|
||||
let msg_bytes = msg.to_bytes();
|
||||
|
||||
let total_length = context_bytes.len() + sk_bytes.len() + msg_bytes.len();
|
||||
let mut buffer = Vec::with_capacity(total_length);
|
||||
buffer.extend_from_slice(&context_bytes);
|
||||
buffer.extend_from_slice(&sk_bytes);
|
||||
buffer.extend_from_slice(&msg_bytes);
|
||||
|
||||
blake3::hash(&buffer).into()
|
||||
}
|
||||
@@ -1,148 +0,0 @@
|
||||
use core::marker::PhantomData;
|
||||
|
||||
use rand_chacha::ChaCha20Rng;
|
||||
use winter_air::{
|
||||
AuxRandElements, ConstraintCompositionCoefficients, PartitionOptions, ZkParameters,
|
||||
};
|
||||
use winter_crypto::{ElementHasher, SaltedMerkleTree};
|
||||
use winter_math::{fields::f64::BaseElement, FieldElement};
|
||||
use winter_prover::{
|
||||
matrix::ColMatrix, CompositionPoly, CompositionPolyTrace, DefaultConstraintCommitment,
|
||||
DefaultConstraintEvaluator, DefaultTraceLde, ProofOptions, Prover, StarkDomain, Trace,
|
||||
TraceInfo, TracePolyTable, TraceTable,
|
||||
};
|
||||
|
||||
use super::air::{PublicInputs, RescueAir, HASH_CYCLE_LEN};
|
||||
use crate::{
|
||||
hash::{rpo::Rpo256, STATE_WIDTH},
|
||||
rand::RpoRandomCoin,
|
||||
Word, ZERO,
|
||||
};
|
||||
|
||||
// PROVER
|
||||
// ================================================================================================
|
||||
|
||||
/// A prover for the RPO STARK-based signature scheme.
|
||||
///
|
||||
/// The signature is based on the one-wayness of the RPO hash function but it is generic over
|
||||
/// the hash function used for instantiating the random oracle for the BCS transform.
|
||||
pub(crate) struct RpoSignatureProver<H: ElementHasher + Sync> {
|
||||
message: Word,
|
||||
options: ProofOptions,
|
||||
_hasher: PhantomData<H>,
|
||||
}
|
||||
|
||||
impl<H: ElementHasher + Sync> RpoSignatureProver<H> {
|
||||
pub(crate) fn new(message: Word, options: ProofOptions) -> Self {
|
||||
Self { message, options, _hasher: PhantomData }
|
||||
}
|
||||
|
||||
pub(crate) fn build_trace(&self, sk: Word) -> TraceTable<BaseElement> {
|
||||
let mut trace = TraceTable::new(STATE_WIDTH, HASH_CYCLE_LEN);
|
||||
|
||||
trace.fill(
|
||||
|state| {
|
||||
// initialize first half of the rate portion of the state with the secret key
|
||||
state[0] = ZERO;
|
||||
state[1] = ZERO;
|
||||
state[2] = ZERO;
|
||||
state[3] = ZERO;
|
||||
state[4] = sk[0];
|
||||
state[5] = sk[1];
|
||||
state[6] = sk[2];
|
||||
state[7] = sk[3];
|
||||
state[8] = ZERO;
|
||||
state[9] = ZERO;
|
||||
state[10] = ZERO;
|
||||
state[11] = ZERO;
|
||||
},
|
||||
|step, state| {
|
||||
Rpo256::apply_round(
|
||||
state.try_into().expect("should not fail given the size of the array"),
|
||||
step,
|
||||
);
|
||||
},
|
||||
);
|
||||
trace
|
||||
}
|
||||
}
|
||||
|
||||
impl<H: ElementHasher> Prover for RpoSignatureProver<H>
|
||||
where
|
||||
H: ElementHasher<BaseField = BaseElement> + Sync,
|
||||
{
|
||||
type BaseField = BaseElement;
|
||||
type Air = RescueAir;
|
||||
type Trace = TraceTable<BaseElement>;
|
||||
type HashFn = Rpo256;
|
||||
type VC = SaltedMerkleTree<Self::HashFn, Self::ZkPrng>;
|
||||
type RandomCoin = RpoRandomCoin;
|
||||
type TraceLde<E: FieldElement<BaseField = Self::BaseField>> =
|
||||
DefaultTraceLde<E, Self::HashFn, Self::VC>;
|
||||
type ConstraintCommitment<E: FieldElement<BaseField = Self::BaseField>> =
|
||||
DefaultConstraintCommitment<E, Self::HashFn, Self::ZkPrng, Self::VC>;
|
||||
type ConstraintEvaluator<'a, E: FieldElement<BaseField = Self::BaseField>> =
|
||||
DefaultConstraintEvaluator<'a, Self::Air, E>;
|
||||
type ZkPrng = ChaCha20Rng;
|
||||
|
||||
fn get_pub_inputs(&self, trace: &Self::Trace) -> PublicInputs {
|
||||
let last_step = trace.length() - 1;
|
||||
// Note that the message is not part of the execution trace but is part of the public
|
||||
// inputs. This is explained in the reference description of the DSA and intuitively
|
||||
// it is done in order to make sure that the message is part of the Fiat-Shamir
|
||||
// transcript and hence binds the proof/signature to the message.
|
||||
PublicInputs {
|
||||
pub_key: [
|
||||
trace.get(4, last_step),
|
||||
trace.get(5, last_step),
|
||||
trace.get(6, last_step),
|
||||
trace.get(7, last_step),
|
||||
],
|
||||
msg: self.message,
|
||||
}
|
||||
}
|
||||
|
||||
fn options(&self) -> &ProofOptions {
|
||||
&self.options
|
||||
}
|
||||
|
||||
fn new_trace_lde<E: FieldElement<BaseField = Self::BaseField>>(
|
||||
&self,
|
||||
trace_info: &TraceInfo,
|
||||
main_trace: &ColMatrix<Self::BaseField>,
|
||||
domain: &StarkDomain<Self::BaseField>,
|
||||
partition_option: PartitionOptions,
|
||||
zk_parameters: Option<ZkParameters>,
|
||||
prng: &mut Option<Self::ZkPrng>,
|
||||
) -> (Self::TraceLde<E>, TracePolyTable<E>) {
|
||||
DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, zk_parameters, prng)
|
||||
}
|
||||
|
||||
fn new_evaluator<'a, E: FieldElement<BaseField = Self::BaseField>>(
|
||||
&self,
|
||||
air: &'a Self::Air,
|
||||
aux_rand_elements: Option<AuxRandElements<E>>,
|
||||
composition_coefficients: ConstraintCompositionCoefficients<E>,
|
||||
) -> Self::ConstraintEvaluator<'a, E> {
|
||||
DefaultConstraintEvaluator::new(air, aux_rand_elements, composition_coefficients)
|
||||
}
|
||||
|
||||
fn build_constraint_commitment<E: FieldElement<BaseField = Self::BaseField>>(
|
||||
&self,
|
||||
composition_poly_trace: CompositionPolyTrace<E>,
|
||||
num_constraint_composition_columns: usize,
|
||||
domain: &StarkDomain<Self::BaseField>,
|
||||
partition_options: PartitionOptions,
|
||||
zk_parameters: Option<ZkParameters>,
|
||||
prng: &mut Option<Self::ZkPrng>,
|
||||
) -> (Self::ConstraintCommitment<E>, CompositionPoly<E>) {
|
||||
DefaultConstraintCommitment::new(
|
||||
composition_poly_trace,
|
||||
num_constraint_composition_columns,
|
||||
domain,
|
||||
partition_options,
|
||||
zk_parameters,
|
||||
prng,
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -5,7 +5,6 @@ use super::{CubeExtension, Felt, FieldElement, StarkField, ZERO};
|
||||
pub mod blake;
|
||||
|
||||
mod rescue;
|
||||
pub(crate) use rescue::{ARK1, ARK2, DIGEST_SIZE, MDS, STATE_WIDTH};
|
||||
pub mod rpo {
|
||||
pub use super::rescue::{Rpo256, RpoDigest, RpoDigestError};
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@ mod arch;
|
||||
pub use arch::optimized::{add_constants_and_apply_inv_sbox, add_constants_and_apply_sbox};
|
||||
|
||||
mod mds;
|
||||
pub(crate) use mds::{apply_mds, MDS};
|
||||
use mds::{apply_mds, MDS};
|
||||
|
||||
mod rpo;
|
||||
pub use rpo::{Rpo256, RpoDigest, RpoDigestError};
|
||||
@@ -26,7 +26,7 @@ const NUM_ROUNDS: usize = 7;
|
||||
|
||||
/// Sponge state is set to 12 field elements or 96 bytes; 8 elements are reserved for rate and
|
||||
/// the remaining 4 elements are reserved for capacity.
|
||||
pub(crate) const STATE_WIDTH: usize = 12;
|
||||
const STATE_WIDTH: usize = 12;
|
||||
|
||||
/// The rate portion of the state is located in elements 4 through 11.
|
||||
const RATE_RANGE: Range<usize> = 4..12;
|
||||
@@ -42,8 +42,8 @@ const CAPACITY_RANGE: Range<usize> = 0..4;
|
||||
///
|
||||
/// The digest is returned from state elements 4, 5, 6, and 7 (the first four elements of the
|
||||
/// rate portion).
|
||||
pub(crate) const DIGEST_RANGE: Range<usize> = 4..8;
|
||||
pub(crate) const DIGEST_SIZE: usize = DIGEST_RANGE.end - DIGEST_RANGE.start;
|
||||
const DIGEST_RANGE: Range<usize> = 4..8;
|
||||
const DIGEST_SIZE: usize = DIGEST_RANGE.end - DIGEST_RANGE.start;
|
||||
|
||||
/// The number of bytes needed to encode a digest.
|
||||
const DIGEST_BYTES: usize = 32;
|
||||
@@ -144,7 +144,7 @@ fn add_constants(state: &mut [Felt; STATE_WIDTH], ark: &[Felt; STATE_WIDTH]) {
|
||||
///
|
||||
/// The constants are broken up into two arrays ARK1 and ARK2; ARK1 contains the constants for the
|
||||
/// first half of the RPO round, and ARK2 contains the constants for the second half of the round.
|
||||
pub(crate) const ARK1: [[Felt; STATE_WIDTH]; NUM_ROUNDS] = [
|
||||
const ARK1: [[Felt; STATE_WIDTH]; NUM_ROUNDS] = [
|
||||
[
|
||||
Felt::new(5789762306288267392),
|
||||
Felt::new(6522564764413701783),
|
||||
@@ -245,7 +245,7 @@ pub(crate) const ARK1: [[Felt; STATE_WIDTH]; NUM_ROUNDS] = [
|
||||
],
|
||||
];
|
||||
|
||||
pub(crate) const ARK2: [[Felt; STATE_WIDTH]; NUM_ROUNDS] = [
|
||||
const ARK2: [[Felt; STATE_WIDTH]; NUM_ROUNDS] = [
|
||||
[
|
||||
Felt::new(6077062762357204287),
|
||||
Felt::new(15277620170502011191),
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
use alloc::string::String;
|
||||
use core::{cmp::Ordering, fmt::Display, ops::Deref, slice};
|
||||
|
||||
use rand::{
|
||||
distributions::{Standard, Uniform},
|
||||
prelude::Distribution,
|
||||
use core::{
|
||||
cmp::Ordering,
|
||||
fmt::Display,
|
||||
hash::{Hash, Hasher},
|
||||
ops::Deref,
|
||||
slice,
|
||||
};
|
||||
|
||||
use thiserror::Error;
|
||||
|
||||
use super::{Digest, Felt, StarkField, DIGEST_BYTES, DIGEST_SIZE, ZERO};
|
||||
@@ -59,6 +61,12 @@ impl RpoDigest {
|
||||
}
|
||||
}
|
||||
|
||||
impl Hash for RpoDigest {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
state.write(&self.as_bytes());
|
||||
}
|
||||
}
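// Illustrative sketch (not part of this file): with `Hash` implemented, `RpoDigest` can key
// hash-based maps, which is what the hashmap-backed SMT storage relies on. For example, using
// the standard library:
//
//     use std::collections::HashMap;
//
//     let mut map: HashMap<RpoDigest, u64> = HashMap::new();
//     map.insert(Rpo256::hash(b"key"), 1);
//     assert_eq!(map.get(&Rpo256::hash(b"key")), Some(&1));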
|
||||
|
||||
impl Digest for RpoDigest {
|
||||
fn as_bytes(&self) -> [u8; DIGEST_BYTES] {
|
||||
let mut result = [0; DIGEST_BYTES];
|
||||
@@ -130,18 +138,6 @@ impl Randomizable for RpoDigest {
|
||||
}
|
||||
}
|
||||
|
||||
impl Distribution<RpoDigest> for Standard {
|
||||
fn sample<R: rand::Rng + ?Sized>(&self, rng: &mut R) -> RpoDigest {
|
||||
let mut res = [ZERO; DIGEST_SIZE];
|
||||
let uni_dist = Uniform::from(0..Felt::MODULUS);
|
||||
for r in res.iter_mut() {
|
||||
let sampled_integer = uni_dist.sample(rng);
|
||||
*r = Felt::new(sampled_integer);
|
||||
}
|
||||
RpoDigest::new(res)
|
||||
}
|
||||
}
|
||||
|
||||
// CONVERSIONS: FROM RPO DIGEST
|
||||
// ================================================================================================
|
||||
|
||||
|
||||
160 src/main.rs
@@ -4,8 +4,9 @@ use clap::Parser;
|
||||
use miden_crypto::{
|
||||
hash::rpo::{Rpo256, RpoDigest},
|
||||
merkle::{MerkleError, Smt},
|
||||
Felt, Word, ONE,
|
||||
Felt, Word, EMPTY_WORD, ONE,
|
||||
};
|
||||
use rand::{prelude::IteratorRandom, thread_rng, Rng};
|
||||
use rand_utils::rand_value;
|
||||
|
||||
#[derive(Parser, Debug)]
|
||||
@@ -13,7 +14,7 @@ use rand_utils::rand_value;
|
||||
pub struct BenchmarkCmd {
|
||||
/// Size of the tree
|
||||
#[clap(short = 's', long = "size")]
|
||||
size: u64,
|
||||
size: usize,
|
||||
}
|
||||
|
||||
fn main() {
|
||||
@@ -29,101 +30,152 @@ pub fn benchmark_smt() {
|
||||
let mut entries = Vec::new();
|
||||
for i in 0..tree_size {
|
||||
let key = rand_value::<RpoDigest>();
|
||||
let value = [ONE, ONE, ONE, Felt::new(i)];
|
||||
let value = [ONE, ONE, ONE, Felt::new(i as u64)];
|
||||
entries.push((key, value));
|
||||
}
|
||||
|
||||
let mut tree = construction(entries, tree_size).unwrap();
|
||||
insertion(&mut tree, tree_size).unwrap();
|
||||
batched_insertion(&mut tree, tree_size).unwrap();
|
||||
proof_generation(&mut tree, tree_size).unwrap();
|
||||
let mut tree = construction(entries.clone(), tree_size).unwrap();
|
||||
insertion(&mut tree).unwrap();
|
||||
batched_insertion(&mut tree).unwrap();
|
||||
batched_update(&mut tree, entries).unwrap();
|
||||
proof_generation(&mut tree).unwrap();
|
||||
}
|
||||
|
||||
/// Runs the construction benchmark for [`Smt`], returning the constructed tree.
|
||||
pub fn construction(entries: Vec<(RpoDigest, Word)>, size: u64) -> Result<Smt, MerkleError> {
|
||||
pub fn construction(entries: Vec<(RpoDigest, Word)>, size: usize) -> Result<Smt, MerkleError> {
|
||||
println!("Running a construction benchmark:");
|
||||
let now = Instant::now();
|
||||
let tree = Smt::with_entries(entries)?;
|
||||
let elapsed = now.elapsed();
|
||||
println!(
|
||||
"Constructed a SMT with {} key-value pairs in {:.3} seconds",
|
||||
size,
|
||||
elapsed.as_secs_f32(),
|
||||
);
|
||||
let elapsed = now.elapsed().as_secs_f32();
|
||||
|
||||
println!("Constructed a SMT with {size} key-value pairs in {elapsed:.1} seconds");
|
||||
println!("Number of leaf nodes: {}\n", tree.leaves().count());
|
||||
|
||||
Ok(tree)
|
||||
}
|
||||
|
||||
/// Runs the insertion benchmark for the [`Smt`].
|
||||
pub fn insertion(tree: &mut Smt, size: u64) -> Result<(), MerkleError> {
|
||||
pub fn insertion(tree: &mut Smt) -> Result<(), MerkleError> {
|
||||
const NUM_INSERTIONS: usize = 1_000;
|
||||
|
||||
println!("Running an insertion benchmark:");
|
||||
|
||||
let size = tree.num_leaves();
|
||||
let mut insertion_times = Vec::new();
|
||||
|
||||
for i in 0..20 {
|
||||
for i in 0..NUM_INSERTIONS {
|
||||
let test_key = Rpo256::hash(&rand_value::<u64>().to_be_bytes());
|
||||
let test_value = [ONE, ONE, ONE, Felt::new(size + i)];
|
||||
let test_value = [ONE, ONE, ONE, Felt::new((size + i) as u64)];
|
||||
|
||||
let now = Instant::now();
|
||||
tree.insert(test_key, test_value);
|
||||
let elapsed = now.elapsed();
|
||||
insertion_times.push(elapsed.as_secs_f32());
|
||||
insertion_times.push(elapsed.as_micros());
|
||||
}
|
||||
|
||||
println!(
|
||||
"An average insertion time measured by 20 inserts into a SMT with {} key-value pairs is {:.3} milliseconds\n",
|
||||
size,
|
||||
// calculate the average by dividing by 20 and convert to milliseconds by multiplying by
|
||||
// 1000. As a result, we can only multiply by 50
|
||||
insertion_times.iter().sum::<f32>() * 50f32,
|
||||
"An average insertion time measured by {NUM_INSERTIONS} inserts into an SMT with {size} leaves is {:.0} μs\n",
|
||||
// calculate the average
|
||||
insertion_times.iter().sum::<u128>() as f64 / (NUM_INSERTIONS as f64),
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn batched_insertion(tree: &mut Smt, size: u64) -> Result<(), MerkleError> {
|
||||
pub fn batched_insertion(tree: &mut Smt) -> Result<(), MerkleError> {
|
||||
const NUM_INSERTIONS: usize = 1_000;
|
||||
|
||||
println!("Running a batched insertion benchmark:");
|
||||
|
||||
let new_pairs: Vec<(RpoDigest, Word)> = (0..1000)
|
||||
let size = tree.num_leaves();
|
||||
|
||||
let new_pairs: Vec<(RpoDigest, Word)> = (0..NUM_INSERTIONS)
|
||||
.map(|i| {
|
||||
let key = Rpo256::hash(&rand_value::<u64>().to_be_bytes());
|
||||
let value = [ONE, ONE, ONE, Felt::new(size + i)];
|
||||
let value = [ONE, ONE, ONE, Felt::new((size + i) as u64)];
|
||||
(key, value)
|
||||
})
|
||||
.collect();
|
||||
|
||||
let now = Instant::now();
|
||||
let mutations = tree.compute_mutations(new_pairs);
|
||||
let compute_elapsed = now.elapsed();
|
||||
let compute_elapsed = now.elapsed().as_secs_f64() * 1000_f64; // time in ms
|
||||
|
||||
let now = Instant::now();
|
||||
tree.apply_mutations(mutations).unwrap();
|
||||
let apply_elapsed = now.elapsed();
|
||||
tree.apply_mutations(mutations)?;
|
||||
let apply_elapsed = now.elapsed().as_secs_f64() * 1000_f64; // time in ms
|
||||
|
||||
println!(
|
||||
"An average batch computation time measured by a 1k-batch into an SMT with {} key-value pairs over {:.3} milliseconds is {:.3} milliseconds",
|
||||
size,
|
||||
compute_elapsed.as_secs_f32() * 1000f32,
|
||||
// Dividing by the number of iterations, 1000, and then multiplying by 1000 to get
|
||||
// milliseconds, cancels out.
|
||||
compute_elapsed.as_secs_f32(),
|
||||
"An average insert-batch computation time measured by a {NUM_INSERTIONS}-batch into an SMT with {size} leaves over {:.1} ms is {:.0} μs",
|
||||
compute_elapsed,
|
||||
compute_elapsed * 1000_f64 / NUM_INSERTIONS as f64, // time in μs
|
||||
);
|
||||
|
||||
println!(
|
||||
"An average batch application time measured by a 1k-batch into an SMT with {} key-value pairs over {:.3} milliseconds is {:.3} milliseconds",
|
||||
size,
|
||||
apply_elapsed.as_secs_f32() * 1000f32,
|
||||
// Dividing by the number of iterations, 1000, and then multiplying by 1000 to get
|
||||
// milliseconds, cancels out.
|
||||
apply_elapsed.as_secs_f32(),
|
||||
"An average insert-batch application time measured by a {NUM_INSERTIONS}-batch into an SMT with {size} leaves over {:.1} ms is {:.0} μs",
|
||||
apply_elapsed,
|
||||
apply_elapsed * 1000_f64 / NUM_INSERTIONS as f64, // time in μs
|
||||
);
|
||||
|
||||
println!(
|
||||
"An average batch insertion time measured by a 1k-batch into an SMT with {} key-value pairs totals to {:.3} milliseconds",
|
||||
size,
|
||||
(compute_elapsed + apply_elapsed).as_secs_f32() * 1000f32,
|
||||
"An average batch insertion time measured by a 1k-batch into an SMT with {size} leaves totals to {:.1} ms",
|
||||
(compute_elapsed + apply_elapsed),
|
||||
);
|
||||
|
||||
println!();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn batched_update(tree: &mut Smt, entries: Vec<(RpoDigest, Word)>) -> Result<(), MerkleError> {
|
||||
const NUM_UPDATES: usize = 1_000;
|
||||
const REMOVAL_PROBABILITY: f64 = 0.2;
|
||||
|
||||
println!("Running a batched update benchmark:");
|
||||
|
||||
let size = tree.num_leaves();
|
||||
let mut rng = thread_rng();
|
||||
|
||||
let new_pairs =
|
||||
entries
|
||||
.into_iter()
|
||||
.choose_multiple(&mut rng, NUM_UPDATES)
|
||||
.into_iter()
|
||||
.map(|(key, _)| {
|
||||
let value = if rng.gen_bool(REMOVAL_PROBABILITY) {
|
||||
EMPTY_WORD
|
||||
} else {
|
||||
[ONE, ONE, ONE, Felt::new(rng.gen())]
|
||||
};
|
||||
|
||||
(key, value)
|
||||
});
|
||||
|
||||
assert_eq!(new_pairs.len(), NUM_UPDATES);
|
||||
|
||||
let now = Instant::now();
|
||||
let mutations = tree.compute_mutations(new_pairs);
|
||||
let compute_elapsed = now.elapsed().as_secs_f64() * 1000_f64; // time in ms
|
||||
|
||||
let now = Instant::now();
|
||||
tree.apply_mutations(mutations)?;
|
||||
let apply_elapsed = now.elapsed().as_secs_f64() * 1000_f64; // time in ms
|
||||
|
||||
println!(
|
||||
"An average update-batch computation time measured by a {NUM_UPDATES}-batch into an SMT with {size} leaves over {:.1} ms is {:.0} μs",
|
||||
compute_elapsed,
|
||||
compute_elapsed * 1000_f64 / NUM_UPDATES as f64, // time in μs
|
||||
);
|
||||
|
||||
println!(
|
||||
"An average update-batch application time measured by a {NUM_UPDATES}-batch into an SMT with {size} leaves over {:.1} ms is {:.0} μs",
|
||||
apply_elapsed,
|
||||
apply_elapsed * 1000_f64 / NUM_UPDATES as f64, // time in μs
|
||||
);
|
||||
|
||||
println!(
|
||||
"An average batch update time measured by a 1k-batch into an SMT with {size} leaves totals to {:.1} ms",
|
||||
(compute_elapsed + apply_elapsed),
|
||||
);
|
||||
|
||||
println!();
|
||||
@@ -132,28 +184,28 @@ pub fn batched_insertion(tree: &mut Smt, size: u64) -> Result<(), MerkleError> {
|
||||
}
|
||||
|
||||
/// Runs the proof generation benchmark for the [`Smt`].
|
||||
pub fn proof_generation(tree: &mut Smt, size: u64) -> Result<(), MerkleError> {
|
||||
pub fn proof_generation(tree: &mut Smt) -> Result<(), MerkleError> {
|
||||
const NUM_PROOFS: usize = 100;
|
||||
|
||||
println!("Running a proof generation benchmark:");
|
||||
|
||||
let mut insertion_times = Vec::new();
|
||||
let size = tree.num_leaves();
|
||||
|
||||
for i in 0..20 {
|
||||
for i in 0..NUM_PROOFS {
|
||||
let test_key = Rpo256::hash(&rand_value::<u64>().to_be_bytes());
|
||||
let test_value = [ONE, ONE, ONE, Felt::new(size + i)];
|
||||
let test_value = [ONE, ONE, ONE, Felt::new((size + i) as u64)];
|
||||
tree.insert(test_key, test_value);
|
||||
|
||||
let now = Instant::now();
|
||||
let _proof = tree.open(&test_key);
|
||||
let elapsed = now.elapsed();
|
||||
insertion_times.push(elapsed.as_secs_f32());
|
||||
insertion_times.push(now.elapsed().as_micros());
|
||||
}
|
||||
|
||||
println!(
|
||||
"An average proving time measured by 20 value proofs in a SMT with {} key-value pairs in {:.3} microseconds",
|
||||
size,
|
||||
// calculate the average by dividing by 20 and convert to microseconds by multiplying by
|
||||
// 1000000. As a result, we can only multiply by 50000
|
||||
insertion_times.iter().sum::<f32>() * 50000f32,
|
||||
"An average proving time measured by {NUM_PROOFS} value proofs in an SMT with {size} leaves in {:.0} μs",
|
||||
// calculate the average
|
||||
insertion_times.iter().sum::<u128>() as f64 / (NUM_PROOFS as f64),
|
||||
);
|
||||
|
||||
Ok(())
|
||||
|
||||
@@ -136,7 +136,7 @@ impl NodeIndex {
|
||||
self.value
|
||||
}
|
||||
|
||||
/// Returns true if the current instance points to a right sibling node.
|
||||
/// Returns `true` if the current instance points to a right sibling node.
|
||||
pub const fn is_value_odd(&self) -> bool {
|
||||
(self.value & 1) == 1
|
||||
}
|
||||
|
||||
@@ -270,7 +270,7 @@ pub fn path_to_text(path: &MerklePath) -> Result<String, fmt::Error> {
|
||||
}
|
||||
|
||||
// remove the last ", "
|
||||
if path.len() != 0 {
|
||||
if !path.is_empty() {
|
||||
s.pop();
|
||||
s.pop();
|
||||
}
|
||||
|
||||
@@ -303,7 +303,7 @@ impl PartialMmr {
|
||||
|
||||
if leaf_pos + 1 == self.forest
|
||||
&& path.depth() == 0
|
||||
&& self.peaks.last().map_or(false, |v| *v == leaf)
|
||||
&& self.peaks.last().is_some_and(|v| *v == leaf)
|
||||
{
|
||||
self.track_latest = true;
|
||||
return Ok(());
|
||||
|
||||
@@ -24,8 +24,8 @@ mod smt;
|
||||
#[cfg(feature = "internal")]
|
||||
pub use smt::build_subtree_for_bench;
|
||||
pub use smt::{
|
||||
LeafIndex, MutationSet, SimpleSmt, Smt, SmtLeaf, SmtLeafError, SmtProof, SmtProofError,
|
||||
SubtreeLeaf, SMT_DEPTH, SMT_MAX_DEPTH, SMT_MIN_DEPTH,
|
||||
InnerNode, LeafIndex, MutationSet, NodeMutation, SimpleSmt, Smt, SmtLeaf, SmtLeafError,
|
||||
SmtProof, SmtProofError, SubtreeLeaf, SMT_DEPTH, SMT_MAX_DEPTH, SMT_MIN_DEPTH,
|
||||
};
|
||||
|
||||
mod mmr;
|
||||
|
||||
@@ -3,6 +3,7 @@ use super::RpoDigest;
|
||||
/// Representation of a node with two children used for iterating over containers.
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))]
|
||||
#[cfg_attr(test, derive(PartialOrd, Ord))]
|
||||
pub struct InnerNodeInfo {
|
||||
pub value: RpoDigest,
|
||||
pub left: RpoDigest,
|
||||
|
||||
@@ -46,7 +46,7 @@ impl SmtLeaf {
|
||||
let leaf = Self::new_multiple(entries)?;
|
||||
|
||||
// `new_multiple()` checked that all keys map to the same leaf index. We still need
|
||||
// to ensure that that leaf index is `leaf_index`.
|
||||
// to ensure that leaf index is `leaf_index`.
|
||||
if leaf.index() != leaf_index {
|
||||
Err(SmtLeafError::InconsistentMultipleLeafIndices {
|
||||
leaf_index_from_keys: leaf.index(),
|
||||
@@ -70,7 +70,7 @@ impl SmtLeaf {
|
||||
Self::Single((key, value))
|
||||
}
|
||||
|
||||
/// Returns a new single leaf with the specified entry. The leaf index is derived from the
|
||||
/// Returns a new multiple leaf with the specified entries. The leaf index is derived from the
|
||||
/// entries' keys.
|
||||
///
|
||||
/// # Errors
|
||||
|
||||
@@ -1,12 +1,8 @@
|
||||
use alloc::{
|
||||
collections::{BTreeMap, BTreeSet},
|
||||
string::ToString,
|
||||
vec::Vec,
|
||||
};
|
||||
use alloc::{collections::BTreeSet, string::ToString, vec::Vec};
|
||||
|
||||
use super::{
|
||||
EmptySubtreeRoots, Felt, InnerNode, InnerNodeInfo, LeafIndex, MerkleError, MerklePath,
|
||||
MutationSet, NodeIndex, Rpo256, RpoDigest, SparseMerkleTree, Word, EMPTY_WORD,
|
||||
EmptySubtreeRoots, Felt, InnerNode, InnerNodeInfo, InnerNodes, LeafIndex, MerkleError,
|
||||
MerklePath, MutationSet, NodeIndex, Rpo256, RpoDigest, SparseMerkleTree, Word, EMPTY_WORD,
|
||||
};
|
||||
|
||||
mod error;
|
||||
@@ -30,6 +26,8 @@ pub const SMT_DEPTH: u8 = 64;
|
||||
// SMT
|
||||
// ================================================================================================
|
||||
|
||||
type Leaves = super::Leaves<SmtLeaf>;
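// Note (assumption, not shown in this diff): `InnerNodes` and `Leaves` are expected to be
// aliases over the crate's `UnorderedMap`, which is hashmap-backed when the `smt-hashmaps`
// feature is enabled and BTreeMap-backed otherwise.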
|
||||
|
||||
/// Sparse Merkle tree mapping 256-bit keys to 256-bit values. Both keys and values are represented
|
||||
/// by 4 field elements.
|
||||
///
|
||||
@@ -43,8 +41,8 @@ pub const SMT_DEPTH: u8 = 64;
|
||||
#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))]
|
||||
pub struct Smt {
|
||||
root: RpoDigest,
|
||||
leaves: BTreeMap<u64, SmtLeaf>,
|
||||
inner_nodes: BTreeMap<NodeIndex, InnerNode>,
|
||||
inner_nodes: InnerNodes,
|
||||
leaves: Leaves,
|
||||
}
|
||||
|
||||
impl Smt {
|
||||
@@ -64,8 +62,8 @@ impl Smt {
|
||||
|
||||
Self {
|
||||
root,
|
||||
leaves: BTreeMap::new(),
|
||||
inner_nodes: BTreeMap::new(),
|
||||
inner_nodes: Default::default(),
|
||||
leaves: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -148,11 +146,7 @@ impl Smt {
|
||||
/// # Panics
|
||||
/// With debug assertions on, this function panics if `root` does not match the root node in
|
||||
/// `inner_nodes`.
|
||||
pub fn from_raw_parts(
|
||||
inner_nodes: BTreeMap<NodeIndex, InnerNode>,
|
||||
leaves: BTreeMap<u64, SmtLeaf>,
|
||||
root: RpoDigest,
|
||||
) -> Self {
|
||||
pub fn from_raw_parts(inner_nodes: InnerNodes, leaves: Leaves, root: RpoDigest) -> Self {
|
||||
// Our particular implementation of `from_raw_parts()` never returns `Err`.
|
||||
<Self as SparseMerkleTree<SMT_DEPTH>>::from_raw_parts(inner_nodes, leaves, root).unwrap()
|
||||
}
|
||||
@@ -170,6 +164,11 @@ impl Smt {
|
||||
<Self as SparseMerkleTree<SMT_DEPTH>>::root(self)
|
||||
}
|
||||
|
||||
/// Returns the number of non-empty leaves in this tree.
|
||||
pub fn num_leaves(&self) -> usize {
|
||||
self.leaves.len()
|
||||
}
|
||||
|
||||
/// Returns the leaf to which `key` maps
|
||||
pub fn get_leaf(&self, key: &RpoDigest) -> SmtLeaf {
|
||||
<Self as SparseMerkleTree<SMT_DEPTH>>::get_leaf(self, key)
|
||||
@@ -256,7 +255,7 @@ impl Smt {
|
||||
<Self as SparseMerkleTree<SMT_DEPTH>>::compute_mutations(self, kv_pairs)
|
||||
}
|
||||
|
||||
/// Apply the prospective mutations computed with [`Smt::compute_mutations()`] to this tree.
|
||||
/// Applies the prospective mutations computed with [`Smt::compute_mutations()`] to this tree.
|
||||
///
|
||||
/// # Errors
|
||||
/// If `mutations` was computed on a tree with a different root than this one, returns
|
||||
@@ -270,6 +269,23 @@ impl Smt {
|
||||
<Self as SparseMerkleTree<SMT_DEPTH>>::apply_mutations(self, mutations)
|
||||
}
|
||||
|
||||
/// Applies the prospective mutations computed with [`Smt::compute_mutations()`] to this tree
|
||||
/// and returns the reverse mutation set.
|
||||
///
|
||||
/// Applying the reverse mutation set to the updated tree will revert the changes.
|
||||
///
|
||||
/// # Errors
|
||||
/// If `mutations` was computed on a tree with a different root than this one, returns
|
||||
/// [`MerkleError::ConflictingRoots`] with a two-item [`Vec`]. The first item is the root hash
|
||||
/// the `mutations` were computed against, and the second item is the actual current root of
|
||||
/// this tree.
|
||||
pub fn apply_mutations_with_reversion(
|
||||
&mut self,
|
||||
mutations: MutationSet<SMT_DEPTH, RpoDigest, Word>,
|
||||
) -> Result<MutationSet<SMT_DEPTH, RpoDigest, Word>, MerkleError> {
|
||||
<Self as SparseMerkleTree<SMT_DEPTH>>::apply_mutations_with_reversion(self, mutations)
|
||||
}
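// Illustrative usage (sketch, not part of this impl): the returned reversion set has the same
// type as a regular mutation set, so it can later be passed to `apply_mutations()` to roll the
// tree back to its pre-mutation state:
//
//     let mutations = smt.compute_mutations(vec![(key, EMPTY_WORD)]);
//     let revert = smt.apply_mutations_with_reversion(mutations)?;
//     // ... later, undo the change:
//     smt.apply_mutations(revert)?;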
|
||||
|
||||
// HELPERS
|
||||
// --------------------------------------------------------------------------------------------
|
||||
|
||||
@@ -317,8 +333,8 @@ impl SparseMerkleTree<SMT_DEPTH> for Smt {
|
||||
const EMPTY_ROOT: RpoDigest = *EmptySubtreeRoots::entry(SMT_DEPTH, 0);
|
||||
|
||||
fn from_raw_parts(
|
||||
inner_nodes: BTreeMap<NodeIndex, InnerNode>,
|
||||
leaves: BTreeMap<u64, SmtLeaf>,
|
||||
inner_nodes: InnerNodes,
|
||||
leaves: Leaves,
|
||||
root: RpoDigest,
|
||||
) -> Result<Self, MerkleError> {
|
||||
if cfg!(debug_assertions) {
|
||||
@@ -344,12 +360,12 @@ impl SparseMerkleTree<SMT_DEPTH> for Smt {
|
||||
.unwrap_or_else(|| EmptySubtreeRoots::get_inner_node(SMT_DEPTH, index.depth()))
|
||||
}
|
||||
|
||||
fn insert_inner_node(&mut self, index: NodeIndex, inner_node: InnerNode) {
|
||||
self.inner_nodes.insert(index, inner_node);
|
||||
fn insert_inner_node(&mut self, index: NodeIndex, inner_node: InnerNode) -> Option<InnerNode> {
|
||||
self.inner_nodes.insert(index, inner_node)
|
||||
}
|
||||
|
||||
fn remove_inner_node(&mut self, index: NodeIndex) {
|
||||
let _ = self.inner_nodes.remove(&index);
|
||||
fn remove_inner_node(&mut self, index: NodeIndex) -> Option<InnerNode> {
|
||||
self.inner_nodes.remove(&index)
|
||||
}
|
||||
|
||||
fn insert_value(&mut self, key: Self::Key, value: Self::Value) -> Option<Self::Value> {
|
||||
|
||||
@@ -2,11 +2,13 @@ use alloc::vec::Vec;

use super::{Felt, LeafIndex, NodeIndex, Rpo256, RpoDigest, Smt, SmtLeaf, EMPTY_WORD, SMT_DEPTH};
use crate::{
merkle::{smt::SparseMerkleTree, EmptySubtreeRoots, MerkleStore},
merkle::{
smt::{NodeMutation, SparseMerkleTree, UnorderedMap},
EmptySubtreeRoots, MerkleStore, MutationSet,
},
utils::{Deserializable, Serializable},
Word, ONE, WORD_SIZE,
};

// SMT
// --------------------------------------------------------------------------------------------

@@ -412,21 +414,49 @@ fn test_prospective_insertion() {

let mutations = smt.compute_mutations(vec![(key_1, value_1)]);
assert_eq!(mutations.root(), root_1, "prospective root 1 did not match actual root 1");
smt.apply_mutations(mutations).unwrap();
let revert = apply_mutations(&mut smt, mutations);
assert_eq!(smt.root(), root_1, "mutations before and after apply did not match");
assert_eq!(revert.old_root, smt.root(), "reverse mutations old root did not match");
assert_eq!(revert.root(), root_empty, "reverse mutations new root did not match");
assert_eq!(
revert.new_pairs,
UnorderedMap::from_iter([(key_1, EMPTY_WORD)]),
"reverse mutations pairs did not match"
);
assert_eq!(
revert.node_mutations,
smt.inner_nodes.keys().map(|key| (*key, NodeMutation::Removal)).collect(),
"reverse mutations inner nodes did not match"
);

let mutations = smt.compute_mutations(vec![(key_2, value_2)]);
assert_eq!(mutations.root(), root_2, "prospective root 2 did not match actual root 2");
let mutations =
smt.compute_mutations(vec![(key_3, EMPTY_WORD), (key_2, value_2), (key_3, value_3)]);
assert_eq!(mutations.root(), root_3, "mutations before and after apply did not match");
smt.apply_mutations(mutations).unwrap();
let old_root = smt.root();
let revert = apply_mutations(&mut smt, mutations);
assert_eq!(revert.old_root, smt.root(), "reverse mutations old root did not match");
assert_eq!(revert.root(), old_root, "reverse mutations new root did not match");
assert_eq!(
revert.new_pairs,
UnorderedMap::from_iter([(key_2, EMPTY_WORD), (key_3, EMPTY_WORD)]),
"reverse mutations pairs did not match"
);

// Edge case: multiple values at the same key, where a later pair restores the original value.
let mutations = smt.compute_mutations(vec![(key_3, EMPTY_WORD), (key_3, value_3)]);
assert_eq!(mutations.root(), root_3);
smt.apply_mutations(mutations).unwrap();
let old_root = smt.root();
let revert = apply_mutations(&mut smt, mutations);
assert_eq!(smt.root(), root_3);
assert_eq!(revert.old_root, smt.root(), "reverse mutations old root did not match");
assert_eq!(revert.root(), old_root, "reverse mutations new root did not match");
assert_eq!(
revert.new_pairs,
UnorderedMap::from_iter([(key_3, value_3)]),
"reverse mutations pairs did not match"
);

// Test batch updates, and that the order doesn't matter.
let pairs =
@@ -437,8 +467,16 @@ fn test_prospective_insertion() {
root_empty,
"prospective root for batch removal did not match actual root",
);
smt.apply_mutations(mutations).unwrap();
let old_root = smt.root();
let revert = apply_mutations(&mut smt, mutations);
assert_eq!(smt.root(), root_empty, "mutations before and after apply did not match");
assert_eq!(revert.old_root, smt.root(), "reverse mutations old root did not match");
assert_eq!(revert.root(), old_root, "reverse mutations new root did not match");
assert_eq!(
revert.new_pairs,
UnorderedMap::from_iter([(key_1, value_1), (key_2, value_2), (key_3, value_3)]),
"reverse mutations pairs did not match"
);

let pairs = vec![(key_3, value_3), (key_1, value_1), (key_2, value_2)];
let mutations = smt.compute_mutations(pairs);
@@ -447,6 +485,72 @@ fn test_prospective_insertion() {
assert_eq!(smt.root(), root_3);
}

#[test]
fn test_mutations_revert() {
let mut smt = Smt::default();

let key_1: RpoDigest = RpoDigest::from([ONE, ONE, ONE, Felt::new(1)]);
let key_2: RpoDigest =
RpoDigest::from([2_u32.into(), 2_u32.into(), 2_u32.into(), Felt::new(2)]);
let key_3: RpoDigest =
RpoDigest::from([0_u32.into(), 0_u32.into(), 0_u32.into(), Felt::new(3)]);

let value_1 = [ONE; WORD_SIZE];
let value_2 = [2_u32.into(); WORD_SIZE];
let value_3 = [3_u32.into(); WORD_SIZE];

smt.insert(key_1, value_1);
smt.insert(key_2, value_2);

let mutations =
smt.compute_mutations(vec![(key_1, EMPTY_WORD), (key_2, value_1), (key_3, value_3)]);

let original = smt.clone();

let revert = smt.apply_mutations_with_reversion(mutations).unwrap();
assert_eq!(revert.old_root, smt.root(), "reverse mutations old root did not match");
assert_eq!(revert.root(), original.root(), "reverse mutations new root did not match");

smt.apply_mutations(revert).unwrap();

assert_eq!(smt, original, "SMT with applied revert mutations did not match original SMT");
}

#[test]
fn test_mutation_set_serialization() {
let mut smt = Smt::default();

let key_1: RpoDigest = RpoDigest::from([ONE, ONE, ONE, Felt::new(1)]);
let key_2: RpoDigest =
RpoDigest::from([2_u32.into(), 2_u32.into(), 2_u32.into(), Felt::new(2)]);
let key_3: RpoDigest =
RpoDigest::from([0_u32.into(), 0_u32.into(), 0_u32.into(), Felt::new(3)]);

let value_1 = [ONE; WORD_SIZE];
let value_2 = [2_u32.into(); WORD_SIZE];
let value_3 = [3_u32.into(); WORD_SIZE];

smt.insert(key_1, value_1);
smt.insert(key_2, value_2);

let mutations =
smt.compute_mutations(vec![(key_1, EMPTY_WORD), (key_2, value_1), (key_3, value_3)]);

let serialized = mutations.to_bytes();
let deserialized =
MutationSet::<SMT_DEPTH, RpoDigest, Word>::read_from_bytes(&serialized).unwrap();

assert_eq!(deserialized, mutations, "deserialized mutations did not match original");

let revert = smt.apply_mutations_with_reversion(mutations).unwrap();

let serialized = revert.to_bytes();
let deserialized =
MutationSet::<SMT_DEPTH, RpoDigest, Word>::read_from_bytes(&serialized).unwrap();

assert_eq!(deserialized, revert, "deserialized mutations did not match original");
}

/// Tests that 2 key-value pairs stored in the same leaf have the same path
#[test]
fn test_smt_path_to_keys_in_same_leaf_are_equal() {
@@ -499,21 +603,21 @@ fn test_smt_get_value() {
/// Tests that `entries()` works as expected
#[test]
fn test_smt_entries() {
let key_1: RpoDigest = RpoDigest::from([ONE, ONE, ONE, ONE]);
let key_2: RpoDigest = RpoDigest::from([2_u32, 2_u32, 2_u32, 2_u32]);
let key_1 = RpoDigest::from([ONE, ONE, ONE, ONE]);
let key_2 = RpoDigest::from([2_u32, 2_u32, 2_u32, 2_u32]);

let value_1 = [ONE; WORD_SIZE];
let value_2 = [2_u32.into(); WORD_SIZE];
let entries = [(key_1, value_1), (key_2, value_2)];

let smt = Smt::with_entries([(key_1, value_1), (key_2, value_2)]).unwrap();
let smt = Smt::with_entries(entries).unwrap();

let mut entries = smt.entries();
let mut expected = Vec::from_iter(entries);
expected.sort_by_key(|(k, _)| *k);
let mut actual: Vec<_> = smt.entries().cloned().collect();
actual.sort_by_key(|(k, _)| *k);

// Note: for simplicity, we assume the order `(k1,v1), (k2,v2)`. If a new implementation
// switches the order, it is OK to modify the order here as well.
assert_eq!(&(key_1, value_1), entries.next().unwrap());
assert_eq!(&(key_2, value_2), entries.next().unwrap());
assert!(entries.next().is_none());
assert_eq!(actual, expected);
}

/// Tests that `EMPTY_ROOT` constant generated in the `Smt` equals to the root of the empty tree of
@@ -602,3 +706,19 @@ fn build_multiple_leaf_node(kv_pairs: &[(RpoDigest, Word)]) -> RpoDigest {

Rpo256::hash_elements(&elements)
}

/// Applies mutations with and without reversion to the given SMT, comparing resulting SMTs,
/// returning mutation set for reversion.
fn apply_mutations(
smt: &mut Smt,
mutation_set: MutationSet<SMT_DEPTH, RpoDigest, Word>,
) -> MutationSet<SMT_DEPTH, RpoDigest, Word> {
let mut smt2 = smt.clone();

let reversion = smt.apply_mutations_with_reversion(mutation_set.clone()).unwrap();
smt2.apply_mutations(mutation_set).unwrap();

assert_eq!(&smt2, smt);

reversion
}
@@ -1,7 +1,8 @@
use alloc::{collections::BTreeMap, vec::Vec};
use core::mem;
use core::{hash::Hash, mem};

use num::Integer;
use winter_utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable};

use super::{EmptySubtreeRoots, InnerNodeInfo, MerkleError, MerklePath, NodeIndex};
use crate::{
@@ -27,6 +28,15 @@ pub const SMT_MAX_DEPTH: u8 = 64;
// SPARSE MERKLE TREE
// ================================================================================================

/// A map whose keys are not guaranteed to be ordered.
#[cfg(feature = "smt_hashmaps")]
|
||||
type UnorderedMap<K, V> = hashbrown::HashMap<K, V>;
|
||||
#[cfg(not(feature = "smt_hashmaps"))]
|
||||
type UnorderedMap<K, V> = alloc::collections::BTreeMap<K, V>;
|
||||
type InnerNodes = UnorderedMap<NodeIndex, InnerNode>;
|
||||
type Leaves<T> = UnorderedMap<u64, T>;
|
||||
type NodeMutations = UnorderedMap<NodeIndex, NodeMutation>;
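Because `UnorderedMap` resolves to `hashbrown::HashMap` when the `smt_hashmaps` feature is enabled, callers must not rely on any iteration order. A small standalone sketch (illustrative only, not from the crate) of the sort-before-compare pattern that the updated `test_smt_entries` earlier in this diff also uses:

```rust
use std::collections::{BTreeMap, HashMap};

// Collect and sort before comparing, so the same check works for either map back-end.
fn sorted_entries<K: Ord + Clone, V: Clone>(map: &HashMap<K, V>) -> Vec<(K, V)> {
    let mut entries: Vec<_> = map.iter().map(|(k, v)| (k.clone(), v.clone())).collect();
    entries.sort_by(|a, b| a.0.cmp(&b.0));
    entries
}

fn main() {
    let unordered = HashMap::from([(2_u64, "b"), (1_u64, "a")]);
    let ordered = BTreeMap::from([(2_u64, "b"), (1_u64, "a")]);
    assert_eq!(sorted_entries(&unordered), ordered.into_iter().collect::<Vec<_>>());
}
```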

/// An abstract description of a sparse Merkle tree.
///
/// A sparse Merkle tree is a key-value map which also supports proving that a given value is indeed
@@ -43,12 +53,12 @@ pub const SMT_MAX_DEPTH: u8 = 64;
/// Every key maps to one leaf. If there are as many keys as there are leaves, then
/// [Self::Leaf] should be the same type as [Self::Value], as is the case with
/// [crate::merkle::SimpleSmt]. However, if there are more keys than leaves, then [`Self::Leaf`]
/// must accomodate all keys that map to the same leaf.
/// must accommodate all keys that map to the same leaf.
///
/// [SparseMerkleTree] currently doesn't support optimizations that compress Merkle proofs.
pub(crate) trait SparseMerkleTree<const DEPTH: u8> {
/// The type for a key
type Key: Clone + Ord;
type Key: Clone + Ord + Eq + Hash;
/// The type for a value
type Value: Clone + PartialEq;
/// The type for a leaf
@@ -147,9 +157,9 @@ pub(crate) trait SparseMerkleTree<const DEPTH: u8> {
node_hash = Rpo256::merge(&[left, right]);

if node_hash == *EmptySubtreeRoots::entry(DEPTH, node_depth) {
// If a subtree is empty, when can remove the inner node, since it's equal to the
// If a subtree is empty, then can remove the inner node, since it's equal to the
// default value
self.remove_inner_node(index)
self.remove_inner_node(index);
} else {
self.insert_inner_node(index, InnerNode { left, right });
}
@@ -172,8 +182,8 @@ pub(crate) trait SparseMerkleTree<const DEPTH: u8> {
use NodeMutation::*;

let mut new_root = self.root();
let mut new_pairs: BTreeMap<Self::Key, Self::Value> = Default::default();
let mut node_mutations: BTreeMap<NodeIndex, NodeMutation> = Default::default();
let mut new_pairs: UnorderedMap<Self::Key, Self::Value> = Default::default();
let mut node_mutations: NodeMutations = Default::default();

for (key, value) in kv_pairs {
// If the old value and the new value are the same, there is nothing to update.
@@ -255,7 +265,7 @@ pub(crate) trait SparseMerkleTree<const DEPTH: u8> {
}
}

/// Apply the prospective mutations computed with [`SparseMerkleTree::compute_mutations()`] to
/// Applies the prospective mutations computed with [`SparseMerkleTree::compute_mutations()`] to
/// this tree.
///
/// # Errors
@@ -289,8 +299,12 @@ pub(crate) trait SparseMerkleTree<const DEPTH: u8> {

for (index, mutation) in node_mutations {
match mutation {
Removal => self.remove_inner_node(index),
Addition(node) => self.insert_inner_node(index, node),
Removal => {
self.remove_inner_node(index);
},
Addition(node) => {
self.insert_inner_node(index, node);
},
}
}

@@ -303,14 +317,84 @@ pub(crate) trait SparseMerkleTree<const DEPTH: u8> {
Ok(())
}

/// Applies the prospective mutations computed with [`SparseMerkleTree::compute_mutations()`] to
/// this tree and returns the reverse mutation set. Applying the reverse mutation sets to the
/// updated tree will revert the changes.
///
/// # Errors
/// If `mutations` was computed on a tree with a different root than this one, returns
/// [`MerkleError::ConflictingRoots`] with a two-item [`Vec`]. The first item is the root hash
/// the `mutations` were computed against, and the second item is the actual current root of
/// this tree.
fn apply_mutations_with_reversion(
&mut self,
mutations: MutationSet<DEPTH, Self::Key, Self::Value>,
) -> Result<MutationSet<DEPTH, Self::Key, Self::Value>, MerkleError>
where
Self: Sized,
{
use NodeMutation::*;
let MutationSet {
old_root,
node_mutations,
new_pairs,
new_root,
} = mutations;

// Guard against accidentally trying to apply mutations that were computed against a
// different tree, including a stale version of this tree.
if old_root != self.root() {
return Err(MerkleError::ConflictingRoots {
expected_root: self.root(),
actual_root: old_root,
});
}

let mut reverse_mutations = NodeMutations::new();
for (index, mutation) in node_mutations {
match mutation {
Removal => {
if let Some(node) = self.remove_inner_node(index) {
reverse_mutations.insert(index, Addition(node));
}
},
Addition(node) => {
if let Some(old_node) = self.insert_inner_node(index, node) {
reverse_mutations.insert(index, Addition(old_node));
} else {
reverse_mutations.insert(index, Removal);
}
},
}
}

let mut reverse_pairs = UnorderedMap::new();
for (key, value) in new_pairs {
if let Some(old_value) = self.insert_value(key.clone(), value) {
reverse_pairs.insert(key, old_value);
} else {
reverse_pairs.insert(key, Self::EMPTY_VALUE);
}
}

self.set_root(new_root);

Ok(MutationSet {
old_root: new_root,
node_mutations: reverse_mutations,
new_pairs: reverse_pairs,
new_root: old_root,
})
}
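The reverse set above is derived entirely from the `Option` values returned by `remove_inner_node` and `insert_inner_node` (which is why their signatures change in this commit): an addition over an existing node reverses to restoring the old node, an addition into an empty slot reverses to a removal, and a removal of an existing node reverses to re-adding it. A standalone sketch of the same rule on a plain `BTreeMap` (illustrative only; the `Mutation` type here is hypothetical, not the crate's):

```rust
use std::collections::BTreeMap;

#[derive(Debug, PartialEq)]
enum Mutation<V> {
    Removal,
    Addition(V),
}

// Apply one mutation and return the mutation that undoes it, if one is needed.
fn apply_with_reversion<K: Ord, V>(
    map: &mut BTreeMap<K, V>,
    key: K,
    mutation: Mutation<V>,
) -> Option<Mutation<V>> {
    match mutation {
        // Removing an existing entry reverses to re-adding it; removing a missing
        // entry needs no reverse entry.
        Mutation::Removal => map.remove(&key).map(Mutation::Addition),
        // Overwriting an entry reverses to restoring the old value; inserting into
        // an empty slot reverses to a removal.
        Mutation::Addition(value) => Some(match map.insert(key, value) {
            Some(old) => Mutation::Addition(old),
            None => Mutation::Removal,
        }),
    }
}

fn main() {
    let mut map = BTreeMap::from([(1_u64, "a")]);
    let reverse = apply_with_reversion(&mut map, 1, Mutation::Addition("b")).unwrap();
    assert_eq!(reverse, Mutation::Addition("a"));
    apply_with_reversion(&mut map, 1, reverse);
    assert_eq!(map.get(&1), Some(&"a"));
}
```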

// REQUIRED METHODS
// ---------------------------------------------------------------------------------------------

/// Construct this type from already computed leaves and nodes. The caller ensures passed
/// arguments are correct and consistent with each other.
fn from_raw_parts(
inner_nodes: BTreeMap<NodeIndex, InnerNode>,
leaves: BTreeMap<u64, Self::Leaf>,
inner_nodes: InnerNodes,
leaves: Leaves<Self::Leaf>,
root: RpoDigest,
) -> Result<Self, MerkleError>
where
@@ -326,10 +410,10 @@ pub(crate) trait SparseMerkleTree<const DEPTH: u8> {
fn get_inner_node(&self, index: NodeIndex) -> InnerNode;

/// Inserts an inner node at the given index
fn insert_inner_node(&mut self, index: NodeIndex, inner_node: InnerNode);
fn insert_inner_node(&mut self, index: NodeIndex, inner_node: InnerNode) -> Option<InnerNode>;

/// Removes an inner node at the given index
fn remove_inner_node(&mut self, index: NodeIndex);
fn remove_inner_node(&mut self, index: NodeIndex) -> Option<InnerNode>;

/// Inserts a leaf node, and returns the value at the key if already exists
fn insert_value(&mut self, key: Self::Key, value: Self::Value) -> Option<Self::Value>;
@@ -441,7 +525,7 @@ pub(crate) trait SparseMerkleTree<const DEPTH: u8> {
#[cfg(feature = "concurrent")]
fn build_subtrees(
mut entries: Vec<(Self::Key, Self::Value)>,
) -> (BTreeMap<NodeIndex, InnerNode>, BTreeMap<u64, Self::Leaf>) {
) -> (InnerNodes, Leaves<Self::Leaf>) {
entries.sort_by_key(|item| {
let index = Self::key_to_leaf_index(&item.0);
index.value()
@@ -456,10 +540,10 @@ pub(crate) trait SparseMerkleTree<const DEPTH: u8> {
#[cfg(feature = "concurrent")]
fn build_subtrees_from_sorted_entries(
entries: Vec<(Self::Key, Self::Value)>,
) -> (BTreeMap<NodeIndex, InnerNode>, BTreeMap<u64, Self::Leaf>) {
) -> (InnerNodes, Leaves<Self::Leaf>) {
use rayon::prelude::*;

let mut accumulated_nodes: BTreeMap<NodeIndex, InnerNode> = Default::default();
let mut accumulated_nodes: InnerNodes = Default::default();

let PairComputations {
leaves: mut leaf_subtrees,
@@ -559,25 +643,37 @@ impl<const DEPTH: u8> TryFrom<NodeIndex> for LeafIndex<DEPTH> {
}
}

impl<const DEPTH: u8> Serializable for LeafIndex<DEPTH> {
fn write_into<W: ByteWriter>(&self, target: &mut W) {
self.index.write_into(target);
}
}

impl<const DEPTH: u8> Deserializable for LeafIndex<DEPTH> {
fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
Ok(Self { index: source.read()? })
}
}

// MUTATIONS
// ================================================================================================

/// A change to an inner node of a [`SparseMerkleTree`] that hasn't yet been applied.
/// A change to an inner node of a sparse Merkle tree that hasn't yet been applied.
/// [`MutationSet`] stores this type in relation to a [`NodeIndex`] to keep track of what changes
/// need to occur at which node indices.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) enum NodeMutation {
/// Corresponds to [`SparseMerkleTree::remove_inner_node()`].
pub enum NodeMutation {
/// Node needs to be removed.
Removal,
/// Corresponds to [`SparseMerkleTree::insert_inner_node()`].
/// Node needs to be inserted.
Addition(InnerNode),
}

/// Represents a group of prospective mutations to a `SparseMerkleTree`, created by
/// `SparseMerkleTree::compute_mutations()`, and that can be applied with
/// `SparseMerkleTree::apply_mutations()`.
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct MutationSet<const DEPTH: u8, K, V> {
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct MutationSet<const DEPTH: u8, K: Eq + Hash, V> {
/// The root of the Merkle tree this MutationSet is for, recorded at the time
/// [`SparseMerkleTree::compute_mutations()`] was called. Exists to guard against applying
/// mutations to the wrong tree or applying stale mutations to a tree that has since changed.
@@ -587,27 +683,147 @@ pub struct MutationSet<const DEPTH: u8, K, V> {
/// index overlayed, if any. Each [`NodeMutation::Addition`] corresponds to a
/// [`SparseMerkleTree::insert_inner_node()`] call, and each [`NodeMutation::Removal`]
/// corresponds to a [`SparseMerkleTree::remove_inner_node()`] call.
node_mutations: BTreeMap<NodeIndex, NodeMutation>,
node_mutations: NodeMutations,
/// The set of top-level key-value pairs we're prospectively adding to the tree, including
/// adding empty values. The "effective" value for a key is the value in this BTreeMap, falling
/// back to the existing value in the Merkle tree. Each entry corresponds to a
/// [`SparseMerkleTree::insert_value()`] call.
new_pairs: BTreeMap<K, V>,
new_pairs: UnorderedMap<K, V>,
/// The calculated root for the Merkle tree, given these mutations. Publicly retrievable with
/// [`MutationSet::root()`]. Corresponds to a [`SparseMerkleTree::set_root()`]. call.
new_root: RpoDigest,
}

impl<const DEPTH: u8, K, V> MutationSet<DEPTH, K, V> {
/// Queries the root that was calculated during `SparseMerkleTree::compute_mutations()`. See
impl<const DEPTH: u8, K: Eq + Hash, V> MutationSet<DEPTH, K, V> {
/// Returns the SMT root that was calculated during `SparseMerkleTree::compute_mutations()`. See
/// that method for more information.
pub fn root(&self) -> RpoDigest {
self.new_root
}

/// Returns the SMT root before the mutations were applied.
pub fn old_root(&self) -> RpoDigest {
self.old_root
}

/// Returns the set of inner nodes that need to be removed or added.
pub fn node_mutations(&self) -> &NodeMutations {
&self.node_mutations
}

/// Returns the set of top-level key-value pairs that need to be added, updated or deleted
/// (i.e. set to `EMPTY_WORD`).
pub fn new_pairs(&self) -> &UnorderedMap<K, V> {
&self.new_pairs
}
}
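For context, a hedged sketch of how a caller might inspect a prospective update through these new accessors before applying it. It assumes `Smt` and `NodeMutation` are re-exported from `miden_crypto::merkle` (NodeMutation was made public in #367) and that `RpoDigest`/`Word` live at their usual paths; adjust if the crate layout differs.

```rust
use miden_crypto::{
    hash::rpo::RpoDigest,
    merkle::{NodeMutation, Smt},
    Word,
};

// Report what a batch of key-value updates would change, without mutating the tree.
fn describe_update(smt: &Smt, updates: Vec<(RpoDigest, Word)>) {
    let mutations = smt.compute_mutations(updates);
    assert_eq!(mutations.old_root(), smt.root());

    let additions = mutations
        .node_mutations()
        .values()
        .filter(|m| matches!(m, NodeMutation::Addition(_)))
        .count();
    let removals = mutations.node_mutations().len() - additions;

    println!("prospective root: {:?}", mutations.root());
    println!("key-value pairs touched: {}", mutations.new_pairs().len());
    println!("inner nodes added/updated: {additions}, removed: {removals}");
}
```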

// SERIALIZATION
// ================================================================================================

impl Serializable for InnerNode {
fn write_into<W: ByteWriter>(&self, target: &mut W) {
target.write(self.left);
target.write(self.right);
}
}

impl Deserializable for InnerNode {
fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
let left = source.read()?;
let right = source.read()?;

Ok(Self { left, right })
}
}

impl Serializable for NodeMutation {
fn write_into<W: ByteWriter>(&self, target: &mut W) {
match self {
NodeMutation::Removal => target.write_bool(false),
NodeMutation::Addition(inner_node) => {
target.write_bool(true);
inner_node.write_into(target);
},
}
}
}

impl Deserializable for NodeMutation {
fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
if source.read_bool()? {
let inner_node = source.read()?;
return Ok(NodeMutation::Addition(inner_node));
}

Ok(NodeMutation::Removal)
}
}

impl<const DEPTH: u8, K: Serializable + Eq + Hash, V: Serializable> Serializable
for MutationSet<DEPTH, K, V>
{
fn write_into<W: ByteWriter>(&self, target: &mut W) {
target.write(self.old_root);
target.write(self.new_root);

let inner_removals: Vec<_> = self
.node_mutations
.iter()
.filter(|(_, value)| matches!(value, NodeMutation::Removal))
.map(|(key, _)| key)
.collect();
let inner_additions: Vec<_> = self
.node_mutations
.iter()
.filter_map(|(key, value)| match value {
NodeMutation::Addition(node) => Some((key, node)),
_ => None,
})
.collect();

target.write(inner_removals);
target.write(inner_additions);

target.write_usize(self.new_pairs.len());
target.write_many(&self.new_pairs);
}
}

impl<const DEPTH: u8, K: Deserializable + Ord + Eq + Hash, V: Deserializable> Deserializable
for MutationSet<DEPTH, K, V>
{
fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
let old_root = source.read()?;
let new_root = source.read()?;

let inner_removals: Vec<NodeIndex> = source.read()?;
let inner_additions: Vec<(NodeIndex, InnerNode)> = source.read()?;

let node_mutations = NodeMutations::from_iter(
inner_removals.into_iter().map(|index| (index, NodeMutation::Removal)).chain(
inner_additions
.into_iter()
.map(|(index, node)| (index, NodeMutation::Addition(node))),
),
);

let num_new_pairs = source.read_usize()?;
let new_pairs = source.read_many(num_new_pairs)?;
let new_pairs = UnorderedMap::from_iter(new_pairs);

Ok(Self {
old_root,
node_mutations,
new_pairs,
new_root,
})
}
}

// SUBTREES
// ================================================================================================

/// A subtree is of depth 8.
const SUBTREE_DEPTH: u8 = 8;

@@ -627,10 +843,10 @@ pub struct SubtreeLeaf {
}

/// Helper struct to organize the return value of [`SparseMerkleTree::sorted_pairs_to_leaves()`].
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone)]
pub(crate) struct PairComputations<K, L> {
/// Literal leaves to be added to the sparse Merkle tree's internal mapping.
pub nodes: BTreeMap<K, L>,
pub nodes: UnorderedMap<K, L>,
/// "Conceptual" leaves that will be used for computations.
pub leaves: Vec<Vec<SubtreeLeaf>>,
}
@@ -658,7 +874,7 @@ impl<'s> SubtreeLeavesIter<'s> {
Self { leaves: leaves.drain(..).peekable() }
}
}
impl core::iter::Iterator for SubtreeLeavesIter<'_> {
impl Iterator for SubtreeLeavesIter<'_> {
type Item = Vec<SubtreeLeaf>;

/// Each `next()` collects an entire subtree.

@@ -1,11 +1,8 @@
use alloc::{
collections::{BTreeMap, BTreeSet},
vec::Vec,
};
use alloc::{collections::BTreeSet, vec::Vec};

use super::{
super::ValuePath, EmptySubtreeRoots, InnerNode, InnerNodeInfo, LeafIndex, MerkleError,
MerklePath, MutationSet, NodeIndex, RpoDigest, SparseMerkleTree, Word, EMPTY_WORD,
super::ValuePath, EmptySubtreeRoots, InnerNode, InnerNodeInfo, InnerNodes, LeafIndex,
MerkleError, MerklePath, MutationSet, NodeIndex, RpoDigest, SparseMerkleTree, Word, EMPTY_WORD,
SMT_MAX_DEPTH, SMT_MIN_DEPTH,
};

@@ -15,6 +12,8 @@ mod tests;
// SPARSE MERKLE TREE
// ================================================================================================

type Leaves = super::Leaves<Word>;

/// A sparse Merkle tree with 64-bit keys and 4-element leaf values, without compaction.
///
/// The root of the tree is recomputed on each new leaf update.
@@ -22,8 +21,8 @@ mod tests;
#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))]
pub struct SimpleSmt<const DEPTH: u8> {
root: RpoDigest,
leaves: BTreeMap<u64, Word>,
inner_nodes: BTreeMap<NodeIndex, InnerNode>,
inner_nodes: InnerNodes,
leaves: Leaves,
}

impl<const DEPTH: u8> SimpleSmt<DEPTH> {
@@ -54,8 +53,8 @@ impl<const DEPTH: u8> SimpleSmt<DEPTH> {

Ok(Self {
root,
leaves: BTreeMap::new(),
inner_nodes: BTreeMap::new(),
inner_nodes: Default::default(),
leaves: Default::default(),
})
}

@@ -108,11 +107,7 @@ impl<const DEPTH: u8> SimpleSmt<DEPTH> {
/// # Panics
/// With debug assertions on, this function panics if `root` does not match the root node in
/// `inner_nodes`.
pub fn from_raw_parts(
inner_nodes: BTreeMap<NodeIndex, InnerNode>,
leaves: BTreeMap<u64, Word>,
root: RpoDigest,
) -> Self {
pub fn from_raw_parts(inner_nodes: InnerNodes, leaves: Leaves, root: RpoDigest) -> Self {
// Our particular implementation of `from_raw_parts()` never returns `Err`.
<Self as SparseMerkleTree<DEPTH>>::from_raw_parts(inner_nodes, leaves, root).unwrap()
}
@@ -241,7 +236,7 @@ impl<const DEPTH: u8> SimpleSmt<DEPTH> {
<Self as SparseMerkleTree<DEPTH>>::compute_mutations(self, kv_pairs)
}

/// Apply the prospective mutations computed with [`SimpleSmt::compute_mutations()`] to this
/// Applies the prospective mutations computed with [`SimpleSmt::compute_mutations()`] to this
/// tree.
///
/// # Errors
@@ -256,6 +251,23 @@ impl<const DEPTH: u8> SimpleSmt<DEPTH> {
<Self as SparseMerkleTree<DEPTH>>::apply_mutations(self, mutations)
}

/// Applies the prospective mutations computed with [`SimpleSmt::compute_mutations()`] to
/// this tree and returns the reverse mutation set.
///
/// Applying the reverse mutation sets to the updated tree will revert the changes.
///
/// # Errors
/// If `mutations` was computed on a tree with a different root than this one, returns
/// [`MerkleError::ConflictingRoots`] with a two-item [`alloc::vec::Vec`]. The first item is the
/// root hash the `mutations` were computed against, and the second item is the actual
/// current root of this tree.
pub fn apply_mutations_with_reversion(
&mut self,
mutations: MutationSet<DEPTH, LeafIndex<DEPTH>, Word>,
) -> Result<MutationSet<DEPTH, LeafIndex<DEPTH>, Word>, MerkleError> {
<Self as SparseMerkleTree<DEPTH>>::apply_mutations_with_reversion(self, mutations)
}

/// Inserts a subtree at the specified index. The depth at which the subtree is inserted is
/// computed as `DEPTH - SUBTREE_DEPTH`.
///
@@ -327,8 +339,8 @@ impl<const DEPTH: u8> SparseMerkleTree<DEPTH> for SimpleSmt<DEPTH> {
const EMPTY_ROOT: RpoDigest = *EmptySubtreeRoots::entry(DEPTH, 0);

fn from_raw_parts(
inner_nodes: BTreeMap<NodeIndex, InnerNode>,
leaves: BTreeMap<u64, Word>,
inner_nodes: InnerNodes,
leaves: Leaves,
root: RpoDigest,
) -> Result<Self, MerkleError> {
if cfg!(debug_assertions) {
@@ -354,12 +366,12 @@ impl<const DEPTH: u8> SparseMerkleTree<DEPTH> for SimpleSmt<DEPTH> {
.unwrap_or_else(|| EmptySubtreeRoots::get_inner_node(DEPTH, index.depth()))
}

fn insert_inner_node(&mut self, index: NodeIndex, inner_node: InnerNode) {
self.inner_nodes.insert(index, inner_node);
fn insert_inner_node(&mut self, index: NodeIndex, inner_node: InnerNode) -> Option<InnerNode> {
self.inner_nodes.insert(index, inner_node)
}

fn remove_inner_node(&mut self, index: NodeIndex) {
let _ = self.inner_nodes.remove(&index);
fn remove_inner_node(&mut self, index: NodeIndex) -> Option<InnerNode> {
self.inner_nodes.remove(&index)
}

fn insert_value(&mut self, key: LeafIndex<DEPTH>, value: Word) -> Option<Word> {

@@ -141,12 +141,15 @@ fn test_inner_node_iterator() -> Result<(), MerkleError> {
let l2n2 = tree.get_node(NodeIndex::make(2, 2))?;
let l2n3 = tree.get_node(NodeIndex::make(2, 3))?;

let nodes: Vec<InnerNodeInfo> = tree.inner_nodes().collect();
let expected = vec![
let mut nodes: Vec<InnerNodeInfo> = tree.inner_nodes().collect();
let mut expected = [
InnerNodeInfo { value: root, left: l1n0, right: l1n1 },
InnerNodeInfo { value: l1n0, left: l2n0, right: l2n1 },
InnerNodeInfo { value: l1n1, left: l2n2, right: l2n3 },
];
nodes.sort();
expected.sort();

assert_eq!(nodes, expected);

Ok(())

@@ -725,7 +725,7 @@ fn get_leaf_depth_works_with_depth_8() {
assert_eq!(8, store.get_leaf_depth(root, 8, k).unwrap());
}

// flip last bit of a and expect it to return the the same depth, but for an empty node
// flip last bit of a and expect it to return the same depth, but for an empty node
assert_eq!(8, store.get_leaf_depth(root, 8, 0b01101000_u64).unwrap());

// flip fourth bit of a and expect an empty node on depth 4

@@ -174,36 +174,6 @@ impl RandomCoin for RpoRandomCoin {

Ok(values)
}

fn reseed_with_salt(
&mut self,
data: <Self::Hasher as winter_crypto::Hasher>::Digest,
salt: Option<<Self::Hasher as winter_crypto::Hasher>::Digest>,
) {
// Reset buffer
self.current = RATE_START;

// Add the new seed material to the first half of the rate portion of the RPO state
let data: Word = data.into();

self.state[RATE_START] += data[0];
self.state[RATE_START + 1] += data[1];
self.state[RATE_START + 2] += data[2];
self.state[RATE_START + 3] += data[3];

if let Some(salt) = salt {
// Add the salt to the second half of the rate portion of the RPO state
let data: Word = salt.into();

self.state[RATE_START + 4] += data[0];
self.state[RATE_START + 5] += data[1];
self.state[RATE_START + 6] += data[2];
self.state[RATE_START + 7] += data[3];
}

// Absorb
Rpo256::apply_permutation(&mut self.state);
}
}

// FELT RNG IMPLEMENTATION

@@ -172,36 +172,6 @@ impl RandomCoin for RpxRandomCoin {

Ok(values)
}

fn reseed_with_salt(
&mut self,
data: <Self::Hasher as winter_crypto::Hasher>::Digest,
salt: Option<<Self::Hasher as winter_crypto::Hasher>::Digest>,
) {
// Reset buffer
self.current = RATE_START;

// Add the new seed material to the first half of the rate portion of the RPO state
let data: Word = data.into();

self.state[RATE_START] += data[0];
self.state[RATE_START + 1] += data[1];
self.state[RATE_START + 2] += data[2];
self.state[RATE_START + 3] += data[3];

if let Some(salt) = salt {
// Add the salt to the second half of the rate portion of the RPO state
let data: Word = salt.into();

self.state[RATE_START + 4] += data[0];
self.state[RATE_START + 5] += data[1];
self.state[RATE_START + 6] += data[2];
self.state[RATE_START + 7] += data[3];
}

// Absorb
Rpx256::apply_permutation(&mut self.state);
}
}

// FELT RNG IMPLEMENTATION
